Rollup merge of #58322 - taiki-e:librustc_codegen_ssa-2018, r=petrochenkov

librustc_codegen_ssa => 2018

Transitions `librustc_codegen_ssa` to Rust 2018; cc #58099

r? @petrochenkov
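
For context before the diff: a minimal sketch (invented crate layout, not code from this PR) of the mechanical change an edition bump involves, matching the `edition = "2018"` additions and `extern crate` removals visible in the hunks below.

```rust
// Rust 2015 shape of a crate root:
//     extern crate rustc_data_structures;   // external dependency declared explicitly
//     use helpers::load;                    // in-crate path written from the root
//
// Rust 2018 shape (with `edition = "2018"` in Cargo.toml): no `extern crate`
// lines, and in-crate paths use an explicit `crate::` prefix.
mod helpers {
    pub fn load() -> u32 {
        42
    }
}

use crate::helpers::load; // `crate::` path; external crates need no `extern crate`

fn main() {
    println!("{}", load());
}
```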
diff --git a/Cargo.lock b/Cargo.lock
index f135dd0..a36df24 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,3 +1,5 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
 [[package]]
 name = "adler32"
 version = "1.0.3"
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index 8540d92..f512e1d 100644
--- a/src/bootstrap/builder.rs
+++ b/src/bootstrap/builder.rs
@@ -400,6 +400,7 @@
                 test::TheBook,
                 test::UnstableBook,
                 test::RustcBook,
+                test::EmbeddedBook,
                 test::Rustfmt,
                 test::Miri,
                 test::Clippy,
diff --git a/src/doc/embedded-book b/src/doc/embedded-book
index d663113..bd2778f 160000
--- a/src/doc/embedded-book
+++ b/src/doc/embedded-book
@@ -1 +1 @@
-Subproject commit d663113d1d9fbd35f1145c29f6080a6350b7f419
+Subproject commit bd2778f304989ee52be8201504d6ec621dd60ca9
diff --git a/src/doc/rustdoc/src/unstable-features.md b/src/doc/rustdoc/src/unstable-features.md
index d3eb8cb..3463cdb 100644
--- a/src/doc/rustdoc/src/unstable-features.md
+++ b/src/doc/rustdoc/src/unstable-features.md
@@ -1,9 +1,8 @@
 # Unstable features
 
 Rustdoc is under active development, and like the Rust compiler, some features are only available
-on the nightly releases. Some of these are new and need some more testing before they're able to get
-released to the world at large, and some of them are tied to features in the Rust compiler that are
-themselves unstable. Several features here require a matching `#![feature(...)]` attribute to
+on nightly releases. Some of these features are new and need some more testing before they're able to be
+released to the world at large, and some of them are tied to features in the Rust compiler that are unstable. Several features here require a matching `#![feature(...)]` attribute to
 enable, and thus are more fully documented in the [Unstable Book]. Those sections will link over
 there as necessary.
 
@@ -428,4 +427,4 @@
 
 This flag allows you to keep doctest executables around after they're compiled or run.
 Usually, rustdoc will immediately discard a compiled doctest after it's been tested, but
-with this option, you can keep those binaries around for farther testing.
\ No newline at end of file
+with this option, you can keep those binaries around for further testing.
diff --git a/src/libarena/Cargo.toml b/src/libarena/Cargo.toml
index e2af67d..82fc64b 100644
--- a/src/libarena/Cargo.toml
+++ b/src/libarena/Cargo.toml
@@ -2,6 +2,7 @@
 authors = ["The Rust Project Developers"]
 name = "arena"
 version = "0.0.0"
+edition = "2018"
 
 [lib]
 name = "arena"
@@ -9,4 +10,4 @@
 crate-type = ["dylib"]
 
 [dependencies]
-rustc_data_structures = { path = "../librustc_data_structures" }
\ No newline at end of file
+rustc_data_structures = { path = "../librustc_data_structures" }
diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs
index aa522d8..8ae046c 100644
--- a/src/libarena/lib.rs
+++ b/src/libarena/lib.rs
@@ -11,17 +11,17 @@
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/",
        test(no_crate_inject, attr(deny(warnings))))]
 
+#![deny(rust_2018_idioms)]
+
 #![feature(alloc)]
 #![feature(core_intrinsics)]
 #![feature(dropck_eyepatch)]
-#![feature(nll)]
 #![feature(raw_vec_internals)]
 #![cfg_attr(test, feature(test))]
 
 #![allow(deprecated)]
 
 extern crate alloc;
-extern crate rustc_data_structures;
 
 use rustc_data_structures::sync::MTLock;
 
@@ -476,7 +476,7 @@
 #[cfg(test)]
 mod tests {
     extern crate test;
-    use self::test::Bencher;
+    use test::Bencher;
     use super::TypedArena;
     use std::cell::Cell;
 
@@ -511,15 +511,15 @@
 
         impl<'a> Wrap<'a> {
             fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
-                let r: &EI = self.0.alloc(EI::I(f()));
+                let r: &EI<'_> = self.0.alloc(EI::I(f()));
                 if let &EI::I(ref i) = r {
                     i
                 } else {
                     panic!("mismatch");
                 }
             }
-            fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
-                let r: &EI = self.0.alloc(EI::O(f()));
+            fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer<'_> {
+                let r: &EI<'_> = self.0.alloc(EI::O(f()));
                 if let &EI::O(ref o) = r {
                     o
                 } else {
@@ -609,7 +609,7 @@
         count: &'a Cell<u32>,
     }
 
-    impl<'a> Drop for DropCounter<'a> {
+    impl Drop for DropCounter<'_> {
         fn drop(&mut self) {
             self.count.set(self.count.get() + 1);
         }
@@ -619,7 +619,7 @@
     fn test_typed_arena_drop_count() {
         let counter = Cell::new(0);
         {
-            let arena: TypedArena<DropCounter> = TypedArena::default();
+            let arena: TypedArena<DropCounter<'_>> = TypedArena::default();
             for _ in 0..100 {
                 // Allocate something with drop glue to make sure it doesn't leak.
                 arena.alloc(DropCounter { count: &counter });
@@ -631,7 +631,7 @@
     #[test]
     fn test_typed_arena_drop_on_clear() {
         let counter = Cell::new(0);
-        let mut arena: TypedArena<DropCounter> = TypedArena::default();
+        let mut arena: TypedArena<DropCounter<'_>> = TypedArena::default();
         for i in 0..10 {
             for _ in 0..100 {
                 // Allocate something with drop glue to make sure it doesn't leak.
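
The test changes above (`impl Drop for DropCounter<'_>`, `TypedArena<DropCounter<'_>>`) use the anonymous lifetime that the `rust_2018_idioms` lint group encourages. A self-contained sketch of the same before/after shape, using only the standard library:

```rust
use std::cell::Cell;

struct DropCounter<'a> {
    count: &'a Cell<u32>,
}

// 2015 style would be `impl<'a> Drop for DropCounter<'a>`, introducing a
// lifetime parameter that is never otherwise used; `'_` makes the elision
// explicit instead.
impl Drop for DropCounter<'_> {
    fn drop(&mut self) {
        self.count.set(self.count.get() + 1);
    }
}

fn main() {
    let counter = Cell::new(0);
    {
        // Type annotations likewise spell the elided lifetime as `'_`.
        let _guard: DropCounter<'_> = DropCounter { count: &counter };
    }
    assert_eq!(counter.get(), 1);
}
```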
diff --git a/src/libcore/convert.rs b/src/libcore/convert.rs
index 203be54..d4a1d15 100644
--- a/src/libcore/convert.rs
+++ b/src/libcore/convert.rs
@@ -17,7 +17,10 @@
 //! [`TryFrom<T>`][`TryFrom`] rather than [`Into<U>`][`Into`] or [`TryInto<U>`][`TryInto`],
 //! as [`From`] and [`TryFrom`] provide greater flexibility and offer
 //! equivalent [`Into`] or [`TryInto`] implementations for free, thanks to a
-//! blanket implementation in the standard library.
+//! blanket implementation in the standard library.  However, there are some cases
+//! where this is not possible, such as creating conversions into a type defined
+//! outside your library, so implementing [`Into`] instead of [`From`] is
+//! sometimes necessary.
 //!
 //! # Generic Implementations
 //!
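
To make the amended paragraph concrete, here is a small sketch of implementing `Into` directly because the conversion target is a foreign type. `Wrapper` is invented for illustration; note that the coherence restriction motivating the paragraph applied to some `From` impls at the time and was later relaxed.

```rust
// `Wrapper` is a local type and `Vec<u8>` stands in for "a type defined
// outside your library". At the time of this change, some
// `impl From<LocalType> for ForeignType` impls were rejected by the coherence
// rules, so implementing `Into` directly was the documented fallback.
struct Wrapper(u8);

impl Into<Vec<u8>> for Wrapper {
    fn into(self) -> Vec<u8> {
        vec![self.0]
    }
}

fn main() {
    // Only the `Into` direction exists here: callers write `.into()` (or take
    // `impl Into<Vec<u8>>`); `Vec::<u8>::from(Wrapper(7))` would not compile
    // with this definition.
    let bytes: Vec<u8> = Wrapper(7).into();
    assert_eq!(bytes, vec![7]);
}
```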
diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs
index 78f1c3c..8b728c9 100644
--- a/src/libcore/lib.rs
+++ b/src/libcore/lib.rs
@@ -24,7 +24,7 @@
 //!   often generated by LLVM. Additionally, this library can make explicit
 //!   calls to these functions. Their signatures are the same as found in C.
 //!   These functions are often provided by the system libc, but can also be
-//!   provided by the [rlibc crate](https://crates.io/crates/rlibc).
+//!   provided by the [compiler-builtins crate](https://crates.io/crates/compiler_builtins).
 //!
 //! * `rust_begin_panic` - This function takes four arguments, a
 //!   `fmt::Arguments`, a `&'static str`, and two `u32`'s. These four arguments
diff --git a/src/libcore/macros.rs b/src/libcore/macros.rs
index 12b7adb..664490c 100644
--- a/src/libcore/macros.rs
+++ b/src/libcore/macros.rs
@@ -555,12 +555,12 @@
 #[macro_export]
 #[unstable(feature = "maybe_uninit", issue = "53491")]
 macro_rules! uninitialized_array {
-    // This `into_inner` is safe because an array of `MaybeUninit` does not
+    // This `into_initialized` is safe because an array of `MaybeUninit` does not
     // require initialization.
     // FIXME(#49147): Could be replaced by an array initializer, once those can
     // be any const expression.
     ($t:ty; $size:expr) => (unsafe {
-        MaybeUninit::<[MaybeUninit<$t>; $size]>::uninitialized().into_inner()
+        MaybeUninit::<[MaybeUninit<$t>; $size]>::uninitialized().into_initialized()
     });
 }
 
diff --git a/src/libcore/mem.rs b/src/libcore/mem.rs
index 6dad32f..2f86e13 100644
--- a/src/libcore/mem.rs
+++ b/src/libcore/mem.rs
@@ -1035,7 +1035,42 @@
     }
 }
 
-/// A newtype to construct uninitialized instances of `T`
+/// A newtype to construct uninitialized instances of `T`.
+///
+/// The compiler, in general, assumes that variables are properly initialized
+/// according to the requirements of their type.  For example, a variable of
+/// reference type must be aligned and non-null.  This is an invariant that
+/// must *always* be upheld, even in unsafe code.  As a consequence,
+/// 0-initializing a variable of reference type causes immediate undefined
+/// behavior, no matter whether that reference ever gets used to access memory:
+/// ```rust,no_run
+/// use std::mem;
+///
+/// let x: &i32 = unsafe { mem::zeroed() }; // undefined behavior!
+/// ```
+/// This is exploited by the compiler for various optimizations, such as eliding
+/// run-time checks and optimizing `enum` layout.
+///
+/// Not initializing memory at all (instead of 0-initializing it) causes the same
+/// issue: after all, the initial value of the variable might just happen to be
+/// one that violates the invariant.
+///
+/// `MaybeUninit` serves to enable unsafe code to deal with uninitialized data:
+/// it is a signal to the compiler indicating that the data here might *not*
+/// be initialized:
+/// ```rust
+/// #![feature(maybe_uninit)]
+/// use std::mem::MaybeUninit;
+///
+/// // Create an explicitly uninitialized reference.
+/// let mut x = MaybeUninit::<&i32>::uninitialized();
+/// // Set it to a valid value.
+/// x.set(&0);
+/// // Extract the initialized data -- this is only allowed *after* properly
+/// // initializing `x`!
+/// let x = unsafe { x.into_initialized() };
+/// ```
+/// The compiler then knows not to make any incorrect assumptions or optimizations on this code.
 #[allow(missing_debug_implementations)]
 #[unstable(feature = "maybe_uninit", issue = "53491")]
 // NOTE after stabilizing `MaybeUninit` proceed to deprecate `mem::{uninitialized,zeroed}`
@@ -1084,11 +1119,14 @@
     }
 
     /// Set the value of the `MaybeUninit`. This overwrites any previous value without dropping it.
+    /// For your convenience, this also returns a mutable reference to the (now
+    /// safely initialized) content of `self`.
     #[unstable(feature = "maybe_uninit", issue = "53491")]
     #[inline(always)]
-    pub fn set(&mut self, val: T) {
+    pub fn set(&mut self, val: T) -> &mut T {
         unsafe {
             self.value = ManuallyDrop::new(val);
+            self.get_mut()
         }
     }
 
@@ -1102,11 +1140,19 @@
     /// state, otherwise this will immediately cause undefined behavior.
     #[unstable(feature = "maybe_uninit", issue = "53491")]
     #[inline(always)]
-    pub unsafe fn into_inner(self) -> T {
+    pub unsafe fn into_initialized(self) -> T {
         intrinsics::panic_if_uninhabited::<T>();
         ManuallyDrop::into_inner(self.value)
     }
 
+    /// Deprecated alternative to `into_initialized`.  Will never get stabilized.
+    /// Exists only to transition stdsimd to `into_initialized`.
+    #[inline(always)]
+    #[allow(unused)]
+    pub(crate) unsafe fn into_inner(self) -> T {
+        self.into_initialized()
+    }
+
     /// Get a reference to the contained value.
     ///
     /// # Unsafety
@@ -1134,16 +1180,16 @@
         &mut *self.value
     }
 
-    /// Get a pointer to the contained value. Reading from this pointer will be undefined
-    /// behavior unless the `MaybeUninit` is initialized.
+    /// Get a pointer to the contained value. Reading from this pointer or turning it
+    /// into a reference will be undefined behavior unless the `MaybeUninit` is initialized.
     #[unstable(feature = "maybe_uninit", issue = "53491")]
     #[inline(always)]
     pub fn as_ptr(&self) -> *const T {
         unsafe { &*self.value as *const T }
     }
 
-    /// Get a mutable pointer to the contained value. Reading from this pointer will be undefined
-    /// behavior unless the `MaybeUninit` is initialized.
+    /// Get a mutable pointer to the contained value. Reading from this pointer or turning it
+    /// into a reference will be undefined behavior unless the `MaybeUninit` is initialized.
     #[unstable(feature = "maybe_uninit", issue = "53491")]
     #[inline(always)]
     pub fn as_mut_ptr(&mut self) -> *mut T {
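
Putting the `mem.rs` changes together, the renamed `into_initialized` and the `&mut T` now returned by `set`, here is a short usage sketch against the unstable API exactly as it stands in this diff (nightly-only behind `#![feature(maybe_uninit)]`; these names did not survive to stabilization).

```rust
#![feature(maybe_uninit)]
use std::mem::MaybeUninit;

fn main() {
    let mut x = MaybeUninit::<Vec<u32>>::uninitialized();
    // `set` now hands back a mutable reference to the freshly written value,
    // so the initialized contents can be touched without an unsafe block.
    x.set(vec![1, 2, 3]).push(4);
    // Only after initialization is extracting the value sound; the rename from
    // `into_inner` to `into_initialized` is meant to make that reading obvious.
    let v = unsafe { x.into_initialized() };
    assert_eq!(v, vec![1, 2, 3, 4]);
}
```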
diff --git a/src/libcore/ops/function.rs b/src/libcore/ops/function.rs
index 3a1d765..c69f5fd 100644
--- a/src/libcore/ops/function.rs
+++ b/src/libcore/ops/function.rs
@@ -62,7 +62,7 @@
     label="expected an `Fn<{Args}>` closure, found `{Self}`",
 )]
 #[fundamental] // so that regex can rely that `&str: !FnMut`
-#[must_use]
+#[must_use = "closures are lazy and do nothing unless called"]
 pub trait Fn<Args> : FnMut<Args> {
     /// Performs the call operation.
     #[unstable(feature = "fn_traits", issue = "29625")]
@@ -141,7 +141,7 @@
     label="expected an `FnMut<{Args}>` closure, found `{Self}`",
 )]
 #[fundamental] // so that regex can rely that `&str: !FnMut`
-#[must_use]
+#[must_use = "closures are lazy and do nothing unless called"]
 pub trait FnMut<Args> : FnOnce<Args> {
     /// Performs the call operation.
     #[unstable(feature = "fn_traits", issue = "29625")]
@@ -220,7 +220,7 @@
     label="expected an `FnOnce<{Args}>` closure, found `{Self}`",
 )]
 #[fundamental] // so that regex can rely that `&str: !FnMut`
-#[must_use]
+#[must_use = "closures are lazy and do nothing unless called"]
 pub trait FnOnce<Args> {
     /// The returned type after the call operator is used.
     #[stable(feature = "fn_once_output", since = "1.12.0")]
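
The only change in this file is the explanatory string attached to `#[must_use]`. A small sketch of the situation the message targets, a closure that is built but never called:

```rust
fn main() {
    let mut hits = 0;

    // Warns ("unused closure that must be used", with the new note that
    // closures are lazy and do nothing unless called): it is never invoked,
    // so `hits` never changes.
    || hits += 1;

    // Binding the closure and calling it uses the value and silences the warning.
    let mut bump = || hits += 1;
    bump();
    assert_eq!(hits, 1);
}
```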
diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs
index 02eef07..537aa92 100644
--- a/src/libcore/ptr.rs
+++ b/src/libcore/ptr.rs
@@ -573,7 +573,7 @@
 pub unsafe fn read<T>(src: *const T) -> T {
     let mut tmp = MaybeUninit::<T>::uninitialized();
     copy_nonoverlapping(src, tmp.as_mut_ptr(), 1);
-    tmp.into_inner()
+    tmp.into_initialized()
 }
 
 /// Reads the value from `src` without moving it. This leaves the
@@ -642,7 +642,7 @@
     copy_nonoverlapping(src as *const u8,
                         tmp.as_mut_ptr() as *mut u8,
                         mem::size_of::<T>());
-    tmp.into_inner()
+    tmp.into_initialized()
 }
 
 /// Overwrites a memory location with the given value without reading or
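
These two hunks only swap the internal `into_inner` call for the renamed `into_initialized`; the observable contract of `ptr::read` is unchanged. For reference, a typical (and correct) use of it:

```rust
use std::ptr;

fn main() {
    let src = String::from("hello");
    // `read` performs a bitwise copy out of `src` without dropping or moving it;
    // internally it copies into a `MaybeUninit<T>` buffer and then extracts the
    // now-initialized value, which is what the rename above is about.
    let duplicate = unsafe { ptr::read(&src) };
    assert_eq!(duplicate, "hello");
    // Both `src` and `duplicate` now own the same heap allocation, so one of
    // them must be forgotten to avoid a double free.
    std::mem::forget(src);
}
```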
diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs
index 663c408..e8c1cd3 100644
--- a/src/librustc/dep_graph/graph.rs
+++ b/src/librustc/dep_graph/graph.rs
@@ -79,6 +79,16 @@
     loaded_from_cache: Lock<FxHashMap<DepNodeIndex, bool>>,
 }
 
+pub fn hash_result<R>(hcx: &mut StableHashingContext<'_>, result: &R) -> Option<Fingerprint>
+where
+    R: for<'a> HashStable<StableHashingContext<'a>>,
+{
+    let mut stable_hasher = StableHasher::new();
+    result.hash_stable(hcx, &mut stable_hasher);
+
+    Some(stable_hasher.finish())
+}
+
 impl DepGraph {
 
     pub fn new(prev_graph: PreviousDepGraph,
@@ -178,14 +188,16 @@
     ///   `arg` parameter.
     ///
     /// [rustc guide]: https://rust-lang.github.io/rustc-guide/incremental-compilation.html
-    pub fn with_task<'gcx, C, A, R>(&self,
-                                   key: DepNode,
-                                   cx: C,
-                                   arg: A,
-                                   task: fn(C, A) -> R)
-                                   -> (R, DepNodeIndex)
-        where C: DepGraphSafe + StableHashingContextProvider<'gcx>,
-              R: HashStable<StableHashingContext<'gcx>>,
+    pub fn with_task<'a, C, A, R>(
+        &self,
+        key: DepNode,
+        cx: C,
+        arg: A,
+        task: fn(C, A) -> R,
+        hash_result: impl FnOnce(&mut StableHashingContext<'_>, &R) -> Option<Fingerprint>,
+    ) -> (R, DepNodeIndex)
+    where
+        C: DepGraphSafe + StableHashingContextProvider<'a>,
     {
         self.with_task_impl(key, cx, arg, false, task,
             |_key| Some(TaskDeps {
@@ -196,17 +208,18 @@
             }),
             |data, key, fingerprint, task| {
                 data.borrow_mut().complete_task(key, task.unwrap(), fingerprint)
-            })
+            },
+            hash_result)
     }
 
     /// Creates a new dep-graph input with value `input`
-    pub fn input_task<'gcx, C, R>(&self,
+    pub fn input_task<'a, C, R>(&self,
                                    key: DepNode,
                                    cx: C,
                                    input: R)
                                    -> (R, DepNodeIndex)
-        where C: DepGraphSafe + StableHashingContextProvider<'gcx>,
-              R: HashStable<StableHashingContext<'gcx>>,
+        where C: DepGraphSafe + StableHashingContextProvider<'a>,
+              R: for<'b> HashStable<StableHashingContext<'b>>,
     {
         fn identity_fn<C, A>(_: C, arg: A) -> A {
             arg
@@ -216,10 +229,11 @@
             |_| None,
             |data, key, fingerprint, _| {
                 data.borrow_mut().alloc_node(key, SmallVec::new(), fingerprint)
-            })
+            },
+            hash_result::<R>)
     }
 
-    fn with_task_impl<'gcx, C, A, R>(
+    fn with_task_impl<'a, C, A, R>(
         &self,
         key: DepNode,
         cx: C,
@@ -230,11 +244,11 @@
         finish_task_and_alloc_depnode: fn(&Lock<CurrentDepGraph>,
                                           DepNode,
                                           Fingerprint,
-                                          Option<TaskDeps>) -> DepNodeIndex
+                                          Option<TaskDeps>) -> DepNodeIndex,
+        hash_result: impl FnOnce(&mut StableHashingContext<'_>, &R) -> Option<Fingerprint>,
     ) -> (R, DepNodeIndex)
     where
-        C: DepGraphSafe + StableHashingContextProvider<'gcx>,
-        R: HashStable<StableHashingContext<'gcx>>,
+        C: DepGraphSafe + StableHashingContextProvider<'a>,
     {
         if let Some(ref data) = self.data {
             let task_deps = create_task(key).map(|deps| Lock::new(deps));
@@ -269,15 +283,12 @@
                 profq_msg(hcx.sess(), ProfileQueriesMsg::TaskEnd)
             };
 
-            let mut stable_hasher = StableHasher::new();
-            result.hash_stable(&mut hcx, &mut stable_hasher);
-
-            let current_fingerprint = stable_hasher.finish();
+            let current_fingerprint = hash_result(&mut hcx, &result);
 
             let dep_node_index = finish_task_and_alloc_depnode(
                 &data.current,
                 key,
-                current_fingerprint,
+                current_fingerprint.unwrap_or(Fingerprint::ZERO),
                 task_deps.map(|lock| lock.into_inner()),
             );
 
@@ -285,15 +296,20 @@
             if let Some(prev_index) = data.previous.node_to_index_opt(&key) {
                 let prev_fingerprint = data.previous.fingerprint_by_index(prev_index);
 
-                let color = if current_fingerprint == prev_fingerprint {
-                    DepNodeColor::Green(dep_node_index)
+                let color = if let Some(current_fingerprint) = current_fingerprint {
+                    if current_fingerprint == prev_fingerprint {
+                        DepNodeColor::Green(dep_node_index)
+                    } else {
+                        DepNodeColor::Red
+                    }
                 } else {
+                    // Mark the node as Red if we can't hash the result
                     DepNodeColor::Red
                 };
 
                 debug_assert!(data.colors.get(prev_index).is_none(),
-                              "DepGraph::with_task() - Duplicate DepNodeColor \
-                               insertion for {:?}", key);
+                            "DepGraph::with_task() - Duplicate DepNodeColor \
+                            insertion for {:?}", key);
 
                 data.colors.insert(prev_index, color);
             }
@@ -342,14 +358,16 @@
 
     /// Execute something within an "eval-always" task which is a task
     // that runs whenever anything changes.
-    pub fn with_eval_always_task<'gcx, C, A, R>(&self,
-                                   key: DepNode,
-                                   cx: C,
-                                   arg: A,
-                                   task: fn(C, A) -> R)
-                                   -> (R, DepNodeIndex)
-        where C: DepGraphSafe + StableHashingContextProvider<'gcx>,
-              R: HashStable<StableHashingContext<'gcx>>,
+    pub fn with_eval_always_task<'a, C, A, R>(
+        &self,
+        key: DepNode,
+        cx: C,
+        arg: A,
+        task: fn(C, A) -> R,
+        hash_result: impl FnOnce(&mut StableHashingContext<'_>, &R) -> Option<Fingerprint>,
+    ) -> (R, DepNodeIndex)
+    where
+        C: DepGraphSafe + StableHashingContextProvider<'a>,
     {
         self.with_task_impl(key, cx, arg, false, task,
             |_| None,
@@ -359,7 +377,8 @@
                     &DepNode::new_no_params(DepKind::Krate)
                 ];
                 current.alloc_node(key, smallvec![krate_idx], fingerprint)
-            })
+            },
+            hash_result)
     }
 
     #[inline]
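
The `with_task` family now threads a caller-supplied `hash_result` closure instead of requiring `R: HashStable`, and a `None` fingerprint forces the node to `Red`. The sketch below is a deliberately simplified standalone model of that control flow, not rustc's actual types or API.

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

#[derive(Debug, PartialEq)]
enum Color {
    Red,
    Green,
}

// Counterpart of the new `dep_graph::hash_result` helper: hash anything hashable.
fn hash_result<R: Hash>(result: &R) -> Option<u64> {
    let mut hasher = DefaultHasher::new();
    result.hash(&mut hasher);
    Some(hasher.finish())
}

// Counterpart of `with_task_impl`: run the task, then color the node by
// comparing the new fingerprint against the previous one.
fn with_task<A, R>(
    prev_fingerprint: u64,
    arg: A,
    task: impl FnOnce(A) -> R,
    hash_result: impl FnOnce(&R) -> Option<u64>,
) -> (R, Color) {
    let result = task(arg);
    let color = match hash_result(&result) {
        Some(current) if current == prev_fingerprint => Color::Green,
        // Results that cannot (or should not) be hashed are conservatively Red.
        _ => Color::Red,
    };
    (result, color)
}

fn main() {
    let prev = hash_result(&4u32).unwrap();

    // Hashable result with unchanged output: the node stays Green.
    let (_, color) = with_task(prev, 2u32, |n| n * 2, hash_result);
    assert_eq!(color, Color::Green);

    // Opting out of hashing, as queries with un-hashable results now can,
    // marks the node Red regardless of the output.
    let (_, color) = with_task(prev, 2u32, |n| n * 2, |_| None);
    assert_eq!(color, Color::Red);
}
```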
diff --git a/src/librustc/dep_graph/mod.rs b/src/librustc/dep_graph/mod.rs
index 022caab..b84d2ad 100644
--- a/src/librustc/dep_graph/mod.rs
+++ b/src/librustc/dep_graph/mod.rs
@@ -10,7 +10,7 @@
 
 pub use self::dep_tracking_map::{DepTrackingMap, DepTrackingMapConfig};
 pub use self::dep_node::{DepNode, DepKind, DepConstructor, WorkProductId, label_strs};
-pub use self::graph::{DepGraph, WorkProduct, DepNodeIndex, DepNodeColor, TaskDeps};
+pub use self::graph::{DepGraph, WorkProduct, DepNodeIndex, DepNodeColor, TaskDeps, hash_result};
 pub use self::graph::WorkProductFileKind;
 pub use self::prev::PreviousDepGraph;
 pub use self::query::DepGraphQuery;
diff --git a/src/librustc/hir/map/collector.rs b/src/librustc/hir/map/collector.rs
index f84bb77..37552f1 100644
--- a/src/librustc/hir/map/collector.rs
+++ b/src/librustc/hir/map/collector.rs
@@ -48,14 +48,14 @@
     hir_body_nodes: Vec<(DefPathHash, Fingerprint)>,
 }
 
-fn input_dep_node_and_hash<'a, I>(
+fn input_dep_node_and_hash<I>(
     dep_graph: &DepGraph,
-    hcx: &mut StableHashingContext<'a>,
+    hcx: &mut StableHashingContext<'_>,
     dep_node: DepNode,
     input: I,
 ) -> (DepNodeIndex, Fingerprint)
 where
-    I: HashStable<StableHashingContext<'a>>,
+    I: for<'a> HashStable<StableHashingContext<'a>>,
 {
     let dep_node_index = dep_graph.input_task(dep_node, &mut *hcx, &input).1;
 
@@ -70,15 +70,15 @@
     (dep_node_index, hash)
 }
 
-fn alloc_hir_dep_nodes<'a, I>(
+fn alloc_hir_dep_nodes<I>(
     dep_graph: &DepGraph,
-    hcx: &mut StableHashingContext<'a>,
+    hcx: &mut StableHashingContext<'_>,
     def_path_hash: DefPathHash,
     item_like: I,
     hir_body_nodes: &mut Vec<(DefPathHash, Fingerprint)>,
 ) -> (DepNodeIndex, DepNodeIndex)
 where
-    I: HashStable<StableHashingContext<'a>>,
+    I: for<'a> HashStable<StableHashingContext<'a>>,
 {
     let sig = dep_graph.input_task(
         def_path_hash.to_dep_node(DepKind::Hir),
@@ -286,7 +286,7 @@
         self.parent_node = parent_node;
     }
 
-    fn with_dep_node_owner<T: HashStable<StableHashingContext<'a>>,
+    fn with_dep_node_owner<T: for<'b> HashStable<StableHashingContext<'b>>,
                            F: FnOnce(&mut Self)>(&mut self,
                                                  dep_node_owner: DefIndex,
                                                  item_like: &T,
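
The bound change from `I: HashStable<StableHashingContext<'a>>` to `I: for<'a> HashStable<StableHashingContext<'a>>` is a higher-ranked trait bound: the callee may pick the context lifetime itself rather than inheriting one from the caller. A toy illustration with invented traits (nothing here is rustc's API):

```rust
struct Ctx<'a> {
    label: &'a str,
}

trait Describe<CTX> {
    fn describe(&self, ctx: &CTX) -> String;
}

struct Item(u32);

// A single impl generic over the context lifetime satisfies `for<'a> ...`.
impl<'a> Describe<Ctx<'a>> for Item {
    fn describe(&self, ctx: &Ctx<'a>) -> String {
        format!("{}: {}", ctx.label, self.0)
    }
}

// With the higher-ranked bound the function can build its own short-lived
// context instead of threading the caller's lifetime through its signature --
// the same reason the collector functions above dropped their `'a` parameter.
fn describe_with_local_ctx<T>(value: &T) -> String
where
    T: for<'a> Describe<Ctx<'a>>,
{
    let label = String::from("item");
    let ctx = Ctx { label: &label };
    value.describe(&ctx)
}

fn main() {
    assert_eq!(describe_with_local_ctx(&Item(7)), "item: 7");
}
```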
diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs
index 91fc126..2bf8218 100644
--- a/src/librustc/hir/map/mod.rs
+++ b/src/librustc/hir/map/mod.rs
@@ -371,8 +371,12 @@
                 let def_id = self.local_def_id(variant.node.data.id());
                 Some(Def::Variant(def_id))
             }
-            Node::Field(_) |
+            Node::StructCtor(variant) => {
+                let def_id = self.local_def_id(variant.id());
+                Some(Def::StructCtor(def_id, def::CtorKind::from_hir(variant)))
+            }
             Node::AnonConst(_) |
+            Node::Field(_) |
             Node::Expr(_) |
             Node::Stmt(_) |
             Node::PathSegment(_) |
@@ -380,7 +384,6 @@
             Node::TraitRef(_) |
             Node::Pat(_) |
             Node::Binding(_) |
-            Node::StructCtor(_) |
             Node::Lifetime(_) |
             Node::Visibility(_) |
             Node::Block(_) |
@@ -931,7 +934,9 @@
         }
     }
 
-    pub fn expect_variant_data(&self, id: NodeId) -> &'hir VariantData {
+    pub fn expect_variant_data(&self, id: HirId) -> &'hir VariantData {
+        let id = self.hir_to_node_id(id); // FIXME(@ljedrz): remove when possible
+
         match self.find(id) {
             Some(Node::Item(i)) => {
                 match i.node {
@@ -946,7 +951,9 @@
         }
     }
 
-    pub fn expect_variant(&self, id: NodeId) -> &'hir Variant {
+    pub fn expect_variant(&self, id: HirId) -> &'hir Variant {
+        let id = self.hir_to_node_id(id); // FIXME(@ljedrz): remove when possible
+
         match self.find(id) {
             Some(Node::Variant(variant)) => variant,
             _ => bug!("expected variant, found {}", self.node_to_string(id)),
diff --git a/src/librustc/infer/error_reporting/mod.rs b/src/librustc/infer/error_reporting/mod.rs
index 8510533..1c23438 100644
--- a/src/librustc/infer/error_reporting/mod.rs
+++ b/src/librustc/infer/error_reporting/mod.rs
@@ -56,7 +56,6 @@
 use crate::hir::Node;
 use crate::middle::region;
 use std::{cmp, fmt};
-use syntax::ast::DUMMY_NODE_ID;
 use syntax_pos::{Pos, Span};
 use crate::traits::{ObligationCause, ObligationCauseCode};
 use crate::ty::error::TypeError;
@@ -182,8 +181,8 @@
         let cm = self.sess.source_map();
 
         let scope = region.free_region_binding_scope(self);
-        let node = self.hir().as_local_node_id(scope).unwrap_or(DUMMY_NODE_ID);
-        let tag = match self.hir().find(node) {
+        let node = self.hir().as_local_hir_id(scope).unwrap_or(hir::DUMMY_HIR_ID);
+        let tag = match self.hir().find_by_hir_id(node) {
             Some(Node::Block(_)) | Some(Node::Expr(_)) => "body",
             Some(Node::Item(it)) => Self::item_scope_tag(&it),
             Some(Node::TraitItem(it)) => Self::trait_item_scope_tag(&it),
@@ -192,7 +191,7 @@
         };
         let (prefix, span) = match *region {
             ty::ReEarlyBound(ref br) => {
-                let mut sp = cm.def_span(self.hir().span(node));
+                let mut sp = cm.def_span(self.hir().span_by_hir_id(node));
                 if let Some(param) = self.hir()
                     .get_generics(scope)
                     .and_then(|generics| generics.get_named(&br.name))
@@ -205,7 +204,7 @@
                 bound_region: ty::BoundRegion::BrNamed(_, ref name),
                 ..
             }) => {
-                let mut sp = cm.def_span(self.hir().span(node));
+                let mut sp = cm.def_span(self.hir().span_by_hir_id(node));
                 if let Some(param) = self.hir()
                     .get_generics(scope)
                     .and_then(|generics| generics.get_named(&name))
@@ -217,15 +216,15 @@
             ty::ReFree(ref fr) => match fr.bound_region {
                 ty::BrAnon(idx) => (
                     format!("the anonymous lifetime #{} defined on", idx + 1),
-                    self.hir().span(node),
+                    self.hir().span_by_hir_id(node),
                 ),
                 ty::BrFresh(_) => (
                     "an anonymous lifetime defined on".to_owned(),
-                    self.hir().span(node),
+                    self.hir().span_by_hir_id(node),
                 ),
                 _ => (
                     format!("the lifetime {} as defined on", fr.bound_region),
-                    cm.def_span(self.hir().span(node)),
+                    cm.def_span(self.hir().span_by_hir_id(node)),
                 ),
             },
             _ => bug!(),
@@ -1451,8 +1450,7 @@
                 format!(" for lifetime parameter `{}` in coherence check", name)
             }
             infer::UpvarRegion(ref upvar_id, _) => {
-                let var_node_id = self.tcx.hir().hir_to_node_id(upvar_id.var_path.hir_id);
-                let var_name = self.tcx.hir().name(var_node_id);
+                let var_name = self.tcx.hir().name_by_hir_id(upvar_id.var_path.hir_id);
                 format!(" for capture of `{}` by closure", var_name)
             }
             infer::NLL(..) => bug!("NLL variable found in lexical phase"),
diff --git a/src/librustc/infer/error_reporting/note.rs b/src/librustc/infer/error_reporting/note.rs
index efd7f3c..6c481e8 100644
--- a/src/librustc/infer/error_reporting/note.rs
+++ b/src/librustc/infer/error_reporting/note.rs
@@ -31,8 +31,7 @@
                               "...so that reference does not outlive borrowed content");
             }
             infer::ReborrowUpvar(span, ref upvar_id) => {
-                let var_node_id = self.tcx.hir().hir_to_node_id(upvar_id.var_path.hir_id);
-                let var_name = self.tcx.hir().name(var_node_id);
+                let var_name = self.tcx.hir().name_by_hir_id(upvar_id.var_path.hir_id);
                 err.span_note(span,
                               &format!("...so that closure can access `{}`", var_name));
             }
@@ -164,8 +163,7 @@
                 err
             }
             infer::ReborrowUpvar(span, ref upvar_id) => {
-                let var_node_id = self.tcx.hir().hir_to_node_id(upvar_id.var_path.hir_id);
-                let var_name = self.tcx.hir().name(var_node_id);
+                let var_name = self.tcx.hir().name_by_hir_id(upvar_id.var_path.hir_id);
                 let mut err = struct_span_err!(self.tcx.sess,
                                                span,
                                                E0313,
diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs
index 3ff76e9..cb31441 100644
--- a/src/librustc/lint/builtin.rs
+++ b/src/librustc/lint/builtin.rs
@@ -473,6 +473,7 @@
     MacroExpandedMacroExportsAccessedByAbsolutePaths(Span),
     ElidedLifetimesInPaths(usize, Span, bool, Span, String),
     UnknownCrateTypes(Span, String, String),
+    UnusedImports(String, Vec<(Span, String)>),
 }
 
 impl BuiltinLintDiagnostics {
@@ -554,6 +555,15 @@
             BuiltinLintDiagnostics::UnknownCrateTypes(span, note, sugg) => {
                 db.span_suggestion(span, &note, sugg, Applicability::MaybeIncorrect);
             }
+            BuiltinLintDiagnostics::UnusedImports(message, replaces) => {
+                if !replaces.is_empty() {
+                    db.multipart_suggestion(
+                        &message,
+                        replaces,
+                        Applicability::MachineApplicable,
+                    );
+                }
+            }
         }
     }
 }
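
The new `UnusedImports` variant carries a message plus a list of span/replacement pairs and emits them as one machine-applicable `multipart_suggestion`. Roughly the user-facing effect (the exact wording depends on what the resolver passes in):

```rust
// Compiling this produces a warning along the lines of
//     warning: unused imports: `BTreeMap`, `BTreeSet`
// together with a suggestion that tools such as `cargo fix` can apply
// automatically, rewriting the `use` line to keep only `HashMap`.
#![warn(unused_imports)]

use std::collections::{BTreeMap, BTreeSet, HashMap};

fn main() {
    let mut scores: HashMap<&str, u32> = HashMap::new();
    scores.insert("a", 1);
    println!("{}", scores.len());
}
```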
diff --git a/src/librustc/macros.rs b/src/librustc/macros.rs
index 2978b35..ccae9d3 100644
--- a/src/librustc/macros.rs
+++ b/src/librustc/macros.rs
@@ -62,38 +62,36 @@
 #[macro_export]
 macro_rules! impl_stable_hash_for {
     // Enums
-    // FIXME(mark-i-m): Some of these should be `?` rather than `*`. See the git blame and change
-    // them back when `?` is supported again.
     (enum $enum_name:path {
         $( $variant:ident
            // this incorrectly allows specifying both tuple-like and struct-like fields, as in `Variant(a,b){c,d}`,
            // when it should be only one or the other
-           $( ( $($field:ident $(-> $delegate:tt)*),* ) )*
-           $( { $($named_field:ident $(-> $named_delegate:tt)*),* } )*
-        ),* $(,)*
+           $( ( $($field:ident $(-> $delegate:tt)?),* ) )?
+           $( { $($named_field:ident $(-> $named_delegate:tt)?),* } )?
+        ),* $(,)?
     }) => {
         impl_stable_hash_for!(
             impl<> for enum $enum_name [ $enum_name ] { $( $variant
-                $( ( $($field $(-> $delegate)*),* ) )*
-                $( { $($named_field $(-> $named_delegate)*),* } )*
+                $( ( $($field $(-> $delegate)?),* ) )?
+                $( { $($named_field $(-> $named_delegate)?),* } )?
             ),* }
         );
     };
     // We want to use the enum name both in the `impl ... for $enum_name` as well as for
     // importing all the variants. Unfortunately it seems we have to take the name
     // twice for this purpose
-    (impl<$($lt:lifetime $(: $lt_bound:lifetime)* ),* $(,)* $($T:ident),* $(,)*>
+    (impl<$($lt:lifetime $(: $lt_bound:lifetime)? ),* $(,)? $($T:ident),* $(,)?>
         for enum $enum_name:path
         [ $enum_path:path ]
     {
         $( $variant:ident
            // this incorrectly allows specifying both tuple-like and struct-like fields, as in `Variant(a,b){c,d}`,
            // when it should be only one or the other
-           $( ( $($field:ident $(-> $delegate:tt)*),* ) )*
-           $( { $($named_field:ident $(-> $named_delegate:tt)*),* } )*
-        ),* $(,)*
+           $( ( $($field:ident $(-> $delegate:tt)?),* ) )?
+           $( { $($named_field:ident $(-> $named_delegate:tt)?),* } )?
+        ),* $(,)?
     }) => {
-        impl<'a, $($lt $(: $lt_bound)*,)* $($T,)*>
+        impl<'a, $($lt $(: $lt_bound)?,)* $($T,)*>
             ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>
             for $enum_name
             where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
@@ -107,9 +105,9 @@
 
                 match *self {
                     $(
-                        $variant $( ( $(ref $field),* ) )* $( { $(ref $named_field),* } )* => {
-                            $($( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)*) );*)*
-                            $($( __impl_stable_hash_field!($named_field, __ctx, __hasher $(, $named_delegate)*) );*)*
+                        $variant $( ( $(ref $field),* ) )? $( { $(ref $named_field),* } )? => {
+                            $($( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*)?
+                            $($( __impl_stable_hash_field!($named_field, __ctx, __hasher $(, $named_delegate)?) );*)?
                         }
                     )*
                 }
@@ -117,16 +115,15 @@
         }
     };
     // Structs
-    // FIXME(mark-i-m): same here.
-    (struct $struct_name:path { $($field:ident $(-> $delegate:tt)*),*  $(,)* }) => {
+    (struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
         impl_stable_hash_for!(
-            impl<'tcx> for struct $struct_name { $($field $(-> $delegate)*),* }
+            impl<'tcx> for struct $struct_name { $($field $(-> $delegate)?),* }
         );
     };
-    (impl<$($lt:lifetime $(: $lt_bound:lifetime)* ),* $(,)* $($T:ident),* $(,)*> for struct $struct_name:path {
-        $($field:ident $(-> $delegate:tt)*),* $(,)*
+    (impl<$($lt:lifetime $(: $lt_bound:lifetime)? ),* $(,)? $($T:ident),* $(,)?> for struct $struct_name:path {
+        $($field:ident $(-> $delegate:tt)?),* $(,)?
     }) => {
-        impl<'a, $($lt $(: $lt_bound)*,)* $($T,)*>
+        impl<'a, $($lt $(: $lt_bound)?,)* $($T,)*>
             ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name
             where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
         {
@@ -138,21 +135,20 @@
                     $(ref $field),*
                 } = *self;
 
-                $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)*) );*
+                $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*
             }
         }
     };
     // Tuple structs
-    // We cannot use normale parentheses here, the parser won't allow it
-    // FIXME(mark-i-m): same here.
-    (tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)*),*  $(,)* }) => {
+    // We cannot use normal parentheses here, the parser won't allow it
+    (tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),*  $(,)? }) => {
         impl_stable_hash_for!(
-            impl<'tcx> for tuple_struct $struct_name { $($field $(-> $delegate)*),* }
+            impl<'tcx> for tuple_struct $struct_name { $($field $(-> $delegate)?),* }
         );
     };
-    (impl<$($lt:lifetime $(: $lt_bound:lifetime)* ),* $(,)* $($T:ident),* $(,)*>
-     for tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)*),*  $(,)* }) => {
-        impl<'a, $($lt $(: $lt_bound)*,)* $($T,)*>
+    (impl<$($lt:lifetime $(: $lt_bound:lifetime)? ),* $(,)? $($T:ident),* $(,)?>
+     for tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),*  $(,)? }) => {
+        impl<'a, $($lt $(: $lt_bound)?,)* $($T,)*>
             ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name
             where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
         {
@@ -164,7 +160,7 @@
                     $(ref $field),*
                 ) = *self;
 
-                $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)*) );*
+                $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*
             }
         }
     };
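
The `$(...)*` to `$(...)?` swap restores the "at most once" meaning these optional fragments always intended (the old FIXME). A tiny self-contained macro, unrelated to `impl_stable_hash_for!`, showing what `?` buys:

```rust
macro_rules! describe_field {
    // `$delegate` is a `tt`, as in the hashing macro above; `?` allows the
    // `-> delegate` part to appear at most once.
    ($field:ident $(-> $delegate:tt)?) => {{
        let mut text = String::from(stringify!($field));
        $( text.push_str(&format!(" (delegated to {})", stringify!($delegate))); )?
        text
    }};
}

fn main() {
    assert_eq!(describe_field!(span), "span");
    assert_eq!(
        describe_field!(krate -> hash_crate),
        "krate (delegated to hash_crate)"
    );
    // With the old `$(...)*` matchers, `describe_field!(krate -> a -> b)`
    // would also have matched; with `?` it is rejected at expansion time.
}
```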
diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs
index 73ba47d..99d0c5e 100644
--- a/src/librustc/middle/reachable.rs
+++ b/src/librustc/middle/reachable.rs
@@ -177,8 +177,8 @@
                             // Check the impl. If the generics on the self
                             // type of the impl require inlining, this method
                             // does too.
-                            let impl_node_id = self.tcx.hir().as_local_node_id(impl_did).unwrap();
-                            match self.tcx.hir().expect_item(impl_node_id).node {
+                            let impl_hir_id = self.tcx.hir().as_local_hir_id(impl_did).unwrap();
+                            match self.tcx.hir().expect_item_by_hir_id(impl_hir_id).node {
                                 hir::ItemKind::Impl(..) => {
                                     let generics = self.tcx.generics_of(impl_did);
                                     generics.requires_monomorphization(self.tcx)
diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs
index f7cd241..f187928 100644
--- a/src/librustc/middle/resolve_lifetime.rs
+++ b/src/librustc/middle/resolve_lifetime.rs
@@ -1248,12 +1248,12 @@
                 } => {
                     // FIXME (#24278): non-hygienic comparison
                     if let Some(def) = lifetimes.get(&hir::ParamName::Plain(label.modern())) {
-                        let node_id = tcx.hir().as_local_node_id(def.id().unwrap()).unwrap();
+                        let hir_id = tcx.hir().as_local_hir_id(def.id().unwrap()).unwrap();
 
                         signal_shadowing_problem(
                             tcx,
                             label.name,
-                            original_lifetime(tcx.hir().span(node_id)),
+                            original_lifetime(tcx.hir().span_by_hir_id(hir_id)),
                             shadower_label(label.span),
                         );
                         return;
@@ -2593,12 +2593,12 @@
                     ref lifetimes, s, ..
                 } => {
                     if let Some(&def) = lifetimes.get(&param.name.modern()) {
-                        let node_id = self.tcx.hir().as_local_node_id(def.id().unwrap()).unwrap();
+                        let hir_id = self.tcx.hir().as_local_hir_id(def.id().unwrap()).unwrap();
 
                         signal_shadowing_problem(
                             self.tcx,
                             param.name.ident().name,
-                            original_lifetime(self.tcx.hir().span(node_id)),
+                            original_lifetime(self.tcx.hir().span_by_hir_id(hir_id)),
                             shadower_lifetime(&param),
                         );
                         return;
diff --git a/src/librustc/mir/interpret/mod.rs b/src/librustc/mir/interpret/mod.rs
index efd233f..bb25d1b 100644
--- a/src/librustc/mir/interpret/mod.rs
+++ b/src/librustc/mir/interpret/mod.rs
@@ -27,7 +27,7 @@
 use std::fmt;
 use crate::mir;
 use crate::hir::def_id::DefId;
-use crate::ty::{self, TyCtxt, Instance};
+use crate::ty::{self, TyCtxt, Instance, subst::UnpackedKind};
 use crate::ty::layout::{self, Size};
 use std::io;
 use crate::rustc_serialize::{Encoder, Decodable, Encodable};
@@ -318,14 +318,29 @@
         id
     }
 
-    /// Functions cannot be identified by pointers, as asm-equal functions can get deduplicated
-    /// by the linker and functions can be duplicated across crates.
-    /// We thus generate a new `AllocId` for every mention of a function. This means that
-    /// `main as fn() == main as fn()` is false, while `let x = main as fn(); x == x` is true.
     pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> AllocId {
-        let id = self.reserve();
-        self.id_to_kind.insert(id, AllocKind::Function(instance));
-        id
+        // Functions cannot be identified by pointers, as asm-equal functions can get deduplicated
+        // by the linker (we set the "unnamed_addr" attribute for LLVM) and functions can be
+        // duplicated across crates.
+        // We thus generate a new `AllocId` for every mention of a function. This means that
+        // `main as fn() == main as fn()` is false, while `let x = main as fn(); x == x` is true.
+        // However, formatting code relies on function identity (see #58320), so we only do
+        // this for generic functions.  Lifetime parameters are ignored.
+        let is_generic = instance.substs.into_iter().any(|kind| {
+            match kind.unpack() {
+                UnpackedKind::Lifetime(_) => false,
+                _ => true,
+            }
+        });
+        if is_generic {
+            // Get a fresh ID
+            let id = self.reserve();
+            self.id_to_kind.insert(id, AllocKind::Function(instance));
+            id
+        } else {
+            // Deduplicate
+            self.intern(AllocKind::Function(instance))
+        }
     }
 
     /// Returns `None` in case the `AllocId` is dangling. An `EvalContext` can still have a
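
A rough sketch of the behavior the `create_fn_alloc` comment describes, from the caller's point of view. Whether two separate mentions of a function compare equal is an implementation detail, so the sketch only asserts the always-true case:

```rust
// A function pointer compared with itself (after binding it once) is always
// equal, while two separate mentions of the same function are not guaranteed
// to compare equal. This change only deduplicates the non-generic case during
// compile-time evaluation, which the formatting machinery relies on (#58320).
fn plain() {}
fn generic<T>(_: T) {}

fn main() {
    // Always true: one pointer value compared with itself.
    let p = plain as fn();
    let g = generic::<u32> as fn(u32);
    assert!(p == p);
    assert!(g == g);

    // Two independent mentions: may or may not be equal, so don't rely on it.
    println!("plain mentions equal?   {}", plain as fn() == plain as fn());
    println!("generic mentions equal? {}", generic::<u32> as fn(u32) == generic::<u32> as fn(u32));
}
```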
diff --git a/src/librustc/mir/mod.rs b/src/librustc/mir/mod.rs
index 009997b..a0f16ae 100644
--- a/src/librustc/mir/mod.rs
+++ b/src/librustc/mir/mod.rs
@@ -2154,7 +2154,7 @@
             span,
             ty,
             user_ty: None,
-            literal: tcx.intern_lazy_const(
+            literal: tcx.mk_lazy_const(
                 ty::LazyConst::Evaluated(ty::Const::zero_sized(ty)),
             ),
         })
diff --git a/src/librustc/mir/visit.rs b/src/librustc/mir/visit.rs
index 0180256..e582803 100644
--- a/src/librustc/mir/visit.rs
+++ b/src/librustc/mir/visit.rs
@@ -38,10 +38,10 @@
 // ```rust
 // fn super_basic_block_data(&mut self,
 //                           block: BasicBlock,
-//                           data: & $($mutability)* BasicBlockData<'tcx>) {
+//                           data: & $($mutability)? BasicBlockData<'tcx>) {
 //     let BasicBlockData {
-//         ref $($mutability)* statements,
-//         ref $($mutability)* terminator,
+//         statements,
+//         terminator,
 //         is_cleanup: _
 //     } = *data;
 //
@@ -67,111 +67,111 @@
 // `is_cleanup` above.
 
 macro_rules! make_mir_visitor {
-    ($visitor_trait_name:ident, $($mutability:ident)*) => {
+    ($visitor_trait_name:ident, $($mutability:ident)?) => {
         pub trait $visitor_trait_name<'tcx> {
             // Override these, and call `self.super_xxx` to revert back to the
             // default behavior.
 
-            fn visit_mir(&mut self, mir: & $($mutability)* Mir<'tcx>) {
+            fn visit_mir(&mut self, mir: & $($mutability)? Mir<'tcx>) {
                 self.super_mir(mir);
             }
 
             fn visit_basic_block_data(&mut self,
                                       block: BasicBlock,
-                                      data: & $($mutability)* BasicBlockData<'tcx>) {
+                                      data: & $($mutability)? BasicBlockData<'tcx>) {
                 self.super_basic_block_data(block, data);
             }
 
             fn visit_source_scope_data(&mut self,
-                                           scope_data: & $($mutability)* SourceScopeData) {
+                                           scope_data: & $($mutability)? SourceScopeData) {
                 self.super_source_scope_data(scope_data);
             }
 
             fn visit_statement(&mut self,
                                block: BasicBlock,
-                               statement: & $($mutability)* Statement<'tcx>,
+                               statement: & $($mutability)? Statement<'tcx>,
                                location: Location) {
                 self.super_statement(block, statement, location);
             }
 
             fn visit_assign(&mut self,
                             block: BasicBlock,
-                            place: & $($mutability)* Place<'tcx>,
-                            rvalue: & $($mutability)* Rvalue<'tcx>,
+                            place: & $($mutability)? Place<'tcx>,
+                            rvalue: & $($mutability)? Rvalue<'tcx>,
                             location: Location) {
                 self.super_assign(block, place, rvalue, location);
             }
 
             fn visit_terminator(&mut self,
                                 block: BasicBlock,
-                                terminator: & $($mutability)* Terminator<'tcx>,
+                                terminator: & $($mutability)? Terminator<'tcx>,
                                 location: Location) {
                 self.super_terminator(block, terminator, location);
             }
 
             fn visit_terminator_kind(&mut self,
                                      block: BasicBlock,
-                                     kind: & $($mutability)* TerminatorKind<'tcx>,
+                                     kind: & $($mutability)? TerminatorKind<'tcx>,
                                      location: Location) {
                 self.super_terminator_kind(block, kind, location);
             }
 
             fn visit_assert_message(&mut self,
-                                    msg: & $($mutability)* AssertMessage<'tcx>,
+                                    msg: & $($mutability)? AssertMessage<'tcx>,
                                     location: Location) {
                 self.super_assert_message(msg, location);
             }
 
             fn visit_rvalue(&mut self,
-                            rvalue: & $($mutability)* Rvalue<'tcx>,
+                            rvalue: & $($mutability)? Rvalue<'tcx>,
                             location: Location) {
                 self.super_rvalue(rvalue, location);
             }
 
             fn visit_operand(&mut self,
-                             operand: & $($mutability)* Operand<'tcx>,
+                             operand: & $($mutability)? Operand<'tcx>,
                              location: Location) {
                 self.super_operand(operand, location);
             }
 
             fn visit_ascribe_user_ty(&mut self,
-                                     place: & $($mutability)* Place<'tcx>,
-                                     variance: & $($mutability)* ty::Variance,
-                                     user_ty: & $($mutability)* UserTypeProjection<'tcx>,
+                                     place: & $($mutability)? Place<'tcx>,
+                                     variance: & $($mutability)? ty::Variance,
+                                     user_ty: & $($mutability)? UserTypeProjection<'tcx>,
                                      location: Location) {
                 self.super_ascribe_user_ty(place, variance, user_ty, location);
             }
 
             fn visit_retag(&mut self,
-                           kind: & $($mutability)* RetagKind,
-                           place: & $($mutability)* Place<'tcx>,
+                           kind: & $($mutability)? RetagKind,
+                           place: & $($mutability)? Place<'tcx>,
                            location: Location) {
                 self.super_retag(kind, place, location);
             }
 
             fn visit_place(&mut self,
-                            place: & $($mutability)* Place<'tcx>,
+                            place: & $($mutability)? Place<'tcx>,
                             context: PlaceContext<'tcx>,
                             location: Location) {
                 self.super_place(place, context, location);
             }
 
             fn visit_static(&mut self,
-                            static_: & $($mutability)* Static<'tcx>,
+                            static_: & $($mutability)? Static<'tcx>,
                             context: PlaceContext<'tcx>,
                             location: Location) {
                 self.super_static(static_, context, location);
             }
 
             fn visit_projection(&mut self,
-                                place: & $($mutability)* PlaceProjection<'tcx>,
+                                place: & $($mutability)? PlaceProjection<'tcx>,
                                 context: PlaceContext<'tcx>,
                                 location: Location) {
                 self.super_projection(place, context, location);
             }
 
             fn visit_projection_elem(&mut self,
-                                     place: & $($mutability)* PlaceElem<'tcx>,
+                                     place: & $($mutability)? PlaceElem<'tcx>,
                                      location: Location) {
                 self.super_projection_elem(place, location);
             }
@@ -183,36 +183,36 @@
             }
 
             fn visit_constant(&mut self,
-                              constant: & $($mutability)* Constant<'tcx>,
+                              constant: & $($mutability)? Constant<'tcx>,
                               location: Location) {
                 self.super_constant(constant, location);
             }
 
             fn visit_def_id(&mut self,
-                            def_id: & $($mutability)* DefId,
+                            def_id: & $($mutability)? DefId,
                             _: Location) {
                 self.super_def_id(def_id);
             }
 
             fn visit_span(&mut self,
-                          span: & $($mutability)* Span) {
+                          span: & $($mutability)? Span) {
                 self.super_span(span);
             }
 
             fn visit_source_info(&mut self,
-                                 source_info: & $($mutability)* SourceInfo) {
+                                 source_info: & $($mutability)? SourceInfo) {
                 self.super_source_info(source_info);
             }
 
             fn visit_ty(&mut self,
-                        ty: & $($mutability)* Ty<'tcx>,
+                        ty: & $($mutability)? Ty<'tcx>,
                         _: TyContext) {
                 self.super_ty(ty);
             }
 
             fn visit_user_type_projection(
                 &mut self,
-                ty: & $($mutability)* UserTypeProjection<'tcx>,
+                ty: & $($mutability)? UserTypeProjection<'tcx>,
             ) {
                 self.super_user_type_projection(ty);
             }
@@ -220,55 +220,55 @@
             fn visit_user_type_annotation(
                 &mut self,
                 index: UserTypeAnnotationIndex,
-                ty: & $($mutability)* CanonicalUserTypeAnnotation<'tcx>,
+                ty: & $($mutability)? CanonicalUserTypeAnnotation<'tcx>,
             ) {
                 self.super_user_type_annotation(index, ty);
             }
 
             fn visit_region(&mut self,
-                            region: & $($mutability)* ty::Region<'tcx>,
+                            region: & $($mutability)? ty::Region<'tcx>,
                             _: Location) {
                 self.super_region(region);
             }
 
             fn visit_const(&mut self,
-                           constant: & $($mutability)* &'tcx ty::LazyConst<'tcx>,
+                           constant: & $($mutability)? &'tcx ty::LazyConst<'tcx>,
                            _: Location) {
                 self.super_const(constant);
             }
 
             fn visit_substs(&mut self,
-                            substs: & $($mutability)* &'tcx Substs<'tcx>,
+                            substs: & $($mutability)? &'tcx Substs<'tcx>,
                             _: Location) {
                 self.super_substs(substs);
             }
 
             fn visit_closure_substs(&mut self,
-                                    substs: & $($mutability)* ClosureSubsts<'tcx>,
+                                    substs: & $($mutability)? ClosureSubsts<'tcx>,
                                     _: Location) {
                 self.super_closure_substs(substs);
             }
 
             fn visit_generator_substs(&mut self,
-                                      substs: & $($mutability)* GeneratorSubsts<'tcx>,
+                                      substs: & $($mutability)? GeneratorSubsts<'tcx>,
                                     _: Location) {
                 self.super_generator_substs(substs);
             }
 
             fn visit_local_decl(&mut self,
                                 local: Local,
-                                local_decl: & $($mutability)* LocalDecl<'tcx>) {
+                                local_decl: & $($mutability)? LocalDecl<'tcx>) {
                 self.super_local_decl(local, local_decl);
             }
 
             fn visit_local(&mut self,
-                            _local: & $($mutability)* Local,
+                            _local: & $($mutability)? Local,
                             _context: PlaceContext<'tcx>,
                             _location: Location) {
             }
 
             fn visit_source_scope(&mut self,
-                                      scope: & $($mutability)* SourceScope) {
+                                      scope: & $($mutability)? SourceScope) {
                 self.super_source_scope(scope);
             }
 
@@ -276,8 +276,8 @@
             // not meant to be overridden.
 
             fn super_mir(&mut self,
-                         mir: & $($mutability)* Mir<'tcx>) {
-                if let Some(yield_ty) = &$($mutability)* mir.yield_ty {
+                         mir: & $($mutability)? Mir<'tcx>) {
+                if let Some(yield_ty) = &$($mutability)? mir.yield_ty {
                     self.visit_ty(yield_ty, TyContext::YieldTy(SourceInfo {
                         span: mir.span,
                         scope: OUTERMOST_SOURCE_SCOPE,
@@ -291,21 +291,21 @@
                     (mut) => (mir.basic_blocks_mut().iter_enumerated_mut());
                     () => (mir.basic_blocks().iter_enumerated());
                 };
-                for (bb, data) in basic_blocks!($($mutability)*) {
+                for (bb, data) in basic_blocks!($($mutability)?) {
                     self.visit_basic_block_data(bb, data);
                 }
 
-                for scope in &$($mutability)* mir.source_scopes {
+                for scope in &$($mutability)? mir.source_scopes {
                     self.visit_source_scope_data(scope);
                 }
 
-                self.visit_ty(&$($mutability)* mir.return_ty(), TyContext::ReturnTy(SourceInfo {
+                self.visit_ty(&$($mutability)? mir.return_ty(), TyContext::ReturnTy(SourceInfo {
                     span: mir.span,
                     scope: OUTERMOST_SOURCE_SCOPE,
                 }));
 
                 for local in mir.local_decls.indices() {
-                    self.visit_local_decl(local, & $($mutability)* mir.local_decls[local]);
+                    self.visit_local_decl(local, & $($mutability)? mir.local_decls[local]);
                 }
 
                 macro_rules! type_annotations {
@@ -313,23 +313,23 @@
                     () => (mir.user_type_annotations.iter_enumerated());
                 };
 
-                for (index, annotation) in type_annotations!($($mutability)*) {
+                for (index, annotation) in type_annotations!($($mutability)?) {
                     self.visit_user_type_annotation(
                         index, annotation
                     );
                 }
 
-                self.visit_span(&$($mutability)* mir.span);
+                self.visit_span(&$($mutability)? mir.span);
             }
 
             fn super_basic_block_data(&mut self,
                                       block: BasicBlock,
-                                      data: & $($mutability)* BasicBlockData<'tcx>) {
+                                      data: & $($mutability)? BasicBlockData<'tcx>) {
                 let BasicBlockData {
-                    ref $($mutability)* statements,
-                    ref $($mutability)* terminator,
+                    statements,
+                    terminator,
                     is_cleanup: _
-                } = *data;
+                } = data;
 
                 let mut index = 0;
                 for statement in statements {
@@ -338,92 +338,83 @@
                     index += 1;
                 }
 
-                if let Some(ref $($mutability)* terminator) = *terminator {
+                if let Some(terminator) = terminator {
                     let location = Location { block: block, statement_index: index };
                     self.visit_terminator(block, terminator, location);
                 }
             }
 
-            fn super_source_scope_data(&mut self,
-                                           scope_data: & $($mutability)* SourceScopeData) {
+            fn super_source_scope_data(&mut self, scope_data: & $($mutability)? SourceScopeData) {
                 let SourceScopeData {
-                    ref $($mutability)* span,
-                    ref $($mutability)* parent_scope,
-                } = *scope_data;
+                    span,
+                    parent_scope,
+                } = scope_data;
 
                 self.visit_span(span);
-                if let Some(ref $($mutability)* parent_scope) = *parent_scope {
+                if let Some(parent_scope) = parent_scope {
                     self.visit_source_scope(parent_scope);
                 }
             }
 
             fn super_statement(&mut self,
                                block: BasicBlock,
-                               statement: & $($mutability)* Statement<'tcx>,
+                               statement: & $($mutability)? Statement<'tcx>,
                                location: Location) {
                 let Statement {
-                    ref $($mutability)* source_info,
-                    ref $($mutability)* kind,
-                } = *statement;
+                    source_info,
+                    kind,
+                } = statement;
 
                 self.visit_source_info(source_info);
-                match *kind {
-                    StatementKind::Assign(ref $($mutability)* place,
-                                          ref $($mutability)* rvalue) => {
+                match kind {
+                    StatementKind::Assign(place, rvalue) => {
                         self.visit_assign(block, place, rvalue, location);
                     }
-                    StatementKind::FakeRead(_, ref $($mutability)* place) => {
+                    StatementKind::FakeRead(_, place) => {
                         self.visit_place(
                             place,
                             PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect),
                             location
                         );
                     }
-                    StatementKind::SetDiscriminant{ ref $($mutability)* place, .. } => {
+                    StatementKind::SetDiscriminant { place, .. } => {
                         self.visit_place(
                             place,
                             PlaceContext::MutatingUse(MutatingUseContext::Store),
                             location
                         );
                     }
-                    StatementKind::StorageLive(ref $($mutability)* local) => {
+                    StatementKind::StorageLive(local) => {
                         self.visit_local(
                             local,
                             PlaceContext::NonUse(NonUseContext::StorageLive),
                             location
                         );
                     }
-                    StatementKind::StorageDead(ref $($mutability)* local) => {
+                    StatementKind::StorageDead(local) => {
                         self.visit_local(
                             local,
                             PlaceContext::NonUse(NonUseContext::StorageDead),
                             location
                         );
                     }
-                    StatementKind::InlineAsm { ref $($mutability)* outputs,
-                                               ref $($mutability)* inputs,
-                                               asm: _ } => {
-                        for output in & $($mutability)* outputs[..] {
+                    StatementKind::InlineAsm { outputs, inputs, asm: _ } => {
+                        for output in & $($mutability)? outputs[..] {
                             self.visit_place(
                                 output,
                                 PlaceContext::MutatingUse(MutatingUseContext::AsmOutput),
                                 location
                             );
                         }
-                        for (span, input) in & $($mutability)* inputs[..] {
+                        for (span, input) in & $($mutability)? inputs[..] {
                             self.visit_span(span);
                             self.visit_operand(input, location);
                         }
                     }
-                    StatementKind::Retag ( ref $($mutability)* kind,
-                                           ref $($mutability)* place ) => {
+                    StatementKind::Retag(kind, place) => {
                         self.visit_retag(kind, place, location);
                     }
-                    StatementKind::AscribeUserType(
-                        ref $($mutability)* place,
-                        ref $($mutability)* variance,
-                        ref $($mutability)* user_ty,
-                    ) => {
+                    StatementKind::AscribeUserType(place, variance, user_ty) => {
                         self.visit_ascribe_user_ty(place, variance, user_ty, location);
                     }
                     StatementKind::Nop => {}
@@ -432,8 +423,8 @@
 
             fn super_assign(&mut self,
                             _block: BasicBlock,
-                            place: &$($mutability)* Place<'tcx>,
-                            rvalue: &$($mutability)* Rvalue<'tcx>,
+                            place: &$($mutability)? Place<'tcx>,
+                            rvalue: &$($mutability)? Rvalue<'tcx>,
                             location: Location) {
                 self.visit_place(
                     place,
@@ -445,12 +436,9 @@
 
             fn super_terminator(&mut self,
                                 block: BasicBlock,
-                                terminator: &$($mutability)* Terminator<'tcx>,
+                                terminator: &$($mutability)? Terminator<'tcx>,
                                 location: Location) {
-                let Terminator {
-                    ref $($mutability)* source_info,
-                    ref $($mutability)* kind,
-                } = *terminator;
+                let Terminator { source_info, kind } = terminator;
 
                 self.visit_source_info(source_info);
                 self.visit_terminator_kind(block, kind, location);
@@ -458,21 +446,23 @@
 
             fn super_terminator_kind(&mut self,
                                      block: BasicBlock,
-                                     kind: & $($mutability)* TerminatorKind<'tcx>,
+                                     kind: & $($mutability)? TerminatorKind<'tcx>,
                                      source_location: Location) {
-                match *kind {
+                match kind {
                     TerminatorKind::Goto { target } => {
-                        self.visit_branch(block, target);
+                        self.visit_branch(block, *target);
                     }
 
-                    TerminatorKind::SwitchInt { ref $($mutability)* discr,
-                                                ref $($mutability)* switch_ty,
-                                                values: _,
-                                                ref targets } => {
+                    TerminatorKind::SwitchInt {
+                        discr,
+                        switch_ty,
+                        values: _,
+                        targets
+                    } => {
                         self.visit_operand(discr, source_location);
                         self.visit_ty(switch_ty, TyContext::Location(source_location));
-                        for &target in targets {
-                            self.visit_branch(block, target);
+                        for target in targets {
+                            self.visit_branch(block, *target);
                         }
                     }
 
@@ -483,113 +473,120 @@
                     TerminatorKind::Unreachable => {
                     }
 
-                    TerminatorKind::Drop { ref $($mutability)* location,
-                                           target,
-                                           unwind } => {
+                    TerminatorKind::Drop {
+                        location,
+                        target,
+                        unwind,
+                    } => {
                         self.visit_place(
                             location,
                             PlaceContext::MutatingUse(MutatingUseContext::Drop),
                             source_location
                         );
-                        self.visit_branch(block, target);
+                        self.visit_branch(block, *target);
                         unwind.map(|t| self.visit_branch(block, t));
                     }
 
-                    TerminatorKind::DropAndReplace { ref $($mutability)* location,
-                                                     ref $($mutability)* value,
-                                                     target,
-                                                     unwind } => {
+                    TerminatorKind::DropAndReplace {
+                        location,
+                        value,
+                        target,
+                        unwind,
+                    } => {
                         self.visit_place(
                             location,
                             PlaceContext::MutatingUse(MutatingUseContext::Drop),
                             source_location
                         );
                         self.visit_operand(value, source_location);
-                        self.visit_branch(block, target);
+                        self.visit_branch(block, *target);
                         unwind.map(|t| self.visit_branch(block, t));
                     }
 
-                    TerminatorKind::Call { ref $($mutability)* func,
-                                           ref $($mutability)* args,
-                                           ref $($mutability)* destination,
-                                           cleanup,
-                                           from_hir_call: _, } => {
+                    TerminatorKind::Call {
+                        func,
+                        args,
+                        destination,
+                        cleanup,
+                        from_hir_call: _,
+                    } => {
                         self.visit_operand(func, source_location);
                         for arg in args {
                             self.visit_operand(arg, source_location);
                         }
-                        if let Some((ref $($mutability)* destination, target)) = *destination {
+                        if let Some((destination, target)) = destination {
                             self.visit_place(
                                 destination,
                                 PlaceContext::MutatingUse(MutatingUseContext::Call),
                                 source_location
                             );
-                            self.visit_branch(block, target);
+                            self.visit_branch(block, *target);
                         }
                         cleanup.map(|t| self.visit_branch(block, t));
                     }
 
-                    TerminatorKind::Assert { ref $($mutability)* cond,
-                                             expected: _,
-                                             ref $($mutability)* msg,
-                                             target,
-                                             cleanup } => {
+                    TerminatorKind::Assert {
+                        cond,
+                        expected: _,
+                        msg,
+                        target,
+                        cleanup,
+                    } => {
                         self.visit_operand(cond, source_location);
                         self.visit_assert_message(msg, source_location);
-                        self.visit_branch(block, target);
+                        self.visit_branch(block, *target);
                         cleanup.map(|t| self.visit_branch(block, t));
                     }
 
-                    TerminatorKind::Yield { ref $($mutability)* value,
-                                              resume,
-                                              drop } => {
+                    TerminatorKind::Yield {
+                        value,
+                        resume,
+                        drop,
+                    } => {
                         self.visit_operand(value, source_location);
-                        self.visit_branch(block, resume);
+                        self.visit_branch(block, *resume);
                         drop.map(|t| self.visit_branch(block, t));
                     }
 
-                    TerminatorKind::FalseEdges { real_target, ref imaginary_targets} => {
-                        self.visit_branch(block, real_target);
+                    TerminatorKind::FalseEdges { real_target, imaginary_targets } => {
+                        self.visit_branch(block, *real_target);
                         for target in imaginary_targets {
                             self.visit_branch(block, *target);
                         }
                     }
 
                     TerminatorKind::FalseUnwind { real_target, unwind } => {
-                        self.visit_branch(block, real_target);
+                        self.visit_branch(block, *real_target);
                         if let Some(unwind) = unwind {
-                            self.visit_branch(block, unwind);
+                            self.visit_branch(block, *unwind);
                         }
                     }
                 }
             }
 
             fn super_assert_message(&mut self,
-                                    msg: & $($mutability)* AssertMessage<'tcx>,
+                                    msg: & $($mutability)? AssertMessage<'tcx>,
                                     location: Location) {
                 use crate::mir::interpret::EvalErrorKind::*;
-                if let BoundsCheck {
-                        ref $($mutability)* len,
-                        ref $($mutability)* index
-                    } = *msg {
+                if let BoundsCheck { len, index } = msg {
                     self.visit_operand(len, location);
                     self.visit_operand(index, location);
                 }
             }
 
             fn super_rvalue(&mut self,
-                            rvalue: & $($mutability)* Rvalue<'tcx>,
+                            rvalue: & $($mutability)? Rvalue<'tcx>,
                             location: Location) {
-                match *rvalue {
-                    Rvalue::Use(ref $($mutability)* operand) => {
+                match rvalue {
+                    Rvalue::Use(operand) => {
                         self.visit_operand(operand, location);
                     }
 
-                    Rvalue::Repeat(ref $($mutability)* value, _) => {
+                    Rvalue::Repeat(value, _) => {
                         self.visit_operand(value, location);
                     }
 
-                    Rvalue::Ref(ref $($mutability)* r, bk, ref $($mutability)* path) => {
+                    Rvalue::Ref(r, bk, path) => {
                         self.visit_region(r, location);
                         let ctx = match bk {
                             BorrowKind::Shared => PlaceContext::NonMutatingUse(
@@ -607,7 +604,7 @@
                         self.visit_place(path, ctx, location);
                     }
 
-                    Rvalue::Len(ref $($mutability)* path) => {
+                    Rvalue::Len(path) => {
                         self.visit_place(
                             path,
                             PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect),
@@ -615,28 +612,22 @@
                         );
                     }
 
-                    Rvalue::Cast(_cast_kind,
-                                 ref $($mutability)* operand,
-                                 ref $($mutability)* ty) => {
+                    Rvalue::Cast(_cast_kind, operand, ty) => {
                         self.visit_operand(operand, location);
                         self.visit_ty(ty, TyContext::Location(location));
                     }
 
-                    Rvalue::BinaryOp(_bin_op,
-                                     ref $($mutability)* lhs,
-                                     ref $($mutability)* rhs) |
-                    Rvalue::CheckedBinaryOp(_bin_op,
-                                     ref $($mutability)* lhs,
-                                     ref $($mutability)* rhs) => {
+                    Rvalue::BinaryOp(_bin_op, lhs, rhs)
+                    | Rvalue::CheckedBinaryOp(_bin_op, lhs, rhs) => {
                         self.visit_operand(lhs, location);
                         self.visit_operand(rhs, location);
                     }
 
-                    Rvalue::UnaryOp(_un_op, ref $($mutability)* op) => {
+                    Rvalue::UnaryOp(_un_op, op) => {
                         self.visit_operand(op, location);
                     }
 
-                    Rvalue::Discriminant(ref $($mutability)* place) => {
+                    Rvalue::Discriminant(place) => {
                         self.visit_place(
                             place,
                             PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect),
@@ -644,34 +635,39 @@
                         );
                     }
 
-                    Rvalue::NullaryOp(_op, ref $($mutability)* ty) => {
+                    Rvalue::NullaryOp(_op, ty) => {
                         self.visit_ty(ty, TyContext::Location(location));
                     }
 
-                    Rvalue::Aggregate(ref $($mutability)* kind,
-                                      ref $($mutability)* operands) => {
-                        let kind = &$($mutability)* **kind;
-                        match *kind {
-                            AggregateKind::Array(ref $($mutability)* ty) => {
+                    Rvalue::Aggregate(kind, operands) => {
+                        let kind = &$($mutability)? **kind;
+                        match kind {
+                            AggregateKind::Array(ty) => {
                                 self.visit_ty(ty, TyContext::Location(location));
                             }
                             AggregateKind::Tuple => {
                             }
-                            AggregateKind::Adt(_adt_def,
-                                               _variant_index,
-                                               ref $($mutability)* substs,
-                                               _user_substs,
-                                               _active_field_index) => {
+                            AggregateKind::Adt(
+                                _adt_def,
+                                _variant_index,
+                                substs,
+                                _user_substs,
+                                _active_field_index
+                            ) => {
                                 self.visit_substs(substs, location);
                             }
-                            AggregateKind::Closure(ref $($mutability)* def_id,
-                                                   ref $($mutability)* closure_substs) => {
+                            AggregateKind::Closure(
+                                def_id,
+                                closure_substs
+                            ) => {
                                 self.visit_def_id(def_id, location);
                                 self.visit_closure_substs(closure_substs, location);
                             }
-                            AggregateKind::Generator(ref $($mutability)* def_id,
-                                                     ref $($mutability)* generator_substs,
-                                                     _movability) => {
+                            AggregateKind::Generator(
+                                def_id,
+                                generator_substs,
+                                _movability,
+                            ) => {
                                 self.visit_def_id(def_id, location);
                                 self.visit_generator_substs(generator_substs, location);
                             }
@@ -685,33 +681,33 @@
             }
 
             fn super_operand(&mut self,
-                             operand: & $($mutability)* Operand<'tcx>,
+                             operand: & $($mutability)? Operand<'tcx>,
                              location: Location) {
-                match *operand {
-                    Operand::Copy(ref $($mutability)* place) => {
+                match operand {
+                    Operand::Copy(place) => {
                         self.visit_place(
                             place,
                             PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
                             location
                         );
                     }
-                    Operand::Move(ref $($mutability)* place) => {
+                    Operand::Move(place) => {
                         self.visit_place(
                             place,
                             PlaceContext::NonMutatingUse(NonMutatingUseContext::Move),
                             location
                         );
                     }
-                    Operand::Constant(ref $($mutability)* constant) => {
+                    Operand::Constant(constant) => {
                         self.visit_constant(constant, location);
                     }
                 }
             }
 
             fn super_ascribe_user_ty(&mut self,
-                                     place: & $($mutability)* Place<'tcx>,
-                                     _variance: & $($mutability)* ty::Variance,
-                                     user_ty: & $($mutability)* UserTypeProjection<'tcx>,
+                                     place: & $($mutability)? Place<'tcx>,
+                                     _variance: & $($mutability)? ty::Variance,
+                                     user_ty: & $($mutability)? UserTypeProjection<'tcx>,
                                      location: Location) {
                 self.visit_place(
                     place,
@@ -722,8 +718,8 @@
             }
 
             fn super_retag(&mut self,
-                           _kind: & $($mutability)* RetagKind,
-                           place: & $($mutability)* Place<'tcx>,
+                           _kind: & $($mutability)? RetagKind,
+                           place: & $($mutability)? Place<'tcx>,
                            location: Location) {
                 self.visit_place(
                     place,
@@ -733,45 +729,39 @@
             }
 
             fn super_place(&mut self,
-                            place: & $($mutability)* Place<'tcx>,
+                            place: & $($mutability)? Place<'tcx>,
                             context: PlaceContext<'tcx>,
                             location: Location) {
-                match *place {
-                    Place::Local(ref $($mutability)* local) => {
+                match place {
+                    Place::Local(local) => {
                         self.visit_local(local, context, location);
                     }
-                    Place::Static(ref $($mutability)* static_) => {
+                    Place::Static(static_) => {
                         self.visit_static(static_, context, location);
                     }
-                    Place::Promoted(ref $($mutability)* promoted) => {
-                        self.visit_ty(& $($mutability)* promoted.1, TyContext::Location(location));
+                    Place::Promoted(promoted) => {
+                        self.visit_ty(& $($mutability)? promoted.1, TyContext::Location(location));
                     },
-                    Place::Projection(ref $($mutability)* proj) => {
+                    Place::Projection(proj) => {
                         self.visit_projection(proj, context, location);
                     }
                 }
             }
 
             fn super_static(&mut self,
-                            static_: & $($mutability)* Static<'tcx>,
+                            static_: & $($mutability)? Static<'tcx>,
                             _context: PlaceContext<'tcx>,
                             location: Location) {
-                let Static {
-                    ref $($mutability)* def_id,
-                    ref $($mutability)* ty,
-                } = *static_;
+                let Static { def_id, ty } = static_;
                 self.visit_def_id(def_id, location);
                 self.visit_ty(ty, TyContext::Location(location));
             }
 
             fn super_projection(&mut self,
-                                proj: & $($mutability)* PlaceProjection<'tcx>,
+                                proj: & $($mutability)? PlaceProjection<'tcx>,
                                 context: PlaceContext<'tcx>,
                                 location: Location) {
-                let Projection {
-                    ref $($mutability)* base,
-                    ref $($mutability)* elem,
-                } = *proj;
+                let Projection { base, elem } = proj;
                 let context = if context.is_mutating_use() {
                     PlaceContext::MutatingUse(MutatingUseContext::Projection)
                 } else {
@@ -782,17 +772,17 @@
             }
 
             fn super_projection_elem(&mut self,
-                                     proj: & $($mutability)* PlaceElem<'tcx>,
+                                     proj: & $($mutability)? PlaceElem<'tcx>,
                                      location: Location) {
-                match *proj {
+                match proj {
                     ProjectionElem::Deref => {
                     }
                     ProjectionElem::Subslice { from: _, to: _ } => {
                     }
-                    ProjectionElem::Field(_field, ref $($mutability)* ty) => {
+                    ProjectionElem::Field(_field, ty) => {
                         self.visit_ty(ty, TyContext::Location(location));
                     }
-                    ProjectionElem::Index(ref $($mutability)* local) => {
+                    ProjectionElem::Index(local) => {
                         self.visit_local(
                             local,
                             PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
@@ -810,24 +800,24 @@
 
             fn super_local_decl(&mut self,
                                 local: Local,
-                                local_decl: & $($mutability)* LocalDecl<'tcx>) {
+                                local_decl: & $($mutability)? LocalDecl<'tcx>) {
                 let LocalDecl {
                     mutability: _,
-                    ref $($mutability)* ty,
-                    ref $($mutability)* user_ty,
+                    ty,
+                    user_ty,
                     name: _,
-                    ref $($mutability)* source_info,
-                    ref $($mutability)* visibility_scope,
+                    source_info,
+                    visibility_scope,
                     internal: _,
                     is_user_variable: _,
                     is_block_tail: _,
-                } = *local_decl;
+                } = local_decl;
 
                 self.visit_ty(ty, TyContext::LocalDecl {
                     local,
                     source_info: *source_info,
                 });
-                for (user_ty, _) in & $($mutability)* user_ty.contents {
+                for (user_ty, _) in & $($mutability)? user_ty.contents {
                     self.visit_user_type_projection(user_ty);
                 }
                 self.visit_source_info(source_info);
@@ -835,7 +825,7 @@
             }
 
             fn super_source_scope(&mut self,
-                                      _scope: & $($mutability)* SourceScope) {
+                                      _scope: & $($mutability)? SourceScope) {
             }
 
             fn super_branch(&mut self,
@@ -844,14 +834,14 @@
             }
 
             fn super_constant(&mut self,
-                              constant: & $($mutability)* Constant<'tcx>,
+                              constant: & $($mutability)? Constant<'tcx>,
                               location: Location) {
                 let Constant {
-                    ref $($mutability)* span,
-                    ref $($mutability)* ty,
-                    ref $($mutability)* user_ty,
-                    ref $($mutability)* literal,
-                } = *constant;
+                    span,
+                    ty,
+                    user_ty,
+                    literal,
+                } = constant;
 
                 self.visit_span(span);
                 self.visit_ty(ty, TyContext::Location(location));
@@ -859,17 +849,17 @@
                 self.visit_const(literal, location);
             }
 
-            fn super_def_id(&mut self, _def_id: & $($mutability)* DefId) {
+            fn super_def_id(&mut self, _def_id: & $($mutability)? DefId) {
             }
 
-            fn super_span(&mut self, _span: & $($mutability)* Span) {
+            fn super_span(&mut self, _span: & $($mutability)? Span) {
             }
 
-            fn super_source_info(&mut self, source_info: & $($mutability)* SourceInfo) {
+            fn super_source_info(&mut self, source_info: & $($mutability)? SourceInfo) {
                 let SourceInfo {
-                    ref $($mutability)* span,
-                    ref $($mutability)* scope,
-                } = *source_info;
+                    span,
+                    scope,
+                } = source_info;
 
                 self.visit_span(span);
                 self.visit_source_scope(scope);
@@ -877,49 +867,49 @@
 
             fn super_user_type_projection(
                 &mut self,
-                _ty: & $($mutability)* UserTypeProjection<'tcx>,
+                _ty: & $($mutability)? UserTypeProjection<'tcx>,
             ) {
             }
 
             fn super_user_type_annotation(
                 &mut self,
                 _index: UserTypeAnnotationIndex,
-                ty: & $($mutability)* CanonicalUserTypeAnnotation<'tcx>,
+                ty: & $($mutability)? CanonicalUserTypeAnnotation<'tcx>,
             ) {
-                self.visit_span(& $($mutability)* ty.span);
-                self.visit_ty(& $($mutability)* ty.inferred_ty, TyContext::UserTy(ty.span));
+                self.visit_span(& $($mutability)? ty.span);
+                self.visit_ty(& $($mutability)? ty.inferred_ty, TyContext::UserTy(ty.span));
             }
 
-            fn super_ty(&mut self, _ty: & $($mutability)* Ty<'tcx>) {
+            fn super_ty(&mut self, _ty: & $($mutability)? Ty<'tcx>) {
             }
 
-            fn super_region(&mut self, _region: & $($mutability)* ty::Region<'tcx>) {
+            fn super_region(&mut self, _region: & $($mutability)? ty::Region<'tcx>) {
             }
 
-            fn super_const(&mut self, _const: & $($mutability)* &'tcx ty::LazyConst<'tcx>) {
+            fn super_const(&mut self, _const: & $($mutability)? &'tcx ty::LazyConst<'tcx>) {
             }
 
-            fn super_substs(&mut self, _substs: & $($mutability)* &'tcx Substs<'tcx>) {
+            fn super_substs(&mut self, _substs: & $($mutability)? &'tcx Substs<'tcx>) {
             }
 
             fn super_generator_substs(&mut self,
-                                      _substs: & $($mutability)* GeneratorSubsts<'tcx>) {
+                                      _substs: & $($mutability)? GeneratorSubsts<'tcx>) {
             }
 
             fn super_closure_substs(&mut self,
-                                    _substs: & $($mutability)* ClosureSubsts<'tcx>) {
+                                    _substs: & $($mutability)? ClosureSubsts<'tcx>) {
             }
 
             // Convenience methods
 
-            fn visit_location(&mut self, mir: & $($mutability)* Mir<'tcx>, location: Location) {
-                let basic_block = & $($mutability)* mir[location.block];
+            fn visit_location(&mut self, mir: & $($mutability)? Mir<'tcx>, location: Location) {
+                let basic_block = & $($mutability)? mir[location.block];
                 if basic_block.statements.len() == location.statement_index {
-                    if let Some(ref $($mutability)* terminator) = basic_block.terminator {
+                    if let Some(ref $($mutability)? terminator) = basic_block.terminator {
                         self.visit_terminator(location.block, terminator, location)
                     }
                 } else {
-                    let statement = & $($mutability)*
+                    let statement = & $($mutability)?
                         basic_block.statements[location.statement_index];
                     self.visit_statement(location.block, statement, location)
                 }
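
Note on the visitor changes above (not part of the patch): the `make_mir_visitor!` macro is expanded once with `mut` and once with nothing, and Rust 2018 lets that zero-or-one repetition be written as `$($mutability)?` instead of abusing `*`, while match ergonomics removes the need for `ref $($mutability)*` patterns when destructuring through a reference. A minimal standalone sketch of both idioms, with made-up names (`make_getter`, `Pair`), might look like this:

    macro_rules! make_getter {
        // `$($mutability)?` matches either nothing or the single token `mut`,
        // the 2018 replacement for using `*` to mean "zero or one".
        ($name:ident, $($mutability:ident)?) => {
            fn $name(v: & $($mutability)? Vec<i32>) -> & $($mutability)? i32 {
                & $($mutability)? v[0]
            }
        };
    }

    make_getter!(first,);          // expands to: fn first(v: &Vec<i32>) -> &i32
    make_getter!(first_mut, mut);  // expands to: fn first_mut(v: &mut Vec<i32>) -> &mut i32

    fn main() {
        let mut v = vec![1, 2, 3];
        *first_mut(&mut v) += 10;
        assert_eq!(*first(&v), 11);

        // Match ergonomics: destructuring through `&mut` binds the fields as
        // `&mut i32`, which is why the visitor can drop `ref $($mutability)*`
        // from its patterns above.
        struct Pair { x: i32, y: i32 }
        let mut p = Pair { x: 1, y: 2 };
        let Pair { x, y } = &mut p;
        *x += *y;
        assert_eq!(p.x, 3);
    }

Because `?` admits at most one repetition, an invocation passing two mutability tokens is rejected by the matcher instead of producing an unparseable expansion.
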
diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs
index 79afc59..c17adaf 100644
--- a/src/librustc/traits/error_reporting.rs
+++ b/src/librustc/traits/error_reporting.rs
@@ -1035,7 +1035,8 @@
                  ).collect::<Vec<_>>())
             }
             Node::StructCtor(ref variant_data) => {
-                (self.tcx.sess.source_map().def_span(self.tcx.hir().span(variant_data.id())),
+                (self.tcx.sess.source_map().def_span(
+                    self.tcx.hir().span_by_hir_id(variant_data.hir_id())),
                  vec![ArgKind::empty(); variant_data.fields().len()])
             }
             _ => panic!("non-FnLike node found: {:?}", node),
diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs
index 99107a1..562a29f 100644
--- a/src/librustc/traits/project.rs
+++ b/src/librustc/traits/project.rs
@@ -408,7 +408,7 @@
                         if let Ok(evaluated) = tcx.const_eval(param_env.and(cid)) {
                             let substs = tcx.lift_to_global(&substs).unwrap();
                             let evaluated = evaluated.subst(tcx, substs);
-                            return tcx.intern_lazy_const(ty::LazyConst::Evaluated(evaluated));
+                            return tcx.mk_lazy_const(ty::LazyConst::Evaluated(evaluated));
                         }
                     }
                 } else {
@@ -420,7 +420,7 @@
                                 promoted: None
                             };
                             if let Ok(evaluated) = tcx.const_eval(param_env.and(cid)) {
-                                return tcx.intern_lazy_const(ty::LazyConst::Evaluated(evaluated));
+                                return tcx.mk_lazy_const(ty::LazyConst::Evaluated(evaluated));
                             }
                         }
                     }
diff --git a/src/librustc/traits/query/normalize.rs b/src/librustc/traits/query/normalize.rs
index f477f16..bcd1119 100644
--- a/src/librustc/traits/query/normalize.rs
+++ b/src/librustc/traits/query/normalize.rs
@@ -203,7 +203,7 @@
                         if let Ok(evaluated) = tcx.const_eval(param_env.and(cid)) {
                             let substs = tcx.lift_to_global(&substs).unwrap();
                             let evaluated = evaluated.subst(tcx, substs);
-                            return tcx.intern_lazy_const(ty::LazyConst::Evaluated(evaluated));
+                            return tcx.mk_lazy_const(ty::LazyConst::Evaluated(evaluated));
                         }
                     }
                 } else {
@@ -215,7 +215,7 @@
                                 promoted: None,
                             };
                             if let Ok(evaluated) = tcx.const_eval(param_env.and(cid)) {
-                                return tcx.intern_lazy_const(ty::LazyConst::Evaluated(evaluated));
+                                return tcx.mk_lazy_const(ty::LazyConst::Evaluated(evaluated));
                             }
                         }
                     }
diff --git a/src/librustc/traits/util.rs b/src/librustc/traits/util.rs
index 67c919a..754cc94 100644
--- a/src/librustc/traits/util.rs
+++ b/src/librustc/traits/util.rs
@@ -525,9 +525,9 @@
     }
 
     pub fn impl_is_default(self, node_item_def_id: DefId) -> bool {
-        match self.hir().as_local_node_id(node_item_def_id) {
-            Some(node_id) => {
-                let item = self.hir().expect_item(node_id);
+        match self.hir().as_local_hir_id(node_item_def_id) {
+            Some(hir_id) => {
+                let item = self.hir().expect_item_by_hir_id(hir_id);
                 if let hir::ItemKind::Impl(_, _, defaultness, ..) = item.node {
                     defaultness.is_default()
                 } else {
diff --git a/src/librustc/ty/codec.rs b/src/librustc/ty/codec.rs
index c9775b10..e93de32 100644
--- a/src/librustc/ty/codec.rs
+++ b/src/librustc/ty/codec.rs
@@ -252,7 +252,7 @@
     where D: TyDecoder<'a, 'tcx>,
           'tcx: 'a,
 {
-    Ok(decoder.tcx().intern_lazy_const(Decodable::decode(decoder)?))
+    Ok(decoder.tcx().mk_lazy_const(Decodable::decode(decoder)?))
 }
 
 #[inline]
diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs
index 140c772..11211c9 100644
--- a/src/librustc/ty/context.rs
+++ b/src/librustc/ty/context.rs
@@ -1,7 +1,7 @@
 //! type context book-keeping
 
 use crate::dep_graph::DepGraph;
-use crate::dep_graph::{DepNode, DepConstructor};
+use crate::dep_graph::{self, DepNode, DepConstructor};
 use crate::errors::DiagnosticBuilder;
 use crate::session::Session;
 use crate::session::config::{BorrowckMode, OutputFilenames};
@@ -127,6 +127,7 @@
     goal: InternedSet<'tcx, GoalKind<'tcx>>,
     goal_list: InternedSet<'tcx, List<Goal<'tcx>>>,
     projs: InternedSet<'tcx, List<ProjectionKind<'tcx>>>,
+    lazy_const: InternedSet<'tcx, LazyConst<'tcx>>,
 }
 
 impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
@@ -144,6 +145,7 @@
             goal: Default::default(),
             goal_list: Default::default(),
             projs: Default::default(),
+            lazy_const: Default::default(),
         }
     }
 
@@ -1096,10 +1098,7 @@
         self.global_arenas.adt_def.alloc(def)
     }
 
-    pub fn intern_const_alloc(
-        self,
-        alloc: Allocation,
-    ) -> &'gcx Allocation {
+    pub fn intern_const_alloc(self, alloc: Allocation) -> &'gcx Allocation {
         self.allocation_interner.borrow_mut().intern(alloc, |alloc| {
             self.global_arenas.const_allocs.alloc(alloc)
         })
@@ -1119,10 +1118,6 @@
         })
     }
 
-    pub fn intern_lazy_const(self, c: ty::LazyConst<'tcx>) -> &'tcx ty::LazyConst<'tcx> {
-        self.global_interners.arena.alloc(c)
-    }
-
     pub fn intern_layout(self, layout: LayoutDetails) -> &'gcx LayoutDetails {
         self.layout_interner.borrow_mut().intern(layout, |layout| {
             self.global_arenas.layout.alloc(layout)
@@ -1435,7 +1430,8 @@
             self.dep_graph.with_task(dep_node,
                                      self,
                                      crate_hash,
-                                     |_, x| x // No transformation needed
+                                     |_, x| x, // No transformation needed
+                                     dep_graph::hash_result,
             );
         }
     }
@@ -2271,6 +2267,12 @@
     }
 }
 
+impl<'tcx: 'lcx, 'lcx> Borrow<LazyConst<'lcx>> for Interned<'tcx, LazyConst<'tcx>> {
+    fn borrow<'a>(&'a self) -> &'a LazyConst<'lcx> {
+        &self.0
+    }
+}
+
 impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]>
     for Interned<'tcx, List<ExistentialPredicate<'tcx>>> {
     fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] {
@@ -2377,7 +2379,8 @@
 
 direct_interners!('tcx,
     region: mk_region(|r: &RegionKind| r.keep_in_local_tcx()) -> RegionKind,
-    goal: mk_goal(|c: &GoalKind<'_>| keep_local(c)) -> GoalKind<'tcx>
+    goal: mk_goal(|c: &GoalKind<'_>| keep_local(c)) -> GoalKind<'tcx>,
+    lazy_const: mk_lazy_const(|c: &LazyConst<'_>| keep_local(&c)) -> LazyConst<'tcx>
 );
 
 macro_rules! slice_interners {
@@ -2562,7 +2565,7 @@
 
     #[inline]
     pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
-        self.mk_ty(Array(ty, self.intern_lazy_const(
+        self.mk_ty(Array(ty, self.mk_lazy_const(
             ty::LazyConst::Evaluated(ty::Const::from_usize(self.global_tcx(), n))
         )))
     }
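
Note on the `context.rs` changes (not part of the patch): `intern_lazy_const` allocated a fresh `LazyConst` in the arena on every call, whereas the new `mk_lazy_const` generated by `direct_interners!` deduplicates through the added `lazy_const: InternedSet`; the new `Borrow<LazyConst<'lcx>>` impl is what lets that set be queried by value. The sketch below only illustrates the hash-consing pattern with invented names (`Interner`, `intern`); the real interner allocates into an arena and hands out `&'tcx` references rather than `Rc`s:

    use std::collections::HashSet;
    use std::rc::Rc;

    struct Interner<T: Eq + std::hash::Hash> {
        // Stands in for the arena plus InternedSet pair in the real code.
        set: HashSet<Rc<T>>,
    }

    impl<T: Eq + std::hash::Hash> Interner<T> {
        fn new() -> Self {
            Interner { set: HashSet::new() }
        }

        /// Returns the unique shared copy of `value`, allocating it only the
        /// first time it is seen.
        fn intern(&mut self, value: T) -> Rc<T> {
            if let Some(existing) = self.set.get(&value) {
                return existing.clone();
            }
            let rc = Rc::new(value);
            self.set.insert(rc.clone());
            rc
        }
    }

    fn main() {
        let mut interner = Interner::new();
        let a = interner.intern("LazyConst-like value".to_string());
        let b = interner.intern("LazyConst-like value".to_string());
        // Equal values share one allocation, so identity comparison suffices;
        // that is what interning buys over a fresh arena allocation per call.
        assert!(Rc::ptr_eq(&a, &b));
    }
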
diff --git a/src/librustc/ty/item_path.rs b/src/librustc/ty/item_path.rs
index 8214c8b..3f49c1b 100644
--- a/src/librustc/ty/item_path.rs
+++ b/src/librustc/ty/item_path.rs
@@ -462,8 +462,8 @@
         // only occur very early in the compiler pipeline.
         let parent_def_id = self.parent_def_id(impl_def_id).unwrap();
         self.push_item_path(buffer, parent_def_id, pushed_prelude_crate);
-        let node_id = self.hir().as_local_node_id(impl_def_id).unwrap();
-        let item = self.hir().expect_item(node_id);
+        let hir_id = self.hir().as_local_hir_id(impl_def_id).unwrap();
+        let item = self.hir().expect_item_by_hir_id(hir_id);
         let span_str = self.sess.source_map().span_to_string(item.span);
         buffer.push(&format!("<impl at {}>", span_str));
     }
diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs
index 60e3ac6..1f08d93 100644
--- a/src/librustc/ty/mod.rs
+++ b/src/librustc/ty/mod.rs
@@ -2939,8 +2939,8 @@
 
     /// Get the attributes of a definition.
     pub fn get_attrs(self, did: DefId) -> Attributes<'gcx> {
-        if let Some(id) = self.hir().as_local_node_id(did) {
-            Attributes::Borrowed(self.hir().attrs(id))
+        if let Some(id) = self.hir().as_local_hir_id(did) {
+            Attributes::Borrowed(self.hir().attrs_by_hir_id(id))
         } else {
             Attributes::Owned(self.item_attrs(did))
         }
@@ -2991,8 +2991,8 @@
     /// with the name of the crate containing the impl.
     pub fn span_of_impl(self, impl_did: DefId) -> Result<Span, Symbol> {
         if impl_did.is_local() {
-            let node_id = self.hir().as_local_node_id(impl_did).unwrap();
-            Ok(self.hir().span(node_id))
+            let hir_id = self.hir().as_local_hir_id(impl_did).unwrap();
+            Ok(self.hir().span_by_hir_id(hir_id))
         } else {
             Err(self.crate_name(impl_did.krate))
         }
@@ -3110,8 +3110,8 @@
 fn associated_item_def_ids<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                      def_id: DefId)
                                      -> Lrc<Vec<DefId>> {
-    let id = tcx.hir().as_local_node_id(def_id).unwrap();
-    let item = tcx.hir().expect_item(id);
+    let id = tcx.hir().as_local_hir_id(def_id).unwrap();
+    let item = tcx.hir().expect_item_by_hir_id(id);
     let vec: Vec<_> = match item.node {
         hir::ItemKind::Trait(.., ref trait_item_refs) => {
             trait_item_refs.iter()
diff --git a/src/librustc/ty/query/config.rs b/src/librustc/ty/query/config.rs
index 255e39e..a3ee92f 100644
--- a/src/librustc/ty/query/config.rs
+++ b/src/librustc/ty/query/config.rs
@@ -20,7 +20,7 @@
 use std::fmt::Debug;
 use syntax_pos::symbol::InternedString;
 use rustc_data_structures::sync::Lock;
-use rustc_data_structures::stable_hasher::HashStable;
+use rustc_data_structures::fingerprint::Fingerprint;
 use crate::ich::StableHashingContext;
 
 // Query configuration and description traits.
@@ -30,7 +30,7 @@
     const CATEGORY: ProfileCategory;
 
     type Key: Eq + Hash + Clone + Debug;
-    type Value: Clone + for<'a> HashStable<StableHashingContext<'a>>;
+    type Value: Clone;
 }
 
 pub(super) trait QueryAccessors<'tcx>: QueryConfig<'tcx> {
@@ -44,6 +44,11 @@
     // Don't use this method to compute query results, instead use the methods on TyCtxt
     fn compute(tcx: TyCtxt<'_, 'tcx, '_>, key: Self::Key) -> Self::Value;
 
+    fn hash_result(
+        hcx: &mut StableHashingContext<'_>,
+        result: &Self::Value
+    ) -> Option<Fingerprint>;
+
     fn handle_cycle_error(tcx: TyCtxt<'_, 'tcx, '_>) -> Self::Value;
 }
 
diff --git a/src/librustc/ty/query/mod.rs b/src/librustc/ty/query/mod.rs
index 20a700b..d002b99 100644
--- a/src/librustc/ty/query/mod.rs
+++ b/src/librustc/ty/query/mod.rs
@@ -1,4 +1,4 @@
-use crate::dep_graph::{DepConstructor, DepNode};
+use crate::dep_graph::{self, DepConstructor, DepNode};
 use crate::errors::DiagnosticBuilder;
 use crate::hir::def_id::{CrateNum, DefId, DefIndex};
 use crate::hir::def::{Def, Export};
@@ -49,6 +49,7 @@
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::stable_hasher::StableVec;
 use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_target::spec::PanicStrategy;
 
 use std::borrow::Cow;
@@ -233,9 +234,9 @@
         /// ready for const evaluation.
         ///
         /// See the README for the `mir` module for details.
-        [] fn mir_const: MirConst(DefId) -> &'tcx Steal<mir::Mir<'tcx>>,
+        [no_hash] fn mir_const: MirConst(DefId) -> &'tcx Steal<mir::Mir<'tcx>>,
 
-        [] fn mir_validated: MirValidated(DefId) -> &'tcx Steal<mir::Mir<'tcx>>,
+        [no_hash] fn mir_validated: MirValidated(DefId) -> &'tcx Steal<mir::Mir<'tcx>>,
 
         /// MIR after our optimization passes have run. This is MIR that is ready
         /// for codegen. This is also the only query that can fetch non-local MIR, at present.
diff --git a/src/librustc/ty/query/plumbing.rs b/src/librustc/ty/query/plumbing.rs
index a26b21a..f63fbd7 100644
--- a/src/librustc/ty/query/plumbing.rs
+++ b/src/librustc/ty/query/plumbing.rs
@@ -113,7 +113,7 @@
             let mut lock = cache.borrow_mut();
             if let Some(value) = lock.results.get(key) {
                 profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
-                tcx.sess.profiler(|p| p.record_query_hit(Q::CATEGORY));
+                tcx.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
                 let result = Ok((value.value.clone(), value.index));
                 #[cfg(debug_assertions)]
                 {
@@ -375,7 +375,7 @@
 
         if dep_node.kind.is_anon() {
             profq_msg!(self, ProfileQueriesMsg::ProviderBegin);
-            self.sess.profiler(|p| p.start_activity(Q::CATEGORY));
+            self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
 
             let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
                 job.start(self, diagnostics, |tcx| {
@@ -385,7 +385,7 @@
                 })
             });
 
-            self.sess.profiler(|p| p.end_activity(Q::CATEGORY));
+            self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
             profq_msg!(self, ProfileQueriesMsg::ProviderEnd);
 
             self.dep_graph.read_index(dep_node_index);
@@ -452,14 +452,14 @@
 
         let result = if let Some(result) = result {
             profq_msg!(self, ProfileQueriesMsg::CacheHit);
-            self.sess.profiler(|p| p.record_query_hit(Q::CATEGORY));
+            self.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
 
             result
         } else {
             // We could not load a result from the on-disk cache, so
             // recompute.
 
-            self.sess.profiler(|p| p.start_activity(Q::CATEGORY));
+            self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
 
             // The diagnostics for this query have already been
             // promoted to the current session during
@@ -472,7 +472,7 @@
                 })
             });
 
-            self.sess.profiler(|p| p.end_activity(Q::CATEGORY));
+            self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
             result
         };
 
@@ -499,7 +499,6 @@
         dep_node: &DepNode,
         dep_node_index: DepNodeIndex,
     ) {
-        use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
         use crate::ich::Fingerprint;
 
         assert!(Some(self.dep_graph.fingerprint_of(dep_node_index)) ==
@@ -509,11 +508,8 @@
 
         debug!("BEGIN verify_ich({:?})", dep_node);
         let mut hcx = self.create_stable_hashing_context();
-        let mut hasher = StableHasher::new();
 
-        result.hash_stable(&mut hcx, &mut hasher);
-
-        let new_hash: Fingerprint = hasher.finish();
+        let new_hash = Q::hash_result(&mut hcx, result).unwrap_or(Fingerprint::ZERO);
         debug!("END verify_ich({:?})", dep_node);
 
         let old_hash = self.dep_graph.fingerprint_of(dep_node_index);
@@ -541,7 +537,7 @@
                 key, dep_node);
 
         profq_msg!(self, ProfileQueriesMsg::ProviderBegin);
-        self.sess.profiler(|p| p.start_activity(Q::CATEGORY));
+        self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
 
         let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
             job.start(self, diagnostics, |tcx| {
@@ -549,17 +545,19 @@
                     tcx.dep_graph.with_eval_always_task(dep_node,
                                                         tcx,
                                                         key,
-                                                        Q::compute)
+                                                        Q::compute,
+                                                        Q::hash_result)
                 } else {
                     tcx.dep_graph.with_task(dep_node,
                                             tcx,
                                             key,
-                                            Q::compute)
+                                            Q::compute,
+                                            Q::hash_result)
                 }
             })
         });
 
-        self.sess.profiler(|p| p.end_activity(Q::CATEGORY));
+        self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
         profq_msg!(self, ProfileQueriesMsg::ProviderEnd);
 
         if unlikely!(self.sess.opts.debugging_opts.query_dep_graph) {
@@ -602,7 +600,7 @@
             let _ = self.get_query::<Q>(DUMMY_SP, key);
         } else {
             profq_msg!(self, ProfileQueriesMsg::CacheHit);
-            self.sess.profiler(|p| p.record_query_hit(Q::CATEGORY));
+            self.sess.profiler(|p| p.record_query_hit(Q::NAME, Q::CATEGORY));
         }
     }
 
@@ -679,6 +677,18 @@
     };
 }
 
+macro_rules! hash_result {
+    ([][$hcx:expr, $result:expr]) => {{
+        dep_graph::hash_result($hcx, &$result)
+    }};
+    ([no_hash$(, $modifiers:ident)*][$hcx:expr, $result:expr]) => {{
+        None
+    }};
+    ([$other:ident$(, $modifiers:ident)*][$($args:tt)*]) => {
+        hash_result!([$($modifiers),*][$($args)*])
+    };
+}
+
 macro_rules! define_queries {
     (<$tcx:tt> $($category:tt {
         $($(#[$attr:meta])* [$($modifiers:tt)*] fn $name:ident: $node:ident($K:ty) -> $V:ty,)*
@@ -729,6 +739,7 @@
                 sess.profiler(|p| {
                     $(
                         p.record_computed_queries(
+                            <queries::$name<'_> as QueryConfig<'_>>::NAME,
                             <queries::$name<'_> as QueryConfig<'_>>::CATEGORY,
                             self.$name.lock().results.len()
                         );
@@ -966,6 +977,13 @@
                 })
             }
 
+            fn hash_result(
+                _hcx: &mut StableHashingContext<'_>,
+                _result: &Self::Value
+            ) -> Option<Fingerprint> {
+                hash_result!([$($modifiers)*][_hcx, _result])
+            }
+
             fn handle_cycle_error(tcx: TyCtxt<'_, 'tcx, '_>) -> Self::Value {
                 handle_cycle_error!([$($modifiers)*][tcx])
             }
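
Note on the `hash_result!` macro added in `plumbing.rs` (not part of the patch): it scans the bracketed query modifier list and expands to `None` for `no_hash` queries (such as `mir_const` and `mir_validated` above), otherwise to the default `dep_graph::hash_result` call. A reduced, standalone version of that token-scanning dispatch, with made-up names (`maybe_hash`, `my_hash`), might look like this:

    macro_rules! maybe_hash {
        // No modifiers left: hash the result.
        ([][$result:expr]) => {
            Some(my_hash($result))
        };
        // `no_hash` found: skip hashing entirely.
        ([no_hash $(, $rest:ident)*][$result:expr]) => {
            None
        };
        // Unknown modifier: drop it and keep scanning.
        ([$other:ident $(, $rest:ident)*][$($args:tt)*]) => {
            maybe_hash!([$($rest),*][$($args)*])
        };
    }

    fn my_hash(value: &str) -> u64 {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};
        let mut hasher = DefaultHasher::new();
        value.hash(&mut hasher);
        hasher.finish()
    }

    fn main() {
        let hashed: Option<u64> = maybe_hash!([fatal_cycle, eval_always]["mir"]);
        let skipped: Option<u64> = maybe_hash!([no_hash]["mir"]);
        assert!(hashed.is_some());
        assert!(skipped.is_none());
    }

The fall-through arm discards one unrecognized modifier per recursion, so `no_hash` is found regardless of where it appears in the list.
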
diff --git a/src/librustc/ty/structural_impls.rs b/src/librustc/ty/structural_impls.rs
index 62a4923..d09cfa8 100644
--- a/src/librustc/ty/structural_impls.rs
+++ b/src/librustc/ty/structural_impls.rs
@@ -1042,7 +1042,7 @@
                 ty::LazyConst::Unevaluated(*def_id, substs.fold_with(folder))
             }
         };
-        folder.tcx().intern_lazy_const(new)
+        folder.tcx().mk_lazy_const(new)
     }
 
     fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs
index a12ec34..4945bf8 100644
--- a/src/librustc/util/ppaux.rs
+++ b/src/librustc/util/ppaux.rs
@@ -802,7 +802,7 @@
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "UpvarId({:?};`{}`;{:?})",
                self.var_path.hir_id,
-               ty::tls::with(|tcx| tcx.hir().name(tcx.hir().hir_to_node_id(self.var_path.hir_id))),
+               ty::tls::with(|tcx| tcx.hir().name_by_hir_id(self.var_path.hir_id)),
                self.closure_expr_id)
     }
 }
diff --git a/src/librustc/util/profiling.rs b/src/librustc/util/profiling.rs
index 0e03946..f8fa01b 100644
--- a/src/librustc/util/profiling.rs
+++ b/src/librustc/util/profiling.rs
@@ -1,125 +1,13 @@
-use crate::session::config::Options;
-
+use std::collections::{BTreeMap, HashMap};
 use std::fs;
-use std::io::{self, StderrLock, Write};
+use std::io::{self, Write};
+use std::thread::ThreadId;
 use std::time::Instant;
 
-macro_rules! define_categories {
-    ($($name:ident,)*) => {
-        #[derive(Clone, Copy, Debug, PartialEq, Eq)]
-        pub enum ProfileCategory {
-            $($name),*
-        }
+use crate::session::config::{Options, OptLevel};
 
-        #[allow(nonstandard_style)]
-        struct Categories<T> {
-            $($name: T),*
-        }
-
-        impl<T: Default> Categories<T> {
-            fn new() -> Categories<T> {
-                Categories {
-                    $($name: T::default()),*
-                }
-            }
-        }
-
-        impl<T> Categories<T> {
-            fn get(&self, category: ProfileCategory) -> &T {
-                match category {
-                    $(ProfileCategory::$name => &self.$name),*
-                }
-            }
-
-            fn set(&mut self, category: ProfileCategory, value: T) {
-                match category {
-                    $(ProfileCategory::$name => self.$name = value),*
-                }
-            }
-        }
-
-        struct CategoryData {
-            times: Categories<u64>,
-            query_counts: Categories<(u64, u64)>,
-        }
-
-        impl CategoryData {
-            fn new() -> CategoryData {
-                CategoryData {
-                    times: Categories::new(),
-                    query_counts: Categories::new(),
-                }
-            }
-
-            fn print(&self, lock: &mut StderrLock<'_>) {
-                writeln!(lock, "| Phase            | Time (ms)      \
-                                | Time (%) | Queries        | Hits (%)")
-                    .unwrap();
-                writeln!(lock, "| ---------------- | -------------- \
-                                | -------- | -------------- | --------")
-                    .unwrap();
-
-                let total_time = ($(self.times.$name + )* 0) as f32;
-
-                $(
-                    let (hits, computed) = self.query_counts.$name;
-                    let total = hits + computed;
-                    let (hits, total) = if total > 0 {
-                        (format!("{:.2}",
-                        (((hits as f32) / (total as f32)) * 100.0)), total.to_string())
-                    } else {
-                        (String::new(), String::new())
-                    };
-
-                    writeln!(
-                        lock,
-                        "| {0: <16} | {1: <14} | {2: <8.2} | {3: <14} | {4: <8}",
-                        stringify!($name),
-                        self.times.$name / 1_000_000,
-                        ((self.times.$name as f32) / total_time) * 100.0,
-                        total,
-                        hits,
-                    ).unwrap();
-                )*
-            }
-
-            fn json(&self) -> String {
-                let mut json = String::from("[");
-
-                $(
-                    let (hits, computed) = self.query_counts.$name;
-                    let total = hits + computed;
-
-                    //normalize hits to 0%
-                    let hit_percent =
-                        if total > 0 {
-                            ((hits as f32) / (total as f32)) * 100.0
-                        } else {
-                            0.0
-                        };
-
-                    json.push_str(&format!(
-                        "{{ \"category\": \"{}\", \"time_ms\": {},\
-                            \"query_count\": {}, \"query_hits\": {} }},",
-                        stringify!($name),
-                        self.times.$name / 1_000_000,
-                        total,
-                        format!("{:.2}", hit_percent)
-                    ));
-                )*
-
-                //remove the trailing ',' character
-                json.pop();
-
-                json.push(']');
-
-                json
-            }
-        }
-    }
-}
-
-define_categories! {
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Ord, PartialOrd)]
+pub enum ProfileCategory {
     Parsing,
     Expansion,
     TypeChecking,
@@ -129,18 +17,151 @@
     Other,
 }
 
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum ProfilerEvent {
+    QueryStart { query_name: &'static str, category: ProfileCategory, time: Instant },
+    QueryEnd { query_name: &'static str, category: ProfileCategory, time: Instant },
+    GenericActivityStart { category: ProfileCategory, time: Instant },
+    GenericActivityEnd { category: ProfileCategory, time: Instant },
+    QueryCacheHit { query_name: &'static str, category: ProfileCategory },
+    QueryCount { query_name: &'static str, category: ProfileCategory, count: usize },
+}
+
+impl ProfilerEvent {
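+    // Whether this event opens an interval that must be closed by a matching end
+    // event before results can be calculated.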
+    fn is_start_event(&self) -> bool {
+        use self::ProfilerEvent::*;
+
+        match self {
+            QueryStart { .. } | GenericActivityStart { .. } => true,
+            QueryEnd { .. } | GenericActivityEnd { .. } |
+            QueryCacheHit { .. } | QueryCount { .. } => false,
+        }
+    }
+}
+
 pub struct SelfProfiler {
-    timer_stack: Vec<ProfileCategory>,
-    data: CategoryData,
-    current_timer: Instant,
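+    // Events are buffered per thread; the per-thread streams are merged into a
+    // single set of results when printed or saved.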
+    events: HashMap<ThreadId, Vec<ProfilerEvent>>,
+}
+
+struct CategoryResultData {
+    query_times: BTreeMap<&'static str, u64>,
+    query_cache_stats: BTreeMap<&'static str, (u64, u64)>, //(hits, total)
+}
+
+impl CategoryResultData {
+    fn new() -> CategoryResultData {
+        CategoryResultData {
+            query_times: BTreeMap::new(),
+            query_cache_stats: BTreeMap::new(),
+        }
+    }
+
+    fn total_time(&self) -> u64 {
+        let mut total = 0;
+        for (_, time) in &self.query_times {
+            total += time;
+        }
+
+        total
+    }
+
+    fn total_cache_data(&self) -> (u64, u64) {
+        let (mut hits, mut total) = (0, 0);
+
+        for (_, (h, t)) in &self.query_cache_stats {
+            hits += h;
+            total += t;
+        }
+
+        (hits, total)
+    }
+}
+
+impl Default for CategoryResultData {
+    fn default() -> CategoryResultData {
+        CategoryResultData::new()
+    }
+}
+
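+// Profiling results aggregated by category, ready for display or JSON output.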
+struct CalculatedResults {
+    categories: BTreeMap<ProfileCategory, CategoryResultData>,
+    crate_name: Option<String>,
+    optimization_level: OptLevel,
+    incremental: bool,
+    verbose: bool,
+}
+
+impl CalculatedResults {
+    fn new() -> CalculatedResults {
+        CalculatedResults {
+            categories: BTreeMap::new(),
+            crate_name: None,
+            optimization_level: OptLevel::No,
+            incremental: false,
+            verbose: false,
+        }
+    }
+
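+    // Merge two result sets by summing per-query times and cache statistics.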
+    fn consolidate(mut cr1: CalculatedResults, cr2: CalculatedResults) -> CalculatedResults {
+        for (category, data) in cr2.categories {
+            let cr1_data = cr1.categories.entry(category).or_default();
+
+            for (query, time) in data.query_times {
+                *cr1_data.query_times.entry(query).or_default() += time;
+            }
+
+            for (query, (hits, total)) in data.query_cache_stats {
+                let (h, t) = cr1_data.query_cache_stats.entry(query).or_insert((0, 0));
+                *h += hits;
+                *t += total;
+            }
+        }
+
+        cr1
+    }
+
+    fn total_time(&self) -> u64 {
+        let mut total = 0;
+
+        for (_, data) in &self.categories {
+            total += data.total_time();
+        }
+
+        total
+    }
+
+    fn with_options(mut self, opts: &Options) -> CalculatedResults {
+        self.crate_name = opts.crate_name.clone();
+        self.optimization_level = opts.optimize;
+        self.incremental = opts.incremental.is_some();
+        self.verbose = opts.debugging_opts.verbose;
+
+        self
+    }
+}
+
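+// Returns the nanoseconds elapsed from `start` to `end`, or 0 if `end` is not
+// after `start`, guarding against non-monotonic `Instant` readings.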
+fn time_between_ns(start: Instant, end: Instant) -> u64 {
+    if start < end {
+        let time = end - start;
+        (time.as_secs() * 1_000_000_000) + (time.subsec_nanos() as u64)
+    } else {
+        debug!("time_between_ns: ignorning instance of end < start");
+        0
+    }
+}
+
+fn calculate_percent(numerator: u64, denominator: u64) -> f32 {
+    if denominator > 0 {
+        ((numerator as f32) / (denominator as f32)) * 100.0
+    } else {
+        0.0
+    }
 }
 
 impl SelfProfiler {
     pub fn new() -> SelfProfiler {
         let mut profiler = SelfProfiler {
-            timer_stack: Vec::new(),
-            data: CategoryData::new(),
-            current_timer: Instant::now(),
+            events: HashMap::new(),
         };
 
         profiler.start_activity(ProfileCategory::Other);
@@ -148,104 +169,263 @@
         profiler
     }
 
+    #[inline]
     pub fn start_activity(&mut self, category: ProfileCategory) {
-        match self.timer_stack.last().cloned() {
-            None => {
-                self.current_timer = Instant::now();
-            },
-            Some(current_category) if current_category == category => {
-                //since the current category is the same as the new activity's category,
-                //we don't need to do anything with the timer, we just need to push it on the stack
-            }
-            Some(current_category) => {
-                let elapsed = self.stop_timer();
-
-                //record the current category's time
-                let new_time = self.data.times.get(current_category) + elapsed;
-                self.data.times.set(current_category, new_time);
-            }
-        }
-
-        //push the new category
-        self.timer_stack.push(category);
+        self.record(ProfilerEvent::GenericActivityStart {
+            category,
+            time: Instant::now(),
+        })
     }
 
-    pub fn record_computed_queries(&mut self, category: ProfileCategory, count: usize) {
-        let (hits, computed) = *self.data.query_counts.get(category);
-        self.data.query_counts.set(category, (hits, computed + count as u64));
-    }
-
-    pub fn record_query_hit(&mut self, category: ProfileCategory) {
-        let (hits, computed) = *self.data.query_counts.get(category);
-        self.data.query_counts.set(category, (hits + 1, computed));
-    }
-
+    #[inline]
     pub fn end_activity(&mut self, category: ProfileCategory) {
-        match self.timer_stack.pop() {
-            None => bug!("end_activity() was called but there was no running activity"),
-            Some(c) =>
-                assert!(
-                    c == category,
-                    "end_activity() was called but a different activity was running"),
-        }
+        self.record(ProfilerEvent::GenericActivityEnd {
+            category,
+            time: Instant::now(),
+        })
+    }
 
-        //check if the new running timer is in the same category as this one
-        //if it is, we don't need to do anything
-        if let Some(c) = self.timer_stack.last() {
-            if *c == category {
-                return;
+    #[inline]
+    pub fn record_computed_queries(
+        &mut self,
+        query_name: &'static str,
+        category: ProfileCategory,
+        count: usize,
+    ) {
+        self.record(ProfilerEvent::QueryCount {
+            query_name,
+            category,
+            count,
+        })
+    }
+
+    #[inline]
+    pub fn record_query_hit(&mut self, query_name: &'static str, category: ProfileCategory) {
+        self.record(ProfilerEvent::QueryCacheHit {
+            query_name,
+            category,
+        })
+    }
+
+    #[inline]
+    pub fn start_query(&mut self, query_name: &'static str, category: ProfileCategory) {
+        self.record(ProfilerEvent::QueryStart {
+            query_name,
+            category,
+            time: Instant::now(),
+        });
+    }
+
+    #[inline]
+    pub fn end_query(&mut self, query_name: &'static str, category: ProfileCategory) {
+        self.record(ProfilerEvent::QueryEnd {
+            query_name,
+            category,
+            time: Instant::now(),
+        })
+    }
+
+    #[inline]
+    fn record(&mut self, event: ProfilerEvent) {
+        let thread_id = std::thread::current().id();
+        let events = self.events.entry(thread_id).or_default();
+
+        events.push(event);
+    }
+
+    fn calculate_thread_results(events: &Vec<ProfilerEvent>) -> CalculatedResults {
+        use self::ProfilerEvent::*;
+
+        assert!(
+            events.last().map(|e| !e.is_start_event()).unwrap_or(true),
+            "there was an event running when calculate_reslts() was called"
+        );
+
+        let mut results = CalculatedResults::new();
+
+        // Stack of in-flight start events, each paired with the accumulated child
+        // time to subtract when computing that event's self time.
+        let mut query_stack = Vec::new();
+
+        for event in events {
+            match event {
+                QueryStart { .. } | GenericActivityStart { .. } => {
+                    query_stack.push((event, 0));
+                },
+                QueryEnd { query_name, category, time: end_time } => {
+                    let previous_query = query_stack.pop();
+                    if let Some((QueryStart {
+                                    query_name: p_query_name,
+                                    time: start_time,
+                                    category: _ }, child_time_to_subtract)) = previous_query {
+                        assert_eq!(
+                            p_query_name,
+                            query_name,
+                            "Saw a query end but the previous query wasn't the corresponding start"
+                        );
+
+                        let time_ns = time_between_ns(*start_time, *end_time);
+                        let self_time_ns = time_ns - child_time_to_subtract;
+                        let result_data = results.categories.entry(*category).or_default();
+
+                        *result_data.query_times.entry(query_name).or_default() += self_time_ns;
+
+                        if let Some((_, child_time_to_subtract)) = query_stack.last_mut() {
+                            *child_time_to_subtract += time_ns;
+                        }
+                    } else {
+                        bug!("Saw a query end but the previous event wasn't a query start");
+                    }
+                }
+                GenericActivityEnd { category, time: end_time } => {
+                    let previous_event = query_stack.pop();
+                    if let Some((GenericActivityStart {
+                                    category: previous_category,
+                                    time: start_time }, child_time_to_subtract)) = previous_event {
+                        assert_eq!(
+                            previous_category,
+                            category,
+                            "Saw an end but the previous event wasn't the corresponding start"
+                        );
+
+                        let time_ns = time_between_ns(*start_time, *end_time);
+                        let self_time_ns = time_ns - child_time_to_subtract;
+                        let result_data = results.categories.entry(*category).or_default();
+
+                        *result_data.query_times
+                            .entry("{time spent not running queries}")
+                            .or_default() += self_time_ns;
+
+                        if let Some((_, child_time_to_subtract)) = query_stack.last_mut() {
+                            *child_time_to_subtract += time_ns;
+                        }
+                    } else {
+                        bug!("Saw an activity end but the previous event wasn't an activity start");
+                    }
+                },
+                QueryCacheHit { category, query_name } => {
+                    let result_data = results.categories.entry(*category).or_default();
+
+                    let (hits, total) =
+                        result_data.query_cache_stats.entry(query_name).or_insert((0, 0));
+                    *hits += 1;
+                    *total += 1;
+                },
+                QueryCount { category, query_name, count } => {
+                    let result_data = results.categories.entry(*category).or_default();
+
+                    let (_, totals) =
+                        result_data.query_cache_stats.entry(query_name).or_insert((0, 0));
+                    *totals += *count as u64;
+                },
             }
         }
 
-        //the new timer is different than the previous,
-        //so record the elapsed time and start a new timer
-        let elapsed = self.stop_timer();
-        let new_time = self.data.times.get(category) + elapsed;
-        self.data.times.set(category, new_time);
+        //normalize the times to ms
+        for (_, data) in &mut results.categories {
+            for (_, time) in &mut data.query_times {
+                *time /= 1_000_000;
+            }
+        }
+
+        results
     }
 
-    fn stop_timer(&mut self) -> u64 {
-        let elapsed = self.current_timer.elapsed();
-
-        self.current_timer = Instant::now();
-
-        (elapsed.as_secs() * 1_000_000_000) + (elapsed.subsec_nanos() as u64)
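+    // Compute each thread's results independently, then merge them into one set.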
+    fn get_results(&self, opts: &Options) -> CalculatedResults {
+        self.events
+            .iter()
+            .map(|(_, r)| SelfProfiler::calculate_thread_results(r))
+            .fold(CalculatedResults::new(), CalculatedResults::consolidate)
+            .with_options(opts)
     }
 
     pub fn print_results(&mut self, opts: &Options) {
         self.end_activity(ProfileCategory::Other);
 
-        assert!(
-            self.timer_stack.is_empty(),
-            "there were timers running when print_results() was called");
+        let results = self.get_results(opts);
+
+        let total_time = results.total_time() as f32;
 
         let out = io::stderr();
         let mut lock = out.lock();
 
-        let crate_name =
-            opts.crate_name
-            .as_ref()
-            .map(|n| format!(" for {}", n))
-            .unwrap_or_default();
+        let crate_name = results.crate_name.map(|n| format!(" for {}", n)).unwrap_or_default();
 
         writeln!(lock, "Self profiling results{}:", crate_name).unwrap();
         writeln!(lock).unwrap();
 
-        self.data.print(&mut lock);
+        writeln!(lock, "| Phase                                     | Time (ms)      \
+                        | Time (%) | Queries        | Hits (%)")
+            .unwrap();
+        writeln!(lock, "| ----------------------------------------- | -------------- \
+                        | -------- | -------------- | --------")
+            .unwrap();
+
+        let mut categories: Vec<_> = results.categories.iter().collect();
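+        // Display the most expensive categories first.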
+        categories.sort_by(|(_, data1), (_, data2)| data2.total_time().cmp(&data1.total_time()));
+
+        for (category, data) in categories {
+            let (category_hits, category_total) = data.total_cache_data();
+            let category_hit_percent = calculate_percent(category_hits, category_total);
+
+            writeln!(
+                lock,
+                "| {0: <41} | {1: >14} | {2: >8.2} | {3: >14} | {4: >8}",
+                format!("{:?}", category),
+                data.total_time(),
+                ((data.total_time() as f32) / total_time) * 100.0,
+                category_total,
+                format!("{:.2}", category_hit_percent),
+            ).unwrap();
+
+            //in verbose mode, show individual query data
+            if results.verbose {
+                //don't show queries that took less than 1ms
+                let mut times: Vec<_> = data.query_times.iter().filter(|(_, t)| **t > 0).collect();
+                times.sort_by(|(_, time1), (_, time2)| time2.cmp(time1));
+
+                for (query, time) in times {
+                    let (hits, total) = data.query_cache_stats.get(query).unwrap_or(&(0, 0));
+                    let hit_percent = calculate_percent(*hits, *total);
+
+                    writeln!(
+                        lock,
+                        "| - {0: <39} | {1: >14} | {2: >8.2} | {3: >14} | {4: >8}",
+                        query,
+                        time,
+                        ((*time as f32) / total_time) * 100.0,
+                        total,
+                        format!("{:.2}", hit_percent),
+                    ).unwrap();
+                }
+            }
+        }
 
         writeln!(lock).unwrap();
         writeln!(lock, "Optimization level: {:?}", opts.optimize).unwrap();
-
-        let incremental = if opts.incremental.is_some() { "on" } else { "off" };
-        writeln!(lock, "Incremental: {}", incremental).unwrap();
+        writeln!(lock, "Incremental: {}", if results.incremental { "on" } else { "off" }).unwrap();
     }
 
     pub fn save_results(&self, opts: &Options) {
-        let category_data = self.data.json();
+        let results = self.get_results(opts);
+
         let compilation_options =
             format!("{{ \"optimization_level\": \"{:?}\", \"incremental\": {} }}",
-                    opts.optimize,
-                    if opts.incremental.is_some() { "true" } else { "false" });
+                    results.optimization_level,
+                    if results.incremental { "true" } else { "false" });
+
+        // Build the per-category data as a JSON array; separate the entries with
+        // commas so the output remains valid JSON when there is more than one category.
+        let mut category_data = String::from("[");
+
+        for (category, data) in &results.categories {
+            let (hits, total) = data.total_cache_data();
+            let hit_percent = calculate_percent(hits, total);
+
+            if category_data.len() > 1 {
+                category_data.push(',');
+            }
+
+            category_data.push_str(&format!("{{ \"category\": \"{:?}\", \"time_ms\": {}, \
+                                                \"query_count\": {}, \"query_hits\": {:.2} }}",
+                                            category,
+                                            data.total_time(),
+                                            total,
+                                            hit_percent));
+        }
+
+        category_data.push(']');
 
         let json = format!("{{ \"category_data\": {}, \"compilation_options\": {} }}",
                         category_data,
diff --git a/src/librustc_codegen_llvm/back/rpath.rs b/src/librustc_codegen_llvm/back/rpath.rs
index aeff23d..a5c828e 100644
--- a/src/librustc_codegen_llvm/back/rpath.rs
+++ b/src/librustc_codegen_llvm/back/rpath.rs
@@ -101,9 +101,9 @@
 
     let cwd = env::current_dir().unwrap();
     let mut lib = fs::canonicalize(&cwd.join(lib)).unwrap_or_else(|_| cwd.join(lib));
-    lib.pop();
+    lib.pop(); // strip filename
     let mut output = cwd.join(&config.out_filename);
-    output.pop();
+    output.pop(); // strip filename
     let output = fs::canonicalize(&output).unwrap_or(output);
     let relative = path_relative_from(&lib, &output).unwrap_or_else(||
         panic!("couldn't create relative path from {:?} to {:?}", output, lib));
diff --git a/src/librustc_codegen_llvm/base.rs b/src/librustc_codegen_llvm/base.rs
index 6e1ef44..d9f44ca 100644
--- a/src/librustc_codegen_llvm/base.rs
+++ b/src/librustc_codegen_llvm/base.rs
@@ -20,6 +20,7 @@
 
 use llvm;
 use metadata;
+use rustc::dep_graph;
 use rustc::mir::mono::{Linkage, Visibility, Stats};
 use rustc::middle::cstore::{EncodedMetadata};
 use rustc::ty::TyCtxt;
@@ -145,7 +146,8 @@
     let ((stats, module), _) = tcx.dep_graph.with_task(dep_node,
                                                        tcx,
                                                        cgu_name,
-                                                       module_codegen);
+                                                       module_codegen,
+                                                       dep_graph::hash_result);
     let time_to_codegen = start_time.elapsed();
 
     // We assume that the cost to run LLVM on a CGU is proportional to
diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs
index b356ae3..f34a7e0 100644
--- a/src/librustc_driver/lib.rs
+++ b/src/librustc_driver/lib.rs
@@ -838,7 +838,15 @@
                 early_error(sopts.error_format, "no input filename given");
             }
             1 => panic!("make_input should have provided valid inputs"),
-            _ => early_error(sopts.error_format, "multiple input filenames provided"),
+            _ =>
+                early_error(
+                    sopts.error_format,
+                    &format!(
+                        "multiple input filenames provided (first two filenames are `{}` and `{}`)",
+                        matches.free[0],
+                        matches.free[1],
+                    ),
+                )
         }
     }
 
diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs
index 061d236..2821201 100644
--- a/src/librustc_errors/emitter.rs
+++ b/src/librustc_errors/emitter.rs
@@ -672,8 +672,8 @@
         //   | |  something about `foo`
         //   | something about `fn foo()`
         annotations_position.sort_by(|a, b| {
-            // Decreasing order
-            a.1.len().cmp(&b.1.len()).reverse()
+            // Decreasing order. When `a` and `b` are the same length, prefer `Primary`.
+            (a.1.len(), !a.1.is_primary).cmp(&(b.1.len(), !b.1.is_primary)).reverse()
         });
 
         // Write the underlines.
diff --git a/src/librustc_incremental/persist/dirty_clean.rs b/src/librustc_incremental/persist/dirty_clean.rs
index 9b52199..51f3bcd 100644
--- a/src/librustc_incremental/persist/dirty_clean.rs
+++ b/src/librustc_incremental/persist/dirty_clean.rs
@@ -67,11 +67,11 @@
     label_strs::ImplTraitRef,
 ];
 
-/// DepNodes for MirValidated/Optimized, which is relevant in "executable"
+/// DepNodes for MirBuilt/Optimized, which are relevant in "executable"
 /// code, i.e., functions+methods
 const BASE_MIR: &[&str] = &[
     label_strs::MirOptimized,
-    label_strs::MirValidated,
+    label_strs::MirBuilt,
 ];
 
 /// Struct, Enum and Union DepNodes
diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs
index 49a3e33..b248c6b 100644
--- a/src/librustc_metadata/cstore_impl.rs
+++ b/src/librustc_metadata/cstore_impl.rs
@@ -29,6 +29,7 @@
 use syntax::source_map;
 use syntax::edition::Edition;
 use syntax::parse::source_file_to_stream;
+use syntax::parse::parser::emit_unclosed_delims;
 use syntax::symbol::Symbol;
 use syntax_pos::{Span, NO_EXPANSION, FileName};
 use rustc_data_structures::bit_set::BitSet;
@@ -436,7 +437,8 @@
 
         let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body);
         let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION);
-        let body = source_file_to_stream(&sess.parse_sess, source_file, None);
+        let (body, errors) = source_file_to_stream(&sess.parse_sess, source_file, None);
+        emit_unclosed_delims(&errors, &sess.diagnostic());
 
         // Mark the attrs as used
         let attrs = data.get_item_attrs(id.index, sess);
diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs
index d68ab97..4699f4c 100644
--- a/src/librustc_metadata/encoder.rs
+++ b/src/librustc_metadata/encoder.rs
@@ -674,7 +674,7 @@
         let def_id = field.did;
         debug!("IsolatedEncoder::encode_field({:?})", def_id);
 
-        let variant_id = tcx.hir().as_local_node_id(variant.did).unwrap();
+        let variant_id = tcx.hir().as_local_hir_id(variant.did).unwrap();
         let variant_data = tcx.hir().expect_variant_data(variant_id);
 
         Entry {
diff --git a/src/librustc_mir/borrow_check/error_reporting.rs b/src/librustc_mir/borrow_check/error_reporting.rs
index afb2696..5a8d754 100644
--- a/src/librustc_mir/borrow_check/error_reporting.rs
+++ b/src/librustc_mir/borrow_check/error_reporting.rs
@@ -833,13 +833,13 @@
                 format!("`{}` would have to be valid for `{}`...", name, region_name),
             );
 
-            if let Some(fn_node_id) = self.infcx.tcx.hir().as_local_node_id(self.mir_def_id) {
+            if let Some(fn_hir_id) = self.infcx.tcx.hir().as_local_hir_id(self.mir_def_id) {
                 err.span_label(
                     drop_span,
                     format!(
                         "...but `{}` will be dropped here, when the function `{}` returns",
                         name,
-                        self.infcx.tcx.hir().name(fn_node_id),
+                        self.infcx.tcx.hir().name_by_hir_id(fn_hir_id),
                     ),
                 );
 
diff --git a/src/librustc_mir/borrow_check/move_errors.rs b/src/librustc_mir/borrow_check/move_errors.rs
index f7d4692..2a5433d 100644
--- a/src/librustc_mir/borrow_check/move_errors.rs
+++ b/src/librustc_mir/borrow_check/move_errors.rs
@@ -308,9 +308,8 @@
                                         let upvar_decl = &self.mir.upvar_decls[field.index()];
                                         let upvar_hir_id =
                                             upvar_decl.var_hir_id.assert_crate_local();
-                                        let upvar_node_id =
-                                            self.infcx.tcx.hir().hir_to_node_id(upvar_hir_id);
-                                        let upvar_span = self.infcx.tcx.hir().span(upvar_node_id);
+                                        let upvar_span = self.infcx.tcx.hir().span_by_hir_id(
+                                            upvar_hir_id);
                                         diag.span_label(upvar_span, "captured outer variable");
                                         break;
                                     }
diff --git a/src/librustc_mir/borrow_check/nll/mod.rs b/src/librustc_mir/borrow_check/nll/mod.rs
index 1fca104..84fdbb9 100644
--- a/src/librustc_mir/borrow_check/nll/mod.rs
+++ b/src/librustc_mir/borrow_check/nll/mod.rs
@@ -209,7 +209,7 @@
 
 fn dump_mir_results<'a, 'gcx, 'tcx>(
     infcx: &InferCtxt<'a, 'gcx, 'tcx>,
-    source: MirSource,
+    source: MirSource<'tcx>,
     mir: &Mir<'tcx>,
     regioncx: &RegionInferenceContext<'_>,
     closure_region_requirements: &Option<ClosureRegionRequirements<'_>>,
diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs
index 2c4f359..3bb22d3 100644
--- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs
+++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs
@@ -10,7 +10,7 @@
 use rustc::ty::{self, RegionKind, RegionVid, Ty, TyCtxt};
 use rustc::util::ppaux::RegionHighlightMode;
 use rustc_errors::DiagnosticBuilder;
-use syntax::ast::{Name, DUMMY_NODE_ID};
+use syntax::ast::Name;
 use syntax::symbol::keywords;
 use syntax_pos::Span;
 use syntax_pos::symbol::InternedString;
@@ -293,9 +293,9 @@
         name: &InternedString,
     ) -> Span {
         let scope = error_region.free_region_binding_scope(tcx);
-        let node = tcx.hir().as_local_node_id(scope).unwrap_or(DUMMY_NODE_ID);
+        let node = tcx.hir().as_local_hir_id(scope).unwrap_or(hir::DUMMY_HIR_ID);
 
-        let span = tcx.sess.source_map().def_span(tcx.hir().span(node));
+        let span = tcx.sess.source_map().def_span(tcx.hir().span_by_hir_id(node));
         if let Some(param) = tcx.hir()
             .get_generics(scope)
             .and_then(|generics| generics.get_named(name))
@@ -681,10 +681,13 @@
 
         let (return_span, mir_description) = match tcx.hir().get(mir_node_id) {
             hir::Node::Expr(hir::Expr {
-                node: hir::ExprKind::Closure(_, _, _, span, gen_move),
+                node: hir::ExprKind::Closure(_, return_ty, _, span, gen_move),
                 ..
             }) => (
-                tcx.sess.source_map().end_point(*span),
+                match return_ty.output {
+                    hir::FunctionRetTy::DefaultReturn(_) => tcx.sess.source_map().end_point(*span),
+                    hir::FunctionRetTy::Return(_) => return_ty.output.span(),
+                },
                 if gen_move.is_some() {
                     " of generator"
                 } else {
diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs
index bd7b882..f6bbaf2 100644
--- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs
+++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs
@@ -71,11 +71,10 @@
         upvar_index: usize,
     ) -> (Symbol, Span) {
         let upvar_hir_id = mir.upvar_decls[upvar_index].var_hir_id.assert_crate_local();
-        let upvar_node_id = tcx.hir().hir_to_node_id(upvar_hir_id);
-        debug!("get_upvar_name_and_span_for_region: upvar_node_id={:?}", upvar_node_id);
+        debug!("get_upvar_name_and_span_for_region: upvar_hir_id={:?}", upvar_hir_id);
 
-        let upvar_name = tcx.hir().name(upvar_node_id);
-        let upvar_span = tcx.hir().span(upvar_node_id);
+        let upvar_name = tcx.hir().name_by_hir_id(upvar_hir_id);
+        let upvar_span = tcx.hir().span_by_hir_id(upvar_hir_id);
         debug!("get_upvar_name_and_span_for_region: upvar_name={:?} upvar_span={:?}",
                upvar_name, upvar_span);
 
diff --git a/src/librustc_mir/borrow_check/nll/type_check/mod.rs b/src/librustc_mir/borrow_check/nll/type_check/mod.rs
index 19ff47f..add07b1 100644
--- a/src/librustc_mir/borrow_check/nll/type_check/mod.rs
+++ b/src/librustc_mir/borrow_check/nll/type_check/mod.rs
@@ -2427,8 +2427,13 @@
 pub struct TypeckMir;
 
 impl MirPass for TypeckMir {
-    fn run_pass<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &mut Mir<'tcx>) {
-        let def_id = src.def_id;
+    fn run_pass<'a, 'tcx>(
+        &self,
+        tcx: TyCtxt<'a, 'tcx, 'tcx>,
+        src: MirSource<'tcx>,
+        mir: &mut Mir<'tcx>,
+    ) {
+        let def_id = src.def_id();
         debug!("run_pass: {:?}", def_id);
 
         // When NLL is enabled, the borrow checker runs the typeck
diff --git a/src/librustc_mir/borrow_check/nll/universal_regions.rs b/src/librustc_mir/borrow_check/nll/universal_regions.rs
index 0a214e6..ad4444e 100644
--- a/src/librustc_mir/borrow_check/nll/universal_regions.rs
+++ b/src/librustc_mir/borrow_check/nll/universal_regions.rs
@@ -771,9 +771,8 @@
                 owner: fn_def_id.index,
                 local_id: *late_bound,
             };
-            let region_node_id = tcx.hir().hir_to_node_id(hir_id);
-            let name = tcx.hir().name(region_node_id).as_interned_str();
-            let region_def_id = tcx.hir().local_def_id(region_node_id);
+            let name = tcx.hir().name_by_hir_id(hir_id).as_interned_str();
+            let region_def_id = tcx.hir().local_def_id_from_hir_id(hir_id);
             let liberated_region = tcx.mk_region(ty::ReFree(ty::FreeRegion {
                 scope: fn_def_id,
                 bound_region: ty::BoundRegion::BrNamed(region_def_id, name),
diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs
index 0665867..88dbd93 100644
--- a/src/librustc_mir/build/expr/as_rvalue.rs
+++ b/src/librustc_mir/build/expr/as_rvalue.rs
@@ -268,7 +268,7 @@
                             span: expr_span,
                             ty: this.hir.tcx().types.u32,
                             user_ty: None,
-                            literal: this.hir.tcx().intern_lazy_const(ty::LazyConst::Evaluated(
+                            literal: this.hir.tcx().mk_lazy_const(ty::LazyConst::Evaluated(
                                 ty::Const::from_bits(
                                     this.hir.tcx(),
                                     0,
diff --git a/src/librustc_mir/build/matches/test.rs b/src/librustc_mir/build/matches/test.rs
index 395858c..efac4457 100644
--- a/src/librustc_mir/build/matches/test.rs
+++ b/src/librustc_mir/build/matches/test.rs
@@ -302,7 +302,7 @@
                     }
                     let eq_def_id = self.hir.tcx().lang_items().eq_trait().unwrap();
                     let (mty, method) = self.hir.trait_method(eq_def_id, "eq", ty, &[ty.into()]);
-                    let method = self.hir.tcx().intern_lazy_const(ty::LazyConst::Evaluated(method));
+                    let method = self.hir.tcx().mk_lazy_const(ty::LazyConst::Evaluated(method));
 
                     let re_erased = self.hir.tcx().types.re_erased;
                     // take the argument by reference
diff --git a/src/librustc_mir/build/misc.rs b/src/librustc_mir/build/misc.rs
index 1634c36..096020b 100644
--- a/src/librustc_mir/build/misc.rs
+++ b/src/librustc_mir/build/misc.rs
@@ -33,7 +33,7 @@
             span,
             ty,
             user_ty: None,
-            literal: self.hir.tcx().intern_lazy_const(ty::LazyConst::Evaluated(literal)),
+            literal: self.hir.tcx().mk_lazy_const(ty::LazyConst::Evaluated(literal)),
         };
         Operand::Constant(constant)
     }
diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs
index a52b032..ed35fb2 100644
--- a/src/librustc_mir/build/mod.rs
+++ b/src/librustc_mir/build/mod.rs
@@ -64,8 +64,8 @@
         ) => {
             (*body_id, ty.span)
         }
-        Node::AnonConst(hir::AnonConst { body, id, .. }) => {
-            (*body, tcx.hir().span(*id))
+        Node::AnonConst(hir::AnonConst { body, hir_id, .. }) => {
+            (*body, tcx.hir().span_by_hir_id(*hir_id))
         }
 
         _ => span_bug!(tcx.hir().span(id), "can't build MIR for {:?}", def_id),
@@ -114,7 +114,7 @@
                         let self_arg;
                         if let Some(ref fn_decl) = tcx.hir().fn_decl(owner_id) {
                             let ty_hir_id = fn_decl.inputs[index].hir_id;
-                            let ty_span = tcx.hir().span(tcx.hir().hir_to_node_id(ty_hir_id));
+                            let ty_span = tcx.hir().span_by_hir_id(ty_hir_id);
                             opt_ty_info = Some(ty_span);
                             self_arg = if index == 0 && fn_decl.implicit_self.has_implicit_self() {
                                 match fn_decl.implicit_self {
diff --git a/src/librustc_mir/hair/cx/block.rs b/src/librustc_mir/hair/cx/block.rs
index c24cf956..ed9f95f 100644
--- a/src/librustc_mir/hair/cx/block.rs
+++ b/src/librustc_mir/hair/cx/block.rs
@@ -48,7 +48,7 @@
     for (index, stmt) in stmts.iter().enumerate() {
         let hir_id = stmt.hir_id;
         let opt_dxn_ext = cx.region_scope_tree.opt_destruction_scope(hir_id.local_id);
-        let stmt_span = StatementSpan(cx.tcx.hir().span(stmt.id));
+        let stmt_span = StatementSpan(cx.tcx.hir().span_by_hir_id(hir_id));
         match stmt.node {
             hir::StmtKind::Expr(ref expr) |
             hir::StmtKind::Semi(ref expr) => {
diff --git a/src/librustc_mir/hair/cx/expr.rs b/src/librustc_mir/hair/cx/expr.rs
index 0759b95a..7e1365f 100644
--- a/src/librustc_mir/hair/cx/expr.rs
+++ b/src/librustc_mir/hair/cx/expr.rs
@@ -342,7 +342,7 @@
         }
 
         hir::ExprKind::Lit(ref lit) => ExprKind::Literal {
-            literal: cx.tcx.intern_lazy_const(ty::LazyConst::Evaluated(
+            literal: cx.tcx.mk_lazy_const(ty::LazyConst::Evaluated(
                 cx.const_eval_literal(&lit.node, expr_ty, lit.span, false)
             )),
             user_ty: None,
@@ -442,7 +442,7 @@
             } else {
                 if let hir::ExprKind::Lit(ref lit) = arg.node {
                     ExprKind::Literal {
-                        literal: cx.tcx.intern_lazy_const(ty::LazyConst::Evaluated(
+                        literal: cx.tcx.mk_lazy_const(ty::LazyConst::Evaluated(
                             cx.const_eval_literal(&lit.node, expr_ty, lit.span, true)
                         )),
                         user_ty: None,
@@ -702,7 +702,7 @@
                         ty: var_ty,
                         span: expr.span,
                         kind: ExprKind::Literal {
-                            literal: cx.tcx.intern_lazy_const(literal),
+                            literal: cx.tcx.mk_lazy_const(literal),
                             user_ty: None
                         },
                     }.to_ref();
@@ -856,7 +856,7 @@
         ty,
         span,
         kind: ExprKind::Literal {
-            literal: cx.tcx().intern_lazy_const(ty::LazyConst::Evaluated(
+            literal: cx.tcx().mk_lazy_const(ty::LazyConst::Evaluated(
                 ty::Const::zero_sized(ty)
             )),
             user_ty,
@@ -918,7 +918,7 @@
             let user_ty = user_substs_applied_to_def(cx, expr.hir_id, &def);
             debug!("convert_path_expr: user_ty={:?}", user_ty);
             ExprKind::Literal {
-                literal: cx.tcx.intern_lazy_const(ty::LazyConst::Evaluated(ty::Const::zero_sized(
+                literal: cx.tcx.mk_lazy_const(ty::LazyConst::Evaluated(ty::Const::zero_sized(
                     cx.tables().node_id_to_type(expr.hir_id),
                 ))),
                 user_ty,
@@ -930,7 +930,7 @@
             let user_ty = user_substs_applied_to_def(cx, expr.hir_id, &def);
             debug!("convert_path_expr: (const) user_ty={:?}", user_ty);
             ExprKind::Literal {
-                literal: cx.tcx.intern_lazy_const(ty::LazyConst::Unevaluated(def_id, substs)),
+                literal: cx.tcx.mk_lazy_const(ty::LazyConst::Unevaluated(def_id, substs)),
                 user_ty,
             }
         },
diff --git a/src/librustc_mir/hair/cx/mod.rs b/src/librustc_mir/hair/cx/mod.rs
index 6d61801..47be822 100644
--- a/src/librustc_mir/hair/cx/mod.rs
+++ b/src/librustc_mir/hair/cx/mod.rs
@@ -110,7 +110,7 @@
     }
 
     pub fn usize_literal(&mut self, value: u64) -> &'tcx ty::LazyConst<'tcx> {
-        self.tcx.intern_lazy_const(ty::LazyConst::Evaluated(ty::Const::from_usize(self.tcx, value)))
+        self.tcx.mk_lazy_const(ty::LazyConst::Evaluated(ty::Const::from_usize(self.tcx, value)))
     }
 
     pub fn bool_ty(&mut self) -> Ty<'tcx> {
@@ -122,11 +122,11 @@
     }
 
     pub fn true_literal(&mut self) -> &'tcx ty::LazyConst<'tcx> {
-        self.tcx.intern_lazy_const(ty::LazyConst::Evaluated(ty::Const::from_bool(self.tcx, true)))
+        self.tcx.mk_lazy_const(ty::LazyConst::Evaluated(ty::Const::from_bool(self.tcx, true)))
     }
 
     pub fn false_literal(&mut self) -> &'tcx ty::LazyConst<'tcx> {
-        self.tcx.intern_lazy_const(ty::LazyConst::Evaluated(ty::Const::from_bool(self.tcx, false)))
+        self.tcx.mk_lazy_const(ty::LazyConst::Evaluated(ty::Const::from_bool(self.tcx, false)))
     }
 
     pub fn const_eval_literal(
diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs
index 9ca7f9d..3d6fcae 100644
--- a/src/librustc_mir/interpret/place.rs
+++ b/src/librustc_mir/interpret/place.rs
@@ -823,6 +823,8 @@
         let src = match self.try_read_immediate(src)? {
             Ok(src_val) => {
                 // Yay, we got a value that we can write directly.
+                // FIXME: Add a check to make sure that if `src` is indirect,
+                // it does not overlap with `dest`.
                 return self.write_immediate_no_validate(src_val, dest);
             }
             Err(mplace) => mplace,
@@ -836,7 +838,8 @@
         self.memory.copy(
             src_ptr, src_align,
             dest_ptr, dest_align,
-            dest.layout.size, false
+            dest.layout.size,
+            /*nonoverlapping*/ true,
         )?;
 
         Ok(())
diff --git a/src/librustc_mir/interpret/visitor.rs b/src/librustc_mir/interpret/visitor.rs
index 930bcb4..4ff5cde 100644
--- a/src/librustc_mir/interpret/visitor.rs
+++ b/src/librustc_mir/interpret/visitor.rs
@@ -125,14 +125,14 @@
 }
 
 macro_rules! make_value_visitor {
-    ($visitor_trait_name:ident, $($mutability:ident)*) => {
+    ($visitor_trait_name:ident, $($mutability:ident)?) => {
         // How to traverse a value and what to do when we are at the leaves.
         pub trait $visitor_trait_name<'a, 'mir, 'tcx: 'mir+'a, M: Machine<'a, 'mir, 'tcx>>: Sized {
             type V: Value<'a, 'mir, 'tcx, M>;
 
             /// The visitor must have an `EvalContext` in it.
-            fn ecx(&$($mutability)* self)
-                -> &$($mutability)* EvalContext<'a, 'mir, 'tcx, M>;
+            fn ecx(&$($mutability)? self)
+                -> &$($mutability)? EvalContext<'a, 'mir, 'tcx, M>;
 
             // Recursive actions, ready to be overloaded.
             /// Visit the given value, dispatching as appropriate to more specialized visitors.
diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs
index 7f3c24d..a76aa74 100644
--- a/src/librustc_mir/monomorphize/collector.rs
+++ b/src/librustc_mir/monomorphize/collector.rs
@@ -450,8 +450,8 @@
     if recursion_depth > *tcx.sess.recursion_limit.get() {
         let error = format!("reached the recursion limit while instantiating `{}`",
                             instance);
-        if let Some(node_id) = tcx.hir().as_local_node_id(def_id) {
-            tcx.sess.span_fatal(tcx.hir().span(node_id), &error);
+        if let Some(hir_id) = tcx.hir().as_local_hir_id(def_id) {
+            tcx.sess.span_fatal(tcx.hir().span_by_hir_id(hir_id), &error);
         } else {
             tcx.sess.fatal(&error);
         }
@@ -482,8 +482,8 @@
         let instance_name = instance.to_string();
         let msg = format!("reached the type-length limit while instantiating `{:.64}...`",
                           instance_name);
-        let mut diag = if let Some(node_id) = tcx.hir().as_local_node_id(instance.def_id()) {
-            tcx.sess.struct_span_fatal(tcx.hir().span(node_id), &msg)
+        let mut diag = if let Some(hir_id) = tcx.hir().as_local_hir_id(instance.def_id()) {
+            tcx.sess.struct_span_fatal(tcx.hir().span_by_hir_id(hir_id), &msg)
         } else {
             tcx.sess.struct_fatal(&msg)
         };
diff --git a/src/librustc_mir/shim.rs b/src/librustc_mir/shim.rs
index 942e7a1..09b8165 100644
--- a/src/librustc_mir/shim.rs
+++ b/src/librustc_mir/shim.rs
@@ -16,8 +16,10 @@
 use std::fmt;
 use std::iter;
 
-use crate::transform::{add_moves_for_packed_drops, add_call_guards};
-use crate::transform::{remove_noop_landing_pads, no_landing_pads, simplify};
+use crate::transform::{
+    add_moves_for_packed_drops, add_call_guards,
+    remove_noop_landing_pads, no_landing_pads, simplify, run_passes
+};
 use crate::util::elaborate_drops::{self, DropElaborator, DropStyle, DropFlagMode};
 use crate::util::patch::MirPatch;
 
@@ -113,12 +115,15 @@
         }
     };
     debug!("make_shim({:?}) = untransformed {:?}", instance, result);
-    add_moves_for_packed_drops::add_moves_for_packed_drops(
-        tcx, &mut result, instance.def_id());
-    no_landing_pads::no_landing_pads(tcx, &mut result);
-    remove_noop_landing_pads::remove_noop_landing_pads(tcx, &mut result);
-    simplify::simplify_cfg(&mut result);
-    add_call_guards::CriticalCallEdges.add_call_guards(&mut result);
+
+    run_passes(tcx, &mut result, instance, MirPhase::Const, &[
+        &add_moves_for_packed_drops::AddMovesForPackedDrops,
+        &no_landing_pads::NoLandingPads,
+        &remove_noop_landing_pads::RemoveNoopLandingPads,
+        &simplify::SimplifyCfg::new("make_shim"),
+        &add_call_guards::CriticalCallEdges,
+    ]);
+
     debug!("make_shim({:?}) = {:?}", instance, result);
 
     tcx.alloc_mir(result)
@@ -459,7 +464,7 @@
             span: self.span,
             ty: func_ty,
             user_ty: None,
-            literal: tcx.intern_lazy_const(ty::LazyConst::Evaluated(
+            literal: tcx.mk_lazy_const(ty::LazyConst::Evaluated(
                 ty::Const::zero_sized(func_ty),
             )),
         });
@@ -521,7 +526,7 @@
             span: self.span,
             ty: self.tcx.types.usize,
             user_ty: None,
-            literal: self.tcx.intern_lazy_const(ty::LazyConst::Evaluated(
+            literal: self.tcx.mk_lazy_const(ty::LazyConst::Evaluated(
                 ty::Const::from_usize(self.tcx, value),
             )),
         }
@@ -759,7 +764,7 @@
                 span,
                 ty,
                 user_ty: None,
-                literal: tcx.intern_lazy_const(ty::LazyConst::Evaluated(
+                literal: tcx.mk_lazy_const(ty::LazyConst::Evaluated(
                     ty::Const::zero_sized(ty)
                 )),
              }),
diff --git a/src/librustc_mir/transform/add_call_guards.rs b/src/librustc_mir/transform/add_call_guards.rs
index dab96fa..88042d6 100644
--- a/src/librustc_mir/transform/add_call_guards.rs
+++ b/src/librustc_mir/transform/add_call_guards.rs
@@ -33,7 +33,7 @@
 impl MirPass for AddCallGuards {
     fn run_pass<'a, 'tcx>(&self,
                           _tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _src: MirSource,
+                          _src: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         self.add_call_guards(mir);
     }
diff --git a/src/librustc_mir/transform/add_moves_for_packed_drops.rs b/src/librustc_mir/transform/add_moves_for_packed_drops.rs
index 1492f0c..4d4c89b 100644
--- a/src/librustc_mir/transform/add_moves_for_packed_drops.rs
+++ b/src/librustc_mir/transform/add_moves_for_packed_drops.rs
@@ -42,11 +42,11 @@
 impl MirPass for AddMovesForPackedDrops {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          src: MirSource,
+                          src: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>)
     {
         debug!("add_moves_for_packed_drops({:?} @ {:?})", src, mir.span);
-        add_moves_for_packed_drops(tcx, mir, src.def_id);
+        add_moves_for_packed_drops(tcx, mir, src.def_id());
     }
 }
 
diff --git a/src/librustc_mir/transform/add_retag.rs b/src/librustc_mir/transform/add_retag.rs
index 7bfcd31..e66c11a 100644
--- a/src/librustc_mir/transform/add_retag.rs
+++ b/src/librustc_mir/transform/add_retag.rs
@@ -77,7 +77,7 @@
 impl MirPass for AddRetag {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _src: MirSource,
+                          _src: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>)
     {
         if !tcx.sess.opts.debugging_opts.mir_emit_retag {
diff --git a/src/librustc_mir/transform/cleanup_post_borrowck.rs b/src/librustc_mir/transform/cleanup_post_borrowck.rs
index 240ef7c..890d2c56 100644
--- a/src/librustc_mir/transform/cleanup_post_borrowck.rs
+++ b/src/librustc_mir/transform/cleanup_post_borrowck.rs
@@ -35,7 +35,7 @@
 impl MirPass for CleanAscribeUserType {
     fn run_pass<'a, 'tcx>(&self,
                           _tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _source: MirSource,
+                          _source: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         let mut delete = DeleteAscribeUserType;
         delete.visit_mir(mir);
@@ -69,7 +69,7 @@
 impl MirPass for CleanFakeReadsAndBorrows {
     fn run_pass<'a, 'tcx>(&self,
                           _tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _source: MirSource,
+                          _source: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         let mut delete_reads = DeleteAndRecordFakeReads::default();
         delete_reads.visit_mir(mir);
diff --git a/src/librustc_mir/transform/const_prop.rs b/src/librustc_mir/transform/const_prop.rs
index dd1f37a..018f71c 100644
--- a/src/librustc_mir/transform/const_prop.rs
+++ b/src/librustc_mir/transform/const_prop.rs
@@ -30,7 +30,7 @@
 impl MirPass for ConstProp {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          source: MirSource,
+                          source: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         // will be evaluated by miri and produce its errors there
         if source.promoted.is_some() {
@@ -38,11 +38,11 @@
         }
 
         use rustc::hir::map::blocks::FnLikeNode;
-        let node_id = tcx.hir().as_local_node_id(source.def_id)
+        let node_id = tcx.hir().as_local_node_id(source.def_id())
                              .expect("Non-local call to local provider is_const_fn");
 
         let is_fn_like = FnLikeNode::from_node(tcx.hir().get(node_id)).is_some();
-        let is_assoc_const = match tcx.describe_def(source.def_id) {
+        let is_assoc_const = match tcx.describe_def(source.def_id()) {
             Some(Def::AssociatedConst(_)) => true,
             _ => false,
         };
@@ -50,11 +50,11 @@
         // Only run const prop on functions, methods, closures and associated constants
         if !is_fn_like && !is_assoc_const  {
             // skip anon_const/statics/consts because they'll be evaluated by miri anyway
-            trace!("ConstProp skipped for {:?}", source.def_id);
+            trace!("ConstProp skipped for {:?}", source.def_id());
             return
         }
 
-        trace!("ConstProp starting for {:?}", source.def_id);
+        trace!("ConstProp starting for {:?}", source.def_id());
 
         // FIXME(oli-obk, eddyb) Optimize locals (or even local paths) to hold
         // constants, instead of just checking for const-folding succeeding.
@@ -63,7 +63,7 @@
         let mut optimization_finder = ConstPropagator::new(mir, tcx, source);
         optimization_finder.visit_mir(mir);
 
-        trace!("ConstProp done for {:?}", source.def_id);
+        trace!("ConstProp done for {:?}", source.def_id());
     }
 }
 
@@ -74,7 +74,7 @@
     ecx: EvalContext<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
     mir: &'mir Mir<'tcx>,
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    source: MirSource,
+    source: MirSource<'tcx>,
     places: IndexVec<Local, Option<Const<'tcx>>>,
     can_const_prop: IndexVec<Local, bool>,
     param_env: ParamEnv<'tcx>,
@@ -107,10 +107,10 @@
     fn new(
         mir: &'mir Mir<'tcx>,
         tcx: TyCtxt<'a, 'tcx, 'tcx>,
-        source: MirSource,
+        source: MirSource<'tcx>,
     ) -> ConstPropagator<'a, 'mir, 'tcx> {
-        let param_env = tcx.param_env(source.def_id);
-        let ecx = mk_eval_cx(tcx, tcx.def_span(source.def_id), param_env);
+        let param_env = tcx.param_env(source.def_id());
+        let ecx = mk_eval_cx(tcx, tcx.def_span(source.def_id()), param_env);
         ConstPropagator {
             ecx,
             mir,
@@ -284,13 +284,13 @@
                 _ => None,
             },
             Place::Promoted(ref promoted) => {
-                let generics = self.tcx.generics_of(self.source.def_id);
+                let generics = self.tcx.generics_of(self.source.def_id());
                 if generics.requires_monomorphization(self.tcx) {
                     // FIXME: can't handle code with generics
                     return None;
                 }
-                let substs = Substs::identity_for_item(self.tcx, self.source.def_id);
-                let instance = Instance::new(self.source.def_id, substs);
+                let substs = Substs::identity_for_item(self.tcx, self.source.def_id());
+                let instance = Instance::new(self.source.def_id(), substs);
                 let cid = GlobalId {
                     instance,
                     promoted: Some(promoted.0),
@@ -358,10 +358,10 @@
                 )))
             }
             Rvalue::UnaryOp(op, ref arg) => {
-                let def_id = if self.tcx.is_closure(self.source.def_id) {
-                    self.tcx.closure_base_def_id(self.source.def_id)
+                let def_id = if self.tcx.is_closure(self.source.def_id()) {
+                    self.tcx.closure_base_def_id(self.source.def_id())
                 } else {
-                    self.source.def_id
+                    self.source.def_id()
                 };
                 let generics = self.tcx.generics_of(def_id);
                 if generics.requires_monomorphization(self.tcx) {
@@ -398,10 +398,10 @@
             Rvalue::BinaryOp(op, ref left, ref right) => {
                 trace!("rvalue binop {:?} for {:?} and {:?}", op, left, right);
                 let right = self.eval_operand(right, source_info)?;
-                let def_id = if self.tcx.is_closure(self.source.def_id) {
-                    self.tcx.closure_base_def_id(self.source.def_id)
+                let def_id = if self.tcx.is_closure(self.source.def_id()) {
+                    self.tcx.closure_base_def_id(self.source.def_id())
                 } else {
-                    self.source.def_id
+                    self.source.def_id()
                 };
                 let generics = self.tcx.generics_of(def_id);
                 if generics.requires_monomorphization(self.tcx) {
@@ -608,7 +608,7 @@
                     let node_id = self
                         .tcx
                         .hir()
-                        .as_local_node_id(self.source.def_id)
+                        .as_local_node_id(self.source.def_id())
                         .expect("some part of a failing const eval must be local");
                     use rustc::mir::interpret::EvalErrorKind::*;
                     let msg = match msg {
diff --git a/src/librustc_mir/transform/copy_prop.rs b/src/librustc_mir/transform/copy_prop.rs
index 4789c35..7d907ca 100644
--- a/src/librustc_mir/transform/copy_prop.rs
+++ b/src/librustc_mir/transform/copy_prop.rs
@@ -30,7 +30,7 @@
 impl MirPass for CopyPropagation {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _source: MirSource,
+                          _source: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         // We only run when the MIR optimization level is > 1.
         // This avoids a slow pass, and messing up debug info.
diff --git a/src/librustc_mir/transform/deaggregator.rs b/src/librustc_mir/transform/deaggregator.rs
index 669384e..9061dff 100644
--- a/src/librustc_mir/transform/deaggregator.rs
+++ b/src/librustc_mir/transform/deaggregator.rs
@@ -8,7 +8,7 @@
 impl MirPass for Deaggregator {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _source: MirSource,
+                          _source: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         let (basic_blocks, local_decls) = mir.basic_blocks_and_local_decls_mut();
         let local_decls = &*local_decls;
diff --git a/src/librustc_mir/transform/dump_mir.rs b/src/librustc_mir/transform/dump_mir.rs
index d7f697a..81e48fe 100644
--- a/src/librustc_mir/transform/dump_mir.rs
+++ b/src/librustc_mir/transform/dump_mir.rs
@@ -20,7 +20,7 @@
 
     fn run_pass<'a, 'tcx>(&self,
                           _tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _source: MirSource,
+                          _source: MirSource<'tcx>,
                           _mir: &mut Mir<'tcx>)
     {
     }
@@ -41,7 +41,7 @@
 pub fn on_mir_pass<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                              pass_num: &dyn fmt::Display,
                              pass_name: &str,
-                             source: MirSource,
+                             source: MirSource<'tcx>,
                              mir: &Mir<'tcx>,
                              is_after: bool) {
     if mir_util::dump_enabled(tcx, pass_name, source) {
diff --git a/src/librustc_mir/transform/elaborate_drops.rs b/src/librustc_mir/transform/elaborate_drops.rs
index 4aaa0be..c56acba 100644
--- a/src/librustc_mir/transform/elaborate_drops.rs
+++ b/src/librustc_mir/transform/elaborate_drops.rs
@@ -23,13 +23,13 @@
 impl MirPass for ElaborateDrops {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          src: MirSource,
+                          src: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>)
     {
         debug!("elaborate_drops({:?} @ {:?})", src, mir.span);
 
-        let id = tcx.hir().as_local_node_id(src.def_id).unwrap();
-        let param_env = tcx.param_env(src.def_id).with_reveal_all();
+        let id = tcx.hir().as_local_node_id(src.def_id()).unwrap();
+        let param_env = tcx.param_env(src.def_id()).with_reveal_all();
         let move_data = match MoveData::gather_moves(mir, tcx) {
             Ok(move_data) => move_data,
             Err((move_data, _move_errors)) => {
@@ -533,7 +533,7 @@
             span,
             ty: self.tcx.types.bool,
             user_ty: None,
-            literal: self.tcx.intern_lazy_const(ty::LazyConst::Evaluated(
+            literal: self.tcx.mk_lazy_const(ty::LazyConst::Evaluated(
                 ty::Const::from_bool(self.tcx, val),
             )),
         })))
diff --git a/src/librustc_mir/transform/erase_regions.rs b/src/librustc_mir/transform/erase_regions.rs
index b555a2a..d59bb3e 100644
--- a/src/librustc_mir/transform/erase_regions.rs
+++ b/src/librustc_mir/transform/erase_regions.rs
@@ -53,7 +53,7 @@
 impl MirPass for EraseRegions {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _: MirSource,
+                          _: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         EraseRegionsVisitor::new(tcx).visit_mir(mir);
     }
diff --git a/src/librustc_mir/transform/generator.rs b/src/librustc_mir/transform/generator.rs
index 9897f98..0866b87 100644
--- a/src/librustc_mir/transform/generator.rs
+++ b/src/librustc_mir/transform/generator.rs
@@ -198,7 +198,7 @@
             span: source_info.span,
             ty: self.tcx.types.u32,
             user_ty: None,
-            literal: self.tcx.intern_lazy_const(ty::LazyConst::Evaluated(ty::Const::from_bits(
+            literal: self.tcx.mk_lazy_const(ty::LazyConst::Evaluated(ty::Const::from_bits(
                 self.tcx,
                 state_disc.into(),
                 ty::ParamEnv::empty().and(self.tcx.types.u32)
@@ -376,14 +376,14 @@
 fn locals_live_across_suspend_points(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     mir: &Mir<'tcx>,
-    source: MirSource,
+    source: MirSource<'tcx>,
     movable: bool,
 ) -> (
     liveness::LiveVarSet<Local>,
     FxHashMap<BasicBlock, liveness::LiveVarSet<Local>>,
 ) {
     let dead_unwinds = BitSet::new_empty(mir.basic_blocks().len());
-    let node_id = tcx.hir().as_local_node_id(source.def_id).unwrap();
+    let node_id = tcx.hir().as_local_node_id(source.def_id()).unwrap();
 
     // Calculate when MIR locals have live storage. This gives us an upper bound of their
     // lifetimes.
@@ -484,7 +484,7 @@
 }
 
 fn compute_layout<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                            source: MirSource,
+                            source: MirSource<'tcx>,
                             upvars: Vec<Ty<'tcx>>,
                             interior: Ty<'tcx>,
                             movable: bool,
@@ -635,7 +635,7 @@
                 tcx: TyCtxt<'a, 'tcx, 'tcx>,
                 transform: &TransformVisitor<'a, 'tcx>,
                 def_id: DefId,
-                source: MirSource,
+                source: MirSource<'tcx>,
                 gen_ty: Ty<'tcx>,
                 mir: &Mir<'tcx>,
                 drop_clean: BasicBlock) -> Mir<'tcx> {
@@ -731,7 +731,7 @@
             span: mir.span,
             ty: tcx.types.bool,
             user_ty: None,
-            literal: tcx.intern_lazy_const(ty::LazyConst::Evaluated(
+            literal: tcx.mk_lazy_const(ty::LazyConst::Evaluated(
                 ty::Const::from_bool(tcx, false),
             )),
         }),
@@ -758,7 +758,7 @@
         tcx: TyCtxt<'a, 'tcx, 'tcx>,
         transform: TransformVisitor<'a, 'tcx>,
         def_id: DefId,
-        source: MirSource,
+        source: MirSource<'tcx>,
         mir: &mut Mir<'tcx>) {
     // Poison the generator when it unwinds
     for block in mir.basic_blocks_mut() {
@@ -869,7 +869,7 @@
 impl MirPass for StateTransform {
     fn run_pass<'a, 'tcx>(&self,
                     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                    source: MirSource,
+                    source: MirSource<'tcx>,
                     mir: &mut Mir<'tcx>) {
         let yield_ty = if let Some(yield_ty) = mir.yield_ty {
             yield_ty
@@ -880,7 +880,7 @@
 
         assert!(mir.generator_drop.is_none());
 
-        let def_id = source.def_id;
+        let def_id = source.def_id();
 
         // The first argument is the generator type passed by value
         let gen_ty = mir.local_decls.raw[1].ty;
diff --git a/src/librustc_mir/transform/inline.rs b/src/librustc_mir/transform/inline.rs
index 4fddf6f..4cb7826 100644
--- a/src/librustc_mir/transform/inline.rs
+++ b/src/librustc_mir/transform/inline.rs
@@ -40,7 +40,7 @@
 impl MirPass for Inline {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          source: MirSource,
+                          source: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
             Inliner { tcx, source }.run_pass(mir);
@@ -50,7 +50,7 @@
 
 struct Inliner<'a, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    source: MirSource,
+    source: MirSource<'tcx>,
 }
 
 impl<'a, 'tcx> Inliner<'a, 'tcx> {
@@ -69,10 +69,10 @@
 
         let mut callsites = VecDeque::new();
 
-        let param_env = self.tcx.param_env(self.source.def_id);
+        let param_env = self.tcx.param_env(self.source.def_id());
 
         // Only do inlining into fn bodies.
-        let id = self.tcx.hir().as_local_node_id(self.source.def_id).unwrap();
+        let id = self.tcx.hir().as_local_node_id(self.source.def_id()).unwrap();
         if self.tcx.hir().body_owner_kind(id).is_fn_or_closure() && self.source.promoted.is_none() {
             for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated() {
                 if let Some(callsite) = self.get_valid_function_call(bb,
@@ -274,7 +274,7 @@
 
         // FIXME: Give a bonus to functions with only a single caller
 
-        let param_env = tcx.param_env(self.source.def_id);
+        let param_env = tcx.param_env(self.source.def_id());
 
         let mut first_block = true;
         let mut cost = 0;
diff --git a/src/librustc_mir/transform/instcombine.rs b/src/librustc_mir/transform/instcombine.rs
index 21772e1..2909157 100644
--- a/src/librustc_mir/transform/instcombine.rs
+++ b/src/librustc_mir/transform/instcombine.rs
@@ -13,7 +13,7 @@
 impl MirPass for InstCombine {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _: MirSource,
+                          _: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         // We only run when optimizing MIR (at any level).
         if tcx.sess.opts.debugging_opts.mir_opt_level == 0 {
diff --git a/src/librustc_mir/transform/lower_128bit.rs b/src/librustc_mir/transform/lower_128bit.rs
index aa248ba..3d1f55e 100644
--- a/src/librustc_mir/transform/lower_128bit.rs
+++ b/src/librustc_mir/transform/lower_128bit.rs
@@ -12,7 +12,7 @@
 impl MirPass for Lower128Bit {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _src: MirSource,
+                          _src: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         let debugging_override = tcx.sess.opts.debugging_opts.lower_128bit_ops;
         let target_default = tcx.sess.host.options.i128_lowering;
diff --git a/src/librustc_mir/transform/mod.rs b/src/librustc_mir/transform/mod.rs
index cc37a83..44061e6 100644
--- a/src/librustc_mir/transform/mod.rs
+++ b/src/librustc_mir/transform/mod.rs
@@ -2,7 +2,7 @@
 use crate::build;
 use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
 use rustc::mir::{Mir, MirPhase, Promoted};
-use rustc::ty::TyCtxt;
+use rustc::ty::{TyCtxt, InstanceDef};
 use rustc::ty::query::Providers;
 use rustc::ty::steal::Steal;
 use rustc::hir;
@@ -104,20 +104,25 @@
 
 /// Where a specific Mir comes from.
 #[derive(Debug, Copy, Clone)]
-pub struct MirSource {
-    pub def_id: DefId,
+pub struct MirSource<'tcx> {
+    pub instance: InstanceDef<'tcx>,
 
     /// If `Some`, this is a promoted rvalue within the parent function.
     pub promoted: Option<Promoted>,
 }
 
-impl MirSource {
+impl<'tcx> MirSource<'tcx> {
     pub fn item(def_id: DefId) -> Self {
         MirSource {
-            def_id,
+            instance: InstanceDef::Item(def_id),
             promoted: None
         }
     }
+
+    #[inline]
+    pub fn def_id(&self) -> DefId {
+        self.instance.def_id()
+    }
 }
 
 /// Generates a default name for the pass based on the name of the
@@ -141,14 +146,14 @@
 
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          source: MirSource,
+                          source: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>);
 }
 
 pub fn run_passes(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     mir: &mut Mir<'tcx>,
-    def_id: DefId,
+    instance: InstanceDef<'tcx>,
     mir_phase: MirPhase,
     passes: &[&dyn MirPass],
 ) {
@@ -160,7 +165,7 @@
         }
 
         let source = MirSource {
-            def_id,
+            instance,
             promoted,
         };
         let mut index = 0;
@@ -198,7 +203,7 @@
     let _ = tcx.unsafety_check_result(def_id);
 
     let mut mir = tcx.mir_built(def_id).steal();
-    run_passes(tcx, &mut mir, def_id, MirPhase::Const, &[
+    run_passes(tcx, &mut mir, InstanceDef::Item(def_id), MirPhase::Const, &[
         // What we need to do constant evaluation.
         &simplify::SimplifyCfg::new("initial"),
         &type_check::TypeckMir,
@@ -217,7 +222,7 @@
     }
 
     let mut mir = tcx.mir_const(def_id).steal();
-    run_passes(tcx, &mut mir, def_id, MirPhase::Validated, &[
+    run_passes(tcx, &mut mir, InstanceDef::Item(def_id), MirPhase::Validated, &[
         // What we need to run borrowck etc.
         &qualify_consts::QualifyAndPromoteConstants,
         &simplify::SimplifyCfg::new("qualify-consts"),
@@ -235,7 +240,7 @@
     }
 
     let mut mir = tcx.mir_validated(def_id).steal();
-    run_passes(tcx, &mut mir, def_id, MirPhase::Optimized, &[
+    run_passes(tcx, &mut mir, InstanceDef::Item(def_id), MirPhase::Optimized, &[
         // Remove all things not needed by analysis
         &no_landing_pads::NoLandingPads,
         &simplify_branches::SimplifyBranches::new("initial"),
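(Aside, not part of the patch: the `MirSource` hunk above changes the struct to carry an `InstanceDef<'tcx>` instead of a bare `DefId`, with callers going through the new `def_id()` accessor. A minimal, self-contained sketch of that shape, using toy stand-ins for `DefId`, `InstanceDef<'tcx>` and `Promoted` rather than the real rustc types:)

```rust
// Toy stand-in for rustc's `DefId`.
#[derive(Debug, Copy, Clone, PartialEq)]
struct DefId(u32);

// Toy stand-in for `ty::InstanceDef<'tcx>`; a type-name string replaces `Ty<'tcx>`.
#[derive(Debug, Copy, Clone)]
enum InstanceDef {
    Item(DefId),
    DropGlue(DefId, Option<&'static str>),
}

impl InstanceDef {
    fn def_id(&self) -> DefId {
        match *self {
            InstanceDef::Item(did) | InstanceDef::DropGlue(did, _) => did,
        }
    }
}

// The new shape of `MirSource`: the def-id is derived from the wrapped instance.
#[derive(Debug, Copy, Clone)]
struct MirSource {
    instance: InstanceDef,
    promoted: Option<u32>,
}

impl MirSource {
    fn item(def_id: DefId) -> Self {
        MirSource { instance: InstanceDef::Item(def_id), promoted: None }
    }

    fn def_id(&self) -> DefId {
        self.instance.def_id()
    }
}

fn main() {
    let item = MirSource::item(DefId(7));
    assert_eq!(item.def_id(), DefId(7));

    // Two drop shims share a DefId but are still distinguishable through `instance`.
    let glue = MirSource {
        instance: InstanceDef::DropGlue(DefId(3), Some("Vec<u32>")),
        promoted: None,
    };
    assert_eq!(glue.def_id(), DefId(3));
}
```

Carrying the instance is what later allows `dump_path` in `util/pretty.rs` to tell two drop shims apart even though they share a `DefId`.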
diff --git a/src/librustc_mir/transform/no_landing_pads.rs b/src/librustc_mir/transform/no_landing_pads.rs
index 15b59d3..089d9b9 100644
--- a/src/librustc_mir/transform/no_landing_pads.rs
+++ b/src/librustc_mir/transform/no_landing_pads.rs
@@ -11,7 +11,7 @@
 impl MirPass for NoLandingPads {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _: MirSource,
+                          _: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         no_landing_pads(tcx, mir)
     }
diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs
index ab4e3ad..76b8b83 100644
--- a/src/librustc_mir/transform/qualify_consts.rs
+++ b/src/librustc_mir/transform/qualify_consts.rs
@@ -1159,7 +1159,7 @@
 impl MirPass for QualifyAndPromoteConstants {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          src: MirSource,
+                          src: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         // There's not really any point in promoting errorful MIR.
         if mir.return_ty().references_error() {
@@ -1171,7 +1171,7 @@
             return;
         }
 
-        let def_id = src.def_id;
+        let def_id = src.def_id();
         let id = tcx.hir().as_local_node_id(def_id).unwrap();
         let mut const_promoted_temps = None;
         let mode = match tcx.hir().body_owner_kind(id) {
diff --git a/src/librustc_mir/transform/remove_noop_landing_pads.rs b/src/librustc_mir/transform/remove_noop_landing_pads.rs
index 4fcb4c1..68832b7 100644
--- a/src/librustc_mir/transform/remove_noop_landing_pads.rs
+++ b/src/librustc_mir/transform/remove_noop_landing_pads.rs
@@ -24,7 +24,7 @@
 impl MirPass for RemoveNoopLandingPads {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _src: MirSource,
+                          _src: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         remove_noop_landing_pads(tcx, mir);
     }
diff --git a/src/librustc_mir/transform/rustc_peek.rs b/src/librustc_mir/transform/rustc_peek.rs
index 806c1c1..40e02e7 100644
--- a/src/librustc_mir/transform/rustc_peek.rs
+++ b/src/librustc_mir/transform/rustc_peek.rs
@@ -24,8 +24,8 @@
 
 impl MirPass for SanityCheck {
     fn run_pass<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          src: MirSource, mir: &mut Mir<'tcx>) {
-        let def_id = src.def_id;
+                          src: MirSource<'tcx>, mir: &mut Mir<'tcx>) {
+        let def_id = src.def_id();
         let id = tcx.hir().as_local_node_id(def_id).unwrap();
         if !tcx.has_attr(def_id, "rustc_mir") {
             debug!("skipping rustc_peek::SanityCheck on {}", tcx.item_path_str(def_id));
diff --git a/src/librustc_mir/transform/simplify.rs b/src/librustc_mir/transform/simplify.rs
index 90486d1..14e7895 100644
--- a/src/librustc_mir/transform/simplify.rs
+++ b/src/librustc_mir/transform/simplify.rs
@@ -59,7 +59,7 @@
 
     fn run_pass<'a, 'tcx>(&self,
                           _tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _src: MirSource,
+                          _src: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         debug!("SimplifyCfg({:?}) - simplifying {:?}", self.label, mir);
         simplify_cfg(mir);
@@ -298,7 +298,7 @@
 impl MirPass for SimplifyLocals {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _: MirSource,
+                          _: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         let mut marker = DeclMarker { locals: BitSet::new_empty(mir.local_decls.len()) };
         marker.visit_mir(mir);
diff --git a/src/librustc_mir/transform/simplify_branches.rs b/src/librustc_mir/transform/simplify_branches.rs
index 0dc89bf..3c4d122 100644
--- a/src/librustc_mir/transform/simplify_branches.rs
+++ b/src/librustc_mir/transform/simplify_branches.rs
@@ -21,7 +21,7 @@
 
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _src: MirSource,
+                          _src: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         for block in mir.basic_blocks_mut() {
             let terminator = block.terminator_mut();
diff --git a/src/librustc_mir/transform/uniform_array_move_out.rs b/src/librustc_mir/transform/uniform_array_move_out.rs
index 0991843..fd8d68a 100644
--- a/src/librustc_mir/transform/uniform_array_move_out.rs
+++ b/src/librustc_mir/transform/uniform_array_move_out.rs
@@ -39,7 +39,7 @@
 impl MirPass for UniformArrayMoveOut {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _src: MirSource,
+                          _src: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         let mut patch = MirPatch::new(mir);
         {
@@ -161,7 +161,7 @@
 impl MirPass for RestoreSubsliceArrayMoveOut {
     fn run_pass<'a, 'tcx>(&self,
                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                          _src: MirSource,
+                          _src: MirSource<'tcx>,
                           mir: &mut Mir<'tcx>) {
         let mut patch = MirPatch::new(mir);
         {
diff --git a/src/librustc_mir/util/elaborate_drops.rs b/src/librustc_mir/util/elaborate_drops.rs
index 23e92b3..1bc956e 100644
--- a/src/librustc_mir/util/elaborate_drops.rs
+++ b/src/librustc_mir/util/elaborate_drops.rs
@@ -963,7 +963,7 @@
             span: self.source_info.span,
             ty: self.tcx().types.usize,
             user_ty: None,
-            literal: self.tcx().intern_lazy_const(ty::LazyConst::Evaluated(
+            literal: self.tcx().mk_lazy_const(ty::LazyConst::Evaluated(
                 ty::Const::from_usize(self.tcx(), val.into())
             )),
         })
diff --git a/src/librustc_mir/util/liveness.rs b/src/librustc_mir/util/liveness.rs
index 08ef58a..847699c 100644
--- a/src/librustc_mir/util/liveness.rs
+++ b/src/librustc_mir/util/liveness.rs
@@ -307,7 +307,7 @@
 pub fn dump_mir<'a, 'tcx, V: Idx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     pass_name: &str,
-    source: MirSource,
+    source: MirSource<'tcx>,
     mir: &Mir<'tcx>,
     map: &impl LiveVariableMap<LiveVar = V>,
     result: &LivenessResult<V>,
@@ -317,7 +317,7 @@
     }
     let node_path = item_path::with_forced_impl_filename_line(|| {
         // see notes on #41697 below
-        tcx.item_path_str(source.def_id)
+        tcx.item_path_str(source.def_id())
     });
     dump_matched_mir_node(tcx, pass_name, &node_path, source, mir, map, result);
 }
@@ -326,14 +326,14 @@
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     pass_name: &str,
     node_path: &str,
-    source: MirSource,
+    source: MirSource<'tcx>,
     mir: &Mir<'tcx>,
     map: &dyn LiveVariableMap<LiveVar = V>,
     result: &LivenessResult<V>,
 ) {
     let mut file_path = PathBuf::new();
     file_path.push(Path::new(&tcx.sess.opts.debugging_opts.dump_mir_dir));
-    let item_id = tcx.hir().as_local_node_id(source.def_id).unwrap();
+    let item_id = tcx.hir().as_local_node_id(source.def_id()).unwrap();
     let file_name = format!("rustc.node{}{}-liveness.mir", item_id, pass_name);
     file_path.push(&file_name);
     let _ = fs::File::create(&file_path).and_then(|mut file| {
@@ -348,7 +348,7 @@
 
 pub fn write_mir_fn<'a, 'tcx, V: Idx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    src: MirSource,
+    src: MirSource<'tcx>,
     mir: &Mir<'tcx>,
     map: &dyn LiveVariableMap<LiveVar = V>,
     w: &mut dyn Write,
diff --git a/src/librustc_mir/util/pretty.rs b/src/librustc_mir/util/pretty.rs
index 2e1fc75..1357f8f 100644
--- a/src/librustc_mir/util/pretty.rs
+++ b/src/librustc_mir/util/pretty.rs
@@ -1,4 +1,3 @@
-use rustc::hir;
 use rustc::hir::def_id::{DefId, LOCAL_CRATE};
 use rustc::mir::*;
 use rustc::mir::visit::Visitor;
@@ -69,7 +68,7 @@
     pass_num: Option<&dyn Display>,
     pass_name: &str,
     disambiguator: &dyn Display,
-    source: MirSource,
+    source: MirSource<'tcx>,
     mir: &Mir<'tcx>,
     extra_data: F,
 ) where
@@ -81,7 +80,7 @@
 
     let node_path = item_path::with_forced_impl_filename_line(|| {
         // see notes on #41697 below
-        tcx.item_path_str(source.def_id)
+        tcx.item_path_str(source.def_id())
     });
     dump_matched_mir_node(
         tcx,
@@ -98,7 +97,7 @@
 pub fn dump_enabled<'a, 'gcx, 'tcx>(
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
     pass_name: &str,
-    source: MirSource,
+    source: MirSource<'tcx>,
 ) -> bool {
     let filters = match tcx.sess.opts.debugging_opts.dump_mir {
         None => return false,
@@ -106,7 +105,7 @@
     };
     let node_path = item_path::with_forced_impl_filename_line(|| {
         // see notes on #41697 below
-        tcx.item_path_str(source.def_id)
+        tcx.item_path_str(source.def_id())
     });
     filters.split('|').any(|or_filter| {
         or_filter.split('&').all(|and_filter| {
@@ -125,7 +124,7 @@
     pass_name: &str,
     node_path: &str,
     disambiguator: &dyn Display,
-    source: MirSource,
+    source: MirSource<'tcx>,
     mir: &Mir<'tcx>,
     mut extra_data: F,
 ) where
@@ -151,7 +150,7 @@
         let _: io::Result<()> = try {
             let mut file =
                 create_dump_file(tcx, "dot", pass_num, pass_name, disambiguator, source)?;
-            write_mir_fn_graphviz(tcx, source.def_id, mir, &mut file)?;
+            write_mir_fn_graphviz(tcx, source.def_id(), mir, &mut file)?;
         };
     }
 }
@@ -165,7 +164,7 @@
     pass_num: Option<&dyn Display>,
     pass_name: &str,
     disambiguator: &dyn Display,
-    source: MirSource,
+    source: MirSource<'tcx>,
 ) -> PathBuf {
     let promotion_id = match source.promoted {
         Some(id) => format!("-{:?}", id),
@@ -184,13 +183,32 @@
     let mut file_path = PathBuf::new();
     file_path.push(Path::new(&tcx.sess.opts.debugging_opts.dump_mir_dir));
 
-    let item_name = tcx.hir()
-        .def_path(source.def_id)
+    let item_name = tcx
+        .def_path(source.def_id())
         .to_filename_friendly_no_crate();
+    // All drop shims have the same DefId, so we have to add the type
+    // to get unique file names.
+    let shim_disambiguator = match source.instance {
+        ty::InstanceDef::DropGlue(_, Some(ty)) => {
+            // Unfortunately, pretty-printed types are not very filename-friendly.
+            // We do some filtering.
+            let mut s = ".".to_owned();
+            s.extend(ty.to_string()
+                .chars()
+                .filter_map(|c| match c {
+                    ' ' => None,
+                    ':' => Some('_'),
+                    c => Some(c)
+                }));
+            s
+        }
+        _ => String::new(),
+    };
 
     let file_name = format!(
-        "rustc.{}{}{}.{}.{}.{}",
+        "rustc.{}{}{}{}.{}.{}.{}",
         item_name,
+        shim_disambiguator,
         promotion_id,
         pass_num,
         pass_name,
@@ -213,7 +231,7 @@
     pass_num: Option<&dyn Display>,
     pass_name: &str,
     disambiguator: &dyn Display,
-    source: MirSource,
+    source: MirSource<'tcx>,
 ) -> io::Result<fs::File> {
     let file_path = dump_path(tcx, extension, pass_num, pass_name, disambiguator, source);
     if let Some(parent) = file_path.parent() {
@@ -253,7 +271,7 @@
         for (i, mir) in mir.promoted.iter_enumerated() {
             writeln!(w, "")?;
             let src = MirSource {
-                def_id,
+                instance: ty::InstanceDef::Item(def_id),
                 promoted: Some(i),
             };
             write_mir_fn(tcx, src, mir, &mut |_, _| Ok(()), w)?;
@@ -264,7 +282,7 @@
 
 pub fn write_mir_fn<'a, 'gcx, 'tcx, F>(
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    src: MirSource,
+    src: MirSource<'tcx>,
     mir: &Mir<'tcx>,
     extra_data: &mut F,
     w: &mut dyn Write,
@@ -528,7 +546,7 @@
 /// local variables (both user-defined bindings and compiler temporaries).
 pub fn write_mir_intro<'a, 'gcx, 'tcx>(
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
-    src: MirSource,
+    src: MirSource<'tcx>,
     mir: &Mir<'_>,
     w: &mut dyn Write,
 ) -> io::Result<()> {
@@ -570,45 +588,49 @@
 
 fn write_mir_sig(
     tcx: TyCtxt<'_, '_, '_>,
-    src: MirSource,
+    src: MirSource<'tcx>,
     mir: &Mir<'_>,
     w: &mut dyn Write,
 ) -> io::Result<()> {
-    let id = tcx.hir().as_local_node_id(src.def_id).unwrap();
-    let body_owner_kind = tcx.hir().body_owner_kind(id);
-    match (body_owner_kind, src.promoted) {
-        (_, Some(i)) => write!(w, "{:?} in", i)?,
-        (hir::BodyOwnerKind::Closure, _) |
-        (hir::BodyOwnerKind::Fn, _) => write!(w, "fn")?,
-        (hir::BodyOwnerKind::Const, _) => write!(w, "const")?,
-        (hir::BodyOwnerKind::Static(hir::MutImmutable), _) => write!(w, "static")?,
-        (hir::BodyOwnerKind::Static(hir::MutMutable), _) => write!(w, "static mut")?,
+    use rustc::hir::def::Def;
+
+    trace!("write_mir_sig: {:?}", src.instance);
+    let descr = tcx.describe_def(src.def_id());
+    let is_function = match descr {
+        Some(Def::Fn(_)) | Some(Def::Method(_)) | Some(Def::StructCtor(..)) => true,
+        _ => tcx.is_closure(src.def_id()),
+    };
+    match (descr, src.promoted) {
+        (_, Some(i)) => write!(w, "{:?} in ", i)?,
+        (Some(Def::StructCtor(..)), _) => write!(w, "struct ")?,
+        (Some(Def::Const(_)), _) => write!(w, "const ")?,
+        (Some(Def::Static(_, /*is_mutbl*/false)), _) => write!(w, "static ")?,
+        (Some(Def::Static(_, /*is_mutbl*/true)), _) => write!(w, "static mut ")?,
+        (_, _) if is_function => write!(w, "fn ")?,
+        (None, _) => {}, // things like anon const, not an item
+        _ => bug!("Unexpected def description {:?}", descr),
     }
 
     item_path::with_forced_impl_filename_line(|| {
         // see notes on #41697 elsewhere
-        write!(w, " {}", tcx.item_path_str(src.def_id))
+        write!(w, "{}", tcx.item_path_str(src.def_id()))
     })?;
 
-    match (body_owner_kind, src.promoted) {
-        (hir::BodyOwnerKind::Closure, None) |
-        (hir::BodyOwnerKind::Fn, None) => {
-            write!(w, "(")?;
+    if src.promoted.is_none() && is_function {
+        write!(w, "(")?;
 
-            // fn argument types.
-            for (i, arg) in mir.args_iter().enumerate() {
-                if i != 0 {
-                    write!(w, ", ")?;
-                }
-                write!(w, "{:?}: {}", Place::Local(arg), mir.local_decls[arg].ty)?;
+        // fn argument types.
+        for (i, arg) in mir.args_iter().enumerate() {
+            if i != 0 {
+                write!(w, ", ")?;
             }
+            write!(w, "{:?}: {}", Place::Local(arg), mir.local_decls[arg].ty)?;
+        }
 
-            write!(w, ") -> {}", mir.return_ty())?;
-        }
-        (hir::BodyOwnerKind::Const, _) | (hir::BodyOwnerKind::Static(_), _) | (_, Some(_)) => {
-            assert_eq!(mir.arg_count, 0);
-            write!(w, ": {} =", mir.return_ty())?;
-        }
+        write!(w, ") -> {}", mir.return_ty())?;
+    } else {
+        assert_eq!(mir.arg_count, 0);
+        write!(w, ": {} =", mir.return_ty())?;
     }
 
     if let Some(yield_ty) = mir.yield_ty {
@@ -616,6 +638,9 @@
         writeln!(w, "yields {}", yield_ty)?;
     }
 
+    write!(w, " ")?;
+    // Next thing that gets printed is the opening {
+
     Ok(())
 }
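(Aside, not part of the patch: the `shim_disambiguator` logic added to `dump_path` earlier in this file's diff is easier to follow in isolation. A hedged sketch of the same filtering over a plain string, whereas the real code works on a pretty-printed `Ty<'tcx>`: spaces are dropped and `:` becomes `_`, so drop glue for different types gets distinct, filename-friendly dump names.)

```rust
// Sketch of the filename sanitizing: drop spaces, turn `:` into `_`.
fn shim_disambiguator(ty_name: &str) -> String {
    let mut s = ".".to_owned();
    s.extend(ty_name.chars().filter_map(|c| match c {
        ' ' => None,
        ':' => Some('_'),
        c => Some(c),
    }));
    s
}

fn main() {
    assert_eq!(shim_disambiguator("std::vec::Vec<u32>"), ".std__vec__Vec<u32>");
    // Spaces from pretty-printing are removed entirely.
    assert_eq!(shim_disambiguator("(u32, u32)"), ".(u32,u32)");
}
```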
 
diff --git a/src/librustc_resolve/check_unused.rs b/src/librustc_resolve/check_unused.rs
index 6399608..3b6179f 100644
--- a/src/librustc_resolve/check_unused.rs
+++ b/src/librustc_resolve/check_unused.rs
@@ -7,23 +7,52 @@
 //
 // Unused trait imports can't be checked until the method resolution. We save
 // candidates here, and do the actual check in librustc_typeck/check_unused.rs.
+//
+// Checking for unused imports is split into three steps:
+//
+//  - `UnusedImportCheckVisitor` walks the AST to find all the unused imports
+//    inside of `UseTree`s, recording their `NodeId`s and grouping them by
+//    the parent `use` item
+//
+//  - `calc_unused_spans` then walks over all the `use` items marked in the
+//    previous step to collect the spans associated with the `NodeId`s and to
+//    calculate the spans that can be removed by rustfix; this is done in a
+//    separate step to be able to collapse the adjacent spans that rustfix
+//    will remove
+//
+//  - `check_crate` finally emits the diagnostics based on the data generated
+//    in the last step
 
 use std::ops::{Deref, DerefMut};
 
 use crate::Resolver;
 use crate::resolve_imports::ImportDirectiveSubclass;
 
-use rustc::{lint, ty};
 use rustc::util::nodemap::NodeMap;
+use rustc::{lint, ty};
+use rustc_data_structures::fx::FxHashSet;
 use syntax::ast;
 use syntax::visit::{self, Visitor};
 use syntax_pos::{Span, MultiSpan, DUMMY_SP};
 
+struct UnusedImport<'a> {
+    use_tree: &'a ast::UseTree,
+    use_tree_id: ast::NodeId,
+    item_span: Span,
+    unused: FxHashSet<ast::NodeId>,
+}
+
+impl<'a> UnusedImport<'a> {
+    fn add(&mut self, id: ast::NodeId) {
+        self.unused.insert(id);
+    }
+}
 
 struct UnusedImportCheckVisitor<'a, 'b: 'a> {
     resolver: &'a mut Resolver<'b>,
     /// All the (so far) unused imports, grouped path list
-    unused_imports: NodeMap<NodeMap<Span>>,
+    unused_imports: NodeMap<UnusedImport<'a>>,
+    base_use_tree: Option<&'a ast::UseTree>,
     base_id: ast::NodeId,
     item_span: Span,
 }
@@ -46,7 +75,7 @@
 impl<'a, 'b> UnusedImportCheckVisitor<'a, 'b> {
     // We have information about whether `use` (import) directives are actually
     // used now. If an import is not used at all, we signal a lint error.
-    fn check_import(&mut self, item_id: ast::NodeId, id: ast::NodeId, span: Span) {
+    fn check_import(&mut self, id: ast::NodeId) {
         let mut used = false;
         self.per_ns(|this, ns| used |= this.used_imports.contains(&(id, ns)));
         if !used {
@@ -54,16 +83,31 @@
                 // Check later.
                 return;
             }
-            self.unused_imports.entry(item_id).or_default().insert(id, span);
+            self.unused_import(self.base_id).add(id);
         } else {
             // This trait import is definitely used, in a way other than
             // method resolution.
             self.maybe_unused_trait_imports.remove(&id);
-            if let Some(i) = self.unused_imports.get_mut(&item_id) {
-                i.remove(&id);
+            if let Some(i) = self.unused_imports.get_mut(&self.base_id) {
+                i.unused.remove(&id);
             }
         }
     }
+
+    fn unused_import(&mut self, id: ast::NodeId) -> &mut UnusedImport<'a> {
+        let use_tree_id = self.base_id;
+        let use_tree = self.base_use_tree.unwrap();
+        let item_span = self.item_span;
+
+        self.unused_imports
+            .entry(id)
+            .or_insert_with(|| UnusedImport {
+                use_tree,
+                use_tree_id,
+                item_span,
+                unused: FxHashSet::default(),
+            })
+    }
 }
 
 impl<'a, 'b> Visitor<'a> for UnusedImportCheckVisitor<'a, 'b> {
@@ -88,31 +132,112 @@
         // This allows the grouping of all the lints in the same item
         if !nested {
             self.base_id = id;
+            self.base_use_tree = Some(use_tree);
         }
 
         if let ast::UseTreeKind::Nested(ref items) = use_tree.kind {
-            // If it's the parent group, cover the entire use item
-            let span = if nested {
-                use_tree.span
-            } else {
-                self.item_span
-            };
-
             if items.is_empty() {
-                self.unused_imports
-                    .entry(self.base_id)
-                    .or_default()
-                    .insert(id, span);
+                self.unused_import(self.base_id).add(id);
             }
         } else {
-            let base_id = self.base_id;
-            self.check_import(base_id, id, use_tree.span);
+            self.check_import(id);
         }
 
         visit::walk_use_tree(self, use_tree, id);
     }
 }
 
+enum UnusedSpanResult {
+    Used,
+    FlatUnused(Span, Span),
+    NestedFullUnused(Vec<Span>, Span),
+    NestedPartialUnused(Vec<Span>, Vec<Span>),
+}
+
+fn calc_unused_spans(
+    unused_import: &UnusedImport<'_>,
+    use_tree: &ast::UseTree,
+    use_tree_id: ast::NodeId,
+) -> UnusedSpanResult {
+    // The full span is the whole item's span if this current tree is not nested inside another
+    // This tells rustfix to remove the whole item if all the imports are unused
+    let full_span = if unused_import.use_tree.span == use_tree.span {
+        unused_import.item_span
+    } else {
+        use_tree.span
+    };
+    match use_tree.kind {
+        ast::UseTreeKind::Simple(..) | ast::UseTreeKind::Glob => {
+            if unused_import.unused.contains(&use_tree_id) {
+                UnusedSpanResult::FlatUnused(use_tree.span, full_span)
+            } else {
+                UnusedSpanResult::Used
+            }
+        }
+        ast::UseTreeKind::Nested(ref nested) => {
+            if nested.len() == 0 {
+                return UnusedSpanResult::FlatUnused(use_tree.span, full_span);
+            }
+
+            let mut unused_spans = Vec::new();
+            let mut to_remove = Vec::new();
+            let mut all_nested_unused = true;
+            let mut previous_unused = false;
+            for (pos, (use_tree, use_tree_id)) in nested.iter().enumerate() {
+                let remove = match calc_unused_spans(unused_import, use_tree, *use_tree_id) {
+                    UnusedSpanResult::Used => {
+                        all_nested_unused = false;
+                        None
+                    }
+                    UnusedSpanResult::FlatUnused(span, remove) => {
+                        unused_spans.push(span);
+                        Some(remove)
+                    }
+                    UnusedSpanResult::NestedFullUnused(mut spans, remove) => {
+                        unused_spans.append(&mut spans);
+                        Some(remove)
+                    }
+                    UnusedSpanResult::NestedPartialUnused(mut spans, mut to_remove_extra) => {
+                        all_nested_unused = false;
+                        unused_spans.append(&mut spans);
+                        to_remove.append(&mut to_remove_extra);
+                        None
+                    }
+                };
+                if let Some(remove) = remove {
+                    let remove_span = if nested.len() == 1 {
+                        remove
+                    } else if pos == nested.len() - 1 || !all_nested_unused {
+                        // Delete everything from the end of the last import, to delete the
+                        // previous comma
+                        nested[pos - 1].0.span.shrink_to_hi().to(use_tree.span)
+                    } else {
+                        // Delete everything until the next import, to delete the trailing commas
+                        use_tree.span.to(nested[pos + 1].0.span.shrink_to_lo())
+                    };
+
+                    // Try to collapse adjacent spans into a single one. This prevents all cases of
+                    // overlapping removals, which are not supported by rustfix
+                    if previous_unused && !to_remove.is_empty() {
+                        let previous = to_remove.pop().unwrap();
+                        to_remove.push(previous.to(remove_span));
+                    } else {
+                        to_remove.push(remove_span);
+                    }
+                }
+                previous_unused = remove.is_some();
+            }
+            if unused_spans.is_empty() {
+                UnusedSpanResult::Used
+            } else if all_nested_unused {
+                UnusedSpanResult::NestedFullUnused(unused_spans, full_span)
+            } else {
+                UnusedSpanResult::NestedPartialUnused(unused_spans, to_remove)
+            }
+        }
+    }
+}
+
 pub fn check_crate(resolver: &mut Resolver<'_>, krate: &ast::Crate) {
     for directive in resolver.potentially_unused_imports.iter() {
         match directive.subclass {
@@ -152,14 +277,33 @@
     let mut visitor = UnusedImportCheckVisitor {
         resolver,
         unused_imports: Default::default(),
+        base_use_tree: None,
         base_id: ast::DUMMY_NODE_ID,
         item_span: DUMMY_SP,
     };
     visit::walk_crate(&mut visitor, krate);
 
-    for (id, spans) in &visitor.unused_imports {
+    for unused in visitor.unused_imports.values() {
+        let mut fixes = Vec::new();
+        let mut spans = match calc_unused_spans(unused, unused.use_tree, unused.use_tree_id) {
+            UnusedSpanResult::Used => continue,
+            UnusedSpanResult::FlatUnused(span, remove) => {
+                fixes.push((remove, String::new()));
+                vec![span]
+            }
+            UnusedSpanResult::NestedFullUnused(spans, remove) => {
+                fixes.push((remove, String::new()));
+                spans
+            }
+            UnusedSpanResult::NestedPartialUnused(spans, remove) => {
+                for fix in &remove {
+                    fixes.push((*fix, String::new()));
+                }
+                spans
+            }
+        };
+
         let len = spans.len();
-        let mut spans = spans.values().cloned().collect::<Vec<Span>>();
         spans.sort();
         let ms = MultiSpan::from_spans(spans.clone());
         let mut span_snippets = spans.iter()
@@ -177,6 +321,21 @@
                           } else {
                               String::new()
                           });
-        visitor.session.buffer_lint(lint::builtin::UNUSED_IMPORTS, *id, ms, &msg);
+
+        let fix_msg = if fixes.len() == 1 && fixes[0].0 == unused.item_span {
+            "remove the whole `use` item"
+        } else if spans.len() > 1 {
+            "remove the unused imports"
+        } else {
+            "remove the unused import"
+        };
+
+        visitor.session.buffer_lint_with_diagnostic(
+            lint::builtin::UNUSED_IMPORTS,
+            unused.use_tree_id,
+            ms,
+            &msg,
+            lint::builtin::BuiltinLintDiagnostics::UnusedImports(fix_msg.into(), fixes),
+        );
     }
 }
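(Aside, not part of the patch: the trickiest part of the new `calc_unused_spans` is collapsing adjacent removal spans so that rustfix is never handed overlapping deletions. A small sketch of just that step, with `(lo, hi)` byte ranges standing in for rustc's `Span`; the helper name is made up for illustration.)

```rust
// Merge a new removal range into the list, extending the previous range when the
// previous nested import was also unused, instead of pushing an overlapping one.
fn push_collapsed(to_remove: &mut Vec<(usize, usize)>, previous_unused: bool, remove: (usize, usize)) {
    if previous_unused && !to_remove.is_empty() {
        let previous = to_remove.pop().unwrap();
        to_remove.push((previous.0, remove.1));
    } else {
        to_remove.push(remove);
    }
}

fn main() {
    let mut to_remove = Vec::new();
    push_collapsed(&mut to_remove, false, (10, 14));
    push_collapsed(&mut to_remove, true, (14, 20)); // adjacent and also unused: merged
    assert_eq!(to_remove, vec![(10, 20)]);
}
```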
diff --git a/src/librustc_resolve/error_reporting.rs b/src/librustc_resolve/error_reporting.rs
index b131a6b..8300e69 100644
--- a/src/librustc_resolve/error_reporting.rs
+++ b/src/librustc_resolve/error_reporting.rs
@@ -1,16 +1,423 @@
-use crate::{CrateLint, PathResult, Segment};
-use crate::macros::ParentScope;
-use crate::resolve_imports::ImportResolver;
+use std::cmp::Reverse;
 
+use log::debug;
+use rustc::hir::def::*;
+use rustc::hir::def::Namespace::*;
+use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
+use rustc::session::config::nightly_options;
+use syntax::ast::{ExprKind};
 use syntax::symbol::keywords;
 use syntax_pos::Span;
 
-use log::debug;
+use crate::errors::{Applicability, DiagnosticBuilder, DiagnosticId};
+use crate::macros::ParentScope;
+use crate::resolve_imports::ImportResolver;
+use crate::{import_candidate_to_enum_paths, is_self_type, is_self_value, path_names_to_string};
+use crate::{AssocSuggestion, CrateLint, ImportSuggestion, ModuleOrUniformRoot, PathResult,
+            PathSource, Resolver, Segment};
 
-use std::cmp::Reverse;
+impl<'a> Resolver<'a> {
+    /// Handles error reporting for `smart_resolve_path_fragment` function.
+    /// Creates base error and amends it with one short label and possibly some longer helps/notes.
+    pub(crate) fn smart_resolve_report_errors(
+        &mut self,
+        path: &[Segment],
+        span: Span,
+        source: PathSource<'_>,
+        def: Option<Def>,
+    ) -> (DiagnosticBuilder<'a>, Vec<ImportSuggestion>) {
+        let ident_span = path.last().map_or(span, |ident| ident.ident.span);
+        let ns = source.namespace();
+        let is_expected = &|def| source.is_expected(def);
+        let is_enum_variant = &|def| if let Def::Variant(..) = def { true } else { false };
+
+        // Make the base error.
+        let expected = source.descr_expected();
+        let path_str = Segment::names_to_string(path);
+        let item_str = path.last().unwrap().ident;
+        let code = source.error_code(def.is_some());
+        let (base_msg, fallback_label, base_span) = if let Some(def) = def {
+            (format!("expected {}, found {} `{}`", expected, def.kind_name(), path_str),
+                format!("not a {}", expected),
+                span)
+        } else {
+            let item_span = path.last().unwrap().ident.span;
+            let (mod_prefix, mod_str) = if path.len() == 1 {
+                (String::new(), "this scope".to_string())
+            } else if path.len() == 2 && path[0].ident.name == keywords::PathRoot.name() {
+                (String::new(), "the crate root".to_string())
+            } else {
+                let mod_path = &path[..path.len() - 1];
+                let mod_prefix = match self.resolve_path_without_parent_scope(
+                    mod_path, Some(TypeNS), false, span, CrateLint::No
+                ) {
+                    PathResult::Module(ModuleOrUniformRoot::Module(module)) =>
+                        module.def(),
+                    _ => None,
+                }.map_or(String::new(), |def| format!("{} ", def.kind_name()));
+                (mod_prefix, format!("`{}`", Segment::names_to_string(mod_path)))
+            };
+            (format!("cannot find {} `{}` in {}{}", expected, item_str, mod_prefix, mod_str),
+                format!("not found in {}", mod_str),
+                item_span)
+        };
+
+        let code = DiagnosticId::Error(code.into());
+        let mut err = self.session.struct_span_err_with_code(base_span, &base_msg, code);
+
+        // Emit help message for fake-self from other languages (e.g., `this` in JavaScript).
+        if ["this", "my"].contains(&&*item_str.as_str())
+            && self.self_value_is_available(path[0].ident.span, span) {
+            err.span_suggestion(
+                span,
+                "did you mean",
+                "self".to_string(),
+                Applicability::MaybeIncorrect,
+            );
+        }
+
+        // Emit special messages for unresolved `Self` and `self`.
+        if is_self_type(path, ns) {
+            __diagnostic_used!(E0411);
+            err.code(DiagnosticId::Error("E0411".into()));
+            err.span_label(span, format!("`Self` is only available in impls, traits, \
+                                          and type definitions"));
+            return (err, Vec::new());
+        }
+        if is_self_value(path, ns) {
+            debug!("smart_resolve_path_fragment: E0424, source={:?}", source);
+
+            __diagnostic_used!(E0424);
+            err.code(DiagnosticId::Error("E0424".into()));
+            err.span_label(span, match source {
+                PathSource::Pat => {
+                    format!("`self` value is a keyword \
+                             and may not be bound to \
+                             variables or shadowed")
+                }
+                _ => {
+                    format!("`self` value is a keyword \
+                             only available in methods \
+                             with `self` parameter")
+                }
+            });
+            return (err, Vec::new());
+        }
+
+        // Try to lookup name in more relaxed fashion for better error reporting.
+        let ident = path.last().unwrap().ident;
+        let candidates = self.lookup_import_candidates(ident, ns, is_expected);
+        if candidates.is_empty() && is_expected(Def::Enum(DefId::local(CRATE_DEF_INDEX))) {
+            let enum_candidates =
+                self.lookup_import_candidates(ident, ns, is_enum_variant);
+            let mut enum_candidates = enum_candidates.iter()
+                .map(|suggestion| {
+                    import_candidate_to_enum_paths(&suggestion)
+                }).collect::<Vec<_>>();
+            enum_candidates.sort();
+
+            if !enum_candidates.is_empty() {
+                // Contextualize for E0412 "cannot find type", but don't belabor the point
+                // (that it's a variant) for E0573 "expected type, found variant".
+                let preamble = if def.is_none() {
+                    let others = match enum_candidates.len() {
+                        1 => String::new(),
+                        2 => " and 1 other".to_owned(),
+                        n => format!(" and {} others", n)
+                    };
+                    format!("there is an enum variant `{}`{}; ",
+                            enum_candidates[0].0, others)
+                } else {
+                    String::new()
+                };
+                let msg = format!("{}try using the variant's enum", preamble);
+
+                err.span_suggestions(
+                    span,
+                    &msg,
+                    enum_candidates.into_iter()
+                        .map(|(_variant_path, enum_ty_path)| enum_ty_path)
+                        // Variants being re-exported in the prelude doesn't mean
+                        // `prelude::v1` is the type name!
+                        // FIXME: is there a more principled way to do this that
+                        // would work for other re-exports?
+                        .filter(|enum_ty_path| enum_ty_path != "std::prelude::v1")
+                        // Also write `Option` rather than `std::prelude::v1::Option`.
+                        .map(|enum_ty_path| {
+                            // FIXME #56861: DRY-er prelude filtering.
+                            enum_ty_path.trim_start_matches("std::prelude::v1::").to_owned()
+                        }),
+                    Applicability::MachineApplicable,
+                );
+            }
+        }
+        if path.len() == 1 && self.self_type_is_available(span) {
+            if let Some(candidate) = self.lookup_assoc_candidate(ident, ns, is_expected) {
+                let self_is_available = self.self_value_is_available(path[0].ident.span, span);
+                match candidate {
+                    AssocSuggestion::Field => {
+                        err.span_suggestion(
+                            span,
+                            "try",
+                            format!("self.{}", path_str),
+                            Applicability::MachineApplicable,
+                        );
+                        if !self_is_available {
+                            err.span_label(span, format!("`self` value is a keyword \
+                                                         only available in \
+                                                         methods with `self` parameter"));
+                        }
+                    }
+                    AssocSuggestion::MethodWithSelf if self_is_available => {
+                        err.span_suggestion(
+                            span,
+                            "try",
+                            format!("self.{}", path_str),
+                            Applicability::MachineApplicable,
+                        );
+                    }
+                    AssocSuggestion::MethodWithSelf | AssocSuggestion::AssocItem => {
+                        err.span_suggestion(
+                            span,
+                            "try",
+                            format!("Self::{}", path_str),
+                            Applicability::MachineApplicable,
+                        );
+                    }
+                }
+                return (err, candidates);
+            }
+        }
+
+        let mut levenshtein_worked = false;
+
+        // Try Levenshtein algorithm.
+        let suggestion = self.lookup_typo_candidate(path, ns, is_expected, span);
+        if let Some(suggestion) = suggestion {
+            let msg = format!(
+                "{} {} with a similar name exists",
+                suggestion.article, suggestion.kind
+            );
+            err.span_suggestion(
+                ident_span,
+                &msg,
+                suggestion.candidate.to_string(),
+                Applicability::MaybeIncorrect,
+            );
+
+            levenshtein_worked = true;
+        }
+
+        // Try context-dependent help if relaxed lookup didn't work.
+        if let Some(def) = def {
+            if self.smart_resolve_context_dependent_help(&mut err,
+                                                         span,
+                                                         source,
+                                                         def,
+                                                         &path_str,
+                                                         &fallback_label) {
+                return (err, candidates);
+            }
+        }
+
+        // Fallback label.
+        if !levenshtein_worked {
+            err.span_label(base_span, fallback_label);
+            self.type_ascription_suggestion(&mut err, base_span);
+        }
+        (err, candidates)
+    }
+
+    /// Provides context-dependent help for errors reported by the `smart_resolve_path_fragment`
+    /// function.
+    /// Returns `true` if able to provide context-dependent help.
+    fn smart_resolve_context_dependent_help(
+        &mut self,
+        err: &mut DiagnosticBuilder<'a>,
+        span: Span,
+        source: PathSource<'_>,
+        def: Def,
+        path_str: &str,
+        fallback_label: &str,
+    ) -> bool {
+        let ns = source.namespace();
+        let is_expected = &|def| source.is_expected(def);
+
+        match (def, source) {
+            (Def::Macro(..), _) => {
+                err.span_suggestion(
+                    span,
+                    "use `!` to invoke the macro",
+                    format!("{}!", path_str),
+                    Applicability::MaybeIncorrect,
+                );
+            }
+            (Def::TyAlias(..), PathSource::Trait(_)) => {
+                err.span_label(span, "type aliases cannot be used as traits");
+                if nightly_options::is_nightly_build() {
+                    err.note("did you mean to use a trait alias?");
+                }
+            }
+            (Def::Mod(..), PathSource::Expr(Some(parent))) => match parent.node {
+                ExprKind::Field(_, ident) => {
+                    err.span_suggestion(
+                        parent.span,
+                        "use the path separator to refer to an item",
+                        format!("{}::{}", path_str, ident),
+                        Applicability::MaybeIncorrect,
+                    );
+                }
+                ExprKind::MethodCall(ref segment, ..) => {
+                    let span = parent.span.with_hi(segment.ident.span.hi());
+                    err.span_suggestion(
+                        span,
+                        "use the path separator to refer to an item",
+                        format!("{}::{}", path_str, segment.ident),
+                        Applicability::MaybeIncorrect,
+                    );
+                }
+                _ => return false,
+            },
+            (Def::Enum(..), PathSource::TupleStruct)
+                | (Def::Enum(..), PathSource::Expr(..))  => {
+                if let Some(variants) = self.collect_enum_variants(def) {
+                    err.note(&format!("did you mean to use one \
+                                       of the following variants?\n{}",
+                        variants.iter()
+                            .map(|suggestion| path_names_to_string(suggestion))
+                            .map(|suggestion| format!("- `{}`", suggestion))
+                            .collect::<Vec<_>>()
+                            .join("\n")));
+                } else {
+                    err.note("did you mean to use one of the enum's variants?");
+                }
+            },
+            (Def::Struct(def_id), _) if ns == ValueNS => {
+                if let Some((ctor_def, ctor_vis))
+                        = self.struct_constructors.get(&def_id).cloned() {
+                    let accessible_ctor = self.is_accessible(ctor_vis);
+                    if is_expected(ctor_def) && !accessible_ctor {
+                        err.span_label(span, format!("constructor is not visible \
+                                                      here due to private fields"));
+                    }
+                } else {
+                    // HACK(estebank): find a better way to figure out that this was a
+                    // parser issue where a struct literal is being used on an expression
+                    // where a brace being opened means a block is being started. Look
+                    // ahead for the next text to see if `span` is followed by a `{`.
+                    let sm = self.session.source_map();
+                    let mut sp = span;
+                    loop {
+                        sp = sm.next_point(sp);
+                        match sm.span_to_snippet(sp) {
+                            Ok(ref snippet) => {
+                                if snippet.chars().any(|c| { !c.is_whitespace() }) {
+                                    break;
+                                }
+                            }
+                            _ => break,
+                        }
+                    }
+                    let followed_by_brace = match sm.span_to_snippet(sp) {
+                        Ok(ref snippet) if snippet == "{" => true,
+                        _ => false,
+                    };
+                    // In case this could be a struct literal that needs to be surrounded
+                    // by parentheses, find the appropriate span.
+                    let mut i = 0;
+                    let mut closing_brace = None;
+                    loop {
+                        sp = sm.next_point(sp);
+                        match sm.span_to_snippet(sp) {
+                            Ok(ref snippet) => {
+                                if snippet == "}" {
+                                    let sp = span.to(sp);
+                                    if let Ok(snippet) = sm.span_to_snippet(sp) {
+                                        closing_brace = Some((sp, snippet));
+                                    }
+                                    break;
+                                }
+                            }
+                            _ => break,
+                        }
+                        i += 1;
+                        // The bigger the span, the more likely we're incorrect --
+                        // bound it to 100 chars long.
+                        if i > 100 {
+                            break;
+                        }
+                    }
+                    match source {
+                        PathSource::Expr(Some(parent)) => {
+                            match parent.node {
+                                ExprKind::MethodCall(ref path_assignment, _)  => {
+                                    err.span_suggestion(
+                                        sm.start_point(parent.span)
+                                            .to(path_assignment.ident.span),
+                                        "use `::` to access an associated function",
+                                        format!("{}::{}",
+                                                path_str,
+                                                path_assignment.ident),
+                                        Applicability::MaybeIncorrect
+                                    );
+                                },
+                                _ => {
+                                    err.span_label(
+                                        span,
+                                        format!("did you mean `{} {{ /* fields */ }}`?",
+                                                path_str),
+                                    );
+                                },
+                            }
+                        },
+                        PathSource::Expr(None) if followed_by_brace == true => {
+                            if let Some((sp, snippet)) = closing_brace {
+                                err.span_suggestion(
+                                    sp,
+                                    "surround the struct literal with parenthesis",
+                                    format!("({})", snippet),
+                                    Applicability::MaybeIncorrect,
+                                );
+                            } else {
+                                err.span_label(
+                                    span,
+                                    format!("did you mean `({} {{ /* fields */ }})`?",
+                                            path_str),
+                                );
+                            }
+                        },
+                        _ => {
+                            err.span_label(
+                                span,
+                                format!("did you mean `{} {{ /* fields */ }}`?",
+                                        path_str),
+                            );
+                        },
+                    }
+                }
+            }
+            (Def::Union(..), _) |
+            (Def::Variant(..), _) |
+            (Def::VariantCtor(_, CtorKind::Fictive), _) if ns == ValueNS => {
+                err.span_label(span, format!("did you mean `{} {{ /* fields */ }}`?",
+                                             path_str));
+            }
+            (Def::SelfTy(..), _) if ns == ValueNS => {
+                err.span_label(span, fallback_label);
+                err.note("can't use `Self` as a constructor, you must use the \
+                          implemented struct");
+            }
+            (Def::TyAlias(_), _) | (Def::AssociatedTy(..), _) if ns == ValueNS => {
+                err.note("can't use a type alias as a constructor");
+            }
+            _ => return false,
+        }
+        true
+    }
+}
 
 impl<'a, 'b:'a> ImportResolver<'a, 'b> {
-    /// Add suggestions for a path that cannot be resolved.
+    /// Adds suggestions for a path that cannot be resolved.
     pub(crate) fn make_path_suggestion(
         &mut self,
         span: Span,
@@ -24,7 +431,7 @@
             // On 2015 `{{root}}` is usually added implicitly.
             (Some(fst), Some(snd)) if fst.ident.name == keywords::PathRoot.name() &&
                                       !snd.ident.is_path_segment_keyword() => {}
-            // `ident::...` on 2018
+            // `ident::...` on 2018.
             (Some(fst), _) if fst.ident.span.rust_2018() &&
                               !fst.ident.is_path_segment_keyword() => {
                 // Insert a placeholder that's later replaced by `self`/`super`/etc.
@@ -63,7 +470,7 @@
         }
     }
 
-    /// Suggest a missing `crate::` if that resolves to an correct module.
+    /// Suggests a missing `crate::` if that resolves to a correct module.
     ///
     /// ```
     ///    |
@@ -94,7 +501,7 @@
         }
     }
 
-    /// Suggest a missing `super::` if that resolves to an correct module.
+    /// Suggests a missing `super::` if that resolves to a correct module.
     ///
     /// ```
     ///    |
@@ -118,7 +525,7 @@
         }
     }
 
-    /// Suggest a missing external crate name if that resolves to an correct module.
+    /// Suggests a missing external crate name if that resolves to a correct module.
     ///
     /// ```
     ///    |
@@ -139,7 +546,7 @@
         }
 
         // Sort extern crate names in reverse order to get
-        // 1) some consistent ordering for emitted dignostics and
+        // 1) some consistent ordering for emitted diagnostics, and
         // 2) `std` suggestions before `core` suggestions.
         let mut extern_crate_names =
             self.resolver.extern_prelude.iter().map(|(ident, _)| ident.name).collect::<Vec<_>>();
diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs
index 8d345e6..ecbfcec 100644
--- a/src/librustc_resolve/lib.rs
+++ b/src/librustc_resolve/lib.rs
@@ -25,7 +25,6 @@
 use rustc::hir::def::Namespace::*;
 use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, DefId};
 use rustc::hir::{Freevar, FreevarMap, TraitCandidate, TraitMap, GlobMap};
-use rustc::session::config::nightly_options;
 use rustc::ty;
 use rustc::util::nodemap::{NodeMap, NodeSet, FxHashMap, FxHashSet, DefIdMap};
 use rustc::{bug, span_bug};
@@ -1756,7 +1755,7 @@
         }
     }
 
-    /// resolve_hir_path, but takes a callback in case there was an error
+    /// Like `resolve_hir_path`, but takes a callback in case there was an error.
     fn resolve_hir_path_cb<F>(
         &mut self,
         path: &ast::Path,
@@ -1769,7 +1768,7 @@
         let span = path.span;
         let segments = &path.segments;
         let path = Segment::from_path(&path);
-        // FIXME (Manishearth): Intra doc links won't get warned of epoch changes
+        // FIXME(Manishearth): intra-doc links won't get warned of epoch changes.
         let def = match self.resolve_path_without_parent_scope(&path, Some(namespace), true,
                                                                span, CrateLint::No) {
             PathResult::Module(ModuleOrUniformRoot::Module(module)) =>
@@ -3184,383 +3183,11 @@
                                    source: PathSource<'_>,
                                    crate_lint: CrateLint)
                                    -> PathResolution {
-        let ident_span = path.last().map_or(span, |ident| ident.ident.span);
         let ns = source.namespace();
         let is_expected = &|def| source.is_expected(def);
-        let is_enum_variant = &|def| if let Def::Variant(..) = def { true } else { false };
 
-        // Base error is amended with one short label and possibly some longer helps/notes.
         let report_errors = |this: &mut Self, def: Option<Def>| {
-            // Make the base error.
-            let expected = source.descr_expected();
-            let path_str = Segment::names_to_string(path);
-            let item_str = path.last().unwrap().ident;
-            let code = source.error_code(def.is_some());
-            let (base_msg, fallback_label, base_span) = if let Some(def) = def {
-                (format!("expected {}, found {} `{}`", expected, def.kind_name(), path_str),
-                 format!("not a {}", expected),
-                 span)
-            } else {
-                let item_span = path.last().unwrap().ident.span;
-                let (mod_prefix, mod_str) = if path.len() == 1 {
-                    (String::new(), "this scope".to_string())
-                } else if path.len() == 2 && path[0].ident.name == keywords::PathRoot.name() {
-                    (String::new(), "the crate root".to_string())
-                } else {
-                    let mod_path = &path[..path.len() - 1];
-                    let mod_prefix = match this.resolve_path_without_parent_scope(
-                        mod_path, Some(TypeNS), false, span, CrateLint::No
-                    ) {
-                        PathResult::Module(ModuleOrUniformRoot::Module(module)) =>
-                            module.def(),
-                        _ => None,
-                    }.map_or(String::new(), |def| format!("{} ", def.kind_name()));
-                    (mod_prefix, format!("`{}`", Segment::names_to_string(mod_path)))
-                };
-                (format!("cannot find {} `{}` in {}{}", expected, item_str, mod_prefix, mod_str),
-                 format!("not found in {}", mod_str),
-                 item_span)
-            };
-
-            let code = DiagnosticId::Error(code.into());
-            let mut err = this.session.struct_span_err_with_code(base_span, &base_msg, code);
-
-            // Emit help message for fake-self from other languages like `this`(javascript)
-            if ["this", "my"].contains(&&*item_str.as_str())
-                && this.self_value_is_available(path[0].ident.span, span) {
-                err.span_suggestion(
-                    span,
-                    "did you mean",
-                    "self".to_string(),
-                    Applicability::MaybeIncorrect,
-                );
-            }
-
-            // Emit special messages for unresolved `Self` and `self`.
-            if is_self_type(path, ns) {
-                __diagnostic_used!(E0411);
-                err.code(DiagnosticId::Error("E0411".into()));
-                err.span_label(span, format!("`Self` is only available in impls, traits, \
-                                              and type definitions"));
-                return (err, Vec::new());
-            }
-            if is_self_value(path, ns) {
-                debug!("smart_resolve_path_fragment E0424 source:{:?}", source);
-
-                __diagnostic_used!(E0424);
-                err.code(DiagnosticId::Error("E0424".into()));
-                err.span_label(span, match source {
-                    PathSource::Pat => {
-                        format!("`self` value is a keyword \
-                                and may not be bound to \
-                                variables or shadowed")
-                    }
-                    _ => {
-                        format!("`self` value is a keyword \
-                                only available in methods \
-                                with `self` parameter")
-                    }
-                });
-                return (err, Vec::new());
-            }
-
-            // Try to lookup the name in more relaxed fashion for better error reporting.
-            let ident = path.last().unwrap().ident;
-            let candidates = this.lookup_import_candidates(ident, ns, is_expected);
-            if candidates.is_empty() && is_expected(Def::Enum(DefId::local(CRATE_DEF_INDEX))) {
-                let enum_candidates =
-                    this.lookup_import_candidates(ident, ns, is_enum_variant);
-                let mut enum_candidates = enum_candidates.iter()
-                    .map(|suggestion| {
-                        import_candidate_to_enum_paths(&suggestion)
-                    }).collect::<Vec<_>>();
-                enum_candidates.sort();
-
-                if !enum_candidates.is_empty() {
-                    // contextualize for E0412 "cannot find type", but don't belabor the point
-                    // (that it's a variant) for E0573 "expected type, found variant"
-                    let preamble = if def.is_none() {
-                        let others = match enum_candidates.len() {
-                            1 => String::new(),
-                            2 => " and 1 other".to_owned(),
-                            n => format!(" and {} others", n)
-                        };
-                        format!("there is an enum variant `{}`{}; ",
-                                enum_candidates[0].0, others)
-                    } else {
-                        String::new()
-                    };
-                    let msg = format!("{}try using the variant's enum", preamble);
-
-                    err.span_suggestions(
-                        span,
-                        &msg,
-                        enum_candidates.into_iter()
-                            .map(|(_variant_path, enum_ty_path)| enum_ty_path)
-                            // variants reëxported in prelude doesn't mean `prelude::v1` is the
-                            // type name! FIXME: is there a more principled way to do this that
-                            // would work for other reëxports?
-                            .filter(|enum_ty_path| enum_ty_path != "std::prelude::v1")
-                            // also say `Option` rather than `std::prelude::v1::Option`
-                            .map(|enum_ty_path| {
-                                // FIXME #56861: DRYer prelude filtering
-                                enum_ty_path.trim_start_matches("std::prelude::v1::").to_owned()
-                            }),
-                        Applicability::MachineApplicable,
-                    );
-                }
-            }
-            if path.len() == 1 && this.self_type_is_available(span) {
-                if let Some(candidate) = this.lookup_assoc_candidate(ident, ns, is_expected) {
-                    let self_is_available = this.self_value_is_available(path[0].ident.span, span);
-                    match candidate {
-                        AssocSuggestion::Field => {
-                            err.span_suggestion(
-                                span,
-                                "try",
-                                format!("self.{}", path_str),
-                                Applicability::MachineApplicable,
-                            );
-                            if !self_is_available {
-                                err.span_label(span, format!("`self` value is a keyword \
-                                                               only available in \
-                                                               methods with `self` parameter"));
-                            }
-                        }
-                        AssocSuggestion::MethodWithSelf if self_is_available => {
-                            err.span_suggestion(
-                                span,
-                                "try",
-                                format!("self.{}", path_str),
-                                Applicability::MachineApplicable,
-                            );
-                        }
-                        AssocSuggestion::MethodWithSelf | AssocSuggestion::AssocItem => {
-                            err.span_suggestion(
-                                span,
-                                "try",
-                                format!("Self::{}", path_str),
-                                Applicability::MachineApplicable,
-                            );
-                        }
-                    }
-                    return (err, candidates);
-                }
-            }
-
-            let mut levenshtein_worked = false;
-
-            // Try Levenshtein algorithm.
-            let suggestion = this.lookup_typo_candidate(path, ns, is_expected, span);
-            if let Some(suggestion) = suggestion {
-                let msg = format!(
-                    "{} {} with a similar name exists",
-                    suggestion.article, suggestion.kind
-                );
-                err.span_suggestion(
-                    ident_span,
-                    &msg,
-                    suggestion.candidate.to_string(),
-                    Applicability::MaybeIncorrect,
-                );
-
-                levenshtein_worked = true;
-            }
-
-            // Try context dependent help if relaxed lookup didn't work.
-            if let Some(def) = def {
-                match (def, source) {
-                    (Def::Macro(..), _) => {
-                        err.span_suggestion(
-                            span,
-                            "use `!` to invoke the macro",
-                            format!("{}!", path_str),
-                            Applicability::MaybeIncorrect,
-                        );
-                        return (err, candidates);
-                    }
-                    (Def::TyAlias(..), PathSource::Trait(_)) => {
-                        err.span_label(span, "type aliases cannot be used as traits");
-                        if nightly_options::is_nightly_build() {
-                            err.note("did you mean to use a trait alias?");
-                        }
-                        return (err, candidates);
-                    }
-                    (Def::Mod(..), PathSource::Expr(Some(parent))) => match parent.node {
-                        ExprKind::Field(_, ident) => {
-                            err.span_suggestion(
-                                parent.span,
-                                "use the path separator to refer to an item",
-                                format!("{}::{}", path_str, ident),
-                                Applicability::MaybeIncorrect,
-                            );
-                            return (err, candidates);
-                        }
-                        ExprKind::MethodCall(ref segment, ..) => {
-                            let span = parent.span.with_hi(segment.ident.span.hi());
-                            err.span_suggestion(
-                                span,
-                                "use the path separator to refer to an item",
-                                format!("{}::{}", path_str, segment.ident),
-                                Applicability::MaybeIncorrect,
-                            );
-                            return (err, candidates);
-                        }
-                        _ => {}
-                    },
-                    (Def::Enum(..), PathSource::TupleStruct)
-                        | (Def::Enum(..), PathSource::Expr(..))  => {
-                        if let Some(variants) = this.collect_enum_variants(def) {
-                            err.note(&format!("did you mean to use one \
-                                               of the following variants?\n{}",
-                                variants.iter()
-                                    .map(|suggestion| path_names_to_string(suggestion))
-                                    .map(|suggestion| format!("- `{}`", suggestion))
-                                    .collect::<Vec<_>>()
-                                    .join("\n")));
-
-                        } else {
-                            err.note("did you mean to use one of the enum's variants?");
-                        }
-                        return (err, candidates);
-                    },
-                    (Def::Struct(def_id), _) if ns == ValueNS => {
-                        if let Some((ctor_def, ctor_vis))
-                                = this.struct_constructors.get(&def_id).cloned() {
-                            let accessible_ctor = this.is_accessible(ctor_vis);
-                            if is_expected(ctor_def) && !accessible_ctor {
-                                err.span_label(span, format!("constructor is not visible \
-                                                              here due to private fields"));
-                            }
-                        } else {
-                            // HACK(estebank): find a better way to figure out that this was a
-                            // parser issue where a struct literal is being used on an expression
-                            // where a brace being opened means a block is being started. Look
-                            // ahead for the next text to see if `span` is followed by a `{`.
-                            let sm = this.session.source_map();
-                            let mut sp = span;
-                            loop {
-                                sp = sm.next_point(sp);
-                                match sm.span_to_snippet(sp) {
-                                    Ok(ref snippet) => {
-                                        if snippet.chars().any(|c| { !c.is_whitespace() }) {
-                                            break;
-                                        }
-                                    }
-                                    _ => break,
-                                }
-                            }
-                            let followed_by_brace = match sm.span_to_snippet(sp) {
-                                Ok(ref snippet) if snippet == "{" => true,
-                                _ => false,
-                            };
-                            // In case this could be a struct literal that needs to be surrounded
-                            // by parenthesis, find the appropriate span.
-                            let mut i = 0;
-                            let mut closing_brace = None;
-                            loop {
-                                sp = sm.next_point(sp);
-                                match sm.span_to_snippet(sp) {
-                                    Ok(ref snippet) => {
-                                        if snippet == "}" {
-                                            let sp = span.to(sp);
-                                            if let Ok(snippet) = sm.span_to_snippet(sp) {
-                                                closing_brace = Some((sp, snippet));
-                                            }
-                                            break;
-                                        }
-                                    }
-                                    _ => break,
-                                }
-                                i += 1;
-                                if i > 100 { // The bigger the span the more likely we're
-                                    break;   // incorrect. Bound it to 100 chars long.
-                                }
-                            }
-                            match source {
-                                PathSource::Expr(Some(parent)) => {
-                                    match parent.node {
-                                        ExprKind::MethodCall(ref path_assignment, _)  => {
-                                            err.span_suggestion(
-                                                sm.start_point(parent.span)
-                                                  .to(path_assignment.ident.span),
-                                                "use `::` to access an associated function",
-                                                format!("{}::{}",
-                                                        path_str,
-                                                        path_assignment.ident),
-                                                Applicability::MaybeIncorrect
-                                            );
-                                            return (err, candidates);
-                                        },
-                                        _ => {
-                                            err.span_label(
-                                                span,
-                                                format!("did you mean `{} {{ /* fields */ }}`?",
-                                                        path_str),
-                                            );
-                                            return (err, candidates);
-                                        },
-                                    }
-                                },
-                                PathSource::Expr(None) if followed_by_brace == true => {
-                                    if let Some((sp, snippet)) = closing_brace {
-                                        err.span_suggestion(
-                                            sp,
-                                            "surround the struct literal with parenthesis",
-                                            format!("({})", snippet),
-                                            Applicability::MaybeIncorrect,
-                                        );
-                                    } else {
-                                        err.span_label(
-                                            span,
-                                            format!("did you mean `({} {{ /* fields */ }})`?",
-                                                    path_str),
-                                        );
-                                    }
-                                    return (err, candidates);
-                                },
-                                _ => {
-                                    err.span_label(
-                                        span,
-                                        format!("did you mean `{} {{ /* fields */ }}`?",
-                                                path_str),
-                                    );
-                                    return (err, candidates);
-                                },
-                            }
-                        }
-                        return (err, candidates);
-                    }
-                    (Def::Union(..), _) |
-                    (Def::Variant(..), _) |
-                    (Def::VariantCtor(_, CtorKind::Fictive), _) if ns == ValueNS => {
-                        err.span_label(span, format!("did you mean `{} {{ /* fields */ }}`?",
-                                                     path_str));
-                        return (err, candidates);
-                    }
-                    (Def::SelfTy(..), _) if ns == ValueNS => {
-                        err.span_label(span, fallback_label);
-                        err.note("can't use `Self` as a constructor, you must use the \
-                                  implemented struct");
-                        return (err, candidates);
-                    }
-                    (Def::TyAlias(_), _) | (Def::AssociatedTy(..), _) if ns == ValueNS => {
-                        err.note("can't use a type alias as a constructor");
-                        return (err, candidates);
-                    }
-                    _ => {}
-                }
-            }
-
-            // Fallback label.
-            if !levenshtein_worked {
-                err.span_label(base_span, fallback_label);
-                this.type_ascription_suggestion(&mut err, base_span);
-            }
-            (err, candidates)
-        };
-        let report_errors = |this: &mut Self, def: Option<Def>| {
-            let (err, candidates) = report_errors(this, def);
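+            // Build the base diagnostic and the import candidates for this unresolved path.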
+            let (err, candidates) = this.smart_resolve_report_errors(path, span, source, def);
             let def_id = this.current_module.normal_ancestor_id;
             let node_id = this.definitions.as_local_node_id(def_id).unwrap();
             let better = def.is_some();
@@ -3631,7 +3258,8 @@
         debug!("self.current_type_ascription {:?}", self.current_type_ascription);
         if let Some(sp) = self.current_type_ascription.last() {
             let mut sp = *sp;
-            loop {  // try to find the `:`, bail on first non-':'/non-whitespace
+            loop {
+                // Try to find the `:`; bail on first non-':' / non-whitespace.
                 sp = cm.next_point(sp);
                 if let Ok(snippet) = cm.span_to_snippet(sp.to(cm.next_point(sp))) {
                     debug!("snippet {:?}", snippet);
@@ -5520,7 +5148,6 @@
     (variant_path_string, enum_path_string)
 }
 
-
 /// When an entity with a given name is not available in scope, we search for
 /// entities with that name in all crates. This method allows outputting the
 /// results of this search in a programmer-friendly way
diff --git a/src/librustc_typeck/Cargo.toml b/src/librustc_typeck/Cargo.toml
index 68b28a6..dcfcd74 100644
--- a/src/librustc_typeck/Cargo.toml
+++ b/src/librustc_typeck/Cargo.toml
@@ -2,6 +2,7 @@
 authors = ["The Rust Project Developers"]
 name = "rustc_typeck"
 version = "0.0.0"
+edition = "2018"
 
 [lib]
 name = "rustc_typeck"
@@ -14,7 +15,7 @@
 log = "0.4"
 rustc = { path = "../librustc" }
 rustc_data_structures = { path = "../librustc_data_structures" }
-rustc_errors = { path = "../librustc_errors" }
+errors = { path = "../librustc_errors", package = "rustc_errors" }
 rustc_target = { path = "../librustc_target" }
 smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
 syntax = { path = "../libsyntax" }
diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs
index 8da0b6dc..ee3fd6e 100644
--- a/src/librustc_typeck/astconv.rs
+++ b/src/librustc_typeck/astconv.rs
@@ -3,13 +3,13 @@
 //! instance of `AstConv`.
 
 use errors::{Applicability, DiagnosticId};
-use hir::{self, GenericArg, GenericArgs};
-use hir::def::Def;
-use hir::def_id::DefId;
-use hir::HirVec;
-use lint;
-use middle::resolve_lifetime as rl;
-use namespace::Namespace;
+use crate::hir::{self, GenericArg, GenericArgs};
+use crate::hir::def::Def;
+use crate::hir::def_id::DefId;
+use crate::hir::HirVec;
+use crate::lint;
+use crate::middle::resolve_lifetime as rl;
+use crate::namespace::Namespace;
 use rustc::lint::builtin::AMBIGUOUS_ASSOCIATED_ITEMS;
 use rustc::traits;
 use rustc::ty::{self, Ty, TyCtxt, ToPredicate, TypeFoldable};
@@ -18,15 +18,15 @@
 use rustc::ty::wf::object_region_bounds;
 use rustc_data_structures::sync::Lrc;
 use rustc_target::spec::abi;
-use require_c_abi_if_variadic;
+use crate::require_c_abi_if_variadic;
 use smallvec::SmallVec;
 use syntax::ast;
 use syntax::feature_gate::{GateIssue, emit_feature_err};
 use syntax::ptr::P;
 use syntax::util::lev_distance::find_best_match_for_name;
 use syntax_pos::{DUMMY_SP, Span, MultiSpan};
-use util::common::ErrorReported;
-use util::nodemap::FxHashMap;
+use crate::util::common::ErrorReported;
+use crate::util::nodemap::FxHashMap;
 
 use std::collections::BTreeSet;
 use std::iter;
@@ -111,7 +111,7 @@
     {
         let tcx = self.tcx();
         let lifetime_name = |def_id| {
-            tcx.hir().name(tcx.hir().as_local_node_id(def_id).unwrap()).as_interned_str()
+            tcx.hir().name_by_hir_id(tcx.hir().as_local_hir_id(def_id).unwrap()).as_interned_str()
         };
 
         let r = match tcx.named_region(lifetime.hir_id) {
@@ -1682,12 +1682,13 @@
                 assert_eq!(opt_self_ty, None);
                 self.prohibit_generics(&path.segments);
 
-                let node_id = tcx.hir().as_local_node_id(did).unwrap();
-                let item_id = tcx.hir().get_parent_node(node_id);
-                let item_def_id = tcx.hir().local_def_id(item_id);
+                let hir_id = tcx.hir().as_local_hir_id(did).unwrap();
+                let item_id = tcx.hir().get_parent_node_by_hir_id(hir_id);
+                let item_def_id = tcx.hir().local_def_id_from_hir_id(item_id);
                 let generics = tcx.generics_of(item_def_id);
-                let index = generics.param_def_id_to_index[&tcx.hir().local_def_id(node_id)];
-                tcx.mk_ty_param(index, tcx.hir().name(node_id).as_interned_str())
+                let index = generics.param_def_id_to_index[
+                    &tcx.hir().local_def_id_from_hir_id(hir_id)];
+                tcx.mk_ty_param(index, tcx.hir().name_by_hir_id(hir_id).as_interned_str())
             }
             Def::SelfTy(_, Some(def_id)) => {
                 // `Self` in impl (we know the concrete type).
@@ -1793,7 +1794,7 @@
                 let length_def_id = tcx.hir().local_def_id(length.id);
                 let substs = Substs::identity_for_item(tcx, length_def_id);
                 let length = ty::LazyConst::Unevaluated(length_def_id, substs);
-                let length = tcx.intern_lazy_const(length);
+                let length = tcx.mk_lazy_const(length);
                 let array_ty = tcx.mk_ty(ty::Array(self.ast_ty_to_ty(&ty), length));
                 self.normalize_ty(ast_ty.span, array_ty)
             }
diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs
index a90d83f..3a670c8 100644
--- a/src/librustc_typeck/check/_match.rs
+++ b/src/librustc_typeck/check/_match.rs
@@ -1,5 +1,6 @@
-use check::{FnCtxt, Expectation, Diverges, Needs};
-use check::coercion::CoerceMany;
+use crate::check::{FnCtxt, Expectation, Diverges, Needs};
+use crate::check::coercion::CoerceMany;
+use crate::util::nodemap::FxHashMap;
 use errors::Applicability;
 use rustc::hir::{self, PatKind};
 use rustc::hir::def::{Def, CtorKind};
@@ -13,7 +14,6 @@
 use syntax::ptr::P;
 use syntax::util::lev_distance::find_best_match_for_name;
 use syntax_pos::Span;
-use util::nodemap::FxHashMap;
 
 use std::collections::hash_map::Entry::{Occupied, Vacant};
 use std::cmp;
diff --git a/src/librustc_typeck/check/cast.rs b/src/librustc_typeck/check/cast.rs
index 85cae17..be6d432 100644
--- a/src/librustc_typeck/check/cast.rs
+++ b/src/librustc_typeck/check/cast.rs
@@ -31,8 +31,8 @@
 use super::FnCtxt;
 
 use errors::{DiagnosticBuilder,Applicability};
-use hir::def_id::DefId;
-use lint;
+use crate::hir::def_id::DefId;
+use crate::lint;
 use rustc::hir;
 use rustc::session::Session;
 use rustc::traits;
@@ -43,7 +43,7 @@
 use rustc::middle::lang_items;
 use syntax::ast;
 use syntax_pos::Span;
-use util::common::ErrorReported;
+use crate::util::common::ErrorReported;
 
 /// Reifies a cast check to be checked once we have full type information for
 /// a function context.
@@ -294,7 +294,7 @@
                                   .emit();
             }
             CastError::SizedUnsizedCast => {
-                use structured_errors::{SizedUnsizedCastError, StructuredDiagnostic};
+                use crate::structured_errors::{SizedUnsizedCastError, StructuredDiagnostic};
                 SizedUnsizedCastError::new(&fcx.tcx.sess,
                                            self.span,
                                            self.expr_ty,
diff --git a/src/librustc_typeck/check/closure.rs b/src/librustc_typeck/check/closure.rs
index df83c92..24c3009 100644
--- a/src/librustc_typeck/check/closure.rs
+++ b/src/librustc_typeck/check/closure.rs
@@ -2,8 +2,8 @@
 
 use super::{check_fn, Expectation, FnCtxt, GeneratorTypes};
 
-use astconv::AstConv;
-use middle::region;
+use crate::astconv::AstConv;
+use crate::middle::region;
 use rustc::hir::def_id::DefId;
 use rustc::infer::{InferOk, InferResult};
 use rustc::infer::LateBoundRegionConversionTime;
diff --git a/src/librustc_typeck/check/coercion.rs b/src/librustc_typeck/check/coercion.rs
index d1dfe94..8a91e42 100644
--- a/src/librustc_typeck/check/coercion.rs
+++ b/src/librustc_typeck/check/coercion.rs
@@ -50,7 +50,7 @@
 //! sort of a minor point so I've opted to leave it for later---after all
 //! we may want to adjust precisely when coercions occur.
 
-use check::{FnCtxt, Needs};
+use crate::check::{FnCtxt, Needs};
 use errors::DiagnosticBuilder;
 use rustc::hir;
 use rustc::hir::def_id::DefId;
diff --git a/src/librustc_typeck/check/compare_method.rs b/src/librustc_typeck/check/compare_method.rs
index 0eb8d7d..0cc5071 100644
--- a/src/librustc_typeck/check/compare_method.rs
+++ b/src/librustc_typeck/check/compare_method.rs
@@ -736,8 +736,8 @@
         in impl_m_type_params.zip(trait_m_type_params)
     {
         if impl_synthetic != trait_synthetic {
-            let impl_node_id = tcx.hir().as_local_node_id(impl_def_id).unwrap();
-            let impl_span = tcx.hir().span(impl_node_id);
+            let impl_hir_id = tcx.hir().as_local_hir_id(impl_def_id).unwrap();
+            let impl_span = tcx.hir().span_by_hir_id(impl_hir_id);
             let trait_span = tcx.def_span(trait_def_id);
             let mut err = struct_span_err!(tcx.sess,
                                            impl_span,
@@ -840,7 +840,7 @@
                             match param.kind {
                                 GenericParamKind::Lifetime { .. } => None,
                                 GenericParamKind::Type { .. } => {
-                                    if param.id == impl_node_id {
+                                    if param.hir_id == impl_hir_id {
                                         Some(&param.bounds)
                                     } else {
                                         None
diff --git a/src/librustc_typeck/check/demand.rs b/src/librustc_typeck/check/demand.rs
index 0d4690c..82f0037 100644
--- a/src/librustc_typeck/check/demand.rs
+++ b/src/librustc_typeck/check/demand.rs
@@ -1,4 +1,4 @@
-use check::FnCtxt;
+use crate::check::FnCtxt;
 use rustc::infer::InferOk;
 use rustc::traits::{ObligationCause, ObligationCauseCode};
 
diff --git a/src/librustc_typeck/check/dropck.rs b/src/librustc_typeck/check/dropck.rs
index 60b5db0..0fc8241 100644
--- a/src/librustc_typeck/check/dropck.rs
+++ b/src/librustc_typeck/check/dropck.rs
@@ -1,13 +1,13 @@
-use check::regionck::RegionCtxt;
+use crate::check::regionck::RegionCtxt;
 
-use hir::def_id::DefId;
+use crate::hir::def_id::DefId;
 use rustc::infer::outlives::env::OutlivesEnvironment;
 use rustc::infer::{self, InferOk, SuppressRegionErrors};
 use rustc::middle::region;
 use rustc::traits::{ObligationCause, TraitEngine, TraitEngineExt};
 use rustc::ty::subst::{Subst, Substs, UnpackedKind};
 use rustc::ty::{self, Ty, TyCtxt};
-use util::common::ErrorReported;
+use crate::util::common::ErrorReported;
 
 use syntax::ast;
 use syntax_pos::Span;
@@ -184,7 +184,7 @@
     // absent. So we report an error that the Drop impl injected a
     // predicate that is not present on the struct definition.
 
-    let self_type_node_id = tcx.hir().as_local_node_id(self_type_did).unwrap();
+    let self_type_hir_id = tcx.hir().as_local_hir_id(self_type_did).unwrap();
 
     let drop_impl_span = tcx.def_span(drop_impl_did);
 
@@ -216,7 +216,7 @@
         // repeated `contains` calls.
 
         if !assumptions_in_impl_context.contains(&predicate) {
-            let item_span = tcx.hir().span(self_type_node_id);
+            let item_span = tcx.hir().span_by_hir_id(self_type_hir_id);
             struct_span_err!(
                 tcx.sess,
                 drop_impl_span,
diff --git a/src/librustc_typeck/check/generator_interior.rs b/src/librustc_typeck/check/generator_interior.rs
index 225fa1d..7f4b0a9 100644
--- a/src/librustc_typeck/check/generator_interior.rs
+++ b/src/librustc_typeck/check/generator_interior.rs
@@ -11,7 +11,7 @@
 use rustc_data_structures::sync::Lrc;
 use syntax_pos::Span;
 use super::FnCtxt;
-use util::nodemap::FxHashMap;
+use crate::util::nodemap::FxHashMap;
 
 struct InteriorVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
diff --git a/src/librustc_typeck/check/intrinsic.rs b/src/librustc_typeck/check/intrinsic.rs
index 82d4300..912ea39 100644
--- a/src/librustc_typeck/check/intrinsic.rs
+++ b/src/librustc_typeck/check/intrinsic.rs
@@ -4,7 +4,7 @@
 use rustc::traits::{ObligationCause, ObligationCauseCode};
 use rustc::ty::{self, TyCtxt, Ty};
 use rustc::ty::subst::Subst;
-use require_same_types;
+use crate::require_same_types;
 
 use rustc_target::spec::abi::Abi;
 use syntax::symbol::Symbol;
diff --git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs
index 2cf2974..34b248a 100644
--- a/src/librustc_typeck/check/method/confirm.rs
+++ b/src/librustc_typeck/check/method/confirm.rs
@@ -1,9 +1,9 @@
 use super::{probe, MethodCallee};
 
-use astconv::AstConv;
-use check::{FnCtxt, PlaceOp, callee, Needs};
-use hir::GenericArg;
-use hir::def_id::DefId;
+use crate::astconv::AstConv;
+use crate::check::{FnCtxt, PlaceOp, callee, Needs};
+use crate::hir::GenericArg;
+use crate::hir::def_id::DefId;
 use rustc::ty::subst::Substs;
 use rustc::traits;
 use rustc::ty::{self, Ty, GenericParamDefKind};
diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs
index b7d0157..02cd5b7 100644
--- a/src/librustc_typeck/check/method/mod.rs
+++ b/src/librustc_typeck/check/method/mod.rs
@@ -10,9 +10,9 @@
 pub use self::CandidateSource::*;
 pub use self::suggest::{SelfSource, TraitInfo};
 
-use check::FnCtxt;
+use crate::check::FnCtxt;
+use crate::namespace::Namespace;
 use errors::{Applicability, DiagnosticBuilder};
-use namespace::Namespace;
 use rustc_data_structures::sync::Lrc;
 use rustc::hir;
 use rustc::hir::def::Def;
@@ -29,7 +29,7 @@
 use crate::{check_type_alias_enum_variants_enabled};
 use self::probe::{IsSuggestion, ProbeScope};
 
-pub fn provide(providers: &mut ty::query::Providers) {
+pub fn provide(providers: &mut ty::query::Providers<'_>) {
     suggest::provide(providers);
     probe::provide(providers);
 }
diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs
index 6236774..cf31a54 100644
--- a/src/librustc_typeck/check/method/probe.rs
+++ b/src/librustc_typeck/check/method/probe.rs
@@ -3,11 +3,11 @@
 use super::{CandidateSource, ImplSource, TraitSource};
 use super::suggest;
 
-use check::autoderef::{self, Autoderef};
-use check::FnCtxt;
-use hir::def_id::DefId;
-use hir::def::Def;
-use namespace::Namespace;
+use crate::check::autoderef::{self, Autoderef};
+use crate::check::FnCtxt;
+use crate::hir::def_id::DefId;
+use crate::hir::def::Def;
+use crate::namespace::Namespace;
 
 use rustc_data_structures::sync::Lrc;
 use rustc::hir;
@@ -85,6 +85,37 @@
 
 #[derive(Debug)]
 struct Candidate<'tcx> {
+    // Candidates are, roughly speaking, some metadata on top of a
+    // `ty::AssociatedItem` (without substs).
+    //
+    // However, method probing wants to be able to evaluate the predicates
+    // for a function with the substs applied - for example, if a function
+    // has `where Self: Sized`, we don't want to consider it unless `Self`
+    // is actually `Sized`, and similarly, return-type suggestions want
+    // to consider the "actual" return type.
+    //
+    // The way this is handled is through `xform_self_ty`. It contains
+    // the receiver type of this candidate, but `xform_self_ty`,
+    // `xform_ret_ty` and `kind` (which contains the predicates) have the
+    // generic parameters of this candidate substituted with the *same set*
+    // of inference variables, which acts as some weird sort of "query".
+    //
+    // When we check out a candidate, we require `xform_self_ty` to be
+    // a subtype of the passed-in self-type, and this equates the type
+    // variables in the rest of the fields.
+    //
+    // For example, if we have this candidate:
+    // ```
+    //    trait Foo {
+    //        fn foo(&self) where Self: Sized;
+    //    }
+    // ```
+    //
+    // Then `xform_self_ty` will be `&'erased ?X` and `kind` will contain
+    // the predicate `?X: Sized`, so if we are evaluating `Foo` for
+    // the receiver `&T`, we'll do the subtyping which will make `?X`
+    // get the right value, then when we evaluate the predicate we'll check
+    // if `T: Sized`.
     xform_self_ty: Ty<'tcx>,
     xform_ret_ty: Option<Ty<'tcx>>,
     item: ty::AssociatedItem,
@@ -506,13 +537,28 @@
         match self_ty.value.value.sty {
             ty::Dynamic(ref data, ..) => {
                 if let Some(p) = data.principal() {
-                    let InferOk { value: instantiated_self_ty, obligations: _ } =
-                        self.fcx.probe_instantiate_query_response(
-                            self.span, &self.orig_steps_var_values, self_ty)
-                        .unwrap_or_else(|_| {
-                            span_bug!(self.span, "{:?} was applicable but now isn't?", self_ty)
-                        });
-                    self.assemble_inherent_candidates_from_object(instantiated_self_ty);
+                    // Subtle: we can't use `instantiate_query_response` here: using it will
+                    // commit to all of the type equalities assumed by inference going through
+                    // autoderef (see the `method-probe-no-guessing` test).
+                    //
+                    // However, in this code, it is OK if we end up with an object type that is
+                    // "more general" than the object type that we are evaluating. For *every*
+                    // object type `MY_OBJECT`, a function call that goes through a trait-ref
+                    // of the form `<MY_OBJECT as SuperTraitOf(MY_OBJECT)>::func` is a valid
+                    // `ObjectCandidate`, and it should be discoverable "exactly" through one
+                    // of the iterations in the autoderef loop, so there is no problem with it
+                    // being discoverable in another one of these iterations.
+                    //
+                    // Using `instantiate_canonical_with_fresh_inference_vars` on our
+                    // `Canonical<QueryResponse<Ty<'tcx>>>` and then *throwing away* the
+                    // `CanonicalVarValues` will exactly give us such a generalization - it
+                    // will still match the original object type, but it won't pollute our
+                    // type variables in any form, so just do that!
+                    let (QueryResponse { value: generalized_self_ty, .. }, _ignored_var_values) =
+                        self.fcx.instantiate_canonical_with_fresh_inference_vars(
+                            self.span, &self_ty);
+
+                    self.assemble_inherent_candidates_from_object(generalized_self_ty);
                     self.assemble_inherent_impl_candidates_for_type(p.def_id());
                 }
             }
diff --git a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs
index 55b6e8f..8f98b34 100644
--- a/src/librustc_typeck/check/method/suggest.rs
+++ b/src/librustc_typeck/check/method/suggest.rs
@@ -1,10 +1,11 @@
 //! Give useful errors and suggestions to users when an item can't be
 //! found or is otherwise invalid.
 
-use check::FnCtxt;
+use crate::check::FnCtxt;
+use crate::middle::lang_items::FnOnceTraitLangItem;
+use crate::namespace::Namespace;
+use crate::util::nodemap::FxHashSet;
 use errors::{Applicability, DiagnosticBuilder};
-use middle::lang_items::FnOnceTraitLangItem;
-use namespace::Namespace;
 use rustc_data_structures::sync::Lrc;
 use rustc::hir::{self, ExprKind, Node, QPath};
 use rustc::hir::def::Def;
@@ -15,7 +16,6 @@
 use rustc::traits::Obligation;
 use rustc::ty::{self, Adt, Ty, TyCtxt, ToPolyTraitRef, ToPredicate, TypeFoldable};
 use rustc::ty::item_path::with_crate_prefix;
-use util::nodemap::FxHashSet;
 use syntax_pos::{Span, FileName};
 use syntax::ast;
 use syntax::util::lev_distance::find_best_match_for_name;
diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs
index 3e2a9d7..467032f 100644
--- a/src/librustc_typeck/check/mod.rs
+++ b/src/librustc_typeck/check/mod.rs
@@ -83,15 +83,15 @@
 pub mod intrinsic;
 mod op;
 
-use astconv::{AstConv, PathSeg};
+use crate::astconv::{AstConv, PathSeg};
 use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
 use rustc::hir::{self, ExprKind, GenericArg, ItemKind, Node, PatKind, QPath};
 use rustc::hir::def::{CtorKind, Def};
 use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
 use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
 use rustc::hir::itemlikevisit::ItemLikeVisitor;
-use middle::lang_items;
-use namespace::Namespace;
+use crate::middle::lang_items;
+use crate::namespace::Namespace;
 use rustc::infer::{self, InferCtxt, InferOk, InferResult, RegionVariableOrigin};
 use rustc::infer::canonical::{Canonical, OriginalQueryValues, QueryResponse};
 use rustc_data_structures::indexed_vec::Idx;
@@ -130,14 +130,14 @@
 use std::ops::{self, Deref};
 use std::slice;
 
-use require_c_abi_if_variadic;
-use session::{CompileIncomplete, Session};
-use session::config::EntryFnType;
-use TypeAndSubsts;
-use lint;
-use util::captures::Captures;
-use util::common::{ErrorReported, indenter};
-use util::nodemap::{DefIdMap, DefIdSet, FxHashMap, FxHashSet, NodeMap};
+use crate::require_c_abi_if_variadic;
+use crate::session::{CompileIncomplete, Session};
+use crate::session::config::EntryFnType;
+use crate::TypeAndSubsts;
+use crate::lint;
+use crate::util::captures::Captures;
+use crate::util::common::{ErrorReported, indenter};
+use crate::util::nodemap::{DefIdMap, DefIdSet, FxHashMap, FxHashSet, NodeMap};
 
 pub use self::Expectation::*;
 use self::autoderef::Autoderef;
@@ -1883,14 +1883,14 @@
         // Check for duplicate discriminant values
         if let Some(i) = disr_vals.iter().position(|&x| x.val == discr.val) {
             let variant_did = def.variants[VariantIdx::new(i)].did;
-            let variant_i_node_id = tcx.hir().as_local_node_id(variant_did).unwrap();
-            let variant_i = tcx.hir().expect_variant(variant_i_node_id);
+            let variant_i_hir_id = tcx.hir().as_local_hir_id(variant_did).unwrap();
+            let variant_i = tcx.hir().expect_variant(variant_i_hir_id);
             let i_span = match variant_i.node.disr_expr {
-                Some(ref expr) => tcx.hir().span(expr.id),
-                None => tcx.hir().span(variant_i_node_id)
+                Some(ref expr) => tcx.hir().span_by_hir_id(expr.hir_id),
+                None => tcx.hir().span_by_hir_id(variant_i_hir_id)
             };
             let span = match v.node.disr_expr {
-                Some(ref expr) => tcx.hir().span(expr.id),
+                Some(ref expr) => tcx.hir().span_by_hir_id(expr.hir_id),
                 None => v.span
             };
             struct_span_err!(tcx.sess, span, E0081,
@@ -3044,7 +3044,7 @@
         // arguments which we skipped above.
         if variadic {
             fn variadic_error<'tcx>(s: &Session, span: Span, t: Ty<'tcx>, cast_ty: &str) {
-                use structured_errors::{VariadicError, StructuredDiagnostic};
+                use crate::structured_errors::{VariadicError, StructuredDiagnostic};
                 VariadicError::new(s, span, t, cast_ty).diagnostic().emit();
             }
 
@@ -3685,8 +3685,8 @@
         display
     }
 
-    fn no_such_field_err<T: Display>(&self, span: Span, field: T, expr_t: &ty::TyS)
-        -> DiagnosticBuilder {
+    fn no_such_field_err<T: Display>(&self, span: Span, field: T, expr_t: &ty::TyS<'_>)
+        -> DiagnosticBuilder<'_> {
         type_error_struct!(self.tcx().sess, span, expr_t, E0609,
                            "no field `{}` on type `{}`",
                            field, expr_t)
@@ -4597,7 +4597,7 @@
                 if element_ty.references_error() {
                     tcx.types.err
                 } else if let Ok(count) = count {
-                    tcx.mk_ty(ty::Array(t, tcx.intern_lazy_const(ty::LazyConst::Evaluated(count))))
+                    tcx.mk_ty(ty::Array(t, tcx.mk_lazy_const(ty::LazyConst::Evaluated(count))))
                 } else {
                     tcx.types.err
                 }
@@ -5257,7 +5257,7 @@
         &self,
         blk: &'gcx hir::Block,
         expected_ty: Ty<'tcx>,
-        err: &mut DiagnosticBuilder,
+        err: &mut DiagnosticBuilder<'_>,
     ) {
         if let Some(span_semi) = self.could_remove_semicolon(blk, expected_ty) {
             err.span_suggestion(
@@ -5703,8 +5703,8 @@
     });
     for (&used, param) in types_used.iter().zip(types) {
         if !used {
-            let id = tcx.hir().as_local_node_id(param.def_id).unwrap();
-            let span = tcx.hir().span(id);
+            let id = tcx.hir().as_local_hir_id(param.def_id).unwrap();
+            let span = tcx.hir().span_by_hir_id(id);
             struct_span_err!(tcx.sess, span, E0091, "type parameter `{}` is unused", param.name)
                 .span_label(span, "unused type parameter")
                 .emit();
@@ -5725,7 +5725,7 @@
     );
     handler.note_without_error(&format!("rustc {} running on {}",
         option_env!("CFG_VERSION").unwrap_or("unknown_version"),
-        ::session::config::host_triple(),
+        crate::session::config::host_triple(),
     ));
 }
 
diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs
index b90c18e..c058977 100644
--- a/src/librustc_typeck/check/regionck.rs
+++ b/src/librustc_typeck/check/regionck.rs
@@ -72,11 +72,11 @@
 //! relation, except that a borrowed pointer never owns its
 //! contents.
 
-use check::dropck;
-use check::FnCtxt;
-use middle::mem_categorization as mc;
-use middle::mem_categorization::Categorization;
-use middle::region;
+use crate::check::dropck;
+use crate::check::FnCtxt;
+use crate::middle::mem_categorization as mc;
+use crate::middle::mem_categorization::Categorization;
+use crate::middle::region;
 use rustc::hir::def_id::DefId;
 use rustc::infer::outlives::env::OutlivesEnvironment;
 use rustc::infer::{self, RegionObligation, SuppressRegionErrors};
diff --git a/src/librustc_typeck/check/upvar.rs b/src/librustc_typeck/check/upvar.rs
index ffd7c21..1816b74 100644
--- a/src/librustc_typeck/check/upvar.rs
+++ b/src/librustc_typeck/check/upvar.rs
@@ -32,9 +32,9 @@
 
 use super::FnCtxt;
 
-use middle::expr_use_visitor as euv;
-use middle::mem_categorization as mc;
-use middle::mem_categorization::Categorization;
+use crate::middle::expr_use_visitor as euv;
+use crate::middle::mem_categorization as mc;
+use crate::middle::mem_categorization::Categorization;
 use rustc::hir;
 use rustc::hir::def_id::DefId;
 use rustc::hir::def_id::LocalDefId;
@@ -650,6 +650,5 @@
 }
 
 fn var_name(tcx: TyCtxt, var_hir_id: hir::HirId) -> ast::Name {
-    let var_node_id = tcx.hir().hir_to_node_id(var_hir_id);
-    tcx.hir().name(var_node_id)
+    tcx.hir().name_by_hir_id(var_hir_id)
 }
diff --git a/src/librustc_typeck/check/wfcheck.rs b/src/librustc_typeck/check/wfcheck.rs
index 9788170..b51fd58 100644
--- a/src/librustc_typeck/check/wfcheck.rs
+++ b/src/librustc_typeck/check/wfcheck.rs
@@ -1,7 +1,7 @@
-use check::{Inherited, FnCtxt};
-use constrained_type_params::{identify_constrained_type_params, Parameter};
+use crate::check::{Inherited, FnCtxt};
+use crate::constrained_type_params::{identify_constrained_type_params, Parameter};
 
-use hir::def_id::DefId;
+use crate::hir::def_id::DefId;
 use rustc::traits::{self, ObligationCauseCode};
 use rustc::ty::{self, Lift, Ty, TyCtxt, TyKind, GenericParamDefKind, TypeFoldable, ToPredicate};
 use rustc::ty::subst::{Subst, Substs};
@@ -62,11 +62,11 @@
 /// not included it frequently leads to confusing errors in fn bodies. So it's better to check
 /// the types first.
 pub fn check_item_well_formed<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
-    let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
-    let item = tcx.hir().expect_item(node_id);
+    let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+    let item = tcx.hir().expect_item_by_hir_id(hir_id);
 
-    debug!("check_item_well_formed(it.id={}, it.name={})",
-           item.id,
+    debug!("check_item_well_formed(it.hir_id={:?}, it.name={})",
+           item.hir_id,
            tcx.item_path_str(def_id));
 
     match item.node {
@@ -88,7 +88,7 @@
         // won't be allowed unless there's an *explicit* implementation of `Send`
         // for `T`
         hir::ItemKind::Impl(_, polarity, defaultness, _, ref trait_ref, ref self_ty, _) => {
-            let is_auto = tcx.impl_trait_ref(tcx.hir().local_def_id(item.id))
+            let is_auto = tcx.impl_trait_ref(tcx.hir().local_def_id_from_hir_id(item.hir_id))
                                 .map_or(false, |trait_ref| tcx.trait_is_auto(trait_ref.def_id));
             if let (hir::Defaultness::Default { .. }, true) = (defaultness, is_auto) {
                 tcx.sess.span_err(item.span, "impls of auto traits cannot be default");
diff --git a/src/librustc_typeck/check/writeback.rs b/src/librustc_typeck/check/writeback.rs
index 238b087..e02e706 100644
--- a/src/librustc_typeck/check/writeback.rs
+++ b/src/librustc_typeck/check/writeback.rs
@@ -2,7 +2,7 @@
 // unresolved type variables and replaces "ty_var" types with their
 // substitutions.
 
-use check::FnCtxt;
+use crate::check::FnCtxt;
 use errors::DiagnosticBuilder;
 use rustc::hir;
 use rustc::hir::def_id::{DefId, DefIndex};
@@ -407,8 +407,7 @@
             if let ty::UserType::TypeOf(_, user_substs) = c_ty.value {
                 if self.rustc_dump_user_substs {
                     // This is a unit-testing mechanism.
-                    let node_id = self.tcx().hir().hir_to_node_id(hir_id);
-                    let span = self.tcx().hir().span(node_id);
+                    let span = self.tcx().hir().span_by_hir_id(hir_id);
                     // We need to buffer the errors in order to guarantee a consistent
                     // order when emitting them.
                     let err = self.tcx().sess.struct_span_err(
@@ -739,15 +738,14 @@
 
 impl Locatable for DefIndex {
     fn to_span(&self, tcx: &TyCtxt) -> Span {
-        let node_id = tcx.hir().def_index_to_node_id(*self);
-        tcx.hir().span(node_id)
+        let hir_id = tcx.hir().def_index_to_hir_id(*self);
+        tcx.hir().span_by_hir_id(hir_id)
     }
 }
 
 impl Locatable for hir::HirId {
     fn to_span(&self, tcx: &TyCtxt) -> Span {
-        let node_id = tcx.hir().hir_to_node_id(*self);
-        tcx.hir().span(node_id)
+        tcx.hir().span_by_hir_id(*self)
     }
 }
 
diff --git a/src/librustc_typeck/check_unused.rs b/src/librustc_typeck/check_unused.rs
index a7e19fc..18194ee 100644
--- a/src/librustc_typeck/check_unused.rs
+++ b/src/librustc_typeck/check_unused.rs
@@ -1,4 +1,4 @@
-use lint;
+use crate::lint;
 use rustc::ty::TyCtxt;
 
 use errors::Applicability;
diff --git a/src/librustc_typeck/coherence/builtin.rs b/src/librustc_typeck/coherence/builtin.rs
index bd2373d..3ec08f2 100644
--- a/src/librustc_typeck/coherence/builtin.rs
+++ b/src/librustc_typeck/coherence/builtin.rs
@@ -76,7 +76,7 @@
 fn visit_implementation_of_copy<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_did: DefId) {
     debug!("visit_implementation_of_copy: impl_did={:?}", impl_did);
 
-    let impl_node_id = if let Some(n) = tcx.hir().as_local_node_id(impl_did) {
+    let impl_hir_id = if let Some(n) = tcx.hir().as_local_hir_id(impl_did) {
         n
     } else {
         debug!("visit_implementation_of_copy(): impl not in this crate");
@@ -87,7 +87,7 @@
     debug!("visit_implementation_of_copy: self_type={:?} (bound)",
            self_type);
 
-    let span = tcx.hir().span(impl_node_id);
+    let span = tcx.hir().span_by_hir_id(impl_hir_id);
     let param_env = tcx.param_env(impl_did);
     assert!(!self_type.has_escaping_bound_vars());
 
@@ -97,7 +97,7 @@
     match param_env.can_type_implement_copy(tcx, self_type) {
         Ok(()) => {}
         Err(CopyImplementationError::InfrigingFields(fields)) => {
-            let item = tcx.hir().expect_item(impl_node_id);
+            let item = tcx.hir().expect_item_by_hir_id(impl_hir_id);
             let span = if let ItemKind::Impl(.., Some(ref tr), _, _) = item.node {
                 tr.path.span
             } else {
@@ -114,7 +114,7 @@
             err.emit()
         }
         Err(CopyImplementationError::NotAnAdt) => {
-            let item = tcx.hir().expect_item(impl_node_id);
+            let item = tcx.hir().expect_item_by_hir_id(impl_hir_id);
             let span = if let ItemKind::Impl(.., ref ty, _) = item.node {
                 ty.span
             } else {
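
Besides the path changes, the coherence and writeback hunks above are part of the ongoing NodeId-to-HirId migration: code that already holds a `HirId` no longer converts back to a `NodeId` just to query the HIR map, and instead calls the `*_by_hir_id` / `as_local_hir_id` accessors directly. A self-contained toy model of the before/after shape (all names here are stand-ins, not the real rustc HIR map):

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct NodeId(u32);
    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct HirId(u32);

    struct HirMap {
        hir_to_node: HashMap<HirId, NodeId>,
        span_of_node: HashMap<NodeId, &'static str>,
        span_of_hir: HashMap<HirId, &'static str>,
    }

    impl HirMap {
        // Old shape: convert the HirId back to a NodeId, then look up the span.
        fn span(&self, id: NodeId) -> &'static str {
            self.span_of_node[&id]
        }
        fn hir_to_node_id(&self, id: HirId) -> NodeId {
            self.hir_to_node[&id]
        }
        // New shape used throughout this diff: query by HirId directly.
        fn span_by_hir_id(&self, id: HirId) -> &'static str {
            self.span_of_hir[&id]
        }
    }

    fn main() {
        let map = HirMap {
            hir_to_node: [(HirId(1), NodeId(10))].iter().cloned().collect(),
            span_of_node: [(NodeId(10), "builtin.rs:87")].iter().cloned().collect(),
            span_of_hir: [(HirId(1), "builtin.rs:87")].iter().cloned().collect(),
        };
        // Before: map.span(map.hir_to_node_id(HirId(1)))
        // After:
        assert_eq!(map.span_by_hir_id(HirId(1)),
                   map.span(map.hir_to_node_id(HirId(1))));
    }
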
diff --git a/src/librustc_typeck/coherence/inherent_impls_overlap.rs b/src/librustc_typeck/coherence/inherent_impls_overlap.rs
index 52dee29..138c598 100644
--- a/src/librustc_typeck/coherence/inherent_impls_overlap.rs
+++ b/src/librustc_typeck/coherence/inherent_impls_overlap.rs
@@ -1,11 +1,11 @@
-use namespace::Namespace;
+use crate::namespace::Namespace;
 use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
 use rustc::hir;
 use rustc::hir::itemlikevisit::ItemLikeVisitor;
 use rustc::traits::{self, IntercrateMode};
 use rustc::ty::TyCtxt;
 
-use lint;
+use crate::lint;
 
 pub fn crate_inherent_impls_overlap_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                                     crate_num: CrateNum) {
diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs
index 853c4c8..4eee68b 100644
--- a/src/librustc_typeck/coherence/mod.rs
+++ b/src/librustc_typeck/coherence/mod.rs
@@ -5,7 +5,7 @@
 // done by the orphan and overlap modules. Then we build up various
 // mappings. That mapping code resides here.
 
-use hir::def_id::{DefId, LOCAL_CRATE};
+use crate::hir::def_id::{DefId, LOCAL_CRATE};
 use rustc::traits;
 use rustc::ty::{self, TyCtxt, TypeFoldable};
 use rustc::ty::query::Providers;
diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs
index 9dc74c5..bb4fba1 100644
--- a/src/librustc_typeck/collect.rs
+++ b/src/librustc_typeck/collect.rs
@@ -14,13 +14,13 @@
 //! At present, however, we do run collection across all items in the
 //! crate as a kind of pass. This should eventually be factored away.
 
-use astconv::{AstConv, Bounds};
-use constrained_type_params as ctp;
-use check::intrinsic::intrisic_operation_unsafety;
-use lint;
-use middle::lang_items::SizedTraitLangItem;
-use middle::resolve_lifetime as rl;
-use middle::weak_lang_items;
+use crate::astconv::{AstConv, Bounds};
+use crate::constrained_type_params as ctp;
+use crate::check::intrinsic::intrisic_operation_unsafety;
+use crate::lint;
+use crate::middle::lang_items::SizedTraitLangItem;
+use crate::middle::resolve_lifetime as rl;
+use crate::middle::weak_lang_items;
 use rustc::mir::mono::Linkage;
 use rustc::ty::query::Providers;
 use rustc::ty::subst::Substs;
@@ -68,7 +68,7 @@
     );
 }
 
-pub fn provide(providers: &mut Providers) {
+pub fn provide(providers: &mut Providers<'_>) {
     *providers = Providers {
         type_of,
         generics_of,
@@ -737,8 +737,8 @@
 }
 
 fn trait_def<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx ty::TraitDef {
-    let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
-    let item = tcx.hir().expect_item(node_id);
+    let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+    let item = tcx.hir().expect_item_by_hir_id(hir_id);
 
     let (is_auto, unsafety) = match item.node {
         hir::ItemKind::Trait(is_auto, unsafety, ..) => (is_auto == hir::IsAuto::Yes, unsafety),
@@ -1509,8 +1509,8 @@
 ) -> Option<ty::TraitRef<'tcx>> {
     let icx = ItemCtxt::new(tcx, def_id);
 
-    let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
-    match tcx.hir().expect_item(node_id).node {
+    let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+    match tcx.hir().expect_item_by_hir_id(hir_id).node {
         hir::ItemKind::Impl(.., ref opt_trait_ref, _, _) => {
             opt_trait_ref.as_ref().map(|ast_trait_ref| {
                 let selfty = tcx.type_of(def_id);
@@ -1522,8 +1522,8 @@
 }
 
 fn impl_polarity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> hir::ImplPolarity {
-    let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
-    match tcx.hir().expect_item(node_id).node {
+    let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+    match tcx.hir().expect_item_by_hir_id(hir_id).node {
         hir::ItemKind::Impl(_, polarity, ..) => polarity,
         ref item => bug!("impl_polarity: {:?} not an impl", item),
     }
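
The `Providers<'_>` and `TyCtxt<'_, '_, '_>` spellings in this and the following hunks satisfy the `elided_lifetimes_in_paths` idiom lint (part of `rust_2018_idioms`): both types carry lifetime parameters, and the lint asks that an elided lifetime in a path be written explicitly as `'_` rather than silently omitted (the crate's lib.rs below still `allow`s the lint as WIP, but these signatures were already updated). A minimal self-contained sketch with a stand-in struct:

    // Stand-in for rustc's query `Providers` struct; the real one also carries
    // a lifetime parameter, which is what forces the explicit `<'_>` spelling.
    struct Providers<'a> {
        name: &'a str,
    }

    // 2015-style `&mut Providers` hides the elided lifetime and trips the
    // `elided_lifetimes_in_paths` lint; `Providers<'_>` keeps the elision but
    // makes it visible at the use site.
    fn provide(providers: &mut Providers<'_>) {
        providers.name = "typeck";
    }

    fn main() {
        let mut p = Providers { name: "" };
        provide(&mut p);
        assert_eq!(p.name, "typeck");
    }
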
diff --git a/src/librustc_typeck/constrained_type_params.rs b/src/librustc_typeck/constrained_type_params.rs
index 199ea31..d1f33b6 100644
--- a/src/librustc_typeck/constrained_type_params.rs
+++ b/src/librustc_typeck/constrained_type_params.rs
@@ -124,7 +124,7 @@
 /// which is determined by 1, which requires `U`, that is determined
 /// by 0. I should probably pick a less tangled example, but I can't
 /// think of any.
-pub fn setup_constraining_predicates<'tcx>(tcx: TyCtxt,
+pub fn setup_constraining_predicates<'tcx>(tcx: TyCtxt<'_, '_, '_>,
                                            predicates: &mut [(ty::Predicate<'tcx>, Span)],
                                            impl_trait_ref: Option<ty::TraitRef<'tcx>>,
                                            input_parameters: &mut FxHashSet<Parameter>)
diff --git a/src/librustc_typeck/impl_wf_check.rs b/src/librustc_typeck/impl_wf_check.rs
index 07f5fca..6de06b6 100644
--- a/src/librustc_typeck/impl_wf_check.rs
+++ b/src/librustc_typeck/impl_wf_check.rs
@@ -8,7 +8,7 @@
 //! specialization errors. These things can (and probably should) be
 //! fixed, but for the moment it's easier to do these checks early.
 
-use constrained_type_params as ctp;
+use crate::constrained_type_params as ctp;
 use rustc::hir;
 use rustc::hir::itemlikevisit::ItemLikeVisitor;
 use rustc::hir::def_id::DefId;
@@ -162,7 +162,7 @@
     // used elsewhere are not projected back out.
 }
 
-fn report_unused_parameter(tcx: TyCtxt,
+fn report_unused_parameter(tcx: TyCtxt<'_, '_, '_>,
                            span: Span,
                            kind: &str,
                            name: &str)
diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs
index 8d77310..e99ec53 100644
--- a/src/librustc_typeck/lib.rs
+++ b/src/librustc_typeck/lib.rs
@@ -72,17 +72,15 @@
 
 #![recursion_limit="256"]
 
+#![deny(rust_2018_idioms)]
+#![allow(explicit_outlives_requirements)]
+
+#![allow(elided_lifetimes_in_paths)] // WIP
+
 #[macro_use] extern crate log;
 #[macro_use] extern crate syntax;
-extern crate syntax_pos;
-
-extern crate arena;
 
 #[macro_use] extern crate rustc;
-extern crate rustc_data_structures;
-extern crate rustc_errors as errors;
-extern crate rustc_target;
-extern crate smallvec;
 
 // N.B., this module needs to be declared first so diagnostics are
 // registered before they are used.
@@ -141,7 +139,7 @@
     }
 }
 
-fn require_c_abi_if_variadic(tcx: TyCtxt,
+fn require_c_abi_if_variadic(tcx: TyCtxt<'_, '_, '_>,
                              decl: &hir::FnDecl,
                              abi: Abi,
                              span: Span) {
@@ -310,7 +308,7 @@
     }
 }
 
-pub fn provide(providers: &mut Providers) {
+pub fn provide(providers: &mut Providers<'_>) {
     collect::provide(providers);
     coherence::provide(providers);
     check::provide(providers);
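
On the 2018 edition, crates listed in Cargo.toml are in scope without `extern crate`, so the plain declarations removed above can simply be deleted; the ones kept are those used with `#[macro_use]` so their macros stay visible crate-wide. The removed `extern crate rustc_errors as errors;` implies the `errors` name is supplied some other way not visible in this hunk (for example a rename in Cargo.toml); that is an inference, not something shown here. A self-contained sketch of `#[macro_use]`-style visibility versus a 2018-style `use` of a macro, with a local macro standing in for the real ones:

    mod macros {
        // `#[macro_export]` makes the macro importable as `crate::note`.
        #[macro_export]
        macro_rules! note {
            ($msg:expr) => {
                println!("note: {}", $msg)
            };
        }
    }

    mod collect {
        // 2018-style macro import by path; no `#[macro_use]` needed here.
        use crate::note;

        pub fn run() {
            note!("collecting item types");
        }
    }

    fn main() {
        collect::run();
    }
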
diff --git a/src/librustc_typeck/outlives/explicit.rs b/src/librustc_typeck/outlives/explicit.rs
index 38f4b37..574086f 100644
--- a/src/librustc_typeck/outlives/explicit.rs
+++ b/src/librustc_typeck/outlives/explicit.rs
@@ -1,6 +1,6 @@
 use rustc::hir::def_id::DefId;
 use rustc::ty::{self, OutlivesPredicate, TyCtxt};
-use util::nodemap::FxHashMap;
+use crate::util::nodemap::FxHashMap;
 
 use super::utils::*;
 
diff --git a/src/librustc_typeck/outlives/implicit_infer.rs b/src/librustc_typeck/outlives/implicit_infer.rs
index e388a3e..0ff884d 100644
--- a/src/librustc_typeck/outlives/implicit_infer.rs
+++ b/src/librustc_typeck/outlives/implicit_infer.rs
@@ -1,5 +1,4 @@
-use rustc::hir;
-use hir::Node;
+use rustc::hir::{self, Node};
 use rustc::hir::def_id::DefId;
 use rustc::hir::itemlikevisit::ItemLikeVisitor;
 use rustc::ty::subst::{Kind, Subst, UnpackedKind};
diff --git a/src/librustc_typeck/outlives/mod.rs b/src/librustc_typeck/outlives/mod.rs
index f0310f2..b3634d3 100644
--- a/src/librustc_typeck/outlives/mod.rs
+++ b/src/librustc_typeck/outlives/mod.rs
@@ -12,7 +12,7 @@
 pub mod test;
 mod utils;
 
-pub fn provide(providers: &mut Providers) {
+pub fn provide(providers: &mut Providers<'_>) {
     *providers = Providers {
         inferred_outlives_of,
         inferred_outlives_crate,
diff --git a/src/librustc_typeck/variance/mod.rs b/src/librustc_typeck/variance/mod.rs
index afb6a68..3474227 100644
--- a/src/librustc_typeck/variance/mod.rs
+++ b/src/librustc_typeck/variance/mod.rs
@@ -46,12 +46,12 @@
 
 fn variances_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_def_id: DefId)
                           -> Lrc<Vec<ty::Variance>> {
-    let id = tcx.hir().as_local_node_id(item_def_id).expect("expected local def-id");
+    let id = tcx.hir().as_local_hir_id(item_def_id).expect("expected local def-id");
     let unsupported = || {
         // Variance not relevant.
-        span_bug!(tcx.hir().span(id), "asked to compute variance for wrong kind of item")
+        span_bug!(tcx.hir().span_by_hir_id(id), "asked to compute variance for wrong kind of item")
     };
-    match tcx.hir().get(id) {
+    match tcx.hir().get_by_hir_id(id) {
         Node::Item(item) => match item.node {
             hir::ItemKind::Enum(..) |
             hir::ItemKind::Struct(..) |
diff --git a/src/librustc_typeck/variance/terms.rs b/src/librustc_typeck/variance/terms.rs
index d53e2d2..ec0acfb 100644
--- a/src/librustc_typeck/variance/terms.rs
+++ b/src/librustc_typeck/variance/terms.rs
@@ -15,7 +15,7 @@
 use syntax::ast;
 use rustc::hir;
 use rustc::hir::itemlikevisit::ItemLikeVisitor;
-use util::nodemap::NodeMap;
+use crate::util::nodemap::NodeMap;
 
 use self::VarianceTerm::*;
 
diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
index bd05255..b0bb033 100644
--- a/src/librustdoc/clean/mod.rs
+++ b/src/librustdoc/clean/mod.rs
@@ -517,6 +517,7 @@
     StaticItem(Static),
     ConstantItem(Constant),
     TraitItem(Trait),
+    TraitAliasItem(TraitAlias),
     ImplItem(Impl),
     /// A method signature only. Used for required methods in traits (ie,
     /// non-default-methods).
@@ -554,6 +555,7 @@
             ItemEnum::TyMethodItem(ref i) => &i.generics,
             ItemEnum::MethodItem(ref i) => &i.generics,
             ItemEnum::ForeignFunctionItem(ref f) => &f.generics,
+            ItemEnum::TraitAliasItem(ref ta) => &ta.generics,
             _ => return None,
         })
     }
@@ -603,6 +605,7 @@
         items.extend(self.impls.iter().flat_map(|x| x.clean(cx)));
         items.extend(self.macros.iter().map(|x| x.clean(cx)));
         items.extend(self.proc_macros.iter().map(|x| x.clean(cx)));
+        items.extend(self.trait_aliases.iter().map(|x| x.clean(cx)));
 
         // determine if we should display the inner contents or
         // the outer `mod` item for the source code.
@@ -1724,6 +1727,30 @@
     pub fn self_type(&self) -> Option<SelfTy> {
         self.inputs.values.get(0).and_then(|v| v.to_self())
     }
+
+    /// Returns the sugared return type for an async function.
+    ///
+    /// For example, if the return type is `impl std::future::Future<Output = i32>`, this function
+    /// will return `i32`.
+    ///
+    /// # Panics
+    ///
+    /// This function will panic if the return type does not match the expected sugaring for async
+    /// functions.
+    pub fn sugared_async_return_type(&self) -> FunctionRetTy {
+        match &self.output {
+            FunctionRetTy::Return(Type::ImplTrait(bounds)) => {
+                match &bounds[0] {
+                    GenericBound::TraitBound(PolyTrait { trait_, .. }, ..) => {
+                        let bindings = trait_.bindings().unwrap();
+                        FunctionRetTy::Return(bindings[0].ty.clone())
+                    }
+                    _ => panic!("unexpected desugaring of async function"),
+                }
+            }
+            _ => panic!("unexpected desugaring of async function"),
+        }
+    }
 }
 
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
@@ -1885,13 +1912,38 @@
                 items: self.items.clean(cx),
                 generics: self.generics.clean(cx),
                 bounds: self.bounds.clean(cx),
-                is_spotlight: is_spotlight,
+                is_spotlight,
                 is_auto: self.is_auto.clean(cx),
             }),
         }
     }
 }
 
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
+pub struct TraitAlias {
+    pub generics: Generics,
+    pub bounds: Vec<GenericBound>,
+}
+
+impl Clean<Item> for doctree::TraitAlias {
+    fn clean(&self, cx: &DocContext) -> Item {
+        let attrs = self.attrs.clean(cx);
+        Item {
+            name: Some(self.name.clean(cx)),
+            attrs,
+            source: self.whence.clean(cx),
+            def_id: cx.tcx.hir().local_def_id(self.id),
+            visibility: self.vis.clean(cx),
+            stability: self.stab.clean(cx),
+            deprecation: self.depr.clean(cx),
+            inner: TraitAliasItem(TraitAlias {
+                generics: self.generics.clean(cx),
+                bounds: self.bounds.clean(cx),
+            }),
+        }
+    }
+}
+
 impl Clean<bool> for hir::IsAuto {
     fn clean(&self, _: &DocContext) -> bool {
         match *self {
@@ -2223,6 +2275,7 @@
     Macro,
     Attr,
     Derive,
+    TraitAlias,
 }
 
 pub trait GetDefId {
@@ -2282,6 +2335,21 @@
             _ => None,
         }
     }
+
+    pub fn bindings(&self) -> Option<&[TypeBinding]> {
+        match *self {
+            ResolvedPath { ref path, .. } => {
+                path.segments.last().and_then(|seg| {
+                    if let GenericArgs::AngleBracketed { ref bindings, .. } = seg.args {
+                        Some(&**bindings)
+                    } else {
+                        None
+                    }
+                })
+            }
+            _ => None
+        }
+    }
 }
 
 impl GetDefId for Type {
@@ -3819,10 +3887,9 @@
             MacroKind::Derive => (i, TypeKind::Derive),
             MacroKind::ProcMacroStub => unreachable!(),
         },
+        Def::TraitAlias(i) => (i, TypeKind::TraitAlias),
         Def::SelfTy(Some(def_id), _) => (def_id, TypeKind::Trait),
-        Def::SelfTy(_, Some(impl_def_id)) => {
-            return impl_def_id
-        }
+        Def::SelfTy(_, Some(impl_def_id)) => return impl_def_id,
         _ => return def.def_id()
     };
     if did.is_local() { return did }
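
The new `sugared_async_return_type` above recovers the written return type of an `async fn` from its desugared form: the declared `impl Future<Output = T>` is unwrapped by taking the first bound of the `ImplTrait` return type and reading its first associated-type binding. A self-contained toy model of that unwrapping (the `Type`/`Bound` types here are simplified stand-ins, not rustdoc's real `clean` types):

    #[derive(Clone, Debug, PartialEq)]
    enum Type {
        Plain(&'static str),
        ImplTrait(Vec<Bound>),
    }

    #[derive(Clone, Debug, PartialEq)]
    struct Bound {
        trait_name: &'static str,
        bindings: Vec<(&'static str, Type)>, // e.g. ("Output", i32)
    }

    // Mirrors the unwrapping done by sugared_async_return_type: the written
    // return type is the first binding of the first bound of the impl Trait.
    fn sugared_async_return_type(output: &Type) -> Type {
        match output {
            Type::ImplTrait(bounds) => bounds[0].bindings[0].1.clone(),
            _ => panic!("unexpected desugaring of async function"),
        }
    }

    fn main() {
        // `async fn f() -> i32` desugars to `fn f() -> impl Future<Output = i32>`.
        let desugared = Type::ImplTrait(vec![Bound {
            trait_name: "Future",
            bindings: vec![("Output", Type::Plain("i32"))],
        }]);
        assert_eq!(sugared_async_return_type(&desugared), Type::Plain("i32"));
    }
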
diff --git a/src/librustdoc/doctree.rs b/src/librustdoc/doctree.rs
index cc27da7..e845838 100644
--- a/src/librustdoc/doctree.rs
+++ b/src/librustdoc/doctree.rs
@@ -38,6 +38,7 @@
     pub foreigns: Vec<hir::ForeignMod>,
     pub macros: Vec<Macro>,
     pub proc_macros: Vec<ProcMacro>,
+    pub trait_aliases: Vec<TraitAlias>,
     pub is_crate: bool,
 }
 
@@ -53,21 +54,22 @@
             where_inner: syntax_pos::DUMMY_SP,
             attrs      : hir::HirVec::new(),
             extern_crates: Vec::new(),
-            imports    : Vec::new(),
-            structs    : Vec::new(),
-            unions     : Vec::new(),
-            enums      : Vec::new(),
-            fns        : Vec::new(),
-            mods       : Vec::new(),
-            typedefs   : Vec::new(),
-            existentials: Vec::new(),
-            statics    : Vec::new(),
-            constants  : Vec::new(),
-            traits     : Vec::new(),
-            impls      : Vec::new(),
-            foreigns   : Vec::new(),
-            macros     : Vec::new(),
-            proc_macros: Vec::new(),
+            imports    :   Vec::new(),
+            structs    :   Vec::new(),
+            unions     :   Vec::new(),
+            enums      :   Vec::new(),
+            fns        :   Vec::new(),
+            mods       :   Vec::new(),
+            typedefs   :   Vec::new(),
+            existentials:  Vec::new(),
+            statics    :   Vec::new(),
+            constants  :   Vec::new(),
+            traits     :   Vec::new(),
+            impls      :   Vec::new(),
+            foreigns   :   Vec::new(),
+            macros     :   Vec::new(),
+            proc_macros:   Vec::new(),
+            trait_aliases: Vec::new(),
             is_crate   : false,
         }
     }
@@ -208,6 +210,18 @@
     pub depr: Option<attr::Deprecation>,
 }
 
+pub struct TraitAlias {
+    pub name: Name,
+    pub generics: hir::Generics,
+    pub bounds: hir::HirVec<hir::GenericBound>,
+    pub attrs: hir::HirVec<ast::Attribute>,
+    pub id: ast::NodeId,
+    pub whence: Span,
+    pub vis: hir::Visibility,
+    pub stab: Option<attr::Stability>,
+    pub depr: Option<attr::Deprecation>,
+}
+
 #[derive(Debug)]
 pub struct Impl {
     pub unsafety: hir::Unsafety,
diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs
index 5a3e698..c03e679 100644
--- a/src/librustdoc/html/format.rs
+++ b/src/librustdoc/html/format.rs
@@ -5,6 +5,7 @@
 //! assume that HTML output is desired, although it may be possible to redesign
 //! them in the future to instead emit any format desired.
 
+use std::borrow::Cow;
 use std::fmt;
 
 use rustc::hir::def_id::DefId;
@@ -44,14 +45,16 @@
 pub struct CommaSep<'a, T: 'a>(pub &'a [T]);
 pub struct AbiSpace(pub Abi);
 
-/// Wrapper struct for properly emitting a method declaration.
-pub struct Method<'a> {
+/// Wrapper struct for properly emitting a function or method declaration.
+pub struct Function<'a> {
     /// The declaration to emit.
     pub decl: &'a clean::FnDecl,
     /// The length of the function's "name", used to determine line-wrapping.
     pub name_len: usize,
     /// The number of spaces to indent each successive line with, if line-wrapping is necessary.
     pub indent: usize,
+    /// Whether the function is async or not.
+    pub asyncness: hir::IsAsync,
 }
 
 /// Wrapper struct for emitting a where clause from Generics.
@@ -829,9 +832,9 @@
     }
 }
 
-impl<'a> fmt::Display for Method<'a> {
+impl<'a> fmt::Display for Function<'a> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        let &Method { decl, name_len, indent } = self;
+        let &Function { decl, name_len, indent, asyncness } = self;
         let amp = if f.alternate() { "&" } else { "&amp;" };
         let mut args = String::new();
         let mut args_plain = String::new();
@@ -891,11 +894,17 @@
             args_plain.push_str(", ...");
         }
 
-        let arrow_plain = format!("{:#}", decl.output);
-        let arrow = if f.alternate() {
-            format!("{:#}", decl.output)
+        let output = if let hir::IsAsync::Async = asyncness {
+            Cow::Owned(decl.sugared_async_return_type())
         } else {
-            decl.output.to_string()
+            Cow::Borrowed(&decl.output)
+        };
+
+        let arrow_plain = format!("{:#}", &output);
+        let arrow = if f.alternate() {
+            format!("{:#}", &output)
+        } else {
+            output.to_string()
         };
 
         let pad = " ".repeat(name_len);
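
The `Cow` introduced above lets the formatter swap in the sugared async return type only when needed: for ordinary functions the declared output is borrowed as-is, and an owned value is built only for `async fn`, so both cases go through the same formatting path. A minimal sketch of the same borrow-or-own pattern with plain strings (all names here are illustrative):

    use std::borrow::Cow;

    fn display_output(declared: &str, is_async: bool) -> String {
        // Owned only when we actually rewrite the type; borrowed otherwise.
        let output: Cow<'_, str> = if is_async {
            Cow::Owned(format!("<sugared {}>", declared))
        } else {
            Cow::Borrowed(declared)
        };
        format!(" -> {}", output)
    }

    fn main() {
        assert_eq!(display_output("i32", false), " -> i32");
        assert_eq!(display_output("impl Future<Output = i32>", true),
                   " -> <sugared impl Future<Output = i32>>");
    }
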
diff --git a/src/librustdoc/html/item_type.rs b/src/librustdoc/html/item_type.rs
index e20d385..8a3b548 100644
--- a/src/librustdoc/html/item_type.rs
+++ b/src/librustdoc/html/item_type.rs
@@ -42,6 +42,7 @@
     Existential     = 22,
     ProcAttribute   = 23,
     ProcDerive      = 24,
+    TraitAlias      = 25,
 }
 
 
@@ -86,6 +87,7 @@
             clean::AssociatedTypeItem(..)  => ItemType::AssociatedType,
             clean::ForeignTypeItem         => ItemType::ForeignType,
             clean::KeywordItem(..)         => ItemType::Keyword,
+            clean::TraitAliasItem(..)      => ItemType::TraitAlias,
             clean::ProcMacroItem(ref mac)  => match mac.kind {
                 MacroKind::Bang            => ItemType::Macro,
                 MacroKind::Attr            => ItemType::ProcAttribute,
@@ -100,20 +102,21 @@
 impl From<clean::TypeKind> for ItemType {
     fn from(kind: clean::TypeKind) -> ItemType {
         match kind {
-            clean::TypeKind::Struct   => ItemType::Struct,
-            clean::TypeKind::Union    => ItemType::Union,
-            clean::TypeKind::Enum     => ItemType::Enum,
-            clean::TypeKind::Function => ItemType::Function,
-            clean::TypeKind::Trait    => ItemType::Trait,
-            clean::TypeKind::Module   => ItemType::Module,
-            clean::TypeKind::Static   => ItemType::Static,
-            clean::TypeKind::Const    => ItemType::Constant,
-            clean::TypeKind::Variant  => ItemType::Variant,
-            clean::TypeKind::Typedef  => ItemType::Typedef,
-            clean::TypeKind::Foreign  => ItemType::ForeignType,
-            clean::TypeKind::Macro    => ItemType::Macro,
-            clean::TypeKind::Attr     => ItemType::ProcAttribute,
-            clean::TypeKind::Derive   => ItemType::ProcDerive,
+            clean::TypeKind::Struct     => ItemType::Struct,
+            clean::TypeKind::Union      => ItemType::Union,
+            clean::TypeKind::Enum       => ItemType::Enum,
+            clean::TypeKind::Function   => ItemType::Function,
+            clean::TypeKind::Trait      => ItemType::Trait,
+            clean::TypeKind::Module     => ItemType::Module,
+            clean::TypeKind::Static     => ItemType::Static,
+            clean::TypeKind::Const      => ItemType::Constant,
+            clean::TypeKind::Variant    => ItemType::Variant,
+            clean::TypeKind::Typedef    => ItemType::Typedef,
+            clean::TypeKind::Foreign    => ItemType::ForeignType,
+            clean::TypeKind::Macro      => ItemType::Macro,
+            clean::TypeKind::Attr       => ItemType::ProcAttribute,
+            clean::TypeKind::Derive     => ItemType::ProcDerive,
+            clean::TypeKind::TraitAlias => ItemType::TraitAlias,
         }
     }
 }
@@ -146,6 +149,7 @@
             ItemType::Existential     => "existential",
             ItemType::ProcAttribute   => "attr",
             ItemType::ProcDerive      => "derive",
+            ItemType::TraitAlias      => "traitalias",
         }
     }
 
@@ -160,6 +164,7 @@
             ItemType::Primitive |
             ItemType::AssociatedType |
             ItemType::Existential |
+            ItemType::TraitAlias |
             ItemType::ForeignType => NameSpace::Type,
 
             ItemType::ExternCrate |
diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs
index a85ac19..3a8e8a6 100644
--- a/src/librustdoc/html/render.rs
+++ b/src/librustdoc/html/render.rs
@@ -62,7 +62,7 @@
 use html::escape::Escape;
 use html::format::{AsyncSpace, ConstnessSpace};
 use html::format::{GenericBounds, WhereClause, href, AbiSpace};
-use html::format::{VisSpace, Method, UnsafetySpace, MutableSpace};
+use html::format::{VisSpace, Function, UnsafetySpace, MutableSpace};
 use html::format::fmt_impl_for_trait_page;
 use html::item_type::ItemType;
 use html::markdown::{self, Markdown, MarkdownHtml, MarkdownSummaryLine, ErrorCodes, IdMap};
@@ -1836,6 +1836,7 @@
     keywords: FxHashSet<ItemEntry>,
     attributes: FxHashSet<ItemEntry>,
     derives: FxHashSet<ItemEntry>,
+    trait_aliases: FxHashSet<ItemEntry>,
 }
 
 impl AllTypes {
@@ -1856,6 +1857,7 @@
             keywords: new_set(100),
             attributes: new_set(100),
             derives: new_set(100),
+            trait_aliases: new_set(100),
         }
     }
 
@@ -1879,6 +1881,7 @@
                 ItemType::Constant => self.constants.insert(ItemEntry::new(new_url, name)),
                 ItemType::ProcAttribute => self.attributes.insert(ItemEntry::new(new_url, name)),
                 ItemType::ProcDerive => self.derives.insert(ItemEntry::new(new_url, name)),
+                ItemType::TraitAlias => self.trait_aliases.insert(ItemEntry::new(new_url, name)),
                 _ => true,
             };
         }
@@ -1922,6 +1925,7 @@
         print_entries(f, &self.derives, "Derive Macros", "derives")?;
         print_entries(f, &self.functions, "Functions", "functions")?;
         print_entries(f, &self.typedefs, "Typedefs", "typedefs")?;
+        print_entries(f, &self.trait_aliases, "Trait Aliases", "trait-aliases")?;
         print_entries(f, &self.existentials, "Existentials", "existentials")?;
         print_entries(f, &self.statics, "Statics", "statics")?;
         print_entries(f, &self.constants, "Constants", "constants")
@@ -2419,6 +2423,7 @@
             clean::ForeignTypeItem => write!(fmt, "Foreign Type ")?,
             clean::KeywordItem(..) => write!(fmt, "Keyword ")?,
             clean::ExistentialItem(..) => write!(fmt, "Existential Type ")?,
+            clean::TraitAliasItem(..) => write!(fmt, "Trait Alias ")?,
             _ => {
                 // We don't generate pages for any other type.
                 unreachable!();
@@ -2457,6 +2462,7 @@
             clean::ForeignTypeItem => item_foreign_type(fmt, self.cx, self.item),
             clean::KeywordItem(ref k) => item_keyword(fmt, self.cx, self.item, k),
             clean::ExistentialItem(ref e, _) => item_existential(fmt, self.cx, self.item, e),
+            clean::TraitAliasItem(ref ta) => item_trait_alias(fmt, self.cx, self.item, ta),
             _ => {
                 // We don't generate pages for any other type.
                 unreachable!();
@@ -2977,10 +2983,11 @@
            name = it.name.as_ref().unwrap(),
            generics = f.generics,
            where_clause = WhereClause { gens: &f.generics, indent: 0, end_newline: true },
-           decl = Method {
+           decl = Function {
               decl: &f.decl,
               name_len,
               indent: 0,
+              asyncness: f.header.asyncness,
            })?;
     document(w, cx, it)
 }
@@ -3014,23 +3021,17 @@
     Ok(())
 }
 
-fn bounds(t_bounds: &[clean::GenericBound]) -> String {
+fn bounds(t_bounds: &[clean::GenericBound], trait_alias: bool) -> String {
     let mut bounds = String::new();
-    let mut bounds_plain = String::new();
     if !t_bounds.is_empty() {
-        if !bounds.is_empty() {
-            bounds.push(' ');
-            bounds_plain.push(' ');
+        if !trait_alias {
+            bounds.push_str(": ");
         }
-        bounds.push_str(": ");
-        bounds_plain.push_str(": ");
         for (i, p) in t_bounds.iter().enumerate() {
             if i > 0 {
                 bounds.push_str(" + ");
-                bounds_plain.push_str(" + ");
             }
             bounds.push_str(&(*p).to_string());
-            bounds_plain.push_str(&format!("{:#}", *p));
         }
     }
     bounds
@@ -3050,7 +3051,7 @@
     it: &clean::Item,
     t: &clean::Trait,
 ) -> fmt::Result {
-    let bounds = bounds(&t.bounds);
+    let bounds = bounds(&t.bounds, false);
     let types = t.items.iter().filter(|m| m.is_associated_type()).collect::<Vec<_>>();
     let consts = t.items.iter().filter(|m| m.is_associated_const()).collect::<Vec<_>>();
     let required = t.items.iter().filter(|m| m.is_ty_method()).collect::<Vec<_>>();
@@ -3424,10 +3425,11 @@
                href = href,
                name = name,
                generics = *g,
-               decl = Method {
+               decl = Function {
                    decl: d,
                    name_len: head_len,
                    indent,
+                   asyncness: header.asyncness,
                },
                where_clause = WhereClause {
                    gens: g,
@@ -4280,7 +4282,26 @@
            it.name.as_ref().unwrap(),
            t.generics,
            where_clause = WhereClause { gens: &t.generics, indent: 0, end_newline: true },
-           bounds = bounds(&t.bounds))?;
+           bounds = bounds(&t.bounds, false))?;
+
+    document(w, cx, it)?;
+
+    // Render any items associated directly to this alias, as otherwise they
+    // won't be visible anywhere in the docs. It would be nice to also show
+    // associated items from the aliased type (see discussion in #32077), but
+    // we need #14072 to make sense of the generics.
+    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)
+}
+
+fn item_trait_alias(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
+                    t: &clean::TraitAlias) -> fmt::Result {
+    write!(w, "<pre class='rust trait-alias'>")?;
+    render_attributes(w, it)?;
+    write!(w, "trait {}{}{} = {};</pre>",
+           it.name.as_ref().unwrap(),
+           t.generics,
+           WhereClause { gens: &t.generics, indent: 0, end_newline: true },
+           bounds(&t.bounds, true))?;
 
     document(w, cx, it)?;
 
@@ -4844,6 +4865,7 @@
         ItemType::Existential     => ("existentials", "Existentials"),
         ItemType::ProcAttribute   => ("attributes", "Attribute Macros"),
         ItemType::ProcDerive      => ("derives", "Derive Macros"),
+        ItemType::TraitAlias      => ("trait-aliases", "Trait aliases"),
     }
 }
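
`item_trait_alias` renders the alias header as `trait Name<...> = Bounds;` (the new `trait_alias: bool` flag on `bounds` suppresses the leading `: ` used for ordinary trait declarations) and then documents any impls attached directly to the alias. For reference, a nightly-only source item like the sketch below would get a page header of roughly `trait Foo<T> = PartialEq<T> + Clone;`:

    // Nightly-only: trait aliases are still feature-gated.
    #![feature(trait_alias)]

    /// An alias used purely to illustrate the new rustdoc output.
    pub trait Foo<T> = PartialEq<T> + Clone;

    fn main() {}
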
 
diff --git a/src/librustdoc/html/static/main.js b/src/librustdoc/html/static/main.js
index 877ac9a..840f943 100644
--- a/src/librustdoc/html/static/main.js
+++ b/src/librustdoc/html/static/main.js
@@ -68,7 +68,8 @@
                      "keyword",
                      "existential",
                      "attr",
-                     "derive"];
+                     "derive",
+                     "traitalias"];
 
     var search_input = document.getElementsByClassName("search-input")[0];
 
@@ -79,8 +80,6 @@
     // 2 for "In Return Types"
     var currentTab = 0;
 
-    var themesWidth = null;
-
     var titleBeforeSearch = document.title;
 
     function getPageId() {
@@ -240,7 +239,7 @@
         return String.fromCharCode(c);
     }
 
-    function displayHelp(display, ev) {
+    function displayHelp(display, ev, help) {
         if (display === true) {
             if (hasClass(help, "hidden")) {
                 ev.preventDefault();
@@ -258,7 +257,7 @@
         hideModal();
         var search = document.getElementById("search");
         if (hasClass(help, "hidden") === false) {
-            displayHelp(false, ev);
+            displayHelp(false, ev, help);
         } else if (hasClass(search, "hidden") === false) {
             ev.preventDefault();
             addClass(search, "hidden");
@@ -289,7 +288,7 @@
 
             case "s":
             case "S":
-                displayHelp(false, ev);
+                displayHelp(false, ev, help);
                 hideModal();
                 ev.preventDefault();
                 focusSearchBar();
@@ -304,7 +303,7 @@
             case "?":
                 if (ev.shiftKey) {
                     hideModal();
-                    displayHelp(true, ev);
+                    displayHelp(true, ev, help);
                 }
                 break;
             }
@@ -654,7 +653,7 @@
                                 return MAX_LEV_DISTANCE + 1;
                             }
                         }
-                        return lev_distance;//Math.ceil(total / done);
+                        return Math.ceil(total / done);
                     }
                 }
                 return MAX_LEV_DISTANCE + 1;
@@ -1786,6 +1785,7 @@
         block("type", "Type Definitions");
         block("foreigntype", "Foreign Types");
         block("keyword", "Keywords");
+        block("traitalias", "Trait Aliases");
     }
 
     window.initSidebarItems = initSidebarItems;
@@ -2432,7 +2432,7 @@
             // for vertical layout (column-oriented flex layout for divs caused
             // errors in mobile browsers).
             if (e.tagName === "H2" || e.tagName === "H3") {
-                let nextTagName = e.nextElementSibling.tagName;
+                var nextTagName = e.nextElementSibling.tagName;
                 if (nextTagName == "H2" || nextTagName == "H3") {
                     e.nextElementSibling.style.display = "flex";
                 } else {
diff --git a/src/librustdoc/html/static/themes/dark.css b/src/librustdoc/html/static/themes/dark.css
index 6935ecd..333fe76 100644
--- a/src/librustdoc/html/static/themes/dark.css
+++ b/src/librustdoc/html/static/themes/dark.css
@@ -94,6 +94,7 @@
 }
 .content .highlighted a, .content .highlighted span { color: #eee !important; }
 .content .highlighted.trait { background-color: #013191; }
+.content .highlighted.traitalias { background-color: #013191; }
 .content .highlighted.mod,
 .content .highlighted.externcrate { background-color: #afc6e4; }
 .content .highlighted.mod { background-color: #803a1b; }
@@ -128,6 +129,7 @@
 .content span.externcrate,
 .content span.mod, .content a.mod, .block a.current.mod { color: #bda000; }
 .content span.trait, .content a.trait, .block a.current.trait { color: #b78cf2; }
+.content span.traitalias, .content a.traitalias, .block a.current.traitalias { color: #b397da; }
 .content span.fn, .content a.fn, .block a.current.fn,
 .content span.method, .content a.method, .block a.current.method,
 .content span.tymethod, .content a.tymethod, .block a.current.tymethod,
diff --git a/src/librustdoc/html/static/themes/light.css b/src/librustdoc/html/static/themes/light.css
index 306e8dc..19ae67b 100644
--- a/src/librustdoc/html/static/themes/light.css
+++ b/src/librustdoc/html/static/themes/light.css
@@ -96,6 +96,7 @@
 }
 .content .highlighted a, .content .highlighted span { color: #000 !important; }
 .content .highlighted.trait { background-color: #c7b6ff; }
+.content .highlighted.traitalias { background-color: #c7b6ff; }
 .content .highlighted.mod,
 .content .highlighted.externcrate { background-color: #afc6e4; }
 .content .highlighted.enum { background-color: #b4d1b9; }
@@ -128,6 +129,7 @@
 .content span.externcrate,
 .content span.mod, .content a.mod, .block a.current.mod { color: #4d76ae; }
 .content span.trait, .content a.trait, .block a.current.trait { color: #7c5af3; }
+.content span.traitalias, .content a.traitalias, .block a.current.traitalias { color: #6841f1; }
 .content span.fn, .content a.fn, .block a.current.fn,
 .content span.method, .content a.method, .block a.current.method,
 .content span.tymethod, .content a.tymethod, .block a.current.tymethod,
diff --git a/src/librustdoc/passes/mod.rs b/src/librustdoc/passes/mod.rs
index c9a3a2c..5f3da4c 100644
--- a/src/librustdoc/passes/mod.rs
+++ b/src/librustdoc/passes/mod.rs
@@ -224,6 +224,7 @@
             | clean::ConstantItem(..)
             | clean::UnionItem(..)
             | clean::AssociatedConstItem(..)
+            | clean::TraitAliasItem(..)
             | clean::ForeignTypeItem => {
                 if i.def_id.is_local() {
                     if !self.access_levels.is_exported(i.def_id) {
diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs
index b8eb777..352ff78 100644
--- a/src/librustdoc/visit_ast.rs
+++ b/src/librustdoc/visit_ast.rs
@@ -547,8 +547,19 @@
                 };
                 om.traits.push(t);
             },
-            hir::ItemKind::TraitAlias(..) => {
-                unimplemented!("trait objects are not yet implemented")
+            hir::ItemKind::TraitAlias(ref gen, ref b) => {
+                let t = TraitAlias {
+                    name: ident.name,
+                    generics: gen.clone(),
+                    bounds: b.iter().cloned().collect(),
+                    id: item.id,
+                    attrs: item.attrs.clone(),
+                    whence: item.span,
+                    vis: item.vis.clone(),
+                    stab: self.stability(item.id),
+                    depr: self.deprecation(item.id),
+                };
+                om.trait_aliases.push(t);
             },
 
             hir::ItemKind::Impl(unsafety,
diff --git a/src/libserialize/Cargo.toml b/src/libserialize/Cargo.toml
index 3e04081..949af0e 100644
--- a/src/libserialize/Cargo.toml
+++ b/src/libserialize/Cargo.toml
@@ -2,6 +2,7 @@
 authors = ["The Rust Project Developers"]
 name = "serialize"
 version = "0.0.0"
+edition = "2018"
 
 [lib]
 name = "serialize"
diff --git a/src/libserialize/collection_impls.rs b/src/libserialize/collection_impls.rs
index f3afc3b..c0a8fa9 100644
--- a/src/libserialize/collection_impls.rs
+++ b/src/libserialize/collection_impls.rs
@@ -2,7 +2,7 @@
 
 use std::hash::{Hash, BuildHasher};
 
-use {Decodable, Encodable, Decoder, Encoder};
+use crate::{Decodable, Encodable, Decoder, Encoder};
 use std::collections::{LinkedList, VecDeque, BTreeMap, BTreeSet, HashMap, HashSet};
 use std::rc::Rc;
 use std::sync::Arc;
diff --git a/src/libserialize/hex.rs b/src/libserialize/hex.rs
index 6127440..73b9122 100644
--- a/src/libserialize/hex.rs
+++ b/src/libserialize/hex.rs
@@ -60,7 +60,7 @@
 }
 
 impl fmt::Display for FromHexError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
             InvalidHexCharacter(ch, idx) =>
                 write!(f, "Invalid character '{}' at position {}", ch, idx),
@@ -145,8 +145,8 @@
 #[cfg(test)]
 mod tests {
     extern crate test;
-    use self::test::Bencher;
-    use hex::{FromHex, ToHex};
+    use test::Bencher;
+    use crate::hex::{FromHex, ToHex};
 
     #[test]
     pub fn test_to_hex() {
diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs
index 362b457..5b3444b 100644
--- a/src/libserialize/json.rs
+++ b/src/libserialize/json.rs
@@ -199,9 +199,8 @@
 use std::str::FromStr;
 use std::string;
 use std::{char, f64, fmt, str};
-use std;
 
-use Encodable;
+use crate::Encodable;
 
 /// Represents a json value
 #[derive(Clone, PartialEq, PartialOrd, Debug)]
@@ -221,8 +220,8 @@
 
 pub struct PrettyJson<'a> { inner: &'a Json }
 
-pub struct AsJson<'a, T: 'a> { inner: &'a T }
-pub struct AsPrettyJson<'a, T: 'a> { inner: &'a T, indent: Option<usize> }
+pub struct AsJson<'a, T> { inner: &'a T }
+pub struct AsPrettyJson<'a, T> { inner: &'a T, indent: Option<usize> }
 
 /// The errors that can arise while parsing a JSON stream.
 #[derive(Clone, Copy, PartialEq, Debug)]
@@ -295,18 +294,18 @@
 }
 
 /// Shortcut function to decode a JSON `&str` into an object
-pub fn decode<T: ::Decodable>(s: &str) -> DecodeResult<T> {
+pub fn decode<T: crate::Decodable>(s: &str) -> DecodeResult<T> {
     let json = match from_str(s) {
         Ok(x) => x,
         Err(e) => return Err(ParseError(e))
     };
 
     let mut decoder = Decoder::new(json);
-    ::Decodable::decode(&mut decoder)
+    crate::Decodable::decode(&mut decoder)
 }
 
 /// Shortcut function to encode a `T` into a JSON `String`
-pub fn encode<T: ::Encodable>(object: &T) -> Result<string::String, EncoderError> {
+pub fn encode<T: crate::Encodable>(object: &T) -> Result<string::String, EncoderError> {
     let mut s = String::new();
     {
         let mut encoder = Encoder::new(&mut s);
@@ -316,7 +315,7 @@
 }
 
 impl fmt::Display for ErrorCode {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         error_str(*self).fmt(f)
     }
 }
@@ -326,14 +325,14 @@
 }
 
 impl fmt::Display for ParserError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         // FIXME this should be a nicer error
         fmt::Debug::fmt(self, f)
     }
 }
 
 impl fmt::Display for DecoderError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         // FIXME this should be a nicer error
         fmt::Debug::fmt(self, f)
     }
@@ -344,7 +343,7 @@
 }
 
 impl fmt::Display for EncoderError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         // FIXME this should be a nicer error
         fmt::Debug::fmt(self, f)
     }
@@ -477,7 +476,7 @@
     })
 }
 
-impl<'a> ::Encoder for Encoder<'a> {
+impl<'a> crate::Encoder for Encoder<'a> {
     type Error = EncoderError;
 
     fn emit_unit(&mut self) -> EncodeResult {
@@ -727,7 +726,7 @@
     }
 }
 
-impl<'a> ::Encoder for PrettyEncoder<'a> {
+impl<'a> crate::Encoder for PrettyEncoder<'a> {
     type Error = EncoderError;
 
     fn emit_unit(&mut self) -> EncodeResult {
@@ -997,7 +996,7 @@
 }
 
 impl Encodable for Json {
-    fn encode<E: ::Encoder>(&self, e: &mut E) -> Result<(), E::Error> {
+    fn encode<E: crate::Encoder>(&self, e: &mut E) -> Result<(), E::Error> {
         match *self {
             Json::I64(v) => v.encode(e),
             Json::U64(v) => v.encode(e),
@@ -1013,20 +1012,20 @@
 
 /// Create an `AsJson` wrapper which can be used to print a value as JSON
 /// on-the-fly via `write!`
-pub fn as_json<T>(t: &T) -> AsJson<T> {
+pub fn as_json<T>(t: &T) -> AsJson<'_, T> {
     AsJson { inner: t }
 }
 
 /// Create an `AsPrettyJson` wrapper which can be used to print a value as JSON
 /// on-the-fly via `write!`
-pub fn as_pretty_json<T>(t: &T) -> AsPrettyJson<T> {
+pub fn as_pretty_json<T>(t: &T) -> AsPrettyJson<'_, T> {
     AsPrettyJson { inner: t, indent: None }
 }
 
 impl Json {
     /// Borrow this json object as a pretty object to generate a pretty
     /// representation for it via `Display`.
-    pub fn pretty(&self) -> PrettyJson {
+    pub fn pretty(&self) -> PrettyJson<'_> {
         PrettyJson { inner: self }
     }
 
@@ -1300,7 +1299,7 @@
     /// Provides access to the StackElement at a given index.
     /// lower indices are at the bottom of the stack while higher indices are
     /// at the top.
-    pub fn get(&self, idx: usize) -> StackElement {
+    pub fn get(&self, idx: usize) -> StackElement<'_> {
         match self.stack[idx] {
             InternalIndex(i) => StackElement::Index(i),
             InternalKey(start, size) => {
@@ -1311,8 +1310,8 @@
         }
     }
 
-    /// Compares this stack with an array of StackElements.
-    pub fn is_equal_to(&self, rhs: &[StackElement]) -> bool {
+    /// Compares this stack with an array of `StackElement`s.
+    pub fn is_equal_to(&self, rhs: &[StackElement<'_>]) -> bool {
         if self.stack.len() != rhs.len() { return false; }
         for (i, r) in rhs.iter().enumerate() {
             if self.get(i) != *r { return false; }
@@ -1322,7 +1321,7 @@
 
     /// Returns true if the bottom-most elements of this stack are the same as
     /// the ones passed as parameter.
-    pub fn starts_with(&self, rhs: &[StackElement]) -> bool {
+    pub fn starts_with(&self, rhs: &[StackElement<'_>]) -> bool {
         if self.stack.len() < rhs.len() { return false; }
         for (i, r) in rhs.iter().enumerate() {
             if self.get(i) != *r { return false; }
@@ -1332,7 +1331,7 @@
 
     /// Returns true if the top-most elements of this stack are the same as
     /// the ones passed as parameter.
-    pub fn ends_with(&self, rhs: &[StackElement]) -> bool {
+    pub fn ends_with(&self, rhs: &[StackElement<'_>]) -> bool {
         if self.stack.len() < rhs.len() { return false; }
         let offset = self.stack.len() - rhs.len();
         for (i, r) in rhs.iter().enumerate() {
@@ -1342,7 +1341,7 @@
     }
 
     /// Returns the top-most element (if any).
-    pub fn top(&self) -> Option<StackElement> {
+    pub fn top(&self) -> Option<StackElement<'_>> {
         match self.stack.last() {
             None => None,
             Some(&InternalIndex(i)) => Some(StackElement::Index(i)),
@@ -2115,7 +2114,7 @@
     }
 }
 
-impl ::Decoder for Decoder {
+impl crate::Decoder for Decoder {
     type Error = DecoderError;
 
     fn read_nil(&mut self) -> DecodeResult<()> {
@@ -2172,7 +2171,7 @@
         Err(ExpectedError("single character string".to_owned(), s.to_string()))
     }
 
-    fn read_str(&mut self) -> DecodeResult<Cow<str>> {
+    fn read_str(&mut self) -> DecodeResult<Cow<'_, str>> {
         expect!(self.pop(), String).map(Cow::Owned)
     }
 
@@ -2518,7 +2517,7 @@
 
 impl fmt::Display for Json {
     /// Encodes a json value into a string
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let mut shim = FormatShim { inner: f };
         let mut encoder = Encoder::new(&mut shim);
         match self.encode(&mut encoder) {
@@ -2530,7 +2529,7 @@
 
 impl<'a> fmt::Display for PrettyJson<'a> {
     /// Encodes a json value into a string
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let mut shim = FormatShim { inner: f };
         let mut encoder = PrettyEncoder::new(&mut shim);
         match self.inner.encode(&mut encoder) {
@@ -2542,7 +2541,7 @@
 
 impl<'a, T: Encodable> fmt::Display for AsJson<'a, T> {
     /// Encodes a json value into a string
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let mut shim = FormatShim { inner: f };
         let mut encoder = Encoder::new(&mut shim);
         match self.inner.encode(&mut encoder) {
@@ -2562,7 +2561,7 @@
 
 impl<'a, T: Encodable> fmt::Display for AsPrettyJson<'a, T> {
     /// Encodes a json value into a string
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let mut shim = FormatShim { inner: f };
         let mut encoder = PrettyEncoder::new(&mut shim);
         if let Some(n) = self.indent {
@@ -2584,1220 +2583,13 @@
 
 #[cfg(test)]
 mod tests {
+    // Benchmarks and tests that require private items
+
     extern crate test;
-    use self::Animal::*;
-    use self::test::Bencher;
-    use {Encodable, Decodable};
-    use super::Json::*;
-    use super::ErrorCode::*;
-    use super::ParserError::*;
-    use super::DecoderError::*;
-    use super::JsonEvent::*;
-    use super::{Json, from_str, DecodeResult, DecoderError, JsonEvent, Parser,
-                StackElement, Stack, Decoder, Encoder, EncoderError};
-    use std::{i64, u64, f32, f64};
-    use std::io::prelude::*;
-    use std::collections::BTreeMap;
+    use test::Bencher;
+    use super::{from_str, Parser, StackElement, Stack};
     use std::string;
 
-    #[derive(RustcDecodable, Eq, PartialEq, Debug)]
-    struct OptionData {
-        opt: Option<usize>,
-    }
-
-    #[test]
-    fn test_decode_option_none() {
-        let s ="{}";
-        let obj: OptionData = super::decode(s).unwrap();
-        assert_eq!(obj, OptionData { opt: None });
-    }
-
-    #[test]
-    fn test_decode_option_some() {
-        let s = "{ \"opt\": 10 }";
-        let obj: OptionData = super::decode(s).unwrap();
-        assert_eq!(obj, OptionData { opt: Some(10) });
-    }
-
-    #[test]
-    fn test_decode_option_malformed() {
-        check_err::<OptionData>("{ \"opt\": [] }",
-                                ExpectedError("Number".to_string(), "[]".to_string()));
-        check_err::<OptionData>("{ \"opt\": false }",
-                                ExpectedError("Number".to_string(), "false".to_string()));
-    }
-
-    #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
-    enum Animal {
-        Dog,
-        Frog(string::String, isize)
-    }
-
-    #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
-    struct Inner {
-        a: (),
-        b: usize,
-        c: Vec<string::String>,
-    }
-
-    #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
-    struct Outer {
-        inner: Vec<Inner>,
-    }
-
-    fn mk_object(items: &[(string::String, Json)]) -> Json {
-        let mut d = BTreeMap::new();
-
-        for item in items {
-            match *item {
-                (ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); },
-            }
-        };
-
-        Object(d)
-    }
-
-    #[test]
-    fn test_from_str_trait() {
-        let s = "null";
-        assert!(s.parse::<Json>().unwrap() == s.parse().unwrap());
-    }
-
-    #[test]
-    fn test_write_null() {
-        assert_eq!(Null.to_string(), "null");
-        assert_eq!(Null.pretty().to_string(), "null");
-    }
-
-    #[test]
-    fn test_write_i64() {
-        assert_eq!(U64(0).to_string(), "0");
-        assert_eq!(U64(0).pretty().to_string(), "0");
-
-        assert_eq!(U64(1234).to_string(), "1234");
-        assert_eq!(U64(1234).pretty().to_string(), "1234");
-
-        assert_eq!(I64(-5678).to_string(), "-5678");
-        assert_eq!(I64(-5678).pretty().to_string(), "-5678");
-
-        assert_eq!(U64(7650007200025252000).to_string(), "7650007200025252000");
-        assert_eq!(U64(7650007200025252000).pretty().to_string(), "7650007200025252000");
-    }
-
-    #[test]
-    fn test_write_f64() {
-        assert_eq!(F64(3.0).to_string(), "3.0");
-        assert_eq!(F64(3.0).pretty().to_string(), "3.0");
-
-        assert_eq!(F64(3.1).to_string(), "3.1");
-        assert_eq!(F64(3.1).pretty().to_string(), "3.1");
-
-        assert_eq!(F64(-1.5).to_string(), "-1.5");
-        assert_eq!(F64(-1.5).pretty().to_string(), "-1.5");
-
-        assert_eq!(F64(0.5).to_string(), "0.5");
-        assert_eq!(F64(0.5).pretty().to_string(), "0.5");
-
-        assert_eq!(F64(f64::NAN).to_string(), "null");
-        assert_eq!(F64(f64::NAN).pretty().to_string(), "null");
-
-        assert_eq!(F64(f64::INFINITY).to_string(), "null");
-        assert_eq!(F64(f64::INFINITY).pretty().to_string(), "null");
-
-        assert_eq!(F64(f64::NEG_INFINITY).to_string(), "null");
-        assert_eq!(F64(f64::NEG_INFINITY).pretty().to_string(), "null");
-    }
-
-    #[test]
-    fn test_write_str() {
-        assert_eq!(String("".to_string()).to_string(), "\"\"");
-        assert_eq!(String("".to_string()).pretty().to_string(), "\"\"");
-
-        assert_eq!(String("homura".to_string()).to_string(), "\"homura\"");
-        assert_eq!(String("madoka".to_string()).pretty().to_string(), "\"madoka\"");
-    }
-
-    #[test]
-    fn test_write_bool() {
-        assert_eq!(Boolean(true).to_string(), "true");
-        assert_eq!(Boolean(true).pretty().to_string(), "true");
-
-        assert_eq!(Boolean(false).to_string(), "false");
-        assert_eq!(Boolean(false).pretty().to_string(), "false");
-    }
-
-    #[test]
-    fn test_write_array() {
-        assert_eq!(Array(vec![]).to_string(), "[]");
-        assert_eq!(Array(vec![]).pretty().to_string(), "[]");
-
-        assert_eq!(Array(vec![Boolean(true)]).to_string(), "[true]");
-        assert_eq!(
-            Array(vec![Boolean(true)]).pretty().to_string(),
-            "\
-            [\n  \
-                true\n\
-            ]"
-        );
-
-        let long_test_array = Array(vec![
-            Boolean(false),
-            Null,
-            Array(vec![String("foo\nbar".to_string()), F64(3.5)])]);
-
-        assert_eq!(long_test_array.to_string(),
-            "[false,null,[\"foo\\nbar\",3.5]]");
-        assert_eq!(
-            long_test_array.pretty().to_string(),
-            "\
-            [\n  \
-                false,\n  \
-                null,\n  \
-                [\n    \
-                    \"foo\\nbar\",\n    \
-                    3.5\n  \
-                ]\n\
-            ]"
-        );
-    }
-
-    #[test]
-    fn test_write_object() {
-        assert_eq!(mk_object(&[]).to_string(), "{}");
-        assert_eq!(mk_object(&[]).pretty().to_string(), "{}");
-
-        assert_eq!(
-            mk_object(&[
-                ("a".to_string(), Boolean(true))
-            ]).to_string(),
-            "{\"a\":true}"
-        );
-        assert_eq!(
-            mk_object(&[("a".to_string(), Boolean(true))]).pretty().to_string(),
-            "\
-            {\n  \
-                \"a\": true\n\
-            }"
-        );
-
-        let complex_obj = mk_object(&[
-                ("b".to_string(), Array(vec![
-                    mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
-                    mk_object(&[("d".to_string(), String("".to_string()))])
-                ]))
-            ]);
-
-        assert_eq!(
-            complex_obj.to_string(),
-            "{\
-                \"b\":[\
-                    {\"c\":\"\\f\\r\"},\
-                    {\"d\":\"\"}\
-                ]\
-            }"
-        );
-        assert_eq!(
-            complex_obj.pretty().to_string(),
-            "\
-            {\n  \
-                \"b\": [\n    \
-                    {\n      \
-                        \"c\": \"\\f\\r\"\n    \
-                    },\n    \
-                    {\n      \
-                        \"d\": \"\"\n    \
-                    }\n  \
-                ]\n\
-            }"
-        );
-
-        let a = mk_object(&[
-            ("a".to_string(), Boolean(true)),
-            ("b".to_string(), Array(vec![
-                mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
-                mk_object(&[("d".to_string(), String("".to_string()))])
-            ]))
-        ]);
-
-        // We can't compare the strings directly because the object fields be
-        // printed in a different order.
-        assert_eq!(a.clone(), a.to_string().parse().unwrap());
-        assert_eq!(a.clone(), a.pretty().to_string().parse().unwrap());
-    }
-
-    #[test]
-    fn test_write_enum() {
-        let animal = Dog;
-        assert_eq!(
-            super::as_json(&animal).to_string(),
-            "\"Dog\""
-        );
-        assert_eq!(
-            super::as_pretty_json(&animal).to_string(),
-            "\"Dog\""
-        );
-
-        let animal = Frog("Henry".to_string(), 349);
-        assert_eq!(
-            super::as_json(&animal).to_string(),
-            "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"
-        );
-        assert_eq!(
-            super::as_pretty_json(&animal).to_string(),
-            "{\n  \
-               \"variant\": \"Frog\",\n  \
-               \"fields\": [\n    \
-                 \"Henry\",\n    \
-                 349\n  \
-               ]\n\
-             }"
-        );
-    }
-
-    macro_rules! check_encoder_for_simple {
-        ($value:expr, $expected:expr) => ({
-            let s = super::as_json(&$value).to_string();
-            assert_eq!(s, $expected);
-
-            let s = super::as_pretty_json(&$value).to_string();
-            assert_eq!(s, $expected);
-        })
-    }
-
-    #[test]
-    fn test_write_some() {
-        check_encoder_for_simple!(Some("jodhpurs".to_string()), "\"jodhpurs\"");
-    }
-
-    #[test]
-    fn test_write_none() {
-        check_encoder_for_simple!(None::<string::String>, "null");
-    }
-
-    #[test]
-    fn test_write_char() {
-        check_encoder_for_simple!('a', "\"a\"");
-        check_encoder_for_simple!('\t', "\"\\t\"");
-        check_encoder_for_simple!('\u{0000}', "\"\\u0000\"");
-        check_encoder_for_simple!('\u{001b}', "\"\\u001b\"");
-        check_encoder_for_simple!('\u{007f}', "\"\\u007f\"");
-        check_encoder_for_simple!('\u{00a0}', "\"\u{00a0}\"");
-        check_encoder_for_simple!('\u{abcd}', "\"\u{abcd}\"");
-        check_encoder_for_simple!('\u{10ffff}', "\"\u{10ffff}\"");
-    }
-
-    #[test]
-    fn test_trailing_characters() {
-        assert_eq!(from_str("nulla"),  Err(SyntaxError(TrailingCharacters, 1, 5)));
-        assert_eq!(from_str("truea"),  Err(SyntaxError(TrailingCharacters, 1, 5)));
-        assert_eq!(from_str("falsea"), Err(SyntaxError(TrailingCharacters, 1, 6)));
-        assert_eq!(from_str("1a"),     Err(SyntaxError(TrailingCharacters, 1, 2)));
-        assert_eq!(from_str("[]a"),    Err(SyntaxError(TrailingCharacters, 1, 3)));
-        assert_eq!(from_str("{}a"),    Err(SyntaxError(TrailingCharacters, 1, 3)));
-    }
-
-    #[test]
-    fn test_read_identifiers() {
-        assert_eq!(from_str("n"),    Err(SyntaxError(InvalidSyntax, 1, 2)));
-        assert_eq!(from_str("nul"),  Err(SyntaxError(InvalidSyntax, 1, 4)));
-        assert_eq!(from_str("t"),    Err(SyntaxError(InvalidSyntax, 1, 2)));
-        assert_eq!(from_str("truz"), Err(SyntaxError(InvalidSyntax, 1, 4)));
-        assert_eq!(from_str("f"),    Err(SyntaxError(InvalidSyntax, 1, 2)));
-        assert_eq!(from_str("faz"),  Err(SyntaxError(InvalidSyntax, 1, 3)));
-
-        assert_eq!(from_str("null"), Ok(Null));
-        assert_eq!(from_str("true"), Ok(Boolean(true)));
-        assert_eq!(from_str("false"), Ok(Boolean(false)));
-        assert_eq!(from_str(" null "), Ok(Null));
-        assert_eq!(from_str(" true "), Ok(Boolean(true)));
-        assert_eq!(from_str(" false "), Ok(Boolean(false)));
-    }
-
-    #[test]
-    fn test_decode_identifiers() {
-        let v: () = super::decode("null").unwrap();
-        assert_eq!(v, ());
-
-        let v: bool = super::decode("true").unwrap();
-        assert_eq!(v, true);
-
-        let v: bool = super::decode("false").unwrap();
-        assert_eq!(v, false);
-    }
-
-    #[test]
-    fn test_read_number() {
-        assert_eq!(from_str("+"),   Err(SyntaxError(InvalidSyntax, 1, 1)));
-        assert_eq!(from_str("."),   Err(SyntaxError(InvalidSyntax, 1, 1)));
-        assert_eq!(from_str("NaN"), Err(SyntaxError(InvalidSyntax, 1, 1)));
-        assert_eq!(from_str("-"),   Err(SyntaxError(InvalidNumber, 1, 2)));
-        assert_eq!(from_str("00"),  Err(SyntaxError(InvalidNumber, 1, 2)));
-        assert_eq!(from_str("1."),  Err(SyntaxError(InvalidNumber, 1, 3)));
-        assert_eq!(from_str("1e"),  Err(SyntaxError(InvalidNumber, 1, 3)));
-        assert_eq!(from_str("1e+"), Err(SyntaxError(InvalidNumber, 1, 4)));
-
-        assert_eq!(from_str("18446744073709551616"), Err(SyntaxError(InvalidNumber, 1, 20)));
-        assert_eq!(from_str("-9223372036854775809"), Err(SyntaxError(InvalidNumber, 1, 21)));
-
-        assert_eq!(from_str("3"), Ok(U64(3)));
-        assert_eq!(from_str("3.1"), Ok(F64(3.1)));
-        assert_eq!(from_str("-1.2"), Ok(F64(-1.2)));
-        assert_eq!(from_str("0.4"), Ok(F64(0.4)));
-        assert_eq!(from_str("0.4e5"), Ok(F64(0.4e5)));
-        assert_eq!(from_str("0.4e+15"), Ok(F64(0.4e15)));
-        assert_eq!(from_str("0.4e-01"), Ok(F64(0.4e-01)));
-        assert_eq!(from_str(" 3 "), Ok(U64(3)));
-
-        assert_eq!(from_str("-9223372036854775808"), Ok(I64(i64::MIN)));
-        assert_eq!(from_str("9223372036854775807"), Ok(U64(i64::MAX as u64)));
-        assert_eq!(from_str("18446744073709551615"), Ok(U64(u64::MAX)));
-    }
-
-    #[test]
-    fn test_decode_numbers() {
-        let v: f64 = super::decode("3").unwrap();
-        assert_eq!(v, 3.0);
-
-        let v: f64 = super::decode("3.1").unwrap();
-        assert_eq!(v, 3.1);
-
-        let v: f64 = super::decode("-1.2").unwrap();
-        assert_eq!(v, -1.2);
-
-        let v: f64 = super::decode("0.4").unwrap();
-        assert_eq!(v, 0.4);
-
-        let v: f64 = super::decode("0.4e5").unwrap();
-        assert_eq!(v, 0.4e5);
-
-        let v: f64 = super::decode("0.4e15").unwrap();
-        assert_eq!(v, 0.4e15);
-
-        let v: f64 = super::decode("0.4e-01").unwrap();
-        assert_eq!(v, 0.4e-01);
-
-        let v: u64 = super::decode("0").unwrap();
-        assert_eq!(v, 0);
-
-        let v: u64 = super::decode("18446744073709551615").unwrap();
-        assert_eq!(v, u64::MAX);
-
-        let v: i64 = super::decode("-9223372036854775808").unwrap();
-        assert_eq!(v, i64::MIN);
-
-        let v: i64 = super::decode("9223372036854775807").unwrap();
-        assert_eq!(v, i64::MAX);
-
-        let res: DecodeResult<i64> = super::decode("765.25");
-        assert_eq!(res, Err(ExpectedError("Integer".to_string(),
-                                          "765.25".to_string())));
-    }
-
-    #[test]
-    fn test_read_str() {
-        assert_eq!(from_str("\""),    Err(SyntaxError(EOFWhileParsingString, 1, 2)));
-        assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5)));
-
-        assert_eq!(from_str("\"\""), Ok(String("".to_string())));
-        assert_eq!(from_str("\"foo\""), Ok(String("foo".to_string())));
-        assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_string())));
-        assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_string())));
-        assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_string())));
-        assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_string())));
-        assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_string())));
-        assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_string())));
-        assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".to_string())));
-        assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".to_string())));
-    }
-
-    #[test]
-    fn test_decode_str() {
-        let s = [("\"\"", ""),
-                 ("\"foo\"", "foo"),
-                 ("\"\\\"\"", "\""),
-                 ("\"\\b\"", "\x08"),
-                 ("\"\\n\"", "\n"),
-                 ("\"\\r\"", "\r"),
-                 ("\"\\t\"", "\t"),
-                 ("\"\\u12ab\"", "\u{12ab}"),
-                 ("\"\\uAB12\"", "\u{AB12}")];
-
-        for &(i, o) in &s {
-            let v: string::String = super::decode(i).unwrap();
-            assert_eq!(v, o);
-        }
-    }
-
-    #[test]
-    fn test_read_array() {
-        assert_eq!(from_str("["),     Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
-        assert_eq!(from_str("[1"),    Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
-        assert_eq!(from_str("[1,"),   Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
-        assert_eq!(from_str("[1,]"),  Err(SyntaxError(InvalidSyntax,        1, 4)));
-        assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax,        1, 4)));
-
-        assert_eq!(from_str("[]"), Ok(Array(vec![])));
-        assert_eq!(from_str("[ ]"), Ok(Array(vec![])));
-        assert_eq!(from_str("[true]"), Ok(Array(vec![Boolean(true)])));
-        assert_eq!(from_str("[ false ]"), Ok(Array(vec![Boolean(false)])));
-        assert_eq!(from_str("[null]"), Ok(Array(vec![Null])));
-        assert_eq!(from_str("[3, 1]"),
-                     Ok(Array(vec![U64(3), U64(1)])));
-        assert_eq!(from_str("\n[3, 2]\n"),
-                     Ok(Array(vec![U64(3), U64(2)])));
-        assert_eq!(from_str("[2, [4, 1]]"),
-               Ok(Array(vec![U64(2), Array(vec![U64(4), U64(1)])])));
-    }
-
-    #[test]
-    fn test_decode_array() {
-        let v: Vec<()> = super::decode("[]").unwrap();
-        assert_eq!(v, []);
-
-        let v: Vec<()> = super::decode("[null]").unwrap();
-        assert_eq!(v, [()]);
-
-        let v: Vec<bool> = super::decode("[true]").unwrap();
-        assert_eq!(v, [true]);
-
-        let v: Vec<isize> = super::decode("[3, 1]").unwrap();
-        assert_eq!(v, [3, 1]);
-
-        let v: Vec<Vec<usize>> = super::decode("[[3], [1, 2]]").unwrap();
-        assert_eq!(v, [vec![3], vec![1, 2]]);
-    }
-
-    #[test]
-    fn test_decode_tuple() {
-        let t: (usize, usize, usize) = super::decode("[1, 2, 3]").unwrap();
-        assert_eq!(t, (1, 2, 3));
-
-        let t: (usize, string::String) = super::decode("[1, \"two\"]").unwrap();
-        assert_eq!(t, (1, "two".to_string()));
-    }
-
-    #[test]
-    fn test_decode_tuple_malformed_types() {
-        assert!(super::decode::<(usize, string::String)>("[1, 2]").is_err());
-    }
-
-    #[test]
-    fn test_decode_tuple_malformed_length() {
-        assert!(super::decode::<(usize, usize)>("[1, 2, 3]").is_err());
-    }
-
-    #[test]
-    fn test_read_object() {
-        assert_eq!(from_str("{"),       Err(SyntaxError(EOFWhileParsingObject, 1, 2)));
-        assert_eq!(from_str("{ "),      Err(SyntaxError(EOFWhileParsingObject, 1, 3)));
-        assert_eq!(from_str("{1"),      Err(SyntaxError(KeyMustBeAString,      1, 2)));
-        assert_eq!(from_str("{ \"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
-        assert_eq!(from_str("{\"a\""),  Err(SyntaxError(EOFWhileParsingObject, 1, 5)));
-        assert_eq!(from_str("{\"a\" "), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
-
-        assert_eq!(from_str("{\"a\" 1"),   Err(SyntaxError(ExpectedColon,         1, 6)));
-        assert_eq!(from_str("{\"a\":"),    Err(SyntaxError(EOFWhileParsingValue,  1, 6)));
-        assert_eq!(from_str("{\"a\":1"),   Err(SyntaxError(EOFWhileParsingObject, 1, 7)));
-        assert_eq!(from_str("{\"a\":1 1"), Err(SyntaxError(InvalidSyntax,         1, 8)));
-        assert_eq!(from_str("{\"a\":1,"),  Err(SyntaxError(EOFWhileParsingObject, 1, 8)));
-
-        assert_eq!(from_str("{}").unwrap(), mk_object(&[]));
-        assert_eq!(from_str("{\"a\": 3}").unwrap(),
-                  mk_object(&[("a".to_string(), U64(3))]));
-
-        assert_eq!(from_str(
-                      "{ \"a\": null, \"b\" : true }").unwrap(),
-                  mk_object(&[
-                      ("a".to_string(), Null),
-                      ("b".to_string(), Boolean(true))]));
-        assert_eq!(from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(),
-                  mk_object(&[
-                      ("a".to_string(), Null),
-                      ("b".to_string(), Boolean(true))]));
-        assert_eq!(from_str(
-                      "{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(),
-                  mk_object(&[
-                      ("a".to_string(), F64(1.0)),
-                      ("b".to_string(), Array(vec![Boolean(true)]))
-                  ]));
-        assert_eq!(from_str(
-                      "{\
-                          \"a\": 1.0, \
-                          \"b\": [\
-                              true,\
-                              \"foo\\nbar\", \
-                              { \"c\": {\"d\": null} } \
-                          ]\
-                      }").unwrap(),
-                  mk_object(&[
-                      ("a".to_string(), F64(1.0)),
-                      ("b".to_string(), Array(vec![
-                          Boolean(true),
-                          String("foo\nbar".to_string()),
-                          mk_object(&[
-                              ("c".to_string(), mk_object(&[("d".to_string(), Null)]))
-                          ])
-                      ]))
-                  ]));
-    }
-
-    #[test]
-    fn test_decode_struct() {
-        let s = "{
-            \"inner\": [
-                { \"a\": null, \"b\": 2, \"c\": [\"abc\", \"xyz\"] }
-            ]
-        }";
-
-        let v: Outer = super::decode(s).unwrap();
-        assert_eq!(
-            v,
-            Outer {
-                inner: vec![
-                    Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] }
-                ]
-            }
-        );
-    }
-
-    #[derive(RustcDecodable)]
-    struct FloatStruct {
-        f: f64,
-        a: Vec<f64>
-    }
-    #[test]
-    fn test_decode_struct_with_nan() {
-        let s = "{\"f\":null,\"a\":[null,123]}";
-        let obj: FloatStruct = super::decode(s).unwrap();
-        assert!(obj.f.is_nan());
-        assert!(obj.a[0].is_nan());
-        assert_eq!(obj.a[1], 123f64);
-    }
-
-    #[test]
-    fn test_decode_option() {
-        let value: Option<string::String> = super::decode("null").unwrap();
-        assert_eq!(value, None);
-
-        let value: Option<string::String> = super::decode("\"jodhpurs\"").unwrap();
-        assert_eq!(value, Some("jodhpurs".to_string()));
-    }
-
-    #[test]
-    fn test_decode_enum() {
-        let value: Animal = super::decode("\"Dog\"").unwrap();
-        assert_eq!(value, Dog);
-
-        let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}";
-        let value: Animal = super::decode(s).unwrap();
-        assert_eq!(value, Frog("Henry".to_string(), 349));
-    }
-
-    #[test]
-    fn test_decode_map() {
-        let s = "{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\
-                  \"fields\":[\"Henry\", 349]}}";
-        let mut map: BTreeMap<string::String, Animal> = super::decode(s).unwrap();
-
-        assert_eq!(map.remove(&"a".to_string()), Some(Dog));
-        assert_eq!(map.remove(&"b".to_string()), Some(Frog("Henry".to_string(), 349)));
-    }
-
-    #[test]
-    fn test_multiline_errors() {
-        assert_eq!(from_str("{\n  \"foo\":\n \"bar\""),
-            Err(SyntaxError(EOFWhileParsingObject, 3, 8)));
-    }
-
-    #[derive(RustcDecodable)]
-    #[allow(dead_code)]
-    struct DecodeStruct {
-        x: f64,
-        y: bool,
-        z: string::String,
-        w: Vec<DecodeStruct>
-    }
-    #[derive(RustcDecodable)]
-    enum DecodeEnum {
-        A(f64),
-        B(string::String)
-    }
-    fn check_err<T: Decodable>(to_parse: &'static str, expected: DecoderError) {
-        let res: DecodeResult<T> = match from_str(to_parse) {
-            Err(e) => Err(ParseError(e)),
-            Ok(json) => Decodable::decode(&mut Decoder::new(json))
-        };
-        match res {
-            Ok(_) => panic!("`{:?}` parsed & decoded ok, expecting error `{:?}`",
-                              to_parse, expected),
-            Err(ParseError(e)) => panic!("`{:?}` is not valid json: {:?}",
-                                           to_parse, e),
-            Err(e) => {
-                assert_eq!(e, expected);
-            }
-        }
-    }
-    #[test]
-    fn test_decode_errors_struct() {
-        check_err::<DecodeStruct>("[]", ExpectedError("Object".to_string(), "[]".to_string()));
-        check_err::<DecodeStruct>("{\"x\": true, \"y\": true, \"z\": \"\", \"w\": []}",
-                                  ExpectedError("Number".to_string(), "true".to_string()));
-        check_err::<DecodeStruct>("{\"x\": 1, \"y\": [], \"z\": \"\", \"w\": []}",
-                                  ExpectedError("Boolean".to_string(), "[]".to_string()));
-        check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": {}, \"w\": []}",
-                                  ExpectedError("String".to_string(), "{}".to_string()));
-        check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\", \"w\": null}",
-                                  ExpectedError("Array".to_string(), "null".to_string()));
-        check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\"}",
-                                  MissingFieldError("w".to_string()));
-    }
-    #[test]
-    fn test_decode_errors_enum() {
-        check_err::<DecodeEnum>("{}",
-                                MissingFieldError("variant".to_string()));
-        check_err::<DecodeEnum>("{\"variant\": 1}",
-                                ExpectedError("String".to_string(), "1".to_string()));
-        check_err::<DecodeEnum>("{\"variant\": \"A\"}",
-                                MissingFieldError("fields".to_string()));
-        check_err::<DecodeEnum>("{\"variant\": \"A\", \"fields\": null}",
-                                ExpectedError("Array".to_string(), "null".to_string()));
-        check_err::<DecodeEnum>("{\"variant\": \"C\", \"fields\": []}",
-                                UnknownVariantError("C".to_string()));
-    }
-
-    #[test]
-    fn test_find(){
-        let json_value = from_str("{\"dog\" : \"cat\"}").unwrap();
-        let found_str = json_value.find("dog");
-        assert!(found_str.unwrap().as_string().unwrap() == "cat");
-    }
-
-    #[test]
-    fn test_find_path(){
-        let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
-        let found_str = json_value.find_path(&["dog", "cat", "mouse"]);
-        assert!(found_str.unwrap().as_string().unwrap() == "cheese");
-    }
-
-    #[test]
-    fn test_search(){
-        let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
-        let found_str = json_value.search("mouse").and_then(|j| j.as_string());
-        assert!(found_str.unwrap() == "cheese");
-    }
-
-    #[test]
-    fn test_index(){
-        let json_value = from_str("{\"animals\":[\"dog\",\"cat\",\"mouse\"]}").unwrap();
-        let ref array = json_value["animals"];
-        assert_eq!(array[0].as_string().unwrap(), "dog");
-        assert_eq!(array[1].as_string().unwrap(), "cat");
-        assert_eq!(array[2].as_string().unwrap(), "mouse");
-    }
-
-    #[test]
-    fn test_is_object(){
-        let json_value = from_str("{}").unwrap();
-        assert!(json_value.is_object());
-    }
-
-    #[test]
-    fn test_as_object(){
-        let json_value = from_str("{}").unwrap();
-        let json_object = json_value.as_object();
-        assert!(json_object.is_some());
-    }
-
-    #[test]
-    fn test_is_array(){
-        let json_value = from_str("[1, 2, 3]").unwrap();
-        assert!(json_value.is_array());
-    }
-
-    #[test]
-    fn test_as_array(){
-        let json_value = from_str("[1, 2, 3]").unwrap();
-        let json_array = json_value.as_array();
-        let expected_length = 3;
-        assert!(json_array.is_some() && json_array.unwrap().len() == expected_length);
-    }
-
-    #[test]
-    fn test_is_string(){
-        let json_value = from_str("\"dog\"").unwrap();
-        assert!(json_value.is_string());
-    }
-
-    #[test]
-    fn test_as_string(){
-        let json_value = from_str("\"dog\"").unwrap();
-        let json_str = json_value.as_string();
-        let expected_str = "dog";
-        assert_eq!(json_str, Some(expected_str));
-    }
-
-    #[test]
-    fn test_is_number(){
-        let json_value = from_str("12").unwrap();
-        assert!(json_value.is_number());
-    }
-
-    #[test]
-    fn test_is_i64(){
-        let json_value = from_str("-12").unwrap();
-        assert!(json_value.is_i64());
-
-        let json_value = from_str("12").unwrap();
-        assert!(!json_value.is_i64());
-
-        let json_value = from_str("12.0").unwrap();
-        assert!(!json_value.is_i64());
-    }
-
-    #[test]
-    fn test_is_u64(){
-        let json_value = from_str("12").unwrap();
-        assert!(json_value.is_u64());
-
-        let json_value = from_str("-12").unwrap();
-        assert!(!json_value.is_u64());
-
-        let json_value = from_str("12.0").unwrap();
-        assert!(!json_value.is_u64());
-    }
-
-    #[test]
-    fn test_is_f64(){
-        let json_value = from_str("12").unwrap();
-        assert!(!json_value.is_f64());
-
-        let json_value = from_str("-12").unwrap();
-        assert!(!json_value.is_f64());
-
-        let json_value = from_str("12.0").unwrap();
-        assert!(json_value.is_f64());
-
-        let json_value = from_str("-12.0").unwrap();
-        assert!(json_value.is_f64());
-    }
-
-    #[test]
-    fn test_as_i64(){
-        let json_value = from_str("-12").unwrap();
-        let json_num = json_value.as_i64();
-        assert_eq!(json_num, Some(-12));
-    }
-
-    #[test]
-    fn test_as_u64(){
-        let json_value = from_str("12").unwrap();
-        let json_num = json_value.as_u64();
-        assert_eq!(json_num, Some(12));
-    }
-
-    #[test]
-    fn test_as_f64(){
-        let json_value = from_str("12.0").unwrap();
-        let json_num = json_value.as_f64();
-        assert_eq!(json_num, Some(12f64));
-    }
-
-    #[test]
-    fn test_is_boolean(){
-        let json_value = from_str("false").unwrap();
-        assert!(json_value.is_boolean());
-    }
-
-    #[test]
-    fn test_as_boolean(){
-        let json_value = from_str("false").unwrap();
-        let json_bool = json_value.as_boolean();
-        let expected_bool = false;
-        assert!(json_bool.is_some() && json_bool.unwrap() == expected_bool);
-    }
-
-    #[test]
-    fn test_is_null(){
-        let json_value = from_str("null").unwrap();
-        assert!(json_value.is_null());
-    }
-
-    #[test]
-    fn test_as_null(){
-        let json_value = from_str("null").unwrap();
-        let json_null = json_value.as_null();
-        let expected_null = ();
-        assert!(json_null.is_some() && json_null.unwrap() == expected_null);
-    }
-
-    #[test]
-    fn test_encode_hashmap_with_numeric_key() {
-        use std::str::from_utf8;
-        use std::collections::HashMap;
-        let mut hm: HashMap<usize, bool> = HashMap::new();
-        hm.insert(1, true);
-        let mut mem_buf = Vec::new();
-        write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap();
-        let json_str = from_utf8(&mem_buf[..]).unwrap();
-        match from_str(json_str) {
-            Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
-            _ => {} // it parsed and we are good to go
-        }
-    }
-
-    #[test]
-    fn test_prettyencode_hashmap_with_numeric_key() {
-        use std::str::from_utf8;
-        use std::collections::HashMap;
-        let mut hm: HashMap<usize, bool> = HashMap::new();
-        hm.insert(1, true);
-        let mut mem_buf = Vec::new();
-        write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap();
-        let json_str = from_utf8(&mem_buf[..]).unwrap();
-        match from_str(json_str) {
-            Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
-            _ => {} // it parsed and we are good to go
-        }
-    }
-
-    #[test]
-    fn test_prettyencoder_indent_level_param() {
-        use std::str::from_utf8;
-        use std::collections::BTreeMap;
-
-        let mut tree = BTreeMap::new();
-
-        tree.insert("hello".to_string(), String("guten tag".to_string()));
-        tree.insert("goodbye".to_string(), String("sayonara".to_string()));
-
-        let json = Array(
-            // The layout below should look a lot like
-            // the pretty-printed JSON (indent * x)
-            vec!
-            ( // 0x
-                String("greetings".to_string()), // 1x
-                Object(tree), // 1x + 2x + 2x + 1x
-            ) // 0x
-            // End JSON array (7 lines)
-        );
-
-        // Helper function for counting indents
-        fn indents(source: &str) -> usize {
-            let trimmed = source.trim_start_matches(' ');
-            source.len() - trimmed.len()
-        }
-
-        // Test up to 4 spaces of indents (more?)
-        for i in 0..4 {
-            let mut writer = Vec::new();
-            write!(&mut writer, "{}",
-                   super::as_pretty_json(&json).indent(i)).unwrap();
-
-            let printed = from_utf8(&writer[..]).unwrap();
-
-            // Check for indents at each line
-            let lines: Vec<&str> = printed.lines().collect();
-            assert_eq!(lines.len(), 7); // JSON should be 7 lines
-
-            assert_eq!(indents(lines[0]), 0 * i); // [
-            assert_eq!(indents(lines[1]), 1 * i); //   "greetings",
-            assert_eq!(indents(lines[2]), 1 * i); //   {
-            assert_eq!(indents(lines[3]), 2 * i); //     "hello": "guten tag",
-            assert_eq!(indents(lines[4]), 2 * i); //     "goodbye": "sayonara"
-            assert_eq!(indents(lines[5]), 1 * i); //   },
-            assert_eq!(indents(lines[6]), 0 * i); // ]
-
-            // Finally, test that the pretty-printed JSON is valid
-            from_str(printed).ok().expect("Pretty-printed JSON is invalid!");
-        }
-    }
-
-    #[test]
-    fn test_hashmap_with_enum_key() {
-        use std::collections::HashMap;
-        use json;
-        #[derive(RustcEncodable, Eq, Hash, PartialEq, RustcDecodable, Debug)]
-        enum Enum {
-            Foo,
-            #[allow(dead_code)]
-            Bar,
-        }
-        let mut map = HashMap::new();
-        map.insert(Enum::Foo, 0);
-        let result = json::encode(&map).unwrap();
-        assert_eq!(&result[..], r#"{"Foo":0}"#);
-        let decoded: HashMap<Enum, _> = json::decode(&result).unwrap();
-        assert_eq!(map, decoded);
-    }
-
-    #[test]
-    fn test_hashmap_with_numeric_key_can_handle_double_quote_delimited_key() {
-        use std::collections::HashMap;
-        use Decodable;
-        let json_str = "{\"1\":true}";
-        let json_obj = match from_str(json_str) {
-            Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
-            Ok(o) => o
-        };
-        let mut decoder = Decoder::new(json_obj);
-        let _hm: HashMap<usize, bool> = Decodable::decode(&mut decoder).unwrap();
-    }
-
-    #[test]
-    fn test_hashmap_with_numeric_key_will_error_with_string_keys() {
-        use std::collections::HashMap;
-        use Decodable;
-        let json_str = "{\"a\":true}";
-        let json_obj = match from_str(json_str) {
-            Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
-            Ok(o) => o
-        };
-        let mut decoder = Decoder::new(json_obj);
-        let result: Result<HashMap<usize, bool>, DecoderError> = Decodable::decode(&mut decoder);
-        assert_eq!(result, Err(ExpectedError("Number".to_string(), "a".to_string())));
-    }
-
-    fn assert_stream_equal(src: &str,
-                           expected: Vec<(JsonEvent, Vec<StackElement>)>) {
-        let mut parser = Parser::new(src.chars());
-        let mut i = 0;
-        loop {
-            let evt = match parser.next() {
-                Some(e) => e,
-                None => { break; }
-            };
-            let (ref expected_evt, ref expected_stack) = expected[i];
-            if !parser.stack().is_equal_to(expected_stack) {
-                panic!("Parser stack is not equal to {:?}", expected_stack);
-            }
-            assert_eq!(&evt, expected_evt);
-            i+=1;
-        }
-    }
-    #[test]
-    fn test_streaming_parser() {
-        assert_stream_equal(
-            r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#,
-            vec![
-                (ObjectStart,             vec![]),
-                  (StringValue("bar".to_string()),   vec![StackElement::Key("foo")]),
-                  (ArrayStart,            vec![StackElement::Key("array")]),
-                    (U64Value(0),         vec![StackElement::Key("array"), StackElement::Index(0)]),
-                    (U64Value(1),         vec![StackElement::Key("array"), StackElement::Index(1)]),
-                    (U64Value(2),         vec![StackElement::Key("array"), StackElement::Index(2)]),
-                    (U64Value(3),         vec![StackElement::Key("array"), StackElement::Index(3)]),
-                    (U64Value(4),         vec![StackElement::Key("array"), StackElement::Index(4)]),
-                    (U64Value(5),         vec![StackElement::Key("array"), StackElement::Index(5)]),
-                  (ArrayEnd,              vec![StackElement::Key("array")]),
-                  (ArrayStart,            vec![StackElement::Key("idents")]),
-                    (NullValue,           vec![StackElement::Key("idents"),
-                                               StackElement::Index(0)]),
-                    (BooleanValue(true),  vec![StackElement::Key("idents"),
-                                               StackElement::Index(1)]),
-                    (BooleanValue(false), vec![StackElement::Key("idents"),
-                                               StackElement::Index(2)]),
-                  (ArrayEnd,              vec![StackElement::Key("idents")]),
-                (ObjectEnd,               vec![]),
-            ]
-        );
-    }
-    fn last_event(src: &str) -> JsonEvent {
-        let mut parser = Parser::new(src.chars());
-        let mut evt = NullValue;
-        loop {
-            evt = match parser.next() {
-                Some(e) => e,
-                None => return evt,
-            }
-        }
-    }
-
-    #[test]
-    fn test_read_object_streaming() {
-        assert_eq!(last_event("{ "),      Error(SyntaxError(EOFWhileParsingObject, 1, 3)));
-        assert_eq!(last_event("{1"),      Error(SyntaxError(KeyMustBeAString,      1, 2)));
-        assert_eq!(last_event("{ \"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
-        assert_eq!(last_event("{\"a\""),  Error(SyntaxError(EOFWhileParsingObject, 1, 5)));
-        assert_eq!(last_event("{\"a\" "), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
-
-        assert_eq!(last_event("{\"a\" 1"),   Error(SyntaxError(ExpectedColon,         1, 6)));
-        assert_eq!(last_event("{\"a\":"),    Error(SyntaxError(EOFWhileParsingValue,  1, 6)));
-        assert_eq!(last_event("{\"a\":1"),   Error(SyntaxError(EOFWhileParsingObject, 1, 7)));
-        assert_eq!(last_event("{\"a\":1 1"), Error(SyntaxError(InvalidSyntax,         1, 8)));
-        assert_eq!(last_event("{\"a\":1,"),  Error(SyntaxError(EOFWhileParsingObject, 1, 8)));
-        assert_eq!(last_event("{\"a\":1,}"), Error(SyntaxError(TrailingComma, 1, 8)));
-
-        assert_stream_equal(
-            "{}",
-            vec![(ObjectStart, vec![]), (ObjectEnd, vec![])]
-        );
-        assert_stream_equal(
-            "{\"a\": 3}",
-            vec![
-                (ObjectStart,        vec![]),
-                  (U64Value(3),      vec![StackElement::Key("a")]),
-                (ObjectEnd,          vec![]),
-            ]
-        );
-        assert_stream_equal(
-            "{ \"a\": null, \"b\" : true }",
-            vec![
-                (ObjectStart,           vec![]),
-                  (NullValue,           vec![StackElement::Key("a")]),
-                  (BooleanValue(true),  vec![StackElement::Key("b")]),
-                (ObjectEnd,             vec![]),
-            ]
-        );
-        assert_stream_equal(
-            "{\"a\" : 1.0 ,\"b\": [ true ]}",
-            vec![
-                (ObjectStart,           vec![]),
-                  (F64Value(1.0),       vec![StackElement::Key("a")]),
-                  (ArrayStart,          vec![StackElement::Key("b")]),
-                    (BooleanValue(true),vec![StackElement::Key("b"), StackElement::Index(0)]),
-                  (ArrayEnd,            vec![StackElement::Key("b")]),
-                (ObjectEnd,             vec![]),
-            ]
-        );
-        assert_stream_equal(
-            r#"{
-                "a": 1.0,
-                "b": [
-                    true,
-                    "foo\nbar",
-                    { "c": {"d": null} }
-                ]
-            }"#,
-            vec![
-                (ObjectStart,                   vec![]),
-                  (F64Value(1.0),               vec![StackElement::Key("a")]),
-                  (ArrayStart,                  vec![StackElement::Key("b")]),
-                    (BooleanValue(true),        vec![StackElement::Key("b"),
-                                                     StackElement::Index(0)]),
-                    (StringValue("foo\nbar".to_string()),  vec![StackElement::Key("b"),
-                                                                StackElement::Index(1)]),
-                    (ObjectStart,               vec![StackElement::Key("b"),
-                                                     StackElement::Index(2)]),
-                      (ObjectStart,             vec![StackElement::Key("b"),
-                                                     StackElement::Index(2),
-                                                     StackElement::Key("c")]),
-                        (NullValue,             vec![StackElement::Key("b"),
-                                                     StackElement::Index(2),
-                                                     StackElement::Key("c"),
-                                                     StackElement::Key("d")]),
-                      (ObjectEnd,               vec![StackElement::Key("b"),
-                                                     StackElement::Index(2),
-                                                     StackElement::Key("c")]),
-                    (ObjectEnd,                 vec![StackElement::Key("b"),
-                                                     StackElement::Index(2)]),
-                  (ArrayEnd,                    vec![StackElement::Key("b")]),
-                (ObjectEnd,                     vec![]),
-            ]
-        );
-    }
-    #[test]
-    fn test_read_array_streaming() {
-        assert_stream_equal(
-            "[]",
-            vec![
-                (ArrayStart, vec![]),
-                (ArrayEnd,   vec![]),
-            ]
-        );
-        assert_stream_equal(
-            "[ ]",
-            vec![
-                (ArrayStart, vec![]),
-                (ArrayEnd,   vec![]),
-            ]
-        );
-        assert_stream_equal(
-            "[true]",
-            vec![
-                (ArrayStart,             vec![]),
-                    (BooleanValue(true), vec![StackElement::Index(0)]),
-                (ArrayEnd,               vec![]),
-            ]
-        );
-        assert_stream_equal(
-            "[ false ]",
-            vec![
-                (ArrayStart,              vec![]),
-                    (BooleanValue(false), vec![StackElement::Index(0)]),
-                (ArrayEnd,                vec![]),
-            ]
-        );
-        assert_stream_equal(
-            "[null]",
-            vec![
-                (ArrayStart,    vec![]),
-                    (NullValue, vec![StackElement::Index(0)]),
-                (ArrayEnd,      vec![]),
-            ]
-        );
-        assert_stream_equal(
-            "[3, 1]",
-            vec![
-                (ArrayStart,      vec![]),
-                    (U64Value(3), vec![StackElement::Index(0)]),
-                    (U64Value(1), vec![StackElement::Index(1)]),
-                (ArrayEnd,        vec![]),
-            ]
-        );
-        assert_stream_equal(
-            "\n[3, 2]\n",
-            vec![
-                (ArrayStart,      vec![]),
-                    (U64Value(3), vec![StackElement::Index(0)]),
-                    (U64Value(2), vec![StackElement::Index(1)]),
-                (ArrayEnd,        vec![]),
-            ]
-        );
-        assert_stream_equal(
-            "[2, [4, 1]]",
-            vec![
-                (ArrayStart,           vec![]),
-                    (U64Value(2),      vec![StackElement::Index(0)]),
-                    (ArrayStart,       vec![StackElement::Index(1)]),
-                        (U64Value(4),  vec![StackElement::Index(1), StackElement::Index(0)]),
-                        (U64Value(1),  vec![StackElement::Index(1), StackElement::Index(1)]),
-                    (ArrayEnd,         vec![StackElement::Index(1)]),
-                (ArrayEnd,             vec![]),
-            ]
-        );
-
-        assert_eq!(last_event("["), Error(SyntaxError(EOFWhileParsingValue, 1,  2)));
-
-        assert_eq!(from_str("["),     Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
-        assert_eq!(from_str("[1"),    Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
-        assert_eq!(from_str("[1,"),   Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
-        assert_eq!(from_str("[1,]"),  Err(SyntaxError(InvalidSyntax,        1, 4)));
-        assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax,        1, 4)));
-
-    }
-    #[test]
-    fn test_trailing_characters_streaming() {
-        assert_eq!(last_event("nulla"),  Error(SyntaxError(TrailingCharacters, 1, 5)));
-        assert_eq!(last_event("truea"),  Error(SyntaxError(TrailingCharacters, 1, 5)));
-        assert_eq!(last_event("falsea"), Error(SyntaxError(TrailingCharacters, 1, 6)));
-        assert_eq!(last_event("1a"),     Error(SyntaxError(TrailingCharacters, 1, 2)));
-        assert_eq!(last_event("[]a"),    Error(SyntaxError(TrailingCharacters, 1, 3)));
-        assert_eq!(last_event("{}a"),    Error(SyntaxError(TrailingCharacters, 1, 3)));
-    }
-    #[test]
-    fn test_read_identifiers_streaming() {
-        assert_eq!(Parser::new("null".chars()).next(), Some(NullValue));
-        assert_eq!(Parser::new("true".chars()).next(), Some(BooleanValue(true)));
-        assert_eq!(Parser::new("false".chars()).next(), Some(BooleanValue(false)));
-
-        assert_eq!(last_event("n"),    Error(SyntaxError(InvalidSyntax, 1, 2)));
-        assert_eq!(last_event("nul"),  Error(SyntaxError(InvalidSyntax, 1, 4)));
-        assert_eq!(last_event("t"),    Error(SyntaxError(InvalidSyntax, 1, 2)));
-        assert_eq!(last_event("truz"), Error(SyntaxError(InvalidSyntax, 1, 4)));
-        assert_eq!(last_event("f"),    Error(SyntaxError(InvalidSyntax, 1, 2)));
-        assert_eq!(last_event("faz"),  Error(SyntaxError(InvalidSyntax, 1, 3)));
-    }
-
     #[test]
     fn test_stack() {
         let mut stack = Stack::new();
@@ -3862,76 +2654,6 @@
         assert!(stack.get(1) == StackElement::Key("foo"));
     }
 
-    #[test]
-    fn test_to_json() {
-        use std::collections::{HashMap,BTreeMap};
-        use super::ToJson;
-
-        let array2 = Array(vec![U64(1), U64(2)]);
-        let array3 = Array(vec![U64(1), U64(2), U64(3)]);
-        let object = {
-            let mut tree_map = BTreeMap::new();
-            tree_map.insert("a".to_string(), U64(1));
-            tree_map.insert("b".to_string(), U64(2));
-            Object(tree_map)
-        };
-
-        assert_eq!(array2.to_json(), array2);
-        assert_eq!(object.to_json(), object);
-        assert_eq!(3_isize.to_json(), I64(3));
-        assert_eq!(4_i8.to_json(), I64(4));
-        assert_eq!(5_i16.to_json(), I64(5));
-        assert_eq!(6_i32.to_json(), I64(6));
-        assert_eq!(7_i64.to_json(), I64(7));
-        assert_eq!(8_usize.to_json(), U64(8));
-        assert_eq!(9_u8.to_json(), U64(9));
-        assert_eq!(10_u16.to_json(), U64(10));
-        assert_eq!(11_u32.to_json(), U64(11));
-        assert_eq!(12_u64.to_json(), U64(12));
-        assert_eq!(13.0_f32.to_json(), F64(13.0_f64));
-        assert_eq!(14.0_f64.to_json(), F64(14.0_f64));
-        assert_eq!(().to_json(), Null);
-        assert_eq!(f32::INFINITY.to_json(), Null);
-        assert_eq!(f64::NAN.to_json(), Null);
-        assert_eq!(true.to_json(), Boolean(true));
-        assert_eq!(false.to_json(), Boolean(false));
-        assert_eq!("abc".to_json(), String("abc".to_string()));
-        assert_eq!("abc".to_string().to_json(), String("abc".to_string()));
-        assert_eq!((1_usize, 2_usize).to_json(), array2);
-        assert_eq!((1_usize, 2_usize, 3_usize).to_json(), array3);
-        assert_eq!([1_usize, 2_usize].to_json(), array2);
-        assert_eq!((&[1_usize, 2_usize, 3_usize]).to_json(), array3);
-        assert_eq!((vec![1_usize, 2_usize]).to_json(), array2);
-        assert_eq!(vec![1_usize, 2_usize, 3_usize].to_json(), array3);
-        let mut tree_map = BTreeMap::new();
-        tree_map.insert("a".to_string(), 1 as usize);
-        tree_map.insert("b".to_string(), 2);
-        assert_eq!(tree_map.to_json(), object);
-        let mut hash_map = HashMap::new();
-        hash_map.insert("a".to_string(), 1 as usize);
-        hash_map.insert("b".to_string(), 2);
-        assert_eq!(hash_map.to_json(), object);
-        assert_eq!(Some(15).to_json(), I64(15));
-        assert_eq!(Some(15 as usize).to_json(), U64(15));
-        assert_eq!(None::<isize>.to_json(), Null);
-    }
-
-    #[test]
-    fn test_encode_hashmap_with_arbitrary_key() {
-        use std::collections::HashMap;
-        #[derive(PartialEq, Eq, Hash, RustcEncodable)]
-        struct ArbitraryType(usize);
-        let mut hm: HashMap<ArbitraryType, bool> = HashMap::new();
-        hm.insert(ArbitraryType(1), true);
-        let mut mem_buf = string::String::new();
-        let mut encoder = Encoder::new(&mut mem_buf);
-        let result = hm.encode(&mut encoder);
-        match result.unwrap_err() {
-            EncoderError::BadHashmapKey => (),
-            _ => panic!("expected bad hash map key")
-        }
-    }
-
     #[bench]
     fn bench_streaming_small(b: &mut Bencher) {
         b.iter( || {
diff --git a/src/libserialize/lib.rs b/src/libserialize/lib.rs
index fe93a2d..b8eeb4d 100644
--- a/src/libserialize/lib.rs
+++ b/src/libserialize/lib.rs
@@ -8,6 +8,8 @@
        html_playground_url = "https://play.rust-lang.org/",
        test(attr(allow(unused_variables), deny(warnings))))]
 
+#![deny(rust_2018_idioms)]
+
 #![feature(box_syntax)]
 #![feature(core_intrinsics)]
 #![feature(specialization)]
@@ -20,8 +22,6 @@
 pub use self::serialize::{SpecializationError, SpecializedEncoder, SpecializedDecoder};
 pub use self::serialize::{UseSpecializedEncodable, UseSpecializedDecodable};
 
-extern crate smallvec;
-
 mod serialize;
 mod collection_impls;
 
@@ -30,7 +30,3 @@
 
 pub mod opaque;
 pub mod leb128;
-
-mod rustc_serialize {
-    pub use serialize::*;
-}
diff --git a/src/libserialize/opaque.rs b/src/libserialize/opaque.rs
index b8d4f8a..a6a5c31 100644
--- a/src/libserialize/opaque.rs
+++ b/src/libserialize/opaque.rs
@@ -1,6 +1,6 @@
-use leb128::{self, read_signed_leb128, write_signed_leb128};
+use crate::leb128::{self, read_signed_leb128, write_signed_leb128};
+use crate::serialize;
 use std::borrow::Cow;
-use serialize;
 
 // -----------------------------------------------------------------------------
 // Encoder
@@ -312,7 +312,7 @@
     }
 
     #[inline]
-    fn read_str(&mut self) -> Result<Cow<str>, Self::Error> {
+    fn read_str(&mut self) -> Result<Cow<'_, str>, Self::Error> {
         let len = self.read_usize()?;
         let s = ::std::str::from_utf8(&self.data[self.position..self.position + len]).unwrap();
         self.position += len;
@@ -324,288 +324,3 @@
         err.to_string()
     }
 }
-
-
-#[cfg(test)]
-mod tests {
-    use serialize::{Encodable, Decodable};
-    use std::fmt::Debug;
-    use super::{Encoder, Decoder};
-
-    #[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
-    struct Struct {
-        a: (),
-        b: u8,
-        c: u16,
-        d: u32,
-        e: u64,
-        f: usize,
-
-        g: i8,
-        h: i16,
-        i: i32,
-        j: i64,
-        k: isize,
-
-        l: char,
-        m: String,
-        n: f32,
-        o: f64,
-        p: bool,
-        q: Option<u32>,
-    }
-
-
-    fn check_round_trip<T: Encodable + Decodable + PartialEq + Debug>(values: Vec<T>) {
-        let mut encoder = Encoder::new(Vec::new());
-
-        for value in &values {
-            Encodable::encode(&value, &mut encoder).unwrap();
-        }
-
-        let data = encoder.into_inner();
-        let mut decoder = Decoder::new(&data[..], 0);
-
-        for value in values {
-            let decoded = Decodable::decode(&mut decoder).unwrap();
-            assert_eq!(value, decoded);
-        }
-    }
-
-    #[test]
-    fn test_unit() {
-        check_round_trip(vec![(), (), (), ()]);
-    }
-
-    #[test]
-    fn test_u8() {
-        let mut vec = vec![];
-        for i in ::std::u8::MIN..::std::u8::MAX {
-            vec.push(i);
-        }
-        check_round_trip(vec);
-    }
-
-    #[test]
-    fn test_u16() {
-        for i in ::std::u16::MIN..::std::u16::MAX {
-            check_round_trip(vec![1, 2, 3, i, i, i]);
-        }
-    }
-
-    #[test]
-    fn test_u32() {
-        check_round_trip(vec![1, 2, 3, ::std::u32::MIN, 0, 1, ::std::u32::MAX, 2, 1]);
-    }
-
-    #[test]
-    fn test_u64() {
-        check_round_trip(vec![1, 2, 3, ::std::u64::MIN, 0, 1, ::std::u64::MAX, 2, 1]);
-    }
-
-    #[test]
-    fn test_usize() {
-        check_round_trip(vec![1, 2, 3, ::std::usize::MIN, 0, 1, ::std::usize::MAX, 2, 1]);
-    }
-
-    #[test]
-    fn test_i8() {
-        let mut vec = vec![];
-        for i in ::std::i8::MIN..::std::i8::MAX {
-            vec.push(i);
-        }
-        check_round_trip(vec);
-    }
-
-    #[test]
-    fn test_i16() {
-        for i in ::std::i16::MIN..::std::i16::MAX {
-            check_round_trip(vec![-1, 2, -3, i, i, i, 2]);
-        }
-    }
-
-    #[test]
-    fn test_i32() {
-        check_round_trip(vec![-1, 2, -3, ::std::i32::MIN, 0, 1, ::std::i32::MAX, 2, 1]);
-    }
-
-    #[test]
-    fn test_i64() {
-        check_round_trip(vec![-1, 2, -3, ::std::i64::MIN, 0, 1, ::std::i64::MAX, 2, 1]);
-    }
-
-    #[test]
-    fn test_isize() {
-        check_round_trip(vec![-1, 2, -3, ::std::isize::MIN, 0, 1, ::std::isize::MAX, 2, 1]);
-    }
-
-    #[test]
-    fn test_bool() {
-        check_round_trip(vec![false, true, true, false, false]);
-    }
-
-    #[test]
-    fn test_f32() {
-        let mut vec = vec![];
-        for i in -100..100 {
-            vec.push((i as f32) / 3.0);
-        }
-        check_round_trip(vec);
-    }
-
-    #[test]
-    fn test_f64() {
-        let mut vec = vec![];
-        for i in -100..100 {
-            vec.push((i as f64) / 3.0);
-        }
-        check_round_trip(vec);
-    }
-
-    #[test]
-    fn test_char() {
-        let vec = vec!['a', 'b', 'c', 'd', 'A', 'X', ' ', '#', 'Ö', 'Ä', 'µ', '€'];
-        check_round_trip(vec);
-    }
-
-    #[test]
-    fn test_string() {
-        let vec = vec!["abcbuÖeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
-                       "abcbuÖganeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
-                       "abcbuÖganeiovÄnameÜavmpßvmea€µsbpapmaebn".to_string(),
-                       "abcbuÖganeiovÄnameÜavmpßvmeabpnvapeapmaebn".to_string(),
-                       "abcbuÖganeiÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
-                       "abcbuÖganeiovÄnameÜavmpßvmea€µsbpmaebn".to_string(),
-                       "abcbuÖganeiovÄnameÜavmpßvmea€µnvapeapmaebn".to_string()];
-
-        check_round_trip(vec);
-    }
-
-    #[test]
-    fn test_option() {
-        check_round_trip(vec![Some(-1i8)]);
-        check_round_trip(vec![Some(-2i16)]);
-        check_round_trip(vec![Some(-3i32)]);
-        check_round_trip(vec![Some(-4i64)]);
-        check_round_trip(vec![Some(-5isize)]);
-
-        let none_i8: Option<i8> = None;
-        check_round_trip(vec![none_i8]);
-
-        let none_i16: Option<i16> = None;
-        check_round_trip(vec![none_i16]);
-
-        let none_i32: Option<i32> = None;
-        check_round_trip(vec![none_i32]);
-
-        let none_i64: Option<i64> = None;
-        check_round_trip(vec![none_i64]);
-
-        let none_isize: Option<isize> = None;
-        check_round_trip(vec![none_isize]);
-    }
-
-    #[test]
-    fn test_struct() {
-        check_round_trip(vec![Struct {
-                                  a: (),
-                                  b: 10,
-                                  c: 11,
-                                  d: 12,
-                                  e: 13,
-                                  f: 14,
-
-                                  g: 15,
-                                  h: 16,
-                                  i: 17,
-                                  j: 18,
-                                  k: 19,
-
-                                  l: 'x',
-                                  m: "abc".to_string(),
-                                  n: 20.5,
-                                  o: 21.5,
-                                  p: false,
-                                  q: None,
-                              }]);
-
-        check_round_trip(vec![Struct {
-                                  a: (),
-                                  b: 101,
-                                  c: 111,
-                                  d: 121,
-                                  e: 131,
-                                  f: 141,
-
-                                  g: -15,
-                                  h: -16,
-                                  i: -17,
-                                  j: -18,
-                                  k: -19,
-
-                                  l: 'y',
-                                  m: "def".to_string(),
-                                  n: -20.5,
-                                  o: -21.5,
-                                  p: true,
-                                  q: Some(1234567),
-                              }]);
-    }
-
-    #[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
-    enum Enum {
-        Variant1,
-        Variant2(usize, f32),
-        Variant3 {
-            a: i32,
-            b: char,
-            c: bool,
-        },
-    }
-
-    #[test]
-    fn test_enum() {
-        check_round_trip(vec![Enum::Variant1,
-                              Enum::Variant2(1, 2.5),
-                              Enum::Variant3 {
-                                  a: 3,
-                                  b: 'b',
-                                  c: false,
-                              },
-                              Enum::Variant3 {
-                                  a: -4,
-                                  b: 'f',
-                                  c: true,
-                              }]);
-    }
-
-    #[test]
-    fn test_sequence() {
-        let mut vec = vec![];
-        for i in -100i64..100i64 {
-            vec.push(i * 100000);
-        }
-
-        check_round_trip(vec![vec]);
-    }
-
-    #[test]
-    fn test_hash_map() {
-        use std::collections::HashMap;
-        let mut map = HashMap::new();
-        for i in -100i64..100i64 {
-            map.insert(i * 100000, i * 10000);
-        }
-
-        check_round_trip(vec![map]);
-    }
-
-    #[test]
-    fn test_tuples() {
-        check_round_trip(vec![('x', (), false, 0.5f32)]);
-        check_round_trip(vec![(9i8, 10u16, 1.5f64)]);
-        check_round_trip(vec![(-12i16, 11u8, 12usize)]);
-        check_round_trip(vec![(1234567isize, 100000000000000u64, 99999999999999i64)]);
-        check_round_trip(vec![(String::new(), "some string".to_string())]);
-    }
-}
diff --git a/src/libserialize/serialize.rs b/src/libserialize/serialize.rs
index 03844b3..977a36a 100644
--- a/src/libserialize/serialize.rs
+++ b/src/libserialize/serialize.rs
@@ -175,7 +175,7 @@
     fn read_f64(&mut self) -> Result<f64, Self::Error>;
     fn read_f32(&mut self) -> Result<f32, Self::Error>;
     fn read_char(&mut self) -> Result<char, Self::Error>;
-    fn read_str(&mut self) -> Result<Cow<str>, Self::Error>;
+    fn read_str(&mut self) -> Result<Cow<'_, str>, Self::Error>;
 
     // Compound types:
     fn read_enum<T, F>(&mut self, _name: &str, f: F) -> Result<T, Self::Error>
diff --git a/src/libserialize/tests/json.rs b/src/libserialize/tests/json.rs
new file mode 100644
index 0000000..3fb6bda
--- /dev/null
+++ b/src/libserialize/tests/json.rs
@@ -0,0 +1,1282 @@
+extern crate serialize as rustc_serialize;
+
+use rustc_serialize::{Encodable, Decodable};
+use rustc_serialize::json;
+use json::Json::*;
+use json::ErrorCode::*;
+use json::ParserError::*;
+use json::DecoderError::*;
+use json::JsonEvent::*;
+use json::{Json, from_str, DecodeResult, DecoderError, JsonEvent, Parser, StackElement,
+           Decoder, Encoder, EncoderError};
+
+use Animal::*;
+use std::{i64, u64, f32, f64};
+use std::io::prelude::*;
+use std::collections::BTreeMap;
+use std::string;
+
+#[derive(RustcDecodable, Eq, PartialEq, Debug)]
+struct OptionData {
+    opt: Option<usize>,
+}
+
+#[test]
+fn test_decode_option_none() {
+    let s ="{}";
+    let obj: OptionData = json::decode(s).unwrap();
+    assert_eq!(obj, OptionData { opt: None });
+}
+
+#[test]
+fn test_decode_option_some() {
+    let s = "{ \"opt\": 10 }";
+    let obj: OptionData = json::decode(s).unwrap();
+    assert_eq!(obj, OptionData { opt: Some(10) });
+}
+
+#[test]
+fn test_decode_option_malformed() {
+    check_err::<OptionData>("{ \"opt\": [] }",
+                            ExpectedError("Number".to_string(), "[]".to_string()));
+    check_err::<OptionData>("{ \"opt\": false }",
+                            ExpectedError("Number".to_string(), "false".to_string()));
+}
+
+#[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
+enum Animal {
+    Dog,
+    Frog(string::String, isize)
+}
+
+#[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
+struct Inner {
+    a: (),
+    b: usize,
+    c: Vec<string::String>,
+}
+
+#[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
+struct Outer {
+    inner: Vec<Inner>,
+}
+
+fn mk_object(items: &[(string::String, Json)]) -> Json {
+    let mut d = BTreeMap::new();
+
+    for item in items {
+        match *item {
+            (ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); },
+        }
+    };
+
+    Object(d)
+}
+
+#[test]
+fn test_from_str_trait() {
+    let s = "null";
+    assert!(s.parse::<Json>().unwrap() == s.parse().unwrap());
+}
+
+#[test]
+fn test_write_null() {
+    assert_eq!(Null.to_string(), "null");
+    assert_eq!(Null.pretty().to_string(), "null");
+}
+
+#[test]
+fn test_write_i64() {
+    assert_eq!(U64(0).to_string(), "0");
+    assert_eq!(U64(0).pretty().to_string(), "0");
+
+    assert_eq!(U64(1234).to_string(), "1234");
+    assert_eq!(U64(1234).pretty().to_string(), "1234");
+
+    assert_eq!(I64(-5678).to_string(), "-5678");
+    assert_eq!(I64(-5678).pretty().to_string(), "-5678");
+
+    assert_eq!(U64(7650007200025252000).to_string(), "7650007200025252000");
+    assert_eq!(U64(7650007200025252000).pretty().to_string(), "7650007200025252000");
+}
+
+#[test]
+fn test_write_f64() {
+    assert_eq!(F64(3.0).to_string(), "3.0");
+    assert_eq!(F64(3.0).pretty().to_string(), "3.0");
+
+    assert_eq!(F64(3.1).to_string(), "3.1");
+    assert_eq!(F64(3.1).pretty().to_string(), "3.1");
+
+    assert_eq!(F64(-1.5).to_string(), "-1.5");
+    assert_eq!(F64(-1.5).pretty().to_string(), "-1.5");
+
+    assert_eq!(F64(0.5).to_string(), "0.5");
+    assert_eq!(F64(0.5).pretty().to_string(), "0.5");
+
+    assert_eq!(F64(f64::NAN).to_string(), "null");
+    assert_eq!(F64(f64::NAN).pretty().to_string(), "null");
+
+    assert_eq!(F64(f64::INFINITY).to_string(), "null");
+    assert_eq!(F64(f64::INFINITY).pretty().to_string(), "null");
+
+    assert_eq!(F64(f64::NEG_INFINITY).to_string(), "null");
+    assert_eq!(F64(f64::NEG_INFINITY).pretty().to_string(), "null");
+}
+
+#[test]
+fn test_write_str() {
+    assert_eq!(String("".to_string()).to_string(), "\"\"");
+    assert_eq!(String("".to_string()).pretty().to_string(), "\"\"");
+
+    assert_eq!(String("homura".to_string()).to_string(), "\"homura\"");
+    assert_eq!(String("madoka".to_string()).pretty().to_string(), "\"madoka\"");
+}
+
+#[test]
+fn test_write_bool() {
+    assert_eq!(Boolean(true).to_string(), "true");
+    assert_eq!(Boolean(true).pretty().to_string(), "true");
+
+    assert_eq!(Boolean(false).to_string(), "false");
+    assert_eq!(Boolean(false).pretty().to_string(), "false");
+}
+
+#[test]
+fn test_write_array() {
+    assert_eq!(Array(vec![]).to_string(), "[]");
+    assert_eq!(Array(vec![]).pretty().to_string(), "[]");
+
+    assert_eq!(Array(vec![Boolean(true)]).to_string(), "[true]");
+    assert_eq!(
+        Array(vec![Boolean(true)]).pretty().to_string(),
+        "\
+        [\n  \
+            true\n\
+        ]"
+    );
+
+    let long_test_array = Array(vec![
+        Boolean(false),
+        Null,
+        Array(vec![String("foo\nbar".to_string()), F64(3.5)])]);
+
+    assert_eq!(long_test_array.to_string(),
+        "[false,null,[\"foo\\nbar\",3.5]]");
+    assert_eq!(
+        long_test_array.pretty().to_string(),
+        "\
+        [\n  \
+            false,\n  \
+            null,\n  \
+            [\n    \
+                \"foo\\nbar\",\n    \
+                3.5\n  \
+            ]\n\
+        ]"
+    );
+}
+
+#[test]
+fn test_write_object() {
+    assert_eq!(mk_object(&[]).to_string(), "{}");
+    assert_eq!(mk_object(&[]).pretty().to_string(), "{}");
+
+    assert_eq!(
+        mk_object(&[
+            ("a".to_string(), Boolean(true))
+        ]).to_string(),
+        "{\"a\":true}"
+    );
+    assert_eq!(
+        mk_object(&[("a".to_string(), Boolean(true))]).pretty().to_string(),
+        "\
+        {\n  \
+            \"a\": true\n\
+        }"
+    );
+
+    let complex_obj = mk_object(&[
+            ("b".to_string(), Array(vec![
+                mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
+                mk_object(&[("d".to_string(), String("".to_string()))])
+            ]))
+        ]);
+
+    assert_eq!(
+        complex_obj.to_string(),
+        "{\
+            \"b\":[\
+                {\"c\":\"\\f\\r\"},\
+                {\"d\":\"\"}\
+            ]\
+        }"
+    );
+    assert_eq!(
+        complex_obj.pretty().to_string(),
+        "\
+        {\n  \
+            \"b\": [\n    \
+                {\n      \
+                    \"c\": \"\\f\\r\"\n    \
+                },\n    \
+                {\n      \
+                    \"d\": \"\"\n    \
+                }\n  \
+            ]\n\
+        }"
+    );
+
+    let a = mk_object(&[
+        ("a".to_string(), Boolean(true)),
+        ("b".to_string(), Array(vec![
+            mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
+            mk_object(&[("d".to_string(), String("".to_string()))])
+        ]))
+    ]);
+
+    // We can't compare the strings directly because the object fields may be
+    // printed in a different order.
+    assert_eq!(a.clone(), a.to_string().parse().unwrap());
+    assert_eq!(a.clone(), a.pretty().to_string().parse().unwrap());
+}
+
+#[test]
+fn test_write_enum() {
+    let animal = Dog;
+    assert_eq!(
+        json::as_json(&animal).to_string(),
+        "\"Dog\""
+    );
+    assert_eq!(
+        json::as_pretty_json(&animal).to_string(),
+        "\"Dog\""
+    );
+
+    let animal = Frog("Henry".to_string(), 349);
+    assert_eq!(
+        json::as_json(&animal).to_string(),
+        "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"
+    );
+    assert_eq!(
+        json::as_pretty_json(&animal).to_string(),
+        "{\n  \
+           \"variant\": \"Frog\",\n  \
+           \"fields\": [\n    \
+             \"Henry\",\n    \
+             349\n  \
+           ]\n\
+         }"
+    );
+}
+
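+// Checks that both the compact (`as_json`) and the pretty (`as_pretty_json`)
+// encoders produce `$expected` for `$value`.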
+macro_rules! check_encoder_for_simple {
+    ($value:expr, $expected:expr) => ({
+        let s = json::as_json(&$value).to_string();
+        assert_eq!(s, $expected);
+
+        let s = json::as_pretty_json(&$value).to_string();
+        assert_eq!(s, $expected);
+    })
+}
+
+#[test]
+fn test_write_some() {
+    check_encoder_for_simple!(Some("jodhpurs".to_string()), "\"jodhpurs\"");
+}
+
+#[test]
+fn test_write_none() {
+    check_encoder_for_simple!(None::<string::String>, "null");
+}
+
+#[test]
+fn test_write_char() {
+    check_encoder_for_simple!('a', "\"a\"");
+    check_encoder_for_simple!('\t', "\"\\t\"");
+    check_encoder_for_simple!('\u{0000}', "\"\\u0000\"");
+    check_encoder_for_simple!('\u{001b}', "\"\\u001b\"");
+    check_encoder_for_simple!('\u{007f}', "\"\\u007f\"");
+    check_encoder_for_simple!('\u{00a0}', "\"\u{00a0}\"");
+    check_encoder_for_simple!('\u{abcd}', "\"\u{abcd}\"");
+    check_encoder_for_simple!('\u{10ffff}', "\"\u{10ffff}\"");
+}
+
+#[test]
+fn test_trailing_characters() {
+    assert_eq!(from_str("nulla"),  Err(SyntaxError(TrailingCharacters, 1, 5)));
+    assert_eq!(from_str("truea"),  Err(SyntaxError(TrailingCharacters, 1, 5)));
+    assert_eq!(from_str("falsea"), Err(SyntaxError(TrailingCharacters, 1, 6)));
+    assert_eq!(from_str("1a"),     Err(SyntaxError(TrailingCharacters, 1, 2)));
+    assert_eq!(from_str("[]a"),    Err(SyntaxError(TrailingCharacters, 1, 3)));
+    assert_eq!(from_str("{}a"),    Err(SyntaxError(TrailingCharacters, 1, 3)));
+}
+
+#[test]
+fn test_read_identifiers() {
+    assert_eq!(from_str("n"),    Err(SyntaxError(InvalidSyntax, 1, 2)));
+    assert_eq!(from_str("nul"),  Err(SyntaxError(InvalidSyntax, 1, 4)));
+    assert_eq!(from_str("t"),    Err(SyntaxError(InvalidSyntax, 1, 2)));
+    assert_eq!(from_str("truz"), Err(SyntaxError(InvalidSyntax, 1, 4)));
+    assert_eq!(from_str("f"),    Err(SyntaxError(InvalidSyntax, 1, 2)));
+    assert_eq!(from_str("faz"),  Err(SyntaxError(InvalidSyntax, 1, 3)));
+
+    assert_eq!(from_str("null"), Ok(Null));
+    assert_eq!(from_str("true"), Ok(Boolean(true)));
+    assert_eq!(from_str("false"), Ok(Boolean(false)));
+    assert_eq!(from_str(" null "), Ok(Null));
+    assert_eq!(from_str(" true "), Ok(Boolean(true)));
+    assert_eq!(from_str(" false "), Ok(Boolean(false)));
+}
+
+#[test]
+fn test_decode_identifiers() {
+    let v: () = json::decode("null").unwrap();
+    assert_eq!(v, ());
+
+    let v: bool = json::decode("true").unwrap();
+    assert_eq!(v, true);
+
+    let v: bool = json::decode("false").unwrap();
+    assert_eq!(v, false);
+}
+
+#[test]
+fn test_read_number() {
+    assert_eq!(from_str("+"),   Err(SyntaxError(InvalidSyntax, 1, 1)));
+    assert_eq!(from_str("."),   Err(SyntaxError(InvalidSyntax, 1, 1)));
+    assert_eq!(from_str("NaN"), Err(SyntaxError(InvalidSyntax, 1, 1)));
+    assert_eq!(from_str("-"),   Err(SyntaxError(InvalidNumber, 1, 2)));
+    assert_eq!(from_str("00"),  Err(SyntaxError(InvalidNumber, 1, 2)));
+    assert_eq!(from_str("1."),  Err(SyntaxError(InvalidNumber, 1, 3)));
+    assert_eq!(from_str("1e"),  Err(SyntaxError(InvalidNumber, 1, 3)));
+    assert_eq!(from_str("1e+"), Err(SyntaxError(InvalidNumber, 1, 4)));
+
+    assert_eq!(from_str("18446744073709551616"), Err(SyntaxError(InvalidNumber, 1, 20)));
+    assert_eq!(from_str("-9223372036854775809"), Err(SyntaxError(InvalidNumber, 1, 21)));
+
+    assert_eq!(from_str("3"), Ok(U64(3)));
+    assert_eq!(from_str("3.1"), Ok(F64(3.1)));
+    assert_eq!(from_str("-1.2"), Ok(F64(-1.2)));
+    assert_eq!(from_str("0.4"), Ok(F64(0.4)));
+    assert_eq!(from_str("0.4e5"), Ok(F64(0.4e5)));
+    assert_eq!(from_str("0.4e+15"), Ok(F64(0.4e15)));
+    assert_eq!(from_str("0.4e-01"), Ok(F64(0.4e-01)));
+    assert_eq!(from_str(" 3 "), Ok(U64(3)));
+
+    assert_eq!(from_str("-9223372036854775808"), Ok(I64(i64::MIN)));
+    assert_eq!(from_str("9223372036854775807"), Ok(U64(i64::MAX as u64)));
+    assert_eq!(from_str("18446744073709551615"), Ok(U64(u64::MAX)));
+}
+
+#[test]
+fn test_decode_numbers() {
+    let v: f64 = json::decode("3").unwrap();
+    assert_eq!(v, 3.0);
+
+    let v: f64 = json::decode("3.1").unwrap();
+    assert_eq!(v, 3.1);
+
+    let v: f64 = json::decode("-1.2").unwrap();
+    assert_eq!(v, -1.2);
+
+    let v: f64 = json::decode("0.4").unwrap();
+    assert_eq!(v, 0.4);
+
+    let v: f64 = json::decode("0.4e5").unwrap();
+    assert_eq!(v, 0.4e5);
+
+    let v: f64 = json::decode("0.4e15").unwrap();
+    assert_eq!(v, 0.4e15);
+
+    let v: f64 = json::decode("0.4e-01").unwrap();
+    assert_eq!(v, 0.4e-01);
+
+    let v: u64 = json::decode("0").unwrap();
+    assert_eq!(v, 0);
+
+    let v: u64 = json::decode("18446744073709551615").unwrap();
+    assert_eq!(v, u64::MAX);
+
+    let v: i64 = json::decode("-9223372036854775808").unwrap();
+    assert_eq!(v, i64::MIN);
+
+    let v: i64 = json::decode("9223372036854775807").unwrap();
+    assert_eq!(v, i64::MAX);
+
+    let res: DecodeResult<i64> = json::decode("765.25");
+    assert_eq!(res, Err(ExpectedError("Integer".to_string(),
+                                      "765.25".to_string())));
+}
+
+#[test]
+fn test_read_str() {
+    assert_eq!(from_str("\""),    Err(SyntaxError(EOFWhileParsingString, 1, 2)));
+    assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5)));
+
+    assert_eq!(from_str("\"\""), Ok(String("".to_string())));
+    assert_eq!(from_str("\"foo\""), Ok(String("foo".to_string())));
+    assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_string())));
+    assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_string())));
+    assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_string())));
+    assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_string())));
+    assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_string())));
+    assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_string())));
+    assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".to_string())));
+    assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".to_string())));
+}
+
+#[test]
+fn test_decode_str() {
+    let s = [("\"\"", ""),
+             ("\"foo\"", "foo"),
+             ("\"\\\"\"", "\""),
+             ("\"\\b\"", "\x08"),
+             ("\"\\n\"", "\n"),
+             ("\"\\r\"", "\r"),
+             ("\"\\t\"", "\t"),
+             ("\"\\u12ab\"", "\u{12ab}"),
+             ("\"\\uAB12\"", "\u{AB12}")];
+
+    for &(i, o) in &s {
+        let v: string::String = json::decode(i).unwrap();
+        assert_eq!(v, o);
+    }
+}
+
+#[test]
+fn test_read_array() {
+    assert_eq!(from_str("["),     Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
+    assert_eq!(from_str("[1"),    Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
+    assert_eq!(from_str("[1,"),   Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
+    assert_eq!(from_str("[1,]"),  Err(SyntaxError(InvalidSyntax,        1, 4)));
+    assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax,        1, 4)));
+
+    assert_eq!(from_str("[]"), Ok(Array(vec![])));
+    assert_eq!(from_str("[ ]"), Ok(Array(vec![])));
+    assert_eq!(from_str("[true]"), Ok(Array(vec![Boolean(true)])));
+    assert_eq!(from_str("[ false ]"), Ok(Array(vec![Boolean(false)])));
+    assert_eq!(from_str("[null]"), Ok(Array(vec![Null])));
+    assert_eq!(from_str("[3, 1]"),
+                 Ok(Array(vec![U64(3), U64(1)])));
+    assert_eq!(from_str("\n[3, 2]\n"),
+                 Ok(Array(vec![U64(3), U64(2)])));
+    assert_eq!(from_str("[2, [4, 1]]"),
+           Ok(Array(vec![U64(2), Array(vec![U64(4), U64(1)])])));
+}
+
+#[test]
+fn test_decode_array() {
+    let v: Vec<()> = json::decode("[]").unwrap();
+    assert_eq!(v, []);
+
+    let v: Vec<()> = json::decode("[null]").unwrap();
+    assert_eq!(v, [()]);
+
+    let v: Vec<bool> = json::decode("[true]").unwrap();
+    assert_eq!(v, [true]);
+
+    let v: Vec<isize> = json::decode("[3, 1]").unwrap();
+    assert_eq!(v, [3, 1]);
+
+    let v: Vec<Vec<usize>> = json::decode("[[3], [1, 2]]").unwrap();
+    assert_eq!(v, [vec![3], vec![1, 2]]);
+}
+
+#[test]
+fn test_decode_tuple() {
+    let t: (usize, usize, usize) = json::decode("[1, 2, 3]").unwrap();
+    assert_eq!(t, (1, 2, 3));
+
+    let t: (usize, string::String) = json::decode("[1, \"two\"]").unwrap();
+    assert_eq!(t, (1, "two".to_string()));
+}
+
+#[test]
+fn test_decode_tuple_malformed_types() {
+    assert!(json::decode::<(usize, string::String)>("[1, 2]").is_err());
+}
+
+#[test]
+fn test_decode_tuple_malformed_length() {
+    assert!(json::decode::<(usize, usize)>("[1, 2, 3]").is_err());
+}
+
+#[test]
+fn test_read_object() {
+    assert_eq!(from_str("{"),       Err(SyntaxError(EOFWhileParsingObject, 1, 2)));
+    assert_eq!(from_str("{ "),      Err(SyntaxError(EOFWhileParsingObject, 1, 3)));
+    assert_eq!(from_str("{1"),      Err(SyntaxError(KeyMustBeAString,      1, 2)));
+    assert_eq!(from_str("{ \"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
+    assert_eq!(from_str("{\"a\""),  Err(SyntaxError(EOFWhileParsingObject, 1, 5)));
+    assert_eq!(from_str("{\"a\" "), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
+
+    assert_eq!(from_str("{\"a\" 1"),   Err(SyntaxError(ExpectedColon,         1, 6)));
+    assert_eq!(from_str("{\"a\":"),    Err(SyntaxError(EOFWhileParsingValue,  1, 6)));
+    assert_eq!(from_str("{\"a\":1"),   Err(SyntaxError(EOFWhileParsingObject, 1, 7)));
+    assert_eq!(from_str("{\"a\":1 1"), Err(SyntaxError(InvalidSyntax,         1, 8)));
+    assert_eq!(from_str("{\"a\":1,"),  Err(SyntaxError(EOFWhileParsingObject, 1, 8)));
+
+    assert_eq!(from_str("{}").unwrap(), mk_object(&[]));
+    assert_eq!(from_str("{\"a\": 3}").unwrap(),
+                mk_object(&[("a".to_string(), U64(3))]));
+
+    assert_eq!(from_str(
+                    "{ \"a\": null, \"b\" : true }").unwrap(),
+                mk_object(&[
+                    ("a".to_string(), Null),
+                    ("b".to_string(), Boolean(true))]));
+    assert_eq!(from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(),
+                mk_object(&[
+                    ("a".to_string(), Null),
+                    ("b".to_string(), Boolean(true))]));
+    assert_eq!(from_str(
+                    "{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(),
+                mk_object(&[
+                    ("a".to_string(), F64(1.0)),
+                    ("b".to_string(), Array(vec![Boolean(true)]))
+                ]));
+    assert_eq!(from_str(
+                    "{\
+                        \"a\": 1.0, \
+                        \"b\": [\
+                            true,\
+                            \"foo\\nbar\", \
+                            { \"c\": {\"d\": null} } \
+                        ]\
+                    }").unwrap(),
+                mk_object(&[
+                    ("a".to_string(), F64(1.0)),
+                    ("b".to_string(), Array(vec![
+                        Boolean(true),
+                        String("foo\nbar".to_string()),
+                        mk_object(&[
+                            ("c".to_string(), mk_object(&[("d".to_string(), Null)]))
+                        ])
+                    ]))
+                ]));
+}
+
+#[test]
+fn test_decode_struct() {
+    let s = "{
+        \"inner\": [
+            { \"a\": null, \"b\": 2, \"c\": [\"abc\", \"xyz\"] }
+        ]
+    }";
+
+    let v: Outer = json::decode(s).unwrap();
+    assert_eq!(
+        v,
+        Outer {
+            inner: vec![
+                Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] }
+            ]
+        }
+    );
+}
+
+#[derive(RustcDecodable)]
+struct FloatStruct {
+    f: f64,
+    a: Vec<f64>
+}
+#[test]
+fn test_decode_struct_with_nan() {
+    let s = "{\"f\":null,\"a\":[null,123]}";
+    let obj: FloatStruct = json::decode(s).unwrap();
+    assert!(obj.f.is_nan());
+    assert!(obj.a[0].is_nan());
+    assert_eq!(obj.a[1], 123f64);
+}
+
+#[test]
+fn test_decode_option() {
+    let value: Option<string::String> = json::decode("null").unwrap();
+    assert_eq!(value, None);
+
+    let value: Option<string::String> = json::decode("\"jodhpurs\"").unwrap();
+    assert_eq!(value, Some("jodhpurs".to_string()));
+}
+
+#[test]
+fn test_decode_enum() {
+    let value: Animal = json::decode("\"Dog\"").unwrap();
+    assert_eq!(value, Dog);
+
+    let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}";
+    let value: Animal = json::decode(s).unwrap();
+    assert_eq!(value, Frog("Henry".to_string(), 349));
+}
+
+#[test]
+fn test_decode_map() {
+    let s = "{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\
+              \"fields\":[\"Henry\", 349]}}";
+    let mut map: BTreeMap<string::String, Animal> = json::decode(s).unwrap();
+
+    assert_eq!(map.remove(&"a".to_string()), Some(Dog));
+    assert_eq!(map.remove(&"b".to_string()), Some(Frog("Henry".to_string(), 349)));
+}
+
+#[test]
+fn test_multiline_errors() {
+    assert_eq!(from_str("{\n  \"foo\":\n \"bar\""),
+        Err(SyntaxError(EOFWhileParsingObject, 3, 8)));
+}
+
+#[derive(RustcDecodable)]
+#[allow(dead_code)]
+struct DecodeStruct {
+    x: f64,
+    y: bool,
+    z: string::String,
+    w: Vec<DecodeStruct>
+}
+#[derive(RustcDecodable)]
+enum DecodeEnum {
+    A(f64),
+    B(string::String)
+}
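+// Decodes `to_parse` as `T` and asserts that decoding fails with the `expected`
+// error; panics if the input is not valid JSON or if decoding succeeds.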
+fn check_err<T: Decodable>(to_parse: &'static str, expected: DecoderError) {
+    let res: DecodeResult<T> = match from_str(to_parse) {
+        Err(e) => Err(ParseError(e)),
+        Ok(json) => Decodable::decode(&mut Decoder::new(json))
+    };
+    match res {
+        Ok(_) => panic!("`{:?}` parsed & decoded ok, expecting error `{:?}`",
+                           to_parse, expected),
+        Err(ParseError(e)) => panic!("`{:?}` is not valid json: {:?}",
+                                        to_parse, e),
+        Err(e) => {
+            assert_eq!(e, expected);
+        }
+    }
+}
+#[test]
+fn test_decode_errors_struct() {
+    check_err::<DecodeStruct>("[]", ExpectedError("Object".to_string(), "[]".to_string()));
+    check_err::<DecodeStruct>("{\"x\": true, \"y\": true, \"z\": \"\", \"w\": []}",
+                              ExpectedError("Number".to_string(), "true".to_string()));
+    check_err::<DecodeStruct>("{\"x\": 1, \"y\": [], \"z\": \"\", \"w\": []}",
+                              ExpectedError("Boolean".to_string(), "[]".to_string()));
+    check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": {}, \"w\": []}",
+                              ExpectedError("String".to_string(), "{}".to_string()));
+    check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\", \"w\": null}",
+                              ExpectedError("Array".to_string(), "null".to_string()));
+    check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\"}",
+                              MissingFieldError("w".to_string()));
+}
+#[test]
+fn test_decode_errors_enum() {
+    check_err::<DecodeEnum>("{}",
+                            MissingFieldError("variant".to_string()));
+    check_err::<DecodeEnum>("{\"variant\": 1}",
+                            ExpectedError("String".to_string(), "1".to_string()));
+    check_err::<DecodeEnum>("{\"variant\": \"A\"}",
+                            MissingFieldError("fields".to_string()));
+    check_err::<DecodeEnum>("{\"variant\": \"A\", \"fields\": null}",
+                            ExpectedError("Array".to_string(), "null".to_string()));
+    check_err::<DecodeEnum>("{\"variant\": \"C\", \"fields\": []}",
+                            UnknownVariantError("C".to_string()));
+}
+
+#[test]
+fn test_find(){
+    let json_value = from_str("{\"dog\" : \"cat\"}").unwrap();
+    let found_str = json_value.find("dog");
+    assert!(found_str.unwrap().as_string().unwrap() == "cat");
+}
+
+#[test]
+fn test_find_path(){
+    let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
+    let found_str = json_value.find_path(&["dog", "cat", "mouse"]);
+    assert!(found_str.unwrap().as_string().unwrap() == "cheese");
+}
+
+#[test]
+fn test_search(){
+    let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
+    let found_str = json_value.search("mouse").and_then(|j| j.as_string());
+    assert!(found_str.unwrap() == "cheese");
+}
+
+#[test]
+fn test_index(){
+    let json_value = from_str("{\"animals\":[\"dog\",\"cat\",\"mouse\"]}").unwrap();
+    let array = &json_value["animals"];
+    assert_eq!(array[0].as_string().unwrap(), "dog");
+    assert_eq!(array[1].as_string().unwrap(), "cat");
+    assert_eq!(array[2].as_string().unwrap(), "mouse");
+}
+
+#[test]
+fn test_is_object(){
+    let json_value = from_str("{}").unwrap();
+    assert!(json_value.is_object());
+}
+
+#[test]
+fn test_as_object(){
+    let json_value = from_str("{}").unwrap();
+    let json_object = json_value.as_object();
+    assert!(json_object.is_some());
+}
+
+#[test]
+fn test_is_array(){
+    let json_value = from_str("[1, 2, 3]").unwrap();
+    assert!(json_value.is_array());
+}
+
+#[test]
+fn test_as_array(){
+    let json_value = from_str("[1, 2, 3]").unwrap();
+    let json_array = json_value.as_array();
+    let expected_length = 3;
+    assert!(json_array.is_some() && json_array.unwrap().len() == expected_length);
+}
+
+#[test]
+fn test_is_string(){
+    let json_value = from_str("\"dog\"").unwrap();
+    assert!(json_value.is_string());
+}
+
+#[test]
+fn test_as_string(){
+    let json_value = from_str("\"dog\"").unwrap();
+    let json_str = json_value.as_string();
+    let expected_str = "dog";
+    assert_eq!(json_str, Some(expected_str));
+}
+
+#[test]
+fn test_is_number(){
+    let json_value = from_str("12").unwrap();
+    assert!(json_value.is_number());
+}
+
+#[test]
+fn test_is_i64(){
+    let json_value = from_str("-12").unwrap();
+    assert!(json_value.is_i64());
+
+    let json_value = from_str("12").unwrap();
+    assert!(!json_value.is_i64());
+
+    let json_value = from_str("12.0").unwrap();
+    assert!(!json_value.is_i64());
+}
+
+#[test]
+fn test_is_u64(){
+    let json_value = from_str("12").unwrap();
+    assert!(json_value.is_u64());
+
+    let json_value = from_str("-12").unwrap();
+    assert!(!json_value.is_u64());
+
+    let json_value = from_str("12.0").unwrap();
+    assert!(!json_value.is_u64());
+}
+
+#[test]
+fn test_is_f64(){
+    let json_value = from_str("12").unwrap();
+    assert!(!json_value.is_f64());
+
+    let json_value = from_str("-12").unwrap();
+    assert!(!json_value.is_f64());
+
+    let json_value = from_str("12.0").unwrap();
+    assert!(json_value.is_f64());
+
+    let json_value = from_str("-12.0").unwrap();
+    assert!(json_value.is_f64());
+}
+
+#[test]
+fn test_as_i64(){
+    let json_value = from_str("-12").unwrap();
+    let json_num = json_value.as_i64();
+    assert_eq!(json_num, Some(-12));
+}
+
+#[test]
+fn test_as_u64(){
+    let json_value = from_str("12").unwrap();
+    let json_num = json_value.as_u64();
+    assert_eq!(json_num, Some(12));
+}
+
+#[test]
+fn test_as_f64(){
+    let json_value = from_str("12.0").unwrap();
+    let json_num = json_value.as_f64();
+    assert_eq!(json_num, Some(12f64));
+}
+
+#[test]
+fn test_is_boolean(){
+    let json_value = from_str("false").unwrap();
+    assert!(json_value.is_boolean());
+}
+
+#[test]
+fn test_as_boolean(){
+    let json_value = from_str("false").unwrap();
+    let json_bool = json_value.as_boolean();
+    let expected_bool = false;
+    assert!(json_bool.is_some() && json_bool.unwrap() == expected_bool);
+}
+
+#[test]
+fn test_is_null(){
+    let json_value = from_str("null").unwrap();
+    assert!(json_value.is_null());
+}
+
+#[test]
+fn test_as_null(){
+    let json_value = from_str("null").unwrap();
+    let json_null = json_value.as_null();
+    let expected_null = ();
+    assert!(json_null.is_some() && json_null.unwrap() == expected_null);
+}
+
+#[test]
+fn test_encode_hashmap_with_numeric_key() {
+    use std::str::from_utf8;
+    use std::collections::HashMap;
+    let mut hm: HashMap<usize, bool> = HashMap::new();
+    hm.insert(1, true);
+    let mut mem_buf = Vec::new();
+    write!(&mut mem_buf, "{}", json::as_pretty_json(&hm)).unwrap();
+    let json_str = from_utf8(&mem_buf[..]).unwrap();
+    match from_str(json_str) {
+        Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
+        _ => {} // it parsed and we are good to go
+    }
+}
+
+#[test]
+fn test_prettyencode_hashmap_with_numeric_key() {
+    use std::str::from_utf8;
+    use std::collections::HashMap;
+    let mut hm: HashMap<usize, bool> = HashMap::new();
+    hm.insert(1, true);
+    let mut mem_buf = Vec::new();
+    write!(&mut mem_buf, "{}", json::as_pretty_json(&hm)).unwrap();
+    let json_str = from_utf8(&mem_buf[..]).unwrap();
+    match from_str(json_str) {
+        Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
+        _ => {} // it parsed and we are good to go
+    }
+}
+
+#[test]
+fn test_prettyencoder_indent_level_param() {
+    use std::str::from_utf8;
+    use std::collections::BTreeMap;
+
+    let mut tree = BTreeMap::new();
+
+    tree.insert("hello".to_string(), String("guten tag".to_string()));
+    tree.insert("goodbye".to_string(), String("sayonara".to_string()));
+
+    let json = Array(
+        // The layout below should look a lot like
+        // the pretty-printed JSON (indent * x)
+        vec!
+        ( // 0x
+            String("greetings".to_string()), // 1x
+            Object(tree), // 1x + 2x + 2x + 1x
+        ) // 0x
+        // End JSON array (7 lines)
+    );
+
+    // Helper function for counting indents
+    fn indents(source: &str) -> usize {
+        let trimmed = source.trim_start_matches(' ');
+        source.len() - trimmed.len()
+    }
+
+    // Test up to 4 spaces of indents (more?)
+    for i in 0..4 {
+        let mut writer = Vec::new();
+        write!(&mut writer, "{}",
+                json::as_pretty_json(&json).indent(i)).unwrap();
+
+        let printed = from_utf8(&writer[..]).unwrap();
+
+        // Check for indents at each line
+        let lines: Vec<&str> = printed.lines().collect();
+        assert_eq!(lines.len(), 7); // JSON should be 7 lines
+
+        assert_eq!(indents(lines[0]), 0 * i); // [
+        assert_eq!(indents(lines[1]), 1 * i); //   "greetings",
+        assert_eq!(indents(lines[2]), 1 * i); //   {
+        assert_eq!(indents(lines[3]), 2 * i); //     "goodbye": "sayonara",
+        assert_eq!(indents(lines[4]), 2 * i); //     "hello": "guten tag"
+        assert_eq!(indents(lines[5]), 1 * i); //   }
+        assert_eq!(indents(lines[6]), 0 * i); // ]
+
+        // Finally, test that the pretty-printed JSON is valid
+        from_str(printed).expect("Pretty-printed JSON is invalid!");
+    }
+}
+
+#[test]
+fn test_hashmap_with_enum_key() {
+    use std::collections::HashMap;
+    #[derive(RustcEncodable, Eq, Hash, PartialEq, RustcDecodable, Debug)]
+    enum Enum {
+        Foo,
+        #[allow(dead_code)]
+        Bar,
+    }
+    let mut map = HashMap::new();
+    map.insert(Enum::Foo, 0);
+    let result = json::encode(&map).unwrap();
+    assert_eq!(&result[..], r#"{"Foo":0}"#);
+    let decoded: HashMap<Enum, _> = json::decode(&result).unwrap();
+    assert_eq!(map, decoded);
+}
+
+#[test]
+fn test_hashmap_with_numeric_key_can_handle_double_quote_delimited_key() {
+    use std::collections::HashMap;
+    let json_str = "{\"1\":true}";
+    let json_obj = match from_str(json_str) {
+        Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
+        Ok(o) => o
+    };
+    let mut decoder = Decoder::new(json_obj);
+    let _hm: HashMap<usize, bool> = Decodable::decode(&mut decoder).unwrap();
+}
+
+#[test]
+fn test_hashmap_with_numeric_key_will_error_with_string_keys() {
+    use std::collections::HashMap;
+    let json_str = "{\"a\":true}";
+    let json_obj = match from_str(json_str) {
+        Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
+        Ok(o) => o
+    };
+    let mut decoder = Decoder::new(json_obj);
+    let result: Result<HashMap<usize, bool>, DecoderError> = Decodable::decode(&mut decoder);
+    assert_eq!(result, Err(ExpectedError("Number".to_string(), "a".to_string())));
+}
+
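+// Drives the streaming `Parser` over `src` and checks every event it emits, along
+// with the parser's stack at that point, against `expected`.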
+fn assert_stream_equal(src: &str,
+                        expected: Vec<(JsonEvent, Vec<StackElement<'_>>)>) {
+    let mut parser = Parser::new(src.chars());
+    let mut i = 0;
+    loop {
+        let evt = match parser.next() {
+            Some(e) => e,
+            None => { break; }
+        };
+        let (ref expected_evt, ref expected_stack) = expected[i];
+        if !parser.stack().is_equal_to(expected_stack) {
+            panic!("Parser stack is not equal to {:?}", expected_stack);
+        }
+        assert_eq!(&evt, expected_evt);
+        i += 1;
+    }
+}
+#[test]
+fn test_streaming_parser() {
+    assert_stream_equal(
+        r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#,
+        vec![
+            (ObjectStart,             vec![]),
+              (StringValue("bar".to_string()),   vec![StackElement::Key("foo")]),
+              (ArrayStart,            vec![StackElement::Key("array")]),
+                (U64Value(0),         vec![StackElement::Key("array"), StackElement::Index(0)]),
+                (U64Value(1),         vec![StackElement::Key("array"), StackElement::Index(1)]),
+                (U64Value(2),         vec![StackElement::Key("array"), StackElement::Index(2)]),
+                (U64Value(3),         vec![StackElement::Key("array"), StackElement::Index(3)]),
+                (U64Value(4),         vec![StackElement::Key("array"), StackElement::Index(4)]),
+                (U64Value(5),         vec![StackElement::Key("array"), StackElement::Index(5)]),
+              (ArrayEnd,              vec![StackElement::Key("array")]),
+              (ArrayStart,            vec![StackElement::Key("idents")]),
+                (NullValue,           vec![StackElement::Key("idents"),
+                                           StackElement::Index(0)]),
+                (BooleanValue(true),  vec![StackElement::Key("idents"),
+                                           StackElement::Index(1)]),
+                (BooleanValue(false), vec![StackElement::Key("idents"),
+                                           StackElement::Index(2)]),
+              (ArrayEnd,              vec![StackElement::Key("idents")]),
+            (ObjectEnd,               vec![]),
+        ]
+    );
+}
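+// Returns the last event the streaming parser produces for `src`, or `NullValue`
+// if it produces no events at all.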
+fn last_event(src: &str) -> JsonEvent {
+    let mut parser = Parser::new(src.chars());
+    let mut evt = NullValue;
+    loop {
+        evt = match parser.next() {
+            Some(e) => e,
+            None => return evt,
+        }
+    }
+}
+
+#[test]
+fn test_read_object_streaming() {
+    assert_eq!(last_event("{ "),      Error(SyntaxError(EOFWhileParsingObject, 1, 3)));
+    assert_eq!(last_event("{1"),      Error(SyntaxError(KeyMustBeAString,      1, 2)));
+    assert_eq!(last_event("{ \"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
+    assert_eq!(last_event("{\"a\""),  Error(SyntaxError(EOFWhileParsingObject, 1, 5)));
+    assert_eq!(last_event("{\"a\" "), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
+
+    assert_eq!(last_event("{\"a\" 1"),   Error(SyntaxError(ExpectedColon,         1, 6)));
+    assert_eq!(last_event("{\"a\":"),    Error(SyntaxError(EOFWhileParsingValue,  1, 6)));
+    assert_eq!(last_event("{\"a\":1"),   Error(SyntaxError(EOFWhileParsingObject, 1, 7)));
+    assert_eq!(last_event("{\"a\":1 1"), Error(SyntaxError(InvalidSyntax,         1, 8)));
+    assert_eq!(last_event("{\"a\":1,"),  Error(SyntaxError(EOFWhileParsingObject, 1, 8)));
+    assert_eq!(last_event("{\"a\":1,}"), Error(SyntaxError(TrailingComma, 1, 8)));
+
+    assert_stream_equal(
+        "{}",
+        vec![(ObjectStart, vec![]), (ObjectEnd, vec![])]
+    );
+    assert_stream_equal(
+        "{\"a\": 3}",
+        vec![
+            (ObjectStart,        vec![]),
+              (U64Value(3),      vec![StackElement::Key("a")]),
+            (ObjectEnd,          vec![]),
+        ]
+    );
+    assert_stream_equal(
+        "{ \"a\": null, \"b\" : true }",
+        vec![
+            (ObjectStart,           vec![]),
+              (NullValue,           vec![StackElement::Key("a")]),
+              (BooleanValue(true),  vec![StackElement::Key("b")]),
+            (ObjectEnd,             vec![]),
+        ]
+    );
+    assert_stream_equal(
+        "{\"a\" : 1.0 ,\"b\": [ true ]}",
+        vec![
+            (ObjectStart,           vec![]),
+              (F64Value(1.0),       vec![StackElement::Key("a")]),
+              (ArrayStart,          vec![StackElement::Key("b")]),
+                (BooleanValue(true),vec![StackElement::Key("b"), StackElement::Index(0)]),
+              (ArrayEnd,            vec![StackElement::Key("b")]),
+            (ObjectEnd,             vec![]),
+        ]
+    );
+    assert_stream_equal(
+        r#"{
+            "a": 1.0,
+            "b": [
+                true,
+                "foo\nbar",
+                { "c": {"d": null} }
+            ]
+        }"#,
+        vec![
+            (ObjectStart,                   vec![]),
+              (F64Value(1.0),               vec![StackElement::Key("a")]),
+              (ArrayStart,                  vec![StackElement::Key("b")]),
+                (BooleanValue(true),        vec![StackElement::Key("b"),
+                                                StackElement::Index(0)]),
+                (StringValue("foo\nbar".to_string()),  vec![StackElement::Key("b"),
+                                                            StackElement::Index(1)]),
+                (ObjectStart,               vec![StackElement::Key("b"),
+                                                 StackElement::Index(2)]),
+                  (ObjectStart,             vec![StackElement::Key("b"),
+                                                 StackElement::Index(2),
+                                                 StackElement::Key("c")]),
+                    (NullValue,             vec![StackElement::Key("b"),
+                                                 StackElement::Index(2),
+                                                 StackElement::Key("c"),
+                                                 StackElement::Key("d")]),
+                  (ObjectEnd,               vec![StackElement::Key("b"),
+                                                 StackElement::Index(2),
+                                                 StackElement::Key("c")]),
+                (ObjectEnd,                 vec![StackElement::Key("b"),
+                                                 StackElement::Index(2)]),
+              (ArrayEnd,                    vec![StackElement::Key("b")]),
+            (ObjectEnd,                     vec![]),
+        ]
+    );
+}
+#[test]
+fn test_read_array_streaming() {
+    assert_stream_equal(
+        "[]",
+        vec![
+            (ArrayStart, vec![]),
+            (ArrayEnd,   vec![]),
+        ]
+    );
+    assert_stream_equal(
+        "[ ]",
+        vec![
+            (ArrayStart, vec![]),
+            (ArrayEnd,   vec![]),
+        ]
+    );
+    assert_stream_equal(
+        "[true]",
+        vec![
+            (ArrayStart,             vec![]),
+                (BooleanValue(true), vec![StackElement::Index(0)]),
+            (ArrayEnd,               vec![]),
+        ]
+    );
+    assert_stream_equal(
+        "[ false ]",
+        vec![
+            (ArrayStart,              vec![]),
+                (BooleanValue(false), vec![StackElement::Index(0)]),
+            (ArrayEnd,                vec![]),
+        ]
+    );
+    assert_stream_equal(
+        "[null]",
+        vec![
+            (ArrayStart,    vec![]),
+                (NullValue, vec![StackElement::Index(0)]),
+            (ArrayEnd,      vec![]),
+        ]
+    );
+    assert_stream_equal(
+        "[3, 1]",
+        vec![
+            (ArrayStart,      vec![]),
+                (U64Value(3), vec![StackElement::Index(0)]),
+                (U64Value(1), vec![StackElement::Index(1)]),
+            (ArrayEnd,        vec![]),
+        ]
+    );
+    assert_stream_equal(
+        "\n[3, 2]\n",
+        vec![
+            (ArrayStart,      vec![]),
+                (U64Value(3), vec![StackElement::Index(0)]),
+                (U64Value(2), vec![StackElement::Index(1)]),
+            (ArrayEnd,        vec![]),
+        ]
+    );
+    assert_stream_equal(
+        "[2, [4, 1]]",
+        vec![
+            (ArrayStart,           vec![]),
+                (U64Value(2),      vec![StackElement::Index(0)]),
+                (ArrayStart,       vec![StackElement::Index(1)]),
+                    (U64Value(4),  vec![StackElement::Index(1), StackElement::Index(0)]),
+                    (U64Value(1),  vec![StackElement::Index(1), StackElement::Index(1)]),
+                (ArrayEnd,         vec![StackElement::Index(1)]),
+            (ArrayEnd,             vec![]),
+        ]
+    );
+
+    assert_eq!(last_event("["), Error(SyntaxError(EOFWhileParsingValue, 1,  2)));
+
+    assert_eq!(from_str("["),     Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
+    assert_eq!(from_str("[1"),    Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
+    assert_eq!(from_str("[1,"),   Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
+    assert_eq!(from_str("[1,]"),  Err(SyntaxError(InvalidSyntax,        1, 4)));
+    assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax,        1, 4)));
+
+}
+#[test]
+fn test_trailing_characters_streaming() {
+    assert_eq!(last_event("nulla"),  Error(SyntaxError(TrailingCharacters, 1, 5)));
+    assert_eq!(last_event("truea"),  Error(SyntaxError(TrailingCharacters, 1, 5)));
+    assert_eq!(last_event("falsea"), Error(SyntaxError(TrailingCharacters, 1, 6)));
+    assert_eq!(last_event("1a"),     Error(SyntaxError(TrailingCharacters, 1, 2)));
+    assert_eq!(last_event("[]a"),    Error(SyntaxError(TrailingCharacters, 1, 3)));
+    assert_eq!(last_event("{}a"),    Error(SyntaxError(TrailingCharacters, 1, 3)));
+}
+#[test]
+fn test_read_identifiers_streaming() {
+    assert_eq!(Parser::new("null".chars()).next(), Some(NullValue));
+    assert_eq!(Parser::new("true".chars()).next(), Some(BooleanValue(true)));
+    assert_eq!(Parser::new("false".chars()).next(), Some(BooleanValue(false)));
+
+    assert_eq!(last_event("n"),    Error(SyntaxError(InvalidSyntax, 1, 2)));
+    assert_eq!(last_event("nul"),  Error(SyntaxError(InvalidSyntax, 1, 4)));
+    assert_eq!(last_event("t"),    Error(SyntaxError(InvalidSyntax, 1, 2)));
+    assert_eq!(last_event("truz"), Error(SyntaxError(InvalidSyntax, 1, 4)));
+    assert_eq!(last_event("f"),    Error(SyntaxError(InvalidSyntax, 1, 2)));
+    assert_eq!(last_event("faz"),  Error(SyntaxError(InvalidSyntax, 1, 3)));
+}
+
+#[test]
+fn test_to_json() {
+    use std::collections::{HashMap,BTreeMap};
+    use json::ToJson;
+
+    let array2 = Array(vec![U64(1), U64(2)]);
+    let array3 = Array(vec![U64(1), U64(2), U64(3)]);
+    let object = {
+        let mut tree_map = BTreeMap::new();
+        tree_map.insert("a".to_string(), U64(1));
+        tree_map.insert("b".to_string(), U64(2));
+        Object(tree_map)
+    };
+
+    assert_eq!(array2.to_json(), array2);
+    assert_eq!(object.to_json(), object);
+    assert_eq!(3_isize.to_json(), I64(3));
+    assert_eq!(4_i8.to_json(), I64(4));
+    assert_eq!(5_i16.to_json(), I64(5));
+    assert_eq!(6_i32.to_json(), I64(6));
+    assert_eq!(7_i64.to_json(), I64(7));
+    assert_eq!(8_usize.to_json(), U64(8));
+    assert_eq!(9_u8.to_json(), U64(9));
+    assert_eq!(10_u16.to_json(), U64(10));
+    assert_eq!(11_u32.to_json(), U64(11));
+    assert_eq!(12_u64.to_json(), U64(12));
+    assert_eq!(13.0_f32.to_json(), F64(13.0_f64));
+    assert_eq!(14.0_f64.to_json(), F64(14.0_f64));
+    assert_eq!(().to_json(), Null);
+    assert_eq!(f32::INFINITY.to_json(), Null);
+    assert_eq!(f64::NAN.to_json(), Null);
+    assert_eq!(true.to_json(), Boolean(true));
+    assert_eq!(false.to_json(), Boolean(false));
+    assert_eq!("abc".to_json(), String("abc".to_string()));
+    assert_eq!("abc".to_string().to_json(), String("abc".to_string()));
+    assert_eq!((1_usize, 2_usize).to_json(), array2);
+    assert_eq!((1_usize, 2_usize, 3_usize).to_json(), array3);
+    assert_eq!([1_usize, 2_usize].to_json(), array2);
+    assert_eq!((&[1_usize, 2_usize, 3_usize]).to_json(), array3);
+    assert_eq!((vec![1_usize, 2_usize]).to_json(), array2);
+    assert_eq!(vec![1_usize, 2_usize, 3_usize].to_json(), array3);
+    let mut tree_map = BTreeMap::new();
+    tree_map.insert("a".to_string(), 1 as usize);
+    tree_map.insert("b".to_string(), 2);
+    assert_eq!(tree_map.to_json(), object);
+    let mut hash_map = HashMap::new();
+    hash_map.insert("a".to_string(), 1 as usize);
+    hash_map.insert("b".to_string(), 2);
+    assert_eq!(hash_map.to_json(), object);
+    assert_eq!(Some(15).to_json(), I64(15));
+    assert_eq!(Some(15 as usize).to_json(), U64(15));
+    assert_eq!(None::<isize>.to_json(), Null);
+}
+
+#[test]
+fn test_encode_hashmap_with_arbitrary_key() {
+    use std::collections::HashMap;
+    #[derive(PartialEq, Eq, Hash, RustcEncodable)]
+    struct ArbitraryType(usize);
+    let mut hm: HashMap<ArbitraryType, bool> = HashMap::new();
+    hm.insert(ArbitraryType(1), true);
+    let mut mem_buf = string::String::new();
+    let mut encoder = Encoder::new(&mut mem_buf);
+    let result = hm.encode(&mut encoder);
+    match result.unwrap_err() {
+        EncoderError::BadHashmapKey => (),
+        _ => panic!("expected bad hash map key")
+    }
+}
diff --git a/src/libserialize/tests/opaque.rs b/src/libserialize/tests/opaque.rs
new file mode 100644
index 0000000..fff6fc6
--- /dev/null
+++ b/src/libserialize/tests/opaque.rs
@@ -0,0 +1,282 @@
+extern crate serialize as rustc_serialize;
+
+use rustc_serialize::{Encodable, Decodable};
+use rustc_serialize::opaque::{Encoder, Decoder};
+use std::fmt::Debug;
+
+#[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
+struct Struct {
+    a: (),
+    b: u8,
+    c: u16,
+    d: u32,
+    e: u64,
+    f: usize,
+
+    g: i8,
+    h: i16,
+    i: i32,
+    j: i64,
+    k: isize,
+
+    l: char,
+    m: String,
+    n: f32,
+    o: f64,
+    p: bool,
+    q: Option<u32>,
+}
+
+
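+// Encodes every value in `values` with the opaque `Encoder`, then decodes the
+// resulting byte buffer and asserts that each value round-trips unchanged.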
+fn check_round_trip<T: Encodable + Decodable + PartialEq + Debug>(values: Vec<T>) {
+    let mut encoder = Encoder::new(Vec::new());
+
+    for value in &values {
+        Encodable::encode(&value, &mut encoder).unwrap();
+    }
+
+    let data = encoder.into_inner();
+    let mut decoder = Decoder::new(&data[..], 0);
+
+    for value in values {
+        let decoded = Decodable::decode(&mut decoder).unwrap();
+        assert_eq!(value, decoded);
+    }
+}
+
+#[test]
+fn test_unit() {
+    check_round_trip(vec![(), (), (), ()]);
+}
+
+#[test]
+fn test_u8() {
+    let mut vec = vec![];
+    for i in ::std::u8::MIN..::std::u8::MAX {
+        vec.push(i);
+    }
+    check_round_trip(vec);
+}
+
+#[test]
+fn test_u16() {
+    for i in ::std::u16::MIN..::std::u16::MAX {
+        check_round_trip(vec![1, 2, 3, i, i, i]);
+    }
+}
+
+#[test]
+fn test_u32() {
+    check_round_trip(vec![1, 2, 3, ::std::u32::MIN, 0, 1, ::std::u32::MAX, 2, 1]);
+}
+
+#[test]
+fn test_u64() {
+    check_round_trip(vec![1, 2, 3, ::std::u64::MIN, 0, 1, ::std::u64::MAX, 2, 1]);
+}
+
+#[test]
+fn test_usize() {
+    check_round_trip(vec![1, 2, 3, ::std::usize::MIN, 0, 1, ::std::usize::MAX, 2, 1]);
+}
+
+#[test]
+fn test_i8() {
+    let mut vec = vec![];
+    for i in ::std::i8::MIN..::std::i8::MAX {
+        vec.push(i);
+    }
+    check_round_trip(vec);
+}
+
+#[test]
+fn test_i16() {
+    for i in ::std::i16::MIN..::std::i16::MAX {
+        check_round_trip(vec![-1, 2, -3, i, i, i, 2]);
+    }
+}
+
+#[test]
+fn test_i32() {
+    check_round_trip(vec![-1, 2, -3, ::std::i32::MIN, 0, 1, ::std::i32::MAX, 2, 1]);
+}
+
+#[test]
+fn test_i64() {
+    check_round_trip(vec![-1, 2, -3, ::std::i64::MIN, 0, 1, ::std::i64::MAX, 2, 1]);
+}
+
+#[test]
+fn test_isize() {
+    check_round_trip(vec![-1, 2, -3, ::std::isize::MIN, 0, 1, ::std::isize::MAX, 2, 1]);
+}
+
+#[test]
+fn test_bool() {
+    check_round_trip(vec![false, true, true, false, false]);
+}
+
+#[test]
+fn test_f32() {
+    let mut vec = vec![];
+    for i in -100..100 {
+        vec.push((i as f32) / 3.0);
+    }
+    check_round_trip(vec);
+}
+
+#[test]
+fn test_f64() {
+    let mut vec = vec![];
+    for i in -100..100 {
+        vec.push((i as f64) / 3.0);
+    }
+    check_round_trip(vec);
+}
+
+#[test]
+fn test_char() {
+    let vec = vec!['a', 'b', 'c', 'd', 'A', 'X', ' ', '#', 'Ö', 'Ä', 'µ', '€'];
+    check_round_trip(vec);
+}
+
+#[test]
+fn test_string() {
+    let vec = vec!["abcbuÖeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
+                   "abcbuÖganeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
+                   "abcbuÖganeiovÄnameÜavmpßvmea€µsbpapmaebn".to_string(),
+                   "abcbuÖganeiovÄnameÜavmpßvmeabpnvapeapmaebn".to_string(),
+                   "abcbuÖganeiÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
+                   "abcbuÖganeiovÄnameÜavmpßvmea€µsbpmaebn".to_string(),
+                   "abcbuÖganeiovÄnameÜavmpßvmea€µnvapeapmaebn".to_string()];
+
+    check_round_trip(vec);
+}
+
+#[test]
+fn test_option() {
+    check_round_trip(vec![Some(-1i8)]);
+    check_round_trip(vec![Some(-2i16)]);
+    check_round_trip(vec![Some(-3i32)]);
+    check_round_trip(vec![Some(-4i64)]);
+    check_round_trip(vec![Some(-5isize)]);
+
+    let none_i8: Option<i8> = None;
+    check_round_trip(vec![none_i8]);
+
+    let none_i16: Option<i16> = None;
+    check_round_trip(vec![none_i16]);
+
+    let none_i32: Option<i32> = None;
+    check_round_trip(vec![none_i32]);
+
+    let none_i64: Option<i64> = None;
+    check_round_trip(vec![none_i64]);
+
+    let none_isize: Option<isize> = None;
+    check_round_trip(vec![none_isize]);
+}
+
+#[test]
+fn test_struct() {
+    check_round_trip(vec![Struct {
+                              a: (),
+                              b: 10,
+                              c: 11,
+                              d: 12,
+                              e: 13,
+                              f: 14,
+
+                              g: 15,
+                              h: 16,
+                              i: 17,
+                              j: 18,
+                              k: 19,
+
+                              l: 'x',
+                              m: "abc".to_string(),
+                              n: 20.5,
+                              o: 21.5,
+                              p: false,
+                              q: None,
+                          }]);
+
+    check_round_trip(vec![Struct {
+                              a: (),
+                              b: 101,
+                              c: 111,
+                              d: 121,
+                              e: 131,
+                              f: 141,
+
+                              g: -15,
+                              h: -16,
+                              i: -17,
+                              j: -18,
+                              k: -19,
+
+                              l: 'y',
+                              m: "def".to_string(),
+                              n: -20.5,
+                              o: -21.5,
+                              p: true,
+                              q: Some(1234567),
+                          }]);
+}
+
+#[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
+enum Enum {
+    Variant1,
+    Variant2(usize, f32),
+    Variant3 {
+        a: i32,
+        b: char,
+        c: bool,
+    },
+}
+
+#[test]
+fn test_enum() {
+    check_round_trip(vec![Enum::Variant1,
+                          Enum::Variant2(1, 2.5),
+                          Enum::Variant3 {
+                              a: 3,
+                              b: 'b',
+                              c: false,
+                          },
+                          Enum::Variant3 {
+                              a: -4,
+                              b: 'f',
+                              c: true,
+                          }]);
+}
+
+#[test]
+fn test_sequence() {
+    let mut vec = vec![];
+    for i in -100i64..100i64 {
+        vec.push(i * 100000);
+    }
+
+    check_round_trip(vec![vec]);
+}
+
+#[test]
+fn test_hash_map() {
+    use std::collections::HashMap;
+    let mut map = HashMap::new();
+    for i in -100i64..100i64 {
+        map.insert(i * 100000, i * 10000);
+    }
+
+    check_round_trip(vec![map]);
+}
+
+#[test]
+fn test_tuples() {
+    check_round_trip(vec![('x', (), false, 0.5f32)]);
+    check_round_trip(vec![(9i8, 10u16, 1.5f64)]);
+    check_round_trip(vec![(-12i16, 11u8, 12usize)]);
+    check_round_trip(vec![(1234567isize, 100000000000000u64, 99999999999999i64)]);
+    check_round_trip(vec![(String::new(), "some string".to_string())]);
+}
diff --git a/src/libstd/net/ip.rs b/src/libstd/net/ip.rs
index f45cd8b..4e06467 100644
--- a/src/libstd/net/ip.rs
+++ b/src/libstd/net/ip.rs
@@ -392,8 +392,7 @@
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn octets(&self) -> [u8; 4] {
-        let bits = u32::from_be(self.inner.s_addr);
-        [(bits >> 24) as u8, (bits >> 16) as u8, (bits >> 8) as u8, bits as u8]
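+        // `s_addr` is already in network (big-endian) byte order, so its raw bytes
+        // are exactly the address octets, regardless of host endianness.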
+        self.inner.s_addr.to_ne_bytes()
     }
 
     /// Returns [`true`] for the special 'unspecified' address (0.0.0.0).
diff --git a/src/libstd/sys/sgx/ext/arch.rs b/src/libstd/sys/sgx/ext/arch.rs
index 3bd87b5..97f7d91 100644
--- a/src/libstd/sys/sgx/ext/arch.rs
+++ b/src/libstd/sys/sgx/ext/arch.rs
@@ -41,7 +41,7 @@
         );
 
         match error {
-            0 => Ok(out.into_inner()),
+            0 => Ok(out.into_initialized()),
             err => Err(err),
         }
     }
@@ -69,6 +69,6 @@
               "{rdx}"(report.as_mut_ptr())
         );
 
-        report.into_inner()
+        report.into_initialized()
     }
 }
diff --git a/src/libstd/sys/unix/stdio.rs b/src/libstd/sys/unix/stdio.rs
index 8a6b7b5f..715f2ea 100644
--- a/src/libstd/sys/unix/stdio.rs
+++ b/src/libstd/sys/unix/stdio.rs
@@ -12,7 +12,7 @@
     pub fn read(&self, data: &mut [u8]) -> io::Result<usize> {
         let fd = FileDesc::new(libc::STDIN_FILENO);
         let ret = fd.read(data);
-        fd.into_raw();
+        fd.into_raw(); // do not close this FD
         ret
     }
 }
@@ -23,7 +23,7 @@
     pub fn write(&self, data: &[u8]) -> io::Result<usize> {
         let fd = FileDesc::new(libc::STDOUT_FILENO);
         let ret = fd.write(data);
-        fd.into_raw();
+        fd.into_raw(); // do not close this FD
         ret
     }
 
@@ -38,7 +38,7 @@
     pub fn write(&self, data: &[u8]) -> io::Result<usize> {
         let fd = FileDesc::new(libc::STDERR_FILENO);
         let ret = fd.write(data);
-        fd.into_raw();
+        fd.into_raw(); // do not close this FD
         ret
     }
 
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 2e3233c..d3fc1c0 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -33,6 +33,15 @@
     }
 }
 
+#[derive(Clone, Debug)]
+pub struct UnmatchedBrace {
+    pub expected_delim: token::DelimToken,
+    pub found_delim: token::DelimToken,
+    pub found_span: Span,
+    pub unclosed_span: Option<Span>,
+    pub candidate_span: Option<Span>,
+}
+
 pub struct StringReader<'a> {
     pub sess: &'a ParseSess,
     /// The absolute offset within the source_map of the next character to read
@@ -58,6 +67,7 @@
     span_src_raw: Span,
     /// Stack of open delimiters and their spans. Used for error message.
     open_braces: Vec<(token::DelimToken, Span)>,
+    crate unmatched_braces: Vec<UnmatchedBrace>,
     /// The type and spans for all braces
     ///
     /// Used only for error recovery when arriving to EOF with mismatched braces.
@@ -222,6 +232,7 @@
             span: syntax_pos::DUMMY_SP,
             span_src_raw: syntax_pos::DUMMY_SP,
             open_braces: Vec::new(),
+            unmatched_braces: Vec::new(),
             matching_delim_spans: Vec::new(),
             override_span,
             last_unclosed_found_span: None,
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index 7699d9e..0db36c8 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -1,5 +1,5 @@
 use crate::print::pprust::token_to_string;
-use crate::parse::lexer::StringReader;
+use crate::parse::lexer::{StringReader, UnmatchedBrace};
 use crate::parse::{token, PResult};
 use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
 
@@ -101,38 +101,38 @@
                     }
                     // Incorrect delimiter.
                     token::CloseDelim(other) => {
-                        let token_str = token_to_string(&self.token);
+                        let mut unclosed_delimiter = None;
+                        let mut candidate = None;
                         if self.last_unclosed_found_span != Some(self.span) {
                             // do not complain about the same unclosed delimiter multiple times
                             self.last_unclosed_found_span = Some(self.span);
-                            let msg = format!("incorrect close delimiter: `{}`", token_str);
-                            let mut err = self.sess.span_diagnostic.struct_span_err(
-                                self.span,
-                                &msg,
-                            );
-                            err.span_label(self.span, "incorrect close delimiter");
                             // This is a conservative error: only report the last unclosed
                             // delimiter. The previous unclosed delimiters could actually be
                             // closed! The parser just hasn't gotten to them yet.
                             if let Some(&(_, sp)) = self.open_braces.last() {
-                                err.span_label(sp, "un-closed delimiter");
+                                unclosed_delimiter = Some(sp);
                             };
                             if let Some(current_padding) = sm.span_to_margin(self.span) {
                                 for (brace, brace_span) in &self.open_braces {
                                     if let Some(padding) = sm.span_to_margin(*brace_span) {
                                         // high likelihood of these two corresponding
                                         if current_padding == padding && brace == &other {
-                                            err.span_label(
-                                                *brace_span,
-                                                "close delimiter possibly meant for this",
-                                            );
+                                            candidate = Some(*brace_span);
                                         }
                                     }
                                 }
                             }
-                            err.emit();
+                            let (tok, _) = self.open_braces.pop().unwrap();
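+                            // Record the mismatch so the parser can recover from it
+                            // later, or report it as an error if it never does.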
+                            self.unmatched_braces.push(UnmatchedBrace {
+                                expected_delim: tok,
+                                found_delim: other,
+                                found_span: self.span,
+                                unclosed_span: unclosed_delimiter,
+                                candidate_span: candidate,
+                            });
+                        } else {
+                            self.open_braces.pop();
                         }
-                        self.open_braces.pop().unwrap();
 
                         // If the incorrect delimiter matches an earlier opening
                         // delimiter, then don't consume it (it can be used to
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index c723d59..317d693 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -9,6 +9,7 @@
 use crate::symbol::Symbol;
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::diagnostics::plugin::ErrorMap;
+use crate::print::pprust::token_to_string;
 
 use rustc_data_structures::sync::{Lrc, Lock};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
@@ -136,15 +137,17 @@
     new_parser_from_source_str(sess, name, source).parse_inner_attributes()
 }
 
-pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &ParseSess,
-                                    override_span: Option<Span>)
-                                    -> TokenStream {
+pub fn parse_stream_from_source_str(
+    name: FileName,
+    source: String,
+    sess: &ParseSess,
+    override_span: Option<Span>,
+) -> (TokenStream, Vec<lexer::UnmatchedBrace>) {
     source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
 /// Create a new parser from a source string
-pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
-                                      -> Parser<'_> {
+pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
     panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
 }
 
@@ -195,12 +198,14 @@
 
 /// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
 /// initial token stream.
-fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
-    -> Result<Parser<'_>, Vec<Diagnostic>>
-{
+fn maybe_source_file_to_parser(
+    sess: &ParseSess,
+    source_file: Lrc<SourceFile>,
+) -> Result<Parser<'_>, Vec<Diagnostic>> {
     let end_pos = source_file.end_pos;
-    let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?);
-
+    let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
+    let mut parser = stream_to_parser(sess, stream);
+    parser.unclosed_delims = unclosed_delims;
     if parser.token == token::Eof && parser.span.is_dummy() {
         parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
     }
@@ -247,25 +252,44 @@
 }
 
 /// Given a source_file, produce a sequence of token-trees
-pub fn source_file_to_stream(sess: &ParseSess,
-                             source_file: Lrc<SourceFile>,
-                             override_span: Option<Span>) -> TokenStream {
+pub fn source_file_to_stream(
+    sess: &ParseSess,
+    source_file: Lrc<SourceFile>,
+    override_span: Option<Span>,
+) -> (TokenStream, Vec<lexer::UnmatchedBrace>) {
     panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
 }
 
 /// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
 /// parsing the token stream.
-pub fn maybe_file_to_stream(sess: &ParseSess,
-                            source_file: Lrc<SourceFile>,
-                            override_span: Option<Span>) -> Result<TokenStream, Vec<Diagnostic>> {
+pub fn maybe_file_to_stream(
+    sess: &ParseSess,
+    source_file: Lrc<SourceFile>,
+    override_span: Option<Span>,
+) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
     let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
     srdr.real_token();
 
     match srdr.parse_all_token_trees() {
-        Ok(stream) => Ok(stream),
+        Ok(stream) => Ok((stream, srdr.unmatched_braces)),
         Err(err) => {
             let mut buffer = Vec::with_capacity(1);
             err.buffer(&mut buffer);
+            // Not using `emit_unclosed_delims` here so that the errors can be buffered with `db.buffer`
+            for unmatched in srdr.unmatched_braces {
+                let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
+                    "incorrect close delimiter: `{}`",
+                    token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+                ));
+                db.span_label(unmatched.found_span, "incorrect close delimiter");
+                if let Some(sp) = unmatched.candidate_span {
+                    db.span_label(sp, "close delimiter possibly meant for this");
+                }
+                if let Some(sp) = unmatched.unclosed_span {
+                    db.span_label(sp, "un-closed delimiter");
+                }
+                db.buffer(&mut buffer);
+            }
             Err(buffer)
         }
     }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index d711458..69d6407 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -35,7 +35,7 @@
 use crate::source_map::{self, SourceMap, Spanned, respan};
 use crate::errors::{self, Applicability, DiagnosticBuilder, DiagnosticId};
 use crate::parse::{self, SeqSep, classify, token};
-use crate::parse::lexer::TokenAndSpan;
+use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use crate::parse::token::DelimToken;
 use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
@@ -251,6 +251,11 @@
     ///
     /// See the comments in the `parse_path_segment` function for more details.
     crate unmatched_angle_bracket_count: u32,
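+    /// Like `unmatched_angle_bracket_count`, but only ever incremented (or reset alongside it
+    /// when a new path starts), so it tracks the total number of `<` tokens eaten.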
+    crate max_angle_bracket_count: u32,
+    /// List of all unclosed delimiters found by the lexer. If an entry is used for error recovery
+    /// it gets removed from here. Every entry left at the end gets emitted as an independent
+    /// error.
+    crate unclosed_delims: Vec<UnmatchedBrace>,
 }
 
 
@@ -575,6 +580,8 @@
             desugar_doc_comments,
             cfg_mods: true,
             unmatched_angle_bracket_count: 0,
+            max_angle_bracket_count: 0,
+            unclosed_delims: Vec::new(),
         };
 
         let tok = parser.next_tok();
@@ -644,11 +651,11 @@
 
     /// Expect and consume the token t. Signal an error if
     /// the next token is not t.
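+    /// Returns `Ok(true)` if unclosed-delimiter recovery was performed in place of `t`, and
+    /// `Ok(false)` otherwise.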
-    pub fn expect(&mut self, t: &token::Token) -> PResult<'a,  ()> {
+    pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
         if self.expected_tokens.is_empty() {
             if self.token == *t {
                 self.bump();
-                Ok(())
+                Ok(false)
             } else {
                 let token_str = pprust::token_to_string(t);
                 let this_token_str = self.this_token_descr();
@@ -663,6 +670,12 @@
                     self.sess.source_map().next_point(self.prev_span)
                 };
                 let label_exp = format!("expected `{}`", token_str);
+                match self.recover_closing_delimiter(&[t.clone()], err) {
+                    Err(e) => err = e,
+                    Ok(recovered) => {
+                        return Ok(recovered);
+                    }
+                }
                 let cm = self.sess.source_map();
                 match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
                     (Ok(ref a), Ok(ref b)) if a.line == b.line => {
@@ -682,12 +695,64 @@
         }
     }
 
+    fn recover_closing_delimiter(
+        &mut self,
+        tokens: &[token::Token],
+        mut err: DiagnosticBuilder<'a>,
+    ) -> PResult<'a, bool> {
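+        // If one of the expected `tokens` is a closing delimiter matching an entry in
+        // `unclosed_delims`, consume that entry, emit a targeted diagnostic with a suggestion
+        // and return `Ok(true)` so the caller can continue as if the delimiter had been written;
+        // otherwise hand the original error back as `Err`.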
+        let mut pos = None;
+        // we want to use the last closing delim that would apply
+        for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
+            if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
+                && Some(self.span) > unmatched.unclosed_span
+            {
+                pos = Some(i);
+            }
+        }
+        match pos {
+            Some(pos) => {
+                // Recover and assume that the detected unclosed delimiter was meant for
+                // this location. Emit the diagnostic and act as if the delimiter was
+                // present for the parser's sake.
+
+                // Don't attempt to recover from this unclosed delimiter more than once.
+                let unmatched = self.unclosed_delims.remove(pos);
+                let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
+
+                // We want to suggest the inclusion of the closing delimiter where it makes
+                // the most sense, which is immediately after the last token:
+                //
+                //  {foo(bar {}}
+                //      -      ^
+                //      |      |
+                //      |      help: `)` may belong here (FIXME: #58270)
+                //      |
+                //      unclosed delimiter
+                if let Some(sp) = unmatched.unclosed_span {
+                    err.span_label(sp, "unclosed delimiter");
+                }
+                err.span_suggestion_short(
+                    self.sess.source_map().next_point(self.prev_span),
+                    &format!("{} may belong here", delim.to_string()),
+                    delim.to_string(),
+                    Applicability::MaybeIncorrect,
+                );
+                err.emit();
+                self.expected_tokens.clear();  // reduce errors
+                Ok(true)
+            }
+            _ => Err(err),
+        }
+    }
+
     /// Expect next token to be edible or inedible token.  If edible,
     /// then consume it; if inedible, then return without consuming
     /// anything.  Signal a fatal error if next token is unexpected.
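+    /// As with `expect`, the returned `bool` is `true` when unclosed-delimiter recovery was
+    /// performed in place of the expected token.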
-    pub fn expect_one_of(&mut self,
-                         edible: &[token::Token],
-                         inedible: &[token::Token]) -> PResult<'a,  ()>{
+    pub fn expect_one_of(
+        &mut self,
+        edible: &[token::Token],
+        inedible: &[token::Token],
+    ) -> PResult<'a, bool /* recovered */> {
         fn tokens_to_string(tokens: &[TokenType]) -> String {
             let mut i = tokens.iter();
             // This might be a sign we need a connect method on Iterator.
@@ -707,10 +772,10 @@
         }
         if edible.contains(&self.token) {
             self.bump();
-            Ok(())
+            Ok(false)
         } else if inedible.contains(&self.token) {
             // leave it in the input
-            Ok(())
+            Ok(false)
         } else {
             let mut expected = edible.iter()
                 .map(|x| TokenType::Token(x.clone()))
@@ -761,6 +826,15 @@
             } else {
                 label_sp
             };
+            let expected_tokens: Vec<_> = expected.iter().filter_map(|tt| match tt {
+                TokenType::Token(t) => Some(t.clone()),
+                _ => None,
+            }).collect();
+            match self.recover_closing_delimiter(&expected_tokens, err) {
+                Err(e) => err = e,
+                Ok(recovered) => {
+                    return Ok(recovered);
+                }
+            }
 
             let cm = self.sess.source_map();
             match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
@@ -1070,6 +1144,7 @@
         if ate {
             // See doc comment for `unmatched_angle_bracket_count`.
             self.unmatched_angle_bracket_count += 1;
+            self.max_angle_bracket_count += 1;
             debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
         }
 
@@ -1110,12 +1185,12 @@
         };
 
         match ate {
-            Some(x) => {
+            Some(_) => {
                 // See doc comment for `unmatched_angle_bracket_count`.
                 self.unmatched_angle_bracket_count -= 1;
                 debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
 
-                Ok(x)
+                Ok(())
             },
             None => self.unexpected(),
         }
@@ -1144,19 +1219,22 @@
                                   -> PResult<'a, Vec<T>> where
         F: FnMut(&mut Parser<'a>) -> PResult<'a,  T>,
     {
-        let val = self.parse_seq_to_before_end(ket, sep, f)?;
-        self.bump();
+        let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
+        if !recovered {
+            self.bump();
+        }
         Ok(val)
     }
 
     /// Parse a sequence, not including the closing delimiter. The function
     /// f must consume tokens until reaching the next separator or
     /// closing bracket.
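+    /// The second element of the returned pair reports whether unclosed-delimiter recovery
+    /// occurred; callers skip consuming the closing delimiter when it did.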
-    pub fn parse_seq_to_before_end<T, F>(&mut self,
-                                         ket: &token::Token,
-                                         sep: SeqSep,
-                                         f: F)
-                                         -> PResult<'a, Vec<T>>
+    pub fn parse_seq_to_before_end<T, F>(
+        &mut self,
+        ket: &token::Token,
+        sep: SeqSep,
+        f: F,
+    ) -> PResult<'a, (Vec<T>, bool)>
         where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
     {
         self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
@@ -1168,10 +1246,11 @@
         sep: SeqSep,
         expect: TokenExpectType,
         mut f: F,
-    ) -> PResult<'a, Vec<T>>
+    ) -> PResult<'a, (Vec<T>, bool /* recovered */)>
         where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
     {
-        let mut first: bool = true;
+        let mut first = true;
+        let mut recovered = false;
         let mut v = vec![];
         while !kets.iter().any(|k| {
                 match expect {
@@ -1187,23 +1266,30 @@
                 if first {
                     first = false;
                 } else {
-                    if let Err(mut e) = self.expect(t) {
-                        // Attempt to keep parsing if it was a similar separator
-                        if let Some(ref tokens) = t.similar_tokens() {
-                            if tokens.contains(&self.token) {
-                                self.bump();
-                            }
+                    match self.expect(t) {
+                        Ok(false) => {}
+                        Ok(true) => {
+                            recovered = true;
+                            break;
                         }
-                        e.emit();
-                        // Attempt to keep parsing if it was an omitted separator
-                        match f(self) {
-                            Ok(t) => {
-                                v.push(t);
-                                continue;
-                            },
-                            Err(mut e) => {
-                                e.cancel();
-                                break;
+                        Err(mut e) => {
+                            // Attempt to keep parsing if it was a similar separator
+                            if let Some(ref tokens) = t.similar_tokens() {
+                                if tokens.contains(&self.token) {
+                                    self.bump();
+                                }
+                            }
+                            e.emit();
+                            // Attempt to keep parsing if it was an omitted separator
+                            match f(self) {
+                                Ok(t) => {
+                                    v.push(t);
+                                    continue;
+                                },
+                                Err(mut e) => {
+                                    e.cancel();
+                                    break;
+                                }
                             }
                         }
                     }
@@ -1222,23 +1308,26 @@
             v.push(t);
         }
 
-        Ok(v)
+        Ok((v, recovered))
     }
 
     /// Parse a sequence, including the closing delimiter. The function
     /// f must consume tokens until reaching the next separator or
     /// closing bracket.
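+    /// If the sequence ended through unclosed-delimiter recovery, the (missing) closing
+    /// delimiter is not consumed here.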
-    fn parse_unspanned_seq<T, F>(&mut self,
-                                     bra: &token::Token,
-                                     ket: &token::Token,
-                                     sep: SeqSep,
-                                     f: F)
-                                     -> PResult<'a, Vec<T>> where
+    fn parse_unspanned_seq<T, F>(
+        &mut self,
+        bra: &token::Token,
+        ket: &token::Token,
+        sep: SeqSep,
+        f: F,
+    ) -> PResult<'a, Vec<T>> where
         F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     {
         self.expect(bra)?;
-        let result = self.parse_seq_to_before_end(ket, sep, f)?;
-        self.eat(ket);
+        let (result, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
+        if !recovered {
+            self.eat(ket);
+        }
         Ok(result)
     }
 
@@ -2290,7 +2379,10 @@
             // We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
             // it isn't, then we reset the unmatched angle bracket count as we're about to start
             // parsing a new path.
-            if style == PathStyle::Expr { self.unmatched_angle_bracket_count = 0; }
+            if style == PathStyle::Expr {
+                self.unmatched_angle_bracket_count = 0;
+                self.max_angle_bracket_count = 0;
+            }
 
             let args = if self.eat_lt() {
                 // `<'a, T, A = U>`
@@ -2302,12 +2394,14 @@
             } else {
                 // `(T, U) -> R`
                 self.bump(); // `(`
-                let inputs = self.parse_seq_to_before_tokens(
+                let (inputs, recovered) = self.parse_seq_to_before_tokens(
                     &[&token::CloseDelim(token::Paren)],
                     SeqSep::trailing_allowed(token::Comma),
                     TokenExpectType::Expect,
                     |p| p.parse_ty())?;
-                self.bump(); // `)`
+                if !recovered {
+                    self.bump(); // `)`
+                }
                 let span = lo.to(self.prev_span);
                 let output = if self.eat(&token::RArrow) {
                     Some(self.parse_ty_common(false, false)?)
@@ -2513,9 +2607,13 @@
                 // (e,) is a tuple with only one field, e
                 let mut es = vec![];
                 let mut trailing_comma = false;
+                let mut recovered = false;
                 while self.token != token::CloseDelim(token::Paren) {
                     es.push(self.parse_expr()?);
-                    self.expect_one_of(&[], &[token::Comma, token::CloseDelim(token::Paren)])?;
+                    recovered = self.expect_one_of(
+                        &[],
+                        &[token::Comma, token::CloseDelim(token::Paren)],
+                    )?;
                     if self.eat(&token::Comma) {
                         trailing_comma = true;
                     } else {
@@ -2523,7 +2621,9 @@
                         break;
                     }
                 }
-                self.bump();
+                if !recovered {
+                    self.bump();
+                }
 
                 hi = self.prev_span;
                 ex = if es.len() == 1 && !trailing_comma {
@@ -2720,6 +2820,21 @@
                     hi = pth.span;
                     ex = ExprKind::Path(None, pth);
                 } else {
+                    if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
+                        // Don't complain about bare semicolons after unclosed-brace
+                        // recovery, in order to keep the error count down. Fixing the
+                        // delimiters will possibly also fix the bare semicolon found in
+                        // expression context. For example, silence the following error:
+                        // ```
+                        // error: expected expression, found `;`
+                        //  --> file.rs:2:13
+                        //   |
+                        // 2 |     foo(bar(;
+                        //   |             ^ expected expression
+                        // ```
+                        self.bump();
+                        return Ok(self.mk_expr(self.span, ExprKind::Err, ThinVec::new()));
+                    }
                     match self.parse_literal_maybe_minus() {
                         Ok(expr) => {
                             hi = expr.span;
@@ -2819,7 +2934,7 @@
 
             match self.expect_one_of(&[token::Comma],
                                      &[token::CloseDelim(token::Brace)]) {
-                Ok(()) => if let Some(f) = parsed_field.or(recovery_field) {
+                Ok(_) => if let Some(f) = parsed_field.or(recovery_field) {
                     // only include the field if there's no parse error for the field name
                     fields.push(f);
                 }
@@ -5939,7 +6054,7 @@
 
         let sp = self.span;
         let mut variadic = false;
-        let args: Vec<Option<Arg>> =
+        let (args, recovered): (Vec<Option<Arg>>, bool) =
             self.parse_seq_to_before_end(
                 &token::CloseDelim(token::Paren),
                 SeqSep::trailing_allowed(token::Comma),
@@ -5987,7 +6102,9 @@
                 }
             )?;
 
-        self.eat(&token::CloseDelim(token::Paren));
+        if !recovered {
+            self.eat(&token::CloseDelim(token::Paren));
+        }
 
         let args: Vec<_> = args.into_iter().filter_map(|x| x).collect();
 
@@ -6132,15 +6249,15 @@
 
         // Parse the rest of the function parameter list.
         let sep = SeqSep::trailing_allowed(token::Comma);
-        let fn_inputs = if let Some(self_arg) = self_arg {
+        let (fn_inputs, recovered) = if let Some(self_arg) = self_arg {
             if self.check(&token::CloseDelim(token::Paren)) {
-                vec![self_arg]
+                (vec![self_arg], false)
             } else if self.eat(&token::Comma) {
                 let mut fn_inputs = vec![self_arg];
-                fn_inputs.append(&mut self.parse_seq_to_before_end(
-                    &token::CloseDelim(token::Paren), sep, parse_arg_fn)?
-                );
-                fn_inputs
+                let (mut input, recovered) = self.parse_seq_to_before_end(
+                    &token::CloseDelim(token::Paren), sep, parse_arg_fn)?;
+                fn_inputs.append(&mut input);
+                (fn_inputs, recovered)
             } else {
                 return self.unexpected();
             }
@@ -6148,8 +6265,10 @@
             self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)?
         };
 
-        // Parse closing paren and return type.
-        self.expect(&token::CloseDelim(token::Paren))?;
+        if !recovered {
+            // Parse closing paren and return type.
+            self.expect(&token::CloseDelim(token::Paren))?;
+        }
         Ok(P(FnDecl {
             inputs: fn_inputs,
             output: self.parse_ret_ty(true)?,
@@ -6169,7 +6288,7 @@
                     SeqSep::trailing_allowed(token::Comma),
                     TokenExpectType::NoExpect,
                     |p| p.parse_fn_block_arg()
-                )?;
+                )?.0;
                 self.expect_or()?;
                 args
             }
@@ -8168,7 +8287,7 @@
             // eat a matched-delimiter token tree:
             let (delim, tts) = self.expect_delimited_token_tree()?;
             if delim != MacDelimiter::Brace {
-                self.expect(&token::Semi)?
+                self.expect(&token::Semi)?;
             }
 
             Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts, delim })))
@@ -8313,11 +8432,14 @@
     /// entry point for the parser.
     pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
         let lo = self.span;
-        Ok(ast::Crate {
+        let krate = Ok(ast::Crate {
             attrs: self.parse_inner_attributes()?,
             module: self.parse_mod_items(&token::Eof, lo)?,
             span: lo.to(self.span),
-        })
+        });
+        emit_unclosed_delims(&self.unclosed_delims, self.diagnostic());
+        self.unclosed_delims.clear();
+        krate
     }
 
     pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
@@ -8346,3 +8468,20 @@
         }
     }
 }
+
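+/// Emits an "incorrect close delimiter" error for each unclosed delimiter reported by the lexer
+/// that no parser recovery consumed, labelling the candidate and unclosed spans when available.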
+pub fn emit_unclosed_delims(unclosed_delims: &[UnmatchedBrace], handler: &errors::Handler) {
+    for unmatched in unclosed_delims {
+        let mut err = handler.struct_span_err(unmatched.found_span, &format!(
+            "incorrect close delimiter: `{}`",
+            pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+        ));
+        err.span_label(unmatched.found_span, "incorrect close delimiter");
+        if let Some(sp) = unmatched.candidate_span {
+            err.span_label(sp, "close delimiter possibly meant for this");
+        }
+        if let Some(sp) = unmatched.unclosed_span {
+            err.span_label(sp, "un-closed delimiter");
+        }
+        err.emit();
+    }
+}
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index d5856c6..09924e3 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -10,6 +10,7 @@
 use crate::ptr::P;
 use crate::symbol::keywords;
 use crate::syntax::parse::parse_stream_from_source_str;
+use crate::syntax::parse::parser::emit_unclosed_delims;
 use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
 
 use serialize::{Decodable, Decoder, Encodable, Encoder};
@@ -501,8 +502,8 @@
     /// Enables better error recovery when the wrong token is found.
     crate fn similar_tokens(&self) -> Option<Vec<Token>> {
         match *self {
-            Comma => Some(vec![Dot, Lt]),
-            Semi => Some(vec![Colon]),
+            Comma => Some(vec![Dot, Lt, Semi]),
+            Semi => Some(vec![Colon, Comma]),
             _ => None
         }
     }
@@ -559,7 +560,10 @@
             // FIXME(#43081): Avoid this pretty-print + reparse hack
             let source = pprust::token_to_string(self);
             let filename = FileName::macro_expansion_source_code(&source);
-            parse_stream_from_source_str(filename, source, sess, Some(span))
+            let (tokens, errors) = parse_stream_from_source_str(
+                filename, source, sess, Some(span));
+            emit_unclosed_delims(&errors, &sess.span_diagnostic);
+            tokens
         });
 
         // During early phases of the compiler the AST could get modified
@@ -800,12 +804,13 @@
         let source = pprust::attr_to_string(attr);
         let macro_filename = FileName::macro_expansion_source_code(&source);
         if attr.is_sugared_doc {
-            let stream = parse_stream_from_source_str(
+            let (stream, errors) = parse_stream_from_source_str(
                 macro_filename,
                 source,
                 sess,
                 Some(span),
             );
+            emit_unclosed_delims(&errors, &sess.span_diagnostic);
             builder.push(stream);
             continue
         }
@@ -822,12 +827,13 @@
         // ... and for more complicated paths, fall back to a reparse hack that
         // should eventually be removed.
         } else {
-            let stream = parse_stream_from_source_str(
+            let (stream, errors) = parse_stream_from_source_str(
                 macro_filename,
                 source,
                 sess,
                 Some(span),
             );
+            emit_unclosed_delims(&errors, &sess.span_diagnostic);
             brackets.push(stream);
         }
 
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index dbe2b8d..bcf1da6 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -12,8 +12,11 @@
 /// Map a string to tts, using a made-up filename:
 pub fn string_to_stream(source_str: String) -> TokenStream {
     let ps = ParseSess::new(FilePathMapping::empty());
-    source_file_to_stream(&ps, ps.source_map()
-                             .new_source_file(PathBuf::from("bogofile").into(), source_str), None)
+    source_file_to_stream(
+        &ps,
+        ps.source_map().new_source_file(PathBuf::from("bogofile").into(), source_str),
+        None,
+    ).0
 }
 
 /// Map string to parser (via tts)
diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs
index 7302626..2158cfc 100644
--- a/src/libsyntax_ext/proc_macro_server.rs
+++ b/src/libsyntax_ext/proc_macro_server.rs
@@ -12,6 +12,7 @@
 use syntax::ext::base::ExtCtxt;
 use syntax::parse::lexer::comments;
 use syntax::parse::{self, token, ParseSess};
+use syntax::parse::parser::emit_unclosed_delims;
 use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
 use syntax_pos::hygiene::{SyntaxContext, Transparency};
 use syntax_pos::symbol::{keywords, Symbol};
@@ -409,12 +410,14 @@
         stream.is_empty()
     }
     fn from_str(&mut self, src: &str) -> Self::TokenStream {
-        parse::parse_stream_from_source_str(
+        let (tokens, errors) = parse::parse_stream_from_source_str(
             FileName::proc_macro_source_code(src.clone()),
             src.to_string(),
             self.sess,
             Some(self.call_site),
-        )
+        );
+        emit_unclosed_delims(&errors, &self.sess.span_diagnostic);
+        tokens
     }
     fn to_string(&mut self, stream: &Self::TokenStream) -> String {
         stream.to_string()
diff --git a/src/libterm/Cargo.toml b/src/libterm/Cargo.toml
index 8021e81..4eba9a9 100644
--- a/src/libterm/Cargo.toml
+++ b/src/libterm/Cargo.toml
@@ -2,6 +2,7 @@
 authors = ["The Rust Project Developers"]
 name = "term"
 version = "0.0.0"
+edition = "2018"
 
 [lib]
 name = "term"
diff --git a/src/libterm/lib.rs b/src/libterm/lib.rs
index 4d31262..caca9fa 100644
--- a/src/libterm/lib.rs
+++ b/src/libterm/lib.rs
@@ -35,20 +35,20 @@
        test(attr(deny(warnings))))]
 #![deny(missing_docs)]
 
+#![deny(rust_2018_idioms)]
+
 #![cfg_attr(windows, feature(libc))]
 // Handle rustfmt skips
 #![feature(custom_attribute)]
-#![feature(nll)]
 #![allow(unused_attributes)]
 
 use std::io::prelude::*;
+use std::io::{self, Stdout, Stderr};
 
 pub use terminfo::TerminfoTerminal;
 #[cfg(windows)]
 pub use win::WinConsole;
 
-use std::io::{self, Stdout, Stderr};
-
 pub mod terminfo;
 
 #[cfg(windows)]
diff --git a/src/libterm/terminfo/mod.rs b/src/libterm/terminfo/mod.rs
index eaa96df..4c3b0b1 100644
--- a/src/libterm/terminfo/mod.rs
+++ b/src/libterm/terminfo/mod.rs
@@ -5,18 +5,16 @@
 use std::error;
 use std::fmt;
 use std::fs::File;
-use std::io::prelude::*;
-use std::io;
-use std::io::BufReader;
+use std::io::{self, prelude::*, BufReader};
 use std::path::Path;
 
-use Attr;
-use color;
-use Terminal;
-use self::searcher::get_dbpath_for_term;
-use self::parser::compiled::{parse, msys_terminfo};
-use self::parm::{expand, Variables, Param};
+use crate::Attr;
+use crate::color;
+use crate::Terminal;
 
+use searcher::get_dbpath_for_term;
+use parser::compiled::{parse, msys_terminfo};
+use parm::{expand, Variables, Param};
 
 /// A parsed terminfo database entry.
 #[derive(Debug)]
@@ -49,7 +47,7 @@
     }
 
     fn cause(&self) -> Option<&dyn error::Error> {
-        use self::Error::*;
+        use Error::*;
         match *self {
             IoError(ref e) => Some(e),
             _ => None,
@@ -58,8 +56,8 @@
 }
 
 impl fmt::Display for Error {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        use self::Error::*;
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        use Error::*;
         match *self {
             TermUnset => Ok(()),
             MalformedTerminfo(ref e) => e.fmt(f),
diff --git a/src/libterm/terminfo/parm.rs b/src/libterm/terminfo/parm.rs
index 434dd4a..4e81b8a 100644
--- a/src/libterm/terminfo/parm.rs
+++ b/src/libterm/terminfo/parm.rs
@@ -40,23 +40,27 @@
 /// Container for static and dynamic variable arrays
 pub struct Variables {
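+    // `dyn` is a strict keyword in the 2018 edition, hence the `sta_va`/`dyn_va` field names.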
     /// Static variables A-Z
-    sta: [Param; 26],
+    sta_va: [Param; 26],
     /// Dynamic variables a-z
-    dyn: [Param; 26],
+    dyn_va: [Param; 26],
 }
 
 impl Variables {
     /// Return a new zero-initialized Variables
     pub fn new() -> Variables {
         Variables {
-            sta: [Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
-                  Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
-                  Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
-                  Number(0), Number(0), Number(0), Number(0), Number(0)],
-            dyn: [Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
-                  Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
-                  Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
-                  Number(0), Number(0), Number(0), Number(0), Number(0)],
+            sta_va: [
+                Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+                Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+                Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+                Number(0), Number(0), Number(0), Number(0), Number(0)
+            ],
+            dyn_va: [
+                Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+                Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+                Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+                Number(0), Number(0), Number(0), Number(0), Number(0)
+            ],
         }
     }
 }
@@ -249,14 +253,14 @@
                 if cur >= 'A' && cur <= 'Z' {
                     if let Some(arg) = stack.pop() {
                         let idx = (cur as u8) - b'A';
-                        vars.sta[idx as usize] = arg;
+                        vars.sta_va[idx as usize] = arg;
                     } else {
                         return Err("stack is empty".to_string());
                     }
                 } else if cur >= 'a' && cur <= 'z' {
                     if let Some(arg) = stack.pop() {
                         let idx = (cur as u8) - b'a';
-                        vars.dyn[idx as usize] = arg;
+                        vars.dyn_va[idx as usize] = arg;
                     } else {
                         return Err("stack is empty".to_string());
                     }
@@ -267,10 +271,10 @@
             GetVar => {
                 if cur >= 'A' && cur <= 'Z' {
                     let idx = (cur as u8) - b'A';
-                    stack.push(vars.sta[idx as usize].clone());
+                    stack.push(vars.sta_va[idx as usize].clone());
                 } else if cur >= 'a' && cur <= 'z' {
                     let idx = (cur as u8) - b'a';
-                    stack.push(vars.dyn[idx as usize].clone());
+                    stack.push(vars.dyn_va[idx as usize].clone());
                 } else {
                     return Err("bad variable name in %g".to_string());
                 }
diff --git a/src/libterm/terminfo/parser/compiled.rs b/src/libterm/terminfo/parser/compiled.rs
index 63d0183..b7b9ce8 100644
--- a/src/libterm/terminfo/parser/compiled.rs
+++ b/src/libterm/terminfo/parser/compiled.rs
@@ -3,14 +3,14 @@
 //! ncurses-compatible compiled terminfo format parsing (term(5))
 
 use std::collections::HashMap;
-use std::io::prelude::*;
 use std::io;
+use std::io::prelude::*;
 use super::super::TermInfo;
 
 // These are the orders ncurses uses in its compiled format (as of 5.9). Not sure if portable.
 
 #[rustfmt_skip]
-pub static boolfnames: &'static[&'static str] = &["auto_left_margin", "auto_right_margin",
+pub static boolfnames: &[&str] = &["auto_left_margin", "auto_right_margin",
     "no_esc_ctlc", "ceol_standout_glitch", "eat_newline_glitch", "erase_overstrike", "generic_type",
     "hard_copy", "has_meta_key", "has_status_line", "insert_null_glitch", "memory_above",
     "memory_below", "move_insert_mode", "move_standout_mode", "over_strike", "status_line_esc_ok",
@@ -23,13 +23,13 @@
     "return_does_clr_eol"];
 
 #[rustfmt_skip]
-pub static boolnames: &'static[&'static str] = &["bw", "am", "xsb", "xhp", "xenl", "eo",
+pub static boolnames: &[&str] = &["bw", "am", "xsb", "xhp", "xenl", "eo",
     "gn", "hc", "km", "hs", "in", "db", "da", "mir", "msgr", "os", "eslok", "xt", "hz", "ul", "xon",
     "nxon", "mc5i", "chts", "nrrmc", "npc", "ndscr", "ccc", "bce", "hls", "xhpa", "crxm", "daisy",
     "xvpa", "sam", "cpix", "lpix", "OTbs", "OTns", "OTnc", "OTMT", "OTNL", "OTpt", "OTxr"];
 
 #[rustfmt_skip]
-pub static numfnames: &'static[&'static str] = &[ "columns", "init_tabs", "lines",
+pub static numfnames: &[&str] = &[ "columns", "init_tabs", "lines",
     "lines_of_memory", "magic_cookie_glitch", "padding_baud_rate", "virtual_terminal",
     "width_status_line", "num_labels", "label_height", "label_width", "max_attributes",
     "maximum_windows", "max_colors", "max_pairs", "no_color_video", "buffer_capacity",
@@ -40,13 +40,13 @@
     "new_line_delay", "backspace_delay", "horizontal_tab_delay", "number_of_function_keys"];
 
 #[rustfmt_skip]
-pub static numnames: &'static[&'static str] = &[ "cols", "it", "lines", "lm", "xmc", "pb",
+pub static numnames: &[&str] = &[ "cols", "it", "lines", "lm", "xmc", "pb",
     "vt", "wsl", "nlab", "lh", "lw", "ma", "wnum", "colors", "pairs", "ncv", "bufsz", "spinv",
     "spinh", "maddr", "mjump", "mcs", "mls", "npins", "orc", "orl", "orhi", "orvi", "cps", "widcs",
     "btns", "bitwin", "bitype", "UTug", "OTdC", "OTdN", "OTdB", "OTdT", "OTkn"];
 
 #[rustfmt_skip]
-pub static stringfnames: &'static[&'static str] = &[ "back_tab", "bell", "carriage_return",
+pub static stringfnames: &[&str] = &[ "back_tab", "bell", "carriage_return",
     "change_scroll_region", "clear_all_tabs", "clear_screen", "clr_eol", "clr_eos",
     "column_address", "command_character", "cursor_address", "cursor_down", "cursor_home",
     "cursor_invisible", "cursor_left", "cursor_mem_address", "cursor_normal", "cursor_right",
@@ -120,7 +120,7 @@
     "acs_plus", "memory_lock", "memory_unlock", "box_chars_1"];
 
 #[rustfmt_skip]
-pub static stringnames: &'static[&'static str] = &[ "cbt", "_", "cr", "csr", "tbc", "clear",
+pub static stringnames: &[&str] = &[ "cbt", "_", "cr", "csr", "tbc", "clear",
     "_", "_", "hpa", "cmdch", "cup", "cud1", "home", "civis", "cub1", "mrcup", "cnorm", "cuf1",
     "ll", "cuu1", "cvvis", "dch1", "dl1", "dsl", "hd", "smacs", "blink", "bold", "smcup", "smdc",
     "dim", "smir", "invis", "prot", "rev", "smso", "smul", "ech", "rmacs", "sgr0", "rmcup", "rmdc",
diff --git a/src/libterm/win.rs b/src/libterm/win.rs
index 25b03ba..1051ddf 100644
--- a/src/libterm/win.rs
+++ b/src/libterm/win.rs
@@ -7,9 +7,9 @@
 use std::io;
 use std::io::prelude::*;
 
-use Attr;
-use color;
-use Terminal;
+use crate::Attr;
+use crate::color;
+use crate::Terminal;
 
 /// A Terminal implementation which uses the Win32 Console API.
 pub struct WinConsole<T> {
diff --git a/src/test/codegen/box-maybe-uninit.rs b/src/test/codegen/box-maybe-uninit.rs
index a7fb74c..ad1d259 100644
--- a/src/test/codegen/box-maybe-uninit.rs
+++ b/src/test/codegen/box-maybe-uninit.rs
@@ -9,5 +9,8 @@
 pub fn box_uninitialized() -> Box<MaybeUninit<usize>> {
     // CHECK-LABEL: @box_uninitialized
     // CHECK-NOT: store
+    // CHECK-NOT: alloca
+    // CHECK-NOT: memcpy
+    // CHECK-NOT: memset
     Box::new(MaybeUninit::uninitialized())
 }
diff --git a/src/test/incremental/hashes/call_expressions.rs b/src/test/incremental/hashes/call_expressions.rs
index 52de065..f0f1f09 100644
--- a/src/test/incremental/hashes/call_expressions.rs
+++ b/src/test/incremental/hashes/call_expressions.rs
@@ -25,7 +25,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_callee_function() {
     callee2(1, 2)
@@ -40,7 +40,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_argument_function() {
     callee1(1, 3)
@@ -81,7 +81,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_callee_method() {
     let s = Struct;
@@ -98,7 +98,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_argument_method() {
     let s = Struct;
@@ -115,7 +115,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_ufcs_callee_method() {
     let s = Struct;
@@ -132,7 +132,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_argument_method_ufcs() {
     let s = Struct;
@@ -149,7 +149,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 // One might think this would be expanded in the HirBody/Mir, but it actually
 // results in slightly different Hir/Mir.
@@ -171,7 +171,7 @@
     #[cfg(not(cfail1))]
     use super::Struct2 as Struct;
 
-    #[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+    #[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
     #[rustc_clean(cfg="cfail3")]
 
 
diff --git a/src/test/incremental/hashes/closure_expressions.rs b/src/test/incremental/hashes/closure_expressions.rs
index 0e8cf80..4e82729 100644
--- a/src/test/incremental/hashes/closure_expressions.rs
+++ b/src/test/incremental/hashes/closure_expressions.rs
@@ -37,7 +37,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized, TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized, TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_parameter() {
     let x = 0u32;
@@ -53,7 +53,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_parameter_pattern() {
     let _ = |&x: &u32| x;
@@ -84,7 +84,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_type_ascription_to_parameter() {
     let closure = |x: u32| x + 1u32;
@@ -101,7 +101,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized, TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized, TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_parameter_type() {
     let closure = |x: u16| (x as u64) + 1;
diff --git a/src/test/incremental/hashes/enum_constructors.rs b/src/test/incremental/hashes/enum_constructors.rs
index e9b557b..a74c3ab 100644
--- a/src/test/incremental/hashes/enum_constructors.rs
+++ b/src/test/incremental/hashes/enum_constructors.rs
@@ -34,7 +34,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_field_value_struct_like() -> Enum {
     Enum::Struct {
@@ -96,7 +96,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated,TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_constructor_path_struct_like() {
     let _ = Enum2::Struct {
@@ -119,7 +119,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_constructor_variant_struct_like() {
     let _ = Enum2::Struct2 {
@@ -139,7 +139,7 @@
 
     #[rustc_clean(
         cfg="cfail2",
-        except="FnSignature,Hir,HirBody,MirOptimized,MirValidated,\
+        except="FnSignature,Hir,HirBody,MirOptimized,MirBuilt,\
                 TypeckTables"
     )]
     #[rustc_clean(cfg="cfail3")]
@@ -161,7 +161,7 @@
     #[cfg(not(cfail1))]
     use super::Enum2::Struct2 as Variant;
 
-    #[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated")]
+    #[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt")]
     #[rustc_clean(cfg="cfail3")]
     pub fn function() -> Enum2 {
         Variant {
@@ -180,7 +180,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_field_value_tuple_like() -> Enum {
     Enum::Tuple(0, 1, 3)
@@ -197,7 +197,7 @@
 #[cfg(not(cfail1))]
 #[rustc_clean(
     cfg="cfail2",
-    except="HirBody,MirOptimized,MirValidated,TypeckTables"
+    except="HirBody,MirOptimized,MirBuilt,TypeckTables"
 )]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_constructor_path_tuple_like() {
@@ -215,7 +215,7 @@
 #[cfg(not(cfail1))]
 #[rustc_clean(
     cfg="cfail2",
-    except="HirBody,MirOptimized,MirValidated,TypeckTables"
+    except="HirBody,MirOptimized,MirBuilt,TypeckTables"
 )]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_constructor_variant_tuple_like() {
@@ -232,7 +232,7 @@
 
     #[rustc_clean(
         cfg="cfail2",
-        except="FnSignature,Hir,HirBody,MirOptimized,MirValidated,\
+        except="FnSignature,Hir,HirBody,MirOptimized,MirBuilt,\
                 TypeckTables"
     )]
     #[rustc_clean(cfg="cfail3")]
@@ -251,7 +251,7 @@
     #[cfg(not(cfail1))]
     use super::Enum2::Tuple2 as Variant;
 
-    #[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated,TypeckTables")]
+    #[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt,TypeckTables")]
     #[rustc_clean(cfg="cfail3")]
     pub fn function() -> Enum2 {
         Variant(0, 1, 2)
@@ -278,7 +278,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated,TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_constructor_path_c_like() {
     let _ = Clike2::B;
@@ -293,7 +293,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_constructor_variant_c_like() {
     let _ = Clike::C;
@@ -309,7 +309,7 @@
 
     #[rustc_clean(
         cfg="cfail2",
-        except="FnSignature,Hir,HirBody,MirOptimized,MirValidated,\
+        except="FnSignature,Hir,HirBody,MirOptimized,MirBuilt,\
                 TypeckTables"
     )]
     #[rustc_clean(cfg="cfail3")]
@@ -328,7 +328,7 @@
     #[cfg(not(cfail1))]
     use super::Clike::B as Variant;
 
-    #[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated")]
+    #[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt")]
     #[rustc_clean(cfg="cfail3")]
     pub fn function() -> Clike {
         Variant
diff --git a/src/test/incremental/hashes/exported_vs_not.rs b/src/test/incremental/hashes/exported_vs_not.rs
index 1880dd2..c9f844f 100644
--- a/src/test/incremental/hashes/exported_vs_not.rs
+++ b/src/test/incremental/hashes/exported_vs_not.rs
@@ -16,7 +16,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn body_not_exported_to_metadata() -> u32 {
     2
@@ -35,7 +35,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 #[inline]
 pub fn body_exported_to_metadata_because_of_inline() -> u32 {
@@ -55,7 +55,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 #[inline]
 pub fn body_exported_to_metadata_because_of_generic() -> u32 {
diff --git a/src/test/incremental/hashes/for_loops.rs b/src/test/incremental/hashes/for_loops.rs
index 90c1ecf..da093de 100644
--- a/src/test/incremental/hashes/for_loops.rs
+++ b/src/test/incremental/hashes/for_loops.rs
@@ -25,7 +25,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_loop_body() {
     let mut _x = 0;
@@ -48,7 +48,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_iteration_variable_name() {
     let mut _x = 0;
@@ -71,7 +71,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized, TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized, TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_iteration_variable_pattern() {
     let mut _x = 0;
@@ -94,7 +94,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_iterable() {
     let mut _x = 0;
@@ -116,7 +116,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized, TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized, TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_break() {
     let mut _x = 0;
@@ -187,7 +187,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_break_label() {
     let mut _x = 0;
@@ -237,7 +237,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_continue_label() {
     let mut _x = 0;
@@ -262,7 +262,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_continue_to_break() {
     let mut _x = 0;
diff --git a/src/test/incremental/hashes/function_interfaces.rs b/src/test/incremental/hashes/function_interfaces.rs
index 21263c8..4330b00 100644
--- a/src/test/incremental/hashes/function_interfaces.rs
+++ b/src/test/incremental/hashes/function_interfaces.rs
@@ -24,7 +24,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg = "cfail2",
-              except = "Hir, HirBody, MirValidated, MirOptimized, TypeckTables, FnSignature")]
+              except = "Hir, HirBody, MirBuilt, MirOptimized, TypeckTables, FnSignature")]
 #[rustc_clean(cfg = "cfail3")]
 pub fn add_parameter(p: i32) {}
 
@@ -47,7 +47,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg = "cfail2",
-              except = "Hir, HirBody, MirValidated, MirOptimized, TypeckTables, FnSignature")]
+              except = "Hir, HirBody, MirBuilt, MirOptimized, TypeckTables, FnSignature")]
 #[rustc_clean(cfg = "cfail3")]
 pub fn type_of_parameter(p: i64) {}
 
@@ -59,7 +59,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg = "cfail2",
-              except = "Hir, HirBody, MirValidated, MirOptimized, TypeckTables, FnSignature")]
+              except = "Hir, HirBody, MirBuilt, MirOptimized, TypeckTables, FnSignature")]
 #[rustc_clean(cfg = "cfail3")]
 pub fn type_of_parameter_ref(p: &mut i32) {}
 
@@ -71,7 +71,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg = "cfail2",
-              except = "Hir, HirBody, MirValidated, MirOptimized, TypeckTables, FnSignature")]
+              except = "Hir, HirBody, MirBuilt, MirOptimized, TypeckTables, FnSignature")]
 #[rustc_clean(cfg = "cfail3")]
 pub fn order_of_parameters(p2: i64, p1: i32) {}
 
@@ -83,7 +83,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg = "cfail2",
-              except = "Hir, HirBody, MirValidated, MirOptimized, TypeckTables, FnSignature")]
+              except = "Hir, HirBody, MirBuilt, MirOptimized, TypeckTables, FnSignature")]
 #[rustc_clean(cfg = "cfail3")]
 pub unsafe fn make_unsafe() {}
 
@@ -94,7 +94,7 @@
 pub fn make_extern() {}
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg = "cfail2", except = "Hir, HirBody, TypeckTables, FnSignature")]
+#[rustc_clean(cfg = "cfail2", except = "Hir, HirBody, MirBuilt, TypeckTables, FnSignature")]
 #[rustc_clean(cfg = "cfail3")]
 pub extern "C" fn make_extern() {}
 
@@ -292,7 +292,7 @@
     use super::ReferencedType2 as ReturnType;
 
     #[rustc_clean(cfg = "cfail2",
-                  except = "Hir, HirBody, MirValidated, MirOptimized, TypeckTables, FnSignature")]
+                  except = "Hir, HirBody, MirBuilt, MirOptimized, TypeckTables, FnSignature")]
     #[rustc_clean(cfg = "cfail3")]
     pub fn indirect_return_type() -> ReturnType {
         ReturnType {}
@@ -309,7 +309,7 @@
     use super::ReferencedType2 as ParameterType;
 
     #[rustc_clean(cfg = "cfail2",
-                  except = "Hir, HirBody, MirValidated, MirOptimized, TypeckTables, FnSignature")]
+                  except = "Hir, HirBody, MirBuilt, MirOptimized, TypeckTables, FnSignature")]
     #[rustc_clean(cfg = "cfail3")]
     pub fn indirect_parameter_type(p: ParameterType) {}
 }
diff --git a/src/test/incremental/hashes/if_expressions.rs b/src/test/incremental/hashes/if_expressions.rs
index 18dba63..a01247f 100644
--- a/src/test/incremental/hashes/if_expressions.rs
+++ b/src/test/incremental/hashes/if_expressions.rs
@@ -25,7 +25,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_condition(x: bool) -> u32 {
     if !x {
@@ -46,7 +46,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_then_branch(x: bool) -> u32 {
     if x {
@@ -69,7 +69,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_else_branch(x: bool) -> u32 {
     if x {
@@ -120,7 +120,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_condition_if_let(x: Option<u32>) -> u32 {
     if let Some(_) = x {
@@ -143,7 +143,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_then_branch_if_let(x: Option<u32>) -> u32 {
     if let Some(x) = x {
@@ -166,7 +166,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_else_branch_if_let(x: Option<u32>) -> u32 {
     if let Some(x) = x {
diff --git a/src/test/incremental/hashes/inherent_impls.rs b/src/test/incremental/hashes/inherent_impls.rs
index 92ce5a6..d1574ae 100644
--- a/src/test/incremental/hashes/inherent_impls.rs
+++ b/src/test/incremental/hashes/inherent_impls.rs
@@ -42,7 +42,7 @@
 #[rustc_clean(cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 impl Foo {
-    #[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated,TypeckTables")]
+    #[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt,TypeckTables")]
     #[rustc_clean(cfg="cfail3")]
     pub fn method_body() {
         println!("Hello, world!");
@@ -63,7 +63,7 @@
 #[rustc_clean(cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 impl Foo {
-    #[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated,TypeckTables")]
+    #[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt,TypeckTables")]
     #[rustc_clean(cfg="cfail3")]
     #[inline]
     pub fn method_body_inlined() {
@@ -114,7 +114,7 @@
 impl Foo {
     #[rustc_clean(
         cfg="cfail2",
-        except="Hir,HirBody,FnSignature,TypeckTables,MirOptimized,MirValidated"
+        except="Hir,HirBody,FnSignature,TypeckTables,MirOptimized,MirBuilt"
     )]
     #[rustc_clean(cfg="cfail3")]
     pub fn method_selfmutness(&mut self) { }
@@ -154,7 +154,7 @@
 impl Foo {
     #[rustc_clean(
         cfg="cfail2",
-        except="Hir,HirBody,FnSignature,TypeckTables,MirOptimized,MirValidated"
+        except="Hir,HirBody,FnSignature,TypeckTables,MirOptimized,MirBuilt"
     )]
     #[rustc_clean(cfg="cfail3")]
     pub fn add_method_parameter(&self, _: i32) { }
@@ -172,7 +172,7 @@
 #[rustc_clean(cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 impl Foo {
-    #[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated")]
+    #[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt")]
     #[rustc_clean(cfg="cfail3")]
     pub fn change_method_parameter_name(&self, b: i64) { }
 }
@@ -191,7 +191,7 @@
 impl Foo {
     #[rustc_clean(
         cfg="cfail2",
-        except="Hir,HirBody,FnSignature,MirOptimized,MirValidated,TypeckTables")]
+        except="Hir,HirBody,FnSignature,MirOptimized,MirBuilt,TypeckTables")]
     #[rustc_clean(cfg="cfail3")]
     pub fn change_method_return_type(&self) -> u8 { 0 }
 }
@@ -226,7 +226,7 @@
 #[rustc_clean(cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 impl Foo {
-    #[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated")]
+    #[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt")]
     #[rustc_clean(cfg="cfail3")]
     pub fn change_method_parameter_order(&self, b: i64, a: i64) { }
 }
@@ -245,7 +245,7 @@
 impl Foo {
     #[rustc_clean(
         cfg="cfail2",
-        except="Hir,HirBody,FnSignature,TypeckTables,MirOptimized,MirValidated"
+        except="Hir,HirBody,FnSignature,TypeckTables,MirOptimized,MirBuilt"
     )]
     #[rustc_clean(cfg="cfail3")]
     pub unsafe fn make_method_unsafe(&self) { }
@@ -263,7 +263,7 @@
 #[rustc_clean(cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 impl Foo {
-    #[rustc_clean(cfg="cfail2", except="Hir,HirBody,FnSignature,TypeckTables")]
+    #[rustc_clean(cfg="cfail2", except="Hir,HirBody,MirBuilt,FnSignature,TypeckTables")]
     #[rustc_clean(cfg="cfail3")]
     pub extern fn make_method_extern(&self) { }
 }
@@ -447,7 +447,7 @@
 impl<T> Bar<T> {
     #[rustc_clean(
         cfg="cfail2",
-        except="GenericsOfItem,FnSignature,TypeckTables,TypeOfItem,MirOptimized,MirValidated"
+        except="GenericsOfItem,FnSignature,TypeckTables,TypeOfItem,MirOptimized,MirBuilt"
     )]
     #[rustc_clean(cfg="cfail3")]
     pub fn add_type_parameter_to_impl(&self) { }
@@ -465,7 +465,7 @@
 #[rustc_clean(cfg="cfail2", except="Hir,HirBody")]
 #[rustc_clean(cfg="cfail3")]
 impl Bar<u64> {
-    #[rustc_clean(cfg="cfail2", except="FnSignature,MirOptimized,MirValidated,TypeckTables")]
+    #[rustc_clean(cfg="cfail2", except="FnSignature,MirOptimized,MirBuilt,TypeckTables")]
     #[rustc_clean(cfg="cfail3")]
     pub fn change_impl_self_type(&self) { }
 }
diff --git a/src/test/incremental/hashes/inline_asm.rs b/src/test/incremental/hashes/inline_asm.rs
index e73aa89..c5e7f52 100644
--- a/src/test/incremental/hashes/inline_asm.rs
+++ b/src/test/incremental/hashes/inline_asm.rs
@@ -33,7 +33,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
 pub fn change_template(a: i32) -> i32 {
@@ -69,7 +69,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
 pub fn change_output(a: i32) -> i32 {
@@ -105,7 +105,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
 pub fn change_input(_a: i32, _b: i32) -> i32 {
@@ -140,7 +140,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
 pub fn change_input_constraint(_a: i32, _b: i32) -> i32 {
@@ -175,7 +175,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
 pub fn change_clobber(_a: i32) -> i32 {
@@ -210,7 +210,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
 pub fn change_options(_a: i32) -> i32 {
diff --git a/src/test/incremental/hashes/let_expressions.rs b/src/test/incremental/hashes/let_expressions.rs
index b6050f0..a2b33fe 100644
--- a/src/test/incremental/hashes/let_expressions.rs
+++ b/src/test/incremental/hashes/let_expressions.rs
@@ -22,7 +22,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized")]
+    except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_name() {
     let _y = 2u64;
@@ -38,7 +38,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,TypeckTables,MirValidated,MirOptimized")]
+    except="HirBody,TypeckTables,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_type() {
     let _x: u32 = 2u32;
@@ -54,7 +54,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,TypeckTables,MirValidated,MirOptimized")]
+    except="HirBody,TypeckTables,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_type() {
     let _x: u8 = 2;
@@ -70,7 +70,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,TypeckTables,MirValidated,MirOptimized")]
+    except="HirBody,TypeckTables,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_mutability_of_reference_type() {
     let _x: &mut u64;
@@ -86,7 +86,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,TypeckTables,MirValidated,MirOptimized")]
+    except="HirBody,TypeckTables,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_mutability_of_slot() {
     let _x: u64 = 0;
@@ -102,7 +102,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,TypeckTables,MirValidated,MirOptimized")]
+    except="HirBody,TypeckTables,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_simple_binding_to_pattern() {
     let (_a, _b) = (0u8, 'x');
@@ -118,7 +118,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized")]
+    except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_name_in_pattern() {
     let (_a, _c) = (1u8, 'y');
@@ -134,7 +134,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,TypeckTables,MirValidated,MirOptimized")]
+    except="HirBody,TypeckTables,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_ref_in_pattern() {
     let (ref _a, _b) = (1u8, 'y');
@@ -150,7 +150,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,TypeckTables,MirValidated,MirOptimized")]
+    except="HirBody,TypeckTables,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_amp_in_pattern() {
     let (&_a, _b) = (&1u8, 'y');
@@ -166,7 +166,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,TypeckTables,MirValidated,MirOptimized")]
+    except="HirBody,TypeckTables,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_mutability_of_binding_in_pattern() {
     let (mut _a, _b) = (99u8, 'q');
@@ -182,7 +182,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,TypeckTables,MirValidated,MirOptimized")]
+    except="HirBody,TypeckTables,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_initializer() {
     let _x: i16 = 3i16;
@@ -198,7 +198,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized")]
+    except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_initializer() {
     let _x = 5u16;
diff --git a/src/test/incremental/hashes/loop_expressions.rs b/src/test/incremental/hashes/loop_expressions.rs
index a218b01..a48d150 100644
--- a/src/test/incremental/hashes/loop_expressions.rs
+++ b/src/test/incremental/hashes/loop_expressions.rs
@@ -25,7 +25,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_loop_body() {
     let mut _x = 0;
@@ -47,7 +47,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized, TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized, TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_break() {
     let mut _x = 0;
@@ -118,7 +118,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized, TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized, TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_break_label() {
     let mut _x = 0;
@@ -168,7 +168,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_continue_label() {
     let mut _x = 0;
@@ -193,7 +193,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized, TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized, TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_continue_to_break() {
     let mut _x = 0;
diff --git a/src/test/incremental/hashes/match_expressions.rs b/src/test/incremental/hashes/match_expressions.rs
index b6b934e..11fe84d 100644
--- a/src/test/incremental/hashes/match_expressions.rs
+++ b/src/test/incremental/hashes/match_expressions.rs
@@ -26,7 +26,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+    except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_arm(x: u32) -> u32 {
     match x {
@@ -51,7 +51,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized")]
+    except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_order_of_arms(x: u32) -> u32 {
     match x {
@@ -75,7 +75,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+    except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_guard_clause(x: u32, y: bool) -> u32 {
     match x {
@@ -99,7 +99,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+    except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_guard_clause(x: u32, y: bool) -> u32 {
     match x {
@@ -123,7 +123,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+    except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_at_binding(x: u32) -> u32 {
     match x {
@@ -147,7 +147,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized")]
+    except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_name_of_at_binding(x: u32) -> u32 {
     match x {
@@ -170,7 +170,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+    except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_simple_name_to_pattern(x: u32) -> u32 {
     match (x, x & 1) {
@@ -193,7 +193,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized")]
+    except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_name_in_pattern(x: u32) -> u32 {
     match (x, x & 1) {
@@ -216,7 +216,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+    except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_mutability_of_binding_in_pattern(x: u32) -> u32 {
     match (x, x & 1) {
@@ -238,7 +238,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+    except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_ref_to_binding_in_pattern(x: u32) -> u32 {
     match (x, x & 1) {
@@ -260,7 +260,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_amp_to_binding_in_pattern(x: u32) -> u32 {
     match (&x, x & 1) {
@@ -283,7 +283,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized")]
+    except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_rhs_of_arm(x: u32) -> u32 {
     match x {
@@ -307,7 +307,7 @@
 
 #[cfg(not(cfail1))]
 #[rustc_clean(cfg="cfail2",
-    except="HirBody,MirValidated,MirOptimized,TypeckTables")]
+    except="HirBody,MirBuilt,MirOptimized,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_alternative_to_arm(x: u32) -> u32 {
     match x {
diff --git a/src/test/incremental/hashes/panic_exprs.rs b/src/test/incremental/hashes/panic_exprs.rs
index 3ae2c39..9a3c931 100644
--- a/src/test/incremental/hashes/panic_exprs.rs
+++ b/src/test/incremental/hashes/panic_exprs.rs
@@ -18,7 +18,7 @@
 
 
 // Indexing expression ---------------------------------------------------------
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn indexing(slice: &[u8]) -> u8 {
     #[cfg(cfail1)]
@@ -33,7 +33,7 @@
 
 
 // Arithmetic overflow plus ----------------------------------------------------
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn arithmetic_overflow_plus(val: i32) -> i32 {
     #[cfg(cfail1)]
@@ -48,7 +48,7 @@
 
 
 // Arithmetic overflow minus ----------------------------------------------------
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn arithmetic_overflow_minus(val: i32) -> i32 {
     #[cfg(cfail1)]
@@ -63,7 +63,7 @@
 
 
 // Arithmetic overflow mult ----------------------------------------------------
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn arithmetic_overflow_mult(val: i32) -> i32 {
     #[cfg(cfail1)]
@@ -78,7 +78,7 @@
 
 
 // Arithmetic overflow negation ------------------------------------------------
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn arithmetic_overflow_negation(val: i32) -> i32 {
     #[cfg(cfail1)]
@@ -93,7 +93,7 @@
 
 
 // Division by zero ------------------------------------------------------------
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn division_by_zero(val: i32) -> i32 {
     #[cfg(cfail1)]
@@ -107,7 +107,7 @@
 }
 
 // Division by zero ------------------------------------------------------------
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn mod_by_zero(val: i32) -> i32 {
     #[cfg(cfail1)]
@@ -122,7 +122,7 @@
 
 
 // shift left ------------------------------------------------------------------
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn shift_left(val: i32, shift: usize) -> i32 {
     #[cfg(cfail1)]
@@ -137,7 +137,7 @@
 
 
 // shift right ------------------------------------------------------------------
-#[rustc_clean(cfg="cfail2", except="HirBody,MirValidated,MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirBuilt,MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn shift_right(val: i32, shift: usize) -> i32 {
     #[cfg(cfail1)]
diff --git a/src/test/incremental/hashes/struct_constructors.rs b/src/test/incremental/hashes/struct_constructors.rs
index 5444fe7..a42fda3 100644
--- a/src/test/incremental/hashes/struct_constructors.rs
+++ b/src/test/incremental/hashes/struct_constructors.rs
@@ -31,7 +31,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_field_value_regular_struct() -> RegularStruct {
     RegularStruct {
@@ -82,7 +82,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated,TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_field_regular_struct() -> RegularStruct {
     let struct1 = RegularStruct {
@@ -117,7 +117,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated,TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_field_label_regular_struct() -> RegularStruct {
     let struct1 = RegularStruct {
@@ -152,7 +152,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated,TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_constructor_path_regular_struct() {
     let _ = RegularStruct2 {
@@ -173,7 +173,7 @@
 
     #[rustc_clean(
         cfg="cfail2",
-        except="FnSignature,Hir,HirBody,MirOptimized,MirValidated,TypeckTables"
+        except="FnSignature,Hir,HirBody,MirOptimized,MirBuilt,TypeckTables"
     )]
     #[rustc_clean(cfg="cfail3")]
     pub fn function() -> Struct {
@@ -196,7 +196,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_field_value_tuple_struct() -> TupleStruct {
     TupleStruct(0, 1, 3)
@@ -213,7 +213,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirValidated,TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody,MirOptimized,MirBuilt,TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_constructor_path_tuple_struct() {
     let _ = TupleStruct2(0, 1, 2);
@@ -230,7 +230,7 @@
 
     #[rustc_clean(
         cfg="cfail2",
-        except="FnSignature,Hir,HirBody,MirOptimized,MirValidated,TypeckTables"
+        except="FnSignature,Hir,HirBody,MirOptimized,MirBuilt,TypeckTables"
     )]
     #[rustc_clean(cfg="cfail3")]
     pub fn function() -> Struct {
diff --git a/src/test/incremental/hashes/unary_and_binary_exprs.rs b/src/test/incremental/hashes/unary_and_binary_exprs.rs
index 26cc41f..ef8035a 100644
--- a/src/test/incremental/hashes/unary_and_binary_exprs.rs
+++ b/src/test/incremental/hashes/unary_and_binary_exprs.rs
@@ -21,7 +21,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn const_negation() -> i32 {
     -1
@@ -36,7 +36,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn const_bitwise_not() -> i32 {
     !99
@@ -51,7 +51,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn var_negation(x: i32, y: i32) -> i32 {
     -y
@@ -66,7 +66,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn var_bitwise_not(x: i32, y: i32) -> i32 {
     !y
@@ -81,7 +81,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated,TypeckTables", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt,TypeckTables", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn var_deref(x: &i32, y: &i32) -> i32 {
     *y
@@ -96,7 +96,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn first_const_add() -> i32 {
     2 + 3
@@ -111,7 +111,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn second_const_add() -> i32 {
     1 + 3
@@ -126,7 +126,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn first_var_add(a: i32, b: i32) -> i32 {
     b + 2
@@ -141,7 +141,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn second_var_add(a: i32, b: i32) -> i32 {
     1 + b
@@ -156,7 +156,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn plus_to_minus(a: i32) -> i32 {
     1 - a
@@ -171,7 +171,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn plus_to_mult(a: i32) -> i32 {
     1 * a
@@ -186,7 +186,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn plus_to_div(a: i32) -> i32 {
     1 / a
@@ -201,7 +201,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn plus_to_mod(a: i32) -> i32 {
     1 % a
@@ -216,7 +216,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn and_to_or(a: bool, b: bool) -> bool {
     a || b
@@ -231,7 +231,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn bitwise_and_to_bitwise_or(a: i32) -> i32 {
     1 | a
@@ -246,7 +246,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn bitwise_and_to_bitwise_xor(a: i32) -> i32 {
     1 ^ a
@@ -261,7 +261,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn bitwise_and_to_lshift(a: i32) -> i32 {
     a << 1
@@ -276,7 +276,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn bitwise_and_to_rshift(a: i32) -> i32 {
     a >> 1
@@ -291,7 +291,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn eq_to_uneq(a: i32) -> bool {
     a != 1
@@ -306,7 +306,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn eq_to_lt(a: i32) -> bool {
     a < 1
@@ -321,7 +321,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn eq_to_gt(a: i32) -> bool {
     a > 1
@@ -336,7 +336,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn eq_to_le(a: i32) -> bool {
     a <= 1
@@ -351,7 +351,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn eq_to_ge(a: i32) -> bool {
     a >= 1
@@ -368,7 +368,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated,TypeckTables", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt,TypeckTables", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn type_cast(a: u8) -> u64 {
     let b = a as u32;
@@ -385,7 +385,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn value_cast(a: u32) -> i32 {
     2 as i32
@@ -403,7 +403,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn place() -> i32 {
     let mut x = 10;
@@ -423,7 +423,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn rvalue() -> i32 {
     let mut x = 10;
@@ -440,7 +440,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(except="HirBody,MirOptimized,MirValidated", cfg="cfail2")]
+#[rustc_clean(except="HirBody,MirOptimized,MirBuilt", cfg="cfail2")]
 #[rustc_clean(cfg="cfail3")]
 pub fn index_to_slice(s: &[u8], i: usize, j: usize) -> u8 {
     s[j]
diff --git a/src/test/incremental/hashes/while_let_loops.rs b/src/test/incremental/hashes/while_let_loops.rs
index edfea05..c708d5b 100644
--- a/src/test/incremental/hashes/while_let_loops.rs
+++ b/src/test/incremental/hashes/while_let_loops.rs
@@ -25,7 +25,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_loop_body() {
     let mut _x = 0;
@@ -48,7 +48,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_loop_condition() {
     let mut _x = 0;
@@ -70,7 +70,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized, TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized, TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_break() {
     let mut _x = 0;
@@ -141,7 +141,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized, TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized, TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_break_label() {
     let mut _x = 0;
@@ -191,7 +191,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized, TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized, TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_continue_label() {
     let mut _x = 0;
@@ -216,7 +216,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_continue_to_break() {
     let mut _x = 0;
diff --git a/src/test/incremental/hashes/while_loops.rs b/src/test/incremental/hashes/while_loops.rs
index 85c2c9f..c7b84a1 100644
--- a/src/test/incremental/hashes/while_loops.rs
+++ b/src/test/incremental/hashes/while_loops.rs
@@ -25,7 +25,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_loop_body() {
     let mut _x = 0;
@@ -48,7 +48,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_loop_condition() {
     let mut _x = 0;
@@ -70,7 +70,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized, TypeckTables")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized, TypeckTables")]
 #[rustc_clean(cfg="cfail3")]
 pub fn add_break() {
     let mut _x = 0;
@@ -141,7 +141,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_break_label() {
     let mut _x = 0;
@@ -191,7 +191,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_continue_label() {
     let mut _x = 0;
@@ -216,7 +216,7 @@
 }
 
 #[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="HirBody, MirValidated, MirOptimized")]
+#[rustc_clean(cfg="cfail2", except="HirBody, MirBuilt, MirOptimized")]
 #[rustc_clean(cfg="cfail3")]
 pub fn change_continue_to_break() {
     let mut _x = 0;
diff --git a/src/test/run-pass/methods/method-probe-no-guessing-dyn-trait.rs b/src/test/run-pass/methods/method-probe-no-guessing-dyn-trait.rs
new file mode 100644
index 0000000..8c8165a
--- /dev/null
+++ b/src/test/run-pass/methods/method-probe-no-guessing-dyn-trait.rs
@@ -0,0 +1,59 @@
+// Check that method matching does not make "guesses" depending on
+// Deref impls that don't eventually end up being picked.
+
+use std::ops::Deref;
+
+// An impl with fewer derefs will get called over an impl with more derefs,
+// so `(t: Foo<_>).my_fn()` will use `<Foo<u32> as MyTrait1>::my_fn(t)`,
+// and does *not* force the `_` to equal `()`, because the Deref impl
+// was *not* used.
+
+trait MyTrait1 {
+    fn my_fn(&self) {}
+}
+
+impl MyTrait1 for Foo<u32> {}
+
+struct Foo<T>(T);
+
+impl Deref for Foo<()> {
+    type Target = dyn MyTrait1 + 'static;
+    fn deref(&self) -> &(dyn MyTrait1 + 'static) {
+        panic!()
+    }
+}
+
+// ...but if there is no impl with fewer derefs, the "guess" will be
+// forced, so `(t: Bar<_>).my_fn2()` is `<dyn MyTrait2 as MyTrait2>::my_fn2(*t)`,
+// and because the deref impl is used, the `_` is forced to equal `u8`.
+
+trait MyTrait2 {
+    fn my_fn2(&self) {}
+}
+
+impl MyTrait2 for u32 {}
+struct Bar<T>(T, u32);
+impl Deref for Bar<u8> {
+    type Target = dyn MyTrait2 + 'static;
+    fn deref(&self) -> &(dyn MyTrait2 + 'static) {
+        &self.1
+    }
+}
+
+// actually invoke things
+
+fn main() {
+    let mut foo: Option<Foo<_>> = None;
+    let mut bar: Option<Bar<_>> = None;
+    let mut first_iter = true;
+    loop {
+        if !first_iter {
+            foo.as_ref().unwrap().my_fn();
+            bar.as_ref().unwrap().my_fn2();
+            break;
+        }
+        foo = Some(Foo(0));
+        bar = Some(Bar(Default::default(), 0));
+        first_iter = false;
+    }
+}
diff --git a/src/test/run-pass/panic-uninitialized-zeroed.rs b/src/test/run-pass/panic-uninitialized-zeroed.rs
index d47ff6c..31c0d29 100644
--- a/src/test/run-pass/panic-uninitialized-zeroed.rs
+++ b/src/test/run-pass/panic-uninitialized-zeroed.rs
@@ -36,7 +36,7 @@
 
         assert_eq!(
             panic::catch_unwind(|| {
-                mem::MaybeUninit::<!>::uninitialized().into_inner()
+                mem::MaybeUninit::<!>::uninitialized().into_initialized()
             }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
                 s == "Attempted to instantiate uninhabited type !"
             })),
@@ -63,7 +63,7 @@
 
         assert_eq!(
             panic::catch_unwind(|| {
-                mem::MaybeUninit::<Foo>::uninitialized().into_inner()
+                mem::MaybeUninit::<Foo>::uninitialized().into_initialized()
             }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
                 s == "Attempted to instantiate uninhabited type Foo"
             })),
@@ -90,7 +90,7 @@
 
         assert_eq!(
             panic::catch_unwind(|| {
-                mem::MaybeUninit::<Bar>::uninitialized().into_inner()
+                mem::MaybeUninit::<Bar>::uninitialized().into_initialized()
             }).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
                 s == "Attempted to instantiate uninhabited type Bar"
             })),
diff --git a/src/test/rustdoc/async-fn.rs b/src/test/rustdoc/async-fn.rs
index a0b6c29..ba4997a 100644
--- a/src/test/rustdoc/async-fn.rs
+++ b/src/test/rustdoc/async-fn.rs
@@ -1,14 +1,35 @@
 // edition:2018
-// compile-flags:-Z unstable-options
-
-// FIXME: once `--edition` is stable in rustdoc, remove that `compile-flags` directive
 
 #![feature(async_await, futures_api)]
 
-// @has async_fn/struct.S.html
-// @has - '//code' 'pub async fn f()'
-pub struct S;
+// @has async_fn/fn.foo.html '//pre[@class="rust fn"]' 'pub async fn foo() -> Option<Foo>'
+pub async fn foo() -> Option<Foo> {
+    None
+}
 
-impl S {
+// @has async_fn/fn.bar.html '//pre[@class="rust fn"]' 'pub async fn bar(a: i32, b: i32) -> i32'
+pub async fn bar(a: i32, b: i32) -> i32 {
+    0
+}
+
+// @has async_fn/fn.baz.html '//pre[@class="rust fn"]' 'pub async fn baz<T>(a: T) -> T'
+pub async fn baz<T>(a: T) -> T {
+    a
+}
+
+trait Bar {}
+
+impl Bar for () {}
+
+// @has async_fn/fn.quux.html '//pre[@class="rust fn"]' 'pub async fn quux() -> impl Bar'
+pub async fn quux() -> impl Bar {
+    ()
+}
+
+// @has async_fn/struct.Foo.html
+// @matches - '//code' 'pub async fn f\(\)$'
+pub struct Foo;
+
+impl Foo {
     pub async fn f() {}
 }
diff --git a/src/test/rustdoc/trait_alias.rs b/src/test/rustdoc/trait_alias.rs
new file mode 100644
index 0000000..98b8d87
--- /dev/null
+++ b/src/test/rustdoc/trait_alias.rs
@@ -0,0 +1,21 @@
+#![feature(trait_alias)]
+
+#![crate_name = "foo"]
+
+use std::fmt::Debug;
+
+// @has foo/all.html '//a[@href="traitalias.CopyAlias.html"]' 'CopyAlias'
+// @has foo/all.html '//a[@href="traitalias.Alias2.html"]' 'Alias2'
+// @has foo/all.html '//a[@href="traitalias.Foo.html"]' 'Foo'
+
+// @has foo/index.html '//h2[@id="trait-aliases"]' 'Trait aliases'
+// @has foo/index.html '//a[@class="traitalias"]' 'CopyAlias'
+// @has foo/index.html '//a[@class="traitalias"]' 'Alias2'
+// @has foo/index.html '//a[@class="traitalias"]' 'Foo'
+
+// @has foo/traitalias.CopyAlias.html '//section[@id="main"]/pre' 'trait CopyAlias = Copy;'
+pub trait CopyAlias = Copy;
+// @has foo/traitalias.Alias2.html '//section[@id="main"]/pre' 'trait Alias2 = Copy + Debug;'
+pub trait Alias2 = Copy + Debug;
+// @has foo/traitalias.Foo.html '//section[@id="main"]/pre' 'trait Foo<T> = Into<T> + Debug;'
+pub trait Foo<T> = Into<T> + Debug;
diff --git a/src/test/ui/augmented-assignments.nll.stderr b/src/test/ui/augmented-assignments.nll.stderr
index 840b377..33c94d6 100644
--- a/src/test/ui/augmented-assignments.nll.stderr
+++ b/src/test/ui/augmented-assignments.nll.stderr
@@ -9,7 +9,7 @@
 LL | |     //~^ value used here after move
 LL | |     +=
 LL | |     x;  //~ value moved here
-   | |     -
+   | |     ^
    | |     |
    | |_____move out of `x` occurs here
    |       borrow later used here
diff --git a/src/test/ui/bad/bad-lint-cap2.stderr b/src/test/ui/bad/bad-lint-cap2.stderr
index d7ec414..b963872 100644
--- a/src/test/ui/bad/bad-lint-cap2.stderr
+++ b/src/test/ui/bad/bad-lint-cap2.stderr
@@ -2,7 +2,7 @@
   --> $DIR/bad-lint-cap2.rs:6:5
    |
 LL | use std::option; //~ ERROR
-   |     ^^^^^^^^^^^
+   | ----^^^^^^^^^^^- help: remove the whole `use` item
    |
 note: lint level defined here
   --> $DIR/bad-lint-cap2.rs:4:9
diff --git a/src/test/ui/bad/bad-lint-cap3.stderr b/src/test/ui/bad/bad-lint-cap3.stderr
index 5bf0b08..21ed50b 100644
--- a/src/test/ui/bad/bad-lint-cap3.stderr
+++ b/src/test/ui/bad/bad-lint-cap3.stderr
@@ -2,7 +2,7 @@
   --> $DIR/bad-lint-cap3.rs:7:5
    |
 LL | use std::option; //~ WARN
-   |     ^^^^^^^^^^^
+   | ----^^^^^^^^^^^- help: remove the whole `use` item
    |
 note: lint level defined here
   --> $DIR/bad-lint-cap3.rs:4:9
diff --git a/src/test/ui/imports/unused.stderr b/src/test/ui/imports/unused.stderr
index b56e930..fa82e97 100644
--- a/src/test/ui/imports/unused.stderr
+++ b/src/test/ui/imports/unused.stderr
@@ -2,7 +2,7 @@
   --> $DIR/unused.rs:7:24
    |
 LL |         pub(super) use super::f; //~ ERROR unused
-   |                        ^^^^^^^^
+   |         ---------------^^^^^^^^- help: remove the whole `use` item
    |
 note: lint level defined here
   --> $DIR/unused.rs:1:9
diff --git a/src/test/ui/issues/issue-30730.stderr b/src/test/ui/issues/issue-30730.stderr
index 0a90107..3cfadd3 100644
--- a/src/test/ui/issues/issue-30730.stderr
+++ b/src/test/ui/issues/issue-30730.stderr
@@ -2,7 +2,7 @@
   --> $DIR/issue-30730.rs:3:5
    |
 LL | use std::thread;
-   |     ^^^^^^^^^^^
+   | ----^^^^^^^^^^^- help: remove the whole `use` item
    |
 note: lint level defined here
   --> $DIR/issue-30730.rs:2:9
diff --git a/src/test/ui/issues/issue-52891.stderr b/src/test/ui/issues/issue-52891.stderr
index 55d6110..65b2b94 100644
--- a/src/test/ui/issues/issue-52891.stderr
+++ b/src/test/ui/issues/issue-52891.stderr
@@ -90,7 +90,7 @@
 LL |       m,
    |  ______-
 LL | |     a}; //~ ERROR `a` is defined multiple times
-   | |     -
+   | |     ^
    | |     |
    | |_____`a` reimported here
    |       help: remove unnecessary import
diff --git a/src/test/ui/lint/lint-directives-on-use-items-issue-10534.stderr b/src/test/ui/lint/lint-directives-on-use-items-issue-10534.stderr
index 170b98a..e588d24 100644
--- a/src/test/ui/lint/lint-directives-on-use-items-issue-10534.stderr
+++ b/src/test/ui/lint/lint-directives-on-use-items-issue-10534.stderr
@@ -2,7 +2,7 @@
   --> $DIR/lint-directives-on-use-items-issue-10534.rs:12:9
    |
 LL |     use a::x; //~ ERROR: unused import
-   |         ^^^^
+   |     ----^^^^- help: remove the whole `use` item
    |
 note: lint level defined here
   --> $DIR/lint-directives-on-use-items-issue-10534.rs:1:9
@@ -14,7 +14,7 @@
   --> $DIR/lint-directives-on-use-items-issue-10534.rs:21:9
    |
 LL |     use a::y; //~ ERROR: unused import
-   |         ^^^^
+   |     ----^^^^- help: remove the whole `use` item
    |
 note: lint level defined here
   --> $DIR/lint-directives-on-use-items-issue-10534.rs:20:12
diff --git a/src/test/ui/lint/lint-unused-imports.rs b/src/test/ui/lint/lint-unused-imports.rs
index 4892524..9c5b206 100644
--- a/src/test/ui/lint/lint-unused-imports.rs
+++ b/src/test/ui/lint/lint-unused-imports.rs
@@ -6,7 +6,7 @@
 use std::mem::*;            // shouldn't get errors for not using
                             // everything imported
 use std::fmt::{};
-//~^ ERROR unused import: `use std::fmt::{};`
+//~^ ERROR unused import: `std::fmt::{}`
 
 // Should get errors for both 'Some' and 'None'
 use std::option::Option::{Some, None};
diff --git a/src/test/ui/lint/lint-unused-imports.stderr b/src/test/ui/lint/lint-unused-imports.stderr
index 214f4a4..7970b02 100644
--- a/src/test/ui/lint/lint-unused-imports.stderr
+++ b/src/test/ui/lint/lint-unused-imports.stderr
@@ -1,8 +1,8 @@
-error: unused import: `use std::fmt::{};`
-  --> $DIR/lint-unused-imports.rs:8:1
+error: unused import: `std::fmt::{}`
+  --> $DIR/lint-unused-imports.rs:8:5
    |
 LL | use std::fmt::{};
-   | ^^^^^^^^^^^^^^^^^
+   | ----^^^^^^^^^^^^- help: remove the whole `use` item
    |
 note: lint level defined here
   --> $DIR/lint-unused-imports.rs:1:9
@@ -14,37 +14,39 @@
   --> $DIR/lint-unused-imports.rs:12:27
    |
 LL | use std::option::Option::{Some, None};
-   |                           ^^^^  ^^^^
+   | --------------------------^^^^--^^^^-- help: remove the whole `use` item
 
 error: unused import: `test::A`
   --> $DIR/lint-unused-imports.rs:15:5
    |
 LL | use test::A;       //~ ERROR unused import: `test::A`
-   |     ^^^^^^^
+   | ----^^^^^^^- help: remove the whole `use` item
 
 error: unused import: `bar`
   --> $DIR/lint-unused-imports.rs:24:18
    |
 LL | use test2::{foo, bar}; //~ ERROR unused import: `bar`
-   |                  ^^^
+   |                --^^^
+   |                |
+   |                help: remove the unused import
 
 error: unused import: `foo::Square`
   --> $DIR/lint-unused-imports.rs:52:13
    |
 LL |         use foo::Square; //~ ERROR unused import: `foo::Square`
-   |             ^^^^^^^^^^^
+   |         ----^^^^^^^^^^^- help: remove the whole `use` item
 
 error: unused import: `self::g`
   --> $DIR/lint-unused-imports.rs:68:9
    |
 LL |     use self::g; //~ ERROR unused import: `self::g`
-   |         ^^^^^^^
+   |     ----^^^^^^^- help: remove the whole `use` item
 
 error: unused import: `test2::foo`
   --> $DIR/lint-unused-imports.rs:77:9
    |
 LL |     use test2::foo; //~ ERROR unused import: `test2::foo`
-   |         ^^^^^^^^^^
+   |     ----^^^^^^^^^^- help: remove the whole `use` item
 
 error: unused import: `test::B2`
   --> $DIR/lint-unused-imports.rs:20:5
diff --git a/src/test/ui/lint/lints-in-foreign-macros.stderr b/src/test/ui/lint/lints-in-foreign-macros.stderr
index 8287ca5..b808ca7 100644
--- a/src/test/ui/lint/lints-in-foreign-macros.stderr
+++ b/src/test/ui/lint/lints-in-foreign-macros.stderr
@@ -2,7 +2,7 @@
   --> $DIR/lints-in-foreign-macros.rs:11:16
    |
 LL |     () => {use std::string::ToString;} //~ WARN: unused import
-   |                ^^^^^^^^^^^^^^^^^^^^^
+   |            ----^^^^^^^^^^^^^^^^^^^^^- help: remove the whole `use` item
 ...
 LL | mod a { foo!(); }
    |         ------- in this macro invocation
@@ -17,13 +17,13 @@
   --> $DIR/lints-in-foreign-macros.rs:16:18
    |
 LL | mod c { baz!(use std::string::ToString;); } //~ WARN: unused import
-   |                  ^^^^^^^^^^^^^^^^^^^^^
+   |              ----^^^^^^^^^^^^^^^^^^^^^- help: remove the whole `use` item
 
 warning: unused import: `std::string::ToString`
   --> $DIR/lints-in-foreign-macros.rs:17:19
    |
 LL | mod d { baz2!(use std::string::ToString;); } //~ WARN: unused import
-   |                   ^^^^^^^^^^^^^^^^^^^^^
+   |               ----^^^^^^^^^^^^^^^^^^^^^- help: remove the whole `use` item
 
 warning: missing documentation for crate
   --> $DIR/lints-in-foreign-macros.rs:4:1
diff --git a/src/test/ui/methods/method-deref-to-same-trait-object-with-separate-params.rs b/src/test/ui/methods/method-deref-to-same-trait-object-with-separate-params.rs
new file mode 100644
index 0000000..a5dae1c
--- /dev/null
+++ b/src/test/ui/methods/method-deref-to-same-trait-object-with-separate-params.rs
@@ -0,0 +1,177 @@
+#![feature(arbitrary_self_types, coerce_unsized, dispatch_from_dyn, unsize, unsized_locals)]
+
+// This tests a few edge cases around `arbitrary_self_types`. Specifically,
+// it checks that the `ObjectCandidate` you get from method matching can't
+// match a trait with the same DefId as a supertrait but a bad type parameter.
+
+use std::marker::PhantomData;
+
+mod internal {
+    use std::ops::{CoerceUnsized, Deref, DispatchFromDyn};
+    use std::marker::{PhantomData, Unsize};
+
+    pub struct Smaht<T: ?Sized, MISC>(pub Box<T>, pub PhantomData<MISC>);
+
+    impl<T: ?Sized, MISC> Deref for Smaht<T, MISC> {
+        type Target = T;
+
+        fn deref(&self) -> &Self::Target {
+            &self.0
+        }
+    }
+    impl<T: ?Sized + Unsize<U>, U: ?Sized, MISC> CoerceUnsized<Smaht<U, MISC>>
+        for Smaht<T, MISC>
+    {}
+    impl<T: ?Sized + Unsize<U>, U: ?Sized, MISC> DispatchFromDyn<Smaht<U, MISC>>
+        for Smaht<T, MISC>
+    {}
+
+    pub trait Foo: X<u32> {}
+    pub trait X<T> {
+        fn foo(self: Smaht<Self, T>) -> T;
+    }
+
+    impl X<u32> for () {
+        fn foo(self: Smaht<Self, u32>) -> u32 {
+            0
+        }
+    }
+
+    pub trait Marker {}
+    impl Marker for dyn Foo {}
+    impl<T: Marker + ?Sized> X<u64> for T {
+        fn foo(self: Smaht<Self, u64>) -> u64 {
+            1
+        }
+    }
+
+    impl Deref for dyn Foo {
+        type Target = ();
+        fn deref(&self) -> &() { &() }
+    }
+
+    impl Foo for () {}
+}
+
+pub trait FinalFoo {
+    fn foo(&self) -> u8;
+}
+
+impl FinalFoo for () {
+    fn foo(&self) -> u8 { 0 }
+}
+
+mod nuisance_foo {
+    pub trait NuisanceFoo {
+        fn foo(self);
+    }
+
+    impl<T: ?Sized> NuisanceFoo for T {
+        fn foo(self) {}
+    }
+}
+
+
+fn objectcandidate_impl() {
+    let x: internal::Smaht<(), u32> = internal::Smaht(Box::new(()), PhantomData);
+    let x: internal::Smaht<dyn internal::Foo, u32> = x;
+
+    // This picks `<dyn internal::Foo as X<u32>>::foo` via `ObjectCandidate`.
+    //
+    // The `TraitCandidate` is not relevant because `X` is not in scope.
+    let z = x.foo();
+
+    // Observe the type of `z` is `u32`
+    let _seetype: () = z; //~ ERROR mismatched types
+    //~| expected (), found u32
+}
+
+fn traitcandidate_impl() {
+    use internal::X;
+
+    let x: internal::Smaht<(), u64> = internal::Smaht(Box::new(()), PhantomData);
+    let x: internal::Smaht<dyn internal::Foo, u64> = x;
+
+    // This picks `<dyn internal::Foo as X<u64>>::foo` via `TraitCandidate`.
+    //
+    // The `ObjectCandidate` does not apply, as it only applies to
+    // `X<u32>` (and not `X<u64>`).
+    let z = x.foo();
+
+    // Observe the type of `z` is `u64`
+    let _seetype: () = z; //~ ERROR mismatched types
+    //~| expected (), found u64
+}
+
+fn traitcandidate_impl_with_nuisance() {
+    use internal::X;
+    use nuisance_foo::NuisanceFoo;
+
+    let x: internal::Smaht<(), u64> = internal::Smaht(Box::new(()), PhantomData);
+    let x: internal::Smaht<dyn internal::Foo, u64> = x;
+
+    // This picks `<dyn internal::Foo as X<u64>>::foo` via `TraitCandidate`.
+    //
+    // The `ObjectCandidate` does not apply, as it only applies to
+    // `X<u32>` (and not `X<u64>`).
+    //
+    // The NuisanceFoo impl has the same priority as the `X` impl,
+    // so we get a conflict.
+    let z = x.foo(); //~ ERROR multiple applicable items in scope
+}
+
+
+fn neither_impl() {
+    let x: internal::Smaht<(), u64> = internal::Smaht(Box::new(()), PhantomData);
+    let x: internal::Smaht<dyn internal::Foo, u64> = x;
+
+    // This can't pick the `TraitCandidate` impl, because `X` is not
+    // imported. However, this also can't pick the `ObjectCandidate`
+    // impl, because it only applies to `X<u32>` (and not `X<u64>`).
+    //
+    // Therefore, neither of the candidates is applicable, and we pick
+    // the `FinalFoo` impl after another deref, which will return `u8`.
+    let z = x.foo();
+
+    // Observe the type of `z` is `u8`
+    let _seetype: () = z; //~ ERROR mismatched types
+    //~| expected (), found u8
+}
+
+fn both_impls() {
+    use internal::X;
+
+    let x: internal::Smaht<(), u32> = internal::Smaht(Box::new(()), PhantomData);
+    let x: internal::Smaht<dyn internal::Foo, u32> = x;
+
+    // This can pick both the `TraitCandidate` and the `ObjectCandidate` impl.
+    //
+    // However, the `ObjectCandidate` is considered an "inherent candidate",
+    // and therefore has priority over both the `TraitCandidate` as well as
+    // any other "nuisance" candidate" (if present).
+    let z = x.foo();
+
+    // Observe the type of `z` is `u32`
+    let _seetype: () = z; //~ ERROR mismatched types
+    //~| expected (), found u32
+}
+
+
+fn both_impls_with_nuisance() {
+    // Similar to the `both_impls` example, except with a nuisance impl to
+    // make sure the `ObjectCandidate` indeed has a higher priority.
+
+    use internal::X;
+    use nuisance_foo::NuisanceFoo;
+
+    let x: internal::Smaht<(), u32> = internal::Smaht(Box::new(()), PhantomData);
+    let x: internal::Smaht<dyn internal::Foo, u32> = x;
+    let z = x.foo();
+
+    // Observe the type of `z` is `u32`
+    let _seetype: () = z; //~ ERROR mismatched types
+    //~| expected (), found u32
+}
+
+fn main() {
+}
diff --git a/src/test/ui/methods/method-deref-to-same-trait-object-with-separate-params.stderr b/src/test/ui/methods/method-deref-to-same-trait-object-with-separate-params.stderr
new file mode 100644
index 0000000..2d8449b
--- /dev/null
+++ b/src/test/ui/methods/method-deref-to-same-trait-object-with-separate-params.stderr
@@ -0,0 +1,72 @@
+error[E0308]: mismatched types
+  --> $DIR/method-deref-to-same-trait-object-with-separate-params.rs:85:24
+   |
+LL |     let _seetype: () = z; //~ ERROR mismatched types
+   |                        ^ expected (), found u32
+   |
+   = note: expected type `()`
+              found type `u32`
+
+error[E0308]: mismatched types
+  --> $DIR/method-deref-to-same-trait-object-with-separate-params.rs:102:24
+   |
+LL |     let _seetype: () = z; //~ ERROR mismatched types
+   |                        ^ expected (), found u64
+   |
+   = note: expected type `()`
+              found type `u64`
+
+error[E0034]: multiple applicable items in scope
+  --> $DIR/method-deref-to-same-trait-object-with-separate-params.rs:120:15
+   |
+LL |     let z = x.foo(); //~ ERROR multiple applicable items in scope
+   |               ^^^ multiple `foo` found
+   |
+note: candidate #1 is defined in an impl of the trait `internal::X` for the type `_`
+  --> $DIR/method-deref-to-same-trait-object-with-separate-params.rs:43:9
+   |
+LL |         fn foo(self: Smaht<Self, u64>) -> u64 {
+   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+note: candidate #2 is defined in an impl of the trait `nuisance_foo::NuisanceFoo` for the type `_`
+  --> $DIR/method-deref-to-same-trait-object-with-separate-params.rs:70:9
+   |
+LL |         fn foo(self) {}
+   |         ^^^^^^^^^^^^
+note: candidate #3 is defined in the trait `FinalFoo`
+  --> $DIR/method-deref-to-same-trait-object-with-separate-params.rs:57:5
+   |
+LL |     fn foo(&self) -> u8;
+   |     ^^^^^^^^^^^^^^^^^^^^
+   = help: to disambiguate the method call, write `FinalFoo::foo(x)` instead
+
+error[E0308]: mismatched types
+  --> $DIR/method-deref-to-same-trait-object-with-separate-params.rs:137:24
+   |
+LL |     let _seetype: () = z; //~ ERROR mismatched types
+   |                        ^ expected (), found u8
+   |
+   = note: expected type `()`
+              found type `u8`
+
+error[E0308]: mismatched types
+  --> $DIR/method-deref-to-same-trait-object-with-separate-params.rs:155:24
+   |
+LL |     let _seetype: () = z; //~ ERROR mismatched types
+   |                        ^ expected (), found u32
+   |
+   = note: expected type `()`
+              found type `u32`
+
+error[E0308]: mismatched types
+  --> $DIR/method-deref-to-same-trait-object-with-separate-params.rs:172:24
+   |
+LL |     let _seetype: () = z; //~ ERROR mismatched types
+   |                        ^ expected (), found u32
+   |
+   = note: expected type `()`
+              found type `u32`
+
+error: aborting due to 6 previous errors
+
+Some errors occurred: E0034, E0308.
+For more information about an error, try `rustc --explain E0034`.
diff --git a/src/test/ui/methods/method-trait-object-with-hrtb.rs b/src/test/ui/methods/method-trait-object-with-hrtb.rs
new file mode 100644
index 0000000..da2f13f
--- /dev/null
+++ b/src/test/ui/methods/method-trait-object-with-hrtb.rs
@@ -0,0 +1,41 @@
+// compile-pass
+
+// Check that method probing ObjectCandidate works in the presence of
+// auto traits and/or HRTBs.
+
+mod internal {
+    pub trait MyObject<'a> {
+        type Output;
+
+        fn foo(&self) -> Self::Output;
+    }
+
+    impl<'a> MyObject<'a> for () {
+        type Output = &'a u32;
+
+        fn foo(&self) -> Self::Output { &4 }
+    }
+}
+
+fn t1(d: &dyn for<'a> internal::MyObject<'a, Output=&'a u32>) {
+    d.foo();
+}
+
+fn t2(d: &dyn internal::MyObject<'static, Output=&'static u32>) {
+    d.foo();
+}
+
+fn t3(d: &(dyn for<'a> internal::MyObject<'a, Output=&'a u32> + Sync)) {
+    d.foo();
+}
+
+fn t4(d: &(dyn internal::MyObject<'static, Output=&'static u32> + Sync)) {
+    d.foo();
+}
+
+fn main() {
+    t1(&());
+    t2(&());
+    t3(&());
+    t4(&());
+}
diff --git a/src/test/ui/nll/issue-58053.rs b/src/test/ui/nll/issue-58053.rs
new file mode 100644
index 0000000..d433890
--- /dev/null
+++ b/src/test/ui/nll/issue-58053.rs
@@ -0,0 +1,14 @@
+#![allow(warnings)]
+#![feature(nll)]
+
+fn main() {
+    let i = &3;
+
+    let f = |x: &i32| -> &i32 { x };
+    //~^ ERROR lifetime may not live long enough
+    let j = f(i);
+
+    let g = |x: &i32| { x };
+    //~^ ERROR lifetime may not live long enough
+    let k = g(i);
+}
diff --git a/src/test/ui/nll/issue-58053.stderr b/src/test/ui/nll/issue-58053.stderr
new file mode 100644
index 0000000..9048983
--- /dev/null
+++ b/src/test/ui/nll/issue-58053.stderr
@@ -0,0 +1,20 @@
+error: lifetime may not live long enough
+  --> $DIR/issue-58053.rs:7:33
+   |
+LL |     let f = |x: &i32| -> &i32 { x };
+   |                 -        ----   ^ returning this value requires that `'1` must outlive `'2`
+   |                 |        |
+   |                 |        return type of closure is &'2 i32
+   |                 let's call the lifetime of this reference `'1`
+
+error: lifetime may not live long enough
+  --> $DIR/issue-58053.rs:11:25
+   |
+LL |     let g = |x: &i32| { x };
+   |                 -   -   ^ returning this value requires that `'1` must outlive `'2`
+   |                 |   |
+   |                 |   return type of closure is &'2 i32
+   |                 let's call the lifetime of this reference `'1`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/test/ui/parser-recovery-2.stderr b/src/test/ui/parser-recovery-2.stderr
index 92d8cbc..76f7af3 100644
--- a/src/test/ui/parser-recovery-2.stderr
+++ b/src/test/ui/parser-recovery-2.stderr
@@ -1,3 +1,9 @@
+error: unexpected token: `;`
+  --> $DIR/parser-recovery-2.rs:12:15
+   |
+LL |     let x = y.;  //~ ERROR unexpected token
+   |               ^
+
 error: incorrect close delimiter: `)`
   --> $DIR/parser-recovery-2.rs:8:5
    |
@@ -7,12 +13,6 @@
 LL |     ) //~ ERROR incorrect close delimiter: `)`
    |     ^ incorrect close delimiter
 
-error: unexpected token: `;`
-  --> $DIR/parser-recovery-2.rs:12:15
-   |
-LL |     let x = y.;  //~ ERROR unexpected token
-   |               ^
-
 error[E0425]: cannot find function `foo` in this scope
   --> $DIR/parser-recovery-2.rs:7:17
    |
diff --git a/src/test/ui/parser/issue-10636-2.rs b/src/test/ui/parser/issue-10636-2.rs
index a02fd41..6fb6363 100644
--- a/src/test/ui/parser/issue-10636-2.rs
+++ b/src/test/ui/parser/issue-10636-2.rs
@@ -5,7 +5,7 @@
     option.map(|some| 42;
                           //~^ ERROR: expected one of
 
-} //~ ERROR: incorrect close delimiter
+}
 //~^ ERROR: expected expression, found `)`
 
 fn main() {}
diff --git a/src/test/ui/parser/issue-10636-2.stderr b/src/test/ui/parser/issue-10636-2.stderr
index 9b3115c..38d57ce 100644
--- a/src/test/ui/parser/issue-10636-2.stderr
+++ b/src/test/ui/parser/issue-10636-2.stderr
@@ -1,25 +1,17 @@
-error: incorrect close delimiter: `}`
-  --> $DIR/issue-10636-2.rs:8:1
-   |
-LL | pub fn trace_option(option: Option<isize>) {
-   |                                            - close delimiter possibly meant for this
-LL |     option.map(|some| 42;
-   |               - un-closed delimiter
-...
-LL | } //~ ERROR: incorrect close delimiter
-   | ^ incorrect close delimiter
-
 error: expected one of `)`, `,`, `.`, `?`, or an operator, found `;`
   --> $DIR/issue-10636-2.rs:5:25
    |
 LL |     option.map(|some| 42;
-   |                         ^ expected one of `)`, `,`, `.`, `?`, or an operator here
+   |               -         ^
+   |               |         |
+   |               |         help: `)` may belong here
+   |               unclosed delimiter
 
 error: expected expression, found `)`
   --> $DIR/issue-10636-2.rs:8:1
    |
-LL | } //~ ERROR: incorrect close delimiter
+LL | }
    | ^ expected expression
 
-error: aborting due to 3 previous errors
+error: aborting due to 2 previous errors
 
diff --git a/src/test/ui/parser/macro-mismatched-delim-paren-brace.stderr b/src/test/ui/parser/macro-mismatched-delim-paren-brace.stderr
index 805ba8b..abb0820 100644
--- a/src/test/ui/parser/macro-mismatched-delim-paren-brace.stderr
+++ b/src/test/ui/parser/macro-mismatched-delim-paren-brace.stderr
@@ -1,3 +1,9 @@
+error: unexpected close delimiter: `}`
+  --> $DIR/macro-mismatched-delim-paren-brace.rs:5:1
+   |
+LL | } //~ ERROR unexpected close delimiter: `}`
+   | ^ unexpected close delimiter
+
 error: incorrect close delimiter: `}`
   --> $DIR/macro-mismatched-delim-paren-brace.rs:4:5
    |
@@ -7,11 +13,5 @@
 LL |     } //~ ERROR incorrect close delimiter
    |     ^ incorrect close delimiter
 
-error: unexpected close delimiter: `}`
-  --> $DIR/macro-mismatched-delim-paren-brace.rs:5:1
-   |
-LL | } //~ ERROR unexpected close delimiter: `}`
-   | ^ unexpected close delimiter
-
 error: aborting due to 2 previous errors
 
diff --git a/src/test/ui/resolve/token-error-correct-3.rs b/src/test/ui/resolve/token-error-correct-3.rs
index 86cf711..b1ca0bb 100644
--- a/src/test/ui/resolve/token-error-correct-3.rs
+++ b/src/test/ui/resolve/token-error-correct-3.rs
@@ -17,7 +17,7 @@
             //~| expected type `()`
             //~| found type `std::result::Result<bool, std::io::Error>`
             //~| expected one of
-        } else { //~ ERROR: incorrect close delimiter: `}`
+        } else {
             //~^ ERROR: expected one of
             //~| unexpected token
             Ok(false);
diff --git a/src/test/ui/resolve/token-error-correct-3.stderr b/src/test/ui/resolve/token-error-correct-3.stderr
index 2164d27..a6bb83c 100644
--- a/src/test/ui/resolve/token-error-correct-3.stderr
+++ b/src/test/ui/resolve/token-error-correct-3.stderr
@@ -1,19 +1,11 @@
-error: incorrect close delimiter: `}`
-  --> $DIR/token-error-correct-3.rs:20:9
-   |
-LL |         if !is_directory(path.as_ref()) { //~ ERROR: cannot find function `is_directory`
-   |                                         - close delimiter possibly meant for this
-LL |             callback(path.as_ref(); //~ ERROR expected one of
-   |                     - un-closed delimiter
-...
-LL |         } else { //~ ERROR: incorrect close delimiter: `}`
-   |         ^ incorrect close delimiter
-
 error: expected one of `)`, `,`, `.`, `?`, or an operator, found `;`
   --> $DIR/token-error-correct-3.rs:14:35
    |
 LL |             callback(path.as_ref(); //~ ERROR expected one of
-   |                                   ^ expected one of `)`, `,`, `.`, `?`, or an operator here
+   |                     -             ^
+   |                     |             |
+   |                     |             help: `)` may belong here
+   |                     unclosed delimiter
 
 error: expected one of `.`, `;`, `?`, `}`, or an operator, found `)`
   --> $DIR/token-error-correct-3.rs:20:9
@@ -21,7 +13,7 @@
 LL |             fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mismatched types
    |                                                             - expected one of `.`, `;`, `?`, `}`, or an operator here
 ...
-LL |         } else { //~ ERROR: incorrect close delimiter: `}`
+LL |         } else {
    |         ^ unexpected token
 
 error[E0425]: cannot find function `is_directory` in this scope
@@ -41,7 +33,7 @@
    = note: expected type `()`
               found type `std::result::Result<bool, std::io::Error>`
 
-error: aborting due to 5 previous errors
+error: aborting due to 4 previous errors
 
 Some errors occurred: E0308, E0425.
 For more information about an error, try `rustc --explain E0308`.
diff --git a/src/test/ui/resolve/token-error-correct.rs b/src/test/ui/resolve/token-error-correct.rs
index b97e22f..d649077 100644
--- a/src/test/ui/resolve/token-error-correct.rs
+++ b/src/test/ui/resolve/token-error-correct.rs
@@ -2,6 +2,8 @@
 
 fn main() {
     foo(bar(;
-    //~^ ERROR: expected expression, found `;`
+    //~^ ERROR cannot find function `bar` in this scope
 }
 //~^ ERROR: incorrect close delimiter: `}`
+
+fn foo(_: usize) {}
diff --git a/src/test/ui/resolve/token-error-correct.stderr b/src/test/ui/resolve/token-error-correct.stderr
index 0a45904..b0827ea 100644
--- a/src/test/ui/resolve/token-error-correct.stderr
+++ b/src/test/ui/resolve/token-error-correct.stderr
@@ -5,15 +5,16 @@
    |           - close delimiter possibly meant for this
 LL |     foo(bar(;
    |            - un-closed delimiter
-LL |     //~^ ERROR: expected expression, found `;`
+LL |     //~^ ERROR cannot find function `bar` in this scope
 LL | }
    | ^ incorrect close delimiter
 
-error: expected expression, found `;`
-  --> $DIR/token-error-correct.rs:4:13
+error[E0425]: cannot find function `bar` in this scope
+  --> $DIR/token-error-correct.rs:4:9
    |
 LL |     foo(bar(;
-   |             ^ expected expression
+   |         ^^^ not found in this scope
 
 error: aborting due to 2 previous errors
 
+For more information about this error, try `rustc --explain E0425`.
diff --git a/src/test/ui/rfc-2166-underscore-imports/basic.stderr b/src/test/ui/rfc-2166-underscore-imports/basic.stderr
index 3080359..c7b36ea 100644
--- a/src/test/ui/rfc-2166-underscore-imports/basic.stderr
+++ b/src/test/ui/rfc-2166-underscore-imports/basic.stderr
@@ -2,7 +2,7 @@
   --> $DIR/basic.rs:26:9
    |
 LL |     use m::Tr1 as _; //~ WARN unused import
-   |         ^^^^^^^^^^^
+   |     ----^^^^^^^^^^^- help: remove the whole `use` item
    |
 note: lint level defined here
   --> $DIR/basic.rs:4:9
@@ -14,5 +14,5 @@
   --> $DIR/basic.rs:27:9
    |
 LL |     use S as _; //~ WARN unused import
-   |         ^^^^^^
+   |     ----^^^^^^- help: remove the whole `use` item
 
diff --git a/src/test/ui/rfc-2166-underscore-imports/unused-2018.stderr b/src/test/ui/rfc-2166-underscore-imports/unused-2018.stderr
index 4163c28..0bbc172 100644
--- a/src/test/ui/rfc-2166-underscore-imports/unused-2018.stderr
+++ b/src/test/ui/rfc-2166-underscore-imports/unused-2018.stderr
@@ -2,7 +2,7 @@
   --> $DIR/unused-2018.rs:6:9
    |
 LL |     use core::any; //~ ERROR unused import: `core::any`
-   |         ^^^^^^^^^
+   |     ----^^^^^^^^^- help: remove the whole `use` item
    |
 note: lint level defined here
   --> $DIR/unused-2018.rs:3:9
@@ -14,7 +14,7 @@
   --> $DIR/unused-2018.rs:10:9
    |
 LL |     use core; //~ ERROR unused import: `core`
-   |         ^^^^
+   |     ----^^^^- help: remove the whole `use` item
 
 error: aborting due to 2 previous errors
 
diff --git a/src/test/ui/span/multispan-import-lint.stderr b/src/test/ui/span/multispan-import-lint.stderr
index a730d08..6bd0e9b 100644
--- a/src/test/ui/span/multispan-import-lint.stderr
+++ b/src/test/ui/span/multispan-import-lint.stderr
@@ -10,4 +10,8 @@
 LL | #![warn(unused)]
    |         ^^^^^^
    = note: #[warn(unused_imports)] implied by #[warn(unused)]
+help: remove the unused imports
+   |
+LL | use std::cmp::{min};
+   |               -- --
 
diff --git a/src/test/ui/use/use-nested-groups-unused-imports.rs b/src/test/ui/use/use-nested-groups-unused-imports.rs
index 5bdc7b2..5fe8595 100644
--- a/src/test/ui/use/use-nested-groups-unused-imports.rs
+++ b/src/test/ui/use/use-nested-groups-unused-imports.rs
@@ -18,7 +18,7 @@
 use foo::bar::baz::{*, *};
     //~^ ERROR unused import: `*`
 use foo::{};
-    //~^ ERROR unused import: `use foo::{};`
+    //~^ ERROR unused import: `foo::{}`
 
 fn main() {
     let _: Bar;
diff --git a/src/test/ui/use/use-nested-groups-unused-imports.stderr b/src/test/ui/use/use-nested-groups-unused-imports.stderr
index f60c7f5..6af6f44 100644
--- a/src/test/ui/use/use-nested-groups-unused-imports.stderr
+++ b/src/test/ui/use/use-nested-groups-unused-imports.stderr
@@ -2,7 +2,7 @@
   --> $DIR/use-nested-groups-unused-imports.rs:16:11
    |
 LL | use foo::{Foo, bar::{baz::{}, foobar::*}, *};
-   |           ^^^        ^^^^^^^  ^^^^^^^^^   ^
+   | ----------^^^--------^^^^^^^--^^^^^^^^^---^-- help: remove the whole `use` item
    |
 note: lint level defined here
   --> $DIR/use-nested-groups-unused-imports.rs:3:9
@@ -14,13 +14,15 @@
   --> $DIR/use-nested-groups-unused-imports.rs:18:24
    |
 LL | use foo::bar::baz::{*, *};
-   |                        ^
+   |                      --^
+   |                      |
+   |                      help: remove the unused import
 
-error: unused import: `use foo::{};`
-  --> $DIR/use-nested-groups-unused-imports.rs:20:1
+error: unused import: `foo::{}`
+  --> $DIR/use-nested-groups-unused-imports.rs:20:5
    |
 LL | use foo::{};
-   | ^^^^^^^^^^^^
+   | ----^^^^^^^- help: remove the whole `use` item
 
 error: aborting due to 3 previous errors
 
diff --git a/src/tools/cargo b/src/tools/cargo
index 4e74e2f..865cb70 160000
--- a/src/tools/cargo
+++ b/src/tools/cargo
@@ -1 +1 @@
-Subproject commit 4e74e2fc0908524d17735c768067117d3e84ee9c
+Subproject commit 865cb70106a6b1171a500ff68f93ab52eea56e72
diff --git a/src/tools/rustc-std-workspace-core/Cargo.toml b/src/tools/rustc-std-workspace-core/Cargo.toml
index f000d63..d527ce1 100644
--- a/src/tools/rustc-std-workspace-core/Cargo.toml
+++ b/src/tools/rustc-std-workspace-core/Cargo.toml
@@ -6,6 +6,7 @@
 description = """
 Hack for the compiler's own build system
 """
+edition = "2018"
 
 [lib]
 path = "lib.rs"
diff --git a/src/tools/rustc-std-workspace-core/lib.rs b/src/tools/rustc-std-workspace-core/lib.rs
index e2946fe..99d51bc 100644
--- a/src/tools/rustc-std-workspace-core/lib.rs
+++ b/src/tools/rustc-std-workspace-core/lib.rs
@@ -1,6 +1,5 @@
 #![feature(no_core)]
 #![no_core]
-
-extern crate core;
+#![deny(rust_2018_idioms)]
 
 pub use core::*;
diff --git a/src/tools/rustc-workspace-hack/Cargo.toml b/src/tools/rustc-workspace-hack/Cargo.toml
index f5eeddd..f943ac9 100644
--- a/src/tools/rustc-workspace-hack/Cargo.toml
+++ b/src/tools/rustc-workspace-hack/Cargo.toml
@@ -6,6 +6,7 @@
 description = """
 Hack for the compiler's own build system
 """
+edition = "2018"
 
 [lib]
 path = "lib.rs"