Auto merge of #68827 - ssomers:btree_navigation_revisited, r=Mark-Simulacrum

BTreeMap navigation done safer & faster

It turns out there was a faster way to write the tree navigation code bundled in #67073: move from an edge to its KV, and from that KV to the next edge, as two separate steps. This change extracts most of the navigation code into safe functions and confines the duplication of handles to short wrapper functions.
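
For illustration, here is a minimal, hypothetical sketch of that two-step idea on a single node (toy types and names, not the actual `BTreeMap` internals, which also walk up and down the tree): an edge-to-KV step that can fail at the node's last edge, a KV-to-edge step that cannot fail, and a short wrapper that combines them so juggling two handles stays in one place.

```rust
/// Toy single-level "node": n key-value pairs and n + 1 edges around them.
struct Node<K, V> {
    pairs: Vec<(K, V)>,
}

/// Handle to the edge before `pairs[idx]` (idx == pairs.len() is the last edge).
#[derive(Clone, Copy)]
struct EdgeHandle(usize);

/// Handle to the key-value pair `pairs[idx]`.
#[derive(Clone, Copy)]
struct KvHandle(usize);

impl EdgeHandle {
    /// Step 1: edge -> KV to its right, or Err if this is the node's last edge.
    fn right_kv<K, V>(self, node: &Node<K, V>) -> Result<KvHandle, EdgeHandle> {
        if self.0 < node.pairs.len() { Ok(KvHandle(self.0)) } else { Err(self) }
    }
}

impl KvHandle {
    /// Step 2: KV -> the edge immediately after it; this never fails.
    fn right_edge(self) -> EdgeHandle {
        EdgeHandle(self.0 + 1)
    }
}

/// Short wrapper combining both steps: yields the KV's contents plus the
/// handle to resume from, so callers never hold two handles at once.
fn next_kv_and_edge<'a, K, V>(
    edge: EdgeHandle,
    node: &'a Node<K, V>,
) -> Option<(&'a K, &'a V, EdgeHandle)> {
    let kv = edge.right_kv(node).ok()?;
    let (k, v) = &node.pairs[kv.0];
    Some((k, v, kv.right_edge()))
}

fn main() {
    let node = Node { pairs: vec![(1, "a"), (2, "b")] };
    let mut edge = EdgeHandle(0);
    while let Some((k, v, next)) = next_kv_and_edge(edge, &node) {
        println!("{}: {}", k, v);
        edge = next;
    }
}
```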

This somehow hits a sweet spot in the compiler, because the benchmarks show improvements across the board:
```
>cargo benchcmp pre3.txt posz4.txt --threshold 5
 name                                           pre3.txt ns/iter  posz4.txt ns/iter  diff ns/iter   diff %  speedup
 btree::map::first_and_last_0                   40                37                           -3   -7.50%   x 1.08
 btree::map::first_and_last_100                 58                44                          -14  -24.14%   x 1.32
 btree::map::iter_1000                          8,920             3,419                    -5,501  -61.67%   x 2.61
 btree::map::iter_100000                        1,069,290         411,615                -657,675  -61.51%   x 2.60
 btree::map::iter_20                            169               58                         -111  -65.68%   x 2.91
 btree::map::iter_mut_1000                      8,701             3,303                    -5,398  -62.04%   x 2.63
 btree::map::iter_mut_100000                    1,034,560         405,975                -628,585  -60.76%   x 2.55
 btree::map::iter_mut_20                        165               58                         -107  -64.85%   x 2.84
 btree::set::clone_100                          1,831             1,562                      -269  -14.69%   x 1.17
 btree::set::clone_100_and_clear                1,831             1,565                      -266  -14.53%   x 1.17
 btree::set::clone_100_and_into_iter            1,917             1,541                      -376  -19.61%   x 1.24
 btree::set::clone_100_and_pop_all              2,609             2,441                      -168   -6.44%   x 1.07
 btree::set::clone_100_and_remove_all           4,598             3,927                      -671  -14.59%   x 1.17
 btree::set::clone_100_and_remove_half          2,765             2,551                      -214   -7.74%   x 1.08
 btree::set::clone_10k                          191,610           164,616                 -26,994  -14.09%   x 1.16
 btree::set::clone_10k_and_clear                192,003           164,616                 -27,387  -14.26%   x 1.17
 btree::set::clone_10k_and_into_iter            200,037           163,010                 -37,027  -18.51%   x 1.23
 btree::set::clone_10k_and_pop_all              267,023           250,913                 -16,110   -6.03%   x 1.06
 btree::set::clone_10k_and_remove_all           536,230           464,100                 -72,130  -13.45%   x 1.16
 btree::set::clone_10k_and_remove_half          453,350           430,545                 -22,805   -5.03%   x 1.05
 btree::set::difference_random_100_vs_100       1,787             801                        -986  -55.18%   x 2.23
 btree::set::difference_random_100_vs_10k       2,978             2,696                      -282   -9.47%   x 1.10
 btree::set::difference_random_10k_vs_100       111,075           54,734                  -56,341  -50.72%   x 2.03
 btree::set::difference_random_10k_vs_10k       246,380           175,980                 -70,400  -28.57%   x 1.40
 btree::set::difference_staggered_100_vs_100    1,789             951                        -838  -46.84%   x 1.88
 btree::set::difference_staggered_100_vs_10k    2,798             2,606                      -192   -6.86%   x 1.07
 btree::set::difference_staggered_10k_vs_10k    176,452           97,401                  -79,051  -44.80%   x 1.81
 btree::set::intersection_100_neg_vs_10k_pos    34                32                           -2   -5.88%   x 1.06
 btree::set::intersection_100_pos_vs_100_neg    30                27                           -3  -10.00%   x 1.11
 btree::set::intersection_random_100_vs_100     1,537             613                        -924  -60.12%   x 2.51
 btree::set::intersection_random_100_vs_10k     2,793             2,649                      -144   -5.16%   x 1.05
 btree::set::intersection_random_10k_vs_10k     222,127           147,166                 -74,961  -33.75%   x 1.51
 btree::set::intersection_staggered_100_vs_100  1,447             622                        -825  -57.01%   x 2.33
 btree::set::intersection_staggered_100_vs_10k  2,606             2,382                      -224   -8.60%   x 1.09
 btree::set::intersection_staggered_10k_vs_10k  143,620           58,790                  -84,830  -59.07%   x 2.44
 btree::set::is_subset_100_vs_100               1,349             488                        -861  -63.83%   x 2.76
 btree::set::is_subset_100_vs_10k               1,720             1,428                      -292  -16.98%   x 1.20
 btree::set::is_subset_10k_vs_10k               135,984           48,527                  -87,457  -64.31%   x 2.80
```
The `first_and_last` results are noise (those benchmarks don't iterate); the others seem genuine.
As always, approved by Miri.

Also included is a separate commit with some more benchmarks of mutable behaviour (which benefits as well).
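
For reference, a mutable-iteration benchmark of this kind looks roughly like the following (a hedged sketch in the style of the liballoc benches named above, using the unstable `test::Bencher`; not the actual benchmark source).

```rust
#![feature(test)]
extern crate test;

use std::collections::BTreeMap;
use test::{black_box, Bencher};

// Hypothetical benchmark: build a map once, then time a full mutable pass.
#[bench]
fn iter_mut_1000(b: &mut Bencher) {
    let mut map: BTreeMap<usize, usize> = (0..1000).map(|i| (i, i)).collect();
    b.iter(|| {
        for (_k, v) in map.iter_mut() {
            *v += 1;
        }
        black_box(&map);
    });
}
```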

r? @cuviper
diff --git a/.github/ISSUE_TEMPLATE/blank_issue.md b/.github/ISSUE_TEMPLATE/blank_issue.md
new file mode 100644
index 0000000..9aef3eb
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/blank_issue.md
@@ -0,0 +1,4 @@
+---
+name: Blank Issue
+about: Create a blank issue.
+---
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..5675579
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,44 @@
+---
+name: Bug Report
+about: Create a bug report for Rust.
+labels: C-bug
+---
+<!--
+Thank you for filing a bug report! 🐛 Please provide a short summary of the bug,
+along with any information you feel relevant to replicating the bug.
+-->
+
+I tried this code:
+
+```rust
+<code>
+```
+
+I expected to see this happen: *explanation*
+
+Instead, this happened: *explanation*
+
+### Meta
+<!--
+If you're using the stable version of the compiler, you should also check if the
+bug also exists in the beta or nightly versions.
+-->
+
+`rustc --version --verbose`:
+```
+<version>
+```
+
+<!--
+Include a backtrace in the code block by setting `RUST_BACKTRACE=1` in your
+environment. E.g. `RUST_BACKTRACE=1 cargo build`.
+-->
+<details><summary>Backtrace</summary>
+<p>
+
+```
+<backtrace>
+```
+
+</p>
+</details>
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 0000000..bd7dc0a
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,5 @@
+blank_issues_enabled: true
+contact_links:
+  - name: Rust Programming Language Forum
+    url: https://users.rust-lang.org
+    about: Please ask and answer questions about Rust here.
diff --git a/.github/ISSUE_TEMPLATE/ice.md b/.github/ISSUE_TEMPLATE/ice.md
new file mode 100644
index 0000000..e669e49
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/ice.md
@@ -0,0 +1,52 @@
+---
+name: Internal Compiler Error
+about: Create a report for an internal compiler error in rustc.
+labels: C-bug, I-ICE, T-compiler
+---
+<!--
+Thank you for finding an Internal Compiler Error! 🧊  If possible, try to provide
+a minimal verifiable example. You can read "Rust Bug Minimization Patterns" for
+how to create smaller examples.
+
+http://blog.pnkfx.org/blog/2019/11/18/rust-bug-minimization-patterns/
+
+-->
+
+### Code
+
+```
+<code>
+```
+
+
+### Meta
+<!--
+If you're using the stable version of the compiler, you should also check if the
+bug also exists in the beta or nightly versions.
+-->
+
+`rustc --version --verbose`:
+```
+<version>
+```
+
+### Error output
+
+```
+<output>
+```
+
+<!--
+Include a backtrace in the code block by setting `RUST_BACKTRACE=1` in your
+environment. E.g. `RUST_BACKTRACE=1 cargo build`.
+-->
+<details><summary><strong>Backtrace</strong></summary>
+<p>
+
+```
+<backtrace>
+```
+
+</p>
+</details>
+
diff --git a/.github/ISSUE_TEMPLATE/tracking_issue.md b/.github/ISSUE_TEMPLATE/tracking_issue.md
new file mode 100644
index 0000000..f935912
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/tracking_issue.md
@@ -0,0 +1,58 @@
+---
+name: Tracking Issue
+about: A tracking issue for a feature in Rust.
+title: Tracking Issue for XXX
+labels: C-tracking-issue
+---
+<!--
+Thank you for creating a tracking issue! 📜 Tracking issues are for tracking a
+feature from implementation to stabilisation. Make sure to include the relevant
+RFC for the feature if it has one. Otherwise provide a short summary of the
+feature and link any relevant PRs or issues, and remove any sections that are
+not relevant to the feature.
+
+Remember to add team labels to the tracking issue.
+For a language team feature, this would e.g., be `T-lang`.
+Such a feature should also be labeled with e.g., `F-my_feature`.
+This label is used to associate issues (e.g., bugs and design questions) to the feature.
+-->
+
+This is a tracking issue for the RFC "XXX" (rust-lang/rfcs#NNN).
+The feature gate for the issue is `#![feature(FFF)]`.
+
+### About tracking issues
+
+Tracking issues are used to record the overall progress of implementation.
+They are also uses as hubs connecting to other relevant issues, e.g., bugs or open design questions.
+A tracking issue is however *not* meant for large scale discussion, questions, or bug reports about a feature.
+Instead, open a dedicated issue for the specific matter and add the relevant feature gate label.
+
+### Steps
+<!--
+Include each step required to complete the feature. Typically this is a PR
+implementing a feature, followed by a PR that stabilises the feature. However
+for larger features an implementation could be broken up into multiple PRs.
+-->
+
+- [ ] Implement the RFC (cc @rust-lang/XXX -- can anyone write up mentoring
+      instructions?)
+- [ ] Adjust documentation ([see instructions on rustc-guide][doc-guide])
+- [ ] Stabilization PR ([see instructions on rustc-guide][stabilization-guide])
+
+[stabilization-guide]: https://rust-lang.github.io/rustc-guide/stabilization_guide.html#stabilization-pr
+[doc-guide]: https://rust-lang.github.io/rustc-guide/stabilization_guide.html#documentation-prs
+
+### Unresolved Questions
+<!--
+Include any open questions that need to be answered before the feature can be
+stabilised.
+-->
+
+XXX --- list all the "unresolved questions" found in the RFC to ensure they are
+not forgotten
+
+### Implementation history
+
+<!--
+Include a list of all the PRs that were involved in implementing the feature.
+-->
diff --git a/.mailmap b/.mailmap
index 6ab6be2..e5aad52 100644
--- a/.mailmap
+++ b/.mailmap
@@ -100,6 +100,7 @@
 Guillaume Gomez <guillaume1.gomez@gmail.com>
 Guillaume Gomez <guillaume1.gomez@gmail.com> ggomez <ggomez@ggo.ifr.lan>
 Guillaume Gomez <guillaume1.gomez@gmail.com> Guillaume Gomez <ggomez@ggo.ifr.lan>
+Hanna Kruppe <hanna.kruppe@gmail.com> <robin.kruppe@gmail.com>
 Heather <heather@cynede.net> <Cynede@Gentoo.org>
 Heather <heather@cynede.net> <Heather@cynede.net>
 Herman J. Radtke III <herman@hermanradtke.com> Herman J. Radtke III <hermanradtke@gmail.com>
@@ -113,6 +114,7 @@
 James Miller <bladeon@gmail.com> <james@aatch.net>
 James Perry <james.austin.perry@gmail.com>
 Jason Fager <jfager@gmail.com>
+Jason Liquorish <jason@liquori.sh> <Bassetts@users.noreply.github.com>
 Jason Orendorff <jorendorff@mozilla.com> <jason.orendorff@gmail.com>
 Jason Orendorff <jorendorff@mozilla.com> <jason@mozmac-2.local>
 Jason Toffaletti <toffaletti@gmail.com> Jason Toffaletti <jason@topsy.com>
diff --git a/Cargo.lock b/Cargo.lock
index ec976b6..beda399 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -121,9 +121,9 @@
 
 [[package]]
 name = "backtrace"
-version = "0.3.40"
+version = "0.3.44"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "924c76597f0d9ca25d762c25a4d369d51267536465dc5064bdf0eb073ed477ea"
+checksum = "e4036b9bf40f3cf16aba72a3d65e8a520fc4bafcdc7079aea8f848c58c5b5536"
 dependencies = [
  "backtrace-sys",
  "cfg-if",
@@ -281,7 +281,7 @@
 
 [[package]]
 name = "cargo"
-version = "0.43.0"
+version = "0.44.0"
 dependencies = [
  "anyhow",
  "atty",
@@ -292,6 +292,7 @@
  "clap",
  "core-foundation 0.7.0",
  "crates-io",
+ "crossbeam-channel",
  "crossbeam-utils 0.7.0",
  "crypto-hash",
  "curl",
@@ -497,7 +498,7 @@
  "itertools 0.8.0",
  "lazy_static 1.4.0",
  "matches",
- "pulldown-cmark 0.6.1",
+ "pulldown-cmark 0.7.0",
  "quine-mc_cluskey",
  "regex-syntax",
  "semver",
@@ -575,9 +576,9 @@
 
 [[package]]
 name = "compiler_builtins"
-version = "0.1.24"
+version = "0.1.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b9975aefa63997ef75ca9cf013ff1bb81487aaa0b622c21053afd3b92979a7af"
+checksum = "438ac08ddc5efe81452f984a9e33ba425b00b31d1f48e6acd9e2210aa28cc52e"
 dependencies = [
  "cc",
  "rustc-std-workspace-core",
@@ -721,12 +722,11 @@
 
 [[package]]
 name = "crossbeam-channel"
-version = "0.3.8"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f0ed1a4de2235cabda8558ff5840bffb97fcb64c97827f354a451307df5f72b"
+checksum = "acec9a3b0b3559f15aee4f90746c4e5e293b701c0f7d3925d24e01645267b68c"
 dependencies = [
- "crossbeam-utils 0.6.5",
- "smallvec 0.6.10",
+ "crossbeam-utils 0.7.0",
 ]
 
 [[package]]
@@ -879,14 +879,13 @@
 
 [[package]]
 name = "derive_more"
-version = "0.13.0"
+version = "0.99.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f57d78cf3bd45270dad4e70c21ec77a960b36c7a841ff9db76aaa775a8fb871"
+checksum = "2159be042979966de68315bce7034bb000c775f22e3e834e1c52ff78f041cae8"
 dependencies = [
- "proc-macro2 0.4.30",
- "quote 0.6.12",
- "rustc_version",
- "syn 0.15.35",
+ "proc-macro2 1.0.3",
+ "quote 1.0.2",
+ "syn 1.0.11",
 ]
 
 [[package]]
@@ -1077,13 +1076,14 @@
 
 [[package]]
 name = "filetime"
-version = "0.2.4"
+version = "0.2.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2df5c1a8c4be27e7707789dc42ae65976e60b394afd293d1419ab915833e646"
+checksum = "1ff6d4dab0aa0c8e6346d46052e93b13a16cf847b54ed357087c35011048cc7d"
 dependencies = [
  "cfg-if",
  "libc",
  "redox_syscall",
+ "winapi 0.3.8",
 ]
 
 [[package]]
@@ -1537,9 +1537,9 @@
 
 [[package]]
 name = "ignore"
-version = "0.4.10"
+version = "0.4.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ec16832258409d571aaef8273f3c3cc5b060d784e159d1a0f3b0017308f84a7"
+checksum = "522daefc3b69036f80c7d2990b28ff9e0471c683bad05ca258e0a01dd22c5a1e"
 dependencies = [
  "crossbeam-channel",
  "globset",
@@ -1548,7 +1548,7 @@
  "memchr",
  "regex",
  "same-file",
- "thread_local",
+ "thread_local 1.0.1",
  "walkdir",
  "winapi-util",
 ]
@@ -1564,7 +1564,7 @@
  "rand_xoshiro",
  "sized-chunks",
  "typenum",
- "version_check 0.9.1",
+ "version_check",
 ]
 
 [[package]]
@@ -1657,9 +1657,9 @@
 
 [[package]]
 name = "jsonrpc-client-transports"
-version = "13.1.0"
+version = "14.0.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "39577db48b004cffb4c5b8e5c9b993c177c52599ecbee88711e815acf65144db"
+checksum = "0a9ae166c4d1f702d297cd76d4b55758ace80272ffc6dbb139fdc1bf810de40b"
 dependencies = [
  "failure",
  "futures",
@@ -1676,9 +1676,9 @@
 
 [[package]]
 name = "jsonrpc-core"
-version = "13.2.0"
+version = "14.0.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "91d767c183a7e58618a609499d359ce3820700b3ebb4823a18c343b4a2a41a0d"
+checksum = "fe3b688648f1ef5d5072229e2d672ecb92cbff7d1c79bcf3fd5898f3f3df0970"
 dependencies = [
  "futures",
  "log",
@@ -1689,63 +1689,62 @@
 
 [[package]]
 name = "jsonrpc-core-client"
-version = "13.1.0"
+version = "14.0.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f047c10738edee7c3c6acf5241a0ce33df32ef9230c1a7fb03e4a77ee72c992f"
+checksum = "080dc110be17701097df238fad3c816d4a478a1899dfbcf8ec8957dd40ec7304"
 dependencies = [
  "jsonrpc-client-transports",
 ]
 
 [[package]]
 name = "jsonrpc-derive"
-version = "13.1.0"
+version = "14.0.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "29f9149f785deaae92a4c834a9a1a83a4313b8cfedccf15362cd4cf039a64501"
+checksum = "8609af8f63b626e8e211f52441fcdb6ec54f1a446606b10d5c89ae9bf8a20058"
 dependencies = [
  "proc-macro-crate",
- "proc-macro2 0.4.30",
- "quote 0.6.12",
- "syn 0.15.35",
+ "proc-macro2 1.0.3",
+ "quote 1.0.2",
+ "syn 1.0.11",
 ]
 
 [[package]]
 name = "jsonrpc-ipc-server"
-version = "13.1.0"
+version = "14.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "256c5e4292c17b4c2ecdf542299dc8e9d6b3939c075c54825570ad9317fe5751"
+checksum = "b579cd0840d7db3ebaadf52f6f31ec601a260e78d610e44f68634f919e34497a"
 dependencies = [
  "jsonrpc-core",
  "jsonrpc-server-utils",
  "log",
  "parity-tokio-ipc",
- "parking_lot",
+ "parking_lot 0.9.0",
  "tokio-service",
 ]
 
 [[package]]
 name = "jsonrpc-pubsub"
-version = "13.1.0"
+version = "14.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2c08b444cc0ed70263798834343d0ac875e664257df8079160f23ac1ea79446"
+checksum = "5b31c9b90731276fdd24d896f31bb10aecf2e5151733364ae81123186643d939"
 dependencies = [
  "jsonrpc-core",
  "log",
- "parking_lot",
+ "parking_lot 0.10.0",
  "serde",
 ]
 
 [[package]]
 name = "jsonrpc-server-utils"
-version = "13.1.0"
+version = "14.0.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44561bfdd31401bad790527f1e951dde144f2341ddc3e1b859d32945e1a34eff"
+checksum = "95b7635e618a0edbbe0d2a2bbbc69874277c49383fcf6c3c0414491cfb517d22"
 dependencies = [
  "bytes",
  "globset",
  "jsonrpc-core",
  "lazy_static 1.4.0",
  "log",
- "num_cpus",
  "tokio",
  "tokio-codec",
  "unicase",
@@ -2009,15 +2008,15 @@
 dependencies = [
  "byteorder",
  "memmap",
- "parking_lot",
+ "parking_lot 0.9.0",
  "rustc-hash",
 ]
 
 [[package]]
 name = "memchr"
-version = "2.2.0"
+version = "2.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2efc7bc57c883d4a4d6e3246905283d8dae951bb3bd32f49d6ef297f546e1c39"
+checksum = "53445de381a1f436797497c61d851644d0e8e88e6140f22872ad33a704933978"
 
 [[package]]
 name = "memmap"
@@ -2351,11 +2350,21 @@
 checksum = "f842b1982eb6c2fe34036a4fbfb06dd185a3f5c8edfaacdf7d1ea10b07de6252"
 dependencies = [
  "lock_api",
- "parking_lot_core",
+ "parking_lot_core 0.6.2",
  "rustc_version",
 ]
 
 [[package]]
+name = "parking_lot"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92e98c49ab0b7ce5b222f2cc9193fc4efe11c6d0bd4f648e374684a6857b1cfc"
+dependencies = [
+ "lock_api",
+ "parking_lot_core 0.7.0",
+]
+
+[[package]]
 name = "parking_lot_core"
 version = "0.6.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2371,6 +2380,20 @@
 ]
 
 [[package]]
+name = "parking_lot_core"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7582838484df45743c8434fbff785e8edf260c28748353d44bc0da32e0ceabf1"
+dependencies = [
+ "cfg-if",
+ "cloudabi",
+ "libc",
+ "redox_syscall",
+ "smallvec 1.0.0",
+ "winapi 0.3.8",
+]
+
+[[package]]
 name = "percent-encoding"
 version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2504,12 +2527,11 @@
 
 [[package]]
 name = "pretty_env_logger"
-version = "0.3.0"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "df8b3f4e0475def7d9c2e5de8e5a1306949849761e107b360d03e98eafaffd61"
+checksum = "926d36b9553851b8b0005f1275891b392ee4d2d833852c417ed025477350fb9d"
 dependencies = [
- "chrono",
- "env_logger 0.6.2",
+ "env_logger 0.7.1",
  "log",
 ]
 
@@ -2582,17 +2604,6 @@
 
 [[package]]
 name = "pulldown-cmark"
-version = "0.5.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77043da1282374688ee212dc44b3f37ff929431de9c9adc3053bd3cee5630357"
-dependencies = [
- "bitflags",
- "memchr",
- "unicase",
-]
-
-[[package]]
-name = "pulldown-cmark"
 version = "0.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1c205cc82214f3594e2d50686730314f817c67ffa80fe800cf0db78c3c2b9d9e"
@@ -2604,6 +2615,17 @@
 ]
 
 [[package]]
+name = "pulldown-cmark"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c2d7fd131800e0d63df52aff46201acaab70b431a4a1ec6f0343fe8e64f35a4"
+dependencies = [
+ "bitflags",
+ "memchr",
+ "unicase",
+]
+
+[[package]]
 name = "punycode"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2641,18 +2663,24 @@
 
 [[package]]
 name = "racer"
-version = "2.1.29"
+version = "2.1.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a6d7ffceb4da3e0a29c18986f0469c209f4db3ab9f2ffe286eaa1104a3e5028"
+checksum = "0ff33fa15ac0384376741d16ddb05a65263dde4e2c5d0f7a9f3747db788764aa"
 dependencies = [
  "bitflags",
  "clap",
  "derive_more",
- "env_logger 0.6.2",
- "humantime 1.3.0",
+ "env_logger 0.7.1",
+ "humantime 2.0.0",
  "lazy_static 1.4.0",
  "log",
  "rls-span",
+ "rustc-ap-rustc_ast_pretty",
+ "rustc-ap-rustc_data_structures",
+ "rustc-ap-rustc_errors",
+ "rustc-ap-rustc_parse",
+ "rustc-ap-rustc_session",
+ "rustc-ap-rustc_span",
  "rustc-ap-syntax",
 ]
 
@@ -2876,7 +2904,7 @@
  "aho-corasick",
  "memchr",
  "regex-syntax",
- "thread_local",
+ "thread_local 0.3.6",
  "utf8-ranges",
 ]
 
@@ -3033,7 +3061,6 @@
 dependencies = [
  "clippy_lints",
  "env_logger 0.7.1",
- "failure",
  "futures",
  "log",
  "rand 0.7.3",
@@ -3067,6 +3094,7 @@
 version = "0.1.0"
 dependencies = [
  "clap",
+ "codespan",
  "codespan-reporting",
  "failure",
  "mdbook",
@@ -3083,12 +3111,10 @@
  "bitflags",
  "byteorder",
  "chalk-engine",
- "fmt_macros",
- "graphviz",
  "jobserver",
  "log",
  "measureme",
- "parking_lot",
+ "parking_lot 0.9.0",
  "polonius-engine",
  "rustc-rayon",
  "rustc-rayon-core",
@@ -3111,26 +3137,57 @@
 
 [[package]]
 name = "rustc-ap-arena"
-version = "610.0.0"
+version = "642.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7475f4c707269b56eb7144c53591e3cd6369a5aa1d66434829ea11df96d5e7e3"
+checksum = "ea82fa3d9a8add7422228ca1a2cbba0784fa8861f56148ff64da08b3c7921b03"
 dependencies = [
  "rustc-ap-rustc_data_structures",
- "smallvec 0.6.10",
+ "smallvec 1.0.0",
 ]
 
 [[package]]
 name = "rustc-ap-graphviz"
-version = "610.0.0"
+version = "642.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e59a55520f140a70a3e0fad80a36e807caa85e9d7016167b91a5b521ea929be"
+checksum = "638d0b2b3bcf99824e0cb5a25dbc547b61dc20942e11daf6a97e981918aa18e5"
+
+[[package]]
+name = "rustc-ap-rustc_ast_pretty"
+version = "642.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d38bab04dd676dee6d2f9670506a18c31bfce38bf7f8420aa83eb1140ecde049"
+dependencies = [
+ "log",
+ "rustc-ap-rustc_data_structures",
+ "rustc-ap-rustc_span",
+ "rustc-ap-syntax",
+]
+
+[[package]]
+name = "rustc-ap-rustc_attr"
+version = "642.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10b843ba8b1ed43739133047673b9f6a54d3b3b4d328d69c6ea89ff971395f35"
+dependencies = [
+ "rustc-ap-rustc_ast_pretty",
+ "rustc-ap-rustc_data_structures",
+ "rustc-ap-rustc_errors",
+ "rustc-ap-rustc_feature",
+ "rustc-ap-rustc_macros",
+ "rustc-ap-rustc_session",
+ "rustc-ap-rustc_span",
+ "rustc-ap-serialize",
+ "rustc-ap-syntax",
+ "smallvec 1.0.0",
+]
 
 [[package]]
 name = "rustc-ap-rustc_data_structures"
-version = "610.0.0"
+version = "642.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6420857d5a088f680ec1ba736ffba4ee9c1964b0d397e6318f38d461f4f7d5cb"
+checksum = "dc3d1c6d0a80ab0c1df76405377cec0f3d5423fb5b0953a8eac70a2ad6c44df2"
 dependencies = [
+ "bitflags",
  "cfg-if",
  "crossbeam-utils 0.6.5",
  "ena",
@@ -3138,58 +3195,78 @@
  "jobserver",
  "lazy_static 1.4.0",
  "log",
- "parking_lot",
+ "measureme",
+ "parking_lot 0.9.0",
  "rustc-ap-graphviz",
  "rustc-ap-rustc_index",
  "rustc-ap-serialize",
  "rustc-hash",
  "rustc-rayon",
  "rustc-rayon-core",
- "smallvec 0.6.10",
+ "smallvec 1.0.0",
  "stable_deref_trait",
+ "winapi 0.3.8",
 ]
 
 [[package]]
 name = "rustc-ap-rustc_errors"
-version = "610.0.0"
+version = "642.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8abfca0960131262254a91d02ff4903526a261ede730d7a2c75b4234c867cdc0"
+checksum = "4909a1eca29331332257230f29120a8ff68c9e37d868c564fcd599e430cf8914"
 dependencies = [
  "annotate-snippets",
  "atty",
  "log",
  "rustc-ap-rustc_data_structures",
+ "rustc-ap-rustc_span",
  "rustc-ap-serialize",
- "rustc-ap-syntax_pos",
- "term_size",
  "termcolor",
+ "termize",
  "unicode-width",
+ "winapi 0.3.8",
 ]
 
 [[package]]
-name = "rustc-ap-rustc_index"
-version = "610.0.0"
+name = "rustc-ap-rustc_feature"
+version = "642.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a395509dcb90a92c1479c085639594624e06b4ab3fc7c1b795b46a61f2d4f65"
+checksum = "63ab887a181d795cf5fd3edadf367760deafb90aefb844f168ab5255266e3478"
+dependencies = [
+ "lazy_static 1.4.0",
+ "rustc-ap-rustc_data_structures",
+ "rustc-ap-rustc_span",
+]
+
+[[package]]
+name = "rustc-ap-rustc_fs_util"
+version = "642.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70814116df3c5fbec8f06f6a1d013ca481f620fd22a9475754e9bf3ee9ba70d8"
+
+[[package]]
+name = "rustc-ap-rustc_index"
+version = "642.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac1bf1d3cf3d119d41353d6fd229ef7272d5097bc0924de021c0294bf86d48bf"
 dependencies = [
  "rustc-ap-serialize",
- "smallvec 0.6.10",
+ "smallvec 1.0.0",
 ]
 
 [[package]]
 name = "rustc-ap-rustc_lexer"
-version = "610.0.0"
+version = "642.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "64eac8a0e6efb8f55292aa24be0208c7c0538236c613e79952fd1fa3d54bcf8e"
+checksum = "4cda21a32cebdc11ec4f5393aa2fcde5ed1b2f673a8571e5a4dcdf07e4ae9cac"
 dependencies = [
  "unicode-xid 0.2.0",
 ]
 
 [[package]]
 name = "rustc-ap-rustc_macros"
-version = "610.0.0"
+version = "642.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f99795e8be4877e9e05d59f201e1740c1cf673364655def5848606d9e25b75af"
+checksum = "75c47b48ea51910ecfd853c9248a9bf4c767bc823449ab6a1d864dff65fbae16"
 dependencies = [
  "itertools 0.8.0",
  "proc-macro2 1.0.3",
@@ -3199,56 +3276,53 @@
 ]
 
 [[package]]
-name = "rustc-ap-rustc_target"
-version = "610.0.0"
+name = "rustc-ap-rustc_parse"
+version = "642.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f22e21fdd8e1c0030f507158fa79b9f1e080e6241aba994d0f97c14a0a07a826"
+checksum = "abd88e89cd5b5d28dcd3a347a3d534c08627d9455570dc1a2d402cb8437b9d30"
 dependencies = [
  "bitflags",
  "log",
- "rustc-ap-rustc_data_structures",
- "rustc-ap-rustc_index",
- "rustc-ap-serialize",
- "rustc-ap-syntax_pos",
-]
-
-[[package]]
-name = "rustc-ap-serialize"
-version = "610.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb1cd6ef5135408d62559866e79986ca261f4c1333253d500e5e66fe66d1432e"
-dependencies = [
- "indexmap",
- "smallvec 0.6.10",
-]
-
-[[package]]
-name = "rustc-ap-syntax"
-version = "610.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61fc1c901d2cbd24cae95d7bc5a58aa7661ec3dc5320c78c32830a52a685c33c"
-dependencies = [
- "bitflags",
- "lazy_static 1.4.0",
- "log",
+ "rustc-ap-rustc_ast_pretty",
+ "rustc-ap-rustc_attr",
  "rustc-ap-rustc_data_structures",
  "rustc-ap-rustc_errors",
- "rustc-ap-rustc_index",
+ "rustc-ap-rustc_feature",
  "rustc-ap-rustc_lexer",
- "rustc-ap-rustc_target",
- "rustc-ap-serialize",
- "rustc-ap-syntax_pos",
- "scoped-tls",
- "smallvec 0.6.10",
+ "rustc-ap-rustc_session",
+ "rustc-ap-rustc_span",
+ "rustc-ap-syntax",
+ "smallvec 1.0.0",
+ "unicode-normalization",
 ]
 
 [[package]]
-name = "rustc-ap-syntax_pos"
-version = "610.0.0"
+name = "rustc-ap-rustc_session"
+version = "642.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "230534f638255853bb9f13987537e00a818435a0cc54b68d97221b6822c8f1bc"
+checksum = "5b8487b4575fbb2d1fc6f1cd61225efd108a4d36817e6fb9b643d57fcae9cb12"
+dependencies = [
+ "log",
+ "num_cpus",
+ "rustc-ap-rustc_data_structures",
+ "rustc-ap-rustc_errors",
+ "rustc-ap-rustc_feature",
+ "rustc-ap-rustc_fs_util",
+ "rustc-ap-rustc_index",
+ "rustc-ap-rustc_span",
+ "rustc-ap-rustc_target",
+ "rustc-ap-serialize",
+ "rustc-ap-syntax",
+]
+
+[[package]]
+name = "rustc-ap-rustc_span"
+version = "642.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f69746c0d4c21bf20a5bb2bd247261a1aa8631f04202d7303352942dde70d987"
 dependencies = [
  "cfg-if",
+ "log",
  "rustc-ap-arena",
  "rustc-ap-rustc_data_structures",
  "rustc-ap-rustc_index",
@@ -3259,6 +3333,48 @@
 ]
 
 [[package]]
+name = "rustc-ap-rustc_target"
+version = "642.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8bbc6ae09b5d42ec66edd520e8412e0615c53a7c93607fe33dc4abab60ba7c8b"
+dependencies = [
+ "bitflags",
+ "log",
+ "rustc-ap-rustc_data_structures",
+ "rustc-ap-rustc_index",
+ "rustc-ap-rustc_macros",
+ "rustc-ap-rustc_span",
+ "rustc-ap-serialize",
+]
+
+[[package]]
+name = "rustc-ap-serialize"
+version = "642.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e13a1ead0252fc3d96da4c336a95950be6795f2b00c84a67ccadf26142f8cb41"
+dependencies = [
+ "indexmap",
+ "smallvec 1.0.0",
+]
+
+[[package]]
+name = "rustc-ap-syntax"
+version = "642.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e1f59f48ca3a2ec16a7e82e718ed5aadf9c9e08cf63015d28b4e774767524a6a"
+dependencies = [
+ "log",
+ "rustc-ap-rustc_data_structures",
+ "rustc-ap-rustc_index",
+ "rustc-ap-rustc_lexer",
+ "rustc-ap-rustc_macros",
+ "rustc-ap-rustc_span",
+ "rustc-ap-serialize",
+ "scoped-tls",
+ "smallvec 1.0.0",
+]
+
+[[package]]
 name = "rustc-demangle"
 version = "0.1.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3441,6 +3557,7 @@
  "flate2",
  "libc",
  "log",
+ "measureme",
  "rustc",
  "rustc-demangle",
  "rustc_attr",
@@ -3521,7 +3638,7 @@
  "lazy_static 1.4.0",
  "log",
  "measureme",
- "parking_lot",
+ "parking_lot 0.9.0",
  "rustc-hash",
  "rustc-rayon",
  "rustc-rayon-core",
@@ -3541,6 +3658,7 @@
  "log",
  "rustc",
  "rustc_ast_pretty",
+ "rustc_codegen_ssa",
  "rustc_codegen_utils",
  "rustc_data_structures",
  "rustc_error_codes",
@@ -3618,6 +3736,7 @@
 name = "rustc_hir"
 version = "0.0.0"
 dependencies = [
+ "lazy_static 1.4.0",
  "rustc_ast_pretty",
  "rustc_data_structures",
  "rustc_errors",
@@ -3656,6 +3775,28 @@
 ]
 
 [[package]]
+name = "rustc_infer"
+version = "0.0.0"
+dependencies = [
+ "fmt_macros",
+ "graphviz",
+ "log",
+ "rustc",
+ "rustc_attr",
+ "rustc_data_structures",
+ "rustc_error_codes",
+ "rustc_errors",
+ "rustc_hir",
+ "rustc_index",
+ "rustc_macros",
+ "rustc_session",
+ "rustc_span",
+ "rustc_target",
+ "smallvec 1.0.0",
+ "syntax",
+]
+
+[[package]]
 name = "rustc_interface"
 version = "0.0.0"
 dependencies = [
@@ -3675,6 +3816,7 @@
  "rustc_expand",
  "rustc_hir",
  "rustc_incremental",
+ "rustc_infer",
  "rustc_lint",
  "rustc_metadata",
  "rustc_mir",
@@ -3717,6 +3859,7 @@
  "rustc_feature",
  "rustc_hir",
  "rustc_index",
+ "rustc_infer",
  "rustc_session",
  "rustc_span",
  "rustc_target",
@@ -3737,7 +3880,6 @@
 name = "rustc_macros"
 version = "0.1.0"
 dependencies = [
- "itertools 0.8.0",
  "proc-macro2 1.0.3",
  "quote 1.0.2",
  "syn 1.0.11",
@@ -3787,6 +3929,7 @@
  "rustc_errors",
  "rustc_hir",
  "rustc_index",
+ "rustc_infer",
  "rustc_lexer",
  "rustc_macros",
  "rustc_span",
@@ -3801,7 +3944,6 @@
 version = "0.0.0"
 dependencies = [
  "arena",
- "itertools 0.8.0",
  "log",
  "rustc",
  "rustc_apfloat",
@@ -3810,6 +3952,7 @@
  "rustc_errors",
  "rustc_hir",
  "rustc_index",
+ "rustc_infer",
  "rustc_macros",
  "rustc_session",
  "rustc_span",
@@ -3850,6 +3993,7 @@
  "rustc_feature",
  "rustc_hir",
  "rustc_index",
+ "rustc_infer",
  "rustc_session",
  "rustc_span",
  "rustc_target",
@@ -3900,6 +4044,7 @@
  "rustc_expand",
  "rustc_feature",
  "rustc_hir",
+ "rustc_infer",
  "rustc_metadata",
  "rustc_session",
  "rustc_span",
@@ -3989,6 +4134,7 @@
  "rustc",
  "rustc_data_structures",
  "rustc_hir",
+ "rustc_infer",
  "rustc_macros",
  "rustc_span",
  "rustc_target",
@@ -4004,7 +4150,9 @@
  "rustc",
  "rustc_data_structures",
  "rustc_hir",
+ "rustc_infer",
  "rustc_span",
+ "rustc_target",
 ]
 
 [[package]]
@@ -4019,6 +4167,7 @@
  "rustc_errors",
  "rustc_hir",
  "rustc_index",
+ "rustc_infer",
  "rustc_span",
  "rustc_target",
  "smallvec 1.0.0",
@@ -4040,7 +4189,7 @@
 dependencies = [
  "itertools 0.8.0",
  "minifier",
- "pulldown-cmark 0.5.3",
+ "pulldown-cmark 0.7.0",
  "rustc-rayon",
  "serde",
  "serde_json",
@@ -4094,7 +4243,7 @@
 
 [[package]]
 name = "rustfmt-nightly"
-version = "1.4.11"
+version = "1.4.12"
 dependencies = [
  "annotate-snippets",
  "bytecount",
@@ -4110,9 +4259,14 @@
  "lazy_static 1.4.0",
  "log",
  "regex",
+ "rustc-ap-rustc_ast_pretty",
+ "rustc-ap-rustc_data_structures",
+ "rustc-ap-rustc_errors",
+ "rustc-ap-rustc_parse",
+ "rustc-ap-rustc_session",
+ "rustc-ap-rustc_span",
  "rustc-ap-rustc_target",
  "rustc-ap-syntax",
- "rustc-ap-syntax_pos",
  "rustc-workspace-hack",
  "rustfmt-config_proc_macro",
  "serde",
@@ -4546,9 +4700,9 @@
 
 [[package]]
 name = "tar"
-version = "0.4.20"
+version = "0.4.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a303ba60a099fcd2aaa646b14d2724591a96a75283e4b7ed3d1a1658909d9ae2"
+checksum = "b3196bfbffbba3e57481b6ea32249fbaf590396a52505a2615adbb79d9d826d3"
 dependencies = [
  "filetime",
  "libc",
@@ -4601,17 +4755,6 @@
 ]
 
 [[package]]
-name = "term_size"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e5b9a66db815dcfd2da92db471106457082577c3c278d4138ab3e3b4e189327"
-dependencies = [
- "kernel32-sys",
- "libc",
- "winapi 0.2.8",
-]
-
-[[package]]
 name = "termcolor"
 version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -4685,6 +4828,15 @@
 ]
 
 [[package]]
+name = "thread_local"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14"
+dependencies = [
+ "lazy_static 1.4.0",
+]
+
+[[package]]
 name = "tidy"
 version = "0.1.0"
 dependencies = [
@@ -4838,7 +4990,7 @@
  "log",
  "mio",
  "num_cpus",
- "parking_lot",
+ "parking_lot 0.9.0",
  "slab",
  "tokio-executor",
  "tokio-io",
@@ -5037,11 +5189,11 @@
 
 [[package]]
 name = "unicase"
-version = "2.5.1"
+version = "2.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2e2e6bd1e59e56598518beb94fd6db628ded570326f0a98c679a304bd9f00150"
+checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
 dependencies = [
- "version_check 0.1.5",
+ "version_check",
 ]
 
 [[package]]
@@ -5213,12 +5365,6 @@
 
 [[package]]
 name = "version_check"
-version = "0.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
-
-[[package]]
-name = "version_check"
 version = "0.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "078775d0255232fb988e6fccf26ddc9d1ac274299aaedcedce21c6f72cc533ce"
diff --git a/RELEASES.md b/RELEASES.md
index 10e485c..427aa71 100644
--- a/RELEASES.md
+++ b/RELEASES.md
@@ -1,3 +1,13 @@
+Version 1.41.1 (2020-02-27)
+===========================
+
+* [Always check types of static items][69145]
+* [Always check lifetime bounds of `Copy` impls][69145]
+* [Fix miscompilation in callers of `Layout::repeat`][69225]
+
+[69225]: https://github.com/rust-lang/rust/issues/69225
+[69145]: https://github.com/rust-lang/rust/pull/69145
+
 Version 1.41.0 (2020-01-30)
 ===========================
 
diff --git a/config.toml.example b/config.toml.example
index c9e1733..9b7327e 100644
--- a/config.toml.example
+++ b/config.toml.example
@@ -395,6 +395,15 @@
 # rustc to execute.
 #lld = false
 
+# Indicates whether LLD will be used to link Rust crates during bootstrap on
+# supported platforms. The LLD from the bootstrap distribution will be used
+# and not the LLD compiled during the bootstrap.
+#
+# LLD will not be used if we're cross linking or running tests.
+#
+# Explicitly setting the linker for a target will override this option.
+#use-lld = false
+
 # Indicates whether some LLVM tools, like llvm-objdump, will be made available in the
 # sysroot.
 #llvm-tools = false
@@ -435,6 +444,10 @@
 # Use LLVM libunwind as the implementation for Rust's unwinder.
 #llvm-libunwind = false
 
+# Enable Windows Control Flow Guard checks in the standard library.
+# This only applies from stage 1 onwards, and only for Windows targets.
+#control-flow-guard = false
+
 # =============================================================================
 # Options for specific targets
 #
@@ -463,6 +476,7 @@
 # Linker to be used to link Rust code. Note that the
 # default value is platform specific, and if not specified it may also depend on
 # what platform is crossing to what platform.
+# Setting this will override the `use-lld` option for Rust code.
 #linker = "cc"
 
 # Path to the `llvm-config` binary of the installation of a custom LLVM to link
diff --git a/rustfmt.toml b/rustfmt.toml
index 2a03484..8f4c901 100644
--- a/rustfmt.toml
+++ b/rustfmt.toml
@@ -7,6 +7,7 @@
 # tidy only checks files which are not ignored, each entry follows gitignore style
 ignore = [
     "build",
+    "/vendor/",
 
     # tests for now are not formatted, as they are sometimes pretty-printing constrained
     # (and generally rustfmt can move around comments in UI-testing incompatible ways)
diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs
index a34ec44..daa030c 100644
--- a/src/bootstrap/bin/rustc.rs
+++ b/src/bootstrap/bin/rustc.rs
@@ -47,7 +47,7 @@
     };
     let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
     let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");
-    let on_fail = env::var_os("RUSTC_ON_FAIL").map(|of| Command::new(of));
+    let on_fail = env::var_os("RUSTC_ON_FAIL").map(Command::new);
 
     let rustc = env::var_os(rustc).unwrap_or_else(|| panic!("{:?} was not set", rustc));
     let libdir = env::var_os(libdir).unwrap_or_else(|| panic!("{:?} was not set", libdir));
@@ -64,7 +64,7 @@
     if let Some(crate_name) = crate_name {
         if let Some(target) = env::var_os("RUSTC_TIME") {
             if target == "all"
-                || target.into_string().unwrap().split(",").any(|c| c.trim() == crate_name)
+                || target.into_string().unwrap().split(',').any(|c| c.trim() == crate_name)
             {
                 cmd.arg("-Ztime");
             }
@@ -134,6 +134,11 @@
             cmd.arg(format!("-Clinker={}", host_linker));
         }
 
+        // Override linker flavor if necessary.
+        if let Ok(host_linker_flavor) = env::var("RUSTC_HOST_LINKER_FLAVOR") {
+            cmd.arg(format!("-Clinker-flavor={}", host_linker_flavor));
+        }
+
         if let Ok(s) = env::var("RUSTC_HOST_CRT_STATIC") {
             if s == "true" {
                 cmd.arg("-C").arg("target-feature=+crt-static");
@@ -189,7 +194,7 @@
                 crate_name,
                 is_test,
                 dur.as_secs(),
-                dur.subsec_nanos() / 1_000_000
+                dur.subsec_millis()
             );
 
             match status.code() {
diff --git a/src/bootstrap/bin/rustdoc.rs b/src/bootstrap/bin/rustdoc.rs
index 8c8b33a..0434586 100644
--- a/src/bootstrap/bin/rustdoc.rs
+++ b/src/bootstrap/bin/rustdoc.rs
@@ -61,7 +61,7 @@
     }
 
     // Needed to be able to run all rustdoc tests.
-    if let Some(_) = env::var_os("RUSTDOC_GENERATE_REDIRECT_PAGES") {
+    if env::var_os("RUSTDOC_GENERATE_REDIRECT_PAGES").is_some() {
         // This "unstable-options" can be removed when `--generate-redirect-pages` is stabilized
         if !has_unstable {
             cmd.arg("-Z").arg("unstable-options");
diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py
index 6737086..50e1726 100644
--- a/src/bootstrap/bootstrap.py
+++ b/src/bootstrap/bootstrap.py
@@ -80,7 +80,7 @@
             option = "-s"
         run(["curl", option,
              "-y", "30", "-Y", "10",    # timeout if speed is < 10 bytes/sec for > 30 seconds
-             "--connect-timeout", "30", # timeout if cannot connect within 30 seconds
+             "--connect-timeout", "30",  # timeout if cannot connect within 30 seconds
              "--retry", "3", "-Sf", "-o", path, url],
             verbose=verbose,
             exception=exception)
@@ -332,7 +332,6 @@
         self.use_vendored_sources = ''
         self.verbose = False
 
-
     def download_stage0(self):
         """Fetch the build system for Rust, written in Rust
 
@@ -351,7 +350,7 @@
             try:
                 with tempfile.NamedTemporaryFile(delete=False) as temp_file:
                     temp_path = temp_file.name
-                with tarfile.open(temp_path, "w:xz") as tar:
+                with tarfile.open(temp_path, "w:xz"):
                     pass
                 return True
             except tarfile.CompressionError:
@@ -397,7 +396,7 @@
 
         if self.rustfmt() and self.rustfmt().startswith(self.bin_root()) and (
             not os.path.exists(self.rustfmt())
-            or self.program_out_of_date(self.rustfmt_stamp())
+            or self.program_out_of_date(self.rustfmt_stamp(), self.rustfmt_channel)
         ):
             if rustfmt_channel:
                 tarball_suffix = '.tar.xz' if support_xz() else '.tar.gz'
@@ -407,7 +406,7 @@
                 self.fix_executable("{}/bin/rustfmt".format(self.bin_root()))
                 self.fix_executable("{}/bin/cargo-fmt".format(self.bin_root()))
                 with output(self.rustfmt_stamp()) as rustfmt_stamp:
-                    rustfmt_stamp.write(self.date)
+                    rustfmt_stamp.write(self.date + self.rustfmt_channel)
 
     def _download_stage0_helper(self, filename, pattern, tarball_suffix, date=None):
         if date is None:
@@ -521,12 +520,12 @@
         """
         return os.path.join(self.bin_root(), '.rustfmt-stamp')
 
-    def program_out_of_date(self, stamp_path):
+    def program_out_of_date(self, stamp_path, extra=""):
         """Check if the given program stamp is out of date"""
         if not os.path.exists(stamp_path) or self.clean:
             return True
         with open(stamp_path, 'r') as stamp:
-            return self.date != stamp.read()
+            return (self.date + extra) != stamp.read()
 
     def bin_root(self):
         """Return the binary root directory
@@ -825,7 +824,7 @@
                 if not os.path.exists(vendor_dir):
                     print('error: vendoring required, but vendor directory does not exist.')
                     print('       Run `cargo vendor` without sudo to initialize the '
-                        'vendor directory.')
+                          'vendor directory.')
                     raise Exception("{} not found".format(vendor_dir))
 
         if self.use_vendored_sources:
@@ -839,7 +838,7 @@
                     "\n"
                     "[source.vendored-sources]\n"
                     "directory = '{}/vendor'\n"
-                .format(self.rust_root))
+                    .format(self.rust_root))
         else:
             if os.path.exists('.cargo'):
                 shutil.rmtree('.cargo')
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index d9c894a..e4b57cd 100644
--- a/src/bootstrap/builder.rs
+++ b/src/bootstrap/builder.rs
@@ -510,7 +510,7 @@
             Subcommand::Format { .. } | Subcommand::Clean { .. } => panic!(),
         };
 
-        let builder = Builder {
+        Builder {
             build,
             top_stage: build.config.stage.unwrap_or(2),
             kind,
@@ -518,9 +518,7 @@
             stack: RefCell::new(Vec::new()),
             time_spent_on_dependencies: Cell::new(Duration::new(0, 0)),
             paths: paths.to_owned(),
-        };
-
-        builder
+        }
     }
 
     pub fn execute_cli(&self) {
@@ -694,7 +692,7 @@
         cmd.env_remove("MAKEFLAGS");
         cmd.env_remove("MFLAGS");
 
-        if let Some(linker) = self.linker(compiler.host) {
+        if let Some(linker) = self.linker(compiler.host, true) {
             cmd.env("RUSTC_TARGET_LINKER", linker);
         }
         cmd
@@ -753,13 +751,12 @@
             cargo.env("RUST_CHECK", "1");
         }
 
-        let stage;
-        if compiler.stage == 0 && self.local_rebuild {
+        let stage = if compiler.stage == 0 && self.local_rebuild {
             // Assume the local-rebuild rustc already has stage1 features.
-            stage = 1;
+            1
         } else {
-            stage = compiler.stage;
-        }
+            compiler.stage
+        };
 
         let mut rustflags = Rustflags::new(&target);
         if stage != 0 {
@@ -850,7 +847,13 @@
             rustflags.arg("-Zforce-unstable-if-unmarked");
         }
 
-        rustflags.arg("-Zexternal-macro-backtrace");
+        // cfg(bootstrap): the flag was renamed from `-Zexternal-macro-backtrace`
+        // to `-Zmacro-backtrace`, keep only the latter after beta promotion.
+        if stage == 0 {
+            rustflags.arg("-Zexternal-macro-backtrace");
+        } else {
+            rustflags.arg("-Zmacro-backtrace");
+        }
 
         let want_rustdoc = self.doc_tests != DocTests::No;
 
@@ -949,10 +952,31 @@
             }
         }
 
-        if let Some(host_linker) = self.linker(compiler.host) {
+        // FIXME: Don't use LLD if we're compiling libtest, since it fails to link it.
+        // See https://github.com/rust-lang/rust/issues/68647.
+        let can_use_lld = mode != Mode::Std;
+
+        // FIXME: The beta compiler doesn't pick the `lld-link` flavor for `*-pc-windows-msvc`
+        // Remove `RUSTC_HOST_LINKER_FLAVOR` when this is fixed
+        let lld_linker_flavor = |linker: &Path, target: Interned<String>| {
+            compiler.stage == 0
+                && linker.file_name() == Some(OsStr::new("rust-lld"))
+                && target.contains("pc-windows-msvc")
+        };
+
+        if let Some(host_linker) = self.linker(compiler.host, can_use_lld) {
+            if lld_linker_flavor(host_linker, compiler.host) {
+                cargo.env("RUSTC_HOST_LINKER_FLAVOR", "lld-link");
+            }
+
             cargo.env("RUSTC_HOST_LINKER", host_linker);
         }
-        if let Some(target_linker) = self.linker(target) {
+
+        if let Some(target_linker) = self.linker(target, can_use_lld) {
+            if lld_linker_flavor(target_linker, target) {
+                rustflags.arg("-Clinker-flavor=lld-link");
+            }
+
             let target = crate::envify(&target);
             cargo.env(&format!("CARGO_TARGET_{}_LINKER", target), target_linker);
         }
@@ -1111,6 +1135,20 @@
             );
         }
 
+        // If Control Flow Guard is enabled, pass the `control_flow_guard=checks` flag to rustc
+        // when compiling the standard library, since this might be linked into the final outputs
+        // produced by rustc. Since this mitigation is only available on Windows, only enable it
+        // for the standard library in case the compiler is run on a non-Windows platform.
+        // This is not needed for stage 0 artifacts because these will only be used for building
+        // the stage 1 compiler.
+        if cfg!(windows)
+            && mode == Mode::Std
+            && self.config.control_flow_guard
+            && compiler.stage >= 1
+        {
+            rustflags.arg("-Zcontrol_flow_guard=checks");
+        }
+
         // For `cargo doc` invocations, make rustdoc print the Rust version into the docs
         cargo.env("RUSTDOC_CRATE_VERSION", self.rust_version());
 
@@ -1252,12 +1290,7 @@
         };
 
         if self.config.print_step_timings && dur > Duration::from_millis(100) {
-            println!(
-                "[TIMING] {:?} -- {}.{:03}",
-                step,
-                dur.as_secs(),
-                dur.subsec_nanos() / 1_000_000
-            );
+            println!("[TIMING] {:?} -- {}.{:03}", step, dur.as_secs(), dur.subsec_millis());
         }
 
         {
@@ -1302,7 +1335,7 @@
 
     fn arg(&mut self, arg: &str) -> &mut Self {
         assert_eq!(arg.split_whitespace().count(), 1);
-        if self.0.len() > 0 {
+        if !self.0.is_empty() {
             self.0.push_str(" ");
         }
         self.0.push_str(arg);
diff --git a/src/bootstrap/builder/tests.rs b/src/bootstrap/builder/tests.rs
index 5fefb97..cca8ab8 100644
--- a/src/bootstrap/builder/tests.rs
+++ b/src/bootstrap/builder/tests.rs
@@ -19,7 +19,6 @@
     config.out = dir;
     config.build = INTERNER.intern_str("A");
     config.hosts = vec![config.build]
-        .clone()
         .into_iter()
         .chain(host.iter().map(|s| INTERNER.intern_str(s)))
         .collect::<Vec<_>>();
diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs
index 3881023..504cba4 100644
--- a/src/bootstrap/channel.rs
+++ b/src/bootstrap/channel.rs
@@ -13,7 +13,7 @@
 use crate::Build;
 
 // The version number
-pub const CFG_RELEASE_NUM: &str = "1.42.0";
+pub const CFG_RELEASE_NUM: &str = "1.43.0";
 
 pub struct GitInfo {
     inner: Option<Info>,
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
index eced035..7dded96 100644
--- a/src/bootstrap/compile.rs
+++ b/src/bootstrap/compile.rs
@@ -18,7 +18,6 @@
 use build_helper::{output, t, up_to_date};
 use filetime::FileTime;
 use serde::Deserialize;
-use serde_json;
 
 use crate::builder::Cargo;
 use crate::dist;
@@ -149,7 +148,8 @@
     // which is provided by std for this target.
     if target == "x86_64-fortanix-unknown-sgx" {
         let src_path_env = "X86_FORTANIX_SGX_LIBS";
-        let src = env::var(src_path_env).expect(&format!("{} not found in env", src_path_env));
+        let src =
+            env::var(src_path_env).unwrap_or_else(|_| panic!("{} not found in env", src_path_env));
         copy_and_stamp(Path::new(&src), "libunwind.a");
     }
 
@@ -361,7 +361,7 @@
                 );
             }
 
-            let target = sysroot_dir.join(file.to_string() + ".o");
+            let target = sysroot_dir.join((*file).to_string() + ".o");
             builder.copy(dst_file, &target);
             target_deps.push(target);
         }
@@ -515,7 +515,7 @@
         .env("CFG_VERSION", builder.rust_version())
         .env("CFG_PREFIX", builder.config.prefix.clone().unwrap_or_default());
 
-    let libdir_relative = builder.config.libdir_relative().unwrap_or(Path::new("lib"));
+    let libdir_relative = builder.config.libdir_relative().unwrap_or_else(|| Path::new("lib"));
     cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative);
 
     if let Some(ref ver_date) = builder.rust_info.commit_date() {
@@ -843,11 +843,11 @@
         };
         for filename in filenames {
             // Skip files like executables
-            if !filename.ends_with(".rlib")
-                && !filename.ends_with(".lib")
-                && !filename.ends_with(".a")
-                && !is_dylib(&filename)
-                && !(is_check && filename.ends_with(".rmeta"))
+            if !(filename.ends_with(".rlib")
+                || filename.ends_with(".lib")
+                || filename.ends_with(".a")
+                || is_dylib(&filename)
+                || (is_check && filename.ends_with(".rmeta")))
             {
                 continue;
             }
@@ -905,7 +905,7 @@
     for (prefix, extension, expected_len) in toplevel {
         let candidates = contents.iter().filter(|&&(_, ref filename, ref meta)| {
             filename.starts_with(&prefix[..])
-                && filename[prefix.len()..].starts_with("-")
+                && filename[prefix.len()..].starts_with('-')
                 && filename.ends_with(&extension[..])
                 && meta.len() == expected_len
         });
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
index 110c8b8..746cddb 100644
--- a/src/bootstrap/config.rs
+++ b/src/bootstrap/config.rs
@@ -16,7 +16,6 @@
 pub use crate::flags::Subcommand;
 use build_helper::t;
 use serde::Deserialize;
-use toml;
 
 /// Global configuration for the entire build and/or bootstrap.
 ///
@@ -83,6 +82,7 @@
     pub llvm_use_linker: Option<String>,
     pub llvm_allow_old_toolchain: Option<bool>,
 
+    pub use_lld: bool,
     pub lld_enabled: bool,
     pub lldb_enabled: bool,
     pub llvm_tools_enabled: bool,
@@ -116,6 +116,7 @@
     pub targets: Vec<Interned<String>>,
     pub local_rebuild: bool,
     pub jemalloc: bool,
+    pub control_flow_guard: bool,
 
     // dist misc
     pub dist_sign_folder: Option<PathBuf>,
@@ -176,6 +177,15 @@
     pub no_std: bool,
 }
 
+impl Target {
+    pub fn from_triple(triple: &str) -> Self {
+        let mut target: Self = Default::default();
+        if triple.contains("-none-") || triple.contains("nvptx") {
+            target.no_std = true;
+        }
+        target
+    }
+}
 /// Structure of the `config.toml` file that configuration is read from.
 ///
 /// This structure uses `Decodable` to automatically decode a TOML configuration
@@ -322,6 +332,7 @@
     save_toolstates: Option<String>,
     codegen_backends: Option<Vec<String>>,
     lld: Option<bool>,
+    use_lld: Option<bool>,
     llvm_tools: Option<bool>,
     lldb: Option<bool>,
     deny_warnings: Option<bool>,
@@ -332,6 +343,7 @@
     jemalloc: Option<bool>,
     test_compare_mode: Option<bool>,
     llvm_libunwind: Option<bool>,
+    control_flow_guard: Option<bool>,
 }
 
 /// TOML representation of how each build target is configured.
@@ -350,6 +362,7 @@
     musl_root: Option<String>,
     wasi_root: Option<String>,
     qemu_rootfs: Option<String>,
+    no_std: Option<bool>,
 }
 
 impl Config {
@@ -440,7 +453,7 @@
                     }
                 }
             })
-            .unwrap_or_else(|| TomlConfig::default());
+            .unwrap_or_else(TomlConfig::default);
 
         let build = toml.build.clone().unwrap_or_default();
         // set by bootstrap.py
@@ -539,7 +552,7 @@
             config.llvm_ldflags = llvm.ldflags.clone();
             set(&mut config.llvm_use_libcxx, llvm.use_libcxx);
             config.llvm_use_linker = llvm.use_linker.clone();
-            config.llvm_allow_old_toolchain = llvm.allow_old_toolchain.clone();
+            config.llvm_allow_old_toolchain = llvm.allow_old_toolchain;
         }
 
         if let Some(ref rust) = toml.rust {
@@ -566,6 +579,7 @@
             if let Some(true) = rust.incremental {
                 config.incremental = true;
             }
+            set(&mut config.use_lld, rust.use_lld);
             set(&mut config.lld_enabled, rust.lld);
             set(&mut config.lldb_enabled, rust.lldb);
             set(&mut config.llvm_tools_enabled, rust.llvm_tools);
@@ -578,6 +592,7 @@
             set(&mut config.rust_verify_llvm_ir, rust.verify_llvm_ir);
             config.rust_thin_lto_import_instr_limit = rust.thin_lto_import_instr_limit;
             set(&mut config.rust_remap_debuginfo, rust.remap_debuginfo);
+            set(&mut config.control_flow_guard, rust.control_flow_guard);
 
             if let Some(ref backends) = rust.codegen_backends {
                 config.rust_codegen_backends =
@@ -590,7 +605,7 @@
 
         if let Some(ref t) = toml.target {
             for (triple, cfg) in t {
-                let mut target = Target::default();
+                let mut target = Target::from_triple(triple);
 
                 if let Some(ref s) = cfg.llvm_config {
                     target.llvm_config = Some(config.src.join(s));
@@ -601,12 +616,15 @@
                 if let Some(ref s) = cfg.android_ndk {
                     target.ndk = Some(config.src.join(s));
                 }
+                if let Some(s) = cfg.no_std {
+                    target.no_std = s;
+                }
                 target.cc = cfg.cc.clone().map(PathBuf::from);
                 target.cxx = cfg.cxx.clone().map(PathBuf::from);
                 target.ar = cfg.ar.clone().map(PathBuf::from);
                 target.ranlib = cfg.ranlib.clone().map(PathBuf::from);
                 target.linker = cfg.linker.clone().map(PathBuf::from);
-                target.crt_static = cfg.crt_static.clone();
+                target.crt_static = cfg.crt_static;
                 target.musl_root = cfg.musl_root.clone().map(PathBuf::from);
                 target.wasi_root = cfg.wasi_root.clone().map(PathBuf::from);
                 target.qemu_rootfs = cfg.qemu_rootfs.clone().map(PathBuf::from);
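
The `Target::from_triple` helper added above makes all `*-none-*` and `nvptx*` targets no-std by default, while an explicit `no_std` key in the per-target TOML section can still override that. A minimal standalone sketch of the defaulting logic, using an illustrative `TargetCfg` type rather than the real bootstrap `Target`:

```rust
// Standalone sketch of the triple-based no_std defaulting shown in the diff above.
#[derive(Default, Debug, PartialEq)]
struct TargetCfg {
    no_std: bool,
}

fn from_triple(triple: &str) -> TargetCfg {
    let mut target = TargetCfg::default();
    // All *-none-* and nvptx* targets are treated as no-std by default.
    if triple.contains("-none-") || triple.contains("nvptx") {
        target.no_std = true;
    }
    target
}

fn main() {
    assert!(from_triple("thumbv7em-none-eabi").no_std);
    assert!(from_triple("nvptx64-nvidia-cuda").no_std);
    assert!(!from_triple("x86_64-unknown-linux-gnu").no_std);

    // An explicit no_std key in the target's TOML section still wins,
    // mirroring the `if let Some(s) = cfg.no_std` branch above.
    let mut t = from_triple("thumbv7em-none-eabi");
    t.no_std = false;
    assert_eq!(t, TargetCfg { no_std: false });
}
```
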
diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py
index 7cfc538..2a46c56 100755
--- a/src/bootstrap/configure.py
+++ b/src/bootstrap/configure.py
@@ -60,10 +60,11 @@
 o("lldb", "rust.lldb", "build lldb")
 o("missing-tools", "dist.missing-tools", "allow failures when building tools")
 o("use-libcxx", "llvm.use-libcxx", "build LLVM with libc++")
+o("control-flow-guard", "rust.control-flow-guard", "Enable Control Flow Guard")
 
-o("cflags", "llvm.cflags", "build LLVM with these extra compiler flags")
-o("cxxflags", "llvm.cxxflags", "build LLVM with these extra compiler flags")
-o("ldflags", "llvm.ldflags", "build LLVM with these extra linker flags")
+v("llvm-cflags", "llvm.cflags", "build LLVM with these extra compiler flags")
+v("llvm-cxxflags", "llvm.cxxflags", "build LLVM with these extra compiler flags")
+v("llvm-ldflags", "llvm.ldflags", "build LLVM with these extra linker flags")
 
 o("llvm-libunwind", "rust.llvm-libunwind", "use LLVM libunwind")
 
@@ -392,11 +393,12 @@
 
 
 def is_number(value):
-  try:
-    float(value)
-    return True
-  except ValueError:
-    return False
+    try:
+        float(value)
+        return True
+    except ValueError:
+        return False
+
 
 # Here we walk through the constructed configuration we have from the parsed
 # command line arguments. We then apply each piece of configuration by
diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs
index facf816..8003d89 100644
--- a/src/bootstrap/dist.rs
+++ b/src/bootstrap/dist.rs
@@ -828,7 +828,7 @@
         assert!(builder.config.extended);
         let name = pkgname(builder, "rust-analysis");
 
-        if &compiler.host != builder.config.build {
+        if compiler.host != builder.config.build {
             return distdir(builder).join(format!("{}-{}.tar.gz", name, target));
         }
 
@@ -877,7 +877,7 @@
             Some(path) => path,
             None => return false,
         };
-        if spath.ends_with("~") || spath.ends_with(".pyc") {
+        if spath.ends_with('~') || spath.ends_with(".pyc") {
             return false;
         }
 
diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs
index 2040565..b0d9a5b 100644
--- a/src/bootstrap/doc.rs
+++ b/src/bootstrap/doc.rs
@@ -560,7 +560,7 @@
         builder.ensure(Rustc { stage, target });
 
         // Build rustdoc.
-        builder.ensure(tool::Rustdoc { compiler: compiler });
+        builder.ensure(tool::Rustdoc { compiler });
 
         // Symlink compiler docs to the output directory of rustdoc documentation.
         let out_dir = builder.stage_out(compiler, Mode::ToolRustc).join(target).join("doc");
diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs
index 2101ef2..516be6a 100644
--- a/src/bootstrap/flags.rs
+++ b/src/bootstrap/flags.rs
@@ -571,7 +571,7 @@
 }
 
 fn parse_deny_warnings(matches: &getopts::Matches) -> Option<bool> {
-    match matches.opt_str("warnings").as_ref().map(|v| v.as_str()) {
+    match matches.opt_str("warnings").as_deref() {
         Some("deny") => Some(true),
         Some("warn") => Some(false),
         Some(value) => {
diff --git a/src/bootstrap/format.rs b/src/bootstrap/format.rs
index 6e5e3fe..a4acb14 100644
--- a/src/bootstrap/format.rs
+++ b/src/bootstrap/format.rs
@@ -1,7 +1,7 @@
 //! Runs rustfmt on the repository.
 
 use crate::Build;
-use build_helper::t;
+use build_helper::{output, t};
 use ignore::WalkBuilder;
 use std::path::Path;
 use std::process::Command;
@@ -53,6 +53,17 @@
     for ignore in rustfmt_config.ignore {
         ignore_fmt.add(&format!("!{}", ignore)).expect(&ignore);
     }
+    let untracked_paths_output = output(
+        Command::new("git").arg("status").arg("--porcelain").arg("--untracked-files=normal"),
+    );
+    let untracked_paths = untracked_paths_output
+        .lines()
+        .filter(|entry| entry.starts_with("??"))
+        .map(|entry| entry.split(" ").nth(1).expect("every git status entry should list a path"));
+    for untracked_path in untracked_paths {
+        eprintln!("skip untracked path {} during rustfmt invocations", untracked_path);
+        ignore_fmt.add(&format!("!{}", untracked_path)).expect(&untracked_path);
+    }
     let ignore_fmt = ignore_fmt.build().unwrap();
 
     let rustfmt_path = build.config.initial_rustfmt.as_ref().unwrap_or_else(|| {
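
The rustfmt hook above now also skips paths that `git status --porcelain` reports as untracked (entries beginning with `??`). A small sketch of just that parsing step, run on canned porcelain output instead of invoking `git`:

```rust
// Sketch of the untracked-path filtering added above, on canned
// `git status --porcelain` output.
fn untracked_paths(porcelain: &str) -> Vec<&str> {
    porcelain
        .lines()
        .filter(|entry| entry.starts_with("??"))
        .map(|entry| entry.split(' ').nth(1).expect("every git status entry should list a path"))
        .collect()
}

fn main() {
    let sample = " M src/bootstrap/format.rs\n?? src/new_module.rs\n?? notes.txt\n";
    // Only the untracked entries survive; tracked (modified) files are left for rustfmt.
    assert_eq!(untracked_paths(sample), vec!["src/new_module.rs", "notes.txt"]);
}
```
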
diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs
index f8734eb..6549262 100644
--- a/src/bootstrap/install.rs
+++ b/src/bootstrap/install.rs
@@ -126,9 +126,8 @@
         None => return path.to_path_buf(),
     };
     for part in path.components() {
-        match part {
-            Component::Normal(s) => ret.push(s),
-            _ => {}
+        if let Component::Normal(s) = part {
+            ret.push(s)
         }
     }
     ret
diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
index 1fee3fd..a476d25 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
@@ -103,7 +103,6 @@
 //! More documentation can be found in each respective module below, and you can
 //! also check out the `src/bootstrap/README.md` file for more information.
 
-#![feature(core_intrinsics)]
 #![feature(drain_filter)]
 
 use std::cell::{Cell, RefCell};
@@ -239,9 +238,10 @@
     hosts: Vec<Interned<String>>,
     targets: Vec<Interned<String>>,
 
-    // Stage 0 (downloaded) compiler and cargo or their local rust equivalents
+    // Stage 0 (downloaded) compiler, lld and cargo or their local rust equivalents
     initial_rustc: PathBuf,
     initial_cargo: PathBuf,
+    initial_lld: PathBuf,
 
     // Runtime state filled in later on
     // C/C++ compilers and archiver for all targets
@@ -343,9 +343,18 @@
         // we always try to use git for LLVM builds
         let in_tree_llvm_info = channel::GitInfo::new(false, &src.join("src/llvm-project"));
 
+        let initial_sysroot = config.initial_rustc.parent().unwrap().parent().unwrap();
+        let initial_lld = initial_sysroot
+            .join("lib")
+            .join("rustlib")
+            .join(config.build)
+            .join("bin")
+            .join("rust-lld");
+
         let mut build = Build {
             initial_rustc: config.initial_rustc.clone(),
             initial_cargo: config.initial_cargo.clone(),
+            initial_lld,
             local_rebuild: config.local_rebuild,
             fail_fast: config.cmd.fail_fast(),
             doc_tests: config.cmd.doc_tests(),
@@ -444,7 +453,7 @@
             builder.execute_cli();
         } else {
             let builder = builder::Builder::new(&self);
-            let _ = builder.execute_cli();
+            builder.execute_cli();
         }
 
         // Check for postponed failures from `test --no-fail-fast`.
@@ -810,7 +819,7 @@
     }
 
     /// Returns the path to the linker for the given target if it needs to be overridden.
-    fn linker(&self, target: Interned<String>) -> Option<&Path> {
+    fn linker(&self, target: Interned<String>, can_use_lld: bool) -> Option<&Path> {
         if let Some(linker) = self.config.target_config.get(&target).and_then(|c| c.linker.as_ref())
         {
             Some(linker)
@@ -819,6 +828,8 @@
             && !target.contains("msvc")
         {
             Some(self.cc(target))
+        } else if can_use_lld && self.config.use_lld && self.build == target {
+            Some(&self.initial_lld)
         } else {
             None
         }
@@ -839,7 +850,7 @@
             .target_config
             .get(&target)
             .and_then(|t| t.musl_root.as_ref())
-            .or(self.config.musl_root.as_ref())
+            .or_else(|| self.config.musl_root.as_ref())
             .map(|p| &**p)
     }
 
@@ -1026,7 +1037,7 @@
     }
 
     fn llvm_link_tools_dynamically(&self, target: Interned<String>) -> bool {
-        (target.contains("linux-gnu") || target.contains("apple-darwin"))
+        target.contains("linux-gnu") || target.contains("apple-darwin")
     }
 
     /// Returns the `version` string associated with this compiler for Rust
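
The `linker` change above adds one more fallback: when `rust.use-lld` is configured and we are linking for the host, the stage0 `rust-lld` found under the sysroot is used, unless the caller opts out via `can_use_lld`. A simplified sketch of the selection order, with plain strings and booleans standing in for the bootstrap types:

```rust
// Simplified sketch of the linker-selection order after the change above.
fn linker(
    explicit_linker: Option<&'static str>, // [target.<triple>] linker = "..."
    needs_cc_as_linker: bool,              // cross targets that link via the C compiler
    can_use_lld: bool,                     // callers like compiletest can opt out
    use_lld_configured: bool,              // rust.use-lld in config.toml
    target_is_host: bool,
) -> Option<&'static str> {
    if let Some(linker) = explicit_linker {
        Some(linker)
    } else if needs_cc_as_linker {
        Some("cc")
    } else if can_use_lld && use_lld_configured && target_is_host {
        // New fallback: the stage0 `rust-lld` shipped next to the compiler.
        Some("rust-lld")
    } else {
        None
    }
}

fn main() {
    // Explicit per-target configuration always wins.
    assert_eq!(linker(Some("/usr/bin/ld.gold"), false, true, true, true), Some("/usr/bin/ld.gold"));
    // With use-lld enabled, host builds fall back to rust-lld.
    assert_eq!(linker(None, false, true, true, true), Some("rust-lld"));
    // Compiletest passes can_use_lld = false so rustc picks its own linker.
    assert_eq!(linker(None, false, false, true, true), None);
}
```
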
diff --git a/src/bootstrap/metadata.rs b/src/bootstrap/metadata.rs
index 8a26adc..292aa3b 100644
--- a/src/bootstrap/metadata.rs
+++ b/src/bootstrap/metadata.rs
@@ -5,7 +5,6 @@
 
 use build_helper::output;
 use serde::Deserialize;
-use serde_json;
 
 use crate::cache::INTERNER;
 use crate::{Build, Crate};
diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs
index 5bbd9f4..21dcb1d 100644
--- a/src/bootstrap/native.rs
+++ b/src/bootstrap/native.rs
@@ -15,8 +15,6 @@
 use std::process::Command;
 
 use build_helper::{output, t};
-use cc;
-use cmake;
 
 use crate::builder::{Builder, RunConfig, ShouldRun, Step};
 use crate::cache::Interned;
@@ -205,7 +203,7 @@
             cfg.define("LLVM_ENABLE_LIBXML2", "OFF");
         }
 
-        if enabled_llvm_projects.len() > 0 {
+        if !enabled_llvm_projects.is_empty() {
             enabled_llvm_projects.sort();
             enabled_llvm_projects.dedup();
             cfg.define("LLVM_ENABLE_PROJECTS", enabled_llvm_projects.join(";"));
@@ -264,7 +262,7 @@
             cfg.define("PYTHON_EXECUTABLE", python);
         }
 
-        configure_cmake(builder, target, &mut cfg);
+        configure_cmake(builder, target, &mut cfg, true);
 
         // FIXME: we don't actually need to build all LLVM tools and all LLVM
         //        libraries here, e.g., we just want a few components and a few
@@ -303,7 +301,12 @@
     panic!("\n\nbad LLVM version: {}, need >=7.0\n\n", version)
 }
 
-fn configure_cmake(builder: &Builder<'_>, target: Interned<String>, cfg: &mut cmake::Config) {
+fn configure_cmake(
+    builder: &Builder<'_>,
+    target: Interned<String>,
+    cfg: &mut cmake::Config,
+    use_compiler_launcher: bool,
+) {
     // Do not print installation messages for up-to-date files.
     // LLVM and LLD builds can produce a lot of those and hit CI limits on log size.
     cfg.define("CMAKE_INSTALL_MESSAGE", "LAZY");
@@ -374,9 +377,11 @@
     } else {
         // If ccache is configured we inform the build a little differently how
         // to invoke ccache while also invoking our compilers.
-        if let Some(ref ccache) = builder.config.ccache {
-            cfg.define("CMAKE_C_COMPILER_LAUNCHER", ccache)
-                .define("CMAKE_CXX_COMPILER_LAUNCHER", ccache);
+        if use_compiler_launcher {
+            if let Some(ref ccache) = builder.config.ccache {
+                cfg.define("CMAKE_C_COMPILER_LAUNCHER", ccache)
+                    .define("CMAKE_CXX_COMPILER_LAUNCHER", ccache);
+            }
         }
         cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc))
             .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx));
@@ -460,7 +465,7 @@
         t!(fs::create_dir_all(&out_dir));
 
         let mut cfg = cmake::Config::new(builder.src.join("src/llvm-project/lld"));
-        configure_cmake(builder, target, &mut cfg);
+        configure_cmake(builder, target, &mut cfg, true);
 
         // This is an awful, awful hack. Discovered when we migrated to using
         // clang-cl to compile LLVM/LLD it turns out that LLD, when built out of
@@ -573,7 +578,7 @@
         }
 
         let out_dir = builder.native_dir(self.target).join("sanitizers");
-        let runtimes = supported_sanitizers(&out_dir, self.target);
+        let runtimes = supported_sanitizers(&out_dir, self.target, &builder.config.channel);
         if runtimes.is_empty() {
             return runtimes;
         }
@@ -597,10 +602,7 @@
         let _time = util::timeit(&builder);
 
         let mut cfg = cmake::Config::new(&compiler_rt_dir);
-        cfg.target(&self.target);
-        cfg.host(&builder.config.build);
         cfg.profile("Release");
-
         cfg.define("CMAKE_C_COMPILER_TARGET", self.target);
         cfg.define("COMPILER_RT_BUILD_BUILTINS", "OFF");
         cfg.define("COMPILER_RT_BUILD_CRT", "OFF");
@@ -612,6 +614,12 @@
         cfg.define("COMPILER_RT_USE_LIBCXX", "OFF");
         cfg.define("LLVM_CONFIG_PATH", &llvm_config);
 
+        // On Darwin targets the sanitizer runtimes are built as universal binaries.
+        // Unfortunately sccache currently lacks support for building them successfully.
+        // Disable compiler launcher on Darwin targets to avoid potential issues.
+        let use_compiler_launcher = !self.target.contains("apple-darwin");
+        configure_cmake(builder, self.target, &mut cfg, use_compiler_launcher);
+
         t!(fs::create_dir_all(&out_dir));
         cfg.out_dir(out_dir);
 
@@ -637,7 +645,11 @@
 }
 
 /// Returns sanitizers available on a given target.
-fn supported_sanitizers(out_dir: &Path, target: Interned<String>) -> Vec<SanitizerRuntime> {
+fn supported_sanitizers(
+    out_dir: &Path,
+    target: Interned<String>,
+    channel: &str,
+) -> Vec<SanitizerRuntime> {
     let mut result = Vec::new();
     match &*target {
         "x86_64-apple-darwin" => {
@@ -646,7 +658,7 @@
                     cmake_target: format!("clang_rt.{}_osx_dynamic", s),
                     path: out_dir
                         .join(&format!("build/lib/darwin/libclang_rt.{}_osx_dynamic.dylib", s)),
-                    name: format!("librustc_rt.{}.dylib", s),
+                    name: format!("librustc-{}_rt.{}.dylib", channel, s),
                 });
             }
         }
@@ -655,7 +667,7 @@
                 result.push(SanitizerRuntime {
                     cmake_target: format!("clang_rt.{}-x86_64", s),
                     path: out_dir.join(&format!("build/lib/linux/libclang_rt.{}-x86_64.a", s)),
-                    name: format!("librustc_rt.{}.a", s),
+                    name: format!("librustc-{}_rt.{}.a", channel, s),
                 });
             }
         }
@@ -664,7 +676,7 @@
                 result.push(SanitizerRuntime {
                     cmake_target: format!("clang_rt.{}-x86_64", s),
                     path: out_dir.join(&format!("build/lib/fuchsia/libclang_rt.{}-x86_64.a", s)),
-                    name: format!("librustc_rt.{}.a", s),
+                    name: format!("librustc-{}_rt.{}.a", channel, s),
                 });
             }
         }
@@ -673,7 +685,7 @@
                 result.push(SanitizerRuntime {
                     cmake_target: format!("clang_rt.{}-aarch64", s),
                     path: out_dir.join(&format!("build/lib/fuchsia/libclang_rt.{}-aarch64.a", s)),
-                    name: format!("librustc_rt.{}.a", s),
+                    name: format!("librustc-{}_rt.{}.a", channel, s),
                 });
             }
         }
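
The sanitizer runtimes are now named per release channel, `librustc-<channel>_rt.<sanitizer>.<ext>`, instead of the old fixed `librustc_rt.<sanitizer>.<ext>`. A tiny sketch of the naming scheme; the channel and sanitizer values below are illustrative:

```rust
// Sketch of the channel-qualified sanitizer runtime names introduced above.
fn runtime_name(channel: &str, sanitizer: &str, darwin: bool) -> String {
    if darwin {
        format!("librustc-{}_rt.{}.dylib", channel, sanitizer)
    } else {
        format!("librustc-{}_rt.{}.a", channel, sanitizer)
    }
}

fn main() {
    // e.g. a nightly build of the address sanitizer runtime on Linux...
    assert_eq!(runtime_name("nightly", "asan", false), "librustc-nightly_rt.asan.a");
    // ...and on x86_64-apple-darwin.
    assert_eq!(runtime_name("nightly", "asan", true), "librustc-nightly_rt.asan.dylib");
}
```
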
diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs
index 8ff7056..530e74d 100644
--- a/src/bootstrap/sanity.rs
+++ b/src/bootstrap/sanity.rs
@@ -17,6 +17,7 @@
 
 use build_helper::{output, t};
 
+use crate::config::Target;
 use crate::Build;
 
 struct Finder {
@@ -192,13 +193,9 @@
             panic!("the iOS target is only supported on macOS");
         }
 
+        build.config.target_config.entry(target.clone()).or_insert(Target::from_triple(target));
+
         if target.contains("-none-") || target.contains("nvptx") {
-            if build.no_std(*target).is_none() {
-                let target = build.config.target_config.entry(target.clone()).or_default();
-
-                target.no_std = true;
-            }
-
             if build.no_std(*target) == Some(false) {
                 panic!("All the *-none-* and nvptx* targets are no-std targets")
             }
diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs
index 6adf9dd..4cfda60 100644
--- a/src/bootstrap/test.rs
+++ b/src/bootstrap/test.rs
@@ -388,6 +388,8 @@
             cargo.env("XARGO_RUST_SRC", builder.src.join("src"));
             // Debug things.
             cargo.env("RUST_BACKTRACE", "1");
+            // Overwrite bootstrap's `rustc` wrapper, which would otherwise overwrite our flags.
+            cargo.env("RUSTC_DEBUG_ASSERTIONS", "true");
             // Let cargo-miri know where xargo ended up.
             cargo.env("XARGO", builder.out.join("bin").join("xargo"));
 
@@ -397,7 +399,7 @@
             }
 
             // # Determine where Miri put its sysroot.
-            // To this end, we run `cargo miri setup --env` and capture the output.
+            // To this end, we run `cargo miri setup --print-sysroot` and capture the output.
             // (We do this separately from the above so that when the setup actually
             // happens we get some output.)
             // We re-use the `cargo` from above.
@@ -596,7 +598,7 @@
             .env("RUSTDOC_REAL", builder.rustdoc(self.compiler))
             .env("RUSTDOC_CRATE_VERSION", builder.rust_version())
             .env("RUSTC_BOOTSTRAP", "1");
-        if let Some(linker) = builder.linker(self.compiler.host) {
+        if let Some(linker) = builder.linker(self.compiler.host, true) {
             cmd.env("RUSTC_TARGET_LINKER", linker);
         }
         try_run(builder, &mut cmd);
@@ -662,7 +664,7 @@
                 target: self.target,
                 mode: "js-doc-test",
                 suite: "rustdoc-js",
-                path: None,
+                path: "src/test/rustdoc-js",
                 compare_mode: None,
             });
         } else {
@@ -698,7 +700,7 @@
             target: self.target,
             mode: "ui",
             suite: "rustdoc-ui",
-            path: Some("src/test/rustdoc-ui"),
+            path: "src/test/rustdoc-ui",
             compare_mode: None,
         })
     }
@@ -843,7 +845,7 @@
                     target: self.target,
                     mode: $mode,
                     suite: $suite,
-                    path: Some($path),
+                    path: $path,
                     compare_mode: $compare_mode,
                 })
             }
@@ -926,7 +928,7 @@
     target: Interned<String>,
     mode: &'static str,
     suite: &'static str,
-    path: Option<&'static str>,
+    path: &'static str,
     compare_mode: Option<&'static str>,
 }
 
@@ -949,7 +951,7 @@
         let suite = self.suite;
 
         // Path for test suite
-        let suite_path = self.path.unwrap_or("");
+        let suite_path = self.path;
 
         // Skip codegen tests if they aren't enabled in configuration.
         if !builder.config.codegen_tests && suite == "codegen" {
@@ -1035,7 +1037,8 @@
         flags.push("-Zunstable-options".to_string());
         flags.push(builder.config.cmd.rustc_args().join(" "));
 
-        if let Some(linker) = builder.linker(target) {
+        // Don't use LLD here since we want to test that rustc finds and uses a linker by itself.
+        if let Some(linker) = builder.linker(target, false) {
             cmd.arg("--linker").arg(linker);
         }
 
@@ -1050,10 +1053,10 @@
         cmd.arg("--docck-python").arg(builder.python());
 
         if builder.config.build.ends_with("apple-darwin") {
-            // Force /usr/bin/python on macOS for LLDB tests because we're loading the
+            // Force /usr/bin/python3 on macOS for LLDB tests because we're loading the
             // LLDB plugin's compiled module which only works with the system python
             // (namely not Homebrew-installed python)
-            cmd.arg("--lldb-python").arg("/usr/bin/python");
+            cmd.arg("--lldb-python").arg("/usr/bin/python3");
         } else {
             cmd.arg("--lldb-python").arg(builder.python());
         }
@@ -1263,15 +1266,15 @@
     }
 }
 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-struct DocTest {
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+struct BookTest {
     compiler: Compiler,
-    path: &'static str,
+    path: PathBuf,
     name: &'static str,
     is_ext_doc: bool,
 }
 
-impl Step for DocTest {
+impl Step for BookTest {
     type Output = ();
     const ONLY_HOSTS: bool = true;
 
@@ -1279,12 +1282,59 @@
         run.never()
     }
 
-    /// Runs `rustdoc --test` for all documentation in `src/doc`.
+    /// Runs the documentation tests for a book in `src/doc`.
     ///
-    /// This will run all tests in our markdown documentation (e.g., the book)
-    /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
-    /// `compiler`.
+    /// This uses the `rustdoc` that sits next to `compiler`.
     fn run(self, builder: &Builder<'_>) {
+        // External docs are different from local because:
+        // - Some books need pre-processing by mdbook before being tested.
+        // - They need to save their state to toolstate.
+        // - They are only tested on the "checktools" builders.
+        //
+        // The local docs are tested by default, and we don't want to pay the
+        // cost of building mdbook, so they use `rustdoc --test` directly.
+        // Also, the unstable book is special because SUMMARY.md is generated,
+        // so it is easier to just run `rustdoc` on its files.
+        if self.is_ext_doc {
+            self.run_ext_doc(builder);
+        } else {
+            self.run_local_doc(builder);
+        }
+    }
+}
+
+impl BookTest {
+    /// This runs the equivalent of `mdbook test` (via the rustbook wrapper)
+    /// which in turn runs `rustdoc --test` on each file in the book.
+    fn run_ext_doc(self, builder: &Builder<'_>) {
+        let compiler = self.compiler;
+
+        builder.ensure(compile::Std { compiler, target: compiler.host });
+
+        // mdbook just executes a binary named "rustdoc", so we need to update
+        // PATH so that it points to our rustdoc.
+        let mut rustdoc_path = builder.rustdoc(compiler);
+        rustdoc_path.pop();
+        let old_path = env::var_os("PATH").unwrap_or_default();
+        let new_path = env::join_paths(iter::once(rustdoc_path).chain(env::split_paths(&old_path)))
+            .expect("could not add rustdoc to PATH");
+
+        let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook);
+        let path = builder.src.join(&self.path);
+        rustbook_cmd.env("PATH", new_path).arg("test").arg(path);
+        builder.add_rust_test_threads(&mut rustbook_cmd);
+        builder.info(&format!("Testing rustbook {}", self.path.display()));
+        let _time = util::timeit(&builder);
+        let toolstate = if try_run(builder, &mut rustbook_cmd) {
+            ToolState::TestPass
+        } else {
+            ToolState::TestFail
+        };
+        builder.save_toolstate(self.name, toolstate);
+    }
+
+    /// This runs `rustdoc --test` on all `.md` files in the path.
+    fn run_local_doc(self, builder: &Builder<'_>) {
         let compiler = self.compiler;
 
         builder.ensure(compile::Std { compiler, target: compiler.host });
@@ -1293,7 +1343,6 @@
         // tests for all files that end in `*.md`
         let mut stack = vec![builder.src.join(self.path)];
         let _time = util::timeit(&builder);
-
         let mut files = Vec::new();
         while let Some(p) = stack.pop() {
             if p.is_dir() {
@@ -1305,25 +1354,13 @@
                 continue;
             }
 
-            // The nostarch directory in the book is for no starch, and so isn't
-            // guaranteed to builder. We don't care if it doesn't build, so skip it.
-            if p.to_str().map_or(false, |p| p.contains("nostarch")) {
-                continue;
-            }
-
             files.push(p);
         }
 
         files.sort();
 
-        let mut toolstate = ToolState::TestPass;
         for file in files {
-            if !markdown_test(builder, compiler, &file) {
-                toolstate = ToolState::TestFail;
-            }
-        }
-        if self.is_ext_doc {
-            builder.save_toolstate(self.name, toolstate);
+            markdown_test(builder, compiler, &file);
         }
     }
 }
@@ -1352,9 +1389,9 @@
                 }
 
                 fn run(self, builder: &Builder<'_>) {
-                    builder.ensure(DocTest {
+                    builder.ensure(BookTest {
                         compiler: self.compiler,
-                        path: $path,
+                        path: PathBuf::from($path),
                         name: $book_name,
                         is_ext_doc: !$default,
                     });
@@ -1424,13 +1461,10 @@
 }
 
 fn markdown_test(builder: &Builder<'_>, compiler: Compiler, markdown: &Path) -> bool {
-    match fs::read_to_string(markdown) {
-        Ok(contents) => {
-            if !contents.contains("```") {
-                return true;
-            }
+    if let Ok(contents) = fs::read_to_string(markdown) {
+        if !contents.contains("```") {
+            return true;
         }
-        Err(_) => {}
     }
 
     builder.info(&format!("doc tests for: {}", markdown.display()));
diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs
index 7f24768..67e0ed5 100644
--- a/src/bootstrap/tool.rs
+++ b/src/bootstrap/tool.rs
@@ -234,7 +234,7 @@
         cargo.env("RUSTC_EXTERNAL_TOOL", "1");
     }
 
-    let mut features = extra_features.iter().cloned().collect::<Vec<_>>();
+    let mut features = extra_features.to_vec();
     if builder.build.config.cargo_native_static {
         if path.ends_with("cargo")
             || path.ends_with("rls")
diff --git a/src/bootstrap/toolstate.rs b/src/bootstrap/toolstate.rs
index b068c82..bb012a3 100644
--- a/src/bootstrap/toolstate.rs
+++ b/src/bootstrap/toolstate.rs
@@ -124,7 +124,7 @@
     let output = t!(String::from_utf8(output.stdout));
 
     for (tool, submodule) in STABLE_TOOLS.iter().chain(NIGHTLY_TOOLS.iter()) {
-        let changed = output.lines().any(|l| l.starts_with("M") && l.ends_with(submodule));
+        let changed = output.lines().any(|l| l.starts_with('M') && l.ends_with(submodule));
         eprintln!("Verifying status of {}...", tool);
         if !changed {
             continue;
diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs
index 7d1efe4..eac790f 100644
--- a/src/bootstrap/util.rs
+++ b/src/bootstrap/util.rs
@@ -98,7 +98,7 @@
     fn drop(&mut self) {
         let time = self.1.elapsed();
         if !self.0 {
-            println!("\tfinished in {}.{:03}", time.as_secs(), time.subsec_nanos() / 1_000_000);
+            println!("\tfinished in {}.{:03}", time.as_secs(), time.subsec_millis());
         }
     }
 }
diff --git a/src/ci/azure-pipelines/auto.yml b/src/ci/azure-pipelines/auto.yml
index 79a49fc..74b7469 100644
--- a/src/ci/azure-pipelines/auto.yml
+++ b/src/ci/azure-pipelines/auto.yml
@@ -63,7 +63,7 @@
 - job: macOS
   timeoutInMinutes: 600
   pool:
-    vmImage: macos-10.13
+    vmImage: macos-10.15
   steps:
   - template: steps/run.yml
   strategy:
diff --git a/src/ci/azure-pipelines/steps/run.yml b/src/ci/azure-pipelines/steps/run.yml
index f536388..ee9425a 100644
--- a/src/ci/azure-pipelines/steps/run.yml
+++ b/src/ci/azure-pipelines/steps/run.yml
@@ -31,6 +31,9 @@
 - bash: src/ci/scripts/setup-environment.sh
   displayName: Setup environment
 
+- bash: src/ci/scripts/clean-disk.sh
+  displayName: Clean disk
+
 - bash: src/ci/scripts/should-skip-this.sh
   displayName: Decide whether to run this job
 
@@ -48,10 +51,6 @@
   displayName: Install clang
   condition: and(succeeded(), not(variables.SKIP_JOB))
 
-- bash: src/ci/scripts/switch-xcode.sh
-  displayName: Switch to Xcode 9.3
-  condition: and(succeeded(), not(variables.SKIP_JOB))
-
 - bash: src/ci/scripts/install-wix.sh
   displayName: Install wix
   condition: and(succeeded(), not(variables.SKIP_JOB))
diff --git a/src/ci/azure-pipelines/try.yml b/src/ci/azure-pipelines/try.yml
index b6177b2..f8ddf0e 100644
--- a/src/ci/azure-pipelines/try.yml
+++ b/src/ci/azure-pipelines/try.yml
@@ -25,7 +25,7 @@
 # - job: macOS
 #   timeoutInMinutes: 600
 #   pool:
-#     vmImage: macos-10.13
+#     vmImage: macos-10.15
 #   steps:
 #   - template: steps/run.yml
 #   strategy:
diff --git a/src/ci/cpu-usage-over-time.py b/src/ci/cpu-usage-over-time.py
index daf2167..78ac060 100644
--- a/src/ci/cpu-usage-over-time.py
+++ b/src/ci/cpu-usage-over-time.py
@@ -148,11 +148,11 @@
     print('unknown platform', sys.platform)
     sys.exit(1)
 
-cur_state = State();
+cur_state = State()
 print("Time,Idle")
 while True:
-    time.sleep(1);
-    next_state = State();
+    time.sleep(1)
+    next_state = State()
     now = datetime.datetime.utcnow().isoformat()
     idle = next_state.idle_since(cur_state)
     print("%s,%s" % (now, idle))
diff --git a/src/ci/docker/dist-various-2/Dockerfile b/src/ci/docker/dist-various-2/Dockerfile
index 2ae6c58..5bb5436 100644
--- a/src/ci/docker/dist-various-2/Dockerfile
+++ b/src/ci/docker/dist-various-2/Dockerfile
@@ -48,7 +48,7 @@
 COPY dist-various-2/build-x86_64-fortanix-unknown-sgx-toolchain.sh /tmp/
 # We pass the commit id of the port of LLVM's libunwind to the build script.
 # Any update to the commit id here, should cause the container image to be re-built from this point on.
-RUN /tmp/build-x86_64-fortanix-unknown-sgx-toolchain.sh "53b586346f2c7870e20b170decdc30729d97c42b"
+RUN /tmp/build-x86_64-fortanix-unknown-sgx-toolchain.sh "5125c169b30837208a842f85f7ae44a83533bd0e"
 
 COPY dist-various-2/build-wasi-toolchain.sh /tmp/
 RUN /tmp/build-wasi-toolchain.sh
diff --git a/src/ci/scripts/clean-disk.sh b/src/ci/scripts/clean-disk.sh
new file mode 100755
index 0000000..c50de37
--- /dev/null
+++ b/src/ci/scripts/clean-disk.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+# This script deletes some of the Azure-provided artifacts. We don't use these,
+# and disk space is at a premium on our builders.
+
+set -euo pipefail
+IFS=$'\n\t'
+
+source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"
+
+# All the Linux builds happen inside Docker.
+if isLinux; then
+    # 6.7GB
+    sudo rm -rf /opt/ghc
+    # 16GB
+    sudo rm -rf /usr/share/dotnet
+fi
diff --git a/src/ci/scripts/install-clang.sh b/src/ci/scripts/install-clang.sh
index e16a481..c242f5d 100755
--- a/src/ci/scripts/install-clang.sh
+++ b/src/ci/scripts/install-clang.sh
@@ -19,9 +19,7 @@
     # native clang is configured to use the correct path, but our custom one
     # doesn't. This sets the SDKROOT environment variable to the SDK so that
     # our own clang can figure out the correct include path on its own.
-    if ! [[ -d "/usr/include" ]]; then
-        ciCommandSetEnv SDKROOT "$(xcrun --sdk macosx --show-sdk-path)"
-    fi
+    ciCommandSetEnv SDKROOT "$(xcrun --sdk macosx --show-sdk-path)"
 
     # Configure `AR` specifically so rustbuild doesn't try to infer it as
     # `clang-ar` by accident.
diff --git a/src/ci/scripts/switch-xcode.sh b/src/ci/scripts/switch-xcode.sh
deleted file mode 100755
index 2cbb2dd..0000000
--- a/src/ci/scripts/switch-xcode.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-# Switch to XCode 9.3 on OSX since it seems to be the last version that supports
-# i686-apple-darwin. We'll eventually want to upgrade this and it will probably
-# force us to drop i686-apple-darwin, but let's keep the wheels turning for now.
-
-set -euo pipefail
-IFS=$'\n\t'
-
-source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"
-
-if isMacOS; then
-    sudo xcode-select --switch /Applications/Xcode_9.3.app
-fi
diff --git a/src/doc/book b/src/doc/book
index 87dd684..6fb3705 160000
--- a/src/doc/book
+++ b/src/doc/book
@@ -1 +1 @@
-Subproject commit 87dd6843678575f8dda962f239d14ef4be14b352
+Subproject commit 6fb3705e5230311b096d47f7e2c91f9ce24393d0
diff --git a/src/doc/edition-guide b/src/doc/edition-guide
index 1a23902..37f9e68 160000
--- a/src/doc/edition-guide
+++ b/src/doc/edition-guide
@@ -1 +1 @@
-Subproject commit 1a2390247ad6d08160e0dd74f40a01a9578659c2
+Subproject commit 37f9e6848411188a1062ead1bd8ebe4b8aa16899
diff --git a/src/doc/embedded-book b/src/doc/embedded-book
index 4d78994..b2e1092 160000
--- a/src/doc/embedded-book
+++ b/src/doc/embedded-book
@@ -1 +1 @@
-Subproject commit 4d78994915af1bde9a95c04a8c27d8dca066232a
+Subproject commit b2e1092bf67bd4d7686c4553f186edbb7f5f92db
diff --git a/src/doc/reference b/src/doc/reference
index 11e893f..64239df 160000
--- a/src/doc/reference
+++ b/src/doc/reference
@@ -1 +1 @@
-Subproject commit 11e893fc1357bc688418ddf1087c2b7aa25d154d
+Subproject commit 64239df6d173562b9deb4f012e4c3e6e960c4754
diff --git a/src/doc/rust-by-example b/src/doc/rust-by-example
index 1c2bd02..32facd5 160000
--- a/src/doc/rust-by-example
+++ b/src/doc/rust-by-example
@@ -1 +1 @@
-Subproject commit 1c2bd024d13f8011307e13386cf1fea2180352b5
+Subproject commit 32facd5522ddbbf37baf01e4e4b6562bc55c071a
diff --git a/src/doc/rustc-guide b/src/doc/rustc-guide
index 92baf72..5bd60bc 160000
--- a/src/doc/rustc-guide
+++ b/src/doc/rustc-guide
@@ -1 +1 @@
-Subproject commit 92baf7293dd2d418d2ac4b141b0faa822075d9f7
+Subproject commit 5bd60bc51efaec04e69e2e18b59678e2af066433
diff --git a/src/doc/rustc/src/command-line-arguments.md b/src/doc/rustc/src/command-line-arguments.md
index 577d03d..659f8f6 100644
--- a/src/doc/rustc/src/command-line-arguments.md
+++ b/src/doc/rustc/src/command-line-arguments.md
@@ -215,21 +215,29 @@
 
 This flag will set which lints should be set to the [warn level](lints/levels.md#warn).
 
+_Note:_ The order of these lint level arguments is taken into account; see [lint level via compiler flag](lints/levels.md#via-compiler-flag) for more information.
+
 <a id="option-a-allow"></a>
 ## `-A`: set lint allowed
 
 This flag will set which lints should be set to the [allow level](lints/levels.md#allow).
 
+_Note:_ The order of these lint level arguments is taken into account; see [lint level via compiler flag](lints/levels.md#via-compiler-flag) for more information.
+
 <a id="option-d-deny"></a>
 ## `-D`: set lint denied
 
 This flag will set which lints should be set to the [deny level](lints/levels.md#deny).
 
+_Note:_ The order of these lint level arguments is taken into account; see [lint level via compiler flag](lints/levels.md#via-compiler-flag) for more information.
+
 <a id="option-f-forbid"></a>
 ## `-F`: set lint forbidden
 
 This flag will set which lints should be set to the [forbid level](lints/levels.md#forbid).
 
+_Note:_ The order of these lint level arguments is taken into account; see [lint level via compiler flag](lints/levels.md#via-compiler-flag) for more information.
+
 <a id="option-z-unstable"></a>
 ## `-Z`: set unstable options
 
diff --git a/src/doc/rustc/src/lints/levels.md b/src/doc/rustc/src/lints/levels.md
index 2944e86..3cfe2f6 100644
--- a/src/doc/rustc/src/lints/levels.md
+++ b/src/doc/rustc/src/lints/levels.md
@@ -164,6 +164,18 @@
 $ rustc lib.rs --crate-type=lib -D missing-docs -A unused-variables
 ```
 
+The order of these command line arguments is taken into account. The following allows the `unused-variables` lint, because it is the last argument for that lint:
+
+```bash
+$ rustc lib.rs --crate-type=lib -D unused-variables -A unused-variables
+```
+
+You can make use of this behavior by overriding the level of one specific lint out of a group of lints. The following example denies all the lints in the `unused` group, but explicitly allows the `unused-variables` lint in that group:
+
+```bash
+$ rustc lib.rs --crate-type=lib -D unused -A unused-variables
+```
+
 ### Via an attribute
 
 You can also modify the lint level with a crate-wide attribute:
diff --git a/src/doc/rustdoc/book.toml b/src/doc/rustdoc/book.toml
new file mode 100644
index 0000000..ba30c10
--- /dev/null
+++ b/src/doc/rustdoc/book.toml
@@ -0,0 +1,4 @@
+[book]
+authors = ["The Rust Project Developers"]
+src = "src"
+title = "The rustdoc book"
diff --git a/src/doc/rustdoc/src/the-doc-attribute.md b/src/doc/rustdoc/src/the-doc-attribute.md
index 80ac405..ef143c8 100644
--- a/src/doc/rustdoc/src/the-doc-attribute.md
+++ b/src/doc/rustdoc/src/the-doc-attribute.md
@@ -39,7 +39,7 @@
 
 ## At the crate level
 
-These options control how the docs look at a macro level.
+These options control how the docs look at a crate level.
 
 ### `html_favicon_url`
 
diff --git a/src/doc/unstable-book/src/compiler-flags/control-flow-guard.md b/src/doc/unstable-book/src/compiler-flags/control-flow-guard.md
new file mode 100644
index 0000000..f871df4
--- /dev/null
+++ b/src/doc/unstable-book/src/compiler-flags/control-flow-guard.md
@@ -0,0 +1,34 @@
+# `control_flow_guard`
+
+The tracking issue for this feature is: [#68793](https://github.com/rust-lang/rust/issues/68793).
+
+------------------------
+
+The `-Zcontrol_flow_guard=checks` compiler flag enables the Windows [Control Flow Guard][cfguard-docs] platform security feature. When enabled, the compiler outputs a list of valid indirect call targets, and inserts runtime checks on all indirect jump instructions to ensure that the destination is in the list of valid call targets.
+
+[cfguard-docs]: https://docs.microsoft.com/en-us/windows/win32/secbp/control-flow-guard
+
+For testing purposes, the `-Zcontrol_flow_guard=nochecks` compiler flag can be used to emit only the list of valid call targets, but not the runtime checks.
+
+It is strongly recommended to also enable Control Flow Guard checks in all linked libraries, including the standard library. 
+
+To enable Control Flow Guard in the standard library, you can use the [cargo `-Zbuild-std` functionality][build-std] to recompile the standard library with the same configuration options as the main program. 
+
+[build-std]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-std
+
+For example:
+```cmd
+rustup toolchain install --force nightly
+rustup component add rust-src
+SET RUSTFLAGS=-Zcontrol_flow_guard=checks
+cargo +nightly build -Z build-std --target x86_64-pc-windows-msvc
+```
+
+```PowerShell
+rustup toolchain install --force nightly
+rustup component add rust-src
+$Env:RUSTFLAGS = "-Zcontrol_flow_guard=checks"
+cargo +nightly build -Z build-std --target x86_64-pc-windows-msvc
+```
+
+Alternatively, if you are building the standard library from source, you can set `control-flow-guard = true` in the config.toml file.
diff --git a/src/doc/unstable-book/src/compiler-flags/sanitizer.md b/src/doc/unstable-book/src/compiler-flags/sanitizer.md
index 64bff2a..414ac7e 100644
--- a/src/doc/unstable-book/src/compiler-flags/sanitizer.md
+++ b/src/doc/unstable-book/src/compiler-flags/sanitizer.md
@@ -170,7 +170,7 @@
 ## MemorySanitizer
 
 Use of uninitialized memory. Note that we are using `-Zbuild-std` to instrument
-standard library, and passing `-msan-track-origins=2` to the LLVM to track
+the standard library, and passing `-Zsanitizer-memory-track-origins` to track the
 origins of uninitialized memory:
 
 ```shell
@@ -185,7 +185,15 @@
     }
 }
 
-$ env RUSTFLAGS="-Zsanitizer=memory -Cllvm-args=-msan-track-origins=2" cargo -Zbuild-std run --target x86_64-unknown-linux-gnu
+$ export \
+  CC=clang \
+  CXX=clang++ \
+  CFLAGS='-fsanitize=memory -fsanitize-memory-track-origins' \
+  CXXFLAGS='-fsanitize=memory -fsanitize-memory-track-origins' \
+  RUSTFLAGS='-Zsanitizer=memory -Zsanitizer-memory-track-origins' \
+  RUSTDOCFLAGS='-Zsanitizer=memory -Zsanitizer-memory-track-origins'
+$ cargo clean
+$ cargo -Zbuild-std run --target x86_64-unknown-linux-gnu
 ==9416==WARNING: MemorySanitizer: use-of-uninitialized-value
     #0 0x560c04f7488a in core::fmt::num::imp::fmt_u64::haa293b0b098501ca $RUST/build/x86_64-unknown-linux-gnu/stage1/lib/rustlib/src/rust/src/libcore/fmt/num.rs:202:16
 ...
diff --git a/src/doc/unstable-book/src/compiler-flags/self-profile-events.md b/src/doc/unstable-book/src/compiler-flags/self-profile-events.md
new file mode 100644
index 0000000..3ce1874
--- /dev/null
+++ b/src/doc/unstable-book/src/compiler-flags/self-profile-events.md
@@ -0,0 +1,74 @@
+# `self-profile-events`
+
+---------------------
+
+The `-Zself-profile-events` compiler flag controls what events are recorded by the self-profiler when it is enabled via the `-Zself-profile` flag.
+
+This flag takes a comma delimited list of event types to record.
+
+For example:
+
+```console
+$ rustc -Zself-profile -Zself-profile-events=default,args
+```
+
+## Event types
+
+- `query-provider`
+  - Traces each query used internally by the compiler.
+
+- `generic-activity`
+  - Traces other parts of the compiler not covered by the query system.
+
+- `query-cache-hit`
+  - Adds tracing information that records when the in-memory query cache is "hit" and does not need to re-execute a query which has been cached.
+  - Disabled by default because this significantly increases the trace file size.
+
+- `query-blocked`
+  - Tracks time that a query tries to run but is blocked waiting on another thread executing the same query to finish executing.
+  - Query blocking only occurs when the compiler is built with parallel mode support.
+
+- `incr-cache-load`
+  - Tracks time that is spent loading and deserializing query results from the incremental compilation on-disk cache.
+
+- `query-keys`
+  - Adds a serialized representation of each query's query key to the tracing data.
+  - Disabled by default because this significantly increases the trace file size.
+
+- `function-args`
+  - Adds additional tracing data to some `generic-activity` events.
+  - Disabled by default for parity with `query-keys`.
+
+- `llvm`
+  - Adds tracing information about LLVM passes and code generation.
+  - Disabled by default because this only works when `-Znew-llvm-pass-manager` is enabled.
+
+## Event synonyms
+
+- `none`
+  - Disables all events.
+  Equivalent to the self-profiler being disabled.
+
+- `default`
+  - The default set of events, which strikes a balance between providing detailed tracing data and adding additional overhead to the compilation.
+
+- `args`
+  - Equivalent to `query-keys` and `function-args`.
+
+- `all`
+  - Enables all events.
+
+## Examples
+
+Enable the profiler and capture the default set of events (both invocations are equivalent):
+
+```console
+$ rustc -Zself-profile
+$ rustc -Zself-profile -Zself-profile-events=default
+```
+
+Enable the profiler and capture the default events and their arguments:
+
+```console
+$ rustc -Zself-profile -Zself-profile-events=default,args
+```
diff --git a/src/doc/unstable-book/src/compiler-flags/self-profile.md b/src/doc/unstable-book/src/compiler-flags/self-profile.md
new file mode 100644
index 0000000..6de1c77
--- /dev/null
+++ b/src/doc/unstable-book/src/compiler-flags/self-profile.md
@@ -0,0 +1,47 @@
+# `self-profile`
+
+--------------------
+
+The `-Zself-profile` compiler flag enables rustc's internal profiler.
+When enabled, the compiler will output three binary files in the specified directory (or the current working directory if no directory is specified).
+These files can be analyzed by using the tools in the [`measureme`] repository.
+
+To control the data recorded in the trace files, use the `-Zself-profile-events` flag.
+
+For example:
+
+First, run a compilation session and provide the `-Zself-profile` flag:
+
+```console
+$ rustc --crate-name foo -Zself-profile
+```
+
+This will generate three files in the working directory such as:
+
+- `foo-1234.events`
+- `foo-1234.string_data`
+- `foo-1234.string_index`
+
+Where `foo` is the name of the crate and `1234` is the process id of the rustc process.
+
+To get a summary of where the compiler is spending its time:
+
+```console
+$ ../measureme/target/release/summarize summarize foo-1234
+```
+
+To generate a flamegraph of the same data:
+
+```console
+$ ../measureme/target/release/inferno foo-1234
+```
+
+To dump the event data in a Chromium-profiler compatible format:
+
+```console
+$ ../measureme/target/release/crox foo-1234
+```
+
+For more information, consult the [`measureme`] documentation.
+
+[`measureme`]: https://github.com/rust-lang/measureme.git
diff --git a/src/doc/unstable-book/src/language-features/generators.md b/src/doc/unstable-book/src/language-features/generators.md
index 97cf58e..8bc6241 100644
--- a/src/doc/unstable-book/src/language-features/generators.md
+++ b/src/doc/unstable-book/src/language-features/generators.md
@@ -37,11 +37,11 @@
         return "foo"
     };
 
-    match Pin::new(&mut generator).resume() {
+    match Pin::new(&mut generator).resume(()) {
         GeneratorState::Yielded(1) => {}
         _ => panic!("unexpected value from resume"),
     }
-    match Pin::new(&mut generator).resume() {
+    match Pin::new(&mut generator).resume(()) {
         GeneratorState::Complete("foo") => {}
         _ => panic!("unexpected value from resume"),
     }
@@ -71,9 +71,9 @@
     };
 
     println!("1");
-    Pin::new(&mut generator).resume();
+    Pin::new(&mut generator).resume(());
     println!("3");
-    Pin::new(&mut generator).resume();
+    Pin::new(&mut generator).resume(());
     println!("5");
 }
 ```
@@ -92,10 +92,10 @@
 # use std::ops::GeneratorState;
 # use std::pin::Pin;
 
-pub trait Generator {
+pub trait Generator<R = ()> {
     type Yield;
     type Return;
-    fn resume(self: Pin<&mut Self>) -> GeneratorState<Self::Yield, Self::Return>;
+    fn resume(self: Pin<&mut Self>, resume: R) -> GeneratorState<Self::Yield, Self::Return>;
 }
 ```
 
@@ -152,10 +152,6 @@
 * Whenever a generator is dropped it will drop all captured environment
   variables.
 
-Note that unlike closures, generators at this time cannot take any arguments.
-That is, generators must always look like `|| { ... }`. This restriction may be
-lifted at a future date, the design is ongoing!
-
 ### Generators as state machines
 
 In the compiler, generators are currently compiled as state machines. Each
@@ -179,8 +175,8 @@
         return ret
     };
 
-    Pin::new(&mut generator).resume();
-    Pin::new(&mut generator).resume();
+    Pin::new(&mut generator).resume(());
+    Pin::new(&mut generator).resume(());
 }
 ```
 
@@ -205,7 +201,7 @@
             type Yield = i32;
             type Return = &'static str;
 
-            fn resume(mut self: Pin<&mut Self>) -> GeneratorState<i32, &'static str> {
+            fn resume(mut self: Pin<&mut Self>, resume: ()) -> GeneratorState<i32, &'static str> {
                 use std::mem;
                 match mem::replace(&mut *self, __Generator::Done) {
                     __Generator::Start(s) => {
@@ -228,8 +224,8 @@
         __Generator::Start(ret)
     };
 
-    Pin::new(&mut generator).resume();
-    Pin::new(&mut generator).resume();
+    Pin::new(&mut generator).resume(());
+    Pin::new(&mut generator).resume(());
 }
 ```
 
diff --git a/src/doc/unstable-book/src/language-features/no-sanitize.md b/src/doc/unstable-book/src/language-features/no-sanitize.md
new file mode 100644
index 0000000..28c6839
--- /dev/null
+++ b/src/doc/unstable-book/src/language-features/no-sanitize.md
@@ -0,0 +1,29 @@
+# `no_sanitize`
+
+The tracking issue for this feature is: [#39699]
+
+[#39699]: https://github.com/rust-lang/rust/issues/39699
+
+------------------------
+
+The `no_sanitize` attribute can be used to selectively disable sanitizer
+instrumentation in an annotated function. This might be useful to avoid
+instrumentation overhead in a performance-critical function, or to avoid
+instrumenting code that contains constructs unsupported by a given sanitizer.
+
+The precise effect of this annotation depends on the particular sanitizer in use.
+For example, with `no_sanitize(thread)`, the thread sanitizer will no longer
+instrument non-atomic store/load operations, but it will instrument atomic
+operations to avoid reporting false positives and to provide meaningful stack
+traces.
+
+## Examples
+
+```rust
+#![feature(no_sanitize)]
+
+#[no_sanitize(address)]
+fn foo() {
+  // ...
+}
+```
diff --git a/src/etc/debugger_pretty_printers_common.py b/src/etc/debugger_pretty_printers_common.py
index 385ce8e..b3f8f50 100644
--- a/src/etc/debugger_pretty_printers_common.py
+++ b/src/etc/debugger_pretty_printers_common.py
@@ -212,7 +212,6 @@
         # REGULAR STRUCT
         return TYPE_KIND_REGULAR_STRUCT
 
-
     def __classify_union(self):
         assert self.get_dwarf_type_kind() == DWARF_TYPE_CODE_UNION
 
@@ -233,7 +232,6 @@
         else:
             return TYPE_KIND_REGULAR_UNION
 
-
     def __conforms_to_field_layout(self, expected_fields):
         actual_fields = self.get_fields()
         actual_field_count = len(actual_fields)
@@ -363,6 +361,7 @@
     assert data_ptr.type.get_dwarf_type_kind() == DWARF_TYPE_CODE_PTR
     return (tail, head, data_ptr, capacity)
 
+
 def extract_length_and_ptr_from_slice(slice_val):
     assert (slice_val.type.get_type_kind() == TYPE_KIND_SLICE or
             slice_val.type.get_type_kind() == TYPE_KIND_STR_SLICE)
@@ -376,8 +375,10 @@
     assert data_ptr.type.get_dwarf_type_kind() == DWARF_TYPE_CODE_PTR
     return (length, data_ptr)
 
+
 UNQUALIFIED_TYPE_MARKERS = frozenset(["(", "[", "&", "*"])
 
+
 def extract_type_name(qualified_type_name):
     """Extracts the type name from a fully qualified path"""
     if qualified_type_name[0] in UNQUALIFIED_TYPE_MARKERS:
@@ -393,6 +394,7 @@
     else:
         return qualified_type_name[index + 2:]
 
+
 try:
     compat_str = unicode  # Python 2
 except NameError:
diff --git a/src/etc/dec2flt_table.py b/src/etc/dec2flt_table.py
index 85395d2..4979882 100755
--- a/src/etc/dec2flt_table.py
+++ b/src/etc/dec2flt_table.py
@@ -14,7 +14,6 @@
 even larger, and it's already uncomfortably large (6 KiB).
 """
 from __future__ import print_function
-import sys
 from math import ceil, log
 from fractions import Fraction
 from collections import namedtuple
@@ -82,6 +81,7 @@
     ulp_err = abs_err / Fraction(2) ** z.exp
     return float(ulp_err)
 
+
 HEADER = """
 //! Tables of approximations of powers of ten.
 //! DO NOT MODIFY: Generated by `src/etc/dec2flt_table.py`
diff --git a/src/etc/gdb_rust_pretty_printing.py b/src/etc/gdb_rust_pretty_printing.py
index 5da01b9..0914c22 100755
--- a/src/etc/gdb_rust_pretty_printing.py
+++ b/src/etc/gdb_rust_pretty_printing.py
@@ -9,7 +9,7 @@
 if sys.version_info[0] >= 3:
     xrange = range
 
-rust_enabled = 'set language rust' in gdb.execute('complete set language ru', to_string = True)
+rust_enabled = 'set language rust' in gdb.execute('complete set language ru', to_string=True)
 
 # The btree pretty-printers fail in a confusing way unless
 # https://sourceware.org/bugzilla/show_bug.cgi?id=21763 is fixed.
@@ -21,9 +21,10 @@
     if int(_match.group(1)) > 8 or (int(_match.group(1)) == 8 and int(_match.group(2)) >= 1):
         gdb_81 = True
 
-#===============================================================================
+# ===============================================================================
 # GDB Pretty Printing Module for Rust
-#===============================================================================
+# ===============================================================================
+
 
 class GdbType(rustpp.Type):
 
@@ -133,39 +134,39 @@
 
     if type_kind == rustpp.TYPE_KIND_REGULAR_STRUCT:
         return RustStructPrinter(val,
-                                 omit_first_field = False,
-                                 omit_type_name = False,
-                                 is_tuple_like = False)
+                                 omit_first_field=False,
+                                 omit_type_name=False,
+                                 is_tuple_like=False)
 
     if type_kind == rustpp.TYPE_KIND_STRUCT_VARIANT:
         return RustStructPrinter(val,
-                                 omit_first_field = True,
-                                 omit_type_name = False,
-                                 is_tuple_like = False)
+                                 omit_first_field=True,
+                                 omit_type_name=False,
+                                 is_tuple_like=False)
 
     if type_kind == rustpp.TYPE_KIND_STR_SLICE:
         return RustStringSlicePrinter(val)
 
     if type_kind == rustpp.TYPE_KIND_TUPLE:
         return RustStructPrinter(val,
-                                 omit_first_field = False,
-                                 omit_type_name = True,
-                                 is_tuple_like = True)
+                                 omit_first_field=False,
+                                 omit_type_name=True,
+                                 is_tuple_like=True)
 
     if type_kind == rustpp.TYPE_KIND_TUPLE_STRUCT:
         return RustStructPrinter(val,
-                                 omit_first_field = False,
-                                 omit_type_name = False,
-                                 is_tuple_like = True)
+                                 omit_first_field=False,
+                                 omit_type_name=False,
+                                 is_tuple_like=True)
 
     if type_kind == rustpp.TYPE_KIND_CSTYLE_VARIANT:
         return RustCStyleVariantPrinter(val.get_child_at_index(0))
 
     if type_kind == rustpp.TYPE_KIND_TUPLE_VARIANT:
         return RustStructPrinter(val,
-                                 omit_first_field = True,
-                                 omit_type_name = False,
-                                 is_tuple_like = True)
+                                 omit_first_field=True,
+                                 omit_type_name=False,
+                                 is_tuple_like=True)
 
     if type_kind == rustpp.TYPE_KIND_SINGLETON_ENUM:
         variant = get_field_at_index(gdb_val, 0)
@@ -189,9 +190,9 @@
     return None
 
 
-#=------------------------------------------------------------------------------
+# =------------------------------------------------------------------------------
 # Pretty Printer Classes
-#=------------------------------------------------------------------------------
+# =------------------------------------------------------------------------------
 class RustEmptyPrinter(object):
     def __init__(self, val):
         self.__val = val
@@ -355,6 +356,7 @@
             else:
                 yield keys[i]['value']['value']
 
+
 class RustStdBTreeSetPrinter(object):
     def __init__(self, val):
         self.__val = val
@@ -429,6 +431,7 @@
     def display_hint(self):
         return "string"
 
+
 class RustCStyleVariantPrinter(object):
     def __init__(self, val):
         assert val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_ENUM
diff --git a/src/etc/generate-deriving-span-tests.py b/src/etc/generate-deriving-span-tests.py
index afa6bbd..c42f942 100755
--- a/src/etc/generate-deriving-span-tests.py
+++ b/src/etc/generate-deriving-span-tests.py
@@ -8,7 +8,8 @@
 sample usage: src/etc/generate-deriving-span-tests.py
 """
 
-import os, stat
+import os
+import stat
 
 TEST_DIR = os.path.abspath(
     os.path.join(os.path.dirname(__file__), '../test/ui/derives/'))
@@ -56,6 +57,7 @@
 
 ENUM_TUPLE, ENUM_STRUCT, STRUCT_FIELDS, STRUCT_TUPLE = range(4)
 
+
 def create_test_case(type, trait, super_traits, error_count):
     string = [ENUM_STRING, ENUM_STRUCT_VARIANT_STRING, STRUCT_STRING, STRUCT_TUPLE_STRING][type]
     all_traits = ','.join([trait] + super_traits)
@@ -63,8 +65,9 @@
     error_deriving = '#[derive(%s)]' % super_traits if super_traits else ''
 
     errors = '\n'.join('//~%s ERROR' % ('^' * n) for n in range(error_count))
-    code = string.format(traits = all_traits, errors = errors)
-    return TEMPLATE.format(error_deriving=error_deriving, code = code)
+    code = string.format(traits=all_traits, errors=errors)
+    return TEMPLATE.format(error_deriving=error_deriving, code=code)
+
 
 def write_file(name, string):
     test_file = os.path.join(TEST_DIR, 'derives-span-%s.rs' % name)
@@ -86,10 +89,10 @@
 
 traits = {
     'Default': (STRUCT, [], 1),
-    'FromPrimitive': (0, [], 0), # only works for C-like enums
+    'FromPrimitive': (0, [], 0),  # only works for C-like enums
 
-    'Decodable': (0, [], 0), # FIXME: quoting gives horrible spans
-    'Encodable': (0, [], 0), # FIXME: quoting gives horrible spans
+    'Decodable': (0, [], 0),  # FIXME: quoting gives horrible spans
+    'Encodable': (0, [], 0),  # FIXME: quoting gives horrible spans
 }
 
 for (trait, supers, errs) in [('Clone', [], 1),
diff --git a/src/etc/generate-keyword-tests.py b/src/etc/generate-keyword-tests.py
index bc046a8..77c3d27 100755
--- a/src/etc/generate-keyword-tests.py
+++ b/src/etc/generate-keyword-tests.py
@@ -11,7 +11,6 @@
 
 import sys
 import os
-import datetime
 import stat
 
 
diff --git a/src/etc/htmldocck.py b/src/etc/htmldocck.py
index e8be2b9..7789b24 100644
--- a/src/etc/htmldocck.py
+++ b/src/etc/htmldocck.py
@@ -131,6 +131,7 @@
 except NameError:
     unichr = chr
 
+
 class CustomHTMLParser(HTMLParser):
     """simplified HTML parser.
 
@@ -169,21 +170,25 @@
         HTMLParser.close(self)
         return self.__builder.close()
 
+
 Command = namedtuple('Command', 'negated cmd args lineno context')
 
+
 class FailedCheck(Exception):
     pass
 
+
 class InvalidCheck(Exception):
     pass
 
+
 def concat_multi_lines(f):
     """returns a generator out of the file object, which
     - removes `\\` then `\n` then a shared prefix with the previous line then
       optional whitespace;
     - keeps a line number (starting from 0) of the first line being
       concatenated."""
-    lastline = None # set to the last line when the last line has a backslash
+    lastline = None  # set to the last line when the last line has a backslash
     firstlineno = None
     catenated = ''
     for lineno, line in enumerate(f):
@@ -208,6 +213,7 @@
     if lastline is not None:
         print_err(lineno, line, 'Trailing backslash at the end of the file')
 
+
 LINE_PATTERN = re.compile(r'''
     (?<=(?<!\S)@)(?P<negated>!?)
     (?P<cmd>[A-Za-z]+(?:-[A-Za-z]+)*)
@@ -252,7 +258,7 @@
 
 def normalize_xpath(path):
     if path.startswith('//'):
-        return '.' + path # avoid warnings
+        return '.' + path  # avoid warnings
     elif path.startswith('.//'):
         return path
     else:
@@ -316,7 +322,7 @@
 
 def check_string(data, pat, regexp):
     if not pat:
-        return True # special case a presence testing
+        return True  # special case: a presence test
     elif regexp:
         return re.search(pat, data, flags=re.UNICODE) is not None
     else:
@@ -353,7 +359,7 @@
                 ret = check_string(value, pat, regexp)
                 if ret:
                     break
-    except Exception as e:
+    except Exception:
         print('Failed to get path "{}"'.format(path))
         raise
     return ret
@@ -363,6 +369,7 @@
     path = normalize_xpath(path)
     return len(tree.findall(path))
 
+
 def stderr(*args):
     if sys.version_info.major < 3:
         file = codecs.getwriter('utf-8')(sys.stderr)
@@ -371,6 +378,7 @@
 
     print(*args, file=file)
 
+
 def print_err(lineno, context, err, message=None):
     global ERR_COUNT
     ERR_COUNT += 1
@@ -381,31 +389,33 @@
     if context:
         stderr("\t{}".format(context))
 
+
 ERR_COUNT = 0
 
+
 def check_command(c, cache):
     try:
         cerr = ""
-        if c.cmd == 'has' or c.cmd == 'matches': # string test
+        if c.cmd == 'has' or c.cmd == 'matches':  # string test
             regexp = (c.cmd == 'matches')
-            if len(c.args) == 1 and not regexp: # @has <path> = file existence
+            if len(c.args) == 1 and not regexp:  # @has <path> = file existence
                 try:
                     cache.get_file(c.args[0])
                     ret = True
                 except FailedCheck as err:
                     cerr = str(err)
                     ret = False
-            elif len(c.args) == 2: # @has/matches <path> <pat> = string test
+            elif len(c.args) == 2:  # @has/matches <path> <pat> = string test
                 cerr = "`PATTERN` did not match"
                 ret = check_string(cache.get_file(c.args[0]), c.args[1], regexp)
-            elif len(c.args) == 3: # @has/matches <path> <pat> <match> = XML tree test
+            elif len(c.args) == 3:  # @has/matches <path> <pat> <match> = XML tree test
                 cerr = "`XPATH PATTERN` did not match"
                 tree = cache.get_tree(c.args[0])
                 pat, sep, attr = c.args[1].partition('/@')
-                if sep: # attribute
+                if sep:  # attribute
                     tree = cache.get_tree(c.args[0])
                     ret = check_tree_attr(tree, pat, attr, c.args[2], regexp)
-                else: # normalized text
+                else:  # normalized text
                     pat = c.args[1]
                     if pat.endswith('/text()'):
                         pat = pat[:-7]
@@ -413,16 +423,16 @@
             else:
                 raise InvalidCheck('Invalid number of @{} arguments'.format(c.cmd))
 
-        elif c.cmd == 'count': # count test
-            if len(c.args) == 3: # @count <path> <pat> <count> = count test
+        elif c.cmd == 'count':  # count test
+            if len(c.args) == 3:  # @count <path> <pat> <count> = count test
                 expected = int(c.args[2])
                 found = get_tree_count(cache.get_tree(c.args[0]), c.args[1])
                 cerr = "Expected {} occurrences but found {}".format(expected, found)
                 ret = expected == found
             else:
                 raise InvalidCheck('Invalid number of @{} arguments'.format(c.cmd))
-        elif c.cmd == 'has-dir': # has-dir test
-            if len(c.args) == 1: # @has-dir <path> = has-dir test
+        elif c.cmd == 'has-dir':  # has-dir test
+            if len(c.args) == 1:  # @has-dir <path> = has-dir test
                 try:
                     cache.get_dir(c.args[0])
                     ret = True
@@ -448,11 +458,13 @@
     except InvalidCheck as err:
         print_err(c.lineno, c.context, str(err))
 
+
 def check(target, commands):
     cache = CachedFiles(target)
     for c in commands:
         check_command(c, cache)
 
+
 if __name__ == '__main__':
     if len(sys.argv) != 3:
         stderr('Usage: {} <doc dir> <template>'.format(sys.argv[0]))
diff --git a/src/etc/installer/gfx/rust-logo.png b/src/etc/installer/gfx/rust-logo.png
index 2c3de30..99ee750 100644
--- a/src/etc/installer/gfx/rust-logo.png
+++ b/src/etc/installer/gfx/rust-logo.png
Binary files differ
diff --git a/src/etc/lldb_batchmode.py b/src/etc/lldb_batchmode.py
index 7c2e914..d9c4bc5 100644
--- a/src/etc/lldb_batchmode.py
+++ b/src/etc/lldb_batchmode.py
@@ -157,6 +157,7 @@
 # ~main
 ####################################################################################################
 
+
 if len(sys.argv) != 3:
     print("usage: python lldb_batchmode.py target-path script-path")
     sys.exit(1)
diff --git a/src/etc/lldb_rust_formatters.py b/src/etc/lldb_rust_formatters.py
index fdc1c4f..0c4021b 100644
--- a/src/etc/lldb_rust_formatters.py
+++ b/src/etc/lldb_rust_formatters.py
@@ -1,10 +1,10 @@
 import lldb
-import re
 import debugger_pretty_printers_common as rustpp
 
-#===============================================================================
+# ===============================================================================
 # LLDB Pretty Printing Module for Rust
-#===============================================================================
+# ===============================================================================
+
 
 class LldbType(rustpp.Type):
 
@@ -84,16 +84,16 @@
         type_kind == rustpp.TYPE_KIND_EMPTY):
         return print_struct_val(val,
                                 internal_dict,
-                                omit_first_field = False,
-                                omit_type_name = False,
-                                is_tuple_like = False)
+                                omit_first_field=False,
+                                omit_type_name=False,
+                                is_tuple_like=False)
 
     if type_kind == rustpp.TYPE_KIND_STRUCT_VARIANT:
         return print_struct_val(val,
                                 internal_dict,
-                                omit_first_field = True,
-                                omit_type_name = False,
-                                is_tuple_like = False)
+                                omit_first_field=True,
+                                omit_type_name=False,
+                                is_tuple_like=False)
 
     if type_kind == rustpp.TYPE_KIND_SLICE:
         return print_vec_slice_val(val, internal_dict)
@@ -110,16 +110,16 @@
     if type_kind == rustpp.TYPE_KIND_TUPLE:
         return print_struct_val(val,
                                 internal_dict,
-                                omit_first_field = False,
-                                omit_type_name = True,
-                                is_tuple_like = True)
+                                omit_first_field=False,
+                                omit_type_name=True,
+                                is_tuple_like=True)
 
     if type_kind == rustpp.TYPE_KIND_TUPLE_STRUCT:
         return print_struct_val(val,
                                 internal_dict,
-                                omit_first_field = False,
-                                omit_type_name = False,
-                                is_tuple_like = True)
+                                omit_first_field=False,
+                                omit_type_name=False,
+                                is_tuple_like=True)
 
     if type_kind == rustpp.TYPE_KIND_CSTYLE_VARIANT:
         return val.type.get_unqualified_type_name()
@@ -127,9 +127,9 @@
     if type_kind == rustpp.TYPE_KIND_TUPLE_VARIANT:
         return print_struct_val(val,
                                 internal_dict,
-                                omit_first_field = True,
-                                omit_type_name = False,
-                                is_tuple_like = True)
+                                omit_first_field=True,
+                                omit_type_name=False,
+                                is_tuple_like=True)
 
     if type_kind == rustpp.TYPE_KIND_SINGLETON_ENUM:
         return print_val(lldb_val.GetChildAtIndex(0), internal_dict)
@@ -157,9 +157,9 @@
     return lldb_val.GetValue()
 
 
-#=--------------------------------------------------------------------------------------------------
+# =---------------------------------------------------------------------------------------
 # Type-Specialized Printing Functions
-#=--------------------------------------------------------------------------------------------------
+# =---------------------------------------------------------------------------------------
 
 def print_struct_val(val, internal_dict, omit_first_field, omit_type_name, is_tuple_like):
     """
@@ -212,6 +212,7 @@
     return template % {"type_name": type_name,
                        "body": body}
 
+
 def print_pointer_val(val, internal_dict):
     """Prints a pointer value with Rust syntax"""
     assert val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_PTR
@@ -253,18 +254,21 @@
                                               length,
                                               internal_dict)
 
+
 def print_str_slice_val(val, internal_dict):
     (length, data_ptr) = rustpp.extract_length_and_ptr_from_slice(val)
     return read_utf8_string(data_ptr, length)
 
+
 def print_std_string_val(val, internal_dict):
     vec = val.get_child_at_index(0)
     (length, data_ptr, cap) = rustpp.extract_length_ptr_and_cap_from_std_vec(vec)
     return read_utf8_string(data_ptr, length)
 
-#=--------------------------------------------------------------------------------------------------
+# =-----------------------------------------------------------------------
 # Helper Functions
-#=--------------------------------------------------------------------------------------------------
+# =-----------------------------------------------------------------------
+
 
 def print_array_of_values(array_name, data_ptr_val, length, internal_dict):
     """Prints a contiguous memory range, interpreting it as values of the
diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 9fb0de6..f41404b 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -200,21 +200,27 @@
         align as *mut u8
     } else {
         let layout = Layout::from_size_align_unchecked(size, align);
-        let ptr = alloc(layout);
-        if !ptr.is_null() { ptr } else { handle_alloc_error(layout) }
+        match Global.alloc(layout) {
+            Ok(ptr) => ptr.as_ptr(),
+            Err(_) => handle_alloc_error(layout),
+        }
     }
 }
 
 #[cfg_attr(not(test), lang = "box_free")]
 #[inline]
+// This signature has to be the same as `Box`, otherwise an ICE will happen.
+// When an additional parameter to `Box` is added (like `A: AllocRef`), this has to be added here as
+// well.
+// For example if `Box` is changed to  `struct Box<T: ?Sized, A: AllocRef>(Unique<T>, A)`,
+// this function has to be changed to `fn box_free<T: ?Sized, A: AllocRef>(Unique<T>, A)` as well.
 pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
-    let ptr = ptr.as_ptr();
-    let size = size_of_val(&*ptr);
-    let align = min_align_of_val(&*ptr);
+    let size = size_of_val(ptr.as_ref());
+    let align = min_align_of_val(ptr.as_ref());
     // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
     if size != 0 {
         let layout = Layout::from_size_align_unchecked(size, align);
-        dealloc(ptr as *mut u8, layout);
+        Global.dealloc(ptr.cast().into(), layout);
     }
 }
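
The comment above states the `Box`/`box_free` invariant in prose; here is a minimal stand-alone sketch of the required pairing, using hypothetical stand-in types (with `NonNull` in place of the internal `Unique`), not the real lang items:

```rust
use std::ptr::NonNull;

// Stand-in for `Box`: a single type parameter today.
struct FakeBox<T: ?Sized>(NonNull<T>);

// The matching free function declares exactly the same generic parameters.
unsafe fn fake_box_free<T: ?Sized>(_ptr: NonNull<T>) {
    // Deallocation elided; only the signature matters for the invariant.
}

// If `FakeBox` ever grew an allocator parameter, e.g.
//     struct FakeBox<T: ?Sized, A>(NonNull<T>, A);
// then the free function would have to follow in lockstep:
//     unsafe fn fake_box_free<T: ?Sized, A>(ptr: NonNull<T>, alloc: A)
```
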
 
diff --git a/src/liballoc/benches/btree/map.rs b/src/liballoc/benches/btree/map.rs
index ea69769..83cdebf 100644
--- a/src/liballoc/benches/btree/map.rs
+++ b/src/liballoc/benches/btree/map.rs
@@ -1,5 +1,6 @@
 use std::collections::BTreeMap;
 use std::iter::Iterator;
+use std::ops::Bound::{Excluded, Unbounded};
 use std::vec::Vec;
 
 use rand::{seq::SliceRandom, thread_rng, Rng};
@@ -200,3 +201,58 @@
 pub fn first_and_last_10k(b: &mut Bencher) {
     bench_first_and_last(b, 10_000);
 }
+
+#[bench]
+pub fn range_excluded_excluded(b: &mut Bencher) {
+    let size = 144;
+    let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
+    b.iter(|| {
+        for first in 0..size {
+            for last in first + 1..size {
+                black_box(map.range((Excluded(first), Excluded(last))));
+            }
+        }
+    });
+}
+
+#[bench]
+pub fn range_excluded_unbounded(b: &mut Bencher) {
+    let size = 144;
+    let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
+    b.iter(|| {
+        for first in 0..size {
+            black_box(map.range((Excluded(first), Unbounded)));
+        }
+    });
+}
+
+#[bench]
+pub fn range_included_included(b: &mut Bencher) {
+    let size = 144;
+    let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
+    b.iter(|| {
+        for first in 0..size {
+            for last in first..size {
+                black_box(map.range(first..=last));
+            }
+        }
+    });
+}
+
+#[bench]
+pub fn range_included_unbounded(b: &mut Bencher) {
+    let size = 144;
+    let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
+    b.iter(|| {
+        for first in 0..size {
+            black_box(map.range(first..));
+        }
+    });
+}
+
+#[bench]
+pub fn range_unbounded_unbounded(b: &mut Bencher) {
+    let size = 144;
+    let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
+    b.iter(|| map.range(..));
+}
diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs
index 7e5efbe..3ac4bd8 100644
--- a/src/liballoc/boxed.rs
+++ b/src/liballoc/boxed.rs
@@ -196,12 +196,14 @@
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
         let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
-        if layout.size() == 0 {
-            return Box(NonNull::dangling().into());
+        unsafe {
+            let ptr = if layout.size() == 0 {
+                NonNull::dangling()
+            } else {
+                Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).cast()
+            };
+            Box::from_raw(ptr.as_ptr())
         }
-        let ptr =
-            unsafe { Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)) };
-        Box(ptr.cast().into())
     }
 
     /// Constructs a new `Box` with uninitialized contents, with the memory
@@ -264,15 +266,14 @@
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
         let layout = alloc::Layout::array::<mem::MaybeUninit<T>>(len).unwrap();
-        let ptr = if layout.size() == 0 {
-            NonNull::dangling()
-        } else {
-            unsafe {
+        unsafe {
+            let ptr = if layout.size() == 0 {
+                NonNull::dangling()
+            } else {
                 Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).cast()
-            }
-        };
-        let slice = unsafe { slice::from_raw_parts_mut(ptr.as_ptr(), len) };
-        Box(Unique::from(slice))
+            };
+            Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len))
+        }
     }
 }
 
@@ -308,7 +309,7 @@
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub unsafe fn assume_init(self) -> Box<T> {
-        Box(Box::into_unique(self).cast())
+        Box::from_raw(Box::into_raw(self) as *mut T)
     }
 }
 
@@ -346,7 +347,7 @@
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub unsafe fn assume_init(self) -> Box<[T]> {
-        Box(Unique::new_unchecked(Box::into_raw(self) as _))
+        Box::from_raw(Box::into_raw(self) as *mut [T])
     }
 }
 
@@ -1104,6 +1105,7 @@
 #[stable(feature = "pin", since = "1.33.0")]
 impl<T: ?Sized> Unpin for Box<T> {}
 
+#[cfg(bootstrap)]
 #[unstable(feature = "generator_trait", issue = "43122")]
 impl<G: ?Sized + Generator + Unpin> Generator for Box<G> {
     type Yield = G::Yield;
@@ -1114,6 +1116,7 @@
     }
 }
 
+#[cfg(bootstrap)]
 #[unstable(feature = "generator_trait", issue = "43122")]
 impl<G: ?Sized + Generator> Generator for Pin<Box<G>> {
     type Yield = G::Yield;
@@ -1124,6 +1127,28 @@
     }
 }
 
+#[cfg(not(bootstrap))]
+#[unstable(feature = "generator_trait", issue = "43122")]
+impl<G: ?Sized + Generator<R> + Unpin, R> Generator<R> for Box<G> {
+    type Yield = G::Yield;
+    type Return = G::Return;
+
+    fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return> {
+        G::resume(Pin::new(&mut *self), arg)
+    }
+}
+
+#[cfg(not(bootstrap))]
+#[unstable(feature = "generator_trait", issue = "43122")]
+impl<G: ?Sized + Generator<R>, R> Generator<R> for Pin<Box<G>> {
+    type Yield = G::Yield;
+    type Return = G::Return;
+
+    fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return> {
+        G::resume((*self).as_mut(), arg)
+    }
+}
+
 #[stable(feature = "futures_api", since = "1.36.0")]
 impl<F: ?Sized + Future + Unpin> Future for Box<F> {
     type Output = F::Output;
diff --git a/src/liballoc/collections/binary_heap.rs b/src/liballoc/collections/binary_heap.rs
index c527b37..f38fe99 100644
--- a/src/liballoc/collections/binary_heap.rs
+++ b/src/liballoc/collections/binary_heap.rs
@@ -147,7 +147,7 @@
 
 use core::fmt;
 use core::iter::{FromIterator, FusedIterator, TrustedLen};
-use core::mem::{size_of, swap, ManuallyDrop};
+use core::mem::{self, size_of, swap, ManuallyDrop};
 use core::ops::{Deref, DerefMut};
 use core::ptr;
 
@@ -1239,7 +1239,19 @@
 impl<'a, T: Ord> Drop for DrainSorted<'a, T> {
     /// Removes heap elements in heap order.
     fn drop(&mut self) {
-        while let Some(_) = self.inner.pop() {}
+        struct DropGuard<'r, 'a, T: Ord>(&'r mut DrainSorted<'a, T>);
+
+        impl<'r, 'a, T: Ord> Drop for DropGuard<'r, 'a, T> {
+            fn drop(&mut self) {
+                while let Some(_) = self.0.inner.pop() {}
+            }
+        }
+
+        while let Some(item) = self.inner.pop() {
+            let guard = DropGuard(self);
+            drop(item);
+            mem::forget(guard);
+        }
     }
 }
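
The same drop-guard shape recurs several times in this diff (`BTreeMap`'s `IntoIter`, `LinkedList`'s `DrainFilter`, and `VecDeque`'s `Drain` below all gain it): drain by hand, and if dropping one element panics, let a guard finish the drain during unwinding so the remaining elements are not leaked. A stand-alone sketch of the idiom on a made-up draining iterator (`MyDrain` and `Token` are hypothetical, not liballoc types):

```rust
use std::mem;

// Element type whose destructor may panic -- the case the guard protects against.
struct Token(bool);

impl Drop for Token {
    fn drop(&mut self) {
        if self.0 {
            panic!("panic in `drop`");
        }
    }
}

// A made-up draining iterator that pops elements out of an owned buffer.
struct MyDrain(Vec<Token>);

impl Iterator for MyDrain {
    type Item = Token;
    fn next(&mut self) -> Option<Token> {
        self.0.pop()
    }
}

impl Drop for MyDrain {
    fn drop(&mut self) {
        // Finishes the drain during unwinding so no element is leaked.
        struct DropGuard<'a>(&'a mut MyDrain);

        impl Drop for DropGuard<'_> {
            fn drop(&mut self) {
                // Only reached while unwinding; a second panic would abort,
                // so no further protection is needed here.
                while let Some(_) = self.0.next() {}
            }
        }

        while let Some(item) = self.next() {
            // Arm the guard before dropping `item` ...
            let guard = DropGuard(self);
            drop(item);
            // ... and disarm it once the drop completed normally.
            mem::forget(guard);
        }
    }
}
```
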
 
diff --git a/src/liballoc/collections/btree/map.rs b/src/liballoc/collections/btree/map.rs
index e62855f..8b9ffdf 100644
--- a/src/liballoc/collections/btree/map.rs
+++ b/src/liballoc/collections/btree/map.rs
@@ -227,7 +227,7 @@
 impl<K: Clone + Ord, V: Clone> BTreeClone for BTreeMap<K, V> {
     fn clone_from(&mut self, other: &Self) {
         // This truncates `self` to `other.len()` by calling `split_off` on
-        // the first key after `other.len()` elements if it exists
+        // the first key after `other.len()` elements if it exists.
         let split_off_key = if self.len() > other.len() {
             let diff = self.len() - other.len();
             if diff <= other.len() {
@@ -247,11 +247,10 @@
         // After truncation, `self` is at most as long as `other` so this loop
         // replaces every key-value pair in `self`. Since `oiter` is in sorted
         // order and the structure of the `BTreeMap` stays the same,
-        // the BTree invariants are maintained at the end of the loop
+        // the BTree invariants are maintained at the end of the loop.
         while !siter.is_empty() {
             if let Some((ok, ov)) = oiter.next() {
-                // SAFETY: This is safe because the `siter.front != siter.back` check
-                // ensures that `siter` is nonempty
+                // SAFETY: This is safe because `siter` is nonempty.
                 let (sk, sv) = unsafe { siter.next_unchecked() };
                 sk.clone_from(ok);
                 sv.clone_from(ov);
@@ -259,7 +258,7 @@
                 break;
             }
         }
-        // If `other` is longer than `self`, the remaining elements are inserted
+        // If `other` is longer than `self`, the remaining elements are inserted.
         self.extend(oiter.map(|(k, v)| ((*k).clone(), (*v).clone())));
     }
 }
@@ -1471,7 +1470,22 @@
 #[stable(feature = "btree_drop", since = "1.7.0")]
 impl<K, V> Drop for IntoIter<K, V> {
     fn drop(&mut self) {
-        self.for_each(drop);
+        struct DropGuard<'a, K, V>(&'a mut IntoIter<K, V>);
+
+        impl<'a, K, V> Drop for DropGuard<'a, K, V> {
+            fn drop(&mut self) {
+                // Continue the same loop we perform below. This only runs when unwinding, so we
+                // don't have to care about panics this time (they'll abort).
+                while let Some(_) = self.0.next() {}
+            }
+        }
+
+        while let Some(pair) = self.next() {
+            let guard = DropGuard(self);
+            drop(pair);
+            mem::forget(guard);
+        }
+
         unsafe {
             let mut node = ptr::read(&self.front).into_node().forget_type();
             if node.is_shared_root() {
@@ -1862,65 +1876,51 @@
     let mut max_node = root2;
     let mut min_found = false;
     let mut max_found = false;
-    let mut diverged = false;
 
     loop {
-        let min_edge = match (min_found, range.start_bound()) {
-            (false, Included(key)) => match search::search_linear(&min_node, key) {
-                (i, true) => {
+        let front = match (min_found, range.start_bound()) {
+            (false, Included(key)) => match search::search_node(min_node, key) {
+                Found(kv) => {
                     min_found = true;
-                    i
+                    kv.left_edge()
                 }
-                (i, false) => i,
+                GoDown(edge) => edge,
             },
-            (false, Excluded(key)) => match search::search_linear(&min_node, key) {
-                (i, true) => {
+            (false, Excluded(key)) => match search::search_node(min_node, key) {
+                Found(kv) => {
                     min_found = true;
-                    i + 1
+                    kv.right_edge()
                 }
-                (i, false) => i,
+                GoDown(edge) => edge,
             },
-            (_, Unbounded) => 0,
-            (true, Included(_)) => min_node.len(),
-            (true, Excluded(_)) => 0,
+            (true, Included(_)) => min_node.last_edge(),
+            (true, Excluded(_)) => min_node.first_edge(),
+            (_, Unbounded) => min_node.first_edge(),
         };
 
-        let max_edge = match (max_found, range.end_bound()) {
-            (false, Included(key)) => match search::search_linear(&max_node, key) {
-                (i, true) => {
+        let back = match (max_found, range.end_bound()) {
+            (false, Included(key)) => match search::search_node(max_node, key) {
+                Found(kv) => {
                     max_found = true;
-                    i + 1
+                    kv.right_edge()
                 }
-                (i, false) => i,
+                GoDown(edge) => edge,
             },
-            (false, Excluded(key)) => match search::search_linear(&max_node, key) {
-                (i, true) => {
+            (false, Excluded(key)) => match search::search_node(max_node, key) {
+                Found(kv) => {
                     max_found = true;
-                    i
+                    kv.left_edge()
                 }
-                (i, false) => i,
+                GoDown(edge) => edge,
             },
-            (_, Unbounded) => max_node.len(),
-            (true, Included(_)) => 0,
-            (true, Excluded(_)) => max_node.len(),
+            (true, Included(_)) => max_node.first_edge(),
+            (true, Excluded(_)) => max_node.last_edge(),
+            (_, Unbounded) => max_node.last_edge(),
         };
 
-        if !diverged {
-            if max_edge < min_edge {
-                panic!("Ord is ill-defined in BTreeMap range")
-            }
-            if min_edge != max_edge {
-                diverged = true;
-            }
+        if front.partial_cmp(&back) == Some(Ordering::Greater) {
+            panic!("Ord is ill-defined in BTreeMap range");
         }
-
-        // Safety guarantee: `min_edge` is always in range for `min_node`, because
-        // `min_edge` is unconditionally calculated for each iteration's value of `min_node`,
-        // either (if not found) as the edge index returned by `search_linear`,
-        // or (if found) as the KV index returned by `search_linear`, possibly + 1.
-        // Likewise for `max_node` versus `max_edge`.
-        let front = unsafe { Handle::new_edge(min_node, min_edge) };
-        let back = unsafe { Handle::new_edge(max_node, max_edge) };
         match (front.force(), back.force()) {
             (Leaf(f), Leaf(b)) => {
                 return (f, b);
diff --git a/src/liballoc/collections/btree/node.rs b/src/liballoc/collections/btree/node.rs
index 62b96ab..c1bd68a 100644
--- a/src/liballoc/collections/btree/node.rs
+++ b/src/liballoc/collections/btree/node.rs
@@ -31,6 +31,7 @@
 // - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
 //   This implies that even an empty internal node has at least one edge.
 
+use core::cmp::Ordering;
 use core::marker::PhantomData;
 use core::mem::{self, MaybeUninit};
 use core::ptr::{self, NonNull, Unique};
@@ -826,6 +827,14 @@
     }
 }
 
+impl<BorrowType, K, V, NodeType, HandleType> PartialOrd
+    for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        if self.node.node == other.node.node { Some(self.idx.cmp(&other.idx)) } else { None }
+    }
+}
+
 impl<BorrowType, K, V, NodeType, HandleType>
     Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
 {
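
This `PartialOrd` impl only yields an ordering for two handles into the same node; handles into different nodes compare as `None`. That is what the `range_search` check above leans on: only a definite `Greater` within one node trips the "Ord is ill-defined" panic. A small self-contained model of that behaviour (a toy `Handle`, not the real one):

```rust
use std::cmp::Ordering;

// Toy model: a position identified by a node id plus an index within the node.
#[derive(PartialEq)]
struct Handle {
    node: usize,
    idx: usize,
}

impl PartialOrd for Handle {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Comparable only within the same node, mirroring the impl above.
        if self.node == other.node { Some(self.idx.cmp(&other.idx)) } else { None }
    }
}

fn main() {
    let front = Handle { node: 7, idx: 3 };
    let back = Handle { node: 7, idx: 1 };
    // Same node: a definite ordering, so `Greater` here would be flagged.
    assert_eq!(front.partial_cmp(&back), Some(Ordering::Greater));

    // Different nodes: no ordering, so the sanity check stays silent.
    let elsewhere = Handle { node: 8, idx: 0 };
    assert_eq!(front.partial_cmp(&elsewhere), None);
}
```
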
diff --git a/src/liballoc/collections/btree/search.rs b/src/liballoc/collections/btree/search.rs
index e680e36..2ba5ceb 100644
--- a/src/liballoc/collections/btree/search.rs
+++ b/src/liballoc/collections/btree/search.rs
@@ -10,6 +10,10 @@
     GoDown(Handle<NodeRef<BorrowType, K, V, GoDownType>, marker::Edge>),
 }
 
+/// Looks up a given key in a (sub)tree headed by the given node, recursively.
+/// Returns a `Found` with the handle of the matching KV, if any. Otherwise,
+/// returns a `GoDown` with the handle of the possible leaf edge where the key
+/// belongs.
 pub fn search_tree<BorrowType, K, V, Q: ?Sized>(
     mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
     key: &Q,
@@ -32,6 +36,10 @@
     }
 }
 
+/// Looks up a given key in a given node, without recursion.
+/// Returns a `Found` with the handle of the matching KV, if any. Otherwise,
+/// returns a `GoDown` with the handle of the edge where the key might be found.
+/// If the node is a leaf, a `GoDown` edge is not an actual edge but a possible edge.
 pub fn search_node<BorrowType, K, V, Type, Q: ?Sized>(
     node: NodeRef<BorrowType, K, V, Type>,
     key: &Q,
@@ -50,8 +58,8 @@
 /// or could exist, and whether it exists in the node itself. If it doesn't
 /// exist in the node itself, it may exist in the subtree with that index
 /// (if the node has subtrees). If the key doesn't exist in node or subtree,
-/// the returned index is the position or subtree to insert at.
-pub fn search_linear<BorrowType, K, V, Type, Q: ?Sized>(
+/// the returned index is the position or subtree where the key belongs.
+fn search_linear<BorrowType, K, V, Type, Q: ?Sized>(
     node: &NodeRef<BorrowType, K, V, Type>,
     key: &Q,
 ) -> (usize, bool)
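
The new doc comments spell out the `Found`/`GoDown` contract in prose. Below is a stripped-down model of how a `get`-style caller consumes it (the enum and types are illustrative, not the real `SearchResult` or handle types): a hit yields the value, while a leaf-level `GoDown` is only a "possible edge" and means the key is absent.

```rust
// A stripped-down model of the documented contract.
enum SearchResult<V, E> {
    Found(V),
    GoDown(E),
}

// How a `get`-style caller consumes it.
fn lookup<V>(result: SearchResult<V, usize>) -> Option<V> {
    match result {
        SearchResult::Found(value) => Some(value),
        SearchResult::GoDown(_edge) => None,
    }
}

fn main() {
    assert_eq!(lookup(SearchResult::Found("v")), Some("v"));
    assert_eq!(lookup::<&str>(SearchResult::GoDown(0)), None);
}
```
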
diff --git a/src/liballoc/collections/linked_list.rs b/src/liballoc/collections/linked_list.rs
index b88ca8a..a9b4e3e 100644
--- a/src/liballoc/collections/linked_list.rs
+++ b/src/liballoc/collections/linked_list.rs
@@ -878,6 +878,52 @@
         unsafe { self.split_off_after_node(split_node, at) }
     }
 
+    /// Removes the element at the given index and returns it.
+    ///
+    /// This operation should compute in O(n) time.
+    ///
+    /// # Panics
+    /// Panics if at >= len
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(linked_list_remove)]
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut d = LinkedList::new();
+    ///
+    /// d.push_front(1);
+    /// d.push_front(2);
+    /// d.push_front(3);
+    ///
+    /// assert_eq!(d.remove(1), 2);
+    /// assert_eq!(d.remove(0), 3);
+    /// assert_eq!(d.remove(0), 1);
+    /// ```
+    #[unstable(feature = "linked_list_remove", issue = "69210")]
+    pub fn remove(&mut self, at: usize) -> T {
+        let len = self.len();
+        assert!(at < len, "Cannot remove at an index outside of the list bounds");
+
+        // Below, we iterate towards the node at the given index, either from
+        // the start or the end, depending on which would be faster.
+        let offset_from_end = len - at - 1;
+        if at <= offset_from_end {
+            let mut cursor = self.cursor_front_mut();
+            for _ in 0..at {
+                cursor.move_next();
+            }
+            cursor.remove_current().unwrap()
+        } else {
+            let mut cursor = self.cursor_back_mut();
+            for _ in 0..offset_from_end {
+                cursor.move_prev();
+            }
+            cursor.remove_current().unwrap()
+        }
+    }
+
     /// Creates an iterator which uses a closure to determine if an element should be removed.
     ///
     /// If the closure returns true, then the element is removed and yielded.
@@ -1565,7 +1611,24 @@
     F: FnMut(&mut T) -> bool,
 {
     fn drop(&mut self) {
-        self.for_each(drop);
+        struct DropGuard<'r, 'a, T, F>(&'r mut DrainFilter<'a, T, F>)
+        where
+            F: FnMut(&mut T) -> bool;
+
+        impl<'r, 'a, T, F> Drop for DropGuard<'r, 'a, T, F>
+        where
+            F: FnMut(&mut T) -> bool,
+        {
+            fn drop(&mut self) {
+                self.0.for_each(drop);
+            }
+        }
+
+        while let Some(item) = self.next() {
+            let guard = DropGuard(self);
+            drop(item);
+            mem::forget(guard);
+        }
     }
 }
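
Earlier in this file, `LinkedList::remove` walks from whichever end is closer to the target index, so the walk never exceeds half the list length. A tiny worked example of that arithmetic (the helper is made up for illustration):

```rust
// Number of cursor moves `remove(at)` performs on a list of length `len`,
// mirroring the "walk from the nearer end" choice above.
fn steps_walked(len: usize, at: usize) -> usize {
    let offset_from_end = len - at - 1;
    at.min(offset_from_end)
}

fn main() {
    assert_eq!(steps_walked(10, 7), 2); // closer to the back: 2 x move_prev
    assert_eq!(steps_walked(10, 2), 2); // closer to the front: 2 x move_next
    assert_eq!(steps_walked(10, 0), 0); // already at the front
}
```
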
 
diff --git a/src/liballoc/collections/vec_deque.rs b/src/liballoc/collections/vec_deque.rs
index 2cc450b..85d1d98 100644
--- a/src/liballoc/collections/vec_deque.rs
+++ b/src/liballoc/collections/vec_deque.rs
@@ -22,6 +22,11 @@
 use crate::raw_vec::RawVec;
 use crate::vec::Vec;
 
+#[stable(feature = "drain", since = "1.6.0")]
+pub use self::drain::Drain;
+
+mod drain;
+
 #[cfg(test)]
 mod tests;
 
@@ -866,6 +871,18 @@
     /// ```
     #[stable(feature = "deque_extras", since = "1.16.0")]
     pub fn truncate(&mut self, len: usize) {
+        /// Runs the destructor for all items in the slice when it gets dropped (normally or
+        /// during unwinding).
+        struct Dropper<'a, T>(&'a mut [T]);
+
+        impl<'a, T> Drop for Dropper<'a, T> {
+            fn drop(&mut self) {
+                unsafe {
+                    ptr::drop_in_place(self.0);
+                }
+            }
+        }
+
         // Safe because:
         //
         // * Any slice passed to `drop_in_place` is valid; the second case has
@@ -888,8 +905,11 @@
                 let drop_back = back as *mut _;
                 let drop_front = front.get_unchecked_mut(len..) as *mut _;
                 self.head = self.wrap_sub(self.head, num_dropped);
+
+                // Make sure the second half is dropped even when a destructor
+                // in the first one panics.
+                let _back_dropper = Dropper(&mut *drop_back);
                 ptr::drop_in_place(drop_front);
-                ptr::drop_in_place(drop_back);
             }
         }
     }
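
The `Dropper` guard above is the slice flavour of the same panic-safety concern: if a destructor in the front half panics, the back half must still be dropped during unwinding. A stand-alone sketch of that shape (the helper and its names are hypothetical):

```rust
use std::ptr;

// Drops every element of the wrapped slice when the guard itself is dropped,
// whether that happens normally or during unwinding.
struct BackDropper<'a, T>(&'a mut [T]);

impl<'a, T> Drop for BackDropper<'a, T> {
    fn drop(&mut self) {
        unsafe { ptr::drop_in_place(self.0) }
    }
}

// Hypothetical helper: drop two disjoint halves, tolerating a panic in the first.
unsafe fn drop_two_halves<T>(front: *mut [T], back: *mut [T]) {
    unsafe {
        // Armed before touching `front`; if dropping the front half panics,
        // this guard still runs while unwinding and drops the back half.
        let _back_dropper = BackDropper(&mut *back);
        ptr::drop_in_place(front);
    }
}
```
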
@@ -2526,113 +2546,6 @@
 #[stable(feature = "fused", since = "1.26.0")]
 impl<T> FusedIterator for IntoIter<T> {}
 
-/// A draining iterator over the elements of a `VecDeque`.
-///
-/// This `struct` is created by the [`drain`] method on [`VecDeque`]. See its
-/// documentation for more.
-///
-/// [`drain`]: struct.VecDeque.html#method.drain
-/// [`VecDeque`]: struct.VecDeque.html
-#[stable(feature = "drain", since = "1.6.0")]
-pub struct Drain<'a, T: 'a> {
-    after_tail: usize,
-    after_head: usize,
-    iter: Iter<'a, T>,
-    deque: NonNull<VecDeque<T>>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<T: fmt::Debug> fmt::Debug for Drain<'_, T> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("Drain")
-            .field(&self.after_tail)
-            .field(&self.after_head)
-            .field(&self.iter)
-            .finish()
-    }
-}
-
-#[stable(feature = "drain", since = "1.6.0")]
-unsafe impl<T: Sync> Sync for Drain<'_, T> {}
-#[stable(feature = "drain", since = "1.6.0")]
-unsafe impl<T: Send> Send for Drain<'_, T> {}
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<T> Drop for Drain<'_, T> {
-    fn drop(&mut self) {
-        self.for_each(drop);
-
-        let source_deque = unsafe { self.deque.as_mut() };
-
-        // T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head
-        //
-        //        T   t   h   H
-        // [. . . o o x x o o . . .]
-        //
-        let orig_tail = source_deque.tail;
-        let drain_tail = source_deque.head;
-        let drain_head = self.after_tail;
-        let orig_head = self.after_head;
-
-        let tail_len = count(orig_tail, drain_tail, source_deque.cap());
-        let head_len = count(drain_head, orig_head, source_deque.cap());
-
-        // Restore the original head value
-        source_deque.head = orig_head;
-
-        match (tail_len, head_len) {
-            (0, 0) => {
-                source_deque.head = 0;
-                source_deque.tail = 0;
-            }
-            (0, _) => {
-                source_deque.tail = drain_head;
-            }
-            (_, 0) => {
-                source_deque.head = drain_tail;
-            }
-            _ => unsafe {
-                if tail_len <= head_len {
-                    source_deque.tail = source_deque.wrap_sub(drain_head, tail_len);
-                    source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len);
-                } else {
-                    source_deque.head = source_deque.wrap_add(drain_tail, head_len);
-                    source_deque.wrap_copy(drain_tail, drain_head, head_len);
-                }
-            },
-        }
-    }
-}
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<T> Iterator for Drain<'_, T> {
-    type Item = T;
-
-    #[inline]
-    fn next(&mut self) -> Option<T> {
-        self.iter.next().map(|elt| unsafe { ptr::read(elt) })
-    }
-
-    #[inline]
-    fn size_hint(&self) -> (usize, Option<usize>) {
-        self.iter.size_hint()
-    }
-}
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<T> DoubleEndedIterator for Drain<'_, T> {
-    #[inline]
-    fn next_back(&mut self) -> Option<T> {
-        self.iter.next_back().map(|elt| unsafe { ptr::read(elt) })
-    }
-}
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<T> ExactSizeIterator for Drain<'_, T> {}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<T> FusedIterator for Drain<'_, T> {}
-
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<A: PartialEq> PartialEq for VecDeque<A> {
     fn eq(&self, other: &VecDeque<A>) -> bool {
diff --git a/src/liballoc/collections/vec_deque/drain.rs b/src/liballoc/collections/vec_deque/drain.rs
new file mode 100644
index 0000000..1ae94de
--- /dev/null
+++ b/src/liballoc/collections/vec_deque/drain.rs
@@ -0,0 +1,126 @@
+use core::iter::FusedIterator;
+use core::ptr::{self, NonNull};
+use core::{fmt, mem};
+
+use super::{count, Iter, VecDeque};
+
+/// A draining iterator over the elements of a `VecDeque`.
+///
+/// This `struct` is created by the [`drain`] method on [`VecDeque`]. See its
+/// documentation for more.
+///
+/// [`drain`]: struct.VecDeque.html#method.drain
+/// [`VecDeque`]: struct.VecDeque.html
+#[stable(feature = "drain", since = "1.6.0")]
+pub struct Drain<'a, T: 'a> {
+    pub(crate) after_tail: usize,
+    pub(crate) after_head: usize,
+    pub(crate) iter: Iter<'a, T>,
+    pub(crate) deque: NonNull<VecDeque<T>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Drain<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("Drain")
+            .field(&self.after_tail)
+            .field(&self.after_head)
+            .field(&self.iter)
+            .finish()
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl<T: Sync> Sync for Drain<'_, T> {}
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl<T: Send> Send for Drain<'_, T> {}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> Drop for Drain<'_, T> {
+    fn drop(&mut self) {
+        struct DropGuard<'r, 'a, T>(&'r mut Drain<'a, T>);
+
+        impl<'r, 'a, T> Drop for DropGuard<'r, 'a, T> {
+            fn drop(&mut self) {
+                self.0.for_each(drop);
+
+                let source_deque = unsafe { self.0.deque.as_mut() };
+
+                // T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head
+                //
+                //        T   t   h   H
+                // [. . . o o x x o o . . .]
+                //
+                let orig_tail = source_deque.tail;
+                let drain_tail = source_deque.head;
+                let drain_head = self.0.after_tail;
+                let orig_head = self.0.after_head;
+
+                let tail_len = count(orig_tail, drain_tail, source_deque.cap());
+                let head_len = count(drain_head, orig_head, source_deque.cap());
+
+                // Restore the original head value
+                source_deque.head = orig_head;
+
+                match (tail_len, head_len) {
+                    (0, 0) => {
+                        source_deque.head = 0;
+                        source_deque.tail = 0;
+                    }
+                    (0, _) => {
+                        source_deque.tail = drain_head;
+                    }
+                    (_, 0) => {
+                        source_deque.head = drain_tail;
+                    }
+                    _ => unsafe {
+                        if tail_len <= head_len {
+                            source_deque.tail = source_deque.wrap_sub(drain_head, tail_len);
+                            source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len);
+                        } else {
+                            source_deque.head = source_deque.wrap_add(drain_tail, head_len);
+                            source_deque.wrap_copy(drain_tail, drain_head, head_len);
+                        }
+                    },
+                }
+            }
+        }
+
+        while let Some(item) = self.next() {
+            let guard = DropGuard(self);
+            drop(item);
+            mem::forget(guard);
+        }
+
+        DropGuard(self);
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> Iterator for Drain<'_, T> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        self.iter.next().map(|elt| unsafe { ptr::read(elt) })
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> DoubleEndedIterator for Drain<'_, T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<T> {
+        self.iter.next_back().map(|elt| unsafe { ptr::read(elt) })
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> ExactSizeIterator for Drain<'_, T> {}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Drain<'_, T> {}
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index e1b549b..1446549 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -280,7 +280,7 @@
             // 0, getting to here necessarily means the `RawVec` is overfull.
             assert!(elem_size != 0, "capacity overflow");
 
-            let (new_cap, uniq) = match self.current_layout() {
+            let (new_cap, ptr) = match self.current_layout() {
                 Some(cur) => {
                     // Since we guarantee that we never allocate more than
                     // `isize::MAX` bytes, `elem_size * self.cap <= isize::MAX` as
@@ -297,7 +297,7 @@
                     alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
                     let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(), cur, new_size);
                     match ptr_res {
-                        Ok(ptr) => (new_cap, ptr.cast().into()),
+                        Ok(ptr) => (new_cap, ptr),
                         Err(_) => handle_alloc_error(Layout::from_size_align_unchecked(
                             new_size,
                             cur.align(),
@@ -308,13 +308,14 @@
                     // Skip to 4 because tiny `Vec`'s are dumb; but not if that
                     // would cause overflow.
                     let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
-                    match self.a.alloc_array::<T>(new_cap) {
-                        Ok(ptr) => (new_cap, ptr.into()),
-                        Err(_) => handle_alloc_error(Layout::array::<T>(new_cap).unwrap()),
+                    let layout = Layout::array::<T>(new_cap).unwrap();
+                    match self.a.alloc(layout) {
+                        Ok(ptr) => (new_cap, ptr),
+                        Err(_) => handle_alloc_error(layout),
                     }
                 }
             };
-            self.ptr = uniq;
+            self.ptr = ptr.cast().into();
             self.cap = new_cap;
         }
     }
diff --git a/src/liballoc/string.rs b/src/liballoc/string.rs
index 96f871d..f5afea1 100644
--- a/src/liballoc/string.rs
+++ b/src/liballoc/string.rs
@@ -319,7 +319,7 @@
 /// assert_eq!(vec![0, 159], value.unwrap_err().into_bytes());
 /// ```
 #[stable(feature = "rust1", since = "1.0.0")]
-#[derive(Debug)]
+#[derive(Debug, Clone, PartialEq, Eq)]
 pub struct FromUtf8Error {
     bytes: Vec<u8>,
     error: Utf8Error,
@@ -2106,18 +2106,11 @@
     }
 }
 
-/// An error when parsing a `String`.
+/// A type alias for [`Infallible`].
 ///
-/// This `enum` is slightly awkward: it will never actually exist. This error is
-/// part of the type signature of the implementation of [`FromStr`] on
-/// [`String`]. The return type of [`from_str`], requires that an error be
-/// defined, but, given that a [`String`] can always be made into a new
-/// [`String`] without error, this type will never actually be returned. As
-/// such, it is only here to satisfy said signature, and is useless otherwise.
+/// This alias exists for backwards compatibility, and may be eventually deprecated.
 ///
-/// [`FromStr`]: ../../std/str/trait.FromStr.html
-/// [`String`]: struct.String.html
-/// [`from_str`]: ../../std/str/trait.FromStr.html#tymethod.from_str
+/// [`Infallible`]: ../../core/convert/enum.Infallible.html
 #[stable(feature = "str_parse_error", since = "1.5.0")]
 pub type ParseError = core::convert::Infallible;
 
@@ -2125,7 +2118,7 @@
 impl FromStr for String {
     type Err = core::convert::Infallible;
     #[inline]
-    fn from_str(s: &str) -> Result<String, ParseError> {
+    fn from_str(s: &str) -> Result<String, Self::Err> {
         Ok(String::from(s))
     }
 }
@@ -2208,6 +2201,14 @@
     }
 }
 
+#[stable(feature = "string_as_mut", since = "1.43.0")]
+impl AsMut<str> for String {
+    #[inline]
+    fn as_mut(&mut self) -> &mut str {
+        self
+    }
+}
+
 #[stable(feature = "rust1", since = "1.0.0")]
 impl AsRef<[u8]> for String {
     #[inline]
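
The `AsMut<str>` impl added above lets generic code ask for "anything that can hand out a mutable `str`" and accept a `String` directly. A small usage sketch (the helper function is made up for illustration):

```rust
// Accepts a `String` (or anything else implementing `AsMut<str>`).
fn shout<S: AsMut<str>>(s: &mut S) {
    s.as_mut().make_ascii_uppercase();
}

fn main() {
    let mut owned = String::from("hello, world");
    shout(&mut owned);
    assert_eq!(owned, "HELLO, WORLD");
}
```
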
diff --git a/src/liballoc/tests/binary_heap.rs b/src/liballoc/tests/binary_heap.rs
index f49ca71..be5516f 100644
--- a/src/liballoc/tests/binary_heap.rs
+++ b/src/liballoc/tests/binary_heap.rs
@@ -1,6 +1,8 @@
 use std::collections::binary_heap::{Drain, PeekMut};
 use std::collections::BinaryHeap;
 use std::iter::TrustedLen;
+use std::panic::{catch_unwind, AssertUnwindSafe};
+use std::sync::atomic::{AtomicU32, Ordering};
 
 #[test]
 fn test_iterator() {
@@ -276,6 +278,37 @@
 }
 
 #[test]
+fn test_drain_sorted_leak() {
+    static DROPS: AtomicU32 = AtomicU32::new(0);
+
+    #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
+    struct D(u32, bool);
+
+    impl Drop for D {
+        fn drop(&mut self) {
+            DROPS.fetch_add(1, Ordering::SeqCst);
+
+            if self.1 {
+                panic!("panic in `drop`");
+            }
+        }
+    }
+
+    let mut q = BinaryHeap::from(vec![
+        D(0, false),
+        D(1, false),
+        D(2, false),
+        D(3, true),
+        D(4, false),
+        D(5, false),
+    ]);
+
+    catch_unwind(AssertUnwindSafe(|| drop(q.drain_sorted()))).ok();
+
+    assert_eq!(DROPS.load(Ordering::SeqCst), 6);
+}
+
+#[test]
 fn test_extend_ref() {
     let mut a = BinaryHeap::new();
     a.push(1);
diff --git a/src/liballoc/tests/btree/map.rs b/src/liballoc/tests/btree/map.rs
index 0a26d7b..fd07a4d 100644
--- a/src/liballoc/tests/btree/map.rs
+++ b/src/liballoc/tests/btree/map.rs
@@ -5,7 +5,9 @@
 use std::iter::FromIterator;
 use std::ops::Bound::{self, Excluded, Included, Unbounded};
 use std::ops::RangeBounds;
+use std::panic::catch_unwind;
 use std::rc::Rc;
+use std::sync::atomic::{AtomicU32, Ordering};
 
 use super::DeterministicRng;
 
@@ -15,7 +17,7 @@
     #[cfg(not(miri))] // Miri is too slow
     let size = 10000;
     #[cfg(miri)]
-    let size = 200;
+    let size = 144; // to obtain height 3 tree (having edges to both kinds of nodes)
     assert_eq!(map.len(), 0);
 
     for i in 0..size {
@@ -381,8 +383,8 @@
 }
 
 #[test]
-fn test_range_depth_2() {
-    // Assuming that node.CAPACITY is 11, having 12 pairs implies a depth 2 tree
+fn test_range_height_2() {
+    // Assuming that node.CAPACITY is 11, having 12 pairs implies a height 2 tree
     // with 2 leaves. Depending on details we don't want or need to rely upon,
     // the single key at the root will be 6 or 7.
 
@@ -524,7 +526,7 @@
     #[cfg(not(miri))] // Miri is too slow
     let size = 1000;
     #[cfg(miri)]
-    let size = 200;
+    let size = 144; // to obtain height 3 tree (having edges to both kinds of nodes)
     let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
 
     fn test(map: &BTreeMap<u32, u32>, size: u32, min: Bound<&u32>, max: Bound<&u32>) {
@@ -561,14 +563,15 @@
 
 #[test]
 fn test_range() {
-    #[cfg(not(miri))] // Miri is too slow
     let size = 200;
+    #[cfg(not(miri))] // Miri is too slow
+    let step = 1;
     #[cfg(miri)]
-    let size = 30;
+    let step = 66;
     let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
 
-    for i in 0..size {
-        for j in i..size {
+    for i in (0..size).step_by(step) {
+        for j in (i..size).step_by(step) {
             let mut kvs = map.range((Included(&i), Included(&j))).map(|(&k, &v)| (k, v));
             let mut pairs = (i..=j).map(|i| (i, i));
 
@@ -583,14 +586,15 @@
 
 #[test]
 fn test_range_mut() {
-    #[cfg(not(miri))] // Miri is too slow
     let size = 200;
+    #[cfg(not(miri))] // Miri is too slow
+    let step = 1;
     #[cfg(miri)]
-    let size = 30;
+    let step = 66;
     let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
 
-    for i in 0..size {
-        for j in i..size {
+    for i in (0..size).step_by(step) {
+        for j in (i..size).step_by(step) {
             let mut kvs = map.range_mut((Included(&i), Included(&j))).map(|(&k, &mut v)| (k, v));
             let mut pairs = (i..=j).map(|i| (i, i));
 
@@ -758,10 +762,7 @@
 #[test]
 fn test_clone() {
     let mut map = BTreeMap::new();
-    #[cfg(not(miri))] // Miri is too slow
-    let size = 100;
-    #[cfg(miri)]
-    let size = 30;
+    let size = 12; // to obtain height 2 tree (having edges to leaf nodes)
     assert_eq!(map.len(), 0);
 
     for i in 0..size {
@@ -788,24 +789,36 @@
         assert_eq!(map.len(), size / 2 - i - 1);
         assert_eq!(map, map.clone());
     }
+
+    // Full 2-level and minimal 3-level tree (sizes 143, 144 -- the only ones we clone for).
+    for i in 1..=144 {
+        assert_eq!(map.insert(i, i), None);
+        assert_eq!(map.len(), i);
+        if i >= 143 {
+            assert_eq!(map, map.clone());
+        }
+    }
 }
 
 #[test]
 fn test_clone_from() {
     let mut map1 = BTreeMap::new();
-    let size = 30;
+    let max_size = 12; // to obtain height 2 tree (having edges to leaf nodes)
 
-    for i in 0..size {
+    // Range to max_size inclusive, because i is the size of map1 being tested.
+    for i in 0..=max_size {
         let mut map2 = BTreeMap::new();
         for j in 0..i {
             let mut map1_copy = map2.clone();
-            map1_copy.clone_from(&map1);
+            map1_copy.clone_from(&map1); // small cloned from large
             assert_eq!(map1_copy, map1);
             let mut map2_copy = map1.clone();
-            map2_copy.clone_from(&map2);
+            map2_copy.clone_from(&map2); // large cloned from small
             assert_eq!(map2_copy, map2);
             map2.insert(100 * j + 1, 2 * j + 1);
         }
+        map2.clone_from(&map1); // same length
+        assert_eq!(map2, map1);
         map1.insert(i, 10 * i);
     }
 }
@@ -956,6 +969,7 @@
 // Tests for several randomly chosen sizes.
 create_append_test!(test_append_170, 170);
 create_append_test!(test_append_181, 181);
+#[cfg(not(miri))] // Miri is too slow
 create_append_test!(test_append_239, 239);
 #[cfg(not(miri))] // Miri is too slow
 create_append_test!(test_append_1700, 1700);
@@ -1005,3 +1019,29 @@
     assert!(map.into_iter().eq(data.clone().into_iter().filter(|x| x.0 < key)));
     assert!(right.into_iter().eq(data.into_iter().filter(|x| x.0 >= key)));
 }
+
+#[test]
+fn test_into_iter_drop_leak() {
+    static DROPS: AtomicU32 = AtomicU32::new(0);
+
+    struct D;
+
+    impl Drop for D {
+        fn drop(&mut self) {
+            if DROPS.fetch_add(1, Ordering::SeqCst) == 3 {
+                panic!("panic in `drop`");
+            }
+        }
+    }
+
+    let mut map = BTreeMap::new();
+    map.insert("a", D);
+    map.insert("b", D);
+    map.insert("c", D);
+    map.insert("d", D);
+    map.insert("e", D);
+
+    catch_unwind(move || drop(map.into_iter())).ok();
+
+    assert_eq!(DROPS.load(Ordering::SeqCst), 5);
+}
diff --git a/src/liballoc/tests/lib.rs b/src/liballoc/tests/lib.rs
index c1ae67a..ea75f89 100644
--- a/src/liballoc/tests/lib.rs
+++ b/src/liballoc/tests/lib.rs
@@ -12,6 +12,7 @@
 #![feature(binary_heap_into_iter_sorted)]
 #![feature(binary_heap_drain_sorted)]
 #![feature(vec_remove_item)]
+#![feature(split_inclusive)]
 
 use std::collections::hash_map::DefaultHasher;
 use std::hash::{Hash, Hasher};
diff --git a/src/liballoc/tests/linked_list.rs b/src/liballoc/tests/linked_list.rs
index b773651..afcb9e0 100644
--- a/src/liballoc/tests/linked_list.rs
+++ b/src/liballoc/tests/linked_list.rs
@@ -1,5 +1,5 @@
 use std::collections::LinkedList;
-use std::panic::catch_unwind;
+use std::panic::{catch_unwind, AssertUnwindSafe};
 
 #[test]
 fn test_basic() {
@@ -532,6 +532,74 @@
 }
 
 #[test]
+fn drain_filter_drop_panic_leak() {
+    static mut DROPS: i32 = 0;
+
+    struct D(bool);
+
+    impl Drop for D {
+        fn drop(&mut self) {
+            unsafe {
+                DROPS += 1;
+            }
+
+            if self.0 {
+                panic!("panic in `drop`");
+            }
+        }
+    }
+
+    let mut q = LinkedList::new();
+    q.push_back(D(false));
+    q.push_back(D(false));
+    q.push_back(D(false));
+    q.push_back(D(false));
+    q.push_back(D(false));
+    q.push_front(D(false));
+    q.push_front(D(true));
+    q.push_front(D(false));
+
+    catch_unwind(AssertUnwindSafe(|| drop(q.drain_filter(|_| true)))).ok();
+
+    assert_eq!(unsafe { DROPS }, 8);
+    assert!(q.is_empty());
+}
+
+#[test]
+fn drain_filter_pred_panic_leak() {
+    static mut DROPS: i32 = 0;
+
+    #[derive(Debug)]
+    struct D(u32);
+
+    impl Drop for D {
+        fn drop(&mut self) {
+            unsafe {
+                DROPS += 1;
+            }
+        }
+    }
+
+    let mut q = LinkedList::new();
+    q.push_back(D(3));
+    q.push_back(D(4));
+    q.push_back(D(5));
+    q.push_back(D(6));
+    q.push_back(D(7));
+    q.push_front(D(2));
+    q.push_front(D(1));
+    q.push_front(D(0));
+
+    catch_unwind(AssertUnwindSafe(|| {
+        drop(q.drain_filter(|item| if item.0 >= 2 { panic!() } else { true }))
+    }))
+    .ok();
+
+    assert_eq!(unsafe { DROPS }, 2); // 0 and 1
+    assert_eq!(q.len(), 6);
+}
+
+#[test]
 fn test_drop() {
     static mut DROPS: i32 = 0;
     struct Elem;
diff --git a/src/liballoc/tests/slice.rs b/src/liballoc/tests/slice.rs
index 51ddb5e..3d6b4bf 100644
--- a/src/liballoc/tests/slice.rs
+++ b/src/liballoc/tests/slice.rs
@@ -852,6 +852,86 @@
 }
 
 #[test]
+fn test_splitator_inclusive() {
+    let xs = &[1, 2, 3, 4, 5];
+
+    let splits: &[&[_]] = &[&[1, 2], &[3, 4], &[5]];
+    assert_eq!(xs.split_inclusive(|x| *x % 2 == 0).collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[1], &[2, 3, 4, 5]];
+    assert_eq!(xs.split_inclusive(|x| *x == 1).collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+    assert_eq!(xs.split_inclusive(|x| *x == 5).collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+    assert_eq!(xs.split_inclusive(|x| *x == 10).collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[1], &[2], &[3], &[4], &[5]];
+    assert_eq!(xs.split_inclusive(|_| true).collect::<Vec<&[i32]>>(), splits);
+
+    let xs: &[i32] = &[];
+    let splits: &[&[i32]] = &[&[]];
+    assert_eq!(xs.split_inclusive(|x| *x == 5).collect::<Vec<&[i32]>>(), splits);
+}
+
+#[test]
+fn test_splitator_inclusive_reverse() {
+    let xs = &[1, 2, 3, 4, 5];
+
+    let splits: &[&[_]] = &[&[5], &[3, 4], &[1, 2]];
+    assert_eq!(xs.split_inclusive(|x| *x % 2 == 0).rev().collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[2, 3, 4, 5], &[1]];
+    assert_eq!(xs.split_inclusive(|x| *x == 1).rev().collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+    assert_eq!(xs.split_inclusive(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+    assert_eq!(xs.split_inclusive(|x| *x == 10).rev().collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[5], &[4], &[3], &[2], &[1]];
+    assert_eq!(xs.split_inclusive(|_| true).rev().collect::<Vec<_>>(), splits);
+
+    let xs: &[i32] = &[];
+    let splits: &[&[i32]] = &[&[]];
+    assert_eq!(xs.split_inclusive(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
+}
+
+#[test]
+fn test_splitator_mut_inclusive() {
+    let xs = &mut [1, 2, 3, 4, 5];
+
+    let splits: &[&[_]] = &[&[1, 2], &[3, 4], &[5]];
+    assert_eq!(xs.split_inclusive_mut(|x| *x % 2 == 0).collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[1], &[2, 3, 4, 5]];
+    assert_eq!(xs.split_inclusive_mut(|x| *x == 1).collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+    assert_eq!(xs.split_inclusive_mut(|x| *x == 5).collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+    assert_eq!(xs.split_inclusive_mut(|x| *x == 10).collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[1], &[2], &[3], &[4], &[5]];
+    assert_eq!(xs.split_inclusive_mut(|_| true).collect::<Vec<_>>(), splits);
+
+    let xs: &mut [i32] = &mut [];
+    let splits: &[&[i32]] = &[&[]];
+    assert_eq!(xs.split_inclusive_mut(|x| *x == 5).collect::<Vec<_>>(), splits);
+}
+
+#[test]
+fn test_splitator_mut_inclusive_reverse() {
+    let xs = &mut [1, 2, 3, 4, 5];
+
+    let splits: &[&[_]] = &[&[5], &[3, 4], &[1, 2]];
+    assert_eq!(xs.split_inclusive_mut(|x| *x % 2 == 0).rev().collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[2, 3, 4, 5], &[1]];
+    assert_eq!(xs.split_inclusive_mut(|x| *x == 1).rev().collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+    assert_eq!(xs.split_inclusive_mut(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+    assert_eq!(xs.split_inclusive_mut(|x| *x == 10).rev().collect::<Vec<_>>(), splits);
+    let splits: &[&[_]] = &[&[5], &[4], &[3], &[2], &[1]];
+    assert_eq!(xs.split_inclusive_mut(|_| true).rev().collect::<Vec<_>>(), splits);
+
+    let xs: &mut [i32] = &mut [];
+    let splits: &[&[i32]] = &[&[]];
+    assert_eq!(xs.split_inclusive_mut(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
+}
+
+#[test]
 fn test_splitnator() {
     let xs = &[1, 2, 3, 4, 5];
 
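As an aside, here is a minimal sketch (not part of the test file above) of what the new inclusive splitter does differently from the existing `split`: the element that matched the predicate stays at the end of the preceding subslice instead of being dropped. (`split_inclusive` was still unstable when this diff landed and was stabilized in a later release.)

```rust
fn main() {
    let xs = [1, 2, 3, 4, 5];

    // `split_inclusive` keeps the matched elements (the even numbers here).
    let inclusive: Vec<&[i32]> = xs.split_inclusive(|x| x % 2 == 0).collect();
    assert_eq!(inclusive, [&[1, 2][..], &[3, 4][..], &[5][..]]);

    // Plain `split` drops the matched elements entirely.
    let exclusive: Vec<&[i32]> = xs.split(|x| x % 2 == 0).collect();
    assert_eq!(exclusive, [&[1][..], &[3][..], &[5][..]]);
}
```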
diff --git a/src/liballoc/tests/str.rs b/src/liballoc/tests/str.rs
index d3c7261..b703df6 100644
--- a/src/liballoc/tests/str.rs
+++ b/src/liballoc/tests/str.rs
@@ -1248,6 +1248,49 @@
 }
 
 #[test]
+fn test_split_char_iterator_inclusive() {
+    let data = "\nMäry häd ä little lämb\nLittle lämb\n";
+
+    let split: Vec<&str> = data.split_inclusive('\n').collect();
+    assert_eq!(split, ["\n", "Märy häd ä little lämb\n", "Little lämb\n"]);
+
+    let uppercase_separated = "SheePSharKTurtlECaT";
+    let mut first_char = true;
+    let split: Vec<&str> = uppercase_separated
+        .split_inclusive(|c: char| {
+            let split = !first_char && c.is_uppercase();
+            first_char = split;
+            split
+        })
+        .collect();
+    assert_eq!(split, ["SheeP", "SharK", "TurtlE", "CaT"]);
+}
+
+#[test]
+fn test_split_char_iterator_inclusive_rev() {
+    let data = "\nMäry häd ä little lämb\nLittle lämb\n";
+
+    let split: Vec<&str> = data.split_inclusive('\n').rev().collect();
+    assert_eq!(split, ["Little lämb\n", "Märy häd ä little lämb\n", "\n"]);
+
+    // Note that the predicate is stateful and thus dependent
+    // on the iteration order.
+    // (A different predicate is needed for a reverse iterator than for a
+    // forward one; it is not clear that anything can be done about this.)
+    let uppercase_separated = "SheePSharKTurtlECaT";
+    let mut term_char = true;
+    let split: Vec<&str> = uppercase_separated
+        .split_inclusive(|c: char| {
+            let split = term_char && c.is_uppercase();
+            term_char = c.is_uppercase();
+            split
+        })
+        .rev()
+        .collect();
+    assert_eq!(split, ["CaT", "TurtlE", "SharK", "SheeP"]);
+}
+
+#[test]
 fn test_rsplit() {
     let data = "\nMäry häd ä little lämb\nLittle lämb\n";
 
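For reference, a small sketch (again, not part of the test suite) of the motivating use case for `str::split_inclusive`: splitting into lines while keeping each trailing `'\n'`, and without the trailing empty string that `split` produces.

```rust
fn main() {
    let text = "foo\nbar\n";

    let kept: Vec<&str> = text.split_inclusive('\n').collect();
    assert_eq!(kept, ["foo\n", "bar\n"]);

    // Plain `split` drops the separator and yields a trailing empty piece.
    let dropped: Vec<&str> = text.split('\n').collect();
    assert_eq!(dropped, ["foo", "bar", ""]);
}
```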
diff --git a/src/liballoc/tests/string.rs b/src/liballoc/tests/string.rs
index dd44495..08859b2 100644
--- a/src/liballoc/tests/string.rs
+++ b/src/liballoc/tests/string.rs
@@ -50,7 +50,11 @@
 
     let xs = b"hello\xFF".to_vec();
     let err = String::from_utf8(xs).unwrap_err();
+    assert_eq!(err.as_bytes(), b"hello\xff");
+    let err_clone = err.clone();
+    assert_eq!(err, err_clone);
     assert_eq!(err.into_bytes(), b"hello\xff".to_vec());
+    assert_eq!(err_clone.utf8_error().valid_up_to(), 5);
 }
 
 #[test]
diff --git a/src/liballoc/tests/vec.rs b/src/liballoc/tests/vec.rs
index 2a9bfef..9c4ac52 100644
--- a/src/liballoc/tests/vec.rs
+++ b/src/liballoc/tests/vec.rs
@@ -1,6 +1,7 @@
 use std::borrow::Cow;
 use std::collections::TryReserveError::*;
 use std::mem::size_of;
+use std::panic::{catch_unwind, AssertUnwindSafe};
 use std::vec::{Drain, IntoIter};
 use std::{isize, usize};
 
@@ -586,6 +587,44 @@
 }
 
 #[test]
+fn test_drain_leak() {
+    static mut DROPS: i32 = 0;
+
+    #[derive(Debug, PartialEq)]
+    struct D(u32, bool);
+
+    impl Drop for D {
+        fn drop(&mut self) {
+            unsafe {
+                DROPS += 1;
+            }
+
+            if self.1 {
+                panic!("panic in `drop`");
+            }
+        }
+    }
+
+    let mut v = vec![
+        D(0, false),
+        D(1, false),
+        D(2, false),
+        D(3, false),
+        D(4, true),
+        D(5, false),
+        D(6, false),
+    ];
+
+    catch_unwind(AssertUnwindSafe(|| {
+        v.drain(2..=5);
+    }))
+    .ok();
+
+    assert_eq!(unsafe { DROPS }, 4);
+    assert_eq!(v, vec![D(0, false), D(1, false), D(6, false)]);
+}
+
+#[test]
 fn test_splice() {
     let mut v = vec![1, 2, 3, 4, 5];
     let a = [10, 11, 12];
@@ -727,6 +766,31 @@
 }
 
 #[test]
+fn test_into_iter_leak() {
+    static mut DROPS: i32 = 0;
+
+    struct D(bool);
+
+    impl Drop for D {
+        fn drop(&mut self) {
+            unsafe {
+                DROPS += 1;
+            }
+
+            if self.0 {
+                panic!("panic in `drop`");
+            }
+        }
+    }
+
+    let v = vec![D(false), D(true), D(false)];
+
+    catch_unwind(move || drop(v.into_iter())).ok();
+
+    assert_eq!(unsafe { DROPS }, 3);
+}
+
+#[test]
 fn test_cow_from() {
     let borrowed: &[_] = &["borrowed", "(slice)"];
     let owned = vec!["owned", "(vec)"];
diff --git a/src/liballoc/tests/vec_deque.rs b/src/liballoc/tests/vec_deque.rs
index 1ab3694..101dd67 100644
--- a/src/liballoc/tests/vec_deque.rs
+++ b/src/liballoc/tests/vec_deque.rs
@@ -2,7 +2,7 @@
 use std::collections::{vec_deque::Drain, VecDeque};
 use std::fmt::Debug;
 use std::mem::size_of;
-use std::panic::catch_unwind;
+use std::panic::{catch_unwind, AssertUnwindSafe};
 use std::{isize, usize};
 
 use crate::hash;
@@ -1573,3 +1573,75 @@
     assert_eq!(iter.try_rfold(0_i8, |acc, &x| acc.checked_add(x)), None);
     assert_eq!(iter.next_back(), Some(&70));
 }
+
+#[test]
+fn truncate_leak() {
+    static mut DROPS: i32 = 0;
+
+    struct D(bool);
+
+    impl Drop for D {
+        fn drop(&mut self) {
+            unsafe {
+                DROPS += 1;
+            }
+
+            if self.0 {
+                panic!("panic in `drop`");
+            }
+        }
+    }
+
+    let mut q = VecDeque::new();
+    q.push_back(D(false));
+    q.push_back(D(false));
+    q.push_back(D(false));
+    q.push_back(D(false));
+    q.push_back(D(false));
+    q.push_front(D(true));
+    q.push_front(D(false));
+    q.push_front(D(false));
+
+    catch_unwind(AssertUnwindSafe(|| q.truncate(1))).ok();
+
+    assert_eq!(unsafe { DROPS }, 7);
+}
+
+#[test]
+fn test_drain_leak() {
+    static mut DROPS: i32 = 0;
+
+    #[derive(Debug, PartialEq)]
+    struct D(u32, bool);
+
+    impl Drop for D {
+        fn drop(&mut self) {
+            unsafe {
+                DROPS += 1;
+            }
+
+            if self.1 {
+                panic!("panic in `drop`");
+            }
+        }
+    }
+
+    let mut v = VecDeque::new();
+    v.push_back(D(4, false));
+    v.push_back(D(5, false));
+    v.push_back(D(6, false));
+    v.push_front(D(3, false));
+    v.push_front(D(2, true));
+    v.push_front(D(1, false));
+    v.push_front(D(0, false));
+
+    catch_unwind(AssertUnwindSafe(|| {
+        v.drain(1..=4);
+    }))
+    .ok();
+
+    assert_eq!(unsafe { DROPS }, 4);
+    assert_eq!(v.len(), 3);
+    drop(v);
+    assert_eq!(unsafe { DROPS }, 7);
+}
diff --git a/src/liballoc/vec.rs b/src/liballoc/vec.rs
index 4f6b787..29987ac 100644
--- a/src/liballoc/vec.rs
+++ b/src/liballoc/vec.rs
@@ -2622,7 +2622,9 @@
 unsafe impl<#[may_dangle] T> Drop for IntoIter<T> {
     fn drop(&mut self) {
         // destroy the remaining elements
-        for _x in self.by_ref() {}
+        unsafe {
+            ptr::drop_in_place(self.as_mut_slice());
+        }
 
         // RawVec handles deallocation
         let _ = unsafe { RawVec::from_raw_parts(self.buf.as_ptr(), self.cap) };
@@ -2702,23 +2704,42 @@
 #[stable(feature = "drain", since = "1.6.0")]
 impl<T> Drop for Drain<'_, T> {
     fn drop(&mut self) {
-        // exhaust self first
-        self.for_each(drop);
+        /// Continues dropping the remaining elements in the `Drain`, then moves back the
+        /// un-`Drain`ed elements to restore the original `Vec`.
+        struct DropGuard<'r, 'a, T>(&'r mut Drain<'a, T>);
 
-        if self.tail_len > 0 {
-            unsafe {
-                let source_vec = self.vec.as_mut();
-                // memmove back untouched tail, update to new length
-                let start = source_vec.len();
-                let tail = self.tail_start;
-                if tail != start {
-                    let src = source_vec.as_ptr().add(tail);
-                    let dst = source_vec.as_mut_ptr().add(start);
-                    ptr::copy(src, dst, self.tail_len);
+        impl<'r, 'a, T> Drop for DropGuard<'r, 'a, T> {
+            fn drop(&mut self) {
+                // Continue the same loop we have below. If the loop already finished, this does
+                // nothing.
+                self.0.for_each(drop);
+
+                if self.0.tail_len > 0 {
+                    unsafe {
+                        let source_vec = self.0.vec.as_mut();
+                        // memmove back untouched tail, update to new length
+                        let start = source_vec.len();
+                        let tail = self.0.tail_start;
+                        if tail != start {
+                            let src = source_vec.as_ptr().add(tail);
+                            let dst = source_vec.as_mut_ptr().add(start);
+                            ptr::copy(src, dst, self.0.tail_len);
+                        }
+                        source_vec.set_len(start + self.0.tail_len);
+                    }
                 }
-                source_vec.set_len(start + self.tail_len);
             }
         }
+
+        // exhaust self first
+        while let Some(item) = self.next() {
+            let guard = DropGuard(self);
+            drop(item);
+            mem::forget(guard);
+        }
+
+        // Drop a `DropGuard` to move back the non-drained tail of `self`.
+        DropGuard(self);
     }
 }
 
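A generic sketch of the drop-guard pattern used in the `Drain` destructor above (assumed names, not the std code itself): if dropping one element panics, the guard's own `Drop` still runs during unwinding and finishes the cleanup, so the remaining elements are not leaked.

```rust
struct Guard<'a, T> {
    rest: &'a mut Vec<T>,
}

impl<T> Drop for Guard<'_, T> {
    fn drop(&mut self) {
        // Runs even while unwinding from a panic in an element's destructor.
        self.rest.clear();
    }
}

fn drop_all<T>(items: &mut Vec<T>) {
    while let Some(item) = items.pop() {
        // Arm the guard before dropping; disarm it (via `forget`) on success.
        let guard = Guard { rest: &mut *items };
        drop(item);
        std::mem::forget(guard);
    }
}

fn main() {
    let mut v = vec![1, 2, 3];
    drop_all(&mut v);
    assert!(v.is_empty());
}
```

The `mem::forget(guard)` on the success path keeps the happy path cheap: the guard only ever runs its `Drop` when a panic interrupts the loop.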
diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs
index 38df843..a04e75b 100644
--- a/src/libcore/alloc.rs
+++ b/src/libcore/alloc.rs
@@ -241,11 +241,13 @@
     #[unstable(feature = "alloc_layout_extra", issue = "55724")]
     #[inline]
     pub fn repeat(&self, n: usize) -> Result<(Self, usize), LayoutErr> {
-        // This cannot overflow. Quoting from the invariant of Layout:
-        // > `size`, when rounded up to the nearest multiple of `align`,
-        // > must not overflow (i.e., the rounded value must be less than
-        // > `usize::MAX`)
-        let padded_size = self.size() + self.padding_needed_for(self.align());
+        // Warning, removing the checked_add here led to segfaults in #67174. Further
+        // analysis in #69225 seems to indicate that this is an LTO-related
+        // miscompilation, so #67174 might be able to be reapplied in the future.
+        let padded_size = self
+            .size()
+            .checked_add(self.padding_needed_for(self.align()))
+            .ok_or(LayoutErr { private: () })?;
         let alloc_size = padded_size.checked_mul(n).ok_or(LayoutErr { private: () })?;
 
         unsafe {
@@ -593,9 +595,8 @@
 ///
 /// * the starting address for that memory block was previously
 ///   returned by a previous call to an allocation method (`alloc`,
-///   `alloc_zeroed`, `alloc_excess`, `alloc_one`, `alloc_array`) or
-///   reallocation method (`realloc`, `realloc_excess`, or
-///   `realloc_array`), and
+///   `alloc_zeroed`, `alloc_excess`) or reallocation method
+///   (`realloc`, `realloc_excess`), and
 ///
 /// * the memory block has not been subsequently deallocated, where
 ///   blocks are deallocated either by being passed to a deallocation
@@ -606,11 +607,6 @@
 /// methods in the `AllocRef` trait state that allocation requests
 /// must be non-zero size, or else undefined behavior can result.
 ///
-/// * However, some higher-level allocation methods (`alloc_one`,
-///   `alloc_array`) are well-defined on zero-sized types and can
-///   optionally support them: it is left up to the implementor
-///   whether to return `Err`, or to return `Ok` with some pointer.
-///
 /// * If an `AllocRef` implementation chooses to return `Ok` in this
 ///   case (i.e., the pointer denotes a zero-sized inaccessible block)
 ///   then that returned pointer must be considered "currently
@@ -853,6 +849,59 @@
         result
     }
 
+    /// Behaves like `realloc`, but also ensures that the new contents
+    /// are set to zero before being returned.
+    ///
+    /// # Safety
+    ///
+    /// This function is unsafe for the same reasons that `realloc` is.
+    ///
+    /// # Errors
+    ///
+    /// Returns `Err` only if the new layout
+    /// does not meet the allocator's size
+    /// and alignment constraints, or if reallocation
+    /// otherwise fails.
+    ///
+    /// Implementations are encouraged to return `Err` on memory
+    /// exhaustion rather than panicking or aborting, but this is not
+    /// a strict requirement. (Specifically: it is *legal* to
+    /// implement this trait atop an underlying native allocation
+    /// library that aborts on memory exhaustion.)
+    ///
+    /// Clients wishing to abort computation in response to a
+    /// reallocation error are encouraged to call the [`handle_alloc_error`] function,
+    /// rather than directly invoking `panic!` or similar.
+    ///
+    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+    unsafe fn realloc_zeroed(
+        &mut self,
+        ptr: NonNull<u8>,
+        layout: Layout,
+        new_size: usize,
+    ) -> Result<NonNull<u8>, AllocErr> {
+        let old_size = layout.size();
+
+        if new_size >= old_size {
+            if let Ok(()) = self.grow_in_place_zeroed(ptr, layout, new_size) {
+                return Ok(ptr);
+            }
+        } else if new_size < old_size {
+            if let Ok(()) = self.shrink_in_place(ptr, layout, new_size) {
+                return Ok(ptr);
+            }
+        }
+
+        // otherwise, fall back on alloc + copy + dealloc.
+        let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
+        let result = self.alloc_zeroed(new_layout);
+        if let Ok(new_ptr) = result {
+            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), cmp::min(old_size, new_size));
+            self.dealloc(ptr, layout);
+        }
+        result
+    }
+
     /// Behaves like `alloc`, but also ensures that the contents
     /// are set to zero before being returned.
     ///
@@ -904,6 +953,31 @@
         self.alloc(layout).map(|p| Excess(p, usable_size.1))
     }
 
+    /// Behaves like `alloc`, but also returns the whole size of
+    /// the returned block. For some `layout` inputs, like arrays, this
+    /// may include extra storage usable for additional data.
+    /// It also ensures that the contents are set to zero before being returned.
+    ///
+    /// # Safety
+    ///
+    /// This function is unsafe for the same reasons that `alloc` is.
+    ///
+    /// # Errors
+    ///
+    /// Returning `Err` indicates that either memory is exhausted or
+    /// `layout` does not meet the allocator's size or alignment
+    /// constraints, just as in `alloc`.
+    ///
+    /// Clients wishing to abort computation in response to an
+    /// allocation error are encouraged to call the [`handle_alloc_error`] function,
+    /// rather than directly invoking `panic!` or similar.
+    ///
+    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+    unsafe fn alloc_excess_zeroed(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
+        let usable_size = self.usable_size(&layout);
+        self.alloc_zeroed(layout).map(|p| Excess(p, usable_size.1))
+    }
+
     /// Behaves like `realloc`, but also returns the whole size of
     /// the returned block. For some `layout` inputs, like arrays, this
     /// may include extra storage usable for additional data.
@@ -934,6 +1008,37 @@
         self.realloc(ptr, layout, new_size).map(|p| Excess(p, usable_size.1))
     }
 
+    /// Behaves like `realloc`, but also returns the whole size of
+    /// the returned block. For some `layout` inputs, like arrays, this
+    /// may include extra storage usable for additional data.
+    /// It also ensures that the contents are set to zero before being returned.
+    ///
+    /// # Safety
+    ///
+    /// This function is unsafe for the same reasons that `realloc` is.
+    ///
+    /// # Errors
+    ///
+    /// Returning `Err` indicates that either memory is exhausted or
+    /// `layout` does not meet the allocator's size or alignment
+    /// constraints, just as in `realloc`.
+    ///
+    /// Clients wishing to abort computation in response to a
+    /// reallocation error are encouraged to call the [`handle_alloc_error`] function,
+    /// rather than directly invoking `panic!` or similar.
+    ///
+    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+    unsafe fn realloc_excess_zeroed(
+        &mut self,
+        ptr: NonNull<u8>,
+        layout: Layout,
+        new_size: usize,
+    ) -> Result<Excess, AllocErr> {
+        let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
+        let usable_size = self.usable_size(&new_layout);
+        self.realloc_zeroed(ptr, layout, new_size).map(|p| Excess(p, usable_size.1))
+    }
+
     /// Attempts to extend the allocation referenced by `ptr` to fit `new_size`.
     ///
     /// If this returns `Ok`, then the allocator has asserted that the
@@ -983,6 +1088,34 @@
         if new_size <= u { Ok(()) } else { Err(CannotReallocInPlace) }
     }
 
+    /// Behaves like `grow_in_place`, but also ensures that the new
+    /// contents are set to zero before being returned.
+    ///
+    /// # Safety
+    ///
+    /// This function is unsafe for the same reasons that `grow_in_place` is.
+    ///
+    /// # Errors
+    ///
+    /// Returns `Err(CannotReallocInPlace)` when the allocator is
+    /// unable to assert that the memory block referenced by `ptr`
+    /// could fit `layout`.
+    ///
+    /// Note that one cannot pass `CannotReallocInPlace` to the `handle_alloc_error`
+    /// function; clients are expected either to be able to recover from
+    /// `grow_in_place` failures without aborting, or to fall back on
+    /// another reallocation method before resorting to an abort.
+    unsafe fn grow_in_place_zeroed(
+        &mut self,
+        ptr: NonNull<u8>,
+        layout: Layout,
+        new_size: usize,
+    ) -> Result<(), CannotReallocInPlace> {
+        self.grow_in_place(ptr, layout, new_size)?;
+        ptr.as_ptr().add(layout.size()).write_bytes(0, new_size - layout.size());
+        Ok(())
+    }
+
     /// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`.
     ///
     /// If this returns `Ok`, then the allocator has asserted that the
@@ -1035,195 +1168,4 @@
         // new_layout.size() <= layout.size()        [required by this method]
         if l <= new_size { Ok(()) } else { Err(CannotReallocInPlace) }
     }
-
-    // == COMMON USAGE PATTERNS ==
-    // alloc_one, dealloc_one, alloc_array, realloc_array. dealloc_array
-
-    /// Allocates a block suitable for holding an instance of `T`.
-    ///
-    /// Captures a common usage pattern for allocators.
-    ///
-    /// The returned block is suitable for passing to the
-    /// `realloc`/`dealloc` methods of this allocator.
-    ///
-    /// Note to implementors: If this returns `Ok(ptr)`, then `ptr`
-    /// must be considered "currently allocated" and must be
-    /// acceptable input to methods such as `realloc` or `dealloc`,
-    /// *even if* `T` is a zero-sized type. In other words, if your
-    /// `AllocRef` implementation overrides this method in a manner
-    /// that can return a zero-sized `ptr`, then all reallocation and
-    /// deallocation methods need to be similarly overridden to accept
-    /// such values as input.
-    ///
-    /// # Errors
-    ///
-    /// Returning `Err` indicates that either memory is exhausted or
-    /// `T` does not meet allocator's size or alignment constraints.
-    ///
-    /// For zero-sized `T`, may return either of `Ok` or `Err`, but
-    /// will *not* yield undefined behavior.
-    ///
-    /// Clients wishing to abort computation in response to an
-    /// allocation error are encouraged to call the [`handle_alloc_error`] function,
-    /// rather than directly invoking `panic!` or similar.
-    ///
-    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-    fn alloc_one<T>(&mut self) -> Result<NonNull<T>, AllocErr>
-    where
-        Self: Sized,
-    {
-        let k = Layout::new::<T>();
-        if k.size() > 0 { unsafe { self.alloc(k).map(|p| p.cast()) } } else { Err(AllocErr) }
-    }
-
-    /// Deallocates a block suitable for holding an instance of `T`.
-    ///
-    /// The given block must have been produced by this allocator,
-    /// and must be suitable for storing a `T` (in terms of alignment
-    /// as well as minimum and maximum size); otherwise yields
-    /// undefined behavior.
-    ///
-    /// Captures a common usage pattern for allocators.
-    ///
-    /// # Safety
-    ///
-    /// This function is unsafe because undefined behavior can result
-    /// if the caller does not ensure both:
-    ///
-    /// * `ptr` must denote a block of memory currently allocated via this allocator
-    ///
-    /// * the layout of `T` must *fit* that block of memory.
-    unsafe fn dealloc_one<T>(&mut self, ptr: NonNull<T>)
-    where
-        Self: Sized,
-    {
-        let k = Layout::new::<T>();
-        if k.size() > 0 {
-            self.dealloc(ptr.cast(), k);
-        }
-    }
-
-    /// Allocates a block suitable for holding `n` instances of `T`.
-    ///
-    /// Captures a common usage pattern for allocators.
-    ///
-    /// The returned block is suitable for passing to the
-    /// `realloc`/`dealloc` methods of this allocator.
-    ///
-    /// Note to implementors: If this returns `Ok(ptr)`, then `ptr`
-    /// must be considered "currently allocated" and must be
-    /// acceptable input to methods such as `realloc` or `dealloc`,
-    /// *even if* `T` is a zero-sized type. In other words, if your
-    /// `AllocRef` implementation overrides this method in a manner
-    /// that can return a zero-sized `ptr`, then all reallocation and
-    /// deallocation methods need to be similarly overridden to accept
-    /// such values as input.
-    ///
-    /// # Errors
-    ///
-    /// Returning `Err` indicates that either memory is exhausted or
-    /// `[T; n]` does not meet allocator's size or alignment
-    /// constraints.
-    ///
-    /// For zero-sized `T` or `n == 0`, may return either of `Ok` or
-    /// `Err`, but will *not* yield undefined behavior.
-    ///
-    /// Always returns `Err` on arithmetic overflow.
-    ///
-    /// Clients wishing to abort computation in response to an
-    /// allocation error are encouraged to call the [`handle_alloc_error`] function,
-    /// rather than directly invoking `panic!` or similar.
-    ///
-    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-    fn alloc_array<T>(&mut self, n: usize) -> Result<NonNull<T>, AllocErr>
-    where
-        Self: Sized,
-    {
-        match Layout::array::<T>(n) {
-            Ok(layout) if layout.size() > 0 => unsafe { self.alloc(layout).map(|p| p.cast()) },
-            _ => Err(AllocErr),
-        }
-    }
-
-    /// Reallocates a block previously suitable for holding `n_old`
-    /// instances of `T`, returning a block suitable for holding
-    /// `n_new` instances of `T`.
-    ///
-    /// Captures a common usage pattern for allocators.
-    ///
-    /// The returned block is suitable for passing to the
-    /// `realloc`/`dealloc` methods of this allocator.
-    ///
-    /// # Safety
-    ///
-    /// This function is unsafe because undefined behavior can result
-    /// if the caller does not ensure all of the following:
-    ///
-    /// * `ptr` must be currently allocated via this allocator,
-    ///
-    /// * the layout of `[T; n_old]` must *fit* that block of memory.
-    ///
-    /// # Errors
-    ///
-    /// Returning `Err` indicates that either memory is exhausted or
-    /// `[T; n_new]` does not meet allocator's size or alignment
-    /// constraints.
-    ///
-    /// For zero-sized `T` or `n_new == 0`, may return either of `Ok` or
-    /// `Err`, but will *not* yield undefined behavior.
-    ///
-    /// Always returns `Err` on arithmetic overflow.
-    ///
-    /// Clients wishing to abort computation in response to a
-    /// reallocation error are encouraged to call the [`handle_alloc_error`] function,
-    /// rather than directly invoking `panic!` or similar.
-    ///
-    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-    unsafe fn realloc_array<T>(
-        &mut self,
-        ptr: NonNull<T>,
-        n_old: usize,
-        n_new: usize,
-    ) -> Result<NonNull<T>, AllocErr>
-    where
-        Self: Sized,
-    {
-        match (Layout::array::<T>(n_old), Layout::array::<T>(n_new)) {
-            (Ok(k_old), Ok(k_new)) if k_old.size() > 0 && k_new.size() > 0 => {
-                debug_assert!(k_old.align() == k_new.align());
-                self.realloc(ptr.cast(), k_old, k_new.size()).map(NonNull::cast)
-            }
-            _ => Err(AllocErr),
-        }
-    }
-
-    /// Deallocates a block suitable for holding `n` instances of `T`.
-    ///
-    /// Captures a common usage pattern for allocators.
-    ///
-    /// # Safety
-    ///
-    /// This function is unsafe because undefined behavior can result
-    /// if the caller does not ensure both:
-    ///
-    /// * `ptr` must denote a block of memory currently allocated via this allocator
-    ///
-    /// * the layout of `[T; n]` must *fit* that block of memory.
-    ///
-    /// # Errors
-    ///
-    /// Returning `Err` indicates that either `[T; n]` or the given
-    /// memory block does not meet allocator's size or alignment
-    /// constraints.
-    ///
-    /// Always returns `Err` on arithmetic overflow.
-    unsafe fn dealloc_array<T>(&mut self, ptr: NonNull<T>, n: usize) -> Result<(), AllocErr>
-    where
-        Self: Sized,
-    {
-        match Layout::array::<T>(n) {
-            Ok(k) if k.size() > 0 => Ok(self.dealloc(ptr.cast(), k)),
-            _ => Err(AllocErr),
-        }
-    }
 }
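To make the `checked_add` change in `Layout::repeat` above concrete, here is a hedged standalone sketch of the arithmetic involved (plain `usize` math with an assumed function name, not the real method): rounding the size up to the alignment can itself overflow, so both the padding addition and the multiplication by `n` have to be checked.

```rust
/// `align` is assumed to be a nonzero power of two, as `Layout` guarantees.
fn repeated_size(size: usize, align: usize, n: usize) -> Option<usize> {
    // Bytes needed to reach the next multiple of `align`.
    let padding = size.wrapping_neg() & (align - 1);
    // This is the addition the diff re-checks.
    let padded_size = size.checked_add(padding)?;
    padded_size.checked_mul(n)
}

fn main() {
    assert_eq!(repeated_size(3, 4, 2), Some(8));
    // Near usize::MAX, the unchecked version would silently wrap around.
    assert_eq!(repeated_size(usize::MAX - 1, 4, 1), None);
}
```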
diff --git a/src/libcore/cell.rs b/src/libcore/cell.rs
index e7eecf7..9ebb317 100644
--- a/src/libcore/cell.rs
+++ b/src/libcore/cell.rs
@@ -1245,6 +1245,38 @@
         let borrow = orig.borrow.clone();
         (Ref { value: a, borrow }, Ref { value: b, borrow: orig.borrow })
     }
+
+    /// Convert into a reference to the underlying data.
+    ///
+    /// The underlying `RefCell` can never be mutably borrowed from again and will always appear
+    /// already immutably borrowed. It is not a good idea to leak more than a constant number of
+    /// references. The `RefCell` can be immutably borrowed again if only a smaller number of leaks
+    /// have occurred in total.
+    ///
+    /// This is an associated function that needs to be used as
+    /// `Ref::leak(...)`. A method would interfere with methods of the
+    /// same name on the contents of a `RefCell` used through `Deref`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(cell_leak)]
+    /// use std::cell::{RefCell, Ref};
+    /// let cell = RefCell::new(0);
+    ///
+    /// let value = Ref::leak(cell.borrow());
+    /// assert_eq!(*value, 0);
+    ///
+    /// assert!(cell.try_borrow().is_ok());
+    /// assert!(cell.try_borrow_mut().is_err());
+    /// ```
+    #[unstable(feature = "cell_leak", issue = "69099")]
+    pub fn leak(orig: Ref<'b, T>) -> &'b T {
+        // By forgetting this Ref we ensure that the borrow counter in the RefCell never goes back
+        // to UNUSED again. No further mutable references can be created from the original cell.
+        mem::forget(orig.borrow);
+        orig.value
+    }
 }
 
 #[unstable(feature = "coerce_unsized", issue = "27732")]
@@ -1330,6 +1362,37 @@
         let borrow = orig.borrow.clone();
         (RefMut { value: a, borrow }, RefMut { value: b, borrow: orig.borrow })
     }
+
+    /// Convert into a mutable reference to the underlying data.
+    ///
+    /// The underlying `RefCell` cannot be borrowed from again and will always appear already
+    /// mutably borrowed, making the returned reference the only reference to the interior.
+    ///
+    /// This is an associated function that needs to be used as
+    /// `RefMut::leak(...)`. A method would interfere with methods of the
+    /// same name on the contents of a `RefCell` used through `Deref`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(cell_leak)]
+    /// use std::cell::{RefCell, RefMut};
+    /// let cell = RefCell::new(0);
+    ///
+    /// let value = RefMut::leak(cell.borrow_mut());
+    /// assert_eq!(*value, 0);
+    /// *value = 1;
+    ///
+    /// assert!(cell.try_borrow_mut().is_err());
+    /// ```
+    #[unstable(feature = "cell_leak", issue = "69099")]
+    pub fn leak(orig: RefMut<'b, T>) -> &'b mut T {
+        // By forgetting this BorrowRefMut we ensure that the borrow counter in the RefCell never
+        // goes back to UNUSED again. No further references can be created from the original cell,
+        // making the current borrow the only reference for the remaining lifetime.
+        mem::forget(orig.borrow);
+        orig.value
+    }
 }
 
 struct BorrowRefMut<'b> {
@@ -1475,6 +1538,7 @@
 #[lang = "unsafe_cell"]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[repr(transparent)]
+#[cfg_attr(not(bootstrap), repr(no_niche))] // rust-lang/rust#68303.
 pub struct UnsafeCell<T: ?Sized> {
     value: T,
 }
diff --git a/src/libcore/char/methods.rs b/src/libcore/char/methods.rs
index c341bb5..3024007 100644
--- a/src/libcore/char/methods.rs
+++ b/src/libcore/char/methods.rs
@@ -1072,9 +1072,13 @@
     /// assert!(!esc.is_ascii_alphabetic());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_alphabetic(&self) -> bool {
-        self.is_ascii() && (*self as u8).is_ascii_alphabetic()
+    pub const fn is_ascii_alphabetic(&self) -> bool {
+        match *self {
+            'A'..='Z' | 'a'..='z' => true,
+            _ => false,
+        }
     }
 
     /// Checks if the value is an ASCII uppercase character:
@@ -1104,9 +1108,13 @@
     /// assert!(!esc.is_ascii_uppercase());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_uppercase(&self) -> bool {
-        self.is_ascii() && (*self as u8).is_ascii_uppercase()
+    pub const fn is_ascii_uppercase(&self) -> bool {
+        match *self {
+            'A'..='Z' => true,
+            _ => false,
+        }
     }
 
     /// Checks if the value is an ASCII lowercase character:
@@ -1136,9 +1144,13 @@
     /// assert!(!esc.is_ascii_lowercase());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_lowercase(&self) -> bool {
-        self.is_ascii() && (*self as u8).is_ascii_lowercase()
+    pub const fn is_ascii_lowercase(&self) -> bool {
+        match *self {
+            'a'..='z' => true,
+            _ => false,
+        }
     }
 
     /// Checks if the value is an ASCII alphanumeric character:
@@ -1171,9 +1183,13 @@
     /// assert!(!esc.is_ascii_alphanumeric());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_alphanumeric(&self) -> bool {
-        self.is_ascii() && (*self as u8).is_ascii_alphanumeric()
+    pub const fn is_ascii_alphanumeric(&self) -> bool {
+        match *self {
+            '0'..='9' | 'A'..='Z' | 'a'..='z' => true,
+            _ => false,
+        }
     }
 
     /// Checks if the value is an ASCII decimal digit:
@@ -1203,9 +1219,13 @@
     /// assert!(!esc.is_ascii_digit());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_digit(&self) -> bool {
-        self.is_ascii() && (*self as u8).is_ascii_digit()
+    pub const fn is_ascii_digit(&self) -> bool {
+        match *self {
+            '0'..='9' => true,
+            _ => false,
+        }
     }
 
     /// Checks if the value is an ASCII hexadecimal digit:
@@ -1238,9 +1258,13 @@
     /// assert!(!esc.is_ascii_hexdigit());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_hexdigit(&self) -> bool {
-        self.is_ascii() && (*self as u8).is_ascii_hexdigit()
+    pub const fn is_ascii_hexdigit(&self) -> bool {
+        match *self {
+            '0'..='9' | 'A'..='F' | 'a'..='f' => true,
+            _ => false,
+        }
     }
 
     /// Checks if the value is an ASCII punctuation character:
@@ -1274,9 +1298,13 @@
     /// assert!(!esc.is_ascii_punctuation());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_punctuation(&self) -> bool {
-        self.is_ascii() && (*self as u8).is_ascii_punctuation()
+    pub const fn is_ascii_punctuation(&self) -> bool {
+        match *self {
+            '!'..='/' | ':'..='@' | '['..='`' | '{'..='~' => true,
+            _ => false,
+        }
     }
 
     /// Checks if the value is an ASCII graphic character:
@@ -1306,9 +1334,13 @@
     /// assert!(!esc.is_ascii_graphic());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_graphic(&self) -> bool {
-        self.is_ascii() && (*self as u8).is_ascii_graphic()
+    pub const fn is_ascii_graphic(&self) -> bool {
+        match *self {
+            '!'..='~' => true,
+            _ => false,
+        }
     }
 
     /// Checks if the value is an ASCII whitespace character:
@@ -1355,9 +1387,13 @@
     /// assert!(!esc.is_ascii_whitespace());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_whitespace(&self) -> bool {
-        self.is_ascii() && (*self as u8).is_ascii_whitespace()
+    pub const fn is_ascii_whitespace(&self) -> bool {
+        match *self {
+            '\t' | '\n' | '\x0C' | '\r' | ' ' => true,
+            _ => false,
+        }
     }
 
     /// Checks if the value is an ASCII control character:
@@ -1389,8 +1425,12 @@
     /// assert!(esc.is_ascii_control());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_control(&self) -> bool {
-        self.is_ascii() && (*self as u8).is_ascii_control()
+    pub const fn is_ascii_control(&self) -> bool {
+        match *self {
+            '\0'..='\x1F' | '\x7F' => true,
+            _ => false,
+        }
     }
 }
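A minimal standalone sketch of the pattern these `char` methods switch to (my own function name, not the std one): matching on character ranges is equivalent to the old `self.is_ascii() && (*self as u8).…` check, but it is straightforward to evaluate in a `const` context, which is what the `rustc_const_unstable` attribute gates on the real methods.

```rust
const fn is_ascii_digit_sketch(c: char) -> bool {
    match c {
        '0'..='9' => true,
        _ => false,
    }
}

// Usable in const contexts (this compiles on a current compiler; inside core
// at the time of this diff the const-ness was still feature-gated).
const SEVEN_IS_DIGIT: bool = is_ascii_digit_sketch('7');

fn main() {
    assert!(SEVEN_IS_DIGIT);
    assert!(!is_ascii_digit_sketch('x'));
}
```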
diff --git a/src/libcore/cmp.rs b/src/libcore/cmp.rs
index e41a7af..604be7d 100644
--- a/src/libcore/cmp.rs
+++ b/src/libcore/cmp.rs
@@ -361,6 +361,7 @@
     /// assert!(data == b);
     /// ```
     #[inline]
+    #[must_use]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn reverse(self) -> Ordering {
         match self {
@@ -398,6 +399,7 @@
     /// assert_eq!(result, Ordering::Less);
     /// ```
     #[inline]
+    #[must_use]
     #[stable(feature = "ordering_chaining", since = "1.17.0")]
     pub fn then(self, other: Ordering) -> Ordering {
         match self {
@@ -435,6 +437,7 @@
     /// assert_eq!(result, Ordering::Less);
     /// ```
     #[inline]
+    #[must_use]
     #[stable(feature = "ordering_chaining", since = "1.17.0")]
     pub fn then_with<F: FnOnce() -> Ordering>(self, f: F) -> Ordering {
         match self {
@@ -576,6 +579,7 @@
     /// assert_eq!(10.cmp(&5), Ordering::Greater);
     /// assert_eq!(5.cmp(&5), Ordering::Equal);
     /// ```
+    #[must_use]
     #[stable(feature = "rust1", since = "1.0.0")]
     fn cmp(&self, other: &Self) -> Ordering;
 
@@ -591,6 +595,7 @@
     /// ```
     #[stable(feature = "ord_max_min", since = "1.21.0")]
     #[inline]
+    #[must_use]
     fn max(self, other: Self) -> Self
     where
         Self: Sized,
@@ -610,6 +615,7 @@
     /// ```
     #[stable(feature = "ord_max_min", since = "1.21.0")]
     #[inline]
+    #[must_use]
     fn min(self, other: Self) -> Self
     where
         Self: Sized,
@@ -635,6 +641,7 @@
     /// assert!(0.clamp(-2, 1) == 0);
     /// assert!(2.clamp(-2, 1) == 1);
     /// ```
+    #[must_use]
     #[unstable(feature = "clamp", issue = "44095")]
     fn clamp(self, min: Self, max: Self) -> Self
     where
@@ -915,6 +922,7 @@
 /// assert_eq!(2, cmp::min(2, 2));
 /// ```
 #[inline]
+#[must_use]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub fn min<T: Ord>(v1: T, v2: T) -> T {
     v1.min(v2)
@@ -935,6 +943,7 @@
 /// assert_eq!(cmp::min_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), -2);
 /// ```
 #[inline]
+#[must_use]
 #[unstable(feature = "cmp_min_max_by", issue = "64460")]
 pub fn min_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T {
     match compare(&v1, &v2) {
@@ -958,6 +967,7 @@
 /// assert_eq!(cmp::min_by_key(-2, 2, |x: &i32| x.abs()), -2);
 /// ```
 #[inline]
+#[must_use]
 #[unstable(feature = "cmp_min_max_by", issue = "64460")]
 pub fn min_by_key<T, F: FnMut(&T) -> K, K: Ord>(v1: T, v2: T, mut f: F) -> T {
     min_by(v1, v2, |v1, v2| f(v1).cmp(&f(v2)))
@@ -978,6 +988,7 @@
 /// assert_eq!(2, cmp::max(2, 2));
 /// ```
 #[inline]
+#[must_use]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub fn max<T: Ord>(v1: T, v2: T) -> T {
     v1.max(v2)
@@ -998,6 +1009,7 @@
 /// assert_eq!(cmp::max_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), 2);
 /// ```
 #[inline]
+#[must_use]
 #[unstable(feature = "cmp_min_max_by", issue = "64460")]
 pub fn max_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T {
     match compare(&v1, &v2) {
@@ -1021,6 +1033,7 @@
 /// assert_eq!(cmp::max_by_key(-2, 2, |x: &i32| x.abs()), 2);
 /// ```
 #[inline]
+#[must_use]
 #[unstable(feature = "cmp_min_max_by", issue = "64460")]
 pub fn max_by_key<T, F: FnMut(&T) -> K, K: Ord>(v1: T, v2: T, mut f: F) -> T {
     max_by(v1, v2, |v1, v2| f(v1).cmp(&f(v2)))
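A small illustration of the kind of mistake the added `#[must_use]` attributes catch (a sketch, assuming nothing beyond the stable `Ordering` API): `reverse`, `then`, `max`, `min`, and friends return a new value rather than mutating in place, so silently discarding the result is almost always a bug.

```rust
use std::cmp::Ordering;

fn compare_desc(a: i32, b: i32) -> Ordering {
    let ord = a.cmp(&b);
    // ord.reverse();   // discarded result: now triggers the `unused_must_use` lint
    ord.reverse()       // correct: the reversed value is actually returned
}

fn main() {
    let mut v = vec![3, 1, 2];
    v.sort_by(|a, b| compare_desc(*a, *b));
    assert_eq!(v, [3, 2, 1]);
}
```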
diff --git a/src/libcore/fmt/float.rs b/src/libcore/fmt/float.rs
index 284e949..5ef6730 100644
--- a/src/libcore/fmt/float.rs
+++ b/src/libcore/fmt/float.rs
@@ -29,7 +29,6 @@
             *num,
             sign,
             precision,
-            false,
             buf.get_mut(),
             parts.get_mut(),
         );
@@ -59,7 +58,6 @@
             *num,
             sign,
             precision,
-            false,
             buf.get_mut(),
             parts.get_mut(),
         );
diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs
index 900ef63..993b107 100644
--- a/src/libcore/fmt/mod.rs
+++ b/src/libcore/fmt/mod.rs
@@ -238,16 +238,8 @@
 // NB. Argument is essentially an optimized partially applied formatting function,
 // equivalent to `exists T.(&T, fn(&T, &mut Formatter<'_>) -> Result`.
 
-struct Void {
-    _priv: (),
-    /// Erases all oibits, because `Void` erases the type of the object that
-    /// will be used to produce formatted output. Since we do not know what
-    /// oibits the real types have (and they can have any or none), we need to
-    /// take the most conservative approach and forbid all oibits.
-    ///
-    /// It was added after #45197 showed that one could share a `!Sync`
-    /// object across threads by passing it into `format_args!`.
-    _oibit_remover: PhantomData<*mut dyn Fn()>,
+extern "C" {
+    type Opaque;
 }
 
 /// This struct represents the generic "argument" which is taken by the Xprintf
@@ -259,16 +251,23 @@
 #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
 #[doc(hidden)]
 pub struct ArgumentV1<'a> {
-    value: &'a Void,
-    formatter: fn(&Void, &mut Formatter<'_>) -> Result,
+    value: &'a Opaque,
+    formatter: fn(&Opaque, &mut Formatter<'_>) -> Result,
 }
 
-impl<'a> ArgumentV1<'a> {
-    #[inline(never)]
-    fn show_usize(x: &usize, f: &mut Formatter<'_>) -> Result {
-        Display::fmt(x, f)
-    }
+// This guarantees a single stable value for the function pointer associated with
+// indices/counts in the formatting infrastructure.
+//
+// Note that a function defined as such would not be correct as functions are
+// always tagged unnamed_addr with the current lowering to LLVM IR, so their
+// address is not considered important to LLVM and as such the as_usize cast
+// could have been miscompiled. In practice, we never call as_usize on non-usize
+// containing data (as a matter of static generation of the formatting
+// arguments), so this is merely an additional check.
+#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
+static USIZE_MARKER: fn(&usize, &mut Formatter<'_>) -> Result = |_, _| loop {};
 
+impl<'a> ArgumentV1<'a> {
     #[doc(hidden)]
     #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
     pub fn new<'b, T>(x: &'b T, f: fn(&T, &mut Formatter<'_>) -> Result) -> ArgumentV1<'b> {
@@ -278,11 +277,13 @@
     #[doc(hidden)]
     #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
     pub fn from_usize(x: &usize) -> ArgumentV1<'_> {
-        ArgumentV1::new(x, ArgumentV1::show_usize)
+        ArgumentV1::new(x, USIZE_MARKER)
     }
 
     fn as_usize(&self) -> Option<usize> {
-        if self.formatter as usize == ArgumentV1::show_usize as usize {
+        if self.formatter as usize == USIZE_MARKER as usize {
+            // SAFETY: The `formatter` field is only set to USIZE_MARKER if
+            // the value is a usize, so this is safe
             Some(unsafe { *(self.value as *const _ as *const usize) })
         } else {
             None
@@ -1080,18 +1081,7 @@
     fmt.precision = getcount(args, &arg.format.precision);
 
     // Extract the correct argument
-    let value = {
-        #[cfg(bootstrap)]
-        {
-            match arg.position {
-                rt::v1::Position::At(i) => args[i],
-            }
-        }
-        #[cfg(not(bootstrap))]
-        {
-            args[arg.position]
-        }
-    };
+    let value = args[arg.position];
 
     // Then actually do some printing
     (value.formatter)(value.value, fmt)
@@ -1367,11 +1357,11 @@
             let mut align = old_align;
             if self.sign_aware_zero_pad() {
                 // a sign always goes first
-                let sign = unsafe { str::from_utf8_unchecked(formatted.sign) };
+                let sign = formatted.sign;
                 self.buf.write_str(sign)?;
 
                 // remove the sign from the formatted parts
-                formatted.sign = b"";
+                formatted.sign = "";
                 width = width.saturating_sub(sign.len());
                 align = rt::v1::Alignment::Right;
                 self.fill = '0';
@@ -1403,7 +1393,7 @@
         }
 
         if !formatted.sign.is_empty() {
-            write_bytes(self.buf, formatted.sign)?;
+            self.buf.write_str(formatted.sign)?;
         }
         for part in formatted.parts {
             match *part {
diff --git a/src/libcore/fmt/num.rs b/src/libcore/fmt/num.rs
index d562639..5dfd3a8 100644
--- a/src/libcore/fmt/num.rs
+++ b/src/libcore/fmt/num.rs
@@ -4,6 +4,7 @@
 
 use crate::fmt;
 use crate::mem::MaybeUninit;
+use crate::num::flt2dec;
 use crate::ops::{Div, Rem, Sub};
 use crate::ptr;
 use crate::slice;
@@ -256,6 +257,161 @@
     };
 }
 
+macro_rules! impl_Exp {
+    ($($t:ident),* as $u:ident via $conv_fn:ident named $name:ident) => {
+        fn $name(
+            mut n: $u,
+            is_nonnegative: bool,
+            upper: bool,
+            f: &mut fmt::Formatter<'_>
+        ) -> fmt::Result {
+            let (mut n, mut exponent, trailing_zeros, added_precision) = {
+                let mut exponent = 0;
+                // count and remove trailing decimal zeroes
+                while n % 10 == 0 && n >= 10 {
+                    n /= 10;
+                    exponent += 1;
+                }
+                let trailing_zeros = exponent;
+
+                let (added_precision, subtracted_precision) = match f.precision() {
+                    Some(fmt_prec) => {
+                        // number of decimal digits minus 1
+                        let mut tmp = n;
+                        let mut prec = 0;
+                        while tmp >= 10 {
+                            tmp /= 10;
+                            prec += 1;
+                        }
+                        (fmt_prec.saturating_sub(prec), prec.saturating_sub(fmt_prec))
+                    }
+                    None => (0, 0),
+                };
+                for _ in 1..subtracted_precision {
+                    n /= 10;
+                    exponent += 1;
+                }
+                if subtracted_precision != 0 {
+                    let rem = n % 10;
+                    n /= 10;
+                    exponent += 1;
+                    // round up last digit
+                    if rem >= 5 {
+                        n += 1;
+                    }
+                }
+                (n, exponent, trailing_zeros, added_precision)
+            };
+
+            // 39 digits (worst case u128) + . = 40
+            let mut buf = [MaybeUninit::<u8>::uninit(); 40];
+            let mut curr = buf.len() as isize; // index for buf
+            let buf_ptr = MaybeUninit::first_ptr_mut(&mut buf);
+            let lut_ptr = DEC_DIGITS_LUT.as_ptr();
+
+            // decode 2 chars at a time
+            while n >= 100 {
+                let d1 = ((n % 100) as isize) << 1;
+                curr -= 2;
+                unsafe {
+                    ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);
+                }
+                n /= 100;
+                exponent += 2;
+            }
+            // n is <= 99, so at most 2 chars long
+            let mut n = n as isize; // possibly reduce 64bit math
+            // decode second-to-last character
+            if n >= 10 {
+                curr -= 1;
+                unsafe {
+                    *buf_ptr.offset(curr) = (n as u8 % 10_u8) + b'0';
+                }
+                n /= 10;
+                exponent += 1;
+            }
+            // add decimal point iff >1 mantissa digit will be printed
+            if exponent != trailing_zeros || added_precision != 0 {
+                curr -= 1;
+                unsafe {
+                    *buf_ptr.offset(curr) = b'.';
+                }
+            }
+
+            let buf_slice = unsafe {
+                // decode last character
+                curr -= 1;
+                *buf_ptr.offset(curr) = (n as u8) + b'0';
+
+                let len = buf.len() - curr as usize;
+                slice::from_raw_parts(buf_ptr.offset(curr), len)
+            };
+
+            // stores 'e' (or 'E') and the up to 2-digit exponent
+            let mut exp_buf = [MaybeUninit::<u8>::uninit(); 3];
+            let exp_ptr = MaybeUninit::first_ptr_mut(&mut exp_buf);
+            let exp_slice = unsafe {
+                *exp_ptr.offset(0) = if upper {b'E'} else {b'e'};
+                let len = if exponent < 10 {
+                    *exp_ptr.offset(1) = (exponent as u8) + b'0';
+                    2
+                } else {
+                    let off = exponent << 1;
+                    ptr::copy_nonoverlapping(lut_ptr.offset(off), exp_ptr.offset(1), 2);
+                    3
+                };
+                slice::from_raw_parts(exp_ptr, len)
+            };
+
+            let parts = &[
+                flt2dec::Part::Copy(buf_slice),
+                flt2dec::Part::Zero(added_precision),
+                flt2dec::Part::Copy(exp_slice)
+            ];
+            let sign = if !is_nonnegative {
+                "-"
+            } else if f.sign_plus() {
+                "+"
+            } else {
+                ""
+            };
+            let formatted = flt2dec::Formatted{sign, parts};
+            f.pad_formatted_parts(&formatted)
+        }
+
+        $(
+            #[stable(feature = "integer_exp_format", since = "1.42.0")]
+            impl fmt::LowerExp for $t {
+                #[allow(unused_comparisons)]
+                fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+                    let is_nonnegative = *self >= 0;
+                    let n = if is_nonnegative {
+                        self.$conv_fn()
+                    } else {
+                        // convert the negative number to positive by adding 1 to its two's complement
+                        (!self.$conv_fn()).wrapping_add(1)
+                    };
+                    $name(n, is_nonnegative, false, f)
+                }
+            })*
+        $(
+            #[stable(feature = "integer_exp_format", since = "1.42.0")]
+            impl fmt::UpperExp for $t {
+                #[allow(unused_comparisons)]
+                fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+                    let is_nonnegative = *self >= 0;
+                    let n = if is_nonnegative {
+                        self.$conv_fn()
+                    } else {
+                        // convert the negative number to positive by adding 1 to its two's complement
+                        (!self.$conv_fn()).wrapping_add(1)
+                    };
+                    $name(n, is_nonnegative, true, f)
+                }
+            })*
+    };
+}
+
 // Include wasm32 in here since it doesn't reflect the native pointer size, and
 // often cares strongly about getting a smaller code size.
 #[cfg(any(target_pointer_width = "64", target_arch = "wasm32"))]
@@ -265,6 +421,10 @@
         i8, u8, i16, u16, i32, u32, i64, u64, usize, isize
             as u64 via to_u64 named fmt_u64
     );
+    impl_Exp!(
+        i8, u8, i16, u16, i32, u32, i64, u64, usize, isize
+            as u64 via to_u64 named exp_u64
+    );
 }
 
 #[cfg(not(any(target_pointer_width = "64", target_arch = "wasm32")))]
@@ -272,6 +432,9 @@
     use super::*;
     impl_Display!(i8, u8, i16, u16, i32, u32, isize, usize as u32 via to_u32 named fmt_u32);
     impl_Display!(i64, u64 as u64 via to_u64 named fmt_u64);
+    impl_Exp!(i8, u8, i16, u16, i32, u32, isize, usize as u32 via to_u32 named exp_u32);
+    impl_Exp!(i64, u64 as u64 via to_u64 named exp_u64);
 }
 
 impl_Display!(i128, u128 as u128 via to_u128 named fmt_u128);
+impl_Exp!(i128, u128 as u128 via to_u128 named exp_u128);
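With the `impl_Exp!` expansions above, integers gain `LowerExp`/`UpperExp` implementations (stabilized as `integer_exp_format` in 1.42.0, per the attributes), so `{:e}`-style formatting works on them directly. A quick sketch of the expected output:

```rust
fn main() {
    assert_eq!(format!("{:e}", 1234), "1.234e3");
    assert_eq!(format!("{:E}", -1234), "-1.234E3");
    // Precision trims (with rounding) or zero-pads the mantissa.
    assert_eq!(format!("{:.2e}", 1234), "1.23e3");
    assert_eq!(format!("{:.5e}", 1234), "1.23400e3");
}
```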
diff --git a/src/libcore/fmt/rt/v1.rs b/src/libcore/fmt/rt/v1.rs
index fd81f932..f646047 100644
--- a/src/libcore/fmt/rt/v1.rs
+++ b/src/libcore/fmt/rt/v1.rs
@@ -7,9 +7,6 @@
 
 #[derive(Copy, Clone)]
 pub struct Argument {
-    #[cfg(bootstrap)]
-    pub position: Position,
-    #[cfg(not(bootstrap))]
     pub position: usize,
     pub format: FormatSpec,
 }
@@ -42,9 +39,3 @@
     Param(usize),
     Implied,
 }
-
-#[cfg(bootstrap)]
-#[derive(Copy, Clone)]
-pub enum Position {
-    At(usize),
-}
diff --git a/src/libcore/hash/sip.rs b/src/libcore/hash/sip.rs
index 7ebe01e..aa50e7c 100644
--- a/src/libcore/hash/sip.rs
+++ b/src/libcore/hash/sip.rs
@@ -121,7 +121,9 @@
     }};
 }
 
-/// Loads an u64 using up to 7 bytes of a byte slice.
+/// Loads a u64 using up to 7 bytes of a byte slice. It looks clumsy but the
+/// `copy_nonoverlapping` calls that occur (via `load_int_le!`) all have fixed
+/// sizes and avoid calling `memcpy`, which is good for speed.
 ///
 /// Unsafe because: unchecked indexing at start..start+len
 #[inline]
@@ -302,7 +304,7 @@
 
         if self.ntail != 0 {
             needed = 8 - self.ntail;
-            self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << 8 * self.ntail;
+            self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << (8 * self.ntail);
             if length < needed {
                 self.ntail += length;
                 return;
diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs
index 416c73f..43f8cfc 100644
--- a/src/libcore/intrinsics.rs
+++ b/src/libcore/intrinsics.rs
@@ -1305,9 +1305,11 @@
 
     /// Performs an unchecked division, resulting in undefined behavior
     /// where y = 0 or x = `T::min_value()` and y = -1
+    #[rustc_const_unstable(feature = "const_int_unchecked_arith", issue = "none")]
     pub fn unchecked_div<T>(x: T, y: T) -> T;
     /// Returns the remainder of an unchecked division, resulting in
     /// undefined behavior where y = 0 or x = `T::min_value()` and y = -1
+    #[rustc_const_unstable(feature = "const_int_unchecked_arith", issue = "none")]
     pub fn unchecked_rem<T>(x: T, y: T) -> T;
 
     /// Performs an unchecked left shift, resulting in undefined behavior when
@@ -1321,14 +1323,17 @@
 
     /// Returns the result of an unchecked addition, resulting in
     /// undefined behavior when `x + y > T::max_value()` or `x + y < T::min_value()`.
+    #[rustc_const_unstable(feature = "const_int_unchecked_arith", issue = "none")]
     pub fn unchecked_add<T>(x: T, y: T) -> T;
 
     /// Returns the result of an unchecked subtraction, resulting in
     /// undefined behavior when `x - y > T::max_value()` or `x - y < T::min_value()`.
+    #[rustc_const_unstable(feature = "const_int_unchecked_arith", issue = "none")]
     pub fn unchecked_sub<T>(x: T, y: T) -> T;
 
     /// Returns the result of an unchecked multiplication, resulting in
     /// undefined behavior when `x * y > T::max_value()` or `x * y < T::min_value()`.
+    #[rustc_const_unstable(feature = "const_int_unchecked_arith", issue = "none")]
     pub fn unchecked_mul<T>(x: T, y: T) -> T;
 
     /// Performs rotate left.
@@ -1510,6 +1515,7 @@
 /// ```
 ///
 /// [`Vec::append`]: ../../std/vec/struct.Vec.html#method.append
+#[doc(alias = "memcpy")]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[inline]
 pub unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
@@ -1574,6 +1580,7 @@
 ///     dst
 /// }
 /// ```
+#[doc(alias = "memmove")]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[inline]
 pub unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
diff --git a/src/libcore/iter/adapters/mod.rs b/src/libcore/iter/adapters/mod.rs
index 7d10ef3..5860653 100644
--- a/src/libcore/iter/adapters/mod.rs
+++ b/src/libcore/iter/adapters/mod.rs
@@ -1890,17 +1890,15 @@
     #[inline]
     fn nth(&mut self, n: usize) -> Option<I::Item> {
         // Can't just add n + self.n due to overflow.
-        if self.n == 0 {
-            self.iter.nth(n)
-        } else {
+        if self.n > 0 {
             let to_skip = self.n;
             self.n = 0;
             // nth(n) skips n+1
             if self.iter.nth(to_skip - 1).is_none() {
                 return None;
             }
-            self.iter.nth(n)
         }
+        self.iter.nth(n)
     }
 
     #[inline]
@@ -1916,17 +1914,13 @@
 
     #[inline]
     fn last(mut self) -> Option<I::Item> {
-        if self.n == 0 {
-            self.iter.last()
-        } else {
-            let next = self.next();
-            if next.is_some() {
-                // recurse. n should be 0.
-                self.last().or(next)
-            } else {
-                None
+        if self.n > 0 {
+            // nth(n) skips n+1
+            if self.iter.nth(self.n - 1).is_none() {
+                return None;
             }
         }
+        self.iter.last()
     }
 
     #[inline]
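For context on the `Skip::nth`/`Skip::last` refactor above, a hedged sketch of the observable behavior it has to preserve: the skip amount is exhausted first (without ever computing `n + self.n`, which could overflow), and only then is the call delegated to the inner iterator.

```rust
fn main() {
    let v: Vec<u32> = (0..10).collect();

    // skip(3) leaves 3..10; nth(2) of that is 5.
    assert_eq!(v.iter().skip(3).nth(2), Some(&5));

    // last() after a skip is simply the last element, if any survives the skip.
    assert_eq!(v.iter().skip(3).last(), Some(&9));
    assert_eq!(v.iter().skip(20).last(), None);
}
```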
diff --git a/src/libcore/iter/range.rs b/src/libcore/iter/range.rs
index eac3c10..28fbd00 100644
--- a/src/libcore/iter/range.rs
+++ b/src/libcore/iter/range.rs
@@ -341,16 +341,15 @@
 
     #[inline]
     fn next(&mut self) -> Option<A> {
-        self.compute_is_empty();
-        if self.is_empty.unwrap_or_default() {
+        if self.is_empty() {
             return None;
         }
         let is_iterating = self.start < self.end;
-        self.is_empty = Some(!is_iterating);
         Some(if is_iterating {
             let n = self.start.add_one();
             mem::replace(&mut self.start, n)
         } else {
+            self.exhausted = true;
             self.start.clone()
         })
     }
@@ -369,8 +368,7 @@
 
     #[inline]
     fn nth(&mut self, n: usize) -> Option<A> {
-        self.compute_is_empty();
-        if self.is_empty.unwrap_or_default() {
+        if self.is_empty() {
             return None;
         }
 
@@ -379,19 +377,20 @@
 
             match plus_n.partial_cmp(&self.end) {
                 Some(Less) => {
-                    self.is_empty = Some(false);
                     self.start = plus_n.add_one();
                     return Some(plus_n);
                 }
                 Some(Equal) => {
-                    self.is_empty = Some(true);
+                    self.start = plus_n.clone();
+                    self.exhausted = true;
                     return Some(plus_n);
                 }
                 _ => {}
             }
         }
 
-        self.is_empty = Some(true);
+        self.start = self.end.clone();
+        self.exhausted = true;
         None
     }
 
@@ -402,8 +401,6 @@
         F: FnMut(B, Self::Item) -> R,
         R: Try<Ok = B>,
     {
-        self.compute_is_empty();
-
         if self.is_empty() {
             return Try::from_ok(init);
         }
@@ -416,7 +413,7 @@
             accum = f(accum, n)?;
         }
 
-        self.is_empty = Some(true);
+        self.exhausted = true;
 
         if self.start == self.end {
             accum = f(accum, self.start.clone())?;
@@ -445,24 +442,22 @@
 impl<A: Step> DoubleEndedIterator for ops::RangeInclusive<A> {
     #[inline]
     fn next_back(&mut self) -> Option<A> {
-        self.compute_is_empty();
-        if self.is_empty.unwrap_or_default() {
+        if self.is_empty() {
             return None;
         }
         let is_iterating = self.start < self.end;
-        self.is_empty = Some(!is_iterating);
         Some(if is_iterating {
             let n = self.end.sub_one();
             mem::replace(&mut self.end, n)
         } else {
+            self.exhausted = true;
             self.end.clone()
         })
     }
 
     #[inline]
     fn nth_back(&mut self, n: usize) -> Option<A> {
-        self.compute_is_empty();
-        if self.is_empty.unwrap_or_default() {
+        if self.is_empty() {
             return None;
         }
 
@@ -471,19 +466,20 @@
 
             match minus_n.partial_cmp(&self.start) {
                 Some(Greater) => {
-                    self.is_empty = Some(false);
                     self.end = minus_n.sub_one();
                     return Some(minus_n);
                 }
                 Some(Equal) => {
-                    self.is_empty = Some(true);
+                    self.end = minus_n.clone();
+                    self.exhausted = true;
                     return Some(minus_n);
                 }
                 _ => {}
             }
         }
 
-        self.is_empty = Some(true);
+        self.end = self.start.clone();
+        self.exhausted = true;
         None
     }
 
@@ -494,8 +490,6 @@
         F: FnMut(B, Self::Item) -> R,
         R: Try<Ok = B>,
     {
-        self.compute_is_empty();
-
         if self.is_empty() {
             return Try::from_ok(init);
         }
@@ -508,7 +502,7 @@
             accum = f(accum, n)?;
         }
 
-        self.is_empty = Some(true);
+        self.exhausted = true;
 
         if self.start == self.end {
             accum = f(accum, self.start.clone())?;
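
The new `exhausted` flag takes over from the removed `is_empty: Option<bool>` cache: it only flips to `true` once iteration hands out the final element, and `is_empty()` reports `true` from then on. The observable contract, exercised through the stable API:

```rust
fn main() {
    let mut r = 1..=2;
    assert!(!r.is_empty());
    assert_eq!(r.next(), Some(1));
    assert!(!r.is_empty());        // one element left
    assert_eq!(r.next(), Some(2)); // iterator is now exhausted
    assert!(r.is_empty());
    assert_eq!(r.next(), None);
}
```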
diff --git a/src/libcore/iter/sources.rs b/src/libcore/iter/sources.rs
index 5a31aca..a1d4e1b 100644
--- a/src/libcore/iter/sources.rs
+++ b/src/libcore/iter/sources.rs
@@ -398,7 +398,7 @@
 /// See its documentation for more.
 ///
 /// [`once_with`]: fn.once_with.html
-#[derive(Copy, Clone, Debug)]
+#[derive(Clone, Debug)]
 #[stable(feature = "iter_once_with", since = "1.43.0")]
 pub struct OnceWith<F> {
     gen: Option<F>,
diff --git a/src/libcore/iter/traits/iterator.rs b/src/libcore/iter/traits/iterator.rs
index 1d05567..6a529bf 100644
--- a/src/libcore/iter/traits/iterator.rs
+++ b/src/libcore/iter/traits/iterator.rs
@@ -719,6 +719,8 @@
     /// ```
     ///
     /// of these layers.
+    ///
+    /// Note that `iter.filter(f).next()` is equivalent to `iter.find(f)`.
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     fn filter<P>(self, predicate: P) -> Filter<Self, P>
@@ -2152,6 +2154,8 @@
     /// // we can still use `iter`, as there are more elements.
     /// assert_eq!(iter.next(), Some(&3));
     /// ```
+    ///
+    /// Note that `iter.find(f)` is equivalent to `iter.filter(f).next()`.
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
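
The two doc notes point at each other; concretely, the equivalence they describe is:

```rust
fn main() {
    let v = [1, 2, 3, 4];
    let found = v.iter().find(|&&x| x % 2 == 0);
    let filtered = v.iter().filter(|&&x| x % 2 == 0).next();
    assert_eq!(found, filtered); // both are Some(&2)
}
```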
diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs
index 7738ea2..7bd1d00 100644
--- a/src/libcore/lib.rs
+++ b/src/libcore/lib.rs
@@ -70,8 +70,17 @@
 #![feature(bound_cloned)]
 #![feature(cfg_target_has_atomic)]
 #![feature(concat_idents)]
+#![feature(const_ascii_ctype_on_intrinsics)]
 #![feature(const_alloc_layout)]
 #![feature(const_if_match)]
+#![feature(const_loop)]
+#![feature(const_checked_int_methods)]
+#![feature(const_euclidean_int_methods)]
+#![feature(const_overflowing_int_methods)]
+#![feature(const_saturating_int_methods)]
+#![feature(const_int_unchecked_arith)]
+#![feature(const_int_pow)]
+#![feature(constctlz)]
 #![feature(const_panic)]
 #![feature(const_fn_union)]
 #![feature(const_generics)]
@@ -132,8 +141,8 @@
 #![feature(associated_type_bounds)]
 #![feature(const_type_id)]
 #![feature(const_caller_location)]
-#![cfg_attr(bootstrap, feature(slice_patterns))]
 #![feature(assoc_int_consts)]
+#![cfg_attr(not(bootstrap), feature(no_niche))] // rust-lang/rust#68303
 
 #[prelude_import]
 #[allow(unused)]
@@ -258,6 +267,9 @@
 mod tuple;
 mod unit;
 
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub mod primitive;
+
 // Pull in the `core_arch` crate directly into libcore. The contents of
 // `core_arch` are in a different repository: rust-lang/stdarch.
 //
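
`core::primitive` (re-exported as `std::primitive`) exists so that macro-generated code can always name the primitive types, even when a local item shadows one of them. A small sketch of the kind of situation it resolves (assumed motivation, my own example):

```rust
#[allow(non_camel_case_types)]
struct u8; // a local type that shadows the primitive name

fn main() {
    // Bare `u8` now refers to the struct above, but the primitive stays reachable:
    let byte: std::primitive::u8 = 255;
    assert_eq!(byte, 255);
    let _shadow = u8; // the unit struct
}
```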
diff --git a/src/libcore/marker.rs b/src/libcore/marker.rs
index b4b595f..2800f11 100644
--- a/src/libcore/marker.rs
+++ b/src/libcore/marker.rs
@@ -727,6 +727,10 @@
 /// [`Pin<P>`]: ../pin/struct.Pin.html
 /// [`pin module`]: ../../std/pin/index.html
 #[stable(feature = "pin", since = "1.33.0")]
+#[rustc_on_unimplemented(
+    on(_Self = "std::future::Future", note = "consider using `Box::pin`",),
+    message = "`{Self}` cannot be unpinned"
+)]
 #[lang = "unpin"]
 pub auto trait Unpin {}
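
The new `rustc_on_unimplemented` attribute means that passing a non-`Unpin` future where `Unpin` is required now reports "`{Self}` cannot be unpinned" and suggests `Box::pin`. A sketch of the suggested fix (edition 2018+, my own example):

```rust
use std::future::Future;
use std::pin::Pin;

fn needs_unpin<T: Unpin>(_: &T) {}

fn main() {
    let fut = async { 1 };
    // needs_unpin(&fut); // error: `...` cannot be unpinned; note: consider using `Box::pin`
    let boxed: Pin<Box<dyn Future<Output = i32>>> = Box::pin(fut);
    needs_unpin(&boxed); // Pin<Box<_>> is Unpin, so this compiles
}
```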
 
diff --git a/src/libcore/mem/maybe_uninit.rs b/src/libcore/mem/maybe_uninit.rs
index 5fb3c65..58aaac2 100644
--- a/src/libcore/mem/maybe_uninit.rs
+++ b/src/libcore/mem/maybe_uninit.rs
@@ -669,7 +669,7 @@
     /// // Now we can use `buf` as a normal slice:
     /// buf.sort_unstable();
     /// assert!(
-    ///     buf.chunks(2).all(|chunk| chunk[0] <= chunk[1]),
+    ///     buf.windows(2).all(|pair| pair[0] <= pair[1]),
     ///     "buffer is sorted",
     /// );
     /// ```
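
The doc fix matters because `chunks(2)` only compares elements within each pair, so it can call an unsorted buffer sorted; `windows(2)` looks at every adjacent pair. For instance:

```rust
fn main() {
    let buf = [1, 3, 2, 4];
    // chunks(2) misses the out-of-order boundary between 3 and 2:
    assert!(buf.chunks(2).all(|chunk| chunk[0] <= chunk[1]));
    // windows(2) checks every adjacent pair and correctly rejects the buffer:
    assert!(!buf.windows(2).all(|pair| pair[0] <= pair[1]));
}
```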
diff --git a/src/libcore/num/f32.rs b/src/libcore/num/f32.rs
index da8dd9a..0a4fc64 100644
--- a/src/libcore/num/f32.rs
+++ b/src/libcore/num/f32.rs
@@ -130,7 +130,7 @@
     pub const LOG2_E: f32 = 1.44269504088896340735992468100189214_f32;
 
     /// log<sub>2</sub>(10)
-    #[unstable(feature = "extra_log_consts", issue = "50540")]
+    #[stable(feature = "extra_log_consts", since = "1.43.0")]
     pub const LOG2_10: f32 = 3.32192809488736234787031942948939018_f32;
 
     /// log<sub>10</sub>(e)
@@ -138,7 +138,7 @@
     pub const LOG10_E: f32 = 0.434294481903251827651128918916605082_f32;
 
     /// log<sub>10</sub>(2)
-    #[unstable(feature = "extra_log_consts", issue = "50540")]
+    #[stable(feature = "extra_log_consts", since = "1.43.0")]
     pub const LOG10_2: f32 = 0.301029995663981195213738894724493027_f32;
 
     /// ln(2)
diff --git a/src/libcore/num/f64.rs b/src/libcore/num/f64.rs
index a6081f1..a3acf0f 100644
--- a/src/libcore/num/f64.rs
+++ b/src/libcore/num/f64.rs
@@ -126,7 +126,7 @@
     pub const E: f64 = 2.71828182845904523536028747135266250_f64;
 
     /// log<sub>2</sub>(10)
-    #[unstable(feature = "extra_log_consts", issue = "50540")]
+    #[stable(feature = "extra_log_consts", since = "1.43.0")]
     pub const LOG2_10: f64 = 3.32192809488736234787031942948939018_f64;
 
     /// log<sub>2</sub>(e)
@@ -134,7 +134,7 @@
     pub const LOG2_E: f64 = 1.44269504088896340735992468100189214_f64;
 
     /// log<sub>10</sub>(2)
-    #[unstable(feature = "extra_log_consts", issue = "50540")]
+    #[stable(feature = "extra_log_consts", since = "1.43.0")]
     pub const LOG10_2: f64 = 0.301029995663981195213738894724493027_f64;
 
     /// log<sub>10</sub>(e)
diff --git a/src/libcore/num/flt2dec/mod.rs b/src/libcore/num/flt2dec/mod.rs
index 9e760c1..f5cd26a 100644
--- a/src/libcore/num/flt2dec/mod.rs
+++ b/src/libcore/num/flt2dec/mod.rs
@@ -237,7 +237,7 @@
 #[derive(Clone)]
 pub struct Formatted<'a> {
     /// A byte slice representing a sign, either `""`, `"-"` or `"+"`.
-    pub sign: &'static [u8],
+    pub sign: &'static str,
     /// Formatted parts to be rendered after a sign and optional zero padding.
     pub parts: &'a [Part<'a>],
 }
@@ -259,7 +259,7 @@
         if out.len() < self.sign.len() {
             return None;
         }
-        out[..self.sign.len()].copy_from_slice(self.sign);
+        out[..self.sign.len()].copy_from_slice(self.sign.as_bytes());
 
         let mut written = self.sign.len();
         for part in self.parts {
@@ -402,38 +402,38 @@
 }
 
 /// Returns the static byte string corresponding to the sign to be formatted.
-/// It can be either `b""`, `b"+"` or `b"-"`.
-fn determine_sign(sign: Sign, decoded: &FullDecoded, negative: bool) -> &'static [u8] {
+/// It can be either `""`, `"+"` or `"-"`.
+fn determine_sign(sign: Sign, decoded: &FullDecoded, negative: bool) -> &'static str {
     match (*decoded, sign) {
-        (FullDecoded::Nan, _) => b"",
-        (FullDecoded::Zero, Sign::Minus) => b"",
+        (FullDecoded::Nan, _) => "",
+        (FullDecoded::Zero, Sign::Minus) => "",
         (FullDecoded::Zero, Sign::MinusRaw) => {
             if negative {
-                b"-"
+                "-"
             } else {
-                b""
+                ""
             }
         }
-        (FullDecoded::Zero, Sign::MinusPlus) => b"+",
+        (FullDecoded::Zero, Sign::MinusPlus) => "+",
         (FullDecoded::Zero, Sign::MinusPlusRaw) => {
             if negative {
-                b"-"
+                "-"
             } else {
-                b"+"
+                "+"
             }
         }
         (_, Sign::Minus) | (_, Sign::MinusRaw) => {
             if negative {
-                b"-"
+                "-"
             } else {
-                b""
+                ""
             }
         }
         (_, Sign::MinusPlus) | (_, Sign::MinusPlusRaw) => {
             if negative {
-                b"-"
+                "-"
             } else {
-                b"+"
+                "+"
             }
         }
     }
@@ -462,7 +462,6 @@
     v: T,
     sign: Sign,
     frac_digits: usize,
-    _upper: bool,
     buf: &'a mut [u8],
     parts: &'a mut [Part<'a>],
 ) -> Formatted<'a>
@@ -679,7 +678,6 @@
     v: T,
     sign: Sign,
     frac_digits: usize,
-    _upper: bool,
     buf: &'a mut [u8],
     parts: &'a mut [Part<'a>],
 ) -> Formatted<'a>
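
With `sign` now a `&'static str` instead of a byte slice, the write path simply goes through `as_bytes()`; the sign strings are ASCII, so length and contents are identical under both views. Trivial illustration (not from the patch):

```rust
fn main() {
    let sign: &'static str = "-";
    let mut out = [b' '; 8];
    out[..sign.len()].copy_from_slice(sign.as_bytes());
    assert_eq!(&out[..1], b"-");
}
```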
diff --git a/src/libcore/num/mod.rs b/src/libcore/num/mod.rs
index 39c7d6d2..43c5f7c 100644
--- a/src/libcore/num/mod.rs
+++ b/src/libcore/num/mod.rs
@@ -8,9 +8,18 @@
 use crate::fmt;
 use crate::intrinsics;
 use crate::mem;
-use crate::ops;
 use crate::str::FromStr;
 
+// Used because the `?` operator is not allowed in a const context.
+macro_rules! try_opt {
+    ($e:expr) => {
+        match $e {
+            Some(x) => x,
+            None => return None,
+        }
+    };
+}
+
 macro_rules! impl_nonzero_fmt {
     ( #[$stability: meta] ( $( $Trait: ident ),+ ) for $Ty: ident ) => {
         $(
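
`try_opt!` stands in for `?` because the `?` operator (and the `Try` machinery behind it) isn't usable in `const fn`; an explicit `match` with an early `return None` is. The same pattern in a standalone const function (hypothetical helper, not from the patch):

```rust
macro_rules! try_opt {
    ($e:expr) => {
        match $e {
            Some(x) => x,
            None => return None,
        }
    };
}

const fn checked_double(x: u8) -> Option<u8> {
    // `x.checked_add(x)?` would not compile in a const fn.
    Some(try_opt!(x.checked_add(x)))
}

fn main() {
    assert_eq!(checked_double(100), Some(200));
    assert_eq!(checked_double(200), None);
}
```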
@@ -69,8 +78,9 @@
 
                 /// Creates a non-zero if the given value is not zero.
                 #[$stability]
+                #[rustc_const_unstable(feature = "const_nonzero_int_methods", issue = "53718")]
                 #[inline]
-                pub fn new(n: $Int) -> Option<Self> {
+                pub const fn new(n: $Int) -> Option<Self> {
                     if n != 0 {
                         // SAFETY: we just checked that there's no `0`
                         Some(unsafe { Self(n) })
@@ -701,10 +711,11 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "rust1", since = "1.0.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_add(self, rhs: Self) -> Option<Self> {
+            pub const fn checked_add(self, rhs: Self) -> Option<Self> {
                 let (a, b) = self.overflowing_add(rhs);
                 if b {None} else {Some(a)}
             }
@@ -725,10 +736,11 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "rust1", since = "1.0.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_sub(self, rhs: Self) -> Option<Self> {
+            pub const fn checked_sub(self, rhs: Self) -> Option<Self> {
                 let (a, b) = self.overflowing_sub(rhs);
                 if b {None} else {Some(a)}
             }
@@ -749,10 +761,11 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "rust1", since = "1.0.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_mul(self, rhs: Self) -> Option<Self> {
+            pub const fn checked_mul(self, rhs: Self) -> Option<Self> {
                 let (a, b) = self.overflowing_mul(rhs);
                 if b {None} else {Some(a)}
             }
@@ -774,10 +787,11 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "rust1", since = "1.0.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_div(self, rhs: Self) -> Option<Self> {
+            pub const fn checked_div(self, rhs: Self) -> Option<Self> {
                 if rhs == 0 || (self == Self::min_value() && rhs == -1) {
                     None
                 } else {
@@ -802,10 +816,11 @@
 assert_eq!((1", stringify!($SelfT), ").checked_div_euclid(0), None);
 ```"),
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_div_euclid(self, rhs: Self) -> Option<Self> {
+            pub const fn checked_div_euclid(self, rhs: Self) -> Option<Self> {
                 if rhs == 0 || (self == Self::min_value() && rhs == -1) {
                     None
                 } else {
@@ -831,10 +846,11 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "wrapping", since = "1.7.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_rem(self, rhs: Self) -> Option<Self> {
+            pub const fn checked_rem(self, rhs: Self) -> Option<Self> {
                 if rhs == 0 || (self == Self::min_value() && rhs == -1) {
                     None
                 } else {
@@ -860,10 +876,11 @@
 assert_eq!(", stringify!($SelfT), "::MIN.checked_rem_euclid(-1), None);
 ```"),
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_rem_euclid(self, rhs: Self) -> Option<Self> {
+            pub const fn checked_rem_euclid(self, rhs: Self) -> Option<Self> {
                 if rhs == 0 || (self == Self::min_value() && rhs == -1) {
                     None
                 } else {
@@ -887,8 +904,9 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "wrapping", since = "1.7.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[inline]
-            pub fn checked_neg(self) -> Option<Self> {
+            pub const fn checked_neg(self) -> Option<Self> {
                 let (a, b) = self.overflowing_neg();
                 if b {None} else {Some(a)}
             }
@@ -908,10 +926,11 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "wrapping", since = "1.7.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_shl(self, rhs: u32) -> Option<Self> {
+            pub const fn checked_shl(self, rhs: u32) -> Option<Self> {
                 let (a, b) = self.overflowing_shl(rhs);
                 if b {None} else {Some(a)}
             }
@@ -931,10 +950,11 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "wrapping", since = "1.7.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_shr(self, rhs: u32) -> Option<Self> {
+            pub const fn checked_shr(self, rhs: u32) -> Option<Self> {
                 let (a, b) = self.overflowing_shr(rhs);
                 if b {None} else {Some(a)}
             }
@@ -956,8 +976,9 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "no_panic_abs", since = "1.13.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[inline]
-            pub fn checked_abs(self) -> Option<Self> {
+            pub const fn checked_abs(self) -> Option<Self> {
                 if self.is_negative() {
                     self.checked_neg()
                 } else {
@@ -981,26 +1002,27 @@
 ```"),
 
             #[stable(feature = "no_panic_pow", since = "1.34.0")]
+            #[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_pow(self, mut exp: u32) -> Option<Self> {
+            pub const fn checked_pow(self, mut exp: u32) -> Option<Self> {
                 let mut base = self;
                 let mut acc: Self = 1;
 
                 while exp > 1 {
                     if (exp & 1) == 1 {
-                        acc = acc.checked_mul(base)?;
+                        acc = try_opt!(acc.checked_mul(base));
                     }
                     exp /= 2;
-                    base = base.checked_mul(base)?;
+                    base = try_opt!(base.checked_mul(base));
                 }
 
                 // Deal with the final bit of the exponent separately, since
                 // squaring the base afterwards is not necessary and may cause a
                 // needless overflow.
                 if exp == 1 {
-                    acc = acc.checked_mul(base)?;
+                    acc = try_opt!(acc.checked_mul(base));
                 }
 
                 Some(acc)
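
`checked_pow` is plain exponentiation by squaring with every multiply checked; the last multiply is hoisted out of the loop so the base isn't squared one extra time after its final use (which could overflow spuriously). The same shape as a standalone helper for `u32` (my sketch, not the macro-generated method):

```rust
fn checked_pow_u32(mut base: u32, mut exp: u32) -> Option<u32> {
    let mut acc: u32 = 1;
    while exp > 1 {
        if exp & 1 == 1 {
            acc = acc.checked_mul(base)?;
        }
        exp /= 2;
        base = base.checked_mul(base)?; // square the base for the next exponent bit
    }
    // Final bit handled here, so the base is not squared again after its last use.
    if exp == 1 {
        acc = acc.checked_mul(base)?;
    }
    Some(acc)
}

fn main() {
    assert_eq!(checked_pow_u32(3, 5), Some(243));
    assert_eq!(checked_pow_u32(2, 31), Some(1 << 31));
    assert_eq!(checked_pow_u32(2, 32), None);
}
```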
@@ -1080,8 +1102,9 @@
 ```"),
 
             #[unstable(feature = "saturating_neg", issue = "59983")]
+            #[rustc_const_unstable(feature = "const_saturating_int_methods", issue = "53718")]
             #[inline]
-            pub fn saturating_neg(self) -> Self {
+            pub const fn saturating_neg(self) -> Self {
                 intrinsics::saturating_sub(0, self)
             }
         }
@@ -1106,8 +1129,9 @@
 ```"),
 
             #[unstable(feature = "saturating_neg", issue = "59983")]
+            #[rustc_const_unstable(feature = "const_saturating_int_methods", issue = "53718")]
             #[inline]
-            pub fn saturating_abs(self) -> Self {
+            pub const fn saturating_abs(self) -> Self {
                 if self.is_negative() {
                     self.saturating_neg()
                 } else {
@@ -1133,17 +1157,19 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "wrapping", since = "1.7.0")]
+            #[rustc_const_unstable(feature = "const_saturating_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn saturating_mul(self, rhs: Self) -> Self {
-                self.checked_mul(rhs).unwrap_or_else(|| {
-                    if (self < 0) == (rhs < 0) {
+            pub const fn saturating_mul(self, rhs: Self) -> Self {
+                match self.checked_mul(rhs) {
+                    Some(x) => x,
+                    None => if (self < 0) == (rhs < 0) {
                         Self::max_value()
                     } else {
                         Self::min_value()
                     }
-                })
+                }
             }
         }
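
`unwrap_or_else` takes a closure, and closures can't be called in a `const fn`, hence the explicit `match`. The same pattern outside the macro, as a sketch for `i32`:

```rust
// Hypothetical standalone version of the signed saturating multiply above.
const fn saturating_mul_i32(a: i32, b: i32) -> i32 {
    match a.checked_mul(b) {
        Some(x) => x,
        // Same signs -> the true product is positive and clamps to MAX;
        // different signs -> it clamps to MIN.
        None => if (a < 0) == (b < 0) { i32::MAX } else { i32::MIN },
    }
}

fn main() {
    assert_eq!(saturating_mul_i32(100_000, 100_000), i32::MAX);
    assert_eq!(saturating_mul_i32(-100_000, 100_000), i32::MIN);
    assert_eq!(saturating_mul_i32(7, -6), -42);
}
```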
 
@@ -1164,10 +1190,11 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "no_panic_pow", since = "1.34.0")]
+            #[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn saturating_pow(self, exp: u32) -> Self {
+            pub const fn saturating_pow(self, exp: u32) -> Self {
                 match self.checked_pow(exp) {
                     Some(x) => x,
                     None if self < 0 && exp % 2 == 1 => Self::min_value(),
@@ -1269,10 +1296,11 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "num_wrapping", since = "1.2.0")]
+            #[rustc_const_unstable(feature = "const_wrapping_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn wrapping_div(self, rhs: Self) -> Self {
+            pub const fn wrapping_div(self, rhs: Self) -> Self {
                 self.overflowing_div(rhs).0
             }
         }
@@ -1298,10 +1326,11 @@
 assert_eq!((-128i8).wrapping_div_euclid(-1), -128);
 ```"),
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn wrapping_div_euclid(self, rhs: Self) -> Self {
+            pub const fn wrapping_div_euclid(self, rhs: Self) -> Self {
                 self.overflowing_div_euclid(rhs).0
             }
         }
@@ -1328,10 +1357,11 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "num_wrapping", since = "1.2.0")]
+            #[rustc_const_unstable(feature = "const_wrapping_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn wrapping_rem(self, rhs: Self) -> Self {
+            pub const fn wrapping_rem(self, rhs: Self) -> Self {
                 self.overflowing_rem(rhs).0
             }
         }
@@ -1356,10 +1386,11 @@
 assert_eq!((-128i8).wrapping_rem_euclid(-1), 0);
 ```"),
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn wrapping_rem_euclid(self, rhs: Self) -> Self {
+            pub const fn wrapping_rem_euclid(self, rhs: Self) -> Self {
                 self.overflowing_rem_euclid(rhs).0
             }
         }
@@ -1503,10 +1534,11 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "no_panic_pow", since = "1.34.0")]
+            #[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn wrapping_pow(self, mut exp: u32) -> Self {
+            pub const fn wrapping_pow(self, mut exp: u32) -> Self {
                 let mut base = self;
                 let mut acc: Self = 1;
 
@@ -1635,9 +1667,10 @@
 ```"),
             #[inline]
             #[stable(feature = "wrapping", since = "1.7.0")]
+            #[rustc_const_unstable(feature = "const_overflowing_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
-            pub fn overflowing_div(self, rhs: Self) -> (Self, bool) {
+            pub const fn overflowing_div(self, rhs: Self) -> (Self, bool) {
                 if self == Self::min_value() && rhs == -1 {
                     (self, true)
                 } else {
@@ -1669,9 +1702,10 @@
 ```"),
             #[inline]
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
-            pub fn overflowing_div_euclid(self, rhs: Self) -> (Self, bool) {
+            pub const fn overflowing_div_euclid(self, rhs: Self) -> (Self, bool) {
                 if self == Self::min_value() && rhs == -1 {
                     (self, true)
                 } else {
@@ -1703,9 +1737,10 @@
 ```"),
             #[inline]
             #[stable(feature = "wrapping", since = "1.7.0")]
+            #[rustc_const_unstable(feature = "const_overflowing_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
-            pub fn overflowing_rem(self, rhs: Self) -> (Self, bool) {
+            pub const fn overflowing_rem(self, rhs: Self) -> (Self, bool) {
                 if self == Self::min_value() && rhs == -1 {
                     (0, true)
                 } else {
@@ -1736,10 +1771,11 @@
 assert_eq!(", stringify!($SelfT), "::MIN.overflowing_rem_euclid(-1), (0, true));
 ```"),
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn overflowing_rem_euclid(self, rhs: Self) -> (Self, bool) {
+            pub const fn overflowing_rem_euclid(self, rhs: Self) -> (Self, bool) {
                 if self == Self::min_value() && rhs == -1 {
                     (0, true)
                 } else {
@@ -1876,10 +1912,11 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "no_panic_pow", since = "1.34.0")]
+            #[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn overflowing_pow(self, mut exp: u32) -> (Self, bool) {
+            pub const fn overflowing_pow(self, mut exp: u32) -> (Self, bool) {
                 let mut base = self;
                 let mut acc: Self = 1;
                 let mut overflown = false;
@@ -1925,11 +1962,12 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "rust1", since = "1.0.0")]
+            #[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
             #[rustc_inherit_overflow_checks]
-            pub fn pow(self, mut exp: u32) -> Self {
+            pub const fn pow(self, mut exp: u32) -> Self {
                 let mut base = self;
                 let mut acc = 1;
 
@@ -1981,11 +2019,12 @@
 assert_eq!((-a).div_euclid(-b), 2); // -7 >= -4 * 2
 ```"),
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
             #[rustc_inherit_overflow_checks]
-            pub fn div_euclid(self, rhs: Self) -> Self {
+            pub const fn div_euclid(self, rhs: Self) -> Self {
                 let q = self / rhs;
                 if self % rhs < 0 {
                     return if rhs > 0 { q - 1 } else { q + 1 }
@@ -2020,11 +2059,12 @@
 assert_eq!((-a).rem_euclid(-b), 1);
 ```"),
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
             #[rustc_inherit_overflow_checks]
-            pub fn rem_euclid(self, rhs: Self) -> Self {
+            pub const fn rem_euclid(self, rhs: Self) -> Self {
                 let r = self % rhs;
                 if r < 0 {
                     if rhs < 0 {
@@ -2847,10 +2887,11 @@
 assert_eq!((", stringify!($SelfT), "::max_value() - 2).checked_add(3), None);", $EndFeature, "
 ```"),
             #[stable(feature = "rust1", since = "1.0.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_add(self, rhs: Self) -> Option<Self> {
+            pub const fn checked_add(self, rhs: Self) -> Option<Self> {
                 let (a, b) = self.overflowing_add(rhs);
                 if b {None} else {Some(a)}
             }
@@ -2869,10 +2910,11 @@
 assert_eq!(0", stringify!($SelfT), ".checked_sub(1), None);", $EndFeature, "
 ```"),
             #[stable(feature = "rust1", since = "1.0.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_sub(self, rhs: Self) -> Option<Self> {
+            pub const fn checked_sub(self, rhs: Self) -> Option<Self> {
                 let (a, b) = self.overflowing_sub(rhs);
                 if b {None} else {Some(a)}
             }
@@ -2891,10 +2933,11 @@
 assert_eq!(", stringify!($SelfT), "::max_value().checked_mul(2), None);", $EndFeature, "
 ```"),
             #[stable(feature = "rust1", since = "1.0.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_mul(self, rhs: Self) -> Option<Self> {
+            pub const fn checked_mul(self, rhs: Self) -> Option<Self> {
                 let (a, b) = self.overflowing_mul(rhs);
                 if b {None} else {Some(a)}
             }
@@ -2913,10 +2956,11 @@
 assert_eq!(1", stringify!($SelfT), ".checked_div(0), None);", $EndFeature, "
 ```"),
             #[stable(feature = "rust1", since = "1.0.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_div(self, rhs: Self) -> Option<Self> {
+            pub const fn checked_div(self, rhs: Self) -> Option<Self> {
                 match rhs {
                     0 => None,
                     // SAFETY: div by zero has been checked above and unsigned types have no other
@@ -2939,10 +2983,11 @@
 assert_eq!(1", stringify!($SelfT), ".checked_div_euclid(0), None);
 ```"),
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_div_euclid(self, rhs: Self) -> Option<Self> {
+            pub const fn checked_div_euclid(self, rhs: Self) -> Option<Self> {
                 if rhs == 0 {
                     None
                 } else {
@@ -2965,10 +3010,11 @@
 assert_eq!(5", stringify!($SelfT), ".checked_rem(0), None);", $EndFeature, "
 ```"),
             #[stable(feature = "wrapping", since = "1.7.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_rem(self, rhs: Self) -> Option<Self> {
+            pub const fn checked_rem(self, rhs: Self) -> Option<Self> {
                 if rhs == 0 {
                     None
                 } else {
@@ -2992,10 +3038,11 @@
 assert_eq!(5", stringify!($SelfT), ".checked_rem_euclid(0), None);
 ```"),
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_rem_euclid(self, rhs: Self) -> Option<Self> {
+            pub const fn checked_rem_euclid(self, rhs: Self) -> Option<Self> {
                 if rhs == 0 {
                     None
                 } else {
@@ -3019,8 +3066,9 @@
 assert_eq!(1", stringify!($SelfT), ".checked_neg(), None);", $EndFeature, "
 ```"),
             #[stable(feature = "wrapping", since = "1.7.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[inline]
-            pub fn checked_neg(self) -> Option<Self> {
+            pub const fn checked_neg(self) -> Option<Self> {
                 let (a, b) = self.overflowing_neg();
                 if b {None} else {Some(a)}
             }
@@ -3039,10 +3087,11 @@
 assert_eq!(0x10", stringify!($SelfT), ".checked_shl(129), None);", $EndFeature, "
 ```"),
             #[stable(feature = "wrapping", since = "1.7.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_shl(self, rhs: u32) -> Option<Self> {
+            pub const fn checked_shl(self, rhs: u32) -> Option<Self> {
                 let (a, b) = self.overflowing_shl(rhs);
                 if b {None} else {Some(a)}
             }
@@ -3061,10 +3110,11 @@
 assert_eq!(0x10", stringify!($SelfT), ".checked_shr(129), None);", $EndFeature, "
 ```"),
             #[stable(feature = "wrapping", since = "1.7.0")]
+            #[rustc_const_unstable(feature = "const_checked_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_shr(self, rhs: u32) -> Option<Self> {
+            pub const fn checked_shr(self, rhs: u32) -> Option<Self> {
                 let (a, b) = self.overflowing_shr(rhs);
                 if b {None} else {Some(a)}
             }
@@ -3083,26 +3133,27 @@
 assert_eq!(", stringify!($SelfT), "::max_value().checked_pow(2), None);", $EndFeature, "
 ```"),
             #[stable(feature = "no_panic_pow", since = "1.34.0")]
+            #[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn checked_pow(self, mut exp: u32) -> Option<Self> {
+            pub const fn checked_pow(self, mut exp: u32) -> Option<Self> {
                 let mut base = self;
                 let mut acc: Self = 1;
 
                 while exp > 1 {
                     if (exp & 1) == 1 {
-                        acc = acc.checked_mul(base)?;
+                        acc = try_opt!(acc.checked_mul(base));
                     }
                     exp /= 2;
-                    base = base.checked_mul(base)?;
+                    base = try_opt!(base.checked_mul(base));
                 }
 
                 // Deal with the final bit of the exponent separately, since
                 // squaring the base afterwards is not necessary and may cause a
                 // needless overflow.
                 if exp == 1 {
-                    acc = acc.checked_mul(base)?;
+                    acc = try_opt!(acc.checked_mul(base));
                 }
 
                 Some(acc)
@@ -3170,11 +3221,15 @@
 "::MAX);", $EndFeature, "
 ```"),
             #[stable(feature = "wrapping", since = "1.7.0")]
+            #[rustc_const_unstable(feature = "const_saturating_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn saturating_mul(self, rhs: Self) -> Self {
-                self.checked_mul(rhs).unwrap_or(Self::max_value())
+            pub const fn saturating_mul(self, rhs: Self) -> Self {
+                match self.checked_mul(rhs) {
+                    Some(x) => x,
+                    None => Self::max_value(),
+                }
             }
         }
 
@@ -3194,10 +3249,11 @@
 $EndFeature, "
 ```"),
             #[stable(feature = "no_panic_pow", since = "1.34.0")]
+            #[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn saturating_pow(self, exp: u32) -> Self {
+            pub const fn saturating_pow(self, exp: u32) -> Self {
                 match self.checked_pow(exp) {
                     Some(x) => x,
                     None => Self::max_value(),
@@ -3289,10 +3345,11 @@
 ", $Feature, "assert_eq!(100", stringify!($SelfT), ".wrapping_div(10), 10);", $EndFeature, "
 ```"),
             #[stable(feature = "num_wrapping", since = "1.2.0")]
+            #[rustc_const_unstable(feature = "const_wrapping_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn wrapping_div(self, rhs: Self) -> Self {
+            pub const fn wrapping_div(self, rhs: Self) -> Self {
                 self / rhs
             }
         }
@@ -3315,10 +3372,11 @@
 assert_eq!(100", stringify!($SelfT), ".wrapping_div_euclid(10), 10);
 ```"),
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn wrapping_div_euclid(self, rhs: Self) -> Self {
+            pub const fn wrapping_div_euclid(self, rhs: Self) -> Self {
                 self / rhs
             }
         }
@@ -3339,10 +3397,11 @@
 ", $Feature, "assert_eq!(100", stringify!($SelfT), ".wrapping_rem(10), 0);", $EndFeature, "
 ```"),
             #[stable(feature = "num_wrapping", since = "1.2.0")]
+            #[rustc_const_unstable(feature = "const_wrapping_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn wrapping_rem(self, rhs: Self) -> Self {
+            pub const fn wrapping_rem(self, rhs: Self) -> Self {
                 self % rhs
             }
         }
@@ -3366,10 +3425,11 @@
 assert_eq!(100", stringify!($SelfT), ".wrapping_rem_euclid(10), 0);
 ```"),
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn wrapping_rem_euclid(self, rhs: Self) -> Self {
+            pub const fn wrapping_rem_euclid(self, rhs: Self) -> Self {
                 self % rhs
             }
         }
@@ -3483,10 +3543,11 @@
 assert_eq!(3u8.wrapping_pow(6), 217);", $EndFeature, "
 ```"),
             #[stable(feature = "no_panic_pow", since = "1.34.0")]
+            #[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn wrapping_pow(self, mut exp: u32) -> Self {
+            pub const fn wrapping_pow(self, mut exp: u32) -> Self {
                 let mut base = self;
                 let mut acc: Self = 1;
 
@@ -3614,9 +3675,10 @@
 ```"),
             #[inline]
             #[stable(feature = "wrapping", since = "1.7.0")]
+            #[rustc_const_unstable(feature = "const_overflowing_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
-            pub fn overflowing_div(self, rhs: Self) -> (Self, bool) {
+            pub const fn overflowing_div(self, rhs: Self) -> (Self, bool) {
                 (self / rhs, false)
             }
         }
@@ -3645,9 +3707,10 @@
 ```"),
             #[inline]
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
-            pub fn overflowing_div_euclid(self, rhs: Self) -> (Self, bool) {
+            pub const fn overflowing_div_euclid(self, rhs: Self) -> (Self, bool) {
                 (self / rhs, false)
             }
         }
@@ -3673,9 +3736,10 @@
 ```"),
             #[inline]
             #[stable(feature = "wrapping", since = "1.7.0")]
+            #[rustc_const_unstable(feature = "const_overflowing_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
-            pub fn overflowing_rem(self, rhs: Self) -> (Self, bool) {
+            pub const fn overflowing_rem(self, rhs: Self) -> (Self, bool) {
                 (self % rhs, false)
             }
         }
@@ -3704,9 +3768,10 @@
 ```"),
             #[inline]
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
-            pub fn overflowing_rem_euclid(self, rhs: Self) -> (Self, bool) {
+            pub const fn overflowing_rem_euclid(self, rhs: Self) -> (Self, bool) {
                 (self % rhs, false)
             }
         }
@@ -3805,10 +3870,11 @@
 assert_eq!(3u8.overflowing_pow(6), (217, true));", $EndFeature, "
 ```"),
             #[stable(feature = "no_panic_pow", since = "1.34.0")]
+            #[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
-            pub fn overflowing_pow(self, mut exp: u32) -> (Self, bool) {
+            pub const fn overflowing_pow(self, mut exp: u32) -> (Self, bool) {
                 let mut base = self;
                 let mut acc: Self = 1;
                 let mut overflown = false;
@@ -3851,11 +3917,12 @@
 ", $Feature, "assert_eq!(2", stringify!($SelfT), ".pow(5), 32);", $EndFeature, "
 ```"),
         #[stable(feature = "rust1", since = "1.0.0")]
+        #[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
         #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
         #[inline]
         #[rustc_inherit_overflow_checks]
-        pub fn pow(self, mut exp: u32) -> Self {
+        pub const fn pow(self, mut exp: u32) -> Self {
             let mut base = self;
             let mut acc = 1;
 
@@ -3897,11 +3964,12 @@
 assert_eq!(7", stringify!($SelfT), ".div_euclid(4), 1); // or any other integer type
 ```"),
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
             #[rustc_inherit_overflow_checks]
-            pub fn div_euclid(self, rhs: Self) -> Self {
+            pub const fn div_euclid(self, rhs: Self) -> Self {
                 self / rhs
             }
         }
@@ -3926,11 +3994,12 @@
 assert_eq!(7", stringify!($SelfT), ".rem_euclid(4), 3); // or any other integer type
 ```"),
             #[stable(feature = "euclidean_division", since = "1.38.0")]
+            #[rustc_const_unstable(feature = "const_euclidean_int_methods", issue = "53718")]
             #[must_use = "this returns the result of the operation, \
                           without modifying the original"]
             #[inline]
             #[rustc_inherit_overflow_checks]
-            pub fn rem_euclid(self, rhs: Self) -> Self {
+            pub const fn rem_euclid(self, rhs: Self) -> Self {
                 self % rhs
             }
         }
@@ -3964,7 +4033,8 @@
         // overflow cases it instead ends up returning the maximum value
         // of the type, and can return 0 for 0.
         #[inline]
-        fn one_less_than_next_power_of_two(self) -> Self {
+        #[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
+        const fn one_less_than_next_power_of_two(self) -> Self {
             if self <= 1 { return 0; }
 
             let p = self - 1;
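
`one_less_than_next_power_of_two` fills every bit at or below the highest set bit of `self - 1` with ones; `next_power_of_two` in the following hunk then just adds 1 (with inherited overflow checks). A sketch of the idea for `u32`, reconstructed with `leading_zeros` rather than the ctlz intrinsic the real helper uses, so it may not match the exact body:

```rust
fn one_less_than_next_power_of_two(x: u32) -> u32 {
    if x <= 1 {
        return 0;
    }
    let p = x - 1;
    // Smear ones below the highest set bit of p.
    u32::MAX >> p.leading_zeros()
}

fn main() {
    assert_eq!(one_less_than_next_power_of_two(3) + 1, 4); // next_power_of_two(3)
    assert_eq!(one_less_than_next_power_of_two(5) + 1, 8);
    assert_eq!(one_less_than_next_power_of_two(1), 0);
    assert_eq!(one_less_than_next_power_of_two(0), 0);
}
```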
@@ -3992,10 +4062,11 @@
 assert_eq!(3", stringify!($SelfT), ".next_power_of_two(), 4);", $EndFeature, "
 ```"),
             #[stable(feature = "rust1", since = "1.0.0")]
+            #[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
             #[inline]
-            pub fn next_power_of_two(self) -> Self {
-                // Call the trait to get overflow checks
-                ops::Add::add(self.one_less_than_next_power_of_two(), 1)
+            #[rustc_inherit_overflow_checks]
+            pub const fn next_power_of_two(self) -> Self {
+                self.one_less_than_next_power_of_two() + 1
             }
         }
 
@@ -4017,7 +4088,8 @@
 ```"),
             #[inline]
             #[stable(feature = "rust1", since = "1.0.0")]
-            pub fn checked_next_power_of_two(self) -> Option<Self> {
+            #[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
+            pub const fn checked_next_power_of_two(self) -> Option<Self> {
                 self.one_less_than_next_power_of_two().checked_add(1)
             }
         }
@@ -4041,7 +4113,8 @@
 ```"),
             #[unstable(feature = "wrapping_next_power_of_two", issue = "32463",
                        reason = "needs decision on wrapping behaviour")]
-            pub fn wrapping_next_power_of_two(self) -> Self {
+            #[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
+            pub const fn wrapping_next_power_of_two(self) -> Self {
                 self.one_less_than_next_power_of_two().wrapping_add(1)
             }
         }
@@ -4251,8 +4324,9 @@
     /// assert!(!non_ascii.is_ascii());
     /// ```
     #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
+    #[rustc_const_stable(feature = "const_ascii_methods_on_intrinsics", since = "1.43.0")]
     #[inline]
-    pub fn is_ascii(&self) -> bool {
+    pub const fn is_ascii(&self) -> bool {
         *self & 128 == 0
     }
 
@@ -4399,8 +4473,9 @@
     /// assert!(!esc.is_ascii_alphabetic());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_alphabetic(&self) -> bool {
+    pub const fn is_ascii_alphabetic(&self) -> bool {
         matches!(*self, b'A'..=b'Z' | b'a'..=b'z')
     }
 
@@ -4431,8 +4506,9 @@
     /// assert!(!esc.is_ascii_uppercase());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_uppercase(&self) -> bool {
+    pub const fn is_ascii_uppercase(&self) -> bool {
         matches!(*self, b'A'..=b'Z')
     }
 
@@ -4463,8 +4539,9 @@
     /// assert!(!esc.is_ascii_lowercase());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_lowercase(&self) -> bool {
+    pub const fn is_ascii_lowercase(&self) -> bool {
         matches!(*self, b'a'..=b'z')
     }
 
@@ -4498,8 +4575,9 @@
     /// assert!(!esc.is_ascii_alphanumeric());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_alphanumeric(&self) -> bool {
+    pub const fn is_ascii_alphanumeric(&self) -> bool {
         matches!(*self, b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z')
     }
 
@@ -4530,8 +4608,9 @@
     /// assert!(!esc.is_ascii_digit());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_digit(&self) -> bool {
+    pub const fn is_ascii_digit(&self) -> bool {
         matches!(*self, b'0'..=b'9')
     }
 
@@ -4565,8 +4644,9 @@
     /// assert!(!esc.is_ascii_hexdigit());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_hexdigit(&self) -> bool {
+    pub const fn is_ascii_hexdigit(&self) -> bool {
         matches!(*self, b'0'..=b'9' | b'A'..=b'F' | b'a'..=b'f')
     }
 
@@ -4601,8 +4681,9 @@
     /// assert!(!esc.is_ascii_punctuation());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_punctuation(&self) -> bool {
+    pub const fn is_ascii_punctuation(&self) -> bool {
         matches!(*self, b'!'..=b'/' | b':'..=b'@' | b'['..=b'`' | b'{'..=b'~')
     }
 
@@ -4633,8 +4714,9 @@
     /// assert!(!esc.is_ascii_graphic());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_graphic(&self) -> bool {
+    pub const fn is_ascii_graphic(&self) -> bool {
         matches!(*self, b'!'..=b'~')
     }
 
@@ -4682,8 +4764,9 @@
     /// assert!(!esc.is_ascii_whitespace());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_whitespace(&self) -> bool {
+    pub const fn is_ascii_whitespace(&self) -> bool {
         matches!(*self, b'\t' | b'\n' | b'\x0C' | b'\r' | b' ')
     }
 
@@ -4716,8 +4799,9 @@
     /// assert!(esc.is_ascii_control());
     /// ```
     #[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
+    #[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
     #[inline]
-    pub fn is_ascii_control(&self) -> bool {
+    pub const fn is_ascii_control(&self) -> bool {
         matches!(*self, b'\0'..=b'\x1F' | b'\x7F')
     }
 }
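
These ASCII classifiers are unchanged in behaviour; the `const` qualifier (behind `const_ascii_ctype_on_intrinsics`) only allows calling them in const contexts. They are simple `matches!` range checks, e.g.:

```rust
fn main() {
    assert!(b'7'.is_ascii_digit());
    assert!(b'f'.is_ascii_hexdigit() && !b'g'.is_ascii_hexdigit());
    assert!(b'\t'.is_ascii_whitespace());

    // Equivalent hand-written check for the digit case:
    let is_digit = |b: u8| matches!(b, b'0'..=b'9');
    assert_eq!(is_digit(b'7'), b'7'.is_ascii_digit());
}
```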
diff --git a/src/libcore/ops/generator.rs b/src/libcore/ops/generator.rs
index 5401fff..4e43561 100644
--- a/src/libcore/ops/generator.rs
+++ b/src/libcore/ops/generator.rs
@@ -50,11 +50,11 @@
 ///         return "foo"
 ///     };
 ///
-///     match Pin::new(&mut generator).resume() {
+///     match Pin::new(&mut generator).resume(()) {
 ///         GeneratorState::Yielded(1) => {}
 ///         _ => panic!("unexpected return from resume"),
 ///     }
-///     match Pin::new(&mut generator).resume() {
+///     match Pin::new(&mut generator).resume(()) {
 ///         GeneratorState::Complete("foo") => {}
 ///         _ => panic!("unexpected return from resume"),
 ///     }
@@ -67,7 +67,7 @@
 #[lang = "generator"]
 #[unstable(feature = "generator_trait", issue = "43122")]
 #[fundamental]
-pub trait Generator {
+pub trait Generator<#[cfg(not(bootstrap))] R = ()> {
     /// The type of value this generator yields.
     ///
     /// This associated type corresponds to the `yield` expression and the
@@ -110,9 +110,13 @@
     /// been returned previously. While generator literals in the language are
     /// guaranteed to panic on resuming after `Complete`, this is not guaranteed
     /// for all implementations of the `Generator` trait.
-    fn resume(self: Pin<&mut Self>) -> GeneratorState<Self::Yield, Self::Return>;
+    fn resume(
+        self: Pin<&mut Self>,
+        #[cfg(not(bootstrap))] arg: R,
+    ) -> GeneratorState<Self::Yield, Self::Return>;
 }
 
+#[cfg(bootstrap)]
 #[unstable(feature = "generator_trait", issue = "43122")]
 impl<G: ?Sized + Generator> Generator for Pin<&mut G> {
     type Yield = G::Yield;
@@ -123,6 +127,7 @@
     }
 }
 
+#[cfg(bootstrap)]
 #[unstable(feature = "generator_trait", issue = "43122")]
 impl<G: ?Sized + Generator + Unpin> Generator for &mut G {
     type Yield = G::Yield;
@@ -132,3 +137,25 @@
         G::resume(Pin::new(&mut *self))
     }
 }
+
+#[cfg(not(bootstrap))]
+#[unstable(feature = "generator_trait", issue = "43122")]
+impl<G: ?Sized + Generator<R>, R> Generator<R> for Pin<&mut G> {
+    type Yield = G::Yield;
+    type Return = G::Return;
+
+    fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return> {
+        G::resume((*self).as_mut(), arg)
+    }
+}
+
+#[cfg(not(bootstrap))]
+#[unstable(feature = "generator_trait", issue = "43122")]
+impl<G: ?Sized + Generator<R> + Unpin, R> Generator<R> for &mut G {
+    type Yield = G::Yield;
+    type Return = G::Return;
+
+    fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return> {
+        G::resume(Pin::new(&mut *self), arg)
+    }
+}
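
To illustrate the resume-argument parameter this hunk adds to `Generator`, here is a minimal nightly-only sketch (it assumes the unstable `generator_trait` feature; the `StepCounter` type is hypothetical and not part of the diff). It hand-implements `Generator<u32>` for an `Unpin` type, matching the new `fn resume(self: Pin<&mut Self>, arg: R)` signature:

```rust
#![feature(generator_trait)]
use std::ops::{Generator, GeneratorState};
use std::pin::Pin;

// Hypothetical generator: advanced by whatever step the caller passes to
// `resume`, completing once the running total reaches 10.
struct StepCounter {
    total: u32,
}

impl Generator<u32> for StepCounter {
    type Yield = u32;
    type Return = &'static str;

    fn resume(mut self: Pin<&mut Self>, step: u32) -> GeneratorState<u32, &'static str> {
        self.total += step;
        if self.total < 10 {
            GeneratorState::Yielded(self.total)
        } else {
            GeneratorState::Complete("done")
        }
    }
}

fn main() {
    let mut g = StepCounter { total: 0 };
    // `StepCounter` is `Unpin`, so `Pin::new` is enough here.
    assert_eq!(Pin::new(&mut g).resume(3), GeneratorState::Yielded(3));
    assert_eq!(Pin::new(&mut g).resume(4), GeneratorState::Yielded(7));
    assert_eq!(Pin::new(&mut g).resume(5), GeneratorState::Complete("done"));
}
```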
diff --git a/src/libcore/ops/range.rs b/src/libcore/ops/range.rs
index d38b351..8ffad82 100644
--- a/src/libcore/ops/range.rs
+++ b/src/libcore/ops/range.rs
@@ -340,41 +340,21 @@
     // support that mode.
     pub(crate) start: Idx,
     pub(crate) end: Idx,
-    pub(crate) is_empty: Option<bool>,
+
     // This field is:
-    //  - `None` when next() or next_back() was never called
-    //  - `Some(false)` when `start <= end` assuming no overflow
-    //  - `Some(true)` otherwise
-    // The field cannot be a simple `bool` because the `..=` constructor can
-    // accept non-PartialOrd types, also we want the constructor to be const.
-}
-
-trait RangeInclusiveEquality: Sized {
-    fn canonicalized_is_empty(range: &RangeInclusive<Self>) -> bool;
-}
-
-impl<T> RangeInclusiveEquality for T {
-    #[inline]
-    default fn canonicalized_is_empty(range: &RangeInclusive<Self>) -> bool {
-        range.is_empty.unwrap_or_default()
-    }
-}
-
-impl<T: PartialOrd> RangeInclusiveEquality for T {
-    #[inline]
-    fn canonicalized_is_empty(range: &RangeInclusive<Self>) -> bool {
-        range.is_empty()
-    }
+    //  - `false` upon construction
+    //  - `false` when iteration has yielded an element and the iterator is not exhausted
+    //  - `true` when iteration has been used to exhaust the iterator
+    //
+    // This is required to support PartialEq and Hash without a PartialOrd bound or specialization.
+    pub(crate) exhausted: bool,
 }
 
 #[stable(feature = "inclusive_range", since = "1.26.0")]
 impl<Idx: PartialEq> PartialEq for RangeInclusive<Idx> {
     #[inline]
     fn eq(&self, other: &Self) -> bool {
-        self.start == other.start
-            && self.end == other.end
-            && RangeInclusiveEquality::canonicalized_is_empty(self)
-                == RangeInclusiveEquality::canonicalized_is_empty(other)
+        self.start == other.start && self.end == other.end && self.exhausted == other.exhausted
     }
 }
 
@@ -386,7 +366,7 @@
     fn hash<H: Hasher>(&self, state: &mut H) {
         self.start.hash(state);
         self.end.hash(state);
-        RangeInclusiveEquality::canonicalized_is_empty(self).hash(state);
+        self.exhausted.hash(state);
     }
 }
 
@@ -405,7 +385,7 @@
     #[rustc_promotable]
     #[rustc_const_stable(feature = "const_range_new", since = "1.32.0")]
     pub const fn new(start: Idx, end: Idx) -> Self {
-        Self { start, end, is_empty: None }
+        Self { start, end, exhausted: false }
     }
 
     /// Returns the lower bound of the range (inclusive).
@@ -481,6 +461,9 @@
         self.start.fmt(fmt)?;
         write!(fmt, "..=")?;
         self.end.fmt(fmt)?;
+        if self.exhausted {
+            write!(fmt, " (exhausted)")?;
+        }
         Ok(())
     }
 }
@@ -552,15 +535,7 @@
     #[unstable(feature = "range_is_empty", reason = "recently added", issue = "48111")]
     #[inline]
     pub fn is_empty(&self) -> bool {
-        self.is_empty.unwrap_or_else(|| !(self.start <= self.end))
-    }
-
-    // If this range's `is_empty` is field is unknown (`None`), update it to be a concrete value.
-    #[inline]
-    pub(crate) fn compute_is_empty(&mut self) {
-        if self.is_empty.is_none() {
-            self.is_empty = Some(!(self.start <= self.end));
-        }
+        self.exhausted || !(self.start <= self.end)
     }
 }
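
A quick sketch of the user-visible consequence of replacing `is_empty: Option<bool>` with the `exhausted` flag, mirroring the assertions added to `src/libcore/tests/iter.rs` further down: an exhausted `RangeInclusive` keeps its bounds, but no longer compares equal to a freshly constructed range with the same bounds.

```rust
fn main() {
    let mut r = 10..=10;
    assert_eq!(r.next(), Some(10));
    assert_eq!(r.next(), None);

    // The bounds are untouched; only the `exhausted` flag differs...
    assert_eq!(*r.start(), 10);
    assert_eq!(*r.end(), 10);
    // ...so the exhausted range is not equal to a fresh `10..=10`.
    assert_ne!(r, 10..=10);
}
```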
 
diff --git a/src/libcore/option.rs b/src/libcore/option.rs
index ad0491f..9b32442 100644
--- a/src/libcore/option.rs
+++ b/src/libcore/option.rs
@@ -317,7 +317,7 @@
     // Getting to contained values
     /////////////////////////////////////////////////////////////////////////
 
-    /// Unwraps an option, yielding the content of a [`Some`].
+    /// Returns the contained [`Some`] value, consuming the `self` value.
     ///
     /// # Panics
     ///
@@ -348,17 +348,22 @@
         }
     }
 
-    /// Moves the value `v` out of the `Option<T>` if it is [`Some(v)`].
+    /// Returns the contained [`Some`] value, consuming the `self` value.
     ///
-    /// In general, because this function may panic, its use is discouraged.
+    /// Because this function may panic, its use is generally discouraged.
     /// Instead, prefer to use pattern matching and handle the [`None`]
-    /// case explicitly.
+    /// case explicitly, or call [`unwrap_or`], [`unwrap_or_else`], or
+    /// [`unwrap_or_default`].
+    ///
+    /// [`unwrap_or`]: #method.unwrap_or
+    /// [`unwrap_or_else`]: #method.unwrap_or_else
+    /// [`unwrap_or_default`]: #method.unwrap_or_default
     ///
     /// # Panics
     ///
     /// Panics if the self value equals [`None`].
     ///
-    /// [`Some(v)`]: #variant.Some
+    /// [`Some`]: #variant.Some
     /// [`None`]: #variant.None
     ///
     /// # Examples
@@ -382,12 +387,13 @@
         }
     }
 
-    /// Returns the contained value or a default.
+    /// Returns the contained [`Some`] value or a provided default.
     ///
     /// Arguments passed to `unwrap_or` are eagerly evaluated; if you are passing
     /// the result of a function call, it is recommended to use [`unwrap_or_else`],
     /// which is lazily evaluated.
     ///
+    /// [`Some`]: #variant.Some
     /// [`unwrap_or_else`]: #method.unwrap_or_else
     ///
     /// # Examples
@@ -405,7 +411,7 @@
         }
     }
 
-    /// Returns the contained value or computes it from a closure.
+    /// Returns the contained [`Some`] value or computes it from a closure.
     ///
     /// # Examples
     ///
@@ -455,6 +461,12 @@
     /// Applies a function to the contained value (if any),
     /// or returns the provided default (if not).
     ///
+    /// Arguments passed to `map_or` are eagerly evaluated; if you are passing
+    /// the result of a function call, it is recommended to use [`map_or_else`],
+    /// which is lazily evaluated.
+    ///
+    /// [`map_or_else`]: #method.map_or_else
+    ///
     /// # Examples
     ///
     /// ```
@@ -980,7 +992,7 @@
 }
 
 impl<T: fmt::Debug> Option<T> {
-    /// Unwraps an option, expecting [`None`] and returning nothing.
+    /// Consumes `self` while expecting [`None`] and returning nothing.
     ///
     /// # Panics
     ///
@@ -1023,7 +1035,7 @@
         }
     }
 
-    /// Unwraps an option, expecting [`None`] and returning nothing.
+    /// Consumes `self` while expecting [`None`] and returning nothing.
     ///
     /// # Panics
     ///
@@ -1068,7 +1080,7 @@
 }
 
 impl<T: Default> Option<T> {
-    /// Returns the contained value or a default
+    /// Returns the contained [`Some`] value or a default.
     ///
     /// Consumes the `self` argument then, if [`Some`], returns the contained
     /// value, otherwise if [`None`], returns the [default value] for that
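
The reworded `Option` docs above repeatedly point from the eager combinators to their lazy `_else` counterparts; here is a small sketch of the distinction they describe (the `expensive_default` helper is made up for illustration):

```rust
fn expensive_default() -> i32 {
    // Stands in for a costly computation.
    42
}

fn main() {
    let x: Option<i32> = Some(7);

    // Eager: the argument is evaluated before `unwrap_or` even runs.
    assert_eq!(x.unwrap_or(expensive_default()), 7);
    // Lazy: the closure is only called in the `None` case.
    assert_eq!(x.unwrap_or_else(expensive_default), 7);

    // The same distinction holds for `map_or` vs. `map_or_else`.
    assert_eq!(x.map_or(expensive_default(), |v| v * 2), 14);
    assert_eq!(x.map_or_else(expensive_default, |v| v * 2), 14);
}
```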
diff --git a/src/libcore/primitive.rs b/src/libcore/primitive.rs
new file mode 100644
index 0000000..e20b2c5
--- /dev/null
+++ b/src/libcore/primitive.rs
@@ -0,0 +1,67 @@
+//! This module reexports the primitive types to allow usage that cannot be
+//! shadowed by other declared types.
+//!
+//! This is normally only useful in macro generated code.
+//!
+//! An example of this is when generating a new struct and an impl for it:
+//!
+//! ```rust,compile_fail
+//! pub struct bool;
+//!
+//! impl QueryId for bool {
+//!     const SOME_PROPERTY: bool = true;
+//! }
+//!
+//! # trait QueryId { const SOME_PROPERTY: core::primitive::bool; }
+//! ```
+//!
+//! Note that the `SOME_PROPERTY` associated constant would not compile, as its
+//! type `bool` refers to the struct, rather than to the primitive bool type.
+//!
+//! A correct implementation could look like:
+//!
+//! ```rust
+//! # #[allow(non_camel_case_types)]
+//! pub struct bool;
+//!
+//! impl QueryId for bool {
+//!     const SOME_PROPERTY: core::primitive::bool = true;
+//! }
+//!
+//! # trait QueryId { const SOME_PROPERTY: core::primitive::bool; }
+//! ```
+
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use bool;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use char;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use f32;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use f64;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use i128;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use i16;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use i32;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use i64;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use i8;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use isize;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use str;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use u128;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use u16;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use u32;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use u64;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use u8;
+#[stable(feature = "core_primitive", since = "1.43.0")]
+pub use usize;
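
A hedged sketch of the shadowing problem `core::primitive` solves, along the lines of the module docs above (the `len_as_u32!` macro and the shadowing `struct u32` are invented for illustration):

```rust
// Macro-generated code cannot know what names the caller has in scope,
// so it spells the primitive through the unshadowable path.
macro_rules! len_as_u32 {
    ($slice:expr) => {
        $slice.len() as core::primitive::u32
    };
}

#[allow(non_camel_case_types, dead_code)]
struct u32; // deliberately shadows the primitive name in this scope

fn main() {
    let xs = [1u8, 2, 3];
    let n: core::primitive::u32 = len_as_u32!(&xs);
    assert_eq!(n, 3);
}
```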
diff --git a/src/libcore/ptr/mod.rs b/src/libcore/ptr/mod.rs
index 0ee5096..88b490a 100644
--- a/src/libcore/ptr/mod.rs
+++ b/src/libcore/ptr/mod.rs
@@ -119,10 +119,13 @@
 ///
 /// Behavior is undefined if any of the following conditions are violated:
 ///
-/// * `to_drop` must be [valid] for reads.
+/// * `to_drop` must be [valid] for both reads and writes.
 ///
 /// * `to_drop` must be properly aligned.
 ///
+/// * The value `to_drop` points to must be valid for dropping, which may mean it must uphold
+///   additional invariants - this is type-dependent.
+///
 /// Additionally, if `T` is not [`Copy`], using the pointed-to value after
 /// calling `drop_in_place` can cause undefined behavior. Note that `*to_drop =
 /// foo` counts as a use because it will cause the value to be dropped
@@ -289,7 +292,7 @@
 ///
 /// Behavior is undefined if any of the following conditions are violated:
 ///
-/// * Both `x` and `y` must be [valid] for reads and writes.
+/// * Both `x` and `y` must be [valid] for both reads and writes.
 ///
 /// * Both `x` and `y` must be properly aligned.
 ///
@@ -355,7 +358,7 @@
 ///
 /// Behavior is undefined if any of the following conditions are violated:
 ///
-/// * Both `x` and `y` must be [valid] for reads and writes of `count *
+/// * Both `x` and `y` must be [valid] for both reads and writes of `count *
 ///   size_of::<T>()` bytes.
 ///
 /// * Both `x` and `y` must be properly aligned.
@@ -471,10 +474,12 @@
 ///
 /// Behavior is undefined if any of the following conditions are violated:
 ///
-/// * `dst` must be [valid] for writes.
+/// * `dst` must be [valid] for both reads and writes.
 ///
 /// * `dst` must be properly aligned.
 ///
+/// * `dst` must point to a properly initialized value of type `T`.
+///
 /// Note that even if `T` has size `0`, the pointer must be non-NULL and properly aligned.
 ///
 /// [valid]: ../ptr/index.html#safety
@@ -514,6 +519,8 @@
 /// * `src` must be properly aligned. Use [`read_unaligned`] if this is not the
 ///   case.
 ///
+/// * `src` must point to a properly initialized value of type `T`.
+///
 /// Note that even if `T` has size `0`, the pointer must be non-NULL and properly aligned.
 ///
 /// # Examples
@@ -628,6 +635,8 @@
 ///
 /// * `src` must be [valid] for reads.
 ///
+/// * `src` must point to a properly initialized value of type `T`.
+///
 /// Like [`read`], `read_unaligned` creates a bitwise copy of `T`, regardless of
 /// whether `T` is [`Copy`]. If `T` is not [`Copy`], using both the returned
 /// value and the value at `*src` can [violate memory safety][read-ownership].
@@ -922,6 +931,8 @@
 ///
 /// * `src` must be properly aligned.
 ///
+/// * `src` must point to a properly initialized value of type `T`.
+///
 /// Like [`read`], `read_volatile` creates a bitwise copy of `T`, regardless of
 /// whether `T` is [`Copy`]. If `T` is not [`Copy`], using both the returned
 /// value and the value at `*src` can [violate memory safety][read-ownership].
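
The added safety clauses require `ptr::read` and friends to observe a properly initialized `T`. A minimal sketch of satisfying that requirement through `MaybeUninit` (purely illustrative, not part of the diff):

```rust
use std::mem::MaybeUninit;
use std::ptr;

fn main() {
    let mut slot = MaybeUninit::<u64>::uninit();
    let p = slot.as_mut_ptr();

    unsafe {
        // Initialize first, so `p` points to a properly initialized `u64`...
        ptr::write(p, 7);
        // ...which makes the subsequent `read` sound: `p` is non-null,
        // aligned, valid for reads, and initialized.
        let v = ptr::read(p);
        assert_eq!(v, 7);
    }
}
```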
diff --git a/src/libcore/result.rs b/src/libcore/result.rs
index bc70dbd..0bc29e1 100644
--- a/src/libcore/result.rs
+++ b/src/libcore/result.rs
@@ -524,6 +524,12 @@
     /// Applies a function to the contained value (if any),
     /// or returns the provided default (if not).
     ///
+    /// Arguments passed to `map_or` are eagerly evaluated; if you are passing
+    /// the result of a function call, it is recommended to use [`map_or_else`],
+    /// which is lazily evaluated.
+    ///
+    /// [`map_or_else`]: #method.map_or_else
+    ///
     /// # Examples
     ///
     /// ```
@@ -792,8 +798,7 @@
         }
     }
 
-    /// Unwraps a result, yielding the content of an [`Ok`].
-    /// Else, it returns `optb`.
+    /// Returns the contained [`Ok`] value or a provided default.
     ///
     /// Arguments passed to `unwrap_or` are eagerly evaluated; if you are passing
     /// the result of a function call, it is recommended to use [`unwrap_or_else`],
@@ -808,27 +813,25 @@
     /// Basic usage:
     ///
     /// ```
-    /// let optb = 2;
+    /// let default = 2;
     /// let x: Result<u32, &str> = Ok(9);
-    /// assert_eq!(x.unwrap_or(optb), 9);
+    /// assert_eq!(x.unwrap_or(default), 9);
     ///
     /// let x: Result<u32, &str> = Err("error");
-    /// assert_eq!(x.unwrap_or(optb), optb);
+    /// assert_eq!(x.unwrap_or(default), default);
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    pub fn unwrap_or(self, optb: T) -> T {
+    pub fn unwrap_or(self, default: T) -> T {
         match self {
             Ok(t) => t,
-            Err(_) => optb,
+            Err(_) => default,
         }
     }
 
-    /// Unwraps a result, yielding the content of an [`Ok`].
-    /// If the value is an [`Err`] then it calls `op` with its value.
+    /// Returns the contained [`Ok`] value or computes it from a closure.
     ///
     /// [`Ok`]: enum.Result.html#variant.Ok
-    /// [`Err`]: enum.Result.html#variant.Err
     ///
     /// # Examples
     ///
@@ -931,7 +934,44 @@
 }
 
 impl<T, E: fmt::Debug> Result<T, E> {
-    /// Unwraps a result, yielding the content of an [`Ok`].
+    /// Returns the contained [`Ok`] value, consuming the `self` value.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the value is an [`Err`], with a panic message including the
+    /// passed message, and the content of the [`Err`].
+    ///
+    /// [`Ok`]: enum.Result.html#variant.Ok
+    /// [`Err`]: enum.Result.html#variant.Err
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```{.should_panic}
+    /// let x: Result<u32, &str> = Err("emergency failure");
+    /// x.expect("Testing expect"); // panics with `Testing expect: emergency failure`
+    /// ```
+    #[inline]
+    #[track_caller]
+    #[stable(feature = "result_expect", since = "1.4.0")]
+    pub fn expect(self, msg: &str) -> T {
+        match self {
+            Ok(t) => t,
+            Err(e) => unwrap_failed(msg, &e),
+        }
+    }
+
+    /// Returns the contained [`Ok`] value, consuming the `self` value.
+    ///
+    /// Because this function may panic, its use is generally discouraged.
+    /// Instead, prefer to use pattern matching and handle the [`Err`]
+    /// case explicitly, or call [`unwrap_or`], [`unwrap_or_else`], or
+    /// [`unwrap_or_default`].
+    ///
+    /// [`unwrap_or`]: #method.unwrap_or
+    /// [`unwrap_or_else`]: #method.unwrap_or_else
+    /// [`unwrap_or_default`]: #method.unwrap_or_default
     ///
     /// # Panics
     ///
@@ -963,13 +1003,15 @@
             Err(e) => unwrap_failed("called `Result::unwrap()` on an `Err` value", &e),
         }
     }
+}
 
-    /// Unwraps a result, yielding the content of an [`Ok`].
+impl<T: fmt::Debug, E> Result<T, E> {
+    /// Returns the contained [`Err`] value, consuming the `self` value.
     ///
     /// # Panics
     ///
-    /// Panics if the value is an [`Err`], with a panic message including the
-    /// passed message, and the content of the [`Err`].
+    /// Panics if the value is an [`Ok`], with a panic message including the
+    /// passed message, and the content of the [`Ok`].
     ///
     /// [`Ok`]: enum.Result.html#variant.Ok
     /// [`Err`]: enum.Result.html#variant.Err
@@ -979,22 +1021,20 @@
     /// Basic usage:
     ///
     /// ```{.should_panic}
-    /// let x: Result<u32, &str> = Err("emergency failure");
-    /// x.expect("Testing expect"); // panics with `Testing expect: emergency failure`
+    /// let x: Result<u32, &str> = Ok(10);
+    /// x.expect_err("Testing expect_err"); // panics with `Testing expect_err: 10`
     /// ```
     #[inline]
     #[track_caller]
-    #[stable(feature = "result_expect", since = "1.4.0")]
-    pub fn expect(self, msg: &str) -> T {
+    #[stable(feature = "result_expect_err", since = "1.17.0")]
+    pub fn expect_err(self, msg: &str) -> E {
         match self {
-            Ok(t) => t,
-            Err(e) => unwrap_failed(msg, &e),
+            Ok(t) => unwrap_failed(msg, &t),
+            Err(e) => e,
         }
     }
-}
 
-impl<T: fmt::Debug, E> Result<T, E> {
-    /// Unwraps a result, yielding the content of an [`Err`].
+    /// Returns the contained [`Err`] value, consuming the `self` value.
     ///
     /// # Panics
     ///
@@ -1025,38 +1065,10 @@
             Err(e) => e,
         }
     }
-
-    /// Unwraps a result, yielding the content of an [`Err`].
-    ///
-    /// # Panics
-    ///
-    /// Panics if the value is an [`Ok`], with a panic message including the
-    /// passed message, and the content of the [`Ok`].
-    ///
-    /// [`Ok`]: enum.Result.html#variant.Ok
-    /// [`Err`]: enum.Result.html#variant.Err
-    ///
-    /// # Examples
-    ///
-    /// Basic usage:
-    ///
-    /// ```{.should_panic}
-    /// let x: Result<u32, &str> = Ok(10);
-    /// x.expect_err("Testing expect_err"); // panics with `Testing expect_err: 10`
-    /// ```
-    #[inline]
-    #[track_caller]
-    #[stable(feature = "result_expect_err", since = "1.17.0")]
-    pub fn expect_err(self, msg: &str) -> E {
-        match self {
-            Ok(t) => unwrap_failed(msg, &t),
-            Err(e) => e,
-        }
-    }
 }
 
 impl<T: Default, E> Result<T, E> {
-    /// Returns the contained value or a default
+    /// Returns the contained [`Ok`] value or a default.
     ///
     /// Consumes the `self` argument then, if [`Ok`], returns the contained
     /// value, otherwise if [`Err`], returns the default value for that
@@ -1095,7 +1107,7 @@
 
 #[unstable(feature = "unwrap_infallible", reason = "newly added", issue = "61695")]
 impl<T, E: Into<!>> Result<T, E> {
-    /// Unwraps a result that can never be an [`Err`], yielding the content of the [`Ok`].
+    /// Returns the contained [`Ok`] value, but never panics.
     ///
     /// Unlike [`unwrap`], this method is known to never panic on the
     /// result types it is implemented for. Therefore, it can be used
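
As with `Option`, the rewritten `Result` docs steer panicking callers toward `unwrap_or`/`unwrap_or_else`; a short sketch of the two, using the renamed `default` parameter from the hunk above:

```rust
fn main() {
    let default = 2;
    let ok: Result<u32, &str> = Ok(9);
    let err: Result<u32, &str> = Err("error");

    // `unwrap_or` returns the contained `Ok` value or the provided default.
    assert_eq!(ok.unwrap_or(default), 9);
    assert_eq!(err.unwrap_or(default), default);

    // `unwrap_or_else` is the lazy counterpart; the closure sees the error.
    assert_eq!(err.unwrap_or_else(|e| e.len() as u32), 5);
}
```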
diff --git a/src/libcore/slice/mod.rs b/src/libcore/slice/mod.rs
index 9b4d201..7c65f59 100644
--- a/src/libcore/slice/mod.rs
+++ b/src/libcore/slice/mod.rs
@@ -1156,6 +1156,69 @@
     }
 
     /// Returns an iterator over subslices separated by elements that match
+    /// `pred`. The matched element is included at the end of the previous
+    /// subslice as a terminator.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(split_inclusive)]
+    /// let slice = [10, 40, 33, 20];
+    /// let mut iter = slice.split_inclusive(|num| num % 3 == 0);
+    ///
+    /// assert_eq!(iter.next().unwrap(), &[10, 40, 33]);
+    /// assert_eq!(iter.next().unwrap(), &[20]);
+    /// assert!(iter.next().is_none());
+    /// ```
+    ///
+    /// If the last element of the slice is matched,
+    /// that element will be considered the terminator of the preceding slice.
+    /// That slice will be the last item returned by the iterator.
+    ///
+    /// ```
+    /// #![feature(split_inclusive)]
+    /// let slice = [3, 10, 40, 33];
+    /// let mut iter = slice.split_inclusive(|num| num % 3 == 0);
+    ///
+    /// assert_eq!(iter.next().unwrap(), &[3]);
+    /// assert_eq!(iter.next().unwrap(), &[10, 40, 33]);
+    /// assert!(iter.next().is_none());
+    /// ```
+    #[unstable(feature = "split_inclusive", issue = "none")]
+    #[inline]
+    pub fn split_inclusive<F>(&self, pred: F) -> SplitInclusive<'_, T, F>
+    where
+        F: FnMut(&T) -> bool,
+    {
+        SplitInclusive { v: self, pred, finished: false }
+    }
+
+    /// Returns an iterator over mutable subslices separated by elements that
+    /// match `pred`. The matched element is included at the end of the previous
+    /// subslice as a terminator.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(split_inclusive)]
+    /// let mut v = [10, 40, 30, 20, 60, 50];
+    ///
+    /// for group in v.split_inclusive_mut(|num| *num % 3 == 0) {
+    ///     let terminator_idx = group.len()-1;
+    ///     group[terminator_idx] = 1;
+    /// }
+    /// assert_eq!(v, [10, 40, 1, 20, 1, 1]);
+    /// ```
+    #[unstable(feature = "split_inclusive", issue = "none")]
+    #[inline]
+    pub fn split_inclusive_mut<F>(&mut self, pred: F) -> SplitInclusiveMut<'_, T, F>
+    where
+        F: FnMut(&T) -> bool,
+    {
+        SplitInclusiveMut { v: self, pred, finished: false }
+    }
+
+    /// Returns an iterator over subslices separated by elements that match
     /// `pred`, starting at the end of the slice and working backwards.
     /// The matched element is not contained in the subslices.
     ///
@@ -3675,7 +3738,106 @@
 #[stable(feature = "fused", since = "1.26.0")]
 impl<T, P> FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {}
 
-/// An iterator over the subslices of the vector which are separated
+/// An iterator over subslices separated by elements that match a predicate
+/// function. Unlike `Split`, it contains the matched part as a terminator
+/// of the subslice.
+///
+/// This struct is created by the [`split_inclusive`] method on [slices].
+///
+/// [`split_inclusive`]: ../../std/primitive.slice.html#method.split_inclusive
+/// [slices]: ../../std/primitive.slice.html
+#[unstable(feature = "split_inclusive", issue = "none")]
+pub struct SplitInclusive<'a, T: 'a, P>
+where
+    P: FnMut(&T) -> bool,
+{
+    v: &'a [T],
+    pred: P,
+    finished: bool,
+}
+
+#[unstable(feature = "split_inclusive", issue = "none")]
+impl<T: fmt::Debug, P> fmt::Debug for SplitInclusive<'_, T, P>
+where
+    P: FnMut(&T) -> bool,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("SplitInclusive")
+            .field("v", &self.v)
+            .field("finished", &self.finished)
+            .finish()
+    }
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+#[unstable(feature = "split_inclusive", issue = "none")]
+impl<T, P> Clone for SplitInclusive<'_, T, P>
+where
+    P: Clone + FnMut(&T) -> bool,
+{
+    fn clone(&self) -> Self {
+        SplitInclusive { v: self.v, pred: self.pred.clone(), finished: self.finished }
+    }
+}
+
+#[unstable(feature = "split_inclusive", issue = "none")]
+impl<'a, T, P> Iterator for SplitInclusive<'a, T, P>
+where
+    P: FnMut(&T) -> bool,
+{
+    type Item = &'a [T];
+
+    #[inline]
+    fn next(&mut self) -> Option<&'a [T]> {
+        if self.finished {
+            return None;
+        }
+
+        let idx =
+            self.v.iter().position(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(self.v.len());
+        if idx == self.v.len() {
+            self.finished = true;
+        }
+        let ret = Some(&self.v[..idx]);
+        self.v = &self.v[idx..];
+        ret
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        if self.finished { (0, Some(0)) } else { (1, Some(self.v.len() + 1)) }
+    }
+}
+
+#[unstable(feature = "split_inclusive", issue = "none")]
+impl<'a, T, P> DoubleEndedIterator for SplitInclusive<'a, T, P>
+where
+    P: FnMut(&T) -> bool,
+{
+    #[inline]
+    fn next_back(&mut self) -> Option<&'a [T]> {
+        if self.finished {
+            return None;
+        }
+
+        // The last index of self.v is already checked and found to match
+        // by the last iteration, so we start searching a new match
+        // one index to the left.
+        let remainder = if self.v.len() == 0 { &[] } else { &self.v[..(self.v.len() - 1)] };
+        let idx = remainder.iter().rposition(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(0);
+        if idx == 0 {
+            self.finished = true;
+        }
+        let ret = Some(&self.v[idx..]);
+        self.v = &self.v[..idx];
+        ret
+    }
+}
+
+#[unstable(feature = "split_inclusive", issue = "none")]
+impl<T, P> FusedIterator for SplitInclusive<'_, T, P> where P: FnMut(&T) -> bool {}
+
+/// An iterator over the mutable subslices of the vector which are separated
 /// by elements that match `pred`.
 ///
 /// This struct is created by the [`split_mut`] method on [slices].
@@ -3789,6 +3951,114 @@
 #[stable(feature = "fused", since = "1.26.0")]
 impl<T, P> FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
 
+/// An iterator over the mutable subslices of the vector which are separated
+/// by elements that match `pred`. Unlike `SplitMut`, it contains the matched
+/// part at the end of each subslice.
+///
+/// This struct is created by the [`split_inclusive_mut`] method on [slices].
+///
+/// [`split_inclusive_mut`]: ../../std/primitive.slice.html#method.split_inclusive_mut
+/// [slices]: ../../std/primitive.slice.html
+#[unstable(feature = "split_inclusive", issue = "none")]
+pub struct SplitInclusiveMut<'a, T: 'a, P>
+where
+    P: FnMut(&T) -> bool,
+{
+    v: &'a mut [T],
+    pred: P,
+    finished: bool,
+}
+
+#[unstable(feature = "split_inclusive", issue = "none")]
+impl<T: fmt::Debug, P> fmt::Debug for SplitInclusiveMut<'_, T, P>
+where
+    P: FnMut(&T) -> bool,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("SplitInclusiveMut")
+            .field("v", &self.v)
+            .field("finished", &self.finished)
+            .finish()
+    }
+}
+
+#[unstable(feature = "split_inclusive", issue = "none")]
+impl<'a, T, P> Iterator for SplitInclusiveMut<'a, T, P>
+where
+    P: FnMut(&T) -> bool,
+{
+    type Item = &'a mut [T];
+
+    #[inline]
+    fn next(&mut self) -> Option<&'a mut [T]> {
+        if self.finished {
+            return None;
+        }
+
+        let idx_opt = {
+            // work around borrowck limitations
+            let pred = &mut self.pred;
+            self.v.iter().position(|x| (*pred)(x))
+        };
+        let idx = idx_opt.map(|idx| idx + 1).unwrap_or(self.v.len());
+        if idx == self.v.len() {
+            self.finished = true;
+        }
+        let tmp = mem::replace(&mut self.v, &mut []);
+        let (head, tail) = tmp.split_at_mut(idx);
+        self.v = tail;
+        Some(head)
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        if self.finished {
+            (0, Some(0))
+        } else {
+            // If the predicate doesn't match anything, we yield one slice;
+            // if it matches every element, we yield `len` one-element slices,
+            // so `len + 1` is a conservative upper bound.
+            (1, Some(self.v.len() + 1))
+        }
+    }
+}
+
+#[unstable(feature = "split_inclusive", issue = "none")]
+impl<'a, T, P> DoubleEndedIterator for SplitInclusiveMut<'a, T, P>
+where
+    P: FnMut(&T) -> bool,
+{
+    #[inline]
+    fn next_back(&mut self) -> Option<&'a mut [T]> {
+        if self.finished {
+            return None;
+        }
+
+        let idx_opt = if self.v.len() == 0 {
+            None
+        } else {
+            // work around borrowck limitations
+            let pred = &mut self.pred;
+
+            // The last index of self.v is already checked and found to match
+            // by the last iteration, so we start searching a new match
+            // one index to the left.
+            let remainder = &self.v[..(self.v.len() - 1)];
+            remainder.iter().rposition(|x| (*pred)(x))
+        };
+        let idx = idx_opt.map(|idx| idx + 1).unwrap_or(0);
+        if idx == 0 {
+            self.finished = true;
+        }
+        let tmp = mem::replace(&mut self.v, &mut []);
+        let (head, tail) = tmp.split_at_mut(idx);
+        self.v = head;
+        Some(tail)
+    }
+}
+
+#[unstable(feature = "split_inclusive", issue = "none")]
+impl<T, P> FusedIterator for SplitInclusiveMut<'_, T, P> where P: FnMut(&T) -> bool {}
+
 /// An iterator over subslices separated by elements that match a predicate
 /// function, starting from the end of the slice.
 ///
@@ -5584,21 +5854,18 @@
 
 #[doc(hidden)]
 // intermediate trait for specialization of slice's PartialOrd
-trait SlicePartialOrd<B> {
-    fn partial_compare(&self, other: &[B]) -> Option<Ordering>;
+trait SlicePartialOrd: Sized {
+    fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
 }
 
-impl<A> SlicePartialOrd<A> for [A]
-where
-    A: PartialOrd,
-{
-    default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
-        let l = cmp::min(self.len(), other.len());
+impl<A: PartialOrd> SlicePartialOrd for A {
+    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
+        let l = cmp::min(left.len(), right.len());
 
         // Slice to the loop iteration range to enable bound check
         // elimination in the compiler
-        let lhs = &self[..l];
-        let rhs = &other[..l];
+        let lhs = &left[..l];
+        let rhs = &right[..l];
 
         for i in 0..l {
             match lhs[i].partial_cmp(&rhs[i]) {
@@ -5607,36 +5874,61 @@
             }
         }
 
-        self.len().partial_cmp(&other.len())
+        left.len().partial_cmp(&right.len())
     }
 }
 
-impl<A> SlicePartialOrd<A> for [A]
+// This is the impl that we would like to have. Unfortunately it's not sound.
+// See `partial_ord_slice.rs`.
+/*
+impl<A> SlicePartialOrd for A
 where
     A: Ord,
 {
-    default fn partial_compare(&self, other: &[A]) -> Option<Ordering> {
-        Some(SliceOrd::compare(self, other))
+    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
+        Some(SliceOrd::compare(left, right))
     }
 }
+*/
+
+impl<A: AlwaysApplicableOrd> SlicePartialOrd for A {
+    fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
+        Some(SliceOrd::compare(left, right))
+    }
+}
+
+trait AlwaysApplicableOrd: SliceOrd + Ord {}
+
+macro_rules! always_applicable_ord {
+    ($([$($p:tt)*] $t:ty,)*) => {
+        $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
+    }
+}
+
+always_applicable_ord! {
+    [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
+    [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
+    [] bool, [] char,
+    [T: ?Sized] *const T, [T: ?Sized] *mut T,
+    [T: AlwaysApplicableOrd] &T,
+    [T: AlwaysApplicableOrd] &mut T,
+    [T: AlwaysApplicableOrd] Option<T>,
+}
 
 #[doc(hidden)]
 // intermediate trait for specialization of slice's Ord
-trait SliceOrd<B> {
-    fn compare(&self, other: &[B]) -> Ordering;
+trait SliceOrd: Sized {
+    fn compare(left: &[Self], right: &[Self]) -> Ordering;
 }
 
-impl<A> SliceOrd<A> for [A]
-where
-    A: Ord,
-{
-    default fn compare(&self, other: &[A]) -> Ordering {
-        let l = cmp::min(self.len(), other.len());
+impl<A: Ord> SliceOrd for A {
+    default fn compare(left: &[Self], right: &[Self]) -> Ordering {
+        let l = cmp::min(left.len(), right.len());
 
         // Slice to the loop iteration range to enable bound check
         // elimination in the compiler
-        let lhs = &self[..l];
-        let rhs = &other[..l];
+        let lhs = &left[..l];
+        let rhs = &right[..l];
 
         for i in 0..l {
             match lhs[i].cmp(&rhs[i]) {
@@ -5645,19 +5937,19 @@
             }
         }
 
-        self.len().cmp(&other.len())
+        left.len().cmp(&right.len())
     }
 }
 
 // memcmp compares a sequence of unsigned bytes lexicographically.
 // this matches the order we want for [u8], but no others (not even [i8]).
-impl SliceOrd<u8> for [u8] {
+impl SliceOrd for u8 {
     #[inline]
-    fn compare(&self, other: &[u8]) -> Ordering {
+    fn compare(left: &[Self], right: &[Self]) -> Ordering {
         let order =
-            unsafe { memcmp(self.as_ptr(), other.as_ptr(), cmp::min(self.len(), other.len())) };
+            unsafe { memcmp(left.as_ptr(), right.as_ptr(), cmp::min(left.len(), right.len())) };
         if order == 0 {
-            self.len().cmp(&other.len())
+            left.len().cmp(&right.len())
         } else if order < 0 {
             Less
         } else {
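
A nightly-only sketch of the new slice `split_inclusive` iterator added above (it assumes the unstable `split_inclusive` feature), exercising the forward and reverse behaviour the doc examples describe:

```rust
#![feature(split_inclusive)]

fn main() {
    let slice = [10, 40, 33, 20];

    // Forward: each subslice ends with the element that matched the predicate.
    let mut iter = slice.split_inclusive(|num| num % 3 == 0);
    assert_eq!(iter.next().unwrap(), &[10, 40, 33]);
    assert_eq!(iter.next().unwrap(), &[20]);
    assert!(iter.next().is_none());

    // Reverse: the iterator is double-ended and yields the same groups back to front.
    let mut rev = slice.split_inclusive(|num| num % 3 == 0).rev();
    assert_eq!(rev.next().unwrap(), &[20]);
    assert_eq!(rev.next().unwrap(), &[10, 40, 33]);
    assert!(rev.next().is_none());
}
```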
diff --git a/src/libcore/str/mod.rs b/src/libcore/str/mod.rs
index 5a7cddd..9c0db5d 100644
--- a/src/libcore/str/mod.rs
+++ b/src/libcore/str/mod.rs
@@ -12,7 +12,7 @@
 use crate::char;
 use crate::fmt::{self, Write};
 use crate::iter::{Chain, FlatMap, Flatten};
-use crate::iter::{Cloned, Filter, FusedIterator, Map, TrustedLen, TrustedRandomAccess};
+use crate::iter::{Copied, Filter, FusedIterator, Map, TrustedLen, TrustedRandomAccess};
 use crate::mem;
 use crate::ops::Try;
 use crate::option;
@@ -750,7 +750,7 @@
 /// [`str`]: ../../std/primitive.str.html
 #[stable(feature = "rust1", since = "1.0.0")]
 #[derive(Clone, Debug)]
-pub struct Bytes<'a>(Cloned<slice::Iter<'a, u8>>);
+pub struct Bytes<'a>(Copied<slice::Iter<'a, u8>>);
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl Iterator for Bytes<'_> {
@@ -1133,6 +1133,26 @@
     }
 
     #[inline]
+    fn next_inclusive(&mut self) -> Option<&'a str> {
+        if self.finished {
+            return None;
+        }
+
+        let haystack = self.matcher.haystack();
+        match self.matcher.next_match() {
+            // SAFETY: `Searcher` guarantees that `b` lies on unicode boundary,
+            // and self.start is either the start of the original string,
+            // or `b` was assigned to it, so it also lies on unicode boundary.
+            Some((_, b)) => unsafe {
+                let elt = haystack.get_unchecked(self.start..b);
+                self.start = b;
+                Some(elt)
+            },
+            None => self.get_end(),
+        }
+    }
+
+    #[inline]
     fn next_back(&mut self) -> Option<&'a str>
     where
         P::Searcher: ReverseSearcher<'a>,
@@ -1168,6 +1188,49 @@
             },
         }
     }
+
+    #[inline]
+    fn next_back_inclusive(&mut self) -> Option<&'a str>
+    where
+        P::Searcher: ReverseSearcher<'a>,
+    {
+        if self.finished {
+            return None;
+        }
+
+        if !self.allow_trailing_empty {
+            self.allow_trailing_empty = true;
+            match self.next_back_inclusive() {
+                Some(elt) if !elt.is_empty() => return Some(elt),
+                _ => {
+                    if self.finished {
+                        return None;
+                    }
+                }
+            }
+        }
+
+        let haystack = self.matcher.haystack();
+        match self.matcher.next_match_back() {
+            // SAFETY: `Searcher` guarantees that `b` lies on unicode boundary,
+            // and self.end is either the end of the original string,
+            // or `b` was assigned to it, so it also lies on unicode boundary.
+            Some((_, b)) => unsafe {
+                let elt = haystack.get_unchecked(b..self.end);
+                self.end = b;
+                Some(elt)
+            },
+            // SAFETY: self.start is either the start of the original string,
+            // or the start of a substring that represents the part of the string
+            // that hasn't been iterated yet. Either way, it is guaranteed to lie
+            // on unicode boundary.
+            // self.end is either the end of the original string,
+            // or `b` was assigned to it, so it also lies on unicode boundary.
+            None => unsafe {
+                self.finished = true;
+                Some(haystack.get_unchecked(self.start..self.end))
+            },
+        }
+    }
 }
 
 generate_pattern_iterators! {
@@ -1499,7 +1562,7 @@
 
 /// Walks through `v` checking that it's a valid UTF-8 sequence,
 /// returning `Ok(())` in that case, or, if it is invalid, `Err(err)`.
-#[inline]
+#[inline(always)]
 fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> {
     let mut index = 0;
     let len = v.len();
@@ -2423,7 +2486,7 @@
     /// Callers of this function are responsible that these preconditions are
     /// satisfied:
     ///
-    /// * The starting index must come before the ending index;
+    /// * The starting index must not exceed the ending index;
     /// * Indexes must be within bounds of the original slice;
     /// * Indexes must lie on UTF-8 sequence boundaries.
     ///
@@ -2455,7 +2518,7 @@
     /// Callers of this function are responsible that these preconditions are
     /// satisfied:
     ///
-    /// * The starting index must come before the ending index;
+    /// * The starting index must not exceed the ending index;
     /// * Indexes must be within bounds of the original slice;
     /// * Indexes must lie on UTF-8 sequence boundaries.
     ///
@@ -2500,7 +2563,7 @@
     /// Callers of this function are responsible that three preconditions are
     /// satisfied:
     ///
-    /// * `begin` must come before `end`.
+    /// * `begin` must not exceed `end`.
     /// * `begin` and `end` must be byte positions within the string slice.
     /// * `begin` and `end` must lie on UTF-8 sequence boundaries.
     ///
@@ -2549,7 +2612,7 @@
     /// Callers of this function are responsible that three preconditions are
     /// satisfied:
     ///
-    /// * `begin` must come before `end`.
+    /// * `begin` must not exceed `end`.
     /// * `begin` and `end` must be byte positions within the string slice.
     /// * `begin` and `end` must lie on UTF-8 sequence boundaries.
     #[stable(feature = "str_slice_mut", since = "1.5.0")]
@@ -2658,7 +2721,8 @@
     ///
     /// It's important to remember that [`char`] represents a Unicode Scalar
     /// Value, and may not match your idea of what a 'character' is. Iteration
-    /// over grapheme clusters may be what you actually want.
+    /// over grapheme clusters may be what you actually want. This functionality
+    /// is not provided by Rust's standard library; check crates.io instead.
     ///
     /// # Examples
     ///
@@ -2778,7 +2842,7 @@
     #[stable(feature = "rust1", since = "1.0.0")]
     #[inline]
     pub fn bytes(&self) -> Bytes<'_> {
-        Bytes(self.as_bytes().iter().cloned())
+        Bytes(self.as_bytes().iter().copied())
     }
 
     /// Splits a string slice by whitespace.
@@ -3212,6 +3276,42 @@
         })
     }
 
+    /// An iterator over substrings of this string slice, separated by
+    /// characters matched by a pattern. Differs from the iterator produced by
+    /// `split` in that `split_inclusive` leaves the matched part as the
+    /// terminator of the substring.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(split_inclusive)]
+    /// let v: Vec<&str> = "Mary had a little lamb\nlittle lamb\nlittle lamb."
+    ///     .split_inclusive('\n').collect();
+    /// assert_eq!(v, ["Mary had a little lamb\n", "little lamb\n", "little lamb."]);
+    /// ```
+    ///
+    /// If the last element of the string is matched,
+    /// that element will be considered the terminator of the preceding substring.
+    /// That substring will be the last item returned by the iterator.
+    ///
+    /// ```
+    /// #![feature(split_inclusive)]
+    /// let v: Vec<&str> = "Mary had a little lamb\nlittle lamb\nlittle lamb.\n"
+    ///     .split_inclusive('\n').collect();
+    /// assert_eq!(v, ["Mary had a little lamb\n", "little lamb\n", "little lamb.\n"]);
+    /// ```
+    #[unstable(feature = "split_inclusive", issue = "none")]
+    #[inline]
+    pub fn split_inclusive<'a, P: Pattern<'a>>(&'a self, pat: P) -> SplitInclusive<'a, P> {
+        SplitInclusive(SplitInternal {
+            start: 0,
+            end: self.len(),
+            matcher: pat.into_searcher(self),
+            allow_trailing_empty: false,
+            finished: false,
+        })
+    }
+
     /// An iterator over substrings of the given string slice, separated by
     /// characters matched by a pattern and yielded in reverse order.
     ///
@@ -3895,7 +3995,7 @@
             debug_assert_eq!(
                 start, 0,
                 "The first search step from Searcher \
-                must include the first character"
+                 must include the first character"
             );
             // SAFETY: `Searcher` is known to return valid indices.
             unsafe { Some(self.get_unchecked(len..)) }
@@ -3934,7 +4034,7 @@
                 end,
                 self.len(),
                 "The first search step from ReverseSearcher \
-                must include the last character"
+                 must include the last character"
             );
             // SAFETY: `Searcher` is known to return valid indices.
             unsafe { Some(self.get_unchecked(..start)) }
@@ -4405,6 +4505,19 @@
     inner: Map<Filter<SliceSplit<'a, u8, IsAsciiWhitespace>, BytesIsNotEmpty>, UnsafeBytesToStr>,
 }
 
+/// An iterator over the substrings of a string,
+/// terminated by a substring matching a predicate function.
+/// Unlike `Split`, it contains the matched part as a terminator
+/// of the substring.
+///
+/// This struct is created by the [`split_inclusive`] method on [`str`].
+/// See its documentation for more.
+///
+/// [`split_inclusive`]: ../../std/primitive.str.html#method.split_inclusive
+/// [`str`]: ../../std/primitive.str.html
+#[unstable(feature = "split_inclusive", issue = "none")]
+pub struct SplitInclusive<'a, P: Pattern<'a>>(SplitInternal<'a, P>);
+
 impl_fn_for_zst! {
     #[derive(Clone)]
     struct IsWhitespace impl Fn = |c: char| -> bool {
@@ -4495,6 +4608,44 @@
 #[stable(feature = "split_ascii_whitespace", since = "1.34.0")]
 impl FusedIterator for SplitAsciiWhitespace<'_> {}
 
+#[unstable(feature = "split_inclusive", issue = "none")]
+impl<'a, P: Pattern<'a>> Iterator for SplitInclusive<'a, P> {
+    type Item = &'a str;
+
+    #[inline]
+    fn next(&mut self) -> Option<&'a str> {
+        self.0.next_inclusive()
+    }
+}
+
+#[unstable(feature = "split_inclusive", issue = "none")]
+impl<'a, P: Pattern<'a, Searcher: fmt::Debug>> fmt::Debug for SplitInclusive<'a, P> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("SplitInclusive").field("0", &self.0).finish()
+    }
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+#[unstable(feature = "split_inclusive", issue = "none")]
+impl<'a, P: Pattern<'a, Searcher: Clone>> Clone for SplitInclusive<'a, P> {
+    fn clone(&self) -> Self {
+        SplitInclusive(self.0.clone())
+    }
+}
+
+#[unstable(feature = "split_inclusive", issue = "none")]
+impl<'a, P: Pattern<'a, Searcher: ReverseSearcher<'a>>> DoubleEndedIterator
+    for SplitInclusive<'a, P>
+{
+    #[inline]
+    fn next_back(&mut self) -> Option<&'a str> {
+        self.0.next_back_inclusive()
+    }
+}
+
+#[unstable(feature = "split_inclusive", issue = "none")]
+impl<'a, P: Pattern<'a>> FusedIterator for SplitInclusive<'a, P> {}
+
 /// An iterator of [`u16`] over the string encoded as UTF-16.
 ///
 /// [`u16`]: ../../std/primitive.u16.html
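
And the `str` counterpart, again behind the unstable `split_inclusive` feature; because the matched terminator is kept, concatenating the pieces reproduces the input, which is the practical difference from `split`:

```rust
#![feature(split_inclusive)]

fn main() {
    let text = "Mary had a little lamb\nlittle lamb\nlittle lamb.";

    let pieces: Vec<&str> = text.split_inclusive('\n').collect();
    assert_eq!(pieces, ["Mary had a little lamb\n", "little lamb\n", "little lamb."]);

    // Each piece keeps its `\n` terminator, so joining them restores the input.
    assert_eq!(pieces.concat(), text);
}
```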
diff --git a/src/libcore/str/pattern.rs b/src/libcore/str/pattern.rs
index ef64d8b..6c826e5 100644
--- a/src/libcore/str/pattern.rs
+++ b/src/libcore/str/pattern.rs
@@ -1050,7 +1050,7 @@
         // &v[..period]. If it is, we use "Algorithm CP1". Otherwise we use
         // "Algorithm CP2", which is optimized for when the period of the needle
         // is large.
-        if &needle[..crit_pos] == &needle[period..period + crit_pos] {
+        if needle[..crit_pos] == needle[period..period + crit_pos] {
             // short period case -- the period is exact
             // compute a separate critical factorization for the reversed needle
             // x = u' v' where |v'| < period(x).
diff --git a/src/libcore/tests/fmt/num.rs b/src/libcore/tests/fmt/num.rs
index 10fcf8b..a50c2b4 100644
--- a/src/libcore/tests/fmt/num.rs
+++ b/src/libcore/tests/fmt/num.rs
@@ -38,6 +38,16 @@
     assert_eq!(format!("{:o}", 1i16), "1");
     assert_eq!(format!("{:o}", 1i32), "1");
     assert_eq!(format!("{:o}", 1i64), "1");
+    assert_eq!(format!("{:e}", 1isize), "1e0");
+    assert_eq!(format!("{:e}", 1i8), "1e0");
+    assert_eq!(format!("{:e}", 1i16), "1e0");
+    assert_eq!(format!("{:e}", 1i32), "1e0");
+    assert_eq!(format!("{:e}", 1i64), "1e0");
+    assert_eq!(format!("{:E}", 1isize), "1E0");
+    assert_eq!(format!("{:E}", 1i8), "1E0");
+    assert_eq!(format!("{:E}", 1i16), "1E0");
+    assert_eq!(format!("{:E}", 1i32), "1E0");
+    assert_eq!(format!("{:E}", 1i64), "1E0");
 
     assert_eq!(format!("{}", 1usize), "1");
     assert_eq!(format!("{}", 1u8), "1");
@@ -69,6 +79,14 @@
     assert_eq!(format!("{:o}", 1u16), "1");
     assert_eq!(format!("{:o}", 1u32), "1");
     assert_eq!(format!("{:o}", 1u64), "1");
+    assert_eq!(format!("{:e}", 1u8), "1e0");
+    assert_eq!(format!("{:e}", 1u16), "1e0");
+    assert_eq!(format!("{:e}", 1u32), "1e0");
+    assert_eq!(format!("{:e}", 1u64), "1e0");
+    assert_eq!(format!("{:E}", 1u8), "1E0");
+    assert_eq!(format!("{:E}", 1u16), "1E0");
+    assert_eq!(format!("{:E}", 1u32), "1E0");
+    assert_eq!(format!("{:E}", 1u64), "1E0");
 
     // Test a larger number
     assert_eq!(format!("{:b}", 55), "110111");
@@ -76,6 +94,64 @@
     assert_eq!(format!("{}", 55), "55");
     assert_eq!(format!("{:x}", 55), "37");
     assert_eq!(format!("{:X}", 55), "37");
+    assert_eq!(format!("{:e}", 55), "5.5e1");
+    assert_eq!(format!("{:E}", 55), "5.5E1");
+    assert_eq!(format!("{:e}", 10000000000u64), "1e10");
+    assert_eq!(format!("{:E}", 10000000000u64), "1E10");
+    assert_eq!(format!("{:e}", 10000000001u64), "1.0000000001e10");
+    assert_eq!(format!("{:E}", 10000000001u64), "1.0000000001E10");
+}
+
+#[test]
+fn test_format_int_exp_limits() {
+    use core::{i128, i16, i32, i64, i8, u128, u16, u32, u64, u8};
+    assert_eq!(format!("{:e}", i8::MIN), "-1.28e2");
+    assert_eq!(format!("{:e}", i8::MAX), "1.27e2");
+    assert_eq!(format!("{:e}", i16::MIN), "-3.2768e4");
+    assert_eq!(format!("{:e}", i16::MAX), "3.2767e4");
+    assert_eq!(format!("{:e}", i32::MIN), "-2.147483648e9");
+    assert_eq!(format!("{:e}", i32::MAX), "2.147483647e9");
+    assert_eq!(format!("{:e}", i64::MIN), "-9.223372036854775808e18");
+    assert_eq!(format!("{:e}", i64::MAX), "9.223372036854775807e18");
+    assert_eq!(format!("{:e}", i128::MIN), "-1.70141183460469231731687303715884105728e38");
+    assert_eq!(format!("{:e}", i128::MAX), "1.70141183460469231731687303715884105727e38");
+
+    assert_eq!(format!("{:e}", u8::MAX), "2.55e2");
+    assert_eq!(format!("{:e}", u16::MAX), "6.5535e4");
+    assert_eq!(format!("{:e}", u32::MAX), "4.294967295e9");
+    assert_eq!(format!("{:e}", u64::MAX), "1.8446744073709551615e19");
+    assert_eq!(format!("{:e}", u128::MAX), "3.40282366920938463463374607431768211455e38");
+}
+
+#[test]
+fn test_format_int_exp_precision() {
+    use core::{i128, i16, i32, i64, i8};
+
+    // test that float and integer match
+    let big_int: u32 = 314_159_265;
+    assert_eq!(format!("{:.1e}", big_int), format!("{:.1e}", f64::from(big_int)));
+
+    // test adding precision
+    assert_eq!(format!("{:.10e}", i8::MIN), "-1.2800000000e2");
+    assert_eq!(format!("{:.10e}", i16::MIN), "-3.2768000000e4");
+    assert_eq!(format!("{:.10e}", i32::MIN), "-2.1474836480e9");
+    assert_eq!(format!("{:.20e}", i64::MIN), "-9.22337203685477580800e18");
+    assert_eq!(format!("{:.40e}", i128::MIN), "-1.7014118346046923173168730371588410572800e38");
+
+    // test rounding
+    assert_eq!(format!("{:.1e}", i8::MIN), "-1.3e2");
+    assert_eq!(format!("{:.1e}", i16::MIN), "-3.3e4");
+    assert_eq!(format!("{:.1e}", i32::MIN), "-2.1e9");
+    assert_eq!(format!("{:.1e}", i64::MIN), "-9.2e18");
+    assert_eq!(format!("{:.1e}", i128::MIN), "-1.7e38");
+
+    // test huge precision
+    assert_eq!(format!("{:.1000e}", 1), format!("1.{}e0", "0".repeat(1000)));
+    // test zero precision
+    assert_eq!(format!("{:.0e}", 1), format!("1e0",));
+
+    // test padding with precision (and sign)
+    assert_eq!(format!("{:+10.3e}", 1), "  +1.000e0");
 }
 
 #[test]
@@ -86,6 +162,8 @@
     assert_eq!(format!("{:o}", 0), "0");
     assert_eq!(format!("{:x}", 0), "0");
     assert_eq!(format!("{:X}", 0), "0");
+    assert_eq!(format!("{:e}", 0), "0e0");
+    assert_eq!(format!("{:E}", 0), "0E0");
 
     assert_eq!(format!("{}", 0u32), "0");
     assert_eq!(format!("{:?}", 0u32), "0");
@@ -93,6 +171,8 @@
     assert_eq!(format!("{:o}", 0u32), "0");
     assert_eq!(format!("{:x}", 0u32), "0");
     assert_eq!(format!("{:X}", 0u32), "0");
+    assert_eq!(format!("{:e}", 0u32), "0e0");
+    assert_eq!(format!("{:E}", 0u32), "0E0");
 }
 
 #[test]
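
The new assertions above exercise exponential (`{:e}` / `{:E}`) formatting for integer types; a tiny standalone sketch of the same behaviour, with values taken directly from the tests:

```rust
fn main() {
    // Integers format in exponential notation just like floats do.
    assert_eq!(format!("{:e}", 55), "5.5e1");
    assert_eq!(format!("{:E}", 10000000001u64), "1.0000000001E10");

    // Precision, padding, and sign flags combine with `{:e}` as usual.
    assert_eq!(format!("{:+10.3e}", 1), "  +1.000e0");
}
```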
diff --git a/src/libcore/tests/iter.rs b/src/libcore/tests/iter.rs
index bd3218e..5b41ef3 100644
--- a/src/libcore/tests/iter.rs
+++ b/src/libcore/tests/iter.rs
@@ -1956,11 +1956,19 @@
     assert_eq!(r.next(), None);
     assert_eq!(r.next(), None);
 
+    assert_eq!(*r.start(), 10);
+    assert_eq!(*r.end(), 10);
+    assert_ne!(r, 10..=10);
+
     let mut r = 10..=10;
     assert_eq!(r.next_back(), Some(10));
     assert!(r.is_empty());
     assert_eq!(r.next_back(), None);
 
+    assert_eq!(*r.start(), 10);
+    assert_eq!(*r.end(), 10);
+    assert_ne!(r, 10..=10);
+
     let mut r = 10..=12;
     assert_eq!(r.next(), Some(10));
     assert_eq!(r.next(), Some(11));
@@ -2078,6 +2086,9 @@
     assert_eq!((10..=15).nth(5), Some(15));
     assert_eq!((10..=15).nth(6), None);
 
+    let mut exhausted_via_next = 10_u8..=20;
+    while exhausted_via_next.next().is_some() {}
+
     let mut r = 10_u8..=20;
     assert_eq!(r.nth(2), Some(12));
     assert_eq!(r, 13..=20);
@@ -2087,6 +2098,7 @@
     assert_eq!(ExactSizeIterator::is_empty(&r), false);
     assert_eq!(r.nth(10), None);
     assert_eq!(r.is_empty(), true);
+    assert_eq!(r, exhausted_via_next);
     assert_eq!(ExactSizeIterator::is_empty(&r), true);
 }
 
@@ -2098,6 +2110,9 @@
     assert_eq!((10..=15).nth_back(6), None);
     assert_eq!((-120..=80_i8).nth_back(200), Some(-120));
 
+    let mut exhausted_via_next_back = 10_u8..=20;
+    while exhausted_via_next_back.next_back().is_some() {}
+
     let mut r = 10_u8..=20;
     assert_eq!(r.nth_back(2), Some(18));
     assert_eq!(r, 10..=17);
@@ -2107,6 +2122,7 @@
     assert_eq!(ExactSizeIterator::is_empty(&r), false);
     assert_eq!(r.nth_back(10), None);
     assert_eq!(r.is_empty(), true);
+    assert_eq!(r, exhausted_via_next_back);
     assert_eq!(ExactSizeIterator::is_empty(&r), true);
 }
 
diff --git a/src/libcore/tests/lib.rs b/src/libcore/tests/lib.rs
index bfc3ee0..991458d 100644
--- a/src/libcore/tests/lib.rs
+++ b/src/libcore/tests/lib.rs
@@ -17,7 +17,6 @@
 #![feature(range_is_empty)]
 #![feature(raw)]
 #![feature(saturating_neg)]
-#![cfg_attr(bootstrap, feature(slice_patterns))]
 #![feature(sort_internals)]
 #![feature(slice_partition_at_index)]
 #![feature(specialization)]
diff --git a/src/libcore/tests/num/flt2dec/mod.rs b/src/libcore/tests/num/flt2dec/mod.rs
index f693504..e945d9c 100644
--- a/src/libcore/tests/num/flt2dec/mod.rs
+++ b/src/libcore/tests/num/flt2dec/mod.rs
@@ -500,94 +500,91 @@
 {
     use core::num::flt2dec::Sign::*;
 
-    fn to_string<T, F>(f: &mut F, v: T, sign: Sign, frac_digits: usize, upper: bool) -> String
+    fn to_string<T, F>(f: &mut F, v: T, sign: Sign, frac_digits: usize) -> String
     where
         T: DecodableFloat,
         F: FnMut(&Decoded, &mut [u8]) -> (usize, i16),
     {
         to_string_with_parts(|buf, parts| {
-            to_shortest_str(|d, b| f(d, b), v, sign, frac_digits, upper, buf, parts)
+            to_shortest_str(|d, b| f(d, b), v, sign, frac_digits, buf, parts)
         })
     }
 
     let f = &mut f_;
 
-    assert_eq!(to_string(f, 0.0, Minus, 0, false), "0");
-    assert_eq!(to_string(f, 0.0, MinusRaw, 0, false), "0");
-    assert_eq!(to_string(f, 0.0, MinusPlus, 0, false), "+0");
-    assert_eq!(to_string(f, 0.0, MinusPlusRaw, 0, false), "+0");
-    assert_eq!(to_string(f, -0.0, Minus, 0, false), "0");
-    assert_eq!(to_string(f, -0.0, MinusRaw, 0, false), "-0");
-    assert_eq!(to_string(f, -0.0, MinusPlus, 0, false), "+0");
-    assert_eq!(to_string(f, -0.0, MinusPlusRaw, 0, false), "-0");
-    assert_eq!(to_string(f, 0.0, Minus, 1, true), "0.0");
-    assert_eq!(to_string(f, 0.0, MinusRaw, 1, true), "0.0");
-    assert_eq!(to_string(f, 0.0, MinusPlus, 1, true), "+0.0");
-    assert_eq!(to_string(f, 0.0, MinusPlusRaw, 1, true), "+0.0");
-    assert_eq!(to_string(f, -0.0, Minus, 8, true), "0.00000000");
-    assert_eq!(to_string(f, -0.0, MinusRaw, 8, true), "-0.00000000");
-    assert_eq!(to_string(f, -0.0, MinusPlus, 8, true), "+0.00000000");
-    assert_eq!(to_string(f, -0.0, MinusPlusRaw, 8, true), "-0.00000000");
+    assert_eq!(to_string(f, 0.0, Minus, 0), "0");
+    assert_eq!(to_string(f, 0.0, MinusRaw, 0), "0");
+    assert_eq!(to_string(f, 0.0, MinusPlus, 0), "+0");
+    assert_eq!(to_string(f, 0.0, MinusPlusRaw, 0), "+0");
+    assert_eq!(to_string(f, -0.0, Minus, 0), "0");
+    assert_eq!(to_string(f, -0.0, MinusRaw, 0), "-0");
+    assert_eq!(to_string(f, -0.0, MinusPlus, 0), "+0");
+    assert_eq!(to_string(f, -0.0, MinusPlusRaw, 0), "-0");
+    assert_eq!(to_string(f, 0.0, Minus, 1), "0.0");
+    assert_eq!(to_string(f, 0.0, MinusRaw, 1), "0.0");
+    assert_eq!(to_string(f, 0.0, MinusPlus, 1), "+0.0");
+    assert_eq!(to_string(f, 0.0, MinusPlusRaw, 1), "+0.0");
+    assert_eq!(to_string(f, -0.0, Minus, 8), "0.00000000");
+    assert_eq!(to_string(f, -0.0, MinusRaw, 8), "-0.00000000");
+    assert_eq!(to_string(f, -0.0, MinusPlus, 8), "+0.00000000");
+    assert_eq!(to_string(f, -0.0, MinusPlusRaw, 8), "-0.00000000");
 
-    assert_eq!(to_string(f, 1.0 / 0.0, Minus, 0, false), "inf");
-    assert_eq!(to_string(f, 1.0 / 0.0, MinusRaw, 0, true), "inf");
-    assert_eq!(to_string(f, 1.0 / 0.0, MinusPlus, 0, false), "+inf");
-    assert_eq!(to_string(f, 1.0 / 0.0, MinusPlusRaw, 0, true), "+inf");
-    assert_eq!(to_string(f, 0.0 / 0.0, Minus, 0, false), "NaN");
-    assert_eq!(to_string(f, 0.0 / 0.0, MinusRaw, 1, true), "NaN");
-    assert_eq!(to_string(f, 0.0 / 0.0, MinusPlus, 8, false), "NaN");
-    assert_eq!(to_string(f, 0.0 / 0.0, MinusPlusRaw, 64, true), "NaN");
-    assert_eq!(to_string(f, -1.0 / 0.0, Minus, 0, false), "-inf");
-    assert_eq!(to_string(f, -1.0 / 0.0, MinusRaw, 1, true), "-inf");
-    assert_eq!(to_string(f, -1.0 / 0.0, MinusPlus, 8, false), "-inf");
-    assert_eq!(to_string(f, -1.0 / 0.0, MinusPlusRaw, 64, true), "-inf");
+    assert_eq!(to_string(f, 1.0 / 0.0, Minus, 0), "inf");
+    assert_eq!(to_string(f, 1.0 / 0.0, MinusRaw, 0), "inf");
+    assert_eq!(to_string(f, 1.0 / 0.0, MinusPlus, 0), "+inf");
+    assert_eq!(to_string(f, 1.0 / 0.0, MinusPlusRaw, 0), "+inf");
+    assert_eq!(to_string(f, 0.0 / 0.0, Minus, 0), "NaN");
+    assert_eq!(to_string(f, 0.0 / 0.0, MinusRaw, 1), "NaN");
+    assert_eq!(to_string(f, 0.0 / 0.0, MinusPlus, 8), "NaN");
+    assert_eq!(to_string(f, 0.0 / 0.0, MinusPlusRaw, 64), "NaN");
+    assert_eq!(to_string(f, -1.0 / 0.0, Minus, 0), "-inf");
+    assert_eq!(to_string(f, -1.0 / 0.0, MinusRaw, 1), "-inf");
+    assert_eq!(to_string(f, -1.0 / 0.0, MinusPlus, 8), "-inf");
+    assert_eq!(to_string(f, -1.0 / 0.0, MinusPlusRaw, 64), "-inf");
 
-    assert_eq!(to_string(f, 3.14, Minus, 0, false), "3.14");
-    assert_eq!(to_string(f, 3.14, MinusRaw, 0, false), "3.14");
-    assert_eq!(to_string(f, 3.14, MinusPlus, 0, false), "+3.14");
-    assert_eq!(to_string(f, 3.14, MinusPlusRaw, 0, false), "+3.14");
-    assert_eq!(to_string(f, -3.14, Minus, 0, false), "-3.14");
-    assert_eq!(to_string(f, -3.14, MinusRaw, 0, false), "-3.14");
-    assert_eq!(to_string(f, -3.14, MinusPlus, 0, false), "-3.14");
-    assert_eq!(to_string(f, -3.14, MinusPlusRaw, 0, false), "-3.14");
-    assert_eq!(to_string(f, 3.14, Minus, 1, true), "3.14");
-    assert_eq!(to_string(f, 3.14, MinusRaw, 2, true), "3.14");
-    assert_eq!(to_string(f, 3.14, MinusPlus, 3, true), "+3.140");
-    assert_eq!(to_string(f, 3.14, MinusPlusRaw, 4, true), "+3.1400");
-    assert_eq!(to_string(f, -3.14, Minus, 8, true), "-3.14000000");
-    assert_eq!(to_string(f, -3.14, MinusRaw, 8, true), "-3.14000000");
-    assert_eq!(to_string(f, -3.14, MinusPlus, 8, true), "-3.14000000");
-    assert_eq!(to_string(f, -3.14, MinusPlusRaw, 8, true), "-3.14000000");
+    assert_eq!(to_string(f, 3.14, Minus, 0), "3.14");
+    assert_eq!(to_string(f, 3.14, MinusRaw, 0), "3.14");
+    assert_eq!(to_string(f, 3.14, MinusPlus, 0), "+3.14");
+    assert_eq!(to_string(f, 3.14, MinusPlusRaw, 0), "+3.14");
+    assert_eq!(to_string(f, -3.14, Minus, 0), "-3.14");
+    assert_eq!(to_string(f, -3.14, MinusRaw, 0), "-3.14");
+    assert_eq!(to_string(f, -3.14, MinusPlus, 0), "-3.14");
+    assert_eq!(to_string(f, -3.14, MinusPlusRaw, 0), "-3.14");
+    assert_eq!(to_string(f, 3.14, Minus, 1), "3.14");
+    assert_eq!(to_string(f, 3.14, MinusRaw, 2), "3.14");
+    assert_eq!(to_string(f, 3.14, MinusPlus, 3), "+3.140");
+    assert_eq!(to_string(f, 3.14, MinusPlusRaw, 4), "+3.1400");
+    assert_eq!(to_string(f, -3.14, Minus, 8), "-3.14000000");
+    assert_eq!(to_string(f, -3.14, MinusRaw, 8), "-3.14000000");
+    assert_eq!(to_string(f, -3.14, MinusPlus, 8), "-3.14000000");
+    assert_eq!(to_string(f, -3.14, MinusPlusRaw, 8), "-3.14000000");
 
-    assert_eq!(to_string(f, 7.5e-11, Minus, 0, false), "0.000000000075");
-    assert_eq!(to_string(f, 7.5e-11, Minus, 3, false), "0.000000000075");
-    assert_eq!(to_string(f, 7.5e-11, Minus, 12, false), "0.000000000075");
-    assert_eq!(to_string(f, 7.5e-11, Minus, 13, false), "0.0000000000750");
+    assert_eq!(to_string(f, 7.5e-11, Minus, 0), "0.000000000075");
+    assert_eq!(to_string(f, 7.5e-11, Minus, 3), "0.000000000075");
+    assert_eq!(to_string(f, 7.5e-11, Minus, 12), "0.000000000075");
+    assert_eq!(to_string(f, 7.5e-11, Minus, 13), "0.0000000000750");
 
-    assert_eq!(to_string(f, 1.9971e20, Minus, 0, false), "199710000000000000000");
-    assert_eq!(to_string(f, 1.9971e20, Minus, 1, false), "199710000000000000000.0");
-    assert_eq!(to_string(f, 1.9971e20, Minus, 8, false), "199710000000000000000.00000000");
+    assert_eq!(to_string(f, 1.9971e20, Minus, 0), "199710000000000000000");
+    assert_eq!(to_string(f, 1.9971e20, Minus, 1), "199710000000000000000.0");
+    assert_eq!(to_string(f, 1.9971e20, Minus, 8), "199710000000000000000.00000000");
 
-    assert_eq!(to_string(f, f32::MAX, Minus, 0, false), format!("34028235{:0>31}", ""));
-    assert_eq!(to_string(f, f32::MAX, Minus, 1, false), format!("34028235{:0>31}.0", ""));
-    assert_eq!(to_string(f, f32::MAX, Minus, 8, false), format!("34028235{:0>31}.00000000", ""));
+    assert_eq!(to_string(f, f32::MAX, Minus, 0), format!("34028235{:0>31}", ""));
+    assert_eq!(to_string(f, f32::MAX, Minus, 1), format!("34028235{:0>31}.0", ""));
+    assert_eq!(to_string(f, f32::MAX, Minus, 8), format!("34028235{:0>31}.00000000", ""));
 
     let minf32 = ldexp_f32(1.0, -149);
-    assert_eq!(to_string(f, minf32, Minus, 0, false), format!("0.{:0>44}1", ""));
-    assert_eq!(to_string(f, minf32, Minus, 45, false), format!("0.{:0>44}1", ""));
-    assert_eq!(to_string(f, minf32, Minus, 46, false), format!("0.{:0>44}10", ""));
+    assert_eq!(to_string(f, minf32, Minus, 0), format!("0.{:0>44}1", ""));
+    assert_eq!(to_string(f, minf32, Minus, 45), format!("0.{:0>44}1", ""));
+    assert_eq!(to_string(f, minf32, Minus, 46), format!("0.{:0>44}10", ""));
 
-    assert_eq!(to_string(f, f64::MAX, Minus, 0, false), format!("17976931348623157{:0>292}", ""));
-    assert_eq!(to_string(f, f64::MAX, Minus, 1, false), format!("17976931348623157{:0>292}.0", ""));
-    assert_eq!(
-        to_string(f, f64::MAX, Minus, 8, false),
-        format!("17976931348623157{:0>292}.00000000", "")
-    );
+    assert_eq!(to_string(f, f64::MAX, Minus, 0), format!("17976931348623157{:0>292}", ""));
+    assert_eq!(to_string(f, f64::MAX, Minus, 1), format!("17976931348623157{:0>292}.0", ""));
+    assert_eq!(to_string(f, f64::MAX, Minus, 8), format!("17976931348623157{:0>292}.00000000", ""));
 
     let minf64 = ldexp_f64(1.0, -1074);
-    assert_eq!(to_string(f, minf64, Minus, 0, false), format!("0.{:0>323}5", ""));
-    assert_eq!(to_string(f, minf64, Minus, 324, false), format!("0.{:0>323}5", ""));
-    assert_eq!(to_string(f, minf64, Minus, 325, false), format!("0.{:0>323}50", ""));
+    assert_eq!(to_string(f, minf64, Minus, 0), format!("0.{:0>323}5", ""));
+    assert_eq!(to_string(f, minf64, Minus, 324), format!("0.{:0>323}5", ""));
+    assert_eq!(to_string(f, minf64, Minus, 325), format!("0.{:0>323}50", ""));
 
     if cfg!(miri) {
         // Miri is too slow
@@ -595,7 +592,7 @@
     }
 
     // very large output
-    assert_eq!(to_string(f, 1.1, Minus, 80000, false), format!("1.1{:0>79999}", ""));
+    assert_eq!(to_string(f, 1.1, Minus, 80000), format!("1.1{:0>79999}", ""));
 }
 
 pub fn to_shortest_exp_str_test<F>(mut f_: F)
@@ -996,166 +993,157 @@
 {
     use core::num::flt2dec::Sign::*;
 
-    fn to_string<T, F>(f: &mut F, v: T, sign: Sign, frac_digits: usize, upper: bool) -> String
+    fn to_string<T, F>(f: &mut F, v: T, sign: Sign, frac_digits: usize) -> String
     where
         T: DecodableFloat,
         F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16),
     {
         to_string_with_parts(|buf, parts| {
-            to_exact_fixed_str(|d, b, l| f(d, b, l), v, sign, frac_digits, upper, buf, parts)
+            to_exact_fixed_str(|d, b, l| f(d, b, l), v, sign, frac_digits, buf, parts)
         })
     }
 
     let f = &mut f_;
 
-    assert_eq!(to_string(f, 0.0, Minus, 0, false), "0");
-    assert_eq!(to_string(f, 0.0, MinusRaw, 0, false), "0");
-    assert_eq!(to_string(f, 0.0, MinusPlus, 0, false), "+0");
-    assert_eq!(to_string(f, 0.0, MinusPlusRaw, 0, false), "+0");
-    assert_eq!(to_string(f, -0.0, Minus, 0, false), "0");
-    assert_eq!(to_string(f, -0.0, MinusRaw, 0, false), "-0");
-    assert_eq!(to_string(f, -0.0, MinusPlus, 0, false), "+0");
-    assert_eq!(to_string(f, -0.0, MinusPlusRaw, 0, false), "-0");
-    assert_eq!(to_string(f, 0.0, Minus, 1, true), "0.0");
-    assert_eq!(to_string(f, 0.0, MinusRaw, 1, true), "0.0");
-    assert_eq!(to_string(f, 0.0, MinusPlus, 1, true), "+0.0");
-    assert_eq!(to_string(f, 0.0, MinusPlusRaw, 1, true), "+0.0");
-    assert_eq!(to_string(f, -0.0, Minus, 8, true), "0.00000000");
-    assert_eq!(to_string(f, -0.0, MinusRaw, 8, true), "-0.00000000");
-    assert_eq!(to_string(f, -0.0, MinusPlus, 8, true), "+0.00000000");
-    assert_eq!(to_string(f, -0.0, MinusPlusRaw, 8, true), "-0.00000000");
+    assert_eq!(to_string(f, 0.0, Minus, 0), "0");
+    assert_eq!(to_string(f, 0.0, MinusRaw, 0), "0");
+    assert_eq!(to_string(f, 0.0, MinusPlus, 0), "+0");
+    assert_eq!(to_string(f, 0.0, MinusPlusRaw, 0), "+0");
+    assert_eq!(to_string(f, -0.0, Minus, 0), "0");
+    assert_eq!(to_string(f, -0.0, MinusRaw, 0), "-0");
+    assert_eq!(to_string(f, -0.0, MinusPlus, 0), "+0");
+    assert_eq!(to_string(f, -0.0, MinusPlusRaw, 0), "-0");
+    assert_eq!(to_string(f, 0.0, Minus, 1), "0.0");
+    assert_eq!(to_string(f, 0.0, MinusRaw, 1), "0.0");
+    assert_eq!(to_string(f, 0.0, MinusPlus, 1), "+0.0");
+    assert_eq!(to_string(f, 0.0, MinusPlusRaw, 1), "+0.0");
+    assert_eq!(to_string(f, -0.0, Minus, 8), "0.00000000");
+    assert_eq!(to_string(f, -0.0, MinusRaw, 8), "-0.00000000");
+    assert_eq!(to_string(f, -0.0, MinusPlus, 8), "+0.00000000");
+    assert_eq!(to_string(f, -0.0, MinusPlusRaw, 8), "-0.00000000");
 
-    assert_eq!(to_string(f, 1.0 / 0.0, Minus, 0, false), "inf");
-    assert_eq!(to_string(f, 1.0 / 0.0, MinusRaw, 1, true), "inf");
-    assert_eq!(to_string(f, 1.0 / 0.0, MinusPlus, 8, false), "+inf");
-    assert_eq!(to_string(f, 1.0 / 0.0, MinusPlusRaw, 64, true), "+inf");
-    assert_eq!(to_string(f, 0.0 / 0.0, Minus, 0, false), "NaN");
-    assert_eq!(to_string(f, 0.0 / 0.0, MinusRaw, 1, true), "NaN");
-    assert_eq!(to_string(f, 0.0 / 0.0, MinusPlus, 8, false), "NaN");
-    assert_eq!(to_string(f, 0.0 / 0.0, MinusPlusRaw, 64, true), "NaN");
-    assert_eq!(to_string(f, -1.0 / 0.0, Minus, 0, false), "-inf");
-    assert_eq!(to_string(f, -1.0 / 0.0, MinusRaw, 1, true), "-inf");
-    assert_eq!(to_string(f, -1.0 / 0.0, MinusPlus, 8, false), "-inf");
-    assert_eq!(to_string(f, -1.0 / 0.0, MinusPlusRaw, 64, true), "-inf");
+    assert_eq!(to_string(f, 1.0 / 0.0, Minus, 0), "inf");
+    assert_eq!(to_string(f, 1.0 / 0.0, MinusRaw, 1), "inf");
+    assert_eq!(to_string(f, 1.0 / 0.0, MinusPlus, 8), "+inf");
+    assert_eq!(to_string(f, 1.0 / 0.0, MinusPlusRaw, 64), "+inf");
+    assert_eq!(to_string(f, 0.0 / 0.0, Minus, 0), "NaN");
+    assert_eq!(to_string(f, 0.0 / 0.0, MinusRaw, 1), "NaN");
+    assert_eq!(to_string(f, 0.0 / 0.0, MinusPlus, 8), "NaN");
+    assert_eq!(to_string(f, 0.0 / 0.0, MinusPlusRaw, 64), "NaN");
+    assert_eq!(to_string(f, -1.0 / 0.0, Minus, 0), "-inf");
+    assert_eq!(to_string(f, -1.0 / 0.0, MinusRaw, 1), "-inf");
+    assert_eq!(to_string(f, -1.0 / 0.0, MinusPlus, 8), "-inf");
+    assert_eq!(to_string(f, -1.0 / 0.0, MinusPlusRaw, 64), "-inf");
 
-    assert_eq!(to_string(f, 3.14, Minus, 0, false), "3");
-    assert_eq!(to_string(f, 3.14, MinusRaw, 0, false), "3");
-    assert_eq!(to_string(f, 3.14, MinusPlus, 0, false), "+3");
-    assert_eq!(to_string(f, 3.14, MinusPlusRaw, 0, false), "+3");
-    assert_eq!(to_string(f, -3.14, Minus, 0, false), "-3");
-    assert_eq!(to_string(f, -3.14, MinusRaw, 0, false), "-3");
-    assert_eq!(to_string(f, -3.14, MinusPlus, 0, false), "-3");
-    assert_eq!(to_string(f, -3.14, MinusPlusRaw, 0, false), "-3");
-    assert_eq!(to_string(f, 3.14, Minus, 1, true), "3.1");
-    assert_eq!(to_string(f, 3.14, MinusRaw, 2, true), "3.14");
-    assert_eq!(to_string(f, 3.14, MinusPlus, 3, true), "+3.140");
-    assert_eq!(to_string(f, 3.14, MinusPlusRaw, 4, true), "+3.1400");
-    assert_eq!(to_string(f, -3.14, Minus, 8, true), "-3.14000000");
-    assert_eq!(to_string(f, -3.14, MinusRaw, 8, true), "-3.14000000");
-    assert_eq!(to_string(f, -3.14, MinusPlus, 8, true), "-3.14000000");
-    assert_eq!(to_string(f, -3.14, MinusPlusRaw, 8, true), "-3.14000000");
+    assert_eq!(to_string(f, 3.14, Minus, 0), "3");
+    assert_eq!(to_string(f, 3.14, MinusRaw, 0), "3");
+    assert_eq!(to_string(f, 3.14, MinusPlus, 0), "+3");
+    assert_eq!(to_string(f, 3.14, MinusPlusRaw, 0), "+3");
+    assert_eq!(to_string(f, -3.14, Minus, 0), "-3");
+    assert_eq!(to_string(f, -3.14, MinusRaw, 0), "-3");
+    assert_eq!(to_string(f, -3.14, MinusPlus, 0), "-3");
+    assert_eq!(to_string(f, -3.14, MinusPlusRaw, 0), "-3");
+    assert_eq!(to_string(f, 3.14, Minus, 1), "3.1");
+    assert_eq!(to_string(f, 3.14, MinusRaw, 2), "3.14");
+    assert_eq!(to_string(f, 3.14, MinusPlus, 3), "+3.140");
+    assert_eq!(to_string(f, 3.14, MinusPlusRaw, 4), "+3.1400");
+    assert_eq!(to_string(f, -3.14, Minus, 8), "-3.14000000");
+    assert_eq!(to_string(f, -3.14, MinusRaw, 8), "-3.14000000");
+    assert_eq!(to_string(f, -3.14, MinusPlus, 8), "-3.14000000");
+    assert_eq!(to_string(f, -3.14, MinusPlusRaw, 8), "-3.14000000");
 
-    assert_eq!(to_string(f, 0.195, Minus, 0, false), "0");
-    assert_eq!(to_string(f, 0.195, MinusRaw, 0, false), "0");
-    assert_eq!(to_string(f, 0.195, MinusPlus, 0, false), "+0");
-    assert_eq!(to_string(f, 0.195, MinusPlusRaw, 0, false), "+0");
-    assert_eq!(to_string(f, -0.195, Minus, 0, false), "-0");
-    assert_eq!(to_string(f, -0.195, MinusRaw, 0, false), "-0");
-    assert_eq!(to_string(f, -0.195, MinusPlus, 0, false), "-0");
-    assert_eq!(to_string(f, -0.195, MinusPlusRaw, 0, false), "-0");
-    assert_eq!(to_string(f, 0.195, Minus, 1, true), "0.2");
-    assert_eq!(to_string(f, 0.195, MinusRaw, 2, true), "0.20");
-    assert_eq!(to_string(f, 0.195, MinusPlus, 3, true), "+0.195");
-    assert_eq!(to_string(f, 0.195, MinusPlusRaw, 4, true), "+0.1950");
-    assert_eq!(to_string(f, -0.195, Minus, 5, true), "-0.19500");
-    assert_eq!(to_string(f, -0.195, MinusRaw, 6, true), "-0.195000");
-    assert_eq!(to_string(f, -0.195, MinusPlus, 7, true), "-0.1950000");
-    assert_eq!(to_string(f, -0.195, MinusPlusRaw, 8, true), "-0.19500000");
+    assert_eq!(to_string(f, 0.195, Minus, 0), "0");
+    assert_eq!(to_string(f, 0.195, MinusRaw, 0), "0");
+    assert_eq!(to_string(f, 0.195, MinusPlus, 0), "+0");
+    assert_eq!(to_string(f, 0.195, MinusPlusRaw, 0), "+0");
+    assert_eq!(to_string(f, -0.195, Minus, 0), "-0");
+    assert_eq!(to_string(f, -0.195, MinusRaw, 0), "-0");
+    assert_eq!(to_string(f, -0.195, MinusPlus, 0), "-0");
+    assert_eq!(to_string(f, -0.195, MinusPlusRaw, 0), "-0");
+    assert_eq!(to_string(f, 0.195, Minus, 1), "0.2");
+    assert_eq!(to_string(f, 0.195, MinusRaw, 2), "0.20");
+    assert_eq!(to_string(f, 0.195, MinusPlus, 3), "+0.195");
+    assert_eq!(to_string(f, 0.195, MinusPlusRaw, 4), "+0.1950");
+    assert_eq!(to_string(f, -0.195, Minus, 5), "-0.19500");
+    assert_eq!(to_string(f, -0.195, MinusRaw, 6), "-0.195000");
+    assert_eq!(to_string(f, -0.195, MinusPlus, 7), "-0.1950000");
+    assert_eq!(to_string(f, -0.195, MinusPlusRaw, 8), "-0.19500000");
 
-    assert_eq!(to_string(f, 999.5, Minus, 0, false), "1000");
-    assert_eq!(to_string(f, 999.5, Minus, 1, false), "999.5");
-    assert_eq!(to_string(f, 999.5, Minus, 2, false), "999.50");
-    assert_eq!(to_string(f, 999.5, Minus, 3, false), "999.500");
-    assert_eq!(to_string(f, 999.5, Minus, 30, false), "999.500000000000000000000000000000");
+    assert_eq!(to_string(f, 999.5, Minus, 0), "1000");
+    assert_eq!(to_string(f, 999.5, Minus, 1), "999.5");
+    assert_eq!(to_string(f, 999.5, Minus, 2), "999.50");
+    assert_eq!(to_string(f, 999.5, Minus, 3), "999.500");
+    assert_eq!(to_string(f, 999.5, Minus, 30), "999.500000000000000000000000000000");
 
-    assert_eq!(to_string(f, 0.5, Minus, 0, false), "1");
-    assert_eq!(to_string(f, 0.5, Minus, 1, false), "0.5");
-    assert_eq!(to_string(f, 0.5, Minus, 2, false), "0.50");
-    assert_eq!(to_string(f, 0.5, Minus, 3, false), "0.500");
+    assert_eq!(to_string(f, 0.5, Minus, 0), "1");
+    assert_eq!(to_string(f, 0.5, Minus, 1), "0.5");
+    assert_eq!(to_string(f, 0.5, Minus, 2), "0.50");
+    assert_eq!(to_string(f, 0.5, Minus, 3), "0.500");
 
-    assert_eq!(to_string(f, 0.95, Minus, 0, false), "1");
-    assert_eq!(to_string(f, 0.95, Minus, 1, false), "0.9"); // because it really is less than 0.95
-    assert_eq!(to_string(f, 0.95, Minus, 2, false), "0.95");
-    assert_eq!(to_string(f, 0.95, Minus, 3, false), "0.950");
-    assert_eq!(to_string(f, 0.95, Minus, 10, false), "0.9500000000");
-    assert_eq!(to_string(f, 0.95, Minus, 30, false), "0.949999999999999955591079014994");
+    assert_eq!(to_string(f, 0.95, Minus, 0), "1");
+    assert_eq!(to_string(f, 0.95, Minus, 1), "0.9"); // because it really is less than 0.95
+    assert_eq!(to_string(f, 0.95, Minus, 2), "0.95");
+    assert_eq!(to_string(f, 0.95, Minus, 3), "0.950");
+    assert_eq!(to_string(f, 0.95, Minus, 10), "0.9500000000");
+    assert_eq!(to_string(f, 0.95, Minus, 30), "0.949999999999999955591079014994");
 
-    assert_eq!(to_string(f, 0.095, Minus, 0, false), "0");
-    assert_eq!(to_string(f, 0.095, Minus, 1, false), "0.1");
-    assert_eq!(to_string(f, 0.095, Minus, 2, false), "0.10");
-    assert_eq!(to_string(f, 0.095, Minus, 3, false), "0.095");
-    assert_eq!(to_string(f, 0.095, Minus, 4, false), "0.0950");
-    assert_eq!(to_string(f, 0.095, Minus, 10, false), "0.0950000000");
-    assert_eq!(to_string(f, 0.095, Minus, 30, false), "0.095000000000000001110223024625");
+    assert_eq!(to_string(f, 0.095, Minus, 0), "0");
+    assert_eq!(to_string(f, 0.095, Minus, 1), "0.1");
+    assert_eq!(to_string(f, 0.095, Minus, 2), "0.10");
+    assert_eq!(to_string(f, 0.095, Minus, 3), "0.095");
+    assert_eq!(to_string(f, 0.095, Minus, 4), "0.0950");
+    assert_eq!(to_string(f, 0.095, Minus, 10), "0.0950000000");
+    assert_eq!(to_string(f, 0.095, Minus, 30), "0.095000000000000001110223024625");
 
-    assert_eq!(to_string(f, 0.0095, Minus, 0, false), "0");
-    assert_eq!(to_string(f, 0.0095, Minus, 1, false), "0.0");
-    assert_eq!(to_string(f, 0.0095, Minus, 2, false), "0.01");
-    assert_eq!(to_string(f, 0.0095, Minus, 3, false), "0.009"); // really is less than 0.0095
-    assert_eq!(to_string(f, 0.0095, Minus, 4, false), "0.0095");
-    assert_eq!(to_string(f, 0.0095, Minus, 5, false), "0.00950");
-    assert_eq!(to_string(f, 0.0095, Minus, 10, false), "0.0095000000");
-    assert_eq!(to_string(f, 0.0095, Minus, 30, false), "0.009499999999999999764077607267");
+    assert_eq!(to_string(f, 0.0095, Minus, 0), "0");
+    assert_eq!(to_string(f, 0.0095, Minus, 1), "0.0");
+    assert_eq!(to_string(f, 0.0095, Minus, 2), "0.01");
+    assert_eq!(to_string(f, 0.0095, Minus, 3), "0.009"); // really is less than 0.0095
+    assert_eq!(to_string(f, 0.0095, Minus, 4), "0.0095");
+    assert_eq!(to_string(f, 0.0095, Minus, 5), "0.00950");
+    assert_eq!(to_string(f, 0.0095, Minus, 10), "0.0095000000");
+    assert_eq!(to_string(f, 0.0095, Minus, 30), "0.009499999999999999764077607267");
 
-    assert_eq!(to_string(f, 7.5e-11, Minus, 0, false), "0");
-    assert_eq!(to_string(f, 7.5e-11, Minus, 3, false), "0.000");
-    assert_eq!(to_string(f, 7.5e-11, Minus, 10, false), "0.0000000001");
-    assert_eq!(to_string(f, 7.5e-11, Minus, 11, false), "0.00000000007"); // ditto
-    assert_eq!(to_string(f, 7.5e-11, Minus, 12, false), "0.000000000075");
-    assert_eq!(to_string(f, 7.5e-11, Minus, 13, false), "0.0000000000750");
-    assert_eq!(to_string(f, 7.5e-11, Minus, 20, false), "0.00000000007500000000");
-    assert_eq!(to_string(f, 7.5e-11, Minus, 30, false), "0.000000000074999999999999999501");
+    assert_eq!(to_string(f, 7.5e-11, Minus, 0), "0");
+    assert_eq!(to_string(f, 7.5e-11, Minus, 3), "0.000");
+    assert_eq!(to_string(f, 7.5e-11, Minus, 10), "0.0000000001");
+    assert_eq!(to_string(f, 7.5e-11, Minus, 11), "0.00000000007"); // ditto
+    assert_eq!(to_string(f, 7.5e-11, Minus, 12), "0.000000000075");
+    assert_eq!(to_string(f, 7.5e-11, Minus, 13), "0.0000000000750");
+    assert_eq!(to_string(f, 7.5e-11, Minus, 20), "0.00000000007500000000");
+    assert_eq!(to_string(f, 7.5e-11, Minus, 30), "0.000000000074999999999999999501");
 
-    assert_eq!(to_string(f, 1.0e25, Minus, 0, false), "10000000000000000905969664");
-    assert_eq!(to_string(f, 1.0e25, Minus, 1, false), "10000000000000000905969664.0");
-    assert_eq!(to_string(f, 1.0e25, Minus, 3, false), "10000000000000000905969664.000");
+    assert_eq!(to_string(f, 1.0e25, Minus, 0), "10000000000000000905969664");
+    assert_eq!(to_string(f, 1.0e25, Minus, 1), "10000000000000000905969664.0");
+    assert_eq!(to_string(f, 1.0e25, Minus, 3), "10000000000000000905969664.000");
 
-    assert_eq!(to_string(f, 1.0e-6, Minus, 0, false), "0");
-    assert_eq!(to_string(f, 1.0e-6, Minus, 3, false), "0.000");
-    assert_eq!(to_string(f, 1.0e-6, Minus, 6, false), "0.000001");
-    assert_eq!(to_string(f, 1.0e-6, Minus, 9, false), "0.000001000");
-    assert_eq!(to_string(f, 1.0e-6, Minus, 12, false), "0.000001000000");
-    assert_eq!(to_string(f, 1.0e-6, Minus, 22, false), "0.0000010000000000000000");
-    assert_eq!(to_string(f, 1.0e-6, Minus, 23, false), "0.00000099999999999999995");
-    assert_eq!(to_string(f, 1.0e-6, Minus, 24, false), "0.000000999999999999999955");
-    assert_eq!(to_string(f, 1.0e-6, Minus, 25, false), "0.0000009999999999999999547");
-    assert_eq!(to_string(f, 1.0e-6, Minus, 35, false), "0.00000099999999999999995474811182589");
+    assert_eq!(to_string(f, 1.0e-6, Minus, 0), "0");
+    assert_eq!(to_string(f, 1.0e-6, Minus, 3), "0.000");
+    assert_eq!(to_string(f, 1.0e-6, Minus, 6), "0.000001");
+    assert_eq!(to_string(f, 1.0e-6, Minus, 9), "0.000001000");
+    assert_eq!(to_string(f, 1.0e-6, Minus, 12), "0.000001000000");
+    assert_eq!(to_string(f, 1.0e-6, Minus, 22), "0.0000010000000000000000");
+    assert_eq!(to_string(f, 1.0e-6, Minus, 23), "0.00000099999999999999995");
+    assert_eq!(to_string(f, 1.0e-6, Minus, 24), "0.000000999999999999999955");
+    assert_eq!(to_string(f, 1.0e-6, Minus, 25), "0.0000009999999999999999547");
+    assert_eq!(to_string(f, 1.0e-6, Minus, 35), "0.00000099999999999999995474811182589");
+    assert_eq!(to_string(f, 1.0e-6, Minus, 45), "0.000000999999999999999954748111825886258685614");
     assert_eq!(
-        to_string(f, 1.0e-6, Minus, 45, false),
-        "0.000000999999999999999954748111825886258685614"
-    );
-    assert_eq!(
-        to_string(f, 1.0e-6, Minus, 55, false),
+        to_string(f, 1.0e-6, Minus, 55),
         "0.0000009999999999999999547481118258862586856139387236908"
     );
     assert_eq!(
-        to_string(f, 1.0e-6, Minus, 65, false),
+        to_string(f, 1.0e-6, Minus, 65),
         "0.00000099999999999999995474811182588625868561393872369080781936646"
     );
     assert_eq!(
-        to_string(f, 1.0e-6, Minus, 75, false),
+        to_string(f, 1.0e-6, Minus, 75),
         "0.000000999999999999999954748111825886258685613938723690807819366455078125000"
     );
 
-    assert_eq!(to_string(f, f32::MAX, Minus, 0, false), "340282346638528859811704183484516925440");
-    assert_eq!(
-        to_string(f, f32::MAX, Minus, 1, false),
-        "340282346638528859811704183484516925440.0"
-    );
-    assert_eq!(
-        to_string(f, f32::MAX, Minus, 2, false),
-        "340282346638528859811704183484516925440.00"
-    );
+    assert_eq!(to_string(f, f32::MAX, Minus, 0), "340282346638528859811704183484516925440");
+    assert_eq!(to_string(f, f32::MAX, Minus, 1), "340282346638528859811704183484516925440.0");
+    assert_eq!(to_string(f, f32::MAX, Minus, 2), "340282346638528859811704183484516925440.00");
 
     if cfg!(miri) {
         // Miri is too slow
@@ -1163,24 +1151,24 @@
     }
 
     let minf32 = ldexp_f32(1.0, -149);
-    assert_eq!(to_string(f, minf32, Minus, 0, false), "0");
-    assert_eq!(to_string(f, minf32, Minus, 1, false), "0.0");
-    assert_eq!(to_string(f, minf32, Minus, 2, false), "0.00");
-    assert_eq!(to_string(f, minf32, Minus, 4, false), "0.0000");
-    assert_eq!(to_string(f, minf32, Minus, 8, false), "0.00000000");
-    assert_eq!(to_string(f, minf32, Minus, 16, false), "0.0000000000000000");
-    assert_eq!(to_string(f, minf32, Minus, 32, false), "0.00000000000000000000000000000000");
+    assert_eq!(to_string(f, minf32, Minus, 0), "0");
+    assert_eq!(to_string(f, minf32, Minus, 1), "0.0");
+    assert_eq!(to_string(f, minf32, Minus, 2), "0.00");
+    assert_eq!(to_string(f, minf32, Minus, 4), "0.0000");
+    assert_eq!(to_string(f, minf32, Minus, 8), "0.00000000");
+    assert_eq!(to_string(f, minf32, Minus, 16), "0.0000000000000000");
+    assert_eq!(to_string(f, minf32, Minus, 32), "0.00000000000000000000000000000000");
     assert_eq!(
-        to_string(f, minf32, Minus, 64, false),
+        to_string(f, minf32, Minus, 64),
         "0.0000000000000000000000000000000000000000000014012984643248170709"
     );
     assert_eq!(
-        to_string(f, minf32, Minus, 128, false),
+        to_string(f, minf32, Minus, 128),
         "0.0000000000000000000000000000000000000000000014012984643248170709\
                   2372958328991613128026194187651577175706828388979108268586060149"
     );
     assert_eq!(
-        to_string(f, minf32, Minus, 256, false),
+        to_string(f, minf32, Minus, 256),
         "0.0000000000000000000000000000000000000000000014012984643248170709\
                   2372958328991613128026194187651577175706828388979108268586060148\
                   6638188362121582031250000000000000000000000000000000000000000000\
@@ -1188,7 +1176,7 @@
     );
 
     assert_eq!(
-        to_string(f, f64::MAX, Minus, 0, false),
+        to_string(f, f64::MAX, Minus, 0),
         "1797693134862315708145274237317043567980705675258449965989174768\
                 0315726078002853876058955863276687817154045895351438246423432132\
                 6889464182768467546703537516986049910576551282076245490090389328\
@@ -1196,7 +1184,7 @@
                 26204144723168738177180919299881250404026184124858368"
     );
     assert_eq!(
-        to_string(f, f64::MAX, Minus, 10, false),
+        to_string(f, f64::MAX, Minus, 10),
         "1797693134862315708145274237317043567980705675258449965989174768\
                 0315726078002853876058955863276687817154045895351438246423432132\
                 6889464182768467546703537516986049910576551282076245490090389328\
@@ -1205,16 +1193,16 @@
     );
 
     let minf64 = ldexp_f64(1.0, -1074);
-    assert_eq!(to_string(f, minf64, Minus, 0, false), "0");
-    assert_eq!(to_string(f, minf64, Minus, 1, false), "0.0");
-    assert_eq!(to_string(f, minf64, Minus, 10, false), "0.0000000000");
+    assert_eq!(to_string(f, minf64, Minus, 0), "0");
+    assert_eq!(to_string(f, minf64, Minus, 1), "0.0");
+    assert_eq!(to_string(f, minf64, Minus, 10), "0.0000000000");
     assert_eq!(
-        to_string(f, minf64, Minus, 100, false),
+        to_string(f, minf64, Minus, 100),
         "0.0000000000000000000000000000000000000000000000000000000000000000\
                   000000000000000000000000000000000000"
     );
     assert_eq!(
-        to_string(f, minf64, Minus, 1000, false),
+        to_string(f, minf64, Minus, 1000),
         "0.0000000000000000000000000000000000000000000000000000000000000000\
                   0000000000000000000000000000000000000000000000000000000000000000\
                   0000000000000000000000000000000000000000000000000000000000000000\
@@ -1234,15 +1222,15 @@
     );
 
     // very large output
-    assert_eq!(to_string(f, 0.0, Minus, 80000, false), format!("0.{:0>80000}", ""));
-    assert_eq!(to_string(f, 1.0e1, Minus, 80000, false), format!("10.{:0>80000}", ""));
-    assert_eq!(to_string(f, 1.0e0, Minus, 80000, false), format!("1.{:0>80000}", ""));
+    assert_eq!(to_string(f, 0.0, Minus, 80000), format!("0.{:0>80000}", ""));
+    assert_eq!(to_string(f, 1.0e1, Minus, 80000), format!("10.{:0>80000}", ""));
+    assert_eq!(to_string(f, 1.0e0, Minus, 80000), format!("1.{:0>80000}", ""));
     assert_eq!(
-        to_string(f, 1.0e-1, Minus, 80000, false),
+        to_string(f, 1.0e-1, Minus, 80000),
         format!("0.1000000000000000055511151231257827021181583404541015625{:0>79945}", "")
     );
     assert_eq!(
-        to_string(f, 1.0e-20, Minus, 80000, false),
+        to_string(f, 1.0e-20, Minus, 80000),
         format!(
             "0.0000000000000000000099999999999999994515327145420957165172950370\
                           2787392447107715776066783064379706047475337982177734375{:0>79881}",
diff --git a/src/libcore/tests/time.rs b/src/libcore/tests/time.rs
index 273f125..c1fbdf7 100644
--- a/src/libcore/tests/time.rs
+++ b/src/libcore/tests/time.rs
@@ -12,6 +12,12 @@
 }
 
 #[test]
+#[should_panic]
+fn new_overflow() {
+    let _ = Duration::new(::core::u64::MAX, 1_000_000_000);
+}
+
+#[test]
 fn secs() {
     assert_eq!(Duration::new(0, 0).as_secs(), 0);
     assert_eq!(Duration::new(0, 500_000_005).as_secs(), 0);
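
The new `new_overflow` test leans on the documented carry behaviour of `Duration::new`: a nanosecond count of one billion or more is converted into extra whole seconds, and the constructor panics if that carry overflows the `u64` seconds value. A small standalone illustration (std only):

```rust
use std::time::Duration;

fn main() {
    // 1_500_000_000 ns carries one whole second: 1 s + 1.5e9 ns == 2.5 s.
    let d = Duration::new(1, 1_500_000_000);
    assert_eq!(d.as_secs(), 2);
    assert_eq!(d.subsec_nanos(), 500_000_000);

    // With u64::MAX seconds there is no room for the carried second,
    // so this panics -- which is what the new #[should_panic] test asserts.
    let _ = Duration::new(u64::MAX, 1_000_000_000);
}
```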
diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs
index d04f5c1..a53e001 100644
--- a/src/libgraphviz/lib.rs
+++ b/src/libgraphviz/lib.rs
@@ -597,6 +597,8 @@
     NoNodeLabels,
     NoEdgeStyles,
     NoNodeStyles,
+
+    Monospace,
 }
 
 /// Returns vec holding all the default render options.
@@ -626,6 +628,14 @@
     W: Write,
 {
     writeln!(w, "digraph {} {{", g.graph_id().as_slice())?;
+
+    // Global graph properties
+    if options.contains(&RenderOption::Monospace) {
+        writeln!(w, r#"    graph[fontname="monospace"];"#)?;
+        writeln!(w, r#"    node[fontname="monospace"];"#)?;
+        writeln!(w, r#"    edge[fontname="monospace"];"#)?;
+    }
+
     for n in g.nodes().iter() {
         write!(w, "    ")?;
         let id = g.node_id(n);
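
For callers of this crate, the new variant is just another member of the `RenderOption` slice handed to the existing `render_opts` entry point (the function whose body is patched above). A hedged sketch of a call site; the graph value and its `Labeller`/`GraphWalk` impls are assumed, not shown:

```rust
// Hypothetical call site; `my_graph` must implement this crate's
// Labeller and GraphWalk traits (impls omitted here).
//
//     let mut out = Vec::new();
//     render_opts(&my_graph, &mut out, &[RenderOption::Monospace])?;
//
// With the option present, the emitted .dot file begins with the three
// global attribute lines added above (graph/node/edge fontname="monospace").
```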
diff --git a/src/libpanic_unwind/seh.rs b/src/libpanic_unwind/seh.rs
index d9dca2c..6f507e8 100644
--- a/src/libpanic_unwind/seh.rs
+++ b/src/libpanic_unwind/seh.rs
@@ -282,12 +282,7 @@
     //
     // In any case, we basically need to do something like this until we can
     // express more operations in statics (and we may never be able to).
-    if !cfg!(bootstrap) {
-        atomic_store(
-            &mut THROW_INFO.pmfnUnwind as *mut _ as *mut u32,
-            ptr!(exception_cleanup) as u32,
-        );
-    }
+    atomic_store(&mut THROW_INFO.pmfnUnwind as *mut _ as *mut u32, ptr!(exception_cleanup) as u32);
     atomic_store(
         &mut THROW_INFO.pCatchableTypeArray as *mut _ as *mut u32,
         ptr!(&CATCHABLE_TYPE_ARRAY as *const _) as u32,
@@ -300,12 +295,10 @@
         &mut CATCHABLE_TYPE.pType as *mut _ as *mut u32,
         ptr!(&TYPE_DESCRIPTOR as *const _) as u32,
     );
-    if !cfg!(bootstrap) {
-        atomic_store(
-            &mut CATCHABLE_TYPE.copyFunction as *mut _ as *mut u32,
-            ptr!(exception_copy) as u32,
-        );
-    }
+    atomic_store(
+        &mut CATCHABLE_TYPE.copyFunction as *mut _ as *mut u32,
+        ptr!(exception_copy) as u32,
+    );
 
     extern "system" {
         #[unwind(allowed)]
diff --git a/src/librustc/Cargo.toml b/src/librustc/Cargo.toml
index 782c687..af2be30 100644
--- a/src/librustc/Cargo.toml
+++ b/src/librustc/Cargo.toml
@@ -12,8 +12,6 @@
 [dependencies]
 arena = { path = "../libarena" }
 bitflags = "1.2.1"
-fmt_macros = { path = "../libfmt_macros" }
-graphviz = { path = "../libgraphviz" }
 jobserver = "0.1"
 scoped-tls = "1.0"
 log = { version = "0.4", features = ["release_max_level_info", "std"] }
diff --git a/src/librustc/arena.rs b/src/librustc/arena.rs
index 15e92d8..ca55d41 100644
--- a/src/librustc/arena.rs
+++ b/src/librustc/arena.rs
@@ -35,7 +35,8 @@
                 rustc::mir::Promoted,
                 rustc::mir::BodyAndCache<$tcx>
             >,
-            [] tables: rustc::ty::TypeckTables<$tcx>,
+            [decode] tables: rustc::ty::TypeckTables<$tcx>,
+            [decode] borrowck_result: rustc::mir::BorrowCheckResult<$tcx>,
             [] const_allocs: rustc::mir::interpret::Allocation,
             [] vtable_method: Option<(
                 rustc_hir::def_id::DefId,
@@ -47,22 +48,23 @@
             [] item_local_set: rustc_hir::ItemLocalSet,
             [decode] mir_const_qualif: rustc_index::bit_set::BitSet<rustc::mir::Local>,
             [] trait_impls_of: rustc::ty::trait_def::TraitImpls,
+            [] associated_items: rustc::ty::AssociatedItems,
             [] dropck_outlives:
                 rustc::infer::canonical::Canonical<'tcx,
                     rustc::infer::canonical::QueryResponse<'tcx,
-                        rustc::traits::query::dropck_outlives::DropckOutlivesResult<'tcx>
+                        rustc::traits::query::DropckOutlivesResult<'tcx>
                     >
                 >,
             [] normalize_projection_ty:
                 rustc::infer::canonical::Canonical<'tcx,
                     rustc::infer::canonical::QueryResponse<'tcx,
-                        rustc::traits::query::normalize::NormalizationResult<'tcx>
+                        rustc::traits::query::NormalizationResult<'tcx>
                     >
                 >,
             [] implied_outlives_bounds:
                 rustc::infer::canonical::Canonical<'tcx,
                     rustc::infer::canonical::QueryResponse<'tcx,
-                        Vec<rustc::traits::query::outlives_bounds::OutlivesBound<'tcx>>
+                        Vec<rustc::traits::query::OutlivesBound<'tcx>>
                     >
                 >,
             [] type_op_subtype:
@@ -127,7 +129,7 @@
             [] tys: rustc::ty::TyS<$tcx>,
 
             // HIR types
-            [few] hir_forest: rustc::hir::map::Forest<$tcx>,
+            [few] hir_krate: rustc_hir::Crate<$tcx>,
             [] arm: rustc_hir::Arm<$tcx>,
             [] attribute: syntax::ast::Attribute,
             [] block: rustc_hir::Block<$tcx>,
@@ -216,6 +218,7 @@
 
 arena_types!(impl_arena_allocatable, [], 'tcx);
 
+#[marker]
 pub trait ArenaAllocatable {}
 
 impl<T: Copy> ArenaAllocatable for T {}
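
The `#[marker]` attribute added here is the unstable `marker_trait_attr` feature: a trait with no items may then have overlapping impls, which is what the macro-generated per-type impls need alongside the existing blanket `impl<T: Copy>`. A minimal nightly-only sketch of the mechanism, with illustrative names rather than the compiler's own:

```rust
#![feature(marker_trait_attr)]

// Stand-in for ArenaAllocatable: no items, so it may be a marker trait.
#[marker]
trait Allocatable {}

// Blanket impl, mirroring `impl<T: Copy> ArenaAllocatable for T {}`.
impl<T: Copy> Allocatable for T {}

// Overlaps with the blanket impl (u32 is Copy); accepted only because
// the trait carries #[marker].
impl Allocatable for u32 {}

fn assert_allocatable<T: Allocatable>() {}

fn main() {
    assert_allocatable::<u32>();
}
```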
diff --git a/src/librustc/benches/lib.rs b/src/librustc/benches/lib.rs
index de82b26..237751b 100644
--- a/src/librustc/benches/lib.rs
+++ b/src/librustc/benches/lib.rs
@@ -1,4 +1,3 @@
-#![cfg_attr(bootstrap, feature(slice_patterns))]
 #![feature(test)]
 
 extern crate test;
diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs
index 9df8e28..eb7e287 100644
--- a/src/librustc/dep_graph/dep_node.rs
+++ b/src/librustc/dep_graph/dep_node.rs
@@ -35,7 +35,7 @@
 //! "infer" some properties for each kind of `DepNode`:
 //!
 //! * Whether a `DepNode` of a given kind has any parameters at all. Some
-//!   `DepNode`s, like `Krate`, represent global concepts with only one value.
+//!   `DepNode`s, like `AllLocalTraitImpls`, represent global concepts with only one value.
 //! * Whether it is possible, in principle, to reconstruct a query key from a
 //!   given `DepNode`. Many `DepKind`s only require a single `DefId` parameter,
 //!   in which case it is possible to map the node's fingerprint back to the
@@ -76,10 +76,6 @@
     ($x:tt) => {{}};
 }
 
-macro_rules! replace {
-    ($x:tt with $($y:tt)*) => ($($y)*)
-}
-
 macro_rules! is_anon_attr {
     (anon) => {
         true
@@ -99,19 +95,18 @@
 }
 
 macro_rules! contains_anon_attr {
-    ($($attr:ident),*) => ({$(is_anon_attr!($attr) | )* false});
+    ($($attr:ident $(($($attr_args:tt)*))* ),*) => ({$(is_anon_attr!($attr) | )* false});
 }
 
 macro_rules! contains_eval_always_attr {
-    ($($attr:ident),*) => ({$(is_eval_always_attr!($attr) | )* false});
+    ($($attr:ident $(($($attr_args:tt)*))* ),*) => ({$(is_eval_always_attr!($attr) | )* false});
 }
 
 macro_rules! define_dep_nodes {
     (<$tcx:tt>
     $(
-        [$($attr:ident),* ]
+        [$($attrs:tt)*]
         $variant:ident $(( $tuple_arg_ty:ty $(,)? ))*
-                       $({ $($struct_arg_name:ident : $struct_arg_ty:ty),* })*
       ,)*
     ) => (
         #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash,
@@ -126,7 +121,7 @@
                 match *self {
                     $(
                         DepKind :: $variant => {
-                            if contains_anon_attr!($($attr),*) {
+                            if contains_anon_attr!($($attrs)*) {
                                 return false;
                             }
 
@@ -136,13 +131,6 @@
                                     ::CAN_RECONSTRUCT_QUERY_KEY;
                             })*
 
-                            // struct args
-                            $({
-
-                                return <( $($struct_arg_ty,)* ) as DepNodeParams>
-                                    ::CAN_RECONSTRUCT_QUERY_KEY;
-                            })*
-
                             true
                         }
                     )*
@@ -152,7 +140,7 @@
             pub fn is_anon(&self) -> bool {
                 match *self {
                     $(
-                        DepKind :: $variant => { contains_anon_attr!($($attr),*) }
+                        DepKind :: $variant => { contains_anon_attr!($($attrs)*) }
                     )*
                 }
             }
@@ -160,7 +148,7 @@
             pub fn is_eval_always(&self) -> bool {
                 match *self {
                     $(
-                        DepKind :: $variant => { contains_eval_always_attr!($($attr), *) }
+                        DepKind :: $variant => { contains_eval_always_attr!($($attrs)*) }
                     )*
                 }
             }
@@ -176,12 +164,6 @@
                                 return true;
                             })*
 
-                            // struct args
-                            $({
-                                $(erase!($struct_arg_name);)*
-                                return true;
-                            })*
-
                             false
                         }
                     )*
@@ -189,11 +171,43 @@
             }
         }
 
-        pub enum DepConstructor<$tcx> {
+        pub struct DepConstructor;
+
+        impl DepConstructor {
             $(
-                $variant $(( $tuple_arg_ty ))*
-                         $({ $($struct_arg_name : $struct_arg_ty),* })*
-            ),*
+                #[inline(always)]
+                #[allow(unreachable_code, non_snake_case)]
+                pub fn $variant<'tcx>(_tcx: TyCtxt<'tcx>, $(arg: $tuple_arg_ty)*) -> DepNode {
+                    // tuple args
+                    $({
+                        erase!($tuple_arg_ty);
+                        let hash = DepNodeParams::to_fingerprint(&arg, _tcx);
+                        let dep_node = DepNode {
+                            kind: DepKind::$variant,
+                            hash
+                        };
+
+                        #[cfg(debug_assertions)]
+                        {
+                            if !dep_node.kind.can_reconstruct_query_key() &&
+                            (_tcx.sess.opts.debugging_opts.incremental_info ||
+                                _tcx.sess.opts.debugging_opts.query_dep_graph)
+                            {
+                                _tcx.dep_graph.register_dep_node_debug_str(dep_node, || {
+                                    arg.to_debug_str(_tcx)
+                                });
+                            }
+                        }
+
+                        return dep_node;
+                    })*
+
+                    DepNode {
+                        kind: DepKind::$variant,
+                        hash: Fingerprint::ZERO,
+                    }
+                }
+            )*
         }
 
         #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash,
@@ -204,75 +218,6 @@
         }
 
         impl DepNode {
-            #[allow(unreachable_code, non_snake_case)]
-            pub fn new<'tcx>(tcx: TyCtxt<'tcx>,
-                                       dep: DepConstructor<'tcx>)
-                                       -> DepNode
-            {
-                match dep {
-                    $(
-                        DepConstructor :: $variant $(( replace!(($tuple_arg_ty) with arg) ))*
-                                                   $({ $($struct_arg_name),* })*
-                            =>
-                        {
-                            // tuple args
-                            $({
-                                erase!($tuple_arg_ty);
-                                let hash = DepNodeParams::to_fingerprint(&arg, tcx);
-                                let dep_node = DepNode {
-                                    kind: DepKind::$variant,
-                                    hash
-                                };
-
-                                #[cfg(debug_assertions)]
-                                {
-                                    if !dep_node.kind.can_reconstruct_query_key() &&
-                                    (tcx.sess.opts.debugging_opts.incremental_info ||
-                                        tcx.sess.opts.debugging_opts.query_dep_graph)
-                                    {
-                                        tcx.dep_graph.register_dep_node_debug_str(dep_node, || {
-                                            arg.to_debug_str(tcx)
-                                        });
-                                    }
-                                }
-
-                                return dep_node;
-                            })*
-
-                            // struct args
-                            $({
-                                let tupled_args = ( $($struct_arg_name,)* );
-                                let hash = DepNodeParams::to_fingerprint(&tupled_args,
-                                                                         tcx);
-                                let dep_node = DepNode {
-                                    kind: DepKind::$variant,
-                                    hash
-                                };
-
-                                #[cfg(debug_assertions)]
-                                {
-                                    if !dep_node.kind.can_reconstruct_query_key() &&
-                                    (tcx.sess.opts.debugging_opts.incremental_info ||
-                                        tcx.sess.opts.debugging_opts.query_dep_graph)
-                                    {
-                                        tcx.dep_graph.register_dep_node_debug_str(dep_node, || {
-                                            tupled_args.to_debug_str(tcx)
-                                        });
-                                    }
-                                }
-
-                                return dep_node;
-                            })*
-
-                            DepNode {
-                                kind: DepKind::$variant,
-                                hash: Fingerprint::ZERO,
-                            }
-                        }
-                    )*
-                }
-            }
-
             /// Construct a DepNode from the given DepKind and DefPathHash. This
             /// method will assert that the given DepKind actually requires a
             /// single DefId/DefPathHash parameter.
@@ -400,19 +345,6 @@
     // We use this for most things when incr. comp. is turned off.
     [] Null,
 
-    // Represents the `Krate` as a whole (the `hir::Krate` value) (as
-    // distinct from the krate module). This is basically a hash of
-    // the entire krate, so if you read from `Krate` (e.g., by calling
-    // `tcx.hir().krate()`), we will have to assume that any change
-    // means that you need to be recompiled. This is because the
-    // `Krate` value gives you access to all other items. To avoid
-    // this fate, do not call `tcx.hir().krate()`; instead, prefer
-    // wrappers like `tcx.visit_all_items_in_krate()`.  If there is no
-    // suitable wrapper, you can use `tcx.dep_graph.ignore()` to gain
-    // access to the krate, but you must remember to add suitable
-    // edges yourself for the individual items that you read.
-    [eval_always] Krate,
-
     // Represents the body of a function or method. The def-id is that of the
     // function/method.
     [eval_always] HirBody(DefId),
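
The net effect of the `define_dep_nodes!` change above is that `DepConstructor` stops being an enum matched inside `DepNode::new` and becomes a struct with one generated constructor function per variant, so call sites build the `DepNode` directly. A standalone sketch of the same refactoring pattern, using toy types rather than the compiler's:

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
struct DepNode {
    kind: &'static str,
    hash: u64,
}

// Old shape: an enum of constructors funnelled through one `new` function.
enum OldConstructor {
    HirBody(u64),
    Null,
}

fn dep_node_new(dep: OldConstructor) -> DepNode {
    match dep {
        OldConstructor::HirBody(id) => DepNode { kind: "HirBody", hash: id },
        OldConstructor::Null => DepNode { kind: "Null", hash: 0 },
    }
}

// New shape: a plain struct whose associated functions (one per former
// variant) return the DepNode directly, as the macro now generates.
struct DepConstructor;

#[allow(non_snake_case)]
impl DepConstructor {
    fn HirBody(id: u64) -> DepNode {
        DepNode { kind: "HirBody", hash: id }
    }
    fn Null() -> DepNode {
        DepNode { kind: "Null", hash: 0 }
    }
}

fn main() {
    assert_eq!(dep_node_new(OldConstructor::HirBody(7)), DepConstructor::HirBody(7));
    assert_eq!(dep_node_new(OldConstructor::Null), DepConstructor::Null());
}
```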
diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs
index 258723b..531a45b 100644
--- a/src/librustc/dep_graph/graph.rs
+++ b/src/librustc/dep_graph/graph.rs
@@ -1122,6 +1122,7 @@
 }
 
 impl DepGraphData {
+    #[inline(never)]
     fn read_index(&self, source: DepNodeIndex) {
         ty::tls::with_context_opt(|icx| {
             let icx = if let Some(icx) = icx { icx } else { return };
diff --git a/src/librustc/hir/check_attr.rs b/src/librustc/hir/check_attr.rs
deleted file mode 100644
index db5e319..0000000
--- a/src/librustc/hir/check_attr.rs
+++ /dev/null
@@ -1,125 +0,0 @@
-//! This module implements some validity checks for attributes.
-//! In particular it verifies that `#[inline]` and `#[repr]` attributes are
-//! attached to items that actually support them and if there are
-//! conflicts between multiple such attributes attached to the same
-//! item.
-
-use rustc_hir as hir;
-use rustc_hir::{Item, ItemKind, TraitItem, TraitItemKind};
-
-use std::fmt::{self, Display};
-
-#[derive(Copy, Clone, PartialEq)]
-pub enum MethodKind {
-    Trait { body: bool },
-    Inherent,
-}
-
-#[derive(Copy, Clone, PartialEq)]
-pub enum Target {
-    ExternCrate,
-    Use,
-    Static,
-    Const,
-    Fn,
-    Closure,
-    Mod,
-    ForeignMod,
-    GlobalAsm,
-    TyAlias,
-    OpaqueTy,
-    Enum,
-    Struct,
-    Union,
-    Trait,
-    TraitAlias,
-    Impl,
-    Expression,
-    Statement,
-    AssocConst,
-    Method(MethodKind),
-    AssocTy,
-    ForeignFn,
-    ForeignStatic,
-    ForeignTy,
-}
-
-impl Display for Target {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(
-            f,
-            "{}",
-            match *self {
-                Target::ExternCrate => "extern crate",
-                Target::Use => "use",
-                Target::Static => "static item",
-                Target::Const => "constant item",
-                Target::Fn => "function",
-                Target::Closure => "closure",
-                Target::Mod => "module",
-                Target::ForeignMod => "foreign module",
-                Target::GlobalAsm => "global asm",
-                Target::TyAlias => "type alias",
-                Target::OpaqueTy => "opaque type",
-                Target::Enum => "enum",
-                Target::Struct => "struct",
-                Target::Union => "union",
-                Target::Trait => "trait",
-                Target::TraitAlias => "trait alias",
-                Target::Impl => "item",
-                Target::Expression => "expression",
-                Target::Statement => "statement",
-                Target::AssocConst => "associated const",
-                Target::Method(_) => "method",
-                Target::AssocTy => "associated type",
-                Target::ForeignFn => "foreign function",
-                Target::ForeignStatic => "foreign static item",
-                Target::ForeignTy => "foreign type",
-            }
-        )
-    }
-}
-
-impl Target {
-    pub fn from_item(item: &Item<'_>) -> Target {
-        match item.kind {
-            ItemKind::ExternCrate(..) => Target::ExternCrate,
-            ItemKind::Use(..) => Target::Use,
-            ItemKind::Static(..) => Target::Static,
-            ItemKind::Const(..) => Target::Const,
-            ItemKind::Fn(..) => Target::Fn,
-            ItemKind::Mod(..) => Target::Mod,
-            ItemKind::ForeignMod(..) => Target::ForeignMod,
-            ItemKind::GlobalAsm(..) => Target::GlobalAsm,
-            ItemKind::TyAlias(..) => Target::TyAlias,
-            ItemKind::OpaqueTy(..) => Target::OpaqueTy,
-            ItemKind::Enum(..) => Target::Enum,
-            ItemKind::Struct(..) => Target::Struct,
-            ItemKind::Union(..) => Target::Union,
-            ItemKind::Trait(..) => Target::Trait,
-            ItemKind::TraitAlias(..) => Target::TraitAlias,
-            ItemKind::Impl { .. } => Target::Impl,
-        }
-    }
-
-    pub fn from_trait_item(trait_item: &TraitItem<'_>) -> Target {
-        match trait_item.kind {
-            TraitItemKind::Const(..) => Target::AssocConst,
-            TraitItemKind::Method(_, hir::TraitMethod::Required(_)) => {
-                Target::Method(MethodKind::Trait { body: false })
-            }
-            TraitItemKind::Method(_, hir::TraitMethod::Provided(_)) => {
-                Target::Method(MethodKind::Trait { body: true })
-            }
-            TraitItemKind::Type(..) => Target::AssocTy,
-        }
-    }
-
-    pub fn from_foreign_item(foreign_item: &hir::ForeignItem<'_>) -> Target {
-        match foreign_item.kind {
-            hir::ForeignItemKind::Fn(..) => Target::ForeignFn,
-            hir::ForeignItemKind::Static(..) => Target::ForeignStatic,
-            hir::ForeignItemKind::Type => Target::ForeignTy,
-        }
-    }
-}
diff --git a/src/librustc/hir/map/collector.rs b/src/librustc/hir/map/collector.rs
index b6be4bb..bf1fc09 100644
--- a/src/librustc/hir/map/collector.rs
+++ b/src/librustc/hir/map/collector.rs
@@ -140,6 +140,7 @@
                 trait_impls: _,
                 body_ids: _,
                 modules: _,
+                proc_macros: _,
             } = *krate;
 
             alloc_hir_dep_nodes(
@@ -223,12 +224,9 @@
             (commandline_args_hash, crate_disambiguator.to_fingerprint()),
         );
 
-        let (_, crate_hash) = input_dep_node_and_hash(
-            self.dep_graph,
-            &mut self.hcx,
-            DepNode::new_no_params(DepKind::Krate),
-            crate_hash_input,
-        );
+        let mut stable_hasher = StableHasher::new();
+        crate_hash_input.hash_stable(&mut self.hcx, &mut stable_hasher);
+        let crate_hash: Fingerprint = stable_hasher.finish();
 
         let svh = Svh::new(crate_hash.to_smaller_hash());
         (self.map, svh)
diff --git a/src/librustc/hir/map/hir_id_validator.rs b/src/librustc/hir/map/hir_id_validator.rs
index 76e42b8..a4f9193 100644
--- a/src/librustc/hir/map/hir_id_validator.rs
+++ b/src/librustc/hir/map/hir_id_validator.rs
@@ -7,12 +7,12 @@
 use rustc_hir::itemlikevisit::ItemLikeVisitor;
 use rustc_hir::{HirId, ItemLocalId};
 
-pub fn check_crate(hir_map: &Map<'_>) {
+pub fn check_crate(hir_map: &Map<'_>, sess: &rustc_session::Session) {
     hir_map.dep_graph.assert_ignored();
 
     let errors = Lock::new(Vec::new());
 
-    par_iter(&hir_map.krate().modules).for_each(|(module_id, _)| {
+    par_iter(&hir_map.krate.modules).for_each(|(module_id, _)| {
         let local_def_id = hir_map.local_def_id(*module_id);
         hir_map.visit_item_likes_in_module(
             local_def_id,
@@ -24,7 +24,7 @@
 
     if !errors.is_empty() {
         let message = errors.iter().fold(String::new(), |s1, s2| s1 + "\n" + s2);
-        bug!("{}", message);
+        sess.delay_span_bug(rustc_span::DUMMY_SP, &message);
     }
 }
 
diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs
index 6d7f531..adda0cd 100644
--- a/src/librustc/hir/map/mod.rs
+++ b/src/librustc/hir/map/mod.rs
@@ -129,30 +129,6 @@
     }
 }
 
-/// Stores a crate and any number of inlined items from other crates.
-pub struct Forest<'hir> {
-    krate: Crate<'hir>,
-    pub dep_graph: DepGraph,
-}
-
-impl Forest<'hir> {
-    pub fn new(krate: Crate<'hir>, dep_graph: &DepGraph) -> Forest<'hir> {
-        Forest { krate, dep_graph: dep_graph.clone() }
-    }
-
-    pub fn krate(&self) -> &Crate<'hir> {
-        self.dep_graph.read(DepNode::new_no_params(DepKind::Krate));
-        &self.krate
-    }
-
-    /// This is used internally in the dependency tracking system.
-    /// Use the `krate` method to ensure your dependency on the
-    /// crate is tracked.
-    pub fn untracked_krate(&self) -> &Crate<'hir> {
-        &self.krate
-    }
-}
-
 /// This type is effectively a `HashMap<HirId, Entry<'hir>>`,
 /// but it is implemented as 2 layers of arrays.
 /// - first we have `A = IndexVec<DefIndex, B>` mapping `DefIndex`s to an inner value
@@ -162,11 +138,8 @@
 /// Represents a mapping from `NodeId`s to AST elements and their parent `NodeId`s.
 #[derive(Clone)]
 pub struct Map<'hir> {
-    /// The backing storage for all the AST nodes.
-    pub forest: &'hir Forest<'hir>,
+    krate: &'hir Crate<'hir>,
 
-    /// Same as the dep_graph in forest, just available with one fewer
-    /// deref. This is a gratuitous micro-optimization.
     pub dep_graph: DepGraph,
 
     /// The SVH of the local crate.
@@ -217,6 +190,13 @@
 }
 
 impl<'hir> Map<'hir> {
+    /// This is used internally in the dependency tracking system.
+    /// Use the `krate` method to ensure your dependency on the
+    /// crate is tracked.
+    pub fn untracked_krate(&self) -> &Crate<'hir> {
+        &self.krate
+    }
+
     #[inline]
     fn lookup(&self, id: HirId) -> Option<&Entry<'hir>> {
         let local_map = self.map.get(id.owner)?;
@@ -401,40 +381,36 @@
         self.lookup(id).cloned()
     }
 
-    pub fn krate(&self) -> &'hir Crate<'hir> {
-        self.forest.krate()
-    }
-
     pub fn item(&self, id: HirId) -> &'hir Item<'hir> {
         self.read(id);
 
-        // N.B., intentionally bypass `self.forest.krate()` so that we
+        // N.B., intentionally bypass `self.krate()` so that we
         // do not trigger a read of the whole krate here
-        self.forest.krate.item(id)
+        self.krate.item(id)
     }
 
     pub fn trait_item(&self, id: TraitItemId) -> &'hir TraitItem<'hir> {
         self.read(id.hir_id);
 
-        // N.B., intentionally bypass `self.forest.krate()` so that we
+        // N.B., intentionally bypass `self.krate()` so that we
         // do not trigger a read of the whole krate here
-        self.forest.krate.trait_item(id)
+        self.krate.trait_item(id)
     }
 
     pub fn impl_item(&self, id: ImplItemId) -> &'hir ImplItem<'hir> {
         self.read(id.hir_id);
 
-        // N.B., intentionally bypass `self.forest.krate()` so that we
+        // N.B., intentionally bypass `self.krate()` so that we
         // do not trigger a read of the whole krate here
-        self.forest.krate.impl_item(id)
+        self.krate.impl_item(id)
     }
 
     pub fn body(&self, id: BodyId) -> &'hir Body<'hir> {
         self.read(id.hir_id);
 
-        // N.B., intentionally bypass `self.forest.krate()` so that we
+        // N.B., intentionally bypass `self.krate()` so that we
         // do not trigger a read of the whole krate here
-        self.forest.krate.body(id)
+        self.krate.body(id)
     }
 
     pub fn fn_decl_by_hir_id(&self, hir_id: HirId) -> Option<&'hir FnDecl<'hir>> {
@@ -530,9 +506,9 @@
     pub fn trait_impls(&self, trait_did: DefId) -> &'hir [HirId] {
         self.dep_graph.read(DepNode::new_no_params(DepKind::AllLocalTraitImpls));
 
-        // N.B., intentionally bypass `self.forest.krate()` so that we
+        // N.B., intentionally bypass `self.krate()` so that we
         // do not trigger a read of the whole krate here
-        self.forest.krate.trait_impls.get(&trait_did).map_or(&[], |xs| &xs[..])
+        self.krate.trait_impls.get(&trait_did).map_or(&[], |xs| &xs[..])
     }
 
     /// Gets the attributes on the crate. This is preferable to
@@ -542,7 +518,7 @@
         let def_path_hash = self.definitions.def_path_hash(CRATE_DEF_INDEX);
 
         self.dep_graph.read(def_path_hash.to_dep_node(DepKind::Hir));
-        &self.forest.krate.attrs
+        &self.krate.attrs
     }
 
     pub fn get_module(&self, module: DefId) -> (&'hir Mod<'hir>, Span, HirId) {
@@ -550,7 +526,7 @@
         self.read(hir_id);
         match self.find_entry(hir_id).unwrap().node {
             Node::Item(&Item { span, kind: ItemKind::Mod(ref m), .. }) => (m, span, hir_id),
-            Node::Crate => (&self.forest.krate.module, self.forest.krate.span, hir_id),
+            Node::Crate => (&self.krate.module, self.krate.span, hir_id),
             node => panic!("not a module: {:?}", node),
         }
     }
@@ -567,7 +543,7 @@
         // in the expect_* calls the loops below
         self.read(hir_id);
 
-        let module = &self.forest.krate.modules[&hir_id];
+        let module = &self.krate.modules[&hir_id];
 
         for id in &module.items {
             visitor.visit_item(self.expect_item(*id));
@@ -984,7 +960,7 @@
             // Unit/tuple structs/variants take the attributes straight from
             // the struct/variant definition.
             Some(Node::Ctor(..)) => return self.attrs(self.get_parent_item(id)),
-            Some(Node::Crate) => Some(&self.forest.krate.attrs[..]),
+            Some(Node::Crate) => Some(&self.krate.attrs[..]),
             _ => None,
         };
         attrs.unwrap_or(&[])
@@ -1063,7 +1039,7 @@
             Some(Node::Visibility(v)) => bug!("unexpected Visibility {:?}", v),
             Some(Node::Local(local)) => local.span,
             Some(Node::MacroDef(macro_def)) => macro_def.span,
-            Some(Node::Crate) => self.forest.krate.span,
+            Some(Node::Crate) => self.krate.span,
             None => bug!("hir::map::Map::span: id not in map: {:?}", hir_id),
         }
     }
@@ -1231,7 +1207,8 @@
 pub fn map_crate<'hir>(
     sess: &rustc_session::Session,
     cstore: &CrateStoreDyn,
-    forest: &'hir Forest<'hir>,
+    krate: &'hir Crate<'hir>,
+    dep_graph: DepGraph,
     definitions: Definitions,
 ) -> Map<'hir> {
     let _prof_timer = sess.prof.generic_activity("build_hir_map");
@@ -1244,34 +1221,21 @@
         .collect();
 
     let (map, crate_hash) = {
-        let hcx = crate::ich::StableHashingContext::new(sess, &forest.krate, &definitions, cstore);
+        let hcx = crate::ich::StableHashingContext::new(sess, krate, &definitions, cstore);
 
-        let mut collector = NodeCollector::root(
-            sess,
-            &forest.krate,
-            &forest.dep_graph,
-            &definitions,
-            &hir_to_node_id,
-            hcx,
-        );
-        intravisit::walk_crate(&mut collector, &forest.krate);
+        let mut collector =
+            NodeCollector::root(sess, krate, &dep_graph, &definitions, &hir_to_node_id, hcx);
+        intravisit::walk_crate(&mut collector, krate);
 
         let crate_disambiguator = sess.local_crate_disambiguator();
         let cmdline_args = sess.opts.dep_tracking_hash();
         collector.finalize_and_compute_crate_hash(crate_disambiguator, cstore, cmdline_args)
     };
 
-    let map = Map {
-        forest,
-        dep_graph: forest.dep_graph.clone(),
-        crate_hash,
-        map,
-        hir_to_node_id,
-        definitions,
-    };
+    let map = Map { krate, dep_graph, crate_hash, map, hir_to_node_id, definitions };
 
     sess.time("validate_HIR_map", || {
-        hir_id_validator::check_crate(&map);
+        hir_id_validator::check_crate(&map, sess);
     });
 
     map
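
The `N.B., intentionally bypass` comments above are about dependency granularity: calling `krate()` would register a read of the whole-crate dep node, so these accessors reach into the stored `Crate` directly and record only the fine-grained node for the item being read. Below is a minimal, self-contained sketch of that idea; `DepGraph`, `Krate` and `Map` here are simplified stand-ins, not the rustc types.

```rust
use std::cell::RefCell;
use std::collections::HashSet;

/// Toy dependency tracker: remembers which dep-graph nodes the current
/// "query" has read.
#[derive(Default)]
struct DepGraph {
    reads: RefCell<HashSet<String>>,
}

impl DepGraph {
    fn read(&self, node: &str) {
        self.reads.borrow_mut().insert(node.to_string());
    }
}

struct Krate {
    bodies: Vec<&'static str>,
}

struct Map {
    krate: Krate,
    dep_graph: DepGraph,
}

impl Map {
    /// Whole-crate access: registers a read of the coarse `Krate` node,
    /// so the caller is re-run whenever anything in the crate changes.
    fn krate(&self) -> &Krate {
        self.dep_graph.read("Krate");
        &self.krate
    }

    /// Per-item access: records only the fine-grained node for this body
    /// and deliberately bypasses `self.krate()`, so no coarse dependency
    /// is registered.
    fn body(&self, id: usize) -> &'static str {
        self.dep_graph.read(&format!("HirBody({})", id));
        self.krate.bodies[id]
    }
}

fn main() {
    let map = Map {
        krate: Krate { bodies: vec!["fn a() {}", "fn b() {}"] },
        dep_graph: DepGraph::default(),
    };
    map.body(1);
    // Only `HirBody(1)` has been recorded; `Krate` has not.
    println!("{:?}", map.dep_graph.reads.borrow());
    map.krate();
    println!("{:?}", map.dep_graph.reads.borrow());
}
```
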
diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs
index 6275c0a..7d48280 100644
--- a/src/librustc/hir/mod.rs
+++ b/src/librustc/hir/mod.rs
@@ -2,12 +2,52 @@
 //!
 //! [rustc guide]: https://rust-lang.github.io/rustc-guide/hir.html
 
-pub mod check_attr;
 pub mod exports;
 pub mod map;
 
 use crate::ty::query::Providers;
+use crate::ty::TyCtxt;
+use rustc_hir::def_id::LOCAL_CRATE;
+use rustc_hir::print;
+use rustc_hir::Crate;
+use std::ops::Deref;
+
+/// A wrapper type which allows you to access HIR.
+#[derive(Clone)]
+pub struct Hir<'tcx> {
+    tcx: TyCtxt<'tcx>,
+    map: &'tcx map::Map<'tcx>,
+}
+
+impl<'tcx> Hir<'tcx> {
+    pub fn krate(&self) -> &'tcx Crate<'tcx> {
+        self.tcx.hir_crate(LOCAL_CRATE)
+    }
+}
+
+impl<'tcx> Deref for Hir<'tcx> {
+    type Target = &'tcx map::Map<'tcx>;
+
+    #[inline(always)]
+    fn deref(&self) -> &Self::Target {
+        &self.map
+    }
+}
+
+impl<'hir> print::PpAnn for Hir<'hir> {
+    fn nested(&self, state: &mut print::State<'_>, nested: print::Nested) {
+        self.map.nested(state, nested)
+    }
+}
+
+impl<'tcx> TyCtxt<'tcx> {
+    #[inline(always)]
+    pub fn hir(self) -> Hir<'tcx> {
+        Hir { tcx: self, map: &self.hir_map }
+    }
+}
 
 pub fn provide(providers: &mut Providers<'_>) {
+    providers.hir_crate = |tcx, _| tcx.hir_map.untracked_krate();
     map::provide(providers);
 }
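
The new `Hir` wrapper pairs the `TyCtxt` with a reference to the lowered map and leans on `Deref`, so existing `tcx.hir().…` calls keep resolving to `map::Map` methods while whole-crate access (`krate()`) is routed through the tracked `hir_crate` query. A self-contained sketch of that wrapper-plus-`Deref` shape, using made-up `Ctxt`/`Map` types rather than the real ones:

```rust
use std::ops::Deref;

/// Stand-in for the lowered HIR map.
struct Map {
    bodies: Vec<&'static str>,
}

impl Map {
    fn body(&self, id: usize) -> &'static str {
        self.bodies[id]
    }
}

/// Stand-in for `TyCtxt`: owns the map.
struct Ctxt {
    map: Map,
}

impl Ctxt {
    /// Analogue of `tcx.hir()`: a cheap wrapper, not a copy of the map.
    fn hir(&self) -> Hir<'_> {
        Hir { ctxt: self, map: &self.map }
    }
}

/// Analogue of `Hir<'tcx>`: carries both the context and the map.
struct Hir<'a> {
    ctxt: &'a Ctxt,
    map: &'a Map,
}

impl<'a> Hir<'a> {
    /// Whole-crate access; in rustc this goes through the tracked
    /// `hir_crate` query rather than straight to the field.
    fn krate(&self) -> &'a Map {
        &self.ctxt.map
    }
}

/// `Deref` lets every existing `Map` method keep working on the wrapper.
impl<'a> Deref for Hir<'a> {
    type Target = Map;
    fn deref(&self) -> &Map {
        self.map
    }
}

fn main() {
    let ctxt = Ctxt { map: Map { bodies: vec!["fn main() {}"] } };
    // Method resolved on `Map` through `Deref`, like `tcx.hir().body(id)`.
    println!("{}", ctxt.hir().body(0));
    // Whole-crate access via the explicit `krate()` entry point.
    println!("{} bodies", ctxt.hir().krate().bodies.len());
}
```
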
diff --git a/src/librustc/ich/hcx.rs b/src/librustc/ich/hcx.rs
index aade4c3..1a9c5d1 100644
--- a/src/librustc/ich/hcx.rs
+++ b/src/librustc/ich/hcx.rs
@@ -149,7 +149,7 @@
     #[inline]
     pub fn source_map(&mut self) -> &mut CachingSourceMapView<'a> {
         match self.caching_source_map {
-            Some(ref mut cm) => cm,
+            Some(ref mut sm) => sm,
             ref mut none => {
                 *none = Some(CachingSourceMapView::new(self.raw_source_map));
                 none.as_mut().unwrap()
@@ -220,27 +220,8 @@
 }
 
 impl<'a> HashStable<StableHashingContext<'a>> for ast::NodeId {
-    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
-        match hcx.node_id_hashing_mode {
-            NodeIdHashingMode::Ignore => {
-                // Don't do anything.
-            }
-            NodeIdHashingMode::HashDefPath => {
-                hcx.definitions.node_to_hir_id(*self).hash_stable(hcx, hasher);
-            }
-        }
-    }
-}
-
-impl<'a> ToStableHashKey<StableHashingContext<'a>> for ast::NodeId {
-    type KeyType = (DefPathHash, hir::ItemLocalId);
-
-    #[inline]
-    fn to_stable_hash_key(
-        &self,
-        hcx: &StableHashingContext<'a>,
-    ) -> (DefPathHash, hir::ItemLocalId) {
-        hcx.definitions.node_to_hir_id(*self).to_stable_hash_key(hcx)
+    fn hash_stable(&self, _: &mut StableHashingContext<'a>, _: &mut StableHasher) {
+        panic!("Node IDs should not appear in incremental state");
     }
 }
 
@@ -249,6 +230,12 @@
         self.hash_spans
     }
 
+    #[inline]
+    fn hash_def_id(&mut self, def_id: DefId, hasher: &mut StableHasher) {
+        let hcx = self;
+        hcx.def_path_hash(def_id).hash_stable(hcx, hasher);
+    }
+
     fn byte_pos_to_line_and_col(
         &mut self,
         byte: BytePos,
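
With this change, hashing an `ast::NodeId` is an outright bug (node ids are not stable across compilations), and definitions enter the incremental state through their `DefPathHash` instead, as the relocated `hash_def_id` shows. A toy model of that rule follows; `DefId` and `Definitions` below are simplified stand-ins, not the rustc types.

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

/// Stand-in for `ast::NodeId`/`DefId`: a numeric index that can be
/// reassigned between compilations and so must never be hashed raw.
#[derive(Clone, Copy)]
struct DefId(u32);

/// Stand-in for the definitions table, which can turn an index into a
/// stable path such as `my_crate::module::function`.
struct Definitions {
    paths: Vec<String>,
}

impl Definitions {
    /// Analogue of `def_path_hash`: a fingerprint derived from the path,
    /// stable even if the numeric index changes.
    fn def_path_hash(&self, id: DefId) -> u64 {
        let mut h = DefaultHasher::new();
        self.paths[id.0 as usize].hash(&mut h);
        h.finish()
    }
}

/// The rule the hunk encodes: feed the path-derived fingerprint into the
/// incremental hash, never the raw index.
fn hash_def_id(defs: &Definitions, id: DefId, hasher: &mut DefaultHasher) {
    defs.def_path_hash(id).hash(hasher);
}

fn main() {
    let defs = Definitions {
        paths: vec!["my_crate::foo".to_string(), "my_crate::bar".to_string()],
    };
    let mut hasher = DefaultHasher::new();
    hash_def_id(&defs, DefId(1), &mut hasher);
    println!("stable fingerprint: {:x}", hasher.finish());
}
```
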
diff --git a/src/librustc/ich/impls_hir.rs b/src/librustc/ich/impls_hir.rs
index 061b82e..eadc9dd 100644
--- a/src/librustc/ich/impls_hir.rs
+++ b/src/librustc/ich/impls_hir.rs
@@ -12,12 +12,6 @@
 
 impl<'ctx> rustc_hir::HashStableContext for StableHashingContext<'ctx> {
     #[inline]
-    fn hash_def_id(&mut self, def_id: DefId, hasher: &mut StableHasher) {
-        let hcx = self;
-        hcx.def_path_hash(def_id).hash_stable(hcx, hasher);
-    }
-
-    #[inline]
     fn hash_hir_id(&mut self, hir_id: hir::HirId, hasher: &mut StableHasher) {
         let hcx = self;
         match hcx.node_id_hashing_mode {
@@ -40,40 +34,14 @@
         }
     }
 
-    // The following implementations of HashStable for `ItemId`, `TraitItemId`, and
-    // `ImplItemId` deserve special attention. Normally we do not hash `NodeId`s within
-    // the HIR, since they just signify a HIR node's own path. But `ItemId` et al
-    // are used when another item in the HIR is *referenced* and we certainly
-    // want to pick up on a reference changing its target, so we hash the NodeIds
-    // in "DefPath Mode".
-
-    fn hash_item_id(&mut self, id: hir::ItemId, hasher: &mut StableHasher) {
+    fn hash_reference_to_item(&mut self, id: hir::HirId, hasher: &mut StableHasher) {
         let hcx = self;
-        let hir::ItemId { id } = id;
 
         hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
             id.hash_stable(hcx, hasher);
         })
     }
 
-    fn hash_impl_item_id(&mut self, id: hir::ImplItemId, hasher: &mut StableHasher) {
-        let hcx = self;
-        let hir::ImplItemId { hir_id } = id;
-
-        hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
-            hir_id.hash_stable(hcx, hasher);
-        })
-    }
-
-    fn hash_trait_item_id(&mut self, id: hir::TraitItemId, hasher: &mut StableHasher) {
-        let hcx = self;
-        let hir::TraitItemId { hir_id } = id;
-
-        hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
-            hir_id.hash_stable(hcx, hasher);
-        })
-    }
-
     fn hash_hir_mod(&mut self, module: &hir::Mod<'_>, hasher: &mut StableHasher) {
         let hcx = self;
         let hir::Mod { inner: ref inner_span, ref item_ids } = *module;
@@ -283,12 +251,6 @@
     }
 }
 
-impl<'a> HashStable<StableHashingContext<'a>> for crate::middle::lang_items::LangItem {
-    fn hash_stable(&self, _: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
-        ::std::hash::Hash::hash(self, hasher);
-    }
-}
-
 impl<'a> HashStable<StableHashingContext<'a>> for hir::TraitCandidate {
     fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
         hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
@@ -308,7 +270,6 @@
 
         let import_keys = import_ids
             .iter()
-            .map(|node_id| hcx.node_to_hir_id(*node_id))
             .map(|hir_id| (hcx.local_def_path_hash(hir_id.owner), hir_id.local_id))
             .collect();
         (hcx.def_path_hash(*def_id), import_keys)
diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs
index e173379..d1815d5 100644
--- a/src/librustc/ich/impls_syntax.rs
+++ b/src/librustc/ich/impls_syntax.rs
@@ -12,13 +12,6 @@
 
 impl<'ctx> rustc_target::HashStableContext for StableHashingContext<'ctx> {}
 
-impl<'a> HashStable<StableHashingContext<'a>> for ast::Lifetime {
-    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
-        self.id.hash_stable(hcx, hasher);
-        self.ident.hash_stable(hcx, hasher);
-    }
-}
-
 impl<'a> HashStable<StableHashingContext<'a>> for [ast::Attribute] {
     fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
         if self.len() == 0 {
diff --git a/src/librustc/ich/impls_ty.rs b/src/librustc/ich/impls_ty.rs
index 6af0cee..844250f 100644
--- a/src/librustc/ich/impls_ty.rs
+++ b/src/librustc/ich/impls_ty.rs
@@ -63,9 +63,12 @@
     fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
         mem::discriminant(self).hash_stable(hcx, hasher);
         match *self {
-            ty::ReErased | ty::ReStatic | ty::ReEmpty => {
+            ty::ReErased | ty::ReStatic => {
                 // No variant fields to hash for these ...
             }
+            ty::ReEmpty(universe) => {
+                universe.hash_stable(hcx, hasher);
+            }
             ty::ReLateBound(db, ty::BrAnon(i)) => {
                 db.hash_stable(hcx, hasher);
                 i.hash_stable(hcx, hasher);
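
`ty::ReEmpty` now carries a universe index, so its stable hash must cover that field on top of the variant discriminant. A small standalone example of the discriminant-then-fields pattern, using `std`'s hasher and a toy `Region` enum rather than the real types:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::mem;

/// Toy region enum: `Empty` used to be a unit variant and now carries a
/// universe index, which the hash has to cover as well.
#[allow(dead_code)]
enum Region {
    Static,
    Erased,
    Empty(u32),          // universe index
    LateBound(u32, u32), // debruijn index, anonymous region number
}

fn hash_region(r: &Region, hasher: &mut DefaultHasher) {
    // Hash the variant tag first so values from different variants never
    // collide, then hash whatever fields the variant has.
    mem::discriminant(r).hash(hasher);
    match r {
        Region::Static | Region::Erased => {}
        Region::Empty(universe) => universe.hash(hasher),
        Region::LateBound(db, i) => {
            db.hash(hasher);
            i.hash(hasher);
        }
    }
}

fn main() {
    let mut h = DefaultHasher::new();
    hash_region(&Region::Empty(2), &mut h);
    hash_region(&Region::LateBound(0, 1), &mut h);
    println!("fingerprint: {:x}", h.finish());
}
```
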
diff --git a/src/librustc/infer/at.rs b/src/librustc/infer/at.rs
deleted file mode 100644
index c58f1bd..0000000
--- a/src/librustc/infer/at.rs
+++ /dev/null
@@ -1,312 +0,0 @@
-//! A nice interface for working with the infcx. The basic idea is to
-//! do `infcx.at(cause, param_env)`, which sets the "cause" of the
-//! operation as well as the surrounding parameter environment. Then
-//! you can do something like `.sub(a, b)` or `.eq(a, b)` to create a
-//! subtype or equality relationship respectively. The first argument
-//! is always the "expected" output from the POV of diagnostics.
-//!
-//! Examples:
-//!
-//!     infcx.at(cause, param_env).sub(a, b)
-//!     // requires that `a <: b`, with `a` considered the "expected" type
-//!
-//!     infcx.at(cause, param_env).sup(a, b)
-//!     // requires that `b <: a`, with `a` considered the "expected" type
-//!
-//!     infcx.at(cause, param_env).eq(a, b)
-//!     // requires that `a == b`, with `a` considered the "expected" type
-//!
-//! For finer-grained control, you can also use `trace`:
-//!
-//!     infcx.at(...).trace(a, b).sub(&c, &d)
-//!
-//! This will set `a` and `b` as the "root" values for
-//! error-reporting, but actually operate on `c` and `d`. This is
-//! sometimes useful when the types of `c` and `d` are not traceable
-//! things. (That system should probably be refactored.)
-
-use super::*;
-
-use crate::ty::relate::{Relate, TypeRelation};
-use crate::ty::Const;
-
-pub struct At<'a, 'tcx> {
-    pub infcx: &'a InferCtxt<'a, 'tcx>,
-    pub cause: &'a ObligationCause<'tcx>,
-    pub param_env: ty::ParamEnv<'tcx>,
-}
-
-pub struct Trace<'a, 'tcx> {
-    at: At<'a, 'tcx>,
-    a_is_expected: bool,
-    trace: TypeTrace<'tcx>,
-}
-
-impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
-    #[inline]
-    pub fn at(
-        &'a self,
-        cause: &'a ObligationCause<'tcx>,
-        param_env: ty::ParamEnv<'tcx>,
-    ) -> At<'a, 'tcx> {
-        At { infcx: self, cause, param_env }
-    }
-}
-
-pub trait ToTrace<'tcx>: Relate<'tcx> + Copy {
-    fn to_trace(
-        cause: &ObligationCause<'tcx>,
-        a_is_expected: bool,
-        a: Self,
-        b: Self,
-    ) -> TypeTrace<'tcx>;
-}
-
-impl<'a, 'tcx> At<'a, 'tcx> {
-    /// Hacky routine for equating two impl headers in coherence.
-    pub fn eq_impl_headers(
-        self,
-        expected: &ty::ImplHeader<'tcx>,
-        actual: &ty::ImplHeader<'tcx>,
-    ) -> InferResult<'tcx, ()> {
-        debug!("eq_impl_header({:?} = {:?})", expected, actual);
-        match (expected.trait_ref, actual.trait_ref) {
-            (Some(a_ref), Some(b_ref)) => self.eq(a_ref, b_ref),
-            (None, None) => self.eq(expected.self_ty, actual.self_ty),
-            _ => bug!("mk_eq_impl_headers given mismatched impl kinds"),
-        }
-    }
-
-    /// Makes `a <: b`, where `a` may or may not be expected.
-    pub fn sub_exp<T>(self, a_is_expected: bool, a: T, b: T) -> InferResult<'tcx, ()>
-    where
-        T: ToTrace<'tcx>,
-    {
-        self.trace_exp(a_is_expected, a, b).sub(&a, &b)
-    }
-
-    /// Makes `actual <: expected`. For example, if type-checking a
-    /// call like `foo(x)`, where `foo: fn(i32)`, you might have
-    /// `sup(i32, x)`, since the "expected" type is the type that
-    /// appears in the signature.
-    pub fn sup<T>(self, expected: T, actual: T) -> InferResult<'tcx, ()>
-    where
-        T: ToTrace<'tcx>,
-    {
-        self.sub_exp(false, actual, expected)
-    }
-
-    /// Makes `expected <: actual`.
-    pub fn sub<T>(self, expected: T, actual: T) -> InferResult<'tcx, ()>
-    where
-        T: ToTrace<'tcx>,
-    {
-        self.sub_exp(true, expected, actual)
-    }
-
-    /// Makes `a == b`, with the expected side chosen by `a_is_expected`.
-    pub fn eq_exp<T>(self, a_is_expected: bool, a: T, b: T) -> InferResult<'tcx, ()>
-    where
-        T: ToTrace<'tcx>,
-    {
-        self.trace_exp(a_is_expected, a, b).eq(&a, &b)
-    }
-
-    /// Makes `expected == actual`.
-    pub fn eq<T>(self, expected: T, actual: T) -> InferResult<'tcx, ()>
-    where
-        T: ToTrace<'tcx>,
-    {
-        self.trace(expected, actual).eq(&expected, &actual)
-    }
-
-    pub fn relate<T>(self, expected: T, variance: ty::Variance, actual: T) -> InferResult<'tcx, ()>
-    where
-        T: ToTrace<'tcx>,
-    {
-        match variance {
-            ty::Variance::Covariant => self.sub(expected, actual),
-            ty::Variance::Invariant => self.eq(expected, actual),
-            ty::Variance::Contravariant => self.sup(expected, actual),
-
-            // We could make this make sense but it's not readily
-            // exposed and I don't feel like dealing with it. Note
-            // that bivariance in general does a bit more than just
-            // *nothing*, it checks that the types are the same
-            // "modulo variance" basically.
-            ty::Variance::Bivariant => panic!("Bivariant given to `relate()`"),
-        }
-    }
-
-    /// Computes the least-upper-bound, or mutual supertype, of two
-    /// values. The order of the arguments doesn't matter, but since
-    /// this can result in an error (e.g., if asked to compute LUB of
-    /// u32 and i32), it is meaningful to call one of them the
-    /// "expected type".
-    pub fn lub<T>(self, expected: T, actual: T) -> InferResult<'tcx, T>
-    where
-        T: ToTrace<'tcx>,
-    {
-        self.trace(expected, actual).lub(&expected, &actual)
-    }
-
-    /// Computes the greatest-lower-bound, or mutual subtype, of two
-    /// values. As with `lub` order doesn't matter, except for error
-    /// cases.
-    pub fn glb<T>(self, expected: T, actual: T) -> InferResult<'tcx, T>
-    where
-        T: ToTrace<'tcx>,
-    {
-        self.trace(expected, actual).glb(&expected, &actual)
-    }
-
-    /// Sets the "trace" values that will be used for
-    /// error-reporting, but doesn't actually perform any operation
-    /// yet (this is useful when you want to set the trace using
-    /// distinct values from those you wish to operate upon).
-    pub fn trace<T>(self, expected: T, actual: T) -> Trace<'a, 'tcx>
-    where
-        T: ToTrace<'tcx>,
-    {
-        self.trace_exp(true, expected, actual)
-    }
-
-    /// Like `trace`, but the expected value is determined by the
-    /// boolean argument (if true, then the first argument `a` is the
-    /// "expected" value).
-    pub fn trace_exp<T>(self, a_is_expected: bool, a: T, b: T) -> Trace<'a, 'tcx>
-    where
-        T: ToTrace<'tcx>,
-    {
-        let trace = ToTrace::to_trace(self.cause, a_is_expected, a, b);
-        Trace { at: self, trace: trace, a_is_expected }
-    }
-}
-
-impl<'a, 'tcx> Trace<'a, 'tcx> {
-    /// Makes `a <: b` where `a` may or may not be expected (if
-    /// `a_is_expected` is true, then `a` is expected).
-    /// Makes `expected <: actual`.
-    pub fn sub<T>(self, a: &T, b: &T) -> InferResult<'tcx, ()>
-    where
-        T: Relate<'tcx>,
-    {
-        debug!("sub({:?} <: {:?})", a, b);
-        let Trace { at, trace, a_is_expected } = self;
-        at.infcx.commit_if_ok(|_| {
-            let mut fields = at.infcx.combine_fields(trace, at.param_env);
-            fields
-                .sub(a_is_expected)
-                .relate(a, b)
-                .map(move |_| InferOk { value: (), obligations: fields.obligations })
-        })
-    }
-
-    /// Makes `a == b`; the expectation is set by the call to
-    /// `trace()`.
-    pub fn eq<T>(self, a: &T, b: &T) -> InferResult<'tcx, ()>
-    where
-        T: Relate<'tcx>,
-    {
-        debug!("eq({:?} == {:?})", a, b);
-        let Trace { at, trace, a_is_expected } = self;
-        at.infcx.commit_if_ok(|_| {
-            let mut fields = at.infcx.combine_fields(trace, at.param_env);
-            fields
-                .equate(a_is_expected)
-                .relate(a, b)
-                .map(move |_| InferOk { value: (), obligations: fields.obligations })
-        })
-    }
-
-    pub fn lub<T>(self, a: &T, b: &T) -> InferResult<'tcx, T>
-    where
-        T: Relate<'tcx>,
-    {
-        debug!("lub({:?} \\/ {:?})", a, b);
-        let Trace { at, trace, a_is_expected } = self;
-        at.infcx.commit_if_ok(|_| {
-            let mut fields = at.infcx.combine_fields(trace, at.param_env);
-            fields
-                .lub(a_is_expected)
-                .relate(a, b)
-                .map(move |t| InferOk { value: t, obligations: fields.obligations })
-        })
-    }
-
-    pub fn glb<T>(self, a: &T, b: &T) -> InferResult<'tcx, T>
-    where
-        T: Relate<'tcx>,
-    {
-        debug!("glb({:?} /\\ {:?})", a, b);
-        let Trace { at, trace, a_is_expected } = self;
-        at.infcx.commit_if_ok(|_| {
-            let mut fields = at.infcx.combine_fields(trace, at.param_env);
-            fields
-                .glb(a_is_expected)
-                .relate(a, b)
-                .map(move |t| InferOk { value: t, obligations: fields.obligations })
-        })
-    }
-}
-
-impl<'tcx> ToTrace<'tcx> for Ty<'tcx> {
-    fn to_trace(
-        cause: &ObligationCause<'tcx>,
-        a_is_expected: bool,
-        a: Self,
-        b: Self,
-    ) -> TypeTrace<'tcx> {
-        TypeTrace { cause: cause.clone(), values: Types(ExpectedFound::new(a_is_expected, a, b)) }
-    }
-}
-
-impl<'tcx> ToTrace<'tcx> for ty::Region<'tcx> {
-    fn to_trace(
-        cause: &ObligationCause<'tcx>,
-        a_is_expected: bool,
-        a: Self,
-        b: Self,
-    ) -> TypeTrace<'tcx> {
-        TypeTrace { cause: cause.clone(), values: Regions(ExpectedFound::new(a_is_expected, a, b)) }
-    }
-}
-
-impl<'tcx> ToTrace<'tcx> for &'tcx Const<'tcx> {
-    fn to_trace(
-        cause: &ObligationCause<'tcx>,
-        a_is_expected: bool,
-        a: Self,
-        b: Self,
-    ) -> TypeTrace<'tcx> {
-        TypeTrace { cause: cause.clone(), values: Consts(ExpectedFound::new(a_is_expected, a, b)) }
-    }
-}
-
-impl<'tcx> ToTrace<'tcx> for ty::TraitRef<'tcx> {
-    fn to_trace(
-        cause: &ObligationCause<'tcx>,
-        a_is_expected: bool,
-        a: Self,
-        b: Self,
-    ) -> TypeTrace<'tcx> {
-        TypeTrace {
-            cause: cause.clone(),
-            values: TraitRefs(ExpectedFound::new(a_is_expected, a, b)),
-        }
-    }
-}
-
-impl<'tcx> ToTrace<'tcx> for ty::PolyTraitRef<'tcx> {
-    fn to_trace(
-        cause: &ObligationCause<'tcx>,
-        a_is_expected: bool,
-        a: Self,
-        b: Self,
-    ) -> TypeTrace<'tcx> {
-        TypeTrace {
-            cause: cause.clone(),
-            values: PolyTraitRefs(ExpectedFound::new(a_is_expected, a, b)),
-        }
-    }
-}
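
The deleted `infer/at.rs` documents a small fluent interface: bundle the cause and parameter environment once with `infcx.at(...)`, then ask for a relation with `sub`, `sup` or `eq`. A toy, self-contained version of that shape follows, with integers standing in for types and `<=` for subtyping; `InferCtxt` and `At` here are simplified stand-ins, not the real ones.

```rust
/// Toy inference context: the only "types" are integers and
/// "subtyping" is plain `<=`.
struct InferCtxt;

/// Everything the diagnostics path needs, bundled once by `at`.
/// (The real `At` also carries the `InferCtxt` and a `ParamEnv`.)
struct At {
    cause: &'static str,
}

impl InferCtxt {
    /// Analogue of `infcx.at(cause, param_env)`.
    fn at(&self, cause: &'static str) -> At {
        At { cause }
    }
}

impl At {
    /// `sub(expected, actual)`: require `expected <: actual`.
    fn sub(&self, expected: i32, actual: i32) -> Result<(), String> {
        if expected <= actual {
            Ok(())
        } else {
            Err(format!("{}: {} is not a subtype of {}", self.cause, expected, actual))
        }
    }

    /// `sup(expected, actual)`: require `actual <: expected`, keeping
    /// `expected` first for diagnostics.
    fn sup(&self, expected: i32, actual: i32) -> Result<(), String> {
        self.sub(actual, expected)
            .map_err(|_| format!("{}: {} is not a supertype of {}", self.cause, expected, actual))
    }

    /// `eq(expected, actual)`: require both directions.
    fn eq(&self, expected: i32, actual: i32) -> Result<(), String> {
        self.sub(expected, actual)?;
        self.sub(actual, expected)
    }
}

fn main() {
    let infcx = InferCtxt;
    assert!(infcx.at("call argument").sub(1, 2).is_ok());
    assert!(infcx.at("call argument").sup(2, 1).is_ok());
    println!("{:?}", infcx.at("let binding").eq(1, 2));
}
```
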
diff --git a/src/librustc/infer/canonical.rs b/src/librustc/infer/canonical.rs
new file mode 100644
index 0000000..76d0d57
--- /dev/null
+++ b/src/librustc/infer/canonical.rs
@@ -0,0 +1,357 @@
+//! **Canonicalization** is the key to constructing a query in the
+//! middle of type inference. Ordinarily, it is not possible to store
+//! types from type inference in query keys, because they contain
+//! references to inference variables whose lifetimes are too short
+//! and so forth. Canonicalizing a value T1 using `canonicalize_query`
+//! produces two things:
+//!
+//! - a value T2 where each unbound inference variable has been
+//!   replaced with a **canonical variable**;
+//! - a map M (of type `CanonicalVarValues`) from those canonical
+//!   variables back to the original.
+//!
+//! We can then do queries using T2. These will give back constraints
+//! on the canonical variables which can be translated, using the map
+//! M, into constraints in our source context. This process of
+//! translating the results back is done by the
+//! `instantiate_query_result` method.
+//!
+//! For a more detailed look at what is happening here, check
+//! out the [chapter in the rustc guide][c].
+//!
+//! [c]: https://rust-lang.github.io/rustc-guide/traits/canonicalization.html
+
+use crate::infer::MemberConstraint;
+use crate::ty::subst::GenericArg;
+use crate::ty::{self, BoundVar, List, Region, TyCtxt};
+use rustc_index::vec::IndexVec;
+use rustc_macros::HashStable;
+use rustc_serialize::UseSpecializedDecodable;
+use smallvec::SmallVec;
+use std::ops::Index;
+
+/// A "canonicalized" type `V` is one where all free inference
+/// variables have been rewritten to "canonical vars". These are
+/// numbered starting from 0 in order of first appearance.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+#[derive(HashStable, TypeFoldable, Lift)]
+pub struct Canonical<'tcx, V> {
+    pub max_universe: ty::UniverseIndex,
+    pub variables: CanonicalVarInfos<'tcx>,
+    pub value: V,
+}
+
+pub type CanonicalVarInfos<'tcx> = &'tcx List<CanonicalVarInfo>;
+
+impl<'tcx> UseSpecializedDecodable for CanonicalVarInfos<'tcx> {}
+
+/// A set of values corresponding to the canonical variables from some
+/// `Canonical`. You can give these values to
+/// `canonical_value.substitute` to substitute them into the canonical
+/// value at the right places.
+///
+/// When you canonicalize a value `V`, you get back one of these
+/// vectors with the original values that were replaced by canonical
+/// variables. You will need to supply it later to instantiate the
+/// canonicalized query response.
+#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+#[derive(HashStable, TypeFoldable, Lift)]
+pub struct CanonicalVarValues<'tcx> {
+    pub var_values: IndexVec<BoundVar, GenericArg<'tcx>>,
+}
+
+/// When we canonicalize a value to form a query, we wind up replacing
+/// various parts of it with canonical variables. This struct stores
+/// those replaced bits to remember for when we process the query
+/// result.
+#[derive(Clone, Debug)]
+pub struct OriginalQueryValues<'tcx> {
+    /// Map from the universes that appear in the query to the
+    /// universes in the caller context. For the time being, we only
+    /// ever put ROOT values into the query, so this map is very
+    /// simple.
+    pub universe_map: SmallVec<[ty::UniverseIndex; 4]>,
+
+    /// This is equivalent to `CanonicalVarValues`, but using a
+    /// `SmallVec` yields a significant performance win.
+    pub var_values: SmallVec<[GenericArg<'tcx>; 8]>,
+}
+
+impl Default for OriginalQueryValues<'tcx> {
+    fn default() -> Self {
+        let mut universe_map = SmallVec::default();
+        universe_map.push(ty::UniverseIndex::ROOT);
+
+        Self { universe_map, var_values: SmallVec::default() }
+    }
+}
+
+/// Information about a canonical variable that is included with the
+/// canonical value. This is sufficient information for code to create
+/// a copy of the canonical value in some other inference context,
+/// with fresh inference variables replacing the canonical values.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable, HashStable)]
+pub struct CanonicalVarInfo {
+    pub kind: CanonicalVarKind,
+}
+
+impl CanonicalVarInfo {
+    pub fn universe(&self) -> ty::UniverseIndex {
+        self.kind.universe()
+    }
+
+    pub fn is_existential(&self) -> bool {
+        match self.kind {
+            CanonicalVarKind::Ty(_) => true,
+            CanonicalVarKind::PlaceholderTy(_) => false,
+            CanonicalVarKind::Region(_) => true,
+            CanonicalVarKind::PlaceholderRegion(..) => false,
+            CanonicalVarKind::Const(_) => true,
+            CanonicalVarKind::PlaceholderConst(_) => false,
+        }
+    }
+}
+
+/// Describes the "kind" of the canonical variable. This is a "kind"
+/// in the type-theory sense of the term -- i.e., a "meta" type system
+/// that analyzes type-like values.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable, HashStable)]
+pub enum CanonicalVarKind {
+    /// Some kind of type inference variable.
+    Ty(CanonicalTyVarKind),
+
+    /// A "placeholder" that represents "any type".
+    PlaceholderTy(ty::PlaceholderType),
+
+    /// Region variable `'?R`.
+    Region(ty::UniverseIndex),
+
+    /// A "placeholder" that represents "any region". Created when you
+    /// are solving a goal like `for<'a> T: Foo<'a>` to represent the
+    /// bound region `'a`.
+    PlaceholderRegion(ty::PlaceholderRegion),
+
+    /// Some kind of const inference variable.
+    Const(ty::UniverseIndex),
+
+    /// A "placeholder" that represents "any const".
+    PlaceholderConst(ty::PlaceholderConst),
+}
+
+impl CanonicalVarKind {
+    pub fn universe(self) -> ty::UniverseIndex {
+        match self {
+            CanonicalVarKind::Ty(kind) => match kind {
+                CanonicalTyVarKind::General(ui) => ui,
+                CanonicalTyVarKind::Float | CanonicalTyVarKind::Int => ty::UniverseIndex::ROOT,
+            },
+
+            CanonicalVarKind::PlaceholderTy(placeholder) => placeholder.universe,
+            CanonicalVarKind::Region(ui) => ui,
+            CanonicalVarKind::PlaceholderRegion(placeholder) => placeholder.universe,
+            CanonicalVarKind::Const(ui) => ui,
+            CanonicalVarKind::PlaceholderConst(placeholder) => placeholder.universe,
+        }
+    }
+}
+
+/// Rust actually has more than one category of type variables;
+/// notably, the type variables we create for literals (e.g., 22 or
+/// 22.) can only be instantiated with integral/float types (e.g.,
+/// usize or f32). In order to faithfully reproduce a type, we need to
+/// know what set of types a given type variable can be unified with.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable, HashStable)]
+pub enum CanonicalTyVarKind {
+    /// General type variable `?T` that can be unified with arbitrary types.
+    General(ty::UniverseIndex),
+
+    /// Integral type variable `?I` (that can only be unified with integral types).
+    Int,
+
+    /// Floating-point type variable `?F` (that can only be unified with float types).
+    Float,
+}
+
+/// After we execute a query with a canonicalized key, we get back a
+/// `Canonical<QueryResponse<..>>`. You can use
+/// `instantiate_query_result` to access the data in this result.
+#[derive(Clone, Debug, HashStable, TypeFoldable, Lift)]
+pub struct QueryResponse<'tcx, R> {
+    pub var_values: CanonicalVarValues<'tcx>,
+    pub region_constraints: QueryRegionConstraints<'tcx>,
+    pub certainty: Certainty,
+    pub value: R,
+}
+
+#[derive(Clone, Debug, Default, HashStable, TypeFoldable, Lift)]
+pub struct QueryRegionConstraints<'tcx> {
+    pub outlives: Vec<QueryOutlivesConstraint<'tcx>>,
+    pub member_constraints: Vec<MemberConstraint<'tcx>>,
+}
+
+impl QueryRegionConstraints<'_> {
+    /// Represents an empty (trivially true) set of region
+    /// constraints.
+    pub fn is_empty(&self) -> bool {
+        self.outlives.is_empty() && self.member_constraints.is_empty()
+    }
+}
+
+pub type Canonicalized<'tcx, V> = Canonical<'tcx, V>;
+
+pub type CanonicalizedQueryResponse<'tcx, T> = &'tcx Canonical<'tcx, QueryResponse<'tcx, T>>;
+
+/// Indicates whether or not we were able to prove the query to be
+/// true.
+#[derive(Copy, Clone, Debug, HashStable)]
+pub enum Certainty {
+    /// The query is known to be true, presuming that you apply the
+    /// given `var_values` and the region-constraints are satisfied.
+    Proven,
+
+    /// The query is not known to be true, but also not known to be
+    /// false. The `var_values` represent *either* values that must
+    /// hold in order for the query to be true, or helpful tips that
+    /// *might* make it true. Currently rustc's trait solver cannot
+    /// distinguish the two (e.g., due to our preference for where
+    /// clauses over impls).
+    ///
+    /// After some unifications and things have been done, it makes
+    /// sense to try and prove again -- of course, at that point, the
+    /// canonical form will be different, making this a distinct
+    /// query.
+    Ambiguous,
+}
+
+impl Certainty {
+    pub fn is_proven(&self) -> bool {
+        match self {
+            Certainty::Proven => true,
+            Certainty::Ambiguous => false,
+        }
+    }
+
+    pub fn is_ambiguous(&self) -> bool {
+        !self.is_proven()
+    }
+}
+
+impl<'tcx, R> QueryResponse<'tcx, R> {
+    pub fn is_proven(&self) -> bool {
+        self.certainty.is_proven()
+    }
+
+    pub fn is_ambiguous(&self) -> bool {
+        !self.is_proven()
+    }
+}
+
+impl<'tcx, R> Canonical<'tcx, QueryResponse<'tcx, R>> {
+    pub fn is_proven(&self) -> bool {
+        self.value.is_proven()
+    }
+
+    pub fn is_ambiguous(&self) -> bool {
+        !self.is_proven()
+    }
+}
+
+impl<'tcx, V> Canonical<'tcx, V> {
+    /// Allows you to map the `value` of a canonical while keeping the
+    /// same set of bound variables.
+    ///
+    /// **WARNING:** This function is very easy to mis-use, hence the
+    /// name!  In particular, the new value `W` must use all **the
+    /// same type/region variables** in **precisely the same order**
+    /// as the original! (The ordering is defined by the
+    /// `TypeFoldable` implementation of the type in question.)
+    ///
+    /// An example of a **correct** use of this:
+    ///
+    /// ```rust,ignore (not real code)
+    /// let a: Canonical<'_, T> = ...;
+    /// let b: Canonical<'_, (T,)> = a.unchecked_map(|v| (v, ));
+    /// ```
+    ///
+    /// An example of an **incorrect** use of this:
+    ///
+    /// ```rust,ignore (not real code)
+    /// let a: Canonical<'tcx, T> = ...;
+    /// let ty: Ty<'tcx> = ...;
+    /// let b: Canonical<'tcx, (T, Ty<'tcx>)> = a.unchecked_map(|v| (v, ty));
+    /// ```
+    pub fn unchecked_map<W>(self, map_op: impl FnOnce(V) -> W) -> Canonical<'tcx, W> {
+        let Canonical { max_universe, variables, value } = self;
+        Canonical { max_universe, variables, value: map_op(value) }
+    }
+}
+
+pub type QueryOutlivesConstraint<'tcx> =
+    ty::Binder<ty::OutlivesPredicate<GenericArg<'tcx>, Region<'tcx>>>;
+
+CloneTypeFoldableAndLiftImpls! {
+    crate::infer::canonical::Certainty,
+    crate::infer::canonical::CanonicalVarInfo,
+    crate::infer::canonical::CanonicalVarKind,
+}
+
+CloneTypeFoldableImpls! {
+    for <'tcx> {
+        crate::infer::canonical::CanonicalVarInfos<'tcx>,
+    }
+}
+
+impl<'tcx> CanonicalVarValues<'tcx> {
+    pub fn len(&self) -> usize {
+        self.var_values.len()
+    }
+
+    /// Makes an identity substitution from this one: each bound var
+    /// is matched to the same bound var, preserving the original kinds.
+    /// For example, if we have:
+    /// `self.var_values == [Type(u32), Lifetime('a), Type(u64)]`
+    /// we'll return a substitution `subst` with:
+    /// `subst.var_values == [Type(^0), Lifetime(^1), Type(^2)]`.
+    pub fn make_identity(&self, tcx: TyCtxt<'tcx>) -> Self {
+        use crate::ty::subst::GenericArgKind;
+
+        CanonicalVarValues {
+            var_values: self
+                .var_values
+                .iter()
+                .zip(0..)
+                .map(|(kind, i)| match kind.unpack() {
+                    GenericArgKind::Type(..) => {
+                        tcx.mk_ty(ty::Bound(ty::INNERMOST, ty::BoundVar::from_u32(i).into())).into()
+                    }
+                    GenericArgKind::Lifetime(..) => tcx
+                        .mk_region(ty::ReLateBound(ty::INNERMOST, ty::BoundRegion::BrAnon(i)))
+                        .into(),
+                    GenericArgKind::Const(ct) => tcx
+                        .mk_const(ty::Const {
+                            ty: ct.ty,
+                            val: ty::ConstKind::Bound(ty::INNERMOST, ty::BoundVar::from_u32(i)),
+                        })
+                        .into(),
+                })
+                .collect(),
+        }
+    }
+}
+
+impl<'a, 'tcx> IntoIterator for &'a CanonicalVarValues<'tcx> {
+    type Item = GenericArg<'tcx>;
+    type IntoIter = ::std::iter::Cloned<::std::slice::Iter<'a, GenericArg<'tcx>>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.var_values.iter().cloned()
+    }
+}
+
+impl<'tcx> Index<BoundVar> for CanonicalVarValues<'tcx> {
+    type Output = GenericArg<'tcx>;
+
+    fn index(&self, value: BoundVar) -> &GenericArg<'tcx> {
+        &self.var_values[value]
+    }
+}
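
The module docs above describe the core move: canonicalizing a value replaces each distinct unbound inference variable with a numbered canonical variable and keeps a `CanonicalVarValues`-style map back to the originals, which is what makes the result usable as a query key. A self-contained sketch with a toy `Ty` enum (not the rustc types) showing both halves, the rewritten value and the reverse map:

```rust
use std::collections::HashMap;

/// Toy type with inference variables, standing in for `Ty<'tcx>`.
#[derive(Clone, Debug)]
enum Ty {
    Int,
    Infer(u32),     // unresolved inference variable `?N`
    Canonical(u32), // canonical variable `^N`
    Tuple(Box<Ty>, Box<Ty>),
}

/// Analogue of `Canonical<V>` plus `CanonicalVarValues`: the rewritten
/// value and the map from canonical vars back to the originals.
struct Canonicalized {
    value: Ty,
    var_values: Vec<Ty>,
}

fn canonicalize(ty: &Ty, vars: &mut Vec<Ty>, seen: &mut HashMap<u32, u32>) -> Ty {
    match ty {
        Ty::Int | Ty::Canonical(_) => ty.clone(),
        Ty::Infer(vid) => {
            // Each *distinct* inference variable gets one canonical var,
            // numbered in order of first appearance.
            let idx = *seen.entry(*vid).or_insert_with(|| {
                vars.push(Ty::Infer(*vid));
                (vars.len() - 1) as u32
            });
            Ty::Canonical(idx)
        }
        Ty::Tuple(a, b) => Ty::Tuple(
            Box::new(canonicalize(a, vars, seen)),
            Box::new(canonicalize(b, vars, seen)),
        ),
    }
}

fn canonicalize_query(ty: &Ty) -> Canonicalized {
    let mut vars = Vec::new();
    let mut seen = HashMap::new();
    let value = canonicalize(ty, &mut vars, &mut seen);
    Canonicalized { value, var_values: vars }
}

fn main() {
    // (?7, (?3, ?7)) canonicalizes to (^0, (^1, ^0)); `var_values`
    // remembers that ^0 was ?7 and ^1 was ?3.
    let ty = Ty::Tuple(
        Box::new(Ty::Infer(7)),
        Box::new(Ty::Tuple(Box::new(Ty::Infer(3)), Box::new(Ty::Infer(7)))),
    );
    let canon = canonicalize_query(&ty);
    println!("canonical value: {:?}", canon.value);
    println!("var_values:      {:?}", canon.var_values);
}
```
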
diff --git a/src/librustc/infer/canonical/canonicalizer.rs b/src/librustc/infer/canonical/canonicalizer.rs
deleted file mode 100644
index b720168..0000000
--- a/src/librustc/infer/canonical/canonicalizer.rs
+++ /dev/null
@@ -1,664 +0,0 @@
-//! This module contains the "canonicalizer" itself.
-//!
-//! For an overview of what canonicalization is and how it fits into
-//! rustc, check out the [chapter in the rustc guide][c].
-//!
-//! [c]: https://rust-lang.github.io/rustc-guide/traits/canonicalization.html
-
-use crate::infer::canonical::{
-    Canonical, CanonicalTyVarKind, CanonicalVarInfo, CanonicalVarKind, Canonicalized,
-    OriginalQueryValues,
-};
-use crate::infer::InferCtxt;
-use crate::ty::flags::FlagComputation;
-use crate::ty::fold::{TypeFoldable, TypeFolder};
-use crate::ty::subst::GenericArg;
-use crate::ty::{self, BoundVar, InferConst, List, Ty, TyCtxt, TypeFlags};
-use std::sync::atomic::Ordering;
-
-use rustc_data_structures::fx::FxHashMap;
-use rustc_index::vec::Idx;
-use smallvec::SmallVec;
-
-impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
-    /// Canonicalizes a query value `V`. When we canonicalize a query,
-    /// we not only canonicalize unbound inference variables, but we
-    /// *also* replace all free regions whatsoever. So for example a
-    /// query like `T: Trait<'static>` would be canonicalized to
-    ///
-    /// ```text
-    /// T: Trait<'?0>
-    /// ```
-    ///
-    /// with a mapping M that maps `'?0` to `'static`.
-    ///
-    /// To get a good understanding of what is happening here, check
-    /// out the [chapter in the rustc guide][c].
-    ///
-    /// [c]: https://rust-lang.github.io/rustc-guide/traits/canonicalization.html#canonicalizing-the-query
-    pub fn canonicalize_query<V>(
-        &self,
-        value: &V,
-        query_state: &mut OriginalQueryValues<'tcx>,
-    ) -> Canonicalized<'tcx, V>
-    where
-        V: TypeFoldable<'tcx>,
-    {
-        self.tcx.sess.perf_stats.queries_canonicalized.fetch_add(1, Ordering::Relaxed);
-
-        Canonicalizer::canonicalize(
-            value,
-            Some(self),
-            self.tcx,
-            &CanonicalizeAllFreeRegions,
-            query_state,
-        )
-    }
-
-    /// Canonicalizes a query *response* `V`. When we canonicalize a
-    /// query response, we only canonicalize unbound inference
-    /// variables, and we leave other free regions alone. So,
-    /// continuing with the example from `canonicalize_query`, if
-    /// there was an input query `T: Trait<'static>`, it would have
-    /// been canonicalized to
-    ///
-    /// ```text
-    /// T: Trait<'?0>
-    /// ```
-    ///
-    /// with a mapping M that maps `'?0` to `'static`. But if we found that there
-    /// exists only one possible impl of `Trait`, and it looks like
-    ///
-    ///     impl<T> Trait<'static> for T { .. }
-    ///
-    /// then we would prepare a query result R that (among other
-    /// things) includes a mapping to `'?0 := 'static`. When
-    /// canonicalizing this query result R, we would leave this
-    /// reference to `'static` alone.
-    ///
-    /// To get a good understanding of what is happening here, check
-    /// out the [chapter in the rustc guide][c].
-    ///
-    /// [c]: https://rust-lang.github.io/rustc-guide/traits/canonicalization.html#canonicalizing-the-query-result
-    pub fn canonicalize_response<V>(&self, value: &V) -> Canonicalized<'tcx, V>
-    where
-        V: TypeFoldable<'tcx>,
-    {
-        let mut query_state = OriginalQueryValues::default();
-        Canonicalizer::canonicalize(
-            value,
-            Some(self),
-            self.tcx,
-            &CanonicalizeQueryResponse,
-            &mut query_state,
-        )
-    }
-
-    pub fn canonicalize_user_type_annotation<V>(&self, value: &V) -> Canonicalized<'tcx, V>
-    where
-        V: TypeFoldable<'tcx>,
-    {
-        let mut query_state = OriginalQueryValues::default();
-        Canonicalizer::canonicalize(
-            value,
-            Some(self),
-            self.tcx,
-            &CanonicalizeUserTypeAnnotation,
-            &mut query_state,
-        )
-    }
-
-    /// A hacky variant of `canonicalize_query` that does not
-    /// canonicalize `'static`. Unfortunately, the existing leak
-    /// check treats `'static` differently in some cases (see also
-    /// #33684), so if we are performing an operation that may need to
-    /// prove "leak-check" related things, we leave `'static`
-    /// alone.
-    ///
-    /// `'static` is also special cased when winnowing candidates when
-    /// selecting implementation candidates, so we also have to leave `'static`
-    /// alone for queries that do selection.
-    //
-    // FIXME(#48536): once the above issues are resolved, we can remove this
-    // and just use `canonicalize_query`.
-    pub fn canonicalize_hr_query_hack<V>(
-        &self,
-        value: &V,
-        query_state: &mut OriginalQueryValues<'tcx>,
-    ) -> Canonicalized<'tcx, V>
-    where
-        V: TypeFoldable<'tcx>,
-    {
-        self.tcx.sess.perf_stats.queries_canonicalized.fetch_add(1, Ordering::Relaxed);
-
-        Canonicalizer::canonicalize(
-            value,
-            Some(self),
-            self.tcx,
-            &CanonicalizeFreeRegionsOtherThanStatic,
-            query_state,
-        )
-    }
-}
-
-/// Controls how we canonicalize "free regions" that are not inference
-/// variables. This depends on what we are canonicalizing *for* --
-/// e.g., if we are canonicalizing to create a query, we want to
-/// replace those with inference variables, since we want to make a
-/// maximally general query. But if we are canonicalizing a *query
-/// response*, then we don't typically replace free regions, as they
-/// must have been introduced from other parts of the system.
-trait CanonicalizeRegionMode {
-    fn canonicalize_free_region(
-        &self,
-        canonicalizer: &mut Canonicalizer<'_, 'tcx>,
-        r: ty::Region<'tcx>,
-    ) -> ty::Region<'tcx>;
-
-    fn any(&self) -> bool;
-}
-
-struct CanonicalizeQueryResponse;
-
-impl CanonicalizeRegionMode for CanonicalizeQueryResponse {
-    fn canonicalize_free_region(
-        &self,
-        canonicalizer: &mut Canonicalizer<'_, 'tcx>,
-        r: ty::Region<'tcx>,
-    ) -> ty::Region<'tcx> {
-        match r {
-            ty::ReFree(_) | ty::ReEmpty | ty::ReErased | ty::ReStatic | ty::ReEarlyBound(..) => r,
-            ty::RePlaceholder(placeholder) => canonicalizer.canonical_var_for_region(
-                CanonicalVarInfo { kind: CanonicalVarKind::PlaceholderRegion(*placeholder) },
-                r,
-            ),
-            ty::ReVar(vid) => {
-                let universe = canonicalizer.region_var_universe(*vid);
-                canonicalizer.canonical_var_for_region(
-                    CanonicalVarInfo { kind: CanonicalVarKind::Region(universe) },
-                    r,
-                )
-            }
-            _ => {
-                // Other than `'static` or `'empty`, the query
-                // response should be executing in a fully
-                // canonicalized environment, so there shouldn't be
-            // any other region names it can come up with.
-                //
-                // rust-lang/rust#57464: `impl Trait` can leak local
-            // scopes (in a manner violating typeck). Therefore, use
-                // `delay_span_bug` to allow type error over an ICE.
-                ty::tls::with_context(|c| {
-                    c.tcx.sess.delay_span_bug(
-                        rustc_span::DUMMY_SP,
-                        &format!("unexpected region in query response: `{:?}`", r),
-                    );
-                });
-                r
-            }
-        }
-    }
-
-    fn any(&self) -> bool {
-        false
-    }
-}
-
-struct CanonicalizeUserTypeAnnotation;
-
-impl CanonicalizeRegionMode for CanonicalizeUserTypeAnnotation {
-    fn canonicalize_free_region(
-        &self,
-        canonicalizer: &mut Canonicalizer<'_, 'tcx>,
-        r: ty::Region<'tcx>,
-    ) -> ty::Region<'tcx> {
-        match r {
-            ty::ReEarlyBound(_) | ty::ReFree(_) | ty::ReErased | ty::ReEmpty | ty::ReStatic => r,
-            ty::ReVar(_) => canonicalizer.canonical_var_for_region_in_root_universe(r),
-            _ => {
-                // We only expect region names that the user can type.
-                bug!("unexpected region in query response: `{:?}`", r)
-            }
-        }
-    }
-
-    fn any(&self) -> bool {
-        false
-    }
-}
-
-struct CanonicalizeAllFreeRegions;
-
-impl CanonicalizeRegionMode for CanonicalizeAllFreeRegions {
-    fn canonicalize_free_region(
-        &self,
-        canonicalizer: &mut Canonicalizer<'_, 'tcx>,
-        r: ty::Region<'tcx>,
-    ) -> ty::Region<'tcx> {
-        canonicalizer.canonical_var_for_region_in_root_universe(r)
-    }
-
-    fn any(&self) -> bool {
-        true
-    }
-}
-
-struct CanonicalizeFreeRegionsOtherThanStatic;
-
-impl CanonicalizeRegionMode for CanonicalizeFreeRegionsOtherThanStatic {
-    fn canonicalize_free_region(
-        &self,
-        canonicalizer: &mut Canonicalizer<'_, 'tcx>,
-        r: ty::Region<'tcx>,
-    ) -> ty::Region<'tcx> {
-        if let ty::ReStatic = r {
-            r
-        } else {
-            canonicalizer.canonical_var_for_region_in_root_universe(r)
-        }
-    }
-
-    fn any(&self) -> bool {
-        true
-    }
-}
-
-struct Canonicalizer<'cx, 'tcx> {
-    infcx: Option<&'cx InferCtxt<'cx, 'tcx>>,
-    tcx: TyCtxt<'tcx>,
-    variables: SmallVec<[CanonicalVarInfo; 8]>,
-    query_state: &'cx mut OriginalQueryValues<'tcx>,
-    // Note that indices is only used once `var_values` is big enough to be
-    // heap-allocated.
-    indices: FxHashMap<GenericArg<'tcx>, BoundVar>,
-    canonicalize_region_mode: &'cx dyn CanonicalizeRegionMode,
-    needs_canonical_flags: TypeFlags,
-
-    binder_index: ty::DebruijnIndex,
-}
-
-impl<'cx, 'tcx> TypeFolder<'tcx> for Canonicalizer<'cx, 'tcx> {
-    fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
-        self.tcx
-    }
-
-    fn fold_binder<T>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T>
-    where
-        T: TypeFoldable<'tcx>,
-    {
-        self.binder_index.shift_in(1);
-        let t = t.super_fold_with(self);
-        self.binder_index.shift_out(1);
-        t
-    }
-
-    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
-        match *r {
-            ty::ReLateBound(index, ..) => {
-                if index >= self.binder_index {
-                    bug!("escaping late-bound region during canonicalization");
-                } else {
-                    r
-                }
-            }
-
-            ty::ReVar(vid) => {
-                let r = self
-                    .infcx
-                    .unwrap()
-                    .borrow_region_constraints()
-                    .opportunistic_resolve_var(self.tcx, vid);
-                debug!(
-                    "canonical: region var found with vid {:?}, \
-                     opportunistically resolved to {:?}",
-                    vid, r
-                );
-                self.canonicalize_region_mode.canonicalize_free_region(self, r)
-            }
-
-            ty::ReStatic
-            | ty::ReEarlyBound(..)
-            | ty::ReFree(_)
-            | ty::ReScope(_)
-            | ty::RePlaceholder(..)
-            | ty::ReEmpty
-            | ty::ReErased => self.canonicalize_region_mode.canonicalize_free_region(self, r),
-
-            ty::ReClosureBound(..) => {
-                bug!("closure bound region encountered during canonicalization");
-            }
-        }
-    }
-
-    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
-        match t.kind {
-            ty::Infer(ty::TyVar(vid)) => {
-                debug!("canonical: type var found with vid {:?}", vid);
-                match self.infcx.unwrap().probe_ty_var(vid) {
-                    // `t` could be a float / int variable; canonicalize that instead.
-                    Ok(t) => {
-                        debug!("(resolved to {:?})", t);
-                        self.fold_ty(t)
-                    }
-
-                    // `TyVar(vid)` is unresolved, track its universe index in the canonicalized
-                    // result.
-                    Err(mut ui) => {
-                        if !self.infcx.unwrap().tcx.sess.opts.debugging_opts.chalk {
-                            // FIXME: perf problem described in #55921.
-                            ui = ty::UniverseIndex::ROOT;
-                        }
-                        self.canonicalize_ty_var(
-                            CanonicalVarInfo {
-                                kind: CanonicalVarKind::Ty(CanonicalTyVarKind::General(ui)),
-                            },
-                            t,
-                        )
-                    }
-                }
-            }
-
-            ty::Infer(ty::IntVar(_)) => self.canonicalize_ty_var(
-                CanonicalVarInfo { kind: CanonicalVarKind::Ty(CanonicalTyVarKind::Int) },
-                t,
-            ),
-
-            ty::Infer(ty::FloatVar(_)) => self.canonicalize_ty_var(
-                CanonicalVarInfo { kind: CanonicalVarKind::Ty(CanonicalTyVarKind::Float) },
-                t,
-            ),
-
-            ty::Infer(ty::FreshTy(_))
-            | ty::Infer(ty::FreshIntTy(_))
-            | ty::Infer(ty::FreshFloatTy(_)) => {
-                bug!("encountered a fresh type during canonicalization")
-            }
-
-            ty::Placeholder(placeholder) => self.canonicalize_ty_var(
-                CanonicalVarInfo { kind: CanonicalVarKind::PlaceholderTy(placeholder) },
-                t,
-            ),
-
-            ty::Bound(debruijn, _) => {
-                if debruijn >= self.binder_index {
-                    bug!("escaping bound type during canonicalization")
-                } else {
-                    t
-                }
-            }
-
-            ty::Closure(..)
-            | ty::Generator(..)
-            | ty::GeneratorWitness(..)
-            | ty::Bool
-            | ty::Char
-            | ty::Int(..)
-            | ty::Uint(..)
-            | ty::Float(..)
-            | ty::Adt(..)
-            | ty::Str
-            | ty::Error
-            | ty::Array(..)
-            | ty::Slice(..)
-            | ty::RawPtr(..)
-            | ty::Ref(..)
-            | ty::FnDef(..)
-            | ty::FnPtr(_)
-            | ty::Dynamic(..)
-            | ty::Never
-            | ty::Tuple(..)
-            | ty::Projection(..)
-            | ty::UnnormalizedProjection(..)
-            | ty::Foreign(..)
-            | ty::Param(..)
-            | ty::Opaque(..) => {
-                if t.flags.intersects(self.needs_canonical_flags) {
-                    t.super_fold_with(self)
-                } else {
-                    t
-                }
-            }
-        }
-    }
-
-    fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> {
-        match ct.val {
-            ty::ConstKind::Infer(InferConst::Var(vid)) => {
-                debug!("canonical: const var found with vid {:?}", vid);
-                match self.infcx.unwrap().probe_const_var(vid) {
-                    Ok(c) => {
-                        debug!("(resolved to {:?})", c);
-                        return self.fold_const(c);
-                    }
-
-                    // `ConstVar(vid)` is unresolved, track its universe index in the
-                    // canonicalized result
-                    Err(mut ui) => {
-                        if !self.infcx.unwrap().tcx.sess.opts.debugging_opts.chalk {
-                            // FIXME: perf problem described in #55921.
-                            ui = ty::UniverseIndex::ROOT;
-                        }
-                        return self.canonicalize_const_var(
-                            CanonicalVarInfo { kind: CanonicalVarKind::Const(ui) },
-                            ct,
-                        );
-                    }
-                }
-            }
-            ty::ConstKind::Infer(InferConst::Fresh(_)) => {
-                bug!("encountered a fresh const during canonicalization")
-            }
-            ty::ConstKind::Bound(debruijn, _) => {
-                if debruijn >= self.binder_index {
-                    bug!("escaping bound type during canonicalization")
-                } else {
-                    return ct;
-                }
-            }
-            ty::ConstKind::Placeholder(placeholder) => {
-                return self.canonicalize_const_var(
-                    CanonicalVarInfo { kind: CanonicalVarKind::PlaceholderConst(placeholder) },
-                    ct,
-                );
-            }
-            _ => {}
-        }
-
-        let flags = FlagComputation::for_const(ct);
-        if flags.intersects(self.needs_canonical_flags) { ct.super_fold_with(self) } else { ct }
-    }
-}
-
-impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
-    /// The main `canonicalize` method, shared impl of
-    /// `canonicalize_query` and `canonicalize_response`.
-    fn canonicalize<V>(
-        value: &V,
-        infcx: Option<&InferCtxt<'_, 'tcx>>,
-        tcx: TyCtxt<'tcx>,
-        canonicalize_region_mode: &dyn CanonicalizeRegionMode,
-        query_state: &mut OriginalQueryValues<'tcx>,
-    ) -> Canonicalized<'tcx, V>
-    where
-        V: TypeFoldable<'tcx>,
-    {
-        let needs_canonical_flags = if canonicalize_region_mode.any() {
-            TypeFlags::KEEP_IN_LOCAL_TCX |
-            TypeFlags::HAS_FREE_REGIONS | // `HAS_RE_PLACEHOLDER` implies `HAS_FREE_REGIONS`
-            TypeFlags::HAS_TY_PLACEHOLDER |
-            TypeFlags::HAS_CT_PLACEHOLDER
-        } else {
-            TypeFlags::KEEP_IN_LOCAL_TCX
-                | TypeFlags::HAS_RE_PLACEHOLDER
-                | TypeFlags::HAS_TY_PLACEHOLDER
-                | TypeFlags::HAS_CT_PLACEHOLDER
-        };
-
-        // Fast path: nothing that needs to be canonicalized.
-        if !value.has_type_flags(needs_canonical_flags) {
-            let canon_value = Canonical {
-                max_universe: ty::UniverseIndex::ROOT,
-                variables: List::empty(),
-                value: value.clone(),
-            };
-            return canon_value;
-        }
-
-        let mut canonicalizer = Canonicalizer {
-            infcx,
-            tcx,
-            canonicalize_region_mode,
-            needs_canonical_flags,
-            variables: SmallVec::new(),
-            query_state,
-            indices: FxHashMap::default(),
-            binder_index: ty::INNERMOST,
-        };
-        let out_value = value.fold_with(&mut canonicalizer);
-
-        // Once we have canonicalized `out_value`, it should not
-        // contain anything that ties it to this inference context
-        // anymore, so it should live in the global arena.
-        debug_assert!(!out_value.has_type_flags(TypeFlags::KEEP_IN_LOCAL_TCX));
-
-        let canonical_variables = tcx.intern_canonical_var_infos(&canonicalizer.variables);
-
-        let max_universe = canonical_variables
-            .iter()
-            .map(|cvar| cvar.universe())
-            .max()
-            .unwrap_or(ty::UniverseIndex::ROOT);
-
-        Canonical { max_universe, variables: canonical_variables, value: out_value }
-    }
-
-    /// Creates a canonical variable replacing `kind` from the input,
-    /// or returns an existing variable if `kind` has already been
-    /// seen. `kind` is expected to be an unbound variable (or
-    /// potentially a free region).
-    fn canonical_var(&mut self, info: CanonicalVarInfo, kind: GenericArg<'tcx>) -> BoundVar {
-        let Canonicalizer { variables, query_state, indices, .. } = self;
-
-        let var_values = &mut query_state.var_values;
-
-        // This code is hot. `variables` and `var_values` are usually small
-        // (fewer than 8 elements ~95% of the time). They are SmallVec's to
-        // avoid allocations in those cases. We also don't use `indices` to
-        // determine if a kind has been seen before until the limit of 8 has
-        // been exceeded, to also avoid allocations for `indices`.
-        let var = if !var_values.spilled() {
-            // `var_values` is stack-allocated. `indices` isn't used yet. Do a
-            // direct linear search of `var_values`.
-            if let Some(idx) = var_values.iter().position(|&k| k == kind) {
-                // `kind` is already present in `var_values`.
-                BoundVar::new(idx)
-            } else {
-                // `kind` isn't present in `var_values`. Append it. Likewise
-                // for `info` and `variables`.
-                variables.push(info);
-                var_values.push(kind);
-                assert_eq!(variables.len(), var_values.len());
-
-                // If `var_values` has become big enough to be heap-allocated,
-                // fill up `indices` to facilitate subsequent lookups.
-                if var_values.spilled() {
-                    assert!(indices.is_empty());
-                    *indices = var_values
-                        .iter()
-                        .enumerate()
-                        .map(|(i, &kind)| (kind, BoundVar::new(i)))
-                        .collect();
-                }
-                // The cv is the index of the appended element.
-                BoundVar::new(var_values.len() - 1)
-            }
-        } else {
-            // `var_values` is large. Do a hashmap search via `indices`.
-            *indices.entry(kind).or_insert_with(|| {
-                variables.push(info);
-                var_values.push(kind);
-                assert_eq!(variables.len(), var_values.len());
-                BoundVar::new(variables.len() - 1)
-            })
-        };
-
-        var
-    }
-
-    /// Shorthand helper that creates a canonical region variable for
-    /// `r` (always in the root universe). The reason that we always
-    /// put these variables into the root universe is because this
-    /// method is used during **query construction:** in that case, we
-    /// are taking all the regions and just putting them into the most
-    /// generic context we can. This may generate solutions that don't
-    /// fit (e.g., that equate some region variable with a placeholder
-    /// it can't name) on the caller side, but that's ok, the caller
-    /// can figure that out. In the meantime, it maximizes our
-    /// caching.
-    ///
-    /// (This works because unification never fails -- and hence trait
-    /// selection is never affected -- due to a universe mismatch.)
-    fn canonical_var_for_region_in_root_universe(
-        &mut self,
-        r: ty::Region<'tcx>,
-    ) -> ty::Region<'tcx> {
-        self.canonical_var_for_region(
-            CanonicalVarInfo { kind: CanonicalVarKind::Region(ty::UniverseIndex::ROOT) },
-            r,
-        )
-    }
-
-    /// Returns the universe in which `vid` is defined.
-    fn region_var_universe(&self, vid: ty::RegionVid) -> ty::UniverseIndex {
-        self.infcx.unwrap().borrow_region_constraints().var_universe(vid)
-    }
-
-    /// Creates a canonical variable (with the given `info`)
-    /// representing the region `r`; return a region referencing it.
-    fn canonical_var_for_region(
-        &mut self,
-        info: CanonicalVarInfo,
-        r: ty::Region<'tcx>,
-    ) -> ty::Region<'tcx> {
-        let var = self.canonical_var(info, r.into());
-        let region = ty::ReLateBound(self.binder_index, ty::BoundRegion::BrAnon(var.as_u32()));
-        self.tcx().mk_region(region)
-    }
-
-    /// Given a type variable `ty_var` of the given kind, first check
-    /// if `ty_var` is bound to anything; if so, canonicalize
-    /// *that*. Otherwise, create a new canonical variable for
-    /// `ty_var`.
-    fn canonicalize_ty_var(&mut self, info: CanonicalVarInfo, ty_var: Ty<'tcx>) -> Ty<'tcx> {
-        let infcx = self.infcx.expect("encountered ty-var without infcx");
-        let bound_to = infcx.shallow_resolve(ty_var);
-        if bound_to != ty_var {
-            self.fold_ty(bound_to)
-        } else {
-            let var = self.canonical_var(info, ty_var.into());
-            self.tcx().mk_ty(ty::Bound(self.binder_index, var.into()))
-        }
-    }
-
-    /// Given a type variable `const_var` of the given kind, first check
-    /// if `const_var` is bound to anything; if so, canonicalize
-    /// *that*. Otherwise, create a new canonical variable for
-    /// `const_var`.
-    fn canonicalize_const_var(
-        &mut self,
-        info: CanonicalVarInfo,
-        const_var: &'tcx ty::Const<'tcx>,
-    ) -> &'tcx ty::Const<'tcx> {
-        let infcx = self.infcx.expect("encountered const-var without infcx");
-        let bound_to = infcx.shallow_resolve(const_var);
-        if bound_to != const_var {
-            self.fold_const(bound_to)
-        } else {
-            let var = self.canonical_var(info, const_var.into());
-            self.tcx().mk_const(ty::Const {
-                val: ty::ConstKind::Bound(self.binder_index, var.into()),
-                ty: self.fold_ty(const_var.ty),
-            })
-        }
-    }
-}
diff --git a/src/librustc/infer/canonical/mod.rs b/src/librustc/infer/canonical/mod.rs
deleted file mode 100644
index a588d3d..0000000
--- a/src/librustc/infer/canonical/mod.rs
+++ /dev/null
@@ -1,493 +0,0 @@
-//! **Canonicalization** is the key to constructing a query in the
-//! middle of type inference. Ordinarily, it is not possible to store
-//! types from type inference in query keys, because they contain
-//! references to inference variables whose lifetimes are too short
-//! and so forth. Canonicalizing a value T1 using `canonicalize_query`
-//! produces two things:
-//!
-//! - a value T2 where each unbound inference variable has been
-//!   replaced with a **canonical variable**;
-//! - a map M (of type `CanonicalVarValues`) from those canonical
-//!   variables back to the original.
-//!
-//! We can then do queries using T2. These will give back constraints
-//! on the canonical variables which can be translated, using the map
-//! M, into constraints in our source context. This process of
-//! translating the results back is done by the
-//! `instantiate_query_result` method.
-//!
-//! For a more detailed look at what is happening here, check
-//! out the [chapter in the rustc guide][c].
-//!
-//! [c]: https://rust-lang.github.io/rustc-guide/traits/canonicalization.html
-
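As a rough model of the two outputs described above -- the rewritten value and the map `M` back to the original variables -- here is a self-contained sketch. The `Term` enum and `canonicalize` function are toy stand-ins, not rustc's `GenericArg`/`CanonicalVarValues`:

```rust
/// Toy value language: inference variables, canonical variables, constants.
#[derive(Clone, Debug, PartialEq)]
enum Term {
    Infer(u32),     // an inference variable, local to some inference context
    Canonical(u32), // a canonical variable, numbered from 0 by first appearance
    Int(i64),
}

/// Returns the canonicalized value plus the map from canonical variables
/// back to the original inference variables (the analogue of `M`).
fn canonicalize(value: &[Term]) -> (Vec<Term>, Vec<u32>) {
    let mut var_values: Vec<u32> = Vec::new();
    let mut canonical = Vec::with_capacity(value.len());
    for t in value {
        canonical.push(match *t {
            Term::Infer(vid) => {
                let existing = var_values.iter().position(|&v| v == vid);
                let idx = existing.unwrap_or_else(|| {
                    var_values.push(vid);
                    var_values.len() - 1
                });
                Term::Canonical(idx as u32)
            }
            ref other => other.clone(),
        });
    }
    (canonical, var_values)
}

fn main() {
    // Inference variables ?3 and ?7 become canonical variables ^0 and ^1.
    let value = [Term::Infer(3), Term::Int(1), Term::Infer(7), Term::Infer(3)];
    let (canonical, var_values) = canonicalize(&value);
    assert_eq!(
        canonical,
        vec![Term::Canonical(0), Term::Int(1), Term::Canonical(1), Term::Canonical(0)]
    );
    // The map M: canonical variable i stands for inference variable var_values[i].
    assert_eq!(var_values, vec![3, 7]);
}
```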
-use crate::infer::region_constraints::MemberConstraint;
-use crate::infer::{ConstVariableOrigin, ConstVariableOriginKind};
-use crate::infer::{InferCtxt, RegionVariableOrigin, TypeVariableOrigin, TypeVariableOriginKind};
-use crate::ty::fold::TypeFoldable;
-use crate::ty::subst::GenericArg;
-use crate::ty::{self, BoundVar, List, Region, TyCtxt};
-use rustc_index::vec::IndexVec;
-use rustc_macros::HashStable;
-use rustc_serialize::UseSpecializedDecodable;
-use rustc_span::source_map::Span;
-use smallvec::SmallVec;
-use std::ops::Index;
-
-mod canonicalizer;
-
-pub mod query_response;
-
-mod substitute;
-
-/// A "canonicalized" type `V` is one where all free inference
-/// variables have been rewritten to "canonical vars". These are
-/// numbered starting from 0 in order of first appearance.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-#[derive(HashStable, TypeFoldable, Lift)]
-pub struct Canonical<'tcx, V> {
-    pub max_universe: ty::UniverseIndex,
-    pub variables: CanonicalVarInfos<'tcx>,
-    pub value: V,
-}
-
-pub type CanonicalVarInfos<'tcx> = &'tcx List<CanonicalVarInfo>;
-
-impl<'tcx> UseSpecializedDecodable for CanonicalVarInfos<'tcx> {}
-
-/// A set of values corresponding to the canonical variables from some
-/// `Canonical`. You can give these values to
-/// `canonical_value.substitute` to substitute them into the canonical
-/// value at the right places.
-///
-/// When you canonicalize a value `V`, you get back one of these
-/// vectors with the original values that were replaced by canonical
-/// variables. You will need to supply it later to instantiate the
-/// canonicalized query response.
-#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-#[derive(HashStable, TypeFoldable, Lift)]
-pub struct CanonicalVarValues<'tcx> {
-    pub var_values: IndexVec<BoundVar, GenericArg<'tcx>>,
-}
-
-/// When we canonicalize a value to form a query, we wind up replacing
-/// various parts of it with canonical variables. This struct stores
-/// those replaced bits to remember for when we process the query
-/// result.
-#[derive(Clone, Debug)]
-pub struct OriginalQueryValues<'tcx> {
-    /// Map from the universes that appear in the query to the
-    /// universes in the caller context. For the time being, we only
-    /// ever put ROOT values into the query, so this map is very
-    /// simple.
-    pub universe_map: SmallVec<[ty::UniverseIndex; 4]>,
-
-    /// This is equivalent to `CanonicalVarValues`, but using a
-    /// `SmallVec` yields a significant performance win.
-    pub var_values: SmallVec<[GenericArg<'tcx>; 8]>,
-}
-
-impl Default for OriginalQueryValues<'tcx> {
-    fn default() -> Self {
-        let mut universe_map = SmallVec::default();
-        universe_map.push(ty::UniverseIndex::ROOT);
-
-        Self { universe_map, var_values: SmallVec::default() }
-    }
-}
-
-/// Information about a canonical variable that is included with the
-/// canonical value. This is sufficient information for code to create
-/// a copy of the canonical value in some other inference context,
-/// with fresh inference variables replacing the canonical values.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable, HashStable)]
-pub struct CanonicalVarInfo {
-    pub kind: CanonicalVarKind,
-}
-
-impl CanonicalVarInfo {
-    pub fn universe(&self) -> ty::UniverseIndex {
-        self.kind.universe()
-    }
-
-    pub fn is_existential(&self) -> bool {
-        match self.kind {
-            CanonicalVarKind::Ty(_) => true,
-            CanonicalVarKind::PlaceholderTy(_) => false,
-            CanonicalVarKind::Region(_) => true,
-            CanonicalVarKind::PlaceholderRegion(..) => false,
-            CanonicalVarKind::Const(_) => true,
-            CanonicalVarKind::PlaceholderConst(_) => false,
-        }
-    }
-}
-
-/// Describes the "kind" of the canonical variable. This is a "kind"
-/// in the type-theory sense of the term -- i.e., a "meta" type system
-/// that analyzes type-like values.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable, HashStable)]
-pub enum CanonicalVarKind {
-    /// Some kind of type inference variable.
-    Ty(CanonicalTyVarKind),
-
-    /// A "placeholder" that represents "any type".
-    PlaceholderTy(ty::PlaceholderType),
-
-    /// Region variable `'?R`.
-    Region(ty::UniverseIndex),
-
-    /// A "placeholder" that represents "any region". Created when you
-    /// are solving a goal like `for<'a> T: Foo<'a>` to represent the
-    /// bound region `'a`.
-    PlaceholderRegion(ty::PlaceholderRegion),
-
-    /// Some kind of const inference variable.
-    Const(ty::UniverseIndex),
-
-    /// A "placeholder" that represents "any const".
-    PlaceholderConst(ty::PlaceholderConst),
-}
-
-impl CanonicalVarKind {
-    pub fn universe(self) -> ty::UniverseIndex {
-        match self {
-            CanonicalVarKind::Ty(kind) => match kind {
-                CanonicalTyVarKind::General(ui) => ui,
-                CanonicalTyVarKind::Float | CanonicalTyVarKind::Int => ty::UniverseIndex::ROOT,
-            },
-
-            CanonicalVarKind::PlaceholderTy(placeholder) => placeholder.universe,
-            CanonicalVarKind::Region(ui) => ui,
-            CanonicalVarKind::PlaceholderRegion(placeholder) => placeholder.universe,
-            CanonicalVarKind::Const(ui) => ui,
-            CanonicalVarKind::PlaceholderConst(placeholder) => placeholder.universe,
-        }
-    }
-}
-
-/// Rust actually has more than one category of type variables;
-/// notably, the type variables we create for literals (e.g., 22 or
-/// 22.) can only be instantiated with integral/float types (e.g.,
-/// usize or f32). In order to faithfully reproduce a type, we need to
-/// know what set of types a given type variable can be unified with.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable, HashStable)]
-pub enum CanonicalTyVarKind {
-    /// General type variable `?T` that can be unified with arbitrary types.
-    General(ty::UniverseIndex),
-
-    /// Integral type variable `?I` (that can only be unified with integral types).
-    Int,
-
-    /// Floating-point type variable `?F` (that can only be unified with float types).
-    Float,
-}
-
-/// After we execute a query with a canonicalized key, we get back a
-/// `Canonical<QueryResponse<..>>`. You can use
-/// `instantiate_query_result` to access the data in this result.
-#[derive(Clone, Debug, HashStable, TypeFoldable, Lift)]
-pub struct QueryResponse<'tcx, R> {
-    pub var_values: CanonicalVarValues<'tcx>,
-    pub region_constraints: QueryRegionConstraints<'tcx>,
-    pub certainty: Certainty,
-    pub value: R,
-}
-
-#[derive(Clone, Debug, Default, HashStable, TypeFoldable, Lift)]
-pub struct QueryRegionConstraints<'tcx> {
-    pub outlives: Vec<QueryOutlivesConstraint<'tcx>>,
-    pub member_constraints: Vec<MemberConstraint<'tcx>>,
-}
-
-impl QueryRegionConstraints<'_> {
-    /// Represents an empty (trivially true) set of region
-    /// constraints.
-    pub fn is_empty(&self) -> bool {
-        self.outlives.is_empty() && self.member_constraints.is_empty()
-    }
-}
-
-pub type Canonicalized<'tcx, V> = Canonical<'tcx, V>;
-
-pub type CanonicalizedQueryResponse<'tcx, T> = &'tcx Canonical<'tcx, QueryResponse<'tcx, T>>;
-
-/// Indicates whether or not we were able to prove the query to be
-/// true.
-#[derive(Copy, Clone, Debug, HashStable)]
-pub enum Certainty {
-    /// The query is known to be true, presuming that you apply the
-    /// given `var_values` and the region-constraints are satisfied.
-    Proven,
-
-    /// The query is not known to be true, but also not known to be
-    /// false. The `var_values` represent *either* values that must
-    /// hold in order for the query to be true, or helpful tips that
-    /// *might* make it true. Currently rustc's trait solver cannot
-    /// distinguish the two (e.g., due to our preference for where
-    /// clauses over impls).
-    ///
-    /// After some unifications and other things have been done, it
-    /// makes sense to try to prove the query again -- of course, at
-    /// that point, the canonical form will be different, making this
-    /// a distinct query.
-    Ambiguous,
-}
-
-impl Certainty {
-    pub fn is_proven(&self) -> bool {
-        match self {
-            Certainty::Proven => true,
-            Certainty::Ambiguous => false,
-        }
-    }
-
-    pub fn is_ambiguous(&self) -> bool {
-        !self.is_proven()
-    }
-}
-
-impl<'tcx, R> QueryResponse<'tcx, R> {
-    pub fn is_proven(&self) -> bool {
-        self.certainty.is_proven()
-    }
-
-    pub fn is_ambiguous(&self) -> bool {
-        !self.is_proven()
-    }
-}
-
-impl<'tcx, R> Canonical<'tcx, QueryResponse<'tcx, R>> {
-    pub fn is_proven(&self) -> bool {
-        self.value.is_proven()
-    }
-
-    pub fn is_ambiguous(&self) -> bool {
-        !self.is_proven()
-    }
-}
-
-impl<'tcx, V> Canonical<'tcx, V> {
-    /// Allows you to map the `value` of a canonical while keeping the
-    /// same set of bound variables.
-    ///
-    /// **WARNING:** This function is very easy to mis-use, hence the
-    /// name!  In particular, the new value `W` must use all **the
-    /// same type/region variables** in **precisely the same order**
-    /// as the original! (The ordering is defined by the
-    /// `TypeFoldable` implementation of the type in question.)
-    ///
-    /// An example of a **correct** use of this:
-    ///
-    /// ```rust,ignore (not real code)
-    /// let a: Canonical<'_, T> = ...;
-    /// let b: Canonical<'_, (T,)> = a.unchecked_map(|v| (v, ));
-    /// ```
-    ///
-    /// An example of an **incorrect** use of this:
-    ///
-    /// ```rust,ignore (not real code)
-    /// let a: Canonical<'tcx, T> = ...;
-    /// let ty: Ty<'tcx> = ...;
-    /// let b: Canonical<'tcx, (T, Ty<'tcx>)> = a.unchecked_map(|v| (v, ty));
-    /// ```
-    pub fn unchecked_map<W>(self, map_op: impl FnOnce(V) -> W) -> Canonical<'tcx, W> {
-        let Canonical { max_universe, variables, value } = self;
-        Canonical { max_universe, variables, value: map_op(value) }
-    }
-}
-
-pub type QueryOutlivesConstraint<'tcx> =
-    ty::Binder<ty::OutlivesPredicate<GenericArg<'tcx>, Region<'tcx>>>;
-
-impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
-    /// Creates a substitution S for the canonical value with fresh
-    /// inference variables and applies it to the canonical value.
-    /// Returns both the instantiated result *and* the substitution S.
-    ///
-    /// This is only meant to be invoked as part of constructing an
-    /// inference context at the start of a query (see
-    /// `InferCtxtBuilder::enter_with_canonical`). It basically
-    /// brings the canonical value "into scope" within your new infcx.
-    ///
-    /// At the end of processing, the substitution S (once
-    /// canonicalized) then represents the values that you computed
-    /// for each of the canonical inputs to your query.
-    pub fn instantiate_canonical_with_fresh_inference_vars<T>(
-        &self,
-        span: Span,
-        canonical: &Canonical<'tcx, T>,
-    ) -> (T, CanonicalVarValues<'tcx>)
-    where
-        T: TypeFoldable<'tcx>,
-    {
-        // For each universe that is referred to in the incoming
-        // query, create a universe in our local inference context. In
-        // practice, as of this writing, all queries have no universes
-        // in them, so this code has no effect, but it is looking
-        // forward to the day when we *do* want to carry universes
-        // through into queries.
-        let universes: IndexVec<ty::UniverseIndex, _> = std::iter::once(ty::UniverseIndex::ROOT)
-            .chain((0..canonical.max_universe.as_u32()).map(|_| self.create_next_universe()))
-            .collect();
-
-        let canonical_inference_vars =
-            self.instantiate_canonical_vars(span, canonical.variables, |ui| universes[ui]);
-        let result = canonical.substitute(self.tcx, &canonical_inference_vars);
-        (result, canonical_inference_vars)
-    }
-
-    /// Given the "infos" about the canonical variables from some
-    /// canonical, creates fresh variables with the same
-    /// characteristics (see `instantiate_canonical_var` for
-    /// details). You can then use `substitute` to instantiate the
-    /// canonical variable with these inference variables.
-    fn instantiate_canonical_vars(
-        &self,
-        span: Span,
-        variables: &List<CanonicalVarInfo>,
-        universe_map: impl Fn(ty::UniverseIndex) -> ty::UniverseIndex,
-    ) -> CanonicalVarValues<'tcx> {
-        let var_values: IndexVec<BoundVar, GenericArg<'tcx>> = variables
-            .iter()
-            .map(|info| self.instantiate_canonical_var(span, *info, &universe_map))
-            .collect();
-
-        CanonicalVarValues { var_values }
-    }
-
-    /// Given the "info" about a canonical variable, creates a fresh
-    /// variable for it. If this is an existentially quantified
-    /// variable, then you'll get a new inference variable; if it is a
-    /// universally quantified variable, you get a placeholder.
-    fn instantiate_canonical_var(
-        &self,
-        span: Span,
-        cv_info: CanonicalVarInfo,
-        universe_map: impl Fn(ty::UniverseIndex) -> ty::UniverseIndex,
-    ) -> GenericArg<'tcx> {
-        match cv_info.kind {
-            CanonicalVarKind::Ty(ty_kind) => {
-                let ty = match ty_kind {
-                    CanonicalTyVarKind::General(ui) => self.next_ty_var_in_universe(
-                        TypeVariableOrigin { kind: TypeVariableOriginKind::MiscVariable, span },
-                        universe_map(ui),
-                    ),
-
-                    CanonicalTyVarKind::Int => self.next_int_var(),
-
-                    CanonicalTyVarKind::Float => self.next_float_var(),
-                };
-                ty.into()
-            }
-
-            CanonicalVarKind::PlaceholderTy(ty::PlaceholderType { universe, name }) => {
-                let universe_mapped = universe_map(universe);
-                let placeholder_mapped = ty::PlaceholderType { universe: universe_mapped, name };
-                self.tcx.mk_ty(ty::Placeholder(placeholder_mapped)).into()
-            }
-
-            CanonicalVarKind::Region(ui) => self
-                .next_region_var_in_universe(
-                    RegionVariableOrigin::MiscVariable(span),
-                    universe_map(ui),
-                )
-                .into(),
-
-            CanonicalVarKind::PlaceholderRegion(ty::PlaceholderRegion { universe, name }) => {
-                let universe_mapped = universe_map(universe);
-                let placeholder_mapped = ty::PlaceholderRegion { universe: universe_mapped, name };
-                self.tcx.mk_region(ty::RePlaceholder(placeholder_mapped)).into()
-            }
-
-            CanonicalVarKind::Const(ui) => self
-                .next_const_var_in_universe(
-                    self.next_ty_var_in_universe(
-                        TypeVariableOrigin { kind: TypeVariableOriginKind::MiscVariable, span },
-                        universe_map(ui),
-                    ),
-                    ConstVariableOrigin { kind: ConstVariableOriginKind::MiscVariable, span },
-                    universe_map(ui),
-                )
-                .into(),
-
-            CanonicalVarKind::PlaceholderConst(ty::PlaceholderConst { universe, name }) => {
-                let universe_mapped = universe_map(universe);
-                let placeholder_mapped = ty::PlaceholderConst { universe: universe_mapped, name };
-                self.tcx
-                    .mk_const(ty::Const {
-                        val: ty::ConstKind::Placeholder(placeholder_mapped),
-                        ty: self.tcx.types.err, // FIXME(const_generics)
-                    })
-                    .into()
-            }
-        }
-    }
-}
-
-CloneTypeFoldableAndLiftImpls! {
-    crate::infer::canonical::Certainty,
-    crate::infer::canonical::CanonicalVarInfo,
-    crate::infer::canonical::CanonicalVarKind,
-}
-
-CloneTypeFoldableImpls! {
-    for <'tcx> {
-        crate::infer::canonical::CanonicalVarInfos<'tcx>,
-    }
-}
-
-impl<'tcx> CanonicalVarValues<'tcx> {
-    pub fn len(&self) -> usize {
-        self.var_values.len()
-    }
-
-    /// Makes an identity substitution from this one: each bound var
-    /// is matched to the same bound var, preserving the original kinds.
-    /// For example, if we have:
-    /// `self.var_values == [Type(u32), Lifetime('a), Type(u64)]`
-    /// we'll return a substitution `subst` with:
-    /// `subst.var_values == [Type(^0), Lifetime(^1), Type(^2)]`.
-    pub fn make_identity(&self, tcx: TyCtxt<'tcx>) -> Self {
-        use crate::ty::subst::GenericArgKind;
-
-        CanonicalVarValues {
-            var_values: self
-                .var_values
-                .iter()
-                .zip(0..)
-                .map(|(kind, i)| match kind.unpack() {
-                    GenericArgKind::Type(..) => {
-                        tcx.mk_ty(ty::Bound(ty::INNERMOST, ty::BoundVar::from_u32(i).into())).into()
-                    }
-                    GenericArgKind::Lifetime(..) => tcx
-                        .mk_region(ty::ReLateBound(ty::INNERMOST, ty::BoundRegion::BrAnon(i)))
-                        .into(),
-                    GenericArgKind::Const(ct) => tcx
-                        .mk_const(ty::Const {
-                            ty: ct.ty,
-                            val: ty::ConstKind::Bound(ty::INNERMOST, ty::BoundVar::from_u32(i)),
-                        })
-                        .into(),
-                })
-                .collect(),
-        }
-    }
-}
-
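To make the doc comment's example for `make_identity` concrete, here is a toy version in which each value is replaced by a bound variable `^i` of the same kind (the `Arg` enum and its string payloads are illustrative, not rustc's `GenericArg`):

```rust
#[derive(Clone, Debug, PartialEq)]
enum Arg {
    Type(String),
    Lifetime(String),
}

/// Each entry is replaced by a bound variable at its own index,
/// preserving whether it was a type or a lifetime.
fn make_identity(var_values: &[Arg]) -> Vec<Arg> {
    var_values
        .iter()
        .enumerate()
        .map(|(i, kind)| match kind {
            Arg::Type(_) => Arg::Type(format!("^{}", i)),
            Arg::Lifetime(_) => Arg::Lifetime(format!("^{}", i)),
        })
        .collect()
}

fn main() {
    let values = vec![
        Arg::Type("u32".into()),
        Arg::Lifetime("'a".into()),
        Arg::Type("u64".into()),
    ];
    assert_eq!(
        make_identity(&values),
        vec![
            Arg::Type("^0".into()),
            Arg::Lifetime("^1".into()),
            Arg::Type("^2".into()),
        ]
    );
}
```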
-impl<'a, 'tcx> IntoIterator for &'a CanonicalVarValues<'tcx> {
-    type Item = GenericArg<'tcx>;
-    type IntoIter = ::std::iter::Cloned<::std::slice::Iter<'a, GenericArg<'tcx>>>;
-
-    fn into_iter(self) -> Self::IntoIter {
-        self.var_values.iter().cloned()
-    }
-}
-
-impl<'tcx> Index<BoundVar> for CanonicalVarValues<'tcx> {
-    type Output = GenericArg<'tcx>;
-
-    fn index(&self, value: BoundVar) -> &GenericArg<'tcx> {
-        &self.var_values[value]
-    }
-}
diff --git a/src/librustc/infer/canonical/query_response.rs b/src/librustc/infer/canonical/query_response.rs
deleted file mode 100644
index 012900f..0000000
--- a/src/librustc/infer/canonical/query_response.rs
+++ /dev/null
@@ -1,658 +0,0 @@
-//! This module contains the code to instantiate a "query result", and
-//! in particular to extract out the resulting region obligations and
-//! encode them therein.
-//!
-//! For an overview of what canonicalization is and how it fits into
-//! rustc, check out the [chapter in the rustc guide][c].
-//!
-//! [c]: https://rust-lang.github.io/rustc-guide/traits/canonicalization.html
-
-use crate::arena::ArenaAllocatable;
-use crate::infer::canonical::substitute::substitute_value;
-use crate::infer::canonical::{
-    Canonical, CanonicalVarValues, CanonicalizedQueryResponse, Certainty, OriginalQueryValues,
-    QueryOutlivesConstraint, QueryRegionConstraints, QueryResponse,
-};
-use crate::infer::region_constraints::{Constraint, RegionConstraintData};
-use crate::infer::InferCtxtBuilder;
-use crate::infer::{InferCtxt, InferOk, InferResult};
-use crate::traits::query::{Fallible, NoSolution};
-use crate::traits::TraitEngine;
-use crate::traits::{Obligation, ObligationCause, PredicateObligation};
-use crate::ty::fold::TypeFoldable;
-use crate::ty::subst::{GenericArg, GenericArgKind};
-use crate::ty::{self, BoundVar, Ty, TyCtxt};
-use rustc_data_structures::captures::Captures;
-use rustc_index::vec::Idx;
-use rustc_index::vec::IndexVec;
-use rustc_span::DUMMY_SP;
-use std::fmt::Debug;
-
-impl<'tcx> InferCtxtBuilder<'tcx> {
-    /// The "main method" for a canonicalized trait query. Given the
-    /// canonical key `canonical_key`, this method will create a new
-    /// inference context, instantiate the key, and run your operation
-    /// `op`. The operation should yield up a result (of type `R`) as
-    /// well as a set of trait obligations that must be fully
-    /// satisfied. These obligations will be processed and the
-    /// canonical result created.
-    ///
-    /// Returns `NoSolution` in the event of any error.
-    ///
-    /// (It might be mildly nicer to implement this on `TyCtxt`, and
-    /// not `InferCtxtBuilder`, but that is a bit tricky right now.
-    /// In part because we would need a `for<'tcx>` sort of
-    /// bound for the closure and in part because it is convenient to
-    /// have `'tcx` be free on this function so that we can talk about
-    /// `K: TypeFoldable<'tcx>`.)
-    pub fn enter_canonical_trait_query<K, R>(
-        &mut self,
-        canonical_key: &Canonical<'tcx, K>,
-        operation: impl FnOnce(&InferCtxt<'_, 'tcx>, &mut dyn TraitEngine<'tcx>, K) -> Fallible<R>,
-    ) -> Fallible<CanonicalizedQueryResponse<'tcx, R>>
-    where
-        K: TypeFoldable<'tcx>,
-        R: Debug + TypeFoldable<'tcx>,
-        Canonical<'tcx, QueryResponse<'tcx, R>>: ArenaAllocatable,
-    {
-        self.enter_with_canonical(
-            DUMMY_SP,
-            canonical_key,
-            |ref infcx, key, canonical_inference_vars| {
-                let mut fulfill_cx = TraitEngine::new(infcx.tcx);
-                let value = operation(infcx, &mut *fulfill_cx, key)?;
-                infcx.make_canonicalized_query_response(
-                    canonical_inference_vars,
-                    value,
-                    &mut *fulfill_cx,
-                )
-            },
-        )
-    }
-}
-
-impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
-    /// This method is meant to be invoked as the final step of a canonical query
-    /// implementation. It is given:
-    ///
-    /// - the instantiated variables `inference_vars` created from the query key
-    /// - the result `answer` of the query
-    /// - a fulfillment context `fulfill_cx` that may contain various obligations which
-    ///   have yet to be proven.
-    ///
-    /// Given this, the function will process the obligations pending
-    /// in `fulfill_cx`:
-    ///
-    /// - If all the obligations can be proven successfully, it will
-    ///   package up any resulting region obligations (extracted from
-    ///   `infcx`) along with the fully resolved value `answer` into a
-    ///   query result (which is then itself canonicalized).
-    /// - If some obligations can be neither proven nor disproven, then
-    ///   the same thing happens, but the resulting query is marked as ambiguous.
-    /// - Finally, if any of the obligations result in a hard error,
-    ///   then `Err(NoSolution)` is returned.
-    pub fn make_canonicalized_query_response<T>(
-        &self,
-        inference_vars: CanonicalVarValues<'tcx>,
-        answer: T,
-        fulfill_cx: &mut dyn TraitEngine<'tcx>,
-    ) -> Fallible<CanonicalizedQueryResponse<'tcx, T>>
-    where
-        T: Debug + TypeFoldable<'tcx>,
-        Canonical<'tcx, QueryResponse<'tcx, T>>: ArenaAllocatable,
-    {
-        let query_response = self.make_query_response(inference_vars, answer, fulfill_cx)?;
-        let canonical_result = self.canonicalize_response(&query_response);
-
-        debug!("make_canonicalized_query_response: canonical_result = {:#?}", canonical_result);
-
-        Ok(self.tcx.arena.alloc(canonical_result))
-    }
-
-    /// A version of `make_canonicalized_query_response` that does
-    /// not pack in obligations, for contexts that want to drop
-    /// pending obligations instead of treating them as an ambiguity (e.g.
-    /// typeck "probing" contexts).
-    ///
-    /// If you DO want to keep track of pending obligations (which
-    /// include all region obligations, so this includes all cases
-    /// that care about regions) with this function, you have to
-    /// do it yourself, by e.g., having them be a part of the answer.
-    pub fn make_query_response_ignoring_pending_obligations<T>(
-        &self,
-        inference_vars: CanonicalVarValues<'tcx>,
-        answer: T,
-    ) -> Canonical<'tcx, QueryResponse<'tcx, T>>
-    where
-        T: Debug + TypeFoldable<'tcx>,
-    {
-        self.canonicalize_response(&QueryResponse {
-            var_values: inference_vars,
-            region_constraints: QueryRegionConstraints::default(),
-            certainty: Certainty::Proven, // Ambiguities are OK!
-            value: answer,
-        })
-    }
-
-    /// Helper for `make_canonicalized_query_response` that does
-    /// everything up until the final canonicalization.
-    fn make_query_response<T>(
-        &self,
-        inference_vars: CanonicalVarValues<'tcx>,
-        answer: T,
-        fulfill_cx: &mut dyn TraitEngine<'tcx>,
-    ) -> Result<QueryResponse<'tcx, T>, NoSolution>
-    where
-        T: Debug + TypeFoldable<'tcx>,
-    {
-        let tcx = self.tcx;
-
-        debug!(
-            "make_query_response(\
-             inference_vars={:?}, \
-             answer={:?})",
-            inference_vars, answer,
-        );
-
-        // Select everything, returning errors.
-        let true_errors = fulfill_cx.select_where_possible(self).err().unwrap_or_else(Vec::new);
-        debug!("true_errors = {:#?}", true_errors);
-
-        if !true_errors.is_empty() {
-            // FIXME -- we don't indicate *why* we failed to solve
-            debug!("make_query_response: true_errors={:#?}", true_errors);
-            return Err(NoSolution);
-        }
-
-        // Anything left unselected *now* must be an ambiguity.
-        let ambig_errors = fulfill_cx.select_all_or_error(self).err().unwrap_or_else(Vec::new);
-        debug!("ambig_errors = {:#?}", ambig_errors);
-
-        let region_obligations = self.take_registered_region_obligations();
-        let region_constraints = self.with_region_constraints(|region_constraints| {
-            make_query_region_constraints(
-                tcx,
-                region_obligations.iter().map(|(_, r_o)| (r_o.sup_type, r_o.sub_region)),
-                region_constraints,
-            )
-        });
-
-        let certainty =
-            if ambig_errors.is_empty() { Certainty::Proven } else { Certainty::Ambiguous };
-
-        Ok(QueryResponse {
-            var_values: inference_vars,
-            region_constraints,
-            certainty,
-            value: answer,
-        })
-    }
-
-    /// Given the (canonicalized) result to a canonical query,
-    /// instantiates the result so it can be used, plugging in the
-    /// values from the canonical query. (Note that the result may
-    /// have been ambiguous; you should check the certainty level of
-    /// the query before applying this function.)
-    ///
-    /// To get a good understanding of what is happening here, check
-    /// out the [chapter in the rustc guide][c].
-    ///
-    /// [c]: https://rust-lang.github.io/rustc-guide/traits/canonicalization.html#processing-the-canonicalized-query-result
-    pub fn instantiate_query_response_and_region_obligations<R>(
-        &self,
-        cause: &ObligationCause<'tcx>,
-        param_env: ty::ParamEnv<'tcx>,
-        original_values: &OriginalQueryValues<'tcx>,
-        query_response: &Canonical<'tcx, QueryResponse<'tcx, R>>,
-    ) -> InferResult<'tcx, R>
-    where
-        R: Debug + TypeFoldable<'tcx>,
-    {
-        let InferOk { value: result_subst, mut obligations } =
-            self.query_response_substitution(cause, param_env, original_values, query_response)?;
-
-        obligations.extend(self.query_outlives_constraints_into_obligations(
-            cause,
-            param_env,
-            &query_response.value.region_constraints.outlives,
-            &result_subst,
-        ));
-
-        let user_result: R =
-            query_response.substitute_projected(self.tcx, &result_subst, |q_r| &q_r.value);
-
-        Ok(InferOk { value: user_result, obligations })
-    }
-
-    /// An alternative to
-    /// `instantiate_query_response_and_region_obligations` that is more
-    /// efficient for NLL. NLL is a bit more advanced in the
-    /// "transition to chalk" than the rest of the compiler. During
-    /// the NLL type check, all of the "processing" of types and
-    /// things happens in queries -- the NLL checker itself is only
-    /// interested in the region obligations (`'a: 'b` or `T: 'b`)
-    /// that come out of these queries, which it wants to convert into
-    /// MIR-based constraints and solve. Therefore, it is most
-    /// convenient for the NLL Type Checker to **directly consume**
-    /// the `QueryOutlivesConstraint` values that arise from doing a
-    /// query. This is in contrast to other parts of the compiler, which
-    /// would prefer for those `QueryOutlivesConstraint` to be converted
-    /// into the older infcx-style constraints (e.g., calls to
-    /// `sub_regions` or `register_region_obligation`).
-    ///
-    /// Therefore, `instantiate_nll_query_response_and_region_obligations` performs the same
-    /// basic operations as `instantiate_query_response_and_region_obligations` but
-    /// it returns its result differently:
-    ///
-    /// - It creates a substitution `S` that maps from the original
-    ///   query variables to the values computed in the query
-    ///   result. If any errors arise, they are propagated back as an
-    ///   `Err` result.
-    /// - In the case of a successful substitution, we will append
-    ///   `QueryOutlivesConstraint` values onto the
-    ///   `output_query_region_constraints` vector for the solver to
-    ///   use (if an error arises, some values may also be pushed, but
-    ///   they should be ignored).
-    /// - It **can happen** (though it rarely does currently) that
-    ///   equating types and things will give rise to subobligations
-    ///   that must be processed. In this case, those subobligations
-    ///   are propagated back in the return value.
-    /// - Finally, the query result (of type `R`) is propagated back,
-    ///   after applying the substitution `S`.
-    pub fn instantiate_nll_query_response_and_region_obligations<R>(
-        &self,
-        cause: &ObligationCause<'tcx>,
-        param_env: ty::ParamEnv<'tcx>,
-        original_values: &OriginalQueryValues<'tcx>,
-        query_response: &Canonical<'tcx, QueryResponse<'tcx, R>>,
-        output_query_region_constraints: &mut QueryRegionConstraints<'tcx>,
-    ) -> InferResult<'tcx, R>
-    where
-        R: Debug + TypeFoldable<'tcx>,
-    {
-        let result_subst =
-            self.query_response_substitution_guess(cause, original_values, query_response);
-
-        // Compute `QueryOutlivesConstraint` values that unify each of
-        // the original values `v_o` that was canonicalized into a
-        // variable...
-        let mut obligations = vec![];
-
-        for (index, original_value) in original_values.var_values.iter().enumerate() {
-            // ...with the value `v_r` of that variable from the query.
-            let result_value = query_response.substitute_projected(self.tcx, &result_subst, |v| {
-                &v.var_values[BoundVar::new(index)]
-            });
-            match (original_value.unpack(), result_value.unpack()) {
-                (
-                    GenericArgKind::Lifetime(ty::ReErased),
-                    GenericArgKind::Lifetime(ty::ReErased),
-                ) => {
-                    // No action needed.
-                }
-
-                (GenericArgKind::Lifetime(v_o), GenericArgKind::Lifetime(v_r)) => {
-                    // To make `v_o = v_r`, we emit `v_o: v_r` and `v_r: v_o`.
-                    if v_o != v_r {
-                        output_query_region_constraints
-                            .outlives
-                            .push(ty::Binder::dummy(ty::OutlivesPredicate(v_o.into(), v_r)));
-                        output_query_region_constraints
-                            .outlives
-                            .push(ty::Binder::dummy(ty::OutlivesPredicate(v_r.into(), v_o)));
-                    }
-                }
-
-                (GenericArgKind::Type(v1), GenericArgKind::Type(v2)) => {
-                    let ok = self.at(cause, param_env).eq(v1, v2)?;
-                    obligations.extend(ok.into_obligations());
-                }
-
-                (GenericArgKind::Const(v1), GenericArgKind::Const(v2)) => {
-                    let ok = self.at(cause, param_env).eq(v1, v2)?;
-                    obligations.extend(ok.into_obligations());
-                }
-
-                _ => {
-                    bug!("kind mismatch, cannot unify {:?} and {:?}", original_value, result_value);
-                }
-            }
-        }
-
-        // ...also include the other query region constraints from the query.
-        output_query_region_constraints.outlives.extend(
-            query_response.value.region_constraints.outlives.iter().filter_map(|r_c| {
-                let r_c = substitute_value(self.tcx, &result_subst, r_c);
-
-                // Screen out `'a: 'a` cases -- we skip the binder here but
-                // only compare the inner values to one another, so they are still at
-                // consistent binding levels.
-                let &ty::OutlivesPredicate(k1, r2) = r_c.skip_binder();
-                if k1 != r2.into() { Some(r_c) } else { None }
-            }),
-        );
-
-        // ...also include the query member constraints.
-        output_query_region_constraints.member_constraints.extend(
-            query_response
-                .value
-                .region_constraints
-                .member_constraints
-                .iter()
-                .map(|p_c| substitute_value(self.tcx, &result_subst, p_c)),
-        );
-
-        let user_result: R =
-            query_response.substitute_projected(self.tcx, &result_subst, |q_r| &q_r.value);
-
-        Ok(InferOk { value: user_result, obligations })
-    }
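The lifetime arm above relies on a small encoding trick: to require `v_o = v_r` the function emits two outlives constraints, `v_o: v_r` and `v_r: v_o`, because the constraint language only speaks about outlives. A toy sketch of just that encoding (the `Region` and `Outlives` types are illustrative):

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct Region(&'static str);

/// `Outlives(a, b)` means `a: b`, i.e. `a` outlives `b`.
#[derive(Debug, PartialEq)]
struct Outlives(Region, Region);

/// Region equality expressed as mutual outlives constraints.
fn equate_regions(v_o: Region, v_r: Region, out: &mut Vec<Outlives>) {
    if v_o != v_r {
        out.push(Outlives(v_o, v_r)); // v_o: v_r
        out.push(Outlives(v_r, v_o)); // v_r: v_o
    }
}

fn main() {
    let mut constraints = Vec::new();
    equate_regions(Region("'a"), Region("'b"), &mut constraints);
    assert_eq!(
        constraints,
        vec![
            Outlives(Region("'a"), Region("'b")),
            Outlives(Region("'b"), Region("'a")),
        ]
    );
}
```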
-
-    /// Given the original values and the (canonicalized) result from
-    /// computing a query, returns a substitution that can be applied
-    /// to the query result to convert the result back into the
-    /// original namespace.
-    ///
-    /// The substitution also comes accompanied with subobligations
-    /// that arose from unification; these might occur if (for
-    /// example) we are doing lazy normalization and the value
-    /// assigned to a type variable is unified with an unnormalized
-    /// projection.
-    fn query_response_substitution<R>(
-        &self,
-        cause: &ObligationCause<'tcx>,
-        param_env: ty::ParamEnv<'tcx>,
-        original_values: &OriginalQueryValues<'tcx>,
-        query_response: &Canonical<'tcx, QueryResponse<'tcx, R>>,
-    ) -> InferResult<'tcx, CanonicalVarValues<'tcx>>
-    where
-        R: Debug + TypeFoldable<'tcx>,
-    {
-        debug!(
-            "query_response_substitution(original_values={:#?}, query_response={:#?})",
-            original_values, query_response,
-        );
-
-        let result_subst =
-            self.query_response_substitution_guess(cause, original_values, query_response);
-
-        let obligations = self
-            .unify_query_response_substitution_guess(
-                cause,
-                param_env,
-                original_values,
-                &result_subst,
-                query_response,
-            )?
-            .into_obligations();
-
-        Ok(InferOk { value: result_subst, obligations })
-    }
-
-    /// Given the original values and the (canonicalized) result from
-    /// computing a query, returns a **guess** at a substitution that
-    /// can be applied to the query result to convert the result back
-    /// into the original namespace. This is called a **guess**
-    /// because it uses a quick heuristic to find the values for each
-    /// canonical variable; if that quick heuristic fails, then we
-    /// will instantiate fresh inference variables for each canonical
-    /// variable instead. Therefore, the result of this method must be
-    /// properly unified with the actual values in the query response
-    /// (see `unify_query_response_substitution_guess`).
-    fn query_response_substitution_guess<R>(
-        &self,
-        cause: &ObligationCause<'tcx>,
-        original_values: &OriginalQueryValues<'tcx>,
-        query_response: &Canonical<'tcx, QueryResponse<'tcx, R>>,
-    ) -> CanonicalVarValues<'tcx>
-    where
-        R: Debug + TypeFoldable<'tcx>,
-    {
-        debug!(
-            "query_response_substitution_guess(original_values={:#?}, query_response={:#?})",
-            original_values, query_response,
-        );
-
-        // For each new universe created in the query result that did
-        // not appear in the original query, create a local
-        // superuniverse.
-        let mut universe_map = original_values.universe_map.clone();
-        let num_universes_in_query = original_values.universe_map.len();
-        let num_universes_in_response = query_response.max_universe.as_usize() + 1;
-        for _ in num_universes_in_query..num_universes_in_response {
-            universe_map.push(self.create_next_universe());
-        }
-        assert!(universe_map.len() >= 1); // always have the root universe
-        assert_eq!(universe_map[ty::UniverseIndex::ROOT.as_usize()], ty::UniverseIndex::ROOT);
-
-        // Every canonical query result includes values for each of
-        // the inputs to the query. Therefore, we begin by unifying
-        // these values with the original inputs that were
-        // canonicalized.
-        let result_values = &query_response.value.var_values;
-        assert_eq!(original_values.var_values.len(), result_values.len());
-
-        // Quickly try to find initial values for the canonical
-        // variables in the result in terms of the query. We do this
-        // by iterating down the values that the query gave to each of
-        // the canonical inputs. If we find that one of those values
-        // is directly equal to one of the canonical variables in the
-        // result, then we can map it back to the corresponding value
-        // from the input. See the example above.
-        let mut opt_values: IndexVec<BoundVar, Option<GenericArg<'tcx>>> =
-            IndexVec::from_elem_n(None, query_response.variables.len());
-
-        // In terms of our example above, we are iterating over pairs like:
-        // [(?A, Vec<?0>), ('static, '?1), (?B, ?0)]
-        for (original_value, result_value) in original_values.var_values.iter().zip(result_values) {
-            match result_value.unpack() {
-                GenericArgKind::Type(result_value) => {
-                    // e.g., here `result_value` might be `?0` in the example above...
-                    if let ty::Bound(debruijn, b) = result_value.kind {
-                        // ...in which case we would set `canonical_vars[0]` to `Some(?U)`.
-
-                        // We only allow a `ty::INNERMOST` index in substitutions.
-                        assert_eq!(debruijn, ty::INNERMOST);
-                        opt_values[b.var] = Some(*original_value);
-                    }
-                }
-                GenericArgKind::Lifetime(result_value) => {
-                    // e.g., here `result_value` might be `'?1` in the example above...
-                    if let &ty::RegionKind::ReLateBound(debruijn, br) = result_value {
-                        // ... in which case we would set `canonical_vars[1]` to `Some('static)`.
-
-                        // We only allow a `ty::INNERMOST` index in substitutions.
-                        assert_eq!(debruijn, ty::INNERMOST);
-                        opt_values[br.assert_bound_var()] = Some(*original_value);
-                    }
-                }
-                GenericArgKind::Const(result_value) => {
-                    if let ty::Const { val: ty::ConstKind::Bound(debruijn, b), .. } = result_value {
-                        // ...in which case we would set `canonical_vars[0]` to `Some(const X)`.
-
-                        // We only allow a `ty::INNERMOST` index in substitutions.
-                        assert_eq!(*debruijn, ty::INNERMOST);
-                        opt_values[*b] = Some(*original_value);
-                    }
-                }
-            }
-        }
-
-        // Create a result substitution: if we found a value for a
-        // given variable in the loop above, use that. Otherwise, use
-        // a fresh inference variable.
-        let result_subst = CanonicalVarValues {
-            var_values: query_response
-                .variables
-                .iter()
-                .enumerate()
-                .map(|(index, info)| {
-                    if info.is_existential() {
-                        match opt_values[BoundVar::new(index)] {
-                            Some(k) => k,
-                            None => self.instantiate_canonical_var(cause.span, *info, |u| {
-                                universe_map[u.as_usize()]
-                            }),
-                        }
-                    } else {
-                        self.instantiate_canonical_var(cause.span, *info, |u| {
-                            universe_map[u.as_usize()]
-                        })
-                    }
-                })
-                .collect(),
-        };
-
-        result_subst
-    }
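The guessing loop above is easier to see in isolation: only result values that are *directly* a bound variable yield a guess; anything structured (like `Vec<?0>`) is left to the fresh-variable-plus-unification fallback. A self-contained sketch, using the pairs from the comment in the function above and strings as stand-ins for types and regions:

```rust
#[derive(Clone, Debug, PartialEq)]
enum Arg {
    Bound(usize),        // a canonical variable `^i` appearing in the result
    Value(&'static str), // any other value, rendered as a string here
}

/// For each (original, result) pair, record the original value as the
/// guess for canonical variable `i` whenever the result is exactly `^i`.
fn guess(pairs: &[(Arg, Arg)], num_vars: usize) -> Vec<Option<Arg>> {
    let mut opt_values: Vec<Option<Arg>> = vec![None; num_vars];
    for (original, result) in pairs {
        if let Arg::Bound(i) = result {
            opt_values[*i] = Some(original.clone());
        }
    }
    // `None` entries would be filled with fresh inference variables.
    opt_values
}

fn main() {
    // [(?A, Vec<?0>), ('static, ^1), (?B, ^0)]
    let pairs = [
        (Arg::Value("?A"), Arg::Value("Vec<?0>")),
        (Arg::Value("'static"), Arg::Bound(1)),
        (Arg::Value("?B"), Arg::Bound(0)),
    ];
    assert_eq!(
        guess(&pairs, 2),
        vec![Some(Arg::Value("?B")), Some(Arg::Value("'static"))]
    );
}
```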
-
-    /// Given a "guess" at the values for the canonical variables in
-    /// the input, try to unify with the *actual* values found in the
-    /// query result. Often, but not always, this is a no-op, because
-    /// we already found the mapping in the "guessing" step.
-    ///
-    /// See also: `query_response_substitution_guess`
-    fn unify_query_response_substitution_guess<R>(
-        &self,
-        cause: &ObligationCause<'tcx>,
-        param_env: ty::ParamEnv<'tcx>,
-        original_values: &OriginalQueryValues<'tcx>,
-        result_subst: &CanonicalVarValues<'tcx>,
-        query_response: &Canonical<'tcx, QueryResponse<'tcx, R>>,
-    ) -> InferResult<'tcx, ()>
-    where
-        R: Debug + TypeFoldable<'tcx>,
-    {
-        // A closure that yields the result value for the given
-        // canonical variable; this is taken from
-        // `query_response.var_values` after applying the substitution
-        // `result_subst`.
-        let substituted_query_response = |index: BoundVar| -> GenericArg<'tcx> {
-            query_response.substitute_projected(self.tcx, &result_subst, |v| &v.var_values[index])
-        };
-
-        // Unify the original value for each variable with the value
-        // taken from `query_response` (after applying `result_subst`).
-        Ok(self.unify_canonical_vars(
-            cause,
-            param_env,
-            original_values,
-            substituted_query_response,
-        )?)
-    }
-
-    /// Converts the region constraints resulting from a query into an
-    /// iterator of obligations.
-    fn query_outlives_constraints_into_obligations<'a>(
-        &'a self,
-        cause: &'a ObligationCause<'tcx>,
-        param_env: ty::ParamEnv<'tcx>,
-        unsubstituted_region_constraints: &'a [QueryOutlivesConstraint<'tcx>],
-        result_subst: &'a CanonicalVarValues<'tcx>,
-    ) -> impl Iterator<Item = PredicateObligation<'tcx>> + 'a + Captures<'tcx> {
-        unsubstituted_region_constraints.iter().map(move |constraint| {
-            let constraint = substitute_value(self.tcx, result_subst, constraint);
-            let &ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); // restored below
-
-            Obligation::new(
-                cause.clone(),
-                param_env,
-                match k1.unpack() {
-                    GenericArgKind::Lifetime(r1) => ty::Predicate::RegionOutlives(
-                        ty::Binder::bind(ty::OutlivesPredicate(r1, r2)),
-                    ),
-                    GenericArgKind::Type(t1) => {
-                        ty::Predicate::TypeOutlives(ty::Binder::bind(ty::OutlivesPredicate(t1, r2)))
-                    }
-                    GenericArgKind::Const(..) => {
-                        // Consts cannot outlive one another, so we don't expect to
-                        // encounter this branch.
-                        span_bug!(cause.span, "unexpected const outlives {:?}", constraint);
-                    }
-                },
-            )
-        })
-    }
-
-    /// Given two sets of values for the same set of canonical variables, unify them.
-    /// The second set is produced lazily by supplying indices from the first set.
-    fn unify_canonical_vars(
-        &self,
-        cause: &ObligationCause<'tcx>,
-        param_env: ty::ParamEnv<'tcx>,
-        variables1: &OriginalQueryValues<'tcx>,
-        variables2: impl Fn(BoundVar) -> GenericArg<'tcx>,
-    ) -> InferResult<'tcx, ()> {
-        self.commit_if_ok(|_| {
-            let mut obligations = vec![];
-            for (index, value1) in variables1.var_values.iter().enumerate() {
-                let value2 = variables2(BoundVar::new(index));
-
-                match (value1.unpack(), value2.unpack()) {
-                    (GenericArgKind::Type(v1), GenericArgKind::Type(v2)) => {
-                        obligations
-                            .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
-                    }
-                    (
-                        GenericArgKind::Lifetime(ty::ReErased),
-                        GenericArgKind::Lifetime(ty::ReErased),
-                    ) => {
-                        // no action needed
-                    }
-                    (GenericArgKind::Lifetime(v1), GenericArgKind::Lifetime(v2)) => {
-                        obligations
-                            .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
-                    }
-                    (GenericArgKind::Const(v1), GenericArgKind::Const(v2)) => {
-                        let ok = self.at(cause, param_env).eq(v1, v2)?;
-                        obligations.extend(ok.into_obligations());
-                    }
-                    _ => {
-                        bug!("kind mismatch, cannot unify {:?} and {:?}", value1, value2,);
-                    }
-                }
-            }
-            Ok(InferOk { value: (), obligations })
-        })
-    }
-}
-
-/// Given the region obligations and constraints scraped from the infcx,
-/// creates query region constraints.
-pub fn make_query_region_constraints<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    outlives_obligations: impl Iterator<Item = (Ty<'tcx>, ty::Region<'tcx>)>,
-    region_constraints: &RegionConstraintData<'tcx>,
-) -> QueryRegionConstraints<'tcx> {
-    let RegionConstraintData { constraints, verifys, givens, member_constraints } =
-        region_constraints;
-
-    assert!(verifys.is_empty());
-    assert!(givens.is_empty());
-
-    let outlives: Vec<_> = constraints
-        .into_iter()
-        .map(|(k, _)| match *k {
-            // Swap regions because we are going from sub (<=) to outlives
-            // (>=).
-            Constraint::VarSubVar(v1, v2) => ty::OutlivesPredicate(
-                tcx.mk_region(ty::ReVar(v2)).into(),
-                tcx.mk_region(ty::ReVar(v1)),
-            ),
-            Constraint::VarSubReg(v1, r2) => {
-                ty::OutlivesPredicate(r2.into(), tcx.mk_region(ty::ReVar(v1)))
-            }
-            Constraint::RegSubVar(r1, v2) => {
-                ty::OutlivesPredicate(tcx.mk_region(ty::ReVar(v2)).into(), r1)
-            }
-            Constraint::RegSubReg(r1, r2) => ty::OutlivesPredicate(r2.into(), r1),
-        })
-        .map(ty::Binder::dummy) // no bound vars in the code above
-        .chain(
-            outlives_obligations
-                .map(|(ty, r)| ty::OutlivesPredicate(ty.into(), r))
-                .map(ty::Binder::dummy), // no bound vars in the code above
-        )
-        .collect();
-
-    QueryRegionConstraints { outlives, member_constraints: member_constraints.clone() }
-}
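The only subtle step in `make_query_region_constraints` is the direction swap called out in the comments: a sub-region constraint `'a <= 'b` ("'a is contained in 'b") becomes the outlives predicate `'b: 'a`. A toy sketch of that mapping (illustrative types, not rustc's `Constraint`/`OutlivesPredicate`):

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct Region(&'static str);

/// `Outlives(a, b)` means `a: b`.
#[derive(Debug, PartialEq)]
struct Outlives(Region, Region);

/// Each `(sub, sup)` pair with `sub <= sup` becomes `sup: sub`.
fn make_outlives(sub_constraints: &[(Region, Region)]) -> Vec<Outlives> {
    sub_constraints
        .iter()
        .map(|&(sub, sup)| Outlives(sup, sub))
        .collect()
}

fn main() {
    let constraints = [(Region("'a"), Region("'b"))]; // 'a <= 'b
    assert_eq!(
        make_outlives(&constraints),
        vec![Outlives(Region("'b"), Region("'a"))] // 'b: 'a
    );
}
```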
diff --git a/src/librustc/infer/canonical/substitute.rs b/src/librustc/infer/canonical/substitute.rs
deleted file mode 100644
index 9251634..0000000
--- a/src/librustc/infer/canonical/substitute.rs
+++ /dev/null
@@ -1,77 +0,0 @@
-//! This module contains code to substitute new values into a
-//! `Canonical<'tcx, T>`.
-//!
-//! For an overview of what canonicalization is and how it fits into
-//! rustc, check out the [chapter in the rustc guide][c].
-//!
-//! [c]: https://rust-lang.github.io/rustc-guide/traits/canonicalization.html
-
-use crate::infer::canonical::{Canonical, CanonicalVarValues};
-use crate::ty::fold::TypeFoldable;
-use crate::ty::subst::GenericArgKind;
-use crate::ty::{self, TyCtxt};
-
-impl<'tcx, V> Canonical<'tcx, V> {
-    /// Instantiate the wrapped value, replacing each canonical value
-    /// with the value given in `var_values`.
-    pub fn substitute(&self, tcx: TyCtxt<'tcx>, var_values: &CanonicalVarValues<'tcx>) -> V
-    where
-        V: TypeFoldable<'tcx>,
-    {
-        self.substitute_projected(tcx, var_values, |value| value)
-    }
-
-    /// Allows one to apply a substitute to some subset of
-    /// `self.value`. Invoke `projection_fn` with `self.value` to get
-    /// a value V that is expressed in terms of the same canonical
-    /// variables bound in `self` (usually this extracts from subset
-    /// of `self`). Apply the substitution `var_values` to this value
-    /// V, replacing each of the canonical variables.
-    pub fn substitute_projected<T>(
-        &self,
-        tcx: TyCtxt<'tcx>,
-        var_values: &CanonicalVarValues<'tcx>,
-        projection_fn: impl FnOnce(&V) -> &T,
-    ) -> T
-    where
-        T: TypeFoldable<'tcx>,
-    {
-        assert_eq!(self.variables.len(), var_values.len());
-        let value = projection_fn(&self.value);
-        substitute_value(tcx, var_values, value)
-    }
-}
-
-/// Substitute the values from `var_values` into `value`. `var_values`
-/// must be values for the set of canonical variables that appear in
-/// `value`.
-pub(super) fn substitute_value<'a, 'tcx, T>(
-    tcx: TyCtxt<'tcx>,
-    var_values: &CanonicalVarValues<'tcx>,
-    value: &'a T,
-) -> T
-where
-    T: TypeFoldable<'tcx>,
-{
-    if var_values.var_values.is_empty() {
-        value.clone()
-    } else {
-        let fld_r =
-            |br: ty::BoundRegion| match var_values.var_values[br.assert_bound_var()].unpack() {
-                GenericArgKind::Lifetime(l) => l,
-                r => bug!("{:?} is a region but value is {:?}", br, r),
-            };
-
-        let fld_t = |bound_ty: ty::BoundTy| match var_values.var_values[bound_ty.var].unpack() {
-            GenericArgKind::Type(ty) => ty,
-            r => bug!("{:?} is a type but value is {:?}", bound_ty, r),
-        };
-
-        let fld_c = |bound_ct: ty::BoundVar, _| match var_values.var_values[bound_ct].unpack() {
-            GenericArgKind::Const(ct) => ct,
-            c => bug!("{:?} is a const but value is {:?}", bound_ct, c),
-        };
-
-        tcx.replace_escaping_bound_vars(value, fld_r, fld_t, fld_c).0
-    }
-}
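Substitution is the inverse of canonicalization: every bound variable `^i` in the value is replaced by `var_values[i]`. A self-contained toy sketch of that operation (plain enums instead of rustc's folding machinery):

```rust
#[derive(Clone, Debug, PartialEq)]
enum Term {
    Bound(usize), // `^i`
    Int(i64),
    Str(&'static str),
}

/// Replace each `Bound(i)` in `value` with `var_values[i]`.
fn substitute_value(var_values: &[Term], value: &[Term]) -> Vec<Term> {
    value
        .iter()
        .map(|t| match *t {
            Term::Bound(i) => var_values[i].clone(),
            ref other => other.clone(),
        })
        .collect()
}

fn main() {
    let var_values = [Term::Int(42), Term::Str("hello")];
    let canonical_value = [Term::Bound(1), Term::Bound(0), Term::Bound(1)];
    assert_eq!(
        substitute_value(&var_values, &canonical_value),
        vec![Term::Str("hello"), Term::Int(42), Term::Str("hello")]
    );
}
```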
diff --git a/src/librustc/infer/combine.rs b/src/librustc/infer/combine.rs
deleted file mode 100644
index 5d765a2..0000000
--- a/src/librustc/infer/combine.rs
+++ /dev/null
@@ -1,673 +0,0 @@
-///////////////////////////////////////////////////////////////////////////
-// # Type combining
-//
-// There are four type combiners: equate, sub, lub, and glb.  Each
-// implements the trait `Combine` and contains methods for combining
-// two instances of various things and yielding a new instance.  These
-// combiner methods always yield a `Result<T>`.  There is a lot of
-// common code for these operations, implemented as default methods on
-// the `Combine` trait.
-//
-// Each operation may have side-effects on the inference context,
-// though these can be unrolled using snapshots. On success, the
-// LUB/GLB operations return the appropriate bound. The Eq and Sub
-// operations generally return the first operand.
-//
-// ## Contravariance
-//
-// When you are relating two things which have a contravariant
-// relationship, you should use `contratys()` or `contraregions()`,
-// rather than inverting the order of arguments!  This is necessary
-// because the order of arguments is not relevant for LUB and GLB.  It
-// is also useful to track which value is the "expected" value in
-// terms of error reporting.
-
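The contravariance note above can be seen directly in surface Rust: in argument position the subtyping direction flips, which is exactly why the combiners use `contratys()`/`contraregions()` rather than swapping the arguments. A small standalone example (ordinary Rust, nothing rustc-internal):

```rust
// `&'static str` is a subtype of `&'a str`, but in *argument position*
// the direction flips: a function accepting the more general `&'a str`
// can be used where a function accepting only `&'static str` is expected.
fn as_static_arg_fn<'a>(f: fn(&'a str)) -> fn(&'static str) {
    f // accepted precisely because fn arguments are contravariant
}

fn main() {
    fn print_it(s: &str) {
        println!("{}", s);
    }
    let g: fn(&'static str) = as_static_arg_fn(print_it);
    g("contravariance in action");
}
```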
-use super::equate::Equate;
-use super::glb::Glb;
-use super::lub::Lub;
-use super::sub::Sub;
-use super::type_variable::TypeVariableValue;
-use super::unify_key::replace_if_possible;
-use super::unify_key::{ConstVarValue, ConstVariableValue};
-use super::unify_key::{ConstVariableOrigin, ConstVariableOriginKind};
-use super::{InferCtxt, MiscVariable, TypeTrace};
-
-use crate::traits::{Obligation, PredicateObligations};
-use crate::ty::error::TypeError;
-use crate::ty::relate::{self, Relate, RelateResult, TypeRelation};
-use crate::ty::subst::SubstsRef;
-use crate::ty::{self, InferConst, Ty, TyCtxt};
-use crate::ty::{IntType, UintType};
-
-use rustc_hir::def_id::DefId;
-use rustc_span::{Span, DUMMY_SP};
-use syntax::ast;
-
-#[derive(Clone)]
-pub struct CombineFields<'infcx, 'tcx> {
-    pub infcx: &'infcx InferCtxt<'infcx, 'tcx>,
-    pub trace: TypeTrace<'tcx>,
-    pub cause: Option<ty::relate::Cause>,
-    pub param_env: ty::ParamEnv<'tcx>,
-    pub obligations: PredicateObligations<'tcx>,
-}
-
-#[derive(Copy, Clone, Debug)]
-pub enum RelationDir {
-    SubtypeOf,
-    SupertypeOf,
-    EqTo,
-}
-
-impl<'infcx, 'tcx> InferCtxt<'infcx, 'tcx> {
-    pub fn super_combine_tys<R>(
-        &self,
-        relation: &mut R,
-        a: Ty<'tcx>,
-        b: Ty<'tcx>,
-    ) -> RelateResult<'tcx, Ty<'tcx>>
-    where
-        R: TypeRelation<'tcx>,
-    {
-        let a_is_expected = relation.a_is_expected();
-
-        match (&a.kind, &b.kind) {
-            // Relate integral variables to other types
-            (&ty::Infer(ty::IntVar(a_id)), &ty::Infer(ty::IntVar(b_id))) => {
-                self.int_unification_table
-                    .borrow_mut()
-                    .unify_var_var(a_id, b_id)
-                    .map_err(|e| int_unification_error(a_is_expected, e))?;
-                Ok(a)
-            }
-            (&ty::Infer(ty::IntVar(v_id)), &ty::Int(v)) => {
-                self.unify_integral_variable(a_is_expected, v_id, IntType(v))
-            }
-            (&ty::Int(v), &ty::Infer(ty::IntVar(v_id))) => {
-                self.unify_integral_variable(!a_is_expected, v_id, IntType(v))
-            }
-            (&ty::Infer(ty::IntVar(v_id)), &ty::Uint(v)) => {
-                self.unify_integral_variable(a_is_expected, v_id, UintType(v))
-            }
-            (&ty::Uint(v), &ty::Infer(ty::IntVar(v_id))) => {
-                self.unify_integral_variable(!a_is_expected, v_id, UintType(v))
-            }
-
-            // Relate floating-point variables to other types
-            (&ty::Infer(ty::FloatVar(a_id)), &ty::Infer(ty::FloatVar(b_id))) => {
-                self.float_unification_table
-                    .borrow_mut()
-                    .unify_var_var(a_id, b_id)
-                    .map_err(|e| float_unification_error(relation.a_is_expected(), e))?;
-                Ok(a)
-            }
-            (&ty::Infer(ty::FloatVar(v_id)), &ty::Float(v)) => {
-                self.unify_float_variable(a_is_expected, v_id, v)
-            }
-            (&ty::Float(v), &ty::Infer(ty::FloatVar(v_id))) => {
-                self.unify_float_variable(!a_is_expected, v_id, v)
-            }
-
-            // All other cases of inference are errors
-            (&ty::Infer(_), _) | (_, &ty::Infer(_)) => {
-                Err(TypeError::Sorts(ty::relate::expected_found(relation, &a, &b)))
-            }
-
-            _ => ty::relate::super_relate_tys(relation, a, b),
-        }
-    }
-
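For context on the `IntVar`/`FloatVar` arms of `super_combine_tys` above: at the surface level, this unification is what lets an unsuffixed numeric literal pick up a concrete type from later uses. A rough user-level illustration (ordinary Rust, not this module's API):

```rust
fn main() {
    let x = 1; // `x` starts out as an integer inference variable (an `IntVar`)
    let y: u8 = x; // unifying that variable with `u8` resolves it; `x: u8` from here on
    let f = 1.0; // similarly, `f` starts as a float inference variable (a `FloatVar`)
    let g: f32 = f; // resolved to `f32`
    // let z: u16 = x; // would now be rejected: the `IntVar` is already unified with `u8`
    println!("{} {}", y, g);
}
```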
-    pub fn super_combine_consts<R>(
-        &self,
-        relation: &mut R,
-        a: &'tcx ty::Const<'tcx>,
-        b: &'tcx ty::Const<'tcx>,
-    ) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>>
-    where
-        R: TypeRelation<'tcx>,
-    {
-        debug!("{}.consts({:?}, {:?})", relation.tag(), a, b);
-        if a == b {
-            return Ok(a);
-        }
-
-        let a = replace_if_possible(self.const_unification_table.borrow_mut(), a);
-        let b = replace_if_possible(self.const_unification_table.borrow_mut(), b);
-
-        let a_is_expected = relation.a_is_expected();
-
-        match (a.val, b.val) {
-            (
-                ty::ConstKind::Infer(InferConst::Var(a_vid)),
-                ty::ConstKind::Infer(InferConst::Var(b_vid)),
-            ) => {
-                self.const_unification_table
-                    .borrow_mut()
-                    .unify_var_var(a_vid, b_vid)
-                    .map_err(|e| const_unification_error(a_is_expected, e))?;
-                return Ok(a);
-            }
-
-            // All other cases of inference with other variables are errors.
-            (ty::ConstKind::Infer(InferConst::Var(_)), ty::ConstKind::Infer(_))
-            | (ty::ConstKind::Infer(_), ty::ConstKind::Infer(InferConst::Var(_))) => {
-                bug!("tried to combine ConstKind::Infer/ConstKind::Infer(InferConst::Var)")
-            }
-
-            (ty::ConstKind::Infer(InferConst::Var(vid)), _) => {
-                return self.unify_const_variable(a_is_expected, vid, b);
-            }
-
-            (_, ty::ConstKind::Infer(InferConst::Var(vid))) => {
-                return self.unify_const_variable(!a_is_expected, vid, a);
-            }
-
-            _ => {}
-        }
-
-        ty::relate::super_relate_consts(relation, a, b)
-    }
-
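`super_combine_consts` plays the analogous role for const inference variables. A rough user-level illustration of the effect, assuming a toolchain where const generics are available (newer than the snapshot being removed here); the function `first` is invented for the example:

```rust
// The const parameter `N` is an inference variable at the call site and is
// unified with `3` from the argument's array type.
fn first<const N: usize>(xs: [u32; N]) -> u32 {
    xs[0]
}

fn main() {
    let v = first([10, 20, 30]); // `N` inferred (unified) to 3
    println!("{}", v);
}
```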
-    pub fn unify_const_variable(
-        &self,
-        vid_is_expected: bool,
-        vid: ty::ConstVid<'tcx>,
-        value: &'tcx ty::Const<'tcx>,
-    ) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> {
-        self.const_unification_table
-            .borrow_mut()
-            .unify_var_value(
-                vid,
-                ConstVarValue {
-                    origin: ConstVariableOrigin {
-                        kind: ConstVariableOriginKind::ConstInference,
-                        span: DUMMY_SP,
-                    },
-                    val: ConstVariableValue::Known { value },
-                },
-            )
-            .map_err(|e| const_unification_error(vid_is_expected, e))?;
-        Ok(value)
-    }
-
-    fn unify_integral_variable(
-        &self,
-        vid_is_expected: bool,
-        vid: ty::IntVid,
-        val: ty::IntVarValue,
-    ) -> RelateResult<'tcx, Ty<'tcx>> {
-        self.int_unification_table
-            .borrow_mut()
-            .unify_var_value(vid, Some(val))
-            .map_err(|e| int_unification_error(vid_is_expected, e))?;
-        match val {
-            IntType(v) => Ok(self.tcx.mk_mach_int(v)),
-            UintType(v) => Ok(self.tcx.mk_mach_uint(v)),
-        }
-    }
-
-    fn unify_float_variable(
-        &self,
-        vid_is_expected: bool,
-        vid: ty::FloatVid,
-        val: ast::FloatTy,
-    ) -> RelateResult<'tcx, Ty<'tcx>> {
-        self.float_unification_table
-            .borrow_mut()
-            .unify_var_value(vid, Some(ty::FloatVarValue(val)))
-            .map_err(|e| float_unification_error(vid_is_expected, e))?;
-        Ok(self.tcx.mk_mach_float(val))
-    }
-}
-
-impl<'infcx, 'tcx> CombineFields<'infcx, 'tcx> {
-    pub fn tcx(&self) -> TyCtxt<'tcx> {
-        self.infcx.tcx
-    }
-
-    pub fn equate<'a>(&'a mut self, a_is_expected: bool) -> Equate<'a, 'infcx, 'tcx> {
-        Equate::new(self, a_is_expected)
-    }
-
-    pub fn sub<'a>(&'a mut self, a_is_expected: bool) -> Sub<'a, 'infcx, 'tcx> {
-        Sub::new(self, a_is_expected)
-    }
-
-    pub fn lub<'a>(&'a mut self, a_is_expected: bool) -> Lub<'a, 'infcx, 'tcx> {
-        Lub::new(self, a_is_expected)
-    }
-
-    pub fn glb<'a>(&'a mut self, a_is_expected: bool) -> Glb<'a, 'infcx, 'tcx> {
-        Glb::new(self, a_is_expected)
-    }
-
-    /// Here, `dir` is either `EqTo`, `SubtypeOf`, or `SupertypeOf`.
-    /// The idea is that we should ensure that the type `a_ty` is equal
-    /// to, a subtype of, or a supertype of (respectively) the type
-    /// to which `b_vid` is bound.
-    ///
-    /// Since `b_vid` has not yet been instantiated with a type, we
-    /// will first instantiate `b_vid` with a *generalized* version
-    /// of `a_ty`. Generalization introduces other inference
-    /// variables wherever subtyping could occur.
-    pub fn instantiate(
-        &mut self,
-        a_ty: Ty<'tcx>,
-        dir: RelationDir,
-        b_vid: ty::TyVid,
-        a_is_expected: bool,
-    ) -> RelateResult<'tcx, ()> {
-        use self::RelationDir::*;
-
-        // Get the actual variable that b_vid has been inferred to
-        debug_assert!(self.infcx.type_variables.borrow_mut().probe(b_vid).is_unknown());
-
-        debug!("instantiate(a_ty={:?} dir={:?} b_vid={:?})", a_ty, dir, b_vid);
-
-        // Generalize type of `a_ty` appropriately depending on the
-        // direction.  As an example, assume:
-        //
-        // - `a_ty == &'x ?1`, where `'x` is some free region and `?1` is an
-        //   inference variable,
-        // - and `dir` == `SubtypeOf`.
-        //
-        // Then the generalized form `b_ty` would be `&'?2 ?3`, where
-        // `'?2` and `?3` are fresh region