Auto merge of #61822 - JohnTitor:add-long-e0592, r=GuillaumeGomez,Centril
Add explanation for E0592
This is a part of #61137
r? @GuillaumeGomez
diff --git a/.azure-pipelines/auto.yml b/.azure-pipelines/auto.yml
index 68a382b..4f000e7 100644
--- a/.azure-pipelines/auto.yml
+++ b/.azure-pipelines/auto.yml
@@ -140,7 +140,6 @@
IMAGE: x86_64-gnu-aux
x86_64-gnu-tools:
IMAGE: x86_64-gnu-tools
- # FIXME if: branch = auto OR (type = pull_request AND commit_message =~ /(?i:^update.*\b(rls|rustfmt|clippy|miri|cargo)\b)/)
x86_64-gnu-debug:
IMAGE: x86_64-gnu-debug
x86_64-gnu-nopt:
diff --git a/.azure-pipelines/master.yml b/.azure-pipelines/master.yml
index 3f3025a..e2baa92 100644
--- a/.azure-pipelines/master.yml
+++ b/.azure-pipelines/master.yml
@@ -6,6 +6,9 @@
trigger:
- master
+variables:
+- group: prod-credentials
+
pool:
vmImage: ubuntu-16.04
@@ -16,9 +19,7 @@
- script: |
export MESSAGE_FILE=$(mktemp -t msg.XXXXXX)
. src/ci/docker/x86_64-gnu-tools/repo.sh
- # FIXME(pietro): committing is disabled until we switch to Azure Pipelines
- # as the source of truth, or until we setup a separate test repo.
- #commit_toolstate_change "$MESSAGE_FILE" "$BUILD_SOURCESDIRECTORY/src/tools/publish_toolstate.py" "$(git rev-parse HEAD)" "$(git log --format=%s -n1 HEAD)" "$MESSAGE_FILE" "$TOOLSTATE_REPO_ACCESS_TOKEN"
+ commit_toolstate_change "$MESSAGE_FILE" "$BUILD_SOURCESDIRECTORY/src/tools/publish_toolstate.py" "$(git rev-parse HEAD)" "$(git log --format=%s -n1 HEAD)" "$MESSAGE_FILE" "$TOOLSTATE_REPO_ACCESS_TOKEN"
displayName: Publish toolstate
env:
- TOOLSTATE_REPO_ACCESS_TOKEN: $(TOOLSTATE_REPO_ACCESS_TOKEN_SECRET)
+ TOOLSTATE_REPO_ACCESS_TOKEN: $(TOOLSTATE_REPO_ACCESS_TOKEN)
diff --git a/.azure-pipelines/pr.yml b/.azure-pipelines/pr.yml
index 84c9454..88b5067 100644
--- a/.azure-pipelines/pr.yml
+++ b/.azure-pipelines/pr.yml
@@ -4,10 +4,11 @@
trigger: none
pr:
-- master # FIXME: really just want any branch, but want an explicit "pr" property set so it's clear
+- master
jobs:
- job: Linux
+ timeoutInMinutes: 600
pool:
vmImage: ubuntu-16.04
steps:
@@ -15,8 +16,18 @@
strategy:
matrix:
x86_64-gnu-llvm-6.0:
- RUST_BACKTRACE: 1
+ IMAGE: x86_64-gnu-llvm-6.0
+ mingw-check:
+ IMAGE: mingw-check
-# x86_64-gnu-tools: {}
-# # if: branch = auto OR (type = pull_request AND commit_message =~ /(?i:^update.*\b(rls|rustfmt|clippy|miri|cargo)\b)/)
-# mingw-check: {}
+# TODO: enable this job if the commit message matches this regex, need tools
+# figure out how to get the current commit message on azure and stick it in a
+# condition somewhere
+# if: commit_message =~ /(?i:^update.*\b(rls|rustfmt|clippy|miri|cargo)\b)/
+# - job: Linux-x86_64-gnu-tools
+# pool:
+# vmImage: ubuntu-16.04
+# steps:
+# - template: steps/run.yml
+# variables:
+# IMAGE: x86_64-gnu-tools
diff --git a/.azure-pipelines/steps/run.yml b/.azure-pipelines/steps/run.yml
index 49bac62..3f98aa4 100644
--- a/.azure-pipelines/steps/run.yml
+++ b/.azure-pipelines/steps/run.yml
@@ -11,6 +11,12 @@
- checkout: self
fetchDepth: 2
+# Spawn a background process to collect CPU usage statistics which we'll upload
+# at the end of the build. See the comments in the script here for more
+# information.
+- bash: python src/ci/cpu-usage-over-time.py &> cpu-usage.csv &
+ displayName: "Collect CPU-usage statistics in the background"
+
- bash: printenv | sort
displayName: Show environment variables
@@ -118,6 +124,7 @@
CI: true
SRC: .
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
+ TOOLSTATE_REPO_ACCESS_TOKEN: $(TOOLSTATE_REPO_ACCESS_TOKEN)
displayName: Run build
# If we're a deploy builder, use the `aws` command to publish everything to our
@@ -142,3 +149,13 @@
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
condition: and(succeeded(), or(eq(variables.DEPLOY, '1'), eq(variables.DEPLOY_ALT, '1')))
displayName: Upload artifacts
+
+# Upload CPU usage statistics that we've been gathering this whole time. Always
+# execute this step in case we want to inspect failed builds, but don't let
+# errors here ever fail the build since this is just informational.
+- bash: aws s3 cp --acl public-read cpu-usage.csv s3://$DEPLOY_BUCKET/rustc-builds/$BUILD_SOURCEVERSION/cpu-$SYSTEM_JOBNAME.csv
+ env:
+ AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
+ condition: variables['AWS_SECRET_ACCESS_KEY']
+ continueOnError: true
+ displayName: Upload CPU usage statistics
diff --git a/Cargo.lock b/Cargo.lock
index 948074c..403fe41 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -33,7 +33,7 @@
[[package]]
name = "ammonia"
-version = "1.1.0"
+version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"html5ever 0.22.5 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -70,7 +70,7 @@
version = "0.0.0"
dependencies = [
"rustc_data_structures 0.0.0",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -190,7 +190,7 @@
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -213,7 +213,7 @@
name = "build-manifest"
version = "0.1.0"
dependencies = [
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -266,6 +266,7 @@
"bufstream 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
"bytesize 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cargo-test-macro 0.1.0",
"clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)",
"core-foundation 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
"crates-io 0.26.0",
@@ -279,8 +280,8 @@
"flate2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"fwdansi 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "git2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "git2-curl 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2-curl 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
"glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"home 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -290,7 +291,7 @@
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
- "libgit2-sys 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libgit2-sys 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"miow 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -303,7 +304,7 @@
"rustfix 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"same-file 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_ignored 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"shell-escape 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -315,18 +316,27 @@
"unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "varisat 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
+name = "cargo-test-macro"
+version = "0.1.0"
+dependencies = [
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "cargo_metadata"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -338,7 +348,7 @@
dependencies = [
"failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -417,7 +427,7 @@
"rustc-workspace-hack 1.0.0",
"rustc_tools_util 0.2.0",
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -437,8 +447,8 @@
"quine-mc_cluskey 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)",
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-normalization 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -506,7 +516,7 @@
"miow 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rustfix 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -525,7 +535,7 @@
"miow 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rustfix 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -566,7 +576,7 @@
"curl 0.4.21 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"http 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -594,7 +604,7 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam-utils 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -710,9 +720,9 @@
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -720,10 +730,10 @@
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -746,20 +756,31 @@
[[package]]
name = "directories"
-version = "1.0.2"
+version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dirs-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "dirs"
-version = "1.0.4"
+version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
- "redox_users 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_users 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "dirs-sys"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_users 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -785,7 +806,7 @@
dependencies = [
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"strum 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -869,9 +890,9 @@
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -910,6 +931,9 @@
[[package]]
name = "fmt_macros"
version = "0.0.0"
+dependencies = [
+ "syntax_pos 0.0.0",
+]
[[package]]
name = "fnv"
@@ -961,6 +985,11 @@
]
[[package]]
+name = "fuchsia-cprng"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
name = "fuchsia-zircon"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1015,12 +1044,12 @@
[[package]]
name = "git2"
-version = "0.8.0"
+version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
- "libgit2-sys 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libgit2-sys 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"openssl-sys 0.9.43 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1029,11 +1058,11 @@
[[package]]
name = "git2-curl"
-version = "0.9.0"
+version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"curl 0.4.21 (registry+https://github.com/rust-lang/crates.io-index)",
- "git2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -1070,7 +1099,7 @@
"pest_derive 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
"quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -1085,13 +1114,18 @@
"pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "hashbrown"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "hashbrown"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
@@ -1129,10 +1163,10 @@
dependencies = [
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "markup5ever 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "markup5ever 0.7.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -1142,7 +1176,7 @@
dependencies = [
"bytes 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
"fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -1247,7 +1281,7 @@
[[package]]
name = "itoa"
-version = "0.4.3"
+version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@@ -1282,7 +1316,7 @@
dependencies = [
"futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -1312,6 +1346,11 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
+name = "leb128"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
name = "libc"
version = "0.2.54"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1321,11 +1360,10 @@
[[package]]
name = "libgit2-sys"
-version = "0.7.11"
+version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "curl-sys 0.4.18 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
"libssh2-sys 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1413,7 +1451,7 @@
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"num-derive 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1448,12 +1486,12 @@
[[package]]
name = "markup5ever"
-version = "0.7.2"
+version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"phf 0.7.22 (registry+https://github.com/rust-lang/crates.io-index)",
"phf_codegen 0.7.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1471,7 +1509,7 @@
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "ammonia 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ammonia 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)",
"elasticlunr-rs 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1485,7 +1523,7 @@
"open 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"shlex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1499,7 +1537,7 @@
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "ammonia 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ammonia 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)",
"elasticlunr-rs 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1513,7 +1551,7 @@
"open 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"shlex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1651,10 +1689,10 @@
version = "0.1.0"
dependencies = [
"byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "cargo_metadata 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cargo_metadata 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"colored 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"compiletest_rs 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "directories 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "directories 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1695,9 +1733,9 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -1774,6 +1812,14 @@
]
[[package]]
+name = "ordered-float"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "ordermap"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1813,6 +1859,7 @@
version = "0.0.0"
dependencies = [
"alloc 0.0.0",
+ "cfg-if 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
"compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1836,11 +1883,29 @@
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
+name = "partial_ref"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "partial_ref_derive 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "partial_ref_derive"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "percent-encoding"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1884,9 +1949,9 @@
dependencies = [
"pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"pest_meta 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -1931,7 +1996,7 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"phf_shared 0.7.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -1983,7 +2048,7 @@
[[package]]
name = "proc-macro2"
-version = "0.4.24"
+version = "0.4.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2063,10 +2128,10 @@
[[package]]
name = "quote"
-version = "0.6.10"
+version = "0.6.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -2087,11 +2152,13 @@
[[package]]
name = "rand"
-version = "0.4.3"
+version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2128,6 +2195,11 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
+name = "rand_core"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
name = "rand_hc"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2144,6 +2216,19 @@
]
[[package]]
+name = "rand_os"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "rand_pcg"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2166,19 +2251,26 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rayon-core 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rayon-core 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rayon-core"
-version = "1.4.0"
+version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam-deque 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rdrand"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -2196,12 +2288,12 @@
[[package]]
name = "redox_users"
-version = "0.2.0"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"argon2rs 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_syscall 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2297,7 +2389,7 @@
"rustc-workspace-hack 1.0.0",
"rustc_tools_util 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustfmt-nightly 1.2.2",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_ignored 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2322,7 +2414,7 @@
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-data 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-span 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2337,7 +2429,7 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rls-span 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -2349,7 +2441,7 @@
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -2399,7 +2491,7 @@
"rustc_target 0.0.0",
"scoped-tls 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serialize 0.0.0",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax 0.0.0",
"syntax_pos 0.0.0",
"tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2445,7 +2537,7 @@
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-rayon 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-rayon-core 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"stable_deref_trait 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2481,7 +2573,7 @@
version = "407.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -2497,7 +2589,7 @@
"rustc-ap-serialize 407.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-syntax_pos 407.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"scoped-tls 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -2609,10 +2701,10 @@
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
"scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2625,7 +2717,7 @@
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
"rustc_target 0.0.0",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax 0.0.0",
"syntax_pos 0.0.0",
]
@@ -2636,7 +2728,7 @@
dependencies = [
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_cratesio_shim 0.0.0",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -2745,7 +2837,7 @@
"rustc-rayon-core 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_cratesio_shim 0.0.0",
"serialize 0.0.0",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"stable_deref_trait 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2779,7 +2871,7 @@
"rustc_typeck 0.0.0",
"scoped-tls 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serialize 0.0.0",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax 0.0.0",
"syntax_ext 0.0.0",
"syntax_pos 0.0.0",
@@ -2844,7 +2936,7 @@
"rustc_typeck 0.0.0",
"scoped-tls 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serialize 0.0.0",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax 0.0.0",
"syntax_ext 0.0.0",
"syntax_pos 0.0.0",
@@ -2887,9 +2979,9 @@
version = "0.1.0"
dependencies = [
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2905,7 +2997,7 @@
"rustc_errors 0.0.0",
"rustc_target 0.0.0",
"serialize 0.0.0",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"stable_deref_trait 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax 0.0.0",
"syntax_ext 0.0.0",
@@ -2929,7 +3021,7 @@
"rustc_errors 0.0.0",
"rustc_target 0.0.0",
"serialize 0.0.0",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax 0.0.0",
"syntax_pos 0.0.0",
]
@@ -2994,7 +3086,7 @@
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
"rustc_metadata 0.0.0",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax 0.0.0",
"syntax_pos 0.0.0",
]
@@ -3048,7 +3140,7 @@
"rustc 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_target 0.0.0",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax 0.0.0",
"syntax_pos 0.0.0",
]
@@ -3074,7 +3166,7 @@
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
"rustc_target 0.0.0",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax 0.0.0",
"syntax_pos 0.0.0",
]
@@ -3115,7 +3207,7 @@
dependencies = [
"failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -3130,7 +3222,7 @@
"cargo_metadata 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"derive-new 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
"diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "dirs 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dirs 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"getopts 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3143,7 +3235,7 @@
"rustc-ap-syntax 407.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-syntax_pos 407.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-workspace-hack 1.0.0",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3207,7 +3299,7 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -3217,7 +3309,7 @@
[[package]]
name = "serde"
-version = "1.0.82"
+version = "1.0.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3228,9 +3320,9 @@
version = "1.0.81"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -3238,7 +3330,7 @@
version = "0.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -3246,9 +3338,9 @@
version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"ryu 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -3256,7 +3348,7 @@
version = "0.0.0"
dependencies = [
"indexmap 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -3309,11 +3401,8 @@
[[package]]
name = "smallvec"
-version = "0.6.7"
+version = "0.6.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
[[package]]
name = "socket2"
@@ -3338,6 +3427,7 @@
"alloc 0.0.0",
"backtrace 0.3.29 (registry+https://github.com/rust-lang/crates.io-index)",
"cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
"compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
"dlmalloc 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3364,7 +3454,7 @@
"new_debug_unreachable 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"phf_shared 0.7.22 (registry+https://github.com/rust-lang/crates.io-index)",
"precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_codegen 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -3376,8 +3466,8 @@
dependencies = [
"phf_generator 0.7.22 (registry+https://github.com/rust-lang/crates.io-index)",
"phf_shared 0.7.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -3410,9 +3500,9 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"heck 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -3427,11 +3517,11 @@
[[package]]
name = "syn"
-version = "0.15.22"
+version = "0.15.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -3448,9 +3538,9 @@
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -3467,7 +3557,7 @@
"rustc_target 0.0.0",
"scoped-tls 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serialize 0.0.0",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax_pos 0.0.0",
]
@@ -3480,7 +3570,7 @@
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
"rustc_target 0.0.0",
- "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax 0.0.0",
"syntax_pos 0.0.0",
]
@@ -3612,7 +3702,7 @@
version = "0.1.0"
dependencies = [
"regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -3819,7 +3909,7 @@
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -3827,7 +3917,7 @@
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -3936,6 +4026,7 @@
version = "0.0.0"
dependencies = [
"cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
"compiler_builtins 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3956,7 +4047,7 @@
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -3976,6 +4067,76 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
+name = "varisat"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "leb128 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "partial_ref 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)",
+ "varisat-checker 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "varisat-dimacs 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "varisat-formula 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "varisat-internal-macros 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "varisat-internal-proof 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "vec_mut_scan 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "varisat-checker"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hashbrown 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "varisat-dimacs 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "varisat-formula 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "varisat-internal-proof 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "varisat-dimacs"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "varisat-formula 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "varisat-formula"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "varisat-internal-macros"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)",
+ "synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "varisat-internal-proof"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "varisat-formula 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
name = "vcpkg"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3986,6 +4147,11 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
+name = "vec_mut_scan"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
name = "vergen"
version = "3.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -4111,7 +4277,7 @@
"checksum adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7e522997b529f05601e05166c07ed17789691f562762c7f3b987263d2dedee5c"
"checksum aho-corasick 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)" = "1e9a933f4e58658d7b12defcf96dc5c720f20832deebe3e0a19efd3b6aaeeb9e"
"checksum aho-corasick 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e6f484ae0c99fec2e858eb6134949117399f222608d84cadb3f58c1f97c2364c"
-"checksum ammonia 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fd4c682378117e4186a492b2252b9537990e1617f44aed9788b9a1149de45477"
+"checksum ammonia 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a8b93ecb80665873703bf3b0a77f369c96b183d8e0afaf30a3ff5ff07dfc6409"
"checksum annotate-snippets 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e8bcdcd5b291ce85a78f2b9d082a8de9676c12b1840d386d67bc5eea6f9d2b4e"
"checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
"checksum arc-swap 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1025aeae2b664ca0ea726a89d574fe8f4e77dd712d443236ad1de00379450cf6"
@@ -4172,8 +4338,9 @@
"checksum diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a"
"checksum difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"
"checksum digest 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90"
-"checksum directories 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "72d337a64190607d4fcca2cb78982c5dd57f4916e19696b48a575fa746b6cb0f"
-"checksum dirs 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "88972de891f6118092b643d85a0b28e0678e0f948d7f879aa32f2d5aafe97d2a"
+"checksum directories 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2ccc83e029c3cebb4c8155c644d34e3a070ccdb4ff90d369c74cd73f7cb3c984"
+"checksum dirs 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "3fd78930633bd1c6e35c4b42b1df7b0cbc6bc191146e512bb3bedf243fcc3901"
+"checksum dirs-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "937756392ec77d1f2dd9dc3ac9d69867d109a2121479d72c364e42f4cab21e2d"
"checksum dlmalloc 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f283302e035e61c23f2b86b3093e8c6273a4c3125742d6087e96ade001ca5e63"
"checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0"
"checksum elasticlunr-rs 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "a99a310cd1f9770e7bf8e48810c7bcbb0e078c8fb23a8c7bcf0da4c2bf61a455"
@@ -4196,6 +4363,7 @@
"checksum fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"
"checksum fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674"
"checksum fst 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d94485a00b1827b861dd9d1a2cc9764f9044d4c535514c0760a5a2012ef3399f"
+"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
"checksum futf 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7c9c1ce3fa9336301af935ab852c437817d14cd33690446569392e65170aac3b"
@@ -4203,12 +4371,13 @@
"checksum fwdansi 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "34dd4c507af68d37ffef962063dfa1944ce0dd4d5b82043dbab1dabe088610c3"
"checksum generic-array 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ef25c5683767570c2bbd7deba372926a55eaae9982d7726ee2a1050239d45b9d"
"checksum getopts 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)" = "72327b15c228bfe31f1390f93dd5e9279587f0463836393c9df719ce62a3e450"
-"checksum git2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c7339329bfa14a00223244311560d11f8f489b453fb90092af97f267a6090ab0"
-"checksum git2-curl 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d58551e903ed7e2d6fe3a2f3c7efa3a784ec29b19d0fbb035aaf0497c183fbdd"
+"checksum git2 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "924b2e7d2986e625dcad89e8a429a7b3adee3c3d71e585f4a66c4f7e78715e31"
+"checksum git2-curl 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f21f0550fd5d3f7c5adb94797fcd3d1002d7fc1fa349c82fe44f3c97ef80b62c"
"checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
"checksum globset 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ef4feaabe24a0a658fd9cf4a9acf6ed284f045c77df0f49020ba3245cfb7b454"
"checksum handlebars 0.32.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d89ec99d1594f285d4590fc32bac5f75cdab383f1123d504d27862c644a807dd"
"checksum handlebars 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d82e5750d8027a97b9640e3fefa66bbaf852a35228e1c90790efd13c4b09c166"
+"checksum hashbrown 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "29fba9abe4742d586dfd0c06ae4f7e73a1c2d86b856933509b269d82cdf06e18"
"checksum hashbrown 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9529213c67695ca2d146e6f263b7b72df8fa973368beadf767e8ed80c03f2f36"
"checksum heck 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ea04fa3ead4e05e51a7c806fc07271fdbde4e246a6c6d1efd52e72230b771b82"
"checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77"
@@ -4225,7 +4394,7 @@
"checksum is-match 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7e5b386aef33a1c677be65237cb9d32c3f3ef56bd035949710c4bb13083eb053"
"checksum itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)" = "f58856976b776fedd95533137617a02fb25719f40e7d9b01c7043cd65474f450"
"checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358"
-"checksum itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1306f3464951f30e30d12373d31c79fbd52d236e5e896fd92f96ec7babbbe60b"
+"checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f"
"checksum jemalloc-sys 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7bef0d4ce37578dfd80b466e3d8324bd9de788e249f1accebb0c472ea4b52bdc"
"checksum jobserver 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "b3d51e24009d966c8285d524dbaf6d60926636b2a89caee9ce0bd612494ddc16"
"checksum json 0.11.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9ad0485404155f45cce53a40d4b2d6ac356418300daed05273d9e26f91c390be"
@@ -4234,8 +4403,9 @@
"checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73"
"checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14"
"checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f"
+"checksum leb128 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3576a87f2ba00f6f106fdfcd16db1d698d648a26ad8e0573cad8537c3c362d2a"
"checksum libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)" = "c6785aa7dd976f5fbf3b71cfd9cd49d7f783c1ff565a858d71031c6c313aa5c6"
-"checksum libgit2-sys 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)" = "48441cb35dc255da8ae72825689a95368bf510659ae1ad55dc4aa88cb1789bf1"
+"checksum libgit2-sys 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "941a41e23f77323b8c9d2ee118aec9ee39dfc176078c18b4757d3bad049d9ff7"
"checksum libnghttp2-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d75d7966bda4730b722d1eab8e668df445368a24394bae9fc1e8dc0ab3dbe4f4"
"checksum libssh2-sys 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "126a1f4078368b163bfdee65fbab072af08a1b374a5551b21e87ade27b1fbf9d"
"checksum libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)" = "2eb5e43362e38e2bca2fd5f5134c4d4564a23a5c28e9b95411652021a8675ebe"
@@ -4248,7 +4418,7 @@
"checksum mac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
"checksum macro-utils 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f2c4deaccc2ead6a28c16c0ba82f07d52b6475397415ce40876e559b0b0ea510"
"checksum maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "08cbb6b4fef96b6d77bfc40ec491b1690c779e77b05cd9f07f787ed376fd4c43"
-"checksum markup5ever 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bfedc97d5a503e96816d10fedcd5b42f760b2e525ce2f7ec71f6a41780548475"
+"checksum markup5ever 0.7.5 (registry+https://github.com/rust-lang/crates.io-index)" = "897636f9850c3eef4905a5540683ed53dc9393860f0846cab2c2ddf9939862ff"
"checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
"checksum mdbook 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "90b5a8d7e341ceee5db3882a06078d42661ddcfa2b3687319cc5da76ec4e782f"
"checksum mdbook 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0ba0d44cb4089c741b9a91f3e5218298a40699c2f3a070a85014eed290c60819"
@@ -4278,12 +4448,15 @@
"checksum openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de"
"checksum openssl-src 111.1.0+1.1.1a (registry+https://github.com/rust-lang/crates.io-index)" = "26bb632127731bf4ac49bf86a5dde12d2ca0918c2234fc39d79d4da2ccbc6da7"
"checksum openssl-sys 0.9.43 (registry+https://github.com/rust-lang/crates.io-index)" = "33c86834957dd5b915623e94f2f4ab2c70dd8f6b70679824155d5ae21dbd495d"
+"checksum ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "18869315e81473c951eb56ad5558bbc56978562d3ecfb87abb7a1e944cea4518"
"checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063"
"checksum ordslice 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dd20eec3dbe4376829cb7d80ae6ac45e0a766831dca50202ff2d40db46a8a024"
"checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37"
"checksum packed_simd 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "25d36de864f7218ec5633572a800109bbe5a1cc8d9d95a967f3daf93ea7e6ddc"
"checksum parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ab41b4aed082705d1056416ae4468b6ea99d52599ecf3169b00088d43113e337"
"checksum parking_lot_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "94c8c7923936b28d546dfd14d4472eaf34c99b14e1c973a32b3e6d4eb04298c9"
+"checksum partial_ref 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b85fa89a02abf59d36821c373b5ed38c8e075505f1a08618b000fce81229bc"
+"checksum partial_ref_derive 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "759319b785d033e4279ec98fb2d1fb767a1af5b6a8996086c07168169cff079b"
"checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831"
"checksum pest 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0fce5d8b5cc33983fc74f78ad552b5522ab41442c4ca91606e4236eb4b5ceefc"
"checksum pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "54f0c72a98d8ab3c99560bfd16df8059cc10e1f9a8e83e6e3b97718dd766e9c3"
@@ -4301,7 +4474,7 @@
"checksum precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
"checksum pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3a029430f0d744bc3d15dd474d591bed2402b645d024583082b9f63bb936dac6"
"checksum pretty_env_logger 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df8b3f4e0475def7d9c2e5de8e5a1306949849761e107b360d03e98eafaffd61"
-"checksum proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)" = "77619697826f31a02ae974457af0b29b723e5619e113e9397b8b82c6bd253f09"
+"checksum proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)" = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759"
"checksum proptest 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "24f5844db2f839e97e3021980975f6ebf8691d9b9b2ca67ed3feb38dc3edb52c"
"checksum pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d6fdf85cda6cadfae5428a54661d431330b312bc767ddbc57adbedc24da66e32"
"checksum pulldown-cmark 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "051e60ace841b3bfecd402fe5051c06cb3bec4a6e6fdd060a37aa8eb829a1db3"
@@ -4309,21 +4482,24 @@
"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
"checksum quine-mc_cluskey 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "07589615d719a60c8dd8a4622e7946465dfef20d1a428f969e3443e7386d5f45"
"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
-"checksum quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "53fa22a1994bd0f9372d7a816207d8a2677ad0325b073f5c5332760f0fb62b5c"
+"checksum quote 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "faf4799c5d274f3868a4aae320a0a182cbd2baee377b378f080e16a23e9d80db"
"checksum racer 2.1.22 (registry+https://github.com/rust-lang/crates.io-index)" = "1e4323343f25bc372dc9293ac6b5cd3034b32784af1e7de9366b4db71466d8c7"
-"checksum rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8356f47b32624fef5b3301c1be97e5944ecdd595409cc5da11d05f211db6cfbd"
+"checksum rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293"
"checksum rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ae9d223d52ae411a33cf7e54ec6034ec165df296ccd23533d671a28252b6f66a"
"checksum rand_chacha 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "771b009e3a508cb67e8823dda454aaa5368c7bc1c16829fb77d3e980440dd34a"
"checksum rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0905b6b7079ec73b314d4c748701f6931eb79fd97c668caa3f1899b22b32c6db"
+"checksum rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0e7a549d590831370895ab7ba4ea0c1b6b011d106b5ff2da6eee112615e6dc0"
"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4"
"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08"
+"checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071"
"checksum rand_pcg 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "086bd09a33c7044e56bb44d5bdde5a60e7f119a9e95b0775f545de759a32fe05"
"checksum rand_xorshift 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "effa3fcaa47e18db002bdde6060944b6d2f9cfd8db471c30e873448ad9187be3"
"checksum rayon 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80e811e76f1dbf68abf87a759083d34600017fc4e10b6bd5ad84a700f9dba4b1"
-"checksum rayon-core 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9d24ad214285a7729b174ed6d3bcfcb80177807f959d95fafd5bfc5c4f201ac8"
+"checksum rayon-core 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b055d1e92aba6877574d8fe604a63c8b5df60f60e5982bf7ccbb1338ea527356"
+"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
"checksum redox_syscall 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "679da7508e9a6390aeaf7fbd02a800fdc64b73fe2204dd2c8ae66d22d9d5ad5d"
"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
-"checksum redox_users 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "214a97e49be64fd2c86f568dd0cb2c757d2cc53de95b273b6ad0a1c908482f26"
+"checksum redox_users 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3fe5204c3a17e97dde73f285d49be585df59ed84b50a872baf416e73b62c3828"
"checksum regex 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9329abc99e39129fcceabd24cf5d85b4671ef7c29c50e972bc5afe32438ec384"
"checksum regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "8f0a0bcab2fd7d1d7c54fa9eae6f43eddeb9ce2e7352f8518a814a4f65d60c58"
"checksum regex-syntax 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7d707a4fa2637f2dca2ef9fd02225ec7661fe01a53623c1e6515b6916511f7a7"
@@ -4362,7 +4538,7 @@
"checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27"
"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
-"checksum serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)" = "6fa52f19aee12441d5ad11c9a00459122bd8f98707cadf9778c540674f1935b6"
+"checksum serde 1.0.92 (registry+https://github.com/rust-lang/crates.io-index)" = "32746bf0f26eab52f06af0d0aa1984f641341d06d8d673c693871da2d188c9be"
"checksum serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)" = "477b13b646f5b5b56fc95bedfc3b550d12141ce84f466f6c44b9a17589923885"
"checksum serde_ignored 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "190e9765dcedb56be63b6e0993a006c7e3b071a016a304736e4a315dc01fb142"
"checksum serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)" = "c37ccd6be3ed1fdf419ee848f7c758eb31b054d7cd3ae3600e3bae0adf569811"
@@ -4373,7 +4549,7 @@
"checksum siphasher 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0df90a788073e8d0235a67e50441d47db7c8ad9debd91cbf43736a2a92d36537"
"checksum sized-chunks 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a2a2eb3fe454976eefb479f78f9b394d34d661b647c6326a3a6e66f68bb12c26"
"checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
-"checksum smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)" = "b73ea3738b47563803ef814925e69be00799a8c07420be8b996f8e98fb2336db"
+"checksum smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ab606a9c5e214920bb66c458cd7be8ef094f813f20fe77a54cc7dbfff220d4b7"
"checksum socket2 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "c4d11a52082057d87cb5caa31ad812f4504b97ab44732cd8359df2e9ff9f48e7"
"checksum stable_deref_trait 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ffbc596e092fe5f598b12ef46cc03754085ac2f4d8c739ad61c4ae266cc3b3fa"
"checksum string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "25d70109977172b127fe834e5449e5ab1740b9ba49fa18a2020f509174f25423"
@@ -4384,7 +4560,7 @@
"checksum strum 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f6c3a2071519ab6a48f465808c4c1ffdd00dfc8e93111d02b4fc5abab177676e"
"checksum strum_macros 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8baacebd7b7c9b864d83a6ba7a246232983e277b86fa5cdec77f565715a4b136"
"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad"
-"checksum syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)" = "ae8b29eb5210bc5cf63ed6149cbf9adfc82ac0be023d8735c176ee74a2db4da7"
+"checksum syn 0.15.35 (registry+https://github.com/rust-lang/crates.io-index)" = "641e117d55514d6d918490e47102f7e08d096fdde360247e4a10f7a91a8478d3"
"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6"
"checksum synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02353edf96d6e4dc81aea2d8490a7e9db177bf8acb0e951c24940bf866cb313f"
"checksum tar 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)" = "a303ba60a099fcd2aaa646b14d2724591a96a75283e4b7ed3d1a1658909d9ae2"
@@ -4433,8 +4609,15 @@
"checksum utf-8 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f1262dfab4c30d5cb7c07026be00ee343a6cf5027fdc0104a9160f354e5db75c"
"checksum utf8-ranges 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "796f7e48bef87609f7ade7e06495a87d5cd06c7866e6a5cbfceffc558a243737"
"checksum utf8parse 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8772a4ccbb4e89959023bc5b7cb8623a795caa7092d99f3aa9501b9484d4557d"
+"checksum varisat 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a2640f5949bcd945ffdb030f5f336d0a5da8fe8ddab8e8230e2e030ea0623cfa"
+"checksum varisat-checker 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a44da8d5e25b089d66fb3d14ae87994e2f7ba7f86ff396b7c490083d8a9a0a7b"
+"checksum varisat-dimacs 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f992cf40560ad73983369414fcc5a42fb9c9e39ae7ff215c75725f9c6785f0b9"
+"checksum varisat-formula 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "78d44ab5c6de769e855c77add5b0efa73ed3320b06485f04c8d3fad9b2eb9997"
+"checksum varisat-internal-macros 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0e76c43d9badf53d22b0edd25667d65b7b67167e2cce249c9d1e3ca0f02dc81c"
+"checksum varisat-internal-proof 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5a7553f03a4a8581410fb1813add70ce54e481d0e3eb1ca2cc1754faf46ff9ad"
"checksum vcpkg 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "def296d3eb3b12371b2c7d0e83bfe1403e4db2d7a0bba324a12b21c4ee13143d"
"checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"
+"checksum vec_mut_scan 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d5668931075a8dfe6eb3e9e585d06f0ab4d9b377663e94d135ef51933ff9f6"
"checksum vergen 3.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "6aba5e34f93dc7051dfad05b98a18e9156f27e7b431fe1d2398cb6061c0a1dba"
"checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs
index f5fb6f0..595deb0 100644
--- a/src/bootstrap/bin/rustc.rs
+++ b/src/bootstrap/bin/rustc.rs
@@ -93,6 +93,21 @@
env::join_paths(&dylib_path).unwrap());
let mut maybe_crate = None;
+ // Get the name of the crate we're compiling, if any.
+ let maybe_crate_name = args.windows(2)
+ .find(|a| &*a[0] == "--crate-name")
+ .map(|crate_name| &*crate_name[1]);
+
+ if let Some(current_crate) = maybe_crate_name {
+ if let Some(target) = env::var_os("RUSTC_TIME") {
+ if target == "all" ||
+ target.into_string().unwrap().split(",").any(|c| c.trim() == current_crate)
+ {
+ cmd.arg("-Ztime");
+ }
+ }
+ }
+
// Non-zero stages must all be treated uniformly to avoid problems when attempting to uplift
// compiler libraries and such from stage 1 to 2.
if stage == "0" {
@@ -152,10 +167,7 @@
cmd.arg(format!("-Clinker={}", target_linker));
}
- let crate_name = args.windows(2)
- .find(|a| &*a[0] == "--crate-name")
- .unwrap();
- let crate_name = &*crate_name[1];
+ let crate_name = maybe_crate_name.unwrap();
maybe_crate = Some(crate_name);
// If we're compiling specifically the `panic_abort` crate then we pass
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index 2281a45..2e9df48 100644
--- a/src/bootstrap/builder.rs
+++ b/src/bootstrap/builder.rs
@@ -787,7 +787,7 @@
let libtest_stamp = match cmd {
"check" | "clippy" | "fix" => check::libtest_stamp(self, cmp, target),
- _ => compile::libstd_stamp(self, cmp, target),
+ _ => compile::libtest_stamp(self, cmp, target),
};
let librustc_stamp = match cmd {
@@ -1327,665 +1327,4 @@
}
#[cfg(test)]
-mod __test {
- use super::*;
- use crate::config::Config;
- use std::thread;
-
- use pretty_assertions::assert_eq;
-
- fn configure(host: &[&str], target: &[&str]) -> Config {
- let mut config = Config::default_opts();
- // don't save toolstates
- config.save_toolstates = None;
- config.skip_only_host_steps = false;
- config.dry_run = true;
- // try to avoid spurious failures in dist where we create/delete each others file
- let dir = config.out.join("tmp-rustbuild-tests").join(
- &thread::current()
- .name()
- .unwrap_or("unknown")
- .replace(":", "-"),
- );
- t!(fs::create_dir_all(&dir));
- config.out = dir;
- config.build = INTERNER.intern_str("A");
- config.hosts = vec![config.build]
- .clone()
- .into_iter()
- .chain(host.iter().map(|s| INTERNER.intern_str(s)))
- .collect::<Vec<_>>();
- config.targets = config
- .hosts
- .clone()
- .into_iter()
- .chain(target.iter().map(|s| INTERNER.intern_str(s)))
- .collect::<Vec<_>>();
- config
- }
-
- fn first<A, B>(v: Vec<(A, B)>) -> Vec<A> {
- v.into_iter().map(|(a, _)| a).collect::<Vec<_>>()
- }
-
- #[test]
- fn dist_baseline() {
- let build = Build::new(configure(&[], &[]));
- let mut builder = Builder::new(&build);
- builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
-
- let a = INTERNER.intern_str("A");
-
- assert_eq!(
- first(builder.cache.all::<dist::Docs>()),
- &[dist::Docs { host: a },]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Mingw>()),
- &[dist::Mingw { host: a },]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Rustc>()),
- &[dist::Rustc {
- compiler: Compiler { host: a, stage: 2 }
- },]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Std>()),
- &[dist::Std {
- compiler: Compiler { host: a, stage: 1 },
- target: a,
- },]
- );
- assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
- }
-
- #[test]
- fn dist_with_targets() {
- let build = Build::new(configure(&[], &["B"]));
- let mut builder = Builder::new(&build);
- builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
-
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
-
- assert_eq!(
- first(builder.cache.all::<dist::Docs>()),
- &[
- dist::Docs { host: a },
- dist::Docs { host: b },
- ]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Mingw>()),
- &[dist::Mingw { host: a }, dist::Mingw { host: b },]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Rustc>()),
- &[dist::Rustc {
- compiler: Compiler { host: a, stage: 2 }
- },]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Std>()),
- &[
- dist::Std {
- compiler: Compiler { host: a, stage: 1 },
- target: a,
- },
- dist::Std {
- compiler: Compiler { host: a, stage: 2 },
- target: b,
- },
- ]
- );
- assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
- }
-
- #[test]
- fn dist_with_hosts() {
- let build = Build::new(configure(&["B"], &[]));
- let mut builder = Builder::new(&build);
- builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
-
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
-
- assert_eq!(
- first(builder.cache.all::<dist::Docs>()),
- &[
- dist::Docs { host: a },
- dist::Docs { host: b },
- ]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Mingw>()),
- &[dist::Mingw { host: a }, dist::Mingw { host: b },]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Rustc>()),
- &[
- dist::Rustc {
- compiler: Compiler { host: a, stage: 2 }
- },
- dist::Rustc {
- compiler: Compiler { host: b, stage: 2 }
- },
- ]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Std>()),
- &[
- dist::Std {
- compiler: Compiler { host: a, stage: 1 },
- target: a,
- },
- dist::Std {
- compiler: Compiler { host: a, stage: 1 },
- target: b,
- },
- ]
- );
- assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
- }
-
- #[test]
- fn dist_only_cross_host() {
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
- let mut build = Build::new(configure(&["B"], &[]));
- build.config.docs = false;
- build.config.extended = true;
- build.hosts = vec![b];
- let mut builder = Builder::new(&build);
- builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
-
- assert_eq!(
- first(builder.cache.all::<dist::Rustc>()),
- &[
- dist::Rustc {
- compiler: Compiler { host: b, stage: 2 }
- },
- ]
- );
- assert_eq!(
- first(builder.cache.all::<compile::Rustc>()),
- &[
- compile::Rustc {
- compiler: Compiler { host: a, stage: 0 },
- target: a,
- },
- compile::Rustc {
- compiler: Compiler { host: a, stage: 1 },
- target: b,
- },
- ]
- );
- }
-
- #[test]
- fn dist_with_targets_and_hosts() {
- let build = Build::new(configure(&["B"], &["C"]));
- let mut builder = Builder::new(&build);
- builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
-
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
- let c = INTERNER.intern_str("C");
-
- assert_eq!(
- first(builder.cache.all::<dist::Docs>()),
- &[
- dist::Docs { host: a },
- dist::Docs { host: b },
- dist::Docs { host: c },
- ]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Mingw>()),
- &[
- dist::Mingw { host: a },
- dist::Mingw { host: b },
- dist::Mingw { host: c },
- ]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Rustc>()),
- &[
- dist::Rustc {
- compiler: Compiler { host: a, stage: 2 }
- },
- dist::Rustc {
- compiler: Compiler { host: b, stage: 2 }
- },
- ]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Std>()),
- &[
- dist::Std {
- compiler: Compiler { host: a, stage: 1 },
- target: a,
- },
- dist::Std {
- compiler: Compiler { host: a, stage: 1 },
- target: b,
- },
- dist::Std {
- compiler: Compiler { host: a, stage: 2 },
- target: c,
- },
- ]
- );
- assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
- }
-
- #[test]
- fn dist_with_target_flag() {
- let mut config = configure(&["B"], &["C"]);
- config.skip_only_host_steps = true; // as-if --target=C was passed
- let build = Build::new(config);
- let mut builder = Builder::new(&build);
- builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
-
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
- let c = INTERNER.intern_str("C");
-
- assert_eq!(
- first(builder.cache.all::<dist::Docs>()),
- &[
- dist::Docs { host: a },
- dist::Docs { host: b },
- dist::Docs { host: c },
- ]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Mingw>()),
- &[
- dist::Mingw { host: a },
- dist::Mingw { host: b },
- dist::Mingw { host: c },
- ]
- );
- assert_eq!(first(builder.cache.all::<dist::Rustc>()), &[]);
- assert_eq!(
- first(builder.cache.all::<dist::Std>()),
- &[
- dist::Std {
- compiler: Compiler { host: a, stage: 1 },
- target: a,
- },
- dist::Std {
- compiler: Compiler { host: a, stage: 1 },
- target: b,
- },
- dist::Std {
- compiler: Compiler { host: a, stage: 2 },
- target: c,
- },
- ]
- );
- assert_eq!(first(builder.cache.all::<dist::Src>()), &[]);
- }
-
- #[test]
- fn dist_with_same_targets_and_hosts() {
- let build = Build::new(configure(&["B"], &["B"]));
- let mut builder = Builder::new(&build);
- builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
-
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
-
- assert_eq!(
- first(builder.cache.all::<dist::Docs>()),
- &[
- dist::Docs { host: a },
- dist::Docs { host: b },
- ]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Mingw>()),
- &[dist::Mingw { host: a }, dist::Mingw { host: b },]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Rustc>()),
- &[
- dist::Rustc {
- compiler: Compiler { host: a, stage: 2 }
- },
- dist::Rustc {
- compiler: Compiler { host: b, stage: 2 }
- },
- ]
- );
- assert_eq!(
- first(builder.cache.all::<dist::Std>()),
- &[
- dist::Std {
- compiler: Compiler { host: a, stage: 1 },
- target: a,
- },
- dist::Std {
- compiler: Compiler { host: a, stage: 1 },
- target: b,
- },
- ]
- );
- assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
- assert_eq!(
- first(builder.cache.all::<compile::Std>()),
- &[
- compile::Std {
- compiler: Compiler { host: a, stage: 0 },
- target: a,
- },
- compile::Std {
- compiler: Compiler { host: a, stage: 1 },
- target: a,
- },
- compile::Std {
- compiler: Compiler { host: a, stage: 2 },
- target: a,
- },
- compile::Std {
- compiler: Compiler { host: a, stage: 1 },
- target: b,
- },
- compile::Std {
- compiler: Compiler { host: a, stage: 2 },
- target: b,
- },
- ]
- );
- assert_eq!(
- first(builder.cache.all::<compile::Test>()),
- &[
- compile::Test {
- compiler: Compiler { host: a, stage: 0 },
- target: a,
- },
- compile::Test {
- compiler: Compiler { host: a, stage: 1 },
- target: a,
- },
- compile::Test {
- compiler: Compiler { host: a, stage: 2 },
- target: a,
- },
- compile::Test {
- compiler: Compiler { host: a, stage: 1 },
- target: b,
- },
- ]
- );
- assert_eq!(
- first(builder.cache.all::<compile::Assemble>()),
- &[
- compile::Assemble {
- target_compiler: Compiler { host: a, stage: 0 },
- },
- compile::Assemble {
- target_compiler: Compiler { host: a, stage: 1 },
- },
- compile::Assemble {
- target_compiler: Compiler { host: a, stage: 2 },
- },
- compile::Assemble {
- target_compiler: Compiler { host: b, stage: 2 },
- },
- ]
- );
- }
-
- #[test]
- fn build_default() {
- let build = Build::new(configure(&["B"], &["C"]));
- let mut builder = Builder::new(&build);
- builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Build), &[]);
-
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
- let c = INTERNER.intern_str("C");
-
- assert!(!builder.cache.all::<compile::Std>().is_empty());
- assert!(!builder.cache.all::<compile::Assemble>().is_empty());
- assert_eq!(
- first(builder.cache.all::<compile::Rustc>()),
- &[
- compile::Rustc {
- compiler: Compiler { host: a, stage: 0 },
- target: a,
- },
- compile::Rustc {
- compiler: Compiler { host: a, stage: 1 },
- target: a,
- },
- compile::Rustc {
- compiler: Compiler { host: a, stage: 2 },
- target: a,
- },
- compile::Rustc {
- compiler: Compiler { host: b, stage: 2 },
- target: a,
- },
- compile::Rustc {
- compiler: Compiler { host: a, stage: 1 },
- target: b,
- },
- compile::Rustc {
- compiler: Compiler { host: a, stage: 2 },
- target: b,
- },
- compile::Rustc {
- compiler: Compiler { host: b, stage: 2 },
- target: b,
- },
- ]
- );
-
- assert_eq!(
- first(builder.cache.all::<compile::Test>()),
- &[
- compile::Test {
- compiler: Compiler { host: a, stage: 0 },
- target: a,
- },
- compile::Test {
- compiler: Compiler { host: a, stage: 1 },
- target: a,
- },
- compile::Test {
- compiler: Compiler { host: a, stage: 2 },
- target: a,
- },
- compile::Test {
- compiler: Compiler { host: b, stage: 2 },
- target: a,
- },
- compile::Test {
- compiler: Compiler { host: a, stage: 1 },
- target: b,
- },
- compile::Test {
- compiler: Compiler { host: a, stage: 2 },
- target: b,
- },
- compile::Test {
- compiler: Compiler { host: b, stage: 2 },
- target: b,
- },
- compile::Test {
- compiler: Compiler { host: a, stage: 2 },
- target: c,
- },
- compile::Test {
- compiler: Compiler { host: b, stage: 2 },
- target: c,
- },
- ]
- );
- }
-
- #[test]
- fn build_with_target_flag() {
- let mut config = configure(&["B"], &["C"]);
- config.skip_only_host_steps = true;
- let build = Build::new(config);
- let mut builder = Builder::new(&build);
- builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Build), &[]);
-
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
- let c = INTERNER.intern_str("C");
-
- assert!(!builder.cache.all::<compile::Std>().is_empty());
- assert_eq!(
- first(builder.cache.all::<compile::Assemble>()),
- &[
- compile::Assemble {
- target_compiler: Compiler { host: a, stage: 0 },
- },
- compile::Assemble {
- target_compiler: Compiler { host: a, stage: 1 },
- },
- compile::Assemble {
- target_compiler: Compiler { host: a, stage: 2 },
- },
- compile::Assemble {
- target_compiler: Compiler { host: b, stage: 2 },
- },
- ]
- );
- assert_eq!(
- first(builder.cache.all::<compile::Rustc>()),
- &[
- compile::Rustc {
- compiler: Compiler { host: a, stage: 0 },
- target: a,
- },
- compile::Rustc {
- compiler: Compiler { host: a, stage: 1 },
- target: a,
- },
- compile::Rustc {
- compiler: Compiler { host: a, stage: 1 },
- target: b,
- },
- ]
- );
-
- assert_eq!(
- first(builder.cache.all::<compile::Test>()),
- &[
- compile::Test {
- compiler: Compiler { host: a, stage: 0 },
- target: a,
- },
- compile::Test {
- compiler: Compiler { host: a, stage: 1 },
- target: a,
- },
- compile::Test {
- compiler: Compiler { host: a, stage: 2 },
- target: a,
- },
- compile::Test {
- compiler: Compiler { host: b, stage: 2 },
- target: a,
- },
- compile::Test {
- compiler: Compiler { host: a, stage: 1 },
- target: b,
- },
- compile::Test {
- compiler: Compiler { host: a, stage: 2 },
- target: b,
- },
- compile::Test {
- compiler: Compiler { host: b, stage: 2 },
- target: b,
- },
- compile::Test {
- compiler: Compiler { host: a, stage: 2 },
- target: c,
- },
- compile::Test {
- compiler: Compiler { host: b, stage: 2 },
- target: c,
- },
- ]
- );
- }
-
- #[test]
- fn test_with_no_doc_stage0() {
- let mut config = configure(&[], &[]);
- config.stage = Some(0);
- config.cmd = Subcommand::Test {
- paths: vec!["src/libstd".into()],
- test_args: vec![],
- rustc_args: vec![],
- fail_fast: true,
- doc_tests: DocTests::No,
- bless: false,
- compare_mode: None,
- rustfix_coverage: false,
- };
-
- let build = Build::new(config);
- let mut builder = Builder::new(&build);
-
- let host = INTERNER.intern_str("A");
-
- builder.run_step_descriptions(
- &[StepDescription::from::<test::Crate>()],
- &["src/libstd".into()],
- );
-
- // Ensure we don't build any compiler artifacts.
- assert!(!builder.cache.contains::<compile::Rustc>());
- assert_eq!(
- first(builder.cache.all::<test::Crate>()),
- &[test::Crate {
- compiler: Compiler { host, stage: 0 },
- target: host,
- mode: Mode::Std,
- test_kind: test::TestKind::Test,
- krate: INTERNER.intern_str("std"),
- },]
- );
- }
-
- #[test]
- fn test_exclude() {
- let mut config = configure(&[], &[]);
- config.exclude = vec![
- "src/test/run-pass".into(),
- "src/tools/tidy".into(),
- ];
- config.cmd = Subcommand::Test {
- paths: Vec::new(),
- test_args: Vec::new(),
- rustc_args: Vec::new(),
- fail_fast: true,
- doc_tests: DocTests::No,
- bless: false,
- compare_mode: None,
- rustfix_coverage: false,
- };
-
- let build = Build::new(config);
- let builder = Builder::new(&build);
- builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Test), &[]);
-
- // Ensure we have really excluded run-pass & tidy
- assert!(!builder.cache.contains::<test::RunPass>());
- assert!(!builder.cache.contains::<test::Tidy>());
-
- // Ensure other tests are not affected.
- assert!(builder.cache.contains::<test::RunPassFullDeps>());
- assert!(builder.cache.contains::<test::RustdocUi>());
- }
-}
+mod tests;
diff --git a/src/bootstrap/builder/tests.rs b/src/bootstrap/builder/tests.rs
new file mode 100644
index 0000000..46c58d1
--- /dev/null
+++ b/src/bootstrap/builder/tests.rs
@@ -0,0 +1,656 @@
+use super::*;
+use crate::config::Config;
+use std::thread;
+
+use pretty_assertions::assert_eq;
+
+fn configure(host: &[&str], target: &[&str]) -> Config {
+ let mut config = Config::default_opts();
+ // don't save toolstates
+ config.save_toolstates = None;
+ config.skip_only_host_steps = false;
+ config.dry_run = true;
+ // try to avoid spurious failures in dist where we create/delete each other's files
+ let dir = config.out.join("tmp-rustbuild-tests").join(
+ &thread::current()
+ .name()
+ .unwrap_or("unknown")
+ .replace(":", "-"),
+ );
+ t!(fs::create_dir_all(&dir));
+ config.out = dir;
+ config.build = INTERNER.intern_str("A");
+ config.hosts = vec![config.build]
+ .clone()
+ .into_iter()
+ .chain(host.iter().map(|s| INTERNER.intern_str(s)))
+ .collect::<Vec<_>>();
+ config.targets = config
+ .hosts
+ .clone()
+ .into_iter()
+ .chain(target.iter().map(|s| INTERNER.intern_str(s)))
+ .collect::<Vec<_>>();
+ config
+}
+
+fn first<A, B>(v: Vec<(A, B)>) -> Vec<A> {
+ v.into_iter().map(|(a, _)| a).collect::<Vec<_>>()
+}
+
+#[test]
+fn dist_baseline() {
+ let build = Build::new(configure(&[], &[]));
+ let mut builder = Builder::new(&build);
+ builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
+
+ let a = INTERNER.intern_str("A");
+
+ assert_eq!(
+ first(builder.cache.all::<dist::Docs>()),
+ &[dist::Docs { host: a },]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Mingw>()),
+ &[dist::Mingw { host: a },]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Rustc>()),
+ &[dist::Rustc {
+ compiler: Compiler { host: a, stage: 2 }
+ },]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Std>()),
+ &[dist::Std {
+ compiler: Compiler { host: a, stage: 1 },
+ target: a,
+ },]
+ );
+ assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
+}
+
+#[test]
+fn dist_with_targets() {
+ let build = Build::new(configure(&[], &["B"]));
+ let mut builder = Builder::new(&build);
+ builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
+
+ let a = INTERNER.intern_str("A");
+ let b = INTERNER.intern_str("B");
+
+ assert_eq!(
+ first(builder.cache.all::<dist::Docs>()),
+ &[
+ dist::Docs { host: a },
+ dist::Docs { host: b },
+ ]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Mingw>()),
+ &[dist::Mingw { host: a }, dist::Mingw { host: b },]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Rustc>()),
+ &[dist::Rustc {
+ compiler: Compiler { host: a, stage: 2 }
+ },]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Std>()),
+ &[
+ dist::Std {
+ compiler: Compiler { host: a, stage: 1 },
+ target: a,
+ },
+ dist::Std {
+ compiler: Compiler { host: a, stage: 2 },
+ target: b,
+ },
+ ]
+ );
+ assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
+}
+
+#[test]
+fn dist_with_hosts() {
+ let build = Build::new(configure(&["B"], &[]));
+ let mut builder = Builder::new(&build);
+ builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
+
+ let a = INTERNER.intern_str("A");
+ let b = INTERNER.intern_str("B");
+
+ assert_eq!(
+ first(builder.cache.all::<dist::Docs>()),
+ &[
+ dist::Docs { host: a },
+ dist::Docs { host: b },
+ ]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Mingw>()),
+ &[dist::Mingw { host: a }, dist::Mingw { host: b },]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Rustc>()),
+ &[
+ dist::Rustc {
+ compiler: Compiler { host: a, stage: 2 }
+ },
+ dist::Rustc {
+ compiler: Compiler { host: b, stage: 2 }
+ },
+ ]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Std>()),
+ &[
+ dist::Std {
+ compiler: Compiler { host: a, stage: 1 },
+ target: a,
+ },
+ dist::Std {
+ compiler: Compiler { host: a, stage: 1 },
+ target: b,
+ },
+ ]
+ );
+ assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
+}
+
+#[test]
+fn dist_only_cross_host() {
+ let a = INTERNER.intern_str("A");
+ let b = INTERNER.intern_str("B");
+ let mut build = Build::new(configure(&["B"], &[]));
+ build.config.docs = false;
+ build.config.extended = true;
+ build.hosts = vec![b];
+ let mut builder = Builder::new(&build);
+ builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
+
+ assert_eq!(
+ first(builder.cache.all::<dist::Rustc>()),
+ &[
+ dist::Rustc {
+ compiler: Compiler { host: b, stage: 2 }
+ },
+ ]
+ );
+ assert_eq!(
+ first(builder.cache.all::<compile::Rustc>()),
+ &[
+ compile::Rustc {
+ compiler: Compiler { host: a, stage: 0 },
+ target: a,
+ },
+ compile::Rustc {
+ compiler: Compiler { host: a, stage: 1 },
+ target: b,
+ },
+ ]
+ );
+}
+
+#[test]
+fn dist_with_targets_and_hosts() {
+ let build = Build::new(configure(&["B"], &["C"]));
+ let mut builder = Builder::new(&build);
+ builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
+
+ let a = INTERNER.intern_str("A");
+ let b = INTERNER.intern_str("B");
+ let c = INTERNER.intern_str("C");
+
+ assert_eq!(
+ first(builder.cache.all::<dist::Docs>()),
+ &[
+ dist::Docs { host: a },
+ dist::Docs { host: b },
+ dist::Docs { host: c },
+ ]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Mingw>()),
+ &[
+ dist::Mingw { host: a },
+ dist::Mingw { host: b },
+ dist::Mingw { host: c },
+ ]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Rustc>()),
+ &[
+ dist::Rustc {
+ compiler: Compiler { host: a, stage: 2 }
+ },
+ dist::Rustc {
+ compiler: Compiler { host: b, stage: 2 }
+ },
+ ]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Std>()),
+ &[
+ dist::Std {
+ compiler: Compiler { host: a, stage: 1 },
+ target: a,
+ },
+ dist::Std {
+ compiler: Compiler { host: a, stage: 1 },
+ target: b,
+ },
+ dist::Std {
+ compiler: Compiler { host: a, stage: 2 },
+ target: c,
+ },
+ ]
+ );
+ assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
+}
+
+#[test]
+fn dist_with_target_flag() {
+ let mut config = configure(&["B"], &["C"]);
+ config.skip_only_host_steps = true; // as-if --target=C was passed
+ let build = Build::new(config);
+ let mut builder = Builder::new(&build);
+ builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
+
+ let a = INTERNER.intern_str("A");
+ let b = INTERNER.intern_str("B");
+ let c = INTERNER.intern_str("C");
+
+ assert_eq!(
+ first(builder.cache.all::<dist::Docs>()),
+ &[
+ dist::Docs { host: a },
+ dist::Docs { host: b },
+ dist::Docs { host: c },
+ ]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Mingw>()),
+ &[
+ dist::Mingw { host: a },
+ dist::Mingw { host: b },
+ dist::Mingw { host: c },
+ ]
+ );
+ assert_eq!(first(builder.cache.all::<dist::Rustc>()), &[]);
+ assert_eq!(
+ first(builder.cache.all::<dist::Std>()),
+ &[
+ dist::Std {
+ compiler: Compiler { host: a, stage: 1 },
+ target: a,
+ },
+ dist::Std {
+ compiler: Compiler { host: a, stage: 1 },
+ target: b,
+ },
+ dist::Std {
+ compiler: Compiler { host: a, stage: 2 },
+ target: c,
+ },
+ ]
+ );
+ assert_eq!(first(builder.cache.all::<dist::Src>()), &[]);
+}
+
+#[test]
+fn dist_with_same_targets_and_hosts() {
+ let build = Build::new(configure(&["B"], &["B"]));
+ let mut builder = Builder::new(&build);
+ builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
+
+ let a = INTERNER.intern_str("A");
+ let b = INTERNER.intern_str("B");
+
+ assert_eq!(
+ first(builder.cache.all::<dist::Docs>()),
+ &[
+ dist::Docs { host: a },
+ dist::Docs { host: b },
+ ]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Mingw>()),
+ &[dist::Mingw { host: a }, dist::Mingw { host: b },]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Rustc>()),
+ &[
+ dist::Rustc {
+ compiler: Compiler { host: a, stage: 2 }
+ },
+ dist::Rustc {
+ compiler: Compiler { host: b, stage: 2 }
+ },
+ ]
+ );
+ assert_eq!(
+ first(builder.cache.all::<dist::Std>()),
+ &[
+ dist::Std {
+ compiler: Compiler { host: a, stage: 1 },
+ target: a,
+ },
+ dist::Std {
+ compiler: Compiler { host: a, stage: 1 },
+ target: b,
+ },
+ ]
+ );
+ assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
+ assert_eq!(
+ first(builder.cache.all::<compile::Std>()),
+ &[
+ compile::Std {
+ compiler: Compiler { host: a, stage: 0 },
+ target: a,
+ },
+ compile::Std {
+ compiler: Compiler { host: a, stage: 1 },
+ target: a,
+ },
+ compile::Std {
+ compiler: Compiler { host: a, stage: 2 },
+ target: a,
+ },
+ compile::Std {
+ compiler: Compiler { host: a, stage: 1 },
+ target: b,
+ },
+ ]
+ );
+ assert_eq!(
+ first(builder.cache.all::<compile::Test>()),
+ &[
+ compile::Test {
+ compiler: Compiler { host: a, stage: 0 },
+ target: a,
+ },
+ compile::Test {
+ compiler: Compiler { host: a, stage: 1 },
+ target: a,
+ },
+ compile::Test {
+ compiler: Compiler { host: a, stage: 2 },
+ target: a,
+ },
+ compile::Test {
+ compiler: Compiler { host: a, stage: 1 },
+ target: b,
+ },
+ ]
+ );
+ assert_eq!(
+ first(builder.cache.all::<compile::Assemble>()),
+ &[
+ compile::Assemble {
+ target_compiler: Compiler { host: a, stage: 0 },
+ },
+ compile::Assemble {
+ target_compiler: Compiler { host: a, stage: 1 },
+ },
+ compile::Assemble {
+ target_compiler: Compiler { host: a, stage: 2 },
+ },
+ compile::Assemble {
+ target_compiler: Compiler { host: b, stage: 2 },
+ },
+ ]
+ );
+}
+
+#[test]
+fn build_default() {
+ let build = Build::new(configure(&["B"], &["C"]));
+ let mut builder = Builder::new(&build);
+ builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Build), &[]);
+
+ let a = INTERNER.intern_str("A");
+ let b = INTERNER.intern_str("B");
+ let c = INTERNER.intern_str("C");
+
+ assert!(!builder.cache.all::<compile::Std>().is_empty());
+ assert!(!builder.cache.all::<compile::Assemble>().is_empty());
+ assert_eq!(
+ first(builder.cache.all::<compile::Rustc>()),
+ &[
+ compile::Rustc {
+ compiler: Compiler { host: a, stage: 0 },
+ target: a,
+ },
+ compile::Rustc {
+ compiler: Compiler { host: a, stage: 1 },
+ target: a,
+ },
+ compile::Rustc {
+ compiler: Compiler { host: a, stage: 2 },
+ target: a,
+ },
+ compile::Rustc {
+ compiler: Compiler { host: b, stage: 2 },
+ target: a,
+ },
+ compile::Rustc {
+ compiler: Compiler { host: a, stage: 1 },
+ target: b,
+ },
+ compile::Rustc {
+ compiler: Compiler { host: a, stage: 2 },
+ target: b,
+ },
+ compile::Rustc {
+ compiler: Compiler { host: b, stage: 2 },
+ target: b,
+ },
+ ]
+ );
+
+ assert_eq!(
+ first(builder.cache.all::<compile::Test>()),
+ &[
+ compile::Test {
+ compiler: Compiler { host: a, stage: 0 },
+ target: a,
+ },
+ compile::Test {
+ compiler: Compiler { host: a, stage: 1 },
+ target: a,
+ },
+ compile::Test {
+ compiler: Compiler { host: a, stage: 2 },
+ target: a,
+ },
+ compile::Test {
+ compiler: Compiler { host: b, stage: 2 },
+ target: a,
+ },
+ compile::Test {
+ compiler: Compiler { host: a, stage: 1 },
+ target: b,
+ },
+ compile::Test {
+ compiler: Compiler { host: a, stage: 2 },
+ target: b,
+ },
+ compile::Test {
+ compiler: Compiler { host: b, stage: 2 },
+ target: b,
+ },
+ compile::Test {
+ compiler: Compiler { host: a, stage: 2 },
+ target: c,
+ },
+ compile::Test {
+ compiler: Compiler { host: b, stage: 2 },
+ target: c,
+ },
+ ]
+ );
+}
+
+#[test]
+fn build_with_target_flag() {
+ let mut config = configure(&["B"], &["C"]);
+ config.skip_only_host_steps = true;
+ let build = Build::new(config);
+ let mut builder = Builder::new(&build);
+ builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Build), &[]);
+
+ let a = INTERNER.intern_str("A");
+ let b = INTERNER.intern_str("B");
+ let c = INTERNER.intern_str("C");
+
+ assert!(!builder.cache.all::<compile::Std>().is_empty());
+ assert_eq!(
+ first(builder.cache.all::<compile::Assemble>()),
+ &[
+ compile::Assemble {
+ target_compiler: Compiler { host: a, stage: 0 },
+ },
+ compile::Assemble {
+ target_compiler: Compiler { host: a, stage: 1 },
+ },
+ compile::Assemble {
+ target_compiler: Compiler { host: a, stage: 2 },
+ },
+ compile::Assemble {
+ target_compiler: Compiler { host: b, stage: 2 },
+ },
+ ]
+ );
+ assert_eq!(
+ first(builder.cache.all::<compile::Rustc>()),
+ &[
+ compile::Rustc {
+ compiler: Compiler { host: a, stage: 0 },
+ target: a,
+ },
+ compile::Rustc {
+ compiler: Compiler { host: a, stage: 1 },
+ target: a,
+ },
+ compile::Rustc {
+ compiler: Compiler { host: a, stage: 1 },
+ target: b,
+ },
+ ]
+ );
+
+ assert_eq!(
+ first(builder.cache.all::<compile::Test>()),
+ &[
+ compile::Test {
+ compiler: Compiler { host: a, stage: 0 },
+ target: a,
+ },
+ compile::Test {
+ compiler: Compiler { host: a, stage: 1 },
+ target: a,
+ },
+ compile::Test {
+ compiler: Compiler { host: a, stage: 2 },
+ target: a,
+ },
+ compile::Test {
+ compiler: Compiler { host: b, stage: 2 },
+ target: a,
+ },
+ compile::Test {
+ compiler: Compiler { host: a, stage: 1 },
+ target: b,
+ },
+ compile::Test {
+ compiler: Compiler { host: a, stage: 2 },
+ target: b,
+ },
+ compile::Test {
+ compiler: Compiler { host: b, stage: 2 },
+ target: b,
+ },
+ compile::Test {
+ compiler: Compiler { host: a, stage: 2 },
+ target: c,
+ },
+ compile::Test {
+ compiler: Compiler { host: b, stage: 2 },
+ target: c,
+ },
+ ]
+ );
+}
+
+#[test]
+fn test_with_no_doc_stage0() {
+ let mut config = configure(&[], &[]);
+ config.stage = Some(0);
+ config.cmd = Subcommand::Test {
+ paths: vec!["src/libstd".into()],
+ test_args: vec![],
+ rustc_args: vec![],
+ fail_fast: true,
+ doc_tests: DocTests::No,
+ bless: false,
+ compare_mode: None,
+ rustfix_coverage: false,
+ };
+
+ let build = Build::new(config);
+ let mut builder = Builder::new(&build);
+
+ let host = INTERNER.intern_str("A");
+
+ builder.run_step_descriptions(
+ &[StepDescription::from::<test::Crate>()],
+ &["src/libstd".into()],
+ );
+
+ // Ensure we don't build any compiler artifacts.
+ assert!(!builder.cache.contains::<compile::Rustc>());
+ assert_eq!(
+ first(builder.cache.all::<test::Crate>()),
+ &[test::Crate {
+ compiler: Compiler { host, stage: 0 },
+ target: host,
+ mode: Mode::Std,
+ test_kind: test::TestKind::Test,
+ krate: INTERNER.intern_str("std"),
+ },]
+ );
+}
+
+#[test]
+fn test_exclude() {
+ let mut config = configure(&[], &[]);
+ config.exclude = vec![
+ "src/test/run-pass".into(),
+ "src/tools/tidy".into(),
+ ];
+ config.cmd = Subcommand::Test {
+ paths: Vec::new(),
+ test_args: Vec::new(),
+ rustc_args: Vec::new(),
+ fail_fast: true,
+ doc_tests: DocTests::No,
+ bless: false,
+ compare_mode: None,
+ rustfix_coverage: false,
+ };
+
+ let build = Build::new(config);
+ let builder = Builder::new(&build);
+ builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Test), &[]);
+
+ // Ensure we have really excluded run-pass & tidy
+ assert!(!builder.cache.contains::<test::RunPass>());
+ assert!(!builder.cache.contains::<test::Tidy>());
+
+ // Ensure other tests are not affected.
+ assert!(builder.cache.contains::<test::RunPassFullDeps>());
+ assert!(builder.cache.contains::<test::RustdocUi>());
+}
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
index c7fa8e7..576267e 100644
--- a/src/bootstrap/compile.rs
+++ b/src/bootstrap/compile.rs
@@ -775,6 +775,10 @@
cargo.env("CFG_LLVM_ROOT", s);
}
}
+ // Some LLVM linker flags (-L and -l) may be needed to link librustc_llvm.
+ if let Some(ref s) = builder.config.llvm_ldflags {
+ cargo.env("LLVM_LINKER_FLAGS", s);
+ }
// Building with a static libstdc++ is only supported on linux right now,
// not for MSVC or macOS
if builder.config.llvm_static_stdcpp &&
diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs
index 2749619..45bc77e 100644
--- a/src/bootstrap/dist.rs
+++ b/src/bootstrap/dist.rs
@@ -804,6 +804,7 @@
const LLVM_PROJECTS: &[&str] = &[
"llvm-project/clang", "llvm-project\\clang",
+ "llvm-project/libunwind", "llvm-project\\libunwind",
"llvm-project/lld", "llvm-project\\lld",
"llvm-project/lldb", "llvm-project\\lldb",
"llvm-project/llvm", "llvm-project\\llvm",
diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs
index 2a3577a..eac46c1 100644
--- a/src/bootstrap/doc.rs
+++ b/src/bootstrap/doc.rs
@@ -46,10 +46,11 @@
}
fn run(self, builder: &Builder<'_>) {
- builder.ensure(Rustbook {
+ builder.ensure(RustbookSrc {
target: self.target,
name: INTERNER.intern_str($book_name),
version: $book_ver,
+ src: doc_src(builder),
})
}
}
@@ -64,7 +65,7 @@
EmbeddedBook, "src/doc/embedded-book", "embedded-book", RustbookVersion::MdBook2;
Nomicon, "src/doc/nomicon", "nomicon", RustbookVersion::MdBook2;
Reference, "src/doc/reference", "reference", RustbookVersion::MdBook1;
- RustByExample, "src/doc/rust-by-example", "rust-by-example", RustbookVersion::MdBook1;
+ RustByExample, "src/doc/rust-by-example", "rust-by-example", RustbookVersion::MdBook2;
RustcBook, "src/doc/rustc", "rustc", RustbookVersion::MdBook1;
RustdocBook, "src/doc/rustdoc", "rustdoc", RustbookVersion::MdBook2;
);
@@ -75,35 +76,8 @@
MdBook2,
}
-#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
-struct Rustbook {
- target: Interned<String>,
- name: Interned<String>,
- version: RustbookVersion,
-}
-
-impl Step for Rustbook {
- type Output = ();
-
- // rustbook is never directly called, and only serves as a shim for the nomicon and the
- // reference.
- fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
- run.never()
- }
-
- /// Invoke `rustbook` for `target` for the doc book `name`.
- ///
- /// This will not actually generate any documentation if the documentation has
- /// already been generated.
- fn run(self, builder: &Builder<'_>) {
- let src = builder.src.join("src/doc");
- builder.ensure(RustbookSrc {
- target: self.target,
- name: self.name,
- src: INTERNER.intern_path(src),
- version: self.version,
- });
- }
+fn doc_src(builder: &Builder<'_>) -> Interned<PathBuf> {
+ INTERNER.intern_path(builder.src.join("src/doc"))
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
@@ -274,33 +248,37 @@
let name = self.name;
// build book
- builder.ensure(Rustbook {
+ builder.ensure(RustbookSrc {
target,
name: INTERNER.intern_string(name.to_string()),
version: RustbookVersion::MdBook2,
+ src: doc_src(builder),
});
// building older edition redirects
let source_name = format!("{}/first-edition", name);
- builder.ensure(Rustbook {
+ builder.ensure(RustbookSrc {
target,
name: INTERNER.intern_string(source_name),
version: RustbookVersion::MdBook2,
+ src: doc_src(builder),
});
let source_name = format!("{}/second-edition", name);
- builder.ensure(Rustbook {
+ builder.ensure(RustbookSrc {
target,
name: INTERNER.intern_string(source_name),
version: RustbookVersion::MdBook2,
+ src: doc_src(builder),
});
let source_name = format!("{}/2018-edition", name);
- builder.ensure(Rustbook {
+ builder.ensure(RustbookSrc {
target,
name: INTERNER.intern_string(source_name),
version: RustbookVersion::MdBook2,
+ src: doc_src(builder),
});
// build the version info page and CSS
@@ -898,11 +876,6 @@
fn run(self, builder: &Builder<'_>) {
let target = self.target;
- builder.ensure(compile::Std {
- compiler: builder.compiler(builder.top_stage, builder.config.build),
- target,
- });
-
builder.info(&format!("Generating unstable book md files ({})", target));
let out = builder.md_doc_out(target).join("unstable-book");
builder.create_dir(&out);
diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs
index 0047be4..5575867 100644
--- a/src/bootstrap/install.rs
+++ b/src/bootstrap/install.rs
@@ -251,7 +251,10 @@
};
Analysis, "analysis", Self::should_build(_config), only_hosts: false, {
builder.ensure(dist::Analysis {
- compiler: self.compiler,
+ // Find the actual compiler (handling the full bootstrap option) which
+ // produced the save-analysis data because that data isn't copied
+ // through the sysroot uplifting.
+ compiler: builder.compiler_for(builder.top_stage, builder.config.build, self.target),
target: self.target
});
install_analysis(builder, self.compiler.stage, self.target);
diff --git a/src/bootstrap/job.rs b/src/bootstrap/job.rs
index df492e0..6867d62 100644
--- a/src/bootstrap/job.rs
+++ b/src/bootstrap/job.rs
@@ -32,6 +32,7 @@
use std::env;
use std::io;
use std::mem;
+use std::ptr;
use crate::Build;
type HANDLE = *mut u8;
@@ -118,8 +119,8 @@
SetErrorMode(mode & !SEM_NOGPFAULTERRORBOX);
// Create a new job object for us to use
- let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
- assert!(job != 0 as *mut _, "{}", io::Error::last_os_error());
+ let job = CreateJobObjectW(ptr::null_mut(), ptr::null());
+ assert!(!job.is_null(), "{}", io::Error::last_os_error());
// Indicate that when all handles to the job object are gone that all
// process in the object should be killed. Note that this includes our
@@ -166,8 +167,8 @@
};
let parent = OpenProcess(PROCESS_DUP_HANDLE, FALSE, pid.parse().unwrap());
- assert!(parent != 0 as *mut _, "{}", io::Error::last_os_error());
- let mut parent_handle = 0 as *mut _;
+ assert!(!parent.is_null(), "{}", io::Error::last_os_error());
+ let mut parent_handle = ptr::null_mut();
let r = DuplicateHandle(GetCurrentProcess(), job,
parent, &mut parent_handle,
0, FALSE, DUPLICATE_SAME_ACCESS);
diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
index b9d287a..7f652c0 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
@@ -270,14 +270,9 @@
#[derive(Debug)]
struct Crate {
name: Interned<String>,
- version: String,
deps: HashSet<Interned<String>>,
id: String,
path: PathBuf,
- doc_step: String,
- build_step: String,
- test_step: String,
- bench_step: String,
}
impl Crate {
diff --git a/src/bootstrap/metadata.rs b/src/bootstrap/metadata.rs
index 4a71fd2..b622b36 100644
--- a/src/bootstrap/metadata.rs
+++ b/src/bootstrap/metadata.rs
@@ -20,7 +20,6 @@
struct Package {
id: String,
name: String,
- version: String,
source: Option<String>,
manifest_path: String,
}
@@ -84,12 +83,7 @@
let mut path = PathBuf::from(package.manifest_path);
path.pop();
build.crates.insert(name, Crate {
- build_step: format!("build-crate-{}", name),
- doc_step: format!("doc-crate-{}", name),
- test_step: format!("test-crate-{}", name),
- bench_step: format!("bench-crate-{}", name),
name,
- version: package.version,
id: package.id,
deps: HashSet::new(),
path,
diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs
index bf3601c..8b6e856 100644
--- a/src/bootstrap/native.rs
+++ b/src/bootstrap/native.rs
@@ -416,7 +416,7 @@
cfg.build_arg("-j").build_arg(builder.jobs().to_string());
let mut cflags = builder.cflags(target, GitRepo::Llvm).join(" ");
- if let Some(ref s) = builder.config.llvm_cxxflags {
+ if let Some(ref s) = builder.config.llvm_cflags {
cflags.push_str(&format!(" {}", s));
}
cfg.define("CMAKE_C_FLAGS", cflags);
diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs
index 83a897f..bd77f7a 100644
--- a/src/bootstrap/tool.rs
+++ b/src/bootstrap/tool.rs
@@ -268,10 +268,6 @@
}
impl Tool {
- pub fn get_mode(&self) -> Mode {
- Mode::ToolBootstrap
- }
-
/// Whether this tool requires LLVM to run
pub fn uses_llvm_tools(&self) -> bool {
match self {
@@ -659,23 +655,14 @@
pub fn tool_cmd(&self, tool: Tool) -> Command {
let mut cmd = Command::new(self.tool_exe(tool));
let compiler = self.compiler(0, self.config.build);
- self.prepare_tool_cmd(compiler, tool, &mut cmd);
- cmd
- }
-
- /// Prepares the `cmd` provided to be able to run the `compiler` provided.
- ///
- /// Notably this munges the dynamic library lookup path to point to the
- /// right location to run `compiler`.
- fn prepare_tool_cmd(&self, compiler: Compiler, tool: Tool, cmd: &mut Command) {
let host = &compiler.host;
+ // Prepares the `cmd` provided to be able to run the `compiler` provided.
+ //
+ // Notably this munges the dynamic library lookup path to point to the
+ // right location to run `compiler`.
let mut lib_paths: Vec<PathBuf> = vec![
- if compiler.stage == 0 {
- self.build.rustc_snapshot_libdir()
- } else {
- PathBuf::from(&self.sysroot_libdir(compiler, compiler.host))
- },
- self.cargo_out(compiler, tool.get_mode(), *host).join("deps"),
+ self.build.rustc_snapshot_libdir(),
+ self.cargo_out(compiler, Mode::ToolBootstrap, *host).join("deps"),
];
// On MSVC a tool may invoke a C compiler (e.g., compiletest in run-make
@@ -696,6 +683,7 @@
}
}
- add_lib_path(lib_paths, cmd);
+ add_lib_path(lib_paths, &mut cmd);
+ cmd
}
}
diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs
index 9f68467..47f5edd 100644
--- a/src/bootstrap/util.rs
+++ b/src/bootstrap/util.rs
@@ -209,7 +209,7 @@
let h = CreateFileW(path.as_ptr(),
GENERIC_WRITE,
FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
- 0 as *mut _,
+ ptr::null_mut(),
OPEN_EXISTING,
FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS,
ptr::null_mut());
diff --git a/src/ci/cpu-usage-over-time.py b/src/ci/cpu-usage-over-time.py
new file mode 100644
index 0000000..78427a6
--- /dev/null
+++ b/src/ci/cpu-usage-over-time.py
@@ -0,0 +1,175 @@
+#!/usr/bin/env python
+# ignore-tidy-linelength
+
+# This is a small script that we use on CI to collect CPU usage statistics of
+# our builders. By seeing graphs of CPU usage over time we hope to correlate
+# that with possible improvements to Rust's own build system, ideally determining
+# whether builders are always fully using their CPU resources or whether they're
+# idle for long stretches of time.
+#
+# This script is relatively simple, but it's platform specific. Each platform
+# (OSX/Windows/Linux) has a different way of calculating the current state of
+# CPU at a point in time. We then compare two captured states to determine the
+# percentage of time spent in one state versus another. The state capturing is
+# all platform-specific but the loop at the bottom is the cross platform part
+# that executes everywhere.
+#
+# # Viewing statistics
+#
+# All builders will upload their CPU statistics as CSV files to our S3 buckets.
+# These URLs look like:
+#
+# https://$bucket.s3.amazonaws.com/rustc-builds/$commit/cpu-$builder.csv
+#
+# for example
+#
+# https://rust-lang-ci2.s3.amazonaws.com/rustc-builds/68baada19cd5340f05f0db15a3e16d6671609bcc/cpu-x86_64-apple.csv
+#
+# Each CSV file has two columns. The first is the timestamp of the measurement
+# and the second column is the % of idle cpu time in that time slice. Ideally
+# the second column is always zero.
+#
+# Once you've downloaded a file there's various ways to plot it and visualize
+# it. For command line usage you can use a script like so:
+#
+# set timefmt '%Y-%m-%dT%H:%M:%S'
+# set xdata time
+# set ylabel "Idle CPU %"
+# set xlabel "Time"
+# set datafile sep ','
+# set term png
+# set output "printme.png"
+# set grid
+# builder = "i686-apple"
+# plot "cpu-".builder.".csv" using 1:2 with lines title builder
+#
+# Executed as `gnuplot < ./foo.plot` it will generate a graph called
+# `printme.png` which you can then open up. If you know how to improve this
+# script or the viewing process that would be much appreciated :) (or even if
+# you know how to automate it!)
+
+import datetime
+import sys
+import time
+
+if sys.platform == 'linux2':
+ class State:
+ def __init__(self):
+ with open('/proc/stat', 'r') as file:
+ data = file.readline().split()
+ if data[0] != 'cpu':
+ raise Exception('did not start with "cpu"')
+ self.user = int(data[1])
+ self.nice = int(data[2])
+ self.system = int(data[3])
+ self.idle = int(data[4])
+ self.iowait = int(data[5])
+ self.irq = int(data[6])
+ self.softirq = int(data[7])
+ self.steal = int(data[8])
+ self.guest = int(data[9])
+ self.guest_nice = int(data[10])
+
+ def idle_since(self, prev):
+ user = self.user - prev.user
+ nice = self.nice - prev.nice
+ system = self.system - prev.system
+ idle = self.idle - prev.idle
+ iowait = self.iowait - prev.iowait
+ irq = self.irq - prev.irq
+ softirq = self.softirq - prev.softirq
+ steal = self.steal - prev.steal
+ guest = self.guest - prev.guest
+ guest_nice = self.guest_nice - prev.guest_nice
+ total = user + nice + system + idle + iowait + irq + softirq + steal + guest + guest_nice
+ return float(idle) / float(total) * 100
+
+elif sys.platform == 'win32':
+ from ctypes.wintypes import DWORD
+ from ctypes import Structure, windll, WinError, GetLastError, byref
+
+ class FILETIME(Structure):
+ _fields_ = [
+ ("dwLowDateTime", DWORD),
+ ("dwHighDateTime", DWORD),
+ ]
+
+ class State:
+ def __init__(self):
+ idle, kernel, user = FILETIME(), FILETIME(), FILETIME()
+
+ success = windll.kernel32.GetSystemTimes(
+ byref(idle),
+ byref(kernel),
+ byref(user),
+ )
+
+ assert success, WinError(GetLastError())[1]
+
+ self.idle = (idle.dwHighDateTime << 32) | idle.dwLowDateTime
+ self.kernel = (kernel.dwHighDateTime << 32) | kernel.dwLowDateTime
+ self.user = (user.dwHighDateTime << 32) | user.dwLowDateTime
+
+ def idle_since(self, prev):
+ idle = self.idle - prev.idle
+ user = self.user - prev.user
+ kernel = self.kernel - prev.kernel
+ return float(idle) / float(user + kernel) * 100
+
+elif sys.platform == 'darwin':
+ from ctypes import *
+ libc = cdll.LoadLibrary('/usr/lib/libc.dylib')
+
+ PROESSOR_CPU_LOAD_INFO = c_int(2)
+ CPU_STATE_USER = 0
+ CPU_STATE_SYSTEM = 1
+ CPU_STATE_IDLE = 2
+ CPU_STATE_NICE = 3
+ c_int_p = POINTER(c_int)
+
+ class State:
+ def __init__(self):
+ num_cpus_u = c_uint(0)
+ cpu_info = c_int_p()
+ cpu_info_cnt = c_int(0)
+ err = libc.host_processor_info(
+ libc.mach_host_self(),
+ PROESSOR_CPU_LOAD_INFO,
+ byref(num_cpus_u),
+ byref(cpu_info),
+ byref(cpu_info_cnt),
+ )
+ assert err == 0
+ self.user = 0
+ self.system = 0
+ self.idle = 0
+ self.nice = 0
+ cur = 0
+ while cur < cpu_info_cnt.value:
+ self.user += cpu_info[cur + CPU_STATE_USER]
+ self.system += cpu_info[cur + CPU_STATE_SYSTEM]
+ self.idle += cpu_info[cur + CPU_STATE_IDLE]
+ self.nice += cpu_info[cur + CPU_STATE_NICE]
+ cur += num_cpus_u.value
+
+ def idle_since(self, prev):
+ user = self.user - prev.user
+ system = self.system - prev.system
+ idle = self.idle - prev.idle
+ nice = self.nice - prev.nice
+ return float(idle) / float(user + system + idle + nice) * 100.0
+
+else:
+ print('unknown platform', sys.platform)
+ sys.exit(1)
+
+cur_state = State();
+print("Time,Idle")
+while True:
+ time.sleep(1);
+ next_state = State();
+ now = datetime.datetime.utcnow().isoformat()
+ idle = next_state.idle_since(cur_state)
+ print("%s,%s" % (now, idle))
+ sys.stdout.flush()
+ cur_state = next_state
diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh
index c9642db..e6cd794 100755
--- a/src/ci/docker/run.sh
+++ b/src/ci/docker/run.sh
@@ -149,6 +149,7 @@
--env TF_BUILD \
--env BUILD_SOURCEBRANCHNAME \
--env TOOLSTATE_REPO_ACCESS_TOKEN \
+ --env TOOLSTATE_REPO \
--env CI_JOB_NAME="${CI_JOB_NAME-$IMAGE}" \
--volume "$HOME/.cargo:/cargo" \
--volume "$HOME/rustsrc:$HOME/rustsrc" \
diff --git a/src/ci/docker/test-various/Dockerfile b/src/ci/docker/test-various/Dockerfile
index 611a24a..c45b1a9 100644
--- a/src/ci/docker/test-various/Dockerfile
+++ b/src/ci/docker/test-various/Dockerfile
@@ -15,10 +15,6 @@
wget \
patch
-# FIXME: build the `ptx-linker` instead.
-RUN curl -sL https://github.com/denzp/rust-ptx-linker/releases/download/v0.9.0-alpha.2/rust-ptx-linker.linux64.tar.gz | \
- tar -xzvC /usr/bin
-
RUN curl -sL https://nodejs.org/dist/v9.2.0/node-v9.2.0-linux-x64.tar.xz | \
tar -xJ
diff --git a/src/ci/docker/x86_64-gnu-distcheck/Dockerfile b/src/ci/docker/x86_64-gnu-distcheck/Dockerfile
index f16dd98..364f45a 100644
--- a/src/ci/docker/x86_64-gnu-distcheck/Dockerfile
+++ b/src/ci/docker/x86_64-gnu-distcheck/Dockerfile
@@ -21,3 +21,10 @@
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --set rust.ignore-git=false
ENV SCRIPT python2.7 ../x.py test distcheck
ENV DIST_SRC 1
+
+# The purpose of this builder is to test that we can `./x.py test` successfully
+# from a tarball, not to test LLVM/rustc's own set of assertions. These cause a
+# significant hit to CI compile time (over a half hour as observed in #61185),
+# so disable assertions for this builder.
+ENV NO_LLVM_ASSERTIONS=1
+ENV NO_DEBUG_ASSERTIONS=1
diff --git a/src/ci/docker/x86_64-gnu-tools/repo.sh b/src/ci/docker/x86_64-gnu-tools/repo.sh
index 6364bc2..741d4dc 100644
--- a/src/ci/docker/x86_64-gnu-tools/repo.sh
+++ b/src/ci/docker/x86_64-gnu-tools/repo.sh
@@ -55,7 +55,7 @@
git config --global credential.helper store
printf 'https://%s:x-oauth-basic@github.com\n' "$TOOLSTATE_REPO_ACCESS_TOKEN" \
> "$HOME/.git-credentials"
- git clone --depth=1 https://github.com/rust-lang-nursery/rust-toolstate.git
+ git clone --depth=1 $TOOLSTATE_REPO
cd rust-toolstate
FAILURE=1
diff --git a/src/doc/rust-by-example b/src/doc/rust-by-example
index 18566f4..d8eec1d 160000
--- a/src/doc/rust-by-example
+++ b/src/doc/rust-by-example
@@ -1 +1 @@
-Subproject commit 18566f4dedc3ef5bf61f5f85685d5966db99cc11
+Subproject commit d8eec1dd65470b9a68e80ac1cba8fad0daac4916
diff --git a/src/liballoc/collections/vec_deque.rs b/src/liballoc/collections/vec_deque.rs
index 31e49d0..71faf67 100644
--- a/src/liballoc/collections/vec_deque.rs
+++ b/src/liballoc/collections/vec_deque.rs
@@ -2709,6 +2709,11 @@
#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")]
impl<T> From<Vec<T>> for VecDeque<T> {
+ /// Turn a [`Vec<T>`] into a [`VecDeque<T>`].
+ ///
+ /// This avoids reallocating where possible, but the conditions for that are
+ /// strict, and subject to change, and so shouldn't be relied upon unless the
+ /// `Vec<T>` came from `From<VecDeque<T>>` and hasn't been reallocated.
fn from(mut other: Vec<T>) -> Self {
unsafe {
let other_buf = other.as_mut_ptr();
@@ -2735,6 +2740,32 @@
#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")]
impl<T> From<VecDeque<T>> for Vec<T> {
+ /// Turn a [`VecDeque<T>`] into a [`Vec<T>`].
+ ///
+ /// This never needs to re-allocate, but does need to do O(n) data movement if
+ /// the circular buffer doesn't happen to be at the beginning of the allocation.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// // This one is O(1).
+ /// let deque: VecDeque<_> = (1..5).collect();
+ /// let ptr = deque.as_slices().0.as_ptr();
+ /// let vec = Vec::from(deque);
+ /// assert_eq!(vec, [1, 2, 3, 4]);
+ /// assert_eq!(vec.as_ptr(), ptr);
+ ///
+ /// // This one needs data rearranging.
+ /// let mut deque: VecDeque<_> = (1..5).collect();
+ /// deque.push_front(9);
+ /// deque.push_front(8);
+ /// let ptr = deque.as_slices().1.as_ptr();
+ /// let vec = Vec::from(deque);
+ /// assert_eq!(vec, [8, 9, 1, 2, 3, 4]);
+ /// assert_eq!(vec.as_ptr(), ptr);
+ /// ```
fn from(other: VecDeque<T>) -> Self {
unsafe {
let buf = other.buf.ptr();
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index c530ac2..5fc58c8 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -141,7 +141,7 @@
pub use std::boxed::Box;
}
#[cfg(test)]
-mod boxed_test;
+mod tests;
pub mod collections;
#[cfg(all(target_has_atomic = "ptr", target_has_atomic = "cas"))]
pub mod sync;
diff --git a/src/liballoc/macros.rs b/src/liballoc/macros.rs
index dd128e0..250c419 100644
--- a/src/liballoc/macros.rs
+++ b/src/liballoc/macros.rs
@@ -42,7 +42,7 @@
($($x:expr),*) => (
<[_]>::into_vec(box [$($x),*])
);
- ($($x:expr,)*) => (vec![$($x),*])
+ ($($x:expr,)*) => ($crate::vec![$($x),*])
}
// HACK(japaric): with cfg(test) the inherent `[T]::into_vec` method, which is
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index c827e21..ee78839 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -375,9 +375,9 @@
/// ```
/// use std::rc::Rc;
///
- /// let x = Rc::new(10);
+ /// let x = Rc::new("hello".to_owned());
/// let x_ptr = Rc::into_raw(x);
- /// assert_eq!(unsafe { *x_ptr }, 10);
+ /// assert_eq!(unsafe { &*x_ptr }, "hello");
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub fn into_raw(this: Self) -> *const T {
@@ -401,13 +401,13 @@
/// ```
/// use std::rc::Rc;
///
- /// let x = Rc::new(10);
+ /// let x = Rc::new("hello".to_owned());
/// let x_ptr = Rc::into_raw(x);
///
/// unsafe {
/// // Convert back to an `Rc` to prevent leak.
/// let x = Rc::from_raw(x_ptr);
- /// assert_eq!(*x, 10);
+ /// assert_eq!(&*x, "hello");
///
/// // Further calls to `Rc::from_raw(x_ptr)` would be memory unsafe.
/// }
@@ -437,10 +437,10 @@
///
/// use std::rc::Rc;
///
- /// let x = Rc::new(10);
+ /// let x = Rc::new("hello".to_owned());
/// let ptr = Rc::into_raw_non_null(x);
- /// let deref = unsafe { *ptr.as_ref() };
- /// assert_eq!(deref, 10);
+ /// let deref = unsafe { ptr.as_ref() };
+ /// assert_eq!(deref, "hello");
/// ```
#[unstable(feature = "rc_into_raw_non_null", issue = "47336")]
#[inline]
@@ -1294,17 +1294,17 @@
/// use std::rc::{Rc, Weak};
/// use std::ptr;
///
- /// let strong = Rc::new(42);
+ /// let strong = Rc::new("hello".to_owned());
/// let weak = Rc::downgrade(&strong);
/// // Both point to the same object
/// assert!(ptr::eq(&*strong, Weak::as_raw(&weak)));
/// // The strong here keeps it alive, so we can still access the object.
- /// assert_eq!(42, unsafe { *Weak::as_raw(&weak) });
+ /// assert_eq!("hello", unsafe { &*Weak::as_raw(&weak) });
///
/// drop(strong);
/// // But not any more. We can do Weak::as_raw(&weak), but accessing the pointer would lead to
/// // undefined behaviour.
- /// // assert_eq!(42, unsafe { *Weak::as_raw(&weak) });
+ /// // assert_eq!("hello", unsafe { &*Weak::as_raw(&weak) });
/// ```
///
/// [`null`]: ../../std/ptr/fn.null.html
@@ -1339,12 +1339,12 @@
///
/// use std::rc::{Rc, Weak};
///
- /// let strong = Rc::new(42);
+ /// let strong = Rc::new("hello".to_owned());
/// let weak = Rc::downgrade(&strong);
/// let raw = Weak::into_raw(weak);
///
/// assert_eq!(1, Rc::weak_count(&strong));
- /// assert_eq!(42, unsafe { *raw });
+ /// assert_eq!("hello", unsafe { &*raw });
///
/// drop(unsafe { Weak::from_raw(raw) });
/// assert_eq!(0, Rc::weak_count(&strong));
@@ -1380,14 +1380,14 @@
///
/// use std::rc::{Rc, Weak};
///
- /// let strong = Rc::new(42);
+ /// let strong = Rc::new("hello".to_owned());
///
/// let raw_1 = Weak::into_raw(Rc::downgrade(&strong));
/// let raw_2 = Weak::into_raw(Rc::downgrade(&strong));
///
/// assert_eq!(2, Rc::weak_count(&strong));
///
- /// assert_eq!(42, *Weak::upgrade(&unsafe { Weak::from_raw(raw_1) }).unwrap());
+ /// assert_eq!("hello", &*Weak::upgrade(&unsafe { Weak::from_raw(raw_1) }).unwrap());
/// assert_eq!(1, Rc::weak_count(&strong));
///
/// drop(strong);
@@ -1515,18 +1515,18 @@
///
/// ```
/// #![feature(weak_ptr_eq)]
- /// use std::rc::{Rc, Weak};
+ /// use std::rc::Rc;
///
/// let first_rc = Rc::new(5);
/// let first = Rc::downgrade(&first_rc);
/// let second = Rc::downgrade(&first_rc);
///
- /// assert!(Weak::ptr_eq(&first, &second));
+ /// assert!(first.ptr_eq(&second));
///
/// let third_rc = Rc::new(5);
/// let third = Rc::downgrade(&third_rc);
///
- /// assert!(!Weak::ptr_eq(&first, &third));
+ /// assert!(!first.ptr_eq(&third));
/// ```
///
/// Comparing `Weak::new`.
@@ -1537,16 +1537,16 @@
///
/// let first = Weak::new();
/// let second = Weak::new();
- /// assert!(Weak::ptr_eq(&first, &second));
+ /// assert!(first.ptr_eq(&second));
///
/// let third_rc = Rc::new(());
/// let third = Rc::downgrade(&third_rc);
- /// assert!(!Weak::ptr_eq(&first, &third));
+ /// assert!(!first.ptr_eq(&third));
/// ```
#[inline]
#[unstable(feature = "weak_ptr_eq", issue = "55981")]
- pub fn ptr_eq(this: &Self, other: &Self) -> bool {
- this.ptr.as_ptr() == other.ptr.as_ptr()
+ pub fn ptr_eq(&self, other: &Self) -> bool {
+ self.ptr.as_ptr() == other.ptr.as_ptr()
}
}
diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs
index 7086565..6c23b31 100644
--- a/src/liballoc/sync.rs
+++ b/src/liballoc/sync.rs
@@ -356,9 +356,9 @@
/// ```
/// use std::sync::Arc;
///
- /// let x = Arc::new(10);
+ /// let x = Arc::new("hello".to_owned());
/// let x_ptr = Arc::into_raw(x);
- /// assert_eq!(unsafe { *x_ptr }, 10);
+ /// assert_eq!(unsafe { &*x_ptr }, "hello");
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub fn into_raw(this: Self) -> *const T {
@@ -382,13 +382,13 @@
/// ```
/// use std::sync::Arc;
///
- /// let x = Arc::new(10);
+ /// let x = Arc::new("hello".to_owned());
/// let x_ptr = Arc::into_raw(x);
///
/// unsafe {
/// // Convert back to an `Arc` to prevent leak.
/// let x = Arc::from_raw(x_ptr);
- /// assert_eq!(*x, 10);
+ /// assert_eq!(&*x, "hello");
///
/// // Further calls to `Arc::from_raw(x_ptr)` would be memory unsafe.
/// }
@@ -418,10 +418,10 @@
///
/// use std::sync::Arc;
///
- /// let x = Arc::new(10);
+ /// let x = Arc::new("hello".to_owned());
/// let ptr = Arc::into_raw_non_null(x);
- /// let deref = unsafe { *ptr.as_ref() };
- /// assert_eq!(deref, 10);
+ /// let deref = unsafe { ptr.as_ref() };
+ /// assert_eq!(deref, "hello");
/// ```
#[unstable(feature = "rc_into_raw_non_null", issue = "47336")]
#[inline]
@@ -1083,17 +1083,17 @@
/// use std::sync::{Arc, Weak};
/// use std::ptr;
///
- /// let strong = Arc::new(42);
+ /// let strong = Arc::new("hello".to_owned());
/// let weak = Arc::downgrade(&strong);
/// // Both point to the same object
/// assert!(ptr::eq(&*strong, Weak::as_raw(&weak)));
/// // The strong here keeps it alive, so we can still access the object.
- /// assert_eq!(42, unsafe { *Weak::as_raw(&weak) });
+ /// assert_eq!("hello", unsafe { &*Weak::as_raw(&weak) });
///
/// drop(strong);
/// // But not any more. We can do Weak::as_raw(&weak), but accessing the pointer would lead to
/// // undefined behaviour.
- /// // assert_eq!(42, unsafe { *Weak::as_raw(&weak) });
+ /// // assert_eq!("hello", unsafe { &*Weak::as_raw(&weak) });
/// ```
///
/// [`null`]: ../../std/ptr/fn.null.html
@@ -1128,12 +1128,12 @@
///
/// use std::sync::{Arc, Weak};
///
- /// let strong = Arc::new(42);
+ /// let strong = Arc::new("hello".to_owned());
/// let weak = Arc::downgrade(&strong);
/// let raw = Weak::into_raw(weak);
///
/// assert_eq!(1, Arc::weak_count(&strong));
- /// assert_eq!(42, unsafe { *raw });
+ /// assert_eq!("hello", unsafe { &*raw });
///
/// drop(unsafe { Weak::from_raw(raw) });
/// assert_eq!(0, Arc::weak_count(&strong));
@@ -1170,14 +1170,14 @@
///
/// use std::sync::{Arc, Weak};
///
- /// let strong = Arc::new(42);
+ /// let strong = Arc::new("hello".to_owned());
///
/// let raw_1 = Weak::into_raw(Arc::downgrade(&strong));
/// let raw_2 = Weak::into_raw(Arc::downgrade(&strong));
///
/// assert_eq!(2, Arc::weak_count(&strong));
///
- /// assert_eq!(42, *Weak::upgrade(&unsafe { Weak::from_raw(raw_1) }).unwrap());
+ /// assert_eq!("hello", &*Weak::upgrade(&unsafe { Weak::from_raw(raw_1) }).unwrap());
/// assert_eq!(1, Arc::weak_count(&strong));
///
/// drop(strong);
@@ -1349,18 +1349,18 @@
///
/// ```
/// #![feature(weak_ptr_eq)]
- /// use std::sync::{Arc, Weak};
+ /// use std::sync::Arc;
///
/// let first_rc = Arc::new(5);
/// let first = Arc::downgrade(&first_rc);
/// let second = Arc::downgrade(&first_rc);
///
- /// assert!(Weak::ptr_eq(&first, &second));
+ /// assert!(first.ptr_eq(&second));
///
/// let third_rc = Arc::new(5);
/// let third = Arc::downgrade(&third_rc);
///
- /// assert!(!Weak::ptr_eq(&first, &third));
+ /// assert!(!first.ptr_eq(&third));
/// ```
///
/// Comparing `Weak::new`.
@@ -1371,16 +1371,16 @@
///
/// let first = Weak::new();
/// let second = Weak::new();
- /// assert!(Weak::ptr_eq(&first, &second));
+ /// assert!(first.ptr_eq(&second));
///
/// let third_rc = Arc::new(());
/// let third = Arc::downgrade(&third_rc);
- /// assert!(!Weak::ptr_eq(&first, &third));
+ /// assert!(!first.ptr_eq(&third));
/// ```
#[inline]
#[unstable(feature = "weak_ptr_eq", issue = "55981")]
- pub fn ptr_eq(this: &Self, other: &Self) -> bool {
- this.ptr.as_ptr() == other.ptr.as_ptr()
+ pub fn ptr_eq(&self, other: &Self) -> bool {
+ self.ptr.as_ptr() == other.ptr.as_ptr()
}
}
diff --git a/src/liballoc/boxed_test.rs b/src/liballoc/tests.rs
similarity index 100%
rename from src/liballoc/boxed_test.rs
rename to src/liballoc/tests.rs
diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs
index 1739b22..3d16e33 100644
--- a/src/libarena/lib.rs
+++ b/src/libarena/lib.rs
@@ -114,8 +114,8 @@
TypedArena {
// We set both `ptr` and `end` to 0 so that the first call to
// alloc() will trigger a grow().
- ptr: Cell::new(0 as *mut T),
- end: Cell::new(0 as *mut T),
+ ptr: Cell::new(ptr::null_mut()),
+ end: Cell::new(ptr::null_mut()),
chunks: RefCell::new(vec![]),
_own: PhantomData,
}
@@ -370,8 +370,8 @@
#[inline]
fn default() -> DroplessArena {
DroplessArena {
- ptr: Cell::new(0 as *mut u8),
- end: Cell::new(0 as *mut u8),
+ ptr: Cell::new(ptr::null_mut()),
+ end: Cell::new(ptr::null_mut()),
chunks: Default::default(),
}
}
@@ -617,218 +617,4 @@
}
#[cfg(test)]
-mod tests {
- extern crate test;
- use test::Bencher;
- use super::TypedArena;
- use std::cell::Cell;
-
- #[allow(dead_code)]
- #[derive(Debug, Eq, PartialEq)]
- struct Point {
- x: i32,
- y: i32,
- z: i32,
- }
-
- #[test]
- pub fn test_unused() {
- let arena: TypedArena<Point> = TypedArena::default();
- assert!(arena.chunks.borrow().is_empty());
- }
-
- #[test]
- fn test_arena_alloc_nested() {
- struct Inner {
- value: u8,
- }
- struct Outer<'a> {
- inner: &'a Inner,
- }
- enum EI<'e> {
- I(Inner),
- O(Outer<'e>),
- }
-
- struct Wrap<'a>(TypedArena<EI<'a>>);
-
- impl<'a> Wrap<'a> {
- fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
- let r: &EI<'_> = self.0.alloc(EI::I(f()));
- if let &EI::I(ref i) = r {
- i
- } else {
- panic!("mismatch");
- }
- }
- fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer<'_> {
- let r: &EI<'_> = self.0.alloc(EI::O(f()));
- if let &EI::O(ref o) = r {
- o
- } else {
- panic!("mismatch");
- }
- }
- }
-
- let arena = Wrap(TypedArena::default());
-
- let result = arena.alloc_outer(|| Outer {
- inner: arena.alloc_inner(|| Inner { value: 10 }),
- });
-
- assert_eq!(result.inner.value, 10);
- }
-
- #[test]
- pub fn test_copy() {
- let arena = TypedArena::default();
- for _ in 0..100000 {
- arena.alloc(Point { x: 1, y: 2, z: 3 });
- }
- }
-
- #[bench]
- pub fn bench_copy(b: &mut Bencher) {
- let arena = TypedArena::default();
- b.iter(|| arena.alloc(Point { x: 1, y: 2, z: 3 }))
- }
-
- #[bench]
- pub fn bench_copy_nonarena(b: &mut Bencher) {
- b.iter(|| {
- let _: Box<_> = Box::new(Point { x: 1, y: 2, z: 3 });
- })
- }
-
- #[allow(dead_code)]
- struct Noncopy {
- string: String,
- array: Vec<i32>,
- }
-
- #[test]
- pub fn test_noncopy() {
- let arena = TypedArena::default();
- for _ in 0..100000 {
- arena.alloc(Noncopy {
- string: "hello world".to_string(),
- array: vec![1, 2, 3, 4, 5],
- });
- }
- }
-
- #[test]
- pub fn test_typed_arena_zero_sized() {
- let arena = TypedArena::default();
- for _ in 0..100000 {
- arena.alloc(());
- }
- }
-
- #[test]
- pub fn test_typed_arena_clear() {
- let mut arena = TypedArena::default();
- for _ in 0..10 {
- arena.clear();
- for _ in 0..10000 {
- arena.alloc(Point { x: 1, y: 2, z: 3 });
- }
- }
- }
-
- #[bench]
- pub fn bench_typed_arena_clear(b: &mut Bencher) {
- let mut arena = TypedArena::default();
- b.iter(|| {
- arena.alloc(Point { x: 1, y: 2, z: 3 });
- arena.clear();
- })
- }
-
- // Drop tests
-
- struct DropCounter<'a> {
- count: &'a Cell<u32>,
- }
-
- impl Drop for DropCounter<'_> {
- fn drop(&mut self) {
- self.count.set(self.count.get() + 1);
- }
- }
-
- #[test]
- fn test_typed_arena_drop_count() {
- let counter = Cell::new(0);
- {
- let arena: TypedArena<DropCounter<'_>> = TypedArena::default();
- for _ in 0..100 {
- // Allocate something with drop glue to make sure it doesn't leak.
- arena.alloc(DropCounter { count: &counter });
- }
- };
- assert_eq!(counter.get(), 100);
- }
-
- #[test]
- fn test_typed_arena_drop_on_clear() {
- let counter = Cell::new(0);
- let mut arena: TypedArena<DropCounter<'_>> = TypedArena::default();
- for i in 0..10 {
- for _ in 0..100 {
- // Allocate something with drop glue to make sure it doesn't leak.
- arena.alloc(DropCounter { count: &counter });
- }
- arena.clear();
- assert_eq!(counter.get(), i * 100 + 100);
- }
- }
-
- thread_local! {
- static DROP_COUNTER: Cell<u32> = Cell::new(0)
- }
-
- struct SmallDroppable;
-
- impl Drop for SmallDroppable {
- fn drop(&mut self) {
- DROP_COUNTER.with(|c| c.set(c.get() + 1));
- }
- }
-
- #[test]
- fn test_typed_arena_drop_small_count() {
- DROP_COUNTER.with(|c| c.set(0));
- {
- let arena: TypedArena<SmallDroppable> = TypedArena::default();
- for _ in 0..100 {
- // Allocate something with drop glue to make sure it doesn't leak.
- arena.alloc(SmallDroppable);
- }
- // dropping
- };
- assert_eq!(DROP_COUNTER.with(|c| c.get()), 100);
- }
-
- #[bench]
- pub fn bench_noncopy(b: &mut Bencher) {
- let arena = TypedArena::default();
- b.iter(|| {
- arena.alloc(Noncopy {
- string: "hello world".to_string(),
- array: vec![1, 2, 3, 4, 5],
- })
- })
- }
-
- #[bench]
- pub fn bench_noncopy_nonarena(b: &mut Bencher) {
- b.iter(|| {
- let _: Box<_> = Box::new(Noncopy {
- string: "hello world".to_string(),
- array: vec![1, 2, 3, 4, 5],
- });
- })
- }
-}
+mod tests;
diff --git a/src/libarena/tests.rs b/src/libarena/tests.rs
new file mode 100644
index 0000000..fa41894
--- /dev/null
+++ b/src/libarena/tests.rs
@@ -0,0 +1,213 @@
+extern crate test;
+use test::Bencher;
+use super::TypedArena;
+use std::cell::Cell;
+
+#[allow(dead_code)]
+#[derive(Debug, Eq, PartialEq)]
+struct Point {
+ x: i32,
+ y: i32,
+ z: i32,
+}
+
+#[test]
+pub fn test_unused() {
+ let arena: TypedArena<Point> = TypedArena::default();
+ assert!(arena.chunks.borrow().is_empty());
+}
+
+#[test]
+fn test_arena_alloc_nested() {
+ struct Inner {
+ value: u8,
+ }
+ struct Outer<'a> {
+ inner: &'a Inner,
+ }
+ enum EI<'e> {
+ I(Inner),
+ O(Outer<'e>),
+ }
+
+ struct Wrap<'a>(TypedArena<EI<'a>>);
+
+ impl<'a> Wrap<'a> {
+ fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
+ let r: &EI<'_> = self.0.alloc(EI::I(f()));
+ if let &EI::I(ref i) = r {
+ i
+ } else {
+ panic!("mismatch");
+ }
+ }
+ fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer<'_> {
+ let r: &EI<'_> = self.0.alloc(EI::O(f()));
+ if let &EI::O(ref o) = r {
+ o
+ } else {
+ panic!("mismatch");
+ }
+ }
+ }
+
+ let arena = Wrap(TypedArena::default());
+
+ let result = arena.alloc_outer(|| Outer {
+ inner: arena.alloc_inner(|| Inner { value: 10 }),
+ });
+
+ assert_eq!(result.inner.value, 10);
+}
+
+#[test]
+pub fn test_copy() {
+ let arena = TypedArena::default();
+ for _ in 0..100000 {
+ arena.alloc(Point { x: 1, y: 2, z: 3 });
+ }
+}
+
+#[bench]
+pub fn bench_copy(b: &mut Bencher) {
+ let arena = TypedArena::default();
+ b.iter(|| arena.alloc(Point { x: 1, y: 2, z: 3 }))
+}
+
+#[bench]
+pub fn bench_copy_nonarena(b: &mut Bencher) {
+ b.iter(|| {
+ let _: Box<_> = Box::new(Point { x: 1, y: 2, z: 3 });
+ })
+}
+
+#[allow(dead_code)]
+struct Noncopy {
+ string: String,
+ array: Vec<i32>,
+}
+
+#[test]
+pub fn test_noncopy() {
+ let arena = TypedArena::default();
+ for _ in 0..100000 {
+ arena.alloc(Noncopy {
+ string: "hello world".to_string(),
+ array: vec![1, 2, 3, 4, 5],
+ });
+ }
+}
+
+#[test]
+pub fn test_typed_arena_zero_sized() {
+ let arena = TypedArena::default();
+ for _ in 0..100000 {
+ arena.alloc(());
+ }
+}
+
+#[test]
+pub fn test_typed_arena_clear() {
+ let mut arena = TypedArena::default();
+ for _ in 0..10 {
+ arena.clear();
+ for _ in 0..10000 {
+ arena.alloc(Point { x: 1, y: 2, z: 3 });
+ }
+ }
+}
+
+#[bench]
+pub fn bench_typed_arena_clear(b: &mut Bencher) {
+ let mut arena = TypedArena::default();
+ b.iter(|| {
+ arena.alloc(Point { x: 1, y: 2, z: 3 });
+ arena.clear();
+ })
+}
+
+// Drop tests
+
+struct DropCounter<'a> {
+ count: &'a Cell<u32>,
+}
+
+impl Drop for DropCounter<'_> {
+ fn drop(&mut self) {
+ self.count.set(self.count.get() + 1);
+ }
+}
+
+#[test]
+fn test_typed_arena_drop_count() {
+ let counter = Cell::new(0);
+ {
+ let arena: TypedArena<DropCounter<'_>> = TypedArena::default();
+ for _ in 0..100 {
+ // Allocate something with drop glue to make sure it doesn't leak.
+ arena.alloc(DropCounter { count: &counter });
+ }
+ };
+ assert_eq!(counter.get(), 100);
+}
+
+#[test]
+fn test_typed_arena_drop_on_clear() {
+ let counter = Cell::new(0);
+ let mut arena: TypedArena<DropCounter<'_>> = TypedArena::default();
+ for i in 0..10 {
+ for _ in 0..100 {
+ // Allocate something with drop glue to make sure it doesn't leak.
+ arena.alloc(DropCounter { count: &counter });
+ }
+ arena.clear();
+ assert_eq!(counter.get(), i * 100 + 100);
+ }
+}
+
+thread_local! {
+ static DROP_COUNTER: Cell<u32> = Cell::new(0)
+}
+
+struct SmallDroppable;
+
+impl Drop for SmallDroppable {
+ fn drop(&mut self) {
+ DROP_COUNTER.with(|c| c.set(c.get() + 1));
+ }
+}
+
+#[test]
+fn test_typed_arena_drop_small_count() {
+ DROP_COUNTER.with(|c| c.set(0));
+ {
+ let arena: TypedArena<SmallDroppable> = TypedArena::default();
+ for _ in 0..100 {
+ // Allocate something with drop glue to make sure it doesn't leak.
+ arena.alloc(SmallDroppable);
+ }
+ // dropping
+ };
+ assert_eq!(DROP_COUNTER.with(|c| c.get()), 100);
+}
+
+#[bench]
+pub fn bench_noncopy(b: &mut Bencher) {
+ let arena = TypedArena::default();
+ b.iter(|| {
+ arena.alloc(Noncopy {
+ string: "hello world".to_string(),
+ array: vec![1, 2, 3, 4, 5],
+ })
+ })
+}
+
+#[bench]
+pub fn bench_noncopy_nonarena(b: &mut Bencher) {
+ b.iter(|| {
+ let _: Box<_> = Box::new(Noncopy {
+ string: "hello world".to_string(),
+ array: vec![1, 2, 3, 4, 5],
+ });
+ })
+}
diff --git a/src/libcore/benches/ascii.rs b/src/libcore/benches/ascii.rs
index 10b6cc6..a337c46 100644
--- a/src/libcore/benches/ascii.rs
+++ b/src/libcore/benches/ascii.rs
@@ -191,7 +191,7 @@
fn case11_mask_mult_bool_match_range(bytes: &mut [u8]) {
fn is_ascii_lowercase(b: u8) -> bool {
match b {
- b'a'...b'z' => true,
+ b'a'..=b'z' => true,
_ => false
}
}
@@ -203,7 +203,7 @@
fn case12_mask_shifted_bool_match_range(bytes: &mut [u8]) {
fn is_ascii_lowercase(b: u8) -> bool {
match b {
- b'a'...b'z' => true,
+ b'a'..=b'z' => true,
_ => false
}
}
@@ -215,7 +215,7 @@
fn case13_subtract_shifted_bool_match_range(bytes: &mut [u8]) {
fn is_ascii_lowercase(b: u8) -> bool {
match b {
- b'a'...b'z' => true,
+ b'a'..=b'z' => true,
_ => false
}
}
@@ -227,7 +227,7 @@
fn case14_subtract_multiplied_bool_match_range(bytes: &mut [u8]) {
fn is_ascii_lowercase(b: u8) -> bool {
match b {
- b'a'...b'z' => true,
+ b'a'..=b'z' => true,
_ => false
}
}
diff --git a/src/libcore/char/convert.rs b/src/libcore/char/convert.rs
index ec9ac7c..0a870c6 100644
--- a/src/libcore/char/convert.rs
+++ b/src/libcore/char/convert.rs
@@ -123,7 +123,7 @@
}
}
-/// Maps a byte in 0x00...0xFF to a `char` whose code point has the same value, in U+0000 to U+00FF.
+/// Maps a byte in 0x00..=0xFF to a `char` whose code point has the same value, in U+0000..=U+00FF.
///
/// Unicode is designed such that this effectively decodes bytes
/// with the character encoding that IANA calls ISO-8859-1.
diff --git a/src/libcore/char/methods.rs b/src/libcore/char/methods.rs
index 18557e0..722c4c8 100644
--- a/src/libcore/char/methods.rs
+++ b/src/libcore/char/methods.rs
@@ -1042,8 +1042,8 @@
/// Checks if the value is an ASCII alphabetic character:
///
- /// - U+0041 'A' ... U+005A 'Z', or
- /// - U+0061 'a' ... U+007A 'z'.
+ /// - U+0041 'A' ..= U+005A 'Z', or
+ /// - U+0061 'a' ..= U+007A 'z'.
///
/// # Examples
///
@@ -1075,7 +1075,7 @@
}
/// Checks if the value is an ASCII uppercase character:
- /// U+0041 'A' ... U+005A 'Z'.
+ /// U+0041 'A' ..= U+005A 'Z'.
///
/// # Examples
///
@@ -1107,7 +1107,7 @@
}
/// Checks if the value is an ASCII lowercase character:
- /// U+0061 'a' ... U+007A 'z'.
+ /// U+0061 'a' ..= U+007A 'z'.
///
/// # Examples
///
@@ -1140,9 +1140,9 @@
/// Checks if the value is an ASCII alphanumeric character:
///
- /// - U+0041 'A' ... U+005A 'Z', or
- /// - U+0061 'a' ... U+007A 'z', or
- /// - U+0030 '0' ... U+0039 '9'.
+ /// - U+0041 'A' ..= U+005A 'Z', or
+ /// - U+0061 'a' ..= U+007A 'z', or
+ /// - U+0030 '0' ..= U+0039 '9'.
///
/// # Examples
///
@@ -1174,7 +1174,7 @@
}
/// Checks if the value is an ASCII decimal digit:
- /// U+0030 '0' ... U+0039 '9'.
+ /// U+0030 '0' ..= U+0039 '9'.
///
/// # Examples
///
@@ -1207,9 +1207,9 @@
/// Checks if the value is an ASCII hexadecimal digit:
///
- /// - U+0030 '0' ... U+0039 '9', or
- /// - U+0041 'A' ... U+0046 'F', or
- /// - U+0061 'a' ... U+0066 'f'.
+ /// - U+0030 '0' ..= U+0039 '9', or
+ /// - U+0041 'A' ..= U+0046 'F', or
+ /// - U+0061 'a' ..= U+0066 'f'.
///
/// # Examples
///
@@ -1242,10 +1242,10 @@
/// Checks if the value is an ASCII punctuation character:
///
- /// - U+0021 ... U+002F `! " # $ % & ' ( ) * + , - . /`, or
- /// - U+003A ... U+0040 `: ; < = > ? @`, or
- /// - U+005B ... U+0060 ``[ \ ] ^ _ ` ``, or
- /// - U+007B ... U+007E `{ | } ~`
+ /// - U+0021 ..= U+002F `! " # $ % & ' ( ) * + , - . /`, or
+ /// - U+003A ..= U+0040 `: ; < = > ? @`, or
+ /// - U+005B ..= U+0060 ``[ \ ] ^ _ ` ``, or
+ /// - U+007B ..= U+007E `{ | } ~`
///
/// # Examples
///
@@ -1277,7 +1277,7 @@
}
/// Checks if the value is an ASCII graphic character:
- /// U+0021 '!' ... U+007E '~'.
+ /// U+0021 '!' ..= U+007E '~'.
///
/// # Examples
///
@@ -1358,7 +1358,7 @@
}
/// Checks if the value is an ASCII control character:
- /// U+0000 NUL ... U+001F UNIT SEPARATOR, or U+007F DELETE.
+ /// U+0000 NUL ..= U+001F UNIT SEPARATOR, or U+007F DELETE.
/// Note that most ASCII whitespace characters are control
/// characters, but SPACE is not.
///
diff --git a/src/libcore/hint.rs b/src/libcore/hint.rs
index 94eddbe..519212b 100644
--- a/src/libcore/hint.rs
+++ b/src/libcore/hint.rs
@@ -111,31 +111,31 @@
/// This function is a no-op, and does not even read from `dummy`.
#[inline]
#[unstable(feature = "test", issue = "27812")]
+#[allow(unreachable_code)] // this makes #[cfg] a bit easier below.
pub fn black_box<T>(dummy: T) -> T {
- cfg_if! {
- if #[cfg(any(
- target_arch = "asmjs",
- all(
- target_arch = "wasm32",
- target_os = "emscripten"
- )
- ))] {
- #[inline]
- unsafe fn black_box_impl<T>(d: T) -> T {
- // these targets do not support inline assembly
- let ret = crate::ptr::read_volatile(&d);
- crate::mem::forget(d);
- ret
- }
- } else {
- #[inline]
- unsafe fn black_box_impl<T>(d: T) -> T {
- // we need to "use" the argument in some way LLVM can't
- // introspect.
- asm!("" : : "r"(&d));
- d
- }
- }
+ // We need to "use" the argument in some way LLVM can't introspect, and on
+ // targets that support it we can typically leverage inline assembly to do
+ // this. LLVM's intepretation of inline assembly is that it's, well, a black
+ // box. This isn't the greatest implementation since it probably deoptimizes
+ // more than we want, but it's so far good enough.
+ #[cfg(not(any(
+ target_arch = "asmjs",
+ all(
+ target_arch = "wasm32",
+ target_os = "emscripten"
+ )
+ )))]
+ unsafe {
+ asm!("" : : "r"(&dummy));
+ return dummy;
}
- unsafe { black_box_impl(dummy) }
+
+ // Not all platforms support inline assembly so try to do something without
+ // inline assembly which in theory still hinders at least some optimizations
+ // on those targets. This is the "best effort" scenario.
+ unsafe {
+ let ret = crate::ptr::read_volatile(&dummy);
+ crate::mem::forget(dummy);
+ ret
+ }
}
diff --git a/src/libcore/internal_macros.rs b/src/libcore/internal_macros.rs
index dd5b928..3acf2ec 100644
--- a/src/libcore/internal_macros.rs
+++ b/src/libcore/internal_macros.rs
@@ -117,84 +117,3 @@
)+
}
}
-
-/// A macro for defining `#[cfg]` if-else statements.
-///
-/// The macro provided by this crate, `cfg_if`, is similar to the `if/elif` C
-/// preprocessor macro by allowing definition of a cascade of `#[cfg]` cases,
-/// emitting the implementation which matches first.
-///
-/// This allows you to conveniently provide a long list `#[cfg]`'d blocks of code
-/// without having to rewrite each clause multiple times.
-///
-/// # Example
-///
-/// ```
-/// #[macro_use]
-/// extern crate cfg_if;
-///
-/// cfg_if! {
-/// if #[cfg(unix)] {
-/// fn foo() { /* unix specific functionality */ }
-/// } else if #[cfg(target_pointer_width = "32")] {
-/// fn foo() { /* non-unix, 32-bit functionality */ }
-/// } else {
-/// fn foo() { /* fallback implementation */ }
-/// }
-/// }
-///
-/// # fn main() {}
-/// ```
-macro_rules! cfg_if {
- // match if/else chains with a final `else`
- ($(
- if #[cfg($($meta:meta),*)] { $($it:item)* }
- ) else * else {
- $($it2:item)*
- }) => {
- cfg_if! {
- @__items
- () ;
- $( ( ($($meta),*) ($($it)*) ), )*
- ( () ($($it2)*) ),
- }
- };
-
- // match if/else chains lacking a final `else`
- (
- if #[cfg($($i_met:meta),*)] { $($i_it:item)* }
- $(
- else if #[cfg($($e_met:meta),*)] { $($e_it:item)* }
- )*
- ) => {
- cfg_if! {
- @__items
- () ;
- ( ($($i_met),*) ($($i_it)*) ),
- $( ( ($($e_met),*) ($($e_it)*) ), )*
- ( () () ),
- }
- };
-
- // Internal and recursive macro to emit all the items
- //
- // Collects all the negated cfgs in a list at the beginning and after the
- // semicolon is all the remaining items
- (@__items ($($not:meta,)*) ; ) => {};
- (@__items ($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => {
- // Emit all items within one block, applying an approprate #[cfg]. The
- // #[cfg] will require all `$m` matchers specified and must also negate
- // all previous matchers.
- cfg_if! { @__apply cfg(all($($m,)* not(any($($not),*)))), $($it)* }
-
- // Recurse to emit all other items in `$rest`, and when we do so add all
- // our `$m` matchers to the list of `$not` matchers as future emissions
- // will have to negate everything we just matched as well.
- cfg_if! { @__items ($($not,)* $($m,)*) ; $($rest)* }
- };
-
- // Internal macro to Apply a cfg attribute to a list of items
- (@__apply $m:meta, $($it:item)*) => {
- $(#[$m] $it)*
- };
-}
diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs
index 8d9a517..b30eff8 100644
--- a/src/libcore/intrinsics.rs
+++ b/src/libcore/intrinsics.rs
@@ -1607,3 +1607,9 @@
// Identical to the `f32` case.
(if x < y || x != x { y } else { x }) * 1.0
}
+
+/// For bootstrapping, implement unchecked_sub as just wrapping_sub.
+#[cfg(bootstrap)]
+pub unsafe fn unchecked_sub<T>(x: T, y: T) -> T {
+ sub_with_overflow(x, y).0
+}
diff --git a/src/libcore/iter/range.rs b/src/libcore/iter/range.rs
index 6bbf776..efda3b2 100644
--- a/src/libcore/iter/range.rs
+++ b/src/libcore/iter/range.rs
@@ -281,6 +281,19 @@
None
}
}
+
+ #[inline]
+ fn nth_back(&mut self, n: usize) -> Option<A> {
+ if let Some(minus_n) = self.end.sub_usize(n) {
+ if minus_n > self.start {
+ self.end = minus_n.sub_one();
+ return Some(self.end.clone())
+ }
+ }
+
+ self.end = self.start.clone();
+ None
+ }
}
#[stable(feature = "fused", since = "1.26.0")]
@@ -439,6 +452,34 @@
}
#[inline]
+ fn nth_back(&mut self, n: usize) -> Option<A> {
+ self.compute_is_empty();
+ if self.is_empty.unwrap_or_default() {
+ return None;
+ }
+
+ if let Some(minus_n) = self.end.sub_usize(n) {
+ use crate::cmp::Ordering::*;
+
+ match minus_n.partial_cmp(&self.start) {
+ Some(Greater) => {
+ self.is_empty = Some(false);
+ self.end = minus_n.sub_one();
+ return Some(minus_n);
+ }
+ Some(Equal) => {
+ self.is_empty = Some(true);
+ return Some(minus_n);
+ }
+ _ => {}
+ }
+ }
+
+ self.is_empty = Some(true);
+ None
+ }
+
+ #[inline]
fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where
Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
{
diff --git a/src/libcore/macros.rs b/src/libcore/macros.rs
index 9dfa09c..8b44025 100644
--- a/src/libcore/macros.rs
+++ b/src/libcore/macros.rs
@@ -6,13 +6,13 @@
#[stable(feature = "core", since = "1.6.0")]
macro_rules! panic {
() => (
- panic!("explicit panic")
+ $crate::panic!("explicit panic")
);
($msg:expr) => ({
$crate::panicking::panic(&($msg, file!(), line!(), __rust_unstable_column!()))
});
($msg:expr,) => (
- panic!($msg)
+ $crate::panic!($msg)
);
($fmt:expr, $($arg:tt)+) => ({
$crate::panicking::panic_fmt(format_args!($fmt, $($arg)*),
@@ -58,7 +58,7 @@
}
});
($left:expr, $right:expr,) => ({
- assert_eq!($left, $right)
+ $crate::assert_eq!($left, $right)
});
($left:expr, $right:expr, $($arg:tt)+) => ({
match (&($left), &($right)) {
@@ -115,7 +115,7 @@
}
});
($left:expr, $right:expr,) => {
- assert_ne!($left, $right)
+ $crate::assert_ne!($left, $right)
};
($left:expr, $right:expr, $($arg:tt)+) => ({
match (&($left), &($right)) {
@@ -208,7 +208,7 @@
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
macro_rules! debug_assert_eq {
- ($($arg:tt)*) => (if cfg!(debug_assertions) { assert_eq!($($arg)*); })
+ ($($arg:tt)*) => (if cfg!(debug_assertions) { $crate::assert_eq!($($arg)*); })
}
/// Asserts that two expressions are not equal to each other.
@@ -235,7 +235,7 @@
#[macro_export]
#[stable(feature = "assert_ne", since = "1.13.0")]
macro_rules! debug_assert_ne {
- ($($arg:tt)*) => (if cfg!(debug_assertions) { assert_ne!($($arg)*); })
+ ($($arg:tt)*) => (if cfg!(debug_assertions) { $crate::assert_ne!($($arg)*); })
}
/// Unwraps a result or propagates its error.
@@ -310,7 +310,7 @@
return $crate::result::Result::Err($crate::convert::From::from(err))
}
});
- ($expr:expr,) => (r#try!($expr));
+ ($expr:expr,) => ($crate::r#try!($expr));
}
/// Writes formatted data into a buffer.
@@ -425,10 +425,10 @@
#[allow_internal_unstable(format_args_nl)]
macro_rules! writeln {
($dst:expr) => (
- write!($dst, "\n")
+ $crate::write!($dst, "\n")
);
($dst:expr,) => (
- writeln!($dst)
+ $crate::writeln!($dst)
);
($dst:expr, $($arg:tt)*) => (
$dst.write_fmt(format_args_nl!($($arg)*))
@@ -494,10 +494,10 @@
panic!("internal error: entered unreachable code")
});
($msg:expr) => ({
- unreachable!("{}", $msg)
+ $crate::unreachable!("{}", $msg)
});
($msg:expr,) => ({
- unreachable!($msg)
+ $crate::unreachable!($msg)
});
($fmt:expr, $($arg:tt)*) => ({
panic!(concat!("internal error: entered unreachable code: ", $fmt), $($arg)*)
diff --git a/src/libcore/num/mod.rs b/src/libcore/num/mod.rs
index 304b2fc..d70f556 100644
--- a/src/libcore/num/mod.rs
+++ b/src/libcore/num/mod.rs
@@ -4166,8 +4166,8 @@
/// Checks if the value is an ASCII alphabetic character:
///
- /// - U+0041 'A' ... U+005A 'Z', or
- /// - U+0061 'a' ... U+007A 'z'.
+ /// - U+0041 'A' ..= U+005A 'Z', or
+ /// - U+0061 'a' ..= U+007A 'z'.
///
/// # Examples
///
@@ -4202,7 +4202,7 @@
}
/// Checks if the value is an ASCII uppercase character:
- /// U+0041 'A' ... U+005A 'Z'.
+ /// U+0041 'A' ..= U+005A 'Z'.
///
/// # Examples
///
@@ -4237,7 +4237,7 @@
}
/// Checks if the value is an ASCII lowercase character:
- /// U+0061 'a' ... U+007A 'z'.
+ /// U+0061 'a' ..= U+007A 'z'.
///
/// # Examples
///
@@ -4273,9 +4273,9 @@
/// Checks if the value is an ASCII alphanumeric character:
///
- /// - U+0041 'A' ... U+005A 'Z', or
- /// - U+0061 'a' ... U+007A 'z', or
- /// - U+0030 '0' ... U+0039 '9'.
+ /// - U+0041 'A' ..= U+005A 'Z', or
+ /// - U+0061 'a' ..= U+007A 'z', or
+ /// - U+0030 '0' ..= U+0039 '9'.
///
/// # Examples
///
@@ -4310,7 +4310,7 @@
}
/// Checks if the value is an ASCII decimal digit:
- /// U+0030 '0' ... U+0039 '9'.
+ /// U+0030 '0' ..= U+0039 '9'.
///
/// # Examples
///
@@ -4346,9 +4346,9 @@
/// Checks if the value is an ASCII hexadecimal digit:
///
- /// - U+0030 '0' ... U+0039 '9', or
- /// - U+0041 'A' ... U+0046 'F', or
- /// - U+0061 'a' ... U+0066 'f'.
+ /// - U+0030 '0' ..= U+0039 '9', or
+ /// - U+0041 'A' ..= U+0046 'F', or
+ /// - U+0061 'a' ..= U+0066 'f'.
///
/// # Examples
///
@@ -4384,10 +4384,10 @@
/// Checks if the value is an ASCII punctuation character:
///
- /// - U+0021 ... U+002F `! " # $ % & ' ( ) * + , - . /`, or
- /// - U+003A ... U+0040 `: ; < = > ? @`, or
- /// - U+005B ... U+0060 ``[ \ ] ^ _ ` ``, or
- /// - U+007B ... U+007E `{ | } ~`
+ /// - U+0021 ..= U+002F `! " # $ % & ' ( ) * + , - . /`, or
+ /// - U+003A ..= U+0040 `: ; < = > ? @`, or
+ /// - U+005B ..= U+0060 ``[ \ ] ^ _ ` ``, or
+ /// - U+007B ..= U+007E `{ | } ~`
///
/// # Examples
///
@@ -4422,7 +4422,7 @@
}
/// Checks if the value is an ASCII graphic character:
- /// U+0021 '!' ... U+007E '~'.
+ /// U+0021 '!' ..= U+007E '~'.
///
/// # Examples
///
@@ -4509,7 +4509,7 @@
}
/// Checks if the value is an ASCII control character:
- /// U+0000 NUL ... U+001F UNIT SEPARATOR, or U+007F DELETE.
+ /// U+0000 NUL ..= U+001F UNIT SEPARATOR, or U+007F DELETE.
/// Note that most ASCII whitespace characters are control
/// characters, but SPACE is not.
///
diff --git a/src/libcore/option.rs b/src/libcore/option.rs
index 6b7f491..eec4b14 100644
--- a/src/libcore/option.rs
+++ b/src/libcore/option.rs
@@ -145,7 +145,7 @@
// which basically means it must be `Option`.
/// The `Option` type. See [the module level documentation](index.html) for more.
-#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
+#[derive(Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Option<T> {
/// No value
@@ -725,8 +725,6 @@
/// # Examples
///
/// ```
- /// #![feature(option_xor)]
- ///
/// let x = Some(2);
/// let y: Option<u32> = None;
/// assert_eq!(x.xor(y), Some(2));
@@ -744,7 +742,7 @@
/// assert_eq!(x.xor(y), None);
/// ```
#[inline]
- #[unstable(feature = "option_xor", issue = "50512")]
+ #[stable(feature = "option_xor", since = "1.37.0")]
pub fn xor(self, optb: Option<T>) -> Option<T> {
match (self, optb) {
(Some(a), None) => Some(a),
@@ -1041,6 +1039,25 @@
/////////////////////////////////////////////////////////////////////////////
#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for Option<T> {
+ #[inline]
+ fn clone(&self) -> Self {
+ match self {
+ Some(x) => Some(x.clone()),
+ None => None,
+ }
+ }
+
+ #[inline]
+ fn clone_from(&mut self, source: &Self) {
+ match (self, source) {
+ (Some(to), Some(from)) => to.clone_from(from),
+ (to, from) => *to = from.clone(),
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Default for Option<T> {
/// Returns [`None`][Option::None].
#[inline]
diff --git a/src/libcore/ptr/mod.rs b/src/libcore/ptr/mod.rs
index 80ac67d..8f026a5 100644
--- a/src/libcore/ptr/mod.rs
+++ b/src/libcore/ptr/mod.rs
@@ -984,8 +984,17 @@
/// operation because the returned value could be pointing to invalid
/// memory.
///
+ /// When calling this method, you have to ensure that if the pointer is
+ /// non-NULL, then it is properly aligned, dereferencable (for the whole
+ /// size of `T`) and points to an initialized instance of `T`. This applies
+ /// even if the result of this method is unused!
+ /// (The part about being initialized is not yet fully decided, but until
+ /// it is, the only safe approach is to ensure that they are indeed initialized.)
+ ///
/// Additionally, the lifetime `'a` returned is arbitrarily chosen and does
- /// not necessarily reflect the actual lifetime of the data.
+ /// not necessarily reflect the actual lifetime of the data. It is up to the
+ /// caller to ensure that for the duration of this lifetime, the memory this
+ /// pointer points to does not get written to outside of `UnsafeCell<U>`.
///
/// # Examples
///
@@ -1610,8 +1619,17 @@
/// operation because the returned value could be pointing to invalid
/// memory.
///
+ /// When calling this method, you have to ensure that if the pointer is
+ /// non-NULL, then it is properly aligned, dereferencable (for the whole
+ /// size of `T`) and points to an initialized instance of `T`. This applies
+ /// even if the result of this method is unused!
+ /// (The part about being initialized is not yet fully decided, but until
+ /// it is, the only safe approach is to ensure that they are indeed initialized.)
+ ///
/// Additionally, the lifetime `'a` returned is arbitrarily chosen and does
- /// not necessarily reflect the actual lifetime of the data.
+ /// not necessarily reflect the actual lifetime of the data. It is up to the
+ /// caller to ensure that for the duration of this lifetime, the memory this
+ /// pointer points to does not get written to outside of `UnsafeCell<U>`.
///
/// # Examples
///
@@ -1755,10 +1773,24 @@
///
/// # Safety
///
- /// As with `as_ref`, this is unsafe because it cannot verify the validity
+ /// As with [`as_ref`], this is unsafe because it cannot verify the validity
/// of the returned pointer, nor can it ensure that the lifetime `'a`
/// returned is indeed a valid lifetime for the contained data.
///
+ /// When calling this method, you have to ensure that if the pointer is
+ /// non-NULL, then it is properly aligned, dereferencable (for the whole
+ /// size of `T`) and points to an initialized instance of `T`. This applies
+ /// even if the result of this method is unused!
+ /// (The part about being initialized is not yet fully decided, but until
+ /// it is the only safe approach is to ensure that they are indeed initialized.)
+ ///
+ /// Additionally, the lifetime `'a` returned is arbitrarily chosen and does
+ /// not necessarily reflect the actual lifetime of the data. It is up to the
+ /// caller to ensure that for the duration of this lifetime, the memory this
+ /// pointer points to does not get accessed through any other pointer.
+ ///
+ /// [`as_ref`]: #method.as_ref
+ ///
/// # Examples
///
/// Basic usage:
diff --git a/src/libcore/result.rs b/src/libcore/result.rs
index bf8fd63..8a09877 100644
--- a/src/libcore/result.rs
+++ b/src/libcore/result.rs
@@ -240,7 +240,7 @@
///
/// [`Ok`]: enum.Result.html#variant.Ok
/// [`Err`]: enum.Result.html#variant.Err
-#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
+#[derive(Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
#[must_use = "this `Result` may be an `Err` variant, which should be handled"]
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Result<T, E> {
@@ -1004,6 +1004,27 @@
/////////////////////////////////////////////////////////////////////////////
#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone, E: Clone> Clone for Result<T, E> {
+ #[inline]
+ fn clone(&self) -> Self {
+ match self {
+ Ok(x) => Ok(x.clone()),
+ Err(x) => Err(x.clone()),
+ }
+ }
+
+ #[inline]
+ fn clone_from(&mut self, source: &Self) {
+ match (self, source) {
+ (Ok(to), Ok(from)) => to.clone_from(from),
+ (Err(to), Err(from)) => to.clone_from(from),
+ (to, from) => *to = from.clone(),
+ }
+ }
+}
+
+
+#[stable(feature = "rust1", since = "1.0.0")]
impl<T, E> IntoIterator for Result<T, E> {
type Item = T;
type IntoIter = IntoIter<T>;
diff --git a/src/libcore/slice/mod.rs b/src/libcore/slice/mod.rs
index c9c73f4..b2376cd 100644
--- a/src/libcore/slice/mod.rs
+++ b/src/libcore/slice/mod.rs
@@ -25,7 +25,7 @@
use crate::cmp::Ordering::{self, Less, Equal, Greater};
use crate::cmp;
use crate::fmt;
-use crate::intrinsics::assume;
+use crate::intrinsics::{assume, exact_div, unchecked_sub};
use crate::isize;
use crate::iter::*;
use crate::ops::{FnMut, Try, self};
@@ -2146,14 +2146,13 @@
/// Copying four bytes within a slice:
///
/// ```
- /// # #![feature(copy_within)]
/// let mut bytes = *b"Hello, World!";
///
/// bytes.copy_within(1..5, 8);
///
/// assert_eq!(&bytes, b"Hello, Wello!");
/// ```
- #[unstable(feature = "copy_within", issue = "54236")]
+ #[stable(feature = "copy_within", since = "1.37.0")]
pub fn copy_within<R: ops::RangeBounds<usize>>(&mut self, src: R, dest: usize)
where
T: Copy,
@@ -2178,8 +2177,8 @@
assert!(dest <= self.len() - count, "dest is out of bounds");
unsafe {
ptr::copy(
- self.get_unchecked(src_start),
- self.get_unchecked_mut(dest),
+ self.as_ptr().add(src_start),
+ self.as_mut_ptr().add(dest),
count,
);
}
@@ -2999,14 +2998,27 @@
// unexpected way. (Tested by `codegen/slice-position-bounds-check`.)
macro_rules! len {
($self: ident) => {{
+ #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
+
let start = $self.ptr;
- let diff = ($self.end as usize).wrapping_sub(start as usize);
let size = size_from_ptr(start);
if size == 0 {
+ // This _cannot_ use `unchecked_sub` because we depend on wrapping
+ // to represent the length of long ZST slice iterators.
+ let diff = ($self.end as usize).wrapping_sub(start as usize);
diff
} else {
- // Using division instead of `offset_from` helps LLVM remove bounds checks
- diff / size
+ // We know that `start <= end`, so can do better than `offset_from`,
+ // which needs to deal in signed. By setting appropriate flags here
+ // we can tell LLVM this, which helps it remove bounds checks.
+ // SAFETY: By the type invariant, `start <= end`
+ let diff = unsafe { unchecked_sub($self.end as usize, start as usize) };
+ // By also telling LLVM that the pointers are apart by an exact
+ // multiple of the type size, it can optimize `len() == 0` down to
+ // `start == end` instead of `(end - start) < size`.
+ // SAFETY: By the type invariant, the pointers are aligned so the
+ // distance between them must be a multiple of pointee size
+ unsafe { exact_div(diff, size) }
}
}}
}
diff --git a/src/libcore/tests/iter.rs b/src/libcore/tests/iter.rs
index bedb9e7..020618a 100644
--- a/src/libcore/tests/iter.rs
+++ b/src/libcore/tests/iter.rs
@@ -1658,6 +1658,23 @@
}
#[test]
+fn test_range_nth_back() {
+ assert_eq!((10..15).nth_back(0), Some(14));
+ assert_eq!((10..15).nth_back(1), Some(13));
+ assert_eq!((10..15).nth_back(4), Some(10));
+ assert_eq!((10..15).nth_back(5), None);
+ assert_eq!((-120..80_i8).nth_back(199), Some(-120));
+
+ let mut r = 10..20;
+ assert_eq!(r.nth_back(2), Some(17));
+ assert_eq!(r, 10..17);
+ assert_eq!(r.nth_back(2), Some(14));
+ assert_eq!(r, 10..14);
+ assert_eq!(r.nth_back(10), None);
+ assert_eq!(r, 10..10);
+}
+
+#[test]
fn test_range_from_nth() {
assert_eq!((10..).nth(0), Some(10));
assert_eq!((10..).nth(1), Some(11));
@@ -1715,6 +1732,26 @@
}
#[test]
+fn test_range_inclusive_nth_back() {
+ assert_eq!((10..=15).nth_back(0), Some(15));
+ assert_eq!((10..=15).nth_back(1), Some(14));
+ assert_eq!((10..=15).nth_back(5), Some(10));
+ assert_eq!((10..=15).nth_back(6), None);
+ assert_eq!((-120..=80_i8).nth_back(200), Some(-120));
+
+ let mut r = 10_u8..=20;
+ assert_eq!(r.nth_back(2), Some(18));
+ assert_eq!(r, 10..=17);
+ assert_eq!(r.nth_back(2), Some(15));
+ assert_eq!(r, 10..=14);
+ assert_eq!(r.is_empty(), false);
+ assert_eq!(ExactSizeIterator::is_empty(&r), false);
+ assert_eq!(r.nth_back(10), None);
+ assert_eq!(r.is_empty(), true);
+ assert_eq!(ExactSizeIterator::is_empty(&r), true);
+}
+
+#[test]
fn test_range_step() {
#![allow(deprecated)]
diff --git a/src/libcore/tests/lib.rs b/src/libcore/tests/lib.rs
index 928bdd7..bf072a9 100644
--- a/src/libcore/tests/lib.rs
+++ b/src/libcore/tests/lib.rs
@@ -28,7 +28,6 @@
#![feature(inner_deref)]
#![feature(slice_internals)]
#![feature(slice_partition_dedup)]
-#![feature(copy_within)]
#![feature(int_error_matching)]
#![feature(const_fn)]
#![warn(rust_2018_idioms)]
diff --git a/src/libcore/tests/slice.rs b/src/libcore/tests/slice.rs
index 9710f01..eaa799f 100644
--- a/src/libcore/tests/slice.rs
+++ b/src/libcore/tests/slice.rs
@@ -1512,6 +1512,13 @@
let mut bytes = *b"Hello, World!";
bytes.copy_within(.., 0);
assert_eq!(&bytes, b"Hello, World!");
+
+ // Ensure that copying at the end of slice won't cause UB.
+ let mut bytes = *b"Hello, World!";
+ bytes.copy_within(13..13, 5);
+ assert_eq!(&bytes, b"Hello, World!");
+ bytes.copy_within(5..5, 13);
+ assert_eq!(&bytes, b"Hello, World!");
}
#[test]
@@ -1536,6 +1543,13 @@
// 2 is greater than 1, so this range is invalid.
bytes.copy_within(2..1, 0);
}
+#[test]
+#[should_panic(expected = "attempted to index slice up to maximum usize")]
+fn test_copy_within_panics_src_out_of_bounds() {
+ let mut bytes = *b"Hello, World!";
+ // an inclusive range ending at usize::max_value() would make src_end overflow
+ bytes.copy_within(usize::max_value()..=usize::max_value(), 0);
+}
#[test]
fn test_is_sorted() {
diff --git a/src/libfmt_macros/Cargo.toml b/src/libfmt_macros/Cargo.toml
index 50779a2..fc32f21 100644
--- a/src/libfmt_macros/Cargo.toml
+++ b/src/libfmt_macros/Cargo.toml
@@ -8,3 +8,6 @@
name = "fmt_macros"
path = "lib.rs"
crate-type = ["dylib"]
+
+[dependencies]
+syntax_pos = { path = "../libsyntax_pos" }
diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs
index 1000f23..f6e9143 100644
--- a/src/libfmt_macros/lib.rs
+++ b/src/libfmt_macros/lib.rs
@@ -25,6 +25,17 @@
use std::string;
use std::iter;
+use syntax_pos::{InnerSpan, Symbol};
+
+#[derive(Copy, Clone)]
+struct InnerOffset(usize);
+
+impl InnerOffset {
+ fn to(self, end: InnerOffset) -> InnerSpan {
+ InnerSpan::new(self.0, end.0)
+ }
+}
+
/// A piece is a portion of the format string which represents the next part
/// to emit. These are emitted as a stream by the `Parser` class.
#[derive(Copy, Clone, PartialEq)]
@@ -40,7 +51,7 @@
#[derive(Copy, Clone, PartialEq)]
pub struct Argument<'a> {
/// Where to find this argument
- pub position: Position<'a>,
+ pub position: Position,
/// How to format the argument
pub format: FormatSpec<'a>,
}
@@ -55,9 +66,9 @@
/// Packed version of various flags provided
pub flags: u32,
/// The integer precision to use
- pub precision: Count<'a>,
+ pub precision: Count,
/// The string width requested for the resulting format
- pub width: Count<'a>,
+ pub width: Count,
/// The descriptor string representing the name of the format desired for
/// this argument, this can be empty or any number of characters, although
/// it is required to be one word.
@@ -66,16 +77,16 @@
/// Enum describing where an argument for a format can be located.
#[derive(Copy, Clone, PartialEq)]
-pub enum Position<'a> {
+pub enum Position {
/// The argument is implied to be located at an index
ArgumentImplicitlyIs(usize),
/// The argument is located at a specific index given in the format
ArgumentIs(usize),
/// The argument has a name.
- ArgumentNamed(&'a str),
+ ArgumentNamed(Symbol),
}
-impl Position<'_> {
+impl Position {
pub fn index(&self) -> Option<usize> {
match self {
ArgumentIs(i) | ArgumentImplicitlyIs(i) => Some(*i),
@@ -120,11 +131,11 @@
/// A count is used for the precision and width parameters of an integer, and
/// can reference either an argument or a literal integer.
#[derive(Copy, Clone, PartialEq)]
-pub enum Count<'a> {
+pub enum Count {
/// The count is specified explicitly.
CountIs(usize),
/// The count is specified by the argument with the given name.
- CountIsName(&'a str),
+ CountIsName(Symbol),
/// The count is specified by the argument at the given index.
CountIsParam(usize),
/// The count is implied and cannot be explicitly specified.
@@ -135,9 +146,8 @@
pub description: string::String,
pub note: Option<string::String>,
pub label: string::String,
- pub start: SpanIndex,
- pub end: SpanIndex,
- pub secondary_label: Option<(string::String, SpanIndex, SpanIndex)>,
+ pub span: InnerSpan,
+ pub secondary_label: Option<(string::String, InnerSpan)>,
}
/// The parser structure for interpreting the input format string. This is
@@ -156,24 +166,15 @@
/// `Some(raw count)` when the string is "raw", used to position spans correctly
style: Option<usize>,
/// Start and end byte offset of every successfully parsed argument
- pub arg_places: Vec<(SpanIndex, SpanIndex)>,
+ pub arg_places: Vec<InnerSpan>,
/// Characters that need to be shifted
skips: Vec<usize>,
- /// Span offset of the last opening brace seen, used for error reporting
- last_opening_brace_pos: Option<SpanIndex>,
+ /// Span of the last opening brace seen, used for error reporting
+ last_opening_brace: Option<InnerSpan>,
/// Wether the source string is comes from `println!` as opposed to `format!` or `print!`
append_newline: bool,
}
-#[derive(Clone, Copy, Debug)]
-pub struct SpanIndex(pub usize);
-
-impl SpanIndex {
- pub fn unwrap(self) -> usize {
- self.0
- }
-}
-
impl<'a> Iterator for Parser<'a> {
type Item = Piece<'a>;
@@ -181,19 +182,20 @@
if let Some(&(pos, c)) = self.cur.peek() {
match c {
'{' => {
- let curr_last_brace = self.last_opening_brace_pos;
- self.last_opening_brace_pos = Some(self.to_span_index(pos));
+ let curr_last_brace = self.last_opening_brace;
+ let byte_pos = self.to_span_index(pos);
+ self.last_opening_brace = Some(byte_pos.to(InnerOffset(byte_pos.0 + 1)));
self.cur.next();
if self.consume('{') {
- self.last_opening_brace_pos = curr_last_brace;
+ self.last_opening_brace = curr_last_brace;
Some(String(self.string(pos + 1)))
} else {
let arg = self.argument();
- if let Some(arg_pos) = self.must_consume('}').map(|end| {
- (self.to_span_index(pos), self.to_span_index(end + 1))
- }) {
- self.arg_places.push(arg_pos);
+ if let Some(end) = self.must_consume('}') {
+ let start = self.to_span_index(pos);
+ let end = self.to_span_index(end + 1);
+ self.arg_places.push(start.to(end));
}
Some(NextArgument(arg))
}
@@ -208,8 +210,7 @@
"unmatched `}` found",
"unmatched `}`",
"if you intended to print `}`, you can escape it using `}}`",
- err_pos,
- err_pos,
+ err_pos.to(err_pos),
);
None
}
@@ -241,7 +242,7 @@
style,
arg_places: vec![],
skips,
- last_opening_brace_pos: None,
+ last_opening_brace: None,
append_newline,
}
}
@@ -253,15 +254,13 @@
&mut self,
description: S1,
label: S2,
- start: SpanIndex,
- end: SpanIndex,
+ span: InnerSpan,
) {
self.errors.push(ParseError {
description: description.into(),
note: None,
label: label.into(),
- start,
- end,
+ span,
secondary_label: None,
});
}
@@ -274,15 +273,13 @@
description: S1,
label: S2,
note: S3,
- start: SpanIndex,
- end: SpanIndex,
+ span: InnerSpan,
) {
self.errors.push(ParseError {
description: description.into(),
note: Some(note.into()),
label: label.into(),
- start,
- end,
+ span,
secondary_label: None,
});
}
@@ -303,22 +300,21 @@
}
}
- fn raw(&self) -> usize {
- self.style.map(|raw| raw + 1).unwrap_or(0)
- }
-
- fn to_span_index(&self, pos: usize) -> SpanIndex {
+ fn to_span_index(&self, pos: usize) -> InnerOffset {
let mut pos = pos;
+ // This handles the raw string case, the raw argument is the number of #
+ // in r###"..."### (we need to add one because of the `r`).
+ let raw = self.style.map(|raw| raw + 1).unwrap_or(0);
for skip in &self.skips {
if pos > *skip {
pos += 1;
- } else if pos == *skip && self.raw() == 0 {
+ } else if pos == *skip && raw == 0 {
pos += 1;
} else {
break;
}
}
- SpanIndex(self.raw() + pos + 1)
+ InnerOffset(raw + pos + 1)
}
/// Forces consumption of the specified character. If the character is not
@@ -336,8 +332,8 @@
let label = "expected `}`".to_owned();
let (note, secondary_label) = if c == '}' {
(Some("if you intended to print `{`, you can escape it using `{{`".to_owned()),
- self.last_opening_brace_pos.map(|pos| {
- ("because of this opening brace".to_owned(), pos, pos)
+ self.last_opening_brace.map(|sp| {
+ ("because of this opening brace".to_owned(), sp)
}))
} else {
(None, None)
@@ -346,8 +342,7 @@
description,
note,
label,
- start: pos,
- end: pos,
+ span: pos.to(pos),
secondary_label,
});
None
@@ -361,8 +356,8 @@
let label = format!("expected `{:?}`", c);
let (note, secondary_label) = if c == '}' {
(Some("if you intended to print `{`, you can escape it using `{{`".to_owned()),
- self.last_opening_brace_pos.map(|pos| {
- ("because of this opening brace".to_owned(), pos, pos)
+ self.last_opening_brace.map(|sp| {
+ ("because of this opening brace".to_owned(), sp)
}))
} else {
(None, None)
@@ -371,12 +366,11 @@
description,
note,
label,
- start: pos,
- end: pos,
+ span: pos.to(pos),
secondary_label,
});
} else {
- self.err(description, format!("expected `{:?}`", c), pos, pos);
+ self.err(description, format!("expected `{:?}`", c), pos.to(pos));
}
None
}
@@ -435,20 +429,24 @@
/// integer index of an argument, a named argument, or a blank string.
/// Returns `Some(parsed_position)` if the position is not implicitly
/// consuming a macro argument, `None` if it's the case.
- fn position(&mut self) -> Option<Position<'a>> {
+ fn position(&mut self) -> Option<Position> {
if let Some(i) = self.integer() {
Some(ArgumentIs(i))
} else {
match self.cur.peek() {
- Some(&(_, c)) if c.is_alphabetic() => Some(ArgumentNamed(self.word())),
+ Some(&(_, c)) if c.is_alphabetic() => {
+ Some(ArgumentNamed(Symbol::intern(self.word())))
+ }
Some(&(pos, c)) if c == '_' => {
let invalid_name = self.string(pos);
self.err_with_note(format!("invalid argument name `{}`", invalid_name),
"invalid argument name",
"argument names cannot start with an underscore",
- self.to_span_index(pos),
- self.to_span_index(pos + invalid_name.len()));
- Some(ArgumentNamed(invalid_name))
+ self.to_span_index(pos).to(
+ self.to_span_index(pos + invalid_name.len())
+ ),
+ );
+ Some(ArgumentNamed(Symbol::intern(invalid_name)))
},
// This is an `ArgumentNext`.
@@ -556,7 +554,7 @@
/// Parses a Count parameter at the current position. This does not check
/// for 'CountIsNextParam' because that is only used in precision, not
/// width.
- fn count(&mut self) -> Count<'a> {
+ fn count(&mut self) -> Count {
if let Some(i) = self.integer() {
if self.consume('$') {
CountIsParam(i)
@@ -570,7 +568,7 @@
self.cur = tmp;
CountImplied
} else if self.consume('$') {
- CountIsName(word)
+ CountIsName(Symbol::intern(word))
} else {
self.cur = tmp;
CountImplied
@@ -624,245 +622,4 @@
}
#[cfg(test)]
-mod tests {
- use super::*;
-
- fn same(fmt: &'static str, p: &[Piece<'static>]) {
- let parser = Parser::new(fmt, None, vec![], false);
- assert!(parser.collect::<Vec<Piece<'static>>>() == p);
- }
-
- fn fmtdflt() -> FormatSpec<'static> {
- return FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountImplied,
- width: CountImplied,
- ty: "",
- };
- }
-
- fn musterr(s: &str) {
- let mut p = Parser::new(s, None, vec![], false);
- p.next();
- assert!(!p.errors.is_empty());
- }
-
- #[test]
- fn simple() {
- same("asdf", &[String("asdf")]);
- same("a{{b", &[String("a"), String("{b")]);
- same("a}}b", &[String("a"), String("}b")]);
- same("a}}", &[String("a"), String("}")]);
- same("}}", &[String("}")]);
- same("\\}}", &[String("\\"), String("}")]);
- }
-
- #[test]
- fn invalid01() {
- musterr("{")
- }
- #[test]
- fn invalid02() {
- musterr("}")
- }
- #[test]
- fn invalid04() {
- musterr("{3a}")
- }
- #[test]
- fn invalid05() {
- musterr("{:|}")
- }
- #[test]
- fn invalid06() {
- musterr("{:>>>}")
- }
-
- #[test]
- fn format_nothing() {
- same("{}",
- &[NextArgument(Argument {
- position: ArgumentImplicitlyIs(0),
- format: fmtdflt(),
- })]);
- }
- #[test]
- fn format_position() {
- same("{3}",
- &[NextArgument(Argument {
- position: ArgumentIs(3),
- format: fmtdflt(),
- })]);
- }
- #[test]
- fn format_position_nothing_else() {
- same("{3:}",
- &[NextArgument(Argument {
- position: ArgumentIs(3),
- format: fmtdflt(),
- })]);
- }
- #[test]
- fn format_type() {
- same("{3:a}",
- &[NextArgument(Argument {
- position: ArgumentIs(3),
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountImplied,
- width: CountImplied,
- ty: "a",
- },
- })]);
- }
- #[test]
- fn format_align_fill() {
- same("{3:>}",
- &[NextArgument(Argument {
- position: ArgumentIs(3),
- format: FormatSpec {
- fill: None,
- align: AlignRight,
- flags: 0,
- precision: CountImplied,
- width: CountImplied,
- ty: "",
- },
- })]);
- same("{3:0<}",
- &[NextArgument(Argument {
- position: ArgumentIs(3),
- format: FormatSpec {
- fill: Some('0'),
- align: AlignLeft,
- flags: 0,
- precision: CountImplied,
- width: CountImplied,
- ty: "",
- },
- })]);
- same("{3:*<abcd}",
- &[NextArgument(Argument {
- position: ArgumentIs(3),
- format: FormatSpec {
- fill: Some('*'),
- align: AlignLeft,
- flags: 0,
- precision: CountImplied,
- width: CountImplied,
- ty: "abcd",
- },
- })]);
- }
- #[test]
- fn format_counts() {
- same("{:10s}",
- &[NextArgument(Argument {
- position: ArgumentImplicitlyIs(0),
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountImplied,
- width: CountIs(10),
- ty: "s",
- },
- })]);
- same("{:10$.10s}",
- &[NextArgument(Argument {
- position: ArgumentImplicitlyIs(0),
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountIs(10),
- width: CountIsParam(10),
- ty: "s",
- },
- })]);
- same("{:.*s}",
- &[NextArgument(Argument {
- position: ArgumentImplicitlyIs(1),
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountIsParam(0),
- width: CountImplied,
- ty: "s",
- },
- })]);
- same("{:.10$s}",
- &[NextArgument(Argument {
- position: ArgumentImplicitlyIs(0),
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountIsParam(10),
- width: CountImplied,
- ty: "s",
- },
- })]);
- same("{:a$.b$s}",
- &[NextArgument(Argument {
- position: ArgumentImplicitlyIs(0),
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountIsName("b"),
- width: CountIsName("a"),
- ty: "s",
- },
- })]);
- }
- #[test]
- fn format_flags() {
- same("{:-}",
- &[NextArgument(Argument {
- position: ArgumentImplicitlyIs(0),
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: (1 << FlagSignMinus as u32),
- precision: CountImplied,
- width: CountImplied,
- ty: "",
- },
- })]);
- same("{:+#}",
- &[NextArgument(Argument {
- position: ArgumentImplicitlyIs(0),
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: (1 << FlagSignPlus as u32) | (1 << FlagAlternate as u32),
- precision: CountImplied,
- width: CountImplied,
- ty: "",
- },
- })]);
- }
- #[test]
- fn format_mixture() {
- same("abcd {3:a} efg",
- &[String("abcd "),
- NextArgument(Argument {
- position: ArgumentIs(3),
- format: FormatSpec {
- fill: None,
- align: AlignUnknown,
- flags: 0,
- precision: CountImplied,
- width: CountImplied,
- ty: "a",
- },
- }),
- String(" efg")]);
- }
-}
+mod tests;
diff --git a/src/libfmt_macros/tests.rs b/src/libfmt_macros/tests.rs
new file mode 100644
index 0000000..7282d4a
--- /dev/null
+++ b/src/libfmt_macros/tests.rs
@@ -0,0 +1,243 @@
+use super::*;
+
+fn same(fmt: &'static str, p: &[Piece<'static>]) {
+ let parser = Parser::new(fmt, None, vec![], false);
+ assert!(parser.collect::<Vec<Piece<'static>>>() == p);
+}
+
+fn fmtdflt() -> FormatSpec<'static> {
+ return FormatSpec {
+ fill: None,
+ align: AlignUnknown,
+ flags: 0,
+ precision: CountImplied,
+ width: CountImplied,
+ ty: "",
+ };
+}
+
+fn musterr(s: &str) {
+ let mut p = Parser::new(s, None, vec![], false);
+ p.next();
+ assert!(!p.errors.is_empty());
+}
+
+#[test]
+fn simple() {
+ same("asdf", &[String("asdf")]);
+ same("a{{b", &[String("a"), String("{b")]);
+ same("a}}b", &[String("a"), String("}b")]);
+ same("a}}", &[String("a"), String("}")]);
+ same("}}", &[String("}")]);
+ same("\\}}", &[String("\\"), String("}")]);
+}
+
+#[test]
+fn invalid01() {
+ musterr("{")
+}
+#[test]
+fn invalid02() {
+ musterr("}")
+}
+#[test]
+fn invalid04() {
+ musterr("{3a}")
+}
+#[test]
+fn invalid05() {
+ musterr("{:|}")
+}
+#[test]
+fn invalid06() {
+ musterr("{:>>>}")
+}
+
+#[test]
+fn format_nothing() {
+ same("{}",
+ &[NextArgument(Argument {
+ position: ArgumentImplicitlyIs(0),
+ format: fmtdflt(),
+ })]);
+}
+#[test]
+fn format_position() {
+ same("{3}",
+ &[NextArgument(Argument {
+ position: ArgumentIs(3),
+ format: fmtdflt(),
+ })]);
+}
+#[test]
+fn format_position_nothing_else() {
+ same("{3:}",
+ &[NextArgument(Argument {
+ position: ArgumentIs(3),
+ format: fmtdflt(),
+ })]);
+}
+#[test]
+fn format_type() {
+ same("{3:a}",
+ &[NextArgument(Argument {
+ position: ArgumentIs(3),
+ format: FormatSpec {
+ fill: None,
+ align: AlignUnknown,
+ flags: 0,
+ precision: CountImplied,
+ width: CountImplied,
+ ty: "a",
+ },
+ })]);
+}
+#[test]
+fn format_align_fill() {
+ same("{3:>}",
+ &[NextArgument(Argument {
+ position: ArgumentIs(3),
+ format: FormatSpec {
+ fill: None,
+ align: AlignRight,
+ flags: 0,
+ precision: CountImplied,
+ width: CountImplied,
+ ty: "",
+ },
+ })]);
+ same("{3:0<}",
+ &[NextArgument(Argument {
+ position: ArgumentIs(3),
+ format: FormatSpec {
+ fill: Some('0'),
+ align: AlignLeft,
+ flags: 0,
+ precision: CountImplied,
+ width: CountImplied,
+ ty: "",
+ },
+ })]);
+ same("{3:*<abcd}",
+ &[NextArgument(Argument {
+ position: ArgumentIs(3),
+ format: FormatSpec {
+ fill: Some('*'),
+ align: AlignLeft,
+ flags: 0,
+ precision: CountImplied,
+ width: CountImplied,
+ ty: "abcd",
+ },
+ })]);
+}
+#[test]
+fn format_counts() {
+ use syntax_pos::{GLOBALS, Globals, edition};
+ GLOBALS.set(&Globals::new(edition::DEFAULT_EDITION), || {
+ same("{:10s}",
+ &[NextArgument(Argument {
+ position: ArgumentImplicitlyIs(0),
+ format: FormatSpec {
+ fill: None,
+ align: AlignUnknown,
+ flags: 0,
+ precision: CountImplied,
+ width: CountIs(10),
+ ty: "s",
+ },
+ })]);
+ same("{:10$.10s}",
+ &[NextArgument(Argument {
+ position: ArgumentImplicitlyIs(0),
+ format: FormatSpec {
+ fill: None,
+ align: AlignUnknown,
+ flags: 0,
+ precision: CountIs(10),
+ width: CountIsParam(10),
+ ty: "s",
+ },
+ })]);
+ same("{:.*s}",
+ &[NextArgument(Argument {
+ position: ArgumentImplicitlyIs(1),
+ format: FormatSpec {
+ fill: None,
+ align: AlignUnknown,
+ flags: 0,
+ precision: CountIsParam(0),
+ width: CountImplied,
+ ty: "s",
+ },
+ })]);
+ same("{:.10$s}",
+ &[NextArgument(Argument {
+ position: ArgumentImplicitlyIs(0),
+ format: FormatSpec {
+ fill: None,
+ align: AlignUnknown,
+ flags: 0,
+ precision: CountIsParam(10),
+ width: CountImplied,
+ ty: "s",
+ },
+ })]);
+ same("{:a$.b$s}",
+ &[NextArgument(Argument {
+ position: ArgumentImplicitlyIs(0),
+ format: FormatSpec {
+ fill: None,
+ align: AlignUnknown,
+ flags: 0,
+ precision: CountIsName(Symbol::intern("b")),
+ width: CountIsName(Symbol::intern("a")),
+ ty: "s",
+ },
+ })]);
+ });
+}
+#[test]
+fn format_flags() {
+ same("{:-}",
+ &[NextArgument(Argument {
+ position: ArgumentImplicitlyIs(0),
+ format: FormatSpec {
+ fill: None,
+ align: AlignUnknown,
+ flags: (1 << FlagSignMinus as u32),
+ precision: CountImplied,
+ width: CountImplied,
+ ty: "",
+ },
+ })]);
+ same("{:+#}",
+ &[NextArgument(Argument {
+ position: ArgumentImplicitlyIs(0),
+ format: FormatSpec {
+ fill: None,
+ align: AlignUnknown,
+ flags: (1 << FlagSignPlus as u32) | (1 << FlagAlternate as u32),
+ precision: CountImplied,
+ width: CountImplied,
+ ty: "",
+ },
+ })]);
+}
+#[test]
+fn format_mixture() {
+ same("abcd {3:a} efg",
+ &[String("abcd "),
+ NextArgument(Argument {
+ position: ArgumentIs(3),
+ format: FormatSpec {
+ fill: None,
+ align: AlignUnknown,
+ flags: 0,
+ precision: CountImplied,
+ width: CountImplied,
+ ty: "a",
+ },
+ }),
+ String(" efg")]);
+}
diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs
index 489020d..a34e4fb 100644
--- a/src/libgraphviz/lib.rs
+++ b/src/libgraphviz/lib.rs
@@ -684,385 +684,4 @@
}
#[cfg(test)]
-mod tests {
- use NodeLabels::*;
- use super::{Id, Labeller, Nodes, Edges, GraphWalk, render, Style};
- use super::LabelText::{self, LabelStr, EscStr, HtmlStr};
- use std::io;
- use std::io::prelude::*;
-
- /// each node is an index in a vector in the graph.
- type Node = usize;
- struct Edge {
- from: usize,
- to: usize,
- label: &'static str,
- style: Style,
- }
-
- fn edge(from: usize, to: usize, label: &'static str, style: Style) -> Edge {
- Edge {
- from,
- to,
- label,
- style,
- }
- }
-
- struct LabelledGraph {
- /// The name for this graph. Used for labeling generated `digraph`.
- name: &'static str,
-
- /// Each node is an index into `node_labels`; these labels are
- /// used as the label text for each node. (The node *names*,
- /// which are unique identifiers, are derived from their index
- /// in this array.)
- ///
- /// If a node maps to None here, then just use its name as its
- /// text.
- node_labels: Vec<Option<&'static str>>,
-
- node_styles: Vec<Style>,
-
- /// Each edge relates a from-index to a to-index along with a
- /// label; `edges` collects them.
- edges: Vec<Edge>,
- }
-
- // A simple wrapper around LabelledGraph that forces the labels to
- // be emitted as EscStr.
- struct LabelledGraphWithEscStrs {
- graph: LabelledGraph,
- }
-
- enum NodeLabels<L> {
- AllNodesLabelled(Vec<L>),
- UnlabelledNodes(usize),
- SomeNodesLabelled(Vec<Option<L>>),
- }
-
- type Trivial = NodeLabels<&'static str>;
-
- impl NodeLabels<&'static str> {
- fn to_opt_strs(self) -> Vec<Option<&'static str>> {
- match self {
- UnlabelledNodes(len) => vec![None; len],
- AllNodesLabelled(lbls) => lbls.into_iter().map(|l| Some(l)).collect(),
- SomeNodesLabelled(lbls) => lbls.into_iter().collect(),
- }
- }
-
- fn len(&self) -> usize {
- match self {
- &UnlabelledNodes(len) => len,
- &AllNodesLabelled(ref lbls) => lbls.len(),
- &SomeNodesLabelled(ref lbls) => lbls.len(),
- }
- }
- }
-
- impl LabelledGraph {
- fn new(name: &'static str,
- node_labels: Trivial,
- edges: Vec<Edge>,
- node_styles: Option<Vec<Style>>)
- -> LabelledGraph {
- let count = node_labels.len();
- LabelledGraph {
- name,
- node_labels: node_labels.to_opt_strs(),
- edges,
- node_styles: match node_styles {
- Some(nodes) => nodes,
- None => vec![Style::None; count],
- },
- }
- }
- }
-
- impl LabelledGraphWithEscStrs {
- fn new(name: &'static str,
- node_labels: Trivial,
- edges: Vec<Edge>)
- -> LabelledGraphWithEscStrs {
- LabelledGraphWithEscStrs { graph: LabelledGraph::new(name, node_labels, edges, None) }
- }
- }
-
- fn id_name<'a>(n: &Node) -> Id<'a> {
- Id::new(format!("N{}", *n)).unwrap()
- }
-
- impl<'a> Labeller<'a> for LabelledGraph {
- type Node = Node;
- type Edge = &'a Edge;
- fn graph_id(&'a self) -> Id<'a> {
- Id::new(self.name).unwrap()
- }
- fn node_id(&'a self, n: &Node) -> Id<'a> {
- id_name(n)
- }
- fn node_label(&'a self, n: &Node) -> LabelText<'a> {
- match self.node_labels[*n] {
- Some(l) => LabelStr(l.into()),
- None => LabelStr(id_name(n).name()),
- }
- }
- fn edge_label(&'a self, e: &&'a Edge) -> LabelText<'a> {
- LabelStr(e.label.into())
- }
- fn node_style(&'a self, n: &Node) -> Style {
- self.node_styles[*n]
- }
- fn edge_style(&'a self, e: &&'a Edge) -> Style {
- e.style
- }
- }
-
- impl<'a> Labeller<'a> for LabelledGraphWithEscStrs {
- type Node = Node;
- type Edge = &'a Edge;
- fn graph_id(&'a self) -> Id<'a> {
- self.graph.graph_id()
- }
- fn node_id(&'a self, n: &Node) -> Id<'a> {
- self.graph.node_id(n)
- }
- fn node_label(&'a self, n: &Node) -> LabelText<'a> {
- match self.graph.node_label(n) {
- LabelStr(s) | EscStr(s) | HtmlStr(s) => EscStr(s),
- }
- }
- fn edge_label(&'a self, e: &&'a Edge) -> LabelText<'a> {
- match self.graph.edge_label(e) {
- LabelStr(s) | EscStr(s) | HtmlStr(s) => EscStr(s),
- }
- }
- }
-
- impl<'a> GraphWalk<'a> for LabelledGraph {
- type Node = Node;
- type Edge = &'a Edge;
- fn nodes(&'a self) -> Nodes<'a, Node> {
- (0..self.node_labels.len()).collect()
- }
- fn edges(&'a self) -> Edges<'a, &'a Edge> {
- self.edges.iter().collect()
- }
- fn source(&'a self, edge: &&'a Edge) -> Node {
- edge.from
- }
- fn target(&'a self, edge: &&'a Edge) -> Node {
- edge.to
- }
- }
-
- impl<'a> GraphWalk<'a> for LabelledGraphWithEscStrs {
- type Node = Node;
- type Edge = &'a Edge;
- fn nodes(&'a self) -> Nodes<'a, Node> {
- self.graph.nodes()
- }
- fn edges(&'a self) -> Edges<'a, &'a Edge> {
- self.graph.edges()
- }
- fn source(&'a self, edge: &&'a Edge) -> Node {
- edge.from
- }
- fn target(&'a self, edge: &&'a Edge) -> Node {
- edge.to
- }
- }
-
- fn test_input(g: LabelledGraph) -> io::Result<String> {
- let mut writer = Vec::new();
- render(&g, &mut writer).unwrap();
- let mut s = String::new();
- Read::read_to_string(&mut &*writer, &mut s)?;
- Ok(s)
- }
-
- // All of the tests use raw-strings as the format for the expected outputs,
- // so that you can cut-and-paste the content into a .dot file yourself to
- // see what the graphviz visualizer would produce.
-
- #[test]
- fn empty_graph() {
- let labels: Trivial = UnlabelledNodes(0);
- let r = test_input(LabelledGraph::new("empty_graph", labels, vec![], None));
- assert_eq!(r.unwrap(),
-r#"digraph empty_graph {
-}
-"#);
- }
-
- #[test]
- fn single_node() {
- let labels: Trivial = UnlabelledNodes(1);
- let r = test_input(LabelledGraph::new("single_node", labels, vec![], None));
- assert_eq!(r.unwrap(),
-r#"digraph single_node {
- N0[label="N0"];
-}
-"#);
- }
-
- #[test]
- fn single_node_with_style() {
- let labels: Trivial = UnlabelledNodes(1);
- let styles = Some(vec![Style::Dashed]);
- let r = test_input(LabelledGraph::new("single_node", labels, vec![], styles));
- assert_eq!(r.unwrap(),
-r#"digraph single_node {
- N0[label="N0"][style="dashed"];
-}
-"#);
- }
-
- #[test]
- fn single_edge() {
- let labels: Trivial = UnlabelledNodes(2);
- let result = test_input(LabelledGraph::new("single_edge",
- labels,
- vec![edge(0, 1, "E", Style::None)],
- None));
- assert_eq!(result.unwrap(),
-r#"digraph single_edge {
- N0[label="N0"];
- N1[label="N1"];
- N0 -> N1[label="E"];
-}
-"#);
- }
-
- #[test]
- fn single_edge_with_style() {
- let labels: Trivial = UnlabelledNodes(2);
- let result = test_input(LabelledGraph::new("single_edge",
- labels,
- vec![edge(0, 1, "E", Style::Bold)],
- None));
- assert_eq!(result.unwrap(),
-r#"digraph single_edge {
- N0[label="N0"];
- N1[label="N1"];
- N0 -> N1[label="E"][style="bold"];
-}
-"#);
- }
-
- #[test]
- fn test_some_labelled() {
- let labels: Trivial = SomeNodesLabelled(vec![Some("A"), None]);
- let styles = Some(vec![Style::None, Style::Dotted]);
- let result = test_input(LabelledGraph::new("test_some_labelled",
- labels,
- vec![edge(0, 1, "A-1", Style::None)],
- styles));
- assert_eq!(result.unwrap(),
-r#"digraph test_some_labelled {
- N0[label="A"];
- N1[label="N1"][style="dotted"];
- N0 -> N1[label="A-1"];
-}
-"#);
- }
-
- #[test]
- fn single_cyclic_node() {
- let labels: Trivial = UnlabelledNodes(1);
- let r = test_input(LabelledGraph::new("single_cyclic_node",
- labels,
- vec![edge(0, 0, "E", Style::None)],
- None));
- assert_eq!(r.unwrap(),
-r#"digraph single_cyclic_node {
- N0[label="N0"];
- N0 -> N0[label="E"];
-}
-"#);
- }
-
- #[test]
- fn hasse_diagram() {
- let labels = AllNodesLabelled(vec!["{x,y}", "{x}", "{y}", "{}"]);
- let r = test_input(LabelledGraph::new("hasse_diagram",
- labels,
- vec![edge(0, 1, "", Style::None),
- edge(0, 2, "", Style::None),
- edge(1, 3, "", Style::None),
- edge(2, 3, "", Style::None)],
- None));
- assert_eq!(r.unwrap(),
-r#"digraph hasse_diagram {
- N0[label="{x,y}"];
- N1[label="{x}"];
- N2[label="{y}"];
- N3[label="{}"];
- N0 -> N1[label=""];
- N0 -> N2[label=""];
- N1 -> N3[label=""];
- N2 -> N3[label=""];
-}
-"#);
- }
-
- #[test]
- fn left_aligned_text() {
- let labels = AllNodesLabelled(vec![
- "if test {\
- \\l branch1\
- \\l} else {\
- \\l branch2\
- \\l}\
- \\lafterward\
- \\l",
- "branch1",
- "branch2",
- "afterward"]);
-
- let mut writer = Vec::new();
-
- let g = LabelledGraphWithEscStrs::new("syntax_tree",
- labels,
- vec![edge(0, 1, "then", Style::None),
- edge(0, 2, "else", Style::None),
- edge(1, 3, ";", Style::None),
- edge(2, 3, ";", Style::None)]);
-
- render(&g, &mut writer).unwrap();
- let mut r = String::new();
- Read::read_to_string(&mut &*writer, &mut r).unwrap();
-
- assert_eq!(r,
-r#"digraph syntax_tree {
- N0[label="if test {\l branch1\l} else {\l branch2\l}\lafterward\l"];
- N1[label="branch1"];
- N2[label="branch2"];
- N3[label="afterward"];
- N0 -> N1[label="then"];
- N0 -> N2[label="else"];
- N1 -> N3[label=";"];
- N2 -> N3[label=";"];
-}
-"#);
- }
-
- #[test]
- fn simple_id_construction() {
- let id1 = Id::new("hello");
- match id1 {
- Ok(_) => {}
- Err(..) => panic!("'hello' is not a valid value for id anymore"),
- }
- }
-
- #[test]
- fn badly_formatted_id() {
- let id2 = Id::new("Weird { struct : ure } !!!");
- match id2 {
- Ok(_) => panic!("graphviz id suddenly allows spaces, brackets and stuff"),
- Err(..) => {}
- }
- }
-}
+mod tests;
diff --git a/src/libgraphviz/tests.rs b/src/libgraphviz/tests.rs
new file mode 100644
index 0000000..2f71357
--- /dev/null
+++ b/src/libgraphviz/tests.rs
@@ -0,0 +1,380 @@
+use NodeLabels::*;
+use super::{Id, Labeller, Nodes, Edges, GraphWalk, render, Style};
+use super::LabelText::{self, LabelStr, EscStr, HtmlStr};
+use std::io;
+use std::io::prelude::*;
+
+/// each node is an index in a vector in the graph.
+type Node = usize;
+struct Edge {
+ from: usize,
+ to: usize,
+ label: &'static str,
+ style: Style,
+}
+
+fn edge(from: usize, to: usize, label: &'static str, style: Style) -> Edge {
+ Edge {
+ from,
+ to,
+ label,
+ style,
+ }
+}
+
+struct LabelledGraph {
+ /// The name for this graph. Used for labeling generated `digraph`.
+ name: &'static str,
+
+ /// Each node is an index into `node_labels`; these labels are
+ /// used as the label text for each node. (The node *names*,
+ /// which are unique identifiers, are derived from their index
+ /// in this array.)
+ ///
+ /// If a node maps to None here, then just use its name as its
+ /// text.
+ node_labels: Vec<Option<&'static str>>,
+
+ node_styles: Vec<Style>,
+
+ /// Each edge relates a from-index to a to-index along with a
+ /// label; `edges` collects them.
+ edges: Vec<Edge>,
+}
+
+// A simple wrapper around LabelledGraph that forces the labels to
+// be emitted as EscStr.
+struct LabelledGraphWithEscStrs {
+ graph: LabelledGraph,
+}
+
+enum NodeLabels<L> {
+ AllNodesLabelled(Vec<L>),
+ UnlabelledNodes(usize),
+ SomeNodesLabelled(Vec<Option<L>>),
+}
+
+type Trivial = NodeLabels<&'static str>;
+
+impl NodeLabels<&'static str> {
+ fn to_opt_strs(self) -> Vec<Option<&'static str>> {
+ match self {
+ UnlabelledNodes(len) => vec![None; len],
+ AllNodesLabelled(lbls) => lbls.into_iter().map(|l| Some(l)).collect(),
+ SomeNodesLabelled(lbls) => lbls.into_iter().collect(),
+ }
+ }
+
+ fn len(&self) -> usize {
+ match self {
+ &UnlabelledNodes(len) => len,
+ &AllNodesLabelled(ref lbls) => lbls.len(),
+ &SomeNodesLabelled(ref lbls) => lbls.len(),
+ }
+ }
+}
+
+impl LabelledGraph {
+ fn new(name: &'static str,
+ node_labels: Trivial,
+ edges: Vec<Edge>,
+ node_styles: Option<Vec<Style>>)
+ -> LabelledGraph {
+ let count = node_labels.len();
+ LabelledGraph {
+ name,
+ node_labels: node_labels.to_opt_strs(),
+ edges,
+ node_styles: match node_styles {
+ Some(nodes) => nodes,
+ None => vec![Style::None; count],
+ },
+ }
+ }
+}
+
+impl LabelledGraphWithEscStrs {
+ fn new(name: &'static str,
+ node_labels: Trivial,
+ edges: Vec<Edge>)
+ -> LabelledGraphWithEscStrs {
+ LabelledGraphWithEscStrs { graph: LabelledGraph::new(name, node_labels, edges, None) }
+ }
+}
+
+fn id_name<'a>(n: &Node) -> Id<'a> {
+ Id::new(format!("N{}", *n)).unwrap()
+}
+
+impl<'a> Labeller<'a> for LabelledGraph {
+ type Node = Node;
+ type Edge = &'a Edge;
+ fn graph_id(&'a self) -> Id<'a> {
+ Id::new(self.name).unwrap()
+ }
+ fn node_id(&'a self, n: &Node) -> Id<'a> {
+ id_name(n)
+ }
+ fn node_label(&'a self, n: &Node) -> LabelText<'a> {
+ match self.node_labels[*n] {
+ Some(l) => LabelStr(l.into()),
+ None => LabelStr(id_name(n).name()),
+ }
+ }
+ fn edge_label(&'a self, e: &&'a Edge) -> LabelText<'a> {
+ LabelStr(e.label.into())
+ }
+ fn node_style(&'a self, n: &Node) -> Style {
+ self.node_styles[*n]
+ }
+ fn edge_style(&'a self, e: &&'a Edge) -> Style {
+ e.style
+ }
+}
+
+impl<'a> Labeller<'a> for LabelledGraphWithEscStrs {
+ type Node = Node;
+ type Edge = &'a Edge;
+ fn graph_id(&'a self) -> Id<'a> {
+ self.graph.graph_id()
+ }
+ fn node_id(&'a self, n: &Node) -> Id<'a> {
+ self.graph.node_id(n)
+ }
+ fn node_label(&'a self, n: &Node) -> LabelText<'a> {
+ match self.graph.node_label(n) {
+ LabelStr(s) | EscStr(s) | HtmlStr(s) => EscStr(s),
+ }
+ }
+ fn edge_label(&'a self, e: &&'a Edge) -> LabelText<'a> {
+ match self.graph.edge_label(e) {
+ LabelStr(s) | EscStr(s) | HtmlStr(s) => EscStr(s),
+ }
+ }
+}
+
+impl<'a> GraphWalk<'a> for LabelledGraph {
+ type Node = Node;
+ type Edge = &'a Edge;
+ fn nodes(&'a self) -> Nodes<'a, Node> {
+ (0..self.node_labels.len()).collect()
+ }
+ fn edges(&'a self) -> Edges<'a, &'a Edge> {
+ self.edges.iter().collect()
+ }
+ fn source(&'a self, edge: &&'a Edge) -> Node {
+ edge.from
+ }
+ fn target(&'a self, edge: &&'a Edge) -> Node {
+ edge.to
+ }
+}
+
+impl<'a> GraphWalk<'a> for LabelledGraphWithEscStrs {
+ type Node = Node;
+ type Edge = &'a Edge;
+ fn nodes(&'a self) -> Nodes<'a, Node> {
+ self.graph.nodes()
+ }
+ fn edges(&'a self) -> Edges<'a, &'a Edge> {
+ self.graph.edges()
+ }
+ fn source(&'a self, edge: &&'a Edge) -> Node {
+ edge.from
+ }
+ fn target(&'a self, edge: &&'a Edge) -> Node {
+ edge.to
+ }
+}
+
+fn test_input(g: LabelledGraph) -> io::Result<String> {
+ let mut writer = Vec::new();
+ render(&g, &mut writer).unwrap();
+ let mut s = String::new();
+ Read::read_to_string(&mut &*writer, &mut s)?;
+ Ok(s)
+}
+
+// All of the tests use raw-strings as the format for the expected outputs,
+// so that you can cut-and-paste the content into a .dot file yourself to
+// see what the graphviz visualizer would produce.
+
+#[test]
+fn empty_graph() {
+ let labels: Trivial = UnlabelledNodes(0);
+ let r = test_input(LabelledGraph::new("empty_graph", labels, vec![], None));
+ assert_eq!(r.unwrap(),
+r#"digraph empty_graph {
+}
+"#);
+}
+
+#[test]
+fn single_node() {
+ let labels: Trivial = UnlabelledNodes(1);
+ let r = test_input(LabelledGraph::new("single_node", labels, vec![], None));
+ assert_eq!(r.unwrap(),
+r#"digraph single_node {
+ N0[label="N0"];
+}
+"#);
+}
+
+#[test]
+fn single_node_with_style() {
+ let labels: Trivial = UnlabelledNodes(1);
+ let styles = Some(vec![Style::Dashed]);
+ let r = test_input(LabelledGraph::new("single_node", labels, vec![], styles));
+ assert_eq!(r.unwrap(),
+r#"digraph single_node {
+ N0[label="N0"][style="dashed"];
+}
+"#);
+}
+
+#[test]
+fn single_edge() {
+ let labels: Trivial = UnlabelledNodes(2);
+ let result = test_input(LabelledGraph::new("single_edge",
+ labels,
+ vec![edge(0, 1, "E", Style::None)],
+ None));
+ assert_eq!(result.unwrap(),
+r#"digraph single_edge {
+ N0[label="N0"];
+ N1[label="N1"];
+ N0 -> N1[label="E"];
+}
+"#);
+}
+
+#[test]
+fn single_edge_with_style() {
+ let labels: Trivial = UnlabelledNodes(2);
+ let result = test_input(LabelledGraph::new("single_edge",
+ labels,
+ vec![edge(0, 1, "E", Style::Bold)],
+ None));
+ assert_eq!(result.unwrap(),
+r#"digraph single_edge {
+ N0[label="N0"];
+ N1[label="N1"];
+ N0 -> N1[label="E"][style="bold"];
+}
+"#);
+}
+
+#[test]
+fn test_some_labelled() {
+ let labels: Trivial = SomeNodesLabelled(vec![Some("A"), None]);
+ let styles = Some(vec![Style::None, Style::Dotted]);
+ let result = test_input(LabelledGraph::new("test_some_labelled",
+ labels,
+ vec![edge(0, 1, "A-1", Style::None)],
+ styles));
+ assert_eq!(result.unwrap(),
+r#"digraph test_some_labelled {
+ N0[label="A"];
+ N1[label="N1"][style="dotted"];
+ N0 -> N1[label="A-1"];
+}
+"#);
+}
+
+#[test]
+fn single_cyclic_node() {
+ let labels: Trivial = UnlabelledNodes(1);
+ let r = test_input(LabelledGraph::new("single_cyclic_node",
+ labels,
+ vec![edge(0, 0, "E", Style::None)],
+ None));
+ assert_eq!(r.unwrap(),
+r#"digraph single_cyclic_node {
+ N0[label="N0"];
+ N0 -> N0[label="E"];
+}
+"#);
+}
+
+#[test]
+fn hasse_diagram() {
+ let labels = AllNodesLabelled(vec!["{x,y}", "{x}", "{y}", "{}"]);
+ let r = test_input(LabelledGraph::new("hasse_diagram",
+ labels,
+ vec![edge(0, 1, "", Style::None),
+ edge(0, 2, "", Style::None),
+ edge(1, 3, "", Style::None),
+ edge(2, 3, "", Style::None)],
+ None));
+ assert_eq!(r.unwrap(),
+r#"digraph hasse_diagram {
+ N0[label="{x,y}"];
+ N1[label="{x}"];
+ N2[label="{y}"];
+ N3[label="{}"];
+ N0 -> N1[label=""];
+ N0 -> N2[label=""];
+ N1 -> N3[label=""];
+ N2 -> N3[label=""];
+}
+"#);
+}
+
+#[test]
+fn left_aligned_text() {
+ let labels = AllNodesLabelled(vec![
+ "if test {\
+ \\l branch1\
+ \\l} else {\
+ \\l branch2\
+ \\l}\
+ \\lafterward\
+ \\l",
+ "branch1",
+ "branch2",
+ "afterward"]);
+
+ let mut writer = Vec::new();
+
+ let g = LabelledGraphWithEscStrs::new("syntax_tree",
+ labels,
+ vec![edge(0, 1, "then", Style::None),
+ edge(0, 2, "else", Style::None),
+ edge(1, 3, ";", Style::None),
+ edge(2, 3, ";", Style::None)]);
+
+ render(&g, &mut writer).unwrap();
+ let mut r = String::new();
+ Read::read_to_string(&mut &*writer, &mut r).unwrap();
+
+ assert_eq!(r,
+r#"digraph syntax_tree {
+ N0[label="if test {\l branch1\l} else {\l branch2\l}\lafterward\l"];
+ N1[label="branch1"];
+ N2[label="branch2"];
+ N3[label="afterward"];
+ N0 -> N1[label="then"];
+ N0 -> N2[label="else"];
+ N1 -> N3[label=";"];
+ N2 -> N3[label=";"];
+}
+"#);
+}
+
+#[test]
+fn simple_id_construction() {
+ let id1 = Id::new("hello");
+ match id1 {
+ Ok(_) => {}
+ Err(..) => panic!("'hello' is not a valid value for id anymore"),
+ }
+}
+
+#[test]
+fn badly_formatted_id() {
+ let id2 = Id::new("Weird { struct : ure } !!!");
+ match id2 {
+ Ok(_) => panic!("graphviz id suddenly allows spaces, brackets and stuff"),
+ Err(..) => {}
+ }
+}
diff --git a/src/libpanic_unwind/Cargo.toml b/src/libpanic_unwind/Cargo.toml
index 1b3901a..47cd09f 100644
--- a/src/libpanic_unwind/Cargo.toml
+++ b/src/libpanic_unwind/Cargo.toml
@@ -16,3 +16,4 @@
libc = { version = "0.2", default-features = false }
unwind = { path = "../libunwind" }
compiler_builtins = "0.1.0"
+cfg-if = "0.1.8"
diff --git a/src/libpanic_unwind/dummy.rs b/src/libpanic_unwind/dummy.rs
index 3a00d63..8675632 100644
--- a/src/libpanic_unwind/dummy.rs
+++ b/src/libpanic_unwind/dummy.rs
@@ -7,7 +7,7 @@
use core::intrinsics;
pub fn payload() -> *mut u8 {
- 0 as *mut u8
+ core::ptr::null_mut()
}
pub unsafe fn cleanup(_ptr: *mut u8) -> Box<dyn Any + Send> {
diff --git a/src/libpanic_unwind/lib.rs b/src/libpanic_unwind/lib.rs
index 72ddafb..2bb9ce6 100644
--- a/src/libpanic_unwind/lib.rs
+++ b/src/libpanic_unwind/lib.rs
@@ -38,10 +38,7 @@
use core::raw;
use core::panic::BoxMeUp;
-#[macro_use]
-mod macros;
-
-cfg_if! {
+cfg_if::cfg_if! {
if #[cfg(target_os = "emscripten")] {
#[path = "emcc.rs"]
mod imp;
diff --git a/src/libpanic_unwind/macros.rs b/src/libpanic_unwind/macros.rs
deleted file mode 100644
index 659e977..0000000
--- a/src/libpanic_unwind/macros.rs
+++ /dev/null
@@ -1,35 +0,0 @@
-/// A macro for defining `#[cfg]` if-else statements.
-///
-/// This is similar to the `if/elif` C preprocessor macro by allowing definition
-/// of a cascade of `#[cfg]` cases, emitting the implementation which matches
-/// first.
-///
-/// This allows you to conveniently provide a long list `#[cfg]`'d blocks of code
-/// without having to rewrite each clause multiple times.
-macro_rules! cfg_if {
- ($(
- if #[cfg($($meta:meta),*)] { $($it:item)* }
- ) else * else {
- $($it2:item)*
- }) => {
- __cfg_if_items! {
- () ;
- $( ( ($($meta),*) ($($it)*) ), )*
- ( () ($($it2)*) ),
- }
- }
-}
-
-macro_rules! __cfg_if_items {
- (($($not:meta,)*) ; ) => {};
- (($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => {
- __cfg_if_apply! { cfg(all(not(any($($not),*)), $($m,)*)), $($it)* }
- __cfg_if_items! { ($($not,)* $($m,)*) ; $($rest)* }
- }
-}
-
-macro_rules! __cfg_if_apply {
- ($m:meta, $($it:item)*) => {
- $(#[$m] $it)*
- }
-}
diff --git a/src/libpanic_unwind/seh.rs b/src/libpanic_unwind/seh.rs
index 996fdb9..809e461 100644
--- a/src/libpanic_unwind/seh.rs
+++ b/src/libpanic_unwind/seh.rs
@@ -104,7 +104,7 @@
pub const NAME2: [u8; 7] = [b'.', b'P', b'A', b'X', 0, 0, 0];
macro_rules! ptr {
- (0) => (0 as *mut u8);
+ (0) => (core::ptr::null_mut());
($e:expr) => ($e as *mut u8);
}
}
@@ -223,13 +223,13 @@
#[cfg_attr(not(test), lang = "msvc_try_filter")]
static mut TYPE_DESCRIPTOR1: _TypeDescriptor = _TypeDescriptor {
pVFTable: unsafe { &TYPE_INFO_VTABLE } as *const _ as *const _,
- spare: 0 as *mut _,
+ spare: core::ptr::null_mut(),
name: imp::NAME1,
};
static mut TYPE_DESCRIPTOR2: _TypeDescriptor = _TypeDescriptor {
pVFTable: unsafe { &TYPE_INFO_VTABLE } as *const _ as *const _,
- spare: 0 as *mut _,
+ spare: core::ptr::null_mut(),
name: imp::NAME2,
};
diff --git a/src/librustc/cfg/construct.rs b/src/librustc/cfg/construct.rs
index ef0d4be..8560232 100644
--- a/src/librustc/cfg/construct.rs
+++ b/src/librustc/cfg/construct.rs
@@ -8,7 +8,7 @@
use crate::hir::def_id::DefId;
struct CFGBuilder<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
owner_def_id: DefId,
tables: &'a ty::TypeckTables<'tcx>,
graph: CFGGraph,
@@ -30,8 +30,7 @@
break_index: CFGIndex, // where to go on a `break`
}
-pub fn construct<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- body: &hir::Body) -> CFG {
+pub fn construct<'tcx>(tcx: TyCtxt<'tcx>, body: &hir::Body) -> CFG {
let mut graph = graph::Graph::new();
let entry = graph.add_node(CFGNodeData::Entry);
@@ -43,7 +42,7 @@
let body_exit;
// Find the tables for this body.
- let owner_def_id = tcx.hir().local_def_id(tcx.hir().body_owner(body.id()));
+ let owner_def_id = tcx.hir().body_owner_def_id(body.id());
let tables = tcx.typeck_tables_of(owner_def_id);
let mut cfg_builder = CFGBuilder {
@@ -358,7 +357,7 @@
args: I) -> CFGIndex {
let func_or_rcvr_exit = self.expr(func_or_rcvr, pred);
let ret = self.straightline(call_expr, func_or_rcvr_exit, args);
- let m = self.tcx.hir().get_module_parent_by_hir_id(call_expr.hir_id);
+ let m = self.tcx.hir().get_module_parent(call_expr.hir_id);
if self.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(call_expr)) {
self.add_unreachable_node()
} else {
diff --git a/src/librustc/cfg/graphviz.rs b/src/librustc/cfg/graphviz.rs
index 969c38b..66963e5 100644
--- a/src/librustc/cfg/graphviz.rs
+++ b/src/librustc/cfg/graphviz.rs
@@ -12,7 +12,7 @@
pub type Edge<'a> = &'a cfg::CFGEdge;
pub struct LabelledCFG<'a, 'tcx: 'a> {
- pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ pub tcx: TyCtxt<'tcx>,
pub cfg: &'a cfg::CFG,
pub name: String,
/// `labelled_edges` controls whether we emit labels on the edges
@@ -22,11 +22,11 @@
impl<'a, 'tcx> LabelledCFG<'a, 'tcx> {
fn local_id_to_string(&self, local_id: hir::ItemLocalId) -> String {
assert!(self.cfg.owner_def_id.is_local());
- let node_id = self.tcx.hir().hir_to_node_id(hir::HirId {
+ let hir_id = hir::HirId {
owner: self.tcx.hir().def_index_to_hir_id(self.cfg.owner_def_id.index).owner,
local_id
- });
- let s = self.tcx.hir().node_to_string(node_id);
+ };
+ let s = self.tcx.hir().node_to_string(hir_id);
// Replacing newlines with \\l causes each line to be left-aligned,
// improving presentation of (long) pretty-printed expressions.
diff --git a/src/librustc/cfg/mod.rs b/src/librustc/cfg/mod.rs
index 345dff8..db168d9 100644
--- a/src/librustc/cfg/mod.rs
+++ b/src/librustc/cfg/mod.rs
@@ -49,8 +49,7 @@
pub type CFGEdge = graph::Edge<CFGEdgeData>;
impl CFG {
- pub fn new<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- body: &hir::Body) -> CFG {
+ pub fn new<'tcx>(tcx: TyCtxt<'tcx>, body: &hir::Body) -> CFG {
construct::construct(tcx, body)
}
diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs
index a364a6d..f764716 100644
--- a/src/librustc/dep_graph/dep_node.rs
+++ b/src/librustc/dep_graph/dep_node.rs
@@ -204,10 +204,10 @@
impl DepNode {
#[allow(unreachable_code, non_snake_case)]
#[inline(always)]
- pub fn new<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- dep: DepConstructor<'gcx>)
+ pub fn new<'a, 'tcx>(tcx: TyCtxt<'tcx>,
+ dep: DepConstructor<'tcx>)
-> DepNode
- where 'gcx: 'a + 'tcx,
+ where 'tcx: 'a,
'tcx: 'a
{
match dep {
@@ -307,7 +307,7 @@
/// refers to something from the previous compilation session that
/// has been removed.
#[inline]
- pub fn extract_def_id(&self, tcx: TyCtxt<'_, '_, '_>) -> Option<DefId> {
+ pub fn extract_def_id(&self, tcx: TyCtxt<'_>) -> Option<DefId> {
if self.kind.can_reconstruct_query_key() {
let def_path_hash = DefPathHash(self.hash);
tcx.def_path_hash_to_def_id.as_ref()?
@@ -400,7 +400,7 @@
impl DefId {
#[inline(always)]
- pub fn to_dep_node(self, tcx: TyCtxt<'_, '_, '_>, kind: DepKind) -> DepNode {
+ pub fn to_dep_node(self, tcx: TyCtxt<'_>, kind: DepKind) -> DepNode {
DepNode::from_def_path_hash(kind, tcx.def_path_hash(self))
}
}
@@ -442,49 +442,50 @@
]);
pub trait RecoverKey<'tcx>: Sized {
- fn recover(tcx: TyCtxt<'_, 'tcx, 'tcx>, dep_node: &DepNode) -> Option<Self>;
+ fn recover(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> Option<Self>;
}
impl RecoverKey<'tcx> for CrateNum {
- fn recover(tcx: TyCtxt<'_, 'tcx, 'tcx>, dep_node: &DepNode) -> Option<Self> {
+ fn recover(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> Option<Self> {
dep_node.extract_def_id(tcx).map(|id| id.krate)
}
}
impl RecoverKey<'tcx> for DefId {
- fn recover(tcx: TyCtxt<'_, 'tcx, 'tcx>, dep_node: &DepNode) -> Option<Self> {
+ fn recover(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> Option<Self> {
dep_node.extract_def_id(tcx)
}
}
impl RecoverKey<'tcx> for DefIndex {
- fn recover(tcx: TyCtxt<'_, 'tcx, 'tcx>, dep_node: &DepNode) -> Option<Self> {
+ fn recover(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> Option<Self> {
dep_node.extract_def_id(tcx).map(|id| id.index)
}
}
-trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> : fmt::Debug {
+trait DepNodeParams<'tcx>: fmt::Debug {
const CAN_RECONSTRUCT_QUERY_KEY: bool;
/// This method turns the parameters of a DepNodeConstructor into an opaque
/// Fingerprint to be used in DepNode.
/// Not all DepNodeParams support being turned into a Fingerprint (they
/// don't need to if the corresponding DepNode is anonymous).
- fn to_fingerprint(&self, _: TyCtxt<'a, 'gcx, 'tcx>) -> Fingerprint {
+ fn to_fingerprint(&self, _: TyCtxt<'tcx>) -> Fingerprint {
panic!("Not implemented. Accidentally called on anonymous node?")
}
- fn to_debug_str(&self, _: TyCtxt<'a, 'gcx, 'tcx>) -> String {
+ fn to_debug_str(&self, _: TyCtxt<'tcx>) -> String {
format!("{:?}", self)
}
}
-impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a, T> DepNodeParams<'a, 'gcx, 'tcx> for T
- where T: HashStable<StableHashingContext<'a>> + fmt::Debug
+impl<'tcx, T> DepNodeParams<'tcx> for T
+where
+ T: HashStable<StableHashingContext<'tcx>> + fmt::Debug,
{
default const CAN_RECONSTRUCT_QUERY_KEY: bool = false;
- default fn to_fingerprint(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Fingerprint {
+ default fn to_fingerprint(&self, tcx: TyCtxt<'tcx>) -> Fingerprint {
let mut hcx = tcx.create_stable_hashing_context();
let mut hasher = StableHasher::new();
@@ -493,39 +494,39 @@
hasher.finish()
}
- default fn to_debug_str(&self, _: TyCtxt<'a, 'gcx, 'tcx>) -> String {
+ default fn to_debug_str(&self, _: TyCtxt<'tcx>) -> String {
format!("{:?}", *self)
}
}
-impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for DefId {
+impl<'tcx> DepNodeParams<'tcx> for DefId {
const CAN_RECONSTRUCT_QUERY_KEY: bool = true;
- fn to_fingerprint(&self, tcx: TyCtxt<'_, '_, '_>) -> Fingerprint {
+ fn to_fingerprint(&self, tcx: TyCtxt<'_>) -> Fingerprint {
tcx.def_path_hash(*self).0
}
- fn to_debug_str(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> String {
+ fn to_debug_str(&self, tcx: TyCtxt<'tcx>) -> String {
tcx.def_path_str(*self)
}
}
-impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for DefIndex {
+impl<'tcx> DepNodeParams<'tcx> for DefIndex {
const CAN_RECONSTRUCT_QUERY_KEY: bool = true;
- fn to_fingerprint(&self, tcx: TyCtxt<'_, '_, '_>) -> Fingerprint {
+ fn to_fingerprint(&self, tcx: TyCtxt<'_>) -> Fingerprint {
tcx.hir().definitions().def_path_hash(*self).0
}
- fn to_debug_str(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> String {
+ fn to_debug_str(&self, tcx: TyCtxt<'tcx>) -> String {
tcx.def_path_str(DefId::local(*self))
}
}
-impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for CrateNum {
+impl<'tcx> DepNodeParams<'tcx> for CrateNum {
const CAN_RECONSTRUCT_QUERY_KEY: bool = true;
- fn to_fingerprint(&self, tcx: TyCtxt<'_, '_, '_>) -> Fingerprint {
+ fn to_fingerprint(&self, tcx: TyCtxt<'_>) -> Fingerprint {
let def_id = DefId {
krate: *self,
index: CRATE_DEF_INDEX,
@@ -533,18 +534,18 @@
tcx.def_path_hash(def_id).0
}
- fn to_debug_str(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> String {
+ fn to_debug_str(&self, tcx: TyCtxt<'tcx>) -> String {
tcx.crate_name(*self).as_str().to_string()
}
}
-impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for (DefId, DefId) {
+impl<'tcx> DepNodeParams<'tcx> for (DefId, DefId) {
const CAN_RECONSTRUCT_QUERY_KEY: bool = false;
// We actually would not need to specialize the implementation of this
// method but it's faster to combine the hashes than to instantiate a full
// hashing context and stable-hashing state.
- fn to_fingerprint(&self, tcx: TyCtxt<'_, '_, '_>) -> Fingerprint {
+ fn to_fingerprint(&self, tcx: TyCtxt<'_>) -> Fingerprint {
let (def_id_0, def_id_1) = *self;
let def_path_hash_0 = tcx.def_path_hash(def_id_0);
@@ -553,7 +554,7 @@
def_path_hash_0.0.combine(def_path_hash_1.0)
}
- fn to_debug_str(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> String {
+ fn to_debug_str(&self, tcx: TyCtxt<'tcx>) -> String {
let (def_id_0, def_id_1) = *self;
format!("({}, {})",
@@ -562,13 +563,13 @@
}
}
-impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for HirId {
+impl<'tcx> DepNodeParams<'tcx> for HirId {
const CAN_RECONSTRUCT_QUERY_KEY: bool = false;
// We actually would not need to specialize the implementation of this
// method but it's faster to combine the hashes than to instantiate a full
// hashing context and stable-hashing state.
- fn to_fingerprint(&self, tcx: TyCtxt<'_, '_, '_>) -> Fingerprint {
+ fn to_fingerprint(&self, tcx: TyCtxt<'_>) -> Fingerprint {
let HirId {
owner,
local_id,
diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs
index a87c98a..c2e3c12 100644
--- a/src/librustc/dep_graph/graph.rs
+++ b/src/librustc/dep_graph/graph.rs
@@ -90,7 +90,6 @@
}
impl DepGraph {
-
pub fn new(prev_graph: PreviousDepGraph,
prev_work_products: FxHashMap<WorkProductId, WorkProduct>) -> DepGraph {
let prev_graph_node_count = prev_graph.node_count();
@@ -558,8 +557,8 @@
/// a node index can be found for that node.
pub fn try_mark_green_and_read(
&self,
- tcx: TyCtxt<'_, '_, '_>,
- dep_node: &DepNode
+ tcx: TyCtxt<'_>,
+ dep_node: &DepNode,
) -> Option<(SerializedDepNodeIndex, DepNodeIndex)> {
self.try_mark_green(tcx, dep_node).map(|(prev_index, dep_node_index)| {
debug_assert!(self.is_green(&dep_node));
@@ -570,8 +569,8 @@
pub fn try_mark_green(
&self,
- tcx: TyCtxt<'_, '_, '_>,
- dep_node: &DepNode
+ tcx: TyCtxt<'_>,
+ dep_node: &DepNode,
) -> Option<(SerializedDepNodeIndex, DepNodeIndex)> {
debug_assert!(!dep_node.kind.is_eval_always());
@@ -604,10 +603,10 @@
/// Try to mark a dep-node which existed in the previous compilation session as green.
fn try_mark_previous_green<'tcx>(
&self,
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
data: &DepGraphData,
prev_dep_node_index: SerializedDepNodeIndex,
- dep_node: &DepNode
+ dep_node: &DepNode,
) -> Option<DepNodeIndex> {
debug!("try_mark_previous_green({:?}) - BEGIN", dep_node);
@@ -791,7 +790,7 @@
#[inline(never)]
fn emit_diagnostics<'tcx>(
&self,
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
data: &DepGraphData,
dep_node_index: DepNodeIndex,
did_allocation: bool,
@@ -842,7 +841,7 @@
//
// This method will only load queries that will end up in the disk cache.
// Other queries will not be executed.
- pub fn exec_cache_promotions<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+ pub fn exec_cache_promotions<'tcx>(&self, tcx: TyCtxt<'tcx>) {
let green_nodes: Vec<DepNode> = {
let data = self.data.as_ref().unwrap();
data.colors.values.indices().filter_map(|prev_index| {
diff --git a/src/librustc/dep_graph/safe.rs b/src/librustc/dep_graph/safe.rs
index fc767de..e5eda14 100644
--- a/src/librustc/dep_graph/safe.rs
+++ b/src/librustc/dep_graph/safe.rs
@@ -33,8 +33,7 @@
/// The type context itself can be used to access all kinds of tracked
/// state, but those accesses should always generate read events.
-impl<'a, 'gcx, 'tcx> DepGraphSafe for TyCtxt<'a, 'gcx, 'tcx> {
-}
+impl<'tcx> DepGraphSafe for TyCtxt<'tcx> {}
/// Tuples make it easy to build up state.
impl<A, B> DepGraphSafe for (A, B)
diff --git a/src/librustc/error_codes.rs b/src/librustc/error_codes.rs
index 6243e91..65821d8 100644
--- a/src/librustc/error_codes.rs
+++ b/src/librustc/error_codes.rs
@@ -1883,7 +1883,7 @@
# }
```
-The same applies to transmutes to `*mut fn()`, which were observedin practice.
+The same applies to transmutes to `*mut fn()`, which were observed in practice.
Note though that use of this type is generally incorrect.
The intention is typically to describe a function pointer, but just `fn()`
alone suffices for that. `*mut fn()` is a pointer to a fn pointer.
diff --git a/src/librustc/hir/check_attr.rs b/src/librustc/hir/check_attr.rs
index f7d1094..4b84d56 100644
--- a/src/librustc/hir/check_attr.rs
+++ b/src/librustc/hir/check_attr.rs
@@ -87,11 +87,11 @@
}
}
-struct CheckAttrVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct CheckAttrVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> {
+impl CheckAttrVisitor<'tcx> {
/// Checks any attribute.
fn check_attributes(&self, item: &hir::Item, target: Target) {
if target == Target::Fn || target == Target::Const {
@@ -310,7 +310,7 @@
}
}
-impl<'a, 'tcx> Visitor<'tcx> for CheckAttrVisitor<'a, 'tcx> {
+impl Visitor<'tcx> for CheckAttrVisitor<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::OnlyBodies(&self.tcx.hir())
}
@@ -347,7 +347,7 @@
}
}
-fn check_mod_attrs<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) {
+fn check_mod_attrs<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
tcx.hir().visit_item_likes_in_module(
module_def_id,
&mut CheckAttrVisitor { tcx }.as_deep_visitor()
diff --git a/src/librustc/hir/def_id.rs b/src/librustc/hir/def_id.rs
index 0c4f5fb..debed38 100644
--- a/src/librustc/hir/def_id.rs
+++ b/src/librustc/hir/def_id.rs
@@ -177,7 +177,7 @@
LocalDefId::from_def_id(self)
}
- pub fn describe_as_module(&self, tcx: TyCtxt<'_, '_, '_>) -> String {
+ pub fn describe_as_module(&self, tcx: TyCtxt<'_>) -> String {
if self.is_local() && self.index == CRATE_DEF_INDEX {
format!("top-level module")
} else {
diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs
index f4f9d62..666cfc3 100644
--- a/src/librustc/hir/intravisit.rs
+++ b/src/librustc/hir/intravisit.rs
@@ -171,7 +171,7 @@
/// but cannot supply a `Map`; see `nested_visit_map` for advice.
#[allow(unused_variables)]
fn visit_nested_item(&mut self, id: ItemId) {
- let opt_item = self.nested_visit_map().inter().map(|map| map.expect_item_by_hir_id(id.id));
+ let opt_item = self.nested_visit_map().inter().map(|map| map.expect_item(id.id));
if let Some(item) = opt_item {
self.visit_item(item);
}
diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs
index b5e9f6b..5a548ce 100644
--- a/src/librustc/hir/lowering.rs
+++ b/src/librustc/hir/lowering.rs
@@ -274,6 +274,8 @@
enum ParamMode {
/// Any path in a type context.
Explicit,
+ /// Path in a type definition, where the anonymous lifetime `'_` is not allowed.
+ ExplicitNamed,
/// The `module::Type` in `module::Type::method` in an expression.
Optional,
}
@@ -413,8 +415,8 @@
/// needed from arbitrary locations in the crate,
/// e.g., the number of lifetime generic parameters
/// declared for every type and trait definition.
- struct MiscCollector<'lcx, 'interner: 'lcx> {
- lctx: &'lcx mut LoweringContext<'interner>,
+ struct MiscCollector<'tcx, 'interner: 'tcx> {
+ lctx: &'tcx mut LoweringContext<'interner>,
hir_id_owner: Option<NodeId>,
}
@@ -458,8 +460,8 @@
}
}
- impl<'lcx, 'interner> Visitor<'lcx> for MiscCollector<'lcx, 'interner> {
- fn visit_pat(&mut self, p: &'lcx Pat) {
+ impl<'tcx, 'interner> Visitor<'tcx> for MiscCollector<'tcx, 'interner> {
+ fn visit_pat(&mut self, p: &'tcx Pat) {
match p.node {
// Doesn't generate a HIR node
PatKind::Paren(..) => {},
@@ -473,7 +475,7 @@
visit::walk_pat(self, p)
}
- fn visit_item(&mut self, item: &'lcx Item) {
+ fn visit_item(&mut self, item: &'tcx Item) {
let hir_id = self.lctx.allocate_hir_id_counter(item.id);
match item.node {
@@ -505,7 +507,7 @@
});
}
- fn visit_trait_item(&mut self, item: &'lcx TraitItem) {
+ fn visit_trait_item(&mut self, item: &'tcx TraitItem) {
self.lctx.allocate_hir_id_counter(item.id);
match item.node {
@@ -521,21 +523,21 @@
}
}
- fn visit_impl_item(&mut self, item: &'lcx ImplItem) {
+ fn visit_impl_item(&mut self, item: &'tcx ImplItem) {
self.lctx.allocate_hir_id_counter(item.id);
self.with_hir_id_owner(Some(item.id), |this| {
visit::walk_impl_item(this, item);
});
}
- fn visit_foreign_item(&mut self, i: &'lcx ForeignItem) {
+ fn visit_foreign_item(&mut self, i: &'tcx ForeignItem) {
// Ignore patterns in foreign items
self.with_hir_id_owner(None, |this| {
visit::walk_foreign_item(this, i)
});
}
- fn visit_ty(&mut self, t: &'lcx Ty) {
+ fn visit_ty(&mut self, t: &'tcx Ty) {
match t.node {
// Mirrors the case in visit::walk_ty
TyKind::BareFn(ref f) => {
@@ -559,11 +561,11 @@
}
}
- struct ItemLowerer<'lcx, 'interner: 'lcx> {
- lctx: &'lcx mut LoweringContext<'interner>,
+ struct ItemLowerer<'tcx, 'interner: 'tcx> {
+ lctx: &'tcx mut LoweringContext<'interner>,
}
- impl<'lcx, 'interner> ItemLowerer<'lcx, 'interner> {
+ impl<'tcx, 'interner> ItemLowerer<'tcx, 'interner> {
fn with_trait_impl_ref<F>(&mut self, trait_impl_ref: &Option<TraitRef>, f: F)
where
F: FnOnce(&mut Self),
@@ -579,8 +581,8 @@
}
}
- impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> {
- fn visit_mod(&mut self, m: &'lcx Mod, _s: Span, _attrs: &[Attribute], n: NodeId) {
+ impl<'tcx, 'interner> Visitor<'tcx> for ItemLowerer<'tcx, 'interner> {
+ fn visit_mod(&mut self, m: &'tcx Mod, _s: Span, _attrs: &[Attribute], n: NodeId) {
self.lctx.modules.insert(n, hir::ModuleItems {
items: BTreeSet::new(),
trait_items: BTreeSet::new(),
@@ -593,7 +595,7 @@
self.lctx.current_module = old;
}
- fn visit_item(&mut self, item: &'lcx Item) {
+ fn visit_item(&mut self, item: &'tcx Item) {
let mut item_hir_id = None;
self.lctx.with_hir_id_owner(item.id, |lctx| {
if let Some(hir_item) = lctx.lower_item(item) {
@@ -624,7 +626,7 @@
}
}
- fn visit_trait_item(&mut self, item: &'lcx TraitItem) {
+ fn visit_trait_item(&mut self, item: &'tcx TraitItem) {
self.lctx.with_hir_id_owner(item.id, |lctx| {
let hir_item = lctx.lower_trait_item(item);
let id = hir::TraitItemId { hir_id: hir_item.hir_id };
@@ -635,7 +637,7 @@
visit::walk_trait_item(self, item);
}
- fn visit_impl_item(&mut self, item: &'lcx ImplItem) {
+ fn visit_impl_item(&mut self, item: &'tcx ImplItem) {
self.lctx.with_hir_id_owner(item.id, |lctx| {
let hir_item = lctx.lower_impl_item(item);
let id = hir::ImplItemId { hir_id: hir_item.hir_id };
@@ -1489,6 +1491,23 @@
P(self.lower_ty_direct(t, itctx))
}
+ fn lower_path_ty(
+ &mut self,
+ t: &Ty,
+ qself: &Option<QSelf>,
+ path: &Path,
+ param_mode: ParamMode,
+ itctx: ImplTraitContext<'_>
+ ) -> hir::Ty {
+ let id = self.lower_node_id(t.id);
+ let qpath = self.lower_qpath(t.id, qself, path, param_mode, itctx);
+ let ty = self.ty_path(id, t.span, qpath);
+ if let hir::TyKind::TraitObject(..) = ty.node {
+ self.maybe_lint_bare_trait(t.span, t.id, qself.is_none() && path.is_global());
+ }
+ ty
+ }
+
fn lower_ty_direct(&mut self, t: &Ty, mut itctx: ImplTraitContext<'_>) -> hir::Ty {
let kind = match t.node {
TyKind::Infer => hir::TyKind::Infer,
@@ -1534,13 +1553,7 @@
return self.lower_ty_direct(ty, itctx);
}
TyKind::Path(ref qself, ref path) => {
- let id = self.lower_node_id(t.id);
- let qpath = self.lower_qpath(t.id, qself, path, ParamMode::Explicit, itctx);
- let ty = self.ty_path(id, t.span, qpath);
- if let hir::TyKind::TraitObject(..) = ty.node {
- self.maybe_lint_bare_trait(t.span, t.id, qself.is_none() && path.is_global());
- }
- return ty;
+ return self.lower_path_ty(t, qself, path, ParamMode::Explicit, itctx);
}
TyKind::ImplicitSelf => {
let res = self.expect_full_res(t.id);
@@ -3086,6 +3099,18 @@
}
fn lower_struct_field(&mut self, (index, f): (usize, &StructField)) -> hir::StructField {
+ let ty = if let TyKind::Path(ref qself, ref path) = f.ty.node {
+ let t = self.lower_path_ty(
+ &f.ty,
+ qself,
+ path,
+ ParamMode::ExplicitNamed, // no `'_` in declarations (Issue #61124)
+ ImplTraitContext::disallowed()
+ );
+ P(t)
+ } else {
+ self.lower_ty(&f.ty, ImplTraitContext::disallowed())
+ };
hir::StructField {
span: f.span,
hir_id: self.lower_node_id(f.id),
@@ -3095,7 +3120,7 @@
None => Ident::new(sym::integer(index), f.span),
},
vis: self.lower_visibility(&f.vis, None),
- ty: self.lower_ty(&f.ty, ImplTraitContext::disallowed()),
+ ty,
attrs: self.lower_attrs(&f.attrs),
}
}
diff --git a/src/librustc/hir/map/hir_id_validator.rs b/src/librustc/hir/map/hir_id_validator.rs
index fafe671..32d0e06 100644
--- a/src/librustc/hir/map/hir_id_validator.rs
+++ b/src/librustc/hir/map/hir_id_validator.rs
@@ -1,6 +1,5 @@
use crate::hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX};
use crate::hir::{self, intravisit, HirId, ItemLocalId};
-use syntax::ast::NodeId;
use crate::hir::itemlikevisit::ItemLikeVisitor;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::sync::{Lock, ParallelIterator, par_iter};
@@ -112,19 +111,9 @@
trace!("missing hir id {:#?}", hir_id);
- // We are already in ICE mode here, so doing a linear search
- // should be fine.
- let (node_id, _) = self.hir_map
- .definitions()
- .node_to_hir_id
- .iter()
- .enumerate()
- .find(|&(_, &entry)| hir_id == entry)
- .expect("no node_to_hir_id entry");
- let node_id = NodeId::from_usize(node_id);
missing_items.push(format!("[local_id: {}, node:{}]",
local_id,
- self.hir_map.node_to_string(node_id)));
+ self.hir_map.node_to_string(hir_id)));
}
self.error(|| format!(
"ItemLocalIds not assigned densely in {}. \
@@ -138,7 +127,7 @@
owner: owner_def_index,
local_id,
})
- .map(|h| format!("({:?} {})", h, self.hir_map.hir_to_string(h)))
+ .map(|h| format!("({:?} {})", h, self.hir_map.node_to_string(h)))
.collect::<Vec<_>>()));
}
}
@@ -156,14 +145,14 @@
if hir_id == hir::DUMMY_HIR_ID {
self.error(|| format!("HirIdValidator: HirId {:?} is invalid",
- self.hir_map.hir_to_string(hir_id)));
+ self.hir_map.node_to_string(hir_id)));
return;
}
if owner != hir_id.owner {
self.error(|| format!(
"HirIdValidator: The recorded owner of {} is {} instead of {}",
- self.hir_map.hir_to_string(hir_id),
+ self.hir_map.node_to_string(hir_id),
self.hir_map.def_path(DefId::local(hir_id.owner)).to_string_no_crate(),
self.hir_map.def_path(DefId::local(owner)).to_string_no_crate()));
}
diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs
index 978a555..85c8699 100644
--- a/src/librustc/hir/map/mod.rs
+++ b/src/librustc/hir/map/mod.rs
@@ -35,7 +35,7 @@
pub mod definitions;
mod hir_id_validator;
-/// Represents an entry and its parent `NodeId`.
+/// Represents an entry and its parent `HirId`.
#[derive(Copy, Clone, Debug)]
pub struct Entry<'hir> {
parent: HirId,
@@ -200,7 +200,7 @@
/// "reveals" the content of a node to the caller (who might not
/// otherwise have had access to those contents, and hence needs a
/// read recorded). If the function just returns a DefId or
- /// NodeId, no actual content was returned, so no read is needed.
+ /// HirId, no actual content was returned, so no read is needed.
pub fn read(&self, hir_id: HirId) {
if let Some(entry) = self.lookup(hir_id) {
self.dep_graph.read_index(entry.dep_node);
@@ -287,20 +287,10 @@
}
#[inline]
- pub fn def_index_to_node_id(&self, def_index: DefIndex) -> NodeId {
- self.definitions.def_index_to_node_id(def_index)
- }
-
- #[inline]
pub fn local_def_id_to_hir_id(&self, def_id: LocalDefId) -> HirId {
self.definitions.def_index_to_hir_id(def_id.to_def_id().index)
}
- #[inline]
- pub fn local_def_id_to_node_id(&self, def_id: LocalDefId) -> NodeId {
- self.definitions.as_local_node_id(def_id.to_def_id()).unwrap()
- }
-
fn def_kind(&self, node_id: NodeId) -> Option<DefKind> {
let node = if let Some(node) = self.find(node_id) {
node
@@ -422,12 +412,6 @@
self.forest.krate.body(id)
}
- pub fn fn_decl(&self, node_id: ast::NodeId) -> Option<FnDecl> {
- let hir_id = self.node_to_hir_id(node_id);
- self.fn_decl_by_hir_id(hir_id)
- }
-
- // FIXME(@ljedrz): replace the `NodeId` variant.
pub fn fn_decl_by_hir_id(&self, hir_id: HirId) -> Option<FnDecl> {
if let Some(entry) = self.find_entry(hir_id) {
entry.fn_decl().cloned()
@@ -436,28 +420,22 @@
}
}
- /// Returns the `NodeId` that corresponds to the definition of
+ /// Returns the `HirId` that corresponds to the definition of
/// which this is the body of, i.e., a `fn`, `const` or `static`
/// item (possibly associated), a closure, or a `hir::AnonConst`.
- pub fn body_owner(&self, BodyId { hir_id }: BodyId) -> NodeId {
+ pub fn body_owner(&self, BodyId { hir_id }: BodyId) -> HirId {
let parent = self.get_parent_node_by_hir_id(hir_id);
assert!(self.lookup(parent).map_or(false, |e| e.is_body_owner(hir_id)));
- self.hir_to_node_id(parent)
+ parent
}
pub fn body_owner_def_id(&self, id: BodyId) -> DefId {
- self.local_def_id(self.body_owner(id))
+ self.local_def_id_from_hir_id(self.body_owner(id))
}
- /// Given a `NodeId`, returns the `BodyId` associated with it,
+ /// Given a `HirId`, returns the `BodyId` associated with it,
/// if the node is a body owner, otherwise returns `None`.
- pub fn maybe_body_owned_by(&self, id: NodeId) -> Option<BodyId> {
- let hir_id = self.node_to_hir_id(id);
- self.maybe_body_owned_by_by_hir_id(hir_id)
- }
-
- // FIXME(@ljedrz): replace the `NodeId` variant.
- pub fn maybe_body_owned_by_by_hir_id(&self, hir_id: HirId) -> Option<BodyId> {
+ pub fn maybe_body_owned_by(&self, hir_id: HirId) -> Option<BodyId> {
if let Some(entry) = self.find_entry(hir_id) {
if self.dep_graph.is_fully_enabled() {
let hir_id_owner = hir_id.owner;
@@ -473,19 +451,13 @@
/// Given a body owner's id, returns the `BodyId` associated with it.
pub fn body_owned_by(&self, id: HirId) -> BodyId {
- self.maybe_body_owned_by_by_hir_id(id).unwrap_or_else(|| {
- span_bug!(self.span_by_hir_id(id), "body_owned_by: {} has no associated body",
- self.hir_to_string(id));
+ self.maybe_body_owned_by(id).unwrap_or_else(|| {
+ span_bug!(self.span(id), "body_owned_by: {} has no associated body",
+ self.node_to_string(id));
})
}
- pub fn body_owner_kind(&self, id: NodeId) -> BodyOwnerKind {
- let hir_id = self.node_to_hir_id(id);
- self.body_owner_kind_by_hir_id(hir_id)
- }
-
- // FIXME(@ljedrz): replace the `NodeId` variant.
- pub fn body_owner_kind_by_hir_id(&self, id: HirId) -> BodyOwnerKind {
+ pub fn body_owner_kind(&self, id: HirId) -> BodyOwnerKind {
match self.get_by_hir_id(id) {
Node::Item(&Item { node: ItemKind::Const(..), .. }) |
Node::TraitItem(&TraitItem { node: TraitItemKind::Const(..), .. }) |
@@ -514,7 +486,7 @@
Node::Item(&Item { node: ItemKind::Trait(..), .. }) |
Node::Item(&Item { node: ItemKind::TraitAlias(..), .. }) => id,
Node::GenericParam(_) => self.get_parent_node_by_hir_id(id),
- _ => bug!("ty_param_owner: {} not a type parameter", self.hir_to_string(id))
+ _ => bug!("ty_param_owner: {} not a type parameter", self.node_to_string(id))
}
}
@@ -523,7 +495,7 @@
Node::Item(&Item { node: ItemKind::Trait(..), .. }) |
Node::Item(&Item { node: ItemKind::TraitAlias(..), .. }) => kw::SelfUpper,
Node::GenericParam(param) => param.name.ident().name,
- _ => bug!("ty_param_name: {} not a type parameter", self.hir_to_string(id)),
+ _ => bug!("ty_param_name: {} not a type parameter", self.node_to_string(id)),
}
}
@@ -576,7 +548,7 @@
let module = &self.forest.krate.modules[&node_id];
for id in &module.items {
- visitor.visit_item(self.expect_item_by_hir_id(*id));
+ visitor.visit_item(self.expect_item(*id));
}
for id in &module.trait_items {
@@ -709,7 +681,7 @@
/// If there is some error when walking the parents (e.g., a node does not
/// have a parent in the map or a node can't be found), then we return the
- /// last good `NodeId` we found. Note that reaching the crate root (`id == 0`),
+ /// last good `HirId` we found. Note that reaching the crate root (`id == 0`),
/// is not an error, since items in the crate module have the crate root as
/// parent.
fn walk_parent_nodes<F, F2>(&self,
@@ -745,7 +717,7 @@
}
}
- /// Retrieves the `NodeId` for `id`'s enclosing method, unless there's a
+ /// Retrieves the `HirId` for `id`'s enclosing method, unless there's a
/// `while` or `loop` before reaching it, as block tail returns are not
/// available in them.
///
@@ -753,7 +725,7 @@
/// fn foo(x: usize) -> bool {
/// if x == 1 {
/// true // `get_return_block` gets passed the `id` corresponding
- /// } else { // to this, it will return `foo`'s `NodeId`.
+ /// } else { // to this, it will return `foo`'s `HirId`.
/// false
/// }
/// }
@@ -793,17 +765,10 @@
self.walk_parent_nodes(id, match_fn, match_non_returning_block).ok()
}
- /// Retrieves the `NodeId` for `id`'s parent item, or `id` itself if no
+ /// Retrieves the `HirId` for `id`'s parent item, or `id` itself if no
/// parent item is in this map. The "parent item" is the closest parent node
/// in the HIR which is recorded by the map and is an item, either an item
/// in a module, trait, or impl.
- pub fn get_parent(&self, id: NodeId) -> NodeId {
- let hir_id = self.node_to_hir_id(id);
- let parent_hir_id = self.get_parent_item(hir_id);
- self.hir_to_node_id(parent_hir_id)
- }
-
- // FIXME(@ljedrz): replace the `NodeId` variant.
pub fn get_parent_item(&self, hir_id: HirId) -> HirId {
match self.walk_parent_nodes(hir_id, |node| match *node {
Node::Item(_) |
@@ -819,13 +784,7 @@
/// Returns the `DefId` of `id`'s nearest module parent, or `id` itself if no
/// module parent is in this map.
- pub fn get_module_parent(&self, id: NodeId) -> DefId {
- let hir_id = self.node_to_hir_id(id);
- self.get_module_parent_by_hir_id(hir_id)
- }
-
- // FIXME(@ljedrz): replace the `NodeId` variant.
- pub fn get_module_parent_by_hir_id(&self, id: HirId) -> DefId {
+ pub fn get_module_parent(&self, id: HirId) -> DefId {
self.local_def_id_from_hir_id(self.get_module_parent_node(id))
}
@@ -901,23 +860,11 @@
Some(scope)
}
- pub fn get_parent_did(&self, id: NodeId) -> DefId {
- let hir_id = self.node_to_hir_id(id);
- self.get_parent_did_by_hir_id(hir_id)
- }
-
- // FIXME(@ljedrz): replace the `NodeId` variant.
- pub fn get_parent_did_by_hir_id(&self, id: HirId) -> DefId {
+ pub fn get_parent_did(&self, id: HirId) -> DefId {
self.local_def_id_from_hir_id(self.get_parent_item(id))
}
- pub fn get_foreign_abi(&self, id: NodeId) -> Abi {
- let hir_id = self.node_to_hir_id(id);
- self.get_foreign_abi_by_hir_id(hir_id)
- }
-
- // FIXME(@ljedrz): replace the `NodeId` variant.
- pub fn get_foreign_abi_by_hir_id(&self, hir_id: HirId) -> Abi {
+ pub fn get_foreign_abi(&self, hir_id: HirId) -> Abi {
let parent = self.get_parent_item(hir_id);
if let Some(entry) = self.find_entry(parent) {
if let Entry {
@@ -927,33 +874,27 @@
return nm.abi;
}
}
- bug!("expected foreign mod or inlined parent, found {}", self.hir_to_string(parent))
+ bug!("expected foreign mod or inlined parent, found {}", self.node_to_string(parent))
}
- pub fn expect_item(&self, id: NodeId) -> &'hir Item {
- let hir_id = self.node_to_hir_id(id);
- self.expect_item_by_hir_id(hir_id)
- }
-
- // FIXME(@ljedrz): replace the `NodeId` variant.
- pub fn expect_item_by_hir_id(&self, id: HirId) -> &'hir Item {
+ pub fn expect_item(&self, id: HirId) -> &'hir Item {
match self.find_by_hir_id(id) { // read recorded by `find`
Some(Node::Item(item)) => item,
- _ => bug!("expected item, found {}", self.hir_to_string(id))
+ _ => bug!("expected item, found {}", self.node_to_string(id))
}
}
pub fn expect_impl_item(&self, id: HirId) -> &'hir ImplItem {
match self.find_by_hir_id(id) {
Some(Node::ImplItem(item)) => item,
- _ => bug!("expected impl item, found {}", self.hir_to_string(id))
+ _ => bug!("expected impl item, found {}", self.node_to_string(id))
}
}
pub fn expect_trait_item(&self, id: HirId) -> &'hir TraitItem {
match self.find_by_hir_id(id) {
Some(Node::TraitItem(item)) => item,
- _ => bug!("expected trait item, found {}", self.hir_to_string(id))
+ _ => bug!("expected trait item, found {}", self.node_to_string(id))
}
}
@@ -963,26 +904,26 @@
match i.node {
ItemKind::Struct(ref struct_def, _) |
ItemKind::Union(ref struct_def, _) => struct_def,
- _ => bug!("struct ID bound to non-struct {}", self.hir_to_string(id))
+ _ => bug!("struct ID bound to non-struct {}", self.node_to_string(id))
}
}
Some(Node::Variant(variant)) => &variant.node.data,
Some(Node::Ctor(data)) => data,
- _ => bug!("expected struct or variant, found {}", self.hir_to_string(id))
+ _ => bug!("expected struct or variant, found {}", self.node_to_string(id))
}
}
pub fn expect_variant(&self, id: HirId) -> &'hir Variant {
match self.find_by_hir_id(id) {
Some(Node::Variant(variant)) => variant,
- _ => bug!("expected variant, found {}", self.hir_to_string(id)),
+ _ => bug!("expected variant, found {}", self.node_to_string(id)),
}
}
pub fn expect_foreign_item(&self, id: HirId) -> &'hir ForeignItem {
match self.find_by_hir_id(id) {
Some(Node::ForeignItem(item)) => item,
- _ => bug!("expected foreign item, found {}", self.hir_to_string(id))
+ _ => bug!("expected foreign item, found {}", self.node_to_string(id))
}
}
@@ -995,7 +936,7 @@
pub fn expect_expr_by_hir_id(&self, id: HirId) -> &'hir Expr {
match self.find_by_hir_id(id) { // read recorded by find
Some(Node::Expr(expr)) => expr,
- _ => bug!("expected expr, found {}", self.hir_to_string(id))
+ _ => bug!("expected expr, found {}", self.node_to_string(id))
}
}
@@ -1018,19 +959,13 @@
Node::GenericParam(param) => param.name.ident().name,
Node::Binding(&Pat { node: PatKind::Binding(_, _, l, _), .. }) => l.name,
Node::Ctor(..) => self.name_by_hir_id(self.get_parent_item(id)),
- _ => bug!("no name for {}", self.hir_to_string(id))
+ _ => bug!("no name for {}", self.node_to_string(id))
}
}
/// Given a node ID, gets a list of attributes associated with the AST
/// corresponding to the node-ID.
- pub fn attrs(&self, id: NodeId) -> &'hir [ast::Attribute] {
- let hir_id = self.node_to_hir_id(id);
- self.attrs_by_hir_id(hir_id)
- }
-
- // FIXME(@ljedrz): replace the `NodeId` variant.
- pub fn attrs_by_hir_id(&self, id: HirId) -> &'hir [ast::Attribute] {
+ pub fn attrs(&self, id: HirId) -> &'hir [ast::Attribute] {
self.read(id); // reveals attributes on the node
let attrs = match self.find_entry(id).map(|entry| entry.node) {
Some(Node::Local(l)) => Some(&l.attrs[..]),
@@ -1046,7 +981,7 @@
Some(Node::GenericParam(param)) => Some(¶m.attrs[..]),
// Unit/tuple structs/variants take the attributes straight from
// the struct/variant definition.
- Some(Node::Ctor(..)) => return self.attrs_by_hir_id(self.get_parent_item(id)),
+ Some(Node::Ctor(..)) => return self.attrs(self.get_parent_item(id)),
Some(Node::Crate) => Some(&self.forest.krate.attrs[..]),
_ => None
};
@@ -1093,13 +1028,7 @@
})
}
- pub fn span(&self, id: NodeId) -> Span {
- let hir_id = self.node_to_hir_id(id);
- self.span_by_hir_id(hir_id)
- }
-
- // FIXME(@ljedrz): replace the `NodeId` variant.
- pub fn span_by_hir_id(&self, hir_id: HirId) -> Span {
+ pub fn span(&self, hir_id: HirId) -> Span {
self.read(hir_id); // reveals span from node
match self.find_entry(hir_id).map(|entry| entry.node) {
Some(Node::Item(item)) => item.span,
@@ -1139,32 +1068,17 @@
}
pub fn span_if_local(&self, id: DefId) -> Option<Span> {
- self.as_local_node_id(id).map(|id| self.span(id))
+ self.as_local_hir_id(id).map(|id| self.span(id))
}
- pub fn node_to_string(&self, id: NodeId) -> String {
- hir_id_to_string(self, self.node_to_hir_id(id), true)
- }
-
- // FIXME(@ljedrz): replace the `NodeId` variant.
- pub fn hir_to_string(&self, id: HirId) -> String {
+ pub fn node_to_string(&self, id: HirId) -> String {
hir_id_to_string(self, id, true)
}
- pub fn node_to_user_string(&self, id: NodeId) -> String {
- hir_id_to_string(self, self.node_to_hir_id(id), false)
- }
-
- // FIXME(@ljedrz): replace the `NodeId` variant.
pub fn hir_to_user_string(&self, id: HirId) -> String {
hir_id_to_string(self, id, false)
}
- pub fn node_to_pretty_string(&self, id: NodeId) -> String {
- print::to_string(self, |s| s.print_node(self.get(id)))
- }
-
- // FIXME(@ljedrz): replace the `NodeId` variant.
pub fn hir_to_pretty_string(&self, id: HirId) -> String {
print::to_string(self, |s| s.print_node(self.get_by_hir_id(id)))
}
@@ -1307,7 +1221,7 @@
impl<'hir> print::PpAnn for Map<'hir> {
fn nested(&self, state: &mut print::State<'_>, nested: print::Nested) -> io::Result<()> {
match nested {
- Nested::Item(id) => state.print_item(self.expect_item_by_hir_id(id.id)),
+ Nested::Item(id) => state.print_item(self.expect_item(id.id)),
Nested::TraitItem(id) => state.print_trait_item(self.trait_item(id)),
Nested::ImplItem(id) => state.print_impl_item(self.impl_item(id)),
Nested::Body(id) => state.print_expr(&self.body(id).value),
diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs
index 1b4c56c..0884a72 100644
--- a/src/librustc/hir/mod.rs
+++ b/src/librustc/hir/mod.rs
@@ -994,7 +994,7 @@
/// A literal.
Lit(P<Expr>),
- /// A range pattern (e.g., `1...2` or `1..2`).
+ /// A range pattern (e.g., `1..=2` or `1..2`).
Range(P<Expr>, P<Expr>, RangeEnd),
/// `[a, b, ..i, y, z]` is represented as:
@@ -2177,8 +2177,8 @@
/// References to traits in impls.
///
/// `resolve` maps each `TraitRef`'s `ref_id` to its defining trait; that's all
-/// that the `ref_id` is for. Note that `ref_id`'s value is not the `NodeId` of the
-/// trait being referred to but just a unique `NodeId` that serves as a key
+/// that the `ref_id` is for. Note that `ref_id`'s value is not the `HirId` of the
+/// trait being referred to but just a unique `HirId` that serves as a key
/// within the resolution map.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable)]
pub struct TraitRef {
diff --git a/src/librustc/hir/upvars.rs b/src/librustc/hir/upvars.rs
index a053deb..54b4435 100644
--- a/src/librustc/hir/upvars.rs
+++ b/src/librustc/hir/upvars.rs
@@ -14,8 +14,8 @@
return None;
}
- let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
- let body = tcx.hir().body(tcx.hir().maybe_body_owned_by(node_id)?);
+ let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+ let body = tcx.hir().body(tcx.hir().maybe_body_owned_by(hir_id)?);
let mut local_collector = LocalCollector::default();
local_collector.visit_body(body);
@@ -55,7 +55,7 @@
}
struct CaptureCollector<'a, 'tcx> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
locals: &'a FxHashSet<HirId>,
upvars: FxIndexMap<HirId, hir::Upvar>,
}
diff --git a/src/librustc/ich/hcx.rs b/src/librustc/ich/hcx.rs
index eff42ef..4ef4d70 100644
--- a/src/librustc/ich/hcx.rs
+++ b/src/librustc/ich/hcx.rs
@@ -61,12 +61,12 @@
/// We could also just store a plain reference to the hir::Crate but we want
/// to avoid that the crate is used to get untracked access to all of the HIR.
#[derive(Clone, Copy)]
-struct BodyResolver<'gcx>(&'gcx hir::Crate);
+struct BodyResolver<'tcx>(&'tcx hir::Crate);
-impl<'gcx> BodyResolver<'gcx> {
+impl<'tcx> BodyResolver<'tcx> {
// Return a reference to the hir::Body with the given BodyId.
// DOES NOT DO ANY TRACKING, use carefully.
- fn body(self, id: hir::BodyId) -> &'gcx hir::Body {
+ fn body(self, id: hir::BodyId) -> &'tcx hir::Body {
self.0.body(id)
}
}
@@ -205,8 +205,8 @@
}
}
-impl<'a, 'gcx, 'lcx> StableHashingContextProvider<'a> for TyCtxt<'a, 'gcx, 'lcx> {
- fn get_stable_hashing_context(&self) -> StableHashingContext<'a> {
+impl StableHashingContextProvider<'tcx> for TyCtxt<'tcx> {
+ fn get_stable_hashing_context(&self) -> StableHashingContext<'tcx> {
(*self).create_stable_hashing_context()
}
}
diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs
index e0c0127..4f61845 100644
--- a/src/librustc/ich/impls_syntax.rs
+++ b/src/librustc/ich/impls_syntax.rs
@@ -506,12 +506,12 @@
(pos.0 - source_file_start.0, width as u32)
}
-
-
-impl<'gcx> HashStable<StableHashingContext<'gcx>> for feature_gate::Features {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'gcx>,
- hasher: &mut StableHasher<W>) {
+impl<'tcx> HashStable<StableHashingContext<'tcx>> for feature_gate::Features {
+ fn hash_stable<W: StableHasherResult>(
+ &self,
+ hcx: &mut StableHashingContext<'tcx>,
+ hasher: &mut StableHasher<W>,
+ ) {
// Unfortunately we cannot exhaustively list fields here, since the
// struct is macro generated.
self.declared_lang_features.hash_stable(hcx, hasher);
diff --git a/src/librustc/ich/impls_ty.rs b/src/librustc/ich/impls_ty.rs
index 89e79c5..9b144b1 100644
--- a/src/librustc/ich/impls_ty.rs
+++ b/src/librustc/ich/impls_ty.rs
@@ -11,9 +11,10 @@
use crate::ty;
use crate::mir;
-impl<'a, 'gcx, T> HashStable<StableHashingContext<'a>>
-for &'gcx ty::List<T>
- where T: HashStable<StableHashingContext<'a>> {
+impl<'a, 'tcx, T> HashStable<StableHashingContext<'a>> for &'tcx ty::List<T>
+where
+ T: HashStable<StableHashingContext<'a>>,
+{
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
@@ -40,8 +41,9 @@
}
}
-impl<'a, 'gcx, T> ToStableHashKey<StableHashingContext<'a>> for &'gcx ty::List<T>
- where T: HashStable<StableHashingContext<'a>>
+impl<'a, 'tcx, T> ToStableHashKey<StableHashingContext<'a>> for &'tcx ty::List<T>
+where
+ T: HashStable<StableHashingContext<'a>>,
{
type KeyType = Fingerprint;
@@ -54,7 +56,7 @@
}
}
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for ty::subst::Kind<'gcx> {
+impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for ty::subst::Kind<'tcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
@@ -117,20 +119,24 @@
}
}
-impl<'gcx, 'tcx> HashStable<StableHashingContext<'gcx>> for ty::ConstVid<'tcx> {
+impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for ty::ConstVid<'tcx> {
#[inline]
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'gcx>,
- hasher: &mut StableHasher<W>) {
+ fn hash_stable<W: StableHasherResult>(
+ &self,
+ hcx: &mut StableHashingContext<'a>,
+ hasher: &mut StableHasher<W>,
+ ) {
self.index.hash_stable(hcx, hasher);
}
}
-impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::BoundVar {
+impl<'tcx> HashStable<StableHashingContext<'tcx>> for ty::BoundVar {
#[inline]
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'gcx>,
- hasher: &mut StableHasher<W>) {
+ fn hash_stable<W: StableHasherResult>(
+ &self,
+ hcx: &mut StableHashingContext<'tcx>,
+ hasher: &mut StableHasher<W>,
+ ) {
self.index().hash_stable(hcx, hasher);
}
}
diff --git a/src/librustc/infer/at.rs b/src/librustc/infer/at.rs
index 5772110..0bb9398 100644
--- a/src/librustc/infer/at.rs
+++ b/src/librustc/infer/at.rs
@@ -30,25 +30,25 @@
use crate::ty::Const;
use crate::ty::relate::{Relate, TypeRelation};
-pub struct At<'a, 'gcx: 'tcx, 'tcx: 'a> {
- pub infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+pub struct At<'a, 'tcx: 'a> {
+ pub infcx: &'a InferCtxt<'a, 'tcx>,
pub cause: &'a ObligationCause<'tcx>,
pub param_env: ty::ParamEnv<'tcx>,
}
-pub struct Trace<'a, 'gcx: 'tcx, 'tcx: 'a> {
- at: At<'a, 'gcx, 'tcx>,
+pub struct Trace<'a, 'tcx: 'a> {
+ at: At<'a, 'tcx>,
a_is_expected: bool,
trace: TypeTrace<'tcx>,
}
-impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
#[inline]
- pub fn at(&'a self,
- cause: &'a ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>)
- -> At<'a, 'gcx, 'tcx>
- {
+ pub fn at(
+ &'a self,
+ cause: &'a ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ ) -> At<'a, 'tcx> {
At { infcx: self, cause, param_env }
}
}
@@ -61,7 +61,7 @@
-> TypeTrace<'tcx>;
}
-impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> At<'a, 'tcx> {
/// Hacky routine for equating two impl headers in coherence.
pub fn eq_impl_headers(self,
expected: &ty::ImplHeader<'tcx>,
@@ -186,11 +186,9 @@
/// error-reporting, but doesn't actually perform any operation
/// yet (this is useful when you want to set the trace using
/// distinct values from those you wish to operate upon).
- pub fn trace<T>(self,
- expected: T,
- actual: T)
- -> Trace<'a, 'gcx, 'tcx>
- where T: ToTrace<'tcx>
+ pub fn trace<T>(self, expected: T, actual: T) -> Trace<'a, 'tcx>
+ where
+ T: ToTrace<'tcx>,
{
self.trace_exp(true, expected, actual)
}
@@ -198,19 +196,16 @@
/// Like `trace`, but the expected value is determined by the
/// boolean argument (if true, then the first argument `a` is the
/// "expected" value).
- pub fn trace_exp<T>(self,
- a_is_expected: bool,
- a: T,
- b: T)
- -> Trace<'a, 'gcx, 'tcx>
- where T: ToTrace<'tcx>
+ pub fn trace_exp<T>(self, a_is_expected: bool, a: T, b: T) -> Trace<'a, 'tcx>
+ where
+ T: ToTrace<'tcx>,
{
let trace = ToTrace::to_trace(self.cause, a_is_expected, a, b);
Trace { at: self, trace: trace, a_is_expected }
}
}
-impl<'a, 'gcx, 'tcx> Trace<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Trace<'a, 'tcx> {
/// Makes `a <: b` where `a` may or may not be expected (if
/// `a_is_expected` is true, then `a` is expected).
/// Makes `expected <: actual`.
diff --git a/src/librustc/infer/canonical/canonicalizer.rs b/src/librustc/infer/canonical/canonicalizer.rs
index ae4bfcaa..b4779ee 100644
--- a/src/librustc/infer/canonical/canonicalizer.rs
+++ b/src/librustc/infer/canonical/canonicalizer.rs
@@ -21,7 +21,7 @@
use rustc_data_structures::indexed_vec::Idx;
use smallvec::SmallVec;
-impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
/// Canonicalizes a query value `V`. When we canonicalize a query,
/// we not only canonicalize unbound inference variables, but we
/// *also* replace all free regions whatsoever. So for example a
@@ -41,9 +41,9 @@
&self,
value: &V,
query_state: &mut OriginalQueryValues<'tcx>,
- ) -> Canonicalized<'gcx, V>
+ ) -> Canonicalized<'tcx, V>
where
- V: TypeFoldable<'tcx> + Lift<'gcx>,
+ V: TypeFoldable<'tcx> + Lift<'tcx>,
{
self.tcx
.sess
@@ -85,9 +85,9 @@
/// out the [chapter in the rustc guide][c].
///
/// [c]: https://rust-lang.github.io/rustc-guide/traits/canonicalization.html#canonicalizing-the-query-result
- pub fn canonicalize_response<V>(&self, value: &V) -> Canonicalized<'gcx, V>
+ pub fn canonicalize_response<V>(&self, value: &V) -> Canonicalized<'tcx, V>
where
- V: TypeFoldable<'tcx> + Lift<'gcx>,
+ V: TypeFoldable<'tcx> + Lift<'tcx>,
{
let mut query_state = OriginalQueryValues::default();
Canonicalizer::canonicalize(
@@ -99,9 +99,9 @@
)
}
- pub fn canonicalize_user_type_annotation<V>(&self, value: &V) -> Canonicalized<'gcx, V>
+ pub fn canonicalize_user_type_annotation<V>(&self, value: &V) -> Canonicalized<'tcx, V>
where
- V: TypeFoldable<'tcx> + Lift<'gcx>,
+ V: TypeFoldable<'tcx> + Lift<'tcx>,
{
let mut query_state = OriginalQueryValues::default();
Canonicalizer::canonicalize(
@@ -130,9 +130,9 @@
&self,
value: &V,
query_state: &mut OriginalQueryValues<'tcx>,
- ) -> Canonicalized<'gcx, V>
+ ) -> Canonicalized<'tcx, V>
where
- V: TypeFoldable<'tcx> + Lift<'gcx>,
+ V: TypeFoldable<'tcx> + Lift<'tcx>,
{
self.tcx
.sess
@@ -160,7 +160,7 @@
trait CanonicalizeRegionMode {
fn canonicalize_free_region(
&self,
- canonicalizer: &mut Canonicalizer<'_, '_, 'tcx>,
+ canonicalizer: &mut Canonicalizer<'_, 'tcx>,
r: ty::Region<'tcx>,
) -> ty::Region<'tcx>;
@@ -172,7 +172,7 @@
impl CanonicalizeRegionMode for CanonicalizeQueryResponse {
fn canonicalize_free_region(
&self,
- canonicalizer: &mut Canonicalizer<'_, '_, 'tcx>,
+ canonicalizer: &mut Canonicalizer<'_, 'tcx>,
r: ty::Region<'tcx>,
) -> ty::Region<'tcx> {
match r {
@@ -221,7 +221,7 @@
impl CanonicalizeRegionMode for CanonicalizeUserTypeAnnotation {
fn canonicalize_free_region(
&self,
- canonicalizer: &mut Canonicalizer<'_, '_, 'tcx>,
+ canonicalizer: &mut Canonicalizer<'_, 'tcx>,
r: ty::Region<'tcx>,
) -> ty::Region<'tcx> {
match r {
@@ -244,7 +244,7 @@
impl CanonicalizeRegionMode for CanonicalizeAllFreeRegions {
fn canonicalize_free_region(
&self,
- canonicalizer: &mut Canonicalizer<'_, '_, 'tcx>,
+ canonicalizer: &mut Canonicalizer<'_, 'tcx>,
r: ty::Region<'tcx>,
) -> ty::Region<'tcx> {
canonicalizer.canonical_var_for_region_in_root_universe(r)
@@ -260,7 +260,7 @@
impl CanonicalizeRegionMode for CanonicalizeFreeRegionsOtherThanStatic {
fn canonicalize_free_region(
&self,
- canonicalizer: &mut Canonicalizer<'_, '_, 'tcx>,
+ canonicalizer: &mut Canonicalizer<'_, 'tcx>,
r: ty::Region<'tcx>,
) -> ty::Region<'tcx> {
if let ty::ReStatic = r {
@@ -275,9 +275,9 @@
}
}
-struct Canonicalizer<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+struct Canonicalizer<'cx, 'tcx: 'cx> {
+ infcx: Option<&'cx InferCtxt<'cx, 'tcx>>,
+ tcx: TyCtxt<'tcx>,
variables: SmallVec<[CanonicalVarInfo; 8]>,
query_state: &'cx mut OriginalQueryValues<'tcx>,
// Note that indices is only used once `var_values` is big enough to be
@@ -289,8 +289,8 @@
binder_index: ty::DebruijnIndex,
}
-impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
+impl<'cx, 'tcx> TypeFolder<'tcx> for Canonicalizer<'cx, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
self.tcx
}
@@ -495,18 +495,18 @@
}
}
-impl<'cx, 'gcx, 'tcx> Canonicalizer<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
/// The main `canonicalize` method, shared impl of
/// `canonicalize_query` and `canonicalize_response`.
fn canonicalize<V>(
value: &V,
- infcx: Option<&InferCtxt<'_, 'gcx, 'tcx>>,
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ infcx: Option<&InferCtxt<'_, 'tcx>>,
+ tcx: TyCtxt<'tcx>,
canonicalize_region_mode: &dyn CanonicalizeRegionMode,
query_state: &mut OriginalQueryValues<'tcx>,
- ) -> Canonicalized<'gcx, V>
+ ) -> Canonicalized<'tcx, V>
where
- V: TypeFoldable<'tcx> + Lift<'gcx>,
+ V: TypeFoldable<'tcx> + Lift<'tcx>,
{
let needs_canonical_flags = if canonicalize_region_mode.any() {
TypeFlags::KEEP_IN_LOCAL_TCX |
diff --git a/src/librustc/infer/canonical/mod.rs b/src/librustc/infer/canonical/mod.rs
index 42f53bf..8b1c34a 100644
--- a/src/librustc/infer/canonical/mod.rs
+++ b/src/librustc/infer/canonical/mod.rs
@@ -44,15 +44,15 @@
/// variables have been rewritten to "canonical vars". These are
/// numbered starting from 0 in order of first appearance.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable, HashStable)]
-pub struct Canonical<'gcx, V> {
+pub struct Canonical<'tcx, V> {
pub max_universe: ty::UniverseIndex,
- pub variables: CanonicalVarInfos<'gcx>,
+ pub variables: CanonicalVarInfos<'tcx>,
pub value: V,
}
-pub type CanonicalVarInfos<'gcx> = &'gcx List<CanonicalVarInfo>;
+pub type CanonicalVarInfos<'tcx> = &'tcx List<CanonicalVarInfo>;
-impl<'gcx> UseSpecializedDecodable for CanonicalVarInfos<'gcx> {}
+impl<'tcx> UseSpecializedDecodable for CanonicalVarInfos<'tcx> {}
/// A set of values corresponding to the canonical variables from some
/// `Canonical`. You can give these values to
@@ -194,10 +194,10 @@
pub value: R,
}
-pub type Canonicalized<'gcx, V> = Canonical<'gcx, <V as Lift<'gcx>>::Lifted>;
+pub type Canonicalized<'tcx, V> = Canonical<'tcx, <V as Lift<'tcx>>::Lifted>;
-pub type CanonicalizedQueryResponse<'gcx, T> =
- &'gcx Canonical<'gcx, QueryResponse<'gcx, <T as Lift<'gcx>>::Lifted>>;
+pub type CanonicalizedQueryResponse<'tcx, T> =
+ &'tcx Canonical<'tcx, QueryResponse<'tcx, <T as Lift<'tcx>>::Lifted>>;
/// Indicates whether or not we were able to prove the query to be
/// true.
@@ -254,7 +254,7 @@
}
}
-impl<'gcx, V> Canonical<'gcx, V> {
+impl<'tcx, V> Canonical<'tcx, V> {
/// Allows you to map the `value` of a canonical while keeping the
/// same set of bound variables.
///
@@ -278,7 +278,7 @@
/// let ty: Ty<'tcx> = ...;
/// let b: Canonical<'tcx, (T, Ty<'tcx>)> = a.unchecked_map(|v| (v, ty));
/// ```
- pub fn unchecked_map<W>(self, map_op: impl FnOnce(V) -> W) -> Canonical<'gcx, W> {
+ pub fn unchecked_map<W>(self, map_op: impl FnOnce(V) -> W) -> Canonical<'tcx, W> {
let Canonical {
max_universe,
variables,
@@ -294,7 +294,7 @@
pub type QueryRegionConstraint<'tcx> = ty::Binder<ty::OutlivesPredicate<Kind<'tcx>, Region<'tcx>>>;
-impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
/// Creates a substitution S for the canonical value with fresh
/// inference variables and applies it to the canonical value.
/// Returns both the instantiated result *and* the substitution S.
@@ -478,7 +478,7 @@
/// `self.var_values == [Type(u32), Lifetime('a), Type(u64)]`
/// we'll return a substitution `subst` with:
/// `subst.var_values == [Type(^0), Lifetime(^1), Type(^2)]`.
- pub fn make_identity<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
+ pub fn make_identity(&self, tcx: TyCtxt<'tcx>) -> Self {
use crate::ty::subst::UnpackedKind;
CanonicalVarValues {
diff --git a/src/librustc/infer/canonical/query_response.rs b/src/librustc/infer/canonical/query_response.rs
index 413c142..8b11ebf 100644
--- a/src/librustc/infer/canonical/query_response.rs
+++ b/src/librustc/infer/canonical/query_response.rs
@@ -29,7 +29,7 @@
use crate::ty::{self, BoundVar, InferConst, Lift, Ty, TyCtxt};
use crate::util::captures::Captures;
-impl<'cx, 'gcx, 'tcx> InferCtxtBuilder<'cx, 'gcx, 'tcx> {
+impl<'tcx> InferCtxtBuilder<'tcx> {
/// The "main method" for a canonicalized trait query. Given the
/// canonical key `canonical_key`, this method will create a new
/// inference context, instantiate the key, and run your operation
@@ -42,20 +42,19 @@
///
/// (It might be mildly nicer to implement this on `TyCtxt`, and
/// not `InferCtxtBuilder`, but that is a bit tricky right now.
- /// In part because we would need a `for<'gcx: 'tcx>` sort of
+ /// In part because we would need a `for<'tcx>` sort of
/// bound for the closure and in part because it is convenient to
/// have `'tcx` be free on this function so that we can talk about
/// `K: TypeFoldable<'tcx>`.)
pub fn enter_canonical_trait_query<K, R>(
- &'tcx mut self,
+ &mut self,
canonical_key: &Canonical<'tcx, K>,
- operation: impl FnOnce(&InferCtxt<'_, 'gcx, 'tcx>, &mut dyn TraitEngine<'tcx>, K)
- -> Fallible<R>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, R>>
+ operation: impl FnOnce(&InferCtxt<'_, 'tcx>, &mut dyn TraitEngine<'tcx>, K) -> Fallible<R>,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, R>>
where
K: TypeFoldable<'tcx>,
- R: Debug + Lift<'gcx> + TypeFoldable<'tcx>,
- Canonical<'gcx, <QueryResponse<'gcx, R> as Lift<'gcx>>::Lifted>: ArenaAllocatable,
+ R: Debug + Lift<'tcx> + TypeFoldable<'tcx>,
+ Canonical<'tcx, <QueryResponse<'tcx, R> as Lift<'tcx>>::Lifted>: ArenaAllocatable,
{
self.enter_with_canonical(
DUMMY_SP,
@@ -73,7 +72,7 @@
}
}
-impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
/// This method is meant to be invoked as the final step of a canonical query
/// implementation. It is given:
///
@@ -98,10 +97,10 @@
inference_vars: CanonicalVarValues<'tcx>,
answer: T,
fulfill_cx: &mut dyn TraitEngine<'tcx>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, T>>
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, T>>
where
- T: Debug + Lift<'gcx> + TypeFoldable<'tcx>,
- Canonical<'gcx, <QueryResponse<'gcx, T> as Lift<'gcx>>::Lifted>: ArenaAllocatable,
+ T: Debug + Lift<'tcx> + TypeFoldable<'tcx>,
+ Canonical<'tcx, <QueryResponse<'tcx, T> as Lift<'tcx>>::Lifted>: ArenaAllocatable,
{
let query_response = self.make_query_response(inference_vars, answer, fulfill_cx)?;
let canonical_result = self.canonicalize_response(&query_response);
@@ -126,10 +125,10 @@
pub fn make_query_response_ignoring_pending_obligations<T>(
&self,
inference_vars: CanonicalVarValues<'tcx>,
- answer: T
- ) -> Canonical<'gcx, QueryResponse<'gcx, <T as Lift<'gcx>>::Lifted>>
+ answer: T,
+ ) -> Canonical<'tcx, QueryResponse<'tcx, <T as Lift<'tcx>>::Lifted>>
where
- T: Debug + Lift<'gcx> + TypeFoldable<'tcx>,
+ T: Debug + Lift<'tcx> + TypeFoldable<'tcx>,
{
self.canonicalize_response(&QueryResponse {
var_values: inference_vars,
@@ -148,7 +147,7 @@
fulfill_cx: &mut dyn TraitEngine<'tcx>,
) -> Result<QueryResponse<'tcx, T>, NoSolution>
where
- T: Debug + TypeFoldable<'tcx> + Lift<'gcx>,
+ T: Debug + TypeFoldable<'tcx> + Lift<'tcx>,
{
let tcx = self.tcx;
@@ -567,7 +566,7 @@
param_env: ty::ParamEnv<'tcx>,
unsubstituted_region_constraints: &'a [QueryRegionConstraint<'tcx>],
result_subst: &'a CanonicalVarValues<'tcx>,
- ) -> impl Iterator<Item = PredicateObligation<'tcx>> + 'a + Captures<'gcx> {
+ ) -> impl Iterator<Item = PredicateObligation<'tcx>> + 'a + Captures<'tcx> {
unsubstituted_region_constraints
.iter()
.map(move |constraint| {
@@ -647,7 +646,7 @@
/// Given the region obligations and constraints scraped from the infcx,
/// creates query region constraints.
pub fn make_query_outlives<'tcx>(
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
outlives_obligations: impl Iterator<Item = (Ty<'tcx>, ty::Region<'tcx>)>,
region_constraints: &RegionConstraintData<'tcx>,
) -> Vec<QueryRegionConstraint<'tcx>> {
diff --git a/src/librustc/infer/canonical/substitute.rs b/src/librustc/infer/canonical/substitute.rs
index 6b716d6..1234b96 100644
--- a/src/librustc/infer/canonical/substitute.rs
+++ b/src/librustc/infer/canonical/substitute.rs
@@ -14,7 +14,7 @@
impl<'tcx, V> Canonical<'tcx, V> {
/// Instantiate the wrapped value, replacing each canonical value
/// with the value given in `var_values`.
- pub fn substitute(&self, tcx: TyCtxt<'_, '_, 'tcx>, var_values: &CanonicalVarValues<'tcx>) -> V
+ pub fn substitute(&self, tcx: TyCtxt<'tcx>, var_values: &CanonicalVarValues<'tcx>) -> V
where
V: TypeFoldable<'tcx>,
{
@@ -29,7 +29,7 @@
/// V, replacing each of the canonical variables.
pub fn substitute_projected<T>(
&self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
var_values: &CanonicalVarValues<'tcx>,
projection_fn: impl FnOnce(&V) -> &T,
) -> T
@@ -46,7 +46,7 @@
/// must be values for the set of canonical variables that appear in
/// `value`.
pub(super) fn substitute_value<'a, 'tcx, T>(
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
var_values: &CanonicalVarValues<'tcx>,
value: &'a T,
) -> T
diff --git a/src/librustc/infer/combine.rs b/src/librustc/infer/combine.rs
index 4e6d534..2355056 100644
--- a/src/librustc/infer/combine.rs
+++ b/src/librustc/infer/combine.rs
@@ -44,8 +44,8 @@
use syntax_pos::{Span, DUMMY_SP};
#[derive(Clone)]
-pub struct CombineFields<'infcx, 'gcx: 'infcx+'tcx, 'tcx: 'infcx> {
- pub infcx: &'infcx InferCtxt<'infcx, 'gcx, 'tcx>,
+pub struct CombineFields<'infcx, 'tcx: 'infcx> {
+ pub infcx: &'infcx InferCtxt<'infcx, 'tcx>,
pub trace: TypeTrace<'tcx>,
pub cause: Option<ty::relate::Cause>,
pub param_env: ty::ParamEnv<'tcx>,
@@ -57,13 +57,15 @@
SubtypeOf, SupertypeOf, EqTo
}
-impl<'infcx, 'gcx, 'tcx> InferCtxt<'infcx, 'gcx, 'tcx> {
- pub fn super_combine_tys<R>(&self,
- relation: &mut R,
- a: Ty<'tcx>,
- b: Ty<'tcx>)
- -> RelateResult<'tcx, Ty<'tcx>>
- where R: TypeRelation<'infcx, 'gcx, 'tcx>
+impl<'infcx, 'tcx> InferCtxt<'infcx, 'tcx> {
+ pub fn super_combine_tys<R>(
+ &self,
+ relation: &mut R,
+ a: Ty<'tcx>,
+ b: Ty<'tcx>,
+ ) -> RelateResult<'tcx, Ty<'tcx>>
+ where
+ R: TypeRelation<'tcx>,
{
let a_is_expected = relation.a_is_expected();
@@ -123,7 +125,7 @@
b: &'tcx ty::Const<'tcx>,
) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>>
where
- R: TypeRelation<'infcx, 'gcx, 'tcx>,
+ R: TypeRelation<'tcx>,
{
let a_is_expected = relation.a_is_expected();
@@ -206,24 +208,24 @@
}
}
-impl<'infcx, 'gcx, 'tcx> CombineFields<'infcx, 'gcx, 'tcx> {
- pub fn tcx(&self) -> TyCtxt<'infcx, 'gcx, 'tcx> {
+impl<'infcx, 'tcx> CombineFields<'infcx, 'tcx> {
+ pub fn tcx(&self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
- pub fn equate<'a>(&'a mut self, a_is_expected: bool) -> Equate<'a, 'infcx, 'gcx, 'tcx> {
+ pub fn equate<'a>(&'a mut self, a_is_expected: bool) -> Equate<'a, 'infcx, 'tcx> {
Equate::new(self, a_is_expected)
}
- pub fn sub<'a>(&'a mut self, a_is_expected: bool) -> Sub<'a, 'infcx, 'gcx, 'tcx> {
+ pub fn sub<'a>(&'a mut self, a_is_expected: bool) -> Sub<'a, 'infcx, 'tcx> {
Sub::new(self, a_is_expected)
}
- pub fn lub<'a>(&'a mut self, a_is_expected: bool) -> Lub<'a, 'infcx, 'gcx, 'tcx> {
+ pub fn lub<'a>(&'a mut self, a_is_expected: bool) -> Lub<'a, 'infcx, 'tcx> {
Lub::new(self, a_is_expected)
}
- pub fn glb<'a>(&'a mut self, a_is_expected: bool) -> Glb<'a, 'infcx, 'gcx, 'tcx> {
+ pub fn glb<'a>(&'a mut self, a_is_expected: bool) -> Glb<'a, 'infcx, 'tcx> {
Glb::new(self, a_is_expected)
}
@@ -353,8 +355,8 @@
}
}
-struct Generalizer<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> {
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+struct Generalizer<'cx, 'tcx: 'cx> {
+ infcx: &'cx InferCtxt<'cx, 'tcx>,
/// The span, used when creating new type variables and things.
span: Span,
@@ -413,8 +415,8 @@
needs_wf: bool,
}
-impl<'cx, 'gcx, 'tcx> TypeRelation<'cx, 'gcx, 'tcx> for Generalizer<'cx, 'gcx, 'tcx> {
- fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> {
+impl TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
diff --git a/src/librustc/infer/equate.rs b/src/librustc/infer/equate.rs
index f614086..39d8241 100644
--- a/src/librustc/infer/equate.rs
+++ b/src/librustc/infer/equate.rs
@@ -11,25 +11,24 @@
use crate::infer::unify_key::replace_if_possible;
/// Ensures `a` is made equal to `b`. Returns `a` on success.
-pub struct Equate<'combine, 'infcx: 'combine, 'gcx: 'infcx+'tcx, 'tcx: 'infcx> {
- fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>,
+pub struct Equate<'combine, 'infcx: 'combine, 'tcx: 'infcx> {
+ fields: &'combine mut CombineFields<'infcx, 'tcx>,
a_is_expected: bool,
}
-impl<'combine, 'infcx, 'gcx, 'tcx> Equate<'combine, 'infcx, 'gcx, 'tcx> {
- pub fn new(fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, a_is_expected: bool)
- -> Equate<'combine, 'infcx, 'gcx, 'tcx>
- {
+impl<'combine, 'infcx, 'tcx> Equate<'combine, 'infcx, 'tcx> {
+ pub fn new(
+ fields: &'combine mut CombineFields<'infcx, 'tcx>,
+ a_is_expected: bool,
+ ) -> Equate<'combine, 'infcx, 'tcx> {
Equate { fields: fields, a_is_expected: a_is_expected }
}
}
-impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx>
- for Equate<'combine, 'infcx, 'gcx, 'tcx>
-{
+impl TypeRelation<'tcx> for Equate<'combine, 'infcx, 'tcx> {
fn tag(&self) -> &'static str { "Equate" }
- fn tcx(&self) -> TyCtxt<'infcx, 'gcx, 'tcx> { self.fields.tcx() }
+ fn tcx(&self) -> TyCtxt<'tcx> { self.fields.tcx() }
fn a_is_expected(&self) -> bool { self.a_is_expected }
diff --git a/src/librustc/infer/error_reporting/mod.rs b/src/librustc/infer/error_reporting/mod.rs
index b1eba7d..321c068 100644
--- a/src/librustc/infer/error_reporting/mod.rs
+++ b/src/librustc/infer/error_reporting/mod.rs
@@ -67,7 +67,7 @@
pub mod nice_region_error;
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
pub fn note_and_explain_region(
self,
region_scope_tree: ®ion::ScopeTree,
@@ -86,7 +86,7 @@
)
};
let span = scope.span(self, region_scope_tree);
- let tag = match self.hir().find(scope.node_id(self, region_scope_tree)) {
+ let tag = match self.hir().find_by_hir_id(scope.hir_id(region_scope_tree)) {
Some(Node::Block(_)) => "block",
Some(Node::Expr(expr)) => match expr.node {
hir::ExprKind::Call(..) => "call",
@@ -191,7 +191,7 @@
};
let (prefix, span) = match *region {
ty::ReEarlyBound(ref br) => {
- let mut sp = cm.def_span(self.hir().span_by_hir_id(node));
+ let mut sp = cm.def_span(self.hir().span(node));
if let Some(param) = self.hir()
.get_generics(scope)
.and_then(|generics| generics.get_named(br.name))
@@ -204,7 +204,7 @@
bound_region: ty::BoundRegion::BrNamed(_, name),
..
}) => {
- let mut sp = cm.def_span(self.hir().span_by_hir_id(node));
+ let mut sp = cm.def_span(self.hir().span(node));
if let Some(param) = self.hir()
.get_generics(scope)
.and_then(|generics| generics.get_named(name))
@@ -216,11 +216,11 @@
ty::ReFree(ref fr) => match fr.bound_region {
ty::BrAnon(idx) => (
format!("the anonymous lifetime #{} defined on", idx + 1),
- self.hir().span_by_hir_id(node),
+ self.hir().span(node),
),
_ => (
format!("the lifetime {} as defined on", region),
- cm.def_span(self.hir().span_by_hir_id(node)),
+ cm.def_span(self.hir().span(node)),
),
},
_ => bug!(),
@@ -282,7 +282,7 @@
}
}
-impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
pub fn report_region_errors(
&self,
region_scope_tree: ®ion::ScopeTree,
@@ -445,13 +445,13 @@
use ty::print::Printer;
use ty::subst::Kind;
- struct AbsolutePathPrinter<'a, 'gcx, 'tcx> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ struct AbsolutePathPrinter<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
struct NonTrivialPath;
- impl<'gcx, 'tcx> Printer<'gcx, 'tcx> for AbsolutePathPrinter<'_, 'gcx, 'tcx> {
+ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> {
type Error = NonTrivialPath;
type Path = Vec<String>;
@@ -460,7 +460,7 @@
type DynExistential = !;
type Const = !;
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'gcx, 'tcx> {
+ fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
self.tcx
}
@@ -1330,12 +1330,12 @@
if !param.is_self() {
let type_param = generics.type_param(param, self.tcx);
let hir = &self.tcx.hir();
- hir.as_local_node_id(type_param.def_id).map(|id| {
+ hir.as_local_hir_id(type_param.def_id).map(|id| {
// Get the `hir::Param` to verify whether it already has any bounds.
// We do this to avoid suggesting code that ends up as `T: 'a'b`,
// instead we suggest `T: 'a + 'b` in that case.
let mut has_bounds = false;
- if let Node::GenericParam(ref param) = hir.get(id) {
+ if let Node::GenericParam(ref param) = hir.get_by_hir_id(id) {
has_bounds = !param.bounds.is_empty();
}
let sp = hir.span(id);
@@ -1546,7 +1546,7 @@
}
}
-impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
fn report_inference_failure(
&self,
var_origin: RegionVariableOrigin,
diff --git a/src/librustc/infer/error_reporting/need_type_info.rs b/src/librustc/infer/error_reporting/need_type_info.rs
index 16b6792..362a680 100644
--- a/src/librustc/infer/error_reporting/need_type_info.rs
+++ b/src/librustc/infer/error_reporting/need_type_info.rs
@@ -9,16 +9,16 @@
use syntax_pos::Span;
use errors::DiagnosticBuilder;
-struct FindLocalByTypeVisitor<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+struct FindLocalByTypeVisitor<'a, 'tcx> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
target_ty: Ty<'tcx>,
- hir_map: &'a hir::map::Map<'gcx>,
- found_local_pattern: Option<&'gcx Pat>,
- found_arg_pattern: Option<&'gcx Pat>,
+ hir_map: &'a hir::map::Map<'tcx>,
+ found_local_pattern: Option<&'tcx Pat>,
+ found_arg_pattern: Option<&'tcx Pat>,
found_ty: Option<Ty<'tcx>>,
}
-impl<'a, 'gcx, 'tcx> FindLocalByTypeVisitor<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FindLocalByTypeVisitor<'a, 'tcx> {
fn node_matches_type(&mut self, hir_id: HirId) -> Option<Ty<'tcx>> {
let ty_opt = self.infcx.in_progress_tables.and_then(|tables| {
tables.borrow().node_type_opt(hir_id)
@@ -47,12 +47,12 @@
}
}
-impl<'a, 'gcx, 'tcx> Visitor<'gcx> for FindLocalByTypeVisitor<'a, 'gcx, 'tcx> {
- fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'gcx> {
+impl<'a, 'tcx> Visitor<'tcx> for FindLocalByTypeVisitor<'a, 'tcx> {
+ fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::OnlyBodies(&self.hir_map)
}
- fn visit_local(&mut self, local: &'gcx Local) {
+ fn visit_local(&mut self, local: &'tcx Local) {
if let (None, Some(ty)) = (self.found_local_pattern, self.node_matches_type(local.hir_id)) {
self.found_local_pattern = Some(&*local.pat);
self.found_ty = Some(ty);
@@ -60,7 +60,7 @@
intravisit::walk_local(self, local);
}
- fn visit_body(&mut self, body: &'gcx Body) {
+ fn visit_body(&mut self, body: &'tcx Body) {
for argument in &body.arguments {
if let (None, Some(ty)) = (
self.found_arg_pattern,
@@ -74,8 +74,7 @@
}
}
-
-impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
pub fn extract_type_name(
&self,
ty: Ty<'tcx>,
@@ -102,8 +101,8 @@
&self,
body_id: Option<hir::BodyId>,
span: Span,
- ty: Ty<'tcx>
- ) -> DiagnosticBuilder<'gcx> {
+ ty: Ty<'tcx>,
+ ) -> DiagnosticBuilder<'tcx> {
let ty = self.resolve_vars_if_possible(&ty);
let name = self.extract_type_name(&ty, None);
@@ -229,8 +228,8 @@
pub fn need_type_info_err_in_generator(
&self,
span: Span,
- ty: Ty<'tcx>
- ) -> DiagnosticBuilder<'gcx> {
+ ty: Ty<'tcx>,
+ ) -> DiagnosticBuilder<'tcx> {
let ty = self.resolve_vars_if_possible(&ty);
let name = self.extract_type_name(&ty, None);
diff --git a/src/librustc/infer/error_reporting/nice_region_error/different_lifetimes.rs b/src/librustc/infer/error_reporting/nice_region_error/different_lifetimes.rs
index ecdcb4b..6bd2c04 100644
--- a/src/librustc/infer/error_reporting/nice_region_error/different_lifetimes.rs
+++ b/src/librustc/infer/error_reporting/nice_region_error/different_lifetimes.rs
@@ -5,7 +5,7 @@
use crate::infer::error_reporting::nice_region_error::util::AnonymousArgInfo;
use crate::util::common::ErrorReported;
-impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
/// Print the error message for lifetime errors when both the concerned regions are anonymous.
///
/// Consider a case where we have
diff --git a/src/librustc/infer/error_reporting/nice_region_error/find_anon_type.rs b/src/librustc/infer/error_reporting/nice_region_error/find_anon_type.rs
index ea74887..78d1d569 100644
--- a/src/librustc/infer/error_reporting/nice_region_error/find_anon_type.rs
+++ b/src/librustc/infer/error_reporting/nice_region_error/find_anon_type.rs
@@ -5,7 +5,7 @@
use crate::hir::intravisit::{self, NestedVisitorMap, Visitor};
use crate::infer::error_reporting::nice_region_error::NiceRegionError;
-impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
/// This function calls the `visit_ty` method for the parameters
/// corresponding to the anonymous regions. The `nested_visitor.found_type`
/// contains the anonymous type.
@@ -28,8 +28,8 @@
) -> Option<(&hir::Ty, &hir::FnDecl)> {
if let Some(anon_reg) = self.tcx().is_suitable_region(region) {
let def_id = anon_reg.def_id;
- if let Some(node_id) = self.tcx().hir().as_local_node_id(def_id) {
- let fndecl = match self.tcx().hir().get(node_id) {
+ if let Some(hir_id) = self.tcx().hir().as_local_hir_id(def_id) {
+ let fndecl = match self.tcx().hir().get_by_hir_id(hir_id) {
Node::Item(&hir::Item {
node: hir::ItemKind::Fn(ref fndecl, ..),
..
@@ -60,9 +60,9 @@
// to the anonymous region.
fn find_component_for_bound_region(
&self,
- arg: &'gcx hir::Ty,
+ arg: &'tcx hir::Ty,
br: &ty::BoundRegion,
- ) -> Option<(&'gcx hir::Ty)> {
+ ) -> Option<(&'tcx hir::Ty)> {
let mut nested_visitor = FindNestedTypeVisitor {
tcx: self.tcx(),
bound_region: *br,
@@ -81,23 +81,23 @@
// walk the types like &mut Vec<&u8> and &u8 looking for the HIR
// where that lifetime appears. This allows us to highlight the
// specific part of the type in the error message.
-struct FindNestedTypeVisitor<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+struct FindNestedTypeVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
// The bound_region corresponding to the Refree(freeregion)
// associated with the anonymous region we are looking for.
bound_region: ty::BoundRegion,
// The type where the anonymous lifetime appears
// for e.g., Vec<`&u8`> and <`&u8`>
- found_type: Option<&'gcx hir::Ty>,
+ found_type: Option<&'tcx hir::Ty>,
current_index: ty::DebruijnIndex,
}
-impl<'a, 'gcx, 'tcx> Visitor<'gcx> for FindNestedTypeVisitor<'a, 'gcx, 'tcx> {
- fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'gcx> {
+impl Visitor<'tcx> for FindNestedTypeVisitor<'tcx> {
+ fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::OnlyBodies(&self.tcx.hir())
}
- fn visit_ty(&mut self, arg: &'gcx hir::Ty) {
+ fn visit_ty(&mut self, arg: &'tcx hir::Ty) {
match arg.node {
hir::TyKind::BareFn(_) => {
self.current_index.shift_in(1);
@@ -208,15 +208,15 @@
// and would walk the types like Vec<Ref> in the above example and Ref looking for the HIR
// where that lifetime appears. This allows us to highlight the
// specific part of the type in the error message.
-struct TyPathVisitor<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+struct TyPathVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
found_it: bool,
bound_region: ty::BoundRegion,
current_index: ty::DebruijnIndex,
}
-impl<'a, 'gcx, 'tcx> Visitor<'gcx> for TyPathVisitor<'a, 'gcx, 'tcx> {
- fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'gcx> {
+impl Visitor<'tcx> for TyPathVisitor<'tcx> {
+ fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::OnlyBodies(&self.tcx.hir())
}
@@ -267,7 +267,7 @@
}
}
- fn visit_ty(&mut self, arg: &'gcx hir::Ty) {
+ fn visit_ty(&mut self, arg: &'tcx hir::Ty) {
// ignore nested types
//
// If you have a type like `Foo<'a, &Ty>` we
diff --git a/src/librustc/infer/error_reporting/nice_region_error/mod.rs b/src/librustc/infer/error_reporting/nice_region_error/mod.rs
index d995fe9..541d9a9 100644
--- a/src/librustc/infer/error_reporting/nice_region_error/mod.rs
+++ b/src/librustc/infer/error_reporting/nice_region_error/mod.rs
@@ -14,7 +14,7 @@
mod static_impl_trait;
mod util;
-impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
pub fn try_report_nice_region_error(&self, error: &RegionResolutionError<'tcx>) -> bool {
match *error {
ConcreteFailure(..) | SubSupConflict(..) => {}
@@ -30,16 +30,16 @@
}
}
-pub struct NiceRegionError<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+pub struct NiceRegionError<'cx, 'tcx: 'cx> {
+ infcx: &'cx InferCtxt<'cx, 'tcx>,
error: Option<RegionResolutionError<'tcx>>,
regions: Option<(Span, ty::Region<'tcx>, ty::Region<'tcx>)>,
tables: Option<&'cx ty::TypeckTables<'tcx>>,
}
-impl<'cx, 'gcx, 'tcx> NiceRegionError<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> NiceRegionError<'cx, 'tcx> {
pub fn new(
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+ infcx: &'cx InferCtxt<'cx, 'tcx>,
error: RegionResolutionError<'tcx>,
tables: Option<&'cx ty::TypeckTables<'tcx>>,
) -> Self {
@@ -47,7 +47,7 @@
}
pub fn new_from_span(
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+ infcx: &'cx InferCtxt<'cx, 'tcx>,
span: Span,
sub: ty::Region<'tcx>,
sup: ty::Region<'tcx>,
@@ -56,7 +56,7 @@
Self { infcx, error: None, regions: Some((span, sub, sup)), tables }
}
- fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
diff --git a/src/librustc/infer/error_reporting/nice_region_error/named_anon_conflict.rs b/src/librustc/infer/error_reporting/nice_region_error/named_anon_conflict.rs
index 0efc124..51bee49 100644
--- a/src/librustc/infer/error_reporting/nice_region_error/named_anon_conflict.rs
+++ b/src/librustc/infer/error_reporting/nice_region_error/named_anon_conflict.rs
@@ -5,7 +5,7 @@
use crate::ty;
use errors::{Applicability, DiagnosticBuilder};
-impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
/// When given a `ConcreteFailure` for a function with arguments containing a named region and
/// an anonymous region, emit an descriptive diagnostic error.
pub(super) fn try_report_named_anon_conflict(&self) -> Option<DiagnosticBuilder<'a>> {
diff --git a/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs b/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs
index af20188..6ed2b67 100644
--- a/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs
+++ b/src/librustc/infer/error_reporting/nice_region_error/outlives_closure.rs
@@ -9,7 +9,7 @@
use crate::util::common::ErrorReported;
use crate::infer::lexical_region_resolve::RegionResolutionError::SubSupConflict;
-impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
/// Print the error message for lifetime errors when binding escapes a closure.
///
/// Consider a case where we have
@@ -48,11 +48,11 @@
if let (&SubregionOrigin::BindingTypeIsNotValidAtDecl(ref external_span),
&RegionKind::ReFree(ref free_region)) = (&sub_origin, sup_region) {
let hir = &self.tcx().hir();
- if let Some(node_id) = hir.as_local_node_id(free_region.scope) {
+ if let Some(hir_id) = hir.as_local_hir_id(free_region.scope) {
if let Node::Expr(Expr {
node: Closure(_, _, _, closure_span, None),
..
- }) = hir.get(node_id) {
+ }) = hir.get_by_hir_id(hir_id) {
let sup_sp = sup_origin.span();
let origin_sp = origin.span();
let mut err = self.tcx().sess.struct_span_err(
diff --git a/src/librustc/infer/error_reporting/nice_region_error/placeholder_error.rs b/src/librustc/infer/error_reporting/nice_region_error/placeholder_error.rs
index 1dd3919..b4fb018 100644
--- a/src/librustc/infer/error_reporting/nice_region_error/placeholder_error.rs
+++ b/src/librustc/infer/error_reporting/nice_region_error/placeholder_error.rs
@@ -13,7 +13,7 @@
use std::fmt::{self, Write};
-impl NiceRegionError<'me, 'gcx, 'tcx> {
+impl NiceRegionError<'me, 'tcx> {
/// When given a `ConcreteFailure` for a function with arguments containing a named region and
/// an anonymous region, emit a descriptive diagnostic error.
pub(super) fn try_report_placeholder_conflict(&self) -> Option<DiagnosticBuilder<'me>> {
@@ -321,14 +321,14 @@
) {
// HACK(eddyb) maybe move this in a more central location.
#[derive(Copy, Clone)]
- struct Highlighted<'a, 'gcx, 'tcx, T> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ struct Highlighted<'tcx, T> {
+ tcx: TyCtxt<'tcx>,
highlight: RegionHighlightMode,
value: T,
}
- impl<'a, 'gcx, 'tcx, T> Highlighted<'a, 'gcx, 'tcx, T> {
- fn map<U>(self, f: impl FnOnce(T) -> U) -> Highlighted<'a, 'gcx, 'tcx, U> {
+ impl<'tcx, T> Highlighted<'tcx, T> {
+ fn map<U>(self, f: impl FnOnce(T) -> U) -> Highlighted<'tcx, U> {
Highlighted {
tcx: self.tcx,
highlight: self.highlight,
@@ -337,9 +337,11 @@
}
}
- impl<'a, 'gcx, 'tcx, T> fmt::Display for Highlighted<'a, 'gcx, 'tcx, T>
- where T: for<'b, 'c> Print<'gcx, 'tcx,
- FmtPrinter<'a, 'gcx, 'tcx, &'b mut fmt::Formatter<'c>>,
+ impl<'tcx, T> fmt::Display for Highlighted<'tcx, T>
+ where
+ T: for<'a, 'b, 'c> Print<
+ 'tcx,
+ FmtPrinter<'a, 'tcx, &'b mut fmt::Formatter<'c>>,
Error = fmt::Error,
>,
{
diff --git a/src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs b/src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs
index 23acaeb..9d405d4 100644
--- a/src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs
+++ b/src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs
@@ -6,7 +6,7 @@
use crate::util::common::ErrorReported;
use errors::Applicability;
-impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
/// Print the error message for lifetime errors when the return type is a static impl Trait.
pub(super) fn try_report_static_impl_trait(&self) -> Option<ErrorReported> {
if let Some(ref error) = self.error {
diff --git a/src/librustc/infer/error_reporting/nice_region_error/util.rs b/src/librustc/infer/error_reporting/nice_region_error/util.rs
index feade7a..f33f917 100644
--- a/src/librustc/infer/error_reporting/nice_region_error/util.rs
+++ b/src/librustc/infer/error_reporting/nice_region_error/util.rs
@@ -24,7 +24,7 @@
pub is_first: bool,
}
-impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
// This method walks the Type of the function body arguments using
// `fold_regions()` function and returns the
// &hir::Arg of the function argument corresponding to the anonymous
@@ -51,11 +51,11 @@
};
let hir = &self.tcx().hir();
- if let Some(node_id) = hir.as_local_node_id(id) {
- if let Some(body_id) = hir.maybe_body_owned_by(node_id) {
+ if let Some(hir_id) = hir.as_local_hir_id(id) {
+ if let Some(body_id) = hir.maybe_body_owned_by(hir_id) {
let body = hir.body(body_id);
let owner_id = hir.body_owner(body_id);
- let fn_decl = hir.fn_decl(owner_id).unwrap();
+ let fn_decl = hir.fn_decl_by_hir_id(owner_id).unwrap();
if let Some(tables) = self.tables {
body.arguments
.iter()
@@ -63,7 +63,7 @@
.filter_map(|(index, arg)| {
// May return None; sometimes the tables are not yet populated.
let ty_hir_id = fn_decl.inputs[index].hir_id;
- let arg_ty_span = hir.span(hir.hir_to_node_id(ty_hir_id));
+ let arg_ty_span = hir.span(ty_hir_id);
let ty = tables.node_type_opt(arg.hir_id)?;
let mut found_anon_region = false;
let new_arg_ty = self.tcx().fold_regions(&ty, &mut false, |r, _| {
diff --git a/src/librustc/infer/error_reporting/note.rs b/src/librustc/infer/error_reporting/note.rs
index 9eb46aa3..cc7c13c 100644
--- a/src/librustc/infer/error_reporting/note.rs
+++ b/src/librustc/infer/error_reporting/note.rs
@@ -4,7 +4,7 @@
use crate::ty::error::TypeError;
use errors::DiagnosticBuilder;
-impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
pub(super) fn note_region_origin(&self,
err: &mut DiagnosticBuilder<'_>,
origin: &SubregionOrigin<'tcx>) {
diff --git a/src/librustc/infer/freshen.rs b/src/librustc/infer/freshen.rs
index 8f52ef7..645f2b0 100644
--- a/src/librustc/infer/freshen.rs
+++ b/src/librustc/infer/freshen.rs
@@ -41,17 +41,16 @@
use super::InferCtxt;
use super::unify_key::ToType;
-pub struct TypeFreshener<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+pub struct TypeFreshener<'a, 'tcx: 'a> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
ty_freshen_count: u32,
const_freshen_count: u32,
ty_freshen_map: FxHashMap<ty::InferTy, Ty<'tcx>>,
const_freshen_map: FxHashMap<ty::InferConst<'tcx>, &'tcx ty::Const<'tcx>>,
}
-impl<'a, 'gcx, 'tcx> TypeFreshener<'a, 'gcx, 'tcx> {
- pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>)
- -> TypeFreshener<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> TypeFreshener<'a, 'tcx> {
+ pub fn new(infcx: &'a InferCtxt<'a, 'tcx>) -> TypeFreshener<'a, 'tcx> {
TypeFreshener {
infcx,
ty_freshen_count: 0,
@@ -113,8 +112,8 @@
}
}
-impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for TypeFreshener<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
+impl<'a, 'tcx> TypeFolder<'tcx> for TypeFreshener<'a, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
diff --git a/src/librustc/infer/fudge.rs b/src/librustc/infer/fudge.rs
index 7461d8b..5936486 100644
--- a/src/librustc/infer/fudge.rs
+++ b/src/librustc/infer/fudge.rs
@@ -22,7 +22,7 @@
}).collect())
}
-impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
/// This rather funky routine is used while processing expected
/// types. What happens here is that we want to propagate a
/// coercion through the return type of a fn to its
@@ -133,8 +133,8 @@
}
}
-pub struct InferenceFudger<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+pub struct InferenceFudger<'a, 'tcx: 'a> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
type_vars: (Range<TyVid>, Vec<TypeVariableOrigin>),
int_vars: Range<IntVid>,
float_vars: Range<FloatVid>,
@@ -142,8 +142,8 @@
const_vars: (Range<ConstVid<'tcx>>, Vec<ConstVariableOrigin>),
}
-impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for InferenceFudger<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
+impl<'a, 'tcx> TypeFolder<'tcx> for InferenceFudger<'a, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
diff --git a/src/librustc/infer/glb.rs b/src/librustc/infer/glb.rs
index 63a8f17..7f184d3 100644
--- a/src/librustc/infer/glb.rs
+++ b/src/librustc/infer/glb.rs
@@ -8,25 +8,24 @@
use crate::ty::relate::{Relate, RelateResult, TypeRelation};
/// "Greatest lower bound" (common subtype)
-pub struct Glb<'combine, 'infcx: 'combine, 'gcx: 'infcx+'tcx, 'tcx: 'infcx> {
- fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>,
+pub struct Glb<'combine, 'infcx: 'combine, 'tcx: 'infcx> {
+ fields: &'combine mut CombineFields<'infcx, 'tcx>,
a_is_expected: bool,
}
-impl<'combine, 'infcx, 'gcx, 'tcx> Glb<'combine, 'infcx, 'gcx, 'tcx> {
- pub fn new(fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, a_is_expected: bool)
- -> Glb<'combine, 'infcx, 'gcx, 'tcx>
- {
+impl<'combine, 'infcx, 'tcx> Glb<'combine, 'infcx, 'tcx> {
+ pub fn new(
+ fields: &'combine mut CombineFields<'infcx, 'tcx>,
+ a_is_expected: bool,
+ ) -> Glb<'combine, 'infcx, 'tcx> {
Glb { fields: fields, a_is_expected: a_is_expected }
}
}
-impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx>
- for Glb<'combine, 'infcx, 'gcx, 'tcx>
-{
+impl TypeRelation<'tcx> for Glb<'combine, 'infcx, 'tcx> {
fn tag(&self) -> &'static str { "Glb" }
- fn tcx(&self) -> TyCtxt<'infcx, 'gcx, 'tcx> { self.fields.tcx() }
+ fn tcx(&self) -> TyCtxt<'tcx> { self.fields.tcx() }
fn a_is_expected(&self) -> bool { self.a_is_expected }
@@ -87,10 +86,8 @@
}
}
-impl<'combine, 'infcx, 'gcx, 'tcx> LatticeDir<'infcx, 'gcx, 'tcx>
- for Glb<'combine, 'infcx, 'gcx, 'tcx>
-{
- fn infcx(&self) -> &'infcx InferCtxt<'infcx, 'gcx, 'tcx> {
+impl<'combine, 'infcx, 'tcx> LatticeDir<'infcx, 'tcx> for Glb<'combine, 'infcx, 'tcx> {
+ fn infcx(&self) -> &'infcx InferCtxt<'infcx, 'tcx> {
self.fields.infcx
}
diff --git a/src/librustc/infer/higher_ranked/mod.rs b/src/librustc/infer/higher_ranked/mod.rs
index fcec820..542ac49 100644
--- a/src/librustc/infer/higher_ranked/mod.rs
+++ b/src/librustc/infer/higher_ranked/mod.rs
@@ -9,7 +9,7 @@
use crate::ty::{self, Binder, TypeFoldable};
use crate::mir::interpret::ConstValue;
-impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> CombineFields<'a, 'tcx> {
pub fn higher_ranked_sub<T>(
&mut self,
a: &Binder<T>,
@@ -60,7 +60,7 @@
}
}
-impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
/// Replaces all regions (resp. types) bound by `binder` with placeholder
/// regions (resp. types) and return a map indicating which bound-region
/// placeholder region. This is the first step of checking subtyping
diff --git a/src/librustc/infer/lattice.rs b/src/librustc/infer/lattice.rs
index 0053b4c..c776663 100644
--- a/src/librustc/infer/lattice.rs
+++ b/src/librustc/infer/lattice.rs
@@ -27,8 +27,8 @@
use crate::ty::{self, Ty};
use crate::ty::relate::{RelateResult, TypeRelation};
-pub trait LatticeDir<'f, 'gcx: 'f+'tcx, 'tcx: 'f> : TypeRelation<'f, 'gcx, 'tcx> {
- fn infcx(&self) -> &'f InferCtxt<'f, 'gcx, 'tcx>;
+pub trait LatticeDir<'f, 'tcx: 'f>: TypeRelation<'tcx> {
+ fn infcx(&self) -> &'f InferCtxt<'f, 'tcx>;
fn cause(&self) -> &ObligationCause<'tcx>;
@@ -41,11 +41,14 @@
fn relate_bound(&mut self, v: Ty<'tcx>, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, ()>;
}
-pub fn super_lattice_tys<'a, 'gcx, 'tcx, L>(this: &mut L,
- a: Ty<'tcx>,
- b: Ty<'tcx>)
- -> RelateResult<'tcx, Ty<'tcx>>
- where L: LatticeDir<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
+pub fn super_lattice_tys<'a, 'tcx, L>(
+ this: &mut L,
+ a: Ty<'tcx>,
+ b: Ty<'tcx>,
+) -> RelateResult<'tcx, Ty<'tcx>>
+where
+ L: LatticeDir<'a, 'tcx>,
+ 'tcx: 'a,
{
debug!("{}.lattice_tys({:?}, {:?})",
this.tag(),
diff --git a/src/librustc/infer/lexical_region_resolve/graphviz.rs b/src/librustc/infer/lexical_region_resolve/graphviz.rs
index 1878afd..aa4bbca 100644
--- a/src/librustc/infer/lexical_region_resolve/graphviz.rs
+++ b/src/librustc/infer/lexical_region_resolve/graphviz.rs
@@ -44,10 +44,10 @@
");
}
-pub fn maybe_print_constraints_for<'a, 'gcx, 'tcx>(
+pub fn maybe_print_constraints_for<'a, 'tcx>(
region_data: &RegionConstraintData<'tcx>,
- region_rels: &RegionRelations<'a, 'gcx, 'tcx>)
-{
+ region_rels: &RegionRelations<'a, 'tcx>,
+) {
let tcx = region_rels.tcx;
let context = region_rels.context;
@@ -107,9 +107,9 @@
}
}
-struct ConstraintGraph<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
+struct ConstraintGraph<'a, 'tcx: 'a> {
graph_name: String,
- region_rels: &'a RegionRelations<'a, 'gcx, 'tcx>,
+ region_rels: &'a RegionRelations<'a, 'tcx>,
map: &'a BTreeMap<Constraint<'tcx>, SubregionOrigin<'tcx>>,
node_ids: FxHashMap<Node, usize>,
}
@@ -126,11 +126,12 @@
EnclScope(region::Scope, region::Scope),
}
-impl<'a, 'gcx, 'tcx> ConstraintGraph<'a, 'gcx, 'tcx> {
- fn new(name: String,
- region_rels: &'a RegionRelations<'a, 'gcx, 'tcx>,
- map: &'a ConstraintMap<'tcx>)
- -> ConstraintGraph<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> ConstraintGraph<'a, 'tcx> {
+ fn new(
+ name: String,
+ region_rels: &'a RegionRelations<'a, 'tcx>,
+ map: &'a ConstraintMap<'tcx>,
+ ) -> ConstraintGraph<'a, 'tcx> {
let mut i = 0;
let mut node_ids = FxHashMap::default();
{
@@ -161,7 +162,7 @@
}
}
-impl<'a, 'gcx, 'tcx> dot::Labeller<'a> for ConstraintGraph<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> dot::Labeller<'a> for ConstraintGraph<'a, 'tcx> {
type Node = Node;
type Edge = Edge<'tcx>;
fn graph_id(&self) -> dot::Id<'_> {
@@ -215,7 +216,7 @@
}
}
-impl<'a, 'gcx, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'tcx> {
type Node = Node;
type Edge = Edge<'tcx>;
fn nodes(&self) -> dot::Nodes<'_, Node> {
@@ -246,10 +247,11 @@
pub type ConstraintMap<'tcx> = BTreeMap<Constraint<'tcx>, SubregionOrigin<'tcx>>;
-fn dump_region_data_to<'a, 'gcx, 'tcx>(region_rels: &RegionRelations<'a, 'gcx, 'tcx>,
- map: &ConstraintMap<'tcx>,
- path: &str)
- -> io::Result<()> {
+fn dump_region_data_to<'a, 'tcx>(
+ region_rels: &RegionRelations<'a, 'tcx>,
+ map: &ConstraintMap<'tcx>,
+ path: &str,
+) -> io::Result<()> {
debug!("dump_region_data map (len: {}) path: {}",
map.len(),
path);
diff --git a/src/librustc/infer/lexical_region_resolve/mod.rs b/src/librustc/infer/lexical_region_resolve/mod.rs
index bde2ed5..16f5a9d 100644
--- a/src/librustc/infer/lexical_region_resolve/mod.rs
+++ b/src/librustc/infer/lexical_region_resolve/mod.rs
@@ -30,13 +30,10 @@
/// assuming such values can be found. It returns the final values of
/// all the variables as well as a set of errors that must be reported.
pub fn resolve<'tcx>(
- region_rels: &RegionRelations<'_, '_, 'tcx>,
+ region_rels: &RegionRelations<'_, 'tcx>,
var_infos: VarInfos,
data: RegionConstraintData<'tcx>,
-) -> (
- LexicalRegionResolutions<'tcx>,
- Vec<RegionResolutionError<'tcx>>,
-) {
+) -> (LexicalRegionResolutions<'tcx>, Vec<RegionResolutionError<'tcx>>) {
debug!("RegionConstraintData: resolve_regions()");
let mut errors = vec![];
let mut resolver = LexicalResolver {
@@ -96,14 +93,14 @@
type RegionGraph<'tcx> = Graph<(), Constraint<'tcx>>;
-struct LexicalResolver<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- region_rels: &'cx RegionRelations<'cx, 'gcx, 'tcx>,
+struct LexicalResolver<'cx, 'tcx: 'cx> {
+ region_rels: &'cx RegionRelations<'cx, 'tcx>,
var_infos: VarInfos,
data: RegionConstraintData<'tcx>,
}
-impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> {
- fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.region_rels.tcx
}
@@ -136,14 +133,14 @@
/// Initially, the value for all variables is set to `'empty`, the
/// empty region. The `expansion` phase will grow this larger.
- fn construct_var_data(&self, tcx: TyCtxt<'_, '_, 'tcx>) -> LexicalRegionResolutions<'tcx> {
+ fn construct_var_data(&self, tcx: TyCtxt<'tcx>) -> LexicalRegionResolutions<'tcx> {
LexicalRegionResolutions {
error_region: tcx.lifetimes.re_static,
values: IndexVec::from_elem_n(VarValue::Value(tcx.lifetimes.re_empty), self.num_vars())
}
}
- fn dump_constraints(&self, free_regions: &RegionRelations<'_, '_, 'tcx>) {
+ fn dump_constraints(&self, free_regions: &RegionRelations<'_, 'tcx>) {
debug!(
"----() Start constraint listing (context={:?}) ()----",
free_regions.context
@@ -785,7 +782,7 @@
}
impl<'tcx> LexicalRegionResolutions<'tcx> {
- fn normalize<T>(&self, tcx: TyCtxt<'_, '_, 'tcx>, value: T) -> T
+ fn normalize<T>(&self, tcx: TyCtxt<'tcx>, value: T) -> T
where
T: TypeFoldable<'tcx>,
{
diff --git a/src/librustc/infer/lub.rs b/src/librustc/infer/lub.rs
index 29b319e..2a9f585 100644
--- a/src/librustc/infer/lub.rs
+++ b/src/librustc/infer/lub.rs
@@ -8,25 +8,24 @@
use crate::ty::relate::{Relate, RelateResult, TypeRelation};
/// "Least upper bound" (common supertype)
-pub struct Lub<'combine, 'infcx: 'combine, 'gcx: 'infcx+'tcx, 'tcx: 'infcx> {
- fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>,
+pub struct Lub<'combine, 'infcx: 'combine, 'tcx: 'infcx> {
+ fields: &'combine mut CombineFields<'infcx, 'tcx>,
a_is_expected: bool,
}
-impl<'combine, 'infcx, 'gcx, 'tcx> Lub<'combine, 'infcx, 'gcx, 'tcx> {
- pub fn new(fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, a_is_expected: bool)
- -> Lub<'combine, 'infcx, 'gcx, 'tcx>
- {
+impl<'combine, 'infcx, 'tcx> Lub<'combine, 'infcx, 'tcx> {
+ pub fn new(
+ fields: &'combine mut CombineFields<'infcx, 'tcx>,
+ a_is_expected: bool,
+ ) -> Lub<'combine, 'infcx, 'tcx> {
Lub { fields: fields, a_is_expected: a_is_expected }
}
}
-impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx>
- for Lub<'combine, 'infcx, 'gcx, 'tcx>
-{
+impl TypeRelation<'tcx> for Lub<'combine, 'infcx, 'tcx> {
fn tag(&self) -> &'static str { "Lub" }
- fn tcx(&self) -> TyCtxt<'infcx, 'gcx, 'tcx> { self.fields.tcx() }
+ fn tcx(&self) -> TyCtxt<'tcx> { self.fields.tcx() }
fn a_is_expected(&self) -> bool { self.a_is_expected }
@@ -87,10 +86,8 @@
}
}
-impl<'combine, 'infcx, 'gcx, 'tcx> LatticeDir<'infcx, 'gcx, 'tcx>
- for Lub<'combine, 'infcx, 'gcx, 'tcx>
-{
- fn infcx(&self) -> &'infcx InferCtxt<'infcx, 'gcx, 'tcx> {
+impl<'combine, 'infcx, 'tcx> LatticeDir<'infcx, 'tcx> for Lub<'combine, 'infcx, 'tcx> {
+ fn infcx(&self) -> &'infcx InferCtxt<'infcx, 'tcx> {
self.fields.infcx
}
diff --git a/src/librustc/infer/mod.rs b/src/librustc/infer/mod.rs
index 57ab2c9..47a276b 100644
--- a/src/librustc/infer/mod.rs
+++ b/src/librustc/infer/mod.rs
@@ -91,7 +91,7 @@
/// Indicates that the MIR borrowck will repeat these region
/// checks, so we should ignore errors if NLL is (unconditionally)
/// enabled.
- pub fn when_nll_is_enabled(tcx: TyCtxt<'_, '_, '_>) -> Self {
+ pub fn when_nll_is_enabled(tcx: TyCtxt<'_>) -> Self {
match tcx.borrowck_mode() {
// If we're on Migrate mode, report AST region errors
BorrowckMode::Migrate => SuppressRegionErrors { suppressed: false },
@@ -102,8 +102,8 @@
}
}
-pub struct InferCtxt<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub struct InferCtxt<'a, 'tcx> {
+ pub tcx: TyCtxt<'tcx>,
/// During type-checking/inference of a body, `in_progress_tables`
/// contains a reference to the tables being built up, which are
@@ -464,14 +464,14 @@
/// Helper type of a temporary returned by `tcx.infer_ctxt()`.
/// Necessary because we can't write the following bound:
-/// `F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(InferCtxt<'b, 'gcx, 'tcx>)`.
-pub struct InferCtxtBuilder<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- global_tcx: TyCtxt<'a, 'gcx, 'gcx>,
+/// `F: for<'b, 'tcx> where 'tcx FnOnce(InferCtxt<'b, 'tcx>)`.
+pub struct InferCtxtBuilder<'tcx> {
+ global_tcx: TyCtxt<'tcx>,
fresh_tables: Option<RefCell<ty::TypeckTables<'tcx>>>,
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'gcx> {
- pub fn infer_ctxt(self) -> InferCtxtBuilder<'a, 'gcx, 'tcx> {
+impl TyCtxt<'tcx> {
+ pub fn infer_ctxt(self) -> InferCtxtBuilder<'tcx> {
InferCtxtBuilder {
global_tcx: self,
fresh_tables: None,
@@ -479,7 +479,7 @@
}
}
-impl<'a, 'gcx, 'tcx> InferCtxtBuilder<'a, 'gcx, 'tcx> {
+impl<'tcx> InferCtxtBuilder<'tcx> {
/// Used only by `rustc_typeck` during body type-checking/inference,
/// will initialize `in_progress_tables` with fresh `TypeckTables`.
pub fn with_fresh_in_progress_tables(mut self, table_owner: DefId) -> Self {
@@ -495,10 +495,10 @@
/// the bound values in `C` to their instantiated values in `V`
/// (in other words, `S(C) = V`).
pub fn enter_with_canonical<T, R>(
- &'tcx mut self,
+ &mut self,
span: Span,
canonical: &Canonical<'tcx, T>,
- f: impl for<'b> FnOnce(InferCtxt<'b, 'gcx, 'tcx>, T, CanonicalVarValues<'tcx>) -> R,
+ f: impl for<'a> FnOnce(InferCtxt<'a, 'tcx>, T, CanonicalVarValues<'tcx>) -> R,
) -> R
where
T: TypeFoldable<'tcx>,
@@ -510,7 +510,7 @@
})
}
- pub fn enter<R>(&'tcx mut self, f: impl for<'b> FnOnce(InferCtxt<'b, 'gcx, 'tcx>) -> R) -> R {
+ pub fn enter<R>(&mut self, f: impl for<'a> FnOnce(InferCtxt<'a, 'tcx>) -> R) -> R {
let InferCtxtBuilder {
global_tcx,
ref fresh_tables,
@@ -567,7 +567,7 @@
/// Extracts `value`, registering any obligations into `fulfill_cx`.
pub fn into_value_registering_obligations(
self,
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
fulfill_cx: &mut dyn TraitEngine<'tcx>,
) -> T {
let InferOk { value, obligations } = self;
@@ -598,7 +598,7 @@
_in_progress_tables: Option<Ref<'a, ty::TypeckTables<'tcx>>>,
}
-impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
pub fn is_in_snapshot(&self) -> bool {
self.in_snapshot.get()
}
@@ -614,7 +614,7 @@
}
}
- pub fn freshener<'b>(&'b self) -> TypeFreshener<'b, 'gcx, 'tcx> {
+ pub fn freshener<'b>(&'b self) -> TypeFreshener<'b, 'tcx> {
freshen::TypeFreshener::new(self)
}
@@ -677,7 +677,7 @@
&'a self,
trace: TypeTrace<'tcx>,
param_env: ty::ParamEnv<'tcx>,
- ) -> CombineFields<'a, 'gcx, 'tcx> {
+ ) -> CombineFields<'a, 'tcx> {
CombineFields {
infcx: self,
trace,
@@ -1548,13 +1548,13 @@
}
}
-pub struct ShallowResolver<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+pub struct ShallowResolver<'a, 'tcx> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
}
-impl<'a, 'gcx, 'tcx> ShallowResolver<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> ShallowResolver<'a, 'tcx> {
#[inline(always)]
- pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) -> Self {
+ pub fn new(infcx: &'a InferCtxt<'a, 'tcx>) -> Self {
ShallowResolver { infcx }
}
@@ -1599,8 +1599,8 @@
}
}
-impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for ShallowResolver<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
+impl<'a, 'tcx> TypeFolder<'tcx> for ShallowResolver<'a, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
@@ -1624,7 +1624,7 @@
}
}
-impl<'a, 'gcx, 'tcx> TypeTrace<'tcx> {
+impl<'tcx> TypeTrace<'tcx> {
pub fn span(&self) -> Span {
self.cause.span
}
@@ -1641,7 +1641,7 @@
}
}
- pub fn dummy(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> TypeTrace<'tcx> {
+ pub fn dummy(tcx: TyCtxt<'tcx>) -> TypeTrace<'tcx> {
TypeTrace {
cause: ObligationCause::dummy(),
values: Types(ExpectedFound {
diff --git a/src/librustc/infer/nll_relate/mod.rs b/src/librustc/infer/nll_relate/mod.rs
index c00fa9b..2c821d0 100644
--- a/src/librustc/infer/nll_relate/mod.rs
+++ b/src/librustc/infer/nll_relate/mod.rs
@@ -38,11 +38,11 @@
Eager,
}
-pub struct TypeRelating<'me, 'gcx: 'tcx, 'tcx: 'me, D>
+pub struct TypeRelating<'me, 'tcx: 'me, D>
where
D: TypeRelatingDelegate<'tcx>,
{
- infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,
+ infcx: &'me InferCtxt<'me, 'tcx>,
/// Callback to use when we deduce an outlives relationship
delegate: D,
@@ -135,12 +135,12 @@
#[derive(Copy, Clone)]
struct UniversallyQuantified(bool);
-impl<'me, 'gcx, 'tcx, D> TypeRelating<'me, 'gcx, 'tcx, D>
+impl<'me, 'tcx, D> TypeRelating<'me, 'tcx, D>
where
D: TypeRelatingDelegate<'tcx>,
{
pub fn new(
- infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,
+ infcx: &'me InferCtxt<'me, 'tcx>,
delegate: D,
ambient_variance: ty::Variance,
) -> Self {
@@ -416,7 +416,7 @@
/// for more details on why we want them.
fn vid_scopes<D: TypeRelatingDelegate<'tcx>>(
&self,
- relate: &'r mut TypeRelating<'_, '_, 'tcx, D>,
+ relate: &'r mut TypeRelating<'_, 'tcx, D>,
) -> &'r mut Vec<BoundRegionScope<'tcx>>;
/// Given a generalized type G that should replace the vid, relate
@@ -424,7 +424,7 @@
/// appeared.
fn relate_generalized_ty<D>(
&self,
- relate: &mut TypeRelating<'_, '_, 'tcx, D>,
+ relate: &mut TypeRelating<'_, 'tcx, D>,
generalized_ty: Ty<'tcx>,
) -> RelateResult<'tcx, Ty<'tcx>>
where
@@ -442,7 +442,7 @@
fn vid_scopes<D>(
&self,
- relate: &'r mut TypeRelating<'_, '_, 'tcx, D>,
+ relate: &'r mut TypeRelating<'_, 'tcx, D>,
) -> &'r mut Vec<BoundRegionScope<'tcx>>
where
D: TypeRelatingDelegate<'tcx>,
@@ -452,7 +452,7 @@
fn relate_generalized_ty<D>(
&self,
- relate: &mut TypeRelating<'_, '_, 'tcx, D>,
+ relate: &mut TypeRelating<'_, 'tcx, D>,
generalized_ty: Ty<'tcx>,
) -> RelateResult<'tcx, Ty<'tcx>>
where
@@ -474,7 +474,7 @@
fn vid_scopes<D>(
&self,
- relate: &'r mut TypeRelating<'_, '_, 'tcx, D>,
+ relate: &'r mut TypeRelating<'_, 'tcx, D>,
) -> &'r mut Vec<BoundRegionScope<'tcx>>
where
D: TypeRelatingDelegate<'tcx>,
@@ -484,7 +484,7 @@
fn relate_generalized_ty<D>(
&self,
- relate: &mut TypeRelating<'_, '_, 'tcx, D>,
+ relate: &mut TypeRelating<'_, 'tcx, D>,
generalized_ty: Ty<'tcx>,
) -> RelateResult<'tcx, Ty<'tcx>>
where
@@ -494,11 +494,11 @@
}
}
-impl<D> TypeRelation<'me, 'gcx, 'tcx> for TypeRelating<'me, 'gcx, 'tcx, D>
+impl<D> TypeRelation<'tcx> for TypeRelating<'me, 'tcx, D>
where
D: TypeRelatingDelegate<'tcx>,
{
- fn tcx(&self) -> TyCtxt<'me, 'gcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
@@ -798,11 +798,11 @@
/// scopes.
///
/// [blog post]: https://is.gd/0hKvIr
-struct TypeGeneralizer<'me, 'gcx: 'tcx, 'tcx: 'me, D>
+struct TypeGeneralizer<'me, 'tcx: 'me, D>
where
D: TypeRelatingDelegate<'tcx> + 'me,
{
- infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,
+ infcx: &'me InferCtxt<'me, 'tcx>,
delegate: &'me mut D,
@@ -823,11 +823,11 @@
universe: ty::UniverseIndex,
}
-impl<D> TypeRelation<'me, 'gcx, 'tcx> for TypeGeneralizer<'me, 'gcx, 'tcx, D>
+impl<D> TypeRelation<'tcx> for TypeGeneralizer<'me, 'tcx, D>
where
D: TypeRelatingDelegate<'tcx>,
{
- fn tcx(&self) -> TyCtxt<'me, 'gcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
diff --git a/src/librustc/infer/opaque_types/mod.rs b/src/librustc/infer/opaque_types/mod.rs
index aa415db..328ace5 100644
--- a/src/librustc/infer/opaque_types/mod.rs
+++ b/src/librustc/infer/opaque_types/mod.rs
@@ -73,7 +73,7 @@
pub origin: hir::ExistTyOrigin,
}
-impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
/// Replaces all opaque types in `value` with fresh inference variables
/// and creates appropriate obligations. For example, given the input:
///
@@ -307,7 +307,7 @@
let required_region_bounds = tcx.required_region_bounds(
opaque_type,
- bounds.predicates.clone(),
+ bounds.predicates,
);
debug_assert!(!required_region_bounds.is_empty());
@@ -430,8 +430,8 @@
&self,
def_id: DefId,
opaque_defn: &OpaqueTypeDecl<'tcx>,
- instantiated_ty: Ty<'gcx>,
- ) -> Ty<'gcx> {
+ instantiated_ty: Ty<'tcx>,
+ ) -> Ty<'tcx> {
debug!(
"infer_opaque_definition_from_instantiation(def_id={:?}, instantiated_ty={:?})",
def_id, instantiated_ty
@@ -446,7 +446,7 @@
// `impl Trait` return type, resulting in the parameters
// shifting.
let id_substs = InternalSubsts::identity_for_item(gcx, def_id);
- let map: FxHashMap<Kind<'tcx>, Kind<'gcx>> = opaque_defn
+ let map: FxHashMap<Kind<'tcx>, Kind<'tcx>> = opaque_defn
.substs
.iter()
.enumerate()
@@ -470,7 +470,7 @@
);
// We can unwrap here because our reverse mapper always
- // produces things with 'gcx lifetime, though the type folder
+ // produces things with 'tcx lifetime, though the type folder
// obscures that.
let definition_ty = gcx.lift(&definition_ty).unwrap();
@@ -491,14 +491,13 @@
//
// We ignore any type parameters because impl trait values are assumed to
// capture all the in-scope type parameters.
-struct OpaqueTypeOutlivesVisitor<'a, 'gcx, 'tcx> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+struct OpaqueTypeOutlivesVisitor<'a, 'tcx> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
least_region: ty::Region<'tcx>,
span: Span,
}
-impl<'tcx> TypeVisitor<'tcx> for OpaqueTypeOutlivesVisitor<'_, '_, 'tcx>
-{
+impl<'tcx> TypeVisitor<'tcx> for OpaqueTypeOutlivesVisitor<'_, 'tcx> {
fn visit_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> bool {
t.skip_binder().visit_with(self);
false // keep visiting
@@ -552,27 +551,27 @@
}
}
-struct ReverseMapper<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+struct ReverseMapper<'tcx> {
+ tcx: TyCtxt<'tcx>,
/// If errors have already been reported in this fn, we suppress
/// our own errors because they are sometimes derivative.
tainted_by_errors: bool,
opaque_type_def_id: DefId,
- map: FxHashMap<Kind<'tcx>, Kind<'gcx>>,
+ map: FxHashMap<Kind<'tcx>, Kind<'tcx>>,
map_missing_regions_to_empty: bool,
/// initially `Some`, set to `None` once error has been reported
hidden_ty: Option<Ty<'tcx>>,
}
-impl<'cx, 'gcx, 'tcx> ReverseMapper<'cx, 'gcx, 'tcx> {
+impl ReverseMapper<'tcx> {
fn new(
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
tainted_by_errors: bool,
opaque_type_def_id: DefId,
- map: FxHashMap<Kind<'tcx>, Kind<'gcx>>,
+ map: FxHashMap<Kind<'tcx>, Kind<'tcx>>,
hidden_ty: Ty<'tcx>,
) -> Self {
Self {
@@ -599,8 +598,8 @@
}
}
-impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for ReverseMapper<'cx, 'gcx, 'tcx> {
- fn tcx(&self) -> TyCtxt<'_, 'gcx, 'tcx> {
+impl TypeFolder<'tcx> for ReverseMapper<'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
@@ -724,8 +723,8 @@
}
}
-struct Instantiator<'a, 'gcx: 'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+struct Instantiator<'a, 'tcx: 'a> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
parent_def_id: DefId,
body_id: hir::HirId,
param_env: ty::ParamEnv<'tcx>,
@@ -733,7 +732,7 @@
obligations: Vec<PredicateObligation<'tcx>>,
}
-impl<'a, 'gcx, 'tcx> Instantiator<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Instantiator<'a, 'tcx> {
fn instantiate_opaque_types_in_map<T: TypeFoldable<'tcx>>(&mut self, value: &T) -> T {
debug!("instantiate_opaque_types_in_map(value={:?})", value);
let tcx = self.infcx.tcx;
@@ -820,7 +819,7 @@
},
_ => bug!(
"expected (impl) item, found {}",
- tcx.hir().hir_to_string(opaque_hir_id),
+ tcx.hir().node_to_string(opaque_hir_id),
),
};
if in_definition_scope {
@@ -944,7 +943,7 @@
/// and `opaque_hir_id` is the `HirId` of the definition of the existential type `Baz`.
/// For the above example, this function returns `true` for `f1` and `false` for `f2`.
pub fn may_define_existential_type(
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
def_id: DefId,
opaque_hir_id: hir::HirId,
) -> bool {
diff --git a/src/librustc/infer/outlives/env.rs b/src/librustc/infer/outlives/env.rs
index 3e626999..4b5df44 100644
--- a/src/librustc/infer/outlives/env.rs
+++ b/src/librustc/infer/outlives/env.rs
@@ -67,7 +67,7 @@
/// because of implied bounds.
pub type RegionBoundPairs<'tcx> = Vec<(ty::Region<'tcx>, GenericKind<'tcx>)>;
-impl<'a, 'gcx: 'tcx, 'tcx: 'a> OutlivesEnvironment<'tcx> {
+impl<'a, 'tcx: 'a> OutlivesEnvironment<'tcx> {
pub fn new(param_env: ty::ParamEnv<'tcx>) -> Self {
let mut env = OutlivesEnvironment {
param_env,
@@ -160,7 +160,7 @@
/// Tests: `src/test/compile-fail/regions-free-region-ordering-*.rs`
pub fn add_implied_bounds(
&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'a, 'tcx>,
fn_sig_tys: &[Ty<'tcx>],
body_id: hir::HirId,
span: Span,
@@ -190,11 +190,8 @@
/// contain inference variables, it must be supplied, in which
/// case we will register "givens" on the inference context. (See
/// `RegionConstraintData`.)
- fn add_outlives_bounds<I>(
- &mut self,
- infcx: Option<&InferCtxt<'a, 'gcx, 'tcx>>,
- outlives_bounds: I,
- ) where
+ fn add_outlives_bounds<I>(&mut self, infcx: Option<&InferCtxt<'a, 'tcx>>, outlives_bounds: I)
+ where
I: IntoIterator<Item = OutlivesBound<'tcx>>,
{
// Record relationships such as `T:'x` that don't go into the
diff --git a/src/librustc/infer/outlives/free_region_map.rs b/src/librustc/infer/outlives/free_region_map.rs
index 5349e99..1250995 100644
--- a/src/librustc/infer/outlives/free_region_map.rs
+++ b/src/librustc/infer/outlives/free_region_map.rs
@@ -28,11 +28,12 @@
/// cases, this is more conservative than necessary, in order to
/// avoid making arbitrary choices. See
/// `TransitiveRelation::postdom_upper_bound` for more details.
- pub fn lub_free_regions<'a, 'gcx>(&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- r_a: Region<'tcx>,
- r_b: Region<'tcx>)
- -> Region<'tcx> {
+ pub fn lub_free_regions(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ r_a: Region<'tcx>,
+ r_b: Region<'tcx>,
+ ) -> Region<'tcx> {
debug!("lub_free_regions(r_a={:?}, r_b={:?})", r_a, r_b);
assert!(is_free(r_a));
assert!(is_free(r_b));
@@ -90,7 +91,7 @@
impl<'a, 'tcx> Lift<'tcx> for FreeRegionMap<'a> {
type Lifted = FreeRegionMap<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<FreeRegionMap<'tcx>> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<FreeRegionMap<'tcx>> {
self.relation.maybe_map(|&fr| tcx.lift(&fr))
.map(|relation| FreeRegionMap { relation })
}
diff --git a/src/librustc/infer/outlives/obligations.rs b/src/librustc/infer/outlives/obligations.rs
index 90b3be2..671718b 100644
--- a/src/librustc/infer/outlives/obligations.rs
+++ b/src/librustc/infer/outlives/obligations.rs
@@ -69,7 +69,7 @@
use crate::ty::{self, Region, Ty, TyCtxt, TypeFoldable};
use crate::ty::subst::UnpackedKind;
-impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
/// Registers that the given region obligation must be resolved
/// from within the scope of `body_id`. These regions are enqueued
/// and later processed by regionck, when full type information is
@@ -226,15 +226,15 @@
/// via a "delegate" of type `D` -- this is usually the `infcx`, which
/// accrues them into the `region_obligations` code, but for NLL we
/// use something else.
-pub struct TypeOutlives<'cx, 'gcx: 'tcx, 'tcx: 'cx, D>
+pub struct TypeOutlives<'cx, 'tcx: 'cx, D>
where
D: TypeOutlivesDelegate<'tcx>,
{
// See the comments on `process_registered_region_obligations` for the meaning
// of these fields.
delegate: D,
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- verify_bound: VerifyBoundCx<'cx, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
+ verify_bound: VerifyBoundCx<'cx, 'tcx>,
}
pub trait TypeOutlivesDelegate<'tcx> {
@@ -254,13 +254,13 @@
);
}
-impl<'cx, 'gcx, 'tcx, D> TypeOutlives<'cx, 'gcx, 'tcx, D>
+impl<'cx, 'tcx, D> TypeOutlives<'cx, 'tcx, D>
where
D: TypeOutlivesDelegate<'tcx>,
{
pub fn new(
delegate: D,
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
region_bound_pairs: &'cx RegionBoundPairs<'tcx>,
implicit_region_bound: Option<ty::Region<'tcx>>,
param_env: ty::ParamEnv<'tcx>,
@@ -487,7 +487,7 @@
}
}
-impl<'cx, 'gcx, 'tcx> TypeOutlivesDelegate<'tcx> for &'cx InferCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> TypeOutlivesDelegate<'tcx> for &'cx InferCtxt<'cx, 'tcx> {
fn push_sub_region_constraint(
&mut self,
origin: SubregionOrigin<'tcx>,
diff --git a/src/librustc/infer/outlives/verify.rs b/src/librustc/infer/outlives/verify.rs
index e1ad5ae..96335e1 100644
--- a/src/librustc/infer/outlives/verify.rs
+++ b/src/librustc/infer/outlives/verify.rs
@@ -12,16 +12,16 @@
/// via a "delegate" of type `D` -- this is usually the `infcx`, which
/// accrues them into the `region_obligations` code, but for NLL we
/// use something else.
-pub struct VerifyBoundCx<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+pub struct VerifyBoundCx<'cx, 'tcx: 'cx> {
+ tcx: TyCtxt<'tcx>,
region_bound_pairs: &'cx RegionBoundPairs<'tcx>,
implicit_region_bound: Option<ty::Region<'tcx>>,
param_env: ty::ParamEnv<'tcx>,
}
-impl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> {
pub fn new(
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
region_bound_pairs: &'cx RegionBoundPairs<'tcx>,
implicit_region_bound: Option<ty::Region<'tcx>>,
param_env: ty::ParamEnv<'tcx>,
@@ -102,7 +102,7 @@
pub fn projection_declared_bounds_from_trait(
&self,
projection_ty: ty::ProjectionTy<'tcx>,
- ) -> impl Iterator<Item = ty::Region<'tcx>> + 'cx + Captures<'gcx> {
+ ) -> impl Iterator<Item = ty::Region<'tcx>> + 'cx + Captures<'tcx> {
self.declared_projection_bounds_from_trait(projection_ty)
}
@@ -244,7 +244,7 @@
fn declared_projection_bounds_from_trait(
&self,
projection_ty: ty::ProjectionTy<'tcx>,
- ) -> impl Iterator<Item = ty::Region<'tcx>> + 'cx + Captures<'gcx> {
+ ) -> impl Iterator<Item = ty::Region<'tcx>> + 'cx + Captures<'tcx> {
debug!("projection_bounds(projection_ty={:?})", projection_ty);
let tcx = self.tcx;
self.region_bounds_declared_on_associated_item(projection_ty.item_def_id)
@@ -284,7 +284,7 @@
fn region_bounds_declared_on_associated_item(
&self,
assoc_item_def_id: DefId,
- ) -> impl Iterator<Item = ty::Region<'tcx>> + 'cx + Captures<'gcx> {
+ ) -> impl Iterator<Item = ty::Region<'tcx>> + 'cx + Captures<'tcx> {
let tcx = self.tcx;
let assoc_item = tcx.associated_item(assoc_item_def_id);
let trait_def_id = assoc_item.container.assert_trait();
diff --git a/src/librustc/infer/region_constraints/leak_check.rs b/src/librustc/infer/region_constraints/leak_check.rs
index 8085258..30f6137 100644
--- a/src/librustc/infer/region_constraints/leak_check.rs
+++ b/src/librustc/infer/region_constraints/leak_check.rs
@@ -22,7 +22,7 @@
/// refactor the constraint set.
pub fn leak_check(
&mut self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
overly_polymorphic: bool,
placeholder_map: &PlaceholderMap<'tcx>,
_snapshot: &CombinedSnapshot<'_, 'tcx>,
@@ -109,7 +109,7 @@
fn fixed_point(
&mut self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
undo_log: &[UndoLog<'tcx>],
verifys: &[Verify<'tcx>],
) {
diff --git a/src/librustc/infer/region_constraints/mod.rs b/src/librustc/infer/region_constraints/mod.rs
index b6fe576..f2235fe 100644
--- a/src/librustc/infer/region_constraints/mod.rs
+++ b/src/librustc/infer/region_constraints/mod.rs
@@ -700,7 +700,7 @@
pub fn lub_regions(
&mut self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
origin: SubregionOrigin<'tcx>,
a: Region<'tcx>,
b: Region<'tcx>,
@@ -722,7 +722,7 @@
pub fn glb_regions(
&mut self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
origin: SubregionOrigin<'tcx>,
a: Region<'tcx>,
b: Region<'tcx>,
@@ -744,7 +744,7 @@
pub fn opportunistic_resolve_var(
&mut self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
rid: RegionVid,
) -> ty::Region<'tcx> {
let vid = self.unification_table.probe_value(rid).min_vid;
@@ -760,7 +760,7 @@
fn combine_vars(
&mut self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
t: CombineMapType,
a: Region<'tcx>,
b: Region<'tcx>,
@@ -849,8 +849,8 @@
}
}
-impl<'a, 'gcx, 'tcx> GenericKind<'tcx> {
- pub fn to_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
+impl<'tcx> GenericKind<'tcx> {
+ pub fn to_ty(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
match *self {
GenericKind::Param(ref p) => p.to_ty(tcx),
GenericKind::Projection(ref p) => tcx.mk_projection(p.item_def_id, p.substs),
diff --git a/src/librustc/infer/resolve.rs b/src/librustc/infer/resolve.rs
index f487e7c..810c641 100644
--- a/src/librustc/infer/resolve.rs
+++ b/src/librustc/infer/resolve.rs
@@ -12,19 +12,19 @@
/// been unified with (similar to `shallow_resolve`, but deep). This is
/// useful for printing messages etc but also required at various
/// points for correctness.
-pub struct OpportunisticVarResolver<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+pub struct OpportunisticVarResolver<'a, 'tcx: 'a> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
}
-impl<'a, 'gcx, 'tcx> OpportunisticVarResolver<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> OpportunisticVarResolver<'a, 'tcx> {
#[inline]
- pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) -> Self {
+ pub fn new(infcx: &'a InferCtxt<'a, 'tcx>) -> Self {
OpportunisticVarResolver { infcx }
}
}
-impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for OpportunisticVarResolver<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
+impl<'a, 'tcx> TypeFolder<'tcx> for OpportunisticVarResolver<'a, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
@@ -50,18 +50,18 @@
/// The opportunistic type and region resolver is similar to the
/// opportunistic type resolver, but also opportunistically resolves
/// regions. It is useful for canonicalization.
-pub struct OpportunisticTypeAndRegionResolver<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+pub struct OpportunisticTypeAndRegionResolver<'a, 'tcx: 'a> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
}
-impl<'a, 'gcx, 'tcx> OpportunisticTypeAndRegionResolver<'a, 'gcx, 'tcx> {
- pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) -> Self {
+impl<'a, 'tcx> OpportunisticTypeAndRegionResolver<'a, 'tcx> {
+ pub fn new(infcx: &'a InferCtxt<'a, 'tcx>) -> Self {
OpportunisticTypeAndRegionResolver { infcx }
}
}
-impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for OpportunisticTypeAndRegionResolver<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
+impl<'a, 'tcx> TypeFolder<'tcx> for OpportunisticTypeAndRegionResolver<'a, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
@@ -101,20 +101,20 @@
/// type variables that don't yet have a value. The first unresolved type is stored.
/// It does not construct the fully resolved type (which might
/// involve some hashing and so forth).
-pub struct UnresolvedTypeFinder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+pub struct UnresolvedTypeFinder<'a, 'tcx: 'a> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
/// Used to find the type parameter name and location for error reporting.
- pub first_unresolved: Option<(Ty<'tcx>,Option<Span>)>,
+ pub first_unresolved: Option<(Ty<'tcx>, Option<Span>)>,
}
-impl<'a, 'gcx, 'tcx> UnresolvedTypeFinder<'a, 'gcx, 'tcx> {
- pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) -> Self {
+impl<'a, 'tcx> UnresolvedTypeFinder<'a, 'tcx> {
+ pub fn new(infcx: &'a InferCtxt<'a, 'tcx>) -> Self {
UnresolvedTypeFinder { infcx, first_unresolved: None }
}
}
-impl<'a, 'gcx, 'tcx> TypeVisitor<'tcx> for UnresolvedTypeFinder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> TypeVisitor<'tcx> for UnresolvedTypeFinder<'a, 'tcx> {
fn visit_ty(&mut self, t: Ty<'tcx>) -> bool {
let t = self.infcx.shallow_resolve(t);
if t.has_infer_types() {
@@ -157,9 +157,9 @@
/// Full type resolution replaces all type and region variables with
/// their concrete results. If any variable cannot be replaced (never unified, etc)
/// then an `Err` result is returned.
-pub fn fully_resolve<'a, 'gcx, 'tcx, T>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- value: &T) -> FixupResult<'tcx, T>
- where T : TypeFoldable<'tcx>
+pub fn fully_resolve<'a, 'tcx, T>(infcx: &InferCtxt<'a, 'tcx>, value: &T) -> FixupResult<'tcx, T>
+where
+ T: TypeFoldable<'tcx>,
{
let mut full_resolver = FullTypeResolver { infcx: infcx, err: None };
let result = value.fold_with(&mut full_resolver);
@@ -171,13 +171,13 @@
// N.B. This type is not public because the protocol around checking the
// `err` field is not enforcable otherwise.
-struct FullTypeResolver<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+struct FullTypeResolver<'a, 'tcx: 'a> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
err: Option<FixupError<'tcx>>,
}
-impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for FullTypeResolver<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
+impl<'a, 'tcx> TypeFolder<'tcx> for FullTypeResolver<'a, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
diff --git a/src/librustc/infer/sub.rs b/src/librustc/infer/sub.rs
index f2f36d5..ed84e3f 100644
--- a/src/librustc/infer/sub.rs
+++ b/src/librustc/infer/sub.rs
@@ -11,15 +11,16 @@
use std::mem;
/// Ensures `a` is made a subtype of `b`. Returns `a` on success.
-pub struct Sub<'combine, 'infcx: 'combine, 'gcx: 'infcx+'tcx, 'tcx: 'infcx> {
- fields: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>,
+pub struct Sub<'combine, 'infcx: 'combine, 'tcx: 'infcx> {
+ fields: &'combine mut CombineFields<'infcx, 'tcx>,
a_is_expected: bool,
}
-impl<'combine, 'infcx, 'gcx, 'tcx> Sub<'combine, 'infcx, 'gcx, 'tcx> {
- pub fn new(f: &'combine mut CombineFields<'infcx, 'gcx, 'tcx>, a_is_expected: bool)
- -> Sub<'combine, 'infcx, 'gcx, 'tcx>
- {
+impl<'combine, 'infcx, 'tcx> Sub<'combine, 'infcx, 'tcx> {
+ pub fn new(
+ f: &'combine mut CombineFields<'infcx, 'tcx>,
+ a_is_expected: bool,
+ ) -> Sub<'combine, 'infcx, 'tcx> {
Sub { fields: f, a_is_expected: a_is_expected }
}
@@ -31,11 +32,9 @@
}
}
-impl<'combine, 'infcx, 'gcx, 'tcx> TypeRelation<'infcx, 'gcx, 'tcx>
- for Sub<'combine, 'infcx, 'gcx, 'tcx>
-{
+impl TypeRelation<'tcx> for Sub<'combine, 'infcx, 'tcx> {
fn tag(&self) -> &'static str { "Sub" }
- fn tcx(&self) -> TyCtxt<'infcx, 'gcx, 'tcx> { self.fields.infcx.tcx }
+ fn tcx(&self) -> TyCtxt<'tcx> { self.fields.infcx.tcx }
fn a_is_expected(&self) -> bool { self.a_is_expected }
fn with_cause<F,R>(&mut self, cause: Cause, f: F) -> R
diff --git a/src/librustc/infer/unify_key.rs b/src/librustc/infer/unify_key.rs
index 287a666..846611d 100644
--- a/src/librustc/infer/unify_key.rs
+++ b/src/librustc/infer/unify_key.rs
@@ -10,7 +10,7 @@
use std::cell::RefMut;
pub trait ToType {
- fn to_type<'a, 'gcx, 'tcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx>;
+ fn to_type<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx>;
}
impl UnifyKey for ty::IntVid {
@@ -52,7 +52,7 @@
}
impl ToType for IntVarValue {
- fn to_type<'a, 'gcx, 'tcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
+ fn to_type<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
match *self {
ty::IntType(i) => tcx.mk_mach_int(i),
ty::UintType(i) => tcx.mk_mach_uint(i),
@@ -72,7 +72,7 @@
impl EqUnifyValue for FloatVarValue {}
impl ToType for FloatVarValue {
- fn to_type<'a, 'gcx, 'tcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
+ fn to_type<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
tcx.mk_mach_float(self.0)
}
}
diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs
index b51257c..e90f4ca 100644
--- a/src/librustc/lint/context.rs
+++ b/src/librustc/lint/context.rs
@@ -509,7 +509,7 @@
/// Context for lint checking after type checking.
pub struct LateContext<'a, 'tcx: 'a> {
/// Type context we're checking in.
- pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ pub tcx: TyCtxt<'tcx>,
/// Side-tables for the body we are in.
// FIXME: Make this lazy to avoid running the TypeckTables query?
@@ -780,11 +780,11 @@
/// }
/// ```
pub fn get_def_path(&self, def_id: DefId) -> Vec<Symbol> {
- pub struct AbsolutePathPrinter<'a, 'tcx> {
- pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ pub struct AbsolutePathPrinter<'tcx> {
+ pub tcx: TyCtxt<'tcx>,
}
- impl<'tcx> Printer<'tcx, 'tcx> for AbsolutePathPrinter<'_, 'tcx> {
+ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> {
type Error = !;
type Path = Vec<Symbol>;
@@ -793,7 +793,7 @@
type DynExistential = ();
type Const = ();
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
@@ -1372,7 +1372,7 @@
late_lint_methods!(late_lint_pass_impl, [], ['tcx]);
fn late_lint_mod_pass<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
module_def_id: DefId,
pass: T,
) {
@@ -1399,12 +1399,12 @@
// Visit the crate attributes
if hir_id == hir::CRATE_HIR_ID {
- walk_list!(cx, visit_attribute, tcx.hir().attrs_by_hir_id(hir::CRATE_HIR_ID));
+ walk_list!(cx, visit_attribute, tcx.hir().attrs(hir::CRATE_HIR_ID));
}
}
pub fn late_lint_mod<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
module_def_id: DefId,
builtin_lints: T,
) {
@@ -1423,10 +1423,7 @@
}
}
-fn late_lint_pass_crate<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
- pass: T
-) {
+fn late_lint_pass_crate<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>(tcx: TyCtxt<'tcx>, pass: T) {
let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
let krate = tcx.hir().krate();
@@ -1459,10 +1456,7 @@
})
}
-fn late_lint_crate<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
- builtin_lints: T
-) {
+fn late_lint_crate<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>(tcx: TyCtxt<'tcx>, builtin_lints: T) {
let mut passes = tcx.sess.lint_store.borrow().late_passes.lock().take().unwrap();
if !tcx.sess.opts.debugging_opts.no_interleave_lints {
@@ -1494,7 +1488,7 @@
/// Performs lint checking on a crate.
pub fn check_crate<'tcx, T: for<'a> LateLintPass<'a, 'tcx>>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
builtin_lints: impl FnOnce() -> T + Send,
) {
join(|| {
diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs
index f4eff61..041944d 100644
--- a/src/librustc/lint/mod.rs
+++ b/src/librustc/lint/mod.rs
@@ -761,14 +761,12 @@
return err
}
-pub fn maybe_lint_level_root(tcx: TyCtxt<'_, '_, '_>, id: hir::HirId) -> bool {
- let attrs = tcx.hir().attrs_by_hir_id(id);
+pub fn maybe_lint_level_root(tcx: TyCtxt<'_>, id: hir::HirId) -> bool {
+ let attrs = tcx.hir().attrs(id);
attrs.iter().any(|attr| Level::from_symbol(attr.name_or_empty()).is_some())
}
-fn lint_levels<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, cnum: CrateNum)
- -> &'tcx LintLevelMap
-{
+fn lint_levels<'tcx>(tcx: TyCtxt<'tcx>, cnum: CrateNum) -> &'tcx LintLevelMap {
assert_eq!(cnum, LOCAL_CRATE);
let mut builder = LintLevelMapBuilder {
levels: LintLevelSets::builder(tcx.sess),
@@ -787,12 +785,12 @@
tcx.arena.alloc(builder.levels.build_map())
}
-struct LintLevelMapBuilder<'a, 'tcx: 'a> {
+struct LintLevelMapBuilder<'tcx> {
levels: levels::LintLevelsBuilder<'tcx>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> LintLevelMapBuilder<'a, 'tcx> {
+impl LintLevelMapBuilder<'tcx> {
fn with_lint_attrs<F>(&mut self,
id: hir::HirId,
attrs: &[ast::Attribute],
@@ -808,7 +806,7 @@
}
}
-impl<'a, 'tcx> intravisit::Visitor<'tcx> for LintLevelMapBuilder<'a, 'tcx> {
+impl intravisit::Visitor<'tcx> for LintLevelMapBuilder<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'tcx> {
intravisit::NestedVisitorMap::All(&self.tcx.hir())
}
diff --git a/src/librustc/macros.rs b/src/librustc/macros.rs
index b408820..09fa924 100644
--- a/src/librustc/macros.rs
+++ b/src/librustc/macros.rs
@@ -195,7 +195,7 @@
$(
impl<$tcx> $crate::ty::Lift<$tcx> for $ty {
type Lifted = Self;
- fn lift_to_tcx<'a, 'gcx>(&self, _: $crate::ty::TyCtxt<'a, 'gcx, $tcx>) -> Option<Self> {
+ fn lift_to_tcx(&self, _: $crate::ty::TyCtxt<$tcx>) -> Option<Self> {
Some(Clone::clone(self))
}
}
@@ -218,7 +218,7 @@
(for <$tcx:lifetime> { $($ty:ty,)+ }) => {
$(
impl<$tcx> $crate::ty::fold::TypeFoldable<$tcx> for $ty {
- fn super_fold_with<'gcx: $tcx, F: $crate::ty::fold::TypeFolder<'gcx, $tcx>>(
+ fn super_fold_with<F: $crate::ty::fold::TypeFolder<$tcx>>(
&self,
_: &mut F
) -> $ty {
@@ -264,7 +264,7 @@
{
type Lifted = $lifted;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<$lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<$tcx>) -> Option<$lifted> {
$(let $field = tcx.lift(&self.$field)?;)*
Some(Self::Lifted { $($field),* })
}
@@ -283,7 +283,7 @@
{
type Lifted = $lifted;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<$lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<$tcx>) -> Option<$lifted> {
EnumLiftImpl!(@Variants(self, tcx) input($($variants)*) output())
}
}
@@ -332,7 +332,7 @@
impl<$($p),*> $crate::ty::fold::TypeFoldable<$tcx> for $s
$(where $($wc)*)*
{
- fn super_fold_with<'gcx: $tcx, V: $crate::ty::fold::TypeFolder<'gcx, $tcx>>(
+ fn super_fold_with<V: $crate::ty::fold::TypeFolder<$tcx>>(
&self,
folder: &mut V,
) -> Self {
@@ -359,7 +359,7 @@
impl<$($p),*> $crate::ty::fold::TypeFoldable<$tcx> for $s
$(where $($wc)*)*
{
- fn super_fold_with<'gcx: $tcx, V: $crate::ty::fold::TypeFolder<'gcx, $tcx>>(
+ fn super_fold_with<V: $crate::ty::fold::TypeFolder<$tcx>>(
&self,
folder: &mut V,
) -> Self {
@@ -386,7 +386,7 @@
impl<$($p),*> $crate::ty::fold::TypeFoldable<$tcx> for $s
$(where $($wc)*)*
{
- fn super_fold_with<'gcx: $tcx, V: $crate::ty::fold::TypeFolder<'gcx, $tcx>>(
+ fn super_fold_with<V: $crate::ty::fold::TypeFolder<$tcx>>(
&self,
folder: &mut V,
) -> Self {
diff --git a/src/librustc/middle/cstore.rs b/src/librustc/middle/cstore.rs
index 6f9abcd..2e9e1ac 100644
--- a/src/librustc/middle/cstore.rs
+++ b/src/librustc/middle/cstore.rs
@@ -211,9 +211,7 @@
fn crates_untracked(&self) -> Vec<CrateNum>;
// utility functions
- fn encode_metadata<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> EncodedMetadata;
+ fn encode_metadata<'tcx>(&self, tcx: TyCtxt<'tcx>) -> EncodedMetadata;
fn metadata_encoding_version(&self) -> &[u8];
}
@@ -228,9 +226,7 @@
// In order to get this left-to-right dependency ordering, we perform a
// topological sort of all crates putting the leaves at the right-most
// positions.
-pub fn used_crates(tcx: TyCtxt<'_, '_, '_>, prefer: LinkagePreference)
- -> Vec<(CrateNum, LibSource)>
-{
+pub fn used_crates(tcx: TyCtxt<'_>, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)> {
let mut libs = tcx.crates()
.iter()
.cloned()
diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs
index 04f5b35..63503f5 100644
--- a/src/librustc/middle/dead.rs
+++ b/src/librustc/middle/dead.rs
@@ -26,8 +26,7 @@
// explored. For example, if it's a live Node::Item that is a
// function, then we should explore its block to check for codes that
// may need to be marked as live.
-fn should_explore<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- hir_id: hir::HirId) -> bool {
+fn should_explore<'tcx>(tcx: TyCtxt<'tcx>, hir_id: hir::HirId) -> bool {
match tcx.hir().find_by_hir_id(hir_id) {
Some(Node::Item(..)) |
Some(Node::ImplItem(..)) |
@@ -41,7 +40,7 @@
struct MarkSymbolVisitor<'a, 'tcx: 'a> {
worklist: Vec<hir::HirId>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
tables: &'a ty::TypeckTables<'tcx>,
live_symbols: FxHashSet<hir::HirId>,
repr_has_repr_c: bool,
@@ -293,7 +292,7 @@
fn visit_ty(&mut self, ty: &'tcx hir::Ty) {
match ty.node {
TyKind::Def(item_id, _) => {
- let item = self.tcx.hir().expect_item_by_hir_id(item_id.id);
+ let item = self.tcx.hir().expect_item(item_id.id);
intravisit::walk_item(self, item);
}
_ => ()
@@ -302,9 +301,11 @@
}
}
-fn has_allow_dead_code_or_lang_attr(tcx: TyCtxt<'_, '_, '_>,
- id: hir::HirId,
- attrs: &[ast::Attribute]) -> bool {
+fn has_allow_dead_code_or_lang_attr(
+ tcx: TyCtxt<'_>,
+ id: hir::HirId,
+ attrs: &[ast::Attribute],
+) -> bool {
if attr::contains_name(attrs, sym::lang) {
return true;
}
@@ -353,7 +354,7 @@
struct LifeSeeder<'k, 'tcx: 'k> {
worklist: Vec<hir::HirId>,
krate: &'k hir::Crate,
- tcx: TyCtxt<'k, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
// see `MarkSymbolVisitor::struct_constructors`
struct_constructors: FxHashMap<hir::HirId, hir::HirId>,
}
@@ -423,8 +424,8 @@
}
}
-fn create_and_seed_worklist<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn create_and_seed_worklist<'tcx>(
+ tcx: TyCtxt<'tcx>,
access_levels: &privacy::AccessLevels,
krate: &hir::Crate,
) -> (Vec<hir::HirId>, FxHashMap<hir::HirId, hir::HirId>) {
@@ -451,10 +452,11 @@
(life_seeder.worklist, life_seeder.struct_constructors)
}
-fn find_live<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- access_levels: &privacy::AccessLevels,
- krate: &hir::Crate)
- -> FxHashSet<hir::HirId> {
+fn find_live<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ access_levels: &privacy::AccessLevels,
+ krate: &hir::Crate,
+) -> FxHashSet<hir::HirId> {
let (worklist, struct_constructors) = create_and_seed_worklist(tcx, access_levels, krate);
let mut symbol_visitor = MarkSymbolVisitor {
worklist,
@@ -471,12 +473,12 @@
symbol_visitor.live_symbols
}
-struct DeadVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct DeadVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
live_symbols: FxHashSet<hir::HirId>,
}
-impl<'a, 'tcx> DeadVisitor<'a, 'tcx> {
+impl DeadVisitor<'tcx> {
fn should_warn_about_item(&mut self, item: &hir::Item) -> bool {
let should_warn = match item.node {
hir::ItemKind::Static(..)
@@ -554,7 +556,7 @@
}
}
-impl<'a, 'tcx> Visitor<'tcx> for DeadVisitor<'a, 'tcx> {
+impl Visitor<'tcx> for DeadVisitor<'tcx> {
/// Walk nested items in place so that we don't report dead-code
/// on inner functions when the outer function is already getting
/// an error. We could do this also by checking the parents, but
@@ -660,7 +662,7 @@
}
}
-pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn check_crate<'tcx>(tcx: TyCtxt<'tcx>) {
let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
let krate = tcx.hir().krate();
let live_symbols = find_live(tcx, access_levels, krate);
diff --git a/src/librustc/middle/dependency_format.rs b/src/librustc/middle/dependency_format.rs
index a24d25c..879da64 100644
--- a/src/librustc/middle/dependency_format.rs
+++ b/src/librustc/middle/dependency_format.rs
@@ -81,7 +81,7 @@
Dynamic,
}
-pub fn calculate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn calculate<'tcx>(tcx: TyCtxt<'tcx>) {
let sess = &tcx.sess;
let fmts = sess.crate_types.borrow().iter().map(|&ty| {
let linkage = calculate_type(tcx, ty);
@@ -92,9 +92,7 @@
sess.dependency_formats.set(fmts);
}
-fn calculate_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: config::CrateType) -> DependencyList {
-
+fn calculate_type<'tcx>(tcx: TyCtxt<'tcx>, ty: config::CrateType) -> DependencyList {
let sess = &tcx.sess;
if !sess.opts.output_types.should_codegen() {
@@ -242,10 +240,12 @@
ret
}
-fn add_library(tcx: TyCtxt<'_, '_, '_>,
- cnum: CrateNum,
- link: LinkagePreference,
- m: &mut FxHashMap<CrateNum, LinkagePreference>) {
+fn add_library(
+ tcx: TyCtxt<'_>,
+ cnum: CrateNum,
+ link: LinkagePreference,
+ m: &mut FxHashMap<CrateNum, LinkagePreference>,
+) {
match m.get(&cnum) {
Some(&link2) => {
// If the linkages differ, then we'd have two copies of the library
@@ -267,7 +267,7 @@
}
}
-fn attempt_static<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<DependencyList> {
+fn attempt_static<'tcx>(tcx: TyCtxt<'tcx>) -> Option<DependencyList> {
let sess = &tcx.sess;
let crates = cstore::used_crates(tcx, RequireStatic);
if !crates.iter().by_ref().all(|&(_, ref p)| p.is_some()) {
@@ -324,7 +324,7 @@
// After the linkage for a crate has been determined we need to verify that
// there's only going to be one allocator in the output.
-fn verify_ok<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, list: &[Linkage]) {
+fn verify_ok<'tcx>(tcx: TyCtxt<'tcx>, list: &[Linkage]) {
let sess = &tcx.sess;
if list.len() == 0 {
return
diff --git a/src/librustc/middle/entry.rs b/src/librustc/middle/entry.rs
index 67db2ec..d9e7cae 100644
--- a/src/librustc/middle/entry.rs
+++ b/src/librustc/middle/entry.rs
@@ -47,7 +47,7 @@
}
}
-fn entry_fn(tcx: TyCtxt<'_, '_, '_>, cnum: CrateNum) -> Option<(DefId, EntryFnType)> {
+fn entry_fn(tcx: TyCtxt<'_>, cnum: CrateNum) -> Option<(DefId, EntryFnType)> {
assert_eq!(cnum, LOCAL_CRATE);
let any_exe = tcx.sess.crate_types.borrow().iter().any(|ty| {
@@ -140,10 +140,7 @@
}
}
-fn configure_main(
- tcx: TyCtxt<'_, '_, '_>,
- visitor: &EntryContext<'_, '_>,
-) -> Option<(DefId, EntryFnType)> {
+fn configure_main(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) -> Option<(DefId, EntryFnType)> {
if let Some((hir_id, _)) = visitor.start_fn {
Some((tcx.hir().local_def_id_from_hir_id(hir_id), EntryFnType::Start))
} else if let Some((hir_id, _)) = visitor.attr_main_fn {
@@ -179,7 +176,7 @@
}
}
-pub fn find_entry_point(tcx: TyCtxt<'_, '_, '_>) -> Option<(DefId, EntryFnType)> {
+pub fn find_entry_point(tcx: TyCtxt<'_>) -> Option<(DefId, EntryFnType)> {
tcx.entry_fn(LOCAL_CRATE)
}
diff --git a/src/librustc/middle/exported_symbols.rs b/src/librustc/middle/exported_symbols.rs
index 4eb3a2b..2027880 100644
--- a/src/librustc/middle/exported_symbols.rs
+++ b/src/librustc/middle/exported_symbols.rs
@@ -38,9 +38,7 @@
}
impl<'tcx> ExportedSymbol<'tcx> {
- pub fn symbol_name(&self,
- tcx: TyCtxt<'_, 'tcx, '_>)
- -> ty::SymbolName {
+ pub fn symbol_name(&self, tcx: TyCtxt<'tcx>) -> ty::SymbolName {
match *self {
ExportedSymbol::NonGeneric(def_id) => {
tcx.symbol_name(ty::Instance::mono(tcx, def_id))
@@ -54,10 +52,7 @@
}
}
- pub fn compare_stable(&self,
- tcx: TyCtxt<'_, 'tcx, '_>,
- other: &ExportedSymbol<'tcx>)
- -> cmp::Ordering {
+ pub fn compare_stable(&self, tcx: TyCtxt<'tcx>, other: &ExportedSymbol<'tcx>) -> cmp::Ordering {
match *self {
ExportedSymbol::NonGeneric(self_def_id) => match *other {
ExportedSymbol::NonGeneric(other_def_id) => {
@@ -92,13 +87,13 @@
}
}
-pub fn metadata_symbol_name(tcx: TyCtxt<'_, '_, '_>) -> String {
+pub fn metadata_symbol_name(tcx: TyCtxt<'_>) -> String {
format!("rust_metadata_{}_{}",
tcx.original_crate_name(LOCAL_CRATE),
tcx.crate_disambiguator(LOCAL_CRATE).to_fingerprint().to_hex())
}
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for ExportedSymbol<'gcx> {
+impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for ExportedSymbol<'tcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs
index 34cea2d..61770e6 100644
--- a/src/librustc/middle/expr_use_visitor.rs
+++ b/src/librustc/middle/expr_use_visitor.rs
@@ -202,7 +202,7 @@
}
impl OverloadedCallType {
- fn from_trait_id(tcx: TyCtxt<'_, '_, '_>, trait_id: DefId) -> OverloadedCallType {
+ fn from_trait_id(tcx: TyCtxt<'_>, trait_id: DefId) -> OverloadedCallType {
for &(maybe_function_trait, overloaded_call_type) in &[
(tcx.lang_items().fn_once_trait(), FnOnceOverloadedCall),
(tcx.lang_items().fn_mut_trait(), FnMutOverloadedCall),
@@ -219,7 +219,7 @@
bug!("overloaded call didn't map to known function trait")
}
- fn from_method_id(tcx: TyCtxt<'_, '_, '_>, method_id: DefId) -> OverloadedCallType {
+ fn from_method_id(tcx: TyCtxt<'_>, method_id: DefId) -> OverloadedCallType {
let method = tcx.associated_item(method_id);
OverloadedCallType::from_trait_id(tcx, method.container.id())
}
@@ -229,8 +229,8 @@
// The ExprUseVisitor type
//
// This is the code that actually walks the tree.
-pub struct ExprUseVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- mc: mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
+pub struct ExprUseVisitor<'a, 'tcx: 'a> {
+ mc: mc::MemCategorizationContext<'a, 'tcx>,
delegate: &'a mut dyn Delegate<'tcx>,
param_env: ty::ParamEnv<'tcx>,
}
@@ -254,7 +254,7 @@
)
}
-impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx, 'tcx> {
+impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
/// Creates the ExprUseVisitor, configuring it with the various options provided:
///
/// - `delegate` -- who receives the callbacks
@@ -266,15 +266,15 @@
/// `None` means that rvalues will be given more conservative lifetimes.
///
/// See also `with_infer`, which is used *during* typeck.
- pub fn new(delegate: &'a mut (dyn Delegate<'tcx>+'a),
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- body_owner: DefId,
- param_env: ty::ParamEnv<'tcx>,
- region_scope_tree: &'a region::ScopeTree,
- tables: &'a ty::TypeckTables<'tcx>,
- rvalue_promotable_map: Option<&'tcx ItemLocalSet>)
- -> Self
- {
+ pub fn new(
+ delegate: &'a mut (dyn Delegate<'tcx> + 'a),
+ tcx: TyCtxt<'tcx>,
+ body_owner: DefId,
+ param_env: ty::ParamEnv<'tcx>,
+ region_scope_tree: &'a region::ScopeTree,
+ tables: &'a ty::TypeckTables<'tcx>,
+ rvalue_promotable_map: Option<&'tcx ItemLocalSet>,
+ ) -> Self {
ExprUseVisitor {
mc: mc::MemCategorizationContext::new(tcx,
body_owner,
@@ -287,15 +287,15 @@
}
}
-impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
- pub fn with_infer(delegate: &'a mut (dyn Delegate<'tcx>+'a),
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- body_owner: DefId,
- param_env: ty::ParamEnv<'tcx>,
- region_scope_tree: &'a region::ScopeTree,
- tables: &'a ty::TypeckTables<'tcx>)
- -> Self
- {
+impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
+ pub fn with_infer(
+ delegate: &'a mut (dyn Delegate<'tcx> + 'a),
+ infcx: &'a InferCtxt<'a, 'tcx>,
+ body_owner: DefId,
+ param_env: ty::ParamEnv<'tcx>,
+ region_scope_tree: &'a region::ScopeTree,
+ tables: &'a ty::TypeckTables<'tcx>,
+ ) -> Self {
ExprUseVisitor {
mc: mc::MemCategorizationContext::with_infer(
infcx,
@@ -333,7 +333,7 @@
self.consume_expr(&body.value);
}
- fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.mc.tcx
}
@@ -974,12 +974,12 @@
}
}
-fn copy_or_move<'a, 'gcx, 'tcx>(mc: &mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- cmt: &mc::cmt_<'tcx>,
- move_reason: MoveReason)
- -> ConsumeMode
-{
+fn copy_or_move<'a, 'tcx>(
+ mc: &mc::MemCategorizationContext<'a, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ cmt: &mc::cmt_<'tcx>,
+ move_reason: MoveReason,
+) -> ConsumeMode {
if !mc.type_is_copy_modulo_regions(param_env, cmt.ty, cmt.span) {
Move(move_reason)
} else {
diff --git a/src/librustc/middle/free_region.rs b/src/librustc/middle/free_region.rs
index dae33e3..a8a7df0 100644
--- a/src/librustc/middle/free_region.rs
+++ b/src/librustc/middle/free_region.rs
@@ -15,8 +15,8 @@
///
/// This stuff is a bit convoluted and should be refactored, but as we
/// transition to NLL, it'll all go away anyhow.
-pub struct RegionRelations<'a, 'gcx: 'tcx, 'tcx: 'a> {
- pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub struct RegionRelations<'a, 'tcx: 'a> {
+ pub tcx: TyCtxt<'tcx>,
/// The context used to fetch the region maps.
pub context: DefId,
@@ -28,9 +28,9 @@
pub free_regions: &'a FreeRegionMap<'tcx>,
}
-impl<'a, 'gcx, 'tcx> RegionRelations<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> RegionRelations<'a, 'tcx> {
pub fn new(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
context: DefId,
region_scope_tree: &'a region::ScopeTree,
free_regions: &'a FreeRegionMap<'tcx>,
diff --git a/src/librustc/middle/intrinsicck.rs b/src/librustc/middle/intrinsicck.rs
index 1f25dba..e8d68e0 100644
--- a/src/librustc/middle/intrinsicck.rs
+++ b/src/librustc/middle/intrinsicck.rs
@@ -10,7 +10,7 @@
use crate::hir::intravisit::{self, Visitor, NestedVisitorMap};
use crate::hir;
-fn check_mod_intrinsics<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) {
+fn check_mod_intrinsics<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
tcx.hir().visit_item_likes_in_module(
module_def_id,
&mut ItemVisitor { tcx }.as_deep_visitor()
@@ -24,21 +24,19 @@
};
}
-struct ItemVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>
+struct ItemVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-struct ExprVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct ExprVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
tables: &'tcx ty::TypeckTables<'tcx>,
param_env: ty::ParamEnv<'tcx>,
}
/// If the type is `Option<T>`, it will return `T`, otherwise
/// the type itself. Works on most `Option`-like types.
-fn unpack_option_like<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: Ty<'tcx>)
- -> Ty<'tcx> {
+fn unpack_option_like<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
let (def, substs) = match ty.sty {
ty::Adt(def, substs) => (def, substs),
_ => return ty
@@ -66,7 +64,7 @@
ty
}
-impl<'a, 'tcx> ExprVisitor<'a, 'tcx> {
+impl ExprVisitor<'tcx> {
fn def_id_is_transmute(&self, def_id: DefId) -> bool {
self.tcx.fn_sig(def_id).abi() == RustIntrinsic &&
self.tcx.item_name(def_id) == sym::transmute
@@ -131,7 +129,7 @@
}
}
-impl<'a, 'tcx> Visitor<'tcx> for ItemVisitor<'a, 'tcx> {
+impl Visitor<'tcx> for ItemVisitor<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::None
}
@@ -146,7 +144,7 @@
}
}
-impl<'a, 'tcx> Visitor<'tcx> for ExprVisitor<'a, 'tcx> {
+impl Visitor<'tcx> for ExprVisitor<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::None
}
diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs
index 034ef32..d7abdb8 100644
--- a/src/librustc/middle/lang_items.rs
+++ b/src/librustc/middle/lang_items.rs
@@ -104,14 +104,14 @@
)*
}
-struct LanguageItemCollector<'a, 'tcx: 'a> {
+struct LanguageItemCollector<'tcx> {
items: LanguageItems,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
/// A mapping from the name of the lang item to its order and the form it must be of.
item_refs: FxHashMap<&'static str, (usize, Target)>,
}
-impl<'a, 'v, 'tcx> ItemLikeVisitor<'v> for LanguageItemCollector<'a, 'tcx> {
+impl ItemLikeVisitor<'v> for LanguageItemCollector<'tcx> {
fn visit_item(&mut self, item: &hir::Item) {
if let Some((value, span)) = extract(&item.attrs) {
let actual_target = Target::from_item(item);
@@ -159,8 +159,8 @@
}
}
-impl<'a, 'tcx> LanguageItemCollector<'a, 'tcx> {
- fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> LanguageItemCollector<'a, 'tcx> {
+impl LanguageItemCollector<'tcx> {
+ fn new(tcx: TyCtxt<'tcx>) -> LanguageItemCollector<'tcx> {
let mut item_refs = FxHashMap::default();
$( item_refs.insert($name, ($variant as usize, $target)); )*
@@ -217,7 +217,7 @@
}
/// Traverse and collect all the lang items in all crates.
-pub fn collect<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> LanguageItems {
+pub fn collect<'tcx>(tcx: TyCtxt<'tcx>) -> LanguageItems {
// Initialize the collector.
let mut collector = LanguageItemCollector::new(tcx);
@@ -402,7 +402,7 @@
Rc, "rc", rc, Target::Struct;
}
-impl<'a, 'tcx, 'gcx> TyCtxt<'a, 'tcx, 'gcx> {
+impl<'tcx> TyCtxt<'tcx> {
/// Returns the `DefId` for a given `LangItem`.
/// If not found, fatally abort compilation.
pub fn require_lang_item(&self, lang_item: LangItem) -> DefId {
diff --git a/src/librustc/middle/lib_features.rs b/src/librustc/middle/lib_features.rs
index 76934dd..9c131ce 100644
--- a/src/librustc/middle/lib_features.rs
+++ b/src/librustc/middle/lib_features.rs
@@ -37,13 +37,13 @@
}
}
-pub struct LibFeatureCollector<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub struct LibFeatureCollector<'tcx> {
+ tcx: TyCtxt<'tcx>,
lib_features: LibFeatures,
}
-impl<'a, 'tcx> LibFeatureCollector<'a, 'tcx> {
- fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> LibFeatureCollector<'a, 'tcx> {
+impl LibFeatureCollector<'tcx> {
+ fn new(tcx: TyCtxt<'tcx>) -> LibFeatureCollector<'tcx> {
LibFeatureCollector {
tcx,
lib_features: LibFeatures::new(),
@@ -130,7 +130,7 @@
}
}
-impl<'a, 'tcx> Visitor<'tcx> for LibFeatureCollector<'a, 'tcx> {
+impl Visitor<'tcx> for LibFeatureCollector<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::All(&self.tcx.hir())
}
@@ -142,7 +142,7 @@
}
}
-pub fn collect<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> LibFeatures {
+pub fn collect<'tcx>(tcx: TyCtxt<'tcx>) -> LibFeatures {
let mut collector = LibFeatureCollector::new(tcx);
intravisit::walk_crate(&mut collector, tcx.hir().krate());
collector.lib_features
diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs
index 45b4fb5..3d2bc6c 100644
--- a/src/librustc/middle/liveness.rs
+++ b/src/librustc/middle/liveness.rs
@@ -150,7 +150,7 @@
ExitNode
}
-fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_, '_, '_>) -> String {
+fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_>) -> String {
let cm = tcx.sess.source_map();
match lnk {
UpvarNode(s) => {
@@ -166,7 +166,7 @@
}
}
-impl<'a, 'tcx> Visitor<'tcx> for IrMaps<'a, 'tcx> {
+impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::OnlyBodies(&self.tcx.hir())
}
@@ -181,7 +181,7 @@
fn visit_arm(&mut self, a: &'tcx hir::Arm) { visit_arm(self, a); }
}
-fn check_mod_liveness<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) {
+fn check_mod_liveness<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
tcx.hir().visit_item_likes_in_module(
module_def_id,
&mut IrMaps::new(tcx, module_def_id).as_deep_visitor(),
@@ -256,8 +256,8 @@
CleanExit
}
-struct IrMaps<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct IrMaps<'tcx> {
+ tcx: TyCtxt<'tcx>,
body_owner: DefId,
num_live_nodes: usize,
num_vars: usize,
@@ -268,8 +268,8 @@
lnks: Vec<LiveNodeKind>,
}
-impl<'a, 'tcx> IrMaps<'a, 'tcx> {
- fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, body_owner: DefId) -> IrMaps<'a, 'tcx> {
+impl IrMaps<'tcx> {
+ fn new(tcx: TyCtxt<'tcx>, body_owner: DefId) -> IrMaps<'tcx> {
IrMaps {
tcx,
body_owner,
@@ -352,12 +352,14 @@
}
}
-fn visit_fn<'a, 'tcx: 'a>(ir: &mut IrMaps<'a, 'tcx>,
- fk: FnKind<'tcx>,
- decl: &'tcx hir::FnDecl,
- body_id: hir::BodyId,
- sp: Span,
- id: hir::HirId) {
+fn visit_fn<'a, 'tcx: 'a>(
+ ir: &mut IrMaps<'tcx>,
+ fk: FnKind<'tcx>,
+ decl: &'tcx hir::FnDecl,
+ body_id: hir::BodyId,
+ sp: Span,
+ id: hir::HirId,
+) {
debug!("visit_fn");
// swap in a new set of IR maps for this function body:
@@ -374,7 +376,7 @@
}
}
- debug!("creating fn_maps: {:?}", &fn_maps as *const IrMaps<'_, '_>);
+ debug!("creating fn_maps: {:p}", &fn_maps);
let body = ir.tcx.hir().body(body_id);
@@ -411,7 +413,7 @@
lsets.warn_about_unused_args(body, entry_ln);
}
-fn add_from_pat<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, pat: &P<hir::Pat>) {
+fn add_from_pat<'tcx>(ir: &mut IrMaps<'tcx>, pat: &P<hir::Pat>) {
// For struct patterns, take note of which fields used shorthand
// (`x` rather than `x: x`).
let mut shorthand_field_ids = HirIdSet::default();
@@ -457,19 +459,19 @@
});
}
-fn visit_local<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, local: &'tcx hir::Local) {
+fn visit_local<'tcx>(ir: &mut IrMaps<'tcx>, local: &'tcx hir::Local) {
add_from_pat(ir, &local.pat);
intravisit::walk_local(ir, local);
}
-fn visit_arm<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, arm: &'tcx hir::Arm) {
+fn visit_arm<'tcx>(ir: &mut IrMaps<'tcx>, arm: &'tcx hir::Arm) {
for pat in &arm.pats {
add_from_pat(ir, pat);
}
intravisit::walk_arm(ir, arm);
}
-fn visit_expr<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, expr: &'tcx Expr) {
+fn visit_expr<'tcx>(ir: &mut IrMaps<'tcx>, expr: &'tcx Expr) {
match expr.node {
// live nodes required for uses or definitions of variables:
hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
@@ -681,7 +683,7 @@
const ACC_USE: u32 = 4;
struct Liveness<'a, 'tcx: 'a> {
- ir: &'a mut IrMaps<'a, 'tcx>,
+ ir: &'a mut IrMaps<'tcx>,
tables: &'a ty::TypeckTables<'tcx>,
s: Specials,
successors: Vec<LiveNode>,
@@ -695,7 +697,7 @@
}
impl<'a, 'tcx> Liveness<'a, 'tcx> {
- fn new(ir: &'a mut IrMaps<'a, 'tcx>, body: hir::BodyId) -> Liveness<'a, 'tcx> {
+ fn new(ir: &'a mut IrMaps<'tcx>, body: hir::BodyId) -> Liveness<'a, 'tcx> {
// Special nodes and variables:
// - exit_ln represents the end of the fn, either by return or panic
// - implicit_ret_var is a pseudo-variable that represents
@@ -1169,7 +1171,7 @@
}
hir::ExprKind::Call(ref f, ref args) => {
- let m = self.ir.tcx.hir().get_module_parent_by_hir_id(expr.hir_id);
+ let m = self.ir.tcx.hir().get_module_parent(expr.hir_id);
let succ = if self.ir.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(expr)) {
self.s.exit_ln
} else {
@@ -1180,7 +1182,7 @@
}
hir::ExprKind::MethodCall(.., ref args) => {
- let m = self.ir.tcx.hir().get_module_parent_by_hir_id(expr.hir_id);
+ let m = self.ir.tcx.hir().get_module_parent(expr.hir_id);
let succ = if self.ir.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(expr)) {
self.s.exit_ln
} else {
@@ -1615,7 +1617,7 @@
self.ir.tcx.lint_hir_note(
lint::builtin::UNUSED_VARIABLES,
hir_id,
- spans.clone(),
+ spans,
&format!("variable `{}` is assigned to, but never used", name),
&format!("consider using `_{}` instead", name),
);
diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs
index 7011948..192e723 100644
--- a/src/librustc/middle/mem_categorization.rs
+++ b/src/librustc/middle/mem_categorization.rs
@@ -287,14 +287,14 @@
}
#[derive(Clone)]
-pub struct MemCategorizationContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub struct MemCategorizationContext<'a, 'tcx> {
+ pub tcx: TyCtxt<'tcx>,
pub body_owner: DefId,
pub upvars: Option<&'tcx FxIndexMap<hir::HirId, hir::Upvar>>,
pub region_scope_tree: &'a region::ScopeTree,
pub tables: &'a ty::TypeckTables<'tcx>,
rvalue_promotable_map: Option<&'tcx ItemLocalSet>,
- infcx: Option<&'a InferCtxt<'a, 'gcx, 'tcx>>,
+ infcx: Option<&'a InferCtxt<'a, 'tcx>>,
}
pub type McResult<T> = Result<T, ()>;
@@ -339,9 +339,12 @@
ret
}
- fn from_local(tcx: TyCtxt<'_, '_, '_>, tables: &ty::TypeckTables<'_>,
- id: ast::NodeId) -> MutabilityCategory {
- let ret = match tcx.hir().get(id) {
+ fn from_local(
+ tcx: TyCtxt<'_>,
+ tables: &ty::TypeckTables<'_>,
+ id: hir::HirId,
+ ) -> MutabilityCategory {
+ let ret = match tcx.hir().get_by_hir_id(id) {
Node::Binding(p) => match p.node {
PatKind::Binding(..) => {
let bm = *tables.pat_binding_modes()
@@ -399,13 +402,14 @@
}
}
-impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx, 'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- body_owner: DefId,
- region_scope_tree: &'a region::ScopeTree,
- tables: &'a ty::TypeckTables<'tcx>,
- rvalue_promotable_map: Option<&'tcx ItemLocalSet>)
- -> MemCategorizationContext<'a, 'tcx, 'tcx> {
+impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
+ pub fn new(
+ tcx: TyCtxt<'tcx>,
+ body_owner: DefId,
+ region_scope_tree: &'a region::ScopeTree,
+ tables: &'a ty::TypeckTables<'tcx>,
+ rvalue_promotable_map: Option<&'tcx ItemLocalSet>,
+ ) -> MemCategorizationContext<'a, 'tcx> {
MemCategorizationContext {
tcx,
body_owner,
@@ -418,7 +422,7 @@
}
}
-impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
/// Creates a `MemCategorizationContext` during type inference.
/// This is used during upvar analysis and a few other places.
/// Because the typeck tables are not yet complete, the results
@@ -428,11 +432,12 @@
/// temporaries may be overly conservative;
/// - similarly, as the results of upvar analysis are not yet
/// known, the results around upvar accesses may be incorrect.
- pub fn with_infer(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- body_owner: DefId,
- region_scope_tree: &'a region::ScopeTree,
- tables: &'a ty::TypeckTables<'tcx>)
- -> MemCategorizationContext<'a, 'gcx, 'tcx> {
+ pub fn with_infer(
+ infcx: &'a InferCtxt<'a, 'tcx>,
+ body_owner: DefId,
+ region_scope_tree: &'a region::ScopeTree,
+ tables: &'a ty::TypeckTables<'tcx>,
+ ) -> MemCategorizationContext<'a, 'tcx> {
let tcx = infcx.tcx;
// Subtle: we can't do rvalue promotion analysis until the
@@ -495,7 +500,6 @@
// FIXME
None if self.is_tainted_by_errors() => Err(()),
None => {
- let id = self.tcx.hir().hir_to_node_id(id);
bug!("no type for node {}: {} in mem_categorization",
id, self.tcx.hir().node_to_string(id));
}
@@ -582,10 +586,11 @@
pub fn cat_expr(&self, expr: &hir::Expr) -> McResult<cmt_<'tcx>> {
// This recursion helper avoids going through *too many*
// adjustments, since *only* non-overloaded deref recurses.
- fn helper<'a, 'gcx, 'tcx>(mc: &MemCategorizationContext<'a, 'gcx, 'tcx>,
- expr: &hir::Expr,
- adjustments: &[adjustment::Adjustment<'tcx>])
- -> McResult<cmt_<'tcx>> {
+ fn helper<'a, 'tcx>(
+ mc: &MemCategorizationContext<'a, 'tcx>,
+ expr: &hir::Expr,
+ adjustments: &[adjustment::Adjustment<'tcx>],
+ ) -> McResult<cmt_<'tcx>> {
match adjustments.split_last() {
None => mc.cat_expr_unadjusted(expr),
Some((adjustment, previous)) => {
@@ -747,15 +752,14 @@
}
Res::Local(var_id) => {
- let var_nid = self.tcx.hir().hir_to_node_id(var_id);
if self.upvars.map_or(false, |upvars| upvars.contains_key(&var_id)) {
- self.cat_upvar(hir_id, span, var_nid)
+ self.cat_upvar(hir_id, span, var_id)
} else {
Ok(cmt_ {
hir_id,
span,
cat: Categorization::Local(var_id),
- mutbl: MutabilityCategory::from_local(self.tcx, self.tables, var_nid),
+ mutbl: MutabilityCategory::from_local(self.tcx, self.tables, var_id),
ty: expr_ty,
note: NoteNone
})
@@ -772,7 +776,7 @@
&self,
hir_id: hir::HirId,
span: Span,
- var_id: ast::NodeId,
+ var_id: hir::HirId,
) -> McResult<cmt_<'tcx>> {
// An upvar can have up to 3 components. We translate first to a
// `Categorization::Upvar`, which is itself a fiction -- it represents the reference to the
@@ -822,13 +826,12 @@
_ => span_bug!(span, "unexpected type for fn in mem_categorization: {:?}", ty),
};
- let var_hir_id = self.tcx.hir().node_to_hir_id(var_id);
let upvar_id = ty::UpvarId {
- var_path: ty::UpvarPath { hir_id: var_hir_id },
+ var_path: ty::UpvarPath { hir_id: var_id },
closure_expr_id: closure_expr_def_id.to_local(),
};
- let var_ty = self.node_ty(var_hir_id)?;
+ let var_ty = self.node_ty(var_id)?;
// Mutability of original variable itself
let var_mutbl = MutabilityCategory::from_local(self.tcx, self.tables, var_id);
@@ -1514,7 +1517,7 @@
}
}
- pub fn descriptive_string(&self, tcx: TyCtxt<'_, '_, '_>) -> Cow<'static, str> {
+ pub fn descriptive_string(&self, tcx: TyCtxt<'_>) -> Cow<'static, str> {
match self.cat {
Categorization::StaticItem => {
"static item".into()
diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs
index 45e1d98..628a44c 100644
--- a/src/librustc/middle/reachable.rs
+++ b/src/librustc/middle/reachable.rs
@@ -27,9 +27,7 @@
// Returns true if the given item must be inlined because it may be
// monomorphized or it was marked with `#[inline]`. This will only return
// true for functions.
-fn item_might_be_inlined(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- item: &hir::Item,
- attrs: CodegenFnAttrs) -> bool {
+fn item_might_be_inlined(tcx: TyCtxt<'tcx>, item: &hir::Item, attrs: CodegenFnAttrs) -> bool {
if attrs.requests_inline() {
return true
}
@@ -44,9 +42,11 @@
}
}
-fn method_might_be_inlined<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- impl_item: &hir::ImplItem,
- impl_src: DefId) -> bool {
+fn method_might_be_inlined<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ impl_item: &hir::ImplItem,
+ impl_src: DefId,
+) -> bool {
let codegen_fn_attrs = tcx.codegen_fn_attrs(impl_item.hir_id.owner_def_id());
let generics = tcx.generics_of(tcx.hir().local_def_id_from_hir_id(impl_item.hir_id));
if codegen_fn_attrs.requests_inline() || generics.requires_monomorphization(tcx) {
@@ -67,7 +67,7 @@
// Information needed while computing reachability.
struct ReachableContext<'a, 'tcx: 'a> {
// The type context.
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
tables: &'a ty::TypeckTables<'tcx>,
// The set of items which must be exported in the linkage sense.
reachable_symbols: HirIdSet,
@@ -174,12 +174,12 @@
} else {
let impl_did = self.tcx
.hir()
- .get_parent_did_by_hir_id(hir_id);
+ .get_parent_did(hir_id);
// Check the impl. If the generics on the self
// type of the impl require inlining, this method
// does too.
let impl_hir_id = self.tcx.hir().as_local_hir_id(impl_did).unwrap();
- match self.tcx.hir().expect_item_by_hir_id(impl_hir_id).node {
+ match self.tcx.hir().expect_item(impl_hir_id).node {
hir::ItemKind::Impl(..) => {
let generics = self.tcx.generics_of(impl_did);
generics.requires_monomorphization(self.tcx)
@@ -296,7 +296,7 @@
self.visit_nested_body(body);
}
hir::ImplItemKind::Method(_, body) => {
- let did = self.tcx.hir().get_parent_did_by_hir_id(search_item);
+ let did = self.tcx.hir().get_parent_did(search_item);
if method_might_be_inlined(self.tcx, impl_item, did) {
self.visit_nested_body(body)
}
@@ -318,7 +318,7 @@
_ => {
bug!(
"found unexpected node kind in worklist: {} ({:?})",
- self.tcx.hir().hir_to_string(search_item),
+ self.tcx.hir().node_to_string(search_item),
node,
);
}
@@ -335,7 +335,7 @@
// trait items are used from inlinable code through method call syntax or UFCS, or their
// trait is a lang item.
struct CollectPrivateImplItemsVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
access_levels: &'a privacy::AccessLevels,
worklist: &'a mut Vec<hir::HirId>,
}
@@ -391,7 +391,7 @@
#[derive(Clone, HashStable)]
pub struct ReachableSet(pub Lrc<HirIdSet>);
-fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> ReachableSet {
+fn reachable_set<'tcx>(tcx: TyCtxt<'tcx>, crate_num: CrateNum) -> ReachableSet {
debug_assert!(crate_num == LOCAL_CRATE);
let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs
index 3d78b9b..dfab8e3 100644
--- a/src/librustc/middle/region.rs
+++ b/src/librustc/middle/region.rs
@@ -14,7 +14,6 @@
use std::fmt;
use rustc_macros::HashStable;
use syntax::source_map;
-use syntax::ast;
use syntax_pos::{Span, DUMMY_SP};
use crate::ty::{DefIdTree, TyCtxt};
use crate::ty::query::Providers;
@@ -169,29 +168,29 @@
self.id
}
- pub fn node_id(&self, tcx: TyCtxt<'_, '_, '_>, scope_tree: &ScopeTree) -> ast::NodeId {
+ pub fn hir_id(&self, scope_tree: &ScopeTree) -> hir::HirId {
match scope_tree.root_body {
Some(hir_id) => {
- tcx.hir().hir_to_node_id(hir::HirId {
+ hir::HirId {
owner: hir_id.owner,
local_id: self.item_local_id()
- })
+ }
}
- None => ast::DUMMY_NODE_ID
+ None => hir::DUMMY_HIR_ID
}
}
/// Returns the span of this `Scope`. Note that in general the
/// returned span may not correspond to the span of any `NodeId` in
/// the AST.
- pub fn span(&self, tcx: TyCtxt<'_, '_, '_>, scope_tree: &ScopeTree) -> Span {
- let node_id = self.node_id(tcx, scope_tree);
- if node_id == ast::DUMMY_NODE_ID {
+ pub fn span(&self, tcx: TyCtxt<'_>, scope_tree: &ScopeTree) -> Span {
+ let hir_id = self.hir_id(scope_tree);
+ if hir_id == hir::DUMMY_HIR_ID {
return DUMMY_SP;
}
- let span = tcx.hir().span(node_id);
+ let span = tcx.hir().span(hir_id);
if let ScopeData::Remainder(first_statement_index) = self.data {
- if let Node::Block(ref blk) = tcx.hir().get(node_id) {
+ if let Node::Block(ref blk) = tcx.hir().get_by_hir_id(hir_id) {
// Want span for scope starting after the
// indexed statement and ending at end of
// `blk`; reuse span of `blk` and shift `lo`
@@ -358,8 +357,8 @@
parent: Option<(Scope, ScopeDepth)>,
}
-struct RegionResolutionVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct RegionResolutionVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
// The number of expressions and patterns visited in the current body
expr_and_pat_count: usize,
@@ -646,13 +645,11 @@
/// Assuming that the provided region was defined within this `ScopeTree`,
/// returns the outermost `Scope` that the region outlives.
- pub fn early_free_scope<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- br: &ty::EarlyBoundRegion)
- -> Scope {
+ pub fn early_free_scope(&self, tcx: TyCtxt<'tcx>, br: &ty::EarlyBoundRegion) -> Scope {
let param_owner = tcx.parent(br.def_id).unwrap();
let param_owner_id = tcx.hir().as_local_hir_id(param_owner).unwrap();
- let scope = tcx.hir().maybe_body_owned_by_by_hir_id(param_owner_id).map(|body_id| {
+ let scope = tcx.hir().maybe_body_owned_by(param_owner_id).map(|body_id| {
tcx.hir().body(body_id).value.hir_id.local_id
}).unwrap_or_else(|| {
// The lifetime was defined on node that doesn't own a body,
@@ -677,8 +674,7 @@
/// Assuming that the provided region was defined within this `ScopeTree`,
/// returns the outermost `Scope` that the region outlives.
- pub fn free_scope<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, fr: &ty::FreeRegion)
- -> Scope {
+ pub fn free_scope(&self, tcx: TyCtxt<'tcx>, fr: &ty::FreeRegion) -> Scope {
let param_owner = match fr.bound_region {
ty::BoundRegion::BrNamed(def_id, _) => {
tcx.parent(def_id).unwrap()
@@ -734,9 +730,11 @@
}
/// Records the lifetime of a local variable as `cx.var_parent`
-fn record_var_lifetime(visitor: &mut RegionResolutionVisitor<'_, '_>,
- var_id: hir::ItemLocalId,
- _sp: Span) {
+fn record_var_lifetime(
+ visitor: &mut RegionResolutionVisitor<'_>,
+ var_id: hir::ItemLocalId,
+ _sp: Span,
+) {
match visitor.cx.var_parent {
None => {
// this can happen in extern fn declarations like
@@ -748,7 +746,7 @@
}
}
-fn resolve_block<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, blk: &'tcx hir::Block) {
+fn resolve_block<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, blk: &'tcx hir::Block) {
debug!("resolve_block(blk.hir_id={:?})", blk.hir_id);
let prev_cx = visitor.cx;
@@ -816,7 +814,7 @@
visitor.cx = prev_cx;
}
-fn resolve_arm<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, arm: &'tcx hir::Arm) {
+fn resolve_arm<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, arm: &'tcx hir::Arm) {
let prev_cx = visitor.cx;
visitor.enter_scope(
@@ -838,7 +836,7 @@
visitor.cx = prev_cx;
}
-fn resolve_pat<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, pat: &'tcx hir::Pat) {
+fn resolve_pat<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, pat: &'tcx hir::Pat) {
visitor.record_child_scope(Scope { id: pat.hir_id.local_id, data: ScopeData::Node });
// If this is a binding then record the lifetime of that binding.
@@ -855,7 +853,7 @@
debug!("resolve_pat - post-increment {} pat = {:?}", visitor.expr_and_pat_count, pat);
}
-fn resolve_stmt<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, stmt: &'tcx hir::Stmt) {
+fn resolve_stmt<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, stmt: &'tcx hir::Stmt) {
let stmt_id = stmt.hir_id.local_id;
debug!("resolve_stmt(stmt.id={:?})", stmt_id);
@@ -874,7 +872,7 @@
visitor.cx.parent = prev_parent;
}
-fn resolve_expr<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, expr: &'tcx hir::Expr) {
+fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx hir::Expr) {
debug!("resolve_expr - pre-increment {} expr = {:?}", visitor.expr_and_pat_count, expr);
let prev_cx = visitor.cx;
@@ -977,9 +975,11 @@
visitor.cx = prev_cx;
}
-fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>,
- pat: Option<&'tcx hir::Pat>,
- init: Option<&'tcx hir::Expr>) {
+fn resolve_local<'tcx>(
+ visitor: &mut RegionResolutionVisitor<'tcx>,
+ pat: Option<&'tcx hir::Pat>,
+ init: Option<&'tcx hir::Expr>,
+) {
debug!("resolve_local(pat={:?}, init={:?})", pat, init);
let blk_scope = visitor.cx.var_parent.map(|(p, _)| p);
@@ -1127,11 +1127,11 @@
/// | box E&
/// | E& as ...
/// | ( E& )
- fn record_rvalue_scope_if_borrow_expr<'a, 'tcx>(
- visitor: &mut RegionResolutionVisitor<'a, 'tcx>,
+ fn record_rvalue_scope_if_borrow_expr<'tcx>(
+ visitor: &mut RegionResolutionVisitor<'tcx>,
expr: &hir::Expr,
- blk_id: Option<Scope>)
- {
+ blk_id: Option<Scope>,
+ ) {
match expr.node {
hir::ExprKind::AddrOf(_, ref subexpr) => {
record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id);
@@ -1178,9 +1178,11 @@
/// | <rvalue>
///
/// Note: ET is intended to match "rvalues or places based on rvalues".
- fn record_rvalue_scope<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>,
- expr: &hir::Expr,
- blk_scope: Option<Scope>) {
+ fn record_rvalue_scope<'tcx>(
+ visitor: &mut RegionResolutionVisitor<'tcx>,
+ expr: &hir::Expr,
+ blk_scope: Option<Scope>,
+ ) {
let mut expr = expr;
loop {
// Note: give all the expressions matching `ET` with the
@@ -1205,7 +1207,7 @@
}
}
-impl<'a, 'tcx> RegionResolutionVisitor<'a, 'tcx> {
+impl<'tcx> RegionResolutionVisitor<'tcx> {
/// Records the current parent (if any) as the parent of `child_scope`.
/// Returns the depth of `child_scope`.
fn record_child_scope(&mut self, child_scope: Scope) -> ScopeDepth {
@@ -1235,7 +1237,7 @@
}
}
-impl<'a, 'tcx> Visitor<'tcx> for RegionResolutionVisitor<'a, 'tcx> {
+impl<'tcx> Visitor<'tcx> for RegionResolutionVisitor<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::None
}
@@ -1327,16 +1329,14 @@
}
}
-fn region_scope_tree<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
- -> &'tcx ScopeTree
-{
+fn region_scope_tree<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx ScopeTree {
let closure_base_def_id = tcx.closure_base_def_id(def_id);
if closure_base_def_id != def_id {
return tcx.region_scope_tree(closure_base_def_id);
}
let id = tcx.hir().as_local_hir_id(def_id).unwrap();
- let scope_tree = if let Some(body_id) = tcx.hir().maybe_body_owned_by_by_hir_id(id) {
+ let scope_tree = if let Some(body_id) = tcx.hir().maybe_body_owned_by(id) {
let mut visitor = RegionResolutionVisitor {
tcx,
scope_tree: ScopeTree::default(),
diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs
index 331b74d..76bff50 100644
--- a/src/librustc/middle/resolve_lifetime.rs
+++ b/src/librustc/middle/resolve_lifetime.rs
@@ -218,7 +218,7 @@
});
struct LifetimeContext<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
map: &'a mut NamedRegionMap,
scope: ScopeRef<'a>,
@@ -368,10 +368,7 @@
/// entire crate. You should not read the result of this query
/// directly, but rather use `named_region_map`, `is_late_bound_map`,
/// etc.
-fn resolve_lifetimes<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
- for_krate: CrateNum,
-) -> &'tcx ResolveLifetimes {
+fn resolve_lifetimes<'tcx>(tcx: TyCtxt<'tcx>, for_krate: CrateNum) -> &'tcx ResolveLifetimes {
assert_eq!(for_krate, LOCAL_CRATE);
let named_region_map = krate(tcx);
@@ -398,7 +395,7 @@
tcx.arena.alloc(rl)
}
-fn krate<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>) -> NamedRegionMap {
+fn krate<'tcx>(tcx: TyCtxt<'tcx>) -> NamedRegionMap {
let krate = tcx.hir().krate();
let mut map = NamedRegionMap {
defs: Default::default(),
@@ -628,7 +625,7 @@
// `abstract type MyAnonTy<'b>: MyTrait<'b>;`
// ^ ^ this gets resolved in the scope of
// the exist_ty generics
- let (generics, bounds) = match self.tcx.hir().expect_item_by_hir_id(item_id.id).node
+ let (generics, bounds) = match self.tcx.hir().expect_item(item_id.id).node
{
// named existential types are reached via TyKind::Path
// this arm is for `impl Trait` in the types of statics, constants and locals
@@ -1101,7 +1098,7 @@
}
}
-fn check_mixed_explicit_and_in_band_defs(tcx: TyCtxt<'_, '_, '_>, params: &P<[hir::GenericParam]>) {
+fn check_mixed_explicit_and_in_band_defs(tcx: TyCtxt<'_>, params: &P<[hir::GenericParam]>) {
let lifetime_params: Vec<_> = params
.iter()
.filter_map(|param| match param.kind {
@@ -1128,12 +1125,7 @@
}
}
-fn signal_shadowing_problem(
- tcx: TyCtxt<'_, '_, '_>,
- name: ast::Name,
- orig: Original,
- shadower: Shadower,
-) {
+fn signal_shadowing_problem(tcx: TyCtxt<'_>, name: ast::Name, orig: Original, shadower: Shadower) {
let mut err = if let (ShadowKind::Lifetime, ShadowKind::Lifetime) = (orig.kind, shadower.kind) {
// lifetime/lifetime shadowing is an error
struct_span_err!(
@@ -1169,7 +1161,7 @@
// if one of the label shadows a lifetime or another label.
fn extract_labels(ctxt: &mut LifetimeContext<'_, '_>, body: &hir::Body) {
struct GatherLabels<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
scope: ScopeRef<'a>,
labels_in_fn: &'a mut Vec<ast::Ident>,
}
@@ -1218,7 +1210,7 @@
}
fn check_if_label_shadows_lifetime(
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
mut scope: ScopeRef<'_>,
label: ast::Ident,
) {
@@ -1244,7 +1236,7 @@
signal_shadowing_problem(
tcx,
label.name,
- original_lifetime(tcx.hir().span_by_hir_id(hir_id)),
+ original_lifetime(tcx.hir().span(hir_id)),
shadower_label(label.span),
);
return;
@@ -1256,9 +1248,7 @@
}
}
-fn compute_object_lifetime_defaults(
- tcx: TyCtxt<'_, '_, '_>,
-) -> HirIdMap<Vec<ObjectLifetimeDefault>> {
+fn compute_object_lifetime_defaults(tcx: TyCtxt<'_>) -> HirIdMap<Vec<ObjectLifetimeDefault>> {
let mut map = HirIdMap::default();
for item in tcx.hir().krate().items.values() {
match item.node {
@@ -1315,7 +1305,7 @@
/// of the form `T:'a` so as to determine the `ObjectLifetimeDefault`
/// for each type parameter.
fn object_lifetime_defaults_for_item(
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
generics: &hir::Generics,
) -> Vec<ObjectLifetimeDefault> {
fn add_bounds(set: &mut Set1<hir::LifetimeName>, bounds: &[hir::GenericBound]) {
@@ -1596,6 +1586,17 @@
continue;
}
+ if let Some(parent_def_id) = self.tcx.parent(def_id) {
+ if let Some(parent_hir_id) = self.tcx.hir()
+ .as_local_hir_id(parent_def_id) {
+ // lifetimes in `derive` expansions don't count (Issue #53738)
+ if self.tcx.hir().attrs(parent_hir_id).iter()
+ .any(|attr| attr.check_name(sym::automatically_derived)) {
+ continue;
+ }
+ }
+ }
+
let mut err = self.tcx.struct_span_lint_hir(
lint::builtin::SINGLE_USE_LIFETIMES,
id,
@@ -1689,7 +1690,7 @@
// Find the start of nested early scopes, e.g., in methods.
let mut index = 0;
if let Some(parent_id) = parent_id {
- let parent = self.tcx.hir().expect_item_by_hir_id(parent_id);
+ let parent = self.tcx.hir().expect_item(parent_id);
if sub_items_have_self_param(&parent.node) {
index += 1; // Self comes before lifetimes
}
@@ -1822,7 +1823,7 @@
// Do not free early-bound regions, only late-bound ones.
} else if let Some(body_id) = outermost_body {
let fn_id = self.tcx.hir().body_owner(body_id);
- match self.tcx.hir().get(fn_id) {
+ match self.tcx.hir().get_by_hir_id(fn_id) {
Node::Item(&hir::Item {
node: hir::ItemKind::Fn(..),
..
@@ -1835,7 +1836,7 @@
node: hir::ImplItemKind::Method(..),
..
}) => {
- let scope = self.tcx.hir().local_def_id(fn_id);
+ let scope = self.tcx.hir().local_def_id_from_hir_id(fn_id);
def = Region::Free(scope, def.id().unwrap());
}
_ => {}
@@ -2064,7 +2065,7 @@
}) => {
if let hir::ItemKind::Trait(.., ref trait_items) = self.tcx
.hir()
- .expect_item_by_hir_id(self.tcx.hir().get_parent_item(parent))
+ .expect_item(self.tcx.hir().get_parent_item(parent))
.node
{
assoc_item_kind = trait_items
@@ -2084,7 +2085,7 @@
}) => {
if let hir::ItemKind::Impl(.., ref self_ty, ref impl_items) = self.tcx
.hir()
- .expect_item_by_hir_id(self.tcx.hir().get_parent_item(parent))
+ .expect_item(self.tcx.hir().get_parent_item(parent))
.node
{
impl_self = Some(self_ty);
@@ -2628,7 +2629,7 @@
signal_shadowing_problem(
self.tcx,
param.name.ident().name,
- original_lifetime(self.tcx.hir().span_by_hir_id(hir_id)),
+ original_lifetime(self.tcx.hir().span(hir_id)),
shadower_lifetime(¶m),
);
return;
@@ -2695,7 +2696,7 @@
debug!(
"insert_lifetime: {} resolved to {:?} span={:?}",
- self.tcx.hir().hir_to_string(lifetime_ref.hir_id),
+ self.tcx.hir().node_to_string(lifetime_ref.hir_id),
def,
self.tcx.sess.source_map().span_to_string(lifetime_ref.span)
);
diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs
index 815c68b..19d127a 100644
--- a/src/librustc/middle/stability.rs
+++ b/src/librustc/middle/stability.rs
@@ -106,7 +106,7 @@
// A private tree-walker for producing an Index.
struct Annotator<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
index: &'a mut Index<'tcx>,
parent_stab: Option<&'tcx Stability>,
parent_depr: Option<DeprecationEntry>,
@@ -317,7 +317,7 @@
}
struct MissingStabilityAnnotations<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
access_levels: &'a AccessLevels,
}
@@ -389,8 +389,8 @@
}
}
-impl<'a, 'tcx> Index<'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Index<'tcx> {
+impl<'tcx> Index<'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>) -> Index<'tcx> {
let is_staged_api =
tcx.sess.opts.debugging_opts.force_unstable_if_unmarked ||
tcx.features().staged_api;
@@ -452,7 +452,7 @@
AnnotationKind::Required,
|v| intravisit::walk_crate(v, krate));
}
- return index
+ return index;
}
pub fn local_stability(&self, id: HirId) -> Option<&'tcx Stability> {
@@ -466,7 +466,7 @@
/// Cross-references the feature names of unstable APIs with enabled
/// features and possibly prints errors.
-fn check_mod_unstable_api_usage<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) {
+fn check_mod_unstable_api_usage<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
tcx.hir().visit_item_likes_in_module(module_def_id, &mut Checker { tcx }.as_deep_visitor());
}
@@ -501,8 +501,8 @@
}
}
-struct Checker<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct Checker<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
/// Result of `TyCtxt::eval_stability`.
@@ -521,7 +521,7 @@
Unmarked,
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
// See issue #38412.
fn skip_stability_check_due_to_privacy(self, mut def_id: DefId) -> bool {
// Check if `def_id` is a trait method.
@@ -752,7 +752,7 @@
}
}
-impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> {
+impl Visitor<'tcx> for Checker<'tcx> {
/// Because stability levels are scoped lexically, we want to walk
/// nested items in the context of the outer item, so enable
/// deep-walking.
@@ -827,7 +827,7 @@
}
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
pub fn lookup_deprecation(self, id: DefId) -> Option<Deprecation> {
self.lookup_deprecation_entry(id).map(|depr| depr.attr)
}
@@ -836,7 +836,7 @@
/// Given the list of enabled features that were not language features (i.e., that
/// were expected to be library features), and the list of features used from
/// libraries, identify activated features that don't exist and error about them.
-pub fn check_unused_or_stable_features<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn check_unused_or_stable_features<'tcx>(tcx: TyCtxt<'tcx>) {
let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
if tcx.stability().staged_api[&LOCAL_CRATE] {
@@ -920,11 +920,11 @@
// don't lint about unused features. We should reenable this one day!
}
-fn unnecessary_stable_feature_lint<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn unnecessary_stable_feature_lint<'tcx>(
+ tcx: TyCtxt<'tcx>,
span: Span,
feature: Symbol,
- since: Symbol
+ since: Symbol,
) {
tcx.lint_hir(lint::builtin::STABLE_FEATURES,
hir::CRATE_HIR_ID,
diff --git a/src/librustc/middle/weak_lang_items.rs b/src/librustc/middle/weak_lang_items.rs
index 75c21c7..422ff3f 100644
--- a/src/librustc/middle/weak_lang_items.rs
+++ b/src/librustc/middle/weak_lang_items.rs
@@ -18,13 +18,13 @@
($($name:ident, $item:ident, $sym:ident;)*) => (
struct Context<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
items: &'a mut lang_items::LanguageItems,
}
/// Checks the crate for usage of weak lang items, returning a vector of all the
/// language items required by this crate, but not defined yet.
-pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub fn check_crate<'tcx>(tcx: TyCtxt<'tcx>,
items: &mut lang_items::LanguageItems) {
// These are never called by user code, they're generated by the compiler.
// They will never implicitly be added to the `missing` array unless we do
@@ -60,7 +60,7 @@
/// Not all lang items are always required for each compilation, particularly in
/// the case of panic=abort. In these situations some lang items are injected by
/// crates and don't actually need to be defined in libstd.
-pub fn whitelisted(tcx: TyCtxt<'_, '_, '_>, lang_item: lang_items::LangItem) -> bool {
+pub fn whitelisted(tcx: TyCtxt<'_>, lang_item: lang_items::LangItem) -> bool {
// If we're not compiling with unwinding, we won't actually need these
// symbols. Other panic runtimes ensure that the relevant symbols are
// available to link things together, but they're never exercised.
@@ -72,7 +72,7 @@
false
}
-fn verify<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn verify<'tcx>(tcx: TyCtxt<'tcx>,
items: &lang_items::LanguageItems) {
// We only need to check for the presence of weak lang items if we're
// emitting something that's not an rlib.
@@ -142,7 +142,7 @@
}
}
-impl<'a, 'tcx, 'gcx> TyCtxt<'a, 'tcx, 'gcx> {
+impl<'tcx> TyCtxt<'tcx> {
pub fn is_weak_lang_item(&self, item_def_id: DefId) -> bool {
let lang_items = self.lang_items();
let did = Some(item_def_id);
diff --git a/src/librustc/mir/interpret/error.rs b/src/librustc/mir/interpret/error.rs
index a9a34f7..e8113b4 100644
--- a/src/librustc/mir/interpret/error.rs
+++ b/src/librustc/mir/interpret/error.rs
@@ -74,19 +74,16 @@
}
}
-impl<'a, 'gcx, 'tcx> ConstEvalErr<'tcx> {
- pub fn struct_error(&self,
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
- message: &str)
- -> Result<DiagnosticBuilder<'tcx>, ErrorHandled>
- {
+impl<'tcx> ConstEvalErr<'tcx> {
+ pub fn struct_error(
+ &self,
+ tcx: TyCtxtAt<'tcx>,
+ message: &str,
+ ) -> Result<DiagnosticBuilder<'tcx>, ErrorHandled> {
self.struct_generic(tcx, message, None)
}
- pub fn report_as_error(&self,
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
- message: &str
- ) -> ErrorHandled {
+ pub fn report_as_error(&self, tcx: TyCtxtAt<'tcx>, message: &str) -> ErrorHandled {
let err = self.struct_error(tcx, message);
match err {
Ok(mut err) => {
@@ -97,8 +94,9 @@
}
}
- pub fn report_as_lint(&self,
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+ pub fn report_as_lint(
+ &self,
+ tcx: TyCtxtAt<'tcx>,
message: &str,
lint_root: hir::HirId,
span: Option<Span>,
@@ -131,7 +129,7 @@
fn struct_generic(
&self,
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxtAt<'tcx>,
message: &str,
lint_root: Option<hir::HirId>,
) -> Result<DiagnosticBuilder<'tcx>, ErrorHandled> {
@@ -172,10 +170,7 @@
}
}
-pub fn struct_error<'a, 'gcx, 'tcx>(
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
- msg: &str,
-) -> DiagnosticBuilder<'tcx> {
+pub fn struct_error<'tcx>(tcx: TyCtxtAt<'tcx>, msg: &str) -> DiagnosticBuilder<'tcx> {
struct_span_err!(tcx.sess, tcx.span, E0080, "{}", msg)
}
diff --git a/src/librustc/mir/interpret/mod.rs b/src/librustc/mir/interpret/mod.rs
index 15b09f6..a36c788 100644
--- a/src/librustc/mir/interpret/mod.rs
+++ b/src/librustc/mir/interpret/mod.rs
@@ -64,12 +64,9 @@
Static,
}
-pub fn specialized_encode_alloc_id<
- 'a, 'tcx,
- E: Encoder,
->(
+pub fn specialized_encode_alloc_id<'tcx, E: Encoder>(
encoder: &mut E,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
alloc_id: AllocId,
) -> Result<(), E::Error> {
let alloc: GlobalAlloc<'tcx> =
@@ -145,13 +142,10 @@
}
impl<'s> AllocDecodingSession<'s> {
-
// Decodes an AllocId in a thread-safe way.
- pub fn decode_alloc_id<'a, 'tcx, D>(&self,
- decoder: &mut D)
- -> Result<AllocId, D::Error>
- where D: TyDecoder<'a, 'tcx>,
- 'tcx: 'a,
+ pub fn decode_alloc_id<D>(&self, decoder: &mut D) -> Result<AllocId, D::Error>
+ where
+ D: TyDecoder<'tcx>,
{
// Read the index of the allocation
let idx = decoder.read_u32()? as usize;
diff --git a/src/librustc/mir/mod.rs b/src/librustc/mir/mod.rs
index 3102288..1d5c1cb 100644
--- a/src/librustc/mir/mod.rs
+++ b/src/librustc/mir/mod.rs
@@ -1196,9 +1196,9 @@
FalseEdges {
/// The target normal control flow will take
real_target: BasicBlock,
- /// The list of blocks control flow could conceptually take, but won't
- /// in practice
- imaginary_targets: Vec<BasicBlock>,
+ /// A block control flow could conceptually jump to, but won't in
+ /// practice
+ imaginary_target: BasicBlock,
},
/// A terminator for blocks that only take one path in reality, but where we
/// reserve the right to unwind in borrowck, even if it won't happen in practice.
@@ -1240,8 +1240,8 @@
}
impl<'tcx> TerminatorKind<'tcx> {
- pub fn if_<'a, 'gcx>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ pub fn if_(
+ tcx: TyCtxt<'tcx>,
cond: Operand<'tcx>,
t: BasicBlock,
f: BasicBlock,
@@ -1335,8 +1335,8 @@
SwitchInt { ref targets, .. } => None.into_iter().chain(&targets[..]),
FalseEdges {
ref real_target,
- ref imaginary_targets,
- } => Some(real_target).into_iter().chain(&imaginary_targets[..]),
+ ref imaginary_target,
+ } => Some(real_target).into_iter().chain(slice::from_ref(imaginary_target)),
}
}
@@ -1422,10 +1422,10 @@
} => None.into_iter().chain(&mut targets[..]),
FalseEdges {
ref mut real_target,
- ref mut imaginary_targets,
+ ref mut imaginary_target,
} => Some(real_target)
.into_iter()
- .chain(&mut imaginary_targets[..]),
+ .chain(slice::from_mut(imaginary_target)),
}
}
@@ -1722,12 +1722,9 @@
Assert { cleanup: None, .. } => vec!["".into()],
Assert { .. } => vec!["success".into(), "unwind".into()],
FalseEdges {
- ref imaginary_targets,
..
} => {
- let mut l = vec!["real".into()];
- l.resize(imaginary_targets.len() + 1, "imaginary".into());
- l
+ vec!["real".into(), "imaginary".into()]
}
FalseUnwind {
unwind: Some(_), ..
@@ -2323,8 +2320,8 @@
/// Convenience helper to make a constant that refers to the fn
/// with given `DefId` and substs. Since this is used to synthesize
/// MIR, assumes `user_ty` is None.
- pub fn function_handle<'a>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ pub fn function_handle(
+ tcx: TyCtxt<'tcx>,
def_id: DefId,
substs: SubstsRef<'tcx>,
span: Span,
@@ -2568,7 +2565,7 @@
let name = if tcx.sess.opts.debugging_opts.span_free_formats {
format!("[closure@{:?}]", hir_id)
} else {
- format!("[closure@{:?}]", tcx.hir().span_by_hir_id(hir_id))
+ format!("[closure@{:?}]", tcx.hir().span(hir_id))
};
let mut struct_fmt = fmt.debug_struct(&name);
@@ -2588,7 +2585,7 @@
AggregateKind::Generator(def_id, _, _) => ty::tls::with(|tcx| {
if let Some(hir_id) = tcx.hir().as_local_hir_id(def_id) {
let name = format!("[generator@{:?}]",
- tcx.hir().span_by_hir_id(hir_id));
+ tcx.hir().span(hir_id));
let mut struct_fmt = fmt.debug_struct(&name);
if let Some(upvars) = tcx.upvars(def_id) {
@@ -2795,7 +2792,7 @@
CloneTypeFoldableAndLiftImpls! { ProjectionKind, }
impl<'tcx> TypeFoldable<'tcx> for UserTypeProjection {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
use crate::mir::ProjectionElem::*;
let base = self.base.fold_with(folder);
@@ -3012,8 +3009,8 @@
}
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
-pub struct BorrowCheckResult<'gcx> {
- pub closure_requirements: Option<ClosureRegionRequirements<'gcx>>,
+pub struct BorrowCheckResult<'tcx> {
+ pub closure_requirements: Option<ClosureRegionRequirements<'tcx>>,
pub used_mut_upvars: SmallVec<[Field; 8]>,
}
@@ -3068,7 +3065,7 @@
/// TyCtxt, and hence we cannot use `ReVar` (which is what we use
/// internally within the rest of the NLL code).
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
-pub struct ClosureRegionRequirements<'gcx> {
+pub struct ClosureRegionRequirements<'tcx> {
/// The number of external regions defined on the closure. In our
/// example above, it would be 3 -- one for `'static`, then `'1`
/// and `'2`. This is just used for a sanity check later on, to
@@ -3078,7 +3075,7 @@
/// Requirements between the various free regions defined in
/// indices.
- pub outlives_requirements: Vec<ClosureOutlivesRequirement<'gcx>>,
+ pub outlives_requirements: Vec<ClosureOutlivesRequirement<'tcx>>,
}
/// Indicates an outlives constraint between a type or between two
@@ -3158,7 +3155,7 @@
/*
* TypeFoldable implementations for MIR types
- */
+*/
CloneTypeFoldableAndLiftImpls! {
BlockTailInfo,
@@ -3262,7 +3259,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for Terminator<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
use crate::mir::TerminatorKind::*;
let kind = match self.kind {
@@ -3356,10 +3353,10 @@
Unreachable => Unreachable,
FalseEdges {
real_target,
- ref imaginary_targets,
+ imaginary_target,
} => FalseEdges {
real_target,
- imaginary_targets: imaginary_targets.clone(),
+ imaginary_target,
},
FalseUnwind {
real_target,
@@ -3430,7 +3427,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for Place<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
match self {
&Place::Projection(ref p) => Place::Projection(p.fold_with(folder)),
_ => self.clone(),
@@ -3447,7 +3444,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for Rvalue<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
use crate::mir::Rvalue::*;
match *self {
Use(ref op) => Use(op.fold_with(folder)),
@@ -3519,7 +3516,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for Operand<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
match *self {
Operand::Copy(ref place) => Operand::Copy(place.fold_with(folder)),
Operand::Move(ref place) => Operand::Move(place.fold_with(folder)),
@@ -3536,7 +3533,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for Projection<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
use crate::mir::ProjectionElem::*;
let base = self.base.fold_with(folder);
@@ -3562,7 +3559,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for Field {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, _: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, _: &mut F) -> Self {
*self
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, _: &mut V) -> bool {
@@ -3571,7 +3568,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for GeneratorSavedLocal {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, _: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, _: &mut F) -> Self {
*self
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, _: &mut V) -> bool {
@@ -3580,7 +3577,7 @@
}
impl<'tcx, R: Idx, C: Idx> TypeFoldable<'tcx> for BitMatrix<R, C> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, _: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, _: &mut F) -> Self {
self.clone()
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, _: &mut V) -> bool {
@@ -3589,7 +3586,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for Constant<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
Constant {
span: self.span.clone(),
ty: self.ty.fold_with(folder),
diff --git a/src/librustc/mir/mono.rs b/src/librustc/mir/mono.rs
index 79228a5..432a61d 100644
--- a/src/librustc/mir/mono.rs
+++ b/src/librustc/mir/mono.rs
@@ -48,7 +48,7 @@
}
impl<'tcx> MonoItem<'tcx> {
- pub fn size_estimate<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> usize {
+ pub fn size_estimate(&self, tcx: TyCtxt<'tcx>) -> usize {
match *self {
MonoItem::Fn(instance) => {
// Estimate the size of a function based on how many statements
@@ -72,7 +72,7 @@
}
}
- pub fn symbol_name(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> SymbolName {
+ pub fn symbol_name(&self, tcx: TyCtxt<'tcx>) -> SymbolName {
match *self {
MonoItem::Fn(instance) => tcx.symbol_name(instance),
MonoItem::Static(def_id) => {
@@ -87,9 +87,7 @@
}
}
- pub fn instantiation_mode(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> InstantiationMode {
+ pub fn instantiation_mode(&self, tcx: TyCtxt<'tcx>) -> InstantiationMode {
let inline_in_all_cgus =
tcx.sess.opts.debugging_opts.inline_in_all_cgus.unwrap_or_else(|| {
tcx.sess.opts.optimize != OptLevel::No
@@ -133,7 +131,7 @@
}
}
- pub fn explicit_linkage(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Linkage> {
+ pub fn explicit_linkage(&self, tcx: TyCtxt<'tcx>) -> Option<Linkage> {
let def_id = match *self {
MonoItem::Fn(ref instance) => instance.def_id(),
MonoItem::Static(def_id) => def_id,
@@ -169,7 +167,7 @@
/// Similarly, if a vtable method has such a signature, and therefore can't
/// be used, we can just not emit it and have a placeholder (a null pointer,
/// which will never be accessed) in its place.
- pub fn is_instantiable(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
+ pub fn is_instantiable(&self, tcx: TyCtxt<'tcx>) -> bool {
debug!("is_instantiable({:?})", self);
let (def_id, substs) = match *self {
MonoItem::Fn(ref instance) => (instance.def_id(), instance.substs),
@@ -181,7 +179,7 @@
tcx.substitute_normalize_and_test_predicates((def_id, &substs))
}
- pub fn to_string(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, debug: bool) -> String {
+ pub fn to_string(&self, tcx: TyCtxt<'tcx>, debug: bool) -> String {
return match *self {
MonoItem::Fn(instance) => {
to_string_internal(tcx, "fn ", instance, debug)
@@ -195,11 +193,12 @@
}
};
- fn to_string_internal<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- prefix: &str,
- instance: Instance<'tcx>,
- debug: bool)
- -> String {
+ fn to_string_internal<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ prefix: &str,
+ instance: Instance<'tcx>,
+ debug: bool,
+ ) -> String {
let mut result = String::with_capacity(32);
result.push_str(prefix);
let printer = DefPathBasedNames::new(tcx, false, false);
@@ -208,7 +207,7 @@
}
}
- pub fn local_span(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Span> {
+ pub fn local_span(&self, tcx: TyCtxt<'tcx>) -> Option<Span> {
match *self {
MonoItem::Fn(Instance { def, .. }) => {
tcx.hir().as_local_hir_id(def.def_id())
@@ -219,7 +218,7 @@
MonoItem::GlobalAsm(hir_id) => {
Some(hir_id)
}
- }.map(|hir_id| tcx.hir().span_by_hir_id(hir_id))
+ }.map(|hir_id| tcx.hir().span(hir_id))
}
}
@@ -334,7 +333,7 @@
base_n::encode(hash, base_n::CASE_INSENSITIVE)
}
- pub fn estimate_size<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+ pub fn estimate_size(&mut self, tcx: TyCtxt<'tcx>) {
// Estimate the size of a codegen unit as (approximately) the number of MIR
// statements it corresponds to.
self.size_estimate = Some(self.items.keys().map(|mi| mi.size_estimate(tcx)).sum());
@@ -360,7 +359,7 @@
WorkProductId::from_cgu_name(&self.name().as_str())
}
- pub fn work_product(&self, tcx: TyCtxt<'_, '_, '_>) -> WorkProduct {
+ pub fn work_product(&self, tcx: TyCtxt<'_>) -> WorkProduct {
let work_product_id = self.work_product_id();
tcx.dep_graph
.previous_work_product(&work_product_id)
@@ -369,17 +368,16 @@
})
}
- pub fn items_in_deterministic_order<'a>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> Vec<(MonoItem<'tcx>,
- (Linkage, Visibility))> {
+ pub fn items_in_deterministic_order(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ ) -> Vec<(MonoItem<'tcx>, (Linkage, Visibility))> {
// The codegen tests rely on items being process in the same order as
// they appear in the file, so for local items, we sort by node_id first
#[derive(PartialEq, Eq, PartialOrd, Ord)]
pub struct ItemSortKey(Option<HirId>, SymbolName);
- fn item_sort_key<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- item: MonoItem<'tcx>) -> ItemSortKey {
+ fn item_sort_key<'tcx>(tcx: TyCtxt<'tcx>, item: MonoItem<'tcx>) -> ItemSortKey {
ItemSortKey(match item {
MonoItem::Fn(ref instance) => {
match instance.def {
@@ -415,7 +413,7 @@
items
}
- pub fn codegen_dep_node(&self, tcx: TyCtxt<'_, 'tcx, 'tcx>) -> DepNode {
+ pub fn codegen_dep_node(&self, tcx: TyCtxt<'tcx>) -> DepNode {
DepNode::new(tcx, DepConstructor::CompileCodegenUnit(self.name().clone()))
}
}
@@ -445,14 +443,13 @@
}
}
-pub struct CodegenUnitNameBuilder<'a, 'gcx: 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub struct CodegenUnitNameBuilder<'tcx> {
+ tcx: TyCtxt<'tcx>,
cache: FxHashMap<CrateNum, String>,
}
-impl<'a, 'gcx: 'tcx, 'tcx: 'a> CodegenUnitNameBuilder<'a, 'gcx, 'tcx> {
-
- pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self {
+impl CodegenUnitNameBuilder<'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>) -> Self {
CodegenUnitNameBuilder {
tcx,
cache: Default::default(),
diff --git a/src/librustc/mir/tcx.rs b/src/librustc/mir/tcx.rs
index 5135aeb..afabcdf 100644
--- a/src/librustc/mir/tcx.rs
+++ b/src/librustc/mir/tcx.rs
@@ -21,7 +21,7 @@
#[cfg(target_arch = "x86_64")]
static_assert_size!(PlaceTy<'_>, 16);
-impl<'a, 'gcx, 'tcx> PlaceTy<'tcx> {
+impl<'tcx> PlaceTy<'tcx> {
pub fn from_ty(ty: Ty<'tcx>) -> PlaceTy<'tcx> {
PlaceTy { ty, variant_index: None }
}
@@ -33,8 +33,7 @@
/// not carry a `Ty` for `T`.)
///
/// Note that the resulting type has not been normalized.
- pub fn field_ty(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, f: &Field) -> Ty<'tcx>
- {
+ pub fn field_ty(self, tcx: TyCtxt<'tcx>, f: &Field) -> Ty<'tcx> {
let answer = match self.ty.sty {
ty::Adt(adt_def, substs) => {
let variant_def = match self.variant_index {
@@ -57,10 +56,7 @@
/// Convenience wrapper around `projection_ty_core` for
/// `PlaceElem`, where we can just use the `Ty` that is already
/// stored inline on field projection elems.
- pub fn projection_ty(self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- elem: &PlaceElem<'tcx>)
- -> PlaceTy<'tcx>
- {
+ pub fn projection_ty(self, tcx: TyCtxt<'tcx>, elem: &PlaceElem<'tcx>) -> PlaceTy<'tcx> {
self.projection_ty_core(tcx, elem, |_, _, ty| ty)
}
@@ -71,12 +67,13 @@
/// (which should be trivial when `T` = `Ty`).
pub fn projection_ty_core<V, T>(
self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
elem: &ProjectionElem<V, T>,
- mut handle_field: impl FnMut(&Self, &Field, &T) -> Ty<'tcx>)
- -> PlaceTy<'tcx>
+ mut handle_field: impl FnMut(&Self, &Field, &T) -> Ty<'tcx>,
+ ) -> PlaceTy<'tcx>
where
- V: ::std::fmt::Debug, T: ::std::fmt::Debug
+ V: ::std::fmt::Debug,
+ T: ::std::fmt::Debug,
{
let answer = match *elem {
ProjectionElem::Deref => {
@@ -121,8 +118,9 @@
}
impl<'tcx> Place<'tcx> {
- pub fn ty<'a, 'gcx, D>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> PlaceTy<'tcx>
- where D: HasLocalDecls<'tcx>
+ pub fn ty<D>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> PlaceTy<'tcx>
+ where
+ D: HasLocalDecls<'tcx>,
{
match *self {
Place::Base(PlaceBase::Local(index)) =>
@@ -141,8 +139,9 @@
}
impl<'tcx> Rvalue<'tcx> {
- pub fn ty<'a, 'gcx, D>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx>
- where D: HasLocalDecls<'tcx>
+ pub fn ty<D>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> Ty<'tcx>
+ where
+ D: HasLocalDecls<'tcx>,
{
match *self {
Rvalue::Use(ref operand) => operand.ty(local_decls, tcx),
@@ -222,8 +221,9 @@
}
impl<'tcx> Operand<'tcx> {
- pub fn ty<'a, 'gcx, D>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx>
- where D: HasLocalDecls<'tcx>
+ pub fn ty<D>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> Ty<'tcx>
+ where
+ D: HasLocalDecls<'tcx>,
{
match self {
&Operand::Copy(ref l) |
@@ -234,10 +234,7 @@
}
impl<'tcx> BinOp {
- pub fn ty<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- lhs_ty: Ty<'tcx>,
- rhs_ty: Ty<'tcx>)
- -> Ty<'tcx> {
+ pub fn ty(&self, tcx: TyCtxt<'tcx>, lhs_ty: Ty<'tcx>, rhs_ty: Ty<'tcx>) -> Ty<'tcx> {
// FIXME: handle SIMD correctly
match self {
&BinOp::Add | &BinOp::Sub | &BinOp::Mul | &BinOp::Div | &BinOp::Rem |
diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs
index f4d523b..fc09248 100644
--- a/src/librustc/session/config.rs
+++ b/src/librustc/session/config.rs
@@ -1,5 +1,3 @@
-// ignore-tidy-filelength
-
//! Contains infrastructure for configuring the compiler, including parsing
//! command line options.
@@ -2720,711 +2718,4 @@
}
#[cfg(test)]
-mod tests {
- use getopts;
- use crate::lint;
- use crate::middle::cstore;
- use crate::session::config::{
- build_configuration,
- build_session_options_and_crate_config,
- to_crate_config
- };
- use crate::session::config::{LtoCli, LinkerPluginLto, SwitchWithOptPath, ExternEntry};
- use crate::session::build_session;
- use crate::session::search_paths::SearchPath;
- use std::collections::{BTreeMap, BTreeSet};
- use std::iter::FromIterator;
- use std::path::PathBuf;
- use super::{Externs, OutputType, OutputTypes, SymbolManglingVersion};
- use rustc_target::spec::{MergeFunctions, PanicStrategy, RelroLevel};
- use syntax::symbol::sym;
- use syntax::edition::{Edition, DEFAULT_EDITION};
- use syntax;
- use super::Options;
-
- impl ExternEntry {
- fn new_public<S: Into<String>,
- I: IntoIterator<Item = Option<S>>>(locations: I) -> ExternEntry {
- let locations: BTreeSet<_> = locations.into_iter().map(|o| o.map(|s| s.into()))
- .collect();
-
- ExternEntry {
- locations,
- is_private_dep: false
- }
- }
- }
-
- fn optgroups() -> getopts::Options {
- let mut opts = getopts::Options::new();
- for group in super::rustc_optgroups() {
- (group.apply)(&mut opts);
- }
- return opts;
- }
-
- fn mk_map<K: Ord, V>(entries: Vec<(K, V)>) -> BTreeMap<K, V> {
- BTreeMap::from_iter(entries.into_iter())
- }
-
- // When the user supplies --test we should implicitly supply --cfg test
- #[test]
- fn test_switch_implies_cfg_test() {
- syntax::with_default_globals(|| {
- let matches = &match optgroups().parse(&["--test".to_string()]) {
- Ok(m) => m,
- Err(f) => panic!("test_switch_implies_cfg_test: {}", f),
- };
- let registry = errors::registry::Registry::new(&[]);
- let (sessopts, cfg) = build_session_options_and_crate_config(matches);
- let sess = build_session(sessopts, None, registry);
- let cfg = build_configuration(&sess, to_crate_config(cfg));
- assert!(cfg.contains(&(sym::test, None)));
- });
- }
-
- // When the user supplies --test and --cfg test, don't implicitly add
- // another --cfg test
- #[test]
- fn test_switch_implies_cfg_test_unless_cfg_test() {
- syntax::with_default_globals(|| {
- let matches = &match optgroups().parse(&["--test".to_string(),
- "--cfg=test".to_string()]) {
- Ok(m) => m,
- Err(f) => panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f),
- };
- let registry = errors::registry::Registry::new(&[]);
- let (sessopts, cfg) = build_session_options_and_crate_config(matches);
- let sess = build_session(sessopts, None, registry);
- let cfg = build_configuration(&sess, to_crate_config(cfg));
- let mut test_items = cfg.iter().filter(|&&(name, _)| name == sym::test);
- assert!(test_items.next().is_some());
- assert!(test_items.next().is_none());
- });
- }
-
- #[test]
- fn test_can_print_warnings() {
- syntax::with_default_globals(|| {
- let matches = optgroups().parse(&["-Awarnings".to_string()]).unwrap();
- let registry = errors::registry::Registry::new(&[]);
- let (sessopts, _) = build_session_options_and_crate_config(&matches);
- let sess = build_session(sessopts, None, registry);
- assert!(!sess.diagnostic().flags.can_emit_warnings);
- });
-
- syntax::with_default_globals(|| {
- let matches = optgroups()
- .parse(&["-Awarnings".to_string(), "-Dwarnings".to_string()])
- .unwrap();
- let registry = errors::registry::Registry::new(&[]);
- let (sessopts, _) = build_session_options_and_crate_config(&matches);
- let sess = build_session(sessopts, None, registry);
- assert!(sess.diagnostic().flags.can_emit_warnings);
- });
-
- syntax::with_default_globals(|| {
- let matches = optgroups().parse(&["-Adead_code".to_string()]).unwrap();
- let registry = errors::registry::Registry::new(&[]);
- let (sessopts, _) = build_session_options_and_crate_config(&matches);
- let sess = build_session(sessopts, None, registry);
- assert!(sess.diagnostic().flags.can_emit_warnings);
- });
- }
-
- #[test]
- fn test_output_types_tracking_hash_different_paths() {
- let mut v1 = Options::default();
- let mut v2 = Options::default();
- let mut v3 = Options::default();
-
- v1.output_types =
- OutputTypes::new(&[(OutputType::Exe, Some(PathBuf::from("./some/thing")))]);
- v2.output_types =
- OutputTypes::new(&[(OutputType::Exe, Some(PathBuf::from("/some/thing")))]);
- v3.output_types = OutputTypes::new(&[(OutputType::Exe, None)]);
-
- assert!(v1.dep_tracking_hash() != v2.dep_tracking_hash());
- assert!(v1.dep_tracking_hash() != v3.dep_tracking_hash());
- assert!(v2.dep_tracking_hash() != v3.dep_tracking_hash());
-
- // Check clone
- assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
- assert_eq!(v2.dep_tracking_hash(), v2.clone().dep_tracking_hash());
- assert_eq!(v3.dep_tracking_hash(), v3.clone().dep_tracking_hash());
- }
-
- #[test]
- fn test_output_types_tracking_hash_different_construction_order() {
- let mut v1 = Options::default();
- let mut v2 = Options::default();
-
- v1.output_types = OutputTypes::new(&[
- (OutputType::Exe, Some(PathBuf::from("./some/thing"))),
- (OutputType::Bitcode, Some(PathBuf::from("./some/thing.bc"))),
- ]);
-
- v2.output_types = OutputTypes::new(&[
- (OutputType::Bitcode, Some(PathBuf::from("./some/thing.bc"))),
- (OutputType::Exe, Some(PathBuf::from("./some/thing"))),
- ]);
-
- assert_eq!(v1.dep_tracking_hash(), v2.dep_tracking_hash());
-
- // Check clone
- assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
- }
-
- #[test]
- fn test_externs_tracking_hash_different_construction_order() {
- let mut v1 = Options::default();
- let mut v2 = Options::default();
- let mut v3 = Options::default();
-
- v1.externs = Externs::new(mk_map(vec![
- (
- String::from("a"),
- ExternEntry::new_public(vec![Some("b"), Some("c")])
- ),
- (
- String::from("d"),
- ExternEntry::new_public(vec![Some("e"), Some("f")])
- ),
- ]));
-
- v2.externs = Externs::new(mk_map(vec![
- (
- String::from("d"),
- ExternEntry::new_public(vec![Some("e"), Some("f")])
- ),
- (
- String::from("a"),
- ExternEntry::new_public(vec![Some("b"), Some("c")])
- ),
- ]));
-
- v3.externs = Externs::new(mk_map(vec![
- (
- String::from("a"),
- ExternEntry::new_public(vec![Some("b"), Some("c")])
- ),
- (
- String::from("d"),
- ExternEntry::new_public(vec![Some("f"), Some("e")])
- ),
- ]));
-
- assert_eq!(v1.dep_tracking_hash(), v2.dep_tracking_hash());
- assert_eq!(v1.dep_tracking_hash(), v3.dep_tracking_hash());
- assert_eq!(v2.dep_tracking_hash(), v3.dep_tracking_hash());
-
- // Check clone
- assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
- assert_eq!(v2.dep_tracking_hash(), v2.clone().dep_tracking_hash());
- assert_eq!(v3.dep_tracking_hash(), v3.clone().dep_tracking_hash());
- }
-
- #[test]
- fn test_lints_tracking_hash_different_values() {
- let mut v1 = Options::default();
- let mut v2 = Options::default();
- let mut v3 = Options::default();
-
- v1.lint_opts = vec![
- (String::from("a"), lint::Allow),
- (String::from("b"), lint::Warn),
- (String::from("c"), lint::Deny),
- (String::from("d"), lint::Forbid),
- ];
-
- v2.lint_opts = vec![
- (String::from("a"), lint::Allow),
- (String::from("b"), lint::Warn),
- (String::from("X"), lint::Deny),
- (String::from("d"), lint::Forbid),
- ];
-
- v3.lint_opts = vec![
- (String::from("a"), lint::Allow),
- (String::from("b"), lint::Warn),
- (String::from("c"), lint::Forbid),
- (String::from("d"), lint::Deny),
- ];
-
- assert!(v1.dep_tracking_hash() != v2.dep_tracking_hash());
- assert!(v1.dep_tracking_hash() != v3.dep_tracking_hash());
- assert!(v2.dep_tracking_hash() != v3.dep_tracking_hash());
-
- // Check clone
- assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
- assert_eq!(v2.dep_tracking_hash(), v2.clone().dep_tracking_hash());
- assert_eq!(v3.dep_tracking_hash(), v3.clone().dep_tracking_hash());
- }
-
- #[test]
- fn test_lints_tracking_hash_different_construction_order() {
- let mut v1 = Options::default();
- let mut v2 = Options::default();
-
- v1.lint_opts = vec![
- (String::from("a"), lint::Allow),
- (String::from("b"), lint::Warn),
- (String::from("c"), lint::Deny),
- (String::from("d"), lint::Forbid),
- ];
-
- v2.lint_opts = vec![
- (String::from("a"), lint::Allow),
- (String::from("c"), lint::Deny),
- (String::from("b"), lint::Warn),
- (String::from("d"), lint::Forbid),
- ];
-
- assert_eq!(v1.dep_tracking_hash(), v2.dep_tracking_hash());
-
- // Check clone
- assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
- assert_eq!(v2.dep_tracking_hash(), v2.clone().dep_tracking_hash());
- }
-
- #[test]
- fn test_search_paths_tracking_hash_different_order() {
- let mut v1 = Options::default();
- let mut v2 = Options::default();
- let mut v3 = Options::default();
- let mut v4 = Options::default();
-
- const JSON: super::ErrorOutputType = super::ErrorOutputType::Json {
- pretty: false,
- json_rendered: super::HumanReadableErrorType::Default(super::ColorConfig::Never),
- };
-
- // Reference
- v1.search_paths
- .push(SearchPath::from_cli_opt("native=abc", JSON));
- v1.search_paths
- .push(SearchPath::from_cli_opt("crate=def", JSON));
- v1.search_paths
- .push(SearchPath::from_cli_opt("dependency=ghi", JSON));
- v1.search_paths
- .push(SearchPath::from_cli_opt("framework=jkl", JSON));
- v1.search_paths
- .push(SearchPath::from_cli_opt("all=mno", JSON));
-
- v2.search_paths
- .push(SearchPath::from_cli_opt("native=abc", JSON));
- v2.search_paths
- .push(SearchPath::from_cli_opt("dependency=ghi", JSON));
- v2.search_paths
- .push(SearchPath::from_cli_opt("crate=def", JSON));
- v2.search_paths
- .push(SearchPath::from_cli_opt("framework=jkl", JSON));
- v2.search_paths
- .push(SearchPath::from_cli_opt("all=mno", JSON));
-
- v3.search_paths
- .push(SearchPath::from_cli_opt("crate=def", JSON));
- v3.search_paths
- .push(SearchPath::from_cli_opt("framework=jkl", JSON));
- v3.search_paths
- .push(SearchPath::from_cli_opt("native=abc", JSON));
- v3.search_paths
- .push(SearchPath::from_cli_opt("dependency=ghi", JSON));
- v3.search_paths
- .push(SearchPath::from_cli_opt("all=mno", JSON));
-
- v4.search_paths
- .push(SearchPath::from_cli_opt("all=mno", JSON));
- v4.search_paths
- .push(SearchPath::from_cli_opt("native=abc", JSON));
- v4.search_paths
- .push(SearchPath::from_cli_opt("crate=def", JSON));
- v4.search_paths
- .push(SearchPath::from_cli_opt("dependency=ghi", JSON));
- v4.search_paths
- .push(SearchPath::from_cli_opt("framework=jkl", JSON));
-
- assert!(v1.dep_tracking_hash() == v2.dep_tracking_hash());
- assert!(v1.dep_tracking_hash() == v3.dep_tracking_hash());
- assert!(v1.dep_tracking_hash() == v4.dep_tracking_hash());
-
- // Check clone
- assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
- assert_eq!(v2.dep_tracking_hash(), v2.clone().dep_tracking_hash());
- assert_eq!(v3.dep_tracking_hash(), v3.clone().dep_tracking_hash());
- assert_eq!(v4.dep_tracking_hash(), v4.clone().dep_tracking_hash());
- }
-
- #[test]
- fn test_native_libs_tracking_hash_different_values() {
- let mut v1 = Options::default();
- let mut v2 = Options::default();
- let mut v3 = Options::default();
- let mut v4 = Options::default();
-
- // Reference
- v1.libs = vec![
- (String::from("a"), None, Some(cstore::NativeStatic)),
- (String::from("b"), None, Some(cstore::NativeFramework)),
- (String::from("c"), None, Some(cstore::NativeUnknown)),
- ];
-
- // Change label
- v2.libs = vec![
- (String::from("a"), None, Some(cstore::NativeStatic)),
- (String::from("X"), None, Some(cstore::NativeFramework)),
- (String::from("c"), None, Some(cstore::NativeUnknown)),
- ];
-
- // Change kind
- v3.libs = vec![
- (String::from("a"), None, Some(cstore::NativeStatic)),
- (String::from("b"), None, Some(cstore::NativeStatic)),
- (String::from("c"), None, Some(cstore::NativeUnknown)),
- ];
-
- // Change new-name
- v4.libs = vec![
- (String::from("a"), None, Some(cstore::NativeStatic)),
- (
- String::from("b"),
- Some(String::from("X")),
- Some(cstore::NativeFramework),
- ),
- (String::from("c"), None, Some(cstore::NativeUnknown)),
- ];
-
- assert!(v1.dep_tracking_hash() != v2.dep_tracking_hash());
- assert!(v1.dep_tracking_hash() != v3.dep_tracking_hash());
- assert!(v1.dep_tracking_hash() != v4.dep_tracking_hash());
-
- // Check clone
- assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
- assert_eq!(v2.dep_tracking_hash(), v2.clone().dep_tracking_hash());
- assert_eq!(v3.dep_tracking_hash(), v3.clone().dep_tracking_hash());
- assert_eq!(v4.dep_tracking_hash(), v4.clone().dep_tracking_hash());
- }
-
- #[test]
- fn test_native_libs_tracking_hash_different_order() {
- let mut v1 = Options::default();
- let mut v2 = Options::default();
- let mut v3 = Options::default();
-
- // Reference
- v1.libs = vec![
- (String::from("a"), None, Some(cstore::NativeStatic)),
- (String::from("b"), None, Some(cstore::NativeFramework)),
- (String::from("c"), None, Some(cstore::NativeUnknown)),
- ];
-
- v2.libs = vec![
- (String::from("b"), None, Some(cstore::NativeFramework)),
- (String::from("a"), None, Some(cstore::NativeStatic)),
- (String::from("c"), None, Some(cstore::NativeUnknown)),
- ];
-
- v3.libs = vec![
- (String::from("c"), None, Some(cstore::NativeUnknown)),
- (String::from("a"), None, Some(cstore::NativeStatic)),
- (String::from("b"), None, Some(cstore::NativeFramework)),
- ];
-
- assert!(v1.dep_tracking_hash() == v2.dep_tracking_hash());
- assert!(v1.dep_tracking_hash() == v3.dep_tracking_hash());
- assert!(v2.dep_tracking_hash() == v3.dep_tracking_hash());
-
- // Check clone
- assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
- assert_eq!(v2.dep_tracking_hash(), v2.clone().dep_tracking_hash());
- assert_eq!(v3.dep_tracking_hash(), v3.clone().dep_tracking_hash());
- }
-
- #[test]
- fn test_codegen_options_tracking_hash() {
- let reference = Options::default();
- let mut opts = Options::default();
-
- // Make sure the changing an [UNTRACKED] option leaves the hash unchanged
- opts.cg.ar = Some(String::from("abc"));
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
-
- opts.cg.linker = Some(PathBuf::from("linker"));
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
-
- opts.cg.link_args = Some(vec![String::from("abc"), String::from("def")]);
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
-
- opts.cg.link_dead_code = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
-
- opts.cg.rpath = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
-
- opts.cg.extra_filename = String::from("extra-filename");
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
-
- opts.cg.codegen_units = Some(42);
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
-
- opts.cg.remark = super::Passes::Some(vec![String::from("pass1"), String::from("pass2")]);
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
-
- opts.cg.save_temps = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
-
- opts.cg.incremental = Some(String::from("abc"));
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
-
- // Make sure changing a [TRACKED] option changes the hash
- opts = reference.clone();
- opts.cg.lto = LtoCli::Fat;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.target_cpu = Some(String::from("abc"));
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.target_feature = String::from("all the features, all of them");
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.passes = vec![String::from("1"), String::from("2")];
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.llvm_args = vec![String::from("1"), String::from("2")];
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.overflow_checks = Some(true);
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.no_prepopulate_passes = true;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.no_vectorize_loops = true;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.no_vectorize_slp = true;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.soft_float = true;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.prefer_dynamic = true;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.no_integrated_as = true;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.no_redzone = Some(true);
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.relocation_model = Some(String::from("relocation model"));
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.code_model = Some(String::from("code model"));
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.tls_model = Some(String::from("tls model"));
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.pgo_gen = SwitchWithOptPath::Enabled(None);
- assert_ne!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.pgo_use = Some(PathBuf::from("abc"));
- assert_ne!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.metadata = vec![String::from("A"), String::from("B")];
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.debuginfo = Some(0xdeadbeef);
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.debuginfo = Some(0xba5eba11);
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.force_frame_pointers = Some(false);
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.debug_assertions = Some(true);
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.inline_threshold = Some(0xf007ba11);
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.panic = Some(PanicStrategy::Abort);
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.cg.linker_plugin_lto = LinkerPluginLto::LinkerPluginAuto;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
- }
-
- #[test]
- fn test_debugging_options_tracking_hash() {
- let reference = Options::default();
- let mut opts = Options::default();
-
- // Make sure the changing an [UNTRACKED] option leaves the hash unchanged
- opts.debugging_opts.verbose = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.time_passes = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.time_llvm_passes = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.input_stats = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.borrowck_stats = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.meta_stats = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.print_link_args = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.print_llvm_passes = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.ast_json = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.ast_json_noexpand = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.ls = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.save_analysis = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.flowgraph_print_loans = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.flowgraph_print_moves = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.flowgraph_print_assigns = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.flowgraph_print_all = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.print_region_graph = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.parse_only = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.incremental = Some(String::from("abc"));
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.dump_dep_graph = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.query_dep_graph = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.no_analysis = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.unstable_options = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.trace_macros = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.keep_hygiene_data = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.keep_ast = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.print_mono_items = Some(String::from("abc"));
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.dump_mir = Some(String::from("abc"));
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.dump_mir_dir = String::from("abc");
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
- opts.debugging_opts.dump_mir_graphviz = true;
- assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
-
- // Make sure changing a [TRACKED] option changes the hash
- opts = reference.clone();
- opts.debugging_opts.asm_comments = true;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.verify_llvm_ir = true;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.no_landing_pads = true;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.fewer_names = true;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.no_codegen = true;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.treat_err_as_bug = Some(1);
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.report_delayed_bugs = true;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.continue_parse_after_error = true;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.extra_plugins = vec![String::from("plugin1"), String::from("plugin2")];
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.force_overflow_checks = Some(true);
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.show_span = Some(String::from("abc"));
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.mir_opt_level = 3;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.relro_level = Some(RelroLevel::Full);
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.merge_functions = Some(MergeFunctions::Disabled);
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.allow_features = Some(vec![String::from("lang_items")]);
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
- opts = reference.clone();
- opts.debugging_opts.symbol_mangling_version = SymbolManglingVersion::V0;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
- }
-
- #[test]
- fn test_edition_parsing() {
- // test default edition
- let options = Options::default();
- assert!(options.edition == DEFAULT_EDITION);
-
- let matches = optgroups()
- .parse(&["--edition=2018".to_string()])
- .unwrap();
- let (sessopts, _) = build_session_options_and_crate_config(&matches);
- assert!(sessopts.edition == Edition::Edition2018)
- }
-}
+mod tests;
diff --git a/src/librustc/session/config/tests.rs b/src/librustc/session/config/tests.rs
new file mode 100644
index 0000000..b8477f8
--- /dev/null
+++ b/src/librustc/session/config/tests.rs
@@ -0,0 +1,706 @@
+use getopts;
+use crate::lint;
+use crate::middle::cstore;
+use crate::session::config::{
+ build_configuration,
+ build_session_options_and_crate_config,
+ to_crate_config
+};
+use crate::session::config::{LtoCli, LinkerPluginLto, SwitchWithOptPath, ExternEntry};
+use crate::session::build_session;
+use crate::session::search_paths::SearchPath;
+use std::collections::{BTreeMap, BTreeSet};
+use std::iter::FromIterator;
+use std::path::PathBuf;
+use super::{Externs, OutputType, OutputTypes, SymbolManglingVersion};
+use rustc_target::spec::{MergeFunctions, PanicStrategy, RelroLevel};
+use syntax::symbol::sym;
+use syntax::edition::{Edition, DEFAULT_EDITION};
+use syntax;
+use super::Options;
+
+impl ExternEntry {
+ fn new_public<S: Into<String>,
+ I: IntoIterator<Item = Option<S>>>(locations: I) -> ExternEntry {
+ let locations: BTreeSet<_> = locations.into_iter().map(|o| o.map(|s| s.into()))
+ .collect();
+
+ ExternEntry {
+ locations,
+ is_private_dep: false
+ }
+ }
+}
+
+fn optgroups() -> getopts::Options {
+ let mut opts = getopts::Options::new();
+ for group in super::rustc_optgroups() {
+ (group.apply)(&mut opts);
+ }
+ return opts;
+}
+
+fn mk_map<K: Ord, V>(entries: Vec<(K, V)>) -> BTreeMap<K, V> {
+ BTreeMap::from_iter(entries.into_iter())
+}
+
+// When the user supplies --test we should implicitly supply --cfg test
+#[test]
+fn test_switch_implies_cfg_test() {
+ syntax::with_default_globals(|| {
+ let matches = &match optgroups().parse(&["--test".to_string()]) {
+ Ok(m) => m,
+ Err(f) => panic!("test_switch_implies_cfg_test: {}", f),
+ };
+ let registry = errors::registry::Registry::new(&[]);
+ let (sessopts, cfg) = build_session_options_and_crate_config(matches);
+ let sess = build_session(sessopts, None, registry);
+ let cfg = build_configuration(&sess, to_crate_config(cfg));
+ assert!(cfg.contains(&(sym::test, None)));
+ });
+}
+
+// When the user supplies --test and --cfg test, don't implicitly add
+// another --cfg test
+#[test]
+fn test_switch_implies_cfg_test_unless_cfg_test() {
+ syntax::with_default_globals(|| {
+ let matches = &match optgroups().parse(&["--test".to_string(),
+ "--cfg=test".to_string()]) {
+ Ok(m) => m,
+ Err(f) => panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f),
+ };
+ let registry = errors::registry::Registry::new(&[]);
+ let (sessopts, cfg) = build_session_options_and_crate_config(matches);
+ let sess = build_session(sessopts, None, registry);
+ let cfg = build_configuration(&sess, to_crate_config(cfg));
+ let mut test_items = cfg.iter().filter(|&&(name, _)| name == sym::test);
+ assert!(test_items.next().is_some());
+ assert!(test_items.next().is_none());
+ });
+}
+
+#[test]
+fn test_can_print_warnings() {
+ syntax::with_default_globals(|| {
+ let matches = optgroups().parse(&["-Awarnings".to_string()]).unwrap();
+ let registry = errors::registry::Registry::new(&[]);
+ let (sessopts, _) = build_session_options_and_crate_config(&matches);
+ let sess = build_session(sessopts, None, registry);
+ assert!(!sess.diagnostic().flags.can_emit_warnings);
+ });
+
+ syntax::with_default_globals(|| {
+ let matches = optgroups()
+ .parse(&["-Awarnings".to_string(), "-Dwarnings".to_string()])
+ .unwrap();
+ let registry = errors::registry::Registry::new(&[]);
+ let (sessopts, _) = build_session_options_and_crate_config(&matches);
+ let sess = build_session(sessopts, None, registry);
+ assert!(sess.diagnostic().flags.can_emit_warnings);
+ });
+
+ syntax::with_default_globals(|| {
+ let matches = optgroups().parse(&["-Adead_code".to_string()]).unwrap();
+ let registry = errors::registry::Registry::new(&[]);
+ let (sessopts, _) = build_session_options_and_crate_config(&matches);
+ let sess = build_session(sessopts, None, registry);
+ assert!(sess.diagnostic().flags.can_emit_warnings);
+ });
+}
+
+#[test]
+fn test_output_types_tracking_hash_different_paths() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+ let mut v3 = Options::default();
+
+ v1.output_types =
+ OutputTypes::new(&[(OutputType::Exe, Some(PathBuf::from("./some/thing")))]);
+ v2.output_types =
+ OutputTypes::new(&[(OutputType::Exe, Some(PathBuf::from("/some/thing")))]);
+ v3.output_types = OutputTypes::new(&[(OutputType::Exe, None)]);
+
+ assert!(v1.dep_tracking_hash() != v2.dep_tracking_hash());
+ assert!(v1.dep_tracking_hash() != v3.dep_tracking_hash());
+ assert!(v2.dep_tracking_hash() != v3.dep_tracking_hash());
+
+ // Check clone
+ assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
+ assert_eq!(v2.dep_tracking_hash(), v2.clone().dep_tracking_hash());
+ assert_eq!(v3.dep_tracking_hash(), v3.clone().dep_tracking_hash());
+}
+
+#[test]
+fn test_output_types_tracking_hash_different_construction_order() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+
+ v1.output_types = OutputTypes::new(&[
+ (OutputType::Exe, Some(PathBuf::from("./some/thing"))),
+ (OutputType::Bitcode, Some(PathBuf::from("./some/thing.bc"))),
+ ]);
+
+ v2.output_types = OutputTypes::new(&[
+ (OutputType::Bitcode, Some(PathBuf::from("./some/thing.bc"))),
+ (OutputType::Exe, Some(PathBuf::from("./some/thing"))),
+ ]);
+
+ assert_eq!(v1.dep_tracking_hash(), v2.dep_tracking_hash());
+
+ // Check clone
+ assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
+}
+
+#[test]
+fn test_externs_tracking_hash_different_construction_order() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+ let mut v3 = Options::default();
+
+ v1.externs = Externs::new(mk_map(vec![
+ (
+ String::from("a"),
+ ExternEntry::new_public(vec![Some("b"), Some("c")])
+ ),
+ (
+ String::from("d"),
+ ExternEntry::new_public(vec![Some("e"), Some("f")])
+ ),
+ ]));
+
+ v2.externs = Externs::new(mk_map(vec![
+ (
+ String::from("d"),
+ ExternEntry::new_public(vec![Some("e"), Some("f")])
+ ),
+ (
+ String::from("a"),
+ ExternEntry::new_public(vec![Some("b"), Some("c")])
+ ),
+ ]));
+
+ v3.externs = Externs::new(mk_map(vec![
+ (
+ String::from("a"),
+ ExternEntry::new_public(vec![Some("b"), Some("c")])
+ ),
+ (
+ String::from("d"),
+ ExternEntry::new_public(vec![Some("f"), Some("e")])
+ ),
+ ]));
+
+ assert_eq!(v1.dep_tracking_hash(), v2.dep_tracking_hash());
+ assert_eq!(v1.dep_tracking_hash(), v3.dep_tracking_hash());
+ assert_eq!(v2.dep_tracking_hash(), v3.dep_tracking_hash());
+
+ // Check clone
+ assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
+ assert_eq!(v2.dep_tracking_hash(), v2.clone().dep_tracking_hash());
+ assert_eq!(v3.dep_tracking_hash(), v3.clone().dep_tracking_hash());
+}
+
+#[test]
+fn test_lints_tracking_hash_different_values() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+ let mut v3 = Options::default();
+
+ v1.lint_opts = vec![
+ (String::from("a"), lint::Allow),
+ (String::from("b"), lint::Warn),
+ (String::from("c"), lint::Deny),
+ (String::from("d"), lint::Forbid),
+ ];
+
+ v2.lint_opts = vec![
+ (String::from("a"), lint::Allow),
+ (String::from("b"), lint::Warn),
+ (String::from("X"), lint::Deny),
+ (String::from("d"), lint::Forbid),
+ ];
+
+ v3.lint_opts = vec![
+ (String::from("a"), lint::Allow),
+ (String::from("b"), lint::Warn),
+ (String::from("c"), lint::Forbid),
+ (String::from("d"), lint::Deny),
+ ];
+
+ assert!(v1.dep_tracking_hash() != v2.dep_tracking_hash());
+ assert!(v1.dep_tracking_hash() != v3.dep_tracking_hash());
+ assert!(v2.dep_tracking_hash() != v3.dep_tracking_hash());
+
+ // Check clone
+ assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
+ assert_eq!(v2.dep_tracking_hash(), v2.clone().dep_tracking_hash());
+ assert_eq!(v3.dep_tracking_hash(), v3.clone().dep_tracking_hash());
+}
+
+#[test]
+fn test_lints_tracking_hash_different_construction_order() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+
+ v1.lint_opts = vec![
+ (String::from("a"), lint::Allow),
+ (String::from("b"), lint::Warn),
+ (String::from("c"), lint::Deny),
+ (String::from("d"), lint::Forbid),
+ ];
+
+ v2.lint_opts = vec![
+ (String::from("a"), lint::Allow),
+ (String::from("c"), lint::Deny),
+ (String::from("b"), lint::Warn),
+ (String::from("d"), lint::Forbid),
+ ];
+
+ assert_eq!(v1.dep_tracking_hash(), v2.dep_tracking_hash());
+
+ // Check clone
+ assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
+ assert_eq!(v2.dep_tracking_hash(), v2.clone().dep_tracking_hash());
+}
+
+#[test]
+fn test_search_paths_tracking_hash_different_order() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+ let mut v3 = Options::default();
+ let mut v4 = Options::default();
+
+ const JSON: super::ErrorOutputType = super::ErrorOutputType::Json {
+ pretty: false,
+ json_rendered: super::HumanReadableErrorType::Default(super::ColorConfig::Never),
+ };
+
+ // Reference
+ v1.search_paths
+ .push(SearchPath::from_cli_opt("native=abc", JSON));
+ v1.search_paths
+ .push(SearchPath::from_cli_opt("crate=def", JSON));
+ v1.search_paths
+ .push(SearchPath::from_cli_opt("dependency=ghi", JSON));
+ v1.search_paths
+ .push(SearchPath::from_cli_opt("framework=jkl", JSON));
+ v1.search_paths
+ .push(SearchPath::from_cli_opt("all=mno", JSON));
+
+ v2.search_paths
+ .push(SearchPath::from_cli_opt("native=abc", JSON));
+ v2.search_paths
+ .push(SearchPath::from_cli_opt("dependency=ghi", JSON));
+ v2.search_paths
+ .push(SearchPath::from_cli_opt("crate=def", JSON));
+ v2.search_paths
+ .push(SearchPath::from_cli_opt("framework=jkl", JSON));
+ v2.search_paths
+ .push(SearchPath::from_cli_opt("all=mno", JSON));
+
+ v3.search_paths
+ .push(SearchPath::from_cli_opt("crate=def", JSON));
+ v3.search_paths
+ .push(SearchPath::from_cli_opt("framework=jkl", JSON));
+ v3.search_paths
+ .push(SearchPath::from_cli_opt("native=abc", JSON));
+ v3.search_paths
+ .push(SearchPath::from_cli_opt("dependency=ghi", JSON));
+ v3.search_paths
+ .push(SearchPath::from_cli_opt("all=mno", JSON));
+
+ v4.search_paths
+ .push(SearchPath::from_cli_opt("all=mno", JSON));
+ v4.search_paths
+ .push(SearchPath::from_cli_opt("native=abc", JSON));
+ v4.search_paths
+ .push(SearchPath::from_cli_opt("crate=def", JSON));
+ v4.search_paths
+ .push(SearchPath::from_cli_opt("dependency=ghi", JSON));
+ v4.search_paths
+ .push(SearchPath::from_cli_opt("framework=jkl", JSON));
+
+ assert!(v1.dep_tracking_hash() == v2.dep_tracking_hash());
+ assert!(v1.dep_tracking_hash() == v3.dep_tracking_hash());
+ assert!(v1.dep_tracking_hash() == v4.dep_tracking_hash());
+
+ // Check clone
+ assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
+ assert_eq!(v2.dep_tracking_hash(), v2.clone().dep_tracking_hash());
+ assert_eq!(v3.dep_tracking_hash(), v3.clone().dep_tracking_hash());
+ assert_eq!(v4.dep_tracking_hash(), v4.clone().dep_tracking_hash());
+}
+
+#[test]
+fn test_native_libs_tracking_hash_different_values() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+ let mut v3 = Options::default();
+ let mut v4 = Options::default();
+
+ // Reference
+ v1.libs = vec![
+ (String::from("a"), None, Some(cstore::NativeStatic)),
+ (String::from("b"), None, Some(cstore::NativeFramework)),
+ (String::from("c"), None, Some(cstore::NativeUnknown)),
+ ];
+
+ // Change label
+ v2.libs = vec![
+ (String::from("a"), None, Some(cstore::NativeStatic)),
+ (String::from("X"), None, Some(cstore::NativeFramework)),
+ (String::from("c"), None, Some(cstore::NativeUnknown)),
+ ];
+
+ // Change kind
+ v3.libs = vec![
+ (String::from("a"), None, Some(cstore::NativeStatic)),
+ (String::from("b"), None, Some(cstore::NativeStatic)),
+ (String::from("c"), None, Some(cstore::NativeUnknown)),
+ ];
+
+ // Change new-name
+ v4.libs = vec![
+ (String::from("a"), None, Some(cstore::NativeStatic)),
+ (
+ String::from("b"),
+ Some(String::from("X")),
+ Some(cstore::NativeFramework),
+ ),
+ (String::from("c"), None, Some(cstore::NativeUnknown)),
+ ];
+
+ assert!(v1.dep_tracking_hash() != v2.dep_tracking_hash());
+ assert!(v1.dep_tracking_hash() != v3.dep_tracking_hash());
+ assert!(v1.dep_tracking_hash() != v4.dep_tracking_hash());
+
+ // Check clone
+ assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
+ assert_eq!(v2.dep_tracking_hash(), v2.clone().dep_tracking_hash());
+ assert_eq!(v3.dep_tracking_hash(), v3.clone().dep_tracking_hash());
+ assert_eq!(v4.dep_tracking_hash(), v4.clone().dep_tracking_hash());
+}
+
+#[test]
+fn test_native_libs_tracking_hash_different_order() {
+ let mut v1 = Options::default();
+ let mut v2 = Options::default();
+ let mut v3 = Options::default();
+
+ // Reference
+ v1.libs = vec![
+ (String::from("a"), None, Some(cstore::NativeStatic)),
+ (String::from("b"), None, Some(cstore::NativeFramework)),
+ (String::from("c"), None, Some(cstore::NativeUnknown)),
+ ];
+
+ v2.libs = vec![
+ (String::from("b"), None, Some(cstore::NativeFramework)),
+ (String::from("a"), None, Some(cstore::NativeStatic)),
+ (String::from("c"), None, Some(cstore::NativeUnknown)),
+ ];
+
+ v3.libs = vec![
+ (String::from("c"), None, Some(cstore::NativeUnknown)),
+ (String::from("a"), None, Some(cstore::NativeStatic)),
+ (String::from("b"), None, Some(cstore::NativeFramework)),
+ ];
+
+ assert!(v1.dep_tracking_hash() == v2.dep_tracking_hash());
+ assert!(v1.dep_tracking_hash() == v3.dep_tracking_hash());
+ assert!(v2.dep_tracking_hash() == v3.dep_tracking_hash());
+
+ // Check clone
+ assert_eq!(v1.dep_tracking_hash(), v1.clone().dep_tracking_hash());
+ assert_eq!(v2.dep_tracking_hash(), v2.clone().dep_tracking_hash());
+ assert_eq!(v3.dep_tracking_hash(), v3.clone().dep_tracking_hash());
+}
+
+#[test]
+fn test_codegen_options_tracking_hash() {
+ let reference = Options::default();
+ let mut opts = Options::default();
+
+ // Make sure the changing an [UNTRACKED] option leaves the hash unchanged
+ opts.cg.ar = Some(String::from("abc"));
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+
+ opts.cg.linker = Some(PathBuf::from("linker"));
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+
+ opts.cg.link_args = Some(vec![String::from("abc"), String::from("def")]);
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+
+ opts.cg.link_dead_code = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+
+ opts.cg.rpath = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+
+ opts.cg.extra_filename = String::from("extra-filename");
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+
+ opts.cg.codegen_units = Some(42);
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+
+ opts.cg.remark = super::Passes::Some(vec![String::from("pass1"), String::from("pass2")]);
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+
+ opts.cg.save_temps = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+
+ opts.cg.incremental = Some(String::from("abc"));
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+
+ // Make sure changing a [TRACKED] option changes the hash
+ opts = reference.clone();
+ opts.cg.lto = LtoCli::Fat;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.target_cpu = Some(String::from("abc"));
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.target_feature = String::from("all the features, all of them");
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.passes = vec![String::from("1"), String::from("2")];
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.llvm_args = vec![String::from("1"), String::from("2")];
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.overflow_checks = Some(true);
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.no_prepopulate_passes = true;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.no_vectorize_loops = true;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.no_vectorize_slp = true;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.soft_float = true;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.prefer_dynamic = true;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.no_integrated_as = true;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.no_redzone = Some(true);
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.relocation_model = Some(String::from("relocation model"));
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.code_model = Some(String::from("code model"));
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.tls_model = Some(String::from("tls model"));
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.pgo_gen = SwitchWithOptPath::Enabled(None);
+ assert_ne!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.pgo_use = Some(PathBuf::from("abc"));
+ assert_ne!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.metadata = vec![String::from("A"), String::from("B")];
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.debuginfo = Some(0xdeadbeef);
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.debuginfo = Some(0xba5eba11);
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.force_frame_pointers = Some(false);
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.debug_assertions = Some(true);
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.inline_threshold = Some(0xf007ba11);
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.panic = Some(PanicStrategy::Abort);
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.linker_plugin_lto = LinkerPluginLto::LinkerPluginAuto;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+}
+
+#[test]
+fn test_debugging_options_tracking_hash() {
+ let reference = Options::default();
+ let mut opts = Options::default();
+
+ // Make sure the changing an [UNTRACKED] option leaves the hash unchanged
+ opts.debugging_opts.verbose = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.time_passes = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.time_llvm_passes = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.input_stats = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.borrowck_stats = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.meta_stats = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.print_link_args = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.print_llvm_passes = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.ast_json = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.ast_json_noexpand = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.ls = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.save_analysis = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.flowgraph_print_loans = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.flowgraph_print_moves = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.flowgraph_print_assigns = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.flowgraph_print_all = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.print_region_graph = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.parse_only = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.incremental = Some(String::from("abc"));
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.dump_dep_graph = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.query_dep_graph = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.no_analysis = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.unstable_options = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.trace_macros = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.keep_hygiene_data = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.keep_ast = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.print_mono_items = Some(String::from("abc"));
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.dump_mir = Some(String::from("abc"));
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.dump_mir_dir = String::from("abc");
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+ opts.debugging_opts.dump_mir_graphviz = true;
+ assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
+
+ // Make sure changing a [TRACKED] option changes the hash
+ opts = reference.clone();
+ opts.debugging_opts.asm_comments = true;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.verify_llvm_ir = true;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.no_landing_pads = true;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.fewer_names = true;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.no_codegen = true;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.treat_err_as_bug = Some(1);
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.report_delayed_bugs = true;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.continue_parse_after_error = true;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.extra_plugins = vec![String::from("plugin1"), String::from("plugin2")];
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.force_overflow_checks = Some(true);
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.show_span = Some(String::from("abc"));
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.mir_opt_level = 3;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.relro_level = Some(RelroLevel::Full);
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.merge_functions = Some(MergeFunctions::Disabled);
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.allow_features = Some(vec![String::from("lang_items")]);
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.debugging_opts.symbol_mangling_version = SymbolManglingVersion::V0;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+}
+
+#[test]
+fn test_edition_parsing() {
+ // test default edition
+ let options = Options::default();
+ assert!(options.edition == DEFAULT_EDITION);
+
+ let matches = optgroups()
+ .parse(&["--edition=2018".to_string()])
+ .unwrap();
+ let (sessopts, _) = build_session_options_and_crate_config(&matches);
+ assert!(sessopts.edition == Edition::Edition2018)
+}
diff --git a/src/librustc/traits/auto_trait.rs b/src/librustc/traits/auto_trait.rs
index 9ce35d1..d89cf8e 100644
--- a/src/librustc/traits/auto_trait.rs
+++ b/src/librustc/traits/auto_trait.rs
@@ -47,12 +47,12 @@
pub vid_to_region: FxHashMap<ty::RegionVid, ty::Region<'cx>>,
}
-pub struct AutoTraitFinder<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub struct AutoTraitFinder<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
+impl<'tcx> AutoTraitFinder<'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>) -> Self {
AutoTraitFinder { tcx }
}
@@ -79,7 +79,7 @@
ty: Ty<'tcx>,
orig_env: ty::ParamEnv<'tcx>,
trait_did: DefId,
- auto_trait_callback: impl for<'i> Fn(&InferCtxt<'_, 'tcx, 'i>, AutoTraitInfo<'i>) -> A,
+ auto_trait_callback: impl Fn(&InferCtxt<'_, 'tcx>, AutoTraitInfo<'tcx>) -> A,
) -> AutoTraitResult<A> {
let tcx = self.tcx;
@@ -232,7 +232,7 @@
}
}
-impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
+impl AutoTraitFinder<'tcx> {
// The core logic responsible for computing the bounds for our synthesized impl.
//
// To calculate the bounds, we call SelectionContext.select in a loop. Like FulfillmentContext,
@@ -270,16 +270,16 @@
// the final synthesized generics: we don't want our generated docs page to contain something
// like 'T: Copy + Clone', as that's redundant. Therefore, we keep track of a separate
// 'user_env', which only holds the predicates that will actually be displayed to the user.
- fn evaluate_predicates<'b, 'c>(
+ fn evaluate_predicates(
&self,
- infcx: &InferCtxt<'b, 'tcx, 'c>,
+ infcx: &InferCtxt<'_, 'tcx>,
trait_did: DefId,
- ty: Ty<'c>,
- param_env: ty::ParamEnv<'c>,
- user_env: ty::ParamEnv<'c>,
- fresh_preds: &mut FxHashSet<ty::Predicate<'c>>,
+ ty: Ty<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ user_env: ty::ParamEnv<'tcx>,
+ fresh_preds: &mut FxHashSet<ty::Predicate<'tcx>>,
only_projections: bool,
- ) -> Option<(ty::ParamEnv<'c>, ty::ParamEnv<'c>)> {
+ ) -> Option<(ty::ParamEnv<'tcx>, ty::ParamEnv<'tcx>)> {
let tcx = infcx.tcx;
let mut select = SelectionContext::with_negative(&infcx, true);
@@ -617,20 +617,14 @@
}
}
- fn evaluate_nested_obligations<
- 'b,
- 'c,
- 'd,
- 'cx,
- T: Iterator<Item = Obligation<'cx, ty::Predicate<'cx>>>,
- >(
+ fn evaluate_nested_obligations(
&self,
ty: Ty<'_>,
- nested: T,
- computed_preds: &'b mut FxHashSet<ty::Predicate<'cx>>,
- fresh_preds: &'b mut FxHashSet<ty::Predicate<'cx>>,
- predicates: &'b mut VecDeque<ty::PolyTraitPredicate<'cx>>,
- select: &mut SelectionContext<'c, 'd, 'cx>,
+ nested: impl Iterator<Item = Obligation<'tcx, ty::Predicate<'tcx>>>,
+ computed_preds: &mut FxHashSet<ty::Predicate<'tcx>>,
+ fresh_preds: &mut FxHashSet<ty::Predicate<'tcx>>,
+ predicates: &mut VecDeque<ty::PolyTraitPredicate<'tcx>>,
+ select: &mut SelectionContext<'_, 'tcx>,
only_projections: bool,
) -> bool {
let dummy_cause = ObligationCause::misc(DUMMY_SP, hir::DUMMY_HIR_ID);
@@ -822,23 +816,23 @@
return true;
}
- pub fn clean_pred<'c, 'd, 'cx>(
+ pub fn clean_pred(
&self,
- infcx: &InferCtxt<'c, 'd, 'cx>,
- p: ty::Predicate<'cx>,
- ) -> ty::Predicate<'cx> {
+ infcx: &InferCtxt<'_, 'tcx>,
+ p: ty::Predicate<'tcx>,
+ ) -> ty::Predicate<'tcx> {
infcx.freshen(p)
}
}
// Replaces all ReVars in a type with ty::Region's, using the provided map
-pub struct RegionReplacer<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
+pub struct RegionReplacer<'a, 'tcx> {
vid_to_region: &'a FxHashMap<ty::RegionVid, ty::Region<'tcx>>,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionReplacer<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
+impl<'a, 'tcx> TypeFolder<'tcx> for RegionReplacer<'a, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
self.tcx
}
diff --git a/src/librustc/traits/chalk_fulfill.rs b/src/librustc/traits/chalk_fulfill.rs
index a7b5e6c..0c7c94b 100644
--- a/src/librustc/traits/chalk_fulfill.rs
+++ b/src/librustc/traits/chalk_fulfill.rs
@@ -29,8 +29,8 @@
}
fn in_environment(
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
- obligation: PredicateObligation<'tcx>
+ infcx: &InferCtxt<'_, 'tcx>,
+ obligation: PredicateObligation<'tcx>,
) -> InEnvironment<'tcx, PredicateObligation<'tcx>> {
assert!(!infcx.is_in_snapshot());
let obligation = infcx.resolve_vars_if_possible(&obligation);
@@ -52,7 +52,7 @@
impl TraitEngine<'tcx> for FulfillmentContext<'tcx> {
fn normalize_projection_type(
&mut self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
_param_env: ty::ParamEnv<'tcx>,
projection_ty: ty::ProjectionTy<'tcx>,
_cause: ObligationCause<'tcx>,
@@ -62,7 +62,7 @@
fn register_predicate_obligation(
&mut self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
obligation: PredicateObligation<'tcx>,
) {
self.obligations.insert(in_environment(infcx, obligation));
@@ -70,7 +70,7 @@
fn select_all_or_error(
&mut self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
) -> Result<(), Vec<FulfillmentError<'tcx>>> {
self.select_where_possible(infcx)?;
@@ -89,7 +89,7 @@
fn select_where_possible(
&mut self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
) -> Result<(), Vec<FulfillmentError<'tcx>>> {
let mut errors = Vec::new();
let mut next_round = FxHashSet::default();
diff --git a/src/librustc/traits/codegen/mod.rs b/src/librustc/traits/codegen/mod.rs
index 591557e..bb40953 100644
--- a/src/librustc/traits/codegen/mod.rs
+++ b/src/librustc/traits/codegen/mod.rs
@@ -18,11 +18,10 @@
/// that type check should guarantee to us that all nested
/// obligations *could be* resolved if we wanted to.
/// Assumes that this is run after the entire crate has been successfully type-checked.
-pub fn codegen_fulfill_obligation<'a, 'tcx>(ty: TyCtxt<'a, 'tcx, 'tcx>,
- (param_env, trait_ref):
- (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>))
- -> Vtable<'tcx, ()>
-{
+pub fn codegen_fulfill_obligation<'tcx>(
+ ty: TyCtxt<'tcx>,
+ (param_env, trait_ref): (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>),
+) -> Vtable<'tcx, ()> {
// Remove any references to regions; this helps improve caching.
let trait_ref = ty.erase_regions(&trait_ref);
@@ -74,7 +73,7 @@
})
}
-impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
/// Monomorphizes a type from the AST by first applying the
/// in-scope substitutions and then normalizing any associated
/// types.
@@ -116,19 +115,19 @@
// # Global Cache
-pub struct ProjectionCache<'gcx> {
- data: PhantomData<&'gcx ()>
+pub struct ProjectionCache<'tcx> {
+ data: PhantomData<&'tcx ()>,
}
-impl<'gcx> DepTrackingMapConfig for ProjectionCache<'gcx> {
- type Key = Ty<'gcx>;
- type Value = Ty<'gcx>;
+impl<'tcx> DepTrackingMapConfig for ProjectionCache<'tcx> {
+ type Key = Ty<'tcx>;
+ type Value = Ty<'tcx>;
fn to_dep_kind() -> DepKind {
DepKind::TraitSelect
}
}
-impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
/// Finishes processes any obligations that remain in the
/// fulfillment context, and then returns the result with all type
/// variables removed and regions erased. Because this is intended
@@ -138,11 +137,13 @@
/// type inference variables that appear in `result` to be
/// unified, and hence we need to process those obligations to get
/// the complete picture of the type.
- fn drain_fulfillment_cx_or_panic<T>(&self,
- fulfill_cx: &mut FulfillmentContext<'tcx>,
- result: &T)
- -> T::Lifted
- where T: TypeFoldable<'tcx> + ty::Lift<'gcx>
+ fn drain_fulfillment_cx_or_panic<T>(
+ &self,
+ fulfill_cx: &mut FulfillmentContext<'tcx>,
+ result: &T,
+ ) -> T::Lifted
+ where
+ T: TypeFoldable<'tcx> + ty::Lift<'tcx>,
{
debug!("drain_fulfillment_cx_or_panic()");
diff --git a/src/librustc/traits/coherence.rs b/src/librustc/traits/coherence.rs
index c6521a9..d8087af 100644
--- a/src/librustc/traits/coherence.rs
+++ b/src/librustc/traits/coherence.rs
@@ -48,8 +48,8 @@
/// If there are types that satisfy both impls, invokes `on_overlap`
/// with a suitably-freshened `ImplHeader` with those types
/// substituted. Otherwise, invokes `no_overlap`.
-pub fn overlapping_impls<'gcx, F1, F2, R>(
- tcx: TyCtxt<'_, 'gcx, 'gcx>,
+pub fn overlapping_impls<'tcx, F1, F2, R>(
+ tcx: TyCtxt<'tcx>,
impl1_def_id: DefId,
impl2_def_id: DefId,
intercrate_mode: IntercrateMode,
@@ -87,11 +87,11 @@
})
}
-fn with_fresh_ty_vars<'cx, 'gcx, 'tcx>(selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- impl_def_id: DefId)
- -> ty::ImplHeader<'tcx>
-{
+fn with_fresh_ty_vars<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ impl_def_id: DefId,
+) -> ty::ImplHeader<'tcx> {
let tcx = selcx.tcx();
let impl_substs = selcx.infcx().fresh_substs_for_item(DUMMY_SP, impl_def_id);
@@ -111,8 +111,8 @@
/// Can both impl `a` and impl `b` be satisfied by a common type (including
/// where-clauses)? If so, returns an `ImplHeader` that unifies the two impls.
-fn overlap<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+fn overlap<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
a_def_id: DefId,
b_def_id: DefId,
) -> Option<OverlapResult<'tcx>> {
@@ -122,7 +122,7 @@
}
fn overlap_within_probe(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+ selcx: &mut SelectionContext<'cx, 'tcx>,
a_def_id: DefId,
b_def_id: DefId,
snapshot: &CombinedSnapshot<'_, 'tcx>,
@@ -183,10 +183,10 @@
Some(OverlapResult { impl_header, intercrate_ambiguity_causes, involves_placeholder })
}
-pub fn trait_ref_is_knowable<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- trait_ref: ty::TraitRef<'tcx>)
- -> Option<Conflict>
-{
+pub fn trait_ref_is_knowable<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ trait_ref: ty::TraitRef<'tcx>,
+) -> Option<Conflict> {
debug!("trait_ref_is_knowable(trait_ref={:?})", trait_ref);
if orphan_check_trait_ref(tcx, trait_ref, InCrate::Remote).is_ok() {
// A downstream or cousin crate is allowed to implement some
@@ -229,9 +229,10 @@
}
}
-pub fn trait_ref_is_local_or_fundamental<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- trait_ref: ty::TraitRef<'tcx>)
- -> bool {
+pub fn trait_ref_is_local_or_fundamental<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ trait_ref: ty::TraitRef<'tcx>,
+) -> bool {
trait_ref.def_id.krate == LOCAL_CRATE || tcx.has_attr(trait_ref.def_id, sym::fundamental)
}
@@ -246,10 +247,10 @@
///
/// 1. All type parameters in `Self` must be "covered" by some local type constructor.
/// 2. Some local type must appear in `Self`.
-pub fn orphan_check<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- impl_def_id: DefId)
- -> Result<(), OrphanCheckErr<'tcx>>
-{
+pub fn orphan_check<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ impl_def_id: DefId,
+) -> Result<(), OrphanCheckErr<'tcx>> {
debug!("orphan_check({:?})", impl_def_id);
// We only except this routine to be invoked on implementations
@@ -353,11 +354,11 @@
///
/// Note that this function is never called for types that have both type
/// parameters and inference variables.
-fn orphan_check_trait_ref<'tcx>(tcx: TyCtxt<'_, '_, '_>,
- trait_ref: ty::TraitRef<'tcx>,
- in_crate: InCrate)
- -> Result<(), OrphanCheckErr<'tcx>>
-{
+fn orphan_check_trait_ref<'tcx>(
+ tcx: TyCtxt<'_>,
+ trait_ref: ty::TraitRef<'tcx>,
+ in_crate: InCrate,
+) -> Result<(), OrphanCheckErr<'tcx>> {
debug!("orphan_check_trait_ref(trait_ref={:?}, in_crate={:?})",
trait_ref, in_crate);
@@ -430,8 +431,7 @@
}
}
-fn uncovered_tys<'tcx>(tcx: TyCtxt<'_, '_, '_>, ty: Ty<'tcx>, in_crate: InCrate)
- -> Vec<Ty<'tcx>> {
+fn uncovered_tys<'tcx>(tcx: TyCtxt<'_>, ty: Ty<'tcx>, in_crate: InCrate) -> Vec<Ty<'tcx>> {
if ty_is_local_constructor(ty, in_crate) {
vec![]
} else if fundamental_ty(ty) {
@@ -450,7 +450,7 @@
}
}
-fn ty_is_local(tcx: TyCtxt<'_, '_, '_>, ty: Ty<'_>, in_crate: InCrate) -> bool {
+fn ty_is_local(tcx: TyCtxt<'_>, ty: Ty<'_>, in_crate: InCrate) -> bool {
ty_is_local_constructor(ty, in_crate) ||
fundamental_ty(ty) && ty.walk_shallow().any(|t| ty_is_local(tcx, t, in_crate))
}
diff --git a/src/librustc/traits/engine.rs b/src/librustc/traits/engine.rs
index 2f019d8..b96126c 100644
--- a/src/librustc/traits/engine.rs
+++ b/src/librustc/traits/engine.rs
@@ -9,7 +9,7 @@
pub trait TraitEngine<'tcx>: 'tcx {
fn normalize_projection_type(
&mut self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
projection_ty: ty::ProjectionTy<'tcx>,
cause: ObligationCause<'tcx>,
@@ -20,7 +20,7 @@
/// parameters (except for `Self`).
fn register_bound(
&mut self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>,
def_id: DefId,
@@ -40,18 +40,18 @@
fn register_predicate_obligation(
&mut self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
obligation: PredicateObligation<'tcx>,
);
fn select_all_or_error(
&mut self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
) -> Result<(), Vec<FulfillmentError<'tcx>>>;
fn select_where_possible(
&mut self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
) -> Result<(), Vec<FulfillmentError<'tcx>>>;
fn pending_obligations(&self) -> Vec<PredicateObligation<'tcx>>;
@@ -60,7 +60,7 @@
pub trait TraitEngineExt<'tcx> {
fn register_predicate_obligations(
&mut self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
obligations: impl IntoIterator<Item = PredicateObligation<'tcx>>,
);
}
@@ -68,7 +68,7 @@
impl<T: ?Sized + TraitEngine<'tcx>> TraitEngineExt<'tcx> for T {
fn register_predicate_obligations(
&mut self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
obligations: impl IntoIterator<Item = PredicateObligation<'tcx>>,
) {
for obligation in obligations {
@@ -78,7 +78,7 @@
}
impl dyn TraitEngine<'tcx> {
- pub fn new(tcx: TyCtxt<'_, '_, 'tcx>) -> Box<Self> {
+ pub fn new(tcx: TyCtxt<'tcx>) -> Box<Self> {
if tcx.sess.opts.debugging_opts.chalk {
Box::new(ChalkFulfillmentContext::new())
} else {
diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs
index 490501b..dcf69fe 100644
--- a/src/librustc/traits/error_reporting.rs
+++ b/src/librustc/traits/error_reporting.rs
@@ -38,7 +38,7 @@
use syntax::symbol::sym;
use syntax_pos::{DUMMY_SP, Span, ExpnInfo, ExpnFormat};
-impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
pub fn report_fulfillment_errors(&self,
errors: &[FulfillmentError<'tcx>],
body_id: Option<hir::BodyId>,
@@ -353,7 +353,7 @@
_ => {
// this is a "direct", user-specified, rather than derived,
// obligation.
- flags.push(("direct".to_owned(), None));
+ flags.push((sym::direct, None));
}
}
@@ -365,27 +365,27 @@
// Currently I'm leaving it for what I need for `try`.
if self.tcx.trait_of_item(item) == Some(trait_ref.def_id) {
let method = self.tcx.item_name(item);
- flags.push(("from_method".to_owned(), None));
- flags.push(("from_method".to_owned(), Some(method.to_string())));
+ flags.push((sym::from_method, None));
+ flags.push((sym::from_method, Some(method.to_string())));
}
}
if let Some(t) = self.get_parent_trait_ref(&obligation.cause.code) {
- flags.push(("parent_trait".to_owned(), Some(t)));
+ flags.push((sym::parent_trait, Some(t)));
}
if let Some(k) = obligation.cause.span.compiler_desugaring_kind() {
- flags.push(("from_desugaring".to_owned(), None));
- flags.push(("from_desugaring".to_owned(), Some(k.name().to_string())));
+ flags.push((sym::from_desugaring, None));
+ flags.push((sym::from_desugaring, Some(k.name().to_string())));
}
let generics = self.tcx.generics_of(def_id);
let self_ty = trait_ref.self_ty();
// This is also included through the generics list as `Self`,
// but the parser won't allow you to use it
- flags.push(("_Self".to_owned(), Some(self_ty.to_string())));
+ flags.push((sym::_Self, Some(self_ty.to_string())));
if let Some(def) = self_ty.ty_adt_def() {
// We also want to be able to select self's original
// signature with no type arguments resolved
- flags.push(("_Self".to_owned(), Some(self.tcx.type_of(def.did).to_string())));
+ flags.push((sym::_Self, Some(self.tcx.type_of(def.did).to_string())));
}
for param in generics.params.iter() {
@@ -396,38 +396,38 @@
},
GenericParamDefKind::Lifetime => continue,
};
- let name = param.name.to_string();
+ let name = param.name.as_symbol();
flags.push((name, Some(value)));
}
if let Some(true) = self_ty.ty_adt_def().map(|def| def.did.is_local()) {
- flags.push(("crate_local".to_owned(), None));
+ flags.push((sym::crate_local, None));
}
// Allow targeting all integers using `{integral}`, even if the exact type was resolved
if self_ty.is_integral() {
- flags.push(("_Self".to_owned(), Some("{integral}".to_owned())));
+ flags.push((sym::_Self, Some("{integral}".to_owned())));
}
if let ty::Array(aty, len) = self_ty.sty {
- flags.push(("_Self".to_owned(), Some("[]".to_owned())));
- flags.push(("_Self".to_owned(), Some(format!("[{}]", aty))));
+ flags.push((sym::_Self, Some("[]".to_owned())));
+ flags.push((sym::_Self, Some(format!("[{}]", aty))));
if let Some(def) = aty.ty_adt_def() {
// We also want to be able to select the array's type's original
// signature with no type arguments resolved
flags.push((
- "_Self".to_owned(),
+ sym::_Self,
Some(format!("[{}]", self.tcx.type_of(def.did).to_string())),
));
let tcx = self.tcx;
if let Some(len) = len.assert_usize(tcx) {
flags.push((
- "_Self".to_owned(),
+ sym::_Self,
Some(format!("[{}; {}]", self.tcx.type_of(def.did).to_string(), len)),
));
} else {
flags.push((
- "_Self".to_owned(),
+ sym::_Self,
Some(format!("[{}; _]", self.tcx.type_of(def.did).to_string())),
));
}
@@ -1013,10 +1013,8 @@
trait_ref: &ty::Binder<ty::TraitRef<'tcx>>,
) {
let hir = self.tcx.hir();
- let parent_node = hir.get_parent_node(
- hir.hir_to_node_id(obligation.cause.body_id),
- );
- let node = hir.find(parent_node);
+ let parent_node = hir.get_parent_node_by_hir_id(obligation.cause.body_id);
+ let node = hir.find_by_hir_id(parent_node);
if let Some(hir::Node::Item(hir::Item {
node: hir::ItemKind::Fn(decl, _, _, body_id),
..
@@ -1098,7 +1096,7 @@
}
Node::Ctor(ref variant_data) => {
let span = variant_data.ctor_hir_id()
- .map(|hir_id| self.tcx.hir().span_by_hir_id(hir_id))
+ .map(|hir_id| self.tcx.hir().span(hir_id))
.unwrap_or(DUMMY_SP);
let span = self.tcx.sess.source_map().def_span(span);
@@ -1242,15 +1240,14 @@
err
}
- fn report_closure_arg_mismatch(&self,
- span: Span,
- found_span: Option<Span>,
- expected_ref: ty::PolyTraitRef<'tcx>,
- found: ty::PolyTraitRef<'tcx>)
- -> DiagnosticBuilder<'tcx>
- {
- fn build_fn_sig_string<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- trait_ref: &ty::TraitRef<'tcx>) -> String {
+ fn report_closure_arg_mismatch(
+ &self,
+ span: Span,
+ found_span: Option<Span>,
+ expected_ref: ty::PolyTraitRef<'tcx>,
+ found: ty::PolyTraitRef<'tcx>,
+ ) -> DiagnosticBuilder<'tcx> {
+ fn build_fn_sig_string<'tcx>(tcx: TyCtxt<'tcx>, trait_ref: &ty::TraitRef<'tcx>) -> String {
let inputs = trait_ref.substs.type_at(1);
let sig = if let ty::Tuple(inputs) = inputs.sty {
tcx.mk_fn_sig(
@@ -1294,7 +1291,7 @@
}
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
pub fn recursive_type_with_infinite_size_error(self,
type_def_id: DefId)
-> DiagnosticBuilder<'tcx>
@@ -1340,7 +1337,7 @@
}
}
-impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
fn maybe_report_ambiguity(&self, obligation: &PredicateObligation<'tcx>,
body_id: Option<hir::BodyId>) {
// Unable to successfully determine, probably means
@@ -1447,17 +1444,18 @@
/// Returns `true` if the trait predicate may apply for *some* assignment
/// to the type parameters.
- fn predicate_can_apply(&self,
- param_env: ty::ParamEnv<'tcx>,
- pred: ty::PolyTraitRef<'tcx>)
- -> bool {
- struct ParamToVarFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- var_map: FxHashMap<Ty<'tcx>, Ty<'tcx>>
+ fn predicate_can_apply(
+ &self,
+ param_env: ty::ParamEnv<'tcx>,
+ pred: ty::PolyTraitRef<'tcx>,
+ ) -> bool {
+ struct ParamToVarFolder<'a, 'tcx: 'a> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
+ var_map: FxHashMap<Ty<'tcx>, Ty<'tcx>>,
}
- impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for ParamToVarFolder<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.infcx.tcx }
+ impl<'a, 'tcx> TypeFolder<'tcx> for ParamToVarFolder<'a, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { self.infcx.tcx }
fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
if let ty::Param(ty::ParamTy {name, .. }) = ty.sty {
diff --git a/src/librustc/traits/fulfill.rs b/src/librustc/traits/fulfill.rs
index c7943d1..5e2c949 100644
--- a/src/librustc/traits/fulfill.rs
+++ b/src/librustc/traits/fulfill.rs
@@ -68,7 +68,7 @@
pub stalled_on: Vec<Ty<'tcx>>,
}
-impl<'a, 'gcx, 'tcx> FulfillmentContext<'tcx> {
+impl<'a, 'tcx> FulfillmentContext<'tcx> {
/// Creates a new fulfillment context.
pub fn new() -> FulfillmentContext<'tcx> {
FulfillmentContext {
@@ -95,8 +95,10 @@
}
/// Attempts to select obligations using `selcx`.
- fn select(&mut self, selcx: &mut SelectionContext<'a, 'gcx, 'tcx>)
- -> Result<(), Vec<FulfillmentError<'tcx>>> {
+ fn select(
+ &mut self,
+ selcx: &mut SelectionContext<'a, 'tcx>,
+ ) -> Result<(), Vec<FulfillmentError<'tcx>>> {
debug!("select(obligation-forest-size={})", self.predicates.len());
let mut errors = Vec::new();
@@ -143,13 +145,13 @@
/// `SomeTrait` or a where-clause that lets us unify `$0` with
/// something concrete. If this fails, we'll unify `$0` with
/// `projection_ty` again.
- fn normalize_projection_type<'a, 'gcx>(&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- projection_ty: ty::ProjectionTy<'tcx>,
- cause: ObligationCause<'tcx>)
- -> Ty<'tcx>
- {
+ fn normalize_projection_type(
+ &mut self,
+ infcx: &InferCtxt<'_, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ projection_ty: ty::ProjectionTy<'tcx>,
+ cause: ObligationCause<'tcx>,
+ ) -> Ty<'tcx> {
debug!("normalize_projection_type(projection_ty={:?})",
projection_ty);
@@ -172,10 +174,11 @@
normalized_ty
}
- fn register_predicate_obligation<'a, 'gcx>(&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- obligation: PredicateObligation<'tcx>)
- {
+ fn register_predicate_obligation(
+ &mut self,
+ infcx: &InferCtxt<'_, 'tcx>,
+ obligation: PredicateObligation<'tcx>,
+ ) {
// this helps to reduce duplicate errors, as well as making
// debug output much nicer to read and so on.
let obligation = infcx.resolve_vars_if_possible(&obligation);
@@ -190,11 +193,10 @@
});
}
- fn select_all_or_error<'a, 'gcx>(
+ fn select_all_or_error(
&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>
- ) -> Result<(),Vec<FulfillmentError<'tcx>>>
- {
+ infcx: &InferCtxt<'_, 'tcx>,
+ ) -> Result<(), Vec<FulfillmentError<'tcx>>> {
self.select_where_possible(infcx)?;
let errors: Vec<_> =
@@ -209,10 +211,10 @@
}
}
- fn select_where_possible<'a, 'gcx>(&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>)
- -> Result<(),Vec<FulfillmentError<'tcx>>>
- {
+ fn select_where_possible(
+ &mut self,
+ infcx: &InferCtxt<'_, 'tcx>,
+ ) -> Result<(), Vec<FulfillmentError<'tcx>>> {
let mut selcx = SelectionContext::new(infcx);
self.select(&mut selcx)
}
@@ -222,9 +224,9 @@
}
}
-struct FulfillProcessor<'a, 'b: 'a, 'gcx: 'tcx, 'tcx: 'b> {
- selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>,
- register_region_obligations: bool
+struct FulfillProcessor<'a, 'b: 'a, 'tcx: 'b> {
+ selcx: &'a mut SelectionContext<'b, 'tcx>,
+ register_region_obligations: bool,
}
fn mk_pending(os: Vec<PredicateObligation<'tcx>>) -> Vec<PendingPredicateObligation<'tcx>> {
@@ -234,7 +236,7 @@
}).collect()
}
-impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, 'tcx> {
+impl<'a, 'b, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'tcx> {
type Obligation = PendingPredicateObligation<'tcx>;
type Error = FulfillmentErrorCode<'tcx>;
@@ -514,9 +516,10 @@
}
/// Returns the set of type variables contained in a trait ref
-fn trait_ref_type_vars<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>,
- t: ty::PolyTraitRef<'tcx>) -> Vec<Ty<'tcx>>
-{
+fn trait_ref_type_vars<'a, 'tcx>(
+ selcx: &mut SelectionContext<'a, 'tcx>,
+ t: ty::PolyTraitRef<'tcx>,
+) -> Vec<Ty<'tcx>> {
t.skip_binder() // ok b/c this check doesn't care about regions
.input_types()
.map(|t| selcx.infcx().resolve_vars_if_possible(&t))
diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs
index fd2d8fd..f5c91a7 100644
--- a/src/librustc/traits/mod.rs
+++ b/src/librustc/traits/mod.rs
@@ -140,7 +140,7 @@
}
impl<'tcx> ObligationCause<'tcx> {
- pub fn span<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Span {
+ pub fn span(&self, tcx: TyCtxt<'tcx>) -> Span {
match self.code {
ObligationCauseCode::CompareImplMethodObligation { .. } |
ObligationCauseCode::MainFunctionType |
@@ -363,9 +363,9 @@
}
impl<'tcx> GoalKind<'tcx> {
- pub fn from_poly_domain_goal<'a, 'gcx>(
+ pub fn from_poly_domain_goal(
domain_goal: PolyDomainGoal<'tcx>,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
) -> GoalKind<'tcx> {
match domain_goal.no_bound_vars() {
Some(p) => p.into_goal(),
@@ -643,8 +643,8 @@
/// `bound` or is not known to meet bound (note that this is
/// conservative towards *no impl*, which is the opposite of the
/// `evaluate` methods).
-pub fn type_known_to_meet_bound_modulo_regions<'a, 'gcx, 'tcx>(
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+pub fn type_known_to_meet_bound_modulo_regions<'a, 'tcx>(
+ infcx: &InferCtxt<'a, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>,
def_id: DefId,
@@ -710,13 +710,13 @@
}
}
-fn do_normalize_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- region_context: DefId,
- cause: ObligationCause<'tcx>,
- elaborated_env: ty::ParamEnv<'tcx>,
- predicates: Vec<ty::Predicate<'tcx>>)
- -> Result<Vec<ty::Predicate<'tcx>>, ErrorReported>
-{
+fn do_normalize_predicates<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ region_context: DefId,
+ cause: ObligationCause<'tcx>,
+ elaborated_env: ty::ParamEnv<'tcx>,
+ predicates: Vec<ty::Predicate<'tcx>>,
+) -> Result<Vec<ty::Predicate<'tcx>>, ErrorReported> {
debug!(
"do_normalize_predicates(predicates={:?}, region_context={:?}, cause={:?})",
predicates,
@@ -795,12 +795,12 @@
// FIXME: this is gonna need to be removed ...
/// Normalizes the parameter environment, reporting errors if they occur.
-pub fn normalize_param_env_or_error<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- region_context: DefId,
- unnormalized_env: ty::ParamEnv<'tcx>,
- cause: ObligationCause<'tcx>)
- -> ty::ParamEnv<'tcx>
-{
+pub fn normalize_param_env_or_error<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ region_context: DefId,
+ unnormalized_env: ty::ParamEnv<'tcx>,
+ cause: ObligationCause<'tcx>,
+) -> ty::ParamEnv<'tcx> {
// I'm not wild about reporting errors here; I'd prefer to
// have the errors get reported at a defined place (e.g.,
// during typeck). Instead I have all parameter
@@ -904,14 +904,15 @@
)
}
-pub fn fully_normalize<'a, 'gcx, 'tcx, T>(
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+pub fn fully_normalize<'a, 'tcx, T>(
+ infcx: &InferCtxt<'a, 'tcx>,
mut fulfill_cx: FulfillmentContext<'tcx>,
cause: ObligationCause<'tcx>,
param_env: ty::ParamEnv<'tcx>,
- value: &T)
- -> Result<T, Vec<FulfillmentError<'tcx>>>
- where T : TypeFoldable<'tcx>
+ value: &T,
+) -> Result<T, Vec<FulfillmentError<'tcx>>>
+where
+ T: TypeFoldable<'tcx>,
{
debug!("fully_normalize_with_fulfillcx(value={:?})", value);
let selcx = &mut SelectionContext::new(infcx);
@@ -936,10 +937,10 @@
/// environment. If this returns false, then either normalize
/// encountered an error or one of the predicates did not hold. Used
/// when creating vtables to check for unsatisfiable methods.
-fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- predicates: Vec<ty::Predicate<'tcx>>)
- -> bool
-{
+fn normalize_and_test_predicates<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ predicates: Vec<ty::Predicate<'tcx>>,
+) -> bool {
debug!("normalize_and_test_predicates(predicates={:?})",
predicates);
@@ -965,10 +966,10 @@
result
}
-fn substitute_normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- key: (DefId, SubstsRef<'tcx>))
- -> bool
-{
+fn substitute_normalize_and_test_predicates<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ key: (DefId, SubstsRef<'tcx>),
+) -> bool {
debug!("substitute_normalize_and_test_predicates(key={:?})",
key);
@@ -983,11 +984,10 @@
/// Given a trait `trait_ref`, iterates the vtable entries
/// that come from `trait_ref`, including its supertraits.
#[inline] // FIXME(#35870): avoid closures being unexported due to `impl Trait`.
-fn vtable_methods<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- trait_ref: ty::PolyTraitRef<'tcx>)
- -> &'tcx [Option<(DefId, SubstsRef<'tcx>)>]
-{
+fn vtable_methods<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ trait_ref: ty::PolyTraitRef<'tcx>,
+) -> &'tcx [Option<(DefId, SubstsRef<'tcx>)>] {
debug!("vtable_methods({:?})", trait_ref);
tcx.arena.alloc_from_iter(
@@ -1186,7 +1186,7 @@
where
Self: chalk_engine::context::Context + Clone,
{
- fn fold_ex_clause_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(
+ fn fold_ex_clause_with<F: TypeFolder<'tcx>>(
ex_clause: &chalk_engine::ExClause<Self>,
folder: &mut F,
) -> chalk_engine::ExClause<Self>;
@@ -1205,18 +1205,18 @@
type LiftedDelayedLiteral: Debug + 'tcx;
type LiftedLiteral: Debug + 'tcx;
- fn lift_ex_clause_to_tcx<'a, 'gcx>(
+ fn lift_ex_clause_to_tcx(
ex_clause: &chalk_engine::ExClause<Self>,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
) -> Option<Self::LiftedExClause>;
- fn lift_delayed_literal_to_tcx<'a, 'gcx>(
+ fn lift_delayed_literal_to_tcx(
ex_clause: &chalk_engine::DelayedLiteral<Self>,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
) -> Option<Self::LiftedDelayedLiteral>;
- fn lift_literal_to_tcx<'a, 'gcx>(
+ fn lift_literal_to_tcx(
ex_clause: &chalk_engine::Literal<Self>,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
) -> Option<Self::LiftedLiteral>;
}
diff --git a/src/librustc/traits/object_safety.rs b/src/librustc/traits/object_safety.rs
index 5006ff7..cfd5cfa 100644
--- a/src/librustc/traits/object_safety.rs
+++ b/src/librustc/traits/object_safety.rs
@@ -83,8 +83,7 @@
UndispatchableReceiver,
}
-impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
-
+impl<'tcx> TyCtxt<'tcx> {
/// Returns the object safety violations that affect
/// astconv -- currently, `Self` in supertraits. This is needed
/// because `object_safety_violations` can't be used during
@@ -703,7 +702,6 @@
}
}
-pub(super) fn is_object_safe_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- trait_def_id: DefId) -> bool {
+pub(super) fn is_object_safe_provider<'tcx>(tcx: TyCtxt<'tcx>, trait_def_id: DefId) -> bool {
tcx.object_safety_violations(trait_def_id).is_empty()
}
diff --git a/src/librustc/traits/on_unimplemented.rs b/src/librustc/traits/on_unimplemented.rs
index 1c17ace..0a42b6b 100644
--- a/src/librustc/traits/on_unimplemented.rs
+++ b/src/librustc/traits/on_unimplemented.rs
@@ -7,7 +7,7 @@
use syntax::ast::{MetaItem, NestedMetaItem};
use syntax::attr;
-use syntax::symbol::sym;
+use syntax::symbol::{Symbol, kw, sym};
use syntax_pos::Span;
use syntax_pos::symbol::LocalInternedString;
@@ -35,12 +35,13 @@
}
}
-fn parse_error(tcx: TyCtxt<'_, '_, '_>, span: Span,
- message: &str,
- label: &str,
- note: Option<&str>)
- -> ErrorReported
-{
+fn parse_error(
+ tcx: TyCtxt<'_>,
+ span: Span,
+ message: &str,
+ label: &str,
+ note: Option<&str>,
+) -> ErrorReported {
let mut diag = struct_span_err!(
tcx.sess, span, E0232, "{}", message);
diag.span_label(span, label);
@@ -51,14 +52,14 @@
ErrorReported
}
-impl<'a, 'gcx, 'tcx> OnUnimplementedDirective {
- pub fn parse(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- trait_def_id: DefId,
- items: &[NestedMetaItem],
- span: Span,
- is_root: bool)
- -> Result<Self, ErrorReported>
- {
+impl<'tcx> OnUnimplementedDirective {
+ fn parse(
+ tcx: TyCtxt<'tcx>,
+ trait_def_id: DefId,
+ items: &[NestedMetaItem],
+ span: Span,
+ is_root: bool,
+ ) -> Result<Self, ErrorReported> {
let mut errored = false;
let mut item_iter = items.iter();
@@ -132,12 +133,11 @@
}
}
-
- pub fn of_item(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- trait_def_id: DefId,
- impl_def_id: DefId)
- -> Result<Option<Self>, ErrorReported>
- {
+ pub fn of_item(
+ tcx: TyCtxt<'tcx>,
+ trait_def_id: DefId,
+ impl_def_id: DefId,
+ ) -> Result<Option<Self>, ErrorReported> {
let attrs = tcx.get_attrs(impl_def_id);
let attr = if let Some(item) = attr::find_by_name(&attrs, sym::rustc_on_unimplemented) {
@@ -164,12 +164,12 @@
result
}
- pub fn evaluate(&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- trait_ref: ty::TraitRef<'tcx>,
- options: &[(String, Option<String>)])
- -> OnUnimplementedNote
- {
+ pub fn evaluate(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ trait_ref: ty::TraitRef<'tcx>,
+ options: &[(Symbol, Option<String>)],
+ ) -> OnUnimplementedNote {
let mut message = None;
let mut label = None;
let mut note = None;
@@ -180,7 +180,7 @@
if !attr::eval_condition(condition, &tcx.sess.parse_sess, &mut |c| {
c.ident().map_or(false, |ident| {
options.contains(&(
- ident.to_string(),
+ ident.name,
c.value_str().map(|s| s.as_str().to_string())
))
})
@@ -203,8 +203,8 @@
}
}
- let options: FxHashMap<String, String> = options.into_iter()
- .filter_map(|(k, v)| v.as_ref().map(|v| (k.to_owned(), v.to_owned())))
+ let options: FxHashMap<Symbol, String> = options.into_iter()
+ .filter_map(|(k, v)| v.as_ref().map(|v| (*k, v.to_owned())))
.collect();
OnUnimplementedNote {
label: label.map(|l| l.format(tcx, trait_ref, &options)),
@@ -214,13 +214,13 @@
}
}
-impl<'a, 'gcx, 'tcx> OnUnimplementedFormatString {
- pub fn try_parse(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- trait_def_id: DefId,
- from: LocalInternedString,
- err_sp: Span)
- -> Result<Self, ErrorReported>
- {
+impl<'tcx> OnUnimplementedFormatString {
+ fn try_parse(
+ tcx: TyCtxt<'tcx>,
+ trait_def_id: DefId,
+ from: LocalInternedString,
+ err_sp: Span,
+ ) -> Result<Self, ErrorReported> {
let result = OnUnimplementedFormatString(from);
result.verify(tcx, trait_def_id, err_sp)?;
Ok(result)
@@ -228,7 +228,7 @@
fn verify(
&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
trait_def_id: DefId,
span: Span,
) -> Result<(), ErrorReported> {
@@ -241,16 +241,16 @@
Piece::String(_) => (), // Normal string, no need to check it
Piece::NextArgument(a) => match a.position {
// `{Self}` is allowed
- Position::ArgumentNamed(s) if s == "Self" => (),
+ Position::ArgumentNamed(s) if s == kw::SelfUpper => (),
// `{ThisTraitsName}` is allowed
- Position::ArgumentNamed(s) if s == name.as_str() => (),
+ Position::ArgumentNamed(s) if s == name => (),
// `{from_method}` is allowed
- Position::ArgumentNamed(s) if s == "from_method" => (),
+ Position::ArgumentNamed(s) if s == sym::from_method => (),
// `{from_desugaring}` is allowed
- Position::ArgumentNamed(s) if s == "from_desugaring" => (),
+ Position::ArgumentNamed(s) if s == sym::from_desugaring => (),
// So is `{A}` if A is a type parameter
Position::ArgumentNamed(s) => match generics.params.iter().find(|param| {
- param.name.as_str() == s
+ param.name.as_symbol() == s
}) {
Some(_) => (),
None => {
@@ -274,9 +274,9 @@
pub fn format(
&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
trait_ref: ty::TraitRef<'tcx>,
- options: &FxHashMap<String, String>,
+ options: &FxHashMap<Symbol, String>,
) -> String {
let name = tcx.item_name(trait_ref.def_id);
let trait_str = tcx.def_path_str(trait_ref.def_id);
@@ -289,9 +289,9 @@
},
GenericParamDefKind::Lifetime => return None
};
- let name = param.name.to_string();
+ let name = param.name.as_symbol();
Some((name, value))
- }).collect::<FxHashMap<String, String>>();
+ }).collect::<FxHashMap<Symbol, String>>();
let empty_string = String::new();
let parser = Parser::new(&self.0, None, vec![], false);
@@ -299,15 +299,15 @@
match p {
Piece::String(s) => s,
Piece::NextArgument(a) => match a.position {
- Position::ArgumentNamed(s) => match generic_map.get(s) {
+ Position::ArgumentNamed(s) => match generic_map.get(&s) {
Some(val) => val,
- None if s == name.as_str() => {
+ None if s == name => {
&trait_str
}
None => {
- if let Some(val) = options.get(s) {
+ if let Some(val) = options.get(&s) {
val
- } else if s == "from_desugaring" || s == "from_method" {
+ } else if s == sym::from_desugaring || s == sym::from_method {
// don't break messages using these two arguments incorrectly
&empty_string
} else {
diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs
index 3ee6d0c..d189bb2 100644
--- a/src/librustc/traits/project.rs
+++ b/src/librustc/traits/project.rs
@@ -183,12 +183,10 @@
///
/// If successful, this may result in additional obligations. Also returns
/// the projection cache key used to track these additional obligations.
-pub fn poly_project_and_unify_type<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
- obligation: &PolyProjectionObligation<'tcx>)
- -> Result<Option<Vec<PredicateObligation<'tcx>>>,
- MismatchedProjectionTypes<'tcx>>
-{
+pub fn poly_project_and_unify_type<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
+ obligation: &PolyProjectionObligation<'tcx>,
+) -> Result<Option<Vec<PredicateObligation<'tcx>>>, MismatchedProjectionTypes<'tcx>> {
debug!("poly_project_and_unify_type(obligation={:?})",
obligation);
@@ -210,12 +208,10 @@
/// <T as Trait>::U == V
///
/// If successful, this may result in additional obligations.
-fn project_and_unify_type<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
- obligation: &ProjectionObligation<'tcx>)
- -> Result<Option<Vec<PredicateObligation<'tcx>>>,
- MismatchedProjectionTypes<'tcx>>
-{
+fn project_and_unify_type<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
+ obligation: &ProjectionObligation<'tcx>,
+) -> Result<Option<Vec<PredicateObligation<'tcx>>>, MismatchedProjectionTypes<'tcx>> {
debug!("project_and_unify_type(obligation={:?})",
obligation);
@@ -253,26 +249,28 @@
/// them with a fully resolved type where possible. The return value
/// combines the normalized result and any additional obligations that
/// were incurred as result.
-pub fn normalize<'a, 'b, 'gcx, 'tcx, T>(selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- cause: ObligationCause<'tcx>,
- value: &T)
- -> Normalized<'tcx, T>
- where T : TypeFoldable<'tcx>
+pub fn normalize<'a, 'b, 'tcx, T>(
+ selcx: &'a mut SelectionContext<'b, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ cause: ObligationCause<'tcx>,
+ value: &T,
+) -> Normalized<'tcx, T>
+where
+ T: TypeFoldable<'tcx>,
{
normalize_with_depth(selcx, param_env, cause, 0, value)
}
/// As `normalize`, but with a custom depth.
-pub fn normalize_with_depth<'a, 'b, 'gcx, 'tcx, T>(
- selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>,
+pub fn normalize_with_depth<'a, 'b, 'tcx, T>(
+ selcx: &'a mut SelectionContext<'b, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
cause: ObligationCause<'tcx>,
depth: usize,
- value: &T)
- -> Normalized<'tcx, T>
-
- where T : TypeFoldable<'tcx>
+ value: &T,
+) -> Normalized<'tcx, T>
+where
+ T: TypeFoldable<'tcx>,
{
debug!("normalize_with_depth(depth={}, value={:?})", depth, value);
let mut normalizer = AssocTypeNormalizer::new(selcx, param_env, cause, depth);
@@ -287,21 +285,21 @@
}
}
-struct AssocTypeNormalizer<'a, 'b: 'a, 'gcx: 'b+'tcx, 'tcx: 'b> {
- selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>,
+struct AssocTypeNormalizer<'a, 'b: 'a, 'tcx: 'b> {
+ selcx: &'a mut SelectionContext<'b, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
cause: ObligationCause<'tcx>,
obligations: Vec<PredicateObligation<'tcx>>,
depth: usize,
}
-impl<'a, 'b, 'gcx, 'tcx> AssocTypeNormalizer<'a, 'b, 'gcx, 'tcx> {
- fn new(selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- cause: ObligationCause<'tcx>,
- depth: usize)
- -> AssocTypeNormalizer<'a, 'b, 'gcx, 'tcx>
- {
+impl<'a, 'b, 'tcx> AssocTypeNormalizer<'a, 'b, 'tcx> {
+ fn new(
+ selcx: &'a mut SelectionContext<'b, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ cause: ObligationCause<'tcx>,
+ depth: usize,
+ ) -> AssocTypeNormalizer<'a, 'b, 'tcx> {
AssocTypeNormalizer {
selcx,
param_env,
@@ -322,8 +320,8 @@
}
}
-impl<'a, 'b, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for AssocTypeNormalizer<'a, 'b, 'gcx, 'tcx> {
- fn tcx<'c>(&'c self) -> TyCtxt<'c, 'gcx, 'tcx> {
+impl<'a, 'b, 'tcx> TypeFolder<'tcx> for AssocTypeNormalizer<'a, 'b, 'tcx> {
+ fn tcx<'c>(&'c self) -> TyCtxt<'tcx> {
self.selcx.tcx()
}
@@ -456,15 +454,14 @@
/// there are unresolved type variables in the projection, we will
/// substitute a fresh type variable `$X` and generate a new
/// obligation `<T as Trait>::Item == $X` for later.
-pub fn normalize_projection_type<'a, 'b, 'gcx, 'tcx>(
- selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>,
+pub fn normalize_projection_type<'a, 'b, 'tcx>(
+ selcx: &'a mut SelectionContext<'b, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
projection_ty: ty::ProjectionTy<'tcx>,
cause: ObligationCause<'tcx>,
depth: usize,
- obligations: &mut Vec<PredicateObligation<'tcx>>)
- -> Ty<'tcx>
-{
+ obligations: &mut Vec<PredicateObligation<'tcx>>,
+) -> Ty<'tcx> {
opt_normalize_projection_type(selcx, param_env, projection_ty.clone(), cause.clone(), depth,
obligations)
.unwrap_or_else(move || {
@@ -501,15 +498,14 @@
/// often immediately appended to another obligations vector. So now this
/// function takes an obligations vector and appends to it directly, which is
/// slightly uglier but avoids the need for an extra short-lived allocation.
-fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>(
- selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>,
+fn opt_normalize_projection_type<'a, 'b, 'tcx>(
+ selcx: &'a mut SelectionContext<'b, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
projection_ty: ty::ProjectionTy<'tcx>,
cause: ObligationCause<'tcx>,
depth: usize,
- obligations: &mut Vec<PredicateObligation<'tcx>>)
- -> Option<Ty<'tcx>>
-{
+ obligations: &mut Vec<PredicateObligation<'tcx>>,
+) -> Option<Ty<'tcx>> {
let infcx = selcx.infcx();
let projection_ty = infcx.resolve_vars_if_possible(&projection_ty);
@@ -705,9 +701,10 @@
/// If there are unresolved type variables, then we need to include
/// any subobligations that bind them, at least until those type
/// variables are fully resolved.
-fn prune_cache_value_obligations<'a, 'gcx, 'tcx>(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- result: &NormalizedTy<'tcx>)
- -> NormalizedTy<'tcx> {
+fn prune_cache_value_obligations<'a, 'tcx>(
+ infcx: &'a InferCtxt<'a, 'tcx>,
+ result: &NormalizedTy<'tcx>,
+) -> NormalizedTy<'tcx> {
if infcx.unresolved_type_vars(&result.value).is_none() {
return NormalizedTy { value: result.value, obligations: vec![] };
}
@@ -763,14 +760,13 @@
/// that may yet turn out to be wrong. This *may* lead to some sort
/// of trouble, though we don't have a concrete example of how that
/// can occur yet. But it seems risky at best.
-fn get_paranoid_cache_value_obligation<'a, 'gcx, 'tcx>(
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+fn get_paranoid_cache_value_obligation<'a, 'tcx>(
+ infcx: &'a InferCtxt<'a, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
projection_ty: ty::ProjectionTy<'tcx>,
cause: ObligationCause<'tcx>,
- depth: usize)
- -> PredicateObligation<'tcx>
-{
+ depth: usize,
+) -> PredicateObligation<'tcx> {
let trait_ref = projection_ty.trait_ref(infcx.tcx).to_poly_trait_ref();
Obligation {
cause,
@@ -799,13 +795,13 @@
/// an error for this obligation, but we legitimately should not,
/// because it contains `[type error]`. Yuck! (See issue #29857 for
/// one case where this arose.)
-fn normalize_to_error<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- projection_ty: ty::ProjectionTy<'tcx>,
- cause: ObligationCause<'tcx>,
- depth: usize)
- -> NormalizedTy<'tcx>
-{
+fn normalize_to_error<'a, 'tcx>(
+ selcx: &mut SelectionContext<'a, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ projection_ty: ty::ProjectionTy<'tcx>,
+ cause: ObligationCause<'tcx>,
+ depth: usize,
+) -> NormalizedTy<'tcx> {
let trait_ref = projection_ty.trait_ref(selcx.tcx()).to_poly_trait_ref();
let trait_obligation = Obligation { cause,
recursion_depth: depth,
@@ -836,7 +832,7 @@
}
impl<'tcx> Progress<'tcx> {
- fn error<'a,'gcx>(tcx: TyCtxt<'a,'gcx,'tcx>) -> Self {
+ fn error(tcx: TyCtxt<'tcx>) -> Self {
Progress {
ty: tcx.types.err,
obligations: vec![],
@@ -861,11 +857,10 @@
///
/// IMPORTANT:
/// - `obligation` must be fully normalized
-fn project_type<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
- obligation: &ProjectionTyObligation<'tcx>)
- -> Result<ProjectedTy<'tcx>, ProjectionTyError<'tcx>>
-{
+fn project_type<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
+ obligation: &ProjectionTyObligation<'tcx>,
+) -> Result<ProjectedTy<'tcx>, ProjectionTyError<'tcx>> {
debug!("project(obligation={:?})",
obligation);
@@ -925,12 +920,12 @@
/// The first thing we have to do is scan through the parameter
/// environment to see whether there are any projection predicates
/// there that can answer this question.
-fn assemble_candidates_from_param_env<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+fn assemble_candidates_from_param_env<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTyObligation<'tcx>,
obligation_trait_ref: &ty::TraitRef<'tcx>,
- candidate_set: &mut ProjectionTyCandidateSet<'tcx>)
-{
+ candidate_set: &mut ProjectionTyCandidateSet<'tcx>,
+) {
debug!("assemble_candidates_from_param_env(..)");
assemble_candidates_from_predicates(selcx,
obligation,
@@ -950,12 +945,12 @@
/// ```
///
/// Here, for example, we could conclude that the result is `i32`.
-fn assemble_candidates_from_trait_def<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+fn assemble_candidates_from_trait_def<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTyObligation<'tcx>,
obligation_trait_ref: &ty::TraitRef<'tcx>,
- candidate_set: &mut ProjectionTyCandidateSet<'tcx>)
-{
+ candidate_set: &mut ProjectionTyCandidateSet<'tcx>,
+) {
debug!("assemble_candidates_from_trait_def(..)");
let tcx = selcx.tcx();
@@ -986,14 +981,15 @@
bounds)
}
-fn assemble_candidates_from_predicates<'cx, 'gcx, 'tcx, I>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+fn assemble_candidates_from_predicates<'cx, 'tcx, I>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTyObligation<'tcx>,
obligation_trait_ref: &ty::TraitRef<'tcx>,
candidate_set: &mut ProjectionTyCandidateSet<'tcx>,
ctor: fn(ty::PolyProjectionPredicate<'tcx>) -> ProjectionTyCandidate<'tcx>,
- env_predicates: I)
- where I: IntoIterator<Item=ty::Predicate<'tcx>>
+ env_predicates: I,
+) where
+ I: IntoIterator<Item = ty::Predicate<'tcx>>,
{
debug!("assemble_candidates_from_predicates(obligation={:?})",
obligation);
@@ -1029,12 +1025,12 @@
}
}
-fn assemble_candidates_from_impls<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+fn assemble_candidates_from_impls<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTyObligation<'tcx>,
obligation_trait_ref: &ty::TraitRef<'tcx>,
- candidate_set: &mut ProjectionTyCandidateSet<'tcx>)
-{
+ candidate_set: &mut ProjectionTyCandidateSet<'tcx>,
+) {
// If we are resolving `<T as TraitRef<...>>::Item == Type`,
// start out by selecting the predicate `T as TraitRef<...>`:
let poly_trait_ref = obligation_trait_ref.to_poly_trait_ref();
@@ -1177,13 +1173,12 @@
});
}
-fn confirm_candidate<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+fn confirm_candidate<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTyObligation<'tcx>,
obligation_trait_ref: &ty::TraitRef<'tcx>,
- candidate: ProjectionTyCandidate<'tcx>)
- -> Progress<'tcx>
-{
+ candidate: ProjectionTyCandidate<'tcx>,
+) -> Progress<'tcx> {
debug!("confirm_candidate(candidate={:?}, obligation={:?})",
candidate,
obligation);
@@ -1200,13 +1195,12 @@
}
}
-fn confirm_select_candidate<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+fn confirm_select_candidate<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTyObligation<'tcx>,
obligation_trait_ref: &ty::TraitRef<'tcx>,
- vtable: Selection<'tcx>)
- -> Progress<'tcx>
-{
+ vtable: Selection<'tcx>,
+) -> Progress<'tcx> {
match vtable {
super::VtableImpl(data) =>
confirm_impl_candidate(selcx, obligation, data),
@@ -1230,12 +1224,11 @@
}
}
-fn confirm_object_candidate<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
- obligation: &ProjectionTyObligation<'tcx>,
- obligation_trait_ref: &ty::TraitRef<'tcx>)
- -> Progress<'tcx>
-{
+fn confirm_object_candidate<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
+ obligation: &ProjectionTyObligation<'tcx>,
+ obligation_trait_ref: &ty::TraitRef<'tcx>,
+) -> Progress<'tcx> {
let self_ty = obligation_trait_ref.self_ty();
let object_ty = selcx.infcx().shallow_resolve(self_ty);
debug!("confirm_object_candidate(object_ty={:?})",
@@ -1295,12 +1288,11 @@
confirm_param_env_candidate(selcx, obligation, env_predicate)
}
-fn confirm_generator_candidate<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+fn confirm_generator_candidate<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTyObligation<'tcx>,
- vtable: VtableGeneratorData<'tcx, PredicateObligation<'tcx>>)
- -> Progress<'tcx>
-{
+ vtable: VtableGeneratorData<'tcx, PredicateObligation<'tcx>>,
+) -> Progress<'tcx> {
let gen_sig = vtable.substs.poly_sig(vtable.generator_def_id, selcx.tcx());
let Normalized {
value: gen_sig,
@@ -1348,12 +1340,11 @@
.with_addl_obligations(obligations)
}
-fn confirm_fn_pointer_candidate<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+fn confirm_fn_pointer_candidate<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTyObligation<'tcx>,
- fn_pointer_vtable: VtableFnPointerData<'tcx, PredicateObligation<'tcx>>)
- -> Progress<'tcx>
-{
+ fn_pointer_vtable: VtableFnPointerData<'tcx, PredicateObligation<'tcx>>,
+) -> Progress<'tcx> {
let fn_type = selcx.infcx().shallow_resolve(fn_pointer_vtable.fn_ty);
let sig = fn_type.fn_sig(selcx.tcx());
let Normalized {
@@ -1370,12 +1361,11 @@
.with_addl_obligations(obligations)
}
-fn confirm_closure_candidate<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+fn confirm_closure_candidate<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTyObligation<'tcx>,
- vtable: VtableClosureData<'tcx, PredicateObligation<'tcx>>)
- -> Progress<'tcx>
-{
+ vtable: VtableClosureData<'tcx, PredicateObligation<'tcx>>,
+) -> Progress<'tcx> {
let tcx = selcx.tcx();
let infcx = selcx.infcx();
let closure_sig_ty = vtable.substs.closure_sig_ty(vtable.closure_def_id, tcx);
@@ -1402,13 +1392,12 @@
.with_addl_obligations(obligations)
}
-fn confirm_callable_candidate<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+fn confirm_callable_candidate<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTyObligation<'tcx>,
fn_sig: ty::PolyFnSig<'tcx>,
- flag: util::TupleArgumentsFlag)
- -> Progress<'tcx>
-{
+ flag: util::TupleArgumentsFlag,
+) -> Progress<'tcx> {
let tcx = selcx.tcx();
debug!("confirm_callable_candidate({:?},{:?})",
@@ -1437,8 +1426,8 @@
confirm_param_env_candidate(selcx, obligation, predicate)
}
-fn confirm_param_env_candidate<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+fn confirm_param_env_candidate<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTyObligation<'tcx>,
poly_cache_entry: ty::PolyProjectionPredicate<'tcx>,
) -> Progress<'tcx> {
@@ -1478,12 +1467,11 @@
}
}
-fn confirm_impl_candidate<'cx, 'gcx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+fn confirm_impl_candidate<'cx, 'tcx>(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTyObligation<'tcx>,
- impl_vtable: VtableImplData<'tcx, PredicateObligation<'tcx>>)
- -> Progress<'tcx>
-{
+ impl_vtable: VtableImplData<'tcx, PredicateObligation<'tcx>>,
+) -> Progress<'tcx> {
let VtableImplData { impl_def_id, substs, nested } = impl_vtable;
let tcx = selcx.tcx();
@@ -1521,12 +1509,11 @@
///
/// Based on the "projection mode", this lookup may in fact only examine the
/// topmost impl. See the comments for `Reveal` for more details.
-fn assoc_ty_def<'cx, 'gcx, 'tcx>(
- selcx: &SelectionContext<'cx, 'gcx, 'tcx>,
+fn assoc_ty_def<'cx, 'tcx>(
+ selcx: &SelectionContext<'cx, 'tcx>,
impl_def_id: DefId,
- assoc_ty_def_id: DefId)
- -> specialization_graph::NodeItem<ty::AssocItem>
-{
+ assoc_ty_def_id: DefId,
+) -> specialization_graph::NodeItem<ty::AssocItem> {
let tcx = selcx.tcx();
let assoc_ty_name = tcx.associated_item(assoc_ty_def_id).ident;
let trait_def_id = tcx.impl_trait_ref(impl_def_id).unwrap().def_id;
@@ -1608,11 +1595,11 @@
ty: ty::ProjectionTy<'tcx>
}
-impl<'cx, 'gcx, 'tcx> ProjectionCacheKey<'tcx> {
- pub fn from_poly_projection_predicate(selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
- predicate: &ty::PolyProjectionPredicate<'tcx>)
- -> Option<Self>
- {
+impl<'cx, 'tcx> ProjectionCacheKey<'tcx> {
+ pub fn from_poly_projection_predicate(
+ selcx: &mut SelectionContext<'cx, 'tcx>,
+ predicate: &ty::PolyProjectionPredicate<'tcx>,
+ ) -> Option<Self> {
let infcx = selcx.infcx();
// We don't do cross-snapshot caching of obligations with escaping regions,
// so there's no cache key to use
diff --git a/src/librustc/traits/query/dropck_outlives.rs b/src/librustc/traits/query/dropck_outlives.rs
index c4aa14d..46403a3 100644
--- a/src/librustc/traits/query/dropck_outlives.rs
+++ b/src/librustc/traits/query/dropck_outlives.rs
@@ -6,7 +6,7 @@
use crate::ty::subst::Kind;
use crate::ty::{self, Ty, TyCtxt};
-impl<'cx, 'gcx, 'tcx> At<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> At<'cx, 'tcx> {
/// Given a type `ty` of some value being dropped, computes a set
/// of "kinds" (types, regions) that must be outlive the execution
/// of the destructor. These basically correspond to data that the
@@ -85,12 +85,7 @@
}
impl<'tcx> DropckOutlivesResult<'tcx> {
- pub fn report_overflows(
- &self,
- tcx: TyCtxt<'_, '_, 'tcx>,
- span: Span,
- ty: Ty<'tcx>,
- ) {
+ pub fn report_overflows(&self, tcx: TyCtxt<'tcx>, span: Span, ty: Ty<'tcx>) {
if let Some(overflow_ty) = self.overflows.iter().next() {
let mut err = struct_span_err!(
tcx.sess,
@@ -106,7 +101,7 @@
pub fn into_kinds_reporting_overflows(
self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
span: Span,
ty: Ty<'tcx>,
) -> Vec<Kind<'tcx>> {
@@ -190,7 +185,7 @@
///
/// Note also that `needs_drop` requires a "global" type (i.e., one
/// with erased regions), but this function does not.
-pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'_, '_, 'tcx>, ty: Ty<'tcx>) -> bool {
+pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool {
match ty.sty {
// None of these types have a destructor and hence they do not
// require anything in particular to outlive the dtor's
diff --git a/src/librustc/traits/query/evaluate_obligation.rs b/src/librustc/traits/query/evaluate_obligation.rs
index d5230f1..2ee6364 100644
--- a/src/librustc/traits/query/evaluate_obligation.rs
+++ b/src/librustc/traits/query/evaluate_obligation.rs
@@ -3,7 +3,7 @@
use crate::traits::{EvaluationResult, PredicateObligation, SelectionContext,
TraitQueryMode, OverflowError};
-impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
/// Evaluates whether the predicate can be satisfied (by any means)
/// in the given `ParamEnv`.
pub fn predicate_may_hold(
@@ -64,7 +64,7 @@
Err(OverflowError) => {
let mut selcx =
SelectionContext::with_query_mode(&self, TraitQueryMode::Standard);
- selcx.evaluate_obligation_recursively(obligation)
+ selcx.evaluate_root_obligation(obligation)
.unwrap_or_else(|r| {
span_bug!(
obligation.cause.span,
diff --git a/src/librustc/traits/query/normalize.rs b/src/librustc/traits/query/normalize.rs
index 0b20ec8..5047672 100644
--- a/src/librustc/traits/query/normalize.rs
+++ b/src/librustc/traits/query/normalize.rs
@@ -14,7 +14,7 @@
use super::NoSolution;
-impl<'cx, 'gcx, 'tcx> At<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> At<'cx, 'tcx> {
/// Normalize `value` in the context of the inference context,
/// yielding a resulting type, or an error if `value` cannot be
/// normalized. If you don't care about regions, you should prefer
@@ -73,8 +73,8 @@
pub normalized_ty: Ty<'tcx>,
}
-struct QueryNormalizer<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+struct QueryNormalizer<'cx, 'tcx: 'cx> {
+ infcx: &'cx InferCtxt<'cx, 'tcx>,
cause: &'cx ObligationCause<'tcx>,
param_env: ty::ParamEnv<'tcx>,
obligations: Vec<PredicateObligation<'tcx>>,
@@ -82,8 +82,8 @@
anon_depth: usize,
}
-impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for QueryNormalizer<'cx, 'gcx, 'tcx> {
- fn tcx<'c>(&'c self) -> TyCtxt<'c, 'gcx, 'tcx> {
+impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> {
+ fn tcx<'c>(&'c self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
diff --git a/src/librustc/traits/query/normalize_erasing_regions.rs b/src/librustc/traits/query/normalize_erasing_regions.rs
index 0c12526..3218ff0 100644
--- a/src/librustc/traits/query/normalize_erasing_regions.rs
+++ b/src/librustc/traits/query/normalize_erasing_regions.rs
@@ -10,7 +10,7 @@
use crate::ty::{self, Ty, TyCtxt};
use crate::ty::fold::{TypeFoldable, TypeFolder};
-impl<'cx, 'tcx> TyCtxt<'cx, 'tcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
/// Erase the regions in `value` and then fully normalize all the
/// types found within. The result will also have regions erased.
///
@@ -62,13 +62,13 @@
}
}
-struct NormalizeAfterErasingRegionsFolder<'cx, 'tcx: 'cx> {
- tcx: TyCtxt<'cx, 'tcx, 'tcx>,
+struct NormalizeAfterErasingRegionsFolder<'tcx> {
+ tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
}
-impl<'cx, 'tcx> TypeFolder<'tcx, 'tcx> for NormalizeAfterErasingRegionsFolder<'cx, 'tcx> {
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
+impl TypeFolder<'tcx> for NormalizeAfterErasingRegionsFolder<'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
diff --git a/src/librustc/traits/query/outlives_bounds.rs b/src/librustc/traits/query/outlives_bounds.rs
index 954de15..40bd187 100644
--- a/src/librustc/traits/query/outlives_bounds.rs
+++ b/src/librustc/traits/query/outlives_bounds.rs
@@ -64,7 +64,7 @@
}
}
-impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
/// Implied bounds are region relationships that we deduce
/// automatically. The idea is that (e.g.) a caller must check that a
/// function's argument types are well-formed immediately before
diff --git a/src/librustc/traits/query/type_op/ascribe_user_type.rs b/src/librustc/traits/query/type_op/ascribe_user_type.rs
index d9f573e..05a4d43 100644
--- a/src/librustc/traits/query/type_op/ascribe_user_type.rs
+++ b/src/librustc/traits/query/type_op/ascribe_user_type.rs
@@ -21,25 +21,25 @@
}
}
-impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for AscribeUserType<'tcx> {
+impl<'tcx> super::QueryTypeOp<'tcx> for AscribeUserType<'tcx> {
type QueryResponse = ();
fn try_fast_path(
- _tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ _tcx: TyCtxt<'tcx>,
_key: &ParamEnvAnd<'tcx, Self>,
) -> Option<Self::QueryResponse> {
None
}
fn perform_query(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
- canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, ()>> {
+ tcx: TyCtxt<'tcx>,
+ canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Self>>,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, ()>> {
tcx.type_op_ascribe_user_type(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResponse<'gcx, ()>,
+ v: &'a CanonicalizedQueryResponse<'tcx, ()>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
diff --git a/src/librustc/traits/query/type_op/custom.rs b/src/librustc/traits/query/type_op/custom.rs
index 5933d23..72550e2 100644
--- a/src/librustc/traits/query/type_op/custom.rs
+++ b/src/librustc/traits/query/type_op/custom.rs
@@ -14,9 +14,9 @@
}
impl<F, G> CustomTypeOp<F, G> {
- pub fn new<'gcx, 'tcx, R>(closure: F, description: G) -> Self
+ pub fn new<'tcx, R>(closure: F, description: G) -> Self
where
- F: FnOnce(&InferCtxt<'_, 'gcx, 'tcx>) -> Fallible<InferOk<'tcx, R>>,
+ F: FnOnce(&InferCtxt<'_, 'tcx>) -> Fallible<InferOk<'tcx, R>>,
G: Fn() -> String,
{
CustomTypeOp {
@@ -26,9 +26,9 @@
}
}
-impl<'gcx, 'tcx, F, R, G> super::TypeOp<'gcx, 'tcx> for CustomTypeOp<F, G>
+impl<'tcx, F, R, G> super::TypeOp<'tcx> for CustomTypeOp<F, G>
where
- F: for<'a, 'cx> FnOnce(&'a InferCtxt<'cx, 'gcx, 'tcx>) -> Fallible<InferOk<'tcx, R>>,
+ F: for<'a, 'cx> FnOnce(&'a InferCtxt<'cx, 'tcx>) -> Fallible<InferOk<'tcx, R>>,
G: Fn() -> String,
{
type Output = R;
@@ -38,7 +38,7 @@
/// (they will be given over to the NLL region solver).
fn fully_perform(
self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
) -> Fallible<(Self::Output, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)> {
if cfg!(debug_assertions) {
info!("fully_perform({:?})", self);
@@ -59,8 +59,8 @@
/// Executes `op` and then scrapes out all the "old style" region
/// constraints that result, creating query-region-constraints.
-fn scrape_region_constraints<'gcx, 'tcx, R>(
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+fn scrape_region_constraints<'tcx, R>(
+ infcx: &InferCtxt<'_, 'tcx>,
op: impl FnOnce() -> Fallible<InferOk<'tcx, R>>,
) -> Fallible<(R, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)> {
let mut fulfill_cx = TraitEngine::new(infcx.tcx);
diff --git a/src/librustc/traits/query/type_op/eq.rs b/src/librustc/traits/query/type_op/eq.rs
index 5c3ccc9..e8ec304 100644
--- a/src/librustc/traits/query/type_op/eq.rs
+++ b/src/librustc/traits/query/type_op/eq.rs
@@ -14,11 +14,11 @@
}
}
-impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for Eq<'tcx> {
+impl<'tcx> super::QueryTypeOp<'tcx> for Eq<'tcx> {
type QueryResponse = ();
fn try_fast_path(
- _tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ _tcx: TyCtxt<'tcx>,
key: &ParamEnvAnd<'tcx, Eq<'tcx>>,
) -> Option<Self::QueryResponse> {
if key.value.a == key.value.b {
@@ -29,14 +29,14 @@
}
fn perform_query(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
- canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, ()>> {
+ tcx: TyCtxt<'tcx>,
+ canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Self>>,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, ()>> {
tcx.type_op_eq(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResponse<'gcx, ()>,
+ v: &'a CanonicalizedQueryResponse<'tcx, ()>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
diff --git a/src/librustc/traits/query/type_op/implied_outlives_bounds.rs b/src/librustc/traits/query/type_op/implied_outlives_bounds.rs
index c48ca33..3beb4d6 100644
--- a/src/librustc/traits/query/type_op/implied_outlives_bounds.rs
+++ b/src/librustc/traits/query/type_op/implied_outlives_bounds.rs
@@ -14,20 +14,20 @@
}
}
-impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for ImpliedOutlivesBounds<'tcx> {
+impl<'tcx> super::QueryTypeOp<'tcx> for ImpliedOutlivesBounds<'tcx> {
type QueryResponse = Vec<OutlivesBound<'tcx>>;
fn try_fast_path(
- _tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ _tcx: TyCtxt<'tcx>,
_key: &ParamEnvAnd<'tcx, Self>,
) -> Option<Self::QueryResponse> {
None
}
fn perform_query(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
- canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self::QueryResponse>> {
+ tcx: TyCtxt<'tcx>,
+ canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Self>>,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, Self::QueryResponse>> {
// FIXME this `unchecked_map` is only necessary because the
// query is defined as taking a `ParamEnvAnd<Ty>`; it should
// take a `ImpliedOutlivesBounds` instead
@@ -40,7 +40,7 @@
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResponse<'gcx, Self::QueryResponse>,
+ v: &'a CanonicalizedQueryResponse<'tcx, Self::QueryResponse>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>> {
v
}
diff --git a/src/librustc/traits/query/type_op/mod.rs b/src/librustc/traits/query/type_op/mod.rs
index fd13acc..b298edf 100644
--- a/src/librustc/traits/query/type_op/mod.rs
+++ b/src/librustc/traits/query/type_op/mod.rs
@@ -23,7 +23,7 @@
/// "Type ops" are used in NLL to perform some particular action and
/// extract out the resulting region constraints (or an error if it
/// cannot be completed).
-pub trait TypeOp<'gcx, 'tcx>: Sized + fmt::Debug {
+pub trait TypeOp<'tcx>: Sized + fmt::Debug {
type Output;
/// Processes the operation and all resulting obligations,
@@ -31,7 +31,7 @@
/// (they will be given over to the NLL region solver).
fn fully_perform(
self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
) -> Fallible<(Self::Output, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)>;
}
@@ -44,16 +44,14 @@
/// which produces the resulting query region constraints.
///
/// [c]: https://rust-lang.github.io/rustc-guide/traits/canonicalization.html
-pub trait QueryTypeOp<'gcx: 'tcx, 'tcx>:
- fmt::Debug + Sized + TypeFoldable<'tcx> + Lift<'gcx>
-{
- type QueryResponse: TypeFoldable<'tcx> + Lift<'gcx>;
+pub trait QueryTypeOp<'tcx>: fmt::Debug + Sized + TypeFoldable<'tcx> + Lift<'tcx> {
+ type QueryResponse: TypeFoldable<'tcx> + Lift<'tcx>;
/// Give query the option for a simple fast path that never
/// actually hits the tcx cache lookup etc. Return `Some(r)` with
/// a final result or `None` to do the full path.
fn try_fast_path(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
key: &ParamEnvAnd<'tcx, Self>,
) -> Option<Self::QueryResponse>;
@@ -64,29 +62,29 @@
/// bad, because it would create subregion relationships that are
/// not captured in the return value.
fn perform_query(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
- canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self::QueryResponse>>;
+ tcx: TyCtxt<'tcx>,
+ canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Self>>,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, Self::QueryResponse>>;
/// Casts a lifted query result (which is in the gcx lifetime)
/// into the tcx lifetime. This is always just an identity cast,
/// but the generic code doesn't realize it -- put another way, in
- /// the generic code, we have a `Lifted<'gcx, Self::QueryResponse>`
+ /// the generic code, we have a `Lifted<'tcx, Self::QueryResponse>`
/// and we want to convert that to a `Self::QueryResponse`. This is
/// not a priori valid, so we can't do it -- but in practice, it
/// is always a no-op (e.g., the lifted form of a type,
-    /// `Ty<'gcx>`, is a subtype of `Ty<'tcx>`). So we have to push
+    /// the lifted form of `Ty<'tcx>` is just `Ty<'tcx>` itself). So we have to push
/// the operation into the impls that know more specifically what
/// `QueryResponse` is. This operation would (maybe) be nicer with
/// something like HKTs or GATs, since then we could make
-    /// `QueryResponse` parametric and `'gcx` and `'tcx` etc.
+    /// `QueryResponse` parametric in `'tcx` etc.
fn shrink_to_tcx_lifetime(
- lifted_query_result: &'a CanonicalizedQueryResponse<'gcx, Self::QueryResponse>,
+ lifted_query_result: &'a CanonicalizedQueryResponse<'tcx, Self::QueryResponse>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>>;
fn fully_perform_into(
query_key: ParamEnvAnd<'tcx, Self>,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
output_query_region_constraints: &mut Vec<QueryRegionConstraint<'tcx>>,
) -> Fallible<Self::QueryResponse> {
if let Some(result) = QueryTypeOp::try_fast_path(infcx.tcx, &query_key) {
@@ -133,15 +131,15 @@
}
}
-impl<'gcx: 'tcx, 'tcx, Q> TypeOp<'gcx, 'tcx> for ParamEnvAnd<'tcx, Q>
+impl<'tcx, Q> TypeOp<'tcx> for ParamEnvAnd<'tcx, Q>
where
- Q: QueryTypeOp<'gcx, 'tcx>,
+ Q: QueryTypeOp<'tcx>,
{
type Output = Q::QueryResponse;
fn fully_perform(
self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
) -> Fallible<(Self::Output, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)> {
let mut qrc = vec![];
let r = Q::fully_perform_into(self, infcx, &mut qrc)?;
diff --git a/src/librustc/traits/query/type_op/normalize.rs b/src/librustc/traits/query/type_op/normalize.rs
index 594f55e..5a768d9 100644
--- a/src/librustc/traits/query/type_op/normalize.rs
+++ b/src/librustc/traits/query/type_op/normalize.rs
@@ -18,13 +18,13 @@
}
}
-impl<'gcx: 'tcx, 'tcx, T> super::QueryTypeOp<'gcx, 'tcx> for Normalize<T>
+impl<'tcx, T> super::QueryTypeOp<'tcx> for Normalize<T>
where
- T: Normalizable<'gcx, 'tcx>,
+ T: Normalizable<'tcx>,
{
type QueryResponse = T;
- fn try_fast_path(_tcx: TyCtxt<'_, 'gcx, 'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<T> {
+ fn try_fast_path(_tcx: TyCtxt<'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<T> {
if !key.value.value.has_projections() {
Some(key.value.value)
} else {
@@ -33,99 +33,87 @@
}
fn perform_query(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
- canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self::QueryResponse>> {
+ tcx: TyCtxt<'tcx>,
+ canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Self>>,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, Self::QueryResponse>> {
T::type_op_method(tcx, canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResponse<'gcx, T>,
+ v: &'a CanonicalizedQueryResponse<'tcx, T>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, T>> {
T::shrink_to_tcx_lifetime(v)
}
}
-pub trait Normalizable<'gcx, 'tcx>: fmt::Debug + TypeFoldable<'tcx> + Lift<'gcx> + Copy {
+pub trait Normalizable<'tcx>: fmt::Debug + TypeFoldable<'tcx> + Lift<'tcx> + Copy {
fn type_op_method(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
- canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self>>;
+ tcx: TyCtxt<'tcx>,
+ canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>>;
- /// Converts from the `'gcx` (lifted) form of `Self` into the `tcx`
+ /// Converts from the `'tcx` (lifted) form of `Self` into the `tcx`
/// form of `Self`.
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResponse<'gcx, Self>,
+ v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>>;
}
-impl Normalizable<'gcx, 'tcx> for Ty<'tcx>
-where
- 'gcx: 'tcx,
-{
+impl Normalizable<'tcx> for Ty<'tcx> {
fn type_op_method(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
- canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self>> {
+ tcx: TyCtxt<'tcx>,
+ canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>> {
tcx.type_op_normalize_ty(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResponse<'gcx, Self>,
+ v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
-impl Normalizable<'gcx, 'tcx> for ty::Predicate<'tcx>
-where
- 'gcx: 'tcx,
-{
+impl Normalizable<'tcx> for ty::Predicate<'tcx> {
fn type_op_method(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
- canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self>> {
+ tcx: TyCtxt<'tcx>,
+ canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>> {
tcx.type_op_normalize_predicate(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResponse<'gcx, Self>,
+ v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
-impl Normalizable<'gcx, 'tcx> for ty::PolyFnSig<'tcx>
-where
- 'gcx: 'tcx,
-{
+impl Normalizable<'tcx> for ty::PolyFnSig<'tcx> {
fn type_op_method(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
- canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self>> {
+ tcx: TyCtxt<'tcx>,
+ canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>> {
tcx.type_op_normalize_poly_fn_sig(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResponse<'gcx, Self>,
+ v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
-impl Normalizable<'gcx, 'tcx> for ty::FnSig<'tcx>
-where
- 'gcx: 'tcx,
-{
+impl Normalizable<'tcx> for ty::FnSig<'tcx> {
fn type_op_method(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
- canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self>> {
+ tcx: TyCtxt<'tcx>,
+ canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, Self>> {
tcx.type_op_normalize_fn_sig(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResponse<'gcx, Self>,
+ v: &'a CanonicalizedQueryResponse<'tcx, Self>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
diff --git a/src/librustc/traits/query/type_op/outlives.rs b/src/librustc/traits/query/type_op/outlives.rs
index fc0c1c0..d4b3635 100644
--- a/src/librustc/traits/query/type_op/outlives.rs
+++ b/src/librustc/traits/query/type_op/outlives.rs
@@ -15,14 +15,11 @@
}
}
-impl super::QueryTypeOp<'gcx, 'tcx> for DropckOutlives<'tcx>
-where
- 'gcx: 'tcx,
-{
+impl super::QueryTypeOp<'tcx> for DropckOutlives<'tcx> {
type QueryResponse = DropckOutlivesResult<'tcx>;
fn try_fast_path(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
key: &ParamEnvAnd<'tcx, Self>,
) -> Option<Self::QueryResponse> {
if trivial_dropck_outlives(tcx, key.value.dropped_ty) {
@@ -33,9 +30,9 @@
}
fn perform_query(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
- canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self::QueryResponse>> {
+ tcx: TyCtxt<'tcx>,
+ canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Self>>,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, Self::QueryResponse>> {
// Subtle: note that we are not invoking
// `infcx.at(...).dropck_outlives(...)` here, but rather the
// underlying `dropck_outlives` query. This same underlying
@@ -58,7 +55,7 @@
}
fn shrink_to_tcx_lifetime(
- lifted_query_result: &'a CanonicalizedQueryResponse<'gcx, Self::QueryResponse>,
+ lifted_query_result: &'a CanonicalizedQueryResponse<'tcx, Self::QueryResponse>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>> {
lifted_query_result
}
diff --git a/src/librustc/traits/query/type_op/prove_predicate.rs b/src/librustc/traits/query/type_op/prove_predicate.rs
index 50dedf6..1efe663 100644
--- a/src/librustc/traits/query/type_op/prove_predicate.rs
+++ b/src/librustc/traits/query/type_op/prove_predicate.rs
@@ -13,11 +13,11 @@
}
}
-impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for ProvePredicate<'tcx> {
+impl<'tcx> super::QueryTypeOp<'tcx> for ProvePredicate<'tcx> {
type QueryResponse = ();
fn try_fast_path(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
key: &ParamEnvAnd<'tcx, Self>,
) -> Option<Self::QueryResponse> {
// Proving Sized, very often on "obviously sized" types like
@@ -38,14 +38,14 @@
}
fn perform_query(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
- canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, ()>> {
+ tcx: TyCtxt<'tcx>,
+ canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Self>>,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, ()>> {
tcx.type_op_prove_predicate(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResponse<'gcx, ()>,
+ v: &'a CanonicalizedQueryResponse<'tcx, ()>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
diff --git a/src/librustc/traits/query/type_op/subtype.rs b/src/librustc/traits/query/type_op/subtype.rs
index c45fb06..71c7499 100644
--- a/src/librustc/traits/query/type_op/subtype.rs
+++ b/src/librustc/traits/query/type_op/subtype.rs
@@ -17,10 +17,10 @@
}
}
-impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for Subtype<'tcx> {
+impl<'tcx> super::QueryTypeOp<'tcx> for Subtype<'tcx> {
type QueryResponse = ();
- fn try_fast_path(_tcx: TyCtxt<'_, 'gcx, 'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<()> {
+ fn try_fast_path(_tcx: TyCtxt<'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<()> {
if key.value.sub == key.value.sup {
Some(())
} else {
@@ -29,14 +29,14 @@
}
fn perform_query(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
- canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResponse<'gcx, ()>> {
+ tcx: TyCtxt<'tcx>,
+ canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Self>>,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, ()>> {
tcx.type_op_subtype(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResponse<'gcx, ()>,
+ v: &'a CanonicalizedQueryResponse<'tcx, ()>,
) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
diff --git a/src/librustc/traits/select.rs b/src/librustc/traits/select.rs
index af0b51e..5aa7a12 100644
--- a/src/librustc/traits/select.rs
+++ b/src/librustc/traits/select.rs
@@ -43,22 +43,22 @@
use rustc_data_structures::bit_set::GrowableBitSet;
use rustc_data_structures::sync::Lock;
use rustc_target::spec::abi::Abi;
-use std::cell::Cell;
+use std::cell::{Cell, RefCell};
use std::cmp;
use std::fmt::{self, Display};
use std::iter;
use std::rc::Rc;
use crate::util::nodemap::{FxHashMap, FxHashSet};
-pub struct SelectionContext<'cx, 'gcx: 'cx + 'tcx, 'tcx: 'cx> {
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+pub struct SelectionContext<'cx, 'tcx: 'cx> {
+ infcx: &'cx InferCtxt<'cx, 'tcx>,
/// Freshener used specifically for entries on the obligation
/// stack. This ensures that all entries on the stack at one time
/// will have the same set of placeholder entries, which is
/// important for checking for trait bounds that recursively
/// require themselves.
- freshener: TypeFreshener<'cx, 'gcx, 'tcx>,
+ freshener: TypeFreshener<'cx, 'tcx>,
/// If `true`, indicates that the evaluation should be conservative
/// and consider the possibility of types outside this crate.
@@ -151,12 +151,12 @@
/// selection-context's freshener. Used to check for recursion.
fresh_trait_ref: ty::PolyTraitRef<'tcx>,
- /// Starts out as false -- if, during evaluation, we encounter a
- /// cycle, then we will set this flag to true for all participants
- /// in the cycle (apart from the "head" node). These participants
- /// will then forego caching their results. This is not the most
- /// efficient solution, but it addresses #60010. The problem we
- /// are trying to prevent:
+ /// Starts out equal to `depth` -- if, during evaluation, we
+ /// encounter a cycle, then we will set this flag to the minimum
+ /// depth of that cycle for all participants in the cycle. These
+ /// participants will then forego caching their results. This is
+ /// not the most efficient solution, but it addresses #60010. The
+ /// problem we are trying to prevent:
///
/// - If you have `A: AutoTrait` requires `B: AutoTrait` and `C: NonAutoTrait`
/// - `B: AutoTrait` requires `A: AutoTrait` (coinductive cycle, ok)
@@ -179,9 +179,16 @@
/// evaluate each member of a cycle up to N times, where N is the
/// length of the cycle. This means the performance impact is
/// bounded and we shouldn't have any terrible worst-cases.
- in_cycle: Cell<bool>,
+ reached_depth: Cell<usize>,
previous: TraitObligationStackList<'prev, 'tcx>,
+
+ /// Number of parent frames plus one -- so the topmost frame has depth 1.
+ depth: usize,
+
+ /// Depth-first number of this node in the search graph -- a
+ /// pre-order index. Basically a freshly incremented counter.
+ dfn: usize,
}
#[derive(Clone, Default)]
@@ -300,7 +307,7 @@
impl<'a, 'tcx> ty::Lift<'tcx> for SelectionCandidate<'a> {
type Lifted = SelectionCandidate<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
Some(match *self {
BuiltinCandidate { has_nested } => BuiltinCandidate { has_nested },
ImplCandidate(def_id) => ImplCandidate(def_id),
@@ -487,8 +494,8 @@
hashmap: Lock<FxHashMap<ty::PolyTraitRef<'tcx>, WithDepNode<EvaluationResult>>>,
}
-impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
- pub fn new(infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>) -> SelectionContext<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
+ pub fn new(infcx: &'cx InferCtxt<'cx, 'tcx>) -> SelectionContext<'cx, 'tcx> {
SelectionContext {
infcx,
freshener: infcx.freshener(),
@@ -500,9 +507,9 @@
}
pub fn intercrate(
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+ infcx: &'cx InferCtxt<'cx, 'tcx>,
mode: IntercrateMode,
- ) -> SelectionContext<'cx, 'gcx, 'tcx> {
+ ) -> SelectionContext<'cx, 'tcx> {
debug!("intercrate({:?})", mode);
SelectionContext {
infcx,
@@ -515,9 +522,9 @@
}
pub fn with_negative(
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+ infcx: &'cx InferCtxt<'cx, 'tcx>,
allow_negative_impls: bool,
- ) -> SelectionContext<'cx, 'gcx, 'tcx> {
+ ) -> SelectionContext<'cx, 'tcx> {
debug!("with_negative({:?})", allow_negative_impls);
SelectionContext {
infcx,
@@ -530,9 +537,9 @@
}
pub fn with_query_mode(
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+ infcx: &'cx InferCtxt<'cx, 'tcx>,
query_mode: TraitQueryMode,
- ) -> SelectionContext<'cx, 'gcx, 'tcx> {
+ ) -> SelectionContext<'cx, 'tcx> {
debug!("with_query_mode({:?})", query_mode);
SelectionContext {
infcx,
@@ -564,15 +571,15 @@
self.intercrate_ambiguity_causes.take().unwrap_or(vec![])
}
- pub fn infcx(&self) -> &'cx InferCtxt<'cx, 'gcx, 'tcx> {
+ pub fn infcx(&self) -> &'cx InferCtxt<'cx, 'tcx> {
self.infcx
}
- pub fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> {
+ pub fn tcx(&self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
- pub fn closure_typer(&self) -> &'cx InferCtxt<'cx, 'gcx, 'tcx> {
+ pub fn closure_typer(&self) -> &'cx InferCtxt<'cx, 'tcx> {
self.infcx
}
@@ -600,7 +607,8 @@
debug!("select({:?})", obligation);
debug_assert!(!obligation.predicate.has_escaping_bound_vars());
- let stack = self.push_stack(TraitObligationStackList::empty(), obligation);
+ let pec = &ProvisionalEvaluationCache::default();
+ let stack = self.push_stack(TraitObligationStackList::empty(pec), obligation);
let candidate = match self.candidate_from_obligation(&stack) {
Err(SelectionError::Overflow) => {
@@ -646,20 +654,23 @@
// where we do not expect overflow to be propagated.
assert!(self.query_mode == TraitQueryMode::Standard);
- self.evaluate_obligation_recursively(obligation)
+ self.evaluate_root_obligation(obligation)
.expect("Overflow should be caught earlier in standard query mode")
.may_apply()
}
- /// Evaluates whether the obligation `obligation` can be satisfied and returns
- /// an `EvaluationResult`.
- pub fn evaluate_obligation_recursively(
+ /// Evaluates whether the obligation `obligation` can be satisfied
+ /// and returns an `EvaluationResult`. This is meant for the
+ /// *initial* call.
+ pub fn evaluate_root_obligation(
&mut self,
obligation: &PredicateObligation<'tcx>,
) -> Result<EvaluationResult, OverflowError> {
self.evaluation_probe(|this| {
- this.evaluate_predicate_recursively(TraitObligationStackList::empty(),
- obligation.clone())
+ this.evaluate_predicate_recursively(
+ TraitObligationStackList::empty(&ProvisionalEvaluationCache::default()),
+ obligation.clone(),
+ )
})
}
@@ -865,23 +876,131 @@
return Ok(result);
}
+ if let Some(result) = stack.cache().get_provisional(fresh_trait_ref) {
+ debug!("PROVISIONAL CACHE HIT: EVAL({:?})={:?}", fresh_trait_ref, result);
+ stack.update_reached_depth(stack.cache().current_reached_depth());
+ return Ok(result);
+ }
+
+ // Check if this is a match for something already on the
+ // stack. If so, we don't want to insert the result into the
+ // main cache (it is cycle dependent) nor the provisional
+ // cache (which is meant for things that have completed but
+ // for a "backedge" -- this result *is* the backedge).
+ if let Some(cycle_result) = self.check_evaluation_cycle(&stack) {
+ return Ok(cycle_result);
+ }
+
let (result, dep_node) = self.in_task(|this| this.evaluate_stack(&stack));
let result = result?;
- if !stack.in_cycle.get() {
+ if !result.must_apply_modulo_regions() {
+ stack.cache().on_failure(stack.dfn);
+ }
+
+ let reached_depth = stack.reached_depth.get();
+ if reached_depth >= stack.depth {
debug!("CACHE MISS: EVAL({:?})={:?}", fresh_trait_ref, result);
self.insert_evaluation_cache(obligation.param_env, fresh_trait_ref, dep_node, result);
+
+ stack.cache().on_completion(stack.depth, |fresh_trait_ref, provisional_result| {
+ self.insert_evaluation_cache(
+ obligation.param_env,
+ fresh_trait_ref,
+ dep_node,
+ provisional_result.max(result),
+ );
+ });
} else {
+ debug!("PROVISIONAL: {:?}={:?}", fresh_trait_ref, result);
debug!(
- "evaluate_trait_predicate_recursively: skipping cache because {:?} \
- is a cycle participant",
+ "evaluate_trait_predicate_recursively: caching provisionally because {:?} \
+ is a cycle participant (at depth {}, reached depth {})",
fresh_trait_ref,
+ stack.depth,
+ reached_depth,
+ );
+
+ stack.cache().insert_provisional(
+ stack.dfn,
+ reached_depth,
+ fresh_trait_ref,
+ result,
);
}
+
Ok(result)
}
+ /// If there is any previous entry on the stack that precisely
+ /// matches this obligation, then we can assume that the
+ /// obligation is satisfied for now (still all other conditions
+ /// must be met of course). One obvious case this comes up is
+ /// marker traits like `Send`. Think of a linked list:
+ ///
+ /// struct List<T> { data: T, next: Option<Box<List<T>>> }
+ ///
+ /// `Box<List<T>>` will be `Send` if `T` is `Send` and
+ /// `Option<Box<List<T>>>` is `Send`, and in turn
+ /// `Option<Box<List<T>>>` is `Send` if `Box<List<T>>` is
+ /// `Send`.
+ ///
+ /// Note that we do this comparison using the `fresh_trait_ref`
+ /// fields. Because these have all been freshened using
+ /// `self.freshener`, we can be sure that (a) this will not
+ /// affect the inferencer state and (b) that if we see two
+ /// fresh regions with the same index, they refer to the same
+ /// unbound type variable.
+ fn check_evaluation_cycle(
+ &mut self,
+ stack: &TraitObligationStack<'_, 'tcx>,
+ ) -> Option<EvaluationResult> {
+ if let Some(cycle_depth) = stack.iter()
+ .skip(1) // skip top-most frame
+ .find(|prev| stack.obligation.param_env == prev.obligation.param_env &&
+ stack.fresh_trait_ref == prev.fresh_trait_ref)
+ .map(|stack| stack.depth)
+ {
+ debug!(
+ "evaluate_stack({:?}) --> recursive at depth {}",
+ stack.fresh_trait_ref,
+ cycle_depth,
+ );
+
+ // If we have a stack like `A B C D E A`, where the top of
+ // the stack is the final `A`, then this will iterate over
+ // `A, E, D, C, B` -- i.e., all the participants apart
+ // from the cycle head. We mark them as participating in a
+ // cycle. This suppresses caching for those nodes. See
+ // the `reached_depth` field for more details.
+ stack.update_reached_depth(cycle_depth);
+
+ // Subtle: when checking for a coinductive cycle, we do
+ // not compare using the "freshened trait refs" (which
+ // have erased regions) but rather the fully explicit
+ // trait refs. This is important because it's only a cycle
+ // if the regions match exactly.
+ let cycle = stack.iter().skip(1).take_while(|s| s.depth >= cycle_depth);
+ let cycle = cycle.map(|stack| ty::Predicate::Trait(stack.obligation.predicate));
+ if self.coinductive_match(cycle) {
+ debug!(
+ "evaluate_stack({:?}) --> recursive, coinductive",
+ stack.fresh_trait_ref
+ );
+ Some(EvaluatedToOk)
+ } else {
+ debug!(
+ "evaluate_stack({:?}) --> recursive, inductive",
+ stack.fresh_trait_ref
+ );
+ Some(EvaluatedToRecur)
+ }
+ } else {
+ None
+ }
+ }
+
fn evaluate_stack<'o>(
&mut self,
stack: &TraitObligationStack<'o, 'tcx>,
@@ -958,65 +1077,6 @@
return Ok(EvaluatedToUnknown);
}
- // If there is any previous entry on the stack that precisely
- // matches this obligation, then we can assume that the
- // obligation is satisfied for now (still all other conditions
- // must be met of course). One obvious case this comes up is
- // marker traits like `Send`. Think of a linked list:
- //
- // struct List<T> { data: T, next: Option<Box<List<T>>> }
- //
- // `Box<List<T>>` will be `Send` if `T` is `Send` and
- // `Option<Box<List<T>>>` is `Send`, and in turn
- // `Option<Box<List<T>>>` is `Send` if `Box<List<T>>` is
- // `Send`.
- //
- // Note that we do this comparison using the `fresh_trait_ref`
- // fields. Because these have all been freshened using
- // `self.freshener`, we can be sure that (a) this will not
- // affect the inferencer state and (b) that if we see two
- // fresh regions with the same index, they refer to the same
- // unbound type variable.
- if let Some(rec_index) = stack.iter()
- .skip(1) // skip top-most frame
- .position(|prev| stack.obligation.param_env == prev.obligation.param_env &&
- stack.fresh_trait_ref == prev.fresh_trait_ref)
- {
- debug!("evaluate_stack({:?}) --> recursive", stack.fresh_trait_ref);
-
- // If we have a stack like `A B C D E A`, where the top of
- // the stack is the final `A`, then this will iterate over
- // `A, E, D, C, B` -- i.e., all the participants apart
- // from the cycle head. We mark them as participating in a
- // cycle. This suppresses caching for those nodes. See
- // `in_cycle` field for more details.
- for item in stack.iter().take(rec_index + 1) {
- debug!("evaluate_stack: marking {:?} as cycle participant", item.fresh_trait_ref);
- item.in_cycle.set(true);
- }
-
- // Subtle: when checking for a coinductive cycle, we do
- // not compare using the "freshened trait refs" (which
- // have erased regions) but rather the fully explicit
- // trait refs. This is important because it's only a cycle
- // if the regions match exactly.
- let cycle = stack.iter().skip(1).take(rec_index + 1);
- let cycle = cycle.map(|stack| ty::Predicate::Trait(stack.obligation.predicate));
- if self.coinductive_match(cycle) {
- debug!(
- "evaluate_stack({:?}) --> recursive, coinductive",
- stack.fresh_trait_ref
- );
- return Ok(EvaluatedToOk);
- } else {
- debug!(
- "evaluate_stack({:?}) --> recursive, inductive",
- stack.fresh_trait_ref
- );
- return Ok(EvaluatedToRecur);
- }
- }
-
match self.candidate_from_obligation(stack) {
Ok(Some(c)) => self.evaluate_candidate(stack, &c),
Ok(None) => Ok(EvaluatedToAmbig),
@@ -1219,6 +1279,11 @@
}
// If no match, compute result and insert into cache.
+ //
+ // FIXME(nikomatsakis) -- this cache is not taking into
+ // account cycles that may have occurred in forming the
+ // candidate. I don't know of any specific problems that
+ // result but it seems awfully suspicious.
let (candidate, dep_node) =
self.in_task(|this| this.candidate_from_obligation_no_cache(stack));
@@ -3734,11 +3799,15 @@
.to_poly_trait_ref()
.fold_with(&mut self.freshener);
+ let dfn = previous_stack.cache.next_dfn();
+ let depth = previous_stack.depth() + 1;
TraitObligationStack {
obligation,
fresh_trait_ref,
- in_cycle: Cell::new(false),
+ reached_depth: Cell::new(depth),
previous: previous_stack,
+ dfn,
+ depth,
}
}
@@ -3931,28 +4000,283 @@
TraitObligationStackList::with(self)
}
+ fn cache(&self) -> &'o ProvisionalEvaluationCache<'tcx> {
+ self.previous.cache
+ }
+
fn iter(&'o self) -> TraitObligationStackList<'o, 'tcx> {
self.list()
}
+
+ /// Indicates that attempting to evaluate this stack entry
+ /// required accessing something from the stack at depth `reached_depth`.
+ fn update_reached_depth(&self, reached_depth: usize) {
+ assert!(
+ self.depth > reached_depth,
+ "invoked `update_reached_depth` with something under this stack: \
+ self.depth={} reached_depth={}",
+ self.depth,
+ reached_depth,
+ );
+ debug!("update_reached_depth(reached_depth={})", reached_depth);
+ let mut p = self;
+ while reached_depth < p.depth {
+ debug!("update_reached_depth: marking {:?} as cycle participant", p.fresh_trait_ref);
+ p.reached_depth.set(p.reached_depth.get().min(reached_depth));
+ p = p.previous.head.unwrap();
+ }
+ }
+}
+
+/// The "provisional evaluation cache" is used to store intermediate cache results
+/// when solving auto traits. Auto traits are unusual in that they can support
+/// cycles. So, for example, a "proof tree" like this would be ok:
+///
+/// - `Foo<T>: Send` :-
+/// - `Bar<T>: Send` :-
+/// - `Foo<T>: Send` -- cycle, but ok
+/// - `Baz<T>: Send`
+///
+/// Here, to prove `Foo<T>: Send`, we have to prove `Bar<T>: Send` and
+/// `Baz<T>: Send`. Proving `Bar<T>: Send` in turn required `Foo<T>: Send`.
+/// For non-auto traits, this cycle would be an error, but for auto traits (because
+/// they are coinductive) it is considered ok.
+///
+/// However, there is a complication: at the point where we have
+/// "proven" `Bar<T>: Send`, we have in fact only proven it
+/// *provisionally*. In particular, we proved that `Bar<T>: Send`
+/// *under the assumption* that `Foo<T>: Send`. But what if we later
+/// find out this assumption is wrong? Specifically, we could
+/// encounter some kind of error proving `Baz<T>: Send`. In that case,
+/// `Bar<T>: Send` didn't turn out to be true.
+///
+/// In Issue #60010, we found a bug in rustc where it would cache
+/// these intermediate results. This was fixed in #60444 by disabling
+/// *all* caching for things involved in a cycle -- in our example,
+/// that would mean we don't cache that `Bar<T>: Send`. But this led
+/// to large slowdowns.
+///
+/// Specifically, imagine this scenario, where proving `Baz<T>: Send`
+/// first requires proving `Bar<T>: Send` (which is true:
+///
+/// - `Foo<T>: Send` :-
+/// - `Bar<T>: Send` :-
+/// - `Foo<T>: Send` -- cycle, but ok
+/// - `Baz<T>: Send`
+/// - `Bar<T>: Send` -- would be nice for this to be a cache hit!
+/// - `*const T: Send` -- but what if we later encounter an error?
+///
+/// The *provisional evaluation cache* resolves this issue. It stores
+/// cache results that we've proven but which were involved in a cycle
+/// in some way. We track the minimal stack depth (i.e., the
+/// farthest from the top of the stack) that we are dependent on.
+/// The idea is that the cache results within are all valid -- so long as
+/// none of the nodes in between the current node and the node at that minimum
+/// depth result in an error (in which case the cached results are just thrown away).
+///
+/// During evaluation, we consult this provisional cache and rely on
+/// it. Accessing a cached value is considered equivalent to accessing
+/// a result at `reached_depth`, so it marks the *current* solution as
+/// provisional as well. If an error is encountered, we toss out any
+/// provisional results added from the subtree that encountered the
+/// error. When we pop the node at `reached_depth` from the stack, we
+/// can commit all the things that remain in the provisional cache.
+struct ProvisionalEvaluationCache<'tcx> {
+ /// next "depth first number" to issue -- just a counter
+ dfn: Cell<usize>,
+
+ /// Stores the "coldest" depth (bottom of stack) reached by any of
+ /// the evaluation entries. The idea here is that all things in the provisional
+ /// cache are always dependent on *something* that is colder in the stack:
+ /// therefore, if we add a new entry that is dependent on something *colder still*,
+ /// we have to modify the depth for all entries at once.
+ ///
+ /// Example:
+ ///
+ /// Imagine we have a stack `A B C D E` (with `E` being the top of
+ /// the stack). We cache something with depth 2, which means that
+ /// it was dependent on C. Then we pop E but go on and process a
+ /// new node F: A B C D F. Now F adds something to the cache with
+ /// depth 1, meaning it is dependent on B. Our original cache
+ /// entry is also dependent on B, because there is a path from E
+ /// to C and then from C to F and from F to B.
+ reached_depth: Cell<usize>,
+
+ /// Map from cache key to the provisionally evaluated thing.
+ /// The cache entries contain the result but also the DFN in which they
+ /// were added. The DFN is used to clear out values on failure.
+ ///
+ /// Imagine we have a stack like:
+ ///
+ /// - `A B C` and we add a cache for the result of C (DFN 2)
+ /// - Then we have a stack `A B D` where `D` has DFN 3
+ /// - We try to solve D by evaluating E: `A B D E` (DFN 4)
+ /// - `E` generates various cache entries which have cyclic dependencies on `B`
+ /// - `A B D E F` and so forth
+ /// - the DFN of `F` for example would be 5
+ /// - then we determine that `E` is in error -- we will then clear
+ /// all cache values whose DFN is >= 4 -- in this case, that
+ /// means the cached value for `F`.
+ map: RefCell<FxHashMap<ty::PolyTraitRef<'tcx>, ProvisionalEvaluation>>,
+}
+
+/// A cache value for the provisional cache: contains the depth-first
+/// number (DFN) and result.
+#[derive(Copy, Clone, Debug)]
+struct ProvisionalEvaluation {
+ from_dfn: usize,
+ result: EvaluationResult,
+}
+
+impl<'tcx> Default for ProvisionalEvaluationCache<'tcx> {
+ fn default() -> Self {
+ Self {
+ dfn: Cell::new(0),
+ reached_depth: Cell::new(std::usize::MAX),
+ map: Default::default(),
+ }
+ }
+}
+
+impl<'tcx> ProvisionalEvaluationCache<'tcx> {
+ /// Get the next DFN in sequence (basically a counter).
+ fn next_dfn(&self) -> usize {
+ let result = self.dfn.get();
+ self.dfn.set(result + 1);
+ result
+ }
+
+ /// Check the provisional cache for any result for
+ /// `fresh_trait_ref`. If there is a hit, then you must consider
+ /// it an access to the stack slots at depth
+ /// `self.current_reached_depth()` and above.
+ fn get_provisional(&self, fresh_trait_ref: ty::PolyTraitRef<'tcx>) -> Option<EvaluationResult> {
+ debug!(
+ "get_provisional(fresh_trait_ref={:?}) = {:#?} with reached-depth {}",
+ fresh_trait_ref,
+ self.map.borrow().get(&fresh_trait_ref),
+ self.reached_depth.get(),
+ );
+ Some(self.map.borrow().get(&fresh_trait_ref)?.result)
+ }
+
+ /// Current value of the `reached_depth` counter -- all the
+ /// provisional cache entries are dependent on the item at this
+ /// depth.
+ fn current_reached_depth(&self) -> usize {
+ self.reached_depth.get()
+ }
+
+ /// Insert a provisional result into the cache. The result came
+ /// from the node with the given DFN. It accessed a minimum depth
+ /// of `reached_depth` to compute. It evaluated `fresh_trait_ref`
+ /// and resulted in `result`.
+ fn insert_provisional(
+ &self,
+ from_dfn: usize,
+ reached_depth: usize,
+ fresh_trait_ref: ty::PolyTraitRef<'tcx>,
+ result: EvaluationResult,
+ ) {
+ debug!(
+ "insert_provisional(from_dfn={}, reached_depth={}, fresh_trait_ref={:?}, result={:?})",
+ from_dfn,
+ reached_depth,
+ fresh_trait_ref,
+ result,
+ );
+ let r_d = self.reached_depth.get();
+ self.reached_depth.set(r_d.min(reached_depth));
+
+ debug!("insert_provisional: reached_depth={:?}", self.reached_depth.get());
+
+ self.map.borrow_mut().insert(fresh_trait_ref, ProvisionalEvaluation { from_dfn, result });
+ }
+
+ /// Invoked when the node with dfn `dfn` does not get a successful
+ /// result. This will clear out any provisional cache entries
+ /// that were added since `dfn` was created. This is because the
+ /// provisional entries are things which must assume that the
+ /// things on the stack at the time of their creation succeeded --
+ /// since the failing node is presently at the top of the stack,
+ /// these provisional entries must either depend on it or some
+ /// ancestor of it.
+ fn on_failure(&self, dfn: usize) {
+ debug!(
+ "on_failure(dfn={:?})",
+ dfn,
+ );
+ self.map.borrow_mut().retain(|key, eval| {
+ if eval.from_dfn >= dfn {
+ debug!("on_failure: removing {:?}", key);
+ false
+ } else {
+ true
+ }
+ });
+ }
+
+ /// Invoked when the node at depth `depth` completed without
+ /// depending on anything higher in the stack (if that completion
+ /// was a failure, then `on_failure` should have been invoked
+ /// already). The callback `op` will be invoked for each
+ /// provisional entry that we can now confirm.
+ fn on_completion(
+ &self,
+ depth: usize,
+ mut op: impl FnMut(ty::PolyTraitRef<'tcx>, EvaluationResult),
+ ) {
+ debug!(
+ "on_completion(depth={}, reached_depth={})",
+ depth,
+ self.reached_depth.get(),
+ );
+
+ if self.reached_depth.get() < depth {
+ debug!("on_completion: did not yet reach depth to complete");
+ return;
+ }
+
+ for (fresh_trait_ref, eval) in self.map.borrow_mut().drain() {
+ debug!(
+ "on_completion: fresh_trait_ref={:?} eval={:?}",
+ fresh_trait_ref,
+ eval,
+ );
+
+ op(fresh_trait_ref, eval.result);
+ }
+
+ self.reached_depth.set(std::usize::MAX);
+ }
}
#[derive(Copy, Clone)]
struct TraitObligationStackList<'o, 'tcx: 'o> {
+ cache: &'o ProvisionalEvaluationCache<'tcx>,
head: Option<&'o TraitObligationStack<'o, 'tcx>>,
}
impl<'o, 'tcx> TraitObligationStackList<'o, 'tcx> {
- fn empty() -> TraitObligationStackList<'o, 'tcx> {
- TraitObligationStackList { head: None }
+ fn empty(cache: &'o ProvisionalEvaluationCache<'tcx>) -> TraitObligationStackList<'o, 'tcx> {
+ TraitObligationStackList { cache, head: None }
}
fn with(r: &'o TraitObligationStack<'o, 'tcx>) -> TraitObligationStackList<'o, 'tcx> {
- TraitObligationStackList { head: Some(r) }
+ TraitObligationStackList { cache: r.cache(), head: Some(r) }
}
fn head(&self) -> Option<&'o TraitObligationStack<'o, 'tcx>> {
self.head
}
+
+ fn depth(&self) -> usize {
+ if let Some(head) = self.head {
+ head.depth
+ } else {
+ 0
+ }
+ }
}
impl<'o, 'tcx> Iterator for TraitObligationStackList<'o, 'tcx> {
@@ -3989,7 +4313,7 @@
}
}
- pub fn get(&self, tcx: TyCtxt<'_, '_, '_>) -> T {
+ pub fn get(&self, tcx: TyCtxt<'_>) -> T {
tcx.dep_graph.read_index(self.dep_node);
self.cached_value.clone()
}
diff --git a/src/librustc/traits/specialize/mod.rs b/src/librustc/traits/specialize/mod.rs
index b5d45d0..3d47e94 100644
--- a/src/librustc/traits/specialize/mod.rs
+++ b/src/librustc/traits/specialize/mod.rs
@@ -69,12 +69,13 @@
/// through associated type projection. We deal with such cases by using
/// *fulfillment* to relate the two impls, requiring that all projections are
/// resolved.
-pub fn translate_substs<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- source_impl: DefId,
- source_substs: SubstsRef<'tcx>,
- target_node: specialization_graph::Node)
- -> SubstsRef<'tcx> {
+pub fn translate_substs<'a, 'tcx>(
+ infcx: &InferCtxt<'a, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ source_impl: DefId,
+ source_substs: SubstsRef<'tcx>,
+ target_node: specialization_graph::Node,
+) -> SubstsRef<'tcx> {
debug!("translate_substs({:?}, {:?}, {:?}, {:?})",
param_env, source_impl, source_substs, target_node);
let source_trait_ref = infcx.tcx
@@ -109,8 +110,8 @@
/// the kind `kind`, and trait method substitutions `substs`, in
/// that impl, a less specialized impl, or the trait default,
/// whichever applies.
-pub fn find_associated_item<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub fn find_associated_item<'tcx>(
+ tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
item: &ty::AssocItem,
substs: SubstsRef<'tcx>,
@@ -149,10 +150,10 @@
/// Specialization is determined by the sets of types to which the impls apply;
/// `impl1` specializes `impl2` if it applies to a subset of the types `impl2` applies
/// to.
-pub(super) fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- (impl1_def_id, impl2_def_id): (DefId, DefId))
- -> bool
-{
+pub(super) fn specializes<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ (impl1_def_id, impl2_def_id): (DefId, DefId),
+) -> bool {
debug!("specializes({:?}, {:?})", impl1_def_id, impl2_def_id);
// The feature gate should prevent introducing new specializations, but not
@@ -209,11 +210,12 @@
/// generics of `target_impl`, including both those needed to unify with
/// `source_trait_ref` and those whose identity is determined via a where
/// clause in the impl.
-fn fulfill_implication<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- source_trait_ref: ty::TraitRef<'tcx>,
- target_impl: DefId)
- -> Result<SubstsRef<'tcx>, ()> {
+fn fulfill_implication<'a, 'tcx>(
+ infcx: &InferCtxt<'a, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ source_trait_ref: ty::TraitRef<'tcx>,
+ target_impl: DefId,
+) -> Result<SubstsRef<'tcx>, ()> {
debug!("fulfill_implication({:?}, trait_ref={:?} |- {:?} applies)",
param_env, source_trait_ref, target_impl);
@@ -285,8 +287,8 @@
}
// Query provider for `specialization_graph_of`.
-pub(super) fn specialization_graph_provider<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub(super) fn specialization_graph_provider<'tcx>(
+ tcx: TyCtxt<'tcx>,
trait_id: DefId,
) -> &'tcx specialization_graph::Graph {
let mut sg = specialization_graph::Graph::new();
@@ -390,7 +392,7 @@
/// Recovers the "impl X for Y" signature from `impl_def_id` and returns it as a
/// string.
-fn to_pretty_impl_header(tcx: TyCtxt<'_, '_, '_>, impl_def_id: DefId) -> Option<String> {
+fn to_pretty_impl_header(tcx: TyCtxt<'_>, impl_def_id: DefId) -> Option<String> {
use std::fmt::Write;
let trait_ref = if let Some(tr) = tcx.impl_trait_ref(impl_def_id) {
diff --git a/src/librustc/traits/specialize/specialization_graph.rs b/src/librustc/traits/specialize/specialization_graph.rs
index 9a90b9f..f736c5e 100644
--- a/src/librustc/traits/specialize/specialization_graph.rs
+++ b/src/librustc/traits/specialize/specialization_graph.rs
@@ -81,11 +81,9 @@
ShouldRecurseOn(DefId),
}
-impl<'a, 'gcx, 'tcx> Children {
+impl<'tcx> Children {
/// Insert an impl into this set of children without comparing to any existing impls.
- fn insert_blindly(&mut self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- impl_def_id: DefId) {
+ fn insert_blindly(&mut self, tcx: TyCtxt<'tcx>, impl_def_id: DefId) {
let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
if let Some(sty) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), false) {
debug!("insert_blindly: impl_def_id={:?} sty={:?}", impl_def_id, sty);
@@ -99,9 +97,7 @@
/// Removes an impl from this set of children. Used when replacing
/// an impl with a parent. The impl must be present in the list of
/// children already.
- fn remove_existing(&mut self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- impl_def_id: DefId) {
+ fn remove_existing(&mut self, tcx: TyCtxt<'tcx>, impl_def_id: DefId) {
let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
let vec: &mut Vec<DefId>;
if let Some(sty) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), false) {
@@ -118,12 +114,12 @@
/// Attempt to insert an impl into this set of children, while comparing for
/// specialization relationships.
- fn insert(&mut self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- impl_def_id: DefId,
- simplified_self: Option<SimplifiedType>)
- -> Result<Inserted, OverlapError>
- {
+ fn insert(
+ &mut self,
+ tcx: TyCtxt<'tcx>,
+ impl_def_id: DefId,
+ simplified_self: Option<SimplifiedType>,
+ ) -> Result<Inserted, OverlapError> {
let mut last_lint = None;
let mut replace_children = Vec::new();
@@ -282,7 +278,7 @@
}
}
-impl<'a, 'gcx, 'tcx> Graph {
+impl<'tcx> Graph {
pub fn new() -> Graph {
Graph {
parent: Default::default(),
@@ -293,10 +289,11 @@
/// Insert a local impl into the specialization graph. If an existing impl
/// conflicts with it (has overlap, but neither specializes the other),
/// information about the area of overlap is returned in the `Err`.
- pub fn insert(&mut self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- impl_def_id: DefId)
- -> Result<Option<FutureCompatOverlapError>, OverlapError> {
+ pub fn insert(
+ &mut self,
+ tcx: TyCtxt<'tcx>,
+ impl_def_id: DefId,
+ ) -> Result<Option<FutureCompatOverlapError>, OverlapError> {
assert!(impl_def_id.is_local());
let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
@@ -386,10 +383,7 @@
}
/// Insert cached metadata mapping from a child impl back to its parent.
- pub fn record_impl_from_cstore(&mut self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- parent: DefId,
- child: DefId) {
+ pub fn record_impl_from_cstore(&mut self, tcx: TyCtxt<'tcx>, parent: DefId, child: DefId) {
if self.parent.insert(child, parent).is_some() {
bug!("When recording an impl from the crate store, information about its parent \
was already present.");
@@ -414,7 +408,7 @@
Trait(DefId),
}
-impl<'a, 'gcx, 'tcx> Node {
+impl<'tcx> Node {
pub fn is_from_trait(&self) -> bool {
match *self {
Node::Trait(..) => true,
@@ -423,10 +417,7 @@
}
/// Iterate over the items defined directly by the given (impl or trait) node.
- pub fn items(
- &self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- ) -> ty::AssocItemsIterator<'a, 'gcx, 'tcx> {
+ pub fn items(&self, tcx: TyCtxt<'tcx>) -> ty::AssocItemsIterator<'tcx> {
tcx.associated_items(self.def_id())
}
@@ -475,18 +466,18 @@
}
}
-impl<'a, 'gcx, 'tcx> Ancestors<'gcx> {
+impl<'tcx> Ancestors<'tcx> {
/// Search the items from the given ancestors, returning each definition
/// with the given name and the given kind.
// FIXME(#35870): avoid closures being unexported due to `impl Trait`.
#[inline]
pub fn defs(
self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
trait_item_name: Ident,
trait_item_kind: ty::AssocKind,
trait_def_id: DefId,
- ) -> impl Iterator<Item = NodeItem<ty::AssocItem>> + Captures<'gcx> + Captures<'tcx> + 'a {
+ ) -> impl Iterator<Item = NodeItem<ty::AssocItem>> + Captures<'tcx> + 'tcx {
self.flat_map(move |node| {
use crate::ty::AssocKind::*;
node.items(tcx).filter(move |impl_item| match (trait_item_kind, impl_item.kind) {
@@ -508,10 +499,11 @@
/// Walk up the specialization ancestors of a given impl, starting with that
/// impl itself.
-pub fn ancestors(tcx: TyCtxt<'_, 'tcx, '_>,
- trait_def_id: DefId,
- start_from_impl: DefId)
- -> Ancestors<'tcx> {
+pub fn ancestors(
+ tcx: TyCtxt<'tcx>,
+ trait_def_id: DefId,
+ start_from_impl: DefId,
+) -> Ancestors<'tcx> {
let specialization_graph = tcx.specialization_graph_of(trait_def_id);
Ancestors {
trait_def_id,
diff --git a/src/librustc/traits/structural_impls.rs b/src/librustc/traits/structural_impls.rs
index f6108f2..129a400 100644
--- a/src/librustc/traits/structural_impls.rs
+++ b/src/librustc/traits/structural_impls.rs
@@ -446,7 +446,7 @@
impl<'a, 'tcx> Lift<'tcx> for traits::SelectionError<'a> {
type Lifted = traits::SelectionError<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
match *self {
super::Unimplemented => Some(super::Unimplemented),
super::OutputTypeParameterMismatch(a, b, ref err) => {
@@ -464,7 +464,7 @@
impl<'a, 'tcx> Lift<'tcx> for traits::ObligationCauseCode<'a> {
type Lifted = traits::ObligationCauseCode<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
match *self {
super::ReturnNoExpression => Some(super::ReturnNoExpression),
super::MiscObligation => Some(super::MiscObligation),
@@ -546,7 +546,7 @@
impl<'a, 'tcx> Lift<'tcx> for traits::DerivedObligationCause<'a> {
type Lifted = traits::DerivedObligationCause<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.parent_trait_ref).and_then(|trait_ref|
tcx.lift(&*self.parent_code)
.map(|code| traits::DerivedObligationCause {
@@ -559,7 +559,7 @@
impl<'a, 'tcx> Lift<'tcx> for traits::ObligationCause<'a> {
type Lifted = traits::ObligationCause<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.code).map(|code| traits::ObligationCause {
span: self.span,
body_id: self.body_id,
@@ -571,7 +571,7 @@
// For codegen only.
impl<'a, 'tcx> Lift<'tcx> for traits::Vtable<'a, ()> {
type Lifted = traits::Vtable<'tcx, ()>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
match self.clone() {
traits::VtableImpl(traits::VtableImplData {
impl_def_id,
@@ -691,7 +691,7 @@
impl<'a, 'tcx> Lift<'tcx> for traits::Environment<'a> {
type Lifted = traits::Environment<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.clauses).map(|clauses| {
traits::Environment {
clauses,
@@ -702,7 +702,7 @@
impl<'a, 'tcx, G: Lift<'tcx>> Lift<'tcx> for traits::InEnvironment<'a, G> {
type Lifted = traits::InEnvironment<'tcx, G::Lifted>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.environment).and_then(|environment| {
tcx.lift(&self.goal).map(|goal| {
traits::InEnvironment {
@@ -721,7 +721,7 @@
{
type Lifted = C::LiftedExClause;
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
<C as traits::ChalkContextLift>::lift_ex_clause_to_tcx(self, tcx)
}
}
@@ -733,7 +733,7 @@
{
type Lifted = C::LiftedDelayedLiteral;
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
<C as traits::ChalkContextLift>::lift_delayed_literal_to_tcx(self, tcx)
}
}
@@ -745,7 +745,7 @@
{
type Lifted = C::LiftedLiteral;
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
<C as traits::ChalkContextLift>::lift_literal_to_tcx(self, tcx)
}
}
@@ -754,7 +754,7 @@
// TypeFoldable implementations.
impl<'tcx, O: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::Obligation<'tcx, O> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
traits::Obligation {
cause: self.cause.clone(),
recursion_depth: self.recursion_depth,
@@ -887,7 +887,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<traits::Goal<'tcx>> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
let v = self.iter()
.map(|t| t.fold_with(folder))
.collect::<SmallVec<[_; 8]>>();
@@ -900,7 +900,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for traits::Goal<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
let v = (**self).fold_with(folder);
folder.tcx().mk_goal(v)
}
@@ -941,7 +941,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for traits::Clauses<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
let v = self.iter()
.map(|t| t.fold_with(folder))
.collect::<SmallVec<[_; 8]>>();
@@ -959,7 +959,7 @@
C::Substitution: Clone,
C::RegionConstraint: Clone,
{
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
<C as traits::ExClauseFold>::fold_ex_clause_with(
self,
folder,
diff --git a/src/librustc/traits/util.rs b/src/librustc/traits/util.rs
index 2f87a74..2d29567 100644
--- a/src/librustc/traits/util.rs
+++ b/src/librustc/traits/util.rs
@@ -12,9 +12,7 @@
use super::{Obligation, ObligationCause, PredicateObligation, SelectionContext, Normalized};
-fn anonymize_predicate<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- pred: &ty::Predicate<'tcx>)
- -> ty::Predicate<'tcx> {
+fn anonymize_predicate<'tcx>(tcx: TyCtxt<'tcx>, pred: &ty::Predicate<'tcx>) -> ty::Predicate<'tcx> {
match *pred {
ty::Predicate::Trait(ref data) =>
ty::Predicate::Trait(tcx.anonymize_late_bound_regions(data)),
@@ -45,13 +43,13 @@
}
}
-struct PredicateSet<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+struct PredicateSet<'tcx> {
+ tcx: TyCtxt<'tcx>,
set: FxHashSet<ty::Predicate<'tcx>>,
}
-impl<'a, 'gcx, 'tcx> PredicateSet<'a, 'gcx, 'tcx> {
- fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self {
+impl PredicateSet<'tcx> {
+ fn new(tcx: TyCtxt<'tcx>) -> Self {
Self { tcx: tcx, set: Default::default() }
}
@@ -70,7 +68,7 @@
}
}
-impl<'a, 'gcx, 'tcx, T: AsRef<ty::Predicate<'tcx>>> Extend<T> for PredicateSet<'a, 'gcx, 'tcx> {
+impl<T: AsRef<ty::Predicate<'tcx>>> Extend<T> for PredicateSet<'tcx> {
fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
for pred in iter {
self.insert(pred.as_ref());
@@ -88,39 +86,36 @@
/// if we know that `T: Ord`, the elaborator would deduce that `T: PartialOrd`
/// holds as well. Similarly, if we have `trait Foo: 'static`, and we know that
/// `T: Foo`, then we know that `T: 'static`.
-pub struct Elaborator<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
+pub struct Elaborator<'tcx> {
stack: Vec<ty::Predicate<'tcx>>,
- visited: PredicateSet<'a, 'gcx, 'tcx>,
+ visited: PredicateSet<'tcx>,
}
-pub fn elaborate_trait_ref<'cx, 'gcx, 'tcx>(
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- trait_ref: ty::PolyTraitRef<'tcx>)
- -> Elaborator<'cx, 'gcx, 'tcx>
-{
+pub fn elaborate_trait_ref<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ trait_ref: ty::PolyTraitRef<'tcx>,
+) -> Elaborator<'tcx> {
elaborate_predicates(tcx, vec![trait_ref.to_predicate()])
}
-pub fn elaborate_trait_refs<'cx, 'gcx, 'tcx>(
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- trait_refs: impl Iterator<Item = ty::PolyTraitRef<'tcx>>)
- -> Elaborator<'cx, 'gcx, 'tcx>
-{
+pub fn elaborate_trait_refs<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ trait_refs: impl Iterator<Item = ty::PolyTraitRef<'tcx>>,
+) -> Elaborator<'tcx> {
let predicates = trait_refs.map(|trait_ref| trait_ref.to_predicate()).collect();
elaborate_predicates(tcx, predicates)
}
-pub fn elaborate_predicates<'cx, 'gcx, 'tcx>(
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- mut predicates: Vec<ty::Predicate<'tcx>>)
- -> Elaborator<'cx, 'gcx, 'tcx>
-{
+pub fn elaborate_predicates<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ mut predicates: Vec<ty::Predicate<'tcx>>,
+) -> Elaborator<'tcx> {
let mut visited = PredicateSet::new(tcx);
predicates.retain(|pred| visited.insert(pred));
Elaborator { stack: predicates, visited }
}
-impl<'cx, 'gcx, 'tcx> Elaborator<'cx, 'gcx, 'tcx> {
+impl Elaborator<'tcx> {
pub fn filter_to_traits(self) -> FilterToTraits<Self> {
FilterToTraits::new(self)
}
@@ -232,7 +227,7 @@
}
}
-impl<'cx, 'gcx, 'tcx> Iterator for Elaborator<'cx, 'gcx, 'tcx> {
+impl Iterator for Elaborator<'tcx> {
type Item = ty::Predicate<'tcx>;
fn size_hint(&self) -> (usize, Option<usize>) {
@@ -254,17 +249,19 @@
// Supertrait iterator
///////////////////////////////////////////////////////////////////////////
-pub type Supertraits<'cx, 'gcx, 'tcx> = FilterToTraits<Elaborator<'cx, 'gcx, 'tcx>>;
+pub type Supertraits<'tcx> = FilterToTraits<Elaborator<'tcx>>;
-pub fn supertraits<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- trait_ref: ty::PolyTraitRef<'tcx>)
- -> Supertraits<'cx, 'gcx, 'tcx> {
+pub fn supertraits<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ trait_ref: ty::PolyTraitRef<'tcx>,
+) -> Supertraits<'tcx> {
elaborate_trait_ref(tcx, trait_ref).filter_to_traits()
}
-pub fn transitive_bounds<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- bounds: impl Iterator<Item = ty::PolyTraitRef<'tcx>>)
- -> Supertraits<'cx, 'gcx, 'tcx> {
+pub fn transitive_bounds<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ bounds: impl Iterator<Item = ty::PolyTraitRef<'tcx>>,
+) -> Supertraits<'tcx> {
elaborate_trait_refs(tcx, bounds).filter_to_traits()
}
@@ -280,8 +277,8 @@
/// `Read + Write + Sync + Send`.
/// Expansion is done via a DFS (depth-first search), and the `visited` field
/// is used to avoid cycles.
-pub struct TraitAliasExpander<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub struct TraitAliasExpander<'tcx> {
+ tcx: TyCtxt<'tcx>,
stack: Vec<TraitAliasExpansionInfo<'tcx>>,
}
@@ -337,10 +334,10 @@
}
}
-pub fn expand_trait_aliases<'cx, 'gcx, 'tcx>(
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- trait_refs: impl IntoIterator<Item = (ty::PolyTraitRef<'tcx>, Span)>
-) -> TraitAliasExpander<'cx, 'gcx, 'tcx> {
+pub fn expand_trait_aliases<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ trait_refs: impl IntoIterator<Item = (ty::PolyTraitRef<'tcx>, Span)>,
+) -> TraitAliasExpander<'tcx> {
let items: Vec<_> = trait_refs
.into_iter()
.map(|(trait_ref, span)| TraitAliasExpansionInfo::new(trait_ref, span))
@@ -348,7 +345,7 @@
TraitAliasExpander { tcx, stack: items }
}
-impl<'cx, 'gcx, 'tcx> TraitAliasExpander<'cx, 'gcx, 'tcx> {
+impl<'tcx> TraitAliasExpander<'tcx> {
/// If `item` is a trait alias and its predicate has not yet been visited, then expands `item`
/// to the definition, pushes the resulting expansion onto `self.stack`, and returns `false`.
/// Otherwise, immediately returns `true` if `item` is a regular trait, or `false` if it is a
@@ -393,7 +390,7 @@
}
}
-impl<'cx, 'gcx, 'tcx> Iterator for TraitAliasExpander<'cx, 'gcx, 'tcx> {
+impl<'tcx> Iterator for TraitAliasExpander<'tcx> {
type Item = TraitAliasExpansionInfo<'tcx>;
fn size_hint(&self) -> (usize, Option<usize>) {
@@ -414,16 +411,13 @@
// Iterator over def-IDs of supertraits
///////////////////////////////////////////////////////////////////////////
-pub struct SupertraitDefIds<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub struct SupertraitDefIds<'tcx> {
+ tcx: TyCtxt<'tcx>,
stack: Vec<DefId>,
visited: FxHashSet<DefId>,
}
-pub fn supertrait_def_ids<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- trait_def_id: DefId)
- -> SupertraitDefIds<'cx, 'gcx, 'tcx>
-{
+pub fn supertrait_def_ids<'tcx>(tcx: TyCtxt<'tcx>, trait_def_id: DefId) -> SupertraitDefIds<'tcx> {
SupertraitDefIds {
tcx,
stack: vec![trait_def_id],
@@ -431,7 +425,7 @@
}
}
-impl<'cx, 'gcx, 'tcx> Iterator for SupertraitDefIds<'cx, 'gcx, 'tcx> {
+impl Iterator for SupertraitDefIds<'tcx> {
type Item = DefId;
fn next(&mut self) -> Option<DefId> {
@@ -489,13 +483,12 @@
/// Instantiate all bound parameters of the impl with the given substs,
/// returning the resulting trait ref and all obligations that arise.
/// The obligations are closed under normalization.
-pub fn impl_trait_ref_and_oblig<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- impl_def_id: DefId,
- impl_substs: SubstsRef<'tcx>,)
- -> (ty::TraitRef<'tcx>,
- Vec<PredicateObligation<'tcx>>)
-{
+pub fn impl_trait_ref_and_oblig<'a, 'tcx>(
+ selcx: &mut SelectionContext<'a, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ impl_def_id: DefId,
+ impl_substs: SubstsRef<'tcx>,
+) -> (ty::TraitRef<'tcx>, Vec<PredicateObligation<'tcx>>) {
let impl_trait_ref =
selcx.tcx().impl_trait_ref(impl_def_id).unwrap();
let impl_trait_ref =
@@ -552,7 +545,7 @@
}
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
pub fn predicate_for_trait_def(self,
param_env: ty::ParamEnv<'tcx>,
cause: ObligationCause<'tcx>,
@@ -661,7 +654,7 @@
pub fn impl_is_default(self, node_item_def_id: DefId) -> bool {
match self.hir().as_local_hir_id(node_item_def_id) {
Some(hir_id) => {
- let item = self.hir().expect_item_by_hir_id(hir_id);
+ let item = self.hir().expect_item(hir_id);
if let hir::ItemKind::Impl(_, _, defaultness, ..) = item.node {
defaultness.is_default()
} else {
diff --git a/src/librustc/ty/_match.rs b/src/librustc/ty/_match.rs
index 8640216..6e10dc0 100644
--- a/src/librustc/ty/_match.rs
+++ b/src/librustc/ty/_match.rs
@@ -19,19 +19,19 @@
/// Like subtyping, matching is really a binary relation, so the only
/// important thing about the result is Ok/Err. Also, matching never
/// affects any type variables or unification state.
-pub struct Match<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>
+pub struct Match<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'gcx, 'tcx> Match<'a, 'gcx, 'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Match<'a, 'gcx, 'tcx> {
+impl Match<'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>) -> Match<'tcx> {
Match { tcx }
}
}
-impl<'a, 'gcx, 'tcx> TypeRelation<'a, 'gcx, 'tcx> for Match<'a, 'gcx, 'tcx> {
+impl TypeRelation<'tcx> for Match<'tcx> {
fn tag(&self) -> &'static str { "Match" }
- fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { self.tcx }
+ fn tcx(&self) -> TyCtxt<'tcx> { self.tcx }
fn a_is_expected(&self) -> bool { true } // irrelevant
fn relate_with_variance<T: Relate<'tcx>>(&mut self,
diff --git a/src/librustc/ty/adjustment.rs b/src/librustc/ty/adjustment.rs
index 11aad87..9ba9976 100644
--- a/src/librustc/ty/adjustment.rs
+++ b/src/librustc/ty/adjustment.rs
@@ -103,9 +103,8 @@
pub mutbl: hir::Mutability,
}
-impl<'a, 'gcx, 'tcx> OverloadedDeref<'tcx> {
- pub fn method_call(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, source: Ty<'tcx>)
- -> (DefId, SubstsRef<'tcx>) {
+impl<'tcx> OverloadedDeref<'tcx> {
+ pub fn method_call(&self, tcx: TyCtxt<'tcx>, source: Ty<'tcx>) -> (DefId, SubstsRef<'tcx>) {
let trait_def_id = match self.mutbl {
hir::MutImmutable => tcx.lang_items().deref_trait(),
hir::MutMutable => tcx.lang_items().deref_mut_trait()
diff --git a/src/librustc/ty/codec.rs b/src/librustc/ty/codec.rs
index a76cc3d..224f7d5 100644
--- a/src/librustc/ty/codec.rs
+++ b/src/librustc/ty/codec.rs
@@ -107,9 +107,8 @@
Ok(())
}
-pub trait TyDecoder<'a, 'tcx: 'a>: Decoder {
-
- fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx>;
+pub trait TyDecoder<'tcx>: Decoder {
+ fn tcx(&self) -> TyCtxt<'tcx>;
fn peek_byte(&self) -> u8;
@@ -132,38 +131,38 @@
}
#[inline]
-pub fn decode_arena_allocable<'a, 'tcx, D, T: ArenaAllocatable + Decodable>(
- decoder: &mut D
+pub fn decode_arena_allocable<D, T: ArenaAllocatable + Decodable>(
+ decoder: &mut D,
) -> Result<&'tcx T, D::Error>
- where D: TyDecoder<'a, 'tcx>,
- 'tcx: 'a,
+where
+ D: TyDecoder<'tcx>,
{
Ok(decoder.tcx().arena.alloc(Decodable::decode(decoder)?))
}
#[inline]
-pub fn decode_arena_allocable_slice<'a, 'tcx, D, T: ArenaAllocatable + Decodable>(
- decoder: &mut D
+pub fn decode_arena_allocable_slice<D, T: ArenaAllocatable + Decodable>(
+ decoder: &mut D,
) -> Result<&'tcx [T], D::Error>
- where D: TyDecoder<'a, 'tcx>,
- 'tcx: 'a,
+where
+ D: TyDecoder<'tcx>,
{
Ok(decoder.tcx().arena.alloc_from_iter(<Vec<T> as Decodable>::decode(decoder)?))
}
#[inline]
-pub fn decode_cnum<'a, 'tcx, D>(decoder: &mut D) -> Result<CrateNum, D::Error>
- where D: TyDecoder<'a, 'tcx>,
- 'tcx: 'a,
+pub fn decode_cnum<D>(decoder: &mut D) -> Result<CrateNum, D::Error>
+where
+ D: TyDecoder<'tcx>,
{
let cnum = CrateNum::from_u32(u32::decode(decoder)?);
Ok(decoder.map_encoded_cnum_to_current(cnum))
}
#[inline]
-pub fn decode_ty<'a, 'tcx, D>(decoder: &mut D) -> Result<Ty<'tcx>, D::Error>
- where D: TyDecoder<'a, 'tcx>,
- 'tcx: 'a,
+pub fn decode_ty<D>(decoder: &mut D) -> Result<Ty<'tcx>, D::Error>
+where
+ D: TyDecoder<'tcx>,
{
// Handle shorthands first, if we have an usize > 0x80.
if decoder.positioned_at_shorthand() {
@@ -181,10 +180,9 @@
}
#[inline]
-pub fn decode_predicates<'a, 'tcx, D>(decoder: &mut D)
- -> Result<ty::GenericPredicates<'tcx>, D::Error>
- where D: TyDecoder<'a, 'tcx>,
- 'tcx: 'a,
+pub fn decode_predicates<D>(decoder: &mut D) -> Result<ty::GenericPredicates<'tcx>, D::Error>
+where
+ D: TyDecoder<'tcx>,
{
Ok(ty::GenericPredicates {
parent: Decodable::decode(decoder)?,
@@ -206,9 +204,9 @@
}
#[inline]
-pub fn decode_substs<'a, 'tcx, D>(decoder: &mut D) -> Result<SubstsRef<'tcx>, D::Error>
- where D: TyDecoder<'a, 'tcx>,
- 'tcx: 'a,
+pub fn decode_substs<D>(decoder: &mut D) -> Result<SubstsRef<'tcx>, D::Error>
+where
+ D: TyDecoder<'tcx>,
{
let len = decoder.read_usize()?;
let tcx = decoder.tcx();
@@ -216,38 +214,37 @@
}
#[inline]
-pub fn decode_region<'a, 'tcx, D>(decoder: &mut D) -> Result<ty::Region<'tcx>, D::Error>
- where D: TyDecoder<'a, 'tcx>,
- 'tcx: 'a,
+pub fn decode_region<D>(decoder: &mut D) -> Result<ty::Region<'tcx>, D::Error>
+where
+ D: TyDecoder<'tcx>,
{
Ok(decoder.tcx().mk_region(Decodable::decode(decoder)?))
}
#[inline]
-pub fn decode_ty_slice<'a, 'tcx, D>(decoder: &mut D)
- -> Result<&'tcx ty::List<Ty<'tcx>>, D::Error>
- where D: TyDecoder<'a, 'tcx>,
- 'tcx: 'a,
+pub fn decode_ty_slice<D>(decoder: &mut D) -> Result<&'tcx ty::List<Ty<'tcx>>, D::Error>
+where
+ D: TyDecoder<'tcx>,
{
let len = decoder.read_usize()?;
Ok(decoder.tcx().mk_type_list((0..len).map(|_| Decodable::decode(decoder)))?)
}
#[inline]
-pub fn decode_adt_def<'a, 'tcx, D>(decoder: &mut D)
- -> Result<&'tcx ty::AdtDef, D::Error>
- where D: TyDecoder<'a, 'tcx>,
- 'tcx: 'a,
+pub fn decode_adt_def<D>(decoder: &mut D) -> Result<&'tcx ty::AdtDef, D::Error>
+where
+ D: TyDecoder<'tcx>,
{
let def_id = DefId::decode(decoder)?;
Ok(decoder.tcx().adt_def(def_id))
}
#[inline]
-pub fn decode_existential_predicate_slice<'a, 'tcx, D>(decoder: &mut D)
- -> Result<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>, D::Error>
- where D: TyDecoder<'a, 'tcx>,
- 'tcx: 'a,
+pub fn decode_existential_predicate_slice<D>(
+ decoder: &mut D,
+) -> Result<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>, D::Error>
+where
+ D: TyDecoder<'tcx>,
{
let len = decoder.read_usize()?;
Ok(decoder.tcx()
@@ -255,10 +252,9 @@
}
#[inline]
-pub fn decode_canonical_var_infos<'a, 'tcx, D>(decoder: &mut D)
- -> Result<CanonicalVarInfos<'tcx>, D::Error>
- where D: TyDecoder<'a, 'tcx>,
- 'tcx: 'a,
+pub fn decode_canonical_var_infos<D>(decoder: &mut D) -> Result<CanonicalVarInfos<'tcx>, D::Error>
+where
+ D: TyDecoder<'tcx>,
{
let len = decoder.read_usize()?;
let interned: Result<Vec<CanonicalVarInfo>, _> = (0..len).map(|_| Decodable::decode(decoder))
@@ -268,19 +264,17 @@
}
#[inline]
-pub fn decode_const<'a, 'tcx, D>(decoder: &mut D)
- -> Result<&'tcx ty::Const<'tcx>, D::Error>
- where D: TyDecoder<'a, 'tcx>,
- 'tcx: 'a,
+pub fn decode_const<D>(decoder: &mut D) -> Result<&'tcx ty::Const<'tcx>, D::Error>
+where
+ D: TyDecoder<'tcx>,
{
Ok(decoder.tcx().mk_const(Decodable::decode(decoder)?))
}
#[inline]
-pub fn decode_allocation<'a, 'tcx, D>(decoder: &mut D)
- -> Result<&'tcx Allocation, D::Error>
- where D: TyDecoder<'a, 'tcx>,
- 'tcx: 'a,
+pub fn decode_allocation<D>(decoder: &mut D) -> Result<&'tcx Allocation, D::Error>
+where
+ D: TyDecoder<'tcx>,
{
Ok(decoder.tcx().intern_const_alloc(Decodable::decode(decoder)?))
}
diff --git a/src/librustc/ty/constness.rs b/src/librustc/ty/constness.rs
index 56f75e8..65b8322 100644
--- a/src/librustc/ty/constness.rs
+++ b/src/librustc/ty/constness.rs
@@ -6,7 +6,7 @@
use crate::hir::map::blocks::FnLikeNode;
use syntax::attr;
-impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
/// Whether the `def_id` counts as const fn in your current crate, considering all active
/// feature gates
pub fn is_const_fn(self, def_id: DefId) -> bool {
@@ -69,7 +69,7 @@
pub fn provide<'tcx>(providers: &mut Providers<'tcx>) {
/// only checks whether the function has a `const` modifier
- fn is_const_fn_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> bool {
+ fn is_const_fn_raw<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
let hir_id = tcx.hir().as_local_hir_id(def_id)
.expect("Non-local call to local provider is_const_fn");
@@ -83,7 +83,7 @@
}
}
- fn is_promotable_const_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> bool {
+ fn is_promotable_const_fn<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
tcx.is_const_fn(def_id) && match tcx.lookup_stability(def_id) {
Some(stab) => {
if cfg!(debug_assertions) && stab.promotable {
@@ -101,7 +101,7 @@
}
}
- fn const_fn_is_allowed_fn_ptr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> bool {
+ fn const_fn_is_allowed_fn_ptr<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
tcx.is_const_fn(def_id) &&
tcx.lookup_stability(def_id)
.map(|stab| stab.allow_const_fn_ptr).unwrap_or(false)
diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs
index e600220..b84ebd8 100644
--- a/src/librustc/ty/context.rs
+++ b/src/librustc/ty/context.rs
@@ -115,7 +115,7 @@
const_: InternedSet<'tcx, Const<'tcx>>,
}
-impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
+impl<'tcx> CtxtInterners<'tcx> {
fn new(arena: &'tcx SyncDroplessArena) -> CtxtInterners<'tcx> {
CtxtInterners {
arena,
@@ -137,9 +137,9 @@
/// Intern a type
#[inline(never)]
fn intern_ty(
- local: &CtxtInterners<'gcx>,
- global: &CtxtInterners<'gcx>,
- st: TyKind<'tcx>
+ local: &CtxtInterners<'tcx>,
+ global: &CtxtInterners<'tcx>,
+ st: TyKind<'tcx>,
) -> Ty<'tcx> {
let flags = super::flags::FlagComputation::for_sty(&st);
@@ -164,7 +164,7 @@
// This is safe because all the types the ty_struct can point to
// already is in the local arena or the global arena
- let ty_struct: TyS<'gcx> = unsafe {
+ let ty_struct: TyS<'tcx> = unsafe {
mem::transmute(ty_struct)
};
@@ -180,7 +180,7 @@
// This is safe because all the types the ty_struct can point to
// already is in the global arena
- let ty_struct: TyS<'gcx> = unsafe {
+ let ty_struct: TyS<'tcx> = unsafe {
mem::transmute(ty_struct)
};
@@ -250,11 +250,9 @@
if let Some(local_id_root) = local_id_root {
if hir_id.owner != local_id_root.index {
ty::tls::with(|tcx| {
- let node_id = tcx.hir().hir_to_node_id(hir_id);
-
bug!("node {} with HirId::owner {:?} cannot be placed in \
TypeckTables with local_id_root {:?}",
- tcx.hir().node_to_string(node_id),
+ tcx.hir().node_to_string(hir_id),
DefId::local(hir_id.owner),
local_id_root)
});
@@ -556,7 +554,7 @@
pub fn node_type(&self, id: hir::HirId) -> Ty<'tcx> {
self.node_type_opt(id).unwrap_or_else(||
bug!("node_type: no type for node `{}`",
- tls::with(|tcx| tcx.hir().hir_to_string(id)))
+ tls::with(|tcx| tcx.hir().node_to_string(id)))
)
}
@@ -750,7 +748,7 @@
}
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for TypeckTables<'gcx> {
+impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TypeckTables<'tcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
@@ -857,11 +855,10 @@
}
}
-
/// Canonicalized user type annotation.
-pub type CanonicalUserType<'gcx> = Canonical<'gcx, UserType<'gcx>>;
+pub type CanonicalUserType<'tcx> = Canonical<'tcx, UserType<'tcx>>;
-impl CanonicalUserType<'gcx> {
+impl CanonicalUserType<'tcx> {
/// Returns `true` if this represents a substitution of the form `[?0, ?1, ?2]`,
/// i.e., each thing is mapped to a canonical variable with the same index.
pub fn is_identity(&self) -> bool {
@@ -1016,14 +1013,14 @@
///
/// [rustc guide]: https://rust-lang.github.io/rustc-guide/ty.html
#[derive(Copy, Clone)]
-pub struct TyCtxt<'a, 'gcx: 'tcx, 'tcx: 'a> {
- gcx: &'gcx GlobalCtxt<'gcx>,
- interners: &'gcx CtxtInterners<'gcx>,
- dummy: PhantomData<(&'a (), &'tcx ())>,
+pub struct TyCtxt<'tcx> {
+ gcx: &'tcx GlobalCtxt<'tcx>,
+ interners: &'tcx CtxtInterners<'tcx>,
+ dummy: PhantomData<&'tcx ()>,
}
-impl<'gcx> Deref for TyCtxt<'_, 'gcx, '_> {
- type Target = &'gcx GlobalCtxt<'gcx>;
+impl<'tcx> Deref for TyCtxt<'tcx> {
+ type Target = &'tcx GlobalCtxt<'tcx>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.gcx
@@ -1119,10 +1116,10 @@
output_filenames: Arc<OutputFilenames>,
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
/// Gets the global `TyCtxt`.
#[inline]
- pub fn global_tcx(self) -> TyCtxt<'gcx, 'gcx, 'gcx> {
+ pub fn global_tcx(self) -> TyCtxt<'tcx> {
TyCtxt {
gcx: self.gcx,
interners: &self.gcx.global_interners,
@@ -1131,25 +1128,26 @@
}
#[inline(always)]
- pub fn hir(self) -> &'a hir_map::Map<'gcx> {
+ pub fn hir(self) -> &'tcx hir_map::Map<'tcx> {
&self.hir_map
}
- pub fn alloc_steal_mir(self, mir: Body<'gcx>) -> &'gcx Steal<Body<'gcx>> {
+ pub fn alloc_steal_mir(self, mir: Body<'tcx>) -> &'tcx Steal<Body<'tcx>> {
self.arena.alloc(Steal::new(mir))
}
- pub fn alloc_adt_def(self,
- did: DefId,
- kind: AdtKind,
- variants: IndexVec<VariantIdx, ty::VariantDef>,
- repr: ReprOptions)
- -> &'gcx ty::AdtDef {
+ pub fn alloc_adt_def(
+ self,
+ did: DefId,
+ kind: AdtKind,
+ variants: IndexVec<VariantIdx, ty::VariantDef>,
+ repr: ReprOptions,
+ ) -> &'tcx ty::AdtDef {
let def = ty::AdtDef::new(self, did, kind, variants, repr);
self.arena.alloc(def)
}
- pub fn intern_const_alloc(self, alloc: Allocation) -> &'gcx Allocation {
+ pub fn intern_const_alloc(self, alloc: Allocation) -> &'tcx Allocation {
self.allocation_interner.borrow_mut().intern(alloc, |alloc| {
self.arena.alloc(alloc)
})
@@ -1163,13 +1161,13 @@
self.alloc_map.lock().create_memory_alloc(alloc)
}
- pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
+ pub fn intern_stability(self, stab: attr::Stability) -> &'tcx attr::Stability {
self.stability_interner.borrow_mut().intern(stab, |stab| {
self.arena.alloc(stab)
})
}
- pub fn intern_layout(self, layout: LayoutDetails) -> &'gcx LayoutDetails {
+ pub fn intern_layout(self, layout: LayoutDetails) -> &'tcx LayoutDetails {
self.layout_interner.borrow_mut().intern(layout, |layout| {
self.arena.alloc(layout)
})
@@ -1201,7 +1199,7 @@
}
/// Like lift, but only tries in the global tcx.
- pub fn lift_to_global<T: ?Sized + Lift<'gcx>>(self, value: &T) -> Option<T::Lifted> {
+ pub fn lift_to_global<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
value.lift_to_tcx(self.global_tcx())
}
@@ -1343,11 +1341,11 @@
self.sess.consider_optimizing(&cname, msg)
}
- pub fn lib_features(self) -> &'gcx middle::lib_features::LibFeatures {
+ pub fn lib_features(self) -> &'tcx middle::lib_features::LibFeatures {
self.get_lib_features(LOCAL_CRATE)
}
- pub fn lang_items(self) -> &'gcx middle::lang_items::LanguageItems {
+ pub fn lang_items(self) -> &'tcx middle::lang_items::LanguageItems {
self.get_lang_items(LOCAL_CRATE)
}
@@ -1385,15 +1383,15 @@
else { None }
}
- pub fn stability(self) -> &'gcx stability::Index<'gcx> {
+ pub fn stability(self) -> &'tcx stability::Index<'tcx> {
self.stability_index(LOCAL_CRATE)
}
- pub fn crates(self) -> &'gcx [CrateNum] {
+ pub fn crates(self) -> &'tcx [CrateNum] {
self.all_crate_nums(LOCAL_CRATE)
}
- pub fn features(self) -> &'gcx feature_gate::Features {
+ pub fn features(self) -> &'tcx feature_gate::Features {
self.features_query(LOCAL_CRATE)
}
@@ -1469,7 +1467,7 @@
}
#[inline(always)]
- pub fn create_stable_hashing_context(self) -> StableHashingContext<'a> {
+ pub fn create_stable_hashing_context(self) -> StableHashingContext<'tcx> {
let krate = self.gcx.hir_map.forest.untracked_krate();
StableHashingContext::new(self.sess,
@@ -1666,7 +1664,7 @@
}
}
-impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
pub fn encode_metadata(self)
-> EncodedMetadata
{
@@ -1674,17 +1672,13 @@
}
}
-impl<'gcx> GlobalCtxt<'gcx> {
+impl<'tcx> GlobalCtxt<'tcx> {
/// Call the closure with a local `TyCtxt` using the given arena.
/// `interners` is a slot passed so we can create a CtxtInterners
/// with the same lifetime as `arena`.
- pub fn enter_local<'tcx, F, R>(
- &'gcx self,
- f: F
- ) -> R
+ pub fn enter_local<F, R>(&'tcx self, f: F) -> R
where
- F: FnOnce(TyCtxt<'tcx, 'gcx, 'tcx>) -> R,
- 'gcx: 'tcx,
+ F: FnOnce(TyCtxt<'tcx>) -> R,
{
let tcx = TyCtxt {
gcx: self,
@@ -1725,48 +1719,48 @@
/// e.g., `()` or `u8`, was interned in a different context.
pub trait Lift<'tcx>: fmt::Debug {
type Lifted: fmt::Debug + 'tcx;
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted>;
}
macro_rules! nop_lift {
($ty:ty => $lifted:ty) => {
impl<'a, 'tcx> Lift<'tcx> for $ty {
- type Lifted = $lifted;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
- if tcx.interners.arena.in_arena(*self as *const _) {
- return Some(unsafe { mem::transmute(*self) });
+ type Lifted = $lifted;
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
+ if tcx.interners.arena.in_arena(*self as *const _) {
+ return Some(unsafe { mem::transmute(*self) });
+ }
+ // Also try in the global tcx if we're not that.
+ if !tcx.is_global() {
+ self.lift_to_tcx(tcx.global_tcx())
+ } else {
+ None
+ }
+ }
}
- // Also try in the global tcx if we're not that.
- if !tcx.is_global() {
- self.lift_to_tcx(tcx.global_tcx())
- } else {
- None
- }
- }
- }
};
}
macro_rules! nop_list_lift {
($ty:ty => $lifted:ty) => {
impl<'a, 'tcx> Lift<'tcx> for &'a List<$ty> {
- type Lifted = &'tcx List<$lifted>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
- if self.is_empty() {
- return Some(List::empty());
+ type Lifted = &'tcx List<$lifted>;
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
+ if self.is_empty() {
+ return Some(List::empty());
+ }
+ if tcx.interners.arena.in_arena(*self as *const _) {
+ return Some(unsafe { mem::transmute(*self) });
+ }
+ // Also try in the global tcx if we're not that.
+ if !tcx.is_global() {
+ self.lift_to_tcx(tcx.global_tcx())
+ } else {
+ None
+ }
+ }
}
- if tcx.interners.arena.in_arena(*self as *const _) {
- return Some(unsafe { mem::transmute(*self) });
- }
- // Also try in the global tcx if we're not that.
- if !tcx.is_global() {
- self.lift_to_tcx(tcx.global_tcx())
- } else {
- None
- }
- }
- }
};
}
@@ -1812,14 +1806,14 @@
/// you should also have access to an ImplicitCtxt through the functions
/// in this module.
#[derive(Clone)]
- pub struct ImplicitCtxt<'a, 'gcx: 'tcx, 'tcx> {
+ pub struct ImplicitCtxt<'a, 'tcx> {
/// The current TyCtxt. Initially created by `enter_global` and updated
/// by `enter_local` with a new local interner
- pub tcx: TyCtxt<'tcx, 'gcx, 'tcx>,
+ pub tcx: TyCtxt<'tcx>,
/// The current query job, if any. This is updated by JobOwner::start in
/// ty::query::plumbing when executing a query
- pub query: Option<Lrc<query::QueryJob<'gcx>>>,
+ pub query: Option<Lrc<query::QueryJob<'tcx>>>,
/// Where to store diagnostics for the current query job, if any.
/// This is updated by JobOwner::start in ty::query::plumbing when executing a query
@@ -1927,9 +1921,9 @@
/// Sets `context` as the new current ImplicitCtxt for the duration of the function `f`
#[inline]
- pub fn enter_context<'a, 'gcx: 'tcx, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'gcx, 'tcx>,
- f: F) -> R
- where F: FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
+ pub fn enter_context<'a, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'tcx>, f: F) -> R
+ where
+ F: FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R,
{
set_tlv(context as *const _ as usize, || {
f(&context)
@@ -1940,8 +1934,9 @@
/// creating a initial TyCtxt and ImplicitCtxt.
/// This happens once per rustc session and TyCtxts only exists
/// inside the `f` function.
- pub fn enter_global<'gcx, F, R>(gcx: &'gcx GlobalCtxt<'gcx>, f: F) -> R
- where F: FnOnce(TyCtxt<'gcx, 'gcx, 'gcx>) -> R
+ pub fn enter_global<'tcx, F, R>(gcx: &'tcx GlobalCtxt<'tcx>, f: F) -> R
+ where
+ F: FnOnce(TyCtxt<'tcx>) -> R,
{
// Update GCX_PTR to indicate there's a GlobalCtxt available
GCX_PTR.with(|lock| {
@@ -1978,7 +1973,8 @@
/// Creates a TyCtxt and ImplicitCtxt based on the GCX_PTR thread local.
/// This is used in the deadlock handler.
pub unsafe fn with_global<F, R>(f: F) -> R
- where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
+ where
+ F: for<'tcx> FnOnce(TyCtxt<'tcx>) -> R,
{
let gcx = GCX_PTR.with(|lock| *lock.lock());
assert!(gcx != 0);
@@ -2001,7 +1997,8 @@
/// Allows access to the current ImplicitCtxt in a closure if one is available
#[inline]
pub fn with_context_opt<F, R>(f: F) -> R
- where F: for<'a, 'gcx, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'gcx, 'tcx>>) -> R
+ where
+ F: for<'a, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'tcx>>) -> R,
{
let context = get_tlv();
if context == 0 {
@@ -2009,9 +2006,9 @@
} else {
// We could get a ImplicitCtxt pointer from another thread.
// Ensure that ImplicitCtxt is Sync
- sync::assert_sync::<ImplicitCtxt<'_, '_, '_>>();
+ sync::assert_sync::<ImplicitCtxt<'_, '_>>();
- unsafe { f(Some(&*(context as *const ImplicitCtxt<'_, '_, '_>))) }
+ unsafe { f(Some(&*(context as *const ImplicitCtxt<'_, '_>))) }
}
}
@@ -2019,24 +2016,26 @@
/// Panics if there is no ImplicitCtxt available
#[inline]
pub fn with_context<F, R>(f: F) -> R
- where F: for<'a, 'gcx, 'tcx> FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
+ where
+ F: for<'a, 'tcx> FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R,
{
with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls")))
}
/// Allows access to the current ImplicitCtxt whose tcx field has the same global
/// interner as the tcx argument passed in. This means the closure is given an ImplicitCtxt
- /// with the same 'gcx lifetime as the TyCtxt passed in.
+ /// with the same 'tcx lifetime as the TyCtxt passed in.
/// This will panic if you pass it a TyCtxt which has a different global interner from
/// the current ImplicitCtxt's tcx field.
#[inline]
- pub fn with_related_context<'a, 'gcx, 'tcx1, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx1>, f: F) -> R
- where F: for<'b, 'tcx2> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx2>) -> R
+ pub fn with_related_context<'tcx, F, R>(tcx: TyCtxt<'tcx>, f: F) -> R
+ where
+ F: FnOnce(&ImplicitCtxt<'_, 'tcx>) -> R,
{
with_context(|context| {
unsafe {
assert!(ptr_eq(context.tcx.gcx, tcx.gcx));
- let context: &ImplicitCtxt<'_, '_, '_> = mem::transmute(context);
+ let context: &ImplicitCtxt<'_, '_> = mem::transmute(context);
f(context)
}
})
@@ -2044,18 +2043,19 @@
/// Allows access to the current ImplicitCtxt whose tcx field has the same global
/// interner and local interner as the tcx argument passed in. This means the closure
- /// is given an ImplicitCtxt with the same 'tcx and 'gcx lifetimes as the TyCtxt passed in.
+ /// is given an ImplicitCtxt with the same 'tcx and 'tcx lifetimes as the TyCtxt passed in.
/// This will panic if you pass it a TyCtxt which has a different global interner or
/// a different local interner from the current ImplicitCtxt's tcx field.
#[inline]
- pub fn with_fully_related_context<'a, 'gcx, 'tcx, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx>, f: F) -> R
- where F: for<'b> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx>) -> R
+ pub fn with_fully_related_context<'tcx, F, R>(tcx: TyCtxt<'tcx>, f: F) -> R
+ where
+ F: for<'b> FnOnce(&ImplicitCtxt<'b, 'tcx>) -> R,
{
with_context(|context| {
unsafe {
assert!(ptr_eq(context.tcx.gcx, tcx.gcx));
assert!(ptr_eq(context.tcx.interners, tcx.interners));
- let context: &ImplicitCtxt<'_, '_, '_> = mem::transmute(context);
+ let context: &ImplicitCtxt<'_, '_> = mem::transmute(context);
f(context)
}
})
@@ -2065,7 +2065,8 @@
/// Panics if there is no ImplicitCtxt available
#[inline]
pub fn with<F, R>(f: F) -> R
- where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
+ where
+ F: for<'tcx> FnOnce(TyCtxt<'tcx>) -> R,
{
with_context(|context| f(context.tcx))
}
@@ -2074,7 +2075,8 @@
/// The closure is passed None if there is no ImplicitCtxt available
#[inline]
pub fn with_opt<F, R>(f: F) -> R
- where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
+ where
+ F: for<'tcx> FnOnce(Option<TyCtxt<'tcx>>) -> R,
{
with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx)))
}
@@ -2098,7 +2100,7 @@
all_infer: usize,
}
- pub fn go(tcx: TyCtxt<'_, '_, '_>) {
+ pub fn go(tcx: TyCtxt<'_>) {
let mut total = DebugStat {
total: 0,
lt_infer: 0,
@@ -2151,7 +2153,7 @@
}}
}
-impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
pub fn print_debug_stats(self) {
sty_debug_print!(
self,
@@ -2193,8 +2195,8 @@
}
}
-impl<'tcx: 'lcx, 'lcx> Borrow<TyKind<'lcx>> for Interned<'tcx, TyS<'tcx>> {
- fn borrow<'a>(&'a self) -> &'a TyKind<'lcx> {
+impl<'tcx> Borrow<TyKind<'tcx>> for Interned<'tcx, TyS<'tcx>> {
+ fn borrow<'a>(&'a self) -> &'a TyKind<'tcx> {
&self.0.sty
}
}
@@ -2214,20 +2216,20 @@
}
}
-impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, List<Ty<'tcx>>> {
- fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
+impl<'tcx> Borrow<[Ty<'tcx>]> for Interned<'tcx, List<Ty<'tcx>>> {
+ fn borrow<'a>(&'a self) -> &'a [Ty<'tcx>] {
&self.0[..]
}
}
-impl<'tcx: 'lcx, 'lcx> Borrow<[CanonicalVarInfo]> for Interned<'tcx, List<CanonicalVarInfo>> {
+impl<'tcx> Borrow<[CanonicalVarInfo]> for Interned<'tcx, List<CanonicalVarInfo>> {
fn borrow<'a>(&'a self) -> &'a [CanonicalVarInfo] {
&self.0[..]
}
}
-impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, InternalSubsts<'tcx>> {
- fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] {
+impl<'tcx> Borrow<[Kind<'tcx>]> for Interned<'tcx, InternalSubsts<'tcx>> {
+ fn borrow<'a>(&'a self) -> &'a [Kind<'tcx>] {
&self.0[..]
}
}
@@ -2245,42 +2247,40 @@
}
}
-impl<'tcx: 'lcx, 'lcx> Borrow<GoalKind<'lcx>> for Interned<'tcx, GoalKind<'tcx>> {
- fn borrow<'a>(&'a self) -> &'a GoalKind<'lcx> {
+impl<'tcx> Borrow<GoalKind<'tcx>> for Interned<'tcx, GoalKind<'tcx>> {
+ fn borrow<'a>(&'a self) -> &'a GoalKind<'tcx> {
&self.0
}
}
-impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]>
- for Interned<'tcx, List<ExistentialPredicate<'tcx>>> {
- fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] {
+impl<'tcx> Borrow<[ExistentialPredicate<'tcx>]>
+ for Interned<'tcx, List<ExistentialPredicate<'tcx>>>
+{
+ fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'tcx>] {
&self.0[..]
}
}
-impl<'tcx: 'lcx, 'lcx> Borrow<[Predicate<'lcx>]>
- for Interned<'tcx, List<Predicate<'tcx>>> {
- fn borrow<'a>(&'a self) -> &'a [Predicate<'lcx>] {
+impl<'tcx> Borrow<[Predicate<'tcx>]> for Interned<'tcx, List<Predicate<'tcx>>> {
+ fn borrow<'a>(&'a self) -> &'a [Predicate<'tcx>] {
&self.0[..]
}
}
-impl<'tcx: 'lcx, 'lcx> Borrow<Const<'lcx>> for Interned<'tcx, Const<'tcx>> {
- fn borrow<'a>(&'a self) -> &'a Const<'lcx> {
+impl<'tcx> Borrow<Const<'tcx>> for Interned<'tcx, Const<'tcx>> {
+ fn borrow<'a>(&'a self) -> &'a Const<'tcx> {
&self.0
}
}
-impl<'tcx: 'lcx, 'lcx> Borrow<[Clause<'lcx>]>
-for Interned<'tcx, List<Clause<'tcx>>> {
- fn borrow<'a>(&'a self) -> &'a [Clause<'lcx>] {
+impl<'tcx> Borrow<[Clause<'tcx>]> for Interned<'tcx, List<Clause<'tcx>>> {
+ fn borrow<'a>(&'a self) -> &'a [Clause<'tcx>] {
&self.0[..]
}
}
-impl<'tcx: 'lcx, 'lcx> Borrow<[Goal<'lcx>]>
-for Interned<'tcx, List<Goal<'tcx>>> {
- fn borrow<'a>(&'a self) -> &'a [Goal<'lcx>] {
+impl<'tcx> Borrow<[Goal<'tcx>]> for Interned<'tcx, List<Goal<'tcx>>> {
+ fn borrow<'a>(&'a self) -> &'a [Goal<'tcx>] {
&self.0[..]
}
}
@@ -2290,21 +2290,10 @@
$alloc_method:expr,
$alloc_to_key:expr,
$keep_in_local_tcx:expr) -> $ty:ty) => {
- impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
+ impl<$lt_tcx> TyCtxt<$lt_tcx> {
pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
let key = ($alloc_to_key)(&v);
- let alloc = |v, interners: &'gcx CtxtInterners<'gcx>| {
- // This transmutes $alloc<'tcx> to $alloc<'gcx>
- let v = unsafe {
- mem::transmute(v)
- };
- let i: &$lt_tcx $ty = $alloc_method(&interners.arena, v);
- // Cast to 'gcx
- let i = unsafe { mem::transmute(i) };
- Interned(i)
- };
-
// HACK(eddyb) Depend on flags being accurate to
// determine that all contents are in the global tcx.
// See comments on Lift for why we can't use that.
@@ -2318,11 +2307,11 @@
v);
}
- alloc(v, &self.interners)
+ Interned($alloc_method(&self.interners.arena, v))
}).0
} else {
self.global_interners.$name.borrow_mut().intern_ref(key, || {
- alloc(v, &self.global_interners)
+ Interned($alloc_method(&self.global_interners.arena, v))
}).0
}
}
@@ -2400,7 +2389,7 @@
) -> List<CanonicalVarInfo>
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
/// Given a `fn` type, returns an equivalent `unsafe fn` type;
/// that is, a `fn` type that is equivalent in every way for being
/// unsafe.
@@ -2769,7 +2758,7 @@
}
}
- pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'gcx> {
+ pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'tcx> {
if ts.len() == 0 {
List::empty()
} else {
@@ -2935,7 +2924,7 @@
lint::struct_lint_level(self.sess, lint, level, src, None, msg)
}
- pub fn in_scope_traits(self, id: HirId) -> Option<&'gcx StableVec<TraitCandidate>> {
+ pub fn in_scope_traits(self, id: HirId) -> Option<&'tcx StableVec<TraitCandidate>> {
self.in_scope_traits_map(id.owner)
.and_then(|map| map.get(&id.local_id))
}
@@ -2951,9 +2940,7 @@
.unwrap_or(false)
}
- pub fn object_lifetime_defaults(self, id: HirId)
- -> Option<&'gcx [ObjectLifetimeDefault]>
- {
+ pub fn object_lifetime_defaults(self, id: HirId) -> Option<&'tcx [ObjectLifetimeDefault]> {
self.object_lifetime_defaults_map(id.owner)
.and_then(|map| map.get(&id.local_id).map(|v| &**v))
}
diff --git a/src/librustc/ty/erase_regions.rs b/src/librustc/ty/erase_regions.rs
index 1f2d45a..999b4ef 100644
--- a/src/librustc/ty/erase_regions.rs
+++ b/src/librustc/ty/erase_regions.rs
@@ -8,13 +8,13 @@
};
}
-fn erase_regions_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
+fn erase_regions_ty<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
// N.B., use `super_fold_with` here. If we used `fold_with`, it
// could invoke the `erase_regions_ty` query recursively.
ty.super_fold_with(&mut RegionEraserVisitor { tcx })
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
/// Returns an equivalent value with all free regions removed (note
/// that late-bound regions remain, because they are important for
/// subtyping, but they are anonymized and normalized as well)..
@@ -32,12 +32,12 @@
}
}
-struct RegionEraserVisitor<'a, 'gcx: 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+struct RegionEraserVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionEraserVisitor<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
+impl TypeFolder<'tcx> for RegionEraserVisitor<'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
self.tcx
}
diff --git a/src/librustc/ty/error.rs b/src/librustc/ty/error.rs
index 09426fe..d5e0450 100644
--- a/src/librustc/ty/error.rs
+++ b/src/librustc/ty/error.rs
@@ -183,8 +183,8 @@
}
}
-impl<'a, 'gcx, 'lcx, 'tcx> ty::TyS<'tcx> {
- pub fn sort_string(&self, tcx: TyCtxt<'a, 'gcx, 'lcx>) -> Cow<'static, str> {
+impl<'tcx> ty::TyS<'tcx> {
+ pub fn sort_string(&self, tcx: TyCtxt<'_>) -> Cow<'static, str> {
match self.sty {
ty::Bool | ty::Char | ty::Int(_) |
ty::Uint(_) | ty::Float(_) | ty::Str | ty::Never => self.to_string().into(),
@@ -249,7 +249,7 @@
}
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
pub fn note_and_explain_type_err(self,
db: &mut DiagnosticBuilder<'_>,
err: &TypeError<'tcx>,
diff --git a/src/librustc/ty/fast_reject.rs b/src/librustc/ty/fast_reject.rs
index 908bbbc..7aab1ae 100644
--- a/src/librustc/ty/fast_reject.rs
+++ b/src/librustc/ty/fast_reject.rs
@@ -55,11 +55,11 @@
/// then we can't say much about whether two types would unify. Put another way,
/// `can_simplify_params` should be true if type parameters appear free in `ty` and `false` if they
/// are to be considered bound.
-pub fn simplify_type<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- ty: Ty<'_>,
- can_simplify_params: bool)
- -> Option<SimplifiedType>
-{
+pub fn simplify_type<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ ty: Ty<'_>,
+ can_simplify_params: bool,
+) -> Option<SimplifiedType> {
match ty.sty {
ty::Bool => Some(BoolSimplifiedType),
ty::Char => Some(CharSimplifiedType),
diff --git a/src/librustc/ty/fold.rs b/src/librustc/ty/fold.rs
index dae28d5..8b98a29 100644
--- a/src/librustc/ty/fold.rs
+++ b/src/librustc/ty/fold.rs
@@ -45,8 +45,8 @@
/// To implement this conveniently, use the
/// `BraceStructTypeFoldableImpl` etc macros found in `macros.rs`.
pub trait TypeFoldable<'tcx>: fmt::Debug + Clone {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self;
- fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self;
+ fn fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
self.super_fold_with(folder)
}
@@ -155,8 +155,8 @@
/// default implementation that does an "identity" fold. Within each
/// identity fold, it should invoke `foo.fold_with(self)` to fold each
/// sub-item.
-pub trait TypeFolder<'gcx: 'tcx, 'tcx> : Sized {
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'gcx, 'tcx>;
+pub trait TypeFolder<'tcx>: Sized {
+ fn tcx<'a>(&'a self) -> TyCtxt<'tcx>;
fn fold_binder<T>(&mut self, t: &Binder<T>) -> Binder<T>
where T : TypeFoldable<'tcx>
@@ -198,23 +198,27 @@
///////////////////////////////////////////////////////////////////////////
// Some sample folders
-pub struct BottomUpFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a, F, G, H>
- where F: FnMut(Ty<'tcx>) -> Ty<'tcx>,
- G: FnMut(ty::Region<'tcx>) -> ty::Region<'tcx>,
- H: FnMut(&'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx>,
+pub struct BottomUpFolder<'tcx, F, G, H>
+where
+ F: FnMut(Ty<'tcx>) -> Ty<'tcx>,
+ G: FnMut(ty::Region<'tcx>) -> ty::Region<'tcx>,
+ H: FnMut(&'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx>,
{
- pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ pub tcx: TyCtxt<'tcx>,
pub ty_op: F,
pub lt_op: G,
pub ct_op: H,
}
-impl<'a, 'gcx, 'tcx, F, G, H> TypeFolder<'gcx, 'tcx> for BottomUpFolder<'a, 'gcx, 'tcx, F, G, H>
- where F: FnMut(Ty<'tcx>) -> Ty<'tcx>,
- G: FnMut(ty::Region<'tcx>) -> ty::Region<'tcx>,
- H: FnMut(&'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx>,
+impl<'tcx, F, G, H> TypeFolder<'tcx> for BottomUpFolder<'tcx, F, G, H>
+where
+ F: FnMut(Ty<'tcx>) -> Ty<'tcx>,
+ G: FnMut(ty::Region<'tcx>) -> ty::Region<'tcx>,
+ H: FnMut(&'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx>,
{
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
let t = ty.super_fold_with(self);
@@ -235,7 +239,7 @@
///////////////////////////////////////////////////////////////////////////
// Region folder
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
/// Collects the free and escaping regions in `value` into `region_set`. Returns
/// whether any late-bound regions were skipped
pub fn collect_regions<T>(self,
@@ -361,8 +365,8 @@
/// visited by this folder; only regions that occur free will be
/// visited by `fld_r`.
-pub struct RegionFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub struct RegionFolder<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
skipped_regions: &'a mut bool,
/// Stores the index of a binder *just outside* the stuff we have
@@ -373,19 +377,17 @@
/// Callback invokes for each free region. The `DebruijnIndex`
/// points to the binder *just outside* the ones we have passed
/// through.
- fold_region_fn: &'a mut (dyn FnMut(
- ty::Region<'tcx>,
- ty::DebruijnIndex,
- ) -> ty::Region<'tcx> + 'a),
+ fold_region_fn:
+ &'a mut (dyn FnMut(ty::Region<'tcx>, ty::DebruijnIndex) -> ty::Region<'tcx> + 'a),
}
-impl<'a, 'gcx, 'tcx> RegionFolder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> RegionFolder<'a, 'tcx> {
#[inline]
pub fn new(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
skipped_regions: &'a mut bool,
fold_region_fn: &'a mut dyn FnMut(ty::Region<'tcx>, ty::DebruijnIndex) -> ty::Region<'tcx>,
- ) -> RegionFolder<'a, 'gcx, 'tcx> {
+ ) -> RegionFolder<'a, 'tcx> {
RegionFolder {
tcx,
skipped_regions,
@@ -395,8 +397,10 @@
}
}
-impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionFolder<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
+impl<'a, 'tcx> TypeFolder<'tcx> for RegionFolder<'a, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
fn fold_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T> {
self.current_index.shift_in(1);
@@ -426,8 +430,8 @@
// Bound vars replacer
/// Replaces the escaping bound vars (late bound regions or bound types) in a type.
-struct BoundVarReplacer<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+struct BoundVarReplacer<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
/// As with `RegionFolder`, represents the index of a binder *just outside*
/// the ones we have visited.
@@ -438,16 +442,12 @@
fld_c: &'a mut (dyn FnMut(ty::BoundVar, Ty<'tcx>) -> &'tcx ty::Const<'tcx> + 'a),
}
-impl<'a, 'gcx, 'tcx> BoundVarReplacer<'a, 'gcx, 'tcx> {
- fn new<F, G, H>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- fld_r: &'a mut F,
- fld_t: &'a mut G,
- fld_c: &'a mut H,
- ) -> Self
- where F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>,
- G: FnMut(ty::BoundTy) -> Ty<'tcx>,
- H: FnMut(ty::BoundVar, Ty<'tcx>) -> &'tcx ty::Const<'tcx>,
+impl<'a, 'tcx> BoundVarReplacer<'a, 'tcx> {
+ fn new<F, G, H>(tcx: TyCtxt<'tcx>, fld_r: &'a mut F, fld_t: &'a mut G, fld_c: &'a mut H) -> Self
+ where
+ F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>,
+ G: FnMut(ty::BoundTy) -> Ty<'tcx>,
+ H: FnMut(ty::BoundVar, Ty<'tcx>) -> &'tcx ty::Const<'tcx>,
{
BoundVarReplacer {
tcx,
@@ -459,8 +459,10 @@
}
}
-impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for BoundVarReplacer<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
+impl<'a, 'tcx> TypeFolder<'tcx> for BoundVarReplacer<'a, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
fn fold_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T> {
self.current_index.shift_in(1);
@@ -542,7 +544,7 @@
}
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
/// Replaces all regions bound by the given `Binder` with the
/// results returned by the closure; the closure is expected to
/// return a free region (relative to this binder), and hence the
@@ -722,15 +724,15 @@
Out,
}
-struct Shifter<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+struct Shifter<'tcx> {
+ tcx: TyCtxt<'tcx>,
current_index: ty::DebruijnIndex,
amount: u32,
direction: Direction,
}
-impl Shifter<'a, 'gcx, 'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, amount: u32, direction: Direction) -> Self {
+impl Shifter<'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, amount: u32, direction: Direction) -> Self {
Shifter {
tcx,
current_index: ty::INNERMOST,
@@ -740,8 +742,10 @@
}
}
-impl TypeFolder<'gcx, 'tcx> for Shifter<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
+impl TypeFolder<'tcx> for Shifter<'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
fn fold_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T> {
self.current_index.shift_in(1);
@@ -817,10 +821,10 @@
}
}
-pub fn shift_region<'a, 'gcx, 'tcx>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub fn shift_region<'tcx>(
+ tcx: TyCtxt<'tcx>,
region: ty::Region<'tcx>,
- amount: u32
+ amount: u32,
) -> ty::Region<'tcx> {
match region {
ty::ReLateBound(debruijn, br) if amount > 0 => {
@@ -832,22 +836,20 @@
}
}
-pub fn shift_vars<'a, 'gcx, 'tcx, T>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- value: &T,
- amount: u32
-) -> T where T: TypeFoldable<'tcx> {
+pub fn shift_vars<'tcx, T>(tcx: TyCtxt<'tcx>, value: &T, amount: u32) -> T
+where
+ T: TypeFoldable<'tcx>,
+{
debug!("shift_vars(value={:?}, amount={})",
value, amount);
value.fold_with(&mut Shifter::new(tcx, amount, Direction::In))
}
-pub fn shift_out_vars<'a, 'gcx, 'tcx, T>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- value: &T,
- amount: u32
-) -> T where T: TypeFoldable<'tcx> {
+pub fn shift_out_vars<'tcx, T>(tcx: TyCtxt<'tcx>, value: &T, amount: u32) -> T
+where
+ T: TypeFoldable<'tcx>,
+{
debug!("shift_out_vars(value={:?}, amount={})",
value, amount);
diff --git a/src/librustc/ty/inhabitedness/def_id_forest.rs b/src/librustc/ty/inhabitedness/def_id_forest.rs
index 581fc41..b22bd21 100644
--- a/src/librustc/ty/inhabitedness/def_id_forest.rs
+++ b/src/librustc/ty/inhabitedness/def_id_forest.rs
@@ -21,7 +21,7 @@
root_ids: SmallVec<[DefId; 1]>,
}
-impl<'a, 'gcx, 'tcx> DefIdForest {
+impl<'tcx> DefIdForest {
/// Creates an empty forest.
pub fn empty() -> DefIdForest {
DefIdForest {
@@ -32,7 +32,7 @@
/// Creates a forest consisting of a single tree representing the entire
/// crate.
#[inline]
- pub fn full(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> DefIdForest {
+ pub fn full(tcx: TyCtxt<'tcx>) -> DefIdForest {
let crate_id = tcx.hir().local_def_id(CRATE_NODE_ID);
DefIdForest::from_id(crate_id)
}
@@ -52,17 +52,14 @@
}
/// Tests whether the forest contains a given DefId.
- pub fn contains(&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- id: DefId) -> bool
- {
+ pub fn contains(&self, tcx: TyCtxt<'tcx>, id: DefId) -> bool {
self.root_ids.iter().any(|root_id| tcx.is_descendant_of(id, *root_id))
}
/// Calculate the intersection of a collection of forests.
- pub fn intersection<I>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- iter: I) -> DefIdForest
- where I: IntoIterator<Item=DefIdForest>
+ pub fn intersection<I>(tcx: TyCtxt<'tcx>, iter: I) -> DefIdForest
+ where
+ I: IntoIterator<Item = DefIdForest>,
{
let mut iter = iter.into_iter();
let mut ret = if let Some(first) = iter.next() {
@@ -97,9 +94,9 @@
}
/// Calculate the union of a collection of forests.
- pub fn union<I>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- iter: I) -> DefIdForest
- where I: IntoIterator<Item=DefIdForest>
+ pub fn union<I>(tcx: TyCtxt<'tcx>, iter: I) -> DefIdForest
+ where
+ I: IntoIterator<Item = DefIdForest>,
{
let mut ret = DefIdForest::empty();
let mut next_ret = SmallVec::new();
diff --git a/src/librustc/ty/inhabitedness/mod.rs b/src/librustc/ty/inhabitedness/mod.rs
index be1d973..5ce7508 100644
--- a/src/librustc/ty/inhabitedness/mod.rs
+++ b/src/librustc/ty/inhabitedness/mod.rs
@@ -51,7 +51,7 @@
// This code should only compile in modules where the uninhabitedness of Foo is
// visible.
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
/// Checks whether a type is visibly uninhabited from a particular module.
/// # Example
/// ```rust
@@ -106,13 +106,9 @@
}
}
-impl<'a, 'gcx, 'tcx> AdtDef {
+impl<'tcx> AdtDef {
/// Calculate the forest of DefIds from which this adt is visibly uninhabited.
- fn uninhabited_from(
- &self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- substs: SubstsRef<'tcx>) -> DefIdForest
- {
+ fn uninhabited_from(&self, tcx: TyCtxt<'tcx>, substs: SubstsRef<'tcx>) -> DefIdForest {
// Non-exhaustive ADTs from other crates are always considered inhabited.
if self.is_variant_list_non_exhaustive() && !self.did.is_local() {
DefIdForest::empty()
@@ -124,14 +120,14 @@
}
}
-impl<'a, 'gcx, 'tcx> VariantDef {
+impl<'tcx> VariantDef {
/// Calculate the forest of DefIds from which this variant is visibly uninhabited.
pub fn uninhabited_from(
&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
substs: SubstsRef<'tcx>,
- adt_kind: AdtKind) -> DefIdForest
- {
+ adt_kind: AdtKind,
+ ) -> DefIdForest {
let is_enum = match adt_kind {
// For now, `union`s are never considered uninhabited.
// The precise semantics of inhabitedness with respect to unions is currently undecided.
@@ -150,11 +146,11 @@
}
}
-impl<'a, 'gcx, 'tcx> FieldDef {
+impl<'tcx> FieldDef {
/// Calculate the forest of DefIds from which this field is visibly uninhabited.
fn uninhabited_from(
&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
substs: SubstsRef<'tcx>,
is_enum: bool,
) -> DefIdForest {
@@ -180,10 +176,9 @@
}
}
-impl<'a, 'gcx, 'tcx> TyS<'tcx> {
+impl<'tcx> TyS<'tcx> {
/// Calculate the forest of DefIds from which this type is visibly uninhabited.
- fn uninhabited_from(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> DefIdForest
- {
+ fn uninhabited_from(&self, tcx: TyCtxt<'tcx>) -> DefIdForest {
match self.sty {
Adt(def, substs) => def.uninhabited_from(tcx, substs),
diff --git a/src/librustc/ty/instance.rs b/src/librustc/ty/instance.rs
index ba42cec..457d018 100644
--- a/src/librustc/ty/instance.rs
+++ b/src/librustc/ty/instance.rs
@@ -42,11 +42,8 @@
CloneShim(DefId, Ty<'tcx>),
}
-impl<'a, 'tcx> Instance<'tcx> {
- pub fn ty(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> Ty<'tcx>
- {
+impl<'tcx> Instance<'tcx> {
+ pub fn ty(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
let ty = tcx.type_of(self.def.def_id());
tcx.subst_and_normalize_erasing_regions(
self.substs,
@@ -55,7 +52,7 @@
)
}
- fn fn_sig_noadjust(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> PolyFnSig<'tcx> {
+ fn fn_sig_noadjust(&self, tcx: TyCtxt<'tcx>) -> PolyFnSig<'tcx> {
let ty = self.ty(tcx);
match ty.sty {
ty::FnDef(..) |
@@ -105,7 +102,7 @@
}
}
- pub fn fn_sig(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::PolyFnSig<'tcx> {
+ pub fn fn_sig(&self, tcx: TyCtxt<'tcx>) -> ty::PolyFnSig<'tcx> {
let mut fn_sig = self.fn_sig_noadjust(tcx);
if let InstanceDef::VtableShim(..) = self.def {
// Modify fn(self, ...) to fn(self: *mut Self, ...)
@@ -136,14 +133,11 @@
}
#[inline]
- pub fn attrs<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::Attributes<'tcx> {
+ pub fn attrs(&self, tcx: TyCtxt<'tcx>) -> ty::Attributes<'tcx> {
tcx.get_attrs(self.def_id())
}
- pub fn is_inline<'a>(
- &self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>
- ) -> bool {
+ pub fn is_inline(&self, tcx: TyCtxt<'tcx>) -> bool {
use crate::hir::map::DefPathData;
let def_id = match *self {
ty::InstanceDef::Item(def_id) => def_id,
@@ -156,10 +150,7 @@
}
}
- pub fn requires_local<'a>(
- &self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>
- ) -> bool {
+ pub fn requires_local(&self, tcx: TyCtxt<'tcx>) -> bool {
if self.is_inline(tcx) {
return true
}
@@ -209,7 +200,7 @@
}
}
-impl<'a, 'b, 'tcx> Instance<'tcx> {
+impl<'tcx> Instance<'tcx> {
pub fn new(def_id: DefId, substs: SubstsRef<'tcx>)
-> Instance<'tcx> {
assert!(!substs.has_escaping_bound_vars(),
@@ -218,7 +209,7 @@
Instance { def: InstanceDef::Item(def_id), substs: substs }
}
- pub fn mono(tcx: TyCtxt<'a, 'tcx, 'b>, def_id: DefId) -> Instance<'tcx> {
+ pub fn mono(tcx: TyCtxt<'tcx>, def_id: DefId) -> Instance<'tcx> {
Instance::new(def_id, tcx.global_tcx().empty_substs_for_def_id(def_id))
}
@@ -245,10 +236,12 @@
/// Presuming that coherence and type-check have succeeded, if this method is invoked
/// in a monomorphic context (i.e., like during codegen), then it is guaranteed to return
/// `Some`.
- pub fn resolve(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- def_id: DefId,
- substs: SubstsRef<'tcx>) -> Option<Instance<'tcx>> {
+ pub fn resolve(
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ def_id: DefId,
+ substs: SubstsRef<'tcx>,
+ ) -> Option<Instance<'tcx>> {
debug!("resolve(def_id={:?}, substs={:?})", def_id, substs);
let result = if let Some(trait_def_id) = tcx.trait_of_item(def_id) {
debug!(" => associated item, attempting to find impl in param_env {:#?}", param_env);
@@ -297,10 +290,12 @@
result
}
- pub fn resolve_for_vtable(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- def_id: DefId,
- substs: SubstsRef<'tcx>) -> Option<Instance<'tcx>> {
+ pub fn resolve_for_vtable(
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ def_id: DefId,
+ substs: SubstsRef<'tcx>,
+ ) -> Option<Instance<'tcx>> {
debug!("resolve(def_id={:?}, substs={:?})", def_id, substs);
let fn_sig = tcx.fn_sig(def_id);
let is_vtable_shim =
@@ -317,12 +312,11 @@
}
pub fn resolve_closure(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
def_id: DefId,
substs: ty::ClosureSubsts<'tcx>,
- requested_kind: ty::ClosureKind)
- -> Instance<'tcx>
- {
+ requested_kind: ty::ClosureKind,
+ ) -> Instance<'tcx> {
let actual_kind = substs.closure_kind(def_id, tcx);
match needs_fn_once_adapter_shim(actual_kind, requested_kind) {
@@ -331,22 +325,17 @@
}
}
- pub fn resolve_drop_in_place(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: Ty<'tcx>)
- -> ty::Instance<'tcx>
- {
+ pub fn resolve_drop_in_place(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> ty::Instance<'tcx> {
let def_id = tcx.require_lang_item(DropInPlaceFnLangItem);
let substs = tcx.intern_substs(&[ty.into()]);
Instance::resolve(tcx, ty::ParamEnv::reveal_all(), def_id, substs).unwrap()
}
pub fn fn_once_adapter_instance(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
closure_did: DefId,
- substs: ty::ClosureSubsts<'tcx>)
- -> Instance<'tcx>
- {
+ substs: ty::ClosureSubsts<'tcx>,
+ ) -> Instance<'tcx> {
debug!("fn_once_adapter_shim({:?}, {:?})",
closure_did,
substs);
@@ -376,8 +365,8 @@
}
}
-fn resolve_associated_item<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn resolve_associated_item<'tcx>(
+ tcx: TyCtxt<'tcx>,
trait_item: &ty::AssocItem,
param_env: ty::ParamEnv<'tcx>,
trait_id: DefId,
diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs
index 65cdd4e..6cee270 100644
--- a/src/librustc/ty/layout.rs
+++ b/src/librustc/ty/layout.rs
@@ -28,21 +28,20 @@
ArgAttribute, ArgAttributes, ArgType, Conv, FnType, IgnoreMode, PassMode, Reg, RegKind
};
-
-
pub trait IntegerExt {
- fn to_ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, signed: bool) -> Ty<'tcx>;
+ fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>, signed: bool) -> Ty<'tcx>;
fn from_attr<C: HasDataLayout>(cx: &C, ity: attr::IntType) -> Integer;
- fn repr_discr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: Ty<'tcx>,
- repr: &ReprOptions,
- min: i128,
- max: i128)
- -> (Integer, bool);
+ fn repr_discr<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ ty: Ty<'tcx>,
+ repr: &ReprOptions,
+ min: i128,
+ max: i128,
+ ) -> (Integer, bool);
}
impl IntegerExt for Integer {
- fn to_ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, signed: bool) -> Ty<'tcx> {
+ fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>, signed: bool) -> Ty<'tcx> {
match (*self, signed) {
(I8, false) => tcx.types.u8,
(I16, false) => tcx.types.u16,
@@ -77,12 +76,13 @@
/// signed discriminant range and #[repr] attribute.
/// N.B.: u128 values above i128::MAX will be treated as signed, but
/// that shouldn't affect anything, other than maybe debuginfo.
- fn repr_discr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: Ty<'tcx>,
- repr: &ReprOptions,
- min: i128,
- max: i128)
- -> (Integer, bool) {
+ fn repr_discr<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ ty: Ty<'tcx>,
+ repr: &ReprOptions,
+ min: i128,
+ max: i128,
+ ) -> (Integer, bool) {
// Theoretically, negative values could be larger in unsigned representation
// than the unsigned representation of the signed minimum. However, if there
// are any negative values, the only valid unsigned representation is u128
@@ -126,11 +126,11 @@
}
pub trait PrimitiveExt {
- fn to_ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx>;
+ fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx>;
}
impl PrimitiveExt for Primitive {
- fn to_ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> {
+ fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
match *self {
Int(i, signed) => i.to_ty(tcx, signed),
Float(FloatTy::F32) => tcx.types.f32,
@@ -171,10 +171,10 @@
}
}
-fn layout_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
- -> Result<&'tcx LayoutDetails, LayoutError<'tcx>>
-{
+fn layout_raw<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>,
+) -> Result<&'tcx LayoutDetails, LayoutError<'tcx>> {
ty::tls::with_related_context(tcx, move |icx| {
let rec_limit = *tcx.sess.recursion_limit.get();
let (param_env, ty) = query.into_parts();
@@ -226,7 +226,7 @@
Prefixed(Size, Align),
}
-impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
+impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
fn scalar_pair(&self, a: Scalar, b: Scalar) -> LayoutDetails {
let dl = self.data_layout();
let b_align = b.value.align(dl);
@@ -1221,7 +1221,7 @@
// Also included in the layout are the upvars and the discriminant.
// These are included as fields on the "outer" layout; they are not part
// of any variant.
-impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
+impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
/// Compute the eligibility and assignment of each local.
fn generator_saved_local_eligibility(&self, info: &GeneratorLayout<'tcx>)
-> (BitSet<GeneratorSavedLocal>, IndexVec<GeneratorSavedLocal, SavedLocalEligibility>) {
@@ -1442,9 +1442,7 @@
debug!("generator layout ({:?}): {:#?}", ty, layout);
Ok(layout)
}
-}
-impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
/// This is invoked by the `layout_raw` query to record the final
/// layout of each type.
#[inline(always)]
@@ -1605,11 +1603,12 @@
}
}
-impl<'a, 'tcx> SizeSkeleton<'tcx> {
- pub fn compute(ty: Ty<'tcx>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>)
- -> Result<SizeSkeleton<'tcx>, LayoutError<'tcx>> {
+impl<'tcx> SizeSkeleton<'tcx> {
+ pub fn compute(
+ ty: Ty<'tcx>,
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ ) -> Result<SizeSkeleton<'tcx>, LayoutError<'tcx>> {
debug_assert!(!ty.has_infer_types());
// First try computing a static layout.
@@ -1729,21 +1728,21 @@
}
pub trait HasTyCtxt<'tcx>: HasDataLayout {
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx>;
+ fn tcx(&self) -> TyCtxt<'tcx>;
}
pub trait HasParamEnv<'tcx> {
fn param_env(&self) -> ty::ParamEnv<'tcx>;
}
-impl<'a, 'gcx, 'tcx> HasDataLayout for TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> HasDataLayout for TyCtxt<'tcx> {
fn data_layout(&self) -> &TargetDataLayout {
&self.data_layout
}
}
-impl<'a, 'gcx, 'tcx> HasTyCtxt<'gcx> for TyCtxt<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'gcx> {
+impl<'tcx> HasTyCtxt<'tcx> for TyCtxt<'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.global_tcx()
}
}
@@ -1760,8 +1759,8 @@
}
}
-impl<'gcx, 'tcx, T: HasTyCtxt<'gcx>> HasTyCtxt<'gcx> for LayoutCx<'tcx, T> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'gcx> {
+impl<'tcx, T: HasTyCtxt<'tcx>> HasTyCtxt<'tcx> for LayoutCx<'tcx, T> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx.tcx()
}
}
@@ -1798,7 +1797,7 @@
pub type TyLayout<'tcx> = ::rustc_target::abi::TyLayout<'tcx, Ty<'tcx>>;
-impl<'a, 'tcx> LayoutOf for LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
+impl<'tcx> LayoutOf for LayoutCx<'tcx, TyCtxt<'tcx>> {
type Ty = Ty<'tcx>;
type TyLayout = Result<TyLayout<'tcx>, LayoutError<'tcx>>;
@@ -1825,7 +1824,7 @@
}
}
-impl<'a, 'tcx> LayoutOf for LayoutCx<'tcx, ty::query::TyCtxtAt<'a, 'tcx, 'tcx>> {
+impl LayoutOf for LayoutCx<'tcx, ty::query::TyCtxtAt<'tcx>> {
type Ty = Ty<'tcx>;
type TyLayout = Result<TyLayout<'tcx>, LayoutError<'tcx>>;
@@ -1857,7 +1856,7 @@
}
// Helper (inherent) `layout_of` methods to avoid pushing `LayoutCx` to users.
-impl TyCtxt<'a, 'tcx, '_> {
+impl TyCtxt<'tcx> {
/// Computes the layout of a type. Note that this implicitly
/// executes in "reveal all" mode.
#[inline]
@@ -1871,7 +1870,7 @@
}
}
-impl ty::query::TyCtxtAt<'a, 'tcx, '_> {
+impl ty::query::TyCtxtAt<'tcx> {
/// Computes the layout of a type. Note that this implicitly
/// executes in "reveal all" mode.
#[inline]
@@ -2189,9 +2188,9 @@
}
impl Niche {
- fn reserve<'a, 'tcx>(
+ fn reserve<'tcx>(
&self,
- cx: &LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>>,
+ cx: &LayoutCx<'tcx, TyCtxt<'tcx>>,
count: u128,
) -> Option<(u128, Scalar)> {
if count > self.available {
@@ -2207,7 +2206,7 @@
}
}
-impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
+impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
/// Find the offset of a niche leaf field, starting from
/// the given type and recursing through aggregates.
// FIXME(eddyb) traverse already optimized enums.
@@ -2443,24 +2442,27 @@
pref
});
-impl<'gcx> HashStable<StableHashingContext<'gcx>> for Align {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'gcx>,
- hasher: &mut StableHasher<W>) {
+impl<'tcx> HashStable<StableHashingContext<'tcx>> for Align {
+ fn hash_stable<W: StableHasherResult>(
+ &self,
+ hcx: &mut StableHashingContext<'tcx>,
+ hasher: &mut StableHasher<W>,
+ ) {
self.bytes().hash_stable(hcx, hasher);
}
}
-impl<'gcx> HashStable<StableHashingContext<'gcx>> for Size {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'gcx>,
- hasher: &mut StableHasher<W>) {
+impl<'tcx> HashStable<StableHashingContext<'tcx>> for Size {
+ fn hash_stable<W: StableHasherResult>(
+ &self,
+ hcx: &mut StableHashingContext<'tcx>,
+ hasher: &mut StableHasher<W>,
+ ) {
self.bytes().hash_stable(hcx, hasher);
}
}
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for LayoutError<'gcx>
-{
+impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for LayoutError<'tcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs
index d858d37..3614103 100644
--- a/src/librustc/ty/mod.rs
+++ b/src/librustc/ty/mod.rs
@@ -213,7 +213,7 @@
}
}
- pub fn signature<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
+ pub fn signature<'tcx>(&self, tcx: TyCtxt<'tcx>) -> String {
match self.kind {
ty::AssocKind::Method => {
// We skip the binder here because the binder would deanonymize all
@@ -259,14 +259,14 @@
}
}
-impl<'a, 'gcx, 'tcx> DefIdTree for TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> DefIdTree for TyCtxt<'tcx> {
fn parent(self, id: DefId) -> Option<DefId> {
self.def_key(id).parent.map(|index| DefId { index: index, ..id })
}
}
impl Visibility {
- pub fn from_hir(visibility: &hir::Visibility, id: hir::HirId, tcx: TyCtxt<'_, '_, '_>) -> Self {
+ pub fn from_hir(visibility: &hir::Visibility, id: hir::HirId, tcx: TyCtxt<'_>) -> Self {
match visibility.node {
hir::VisibilityKind::Public => Visibility::Public,
hir::VisibilityKind::Crate(_) => Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)),
@@ -277,7 +277,7 @@
def => Visibility::Restricted(def.def_id()),
},
hir::VisibilityKind::Inherited => {
- Visibility::Restricted(tcx.hir().get_module_parent_by_hir_id(id))
+ Visibility::Restricted(tcx.hir().get_module_parent(id))
}
}
}
@@ -569,7 +569,7 @@
}
}
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for ty::TyS<'gcx> {
+impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for ty::TyS<'tcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
@@ -592,7 +592,7 @@
impl<'tcx> serialize::UseSpecializedEncodable for Ty<'tcx> {}
impl<'tcx> serialize::UseSpecializedDecodable for Ty<'tcx> {}
-pub type CanonicalTy<'gcx> = Canonical<'gcx, Ty<'gcx>>;
+pub type CanonicalTy<'tcx> = Canonical<'tcx, Ty<'tcx>>;
extern {
/// A dummy type used to force List to by unsized without requiring fat pointers
@@ -912,7 +912,7 @@
pub has_late_bound_regions: Option<Span>,
}
-impl<'a, 'gcx, 'tcx> Generics {
+impl<'tcx> Generics {
pub fn count(&self) -> usize {
self.parent_count + self.params.len()
}
@@ -934,7 +934,7 @@
own_counts
}
- pub fn requires_monomorphization(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool {
+ pub fn requires_monomorphization(&self, tcx: TyCtxt<'tcx>) -> bool {
if self.own_requires_monomorphization() {
return true;
}
@@ -957,11 +957,11 @@
false
}
- pub fn region_param(&'tcx self,
- param: &EarlyBoundRegion,
- tcx: TyCtxt<'a, 'gcx, 'tcx>)
- -> &'tcx GenericParamDef
- {
+ pub fn region_param(
+ &'tcx self,
+ param: &EarlyBoundRegion,
+ tcx: TyCtxt<'tcx>,
+ ) -> &'tcx GenericParamDef {
if let Some(index) = param.index.checked_sub(self.parent_count as u32) {
let param = &self.params[index as usize];
match param.kind {
@@ -975,10 +975,7 @@
}
/// Returns the `GenericParamDef` associated with this `ParamTy`.
- pub fn type_param(&'tcx self,
- param: &ParamTy,
- tcx: TyCtxt<'a, 'gcx, 'tcx>)
- -> &'tcx GenericParamDef {
+ pub fn type_param(&'tcx self, param: &ParamTy, tcx: TyCtxt<'tcx>) -> &'tcx GenericParamDef {
if let Some(index) = param.index.checked_sub(self.parent_count as u32) {
let param = &self.params[index as usize];
match param.kind {
@@ -992,10 +989,7 @@
}
/// Returns the `ConstParameterDef` associated with this `ParamConst`.
- pub fn const_param(&'tcx self,
- param: &ParamConst,
- tcx: TyCtxt<'a, 'gcx, 'tcx>)
- -> &GenericParamDef {
+ pub fn const_param(&'tcx self, param: &ParamConst, tcx: TyCtxt<'tcx>) -> &GenericParamDef {
if let Some(index) = param.index.checked_sub(self.parent_count as u32) {
let param = &self.params[index as usize];
match param.kind {
@@ -1019,24 +1013,33 @@
impl<'tcx> serialize::UseSpecializedEncodable for GenericPredicates<'tcx> {}
impl<'tcx> serialize::UseSpecializedDecodable for GenericPredicates<'tcx> {}
-impl<'a, 'gcx, 'tcx> GenericPredicates<'tcx> {
- pub fn instantiate(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, substs: SubstsRef<'tcx>)
- -> InstantiatedPredicates<'tcx> {
+impl<'tcx> GenericPredicates<'tcx> {
+ pub fn instantiate(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ substs: SubstsRef<'tcx>,
+ ) -> InstantiatedPredicates<'tcx> {
let mut instantiated = InstantiatedPredicates::empty();
self.instantiate_into(tcx, &mut instantiated, substs);
instantiated
}
- pub fn instantiate_own(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, substs: SubstsRef<'tcx>)
- -> InstantiatedPredicates<'tcx> {
+ pub fn instantiate_own(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ substs: SubstsRef<'tcx>,
+ ) -> InstantiatedPredicates<'tcx> {
InstantiatedPredicates {
predicates: self.predicates.iter().map(|(p, _)| p.subst(tcx, substs)).collect(),
}
}
- fn instantiate_into(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- instantiated: &mut InstantiatedPredicates<'tcx>,
- substs: SubstsRef<'tcx>) {
+ fn instantiate_into(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ instantiated: &mut InstantiatedPredicates<'tcx>,
+ substs: SubstsRef<'tcx>,
+ ) {
if let Some(def_id) = self.parent {
tcx.predicates_of(def_id).instantiate_into(tcx, instantiated, substs);
}
@@ -1045,25 +1048,28 @@
);
}
- pub fn instantiate_identity(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>)
- -> InstantiatedPredicates<'tcx> {
+ pub fn instantiate_identity(&self, tcx: TyCtxt<'tcx>) -> InstantiatedPredicates<'tcx> {
let mut instantiated = InstantiatedPredicates::empty();
self.instantiate_identity_into(tcx, &mut instantiated);
instantiated
}
- fn instantiate_identity_into(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- instantiated: &mut InstantiatedPredicates<'tcx>) {
+ fn instantiate_identity_into(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ instantiated: &mut InstantiatedPredicates<'tcx>,
+ ) {
if let Some(def_id) = self.parent {
tcx.predicates_of(def_id).instantiate_identity_into(tcx, instantiated);
}
instantiated.predicates.extend(self.predicates.iter().map(|&(p, _)| p))
}
- pub fn instantiate_supertrait(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- poly_trait_ref: &ty::PolyTraitRef<'tcx>)
- -> InstantiatedPredicates<'tcx>
- {
+ pub fn instantiate_supertrait(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ poly_trait_ref: &ty::PolyTraitRef<'tcx>,
+ ) -> InstantiatedPredicates<'tcx> {
assert_eq!(self.parent, None);
InstantiatedPredicates {
predicates: self.predicates.iter().map(|(pred, _)| {
@@ -1128,16 +1134,17 @@
}
}
-impl<'a, 'gcx, 'tcx> Predicate<'tcx> {
+impl<'tcx> Predicate<'tcx> {
/// Performs a substitution suitable for going from a
/// poly-trait-ref to supertraits that must hold if that
/// poly-trait-ref holds. This is slightly different from a normal
/// substitution in terms of what happens with bound regions. See
/// lengthy comment below for details.
- pub fn subst_supertrait(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- trait_ref: &ty::PolyTraitRef<'tcx>)
- -> ty::Predicate<'tcx>
- {
+ pub fn subst_supertrait(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ trait_ref: &ty::PolyTraitRef<'tcx>,
+ ) -> ty::Predicate<'tcx> {
// The interaction between HRTB and supertraits is not entirely
// obvious. Let me walk you (and myself) through an example.
//
@@ -1294,7 +1301,7 @@
}
#[inline]
- pub fn to_poly_trait_ref(&self, tcx: TyCtxt<'_, '_, '_>) -> PolyTraitRef<'tcx> {
+ pub fn to_poly_trait_ref(&self, tcx: TyCtxt<'_>) -> PolyTraitRef<'tcx> {
// Note: unlike with `TraitRef::to_poly_trait_ref()`,
// `self.0.trait_ref` is permitted to have escaping regions.
// This is because here `self` has a `Binder` and so does our
@@ -1756,8 +1763,9 @@
}
}
-impl<'a, 'gcx, T> HashStable<StableHashingContext<'a>> for ParamEnvAnd<'gcx, T>
- where T: HashStable<StableHashingContext<'a>>
+impl<'a, 'tcx, T> HashStable<StableHashingContext<'a>> for ParamEnvAnd<'tcx, T>
+where
+ T: HashStable<StableHashingContext<'a>>,
{
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
@@ -1838,7 +1846,7 @@
pub recovered: bool,
}
-impl<'a, 'gcx, 'tcx> VariantDef {
+impl<'tcx> VariantDef {
/// Creates a new `VariantDef`.
///
/// `variant_did` is the `DefId` that identifies the enum variant (if this `VariantDef`
@@ -1856,7 +1864,7 @@
/// If someone speeds up attribute loading to not be a performance concern, they can
/// remove this hack and use the constructor `DefId` everywhere.
pub fn new(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
ident: Ident,
variant_did: Option<DefId>,
ctor_def_id: Option<DefId>,
@@ -2069,7 +2077,7 @@
});
impl ReprOptions {
- pub fn new(tcx: TyCtxt<'_, '_, '_>, did: DefId) -> ReprOptions {
+ pub fn new(tcx: TyCtxt<'_>, did: DefId) -> ReprOptions {
let mut flags = ReprFlags::empty();
let mut size = None;
let mut max_align = 0;
@@ -2140,17 +2148,16 @@
pub fn inhibit_union_abi_opt(&self) -> bool {
self.c()
}
-
}
-impl<'a, 'gcx, 'tcx> AdtDef {
+impl<'tcx> AdtDef {
/// Creates a new `AdtDef`.
fn new(
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
did: DefId,
kind: AdtKind,
variants: IndexVec<VariantIdx, VariantDef>,
- repr: ReprOptions
+ repr: ReprOptions,
) -> Self {
debug!("AdtDef::new({:?}, {:?}, {:?}, {:?})", did, kind, variants, repr);
let mut flags = AdtFlags::NO_ADT_FLAGS;
@@ -2286,7 +2293,7 @@
}
/// Returns `true` if this type has a destructor.
- pub fn has_dtor(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool {
+ pub fn has_dtor(&self, tcx: TyCtxt<'tcx>) -> bool {
self.destructor(tcx).is_some()
}
@@ -2297,7 +2304,7 @@
}
#[inline]
- pub fn predicates(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> &'tcx GenericPredicates<'gcx> {
+ pub fn predicates(&self, tcx: TyCtxt<'tcx>) -> &'tcx GenericPredicates<'tcx> {
tcx.predicates_of(self.did)
}
@@ -2349,11 +2356,7 @@
}
#[inline]
- pub fn eval_explicit_discr(
- &self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- expr_did: DefId,
- ) -> Option<Discr<'tcx>> {
+ pub fn eval_explicit_discr(&self, tcx: TyCtxt<'tcx>, expr_did: DefId) -> Option<Discr<'tcx>> {
let param_env = ParamEnv::empty();
let repr_type = self.repr.discr_type();
let substs = InternalSubsts::identity_for_item(tcx.global_tcx(), expr_did);
@@ -2397,9 +2400,9 @@
#[inline]
pub fn discriminants(
- &'a self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- ) -> impl Iterator<Item = (VariantIdx, Discr<'tcx>)> + Captures<'gcx> + 'a {
+ &'tcx self,
+ tcx: TyCtxt<'tcx>,
+ ) -> impl Iterator<Item = (VariantIdx, Discr<'tcx>)> + Captures<'tcx> {
let repr_type = self.repr.discr_type();
let initial = repr_type.initial_discriminant(tcx.global_tcx());
let mut prev_discr = None::<Discr<'tcx>>;
@@ -2427,10 +2430,11 @@
/// discriminant (the last one before the requested variant),
/// assuming there are no constant-evaluation errors there.
#[inline]
- pub fn discriminant_for_variant(&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- variant_index: VariantIdx)
- -> Discr<'tcx> {
+ pub fn discriminant_for_variant(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ variant_index: VariantIdx,
+ ) -> Discr<'tcx> {
let (val, offset) = self.discriminant_def_for_variant(variant_index);
let explicit_value = val
.and_then(|expr_did| self.eval_explicit_discr(tcx, expr_did))
@@ -2465,7 +2469,7 @@
(expr_did, variant_index.as_u32() - explicit_index)
}
- pub fn destructor(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Destructor> {
+ pub fn destructor(&self, tcx: TyCtxt<'tcx>) -> Option<Destructor> {
tcx.adt_destructor(self.did)
}
@@ -2479,14 +2483,11 @@
///
/// Due to normalization being eager, this applies even if
/// the associated type is behind a pointer (e.g., issue #31299).
- pub fn sized_constraint(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> &'tcx [Ty<'tcx>] {
+ pub fn sized_constraint(&self, tcx: TyCtxt<'tcx>) -> &'tcx [Ty<'tcx>] {
tcx.adt_sized_constraint(self.did).0
}
- fn sized_constraint_for_ty(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: Ty<'tcx>)
- -> Vec<Ty<'tcx>> {
+ fn sized_constraint_for_ty(&self, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Vec<Ty<'tcx>> {
let result = match ty.sty {
Bool | Char | Int(..) | Uint(..) | Float(..) |
RawPtr(..) | Ref(..) | FnDef(..) | FnPtr(_) |
@@ -2563,8 +2564,8 @@
}
}
-impl<'a, 'gcx, 'tcx> FieldDef {
- pub fn ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, subst: SubstsRef<'tcx>) -> Ty<'tcx> {
+impl<'tcx> FieldDef {
+ pub fn ty(&self, tcx: TyCtxt<'tcx>, subst: SubstsRef<'tcx>) -> Ty<'tcx> {
tcx.type_of(self.did).subst(tcx, subst)
}
}
@@ -2586,11 +2587,11 @@
FnOnce,
}
-impl<'a, 'tcx> ClosureKind {
+impl<'tcx> ClosureKind {
// This is the initial value used when doing upvar inference.
pub const LATTICE_BOTTOM: ClosureKind = ClosureKind::Fn;
- pub fn trait_did(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> DefId {
+ pub fn trait_did(&self, tcx: TyCtxt<'tcx>) -> DefId {
match *self {
ClosureKind::Fn => tcx.require_lang_item(FnTraitLangItem),
ClosureKind::FnMut => {
@@ -2618,7 +2619,7 @@
/// Returns the representative scalar type for this closure kind.
/// See `TyS::to_opt_closure_kind` for more details.
- pub fn to_ty(self, tcx: TyCtxt<'_, '_, 'tcx>) -> Ty<'tcx> {
+ pub fn to_ty(self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
match self {
ty::ClosureKind::Fn => tcx.types.i8,
ty::ClosureKind::FnMut => tcx.types.i16,
@@ -2700,12 +2701,12 @@
}
#[derive(Debug, Clone)]
-pub enum Attributes<'gcx> {
+pub enum Attributes<'tcx> {
Owned(Lrc<[ast::Attribute]>),
- Borrowed(&'gcx [ast::Attribute])
+ Borrowed(&'tcx [ast::Attribute]),
}
-impl<'gcx> ::std::ops::Deref for Attributes<'gcx> {
+impl<'tcx> ::std::ops::Deref for Attributes<'tcx> {
type Target = [ast::Attribute];
fn deref(&self) -> &[ast::Attribute] {
@@ -2755,17 +2756,15 @@
Issue33140
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
- pub fn body_tables(self, body: hir::BodyId) -> &'gcx TypeckTables<'gcx> {
+impl<'tcx> TyCtxt<'tcx> {
+ pub fn body_tables(self, body: hir::BodyId) -> &'tcx TypeckTables<'tcx> {
self.typeck_tables_of(self.hir().body_owner_def_id(body))
}
/// Returns an iterator of the `DefId`s for all body-owners in this
/// crate. If you would prefer to iterate over the bodies
/// themselves, you can do `self.hir().krate().body_ids.iter()`.
- pub fn body_owners(
- self,
- ) -> impl Iterator<Item = DefId> + Captures<'tcx> + Captures<'gcx> + 'a {
+ pub fn body_owners(self) -> impl Iterator<Item = DefId> + Captures<'tcx> + 'tcx {
self.hir().krate()
.body_ids
.iter()
@@ -2889,10 +2888,7 @@
})
}
- pub fn associated_items(
- self,
- def_id: DefId,
- ) -> AssocItemsIterator<'a, 'gcx, 'tcx> {
+ pub fn associated_items(self, def_id: DefId) -> AssocItemsIterator<'tcx> {
// Ideally, we would use `-> impl Iterator` here, but it falls
// afoul of the conservative "capture [restrictions]" we put
// in place, so we use a hand-written iterator.
@@ -3000,9 +2996,7 @@
}
/// Returns the possibly-auto-generated MIR of a `(DefId, Subst)` pair.
- pub fn instance_mir(self, instance: ty::InstanceDef<'gcx>)
- -> &'gcx Body<'gcx>
- {
+ pub fn instance_mir(self, instance: ty::InstanceDef<'tcx>) -> &'tcx Body<'tcx> {
match instance {
ty::InstanceDef::Item(did) => {
self.optimized_mir(did)
@@ -3020,9 +3014,9 @@
}
/// Gets the attributes of a definition.
- pub fn get_attrs(self, did: DefId) -> Attributes<'gcx> {
+ pub fn get_attrs(self, did: DefId) -> Attributes<'tcx> {
if let Some(id) = self.hir().as_local_hir_id(did) {
- Attributes::Borrowed(self.hir().attrs_by_hir_id(id))
+ Attributes::Borrowed(self.hir().attrs(id))
} else {
Attributes::Owned(self.item_attrs(did))
}
@@ -3074,7 +3068,7 @@
pub fn span_of_impl(self, impl_did: DefId) -> Result<Span, Symbol> {
if impl_did.is_local() {
let hir_id = self.hir().as_local_hir_id(impl_did).unwrap();
- Ok(self.hir().span_by_hir_id(hir_id))
+ Ok(self.hir().span(hir_id))
} else {
Err(self.crate_name(impl_did.krate))
}
@@ -3109,19 +3103,19 @@
let scope = match ident.span.modernize_and_adjust(self.expansion_that_defined(scope)) {
Some(actual_expansion) =>
self.hir().definitions().parent_module_of_macro_def(actual_expansion),
- None => self.hir().get_module_parent_by_hir_id(block),
+ None => self.hir().get_module_parent(block),
};
(ident, scope)
}
}
-pub struct AssocItemsIterator<'a, 'gcx: 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- def_ids: &'gcx [DefId],
+pub struct AssocItemsIterator<'tcx> {
+ tcx: TyCtxt<'tcx>,
+ def_ids: &'tcx [DefId],
next_index: usize,
}
-impl Iterator for AssocItemsIterator<'_, '_, '_> {
+impl Iterator for AssocItemsIterator<'_> {
type Item = AssocItem;
fn next(&mut self) -> Option<AssocItem> {
@@ -3131,11 +3125,11 @@
}
}
-fn associated_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> AssocItem {
+fn associated_item<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> AssocItem {
let id = tcx.hir().as_local_hir_id(def_id).unwrap();
let parent_id = tcx.hir().get_parent_item(id);
let parent_def_id = tcx.hir().local_def_id_from_hir_id(parent_id);
- let parent_item = tcx.hir().expect_item_by_hir_id(parent_id);
+ let parent_item = tcx.hir().expect_item(parent_id);
match parent_item.node {
hir::ItemKind::Impl(.., ref impl_item_refs) => {
if let Some(impl_item_ref) = impl_item_refs.iter().find(|i| i.id.hir_id == id) {
@@ -3176,9 +3170,7 @@
/// such.
/// - a Error, if a type contained itself. The representability
/// check should catch this case.
-fn adt_sized_constraint<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> AdtSizedConstraint<'tcx> {
+fn adt_sized_constraint<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> AdtSizedConstraint<'tcx> {
let def = tcx.adt_def(def_id);
let result = tcx.mk_type_list(def.variants.iter().flat_map(|v| {
@@ -3192,11 +3184,9 @@
AdtSizedConstraint(result)
}
-fn associated_item_def_ids<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> &'tcx [DefId] {
+fn associated_item_def_ids<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx [DefId] {
let id = tcx.hir().as_local_hir_id(def_id).unwrap();
- let item = tcx.hir().expect_item_by_hir_id(id);
+ let item = tcx.hir().expect_item(id);
match item.node {
hir::ItemKind::Trait(.., ref trait_item_refs) => {
tcx.arena.alloc_from_iter(
@@ -3217,14 +3207,14 @@
}
}
-fn def_span<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Span {
+fn def_span<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Span {
tcx.hir().span_if_local(def_id).unwrap()
}
/// If the given `DefId` describes an item belonging to a trait,
/// returns the `DefId` of the trait that the trait item belongs to;
/// otherwise, returns `None`.
-fn trait_of_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Option<DefId> {
+fn trait_of_item<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Option<DefId> {
tcx.opt_associated_item(def_id)
.and_then(|associated_item| {
match associated_item.container {
@@ -3235,7 +3225,7 @@
}
/// Yields the parent function's `DefId` if `def_id` is an `impl Trait` definition.
-pub fn is_impl_trait_defn(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> Option<DefId> {
+pub fn is_impl_trait_defn(tcx: TyCtxt<'_>, def_id: DefId) -> Option<DefId> {
if let Some(hir_id) = tcx.hir().as_local_hir_id(def_id) {
if let Node::Item(item) = tcx.hir().get_by_hir_id(hir_id) {
if let hir::ItemKind::Existential(ref exist_ty) = item.node {
@@ -3247,10 +3237,7 @@
}
/// See `ParamEnv` struct definition for details.
-fn param_env<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> ParamEnv<'tcx>
-{
+fn param_env<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> ParamEnv<'tcx> {
// The param_env of an impl Trait type is its defining function's param_env
if let Some(parent) = is_impl_trait_defn(tcx, def_id) {
return param_env(tcx, parent);
@@ -3279,34 +3266,28 @@
);
let body_id = tcx.hir().as_local_hir_id(def_id).map_or(hir::DUMMY_HIR_ID, |id| {
- tcx.hir().maybe_body_owned_by_by_hir_id(id).map_or(id, |body| body.hir_id)
+ tcx.hir().maybe_body_owned_by(id).map_or(id, |body| body.hir_id)
});
let cause = traits::ObligationCause::misc(tcx.def_span(def_id), body_id);
traits::normalize_param_env_or_error(tcx, def_id, unnormalized_env, cause)
}
-fn crate_disambiguator<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- crate_num: CrateNum) -> CrateDisambiguator {
+fn crate_disambiguator<'tcx>(tcx: TyCtxt<'tcx>, crate_num: CrateNum) -> CrateDisambiguator {
assert_eq!(crate_num, LOCAL_CRATE);
tcx.sess.local_crate_disambiguator()
}
-fn original_crate_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- crate_num: CrateNum) -> Symbol {
+fn original_crate_name<'tcx>(tcx: TyCtxt<'tcx>, crate_num: CrateNum) -> Symbol {
assert_eq!(crate_num, LOCAL_CRATE);
tcx.crate_name.clone()
}
-fn crate_hash<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- crate_num: CrateNum)
- -> Svh {
+fn crate_hash<'tcx>(tcx: TyCtxt<'tcx>, crate_num: CrateNum) -> Svh {
assert_eq!(crate_num, LOCAL_CRATE);
tcx.hir().crate_hash
}
-fn instance_def_size_estimate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- instance_def: InstanceDef<'tcx>)
- -> usize {
+fn instance_def_size_estimate<'tcx>(tcx: TyCtxt<'tcx>, instance_def: InstanceDef<'tcx>) -> usize {
match instance_def {
InstanceDef::Item(..) |
InstanceDef::DropGlue(..) => {
@@ -3321,10 +3302,7 @@
/// If `def_id` is an issue 33140 hack impl, returns its self type; otherwise, returns `None`.
///
/// See [`ImplOverlapKind::Issue33140`] for more details.
-fn issue33140_self_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> Option<Ty<'tcx>>
-{
+fn issue33140_self_ty<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Option<Ty<'tcx>> {
debug!("issue33140_self_ty({:?})", def_id);
let trait_ref = tcx.impl_trait_ref(def_id).unwrap_or_else(|| {
diff --git a/src/librustc/ty/outlives.rs b/src/librustc/ty/outlives.rs
index 5b21ed5..7d1403d 100644
--- a/src/librustc/ty/outlives.rs
+++ b/src/librustc/ty/outlives.rs
@@ -45,7 +45,7 @@
EscapingProjection(Vec<Component<'tcx>>),
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
/// Push onto `out` all the things that must outlive `'a` for the condition
/// `ty0: 'a` to hold. Note that `ty0` must be a **fully resolved type**.
pub fn push_outlives_components(&self, ty0: Ty<'tcx>,
diff --git a/src/librustc/ty/print/mod.rs b/src/librustc/ty/print/mod.rs
index d6bc4e5..092e7c6 100644
--- a/src/librustc/ty/print/mod.rs
+++ b/src/librustc/ty/print/mod.rs
@@ -13,7 +13,7 @@
// FIXME(eddyb) false positive, the lifetime parameters are used with `P: Printer<...>`.
#[allow(unused_lifetimes)]
-pub trait Print<'gcx, 'tcx, P> {
+pub trait Print<'tcx, P> {
type Output;
type Error;
@@ -28,7 +28,7 @@
///
/// For pretty-printing/formatting in particular, see `PrettyPrinter`.
// FIXME(eddyb) find a better name, this is more general than "printing".
-pub trait Printer<'gcx: 'tcx, 'tcx>: Sized {
+pub trait Printer<'tcx>: Sized {
type Error;
type Path;
@@ -37,7 +37,7 @@
type DynExistential;
type Const;
- fn tcx(&'a self) -> TyCtxt<'a, 'gcx, 'tcx>;
+ fn tcx(&'a self) -> TyCtxt<'tcx>;
fn print_def_path(
self,
@@ -302,7 +302,7 @@
}
}
-impl<'gcx: 'tcx, 'tcx, P: Printer<'gcx, 'tcx>> Print<'gcx, 'tcx, P> for ty::RegionKind {
+impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for ty::RegionKind {
type Output = P::Region;
type Error = P::Error;
fn print(&self, cx: P) -> Result<Self::Output, Self::Error> {
@@ -310,7 +310,7 @@
}
}
-impl<'gcx: 'tcx, 'tcx, P: Printer<'gcx, 'tcx>> Print<'gcx, 'tcx, P> for ty::Region<'_> {
+impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for ty::Region<'_> {
type Output = P::Region;
type Error = P::Error;
fn print(&self, cx: P) -> Result<Self::Output, Self::Error> {
@@ -318,7 +318,7 @@
}
}
-impl<'gcx: 'tcx, 'tcx, P: Printer<'gcx, 'tcx>> Print<'gcx, 'tcx, P> for Ty<'tcx> {
+impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for Ty<'tcx> {
type Output = P::Type;
type Error = P::Error;
fn print(&self, cx: P) -> Result<Self::Output, Self::Error> {
@@ -326,9 +326,7 @@
}
}
-impl<'gcx: 'tcx, 'tcx, P: Printer<'gcx, 'tcx>> Print<'gcx, 'tcx, P>
- for &'tcx ty::List<ty::ExistentialPredicate<'tcx>>
-{
+impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for &'tcx ty::List<ty::ExistentialPredicate<'tcx>> {
type Output = P::DynExistential;
type Error = P::Error;
fn print(&self, cx: P) -> Result<Self::Output, Self::Error> {
@@ -336,7 +334,7 @@
}
}
-impl<'gcx: 'tcx, 'tcx, P: Printer<'gcx, 'tcx>> Print<'gcx, 'tcx, P> for &'tcx ty::Const<'tcx> {
+impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for &'tcx ty::Const<'tcx> {
type Output = P::Const;
type Error = P::Error;
fn print(&self, cx: P) -> Result<Self::Output, Self::Error> {
diff --git a/src/librustc/ty/print/obsolete.rs b/src/librustc/ty/print/obsolete.rs
index 85d3386..16fb334 100644
--- a/src/librustc/ty/print/obsolete.rs
+++ b/src/librustc/ty/print/obsolete.rs
@@ -16,18 +16,14 @@
/// Same as `unique_type_name()` but with the result pushed onto the given
/// `output` parameter.
-pub struct DefPathBasedNames<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub struct DefPathBasedNames<'tcx> {
+ tcx: TyCtxt<'tcx>,
omit_disambiguators: bool,
omit_local_crate_name: bool,
}
-impl<'a, 'tcx> DefPathBasedNames<'a, 'tcx> {
- pub fn new(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- omit_disambiguators: bool,
- omit_local_crate_name: bool,
- ) -> Self {
+impl DefPathBasedNames<'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, omit_disambiguators: bool, omit_local_crate_name: bool) -> Self {
DefPathBasedNames { tcx, omit_disambiguators, omit_local_crate_name }
}
diff --git a/src/librustc/ty/print/pretty.rs b/src/librustc/ty/print/pretty.rs
index 513ffd6..d143cc3 100644
--- a/src/librustc/ty/print/pretty.rs
+++ b/src/librustc/ty/print/pretty.rs
@@ -166,16 +166,16 @@
}
/// Trait for printers that pretty-print using `fmt::Write` to the printer.
-pub trait PrettyPrinter<'gcx: 'tcx, 'tcx>:
- Printer<'gcx, 'tcx,
+pub trait PrettyPrinter<'tcx>:
+ Printer<
+ 'tcx,
Error = fmt::Error,
Path = Self,
Region = Self,
Type = Self,
DynExistential = Self,
Const = Self,
- > +
- fmt::Write
+ > + fmt::Write
{
/// Like `print_def_path` but for value paths.
fn print_value_path(
@@ -186,21 +186,17 @@
self.print_def_path(def_id, substs)
}
- fn in_binder<T>(
- self,
- value: &ty::Binder<T>,
- ) -> Result<Self, Self::Error>
- where T: Print<'gcx, 'tcx, Self, Output = Self, Error = Self::Error> + TypeFoldable<'tcx>
+ fn in_binder<T>(self, value: &ty::Binder<T>) -> Result<Self, Self::Error>
+ where
+ T: Print<'tcx, Self, Output = Self, Error = Self::Error> + TypeFoldable<'tcx>,
{
value.skip_binder().print(self)
}
/// Print comma-separated elements.
- fn comma_sep<T>(
- mut self,
- mut elems: impl Iterator<Item = T>,
- ) -> Result<Self, Self::Error>
- where T: Print<'gcx, 'tcx, Self, Output = Self, Error = Self::Error>
+ fn comma_sep<T>(mut self, mut elems: impl Iterator<Item = T>) -> Result<Self, Self::Error>
+ where
+ T: Print<'tcx, Self, Output = Self, Error = Self::Error>,
{
if let Some(first) = elems.next() {
self = first.print(self)?;
@@ -593,7 +589,7 @@
// FIXME(eddyb) should use `def_span`.
if let Some(hir_id) = self.tcx().hir().as_local_hir_id(did) {
- p!(write("@{:?}", self.tcx().hir().span_by_hir_id(hir_id)));
+ p!(write("@{:?}", self.tcx().hir().span(hir_id)));
let mut sep = " ";
for (&var_id, upvar_ty) in self.tcx().upvars(did)
.as_ref()
@@ -635,7 +631,7 @@
if self.tcx().sess.opts.debugging_opts.span_free_formats {
p!(write("@{:?}", hir_id));
} else {
- p!(write("@{:?}", self.tcx().hir().span_by_hir_id(hir_id)));
+ p!(write("@{:?}", self.tcx().hir().span(hir_id)));
}
let mut sep = " ";
for (&var_id, upvar_ty) in self.tcx().upvars(did)
@@ -931,10 +927,10 @@
}
// HACK(eddyb) boxed to avoid moving around a large struct by-value.
-pub struct FmtPrinter<'a, 'gcx, 'tcx, F>(Box<FmtPrinterData<'a, 'gcx, 'tcx, F>>);
+pub struct FmtPrinter<'a, 'tcx, F>(Box<FmtPrinterData<'a, 'tcx, F>>);
-pub struct FmtPrinterData<'a, 'gcx, 'tcx, F> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub struct FmtPrinterData<'a, 'tcx, F> {
+ tcx: TyCtxt<'tcx>,
fmt: F,
empty_path: bool,
@@ -949,21 +945,21 @@
pub name_resolver: Option<Box<&'a dyn Fn(ty::sty::TyVid) -> Option<String>>>,
}
-impl<F> Deref for FmtPrinter<'a, 'gcx, 'tcx, F> {
- type Target = FmtPrinterData<'a, 'gcx, 'tcx, F>;
+impl<F> Deref for FmtPrinter<'a, 'tcx, F> {
+ type Target = FmtPrinterData<'a, 'tcx, F>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
-impl<F> DerefMut for FmtPrinter<'_, '_, '_, F> {
+impl<F> DerefMut for FmtPrinter<'_, '_, F> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
-impl<F> FmtPrinter<'a, 'gcx, 'tcx, F> {
- pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, fmt: F, ns: Namespace) -> Self {
+impl<F> FmtPrinter<'a, 'tcx, F> {
+ pub fn new(tcx: TyCtxt<'tcx>, fmt: F, ns: Namespace) -> Self {
FmtPrinter(Box::new(FmtPrinterData {
tcx,
fmt,
@@ -978,7 +974,7 @@
}
}
-impl TyCtxt<'_, '_, '_> {
+impl TyCtxt<'_> {
// HACK(eddyb) get rid of `def_path_str` and/or pass `Namespace` explicitly always
// (but also some things just print a `DefId` generally so maybe we need this?)
fn guess_def_namespace(self, def_id: DefId) -> Namespace {
@@ -1010,13 +1006,13 @@
}
}
-impl<F: fmt::Write> fmt::Write for FmtPrinter<'_, '_, '_, F> {
+impl<F: fmt::Write> fmt::Write for FmtPrinter<'_, '_, F> {
fn write_str(&mut self, s: &str) -> fmt::Result {
self.fmt.write_str(s)
}
}
-impl<F: fmt::Write> Printer<'gcx, 'tcx> for FmtPrinter<'_, 'gcx, 'tcx, F> {
+impl<F: fmt::Write> Printer<'tcx> for FmtPrinter<'_, 'tcx, F> {
type Error = fmt::Error;
type Path = Self;
@@ -1025,7 +1021,7 @@
type DynExistential = Self;
type Const = Self;
- fn tcx(&'a self) -> TyCtxt<'a, 'gcx, 'tcx> {
+ fn tcx(&'a self) -> TyCtxt<'tcx> {
self.tcx
}
@@ -1222,7 +1218,7 @@
}
}
-impl<F: fmt::Write> PrettyPrinter<'gcx, 'tcx> for FmtPrinter<'_, 'gcx, 'tcx, F> {
+impl<F: fmt::Write> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx, F> {
fn infer_ty_name(&self, id: ty::TyVid) -> Option<String> {
self.0.name_resolver.as_ref().and_then(|func| func(id))
}
@@ -1239,11 +1235,9 @@
Ok(self)
}
- fn in_binder<T>(
- self,
- value: &ty::Binder<T>,
- ) -> Result<Self, Self::Error>
- where T: Print<'gcx, 'tcx, Self, Output = Self, Error = Self::Error> + TypeFoldable<'tcx>
+ fn in_binder<T>(self, value: &ty::Binder<T>) -> Result<Self, Self::Error>
+ where
+ T: Print<'tcx, Self, Output = Self, Error = Self::Error> + TypeFoldable<'tcx>,
{
self.pretty_in_binder(value)
}
@@ -1317,7 +1311,7 @@
}
// HACK(eddyb) limited to `FmtPrinter` because of `region_highlight_mode`.
-impl<F: fmt::Write> FmtPrinter<'_, '_, '_, F> {
+impl<F: fmt::Write> FmtPrinter<'_, '_, F> {
pub fn pretty_print_region(
mut self,
region: ty::Region<'_>,
@@ -1416,12 +1410,10 @@
// HACK(eddyb) limited to `FmtPrinter` because of `binder_depth`,
// `region_index` and `used_region_names`.
-impl<F: fmt::Write> FmtPrinter<'_, 'gcx, 'tcx, F> {
- pub fn pretty_in_binder<T>(
- mut self,
- value: &ty::Binder<T>,
- ) -> Result<Self, fmt::Error>
- where T: Print<'gcx, 'tcx, Self, Output = Self, Error = fmt::Error> + TypeFoldable<'tcx>
+impl<F: fmt::Write> FmtPrinter<'_, 'tcx, F> {
+ pub fn pretty_in_binder<T>(mut self, value: &ty::Binder<T>) -> Result<Self, fmt::Error>
+ where
+ T: Print<'tcx, Self, Output = Self, Error = fmt::Error> + TypeFoldable<'tcx>,
{
fn name_by_region_index(index: usize) -> InternedString {
match index {
@@ -1510,9 +1502,9 @@
}
}
-impl<'gcx: 'tcx, 'tcx, T, P: PrettyPrinter<'gcx, 'tcx>> Print<'gcx, 'tcx, P>
- for ty::Binder<T>
- where T: Print<'gcx, 'tcx, P, Output = P, Error = P::Error> + TypeFoldable<'tcx>
+impl<'tcx, T, P: PrettyPrinter<'tcx>> Print<'tcx, P> for ty::Binder<T>
+where
+ T: Print<'tcx, P, Output = P, Error = P::Error> + TypeFoldable<'tcx>,
{
type Output = P;
type Error = P::Error;
@@ -1521,10 +1513,10 @@
}
}
-impl<'gcx: 'tcx, 'tcx, T, U, P: PrettyPrinter<'gcx, 'tcx>> Print<'gcx, 'tcx, P>
- for ty::OutlivesPredicate<T, U>
- where T: Print<'gcx, 'tcx, P, Output = P, Error = P::Error>,
- U: Print<'gcx, 'tcx, P, Output = P, Error = P::Error>,
+impl<'tcx, T, U, P: PrettyPrinter<'tcx>> Print<'tcx, P> for ty::OutlivesPredicate<T, U>
+where
+ T: Print<'tcx, P, Output = P, Error = P::Error>,
+ U: Print<'tcx, P, Output = P, Error = P::Error>,
{
type Output = P;
type Error = P::Error;
@@ -1552,7 +1544,7 @@
macro_rules! define_print_and_forward_display {
(($self:ident, $cx:ident): $($ty:ty $print:block)+) => {
- $(impl<'gcx: 'tcx, 'tcx, P: PrettyPrinter<'gcx, 'tcx>> Print<'gcx, 'tcx, P> for $ty {
+ $(impl<'tcx, P: PrettyPrinter<'tcx>> Print<'tcx, P> for $ty {
type Output = P;
type Error = fmt::Error;
fn print(&$self, $cx: P) -> Result<Self::Output, Self::Error> {
@@ -1585,7 +1577,7 @@
&'tcx ty::Const<'tcx>,
// HACK(eddyb) these are exhaustive instead of generic,
- // because `for<'gcx: 'tcx, 'tcx>` isn't possible yet.
+ // because `for<'tcx>` isn't possible yet.
ty::Binder<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>>,
ty::Binder<ty::TraitRef<'tcx>>,
ty::Binder<ty::FnSig<'tcx>>,
diff --git a/src/librustc/ty/query/config.rs b/src/librustc/ty/query/config.rs
index 286894c..13d93f1 100644
--- a/src/librustc/ty/query/config.rs
+++ b/src/librustc/ty/query/config.rs
@@ -31,38 +31,36 @@
fn query(key: Self::Key) -> Query<'tcx>;
// Don't use this method to access query results, instead use the methods on TyCtxt
- fn query_cache<'a>(tcx: TyCtxt<'a, 'tcx, '_>) -> &'a Lock<QueryCache<'tcx, Self>>;
+ fn query_cache<'a>(tcx: TyCtxt<'tcx>) -> &'a Lock<QueryCache<'tcx, Self>>;
- fn to_dep_node(tcx: TyCtxt<'_, 'tcx, '_>, key: &Self::Key) -> DepNode;
+ fn to_dep_node(tcx: TyCtxt<'tcx>, key: &Self::Key) -> DepNode;
// Don't use this method to compute query results, instead use the methods on TyCtxt
- fn compute(tcx: TyCtxt<'_, 'tcx, '_>, key: Self::Key) -> Self::Value;
+ fn compute(tcx: TyCtxt<'tcx>, key: Self::Key) -> Self::Value;
fn hash_result(
hcx: &mut StableHashingContext<'_>,
result: &Self::Value
) -> Option<Fingerprint>;
- fn handle_cycle_error(tcx: TyCtxt<'_, 'tcx, '_>, error: CycleError<'tcx>) -> Self::Value;
+ fn handle_cycle_error(tcx: TyCtxt<'tcx>, error: CycleError<'tcx>) -> Self::Value;
}
pub(crate) trait QueryDescription<'tcx>: QueryAccessors<'tcx> {
- fn describe(tcx: TyCtxt<'_, '_, '_>, key: Self::Key) -> Cow<'static, str>;
+ fn describe(tcx: TyCtxt<'_>, key: Self::Key) -> Cow<'static, str>;
#[inline]
- fn cache_on_disk(_: TyCtxt<'_, 'tcx, 'tcx>, _: Self::Key) -> bool {
+ fn cache_on_disk(_: TyCtxt<'tcx>, _: Self::Key) -> bool {
false
}
- fn try_load_from_disk(_: TyCtxt<'_, 'tcx, 'tcx>,
- _: SerializedDepNodeIndex)
- -> Option<Self::Value> {
+ fn try_load_from_disk(_: TyCtxt<'tcx>, _: SerializedDepNodeIndex) -> Option<Self::Value> {
bug!("QueryDescription::load_from_disk() called for an unsupported query.")
}
}
-impl<'tcx, M: QueryAccessors<'tcx, Key=DefId>> QueryDescription<'tcx> for M {
- default fn describe(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> Cow<'static, str> {
+impl<'tcx, M: QueryAccessors<'tcx, Key = DefId>> QueryDescription<'tcx> for M {
+ default fn describe(tcx: TyCtxt<'_>, def_id: DefId) -> Cow<'static, str> {
if !tcx.sess.verbose() {
format!("processing `{}`", tcx.def_path_str(def_id)).into()
} else {
@@ -73,7 +71,7 @@
}
impl<'tcx> QueryDescription<'tcx> for queries::analysis<'tcx> {
- fn describe(_tcx: TyCtxt<'_, '_, '_>, _: CrateNum) -> Cow<'static, str> {
+ fn describe(_tcx: TyCtxt<'_>, _: CrateNum) -> Cow<'static, str> {
"running analysis passes on this crate".into()
}
}
@@ -82,12 +80,12 @@
($query_name:ident, |$tcx:tt, $key:tt| $cond:expr) => {
impl<'tcx> QueryDescription<'tcx> for queries::$query_name<'tcx> {
#[inline]
- fn cache_on_disk($tcx: TyCtxt<'_, 'tcx, 'tcx>, $key: Self::Key) -> bool {
+ fn cache_on_disk($tcx: TyCtxt<'tcx>, $key: Self::Key) -> bool {
$cond
}
#[inline]
- fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ fn try_load_from_disk(tcx: TyCtxt<'tcx>,
id: SerializedDepNodeIndex)
-> Option<Self::Value> {
tcx.queries.on_disk_cache.try_load_query_result(tcx, id)
diff --git a/src/librustc/ty/query/job.rs b/src/librustc/ty/query/job.rs
index 8e68c9f..dcc467a 100644
--- a/src/librustc/ty/query/job.rs
+++ b/src/librustc/ty/query/job.rs
@@ -75,9 +75,9 @@
/// Awaits for the query job to complete.
#[cfg(parallel_compiler)]
- pub(super) fn r#await<'lcx>(
+ pub(super) fn r#await(
&self,
- tcx: TyCtxt<'_, 'tcx, 'lcx>,
+ tcx: TyCtxt<'tcx>,
span: Span,
) -> Result<(), CycleError<'tcx>> {
tls::with_related_context(tcx, move |icx| {
@@ -100,11 +100,7 @@
}
#[cfg(not(parallel_compiler))]
- pub(super) fn find_cycle_in_stack<'lcx>(
- &self,
- tcx: TyCtxt<'_, 'tcx, 'lcx>,
- span: Span,
- ) -> CycleError<'tcx> {
+ pub(super) fn find_cycle_in_stack(&self, tcx: TyCtxt<'tcx>, span: Span) -> CycleError<'tcx> {
// Get the current executing query (waiter) and find the waitee amongst its parents
let mut current_job = tls::with_related_context(tcx, |icx| icx.query.clone());
let mut cycle = Vec::new();
@@ -338,9 +334,9 @@
// Deterministically pick an query from a list
#[cfg(parallel_compiler)]
fn pick_query<'a, 'tcx, T, F: Fn(&T) -> (Span, Lrc<QueryJob<'tcx>>)>(
- tcx: TyCtxt<'_, 'tcx, '_>,
+ tcx: TyCtxt<'tcx>,
queries: &'a [T],
- f: F
+ f: F,
) -> &'a T {
// Deterministically pick an entry point
// FIXME: Sort this instead
@@ -366,7 +362,7 @@
fn remove_cycle<'tcx>(
jobs: &mut Vec<Lrc<QueryJob<'tcx>>>,
wakelist: &mut Vec<Lrc<QueryWaiter<'tcx>>>,
- tcx: TyCtxt<'_, 'tcx, '_>
+ tcx: TyCtxt<'tcx>,
) -> bool {
let mut visited = FxHashSet::default();
let mut stack = Vec::new();
@@ -505,7 +501,7 @@
/// There may be multiple cycles involved in a deadlock, so this searches
/// all active queries for cycles before finally resuming all the waiters at once.
#[cfg(parallel_compiler)]
-fn deadlock(tcx: TyCtxt<'_, '_, '_>, registry: &rayon_core::Registry) {
+fn deadlock(tcx: TyCtxt<'_>, registry: &rayon_core::Registry) {
let on_panic = OnDrop(|| {
eprintln!("deadlock handler panicked, aborting process");
process::abort();
diff --git a/src/librustc/ty/query/keys.rs b/src/librustc/ty/query/keys.rs
index 27b0e8e..30a3e53 100644
--- a/src/librustc/ty/query/keys.rs
+++ b/src/librustc/ty/query/keys.rs
@@ -22,7 +22,7 @@
/// In the event that a cycle occurs, if no explicit span has been
/// given for a query with key `self`, what span should we use?
- fn default_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span;
+ fn default_span(&self, tcx: TyCtxt<'_>) -> Span;
}
impl<'tcx> Key for ty::InstanceDef<'tcx> {
@@ -30,7 +30,7 @@
LOCAL_CRATE
}
- fn default_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.def_id())
}
}
@@ -40,7 +40,7 @@
LOCAL_CRATE
}
- fn default_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.def_id())
}
}
@@ -50,7 +50,7 @@
self.instance.query_crate()
}
- fn default_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.instance.default_span(tcx)
}
}
@@ -59,7 +59,7 @@
fn query_crate(&self) -> CrateNum {
*self
}
- fn default_span(&self, _: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
@@ -68,7 +68,7 @@
fn query_crate(&self) -> CrateNum {
LOCAL_CRATE
}
- fn default_span(&self, _tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
@@ -77,7 +77,7 @@
fn query_crate(&self) -> CrateNum {
self.krate
}
- fn default_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(*self)
}
}
@@ -86,7 +86,7 @@
fn query_crate(&self) -> CrateNum {
self.0.krate
}
- fn default_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.1.default_span(tcx)
}
}
@@ -95,7 +95,7 @@
fn query_crate(&self) -> CrateNum {
self.0
}
- fn default_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.1.default_span(tcx)
}
}
@@ -104,7 +104,7 @@
fn query_crate(&self) -> CrateNum {
self.0.krate
}
- fn default_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
}
}
@@ -113,7 +113,7 @@
fn query_crate(&self) -> CrateNum {
self.0.krate
}
- fn default_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
}
}
@@ -122,7 +122,7 @@
fn query_crate(&self) -> CrateNum {
self.1.def_id().krate
}
- fn default_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.1.def_id())
}
}
@@ -131,16 +131,16 @@
fn query_crate(&self) -> CrateNum {
LOCAL_CRATE
}
- fn default_span(&self, _: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
-impl<'tcx> Key for ty::PolyTraitRef<'tcx>{
+impl<'tcx> Key for ty::PolyTraitRef<'tcx> {
fn query_crate(&self) -> CrateNum {
self.def_id().krate
}
- fn default_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.def_id())
}
}
@@ -149,7 +149,7 @@
fn query_crate(&self) -> CrateNum {
LOCAL_CRATE
}
- fn default_span(&self, _: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
@@ -158,7 +158,7 @@
fn query_crate(&self) -> CrateNum {
LOCAL_CRATE
}
- fn default_span(&self, _: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
@@ -167,7 +167,7 @@
fn query_crate(&self) -> CrateNum {
LOCAL_CRATE
}
- fn default_span(&self, _: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
@@ -176,7 +176,7 @@
fn query_crate(&self) -> CrateNum {
self.value.query_crate()
}
- fn default_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.value.default_span(tcx)
}
}
@@ -185,7 +185,7 @@
fn query_crate(&self) -> CrateNum {
LOCAL_CRATE
}
- fn default_span(&self, _: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
@@ -194,7 +194,7 @@
fn query_crate(&self) -> CrateNum {
LOCAL_CRATE
}
- fn default_span(&self, _tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
@@ -209,7 +209,7 @@
LOCAL_CRATE
}
- fn default_span(&self, _tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
}
}
diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs
index 4dbc2ab..6f83991 100644
--- a/src/librustc/ty/query/on_disk_cache.rs
+++ b/src/librustc/ty/query/on_disk_cache.rs
@@ -156,12 +156,10 @@
}
}
- pub fn serialize<'a, 'tcx, E>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- encoder: &mut E)
- -> Result<(), E::Error>
- where E: ty_codec::TyEncoder
- {
+ pub fn serialize<'tcx, E>(&self, tcx: TyCtxt<'tcx>, encoder: &mut E) -> Result<(), E::Error>
+ where
+ E: ty_codec::TyEncoder,
+ {
// Serializing the DepGraph should not modify it:
tcx.dep_graph.with_ignore(|| {
// Allocate SourceFileIndices
@@ -314,7 +312,7 @@
return Ok(());
- fn sorted_cnums_including_local_crate(tcx: TyCtxt<'_, '_, '_>) -> Vec<CrateNum> {
+ fn sorted_cnums_including_local_crate(tcx: TyCtxt<'_>) -> Vec<CrateNum> {
let mut cnums = vec![LOCAL_CRATE];
cnums.extend_from_slice(&tcx.crates()[..]);
cnums.sort_unstable();
@@ -326,10 +324,11 @@
}
/// Loads a diagnostic emitted during the previous compilation session.
- pub fn load_diagnostics<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- dep_node_index: SerializedDepNodeIndex)
- -> Vec<Diagnostic> {
+ pub fn load_diagnostics<'tcx>(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ dep_node_index: SerializedDepNodeIndex,
+ ) -> Vec<Diagnostic> {
let diagnostics: Option<EncodedDiagnostics> = self.load_indexed(
tcx,
dep_node_index,
@@ -354,11 +353,13 @@
/// Returns the cached query result if there is something in the cache for
/// the given `SerializedDepNodeIndex`; otherwise returns `None`.
- pub fn try_load_query_result<'tcx, T>(&self,
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
- dep_node_index: SerializedDepNodeIndex)
- -> Option<T>
- where T: Decodable
+ pub fn try_load_query_result<'tcx, T>(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ dep_node_index: SerializedDepNodeIndex,
+ ) -> Option<T>
+ where
+ T: Decodable,
{
self.load_indexed(tcx,
dep_node_index,
@@ -382,14 +383,15 @@
x.extend(Into::<Vec<_>>::into(diagnostics));
}
- fn load_indexed<'tcx, T>(&self,
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
- dep_node_index: SerializedDepNodeIndex,
- index: &FxHashMap<SerializedDepNodeIndex,
- AbsoluteBytePos>,
- debug_tag: &'static str)
- -> Option<T>
- where T: Decodable
+ fn load_indexed<'tcx, T>(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ dep_node_index: SerializedDepNodeIndex,
+ index: &FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,
+ debug_tag: &'static str,
+ ) -> Option<T>
+ where
+ T: Decodable,
{
let pos = index.get(&dep_node_index).cloned()?;
@@ -423,10 +425,10 @@
// current-session-CrateNum. There might be CrateNums from the previous
// Session that don't occur in the current one. For these, the mapping
// maps to None.
- fn compute_cnum_map(tcx: TyCtxt<'_, '_, '_>,
- prev_cnums: &[(u32, String, CrateDisambiguator)])
- -> IndexVec<CrateNum, Option<CrateNum>>
- {
+ fn compute_cnum_map(
+ tcx: TyCtxt<'_>,
+ prev_cnums: &[(u32, String, CrateDisambiguator)],
+ ) -> IndexVec<CrateNum, Option<CrateNum>> {
tcx.dep_graph.with_ignore(|| {
let current_cnums = tcx.all_crate_nums(LOCAL_CRATE).iter().map(|&cnum| {
let crate_name = tcx.original_crate_name(cnum)
@@ -457,18 +459,18 @@
/// A decoder that can read the incr. comp. cache. It is similar to the one
/// we use for crate metadata decoding in that it can rebase spans and
/// eventually will also handle things that contain `Ty` instances.
-struct CacheDecoder<'a, 'tcx: 'a, 'x> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- opaque: opaque::Decoder<'x>,
- source_map: &'x SourceMap,
- cnum_map: &'x IndexVec<CrateNum, Option<CrateNum>>,
- synthetic_expansion_infos: &'x Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
- file_index_to_file: &'x Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
- file_index_to_stable_id: &'x FxHashMap<SourceFileIndex, StableSourceFileId>,
- alloc_decoding_session: AllocDecodingSession<'x>,
+struct CacheDecoder<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ opaque: opaque::Decoder<'a>,
+ source_map: &'a SourceMap,
+ cnum_map: &'a IndexVec<CrateNum, Option<CrateNum>>,
+ synthetic_expansion_infos: &'a Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
+ file_index_to_file: &'a Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
+ file_index_to_stable_id: &'a FxHashMap<SourceFileIndex, StableSourceFileId>,
+ alloc_decoding_session: AllocDecodingSession<'a>,
}
-impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> {
+impl<'a, 'tcx> CacheDecoder<'a, 'tcx> {
fn file_index_to_file(&self, index: SourceFileIndex) -> Lrc<SourceFile> {
let CacheDecoder {
ref file_index_to_file,
@@ -489,13 +491,13 @@
fn position(&self) -> usize;
}
-impl<'enc> DecoderWithPosition for opaque::Decoder<'enc> {
+impl<'a> DecoderWithPosition for opaque::Decoder<'a> {
fn position(&self) -> usize {
self.position()
}
}
-impl<'a, 'tcx, 'x> DecoderWithPosition for CacheDecoder<'a, 'tcx, 'x> {
+impl<'a, 'tcx> DecoderWithPosition for CacheDecoder<'a, 'tcx> {
fn position(&self) -> usize {
self.opaque.position()
}
@@ -524,11 +526,9 @@
Ok(value)
}
-
-impl<'a, 'tcx: 'a, 'x> ty_codec::TyDecoder<'a, 'tcx> for CacheDecoder<'a, 'tcx, 'x> {
-
+impl<'a, 'tcx> ty_codec::TyDecoder<'tcx> for CacheDecoder<'a, 'tcx> {
#[inline]
- fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
@@ -584,16 +584,16 @@
}
}
-implement_ty_decoder!( CacheDecoder<'a, 'tcx, 'x> );
+implement_ty_decoder!(CacheDecoder<'a, 'tcx>);
-impl<'a, 'tcx, 'x> SpecializedDecoder<interpret::AllocId> for CacheDecoder<'a, 'tcx, 'x> {
+impl<'a, 'tcx> SpecializedDecoder<interpret::AllocId> for CacheDecoder<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
let alloc_decoding_session = self.alloc_decoding_session;
alloc_decoding_session.decode_alloc_id(self)
}
}
-impl<'a, 'tcx, 'x> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx, 'x> {
+impl<'a, 'tcx> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<Span, Self::Error> {
let tag: u8 = Decodable::decode(self)?;
@@ -656,7 +656,7 @@
// DefIndex that is not contained in a DefId. Such a case would be problematic
// because we would not know how to transform the DefIndex to the current
// context.
-impl<'a, 'tcx, 'x> SpecializedDecoder<DefIndex> for CacheDecoder<'a, 'tcx, 'x> {
+impl<'a, 'tcx> SpecializedDecoder<DefIndex> for CacheDecoder<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<DefIndex, Self::Error> {
bug!("Trying to decode DefIndex outside the context of a DefId")
}
@@ -665,7 +665,7 @@
// Both the CrateNum and the DefIndex of a DefId can change in between two
// compilation sessions. We use the DefPathHash, which is stable across
// sessions, to map the old DefId to the new one.
-impl<'a, 'tcx, 'x> SpecializedDecoder<DefId> for CacheDecoder<'a, 'tcx, 'x> {
+impl<'a, 'tcx> SpecializedDecoder<DefId> for CacheDecoder<'a, 'tcx> {
#[inline]
fn specialized_decode(&mut self) -> Result<DefId, Self::Error> {
// Load the DefPathHash which is was we encoded the DefId as.
@@ -676,14 +676,14 @@
}
}
-impl<'a, 'tcx, 'x> SpecializedDecoder<LocalDefId> for CacheDecoder<'a, 'tcx, 'x> {
+impl<'a, 'tcx> SpecializedDecoder<LocalDefId> for CacheDecoder<'a, 'tcx> {
#[inline]
fn specialized_decode(&mut self) -> Result<LocalDefId, Self::Error> {
Ok(LocalDefId::from_def_id(DefId::decode(self)?))
}
}
-impl<'a, 'tcx, 'x> SpecializedDecoder<hir::HirId> for CacheDecoder<'a, 'tcx, 'x> {
+impl<'a, 'tcx> SpecializedDecoder<hir::HirId> for CacheDecoder<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<hir::HirId, Self::Error> {
// Load the DefPathHash which is was we encoded the DefIndex as.
let def_path_hash = DefPathHash::decode(self)?;
@@ -710,7 +710,7 @@
// NodeIds are not stable across compilation sessions, so we store them in their
// HirId representation. This allows use to map them to the current NodeId.
-impl<'a, 'tcx, 'x> SpecializedDecoder<NodeId> for CacheDecoder<'a, 'tcx, 'x> {
+impl<'a, 'tcx> SpecializedDecoder<NodeId> for CacheDecoder<'a, 'tcx> {
#[inline]
fn specialized_decode(&mut self) -> Result<NodeId, Self::Error> {
let hir_id = hir::HirId::decode(self)?;
@@ -718,14 +718,15 @@
}
}
-impl<'a, 'tcx, 'x> SpecializedDecoder<Fingerprint> for CacheDecoder<'a, 'tcx, 'x> {
+impl<'a, 'tcx> SpecializedDecoder<Fingerprint> for CacheDecoder<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<Fingerprint, Self::Error> {
Fingerprint::decode_opaque(&mut self.opaque)
}
}
-impl<'a, 'tcx, 'x, T: Decodable> SpecializedDecoder<mir::ClearCrossCrate<T>>
-for CacheDecoder<'a, 'tcx, 'x> {
+impl<'a, 'tcx, T: Decodable> SpecializedDecoder<mir::ClearCrossCrate<T>>
+ for CacheDecoder<'a, 'tcx>
+{
#[inline]
fn specialized_decode(&mut self) -> Result<mir::ClearCrossCrate<T>, Self::Error> {
let discr = u8::decode(self)?;
@@ -745,12 +746,9 @@
//- ENCODING -------------------------------------------------------------------
-struct CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder,
- 'tcx: 'a,
-{
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- encoder: &'enc mut E,
+struct CacheEncoder<'a, 'tcx, E: ty_codec::TyEncoder> {
+ tcx: TyCtxt<'tcx>,
+ encoder: &'a mut E,
type_shorthands: FxHashMap<Ty<'tcx>, usize>,
predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
expn_info_shorthands: FxHashMap<Mark, AbsoluteBytePos>,
@@ -760,8 +758,9 @@
file_to_file_index: FxHashMap<*const SourceFile, SourceFileIndex>,
}
-impl<'enc, 'a, 'tcx, E> CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder
+impl<'a, 'tcx, E> CacheEncoder<'a, 'tcx, E>
+where
+ E: 'a + ty_codec::TyEncoder,
{
fn source_file_index(&mut self, source_file: Lrc<SourceFile>) -> SourceFileIndex {
self.file_to_file_index[&(&*source_file as *const SourceFile)]
@@ -787,8 +786,9 @@
}
}
-impl<'enc, 'a, 'tcx, E> SpecializedEncoder<interpret::AllocId> for CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder
+impl<'a, 'tcx, E> SpecializedEncoder<interpret::AllocId> for CacheEncoder<'a, 'tcx, E>
+where
+ E: 'a + ty_codec::TyEncoder,
{
fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
use std::collections::hash_map::Entry;
@@ -806,8 +806,9 @@
}
}
-impl<'enc, 'a, 'tcx, E> SpecializedEncoder<Span> for CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder
+impl<'a, 'tcx, E> SpecializedEncoder<Span> for CacheEncoder<'a, 'tcx, E>
+where
+ E: 'a + ty_codec::TyEncoder,
{
fn specialized_encode(&mut self, span: &Span) -> Result<(), Self::Error> {
@@ -862,8 +863,9 @@
}
}
-impl<'enc, 'a, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder
+impl<'a, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'a, 'tcx, E>
+where
+ E: 'a + ty_codec::TyEncoder,
{
#[inline]
fn position(&self) -> usize {
@@ -871,8 +873,9 @@
}
}
-impl<'enc, 'a, 'tcx, E> SpecializedEncoder<CrateNum> for CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder
+impl<'a, 'tcx, E> SpecializedEncoder<CrateNum> for CacheEncoder<'a, 'tcx, E>
+where
+ E: 'a + ty_codec::TyEncoder,
{
#[inline]
fn specialized_encode(&mut self, cnum: &CrateNum) -> Result<(), Self::Error> {
@@ -880,8 +883,9 @@
}
}
-impl<'enc, 'a, 'tcx, E> SpecializedEncoder<Ty<'tcx>> for CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder
+impl<'a, 'tcx, E> SpecializedEncoder<Ty<'tcx>> for CacheEncoder<'a, 'tcx, E>
+where
+ E: 'a + ty_codec::TyEncoder,
{
#[inline]
fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> {
@@ -890,9 +894,9 @@
}
}
-impl<'enc, 'a, 'tcx, E> SpecializedEncoder<ty::GenericPredicates<'tcx>>
- for CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder
+impl<'a, 'tcx, E> SpecializedEncoder<ty::GenericPredicates<'tcx>> for CacheEncoder<'a, 'tcx, E>
+where
+ E: 'a + ty_codec::TyEncoder,
{
#[inline]
fn specialized_encode(&mut self,
@@ -903,8 +907,9 @@
}
}
-impl<'enc, 'a, 'tcx, E> SpecializedEncoder<hir::HirId> for CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder
+impl<'a, 'tcx, E> SpecializedEncoder<hir::HirId> for CacheEncoder<'a, 'tcx, E>
+where
+ E: 'a + ty_codec::TyEncoder,
{
#[inline]
fn specialized_encode(&mut self, id: &hir::HirId) -> Result<(), Self::Error> {
@@ -920,9 +925,9 @@
}
}
-
-impl<'enc, 'a, 'tcx, E> SpecializedEncoder<DefId> for CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder
+impl<'a, 'tcx, E> SpecializedEncoder<DefId> for CacheEncoder<'a, 'tcx, E>
+where
+ E: 'a + ty_codec::TyEncoder,
{
#[inline]
fn specialized_encode(&mut self, id: &DefId) -> Result<(), Self::Error> {
@@ -931,8 +936,9 @@
}
}
-impl<'enc, 'a, 'tcx, E> SpecializedEncoder<LocalDefId> for CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder
+impl<'a, 'tcx, E> SpecializedEncoder<LocalDefId> for CacheEncoder<'a, 'tcx, E>
+where
+ E: 'a + ty_codec::TyEncoder,
{
#[inline]
fn specialized_encode(&mut self, id: &LocalDefId) -> Result<(), Self::Error> {
@@ -940,8 +946,9 @@
}
}
-impl<'enc, 'a, 'tcx, E> SpecializedEncoder<DefIndex> for CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder
+impl<'a, 'tcx, E> SpecializedEncoder<DefIndex> for CacheEncoder<'a, 'tcx, E>
+where
+ E: 'a + ty_codec::TyEncoder,
{
fn specialized_encode(&mut self, _: &DefIndex) -> Result<(), Self::Error> {
bug!("Encoding DefIndex without context.")
@@ -950,8 +957,9 @@
// NodeIds are not stable across compilation sessions, so we store them in their
// HirId representation. This allows use to map them to the current NodeId.
-impl<'enc, 'a, 'tcx, E> SpecializedEncoder<NodeId> for CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder
+impl<'a, 'tcx, E> SpecializedEncoder<NodeId> for CacheEncoder<'a, 'tcx, E>
+where
+ E: 'a + ty_codec::TyEncoder,
{
#[inline]
fn specialized_encode(&mut self, node_id: &NodeId) -> Result<(), Self::Error> {
@@ -960,18 +968,16 @@
}
}
-impl<'enc, 'a, 'tcx> SpecializedEncoder<Fingerprint>
-for CacheEncoder<'enc, 'a, 'tcx, opaque::Encoder>
-{
+impl<'a, 'tcx> SpecializedEncoder<Fingerprint> for CacheEncoder<'a, 'tcx, opaque::Encoder> {
fn specialized_encode(&mut self, f: &Fingerprint) -> Result<(), Self::Error> {
f.encode_opaque(&mut self.encoder)
}
}
-impl<'enc, 'a, 'tcx, E, T> SpecializedEncoder<mir::ClearCrossCrate<T>>
-for CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder,
- T: Encodable,
+impl<'a, 'tcx, E, T> SpecializedEncoder<mir::ClearCrossCrate<T>> for CacheEncoder<'a, 'tcx, E>
+where
+ E: 'a + ty_codec::TyEncoder,
+ T: Encodable,
{
#[inline]
fn specialized_encode(&mut self,
@@ -997,8 +1003,9 @@
}
}
-impl<'enc, 'a, 'tcx, E> Encoder for CacheEncoder<'enc, 'a, 'tcx, E>
- where E: 'enc + ty_codec::TyEncoder
+impl<'a, 'tcx, E> Encoder for CacheEncoder<'a, 'tcx, E>
+where
+ E: 'a + ty_codec::TyEncoder,
{
type Error = E::Error;
@@ -1051,7 +1058,7 @@
}
}
-impl<'enc> SpecializedDecoder<IntEncodedWithFixedSize> for opaque::Decoder<'enc> {
+impl<'a> SpecializedDecoder<IntEncodedWithFixedSize> for opaque::Decoder<'a> {
fn specialized_decode(&mut self) -> Result<IntEncodedWithFixedSize, Self::Error> {
let mut value: u64 = 0;
let start_pos = self.position();
@@ -1068,13 +1075,15 @@
}
}
-fn encode_query_results<'enc, 'a, 'tcx, Q, E>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- encoder: &mut CacheEncoder<'enc, 'a, 'tcx, E>,
- query_result_index: &mut EncodedQueryResultIndex)
- -> Result<(), E::Error>
- where Q: super::config::QueryDescription<'tcx>,
- E: 'enc + TyEncoder,
- Q::Value: Encodable,
+fn encode_query_results<'a, 'tcx, Q, E>(
+ tcx: TyCtxt<'tcx>,
+ encoder: &mut CacheEncoder<'a, 'tcx, E>,
+ query_result_index: &mut EncodedQueryResultIndex,
+) -> Result<(), E::Error>
+where
+ Q: super::config::QueryDescription<'tcx>,
+ E: 'a + TyEncoder,
+ Q::Value: Encodable,
{
let desc = &format!("encode_query_results for {}",
unsafe { ::std::intrinsics::type_name::<Q>() });
diff --git a/src/librustc/ty/query/plumbing.rs b/src/librustc/ty/query/plumbing.rs
index c2760cc..48e6816 100644
--- a/src/librustc/ty/query/plumbing.rs
+++ b/src/librustc/ty/query/plumbing.rs
@@ -104,11 +104,7 @@
/// This function is inlined because that results in a noticeable speed-up
/// for some compile-time benchmarks.
#[inline(always)]
- pub(super) fn try_get(
- tcx: TyCtxt<'a, 'tcx, '_>,
- span: Span,
- key: &Q::Key,
- ) -> TryGetJob<'a, 'tcx, Q> {
+ pub(super) fn try_get(tcx: TyCtxt<'tcx>, span: Span, key: &Q::Key) -> TryGetJob<'a, 'tcx, Q> {
let cache = Q::query_cache(tcx);
loop {
let mut lock = cache.borrow_mut();
@@ -247,22 +243,22 @@
Cycle(D::Value),
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
/// Executes a job by changing the ImplicitCtxt to point to the
/// new query job while it executes. It returns the diagnostics
/// captured during execution and the actual result.
#[inline(always)]
pub(super) fn start_query<F, R>(
self,
- job: Lrc<QueryJob<'gcx>>,
+ job: Lrc<QueryJob<'tcx>>,
diagnostics: Option<&Lock<ThinVec<Diagnostic>>>,
- compute: F)
- -> R
+ compute: F,
+ ) -> R
where
- F: for<'b, 'lcx> FnOnce(TyCtxt<'b, 'gcx, 'lcx>) -> R
+ F: FnOnce(TyCtxt<'tcx>) -> R,
{
// The TyCtxt stored in TLS has the same global interner lifetime
- // as `self`, so we use `with_related_context` to relate the 'gcx lifetimes
+ // as `self`, so we use `with_related_context` to relate the 'tcx lifetimes
// when accessing the ImplicitCtxt
tls::with_related_context(self, move |current_icx| {
// Update the ImplicitCtxt to point to our new query job
@@ -285,12 +281,11 @@
#[cold]
pub(super) fn report_cycle(
self,
- CycleError { usage, cycle: stack }: CycleError<'gcx>
- ) -> DiagnosticBuilder<'a>
- {
+ CycleError { usage, cycle: stack }: CycleError<'tcx>,
+ ) -> DiagnosticBuilder<'tcx> {
assert!(!stack.is_empty());
- let fix_span = |span: Span, query: &Query<'gcx>| {
+ let fix_span = |span: Span, query: &Query<'tcx>| {
self.sess.source_map().def_span(query.default_span(self, span))
};
@@ -352,11 +347,7 @@
}
#[inline(never)]
- pub(super) fn get_query<Q: QueryDescription<'gcx>>(
- self,
- span: Span,
- key: Q::Key)
- -> Q::Value {
+ pub(super) fn get_query<Q: QueryDescription<'tcx>>(self, span: Span, key: Q::Key) -> Q::Value {
debug!("ty::query::get_query<{}>(key={:?}, span={:?})",
Q::NAME.as_str(),
key,
@@ -440,14 +431,13 @@
result
}
- fn load_from_disk_and_cache_in_memory<Q: QueryDescription<'gcx>>(
+ fn load_from_disk_and_cache_in_memory<Q: QueryDescription<'tcx>>(
self,
key: Q::Key,
prev_dep_node_index: SerializedDepNodeIndex,
dep_node_index: DepNodeIndex,
- dep_node: &DepNode
- ) -> Q::Value
- {
+ dep_node: &DepNode,
+ ) -> Q::Value {
// Note this function can be called concurrently from the same query
// We must ensure that this is handled correctly
@@ -508,7 +498,7 @@
#[inline(never)]
#[cold]
- fn incremental_verify_ich<Q: QueryDescription<'gcx>>(
+ fn incremental_verify_ich<Q: QueryDescription<'tcx>>(
self,
result: &Q::Value,
dep_node: &DepNode,
@@ -534,12 +524,12 @@
}
#[inline(always)]
- fn force_query_with_job<Q: QueryDescription<'gcx>>(
+ fn force_query_with_job<Q: QueryDescription<'tcx>>(
self,
key: Q::Key,
- job: JobOwner<'_, 'gcx, Q>,
- dep_node: DepNode)
- -> (Q::Value, DepNodeIndex) {
+ job: JobOwner<'_, 'tcx, Q>,
+ dep_node: DepNode,
+ ) -> (Q::Value, DepNodeIndex) {
// If the following assertion triggers, it can have two reasons:
// 1. Something is wrong with DepNode creation, either here or
// in DepGraph::try_mark_green()
@@ -598,7 +588,7 @@
/// side-effects -- e.g., in order to report errors for erroneous programs.
///
/// Note: The optimization is only available during incr. comp.
- pub(super) fn ensure_query<Q: QueryDescription<'gcx>>(self, key: Q::Key) -> () {
+ pub(super) fn ensure_query<Q: QueryDescription<'tcx>>(self, key: Q::Key) -> () {
let dep_node = Q::to_dep_node(self, &key);
if dep_node.kind.is_eval_always() {
@@ -624,12 +614,7 @@
}
#[allow(dead_code)]
- fn force_query<Q: QueryDescription<'gcx>>(
- self,
- key: Q::Key,
- span: Span,
- dep_node: DepNode
- ) {
+ fn force_query<Q: QueryDescription<'tcx>>(self, key: Q::Key, span: Span, dep_node: DepNode) {
profq_msg!(
self,
ProfileQueriesMsg::QueryBegin(span.data(),
@@ -874,7 +859,7 @@
}
}
- pub fn describe(&self, tcx: TyCtxt<'_, '_, '_>) -> Cow<'static, str> {
+ pub fn describe(&self, tcx: TyCtxt<'_>) -> Cow<'static, str> {
let (r, name) = match *self {
$(Query::$name(key) => {
(queries::$name::describe(tcx, key), stringify!($name))
@@ -888,7 +873,7 @@
}
// FIXME(eddyb) Get more valid Span's on queries.
- pub fn default_span(&self, tcx: TyCtxt<'_, $tcx, '_>, span: Span) -> Span {
+ pub fn default_span(&self, tcx: TyCtxt<$tcx>, span: Span) -> Span {
if !span.is_dummy() {
return span;
}
@@ -954,20 +939,20 @@
}
#[inline(always)]
- fn query_cache<'a>(tcx: TyCtxt<'a, $tcx, '_>) -> &'a Lock<QueryCache<$tcx, Self>> {
+ fn query_cache<'a>(tcx: TyCtxt<$tcx>) -> &'a Lock<QueryCache<$tcx, Self>> {
&tcx.queries.$name
}
#[allow(unused)]
#[inline(always)]
- fn to_dep_node(tcx: TyCtxt<'_, $tcx, '_>, key: &Self::Key) -> DepNode {
+ fn to_dep_node(tcx: TyCtxt<$tcx>, key: &Self::Key) -> DepNode {
use crate::dep_graph::DepConstructor::*;
DepNode::new(tcx, $node(*key))
}
#[inline]
- fn compute(tcx: TyCtxt<'_, 'tcx, '_>, key: Self::Key) -> Self::Value {
+ fn compute(tcx: TyCtxt<'tcx>, key: Self::Key) -> Self::Value {
__query_compute::$name(move || {
let provider = tcx.queries.providers.get(key.query_crate())
// HACK(eddyb) it's possible crates may be loaded after
@@ -988,7 +973,7 @@
}
fn handle_cycle_error(
- tcx: TyCtxt<'_, 'tcx, '_>,
+ tcx: TyCtxt<'tcx>,
error: CycleError<'tcx>
) -> Self::Value {
handle_cycle_error!([$($modifiers)*][tcx, error])
@@ -996,11 +981,11 @@
})*
#[derive(Copy, Clone)]
- pub struct TyCtxtEnsure<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ pub struct TyCtxtEnsure<'tcx> {
+ pub tcx: TyCtxt<'tcx>,
}
- impl<'a, $tcx, 'lcx> TyCtxtEnsure<'a, $tcx, 'lcx> {
+ impl TyCtxtEnsure<$tcx> {
$($(#[$attr])*
#[inline(always)]
pub fn $name(self, key: $K) {
@@ -1009,24 +994,24 @@
}
#[derive(Copy, Clone)]
- pub struct TyCtxtAt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ pub struct TyCtxtAt<'tcx> {
+ pub tcx: TyCtxt<'tcx>,
pub span: Span,
}
- impl<'a, 'gcx, 'tcx> Deref for TyCtxtAt<'a, 'gcx, 'tcx> {
- type Target = TyCtxt<'a, 'gcx, 'tcx>;
+ impl Deref for TyCtxtAt<'tcx> {
+ type Target = TyCtxt<'tcx>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.tcx
}
}
- impl<'a, $tcx, 'lcx> TyCtxt<'a, $tcx, 'lcx> {
+ impl TyCtxt<$tcx> {
/// Returns a transparent wrapper for `TyCtxt`, which ensures queries
/// are executed instead of just returing their results.
#[inline(always)]
- pub fn ensure(self) -> TyCtxtEnsure<'a, $tcx, 'lcx> {
+ pub fn ensure(self) -> TyCtxtEnsure<$tcx> {
TyCtxtEnsure {
tcx: self,
}
@@ -1035,7 +1020,7 @@
/// Returns a transparent wrapper for `TyCtxt` which uses
/// `span` as the location of queries performed through it.
#[inline(always)]
- pub fn at(self, span: Span) -> TyCtxtAt<'a, $tcx, 'lcx> {
+ pub fn at(self, span: Span) -> TyCtxtAt<$tcx> {
TyCtxtAt {
tcx: self,
span
@@ -1049,7 +1034,7 @@
})*
}
- impl<'a, $tcx, 'lcx> TyCtxtAt<'a, $tcx, 'lcx> {
+ impl TyCtxtAt<$tcx> {
$($(#[$attr])*
#[inline(always)]
pub fn $name(self, key: $K) -> $V {
@@ -1090,12 +1075,12 @@
(tcx: $tcx:tt,
input: ($(([$($modifiers:tt)*] [$name:ident] [$K:ty] [$R:ty]))*)) => {
pub struct Providers<$tcx> {
- $(pub $name: for<'a> fn(TyCtxt<'a, $tcx, $tcx>, $K) -> $R,)*
+ $(pub $name: fn(TyCtxt<$tcx>, $K) -> $R,)*
}
impl<$tcx> Default for Providers<$tcx> {
fn default() -> Self {
- $(fn $name<'a, $tcx>(_: TyCtxt<'a, $tcx, $tcx>, key: $K) -> $R {
+ $(fn $name<$tcx>(_: TyCtxt<$tcx>, key: $K) -> $R {
bug!("tcx.{}({:?}) unsupported by its crate",
stringify!($name), key);
})*
@@ -1148,10 +1133,7 @@
/// then `force_from_dep_node()` should not fail for it. Otherwise, you can just
/// add it to the "We don't have enough information to reconstruct..." group in
/// the match below.
-pub fn force_from_dep_node<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
- dep_node: &DepNode
-) -> bool {
+pub fn force_from_dep_node<'tcx>(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> bool {
use crate::dep_graph::RecoverKey;
// We must avoid ever having to call force_from_dep_node() for a
@@ -1237,7 +1219,7 @@
impl DepNode {
// Check whether the query invocation corresponding to the given
// DepNode is eligible for on-disk-caching.
- pub fn cache_on_disk(&self, tcx: TyCtxt<'_, '_, '_>) -> bool {
+ pub fn cache_on_disk(&self, tcx: TyCtxt<'_>) -> bool {
use crate::ty::query::queries;
use crate::ty::query::QueryDescription;
@@ -1255,7 +1237,7 @@
// above `cache_on_disk` methods returns true.
// Also, as a sanity check, it expects that the corresponding query
// invocation has been marked as green already.
- pub fn load_from_on_disk_cache(&self, tcx: TyCtxt<'_, '_, '_>) {
+ pub fn load_from_on_disk_cache(&self, tcx: TyCtxt<'_>) {
match self.kind {
$(DepKind::$dep_kind => {
debug_assert!(tcx.dep_graph
diff --git a/src/librustc/ty/query/values.rs b/src/librustc/ty/query/values.rs
index 01d431b..0149f75 100644
--- a/src/librustc/ty/query/values.rs
+++ b/src/librustc/ty/query/values.rs
@@ -4,36 +4,36 @@
use syntax::symbol::InternedString;
pub(super) trait Value<'tcx>: Sized {
- fn from_cycle_error<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self;
+ fn from_cycle_error(tcx: TyCtxt<'tcx>) -> Self;
}
impl<'tcx, T> Value<'tcx> for T {
- default fn from_cycle_error<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> T {
+ default fn from_cycle_error(tcx: TyCtxt<'tcx>) -> T {
tcx.sess.abort_if_errors();
bug!("Value::from_cycle_error called without errors");
}
}
impl<'tcx> Value<'tcx> for Ty<'tcx> {
- fn from_cycle_error<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> {
+ fn from_cycle_error(tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
tcx.types.err
}
}
impl<'tcx> Value<'tcx> for ty::SymbolName {
- fn from_cycle_error<'a>(_: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
+ fn from_cycle_error(_: TyCtxt<'tcx>) -> Self {
ty::SymbolName { name: InternedString::intern("<error>") }
}
}
impl<'tcx> Value<'tcx> for NeedsDrop {
- fn from_cycle_error(_: TyCtxt<'_, 'tcx, 'tcx>) -> Self {
+ fn from_cycle_error(_: TyCtxt<'tcx>) -> Self {
NeedsDrop(false)
}
}
impl<'tcx> Value<'tcx> for AdtSizedConstraint<'tcx> {
- fn from_cycle_error(tcx: TyCtxt<'_, 'tcx, 'tcx>) -> Self {
+ fn from_cycle_error(tcx: TyCtxt<'tcx>) -> Self {
AdtSizedConstraint(tcx.intern_type_list(&[tcx.types.err]))
}
}
diff --git a/src/librustc/ty/relate.rs b/src/librustc/ty/relate.rs
index 0440be1..98fd5d1 100644
--- a/src/librustc/ty/relate.rs
+++ b/src/librustc/ty/relate.rs
@@ -22,8 +22,8 @@
ExistentialRegionBound, // relating an existential region bound
}
-pub trait TypeRelation<'a, 'gcx: 'a+'tcx, 'tcx: 'a> : Sized {
- fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx>;
+pub trait TypeRelation<'tcx>: Sized {
+ fn tcx(&self) -> TyCtxt<'tcx>;
/// Returns a static string we can use for printouts.
fn tag(&self) -> &'static str;
@@ -94,21 +94,22 @@
}
pub trait Relate<'tcx>: TypeFoldable<'tcx> {
- fn relate<'a, 'gcx, R>(relation: &mut R, a: &Self, b: &Self)
- -> RelateResult<'tcx, Self>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a;
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &Self,
+ b: &Self,
+ ) -> RelateResult<'tcx, Self>;
}
///////////////////////////////////////////////////////////////////////////
// Relate impls
impl<'tcx> Relate<'tcx> for ty::TypeAndMut<'tcx> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &ty::TypeAndMut<'tcx>,
- b: &ty::TypeAndMut<'tcx>)
- -> RelateResult<'tcx, ty::TypeAndMut<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &ty::TypeAndMut<'tcx>,
+ b: &ty::TypeAndMut<'tcx>,
+ ) -> RelateResult<'tcx, ty::TypeAndMut<'tcx>> {
debug!("{}.mts({:?}, {:?})",
relation.tag(),
a,
@@ -127,13 +128,12 @@
}
}
-pub fn relate_substs<'a, 'gcx, 'tcx, R>(relation: &mut R,
- variances: Option<&[ty::Variance]>,
- a_subst: SubstsRef<'tcx>,
- b_subst: SubstsRef<'tcx>)
- -> RelateResult<'tcx, SubstsRef<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
-{
+pub fn relate_substs<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ variances: Option<&[ty::Variance]>,
+ a_subst: SubstsRef<'tcx>,
+ b_subst: SubstsRef<'tcx>,
+) -> RelateResult<'tcx, SubstsRef<'tcx>> {
let tcx = relation.tcx();
let params = a_subst.iter().zip(b_subst).enumerate().map(|(i, (a, b))| {
@@ -145,12 +145,11 @@
}
impl<'tcx> Relate<'tcx> for ty::FnSig<'tcx> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &ty::FnSig<'tcx>,
- b: &ty::FnSig<'tcx>)
- -> RelateResult<'tcx, ty::FnSig<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &ty::FnSig<'tcx>,
+ b: &ty::FnSig<'tcx>,
+ ) -> RelateResult<'tcx, ty::FnSig<'tcx>> {
let tcx = relation.tcx();
if a.c_variadic != b.c_variadic {
@@ -185,12 +184,11 @@
}
impl<'tcx> Relate<'tcx> for ast::Unsafety {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &ast::Unsafety,
- b: &ast::Unsafety)
- -> RelateResult<'tcx, ast::Unsafety>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &ast::Unsafety,
+ b: &ast::Unsafety,
+ ) -> RelateResult<'tcx, ast::Unsafety> {
if a != b {
Err(TypeError::UnsafetyMismatch(expected_found(relation, a, b)))
} else {
@@ -200,12 +198,11 @@
}
impl<'tcx> Relate<'tcx> for abi::Abi {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &abi::Abi,
- b: &abi::Abi)
- -> RelateResult<'tcx, abi::Abi>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &abi::Abi,
+ b: &abi::Abi,
+ ) -> RelateResult<'tcx, abi::Abi> {
if a == b {
Ok(*a)
} else {
@@ -215,12 +212,11 @@
}
impl<'tcx> Relate<'tcx> for ty::ProjectionTy<'tcx> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &ty::ProjectionTy<'tcx>,
- b: &ty::ProjectionTy<'tcx>)
- -> RelateResult<'tcx, ty::ProjectionTy<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &ty::ProjectionTy<'tcx>,
+ b: &ty::ProjectionTy<'tcx>,
+ ) -> RelateResult<'tcx, ty::ProjectionTy<'tcx>> {
if a.item_def_id != b.item_def_id {
Err(TypeError::ProjectionMismatched(
expected_found(relation, &a.item_def_id, &b.item_def_id)))
@@ -235,12 +231,11 @@
}
impl<'tcx> Relate<'tcx> for ty::ExistentialProjection<'tcx> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &ty::ExistentialProjection<'tcx>,
- b: &ty::ExistentialProjection<'tcx>)
- -> RelateResult<'tcx, ty::ExistentialProjection<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &ty::ExistentialProjection<'tcx>,
+ b: &ty::ExistentialProjection<'tcx>,
+ ) -> RelateResult<'tcx, ty::ExistentialProjection<'tcx>> {
if a.item_def_id != b.item_def_id {
Err(TypeError::ProjectionMismatched(
expected_found(relation, &a.item_def_id, &b.item_def_id)))
@@ -257,12 +252,11 @@
}
impl<'tcx> Relate<'tcx> for Vec<ty::PolyExistentialProjection<'tcx>> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &Vec<ty::PolyExistentialProjection<'tcx>>,
- b: &Vec<ty::PolyExistentialProjection<'tcx>>)
- -> RelateResult<'tcx, Vec<ty::PolyExistentialProjection<'tcx>>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &Vec<ty::PolyExistentialProjection<'tcx>>,
+ b: &Vec<ty::PolyExistentialProjection<'tcx>>,
+ ) -> RelateResult<'tcx, Vec<ty::PolyExistentialProjection<'tcx>>> {
// To be compatible, `a` and `b` must be for precisely the
// same set of traits and item names. We always require that
// projection bounds lists are sorted by trait-def-id and item-name,
@@ -280,12 +274,11 @@
}
impl<'tcx> Relate<'tcx> for ty::TraitRef<'tcx> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &ty::TraitRef<'tcx>,
- b: &ty::TraitRef<'tcx>)
- -> RelateResult<'tcx, ty::TraitRef<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &ty::TraitRef<'tcx>,
+ b: &ty::TraitRef<'tcx>,
+ ) -> RelateResult<'tcx, ty::TraitRef<'tcx>> {
// Different traits cannot be related
if a.def_id != b.def_id {
Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id)))
@@ -297,12 +290,11 @@
}
impl<'tcx> Relate<'tcx> for ty::ExistentialTraitRef<'tcx> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &ty::ExistentialTraitRef<'tcx>,
- b: &ty::ExistentialTraitRef<'tcx>)
- -> RelateResult<'tcx, ty::ExistentialTraitRef<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &ty::ExistentialTraitRef<'tcx>,
+ b: &ty::ExistentialTraitRef<'tcx>,
+ ) -> RelateResult<'tcx, ty::ExistentialTraitRef<'tcx>> {
// Different traits cannot be related
if a.def_id != b.def_id {
Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id)))
@@ -323,12 +315,11 @@
}
impl<'tcx> Relate<'tcx> for GeneratorWitness<'tcx> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &GeneratorWitness<'tcx>,
- b: &GeneratorWitness<'tcx>)
- -> RelateResult<'tcx, GeneratorWitness<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &GeneratorWitness<'tcx>,
+ b: &GeneratorWitness<'tcx>,
+ ) -> RelateResult<'tcx, GeneratorWitness<'tcx>> {
assert_eq!(a.0.len(), b.0.len());
let tcx = relation.tcx();
let types = tcx.mk_type_list(a.0.iter().zip(b.0).map(|(a, b)| relation.relate(a, b)))?;
@@ -337,12 +328,11 @@
}
impl<'tcx> Relate<'tcx> for Ty<'tcx> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &Ty<'tcx>,
- b: &Ty<'tcx>)
- -> RelateResult<'tcx, Ty<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &Ty<'tcx>,
+ b: &Ty<'tcx>,
+ ) -> RelateResult<'tcx, Ty<'tcx>> {
relation.tys(a, b)
}
}
@@ -350,12 +340,11 @@
/// The main "type relation" routine. Note that this does not handle
/// inference artifacts, so you should filter those out before calling
/// it.
-pub fn super_relate_tys<'a, 'gcx, 'tcx, R>(relation: &mut R,
- a: Ty<'tcx>,
- b: Ty<'tcx>)
- -> RelateResult<'tcx, Ty<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
-{
+pub fn super_relate_tys<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: Ty<'tcx>,
+ b: Ty<'tcx>,
+) -> RelateResult<'tcx, Ty<'tcx>> {
let tcx = relation.tcx();
debug!("super_relate_tys: a={:?} b={:?}", a, b);
match (&a.sty, &b.sty) {
@@ -550,14 +539,11 @@
/// The main "const relation" routine. Note that this does not handle
/// inference artifacts, so you should filter those out before calling
/// it.
-pub fn super_relate_consts<'a, 'gcx, 'tcx, R>(
+pub fn super_relate_consts<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &'tcx ty::Const<'tcx>,
- b: &'tcx ty::Const<'tcx>
-) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>>
-where
- R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
-{
+ b: &'tcx ty::Const<'tcx>,
+) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> {
let tcx = relation.tcx();
let eagerly_eval = |x: &'tcx ty::Const<'tcx>| {
@@ -632,12 +618,11 @@
}
impl<'tcx> Relate<'tcx> for &'tcx ty::List<ty::ExistentialPredicate<'tcx>> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &Self,
- b: &Self)
- -> RelateResult<'tcx, Self>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a {
-
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &Self,
+ b: &Self,
+ ) -> RelateResult<'tcx, Self> {
if a.len() != b.len() {
return Err(TypeError::ExistentialMismatch(expected_found(relation, a, b)));
}
@@ -657,80 +642,73 @@
}
impl<'tcx> Relate<'tcx> for ty::ClosureSubsts<'tcx> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &ty::ClosureSubsts<'tcx>,
- b: &ty::ClosureSubsts<'tcx>)
- -> RelateResult<'tcx, ty::ClosureSubsts<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &ty::ClosureSubsts<'tcx>,
+ b: &ty::ClosureSubsts<'tcx>,
+ ) -> RelateResult<'tcx, ty::ClosureSubsts<'tcx>> {
let substs = relate_substs(relation, None, a.substs, b.substs)?;
Ok(ty::ClosureSubsts { substs })
}
}
impl<'tcx> Relate<'tcx> for ty::GeneratorSubsts<'tcx> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &ty::GeneratorSubsts<'tcx>,
- b: &ty::GeneratorSubsts<'tcx>)
- -> RelateResult<'tcx, ty::GeneratorSubsts<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &ty::GeneratorSubsts<'tcx>,
+ b: &ty::GeneratorSubsts<'tcx>,
+ ) -> RelateResult<'tcx, ty::GeneratorSubsts<'tcx>> {
let substs = relate_substs(relation, None, a.substs, b.substs)?;
Ok(ty::GeneratorSubsts { substs })
}
}
impl<'tcx> Relate<'tcx> for SubstsRef<'tcx> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &SubstsRef<'tcx>,
- b: &SubstsRef<'tcx>)
- -> RelateResult<'tcx, SubstsRef<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &SubstsRef<'tcx>,
+ b: &SubstsRef<'tcx>,
+ ) -> RelateResult<'tcx, SubstsRef<'tcx>> {
relate_substs(relation, None, a, b)
}
}
impl<'tcx> Relate<'tcx> for ty::Region<'tcx> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &ty::Region<'tcx>,
- b: &ty::Region<'tcx>)
- -> RelateResult<'tcx, ty::Region<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &ty::Region<'tcx>,
+ b: &ty::Region<'tcx>,
+ ) -> RelateResult<'tcx, ty::Region<'tcx>> {
relation.regions(*a, *b)
}
}
impl<'tcx> Relate<'tcx> for &'tcx ty::Const<'tcx> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &&'tcx ty::Const<'tcx>,
- b: &&'tcx ty::Const<'tcx>)
- -> RelateResult<'tcx, &'tcx ty::Const<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &&'tcx ty::Const<'tcx>,
+ b: &&'tcx ty::Const<'tcx>,
+ ) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> {
relation.consts(*a, *b)
}
}
impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for ty::Binder<T> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &ty::Binder<T>,
- b: &ty::Binder<T>)
- -> RelateResult<'tcx, ty::Binder<T>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &ty::Binder<T>,
+ b: &ty::Binder<T>,
+ ) -> RelateResult<'tcx, ty::Binder<T>> {
relation.binders(a, b)
}
}
impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for Rc<T> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &Rc<T>,
- b: &Rc<T>)
- -> RelateResult<'tcx, Rc<T>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &Rc<T>,
+ b: &Rc<T>,
+ ) -> RelateResult<'tcx, Rc<T>> {
let a: &T = a;
let b: &T = b;
Ok(Rc::new(relation.relate(a, b)?))
@@ -738,12 +716,11 @@
}
impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for Box<T> {
- fn relate<'a, 'gcx, R>(relation: &mut R,
- a: &Box<T>,
- b: &Box<T>)
- -> RelateResult<'tcx, Box<T>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
- {
+ fn relate<R: TypeRelation<'tcx>>(
+ relation: &mut R,
+ a: &Box<T>,
+ b: &Box<T>,
+ ) -> RelateResult<'tcx, Box<T>> {
let a: &T = a;
let b: &T = b;
Ok(Box::new(relation.relate(a, b)?))
@@ -751,14 +728,11 @@
}
impl<'tcx> Relate<'tcx> for Kind<'tcx> {
- fn relate<'a, 'gcx, R>(
+ fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &Kind<'tcx>,
- b: &Kind<'tcx>
- ) -> RelateResult<'tcx, Kind<'tcx>>
- where
- R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a,
- {
+ b: &Kind<'tcx>,
+ ) -> RelateResult<'tcx, Kind<'tcx>> {
match (a.unpack(), b.unpack()) {
(UnpackedKind::Lifetime(a_lt), UnpackedKind::Lifetime(b_lt)) => {
Ok(relation.relate(&a_lt, &b_lt)?.into())
@@ -783,13 +757,11 @@
}
impl<'tcx> Relate<'tcx> for ty::TraitPredicate<'tcx> {
- fn relate<'a, 'gcx, R>(
+ fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &ty::TraitPredicate<'tcx>,
- b: &ty::TraitPredicate<'tcx>
- ) -> RelateResult<'tcx, ty::TraitPredicate<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a
- {
+ b: &ty::TraitPredicate<'tcx>,
+ ) -> RelateResult<'tcx, ty::TraitPredicate<'tcx>> {
Ok(ty::TraitPredicate {
trait_ref: relation.relate(&a.trait_ref, &b.trait_ref)?,
})
@@ -797,13 +769,11 @@
}
impl<'tcx> Relate<'tcx> for ty::ProjectionPredicate<'tcx> {
- fn relate<'a, 'gcx, R>(
+ fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &ty::ProjectionPredicate<'tcx>,
b: &ty::ProjectionPredicate<'tcx>,
- ) -> RelateResult<'tcx, ty::ProjectionPredicate<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a
- {
+ ) -> RelateResult<'tcx, ty::ProjectionPredicate<'tcx>> {
Ok(ty::ProjectionPredicate {
projection_ty: relation.relate(&a.projection_ty, &b.projection_ty)?,
ty: relation.relate(&a.ty, &b.ty)?,
@@ -812,13 +782,11 @@
}
impl<'tcx> Relate<'tcx> for traits::WhereClause<'tcx> {
- fn relate<'a, 'gcx, R>(
+ fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &traits::WhereClause<'tcx>,
- b: &traits::WhereClause<'tcx>
- ) -> RelateResult<'tcx, traits::WhereClause<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a
- {
+ b: &traits::WhereClause<'tcx>,
+ ) -> RelateResult<'tcx, traits::WhereClause<'tcx>> {
use crate::traits::WhereClause::*;
match (a, b) {
(Implemented(a_pred), Implemented(b_pred)) => {
@@ -849,13 +817,11 @@
}
impl<'tcx> Relate<'tcx> for traits::WellFormed<'tcx> {
- fn relate<'a, 'gcx, R>(
+ fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &traits::WellFormed<'tcx>,
- b: &traits::WellFormed<'tcx>
- ) -> RelateResult<'tcx, traits::WellFormed<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a
- {
+ b: &traits::WellFormed<'tcx>,
+ ) -> RelateResult<'tcx, traits::WellFormed<'tcx>> {
use crate::traits::WellFormed::*;
match (a, b) {
(Trait(a_pred), Trait(b_pred)) => Ok(Trait(relation.relate(a_pred, b_pred)?)),
@@ -866,13 +832,11 @@
}
impl<'tcx> Relate<'tcx> for traits::FromEnv<'tcx> {
- fn relate<'a, 'gcx, R>(
+ fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &traits::FromEnv<'tcx>,
- b: &traits::FromEnv<'tcx>
- ) -> RelateResult<'tcx, traits::FromEnv<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a
- {
+ b: &traits::FromEnv<'tcx>,
+ ) -> RelateResult<'tcx, traits::FromEnv<'tcx>> {
use crate::traits::FromEnv::*;
match (a, b) {
(Trait(a_pred), Trait(b_pred)) => Ok(Trait(relation.relate(a_pred, b_pred)?)),
@@ -883,13 +847,11 @@
}
impl<'tcx> Relate<'tcx> for traits::DomainGoal<'tcx> {
- fn relate<'a, 'gcx, R>(
+ fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &traits::DomainGoal<'tcx>,
- b: &traits::DomainGoal<'tcx>
- ) -> RelateResult<'tcx, traits::DomainGoal<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a
- {
+ b: &traits::DomainGoal<'tcx>,
+ ) -> RelateResult<'tcx, traits::DomainGoal<'tcx>> {
use crate::traits::DomainGoal::*;
match (a, b) {
(Holds(a_wc), Holds(b_wc)) => Ok(Holds(relation.relate(a_wc, b_wc)?)),
@@ -906,13 +868,11 @@
}
impl<'tcx> Relate<'tcx> for traits::Goal<'tcx> {
- fn relate<'a, 'gcx, R>(
+ fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &traits::Goal<'tcx>,
- b: &traits::Goal<'tcx>
- ) -> RelateResult<'tcx, traits::Goal<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a
- {
+ b: &traits::Goal<'tcx>,
+ ) -> RelateResult<'tcx, traits::Goal<'tcx>> {
use crate::traits::GoalKind::*;
match (a, b) {
(Implies(a_clauses, a_goal), Implies(b_clauses, b_goal)) => {
@@ -952,13 +912,11 @@
}
impl<'tcx> Relate<'tcx> for traits::Goals<'tcx> {
- fn relate<'a, 'gcx, R>(
+ fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &traits::Goals<'tcx>,
- b: &traits::Goals<'tcx>
- ) -> RelateResult<'tcx, traits::Goals<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a
- {
+ b: &traits::Goals<'tcx>,
+ ) -> RelateResult<'tcx, traits::Goals<'tcx>> {
if a.len() != b.len() {
return Err(TypeError::Mismatch);
}
@@ -970,13 +928,11 @@
}
impl<'tcx> Relate<'tcx> for traits::Clause<'tcx> {
- fn relate<'a, 'gcx, R>(
+ fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &traits::Clause<'tcx>,
- b: &traits::Clause<'tcx>
- ) -> RelateResult<'tcx, traits::Clause<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a
- {
+ b: &traits::Clause<'tcx>,
+ ) -> RelateResult<'tcx, traits::Clause<'tcx>> {
use crate::traits::Clause::*;
match (a, b) {
(Implies(a_clause), Implies(b_clause)) => {
@@ -995,13 +951,11 @@
}
impl<'tcx> Relate<'tcx> for traits::Clauses<'tcx> {
- fn relate<'a, 'gcx, R>(
+ fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &traits::Clauses<'tcx>,
- b: &traits::Clauses<'tcx>
- ) -> RelateResult<'tcx, traits::Clauses<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a
- {
+ b: &traits::Clauses<'tcx>,
+ ) -> RelateResult<'tcx, traits::Clauses<'tcx>> {
if a.len() != b.len() {
return Err(TypeError::Mismatch);
}
@@ -1013,13 +967,11 @@
}
impl<'tcx> Relate<'tcx> for traits::ProgramClause<'tcx> {
- fn relate<'a, 'gcx, R>(
+ fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &traits::ProgramClause<'tcx>,
- b: &traits::ProgramClause<'tcx>
- ) -> RelateResult<'tcx, traits::ProgramClause<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a
- {
+ b: &traits::ProgramClause<'tcx>,
+ ) -> RelateResult<'tcx, traits::ProgramClause<'tcx>> {
Ok(traits::ProgramClause {
goal: relation.relate(&a.goal, &b.goal)?,
hypotheses: relation.relate(&a.hypotheses, &b.hypotheses)?,
@@ -1029,13 +981,11 @@
}
impl<'tcx> Relate<'tcx> for traits::Environment<'tcx> {
- fn relate<'a, 'gcx, R>(
+ fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &traits::Environment<'tcx>,
- b: &traits::Environment<'tcx>
- ) -> RelateResult<'tcx, traits::Environment<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a
- {
+ b: &traits::Environment<'tcx>,
+ ) -> RelateResult<'tcx, traits::Environment<'tcx>> {
Ok(traits::Environment {
clauses: relation.relate(&a.clauses, &b.clauses)?,
})
@@ -1043,15 +993,14 @@
}
impl<'tcx, G> Relate<'tcx> for traits::InEnvironment<'tcx, G>
- where G: Relate<'tcx>
+where
+ G: Relate<'tcx>,
{
- fn relate<'a, 'gcx, R>(
+ fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: &traits::InEnvironment<'tcx, G>,
- b: &traits::InEnvironment<'tcx, G>
- ) -> RelateResult<'tcx, traits::InEnvironment<'tcx, G>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'tcx, 'tcx: 'a
- {
+ b: &traits::InEnvironment<'tcx, G>,
+ ) -> RelateResult<'tcx, traits::InEnvironment<'tcx, G>> {
Ok(traits::InEnvironment {
environment: relation.relate(&a.environment, &b.environment)?,
goal: relation.relate(&a.goal, &b.goal)?,
@@ -1062,11 +1011,10 @@
///////////////////////////////////////////////////////////////////////////
// Error handling
-pub fn expected_found<'a, 'gcx, 'tcx, R, T>(relation: &mut R,
- a: &T,
- b: &T)
- -> ExpectedFound<T>
- where R: TypeRelation<'a, 'gcx, 'tcx>, T: Clone, 'gcx: 'a+'tcx, 'tcx: 'a
+pub fn expected_found<R, T>(relation: &mut R, a: &T, b: &T) -> ExpectedFound<T>
+where
+ R: TypeRelation<'tcx>,
+ T: Clone,
{
expected_found_bool(relation.a_is_expected(), a, b)
}
diff --git a/src/librustc/ty/structural_impls.rs b/src/librustc/ty/structural_impls.rs
index 56d47a7..a4efb56 100644
--- a/src/librustc/ty/structural_impls.rs
+++ b/src/librustc/ty/structural_impls.rs
@@ -337,14 +337,14 @@
// FIXME(eddyb) replace all the uses of `Option::map` with `?`.
impl<'tcx, A: Lift<'tcx>, B: Lift<'tcx>> Lift<'tcx> for (A, B) {
type Lifted = (A::Lifted, B::Lifted);
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.0).and_then(|a| tcx.lift(&self.1).map(|b| (a, b)))
}
}
impl<'tcx, A: Lift<'tcx>, B: Lift<'tcx>, C: Lift<'tcx>> Lift<'tcx> for (A, B, C) {
type Lifted = (A::Lifted, B::Lifted, C::Lifted);
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.0).and_then(|a| {
tcx.lift(&self.1).and_then(|b| tcx.lift(&self.2).map(|c| (a, b, c)))
})
@@ -353,7 +353,7 @@
impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Option<T> {
type Lifted = Option<T::Lifted>;
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
match *self {
Some(ref x) => tcx.lift(x).map(Some),
None => Some(None)
@@ -363,7 +363,7 @@
impl<'tcx, T: Lift<'tcx>, E: Lift<'tcx>> Lift<'tcx> for Result<T, E> {
type Lifted = Result<T::Lifted, E::Lifted>;
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
match *self {
Ok(ref x) => tcx.lift(x).map(Ok),
Err(ref e) => tcx.lift(e).map(Err)
@@ -373,14 +373,14 @@
impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Box<T> {
type Lifted = Box<T::Lifted>;
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&**self).map(Box::new)
}
}
impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for [T] {
type Lifted = Vec<T::Lifted>;
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
// type annotation needed to inform `projection_must_outlive`
let mut result : Vec<<T as Lift<'tcx>>::Lifted>
= Vec::with_capacity(self.len());
@@ -397,14 +397,14 @@
impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Vec<T> {
type Lifted = Vec<T::Lifted>;
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self[..])
}
}
impl<'tcx, I: Idx, T: Lift<'tcx>> Lift<'tcx> for IndexVec<I, T> {
type Lifted = IndexVec<I, T::Lifted>;
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
self.iter()
.map(|e| tcx.lift(e))
.collect()
@@ -413,7 +413,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::TraitRef<'a> {
type Lifted = ty::TraitRef<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.substs).map(|substs| ty::TraitRef {
def_id: self.def_id,
substs,
@@ -423,7 +423,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::ExistentialTraitRef<'a> {
type Lifted = ty::ExistentialTraitRef<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.substs).map(|substs| ty::ExistentialTraitRef {
def_id: self.def_id,
substs,
@@ -433,7 +433,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::ExistentialPredicate<'a> {
type Lifted = ty::ExistentialPredicate<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
match self {
ty::ExistentialPredicate::Trait(x) => {
tcx.lift(x).map(ty::ExistentialPredicate::Trait)
@@ -450,8 +450,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::TraitPredicate<'a> {
type Lifted = ty::TraitPredicate<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
- -> Option<ty::TraitPredicate<'tcx>> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<ty::TraitPredicate<'tcx>> {
tcx.lift(&self.trait_ref).map(|trait_ref| ty::TraitPredicate {
trait_ref,
})
@@ -460,8 +459,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::SubtypePredicate<'a> {
type Lifted = ty::SubtypePredicate<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
- -> Option<ty::SubtypePredicate<'tcx>> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<ty::SubtypePredicate<'tcx>> {
tcx.lift(&(self.a, self.b)).map(|(a, b)| ty::SubtypePredicate {
a_is_expected: self.a_is_expected,
a,
@@ -470,17 +468,16 @@
}
}
-impl<'tcx, A: Copy+Lift<'tcx>, B: Copy+Lift<'tcx>> Lift<'tcx> for ty::OutlivesPredicate<A, B> {
+impl<'tcx, A: Copy + Lift<'tcx>, B: Copy + Lift<'tcx>> Lift<'tcx> for ty::OutlivesPredicate<A, B> {
type Lifted = ty::OutlivesPredicate<A::Lifted, B::Lifted>;
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&(self.0, self.1)).map(|(a, b)| ty::OutlivesPredicate(a, b))
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::ProjectionTy<'a> {
type Lifted = ty::ProjectionTy<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
- -> Option<ty::ProjectionTy<'tcx>> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<ty::ProjectionTy<'tcx>> {
tcx.lift(&self.substs).map(|substs| {
ty::ProjectionTy {
item_def_id: self.item_def_id,
@@ -492,8 +489,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::ProjectionPredicate<'a> {
type Lifted = ty::ProjectionPredicate<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
- -> Option<ty::ProjectionPredicate<'tcx>> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<ty::ProjectionPredicate<'tcx>> {
tcx.lift(&(self.projection_ty, self.ty)).map(|(projection_ty, ty)| {
ty::ProjectionPredicate {
projection_ty,
@@ -505,7 +501,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::ExistentialProjection<'a> {
type Lifted = ty::ExistentialProjection<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.substs).map(|substs| {
ty::ExistentialProjection {
substs,
@@ -518,7 +514,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::Predicate<'a> {
type Lifted = ty::Predicate<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
match *self {
ty::Predicate::Trait(ref binder) => {
tcx.lift(binder).map(ty::Predicate::Trait)
@@ -558,14 +554,14 @@
impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::Binder<T> {
type Lifted = ty::Binder<T::Lifted>;
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(self.skip_binder()).map(ty::Binder::bind)
}
}
impl<'a, 'tcx> Lift<'tcx> for ty::ParamEnv<'a> {
type Lifted = ty::ParamEnv<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.caller_bounds).map(|caller_bounds| {
ty::ParamEnv {
reveal: self.reveal,
@@ -578,7 +574,7 @@
impl<'a, 'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::ParamEnvAnd<'a, T> {
type Lifted = ty::ParamEnvAnd<'tcx, T::Lifted>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.param_env).and_then(|param_env| {
tcx.lift(&self.value).map(|value| {
ty::ParamEnvAnd {
@@ -592,7 +588,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::ClosureSubsts<'a> {
type Lifted = ty::ClosureSubsts<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.substs).map(|substs| {
ty::ClosureSubsts { substs }
})
@@ -601,7 +597,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::GeneratorSubsts<'a> {
type Lifted = ty::GeneratorSubsts<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.substs).map(|substs| {
ty::GeneratorSubsts { substs }
})
@@ -610,7 +606,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::Adjustment<'a> {
type Lifted = ty::adjustment::Adjustment<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.kind).and_then(|kind| {
tcx.lift(&self.target).map(|target| {
ty::adjustment::Adjustment { kind, target }
@@ -621,7 +617,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::Adjust<'a> {
type Lifted = ty::adjustment::Adjust<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
match *self {
ty::adjustment::Adjust::NeverToAny =>
Some(ty::adjustment::Adjust::NeverToAny),
@@ -639,7 +635,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::OverloadedDeref<'a> {
type Lifted = ty::adjustment::OverloadedDeref<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.region).map(|region| {
ty::adjustment::OverloadedDeref {
region,
@@ -651,7 +647,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::AutoBorrow<'a> {
type Lifted = ty::adjustment::AutoBorrow<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
match *self {
ty::adjustment::AutoBorrow::Ref(r, m) => {
tcx.lift(&r).map(|r| ty::adjustment::AutoBorrow::Ref(r, m))
@@ -665,7 +661,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::GenSig<'a> {
type Lifted = ty::GenSig<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&(self.yield_ty, self.return_ty))
.map(|(yield_ty, return_ty)| {
ty::GenSig {
@@ -678,7 +674,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::FnSig<'a> {
type Lifted = ty::FnSig<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.inputs_and_output).map(|x| {
ty::FnSig {
inputs_and_output: x,
@@ -692,7 +688,7 @@
impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::error::ExpectedFound<T> {
type Lifted = ty::error::ExpectedFound<T::Lifted>;
- fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
tcx.lift(&self.expected).and_then(|expected| {
tcx.lift(&self.found).map(|found| {
ty::error::ExpectedFound {
@@ -706,7 +702,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::error::TypeError<'a> {
type Lifted = ty::error::TypeError<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
use crate::ty::error::TypeError::*;
Some(match *self {
@@ -743,7 +739,7 @@
impl<'a, 'tcx> Lift<'tcx> for ty::InstanceDef<'a> {
type Lifted = ty::InstanceDef<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
match *self {
ty::InstanceDef::Item(def_id) =>
Some(ty::InstanceDef::Item(def_id)),
@@ -799,7 +795,7 @@
/// AdtDefs are basically the same as a DefId.
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::AdtDef {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, _folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, _folder: &mut F) -> Self {
*self
}
@@ -808,8 +804,8 @@
}
}
-impl<'tcx, T:TypeFoldable<'tcx>, U:TypeFoldable<'tcx>> TypeFoldable<'tcx> for (T, U) {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> (T, U) {
+impl<'tcx, T: TypeFoldable<'tcx>, U: TypeFoldable<'tcx>> TypeFoldable<'tcx> for (T, U) {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> (T, U) {
(self.0.fold_with(folder), self.1.fold_with(folder))
}
@@ -826,7 +822,7 @@
}
impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Rc<T> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
Rc::new((**self).fold_with(folder))
}
@@ -836,7 +832,7 @@
}
impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Box<T> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
let content: T = (**self).fold_with(folder);
box content
}
@@ -847,7 +843,7 @@
}
impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Vec<T> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
self.iter().map(|t| t.fold_with(folder)).collect()
}
@@ -857,7 +853,7 @@
}
impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Box<[T]> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
self.iter().map(|t| t.fold_with(folder)).collect::<Vec<_>>().into_boxed_slice()
}
@@ -866,12 +862,12 @@
}
}
-impl<'tcx, T:TypeFoldable<'tcx>> TypeFoldable<'tcx> for ty::Binder<T> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for ty::Binder<T> {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
self.map_bound_ref(|ty| ty.fold_with(folder))
}
- fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
folder.fold_binder(self)
}
@@ -889,7 +885,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<ty::ExistentialPredicate<'tcx>> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
let v = self.iter().map(|p| p.fold_with(folder)).collect::<SmallVec<[_; 8]>>();
folder.tcx().intern_existential_predicates(&v)
}
@@ -908,7 +904,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<Ty<'tcx>> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
let v = self.iter().map(|t| t.fold_with(folder)).collect::<SmallVec<[_; 8]>>();
folder.tcx().intern_type_list(&v)
}
@@ -919,7 +915,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<ProjectionKind> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
let v = self.iter().map(|t| t.fold_with(folder)).collect::<SmallVec<[_; 8]>>();
folder.tcx().intern_projs(&v)
}
@@ -930,7 +926,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for ty::instance::Instance<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
use crate::ty::InstanceDef::*;
Self {
substs: self.substs.fold_with(folder),
@@ -980,7 +976,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for interpret::GlobalId<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
Self {
instance: self.instance.fold_with(folder),
promoted: self.promoted
@@ -993,7 +989,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
let sty = match self.sty {
ty::RawPtr(tm) => ty::RawPtr(tm.fold_with(folder)),
ty::Array(typ, sz) => ty::Array(typ.fold_with(folder), sz.fold_with(folder)),
@@ -1045,7 +1041,7 @@
}
}
- fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
folder.fold_ty(*self)
}
@@ -1128,11 +1124,11 @@
}
impl<'tcx> TypeFoldable<'tcx> for ty::Region<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, _folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, _folder: &mut F) -> Self {
*self
}
- fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
folder.fold_region(*self)
}
@@ -1193,7 +1189,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<ty::Predicate<'tcx>> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
let v = self.iter().map(|p| p.fold_with(folder)).collect::<SmallVec<[_; 8]>>();
folder.tcx().intern_predicates(&v)
}
@@ -1278,7 +1274,7 @@
}
impl<'tcx, T: TypeFoldable<'tcx>, I: Idx> TypeFoldable<'tcx> for IndexVec<I, T> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
self.iter().map(|x| x.fold_with(folder)).collect()
}
@@ -1314,7 +1310,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Const<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
let ty = self.ty.fold_with(folder);
let val = self.val.fold_with(folder);
folder.tcx().mk_const(ty::Const {
@@ -1323,7 +1319,7 @@
})
}
- fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
folder.fold_const(*self)
}
@@ -1337,7 +1333,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for ConstValue<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
match *self {
ConstValue::ByRef(ptr, alloc) => ConstValue::ByRef(ptr, alloc),
ConstValue::Infer(ic) => ConstValue::Infer(ic.fold_with(folder)),
@@ -1364,7 +1360,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for InferConst<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, _folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, _folder: &mut F) -> Self {
*self
}
diff --git a/src/librustc/ty/sty.rs b/src/librustc/ty/sty.rs
index abde4d6..810a26d 100644
--- a/src/librustc/ty/sty.rs
+++ b/src/librustc/ty/sty.rs
@@ -324,7 +324,7 @@
/// Divides the closure substs into their respective
/// components. Single source of truth with respect to the
/// ordering.
- fn split(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> SplitClosureSubsts<'tcx> {
+ fn split(self, def_id: DefId, tcx: TyCtxt<'_>) -> SplitClosureSubsts<'tcx> {
let generics = tcx.generics_of(def_id);
let parent_len = generics.parent_count;
SplitClosureSubsts {
@@ -335,9 +335,11 @@
}
#[inline]
- pub fn upvar_tys(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) ->
- impl Iterator<Item=Ty<'tcx>> + 'tcx
- {
+ pub fn upvar_tys(
+ self,
+ def_id: DefId,
+ tcx: TyCtxt<'_>,
+ ) -> impl Iterator<Item = Ty<'tcx>> + 'tcx {
let SplitClosureSubsts { upvar_kinds, .. } = self.split(def_id, tcx);
upvar_kinds.iter().map(|t| {
if let UnpackedKind::Type(ty) = t.unpack() {
@@ -351,7 +353,7 @@
/// Returns the closure kind for this closure; may return a type
/// variable during inference. To get the closure kind during
/// inference, use `infcx.closure_kind(def_id, substs)`.
- pub fn closure_kind_ty(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> Ty<'tcx> {
+ pub fn closure_kind_ty(self, def_id: DefId, tcx: TyCtxt<'_>) -> Ty<'tcx> {
self.split(def_id, tcx).closure_kind_ty
}
@@ -359,7 +361,7 @@
/// closure; may contain type variables during inference. To get
/// the closure signature during inference, use
/// `infcx.fn_sig(def_id)`.
- pub fn closure_sig_ty(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> Ty<'tcx> {
+ pub fn closure_sig_ty(self, def_id: DefId, tcx: TyCtxt<'_>) -> Ty<'tcx> {
self.split(def_id, tcx).closure_sig_ty
}
@@ -368,7 +370,7 @@
/// there are no type variables.
///
/// If you have an inference context, use `infcx.closure_kind()`.
- pub fn closure_kind(self, def_id: DefId, tcx: TyCtxt<'_, 'tcx, 'tcx>) -> ty::ClosureKind {
+ pub fn closure_kind(self, def_id: DefId, tcx: TyCtxt<'tcx>) -> ty::ClosureKind {
self.split(def_id, tcx).closure_kind_ty.to_opt_closure_kind().unwrap()
}
@@ -377,7 +379,7 @@
/// there are no type variables.
///
/// If you have an inference context, use `infcx.closure_sig()`.
- pub fn closure_sig(self, def_id: DefId, tcx: TyCtxt<'_, 'tcx, 'tcx>) -> ty::PolyFnSig<'tcx> {
+ pub fn closure_sig(self, def_id: DefId, tcx: TyCtxt<'tcx>) -> ty::PolyFnSig<'tcx> {
let ty = self.closure_sig_ty(def_id, tcx);
match ty.sty {
ty::FnPtr(sig) => sig,
@@ -401,7 +403,7 @@
}
impl<'tcx> GeneratorSubsts<'tcx> {
- fn split(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> SplitGeneratorSubsts<'tcx> {
+ fn split(self, def_id: DefId, tcx: TyCtxt<'_>) -> SplitGeneratorSubsts<'tcx> {
let generics = tcx.generics_of(def_id);
let parent_len = generics.parent_count;
SplitGeneratorSubsts {
@@ -417,14 +419,16 @@
/// It contains a tuple of all the types that could end up on a generator frame.
/// The state transformation MIR pass may only produce layouts which mention types
/// in this tuple. Upvars are not counted here.
- pub fn witness(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> Ty<'tcx> {
+ pub fn witness(self, def_id: DefId, tcx: TyCtxt<'_>) -> Ty<'tcx> {
self.split(def_id, tcx).witness
}
#[inline]
- pub fn upvar_tys(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) ->
- impl Iterator<Item=Ty<'tcx>> + 'tcx
- {
+ pub fn upvar_tys(
+ self,
+ def_id: DefId,
+ tcx: TyCtxt<'_>,
+ ) -> impl Iterator<Item = Ty<'tcx>> + 'tcx {
let SplitGeneratorSubsts { upvar_kinds, .. } = self.split(def_id, tcx);
upvar_kinds.iter().map(|t| {
if let UnpackedKind::Type(ty) = t.unpack() {
@@ -436,12 +440,12 @@
}
/// Returns the type representing the yield type of the generator.
- pub fn yield_ty(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> Ty<'tcx> {
+ pub fn yield_ty(self, def_id: DefId, tcx: TyCtxt<'_>) -> Ty<'tcx> {
self.split(def_id, tcx).yield_ty
}
/// Returns the type representing the return type of the generator.
- pub fn return_ty(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> Ty<'tcx> {
+ pub fn return_ty(self, def_id: DefId, tcx: TyCtxt<'_>) -> Ty<'tcx> {
self.split(def_id, tcx).return_ty
}
@@ -451,13 +455,13 @@
/// N.B., some bits of the code prefers to see this wrapped in a
/// binder, but it never contains bound regions. Probably this
/// function should be removed.
- pub fn poly_sig(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> PolyGenSig<'tcx> {
+ pub fn poly_sig(self, def_id: DefId, tcx: TyCtxt<'_>) -> PolyGenSig<'tcx> {
ty::Binder::dummy(self.sig(def_id, tcx))
}
/// Returns the "generator signature", which consists of its yield
/// and return types.
- pub fn sig(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> GenSig<'tcx> {
+ pub fn sig(self, def_id: DefId, tcx: TyCtxt<'_>) -> GenSig<'tcx> {
ty::GenSig {
yield_ty: self.yield_ty(def_id, tcx),
return_ty: self.return_ty(def_id, tcx),
@@ -465,7 +469,7 @@
}
}
-impl<'a, 'gcx, 'tcx> GeneratorSubsts<'tcx> {
+impl<'tcx> GeneratorSubsts<'tcx> {
/// Generator have not been resumed yet
pub const UNRESUMED: usize = 0;
/// Generator has returned / is completed
@@ -479,7 +483,7 @@
/// The valid variant indices of this Generator.
#[inline]
- pub fn variant_range(&self, def_id: DefId, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Range<VariantIdx> {
+ pub fn variant_range(&self, def_id: DefId, tcx: TyCtxt<'tcx>) -> Range<VariantIdx> {
// FIXME requires optimized MIR
let num_variants = tcx.generator_layout(def_id).variant_fields.len();
(VariantIdx::new(0)..VariantIdx::new(num_variants))
@@ -489,7 +493,10 @@
/// out of range.
#[inline]
pub fn discriminant_for_variant(
- &self, def_id: DefId, tcx: TyCtxt<'a, 'gcx, 'tcx>, variant_index: VariantIdx
+ &self,
+ def_id: DefId,
+ tcx: TyCtxt<'tcx>,
+ variant_index: VariantIdx,
) -> Discr<'tcx> {
// Generators don't support explicit discriminant values, so they are
// the same as the variant index.
@@ -501,8 +508,10 @@
/// variant indices.
#[inline]
pub fn discriminants(
- &'a self, def_id: DefId, tcx: TyCtxt<'a, 'gcx, 'tcx>
- ) -> impl Iterator<Item=(VariantIdx, Discr<'tcx>)> + Captures<'gcx> + 'a {
+ &'tcx self,
+ def_id: DefId,
+ tcx: TyCtxt<'tcx>,
+ ) -> impl Iterator<Item = (VariantIdx, Discr<'tcx>)> + Captures<'tcx> {
self.variant_range(def_id, tcx).map(move |index| {
(index, Discr { val: index.as_usize() as u128, ty: self.discr_ty(tcx) })
})
@@ -522,7 +531,7 @@
/// The type of the state discriminant used in the generator type.
#[inline]
- pub fn discr_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
+ pub fn discr_ty(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
tcx.types.u32
}
@@ -533,9 +542,11 @@
/// The locals are grouped by their variant number. Note that some locals may
/// be repeated in multiple variants.
#[inline]
- pub fn state_tys(self, def_id: DefId, tcx: TyCtxt<'a, 'gcx, 'tcx>) ->
- impl Iterator<Item=impl Iterator<Item=Ty<'tcx>> + Captures<'gcx> + 'a>
- {
+ pub fn state_tys(
+ self,
+ def_id: DefId,
+ tcx: TyCtxt<'tcx>,
+ ) -> impl Iterator<Item = impl Iterator<Item = Ty<'tcx>> + Captures<'tcx>> {
let layout = tcx.generator_layout(def_id);
layout.variant_fields.iter().map(move |variant| {
variant.iter().map(move |field| {
@@ -547,9 +558,7 @@
/// This is the types of the fields of a generator which are not stored in a
/// variant.
#[inline]
- pub fn prefix_tys(self, def_id: DefId, tcx: TyCtxt<'a, 'gcx, 'tcx>) ->
- impl Iterator<Item=Ty<'tcx>> + 'a
- {
+ pub fn prefix_tys(self, def_id: DefId, tcx: TyCtxt<'tcx>) -> impl Iterator<Item = Ty<'tcx>> {
self.upvar_tys(def_id, tcx)
}
}
@@ -562,9 +571,11 @@
impl<'tcx> UpvarSubsts<'tcx> {
#[inline]
- pub fn upvar_tys(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) ->
- impl Iterator<Item=Ty<'tcx>> + 'tcx
- {
+ pub fn upvar_tys(
+ self,
+ def_id: DefId,
+ tcx: TyCtxt<'_>,
+ ) -> impl Iterator<Item = Ty<'tcx>> + 'tcx {
let upvar_kinds = match self {
UpvarSubsts::Closure(substs) => substs.split(def_id, tcx).upvar_kinds,
UpvarSubsts::Generator(substs) => substs.split(def_id, tcx).upvar_kinds,
@@ -590,10 +601,10 @@
AutoTrait(DefId),
}
-impl<'a, 'gcx, 'tcx> ExistentialPredicate<'tcx> {
+impl<'tcx> ExistentialPredicate<'tcx> {
/// Compares via an ordering that will not change if modules are reordered or other changes are
/// made to the tree. In particular, this ordering is preserved across incremental compilations.
- pub fn stable_cmp(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, other: &Self) -> Ordering {
+ pub fn stable_cmp(&self, tcx: TyCtxt<'tcx>, other: &Self) -> Ordering {
use self::ExistentialPredicate::*;
match (*self, *other) {
(Trait(_), Trait(_)) => Ordering::Equal,
@@ -607,12 +618,10 @@
(AutoTrait(_), _) => Ordering::Greater,
}
}
-
}
-impl<'a, 'gcx, 'tcx> Binder<ExistentialPredicate<'tcx>> {
- pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>)
- -> ty::Predicate<'tcx> {
+impl<'tcx> Binder<ExistentialPredicate<'tcx>> {
+ pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::Predicate<'tcx> {
use crate::ty::ToPredicate;
match *self.skip_binder() {
ExistentialPredicate::Trait(tr) => Binder(tr).with_self_ty(tcx, self_ty).to_predicate(),
@@ -744,7 +753,7 @@
/// Returns a `TraitRef` of the form `P0: Foo<P1..Pn>` where `Pi`
/// are the parameters defined on trait.
- pub fn identity<'a, 'gcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, def_id: DefId) -> TraitRef<'tcx> {
+ pub fn identity(tcx: TyCtxt<'tcx>, def_id: DefId) -> TraitRef<'tcx> {
TraitRef {
def_id,
substs: InternalSubsts::identity_for_item(tcx, def_id),
@@ -764,10 +773,11 @@
self.substs.types()
}
- pub fn from_method(tcx: TyCtxt<'_, '_, 'tcx>,
- trait_id: DefId,
- substs: SubstsRef<'tcx>)
- -> ty::TraitRef<'tcx> {
+ pub fn from_method(
+ tcx: TyCtxt<'tcx>,
+ trait_id: DefId,
+ substs: SubstsRef<'tcx>,
+ ) -> ty::TraitRef<'tcx> {
let defs = tcx.generics_of(trait_id);
ty::TraitRef {
@@ -808,7 +818,7 @@
pub substs: SubstsRef<'tcx>,
}
-impl<'a, 'gcx, 'tcx> ExistentialTraitRef<'tcx> {
+impl<'tcx> ExistentialTraitRef<'tcx> {
pub fn input_types<'b>(&'b self) -> impl DoubleEndedIterator<Item=Ty<'tcx>> + 'b {
// Select only the "input types" from a trait-reference. For
// now this is all the types that appear in the
@@ -817,9 +827,10 @@
self.substs.types()
}
- pub fn erase_self_ty(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- trait_ref: ty::TraitRef<'tcx>)
- -> ty::ExistentialTraitRef<'tcx> {
+ pub fn erase_self_ty(
+ tcx: TyCtxt<'tcx>,
+ trait_ref: ty::TraitRef<'tcx>,
+ ) -> ty::ExistentialTraitRef<'tcx> {
// Assert there is a Self.
trait_ref.substs.type_at(0);
@@ -833,8 +844,7 @@
/// we convert the principal trait-ref into a normal trait-ref,
/// you must give *some* self type. A common choice is `mk_err()`
/// or some placeholder type.
- pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>)
- -> ty::TraitRef<'tcx> {
+ pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::TraitRef<'tcx> {
// otherwise the escaping vars would be captured by the binder
// debug_assert!(!self_ty.has_escaping_bound_vars());
@@ -856,9 +866,7 @@
/// we convert the principal trait-ref into a normal trait-ref,
/// you must give *some* self type. A common choice is `mk_err()`
/// or some placeholder type.
- pub fn with_self_ty(&self, tcx: TyCtxt<'_, '_, 'tcx>,
- self_ty: Ty<'tcx>)
- -> ty::PolyTraitRef<'tcx> {
+ pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::PolyTraitRef<'tcx> {
self.map_bound(|trait_ref| trait_ref.with_self_ty(tcx, self_ty))
}
}
@@ -992,7 +1000,9 @@
/// Construct a `ProjectionTy` by searching the trait from `trait_ref` for the
/// associated item named `item_name`.
pub fn from_ref_and_name(
- tcx: TyCtxt<'_, '_, '_>, trait_ref: ty::TraitRef<'tcx>, item_name: Ident
+ tcx: TyCtxt<'_>,
+ trait_ref: ty::TraitRef<'tcx>,
+ item_name: Ident,
) -> ProjectionTy<'tcx> {
let item_def_id = tcx.associated_items(trait_ref.def_id).find(|item| {
item.kind == ty::AssocKind::Type &&
@@ -1008,7 +1018,7 @@
/// Extracts the underlying trait reference from this projection.
/// For example, if this is a projection of `<T as Iterator>::Item`,
/// then this function would return a `T: Iterator` trait reference.
- pub fn trait_ref(&self, tcx: TyCtxt<'_, '_, '_>) -> ty::TraitRef<'tcx> {
+ pub fn trait_ref(&self, tcx: TyCtxt<'_>) -> ty::TraitRef<'tcx> {
let def_id = tcx.associated_item(self.item_def_id).container.id();
ty::TraitRef {
def_id,
@@ -1112,7 +1122,7 @@
pub name: InternedString,
}
-impl<'a, 'gcx, 'tcx> ParamTy {
+impl<'tcx> ParamTy {
pub fn new(index: u32, name: InternedString) -> ParamTy {
ParamTy { index, name: name }
}
@@ -1125,7 +1135,7 @@
ParamTy::new(def.index, def.name)
}
- pub fn to_ty(self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
+ pub fn to_ty(self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
tcx.mk_ty_param(self.index, self.name)
}
@@ -1144,7 +1154,7 @@
pub name: InternedString,
}
-impl<'a, 'gcx, 'tcx> ParamConst {
+impl<'tcx> ParamConst {
pub fn new(index: u32, name: InternedString) -> ParamConst {
ParamConst { index, name }
}
@@ -1153,7 +1163,7 @@
ParamConst::new(def.index, def.name)
}
- pub fn to_const(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, ty: Ty<'tcx>) -> &'tcx Const<'tcx> {
+ pub fn to_const(self, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> &'tcx Const<'tcx> {
tcx.mk_const_param(self.index, self.name, ty)
}
}
@@ -1408,12 +1418,12 @@
pub type PolyExistentialProjection<'tcx> = Binder<ExistentialProjection<'tcx>>;
-impl<'a, 'tcx, 'gcx> ExistentialProjection<'tcx> {
+impl<'tcx> ExistentialProjection<'tcx> {
/// Extracts the underlying existential trait reference from this projection.
/// For example, if this is a projection of `exists T. <T as Iterator>::Item == X`,
/// then this function would return a `exists T. T: Iterator` existential trait
/// reference.
- pub fn trait_ref(&self, tcx: TyCtxt<'_, '_, '_>) -> ty::ExistentialTraitRef<'tcx> {
+ pub fn trait_ref(&self, tcx: TyCtxt<'_>) -> ty::ExistentialTraitRef<'tcx> {
let def_id = tcx.associated_item(self.item_def_id).container.id();
ty::ExistentialTraitRef{
def_id,
@@ -1421,10 +1431,11 @@
}
}
- pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- self_ty: Ty<'tcx>)
- -> ty::ProjectionPredicate<'tcx>
- {
+ pub fn with_self_ty(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ self_ty: Ty<'tcx>,
+ ) -> ty::ProjectionPredicate<'tcx> {
// otherwise the escaping regions would be captured by the binders
debug_assert!(!self_ty.has_escaping_bound_vars());
@@ -1438,9 +1449,12 @@
}
}
-impl<'a, 'tcx, 'gcx> PolyExistentialProjection<'tcx> {
- pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>)
- -> ty::PolyProjectionPredicate<'tcx> {
+impl<'tcx> PolyExistentialProjection<'tcx> {
+ pub fn with_self_ty(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ self_ty: Ty<'tcx>,
+ ) -> ty::PolyProjectionPredicate<'tcx> {
self.map_bound(|p| p.with_self_ty(tcx, self_ty))
}
@@ -1652,7 +1666,7 @@
/// of the impl, and for all the other highlighted regions, it
/// would return the `DefId` of the function. In other cases (not shown), this
/// function might return the `DefId` of a closure.
- pub fn free_region_binding_scope(&self, tcx: TyCtxt<'_, '_, '_>) -> DefId {
+ pub fn free_region_binding_scope(&self, tcx: TyCtxt<'_>) -> DefId {
match self {
ty::ReEarlyBound(br) => {
tcx.parent(br.def_id).unwrap()
@@ -1664,7 +1678,8 @@
}
/// Type utilities
-impl<'a, 'gcx, 'tcx> TyS<'tcx> {
+impl<'tcx> TyS<'tcx> {
+ #[inline]
pub fn is_unit(&self) -> bool {
match self.sty {
Tuple(ref tys) => tys.is_empty(),
@@ -1672,6 +1687,7 @@
}
}
+ #[inline]
pub fn is_never(&self) -> bool {
match self.sty {
Never => true,
@@ -1685,7 +1701,7 @@
/// `ty.conservative_is_privately_uninhabited` implies that any value of type `ty`
/// will be `Abi::Uninhabited`. (Note that uninhabited types may have nonzero
/// size, to account for partial initialisation. See #49298 for details.)
- pub fn conservative_is_privately_uninhabited(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool {
+ pub fn conservative_is_privately_uninhabited(&self, tcx: TyCtxt<'tcx>) -> bool {
// FIXME(varkor): we can make this less conversative by substituting concrete
// type arguments.
match self.sty {
@@ -1726,6 +1742,7 @@
}
}
+ #[inline]
pub fn is_primitive(&self) -> bool {
match self.sty {
Bool | Char | Int(_) | Uint(_) | Float(_) => true,
@@ -1741,6 +1758,7 @@
}
}
+ #[inline]
pub fn is_ty_infer(&self) -> bool {
match self.sty {
Infer(_) => true,
@@ -1748,6 +1766,7 @@
}
}
+ #[inline]
pub fn is_phantom_data(&self) -> bool {
if let Adt(def, _) = self.sty {
def.is_phantom_data()
@@ -1756,8 +1775,10 @@
}
}
+ #[inline]
pub fn is_bool(&self) -> bool { self.sty == Bool }
+ #[inline]
pub fn is_param(&self, index: u32) -> bool {
match self.sty {
ty::Param(ref data) => data.index == index,
@@ -1765,6 +1786,7 @@
}
}
+ #[inline]
pub fn is_self(&self) -> bool {
match self.sty {
Param(ref p) => p.is_self(),
@@ -1772,6 +1794,7 @@
}
}
+ #[inline]
pub fn is_slice(&self) -> bool {
match self.sty {
RawPtr(TypeAndMut { ty, .. }) | Ref(_, ty, _) => match ty.sty {
@@ -1790,7 +1813,7 @@
}
}
- pub fn sequence_element_type(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
+ pub fn sequence_element_type(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
match self.sty {
Array(ty, _) | Slice(ty) => ty,
Str => tcx.mk_mach_uint(ast::UintTy::U8),
@@ -1798,7 +1821,7 @@
}
}
- pub fn simd_type(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
+ pub fn simd_type(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
match self.sty {
Adt(def, substs) => {
def.non_enum_variant().fields[0].ty(tcx, substs)
@@ -1807,13 +1830,14 @@
}
}
- pub fn simd_size(&self, _cx: TyCtxt<'_, '_, '_>) -> usize {
+ pub fn simd_size(&self, _cx: TyCtxt<'_>) -> usize {
match self.sty {
Adt(def, _) => def.non_enum_variant().fields.len(),
_ => bug!("simd_size called on invalid type")
}
}
+ #[inline]
pub fn is_region_ptr(&self) -> bool {
match self.sty {
Ref(..) => true,
@@ -1821,6 +1845,7 @@
}
}
+ #[inline]
pub fn is_mutable_pointer(&self) -> bool {
match self.sty {
RawPtr(TypeAndMut { mutbl: hir::Mutability::MutMutable, .. }) |
@@ -1829,6 +1854,7 @@
}
}
+ #[inline]
pub fn is_unsafe_ptr(&self) -> bool {
match self.sty {
RawPtr(_) => return true,
@@ -1837,6 +1863,7 @@
}
/// Returns `true` if this type is an `Arc<T>`.
+ #[inline]
pub fn is_arc(&self) -> bool {
match self.sty {
Adt(def, _) => def.is_arc(),
@@ -1845,6 +1872,7 @@
}
/// Returns `true` if this type is an `Rc<T>`.
+ #[inline]
pub fn is_rc(&self) -> bool {
match self.sty {
Adt(def, _) => def.is_rc(),
@@ -1852,6 +1880,7 @@
}
}
+ #[inline]
pub fn is_box(&self) -> bool {
match self.sty {
Adt(def, _) => def.is_box(),
@@ -1870,6 +1899,7 @@
/// A scalar type is one that denotes an atomic datum, with no sub-components.
/// (A RawPtr is scalar because it represents a non-managed pointer, so its
/// contents are abstract to rustc.)
+ #[inline]
pub fn is_scalar(&self) -> bool {
match self.sty {
Bool | Char | Int(_) | Float(_) | Uint(_) |
@@ -1880,6 +1910,7 @@
}
/// Returns `true` if this type is a floating point type.
+ #[inline]
pub fn is_floating_point(&self) -> bool {
match self.sty {
Float(_) |
@@ -1888,6 +1919,7 @@
}
}
+ #[inline]
pub fn is_trait(&self) -> bool {
match self.sty {
Dynamic(..) => true,
@@ -1895,6 +1927,7 @@
}
}
+ #[inline]
pub fn is_enum(&self) -> bool {
match self.sty {
Adt(adt_def, _) => {
@@ -1904,6 +1937,7 @@
}
}
+ #[inline]
pub fn is_closure(&self) -> bool {
match self.sty {
Closure(..) => true,
@@ -1911,6 +1945,7 @@
}
}
+ #[inline]
pub fn is_generator(&self) -> bool {
match self.sty {
Generator(..) => true,
@@ -1926,6 +1961,7 @@
}
}
+ #[inline]
pub fn is_fresh_ty(&self) -> bool {
match self.sty {
Infer(FreshTy(_)) => true,
@@ -1933,6 +1969,7 @@
}
}
+ #[inline]
pub fn is_fresh(&self) -> bool {
match self.sty {
Infer(FreshTy(_)) => true,
@@ -1942,6 +1979,7 @@
}
}
+ #[inline]
pub fn is_char(&self) -> bool {
match self.sty {
Char => true,
@@ -1950,17 +1988,11 @@
}
#[inline]
- pub fn is_fp(&self) -> bool {
- match self.sty {
- Infer(FloatVar(_)) | Float(_) => true,
- _ => false
- }
- }
-
pub fn is_numeric(&self) -> bool {
- self.is_integral() || self.is_fp()
+ self.is_integral() || self.is_floating_point()
}
+ #[inline]
pub fn is_signed(&self) -> bool {
match self.sty {
Int(_) => true,
@@ -1968,6 +2000,7 @@
}
}
+ #[inline]
pub fn is_pointer_sized(&self) -> bool {
match self.sty {
Int(ast::IntTy::Isize) | Uint(ast::UintTy::Usize) => true,
@@ -1975,6 +2008,7 @@
}
}
+ #[inline]
pub fn is_machine(&self) -> bool {
match self.sty {
Int(..) | Uint(..) | Float(..) => true,
@@ -1982,6 +2016,7 @@
}
}
+ #[inline]
pub fn has_concrete_skeleton(&self) -> bool {
match self.sty {
Param(_) | Infer(_) | Error => false,
@@ -2015,7 +2050,7 @@
}
}
- pub fn fn_sig(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> PolyFnSig<'tcx> {
+ pub fn fn_sig(&self, tcx: TyCtxt<'tcx>) -> PolyFnSig<'tcx> {
match self.sty {
FnDef(def_id, substs) => {
tcx.fn_sig(def_id).subst(tcx, substs)
@@ -2028,6 +2063,7 @@
}
}
+ #[inline]
pub fn is_fn(&self) -> bool {
match self.sty {
FnDef(..) | FnPtr(_) => true,
@@ -2043,6 +2079,7 @@
}
}
+ #[inline]
pub fn is_impl_trait(&self) -> bool {
match self.sty {
Opaque(..) => true,
@@ -2061,7 +2098,7 @@
/// If the type contains variants, returns the valid range of variant indices.
/// FIXME This requires the optimized MIR in the case of generators.
#[inline]
- pub fn variant_range(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Range<VariantIdx>> {
+ pub fn variant_range(&self, tcx: TyCtxt<'tcx>) -> Option<Range<VariantIdx>> {
match self.sty {
TyKind::Adt(adt, _) => Some(adt.variant_range()),
TyKind::Generator(def_id, substs, _) => Some(substs.variant_range(def_id, tcx)),
@@ -2075,8 +2112,8 @@
#[inline]
pub fn discriminant_for_variant(
&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- variant_index: VariantIdx
+ tcx: TyCtxt<'tcx>,
+ variant_index: VariantIdx,
) -> Option<Discr<'tcx>> {
match self.sty {
TyKind::Adt(adt, _) => Some(adt.discriminant_for_variant(tcx, variant_index)),
@@ -2166,7 +2203,7 @@
///
/// Returning true means the type is known to be sized. Returning
/// `false` means nothing -- could be sized, might not be.
- pub fn is_trivially_sized(&self, tcx: TyCtxt<'_, '_, 'tcx>) -> bool {
+ pub fn is_trivially_sized(&self, tcx: TyCtxt<'tcx>) -> bool {
match self.sty {
ty::Infer(ty::IntVar(_)) | ty::Infer(ty::FloatVar(_)) |
ty::Uint(_) | ty::Int(_) | ty::Bool | ty::Float(_) |
@@ -2216,11 +2253,7 @@
impl<'tcx> Const<'tcx> {
#[inline]
- pub fn from_scalar(
- tcx: TyCtxt<'_, '_, 'tcx>,
- val: Scalar,
- ty: Ty<'tcx>,
- ) -> &'tcx Self {
+ pub fn from_scalar(tcx: TyCtxt<'tcx>, val: Scalar, ty: Ty<'tcx>) -> &'tcx Self {
tcx.mk_const(Self {
val: ConstValue::Scalar(val),
ty,
@@ -2228,11 +2261,7 @@
}
#[inline]
- pub fn from_bits(
- tcx: TyCtxt<'_, '_, 'tcx>,
- bits: u128,
- ty: ParamEnvAnd<'tcx, Ty<'tcx>>,
- ) -> &'tcx Self {
+ pub fn from_bits(tcx: TyCtxt<'tcx>, bits: u128, ty: ParamEnvAnd<'tcx, Ty<'tcx>>) -> &'tcx Self {
let ty = tcx.lift_to_global(&ty).unwrap();
let size = tcx.layout_of(ty).unwrap_or_else(|e| {
panic!("could not compute layout for {:?}: {:?}", ty, e)
@@ -2241,26 +2270,22 @@
}
#[inline]
- pub fn zero_sized(tcx: TyCtxt<'_, '_, 'tcx>, ty: Ty<'tcx>) -> &'tcx Self {
+ pub fn zero_sized(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> &'tcx Self {
Self::from_scalar(tcx, Scalar::zst(), ty)
}
#[inline]
- pub fn from_bool(tcx: TyCtxt<'_, '_, 'tcx>, v: bool) -> &'tcx Self {
+ pub fn from_bool(tcx: TyCtxt<'tcx>, v: bool) -> &'tcx Self {
Self::from_bits(tcx, v as u128, ParamEnv::empty().and(tcx.types.bool))
}
#[inline]
- pub fn from_usize(tcx: TyCtxt<'_, '_, 'tcx>, n: u64) -> &'tcx Self {
+ pub fn from_usize(tcx: TyCtxt<'tcx>, n: u64) -> &'tcx Self {
Self::from_bits(tcx, n as u128, ParamEnv::empty().and(tcx.types.usize))
}
#[inline]
- pub fn to_bits(
- &self,
- tcx: TyCtxt<'_, '_, 'tcx>,
- ty: ParamEnvAnd<'tcx, Ty<'tcx>>,
- ) -> Option<u128> {
+ pub fn to_bits(&self, tcx: TyCtxt<'tcx>, ty: ParamEnvAnd<'tcx, Ty<'tcx>>) -> Option<u128> {
if self.ty != ty.value {
return None;
}
@@ -2275,11 +2300,7 @@
}
#[inline]
- pub fn assert_bits(
- &self,
- tcx: TyCtxt<'_, '_, '_>,
- ty: ParamEnvAnd<'tcx, Ty<'tcx>>,
- ) -> Option<u128> {
+ pub fn assert_bits(&self, tcx: TyCtxt<'_>, ty: ParamEnvAnd<'tcx, Ty<'tcx>>) -> Option<u128> {
assert_eq!(self.ty, ty.value);
let ty = tcx.lift_to_global(&ty).unwrap();
let size = tcx.layout_of(ty).ok()?.size;
@@ -2287,7 +2308,7 @@
}
#[inline]
- pub fn assert_bool(&self, tcx: TyCtxt<'_, '_, '_>) -> Option<bool> {
+ pub fn assert_bool(&self, tcx: TyCtxt<'_>) -> Option<bool> {
self.assert_bits(tcx, ParamEnv::empty().and(tcx.types.bool)).and_then(|v| match v {
0 => Some(false),
1 => Some(true),
@@ -2296,22 +2317,18 @@
}
#[inline]
- pub fn assert_usize(&self, tcx: TyCtxt<'_, '_, '_>) -> Option<u64> {
+ pub fn assert_usize(&self, tcx: TyCtxt<'_>) -> Option<u64> {
self.assert_bits(tcx, ParamEnv::empty().and(tcx.types.usize)).map(|v| v as u64)
}
#[inline]
- pub fn unwrap_bits(
- &self,
- tcx: TyCtxt<'_, '_, '_>,
- ty: ParamEnvAnd<'tcx, Ty<'tcx>>,
- ) -> u128 {
+ pub fn unwrap_bits(&self, tcx: TyCtxt<'_>, ty: ParamEnvAnd<'tcx, Ty<'tcx>>) -> u128 {
self.assert_bits(tcx, ty).unwrap_or_else(||
bug!("expected bits of {}, got {:#?}", ty.value, self))
}
#[inline]
- pub fn unwrap_usize(&self, tcx: TyCtxt<'_, '_, '_>) -> u64 {
+ pub fn unwrap_usize(&self, tcx: TyCtxt<'_>) -> u64 {
self.assert_usize(tcx).unwrap_or_else(||
bug!("expected constant usize, got {:#?}", self))
}
diff --git a/src/librustc/ty/subst.rs b/src/librustc/ty/subst.rs
index 75ba1dd..79dcd32 100644
--- a/src/librustc/ty/subst.rs
+++ b/src/librustc/ty/subst.rs
@@ -138,7 +138,7 @@
impl<'a, 'tcx> Lift<'tcx> for Kind<'a> {
type Lifted = Kind<'tcx>;
- fn lift_to_tcx<'cx, 'gcx>(&self, tcx: TyCtxt<'cx, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
match self.unpack() {
UnpackedKind::Lifetime(lt) => tcx.lift(<).map(|lt| lt.into()),
UnpackedKind::Type(ty) => tcx.lift(&ty).map(|ty| ty.into()),
@@ -148,7 +148,7 @@
}
impl<'tcx> TypeFoldable<'tcx> for Kind<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
match self.unpack() {
UnpackedKind::Lifetime(lt) => lt.fold_with(folder).into(),
UnpackedKind::Type(ty) => ty.fold_with(folder).into(),
@@ -182,9 +182,9 @@
pub type SubstsRef<'tcx> = &'tcx InternalSubsts<'tcx>;
-impl<'a, 'gcx, 'tcx> InternalSubsts<'tcx> {
+impl<'a, 'tcx> InternalSubsts<'tcx> {
/// Creates a `InternalSubsts` that maps each generic parameter to itself.
- pub fn identity_for_item(tcx: TyCtxt<'a, 'gcx, 'tcx>, def_id: DefId) -> SubstsRef<'tcx> {
+ pub fn identity_for_item(tcx: TyCtxt<'tcx>, def_id: DefId) -> SubstsRef<'tcx> {
Self::for_item(tcx, def_id, |param, _| {
tcx.mk_param_from_def(param)
})
@@ -194,10 +194,7 @@
/// var bound at index `0`. For types, we use a `BoundVar` index equal to
/// the type parameter index. For regions, we use the `BoundRegion::BrNamed`
/// variant (which has a `DefId`).
- pub fn bound_vars_for_item(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- def_id: DefId
- ) -> SubstsRef<'tcx> {
+ pub fn bound_vars_for_item(tcx: TyCtxt<'tcx>, def_id: DefId) -> SubstsRef<'tcx> {
Self::for_item(tcx, def_id, |param, _| {
match param.kind {
ty::GenericParamDefKind::Type { .. } => {
@@ -233,11 +230,9 @@
/// The closures get to observe the `InternalSubsts` as they're
/// being built, which can be used to correctly
/// substitute defaults of generic parameters.
- pub fn for_item<F>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- def_id: DefId,
- mut mk_kind: F)
- -> SubstsRef<'tcx>
- where F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx>
+ pub fn for_item<F>(tcx: TyCtxt<'tcx>, def_id: DefId, mut mk_kind: F) -> SubstsRef<'tcx>
+ where
+ F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx>,
{
let defs = tcx.generics_of(def_id);
let count = defs.count();
@@ -246,12 +241,9 @@
tcx.intern_substs(&substs)
}
- pub fn extend_to<F>(&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- def_id: DefId,
- mut mk_kind: F)
- -> SubstsRef<'tcx>
- where F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx>
+ pub fn extend_to<F>(&self, tcx: TyCtxt<'tcx>, def_id: DefId, mut mk_kind: F) -> SubstsRef<'tcx>
+ where
+ F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx>,
{
Self::for_item(tcx, def_id, |param, substs| {
self.get(param.index as usize)
@@ -260,11 +252,13 @@
})
}
- fn fill_item<F>(substs: &mut SmallVec<[Kind<'tcx>; 8]>,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- defs: &ty::Generics,
- mk_kind: &mut F)
- where F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx>
+ fn fill_item<F>(
+ substs: &mut SmallVec<[Kind<'tcx>; 8]>,
+ tcx: TyCtxt<'tcx>,
+ defs: &ty::Generics,
+ mk_kind: &mut F,
+ ) where
+ F: FnMut(&ty::GenericParamDef, &[Kind<'tcx>]) -> Kind<'tcx>,
{
if let Some(def_id) = defs.parent {
let parent_defs = tcx.generics_of(def_id);
@@ -372,22 +366,23 @@
/// in a different item, with `target_substs` as the base for
/// the target impl/trait, with the source child-specific
/// parameters (e.g., method parameters) on top of that base.
- pub fn rebase_onto(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- source_ancestor: DefId,
- target_substs: SubstsRef<'tcx>)
- -> SubstsRef<'tcx> {
+ pub fn rebase_onto(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ source_ancestor: DefId,
+ target_substs: SubstsRef<'tcx>,
+ ) -> SubstsRef<'tcx> {
let defs = tcx.generics_of(source_ancestor);
tcx.mk_substs(target_substs.iter().chain(&self[defs.params.len()..]).cloned())
}
- pub fn truncate_to(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, generics: &ty::Generics)
- -> SubstsRef<'tcx> {
+ pub fn truncate_to(&self, tcx: TyCtxt<'tcx>, generics: &ty::Generics) -> SubstsRef<'tcx> {
tcx.mk_substs(self.iter().take(generics.count()).cloned())
}
}
impl<'tcx> TypeFoldable<'tcx> for SubstsRef<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
let params: SmallVec<[_; 8]> = self.iter().map(|k| k.fold_with(folder)).collect();
// If folding doesn't change the substs, it's faster to avoid
@@ -414,23 +409,15 @@
// there is more information available (for better errors).
pub trait Subst<'tcx>: Sized {
- fn subst<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- substs: &[Kind<'tcx>]) -> Self {
+ fn subst(&self, tcx: TyCtxt<'tcx>, substs: &[Kind<'tcx>]) -> Self {
self.subst_spanned(tcx, substs, None)
}
- fn subst_spanned<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- substs: &[Kind<'tcx>],
- span: Option<Span>)
- -> Self;
+ fn subst_spanned(&self, tcx: TyCtxt<'tcx>, substs: &[Kind<'tcx>], span: Option<Span>) -> Self;
}
-impl<'tcx, T:TypeFoldable<'tcx>> Subst<'tcx> for T {
- fn subst_spanned<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- substs: &[Kind<'tcx>],
- span: Option<Span>)
- -> T
- {
+impl<'tcx, T: TypeFoldable<'tcx>> Subst<'tcx> for T {
+ fn subst_spanned(&self, tcx: TyCtxt<'tcx>, substs: &[Kind<'tcx>], span: Option<Span>) -> T {
let mut folder = SubstFolder { tcx,
substs,
span,
@@ -444,8 +431,8 @@
///////////////////////////////////////////////////////////////////////////
// The actual substitution engine itself is a type folder.
-struct SubstFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+struct SubstFolder<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
substs: &'a [Kind<'tcx>],
/// The location for which the substitution is performed, if available.
@@ -461,8 +448,8 @@
binders_passed: u32,
}
-impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for SubstFolder<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
+impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { self.tcx }
fn fold_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T> {
self.binders_passed += 1;
@@ -545,7 +532,7 @@
}
}
-impl<'a, 'gcx, 'tcx> SubstFolder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> SubstFolder<'a, 'tcx> {
fn ty_for_param(&self, p: ty::ParamTy, source_ty: Ty<'tcx>) -> Ty<'tcx> {
// Look up the type in the substitutions. It really should be in there.
let opt_ty = self.substs.get(p.index as usize).map(|k| k.unpack());
diff --git a/src/librustc/ty/trait_def.rs b/src/librustc/ty/trait_def.rs
index a0b409b..c40d4d5 100644
--- a/src/librustc/ty/trait_def.rs
+++ b/src/librustc/ty/trait_def.rs
@@ -46,7 +46,7 @@
non_blanket_impls: FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>,
}
-impl<'a, 'gcx, 'tcx> TraitDef {
+impl<'tcx> TraitDef {
pub fn new(def_id: DefId,
unsafety: hir::Unsafety,
paren_sugar: bool,
@@ -64,14 +64,16 @@
}
}
- pub fn ancestors(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- of_impl: DefId)
- -> specialization_graph::Ancestors<'gcx> {
+ pub fn ancestors(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ of_impl: DefId,
+ ) -> specialization_graph::Ancestors<'tcx> {
specialization_graph::ancestors(tcx, self.def_id, of_impl)
}
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
pub fn for_each_impl<F: FnMut(DefId)>(self, def_id: DefId, mut f: F) {
let impls = self.trait_impls_of(def_id);
@@ -148,9 +150,10 @@
}
// Query provider for `trait_impls_of`.
-pub(super) fn trait_impls_of_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- trait_id: DefId)
- -> &'tcx TraitImpls {
+pub(super) fn trait_impls_of_provider<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ trait_id: DefId,
+) -> &'tcx TraitImpls {
let mut impls = TraitImpls::default();
{
diff --git a/src/librustc/ty/util.rs b/src/librustc/ty/util.rs
index 9b4029f..a3b99f1 100644
--- a/src/librustc/ty/util.rs
+++ b/src/librustc/ty/util.rs
@@ -51,10 +51,10 @@
impl<'tcx> Discr<'tcx> {
/// Adds `1` to the value and wraps around if the maximum for the type is reached.
- pub fn wrap_incr<'a, 'gcx>(self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self {
+ pub fn wrap_incr(self, tcx: TyCtxt<'tcx>) -> Self {
self.checked_add(tcx, 1).0
}
- pub fn checked_add<'a, 'gcx>(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, n: u128) -> (Self, bool) {
+ pub fn checked_add(self, tcx: TyCtxt<'tcx>, n: u128) -> (Self, bool) {
let (int, signed) = match self.ty.sty {
Int(ity) => (Integer::from_attr(&tcx, SignedInt(ity)), true),
Uint(uty) => (Integer::from_attr(&tcx, UnsignedInt(uty)), false),
@@ -104,14 +104,13 @@
}
pub trait IntTypeExt {
- fn to_ty<'a, 'gcx, 'tcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx>;
- fn disr_incr<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, val: Option<Discr<'tcx>>)
- -> Option<Discr<'tcx>>;
- fn initial_discriminant<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Discr<'tcx>;
+ fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx>;
+ fn disr_incr<'tcx>(&self, tcx: TyCtxt<'tcx>, val: Option<Discr<'tcx>>) -> Option<Discr<'tcx>>;
+ fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx>;
}
impl IntTypeExt for attr::IntType {
- fn to_ty<'a, 'gcx, 'tcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
+ fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
match *self {
SignedInt(ast::IntTy::I8) => tcx.types.i8,
SignedInt(ast::IntTy::I16) => tcx.types.i16,
@@ -128,18 +127,14 @@
}
}
- fn initial_discriminant<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Discr<'tcx> {
+ fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx> {
Discr {
val: 0,
ty: self.to_ty(tcx)
}
}
- fn disr_incr<'a, 'tcx>(
- &self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- val: Option<Discr<'tcx>>,
- ) -> Option<Discr<'tcx>> {
+ fn disr_incr<'tcx>(&self, tcx: TyCtxt<'tcx>, val: Option<Discr<'tcx>>) -> Option<Discr<'tcx>> {
if let Some(val) = val {
assert_eq!(self.to_ty(tcx), val.ty);
let (new, oflo) = val.checked_add(tcx, 1);
@@ -178,10 +173,11 @@
}
impl<'tcx> ty::ParamEnv<'tcx> {
- pub fn can_type_implement_copy<'a>(self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- self_type: Ty<'tcx>)
- -> Result<(), CopyImplementationError<'tcx>> {
+ pub fn can_type_implement_copy(
+ self,
+ tcx: TyCtxt<'tcx>,
+ self_type: Ty<'tcx>,
+ ) -> Result<(), CopyImplementationError<'tcx>> {
// FIXME: (@jroesch) float this code up
tcx.infer_ctxt().enter(|infcx| {
let (adt, substs) = match self_type.sty {
@@ -228,7 +224,7 @@
}
}
-impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
/// Creates a hash of the type `Ty` which will be the same no matter what crate
/// context it's calculated within. This is used by the `type_id` intrinsic.
pub fn type_id_hash(self, ty: Ty<'tcx>) -> u64 {
@@ -249,7 +245,7 @@
}
}
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
pub fn has_error_field(self, ty: Ty<'tcx>) -> bool {
if let ty::Adt(def, substs) = ty.sty {
for field in def.all_fields() {
@@ -624,7 +620,7 @@
) -> Result<Ty<'tcx>, Ty<'tcx>> {
use crate::ty::fold::TypeFolder;
- struct OpaqueTypeExpander<'a, 'gcx, 'tcx> {
+ struct OpaqueTypeExpander<'tcx> {
// Contains the DefIds of the opaque types that are currently being
// expanded. When we expand an opaque type we insert the DefId of
// that type, and when we finish expanding that type we remove the
@@ -632,10 +628,10 @@
seen_opaque_tys: FxHashSet<DefId>,
primary_def_id: DefId,
found_recursion: bool,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
}
- impl<'a, 'gcx, 'tcx> OpaqueTypeExpander<'a, 'gcx, 'tcx> {
+ impl<'tcx> OpaqueTypeExpander<'tcx> {
fn expand_opaque_ty(
&mut self,
def_id: DefId,
@@ -658,8 +654,8 @@
}
}
- impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for OpaqueTypeExpander<'a, 'gcx, 'tcx> {
- fn tcx(&self) -> TyCtxt<'_, 'gcx, 'tcx> {
+ impl<'tcx> TypeFolder<'tcx> for OpaqueTypeExpander<'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
@@ -687,7 +683,7 @@
}
}
-impl<'a, 'tcx> ty::TyS<'tcx> {
+impl<'tcx> ty::TyS<'tcx> {
/// Checks whether values of this type `T` are *moved* or *copied*
/// when referenced -- this amounts to a check for whether `T:
/// Copy`, but note that we **don't** consider lifetimes when
@@ -695,11 +691,12 @@
/// does copies even when the type actually doesn't satisfy the
/// full requirements for the `Copy` trait (cc #29149) -- this
/// winds up being reported as an error during NLL borrow check.
- pub fn is_copy_modulo_regions(&'tcx self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- span: Span)
- -> bool {
+ pub fn is_copy_modulo_regions(
+ &'tcx self,
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ span: Span,
+ ) -> bool {
tcx.at(span).is_copy_raw(param_env.and(self))
}
@@ -709,10 +706,7 @@
/// over-approximation in generic contexts, where one can have
/// strange rules like `<T as Foo<'static>>::Bar: Sized` that
/// actually carry lifetime requirements.
- pub fn is_sized(&'tcx self,
- tcx_at: TyCtxtAt<'a, 'tcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>)-> bool
- {
+ pub fn is_sized(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
tcx_at.is_sized_raw(param_env.and(self))
}
@@ -723,11 +717,12 @@
/// optimization as well as the rules around static values. Note
/// that the `Freeze` trait is not exposed to end users and is
/// effectively an implementation detail.
- pub fn is_freeze(&'tcx self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- span: Span)-> bool
- {
+ pub fn is_freeze(
+ &'tcx self,
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ span: Span,
+ ) -> bool {
tcx.at(span).is_freeze_raw(param_env.and(self))
}
@@ -738,10 +733,7 @@
/// (Note that this implies that if `ty` has a destructor attached,
/// then `needs_drop` will definitely return `true` for `ty`.)
#[inline]
- pub fn needs_drop(&'tcx self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>)
- -> bool {
+ pub fn needs_drop(&'tcx self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
tcx.needs_drop_raw(param_env.and(self)).0
}
@@ -760,11 +752,7 @@
/// Check whether a type is representable. This means it cannot contain unboxed
/// structural recursion. This check is needed for structs and enums.
- pub fn is_representable(&'tcx self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- sp: Span)
- -> Representability
- {
+ pub fn is_representable(&'tcx self, tcx: TyCtxt<'tcx>, sp: Span) -> Representability {
// Iterate until something non-representable is found
fn fold_repr<It: Iterator<Item=Representability>>(iter: It) -> Representability {
iter.fold(Representability::Representable, |r1, r2| {
@@ -778,13 +766,13 @@
})
}
- fn are_inner_types_recursive<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span,
+ fn are_inner_types_recursive<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ sp: Span,
seen: &mut Vec<Ty<'tcx>>,
representable_cache: &mut FxHashMap<Ty<'tcx>, Representability>,
- ty: Ty<'tcx>)
- -> Representability
- {
+ ty: Ty<'tcx>,
+ ) -> Representability {
match ty.sty {
Tuple(ref ts) => {
// Find non representable
@@ -838,13 +826,13 @@
// Does the type `ty` directly (without indirection through a pointer)
// contain any types on stack `seen`?
- fn is_type_structurally_recursive<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ fn is_type_structurally_recursive<'tcx>(
+ tcx: TyCtxt<'tcx>,
sp: Span,
seen: &mut Vec<Ty<'tcx>>,
representable_cache: &mut FxHashMap<Ty<'tcx>, Representability>,
- ty: Ty<'tcx>) -> Representability
- {
+ ty: Ty<'tcx>,
+ ) -> Representability {
debug!("is_type_structurally_recursive: {:?} {:?}", ty, sp);
if let Some(representability) = representable_cache.get(ty) {
debug!("is_type_structurally_recursive: {:?} {:?} - (cached) {:?}",
@@ -859,13 +847,13 @@
representability
}
- fn is_type_structurally_recursive_inner<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ fn is_type_structurally_recursive_inner<'tcx>(
+ tcx: TyCtxt<'tcx>,
sp: Span,
seen: &mut Vec<Ty<'tcx>>,
representable_cache: &mut FxHashMap<Ty<'tcx>, Representability>,
- ty: Ty<'tcx>) -> Representability
- {
+ ty: Ty<'tcx>,
+ ) -> Representability {
match ty.sty {
Adt(def, _) => {
{
@@ -937,10 +925,7 @@
}
}
-fn is_copy_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
- -> bool
-{
+fn is_copy_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool {
let (param_env, ty) = query.into_parts();
let trait_def_id = tcx.require_lang_item(lang_items::CopyTraitLangItem);
tcx.infer_ctxt()
@@ -953,10 +938,7 @@
))
}
-fn is_sized_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
- -> bool
-{
+fn is_sized_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool {
let (param_env, ty) = query.into_parts();
let trait_def_id = tcx.require_lang_item(lang_items::SizedTraitLangItem);
tcx.infer_ctxt()
@@ -969,10 +951,7 @@
))
}
-fn is_freeze_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
- -> bool
-{
+fn is_freeze_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool {
let (param_env, ty) = query.into_parts();
let trait_def_id = tcx.require_lang_item(lang_items::FreezeTraitLangItem);
tcx.infer_ctxt()
@@ -988,10 +967,7 @@
#[derive(Clone, HashStable)]
pub struct NeedsDrop(pub bool);
-fn needs_drop_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
- -> NeedsDrop
-{
+fn needs_drop_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> NeedsDrop {
let (param_env, ty) = query.into_parts();
let needs_drop = |ty: Ty<'tcx>| -> bool {
diff --git a/src/librustc/ty/wf.rs b/src/librustc/ty/wf.rs
index c474baa..6b2f00e 100644
--- a/src/librustc/ty/wf.rs
+++ b/src/librustc/ty/wf.rs
@@ -15,13 +15,13 @@
/// inference variable, returns `None`, because we are not able to
/// make any progress at all. This is to prevent "livelock" where we
/// say "$0 is WF if $0 is WF".
-pub fn obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- body_id: hir::HirId,
- ty: Ty<'tcx>,
- span: Span)
- -> Option<Vec<traits::PredicateObligation<'tcx>>>
-{
+pub fn obligations<'a, 'tcx>(
+ infcx: &InferCtxt<'a, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ body_id: hir::HirId,
+ ty: Ty<'tcx>,
+ span: Span,
+) -> Option<Vec<traits::PredicateObligation<'tcx>>> {
let mut wf = WfPredicates { infcx,
param_env,
body_id,
@@ -41,25 +41,25 @@
/// well-formed. For example, if there is a trait `Set` defined like
/// `trait Set<K:Eq>`, then the trait reference `Foo: Set<Bar>` is WF
/// if `Bar: Eq`.
-pub fn trait_obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- body_id: hir::HirId,
- trait_ref: &ty::TraitRef<'tcx>,
- span: Span)
- -> Vec<traits::PredicateObligation<'tcx>>
-{
+pub fn trait_obligations<'a, 'tcx>(
+ infcx: &InferCtxt<'a, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ body_id: hir::HirId,
+ trait_ref: &ty::TraitRef<'tcx>,
+ span: Span,
+) -> Vec<traits::PredicateObligation<'tcx>> {
let mut wf = WfPredicates { infcx, param_env, body_id, span, out: vec![] };
wf.compute_trait_ref(trait_ref, Elaborate::All);
wf.normalize()
}
-pub fn predicate_obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- body_id: hir::HirId,
- predicate: &ty::Predicate<'tcx>,
- span: Span)
- -> Vec<traits::PredicateObligation<'tcx>>
-{
+pub fn predicate_obligations<'a, 'tcx>(
+ infcx: &InferCtxt<'a, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ body_id: hir::HirId,
+ predicate: &ty::Predicate<'tcx>,
+ span: Span,
+) -> Vec<traits::PredicateObligation<'tcx>> {
let mut wf = WfPredicates { infcx, param_env, body_id, span, out: vec![] };
// (*) ok to skip binders, because wf code is prepared for it
@@ -101,8 +101,8 @@
wf.normalize()
}
-struct WfPredicates<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+struct WfPredicates<'a, 'tcx: 'a> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
body_id: hir::HirId,
span: Span,
@@ -138,7 +138,7 @@
None,
}
-impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> WfPredicates<'a, 'tcx> {
fn cause(&mut self, code: traits::ObligationCauseCode<'tcx>) -> traits::ObligationCause<'tcx> {
traits::ObligationCause::new(self.span, self.body_id, code)
}
@@ -508,11 +508,10 @@
/// they declare `trait SomeTrait : 'static`, for example, then
/// `'static` would appear in the list. The hard work is done by
/// `ty::required_region_bounds`, see that for more information.
-pub fn object_region_bounds<'a, 'gcx, 'tcx>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- existential_predicates: ty::Binder<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>>)
- -> Vec<ty::Region<'tcx>>
-{
+pub fn object_region_bounds<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ existential_predicates: ty::Binder<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>>,
+) -> Vec<ty::Region<'tcx>> {
// Since we don't actually *know* the self type for an object,
// this "open(err)" serves as a kind of dummy standin -- basically
// a placeholder type.
diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs
index 35e6c1c..54989db 100644
--- a/src/librustc_borrowck/borrowck/check_loans.rs
+++ b/src/librustc_borrowck/borrowck/check_loans.rs
@@ -36,7 +36,7 @@
return match helper(loan_path) {
Some(new_loan_path) => new_loan_path,
- None => loan_path.clone()
+ None => loan_path,
};
fn helper<'a, 'tcx>(loan_path: &'a LoanPath<'tcx>) -> Option<&'a LoanPath<'tcx>> {
@@ -80,8 +80,8 @@
struct CheckLoanCtxt<'a, 'tcx: 'a> {
bccx: &'a BorrowckCtxt<'a, 'tcx>,
- dfcx_loans: &'a LoanDataFlow<'a, 'tcx>,
- move_data: &'a move_data::FlowedMoveData<'a, 'tcx>,
+ dfcx_loans: &'a LoanDataFlow<'tcx>,
+ move_data: &'a move_data::FlowedMoveData<'tcx>,
all_loans: &'a [Loan<'tcx>],
movable_generator: bool,
}
@@ -179,11 +179,13 @@
fn decl_without_init(&mut self, _id: hir::HirId, _span: Span) { }
}
-pub fn check_loans<'a, 'b, 'c, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
- dfcx_loans: &LoanDataFlow<'b, 'tcx>,
- move_data: &move_data::FlowedMoveData<'c, 'tcx>,
- all_loans: &[Loan<'tcx>],
- body: &hir::Body) {
+pub fn check_loans<'a, 'tcx>(
+ bccx: &BorrowckCtxt<'a, 'tcx>,
+ dfcx_loans: &LoanDataFlow<'tcx>,
+ move_data: &move_data::FlowedMoveData<'tcx>,
+ all_loans: &[Loan<'tcx>],
+ body: &hir::Body,
+) {
debug!("check_loans(body id={})", body.value.hir_id);
let def_id = bccx.tcx.hir().body_owner_def_id(body.id());
@@ -229,7 +231,7 @@
}
impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
- pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.bccx.tcx }
+ pub fn tcx(&self) -> TyCtxt<'tcx> { self.bccx.tcx }
pub fn each_issued_loan<F>(&self, node: hir::ItemLocalId, mut op: F) -> bool where
F: FnMut(&Loan<'tcx>) -> bool,
diff --git a/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs b/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs
index a15d3d1..4d03b58 100644
--- a/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs
+++ b/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs
@@ -45,7 +45,7 @@
///
/// In this latter case, this function will return `PatternSource::LetDecl`
/// with a reference to the let
-fn get_pattern_source<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, pat: &Pat) -> PatternSource<'tcx> {
+fn get_pattern_source<'tcx>(tcx: TyCtxt<'tcx>, pat: &Pat) -> PatternSource<'tcx> {
let parent = tcx.hir().get_parent_node_by_hir_id(pat.hir_id);
diff --git a/src/librustc_borrowck/borrowck/gather_loans/mod.rs b/src/librustc_borrowck/borrowck/gather_loans/mod.rs
index e437c08..b1854a0 100644
--- a/src/librustc_borrowck/borrowck/gather_loans/mod.rs
+++ b/src/librustc_borrowck/borrowck/gather_loans/mod.rs
@@ -251,7 +251,7 @@
}
impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> {
- pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.bccx.tcx }
+ pub fn tcx(&self) -> TyCtxt<'tcx> { self.bccx.tcx }
/// Guarantees that `cmt` is assignable, or reports an error.
fn guarantee_assignment_valid(&mut self,
diff --git a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs
index 9a00c43..58be2cf 100644
--- a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs
+++ b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs
@@ -88,7 +88,7 @@
}
}
if let NoteClosureEnv(upvar_id) = error.move_from.note {
- err.span_label(bccx.tcx.hir().span_by_hir_id(upvar_id.var_path.hir_id),
+ err.span_label(bccx.tcx.hir().span(upvar_id.var_path.hir_id),
"captured outer variable");
}
err.emit();
diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs
index 669fb91..93cea6d 100644
--- a/src/librustc_borrowck/borrowck/mod.rs
+++ b/src/librustc_borrowck/borrowck/mod.rs
@@ -51,9 +51,9 @@
#[derive(Clone, Copy)]
pub struct LoanDataFlowOperator;
-pub type LoanDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, LoanDataFlowOperator>;
+pub type LoanDataFlow<'tcx> = DataFlowContext<'tcx, LoanDataFlowOperator>;
-pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn check_crate<'tcx>(tcx: TyCtxt<'tcx>) {
tcx.par_body_owners(|body_owner_def_id| {
tcx.ensure().borrowck(body_owner_def_id);
});
@@ -67,15 +67,13 @@
}
/// Collection of conclusions determined via borrow checker analyses.
-pub struct AnalysisData<'a, 'tcx: 'a> {
+pub struct AnalysisData<'tcx> {
pub all_loans: Vec<Loan<'tcx>>,
- pub loans: DataFlowContext<'a, 'tcx, LoanDataFlowOperator>,
- pub move_data: move_data::FlowedMoveData<'a, 'tcx>,
+ pub loans: DataFlowContext<'tcx, LoanDataFlowOperator>,
+ pub move_data: move_data::FlowedMoveData<'tcx>,
}
-fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId)
- -> &'tcx BorrowCheckResult
-{
+fn borrowck<'tcx>(tcx: TyCtxt<'tcx>, owner_def_id: DefId) -> &'tcx BorrowCheckResult {
assert!(tcx.use_ast_borrowck() || tcx.migrate_borrowck());
debug!("borrowck(body_owner_def_id={:?})", owner_def_id);
@@ -141,12 +139,14 @@
})
}
-fn build_borrowck_dataflow_data<'a, 'c, 'tcx, F>(this: &mut BorrowckCtxt<'a, 'tcx>,
- force_analysis: bool,
- body_id: hir::BodyId,
- get_cfg: F)
- -> Option<AnalysisData<'a, 'tcx>>
- where F: FnOnce(&mut BorrowckCtxt<'a, 'tcx>) -> &'c cfg::CFG
+fn build_borrowck_dataflow_data<'a, 'c, 'tcx, F>(
+ this: &mut BorrowckCtxt<'a, 'tcx>,
+ force_analysis: bool,
+ body_id: hir::BodyId,
+ get_cfg: F,
+) -> Option<AnalysisData<'tcx>>
+where
+ F: FnOnce(&mut BorrowckCtxt<'a, 'tcx>) -> &'c cfg::CFG,
{
// Check the body of fn items.
let (all_loans, move_data) =
@@ -193,13 +193,12 @@
/// Accessor for introspective clients inspecting `AnalysisData` and
/// the `BorrowckCtxt` itself , e.g., the flowgraph visualizer.
pub fn build_borrowck_dataflow_data_for_fn<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body_id: hir::BodyId,
- cfg: &cfg::CFG)
- -> (BorrowckCtxt<'a, 'tcx>, AnalysisData<'a, 'tcx>)
-{
+ cfg: &cfg::CFG,
+) -> (BorrowckCtxt<'a, 'tcx>, AnalysisData<'tcx>) {
let owner_id = tcx.hir().body_owner(body_id);
- let owner_def_id = tcx.hir().local_def_id(owner_id);
+ let owner_def_id = tcx.hir().local_def_id_from_hir_id(owner_id);
let tables = tcx.typeck_tables_of(owner_def_id);
let region_scope_tree = tcx.region_scope_tree(owner_def_id);
let body = tcx.hir().body(body_id);
@@ -220,8 +219,8 @@
// ----------------------------------------------------------------------
// Type definitions
-pub struct BorrowckCtxt<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub struct BorrowckCtxt<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
// tables for the current thing we are checking; set to
// Some in `borrowck_fn` and cleared later
@@ -238,14 +237,13 @@
signalled_any_error: Cell<SignalledError>,
}
-
-impl<'a, 'tcx: 'a> BorrowckCtxt<'a, 'tcx> {
+impl BorrowckCtxt<'_, 'tcx> {
fn signal_error(&self) {
self.signalled_any_error.set(SignalledError::SawSomeError);
}
}
-impl<'a, 'b, 'tcx: 'b> BorrowckErrors<'a> for &'a BorrowckCtxt<'b, 'tcx> {
+impl BorrowckErrors<'a> for &'a BorrowckCtxt<'_, 'tcx> {
fn struct_span_err_with_code<S: Into<MultiSpan>>(self,
sp: S,
msg: &str,
@@ -390,10 +388,9 @@
LpInterior(Option<DefId>, InteriorKind),
}
-fn closure_to_block(closure_id: LocalDefId,
- tcx: TyCtxt<'_, '_, '_>) -> HirId {
- let closure_id = tcx.hir().local_def_id_to_node_id(closure_id);
- match tcx.hir().get(closure_id) {
+fn closure_to_block(closure_id: LocalDefId, tcx: TyCtxt<'_>) -> HirId {
+ let closure_id = tcx.hir().local_def_id_to_hir_id(closure_id);
+ match tcx.hir().get_by_hir_id(closure_id) {
Node::Expr(expr) => match expr.node {
hir::ExprKind::Closure(.., body_id, _, _) => {
body_id.hir_id
@@ -406,8 +403,8 @@
}
}
-impl<'a, 'tcx> LoanPath<'tcx> {
- pub fn kill_scope(&self, bccx: &BorrowckCtxt<'a, 'tcx>) -> region::Scope {
+impl LoanPath<'tcx> {
+ pub fn kill_scope(&self, bccx: &BorrowckCtxt<'_, 'tcx>) -> region::Scope {
match self.kind {
LpVar(hir_id) => {
bccx.region_scope_tree.var_scope(hir_id.local_id)
@@ -577,7 +574,7 @@
///////////////////////////////////////////////////////////////////////////
// Misc
-impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> {
+impl BorrowckCtxt<'_, 'tcx> {
pub fn is_subregion_of(&self,
r_sub: ty::Region<'tcx>,
r_sup: ty::Region<'tcx>)
@@ -702,7 +699,7 @@
}
move_data::MoveExpr |
- move_data::MovePat => (self.tcx.hir().span_by_hir_id(hir_id), ""),
+ move_data::MovePat => (self.tcx.hir().span(hir_id), ""),
move_data::Captured =>
(match self.tcx.hir().expect_expr_by_hir_id(hir_id).node {
@@ -831,7 +828,7 @@
let mut db = self.cannot_assign(error_span, &descr, Origin::Ast);
if let mc::NoteClosureEnv(upvar_id) = err.cmt.note {
let hir_id = upvar_id.var_path.hir_id;
- let sp = self.tcx.hir().span_by_hir_id(hir_id);
+ let sp = self.tcx.hir().span(hir_id);
let fn_closure_msg = "`Fn` closures cannot capture their enclosing \
environment for modifications";
match (self.tcx.sess.source_map().span_to_snippet(sp), &err.cmt.cat) {
@@ -899,8 +896,7 @@
// to implement two traits for "one operator" is not very intuitive for
// many programmers.
if err.cmt.note == mc::NoteIndex {
- let node_id = self.tcx.hir().hir_to_node_id(err.cmt.hir_id);
- let node = self.tcx.hir().get(node_id);
+ let node = self.tcx.hir().get_by_hir_id(err.cmt.hir_id);
// This pattern probably always matches.
if let Node::Expr(
@@ -1025,8 +1021,8 @@
}
if let ty::ReScope(scope) = *super_scope {
- let node_id = scope.node_id(self.tcx, &self.region_scope_tree);
- match self.tcx.hir().find(node_id) {
+ let hir_id = scope.hir_id(&self.region_scope_tree);
+ match self.tcx.hir().find_by_hir_id(hir_id) {
Some(Node::Stmt(_)) => {
if *sub_scope != ty::ReStatic {
db.note("consider using a `let` binding to increase its lifetime");
@@ -1121,7 +1117,7 @@
"consider changing this closure to take self by mutable reference"
};
let hir_id = self.tcx.hir().local_def_id_to_hir_id(id);
- let help_span = self.tcx.hir().span_by_hir_id(hir_id);
+ let help_span = self.tcx.hir().span(hir_id);
self.cannot_act_on_capture_in_sharable_fn(span,
prefix,
(help_span, help_msg),
@@ -1227,7 +1223,7 @@
Some(ImmutabilityBlame::LocalDeref(hir_id)) => {
match self.local_binding_mode(hir_id) {
ty::BindByReference(..) => {
- let let_span = self.tcx.hir().span_by_hir_id(hir_id);
+ let let_span = self.tcx.hir().span(hir_id);
let suggestion = suggest_ref_mut(self.tcx, let_span);
if let Some(replace_str) = suggestion {
db.span_suggestion(
@@ -1275,7 +1271,7 @@
db: &mut DiagnosticBuilder<'_>,
borrowed_hir_id: hir::HirId,
binding_hir_id: hir::HirId) {
- let let_span = self.tcx.hir().span_by_hir_id(binding_hir_id);
+ let let_span = self.tcx.hir().span(binding_hir_id);
if let ty::BindByValue(..) = self.local_binding_mode(binding_hir_id) {
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(let_span) {
let (ty, is_implicit_self) = self.local_ty(binding_hir_id);
@@ -1293,7 +1289,7 @@
{
let borrow_expr_id = self.tcx.hir().get_parent_node_by_hir_id(borrowed_hir_id);
db.span_suggestion(
- self.tcx.hir().span_by_hir_id(borrow_expr_id),
+ self.tcx.hir().span(borrow_expr_id),
"consider removing the `&mut`, as it is an \
immutable binding to a mutable reference",
snippet,
@@ -1364,7 +1360,7 @@
if *kind == ty::ClosureKind::Fn {
let closure_hir_id =
self.tcx.hir().local_def_id_to_hir_id(upvar_id.closure_expr_id);
- db.span_help(self.tcx.hir().span_by_hir_id(closure_hir_id),
+ db.span_help(self.tcx.hir().span(closure_hir_id),
"consider changing this closure to take \
self by mutable reference");
}
@@ -1373,7 +1369,7 @@
if let Categorization::Deref(..) = err.cmt.cat {
db.span_label(*error_span, "cannot borrow as mutable");
} else if let Categorization::Local(local_id) = err.cmt.cat {
- let span = self.tcx.hir().span_by_hir_id(local_id);
+ let span = self.tcx.hir().span(local_id);
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
if snippet.starts_with("ref mut ") || snippet.starts_with("&mut ") {
db.span_label(*error_span, "cannot reborrow mutably");
@@ -1512,13 +1508,12 @@
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.kind {
LpVar(id) => {
- write!(f, "$({})", ty::tls::with(|tcx| tcx.hir().hir_to_string(id)))
+ write!(f, "$({})", ty::tls::with(|tcx| tcx.hir().node_to_string(id)))
}
LpUpvar(ty::UpvarId{ var_path: ty::UpvarPath {hir_id: var_id}, closure_expr_id }) => {
let s = ty::tls::with(|tcx| {
- let var_node_id = tcx.hir().hir_to_node_id(var_id);
- tcx.hir().node_to_string(var_node_id)
+ tcx.hir().node_to_string(var_id)
});
write!(f, "$({} captured by id={:?})", s, closure_expr_id)
}
@@ -1552,8 +1547,7 @@
LpUpvar(ty::UpvarId{ var_path: ty::UpvarPath { hir_id }, closure_expr_id: _ }) => {
let s = ty::tls::with(|tcx| {
- let var_node_id = tcx.hir().hir_to_node_id(hir_id);
- tcx.hir().node_to_string(var_node_id)
+ tcx.hir().node_to_string(hir_id)
});
write!(f, "$({} captured by closure)", s)
}
diff --git a/src/librustc_borrowck/borrowck/move_data.rs b/src/librustc_borrowck/borrowck/move_data.rs
index 325d355..9feea64 100644
--- a/src/librustc_borrowck/borrowck/move_data.rs
+++ b/src/librustc_borrowck/borrowck/move_data.rs
@@ -39,15 +39,15 @@
pub path_assignments: RefCell<Vec<Assignment>>,
}
-pub struct FlowedMoveData<'a, 'tcx: 'a> {
+pub struct FlowedMoveData<'tcx> {
pub move_data: MoveData<'tcx>,
- pub dfcx_moves: MoveDataFlow<'a, 'tcx>,
+ pub dfcx_moves: MoveDataFlow<'tcx>,
// We could (and maybe should, for efficiency) combine both move
// and assign data flow into one, but this way it's easier to
// distinguish the bits that correspond to moves and assignments.
- pub dfcx_assign: AssignDataFlow<'a, 'tcx>
+ pub dfcx_assign: AssignDataFlow<'tcx>,
}
/// Index into `MoveData.paths`, used like a pointer
@@ -139,12 +139,12 @@
#[derive(Clone, Copy)]
pub struct MoveDataFlowOperator;
-pub type MoveDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, MoveDataFlowOperator>;
+pub type MoveDataFlow<'tcx> = DataFlowContext<'tcx, MoveDataFlowOperator>;
#[derive(Clone, Copy)]
pub struct AssignDataFlowOperator;
-pub type AssignDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, AssignDataFlowOperator>;
+pub type AssignDataFlow<'tcx> = DataFlowContext<'tcx, AssignDataFlowOperator>;
fn loan_path_is_precise(loan_path: &LoanPath<'_>) -> bool {
match loan_path.kind {
@@ -167,7 +167,7 @@
}
}
-impl<'a, 'tcx> MoveData<'tcx> {
+impl MoveData<'tcx> {
/// Returns `true` if there are no trackable assignments or moves
/// in this move data -- that means that there is nothing that
/// could cause a borrow error.
@@ -223,8 +223,7 @@
/// Returns the existing move path index for `lp`, if any, and otherwise adds a new index for
/// `lp` and any of its base paths that do not yet have an index.
- pub fn move_path(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- lp: Rc<LoanPath<'tcx>>) -> MovePathIndex {
+ pub fn move_path(&self, tcx: TyCtxt<'tcx>, lp: Rc<LoanPath<'tcx>>) -> MovePathIndex {
if let Some(&index) = self.path_map.borrow().get(&lp) {
return index;
}
@@ -311,10 +310,13 @@
}
/// Adds a new move entry for a move of `lp` that occurs at location `id` with kind `kind`.
- pub fn add_move(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- orig_lp: Rc<LoanPath<'tcx>>,
- id: hir::ItemLocalId,
- kind: MoveKind) {
+ pub fn add_move(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ orig_lp: Rc<LoanPath<'tcx>>,
+ id: hir::ItemLocalId,
+ kind: MoveKind,
+ ) {
// Moving one union field automatically moves all its fields. Also move siblings of
// all parent union fields, moves do not propagate upwards automatically.
let mut lp = orig_lp.clone();
@@ -340,10 +342,13 @@
self.add_move_helper(tcx, orig_lp, id, kind);
}
- fn add_move_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- lp: Rc<LoanPath<'tcx>>,
- id: hir::ItemLocalId,
- kind: MoveKind) {
+ fn add_move_helper(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ lp: Rc<LoanPath<'tcx>>,
+ id: hir::ItemLocalId,
+ kind: MoveKind,
+ ) {
debug!("add_move(lp={:?}, id={:?}, kind={:?})",
lp,
id,
@@ -365,10 +370,13 @@
/// Adds a new record for an assignment to `lp` that occurs at location `id` with the given
/// `span`.
- pub fn add_assignment(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- lp: Rc<LoanPath<'tcx>>,
- assign_id: hir::ItemLocalId,
- span: Span) {
+ pub fn add_assignment(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ lp: Rc<LoanPath<'tcx>>,
+ assign_id: hir::ItemLocalId,
+ span: Span,
+ ) {
// Assigning to one union field automatically assigns to all its fields.
if let LpExtend(ref base_lp, mutbl, LpInterior(opt_variant_id, interior)) = lp.kind {
if let ty::Adt(adt_def, _) = base_lp.ty.sty {
@@ -395,10 +403,13 @@
self.add_assignment_helper(tcx, lp, assign_id, span);
}
- fn add_assignment_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- lp: Rc<LoanPath<'tcx>>,
- assign_id: hir::ItemLocalId,
- span: Span) {
+ fn add_assignment_helper(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ lp: Rc<LoanPath<'tcx>>,
+ assign_id: hir::ItemLocalId,
+ span: Span,
+ ) {
debug!("add_assignment(lp={:?}, assign_id={:?}", lp, assign_id);
let path_index = self.move_path(tcx, lp.clone());
@@ -427,10 +438,12 @@
/// Moves are generated by moves and killed by assignments and
/// scoping. Assignments are generated by assignment to variables and
/// killed by scoping. See `README.md` for more details.
- fn add_gen_kills(&self,
- bccx: &BorrowckCtxt<'a, 'tcx>,
- dfcx_moves: &mut MoveDataFlow<'_, '_>,
- dfcx_assign: &mut AssignDataFlow<'_, '_>) {
+ fn add_gen_kills(
+ &self,
+ bccx: &BorrowckCtxt<'_, 'tcx>,
+ dfcx_moves: &mut MoveDataFlow<'_>,
+ dfcx_assign: &mut AssignDataFlow<'_>,
+ ) {
for (i, the_move) in self.moves.borrow().iter().enumerate() {
dfcx_moves.add_gen(the_move.id, i);
}
@@ -534,11 +547,13 @@
ret
}
- fn kill_moves(&self,
- path: MovePathIndex,
- kill_id: hir::ItemLocalId,
- kill_kind: KillFrom,
- dfcx_moves: &mut MoveDataFlow<'_, '_>) {
+ fn kill_moves(
+ &self,
+ path: MovePathIndex,
+ kill_id: hir::ItemLocalId,
+ kill_kind: KillFrom,
+ dfcx_moves: &mut MoveDataFlow<'_>,
+ ) {
// We can only perform kills for paths that refer to a unique location,
// since otherwise we may kill a move from one location with an
// assignment referring to another location.
@@ -555,12 +570,13 @@
}
}
-impl<'a, 'tcx> FlowedMoveData<'a, 'tcx> {
- pub fn new(move_data: MoveData<'tcx>,
- bccx: &BorrowckCtxt<'a, 'tcx>,
- cfg: &cfg::CFG,
- body: &hir::Body)
- -> FlowedMoveData<'a, 'tcx> {
+impl<'tcx> FlowedMoveData<'tcx> {
+ pub fn new(
+ move_data: MoveData<'tcx>,
+ bccx: &BorrowckCtxt<'_, 'tcx>,
+ cfg: &cfg::CFG,
+ body: &hir::Body,
+ ) -> FlowedMoveData<'tcx> {
let tcx = bccx.tcx;
let mut dfcx_moves =
diff --git a/src/librustc_borrowck/dataflow.rs b/src/librustc_borrowck/dataflow.rs
index af10404..f5d311b 100644
--- a/src/librustc_borrowck/dataflow.rs
+++ b/src/librustc_borrowck/dataflow.rs
@@ -26,8 +26,8 @@
}
#[derive(Clone)]
-pub struct DataFlowContext<'a, 'tcx: 'a, O> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub struct DataFlowContext<'tcx, O> {
+ tcx: TyCtxt<'tcx>,
/// a name for the analysis using this dataflow instance
analysis_name: &'static str,
@@ -51,7 +51,6 @@
// `id_range`, there is a range of words equal to `words_per_id`.
// So, to access the bits for any given id, you take a slice of
// the full vector (see the method `compute_id_range()`).
-
/// bits generated as we exit the cfg node. Updated by `add_gen()`.
gens: Vec<usize>,
@@ -80,9 +79,9 @@
fn initial_value(&self) -> bool;
}
-struct PropagationContext<'a, 'b: 'a, 'tcx: 'b, O> {
- dfcx: &'a mut DataFlowContext<'b, 'tcx, O>,
- changed: bool
+struct PropagationContext<'a, 'tcx, O> {
+ dfcx: &'a mut DataFlowContext<'tcx, O>,
+ changed: bool,
}
fn get_cfg_indices<'a>(id: hir::ItemLocalId,
@@ -91,14 +90,14 @@
index.get(&id).map_or(&[], |v| &v[..])
}
-impl<'a, 'tcx, O: DataFlowOperator> DataFlowContext<'a, 'tcx, O> {
+impl<'tcx, O: DataFlowOperator> DataFlowContext<'tcx, O> {
fn has_bitset_for_local_id(&self, n: hir::ItemLocalId) -> bool {
assert!(n != hir::DUMMY_ITEM_LOCAL_ID);
self.local_id_to_index.contains_key(&n)
}
}
-impl<'a, 'tcx, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, 'tcx, O> {
+impl<'tcx, O: DataFlowOperator> pprust::PpAnn for DataFlowContext<'tcx, O> {
fn nested(&self, state: &mut pprust::State<'_>, nested: pprust::Nested) -> io::Result<()> {
pprust::PpAnn::nested(self.tcx.hir(), state, nested)
}
@@ -224,13 +223,15 @@
Execution,
}
-impl<'a, 'tcx, O: DataFlowOperator> DataFlowContext<'a, 'tcx, O> {
- pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- analysis_name: &'static str,
- body: Option<&hir::Body>,
- cfg: &cfg::CFG,
- oper: O,
- bits_per_id: usize) -> DataFlowContext<'a, 'tcx, O> {
+impl<'tcx, O: DataFlowOperator> DataFlowContext<'tcx, O> {
+ pub fn new(
+ tcx: TyCtxt<'tcx>,
+ analysis_name: &'static str,
+ body: Option<&hir::Body>,
+ cfg: &cfg::CFG,
+ oper: O,
+ bits_per_id: usize,
+ ) -> DataFlowContext<'tcx, O> {
let usize_bits = mem::size_of::<usize>() * 8;
let words_per_id = (bits_per_id + usize_bits - 1) / usize_bits;
let num_nodes = cfg.graph.all_nodes().len();
@@ -500,7 +501,7 @@
}
// N.B. `Clone + 'static` only needed for pretty printing.
-impl<'a, 'tcx, O: DataFlowOperator + Clone + 'static> DataFlowContext<'a, 'tcx, O> {
+impl<'tcx, O: DataFlowOperator + Clone + 'static> DataFlowContext<'tcx, O> {
pub fn propagate(&mut self, cfg: &cfg::CFG, body: &hir::Body) {
//! Performs the data flow analysis.
@@ -537,7 +538,7 @@
}
}
-impl<'a, 'b, 'tcx, O: DataFlowOperator> PropagationContext<'a, 'b, 'tcx, O> {
+impl<O: DataFlowOperator> PropagationContext<'_, 'tcx, O> {
fn walk_cfg(&mut self,
cfg: &cfg::CFG,
nodes_po: &[CFGIndex],
diff --git a/src/librustc_borrowck/graphviz.rs b/src/librustc_borrowck/graphviz.rs
index 77056d4..1f24806 100644
--- a/src/librustc_borrowck/graphviz.rs
+++ b/src/librustc_borrowck/graphviz.rs
@@ -34,7 +34,7 @@
pub inner: cfg_dot::LabelledCFG<'a, 'tcx>,
pub variants: Vec<Variant>,
pub borrowck_ctxt: &'a BorrowckCtxt<'a, 'tcx>,
- pub analysis_data: &'a borrowck::AnalysisData<'a, 'tcx>,
+ pub analysis_data: &'a borrowck::AnalysisData<'tcx>,
}
impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
@@ -61,11 +61,14 @@
}
}
- fn build_set<O:DataFlowOperator, F>(&self,
- e: EntryOrExit,
- cfgidx: CFGIndex,
- dfcx: &DataFlowContext<'a, 'tcx, O>,
- mut to_lp: F) -> String where
+ fn build_set<O: DataFlowOperator, F>(
+ &self,
+ e: EntryOrExit,
+ cfgidx: CFGIndex,
+ dfcx: &DataFlowContext<'tcx, O>,
+ mut to_lp: F,
+ ) -> String
+ where
F: FnMut(usize) -> Rc<LoanPath<'tcx>>,
{
let mut saw_some = false;
diff --git a/src/librustc_borrowck/lib.rs b/src/librustc_borrowck/lib.rs
index 5d8b0cd..98e629c 100644
--- a/src/librustc_borrowck/lib.rs
+++ b/src/librustc_borrowck/lib.rs
@@ -5,6 +5,7 @@
#![deny(internal)]
#![deny(unused_lifetimes)]
+#![feature(in_band_lifetimes)]
#![feature(nll)]
#![recursion_limit="256"]
diff --git a/src/librustc_codegen_llvm/allocator.rs b/src/librustc_codegen_llvm/allocator.rs
index 1fe0205..02a05fd 100644
--- a/src/librustc_codegen_llvm/allocator.rs
+++ b/src/librustc_codegen_llvm/allocator.rs
@@ -9,7 +9,7 @@
use crate::ModuleLlvm;
use crate::llvm::{self, False, True};
-pub(crate) unsafe fn codegen(tcx: TyCtxt<'_, '_, '_>, mods: &mut ModuleLlvm, kind: AllocatorKind) {
+pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut ModuleLlvm, kind: AllocatorKind) {
let llcx = &*mods.llcx;
let llmod = mods.llmod();
let usize = match &tcx.sess.target.target.target_pointer_width[..] {
diff --git a/src/librustc_codegen_llvm/attributes.rs b/src/librustc_codegen_llvm/attributes.rs
index 9d0e7dd..4735588 100644
--- a/src/librustc_codegen_llvm/attributes.rs
+++ b/src/librustc_codegen_llvm/attributes.rs
@@ -367,7 +367,7 @@
};
}
-fn wasm_import_module(tcx: TyCtxt<'_, '_, '_>, id: DefId) -> Option<CString> {
+fn wasm_import_module(tcx: TyCtxt<'_>, id: DefId) -> Option<CString> {
tcx.wasm_import_module_map(id.krate)
.get(&id)
.map(|s| CString::new(&s[..]).unwrap())
diff --git a/src/librustc_codegen_llvm/back/lto.rs b/src/librustc_codegen_llvm/back/lto.rs
index 74cda2d..5d3cc0c 100644
--- a/src/librustc_codegen_llvm/back/lto.rs
+++ b/src/librustc_codegen_llvm/back/lto.rs
@@ -279,7 +279,7 @@
}
}));
serialized_modules.extend(cached_modules.into_iter().map(|(buffer, wp)| {
- (buffer, CString::new(wp.cgu_name.clone()).unwrap())
+ (buffer, CString::new(wp.cgu_name).unwrap())
}));
// For all serialized bitcode files we parse them and link them in as we did
diff --git a/src/librustc_codegen_llvm/back/write.rs b/src/librustc_codegen_llvm/back/write.rs
index d8a9f68..3638730 100644
--- a/src/librustc_codegen_llvm/back/write.rs
+++ b/src/librustc_codegen_llvm/back/write.rs
@@ -89,7 +89,7 @@
}
pub fn create_target_machine(
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
find_features: bool,
) -> &'static mut llvm::TargetMachine {
target_machine_factory(&tcx.sess, tcx.backend_optimization_level(LOCAL_CRATE), find_features)()
diff --git a/src/librustc_codegen_llvm/base.rs b/src/librustc_codegen_llvm/base.rs
index 62374eb..04645da 100644
--- a/src/librustc_codegen_llvm/base.rs
+++ b/src/librustc_codegen_llvm/base.rs
@@ -41,10 +41,10 @@
use crate::value::Value;
-pub fn write_compressed_metadata<'a, 'gcx>(
- tcx: TyCtxt<'a, 'gcx, 'gcx>,
+pub fn write_compressed_metadata<'tcx>(
+ tcx: TyCtxt<'tcx>,
metadata: &EncodedMetadata,
- llvm_module: &mut ModuleLlvm
+ llvm_module: &mut ModuleLlvm,
) {
use std::io::Write;
use flate2::Compression;
@@ -103,7 +103,7 @@
}
}
-pub fn compile_codegen_unit(tcx: TyCtxt<'a, 'tcx, 'tcx>, cgu_name: InternedString) {
+pub fn compile_codegen_unit(tcx: TyCtxt<'tcx>, cgu_name: InternedString) {
let start_time = Instant::now();
let dep_node = tcx.codegen_unit(cgu_name).codegen_dep_node(tcx);
@@ -123,8 +123,8 @@
submit_codegened_module_to_llvm(&LlvmCodegenBackend(()), tcx, module, cost);
- fn module_codegen<'ll, 'tcx>(
- tcx: TyCtxt<'ll, 'tcx, 'tcx>,
+ fn module_codegen<'tcx>(
+ tcx: TyCtxt<'tcx>,
cgu_name: InternedString,
) -> ModuleCodegen<ModuleLlvm> {
let cgu = tcx.codegen_unit(cgu_name);
diff --git a/src/librustc_codegen_llvm/builder.rs b/src/librustc_codegen_llvm/builder.rs
index 9821563..9102ba9 100644
--- a/src/librustc_codegen_llvm/builder.rs
+++ b/src/librustc_codegen_llvm/builder.rs
@@ -66,7 +66,7 @@
}
impl ty::layout::HasTyCtxt<'tcx> for Builder<'_, '_, 'tcx> {
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.cx.tcx
}
}
@@ -1228,9 +1228,7 @@
ret.expect("LLVM does not have support for catchret")
}
- fn check_store<'b>(&mut self,
- val: &'ll Value,
- ptr: &'ll Value) -> &'ll Value {
+ fn check_store(&mut self, val: &'ll Value, ptr: &'ll Value) -> &'ll Value {
let dest_ptr_ty = self.cx.val_ty(ptr);
let stored_ty = self.cx.val_ty(val);
let stored_ptr_ty = self.cx.type_ptr_to(stored_ty);
diff --git a/src/librustc_codegen_llvm/context.rs b/src/librustc_codegen_llvm/context.rs
index 2bc4633..588f748 100644
--- a/src/librustc_codegen_llvm/context.rs
+++ b/src/librustc_codegen_llvm/context.rs
@@ -35,7 +35,7 @@
/// `llvm::Context` so that several compilation units may be optimized in parallel.
/// All other LLVM data structures in the `CodegenCx` are tied to that `llvm::Context`.
pub struct CodegenCx<'ll, 'tcx: 'll> {
- pub tcx: TyCtxt<'ll, 'tcx, 'tcx>,
+ pub tcx: TyCtxt<'tcx>,
pub check_overflow: bool,
pub use_dll_storage_attrs: bool,
pub tls_model: llvm::ThreadLocalMode,
@@ -47,8 +47,8 @@
/// Cache instances of monomorphic and polymorphic items
pub instances: RefCell<FxHashMap<Instance<'tcx>, &'ll Value>>,
/// Cache generated vtables
- pub vtables: RefCell<FxHashMap<
- (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), &'ll Value>>,
+ pub vtables:
+ RefCell<FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), &'ll Value>>,
/// Cache of constant strings,
pub const_cstr_cache: RefCell<FxHashMap<LocalInternedString, &'ll Value>>,
@@ -141,7 +141,7 @@
}
pub unsafe fn create_module(
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
llcx: &'ll llvm::Context,
mod_name: &str,
) -> &'ll llvm::Module {
@@ -207,10 +207,11 @@
}
impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
- crate fn new(tcx: TyCtxt<'ll, 'tcx, 'tcx>,
- codegen_unit: Arc<CodegenUnit<'tcx>>,
- llvm_module: &'ll crate::ModuleLlvm)
- -> Self {
+ crate fn new(
+ tcx: TyCtxt<'tcx>,
+ codegen_unit: Arc<CodegenUnit<'tcx>>,
+ llvm_module: &'ll crate::ModuleLlvm,
+ ) -> Self {
// An interesting part of Windows which MSVC forces our hand on (and
// apparently MinGW didn't) is the usage of `dllimport` and `dllexport`
// attributes in LLVM IR as well as native dependencies (in C these
@@ -458,7 +459,7 @@
};
let f = self.declare_cfn(name, fn_ty);
llvm::SetUnnamedAddr(f, false);
- self.intrinsics.borrow_mut().insert(name, f.clone());
+ self.intrinsics.borrow_mut().insert(name, f);
f
}
@@ -838,7 +839,7 @@
}
impl ty::layout::HasTyCtxt<'tcx> for CodegenCx<'ll, 'tcx> {
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
}
diff --git a/src/librustc_codegen_llvm/debuginfo/metadata.rs b/src/librustc_codegen_llvm/debuginfo/metadata.rs
index 42bd790..fbeda43 100644
--- a/src/librustc_codegen_llvm/debuginfo/metadata.rs
+++ b/src/librustc_codegen_llvm/debuginfo/metadata.rs
@@ -222,8 +222,7 @@
// Get the unique type id string for an enum variant part.
// Variant parts are not types and shouldn't really have their own id,
// but it makes set_members_of_composite_type() simpler.
- fn get_unique_type_id_str_of_enum_variant_part<'a>(&mut self,
- enum_type_id: UniqueTypeId) -> &str {
+ fn get_unique_type_id_str_of_enum_variant_part(&mut self, enum_type_id: UniqueTypeId) -> &str {
let variant_part_type_id = format!("{}_variant_part",
self.get_unique_type_id_as_string(enum_type_id));
let interner_key = self.unique_id_interner.intern(&variant_part_type_id);
@@ -894,10 +893,11 @@
}
}
-pub fn compile_unit_metadata(tcx: TyCtxt<'_, '_, '_>,
- codegen_unit_name: &str,
- debug_context: &CrateDebugContext<'ll, '_>)
- -> &'ll DIDescriptor {
+pub fn compile_unit_metadata(
+ tcx: TyCtxt<'_>,
+ codegen_unit_name: &str,
+ debug_context: &CrateDebugContext<'ll, '_>,
+) -> &'ll DIDescriptor {
let mut name_in_debuginfo = match tcx.sess.local_crate_source_file {
Some(ref path) => path.clone(),
None => PathBuf::from(&*tcx.crate_name(LOCAL_CRATE).as_str()),
@@ -1609,7 +1609,7 @@
// with every variant, make each variant name be just the value
// of the discriminant. The struct name for the variant includes
// the actual variant description.
- format!("{}", variant_index.as_usize()).to_string()
+ format!("{}", variant_index.as_usize())
}
}
}
@@ -2257,11 +2257,7 @@
/// given type.
///
/// Adds the created metadata nodes directly to the crate's IR.
-pub fn create_vtable_metadata(
- cx: &CodegenCx<'ll, 'tcx>,
- ty: ty::Ty<'tcx>,
- vtable: &'ll Value,
-) {
+pub fn create_vtable_metadata(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>, vtable: &'ll Value) {
if cx.dbg_cx.is_none() {
return;
}
diff --git a/src/librustc_codegen_llvm/intrinsic.rs b/src/librustc_codegen_llvm/intrinsic.rs
index 875f1d0..dc3631e 100644
--- a/src/librustc_codegen_llvm/intrinsic.rs
+++ b/src/librustc_codegen_llvm/intrinsic.rs
@@ -1417,7 +1417,7 @@
// FIXME: use:
// https://github.com/llvm-mirror/llvm/blob/master/include/llvm/IR/Function.h#L182
// https://github.com/llvm-mirror/llvm/blob/master/include/llvm/IR/Intrinsics.h#L81
- fn llvm_vector_str(elem_ty: ty::Ty<'_>, vec_len: usize, no_pointers: usize) -> String {
+ fn llvm_vector_str(elem_ty: Ty<'_>, vec_len: usize, no_pointers: usize) -> String {
let p0s: String = "p0".repeat(no_pointers);
match elem_ty.sty {
ty::Int(v) => format!("v{}{}i{}", vec_len, p0s, v.bit_width().unwrap()),
@@ -1427,7 +1427,7 @@
}
}
- fn llvm_vector_ty(cx: &CodegenCx<'ll, '_>, elem_ty: ty::Ty<'_>, vec_len: usize,
+ fn llvm_vector_ty(cx: &CodegenCx<'ll, '_>, elem_ty: Ty<'_>, vec_len: usize,
mut no_pointers: usize) -> &'ll Type {
// FIXME: use cx.layout_of(ty).llvm_type() ?
let mut elem_ty = match elem_ty.sty {
@@ -1473,7 +1473,7 @@
in_ty, ret_ty);
// This counts how many pointers
- fn ptr_count(t: ty::Ty<'_>) -> usize {
+ fn ptr_count(t: Ty<'_>) -> usize {
match t.sty {
ty::RawPtr(p) => 1 + ptr_count(p.ty),
_ => 0,
@@ -1481,7 +1481,7 @@
}
// Non-ptr type
- fn non_ptr(t: ty::Ty<'_>) -> ty::Ty<'_> {
+ fn non_ptr(t: Ty<'_>) -> Ty<'_> {
match t.sty {
ty::RawPtr(p) => non_ptr(p.ty),
_ => t,
@@ -1572,7 +1572,7 @@
arg_tys[2].simd_size(tcx));
// This counts how many pointers
- fn ptr_count(t: ty::Ty<'_>) -> usize {
+ fn ptr_count(t: Ty<'_>) -> usize {
match t.sty {
ty::RawPtr(p) => 1 + ptr_count(p.ty),
_ => 0,
@@ -1580,7 +1580,7 @@
}
// Non-ptr type
- fn non_ptr(t: ty::Ty<'_>) -> ty::Ty<'_> {
+ fn non_ptr(t: Ty<'_>) -> Ty<'_> {
match t.sty {
ty::RawPtr(p) => non_ptr(p.ty),
_ => t,
diff --git a/src/librustc_codegen_llvm/lib.rs b/src/librustc_codegen_llvm/lib.rs
index 8391f02..a0dd767 100644
--- a/src/librustc_codegen_llvm/lib.rs
+++ b/src/librustc_codegen_llvm/lib.rs
@@ -22,6 +22,8 @@
#![feature(static_nobundle)]
#![feature(trusted_len)]
#![deny(rust_2018_idioms)]
+#![deny(internal)]
+#![deny(unused_lifetimes)]
#![allow(explicit_outlives_requirements)]
use back::write::{create_target_machine, create_informational_target_machine};
@@ -103,31 +105,27 @@
pub struct LlvmCodegenBackend(());
impl ExtraBackendMethods for LlvmCodegenBackend {
- fn new_metadata(&self, tcx: TyCtxt<'_, '_, '_>, mod_name: &str) -> ModuleLlvm {
+ fn new_metadata(&self, tcx: TyCtxt<'_>, mod_name: &str) -> ModuleLlvm {
ModuleLlvm::new_metadata(tcx, mod_name)
}
- fn write_compressed_metadata<'b, 'gcx>(
+ fn write_compressed_metadata<'tcx>(
&self,
- tcx: TyCtxt<'b, 'gcx, 'gcx>,
+ tcx: TyCtxt<'tcx>,
metadata: &EncodedMetadata,
- llvm_module: &mut ModuleLlvm
+ llvm_module: &mut ModuleLlvm,
) {
base::write_compressed_metadata(tcx, metadata, llvm_module)
}
- fn codegen_allocator<'b, 'gcx>(
+ fn codegen_allocator<'tcx>(
&self,
- tcx: TyCtxt<'b, 'gcx, 'gcx>,
+ tcx: TyCtxt<'tcx>,
mods: &mut ModuleLlvm,
- kind: AllocatorKind
+ kind: AllocatorKind,
) {
unsafe { allocator::codegen(tcx, mods, kind) }
}
- fn compile_codegen_unit<'a, 'tcx: 'a>(
- &self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- cgu_name: InternedString,
- ) {
+ fn compile_codegen_unit<'a, 'tcx: 'a>(&self, tcx: TyCtxt<'tcx>, cgu_name: InternedString) {
base::compile_codegen_unit(tcx, cgu_name);
}
fn target_machine_factory(
@@ -284,12 +282,12 @@
attributes::provide_extern(providers);
}
- fn codegen_crate<'b, 'tcx>(
+ fn codegen_crate<'tcx>(
&self,
- tcx: TyCtxt<'b, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
metadata: EncodedMetadata,
need_metadata_module: bool,
- rx: mpsc::Receiver<Box<dyn Any + Send>>
+ rx: mpsc::Receiver<Box<dyn Any + Send>>,
) -> Box<dyn Any> {
box rustc_codegen_ssa::base::codegen_crate(
LlvmCodegenBackend(()), tcx, metadata, need_metadata_module, rx)
@@ -365,7 +363,7 @@
unsafe impl Sync for ModuleLlvm { }
impl ModuleLlvm {
- fn new(tcx: TyCtxt<'_, '_, '_>, mod_name: &str) -> Self {
+ fn new(tcx: TyCtxt<'_>, mod_name: &str) -> Self {
unsafe {
let llcx = llvm::LLVMRustContextCreate(tcx.sess.fewer_names());
let llmod_raw = context::create_module(tcx, llcx, mod_name) as *const _;
@@ -377,7 +375,7 @@
}
}
- fn new_metadata(tcx: TyCtxt<'_, '_, '_>, mod_name: &str) -> Self {
+ fn new_metadata(tcx: TyCtxt<'_>, mod_name: &str) -> Self {
unsafe {
let llcx = llvm::LLVMRustContextCreate(tcx.sess.fewer_names());
let llmod_raw = context::create_module(tcx, llcx, mod_name) as *const _;
diff --git a/src/librustc_codegen_llvm/llvm/ffi.rs b/src/librustc_codegen_llvm/llvm/ffi.rs
index a71243c..a5c295c 100644
--- a/src/librustc_codegen_llvm/llvm/ffi.rs
+++ b/src/librustc_codegen_llvm/llvm/ffi.rs
@@ -564,7 +564,7 @@
// These values **must** match with LLVMRustDIFlags!!
bitflags! {
- #[repr(C)]
+ #[repr(transparent)]
#[derive(Default)]
pub struct DIFlags: ::libc::uint32_t {
const FlagZero = 0;
@@ -593,7 +593,7 @@
// These values **must** match with LLVMRustDISPFlags!!
bitflags! {
- #[repr(C)]
+ #[repr(transparent)]
#[derive(Default)]
pub struct DISPFlags: ::libc::uint32_t {
const SPFlagZero = 0;
diff --git a/src/librustc_codegen_llvm/type_.rs b/src/librustc_codegen_llvm/type_.rs
index a3d3f07..2c16725 100644
--- a/src/librustc_codegen_llvm/type_.rs
+++ b/src/librustc_codegen_llvm/type_.rs
@@ -327,7 +327,7 @@
fn backend_field_index(&self, layout: TyLayout<'tcx>, index: usize) -> u64 {
layout.llvm_field_index(index)
}
- fn scalar_pair_element_backend_type<'a>(
+ fn scalar_pair_element_backend_type(
&self,
layout: TyLayout<'tcx>,
index: usize,
diff --git a/src/librustc_codegen_llvm/type_of.rs b/src/librustc_codegen_llvm/type_of.rs
index 7a82fd7..36a9ff0 100644
--- a/src/librustc_codegen_llvm/type_of.rs
+++ b/src/librustc_codegen_llvm/type_of.rs
@@ -175,7 +175,7 @@
pub trait LayoutLlvmExt<'tcx> {
fn is_llvm_immediate(&self) -> bool;
- fn is_llvm_scalar_pair<'a>(&self) -> bool;
+ fn is_llvm_scalar_pair(&self) -> bool;
fn llvm_type<'a>(&self, cx: &CodegenCx<'a, 'tcx>) -> &'a Type;
fn immediate_llvm_type<'a>(&self, cx: &CodegenCx<'a, 'tcx>) -> &'a Type;
fn scalar_llvm_type_at<'a>(&self, cx: &CodegenCx<'a, 'tcx>,
@@ -198,7 +198,7 @@
}
}
- fn is_llvm_scalar_pair<'a>(&self) -> bool {
+ fn is_llvm_scalar_pair(&self) -> bool {
match self.abi {
layout::Abi::ScalarPair(..) => true,
layout::Abi::Uninhabited |
diff --git a/src/librustc_codegen_ssa/back/linker.rs b/src/librustc_codegen_ssa/back/linker.rs
index 926f4fe..32696d4 100644
--- a/src/librustc_codegen_ssa/back/linker.rs
+++ b/src/librustc_codegen_ssa/back/linker.rs
@@ -25,7 +25,7 @@
}
impl LinkerInfo {
- pub fn new(tcx: TyCtxt<'_, '_, '_>) -> LinkerInfo {
+ pub fn new(tcx: TyCtxt<'_>) -> LinkerInfo {
LinkerInfo {
exports: tcx.sess.crate_types.borrow().iter().map(|&c| {
(c, exported_symbols(tcx, c))
@@ -377,23 +377,16 @@
return;
}
- // If we're compiling a dylib, then we let symbol visibility in object
- // files to take care of whether they're exported or not.
- //
- // If we're compiling a cdylib, however, we manually create a list of
- // exported symbols to ensure we don't expose any more. The object files
- // have far more public symbols than we actually want to export, so we
- // hide them all here.
- if crate_type == CrateType::Dylib ||
- crate_type == CrateType::ProcMacro {
- return
+ // We manually create a list of exported symbols to ensure we don't expose any more.
+ // The object files have far more public symbols than we actually want to export,
+ // so we hide them all here.
+
+ if !self.sess.target.target.options.limit_rdylib_exports {
+ return;
}
- // Symbol visibility takes care of this for the WebAssembly.
- // Additionally the only known linker, LLD, doesn't support the script
- // arguments just yet
- if self.sess.target.target.arch == "wasm32" {
- return;
+ if crate_type == CrateType::ProcMacro {
+ return
}
let mut arg = OsString::new();
@@ -1012,7 +1005,7 @@
}
}
-fn exported_symbols(tcx: TyCtxt<'_, '_, '_>, crate_type: CrateType) -> Vec<String> {
+fn exported_symbols(tcx: TyCtxt<'_>, crate_type: CrateType) -> Vec<String> {
if let Some(ref exports) = tcx.sess.target.target.options.override_export_symbols {
return exports.clone()
}
diff --git a/src/librustc_codegen_ssa/back/rpath.rs b/src/librustc_codegen_ssa/back/rpath.rs
index 2b7abcb..e27cb6d 100644
--- a/src/librustc_codegen_ssa/back/rpath.rs
+++ b/src/librustc_codegen_ssa/back/rpath.rs
@@ -173,98 +173,4 @@
}
#[cfg(all(unix, test))]
-mod tests {
- use super::{RPathConfig};
- use super::{minimize_rpaths, rpaths_to_flags, get_rpath_relative_to_output};
- use std::path::{Path, PathBuf};
-
- #[test]
- fn test_rpaths_to_flags() {
- let flags = rpaths_to_flags(&[
- "path1".to_string(),
- "path2".to_string()
- ]);
- assert_eq!(flags,
- ["-Wl,-rpath,path1",
- "-Wl,-rpath,path2"]);
- }
-
- #[test]
- fn test_minimize1() {
- let res = minimize_rpaths(&[
- "rpath1".to_string(),
- "rpath2".to_string(),
- "rpath1".to_string()
- ]);
- assert!(res == [
- "rpath1",
- "rpath2",
- ]);
- }
-
- #[test]
- fn test_minimize2() {
- let res = minimize_rpaths(&[
- "1a".to_string(),
- "2".to_string(),
- "2".to_string(),
- "1a".to_string(),
- "4a".to_string(),
- "1a".to_string(),
- "2".to_string(),
- "3".to_string(),
- "4a".to_string(),
- "3".to_string()
- ]);
- assert!(res == [
- "1a",
- "2",
- "4a",
- "3",
- ]);
- }
-
- #[test]
- fn test_rpath_relative() {
- if cfg!(target_os = "macos") {
- let config = &mut RPathConfig {
- used_crates: Vec::new(),
- has_rpath: true,
- is_like_osx: true,
- linker_is_gnu: false,
- out_filename: PathBuf::from("bin/rustc"),
- get_install_prefix_lib_path: &mut || panic!(),
- };
- let res = get_rpath_relative_to_output(config,
- Path::new("lib/libstd.so"));
- assert_eq!(res, "@loader_path/../lib");
- } else {
- let config = &mut RPathConfig {
- used_crates: Vec::new(),
- out_filename: PathBuf::from("bin/rustc"),
- get_install_prefix_lib_path: &mut || panic!(),
- has_rpath: true,
- is_like_osx: false,
- linker_is_gnu: true,
- };
- let res = get_rpath_relative_to_output(config,
- Path::new("lib/libstd.so"));
- assert_eq!(res, "$ORIGIN/../lib");
- }
- }
-
- #[test]
- fn test_xlinker() {
- let args = rpaths_to_flags(&[
- "a/normal/path".to_string(),
- "a,comma,path".to_string()
- ]);
-
- assert_eq!(args, vec![
- "-Wl,-rpath,a/normal/path".to_string(),
- "-Wl,-rpath".to_string(),
- "-Xlinker".to_string(),
- "a,comma,path".to_string()
- ]);
- }
-}
+mod tests;
diff --git a/src/librustc_codegen_ssa/back/rpath/tests.rs b/src/librustc_codegen_ssa/back/rpath/tests.rs
new file mode 100644
index 0000000..e42a878
--- /dev/null
+++ b/src/librustc_codegen_ssa/back/rpath/tests.rs
@@ -0,0 +1,93 @@
+use super::{RPathConfig};
+use super::{minimize_rpaths, rpaths_to_flags, get_rpath_relative_to_output};
+use std::path::{Path, PathBuf};
+
+#[test]
+fn test_rpaths_to_flags() {
+ let flags = rpaths_to_flags(&[
+ "path1".to_string(),
+ "path2".to_string()
+ ]);
+ assert_eq!(flags,
+ ["-Wl,-rpath,path1",
+ "-Wl,-rpath,path2"]);
+}
+
+#[test]
+fn test_minimize1() {
+ let res = minimize_rpaths(&[
+ "rpath1".to_string(),
+ "rpath2".to_string(),
+ "rpath1".to_string()
+ ]);
+ assert!(res == [
+ "rpath1",
+ "rpath2",
+ ]);
+}
+
+#[test]
+fn test_minimize2() {
+ let res = minimize_rpaths(&[
+ "1a".to_string(),
+ "2".to_string(),
+ "2".to_string(),
+ "1a".to_string(),
+ "4a".to_string(),
+ "1a".to_string(),
+ "2".to_string(),
+ "3".to_string(),
+ "4a".to_string(),
+ "3".to_string()
+ ]);
+ assert!(res == [
+ "1a",
+ "2",
+ "4a",
+ "3",
+ ]);
+}
+
+#[test]
+fn test_rpath_relative() {
+ if cfg!(target_os = "macos") {
+ let config = &mut RPathConfig {
+ used_crates: Vec::new(),
+ has_rpath: true,
+ is_like_osx: true,
+ linker_is_gnu: false,
+ out_filename: PathBuf::from("bin/rustc"),
+ get_install_prefix_lib_path: &mut || panic!(),
+ };
+ let res = get_rpath_relative_to_output(config,
+ Path::new("lib/libstd.so"));
+ assert_eq!(res, "@loader_path/../lib");
+ } else {
+ let config = &mut RPathConfig {
+ used_crates: Vec::new(),
+ out_filename: PathBuf::from("bin/rustc"),
+ get_install_prefix_lib_path: &mut || panic!(),
+ has_rpath: true,
+ is_like_osx: false,
+ linker_is_gnu: true,
+ };
+ let res = get_rpath_relative_to_output(config,
+ Path::new("lib/libstd.so"));
+ assert_eq!(res, "$ORIGIN/../lib");
+ }
+}
+
+#[test]
+fn test_xlinker() {
+ let args = rpaths_to_flags(&[
+ "a/normal/path".to_string(),
+ "a,comma,path".to_string()
+ ]);
+
+ assert_eq!(args, vec![
+ "-Wl,-rpath,a/normal/path".to_string(),
+ "-Wl,-rpath".to_string(),
+ "-Xlinker".to_string(),
+ "a,comma,path".to_string()
+ ]);
+}
diff --git a/src/librustc_codegen_ssa/back/symbol_export.rs b/src/librustc_codegen_ssa/back/symbol_export.rs
index fb7ef87..aeff73c7 100644
--- a/src/librustc_codegen_ssa/back/symbol_export.rs
+++ b/src/librustc_codegen_ssa/back/symbol_export.rs
@@ -21,7 +21,7 @@
Arc<Vec<(String, SymbolExportLevel)>>,
>;
-pub fn threshold(tcx: TyCtxt<'_, '_, '_>) -> SymbolExportLevel {
+pub fn threshold(tcx: TyCtxt<'_>) -> SymbolExportLevel {
crates_export_threshold(&tcx.sess.crate_types.borrow())
}
@@ -46,10 +46,10 @@
}
}
-fn reachable_non_generics_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- cnum: CrateNum)
- -> &'tcx DefIdMap<SymbolExportLevel>
-{
+fn reachable_non_generics_provider<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ cnum: CrateNum,
+) -> &'tcx DefIdMap<SymbolExportLevel> {
assert_eq!(cnum, LOCAL_CRATE);
if !tcx.sess.opts.output_types.should_codegen() {
@@ -157,9 +157,7 @@
tcx.arena.alloc(reachable_non_generics)
}
-fn is_reachable_non_generic_provider_local<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> bool {
+fn is_reachable_non_generic_provider_local<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
let export_threshold = threshold(tcx);
if let Some(&level) = tcx.reachable_non_generics(def_id.krate).get(&def_id) {
@@ -169,17 +167,14 @@
}
}
-fn is_reachable_non_generic_provider_extern<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> bool {
+fn is_reachable_non_generic_provider_extern<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
tcx.reachable_non_generics(def_id.krate).contains_key(&def_id)
}
-fn exported_symbols_provider_local<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- cnum: CrateNum)
- -> Arc<Vec<(ExportedSymbol<'tcx>,
- SymbolExportLevel)>>
-{
+fn exported_symbols_provider_local<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ cnum: CrateNum,
+) -> Arc<Vec<(ExportedSymbol<'tcx>, SymbolExportLevel)>> {
assert_eq!(cnum, LOCAL_CRATE);
if !tcx.sess.opts.output_types.should_codegen() {
@@ -278,11 +273,10 @@
Arc::new(symbols)
}
-fn upstream_monomorphizations_provider<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- cnum: CrateNum)
- -> &'tcx DefIdMap<FxHashMap<SubstsRef<'tcx>, CrateNum>>
-{
+fn upstream_monomorphizations_provider<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ cnum: CrateNum,
+) -> &'tcx DefIdMap<FxHashMap<SubstsRef<'tcx>, CrateNum>> {
debug_assert!(cnum == LOCAL_CRATE);
let cnums = tcx.all_crate_nums(LOCAL_CRATE);
@@ -328,16 +322,15 @@
tcx.arena.alloc(instances)
}
-fn upstream_monomorphizations_for_provider<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> Option<&'tcx FxHashMap<SubstsRef<'tcx>, CrateNum>>
-{
+fn upstream_monomorphizations_for_provider<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ def_id: DefId,
+) -> Option<&'tcx FxHashMap<SubstsRef<'tcx>, CrateNum>> {
debug_assert!(!def_id.is_local());
tcx.upstream_monomorphizations(LOCAL_CRATE).get(&def_id)
}
-fn is_unreachable_local_definition_provider(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> bool {
+fn is_unreachable_local_definition_provider(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
if let Some(hir_id) = tcx.hir().as_local_hir_id(def_id) {
!tcx.reachable_set(LOCAL_CRATE).0.contains(&hir_id)
} else {
@@ -359,7 +352,7 @@
providers.upstream_monomorphizations_for = upstream_monomorphizations_for_provider;
}
-fn symbol_export_level(tcx: TyCtxt<'_, '_, '_>, sym_def_id: DefId) -> SymbolExportLevel {
+fn symbol_export_level(tcx: TyCtxt<'_>, sym_def_id: DefId) -> SymbolExportLevel {
// We export anything that's not mangled at the "C" layer as it probably has
// to do with ABI concerns. We do not, however, apply such treatment to
// special symbols in the standard library for various plumbing between
diff --git a/src/librustc_codegen_ssa/back/write.rs b/src/librustc_codegen_ssa/back/write.rs
index 5abff2d..309187c 100644
--- a/src/librustc_codegen_ssa/back/write.rs
+++ b/src/librustc_codegen_ssa/back/write.rs
@@ -375,10 +375,10 @@
pub fn start_async_codegen<B: ExtraBackendMethods>(
backend: B,
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
metadata: EncodedMetadata,
coordinator_receive: Receiver<Box<dyn Any + Send>>,
- total_cgus: usize
+ total_cgus: usize,
) -> OngoingCodegen<B> {
let sess = tcx.sess;
let crate_name = tcx.crate_name(LOCAL_CRATE);
@@ -996,7 +996,7 @@
fn start_executing_work<B: ExtraBackendMethods>(
backend: B,
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
crate_info: &CrateInfo,
shared_emitter: SharedEmitter,
codegen_worker_send: Sender<Message<B>>,
@@ -1005,7 +1005,7 @@
jobserver: Client,
modules_config: Arc<ModuleConfig>,
metadata_config: Arc<ModuleConfig>,
- allocator_config: Arc<ModuleConfig>
+ allocator_config: Arc<ModuleConfig>,
) -> thread::JoinHandle<Result<CompiledModules, ()>> {
let coordinator_send = tcx.tx_to_llvm_workers.lock().clone();
let sess = tcx.sess;
@@ -1861,9 +1861,11 @@
}, work_products)
}
- pub fn submit_pre_codegened_module_to_llvm(&self,
- tcx: TyCtxt<'_, '_, '_>,
- module: ModuleCodegen<B::Module>) {
+ pub fn submit_pre_codegened_module_to_llvm(
+ &self,
+ tcx: TyCtxt<'_>,
+ module: ModuleCodegen<B::Module>,
+ ) {
self.wait_for_signal_to_codegen_item();
self.check_for_errors(tcx.sess);
@@ -1872,7 +1874,7 @@
submit_codegened_module_to_llvm(&self.backend, tcx, module, cost);
}
- pub fn codegen_finished(&self, tcx: TyCtxt<'_, '_, '_>) {
+ pub fn codegen_finished(&self, tcx: TyCtxt<'_>) {
self.wait_for_signal_to_codegen_item();
self.check_for_errors(tcx.sess);
drop(self.coordinator_send.send(Box::new(Message::CodegenComplete::<B>)));
@@ -1911,9 +1913,9 @@
pub fn submit_codegened_module_to_llvm<B: ExtraBackendMethods>(
_backend: &B,
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
module: ModuleCodegen<B::Module>,
- cost: u64
+ cost: u64,
) {
let llvm_work_item = WorkItem::Optimize(module);
drop(tcx.tx_to_llvm_workers.lock().send(Box::new(Message::CodegenDone::<B> {
@@ -1924,8 +1926,8 @@
pub fn submit_post_lto_module_to_llvm<B: ExtraBackendMethods>(
_backend: &B,
- tcx: TyCtxt<'_, '_, '_>,
- module: CachedModuleCodegen
+ tcx: TyCtxt<'_>,
+ module: CachedModuleCodegen,
) {
let llvm_work_item = WorkItem::CopyPostLtoArtifacts(module);
drop(tcx.tx_to_llvm_workers.lock().send(Box::new(Message::CodegenDone::<B> {
@@ -1936,8 +1938,8 @@
pub fn submit_pre_lto_module_to_llvm<B: ExtraBackendMethods>(
_backend: &B,
- tcx: TyCtxt<'_, '_, '_>,
- module: CachedModuleCodegen
+ tcx: TyCtxt<'_>,
+ module: CachedModuleCodegen,
) {
let filename = pre_lto_bitcode_filename(&module.name);
let bc_path = in_incr_comp_dir_sess(tcx.sess, &filename);
@@ -1961,7 +1963,7 @@
format!("{}.{}", module_name, PRE_LTO_BC_EXT)
}
-fn msvc_imps_needed(tcx: TyCtxt<'_, '_, '_>) -> bool {
+fn msvc_imps_needed(tcx: TyCtxt<'_>) -> bool {
// This should never be true (because it's not supported). If it is true,
// something is wrong with commandline arg validation.
assert!(!(tcx.sess.opts.cg.linker_plugin_lto.enabled() &&
diff --git a/src/librustc_codegen_ssa/base.rs b/src/librustc_codegen_ssa/base.rs
index 0cd29e0..ca68645 100644
--- a/src/librustc_codegen_ssa/base.rs
+++ b/src/librustc_codegen_ssa/base.rs
@@ -480,12 +480,11 @@
pub fn codegen_crate<B: ExtraBackendMethods>(
backend: B,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
metadata: EncodedMetadata,
need_metadata_module: bool,
- rx: mpsc::Receiver<Box<dyn Any + Send>>
+ rx: mpsc::Receiver<Box<dyn Any + Send>>,
) -> OngoingCodegen<B> {
-
check_for_rustc_errors_attr(tcx);
// Skip crate items and just output metadata in -Z no-codegen mode.
@@ -703,7 +702,7 @@
}
}
-fn assert_and_save_dep_graph<'ll, 'tcx>(tcx: TyCtxt<'ll, 'tcx, 'tcx>) {
+fn assert_and_save_dep_graph<'tcx>(tcx: TyCtxt<'tcx>) {
time(tcx.sess,
"assert dep graph",
|| ::rustc_incremental::assert_dep_graph(tcx));
@@ -714,7 +713,7 @@
}
impl CrateInfo {
- pub fn new(tcx: TyCtxt<'_, '_, '_>) -> CrateInfo {
+ pub fn new(tcx: TyCtxt<'_>) -> CrateInfo {
let mut info = CrateInfo {
panic_runtime: None,
compiler_builtins: None,
@@ -776,11 +775,11 @@
info.missing_lang_items.insert(cnum, missing);
}
- return info
+ return info;
}
}
-fn is_codegened_item(tcx: TyCtxt<'_, '_, '_>, id: DefId) -> bool {
+fn is_codegened_item(tcx: TyCtxt<'_>, id: DefId) -> bool {
let (all_mono_items, _) =
tcx.collect_and_partition_mono_items(LOCAL_CRATE);
all_mono_items.contains(&id)
@@ -850,9 +849,7 @@
};
}
-fn determine_cgu_reuse<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- cgu: &CodegenUnit<'tcx>)
- -> CguReuse {
+fn determine_cgu_reuse<'tcx>(tcx: TyCtxt<'tcx>, cgu: &CodegenUnit<'tcx>) -> CguReuse {
if !tcx.dep_graph.is_fully_enabled() {
return CguReuse::No
}
diff --git a/src/librustc_codegen_ssa/common.rs b/src/librustc_codegen_ssa/common.rs
index 0e1885f..e22d4db 100644
--- a/src/librustc_codegen_ssa/common.rs
+++ b/src/librustc_codegen_ssa/common.rs
@@ -122,11 +122,7 @@
}
}
-pub fn langcall(tcx: TyCtxt<'_, '_, '_>,
- span: Option<Span>,
- msg: &str,
- li: LangItem)
- -> DefId {
+pub fn langcall(tcx: TyCtxt<'_>, span: Option<Span>, msg: &str, li: LangItem) -> DefId {
tcx.lang_items().require(li).unwrap_or_else(|s| {
let msg = format!("{} {}", msg, s);
match span {
diff --git a/src/librustc_codegen_ssa/debuginfo/type_names.rs b/src/librustc_codegen_ssa/debuginfo/type_names.rs
index 39aea4b..8f0bb6e 100644
--- a/src/librustc_codegen_ssa/debuginfo/type_names.rs
+++ b/src/librustc_codegen_ssa/debuginfo/type_names.rs
@@ -8,10 +8,11 @@
// any caching, i.e., calling the function twice with the same type will also do
// the work twice. The `qualified` parameter only affects the first level of the
// type name, further levels (i.e., type parameters) are always fully qualified.
-pub fn compute_debuginfo_type_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- t: Ty<'tcx>,
- qualified: bool)
- -> String {
+pub fn compute_debuginfo_type_name<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ t: Ty<'tcx>,
+ qualified: bool,
+) -> String {
let mut result = String::with_capacity(64);
let mut visited = FxHashSet::default();
push_debuginfo_type_name(tcx, t, qualified, &mut result, &mut visited);
@@ -20,12 +21,13 @@
// Pushes the name of the type as it should be stored in debuginfo on the
// `output` String. See also compute_debuginfo_type_name().
-pub fn push_debuginfo_type_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- t: Ty<'tcx>,
- qualified: bool,
- output: &mut String,
- visited: &mut FxHashSet<Ty<'tcx>>) {
-
+pub fn push_debuginfo_type_name<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ t: Ty<'tcx>,
+ qualified: bool,
+ output: &mut String,
+ visited: &mut FxHashSet<Ty<'tcx>>,
+) {
// When targeting MSVC, emit C++ style type names for compatibility with
// .natvis visualizers (and perhaps other existing native debuggers?)
let cpp_like_names = tcx.sess.target.target.options.is_like_msvc;
@@ -208,10 +210,7 @@
}
}
- fn push_item_name(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- qualified: bool,
- output: &mut String) {
+ fn push_item_name(tcx: TyCtxt<'tcx>, def_id: DefId, qualified: bool, output: &mut String) {
if qualified {
output.push_str(&tcx.crate_name(def_id.krate).as_str());
for path_element in tcx.def_path(def_id).data {
@@ -228,10 +227,12 @@
// reconstructed for items from non-local crates. For local crates, this
// would be possible but with inlining and LTO we have to use the least
// common denominator - otherwise we would run into conflicts.
- fn push_type_params<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- substs: SubstsRef<'tcx>,
- output: &mut String,
- visited: &mut FxHashSet<Ty<'tcx>>) {
+ fn push_type_params<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ substs: SubstsRef<'tcx>,
+ output: &mut String,
+ visited: &mut FxHashSet<Ty<'tcx>>,
+ ) {
if substs.types().next().is_none() {
return;
}
diff --git a/src/librustc_codegen_ssa/mir/rvalue.rs b/src/librustc_codegen_ssa/mir/rvalue.rs
index 7a2bd18..87e15ba 100644
--- a/src/librustc_codegen_ssa/mir/rvalue.rs
+++ b/src/librustc_codegen_ssa/mir/rvalue.rs
@@ -429,7 +429,7 @@
mir::Rvalue::UnaryOp(op, ref operand) => {
let operand = self.codegen_operand(&mut bx, operand);
let lloperand = operand.immediate();
- let is_float = operand.layout.ty.is_fp();
+ let is_float = operand.layout.ty.is_floating_point();
let llval = match op {
mir::UnOp::Not => bx.not(lloperand),
mir::UnOp::Neg => if is_float {
@@ -536,7 +536,7 @@
rhs: Bx::Value,
input_ty: Ty<'tcx>,
) -> Bx::Value {
- let is_float = input_ty.is_fp();
+ let is_float = input_ty.is_floating_point();
let is_signed = input_ty.is_signed();
let is_unit = input_ty.is_unit();
match op {
diff --git a/src/librustc_codegen_ssa/mono_item.rs b/src/librustc_codegen_ssa/mono_item.rs
index 11e9a48..dc50c0e 100644
--- a/src/librustc_codegen_ssa/mono_item.rs
+++ b/src/librustc_codegen_ssa/mono_item.rs
@@ -29,7 +29,7 @@
cx.codegen_static(def_id, cx.tcx().is_mutable_static(def_id));
}
MonoItem::GlobalAsm(hir_id) => {
- let item = cx.tcx().hir().expect_item_by_hir_id(hir_id);
+ let item = cx.tcx().hir().expect_item(hir_id);
if let hir::ItemKind::GlobalAsm(ref ga) = item.node {
cx.codegen_global_asm(ga);
} else {
diff --git a/src/librustc_codegen_ssa/traits/backend.rs b/src/librustc_codegen_ssa/traits/backend.rs
index 0466b47..2f95c9a 100644
--- a/src/librustc_codegen_ssa/traits/backend.rs
+++ b/src/librustc_codegen_ssa/traits/backend.rs
@@ -31,24 +31,20 @@
}
pub trait ExtraBackendMethods: CodegenBackend + WriteBackendMethods + Sized + Send {
- fn new_metadata(&self, sess: TyCtxt<'_, '_, '_>, mod_name: &str) -> Self::Module;
- fn write_compressed_metadata<'b, 'gcx>(
+ fn new_metadata(&self, sess: TyCtxt<'_>, mod_name: &str) -> Self::Module;
+ fn write_compressed_metadata<'tcx>(
&self,
- tcx: TyCtxt<'b, 'gcx, 'gcx>,
+ tcx: TyCtxt<'tcx>,
metadata: &EncodedMetadata,
llvm_module: &mut Self::Module,
);
- fn codegen_allocator<'b, 'gcx>(
+ fn codegen_allocator<'tcx>(
&self,
- tcx: TyCtxt<'b, 'gcx, 'gcx>,
+ tcx: TyCtxt<'tcx>,
mods: &mut Self::Module,
- kind: AllocatorKind
+ kind: AllocatorKind,
);
- fn compile_codegen_unit<'a, 'tcx: 'a>(
- &self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- cgu_name: InternedString,
- );
+ fn compile_codegen_unit<'a, 'tcx: 'a>(&self, tcx: TyCtxt<'tcx>, cgu_name: InternedString);
// If find_features is true this won't access `sess.crate_types` by assuming
// that `is_pie_binary` is false. When we discover LLVM target features
// `sess.crate_types` is uninitialized so we cannot access it.
diff --git a/src/librustc_codegen_utils/codegen_backend.rs b/src/librustc_codegen_utils/codegen_backend.rs
index 191c660..7a7a50a 100644
--- a/src/librustc_codegen_utils/codegen_backend.rs
+++ b/src/librustc_codegen_utils/codegen_backend.rs
@@ -6,7 +6,6 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![deny(warnings)]
-
#![feature(box_syntax)]
use std::any::Any;
@@ -34,12 +33,12 @@
fn metadata_loader(&self) -> Box<dyn MetadataLoader + Sync>;
fn provide(&self, _providers: &mut Providers<'_>);
fn provide_extern(&self, _providers: &mut Providers<'_>);
- fn codegen_crate<'a, 'tcx>(
+ fn codegen_crate<'tcx>(
&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
metadata: EncodedMetadata,
need_metadata_module: bool,
- rx: mpsc::Receiver<Box<dyn Any + Send>>
+ rx: mpsc::Receiver<Box<dyn Any + Send>>,
) -> Box<dyn Any>;
/// This is called on the returned `Box<dyn Any>` from `codegen_backend`
diff --git a/src/librustc_codegen_utils/lib.rs b/src/librustc_codegen_utils/lib.rs
index 3726e4f..942c2d1 100644
--- a/src/librustc_codegen_utils/lib.rs
+++ b/src/librustc_codegen_utils/lib.rs
@@ -36,7 +36,7 @@
/// error in codegen. This is used to write compile-fail tests
/// that actually test that compilation succeeds without
/// reporting an error.
-pub fn check_for_rustc_errors_attr(tcx: TyCtxt<'_, '_, '_>) {
+pub fn check_for_rustc_errors_attr(tcx: TyCtxt<'_>) {
if let Some((def_id, _)) = tcx.entry_fn(LOCAL_CRATE) {
if tcx.has_attr(def_id, sym::rustc_error) {
tcx.sess.span_fatal(tcx.def_span(def_id), "compilation successful");
diff --git a/src/librustc_codegen_utils/symbol_names.rs b/src/librustc_codegen_utils/symbol_names.rs
index 47dc4e5..ba74f79 100644
--- a/src/librustc_codegen_utils/symbol_names.rs
+++ b/src/librustc_codegen_utils/symbol_names.rs
@@ -112,7 +112,7 @@
};
}
-fn symbol_name(tcx: TyCtxt<'_, 'tcx, 'tcx>, instance: Instance<'tcx>) -> InternedString {
+fn symbol_name(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> InternedString {
let def_id = instance.def_id();
let substs = instance.substs;
diff --git a/src/librustc_codegen_utils/symbol_names/legacy.rs b/src/librustc_codegen_utils/symbol_names/legacy.rs
index b6ece06..22b7e0a 100644
--- a/src/librustc_codegen_utils/symbol_names/legacy.rs
+++ b/src/librustc_codegen_utils/symbol_names/legacy.rs
@@ -14,7 +14,7 @@
use std::mem::{self, discriminant};
pub(super) fn mangle(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
instance: Instance<'tcx>,
instantiating_crate: Option<CrateNum>,
) -> String {
@@ -68,8 +68,8 @@
printer.path.finish(hash)
}
-fn get_symbol_hash<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn get_symbol_hash<'tcx>(
+ tcx: TyCtxt<'tcx>,
// instance this name will be for
instance: Instance<'tcx>,
@@ -179,8 +179,8 @@
}
}
-struct SymbolPrinter<'a, 'tcx> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct SymbolPrinter<'tcx> {
+ tcx: TyCtxt<'tcx>,
path: SymbolPath,
// When `true`, `finalize_pending_component` isn't used.
@@ -194,7 +194,7 @@
// `PrettyPrinter` aka pretty printing of e.g. types in paths,
// symbol names should have their own printing machinery.
-impl Printer<'tcx, 'tcx> for SymbolPrinter<'_, 'tcx> {
+impl Printer<'tcx> for SymbolPrinter<'tcx> {
type Error = fmt::Error;
type Path = Self;
@@ -203,7 +203,7 @@
type DynExistential = Self;
type Const = Self;
- fn tcx(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
@@ -360,18 +360,16 @@
}
}
-impl PrettyPrinter<'tcx, 'tcx> for SymbolPrinter<'_, 'tcx> {
+impl PrettyPrinter<'tcx> for SymbolPrinter<'tcx> {
fn region_should_not_be_omitted(
&self,
_region: ty::Region<'_>,
) -> bool {
false
}
- fn comma_sep<T>(
- mut self,
- mut elems: impl Iterator<Item = T>,
- ) -> Result<Self, Self::Error>
- where T: Print<'tcx, 'tcx, Self, Output = Self, Error = Self::Error>
+ fn comma_sep<T>(mut self, mut elems: impl Iterator<Item = T>) -> Result<Self, Self::Error>
+ where
+ T: Print<'tcx, Self, Output = Self, Error = Self::Error>,
{
if let Some(first) = elems.next() {
self = first.print(self)?;
@@ -400,7 +398,7 @@
}
}
-impl fmt::Write for SymbolPrinter<'_, '_> {
+impl fmt::Write for SymbolPrinter<'_> {
fn write_str(&mut self, s: &str) -> fmt::Result {
// Name sanitation. LLVM will happily accept identifiers with weird names, but
// gas doesn't!
diff --git a/src/librustc_codegen_utils/symbol_names/v0.rs b/src/librustc_codegen_utils/symbol_names/v0.rs
index d83d7e5..8a54fb6 100644
--- a/src/librustc_codegen_utils/symbol_names/v0.rs
+++ b/src/librustc_codegen_utils/symbol_names/v0.rs
@@ -13,7 +13,7 @@
use std::ops::Range;
pub(super) fn mangle(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
instance: Instance<'tcx>,
instantiating_crate: Option<CrateNum>,
) -> String {
@@ -75,14 +75,14 @@
lifetime_depths: Range<u32>,
}
-struct SymbolMangler<'a, 'tcx> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct SymbolMangler<'tcx> {
+ tcx: TyCtxt<'tcx>,
compress: Option<Box<CompressionCaches<'tcx>>>,
binders: Vec<BinderLevel>,
out: String,
}
-impl SymbolMangler<'_, 'tcx> {
+impl SymbolMangler<'tcx> {
fn push(&mut self, s: &str) {
self.out.push_str(s);
}
@@ -214,7 +214,7 @@
}
}
-impl Printer<'tcx, 'tcx> for SymbolMangler<'_, 'tcx> {
+impl Printer<'tcx> for SymbolMangler<'tcx> {
type Error = !;
type Path = Self;
@@ -223,7 +223,7 @@
type DynExistential = Self;
type Const = Self;
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
diff --git a/src/librustc_codegen_utils/symbol_names_test.rs b/src/librustc_codegen_utils/symbol_names_test.rs
index 9f7e483..f48d1f2 100644
--- a/src/librustc_codegen_utils/symbol_names_test.rs
+++ b/src/librustc_codegen_utils/symbol_names_test.rs
@@ -11,7 +11,7 @@
const SYMBOL_NAME: Symbol = sym::rustc_symbol_name;
const DEF_PATH: Symbol = sym::rustc_def_path;
-pub fn report_symbol_names<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn report_symbol_names<'tcx>(tcx: TyCtxt<'tcx>) {
// if the `rustc_attrs` feature is not enabled, then the
// attributes we are interested in cannot be present anyway, so
// skip the walk.
@@ -25,11 +25,11 @@
})
}
-struct SymbolNamesTest<'a, 'tcx:'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct SymbolNamesTest<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> SymbolNamesTest<'a, 'tcx> {
+impl SymbolNamesTest<'tcx> {
fn process_attrs(&mut self,
hir_id: hir::HirId) {
let tcx = self.tcx;
@@ -56,7 +56,7 @@
}
}
-impl<'a, 'tcx> hir::itemlikevisit::ItemLikeVisitor<'tcx> for SymbolNamesTest<'a, 'tcx> {
+impl hir::itemlikevisit::ItemLikeVisitor<'tcx> for SymbolNamesTest<'tcx> {
fn visit_item(&mut self, item: &'tcx hir::Item) {
self.process_attrs(item.hir_id);
}
diff --git a/src/librustc_data_structures/macros.rs b/src/librustc_data_structures/macros.rs
index b851263..6e7a8e9 100644
--- a/src/librustc_data_structures/macros.rs
+++ b/src/librustc_data_structures/macros.rs
@@ -1,6 +1,7 @@
/// A simple static assertion macro.
#[macro_export]
-#[allow_internal_unstable(type_ascription, underscore_const_names)]
+#[cfg_attr(bootstrap, allow_internal_unstable(type_ascription, underscore_const_names))]
+#[cfg_attr(not(bootstrap), allow_internal_unstable(type_ascription))]
macro_rules! static_assert {
($test:expr) => {
// Use the bool to access an array such that if the bool is false, the access
@@ -12,7 +13,7 @@
/// Type size assertion. The first argument is a type and the second argument is its expected size.
#[macro_export]
-#[allow_internal_unstable(underscore_const_names)]
+#[cfg_attr(bootstrap, allow_internal_unstable(underscore_const_names))]
macro_rules! static_assert_size {
($ty:ty, $size:expr) => {
const _: [(); $size] = [(); ::std::mem::size_of::<$ty>()];
diff --git a/src/librustc_data_structures/owning_ref/mod.rs b/src/librustc_data_structures/owning_ref/mod.rs
index 0301891..a7af615 100644
--- a/src/librustc_data_structures/owning_ref/mod.rs
+++ b/src/librustc_data_structures/owning_ref/mod.rs
@@ -1221,717 +1221,4 @@
pub type ErasedBoxRefMut<U> = OwningRefMut<Box<dyn Erased>, U>;
#[cfg(test)]
-mod tests {
- mod owning_ref {
- use super::super::OwningRef;
- use super::super::{RcRef, BoxRef, Erased, ErasedBoxRef};
- use std::cmp::{PartialEq, Ord, PartialOrd, Ordering};
- use std::hash::{Hash, Hasher};
- use std::collections::hash_map::DefaultHasher;
- use std::collections::HashMap;
- use std::rc::Rc;
-
- #[derive(Debug, PartialEq)]
- struct Example(u32, String, [u8; 3]);
- fn example() -> Example {
- Example(42, "hello world".to_string(), [1, 2, 3])
- }
-
- #[test]
- fn new_deref() {
- let or: OwningRef<Box<()>, ()> = OwningRef::new(Box::new(()));
- assert_eq!(&*or, &());
- }
-
- #[test]
- fn into() {
- let or: OwningRef<Box<()>, ()> = Box::new(()).into();
- assert_eq!(&*or, &());
- }
-
- #[test]
- fn map_offset_ref() {
- let or: BoxRef<Example> = Box::new(example()).into();
- let or: BoxRef<_, u32> = or.map(|x| &x.0);
- assert_eq!(&*or, &42);
-
- let or: BoxRef<Example> = Box::new(example()).into();
- let or: BoxRef<_, u8> = or.map(|x| &x.2[1]);
- assert_eq!(&*or, &2);
- }
-
- #[test]
- fn map_heap_ref() {
- let or: BoxRef<Example> = Box::new(example()).into();
- let or: BoxRef<_, str> = or.map(|x| &x.1[..5]);
- assert_eq!(&*or, "hello");
- }
-
- #[test]
- fn map_static_ref() {
- let or: BoxRef<()> = Box::new(()).into();
- let or: BoxRef<_, str> = or.map(|_| "hello");
- assert_eq!(&*or, "hello");
- }
-
- #[test]
- fn map_chained() {
- let or: BoxRef<String> = Box::new(example().1).into();
- let or: BoxRef<_, str> = or.map(|x| &x[1..5]);
- let or: BoxRef<_, str> = or.map(|x| &x[..2]);
- assert_eq!(&*or, "el");
- }
-
- #[test]
- fn map_chained_inference() {
- let or = BoxRef::new(Box::new(example().1))
- .map(|x| &x[..5])
- .map(|x| &x[1..3]);
- assert_eq!(&*or, "el");
- }
-
- #[test]
- fn owner() {
- let or: BoxRef<String> = Box::new(example().1).into();
- let or = or.map(|x| &x[..5]);
- assert_eq!(&*or, "hello");
- assert_eq!(&**or.owner(), "hello world");
- }
-
- #[test]
- fn into_inner() {
- let or: BoxRef<String> = Box::new(example().1).into();
- let or = or.map(|x| &x[..5]);
- assert_eq!(&*or, "hello");
- let s = *or.into_inner();
- assert_eq!(&s, "hello world");
- }
-
- #[test]
- fn fmt_debug() {
- let or: BoxRef<String> = Box::new(example().1).into();
- let or = or.map(|x| &x[..5]);
- let s = format!("{:?}", or);
- assert_eq!(&s, "OwningRef { owner: \"hello world\", reference: \"hello\" }");
- }
-
- #[test]
- fn erased_owner() {
- let o1: BoxRef<Example, str> = BoxRef::new(Box::new(example()))
- .map(|x| &x.1[..]);
-
- let o2: BoxRef<String, str> = BoxRef::new(Box::new(example().1))
- .map(|x| &x[..]);
-
- let os: Vec<ErasedBoxRef<str>> = vec![o1.erase_owner(), o2.erase_owner()];
- assert!(os.iter().all(|e| &e[..] == "hello world"));
- }
-
- #[test]
- fn raii_locks() {
- use super::super::{RefRef, RefMutRef};
- use std::cell::RefCell;
- use super::super::{MutexGuardRef, RwLockReadGuardRef, RwLockWriteGuardRef};
- use std::sync::{Mutex, RwLock};
-
- {
- let a = RefCell::new(1);
- let a = {
- let a = RefRef::new(a.borrow());
- assert_eq!(*a, 1);
- a
- };
- assert_eq!(*a, 1);
- drop(a);
- }
- {
- let a = RefCell::new(1);
- let a = {
- let a = RefMutRef::new(a.borrow_mut());
- assert_eq!(*a, 1);
- a
- };
- assert_eq!(*a, 1);
- drop(a);
- }
- {
- let a = Mutex::new(1);
- let a = {
- let a = MutexGuardRef::new(a.lock().unwrap());
- assert_eq!(*a, 1);
- a
- };
- assert_eq!(*a, 1);
- drop(a);
- }
- {
- let a = RwLock::new(1);
- let a = {
- let a = RwLockReadGuardRef::new(a.read().unwrap());
- assert_eq!(*a, 1);
- a
- };
- assert_eq!(*a, 1);
- drop(a);
- }
- {
- let a = RwLock::new(1);
- let a = {
- let a = RwLockWriteGuardRef::new(a.write().unwrap());
- assert_eq!(*a, 1);
- a
- };
- assert_eq!(*a, 1);
- drop(a);
- }
- }
-
- #[test]
- fn eq() {
- let or1: BoxRef<[u8]> = BoxRef::new(vec![1, 2, 3].into_boxed_slice());
- let or2: BoxRef<[u8]> = BoxRef::new(vec![1, 2, 3].into_boxed_slice());
- assert_eq!(or1.eq(&or2), true);
- }
-
- #[test]
- fn cmp() {
- let or1: BoxRef<[u8]> = BoxRef::new(vec![1, 2, 3].into_boxed_slice());
- let or2: BoxRef<[u8]> = BoxRef::new(vec![4, 5, 6].into_boxed_slice());
- assert_eq!(or1.cmp(&or2), Ordering::Less);
- }
-
- #[test]
- fn partial_cmp() {
- let or1: BoxRef<[u8]> = BoxRef::new(vec![4, 5, 6].into_boxed_slice());
- let or2: BoxRef<[u8]> = BoxRef::new(vec![1, 2, 3].into_boxed_slice());
- assert_eq!(or1.partial_cmp(&or2), Some(Ordering::Greater));
- }
-
- #[test]
- fn hash() {
- let mut h1 = DefaultHasher::new();
- let mut h2 = DefaultHasher::new();
-
- let or1: BoxRef<[u8]> = BoxRef::new(vec![1, 2, 3].into_boxed_slice());
- let or2: BoxRef<[u8]> = BoxRef::new(vec![1, 2, 3].into_boxed_slice());
-
- or1.hash(&mut h1);
- or2.hash(&mut h2);
-
- assert_eq!(h1.finish(), h2.finish());
- }
-
- #[test]
- fn borrow() {
- let mut hash = HashMap::new();
- let key = RcRef::<String>::new(Rc::new("foo-bar".to_string())).map(|s| &s[..]);
-
- hash.insert(key.clone().map(|s| &s[..3]), 42);
- hash.insert(key.clone().map(|s| &s[4..]), 23);
-
- assert_eq!(hash.get("foo"), Some(&42));
- assert_eq!(hash.get("bar"), Some(&23));
- }
-
- #[test]
- fn total_erase() {
- let a: OwningRef<Vec<u8>, [u8]>
- = OwningRef::new(vec![]).map(|x| &x[..]);
- let b: OwningRef<Box<[u8]>, [u8]>
- = OwningRef::new(vec![].into_boxed_slice()).map(|x| &x[..]);
-
- let c: OwningRef<Rc<Vec<u8>>, [u8]> = unsafe {a.map_owner(Rc::new)};
- let d: OwningRef<Rc<Box<[u8]>>, [u8]> = unsafe {b.map_owner(Rc::new)};
-
- let e: OwningRef<Rc<dyn Erased>, [u8]> = c.erase_owner();
- let f: OwningRef<Rc<dyn Erased>, [u8]> = d.erase_owner();
-
- let _g = e.clone();
- let _h = f.clone();
- }
-
- #[test]
- fn total_erase_box() {
- let a: OwningRef<Vec<u8>, [u8]>
- = OwningRef::new(vec![]).map(|x| &x[..]);
- let b: OwningRef<Box<[u8]>, [u8]>
- = OwningRef::new(vec![].into_boxed_slice()).map(|x| &x[..]);
-
- let c: OwningRef<Box<Vec<u8>>, [u8]> = a.map_owner_box();
- let d: OwningRef<Box<Box<[u8]>>, [u8]> = b.map_owner_box();
-
- let _e: OwningRef<Box<dyn Erased>, [u8]> = c.erase_owner();
- let _f: OwningRef<Box<dyn Erased>, [u8]> = d.erase_owner();
- }
-
- #[test]
- fn try_map1() {
- use std::any::Any;
-
- let x = Box::new(123_i32);
- let y: Box<dyn Any> = x;
-
- assert!(OwningRef::new(y).try_map(|x| x.downcast_ref::<i32>().ok_or(())).is_ok());
- }
-
- #[test]
- fn try_map2() {
- use std::any::Any;
-
- let x = Box::new(123_i32);
- let y: Box<dyn Any> = x;
-
- assert!(!OwningRef::new(y).try_map(|x| x.downcast_ref::<i32>().ok_or(())).is_err());
- }
- }
-
- mod owning_handle {
- use super::super::OwningHandle;
- use super::super::RcRef;
- use std::rc::Rc;
- use std::cell::RefCell;
- use std::sync::Arc;
- use std::sync::RwLock;
-
- #[test]
- fn owning_handle() {
- use std::cell::RefCell;
- let cell = Rc::new(RefCell::new(2));
- let cell_ref = RcRef::new(cell);
- let mut handle = OwningHandle::new_with_fn(cell_ref, |x| unsafe { x.as_ref() }.unwrap().borrow_mut());
- assert_eq!(*handle, 2);
- *handle = 3;
- assert_eq!(*handle, 3);
- }
-
- #[test]
- fn try_owning_handle_ok() {
- use std::cell::RefCell;
- let cell = Rc::new(RefCell::new(2));
- let cell_ref = RcRef::new(cell);
- let mut handle = OwningHandle::try_new::<_, ()>(cell_ref, |x| {
- Ok(unsafe {
- x.as_ref()
- }.unwrap().borrow_mut())
- }).unwrap();
- assert_eq!(*handle, 2);
- *handle = 3;
- assert_eq!(*handle, 3);
- }
-
- #[test]
- fn try_owning_handle_err() {
- use std::cell::RefCell;
- let cell = Rc::new(RefCell::new(2));
- let cell_ref = RcRef::new(cell);
- let handle = OwningHandle::try_new::<_, ()>(cell_ref, |x| {
- if false {
- return Ok(unsafe {
- x.as_ref()
- }.unwrap().borrow_mut())
- }
- Err(())
- });
- assert!(handle.is_err());
- }
-
- #[test]
- fn nested() {
- use std::cell::RefCell;
- use std::sync::{Arc, RwLock};
-
- let result = {
- let complex = Rc::new(RefCell::new(Arc::new(RwLock::new("someString"))));
- let curr = RcRef::new(complex);
- let curr = OwningHandle::new_with_fn(curr, |x| unsafe { x.as_ref() }.unwrap().borrow_mut());
- let mut curr = OwningHandle::new_with_fn(curr, |x| unsafe { x.as_ref() }.unwrap().try_write().unwrap());
- assert_eq!(*curr, "someString");
- *curr = "someOtherString";
- curr
- };
- assert_eq!(*result, "someOtherString");
- }
-
- #[test]
- fn owning_handle_safe() {
- use std::cell::RefCell;
- let cell = Rc::new(RefCell::new(2));
- let cell_ref = RcRef::new(cell);
- let handle = OwningHandle::new(cell_ref);
- assert_eq!(*handle, 2);
- }
-
- #[test]
- fn owning_handle_mut_safe() {
- use std::cell::RefCell;
- let cell = Rc::new(RefCell::new(2));
- let cell_ref = RcRef::new(cell);
- let mut handle = OwningHandle::new_mut(cell_ref);
- assert_eq!(*handle, 2);
- *handle = 3;
- assert_eq!(*handle, 3);
- }
-
- #[test]
- fn owning_handle_safe_2() {
- let result = {
- let complex = Rc::new(RefCell::new(Arc::new(RwLock::new("someString"))));
- let curr = RcRef::new(complex);
- let curr = OwningHandle::new_with_fn(curr, |x| unsafe { x.as_ref() }.unwrap().borrow_mut());
- let mut curr = OwningHandle::new_with_fn(curr, |x| unsafe { x.as_ref() }.unwrap().try_write().unwrap());
- assert_eq!(*curr, "someString");
- *curr = "someOtherString";
- curr
- };
- assert_eq!(*result, "someOtherString");
- }
- }
-
- mod owning_ref_mut {
- use super::super::{OwningRefMut, BoxRefMut, Erased, ErasedBoxRefMut};
- use super::super::BoxRef;
- use std::cmp::{PartialEq, Ord, PartialOrd, Ordering};
- use std::hash::{Hash, Hasher};
- use std::collections::hash_map::DefaultHasher;
- use std::collections::HashMap;
-
- #[derive(Debug, PartialEq)]
- struct Example(u32, String, [u8; 3]);
- fn example() -> Example {
- Example(42, "hello world".to_string(), [1, 2, 3])
- }
-
- #[test]
- fn new_deref() {
- let or: OwningRefMut<Box<()>, ()> = OwningRefMut::new(Box::new(()));
- assert_eq!(&*or, &());
- }
-
- #[test]
- fn new_deref_mut() {
- let mut or: OwningRefMut<Box<()>, ()> = OwningRefMut::new(Box::new(()));
- assert_eq!(&mut *or, &mut ());
- }
-
- #[test]
- fn mutate() {
- let mut or: OwningRefMut<Box<usize>, usize> = OwningRefMut::new(Box::new(0));
- assert_eq!(&*or, &0);
- *or = 1;
- assert_eq!(&*or, &1);
- }
-
- #[test]
- fn into() {
- let or: OwningRefMut<Box<()>, ()> = Box::new(()).into();
- assert_eq!(&*or, &());
- }
-
- #[test]
- fn map_offset_ref() {
- let or: BoxRefMut<Example> = Box::new(example()).into();
- let or: BoxRef<_, u32> = or.map(|x| &mut x.0);
- assert_eq!(&*or, &42);
-
- let or: BoxRefMut<Example> = Box::new(example()).into();
- let or: BoxRef<_, u8> = or.map(|x| &mut x.2[1]);
- assert_eq!(&*or, &2);
- }
-
- #[test]
- fn map_heap_ref() {
- let or: BoxRefMut<Example> = Box::new(example()).into();
- let or: BoxRef<_, str> = or.map(|x| &mut x.1[..5]);
- assert_eq!(&*or, "hello");
- }
-
- #[test]
- fn map_static_ref() {
- let or: BoxRefMut<()> = Box::new(()).into();
- let or: BoxRef<_, str> = or.map(|_| "hello");
- assert_eq!(&*or, "hello");
- }
-
- #[test]
- fn map_mut_offset_ref() {
- let or: BoxRefMut<Example> = Box::new(example()).into();
- let or: BoxRefMut<_, u32> = or.map_mut(|x| &mut x.0);
- assert_eq!(&*or, &42);
-
- let or: BoxRefMut<Example> = Box::new(example()).into();
- let or: BoxRefMut<_, u8> = or.map_mut(|x| &mut x.2[1]);
- assert_eq!(&*or, &2);
- }
-
- #[test]
- fn map_mut_heap_ref() {
- let or: BoxRefMut<Example> = Box::new(example()).into();
- let or: BoxRefMut<_, str> = or.map_mut(|x| &mut x.1[..5]);
- assert_eq!(&*or, "hello");
- }
-
- #[test]
- fn map_mut_static_ref() {
- static mut MUT_S: [u8; 5] = *b"hello";
-
- let mut_s: &'static mut [u8] = unsafe { &mut MUT_S };
-
- let or: BoxRefMut<()> = Box::new(()).into();
- let or: BoxRefMut<_, [u8]> = or.map_mut(move |_| mut_s);
- assert_eq!(&*or, b"hello");
- }
-
- #[test]
- fn map_mut_chained() {
- let or: BoxRefMut<String> = Box::new(example().1).into();
- let or: BoxRefMut<_, str> = or.map_mut(|x| &mut x[1..5]);
- let or: BoxRefMut<_, str> = or.map_mut(|x| &mut x[..2]);
- assert_eq!(&*or, "el");
- }
-
- #[test]
- fn map_chained_inference() {
- let or = BoxRefMut::new(Box::new(example().1))
- .map_mut(|x| &mut x[..5])
- .map_mut(|x| &mut x[1..3]);
- assert_eq!(&*or, "el");
- }
-
- #[test]
- fn try_map_mut() {
- let or: BoxRefMut<String> = Box::new(example().1).into();
- let or: Result<BoxRefMut<_, str>, ()> = or.try_map_mut(|x| Ok(&mut x[1..5]));
- assert_eq!(&*or.unwrap(), "ello");
-
- let or: BoxRefMut<String> = Box::new(example().1).into();
- let or: Result<BoxRefMut<_, str>, ()> = or.try_map_mut(|_| Err(()));
- assert!(or.is_err());
- }
-
- #[test]
- fn owner() {
- let or: BoxRefMut<String> = Box::new(example().1).into();
- let or = or.map_mut(|x| &mut x[..5]);
- assert_eq!(&*or, "hello");
- assert_eq!(&**or.owner(), "hello world");
- }
-
- #[test]
- fn into_inner() {
- let or: BoxRefMut<String> = Box::new(example().1).into();
- let or = or.map_mut(|x| &mut x[..5]);
- assert_eq!(&*or, "hello");
- let s = *or.into_inner();
- assert_eq!(&s, "hello world");
- }
-
- #[test]
- fn fmt_debug() {
- let or: BoxRefMut<String> = Box::new(example().1).into();
- let or = or.map_mut(|x| &mut x[..5]);
- let s = format!("{:?}", or);
- assert_eq!(&s,
- "OwningRefMut { owner: \"hello world\", reference: \"hello\" }");
- }
-
- #[test]
- fn erased_owner() {
- let o1: BoxRefMut<Example, str> = BoxRefMut::new(Box::new(example()))
- .map_mut(|x| &mut x.1[..]);
-
- let o2: BoxRefMut<String, str> = BoxRefMut::new(Box::new(example().1))
- .map_mut(|x| &mut x[..]);
-
- let os: Vec<ErasedBoxRefMut<str>> = vec![o1.erase_owner(), o2.erase_owner()];
- assert!(os.iter().all(|e| &e[..] == "hello world"));
- }
-
- #[test]
- fn raii_locks() {
- use super::super::RefMutRefMut;
- use std::cell::RefCell;
- use super::super::{MutexGuardRefMut, RwLockWriteGuardRefMut};
- use std::sync::{Mutex, RwLock};
-
- {
- let a = RefCell::new(1);
- let a = {
- let a = RefMutRefMut::new(a.borrow_mut());
- assert_eq!(*a, 1);
- a
- };
- assert_eq!(*a, 1);
- drop(a);
- }
- {
- let a = Mutex::new(1);
- let a = {
- let a = MutexGuardRefMut::new(a.lock().unwrap());
- assert_eq!(*a, 1);
- a
- };
- assert_eq!(*a, 1);
- drop(a);
- }
- {
- let a = RwLock::new(1);
- let a = {
- let a = RwLockWriteGuardRefMut::new(a.write().unwrap());
- assert_eq!(*a, 1);
- a
- };
- assert_eq!(*a, 1);
- drop(a);
- }
- }
-
- #[test]
- fn eq() {
- let or1: BoxRefMut<[u8]> = BoxRefMut::new(vec![1, 2, 3].into_boxed_slice());
- let or2: BoxRefMut<[u8]> = BoxRefMut::new(vec![1, 2, 3].into_boxed_slice());
- assert_eq!(or1.eq(&or2), true);
- }
-
- #[test]
- fn cmp() {
- let or1: BoxRefMut<[u8]> = BoxRefMut::new(vec![1, 2, 3].into_boxed_slice());
- let or2: BoxRefMut<[u8]> = BoxRefMut::new(vec![4, 5, 6].into_boxed_slice());
- assert_eq!(or1.cmp(&or2), Ordering::Less);
- }
-
- #[test]
- fn partial_cmp() {
- let or1: BoxRefMut<[u8]> = BoxRefMut::new(vec![4, 5, 6].into_boxed_slice());
- let or2: BoxRefMut<[u8]> = BoxRefMut::new(vec![1, 2, 3].into_boxed_slice());
- assert_eq!(or1.partial_cmp(&or2), Some(Ordering::Greater));
- }
-
- #[test]
- fn hash() {
- let mut h1 = DefaultHasher::new();
- let mut h2 = DefaultHasher::new();
-
- let or1: BoxRefMut<[u8]> = BoxRefMut::new(vec![1, 2, 3].into_boxed_slice());
- let or2: BoxRefMut<[u8]> = BoxRefMut::new(vec![1, 2, 3].into_boxed_slice());
-
- or1.hash(&mut h1);
- or2.hash(&mut h2);
-
- assert_eq!(h1.finish(), h2.finish());
- }
-
- #[test]
- fn borrow() {
- let mut hash = HashMap::new();
- let key1 = BoxRefMut::<String>::new(Box::new("foo".to_string())).map(|s| &s[..]);
- let key2 = BoxRefMut::<String>::new(Box::new("bar".to_string())).map(|s| &s[..]);
-
- hash.insert(key1, 42);
- hash.insert(key2, 23);
-
- assert_eq!(hash.get("foo"), Some(&42));
- assert_eq!(hash.get("bar"), Some(&23));
- }
-
- #[test]
- fn total_erase() {
- let a: OwningRefMut<Vec<u8>, [u8]>
- = OwningRefMut::new(vec![]).map_mut(|x| &mut x[..]);
- let b: OwningRefMut<Box<[u8]>, [u8]>
- = OwningRefMut::new(vec![].into_boxed_slice()).map_mut(|x| &mut x[..]);
-
- let c: OwningRefMut<Box<Vec<u8>>, [u8]> = unsafe {a.map_owner(Box::new)};
- let d: OwningRefMut<Box<Box<[u8]>>, [u8]> = unsafe {b.map_owner(Box::new)};
-
- let _e: OwningRefMut<Box<dyn Erased>, [u8]> = c.erase_owner();
- let _f: OwningRefMut<Box<dyn Erased>, [u8]> = d.erase_owner();
- }
-
- #[test]
- fn total_erase_box() {
- let a: OwningRefMut<Vec<u8>, [u8]>
- = OwningRefMut::new(vec![]).map_mut(|x| &mut x[..]);
- let b: OwningRefMut<Box<[u8]>, [u8]>
- = OwningRefMut::new(vec![].into_boxed_slice()).map_mut(|x| &mut x[..]);
-
- let c: OwningRefMut<Box<Vec<u8>>, [u8]> = a.map_owner_box();
- let d: OwningRefMut<Box<Box<[u8]>>, [u8]> = b.map_owner_box();
-
- let _e: OwningRefMut<Box<dyn Erased>, [u8]> = c.erase_owner();
- let _f: OwningRefMut<Box<dyn Erased>, [u8]> = d.erase_owner();
- }
-
- #[test]
- fn try_map1() {
- use std::any::Any;
-
- let x = Box::new(123_i32);
- let y: Box<dyn Any> = x;
-
- assert!(OwningRefMut::new(y).try_map_mut(|x| x.downcast_mut::<i32>().ok_or(())).is_ok());
- }
-
- #[test]
- fn try_map2() {
- use std::any::Any;
-
- let x = Box::new(123_i32);
- let y: Box<dyn Any> = x;
-
- assert!(!OwningRefMut::new(y).try_map_mut(|x| x.downcast_mut::<i32>().ok_or(())).is_err());
- }
-
- #[test]
- fn try_map3() {
- use std::any::Any;
-
- let x = Box::new(123_i32);
- let y: Box<dyn Any> = x;
-
- assert!(OwningRefMut::new(y).try_map(|x| x.downcast_ref::<i32>().ok_or(())).is_ok());
- }
-
- #[test]
- fn try_map4() {
- use std::any::Any;
-
- let x = Box::new(123_i32);
- let y: Box<dyn Any> = x;
-
- assert!(!OwningRefMut::new(y).try_map(|x| x.downcast_ref::<i32>().ok_or(())).is_err());
- }
-
- #[test]
- fn into_owning_ref() {
- use super::super::BoxRef;
-
- let or: BoxRefMut<()> = Box::new(()).into();
- let or: BoxRef<()> = or.into();
- assert_eq!(&*or, &());
- }
-
- struct Foo {
- u: u32,
- }
- struct Bar {
- f: Foo,
- }
-
- #[test]
- fn ref_mut() {
- use std::cell::RefCell;
-
- let a = RefCell::new(Bar { f: Foo { u: 42 } });
- let mut b = OwningRefMut::new(a.borrow_mut());
- assert_eq!(b.f.u, 42);
- b.f.u = 43;
- let mut c = b.map_mut(|x| &mut x.f);
- assert_eq!(c.u, 43);
- c.u = 44;
- let mut d = c.map_mut(|x| &mut x.u);
- assert_eq!(*d, 44);
- *d = 45;
- assert_eq!(*d, 45);
- }
- }
-}
+mod tests;
diff --git a/src/librustc_data_structures/owning_ref/tests.rs b/src/librustc_data_structures/owning_ref/tests.rs
new file mode 100644
index 0000000..d368219
--- /dev/null
+++ b/src/librustc_data_structures/owning_ref/tests.rs
@@ -0,0 +1,712 @@
+mod owning_ref {
+ use super::super::OwningRef;
+ use super::super::{RcRef, BoxRef, Erased, ErasedBoxRef};
+ use std::cmp::{PartialEq, Ord, PartialOrd, Ordering};
+ use std::hash::{Hash, Hasher};
+ use std::collections::hash_map::DefaultHasher;
+ use std::collections::HashMap;
+ use std::rc::Rc;
+
+ #[derive(Debug, PartialEq)]
+ struct Example(u32, String, [u8; 3]);
+ fn example() -> Example {
+ Example(42, "hello world".to_string(), [1, 2, 3])
+ }
+
+ #[test]
+ fn new_deref() {
+ let or: OwningRef<Box<()>, ()> = OwningRef::new(Box::new(()));
+ assert_eq!(&*or, &());
+ }
+
+ #[test]
+ fn into() {
+ let or: OwningRef<Box<()>, ()> = Box::new(()).into();
+ assert_eq!(&*or, &());
+ }
+
+ #[test]
+ fn map_offset_ref() {
+ let or: BoxRef<Example> = Box::new(example()).into();
+ let or: BoxRef<_, u32> = or.map(|x| &x.0);
+ assert_eq!(&*or, &42);
+
+ let or: BoxRef<Example> = Box::new(example()).into();
+ let or: BoxRef<_, u8> = or.map(|x| &x.2[1]);
+ assert_eq!(&*or, &2);
+ }
+
+ #[test]
+ fn map_heap_ref() {
+ let or: BoxRef<Example> = Box::new(example()).into();
+ let or: BoxRef<_, str> = or.map(|x| &x.1[..5]);
+ assert_eq!(&*or, "hello");
+ }
+
+ #[test]
+ fn map_static_ref() {
+ let or: BoxRef<()> = Box::new(()).into();
+ let or: BoxRef<_, str> = or.map(|_| "hello");
+ assert_eq!(&*or, "hello");
+ }
+
+ #[test]
+ fn map_chained() {
+ let or: BoxRef<String> = Box::new(example().1).into();
+ let or: BoxRef<_, str> = or.map(|x| &x[1..5]);
+ let or: BoxRef<_, str> = or.map(|x| &x[..2]);
+ assert_eq!(&*or, "el");
+ }
+
+ #[test]
+ fn map_chained_inference() {
+ let or = BoxRef::new(Box::new(example().1))
+ .map(|x| &x[..5])
+ .map(|x| &x[1..3]);
+ assert_eq!(&*or, "el");
+ }
+
+ #[test]
+ fn owner() {
+ let or: BoxRef<String> = Box::new(example().1).into();
+ let or = or.map(|x| &x[..5]);
+ assert_eq!(&*or, "hello");
+ assert_eq!(&**or.owner(), "hello world");
+ }
+
+ #[test]
+ fn into_inner() {
+ let or: BoxRef<String> = Box::new(example().1).into();
+ let or = or.map(|x| &x[..5]);
+ assert_eq!(&*or, "hello");
+ let s = *or.into_inner();
+ assert_eq!(&s, "hello world");
+ }
+
+ #[test]
+ fn fmt_debug() {
+ let or: BoxRef<String> = Box::new(example().1).into();
+ let or = or.map(|x| &x[..5]);
+ let s = format!("{:?}", or);
+ assert_eq!(&s, "OwningRef { owner: \"hello world\", reference: \"hello\" }");
+ }
+
+ #[test]
+ fn erased_owner() {
+ let o1: BoxRef<Example, str> = BoxRef::new(Box::new(example()))
+ .map(|x| &x.1[..]);
+
+ let o2: BoxRef<String, str> = BoxRef::new(Box::new(example().1))
+ .map(|x| &x[..]);
+
+ let os: Vec<ErasedBoxRef<str>> = vec![o1.erase_owner(), o2.erase_owner()];
+ assert!(os.iter().all(|e| &e[..] == "hello world"));
+ }
+
+ #[test]
+ fn raii_locks() {
+ use super::super::{RefRef, RefMutRef};
+ use std::cell::RefCell;
+ use super::super::{MutexGuardRef, RwLockReadGuardRef, RwLockWriteGuardRef};
+ use std::sync::{Mutex, RwLock};
+
+ {
+ let a = RefCell::new(1);
+ let a = {
+ let a = RefRef::new(a.borrow());
+ assert_eq!(*a, 1);
+ a
+ };
+ assert_eq!(*a, 1);
+ drop(a);
+ }
+ {
+ let a = RefCell::new(1);
+ let a = {
+ let a = RefMutRef::new(a.borrow_mut());
+ assert_eq!(*a, 1);
+ a
+ };
+ assert_eq!(*a, 1);
+ drop(a);
+ }
+ {
+ let a = Mutex::new(1);
+ let a = {
+ let a = MutexGuardRef::new(a.lock().unwrap());
+ assert_eq!(*a, 1);
+ a
+ };
+ assert_eq!(*a, 1);
+ drop(a);
+ }
+ {
+ let a = RwLock::new(1);
+ let a = {
+ let a = RwLockReadGuardRef::new(a.read().unwrap());
+ assert_eq!(*a, 1);
+ a
+ };
+ assert_eq!(*a, 1);
+ drop(a);
+ }
+ {
+ let a = RwLock::new(1);
+ let a = {
+ let a = RwLockWriteGuardRef::new(a.write().unwrap());
+ assert_eq!(*a, 1);
+ a
+ };
+ assert_eq!(*a, 1);
+ drop(a);
+ }
+ }
+
+ #[test]
+ fn eq() {
+ let or1: BoxRef<[u8]> = BoxRef::new(vec![1, 2, 3].into_boxed_slice());
+ let or2: BoxRef<[u8]> = BoxRef::new(vec![1, 2, 3].into_boxed_slice());
+ assert_eq!(or1.eq(&or2), true);
+ }
+
+ #[test]
+ fn cmp() {
+ let or1: BoxRef<[u8]> = BoxRef::new(vec![1, 2, 3].into_boxed_slice());
+ let or2: BoxRef<[u8]> = BoxRef::new(vec![4, 5, 6].into_boxed_slice());
+ assert_eq!(or1.cmp(&or2), Ordering::Less);
+ }
+
+ #[test]
+ fn partial_cmp() {
+ let or1: BoxRef<[u8]> = BoxRef::new(vec![4, 5, 6].into_boxed_slice());
+ let or2: BoxRef<[u8]> = BoxRef::new(vec![1, 2, 3].into_boxed_slice());
+ assert_eq!(or1.partial_cmp(&or2), Some(Ordering::Greater));
+ }
+
+ #[test]
+ fn hash() {
+ let mut h1 = DefaultHasher::new();
+ let mut h2 = DefaultHasher::new();
+
+ let or1: BoxRef<[u8]> = BoxRef::new(vec![1, 2, 3].into_boxed_slice());
+ let or2: BoxRef<[u8]> = BoxRef::new(vec![1, 2, 3].into_boxed_slice());
+
+ or1.hash(&mut h1);
+ or2.hash(&mut h2);
+
+ assert_eq!(h1.finish(), h2.finish());
+ }
+
+ #[test]
+ fn borrow() {
+ let mut hash = HashMap::new();
+ let key = RcRef::<String>::new(Rc::new("foo-bar".to_string())).map(|s| &s[..]);
+
+ hash.insert(key.clone().map(|s| &s[..3]), 42);
+ hash.insert(key.clone().map(|s| &s[4..]), 23);
+
+ assert_eq!(hash.get("foo"), Some(&42));
+ assert_eq!(hash.get("bar"), Some(&23));
+ }
+
+ #[test]
+ fn total_erase() {
+ let a: OwningRef<Vec<u8>, [u8]>
+ = OwningRef::new(vec![]).map(|x| &x[..]);
+ let b: OwningRef<Box<[u8]>, [u8]>
+ = OwningRef::new(vec![].into_boxed_slice()).map(|x| &x[..]);
+
+ let c: OwningRef<Rc<Vec<u8>>, [u8]> = unsafe {a.map_owner(Rc::new)};
+ let d: OwningRef<Rc<Box<[u8]>>, [u8]> = unsafe {b.map_owner(Rc::new)};
+
+ let e: OwningRef<Rc<dyn Erased>, [u8]> = c.erase_owner();
+ let f: OwningRef<Rc<dyn Erased>, [u8]> = d.erase_owner();
+
+ let _g = e.clone();
+ let _h = f.clone();
+ }
+
+ #[test]
+ fn total_erase_box() {
+ let a: OwningRef<Vec<u8>, [u8]>
+ = OwningRef::new(vec![]).map(|x| &x[..]);
+ let b: OwningRef<Box<[u8]>, [u8]>
+ = OwningRef::new(vec![].into_boxed_slice()).map(|x| &x[..]);
+
+ let c: OwningRef<Box<Vec<u8>>, [u8]> = a.map_owner_box();
+ let d: OwningRef<Box<Box<[u8]>>, [u8]> = b.map_owner_box();
+
+ let _e: OwningRef<Box<dyn Erased>, [u8]> = c.erase_owner();
+ let _f: OwningRef<Box<dyn Erased>, [u8]> = d.erase_owner();
+ }
+
+ #[test]
+ fn try_map1() {
+ use std::any::Any;
+
+ let x = Box::new(123_i32);
+ let y: Box<dyn Any> = x;
+
+ assert!(OwningRef::new(y).try_map(|x| x.downcast_ref::<i32>().ok_or(())).is_ok());
+ }
+
+ #[test]
+ fn try_map2() {
+ use std::any::Any;
+
+ let x = Box::new(123_i32);
+ let y: Box<dyn Any> = x;
+
+ assert!(!OwningRef::new(y).try_map(|x| x.downcast_ref::<i32>().ok_or(())).is_err());
+ }
+}
+
+mod owning_handle {
+ use super::super::OwningHandle;
+ use super::super::RcRef;
+ use std::rc::Rc;
+ use std::cell::RefCell;
+ use std::sync::Arc;
+ use std::sync::RwLock;
+
+ #[test]
+ fn owning_handle() {
+ use std::cell::RefCell;
+ let cell = Rc::new(RefCell::new(2));
+ let cell_ref = RcRef::new(cell);
+ let mut handle = OwningHandle::new_with_fn(cell_ref, |x| unsafe { x.as_ref() }.unwrap().borrow_mut());
+ assert_eq!(*handle, 2);
+ *handle = 3;
+ assert_eq!(*handle, 3);
+ }
+
+ #[test]
+ fn try_owning_handle_ok() {
+ use std::cell::RefCell;
+ let cell = Rc::new(RefCell::new(2));
+ let cell_ref = RcRef::new(cell);
+ let mut handle = OwningHandle::try_new::<_, ()>(cell_ref, |x| {
+ Ok(unsafe {
+ x.as_ref()
+ }.unwrap().borrow_mut())
+ }).unwrap();
+ assert_eq!(*handle, 2);
+ *handle = 3;
+ assert_eq!(*handle, 3);
+ }
+
+ #[test]
+ fn try_owning_handle_err() {
+ use std::cell::RefCell;
+ let cell = Rc::new(RefCell::new(2));
+ let cell_ref = RcRef::new(cell);
+ let handle = OwningHandle::try_new::<_, ()>(cell_ref, |x| {
+ if false {
+ return Ok(unsafe {
+ x.as_ref()
+ }.unwrap().borrow_mut())
+ }
+ Err(())
+ });
+ assert!(handle.is_err());
+ }
+
+ #[test]
+ fn nested() {
+ use std::cell::RefCell;
+ use std::sync::{Arc, RwLock};
+
+ let result = {
+ let complex = Rc::new(RefCell::new(Arc::new(RwLock::new("someString"))));
+ let curr = RcRef::new(complex);
+ let curr = OwningHandle::new_with_fn(curr, |x| unsafe { x.as_ref() }.unwrap().borrow_mut());
+ let mut curr = OwningHandle::new_with_fn(curr, |x| unsafe { x.as_ref() }.unwrap().try_write().unwrap());
+ assert_eq!(*curr, "someString");
+ *curr = "someOtherString";
+ curr
+ };
+ assert_eq!(*result, "someOtherString");
+ }
+
+ #[test]
+ fn owning_handle_safe() {
+ use std::cell::RefCell;
+ let cell = Rc::new(RefCell::new(2));
+ let cell_ref = RcRef::new(cell);
+ let handle = OwningHandle::new(cell_ref);
+ assert_eq!(*handle, 2);
+ }
+
+ #[test]
+ fn owning_handle_mut_safe() {
+ use std::cell::RefCell;
+ let cell = Rc::new(RefCell::new(2));
+ let cell_ref = RcRef::new(cell);
+ let mut handle = OwningHandle::new_mut(cell_ref);
+ assert_eq!(*handle, 2);
+ *handle = 3;
+ assert_eq!(*handle, 3);
+ }
+
+ #[test]
+ fn owning_handle_safe_2() {
+ let result = {
+ let complex = Rc::new(RefCell::new(Arc::new(RwLock::new("someString"))));
+ let curr = RcRef::new(complex);
+ let curr = OwningHandle::new_with_fn(curr, |x| unsafe { x.as_ref() }.unwrap().borrow_mut());
+ let mut curr = OwningHandle::new_with_fn(curr, |x| unsafe { x.as_ref() }.unwrap().try_write().unwrap());
+ assert_eq!(*curr, "someString");
+ *curr = "someOtherString";
+ curr
+ };
+ assert_eq!(*result, "someOtherString");
+ }
+}
+
+mod owning_ref_mut {
+ use super::super::{OwningRefMut, BoxRefMut, Erased, ErasedBoxRefMut};
+ use super::super::BoxRef;
+ use std::cmp::{PartialEq, Ord, PartialOrd, Ordering};
+ use std::hash::{Hash, Hasher};
+ use std::collections::hash_map::DefaultHasher;
+ use std::collections::HashMap;
+
+ #[derive(Debug, PartialEq)]
+ struct Example(u32, String, [u8; 3]);
+ fn example() -> Example {
+ Example(42, "hello world".to_string(), [1, 2, 3])
+ }
+
+ #[test]
+ fn new_deref() {
+ let or: OwningRefMut<Box<()>, ()> = OwningRefMut::new(Box::new(()));
+ assert_eq!(&*or, &());
+ }
+
+ #[test]
+ fn new_deref_mut() {
+ let mut or: OwningRefMut<Box<()>, ()> = OwningRefMut::new(Box::new(()));
+ assert_eq!(&mut *or, &mut ());
+ }
+
+ #[test]
+ fn mutate() {
+ let mut or: OwningRefMut<Box<usize>, usize> = OwningRefMut::new(Box::new(0));
+ assert_eq!(&*or, &0);
+ *or = 1;
+ assert_eq!(&*or, &1);
+ }
+
+ #[test]
+ fn into() {
+ let or: OwningRefMut<Box<()>, ()> = Box::new(()).into();
+ assert_eq!(&*or, &());
+ }
+
+ #[test]
+ fn map_offset_ref() {
+ let or: BoxRefMut<Example> = Box::new(example()).into();
+ let or: BoxRef<_, u32> = or.map(|x| &mut x.0);
+ assert_eq!(&*or, &42);
+
+ let or: BoxRefMut<Example> = Box::new(example()).into();
+ let or: BoxRef<_, u8> = or.map(|x| &mut x.2[1]);
+ assert_eq!(&*or, &2);
+ }
+
+ #[test]
+ fn map_heap_ref() {
+ let or: BoxRefMut<Example> = Box::new(example()).into();
+ let or: BoxRef<_, str> = or.map(|x| &mut x.1[..5]);
+ assert_eq!(&*or, "hello");
+ }
+
+ #[test]
+ fn map_static_ref() {
+ let or: BoxRefMut<()> = Box::new(()).into();
+ let or: BoxRef<_, str> = or.map(|_| "hello");
+ assert_eq!(&*or, "hello");
+ }
+
+ #[test]
+ fn map_mut_offset_ref() {
+ let or: BoxRefMut<Example> = Box::new(example()).into();
+ let or: BoxRefMut<_, u32> = or.map_mut(|x| &mut x.0);
+ assert_eq!(&*or, &42);
+
+ let or: BoxRefMut<Example> = Box::new(example()).into();
+ let or: BoxRefMut<_, u8> = or.map_mut(|x| &mut x.2[1]);
+ assert_eq!(&*or, &2);
+ }
+
+ #[test]
+ fn map_mut_heap_ref() {
+ let or: BoxRefMut<Example> = Box::new(example()).into();
+ let or: BoxRefMut<_, str> = or.map_mut(|x| &mut x.1[..5]);
+ assert_eq!(&*or, "hello");
+ }
+
+ #[test]
+ fn map_mut_static_ref() {
+ static mut MUT_S: [u8; 5] = *b"hello";
+
+ let mut_s: &'static mut [u8] = unsafe { &mut MUT_S };
+
+ let or: BoxRefMut<()> = Box::new(()).into();
+ let or: BoxRefMut<_, [u8]> = or.map_mut(move |_| mut_s);
+ assert_eq!(&*or, b"hello");
+ }
+
+ #[test]
+ fn map_mut_chained() {
+ let or: BoxRefMut<String> = Box::new(example().1).into();
+ let or: BoxRefMut<_, str> = or.map_mut(|x| &mut x[1..5]);
+ let or: BoxRefMut<_, str> = or.map_mut(|x| &mut x[..2]);
+ assert_eq!(&*or, "el");
+ }
+
+ #[test]
+ fn map_chained_inference() {
+ let or = BoxRefMut::new(Box::new(example().1))
+ .map_mut(|x| &mut x[..5])
+ .map_mut(|x| &mut x[1..3]);
+ assert_eq!(&*or, "el");
+ }
+
+ #[test]
+ fn try_map_mut() {
+ let or: BoxRefMut<String> = Box::new(example().1).into();
+ let or: Result<BoxRefMut<_, str>, ()> = or.try_map_mut(|x| Ok(&mut x[1..5]));
+ assert_eq!(&*or.unwrap(), "ello");
+
+ let or: BoxRefMut<String> = Box::new(example().1).into();
+ let or: Result<BoxRefMut<_, str>, ()> = or.try_map_mut(|_| Err(()));
+ assert!(or.is_err());
+ }
+
+ #[test]
+ fn owner() {
+ let or: BoxRefMut<String> = Box::new(example().1).into();
+ let or = or.map_mut(|x| &mut x[..5]);
+ assert_eq!(&*or, "hello");
+ assert_eq!(&**or.owner(), "hello world");
+ }
+
+ #[test]
+ fn into_inner() {
+ let or: BoxRefMut<String> = Box::new(example().1).into();
+ let or = or.map_mut(|x| &mut x[..5]);
+ assert_eq!(&*or, "hello");
+ let s = *or.into_inner();
+ assert_eq!(&s, "hello world");
+ }
+
+ #[test]
+ fn fmt_debug() {
+ let or: BoxRefMut<String> = Box::new(example().1).into();
+ let or = or.map_mut(|x| &mut x[..5]);
+ let s = format!("{:?}", or);
+ assert_eq!(&s,
+ "OwningRefMut { owner: \"hello world\", reference: \"hello\" }");
+ }
+
+ #[test]
+ fn erased_owner() {
+ let o1: BoxRefMut<Example, str> = BoxRefMut::new(Box::new(example()))
+ .map_mut(|x| &mut x.1[..]);
+
+ let o2: BoxRefMut<String, str> = BoxRefMut::new(Box::new(example().1))
+ .map_mut(|x| &mut x[..]);
+
+ let os: Vec<ErasedBoxRefMut<str>> = vec![o1.erase_owner(), o2.erase_owner()];
+ assert!(os.iter().all(|e| &e[..] == "hello world"));
+ }
+
+ #[test]
+ fn raii_locks() {
+ use super::super::RefMutRefMut;
+ use std::cell::RefCell;
+ use super::super::{MutexGuardRefMut, RwLockWriteGuardRefMut};
+ use std::sync::{Mutex, RwLock};
+
+ {
+ let a = RefCell::new(1);
+ let a = {
+ let a = RefMutRefMut::new(a.borrow_mut());
+ assert_eq!(*a, 1);
+ a
+ };
+ assert_eq!(*a, 1);
+ drop(a);
+ }
+ {
+ let a = Mutex::new(1);
+ let a = {
+ let a = MutexGuardRefMut::new(a.lock().unwrap());
+ assert_eq!(*a, 1);
+ a
+ };
+ assert_eq!(*a, 1);
+ drop(a);
+ }
+ {
+ let a = RwLock::new(1);
+ let a = {
+ let a = RwLockWriteGuardRefMut::new(a.write().unwrap());
+ assert_eq!(*a, 1);
+ a
+ };
+ assert_eq!(*a, 1);
+ drop(a);
+ }
+ }
+
+ #[test]
+ fn eq() {
+ let or1: BoxRefMut<[u8]> = BoxRefMut::new(vec![1, 2, 3].into_boxed_slice());
+ let or2: BoxRefMut<[u8]> = BoxRefMut::new(vec![1, 2, 3].into_boxed_slice());
+ assert_eq!(or1.eq(&or2), true);
+ }
+
+ #[test]
+ fn cmp() {
+ let or1: BoxRefMut<[u8]> = BoxRefMut::new(vec![1, 2, 3].into_boxed_slice());
+ let or2: BoxRefMut<[u8]> = BoxRefMut::new(vec![4, 5, 6].into_boxed_slice());
+ assert_eq!(or1.cmp(&or2), Ordering::Less);
+ }
+
+ #[test]
+ fn partial_cmp() {
+ let or1: BoxRefMut<[u8]> = BoxRefMut::new(vec![4, 5, 6].into_boxed_slice());
+ let or2: BoxRefMut<[u8]> = BoxRefMut::new(vec![1, 2, 3].into_boxed_slice());
+ assert_eq!(or1.partial_cmp(&or2), Some(Ordering::Greater));
+ }
+
+ #[test]
+ fn hash() {
+ let mut h1 = DefaultHasher::new();
+ let mut h2 = DefaultHasher::new();
+
+ let or1: BoxRefMut<[u8]> = BoxRefMut::new(vec![1, 2, 3].into_boxed_slice());
+ let or2: BoxRefMut<[u8]> = BoxRefMut::new(vec![1, 2, 3].into_boxed_slice());
+
+ or1.hash(&mut h1);
+ or2.hash(&mut h2);
+
+ assert_eq!(h1.finish(), h2.finish());
+ }
+
+ #[test]
+ fn borrow() {
+ let mut hash = HashMap::new();
+ let key1 = BoxRefMut::<String>::new(Box::new("foo".to_string())).map(|s| &s[..]);
+ let key2 = BoxRefMut::<String>::new(Box::new("bar".to_string())).map(|s| &s[..]);
+
+ hash.insert(key1, 42);
+ hash.insert(key2, 23);
+
+ assert_eq!(hash.get("foo"), Some(&42));
+ assert_eq!(hash.get("bar"), Some(&23));
+ }
+
+ #[test]
+ fn total_erase() {
+ let a: OwningRefMut<Vec<u8>, [u8]>
+ = OwningRefMut::new(vec![]).map_mut(|x| &mut x[..]);
+ let b: OwningRefMut<Box<[u8]>, [u8]>
+ = OwningRefMut::new(vec![].into_boxed_slice()).map_mut(|x| &mut x[..]);
+
+ let c: OwningRefMut<Box<Vec<u8>>, [u8]> = unsafe {a.map_owner(Box::new)};
+ let d: OwningRefMut<Box<Box<[u8]>>, [u8]> = unsafe {b.map_owner(Box::new)};
+
+ let _e: OwningRefMut<Box<dyn Erased>, [u8]> = c.erase_owner();
+ let _f: OwningRefMut<Box<dyn Erased>, [u8]> = d.erase_owner();
+ }
+
+ #[test]
+ fn total_erase_box() {
+ let a: OwningRefMut<Vec<u8>, [u8]>
+ = OwningRefMut::new(vec![]).map_mut(|x| &mut x[..]);
+ let b: OwningRefMut<Box<[u8]>, [u8]>
+ = OwningRefMut::new(vec![].into_boxed_slice()).map_mut(|x| &mut x[..]);
+
+ let c: OwningRefMut<Box<Vec<u8>>, [u8]> = a.map_owner_box();
+ let d: OwningRefMut<Box<Box<[u8]>>, [u8]> = b.map_owner_box();
+
+ let _e: OwningRefMut<Box<dyn Erased>, [u8]> = c.erase_owner();
+ let _f: OwningRefMut<Box<dyn Erased>, [u8]> = d.erase_owner();
+ }
+
+ #[test]
+ fn try_map1() {
+ use std::any::Any;
+
+ let x = Box::new(123_i32);
+ let y: Box<dyn Any> = x;
+
+ assert!(OwningRefMut::new(y).try_map_mut(|x| x.downcast_mut::<i32>().ok_or(())).is_ok());
+ }
+
+ #[test]
+ fn try_map2() {
+ use std::any::Any;
+
+ let x = Box::new(123_i32);
+ let y: Box<dyn Any> = x;
+
+ assert!(!OwningRefMut::new(y).try_map_mut(|x| x.downcast_mut::<i32>().ok_or(())).is_err());
+ }
+
+ #[test]
+ fn try_map3() {
+ use std::any::Any;
+
+ let x = Box::new(123_i32);
+ let y: Box<dyn Any> = x;
+
+ assert!(OwningRefMut::new(y).try_map(|x| x.downcast_ref::<i32>().ok_or(())).is_ok());
+ }
+
+ #[test]
+ fn try_map4() {
+ use std::any::Any;
+
+ let x = Box::new(123_i32);
+ let y: Box<dyn Any> = x;
+
+ assert!(!OwningRefMut::new(y).try_map(|x| x.downcast_ref::<i32>().ok_or(())).is_err());
+ }
+
+ #[test]
+ fn into_owning_ref() {
+ use super::super::BoxRef;
+
+ let or: BoxRefMut<()> = Box::new(()).into();
+ let or: BoxRef<()> = or.into();
+ assert_eq!(&*or, &());
+ }
+
+ struct Foo {
+ u: u32,
+ }
+ struct Bar {
+ f: Foo,
+ }
+
+ #[test]
+ fn ref_mut() {
+ use std::cell::RefCell;
+
+ let a = RefCell::new(Bar { f: Foo { u: 42 } });
+ let mut b = OwningRefMut::new(a.borrow_mut());
+ assert_eq!(b.f.u, 42);
+ b.f.u = 43;
+ let mut c = b.map_mut(|x| &mut x.f);
+ assert_eq!(c.u, 43);
+ c.u = 44;
+ let mut d = c.map_mut(|x| &mut x.u);
+ assert_eq!(*d, 44);
+ *d = 45;
+ assert_eq!(*d, 45);
+ }
+}
diff --git a/src/librustc_data_structures/sorted_map.rs b/src/librustc_data_structures/sorted_map.rs
index 1f674c1..fb819dd 100644
--- a/src/librustc_data_structures/sorted_map.rs
+++ b/src/librustc_data_structures/sorted_map.rs
@@ -305,204 +305,4 @@
}
#[cfg(test)]
-mod tests {
- use super::SortedMap;
-
- #[test]
- fn test_insert_and_iter() {
- let mut map = SortedMap::new();
- let mut expected = Vec::new();
-
- for x in 0 .. 100 {
- assert_eq!(map.iter().cloned().collect::<Vec<_>>(), expected);
-
- let x = 1000 - x * 2;
- map.insert(x, x);
- expected.insert(0, (x, x));
- }
- }
-
- #[test]
- fn test_get_and_index() {
- let mut map = SortedMap::new();
- let mut expected = Vec::new();
-
- for x in 0 .. 100 {
- let x = 1000 - x;
- if x & 1 == 0 {
- map.insert(x, x);
- }
- expected.push(x);
- }
-
- for mut x in expected {
- if x & 1 == 0 {
- assert_eq!(map.get(&x), Some(&x));
- assert_eq!(map.get_mut(&x), Some(&mut x));
- assert_eq!(map[&x], x);
- assert_eq!(&mut map[&x], &mut x);
- } else {
- assert_eq!(map.get(&x), None);
- assert_eq!(map.get_mut(&x), None);
- }
- }
- }
-
- #[test]
- fn test_range() {
- let mut map = SortedMap::new();
- map.insert(1, 1);
- map.insert(3, 3);
- map.insert(6, 6);
- map.insert(9, 9);
-
- let keys = |s: &[(_, _)]| {
- s.into_iter().map(|e| e.0).collect::<Vec<u32>>()
- };
-
- for start in 0 .. 11 {
- for end in 0 .. 11 {
- if end < start {
- continue
- }
-
- let mut expected = vec![1, 3, 6, 9];
- expected.retain(|&x| x >= start && x < end);
-
- assert_eq!(keys(map.range(start..end)), expected, "range = {}..{}", start, end);
- }
- }
- }
-
-
- #[test]
- fn test_offset_keys() {
- let mut map = SortedMap::new();
- map.insert(1, 1);
- map.insert(3, 3);
- map.insert(6, 6);
-
- map.offset_keys(|k| *k += 1);
-
- let mut expected = SortedMap::new();
- expected.insert(2, 1);
- expected.insert(4, 3);
- expected.insert(7, 6);
-
- assert_eq!(map, expected);
- }
-
- fn keys(s: SortedMap<u32, u32>) -> Vec<u32> {
- s.into_iter().map(|(k, _)| k).collect::<Vec<u32>>()
- }
-
- fn elements(s: SortedMap<u32, u32>) -> Vec<(u32, u32)> {
- s.into_iter().collect::<Vec<(u32, u32)>>()
- }
-
- #[test]
- fn test_remove_range() {
- let mut map = SortedMap::new();
- map.insert(1, 1);
- map.insert(3, 3);
- map.insert(6, 6);
- map.insert(9, 9);
-
- for start in 0 .. 11 {
- for end in 0 .. 11 {
- if end < start {
- continue
- }
-
- let mut expected = vec![1, 3, 6, 9];
- expected.retain(|&x| x < start || x >= end);
-
- let mut map = map.clone();
- map.remove_range(start .. end);
-
- assert_eq!(keys(map), expected, "range = {}..{}", start, end);
- }
- }
- }
-
- #[test]
- fn test_remove() {
- let mut map = SortedMap::new();
- let mut expected = Vec::new();
-
- for x in 0..10 {
- map.insert(x, x);
- expected.push((x, x));
- }
-
- for x in 0 .. 10 {
- let mut map = map.clone();
- let mut expected = expected.clone();
-
- assert_eq!(map.remove(&x), Some(x));
- expected.remove(x as usize);
-
- assert_eq!(map.iter().cloned().collect::<Vec<_>>(), expected);
- }
- }
-
- #[test]
- fn test_insert_presorted_non_overlapping() {
- let mut map = SortedMap::new();
- map.insert(2, 0);
- map.insert(8, 0);
-
- map.insert_presorted(vec![(3, 0), (7, 0)]);
-
- let expected = vec![2, 3, 7, 8];
- assert_eq!(keys(map), expected);
- }
-
- #[test]
- fn test_insert_presorted_first_elem_equal() {
- let mut map = SortedMap::new();
- map.insert(2, 2);
- map.insert(8, 8);
-
- map.insert_presorted(vec![(2, 0), (7, 7)]);
-
- let expected = vec![(2, 0), (7, 7), (8, 8)];
- assert_eq!(elements(map), expected);
- }
-
- #[test]
- fn test_insert_presorted_last_elem_equal() {
- let mut map = SortedMap::new();
- map.insert(2, 2);
- map.insert(8, 8);
-
- map.insert_presorted(vec![(3, 3), (8, 0)]);
-
- let expected = vec![(2, 2), (3, 3), (8, 0)];
- assert_eq!(elements(map), expected);
- }
-
- #[test]
- fn test_insert_presorted_shuffle() {
- let mut map = SortedMap::new();
- map.insert(2, 2);
- map.insert(7, 7);
-
- map.insert_presorted(vec![(1, 1), (3, 3), (8, 8)]);
-
- let expected = vec![(1, 1), (2, 2), (3, 3), (7, 7), (8, 8)];
- assert_eq!(elements(map), expected);
- }
-
- #[test]
- fn test_insert_presorted_at_end() {
- let mut map = SortedMap::new();
- map.insert(1, 1);
- map.insert(2, 2);
-
- map.insert_presorted(vec![(3, 3), (8, 8)]);
-
- let expected = vec![(1, 1), (2, 2), (3, 3), (8, 8)];
- assert_eq!(elements(map), expected);
- }
-}
+mod tests;
diff --git a/src/librustc_data_structures/sorted_map/tests.rs b/src/librustc_data_structures/sorted_map/tests.rs
new file mode 100644
index 0000000..f970409c
--- /dev/null
+++ b/src/librustc_data_structures/sorted_map/tests.rs
@@ -0,0 +1,199 @@
+use super::SortedMap;
+
+#[test]
+fn test_insert_and_iter() {
+ let mut map = SortedMap::new();
+ let mut expected = Vec::new();
+
+ for x in 0 .. 100 {
+ assert_eq!(map.iter().cloned().collect::<Vec<_>>(), expected);
+
+ let x = 1000 - x * 2;
+ map.insert(x, x);
+ expected.insert(0, (x, x));
+ }
+}
+
+#[test]
+fn test_get_and_index() {
+ let mut map = SortedMap::new();
+ let mut expected = Vec::new();
+
+ for x in 0 .. 100 {
+ let x = 1000 - x;
+ if x & 1 == 0 {
+ map.insert(x, x);
+ }
+ expected.push(x);
+ }
+
+ for mut x in expected {
+ if x & 1 == 0 {
+ assert_eq!(map.get(&x), Some(&x));
+ assert_eq!(map.get_mut(&x), Some(&mut x));
+ assert_eq!(map[&x], x);
+ assert_eq!(&mut map[&x], &mut x);
+ } else {
+ assert_eq!(map.get(&x), None);
+ assert_eq!(map.get_mut(&x), None);
+ }
+ }
+}
+
+#[test]
+fn test_range() {
+ let mut map = SortedMap::new();
+ map.insert(1, 1);
+ map.insert(3, 3);
+ map.insert(6, 6);
+ map.insert(9, 9);
+
+ let keys = |s: &[(_, _)]| {
+ s.into_iter().map(|e| e.0).collect::<Vec<u32>>()
+ };
+
+ for start in 0 .. 11 {
+ for end in 0 .. 11 {
+ if end < start {
+ continue
+ }
+
+ let mut expected = vec![1, 3, 6, 9];
+ expected.retain(|&x| x >= start && x < end);
+
+ assert_eq!(keys(map.range(start..end)), expected, "range = {}..{}", start, end);
+ }
+ }
+}
+
+
+#[test]
+fn test_offset_keys() {
+ let mut map = SortedMap::new();
+ map.insert(1, 1);
+ map.insert(3, 3);
+ map.insert(6, 6);
+
+ map.offset_keys(|k| *k += 1);
+
+ let mut expected = SortedMap::new();
+ expected.insert(2, 1);
+ expected.insert(4, 3);
+ expected.insert(7, 6);
+
+ assert_eq!(map, expected);
+}
+
+fn keys(s: SortedMap<u32, u32>) -> Vec<u32> {
+ s.into_iter().map(|(k, _)| k).collect::<Vec<u32>>()
+}
+
+fn elements(s: SortedMap<u32, u32>) -> Vec<(u32, u32)> {
+ s.into_iter().collect::<Vec<(u32, u32)>>()
+}
+
+#[test]
+fn test_remove_range() {
+ let mut map = SortedMap::new();
+ map.insert(1, 1);
+ map.insert(3, 3);
+ map.insert(6, 6);
+ map.insert(9, 9);
+
+ for start in 0 .. 11 {
+ for end in 0 .. 11 {
+ if end < start {
+ continue
+ }
+
+ let mut expected = vec![1, 3, 6, 9];
+ expected.retain(|&x| x < start || x >= end);
+
+ let mut map = map.clone();
+ map.remove_range(start .. end);
+
+ assert_eq!(keys(map), expected, "range = {}..{}", start, end);
+ }
+ }
+}
+
+#[test]
+fn test_remove() {
+ let mut map = SortedMap::new();
+ let mut expected = Vec::new();
+
+ for x in 0..10 {
+ map.insert(x, x);
+ expected.push((x, x));
+ }
+
+ for x in 0 .. 10 {
+ let mut map = map.clone();
+ let mut expected = expected.clone();
+
+ assert_eq!(map.remove(&x), Some(x));
+ expected.remove(x as usize);
+
+ assert_eq!(map.iter().cloned().collect::<Vec<_>>(), expected);
+ }
+}
+
+#[test]
+fn test_insert_presorted_non_overlapping() {
+ let mut map = SortedMap::new();
+ map.insert(2, 0);
+ map.insert(8, 0);
+
+ map.insert_presorted(vec![(3, 0), (7, 0)]);
+
+ let expected = vec![2, 3, 7, 8];
+ assert_eq!(keys(map), expected);
+}
+
+#[test]
+fn test_insert_presorted_first_elem_equal() {
+ let mut map = SortedMap::new();
+ map.insert(2, 2);
+ map.insert(8, 8);
+
+ map.insert_presorted(vec![(2, 0), (7, 7)]);
+
+ let expected = vec![(2, 0), (7, 7), (8, 8)];
+ assert_eq!(elements(map), expected);
+}
+
+#[test]
+fn test_insert_presorted_last_elem_equal() {
+ let mut map = SortedMap::new();
+ map.insert(2, 2);
+ map.insert(8, 8);
+
+ map.insert_presorted(vec![(3, 3), (8, 0)]);
+
+ let expected = vec![(2, 2), (3, 3), (8, 0)];
+ assert_eq!(elements(map), expected);
+}
+
+#[test]
+fn test_insert_presorted_shuffle() {
+ let mut map = SortedMap::new();
+ map.insert(2, 2);
+ map.insert(7, 7);
+
+ map.insert_presorted(vec![(1, 1), (3, 3), (8, 8)]);
+
+ let expected = vec![(1, 1), (2, 2), (3, 3), (7, 7), (8, 8)];
+ assert_eq!(elements(map), expected);
+}
+
+#[test]
+fn test_insert_presorted_at_end() {
+ let mut map = SortedMap::new();
+ map.insert(1, 1);
+ map.insert(2, 2);
+
+ map.insert_presorted(vec![(3, 3), (8, 8)]);
+
+ let expected = vec![(1, 1), (2, 2), (3, 3), (8, 8)];
+ assert_eq!(elements(map), expected);
+}
diff --git a/src/librustc_data_structures/thin_vec.rs b/src/librustc_data_structures/thin_vec.rs
index 52f23f4..6692903 100644
--- a/src/librustc_data_structures/thin_vec.rs
+++ b/src/librustc_data_structures/thin_vec.rs
@@ -66,3 +66,9 @@
(**self).hash_stable(hcx, hasher)
}
}
+
+impl<T> Default for ThinVec<T> {
+ fn default() -> Self {
+ Self(None)
+ }
+}
diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs
index 812321f..683da58 100644
--- a/src/librustc_driver/pretty.rs
+++ b/src/librustc_driver/pretty.rs
@@ -154,12 +154,14 @@
impl PpSourceMode {
/// Constructs a `PrinterSupport` object and passes it to `f`.
- fn call_with_pp_support<'tcx, A, F>(&self,
- sess: &'tcx Session,
- tcx: Option<TyCtxt<'tcx, 'tcx, 'tcx>>,
- f: F)
- -> A
- where F: FnOnce(&dyn PrinterSupport) -> A
+ fn call_with_pp_support<'tcx, A, F>(
+ &self,
+ sess: &'tcx Session,
+ tcx: Option<TyCtxt<'tcx>>,
+ f: F,
+ ) -> A
+ where
+ F: FnOnce(&dyn PrinterSupport) -> A,
{
match *self {
PpmNormal | PpmEveryBodyLoops | PpmExpanded => {
@@ -186,12 +188,9 @@
_ => panic!("Should use call_with_pp_support_hir"),
}
}
- fn call_with_pp_support_hir<'tcx, A, F>(
- &self,
- tcx: TyCtxt<'tcx, 'tcx, 'tcx>,
- f: F
- ) -> A
- where F: FnOnce(&dyn HirPrinterSupport<'_>, &hir::Crate) -> A
+ fn call_with_pp_support_hir<'tcx, A, F>(&self, tcx: TyCtxt<'tcx>, f: F) -> A
+ where
+ F: FnOnce(&dyn HirPrinterSupport<'_>, &hir::Crate) -> A,
{
match *self {
PpmNormal => {
@@ -270,7 +269,7 @@
struct NoAnn<'hir> {
sess: &'hir Session,
- tcx: Option<TyCtxt<'hir, 'hir, 'hir>>,
+ tcx: Option<TyCtxt<'hir>>,
}
impl<'hir> PrinterSupport for NoAnn<'hir> {
@@ -311,7 +310,7 @@
struct IdentifiedAnnotation<'hir> {
sess: &'hir Session,
- tcx: Option<TyCtxt<'hir, 'hir, 'hir>>,
+ tcx: Option<TyCtxt<'hir>>,
}
impl<'hir> PrinterSupport for IdentifiedAnnotation<'hir> {
@@ -454,9 +453,8 @@
}
}
-
struct TypedAnnotation<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
tables: Cell<&'a ty::TypeckTables<'tcx>>,
}
@@ -617,12 +615,13 @@
}
}
-fn print_flowgraph<'a, 'tcx, W: Write>(variants: Vec<borrowck_dot::Variant>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- code: blocks::Code<'tcx>,
- mode: PpFlowGraphMode,
- mut out: W)
- -> io::Result<()> {
+fn print_flowgraph<'tcx, W: Write>(
+ variants: Vec<borrowck_dot::Variant>,
+ tcx: TyCtxt<'tcx>,
+ code: blocks::Code<'tcx>,
+ mode: PpFlowGraphMode,
+ mut out: W,
+) -> io::Result<()> {
let body_id = match code {
blocks::Code::Expr(expr) => {
// Find the function this expression is from.
@@ -755,12 +754,13 @@
}
pub fn print_after_hir_lowering<'tcx>(
- tcx: TyCtxt<'tcx, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
input: &Input,
krate: &ast::Crate,
ppm: PpMode,
opt_uii: Option<UserIdentifiedItem>,
- ofile: Option<&Path>) {
+ ofile: Option<&Path>,
+) {
if ppm.needs_analysis() {
abort_on_err(print_with_analysis(
tcx,
@@ -866,10 +866,10 @@
// with a different callback than the standard driver, so that isn't easy.
// Instead, we call that function ourselves.
fn print_with_analysis<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
ppm: PpMode,
uii: Option<UserIdentifiedItem>,
- ofile: Option<&Path>
+ ofile: Option<&Path>,
) -> Result<(), ErrorReported> {
let nodeid = if let Some(uii) = uii {
debug!("pretty printing for {:?}", uii);
@@ -922,7 +922,8 @@
got {:?}",
node);
- tcx.sess.span_fatal(tcx.hir().span(nodeid), &message)
+ let hir_id = tcx.hir().node_to_hir_id(nodeid);
+ tcx.sess.span_fatal(tcx.hir().span(hir_id), &message)
}
}
}
diff --git a/src/librustc_errors/annotate_snippet_emitter_writer.rs b/src/librustc_errors/annotate_snippet_emitter_writer.rs
index 9f9c758..7ed2fdd 100644
--- a/src/librustc_errors/annotate_snippet_emitter_writer.rs
+++ b/src/librustc_errors/annotate_snippet_emitter_writer.rs
@@ -194,7 +194,7 @@
let converter = DiagnosticConverter {
source_map: self.source_map.clone(),
level: level.clone(),
- message: message.clone(),
+ message,
code: code.clone(),
msp: msp.clone(),
children,
diff --git a/src/librustc_errors/lock.rs b/src/librustc_errors/lock.rs
index f731791..25a27d2 100644
--- a/src/librustc_errors/lock.rs
+++ b/src/librustc_errors/lock.rs
@@ -64,7 +64,7 @@
//
// This will silently create one if it doesn't already exist, or it'll
// open up a handle to one if it already exists.
- let mutex = CreateMutexA(0 as *mut _, 0, cname.as_ptr() as *const u8);
+ let mutex = CreateMutexA(std::ptr::null_mut(), 0, cname.as_ptr() as *const u8);
if mutex.is_null() {
panic!("failed to create global mutex named `{}`: {}",
name,
diff --git a/src/librustc_incremental/assert_dep_graph.rs b/src/librustc_incremental/assert_dep_graph.rs
index ea89d2c..a43347a 100644
--- a/src/librustc_incremental/assert_dep_graph.rs
+++ b/src/librustc_incremental/assert_dep_graph.rs
@@ -51,7 +51,7 @@
use syntax::ast;
use syntax_pos::Span;
-pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn assert_dep_graph<'tcx>(tcx: TyCtxt<'tcx>) {
tcx.dep_graph.with_ignore(|| {
if tcx.sess.opts.debugging_opts.dump_dep_graph {
dump_graph(tcx);
@@ -89,13 +89,13 @@
type Sources = Vec<(Span, DefId, DepNode)>;
type Targets = Vec<(Span, ast::Name, hir::HirId, DepNode)>;
-struct IfThisChanged<'a, 'tcx:'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct IfThisChanged<'tcx> {
+ tcx: TyCtxt<'tcx>,
if_this_changed: Sources,
then_this_would_need: Targets,
}
-impl<'a, 'tcx> IfThisChanged<'a, 'tcx> {
+impl IfThisChanged<'tcx> {
fn argument(&self, attr: &ast::Attribute) -> Option<ast::Name> {
let mut value = None;
for list_item in attr.meta_item_list().unwrap_or_default() {
@@ -158,7 +158,7 @@
}
}
-impl<'a, 'tcx> Visitor<'tcx> for IfThisChanged<'a, 'tcx> {
+impl Visitor<'tcx> for IfThisChanged<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::OnlyBodies(&self.tcx.hir())
}
@@ -184,10 +184,7 @@
}
}
-fn check_paths<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- if_this_changed: &Sources,
- then_this_would_need: &Targets)
-{
+fn check_paths<'tcx>(tcx: TyCtxt<'tcx>, if_this_changed: &Sources, then_this_would_need: &Targets) {
// Return early here so as not to construct the query, which is not cheap.
if if_this_changed.is_empty() {
for &(target_span, _, _, _) in then_this_would_need {
@@ -217,7 +214,7 @@
}
}
-fn dump_graph(tcx: TyCtxt<'_, '_, '_>) {
+fn dump_graph(tcx: TyCtxt<'_>) {
let path: String = env::var("RUST_DEP_GRAPH").unwrap_or_else(|_| "dep_graph".to_string());
let query = tcx.dep_graph.query();
diff --git a/src/librustc_incremental/assert_module_sources.rs b/src/librustc_incremental/assert_module_sources.rs
index 04dad9c..f502d04 100644
--- a/src/librustc_incremental/assert_module_sources.rs
+++ b/src/librustc_incremental/assert_module_sources.rs
@@ -35,7 +35,7 @@
const CFG: Symbol = sym::cfg;
const KIND: Symbol = sym::kind;
-pub fn assert_module_sources<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn assert_module_sources<'tcx>(tcx: TyCtxt<'tcx>) {
tcx.dep_graph.with_ignore(|| {
if tcx.sess.opts.incremental.is_none() {
return;
@@ -59,12 +59,12 @@
})
}
-struct AssertModuleSource<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct AssertModuleSource<'tcx> {
+ tcx: TyCtxt<'tcx>,
available_cgus: BTreeSet<String>,
}
-impl<'a, 'tcx> AssertModuleSource<'a, 'tcx> {
+impl AssertModuleSource<'tcx> {
fn check_attr(&self, attr: &ast::Attribute) {
let (expected_reuse, comp_kind) = if attr.check_name(ATTR_PARTITION_REUSED) {
(CguReuse::PreLto, ComparisonKind::AtLeast)
diff --git a/src/librustc_incremental/lib.rs b/src/librustc_incremental/lib.rs
index 50780ba..ffea495 100644
--- a/src/librustc_incremental/lib.rs
+++ b/src/librustc_incremental/lib.rs
@@ -2,6 +2,7 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
+#![feature(in_band_lifetimes)]
#![feature(nll)]
#![feature(specialization)]
diff --git a/src/librustc_incremental/persist/dirty_clean.rs b/src/librustc_incremental/persist/dirty_clean.rs
index f404a4f..e2e4a4e 100644
--- a/src/librustc_incremental/persist/dirty_clean.rs
+++ b/src/librustc_incremental/persist/dirty_clean.rs
@@ -206,7 +206,7 @@
}
}
-pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn check_dirty_clean_annotations<'tcx>(tcx: TyCtxt<'tcx>) {
// can't add `#[rustc_dirty]` etc without opting in to this feature
if !tcx.features().rustc_attrs {
return;
@@ -234,13 +234,12 @@
})
}
-pub struct DirtyCleanVisitor<'a, 'tcx:'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub struct DirtyCleanVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
checked_attrs: FxHashSet<ast::AttrId>,
}
-impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
-
+impl DirtyCleanVisitor<'tcx> {
/// Possibly "deserialize" the attribute into a clean/dirty assertion
fn assertion_maybe(&mut self, item_id: hir::HirId, attr: &Attribute)
-> Option<Assertion>
@@ -518,7 +517,7 @@
}
}
-impl<'a, 'tcx> ItemLikeVisitor<'tcx> for DirtyCleanVisitor<'a, 'tcx> {
+impl ItemLikeVisitor<'tcx> for DirtyCleanVisitor<'tcx> {
fn visit_item(&mut self, item: &'tcx hir::Item) {
self.check_item(item.hir_id, item.span);
}
@@ -538,7 +537,7 @@
///
/// Also make sure that the `label` and `except` fields do not
/// both exist.
-fn check_config(tcx: TyCtxt<'_, '_, '_>, attr: &Attribute) -> bool {
+fn check_config(tcx: TyCtxt<'_>, attr: &Attribute) -> bool {
debug!("check_config(attr={:?})", attr);
let config = &tcx.sess.parse_sess.config;
debug!("check_config: config={:?}", config);
@@ -573,7 +572,7 @@
}
}
-fn expect_associated_value(tcx: TyCtxt<'_, '_, '_>, item: &NestedMetaItem) -> ast::Name {
+fn expect_associated_value(tcx: TyCtxt<'_>, item: &NestedMetaItem) -> ast::Name {
if let Some(value) = item.value_str() {
value
} else {
@@ -590,14 +589,13 @@
// A visitor that collects all #[rustc_dirty]/#[rustc_clean] attributes from
// the HIR. It is used to verfiy that we really ran checks for all annotated
// nodes.
-pub struct FindAllAttrs<'a, 'tcx:'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub struct FindAllAttrs<'tcx> {
+ tcx: TyCtxt<'tcx>,
attr_names: Vec<Symbol>,
found_attrs: Vec<&'tcx Attribute>,
}
-impl<'a, 'tcx> FindAllAttrs<'a, 'tcx> {
-
+impl FindAllAttrs<'tcx> {
fn is_active_attr(&mut self, attr: &Attribute) -> bool {
for attr_name in &self.attr_names {
if attr.check_name(*attr_name) && check_config(self.tcx, attr) {
@@ -618,7 +616,7 @@
}
}
-impl<'a, 'tcx> intravisit::Visitor<'tcx> for FindAllAttrs<'a, 'tcx> {
+impl intravisit::Visitor<'tcx> for FindAllAttrs<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'tcx> {
intravisit::NestedVisitorMap::All(&self.tcx.hir())
}
diff --git a/src/librustc_incremental/persist/load.rs b/src/librustc_incremental/persist/load.rs
index 255a389..d9bcc0b 100644
--- a/src/librustc_incremental/persist/load.rs
+++ b/src/librustc_incremental/persist/load.rs
@@ -15,7 +15,7 @@
use super::file_format;
use super::work_product;
-pub fn dep_graph_tcx_init<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn dep_graph_tcx_init<'tcx>(tcx: TyCtxt<'tcx>) {
if !tcx.dep_graph.is_fully_enabled() {
return
}
diff --git a/src/librustc_incremental/persist/save.rs b/src/librustc_incremental/persist/save.rs
index 34fe2f1..49c79ec 100644
--- a/src/librustc_incremental/persist/save.rs
+++ b/src/librustc_incremental/persist/save.rs
@@ -15,7 +15,7 @@
use super::file_format;
use super::work_product;
-pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn save_dep_graph<'tcx>(tcx: TyCtxt<'tcx>) {
debug!("save_dep_graph()");
tcx.dep_graph.with_ignore(|| {
let sess = tcx.sess;
@@ -129,8 +129,7 @@
}
}
-fn encode_dep_graph(tcx: TyCtxt<'_, '_, '_>,
- encoder: &mut Encoder) {
+fn encode_dep_graph(tcx: TyCtxt<'_>, encoder: &mut Encoder) {
// First encode the commandline arguments hash
tcx.sess.opts.dep_tracking_hash().encode(encoder).unwrap();
@@ -234,8 +233,7 @@
serialized_products.encode(encoder).unwrap();
}
-fn encode_query_cache(tcx: TyCtxt<'_, '_, '_>,
- encoder: &mut Encoder) {
+fn encode_query_cache(tcx: TyCtxt<'_>, encoder: &mut Encoder) {
time(tcx.sess, "serialize query result cache", || {
tcx.serialize_query_result_cache(encoder).unwrap();
})
diff --git a/src/librustc_interface/passes.rs b/src/librustc_interface/passes.rs
index 9691d03..69cb696 100644
--- a/src/librustc_interface/passes.rs
+++ b/src/librustc_interface/passes.rs
@@ -791,14 +791,14 @@
declare_box_region_type!(
pub BoxedGlobalCtxt,
- for('gcx),
- (&'gcx GlobalCtxt<'gcx>) -> ((), ())
+ for('tcx),
+ (&'tcx GlobalCtxt<'tcx>) -> ((), ())
);
impl BoxedGlobalCtxt {
pub fn enter<F, R>(&mut self, f: F) -> R
where
- F: for<'tcx> FnOnce(TyCtxt<'tcx, 'tcx, 'tcx>) -> R
+ F: for<'tcx> FnOnce(TyCtxt<'tcx>) -> R,
{
self.access(|gcx| ty::tls::enter_global(gcx, |tcx| f(tcx)))
}
@@ -811,7 +811,7 @@
resolutions: Resolutions,
outputs: OutputFilenames,
tx: mpsc::Sender<Box<dyn Any + Send>>,
- crate_name: &str
+ crate_name: &str,
) -> BoxedGlobalCtxt {
let sess = compiler.session().clone();
let cstore = compiler.cstore.clone();
@@ -866,7 +866,7 @@
});
yield BoxedGlobalCtxt::initial_yield(());
- box_region_allow_access!(for('gcx), (&'gcx GlobalCtxt<'gcx>), (gcx));
+ box_region_allow_access!(for('tcx), (&'tcx GlobalCtxt<'tcx>), (gcx));
if sess.opts.debugging_opts.query_stats {
gcx.queries.print_stats();
@@ -878,10 +878,7 @@
/// Runs the resolution, type-checking, region checking and other
/// miscellaneous analysis passes on the crate.
-fn analysis<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
- cnum: CrateNum,
-) -> Result<()> {
+fn analysis<'tcx>(tcx: TyCtxt<'tcx>, cnum: CrateNum) -> Result<()> {
assert_eq!(cnum, LOCAL_CRATE);
let sess = tcx.sess;
@@ -999,7 +996,7 @@
}
fn encode_and_write_metadata<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
outputs: &OutputFilenames,
) -> (middle::cstore::EncodedMetadata, bool) {
#[derive(PartialEq, Eq, PartialOrd, Ord)]
@@ -1062,7 +1059,7 @@
/// be discarded.
pub fn start_codegen<'tcx>(
codegen_backend: &dyn CodegenBackend,
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
rx: mpsc::Receiver<Box<dyn Any + Send>>,
outputs: &OutputFilenames,
) -> Box<dyn Any> {
diff --git a/src/librustc_interface/proc_macro_decls.rs b/src/librustc_interface/proc_macro_decls.rs
index e9f2f04..9e1ef6b 100644
--- a/src/librustc_interface/proc_macro_decls.rs
+++ b/src/librustc_interface/proc_macro_decls.rs
@@ -6,14 +6,11 @@
use syntax::attr;
use syntax::symbol::sym;
-pub fn find<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>) -> Option<DefId> {
+pub fn find<'tcx>(tcx: TyCtxt<'tcx>) -> Option<DefId> {
tcx.proc_macro_decls_static(LOCAL_CRATE)
}
-fn proc_macro_decls_static<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
- cnum: CrateNum,
-) -> Option<DefId> {
+fn proc_macro_decls_static<'tcx>(tcx: TyCtxt<'tcx>, cnum: CrateNum) -> Option<DefId> {
assert_eq!(cnum, LOCAL_CRATE);
let mut finder = Finder { decls: None };
diff --git a/src/librustc_lint/lib.rs b/src/librustc_lint/lib.rs
index 87d46a0..ec8a9c6 100644
--- a/src/librustc_lint/lib.rs
+++ b/src/librustc_lint/lib.rs
@@ -75,7 +75,7 @@
};
}
-fn lint_mod<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) {
+fn lint_mod<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
lint::late_lint_mod(tcx, module_def_id, BuiltinCombinedModuleLateLintPass::new());
}
diff --git a/src/librustc_lint/nonstandard_style.rs b/src/librustc_lint/nonstandard_style.rs
index 551eded..b221b8e 100644
--- a/src/librustc_lint/nonstandard_style.rs
+++ b/src/librustc_lint/nonstandard_style.rs
@@ -254,7 +254,7 @@
let crate_ident = if let Some(name) = &cx.tcx.sess.opts.crate_name {
Some(Ident::from_str(name))
} else {
- attr::find_by_name(&cx.tcx.hir().attrs_by_hir_id(hir::CRATE_HIR_ID), sym::crate_name)
+ attr::find_by_name(&cx.tcx.hir().attrs(hir::CRATE_HIR_ID), sym::crate_name)
.and_then(|attr| attr.meta())
.and_then(|meta| {
meta.name_value_literal().and_then(|lit| {
@@ -440,26 +440,4 @@
}
#[cfg(test)]
-mod tests {
- use super::{is_camel_case, to_camel_case};
-
- #[test]
- fn camel_case() {
- assert!(!is_camel_case("userData"));
- assert_eq!(to_camel_case("userData"), "UserData");
-
- assert!(is_camel_case("X86_64"));
-
- assert!(!is_camel_case("X86__64"));
- assert_eq!(to_camel_case("X86__64"), "X86_64");
-
- assert!(!is_camel_case("Abc_123"));
- assert_eq!(to_camel_case("Abc_123"), "Abc123");
-
- assert!(!is_camel_case("A1_b2_c3"));
- assert_eq!(to_camel_case("A1_b2_c3"), "A1B2C3");
-
- assert!(!is_camel_case("ONE_TWO_THREE"));
- assert_eq!(to_camel_case("ONE_TWO_THREE"), "OneTwoThree");
- }
-}
+mod tests;
diff --git a/src/librustc_lint/nonstandard_style/tests.rs b/src/librustc_lint/nonstandard_style/tests.rs
new file mode 100644
index 0000000..39c525b
--- /dev/null
+++ b/src/librustc_lint/nonstandard_style/tests.rs
@@ -0,0 +1,21 @@
+use super::{is_camel_case, to_camel_case};
+
+#[test]
+fn camel_case() {
+ assert!(!is_camel_case("userData"));
+ assert_eq!(to_camel_case("userData"), "UserData");
+
+ assert!(is_camel_case("X86_64"));
+
+ assert!(!is_camel_case("X86__64"));
+ assert_eq!(to_camel_case("X86__64"), "X86_64");
+
+ assert!(!is_camel_case("Abc_123"));
+ assert_eq!(to_camel_case("Abc_123"), "Abc123");
+
+ assert!(!is_camel_case("A1_b2_c3"));
+ assert_eq!(to_camel_case("A1_b2_c3"), "A1B2C3");
+
+ assert!(!is_camel_case("ONE_TWO_THREE"));
+ assert_eq!(to_camel_case("ONE_TWO_THREE"), "OneTwoThree");
+}
diff --git a/src/librustc_lint/types.rs b/src/librustc_lint/types.rs
index 45cef61..9fc23e4 100644
--- a/src/librustc_lint/types.rs
+++ b/src/librustc_lint/types.rs
@@ -519,11 +519,11 @@
},
}
-fn is_zst<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, did: DefId, ty: Ty<'tcx>) -> bool {
+fn is_zst<'tcx>(tcx: TyCtxt<'tcx>, did: DefId, ty: Ty<'tcx>) -> bool {
tcx.layout_of(tcx.param_env(did).and(ty)).map(|layout| layout.is_zst()).unwrap_or(false)
}
-fn ty_is_known_nonnull<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
+fn ty_is_known_nonnull<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool {
match ty.sty {
ty::FnPtr(_) => true,
ty::Ref(..) => true,
@@ -555,11 +555,12 @@
/// to function pointers, references, core::num::NonZero*,
/// core::ptr::NonNull, and #[repr(transparent)] newtypes.
/// FIXME: This duplicates code in codegen.
-fn is_repr_nullable_ptr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: Ty<'tcx>,
- ty_def: &'tcx ty::AdtDef,
- substs: SubstsRef<'tcx>)
- -> bool {
+fn is_repr_nullable_ptr<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ ty: Ty<'tcx>,
+ ty_def: &'tcx ty::AdtDef,
+ substs: SubstsRef<'tcx>,
+) -> bool {
if ty_def.variants.len() != 2 {
return false;
}
@@ -920,7 +921,7 @@
impl<'a, 'tcx> LateLintPass<'a, 'tcx> for ImproperCTypes {
fn check_foreign_item(&mut self, cx: &LateContext<'_, '_>, it: &hir::ForeignItem) {
let mut vis = ImproperCTypesVisitor { cx };
- let abi = cx.tcx.hir().get_foreign_abi_by_hir_id(it.hir_id);
+ let abi = cx.tcx.hir().get_foreign_abi(it.hir_id);
if abi != Abi::RustIntrinsic && abi != Abi::PlatformIntrinsic {
match it.node {
hir::ForeignItemKind::Fn(ref decl, _, _) => {
diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs
index d540b3f..f84ce2f 100644
--- a/src/librustc_lint/unused.rs
+++ b/src/librustc_lint/unused.rs
@@ -136,7 +136,7 @@
descr_post_path: &str,
) -> bool {
if ty.is_unit() || cx.tcx.is_ty_uninhabited_from(
- cx.tcx.hir().get_module_parent_by_hir_id(expr.hir_id), ty)
+ cx.tcx.hir().get_module_parent(expr.hir_id), ty)
{
return true;
}
diff --git a/src/librustc_llvm/build.rs b/src/librustc_llvm/build.rs
index 7fa83dd..21fa872 100644
--- a/src/librustc_llvm/build.rs
+++ b/src/librustc_llvm/build.rs
@@ -234,6 +234,21 @@
}
}
+ // Some LLVM linker flags (-L and -l) may be needed even when linking
+ // librustc_llvm, for example when using static libc++, we may need to
+ // manually specify the library search path and -ldl -lpthread as link
+ // dependencies.
+ let llvm_linker_flags = env::var_os("LLVM_LINKER_FLAGS");
+ if let Some(s) = llvm_linker_flags {
+ for lib in s.into_string().unwrap().split_whitespace() {
+ if lib.starts_with("-l") {
+ println!("cargo:rustc-link-lib={}", &lib[2..]);
+ } else if lib.starts_with("-L") {
+ println!("cargo:rustc-link-search=native={}", &lib[2..]);
+ }
+ }
+ }
+
let llvm_static_stdcpp = env::var_os("LLVM_STATIC_STDCPP");
let llvm_use_libcxx = env::var_os("LLVM_USE_LIBCXX");
diff --git a/src/librustc_macros/src/query.rs b/src/librustc_macros/src/query.rs
index 8eacbfb..0474d2a 100644
--- a/src/librustc_macros/src/query.rs
+++ b/src/librustc_macros/src/query.rs
@@ -314,7 +314,7 @@
fn add_query_description_impl(
query: &Query,
modifiers: QueryModifiers,
- impls: &mut proc_macro2::TokenStream
+ impls: &mut proc_macro2::TokenStream,
) {
let name = &query.name;
let arg = &query.arg;
@@ -327,7 +327,7 @@
quote! {
#[inline]
fn try_load_from_disk(
- #tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ #tcx: TyCtxt<'tcx>,
#id: SerializedDepNodeIndex
) -> Option<Self::Value> {
#block
@@ -338,7 +338,7 @@
quote! {
#[inline]
fn try_load_from_disk(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
id: SerializedDepNodeIndex
) -> Option<Self::Value> {
tcx.queries.on_disk_cache.try_load_query_result(tcx, id)
@@ -350,7 +350,7 @@
quote! {
#[inline]
#[allow(unused_variables)]
- fn cache_on_disk(#tcx: TyCtxt<'_, 'tcx, 'tcx>, #key: Self::Key) -> bool {
+ fn cache_on_disk(#tcx: TyCtxt<'tcx>, #key: Self::Key) -> bool {
#expr
}
@@ -367,7 +367,7 @@
quote! {
#[allow(unused_variables)]
fn describe(
- #tcx: TyCtxt<'_, '_, '_>,
+ #tcx: TyCtxt<'_>,
#key: #arg,
) -> Cow<'static, str> {
format!(#desc).into()
diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs
index 7ffba41..5fef8e5 100644
--- a/src/librustc_metadata/creader.rs
+++ b/src/librustc_metadata/creader.rs
@@ -236,7 +236,7 @@
let host_lib = host_lib.unwrap();
self.load_derive_macros(
&host_lib.metadata.get_root(),
- host_lib.dylib.clone().map(|p| p.0),
+ host_lib.dylib.map(|p| p.0),
span
)
} else {
diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs
index 35faa1d..86536b1 100644
--- a/src/librustc_metadata/cstore_impl.rs
+++ b/src/librustc_metadata/cstore_impl.rs
@@ -40,11 +40,12 @@
(<$lt:tt> $tcx:ident, $def_id:ident, $other:ident, $cdata:ident,
$($name:ident => $compute:block)*) => {
pub fn provide_extern<$lt>(providers: &mut Providers<$lt>) {
- $(fn $name<'a, $lt:$lt, T>($tcx: TyCtxt<'a, $lt, $lt>, def_id_arg: T)
- -> <ty::queries::$name<$lt> as
- QueryConfig<$lt>>::Value
- where T: IntoArgs,
- {
+ // HACK(eddyb) `$lt: $lt` forces `$lt` to be early-bound, which
+ // allows the associated type in the return type to be normalized.
+ $(fn $name<$lt: $lt, T: IntoArgs>(
+ $tcx: TyCtxt<$lt>,
+ def_id_arg: T,
+ ) -> <ty::queries::$name<$lt> as QueryConfig<$lt>>::Value {
#[allow(unused_variables)]
let ($def_id, $other) = def_id_arg.into_args();
assert!(!$def_id.is_local());
@@ -550,10 +551,7 @@
self.do_postorder_cnums_untracked()
}
- fn encode_metadata<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> EncodedMetadata
- {
+ fn encode_metadata<'tcx>(&self, tcx: TyCtxt<'tcx>) -> EncodedMetadata {
encoder::encode_metadata(tcx)
}
diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs
index e3e327d..4bafe16 100644
--- a/src/librustc_metadata/decoder.rs
+++ b/src/librustc_metadata/decoder.rs
@@ -39,7 +39,7 @@
opaque: opaque::Decoder<'a>,
cdata: Option<&'a CrateMetadata>,
sess: Option<&'a Session>,
- tcx: Option<TyCtxt<'a, 'tcx, 'tcx>>,
+ tcx: Option<TyCtxt<'tcx>>,
// Cache the last used source_file for translating spans as an optimization.
last_source_file_index: usize,
@@ -55,7 +55,9 @@
fn raw_bytes(self) -> &'a [u8];
fn cdata(self) -> Option<&'a CrateMetadata> { None }
fn sess(self) -> Option<&'a Session> { None }
- fn tcx(self) -> Option<TyCtxt<'a, 'tcx, 'tcx>> { None }
+ fn tcx(self) -> Option<TyCtxt<'tcx>> {
+ None
+ }
fn decoder(self, pos: usize) -> DecodeContext<'a, 'tcx> {
let tcx = self.tcx();
@@ -114,14 +116,14 @@
}
}
-impl<'a, 'tcx> Metadata<'a, 'tcx> for (&'a CrateMetadata, TyCtxt<'a, 'tcx, 'tcx>) {
+impl<'a, 'tcx> Metadata<'a, 'tcx> for (&'a CrateMetadata, TyCtxt<'tcx>) {
fn raw_bytes(self) -> &'a [u8] {
self.0.raw_bytes()
}
fn cdata(self) -> Option<&'a CrateMetadata> {
Some(self.0)
}
- fn tcx(self) -> Option<TyCtxt<'a, 'tcx, 'tcx>> {
+ fn tcx(self) -> Option<TyCtxt<'tcx>> {
Some(self.1)
}
}
@@ -146,7 +148,7 @@
}
impl<'a, 'tcx> DecodeContext<'a, 'tcx> {
- pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
+ pub fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx.expect("missing TyCtxt in DecodeContext")
}
@@ -169,10 +171,9 @@
}
}
-impl<'a, 'tcx: 'a> TyDecoder<'a, 'tcx> for DecodeContext<'a, 'tcx> {
-
+impl<'a, 'tcx> TyDecoder<'tcx> for DecodeContext<'a, 'tcx> {
#[inline]
- fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx.expect("missing TyCtxt in DecodeContext")
}
@@ -547,11 +548,11 @@
fn get_variant(
&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
item: &Entry<'_>,
index: DefIndex,
parent_did: DefId,
- adt_kind: ty::AdtKind
+ adt_kind: ty::AdtKind,
) -> ty::VariantDef {
let data = match item.kind {
EntryKind::Variant(data) |
@@ -588,10 +589,7 @@
)
}
- pub fn get_adt_def(&self,
- item_id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> &'tcx ty::AdtDef {
+ pub fn get_adt_def(&self, item_id: DefIndex, tcx: TyCtxt<'tcx>) -> &'tcx ty::AdtDef {
let item = self.entry(item_id);
let did = self.local_def_id(item_id);
@@ -616,24 +614,27 @@
tcx.alloc_adt_def(did, kind, variants, repr)
}
- pub fn get_predicates(&self,
- item_id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> ty::GenericPredicates<'tcx> {
+ pub fn get_predicates(
+ &self,
+ item_id: DefIndex,
+ tcx: TyCtxt<'tcx>,
+ ) -> ty::GenericPredicates<'tcx> {
self.entry(item_id).predicates.unwrap().decode((self, tcx))
- }
+}
- pub fn get_predicates_defined_on(&self,
- item_id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> ty::GenericPredicates<'tcx> {
+ pub fn get_predicates_defined_on(
+ &self,
+ item_id: DefIndex,
+ tcx: TyCtxt<'tcx>,
+ ) -> ty::GenericPredicates<'tcx> {
self.entry(item_id).predicates_defined_on.unwrap().decode((self, tcx))
}
- pub fn get_super_predicates(&self,
- item_id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> ty::GenericPredicates<'tcx> {
+ pub fn get_super_predicates(
+ &self,
+ item_id: DefIndex,
+ tcx: TyCtxt<'tcx>,
+ ) -> ty::GenericPredicates<'tcx> {
let super_predicates = match self.entry(item_id).kind {
EntryKind::Trait(data) => data.decode(self).super_predicates,
EntryKind::TraitAlias(data) => data.decode(self).super_predicates,
@@ -650,7 +651,7 @@
self.entry(item_id).generics.unwrap().decode((self, sess))
}
- pub fn get_type(&self, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> {
+ pub fn get_type(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
self.entry(id).ty.unwrap().decode((self, tcx))
}
@@ -700,18 +701,12 @@
self.get_impl_data(id).coerce_unsized_info
}
- pub fn get_impl_trait(&self,
- id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> Option<ty::TraitRef<'tcx>> {
+ pub fn get_impl_trait(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> Option<ty::TraitRef<'tcx>> {
self.get_impl_data(id).trait_ref.map(|tr| tr.decode((self, tcx)))
}
/// Iterates over all the stability attributes in the given crate.
- pub fn get_lib_features(
- &self,
- tcx: TyCtxt<'_, 'tcx, '_>,
- ) -> &'tcx [(ast::Name, Option<ast::Name>)] {
+ pub fn get_lib_features(&self, tcx: TyCtxt<'tcx>) -> &'tcx [(ast::Name, Option<ast::Name>)] {
// FIXME: For a proc macro crate, not sure whether we should return the "host"
// features or an empty Vec. Both don't cause ICEs.
tcx.arena.alloc_from_iter(self.root
@@ -720,10 +715,7 @@
}
/// Iterates over the language items in the given crate.
- pub fn get_lang_items(
- &self,
- tcx: TyCtxt<'_, 'tcx, '_>,
- ) -> &'tcx [(DefId, usize)] {
+ pub fn get_lang_items(&self, tcx: TyCtxt<'tcx>) -> &'tcx [(DefId, usize)] {
if self.proc_macros.is_some() {
// Proc macro crates do not export any lang-items to the target.
&[]
@@ -884,10 +876,7 @@
self.maybe_entry(id).and_then(|item| item.decode(self).mir).is_some()
}
- pub fn maybe_get_optimized_mir(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- id: DefIndex)
- -> Option<Body<'tcx>> {
+ pub fn maybe_get_optimized_mir(&self, tcx: TyCtxt<'tcx>, id: DefIndex) -> Option<Body<'tcx>> {
match self.is_proc_macro(id) {
true => None,
false => self.entry(id).mir.map(|mir| mir.decode((self, tcx))),
@@ -1019,8 +1008,8 @@
pub fn get_inherent_implementations_for_type(
&self,
- tcx: TyCtxt<'_, 'tcx, '_>,
- id: DefIndex
+ tcx: TyCtxt<'tcx>,
+ id: DefIndex,
) -> &'tcx [DefId] {
tcx.arena.alloc_from_iter(self.entry(id)
.inherent_impls
@@ -1030,7 +1019,7 @@
pub fn get_implementations_for_trait(
&self,
- tcx: TyCtxt<'_, 'tcx, '_>,
+ tcx: TyCtxt<'tcx>,
filter: Option<DefId>,
) -> &'tcx [DefId] {
if self.proc_macros.is_some() {
@@ -1085,10 +1074,7 @@
}
}
- pub fn get_foreign_modules(
- &self,
- tcx: TyCtxt<'_, 'tcx, '_>,
- ) -> &'tcx [ForeignModule] {
+ pub fn get_foreign_modules(&self, tcx: TyCtxt<'tcx>) -> &'tcx [ForeignModule] {
if self.proc_macros.is_some() {
// Proc macro crates do not have any *target* foreign modules.
&[]
@@ -1099,7 +1085,7 @@
pub fn get_dylib_dependency_formats(
&self,
- tcx: TyCtxt<'_, 'tcx, '_>,
+ tcx: TyCtxt<'tcx>,
) -> &'tcx [(CrateNum, LinkagePreference)] {
tcx.arena.alloc_from_iter(self.root
.dylib_dependency_formats
@@ -1111,10 +1097,7 @@
}))
}
- pub fn get_missing_lang_items(
- &self,
- tcx: TyCtxt<'_, 'tcx, '_>,
- ) -> &'tcx [lang_items::LangItem] {
+ pub fn get_missing_lang_items(&self, tcx: TyCtxt<'tcx>) -> &'tcx [lang_items::LangItem] {
if self.proc_macros.is_some() {
// Proc macro crates do not depend on any target weak lang-items.
&[]
@@ -1135,9 +1118,10 @@
arg_names.decode(self).collect()
}
- pub fn exported_symbols(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> Vec<(ExportedSymbol<'tcx>, SymbolExportLevel)> {
+ pub fn exported_symbols(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ ) -> Vec<(ExportedSymbol<'tcx>, SymbolExportLevel)> {
if self.proc_macros.is_some() {
// If this crate is a custom derive crate, then we're not even going to
// link those in so we skip those crates.
@@ -1192,10 +1176,7 @@
}
}
- pub fn fn_sig(&self,
- id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> ty::PolyFnSig<'tcx> {
+ pub fn fn_sig(&self, id: DefIndex, tcx: TyCtxt<'tcx>) -> ty::PolyFnSig<'tcx> {
let sig = match self.entry(id).kind {
EntryKind::Fn(data) |
EntryKind::ForeignFn(data) => data.decode(self).sig,
diff --git a/src/librustc_metadata/dynamic_lib.rs b/src/librustc_metadata/dynamic_lib.rs
index 9dd160c..4c27936 100644
--- a/src/librustc_metadata/dynamic_lib.rs
+++ b/src/librustc_metadata/dynamic_lib.rs
@@ -74,55 +74,7 @@
}
#[cfg(test)]
-mod tests {
- use super::*;
- use std::mem;
-
- #[test]
- fn test_loading_atoi() {
- if cfg!(windows) {
- return
- }
-
- // The C library does not need to be loaded since it is already linked in
- let lib = match DynamicLibrary::open(None) {
- Err(error) => panic!("Could not load self as module: {}", error),
- Ok(lib) => lib
- };
-
- let atoi: extern fn(*const libc::c_char) -> libc::c_int = unsafe {
- match lib.symbol("atoi") {
- Err(error) => panic!("Could not load function atoi: {}", error),
- Ok(atoi) => mem::transmute::<*mut u8, _>(atoi)
- }
- };
-
- let argument = CString::new("1383428980").unwrap();
- let expected_result = 0x52757374;
- let result = atoi(argument.as_ptr());
- if result != expected_result {
- panic!("atoi({:?}) != {} but equaled {} instead", argument,
- expected_result, result)
- }
- }
-
- #[test]
- fn test_errors_do_not_crash() {
- use std::path::Path;
-
- if !cfg!(unix) {
- return
- }
-
- // Open /dev/null as a library to get an error, and make sure
- // that only causes an error, and not a crash.
- let path = Path::new("/dev/null");
- match DynamicLibrary::open(Some(&path)) {
- Err(_) => {}
- Ok(_) => panic!("Successfully opened the empty library.")
- }
- }
-}
+mod tests;
#[cfg(unix)]
mod dl {
@@ -161,9 +113,9 @@
pub fn check_for_errors_in<T, F>(f: F) -> Result<T, String> where
F: FnOnce() -> T,
{
- use std::sync::{Mutex, Once, ONCE_INIT};
- static INIT: Once = ONCE_INIT;
- static mut LOCK: *mut Mutex<()> = 0 as *mut _;
+ use std::sync::{Mutex, Once};
+ static INIT: Once = Once::new();
+ static mut LOCK: *mut Mutex<()> = ptr::null_mut();
unsafe {
INIT.call_once(|| {
LOCK = Box::into_raw(Box::new(Mutex::new(())));
diff --git a/src/librustc_metadata/dynamic_lib/tests.rs b/src/librustc_metadata/dynamic_lib/tests.rs
new file mode 100644
index 0000000..b2302f2
--- /dev/null
+++ b/src/librustc_metadata/dynamic_lib/tests.rs
@@ -0,0 +1,47 @@
+use super::*;
+use std::mem;
+
+#[test]
+fn test_loading_atoi() {
+ if cfg!(windows) {
+ return
+ }
+
+ // The C library does not need to be loaded since it is already linked in
+ let lib = match DynamicLibrary::open(None) {
+ Err(error) => panic!("Could not load self as module: {}", error),
+ Ok(lib) => lib
+ };
+
+ let atoi: extern fn(*const libc::c_char) -> libc::c_int = unsafe {
+ match lib.symbol("atoi") {
+ Err(error) => panic!("Could not load function atoi: {}", error),
+ Ok(atoi) => mem::transmute::<*mut u8, _>(atoi)
+ }
+ };
+
+ let argument = CString::new("1383428980").unwrap();
+ let expected_result = 0x52757374;
+ let result = atoi(argument.as_ptr());
+ if result != expected_result {
+ panic!("atoi({:?}) != {} but equaled {} instead", argument,
+ expected_result, result)
+ }
+}
+
+#[test]
+fn test_errors_do_not_crash() {
+ use std::path::Path;
+
+ if !cfg!(unix) {
+ return
+ }
+
+ // Open /dev/null as a library to get an error, and make sure
+ // that only causes an error, and not a crash.
+ let path = Path::new("/dev/null");
+ match DynamicLibrary::open(Some(&path)) {
+ Err(_) => {}
+ Ok(_) => panic!("Successfully opened the empty library.")
+ }
+}
diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs
index 586fc50..b52b6df 100644
--- a/src/librustc_metadata/encoder.rs
+++ b/src/librustc_metadata/encoder.rs
@@ -40,9 +40,9 @@
use rustc::hir::intravisit::{Visitor, NestedVisitorMap};
use rustc::hir::intravisit;
-pub struct EncodeContext<'a, 'tcx: 'a> {
+pub struct EncodeContext<'tcx> {
opaque: opaque::Encoder,
- pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ pub tcx: TyCtxt<'tcx>,
entries_index: Index<'tcx>,
@@ -65,7 +65,7 @@
}
}
-impl<'a, 'tcx> Encoder for EncodeContext<'a, 'tcx> {
+impl<'tcx> Encoder for EncodeContext<'tcx> {
type Error = <opaque::Encoder as Encoder>::Error;
fn emit_unit(&mut self) -> Result<(), Self::Error> {
@@ -95,13 +95,13 @@
}
}
-impl<'a, 'tcx, T> SpecializedEncoder<Lazy<T>> for EncodeContext<'a, 'tcx> {
+impl<'tcx, T> SpecializedEncoder<Lazy<T>> for EncodeContext<'tcx> {
fn specialized_encode(&mut self, lazy: &Lazy<T>) -> Result<(), Self::Error> {
self.emit_lazy_distance(lazy.position, Lazy::<T>::min_size())
}
}
-impl<'a, 'tcx, T> SpecializedEncoder<LazySeq<T>> for EncodeContext<'a, 'tcx> {
+impl<'tcx, T> SpecializedEncoder<LazySeq<T>> for EncodeContext<'tcx> {
fn specialized_encode(&mut self, seq: &LazySeq<T>) -> Result<(), Self::Error> {
self.emit_usize(seq.len)?;
if seq.len == 0 {
@@ -111,14 +111,14 @@
}
}
-impl<'a, 'tcx> SpecializedEncoder<CrateNum> for EncodeContext<'a, 'tcx> {
+impl<'tcx> SpecializedEncoder<CrateNum> for EncodeContext<'tcx> {
#[inline]
fn specialized_encode(&mut self, cnum: &CrateNum) -> Result<(), Self::Error> {
self.emit_u32(cnum.as_u32())
}
}
-impl<'a, 'tcx> SpecializedEncoder<DefId> for EncodeContext<'a, 'tcx> {
+impl<'tcx> SpecializedEncoder<DefId> for EncodeContext<'tcx> {
#[inline]
fn specialized_encode(&mut self, def_id: &DefId) -> Result<(), Self::Error> {
let DefId {
@@ -131,14 +131,14 @@
}
}
-impl<'a, 'tcx> SpecializedEncoder<DefIndex> for EncodeContext<'a, 'tcx> {
+impl<'tcx> SpecializedEncoder<DefIndex> for EncodeContext<'tcx> {
#[inline]
fn specialized_encode(&mut self, def_index: &DefIndex) -> Result<(), Self::Error> {
self.emit_u32(def_index.as_u32())
}
}
-impl<'a, 'tcx> SpecializedEncoder<Span> for EncodeContext<'a, 'tcx> {
+impl<'tcx> SpecializedEncoder<Span> for EncodeContext<'tcx> {
fn specialized_encode(&mut self, span: &Span) -> Result<(), Self::Error> {
if span.is_dummy() {
return TAG_INVALID_SPAN.encode(self)
@@ -173,20 +173,20 @@
}
}
-impl<'a, 'tcx> SpecializedEncoder<LocalDefId> for EncodeContext<'a, 'tcx> {
+impl<'tcx> SpecializedEncoder<LocalDefId> for EncodeContext<'tcx> {
#[inline]
fn specialized_encode(&mut self, def_id: &LocalDefId) -> Result<(), Self::Error> {
self.specialized_encode(&def_id.to_def_id())
}
}
-impl<'a, 'tcx> SpecializedEncoder<Ty<'tcx>> for EncodeContext<'a, 'tcx> {
+impl<'tcx> SpecializedEncoder<Ty<'tcx>> for EncodeContext<'tcx> {
fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> {
ty_codec::encode_with_shorthand(self, ty, |ecx| &mut ecx.type_shorthands)
}
}
-impl<'a, 'tcx> SpecializedEncoder<interpret::AllocId> for EncodeContext<'a, 'tcx> {
+impl<'tcx> SpecializedEncoder<interpret::AllocId> for EncodeContext<'tcx> {
fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
use std::collections::hash_map::Entry;
let index = match self.interpret_allocs.entry(*alloc_id) {
@@ -203,7 +203,7 @@
}
}
-impl<'a, 'tcx> SpecializedEncoder<ty::GenericPredicates<'tcx>> for EncodeContext<'a, 'tcx> {
+impl<'tcx> SpecializedEncoder<ty::GenericPredicates<'tcx>> for EncodeContext<'tcx> {
fn specialized_encode(&mut self,
predicates: &ty::GenericPredicates<'tcx>)
-> Result<(), Self::Error> {
@@ -211,14 +211,13 @@
}
}
-impl<'a, 'tcx> SpecializedEncoder<Fingerprint> for EncodeContext<'a, 'tcx> {
+impl<'tcx> SpecializedEncoder<Fingerprint> for EncodeContext<'tcx> {
fn specialized_encode(&mut self, f: &Fingerprint) -> Result<(), Self::Error> {
f.encode_opaque(&mut self.opaque)
}
}
-impl<'a, 'tcx, T: Encodable> SpecializedEncoder<mir::ClearCrossCrate<T>>
-for EncodeContext<'a, 'tcx> {
+impl<'tcx, T: Encodable> SpecializedEncoder<mir::ClearCrossCrate<T>> for EncodeContext<'tcx> {
fn specialized_encode(&mut self,
_: &mir::ClearCrossCrate<T>)
-> Result<(), Self::Error> {
@@ -226,14 +225,13 @@
}
}
-impl<'a, 'tcx> TyEncoder for EncodeContext<'a, 'tcx> {
+impl<'tcx> TyEncoder for EncodeContext<'tcx> {
fn position(&self) -> usize {
self.opaque.position()
}
}
-impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
-
+impl<'tcx> EncodeContext<'tcx> {
fn emit_node<F: FnOnce(&mut Self, usize) -> R, R>(&mut self, f: F) -> R {
assert_eq!(self.lazy_state, LazyState::NoNode);
let pos = self.position();
@@ -544,7 +542,7 @@
}
}
-impl EncodeContext<'_, 'tcx> {
+impl EncodeContext<'tcx> {
fn encode_variances_of(&mut self, def_id: DefId) -> LazySeq<ty::Variance> {
debug!("EncodeContext::encode_variances_of({:?})", def_id);
let tcx = self.tcx;
@@ -581,7 +579,7 @@
};
let enum_id = tcx.hir().as_local_hir_id(enum_did).unwrap();
- let enum_vis = &tcx.hir().expect_item_by_hir_id(enum_id).vis;
+ let enum_vis = &tcx.hir().expect_item(enum_id).vis;
Entry {
kind: EntryKind::Variant(self.lazy(&data)),
@@ -634,7 +632,7 @@
// Variant constructors have the same visibility as the parent enums, unless marked as
// non-exhaustive, in which case they are lowered to `pub(crate)`.
let enum_id = tcx.hir().as_local_hir_id(enum_did).unwrap();
- let enum_vis = &tcx.hir().expect_item_by_hir_id(enum_id).vis;
+ let enum_vis = &tcx.hir().expect_item(enum_id).vis;
let mut ctor_vis = ty::Visibility::from_hir(enum_vis, enum_id, tcx);
if variant.is_field_list_non_exhaustive() && ctor_vis == ty::Visibility::Public {
ctor_vis = ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX));
@@ -753,7 +751,7 @@
};
let struct_id = tcx.hir().as_local_hir_id(adt_def_id).unwrap();
- let struct_vis = &tcx.hir().expect_item_by_hir_id(struct_id).vis;
+ let struct_vis = &tcx.hir().expect_item(struct_id).vis;
let mut ctor_vis = ty::Visibility::from_hir(struct_vis, struct_id, tcx);
for field in &variant.fields {
if ctor_vis.is_at_least(field.vis, tcx) {
@@ -1648,7 +1646,7 @@
}
}
-impl Visitor<'tcx> for EncodeContext<'_, 'tcx> {
+impl Visitor<'tcx> for EncodeContext<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::OnlyBodies(&self.tcx.hir())
}
@@ -1698,7 +1696,7 @@
}
}
-impl EncodeContext<'_, 'tcx> {
+impl EncodeContext<'tcx> {
fn encode_fields(&mut self, adt_def_id: DefId) {
let def = self.tcx.adt_def(adt_def_id);
for (variant_index, variant) in def.variants.iter_enumerated() {
@@ -1817,12 +1815,12 @@
}
}
-struct ImplVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct ImplVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
impls: FxHashMap<DefId, Vec<DefIndex>>,
}
-impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for ImplVisitor<'a, 'tcx> {
+impl<'tcx, 'v> ItemLikeVisitor<'v> for ImplVisitor<'tcx> {
fn visit_item(&mut self, item: &hir::Item) {
if let hir::ItemKind::Impl(..) = item.node {
let impl_id = self.tcx.hir().local_def_id_from_hir_id(item.hir_id);
@@ -1865,9 +1863,7 @@
// will allow us to slice the metadata to the precise length that we just
// generated regardless of trailing bytes that end up in it.
-pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> EncodedMetadata
-{
+pub fn encode_metadata<'tcx>(tcx: TyCtxt<'tcx>) -> EncodedMetadata {
let mut encoder = opaque::Encoder::new(vec![]);
encoder.emit_raw_bytes(METADATA_HEADER);
@@ -1909,7 +1905,7 @@
EncodedMetadata { raw_data: result }
}
-pub fn get_repr_options<'a, 'tcx, 'gcx>(tcx: TyCtxt<'a, 'tcx, 'gcx>, did: DefId) -> ReprOptions {
+pub fn get_repr_options<'tcx>(tcx: TyCtxt<'tcx>, did: DefId) -> ReprOptions {
let ty = tcx.type_of(did);
match ty.sty {
ty::Adt(ref def, _) => return def.repr,
diff --git a/src/librustc_metadata/foreign_modules.rs b/src/librustc_metadata/foreign_modules.rs
index 284f679..0ce103c 100644
--- a/src/librustc_metadata/foreign_modules.rs
+++ b/src/librustc_metadata/foreign_modules.rs
@@ -3,21 +3,21 @@
use rustc::middle::cstore::ForeignModule;
use rustc::ty::TyCtxt;
-pub fn collect<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Vec<ForeignModule> {
+pub fn collect<'tcx>(tcx: TyCtxt<'tcx>) -> Vec<ForeignModule> {
let mut collector = Collector {
tcx,
modules: Vec::new(),
};
tcx.hir().krate().visit_all_item_likes(&mut collector);
- return collector.modules
+ return collector.modules;
}
-struct Collector<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct Collector<'tcx> {
+ tcx: TyCtxt<'tcx>,
modules: Vec<ForeignModule>,
}
-impl<'a, 'tcx> ItemLikeVisitor<'tcx> for Collector<'a, 'tcx> {
+impl ItemLikeVisitor<'tcx> for Collector<'tcx> {
fn visit_item(&mut self, it: &'tcx hir::Item) {
let fm = match it.node {
hir::ItemKind::ForeignMod(ref fm) => fm,
diff --git a/src/librustc_metadata/link_args.rs b/src/librustc_metadata/link_args.rs
index f468246..cd62700 100644
--- a/src/librustc_metadata/link_args.rs
+++ b/src/librustc_metadata/link_args.rs
@@ -4,7 +4,7 @@
use rustc_target::spec::abi::Abi;
use syntax::symbol::sym;
-pub fn collect<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Vec<String> {
+pub fn collect<'tcx>(tcx: TyCtxt<'tcx>) -> Vec<String> {
let mut collector = Collector {
args: Vec::new(),
};
@@ -18,7 +18,7 @@
}
}
- return collector.args
+ return collector.args;
}
struct Collector {
diff --git a/src/librustc_metadata/native_libs.rs b/src/librustc_metadata/native_libs.rs
index fee08f4..7b335b3 100644
--- a/src/librustc_metadata/native_libs.rs
+++ b/src/librustc_metadata/native_libs.rs
@@ -11,14 +11,14 @@
use syntax::symbol::{Symbol, sym};
use syntax::{span_err, struct_span_err};
-pub fn collect<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Vec<NativeLibrary> {
+pub fn collect<'tcx>(tcx: TyCtxt<'tcx>) -> Vec<NativeLibrary> {
let mut collector = Collector {
tcx,
libs: Vec::new(),
};
tcx.hir().krate().visit_all_item_likes(&mut collector);
collector.process_command_line();
- return collector.libs
+ return collector.libs;
}
pub fn relevant_lib(sess: &Session, lib: &NativeLibrary) -> bool {
@@ -28,12 +28,12 @@
}
}
-struct Collector<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct Collector<'tcx> {
+ tcx: TyCtxt<'tcx>,
libs: Vec<NativeLibrary>,
}
-impl<'a, 'tcx> ItemLikeVisitor<'tcx> for Collector<'a, 'tcx> {
+impl ItemLikeVisitor<'tcx> for Collector<'tcx> {
fn visit_item(&mut self, it: &'tcx hir::Item) {
let fm = match it.node {
hir::ItemKind::ForeignMod(ref fm) => fm,
@@ -130,7 +130,7 @@
fn visit_impl_item(&mut self, _it: &'tcx hir::ImplItem) {}
}
-impl<'a, 'tcx> Collector<'a, 'tcx> {
+impl Collector<'tcx> {
fn register_native_lib(&mut self, span: Option<Span>, lib: NativeLibrary) {
if lib.name.as_ref().map(|s| s.as_str().is_empty()).unwrap_or(false) {
match span {
diff --git a/src/librustc_mir/borrow_check/borrow_set.rs b/src/librustc_mir/borrow_check/borrow_set.rs
index 0fc72b8..f6f2cfb 100644
--- a/src/librustc_mir/borrow_check/borrow_set.rs
+++ b/src/librustc_mir/borrow_check/borrow_set.rs
@@ -122,12 +122,11 @@
impl<'tcx> BorrowSet<'tcx> {
pub fn build(
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
locals_are_invalidated_at_exit: bool,
- move_data: &MoveData<'tcx>
+ move_data: &MoveData<'tcx>,
) -> Self {
-
let mut visitor = GatherBorrows {
tcx,
body,
@@ -161,8 +160,8 @@
}
}
-struct GatherBorrows<'a, 'gcx: 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+struct GatherBorrows<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
idx_vec: IndexVec<BorrowIndex, BorrowData<'tcx>>,
location_map: FxHashMap<Location, BorrowIndex>,
@@ -182,7 +181,7 @@
locals_state_at_exit: LocalsStateAtExit,
}
-impl<'a, 'gcx, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'tcx> {
fn visit_assign(
&mut self,
assigned_place: &mir::Place<'tcx>,
@@ -289,7 +288,7 @@
}
}
-impl<'a, 'gcx, 'tcx> GatherBorrows<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> GatherBorrows<'a, 'tcx> {
/// If this is a two-phase borrow, then we will record it
/// as "pending" until we find the activating use.
diff --git a/src/librustc_mir/borrow_check/conflict_errors.rs b/src/librustc_mir/borrow_check/conflict_errors.rs
index 359e3be..6a70a23 100644
--- a/src/librustc_mir/borrow_check/conflict_errors.rs
+++ b/src/librustc_mir/borrow_check/conflict_errors.rs
@@ -43,7 +43,7 @@
Destructor(Ty<'tcx>),
}
-impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
pub(super) fn report_use_of_moved_or_uninitialized(
&mut self,
location: Location,
@@ -1903,7 +1903,7 @@
/// helps explain.
pub(super) fn emit(
&self,
- cx: &mut MirBorrowckCtxt<'_, '_, 'tcx>,
+ cx: &mut MirBorrowckCtxt<'_, 'tcx>,
diag: &mut DiagnosticBuilder<'_>,
) -> String {
match self {
diff --git a/src/librustc_mir/borrow_check/error_reporting.rs b/src/librustc_mir/borrow_check/error_reporting.rs
index a124c78..7eb2963 100644
--- a/src/librustc_mir/borrow_check/error_reporting.rs
+++ b/src/librustc_mir/borrow_check/error_reporting.rs
@@ -18,7 +18,7 @@
pub(super) struct IncludingDowncast(pub(super) bool);
-impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
/// Adds a suggestion when a closure is invoked twice with a moved variable or when a closure
/// is moved after being invoked.
///
@@ -403,7 +403,7 @@
}
}
-impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
/// Return the name of the provided `Ty` (that must be a reference) with a synthesized lifetime
/// name where required.
pub(super) fn get_name_for_ty(&self, ty: Ty<'tcx>, counter: usize) -> String {
@@ -547,7 +547,7 @@
}
}
-impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
/// Finds the spans associated to a move or copy of move_place at location.
pub(super) fn move_spans(
&self,
diff --git a/src/librustc_mir/borrow_check/flows.rs b/src/librustc_mir/borrow_check/flows.rs
index c0b199f..5a57db2 100644
--- a/src/librustc_mir/borrow_check/flows.rs
+++ b/src/librustc_mir/borrow_check/flows.rs
@@ -22,20 +22,20 @@
use std::rc::Rc;
// (forced to be `pub` due to its use as an associated type below.)
-crate struct Flows<'b, 'gcx: 'tcx, 'tcx: 'b> {
- borrows: FlowAtLocation<'tcx, Borrows<'b, 'gcx, 'tcx>>,
- pub uninits: FlowAtLocation<'tcx, MaybeUninitializedPlaces<'b, 'gcx, 'tcx>>,
- pub ever_inits: FlowAtLocation<'tcx, EverInitializedPlaces<'b, 'gcx, 'tcx>>,
+crate struct Flows<'b, 'tcx: 'b> {
+ borrows: FlowAtLocation<'tcx, Borrows<'b, 'tcx>>,
+ pub uninits: FlowAtLocation<'tcx, MaybeUninitializedPlaces<'b, 'tcx>>,
+ pub ever_inits: FlowAtLocation<'tcx, EverInitializedPlaces<'b, 'tcx>>,
/// Polonius Output
pub polonius_output: Option<Rc<Output<RegionVid, BorrowIndex, LocationIndex>>>,
}
-impl<'b, 'gcx, 'tcx> Flows<'b, 'gcx, 'tcx> {
+impl<'b, 'tcx> Flows<'b, 'tcx> {
crate fn new(
- borrows: FlowAtLocation<'tcx, Borrows<'b, 'gcx, 'tcx>>,
- uninits: FlowAtLocation<'tcx, MaybeUninitializedPlaces<'b, 'gcx, 'tcx>>,
- ever_inits: FlowAtLocation<'tcx, EverInitializedPlaces<'b, 'gcx, 'tcx>>,
+ borrows: FlowAtLocation<'tcx, Borrows<'b, 'tcx>>,
+ uninits: FlowAtLocation<'tcx, MaybeUninitializedPlaces<'b, 'tcx>>,
+ ever_inits: FlowAtLocation<'tcx, EverInitializedPlaces<'b, 'tcx>>,
polonius_output: Option<Rc<Output<RegionVid, BorrowIndex, LocationIndex>>>,
) -> Self {
Flows {
@@ -70,7 +70,7 @@
};
}
-impl<'b, 'gcx, 'tcx> FlowsAtLocation for Flows<'b, 'gcx, 'tcx> {
+impl<'b, 'tcx> FlowsAtLocation for Flows<'b, 'tcx> {
fn reset_to_entry_of(&mut self, bb: BasicBlock) {
each_flow!(self, reset_to_entry_of(bb));
}
@@ -92,7 +92,7 @@
}
}
-impl<'b, 'gcx, 'tcx> fmt::Display for Flows<'b, 'gcx, 'tcx> {
+impl<'b, 'tcx> fmt::Display for Flows<'b, 'tcx> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut s = String::new();
diff --git a/src/librustc_mir/borrow_check/mod.rs b/src/librustc_mir/borrow_check/mod.rs
index 502d601..c4a11ef 100644
--- a/src/librustc_mir/borrow_check/mod.rs
+++ b/src/librustc_mir/borrow_check/mod.rs
@@ -87,7 +87,7 @@
};
}
-fn mir_borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> BorrowCheckResult<'tcx> {
+fn mir_borrowck<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> BorrowCheckResult<'tcx> {
let input_body = tcx.mir_validated(def_id);
debug!("run query mir_borrowck: {}", tcx.def_path_str(def_id));
@@ -100,11 +100,11 @@
opt_closure_req
}
-fn do_mir_borrowck<'a, 'gcx, 'tcx>(
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- input_body: &Body<'gcx>,
+fn do_mir_borrowck<'a, 'tcx>(
+ infcx: &InferCtxt<'a, 'tcx>,
+ input_body: &Body<'tcx>,
def_id: DefId,
-) -> BorrowCheckResult<'gcx> {
+) -> BorrowCheckResult<'tcx> {
debug!("do_mir_borrowck(def_id = {:?})", def_id);
let tcx = infcx.tcx;
@@ -177,7 +177,7 @@
|bd, i| DebugFormatted::new(&bd.move_data().move_paths[i]),
));
- let locals_are_invalidated_at_exit = tcx.hir().body_owner_kind_by_hir_id(id).is_fn_or_closure();
+ let locals_are_invalidated_at_exit = tcx.hir().body_owner_kind(id).is_fn_or_closure();
let borrow_set = Rc::new(BorrowSet::build(
tcx, body, locals_are_invalidated_at_exit, &mdpe.move_data));
@@ -423,8 +423,8 @@
}
}
-pub struct MirBorrowckCtxt<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+pub struct MirBorrowckCtxt<'cx, 'tcx: 'cx> {
+ infcx: &'cx InferCtxt<'cx, 'tcx>,
body: &'cx Body<'tcx>,
mir_def_id: DefId,
move_data: &'cx MoveData<'tcx>,
@@ -508,8 +508,8 @@
// 2. loans made in overlapping scopes do not conflict
// 3. assignments do not affect things loaned out as immutable
// 4. moves do not affect things loaned out in any way
-impl<'cx, 'gcx, 'tcx> DataflowResultsConsumer<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
- type FlowState = Flows<'cx, 'gcx, 'tcx>;
+impl<'cx, 'tcx> DataflowResultsConsumer<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'tcx> {
+ type FlowState = Flows<'cx, 'tcx>;
fn body(&self) -> &'cx Body<'tcx> {
self.body
@@ -780,7 +780,7 @@
| TerminatorKind::Unreachable
| TerminatorKind::FalseEdges {
real_target: _,
- imaginary_targets: _,
+ imaginary_target: _,
}
| TerminatorKind::FalseUnwind {
real_target: _,
@@ -920,7 +920,7 @@
}
}
-impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
/// Checks an access to the given place to see if it is allowed. Examines the set of borrows
/// that are in scope, as well as which paths have been initialized, to ensure that (a) the
/// place is initialized and (b) it is not borrowed in some way that would prevent this
@@ -933,7 +933,7 @@
place_span: (&Place<'tcx>, Span),
kind: (AccessDepth, ReadOrWrite),
is_local_mutation_allowed: LocalMutationIsAllowed,
- flow_state: &Flows<'cx, 'gcx, 'tcx>,
+ flow_state: &Flows<'cx, 'tcx>,
) {
let (sd, rw) = kind;
@@ -996,7 +996,7 @@
place_span: (&Place<'tcx>, Span),
sd: AccessDepth,
rw: ReadOrWrite,
- flow_state: &Flows<'cx, 'gcx, 'tcx>,
+ flow_state: &Flows<'cx, 'tcx>,
) -> bool {
debug!(
"check_access_for_conflict(location={:?}, place_span={:?}, sd={:?}, rw={:?})",
@@ -1148,7 +1148,7 @@
place_span: (&Place<'tcx>, Span),
kind: AccessDepth,
mode: MutateMode,
- flow_state: &Flows<'cx, 'gcx, 'tcx>,
+ flow_state: &Flows<'cx, 'tcx>,
) {
// Write of P[i] or *P, or WriteAndRead of any P, requires P init'd.
match mode {
@@ -1195,7 +1195,7 @@
&mut self,
location: Location,
(rvalue, span): (&Rvalue<'tcx>, Span),
- flow_state: &Flows<'cx, 'gcx, 'tcx>,
+ flow_state: &Flows<'cx, 'tcx>,
) {
match *rvalue {
Rvalue::Ref(_ /*rgn*/, bk, ref place) => {
@@ -1382,7 +1382,7 @@
&mut self,
location: Location,
(operand, span): (&Operand<'tcx>, Span),
- flow_state: &Flows<'cx, 'gcx, 'tcx>,
+ flow_state: &Flows<'cx, 'tcx>,
) {
match *operand {
Operand::Copy(ref place) => {
@@ -1511,12 +1511,7 @@
}
}
- fn check_activations(
- &mut self,
- location: Location,
- span: Span,
- flow_state: &Flows<'cx, 'gcx, 'tcx>,
- ) {
+ fn check_activations(&mut self, location: Location, span: Span, flow_state: &Flows<'cx, 'tcx>) {
// Two-phase borrow support: For each activation that is newly
// generated at this statement, check if it interferes with
// another borrow.
@@ -1547,13 +1542,13 @@
}
}
-impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
fn check_if_reassignment_to_immutable_state(
&mut self,
location: Location,
local: Local,
place_span: (&Place<'tcx>, Span),
- flow_state: &Flows<'cx, 'gcx, 'tcx>,
+ flow_state: &Flows<'cx, 'tcx>,
) {
debug!("check_if_reassignment_to_immutable_state({:?})", local);
@@ -1573,7 +1568,7 @@
location: Location,
desired_action: InitializationRequiringAction,
place_span: (&Place<'tcx>, Span),
- flow_state: &Flows<'cx, 'gcx, 'tcx>,
+ flow_state: &Flows<'cx, 'tcx>,
) {
let maybe_uninits = &flow_state.uninits;
@@ -1641,7 +1636,7 @@
location: Location,
desired_action: InitializationRequiringAction,
place_span: (&Place<'tcx>, Span),
- flow_state: &Flows<'cx, 'gcx, 'tcx>,
+ flow_state: &Flows<'cx, 'tcx>,
) {
let maybe_uninits = &flow_state.uninits;
@@ -1728,7 +1723,7 @@
&mut self,
location: Location,
(place, span): (&Place<'tcx>, Span),
- flow_state: &Flows<'cx, 'gcx, 'tcx>,
+ flow_state: &Flows<'cx, 'tcx>,
) {
debug!("check_if_assigned_path_is_moved place: {:?}", place);
// recur down place; dispatch to external checks when necessary
@@ -1811,12 +1806,12 @@
}
}
- fn check_parent_of_field<'cx, 'gcx, 'tcx>(
- this: &mut MirBorrowckCtxt<'cx, 'gcx, 'tcx>,
+ fn check_parent_of_field<'cx, 'tcx>(
+ this: &mut MirBorrowckCtxt<'cx, 'tcx>,
location: Location,
base: &Place<'tcx>,
span: Span,
- flow_state: &Flows<'cx, 'gcx, 'tcx>,
+ flow_state: &Flows<'cx, 'tcx>,
) {
// rust-lang/rust#21232: Until Rust allows reads from the
// initialized parts of partially initialized structs, we
@@ -1905,7 +1900,7 @@
(place, span): (&Place<'tcx>, Span),
kind: ReadOrWrite,
is_local_mutation_allowed: LocalMutationIsAllowed,
- flow_state: &Flows<'cx, 'gcx, 'tcx>,
+ flow_state: &Flows<'cx, 'tcx>,
location: Location,
) -> bool {
debug!(
@@ -2034,11 +2029,11 @@
};
}
- fn is_local_ever_initialized(&self,
- local: Local,
- flow_state: &Flows<'cx, 'gcx, 'tcx>)
- -> Option<InitIndex>
- {
+ fn is_local_ever_initialized(
+ &self,
+ local: Local,
+ flow_state: &Flows<'cx, 'tcx>,
+ ) -> Option<InitIndex> {
let mpi = self.move_data.rev_lookup.find_local(local);
let ii = &self.move_data.init_path_map[mpi];
for &index in ii {
@@ -2050,11 +2045,7 @@
}
/// Adds the place into the used mutable variables set
- fn add_used_mut<'d>(
- &mut self,
- root_place: RootPlace<'d, 'tcx>,
- flow_state: &Flows<'cx, 'gcx, 'tcx>,
- ) {
+ fn add_used_mut<'d>(&mut self, root_place: RootPlace<'d, 'tcx>, flow_state: &Flows<'cx, 'tcx>) {
match root_place {
RootPlace {
place: Place::Base(PlaceBase::Local(local)),
diff --git a/src/librustc_mir/borrow_check/move_errors.rs b/src/librustc_mir/borrow_check/move_errors.rs
index f4bc1bc..d152293 100644
--- a/src/librustc_mir/borrow_check/move_errors.rs
+++ b/src/librustc_mir/borrow_check/move_errors.rs
@@ -96,7 +96,7 @@
}
}
- fn from_call(func: Ty<'tcx>, tcx: TyCtxt<'_, '_, 'tcx>) -> Option<Self> {
+ fn from_call(func: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> Option<Self> {
match func.sty {
ty::FnDef(def_id, substs) => {
let trait_id = tcx.trait_of_item(def_id)?;
@@ -119,7 +119,7 @@
}
}
-impl<'a, 'gcx, 'tcx> MirBorrowckCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
pub(crate) fn report_move_errors(&mut self, move_errors: Vec<(Place<'tcx>, MoveError<'tcx>)>) {
let grouped_errors = self.group_move_errors(move_errors);
for error in grouped_errors {
@@ -422,7 +422,7 @@
let upvar = &self.upvars[upvar_field.unwrap().index()];
let upvar_hir_id = upvar.var_hir_id;
let upvar_name = upvar.name;
- let upvar_span = self.infcx.tcx.hir().span_by_hir_id(upvar_hir_id);
+ let upvar_span = self.infcx.tcx.hir().span(upvar_hir_id);
let place_name = self.describe_place(move_place).unwrap();
diff --git a/src/librustc_mir/borrow_check/mutability_errors.rs b/src/librustc_mir/borrow_check/mutability_errors.rs
index 46116e3..fc11cd8 100644
--- a/src/librustc_mir/borrow_check/mutability_errors.rs
+++ b/src/librustc_mir/borrow_check/mutability_errors.rs
@@ -24,7 +24,7 @@
Move,
}
-impl<'a, 'gcx, 'tcx> MirBorrowckCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
pub(super) fn report_mutability_error(
&mut self,
access_place: &Place<'tcx>,
@@ -304,8 +304,8 @@
err.span_label(span, format!("cannot {ACT}", ACT = act));
let upvar_hir_id = self.upvars[upvar_index.index()].var_hir_id;
- let upvar_node_id = self.infcx.tcx.hir().hir_to_node_id(upvar_hir_id);
- if let Some(Node::Binding(pat)) = self.infcx.tcx.hir().find(upvar_node_id) {
+ if let Some(Node::Binding(pat)) = self.infcx.tcx.hir().find_by_hir_id(upvar_hir_id)
+ {
if let hir::PatKind::Binding(
hir::BindingAnnotation::Unannotated,
_,
@@ -522,8 +522,8 @@
}
}
-fn suggest_ampmut_self<'cx, 'gcx, 'tcx>(
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+fn suggest_ampmut_self<'tcx>(
+ tcx: TyCtxt<'tcx>,
local_decl: &mir::LocalDecl<'tcx>,
) -> (Span, String) {
let sp = local_decl.source_info.span;
@@ -555,8 +555,8 @@
//
// This implementation attempts to emulate AST-borrowck prioritization
// by trying (3.), then (2.) and finally falling back on (1.).
-fn suggest_ampmut<'cx, 'gcx, 'tcx>(
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+fn suggest_ampmut<'tcx>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
local: Local,
local_decl: &mir::LocalDecl<'tcx>,
@@ -623,7 +623,7 @@
/// | ---------- use `&'a mut String` here to make mutable
/// ```
fn annotate_struct_field(
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
ty: Ty<'tcx>,
field: &mir::Field,
) -> Option<(Span, String)> {
diff --git a/src/librustc_mir/borrow_check/nll/constraint_generation.rs b/src/librustc_mir/borrow_check/nll/constraint_generation.rs
index 4a9aad2..18c542d 100644
--- a/src/librustc_mir/borrow_check/nll/constraint_generation.rs
+++ b/src/librustc_mir/borrow_check/nll/constraint_generation.rs
@@ -13,8 +13,8 @@
use rustc::ty::{self, ClosureSubsts, GeneratorSubsts, RegionVid, Ty};
use rustc::ty::subst::SubstsRef;
-pub(super) fn generate_constraints<'cx, 'gcx, 'tcx>(
- infcx: &InferCtxt<'cx, 'gcx, 'tcx>,
+pub(super) fn generate_constraints<'cx, 'tcx>(
+ infcx: &InferCtxt<'cx, 'tcx>,
liveness_constraints: &mut LivenessValues<RegionVid>,
all_facts: &mut Option<AllFacts>,
location_table: &LocationTable,
@@ -35,15 +35,15 @@
}
/// 'cg = the duration of the constraint generation process itself.
-struct ConstraintGeneration<'cg, 'cx: 'cg, 'gcx: 'tcx, 'tcx: 'cx> {
- infcx: &'cg InferCtxt<'cx, 'gcx, 'tcx>,
+struct ConstraintGeneration<'cg, 'cx: 'cg, 'tcx: 'cx> {
+ infcx: &'cg InferCtxt<'cx, 'tcx>,
all_facts: &'cg mut Option<AllFacts>,
location_table: &'cg LocationTable,
liveness_constraints: &'cg mut LivenessValues<RegionVid>,
borrow_set: &'cg BorrowSet<'tcx>,
}
-impl<'cg, 'cx, 'gcx, 'tcx> Visitor<'tcx> for ConstraintGeneration<'cg, 'cx, 'gcx, 'tcx> {
+impl<'cg, 'cx, 'tcx> Visitor<'tcx> for ConstraintGeneration<'cg, 'cx, 'tcx> {
fn visit_basic_block_data(&mut self, bb: BasicBlock, data: &BasicBlockData<'tcx>) {
self.super_basic_block_data(bb, data);
}
@@ -177,7 +177,7 @@
}
}
-impl<'cx, 'cg, 'gcx, 'tcx> ConstraintGeneration<'cx, 'cg, 'gcx, 'tcx> {
+impl<'cx, 'cg, 'tcx> ConstraintGeneration<'cx, 'cg, 'tcx> {
/// Some variable with type `live_ty` is "regular live" at
/// `location` -- i.e., it may be used later. This means that all
/// regions appearing in the type `live_ty` must be live at
diff --git a/src/librustc_mir/borrow_check/nll/explain_borrow/find_use.rs b/src/librustc_mir/borrow_check/nll/explain_borrow/find_use.rs
index 31181e7..4d7ab90 100644
--- a/src/librustc_mir/borrow_check/nll/explain_borrow/find_use.rs
+++ b/src/librustc_mir/borrow_check/nll/explain_borrow/find_use.rs
@@ -12,7 +12,7 @@
crate fn find<'tcx>(
body: &Body<'tcx>,
regioncx: &Rc<RegionInferenceContext<'tcx>>,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
region_vid: RegionVid,
start_point: Location,
) -> Option<Cause> {
@@ -27,15 +27,15 @@
uf.find()
}
-struct UseFinder<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
+struct UseFinder<'cx, 'tcx: 'cx> {
body: &'cx Body<'tcx>,
regioncx: &'cx Rc<RegionInferenceContext<'tcx>>,
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
region_vid: RegionVid,
start_point: Location,
}
-impl<'cx, 'gcx, 'tcx> UseFinder<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> UseFinder<'cx, 'tcx> {
fn find(&mut self) -> Option<Cause> {
let mut queue = VecDeque::new();
let mut visited = FxHashSet::default();
@@ -99,9 +99,9 @@
}
}
-struct DefUseVisitor<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
+struct DefUseVisitor<'cx, 'tcx: 'cx> {
body: &'cx Body<'tcx>,
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
region_vid: RegionVid,
def_use_result: Option<DefUseResult>,
}
@@ -112,7 +112,7 @@
UseDrop { local: Local },
}
-impl<'cx, 'gcx, 'tcx> Visitor<'tcx> for DefUseVisitor<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> Visitor<'tcx> for DefUseVisitor<'cx, 'tcx> {
fn visit_local(&mut self, &local: &Local, context: PlaceContext, _: Location) {
let local_ty = self.body.local_decls[local].ty;
diff --git a/src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs b/src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs
index f0e6d56..4bc2f70 100644
--- a/src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs
+++ b/src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs
@@ -51,9 +51,9 @@
_ => true,
}
}
- pub(in crate::borrow_check) fn add_explanation_to_diagnostic<'cx, 'gcx, 'tcx>(
+ pub(in crate::borrow_check) fn add_explanation_to_diagnostic<'tcx>(
&self,
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
err: &mut DiagnosticBuilder<'_>,
borrow_desc: &str,
@@ -207,7 +207,7 @@
}
}
-impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
/// Returns structured explanation for *why* the borrow contains the
/// point from `location`. This is key for the "3-point errors"
/// [described in the NLL RFC][d].
diff --git a/src/librustc_mir/borrow_check/nll/facts.rs b/src/librustc_mir/borrow_check/nll/facts.rs
index 926f52b..d84afea 100644
--- a/src/librustc_mir/borrow_check/nll/facts.rs
+++ b/src/librustc_mir/borrow_check/nll/facts.rs
@@ -15,7 +15,7 @@
crate trait AllFactsExt {
/// Returns `true` if there is a need to gather `AllFacts` given the
/// current `-Z` flags.
- fn enabled(tcx: TyCtxt<'_, '_, '_>) -> bool;
+ fn enabled(tcx: TyCtxt<'_>) -> bool;
fn write_to_dir(
&self,
@@ -26,7 +26,7 @@
impl AllFactsExt for AllFacts {
/// Return
- fn enabled(tcx: TyCtxt<'_, '_, '_>) -> bool {
+ fn enabled(tcx: TyCtxt<'_>) -> bool {
tcx.sess.opts.debugging_opts.nll_facts
|| tcx.sess.opts.debugging_opts.polonius
}
diff --git a/src/librustc_mir/borrow_check/nll/invalidation.rs b/src/librustc_mir/borrow_check/nll/invalidation.rs
index 516eb6d..286f3ac 100644
--- a/src/librustc_mir/borrow_check/nll/invalidation.rs
+++ b/src/librustc_mir/borrow_check/nll/invalidation.rs
@@ -17,8 +17,8 @@
use rustc::mir::{Operand, BorrowKind};
use rustc_data_structures::graph::dominators::Dominators;
-pub(super) fn generate_invalidates<'cx, 'gcx, 'tcx>(
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+pub(super) fn generate_invalidates<'tcx>(
+ tcx: TyCtxt<'tcx>,
all_facts: &mut Option<AllFacts>,
location_table: &LocationTable,
body: &Body<'tcx>,
@@ -43,8 +43,8 @@
}
}
-struct InvalidationGenerator<'cx, 'tcx: 'cx, 'gcx: 'tcx> {
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+struct InvalidationGenerator<'cx, 'tcx: 'cx> {
+ tcx: TyCtxt<'tcx>,
all_facts: &'cx mut AllFacts,
location_table: &'cx LocationTable,
body: &'cx Body<'tcx>,
@@ -54,7 +54,7 @@
/// Visits the whole MIR and generates `invalidates()` facts.
/// Most of the code implementing this was stolen from `borrow_check/mod.rs`.
-impl<'cx, 'tcx, 'gcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx, 'gcx> {
+impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
fn visit_statement(
&mut self,
statement: &Statement<'tcx>,
@@ -244,7 +244,7 @@
| TerminatorKind::Unreachable
| TerminatorKind::FalseEdges {
real_target: _,
- imaginary_targets: _,
+ imaginary_target: _,
}
| TerminatorKind::FalseUnwind {
real_target: _,
@@ -258,7 +258,7 @@
}
}
-impl<'cx, 'tcx, 'gcx> InvalidationGenerator<'cx, 'tcx, 'gcx> {
+impl<'cx, 'tcx> InvalidationGenerator<'cx, 'tcx> {
/// Simulates mutation of a place.
fn mutate_place(
&mut self,
diff --git a/src/librustc_mir/borrow_check/nll/mod.rs b/src/librustc_mir/borrow_check/nll/mod.rs
index a260f54..5dd7b74 100644
--- a/src/librustc_mir/borrow_check/nll/mod.rs
+++ b/src/librustc_mir/borrow_check/nll/mod.rs
@@ -46,8 +46,8 @@
/// scraping out the set of universal regions (e.g., region parameters)
/// declared on the function. That set will need to be given to
/// `compute_regions`.
-pub(in crate::borrow_check) fn replace_regions_in_mir<'cx, 'gcx, 'tcx>(
- infcx: &InferCtxt<'cx, 'gcx, 'tcx>,
+pub(in crate::borrow_check) fn replace_regions_in_mir<'cx, 'tcx>(
+ infcx: &InferCtxt<'cx, 'tcx>,
def_id: DefId,
param_env: ty::ParamEnv<'tcx>,
body: &mut Body<'tcx>,
@@ -69,22 +69,22 @@
/// Computes the (non-lexical) regions from the input MIR.
///
/// This may result in errors being reported.
-pub(in crate::borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
- infcx: &InferCtxt<'cx, 'gcx, 'tcx>,
+pub(in crate::borrow_check) fn compute_regions<'cx, 'tcx>(
+ infcx: &InferCtxt<'cx, 'tcx>,
def_id: DefId,
universal_regions: UniversalRegions<'tcx>,
body: &Body<'tcx>,
upvars: &[Upvar],
location_table: &LocationTable,
- param_env: ty::ParamEnv<'gcx>,
- flow_inits: &mut FlowAtLocation<'tcx, MaybeInitializedPlaces<'cx, 'gcx, 'tcx>>,
+ param_env: ty::ParamEnv<'tcx>,
+ flow_inits: &mut FlowAtLocation<'tcx, MaybeInitializedPlaces<'cx, 'tcx>>,
move_data: &MoveData<'tcx>,
borrow_set: &BorrowSet<'tcx>,
errors_buffer: &mut Vec<Diagnostic>,
) -> (
RegionInferenceContext<'tcx>,
Option<Rc<Output<RegionVid, BorrowIndex, LocationIndex>>>,
- Option<ClosureRegionRequirements<'gcx>>,
+ Option<ClosureRegionRequirements<'tcx>>,
) {
let mut all_facts = if AllFacts::enabled(infcx.tcx) {
Some(AllFacts::default())
@@ -210,8 +210,8 @@
(regioncx, polonius_output, closure_region_requirements)
}
-fn dump_mir_results<'a, 'gcx, 'tcx>(
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+fn dump_mir_results<'a, 'tcx>(
+ infcx: &InferCtxt<'a, 'tcx>,
source: MirSource<'tcx>,
body: &Body<'tcx>,
regioncx: &RegionInferenceContext<'_>,
@@ -271,8 +271,8 @@
};
}
-fn dump_annotation<'a, 'gcx, 'tcx>(
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+fn dump_annotation<'a, 'tcx>(
+ infcx: &InferCtxt<'a, 'tcx>,
body: &Body<'tcx>,
mir_def_id: DefId,
regioncx: &RegionInferenceContext<'tcx>,
diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs
index 8593784..04ff54e 100644
--- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs
+++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs
@@ -239,7 +239,7 @@
&self,
body: &Body<'tcx>,
upvars: &[Upvar],
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
mir_def_id: DefId,
fr: RegionVid,
outlived_fr: RegionVid,
@@ -359,7 +359,7 @@
&self,
body: &Body<'tcx>,
upvars: &[Upvar],
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
mir_def_id: DefId,
_fr: RegionVid,
outlived_fr: RegionVid,
@@ -424,7 +424,7 @@
&self,
body: &Body<'tcx>,
upvars: &[Upvar],
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
mir_def_id: DefId,
fr: RegionVid,
outlived_fr: RegionVid,
@@ -516,7 +516,7 @@
&self,
body: &Body<'tcx>,
upvars: &[Upvar],
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
mir_def_id: DefId,
fr: RegionVid,
fr_is_local: bool,
@@ -585,7 +585,7 @@
/// ```
fn add_static_impl_trait_suggestion(
&self,
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
diag: &mut DiagnosticBuilder<'_>,
fr: RegionVid,
// We need to pass `fr_name` - computing it again will label it twice.
@@ -671,7 +671,7 @@
body: &Body<'tcx>,
upvars: &[Upvar],
mir_def_id: DefId,
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
borrow_region: RegionVid,
outlived_region: RegionVid,
) -> (ConstraintCategory, bool, Span, Option<RegionName>) {
@@ -752,7 +752,7 @@
}
/// Returns `true` if a closure is inferred to be an `FnMut` closure.
- crate fn is_closure_fn_mut(&self, infcx: &InferCtxt<'_, '_, 'tcx>, fr: RegionVid) -> bool {
+ crate fn is_closure_fn_mut(&self, infcx: &InferCtxt<'_, 'tcx>, fr: RegionVid) -> bool {
if let Some(ty::ReFree(free_region)) = self.to_error_region(fr) {
if let ty::BoundRegion::BrEnv = free_region.bound_region {
if let DefiningTy::Closure(def_id, substs) = self.universal_regions.defining_ty {
diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs
index 82720d0..46b6901 100644
--- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs
+++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs
@@ -151,7 +151,7 @@
/// and then return the name `'1` for us to use.
crate fn give_region_a_name(
&self,
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
body: &Body<'tcx>,
upvars: &[Upvar],
mir_def_id: DefId,
@@ -194,7 +194,7 @@
/// named variants.
fn give_name_from_error_region(
&self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
mir_def_id: DefId,
fr: RegionVid,
counter: &mut usize,
@@ -230,14 +230,14 @@
},
ty::BoundRegion::BrEnv => {
- let mir_node_id = tcx.hir()
- .as_local_node_id(mir_def_id)
- .expect("non-local mir");
+ let mir_hir_id = tcx.hir()
+ .as_local_hir_id(mir_def_id)
+ .expect("non-local mir");
let def_ty = self.universal_regions.defining_ty;
if let DefiningTy::Closure(def_id, substs) = def_ty {
let args_span = if let hir::ExprKind::Closure(_, _, _, span, _) =
- tcx.hir().expect_expr(mir_node_id).node
+ tcx.hir().expect_expr_by_hir_id(mir_hir_id).node
{
span
} else {
@@ -303,14 +303,14 @@
/// ```
fn get_named_span(
&self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
error_region: &RegionKind,
name: InternedString,
) -> Span {
let scope = error_region.free_region_binding_scope(tcx);
let node = tcx.hir().as_local_hir_id(scope).unwrap_or(hir::DUMMY_HIR_ID);
- let span = tcx.sess.source_map().def_span(tcx.hir().span_by_hir_id(node));
+ let span = tcx.sess.source_map().def_span(tcx.hir().span(node));
if let Some(param) = tcx.hir()
.get_generics(scope)
.and_then(|generics| generics.get_named(name))
@@ -331,7 +331,7 @@
/// ```
fn give_name_if_anonymous_region_appears_in_arguments(
&self,
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
body: &Body<'tcx>,
mir_def_id: DefId,
fr: RegionVid,
@@ -359,7 +359,7 @@
fn give_name_if_we_can_match_hir_ty_from_argument(
&self,
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
body: &Body<'tcx>,
mir_def_id: DefId,
needle_fr: RegionVid,
@@ -367,8 +367,8 @@
argument_index: usize,
counter: &mut usize,
) -> Option<RegionName> {
- let mir_node_id = infcx.tcx.hir().as_local_node_id(mir_def_id)?;
- let fn_decl = infcx.tcx.hir().fn_decl(mir_node_id)?;
+ let mir_hir_id = infcx.tcx.hir().as_local_hir_id(mir_def_id)?;
+ let fn_decl = infcx.tcx.hir().fn_decl_by_hir_id(mir_hir_id)?;
let argument_hir_ty: &hir::Ty = &fn_decl.inputs[argument_index];
match argument_hir_ty.node {
// This indicates a variable with no type annotation, like
@@ -405,7 +405,7 @@
/// ```
fn give_name_if_we_cannot_match_hir_ty(
&self,
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
body: &Body<'tcx>,
needle_fr: RegionVid,
argument_ty: Ty<'tcx>,
@@ -461,7 +461,7 @@
/// to highlighting that closest type instead.
fn give_name_if_we_can_match_hir_ty(
&self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
needle_fr: RegionVid,
argument_ty: Ty<'tcx>,
argument_hir_ty: &hir::Ty,
@@ -653,7 +653,7 @@
/// ```
fn give_name_if_anonymous_region_appears_in_upvars(
&self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
upvars: &[Upvar],
fr: RegionVid,
counter: &mut usize,
@@ -675,7 +675,7 @@
/// or be early bound (named, not in argument).
fn give_name_if_anonymous_region_appears_in_output(
&self,
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
body: &Body<'tcx>,
mir_def_id: DefId,
fr: RegionVid,
@@ -696,9 +696,9 @@
highlight.highlighting_region_vid(fr, *counter);
let type_name = infcx.extract_type_name(&return_ty, Some(highlight));
- let mir_node_id = tcx.hir().as_local_node_id(mir_def_id).expect("non-local mir");
+ let mir_hir_id = tcx.hir().as_local_hir_id(mir_def_id).expect("non-local mir");
- let (return_span, mir_description) = match tcx.hir().get(mir_node_id) {
+ let (return_span, mir_description) = match tcx.hir().get_by_hir_id(mir_hir_id) {
hir::Node::Expr(hir::Expr {
node: hir::ExprKind::Closure(_, return_ty, _, span, gen_move),
..
@@ -735,7 +735,7 @@
fn give_name_if_anonymous_region_appears_in_yield_ty(
&self,
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
body: &Body<'tcx>,
mir_def_id: DefId,
fr: RegionVid,
@@ -759,9 +759,9 @@
highlight.highlighting_region_vid(fr, *counter);
let type_name = infcx.extract_type_name(&yield_ty, Some(highlight));
- let mir_node_id = tcx.hir().as_local_node_id(mir_def_id).expect("non-local mir");
+ let mir_hir_id = tcx.hir().as_local_hir_id(mir_def_id).expect("non-local mir");
- let yield_span = match tcx.hir().get(mir_node_id) {
+ let yield_span = match tcx.hir().get_by_hir_id(mir_hir_id) {
hir::Node::Expr(hir::Expr {
node: hir::ExprKind::Closure(_, _, _, span, _),
..
diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs
index f2dbcc5..59fc411 100644
--- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs
+++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs
@@ -10,7 +10,7 @@
impl<'tcx> RegionInferenceContext<'tcx> {
crate fn get_var_name_and_span_for_region(
&self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
upvars: &[Upvar],
fr: RegionVid,
@@ -33,11 +33,7 @@
}
/// Search the upvars (if any) to find one that references fr. Return its index.
- crate fn get_upvar_index_for_region(
- &self,
- tcx: TyCtxt<'_, '_, 'tcx>,
- fr: RegionVid,
- ) -> Option<usize> {
+ crate fn get_upvar_index_for_region(&self, tcx: TyCtxt<'tcx>, fr: RegionVid) -> Option<usize> {
let upvar_index = self
.universal_regions
.defining_ty
@@ -69,7 +65,7 @@
/// declared.
crate fn get_upvar_name_and_span_for_region(
&self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
upvars: &[Upvar],
upvar_index: usize,
) -> (Symbol, Span) {
@@ -77,7 +73,7 @@
debug!("get_upvar_name_and_span_for_region: upvar_hir_id={:?}", upvar_hir_id);
let upvar_name = tcx.hir().name_by_hir_id(upvar_hir_id);
- let upvar_span = tcx.hir().span_by_hir_id(upvar_hir_id);
+ let upvar_span = tcx.hir().span(upvar_hir_id);
debug!("get_upvar_name_and_span_for_region: upvar_name={:?} upvar_span={:?}",
upvar_name, upvar_span);
@@ -91,7 +87,7 @@
/// user - in particular, index 0 is not the implicit self parameter.
crate fn get_argument_index_for_region(
&self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
fr: RegionVid,
) -> Option<usize> {
let implicit_inputs = self.universal_regions.defining_ty.implicit_inputs();
@@ -134,5 +130,4 @@
(argument_name, argument_span)
}
-
}
diff --git a/src/librustc_mir/borrow_check/nll/region_infer/mod.rs b/src/librustc_mir/borrow_check/nll/region_infer/mod.rs
index 170f61a..2b38fce 100644
--- a/src/librustc_mir/borrow_check/nll/region_infer/mod.rs
+++ b/src/librustc_mir/borrow_check/nll/region_infer/mod.rs
@@ -370,7 +370,7 @@
}
/// Adds annotations for `#[rustc_regions]`; see `UniversalRegions::annotate`.
- crate fn annotate(&self, tcx: TyCtxt<'_, '_, 'tcx>, err: &mut DiagnosticBuilder<'_>) {
+ crate fn annotate(&self, tcx: TyCtxt<'tcx>, err: &mut DiagnosticBuilder<'_>) {
self.universal_regions.annotate(tcx, err)
}
@@ -397,14 +397,14 @@
/// Performs region inference and report errors if we see any
/// unsatisfiable constraints. If this is a closure, returns the
/// region requirements to propagate to our creator, if any.
- pub(super) fn solve<'gcx>(
+ pub(super) fn solve(
&mut self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
body: &Body<'tcx>,
upvars: &[Upvar],
mir_def_id: DefId,
errors_buffer: &mut Vec<Diagnostic>,
- ) -> Option<ClosureRegionRequirements<'gcx>> {
+ ) -> Option<ClosureRegionRequirements<'tcx>> {
common::time_ext(
infcx.tcx.sess.time_extended(),
Some(infcx.tcx.sess),
@@ -413,14 +413,14 @@
)
}
- fn solve_inner<'gcx>(
+ fn solve_inner(
&mut self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
body: &Body<'tcx>,
upvars: &[Upvar],
mir_def_id: DefId,
errors_buffer: &mut Vec<Diagnostic>,
- ) -> Option<ClosureRegionRequirements<'gcx>> {
+ ) -> Option<ClosureRegionRequirements<'tcx>> {
self.propagate_constraints(body);
// If this is a closure, we can propagate unsatisfied
@@ -578,12 +578,12 @@
/// whether the "type tests" produced by typeck were satisfied;
/// type tests encode type-outlives relationships like `T:
/// 'a`. See `TypeTest` for more details.
- fn check_type_tests<'gcx>(
+ fn check_type_tests(
&self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
body: &Body<'tcx>,
mir_def_id: DefId,
- mut propagated_outlives_requirements: Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
+ mut propagated_outlives_requirements: Option<&mut Vec<ClosureOutlivesRequirement<'tcx>>>,
errors_buffer: &mut Vec<Diagnostic>,
) {
let tcx = infcx.tcx;
@@ -722,12 +722,12 @@
/// The idea then is to lower the `T: 'X` constraint into multiple
/// bounds -- e.g., if `'X` is the union of two free lifetimes,
/// `'1` and `'2`, then we would create `T: '1` and `T: '2`.
- fn try_promote_type_test<'gcx>(
+ fn try_promote_type_test(
&self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
body: &Body<'tcx>,
type_test: &TypeTest<'tcx>,
- propagated_outlives_requirements: &mut Vec<ClosureOutlivesRequirement<'gcx>>,
+ propagated_outlives_requirements: &mut Vec<ClosureOutlivesRequirement<'tcx>>,
) -> bool {
let tcx = infcx.tcx;
@@ -794,7 +794,7 @@
/// When we promote a type test `T: 'r`, we have to convert the
/// type `T` into something we can store in a query result (so
- /// something allocated for `'gcx`). This is problematic if `ty`
+ /// something allocated for `'tcx`). This is problematic if `ty`
/// contains regions. During the course of NLL region checking, we
/// will have replaced all of those regions with fresh inference
/// variables. To create a test subject, we want to replace those
@@ -803,11 +803,11 @@
/// fallible process. Presuming we do find a suitable region, we
/// will represent it with a `ReClosureBound`, which is a
/// `RegionKind` variant that can be allocated in the gcx.
- fn try_promote_type_test_subject<'gcx>(
+ fn try_promote_type_test_subject(
&self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
ty: Ty<'tcx>,
- ) -> Option<ClosureOutlivesSubject<'gcx>> {
+ ) -> Option<ClosureOutlivesSubject<'tcx>> {
let tcx = infcx.tcx;
let gcx = tcx.global_tcx();
@@ -943,7 +943,7 @@
/// `point`.
fn eval_verify_bound(
&self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
generic_ty: Ty<'tcx>,
lower_bound: RegionVid,
@@ -976,7 +976,7 @@
fn eval_if_eq(
&self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
generic_ty: Ty<'tcx>,
lower_bound: RegionVid,
@@ -1022,7 +1022,7 @@
/// higher-ranked things and so forth, and right now the inference
/// context is not permitted to make more inference variables. So
/// we use this kind of hacky solution.
- fn normalize_to_scc_representatives<T>(&self, tcx: TyCtxt<'_, '_, 'tcx>, value: T) -> T
+ fn normalize_to_scc_representatives<T>(&self, tcx: TyCtxt<'tcx>, value: T) -> T
where
T: TypeFoldable<'tcx>,
{
@@ -1102,13 +1102,13 @@
/// If `propagated_outlives_requirements` is `Some`, then we will
/// push unsatisfied obligations into there. Otherwise, we'll
/// report them as errors.
- fn check_universal_regions<'gcx>(
+ fn check_universal_regions(
&self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
body: &Body<'tcx>,
upvars: &[Upvar],
mir_def_id: DefId,
- mut propagated_outlives_requirements: Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
+ mut propagated_outlives_requirements: Option<&mut Vec<ClosureOutlivesRequirement<'tcx>>>,
errors_buffer: &mut Vec<Diagnostic>,
) {
for (fr, fr_definition) in self.definitions.iter_enumerated() {
@@ -1147,14 +1147,14 @@
///
/// Things that are to be propagated are accumulated into the
/// `outlives_requirements` vector.
- fn check_universal_region<'gcx>(
+ fn check_universal_region(
&self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
body: &Body<'tcx>,
upvars: &[Upvar],
mir_def_id: DefId,
longer_fr: RegionVid,
- propagated_outlives_requirements: &mut Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
+ propagated_outlives_requirements: &mut Option<&mut Vec<ClosureOutlivesRequirement<'tcx>>>,
errors_buffer: &mut Vec<Diagnostic>,
) {
debug!("check_universal_region(fr={:?})", longer_fr);
@@ -1215,11 +1215,11 @@
&self,
longer_fr: RegionVid,
shorter_fr: RegionVid,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
body: &Body<'tcx>,
upvars: &[Upvar],
mir_def_id: DefId,
- propagated_outlives_requirements: &mut Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
+ propagated_outlives_requirements: &mut Option<&mut Vec<ClosureOutlivesRequirement<'tcx>>>,
errors_buffer: &mut Vec<Diagnostic>,
) -> Option<ErrorReported> {
// If it is known that `fr: o`, carry on.
@@ -1280,9 +1280,9 @@
Some(ErrorReported)
}
- fn check_bound_universal_region<'gcx>(
+ fn check_bound_universal_region(
&self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
body: &Body<'tcx>,
_mir_def_id: DefId,
longer_fr: RegionVid,
@@ -1365,17 +1365,17 @@
}
}
-pub trait ClosureRegionRequirementsExt<'gcx, 'tcx> {
+pub trait ClosureRegionRequirementsExt<'tcx> {
fn apply_requirements(
&self,
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
closure_def_id: DefId,
closure_substs: SubstsRef<'tcx>,
) -> Vec<QueryRegionConstraint<'tcx>>;
fn subst_closure_mapping<T>(
&self,
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
closure_mapping: &IndexVec<RegionVid, ty::Region<'tcx>>,
value: &T,
) -> T
@@ -1383,7 +1383,7 @@
T: TypeFoldable<'tcx>;
}
-impl<'gcx, 'tcx> ClosureRegionRequirementsExt<'gcx, 'tcx> for ClosureRegionRequirements<'gcx> {
+impl<'tcx> ClosureRegionRequirementsExt<'tcx> for ClosureRegionRequirements<'tcx> {
/// Given an instance T of the closure type, this method
/// instantiates the "extra" requirements that we computed for the
/// closure into the inference context. This has the effect of
@@ -1398,7 +1398,7 @@
/// requirements.
fn apply_requirements(
&self,
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
closure_def_id: DefId,
closure_substs: SubstsRef<'tcx>,
) -> Vec<QueryRegionConstraint<'tcx>> {
@@ -1453,7 +1453,7 @@
fn subst_closure_mapping<T>(
&self,
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
closure_mapping: &IndexVec<RegionVid, ty::Region<'tcx>>,
value: &T,
) -> T
diff --git a/src/librustc_mir/borrow_check/nll/renumber.rs b/src/librustc_mir/borrow_check/nll/renumber.rs
index b5fe3d7..c1d1185 100644
--- a/src/librustc_mir/borrow_check/nll/renumber.rs
+++ b/src/librustc_mir/borrow_check/nll/renumber.rs
@@ -6,7 +6,7 @@
/// Replaces all free regions appearing in the MIR with fresh
/// inference variables, returning the number of variables created.
-pub fn renumber_mir<'tcx>(infcx: &InferCtxt<'_, '_, 'tcx>, body: &mut Body<'tcx>) {
+pub fn renumber_mir<'tcx>(infcx: &InferCtxt<'_, 'tcx>, body: &mut Body<'tcx>) {
debug!("renumber_mir()");
debug!("renumber_mir: body.arg_count={:?}", body.arg_count);
@@ -16,10 +16,7 @@
/// Replaces all regions appearing in `value` with fresh inference
/// variables.
-pub fn renumber_regions<'tcx, T>(
- infcx: &InferCtxt<'_, '_, 'tcx>,
- value: &T,
-) -> T
+pub fn renumber_regions<'tcx, T>(infcx: &InferCtxt<'_, 'tcx>, value: &T) -> T
where
T: TypeFoldable<'tcx>,
{
@@ -33,11 +30,11 @@
})
}
-struct NLLVisitor<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+struct NLLVisitor<'a, 'tcx> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
}
-impl<'a, 'gcx, 'tcx> NLLVisitor<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> NLLVisitor<'a, 'tcx> {
fn renumber_regions<T>(&mut self, value: &T) -> T
where
T: TypeFoldable<'tcx>,
@@ -46,7 +43,7 @@
}
}
-impl<'a, 'gcx, 'tcx> MutVisitor<'tcx> for NLLVisitor<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> MutVisitor<'tcx> for NLLVisitor<'a, 'tcx> {
fn visit_body(&mut self, body: &mut Body<'tcx>) {
for promoted in body.promoted.iter_mut() {
self.visit_body(promoted);
diff --git a/src/librustc_mir/borrow_check/nll/type_check/constraint_conversion.rs b/src/librustc_mir/borrow_check/nll/type_check/constraint_conversion.rs
index bef159e..d867027 100644
--- a/src/librustc_mir/borrow_check/nll/type_check/constraint_conversion.rs
+++ b/src/librustc_mir/borrow_check/nll/type_check/constraint_conversion.rs
@@ -13,9 +13,9 @@
use rustc::ty::{self, TyCtxt};
use syntax_pos::DUMMY_SP;
-crate struct ConstraintConversion<'a, 'gcx: 'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+crate struct ConstraintConversion<'a, 'tcx: 'a> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
+ tcx: TyCtxt<'tcx>,
universal_regions: &'a UniversalRegions<'tcx>,
region_bound_pairs: &'a RegionBoundPairs<'tcx>,
implicit_region_bound: Option<ty::Region<'tcx>>,
@@ -25,9 +25,9 @@
constraints: &'a mut MirTypeckRegionConstraints<'tcx>,
}
-impl<'a, 'gcx, 'tcx> ConstraintConversion<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
crate fn new(
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+ infcx: &'a InferCtxt<'a, 'tcx>,
universal_regions: &'a UniversalRegions<'tcx>,
region_bound_pairs: &'a RegionBoundPairs<'tcx>,
implicit_region_bound: Option<ty::Region<'tcx>>,
@@ -150,9 +150,7 @@
}
}
-impl<'a, 'b, 'gcx, 'tcx> TypeOutlivesDelegate<'tcx>
- for &'a mut ConstraintConversion<'b, 'gcx, 'tcx>
-{
+impl<'a, 'b, 'tcx> TypeOutlivesDelegate<'tcx> for &'a mut ConstraintConversion<'b, 'tcx> {
fn push_sub_region_constraint(
&mut self,
_origin: SubregionOrigin<'tcx>,
diff --git a/src/librustc_mir/borrow_check/nll/type_check/free_region_relations.rs b/src/librustc_mir/borrow_check/nll/type_check/free_region_relations.rs
index 361353f..ca42f24 100644
--- a/src/librustc_mir/borrow_check/nll/type_check/free_region_relations.rs
+++ b/src/librustc_mir/borrow_check/nll/type_check/free_region_relations.rs
@@ -55,7 +55,7 @@
}
crate fn create(
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
implicit_region_bound: Option<ty::Region<'tcx>>,
universal_regions: &Rc<UniversalRegions<'tcx>>,
@@ -219,8 +219,8 @@
}
}
-struct UniversalRegionRelationsBuilder<'this, 'gcx: 'tcx, 'tcx: 'this> {
- infcx: &'this InferCtxt<'this, 'gcx, 'tcx>,
+struct UniversalRegionRelationsBuilder<'this, 'tcx: 'this> {
+ infcx: &'this InferCtxt<'this, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
universal_regions: Rc<UniversalRegions<'tcx>>,
implicit_region_bound: Option<ty::Region<'tcx>>,
@@ -231,7 +231,7 @@
region_bound_pairs: RegionBoundPairs<'tcx>,
}
-impl UniversalRegionRelationsBuilder<'cx, 'gcx, 'tcx> {
+impl UniversalRegionRelationsBuilder<'cx, 'tcx> {
crate fn create(mut self) -> CreateResult<'tcx> {
let unnormalized_input_output_tys = self
.universal_regions
diff --git a/src/librustc_mir/borrow_check/nll/type_check/input_output.rs b/src/librustc_mir/borrow_check/nll/type_check/input_output.rs
index 353c5a3..3954d62 100644
--- a/src/librustc_mir/borrow_check/nll/type_check/input_output.rs
+++ b/src/librustc_mir/borrow_check/nll/type_check/input_output.rs
@@ -17,7 +17,7 @@
use super::{Locations, TypeChecker};
-impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
pub(super) fn equate_inputs_and_outputs(
&mut self,
body: &Body<'tcx>,
diff --git a/src/librustc_mir/borrow_check/nll/type_check/liveness/mod.rs b/src/librustc_mir/borrow_check/nll/type_check/liveness/mod.rs
index fb99382..3b138bc 100644
--- a/src/librustc_mir/borrow_check/nll/type_check/liveness/mod.rs
+++ b/src/librustc_mir/borrow_check/nll/type_check/liveness/mod.rs
@@ -25,11 +25,11 @@
///
/// N.B., this computation requires normalization; therefore, it must be
/// performed before
-pub(super) fn generate<'gcx, 'tcx>(
- typeck: &mut TypeChecker<'_, 'gcx, 'tcx>,
+pub(super) fn generate<'tcx>(
+ typeck: &mut TypeChecker<'_, 'tcx>,
body: &Body<'tcx>,
elements: &Rc<RegionValueElements>,
- flow_inits: &mut FlowAtLocation<'tcx, MaybeInitializedPlaces<'_, 'gcx, 'tcx>>,
+ flow_inits: &mut FlowAtLocation<'tcx, MaybeInitializedPlaces<'_, 'tcx>>,
move_data: &MoveData<'tcx>,
location_table: &LocationTable,
) {
@@ -75,7 +75,7 @@
// some region `R` in its type where `R` is not known to outlive a free
// region (i.e., where `R` may be valid for just a subset of the fn body).
fn compute_live_locals(
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
free_regions: &FxHashSet<RegionVid>,
body: &Body<'tcx>,
) -> Vec<Local> {
diff --git a/src/librustc_mir/borrow_check/nll/type_check/liveness/trace.rs b/src/librustc_mir/borrow_check/nll/type_check/liveness/trace.rs
index 828cb4c..48e45e9 100644
--- a/src/librustc_mir/borrow_check/nll/type_check/liveness/trace.rs
+++ b/src/librustc_mir/borrow_check/nll/type_check/liveness/trace.rs
@@ -31,10 +31,10 @@
/// `dropck_outlives` result of the variable's type (in particular,
/// this respects `#[may_dangle]` annotations).
pub(super) fn trace(
- typeck: &mut TypeChecker<'_, 'gcx, 'tcx>,
+ typeck: &mut TypeChecker<'_, 'tcx>,
body: &Body<'tcx>,
elements: &Rc<RegionValueElements>,
- flow_inits: &mut FlowAtLocation<'tcx, MaybeInitializedPlaces<'_, 'gcx, 'tcx>>,
+ flow_inits: &mut FlowAtLocation<'tcx, MaybeInitializedPlaces<'_, 'tcx>>,
move_data: &MoveData<'tcx>,
live_locals: Vec<Local>,
location_table: &LocationTable,
@@ -58,15 +58,14 @@
}
/// Contextual state for the type-liveness generator.
-struct LivenessContext<'me, 'typeck, 'flow, 'gcx, 'tcx>
+struct LivenessContext<'me, 'typeck, 'flow, 'tcx>
where
'typeck: 'me,
'flow: 'me,
'tcx: 'typeck + 'flow,
- 'gcx: 'tcx,
{
/// Current type-checker, giving us our inference context etc.
- typeck: &'me mut TypeChecker<'typeck, 'gcx, 'tcx>,
+ typeck: &'me mut TypeChecker<'typeck, 'tcx>,
/// Defines the `PointIndex` mapping
elements: &'me RegionValueElements,
@@ -82,7 +81,7 @@
/// Results of dataflow tracking which variables (and paths) have been
/// initialized.
- flow_inits: &'me mut FlowAtLocation<'tcx, MaybeInitializedPlaces<'flow, 'gcx, 'tcx>>,
+ flow_inits: &'me mut FlowAtLocation<'tcx, MaybeInitializedPlaces<'flow, 'tcx>>,
/// Index indicating where each variable is assigned, used, or
/// dropped.
@@ -97,14 +96,13 @@
region_constraint_data: Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>,
}
-struct LivenessResults<'me, 'typeck, 'flow, 'gcx, 'tcx>
+struct LivenessResults<'me, 'typeck, 'flow, 'tcx>
where
'typeck: 'me,
'flow: 'me,
'tcx: 'typeck + 'flow,
- 'gcx: 'tcx,
{
- cx: LivenessContext<'me, 'typeck, 'flow, 'gcx, 'tcx>,
+ cx: LivenessContext<'me, 'typeck, 'flow, 'tcx>,
/// Set of points that define the current local.
defs: HybridBitSet<PointIndex>,
@@ -125,8 +123,8 @@
stack: Vec<PointIndex>,
}
-impl LivenessResults<'me, 'typeck, 'flow, 'gcx, 'tcx> {
- fn new(cx: LivenessContext<'me, 'typeck, 'flow, 'gcx, 'tcx>) -> Self {
+impl LivenessResults<'me, 'typeck, 'flow, 'tcx> {
+ fn new(cx: LivenessContext<'me, 'typeck, 'flow, 'tcx>) -> Self {
let num_points = cx.elements.num_points();
LivenessResults {
cx,
@@ -392,7 +390,7 @@
}
}
-impl LivenessContext<'_, '_, '_, '_, 'tcx> {
+impl LivenessContext<'_, '_, '_, 'tcx> {
/// Returns `true` if the local variable (or some part of it) is initialized in
/// the terminator of `block`. We need to check this to determine if a
/// DROP of some local variable will have an effect -- note that
@@ -504,7 +502,7 @@
fn make_all_regions_live(
elements: &RegionValueElements,
- typeck: &mut TypeChecker<'_, '_, 'tcx>,
+ typeck: &mut TypeChecker<'_, 'tcx>,
value: impl TypeFoldable<'tcx>,
live_at: &HybridBitSet<PointIndex>,
location_table: &LocationTable,
@@ -536,7 +534,7 @@
}
fn compute_drop_data(
- typeck: &mut TypeChecker<'_, 'gcx, 'tcx>,
+ typeck: &mut TypeChecker<'_, 'tcx>,
dropped_ty: Ty<'tcx>,
) -> DropData<'tcx> {
debug!("compute_drop_data(dropped_ty={:?})", dropped_ty,);
diff --git a/src/librustc_mir/borrow_check/nll/type_check/mod.rs b/src/librustc_mir/borrow_check/nll/type_check/mod.rs
index d6da42c..ad79f21 100644
--- a/src/librustc_mir/borrow_check/nll/type_check/mod.rs
+++ b/src/librustc_mir/borrow_check/nll/type_check/mod.rs
@@ -109,16 +109,16 @@
/// constraints for the regions in the types of variables
/// - `flow_inits` -- results of a maybe-init dataflow analysis
/// - `move_data` -- move-data constructed when performing the maybe-init dataflow analysis
-pub(crate) fn type_check<'gcx, 'tcx>(
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'gcx>,
+pub(crate) fn type_check<'tcx>(
+ infcx: &InferCtxt<'_, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
body: &Body<'tcx>,
mir_def_id: DefId,
universal_regions: &Rc<UniversalRegions<'tcx>>,
location_table: &LocationTable,
borrow_set: &BorrowSet<'tcx>,
all_facts: &mut Option<AllFacts>,
- flow_inits: &mut FlowAtLocation<'tcx, MaybeInitializedPlaces<'_, 'gcx, 'tcx>>,
+ flow_inits: &mut FlowAtLocation<'tcx, MaybeInitializedPlaces<'_, 'tcx>>,
move_data: &MoveData<'tcx>,
elements: &Rc<RegionValueElements>,
) -> MirTypeckResults<'tcx> {
@@ -175,16 +175,16 @@
}
}
-fn type_check_internal<'a, 'gcx, 'tcx, R>(
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+fn type_check_internal<'a, 'tcx, R>(
+ infcx: &'a InferCtxt<'a, 'tcx>,
mir_def_id: DefId,
- param_env: ty::ParamEnv<'gcx>,
+ param_env: ty::ParamEnv<'tcx>,
body: &'a Body<'tcx>,
region_bound_pairs: &'a RegionBoundPairs<'tcx>,
implicit_region_bound: ty::Region<'tcx>,
borrowck_context: &'a mut BorrowCheckContext<'a, 'tcx>,
universal_region_relations: &'a UniversalRegionRelations<'tcx>,
- mut extra: impl FnMut(&mut TypeChecker<'a, 'gcx, 'tcx>) -> R,
+ mut extra: impl FnMut(&mut TypeChecker<'a, 'tcx>) -> R,
) -> R where {
let mut checker = TypeChecker::new(
infcx,
@@ -235,7 +235,7 @@
}
}
-fn mirbug(tcx: TyCtxt<'_, '_, '_>, span: Span, msg: &str) {
+fn mirbug(tcx: TyCtxt<'_>, span: Span, msg: &str) {
// We sometimes see MIR failures (notably predicate failures) due to
// the fact that we check rvalue sized predicates here. So use `delay_span_bug`
// to avoid reporting bugs in those cases.
@@ -251,15 +251,15 @@
/// The sanitize_XYZ methods here take an MIR object and compute its
/// type, calling `span_mirbug` and returning an error type if there
/// is a problem.
-struct TypeVerifier<'a, 'b: 'a, 'gcx: 'tcx, 'tcx: 'b> {
- cx: &'a mut TypeChecker<'b, 'gcx, 'tcx>,
+struct TypeVerifier<'a, 'b: 'a, 'tcx: 'b> {
+ cx: &'a mut TypeChecker<'b, 'tcx>,
body: &'b Body<'tcx>,
last_span: Span,
mir_def_id: DefId,
errors_reported: bool,
}
-impl<'a, 'b, 'gcx, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'gcx, 'tcx> {
+impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
fn visit_span(&mut self, span: &Span) {
if !span.is_dummy() {
self.last_span = *span;
@@ -380,8 +380,8 @@
}
}
-impl<'a, 'b, 'gcx, 'tcx> TypeVerifier<'a, 'b, 'gcx, 'tcx> {
- fn new(cx: &'a mut TypeChecker<'b, 'gcx, 'tcx>, body: &'b Body<'tcx>) -> Self {
+impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
+ fn new(cx: &'a mut TypeChecker<'b, 'tcx>, body: &'b Body<'tcx>) -> Self {
TypeVerifier {
body,
mir_def_id: cx.mir_def_id,
@@ -391,7 +391,7 @@
}
}
- fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.cx.infcx.tcx
}
@@ -455,7 +455,7 @@
PlaceBase::Static(box Static { kind, ty: sty }) => {
let sty = self.sanitize_type(place, sty);
let check_err =
- |verifier: &mut TypeVerifier<'a, 'b, 'gcx, 'tcx>,
+ |verifier: &mut TypeVerifier<'a, 'b, 'tcx>,
place: &Place<'tcx>,
ty,
sty| {
@@ -830,9 +830,9 @@
/// constraints needed for it to be valid and well-typed. Along the
/// way, it accrues region constraints -- these can later be used by
/// NLL region checking.
-struct TypeChecker<'a, 'gcx: 'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'gcx>,
+struct TypeChecker<'a, 'tcx: 'a> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
last_span: Span,
/// User type annotations are shared between the main MIR and the MIR of
/// all of the promoted items.
@@ -895,7 +895,7 @@
impl MirTypeckRegionConstraints<'tcx> {
fn placeholder_region(
&mut self,
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
placeholder: ty::PlaceholderRegion,
) -> ty::Region<'tcx> {
let placeholder_index = self.placeholder_indices.insert(placeholder);
@@ -977,12 +977,12 @@
}
}
-impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
fn new(
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+ infcx: &'a InferCtxt<'a, 'tcx>,
body: &'a Body<'tcx>,
mir_def_id: DefId,
- param_env: ty::ParamEnv<'gcx>,
+ param_env: ty::ParamEnv<'tcx>,
region_bound_pairs: &'a RegionBoundPairs<'tcx>,
implicit_region_bound: ty::Region<'tcx>,
borrowck_context: &'a mut BorrowCheckContext<'a, 'tcx>,
@@ -1078,7 +1078,7 @@
&mut self,
locations: Locations,
category: ConstraintCategory,
- op: impl type_op::TypeOp<'gcx, 'tcx, Output = R>,
+ op: impl type_op::TypeOp<'tcx, Output = R>,
) -> Fallible<R> {
let (r, opt_data) = op.fully_perform(self.infcx)?;
@@ -1313,7 +1313,7 @@
Ok(())
}
- fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
@@ -1792,12 +1792,10 @@
}
TerminatorKind::FalseEdges {
real_target,
- ref imaginary_targets,
+ imaginary_target,
} => {
self.assert_iscleanup(body, block_data, real_target, is_cleanup);
- for target in imaginary_targets {
- self.assert_iscleanup(body, block_data, *target, is_cleanup);
- }
+ self.assert_iscleanup(body, block_data, imaginary_target, is_cleanup);
}
TerminatorKind::FalseUnwind {
real_target,
@@ -2504,7 +2502,7 @@
fn prove_closure_bounds(
&mut self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
def_id: DefId,
substs: SubstsRef<'tcx>,
location: Location,
@@ -2652,7 +2650,7 @@
fn normalize<T>(&mut self, value: T, location: impl NormalizeLocation) -> T
where
- T: type_op::normalize::Normalizable<'gcx, 'tcx> + Copy,
+ T: type_op::normalize::Normalizable<'tcx> + Copy,
{
debug!("normalize(value={:?}, location={:?})", value, location);
let param_env = self.param_env;
diff --git a/src/librustc_mir/borrow_check/nll/type_check/relate_tys.rs b/src/librustc_mir/borrow_check/nll/type_check/relate_tys.rs
index 8754475..5ced356 100644
--- a/src/librustc_mir/borrow_check/nll/type_check/relate_tys.rs
+++ b/src/librustc_mir/borrow_check/nll/type_check/relate_tys.rs
@@ -17,7 +17,7 @@
/// N.B., the type `a` is permitted to have unresolved inference
/// variables, but not the type `b`.
pub(super) fn relate_types<'tcx>(
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
a: Ty<'tcx>,
v: ty::Variance,
b: Ty<'tcx>,
@@ -34,8 +34,8 @@
Ok(())
}
-struct NllTypeRelatingDelegate<'me, 'bccx: 'me, 'gcx: 'tcx, 'tcx: 'bccx> {
- infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,
+struct NllTypeRelatingDelegate<'me, 'bccx: 'me, 'tcx: 'bccx> {
+ infcx: &'me InferCtxt<'me, 'tcx>,
borrowck_context: Option<&'me mut BorrowCheckContext<'bccx, 'tcx>>,
/// Where (and why) is this relation taking place?
@@ -45,9 +45,9 @@
category: ConstraintCategory,
}
-impl NllTypeRelatingDelegate<'me, 'bccx, 'gcx, 'tcx> {
+impl NllTypeRelatingDelegate<'me, 'bccx, 'tcx> {
fn new(
- infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,
+ infcx: &'me InferCtxt<'me, 'tcx>,
borrowck_context: Option<&'me mut BorrowCheckContext<'bccx, 'tcx>>,
locations: Locations,
category: ConstraintCategory,
@@ -61,7 +61,7 @@
}
}
-impl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, '_, 'tcx> {
+impl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx> {
fn create_next_universe(&mut self) -> ty::UniverseIndex {
self.infcx.create_next_universe()
}
diff --git a/src/librustc_mir/borrow_check/nll/universal_regions.rs b/src/librustc_mir/borrow_check/nll/universal_regions.rs
index 72e453d..a236359 100644
--- a/src/librustc_mir/borrow_check/nll/universal_regions.rs
+++ b/src/librustc_mir/borrow_check/nll/universal_regions.rs
@@ -106,7 +106,7 @@
/// not a closure or generator, there are no upvars, and hence it
/// will be an empty list. The order of types in this list will
/// match up with the upvar order in the HIR, typesystem, and MIR.
- pub fn upvar_tys(self, tcx: TyCtxt<'_, '_, 'tcx>) -> impl Iterator<Item = Ty<'tcx>> + 'tcx {
+ pub fn upvar_tys(self, tcx: TyCtxt<'tcx>) -> impl Iterator<Item = Ty<'tcx>> + 'tcx {
match self {
DefiningTy::Closure(def_id, substs) => Either::Left(substs.upvar_tys(def_id, tcx)),
DefiningTy::Generator(def_id, substs, _) => {
@@ -194,7 +194,7 @@
/// signature. This will also compute the relationships that are
/// known between those regions.
pub fn new(
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
mir_def_id: DefId,
param_env: ty::ParamEnv<'tcx>,
) -> Self {
@@ -218,7 +218,7 @@
/// `'1: '2`, then the caller would impose the constraint that
/// `V[1]: V[2]`.
pub fn closure_mapping(
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
closure_substs: SubstsRef<'tcx>,
expected_num_vars: usize,
closure_base_def_id: DefId,
@@ -305,7 +305,7 @@
/// that this region imposes on others. The methods in this file
/// handle the part about dumping the inference context internal
/// state.
- crate fn annotate(&self, tcx: TyCtxt<'_, '_, 'tcx>, err: &mut DiagnosticBuilder<'_>) {
+ crate fn annotate(&self, tcx: TyCtxt<'tcx>, err: &mut DiagnosticBuilder<'_>) {
match self.defining_ty {
DefiningTy::Closure(def_id, substs) => {
err.note(&format!(
@@ -363,8 +363,8 @@
}
}
-struct UniversalRegionsBuilder<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+struct UniversalRegionsBuilder<'cx, 'tcx: 'cx> {
+ infcx: &'cx InferCtxt<'cx, 'tcx>,
mir_def_id: DefId,
mir_hir_id: HirId,
param_env: ty::ParamEnv<'tcx>,
@@ -372,7 +372,7 @@
const FR: NLLRegionVariableOrigin = NLLRegionVariableOrigin::FreeRegion;
-impl<'cx, 'gcx, 'tcx> UniversalRegionsBuilder<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
fn build(self) -> UniversalRegions<'tcx> {
debug!("build(mir_def_id={:?})", self.mir_def_id);
@@ -471,7 +471,7 @@
let tcx = self.infcx.tcx;
let closure_base_def_id = tcx.closure_base_def_id(self.mir_def_id);
- match tcx.hir().body_owner_kind_by_hir_id(self.mir_hir_id) {
+ match tcx.hir().body_owner_kind(self.mir_hir_id) {
BodyOwnerKind::Closure |
BodyOwnerKind::Fn => {
let defining_ty = if self.mir_def_id == closure_base_def_id {
@@ -639,7 +639,7 @@
);
}
-impl<'cx, 'gcx, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'cx, 'tcx> {
fn replace_free_regions_with_nll_infer_vars<T>(
&self,
origin: NLLRegionVariableOrigin,
@@ -744,7 +744,7 @@
/// Replaces all free regions in `value` with region vids, as
/// returned by `to_region_vid`.
- pub fn fold_to_region_vids<T>(&self, tcx: TyCtxt<'_, '_, 'tcx>, value: &T) -> T
+ pub fn fold_to_region_vids<T>(&self, tcx: TyCtxt<'tcx>, value: &T) -> T
where
T: TypeFoldable<'tcx>,
{
@@ -757,7 +757,7 @@
/// Iterates over the late-bound regions defined on fn_def_id and
/// invokes `f` with the liberated form of each one.
fn for_each_late_bound_region_defined_on<'tcx>(
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
fn_def_id: DefId,
mut f: impl FnMut(ty::Region<'tcx>),
) {
diff --git a/src/librustc_mir/borrow_check/path_utils.rs b/src/librustc_mir/borrow_check/path_utils.rs
index a11e5d9..aa2b177 100644
--- a/src/librustc_mir/borrow_check/path_utils.rs
+++ b/src/librustc_mir/borrow_check/path_utils.rs
@@ -22,9 +22,9 @@
}
/// Encapsulates the idea of iterating over every borrow that involves a particular path
-pub(super) fn each_borrow_involving_path<'a, 'tcx, 'gcx: 'tcx, F, I, S> (
+pub(super) fn each_borrow_involving_path<'tcx, F, I, S>(
s: &mut S,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
_location: Location,
access_place: (AccessDepth, &Place<'tcx>),
@@ -33,7 +33,7 @@
mut op: F,
) where
F: FnMut(&mut S, BorrowIndex, &BorrowData<'tcx>) -> Control,
- I: Iterator<Item=BorrowIndex>
+ I: Iterator<Item = BorrowIndex>,
{
let (access, place) = access_place;
diff --git a/src/librustc_mir/borrow_check/place_ext.rs b/src/librustc_mir/borrow_check/place_ext.rs
index 509bd16..a8f28b64 100644
--- a/src/librustc_mir/borrow_check/place_ext.rs
+++ b/src/librustc_mir/borrow_check/place_ext.rs
@@ -12,16 +12,16 @@
/// for borrows of raw pointer dereferents as well as shared references.
fn ignore_borrow(
&self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
locals_state_at_exit: &LocalsStateAtExit,
- ) -> bool;
+ ) -> bool;
}
impl<'tcx> PlaceExt<'tcx> for Place<'tcx> {
fn ignore_borrow(
&self,
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
locals_state_at_exit: &LocalsStateAtExit,
) -> bool {
diff --git a/src/librustc_mir/borrow_check/places_conflict.rs b/src/librustc_mir/borrow_check/places_conflict.rs
index a9ee0a6..64ca00d 100644
--- a/src/librustc_mir/borrow_check/places_conflict.rs
+++ b/src/librustc_mir/borrow_check/places_conflict.rs
@@ -24,8 +24,8 @@
/// Helper function for checking if places conflict with a mutable borrow and deep access depth.
/// This is used to check for places conflicting outside of the borrow checking code (such as in
/// dataflow).
-crate fn places_conflict<'gcx, 'tcx>(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
+crate fn places_conflict<'tcx>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
borrow_place: &Place<'tcx>,
access_place: &Place<'tcx>,
@@ -46,8 +46,8 @@
/// access depth. The `bias` parameter is used to determine how the unknowable (comparing runtime
/// array indices, for example) should be interpreted - this depends on what the caller wants in
/// order to make the conservative choice and preserve soundness.
-pub(super) fn borrow_conflicts_with_place<'gcx, 'tcx>(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
+pub(super) fn borrow_conflicts_with_place<'tcx>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
borrow_place: &Place<'tcx>,
borrow_kind: BorrowKind,
@@ -83,8 +83,8 @@
})
}
-fn place_components_conflict<'gcx, 'tcx>(
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
+fn place_components_conflict<'tcx>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
borrow_projections: (&PlaceBase<'tcx>, ProjectionsIter<'_, 'tcx>),
borrow_kind: BorrowKind,
@@ -298,8 +298,8 @@
// Given that the bases of `elem1` and `elem2` are always either equal
// or disjoint (and have the same type!), return the overlap situation
// between `elem1` and `elem2`.
-fn place_base_conflict<'a, 'gcx: 'tcx, 'tcx>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+fn place_base_conflict<'tcx>(
+ tcx: TyCtxt<'tcx>,
elem1: &PlaceBase<'tcx>,
elem2: &PlaceBase<'tcx>,
) -> Overlap {
@@ -365,8 +365,8 @@
// Given that the bases of `elem1` and `elem2` are always either equal
// or disjoint (and have the same type!), return the overlap situation
// between `elem1` and `elem2`.
-fn place_projection_conflict<'a, 'gcx: 'tcx, 'tcx>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+fn place_projection_conflict<'tcx>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
pi1: &Projection<'tcx>,
pi2: &Projection<'tcx>,
diff --git a/src/librustc_mir/borrow_check/prefixes.rs b/src/librustc_mir/borrow_check/prefixes.rs
index 416de1c..b35bcc0 100644
--- a/src/librustc_mir/borrow_check/prefixes.rs
+++ b/src/librustc_mir/borrow_check/prefixes.rs
@@ -36,10 +36,9 @@
}
}
-
-pub(super) struct Prefixes<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
+pub(super) struct Prefixes<'cx, 'tcx: 'cx> {
body: &'cx Body<'tcx>,
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
kind: PrefixSet,
next: Option<&'cx Place<'tcx>>,
}
@@ -56,15 +55,11 @@
Supporting,
}
-impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
/// Returns an iterator over the prefixes of `place`
/// (inclusive) from longest to smallest, potentially
/// terminating the iteration early based on `kind`.
- pub(super) fn prefixes(
- &self,
- place: &'cx Place<'tcx>,
- kind: PrefixSet,
- ) -> Prefixes<'cx, 'gcx, 'tcx> {
+ pub(super) fn prefixes(&self, place: &'cx Place<'tcx>, kind: PrefixSet) -> Prefixes<'cx, 'tcx> {
Prefixes {
next: Some(place),
kind,
@@ -74,7 +69,7 @@
}
}
-impl<'cx, 'gcx, 'tcx> Iterator for Prefixes<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> Iterator for Prefixes<'cx, 'tcx> {
type Item = &'cx Place<'tcx>;
fn next(&mut self) -> Option<Self::Item> {
let mut cursor = self.next?;
diff --git a/src/librustc_mir/borrow_check/used_muts.rs b/src/librustc_mir/borrow_check/used_muts.rs
index 7b2f662..e609ddb 100644
--- a/src/librustc_mir/borrow_check/used_muts.rs
+++ b/src/librustc_mir/borrow_check/used_muts.rs
@@ -7,7 +7,7 @@
use crate::borrow_check::MirBorrowckCtxt;
-impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
/// Walks the MIR adding to the set of `used_mut` locals that will be ignored for the purposes
/// of the `unused_mut` lint.
///
@@ -46,13 +46,13 @@
/// MIR visitor for collecting used mutable variables.
/// The 'visit lifetime represents the duration of the MIR walk.
-struct GatherUsedMutsVisitor<'visit, 'cx: 'visit, 'gcx: 'tcx, 'tcx: 'cx> {
+struct GatherUsedMutsVisitor<'visit, 'cx: 'visit, 'tcx: 'cx> {
temporary_used_locals: FxHashSet<Local>,
never_initialized_mut_locals: &'visit mut FxHashSet<Local>,
- mbcx: &'visit mut MirBorrowckCtxt<'cx, 'gcx, 'tcx>,
+ mbcx: &'visit mut MirBorrowckCtxt<'cx, 'tcx>,
}
-impl GatherUsedMutsVisitor<'_, '_, '_, '_> {
+impl GatherUsedMutsVisitor<'_, '_, '_> {
fn remove_never_initialized_mut_locals(&mut self, into: &Place<'_>) {
// Remove any locals that we found were initialized from the
// `never_initialized_mut_locals` set. At the end, the only remaining locals will
@@ -65,7 +65,7 @@
}
}
-impl<'visit, 'cx, 'gcx, 'tcx> Visitor<'tcx> for GatherUsedMutsVisitor<'visit, 'cx, 'gcx, 'tcx> {
+impl<'visit, 'cx, 'tcx> Visitor<'tcx> for GatherUsedMutsVisitor<'visit, 'cx, 'tcx> {
fn visit_terminator_kind(
&mut self,
kind: &TerminatorKind<'tcx>,
diff --git a/src/librustc_mir/build/block.rs b/src/librustc_mir/build/block.rs
index b5bab15..749cd6f 100644
--- a/src/librustc_mir/build/block.rs
+++ b/src/librustc_mir/build/block.rs
@@ -6,7 +6,7 @@
use rustc::hir;
use syntax_pos::Span;
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
pub fn ast_block(&mut self,
destination: &Place<'tcx>,
block: BasicBlock,
diff --git a/src/librustc_mir/build/expr/as_constant.rs b/src/librustc_mir/build/expr/as_constant.rs
index 6146681..5197981 100644
--- a/src/librustc_mir/build/expr/as_constant.rs
+++ b/src/librustc_mir/build/expr/as_constant.rs
@@ -5,7 +5,7 @@
use rustc::mir::*;
use rustc::ty::CanonicalUserTypeAnnotation;
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Compile `expr`, yielding a compile-time constant. Assumes that
/// `expr` is a valid compile-time constant!
pub fn as_constant<M>(&mut self, expr: M) -> Constant<'tcx>
diff --git a/src/librustc_mir/build/expr/as_operand.rs b/src/librustc_mir/build/expr/as_operand.rs
index ed80cb1..dd78e7e 100644
--- a/src/librustc_mir/build/expr/as_operand.rs
+++ b/src/librustc_mir/build/expr/as_operand.rs
@@ -6,7 +6,7 @@
use rustc::middle::region;
use rustc::mir::*;
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Returns an operand suitable for use until the end of the current
/// scope expression.
///
diff --git a/src/librustc_mir/build/expr/as_place.rs b/src/librustc_mir/build/expr/as_place.rs
index a956eac..51808ef 100644
--- a/src/librustc_mir/build/expr/as_place.rs
+++ b/src/librustc_mir/build/expr/as_place.rs
@@ -10,7 +10,7 @@
use rustc_data_structures::indexed_vec::Idx;
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Compile `expr`, yielding a place that we can move from etc.
pub fn as_place<M>(&mut self, block: BasicBlock, expr: M) -> BlockAnd<Place<'tcx>>
where
diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs
index 87e2118..6d6e5de 100644
--- a/src/librustc_mir/build/expr/as_rvalue.rs
+++ b/src/librustc_mir/build/expr/as_rvalue.rs
@@ -12,7 +12,7 @@
use rustc::ty::{self, CanonicalUserTypeAnnotation, Ty, UpvarSubsts};
use syntax_pos::Span;
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
/// See comment on `as_local_operand`
pub fn as_local_rvalue<M>(&mut self, block: BasicBlock, expr: M) -> BlockAnd<Rvalue<'tcx>>
where
diff --git a/src/librustc_mir/build/expr/as_temp.rs b/src/librustc_mir/build/expr/as_temp.rs
index 8542395..9d907c6 100644
--- a/src/librustc_mir/build/expr/as_temp.rs
+++ b/src/librustc_mir/build/expr/as_temp.rs
@@ -6,7 +6,7 @@
use rustc::middle::region;
use rustc::mir::*;
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Compile `expr` into a fresh temporary. This is used when building
/// up rvalues so as to freeze the value that will be consumed.
pub fn as_temp<M>(
diff --git a/src/librustc_mir/build/expr/into.rs b/src/librustc_mir/build/expr/into.rs
index 7bdfdf0..a397623 100644
--- a/src/librustc_mir/build/expr/into.rs
+++ b/src/librustc_mir/build/expr/into.rs
@@ -8,7 +8,7 @@
use rustc_target::spec::abi::Abi;
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Compile `expr`, storing the result into `destination`, which
/// is assumed to be uninitialized.
pub fn into_expr(
diff --git a/src/librustc_mir/build/expr/stmt.rs b/src/librustc_mir/build/expr/stmt.rs
index ac690f8..74338de 100644
--- a/src/librustc_mir/build/expr/stmt.rs
+++ b/src/librustc_mir/build/expr/stmt.rs
@@ -3,7 +3,7 @@
use crate::hair::*;
use rustc::mir::*;
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Builds a block of MIR statements to evaluate the HAIR `expr`.
/// If the original expression was an AST statement,
/// (e.g., `some().code(&here());`) then `opt_stmt_span` is the
diff --git a/src/librustc_mir/build/into.rs b/src/librustc_mir/build/into.rs
index 67b6540..077840c 100644
--- a/src/librustc_mir/build/into.rs
+++ b/src/librustc_mir/build/into.rs
@@ -9,14 +9,15 @@
use rustc::mir::*;
pub(in crate::build) trait EvalInto<'tcx> {
- fn eval_into<'a, 'gcx>(self,
- builder: &mut Builder<'a, 'gcx, 'tcx>,
- destination: &Place<'tcx>,
- block: BasicBlock)
- -> BlockAnd<()>;
+ fn eval_into(
+ self,
+ builder: &mut Builder<'_, 'tcx>,
+ destination: &Place<'tcx>,
+ block: BasicBlock,
+ ) -> BlockAnd<()>;
}
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
pub fn into<E>(&mut self,
destination: &Place<'tcx>,
block: BasicBlock,
@@ -29,22 +30,24 @@
}
impl<'tcx> EvalInto<'tcx> for ExprRef<'tcx> {
- fn eval_into<'a, 'gcx>(self,
- builder: &mut Builder<'a, 'gcx, 'tcx>,
- destination: &Place<'tcx>,
- block: BasicBlock)
- -> BlockAnd<()> {
+ fn eval_into(
+ self,
+ builder: &mut Builder<'_, 'tcx>,
+ destination: &Place<'tcx>,
+ block: BasicBlock,
+ ) -> BlockAnd<()> {
let expr = builder.hir.mirror(self);
builder.into_expr(destination, block, expr)
}
}
impl<'tcx> EvalInto<'tcx> for Expr<'tcx> {
- fn eval_into<'a, 'gcx>(self,
- builder: &mut Builder<'a, 'gcx, 'tcx>,
- destination: &Place<'tcx>,
- block: BasicBlock)
- -> BlockAnd<()> {
+ fn eval_into(
+ self,
+ builder: &mut Builder<'_, 'tcx>,
+ destination: &Place<'tcx>,
+ block: BasicBlock,
+ ) -> BlockAnd<()> {
builder.into_expr(destination, block, self)
}
}
diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs
index 0aabebc..134ff52 100644
--- a/src/librustc_mir/build/matches/mod.rs
+++ b/src/librustc_mir/build/matches/mod.rs
@@ -27,7 +27,7 @@
use std::convert::TryFrom;
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Generates MIR for a `match` expression.
///
/// The MIR that we generate for a match looks like this.
@@ -143,19 +143,10 @@
// create binding start block for link them by false edges
let candidate_count = arms.iter().map(|c| c.patterns.len()).sum::<usize>();
- let pre_binding_blocks: Vec<_> = (0..=candidate_count)
+ let pre_binding_blocks: Vec<_> = (0..candidate_count)
.map(|_| self.cfg.start_new_block())
.collect();
- // There's one more pre_binding block than there are candidates so that
- // every candidate can have a `next_candidate_pre_binding_block`.
- let outer_source_info = self.source_info(span);
- self.cfg.terminate(
- *pre_binding_blocks.last().unwrap(),
- outer_source_info,
- TerminatorKind::Unreachable,
- );
-
let mut match_has_guard = false;
let mut candidate_pre_binding_blocks = pre_binding_blocks.iter();
@@ -171,9 +162,8 @@
let arm_candidates: Vec<_> = arm.patterns
.iter()
.zip(candidate_pre_binding_blocks.by_ref())
- .zip(next_candidate_pre_binding_blocks.by_ref())
.map(
- |((pattern, pre_binding_block), next_candidate_pre_binding_block)| {
+ |(pattern, pre_binding_block)| {
Candidate {
span: pattern.span,
match_pairs: vec![
@@ -188,7 +178,7 @@
},
pre_binding_block: *pre_binding_block,
next_candidate_pre_binding_block:
- *next_candidate_pre_binding_block,
+ next_candidate_pre_binding_blocks.next().copied(),
}
},
)
@@ -216,31 +206,18 @@
.flat_map(|(_, candidates)| candidates)
.collect::<Vec<_>>();
+ let outer_source_info = self.source_info(span);
+
// this will generate code to test scrutinee_place and
// branch to the appropriate arm block
- let otherwise = self.match_candidates(
+ self.match_candidates(
scrutinee_span,
+ &mut Some(block),
+ None,
candidates,
- block,
&mut fake_borrows,
);
- if !otherwise.is_empty() {
- // All matches are exhaustive. However, because some matches
- // only have exponentially-large exhaustive decision trees, we
- // sometimes generate an inexhaustive decision tree.
- //
- // In that case, the inexhaustive tips of the decision tree
- // can't be reached - terminate them with an `unreachable`.
- let mut otherwise = otherwise;
- otherwise.sort();
- otherwise.dedup(); // variant switches can introduce duplicate target blocks
- for block in otherwise {
- self.cfg
- .terminate(block, outer_source_info, TerminatorKind::Unreachable);
- }
- }
-
// Step 4. Determine the fake borrows that are needed from the above
// places. Create the required temporaries for them.
@@ -251,13 +228,10 @@
};
// Step 5. Create everything else: the guards and the arms.
-
- let arm_end_blocks: Vec<_> = arm_candidates.into_iter().map(|(arm, candidates)| {
+ let arm_end_blocks: Vec<_> = arm_candidates.into_iter().map(|(arm, mut candidates)| {
let arm_source_info = self.source_info(arm.span);
let region_scope = (arm.scope, arm_source_info);
self.in_scope(region_scope, arm.lint_level, |this| {
- let arm_block = this.cfg.start_new_block();
-
let body = this.hir.mirror(arm.body.clone());
let scope = this.declare_bindings(
None,
@@ -267,20 +241,36 @@
Some((Some(&scrutinee_place), scrutinee_span)),
);
- if let Some(source_scope) = scope {
- this.source_scope = source_scope;
- }
-
- for candidate in candidates {
- this.clear_top_scope(arm.scope);
- this.bind_and_guard_matched_candidate(
- candidate,
+ let arm_block;
+ if candidates.len() == 1 {
+ arm_block = this.bind_and_guard_matched_candidate(
+ candidates.pop().unwrap(),
arm.guard.clone(),
- arm_block,
&fake_borrow_temps,
scrutinee_span,
region_scope,
);
+ } else {
+ arm_block = this.cfg.start_new_block();
+ for candidate in candidates {
+ this.clear_top_scope(arm.scope);
+ let binding_end = this.bind_and_guard_matched_candidate(
+ candidate,
+ arm.guard.clone(),
+ &fake_borrow_temps,
+ scrutinee_span,
+ region_scope,
+ );
+ this.cfg.terminate(
+ binding_end,
+ source_info,
+ TerminatorKind::Goto { target: arm_block },
+ );
+ }
+ }
+
+ if let Some(source_scope) = scope {
+ this.source_scope = source_scope;
}
this.into(destination, arm_block, body)
@@ -434,7 +424,7 @@
// since we don't call `match_candidates`, next fields are unused
otherwise_block: None,
pre_binding_block: block,
- next_candidate_pre_binding_block: block,
+ next_candidate_pre_binding_block: None,
};
// Simplify the candidate. Since the pattern is irrefutable, this should
@@ -689,7 +679,7 @@
// ...and the blocks for add false edges between candidates
pre_binding_block: BasicBlock,
- next_candidate_pre_binding_block: BasicBlock,
+ next_candidate_pre_binding_block: Option<BasicBlock>,
}
#[derive(Clone, Debug)]
@@ -725,29 +715,46 @@
#[derive(Clone, Debug, PartialEq)]
enum TestKind<'tcx> {
- // test the branches of enum
+ /// Test the branches of enum.
Switch {
+ /// The enum being tested
adt_def: &'tcx ty::AdtDef,
+ /// The set of variants that we should create a branch for. We also
+ /// create an additional "otherwise" case.
variants: BitSet<VariantIdx>,
},
- // test the branches of enum
+ /// Test what value an `integer`, `bool` or `char` has.
SwitchInt {
+ /// The type of the value that we're testing.
switch_ty: Ty<'tcx>,
+ /// The (ordered) set of values that we test for.
+ ///
+ /// For integers and `char`s we create a branch to each of the values in
+ /// `options`, as well as an "otherwise" branch for all other values, even
+ /// in the (rare) case that options is exhaustive.
+ ///
+ /// For `bool` we always generate two edges, one for `true` and one for
+ /// `false`.
options: Vec<u128>,
+ /// Reverse map used to ensure that the values in `options` are unique.
indices: FxHashMap<&'tcx ty::Const<'tcx>, usize>,
},
- // test for equality
+ /// Test for equality with value, possibly after an unsizing coercion to
+ /// `ty`.
Eq {
value: &'tcx ty::Const<'tcx>,
+ // Integer types are handled by `SwitchInt`, and constants with ADT
+ // types are converted back into patterns, so this can only be `&str`,
+ // `&[T]`, `f32` or `f64`.
ty: Ty<'tcx>,
},
- // test whether the value falls within an inclusive or exclusive range
+ /// Test whether the value falls within an inclusive or exclusive range
Range(PatternRange<'tcx>),
- // test length of the slice is equal to len
+ /// Test length of the slice is equal to len
Len {
len: u64,
op: BinOp,
@@ -768,7 +775,7 @@
///////////////////////////////////////////////////////////////////////////
// Main matching algorithm
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
/// The main match algorithm. It begins with a set of candidates
/// `candidates` and has the job of generating code to determine
/// which of these candidates, if any, is the correct one. The
@@ -777,11 +784,10 @@
/// the value, we will generate a branch to the appropriate
/// prebinding block.
///
- /// The return value is a list of "otherwise" blocks. These are
- /// points in execution where we found that *NONE* of the
- /// candidates apply. In principle, this means that the input
- /// list was not exhaustive, though at present we sometimes are
- /// not smart enough to recognize all exhaustive inputs.
+ /// If we find that *NONE* of the candidates apply, we branch to the
+ /// `otherwise_block`. In principle, this means that the input list was not
+ /// exhaustive, though at present we sometimes are not smart enough to
+ /// recognize all exhaustive inputs.
///
/// It might be surprising that the input can be inexhaustive.
/// Indeed, initially, it is not, because all matches are
@@ -795,13 +801,17 @@
fn match_candidates<'pat>(
&mut self,
span: Span,
+ start_block: &mut Option<BasicBlock>,
+ otherwise_block: Option<BasicBlock>,
candidates: &mut [&mut Candidate<'pat, 'tcx>],
- mut block: BasicBlock,
fake_borrows: &mut Option<FxHashSet<Place<'tcx>>>,
- ) -> Vec<BasicBlock> {
+ ) {
debug!(
- "matched_candidate(span={:?}, block={:?}, candidates={:?})",
- span, block, candidates
+ "matched_candidate(span={:?}, candidates={:?}, start_block={:?}, otherwise_block={:?})",
+ span,
+ candidates,
+ start_block,
+ otherwise_block,
);
// Start by simplifying candidates. Once this process is complete, all
@@ -824,52 +834,57 @@
);
let (matched_candidates, unmatched_candidates) = candidates.split_at_mut(fully_matched);
+ let block: BasicBlock;
+
if !matched_candidates.is_empty() {
- block = if let Some(last_otherwise_block) = self.select_matched_candidates(
+ let otherwise_block = self.select_matched_candidates(
matched_candidates,
- block,
+ start_block,
fake_borrows,
- ) {
- last_otherwise_block
+ );
+
+ if let Some(last_otherwise_block) = otherwise_block {
+ block = last_otherwise_block
} else {
// Any remaining candidates are unreachable.
if unmatched_candidates.is_empty() {
- return Vec::new();
- } else {
- self.cfg.start_new_block()
+ return;
}
+ block = self.cfg.start_new_block();
};
+ } else {
+ block = *start_block.get_or_insert_with(|| self.cfg.start_new_block());
}
// If there are no candidates that still need testing, we're
// done. Since all matches are exhaustive, execution should
// never reach this point.
if unmatched_candidates.is_empty() {
- return vec![block];
+ let source_info = self.source_info(span);
+ if let Some(otherwise) = otherwise_block {
+ self.cfg.terminate(
+ block,
+ source_info,
+ TerminatorKind::Goto { target: otherwise },
+ );
+ } else {
+ self.cfg.terminate(
+ block,
+ source_info,
+ TerminatorKind::Unreachable,
+ )
+ }
+ return;
}
- // Test candidates where possible.
- let (otherwise, untested_candidates) = self.test_candidates(
+ // Test for the remaining candidates.
+ self.test_candidates(
span,
unmatched_candidates,
block,
+ otherwise_block,
fake_borrows,
);
-
- // If the target candidates were exhaustive, then we are done.
- // But for borrowck continue build decision tree.
- if untested_candidates.is_empty() {
- return otherwise;
- }
-
- // Otherwise, let's process those remaining candidates.
- let join_block = self.join_otherwise_blocks(span, otherwise);
- self.match_candidates(
- span,
- untested_candidates,
- join_block,
- fake_borrows,
- )
}
/// Link up matched candidates. For example, if we have something like
@@ -893,7 +908,7 @@
fn select_matched_candidates(
&mut self,
matched_candidates: &mut [&mut Candidate<'_, 'tcx>],
- block: BasicBlock,
+ start_block: &mut Option<BasicBlock>,
fake_borrows: &mut Option<FxHashSet<Place<'tcx>>>,
) -> Option<BasicBlock> {
debug_assert!(
@@ -941,31 +956,29 @@
= matched_candidates.split_at_mut(fully_matched_with_guard + 1);
let first_candidate = &reachable_candidates[0];
+ let first_prebinding_block = first_candidate.pre_binding_block;
- let candidate_source_info = self.source_info(first_candidate.span);
-
- self.cfg.terminate(
- block,
- candidate_source_info,
- TerminatorKind::Goto {
- target: first_candidate.pre_binding_block,
- },
- );
+ if let Some(start_block) = *start_block {
+ let source_info = self.source_info(first_candidate.span);
+ self.cfg.terminate(
+ start_block,
+ source_info,
+ TerminatorKind::Goto { target: first_prebinding_block },
+ );
+ } else {
+ *start_block = Some(first_prebinding_block);
+ }
for window in reachable_candidates.windows(2) {
if let [first_candidate, second_candidate] = window {
let source_info = self.source_info(first_candidate.span);
if let Some(otherwise_block) = first_candidate.otherwise_block {
- self.cfg.terminate(
+ self.false_edges(
otherwise_block,
+ second_candidate.pre_binding_block,
+ first_candidate.next_candidate_pre_binding_block,
source_info,
- TerminatorKind::FalseEdges {
- real_target: second_candidate.pre_binding_block,
- imaginary_targets: vec![
- first_candidate.next_candidate_pre_binding_block
- ],
- }
- )
+ );
} else {
bug!("candidate other than the last has no guard");
}
@@ -979,13 +992,11 @@
if let Some(otherwise) = candidate.otherwise_block {
let source_info = self.source_info(candidate.span);
let unreachable = self.cfg.start_new_block();
- self.cfg.terminate(
+ self.false_edges(
otherwise,
+ unreachable,
+ candidate.next_candidate_pre_binding_block,
source_info,
- TerminatorKind::FalseEdges {
- real_target: unreachable,
- imaginary_targets: vec![candidate.next_candidate_pre_binding_block],
- }
);
self.cfg.terminate(unreachable, source_info, TerminatorKind::Unreachable);
}
@@ -996,13 +1007,11 @@
if let Some(otherwise) = last_candidate.otherwise_block {
let source_info = self.source_info(last_candidate.span);
let block = self.cfg.start_new_block();
- self.cfg.terminate(
+ self.false_edges(
otherwise,
+ block,
+ last_candidate.next_candidate_pre_binding_block,
source_info,
- TerminatorKind::FalseEdges {
- real_target: block,
- imaginary_targets: vec![last_candidate.next_candidate_pre_binding_block]
- }
);
Some(block)
} else {
@@ -1010,25 +1019,6 @@
}
}
- fn join_otherwise_blocks(&mut self, span: Span, mut otherwise: Vec<BasicBlock>) -> BasicBlock {
- let source_info = self.source_info(span);
- otherwise.sort();
- otherwise.dedup(); // variant switches can introduce duplicate target blocks
- if otherwise.len() == 1 {
- otherwise[0]
- } else {
- let join_block = self.cfg.start_new_block();
- for block in otherwise {
- self.cfg.terminate(
- block,
- source_info,
- TerminatorKind::Goto { target: join_block },
- );
- }
- join_block
- }
- }
-
/// This is the most subtle part of the matching algorithm. At
/// this point, the input candidates have been fully simplified,
/// and so we know that all remaining match-pairs require some
@@ -1146,8 +1136,9 @@
span: Span,
mut candidates: &'b mut [&'c mut Candidate<'pat, 'tcx>],
block: BasicBlock,
+ mut otherwise_block: Option<BasicBlock>,
fake_borrows: &mut Option<FxHashSet<Place<'tcx>>>,
- ) -> (Vec<BasicBlock>, &'b mut [&'c mut Candidate<'pat, 'tcx>]) {
+ ) {
// extract the match-pair from the highest priority candidate
let match_pair = &candidates.first().unwrap().match_pairs[0];
let mut test = self.test(match_pair);
@@ -1201,9 +1192,8 @@
"match_candidates: test={:?} match_pair={:?}",
test, match_pair
);
- let target_blocks = self.perform_test(block, &match_place, &test);
let mut target_candidates: Vec<Vec<&mut Candidate<'pat, 'tcx>>> = vec![];
- target_candidates.resize_with(target_blocks.len(), Default::default);
+ target_candidates.resize_with(test.targets(), Default::default);
let total_candidate_count = candidates.len();
@@ -1225,24 +1215,59 @@
debug!("tested_candidates: {}", total_candidate_count - candidates.len());
debug!("untested_candidates: {}", candidates.len());
- // For each outcome of test, process the candidates that still
- // apply. Collect a list of blocks where control flow will
- // branch if one of the `target_candidate` sets is not
- // exhaustive.
- let otherwise: Vec<_> = target_blocks
- .into_iter()
- .zip(target_candidates)
- .flat_map(|(target_block, mut target_candidates)| {
- self.match_candidates(
+ // HACK(matthewjasper) This is a closure so that we can let the test
+ // create its blocks before the rest of the match. This currently
+ // improves the speed of llvm when optimizing long string literal
+ // matches
+ let make_target_blocks = move |this: &mut Self| -> Vec<BasicBlock> {
+ // For each outcome of test, process the candidates that still
+ // apply. Collect a list of blocks where control flow will
+ // branch if one of the `target_candidate` sets is not
+ // exhaustive.
+ if !candidates.is_empty() {
+ let remainder_start = &mut None;
+ this.match_candidates(
span,
- &mut *target_candidates,
- target_block,
+ remainder_start,
+ otherwise_block,
+ candidates,
fake_borrows,
- )
- })
- .collect();
+ );
+ otherwise_block = Some(remainder_start.unwrap());
+ };
- (otherwise, candidates)
+ target_candidates.into_iter().map(|mut candidates| {
+ if candidates.len() != 0 {
+ let candidate_start = &mut None;
+ this.match_candidates(
+ span,
+ candidate_start,
+ otherwise_block,
+ &mut *candidates,
+ fake_borrows,
+ );
+ candidate_start.unwrap()
+ } else {
+ *otherwise_block.get_or_insert_with(|| {
+ let unreachable = this.cfg.start_new_block();
+ let source_info = this.source_info(span);
+ this.cfg.terminate(
+ unreachable,
+ source_info,
+ TerminatorKind::Unreachable,
+ );
+ unreachable
+ })
+ }
+ }).collect()
+ };
+
+ self.perform_test(
+ block,
+ &match_place,
+ &test,
+ make_target_blocks,
+ );
}
// Determine the fake borrows that are needed to ensure that the place
@@ -1296,7 +1321,7 @@
///////////////////////////////////////////////////////////////////////////
// Pattern binding - used for `let` and function parameters as well.
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Initializes each of the bindings from the candidate by
/// moving/copying/ref'ing the source as appropriate. Tests the guard, if
/// any, and then branches to the arm. Returns the block for the case where
@@ -1313,27 +1338,37 @@
&mut self,
candidate: Candidate<'pat, 'tcx>,
guard: Option<Guard<'tcx>>,
- arm_block: BasicBlock,
fake_borrows: &Vec<(&Place<'tcx>, Local)>,
scrutinee_span: Span,
region_scope: (region::Scope, SourceInfo),
- ) {
+ ) -> BasicBlock {
debug!("bind_and_guard_matched_candidate(candidate={:?})", candidate);
debug_assert!(candidate.match_pairs.is_empty());
let candidate_source_info = self.source_info(candidate.span);
- let mut block = self.cfg.start_new_block();
- self.cfg.terminate(
- candidate.pre_binding_block,
- candidate_source_info,
- TerminatorKind::FalseEdges {
- real_target: block,
- imaginary_targets: vec![candidate.next_candidate_pre_binding_block],
- },
- );
- self.ascribe_types(block, &candidate.ascriptions);
+ let mut block = candidate.pre_binding_block;
+
+ // If we are adding our own statements, then we need a fresh block.
+ let create_fresh_block = candidate.next_candidate_pre_binding_block.is_some()
+ || !candidate.bindings.is_empty()
+ || !candidate.ascriptions.is_empty()
+ || guard.is_some();
+
+ if create_fresh_block {
+ let fresh_block = self.cfg.start_new_block();
+ self.false_edges(
+ block,
+ fresh_block,
+ candidate.next_candidate_pre_binding_block,
+ candidate_source_info,
+ );
+ block = fresh_block;
+ self.ascribe_types(block, &candidate.ascriptions);
+ } else {
+ return block;
+ }
// rust-lang/rust#27282: The `autoref` business deserves some
// explanation here.
@@ -1478,7 +1513,7 @@
// because that would be before we've checked the result
// from the guard.
//
- // But binding them on `arm_block` is *too late*, because
+ // But binding them on the arm is *too late*, because
// then all of the candidates for a single arm would be
// bound in the same place, that would cause a case like:
//
@@ -1554,22 +1589,14 @@
by_value_bindings,
);
- self.cfg.terminate(
- post_guard_block,
- source_info,
- TerminatorKind::Goto { target: arm_block },
- );
+ post_guard_block
} else {
assert!(candidate.otherwise_block.is_none());
// (Here, it is not too early to bind the matched
// candidate on `block`, because there is no guard result
// that we have to inspect before we bind them.)
self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);
- self.cfg.terminate(
- block,
- candidate_source_info,
- TerminatorKind::Goto { target: arm_block },
- );
+ block
}
}
@@ -1737,7 +1764,7 @@
pat_span,
}))),
};
- let for_arm_body = self.local_decls.push(local.clone());
+ let for_arm_body = self.local_decls.push(local);
let locals = if has_guard.0 {
let ref_for_guard = self.local_decls.push(LocalDecl::<'tcx> {
// This variable isn't mutated but has a name, so has to be
diff --git a/src/librustc_mir/build/matches/simplify.rs b/src/librustc_mir/build/matches/simplify.rs
index 2e4f530..b1b5233 100644
--- a/src/librustc_mir/build/matches/simplify.rs
+++ b/src/librustc_mir/build/matches/simplify.rs
@@ -23,7 +23,7 @@
use std::mem;
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
pub fn simplify_candidate<'pat>(&mut self,
candidate: &mut Candidate<'pat, 'tcx>) {
// repeatedly simplify match pairs until fixed point is reached
diff --git a/src/librustc_mir/build/matches/test.rs b/src/librustc_mir/build/matches/test.rs
index c767fff..95e2e52 100644
--- a/src/librustc_mir/build/matches/test.rs
+++ b/src/librustc_mir/build/matches/test.rs
@@ -15,11 +15,12 @@
use rustc::ty::util::IntTypeExt;
use rustc::ty::layout::VariantIdx;
use rustc::mir::*;
-use rustc::hir::{RangeEnd, Mutability};
-use syntax_pos::Span;
+use rustc::hir::RangeEnd;
+use syntax_pos::symbol::sym;
+
use std::cmp::Ordering;
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Identifies what test is needed to decide if `match_pair` is applicable.
///
/// It is a bug to call this with a simplifiable pattern.
@@ -162,43 +163,51 @@
}
}
- /// Generates the code to perform a test.
- pub fn perform_test(&mut self,
- block: BasicBlock,
- place: &Place<'tcx>,
- test: &Test<'tcx>)
- -> Vec<BasicBlock> {
+ pub fn perform_test(
+ &mut self,
+ block: BasicBlock,
+ place: &Place<'tcx>,
+ test: &Test<'tcx>,
+ make_target_blocks: impl FnOnce(&mut Self) -> Vec<BasicBlock>,
+ ) {
debug!("perform_test({:?}, {:?}: {:?}, {:?})",
block,
place,
place.ty(&self.local_decls, self.hir.tcx()),
test);
+
let source_info = self.source_info(test.span);
match test.kind {
TestKind::Switch { adt_def, ref variants } => {
+ let target_blocks = make_target_blocks(self);
// Variants is a BitVec of indexes into adt_def.variants.
let num_enum_variants = adt_def.variants.len();
let used_variants = variants.count();
- let mut otherwise_block = None;
- let mut target_blocks = Vec::with_capacity(num_enum_variants);
+ debug_assert_eq!(target_blocks.len(), num_enum_variants + 1);
+ let otherwise_block = *target_blocks.last().unwrap();
let mut targets = Vec::with_capacity(used_variants + 1);
let mut values = Vec::with_capacity(used_variants);
let tcx = self.hir.tcx();
for (idx, discr) in adt_def.discriminants(tcx) {
- target_blocks.push(if variants.contains(idx) {
+ if variants.contains(idx) {
+ debug_assert_ne!(
+ target_blocks[idx.index()],
+ otherwise_block,
+ "no canididates for tested discriminant: {:?}",
+ discr,
+ );
values.push(discr.val);
- let block = self.cfg.start_new_block();
- targets.push(block);
- block
+ targets.push(target_blocks[idx.index()]);
} else {
- *otherwise_block
- .get_or_insert_with(|| self.cfg.start_new_block())
- });
+ debug_assert_eq!(
+ target_blocks[idx.index()],
+ otherwise_block,
+ "found canididates for untested discriminant: {:?}",
+ discr,
+ );
+ }
}
- targets.push(
- otherwise_block
- .unwrap_or_else(|| self.unreachable_block()),
- );
+ targets.push(otherwise_block);
debug!("num_enum_variants: {}, tested variants: {:?}, variants: {:?}",
num_enum_variants, values, variants);
let discr_ty = adt_def.repr.discr_type().to_ty(tcx);
@@ -212,182 +221,97 @@
values: From::from(values),
targets,
});
- target_blocks
}
TestKind::SwitchInt { switch_ty, ref options, indices: _ } => {
- let (ret, terminator) = if switch_ty.sty == ty::Bool {
+ let target_blocks = make_target_blocks(self);
+ let terminator = if switch_ty.sty == ty::Bool {
assert!(options.len() > 0 && options.len() <= 2);
- let (true_bb, false_bb) = (self.cfg.start_new_block(),
- self.cfg.start_new_block());
- let ret = match options[0] {
- 1 => vec![true_bb, false_bb],
- 0 => vec![false_bb, true_bb],
- v => span_bug!(test.span, "expected boolean value but got {:?}", v)
- };
- (ret, TerminatorKind::if_(self.hir.tcx(), Operand::Copy(place.clone()),
- true_bb, false_bb))
+ if let [first_bb, second_bb] = *target_blocks {
+ let (true_bb, false_bb) = match options[0] {
+ 1 => (first_bb, second_bb),
+ 0 => (second_bb, first_bb),
+ v => span_bug!(test.span, "expected boolean value but got {:?}", v)
+ };
+ TerminatorKind::if_(
+ self.hir.tcx(),
+ Operand::Copy(place.clone()),
+ true_bb,
+ false_bb,
+ )
+ } else {
+ bug!("`TestKind::SwitchInt` on `bool` should have two targets")
+ }
} else {
- // The switch may be inexhaustive so we
- // add a catch all block
- let otherwise = self.cfg.start_new_block();
- let targets: Vec<_> =
- options.iter()
- .map(|_| self.cfg.start_new_block())
- .chain(Some(otherwise))
- .collect();
- (targets.clone(), TerminatorKind::SwitchInt {
+ // The switch may be inexhaustive so we have a catch all block
+ debug_assert_eq!(options.len() + 1, target_blocks.len());
+ TerminatorKind::SwitchInt {
discr: Operand::Copy(place.clone()),
switch_ty,
values: options.clone().into(),
- targets,
- })
+ targets: target_blocks,
+ }
};
self.cfg.terminate(block, source_info, terminator);
- ret
}
- TestKind::Eq { value, mut ty } => {
- let val = Operand::Copy(place.clone());
- let mut expect = self.literal_operand(test.span, ty, value);
- // Use `PartialEq::eq` instead of `BinOp::Eq`
- // (the binop can only handle primitives)
- let fail = self.cfg.start_new_block();
+ TestKind::Eq { value, ty } => {
if !ty.is_scalar() {
- // If we're using `b"..."` as a pattern, we need to insert an
- // unsizing coercion, as the byte string has the type `&[u8; N]`.
- //
- // We want to do this even when the scrutinee is a reference to an
- // array, so we can call `<[u8]>::eq` rather than having to find an
- // `<[u8; N]>::eq`.
- let unsize = |ty: Ty<'tcx>| match ty.sty {
- ty::Ref(region, rty, _) => match rty.sty {
- ty::Array(inner_ty, n) => Some((region, inner_ty, n)),
- _ => None,
- },
- _ => None,
- };
- let opt_ref_ty = unsize(ty);
- let opt_ref_test_ty = unsize(value.ty);
- let mut place = place.clone();
- match (opt_ref_ty, opt_ref_test_ty) {
- // nothing to do, neither is an array
- (None, None) => {},
- (Some((region, elem_ty, _)), _) |
- (None, Some((region, elem_ty, _))) => {
- let tcx = self.hir.tcx();
- // make both a slice
- ty = tcx.mk_imm_ref(region, tcx.mk_slice(elem_ty));
- if opt_ref_ty.is_some() {
- place = self.temp(ty, test.span);
- self.cfg.push_assign(
- block, source_info, &place, Rvalue::Cast(
- CastKind::Pointer(PointerCast::Unsize), val, ty
- )
- );
- }
- if opt_ref_test_ty.is_some() {
- let array = self.literal_operand(
- test.span,
- value.ty,
- value,
- );
-
- let slice = self.temp(ty, test.span);
- self.cfg.push_assign(
- block, source_info, &slice, Rvalue::Cast(
- CastKind::Pointer(PointerCast::Unsize), array, ty
- )
- );
- expect = Operand::Move(slice);
- }
- },
- }
- let eq_def_id = self.hir.tcx().lang_items().eq_trait().unwrap();
- let (mty, method) = self.hir.trait_method(eq_def_id, "eq", ty, &[ty.into()]);
-
- let re_erased = self.hir.tcx().lifetimes.re_erased;
- // take the argument by reference
- let tam = ty::TypeAndMut {
+ // Use `PartialEq::eq` instead of `BinOp::Eq`
+ // (the binop can only handle primitives)
+ self.non_scalar_compare(
+ block,
+ make_target_blocks,
+ source_info,
+ value,
+ place,
ty,
- mutbl: Mutability::MutImmutable,
- };
- let ref_ty = self.hir.tcx().mk_ref(re_erased, tam);
-
- // let lhs_ref_place = &lhs;
- let ref_rvalue = Rvalue::Ref(re_erased, BorrowKind::Shared, place);
- let lhs_ref_place = self.temp(ref_ty, test.span);
- self.cfg.push_assign(block, source_info, &lhs_ref_place, ref_rvalue);
- let val = Operand::Move(lhs_ref_place);
-
- // let rhs_place = rhs;
- let rhs_place = self.temp(ty, test.span);
- self.cfg.push_assign(block, source_info, &rhs_place, Rvalue::Use(expect));
-
- // let rhs_ref_place = &rhs_place;
- let ref_rvalue = Rvalue::Ref(re_erased, BorrowKind::Shared, rhs_place);
- let rhs_ref_place = self.temp(ref_ty, test.span);
- self.cfg.push_assign(block, source_info, &rhs_ref_place, ref_rvalue);
- let expect = Operand::Move(rhs_ref_place);
-
- let bool_ty = self.hir.bool_ty();
- let eq_result = self.temp(bool_ty, test.span);
- let eq_block = self.cfg.start_new_block();
- let cleanup = self.diverge_cleanup();
- self.cfg.terminate(block, source_info, TerminatorKind::Call {
- func: Operand::Constant(box Constant {
- span: test.span,
- ty: mty,
-
- // FIXME(#54571): This constant comes from user
- // input (a constant in a pattern). Are
- // there forms where users can add type
- // annotations here? For example, an
- // associated constant? Need to
- // experiment.
- user_ty: None,
-
- literal: method,
- }),
- args: vec![val, expect],
- destination: Some((eq_result.clone(), eq_block)),
- cleanup: Some(cleanup),
- from_hir_call: false,
- });
-
- // check the result
- let block = self.cfg.start_new_block();
- self.cfg.terminate(eq_block, source_info,
- TerminatorKind::if_(self.hir.tcx(),
- Operand::Move(eq_result),
- block, fail));
- vec![block, fail]
+ );
} else {
- let block = self.compare(block, fail, test.span, BinOp::Eq, expect, val);
- vec![block, fail]
+ if let [success, fail] = *make_target_blocks(self) {
+ let val = Operand::Copy(place.clone());
+ let expect = self.literal_operand(test.span, ty, value);
+ self.compare(block, success, fail, source_info, BinOp::Eq, expect, val);
+ } else {
+ bug!("`TestKind::Eq` should have two target blocks");
+ }
}
}
TestKind::Range(PatternRange { ref lo, ref hi, ty, ref end }) => {
+ let lower_bound_success = self.cfg.start_new_block();
+ let target_blocks = make_target_blocks(self);
+
// Test `val` by computing `lo <= val && val <= hi`, using primitive comparisons.
let lo = self.literal_operand(test.span, ty, lo);
let hi = self.literal_operand(test.span, ty, hi);
let val = Operand::Copy(place.clone());
- let fail = self.cfg.start_new_block();
- let block = self.compare(block, fail, test.span, BinOp::Le, lo, val.clone());
- let block = match *end {
- RangeEnd::Included => self.compare(block, fail, test.span, BinOp::Le, val, hi),
- RangeEnd::Excluded => self.compare(block, fail, test.span, BinOp::Lt, val, hi),
- };
-
- vec![block, fail]
+ if let [success, fail] = *target_blocks {
+ self.compare(
+ block,
+ lower_bound_success,
+ fail,
+ source_info,
+ BinOp::Le,
+ lo,
+ val.clone(),
+ );
+ let op = match *end {
+ RangeEnd::Included => BinOp::Le,
+ RangeEnd::Excluded => BinOp::Lt,
+ };
+ self.compare(lower_bound_success, success, fail, source_info, op, val, hi);
+ } else {
+ bug!("`TestKind::Range` should have two target blocks");
+ }
}
TestKind::Len { len, op } => {
- let (usize_ty, bool_ty) = (self.hir.usize_ty(), self.hir.bool_ty());
- let (actual, result) = (self.temp(usize_ty, test.span),
- self.temp(bool_ty, test.span));
+ let target_blocks = make_target_blocks(self);
+
+ let usize_ty = self.hir.usize_ty();
+ let actual = self.temp(usize_ty, test.span);
// actual = len(place)
self.cfg.push_assign(block, source_info,
@@ -396,44 +320,165 @@
// expected = <N>
let expected = self.push_usize(block, source_info, len);
- // result = actual == expected OR result = actual < expected
- self.cfg.push_assign(block, source_info, &result,
- Rvalue::BinaryOp(op,
- Operand::Move(actual),
- Operand::Move(expected)));
-
- // branch based on result
- let (false_bb, true_bb) = (self.cfg.start_new_block(),
- self.cfg.start_new_block());
- self.cfg.terminate(block, source_info,
- TerminatorKind::if_(self.hir.tcx(), Operand::Move(result),
- true_bb, false_bb));
- vec![true_bb, false_bb]
+ if let [true_bb, false_bb] = *target_blocks {
+ // result = actual == expected OR result = actual < expected
+ // branch based on result
+ self.compare(
+ block,
+ true_bb,
+ false_bb,
+ source_info,
+ op,
+ Operand::Move(actual),
+ Operand::Move(expected),
+ );
+ } else {
+ bug!("`TestKind::Len` should have two target blocks");
+ }
}
}
}
- fn compare(&mut self,
- block: BasicBlock,
- fail_block: BasicBlock,
- span: Span,
- op: BinOp,
- left: Operand<'tcx>,
- right: Operand<'tcx>) -> BasicBlock {
+ /// Compare using the provided built-in comparison operator
+ fn compare(
+ &mut self,
+ block: BasicBlock,
+ success_block: BasicBlock,
+ fail_block: BasicBlock,
+ source_info: SourceInfo,
+ op: BinOp,
+ left: Operand<'tcx>,
+ right: Operand<'tcx>,
+ ) {
let bool_ty = self.hir.bool_ty();
- let result = self.temp(bool_ty, span);
+ let result = self.temp(bool_ty, source_info.span);
// result = op(left, right)
- let source_info = self.source_info(span);
- self.cfg.push_assign(block, source_info, &result,
- Rvalue::BinaryOp(op, left, right));
+ self.cfg.push_assign(
+ block,
+ source_info,
+ &result,
+ Rvalue::BinaryOp(op, left, right),
+ );
// branch based on result
- let target_block = self.cfg.start_new_block();
- self.cfg.terminate(block, source_info,
- TerminatorKind::if_(self.hir.tcx(), Operand::Move(result),
- target_block, fail_block));
- target_block
+ self.cfg.terminate(
+ block,
+ source_info,
+ TerminatorKind::if_(
+ self.hir.tcx(),
+ Operand::Move(result),
+ success_block,
+ fail_block,
+ ),
+ );
+ }
+
+ /// Compare two `&T` values using `<T as std::cmp::PartialEq>::eq`
+ fn non_scalar_compare(
+ &mut self,
+ block: BasicBlock,
+ make_target_blocks: impl FnOnce(&mut Self) -> Vec<BasicBlock>,
+ source_info: SourceInfo,
+ value: &'tcx ty::Const<'tcx>,
+ place: &Place<'tcx>,
+ mut ty: Ty<'tcx>,
+ ) {
+ use rustc::middle::lang_items::EqTraitLangItem;
+
+ let mut expect = self.literal_operand(source_info.span, value.ty, value);
+ let mut val = Operand::Copy(place.clone());
+
+ // If we're using `b"..."` as a pattern, we need to insert an
+ // unsizing coercion, as the byte string has the type `&[u8; N]`.
+ //
+ // We want to do this even when the scrutinee is a reference to an
+ // array, so we can call `<[u8]>::eq` rather than having to find an
+ // `<[u8; N]>::eq`.
+ let unsize = |ty: Ty<'tcx>| match ty.sty {
+ ty::Ref(region, rty, _) => match rty.sty {
+ ty::Array(inner_ty, n) => Some((region, inner_ty, n)),
+ _ => None,
+ },
+ _ => None,
+ };
+ let opt_ref_ty = unsize(ty);
+ let opt_ref_test_ty = unsize(value.ty);
+ match (opt_ref_ty, opt_ref_test_ty) {
+ // nothing to do, neither is an array
+ (None, None) => {},
+ (Some((region, elem_ty, _)), _) |
+ (None, Some((region, elem_ty, _))) => {
+ let tcx = self.hir.tcx();
+ // make both a slice
+ ty = tcx.mk_imm_ref(region, tcx.mk_slice(elem_ty));
+ if opt_ref_ty.is_some() {
+ let temp = self.temp(ty, source_info.span);
+ self.cfg.push_assign(
+ block, source_info, &temp, Rvalue::Cast(
+ CastKind::Pointer(PointerCast::Unsize), val, ty
+ )
+ );
+ val = Operand::Move(temp);
+ }
+ if opt_ref_test_ty.is_some() {
+ let slice = self.temp(ty, source_info.span);
+ self.cfg.push_assign(
+ block, source_info, &slice, Rvalue::Cast(
+ CastKind::Pointer(PointerCast::Unsize), expect, ty
+ )
+ );
+ expect = Operand::Move(slice);
+ }
+ },
+ }
+
+ let deref_ty = match ty.sty {
+ ty::Ref(_, deref_ty, _) => deref_ty,
+ _ => bug!("non_scalar_compare called on non-reference type: {}", ty),
+ };
+
+ let eq_def_id = self.hir.tcx().require_lang_item(EqTraitLangItem);
+ let (mty, method) = self.hir.trait_method(eq_def_id, sym::eq, deref_ty, &[deref_ty.into()]);
+
+ let bool_ty = self.hir.bool_ty();
+ let eq_result = self.temp(bool_ty, source_info.span);
+ let eq_block = self.cfg.start_new_block();
+ let cleanup = self.diverge_cleanup();
+ self.cfg.terminate(block, source_info, TerminatorKind::Call {
+ func: Operand::Constant(box Constant {
+ span: source_info.span,
+ ty: mty,
+
+ // FIXME(#54571): This constant comes from user input (a
+ // constant in a pattern). Are there forms where users can add
+ // type annotations here? For example, an associated constant?
+ // Need to experiment.
+ user_ty: None,
+
+ literal: method,
+ }),
+ args: vec![val, expect],
+ destination: Some((eq_result.clone(), eq_block)),
+ cleanup: Some(cleanup),
+ from_hir_call: false,
+ });
+
+ if let [success_block, fail_block] = *make_target_blocks(self) {
+ // check the result
+ self.cfg.terminate(
+ eq_block,
+ source_info,
+ TerminatorKind::if_(
+ self.hir.tcx(),
+ Operand::Move(eq_result),
+ success_block,
+ fail_block,
+ ),
+ );
+ } else {
+ bug!("`TestKind::Eq` should have two target blocks")
+ }
}
/// Given that we are performing `test` against `test_place`, this job
@@ -755,6 +800,32 @@
}
}
+impl Test<'_> {
+ pub(super) fn targets(&self) -> usize {
+ match self.kind {
+ TestKind::Eq { .. } | TestKind::Range(_) | TestKind::Len { .. } => {
+ 2
+ }
+ TestKind::Switch { adt_def, .. } => {
+ // While the switch that we generate doesn't test for all
+ // variants, we have a target for each variant and the
+ // otherwise case, and we make sure that all of the cases not
+ // specified have the same block.
+ adt_def.variants.len() + 1
+ }
+ TestKind::SwitchInt { switch_ty, ref options, .. } => {
+ if switch_ty.is_bool() {
+ // `bool` is special cased in `perform_test` to always
+ // branch to two blocks.
+ 2
+ } else {
+ options.len() + 1
+ }
+ }
+ }
+ }
+}
+
fn is_switch_ty<'tcx>(ty: Ty<'tcx>) -> bool {
ty.is_integral() || ty.is_char() || ty.is_bool()
}
diff --git a/src/librustc_mir/build/matches/util.rs b/src/librustc_mir/build/matches/util.rs
index 3b90ff7..011b3a8 100644
--- a/src/librustc_mir/build/matches/util.rs
+++ b/src/librustc_mir/build/matches/util.rs
@@ -5,7 +5,7 @@
use std::u32;
use std::convert::TryInto;
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
pub fn field_match_pairs<'pat>(&mut self,
place: Place<'tcx>,
subpatterns: &'pat [FieldPattern<'tcx>])
@@ -65,6 +65,39 @@
})
);
}
+
+ /// Creates a false edge to `imaginary_target` and a real edge to
+ /// real_target. If `imaginary_target` is none, or is the same as the real
+ /// target, a Goto is generated instead to simplify the generated MIR.
+ pub fn false_edges(
+ &mut self,
+ from_block: BasicBlock,
+ real_target: BasicBlock,
+ imaginary_target: Option<BasicBlock>,
+ source_info: SourceInfo,
+ ) {
+ match imaginary_target {
+ Some(target) if target != real_target => {
+ self.cfg.terminate(
+ from_block,
+ source_info,
+ TerminatorKind::FalseEdges {
+ real_target,
+ imaginary_target: target,
+ },
+ );
+ }
+ _ => {
+ self.cfg.terminate(
+ from_block,
+ source_info,
+ TerminatorKind::Goto {
+ target: real_target
+ }
+ );
+ }
+ }
+ }
}
impl<'pat, 'tcx> MatchPair<'pat, 'tcx> {
diff --git a/src/librustc_mir/build/misc.rs b/src/librustc_mir/build/misc.rs
index daf59d7..ad891b1 100644
--- a/src/librustc_mir/build/misc.rs
+++ b/src/librustc_mir/build/misc.rs
@@ -8,7 +8,7 @@
use rustc::mir::*;
use syntax_pos::{Span, DUMMY_SP};
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Adds a new temporary value of type `ty` storing the result of
/// evaluating `expr`.
///
diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs
index 65ece3f..a0e45ca 100644
--- a/src/librustc_mir/build/mod.rs
+++ b/src/librustc_mir/build/mod.rs
@@ -9,13 +9,10 @@
use rustc::hir::def_id::DefId;
use rustc::middle::region;
use rustc::mir::*;
-use rustc::mir::visit::{MutVisitor, TyContext};
use rustc::ty::{self, Ty, TyCtxt};
-use rustc::ty::subst::SubstsRef;
use rustc::util::nodemap::HirIdMap;
use rustc_target::spec::PanicStrategy;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
-use std::mem;
use std::u32;
use rustc_target::spec::abi::Abi;
use syntax::attr::{self, UnwindAttr};
@@ -25,7 +22,7 @@
use super::lints;
/// Construct the MIR for a given `DefId`.
-pub fn mir_build<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Body<'tcx> {
+pub fn mir_build<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Body<'tcx> {
let id = tcx.hir().as_local_hir_id(def_id).unwrap();
// Figure out what primary body this item has.
@@ -58,15 +55,15 @@
(*body_id, ty.span)
}
Node::AnonConst(hir::AnonConst { body, hir_id, .. }) => {
- (*body, tcx.hir().span_by_hir_id(*hir_id))
+ (*body, tcx.hir().span(*hir_id))
}
- _ => span_bug!(tcx.hir().span_by_hir_id(id), "can't build MIR for {:?}", def_id),
+ _ => span_bug!(tcx.hir().span(id), "can't build MIR for {:?}", def_id),
};
tcx.infer_ctxt().enter(|infcx| {
let cx = Cx::new(&infcx, id);
- let mut body = if cx.tables().tainted_by_errors {
+ let body = if cx.tables().tainted_by_errors {
build::construct_error(cx, body_id)
} else if cx.body_owner_kind.is_fn_or_closure() {
// fetch the fully liberated fn signature (that is, all bound
@@ -104,9 +101,9 @@
let owner_id = tcx.hir().body_owner(body_id);
let opt_ty_info;
let self_arg;
- if let Some(ref fn_decl) = tcx.hir().fn_decl(owner_id) {
+ if let Some(ref fn_decl) = tcx.hir().fn_decl_by_hir_id(owner_id) {
let ty_hir_id = fn_decl.inputs[index].hir_id;
- let ty_span = tcx.hir().span_by_hir_id(ty_hir_id);
+ let ty_span = tcx.hir().span(ty_hir_id);
opt_ty_info = Some(ty_span);
self_arg = if index == 0 && fn_decl.implicit_self.has_implicit_self() {
match fn_decl.implicit_self {
@@ -134,7 +131,7 @@
ty::Generator(gen_def_id, gen_substs, ..) =>
gen_substs.sig(gen_def_id, tcx),
_ =>
- span_bug!(tcx.hir().span_by_hir_id(id),
+ span_bug!(tcx.hir().span(id),
"generator w/o generator type: {:?}", ty),
};
(Some(gen_sig.yield_ty), gen_sig.return_ty)
@@ -162,16 +159,6 @@
build::construct_const(cx, body_id, return_ty, return_ty_span)
};
- // Convert the `mir::Body` to global types.
- let mut globalizer = GlobalizeMir {
- tcx,
- span: body.span
- };
- globalizer.visit_body(&mut body);
- let body = unsafe {
- mem::transmute::<Body<'_>, Body<'tcx>>(body)
- };
-
mir_util::dump_mir(tcx, None, "mir_map", &0,
MirSource::item(def_id), &body, |_, _| Ok(()) );
@@ -181,63 +168,14 @@
})
}
-/// A pass to lift all the types and substitutions in a MIR
-/// to the global tcx. Sadly, we don't have a "folder" that
-/// can change `'tcx` so we have to transmute afterwards.
-struct GlobalizeMir<'a, 'gcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'gcx>,
- span: Span
-}
-
-impl<'a, 'gcx: 'tcx, 'tcx> MutVisitor<'tcx> for GlobalizeMir<'a, 'gcx> {
- fn visit_ty(&mut self, ty: &mut Ty<'tcx>, _: TyContext) {
- if let Some(lifted) = self.tcx.lift(ty) {
- *ty = lifted;
- } else {
- span_bug!(self.span,
- "found type `{:?}` with inference types/regions in MIR",
- ty);
- }
- }
-
- fn visit_region(&mut self, region: &mut ty::Region<'tcx>, _: Location) {
- if let Some(lifted) = self.tcx.lift(region) {
- *region = lifted;
- } else {
- span_bug!(self.span,
- "found region `{:?}` with inference types/regions in MIR",
- region);
- }
- }
-
- fn visit_const(&mut self, constant: &mut &'tcx ty::Const<'tcx>, _: Location) {
- if let Some(lifted) = self.tcx.lift(constant) {
- *constant = lifted;
- } else {
- span_bug!(self.span,
- "found constant `{:?}` with inference types/regions in MIR",
- constant);
- }
- }
-
- fn visit_substs(&mut self, substs: &mut SubstsRef<'tcx>, _: Location) {
- if let Some(lifted) = self.tcx.lift(substs) {
- *substs = lifted;
- } else {
- span_bug!(self.span,
- "found substs `{:?}` with inference types/regions in MIR",
- substs);
- }
- }
-}
-
///////////////////////////////////////////////////////////////////////////
// BuildMir -- walks a crate, looking for fn items and methods to build MIR from
-fn liberated_closure_env_ty<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- closure_expr_id: hir::HirId,
- body_id: hir::BodyId)
- -> Ty<'tcx> {
+fn liberated_closure_env_ty<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ closure_expr_id: hir::HirId,
+ body_id: hir::BodyId,
+) -> Ty<'tcx> {
let closure_ty = tcx.body_tables(body_id).node_type(closure_expr_id);
let (closure_def_id, closure_substs) = match closure_ty.sty {
@@ -303,8 +241,8 @@
#[derive(Debug)]
struct BlockContext(Vec<BlockFrame>);
-struct Builder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- hir: Cx<'a, 'gcx, 'tcx>,
+struct Builder<'a, 'tcx: 'a> {
+ hir: Cx<'a, 'tcx>,
cfg: CFG<'tcx>,
fn_span: Span,
@@ -369,7 +307,7 @@
cached_unreachable_block: Option<BasicBlock>,
}
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
fn is_bound_var_in_guard(&self, id: hir::HirId) -> bool {
self.guard_context.iter().any(|frame| frame.locals.iter().any(|local| local.id == id))
}
@@ -551,10 +489,7 @@
};
}
-fn should_abort_on_panic<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- fn_def_id: DefId,
- abi: Abi)
- -> bool {
+fn should_abort_on_panic<'tcx>(tcx: TyCtxt<'tcx>, fn_def_id: DefId, abi: Abi) -> bool {
// Not callable from C, so we can safely unwind through these
if abi == Abi::Rust || abi == Abi::RustCall { return false; }
@@ -580,25 +515,27 @@
///////////////////////////////////////////////////////////////////////////
/// the main entry point for building MIR for a function
-struct ArgInfo<'gcx>(Ty<'gcx>, Option<Span>, Option<&'gcx hir::Pat>, Option<ImplicitSelfKind>);
+struct ArgInfo<'tcx>(Ty<'tcx>, Option<Span>, Option<&'tcx hir::Pat>, Option<ImplicitSelfKind>);
-fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>,
- fn_id: hir::HirId,
- arguments: A,
- safety: Safety,
- abi: Abi,
- return_ty: Ty<'gcx>,
- yield_ty: Option<Ty<'gcx>>,
- return_ty_span: Span,
- body: &'gcx hir::Body)
- -> Body<'tcx>
- where A: Iterator<Item=ArgInfo<'gcx>>
+fn construct_fn<'a, 'tcx, A>(
+ hir: Cx<'a, 'tcx>,
+ fn_id: hir::HirId,
+ arguments: A,
+ safety: Safety,
+ abi: Abi,
+ return_ty: Ty<'tcx>,
+ yield_ty: Option<Ty<'tcx>>,
+ return_ty_span: Span,
+ body: &'tcx hir::Body,
+) -> Body<'tcx>
+where
+ A: Iterator<Item=ArgInfo<'tcx>>
{
let arguments: Vec<_> = arguments.collect();
let tcx = hir.tcx();
let tcx_hir = tcx.hir();
- let span = tcx_hir.span_by_hir_id(fn_id);
+ let span = tcx_hir.span(fn_id);
let hir_tables = hir.tables();
let fn_def_id = tcx_hir.local_def_id_from_hir_id(fn_id);
@@ -705,8 +642,8 @@
body
}
-fn construct_const<'a, 'gcx, 'tcx>(
- hir: Cx<'a, 'gcx, 'tcx>,
+fn construct_const<'a, 'tcx>(
+ hir: Cx<'a, 'tcx>,
body_id: hir::BodyId,
const_ty: Ty<'tcx>,
const_ty_span: Span,
@@ -747,9 +684,10 @@
builder.finish(None)
}
-fn construct_error<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>,
- body_id: hir::BodyId)
- -> Body<'tcx> {
+fn construct_error<'a, 'tcx>(
+ hir: Cx<'a, 'tcx>,
+ body_id: hir::BodyId
+) -> Body<'tcx> {
let owner_id = hir.tcx().hir().body_owner(body_id);
let span = hir.tcx().hir().span(owner_id);
let ty = hir.tcx().types.err;
@@ -759,8 +697,8 @@
builder.finish(None)
}
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
- fn new(hir: Cx<'a, 'gcx, 'tcx>,
+impl<'a, 'tcx> Builder<'a, 'tcx> {
+ fn new(hir: Cx<'a, 'tcx>,
span: Span,
arg_count: usize,
safety: Safety,
@@ -769,7 +707,7 @@
__upvar_debuginfo_codegen_only_do_not_use: Vec<UpvarDebuginfo>,
upvar_mutbls: Vec<Mutability>,
is_generator: bool)
- -> Builder<'a, 'gcx, 'tcx> {
+ -> Builder<'a, 'tcx> {
let lint_level = LintLevel::Explicit(hir.root_lint_level);
let mut builder = Builder {
hir,
@@ -835,9 +773,9 @@
fn args_and_body(&mut self,
mut block: BasicBlock,
- arguments: &[ArgInfo<'gcx>],
+ arguments: &[ArgInfo<'tcx>],
argument_scope: region::Scope,
- ast_body: &'gcx hir::Expr)
+ ast_body: &'tcx hir::Expr)
-> BlockAnd<()>
{
// Allocate locals for the function arguments
@@ -954,17 +892,6 @@
}
}
}
-
- fn unreachable_block(&mut self) -> BasicBlock {
- match self.cached_unreachable_block {
- Some(ub) => ub,
- None => {
- let ub = self.cfg.start_new_block();
- self.cached_unreachable_block = Some(ub);
- ub
- }
- }
- }
}
///////////////////////////////////////////////////////////////////////////
diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs
index b697b98..db58a70 100644
--- a/src/librustc_mir/build/scope.rs
+++ b/src/librustc_mir/build/scope.rs
@@ -257,7 +257,7 @@
}
}
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Builder<'a, 'tcx> {
// Adding and removing scopes
// ==========================
/// Start a breakable scope, which tracks where `continue` and `break`
@@ -269,7 +269,7 @@
break_block: BasicBlock,
break_destination: Place<'tcx>,
f: F) -> R
- where F: FnOnce(&mut Builder<'a, 'gcx, 'tcx>) -> R
+ where F: FnOnce(&mut Builder<'a, 'tcx>) -> R
{
let region_scope = self.topmost_scope();
let scope = BreakableScope {
@@ -289,7 +289,7 @@
opt_scope: Option<(region::Scope, SourceInfo)>,
f: F)
-> BlockAnd<R>
- where F: FnOnce(&mut Builder<'a, 'gcx, 'tcx>) -> BlockAnd<R>
+ where F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<R>
{
debug!("in_opt_scope(opt_scope={:?})", opt_scope);
if let Some(region_scope) = opt_scope { self.push_scope(region_scope); }
@@ -309,7 +309,7 @@
lint_level: LintLevel,
f: F)
-> BlockAnd<R>
- where F: FnOnce(&mut Builder<'a, 'gcx, 'tcx>) -> BlockAnd<R>
+ where F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<R>
{
debug!("in_scope(region_scope={:?})", region_scope);
let source_scope = self.source_scope;
diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs
index b938e86..7d05e7b 100644
--- a/src/librustc_mir/const_eval.rs
+++ b/src/librustc_mir/const_eval.rs
@@ -43,17 +43,17 @@
/// that inform us about the generic bounds of the constant. E.g., using an associated constant
/// of a function's generic parameter will require knowledge about the bounds on the generic
/// parameter. These bounds are passed to `mk_eval_cx` via the `ParamEnv` argument.
-pub(crate) fn mk_eval_cx<'a, 'mir, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub(crate) fn mk_eval_cx<'mir, 'tcx>(
+ tcx: TyCtxt<'tcx>,
span: Span,
param_env: ty::ParamEnv<'tcx>,
-) -> CompileTimeEvalContext<'a, 'mir, 'tcx> {
+) -> CompileTimeEvalContext<'mir, 'tcx> {
debug!("mk_eval_cx: {:?}", param_env);
InterpretCx::new(tcx.at(span), param_env, CompileTimeInterpreter::new())
}
-pub(crate) fn eval_promoted<'a, 'mir, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub(crate) fn eval_promoted<'mir, 'tcx>(
+ tcx: TyCtxt<'tcx>,
cid: GlobalId<'tcx>,
body: &'mir mir::Body<'tcx>,
param_env: ty::ParamEnv<'tcx>,
@@ -64,7 +64,7 @@
}
fn mplace_to_const<'tcx>(
- ecx: &CompileTimeEvalContext<'_, '_, 'tcx>,
+ ecx: &CompileTimeEvalContext<'_, 'tcx>,
mplace: MPlaceTy<'tcx>,
) -> &'tcx ty::Const<'tcx> {
let MemPlace { ptr, align, meta } = *mplace;
@@ -84,7 +84,7 @@
}
fn op_to_const<'tcx>(
- ecx: &CompileTimeEvalContext<'_, '_, 'tcx>,
+ ecx: &CompileTimeEvalContext<'_, 'tcx>,
op: OpTy<'tcx>,
) -> &'tcx ty::Const<'tcx> {
// We do not normalize just any data. Only non-union scalars and slices.
@@ -137,7 +137,7 @@
// Returns a pointer to where the result lives
fn eval_body_using_ecx<'mir, 'tcx>(
- ecx: &mut CompileTimeEvalContext<'_, 'mir, 'tcx>,
+ ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
cid: GlobalId<'tcx>,
body: &'mir mir::Body<'tcx>,
param_env: ty::ParamEnv<'tcx>,
@@ -216,7 +216,7 @@
}
// Extra machine state for CTFE, and the Machine instance
-pub struct CompileTimeInterpreter<'a, 'mir, 'tcx: 'a+'mir> {
+pub struct CompileTimeInterpreter<'mir, 'tcx> {
/// When this value is negative, it indicates the number of interpreter
/// steps *until* the loop detector is enabled. When it is positive, it is
/// the number of steps after the detector has been enabled modulo the loop
@@ -224,10 +224,10 @@
pub(super) steps_since_detector_enabled: isize,
/// Extra state to detect loops.
- pub(super) loop_detector: snapshot::InfiniteLoopDetector<'a, 'mir, 'tcx>,
+ pub(super) loop_detector: snapshot::InfiniteLoopDetector<'mir, 'tcx>,
}
-impl<'a, 'mir, 'tcx> CompileTimeInterpreter<'a, 'mir, 'tcx> {
+impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
fn new() -> Self {
CompileTimeInterpreter {
loop_detector: Default::default(),
@@ -297,8 +297,8 @@
}
}
-type CompileTimeEvalContext<'a, 'mir, 'tcx> =
- InterpretCx<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>;
+type CompileTimeEvalContext<'mir, 'tcx> =
+ InterpretCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;
impl interpret::MayLeak for ! {
#[inline(always)]
@@ -308,9 +308,7 @@
}
}
-impl<'a, 'mir, 'tcx> interpret::Machine<'a, 'mir, 'tcx>
- for CompileTimeInterpreter<'a, 'mir, 'tcx>
-{
+impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
type MemoryKinds = !;
type PointerTag = ();
@@ -323,12 +321,12 @@
const STATIC_KIND: Option<!> = None; // no copying of statics allowed
#[inline(always)]
- fn enforce_validity(_ecx: &InterpretCx<'a, 'mir, 'tcx, Self>) -> bool {
+ fn enforce_validity(_ecx: &InterpretCx<'mir, 'tcx, Self>) -> bool {
false // for now, we don't enforce validity
}
fn find_fn(
- ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
+ ecx: &mut InterpretCx<'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx>],
dest: Option<PlaceTy<'tcx>>,
@@ -368,7 +366,7 @@
}
fn call_intrinsic(
- ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
+ ecx: &mut InterpretCx<'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx>],
dest: PlaceTy<'tcx>,
@@ -384,7 +382,7 @@
}
fn ptr_op(
- _ecx: &InterpretCx<'a, 'mir, 'tcx, Self>,
+ _ecx: &InterpretCx<'mir, 'tcx, Self>,
_bin_op: mir::BinOp,
_left: ImmTy<'tcx>,
_right: ImmTy<'tcx>,
@@ -396,7 +394,7 @@
fn find_foreign_static(
_def_id: DefId,
- _tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
+ _tcx: TyCtxtAt<'tcx>,
) -> InterpResult<'tcx, Cow<'tcx, Allocation<Self::PointerTag>>> {
err!(ReadForeignStatic)
}
@@ -421,7 +419,7 @@
}
fn box_alloc(
- _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
+ _ecx: &mut InterpretCx<'mir, 'tcx, Self>,
_dest: PlaceTy<'tcx>,
) -> InterpResult<'tcx> {
Err(
@@ -429,7 +427,7 @@
)
}
- fn before_terminator(ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>) -> InterpResult<'tcx> {
+ fn before_terminator(ecx: &mut InterpretCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
{
let steps = &mut ecx.machine.steps_since_detector_enabled;
@@ -454,18 +452,13 @@
}
#[inline(always)]
- fn stack_push(
- _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
- ) -> InterpResult<'tcx> {
+ fn stack_push(_ecx: &mut InterpretCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
Ok(())
}
/// Called immediately before a stack frame gets popped.
#[inline(always)]
- fn stack_pop(
- _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
- _extra: (),
- ) -> InterpResult<'tcx> {
+ fn stack_pop(_ecx: &mut InterpretCx<'mir, 'tcx, Self>, _extra: ()) -> InterpResult<'tcx> {
Ok(())
}
}
@@ -473,8 +466,8 @@
/// Extracts a field of a (variant of a) const.
// this function uses `unwrap` copiously, because an already validated constant must have valid
// fields and can thus never fail outside of compiler bugs
-pub fn const_field<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub fn const_field<'tcx>(
+ tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
variant: Option<VariantIdx>,
field: mir::Field,
@@ -498,8 +491,8 @@
// this function uses `unwrap` copiously, because an already validated constant must have valid
// fields and can thus never fail outside of compiler bugs
-pub fn const_variant_index<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub fn const_variant_index<'tcx>(
+ tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
val: &'tcx ty::Const<'tcx>,
) -> VariantIdx {
@@ -509,17 +502,17 @@
ecx.read_discriminant(op).unwrap().1
}
-pub fn error_to_const_error<'a, 'mir, 'tcx>(
- ecx: &InterpretCx<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
- mut error: InterpErrorInfo<'tcx>
+pub fn error_to_const_error<'mir, 'tcx>(
+ ecx: &InterpretCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>,
+ mut error: InterpErrorInfo<'tcx>,
) -> ConstEvalErr<'tcx> {
error.print_backtrace();
let stacktrace = ecx.generate_stacktrace(None);
ConstEvalErr { error: error.kind, stacktrace, span: ecx.tcx.span }
}
-fn validate_and_turn_into_const<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn validate_and_turn_into_const<'tcx>(
+ tcx: TyCtxt<'tcx>,
constant: RawConst<'tcx>,
key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
) -> ::rustc::mir::interpret::ConstEvalResult<'tcx> {
@@ -561,8 +554,8 @@
})
}
-pub fn const_eval_provider<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub fn const_eval_provider<'tcx>(
+ tcx: TyCtxt<'tcx>,
key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
) -> ::rustc::mir::interpret::ConstEvalResult<'tcx> {
// see comment in const_eval_provider for what we're doing here
@@ -585,8 +578,8 @@
})
}
-pub fn const_eval_raw_provider<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub fn const_eval_raw_provider<'tcx>(
+ tcx: TyCtxt<'tcx>,
key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
) -> ::rustc::mir::interpret::ConstEvalRawResult<'tcx> {
// Because the constant is computed twice (once per value of `Reveal`), we are at risk of
diff --git a/src/librustc_mir/dataflow/drop_flag_effects.rs b/src/librustc_mir/dataflow/drop_flag_effects.rs
index b77fdcd..e8a3247 100644
--- a/src/librustc_mir/dataflow/drop_flag_effects.rs
+++ b/src/librustc_mir/dataflow/drop_flag_effects.rs
@@ -46,9 +46,11 @@
/// is no need to maintain separate drop flags to track such state.
//
// FIXME: we have to do something for moving slice patterns.
-fn place_contents_drop_state_cannot_differ<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- body: &Body<'tcx>,
- place: &mir::Place<'tcx>) -> bool {
+fn place_contents_drop_state_cannot_differ<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ body: &Body<'tcx>,
+ place: &mir::Place<'tcx>,
+) -> bool {
let ty = place.ty(body, tcx).ty;
match ty.sty {
ty::Array(..) => {
@@ -72,13 +74,14 @@
}
}
-pub(crate) fn on_lookup_result_bits<'a, 'gcx, 'tcx, F>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub(crate) fn on_lookup_result_bits<'tcx, F>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
move_data: &MoveData<'tcx>,
lookup_result: LookupResult,
- each_child: F)
- where F: FnMut(MovePathIndex)
+ each_child: F,
+) where
+ F: FnMut(MovePathIndex),
{
match lookup_result {
LookupResult::Parent(..) => {
@@ -90,31 +93,33 @@
}
}
-pub(crate) fn on_all_children_bits<'a, 'gcx, 'tcx, F>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub(crate) fn on_all_children_bits<'tcx, F>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
move_data: &MoveData<'tcx>,
move_path_index: MovePathIndex,
- mut each_child: F)
- where F: FnMut(MovePathIndex)
+ mut each_child: F,
+) where
+ F: FnMut(MovePathIndex),
{
- fn is_terminal_path<'a, 'gcx, 'tcx>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ fn is_terminal_path<'tcx>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
move_data: &MoveData<'tcx>,
- path: MovePathIndex) -> bool
- {
+ path: MovePathIndex,
+ ) -> bool {
place_contents_drop_state_cannot_differ(
tcx, body, &move_data.move_paths[path].place)
}
- fn on_all_children_bits<'a, 'gcx, 'tcx, F>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ fn on_all_children_bits<'tcx, F>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
move_data: &MoveData<'tcx>,
move_path_index: MovePathIndex,
- each_child: &mut F)
- where F: FnMut(MovePathIndex)
+ each_child: &mut F,
+ ) where
+ F: FnMut(MovePathIndex),
{
each_child(move_path_index);
@@ -131,13 +136,14 @@
on_all_children_bits(tcx, body, move_data, move_path_index, &mut each_child);
}
-pub(crate) fn on_all_drop_children_bits<'a, 'gcx, 'tcx, F>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub(crate) fn on_all_drop_children_bits<'tcx, F>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
- ctxt: &MoveDataParamEnv<'gcx, 'tcx>,
+ ctxt: &MoveDataParamEnv<'tcx>,
path: MovePathIndex,
- mut each_child: F)
- where F: FnMut(MovePathIndex)
+ mut each_child: F,
+) where
+ F: FnMut(MovePathIndex),
{
on_all_children_bits(tcx, body, &ctxt.move_data, path, |child| {
let place = &ctxt.move_data.move_paths[path].place;
@@ -154,12 +160,13 @@
})
}
-pub(crate) fn drop_flag_effects_for_function_entry<'a, 'gcx, 'tcx, F>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub(crate) fn drop_flag_effects_for_function_entry<'tcx, F>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
- ctxt: &MoveDataParamEnv<'gcx, 'tcx>,
- mut callback: F)
- where F: FnMut(MovePathIndex, DropFlagState)
+ ctxt: &MoveDataParamEnv<'tcx>,
+ mut callback: F,
+) where
+ F: FnMut(MovePathIndex, DropFlagState),
{
let move_data = &ctxt.move_data;
for arg in body.args_iter() {
@@ -171,13 +178,14 @@
}
}
-pub(crate) fn drop_flag_effects_for_location<'a, 'gcx, 'tcx, F>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub(crate) fn drop_flag_effects_for_location<'tcx, F>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
- ctxt: &MoveDataParamEnv<'gcx, 'tcx>,
+ ctxt: &MoveDataParamEnv<'tcx>,
loc: Location,
- mut callback: F)
- where F: FnMut(MovePathIndex, DropFlagState)
+ mut callback: F,
+) where
+ F: FnMut(MovePathIndex, DropFlagState),
{
let move_data = &ctxt.move_data;
debug!("drop_flag_effects_for_location({:?})", loc);
@@ -203,13 +211,14 @@
);
}
-pub(crate) fn for_location_inits<'a, 'gcx, 'tcx, F>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub(crate) fn for_location_inits<'tcx, F>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
move_data: &MoveData<'tcx>,
loc: Location,
- mut callback: F)
- where F: FnMut(MovePathIndex)
+ mut callback: F,
+) where
+ F: FnMut(MovePathIndex),
{
for ii in &move_data.init_loc_map[loc] {
let init = move_data.inits[*ii];
diff --git a/src/librustc_mir/dataflow/impls/borrows.rs b/src/librustc_mir/dataflow/impls/borrows.rs
index eedb936..ba1a22c 100644
--- a/src/librustc_mir/dataflow/impls/borrows.rs
+++ b/src/librustc_mir/dataflow/impls/borrows.rs
@@ -29,8 +29,8 @@
/// `BorrowIndex`, and maps each such index to a `BorrowData`
/// describing the borrow. These indexes are used for representing the
/// borrows in compact bitvectors.
-pub struct Borrows<'a, 'gcx: 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub struct Borrows<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
borrow_set: Rc<BorrowSet<'tcx>>,
@@ -133,9 +133,9 @@
}
}
-impl<'a, 'gcx, 'tcx> Borrows<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Borrows<'a, 'tcx> {
crate fn new(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,
borrow_set: &Rc<BorrowSet<'tcx>>,
@@ -234,7 +234,7 @@
}
}
-impl<'a, 'gcx, 'tcx> BitDenotation<'tcx> for Borrows<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> BitDenotation<'tcx> for Borrows<'a, 'tcx> {
type Idx = BorrowIndex;
fn name() -> &'static str { "borrows" }
fn bits_per_block(&self) -> usize {
@@ -330,14 +330,14 @@
}
}
-impl<'a, 'gcx, 'tcx> BitSetOperator for Borrows<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> BitSetOperator for Borrows<'a, 'tcx> {
#[inline]
fn join<T: Idx>(&self, inout_set: &mut BitSet<T>, in_set: &BitSet<T>) -> bool {
inout_set.union(in_set) // "maybe" means we union effects of both preds
}
}
-impl<'a, 'gcx, 'tcx> InitialFlow for Borrows<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InitialFlow for Borrows<'a, 'tcx> {
#[inline]
fn bottom_value() -> bool {
false // bottom = nothing is reserved or activated yet
diff --git a/src/librustc_mir/dataflow/impls/mod.rs b/src/librustc_mir/dataflow/impls/mod.rs
index 55ef861..50d9bbf 100644
--- a/src/librustc_mir/dataflow/impls/mod.rs
+++ b/src/librustc_mir/dataflow/impls/mod.rs
@@ -63,23 +63,19 @@
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeUninitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
-pub struct MaybeInitializedPlaces<'a, 'gcx: 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub struct MaybeInitializedPlaces<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
- mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>,
+ mdpe: &'a MoveDataParamEnv<'tcx>,
}
-impl<'a, 'gcx: 'tcx, 'tcx> MaybeInitializedPlaces<'a, 'gcx, 'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- body: &'a Body<'tcx>,
- mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>)
- -> Self
- {
+impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
MaybeInitializedPlaces { tcx: tcx, body: body, mdpe: mdpe }
}
}
-impl<'a, 'gcx, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
}
@@ -118,23 +114,19 @@
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeInitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
-pub struct MaybeUninitializedPlaces<'a, 'gcx: 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub struct MaybeUninitializedPlaces<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
- mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>,
+ mdpe: &'a MoveDataParamEnv<'tcx>,
}
-impl<'a, 'gcx, 'tcx> MaybeUninitializedPlaces<'a, 'gcx, 'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- body: &'a Body<'tcx>,
- mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>)
- -> Self
- {
+impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
MaybeUninitializedPlaces { tcx: tcx, body: body, mdpe: mdpe }
}
}
-impl<'a, 'gcx, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
}
@@ -172,23 +164,19 @@
/// Similarly, at a given `drop` statement, the set-difference between
/// this data and `MaybeInitializedPlaces` yields the set of places
/// that would require a dynamic drop-flag at that statement.
-pub struct DefinitelyInitializedPlaces<'a, 'gcx: 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub struct DefinitelyInitializedPlaces<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
- mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>,
+ mdpe: &'a MoveDataParamEnv<'tcx>,
}
-impl<'a, 'gcx, 'tcx: 'a> DefinitelyInitializedPlaces<'a, 'gcx, 'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- body: &'a Body<'tcx>,
- mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>)
- -> Self
- {
+impl<'a, 'tcx: 'a> DefinitelyInitializedPlaces<'a, 'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
DefinitelyInitializedPlaces { tcx: tcx, body: body, mdpe: mdpe }
}
}
-impl<'a, 'gcx, 'tcx: 'a> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx: 'a> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
}
@@ -221,28 +209,23 @@
/// c = S; // {a, b, c, d }
/// }
/// ```
-pub struct EverInitializedPlaces<'a, 'gcx: 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub struct EverInitializedPlaces<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
- mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>,
+ mdpe: &'a MoveDataParamEnv<'tcx>,
}
-impl<'a, 'gcx: 'tcx, 'tcx: 'a> EverInitializedPlaces<'a, 'gcx, 'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- body: &'a Body<'tcx>,
- mdpe: &'a MoveDataParamEnv<'gcx, 'tcx>)
- -> Self
- {
+impl<'a, 'tcx: 'a> EverInitializedPlaces<'a, 'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
EverInitializedPlaces { tcx: tcx, body: body, mdpe: mdpe }
}
}
-impl<'a, 'gcx, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
fn move_data(&self) -> &MoveData<'tcx> { &self.mdpe.move_data }
}
-
-impl<'a, 'gcx, 'tcx> MaybeInitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
fn update_bits(sets: &mut BlockSets<'_, MovePathIndex>, path: MovePathIndex,
state: DropFlagState)
{
@@ -253,7 +236,7 @@
}
}
-impl<'a, 'gcx, 'tcx> MaybeUninitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
fn update_bits(sets: &mut BlockSets<'_, MovePathIndex>, path: MovePathIndex,
state: DropFlagState)
{
@@ -264,7 +247,7 @@
}
}
-impl<'a, 'gcx, 'tcx> DefinitelyInitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
fn update_bits(sets: &mut BlockSets<'_, MovePathIndex>, path: MovePathIndex,
state: DropFlagState)
{
@@ -275,7 +258,7 @@
}
}
-impl<'a, 'gcx, 'tcx> BitDenotation<'tcx> for MaybeInitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> BitDenotation<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
type Idx = MovePathIndex;
fn name() -> &'static str { "maybe_init" }
fn bits_per_block(&self) -> usize {
@@ -328,7 +311,7 @@
}
}
-impl<'a, 'gcx, 'tcx> BitDenotation<'tcx> for MaybeUninitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> BitDenotation<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
type Idx = MovePathIndex;
fn name() -> &'static str { "maybe_uninit" }
fn bits_per_block(&self) -> usize {
@@ -386,7 +369,7 @@
}
}
-impl<'a, 'gcx, 'tcx> BitDenotation<'tcx> for DefinitelyInitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> BitDenotation<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
type Idx = MovePathIndex;
fn name() -> &'static str { "definite_init" }
fn bits_per_block(&self) -> usize {
@@ -442,7 +425,7 @@
}
}
-impl<'a, 'gcx, 'tcx> BitDenotation<'tcx> for EverInitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> BitDenotation<'tcx> for EverInitializedPlaces<'a, 'tcx> {
type Idx = InitIndex;
fn name() -> &'static str { "ever_init" }
fn bits_per_block(&self) -> usize {
@@ -519,28 +502,28 @@
}
}
-impl<'a, 'gcx, 'tcx> BitSetOperator for MaybeInitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> BitSetOperator for MaybeInitializedPlaces<'a, 'tcx> {
#[inline]
fn join<T: Idx>(&self, inout_set: &mut BitSet<T>, in_set: &BitSet<T>) -> bool {
inout_set.union(in_set) // "maybe" means we union effects of both preds
}
}
-impl<'a, 'gcx, 'tcx> BitSetOperator for MaybeUninitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> BitSetOperator for MaybeUninitializedPlaces<'a, 'tcx> {
#[inline]
fn join<T: Idx>(&self, inout_set: &mut BitSet<T>, in_set: &BitSet<T>) -> bool {
inout_set.union(in_set) // "maybe" means we union effects of both preds
}
}
-impl<'a, 'gcx, 'tcx> BitSetOperator for DefinitelyInitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> BitSetOperator for DefinitelyInitializedPlaces<'a, 'tcx> {
#[inline]
fn join<T: Idx>(&self, inout_set: &mut BitSet<T>, in_set: &BitSet<T>) -> bool {
inout_set.intersect(in_set) // "definitely" means we intersect effects of both preds
}
}
-impl<'a, 'gcx, 'tcx> BitSetOperator for EverInitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> BitSetOperator for EverInitializedPlaces<'a, 'tcx> {
#[inline]
fn join<T: Idx>(&self, inout_set: &mut BitSet<T>, in_set: &BitSet<T>) -> bool {
inout_set.union(in_set) // inits from both preds are in scope
@@ -557,28 +540,28 @@
// propagating, or you start at all-ones and then use Intersect as
// your merge when propagating.
-impl<'a, 'gcx, 'tcx> InitialFlow for MaybeInitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InitialFlow for MaybeInitializedPlaces<'a, 'tcx> {
#[inline]
fn bottom_value() -> bool {
false // bottom = uninitialized
}
}
-impl<'a, 'gcx, 'tcx> InitialFlow for MaybeUninitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InitialFlow for MaybeUninitializedPlaces<'a, 'tcx> {
#[inline]
fn bottom_value() -> bool {
false // bottom = initialized (start_block_effect counters this at outset)
}
}
-impl<'a, 'gcx, 'tcx> InitialFlow for DefinitelyInitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InitialFlow for DefinitelyInitializedPlaces<'a, 'tcx> {
#[inline]
fn bottom_value() -> bool {
true // bottom = initialized (start_block_effect counters this at outset)
}
}
-impl<'a, 'gcx, 'tcx> InitialFlow for EverInitializedPlaces<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InitialFlow for EverInitializedPlaces<'a, 'tcx> {
#[inline]
fn bottom_value() -> bool {
false // bottom = no initialized variables by default
diff --git a/src/librustc_mir/dataflow/mod.rs b/src/librustc_mir/dataflow/mod.rs
index 8e20682..8c4acd7 100644
--- a/src/librustc_mir/dataflow/mod.rs
+++ b/src/librustc_mir/dataflow/mod.rs
@@ -116,34 +116,41 @@
return None;
}
-pub struct MoveDataParamEnv<'gcx, 'tcx> {
+pub struct MoveDataParamEnv<'tcx> {
pub(crate) move_data: MoveData<'tcx>,
- pub(crate) param_env: ty::ParamEnv<'gcx>,
+ pub(crate) param_env: ty::ParamEnv<'tcx>,
}
-pub(crate) fn do_dataflow<'a, 'gcx, 'tcx, BD, P>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- body: &'a Body<'tcx>,
- def_id: DefId,
- attributes: &[ast::Attribute],
- dead_unwinds: &BitSet<BasicBlock>,
- bd: BD,
- p: P)
- -> DataflowResults<'tcx, BD>
- where BD: BitDenotation<'tcx> + InitialFlow,
- P: Fn(&BD, BD::Idx) -> DebugFormatted
+pub(crate) fn do_dataflow<'a, 'tcx, BD, P>(
+ tcx: TyCtxt<'tcx>,
+ body: &'a Body<'tcx>,
+ def_id: DefId,
+ attributes: &[ast::Attribute],
+ dead_unwinds: &BitSet<BasicBlock>,
+ bd: BD,
+ p: P,
+) -> DataflowResults<'tcx, BD>
+where
+ BD: BitDenotation<'tcx> + InitialFlow,
+ P: Fn(&BD, BD::Idx) -> DebugFormatted,
{
let flow_state = DataflowAnalysis::new(body, dead_unwinds, bd);
flow_state.run(tcx, def_id, attributes, p)
}
-impl<'a, 'gcx: 'tcx, 'tcx: 'a, BD> DataflowAnalysis<'a, 'tcx, BD> where BD: BitDenotation<'tcx>
+impl<'a, 'tcx: 'a, BD> DataflowAnalysis<'a, 'tcx, BD>
+where
+ BD: BitDenotation<'tcx>,
{
- pub(crate) fn run<P>(self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- def_id: DefId,
- attributes: &[ast::Attribute],
- p: P) -> DataflowResults<'tcx, BD>
- where P: Fn(&BD, BD::Idx) -> DebugFormatted
+ pub(crate) fn run<P>(
+ self,
+ tcx: TyCtxt<'tcx>,
+ def_id: DefId,
+ attributes: &[ast::Attribute],
+ p: P,
+ ) -> DataflowResults<'tcx, BD>
+ where
+ P: Fn(&BD, BD::Idx) -> DebugFormatted,
{
let name_found = |sess: &Session, attrs: &[ast::Attribute], name| -> Option<String> {
if let Some(item) = has_rustc_mir_with(attrs, name) {
@@ -795,11 +802,9 @@
self.propagate_bits_into_entry_set_for(in_out, dest_bb, dirty_list);
}
}
- mir::TerminatorKind::FalseEdges { real_target, ref imaginary_targets } => {
+ mir::TerminatorKind::FalseEdges { real_target, imaginary_target } => {
self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
- for target in imaginary_targets {
- self.propagate_bits_into_entry_set_for(in_out, *target, dirty_list);
- }
+ self.propagate_bits_into_entry_set_for(in_out, imaginary_target, dirty_list);
}
mir::TerminatorKind::FalseUnwind { real_target, unwind } => {
self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
diff --git a/src/librustc_mir/dataflow/move_paths/builder.rs b/src/librustc_mir/dataflow/move_paths/builder.rs
index 90e6c46..7c738b7 100644
--- a/src/librustc_mir/dataflow/move_paths/builder.rs
+++ b/src/librustc_mir/dataflow/move_paths/builder.rs
@@ -12,15 +12,15 @@
use super::{MoveError, InitIndex, Init, InitLocation, LookupResult, InitKind};
use super::IllegalMoveOriginKind::*;
-struct MoveDataBuilder<'a, 'gcx: 'tcx, 'tcx: 'a> {
+struct MoveDataBuilder<'a, 'tcx: 'a> {
body: &'a Body<'tcx>,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
data: MoveData<'tcx>,
errors: Vec<(Place<'tcx>, MoveError<'tcx>)>,
}
-impl<'a, 'gcx, 'tcx> MoveDataBuilder<'a, 'gcx, 'tcx> {
- fn new(body: &'a Body<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self {
+impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> {
+ fn new(body: &'a Body<'tcx>, tcx: TyCtxt<'tcx>) -> Self {
let mut move_paths = IndexVec::new();
let mut path_map = IndexVec::new();
let mut init_path_map = IndexVec::new();
@@ -83,7 +83,7 @@
}
}
-impl<'b, 'a, 'gcx, 'tcx> Gatherer<'b, 'a, 'gcx, 'tcx> {
+impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
/// This creates a MovePath for a given place, returning an `MovePathError`
/// if that place can't be moved from.
///
@@ -178,7 +178,7 @@
}
}
-impl<'a, 'gcx, 'tcx> MoveDataBuilder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> {
fn finalize(
self
) -> Result<MoveData<'tcx>, (MoveData<'tcx>, Vec<(Place<'tcx>, MoveError<'tcx>)>)> {
@@ -202,9 +202,9 @@
}
}
-pub(super) fn gather_moves<'a, 'gcx, 'tcx>(
+pub(super) fn gather_moves<'tcx>(
body: &Body<'tcx>,
- tcx: TyCtxt<'a, 'gcx, 'tcx>
+ tcx: TyCtxt<'tcx>,
) -> Result<MoveData<'tcx>, (MoveData<'tcx>, Vec<(Place<'tcx>, MoveError<'tcx>)>)> {
let mut builder = MoveDataBuilder::new(body, tcx);
@@ -226,7 +226,7 @@
builder.finalize()
}
-impl<'a, 'gcx, 'tcx> MoveDataBuilder<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> {
fn gather_args(&mut self) {
for arg in self.body.args_iter() {
let path = self.data.rev_lookup.locals[arg];
@@ -253,12 +253,12 @@
}
}
-struct Gatherer<'b, 'a: 'b, 'gcx: 'tcx, 'tcx: 'a> {
- builder: &'b mut MoveDataBuilder<'a, 'gcx, 'tcx>,
+struct Gatherer<'b, 'a: 'b, 'tcx: 'a> {
+ builder: &'b mut MoveDataBuilder<'a, 'tcx>,
loc: Location,
}
-impl<'b, 'a, 'gcx, 'tcx> Gatherer<'b, 'a, 'gcx, 'tcx> {
+impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
fn gather_statement(&mut self, stmt: &Statement<'tcx>) {
match stmt.kind {
StatementKind::Assign(ref place, ref rval) => {
diff --git a/src/librustc_mir/dataflow/move_paths/mod.rs b/src/librustc_mir/dataflow/move_paths/mod.rs
index 7d75d35..938450c 100644
--- a/src/librustc_mir/dataflow/move_paths/mod.rs
+++ b/src/librustc_mir/dataflow/move_paths/mod.rs
@@ -205,7 +205,7 @@
}
impl Init {
- crate fn span<'gcx>(&self, body: &Body<'gcx>) -> Span {
+ crate fn span<'tcx>(&self, body: &Body<'tcx>) -> Span {
match self.location {
InitLocation::Argument(local) => body.local_decls[local].source_info.span,
InitLocation::Statement(location) => body.source_info(location).span,
@@ -305,9 +305,11 @@
}
}
-impl<'a, 'gcx, 'tcx> MoveData<'tcx> {
- pub fn gather_moves(body: &Body<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>)
- -> Result<Self, (Self, Vec<(Place<'tcx>, MoveError<'tcx>)>)> {
+impl<'tcx> MoveData<'tcx> {
+ pub fn gather_moves(
+ body: &Body<'tcx>,
+ tcx: TyCtxt<'tcx>,
+ ) -> Result<Self, (Self, Vec<(Place<'tcx>, MoveError<'tcx>)>)> {
builder::gather_moves(body, tcx)
}
diff --git a/src/librustc_mir/hair/constant.rs b/src/librustc_mir/hair/constant.rs
index 37a2e79..2cd0463 100644
--- a/src/librustc_mir/hair/constant.rs
+++ b/src/librustc_mir/hair/constant.rs
@@ -9,9 +9,9 @@
Reported,
}
-crate fn lit_to_const<'a, 'gcx, 'tcx>(
+crate fn lit_to_const<'tcx>(
lit: &'tcx ast::LitKind,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
ty: Ty<'tcx>,
neg: bool,
) -> Result<&'tcx ty::Const<'tcx>, LitToConstError> {
diff --git a/src/librustc_mir/hair/cx/block.rs b/src/librustc_mir/hair/cx/block.rs
index 17fab6c..d593205 100644
--- a/src/librustc_mir/hair/cx/block.rs
+++ b/src/librustc_mir/hair/cx/block.rs
@@ -10,7 +10,7 @@
impl<'tcx> Mirror<'tcx> for &'tcx hir::Block {
type Output = Block<'tcx>;
- fn make_mirror<'a, 'gcx>(self, cx: &mut Cx<'a, 'gcx, 'tcx>) -> Block<'tcx> {
+ fn make_mirror(self, cx: &mut Cx<'_, 'tcx>) -> Block<'tcx> {
// We have to eagerly lower the "spine" of the statements
// in order to get the lexical scoping correctly.
let stmts = mirror_stmts(cx, self.hir_id.local_id, &*self.stmts);
@@ -40,15 +40,16 @@
}
}
-fn mirror_stmts<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
- block_id: hir::ItemLocalId,
- stmts: &'tcx [hir::Stmt])
- -> Vec<StmtRef<'tcx>> {
+fn mirror_stmts<'a, 'tcx>(
+ cx: &mut Cx<'a, 'tcx>,
+ block_id: hir::ItemLocalId,
+ stmts: &'tcx [hir::Stmt],
+) -> Vec<StmtRef<'tcx>> {
let mut result = vec![];
for (index, stmt) in stmts.iter().enumerate() {
let hir_id = stmt.hir_id;
let opt_dxn_ext = cx.region_scope_tree.opt_destruction_scope(hir_id.local_id);
- let stmt_span = StatementSpan(cx.tcx.hir().span_by_hir_id(hir_id));
+ let stmt_span = StatementSpan(cx.tcx.hir().span(hir_id));
match stmt.node {
hir::StmtKind::Expr(ref expr) |
hir::StmtKind::Semi(ref expr) => {
@@ -114,9 +115,10 @@
return result;
}
-pub fn to_expr_ref<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
- block: &'tcx hir::Block)
- -> ExprRef<'tcx> {
+pub fn to_expr_ref<'a, 'tcx>(
+ cx: &mut Cx<'a, 'tcx>,
+ block: &'tcx hir::Block,
+) -> ExprRef<'tcx> {
let block_ty = cx.tables().node_type(block.hir_id);
let temp_lifetime = cx.region_scope_tree.temporary_scope(block.hir_id.local_id);
let expr = Expr {
diff --git a/src/librustc_mir/hair/cx/expr.rs b/src/librustc_mir/hair/cx/expr.rs
index 5a6b6a7..9f05cf9 100644
--- a/src/librustc_mir/hair/cx/expr.rs
+++ b/src/librustc_mir/hair/cx/expr.rs
@@ -17,7 +17,7 @@
impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
type Output = Expr<'tcx>;
- fn make_mirror<'a, 'gcx>(self, cx: &mut Cx<'a, 'gcx, 'tcx>) -> Expr<'tcx> {
+ fn make_mirror(self, cx: &mut Cx<'_, 'tcx>) -> Expr<'tcx> {
let temp_lifetime = cx.region_scope_tree.temporary_scope(self.hir_id.local_id);
let expr_scope = region::Scope {
id: self.hir_id.local_id,
@@ -68,11 +68,12 @@
}
}
-fn apply_adjustment<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
- hir_expr: &'tcx hir::Expr,
- mut expr: Expr<'tcx>,
- adjustment: &Adjustment<'tcx>)
- -> Expr<'tcx> {
+fn apply_adjustment<'a, 'tcx>(
+ cx: &mut Cx<'a, 'tcx>,
+ hir_expr: &'tcx hir::Expr,
+ mut expr: Expr<'tcx>,
+ adjustment: &Adjustment<'tcx>
+) -> Expr<'tcx> {
let Expr { temp_lifetime, mut span, .. } = expr;
// Adjust the span from the block, to the last expression of the
@@ -196,9 +197,10 @@
}
}
-fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
- expr: &'tcx hir::Expr)
- -> Expr<'tcx> {
+fn make_mirror_unadjusted<'a, 'tcx>(
+ cx: &mut Cx<'a, 'tcx>,
+ expr: &'tcx hir::Expr,
+) -> Expr<'tcx> {
let expr_ty = cx.tables().expr_ty(expr);
let temp_lifetime = cx.region_scope_tree.temporary_scope(expr.hir_id.local_id);
@@ -774,7 +776,7 @@
}
fn user_substs_applied_to_res(
- cx: &mut Cx<'a, 'gcx, 'tcx>,
+ cx: &mut Cx<'a, 'tcx>,
hir_id: hir::HirId,
res: Res,
) -> Option<ty::CanonicalUserType<'tcx>> {
@@ -808,8 +810,8 @@
user_provided_type
}
-fn method_callee<'a, 'gcx, 'tcx>(
- cx: &mut Cx<'a, 'gcx, 'tcx>,
+fn method_callee<'a, 'tcx>(
+ cx: &mut Cx<'a, 'tcx>,
expr: &hir::Expr,
span: Span,
overloaded_callee: Option<(DefId, SubstsRef<'tcx>)>,
@@ -865,7 +867,7 @@
}
}
-fn convert_arm<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, arm: &'tcx hir::Arm) -> Arm<'tcx> {
+fn convert_arm<'a, 'tcx>(cx: &mut Cx<'a, 'tcx>, arm: &'tcx hir::Arm) -> Arm<'tcx> {
Arm {
patterns: arm.pats.iter().map(|p| cx.pattern_from_hir(p)).collect(),
guard: match arm.guard {
@@ -882,10 +884,11 @@
}
}
-fn convert_path_expr<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
- expr: &'tcx hir::Expr,
- res: Res)
- -> ExprKind<'tcx> {
+fn convert_path_expr<'a, 'tcx>(
+ cx: &mut Cx<'a, 'tcx>,
+ expr: &'tcx hir::Expr,
+ res: Res,
+) -> ExprKind<'tcx> {
let substs = cx.tables().node_substs(expr.hir_id);
match res {
// A regular function, constructor function or a constant.
@@ -905,12 +908,13 @@
}
Res::Def(DefKind::ConstParam, def_id) => {
- let node_id = cx.tcx.hir().as_local_node_id(def_id).unwrap();
- let item_id = cx.tcx.hir().get_parent_node(node_id);
- let item_def_id = cx.tcx.hir().local_def_id(item_id);
+ let hir_id = cx.tcx.hir().as_local_hir_id(def_id).unwrap();
+ let item_id = cx.tcx.hir().get_parent_node_by_hir_id(hir_id);
+ let item_def_id = cx.tcx.hir().local_def_id_from_hir_id(item_id);
let generics = cx.tcx.generics_of(item_def_id);
- let index = generics.param_def_id_to_index[&cx.tcx.hir().local_def_id(node_id)];
- let name = cx.tcx.hir().name(node_id).as_interned_str();
+ let local_def_id = cx.tcx.hir().local_def_id_from_hir_id(hir_id);
+ let index = generics.param_def_id_to_index[&local_def_id];
+ let name = cx.tcx.hir().name_by_hir_id(hir_id).as_interned_str();
let val = ConstValue::Param(ty::ParamConst::new(index, name));
ExprKind::Literal {
literal: cx.tcx.mk_const(
@@ -967,7 +971,7 @@
}
fn convert_var(
- cx: &mut Cx<'_, '_, 'tcx>,
+ cx: &mut Cx<'_, 'tcx>,
expr: &'tcx hir::Expr,
var_hir_id: hir::HirId,
) -> ExprKind<'tcx> {
@@ -1117,10 +1121,11 @@
}
}
-fn overloaded_operator<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
- expr: &'tcx hir::Expr,
- args: Vec<ExprRef<'tcx>>)
- -> ExprKind<'tcx> {
+fn overloaded_operator<'a, 'tcx>(
+ cx: &mut Cx<'a, 'tcx>,
+ expr: &'tcx hir::Expr,
+ args: Vec<ExprRef<'tcx>>
+) -> ExprKind<'tcx> {
let fun = method_callee(cx, expr, expr.span, None);
ExprKind::Call {
ty: fun.ty,
@@ -1130,8 +1135,8 @@
}
}
-fn overloaded_place<'a, 'gcx, 'tcx>(
- cx: &mut Cx<'a, 'gcx, 'tcx>,
+fn overloaded_place<'a, 'tcx>(
+ cx: &mut Cx<'a, 'tcx>,
expr: &'tcx hir::Expr,
place_ty: Ty<'tcx>,
overloaded_callee: Option<(DefId, SubstsRef<'tcx>)>,
@@ -1178,11 +1183,12 @@
ExprKind::Deref { arg: ref_expr.to_ref() }
}
-fn capture_upvar<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
- closure_expr: &'tcx hir::Expr,
- var_hir_id: hir::HirId,
- upvar_ty: Ty<'tcx>)
- -> ExprRef<'tcx> {
+fn capture_upvar<'tcx>(
+ cx: &mut Cx<'_, 'tcx>,
+ closure_expr: &'tcx hir::Expr,
+ var_hir_id: hir::HirId,
+ upvar_ty: Ty<'tcx>
+) -> ExprRef<'tcx> {
let upvar_id = ty::UpvarId {
var_path: ty::UpvarPath { hir_id: var_hir_id },
closure_expr_id: cx.tcx.hir().local_def_id_from_hir_id(closure_expr.hir_id).to_local(),
@@ -1218,9 +1224,10 @@
}
/// Converts a list of named fields (i.e., for struct-like struct/enum ADTs) into FieldExprRef.
-fn field_refs<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
- fields: &'tcx [hir::Field])
- -> Vec<FieldExprRef<'tcx>> {
+fn field_refs<'a, 'tcx>(
+ cx: &mut Cx<'a, 'tcx>,
+ fields: &'tcx [hir::Field]
+) -> Vec<FieldExprRef<'tcx>> {
fields.iter()
.map(|field| {
FieldExprRef {
diff --git a/src/librustc_mir/hair/cx/mod.rs b/src/librustc_mir/hair/cx/mod.rs
index f4a23a9..ff53cf0 100644
--- a/src/librustc_mir/hair/cx/mod.rs
+++ b/src/librustc_mir/hair/cx/mod.rs
@@ -21,18 +21,18 @@
use crate::hair::constant::{lit_to_const, LitToConstError};
#[derive(Clone)]
-pub struct Cx<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+pub struct Cx<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'tcx>,
+ infcx: &'a InferCtxt<'a, 'tcx>,
pub root_lint_level: hir::HirId,
- pub param_env: ty::ParamEnv<'gcx>,
+ pub param_env: ty::ParamEnv<'tcx>,
/// Identity `InternalSubsts` for use with const-evaluation.
- pub identity_substs: &'gcx InternalSubsts<'gcx>,
+ pub identity_substs: &'tcx InternalSubsts<'tcx>,
- pub region_scope_tree: &'gcx region::ScopeTree,
- pub tables: &'a ty::TypeckTables<'gcx>,
+ pub region_scope_tree: &'tcx region::ScopeTree,
+ pub tables: &'a ty::TypeckTables<'tcx>,
/// This is `Constness::Const` if we are compiling a `static`,
/// `const`, or the body of a `const fn`.
@@ -51,13 +51,12 @@
control_flow_destroyed: Vec<(Span, String)>,
}
-impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
- pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- src_id: hir::HirId) -> Cx<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Cx<'a, 'tcx> {
+ pub fn new(infcx: &'a InferCtxt<'a, 'tcx>, src_id: hir::HirId) -> Cx<'a, 'tcx> {
let tcx = infcx.tcx;
let src_def_id = tcx.hir().local_def_id_from_hir_id(src_id);
let tables = tcx.typeck_tables_of(src_def_id);
- let body_owner_kind = tcx.hir().body_owner_kind_by_hir_id(src_id);
+ let body_owner_kind = tcx.hir().body_owner_kind(src_id);
let constness = match body_owner_kind {
hir::BodyOwnerKind::Const |
@@ -66,7 +65,7 @@
hir::BodyOwnerKind::Fn => hir::Constness::NotConst,
};
- let attrs = tcx.hir().attrs_by_hir_id(src_id);
+ let attrs = tcx.hir().attrs(src_id);
// Some functions always have overflow checks enabled,
// however, they may not get codegen'd, depending on
@@ -100,7 +99,7 @@
}
}
-impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Cx<'a, 'tcx> {
/// Normalizes `ast` into the appropriate "mirror" type.
pub fn mirror<M: Mirror<'tcx>>(&mut self, ast: M) -> M::Output {
ast.make_mirror(self)
@@ -168,11 +167,10 @@
pub fn trait_method(&mut self,
trait_def_id: DefId,
- method_name: &str,
+ method_name: Symbol,
self_ty: Ty<'tcx>,
params: &[Kind<'tcx>])
-> (Ty<'tcx>, &'tcx ty::Const<'tcx>) {
- let method_name = Symbol::intern(method_name);
let substs = self.tcx.mk_substs_trait(self_ty, params);
for item in self.tcx.associated_items(trait_def_id) {
if item.kind == ty::AssocKind::Method && item.ident.name == method_name {
@@ -200,11 +198,11 @@
ty.needs_drop(self.tcx.global_tcx(), param_env)
}
- pub fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
+ pub fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
- pub fn tables(&self) -> &'a ty::TypeckTables<'gcx> {
+ pub fn tables(&self) -> &'a ty::TypeckTables<'tcx> {
self.tables
}
@@ -217,8 +215,8 @@
}
}
-impl UserAnnotatedTyHelpers<'gcx, 'tcx> for Cx<'_, 'gcx, 'tcx> {
- fn tcx(&self) -> TyCtxt<'_, 'gcx, 'tcx> {
+impl UserAnnotatedTyHelpers<'tcx> for Cx<'_, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx()
}
diff --git a/src/librustc_mir/hair/mod.rs b/src/librustc_mir/hair/mod.rs
index 8e19913..4694241 100644
--- a/src/librustc_mir/hair/mod.rs
+++ b/src/librustc_mir/hair/mod.rs
@@ -344,13 +344,13 @@
pub trait Mirror<'tcx> {
type Output;
- fn make_mirror<'a, 'gcx>(self, cx: &mut Cx<'a, 'gcx, 'tcx>) -> Self::Output;
+ fn make_mirror(self, cx: &mut Cx<'_, 'tcx>) -> Self::Output;
}
impl<'tcx> Mirror<'tcx> for Expr<'tcx> {
type Output = Expr<'tcx>;
- fn make_mirror<'a, 'gcx>(self, _: &mut Cx<'a, 'gcx, 'tcx>) -> Expr<'tcx> {
+ fn make_mirror(self, _: &mut Cx<'_, 'tcx>) -> Expr<'tcx> {
self
}
}
@@ -358,7 +358,7 @@
impl<'tcx> Mirror<'tcx> for ExprRef<'tcx> {
type Output = Expr<'tcx>;
- fn make_mirror<'a, 'gcx>(self, hir: &mut Cx<'a, 'gcx, 'tcx>) -> Expr<'tcx> {
+ fn make_mirror(self, hir: &mut Cx<'a, 'tcx>) -> Expr<'tcx> {
match self {
ExprRef::Hair(h) => h.make_mirror(hir),
ExprRef::Mirror(m) => *m,
@@ -369,7 +369,7 @@
impl<'tcx> Mirror<'tcx> for Stmt<'tcx> {
type Output = Stmt<'tcx>;
- fn make_mirror<'a, 'gcx>(self, _: &mut Cx<'a, 'gcx, 'tcx>) -> Stmt<'tcx> {
+ fn make_mirror(self, _: &mut Cx<'_, 'tcx>) -> Stmt<'tcx> {
self
}
}
@@ -377,7 +377,7 @@
impl<'tcx> Mirror<'tcx> for StmtRef<'tcx> {
type Output = Stmt<'tcx>;
- fn make_mirror<'a, 'gcx>(self, _: &mut Cx<'a, 'gcx, 'tcx>) -> Stmt<'tcx> {
+ fn make_mirror(self, _: &mut Cx<'_, 'tcx>) -> Stmt<'tcx> {
match self {
StmtRef::Mirror(m) => *m,
}
@@ -387,7 +387,7 @@
impl<'tcx> Mirror<'tcx> for Block<'tcx> {
type Output = Block<'tcx>;
- fn make_mirror<'a, 'gcx>(self, _: &mut Cx<'a, 'gcx, 'tcx>) -> Block<'tcx> {
+ fn make_mirror(self, _: &mut Cx<'_, 'tcx>) -> Block<'tcx> {
self
}
}
diff --git a/src/librustc_mir/hair/pattern/_match.rs b/src/librustc_mir/hair/pattern/_match.rs
index 29e9c42..bb1a67b 100644
--- a/src/librustc_mir/hair/pattern/_match.rs
+++ b/src/librustc_mir/hair/pattern/_match.rs
@@ -194,11 +194,11 @@
cx.pattern_arena.alloc(LiteralExpander { tcx: cx.tcx }.fold_pattern(&pat))
}
-struct LiteralExpander<'a, 'tcx> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>
+struct LiteralExpander<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> LiteralExpander<'a, 'tcx> {
+impl LiteralExpander<'tcx> {
/// Derefs `val` and potentially unsizes the value if `crty` is an array and `rty` a slice.
///
/// `crty` and `rty` can differ because you can use array constants in the presence of slice
@@ -239,7 +239,7 @@
}
}
-impl<'a, 'tcx> PatternFolder<'tcx> for LiteralExpander<'a, 'tcx> {
+impl PatternFolder<'tcx> for LiteralExpander<'tcx> {
fn fold_pattern(&mut self, pat: &Pattern<'tcx>) -> Pattern<'tcx> {
debug!("fold_pattern {:?} {:?} {:?}", pat, pat.ty.sty, pat.kind);
match (&pat.ty.sty, &*pat.kind) {
@@ -350,7 +350,7 @@
}
pub struct MatchCheckCtxt<'a, 'tcx: 'a> {
- pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ pub tcx: TyCtxt<'tcx>,
/// The module in which the match occurs. This is necessary for
/// checking inhabited-ness of types because whether a type is (visibly)
/// inhabited can depend on whether it was defined in the current module or
@@ -365,11 +365,13 @@
impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> {
pub fn create_and_enter<F, R>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
module: DefId,
- f: F) -> R
- where F: for<'b> FnOnce(MatchCheckCtxt<'b, 'tcx>) -> R
+ f: F,
+ ) -> R
+ where
+ F: for<'b> FnOnce(MatchCheckCtxt<'b, 'tcx>) -> R,
{
let pattern_arena = TypedArena::default();
@@ -426,7 +428,7 @@
Variant(DefId),
/// Literal values.
ConstantValue(&'tcx ty::Const<'tcx>),
- /// Ranges of literal values (`2...5` and `2..5`).
+ /// Ranges of literal values (`2..=5` and `2..5`).
ConstantRange(u128, u128, Ty<'tcx>, RangeEnd),
/// Array patterns of length n.
Slice(u64),
@@ -814,7 +816,7 @@
/// `IntRange`s always store a contiguous range. This means that values are
/// encoded such that `0` encodes the minimum value for the integer,
/// regardless of the signedness.
-/// For example, the pattern `-128...127i8` is encoded as `0..=255`.
+/// For example, the pattern `-128..=127i8` is encoded as `0..=255`.
/// This makes comparisons and arithmetic on interval endpoints much more
/// straightforward. See `signed_bias` for details.
///
@@ -827,9 +829,7 @@
}
impl<'tcx> IntRange<'tcx> {
- fn from_ctor(tcx: TyCtxt<'_, 'tcx, 'tcx>,
- ctor: &Constructor<'tcx>)
- -> Option<IntRange<'tcx>> {
+ fn from_ctor(tcx: TyCtxt<'tcx>, ctor: &Constructor<'tcx>) -> Option<IntRange<'tcx>> {
// Floating-point ranges are permitted and we don't want
// to consider them when constructing integer ranges.
fn is_integral<'tcx>(ty: Ty<'tcx>) -> bool {
@@ -867,9 +867,7 @@
}
}
- fn from_pat(tcx: TyCtxt<'_, 'tcx, 'tcx>,
- mut pat: &Pattern<'tcx>)
- -> Option<IntRange<'tcx>> {
+ fn from_pat(tcx: TyCtxt<'tcx>, mut pat: &Pattern<'tcx>) -> Option<IntRange<'tcx>> {
let range = loop {
match pat.kind {
box PatternKind::Constant { value } => break ConstantValue(value),
@@ -889,7 +887,7 @@
}
// The return value of `signed_bias` should be XORed with an endpoint to encode/decode it.
- fn signed_bias(tcx: TyCtxt<'_, 'tcx, 'tcx>, ty: Ty<'tcx>) -> u128 {
+ fn signed_bias(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> u128 {
match ty.sty {
ty::Int(ity) => {
let bits = Integer::from_attr(&tcx, SignedInt(ity)).size().bits() as u128;
@@ -901,7 +899,7 @@
/// Converts a `RangeInclusive` to a `ConstantValue` or inclusive `ConstantRange`.
fn range_to_ctor(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
ty: Ty<'tcx>,
r: RangeInclusive<u128>,
) -> Constructor<'tcx> {
@@ -917,10 +915,11 @@
/// Returns a collection of ranges that spans the values covered by `ranges`, subtracted
/// by the values covered by `self`: i.e., `ranges \ self` (in set notation).
- fn subtract_from(self,
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
- ranges: Vec<Constructor<'tcx>>)
- -> Vec<Constructor<'tcx>> {
+ fn subtract_from(
+ self,
+ tcx: TyCtxt<'tcx>,
+ ranges: Vec<Constructor<'tcx>>,
+ ) -> Vec<Constructor<'tcx>> {
let ranges = ranges.into_iter().filter_map(|r| {
IntRange::from_ctor(tcx, &r).map(|i| i.range)
});
@@ -988,7 +987,7 @@
// to compute the full set.)
fn compute_missing_ctors<'a, 'tcx: 'a>(
info: MissingCtorsInfo,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
all_ctors: &Vec<Constructor<'tcx>>,
used_ctors: &Vec<Constructor<'tcx>>,
) -> MissingCtors<'tcx> {
@@ -1424,12 +1423,12 @@
// meaning all other types will compare unequal and thus equal patterns often do not cause the
// second pattern to lint about unreachable match arms.
fn slice_pat_covered_by_const<'tcx>(
- tcx: TyCtxt<'_, 'tcx, '_>,
+ tcx: TyCtxt<'tcx>,
_span: Span,
const_val: &'tcx ty::Const<'tcx>,
prefix: &[Pattern<'tcx>],
slice: &Option<Pattern<'tcx>>,
- suffix: &[Pattern<'tcx>]
+ suffix: &[Pattern<'tcx>],
) -> Result<bool, ErrorReported> {
let data: &[u8] = match (const_val.val, &const_val.ty.sty) {
(ConstValue::ByRef(ptr, alloc), ty::Array(t, n)) => {
@@ -1476,7 +1475,7 @@
// Whether to evaluate a constructor using exhaustive integer matching. This is true if the
// constructor is a range or constant with an integer type.
-fn should_treat_range_exhaustively(tcx: TyCtxt<'_, 'tcx, 'tcx>, ctor: &Constructor<'tcx>) -> bool {
+fn should_treat_range_exhaustively(tcx: TyCtxt<'tcx>, ctor: &Constructor<'tcx>) -> bool {
let ty = match ctor {
ConstantValue(value) => value.ty,
ConstantRange(_, _, ty, _) => ty,
@@ -1522,7 +1521,7 @@
/// between every pair of boundary points. (This essentially sums up to performing the intuitive
/// merging operation depicted above.)
fn split_grouped_constructors<'p, 'a: 'p, 'tcx: 'a>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
ctors: Vec<Constructor<'tcx>>,
&Matrix(ref m): &Matrix<'p, 'tcx>,
ty: Ty<'tcx>,
@@ -1600,7 +1599,7 @@
/// Checks whether there exists any shared value in either `ctor` or `pat` by intersecting them.
fn constructor_intersects_pattern<'p, 'a: 'p, 'tcx: 'a>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
ctor: &Constructor<'tcx>,
pat: &'p Pattern<'tcx>,
) -> Option<SmallVec<[&'p Pattern<'tcx>; 2]>> {
@@ -1627,8 +1626,8 @@
}
}
-fn constructor_covered_by_range<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn constructor_covered_by_range<'tcx>(
+ tcx: TyCtxt<'tcx>,
ctor: &Constructor<'tcx>,
pat: &Pattern<'tcx>,
) -> Result<bool, ErrorReported> {
diff --git a/src/librustc_mir/hair/pattern/check_match.rs b/src/librustc_mir/hair/pattern/check_match.rs
index 215faee..159b526 100644
--- a/src/librustc_mir/hair/pattern/check_match.rs
+++ b/src/librustc_mir/hair/pattern/check_match.rs
@@ -26,7 +26,7 @@
use syntax::ptr::P;
use syntax_pos::{Span, DUMMY_SP, MultiSpan};
-pub(crate) fn check_match<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
+pub(crate) fn check_match<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) {
let body_id = if let Some(id) = tcx.hir().as_local_hir_id(def_id) {
tcx.hir().body_owned_by(id)
} else {
@@ -48,7 +48,7 @@
}
struct MatchVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body_owner: DefId,
tables: &'a ty::TypeckTables<'tcx>,
param_env: ty::ParamEnv<'tcx>,
@@ -161,7 +161,7 @@
}
}
- let module = self.tcx.hir().get_module_parent_by_hir_id(scrut.hir_id);
+ let module = self.tcx.hir().get_module_parent(scrut.hir_id);
MatchCheckCtxt::create_and_enter(self.tcx, self.param_env, module, |ref mut cx| {
let mut have_errors = false;
@@ -193,7 +193,7 @@
// Then, if the match has no arms, check whether the scrutinee
// is uninhabited.
let pat_ty = self.tables.node_type(scrut.hir_id);
- let module = self.tcx.hir().get_module_parent_by_hir_id(scrut.hir_id);
+ let module = self.tcx.hir().get_module_parent(scrut.hir_id);
let mut def_span = None;
let mut missing_variants = vec![];
if inlined_arms.is_empty() {
@@ -261,7 +261,7 @@
}
fn check_irrefutable(&self, pat: &'tcx Pat, origin: &str) {
- let module = self.tcx.hir().get_module_parent_by_hir_id(pat.hir_id);
+ let module = self.tcx.hir().get_module_parent(pat.hir_id);
MatchCheckCtxt::create_and_enter(self.tcx, self.param_env, module, |ref mut cx| {
let mut patcx = PatternContext::new(self.tcx,
self.param_env.and(self.identity_substs),
diff --git a/src/librustc_mir/hair/pattern/mod.rs b/src/librustc_mir/hair/pattern/mod.rs
index 716838b..e0e1485 100644
--- a/src/librustc_mir/hair/pattern/mod.rs
+++ b/src/librustc_mir/hair/pattern/mod.rs
@@ -327,7 +327,7 @@
}
pub struct PatternContext<'a, 'tcx: 'a> {
- pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ pub tcx: TyCtxt<'tcx>,
pub param_env: ty::ParamEnv<'tcx>,
pub tables: &'a ty::TypeckTables<'tcx>,
pub substs: SubstsRef<'tcx>,
@@ -335,10 +335,12 @@
}
impl<'a, 'tcx> Pattern<'tcx> {
- pub fn from_hir(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- param_env_and_substs: ty::ParamEnvAnd<'tcx, SubstsRef<'tcx>>,
- tables: &'a ty::TypeckTables<'tcx>,
- pat: &'tcx hir::Pat) -> Self {
+ pub fn from_hir(
+ tcx: TyCtxt<'tcx>,
+ param_env_and_substs: ty::ParamEnvAnd<'tcx, SubstsRef<'tcx>>,
+ tables: &'a ty::TypeckTables<'tcx>,
+ pat: &'tcx hir::Pat,
+ ) -> Self {
let mut pcx = PatternContext::new(tcx, param_env_and_substs, tables);
let result = pcx.lower_pattern(pat);
if !pcx.errors.is_empty() {
@@ -351,9 +353,11 @@
}
impl<'a, 'tcx> PatternContext<'a, 'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- param_env_and_substs: ty::ParamEnvAnd<'tcx, SubstsRef<'tcx>>,
- tables: &'a ty::TypeckTables<'tcx>) -> Self {
+ pub fn new(
+ tcx: TyCtxt<'tcx>,
+ param_env_and_substs: ty::ParamEnvAnd<'tcx, SubstsRef<'tcx>>,
+ tables: &'a ty::TypeckTables<'tcx>,
+ ) -> Self {
PatternContext {
tcx,
param_env: param_env_and_substs.param_env,
@@ -1054,8 +1058,8 @@
}
}
-impl UserAnnotatedTyHelpers<'tcx, 'tcx> for PatternContext<'_, 'tcx> {
- fn tcx(&self) -> TyCtxt<'_, 'tcx, 'tcx> {
+impl UserAnnotatedTyHelpers<'tcx> for PatternContext<'_, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
@@ -1242,8 +1246,8 @@
}
}
-pub fn compare_const_vals<'a, 'gcx, 'tcx>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub fn compare_const_vals<'tcx>(
+ tcx: TyCtxt<'tcx>,
a: &'tcx ty::Const<'tcx>,
b: &'tcx ty::Const<'tcx>,
ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>,
diff --git a/src/librustc_mir/hair/util.rs b/src/librustc_mir/hair/util.rs
index c9dae69..4e01485 100644
--- a/src/librustc_mir/hair/util.rs
+++ b/src/librustc_mir/hair/util.rs
@@ -1,8 +1,8 @@
use rustc::hir;
use rustc::ty::{self, CanonicalUserType, TyCtxt, UserType};
-crate trait UserAnnotatedTyHelpers<'gcx: 'tcx, 'tcx> {
- fn tcx(&self) -> TyCtxt<'_, 'gcx, 'tcx>;
+crate trait UserAnnotatedTyHelpers<'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx>;
fn tables(&self) -> &ty::TypeckTables<'tcx>;
diff --git a/src/librustc_mir/interpret/cast.rs b/src/librustc_mir/interpret/cast.rs
index 6392e09..fbacdf6 100644
--- a/src/librustc_mir/interpret/cast.rs
+++ b/src/librustc_mir/interpret/cast.rs
@@ -13,7 +13,7 @@
use super::{InterpretCx, Machine, PlaceTy, OpTy, Immediate};
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
match ty.sty {
ty::RawPtr(ty::TypeAndMut { ty, .. }) |
diff --git a/src/librustc_mir/interpret/eval_context.rs b/src/librustc_mir/interpret/eval_context.rs
index a34889e..28dc0d3 100644
--- a/src/librustc_mir/interpret/eval_context.rs
+++ b/src/librustc_mir/interpret/eval_context.rs
@@ -26,27 +26,25 @@
Memory, Machine
};
-pub struct InterpretCx<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'a, 'mir, 'tcx>> {
+pub struct InterpretCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
/// Stores the `Machine` instance.
pub machine: M,
/// The results of the type checker, from rustc.
- pub tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
+ pub tcx: TyCtxtAt<'tcx>,
/// Bounds in scope for polymorphic evaluations.
pub(crate) param_env: ty::ParamEnv<'tcx>,
/// The virtual memory system.
- pub(crate) memory: Memory<'a, 'mir, 'tcx, M>,
+ pub(crate) memory: Memory<'mir, 'tcx, M>,
/// The virtual call stack.
pub(crate) stack: Vec<Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>>,
/// A cache for deduplicating vtables
- pub(super) vtables: FxHashMap<
- (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>),
- Pointer<M::PointerTag>
- >,
+ pub(super) vtables:
+ FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), Pointer<M::PointerTag>>,
}
/// A stack frame.
@@ -160,35 +158,33 @@
}
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
- for InterpretCx<'a, 'mir, 'tcx, M>
-{
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for InterpretCx<'mir, 'tcx, M> {
#[inline]
fn data_layout(&self) -> &layout::TargetDataLayout {
&self.tcx.data_layout
}
}
-impl<'a, 'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for InterpretCx<'a, 'mir, 'tcx, M>
- where M: Machine<'a, 'mir, 'tcx>
+impl<'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for InterpretCx<'mir, 'tcx, M>
+where
+ M: Machine<'mir, 'tcx>,
{
#[inline]
- fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
*self.tcx
}
}
-impl<'a, 'mir, 'tcx, M> layout::HasParamEnv<'tcx> for InterpretCx<'a, 'mir, 'tcx, M>
- where M: Machine<'a, 'mir, 'tcx>
+impl<'mir, 'tcx, M> layout::HasParamEnv<'tcx> for InterpretCx<'mir, 'tcx, M>
+where
+ M: Machine<'mir, 'tcx>,
{
fn param_env(&self) -> ty::ParamEnv<'tcx> {
self.param_env
}
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> LayoutOf
- for InterpretCx<'a, 'mir, 'tcx, M>
-{
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf for InterpretCx<'mir, 'tcx, M> {
type Ty = Ty<'tcx>;
type TyLayout = InterpResult<'tcx, TyLayout<'tcx>>;
@@ -199,12 +195,8 @@
}
}
-impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
- pub fn new(
- tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- machine: M,
- ) -> Self {
+impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
+ pub fn new(tcx: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>, machine: M) -> Self {
InterpretCx {
machine,
tcx,
@@ -216,12 +208,12 @@
}
#[inline(always)]
- pub fn memory(&self) -> &Memory<'a, 'mir, 'tcx, M> {
+ pub fn memory(&self) -> &Memory<'mir, 'tcx, M> {
&self.memory
}
#[inline(always)]
- pub fn memory_mut(&mut self) -> &mut Memory<'a, 'mir, 'tcx, M> {
+ pub fn memory_mut(&mut self) -> &mut Memory<'mir, 'tcx, M> {
&mut self.memory
}
@@ -773,4 +765,21 @@
pub fn truncate(&self, value: u128, ty: TyLayout<'_>) -> u128 {
truncate(value, ty.size)
}
+
+ #[inline(always)]
+ pub fn force_ptr(
+ &self,
+ scalar: Scalar<M::PointerTag>,
+ ) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
+ self.memory.force_ptr(scalar)
+ }
+
+ #[inline(always)]
+ pub fn force_bits(
+ &self,
+ scalar: Scalar<M::PointerTag>,
+ size: Size
+ ) -> InterpResult<'tcx, u128> {
+ self.memory.force_bits(scalar, size)
+ }
}
diff --git a/src/librustc_mir/interpret/intrinsics.rs b/src/librustc_mir/interpret/intrinsics.rs
index 8888d7d..beb5049 100644
--- a/src/librustc_mir/interpret/intrinsics.rs
+++ b/src/librustc_mir/interpret/intrinsics.rs
@@ -39,7 +39,7 @@
Ok(Scalar::from_uint(bits_out, size))
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
/// Returns `true` if emulation happened.
pub fn emulate_intrinsic(
&mut self,
diff --git a/src/librustc_mir/interpret/intrinsics/type_name.rs b/src/librustc_mir/interpret/intrinsics/type_name.rs
index 1270b35..5ca3531 100644
--- a/src/librustc_mir/interpret/intrinsics/type_name.rs
+++ b/src/librustc_mir/interpret/intrinsics/type_name.rs
@@ -9,12 +9,12 @@
use std::fmt::Write;
use rustc::mir::interpret::{Allocation, ConstValue};
-struct AbsolutePathPrinter<'a, 'tcx> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct AbsolutePathPrinter<'tcx> {
+ tcx: TyCtxt<'tcx>,
path: String,
}
-impl<'tcx> Printer<'tcx, 'tcx> for AbsolutePathPrinter<'_, 'tcx> {
+impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> {
type Error = std::fmt::Error;
type Path = Self;
@@ -23,7 +23,7 @@
type DynExistential = Self;
type Const = Self;
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
@@ -167,18 +167,16 @@
}
}
}
-impl PrettyPrinter<'tcx, 'tcx> for AbsolutePathPrinter<'_, 'tcx> {
+impl PrettyPrinter<'tcx> for AbsolutePathPrinter<'tcx> {
fn region_should_not_be_omitted(
&self,
_region: ty::Region<'_>,
) -> bool {
false
}
- fn comma_sep<T>(
- mut self,
- mut elems: impl Iterator<Item = T>,
- ) -> Result<Self, Self::Error>
- where T: Print<'tcx, 'tcx, Self, Output = Self, Error = Self::Error>
+ fn comma_sep<T>(mut self, mut elems: impl Iterator<Item = T>) -> Result<Self, Self::Error>
+ where
+ T: Print<'tcx, Self, Output = Self, Error = Self::Error>,
{
if let Some(first) = elems.next() {
self = first.print(self)?;
@@ -204,7 +202,7 @@
}
}
-impl Write for AbsolutePathPrinter<'_, '_> {
+impl Write for AbsolutePathPrinter<'_> {
fn write_str(&mut self, s: &str) -> std::fmt::Result {
Ok(self.path.push_str(s))
}
@@ -212,7 +210,7 @@
/// Produces an absolute path representation of the given type. See also the documentation on
/// `std::any::type_name`
-pub fn type_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> &'tcx ty::Const<'tcx> {
+pub fn type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> &'tcx ty::Const<'tcx> {
let alloc = alloc_type_name(tcx, ty);
tcx.mk_const(ty::Const {
val: ConstValue::Slice {
@@ -225,10 +223,7 @@
}
/// Directly returns an `Allocation` containing an absolute path representation of the given type.
-pub(super) fn alloc_type_name<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: Ty<'tcx>
-) -> &'tcx Allocation {
+pub(super) fn alloc_type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> &'tcx Allocation {
let path = AbsolutePathPrinter { tcx, path: String::new() }.print_type(ty).unwrap().path;
let alloc = Allocation::from_byte_aligned_bytes(path.into_bytes());
tcx.intern_const_alloc(alloc)
diff --git a/src/librustc_mir/interpret/machine.rs b/src/librustc_mir/interpret/machine.rs
index 7ee77a9..2581c13 100644
--- a/src/librustc_mir/interpret/machine.rs
+++ b/src/librustc_mir/interpret/machine.rs
@@ -11,7 +11,8 @@
use super::{
Allocation, AllocId, InterpResult, Scalar, AllocationExtra,
- InterpretCx, PlaceTy, OpTy, ImmTy, MemoryKind,
+ InterpretCx, PlaceTy, OpTy, ImmTy, MemoryKind, Pointer,
+ InterpErrorInfo, InterpError
};
/// Whether this kind of memory is allowed to leak
@@ -58,7 +59,7 @@
/// Methods of this trait signifies a point where CTFE evaluation would fail
/// and some use case dependent behaviour can instead be applied.
-pub trait Machine<'a, 'mir, 'tcx>: Sized {
+pub trait Machine<'mir, 'tcx>: Sized {
/// Additional memory kinds a machine wishes to distinguish from the builtin ones
type MemoryKinds: ::std::fmt::Debug + MayLeak + Eq + 'static;
@@ -95,11 +96,11 @@
const STATIC_KIND: Option<Self::MemoryKinds>;
/// Whether to enforce the validity invariant
- fn enforce_validity(ecx: &InterpretCx<'a, 'mir, 'tcx, Self>) -> bool;
+ fn enforce_validity(ecx: &InterpretCx<'mir, 'tcx, Self>) -> bool;
/// Called before a basic block terminator is executed.
/// You can use this to detect endlessly running programs.
- fn before_terminator(ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>) -> InterpResult<'tcx>;
+ fn before_terminator(ecx: &mut InterpretCx<'mir, 'tcx, Self>) -> InterpResult<'tcx>;
/// Entry point to all function calls.
///
@@ -112,7 +113,7 @@
/// Passing `dest`and `ret` in the same `Option` proved very annoying when only one of them
/// was used.
fn find_fn(
- ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
+ ecx: &mut InterpretCx<'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, Self::PointerTag>],
dest: Option<PlaceTy<'tcx, Self::PointerTag>>,
@@ -122,7 +123,7 @@
/// Directly process an intrinsic without pushing a stack frame.
/// If this returns successfully, the engine will take care of jumping to the next block.
fn call_intrinsic(
- ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
+ ecx: &mut InterpretCx<'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, Self::PointerTag>],
dest: PlaceTy<'tcx, Self::PointerTag>,
@@ -137,7 +138,7 @@
/// This allocation will then be fed to `tag_allocation` to initialize the "extra" state.
fn find_foreign_static(
def_id: DefId,
- tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxtAt<'tcx>,
) -> InterpResult<'tcx, Cow<'tcx, Allocation>>;
/// Called for all binary operations on integer(-like) types when one operand is a pointer
@@ -145,7 +146,7 @@
///
/// Returns a (value, overflowed) pair if the operation succeeded
fn ptr_op(
- ecx: &InterpretCx<'a, 'mir, 'tcx, Self>,
+ ecx: &InterpretCx<'mir, 'tcx, Self>,
bin_op: mir::BinOp,
left: ImmTy<'tcx, Self::PointerTag>,
right: ImmTy<'tcx, Self::PointerTag>,
@@ -153,7 +154,7 @@
/// Heap allocations via the `box` keyword.
fn box_alloc(
- ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
+ ecx: &mut InterpretCx<'mir, 'tcx, Self>,
dest: PlaceTy<'tcx, Self::PointerTag>,
) -> InterpResult<'tcx>;
@@ -193,7 +194,7 @@
/// Executes a retagging operation
#[inline]
fn retag(
- _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
+ _ecx: &mut InterpretCx<'mir, 'tcx, Self>,
_kind: mir::RetagKind,
_place: PlaceTy<'tcx, Self::PointerTag>,
) -> InterpResult<'tcx> {
@@ -201,13 +202,29 @@
}
/// Called immediately before a new stack frame got pushed
- fn stack_push(
- ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
- ) -> InterpResult<'tcx, Self::FrameExtra>;
+ fn stack_push(ecx: &mut InterpretCx<'mir, 'tcx, Self>) -> InterpResult<'tcx, Self::FrameExtra>;
/// Called immediately after a stack frame gets popped
fn stack_pop(
- ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
+ ecx: &mut InterpretCx<'mir, 'tcx, Self>,
extra: Self::FrameExtra,
) -> InterpResult<'tcx>;
+
+ fn int_to_ptr(
+ int: u64,
+ _extra: &Self::MemoryExtra,
+ ) -> InterpResult<'tcx, Pointer<Self::PointerTag>> {
+ if int == 0 {
+ Err(InterpErrorInfo::from(InterpError::InvalidNullPointerUsage))
+ } else {
+ Err(InterpErrorInfo::from(InterpError::ReadBytesAsPointer))
+ }
+ }
+
+ fn ptr_to_int(
+ _ptr: Pointer<Self::PointerTag>,
+ _extra: &Self::MemoryExtra,
+ ) -> InterpResult<'tcx, u64> {
+ Err(InterpErrorInfo::from(InterpError::ReadPointerAsBytes))
+ }
}
diff --git a/src/librustc_mir/interpret/memory.rs b/src/librustc_mir/interpret/memory.rs
index 7126cd8..a0a34df 100644
--- a/src/librustc_mir/interpret/memory.rs
+++ b/src/librustc_mir/interpret/memory.rs
@@ -45,7 +45,7 @@
// `Memory` has to depend on the `Machine` because some of its operations
// (e.g., `get`) call a `Machine` hook.
-pub struct Memory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'a, 'mir, 'tcx>> {
+pub struct Memory<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
/// Allocations local to this instance of the miri engine. The kind
/// helps ensure that the same mechanism is used for allocation and
/// deallocation. When an allocation is not found here, it is a
@@ -66,12 +66,10 @@
pub extra: M::MemoryExtra,
/// Lets us implement `HasDataLayout`, which is awfully convenient.
- pub(super) tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
+ pub(super) tcx: TyCtxtAt<'tcx>,
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
- for Memory<'a, 'mir, 'tcx, M>
-{
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for Memory<'mir, 'tcx, M> {
#[inline]
fn data_layout(&self) -> &TargetDataLayout {
&self.tcx.data_layout
@@ -80,12 +78,9 @@
// FIXME: Really we shouldn't clone memory, ever. Snapshot machinery should instead
// carefully copy only the reachable parts.
-impl<'a, 'mir, 'tcx, M>
- Clone
-for
- Memory<'a, 'mir, 'tcx, M>
+impl<'mir, 'tcx, M> Clone for Memory<'mir, 'tcx, M>
where
- M: Machine<'a, 'mir, 'tcx, PointerTag=(), AllocExtra=(), MemoryExtra=()>,
+ M: Machine<'mir, 'tcx, PointerTag = (), AllocExtra = (), MemoryExtra = ()>,
M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation)>,
{
fn clone(&self) -> Self {
@@ -98,8 +93,8 @@
}
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
- pub fn new(tcx: TyCtxtAt<'a, 'tcx, 'tcx>) -> Self {
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
+ pub fn new(tcx: TyCtxtAt<'tcx>) -> Self {
Memory {
alloc_map: M::MemoryMap::default(),
dead_alloc_map: FxHashMap::default(),
@@ -312,7 +307,7 @@
}
/// Allocation accessors
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
/// Helper function to obtain the global (tcx) allocation for a static.
/// This attempts to return a reference to an existing allocation if
/// one can be found in `tcx`. That, however, is only possible if `tcx` and
@@ -329,7 +324,7 @@
/// another static), those inner references only exist in "resolved" form.
fn get_static_alloc(
id: AllocId,
- tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxtAt<'tcx>,
memory_extra: &M::MemoryExtra,
) -> InterpResult<'tcx, Cow<'tcx, Allocation<M::PointerTag, M::AllocExtra>>> {
let alloc = tcx.alloc_map.lock().get(id);
@@ -623,7 +618,7 @@
}
/// Byte Accessors
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
pub fn read_bytes(
&self,
ptr: Scalar<M::PointerTag>,
@@ -632,16 +627,16 @@
if size.bytes() == 0 {
Ok(&[])
} else {
- let ptr = ptr.to_ptr()?;
+ let ptr = self.force_ptr(ptr)?;
self.get(ptr.alloc_id)?.get_bytes(self, ptr, size)
}
}
}
/// Interning (for CTFE)
-impl<'a, 'mir, 'tcx, M> Memory<'a, 'mir, 'tcx, M>
+impl<'mir, 'tcx, M> Memory<'mir, 'tcx, M>
where
- M: Machine<'a, 'mir, 'tcx, PointerTag=(), AllocExtra=(), MemoryExtra=()>,
+ M: Machine<'mir, 'tcx, PointerTag = (), AllocExtra = (), MemoryExtra = ()>,
// FIXME: Working around https://github.com/rust-lang/rust/issues/24159
M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation)>,
{
@@ -689,7 +684,7 @@
}
/// Reading and writing.
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
pub fn copy(
&mut self,
src: Scalar<M::PointerTag>,
@@ -719,8 +714,8 @@
// non-NULLness which already happened.
return Ok(());
}
- let src = src.to_ptr()?;
- let dest = dest.to_ptr()?;
+ let src = self.force_ptr(src)?;
+ let dest = self.force_ptr(dest)?;
// first copy the relocations to a temporary buffer, because
// `get_bytes_mut` will clear the relocations, which is correct,
@@ -806,7 +801,7 @@
}
/// Undefined bytes
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// FIXME: Add a fast version for the common, nonoverlapping case
fn copy_undef_mask(
&mut self,
@@ -879,4 +874,25 @@
}
Ok(())
}
+
+ pub fn force_ptr(
+ &self,
+ scalar: Scalar<M::PointerTag>,
+ ) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
+ match scalar {
+ Scalar::Ptr(ptr) => Ok(ptr),
+ _ => M::int_to_ptr(scalar.to_usize(self)?, &self.extra)
+ }
+ }
+
+ pub fn force_bits(
+ &self,
+ scalar: Scalar<M::PointerTag>,
+ size: Size
+ ) -> InterpResult<'tcx, u128> {
+ match scalar.to_bits_or_ptr(size, self) {
+ Ok(bits) => Ok(bits),
+ Err(ptr) => Ok(M::ptr_to_int(ptr, &self.extra)? as u128)
+ }
+ }
}
diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs
index 7c83bf1..4b1e782 100644
--- a/src/librustc_mir/interpret/operand.rs
+++ b/src/librustc_mir/interpret/operand.rs
@@ -211,7 +211,7 @@
}
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
/// Try reading an immediate in memory; this is interesting particularly for `ScalarPair`.
/// Returns `None` if the layout does not permit loading this as a value.
fn try_read_immediate_from_mplace(
@@ -232,7 +232,7 @@
}
// check for integer pointers before alignment to report better errors
- let ptr = ptr.to_ptr()?;
+ let ptr = self.force_ptr(ptr)?;
self.memory.check_align(ptr.into(), ptr_align)?;
match mplace.layout.abi {
layout::Abi::Scalar(..) => {
diff --git a/src/librustc_mir/interpret/operator.rs b/src/librustc_mir/interpret/operator.rs
index db7da93..029a440 100644
--- a/src/librustc_mir/interpret/operator.rs
+++ b/src/librustc_mir/interpret/operator.rs
@@ -7,7 +7,7 @@
use super::{InterpretCx, PlaceTy, Immediate, Machine, ImmTy};
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
/// Applies the binary operation `op` to the two operands and writes a tuple of the result
/// and a boolean signifying the potential overflow to the destination.
pub fn binop_with_overflow(
@@ -36,7 +36,7 @@
}
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
fn binary_char_op(
&self,
bin_op: mir::BinOp,
@@ -347,7 +347,7 @@
}
_ => {
assert!(layout.ty.is_integral());
- let val = val.to_bits(layout.size)?;
+ let val = self.force_bits(val, layout.size)?;
let res = match un_op {
Not => !val,
Neg => {
diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs
index 758230e..a8f88af 100644
--- a/src/librustc_mir/interpret/place.rs
+++ b/src/librustc_mir/interpret/place.rs
@@ -290,11 +290,11 @@
}
// separating the pointer tag for `impl Trait`, see https://github.com/rust-lang/rust/issues/54385
-impl<'a, 'mir, 'tcx, Tag, M> InterpretCx<'a, 'mir, 'tcx, M>
+impl<'mir, 'tcx, Tag, M> InterpretCx<'mir, 'tcx, M>
where
// FIXME: Working around https://github.com/rust-lang/rust/issues/54385
Tag: ::std::fmt::Debug + Copy + Eq + Hash + 'static,
- M: Machine<'a, 'mir, 'tcx, PointerTag=Tag>,
+ M: Machine<'mir, 'tcx, PointerTag = Tag>,
// FIXME: Working around https://github.com/rust-lang/rust/issues/24159
M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation<Tag, M::AllocExtra>)>,
M::AllocExtra: AllocationExtra<Tag>,
@@ -348,8 +348,12 @@
offsets[usize::try_from(field).unwrap()],
layout::FieldPlacement::Array { stride, .. } => {
let len = base.len(self)?;
- assert!(field < len, "Tried to access element {} of array/slice with length {}",
- field, len);
+ if field >= len {
+ // This can be violated because this runs during promotion on code where the
+ // type system has not yet ensured that such things don't happen.
+ debug!("Tried to access element {} of array/slice with length {}", field, len);
+ return err!(BoundsCheck { len, index: field });
+ }
stride * field
}
layout::FieldPlacement::Union(count) => {
@@ -396,8 +400,7 @@
pub fn mplace_array_fields(
&self,
base: MPlaceTy<'tcx, Tag>,
- ) ->
- InterpResult<'tcx, impl Iterator<Item=InterpResult<'tcx, MPlaceTy<'tcx, Tag>>> + 'a>
+ ) -> InterpResult<'tcx, impl Iterator<Item = InterpResult<'tcx, MPlaceTy<'tcx, Tag>>> + 'tcx>
{
let len = base.len(self)?; // also asserts that we have a type where this makes sense
let stride = match base.layout.fields {
@@ -470,7 +473,7 @@
let layout = self.layout_of(self.tcx.types.usize)?;
let n = self.access_local(self.frame(), local, Some(layout))?;
let n = self.read_scalar(n)?;
- let n = n.to_bits(self.tcx.data_layout.pointer_size)?;
+ let n = self.force_bits(n.not_undef()?, self.tcx.data_layout.pointer_size)?;
self.mplace_field(base, u64::try_from(n).unwrap())?
}
@@ -750,7 +753,7 @@
}
// check for integer pointers before alignment to report better errors
- let ptr = ptr.to_ptr()?;
+ let ptr = self.force_ptr(ptr)?;
self.memory.check_align(ptr.into(), ptr_align)?;
let tcx = &*self.tcx;
// FIXME: We should check that there are dest.layout.size many bytes available in
diff --git a/src/librustc_mir/interpret/snapshot.rs b/src/librustc_mir/interpret/snapshot.rs
index 4e13291..0032e84 100644
--- a/src/librustc_mir/interpret/snapshot.rs
+++ b/src/librustc_mir/interpret/snapshot.rs
@@ -28,7 +28,7 @@
use crate::const_eval::CompileTimeInterpreter;
#[derive(Default)]
-pub(crate) struct InfiniteLoopDetector<'a, 'mir, 'tcx: 'a + 'mir> {
+pub(crate) struct InfiniteLoopDetector<'mir, 'tcx> {
/// The set of all `InterpSnapshot` *hashes* observed by this detector.
///
/// When a collision occurs in this table, we store the full snapshot in
@@ -40,16 +40,15 @@
/// An `InterpSnapshot` will only be fully cloned once it has caused a
/// collision in `hashes`. As a result, the detector must observe at least
/// *two* full cycles of an infinite loop before it triggers.
- snapshots: FxHashSet<InterpSnapshot<'a, 'mir, 'tcx>>,
+ snapshots: FxHashSet<InterpSnapshot<'mir, 'tcx>>,
}
-impl<'a, 'mir, 'tcx> InfiniteLoopDetector<'a, 'mir, 'tcx>
-{
- pub fn observe_and_analyze<'b>(
+impl<'mir, 'tcx> InfiniteLoopDetector<'mir, 'tcx> {
+ pub fn observe_and_analyze(
&mut self,
- tcx: TyCtxt<'b, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
span: Span,
- memory: &Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
+ memory: &Memory<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>,
stack: &[Frame<'mir, 'tcx>],
) -> InterpResult<'tcx, ()> {
// Compute stack's hash before copying anything
@@ -373,8 +372,8 @@
layout -> _,
});
-impl<'a, 'b, 'mir, 'tcx: 'a+'mir> SnapshotContext<'b>
- for Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>
+impl<'b, 'mir, 'tcx> SnapshotContext<'b>
+ for Memory<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>
{
fn resolve(&'b self, id: &AllocId) -> Option<&'b Allocation> {
self.get(*id).ok()
@@ -384,16 +383,15 @@
/// The virtual machine state during const-evaluation at a given point in time.
/// We assume the `CompileTimeInterpreter` has no interesting extra state that
/// is worth considering here.
-struct InterpSnapshot<'a, 'mir, 'tcx: 'a + 'mir> {
- memory: Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
+struct InterpSnapshot<'mir, 'tcx> {
+ memory: Memory<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>,
stack: Vec<Frame<'mir, 'tcx>>,
}
-impl<'a, 'mir, 'tcx: 'a + 'mir> InterpSnapshot<'a, 'mir, 'tcx>
-{
+impl InterpSnapshot<'mir, 'tcx> {
fn new(
- memory: &Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
- stack: &[Frame<'mir, 'tcx>]
+ memory: &Memory<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>,
+ stack: &[Frame<'mir, 'tcx>],
) -> Self {
InterpSnapshot {
memory: memory.clone(),
@@ -408,11 +406,9 @@
// Start with the stack, iterate and recursively snapshot
self.stack.iter().map(|frame| frame.snapshot(&self.memory)).collect()
}
-
}
-impl<'a, 'mir, 'tcx> Hash for InterpSnapshot<'a, 'mir, 'tcx>
-{
+impl<'mir, 'tcx> Hash for InterpSnapshot<'mir, 'tcx> {
fn hash<H: Hasher>(&self, state: &mut H) {
// Implement in terms of hash stable, so that k1 == k2 -> hash(k1) == hash(k2)
let mut hcx = self.memory.tcx.get_stable_hashing_context();
@@ -422,17 +418,15 @@
}
}
-impl_stable_hash_for!(impl<> for struct InterpSnapshot<'_, 'mir, 'tcx> {
+impl_stable_hash_for!(impl<> for struct InterpSnapshot<'mir, 'tcx> {
// Not hashing memory: Avoid hashing memory all the time during execution
memory -> _,
stack,
});
-impl<'a, 'mir, 'tcx> Eq for InterpSnapshot<'a, 'mir, 'tcx>
-{}
+impl<'mir, 'tcx> Eq for InterpSnapshot<'mir, 'tcx> {}
-impl<'a, 'mir, 'tcx> PartialEq for InterpSnapshot<'a, 'mir, 'tcx>
-{
+impl<'mir, 'tcx> PartialEq for InterpSnapshot<'mir, 'tcx> {
fn eq(&self, other: &Self) -> bool {
// FIXME: This looks to be a *ridiculously expensive* comparison operation.
// Doesn't this make tons of copies? Either `snapshot` is very badly named,
diff --git a/src/librustc_mir/interpret/step.rs b/src/librustc_mir/interpret/step.rs
index 9312d71..2f99973 100644
--- a/src/librustc_mir/interpret/step.rs
+++ b/src/librustc_mir/interpret/step.rs
@@ -35,7 +35,7 @@
}
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
pub fn run(&mut self) -> InterpResult<'tcx> {
while self.step()? {}
Ok(())
diff --git a/src/librustc_mir/interpret/terminator.rs b/src/librustc_mir/interpret/terminator.rs
index ff8d680..190f781 100644
--- a/src/librustc_mir/interpret/terminator.rs
+++ b/src/librustc_mir/interpret/terminator.rs
@@ -11,7 +11,7 @@
InterpretCx, Machine, Immediate, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup
};
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
#[inline]
pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
if let Some(target) = target {
@@ -79,7 +79,7 @@
let (fn_def, abi) = match func.layout.ty.sty {
ty::FnPtr(sig) => {
let caller_abi = sig.abi();
- let fn_ptr = self.read_scalar(func)?.to_ptr()?;
+ let fn_ptr = self.force_ptr(self.read_scalar(func)?.not_undef()?)?;
let instance = self.memory.get_fn(fn_ptr)?;
(instance, caller_abi)
}
diff --git a/src/librustc_mir/interpret/traits.rs b/src/librustc_mir/interpret/traits.rs
index 220f3e8..4ae0ee5 100644
--- a/src/librustc_mir/interpret/traits.rs
+++ b/src/librustc_mir/interpret/traits.rs
@@ -4,7 +4,7 @@
use super::{InterpretCx, InterpError, Machine, MemoryKind};
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
/// Creates a dynamic vtable for the given type and vtable origin. This is used only for
/// objects.
///
diff --git a/src/librustc_mir/interpret/validity.rs b/src/librustc_mir/interpret/validity.rs
index 6768d9e..3460c21 100644
--- a/src/librustc_mir/interpret/validity.rs
+++ b/src/librustc_mir/interpret/validity.rs
@@ -149,17 +149,17 @@
}
}
-struct ValidityVisitor<'rt, 'a: 'rt, 'mir: 'rt, 'tcx: 'a+'rt+'mir, M: Machine<'a, 'mir, 'tcx>+'rt> {
+struct ValidityVisitor<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
/// The `path` may be pushed to, but the part that is present when a function
/// starts must not be changed! `visit_fields` and `visit_array` rely on
/// this stack discipline.
path: Vec<PathElem>,
ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::PointerTag>>>,
const_mode: bool,
- ecx: &'rt InterpretCx<'a, 'mir, 'tcx, M>,
+ ecx: &'rt InterpretCx<'mir, 'tcx, M>,
}
-impl<'rt, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> ValidityVisitor<'rt, 'a, 'mir, 'tcx, M> {
+impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M> {
fn aggregate_field_path_elem(
&mut self,
layout: TyLayout<'tcx>,
@@ -175,8 +175,8 @@
// Sometimes the index is beyond the number of upvars (seen
// for a generator).
if let Some((&var_hir_id, _)) = upvars.get_index(field) {
- let var_node_id = self.ecx.tcx.hir().hir_to_node_id(var_hir_id);
- if let hir::Node::Binding(pat) = self.ecx.tcx.hir().get(var_node_id) {
+ let node = self.ecx.tcx.hir().get_by_hir_id(var_hir_id);
+ if let hir::Node::Binding(pat) = node {
if let hir::PatKind::Binding(_, _, ident, _) = pat.node {
name = Some(ident.name);
}
@@ -235,13 +235,13 @@
}
}
-impl<'rt, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>>
- ValueVisitor<'a, 'mir, 'tcx, M> for ValidityVisitor<'rt, 'a, 'mir, 'tcx, M>
+impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
+ for ValidityVisitor<'rt, 'mir, 'tcx, M>
{
type V = OpTy<'tcx, M::PointerTag>;
#[inline(always)]
- fn ecx(&self) -> &InterpretCx<'a, 'mir, 'tcx, M> {
+ fn ecx(&self) -> &InterpretCx<'mir, 'tcx, M> {
&self.ecx
}
@@ -559,7 +559,7 @@
// This is the size in bytes of the whole array.
let size = ty_size * len;
- let ptr = mplace.ptr.to_ptr()?;
+ let ptr = self.ecx.force_ptr(mplace.ptr)?;
// NOTE: Keep this in sync with the handling of integer and float
// types above, in `visit_primitive`.
@@ -607,7 +607,7 @@
}
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
/// This function checks the data at `op`. `op` is assumed to cover valid memory if it
/// is an indirect operand.
/// It will error if the bits at the destination do not match the ones described by the layout.
diff --git a/src/librustc_mir/interpret/visitor.rs b/src/librustc_mir/interpret/visitor.rs
index b5477c6..9150f16 100644
--- a/src/librustc_mir/interpret/visitor.rs
+++ b/src/librustc_mir/interpret/visitor.rs
@@ -14,15 +14,14 @@
// A thing that we can project into, and that has a layout.
// This wouldn't have to depend on `Machine` but with the current type inference,
// that's just more convenient to work with (avoids repeating all the `Machine` bounds).
-pub trait Value<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>>: Copy
-{
+pub trait Value<'mir, 'tcx, M: Machine<'mir, 'tcx>>: Copy {
/// Gets this value's layout.
fn layout(&self) -> TyLayout<'tcx>;
/// Makes this into an `OpTy`.
fn to_op(
self,
- ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
+ ecx: &InterpretCx<'mir, 'tcx, M>,
) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>>;
/// Creates this from an `MPlaceTy`.
@@ -31,23 +30,21 @@
/// Projects to the given enum variant.
fn project_downcast(
self,
- ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
+ ecx: &InterpretCx<'mir, 'tcx, M>,
variant: VariantIdx,
) -> InterpResult<'tcx, Self>;
/// Projects to the n-th field.
fn project_field(
self,
- ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
+ ecx: &InterpretCx<'mir, 'tcx, M>,
field: u64,
) -> InterpResult<'tcx, Self>;
}
// Operands and memory-places are both values.
// Places in general are not due to `place_field` having to do `force_allocation`.
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Value<'a, 'mir, 'tcx, M>
- for OpTy<'tcx, M::PointerTag>
-{
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Value<'mir, 'tcx, M> for OpTy<'tcx, M::PointerTag> {
#[inline(always)]
fn layout(&self) -> TyLayout<'tcx> {
self.layout
@@ -56,7 +53,7 @@
#[inline(always)]
fn to_op(
self,
- _ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
+ _ecx: &InterpretCx<'mir, 'tcx, M>,
) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
Ok(self)
}
@@ -69,7 +66,7 @@
#[inline(always)]
fn project_downcast(
self,
- ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
+ ecx: &InterpretCx<'mir, 'tcx, M>,
variant: VariantIdx,
) -> InterpResult<'tcx, Self> {
ecx.operand_downcast(self, variant)
@@ -78,15 +75,13 @@
#[inline(always)]
fn project_field(
self,
- ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
+ ecx: &InterpretCx<'mir, 'tcx, M>,
field: u64,
) -> InterpResult<'tcx, Self> {
ecx.operand_field(self, field)
}
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Value<'a, 'mir, 'tcx, M>
- for MPlaceTy<'tcx, M::PointerTag>
-{
+impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Value<'mir, 'tcx, M> for MPlaceTy<'tcx, M::PointerTag> {
#[inline(always)]
fn layout(&self) -> TyLayout<'tcx> {
self.layout
@@ -95,7 +90,7 @@
#[inline(always)]
fn to_op(
self,
- _ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
+ _ecx: &InterpretCx<'mir, 'tcx, M>,
) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
Ok(self.into())
}
@@ -108,7 +103,7 @@
#[inline(always)]
fn project_downcast(
self,
- ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
+ ecx: &InterpretCx<'mir, 'tcx, M>,
variant: VariantIdx,
) -> InterpResult<'tcx, Self> {
ecx.mplace_downcast(self, variant)
@@ -117,7 +112,7 @@
#[inline(always)]
fn project_field(
self,
- ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
+ ecx: &InterpretCx<'mir, 'tcx, M>,
field: u64,
) -> InterpResult<'tcx, Self> {
ecx.mplace_field(self, field)
@@ -127,12 +122,12 @@
macro_rules! make_value_visitor {
($visitor_trait_name:ident, $($mutability:ident)?) => {
// How to traverse a value and what to do when we are at the leaves.
- pub trait $visitor_trait_name<'a, 'mir, 'tcx: 'mir+'a, M: Machine<'a, 'mir, 'tcx>>: Sized {
- type V: Value<'a, 'mir, 'tcx, M>;
+ pub trait $visitor_trait_name<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>>: Sized {
+ type V: Value<'mir, 'tcx, M>;
/// The visitor must have an `InterpretCx` in it.
fn ecx(&$($mutability)? self)
- -> &$($mutability)? InterpretCx<'a, 'mir, 'tcx, M>;
+ -> &$($mutability)? InterpretCx<'mir, 'tcx, M>;
// Recursive actions, ready to be overloaded.
/// Visits the given value, dispatching as appropriate to more specialized visitors.
diff --git a/src/librustc_mir/lints.rs b/src/librustc_mir/lints.rs
index e15c8a4..a712088 100644
--- a/src/librustc_mir/lints.rs
+++ b/src/librustc_mir/lints.rs
@@ -7,9 +7,7 @@
use rustc::ty::{self, AssocItem, AssocItemContainer, Instance, TyCtxt};
use rustc::ty::subst::InternalSubsts;
-pub fn check(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- body: &Body<'tcx>,
- def_id: DefId) {
+pub fn check(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, def_id: DefId) {
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
if let Some(fn_like_node) = FnLikeNode::from_node(tcx.hir().get_by_hir_id(hir_id)) {
@@ -17,10 +15,12 @@
}
}
-fn check_fn_for_unconditional_recursion(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- fn_kind: FnKind<'_>,
- body: &Body<'tcx>,
- def_id: DefId) {
+fn check_fn_for_unconditional_recursion(
+ tcx: TyCtxt<'tcx>,
+ fn_kind: FnKind<'_>,
+ body: &Body<'tcx>,
+ def_id: DefId,
+) {
if let FnKind::Closure(_) = fn_kind {
// closures can't recur, so they don't matter.
return;
@@ -130,7 +130,7 @@
// recurs.
if !reached_exit_without_self_call && !self_call_locations.is_empty() {
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
- let sp = tcx.sess.source_map().def_span(tcx.hir().span_by_hir_id(hir_id));
+ let sp = tcx.sess.source_map().def_span(tcx.hir().span(hir_id));
let mut db = tcx.struct_span_lint_hir(UNCONDITIONAL_RECURSION,
hir_id,
sp,
diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs
index 706ace0..2e74ebc 100644
--- a/src/librustc_mir/monomorphize/collector.rs
+++ b/src/librustc_mir/monomorphize/collector.rs
@@ -281,10 +281,10 @@
}
}
-pub fn collect_crate_mono_items<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- mode: MonoItemCollectionMode)
- -> (FxHashSet<MonoItem<'tcx>>,
- InliningMap<'tcx>) {
+pub fn collect_crate_mono_items<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ mode: MonoItemCollectionMode,
+) -> (FxHashSet<MonoItem<'tcx>>, InliningMap<'tcx>) {
let roots = time(tcx.sess, "collecting roots", || {
collect_roots(tcx, mode)
});
@@ -315,9 +315,7 @@
// Find all non-generic items by walking the HIR. These items serve as roots to
// start monomorphizing from.
-fn collect_roots<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- mode: MonoItemCollectionMode)
- -> Vec<MonoItem<'tcx>> {
+fn collect_roots<'tcx>(tcx: TyCtxt<'tcx>, mode: MonoItemCollectionMode) -> Vec<MonoItem<'tcx>> {
debug!("Collecting roots");
let mut roots = Vec::new();
@@ -347,11 +345,13 @@
}
// Collect all monomorphized items reachable from `starting_point`
-fn collect_items_rec<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- starting_point: MonoItem<'tcx>,
- visited: MTRef<'_, MTLock<FxHashSet<MonoItem<'tcx>>>>,
- recursion_depths: &mut DefIdMap<usize>,
- inlining_map: MTRef<'_, MTLock<InliningMap<'tcx>>>) {
+fn collect_items_rec<'a, 'tcx: 'a>(
+ tcx: TyCtxt<'tcx>,
+ starting_point: MonoItem<'tcx>,
+ visited: MTRef<'_, MTLock<FxHashSet<MonoItem<'tcx>>>>,
+ recursion_depths: &mut DefIdMap<usize>,
+ inlining_map: MTRef<'_, MTLock<InliningMap<'tcx>>>,
+) {
if !visited.lock_mut().insert(starting_point.clone()) {
// We've been here already, no need to search again.
return;
@@ -413,10 +413,12 @@
debug!("END collect_items_rec({})", starting_point.to_string(tcx, true));
}
-fn record_accesses<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- caller: MonoItem<'tcx>,
- callees: &[MonoItem<'tcx>],
- inlining_map: MTRef<'_, MTLock<InliningMap<'tcx>>>) {
+fn record_accesses<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ caller: MonoItem<'tcx>,
+ callees: &[MonoItem<'tcx>],
+ inlining_map: MTRef<'_, MTLock<InliningMap<'tcx>>>,
+) {
let is_inlining_candidate = |mono_item: &MonoItem<'tcx>| {
mono_item.instantiation_mode(tcx) == InstantiationMode::LocalCopy
};
@@ -429,10 +431,11 @@
inlining_map.lock_mut().record_accesses(caller, accesses);
}
-fn check_recursion_limit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- instance: Instance<'tcx>,
- recursion_depths: &mut DefIdMap<usize>)
- -> (DefId, usize) {
+fn check_recursion_limit<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ instance: Instance<'tcx>,
+ recursion_depths: &mut DefIdMap<usize>,
+) -> (DefId, usize) {
let def_id = instance.def_id();
let recursion_depth = recursion_depths.get(&def_id).cloned().unwrap_or(0);
debug!(" => recursion depth={}", recursion_depth);
@@ -452,7 +455,7 @@
let error = format!("reached the recursion limit while instantiating `{}`",
instance);
if let Some(hir_id) = tcx.hir().as_local_hir_id(def_id) {
- tcx.sess.span_fatal(tcx.hir().span_by_hir_id(hir_id), &error);
+ tcx.sess.span_fatal(tcx.hir().span(hir_id), &error);
} else {
tcx.sess.fatal(&error);
}
@@ -463,9 +466,7 @@
(def_id, recursion_depth)
}
-fn check_type_length_limit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- instance: Instance<'tcx>)
-{
+fn check_type_length_limit<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) {
let type_length = instance.substs.types().flat_map(|ty| ty.walk()).count();
let const_length = instance.substs.consts().flat_map(|ct| ct.ty.walk()).count();
debug!(" => type length={}, const length={}", type_length, const_length);
@@ -515,7 +516,7 @@
}
struct MirNeighborCollector<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &'a mir::Body<'tcx>,
output: &'a mut Vec<MonoItem<'tcx>>,
param_substs: SubstsRef<'tcx>,
@@ -679,20 +680,22 @@
}
}
-fn visit_drop_use<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: Ty<'tcx>,
- is_direct_call: bool,
- output: &mut Vec<MonoItem<'tcx>>)
-{
+fn visit_drop_use<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ ty: Ty<'tcx>,
+ is_direct_call: bool,
+ output: &mut Vec<MonoItem<'tcx>>,
+) {
let instance = Instance::resolve_drop_in_place(tcx, ty);
visit_instance_use(tcx, instance, is_direct_call, output);
}
-fn visit_fn_use<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: Ty<'tcx>,
- is_direct_call: bool,
- output: &mut Vec<MonoItem<'tcx>>)
-{
+fn visit_fn_use<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ ty: Ty<'tcx>,
+ is_direct_call: bool,
+ output: &mut Vec<MonoItem<'tcx>>,
+) {
if let ty::FnDef(def_id, substs) = ty.sty {
let instance = ty::Instance::resolve(tcx,
ty::ParamEnv::reveal_all(),
@@ -702,11 +705,12 @@
}
}
-fn visit_instance_use<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- instance: ty::Instance<'tcx>,
- is_direct_call: bool,
- output: &mut Vec<MonoItem<'tcx>>)
-{
+fn visit_instance_use<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ instance: ty::Instance<'tcx>,
+ is_direct_call: bool,
+ output: &mut Vec<MonoItem<'tcx>>,
+) {
debug!("visit_item_use({:?}, is_direct_call={:?})", instance, is_direct_call);
if !should_monomorphize_locally(tcx, &instance) {
return
@@ -741,8 +745,7 @@
// Returns true if we should codegen an instance in the local crate.
// Returns false if we can just link to the upstream crate and therefore don't
// need a mono item.
-fn should_monomorphize_locally<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, instance: &Instance<'tcx>)
- -> bool {
+fn should_monomorphize_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx>) -> bool {
let def_id = match instance.def {
ty::InstanceDef::Item(def_id) => def_id,
ty::InstanceDef::VtableShim(..) |
@@ -776,10 +779,11 @@
}
return true;
- fn is_available_upstream_generic<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- substs: SubstsRef<'tcx>)
- -> bool {
+ fn is_available_upstream_generic<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ def_id: DefId,
+ substs: SubstsRef<'tcx>,
+ ) -> bool {
debug_assert!(!def_id.is_local());
// If we are not in share generics mode, we don't link to upstream
@@ -841,10 +845,11 @@
///
/// Finally, there is also the case of custom unsizing coercions, e.g., for
/// smart pointers such as `Rc` and `Arc`.
-fn find_vtable_types_for_unsizing<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- source_ty: Ty<'tcx>,
- target_ty: Ty<'tcx>)
- -> (Ty<'tcx>, Ty<'tcx>) {
+fn find_vtable_types_for_unsizing<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ source_ty: Ty<'tcx>,
+ target_ty: Ty<'tcx>,
+) -> (Ty<'tcx>, Ty<'tcx>) {
let ptr_vtable = |inner_source: Ty<'tcx>, inner_target: Ty<'tcx>| {
let type_has_metadata = |ty: Ty<'tcx>| -> bool {
use syntax_pos::DUMMY_SP;
@@ -914,10 +919,12 @@
/// Creates a `MonoItem` for each method that is referenced by the vtable for
/// the given trait/impl pair.
-fn create_mono_items_for_vtable_methods<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- trait_ty: Ty<'tcx>,
- impl_ty: Ty<'tcx>,
- output: &mut Vec<MonoItem<'tcx>>) {
+fn create_mono_items_for_vtable_methods<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ trait_ty: Ty<'tcx>,
+ impl_ty: Ty<'tcx>,
+ output: &mut Vec<MonoItem<'tcx>>,
+) {
assert!(!trait_ty.needs_subst() && !trait_ty.has_escaping_bound_vars() &&
!impl_ty.needs_subst() && !impl_ty.has_escaping_bound_vars());
@@ -948,14 +955,14 @@
// Root Collection
//=-----------------------------------------------------------------------------
-struct RootCollector<'b, 'a: 'b, 'tcx: 'a + 'b> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct RootCollector<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
mode: MonoItemCollectionMode,
- output: &'b mut Vec<MonoItem<'tcx>>,
+ output: &'a mut Vec<MonoItem<'tcx>>,
entry_fn: Option<(DefId, EntryFnType)>,
}
-impl<'b, 'a, 'v> ItemLikeVisitor<'v> for RootCollector<'b, 'a, 'v> {
+impl ItemLikeVisitor<'v> for RootCollector<'_, 'v> {
fn visit_item(&mut self, item: &'v hir::Item) {
match item.node {
hir::ItemKind::ExternCrate(..) |
@@ -1044,7 +1051,7 @@
}
}
-impl<'b, 'a, 'v> RootCollector<'b, 'a, 'v> {
+impl RootCollector<'_, 'v> {
fn is_root(&self, def_id: DefId) -> bool {
!item_requires_monomorphization(self.tcx, def_id) && match self.mode {
MonoItemCollectionMode::Eager => {
@@ -1107,14 +1114,16 @@
}
}
-fn item_requires_monomorphization<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> bool {
+fn item_requires_monomorphization<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
let generics = tcx.generics_of(def_id);
generics.requires_monomorphization(tcx)
}
-fn create_mono_items_for_default_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- item: &'tcx hir::Item,
- output: &mut Vec<MonoItem<'tcx>>) {
+fn create_mono_items_for_default_impls<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ item: &'tcx hir::Item,
+ output: &mut Vec<MonoItem<'tcx>>,
+) {
match item.node {
hir::ItemKind::Impl(_, _, _, ref generics, .., ref impl_item_refs) => {
for param in &generics.params {
@@ -1176,11 +1185,7 @@
}
/// Scan the miri alloc in order to find function calls, closures, and drop-glue
-fn collect_miri<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- alloc_id: AllocId,
- output: &mut Vec<MonoItem<'tcx>>,
-) {
+fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut Vec<MonoItem<'tcx>>) {
let alloc_kind = tcx.alloc_map.lock().get(alloc_id);
match alloc_kind {
Some(GlobalAlloc::Static(def_id)) => {
@@ -1207,10 +1212,11 @@
}
/// Scan the MIR in order to find function calls, closures, and drop-glue
-fn collect_neighbours<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- instance: Instance<'tcx>,
- output: &mut Vec<MonoItem<'tcx>>)
-{
+fn collect_neighbours<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ instance: Instance<'tcx>,
+ output: &mut Vec<MonoItem<'tcx>>,
+) {
let body = tcx.instance_mir(instance.def);
MirNeighborCollector {
@@ -1237,17 +1243,15 @@
}
}
-fn def_id_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> String {
+fn def_id_to_string<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> String {
let mut output = String::new();
let printer = DefPathBasedNames::new(tcx, false, false);
printer.push_def_path(def_id, &mut output);
output
}
-fn collect_const<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn collect_const<'tcx>(
+ tcx: TyCtxt<'tcx>,
constant: &'tcx ty::Const<'tcx>,
param_substs: SubstsRef<'tcx>,
output: &mut Vec<MonoItem<'tcx>>,
diff --git a/src/librustc_mir/monomorphize/item.rs b/src/librustc_mir/monomorphize/item.rs
index d60d0fe..2bcf058 100644
--- a/src/librustc_mir/monomorphize/item.rs
+++ b/src/librustc_mir/monomorphize/item.rs
@@ -35,7 +35,7 @@
LocalCopy,
}
-pub trait MonoItemExt<'a, 'tcx>: fmt::Debug {
+pub trait MonoItemExt<'tcx>: fmt::Debug {
fn as_mono_item(&self) -> &MonoItem<'tcx>;
fn is_generic_fn(&self) -> bool {
@@ -48,7 +48,7 @@
}
}
- fn symbol_name(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::SymbolName {
+ fn symbol_name(&self, tcx: TyCtxt<'tcx>) -> ty::SymbolName {
match *self.as_mono_item() {
MonoItem::Fn(instance) => tcx.symbol_name(instance),
MonoItem::Static(def_id) => {
@@ -62,9 +62,7 @@
}
}
}
- fn instantiation_mode(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> InstantiationMode {
+ fn instantiation_mode(&self, tcx: TyCtxt<'tcx>) -> InstantiationMode {
let inline_in_all_cgus =
tcx.sess.opts.debugging_opts.inline_in_all_cgus.unwrap_or_else(|| {
tcx.sess.opts.optimize != OptLevel::No
@@ -108,7 +106,7 @@
}
}
- fn explicit_linkage(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Linkage> {
+ fn explicit_linkage(&self, tcx: TyCtxt<'tcx>) -> Option<Linkage> {
let def_id = match *self.as_mono_item() {
MonoItem::Fn(ref instance) => instance.def_id(),
MonoItem::Static(def_id) => def_id,
@@ -144,7 +142,7 @@
/// Similarly, if a vtable method has such a signature, and therefore can't
/// be used, we can just not emit it and have a placeholder (a null pointer,
/// which will never be accessed) in its place.
- fn is_instantiable(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
+ fn is_instantiable(&self, tcx: TyCtxt<'tcx>) -> bool {
debug!("is_instantiable({:?})", self);
let (def_id, substs) = match *self.as_mono_item() {
MonoItem::Fn(ref instance) => (instance.def_id(), instance.substs),
@@ -156,7 +154,7 @@
tcx.substitute_normalize_and_test_predicates((def_id, &substs))
}
- fn to_string(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, debug: bool) -> String {
+ fn to_string(&self, tcx: TyCtxt<'tcx>, debug: bool) -> String {
return match *self.as_mono_item() {
MonoItem::Fn(instance) => {
to_string_internal(tcx, "fn ", instance, debug)
@@ -170,11 +168,12 @@
}
};
- fn to_string_internal<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- prefix: &str,
- instance: Instance<'tcx>,
- debug: bool)
- -> String {
+ fn to_string_internal<'a, 'tcx>(
+ tcx: TyCtxt<'tcx>,
+ prefix: &str,
+ instance: Instance<'tcx>,
+ debug: bool,
+ ) -> String {
let mut result = String::with_capacity(32);
result.push_str(prefix);
let printer = DefPathBasedNames::new(tcx, false, false);
@@ -183,7 +182,7 @@
}
}
- fn local_span(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Span> {
+ fn local_span(&self, tcx: TyCtxt<'tcx>) -> Option<Span> {
match *self.as_mono_item() {
MonoItem::Fn(Instance { def, .. }) => {
tcx.hir().as_local_hir_id(def.def_id())
@@ -194,11 +193,11 @@
MonoItem::GlobalAsm(hir_id) => {
Some(hir_id)
}
- }.map(|hir_id| tcx.hir().span_by_hir_id(hir_id))
+ }.map(|hir_id| tcx.hir().span(hir_id))
}
}
-impl<'a, 'tcx> MonoItemExt<'a, 'tcx> for MonoItem<'tcx> {
+impl MonoItemExt<'tcx> for MonoItem<'tcx> {
fn as_mono_item(&self) -> &MonoItem<'tcx> {
self
}
diff --git a/src/librustc_mir/monomorphize/mod.rs b/src/librustc_mir/monomorphize/mod.rs
index 51bcbff..b36cf49 100644
--- a/src/librustc_mir/monomorphize/mod.rs
+++ b/src/librustc_mir/monomorphize/mod.rs
@@ -5,10 +5,11 @@
pub mod collector;
pub mod partitioning;
-pub fn custom_coerce_unsize_info<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- source_ty: Ty<'tcx>,
- target_ty: Ty<'tcx>)
- -> CustomCoerceUnsized {
+pub fn custom_coerce_unsize_info<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ source_ty: Ty<'tcx>,
+ target_ty: Ty<'tcx>,
+) -> CustomCoerceUnsized {
let def_id = tcx.lang_items().coerce_unsized_trait().unwrap();
let trait_ref = ty::Binder::bind(ty::TraitRef {
diff --git a/src/librustc_mir/monomorphize/partitioning.rs b/src/librustc_mir/monomorphize/partitioning.rs
index 2c84364..a821cb2 100644
--- a/src/librustc_mir/monomorphize/partitioning.rs
+++ b/src/librustc_mir/monomorphize/partitioning.rs
@@ -121,16 +121,18 @@
}
// Anything we can't find a proper codegen unit for goes into this.
-fn fallback_cgu_name(name_builder: &mut CodegenUnitNameBuilder<'_, '_, '_>) -> InternedString {
+fn fallback_cgu_name(name_builder: &mut CodegenUnitNameBuilder<'_>) -> InternedString {
name_builder.build_cgu_name(LOCAL_CRATE, &["fallback"], Some("cgu"))
}
-pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- mono_items: I,
- strategy: PartitioningStrategy,
- inlining_map: &InliningMap<'tcx>)
- -> Vec<CodegenUnit<'tcx>>
- where I: Iterator<Item = MonoItem<'tcx>>
+pub fn partition<'tcx, I>(
+ tcx: TyCtxt<'tcx>,
+ mono_items: I,
+ strategy: PartitioningStrategy,
+ inlining_map: &InliningMap<'tcx>,
+) -> Vec<CodegenUnit<'tcx>>
+where
+ I: Iterator<Item = MonoItem<'tcx>>,
{
// In the first step, we place all regular monomorphizations into their
// respective 'home' codegen unit. Regular monomorphizations are all
@@ -201,10 +203,9 @@
internalization_candidates: FxHashSet<MonoItem<'tcx>>,
}
-fn place_root_mono_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- mono_items: I)
- -> PreInliningPartitioning<'tcx>
- where I: Iterator<Item = MonoItem<'tcx>>
+fn place_root_mono_items<'tcx, I>(tcx: TyCtxt<'tcx>, mono_items: I) -> PreInliningPartitioning<'tcx>
+where
+ I: Iterator<Item = MonoItem<'tcx>>,
{
let mut roots = FxHashSet::default();
let mut codegen_units = FxHashMap::default();
@@ -276,7 +277,7 @@
}
fn mono_item_linkage_and_visibility(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
mono_item: &MonoItem<'tcx>,
can_be_internalized: &mut bool,
export_generics: bool,
@@ -294,7 +295,7 @@
}
fn mono_item_visibility(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
mono_item: &MonoItem<'tcx>,
can_be_internalized: &mut bool,
export_generics: bool,
@@ -439,7 +440,7 @@
}
}
-fn default_visibility(tcx: TyCtxt<'_, '_, '_>, id: DefId, is_generic: bool) -> Visibility {
+fn default_visibility(tcx: TyCtxt<'_>, id: DefId, is_generic: bool) -> Visibility {
if !tcx.sess.target.target.options.default_hidden_visibility {
return Visibility::Default
}
@@ -463,9 +464,11 @@
}
}
-fn merge_codegen_units<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>,
- initial_partitioning: &mut PreInliningPartitioning<'tcx>,
- target_cgu_count: usize) {
+fn merge_codegen_units<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ initial_partitioning: &mut PreInliningPartitioning<'tcx>,
+ target_cgu_count: usize,
+) {
assert!(target_cgu_count >= 1);
let codegen_units = &mut initial_partitioning.codegen_units;
@@ -585,9 +588,11 @@
}
}
-fn internalize_symbols<'a, 'tcx>(_tcx: TyCtxt<'a, 'tcx, 'tcx>,
- partitioning: &mut PostInliningPartitioning<'tcx>,
- inlining_map: &InliningMap<'tcx>) {
+fn internalize_symbols<'tcx>(
+ _tcx: TyCtxt<'tcx>,
+ partitioning: &mut PostInliningPartitioning<'tcx>,
+ inlining_map: &InliningMap<'tcx>,
+) {
if partitioning.codegen_units.len() == 1 {
// Fast path for when there is only one codegen unit. In this case we
// can internalize all candidates, since there is nowhere else they
@@ -650,9 +655,10 @@
}
}
-fn characteristic_def_id_of_mono_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- mono_item: MonoItem<'tcx>)
- -> Option<DefId> {
+fn characteristic_def_id_of_mono_item<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ mono_item: MonoItem<'tcx>,
+) -> Option<DefId> {
match mono_item {
MonoItem::Fn(instance) => {
let def_id = match instance.def {
@@ -698,12 +704,13 @@
type CguNameCache = FxHashMap<(DefId, bool), InternedString>;
-fn compute_codegen_unit_name(tcx: TyCtxt<'_, '_, '_>,
- name_builder: &mut CodegenUnitNameBuilder<'_, '_, '_>,
- def_id: DefId,
- volatile: bool,
- cache: &mut CguNameCache)
- -> InternedString {
+fn compute_codegen_unit_name(
+ tcx: TyCtxt<'_>,
+ name_builder: &mut CodegenUnitNameBuilder<'_>,
+ def_id: DefId,
+ volatile: bool,
+ cache: &mut CguNameCache,
+) -> InternedString {
// Find the innermost module that is not nested within a function.
let mut current_def_id = def_id;
let mut cgu_def_id = None;
@@ -752,17 +759,17 @@
}).clone()
}
-fn numbered_codegen_unit_name(name_builder: &mut CodegenUnitNameBuilder<'_, '_, '_>,
- index: usize)
- -> InternedString {
+fn numbered_codegen_unit_name(
+ name_builder: &mut CodegenUnitNameBuilder<'_>,
+ index: usize,
+) -> InternedString {
name_builder.build_cgu_name_no_mangle(LOCAL_CRATE, &["cgu"], Some(index))
}
-fn debug_dump<'a, 'b, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- label: &str,
- cgus: I)
- where I: Iterator<Item=&'b CodegenUnit<'tcx>>,
- 'tcx: 'a + 'b
+fn debug_dump<'a, 'b, 'tcx, I>(tcx: TyCtxt<'tcx>, label: &str, cgus: I)
+where
+ I: Iterator<Item = &'b CodegenUnit<'tcx>>,
+ 'tcx: 'a + 'b,
{
if cfg!(debug_assertions) {
debug!("{}", label);
@@ -787,8 +794,9 @@
}
#[inline(never)] // give this a place in the profiler
-fn assert_symbols_are_distinct<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>, mono_items: I)
- where I: Iterator<Item=&'a MonoItem<'tcx>>
+fn assert_symbols_are_distinct<'a, 'tcx: 'a, I>(tcx: TyCtxt<'tcx>, mono_items: I)
+where
+ I: Iterator<Item = &'a MonoItem<'tcx>>,
{
let mut symbols: Vec<_> = mono_items.map(|mono_item| {
(mono_item, mono_item.symbol_name(tcx))
@@ -830,11 +838,10 @@
}
}
-fn collect_and_partition_mono_items<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn collect_and_partition_mono_items<'tcx>(
+ tcx: TyCtxt<'tcx>,
cnum: CrateNum,
-) -> (Arc<DefIdSet>, Arc<Vec<Arc<CodegenUnit<'tcx>>>>)
-{
+) -> (Arc<DefIdSet>, Arc<Vec<Arc<CodegenUnit<'tcx>>>>) {
assert_eq!(cnum, LOCAL_CRATE);
let collection_mode = match tcx.sess.opts.debugging_opts.print_mono_items {
diff --git a/src/librustc_mir/shim.rs b/src/librustc_mir/shim.rs
index 0f2196b..f5a22ea 100644
--- a/src/librustc_mir/shim.rs
+++ b/src/librustc_mir/shim.rs
@@ -26,10 +26,7 @@
providers.mir_shims = make_shim;
}
-fn make_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- instance: ty::InstanceDef<'tcx>)
- -> &'tcx Body<'tcx>
-{
+fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> &'tcx Body<'tcx> {
debug!("make_shim({:?})", instance);
let mut result = match instance {
@@ -166,11 +163,7 @@
.collect()
}
-fn build_drop_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- ty: Option<Ty<'tcx>>)
- -> Body<'tcx>
-{
+fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option<Ty<'tcx>>) -> Body<'tcx> {
debug!("build_drop_shim(def_id={:?}, ty={:?})", def_id, ty);
// Check if this is a generator, if so, return the drop glue for it
@@ -258,7 +251,7 @@
pub struct DropShimElaborator<'a, 'tcx: 'a> {
pub body: &'a Body<'tcx>,
pub patch: MirPatch<'tcx>,
- pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ pub tcx: TyCtxt<'tcx>,
pub param_env: ty::ParamEnv<'tcx>,
}
@@ -273,7 +266,9 @@
fn patch(&mut self) -> &mut MirPatch<'tcx> { &mut self.patch }
fn body(&self) -> &'a Body<'tcx> { self.body }
- fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.tcx }
+ fn tcx(&self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
fn param_env(&self) -> ty::ParamEnv<'tcx> { self.param_env }
fn drop_style(&self, _path: Self::Path, mode: DropFlagMode) -> DropStyle {
@@ -306,11 +301,7 @@
}
/// Builds a `Clone::clone` shim for `self_ty`. Here, `def_id` is `Clone::clone`.
-fn build_clone_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- self_ty: Ty<'tcx>)
- -> Body<'tcx>
-{
+fn build_clone_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) -> Body<'tcx> {
debug!("build_clone_shim(def_id={:?})", def_id);
let mut builder = CloneShimBuilder::new(tcx, def_id, self_ty);
@@ -340,8 +331,8 @@
builder.into_mir()
}
-struct CloneShimBuilder<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct CloneShimBuilder<'tcx> {
+ tcx: TyCtxt<'tcx>,
def_id: DefId,
local_decls: IndexVec<Local, LocalDecl<'tcx>>,
blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
@@ -349,10 +340,8 @@
sig: ty::FnSig<'tcx>,
}
-impl<'a, 'tcx> CloneShimBuilder<'a, 'tcx> {
- fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- self_ty: Ty<'tcx>) -> Self {
+impl CloneShimBuilder<'tcx> {
+ fn new(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) -> Self {
// we must subst the self_ty because it's
// otherwise going to be TySelf and we can't index
// or access fields of a Place of type TySelf.
@@ -691,13 +680,13 @@
///
/// If `untuple_args` is a vec of types, the second argument of the
/// function will be untupled as these types.
-fn build_call_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- rcvr_adjustment: Adjustment,
- call_kind: CallKind,
- untuple_args: Option<&[Ty<'tcx>]>)
- -> Body<'tcx>
-{
+fn build_call_shim<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ def_id: DefId,
+ rcvr_adjustment: Adjustment,
+ call_kind: CallKind,
+ untuple_args: Option<&[Ty<'tcx>]>,
+) -> Body<'tcx> {
debug!("build_call_shim(def_id={:?}, rcvr_adjustment={:?}, \
call_kind={:?}, untuple_args={:?})",
def_id, rcvr_adjustment, call_kind, untuple_args);
@@ -842,7 +831,7 @@
body
}
-pub fn build_adt_ctor<'gcx>(tcx: TyCtxt<'_, 'gcx, 'gcx>, ctor_id: DefId) -> &'gcx Body<'gcx> {
+pub fn build_adt_ctor<'tcx>(tcx: TyCtxt<'tcx>, ctor_id: DefId) -> &'tcx Body<'tcx> {
debug_assert!(tcx.is_constructor(ctor_id));
let span = tcx.hir().span_if_local(ctor_id)
diff --git a/src/librustc_mir/transform/add_call_guards.rs b/src/librustc_mir/transform/add_call_guards.rs
index 40af357..c08c33b 100644
--- a/src/librustc_mir/transform/add_call_guards.rs
+++ b/src/librustc_mir/transform/add_call_guards.rs
@@ -31,10 +31,7 @@
*/
impl MirPass for AddCallGuards {
- fn run_pass<'a, 'tcx>(&self,
- _tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _src: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, _tcx: TyCtxt<'tcx>, _src: MirSource<'tcx>, body: &mut Body<'tcx>) {
self.add_call_guards(body);
}
}
diff --git a/src/librustc_mir/transform/add_moves_for_packed_drops.rs b/src/librustc_mir/transform/add_moves_for_packed_drops.rs
index 23f5e63..a111669 100644
--- a/src/librustc_mir/transform/add_moves_for_packed_drops.rs
+++ b/src/librustc_mir/transform/add_moves_for_packed_drops.rs
@@ -40,31 +40,22 @@
pub struct AddMovesForPackedDrops;
impl MirPass for AddMovesForPackedDrops {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- src: MirSource<'tcx>,
- body: &mut Body<'tcx>)
- {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) {
debug!("add_moves_for_packed_drops({:?} @ {:?})", src, body.span);
add_moves_for_packed_drops(tcx, body, src.def_id());
}
}
-pub fn add_moves_for_packed_drops<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- body: &mut Body<'tcx>,
- def_id: DefId)
-{
+pub fn add_moves_for_packed_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>, def_id: DefId) {
let patch = add_moves_for_packed_drops_patch(tcx, body, def_id);
patch.apply(body);
}
-fn add_moves_for_packed_drops_patch<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn add_moves_for_packed_drops_patch<'tcx>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
- def_id: DefId)
- -> MirPatch<'tcx>
-{
+ def_id: DefId,
+) -> MirPatch<'tcx> {
let mut patch = MirPatch::new(body);
let param_env = tcx.param_env(def_id);
@@ -90,14 +81,14 @@
patch
}
-fn add_move_for_packed_drop<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn add_move_for_packed_drop<'tcx>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
patch: &mut MirPatch<'tcx>,
terminator: &Terminator<'tcx>,
loc: Location,
- is_cleanup: bool)
-{
+ is_cleanup: bool,
+) {
debug!("add_move_for_packed_drop({:?} @ {:?})", terminator, loc);
let (location, target, unwind) = match terminator.kind {
TerminatorKind::Drop { ref location, target, unwind } =>
diff --git a/src/librustc_mir/transform/add_retag.rs b/src/librustc_mir/transform/add_retag.rs
index bea95bc..ee040bf 100644
--- a/src/librustc_mir/transform/add_retag.rs
+++ b/src/librustc_mir/transform/add_retag.rs
@@ -48,7 +48,7 @@
/// Determine whether this type may have a reference in it, recursing below compound types but
/// not below references.
-fn may_have_reference<'a, 'gcx, 'tcx>(ty: Ty<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool {
+fn may_have_reference<'tcx>(ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> bool {
match ty.sty {
// Primitive types that are not references
ty::Bool | ty::Char |
@@ -74,11 +74,7 @@
}
impl MirPass for AddRetag {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _src: MirSource<'tcx>,
- body: &mut Body<'tcx>)
- {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _src: MirSource<'tcx>, body: &mut Body<'tcx>) {
if !tcx.sess.opts.debugging_opts.mir_emit_retag {
return;
}
diff --git a/src/librustc_mir/transform/check_unsafety.rs b/src/librustc_mir/transform/check_unsafety.rs
index b8077d2..32153f7 100644
--- a/src/librustc_mir/transform/check_unsafety.rs
+++ b/src/librustc_mir/transform/check_unsafety.rs
@@ -25,7 +25,7 @@
source_scope_local_data: &'a IndexVec<SourceScope, SourceScopeLocalData>,
violations: Vec<UnsafetyViolation>,
source_info: SourceInfo,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
/// Mark an `unsafe` block as used, so we don't lint it.
used_unsafe: FxHashSet<hir::HirId>,
@@ -38,7 +38,7 @@
min_const_fn: bool,
body: &'a Body<'tcx>,
source_scope_local_data: &'a IndexVec<SourceScope, SourceScopeLocalData>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
) -> Self {
// sanity check
@@ -480,14 +480,15 @@
}
}
-fn check_unused_unsafe<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- used_unsafe: &FxHashSet<hir::HirId>,
- unsafe_blocks: &'a mut Vec<(hir::HirId, bool)>)
-{
+fn check_unused_unsafe<'a, 'tcx>(
+ tcx: TyCtxt<'tcx>,
+ def_id: DefId,
+ used_unsafe: &FxHashSet<hir::HirId>,
+ unsafe_blocks: &'a mut Vec<(hir::HirId, bool)>,
+) {
let body_id =
tcx.hir().as_local_hir_id(def_id).and_then(|hir_id| {
- tcx.hir().maybe_body_owned_by_by_hir_id(hir_id)
+ tcx.hir().maybe_body_owned_by(hir_id)
});
let body_id = match body_id {
@@ -505,9 +506,7 @@
hir::intravisit::Visitor::visit_body(&mut visitor, body);
}
-fn unsafety_check_result<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
- -> UnsafetyCheckResult
-{
+fn unsafety_check_result<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> UnsafetyCheckResult {
debug!("unsafety_violations({:?})", def_id);
// N.B., this borrow is valid because all the consumers of
@@ -528,7 +527,7 @@
let param_env = tcx.param_env(def_id);
let id = tcx.hir().as_local_hir_id(def_id).unwrap();
- let (const_context, min_const_fn) = match tcx.hir().body_owner_kind_by_hir_id(id) {
+ let (const_context, min_const_fn) = match tcx.hir().body_owner_kind(id) {
hir::BodyOwnerKind::Closure => (false, false),
hir::BodyOwnerKind::Fn => (tcx.is_const_fn(def_id), tcx.is_min_const_fn(def_id)),
hir::BodyOwnerKind::Const |
@@ -546,7 +545,7 @@
}
}
-fn unsafe_derive_on_repr_packed<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
+fn unsafe_derive_on_repr_packed<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) {
let lint_hir_id = tcx.hir().as_local_hir_id(def_id).unwrap_or_else(||
bug!("checking unsafety for non-local def id {:?}", def_id));
@@ -566,9 +565,11 @@
}
/// Returns the `HirId` for an enclosing scope that is also `unsafe`.
-fn is_enclosed(tcx: TyCtxt<'_, '_, '_>,
- used_unsafe: &FxHashSet<hir::HirId>,
- id: hir::HirId) -> Option<(String, hir::HirId)> {
+fn is_enclosed(
+ tcx: TyCtxt<'_>,
+ used_unsafe: &FxHashSet<hir::HirId>,
+ id: hir::HirId,
+) -> Option<(String, hir::HirId)> {
let parent_id = tcx.hir().get_parent_node_by_hir_id(id);
if parent_id != id {
if used_unsafe.contains(&parent_id) {
@@ -589,21 +590,19 @@
}
}
-fn report_unused_unsafe(tcx: TyCtxt<'_, '_, '_>,
- used_unsafe: &FxHashSet<hir::HirId>,
- id: hir::HirId) {
- let span = tcx.sess.source_map().def_span(tcx.hir().span_by_hir_id(id));
+fn report_unused_unsafe(tcx: TyCtxt<'_>, used_unsafe: &FxHashSet<hir::HirId>, id: hir::HirId) {
+ let span = tcx.sess.source_map().def_span(tcx.hir().span(id));
let msg = "unnecessary `unsafe` block";
let mut db = tcx.struct_span_lint_hir(UNUSED_UNSAFE, id, span, msg);
db.span_label(span, msg);
if let Some((kind, id)) = is_enclosed(tcx, used_unsafe, id) {
- db.span_label(tcx.sess.source_map().def_span(tcx.hir().span_by_hir_id(id)),
+ db.span_label(tcx.sess.source_map().def_span(tcx.hir().span(id)),
format!("because it's nested under this `unsafe` {}", kind));
}
db.emit();
}
-fn builtin_derive_def_id<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Option<DefId> {
+fn builtin_derive_def_id<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Option<DefId> {
debug!("builtin_derive_def_id({:?})", def_id);
if let Some(impl_def_id) = tcx.impl_of_method(def_id) {
if tcx.has_attr(impl_def_id, sym::automatically_derived) {
@@ -619,7 +618,7 @@
}
}
-pub fn check_unsafety<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
+pub fn check_unsafety<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) {
debug!("check_unsafety({:?})", def_id);
// closures are handled by their parent fn.
diff --git a/src/librustc_mir/transform/cleanup_post_borrowck.rs b/src/librustc_mir/transform/cleanup_post_borrowck.rs
index 2bbd6ff..6ee1416 100644
--- a/src/librustc_mir/transform/cleanup_post_borrowck.rs
+++ b/src/librustc_mir/transform/cleanup_post_borrowck.rs
@@ -27,10 +27,7 @@
pub struct DeleteNonCodegenStatements;
impl MirPass for CleanupNonCodegenStatements {
- fn run_pass<'a, 'tcx>(&self,
- _tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _source: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, _tcx: TyCtxt<'tcx>, _source: MirSource<'tcx>, body: &mut Body<'tcx>) {
let mut delete = DeleteNonCodegenStatements;
delete.visit_body(body);
}
diff --git a/src/librustc_mir/transform/const_prop.rs b/src/librustc_mir/transform/const_prop.rs
index b112643..2ec5c19 100644
--- a/src/librustc_mir/transform/const_prop.rs
+++ b/src/librustc_mir/transform/const_prop.rs
@@ -31,10 +31,7 @@
pub struct ConstProp;
impl MirPass for ConstProp {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- source: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, body: &mut Body<'tcx>) {
// will be evaluated by miri and produce its errors there
if source.promoted.is_some() {
return;
@@ -83,9 +80,9 @@
type Const<'tcx> = OpTy<'tcx>;
/// Finds optimization opportunities on the MIR.
-struct ConstPropagator<'a, 'mir, 'tcx:'a+'mir> {
- ecx: InterpretCx<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct ConstPropagator<'mir, 'tcx> {
+ ecx: InterpretCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>,
+ tcx: TyCtxt<'tcx>,
source: MirSource<'tcx>,
places: IndexVec<Local, Option<Const<'tcx>>>,
can_const_prop: IndexVec<Local, bool>,
@@ -95,7 +92,7 @@
promoted: IndexVec<Promoted, Body<'tcx>>,
}
-impl<'a, 'b, 'tcx> LayoutOf for ConstPropagator<'a, 'b, 'tcx> {
+impl<'mir, 'tcx> LayoutOf for ConstPropagator<'mir, 'tcx> {
type Ty = Ty<'tcx>;
type TyLayout = Result<TyLayout<'tcx>, LayoutError<'tcx>>;
@@ -104,26 +101,26 @@
}
}
-impl<'a, 'b, 'tcx> HasDataLayout for ConstPropagator<'a, 'b, 'tcx> {
+impl<'mir, 'tcx> HasDataLayout for ConstPropagator<'mir, 'tcx> {
#[inline]
fn data_layout(&self) -> &TargetDataLayout {
&self.tcx.data_layout
}
}
-impl<'a, 'b, 'tcx> HasTyCtxt<'tcx> for ConstPropagator<'a, 'b, 'tcx> {
+impl<'mir, 'tcx> HasTyCtxt<'tcx> for ConstPropagator<'mir, 'tcx> {
#[inline]
- fn tcx<'c>(&'c self) -> TyCtxt<'c, 'tcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
}
-impl<'a, 'mir, 'tcx> ConstPropagator<'a, 'mir, 'tcx> {
+impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
fn new(
body: &mut Body<'tcx>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
source: MirSource<'tcx>,
- ) -> ConstPropagator<'a, 'mir, 'tcx> {
+ ) -> ConstPropagator<'mir, 'tcx> {
let param_env = tcx.param_env(source.def_id());
let ecx = mk_eval_cx(tcx, tcx.def_span(source.def_id()), param_env);
let can_const_prop = CanConstProp::check(body);
@@ -599,9 +596,11 @@
}
}
-fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- ty: Ty<'tcx>) -> Option<u64> {
+fn type_size_of<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ ty: Ty<'tcx>,
+) -> Option<u64> {
tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
}
@@ -668,7 +667,7 @@
}
}
-impl<'b, 'a, 'tcx> MutVisitor<'tcx> for ConstPropagator<'b, 'a, 'tcx> {
+impl<'mir, 'tcx> MutVisitor<'tcx> for ConstPropagator<'mir, 'tcx> {
fn visit_constant(
&mut self,
constant: &mut Constant<'tcx>,
diff --git a/src/librustc_mir/transform/copy_prop.rs b/src/librustc_mir/transform/copy_prop.rs
index 45b3fb7..c850b48 100644
--- a/src/librustc_mir/transform/copy_prop.rs
+++ b/src/librustc_mir/transform/copy_prop.rs
@@ -30,10 +30,7 @@
pub struct CopyPropagation;
impl MirPass for CopyPropagation {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _source: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _source: MirSource<'tcx>, body: &mut Body<'tcx>) {
// We only run when the MIR optimization level is > 1.
// This avoids a slow pass, and messing up debug info.
if tcx.sess.opts.debugging_opts.mir_opt_level <= 1 {
diff --git a/src/librustc_mir/transform/deaggregator.rs b/src/librustc_mir/transform/deaggregator.rs
index 78725f7..1b42a0d 100644
--- a/src/librustc_mir/transform/deaggregator.rs
+++ b/src/librustc_mir/transform/deaggregator.rs
@@ -6,10 +6,7 @@
pub struct Deaggregator;
impl MirPass for Deaggregator {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _source: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _source: MirSource<'tcx>, body: &mut Body<'tcx>) {
let (basic_blocks, local_decls) = body.basic_blocks_and_local_decls_mut();
let local_decls = &*local_decls;
for bb in basic_blocks {
diff --git a/src/librustc_mir/transform/dump_mir.rs b/src/librustc_mir/transform/dump_mir.rs
index 9d88a2c..243820b 100644
--- a/src/librustc_mir/transform/dump_mir.rs
+++ b/src/librustc_mir/transform/dump_mir.rs
@@ -18,11 +18,7 @@
Cow::Borrowed(self.0)
}
- fn run_pass<'a, 'tcx>(&self,
- _tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _source: MirSource<'tcx>,
- _body: &mut Body<'tcx>)
- {
+ fn run_pass<'tcx>(&self, _tcx: TyCtxt<'tcx>, _source: MirSource<'tcx>, _body: &mut Body<'tcx>) {
}
}
@@ -37,13 +33,14 @@
}
}
-
-pub fn on_mir_pass<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- pass_num: &dyn fmt::Display,
- pass_name: &str,
- source: MirSource<'tcx>,
- body: &Body<'tcx>,
- is_after: bool) {
+pub fn on_mir_pass<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ pass_num: &dyn fmt::Display,
+ pass_name: &str,
+ source: MirSource<'tcx>,
+ body: &Body<'tcx>,
+ is_after: bool,
+) {
if mir_util::dump_enabled(tcx, pass_name, source) {
mir_util::dump_mir(tcx,
Some(pass_num),
@@ -55,11 +52,7 @@
}
}
-pub fn emit_mir<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- outputs: &OutputFilenames)
- -> io::Result<()>
-{
+pub fn emit_mir<'tcx>(tcx: TyCtxt<'tcx>, outputs: &OutputFilenames) -> io::Result<()> {
let path = outputs.path(OutputType::Mir);
let mut f = File::create(&path)?;
mir_util::write_mir_pretty(tcx, None, &mut f)?;
diff --git a/src/librustc_mir/transform/elaborate_drops.rs b/src/librustc_mir/transform/elaborate_drops.rs
index c48b94b..584a2fd 100644
--- a/src/librustc_mir/transform/elaborate_drops.rs
+++ b/src/librustc_mir/transform/elaborate_drops.rs
@@ -21,11 +21,7 @@
pub struct ElaborateDrops;
impl MirPass for ElaborateDrops {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- src: MirSource<'tcx>,
- body: &mut Body<'tcx>)
- {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) {
debug!("elaborate_drops({:?} @ {:?})", src, body.span);
let def_id = src.def_id();
@@ -77,13 +73,12 @@
/// Returns the set of basic blocks whose unwind edges are known
/// to not be reachable, because they are `drop` terminators
/// that can't drop anything.
-fn find_dead_unwinds<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn find_dead_unwinds<'tcx>(
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
def_id: hir::def_id::DefId,
- env: &MoveDataParamEnv<'tcx, 'tcx>)
- -> BitSet<BasicBlock>
-{
+ env: &MoveDataParamEnv<'tcx>,
+) -> BitSet<BasicBlock> {
debug!("find_dead_unwinds({:?})", body.span);
// We only need to do this pass once, because unwind edges can only
// reach cleanup blocks, which can't have unwind edges themselves.
@@ -141,12 +136,13 @@
}
impl InitializationData {
- fn apply_location<'a,'tcx>(&mut self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- body: &Body<'tcx>,
- env: &MoveDataParamEnv<'tcx, 'tcx>,
- loc: Location)
- {
+ fn apply_location<'tcx>(
+ &mut self,
+ tcx: TyCtxt<'tcx>,
+ body: &Body<'tcx>,
+ env: &MoveDataParamEnv<'tcx>,
+ loc: Location,
+ ) {
drop_flag_effects_for_location(tcx, body, env, loc, |path, df| {
debug!("at location {:?}: setting {:?} to {:?}",
loc, path, df);
@@ -190,7 +186,7 @@
self.ctxt.body
}
- fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.ctxt.tcx
}
@@ -290,11 +286,11 @@
}
struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
- env: &'a MoveDataParamEnv<'tcx, 'tcx>,
- flow_inits: DataflowResults<'tcx, MaybeInitializedPlaces<'a, 'tcx, 'tcx>>,
- flow_uninits: DataflowResults<'tcx, MaybeUninitializedPlaces<'a, 'tcx, 'tcx>>,
+ env: &'a MoveDataParamEnv<'tcx>,
+ flow_inits: DataflowResults<'tcx, MaybeInitializedPlaces<'a, 'tcx>>,
+ flow_uninits: DataflowResults<'tcx, MaybeUninitializedPlaces<'a, 'tcx>>,
drop_flags: FxHashMap<MovePathIndex, Local>,
patch: MirPatch<'tcx>,
}
diff --git a/src/librustc_mir/transform/erase_regions.rs b/src/librustc_mir/transform/erase_regions.rs
index 5ed7abc..5a29ea2 100644
--- a/src/librustc_mir/transform/erase_regions.rs
+++ b/src/librustc_mir/transform/erase_regions.rs
@@ -10,19 +10,19 @@
use rustc::mir::visit::{MutVisitor, TyContext};
use crate::transform::{MirPass, MirSource};
-struct EraseRegionsVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct EraseRegionsVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> EraseRegionsVisitor<'a, 'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
+impl EraseRegionsVisitor<'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>) -> Self {
EraseRegionsVisitor {
tcx,
}
}
}
-impl<'a, 'tcx> MutVisitor<'tcx> for EraseRegionsVisitor<'a, 'tcx> {
+impl MutVisitor<'tcx> for EraseRegionsVisitor<'tcx> {
fn visit_ty(&mut self, ty: &mut Ty<'tcx>, _: TyContext) {
*ty = self.tcx.erase_regions(ty);
self.super_ty(ty);
@@ -50,10 +50,7 @@
pub struct EraseRegions;
impl MirPass for EraseRegions {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _: MirSource<'tcx>, body: &mut Body<'tcx>) {
EraseRegionsVisitor::new(tcx).visit_body(body);
}
}
diff --git a/src/librustc_mir/transform/generator.rs b/src/librustc_mir/transform/generator.rs
index 0665d09..9c7aedc 100644
--- a/src/librustc_mir/transform/generator.rs
+++ b/src/librustc_mir/transform/generator.rs
@@ -168,8 +168,8 @@
storage_liveness: liveness::LiveVarSet,
}
-struct TransformVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct TransformVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
state_adt_ref: &'tcx AdtDef,
state_substs: SubstsRef<'tcx>,
@@ -191,7 +191,7 @@
new_ret_local: Local,
}
-impl<'a, 'tcx> TransformVisitor<'a, 'tcx> {
+impl TransformVisitor<'tcx> {
// Make a GeneratorState rvalue
fn make_state(&self, idx: VariantIdx, val: Operand<'tcx>) -> Rvalue<'tcx> {
let adt = AggregateKind::Adt(self.state_adt_ref, idx, self.state_substs, None, None);
@@ -233,7 +233,7 @@
}
}
-impl<'a, 'tcx> MutVisitor<'tcx> for TransformVisitor<'a, 'tcx> {
+impl MutVisitor<'tcx> for TransformVisitor<'tcx> {
fn visit_local(&mut self,
local: &mut Local,
_: PlaceContext,
@@ -310,10 +310,11 @@
}
}
-fn make_generator_state_argument_indirect<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- body: &mut Body<'tcx>) {
+fn make_generator_state_argument_indirect<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ def_id: DefId,
+ body: &mut Body<'tcx>,
+) {
let gen_ty = body.local_decls.raw[1].ty;
let region = ty::ReFree(ty::FreeRegion {
@@ -335,9 +336,7 @@
DerefArgVisitor.visit_body(body);
}
-fn make_generator_state_argument_pinned<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- body: &mut Body<'tcx>) {
+fn make_generator_state_argument_pinned<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let ref_gen_ty = body.local_decls.raw[1].ty;
let pin_did = tcx.lang_items().pin_type().unwrap();
@@ -416,7 +415,7 @@
}
fn locals_live_across_suspend_points(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
source: MirSource<'tcx>,
movable: bool,
@@ -678,16 +677,18 @@
}
}
-fn compute_layout<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- source: MirSource<'tcx>,
- upvars: &Vec<Ty<'tcx>>,
- interior: Ty<'tcx>,
- movable: bool,
- body: &mut Body<'tcx>)
- -> (FxHashMap<Local, (Ty<'tcx>, VariantIdx, usize)>,
- GeneratorLayout<'tcx>,
- FxHashMap<BasicBlock, liveness::LiveVarSet>)
-{
+fn compute_layout<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ source: MirSource<'tcx>,
+ upvars: &Vec<Ty<'tcx>>,
+ interior: Ty<'tcx>,
+ movable: bool,
+ body: &mut Body<'tcx>,
+) -> (
+ FxHashMap<Local, (Ty<'tcx>, VariantIdx, usize)>,
+ GeneratorLayout<'tcx>,
+ FxHashMap<BasicBlock, liveness::LiveVarSet>,
+) {
// Use a liveness analysis to compute locals which are live across a suspension point
let LivenessInfo {
live_locals, live_locals_at_suspension_points, storage_conflicts, storage_liveness
@@ -767,10 +768,12 @@
(remap, layout, storage_liveness)
}
-fn insert_switch<'a, 'tcx>(body: &mut Body<'tcx>,
- cases: Vec<(usize, BasicBlock)>,
- transform: &TransformVisitor<'a, 'tcx>,
- default: TerminatorKind<'tcx>) {
+fn insert_switch<'tcx>(
+ body: &mut Body<'tcx>,
+ cases: Vec<(usize, BasicBlock)>,
+ transform: &TransformVisitor<'tcx>,
+ default: TerminatorKind<'tcx>,
+) {
let default_block = insert_term_block(body, default);
let (assign, discr) = transform.get_discr(body);
let switch = TerminatorKind::SwitchInt {
@@ -797,9 +800,7 @@
}
}
-fn elaborate_generator_drops<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- body: &mut Body<'tcx>) {
+fn elaborate_generator_drops<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, body: &mut Body<'tcx>) {
use crate::util::elaborate_drops::{elaborate_drop, Unwind};
use crate::util::patch::MirPatch;
use crate::shim::DropShimElaborator;
@@ -848,14 +849,15 @@
elaborator.patch.apply(body);
}
-fn create_generator_drop_shim<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- transform: &TransformVisitor<'a, 'tcx>,
- def_id: DefId,
- source: MirSource<'tcx>,
- gen_ty: Ty<'tcx>,
- body: &Body<'tcx>,
- drop_clean: BasicBlock) -> Body<'tcx> {
+fn create_generator_drop_shim<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ transform: &TransformVisitor<'tcx>,
+ def_id: DefId,
+ source: MirSource<'tcx>,
+ gen_ty: Ty<'tcx>,
+ body: &Body<'tcx>,
+ drop_clean: BasicBlock,
+) -> Body<'tcx> {
let mut body = body.clone();
let source_info = source_info(&body);
@@ -939,9 +941,11 @@
term_block
}
-fn insert_panic_block<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- body: &mut Body<'tcx>,
- message: AssertMessage<'tcx>) -> BasicBlock {
+fn insert_panic_block<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ body: &mut Body<'tcx>,
+ message: AssertMessage<'tcx>,
+) -> BasicBlock {
let assert_block = BasicBlock::new(body.basic_blocks().len());
let term = TerminatorKind::Assert {
cond: Operand::Constant(box Constant {
@@ -969,12 +973,13 @@
assert_block
}
-fn create_generator_resume_function<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- transform: TransformVisitor<'a, 'tcx>,
- def_id: DefId,
- source: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+fn create_generator_resume_function<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ transform: TransformVisitor<'tcx>,
+ def_id: DefId,
+ source: MirSource<'tcx>,
+ body: &mut Body<'tcx>,
+) {
// Poison the generator when it unwinds
for block in body.basic_blocks_mut() {
let source_info = block.terminator().source_info;
@@ -1042,10 +1047,14 @@
drop_clean
}
-fn create_cases<'a, 'tcx, F>(body: &mut Body<'tcx>,
- transform: &TransformVisitor<'a, 'tcx>,
- target: F) -> Vec<(usize, BasicBlock)>
- where F: Fn(&SuspensionPoint) -> Option<BasicBlock> {
+fn create_cases<'tcx, F>(
+ body: &mut Body<'tcx>,
+ transform: &TransformVisitor<'tcx>,
+ target: F,
+) -> Vec<(usize, BasicBlock)>
+where
+ F: Fn(&SuspensionPoint) -> Option<BasicBlock>,
+{
let source_info = source_info(body);
transform.suspension_points.iter().filter_map(|point| {
@@ -1083,10 +1092,7 @@
}
impl MirPass for StateTransform {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- source: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, body: &mut Body<'tcx>) {
let yield_ty = if let Some(yield_ty) = body.yield_ty {
yield_ty
} else {
diff --git a/src/librustc_mir/transform/inline.rs b/src/librustc_mir/transform/inline.rs
index 651910c..5e6f1bc 100644
--- a/src/librustc_mir/transform/inline.rs
+++ b/src/librustc_mir/transform/inline.rs
@@ -38,22 +38,19 @@
}
impl MirPass for Inline {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- source: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, body: &mut Body<'tcx>) {
if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
Inliner { tcx, source }.run_pass(body);
}
}
}
-struct Inliner<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct Inliner<'tcx> {
+ tcx: TyCtxt<'tcx>,
source: MirSource<'tcx>,
}
-impl<'a, 'tcx> Inliner<'a, 'tcx> {
+impl Inliner<'tcx> {
fn run_pass(&self, caller_body: &mut Body<'tcx>) {
// Keep a queue of callsites to try inlining on. We take
// advantage of the fact that queries detect cycles here to
@@ -73,7 +70,7 @@
// Only do inlining into fn bodies.
let id = self.tcx.hir().as_local_hir_id(self.source.def_id()).unwrap();
- if self.tcx.hir().body_owner_kind_by_hir_id(id).is_fn_or_closure()
+ if self.tcx.hir().body_owner_kind(id).is_fn_or_closure()
&& self.source.promoted.is_none()
{
for (bb, bb_data) in caller_body.basic_blocks().iter_enumerated() {
@@ -631,9 +628,11 @@
}
}
-fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- ty: Ty<'tcx>) -> Option<u64> {
+fn type_size_of<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ ty: Ty<'tcx>,
+) -> Option<u64> {
tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
}
@@ -643,7 +642,7 @@
* Integrates blocks from the callee function into the calling function.
* Updates block indices, references to locals and other control flow
* stuff.
- */
+*/
struct Integrator<'a, 'tcx: 'a> {
block_idx: usize,
args: &'a [Local],
@@ -787,11 +786,9 @@
}
TerminatorKind::Abort => { }
TerminatorKind::Unreachable => { }
- TerminatorKind::FalseEdges { ref mut real_target, ref mut imaginary_targets } => {
+ TerminatorKind::FalseEdges { ref mut real_target, ref mut imaginary_target } => {
*real_target = self.update_target(*real_target);
- for target in imaginary_targets {
- *target = self.update_target(*target);
- }
+ *imaginary_target = self.update_target(*imaginary_target);
}
TerminatorKind::FalseUnwind { real_target: _ , unwind: _ } =>
// see the ordering of passes in the optimized_mir query.
diff --git a/src/librustc_mir/transform/instcombine.rs b/src/librustc_mir/transform/instcombine.rs
index 2899112..c338e1e 100644
--- a/src/librustc_mir/transform/instcombine.rs
+++ b/src/librustc_mir/transform/instcombine.rs
@@ -12,10 +12,7 @@
pub struct InstCombine;
impl MirPass for InstCombine {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _: MirSource<'tcx>, body: &mut Body<'tcx>) {
// We only run when optimizing MIR (at any level).
if tcx.sess.opts.debugging_opts.mir_opt_level == 0 {
return
@@ -63,14 +60,14 @@
}
/// Finds optimization opportunities on the MIR.
-struct OptimizationFinder<'b, 'a, 'tcx:'a+'b> {
+struct OptimizationFinder<'b, 'tcx> {
body: &'b Body<'tcx>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
optimizations: OptimizationList<'tcx>,
}
-impl<'b, 'a, 'tcx:'b> OptimizationFinder<'b, 'a, 'tcx> {
- fn new(body: &'b Body<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> OptimizationFinder<'b, 'a, 'tcx> {
+impl OptimizationFinder<'b, 'tcx> {
+ fn new(body: &'b Body<'tcx>, tcx: TyCtxt<'tcx>) -> OptimizationFinder<'b, 'tcx> {
OptimizationFinder {
body,
tcx,
@@ -79,7 +76,7 @@
}
}
-impl<'b, 'a, 'tcx> Visitor<'tcx> for OptimizationFinder<'b, 'a, 'tcx> {
+impl Visitor<'tcx> for OptimizationFinder<'b, 'tcx> {
fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
if let Rvalue::Ref(_, _, Place::Projection(ref projection)) = *rvalue {
if let ProjectionElem::Deref = projection.elem {
diff --git a/src/librustc_mir/transform/lower_128bit.rs b/src/librustc_mir/transform/lower_128bit.rs
index 189258c..f0aa189 100644
--- a/src/librustc_mir/transform/lower_128bit.rs
+++ b/src/librustc_mir/transform/lower_128bit.rs
@@ -10,10 +10,7 @@
pub struct Lower128Bit;
impl MirPass for Lower128Bit {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _src: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _src: MirSource<'tcx>, body: &mut Body<'tcx>) {
let debugging_override = tcx.sess.opts.debugging_opts.lower_128bit_ops;
let target_default = tcx.sess.host.options.i128_lowering;
if !debugging_override.unwrap_or(target_default) {
@@ -21,11 +18,11 @@
}
self.lower_128bit_ops(tcx, body);
- }
+}
}
impl Lower128Bit {
- fn lower_128bit_ops<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, body: &mut Body<'tcx>) {
+ fn lower_128bit_ops<'tcx>(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let mut new_blocks = Vec::new();
let cur_len = body.basic_blocks().len();
@@ -120,15 +117,16 @@
}
}
-fn check_lang_item_type<'a, 'tcx, D>(
+fn check_lang_item_type<'tcx, D>(
lang_item: LangItem,
place: &Place<'tcx>,
lhs: &Operand<'tcx>,
rhs: &Operand<'tcx>,
local_decls: &D,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
--> DefId
- where D: HasLocalDecls<'tcx>
+ tcx: TyCtxt<'tcx>,
+) -> DefId
+where
+ D: HasLocalDecls<'tcx>,
{
let did = tcx.require_lang_item(lang_item);
let poly_sig = tcx.fn_sig(did);
@@ -142,9 +140,13 @@
did
}
-fn lower_to<'a, 'tcx, D>(statement: &Statement<'tcx>, local_decls: &D, tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> Option<(LangItem, RhsKind)>
- where D: HasLocalDecls<'tcx>
+fn lower_to<'tcx, D>(
+ statement: &Statement<'tcx>,
+ local_decls: &D,
+ tcx: TyCtxt<'tcx>,
+) -> Option<(LangItem, RhsKind)>
+where
+ D: HasLocalDecls<'tcx>,
{
match statement.kind {
StatementKind::Assign(_, box Rvalue::BinaryOp(bin_op, ref lhs, _)) => {
@@ -172,7 +174,7 @@
}
impl RhsKind {
- fn ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Ty<'tcx>> {
+ fn ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Option<Ty<'tcx>> {
match *self {
RhsKind::Unchanged => None,
RhsKind::ForceU128 => Some(tcx.types.u128),
diff --git a/src/librustc_mir/transform/mod.rs b/src/librustc_mir/transform/mod.rs
index b6220ac..79bb2cf 100644
--- a/src/librustc_mir/transform/mod.rs
+++ b/src/librustc_mir/transform/mod.rs
@@ -50,14 +50,13 @@
};
}
-fn is_mir_available<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> bool {
+fn is_mir_available<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
tcx.mir_keys(def_id.krate).contains(&def_id)
}
/// Finds the full set of `DefId`s within the current crate that have
/// MIR associated with them.
-fn mir_keys<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, krate: CrateNum)
- -> &'tcx DefIdSet {
+fn mir_keys<'tcx>(tcx: TyCtxt<'tcx>, krate: CrateNum) -> &'tcx DefIdSet {
assert_eq!(krate, LOCAL_CRATE);
let mut set = DefIdSet::default();
@@ -68,7 +67,7 @@
// Additionally, tuple struct/variant constructors have MIR, but
// they don't have a BodyId, so we need to build them separately.
struct GatherCtors<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
set: &'a mut DefIdSet,
}
impl<'a, 'tcx> Visitor<'tcx> for GatherCtors<'a, 'tcx> {
@@ -95,7 +94,7 @@
tcx.arena.alloc(set)
}
-fn mir_built<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx Steal<Body<'tcx>> {
+fn mir_built<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx Steal<Body<'tcx>> {
let mir = build::mir_build(tcx, def_id);
tcx.alloc_steal_mir(mir)
}
@@ -142,14 +141,11 @@
default_name::<Self>()
}
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- source: MirSource<'tcx>,
- body: &mut Body<'tcx>);
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, body: &mut Body<'tcx>);
}
pub fn run_passes(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &mut Body<'tcx>,
instance: InstanceDef<'tcx>,
mir_phase: MirPhase,
@@ -196,7 +192,7 @@
}
}
-fn mir_const<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx Steal<Body<'tcx>> {
+fn mir_const<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx Steal<Body<'tcx>> {
// Unsafety check uses the raw mir, so make sure it is run
let _ = tcx.unsafety_check_result(def_id);
@@ -210,9 +206,9 @@
tcx.alloc_steal_mir(body)
}
-fn mir_validated<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx Steal<Body<'tcx>> {
+fn mir_validated(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx Steal<Body<'tcx>> {
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
- if let hir::BodyOwnerKind::Const = tcx.hir().body_owner_kind_by_hir_id(hir_id) {
+ if let hir::BodyOwnerKind::Const = tcx.hir().body_owner_kind(hir_id) {
// Ensure that we compute the `mir_const_qualif` for constants at
// this point, before we steal the mir-const result.
let _ = tcx.mir_const_qualif(def_id);
@@ -227,7 +223,7 @@
tcx.alloc_steal_mir(body)
}
-fn optimized_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx Body<'tcx> {
+fn optimized_mir<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx Body<'tcx> {
if tcx.is_constructor(def_id) {
// There's no reason to run all of the MIR passes on constructors when
// we can just output the MIR we want directly. This also saves const
diff --git a/src/librustc_mir/transform/no_landing_pads.rs b/src/librustc_mir/transform/no_landing_pads.rs
index a987c19..841db80 100644
--- a/src/librustc_mir/transform/no_landing_pads.rs
+++ b/src/librustc_mir/transform/no_landing_pads.rs
@@ -9,15 +9,12 @@
pub struct NoLandingPads;
impl MirPass for NoLandingPads {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _: MirSource<'tcx>, body: &mut Body<'tcx>) {
no_landing_pads(tcx, body)
}
}
-pub fn no_landing_pads<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, body: &mut Body<'tcx>) {
+pub fn no_landing_pads<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
if tcx.sess.no_landing_pads() {
NoLandingPads.visit_body(body);
}
diff --git a/src/librustc_mir/transform/promote_consts.rs b/src/librustc_mir/transform/promote_consts.rs
index 4fbb95e..84d3f8f 100644
--- a/src/librustc_mir/transform/promote_consts.rs
+++ b/src/librustc_mir/transform/promote_consts.rs
@@ -148,14 +148,14 @@
}
struct Promoter<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
source: &'a mut Body<'tcx>,
promoted: Body<'tcx>,
temps: &'a mut IndexVec<Local, TempState>,
/// If true, all nested temps are also kept in the
/// source MIR, not moved to the promoted MIR.
- keep_original: bool
+ keep_original: bool,
}
impl<'a, 'tcx> Promoter<'a, 'tcx> {
@@ -369,10 +369,12 @@
}
}
-pub fn promote_candidates<'a, 'tcx>(body: &mut Body<'tcx>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- mut temps: IndexVec<Local, TempState>,
- candidates: Vec<Candidate>) {
+pub fn promote_candidates<'tcx>(
+ body: &mut Body<'tcx>,
+ tcx: TyCtxt<'tcx>,
+ mut temps: IndexVec<Local, TempState>,
+ candidates: Vec<Candidate>,
+) {
// Visit candidates in reverse, in case they're nested.
debug!("promote_candidates({:?})", candidates);
diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs
index 19bd812..b6abfdb 100644
--- a/src/librustc_mir/transform/qualify_consts.rs
+++ b/src/librustc_mir/transform/qualify_consts.rs
@@ -124,7 +124,7 @@
}
struct ConstCx<'a, 'tcx> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
mode: Mode,
body: &'a Body<'tcx>,
@@ -652,11 +652,7 @@
}
impl<'a, 'tcx> Checker<'a, 'tcx> {
- fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- body: &'a Body<'tcx>,
- mode: Mode)
- -> Self {
+ fn new(tcx: TyCtxt<'tcx>, def_id: DefId, body: &'a Body<'tcx>, mode: Mode) -> Self {
assert!(def_id.is_local());
let mut rpo = traversal::reverse_postorder(body);
let temps = promote_consts::collect_temps(body, &mut rpo);
@@ -1452,6 +1448,9 @@
StatementKind::Assign(..) => {
self.super_statement(statement, location);
}
+ StatementKind::FakeRead(FakeReadCause::ForMatchedPlace, _) => {
+ self.not_const();
+ }
// FIXME(eddyb) should these really do nothing?
StatementKind::FakeRead(..) |
StatementKind::SetDiscriminant { .. } |
@@ -1472,9 +1471,7 @@
};
}
-fn mir_const_qualif<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> (u8, &'tcx BitSet<Local>) {
+fn mir_const_qualif<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> (u8, &'tcx BitSet<Local>) {
// N.B., this `borrow()` is guaranteed to be valid (i.e., the value
// cannot yet be stolen), because `mir_validated()`, which steals
// from `mir_const(), forces this query to execute before
@@ -1492,10 +1489,7 @@
pub struct QualifyAndPromoteConstants;
impl MirPass for QualifyAndPromoteConstants {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- src: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) {
// There's not really any point in promoting errorful MIR.
if body.return_ty().references_error() {
tcx.sess.delay_span_bug(body.span, "QualifyAndPromoteConstants: MIR had errors");
@@ -1509,7 +1503,7 @@
let def_id = src.def_id();
let id = tcx.hir().as_local_hir_id(def_id).unwrap();
let mut const_promoted_temps = None;
- let mode = match tcx.hir().body_owner_kind_by_hir_id(id) {
+ let mode = match tcx.hir().body_owner_kind(id) {
hir::BodyOwnerKind::Closure => Mode::NonConstFn,
hir::BodyOwnerKind::Fn => {
if tcx.is_const_fn(def_id) {
@@ -1668,7 +1662,7 @@
}
}
-fn args_required_const(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> Option<FxHashSet<usize>> {
+fn args_required_const(tcx: TyCtxt<'_>, def_id: DefId) -> Option<FxHashSet<usize>> {
let attrs = tcx.get_attrs(def_id);
let attr = attrs.iter().find(|a| a.check_name(sym::rustc_args_required_const))?;
let mut ret = FxHashSet::default();
diff --git a/src/librustc_mir/transform/qualify_min_const_fn.rs b/src/librustc_mir/transform/qualify_min_const_fn.rs
index 7bafef7..b84bc31 100644
--- a/src/librustc_mir/transform/qualify_min_const_fn.rs
+++ b/src/librustc_mir/transform/qualify_min_const_fn.rs
@@ -8,11 +8,7 @@
type McfResult = Result<(), (Span, Cow<'static, str>)>;
-pub fn is_min_const_fn(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- body: &'a Body<'tcx>,
-) -> McfResult {
+pub fn is_min_const_fn(tcx: TyCtxt<'tcx>, def_id: DefId, body: &'a Body<'tcx>) -> McfResult {
let mut current = def_id;
loop {
let predicates = tcx.predicates_of(current);
@@ -79,12 +75,7 @@
Ok(())
}
-fn check_ty(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: Ty<'tcx>,
- span: Span,
- fn_def_id: DefId,
-) -> McfResult {
+fn check_ty(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, span: Span, fn_def_id: DefId) -> McfResult {
for ty in ty.walk() {
match ty.sty {
ty::Ref(_, _, hir::Mutability::MutMutable) => return Err((
@@ -129,7 +120,7 @@
}
fn check_rvalue(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
rvalue: &Rvalue<'tcx>,
span: Span,
@@ -209,7 +200,7 @@
}
fn check_statement(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
statement: &Statement<'tcx>,
) -> McfResult {
@@ -220,6 +211,10 @@
check_rvalue(tcx, body, rval, span)
}
+ StatementKind::FakeRead(FakeReadCause::ForMatchedPlace, _) => {
+ Err((span, "loops and conditional expressions are not stable in const fn".into()))
+ }
+
StatementKind::FakeRead(_, place) => check_place(place, span),
// just an assignment
@@ -279,7 +274,7 @@
}
fn check_terminator(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
terminator: &Terminator<'tcx>,
) -> McfResult {
@@ -375,7 +370,7 @@
/// for being called from stable `const fn`s (`min_const_fn`).
///
/// Adding more intrinsics requires sign-off from @rust-lang/lang.
-fn is_intrinsic_whitelisted(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> bool {
+fn is_intrinsic_whitelisted(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
match &tcx.item_name(def_id).as_str()[..] {
| "size_of"
| "min_align_of"
diff --git a/src/librustc_mir/transform/remove_noop_landing_pads.rs b/src/librustc_mir/transform/remove_noop_landing_pads.rs
index 30edf7f..7b3cdc8 100644
--- a/src/librustc_mir/transform/remove_noop_landing_pads.rs
+++ b/src/librustc_mir/transform/remove_noop_landing_pads.rs
@@ -9,10 +9,7 @@
/// code for these.
pub struct RemoveNoopLandingPads;
-pub fn remove_noop_landing_pads<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- body: &mut Body<'tcx>)
-{
+pub fn remove_noop_landing_pads<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
if tcx.sess.no_landing_pads() {
return
}
@@ -22,10 +19,7 @@
}
impl MirPass for RemoveNoopLandingPads {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _src: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _src: MirSource<'tcx>, body: &mut Body<'tcx>) {
remove_noop_landing_pads(tcx, body);
}
}
diff --git a/src/librustc_mir/transform/rustc_peek.rs b/src/librustc_mir/transform/rustc_peek.rs
index a6ae03e..c4601229 100644
--- a/src/librustc_mir/transform/rustc_peek.rs
+++ b/src/librustc_mir/transform/rustc_peek.rs
@@ -25,8 +25,7 @@
pub struct SanityCheck;
impl MirPass for SanityCheck {
- fn run_pass<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- src: MirSource<'tcx>, body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) {
let def_id = src.def_id();
if !tcx.has_attr(def_id, sym::rustc_mir) {
debug!("skipping rustc_peek::SanityCheck on {}", tcx.def_path_str(def_id));
@@ -84,12 +83,14 @@
/// (If there are any calls to `rustc_peek` that do not match the
/// expression form above, then that emits an error as well, but those
/// errors are not intended to be used for unit tests.)
-pub fn sanity_check_via_rustc_peek<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- body: &Body<'tcx>,
- def_id: DefId,
- _attributes: &[ast::Attribute],
- results: &DataflowResults<'tcx, O>)
- where O: BitDenotation<'tcx, Idx=MovePathIndex> + HasMoveData<'tcx>
+pub fn sanity_check_via_rustc_peek<'tcx, O>(
+ tcx: TyCtxt<'tcx>,
+ body: &Body<'tcx>,
+ def_id: DefId,
+ _attributes: &[ast::Attribute],
+ results: &DataflowResults<'tcx, O>,
+) where
+ O: BitDenotation<'tcx, Idx = MovePathIndex> + HasMoveData<'tcx>,
{
debug!("sanity_check_via_rustc_peek def_id: {:?}", def_id);
// FIXME: this is not DRY. Figure out way to abstract this and
@@ -101,11 +102,13 @@
}
}
-fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- body: &Body<'tcx>,
- results: &DataflowResults<'tcx, O>,
- bb: mir::BasicBlock) where
- O: BitDenotation<'tcx, Idx=MovePathIndex> + HasMoveData<'tcx>
+fn each_block<'tcx, O>(
+ tcx: TyCtxt<'tcx>,
+ body: &Body<'tcx>,
+ results: &DataflowResults<'tcx, O>,
+ bb: mir::BasicBlock,
+) where
+ O: BitDenotation<'tcx, Idx = MovePathIndex> + HasMoveData<'tcx>,
{
let move_data = results.0.operator.move_data();
let mir::BasicBlockData { ref statements, ref terminator, is_cleanup: _ } = body[bb];
@@ -214,9 +217,10 @@
form `&expr`"));
}
-fn is_rustc_peek<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- terminator: &'a Option<mir::Terminator<'tcx>>)
- -> Option<(&'a [mir::Operand<'tcx>], Span)> {
+fn is_rustc_peek<'a, 'tcx>(
+ tcx: TyCtxt<'tcx>,
+ terminator: &'a Option<mir::Terminator<'tcx>>,
+) -> Option<(&'a [mir::Operand<'tcx>], Span)> {
if let Some(mir::Terminator { ref kind, source_info, .. }) = *terminator {
if let mir::TerminatorKind::Call { func: ref oper, ref args, .. } = *kind {
if let mir::Operand::Constant(ref func) = *oper {
diff --git a/src/librustc_mir/transform/simplify.rs b/src/librustc_mir/transform/simplify.rs
index 4d1c90b..ac15f52 100644
--- a/src/librustc_mir/transform/simplify.rs
+++ b/src/librustc_mir/transform/simplify.rs
@@ -57,10 +57,7 @@
Cow::Borrowed(&self.label)
}
- fn run_pass<'a, 'tcx>(&self,
- _tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _src: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, _tcx: TyCtxt<'tcx>, _src: MirSource<'tcx>, body: &mut Body<'tcx>) {
debug!("SimplifyCfg({:?}) - simplifying {:?}", self.label, body);
simplify_cfg(body);
}
@@ -296,10 +293,7 @@
pub struct SimplifyLocals;
impl MirPass for SimplifyLocals {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _: MirSource<'tcx>, body: &mut Body<'tcx>) {
let mut marker = DeclMarker { locals: BitSet::new_empty(body.local_decls.len()) };
marker.visit_body(body);
// Return pointer and arguments are always live
diff --git a/src/librustc_mir/transform/simplify_branches.rs b/src/librustc_mir/transform/simplify_branches.rs
index 938fa77..0c63a8d 100644
--- a/src/librustc_mir/transform/simplify_branches.rs
+++ b/src/librustc_mir/transform/simplify_branches.rs
@@ -19,10 +19,7 @@
Cow::Borrowed(&self.label)
}
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _src: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _src: MirSource<'tcx>, body: &mut Body<'tcx>) {
for block in body.basic_blocks_mut() {
let terminator = block.terminator_mut();
terminator.kind = match terminator.kind {
diff --git a/src/librustc_mir/transform/uniform_array_move_out.rs b/src/librustc_mir/transform/uniform_array_move_out.rs
index fbfc296..90b52b7 100644
--- a/src/librustc_mir/transform/uniform_array_move_out.rs
+++ b/src/librustc_mir/transform/uniform_array_move_out.rs
@@ -37,10 +37,7 @@
pub struct UniformArrayMoveOut;
impl MirPass for UniformArrayMoveOut {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _src: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _src: MirSource<'tcx>, body: &mut Body<'tcx>) {
let mut patch = MirPatch::new(body);
{
let mut visitor = UniformArrayMoveOutVisitor{body, patch: &mut patch, tcx};
@@ -53,7 +50,7 @@
struct UniformArrayMoveOutVisitor<'a, 'tcx: 'a> {
body: &'a Body<'tcx>,
patch: &'a mut MirPatch<'tcx>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
}
impl<'a, 'tcx> Visitor<'tcx> for UniformArrayMoveOutVisitor<'a, 'tcx> {
@@ -162,10 +159,7 @@
pub struct RestoreSubsliceArrayMoveOut;
impl MirPass for RestoreSubsliceArrayMoveOut {
- fn run_pass<'a, 'tcx>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- _src: MirSource<'tcx>,
- body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _src: MirSource<'tcx>, body: &mut Body<'tcx>) {
let mut patch = MirPatch::new(body);
{
let mut visitor = RestoreDataCollector {
diff --git a/src/librustc_mir/util/alignment.rs b/src/librustc_mir/util/alignment.rs
index 788b7fd..6245d9c 100644
--- a/src/librustc_mir/util/alignment.rs
+++ b/src/librustc_mir/util/alignment.rs
@@ -4,12 +4,14 @@
/// Returns `true` if this place is allowed to be less aligned
/// than its containing struct (because it is within a packed
/// struct).
-pub fn is_disaligned<'a, 'tcx, L>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- local_decls: &L,
- param_env: ty::ParamEnv<'tcx>,
- place: &Place<'tcx>)
- -> bool
- where L: HasLocalDecls<'tcx>
+pub fn is_disaligned<'tcx, L>(
+ tcx: TyCtxt<'tcx>,
+ local_decls: &L,
+ param_env: ty::ParamEnv<'tcx>,
+ place: &Place<'tcx>,
+) -> bool
+where
+ L: HasLocalDecls<'tcx>,
{
debug!("is_disaligned({:?})", place);
if !is_within_packed(tcx, local_decls, place) {
@@ -32,11 +34,9 @@
}
}
-fn is_within_packed<'a, 'tcx, L>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- local_decls: &L,
- place: &Place<'tcx>)
- -> bool
- where L: HasLocalDecls<'tcx>
+fn is_within_packed<'tcx, L>(tcx: TyCtxt<'tcx>, local_decls: &L, place: &Place<'tcx>) -> bool
+where
+ L: HasLocalDecls<'tcx>,
{
let mut place = place;
while let &Place::Projection(box Projection {
diff --git a/src/librustc_mir/util/borrowck_errors.rs b/src/librustc_mir/util/borrowck_errors.rs
index 37df368..f1aaa85 100644
--- a/src/librustc_mir/util/borrowck_errors.rs
+++ b/src/librustc_mir/util/borrowck_errors.rs
@@ -788,25 +788,25 @@
}
}
-impl<'cx, 'gcx, 'tcx> BorrowckErrors<'cx> for TyCtxt<'cx, 'gcx, 'tcx> {
+impl BorrowckErrors<'tcx> for TyCtxt<'tcx> {
fn struct_span_err_with_code<S: Into<MultiSpan>>(
self,
sp: S,
msg: &str,
code: DiagnosticId,
- ) -> DiagnosticBuilder<'cx> {
+ ) -> DiagnosticBuilder<'tcx> {
self.sess.struct_span_err_with_code(sp, msg, code)
}
- fn struct_span_err<S: Into<MultiSpan>>(self, sp: S, msg: &str) -> DiagnosticBuilder<'cx> {
+ fn struct_span_err<S: Into<MultiSpan>>(self, sp: S, msg: &str) -> DiagnosticBuilder<'tcx> {
self.sess.struct_span_err(sp, msg)
}
fn cancel_if_wrong_origin(
self,
- mut diag: DiagnosticBuilder<'cx>,
+ mut diag: DiagnosticBuilder<'tcx>,
o: Origin,
- ) -> DiagnosticBuilder<'cx> {
+ ) -> DiagnosticBuilder<'tcx> {
if !o.should_emit_errors(self.borrowck_mode()) {
self.sess.diagnostic().cancel(&mut diag);
}
diff --git a/src/librustc_mir/util/elaborate_drops.rs b/src/librustc_mir/util/elaborate_drops.rs
index b8ce31d..0d7d6b4 100644
--- a/src/librustc_mir/util/elaborate_drops.rs
+++ b/src/librustc_mir/util/elaborate_drops.rs
@@ -70,12 +70,12 @@
}
}
-pub trait DropElaborator<'a, 'tcx: 'a> : fmt::Debug {
+pub trait DropElaborator<'a, 'tcx: 'a>: fmt::Debug {
type Path : Copy + fmt::Debug;
fn patch(&mut self) -> &mut MirPatch<'tcx>;
fn body(&self) -> &'a Body<'tcx>;
- fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx>;
+ fn tcx(&self) -> TyCtxt<'tcx>;
fn param_env(&self) -> ty::ParamEnv<'tcx>;
fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
@@ -126,7 +126,7 @@
place.ty(self.elaborator.body(), self.tcx()).ty
}
- fn tcx(&self) -> TyCtxt<'b, 'tcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.elaborator.tcx()
}
diff --git a/src/librustc_mir/util/graphviz.rs b/src/librustc_mir/util/graphviz.rs
index fe0a119..7b154a9 100644
--- a/src/librustc_mir/util/graphviz.rs
+++ b/src/librustc_mir/util/graphviz.rs
@@ -8,11 +8,13 @@
use super::pretty::dump_mir_def_ids;
/// Write a graphviz DOT graph of a list of MIRs.
-pub fn write_mir_graphviz<'tcx, W>(tcx: TyCtxt<'_, '_, 'tcx>,
- single: Option<DefId>,
- w: &mut W)
- -> io::Result<()>
- where W: Write
+pub fn write_mir_graphviz<'tcx, W>(
+ tcx: TyCtxt<'tcx>,
+ single: Option<DefId>,
+ w: &mut W,
+) -> io::Result<()>
+where
+ W: Write,
{
for def_id in dump_mir_def_ids(tcx, single) {
let body = &tcx.optimized_mir(def_id);
@@ -32,11 +34,14 @@
}
/// Write a graphviz DOT graph of the MIR.
-pub fn write_mir_fn_graphviz<'tcx, W>(tcx: TyCtxt<'_, '_, 'tcx>,
- def_id: DefId,
- body: &Body<'_>,
- w: &mut W) -> io::Result<()>
- where W: Write
+pub fn write_mir_fn_graphviz<'tcx, W>(
+ tcx: TyCtxt<'tcx>,
+ def_id: DefId,
+ body: &Body<'_>,
+ w: &mut W,
+) -> io::Result<()>
+where
+ W: Write,
{
writeln!(w, "digraph Mir_{} {{", graphviz_safe_def_name(def_id))?;
@@ -133,11 +138,12 @@
/// Write the graphviz DOT label for the overall graph. This is essentially a block of text that
/// will appear below the graph, showing the type of the `fn` this MIR represents and the types of
/// all the variables and temporaries.
-fn write_graph_label<'a, 'gcx, 'tcx, W: Write>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- def_id: DefId,
- body: &Body<'_>,
- w: &mut W)
- -> io::Result<()> {
+fn write_graph_label<'tcx, W: Write>(
+ tcx: TyCtxt<'tcx>,
+ def_id: DefId,
+ body: &Body<'_>,
+ w: &mut W,
+) -> io::Result<()> {
write!(w, " label=<fn {}(", dot::escape_html(&tcx.def_path_str(def_id)))?;
// fn argument types.
diff --git a/src/librustc_mir/util/liveness.rs b/src/librustc_mir/util/liveness.rs
index 82ec5ab..cf0fc09 100644
--- a/src/librustc_mir/util/liveness.rs
+++ b/src/librustc_mir/util/liveness.rs
@@ -254,8 +254,8 @@
visitor.defs_uses
}
-pub fn dump_mir<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub fn dump_mir<'tcx>(
+ tcx: TyCtxt<'tcx>,
pass_name: &str,
source: MirSource<'tcx>,
body: &Body<'tcx>,
@@ -271,8 +271,8 @@
dump_matched_mir_node(tcx, pass_name, &node_path, source, body, result);
}
-fn dump_matched_mir_node<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn dump_matched_mir_node<'tcx>(
+ tcx: TyCtxt<'tcx>,
pass_name: &str,
node_path: &str,
source: MirSource<'tcx>,
@@ -294,8 +294,8 @@
});
}
-pub fn write_mir_fn<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub fn write_mir_fn<'tcx>(
+ tcx: TyCtxt<'tcx>,
src: MirSource<'tcx>,
body: &Body<'tcx>,
w: &mut dyn Write,
diff --git a/src/librustc_mir/util/mod.rs b/src/librustc_mir/util/mod.rs
index e340029..719029d 100644
--- a/src/librustc_mir/util/mod.rs
+++ b/src/librustc_mir/util/mod.rs
@@ -21,10 +21,7 @@
pub use self::graphviz::write_node_label as write_graphviz_node_label;
/// If possible, suggest replacing `ref` with `ref mut`.
-pub fn suggest_ref_mut<'cx, 'gcx, 'tcx>(
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- binding_span: Span,
-) -> Option<(String)> {
+pub fn suggest_ref_mut<'tcx>(tcx: TyCtxt<'tcx>, binding_span: Span) -> Option<(String)> {
let hi_src = tcx.sess.source_map().span_to_snippet(binding_span).unwrap();
if hi_src.starts_with("ref")
&& hi_src["ref".len()..].starts_with(Pattern_White_Space)
diff --git a/src/librustc_mir/util/pretty.rs b/src/librustc_mir/util/pretty.rs
index 2de58d2..fc46adb 100644
--- a/src/librustc_mir/util/pretty.rs
+++ b/src/librustc_mir/util/pretty.rs
@@ -62,8 +62,8 @@
/// or `typeck` appears in the name.
/// - `foo & nll | bar & typeck` == match if `foo` and `nll` both appear in the name
/// or `typeck` and `bar` both appear in the name.
-pub fn dump_mir<'a, 'gcx, 'tcx, F>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub fn dump_mir<'tcx, F>(
+ tcx: TyCtxt<'tcx>,
pass_num: Option<&dyn Display>,
pass_name: &str,
disambiguator: &dyn Display,
@@ -93,11 +93,7 @@
);
}
-pub fn dump_enabled<'a, 'gcx, 'tcx>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- pass_name: &str,
- source: MirSource<'tcx>,
-) -> bool {
+pub fn dump_enabled<'tcx>(tcx: TyCtxt<'tcx>, pass_name: &str, source: MirSource<'tcx>) -> bool {
let filters = match tcx.sess.opts.debugging_opts.dump_mir {
None => return false,
Some(ref filters) => filters,
@@ -117,8 +113,8 @@
// `def_path_str()` would otherwise trigger `type_of`, and this can
// run while we are already attempting to evaluate `type_of`.
-fn dump_matched_mir_node<'a, 'gcx, 'tcx, F>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+fn dump_matched_mir_node<'tcx, F>(
+ tcx: TyCtxt<'tcx>,
pass_num: Option<&dyn Display>,
pass_name: &str,
node_path: &str,
@@ -158,7 +154,7 @@
/// Also used by other bits of code (e.g., NLL inference) that dump
/// graphviz data or other things.
fn dump_path(
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
extension: &str,
pass_num: Option<&dyn Display>,
pass_name: &str,
@@ -225,7 +221,7 @@
/// bits of code (e.g., NLL inference) that dump graphviz data or
/// other things, and hence takes the extension as an argument.
pub(crate) fn create_dump_file(
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
extension: &str,
pass_num: Option<&dyn Display>,
pass_name: &str,
@@ -240,8 +236,8 @@
}
/// Write out a human-readable textual representation for the given MIR.
-pub fn write_mir_pretty<'a, 'gcx, 'tcx>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub fn write_mir_pretty<'tcx>(
+ tcx: TyCtxt<'tcx>,
single: Option<DefId>,
w: &mut dyn Write,
) -> io::Result<()> {
@@ -279,8 +275,8 @@
Ok(())
}
-pub fn write_mir_fn<'a, 'gcx, 'tcx, F>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub fn write_mir_fn<'tcx, F>(
+ tcx: TyCtxt<'tcx>,
src: MirSource<'tcx>,
body: &Body<'tcx>,
extra_data: &mut F,
@@ -303,8 +299,8 @@
}
/// Write out a human-readable textual representation for the given basic block.
-pub fn write_basic_block<'cx, 'gcx, 'tcx, F>(
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+pub fn write_basic_block<'tcx, F>(
+ tcx: TyCtxt<'tcx>,
block: BasicBlock,
body: &Body<'tcx>,
extra_data: &mut F,
@@ -370,13 +366,9 @@
/// After we print the main statement, we sometimes dump extra
/// information. There's often a lot of little things "nuzzled up" in
/// a statement.
-fn write_extra<'cx, 'gcx, 'tcx, F>(
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- write: &mut dyn Write,
- mut visit_op: F,
-) -> io::Result<()>
+fn write_extra<'tcx, F>(tcx: TyCtxt<'tcx>, write: &mut dyn Write, mut visit_op: F) -> io::Result<()>
where
- F: FnMut(&mut ExtraComments<'cx, 'gcx, 'tcx>),
+ F: FnMut(&mut ExtraComments<'tcx>),
{
let mut extra_comments = ExtraComments {
_tcx: tcx,
@@ -389,12 +381,12 @@
Ok(())
}
-struct ExtraComments<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- _tcx: TyCtxt<'cx, 'gcx, 'tcx>, // don't need it now, but bet we will soon
+struct ExtraComments<'tcx> {
+ _tcx: TyCtxt<'tcx>, // don't need it now, but bet we will soon
comments: Vec<String>,
}
-impl<'cx, 'gcx, 'tcx> ExtraComments<'cx, 'gcx, 'tcx> {
+impl ExtraComments<'tcx> {
fn push(&mut self, lines: &str) {
for line in lines.split('\n') {
self.comments.push(line.to_string());
@@ -402,7 +394,7 @@
}
}
-impl<'cx, 'gcx, 'tcx> Visitor<'tcx> for ExtraComments<'cx, 'gcx, 'tcx> {
+impl Visitor<'tcx> for ExtraComments<'tcx> {
fn visit_constant(&mut self, constant: &Constant<'tcx>, location: Location) {
self.super_constant(constant, location);
let Constant { span, ty, user_ty, literal } = constant;
@@ -453,7 +445,7 @@
}
}
-fn comment(tcx: TyCtxt<'_, '_, '_>, SourceInfo { span, scope }: SourceInfo) -> String {
+fn comment(tcx: TyCtxt<'_>, SourceInfo { span, scope }: SourceInfo) -> String {
format!(
"scope {} at {}",
scope.index(),
@@ -463,7 +455,7 @@
/// Prints local variables in a scope tree.
fn write_scope_tree(
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
body: &Body<'_>,
scope_tree: &FxHashMap<SourceScope, Vec<SourceScope>>,
w: &mut dyn Write,
@@ -538,8 +530,8 @@
/// Write out a human-readable textual representation of the MIR's `fn` type and the types of its
/// local variables (both user-defined bindings and compiler temporaries).
-pub fn write_mir_intro<'a, 'gcx, 'tcx>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+pub fn write_mir_intro<'tcx>(
+ tcx: TyCtxt<'tcx>,
src: MirSource<'tcx>,
body: &Body<'_>,
w: &mut dyn Write,
@@ -570,7 +562,7 @@
}
fn write_mir_sig(
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
src: MirSource<'tcx>,
body: &Body<'_>,
w: &mut dyn Write,
@@ -642,7 +634,7 @@
Ok(())
}
-pub fn dump_mir_def_ids(tcx: TyCtxt<'_, '_, '_>, single: Option<DefId>) -> Vec<DefId> {
+pub fn dump_mir_def_ids(tcx: TyCtxt<'_>, single: Option<DefId>) -> Vec<DefId> {
if let Some(i) = single {
vec![i]
} else {
diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs
index 532cec2..00b6db0 100644
--- a/src/librustc_passes/ast_validation.rs
+++ b/src/librustc_passes/ast_validation.rs
@@ -14,6 +14,7 @@
use rustc_data_structures::fx::FxHashMap;
use syntax::ast::*;
use syntax::attr;
+use syntax::feature_gate::is_builtin_attr;
use syntax::source_map::Spanned;
use syntax::symbol::{kw, sym};
use syntax::ptr::P;
@@ -365,6 +366,29 @@
_ => None,
}
}
+
+ fn check_fn_decl(&self, fn_decl: &FnDecl) {
+ fn_decl
+ .inputs
+ .iter()
+ .flat_map(|i| i.attrs.as_ref())
+ .filter(|attr| {
+ let arr = [sym::allow, sym::cfg, sym::cfg_attr, sym::deny, sym::forbid, sym::warn];
+ !arr.contains(&attr.name_or_empty()) && is_builtin_attr(attr)
+ })
+ .for_each(|attr| if attr.is_sugared_doc {
+ let mut err = self.err_handler().struct_span_err(
+ attr.span,
+ "documentation comments cannot be applied to function parameters"
+ );
+ err.span_label(attr.span, "doc comments are not allowed here");
+ err.emit();
+ }
+ else {
+ self.err_handler().span_err(attr.span, "allow, cfg, cfg_attr, deny, \
+ forbid, and warn are the only allowed built-in attributes in function parameters")
+ });
+ }
}
enum GenericPosition {
@@ -470,6 +494,9 @@
impl<'a> Visitor<'a> for AstValidator<'a> {
fn visit_expr(&mut self, expr: &'a Expr) {
match expr.node {
+ ExprKind::Closure(_, _, _, ref fn_decl, _, _) => {
+ self.check_fn_decl(fn_decl);
+ }
ExprKind::IfLet(_, ref expr, _, _) | ExprKind::WhileLet(_, ref expr, _, _) =>
self.while_if_let_ambiguity(&expr),
ExprKind::InlineAsm(..) if !self.session.target.target.options.allow_asm => {
@@ -484,6 +511,7 @@
fn visit_ty(&mut self, ty: &'a Ty) {
match ty.node {
TyKind::BareFn(ref bfty) => {
+ self.check_fn_decl(&bfty.decl);
self.check_decl_no_pat(&bfty.decl, |span, _| {
struct_span_err!(self.session, span, E0561,
"patterns aren't allowed in function pointer types").emit();
@@ -601,10 +629,11 @@
.note("only trait implementations may be annotated with default").emit();
}
}
- ItemKind::Fn(_, ref header, ref generics, _) => {
+ ItemKind::Fn(ref decl, ref header, ref generics, _) => {
+ self.visit_fn_header(header);
+ self.check_fn_decl(decl);
// We currently do not permit const generics in `const fn`, as
// this is tantamount to allowing compile-time dependent typing.
- self.visit_fn_header(header);
if header.constness.node == Constness::Const {
// Look for const generics and error if we find any.
for param in &generics.params {
@@ -657,6 +686,7 @@
self.no_questions_in_bounds(bounds, "supertraits", true);
for trait_item in trait_items {
if let TraitItemKind::Method(ref sig, ref block) = trait_item.node {
+ self.check_fn_decl(&sig.decl);
self.check_trait_fn_not_async(trait_item.span, sig.header.asyncness.node);
self.check_trait_fn_not_const(sig.header.constness);
if block.is_none() {
@@ -711,6 +741,7 @@
fn visit_foreign_item(&mut self, fi: &'a ForeignItem) {
match fi.node {
ForeignItemKind::Fn(ref decl, _) => {
+ self.check_fn_decl(decl);
self.check_decl_no_pat(decl, |span, _| {
struct_span_err!(self.session, span, E0130,
"patterns aren't allowed in foreign function declarations")
@@ -864,6 +895,16 @@
"`async fn` is not permitted in the 2015 edition").emit();
}
}
+
+ fn visit_impl_item(&mut self, ii: &'a ImplItem) {
+ match ii.node {
+ ImplItemKind::Method(ref sig, _) => {
+ self.check_fn_decl(&sig.decl);
+ }
+ _ => {}
+ }
+ visit::walk_impl_item(self, ii);
+ }
}
pub fn check_crate(session: &Session, krate: &Crate) -> (bool, bool) {
diff --git a/src/librustc_passes/layout_test.rs b/src/librustc_passes/layout_test.rs
index dea5774..8f790d1 100644
--- a/src/librustc_passes/layout_test.rs
+++ b/src/librustc_passes/layout_test.rs
@@ -14,7 +14,7 @@
use syntax::ast::Attribute;
use syntax::symbol::sym;
-pub fn test_layout<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn test_layout<'tcx>(tcx: TyCtxt<'tcx>) {
if tcx.features().rustc_attrs {
// if the `rustc_attrs` feature is not enabled, don't bother testing layout
tcx.hir()
@@ -23,11 +23,11 @@
}
}
-struct VarianceTest<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct VarianceTest<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> ItemLikeVisitor<'tcx> for VarianceTest<'a, 'tcx> {
+impl ItemLikeVisitor<'tcx> for VarianceTest<'tcx> {
fn visit_item(&mut self, item: &'tcx hir::Item) {
let item_def_id = self.tcx.hir().local_def_id_from_hir_id(item.hir_id);
@@ -44,7 +44,7 @@
fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem) {}
}
-impl<'a, 'tcx> VarianceTest<'a, 'tcx> {
+impl VarianceTest<'tcx> {
fn dump_layout_of(&self, item_def_id: DefId, item: &hir::Item, attr: &Attribute) {
let tcx = self.tcx;
let param_env = self.tcx.param_env(item_def_id);
@@ -104,12 +104,12 @@
}
}
-struct UnwrapLayoutCx<'me, 'tcx> {
- tcx: TyCtxt<'me, 'tcx, 'tcx>,
+struct UnwrapLayoutCx<'tcx> {
+ tcx: TyCtxt<'tcx>,
param_env: ParamEnv<'tcx>,
}
-impl<'me, 'tcx> LayoutOf for UnwrapLayoutCx<'me, 'tcx> {
+impl LayoutOf for UnwrapLayoutCx<'tcx> {
type Ty = Ty<'tcx>;
type TyLayout = TyLayout<'tcx>;
@@ -118,19 +118,19 @@
}
}
-impl<'me, 'tcx> HasTyCtxt<'tcx> for UnwrapLayoutCx<'me, 'tcx> {
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
+impl HasTyCtxt<'tcx> for UnwrapLayoutCx<'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
}
-impl<'me, 'tcx> HasParamEnv<'tcx> for UnwrapLayoutCx<'me, 'tcx> {
+impl HasParamEnv<'tcx> for UnwrapLayoutCx<'tcx> {
fn param_env(&self) -> ParamEnv<'tcx> {
self.param_env
}
}
-impl<'me, 'tcx> HasDataLayout for UnwrapLayoutCx<'me, 'tcx> {
+impl HasDataLayout for UnwrapLayoutCx<'tcx> {
fn data_layout(&self) -> &TargetDataLayout {
self.tcx.data_layout()
}
diff --git a/src/librustc_passes/lib.rs b/src/librustc_passes/lib.rs
index fbd6ddf..bf2f763 100644
--- a/src/librustc_passes/lib.rs
+++ b/src/librustc_passes/lib.rs
@@ -6,6 +6,7 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
+#![feature(in_band_lifetimes)]
#![feature(nll)]
#![feature(rustc_diagnostic_macros)]
diff --git a/src/librustc_passes/loops.rs b/src/librustc_passes/loops.rs
index 97bbb0a..efa4bd6 100644
--- a/src/librustc_passes/loops.rs
+++ b/src/librustc_passes/loops.rs
@@ -45,7 +45,7 @@
cx: Context,
}
-fn check_mod_loops<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) {
+fn check_mod_loops<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
tcx.hir().visit_item_likes_in_module(module_def_id, &mut CheckLoopVisitor {
sess: &tcx.sess,
hir_map: &tcx.hir(),
diff --git a/src/librustc_passes/rvalue_promotion.rs b/src/librustc_passes/rvalue_promotion.rs
index 869cae3..5397a4a 100644
--- a/src/librustc_passes/rvalue_promotion.rs
+++ b/src/librustc_passes/rvalue_promotion.rs
@@ -39,10 +39,7 @@
};
}
-fn const_is_rvalue_promotable_to_static<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> bool
-{
+fn const_is_rvalue_promotable_to_static<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
assert!(def_id.is_local());
let hir_id = tcx.hir().as_local_hir_id(def_id)
@@ -51,10 +48,7 @@
tcx.rvalue_promotable_map(def_id).contains(&body_id.hir_id.local_id)
}
-fn rvalue_promotable_map<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> &'tcx ItemLocalSet
-{
+fn rvalue_promotable_map<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx ItemLocalSet {
let outer_def_id = tcx.closure_base_def_id(def_id);
if outer_def_id != def_id {
return tcx.rvalue_promotable_map(outer_def_id);
@@ -81,7 +75,7 @@
}
struct CheckCrateVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
in_fn: bool,
in_static: bool,
mut_rvalue_borrows: HirIdSet,
@@ -126,9 +120,9 @@
}
}
-impl<'a, 'gcx> CheckCrateVisitor<'a, 'gcx> {
+impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> {
// Returns true iff all the values of the type are promotable.
- fn type_promotability(&mut self, ty: Ty<'gcx>) -> Promotability {
+ fn type_promotability(&mut self, ty: Ty<'tcx>) -> Promotability {
debug!("type_promotability({})", ty);
if ty.is_freeze(self.tcx, self.param_env, DUMMY_SP) &&
@@ -171,7 +165,7 @@
impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> {
fn check_nested_body(&mut self, body_id: hir::BodyId) -> Promotability {
let item_id = self.tcx.hir().body_owner(body_id);
- let item_def_id = self.tcx.hir().local_def_id(item_id);
+ let item_def_id = self.tcx.hir().local_def_id_from_hir_id(item_id);
let outer_in_fn = self.in_fn;
let outer_tables = self.tables;
@@ -599,7 +593,7 @@
Promotable
}
-impl<'a, 'gcx, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'gcx> {
+impl<'a, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'tcx> {
fn consume(&mut self,
_consume_id: hir::HirId,
_consume_span: Span,
diff --git a/src/librustc_plugin/build.rs b/src/librustc_plugin/build.rs
index 8259419..d3ac597 100644
--- a/src/librustc_plugin/build.rs
+++ b/src/librustc_plugin/build.rs
@@ -30,14 +30,11 @@
}
/// Finds the function marked with `#[plugin_registrar]`, if any.
-pub fn find_plugin_registrar<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>) -> Option<DefId> {
+pub fn find_plugin_registrar<'tcx>(tcx: TyCtxt<'tcx>) -> Option<DefId> {
tcx.plugin_registrar_fn(LOCAL_CRATE)
}
-fn plugin_registrar_fn<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
- cnum: CrateNum,
-) -> Option<DefId> {
+fn plugin_registrar_fn<'tcx>(tcx: TyCtxt<'tcx>, cnum: CrateNum) -> Option<DefId> {
assert_eq!(cnum, LOCAL_CRATE);
let mut finder = RegistrarFinder { registrars: Vec::new() };
diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs
index 42b70dd..9eaa6f9 100644
--- a/src/librustc_privacy/lib.rs
+++ b/src/librustc_privacy/lib.rs
@@ -4,6 +4,7 @@
#![deny(internal)]
#![deny(unused_lifetimes)]
+#![feature(in_band_lifetimes)]
#![feature(nll)]
#![feature(rustc_diagnostic_macros)]
@@ -47,14 +48,14 @@
/// First, it doesn't have overridable `fn visit_trait_ref`, so we have to catch trait `DefId`s
/// manually. Second, it doesn't visit some type components like signatures of fn types, or traits
/// in `impl Trait`, see individual comments in `DefIdVisitorSkeleton::visit_ty`.
-trait DefIdVisitor<'a, 'tcx: 'a> {
- fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx>;
+trait DefIdVisitor<'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx>;
fn shallow(&self) -> bool { false }
fn skip_assoc_tys(&self) -> bool { false }
fn visit_def_id(&mut self, def_id: DefId, kind: &str, descr: &dyn fmt::Display) -> bool;
/// Not overridden, but used to actually visit types and traits.
- fn skeleton(&mut self) -> DefIdVisitorSkeleton<'_, 'a, 'tcx, Self> {
+ fn skeleton(&mut self) -> DefIdVisitorSkeleton<'_, 'tcx, Self> {
DefIdVisitorSkeleton {
def_id_visitor: self,
visited_opaque_tys: Default::default(),
@@ -72,16 +73,18 @@
}
}
-struct DefIdVisitorSkeleton<'v, 'a, 'tcx, V>
- where V: DefIdVisitor<'a, 'tcx> + ?Sized
+struct DefIdVisitorSkeleton<'v, 'tcx, V>
+where
+ V: DefIdVisitor<'tcx> + ?Sized,
{
def_id_visitor: &'v mut V,
visited_opaque_tys: FxHashSet<DefId>,
- dummy: PhantomData<TyCtxt<'a, 'tcx, 'tcx>>,
+ dummy: PhantomData<TyCtxt<'tcx>>,
}
-impl<'a, 'tcx, V> DefIdVisitorSkeleton<'_, 'a, 'tcx, V>
- where V: DefIdVisitor<'a, 'tcx> + ?Sized
+impl<'tcx, V> DefIdVisitorSkeleton<'_, 'tcx, V>
+where
+ V: DefIdVisitor<'tcx> + ?Sized,
{
fn visit_trait(&mut self, trait_ref: TraitRef<'tcx>) -> bool {
let TraitRef { def_id, substs } = trait_ref;
@@ -123,8 +126,9 @@
}
}
-impl<'a, 'tcx, V> TypeVisitor<'tcx> for DefIdVisitorSkeleton<'_, 'a, 'tcx, V>
- where V: DefIdVisitor<'a, 'tcx> + ?Sized
+impl<'tcx, V> TypeVisitor<'tcx> for DefIdVisitorSkeleton<'_, 'tcx, V>
+where
+ V: DefIdVisitor<'tcx> + ?Sized,
{
fn visit_ty(&mut self, ty: Ty<'tcx>) -> bool {
let tcx = self.def_id_visitor.tcx();
@@ -219,15 +223,17 @@
}
}
-fn def_id_visibility<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
- -> (ty::Visibility, Span, &'static str) {
+fn def_id_visibility<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ def_id: DefId,
+) -> (ty::Visibility, Span, &'static str) {
match tcx.hir().as_local_hir_id(def_id) {
Some(hir_id) => {
let vis = match tcx.hir().get_by_hir_id(hir_id) {
Node::Item(item) => &item.vis,
Node::ForeignItem(foreign_item) => &foreign_item.vis,
Node::TraitItem(..) | Node::Variant(..) => {
- return def_id_visibility(tcx, tcx.hir().get_parent_did_by_hir_id(hir_id));
+ return def_id_visibility(tcx, tcx.hir().get_parent_did(hir_id));
}
Node::ImplItem(impl_item) => {
match tcx.hir().get_by_hir_id(tcx.hir().get_parent_item(hir_id)) {
@@ -249,7 +255,7 @@
tcx, parent_did,
);
- let adt_def = tcx.adt_def(tcx.hir().get_parent_did_by_hir_id(hir_id));
+ let adt_def = tcx.adt_def(tcx.hir().get_parent_did(hir_id));
let ctor_did = tcx.hir().local_def_id_from_hir_id(
vdata.ctor_hir_id().unwrap());
let variant = adt_def.variant_with_ctor_id(ctor_did);
@@ -288,7 +294,7 @@
// visibility to within the crate.
if ctor_vis == ty::Visibility::Public {
let adt_def =
- tcx.adt_def(tcx.hir().get_parent_did_by_hir_id(hir_id));
+ tcx.adt_def(tcx.hir().get_parent_did(hir_id));
if adt_def.non_enum_variant().is_field_list_non_exhaustive() {
ctor_vis =
ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX));
@@ -305,7 +311,7 @@
}
Node::Expr(expr) => {
return (ty::Visibility::Restricted(
- tcx.hir().get_module_parent_by_hir_id(expr.hir_id)),
+ tcx.hir().get_module_parent(expr.hir_id)),
expr.span, "private")
}
node => bug!("unexpected node kind: {:?}", node)
@@ -322,16 +328,16 @@
// Set the correct `TypeckTables` for the given `item_id` (or an empty table if
// there is no `TypeckTables` for the item).
-fn item_tables<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- hir_id: hir::HirId,
- empty_tables: &'a ty::TypeckTables<'tcx>)
- -> &'a ty::TypeckTables<'tcx> {
+fn item_tables<'a, 'tcx>(
+ tcx: TyCtxt<'tcx>,
+ hir_id: hir::HirId,
+ empty_tables: &'a ty::TypeckTables<'tcx>,
+) -> &'a ty::TypeckTables<'tcx> {
let def_id = tcx.hir().local_def_id_from_hir_id(hir_id);
if tcx.has_typeck_tables(def_id) { tcx.typeck_tables_of(def_id) } else { empty_tables }
}
-fn min<'a, 'tcx>(vis1: ty::Visibility, vis2: ty::Visibility, tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> ty::Visibility {
+fn min<'tcx>(vis1: ty::Visibility, vis2: ty::Visibility, tcx: TyCtxt<'tcx>) -> ty::Visibility {
if vis1.is_at_least(vis2, tcx) { vis2 } else { vis1 }
}
@@ -341,12 +347,12 @@
/// This is done so that `private_in_public` warnings can be turned into hard errors
/// in crates that have been updated to use pub(restricted).
////////////////////////////////////////////////////////////////////////////////
-struct PubRestrictedVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct PubRestrictedVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
has_pub_restricted: bool,
}
-impl<'a, 'tcx> Visitor<'tcx> for PubRestrictedVisitor<'a, 'tcx> {
+impl Visitor<'tcx> for PubRestrictedVisitor<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::All(&self.tcx.hir())
}
@@ -360,13 +366,13 @@
////////////////////////////////////////////////////////////////////////////////
struct FindMin<'a, 'tcx, VL: VisibilityLike> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
access_levels: &'a AccessLevels,
min: VL,
}
-impl<'a, 'tcx, VL: VisibilityLike> DefIdVisitor<'a, 'tcx> for FindMin<'a, 'tcx, VL> {
- fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.tcx }
+impl<'a, 'tcx, VL: VisibilityLike> DefIdVisitor<'tcx> for FindMin<'a, 'tcx, VL> {
+ fn tcx(&self) -> TyCtxt<'tcx> { self.tcx }
fn shallow(&self) -> bool { VL::SHALLOW }
fn skip_assoc_tys(&self) -> bool { true }
fn visit_def_id(&mut self, def_id: DefId, _kind: &str, _descr: &dyn fmt::Display) -> bool {
@@ -382,8 +388,11 @@
// Returns an over-approximation (`skip_assoc_tys` = true) of visibility due to
// associated types for which we can't determine visibility precisely.
- fn of_impl<'a, 'tcx>(hir_id: hir::HirId, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- access_levels: &'a AccessLevels) -> Self {
+ fn of_impl<'a, 'tcx>(
+ hir_id: hir::HirId,
+ tcx: TyCtxt<'tcx>,
+ access_levels: &'a AccessLevels,
+ ) -> Self {
let mut find = FindMin { tcx, access_levels, min: Self::MAX };
let def_id = tcx.hir().local_def_id_from_hir_id(hir_id);
find.visit(tcx.type_of(def_id));
@@ -424,8 +433,8 @@
/// The embargo visitor, used to determine the exports of the AST.
////////////////////////////////////////////////////////////////////////////////
-struct EmbargoVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct EmbargoVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
// Accessibility levels for reachable nodes.
access_levels: AccessLevels,
@@ -435,13 +444,13 @@
changed: bool,
}
-struct ReachEverythingInTheInterfaceVisitor<'b, 'a: 'b, 'tcx: 'a> {
+struct ReachEverythingInTheInterfaceVisitor<'a, 'tcx> {
access_level: Option<AccessLevel>,
item_def_id: DefId,
- ev: &'b mut EmbargoVisitor<'a, 'tcx>,
+ ev: &'a mut EmbargoVisitor<'tcx>,
}
-impl<'a, 'tcx> EmbargoVisitor<'a, 'tcx> {
+impl EmbargoVisitor<'tcx> {
fn get(&self, id: hir::HirId) -> Option<AccessLevel> {
self.access_levels.map.get(&id).cloned()
}
@@ -459,8 +468,11 @@
}
}
- fn reach(&mut self, item_id: hir::HirId, access_level: Option<AccessLevel>)
- -> ReachEverythingInTheInterfaceVisitor<'_, 'a, 'tcx> {
+ fn reach(
+ &mut self,
+ item_id: hir::HirId,
+ access_level: Option<AccessLevel>,
+ ) -> ReachEverythingInTheInterfaceVisitor<'_, 'tcx> {
ReachEverythingInTheInterfaceVisitor {
access_level: cmp::min(access_level, Some(AccessLevel::Reachable)),
item_def_id: self.tcx.hir().local_def_id_from_hir_id(item_id),
@@ -489,11 +501,11 @@
if let Some(item) = module.res
.and_then(|res| res.mod_def_id())
.and_then(|def_id| self.tcx.hir().as_local_hir_id(def_id))
- .map(|module_hir_id| self.tcx.hir().expect_item_by_hir_id(module_hir_id))
+ .map(|module_hir_id| self.tcx.hir().expect_item(module_hir_id))
{
if let hir::ItemKind::Mod(m) = &item.node {
for item_id in m.item_ids.as_ref() {
- let item = self.tcx.hir().expect_item_by_hir_id(item_id.id);
+ let item = self.tcx.hir().expect_item(item_id.id);
let def_id = self.tcx.hir().local_def_id_from_hir_id(item_id.id);
if !self.tcx.hygienic_eq(segment.ident, item.ident, def_id) { continue; }
if let hir::ItemKind::Use(..) = item.node {
@@ -506,7 +518,7 @@
}
}
-impl<'a, 'tcx> Visitor<'tcx> for EmbargoVisitor<'a, 'tcx> {
+impl Visitor<'tcx> for EmbargoVisitor<'tcx> {
/// We want to visit items in the context of their containing
/// module and so forth, so supply a crate for doing a deep walk.
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
@@ -752,7 +764,7 @@
let module = if module_id == hir::CRATE_HIR_ID {
&self.tcx.hir().krate().module
} else if let hir::ItemKind::Mod(ref module) =
- self.tcx.hir().expect_item_by_hir_id(module_id).node {
+ self.tcx.hir().expect_item(module_id).node {
module
} else {
unreachable!()
@@ -777,7 +789,7 @@
}
}
-impl<'a, 'tcx> ReachEverythingInTheInterfaceVisitor<'_, 'a, 'tcx> {
+impl ReachEverythingInTheInterfaceVisitor<'_, 'tcx> {
fn generics(&mut self) -> &mut Self {
for param in &self.ev.tcx.generics_of(self.item_def_id).params {
match param.kind {
@@ -813,8 +825,8 @@
}
}
-impl<'a, 'tcx> DefIdVisitor<'a, 'tcx> for ReachEverythingInTheInterfaceVisitor<'_, 'a, 'tcx> {
- fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.ev.tcx }
+impl DefIdVisitor<'tcx> for ReachEverythingInTheInterfaceVisitor<'_, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> { self.ev.tcx }
fn visit_def_id(&mut self, def_id: DefId, _kind: &str, _descr: &dyn fmt::Display) -> bool {
if let Some(hir_id) = self.ev.tcx.hir().as_local_hir_id(def_id) {
self.ev.update(hir_id, self.access_level);
@@ -831,7 +843,7 @@
//////////////////////////////////////////////////////////////////////////////////////
struct NamePrivacyVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
tables: &'a ty::TypeckTables<'tcx>,
current_item: hir::HirId,
empty_tables: &'a ty::TypeckTables<'tcx>,
@@ -958,7 +970,7 @@
////////////////////////////////////////////////////////////////////////////////////////////
struct TypePrivacyVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
tables: &'a ty::TypeckTables<'tcx>,
current_item: DefId,
in_body: bool,
@@ -1176,8 +1188,8 @@
}
}
-impl<'a, 'tcx> DefIdVisitor<'a, 'tcx> for TypePrivacyVisitor<'a, 'tcx> {
- fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.tcx }
+impl DefIdVisitor<'tcx> for TypePrivacyVisitor<'a, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> { self.tcx }
fn visit_def_id(&mut self, def_id: DefId, kind: &str, descr: &dyn fmt::Display) -> bool {
self.check_def_id(def_id, kind, descr)
}
@@ -1191,7 +1203,7 @@
///////////////////////////////////////////////////////////////////////////////
struct ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
access_levels: &'a AccessLevels,
in_variant: bool,
// Set of errors produced by this obsolete visitor.
@@ -1534,8 +1546,8 @@
/// and traits in public interfaces.
///////////////////////////////////////////////////////////////////////////////
-struct SearchInterfaceForPrivateItemsVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct SearchInterfaceForPrivateItemsVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
item_id: hir::HirId,
item_def_id: DefId,
span: Span,
@@ -1546,7 +1558,7 @@
in_assoc_ty: bool,
}
-impl<'a, 'tcx: 'a> SearchInterfaceForPrivateItemsVisitor<'a, 'tcx> {
+impl SearchInterfaceForPrivateItemsVisitor<'tcx> {
fn generics(&mut self) -> &mut Self {
for param in &self.tcx.generics_of(self.item_def_id).params {
match param.kind {
@@ -1632,22 +1644,25 @@
}
}
-impl<'a, 'tcx> DefIdVisitor<'a, 'tcx> for SearchInterfaceForPrivateItemsVisitor<'a, 'tcx> {
- fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.tcx }
+impl DefIdVisitor<'tcx> for SearchInterfaceForPrivateItemsVisitor<'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> { self.tcx }
fn visit_def_id(&mut self, def_id: DefId, kind: &str, descr: &dyn fmt::Display) -> bool {
self.check_def_id(def_id, kind, descr)
}
}
struct PrivateItemsInPublicInterfacesVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
has_pub_restricted: bool,
old_error_set: &'a HirIdSet,
}
impl<'a, 'tcx> PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> {
- fn check(&self, item_id: hir::HirId, required_visibility: ty::Visibility)
- -> SearchInterfaceForPrivateItemsVisitor<'a, 'tcx> {
+ fn check(
+ &self,
+ item_id: hir::HirId,
+ required_visibility: ty::Visibility,
+ ) -> SearchInterfaceForPrivateItemsVisitor<'tcx> {
let mut has_old_errors = false;
// Slow path taken only if there any errors in the crate.
@@ -1675,7 +1690,7 @@
tcx: self.tcx,
item_id,
item_def_id: self.tcx.hir().local_def_id_from_hir_id(item_id),
- span: self.tcx.hir().span_by_hir_id(item_id),
+ span: self.tcx.hir().span(item_id),
required_visibility,
has_pub_restricted: self.has_pub_restricted,
has_old_errors,
@@ -1813,7 +1828,7 @@
};
}
-fn check_mod_privacy<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) {
+fn check_mod_privacy<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
let empty_tables = ty::TypeckTables::empty(None);
// Check privacy of names not checked in previous compilation stages.
@@ -1840,10 +1855,7 @@
intravisit::walk_mod(&mut visitor, module, hir_id);
}
-fn privacy_access_levels<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
- krate: CrateNum,
-) -> &'tcx AccessLevels {
+fn privacy_access_levels<'tcx>(tcx: TyCtxt<'tcx>, krate: CrateNum) -> &'tcx AccessLevels {
assert_eq!(krate, LOCAL_CRATE);
// Build up a set of all exported items in the AST. This is a set of all
@@ -1867,7 +1879,7 @@
tcx.arena.alloc(visitor.access_levels)
}
-fn check_private_in_public<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, krate: CrateNum) {
+fn check_private_in_public<'tcx>(tcx: TyCtxt<'tcx>, krate: CrateNum) {
assert_eq!(krate, LOCAL_CRATE);
let access_levels = tcx.privacy_access_levels(LOCAL_CRATE);
diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs
index 6d0b142..e3cd294 100644
--- a/src/librustc_resolve/build_reduced_graph.rs
+++ b/src/librustc_resolve/build_reduced_graph.rs
@@ -305,7 +305,7 @@
}
// Empty groups `a::b::{}` are turned into synthetic `self` imports
- // `a::b::c::{self as __dummy}`, so that their prefixes are correctly
+ // `a::b::c::{self as _}`, so that their prefixes are correctly
// resolved and checked for privacy/stability/etc.
if items.is_empty() && !empty_for_self(&prefix) {
let new_span = prefix[prefix.len() - 1].ident.span;
@@ -314,7 +314,7 @@
Ident::new(kw::SelfLower, new_span)
),
kind: ast::UseTreeKind::Simple(
- Some(Ident::from_str_and_span("__dummy", new_span).gensym()),
+ Some(Ident::new(kw::Underscore, new_span)),
ast::DUMMY_NODE_ID,
ast::DUMMY_NODE_ID,
),
diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs
index fec7bf3..7f05e0f 100644
--- a/src/librustc_resolve/lib.rs
+++ b/src/librustc_resolve/lib.rs
@@ -1519,37 +1519,32 @@
///
/// All other types are defined somewhere and possibly imported, but the primitive ones need
/// special handling, since they have no place of origin.
-#[derive(Default)]
struct PrimitiveTypeTable {
primitive_types: FxHashMap<Name, PrimTy>,
}
impl PrimitiveTypeTable {
fn new() -> PrimitiveTypeTable {
- let mut table = PrimitiveTypeTable::default();
+ let mut table = FxHashMap::default();
- table.intern("bool", Bool);
- table.intern("char", Char);
- table.intern("f32", Float(FloatTy::F32));
- table.intern("f64", Float(FloatTy::F64));
- table.intern("isize", Int(IntTy::Isize));
- table.intern("i8", Int(IntTy::I8));
- table.intern("i16", Int(IntTy::I16));
- table.intern("i32", Int(IntTy::I32));
- table.intern("i64", Int(IntTy::I64));
- table.intern("i128", Int(IntTy::I128));
- table.intern("str", Str);
- table.intern("usize", Uint(UintTy::Usize));
- table.intern("u8", Uint(UintTy::U8));
- table.intern("u16", Uint(UintTy::U16));
- table.intern("u32", Uint(UintTy::U32));
- table.intern("u64", Uint(UintTy::U64));
- table.intern("u128", Uint(UintTy::U128));
- table
- }
-
- fn intern(&mut self, string: &str, primitive_type: PrimTy) {
- self.primitive_types.insert(Symbol::intern(string), primitive_type);
+ table.insert(sym::bool, Bool);
+ table.insert(sym::char, Char);
+ table.insert(sym::f32, Float(FloatTy::F32));
+ table.insert(sym::f64, Float(FloatTy::F64));
+ table.insert(sym::isize, Int(IntTy::Isize));
+ table.insert(sym::i8, Int(IntTy::I8));
+ table.insert(sym::i16, Int(IntTy::I16));
+ table.insert(sym::i32, Int(IntTy::I32));
+ table.insert(sym::i64, Int(IntTy::I64));
+ table.insert(sym::i128, Int(IntTy::I128));
+ table.insert(sym::str, Str);
+ table.insert(sym::usize, Uint(UintTy::Usize));
+ table.insert(sym::u8, Uint(UintTy::U8));
+ table.insert(sym::u16, Uint(UintTy::U16));
+ table.insert(sym::u32, Uint(UintTy::U32));
+ table.insert(sym::u64, Uint(UintTy::U64));
+ table.insert(sym::u128, Uint(UintTy::U128));
+ Self { primitive_types: table }
}
}
diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs
index 0675804..f9dd443 100644
--- a/src/librustc_save_analysis/dump_visitor.rs
+++ b/src/librustc_save_analysis/dump_visitor.rs
@@ -77,7 +77,7 @@
pub struct DumpVisitor<'l, 'tcx: 'l, 'll, O: DumpOutput> {
save_ctxt: SaveContext<'l, 'tcx>,
- tcx: TyCtxt<'l, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
dumper: &'ll mut JsonDumper<O>,
span: SpanUtils<'l>,
diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs
index 30b0250..fb9f872 100644
--- a/src/librustc_save_analysis/lib.rs
+++ b/src/librustc_save_analysis/lib.rs
@@ -52,7 +52,7 @@
pub struct SaveContext<'l, 'tcx: 'l> {
- tcx: TyCtxt<'l, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
tables: &'l ty::TypeckTables<'tcx>,
access_levels: &'l AccessLevels,
span_utils: SpanUtils<'tcx>,
@@ -1115,7 +1115,7 @@
}
pub fn process_crate<'l, 'tcx, H: SaveHandler>(
- tcx: TyCtxt<'l, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
krate: &ast::Crate,
cratename: &str,
input: &'l Input,
diff --git a/src/librustc_target/abi/mod.rs b/src/librustc_target/abi/mod.rs
index 77493fb..b7ad5d8 100644
--- a/src/librustc_target/abi/mod.rs
+++ b/src/librustc_target/abi/mod.rs
@@ -136,7 +136,7 @@
}
if bits >= i128_align_src && bits <= 128 {
// Default alignment for i128 is decided by taking the alignment of
- // largest-sized i{64...128}.
+ // largest-sized i{64..=128}.
i128_align_src = bits;
dl.i128_align = a;
}
diff --git a/src/librustc_target/spec/mod.rs b/src/librustc_target/spec/mod.rs
index 844edbb..08cf062 100644
--- a/src/librustc_target/spec/mod.rs
+++ b/src/librustc_target/spec/mod.rs
@@ -268,16 +268,16 @@
pub type TargetResult = Result<Target, String>;
macro_rules! supported_targets {
- ( $(($triple:expr, $module:ident),)+ ) => (
- $(mod $module;)*
+ ( $(($( $triple:literal, )+ $module:ident ),)+ ) => {
+ $(mod $module;)+
/// List of supported targets
- const TARGETS: &[&str] = &[$($triple),*];
+ const TARGETS: &[&str] = &[$($($triple),+),+];
fn load_specific(target: &str) -> Result<Target, LoadTargetError> {
match target {
$(
- $triple => {
+ $($triple)|+ => {
let mut t = $module::target()
.map_err(LoadTargetError::Other)?;
t.options.is_builtin = true;
@@ -307,7 +307,7 @@
mod test_json_encode_decode {
use serialize::json::ToJson;
use super::Target;
- $(use super::$module;)*
+ $(use super::$module;)+
$(
#[test]
@@ -322,9 +322,9 @@
assert_eq!(original, parsed);
});
}
- )*
+ )+
}
- )
+ };
}
supported_targets! {
@@ -426,7 +426,9 @@
("armv7r-none-eabi", armv7r_none_eabi),
("armv7r-none-eabihf", armv7r_none_eabihf),
- ("x86_64-sun-solaris", x86_64_sun_solaris),
+ // `x86_64-pc-solaris` is an alias for `x86_64_sun_solaris` for backwards compatibility reasons.
+ // (See <https://github.com/rust-lang/rust/issues/40531>.)
+ ("x86_64-sun-solaris", "x86_64-pc-solaris", x86_64_sun_solaris),
("sparcv9-sun-solaris", sparcv9_sun_solaris),
("x86_64-pc-windows-gnu", x86_64_pc_windows_gnu),
@@ -748,6 +750,9 @@
/// wasm32 where the whole program either has simd or not.
pub simd_types_indirect: bool,
+ /// Pass a list of symbol which should be exported in the dylib to the linker.
+ pub limit_rdylib_exports: bool,
+
/// If set, have the linker export exactly these symbols, instead of using
/// the usual logic to figure this out from the crate itself.
pub override_export_symbols: Option<Vec<String>>,
@@ -843,6 +848,7 @@
emit_debug_gdb_scripts: true,
requires_uwtable: false,
simd_types_indirect: true,
+ limit_rdylib_exports: true,
override_export_symbols: None,
merge_functions: MergeFunctions::Aliases,
target_mcount: "mcount".to_string(),
@@ -1149,6 +1155,7 @@
key!(emit_debug_gdb_scripts, bool);
key!(requires_uwtable, bool);
key!(simd_types_indirect, bool);
+ key!(limit_rdylib_exports, bool);
key!(override_export_symbols, opt_list);
key!(merge_functions, MergeFunctions)?;
key!(target_mcount);
@@ -1364,6 +1371,7 @@
target_option_val!(emit_debug_gdb_scripts);
target_option_val!(requires_uwtable);
target_option_val!(simd_types_indirect);
+ target_option_val!(limit_rdylib_exports);
target_option_val!(override_export_symbols);
target_option_val!(merge_functions);
target_option_val!(target_mcount);
diff --git a/src/librustc_target/spec/riscv32imac_unknown_none_elf.rs b/src/librustc_target/spec/riscv32imac_unknown_none_elf.rs
index 5064393..8a97a09 100644
--- a/src/librustc_target/spec/riscv32imac_unknown_none_elf.rs
+++ b/src/librustc_target/spec/riscv32imac_unknown_none_elf.rs
@@ -25,6 +25,7 @@
relocation_model: "static".to_string(),
emit_debug_gdb_scripts: false,
abi_blacklist: super::riscv_base::abi_blacklist(),
+ eliminate_frame_pointer: false,
.. Default::default()
},
})
diff --git a/src/librustc_target/spec/riscv32imc_unknown_none_elf.rs b/src/librustc_target/spec/riscv32imc_unknown_none_elf.rs
index 31e74c5..647d33e 100644
--- a/src/librustc_target/spec/riscv32imc_unknown_none_elf.rs
+++ b/src/librustc_target/spec/riscv32imc_unknown_none_elf.rs
@@ -26,6 +26,7 @@
relocation_model: "static".to_string(),
emit_debug_gdb_scripts: false,
abi_blacklist: super::riscv_base::abi_blacklist(),
+ eliminate_frame_pointer: false,
.. Default::default()
},
})
diff --git a/src/librustc_target/spec/riscv64gc_unknown_none_elf.rs b/src/librustc_target/spec/riscv64gc_unknown_none_elf.rs
index 2d4070c..a5c13fa 100644
--- a/src/librustc_target/spec/riscv64gc_unknown_none_elf.rs
+++ b/src/librustc_target/spec/riscv64gc_unknown_none_elf.rs
@@ -25,6 +25,7 @@
relocation_model: "static".to_string(),
emit_debug_gdb_scripts: false,
abi_blacklist: super::riscv_base::abi_blacklist(),
+ eliminate_frame_pointer: false,
.. Default::default()
},
})
diff --git a/src/librustc_target/spec/riscv64imac_unknown_none_elf.rs b/src/librustc_target/spec/riscv64imac_unknown_none_elf.rs
index f2e152c..237d615 100644
--- a/src/librustc_target/spec/riscv64imac_unknown_none_elf.rs
+++ b/src/librustc_target/spec/riscv64imac_unknown_none_elf.rs
@@ -25,6 +25,7 @@
relocation_model: "static".to_string(),
emit_debug_gdb_scripts: false,
abi_blacklist: super::riscv_base::abi_blacklist(),
+ eliminate_frame_pointer: false,
.. Default::default()
},
})
diff --git a/src/librustc_target/spec/solaris_base.rs b/src/librustc_target/spec/solaris_base.rs
index 0dfbb13..9e7eda0 100644
--- a/src/librustc_target/spec/solaris_base.rs
+++ b/src/librustc_target/spec/solaris_base.rs
@@ -8,6 +8,7 @@
has_rpath: true,
target_family: Some("unix".to_string()),
is_like_solaris: true,
+ limit_rdylib_exports: false, // Linker doesn't support this
.. Default::default()
}
diff --git a/src/librustc_target/spec/wasm32_base.rs b/src/librustc_target/spec/wasm32_base.rs
index edaf902..39a8ce9 100644
--- a/src/librustc_target/spec/wasm32_base.rs
+++ b/src/librustc_target/spec/wasm32_base.rs
@@ -106,6 +106,11 @@
// no dynamic linking, no need for default visibility!
default_hidden_visibility: true,
+ // Symbol visibility takes care of this for the WebAssembly.
+ // Additionally the only known linker, LLD, doesn't support the script
+ // arguments just yet
+ limit_rdylib_exports: false,
+
// we use the LLD shipped with the Rust toolchain by default
linker: Some("rust-lld".to_owned()),
lld_flavor: LldFlavor::Wasm,
diff --git a/src/librustc_target/spec/wasm32_experimental_emscripten.rs b/src/librustc_target/spec/wasm32_experimental_emscripten.rs
index 5ecd663..b802bee 100644
--- a/src/librustc_target/spec/wasm32_experimental_emscripten.rs
+++ b/src/librustc_target/spec/wasm32_experimental_emscripten.rs
@@ -24,6 +24,7 @@
is_like_emscripten: true,
max_atomic_width: Some(32),
post_link_args,
+ limit_rdylib_exports: false,
target_family: Some("unix".to_string()),
.. Default::default()
};
diff --git a/src/librustc_target/spec/wasm32_unknown_emscripten.rs b/src/librustc_target/spec/wasm32_unknown_emscripten.rs
index a6e9340..e0df368 100644
--- a/src/librustc_target/spec/wasm32_unknown_emscripten.rs
+++ b/src/librustc_target/spec/wasm32_unknown_emscripten.rs
@@ -26,6 +26,7 @@
is_like_emscripten: true,
max_atomic_width: Some(32),
post_link_args,
+ limit_rdylib_exports: false,
target_family: Some("unix".to_string()),
codegen_backend: "emscripten".to_string(),
.. Default::default()
diff --git a/src/librustc_traits/chalk_context/mod.rs b/src/librustc_traits/chalk_context/mod.rs
index bf61a55..2b67891 100644
--- a/src/librustc_traits/chalk_context/mod.rs
+++ b/src/librustc_traits/chalk_context/mod.rs
@@ -45,19 +45,19 @@
use self::unify::*;
#[derive(Copy, Clone, Debug)]
-crate struct ChalkArenas<'gcx> {
- _phantom: PhantomData<&'gcx ()>,
+crate struct ChalkArenas<'tcx> {
+ _phantom: PhantomData<&'tcx ()>,
}
#[derive(Copy, Clone)]
-crate struct ChalkContext<'cx, 'gcx: 'cx> {
- _arenas: ChalkArenas<'gcx>,
- tcx: TyCtxt<'cx, 'gcx, 'gcx>,
+crate struct ChalkContext<'tcx> {
+ _arenas: ChalkArenas<'tcx>,
+ tcx: TyCtxt<'tcx>,
}
#[derive(Copy, Clone)]
-crate struct ChalkInferenceContext<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+crate struct ChalkInferenceContext<'cx, 'tcx: 'cx> {
+ infcx: &'cx InferCtxt<'cx, 'tcx>,
}
#[derive(Copy, Clone, Debug)]
@@ -126,12 +126,12 @@
}
}
-impl context::AggregateOps<ChalkArenas<'gcx>> for ChalkContext<'cx, 'gcx> {
+impl context::AggregateOps<ChalkArenas<'tcx>> for ChalkContext<'tcx> {
fn make_solution(
&self,
- root_goal: &Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>>,
- mut simplified_answers: impl context::AnswerStream<ChalkArenas<'gcx>>,
- ) -> Option<Canonical<'gcx, QueryResponse<'gcx, ()>>> {
+ root_goal: &Canonical<'tcx, InEnvironment<'tcx, Goal<'tcx>>>,
+ mut simplified_answers: impl context::AnswerStream<ChalkArenas<'tcx>>,
+ ) -> Option<Canonical<'tcx, QueryResponse<'tcx, ()>>> {
use chalk_engine::SimplifiedAnswer;
debug!("make_solution(root_goal = {:?})", root_goal);
@@ -176,13 +176,10 @@
}
}
-impl context::ContextOps<ChalkArenas<'gcx>> for ChalkContext<'cx, 'gcx> {
+impl context::ContextOps<ChalkArenas<'tcx>> for ChalkContext<'tcx> {
/// Returns `true` if this is a coinductive goal: basically proving that an auto trait
/// is implemented or proving that a trait reference is well-formed.
- fn is_coinductive(
- &self,
- goal: &Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>>
- ) -> bool {
+ fn is_coinductive(&self, goal: &Canonical<'tcx, InEnvironment<'tcx, Goal<'tcx>>>) -> bool {
use rustc::traits::{WellFormed, WhereClause};
let mut goal = goal.value.goal;
@@ -216,8 +213,8 @@
/// - the environment and goal found by substitution `S` into `arg`.
fn instantiate_ucanonical_goal<R>(
&self,
- arg: &Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>>,
- op: impl context::WithInstantiatedUCanonicalGoal<ChalkArenas<'gcx>, Output = R>,
+ arg: &Canonical<'tcx, InEnvironment<'tcx, Goal<'tcx>>>,
+ op: impl context::WithInstantiatedUCanonicalGoal<ChalkArenas<'tcx>, Output = R>,
) -> R {
self.tcx.infer_ctxt().enter_with_canonical(DUMMY_SP, arg, |ref infcx, arg, subst| {
let chalk_infcx = &mut ChalkInferenceContext {
@@ -230,8 +227,8 @@
fn instantiate_ex_clause<R>(
&self,
_num_universes: usize,
- arg: &Canonical<'gcx, ChalkExClause<'gcx>>,
- op: impl context::WithInstantiatedExClause<ChalkArenas<'gcx>, Output = R>,
+ arg: &Canonical<'tcx, ChalkExClause<'tcx>>,
+ op: impl context::WithInstantiatedExClause<ChalkArenas<'tcx>, Output = R>,
) -> R {
self.tcx.infer_ctxt().enter_with_canonical(DUMMY_SP, &arg.upcast(), |ref infcx, arg, _| {
let chalk_infcx = &mut ChalkInferenceContext {
@@ -242,31 +239,31 @@
}
/// Returns `true` if this solution has no region constraints.
- fn empty_constraints(ccs: &Canonical<'gcx, ConstrainedSubst<'gcx>>) -> bool {
+ fn empty_constraints(ccs: &Canonical<'tcx, ConstrainedSubst<'tcx>>) -> bool {
ccs.value.constraints.is_empty()
}
fn inference_normalized_subst_from_ex_clause(
- canon_ex_clause: &'a Canonical<'gcx, ChalkExClause<'gcx>>,
- ) -> &'a CanonicalVarValues<'gcx> {
+ canon_ex_clause: &'a Canonical<'tcx, ChalkExClause<'tcx>>,
+ ) -> &'a CanonicalVarValues<'tcx> {
&canon_ex_clause.value.subst
}
fn inference_normalized_subst_from_subst(
- canon_subst: &'a Canonical<'gcx, ConstrainedSubst<'gcx>>,
- ) -> &'a CanonicalVarValues<'gcx> {
+ canon_subst: &'a Canonical<'tcx, ConstrainedSubst<'tcx>>,
+ ) -> &'a CanonicalVarValues<'tcx> {
&canon_subst.value.subst
}
fn canonical(
- u_canon: &'a Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>>,
- ) -> &'a Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>> {
+ u_canon: &'a Canonical<'tcx, InEnvironment<'tcx, Goal<'tcx>>>,
+ ) -> &'a Canonical<'tcx, InEnvironment<'tcx, Goal<'tcx>>> {
u_canon
}
fn is_trivial_substitution(
- u_canon: &Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>>,
- canonical_subst: &Canonical<'gcx, ConstrainedSubst<'gcx>>,
+ u_canon: &Canonical<'tcx, InEnvironment<'tcx, Goal<'tcx>>>,
+ canonical_subst: &Canonical<'tcx, ConstrainedSubst<'tcx>>,
) -> bool {
let subst = &canonical_subst.value.subst;
assert_eq!(u_canon.variables.len(), subst.var_values.len());
@@ -297,7 +294,7 @@
})
}
- fn num_universes(canon: &Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>>) -> usize {
+ fn num_universes(canon: &Canonical<'tcx, InEnvironment<'tcx, Goal<'tcx>>>) -> usize {
canon.max_universe.index() + 1
}
@@ -306,21 +303,21 @@
/// but for the universes of universally quantified names.
fn map_goal_from_canonical(
_map: &UniverseMap,
- value: &Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>>,
- ) -> Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>> {
+ value: &Canonical<'tcx, InEnvironment<'tcx, Goal<'tcx>>>,
+ ) -> Canonical<'tcx, InEnvironment<'tcx, Goal<'tcx>>> {
*value // FIXME universe maps not implemented yet
}
fn map_subst_from_canonical(
_map: &UniverseMap,
- value: &Canonical<'gcx, ConstrainedSubst<'gcx>>,
- ) -> Canonical<'gcx, ConstrainedSubst<'gcx>> {
+ value: &Canonical<'tcx, ConstrainedSubst<'tcx>>,
+ ) -> Canonical<'tcx, ConstrainedSubst<'tcx>> {
value.clone() // FIXME universe maps not implemented yet
}
}
-impl context::InferenceTable<ChalkArenas<'gcx>, ChalkArenas<'tcx>>
- for ChalkInferenceContext<'cx, 'gcx, 'tcx>
+impl context::InferenceTable<ChalkArenas<'tcx>, ChalkArenas<'tcx>>
+ for ChalkInferenceContext<'cx, 'tcx>
{
fn into_goal(&self, domain_goal: DomainGoal<'tcx>) -> Goal<'tcx> {
self.infcx.tcx.mk_goal(GoalKind::DomainGoal(domain_goal))
@@ -363,8 +360,8 @@
}
}
-impl context::TruncateOps<ChalkArenas<'gcx>, ChalkArenas<'tcx>>
- for ChalkInferenceContext<'cx, 'gcx, 'tcx>
+impl context::TruncateOps<ChalkArenas<'tcx>, ChalkArenas<'tcx>>
+ for ChalkInferenceContext<'cx, 'tcx>
{
fn truncate_goal(
&mut self,
@@ -381,8 +378,8 @@
}
}
-impl context::UnificationOps<ChalkArenas<'gcx>, ChalkArenas<'tcx>>
- for ChalkInferenceContext<'cx, 'gcx, 'tcx>
+impl context::UnificationOps<ChalkArenas<'tcx>, ChalkArenas<'tcx>>
+ for ChalkInferenceContext<'cx, 'tcx>
{
fn program_clauses(
&self,
@@ -418,7 +415,7 @@
fn canonicalize_goal(
&mut self,
value: &InEnvironment<'tcx, Goal<'tcx>>,
- ) -> Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>> {
+ ) -> Canonical<'tcx, InEnvironment<'tcx, Goal<'tcx>>> {
let mut _orig_values = OriginalQueryValues::default();
self.infcx.canonicalize_query(value, &mut _orig_values)
}
@@ -426,7 +423,7 @@
fn canonicalize_ex_clause(
&mut self,
value: &ChalkExClause<'tcx>,
- ) -> Canonical<'gcx, ChalkExClause<'gcx>> {
+ ) -> Canonical<'tcx, ChalkExClause<'tcx>> {
self.infcx.canonicalize_response(value)
}
@@ -434,19 +431,16 @@
&mut self,
subst: CanonicalVarValues<'tcx>,
constraints: Vec<RegionConstraint<'tcx>>,
- ) -> Canonical<'gcx, ConstrainedSubst<'gcx>> {
+ ) -> Canonical<'tcx, ConstrainedSubst<'tcx>> {
self.infcx.canonicalize_response(&ConstrainedSubst { subst, constraints })
}
fn u_canonicalize_goal(
&mut self,
- value: &Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>>,
- ) -> (
- Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>>,
- UniverseMap,
- ) {
+ value: &Canonical<'tcx, InEnvironment<'tcx, Goal<'tcx>>>,
+ ) -> (Canonical<'tcx, InEnvironment<'tcx, Goal<'tcx>>>, UniverseMap) {
(value.clone(), UniverseMap)
- }
+}
fn invert_goal(
&mut self,
@@ -470,7 +464,7 @@
fn sink_answer_subset(
&self,
- value: &Canonical<'gcx, ConstrainedSubst<'gcx>>,
+ value: &Canonical<'tcx, ConstrainedSubst<'tcx>>,
) -> Canonical<'tcx, ConstrainedSubst<'tcx>> {
value.clone()
}
@@ -478,7 +472,7 @@
fn lift_delayed_literal(
&self,
value: DelayedLiteral<ChalkArenas<'tcx>>,
- ) -> DelayedLiteral<ChalkArenas<'gcx>> {
+ ) -> DelayedLiteral<ChalkArenas<'tcx>> {
match self.infcx.tcx.lift_to_global(&value) {
Some(literal) => literal,
None => bug!("cannot lift {:?}", value),
@@ -508,13 +502,13 @@
type ChalkExClause<'tcx> = ExClause<ChalkArenas<'tcx>>;
-impl Debug for ChalkContext<'cx, 'gcx> {
+impl Debug for ChalkContext<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "ChalkContext")
}
}
-impl Debug for ChalkInferenceContext<'cx, 'gcx, 'tcx> {
+impl Debug for ChalkInferenceContext<'cx, 'tcx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "ChalkInferenceContext")
}
@@ -527,7 +521,7 @@
fn lift_ex_clause_to_tcx(
ex_clause: &ChalkExClause<'a>,
- tcx: TyCtxt<'_, 'gcx, 'tcx>
+ tcx: TyCtxt<'tcx>,
) -> Option<Self::LiftedExClause> {
Some(ChalkExClause {
subst: tcx.lift(&ex_clause.subst)?,
@@ -539,7 +533,7 @@
fn lift_delayed_literal_to_tcx(
literal: &DelayedLiteral<ChalkArenas<'a>>,
- tcx: TyCtxt<'_, 'gcx, 'tcx>
+ tcx: TyCtxt<'tcx>,
) -> Option<Self::LiftedDelayedLiteral> {
Some(match literal {
DelayedLiteral::CannotProve(()) => DelayedLiteral::CannotProve(()),
@@ -553,7 +547,7 @@
fn lift_literal_to_tcx(
literal: &Literal<ChalkArenas<'a>>,
- tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
) -> Option<Self::LiftedLiteral> {
Some(match literal {
Literal::Negative(goal) => Literal::Negative(tcx.lift(goal)?),
@@ -563,7 +557,7 @@
}
impl ExClauseFold<'tcx> for ChalkArenas<'tcx> {
- fn fold_ex_clause_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(
+ fn fold_ex_clause_with<F: TypeFolder<'tcx>>(
ex_clause: &ChalkExClause<'tcx>,
folder: &mut F,
) -> ChalkExClause<'tcx> {
@@ -600,13 +594,13 @@
}
}
-trait Upcast<'tcx, 'gcx: 'tcx>: 'gcx {
+trait Upcast<'tcx>: 'tcx {
type Upcasted: 'tcx;
fn upcast(&self) -> Self::Upcasted;
}
-impl<'tcx, 'gcx: 'tcx> Upcast<'tcx, 'gcx> for DelayedLiteral<ChalkArenas<'gcx>> {
+impl<'tcx> Upcast<'tcx> for DelayedLiteral<ChalkArenas<'tcx>> {
type Upcasted = DelayedLiteral<ChalkArenas<'tcx>>;
fn upcast(&self) -> Self::Upcasted {
@@ -621,7 +615,7 @@
}
}
-impl<'tcx, 'gcx: 'tcx> Upcast<'tcx, 'gcx> for Literal<ChalkArenas<'gcx>> {
+impl<'tcx> Upcast<'tcx> for Literal<ChalkArenas<'tcx>> {
type Upcasted = Literal<ChalkArenas<'tcx>>;
fn upcast(&self) -> Self::Upcasted {
@@ -632,7 +626,7 @@
}
}
-impl<'tcx, 'gcx: 'tcx> Upcast<'tcx, 'gcx> for ExClause<ChalkArenas<'gcx>> {
+impl<'tcx> Upcast<'tcx> for ExClause<ChalkArenas<'tcx>> {
type Upcasted = ExClause<ChalkArenas<'tcx>>;
fn upcast(&self) -> Self::Upcasted {
@@ -651,8 +645,9 @@
}
}
-impl<'tcx, 'gcx: 'tcx, T> Upcast<'tcx, 'gcx> for Canonical<'gcx, T>
- where T: Upcast<'tcx, 'gcx>
+impl<'tcx, T> Upcast<'tcx> for Canonical<'tcx, T>
+where
+ T: Upcast<'tcx>,
{
type Upcasted = Canonical<'tcx, T::Upcasted>;
@@ -672,13 +667,10 @@
};
}
-crate fn evaluate_goal<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- goal: ChalkCanonicalGoal<'tcx>
-) -> Result<
- &'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>,
- traits::query::NoSolution
-> {
+crate fn evaluate_goal<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ goal: ChalkCanonicalGoal<'tcx>,
+) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>, traits::query::NoSolution> {
use crate::lowering::Lower;
use rustc::traits::WellFormed;
diff --git a/src/librustc_traits/chalk_context/program_clauses/builtin.rs b/src/librustc_traits/chalk_context/program_clauses/builtin.rs
index bd72a04..71e18d2 100644
--- a/src/librustc_traits/chalk_context/program_clauses/builtin.rs
+++ b/src/librustc_traits/chalk_context/program_clauses/builtin.rs
@@ -15,10 +15,10 @@
/// `Implemented(ty: Trait) :- Implemented(nested: Trait)...`
/// where `Trait` is specified by `trait_def_id`.
fn builtin_impl_clause(
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
ty: Ty<'tcx>,
nested: &[Kind<'tcx>],
- trait_def_id: DefId
+ trait_def_id: DefId,
) -> ProgramClause<'tcx> {
ProgramClause {
goal: ty::TraitPredicate {
@@ -43,11 +43,11 @@
}
crate fn assemble_builtin_unsize_impls<'tcx>(
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
unsize_def_id: DefId,
source: Ty<'tcx>,
target: Ty<'tcx>,
- clauses: &mut Vec<Clause<'tcx>>
+ clauses: &mut Vec<Clause<'tcx>>,
) {
match (&source.sty, &target.sty) {
(ty::Dynamic(data_a, ..), ty::Dynamic(data_b, ..)) => {
@@ -119,10 +119,10 @@
}
crate fn assemble_builtin_sized_impls<'tcx>(
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
sized_def_id: DefId,
ty: Ty<'tcx>,
- clauses: &mut Vec<Clause<'tcx>>
+ clauses: &mut Vec<Clause<'tcx>>,
) {
let mut push_builtin_impl = |ty: Ty<'tcx>, nested: &[Kind<'tcx>]| {
let clause = builtin_impl_clause(tcx, ty, nested, sized_def_id);
@@ -223,10 +223,10 @@
}
crate fn assemble_builtin_copy_clone_impls<'tcx>(
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
trait_def_id: DefId,
ty: Ty<'tcx>,
- clauses: &mut Vec<Clause<'tcx>>
+ clauses: &mut Vec<Clause<'tcx>>,
) {
let mut push_builtin_impl = |ty: Ty<'tcx>, nested: &[Kind<'tcx>]| {
let clause = builtin_impl_clause(tcx, ty, nested, trait_def_id);
diff --git a/src/librustc_traits/chalk_context/program_clauses/mod.rs b/src/librustc_traits/chalk_context/program_clauses/mod.rs
index c1f14cd..a49ca40 100644
--- a/src/librustc_traits/chalk_context/program_clauses/mod.rs
+++ b/src/librustc_traits/chalk_context/program_clauses/mod.rs
@@ -19,9 +19,9 @@
use self::builtin::*;
fn assemble_clauses_from_impls<'tcx>(
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
trait_def_id: DefId,
- clauses: &mut Vec<Clause<'tcx>>
+ clauses: &mut Vec<Clause<'tcx>>,
) {
tcx.for_each_impl(trait_def_id, |impl_def_id| {
clauses.extend(
@@ -33,9 +33,9 @@
}
fn assemble_clauses_from_assoc_ty_values<'tcx>(
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
trait_def_id: DefId,
- clauses: &mut Vec<Clause<'tcx>>
+ clauses: &mut Vec<Clause<'tcx>>,
) {
tcx.for_each_impl(trait_def_id, |impl_def_id| {
for def_id in tcx.associated_item_def_ids(impl_def_id).iter() {
@@ -48,7 +48,7 @@
});
}
-impl ChalkInferenceContext<'cx, 'gcx, 'tcx> {
+impl ChalkInferenceContext<'cx, 'tcx> {
pub(super) fn program_clauses_impl(
&self,
environment: &Environment<'tcx>,
diff --git a/src/librustc_traits/chalk_context/program_clauses/primitive.rs b/src/librustc_traits/chalk_context/program_clauses/primitive.rs
index c37c8fa..8e4b9da 100644
--- a/src/librustc_traits/chalk_context/program_clauses/primitive.rs
+++ b/src/librustc_traits/chalk_context/program_clauses/primitive.rs
@@ -15,10 +15,7 @@
use crate::generic_types;
use std::iter;
-crate fn wf_clause_for_raw_ptr<'tcx>(
- tcx: TyCtxt<'_, '_, 'tcx>,
- mutbl: hir::Mutability
-) -> Clauses<'tcx> {
+crate fn wf_clause_for_raw_ptr<'tcx>(tcx: TyCtxt<'tcx>, mutbl: hir::Mutability) -> Clauses<'tcx> {
let ptr_ty = generic_types::raw_ptr(tcx, mutbl);
let wf_clause = ProgramClause {
@@ -33,11 +30,11 @@
}
crate fn wf_clause_for_fn_ptr<'tcx>(
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
arity_and_output: usize,
variadic: bool,
unsafety: hir::Unsafety,
- abi: abi::Abi
+ abi: abi::Abi,
) -> Clauses<'tcx> {
let fn_ptr = generic_types::fn_ptr(tcx, arity_and_output, variadic, unsafety, abi);
@@ -53,7 +50,7 @@
tcx.mk_clauses(iter::once(wf_clause))
}
-crate fn wf_clause_for_slice<'tcx>(tcx: TyCtxt<'_, '_, 'tcx>) -> Clauses<'tcx> {
+crate fn wf_clause_for_slice<'tcx>(tcx: TyCtxt<'tcx>) -> Clauses<'tcx> {
let ty = generic_types::bound(tcx, 0);
let slice_ty = tcx.mk_slice(ty);
@@ -83,8 +80,8 @@
}
crate fn wf_clause_for_array<'tcx>(
- tcx: TyCtxt<'_, '_, 'tcx>,
- length: &'tcx ty::Const<'tcx>
+ tcx: TyCtxt<'tcx>,
+ length: &'tcx ty::Const<'tcx>,
) -> Clauses<'tcx> {
let ty = generic_types::bound(tcx, 0);
let array_ty = tcx.mk_ty(ty::Array(ty, length));
@@ -114,10 +111,7 @@
tcx.mk_clauses(iter::once(wf_clause))
}
-crate fn wf_clause_for_tuple<'tcx>(
- tcx: TyCtxt<'_, '_, 'tcx>,
- arity: usize
-) -> Clauses<'tcx> {
+crate fn wf_clause_for_tuple<'tcx>(tcx: TyCtxt<'tcx>, arity: usize) -> Clauses<'tcx> {
let type_list = generic_types::type_list(tcx, arity);
let tuple_ty = tcx.mk_ty(ty::Tuple(type_list));
@@ -158,10 +152,7 @@
tcx.mk_clauses(iter::once(wf_clause))
}
-crate fn wf_clause_for_ref<'tcx>(
- tcx: TyCtxt<'_, '_, 'tcx>,
- mutbl: hir::Mutability
-) -> Clauses<'tcx> {
+crate fn wf_clause_for_ref<'tcx>(tcx: TyCtxt<'tcx>, mutbl: hir::Mutability) -> Clauses<'tcx> {
let region = tcx.mk_region(
ty::ReLateBound(ty::INNERMOST, ty::BoundRegion::BrAnon(0))
);
@@ -185,10 +176,7 @@
tcx.mk_clauses(iter::once(wf_clause))
}
-crate fn wf_clause_for_fn_def<'tcx>(
- tcx: TyCtxt<'_, '_, 'tcx>,
- def_id: DefId
-) -> Clauses<'tcx> {
+crate fn wf_clause_for_fn_def<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Clauses<'tcx> {
let fn_def = generic_types::fn_def(tcx, def_id);
let wf_clause = ProgramClause {
diff --git a/src/librustc_traits/chalk_context/resolvent_ops.rs b/src/librustc_traits/chalk_context/resolvent_ops.rs
index f1b8588..1e8b026 100644
--- a/src/librustc_traits/chalk_context/resolvent_ops.rs
+++ b/src/librustc_traits/chalk_context/resolvent_ops.rs
@@ -25,8 +25,8 @@
use super::{ChalkInferenceContext, ChalkArenas, ChalkExClause, ConstrainedSubst};
use super::unify::*;
-impl context::ResolventOps<ChalkArenas<'gcx>, ChalkArenas<'tcx>>
- for ChalkInferenceContext<'cx, 'gcx, 'tcx>
+impl context::ResolventOps<ChalkArenas<'tcx>, ChalkArenas<'tcx>>
+ for ChalkInferenceContext<'cx, 'tcx>
{
fn resolvent_clause(
&mut self,
@@ -34,7 +34,7 @@
goal: &DomainGoal<'tcx>,
subst: &CanonicalVarValues<'tcx>,
clause: &Clause<'tcx>,
- ) -> Fallible<Canonical<'gcx, ChalkExClause<'gcx>>> {
+ ) -> Fallible<Canonical<'tcx, ChalkExClause<'tcx>>> {
use chalk_engine::context::UnificationOps;
debug!("resolvent_clause(goal = {:?}, clause = {:?})", goal, clause);
@@ -106,8 +106,8 @@
&mut self,
ex_clause: ChalkExClause<'tcx>,
selected_goal: &InEnvironment<'tcx, Goal<'tcx>>,
- answer_table_goal: &Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>>,
- canonical_answer_subst: &Canonical<'gcx, ConstrainedSubst<'gcx>>,
+ answer_table_goal: &Canonical<'tcx, InEnvironment<'tcx, Goal<'tcx>>>,
+ canonical_answer_subst: &Canonical<'tcx, ConstrainedSubst<'tcx>>,
) -> Fallible<ChalkExClause<'tcx>> {
debug!(
"apply_answer_subst(ex_clause = {:?}, selected_goal = {:?})",
@@ -139,15 +139,15 @@
}
}
-struct AnswerSubstitutor<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+struct AnswerSubstitutor<'cx, 'tcx: 'cx> {
+ infcx: &'cx InferCtxt<'cx, 'tcx>,
environment: Environment<'tcx>,
answer_subst: CanonicalVarValues<'tcx>,
binder_index: ty::DebruijnIndex,
ex_clause: ChalkExClause<'tcx>,
}
-impl AnswerSubstitutor<'cx, 'gcx, 'tcx> {
+impl AnswerSubstitutor<'cx, 'tcx> {
fn unify_free_answer_var(
&mut self,
answer_var: ty::BoundVar,
@@ -169,8 +169,8 @@
}
}
-impl TypeRelation<'cx, 'gcx, 'tcx> for AnswerSubstitutor<'cx, 'gcx, 'tcx> {
- fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> {
+impl TypeRelation<'tcx> for AnswerSubstitutor<'cx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
diff --git a/src/librustc_traits/chalk_context/unify.rs b/src/librustc_traits/chalk_context/unify.rs
index abb48127..d66faa9 100644
--- a/src/librustc_traits/chalk_context/unify.rs
+++ b/src/librustc_traits/chalk_context/unify.rs
@@ -10,12 +10,12 @@
crate constraints: Vec<super::RegionConstraint<'tcx>>,
}
-crate fn unify<'me, 'gcx, 'tcx, T: Relate<'tcx>>(
- infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,
+crate fn unify<'me, 'tcx, T: Relate<'tcx>>(
+ infcx: &'me InferCtxt<'me, 'tcx>,
environment: Environment<'tcx>,
variance: ty::Variance,
a: &T,
- b: &T
+ b: &T,
) -> RelateResult<'tcx, UnificationResult<'tcx>> {
debug!("unify(
a = {:?},
@@ -42,18 +42,15 @@
})
}
-struct ChalkTypeRelatingDelegate<'me, 'gcx: 'tcx, 'tcx: 'me> {
- infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,
+struct ChalkTypeRelatingDelegate<'me, 'tcx: 'me> {
+ infcx: &'me InferCtxt<'me, 'tcx>,
environment: Environment<'tcx>,
goals: Vec<InEnvironment<'tcx, Goal<'tcx>>>,
constraints: Vec<super::RegionConstraint<'tcx>>,
}
-impl ChalkTypeRelatingDelegate<'me, 'gcx, 'tcx> {
- fn new(
- infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,
- environment: Environment<'tcx>,
- ) -> Self {
+impl ChalkTypeRelatingDelegate<'me, 'tcx> {
+ fn new(infcx: &'me InferCtxt<'me, 'tcx>, environment: Environment<'tcx>) -> Self {
Self {
infcx,
environment,
@@ -63,7 +60,7 @@
}
}
-impl TypeRelatingDelegate<'tcx> for &mut ChalkTypeRelatingDelegate<'_, '_, 'tcx> {
+impl TypeRelatingDelegate<'tcx> for &mut ChalkTypeRelatingDelegate<'_, 'tcx> {
fn create_next_universe(&mut self) -> ty::UniverseIndex {
self.infcx.create_next_universe()
}
diff --git a/src/librustc_traits/dropck_outlives.rs b/src/librustc_traits/dropck_outlives.rs
index 25ce5c8..3abd7e9 100644
--- a/src/librustc_traits/dropck_outlives.rs
+++ b/src/librustc_traits/dropck_outlives.rs
@@ -18,7 +18,7 @@
}
fn dropck_outlives<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
canonical_goal: CanonicalTyGoal<'tcx>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, DropckOutlivesResult<'tcx>>>, NoSolution> {
debug!("dropck_outlives(goal={:#?})", canonical_goal);
@@ -146,8 +146,8 @@
/// Returns a set of constraints that needs to be satisfied in
/// order for `ty` to be valid for destruction.
-fn dtorck_constraint_for_ty<'a, 'gcx, 'tcx>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+fn dtorck_constraint_for_ty<'tcx>(
+ tcx: TyCtxt<'tcx>,
span: Span,
for_ty: Ty<'tcx>,
depth: usize,
@@ -279,8 +279,8 @@
}
/// Calculates the dtorck constraint for a type.
-crate fn adt_dtorck_constraint<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+crate fn adt_dtorck_constraint<'tcx>(
+ tcx: TyCtxt<'tcx>,
def_id: DefId,
) -> Result<DtorckConstraint<'tcx>, NoSolution> {
let def = tcx.adt_def(def_id);
diff --git a/src/librustc_traits/evaluate_obligation.rs b/src/librustc_traits/evaluate_obligation.rs
index 83aebd1..30a1814 100644
--- a/src/librustc_traits/evaluate_obligation.rs
+++ b/src/librustc_traits/evaluate_obligation.rs
@@ -14,7 +14,7 @@
}
fn evaluate_obligation<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
canonical_goal: CanonicalPredicateGoal<'tcx>,
) -> Result<EvaluationResult, OverflowError> {
tcx.infer_ctxt().enter_with_canonical(
@@ -29,7 +29,7 @@
let mut selcx = SelectionContext::with_query_mode(&infcx, TraitQueryMode::Canonical);
let obligation = Obligation::new(ObligationCause::dummy(), param_env, predicate);
- selcx.evaluate_obligation_recursively(&obligation)
+ selcx.evaluate_root_obligation(&obligation)
},
)
}
diff --git a/src/librustc_traits/generic_types.rs b/src/librustc_traits/generic_types.rs
index 6ea3626..bd2ed94 100644
--- a/src/librustc_traits/generic_types.rs
+++ b/src/librustc_traits/generic_types.rs
@@ -6,7 +6,7 @@
use rustc::hir::def_id::DefId;
use rustc_target::spec::abi;
-crate fn bound(tcx: TyCtxt<'_, '_, 'tcx>, index: u32) -> Ty<'tcx> {
+crate fn bound(tcx: TyCtxt<'tcx>, index: u32) -> Ty<'tcx> {
let ty = ty::Bound(
ty::INNERMOST,
ty::BoundVar::from_u32(index).into()
@@ -14,7 +14,7 @@
tcx.mk_ty(ty)
}
-crate fn raw_ptr(tcx: TyCtxt<'_, '_, 'tcx>, mutbl: hir::Mutability) -> Ty<'tcx> {
+crate fn raw_ptr(tcx: TyCtxt<'tcx>, mutbl: hir::Mutability) -> Ty<'tcx> {
tcx.mk_ptr(ty::TypeAndMut {
ty: bound(tcx, 0),
mutbl,
@@ -22,11 +22,11 @@
}
crate fn fn_ptr(
- tcx: TyCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'tcx>,
arity_and_output: usize,
c_variadic: bool,
unsafety: hir::Unsafety,
- abi: abi::Abi
+ abi: abi::Abi,
) -> Ty<'tcx> {
let inputs_and_output = tcx.mk_type_list(
(0..arity_and_output).into_iter()
@@ -44,7 +44,7 @@
tcx.mk_fn_ptr(fn_sig)
}
-crate fn type_list(tcx: TyCtxt<'_, '_, 'tcx>, arity: usize) -> SubstsRef<'tcx> {
+crate fn type_list(tcx: TyCtxt<'tcx>, arity: usize) -> SubstsRef<'tcx> {
tcx.mk_substs(
(0..arity).into_iter()
.map(|i| ty::BoundVar::from(i))
@@ -53,7 +53,7 @@
)
}
-crate fn ref_ty(tcx: TyCtxt<'_, '_, 'tcx>, mutbl: hir::Mutability) -> Ty<'tcx> {
+crate fn ref_ty(tcx: TyCtxt<'tcx>, mutbl: hir::Mutability) -> Ty<'tcx> {
let region = tcx.mk_region(
ty::ReLateBound(ty::INNERMOST, ty::BoundRegion::BrAnon(0))
);
@@ -64,17 +64,17 @@
})
}
-crate fn fn_def(tcx: TyCtxt<'_, '_, 'tcx>, def_id: DefId) -> Ty<'tcx> {
+crate fn fn_def(tcx: TyCtxt<'tcx>, def_id: DefId) -> Ty<'tcx> {
tcx.mk_ty(ty::FnDef(def_id, InternalSubsts::bound_vars_for_item(tcx, def_id)))
}
-crate fn closure(tcx: TyCtxt<'_, '_, 'tcx>, def_id: DefId) -> Ty<'tcx> {
+crate fn closure(tcx: TyCtxt<'tcx>, def_id: DefId) -> Ty<'tcx> {
tcx.mk_closure(def_id, ty::ClosureSubsts {
substs: InternalSubsts::bound_vars_for_item(tcx, def_id),
})
}
-crate fn generator(tcx: TyCtxt<'_, '_, 'tcx>, def_id: DefId) -> Ty<'tcx> {
+crate fn generator(tcx: TyCtxt<'tcx>, def_id: DefId) -> Ty<'tcx> {
tcx.mk_generator(def_id, ty::GeneratorSubsts {
substs: InternalSubsts::bound_vars_for_item(tcx, def_id),
}, hir::GeneratorMovability::Movable)
diff --git a/src/librustc_traits/implied_outlives_bounds.rs b/src/librustc_traits/implied_outlives_bounds.rs
index 73bb3fb..7f9ebdc 100644
--- a/src/librustc_traits/implied_outlives_bounds.rs
+++ b/src/librustc_traits/implied_outlives_bounds.rs
@@ -23,11 +23,11 @@
}
fn implied_outlives_bounds<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
goal: CanonicalTyGoal<'tcx>,
) -> Result<
- &'tcx Canonical<'tcx, canonical::QueryResponse<'tcx, Vec<OutlivesBound<'tcx>>>>,
- NoSolution,
+ &'tcx Canonical<'tcx, canonical::QueryResponse<'tcx, Vec<OutlivesBound<'tcx>>>>,
+ NoSolution,
> {
tcx.infer_ctxt()
.enter_canonical_trait_query(&goal, |infcx, _fulfill_cx, key| {
@@ -37,9 +37,9 @@
}
fn compute_implied_outlives_bounds<'tcx>(
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
- ty: Ty<'tcx>
+ ty: Ty<'tcx>,
) -> Fallible<Vec<OutlivesBound<'tcx>>> {
let tcx = infcx.tcx;
diff --git a/src/librustc_traits/lowering/environment.rs b/src/librustc_traits/lowering/environment.rs
index 3570cb1..d1bad6b 100644
--- a/src/librustc_traits/lowering/environment.rs
+++ b/src/librustc_traits/lowering/environment.rs
@@ -11,13 +11,13 @@
use rustc::hir::def_id::DefId;
use rustc_data_structures::fx::FxHashSet;
-struct ClauseVisitor<'set, 'a, 'tcx: 'a + 'set> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- round: &'set mut FxHashSet<Clause<'tcx>>,
+struct ClauseVisitor<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ round: &'a mut FxHashSet<Clause<'tcx>>,
}
-impl ClauseVisitor<'set, 'a, 'tcx> {
- fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, round: &'set mut FxHashSet<Clause<'tcx>>) -> Self {
+impl ClauseVisitor<'a, 'tcx> {
+ fn new(tcx: TyCtxt<'tcx>, round: &'a mut FxHashSet<Clause<'tcx>>) -> Self {
ClauseVisitor {
tcx,
round,
@@ -127,8 +127,8 @@
}
}
-crate fn program_clauses_for_env<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+crate fn program_clauses_for_env<'tcx>(
+ tcx: TyCtxt<'tcx>,
environment: Environment<'tcx>,
) -> Clauses<'tcx> {
debug!("program_clauses_for_env(environment={:?})", environment);
@@ -160,10 +160,7 @@
);
}
-crate fn environment<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId
-) -> Environment<'tcx> {
+crate fn environment<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Environment<'tcx> {
use super::{Lower, IntoFromEnvGoal};
use rustc::hir::{Node, TraitItemKind, ImplItemKind, ItemKind, ForeignItemKind};
diff --git a/src/librustc_traits/lowering/mod.rs b/src/librustc_traits/lowering/mod.rs
index 80b0868..2a66131 100644
--- a/src/librustc_traits/lowering/mod.rs
+++ b/src/librustc_traits/lowering/mod.rs
@@ -155,10 +155,7 @@
}
}
-crate fn program_clauses_for<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
-) -> Clauses<'tcx> {
+crate fn program_clauses_for<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Clauses<'tcx> {
// FIXME(eddyb) this should only be using `def_kind`.
match tcx.def_key(def_id).disambiguated_data.data {
DefPathData::TypeNs(..) => match tcx.def_kind(def_id) {
@@ -184,10 +181,7 @@
}
}
-fn program_clauses_for_trait<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
-) -> Clauses<'tcx> {
+fn program_clauses_for_trait<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Clauses<'tcx> {
// `trait Trait<P1..Pn> where WC { .. } // P0 == Self`
// Rule Implemented-From-Env (see rustc guide)
@@ -300,7 +294,7 @@
)
}
-fn program_clauses_for_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Clauses<'tcx> {
+fn program_clauses_for_impl(tcx: TyCtxt<'tcx>, def_id: DefId) -> Clauses<'tcx> {
if let ImplPolarity::Negative = tcx.impl_polarity(def_id) {
return List::empty();
}
@@ -343,10 +337,7 @@
tcx.mk_clauses(iter::once(Clause::ForAll(ty::Binder::bind(clause))))
}
-pub fn program_clauses_for_type_def<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
-) -> Clauses<'tcx> {
+pub fn program_clauses_for_type_def<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Clauses<'tcx> {
// Rule WellFormed-Type
//
// `struct Ty<P1..Pn> where WC1, ..., WCm`
@@ -420,8 +411,8 @@
tcx.mk_clauses(iter::once(well_formed_clause).chain(from_env_clauses))
}
-pub fn program_clauses_for_associated_type_def<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub fn program_clauses_for_associated_type_def<'tcx>(
+ tcx: TyCtxt<'tcx>,
item_id: DefId,
) -> Clauses<'tcx> {
// Rule ProjectionEq-Placeholder
@@ -558,8 +549,8 @@
tcx.mk_clauses(clauses)
}
-pub fn program_clauses_for_associated_type_value<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub fn program_clauses_for_associated_type_value<'tcx>(
+ tcx: TyCtxt<'tcx>,
item_id: DefId,
) -> Clauses<'tcx> {
// Rule Normalize-From-Impl (see rustc guide)
@@ -620,7 +611,7 @@
tcx.mk_clauses(iter::once(normalize_clause))
}
-pub fn dump_program_clauses<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn dump_program_clauses<'tcx>(tcx: TyCtxt<'tcx>) {
if !tcx.features().rustc_attrs {
return;
}
@@ -631,11 +622,11 @@
.visit_all_item_likes(&mut visitor.as_deep_visitor());
}
-struct ClauseDumper<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct ClauseDumper<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> ClauseDumper<'a, 'tcx> {
+impl ClauseDumper<'tcx> {
fn process_attrs(&mut self, hir_id: hir::HirId, attrs: &[ast::Attribute]) {
let def_id = self.tcx.hir().local_def_id_from_hir_id(hir_id);
for attr in attrs {
@@ -673,7 +664,7 @@
}
}
-impl<'a, 'tcx> Visitor<'tcx> for ClauseDumper<'a, 'tcx> {
+impl Visitor<'tcx> for ClauseDumper<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::OnlyBodies(&self.tcx.hir())
}
diff --git a/src/librustc_traits/normalize_erasing_regions.rs b/src/librustc_traits/normalize_erasing_regions.rs
index 24fa5e9..bfa1a80 100644
--- a/src/librustc_traits/normalize_erasing_regions.rs
+++ b/src/librustc_traits/normalize_erasing_regions.rs
@@ -12,7 +12,7 @@
}
fn normalize_ty_after_erasing_regions<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
goal: ParamEnvAnd<'tcx, Ty<'tcx>>,
) -> Ty<'tcx> {
debug!("normalize_ty_after_erasing_regions(goal={:#?})", goal);
diff --git a/src/librustc_traits/normalize_projection_ty.rs b/src/librustc_traits/normalize_projection_ty.rs
index 3ff04bc..7e0ca5b 100644
--- a/src/librustc_traits/normalize_projection_ty.rs
+++ b/src/librustc_traits/normalize_projection_ty.rs
@@ -15,7 +15,7 @@
}
fn normalize_projection_ty<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
goal: CanonicalProjectionGoal<'tcx>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, NormalizationResult<'tcx>>>, NoSolution> {
debug!("normalize_provider(goal={:#?})", goal);
diff --git a/src/librustc_traits/type_op.rs b/src/librustc_traits/type_op.rs
index ea37024..dcbb0df 100644
--- a/src/librustc_traits/type_op.rs
+++ b/src/librustc_traits/type_op.rs
@@ -35,7 +35,7 @@
}
fn type_op_ascribe_user_type<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, AscribeUserType<'tcx>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>, NoSolution> {
tcx.infer_ctxt()
@@ -56,13 +56,13 @@
})
}
-struct AscribeUserTypeCx<'me, 'gcx: 'tcx, 'tcx: 'me> {
- infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,
+struct AscribeUserTypeCx<'me, 'tcx: 'me> {
+ infcx: &'me InferCtxt<'me, 'tcx>,
param_env: ParamEnv<'tcx>,
fulfill_cx: &'me mut dyn TraitEngine<'tcx>,
}
-impl AscribeUserTypeCx<'me, 'gcx, 'tcx> {
+impl AscribeUserTypeCx<'me, 'tcx> {
fn normalize<T>(&mut self, value: T) -> T
where
T: TypeFoldable<'tcx>,
@@ -94,7 +94,7 @@
);
}
- fn tcx(&self) -> TyCtxt<'me, 'gcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
@@ -167,7 +167,7 @@
}
fn type_op_eq<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Eq<'tcx>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>, NoSolution> {
tcx.infer_ctxt()
@@ -181,12 +181,12 @@
}
fn type_op_normalize<T>(
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'tcx>,
fulfill_cx: &mut dyn TraitEngine<'tcx>,
key: ParamEnvAnd<'tcx, Normalize<T>>,
) -> Fallible<T>
where
- T: fmt::Debug + TypeFoldable<'tcx> + Lift<'gcx>,
+ T: fmt::Debug + TypeFoldable<'tcx> + Lift<'tcx>,
{
let (param_env, Normalize { value }) = key.into_parts();
let Normalized { value, obligations } = infcx
@@ -197,7 +197,7 @@
}
fn type_op_normalize_ty(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<Ty<'tcx>>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, Ty<'tcx>>>, NoSolution> {
tcx.infer_ctxt()
@@ -205,7 +205,7 @@
}
fn type_op_normalize_predicate(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<Predicate<'tcx>>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, Predicate<'tcx>>>, NoSolution> {
tcx.infer_ctxt()
@@ -213,7 +213,7 @@
}
fn type_op_normalize_fn_sig(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<FnSig<'tcx>>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, FnSig<'tcx>>>, NoSolution> {
tcx.infer_ctxt()
@@ -221,7 +221,7 @@
}
fn type_op_normalize_poly_fn_sig(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<PolyFnSig<'tcx>>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, PolyFnSig<'tcx>>>, NoSolution> {
tcx.infer_ctxt()
@@ -229,7 +229,7 @@
}
fn type_op_subtype<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Subtype<'tcx>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>, NoSolution> {
tcx.infer_ctxt()
@@ -243,7 +243,7 @@
}
fn type_op_prove_predicate<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, ProvePredicate<'tcx>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>, NoSolution> {
tcx.infer_ctxt()
diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs
index 5a46c9d..97124e5 100644
--- a/src/librustc_typeck/astconv.rs
+++ b/src/librustc_typeck/astconv.rs
@@ -40,8 +40,8 @@
#[derive(Debug)]
pub struct PathSeg(pub DefId, pub usize);
-pub trait AstConv<'gcx, 'tcx> {
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'gcx, 'tcx>;
+pub trait AstConv<'tcx> {
+ fn tcx<'a>(&'a self) -> TyCtxt<'tcx>;
/// Returns the set of bounds in scope for the type parameter with
/// the given id.
@@ -115,7 +115,7 @@
MethodCall,
}
-impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o {
+impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
pub fn ast_region_to_region(&self,
lifetime: &hir::Lifetime,
def: Option<&ty::GenericParamDef>)
@@ -208,7 +208,7 @@
/// Report error if there is an explicit type parameter when using `impl Trait`.
fn check_impl_trait(
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
span: Span,
seg: &hir::PathSegment,
generics: &ty::Generics,
@@ -239,7 +239,7 @@
/// Checks that the correct number of generic arguments have been provided.
/// Used specifically for function calls.
pub fn check_generic_arg_count_for_call(
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
span: Span,
def: &ty::Generics,
seg: &hir::PathSegment,
@@ -271,7 +271,7 @@
/// Checks that the correct number of generic arguments have been provided.
/// This is used both for datatypes and function calls.
fn check_generic_arg_count(
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
span: Span,
def: &ty::Generics,
args: &hir::GenericArgs,
@@ -461,8 +461,8 @@
/// instantiate a `Kind`.
/// - `inferred_kind`: if no parameter was provided, and inference is enabled, then
/// creates a suitable inference variable.
- pub fn create_substs_for_generic_args<'a, 'b>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ pub fn create_substs_for_generic_args<'b>(
+ tcx: TyCtxt<'tcx>,
def_id: DefId,
parent_substs: &[Kind<'tcx>],
has_self: bool,
@@ -1810,7 +1810,7 @@
has_err
}
- pub fn prohibit_assoc_ty_binding(tcx: TyCtxt<'_, '_, '_>, span: Span) {
+ pub fn prohibit_assoc_ty_binding(tcx: TyCtxt<'_>, span: Span) {
let mut err = struct_span_err!(tcx.sess, span, E0229,
"associated type bindings are not allowed here");
err.span_label(span, "associated type not allowed here").emit();
@@ -2157,6 +2157,14 @@
/// Returns the `DefId` of the constant parameter that the provided expression is a path to.
pub fn const_param_def_id(&self, expr: &hir::Expr) -> Option<DefId> {
+ // Unwrap a block, so that e.g. `{ P }` is recognised as a parameter. Const arguments
+ // currently have to be wrapped in curly brackets, so it's necessary to special-case.
+ let expr = match &expr.node {
+ ExprKind::Block(block, _) if block.stmts.is_empty() && block.expr.is_some() =>
+ block.expr.as_ref().unwrap(),
+ _ => expr,
+ };
+
match &expr.node {
ExprKind::Path(hir::QPath::Resolved(_, path)) => match path.res {
Res::Def(DefKind::ConstParam, did) => Some(did),
@@ -2184,27 +2192,16 @@
ty,
};
- let mut expr = &tcx.hir().body(ast_const.body).value;
-
- // Unwrap a block, so that e.g. `{ P }` is recognised as a parameter. Const arguments
- // currently have to be wrapped in curly brackets, so it's necessary to special-case.
- if let ExprKind::Block(block, _) = &expr.node {
- if block.stmts.is_empty() {
- if let Some(trailing) = &block.expr {
- expr = &trailing;
- }
- }
- }
-
+ let expr = &tcx.hir().body(ast_const.body).value;
if let Some(def_id) = self.const_param_def_id(expr) {
// Find the name and index of the const parameter by indexing the generics of the
// parent item and construct a `ParamConst`.
- let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
- let item_id = tcx.hir().get_parent_node(node_id);
- let item_def_id = tcx.hir().local_def_id(item_id);
+ let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+ let item_id = tcx.hir().get_parent_node_by_hir_id(hir_id);
+ let item_def_id = tcx.hir().local_def_id_from_hir_id(item_id);
let generics = tcx.generics_of(item_def_id);
- let index = generics.param_def_id_to_index[&tcx.hir().local_def_id(node_id)];
- let name = tcx.hir().name(node_id).as_interned_str();
+ let index = generics.param_def_id_to_index[&tcx.hir().local_def_id_from_hir_id(hir_id)];
+ let name = tcx.hir().name_by_hir_id(hir_id).as_interned_str();
const_.val = ConstValue::Param(ty::ParamConst::new(index, name));
}
@@ -2415,14 +2412,16 @@
pub implicitly_sized: Option<Span>,
}
-impl<'a, 'gcx, 'tcx> Bounds<'tcx> {
+impl<'tcx> Bounds<'tcx> {
/// Converts a bounds list into a flat set of predicates (like
/// where-clauses). Because some of our bounds listings (e.g.,
/// regions) don't include the self-type, you must supply the
/// self-type here (the `param_ty` parameter).
- pub fn predicates(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, param_ty: Ty<'tcx>)
- -> Vec<(ty::Predicate<'tcx>, Span)>
- {
+ pub fn predicates(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ param_ty: Ty<'tcx>,
+ ) -> Vec<(ty::Predicate<'tcx>, Span)> {
// If it could be sized, and is, add the `Sized` predicate.
let sized_predicate = self.implicitly_sized.and_then(|span| {
tcx.lang_items().sized_trait().map(|sized| {
diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs
index 3fea080..65a36d9 100644
--- a/src/librustc_typeck/check/_match.rs
+++ b/src/librustc_typeck/check/_match.rs
@@ -22,7 +22,7 @@
use super::report_unexpected_variant_res;
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// `discrim_span` argument having a `Span` indicates that this pattern is part of a match
/// expression arm guard, and it points to the match discriminant to add context in type errors.
/// In the following example, `discrim_span` corresponds to the `a + b` expression:
@@ -41,7 +41,7 @@
/// ```
pub fn check_pat_walk(
&self,
- pat: &'gcx hir::Pat,
+ pat: &'tcx hir::Pat,
mut expected: Ty<'tcx>,
mut def_bm: ty::BindingMode,
discrim_span: Option<Span>,
@@ -50,6 +50,7 @@
debug!("check_pat_walk(pat={:?},expected={:?},def_bm={:?})", pat, expected, def_bm);
+ let mut path_resolution = None;
let is_non_ref_pat = match pat.node {
PatKind::Struct(..) |
PatKind::TupleStruct(..) |
@@ -65,8 +66,9 @@
}
}
PatKind::Path(ref qpath) => {
- let (def, _, _) = self.resolve_ty_and_res_ufcs(qpath, pat.hir_id, pat.span);
- match def {
+ let resolution = self.resolve_ty_and_res_ufcs(qpath, pat.hir_id, pat.span);
+ path_resolution = Some(resolution);
+ match resolution.0 {
Res::Def(DefKind::Const, _) | Res::Def(DefKind::AssocConst, _) => false,
_ => true,
}
@@ -132,7 +134,7 @@
// }
// ```
//
- // cc #46688
+ // See issue #46688.
def_bm = ty::BindByValue(hir::MutImmutable);
}
@@ -150,7 +152,7 @@
let ty = self.node_ty(lt.hir_id);
// Byte string patterns behave the same way as array patterns
- // They can denote both statically and dynamically sized byte arrays
+ // They can denote both statically and dynamically-sized byte arrays.
let mut pat_ty = ty;
if let hir::ExprKind::Lit(ref lt) = lt.node {
if let ast::LitKind::ByteStr(_) = lt.node {
@@ -164,7 +166,7 @@
}
}
- // somewhat surprising: in this case, the subtyping
+ // Somewhat surprising: in this case, the subtyping
// relation goes the opposite way as the other
// cases. Actually what we really want is not a subtyping
// relation at all but rather that there exists a LUB (so
@@ -175,7 +177,7 @@
//
// &'static str <: expected
//
- // that's equivalent to there existing a LUB.
+ // then that's equivalent to there existing a LUB.
if let Some(mut err) = self.demand_suptype_diag(pat.span, expected, pat_ty) {
err.emit_unless(discrim_span
.filter(|&s| s.is_compiler_desugaring(CompilerDesugaringKind::IfTemporary))
@@ -228,7 +230,7 @@
// it to type the entire expression.
let common_type = self.resolve_vars_if_possible(&lhs_ty);
- // subtyping doesn't matter here, as the value is some kind of scalar
+ // Subtyping doesn't matter here, as the value is some kind of scalar.
self.demand_eqtype_pat(pat.span, expected, lhs_ty, discrim_span);
self.demand_eqtype_pat(pat.span, expected, rhs_ty, discrim_span);
common_type
@@ -248,8 +250,8 @@
let local_ty = self.local_ty(pat.span, pat.hir_id).decl_ty;
match bm {
ty::BindByReference(mutbl) => {
- // if the binding is like
- // ref x | ref const x | ref mut x
+ // If the binding is like
+ // ref x | ref const x | ref mut x
// then `x` is assigned a value of type `&M T` where M is the mutability
// and T is the expected type.
let region_var = self.next_region_var(infer::PatternRegion(pat.span));
@@ -261,16 +263,16 @@
// an explanation.
self.demand_eqtype_pat(pat.span, region_ty, local_ty, discrim_span);
}
- // otherwise the type of x is the expected type T
+ // Otherwise, the type of x is the expected type `T`.
ty::BindByValue(_) => {
- // As above, `T <: typeof(x)` is required but we
+ // As above, `T <: typeof(x)` is required, but we
// use equality, see (*) below.
self.demand_eqtype_pat(pat.span, expected, local_ty, discrim_span);
}
}
- // if there are multiple arms, make sure they all agree on
- // what the type of the binding `x` ought to be
+ // If there are multiple arms, make sure they all agree on
+ // what the type of the binding `x` ought to be.
if var_id != pat.hir_id {
let vt = self.local_ty(pat.span, var_id).decl_ty;
self.demand_eqtype_pat(pat.span, vt, local_ty, discrim_span);
@@ -294,7 +296,7 @@
)
}
PatKind::Path(ref qpath) => {
- self.check_pat_path(pat, qpath, expected)
+ self.check_pat_path(pat, path_resolution.unwrap(), qpath, expected)
}
PatKind::Struct(ref qpath, ref fields, etc) => {
self.check_pat_struct(pat, qpath, fields, etc, expected, def_bm, discrim_span)
@@ -613,9 +615,9 @@
pub fn check_match(
&self,
- expr: &'gcx hir::Expr,
- discrim: &'gcx hir::Expr,
- arms: &'gcx [hir::Arm],
+ expr: &'tcx hir::Expr,
+ discrim: &'tcx hir::Expr,
+ arms: &'tcx [hir::Arm],
expected: Expectation<'tcx>,
match_src: hir::MatchSource,
) -> Ty<'tcx> {
@@ -769,7 +771,7 @@
/// When the previously checked expression (the scrutinee) diverges,
/// warn the user about the match arms being unreachable.
- fn warn_arms_when_scrutinee_diverges(&self, arms: &'gcx [hir::Arm], source_if: bool) {
+ fn warn_arms_when_scrutinee_diverges(&self, arms: &'tcx [hir::Arm], source_if: bool) {
if self.diverges.get().always() {
let msg = if source_if { "block in `if` expression" } else { "arm" };
for arm in arms {
@@ -782,8 +784,8 @@
fn if_fallback_coercion(
&self,
span: Span,
- then_expr: &'gcx hir::Expr,
- coercion: &mut CoerceMany<'gcx, 'tcx, '_, rustc::hir::Arm>,
+ then_expr: &'tcx hir::Expr,
+ coercion: &mut CoerceMany<'tcx, '_, rustc::hir::Arm>,
) {
// If this `if` expr is the parent's function return expr,
// the cause of the type coercion is the return type, point at it. (#25228)
@@ -839,8 +841,8 @@
fn if_cause(
&self,
span: Span,
- then_expr: &'gcx hir::Expr,
- else_expr: &'gcx hir::Expr,
+ then_expr: &'tcx hir::Expr,
+ else_expr: &'tcx hir::Expr,
then_ty: Ty<'tcx>,
else_ty: Ty<'tcx>,
) -> ObligationCause<'tcx> {
@@ -878,7 +880,7 @@
// possibly incorrect trailing `;` in the else arm
remove_semicolon = self.could_remove_semicolon(block, then_ty);
stmt.span
- } else { // empty block, point at its entirety
+ } else { // empty block; point at its entirety
// Avoid overlapping spans that aren't as readable:
// ```
// 2 | let x = if true {
@@ -915,7 +917,7 @@
else_expr.span
};
- // Compute `Span` of `then` part of `if`-expression:
+ // Compute `Span` of `then` part of `if`-expression.
let then_sp = if let ExprKind::Block(block, _) = &then_expr.node {
if let Some(expr) = &block.expr {
expr.span
@@ -923,11 +925,11 @@
// possibly incorrect trailing `;` in the else arm
remove_semicolon = remove_semicolon.or(self.could_remove_semicolon(block, else_ty));
stmt.span
- } else { // empty block, point at its entirety
- outer_sp = None; // same as in `error_sp`, cleanup output
+ } else { // empty block; point at its entirety
+ outer_sp = None; // same as in `error_sp`; cleanup output
then_expr.span
}
- } else { // shouldn't happen unless the parser has done something weird
+ } else { // shouldn't happen unless the parser has done something weird
then_expr.span
};
@@ -941,8 +943,8 @@
fn demand_discriminant_type(
&self,
- arms: &'gcx [hir::Arm],
- discrim: &'gcx hir::Expr,
+ arms: &'tcx [hir::Arm],
+ discrim: &'tcx hir::Expr,
) -> Ty<'tcx> {
// Not entirely obvious: if matches may create ref bindings, we want to
// use the *precise* type of the discriminant, *not* some supertype, as
@@ -1020,15 +1022,14 @@
fn check_pat_struct(
&self,
- pat: &'gcx hir::Pat,
+ pat: &'tcx hir::Pat,
qpath: &hir::QPath,
- fields: &'gcx [Spanned<hir::FieldPat>],
+ fields: &'tcx [Spanned<hir::FieldPat>],
etc: bool,
expected: Ty<'tcx>,
def_bm: ty::BindingMode,
discrim_span: Option<Span>,
- ) -> Ty<'tcx>
- {
+ ) -> Ty<'tcx> {
// Resolve the path and check the definition for errors.
let (variant, pat_ty) = if let Some(variant_ty) = self.check_struct_path(qpath, pat.hir_id)
{
@@ -1055,22 +1056,20 @@
fn check_pat_path(
&self,
pat: &hir::Pat,
+ path_resolution: (Res, Option<Ty<'tcx>>, &'b [hir::PathSegment]),
qpath: &hir::QPath,
expected: Ty<'tcx>,
) -> Ty<'tcx> {
let tcx = self.tcx;
- // Resolve the path and check the definition for errors.
- let (res, opt_ty, segments) = self.resolve_ty_and_res_ufcs(qpath, pat.hir_id, pat.span);
+ // We have already resolved the path.
+ let (res, opt_ty, segments) = path_resolution;
match res {
Res::Err => {
self.set_tainted_by_errors();
return tcx.types.err;
}
- Res::Def(DefKind::Method, _) => {
- report_unexpected_variant_res(tcx, res, pat.span, qpath);
- return tcx.types.err;
- }
+ Res::Def(DefKind::Method, _) |
Res::Def(DefKind::Ctor(_, CtorKind::Fictive), _) |
Res::Def(DefKind::Ctor(_, CtorKind::Fn), _) => {
report_unexpected_variant_res(tcx, res, pat.span, qpath);
@@ -1091,7 +1090,7 @@
&self,
pat: &hir::Pat,
qpath: &hir::QPath,
- subpats: &'gcx [P<hir::Pat>],
+ subpats: &'tcx [P<hir::Pat>],
ddpos: Option<usize>,
expected: Ty<'tcx>,
def_bm: ty::BindingMode,
@@ -1195,7 +1194,7 @@
pat_id: hir::HirId,
span: Span,
variant: &'tcx ty::VariantDef,
- fields: &'gcx [Spanned<hir::FieldPat>],
+ fields: &'tcx [Spanned<hir::FieldPat>],
etc: bool,
def_bm: ty::BindingMode,
) -> bool {
diff --git a/src/librustc_typeck/check/autoderef.rs b/src/librustc_typeck/check/autoderef.rs
index 38c3ee7..dc4969d 100644
--- a/src/librustc_typeck/check/autoderef.rs
+++ b/src/librustc_typeck/check/autoderef.rs
@@ -20,8 +20,8 @@
Overloaded,
}
-pub struct Autoderef<'a, 'gcx: 'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+pub struct Autoderef<'a, 'tcx: 'a> {
+ infcx: &'a InferCtxt<'a, 'tcx>,
body_id: hir::HirId,
param_env: ty::ParamEnv<'tcx>,
steps: Vec<(Ty<'tcx>, AutoderefKind)>,
@@ -31,10 +31,10 @@
include_raw_pointers: bool,
span: Span,
silence_errors: bool,
- reached_recursion_limit: bool
+ reached_recursion_limit: bool,
}
-impl<'a, 'gcx, 'tcx> Iterator for Autoderef<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Iterator for Autoderef<'a, 'tcx> {
type Item = (Ty<'tcx>, usize);
fn next(&mut self) -> Option<Self::Item> {
@@ -85,14 +85,14 @@
}
}
-impl<'a, 'gcx, 'tcx> Autoderef<'a, 'gcx, 'tcx> {
- pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- body_id: hir::HirId,
- span: Span,
- base_ty: Ty<'tcx>)
- -> Autoderef<'a, 'gcx, 'tcx>
- {
+impl<'a, 'tcx> Autoderef<'a, 'tcx> {
+ pub fn new(
+ infcx: &'a InferCtxt<'a, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ body_id: hir::HirId,
+ span: Span,
+ base_ty: Ty<'tcx>,
+ ) -> Autoderef<'a, 'tcx> {
Autoderef {
infcx,
body_id,
@@ -157,7 +157,7 @@
/// Returns the final type, generating an error if it is an
/// unresolved inference variable.
- pub fn unambiguous_final_ty(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
+ pub fn unambiguous_final_ty(&self, fcx: &FnCtxt<'a, 'tcx>) -> Ty<'tcx> {
fcx.structurally_resolved_type(self.span, self.cur_ty)
}
@@ -172,13 +172,15 @@
}
/// Returns the adjustment steps.
- pub fn adjust_steps(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>, needs: Needs)
- -> Vec<Adjustment<'tcx>> {
+ pub fn adjust_steps(&self, fcx: &FnCtxt<'a, 'tcx>, needs: Needs) -> Vec<Adjustment<'tcx>> {
fcx.register_infer_ok_obligations(self.adjust_steps_as_infer_ok(fcx, needs))
}
- pub fn adjust_steps_as_infer_ok(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>, needs: Needs)
- -> InferOk<'tcx, Vec<Adjustment<'tcx>>> {
+ pub fn adjust_steps_as_infer_ok(
+ &self,
+ fcx: &FnCtxt<'a, 'tcx>,
+ needs: Needs,
+ ) -> InferOk<'tcx, Vec<Adjustment<'tcx>>> {
let mut obligations = vec![];
let targets = self.steps.iter().skip(1).map(|&(ty, _)| ty)
.chain(iter::once(self.cur_ty));
@@ -230,7 +232,7 @@
self.reached_recursion_limit
}
- pub fn finalize(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) {
+ pub fn finalize(self, fcx: &FnCtxt<'a, 'tcx>) {
fcx.register_predicates(self.into_obligations());
}
@@ -239,9 +241,7 @@
}
}
-pub fn report_autoderef_recursion_limit_error<'a, 'gcx, 'tcx>(
- tcx: TyCtxt<'a, 'gcx, 'tcx>, span: Span, ty: Ty<'tcx>)
-{
+pub fn report_autoderef_recursion_limit_error<'tcx>(tcx: TyCtxt<'tcx>, span: Span, ty: Ty<'tcx>) {
// We've reached the recursion limit, error gracefully.
let suggested_limit = *tcx.sess.recursion_limit.get() * 2;
let msg = format!("reached the recursion limit while auto-dereferencing `{:?}`",
@@ -262,8 +262,8 @@
}
}
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
- pub fn autoderef(&'a self, span: Span, base_ty: Ty<'tcx>) -> Autoderef<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
+ pub fn autoderef(&'a self, span: Span, base_ty: Ty<'tcx>) -> Autoderef<'a, 'tcx> {
Autoderef::new(self, self.param_env, self.body_id, span, base_ty)
}
diff --git a/src/librustc_typeck/check/callee.rs b/src/librustc_typeck/check/callee.rs
index 8d32583..42c7ff6 100644
--- a/src/librustc_typeck/check/callee.rs
+++ b/src/librustc_typeck/check/callee.rs
@@ -18,7 +18,7 @@
/// Checks that it is legal to call methods of the trait corresponding
/// to `trait_id` (this only cares about the trait, not the specific
/// method that is called).
-pub fn check_legal_trait_for_method_call(tcx: TyCtxt<'_, '_, '_>, span: Span, trait_id: DefId) {
+pub fn check_legal_trait_for_method_call(tcx: TyCtxt<'_>, span: Span, trait_id: DefId) {
if tcx.lang_items().drop_trait() == Some(trait_id) {
struct_span_err!(tcx.sess, span, E0040, "explicit use of destructor method")
.span_label(span, "explicit destructor calls not allowed")
@@ -33,12 +33,12 @@
Overloaded(MethodCallee<'tcx>),
}
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub fn check_call(
&self,
- call_expr: &'gcx hir::Expr,
- callee_expr: &'gcx hir::Expr,
- arg_exprs: &'gcx [hir::Expr],
+ call_expr: &'tcx hir::Expr,
+ callee_expr: &'tcx hir::Expr,
+ arg_exprs: &'tcx [hir::Expr],
expected: Expectation<'tcx>,
) -> Ty<'tcx> {
let original_callee_ty = self.check_expr(callee_expr);
@@ -78,10 +78,10 @@
fn try_overloaded_call_step(
&self,
- call_expr: &'gcx hir::Expr,
- callee_expr: &'gcx hir::Expr,
- arg_exprs: &'gcx [hir::Expr],
- autoderef: &Autoderef<'a, 'gcx, 'tcx>,
+ call_expr: &'tcx hir::Expr,
+ callee_expr: &'tcx hir::Expr,
+ arg_exprs: &'tcx [hir::Expr],
+ autoderef: &Autoderef<'a, 'tcx>,
) -> Option<CallStep<'tcx>> {
let adjusted_ty = autoderef.unambiguous_final_ty(self);
debug!(
@@ -165,7 +165,7 @@
&self,
call_expr: &hir::Expr,
adjusted_ty: Ty<'tcx>,
- opt_arg_exprs: Option<&'gcx [hir::Expr]>,
+ opt_arg_exprs: Option<&'tcx [hir::Expr]>,
) -> Option<(Option<Adjustment<'tcx>>, MethodCallee<'tcx>)> {
// Try the options that are least restrictive on the caller first.
for &(opt_trait_def_id, method_name, borrow) in &[
@@ -265,7 +265,7 @@
&self,
call_expr: &hir::Expr,
callee_ty: Ty<'tcx>,
- arg_exprs: &'gcx [hir::Expr],
+ arg_exprs: &'tcx [hir::Expr],
expected: Expectation<'tcx>,
) -> Ty<'tcx> {
let (fn_sig, def_span) = match callee_ty.sty {
@@ -354,7 +354,7 @@
let def_span = match def {
Res::Err => None,
Res::Local(id) => {
- Some(self.tcx.hir().span_by_hir_id(id))
+ Some(self.tcx.hir().span(id))
},
_ => def
.opt_def_id()
@@ -440,7 +440,7 @@
fn confirm_deferred_closure_call(
&self,
call_expr: &hir::Expr,
- arg_exprs: &'gcx [hir::Expr],
+ arg_exprs: &'tcx [hir::Expr],
expected: Expectation<'tcx>,
fn_sig: ty::FnSig<'tcx>,
) -> Ty<'tcx> {
@@ -473,7 +473,7 @@
fn confirm_overloaded_call(
&self,
call_expr: &hir::Expr,
- arg_exprs: &'gcx [hir::Expr],
+ arg_exprs: &'tcx [hir::Expr],
expected: Expectation<'tcx>,
method_callee: MethodCallee<'tcx>,
) -> Ty<'tcx> {
@@ -492,9 +492,9 @@
}
#[derive(Debug)]
-pub struct DeferredCallResolution<'gcx: 'tcx, 'tcx> {
- call_expr: &'gcx hir::Expr,
- callee_expr: &'gcx hir::Expr,
+pub struct DeferredCallResolution<'tcx> {
+ call_expr: &'tcx hir::Expr,
+ callee_expr: &'tcx hir::Expr,
adjusted_ty: Ty<'tcx>,
adjustments: Vec<Adjustment<'tcx>>,
fn_sig: ty::FnSig<'tcx>,
@@ -502,8 +502,8 @@
closure_substs: ty::ClosureSubsts<'tcx>,
}
-impl<'a, 'gcx, 'tcx> DeferredCallResolution<'gcx, 'tcx> {
- pub fn resolve(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) {
+impl<'a, 'tcx> DeferredCallResolution<'tcx> {
+ pub fn resolve(self, fcx: &FnCtxt<'a, 'tcx>) {
debug!("DeferredCallResolution::resolve() {:?}", self);
// we should not be invoked until the closure kind has been
diff --git a/src/librustc_typeck/check/cast.rs b/src/librustc_typeck/check/cast.rs
index f8cad73..5310149 100644
--- a/src/librustc_typeck/check/cast.rs
+++ b/src/librustc_typeck/check/cast.rs
@@ -74,7 +74,7 @@
OfParam(&'tcx ty::ParamTy),
}
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// Returns the kind of unsize information of t, or None
/// if t is unknown.
fn pointer_kind(&self, t: Ty<'tcx>, span: Span) ->
@@ -158,26 +158,28 @@
}
}
-fn make_invalid_casting_error<'a, 'gcx, 'tcx>(sess: &'a Session,
- span: Span,
- expr_ty: Ty<'tcx>,
- cast_ty: Ty<'tcx>,
- fcx: &FnCtxt<'a, 'gcx, 'tcx>)
- -> DiagnosticBuilder<'a> {
+fn make_invalid_casting_error<'a, 'tcx>(
+ sess: &'a Session,
+ span: Span,
+ expr_ty: Ty<'tcx>,
+ cast_ty: Ty<'tcx>,
+ fcx: &FnCtxt<'a, 'tcx>,
+) -> DiagnosticBuilder<'a> {
type_error_struct!(sess, span, expr_ty, E0606,
"casting `{}` as `{}` is invalid",
fcx.ty_to_string(expr_ty),
fcx.ty_to_string(cast_ty))
}
-impl<'a, 'gcx, 'tcx> CastCheck<'tcx> {
- pub fn new(fcx: &FnCtxt<'a, 'gcx, 'tcx>,
- expr: &'tcx hir::Expr,
- expr_ty: Ty<'tcx>,
- cast_ty: Ty<'tcx>,
- cast_span: Span,
- span: Span)
- -> Result<CastCheck<'tcx>, ErrorReported> {
+impl<'a, 'tcx> CastCheck<'tcx> {
+ pub fn new(
+ fcx: &FnCtxt<'a, 'tcx>,
+ expr: &'tcx hir::Expr,
+ expr_ty: Ty<'tcx>,
+ cast_ty: Ty<'tcx>,
+ cast_span: Span,
+ span: Span,
+ ) -> Result<CastCheck<'tcx>, ErrorReported> {
let check = CastCheck {
expr,
expr_ty,
@@ -198,7 +200,7 @@
}
}
- fn report_cast_error(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>, e: CastError) {
+ fn report_cast_error(&self, fcx: &FnCtxt<'a, 'tcx>, e: CastError) {
match e {
CastError::ErrorReported => {
// an error has already been reported
@@ -326,7 +328,7 @@
}
}
- fn report_cast_to_unsized_type(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) {
+ fn report_cast_to_unsized_type(&self, fcx: &FnCtxt<'a, 'tcx>) {
if self.cast_ty.references_error() || self.expr_ty.references_error() {
return;
}
@@ -386,7 +388,7 @@
err.emit();
}
- fn trivial_cast_lint(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) {
+ fn trivial_cast_lint(&self, fcx: &FnCtxt<'a, 'tcx>) {
let t_cast = self.cast_ty;
let t_expr = self.expr_ty;
let type_asc_or = if fcx.tcx.features().type_ascription {
@@ -412,7 +414,7 @@
err.emit();
}
- pub fn check(mut self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) {
+ pub fn check(mut self, fcx: &FnCtxt<'a, 'tcx>) {
self.expr_ty = fcx.structurally_resolved_type(self.span, self.expr_ty);
self.cast_ty = fcx.structurally_resolved_type(self.span, self.cast_ty);
@@ -443,7 +445,7 @@
/// Checks a cast, and report an error if one exists. In some cases, this
/// can return Ok and create type errors in the fcx rather than returning
/// directly. coercion-cast is handled in check instead of here.
- fn do_check(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Result<CastKind, CastError> {
+ fn do_check(&self, fcx: &FnCtxt<'a, 'tcx>) -> Result<CastKind, CastError> {
use rustc::ty::cast::IntTy::*;
use rustc::ty::cast::CastTy::*;
@@ -531,11 +533,12 @@
}
}
- fn check_ptr_ptr_cast(&self,
- fcx: &FnCtxt<'a, 'gcx, 'tcx>,
- m_expr: ty::TypeAndMut<'tcx>,
- m_cast: ty::TypeAndMut<'tcx>)
- -> Result<CastKind, CastError> {
+ fn check_ptr_ptr_cast(
+ &self,
+ fcx: &FnCtxt<'a, 'tcx>,
+ m_expr: ty::TypeAndMut<'tcx>,
+ m_cast: ty::TypeAndMut<'tcx>,
+ ) -> Result<CastKind, CastError> {
debug!("check_ptr_ptr_cast m_expr={:?} m_cast={:?}", m_expr, m_cast);
// ptr-ptr cast. vtables must match.
@@ -572,10 +575,11 @@
}
}
- fn check_fptr_ptr_cast(&self,
- fcx: &FnCtxt<'a, 'gcx, 'tcx>,
- m_cast: ty::TypeAndMut<'tcx>)
- -> Result<CastKind, CastError> {
+ fn check_fptr_ptr_cast(
+ &self,
+ fcx: &FnCtxt<'a, 'tcx>,
+ m_cast: ty::TypeAndMut<'tcx>,
+ ) -> Result<CastKind, CastError> {
// fptr-ptr cast. must be to thin ptr
match fcx.pointer_kind(m_cast.ty, self.span)? {
@@ -585,10 +589,11 @@
}
}
- fn check_ptr_addr_cast(&self,
- fcx: &FnCtxt<'a, 'gcx, 'tcx>,
- m_expr: ty::TypeAndMut<'tcx>)
- -> Result<CastKind, CastError> {
+ fn check_ptr_addr_cast(
+ &self,
+ fcx: &FnCtxt<'a, 'tcx>,
+ m_expr: ty::TypeAndMut<'tcx>,
+ ) -> Result<CastKind, CastError> {
// ptr-addr cast. must be from thin ptr
match fcx.pointer_kind(m_expr.ty, self.span)? {
@@ -598,11 +603,12 @@
}
}
- fn check_ref_cast(&self,
- fcx: &FnCtxt<'a, 'gcx, 'tcx>,
- m_expr: ty::TypeAndMut<'tcx>,
- m_cast: ty::TypeAndMut<'tcx>)
- -> Result<CastKind, CastError> {
+ fn check_ref_cast(
+ &self,
+ fcx: &FnCtxt<'a, 'tcx>,
+ m_expr: ty::TypeAndMut<'tcx>,
+ m_cast: ty::TypeAndMut<'tcx>,
+ ) -> Result<CastKind, CastError> {
// array-ptr-cast.
if m_expr.mutbl == hir::MutImmutable && m_cast.mutbl == hir::MutImmutable {
@@ -623,10 +629,11 @@
Err(CastError::IllegalCast)
}
- fn check_addr_ptr_cast(&self,
- fcx: &FnCtxt<'a, 'gcx, 'tcx>,
- m_cast: TypeAndMut<'tcx>)
- -> Result<CastKind, CastError> {
+ fn check_addr_ptr_cast(
+ &self,
+ fcx: &FnCtxt<'a, 'tcx>,
+ m_cast: TypeAndMut<'tcx>,
+ ) -> Result<CastKind, CastError> {
// ptr-addr cast. pointer must be thin.
match fcx.pointer_kind(m_cast.ty, self.span)? {
None => Err(CastError::UnknownCastPtrKind),
@@ -635,12 +642,12 @@
}
}
- fn try_coercion_cast(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> bool {
+ fn try_coercion_cast(&self, fcx: &FnCtxt<'a, 'tcx>) -> bool {
fcx.try_coerce(self.expr, self.expr_ty, self.cast_ty, AllowTwoPhase::No).is_ok()
}
}
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
fn type_is_known_to_be_sized_modulo_regions(&self, ty: Ty<'tcx>, span: Span) -> bool {
let lang_item = self.tcx.require_lang_item(lang_items::SizedTraitLangItem);
traits::type_known_to_meet_bound_modulo_regions(self, self.param_env, ty, lang_item, span)
diff --git a/src/librustc_typeck/check/closure.rs b/src/librustc_typeck/check/closure.rs
index b894fc8..6c0deed 100644
--- a/src/librustc_typeck/check/closure.rs
+++ b/src/librustc_typeck/check/closure.rs
@@ -32,12 +32,12 @@
liberated_sig: ty::FnSig<'tcx>,
}
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub fn check_expr_closure(
&self,
expr: &hir::Expr,
_capture: hir::CaptureClause,
- decl: &'gcx hir::FnDecl,
+ decl: &'tcx hir::FnDecl,
body_id: hir::BodyId,
gen: Option<hir::GeneratorMovability>,
expected: Expectation<'tcx>,
@@ -62,8 +62,8 @@
&self,
expr: &hir::Expr,
opt_kind: Option<ty::ClosureKind>,
- decl: &'gcx hir::FnDecl,
- body: &'gcx hir::Body,
+ decl: &'tcx hir::FnDecl,
+ body: &'tcx hir::Body,
gen: Option<hir::GeneratorMovability>,
expected_sig: Option<ExpectedSig<'tcx>>,
) -> Ty<'tcx> {
@@ -592,7 +592,7 @@
expr_def_id: DefId,
decl: &hir::FnDecl,
) -> ty::PolyFnSig<'tcx> {
- let astconv: &dyn AstConv<'_, '_> = self;
+ let astconv: &dyn AstConv<'_> = self;
// First, convert the types that the user supplied (if any).
let supplied_arguments = decl.inputs.iter().map(|a| astconv.ast_ty_to_ty(a));
@@ -624,7 +624,7 @@
/// so should yield an error, but returns back a signature where
/// all parameters are of type `TyErr`.
fn error_sig_of_closure(&self, decl: &hir::FnDecl) -> ty::PolyFnSig<'tcx> {
- let astconv: &dyn AstConv<'_, '_> = self;
+ let astconv: &dyn AstConv<'_> = self;
let supplied_arguments = decl.inputs.iter().map(|a| {
// Convert the types that the user supplied (if any), but ignore them.
diff --git a/src/librustc_typeck/check/coercion.rs b/src/librustc_typeck/check/coercion.rs
index f95021f..a56196c 100644
--- a/src/librustc_typeck/check/coercion.rs
+++ b/src/librustc_typeck/check/coercion.rs
@@ -71,8 +71,8 @@
use syntax::symbol::sym;
use syntax_pos;
-struct Coerce<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
+struct Coerce<'a, 'tcx> {
+ fcx: &'a FnCtxt<'a, 'tcx>,
cause: ObligationCause<'tcx>,
use_lub: bool,
/// Determines whether or not allow_two_phase_borrow is set on any
@@ -84,8 +84,8 @@
allow_two_phase: AllowTwoPhase,
}
-impl<'a, 'gcx, 'tcx> Deref for Coerce<'a, 'gcx, 'tcx> {
- type Target = FnCtxt<'a, 'gcx, 'tcx>;
+impl<'a, 'tcx> Deref for Coerce<'a, 'tcx> {
+ type Target = FnCtxt<'a, 'tcx>;
fn deref(&self) -> &Self::Target {
&self.fcx
}
@@ -120,10 +120,12 @@
})
}
-impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
- fn new(fcx: &'f FnCtxt<'f, 'gcx, 'tcx>,
- cause: ObligationCause<'tcx>,
- allow_two_phase: AllowTwoPhase) -> Self {
+impl<'f, 'tcx> Coerce<'f, 'tcx> {
+ fn new(
+ fcx: &'f FnCtxt<'f, 'tcx>,
+ cause: ObligationCause<'tcx>,
+ allow_two_phase: AllowTwoPhase,
+ ) -> Self {
Coerce {
fcx,
cause,
@@ -792,7 +794,7 @@
}
}
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// Attempt to coerce an expression to a type, and return the
/// adjusted type of the expression, if successful.
/// Adjustments are only recorded if the coercion succeeded.
@@ -1004,29 +1006,23 @@
/// }
/// let final_ty = coerce.complete(fcx);
/// ```
-pub struct CoerceMany<'gcx, 'tcx, 'exprs, E>
- where 'gcx: 'tcx, E: 'exprs + AsCoercionSite,
-{
+pub struct CoerceMany<'tcx, 'exprs, E: AsCoercionSite> {
expected_ty: Ty<'tcx>,
final_ty: Option<Ty<'tcx>>,
- expressions: Expressions<'gcx, 'exprs, E>,
+ expressions: Expressions<'tcx, 'exprs, E>,
pushed: usize,
}
/// The type of a `CoerceMany` that is storing up the expressions into
/// a buffer. We use this in `check/mod.rs` for things like `break`.
-pub type DynamicCoerceMany<'gcx, 'tcx> = CoerceMany<'gcx, 'tcx, 'gcx, P<hir::Expr>>;
+pub type DynamicCoerceMany<'tcx> = CoerceMany<'tcx, 'tcx, P<hir::Expr>>;
-enum Expressions<'gcx, 'exprs, E>
- where E: 'exprs + AsCoercionSite,
-{
- Dynamic(Vec<&'gcx hir::Expr>),
+enum Expressions<'tcx, 'exprs, E: AsCoercionSite> {
+ Dynamic(Vec<&'tcx hir::Expr>),
UpFront(&'exprs [E]),
}
-impl<'gcx, 'tcx, 'exprs, E> CoerceMany<'gcx, 'tcx, 'exprs, E>
- where 'gcx: 'tcx, E: 'exprs + AsCoercionSite,
-{
+impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
/// The usual case; collect the set of expressions dynamically.
/// If the full set of coercion sites is known before hand,
/// consider `with_coercion_sites()` instead to avoid allocation.
@@ -1045,7 +1041,7 @@
Self::make(expected_ty, Expressions::UpFront(coercion_sites))
}
- fn make(expected_ty: Ty<'tcx>, expressions: Expressions<'gcx, 'exprs, E>) -> Self {
+ fn make(expected_ty: Ty<'tcx>, expressions: Expressions<'tcx, 'exprs, E>) -> Self {
CoerceMany {
expected_ty,
final_ty: None,
@@ -1079,12 +1075,13 @@
/// could coerce from. This will record `expression`, and later
/// calls to `coerce` may come back and add adjustments and things
/// if necessary.
- pub fn coerce<'a>(&mut self,
- fcx: &FnCtxt<'a, 'gcx, 'tcx>,
- cause: &ObligationCause<'tcx>,
- expression: &'gcx hir::Expr,
- expression_ty: Ty<'tcx>)
- {
+ pub fn coerce<'a>(
+ &mut self,
+ fcx: &FnCtxt<'a, 'tcx>,
+ cause: &ObligationCause<'tcx>,
+ expression: &'tcx hir::Expr,
+ expression_ty: Ty<'tcx>,
+ ) {
self.coerce_inner(fcx,
cause,
Some(expression),
@@ -1104,12 +1101,13 @@
/// The `augment_error` gives you a chance to extend the error
/// message, in case any results (e.g., we use this to suggest
/// removing a `;`).
- pub fn coerce_forced_unit<'a>(&mut self,
- fcx: &FnCtxt<'a, 'gcx, 'tcx>,
- cause: &ObligationCause<'tcx>,
- augment_error: &mut dyn FnMut(&mut DiagnosticBuilder<'_>),
- label_unit_as_expected: bool)
- {
+ pub fn coerce_forced_unit<'a>(
+ &mut self,
+ fcx: &FnCtxt<'a, 'tcx>,
+ cause: &ObligationCause<'tcx>,
+ augment_error: &mut dyn FnMut(&mut DiagnosticBuilder<'_>),
+ label_unit_as_expected: bool,
+ ) {
self.coerce_inner(fcx,
cause,
None,
@@ -1121,14 +1119,15 @@
/// The inner coercion "engine". If `expression` is `None`, this
/// is a forced-unit case, and hence `expression_ty` must be
/// `Nil`.
- fn coerce_inner<'a>(&mut self,
- fcx: &FnCtxt<'a, 'gcx, 'tcx>,
- cause: &ObligationCause<'tcx>,
- expression: Option<&'gcx hir::Expr>,
- mut expression_ty: Ty<'tcx>,
- augment_error: Option<&mut dyn FnMut(&mut DiagnosticBuilder<'_>)>,
- label_expression_as_expected: bool)
- {
+ fn coerce_inner<'a>(
+ &mut self,
+ fcx: &FnCtxt<'a, 'tcx>,
+ cause: &ObligationCause<'tcx>,
+ expression: Option<&'tcx hir::Expr>,
+ mut expression_ty: Ty<'tcx>,
+ augment_error: Option<&mut dyn FnMut(&mut DiagnosticBuilder<'_>)>,
+ label_expression_as_expected: bool,
+ ) {
// Incorporate whatever type inference information we have
// until now; in principle we might also want to process
// pending obligations, but doing so should only improve
@@ -1270,9 +1269,9 @@
expected: Ty<'tcx>,
found: Ty<'tcx>,
err: TypeError<'tcx>,
- fcx: &FnCtxt<'a, 'gcx, 'tcx>,
+ fcx: &FnCtxt<'a, 'tcx>,
id: hir::HirId,
- expression: Option<(&'gcx hir::Expr, hir::HirId)>,
+ expression: Option<(&'tcx hir::Expr, hir::HirId)>,
) -> DiagnosticBuilder<'a> {
let mut db = fcx.report_mismatched_types(cause, expected, found, err);
@@ -1317,7 +1316,7 @@
db
}
- pub fn complete<'a>(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {
+ pub fn complete<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Ty<'tcx> {
if let Some(final_ty) = self.final_ty {
final_ty
} else {
diff --git a/src/librustc_typeck/check/compare_method.rs b/src/librustc_typeck/check/compare_method.rs
index b4548ac..9460827 100644
--- a/src/librustc_typeck/check/compare_method.rs
+++ b/src/librustc_typeck/check/compare_method.rs
@@ -23,12 +23,14 @@
/// - `trait_m`: the method in the trait
/// - `impl_trait_ref`: the TraitRef corresponding to the trait implementation
-pub fn compare_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- impl_m: &ty::AssocItem,
- impl_m_span: Span,
- trait_m: &ty::AssocItem,
- impl_trait_ref: ty::TraitRef<'tcx>,
- trait_item_span: Option<Span>) {
+pub fn compare_impl_method<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ impl_m: &ty::AssocItem,
+ impl_m_span: Span,
+ trait_m: &ty::AssocItem,
+ impl_trait_ref: ty::TraitRef<'tcx>,
+ trait_item_span: Option<Span>,
+) {
debug!("compare_impl_method(impl_trait_ref={:?})",
impl_trait_ref);
@@ -73,12 +75,13 @@
}
}
-fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- impl_m: &ty::AssocItem,
- impl_m_span: Span,
- trait_m: &ty::AssocItem,
- impl_trait_ref: ty::TraitRef<'tcx>)
- -> Result<(), ErrorReported> {
+fn compare_predicate_entailment<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ impl_m: &ty::AssocItem,
+ impl_m_span: Span,
+ trait_m: &ty::AssocItem,
+ impl_trait_ref: ty::TraitRef<'tcx>,
+) -> Result<(), ErrorReported> {
let trait_to_impl_substs = impl_trait_ref.substs;
// This node-id should be used for the `body_id` field on each
@@ -355,14 +358,15 @@
})
}
-fn check_region_bounds_on_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- span: Span,
- impl_m: &ty::AssocItem,
- trait_m: &ty::AssocItem,
- trait_generics: &ty::Generics,
- impl_generics: &ty::Generics,
- trait_to_skol_substs: SubstsRef<'tcx>)
- -> Result<(), ErrorReported> {
+fn check_region_bounds_on_impl_method<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ span: Span,
+ impl_m: &ty::AssocItem,
+ trait_m: &ty::AssocItem,
+ trait_generics: &ty::Generics,
+ impl_generics: &ty::Generics,
+ trait_to_skol_substs: SubstsRef<'tcx>,
+) -> Result<(), ErrorReported> {
let trait_params = trait_generics.own_counts().lifetimes;
let impl_params = impl_generics.own_counts().lifetimes;
@@ -406,15 +410,16 @@
Ok(())
}
-fn extract_spans_for_error_reporting<'a, 'gcx, 'tcx>(infcx: &infer::InferCtxt<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- terr: &TypeError<'_>,
- cause: &ObligationCause<'tcx>,
- impl_m: &ty::AssocItem,
- impl_sig: ty::FnSig<'tcx>,
- trait_m: &ty::AssocItem,
- trait_sig: ty::FnSig<'tcx>)
- -> (Span, Option<Span>) {
+fn extract_spans_for_error_reporting<'a, 'tcx>(
+ infcx: &infer::InferCtxt<'a, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ terr: &TypeError<'_>,
+ cause: &ObligationCause<'tcx>,
+ impl_m: &ty::AssocItem,
+ impl_sig: ty::FnSig<'tcx>,
+ trait_m: &ty::AssocItem,
+ trait_sig: ty::FnSig<'tcx>,
+) -> (Span, Option<Span>) {
let tcx = infcx.tcx;
let impl_m_hir_id = tcx.hir().as_local_hir_id(impl_m.def_id).unwrap();
let (impl_m_output, impl_m_iter) = match tcx.hir()
@@ -495,13 +500,13 @@
}
}
-fn compare_self_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- impl_m: &ty::AssocItem,
- impl_m_span: Span,
- trait_m: &ty::AssocItem,
- impl_trait_ref: ty::TraitRef<'tcx>)
- -> Result<(), ErrorReported>
-{
+fn compare_self_type<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ impl_m: &ty::AssocItem,
+ impl_m_span: Span,
+ trait_m: &ty::AssocItem,
+ impl_trait_ref: ty::TraitRef<'tcx>,
+) -> Result<(), ErrorReported> {
// Try to give more informative error messages about self typing
// mismatches. Note that any mismatch will also be detected
// below, where we construct a canonical function type that
@@ -580,8 +585,8 @@
Ok(())
}
-fn compare_number_of_generics<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn compare_number_of_generics<'tcx>(
+ tcx: TyCtxt<'tcx>,
impl_: &ty::AssocItem,
_impl_span: Span,
trait_: &ty::AssocItem,
@@ -695,12 +700,13 @@
}
}
-fn compare_number_of_method_arguments<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- impl_m: &ty::AssocItem,
- impl_m_span: Span,
- trait_m: &ty::AssocItem,
- trait_item_span: Option<Span>)
- -> Result<(), ErrorReported> {
+fn compare_number_of_method_arguments<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ impl_m: &ty::AssocItem,
+ impl_m_span: Span,
+ trait_m: &ty::AssocItem,
+ trait_item_span: Option<Span>,
+) -> Result<(), ErrorReported> {
let impl_m_fty = tcx.fn_sig(impl_m.def_id);
let trait_m_fty = tcx.fn_sig(trait_m.def_id);
let trait_number_args = trait_m_fty.inputs().skip_binder().len();
@@ -779,10 +785,11 @@
Ok(())
}
-fn compare_synthetic_generics<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- impl_m: &ty::AssocItem,
- trait_m: &ty::AssocItem)
- -> Result<(), ErrorReported> {
+fn compare_synthetic_generics<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ impl_m: &ty::AssocItem,
+ trait_m: &ty::AssocItem,
+) -> Result<(), ErrorReported> {
// FIXME(chrisvittal) Clean up this function, list of FIXME items:
// 1. Better messages for the span labels
// 2. Explanation as to what is going on
@@ -806,7 +813,7 @@
{
if impl_synthetic != trait_synthetic {
let impl_hir_id = tcx.hir().as_local_hir_id(impl_def_id).unwrap();
- let impl_span = tcx.hir().span_by_hir_id(impl_hir_id);
+ let impl_span = tcx.hir().span(impl_hir_id);
let trait_span = tcx.def_span(trait_def_id);
let mut err = struct_span_err!(tcx.sess,
impl_span,
@@ -951,11 +958,13 @@
}
}
-pub fn compare_const_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- impl_c: &ty::AssocItem,
- impl_c_span: Span,
- trait_c: &ty::AssocItem,
- impl_trait_ref: ty::TraitRef<'tcx>) {
+pub fn compare_const_impl<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ impl_c: &ty::AssocItem,
+ impl_c_span: Span,
+ trait_c: &ty::AssocItem,
+ impl_trait_ref: ty::TraitRef<'tcx>,
+) {
debug!("compare_const_impl(impl_trait_ref={:?})", impl_trait_ref);
tcx.infer_ctxt().enter(|infcx| {
diff --git a/src/librustc_typeck/check/demand.rs b/src/librustc_typeck/check/demand.rs
index 87fc90f..aff8eba 100644
--- a/src/librustc_typeck/check/demand.rs
+++ b/src/librustc_typeck/check/demand.rs
@@ -14,7 +14,7 @@
use super::method::probe;
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Requires that the two types unify, and prints an error message if
// they don't.
pub fn demand_suptype(&self, sp: Span, expected: Ty<'tcx>, actual: Ty<'tcx>) {
@@ -379,7 +379,23 @@
}
};
if self.can_coerce(ref_ty, expected) {
- if let Ok(src) = cm.span_to_snippet(sp) {
+ let mut sugg_sp = sp;
+ if let hir::ExprKind::MethodCall(segment, _sp, args) = &expr.node {
+ let clone_trait = self.tcx.lang_items().clone_trait().unwrap();
+ if let ([arg], Some(true), "clone") = (
+ &args[..],
+ self.tables.borrow().type_dependent_def_id(expr.hir_id).map(|did| {
+ let ai = self.tcx.associated_item(did);
+ ai.container == ty::TraitContainer(clone_trait)
+ }),
+ &segment.ident.as_str()[..],
+ ) {
+ // If this expression had a clone call when suggesting borrowing
+ // we want to suggest removing it because it'd now be unecessary.
+ sugg_sp = arg.span;
+ }
+ }
+ if let Ok(src) = cm.span_to_snippet(sugg_sp) {
let needs_parens = match expr.node {
// parenthesize if needed (Issue #46756)
hir::ExprKind::Cast(_, _) |
@@ -425,6 +441,7 @@
}
}
}
+
return Some(match mutability {
hir::Mutability::MutMutable => (
sp,
diff --git a/src/librustc_typeck/check/dropck.rs b/src/librustc_typeck/check/dropck.rs
index 2184555..a2621ab 100644
--- a/src/librustc_typeck/check/dropck.rs
+++ b/src/librustc_typeck/check/dropck.rs
@@ -29,10 +29,7 @@
/// struct/enum definition for the nominal type itself (i.e.
/// cannot do `struct S<T>; impl<T:Clone> Drop for S<T> { ... }`).
///
-pub fn check_drop_impl<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- drop_impl_did: DefId,
-) -> Result<(), ErrorReported> {
+pub fn check_drop_impl<'tcx>(tcx: TyCtxt<'tcx>, drop_impl_did: DefId) -> Result<(), ErrorReported> {
let dtor_self_type = tcx.type_of(drop_impl_did);
let dtor_predicates = tcx.predicates_of(drop_impl_did);
match dtor_self_type.sty {
@@ -64,8 +61,8 @@
}
}
-fn ensure_drop_params_and_item_params_correspond<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn ensure_drop_params_and_item_params_correspond<'tcx>(
+ tcx: TyCtxt<'tcx>,
drop_impl_did: DefId,
drop_impl_ty: Ty<'tcx>,
self_type_did: DefId,
@@ -140,8 +137,8 @@
/// Confirms that every predicate imposed by dtor_predicates is
/// implied by assuming the predicates attached to self_type_did.
-fn ensure_drop_predicates_are_implied_by_item_defn<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
+ tcx: TyCtxt<'tcx>,
drop_impl_did: DefId,
dtor_predicates: &ty::GenericPredicates<'tcx>,
self_type_did: DefId,
@@ -216,7 +213,7 @@
// repeated `contains` calls.
if !assumptions_in_impl_context.contains(&predicate) {
- let item_span = tcx.hir().span_by_hir_id(self_type_hir_id);
+ let item_span = tcx.hir().span(self_type_hir_id);
struct_span_err!(
tcx.sess,
drop_impl_span,
@@ -287,8 +284,8 @@
/// this conservative assumption (and thus assume the obligation of
/// ensuring that they do not access data nor invoke methods of
/// values that have been previously dropped).
-pub fn check_safety_of_destructor_if_necessary<'a, 'gcx, 'tcx>(
- rcx: &mut RegionCtxt<'a, 'gcx, 'tcx>,
+pub fn check_safety_of_destructor_if_necessary<'a, 'tcx>(
+ rcx: &mut RegionCtxt<'a, 'tcx>,
ty: Ty<'tcx>,
span: Span,
body_id: hir::HirId,
diff --git a/src/librustc_typeck/check/expr.rs b/src/librustc_typeck/check/expr.rs
new file mode 100644
index 0000000..4216880
--- /dev/null
+++ b/src/librustc_typeck/check/expr.rs
@@ -0,0 +1,1537 @@
+//! Type checking expressions.
+//!
+//! See `mod.rs` for more context on type checking in general.
+
+use crate::check::BreakableCtxt;
+use crate::check::cast;
+use crate::check::coercion::CoerceMany;
+use crate::check::Diverges;
+use crate::check::FnCtxt;
+use crate::check::Expectation::{self, NoExpectation, ExpectHasType, ExpectCastableToType};
+use crate::check::fatally_break_rust;
+use crate::check::report_unexpected_variant_res;
+use crate::check::Needs;
+use crate::check::TupleArgumentsFlag::DontTupleArguments;
+use crate::check::method::SelfSource;
+use crate::middle::lang_items;
+use crate::util::common::ErrorReported;
+use crate::util::nodemap::FxHashMap;
+use crate::astconv::AstConv as _;
+
+use errors::{Applicability, DiagnosticBuilder};
+use syntax::ast;
+use syntax::ptr::P;
+use syntax::symbol::{Symbol, LocalInternedString, kw, sym};
+use syntax::source_map::Span;
+use syntax::util::lev_distance::find_best_match_for_name;
+use rustc::hir;
+use rustc::hir::{ExprKind, QPath};
+use rustc::hir::def::{CtorKind, Res, DefKind};
+use rustc::infer;
+use rustc::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
+use rustc::mir::interpret::GlobalId;
+use rustc::ty;
+use rustc::ty::adjustment::{
+ Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability,
+};
+use rustc::ty::{AdtKind, Visibility};
+use rustc::ty::Ty;
+use rustc::ty::TypeFoldable;
+use rustc::ty::subst::InternalSubsts;
+use rustc::traits::{self, ObligationCauseCode};
+
+use std::fmt::Display;
+
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
+ fn check_expr_eq_type(&self, expr: &'tcx hir::Expr, expected: Ty<'tcx>) {
+ let ty = self.check_expr_with_hint(expr, expected);
+ self.demand_eqtype(expr.span, expected, ty);
+ }
+
+ pub fn check_expr_has_type_or_error(
+ &self,
+ expr: &'tcx hir::Expr,
+ expected: Ty<'tcx>,
+ ) -> Ty<'tcx> {
+ self.check_expr_meets_expectation_or_error(expr, ExpectHasType(expected))
+ }
+
+ fn check_expr_meets_expectation_or_error(
+ &self,
+ expr: &'tcx hir::Expr,
+ expected: Expectation<'tcx>,
+ ) -> Ty<'tcx> {
+ let expected_ty = expected.to_option(&self).unwrap_or(self.tcx.types.bool);
+ let mut ty = self.check_expr_with_expectation(expr, expected);
+
+ // While we don't allow *arbitrary* coercions here, we *do* allow
+ // coercions from ! to `expected`.
+ if ty.is_never() {
+ assert!(!self.tables.borrow().adjustments().contains_key(expr.hir_id),
+ "expression with never type wound up being adjusted");
+ let adj_ty = self.next_diverging_ty_var(
+ TypeVariableOrigin {
+ kind: TypeVariableOriginKind::AdjustmentType,
+ span: expr.span,
+ },
+ );
+ self.apply_adjustments(expr, vec![Adjustment {
+ kind: Adjust::NeverToAny,
+ target: adj_ty
+ }]);
+ ty = adj_ty;
+ }
+
+ if let Some(mut err) = self.demand_suptype_diag(expr.span, expected_ty, ty) {
+ let expr = match &expr.node {
+ ExprKind::DropTemps(expr) => expr,
+ _ => expr,
+ };
+ // Error possibly reported in `check_assign` so avoid emitting error again.
+ err.emit_unless(self.is_assign_to_bool(expr, expected_ty));
+ }
+ ty
+ }
+
+ pub(super) fn check_expr_coercable_to_type(
+ &self,
+ expr: &'tcx hir::Expr,
+ expected: Ty<'tcx>
+ ) -> Ty<'tcx> {
+ let ty = self.check_expr_with_hint(expr, expected);
+ // checks don't need two phase
+ self.demand_coerce(expr, ty, expected, AllowTwoPhase::No)
+ }
+
+ pub(super) fn check_expr_with_hint(
+ &self,
+ expr: &'tcx hir::Expr,
+ expected: Ty<'tcx>
+ ) -> Ty<'tcx> {
+ self.check_expr_with_expectation(expr, ExpectHasType(expected))
+ }
+
+ pub(super) fn check_expr_with_expectation(
+ &self,
+ expr: &'tcx hir::Expr,
+ expected: Expectation<'tcx>,
+ ) -> Ty<'tcx> {
+ self.check_expr_with_expectation_and_needs(expr, expected, Needs::None)
+ }
+
+ pub(super) fn check_expr(&self, expr: &'tcx hir::Expr) -> Ty<'tcx> {
+ self.check_expr_with_expectation(expr, NoExpectation)
+ }
+
+ pub(super) fn check_expr_with_needs(&self, expr: &'tcx hir::Expr, needs: Needs) -> Ty<'tcx> {
+ self.check_expr_with_expectation_and_needs(expr, NoExpectation, needs)
+ }
+
+ /// Invariant:
+ /// If an expression has any sub-expressions that result in a type error,
+ /// inspecting that expression's type with `ty.references_error()` will return
+ /// true. Likewise, if an expression is known to diverge, inspecting its
+ /// type with `ty::type_is_bot` will return true (n.b.: since Rust is
+ /// strict, _|_ can appear in the type of an expression that does not,
+ /// itself, diverge: for example, fn() -> _|_.)
+ /// Note that inspecting a type's structure *directly* may expose the fact
+ /// that there are actually multiple representations for `Error`, so avoid
+ /// that when err needs to be handled differently.
+ fn check_expr_with_expectation_and_needs(
+ &self,
+ expr: &'tcx hir::Expr,
+ expected: Expectation<'tcx>,
+ needs: Needs,
+ ) -> Ty<'tcx> {
+ debug!(">> type-checking: expr={:?} expected={:?}",
+ expr, expected);
+
+ // Warn for expressions after diverging siblings.
+ self.warn_if_unreachable(expr.hir_id, expr.span, "expression");
+
+ // Hide the outer diverging and has_errors flags.
+ let old_diverges = self.diverges.get();
+ let old_has_errors = self.has_errors.get();
+ self.diverges.set(Diverges::Maybe);
+ self.has_errors.set(false);
+
+ let ty = self.check_expr_kind(expr, expected, needs);
+
+ // Warn for non-block expressions with diverging children.
+ match expr.node {
+ ExprKind::Block(..) |
+ ExprKind::Loop(..) | ExprKind::While(..) |
+ ExprKind::Match(..) => {}
+
+ _ => self.warn_if_unreachable(expr.hir_id, expr.span, "expression")
+ }
+
+ // Any expression that produces a value of type `!` must have diverged
+ if ty.is_never() {
+ self.diverges.set(self.diverges.get() | Diverges::Always);
+ }
+
+ // Record the type, which applies its effects.
+ // We need to do this after the warning above, so that
+ // we don't warn for the diverging expression itself.
+ self.write_ty(expr.hir_id, ty);
+
+ // Combine the diverging and has_error flags.
+ self.diverges.set(self.diverges.get() | old_diverges);
+ self.has_errors.set(self.has_errors.get() | old_has_errors);
+
+ debug!("type of {} is...", self.tcx.hir().node_to_string(expr.hir_id));
+ debug!("... {:?}, expected is {:?}", ty, expected);
+
+ ty
+ }
+
+ fn check_expr_kind(
+ &self,
+ expr: &'tcx hir::Expr,
+ expected: Expectation<'tcx>,
+ needs: Needs,
+ ) -> Ty<'tcx> {
+ debug!(
+ "check_expr_kind(expr={:?}, expected={:?}, needs={:?})",
+ expr,
+ expected,
+ needs,
+ );
+
+ let tcx = self.tcx;
+ match expr.node {
+ ExprKind::Box(ref subexpr) => {
+ self.check_expr_box(subexpr, expected)
+ }
+ ExprKind::Lit(ref lit) => {
+ self.check_lit(&lit, expected)
+ }
+ ExprKind::Binary(op, ref lhs, ref rhs) => {
+ self.check_binop(expr, op, lhs, rhs)
+ }
+ ExprKind::AssignOp(op, ref lhs, ref rhs) => {
+ self.check_binop_assign(expr, op, lhs, rhs)
+ }
+ ExprKind::Unary(unop, ref oprnd) => {
+ self.check_expr_unary(unop, oprnd, expected, needs, expr)
+ }
+ ExprKind::AddrOf(mutbl, ref oprnd) => {
+ self.check_expr_addr_of(mutbl, oprnd, expected, expr)
+ }
+ ExprKind::Path(ref qpath) => {
+ self.check_expr_path(qpath, expr)
+ }
+ ExprKind::InlineAsm(_, ref outputs, ref inputs) => {
+ for expr in outputs.iter().chain(inputs.iter()) {
+ self.check_expr(expr);
+ }
+ tcx.mk_unit()
+ }
+ ExprKind::Break(destination, ref expr_opt) => {
+ self.check_expr_break(destination, expr_opt.deref(), expr)
+ }
+ ExprKind::Continue(destination) => {
+ if destination.target_id.is_ok() {
+ tcx.types.never
+ } else {
+ // There was an error; make type-check fail.
+ tcx.types.err
+ }
+ }
+ ExprKind::Ret(ref expr_opt) => {
+ self.check_expr_return(expr_opt.deref(), expr)
+ }
+ ExprKind::Assign(ref lhs, ref rhs) => {
+ self.check_expr_assign(expr, expected, lhs, rhs)
+ }
+ ExprKind::While(ref cond, ref body, _) => {
+ self.check_expr_while(cond, body, expr)
+ }
+ ExprKind::Loop(ref body, _, source) => {
+ self.check_expr_loop(body, source, expected, expr)
+ }
+ ExprKind::Match(ref discrim, ref arms, match_src) => {
+ self.check_match(expr, &discrim, arms, expected, match_src)
+ }
+ ExprKind::Closure(capture, ref decl, body_id, _, gen) => {
+ self.check_expr_closure(expr, capture, &decl, body_id, gen, expected)
+ }
+ ExprKind::Block(ref body, _) => {
+ self.check_block_with_expected(&body, expected)
+ }
+ ExprKind::Call(ref callee, ref args) => {
+ self.check_call(expr, &callee, args, expected)
+ }
+ ExprKind::MethodCall(ref segment, span, ref args) => {
+ self.check_method_call(expr, segment, span, args, expected, needs)
+ }
+ ExprKind::Cast(ref e, ref t) => {
+ self.check_expr_cast(e, t, expr)
+ }
+ ExprKind::Type(ref e, ref t) => {
+ let ty = self.to_ty_saving_user_provided_ty(&t);
+ self.check_expr_eq_type(&e, ty);
+ ty
+ }
+ ExprKind::DropTemps(ref e) => {
+ self.check_expr_with_expectation(e, expected)
+ }
+ ExprKind::Array(ref args) => {
+ self.check_expr_array(args, expected, expr)
+ }
+ ExprKind::Repeat(ref element, ref count) => {
+ self.check_expr_repeat(element, count, expected, expr)
+ }
+ ExprKind::Tup(ref elts) => {
+ self.check_expr_tuple(elts, expected, expr)
+ }
+ ExprKind::Struct(ref qpath, ref fields, ref base_expr) => {
+ self.check_expr_struct(expr, expected, qpath, fields, base_expr)
+ }
+ ExprKind::Field(ref base, field) => {
+ self.check_field(expr, needs, &base, field)
+ }
+ ExprKind::Index(ref base, ref idx) => {
+ self.check_expr_index(base, idx, needs, expr)
+ }
+ ExprKind::Yield(ref value) => {
+ self.check_expr_yield(value, expr)
+ }
+ hir::ExprKind::Err => {
+ tcx.types.err
+ }
+ }
+ }
+
+ fn check_expr_box(&self, expr: &'tcx hir::Expr, expected: Expectation<'tcx>) -> Ty<'tcx> {
+ let expected_inner = expected.to_option(self).map_or(NoExpectation, |ty| {
+ match ty.sty {
+ ty::Adt(def, _) if def.is_box()
+ => Expectation::rvalue_hint(self, ty.boxed_ty()),
+ _ => NoExpectation
+ }
+ });
+ let referent_ty = self.check_expr_with_expectation(expr, expected_inner);
+ self.tcx.mk_box(referent_ty)
+ }
+
+ fn check_expr_unary(
+ &self,
+ unop: hir::UnOp,
+ oprnd: &'tcx hir::Expr,
+ expected: Expectation<'tcx>,
+ needs: Needs,
+ expr: &'tcx hir::Expr,
+ ) -> Ty<'tcx> {
+ let tcx = self.tcx;
+ let expected_inner = match unop {
+ hir::UnNot | hir::UnNeg => expected,
+ hir::UnDeref => NoExpectation,
+ };
+ let needs = match unop {
+ hir::UnDeref => needs,
+ _ => Needs::None
+ };
+ let mut oprnd_t = self.check_expr_with_expectation_and_needs(&oprnd, expected_inner, needs);
+
+ if !oprnd_t.references_error() {
+ oprnd_t = self.structurally_resolved_type(expr.span, oprnd_t);
+ match unop {
+ hir::UnDeref => {
+ if let Some(mt) = oprnd_t.builtin_deref(true) {
+ oprnd_t = mt.ty;
+ } else if let Some(ok) = self.try_overloaded_deref(
+ expr.span, oprnd_t, needs) {
+ let method = self.register_infer_ok_obligations(ok);
+ if let ty::Ref(region, _, mutbl) = method.sig.inputs()[0].sty {
+ let mutbl = match mutbl {
+ hir::MutImmutable => AutoBorrowMutability::Immutable,
+ hir::MutMutable => AutoBorrowMutability::Mutable {
+ // (It shouldn't actually matter for unary ops whether
+ // we enable two-phase borrows or not, since a unary
+ // op has no additional operands.)
+ allow_two_phase_borrow: AllowTwoPhase::No,
+ }
+ };
+ self.apply_adjustments(oprnd, vec![Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl)),
+ target: method.sig.inputs()[0]
+ }]);
+ }
+ oprnd_t = self.make_overloaded_place_return_type(method).ty;
+ self.write_method_call(expr.hir_id, method);
+ } else {
+ let mut err = type_error_struct!(
+ tcx.sess,
+ expr.span,
+ oprnd_t,
+ E0614,
+ "type `{}` cannot be dereferenced",
+ oprnd_t,
+ );
+ let sp = tcx.sess.source_map().start_point(expr.span);
+ if let Some(sp) = tcx.sess.parse_sess.ambiguous_block_expr_parse
+ .borrow().get(&sp)
+ {
+ tcx.sess.parse_sess.expr_parentheses_needed(
+ &mut err,
+ *sp,
+ None,
+ );
+ }
+ err.emit();
+ oprnd_t = tcx.types.err;
+ }
+ }
+ hir::UnNot => {
+ let result = self.check_user_unop(expr, oprnd_t, unop);
+ // If it's builtin, we can reuse the type, this helps inference.
+ if !(oprnd_t.is_integral() || oprnd_t.sty == ty::Bool) {
+ oprnd_t = result;
+ }
+ }
+ hir::UnNeg => {
+ let result = self.check_user_unop(expr, oprnd_t, unop);
+ // If it's builtin, we can reuse the type, this helps inference.
+ if !oprnd_t.is_numeric() {
+ oprnd_t = result;
+ }
+ }
+ }
+ }
+ oprnd_t
+ }
+
+ fn check_expr_addr_of(
+ &self,
+ mutbl: hir::Mutability,
+ oprnd: &'tcx hir::Expr,
+ expected: Expectation<'tcx>,
+ expr: &'tcx hir::Expr,
+ ) -> Ty<'tcx> {
+ let hint = expected.only_has_type(self).map_or(NoExpectation, |ty| {
+ match ty.sty {
+ ty::Ref(_, ty, _) | ty::RawPtr(ty::TypeAndMut { ty, .. }) => {
+ if oprnd.is_place_expr() {
+ // Places may legitimately have unsized types.
+ // For example, dereferences of a fat pointer and
+ // the last field of a struct can be unsized.
+ ExpectHasType(ty)
+ } else {
+ Expectation::rvalue_hint(self, ty)
+ }
+ }
+ _ => NoExpectation
+ }
+ });
+ let needs = Needs::maybe_mut_place(mutbl);
+ let ty = self.check_expr_with_expectation_and_needs(&oprnd, hint, needs);
+
+ let tm = ty::TypeAndMut { ty: ty, mutbl: mutbl };
+ if tm.ty.references_error() {
+ self.tcx.types.err
+ } else {
+ // Note: at this point, we cannot say what the best lifetime
+ // is to use for resulting pointer. We want to use the
+ // shortest lifetime possible so as to avoid spurious borrowck
+ // errors. Moreover, the longest lifetime will depend on the
+ // precise details of the value whose address is being taken
+ // (and how long it is valid), which we don't know yet until type
+ // inference is complete.
+ //
+ // Therefore, here we simply generate a region variable. The
+ // region inferencer will then select the ultimate value.
+ // Finally, borrowck is charged with guaranteeing that the
+ // value whose address was taken can actually be made to live
+ // as long as it needs to live.
+ let region = self.next_region_var(infer::AddrOfRegion(expr.span));
+ self.tcx.mk_ref(region, tm)
+ }
+ }
+
+ fn check_expr_path(&self, qpath: &hir::QPath, expr: &'tcx hir::Expr) -> Ty<'tcx> {
+ let tcx = self.tcx;
+ let (res, opt_ty, segs) = self.resolve_ty_and_res_ufcs(qpath, expr.hir_id, expr.span);
+ let ty = match res {
+ Res::Err => {
+ self.set_tainted_by_errors();
+ tcx.types.err
+ }
+ Res::Def(DefKind::Ctor(_, CtorKind::Fictive), _) => {
+ report_unexpected_variant_res(tcx, res, expr.span, qpath);
+ tcx.types.err
+ }
+ _ => self.instantiate_value_path(segs, opt_ty, res, expr.span, expr.hir_id).0,
+ };
+
+ if let ty::FnDef(..) = ty.sty {
+ let fn_sig = ty.fn_sig(tcx);
+ if !tcx.features().unsized_locals {
+ // We want to remove some Sized bounds from std functions,
+ // but don't want to expose the removal to stable Rust.
+ // i.e., we don't want to allow
+ //
+ // ```rust
+ // drop as fn(str);
+ // ```
+ //
+ // to work in stable even if the Sized bound on `drop` is relaxed.
+ for i in 0..fn_sig.inputs().skip_binder().len() {
+ // We just want to check sizedness, so instead of introducing
+ // placeholder lifetimes with probing, we just replace higher lifetimes
+ // with fresh vars.
+ let input = self.replace_bound_vars_with_fresh_vars(
+ expr.span,
+ infer::LateBoundRegionConversionTime::FnCall,
+ &fn_sig.input(i)).0;
+ self.require_type_is_sized_deferred(input, expr.span,
+ traits::SizedArgumentType);
+ }
+ }
+ // Here we want to prevent struct constructors from returning unsized types.
+ // There were two cases this happened: fn pointer coercion in stable
+ // and usual function call in presence of unsized_locals.
+ // Also, as we just want to check sizedness, instead of introducing
+ // placeholder lifetimes with probing, we just replace higher lifetimes
+ // with fresh vars.
+ let output = self.replace_bound_vars_with_fresh_vars(
+ expr.span,
+ infer::LateBoundRegionConversionTime::FnCall,
+ &fn_sig.output()).0;
+ self.require_type_is_sized_deferred(output, expr.span, traits::SizedReturnType);
+ }
+
+ // We always require that the type provided as the value for
+ // a type parameter outlives the moment of instantiation.
+ let substs = self.tables.borrow().node_substs(expr.hir_id);
+ self.add_wf_bounds(substs, expr);
+
+ ty
+ }
+
+ fn check_expr_break(
+ &self,
+ destination: hir::Destination,
+ expr_opt: Option<&'tcx hir::Expr>,
+ expr: &'tcx hir::Expr,
+ ) -> Ty<'tcx> {
+ let tcx = self.tcx;
+ if let Ok(target_id) = destination.target_id {
+ let (e_ty, cause);
+ if let Some(ref e) = expr_opt {
+ // If this is a break with a value, we need to type-check
+ // the expression. Get an expected type from the loop context.
+ let opt_coerce_to = {
+ let mut enclosing_breakables = self.enclosing_breakables.borrow_mut();
+ enclosing_breakables.find_breakable(target_id)
+ .coerce
+ .as_ref()
+ .map(|coerce| coerce.expected_ty())
+ };
+
+ // If the loop context is not a `loop { }`, then break with
+ // a value is illegal, and `opt_coerce_to` will be `None`.
+ // Just set expectation to error in that case.
+ let coerce_to = opt_coerce_to.unwrap_or(tcx.types.err);
+
+ // Recurse without `enclosing_breakables` borrowed.
+ e_ty = self.check_expr_with_hint(e, coerce_to);
+ cause = self.misc(e.span);
+ } else {
+ // Otherwise, this is a break *without* a value. That's
+ // always legal, and is equivalent to `break ()`.
+ e_ty = tcx.mk_unit();
+ cause = self.misc(expr.span);
+ }
+
+ // Now that we have type-checked `expr_opt`, borrow
+ // the `enclosing_loops` field and let's coerce the
+ // type of `expr_opt` into what is expected.
+ let mut enclosing_breakables = self.enclosing_breakables.borrow_mut();
+ let ctxt = enclosing_breakables.find_breakable(target_id);
+ if let Some(ref mut coerce) = ctxt.coerce {
+ if let Some(ref e) = expr_opt {
+ coerce.coerce(self, &cause, e, e_ty);
+ } else {
+ assert!(e_ty.is_unit());
+ coerce.coerce_forced_unit(self, &cause, &mut |_| (), true);
+ }
+ } else {
+ // If `ctxt.coerce` is `None`, we can just ignore
+ // the type of the expression. This is because
+ // either this was a break *without* a value, in
+ // which case it is always a legal type (`()`), or
+ // else an error would have been flagged by the
+ // `loops` pass for using break with an expression
+ // where you are not supposed to.
+ assert!(expr_opt.is_none() || self.tcx.sess.err_count() > 0);
+ }
+
+ ctxt.may_break = true;
+
+ // the type of a `break` is always `!`, since it diverges
+ tcx.types.never
+ } else {
+ // Otherwise, we failed to find the enclosing loop;
+ // this can only happen if the `break` was not
+ // inside a loop at all, which is caught by the
+ // loop-checking pass.
+ if self.tcx.sess.err_count() == 0 {
+ self.tcx.sess.delay_span_bug(expr.span,
+ "break was outside loop, but no error was emitted");
+ }
+
+ // We still need to assign a type to the inner expression to
+ // prevent the ICE in #43162.
+ if let Some(ref e) = expr_opt {
+ self.check_expr_with_hint(e, tcx.types.err);
+
+ // ... except when we try to 'break rust;'.
+ // ICE this expression in particular (see #43162).
+ if let ExprKind::Path(QPath::Resolved(_, ref path)) = e.node {
+ if path.segments.len() == 1 &&
+ path.segments[0].ident.name == sym::rust {
+ fatally_break_rust(self.tcx.sess);
+ }
+ }
+ }
+ // There was an error; make type-check fail.
+ tcx.types.err
+ }
+ }
+
+ fn check_expr_return(
+ &self,
+ expr_opt: Option<&'tcx hir::Expr>,
+ expr: &'tcx hir::Expr
+ ) -> Ty<'tcx> {
+ if self.ret_coercion.is_none() {
+ struct_span_err!(self.tcx.sess, expr.span, E0572,
+ "return statement outside of function body").emit();
+ } else if let Some(ref e) = expr_opt {
+ if self.ret_coercion_span.borrow().is_none() {
+ *self.ret_coercion_span.borrow_mut() = Some(e.span);
+ }
+ self.check_return_expr(e);
+ } else {
+ let mut coercion = self.ret_coercion.as_ref().unwrap().borrow_mut();
+ if self.ret_coercion_span.borrow().is_none() {
+ *self.ret_coercion_span.borrow_mut() = Some(expr.span);
+ }
+ let cause = self.cause(expr.span, ObligationCauseCode::ReturnNoExpression);
+ if let Some((fn_decl, _)) = self.get_fn_decl(expr.hir_id) {
+ coercion.coerce_forced_unit(
+ self,
+ &cause,
+ &mut |db| {
+ db.span_label(
+ fn_decl.output.span(),
+ format!(
+ "expected `{}` because of this return type",
+ fn_decl.output,
+ ),
+ );
+ },
+ true,
+ );
+ } else {
+ coercion.coerce_forced_unit(self, &cause, &mut |_| (), true);
+ }
+ }
+ self.tcx.types.never
+ }
+
+ pub(super) fn check_return_expr(&self, return_expr: &'tcx hir::Expr) {
+ let ret_coercion =
+ self.ret_coercion
+ .as_ref()
+ .unwrap_or_else(|| span_bug!(return_expr.span,
+ "check_return_expr called outside fn body"));
+
+ let ret_ty = ret_coercion.borrow().expected_ty();
+ let return_expr_ty = self.check_expr_with_hint(return_expr, ret_ty.clone());
+ ret_coercion.borrow_mut()
+ .coerce(self,
+ &self.cause(return_expr.span,
+ ObligationCauseCode::ReturnType(return_expr.hir_id)),
+ return_expr,
+ return_expr_ty);
+ }
+
+ /// Type check assignment expression `expr` of form `lhs = rhs`.
+ /// The expected type is `()` and is passed to the function for the purposes of diagnostics.
+ fn check_expr_assign(
+ &self,
+ expr: &'tcx hir::Expr,
+ expected: Expectation<'tcx>,
+ lhs: &'tcx hir::Expr,
+ rhs: &'tcx hir::Expr,
+ ) -> Ty<'tcx> {
+ let lhs_ty = self.check_expr_with_needs(&lhs, Needs::MutPlace);
+ let rhs_ty = self.check_expr_coercable_to_type(&rhs, lhs_ty);
+
+ let expected_ty = expected.coercion_target_type(self, expr.span);
+ if expected_ty == self.tcx.types.bool {
+ // The expected type is `bool` but this will result in `()` so we can reasonably
+ // say that the user intended to write `lhs == rhs` instead of `lhs = rhs`.
+ // The likely cause of this is `if foo = bar { .. }`.
+ let actual_ty = self.tcx.mk_unit();
+ let mut err = self.demand_suptype_diag(expr.span, expected_ty, actual_ty).unwrap();
+ let msg = "try comparing for equality";
+ let left = self.tcx.sess.source_map().span_to_snippet(lhs.span);
+ let right = self.tcx.sess.source_map().span_to_snippet(rhs.span);
+ if let (Ok(left), Ok(right)) = (left, right) {
+ let help = format!("{} == {}", left, right);
+ err.span_suggestion(expr.span, msg, help, Applicability::MaybeIncorrect);
+ } else {
+ err.help(msg);
+ }
+ err.emit();
+ } else if !lhs.is_place_expr() {
+ struct_span_err!(self.tcx.sess, expr.span, E0070,
+ "invalid left-hand side expression")
+ .span_label(expr.span, "left-hand of expression not valid")
+ .emit();
+ }
+
+ self.require_type_is_sized(lhs_ty, lhs.span, traits::AssignmentLhsSized);
+
+ if lhs_ty.references_error() || rhs_ty.references_error() {
+ self.tcx.types.err
+ } else {
+ self.tcx.mk_unit()
+ }
+ }
+
+ fn check_expr_while(
+ &self,
+ cond: &'tcx hir::Expr,
+ body: &'tcx hir::Block,
+ expr: &'tcx hir::Expr
+ ) -> Ty<'tcx> {
+ let ctxt = BreakableCtxt {
+ // Cannot use break with a value from a while loop.
+ coerce: None,
+ may_break: false, // Will get updated if/when we find a `break`.
+ };
+
+ let (ctxt, ()) = self.with_breakable_ctxt(expr.hir_id, ctxt, || {
+ self.check_expr_has_type_or_error(&cond, self.tcx.types.bool);
+ let cond_diverging = self.diverges.get();
+ self.check_block_no_value(&body);
+
+ // We may never reach the body so it diverging means nothing.
+ self.diverges.set(cond_diverging);
+ });
+
+ if ctxt.may_break {
+ // No way to know whether it's diverging because
+ // of a `break` or an outer `break` or `return`.
+ self.diverges.set(Diverges::Maybe);
+ }
+
+ self.tcx.mk_unit()
+ }
+
+ fn check_expr_loop(
+ &self,
+ body: &'tcx hir::Block,
+ source: hir::LoopSource,
+ expected: Expectation<'tcx>,
+ expr: &'tcx hir::Expr,
+ ) -> Ty<'tcx> {
+ let coerce = match source {
+ // you can only use break with a value from a normal `loop { }`
+ hir::LoopSource::Loop => {
+ let coerce_to = expected.coercion_target_type(self, body.span);
+ Some(CoerceMany::new(coerce_to))
+ }
+
+ hir::LoopSource::WhileLet |
+ hir::LoopSource::ForLoop => {
+ None
+ }
+ };
+
+ let ctxt = BreakableCtxt {
+ coerce,
+ may_break: false, // Will get updated if/when we find a `break`.
+ };
+
+ let (ctxt, ()) = self.with_breakable_ctxt(expr.hir_id, ctxt, || {
+ self.check_block_no_value(&body);
+ });
+
+ if ctxt.may_break {
+ // No way to know whether it's diverging because
+ // of a `break` or an outer `break` or `return`.
+ self.diverges.set(Diverges::Maybe);
+ }
+
+ // If we permit break with a value, then result type is
+ // the LUB of the breaks (possibly ! if none); else, it
+ // is nil. This makes sense because infinite loops
+ // (which would have type !) are only possible iff we
+ // permit break with a value [1].
+ if ctxt.coerce.is_none() && !ctxt.may_break {
+ // [1]
+ self.tcx.sess.delay_span_bug(body.span, "no coercion, but loop may not break");
+ }
+ ctxt.coerce.map(|c| c.complete(self)).unwrap_or_else(|| self.tcx.mk_unit())
+ }
+
+ /// Checks a method call.
+ fn check_method_call(
+ &self,
+ expr: &'tcx hir::Expr,
+ segment: &hir::PathSegment,
+ span: Span,
+ args: &'tcx [hir::Expr],
+ expected: Expectation<'tcx>,
+ needs: Needs,
+ ) -> Ty<'tcx> {
+ let rcvr = &args[0];
+ let rcvr_t = self.check_expr_with_needs(&rcvr, needs);
+ // no need to check for bot/err -- callee does that
+ let rcvr_t = self.structurally_resolved_type(args[0].span, rcvr_t);
+
+ let method = match self.lookup_method(rcvr_t,
+ segment,
+ span,
+ expr,
+ rcvr) {
+ Ok(method) => {
+ self.write_method_call(expr.hir_id, method);
+ Ok(method)
+ }
+ Err(error) => {
+ if segment.ident.name != kw::Invalid {
+ self.report_method_error(span,
+ rcvr_t,
+ segment.ident,
+ SelfSource::MethodCall(rcvr),
+ error,
+ Some(args));
+ }
+ Err(())
+ }
+ };
+
+ // Call the generic checker.
+ self.check_method_argument_types(span,
+ expr.span,
+ method,
+ &args[1..],
+ DontTupleArguments,
+ expected)
+ }
+
+ fn check_expr_cast(
+ &self,
+ e: &'tcx hir::Expr,
+ t: &'tcx hir::Ty,
+ expr: &'tcx hir::Expr,
+ ) -> Ty<'tcx> {
+ // Find the type of `e`. Supply hints based on the type we are casting to,
+ // if appropriate.
+ let t_cast = self.to_ty_saving_user_provided_ty(t);
+ let t_cast = self.resolve_vars_if_possible(&t_cast);
+ let t_expr = self.check_expr_with_expectation(e, ExpectCastableToType(t_cast));
+ let t_cast = self.resolve_vars_if_possible(&t_cast);
+
+ // Eagerly check for some obvious errors.
+ if t_expr.references_error() || t_cast.references_error() {
+ self.tcx.types.err
+ } else {
+ // Defer other checks until we're done type checking.
+ let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut();
+ match cast::CastCheck::new(self, e, t_expr, t_cast, t.span, expr.span) {
+ Ok(cast_check) => {
+ deferred_cast_checks.push(cast_check);
+ t_cast
+ }
+ Err(ErrorReported) => {
+ self.tcx.types.err
+ }
+ }
+ }
+ }
+
+ fn check_expr_array(
+ &self,
+ args: &'tcx [hir::Expr],
+ expected: Expectation<'tcx>,
+ expr: &'tcx hir::Expr
+ ) -> Ty<'tcx> {
+ let uty = expected.to_option(self).and_then(|uty| {
+ match uty.sty {
+ ty::Array(ty, _) | ty::Slice(ty) => Some(ty),
+ _ => None
+ }
+ });
+
+ let element_ty = if !args.is_empty() {
+ let coerce_to = uty.unwrap_or_else(|| {
+ self.next_ty_var(TypeVariableOrigin {
+ kind: TypeVariableOriginKind::TypeInference,
+ span: expr.span,
+ })
+ });
+ let mut coerce = CoerceMany::with_coercion_sites(coerce_to, args);
+ assert_eq!(self.diverges.get(), Diverges::Maybe);
+ for e in args {
+ let e_ty = self.check_expr_with_hint(e, coerce_to);
+ let cause = self.misc(e.span);
+ coerce.coerce(self, &cause, e, e_ty);
+ }
+ coerce.complete(self)
+ } else {
+ self.next_ty_var(TypeVariableOrigin {
+ kind: TypeVariableOriginKind::TypeInference,
+ span: expr.span,
+ })
+ };
+ self.tcx.mk_array(element_ty, args.len() as u64)
+ }
+
+ fn check_expr_repeat(
+ &self,
+ element: &'tcx hir::Expr,
+ count: &'tcx hir::AnonConst,
+ expected: Expectation<'tcx>,
+ expr: &'tcx hir::Expr,
+ ) -> Ty<'tcx> {
+ let tcx = self.tcx;
+ let count_def_id = tcx.hir().local_def_id_from_hir_id(count.hir_id);
+ let count = if self.const_param_def_id(count).is_some() {
+ Ok(self.to_const(count, tcx.type_of(count_def_id)))
+ } else {
+ let param_env = ty::ParamEnv::empty();
+ let substs = InternalSubsts::identity_for_item(tcx.global_tcx(), count_def_id);
+ let instance = ty::Instance::resolve(
+ tcx.global_tcx(),
+ param_env,
+ count_def_id,
+ substs,
+ ).unwrap();
+ let global_id = GlobalId {
+ instance,
+ promoted: None
+ };
+
+ tcx.const_eval(param_env.and(global_id))
+ };
+
+ let uty = match expected {
+ ExpectHasType(uty) => {
+ match uty.sty {
+ ty::Array(ty, _) | ty::Slice(ty) => Some(ty),
+ _ => None
+ }
+ }
+ _ => None
+ };
+
+ let (element_ty, t) = match uty {
+ Some(uty) => {
+ self.check_expr_coercable_to_type(&element, uty);
+ (uty, uty)
+ }
+ None => {
+ let ty = self.next_ty_var(TypeVariableOrigin {
+ kind: TypeVariableOriginKind::MiscVariable,
+ span: element.span,
+ });
+ let element_ty = self.check_expr_has_type_or_error(&element, ty);
+ (element_ty, ty)
+ }
+ };
+
+ if let Ok(count) = count {
+ let zero_or_one = count.assert_usize(tcx).map_or(false, |count| count <= 1);
+ if !zero_or_one {
+ // For [foo, ..n] where n > 1, `foo` must have
+ // Copy type:
+ let lang_item = tcx.require_lang_item(lang_items::CopyTraitLangItem);
+ self.require_type_meets(t, expr.span, traits::RepeatVec, lang_item);
+ }
+ }
+
+ if element_ty.references_error() {
+ tcx.types.err
+ } else if let Ok(count) = count {
+ tcx.mk_ty(ty::Array(t, count))
+ } else {
+ tcx.types.err
+ }
+ }
+
+ fn check_expr_tuple(
+ &self,
+ elts: &'tcx [hir::Expr],
+ expected: Expectation<'tcx>,
+ expr: &'tcx hir::Expr,
+ ) -> Ty<'tcx> {
+ let flds = expected.only_has_type(self).and_then(|ty| {
+ let ty = self.resolve_type_vars_with_obligations(ty);
+ match ty.sty {
+ ty::Tuple(ref flds) => Some(&flds[..]),
+ _ => None
+ }
+ });
+
+ let elt_ts_iter = elts.iter().enumerate().map(|(i, e)| {
+ let t = match flds {
+ Some(ref fs) if i < fs.len() => {
+ let ety = fs[i].expect_ty();
+ self.check_expr_coercable_to_type(&e, ety);
+ ety
+ }
+ _ => {
+ self.check_expr_with_expectation(&e, NoExpectation)
+ }
+ };
+ t
+ });
+ let tuple = self.tcx.mk_tup(elt_ts_iter);
+ if tuple.references_error() {
+ self.tcx.types.err
+ } else {
+ self.require_type_is_sized(tuple, expr.span, traits::TupleInitializerSized);
+ tuple
+ }
+ }
+
+ fn check_expr_struct(
+ &self,
+ expr: &hir::Expr,
+ expected: Expectation<'tcx>,
+ qpath: &QPath,
+ fields: &'tcx [hir::Field],
+ base_expr: &'tcx Option<P<hir::Expr>>,
+ ) -> Ty<'tcx> {
+ // Find the relevant variant
+ let (variant, adt_ty) =
+ if let Some(variant_ty) = self.check_struct_path(qpath, expr.hir_id) {
+ variant_ty
+ } else {
+ self.check_struct_fields_on_error(fields, base_expr);
+ return self.tcx.types.err;
+ };
+
+ let path_span = match *qpath {
+ QPath::Resolved(_, ref path) => path.span,
+ QPath::TypeRelative(ref qself, _) => qself.span
+ };
+
+ // Prohibit struct expressions when non-exhaustive flag is set.
+ let adt = adt_ty.ty_adt_def().expect("`check_struct_path` returned non-ADT type");
+ if !adt.did.is_local() && variant.is_field_list_non_exhaustive() {
+ span_err!(self.tcx.sess, expr.span, E0639,
+ "cannot create non-exhaustive {} using struct expression",
+ adt.variant_descr());
+ }
+
+ let error_happened = self.check_expr_struct_fields(adt_ty, expected, expr.hir_id, path_span,
+ variant, fields, base_expr.is_none());
+ if let &Some(ref base_expr) = base_expr {
+ // If check_expr_struct_fields hit an error, do not attempt to populate
+ // the fields with the base_expr. This could cause us to hit errors later
+ // when certain fields are assumed to exist that in fact do not.
+ if !error_happened {
+ self.check_expr_has_type_or_error(base_expr, adt_ty);
+ match adt_ty.sty {
+ ty::Adt(adt, substs) if adt.is_struct() => {
+ let fru_field_types = adt.non_enum_variant().fields.iter().map(|f| {
+ self.normalize_associated_types_in(expr.span, &f.ty(self.tcx, substs))
+ }).collect();
+
+ self.tables
+ .borrow_mut()
+ .fru_field_types_mut()
+ .insert(expr.hir_id, fru_field_types);
+ }
+ _ => {
+ span_err!(self.tcx.sess, base_expr.span, E0436,
+ "functional record update syntax requires a struct");
+ }
+ }
+ }
+ }
+ self.require_type_is_sized(adt_ty, expr.span, traits::StructInitializerSized);
+ adt_ty
+ }
+
+ fn check_expr_struct_fields(
+ &self,
+ adt_ty: Ty<'tcx>,
+ expected: Expectation<'tcx>,
+ expr_id: hir::HirId,
+ span: Span,
+ variant: &'tcx ty::VariantDef,
+ ast_fields: &'tcx [hir::Field],
+ check_completeness: bool,
+ ) -> bool {
+ let tcx = self.tcx;
+
+ let adt_ty_hint =
+ self.expected_inputs_for_expected_output(span, expected, adt_ty, &[adt_ty])
+ .get(0).cloned().unwrap_or(adt_ty);
+ // re-link the regions that EIfEO can erase.
+ self.demand_eqtype(span, adt_ty_hint, adt_ty);
+
+ let (substs, adt_kind, kind_name) = match &adt_ty.sty {
+ &ty::Adt(adt, substs) => {
+ (substs, adt.adt_kind(), adt.variant_descr())
+ }
+ _ => span_bug!(span, "non-ADT passed to check_expr_struct_fields")
+ };
+
+ let mut remaining_fields = variant.fields.iter().enumerate().map(|(i, field)|
+ (field.ident.modern(), (i, field))
+ ).collect::<FxHashMap<_, _>>();
+
+ let mut seen_fields = FxHashMap::default();
+
+ let mut error_happened = false;
+
+ // Type-check each field.
+ for field in ast_fields {
+ let ident = tcx.adjust_ident(field.ident, variant.def_id);
+ let field_type = if let Some((i, v_field)) = remaining_fields.remove(&ident) {
+ seen_fields.insert(ident, field.span);
+ self.write_field_index(field.hir_id, i);
+
+ // We don't look at stability attributes on
+ // struct-like enums (yet...), but it's definitely not
+ // a bug to have constructed one.
+ if adt_kind != AdtKind::Enum {
+ tcx.check_stability(v_field.did, Some(expr_id), field.span);
+ }
+
+ self.field_ty(field.span, v_field, substs)
+ } else {
+ error_happened = true;
+ if let Some(prev_span) = seen_fields.get(&ident) {
+ let mut err = struct_span_err!(self.tcx.sess,
+ field.ident.span,
+ E0062,
+ "field `{}` specified more than once",
+ ident);
+
+ err.span_label(field.ident.span, "used more than once");
+ err.span_label(*prev_span, format!("first use of `{}`", ident));
+
+ err.emit();
+ } else {
+ self.report_unknown_field(adt_ty, variant, field, ast_fields, kind_name);
+ }
+
+ tcx.types.err
+ };
+
+ // Make sure to give a type to the field even if there's
+ // an error, so we can continue type-checking.
+ self.check_expr_coercable_to_type(&field.expr, field_type);
+ }
+
+ // Make sure the programmer specified correct number of fields.
+ if kind_name == "union" {
+ if ast_fields.len() != 1 {
+ tcx.sess.span_err(span, "union expressions should have exactly one field");
+ }
+ } else if check_completeness && !error_happened && !remaining_fields.is_empty() {
+ let len = remaining_fields.len();
+
+ let mut displayable_field_names = remaining_fields
+ .keys()
+ .map(|ident| ident.as_str())
+ .collect::<Vec<_>>();
+
+ displayable_field_names.sort();
+
+ let truncated_fields_error = if len <= 3 {
+ String::new()
+ } else {
+ format!(" and {} other field{}", (len - 3), if len - 3 == 1 {""} else {"s"})
+ };
+
+ let remaining_fields_names = displayable_field_names.iter().take(3)
+ .map(|n| format!("`{}`", n))
+ .collect::<Vec<_>>()
+ .join(", ");
+
+ struct_span_err!(tcx.sess, span, E0063,
+ "missing field{} {}{} in initializer of `{}`",
+ if remaining_fields.len() == 1 { "" } else { "s" },
+ remaining_fields_names,
+ truncated_fields_error,
+ adt_ty)
+ .span_label(span, format!("missing {}{}",
+ remaining_fields_names,
+ truncated_fields_error))
+ .emit();
+ }
+ error_happened
+ }
+
+ fn check_struct_fields_on_error(
+ &self,
+ fields: &'tcx [hir::Field],
+ base_expr: &'tcx Option<P<hir::Expr>>,
+ ) {
+ for field in fields {
+ self.check_expr(&field.expr);
+ }
+ if let Some(ref base) = *base_expr {
+ self.check_expr(&base);
+ }
+ }
+
+ fn report_unknown_field(
+ &self,
+ ty: Ty<'tcx>,
+ variant: &'tcx ty::VariantDef,
+ field: &hir::Field,
+ skip_fields: &[hir::Field],
+ kind_name: &str,
+ ) {
+ if variant.recovered {
+ return;
+ }
+ let mut err = self.type_error_struct_with_diag(
+ field.ident.span,
+ |actual| match ty.sty {
+ ty::Adt(adt, ..) if adt.is_enum() => {
+ struct_span_err!(self.tcx.sess, field.ident.span, E0559,
+ "{} `{}::{}` has no field named `{}`",
+ kind_name, actual, variant.ident, field.ident)
+ }
+ _ => {
+ struct_span_err!(self.tcx.sess, field.ident.span, E0560,
+ "{} `{}` has no field named `{}`",
+ kind_name, actual, field.ident)
+ }
+ },
+ ty);
+ // prevent all specified fields from being suggested
+ let skip_fields = skip_fields.iter().map(|ref x| x.ident.as_str());
+ if let Some(field_name) = Self::suggest_field_name(variant,
+ &field.ident.as_str(),
+ skip_fields.collect()) {
+ err.span_suggestion(
+ field.ident.span,
+ "a field with a similar name exists",
+ field_name.to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ } else {
+ match ty.sty {
+ ty::Adt(adt, ..) => {
+ if adt.is_enum() {
+ err.span_label(field.ident.span,
+ format!("`{}::{}` does not have this field",
+ ty, variant.ident));
+ } else {
+ err.span_label(field.ident.span,
+ format!("`{}` does not have this field", ty));
+ }
+ let available_field_names = self.available_field_names(variant);
+ if !available_field_names.is_empty() {
+ err.note(&format!("available fields are: {}",
+ self.name_series_display(available_field_names)));
+ }
+ }
+ _ => bug!("non-ADT passed to report_unknown_field")
+ }
+ };
+ err.emit();
+ }
+
+ // Return an hint about the closest match in field names
+ fn suggest_field_name(variant: &'tcx ty::VariantDef,
+ field: &str,
+ skip: Vec<LocalInternedString>)
+ -> Option<Symbol> {
+ let names = variant.fields.iter().filter_map(|field| {
+ // ignore already set fields and private fields from non-local crates
+ if skip.iter().any(|x| *x == field.ident.as_str()) ||
+ (!variant.def_id.is_local() && field.vis != Visibility::Public)
+ {
+ None
+ } else {
+ Some(&field.ident.name)
+ }
+ });
+
+ find_best_match_for_name(names, field, None)
+ }
+
+ fn available_field_names(&self, variant: &'tcx ty::VariantDef) -> Vec<ast::Name> {
+ variant.fields.iter().filter(|field| {
+ let def_scope =
+ self.tcx.adjust_ident_and_get_scope(field.ident, variant.def_id, self.body_id).1;
+ field.vis.is_accessible_from(def_scope, self.tcx)
+ })
+ .map(|field| field.ident.name)
+ .collect()
+ }
+
+ fn name_series_display(&self, names: Vec<ast::Name>) -> String {
+ // dynamic limit, to never omit just one field
+ let limit = if names.len() == 6 { 6 } else { 5 };
+ let mut display = names.iter().take(limit)
+ .map(|n| format!("`{}`", n)).collect::<Vec<_>>().join(", ");
+ if names.len() > limit {
+ display = format!("{} ... and {} others", display, names.len() - limit);
+ }
+ display
+ }
+
+ // Check field access expressions
+ fn check_field(
+ &self,
+ expr: &'tcx hir::Expr,
+ needs: Needs,
+ base: &'tcx hir::Expr,
+ field: ast::Ident,
+ ) -> Ty<'tcx> {
+ let expr_t = self.check_expr_with_needs(base, needs);
+ let expr_t = self.structurally_resolved_type(base.span,
+ expr_t);
+ let mut private_candidate = None;
+ let mut autoderef = self.autoderef(expr.span, expr_t);
+ while let Some((base_t, _)) = autoderef.next() {
+ match base_t.sty {
+ ty::Adt(base_def, substs) if !base_def.is_enum() => {
+ debug!("struct named {:?}", base_t);
+ let (ident, def_scope) =
+ self.tcx.adjust_ident_and_get_scope(field, base_def.did, self.body_id);
+ let fields = &base_def.non_enum_variant().fields;
+ if let Some(index) = fields.iter().position(|f| f.ident.modern() == ident) {
+ let field = &fields[index];
+ let field_ty = self.field_ty(expr.span, field, substs);
+ // Save the index of all fields regardless of their visibility in case
+ // of error recovery.
+ self.write_field_index(expr.hir_id, index);
+ if field.vis.is_accessible_from(def_scope, self.tcx) {
+ let adjustments = autoderef.adjust_steps(self, needs);
+ self.apply_adjustments(base, adjustments);
+ autoderef.finalize(self);
+
+ self.tcx.check_stability(field.did, Some(expr.hir_id), expr.span);
+ return field_ty;
+ }
+ private_candidate = Some((base_def.did, field_ty));
+ }
+ }
+ ty::Tuple(ref tys) => {
+ let fstr = field.as_str();
+ if let Ok(index) = fstr.parse::<usize>() {
+ if fstr == index.to_string() {
+ if let Some(field_ty) = tys.get(index) {
+ let adjustments = autoderef.adjust_steps(self, needs);
+ self.apply_adjustments(base, adjustments);
+ autoderef.finalize(self);
+
+ self.write_field_index(expr.hir_id, index);
+ return field_ty.expect_ty();
+ }
+ }
+ }
+ }
+ _ => {}
+ }
+ }
+ autoderef.unambiguous_final_ty(self);
+
+ if let Some((did, field_ty)) = private_candidate {
+ let struct_path = self.tcx().def_path_str(did);
+ let mut err = struct_span_err!(self.tcx().sess, expr.span, E0616,
+ "field `{}` of struct `{}` is private",
+ field, struct_path);
+ // Also check if an accessible method exists, which is often what is meant.
+ if self.method_exists(field, expr_t, expr.hir_id, false)
+ && !self.expr_in_place(expr.hir_id)
+ {
+ self.suggest_method_call(
+ &mut err,
+ &format!("a method `{}` also exists, call it with parentheses", field),
+ field,
+ expr_t,
+ expr.hir_id,
+ );
+ }
+ err.emit();
+ field_ty
+ } else if field.name == kw::Invalid {
+ self.tcx().types.err
+ } else if self.method_exists(field, expr_t, expr.hir_id, true) {
+ let mut err = type_error_struct!(self.tcx().sess, field.span, expr_t, E0615,
+ "attempted to take value of method `{}` on type `{}`",
+ field, expr_t);
+
+ if !self.expr_in_place(expr.hir_id) {
+ self.suggest_method_call(
+ &mut err,
+ "use parentheses to call the method",
+ field,
+ expr_t,
+ expr.hir_id
+ );
+ } else {
+ err.help("methods are immutable and cannot be assigned to");
+ }
+
+ err.emit();
+ self.tcx().types.err
+ } else {
+ if !expr_t.is_primitive_ty() {
+ let mut err = self.no_such_field_err(field.span, field, expr_t);
+
+ match expr_t.sty {
+ ty::Adt(def, _) if !def.is_enum() => {
+ if let Some(suggested_field_name) =
+ Self::suggest_field_name(def.non_enum_variant(),
+ &field.as_str(), vec![]) {
+ err.span_suggestion(
+ field.span,
+ "a field with a similar name exists",
+ suggested_field_name.to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ } else {
+ err.span_label(field.span, "unknown field");
+ let struct_variant_def = def.non_enum_variant();
+ let field_names = self.available_field_names(struct_variant_def);
+ if !field_names.is_empty() {
+ err.note(&format!("available fields are: {}",
+ self.name_series_display(field_names)));
+ }
+ };
+ }
+ ty::Array(_, len) => {
+ if let (Some(len), Ok(user_index)) = (
+ len.assert_usize(self.tcx),
+ field.as_str().parse::<u64>()
+ ) {
+ let base = self.tcx.sess.source_map()
+ .span_to_snippet(base.span)
+ .unwrap_or_else(|_|
+ self.tcx.hir().hir_to_pretty_string(base.hir_id));
+ let help = "instead of using tuple indexing, use array indexing";
+ let suggestion = format!("{}[{}]", base, field);
+ let applicability = if len < user_index {
+ Applicability::MachineApplicable
+ } else {
+ Applicability::MaybeIncorrect
+ };
+ err.span_suggestion(
+ expr.span, help, suggestion, applicability
+ );
+ }
+ }
+ ty::RawPtr(..) => {
+ let base = self.tcx.sess.source_map()
+ .span_to_snippet(base.span)
+ .unwrap_or_else(|_| self.tcx.hir().hir_to_pretty_string(base.hir_id));
+ let msg = format!("`{}` is a raw pointer; try dereferencing it", base);
+ let suggestion = format!("(*{}).{}", base, field);
+ err.span_suggestion(
+ expr.span,
+ &msg,
+ suggestion,
+ Applicability::MaybeIncorrect,
+ );
+ }
+ _ => {}
+ }
+ err
+ } else {
+ type_error_struct!(self.tcx().sess, field.span, expr_t, E0610,
+ "`{}` is a primitive type and therefore doesn't have fields",
+ expr_t)
+ }.emit();
+ self.tcx().types.err
+ }
+ }
+
+ fn no_such_field_err<T: Display>(&self, span: Span, field: T, expr_t: &ty::TyS<'_>)
+ -> DiagnosticBuilder<'_> {
+ type_error_struct!(self.tcx().sess, span, expr_t, E0609,
+ "no field `{}` on type `{}`",
+ field, expr_t)
+ }
+
+ fn check_expr_index(
+ &self,
+ base: &'tcx hir::Expr,
+ idx: &'tcx hir::Expr,
+ needs: Needs,
+ expr: &'tcx hir::Expr,
+ ) -> Ty<'tcx> {
+ let base_t = self.check_expr_with_needs(&base, needs);
+ let idx_t = self.check_expr(&idx);
+
+ if base_t.references_error() {
+ base_t
+ } else if idx_t.references_error() {
+ idx_t
+ } else {
+ let base_t = self.structurally_resolved_type(base.span, base_t);
+ match self.lookup_indexing(expr, base, base_t, idx_t, needs) {
+ Some((index_ty, element_ty)) => {
+ // two-phase not needed because index_ty is never mutable
+ self.demand_coerce(idx, idx_t, index_ty, AllowTwoPhase::No);
+ element_ty
+ }
+ None => {
+ let mut err =
+ type_error_struct!(self.tcx.sess, expr.span, base_t, E0608,
+ "cannot index into a value of type `{}`",
+ base_t);
+ // Try to give some advice about indexing tuples.
+ if let ty::Tuple(..) = base_t.sty {
+ let mut needs_note = true;
+ // If the index is an integer, we can show the actual
+ // fixed expression:
+ if let ExprKind::Lit(ref lit) = idx.node {
+ if let ast::LitKind::Int(i, ast::LitIntType::Unsuffixed) = lit.node {
+ let snip = self.tcx.sess.source_map().span_to_snippet(base.span);
+ if let Ok(snip) = snip {
+ err.span_suggestion(
+ expr.span,
+ "to access tuple elements, use",
+ format!("{}.{}", snip, i),
+ Applicability::MachineApplicable,
+ );
+ needs_note = false;
+ }
+ }
+ }
+ if needs_note {
+ err.help("to access tuple elements, use tuple indexing \
+ syntax (e.g., `tuple.0`)");
+ }
+ }
+ err.emit();
+ self.tcx.types.err
+ }
+ }
+ }
+ }
+
+ fn check_expr_yield(&self, value: &'tcx hir::Expr, expr: &'tcx hir::Expr) -> Ty<'tcx> {
+ match self.yield_ty {
+ Some(ty) => {
+ self.check_expr_coercable_to_type(&value, ty);
+ }
+ None => {
+ struct_span_err!(self.tcx.sess, expr.span, E0627,
+ "yield statement outside of generator literal").emit();
+ }
+ }
+ self.tcx.mk_unit()
+ }
+}
diff --git a/src/librustc_typeck/check/generator_interior.rs b/src/librustc_typeck/check/generator_interior.rs
index 3785c3c..5f9aa5f 100644
--- a/src/librustc_typeck/check/generator_interior.rs
+++ b/src/librustc_typeck/check/generator_interior.rs
@@ -12,14 +12,14 @@
use super::FnCtxt;
use crate::util::nodemap::FxHashMap;
-struct InteriorVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
+struct InteriorVisitor<'a, 'tcx: 'a> {
+ fcx: &'a FnCtxt<'a, 'tcx>,
types: FxHashMap<Ty<'tcx>, usize>,
- region_scope_tree: &'gcx region::ScopeTree,
+ region_scope_tree: &'tcx region::ScopeTree,
expr_count: usize,
}
-impl<'a, 'gcx, 'tcx> InteriorVisitor<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InteriorVisitor<'a, 'tcx> {
fn record(&mut self,
ty: Ty<'tcx>,
scope: Option<region::Scope>,
@@ -75,10 +75,12 @@
}
}
-pub fn resolve_interior<'a, 'gcx, 'tcx>(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
- def_id: DefId,
- body_id: hir::BodyId,
- interior: Ty<'tcx>) {
+pub fn resolve_interior<'a, 'tcx>(
+ fcx: &'a FnCtxt<'a, 'tcx>,
+ def_id: DefId,
+ body_id: hir::BodyId,
+ interior: Ty<'tcx>,
+) {
let body = fcx.tcx.hir().body(body_id);
let mut visitor = InteriorVisitor {
fcx,
@@ -136,7 +138,7 @@
// This visitor has to have the same visit_expr calls as RegionResolutionVisitor in
// librustc/middle/region.rs since `expr_count` is compared against the results
// there.
-impl<'a, 'gcx, 'tcx> Visitor<'tcx> for InteriorVisitor<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Visitor<'tcx> for InteriorVisitor<'a, 'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::None
}
diff --git a/src/librustc_typeck/check/intrinsic.rs b/src/librustc_typeck/check/intrinsic.rs
index 11598ad..7f690b6 100644
--- a/src/librustc_typeck/check/intrinsic.rs
+++ b/src/librustc_typeck/check/intrinsic.rs
@@ -13,8 +13,8 @@
use std::iter;
-fn equate_intrinsic_type<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn equate_intrinsic_type<'tcx>(
+ tcx: TyCtxt<'tcx>,
it: &hir::ForeignItem,
n_tps: usize,
abi: Abi,
@@ -79,8 +79,7 @@
/// Remember to add all intrinsics here, in librustc_codegen_llvm/intrinsic.rs,
/// and in libcore/intrinsics.rs
-pub fn check_intrinsic_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- it: &hir::ForeignItem) {
+pub fn check_intrinsic_type<'tcx>(tcx: TyCtxt<'tcx>, it: &hir::ForeignItem) {
let param = |n| tcx.mk_ty_param(n, InternedString::intern(&format!("P{}", n)));
let name = it.ident.as_str();
@@ -400,8 +399,7 @@
}
/// Type-check `extern "platform-intrinsic" { ... }` functions.
-pub fn check_platform_intrinsic_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- it: &hir::ForeignItem) {
+pub fn check_platform_intrinsic_type<'tcx>(tcx: TyCtxt<'tcx>, it: &hir::ForeignItem) {
let param = |n| {
let name = InternedString::intern(&format!("P{}", n));
tcx.mk_ty_param(n, name)
diff --git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs
index 4934863..5df0010 100644
--- a/src/librustc_typeck/check/method/confirm.rs
+++ b/src/librustc_typeck/check/method/confirm.rs
@@ -16,15 +16,15 @@
use std::ops::Deref;
-struct ConfirmContext<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
+struct ConfirmContext<'a, 'tcx> {
+ fcx: &'a FnCtxt<'a, 'tcx>,
span: Span,
- self_expr: &'gcx hir::Expr,
- call_expr: &'gcx hir::Expr,
+ self_expr: &'tcx hir::Expr,
+ call_expr: &'tcx hir::Expr,
}
-impl<'a, 'gcx, 'tcx> Deref for ConfirmContext<'a, 'gcx, 'tcx> {
- type Target = FnCtxt<'a, 'gcx, 'tcx>;
+impl<'a, 'tcx> Deref for ConfirmContext<'a, 'tcx> {
+ type Target = FnCtxt<'a, 'tcx>;
fn deref(&self) -> &Self::Target {
&self.fcx
}
@@ -35,12 +35,12 @@
pub illegal_sized_bound: bool,
}
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub fn confirm_method(
&self,
span: Span,
- self_expr: &'gcx hir::Expr,
- call_expr: &'gcx hir::Expr,
+ self_expr: &'tcx hir::Expr,
+ call_expr: &'tcx hir::Expr,
unadjusted_self_ty: Ty<'tcx>,
pick: probe::Pick<'tcx>,
segment: &hir::PathSegment,
@@ -57,12 +57,13 @@
}
}
-impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> {
- fn new(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
- span: Span,
- self_expr: &'gcx hir::Expr,
- call_expr: &'gcx hir::Expr)
- -> ConfirmContext<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
+ fn new(
+ fcx: &'a FnCtxt<'a, 'tcx>,
+ span: Span,
+ self_expr: &'tcx hir::Expr,
+ call_expr: &'tcx hir::Expr,
+ ) -> ConfirmContext<'a, 'tcx> {
ConfirmContext {
fcx,
span,
@@ -263,10 +264,8 @@
}
fn extract_existential_trait_ref<R, F>(&mut self, self_ty: Ty<'tcx>, mut closure: F) -> R
- where F: FnMut(&mut ConfirmContext<'a, 'gcx, 'tcx>,
- Ty<'tcx>,
- ty::PolyExistentialTraitRef<'tcx>)
- -> R
+ where
+ F: FnMut(&mut ConfirmContext<'a, 'tcx>, Ty<'tcx>, ty::PolyExistentialTraitRef<'tcx>) -> R,
{
// If we specified that this is an object method, then the
// self-type ought to be something that can be dereferenced to
diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs
index 213d53c..b492197 100644
--- a/src/librustc_typeck/check/method/mod.rs
+++ b/src/librustc_typeck/check/method/mod.rs
@@ -100,7 +100,7 @@
TraitSource(DefId /* trait id */),
}
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// Determines whether the type `self_ty` supports a method name `method_name` or not.
pub fn method_exists(&self,
method_name: ast::Ident,
@@ -174,13 +174,14 @@
/// * `self_ty`: the (unadjusted) type of the self expression (`foo`)
/// * `supplied_method_types`: the explicit method type parameters, if any (`T1..Tn`)
/// * `self_expr`: the self expression (`foo`)
- pub fn lookup_method(&self,
- self_ty: Ty<'tcx>,
- segment: &hir::PathSegment,
- span: Span,
- call_expr: &'gcx hir::Expr,
- self_expr: &'gcx hir::Expr)
- -> Result<MethodCallee<'tcx>, MethodError<'tcx>> {
+ pub fn lookup_method(
+ &self,
+ self_ty: Ty<'tcx>,
+ segment: &hir::PathSegment,
+ span: Span,
+ call_expr: &'tcx hir::Expr,
+ self_expr: &'tcx hir::Expr,
+ ) -> Result<MethodCallee<'tcx>, MethodError<'tcx>> {
debug!("lookup(method_name={}, self_ty={:?}, call_expr={:?}, self_expr={:?})",
segment.ident,
self_ty,
@@ -245,13 +246,14 @@
Ok(result.callee)
}
- fn lookup_probe(&self,
- span: Span,
- method_name: ast::Ident,
- self_ty: Ty<'tcx>,
- call_expr: &'gcx hir::Expr,
- scope: ProbeScope)
- -> probe::PickResult<'tcx> {
+ fn lookup_probe(
+ &self,
+ span: Span,
+ method_name: ast::Ident,
+ self_ty: Ty<'tcx>,
+ call_expr: &'tcx hir::Expr,
+ scope: ProbeScope,
+ ) -> probe::PickResult<'tcx> {
let mode = probe::Mode::MethodCall;
let self_ty = self.resolve_vars_if_possible(&self_ty);
self.probe_for_name(span, mode, method_name, IsSuggestion(false),
diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs
index e55da40..661883f 100644
--- a/src/librustc_typeck/check/method/probe.rs
+++ b/src/librustc_typeck/check/method/probe.rs
@@ -45,8 +45,8 @@
#[derive(Clone, Copy)]
pub struct IsSuggestion(pub bool);
-struct ProbeContext<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
+struct ProbeContext<'a, 'tcx> {
+ fcx: &'a FnCtxt<'a, 'tcx>,
span: Span,
mode: Mode,
method_name: Option<ast::Ident>,
@@ -55,7 +55,7 @@
/// This is the OriginalQueryValues for the steps queries
/// that are answered in steps.
orig_steps_var_values: OriginalQueryValues<'tcx>,
- steps: Lrc<Vec<CandidateStep<'gcx>>>,
+ steps: Lrc<Vec<CandidateStep<'tcx>>>,
inherent_candidates: Vec<Candidate<'tcx>>,
extension_candidates: Vec<Candidate<'tcx>>,
@@ -79,8 +79,8 @@
is_suggestion: IsSuggestion,
}
-impl<'a, 'gcx, 'tcx> Deref for ProbeContext<'a, 'gcx, 'tcx> {
- type Target = FnCtxt<'a, 'gcx, 'tcx>;
+impl<'a, 'tcx> Deref for ProbeContext<'a, 'tcx> {
+ type Target = FnCtxt<'a, 'tcx>;
fn deref(&self) -> &Self::Target {
&self.fcx
}
@@ -200,7 +200,7 @@
AllTraits,
}
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// This is used to offer suggestions to users. It returns methods
/// that could have been called which have the desired return
/// type. Some effort is made to rule out methods that, if called,
@@ -259,18 +259,20 @@
|probe_cx| probe_cx.pick())
}
- fn probe_op<OP,R>(&'a self,
- span: Span,
- mode: Mode,
- method_name: Option<ast::Ident>,
- return_type: Option<Ty<'tcx>>,
- is_suggestion: IsSuggestion,
- self_ty: Ty<'tcx>,
- scope_expr_id: hir::HirId,
- scope: ProbeScope,
- op: OP)
- -> Result<R, MethodError<'tcx>>
- where OP: FnOnce(ProbeContext<'a, 'gcx, 'tcx>) -> Result<R, MethodError<'tcx>>
+ fn probe_op<OP, R>(
+ &'a self,
+ span: Span,
+ mode: Mode,
+ method_name: Option<ast::Ident>,
+ return_type: Option<Ty<'tcx>>,
+ is_suggestion: IsSuggestion,
+ self_ty: Ty<'tcx>,
+ scope_expr_id: hir::HirId,
+ scope: ProbeScope,
+ op: OP,
+ ) -> Result<R, MethodError<'tcx>>
+ where
+ OP: FnOnce(ProbeContext<'a, 'tcx>) -> Result<R, MethodError<'tcx>>,
{
let mut orig_values = OriginalQueryValues::default();
let param_env_and_self_ty =
@@ -395,10 +397,10 @@
providers.method_autoderef_steps = method_autoderef_steps;
}
-fn method_autoderef_steps<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'gcx>,
- goal: CanonicalTyGoal<'tcx>)
- -> MethodAutoderefStepsResult<'gcx>
-{
+fn method_autoderef_steps<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ goal: CanonicalTyGoal<'tcx>,
+) -> MethodAutoderefStepsResult<'tcx> {
debug!("method_autoderef_steps({:?})", goal);
tcx.infer_ctxt().enter_with_canonical(DUMMY_SP, &goal, |ref infcx, goal, inference_vars| {
@@ -463,17 +465,17 @@
})
}
-
-impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
- fn new(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
- span: Span,
- mode: Mode,
- method_name: Option<ast::Ident>,
- return_type: Option<Ty<'tcx>>,
- orig_steps_var_values: OriginalQueryValues<'tcx>,
- steps: Lrc<Vec<CandidateStep<'gcx>>>,
- is_suggestion: IsSuggestion)
- -> ProbeContext<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
+ fn new(
+ fcx: &'a FnCtxt<'a, 'tcx>,
+ span: Span,
+ mode: Mode,
+ method_name: Option<ast::Ident>,
+ return_type: Option<Ty<'tcx>>,
+ orig_steps_var_values: OriginalQueryValues<'tcx>,
+ steps: Lrc<Vec<CandidateStep<'tcx>>>,
+ is_suggestion: IsSuggestion,
+ ) -> ProbeContext<'a, 'tcx> {
ProbeContext {
fcx,
span,
@@ -535,7 +537,7 @@
}
}
- fn assemble_probe(&mut self, self_ty: &Canonical<'gcx, QueryResponse<'gcx, Ty<'gcx>>>) {
+ fn assemble_probe(&mut self, self_ty: &Canonical<'tcx, QueryResponse<'tcx, Ty<'tcx>>>) {
debug!("assemble_probe: self_ty={:?}", self_ty);
let lang_items = self.tcx.lang_items();
@@ -808,12 +810,12 @@
// Do a search through a list of bounds, using a callback to actually
// create the candidates.
- fn elaborate_bounds<F>(&mut self,
- bounds: impl Iterator<Item = ty::PolyTraitRef<'tcx>>,
- mut mk_cand: F)
- where F: for<'b> FnMut(&mut ProbeContext<'b, 'gcx, 'tcx>,
- ty::PolyTraitRef<'tcx>,
- ty::AssocItem)
+ fn elaborate_bounds<F>(
+ &mut self,
+ bounds: impl Iterator<Item = ty::PolyTraitRef<'tcx>>,
+ mut mk_cand: F,
+ ) where
+ F: for<'b> FnMut(&mut ProbeContext<'b, 'tcx>, ty::PolyTraitRef<'tcx>, ty::AssocItem),
{
let tcx = self.tcx;
for bound_trait_ref in traits::transitive_bounds(tcx, bounds) {
@@ -1045,9 +1047,11 @@
.next()
}
- fn pick_by_value_method(&mut self, step: &CandidateStep<'gcx>, self_ty: Ty<'tcx>)
- -> Option<PickResult<'tcx>>
- {
+ fn pick_by_value_method(
+ &mut self,
+ step: &CandidateStep<'tcx>,
+ self_ty: Ty<'tcx>,
+ ) -> Option<PickResult<'tcx>> {
//! For each type `T` in the step list, this attempts to find a
//! method where the (transformed) self type is exactly `T`. We
//! do however do one transformation on the adjustment: if we
@@ -1075,11 +1079,12 @@
})
}
- fn pick_autorefd_method(&mut self,
- step: &CandidateStep<'gcx>,
- self_ty: Ty<'tcx>,
- mutbl: hir::Mutability)
- -> Option<PickResult<'tcx>> {
+ fn pick_autorefd_method(
+ &mut self,
+ step: &CandidateStep<'tcx>,
+ self_ty: Ty<'tcx>,
+ mutbl: hir::Mutability,
+ ) -> Option<PickResult<'tcx>> {
let tcx = self.tcx;
// In general, during probing we erase regions. See
diff --git a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs
index 82ccd3a..29b4fee 100644
--- a/src/librustc_typeck/check/method/suggest.rs
+++ b/src/librustc_typeck/check/method/suggest.rs
@@ -24,7 +24,7 @@
use super::{MethodError, NoMatchData, CandidateSource};
use super::probe::Mode;
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
fn is_fn_ty(&self, ty: Ty<'tcx>, span: Span) -> bool {
let tcx = self.tcx;
match ty.sty {
@@ -69,7 +69,7 @@
item_name: ast::Ident,
source: SelfSource<'b>,
error: MethodError<'tcx>,
- args: Option<&'gcx [hir::Expr]>,
+ args: Option<&'tcx [hir::Expr]>,
) {
let orig_span = span;
let mut span = span;
@@ -264,7 +264,7 @@
// local binding
if let &QPath::Resolved(_, ref path) = &qpath {
if let hir::def::Res::Local(hir_id) = path.res {
- let span = tcx.hir().span_by_hir_id(hir_id);
+ let span = tcx.hir().span(hir_id);
let snippet = tcx.sess.source_map().span_to_snippet(span);
let filename = tcx.sess.source_map().span_to_filename(span);
@@ -370,7 +370,7 @@
});
if let Some((field, field_ty)) = field_receiver {
- let scope = self.tcx.hir().get_module_parent_by_hir_id(self.body_id);
+ let scope = self.tcx.hir().get_module_parent(self.body_id);
let is_accessible = field.vis.is_accessible_from(scope, self.tcx);
if is_accessible {
@@ -564,7 +564,7 @@
err: &mut DiagnosticBuilder<'_>,
mut msg: String,
candidates: Vec<DefId>) {
- let module_did = self.tcx.hir().get_module_parent_by_hir_id(self.body_id);
+ let module_did = self.tcx.hir().get_module_parent(self.body_id);
let module_id = self.tcx.hir().as_local_hir_id(module_did).unwrap();
let krate = self.tcx.hir().krate();
let (span, found_use) = UsePlacementFinder::check(self.tcx, krate, module_id);
@@ -775,12 +775,12 @@
}
/// Retrieves all traits in this crate and any dependent crates.
-pub fn all_traits<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Vec<TraitInfo> {
+pub fn all_traits<'tcx>(tcx: TyCtxt<'tcx>) -> Vec<TraitInfo> {
tcx.all_traits(LOCAL_CRATE).iter().map(|&def_id| TraitInfo { def_id }).collect()
}
/// Computes all traits in this crate and any dependent crates.
-fn compute_all_traits<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Vec<DefId> {
+fn compute_all_traits<'tcx>(tcx: TyCtxt<'tcx>) -> Vec<DefId> {
use hir::itemlikevisit;
let mut traits = vec![];
@@ -817,10 +817,12 @@
// Cross-crate:
let mut external_mods = FxHashSet::default();
- fn handle_external_res(tcx: TyCtxt<'_, '_, '_>,
- traits: &mut Vec<DefId>,
- external_mods: &mut FxHashSet<DefId>,
- res: Res) {
+ fn handle_external_res(
+ tcx: TyCtxt<'_>,
+ traits: &mut Vec<DefId>,
+ external_mods: &mut FxHashSet<DefId>,
+ res: Res,
+ ) {
match res {
Res::Def(DefKind::Trait, def_id) |
Res::Def(DefKind::TraitAlias, def_id) => {
@@ -855,16 +857,16 @@
}
}
-struct UsePlacementFinder<'a, 'tcx: 'a, 'gcx: 'tcx> {
+struct UsePlacementFinder<'tcx> {
target_module: hir::HirId,
span: Option<Span>,
found_use: bool,
- tcx: TyCtxt<'a, 'gcx, 'tcx>
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx, 'gcx> UsePlacementFinder<'a, 'tcx, 'gcx> {
+impl UsePlacementFinder<'tcx> {
fn check(
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
krate: &'tcx hir::Crate,
target_module: hir::HirId,
) -> (Option<Span>, bool) {
@@ -879,7 +881,7 @@
}
}
-impl<'a, 'tcx, 'gcx> hir::intravisit::Visitor<'tcx> for UsePlacementFinder<'a, 'tcx, 'gcx> {
+impl hir::intravisit::Visitor<'tcx> for UsePlacementFinder<'tcx> {
fn visit_mod(
&mut self,
module: &'tcx hir::Mod,
@@ -895,7 +897,7 @@
}
// Find a `use` statement.
for item_id in &module.item_ids {
- let item = self.tcx.hir().expect_item_by_hir_id(item_id.id);
+ let item = self.tcx.hir().expect_item(item_id.id);
match item.node {
hir::ItemKind::Use(..) => {
// Don't suggest placing a `use` before the prelude
diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs
index e35df6c..10bfe9e 100644
--- a/src/librustc_typeck/check/mod.rs
+++ b/src/librustc_typeck/check/mod.rs
@@ -74,6 +74,7 @@
mod regionck;
pub mod coercion;
pub mod demand;
+mod expr;
pub mod method;
mod upvar;
mod wfcheck;
@@ -88,7 +89,7 @@
use crate::astconv::{AstConv, PathSeg};
use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
use rustc::hir::{self, ExprKind, GenericArg, ItemKind, Node, PatKind, QPath};
-use rustc::hir::def::{CtorOf, CtorKind, Res, DefKind};
+use rustc::hir::def::{CtorOf, Res, DefKind};
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
use rustc::hir::itemlikevisit::ItemLikeVisitor;
@@ -105,7 +106,7 @@
use rustc::mir::interpret::{ConstValue, GlobalId};
use rustc::traits::{self, ObligationCause, ObligationCauseCode, TraitEngine};
use rustc::ty::{
- self, AdtKind, CanonicalUserType, Ty, TyCtxt, Const, GenericParamDefKind, Visibility,
+ self, AdtKind, CanonicalUserType, Ty, TyCtxt, Const, GenericParamDefKind,
ToPolyTraitRef, ToPredicate, RegionKind, UserType
};
use rustc::ty::adjustment::{
@@ -123,13 +124,11 @@
use syntax::feature_gate::{GateIssue, emit_feature_err};
use syntax::ptr::P;
use syntax::source_map::{DUMMY_SP, original_sp};
-use syntax::symbol::{Symbol, LocalInternedString, kw, sym};
-use syntax::util::lev_distance::find_best_match_for_name;
+use syntax::symbol::{kw, sym};
use std::cell::{Cell, RefCell, Ref, RefMut};
use std::collections::hash_map::Entry;
use std::cmp;
-use std::fmt::Display;
use std::iter;
use std::mem::replace;
use std::ops::{self, Deref};
@@ -142,7 +141,7 @@
use crate::lint;
use crate::util::captures::Captures;
use crate::util::common::{ErrorReported, indenter};
-use crate::util::nodemap::{DefIdMap, DefIdSet, FxHashMap, FxHashSet, HirIdMap};
+use crate::util::nodemap::{DefIdMap, DefIdSet, FxHashSet, HirIdMap};
pub use self::Expectation::*;
use self::autoderef::Autoderef;
@@ -194,8 +193,8 @@
/// Here, the function `foo()` and the closure passed to
/// `bar()` will each have their own `FnCtxt`, but they will
/// share the inherited fields.
-pub struct Inherited<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- infcx: InferCtxt<'a, 'gcx, 'tcx>,
+pub struct Inherited<'a, 'tcx: 'a> {
+ infcx: InferCtxt<'a, 'tcx>,
tables: MaybeInProgressTables<'a, 'tcx>,
@@ -214,7 +213,7 @@
// decision. We keep these deferred resolutions grouped by the
// def-id of the closure, so that once we decide, we can easily go
// back and process them.
- deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolution<'gcx, 'tcx>>>>,
+ deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolution<'tcx>>>>,
deferred_cast_checks: RefCell<Vec<cast::CastCheck<'tcx>>>,
@@ -237,8 +236,8 @@
body_id: Option<hir::BodyId>,
}
-impl<'a, 'gcx, 'tcx> Deref for Inherited<'a, 'gcx, 'tcx> {
- type Target = InferCtxt<'a, 'gcx, 'tcx>;
+impl<'a, 'tcx> Deref for Inherited<'a, 'tcx> {
+ type Target = InferCtxt<'a, 'tcx>;
fn deref(&self) -> &Self::Target {
&self.infcx
}
@@ -262,7 +261,7 @@
ExpectRvalueLikeUnsized(Ty<'tcx>),
}
-impl<'a, 'gcx, 'tcx> Expectation<'tcx> {
+impl<'a, 'tcx> Expectation<'tcx> {
// Disregard "castable to" expectations because they
// can lead us astray. Consider for example `if cond
// {22} else {c} as u8` -- if we propagate the
@@ -279,7 +278,7 @@
// an expected type. Otherwise, we might write parts of the type
// when checking the 'then' block which are incompatible with the
// 'else' branch.
- fn adjust_for_branches(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> {
+ fn adjust_for_branches(&self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
match *self {
ExpectHasType(ety) => {
let ety = fcx.shallow_resolve(ety);
@@ -315,7 +314,7 @@
/// which still is useful, because it informs integer literals and the like.
/// See the test case `test/run-pass/coerce-expect-unsized.rs` and #20169
/// for examples of where this comes up,.
- fn rvalue_hint(fcx: &FnCtxt<'a, 'gcx, 'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> {
+ fn rvalue_hint(fcx: &FnCtxt<'a, 'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> {
match fcx.tcx.struct_tail(ty).sty {
ty::Slice(_) | ty::Str | ty::Dynamic(..) => {
ExpectRvalueLikeUnsized(ty)
@@ -327,7 +326,7 @@
// Resolves `expected` by a single level if it is a variable. If
// there is no expected type or resolution is not possible (e.g.,
// no constraints yet present), just returns `None`.
- fn resolve(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> {
+ fn resolve(self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
match self {
NoExpectation => NoExpectation,
ExpectCastableToType(t) => {
@@ -342,7 +341,7 @@
}
}
- fn to_option(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
+ fn to_option(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
match self.resolve(fcx) {
NoExpectation => None,
ExpectCastableToType(ty) |
@@ -355,7 +354,7 @@
/// a **hard constraint** (i.e., something that must be satisfied
/// for the program to type-check). `only_has_type` will return
/// such a constraint, if it exists.
- fn only_has_type(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
+ fn only_has_type(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
match self.resolve(fcx) {
ExpectHasType(ty) => Some(ty),
NoExpectation | ExpectCastableToType(_) | ExpectRvalueLikeUnsized(_) => None,
@@ -364,7 +363,7 @@
/// Like `only_has_type`, but instead of returning `None` if no
/// hard constraint exists, creates a fresh type variable.
- fn coercion_target_type(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>, span: Span) -> Ty<'tcx> {
+ fn coercion_target_type(self, fcx: &FnCtxt<'a, 'tcx>, span: Span) -> Ty<'tcx> {
self.only_has_type(fcx)
.unwrap_or_else(|| {
fcx.next_ty_var(TypeVariableOrigin {
@@ -491,21 +490,21 @@
}
}
-pub struct BreakableCtxt<'gcx: 'tcx, 'tcx> {
+pub struct BreakableCtxt<'tcx> {
may_break: bool,
// this is `null` for loops where break with a value is illegal,
// such as `while`, `for`, and `while let`
- coerce: Option<DynamicCoerceMany<'gcx, 'tcx>>,
+ coerce: Option<DynamicCoerceMany<'tcx>>,
}
-pub struct EnclosingBreakables<'gcx: 'tcx, 'tcx> {
- stack: Vec<BreakableCtxt<'gcx, 'tcx>>,
+pub struct EnclosingBreakables<'tcx> {
+ stack: Vec<BreakableCtxt<'tcx>>,
by_id: HirIdMap<usize>,
}
-impl<'gcx, 'tcx> EnclosingBreakables<'gcx, 'tcx> {
- fn find_breakable(&mut self, target_id: hir::HirId) -> &mut BreakableCtxt<'gcx, 'tcx> {
+impl<'tcx> EnclosingBreakables<'tcx> {
+ fn find_breakable(&mut self, target_id: hir::HirId) -> &mut BreakableCtxt<'tcx> {
let ix = *self.by_id.get(&target_id).unwrap_or_else(|| {
bug!("could not find enclosing breakable with id {}", target_id);
});
@@ -513,7 +512,7 @@
}
}
-pub struct FnCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
+pub struct FnCtxt<'a, 'tcx: 'a> {
body_id: hir::HirId,
/// The parameter environment used for proving trait obligations
@@ -530,7 +529,7 @@
/// expects the types within the function to be consistent.
err_count_on_creation: usize,
- ret_coercion: Option<RefCell<DynamicCoerceMany<'gcx, 'tcx>>>,
+ ret_coercion: Option<RefCell<DynamicCoerceMany<'tcx>>>,
ret_coercion_span: RefCell<Option<Span>>,
yield_ty: Option<Ty<'tcx>>,
@@ -573,13 +572,13 @@
/// Whether any child nodes have any type errors.
has_errors: Cell<bool>,
- enclosing_breakables: RefCell<EnclosingBreakables<'gcx, 'tcx>>,
+ enclosing_breakables: RefCell<EnclosingBreakables<'tcx>>,
- inh: &'a Inherited<'a, 'gcx, 'tcx>,
+ inh: &'a Inherited<'a, 'tcx>,
}
-impl<'a, 'gcx, 'tcx> Deref for FnCtxt<'a, 'gcx, 'tcx> {
- type Target = Inherited<'a, 'gcx, 'tcx>;
+impl<'a, 'tcx> Deref for FnCtxt<'a, 'tcx> {
+ type Target = Inherited<'a, 'tcx>;
fn deref(&self) -> &Self::Target {
&self.inh
}
@@ -587,15 +586,14 @@
/// Helper type of a temporary returned by `Inherited::build(...)`.
/// Necessary because we can't write the following bound:
-/// `F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Inherited<'b, 'gcx, 'tcx>)`.
-pub struct InheritedBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- infcx: infer::InferCtxtBuilder<'a, 'gcx, 'tcx>,
+/// `F: for<'b, 'tcx> where 'tcx FnOnce(Inherited<'b, 'tcx>)`.
+pub struct InheritedBuilder<'tcx> {
+ infcx: infer::InferCtxtBuilder<'tcx>,
def_id: DefId,
}
-impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> {
- pub fn build(tcx: TyCtxt<'a, 'gcx, 'gcx>, def_id: DefId)
- -> InheritedBuilder<'a, 'gcx, 'tcx> {
+impl Inherited<'_, 'tcx> {
+ pub fn build(tcx: TyCtxt<'tcx>, def_id: DefId) -> InheritedBuilder<'tcx> {
let hir_id_root = if def_id.is_local() {
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
DefId::local(hir_id.owner)
@@ -610,20 +608,21 @@
}
}
-impl<'a, 'gcx, 'tcx> InheritedBuilder<'a, 'gcx, 'tcx> {
- fn enter<F, R>(&'tcx mut self, f: F) -> R
- where F: for<'b> FnOnce(Inherited<'b, 'gcx, 'tcx>) -> R
+impl<'tcx> InheritedBuilder<'tcx> {
+ fn enter<F, R>(&mut self, f: F) -> R
+ where
+ F: for<'a> FnOnce(Inherited<'a, 'tcx>) -> R,
{
let def_id = self.def_id;
self.infcx.enter(|infcx| f(Inherited::new(infcx, def_id)))
}
}
-impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> {
- fn new(infcx: InferCtxt<'a, 'gcx, 'tcx>, def_id: DefId) -> Self {
+impl Inherited<'a, 'tcx> {
+ fn new(infcx: InferCtxt<'a, 'tcx>, def_id: DefId) -> Self {
let tcx = infcx.tcx;
let item_id = tcx.hir().as_local_hir_id(def_id);
- let body_id = item_id.and_then(|id| tcx.hir().maybe_body_owned_by_by_hir_id(id));
+ let body_id = item_id.and_then(|id| tcx.hir().maybe_body_owned_by(id));
let implicit_region_bound = body_id.map(|body_id| {
let body = tcx.hir().body(body_id);
tcx.mk_region(ty::ReScope(region::Scope {
@@ -685,9 +684,11 @@
}
}
-struct CheckItemTypesVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx> }
+struct CheckItemTypesVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
+}
-impl<'a, 'tcx> ItemLikeVisitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> {
+impl ItemLikeVisitor<'tcx> for CheckItemTypesVisitor<'tcx> {
fn visit_item(&mut self, i: &'tcx hir::Item) {
check_item_type(self.tcx, i);
}
@@ -695,33 +696,33 @@
fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem) { }
}
-pub fn check_wf_new<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Result<(), ErrorReported> {
+pub fn check_wf_new<'tcx>(tcx: TyCtxt<'tcx>) -> Result<(), ErrorReported> {
tcx.sess.track_errors(|| {
let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(tcx);
tcx.hir().krate().par_visit_all_item_likes(&mut visit);
})
}
-fn check_mod_item_types<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) {
+fn check_mod_item_types<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
tcx.hir().visit_item_likes_in_module(module_def_id, &mut CheckItemTypesVisitor { tcx });
}
-fn typeck_item_bodies<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) {
+fn typeck_item_bodies<'tcx>(tcx: TyCtxt<'tcx>, crate_num: CrateNum) {
debug_assert!(crate_num == LOCAL_CRATE);
tcx.par_body_owners(|body_owner_def_id| {
tcx.ensure().typeck_tables_of(body_owner_def_id);
});
}
-fn check_item_well_formed<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
+fn check_item_well_formed<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) {
wfcheck::check_item_well_formed(tcx, def_id);
}
-fn check_trait_item_well_formed<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
+fn check_trait_item_well_formed<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) {
wfcheck::check_trait_item(tcx, def_id);
}
-fn check_impl_item_well_formed<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
+fn check_impl_item_well_formed<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) {
wfcheck::check_impl_item(tcx, def_id);
}
@@ -741,9 +742,7 @@
};
}
-fn adt_destructor<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> Option<ty::Destructor> {
+fn adt_destructor<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Option<ty::Destructor> {
tcx.calculate_dtor(def_id, &mut dropck::check_drop_impl)
}
@@ -756,10 +755,10 @@
/// may not succeed. In some cases where this function returns `None`
/// (notably closures), `typeck_tables(def_id)` would wind up
/// redirecting to the owning function.
-fn primary_body_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- id: hir::HirId)
- -> Option<(hir::BodyId, Option<&'tcx hir::FnDecl>)>
-{
+fn primary_body_of<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ id: hir::HirId,
+) -> Option<(hir::BodyId, Option<&'tcx hir::FnDecl>)> {
match tcx.hir().get_by_hir_id(id) {
Node::Item(item) => {
match item.node {
@@ -797,9 +796,7 @@
}
}
-fn has_typeck_tables<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> bool {
+fn has_typeck_tables<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
// Closures' tables come from their outermost function,
// as they are part of the same "inference environment".
let outer_def_id = tcx.closure_base_def_id(def_id);
@@ -811,15 +808,11 @@
primary_body_of(tcx, id).is_some()
}
-fn used_trait_imports<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> &'tcx DefIdSet {
+fn used_trait_imports<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx DefIdSet {
&*tcx.typeck_tables_of(def_id).used_trait_imports
}
-fn typeck_tables_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> &'tcx ty::TypeckTables<'tcx> {
+fn typeck_tables_of<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx ty::TypeckTables<'tcx> {
// Closures' tables come from their outermost function,
// as they are part of the same "inference environment".
let outer_def_id = tcx.closure_base_def_id(def_id);
@@ -828,7 +821,7 @@
}
let id = tcx.hir().as_local_hir_id(def_id).unwrap();
- let span = tcx.hir().span_by_hir_id(id);
+ let span = tcx.hir().span(id);
// Figure out what primary body this item has.
let (body_id, fn_decl) = primary_body_of(tcx, id).unwrap_or_else(|| {
@@ -919,19 +912,19 @@
tables
}
-fn check_abi<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, span: Span, abi: Abi) {
+fn check_abi<'tcx>(tcx: TyCtxt<'tcx>, span: Span, abi: Abi) {
if !tcx.sess.target.target.is_abi_supported(abi) {
struct_span_err!(tcx.sess, span, E0570,
"The ABI `{}` is not supported for the current target", abi).emit()
}
}
-struct GatherLocalsVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
+struct GatherLocalsVisitor<'a, 'tcx: 'a> {
+ fcx: &'a FnCtxt<'a, 'tcx>,
parent_id: hir::HirId,
}
-impl<'a, 'gcx, 'tcx> GatherLocalsVisitor<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> GatherLocalsVisitor<'a, 'tcx> {
fn assign(&mut self, span: Span, nid: hir::HirId, ty_opt: Option<LocalTy<'tcx>>) -> Ty<'tcx> {
match ty_opt {
None => {
@@ -955,13 +948,13 @@
}
}
-impl<'a, 'gcx, 'tcx> Visitor<'gcx> for GatherLocalsVisitor<'a, 'gcx, 'tcx> {
- fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'gcx> {
+impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> {
+ fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::None
}
// Add explicitly-declared locals.
- fn visit_local(&mut self, local: &'gcx hir::Local) {
+ fn visit_local(&mut self, local: &'tcx hir::Local) {
let local_ty = match local.ty {
Some(ref ty) => {
let o_ty = self.fcx.to_ty(&ty);
@@ -996,7 +989,7 @@
}
// Add pattern bindings.
- fn visit_pat(&mut self, p: &'gcx hir::Pat) {
+ fn visit_pat(&mut self, p: &'tcx hir::Pat) {
if let PatKind::Binding(_, _, ident, _) = p.node {
let var_ty = self.assign(p.span, p.hir_id, None);
@@ -1016,8 +1009,14 @@
}
// Don't descend into the bodies of nested closures
- fn visit_fn(&mut self, _: intravisit::FnKind<'gcx>, _: &'gcx hir::FnDecl,
- _: hir::BodyId, _: Span, _: hir::HirId) { }
+ fn visit_fn(
+ &mut self,
+ _: intravisit::FnKind<'tcx>,
+ _: &'tcx hir::FnDecl,
+ _: hir::BodyId,
+ _: Span,
+ _: hir::HirId,
+ ) { }
}
/// When `check_fn` is invoked on a generator (i.e., a body that
@@ -1040,15 +1039,15 @@
///
/// * ...
/// * inherited: other fields inherited from the enclosing fn (if any)
-fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- fn_sig: ty::FnSig<'tcx>,
- decl: &'gcx hir::FnDecl,
- fn_id: hir::HirId,
- body: &'gcx hir::Body,
- can_be_generator: Option<hir::GeneratorMovability>)
- -> (FnCtxt<'a, 'gcx, 'tcx>, Option<GeneratorTypes<'tcx>>)
-{
+fn check_fn<'a, 'tcx>(
+ inherited: &'a Inherited<'a, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ fn_sig: ty::FnSig<'tcx>,
+ decl: &'tcx hir::FnDecl,
+ fn_id: hir::HirId,
+ body: &'tcx hir::Body,
+ can_be_generator: Option<hir::GeneratorMovability>,
+) -> (FnCtxt<'a, 'tcx>, Option<GeneratorTypes<'tcx>>) {
let mut fn_sig = fn_sig.clone();
debug!("check_fn(sig={:?}, fn_id={}, param_env={:?})", fn_sig, fn_id, param_env);
@@ -1194,7 +1193,7 @@
}
let inputs = fn_sig.inputs();
- let span = fcx.tcx.hir().span_by_hir_id(fn_id);
+ let span = fcx.tcx.hir().span(fn_id);
if inputs.len() == 1 {
let arg_is_panic_info = match inputs[0].sty {
ty::Ref(region, ty, mutbl) => match ty.sty {
@@ -1247,7 +1246,7 @@
}
let inputs = fn_sig.inputs();
- let span = fcx.tcx.hir().span_by_hir_id(fn_id);
+ let span = fcx.tcx.hir().span(fn_id);
if inputs.len() == 1 {
let arg_is_alloc_layout = match inputs[0].sty {
ty::Adt(ref adt, _) => {
@@ -1287,9 +1286,7 @@
(fcx, gen_ty)
}
-fn check_struct<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- id: hir::HirId,
- span: Span) {
+fn check_struct<'tcx>(tcx: TyCtxt<'tcx>, id: hir::HirId, span: Span) {
let def_id = tcx.hir().local_def_id_from_hir_id(id);
let def = tcx.adt_def(def_id);
def.destructor(tcx); // force the destructor to be evaluated
@@ -1303,9 +1300,7 @@
check_packed(tcx, span, def_id);
}
-fn check_union<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- id: hir::HirId,
- span: Span) {
+fn check_union<'tcx>(tcx: TyCtxt<'tcx>, id: hir::HirId, span: Span) {
let def_id = tcx.hir().local_def_id_from_hir_id(id);
let def = tcx.adt_def(def_id);
def.destructor(tcx); // force the destructor to be evaluated
@@ -1314,12 +1309,7 @@
check_packed(tcx, span, def_id);
}
-fn check_opaque<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- substs: SubstsRef<'tcx>,
- span: Span,
-) {
+fn check_opaque<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, substs: SubstsRef<'tcx>, span: Span) {
if let Err(partially_expanded_type) = tcx.try_expand_impl_trait_type(def_id, substs) {
let mut err = struct_span_err!(
tcx.sess, span, E0720,
@@ -1335,7 +1325,7 @@
}
}
-pub fn check_item_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, it: &'tcx hir::Item) {
+pub fn check_item_type<'tcx>(tcx: TyCtxt<'tcx>, it: &'tcx hir::Item) {
debug!(
"check_item_type(it.hir_id={}, it.name={})",
it.hir_id,
@@ -1434,7 +1424,7 @@
}
}
-fn maybe_check_static_with_link_section(tcx: TyCtxt<'_, '_, '_>, id: DefId, span: Span) {
+fn maybe_check_static_with_link_section(tcx: TyCtxt<'_>, id: DefId, span: Span) {
// Only restricted on wasm32 target for now
if !tcx.sess.opts.target_triple.triple().starts_with("wasm32") {
return
@@ -1472,18 +1462,17 @@
}
}
-fn check_on_unimplemented<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- trait_def_id: DefId,
- item: &hir::Item) {
+fn check_on_unimplemented<'tcx>(tcx: TyCtxt<'tcx>, trait_def_id: DefId, item: &hir::Item) {
let item_def_id = tcx.hir().local_def_id_from_hir_id(item.hir_id);
// an error would be reported if this fails.
let _ = traits::OnUnimplementedDirective::of_item(tcx, trait_def_id, item_def_id);
}
-fn report_forbidden_specialization<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- impl_item: &hir::ImplItem,
- parent_impl: DefId)
-{
+fn report_forbidden_specialization<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ impl_item: &hir::ImplItem,
+ parent_impl: DefId,
+) {
let mut err = struct_span_err!(
tcx.sess, impl_item.span, E0520,
"`{}` specializes an item from a parent `impl`, but \
@@ -1506,12 +1495,13 @@
err.emit();
}
-fn check_specialization_validity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- trait_def: &ty::TraitDef,
- trait_item: &ty::AssocItem,
- impl_id: DefId,
- impl_item: &hir::ImplItem)
-{
+fn check_specialization_validity<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ trait_def: &ty::TraitDef,
+ trait_item: &ty::AssocItem,
+ impl_id: DefId,
+ impl_item: &hir::ImplItem,
+) {
let ancestors = trait_def.ancestors(tcx, impl_id);
let kind = match impl_item.node {
@@ -1532,11 +1522,13 @@
}
-fn check_impl_items_against_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- impl_span: Span,
- impl_id: DefId,
- impl_trait_ref: ty::TraitRef<'tcx>,
- impl_item_refs: &[hir::ImplItemRef]) {
+fn check_impl_items_against_trait<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ impl_span: Span,
+ impl_id: DefId,
+ impl_trait_ref: ty::TraitRef<'tcx>,
+ impl_item_refs: &[hir::ImplItemRef],
+) {
let impl_span = tcx.sess.source_map().def_span(impl_span);
// If the trait reference itself is erroneous (so the compilation is going
@@ -1693,10 +1685,7 @@
/// Checks whether a type can be represented in memory. In particular, it
/// identifies types that contain themselves without indirection through a
/// pointer, which would mean their size is unbounded.
-fn check_representable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- sp: Span,
- item_def_id: DefId)
- -> bool {
+fn check_representable<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, item_def_id: DefId) -> bool {
let rty = tcx.type_of(item_def_id);
// Check that it is possible to represent this type. This call identifies
@@ -1715,10 +1704,10 @@
}
Representability::Representable | Representability::ContainsRecursive => (),
}
- return true
+ return true;
}
-pub fn check_simd<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, def_id: DefId) {
+pub fn check_simd<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, def_id: DefId) {
let t = tcx.type_of(def_id);
if let ty::Adt(def, substs) = t.sty {
if def.is_struct() {
@@ -1747,7 +1736,7 @@
}
}
-fn check_packed<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, def_id: DefId) {
+fn check_packed<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, def_id: DefId) {
let repr = tcx.adt_def(def_id).repr;
if repr.packed() {
for attr in tcx.get_attrs(def_id).iter() {
@@ -1771,9 +1760,7 @@
}
}
-fn check_packed_inner<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- stack: &mut Vec<DefId>) -> bool {
+fn check_packed_inner<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, stack: &mut Vec<DefId>) -> bool {
let t = tcx.type_of(def_id);
if stack.contains(&def_id) {
debug!("check_packed_inner: {:?} is recursive", t);
@@ -1801,7 +1788,7 @@
false
}
-fn check_transparent<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, def_id: DefId) {
+fn check_transparent<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, def_id: DefId) {
let adt = tcx.adt_def(def_id);
if !adt.repr.transparent() {
return;
@@ -1881,10 +1868,7 @@
}
#[allow(trivial_numeric_casts)]
-pub fn check_enum<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- sp: Span,
- vs: &'tcx [hir::Variant],
- id: hir::HirId) {
+pub fn check_enum<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, vs: &'tcx [hir::Variant], id: hir::HirId) {
let def_id = tcx.hir().local_def_id_from_hir_id(id);
let def = tcx.adt_def(def_id);
def.destructor(tcx); // force the destructor to be evaluated
@@ -1925,11 +1909,11 @@
let variant_i_hir_id = tcx.hir().as_local_hir_id(variant_did).unwrap();
let variant_i = tcx.hir().expect_variant(variant_i_hir_id);
let i_span = match variant_i.node.disr_expr {
- Some(ref expr) => tcx.hir().span_by_hir_id(expr.hir_id),
- None => tcx.hir().span_by_hir_id(variant_i_hir_id)
+ Some(ref expr) => tcx.hir().span(expr.hir_id),
+ None => tcx.hir().span(variant_i_hir_id)
};
let span = match v.node.disr_expr {
- Some(ref expr) => tcx.hir().span_by_hir_id(expr.hir_id),
+ Some(ref expr) => tcx.hir().span(expr.hir_id),
None => v.span
};
struct_span_err!(tcx.sess, span, E0081,
@@ -1945,18 +1929,17 @@
check_transparent(tcx, sp, def_id);
}
-fn report_unexpected_variant_res<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- res: Res,
- span: Span,
- qpath: &QPath) {
+fn report_unexpected_variant_res<'tcx>(tcx: TyCtxt<'tcx>, res: Res, span: Span, qpath: &QPath) {
span_err!(tcx.sess, span, E0533,
"expected unit struct/variant or constant, found {} `{}`",
res.descr(),
hir::print::to_string(tcx.hir(), |s| s.print_qpath(qpath, false)));
}
-impl<'a, 'gcx, 'tcx> AstConv<'gcx, 'tcx> for FnCtxt<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
+impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
fn get_type_parameter_bounds(&self, _: Span, def_id: DefId)
-> &'tcx ty::GenericPredicates<'tcx>
@@ -2082,11 +2065,12 @@
TupleArguments,
}
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
- pub fn new(inh: &'a Inherited<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- body_id: hir::HirId)
- -> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
+ pub fn new(
+ inh: &'a Inherited<'a, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ body_id: hir::HirId,
+ ) -> FnCtxt<'a, 'tcx> {
FnCtxt {
body_id,
param_env,
@@ -2173,30 +2157,31 @@
ty
}
- fn record_deferred_call_resolution(&self,
- closure_def_id: DefId,
- r: DeferredCallResolution<'gcx, 'tcx>) {
+ fn record_deferred_call_resolution(
+ &self,
+ closure_def_id: DefId,
+ r: DeferredCallResolution<'tcx>,
+ ) {
let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
deferred_call_resolutions.entry(closure_def_id).or_default().push(r);
}
- fn remove_deferred_call_resolutions(&self,
- closure_def_id: DefId)
- -> Vec<DeferredCallResolution<'gcx, 'tcx>>
- {
+ fn remove_deferred_call_resolutions(
+ &self,
+ closure_def_id: DefId,
+ ) -> Vec<DeferredCallResolution<'tcx>> {
let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
deferred_call_resolutions.remove(&closure_def_id).unwrap_or(vec![])
}
pub fn tag(&self) -> String {
- let self_ptr: *const FnCtxt<'_, '_, '_> = self;
- format!("{:?}", self_ptr)
+ format!("{:p}", self)
}
pub fn local_ty(&self, span: Span, nid: hir::HirId) -> LocalTy<'tcx> {
self.locals.borrow().get(&nid).cloned().unwrap_or_else(||
span_bug!(span, "no type for local variable {}",
- self.tcx.hir().hir_to_string(nid))
+ self.tcx.hir().node_to_string(nid))
)
}
@@ -2216,15 +2201,15 @@
self.tables.borrow_mut().field_indices_mut().insert(hir_id, index);
}
+ fn write_resolution(&self, hir_id: hir::HirId, r: Result<(DefKind, DefId), ErrorReported>) {
+ self.tables.borrow_mut().type_dependent_defs_mut().insert(hir_id, r);
+ }
+
pub fn write_method_call(&self,
hir_id: hir::HirId,
method: MethodCallee<'tcx>) {
debug!("write_method_call(hir_id={:?}, method={:?})", hir_id, method);
- self.tables
- .borrow_mut()
- .type_dependent_defs_mut()
- .insert(hir_id, Ok((DefKind::Method, method.def_id)));
-
+ self.write_resolution(hir_id, Ok((DefKind::Method, method.def_id)));
self.write_substs(hir_id, method.substs);
// When the method is confirmed, the `method.substs` includes
@@ -2532,9 +2517,8 @@
Some(&t) => t,
None if self.is_tainted_by_errors() => self.tcx.types.err,
None => {
- let node_id = self.tcx.hir().hir_to_node_id(id);
bug!("no type for node {}: {} in fcx {}",
- node_id, self.tcx.hir().node_to_string(node_id),
+ id, self.tcx.hir().node_to_string(id),
self.tag());
}
}
@@ -2673,14 +2657,14 @@
ret_ty.builtin_deref(true).unwrap()
}
- fn lookup_indexing(&self,
- expr: &hir::Expr,
- base_expr: &'gcx hir::Expr,
- base_ty: Ty<'tcx>,
- idx_ty: Ty<'tcx>,
- needs: Needs)
- -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
- {
+ fn lookup_indexing(
+ &self,
+ expr: &hir::Expr,
+ base_expr: &'tcx hir::Expr,
+ base_ty: Ty<'tcx>,
+ idx_ty: Ty<'tcx>,
+ needs: Needs,
+ ) -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)> {
// FIXME(#18741) -- this is almost but not quite the same as the
// autoderef that normal method probing does. They could likely be
// consolidated.
@@ -2699,14 +2683,14 @@
/// supports builtin indexing or overloaded indexing.
/// This loop implements one step in that search; the autoderef loop
/// is implemented by `lookup_indexing`.
- fn try_index_step(&self,
- expr: &hir::Expr,
- base_expr: &hir::Expr,
- autoderef: &Autoderef<'a, 'gcx, 'tcx>,
- needs: Needs,
- index_ty: Ty<'tcx>)
- -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
- {
+ fn try_index_step(
+ &self,
+ expr: &hir::Expr,
+ base_expr: &hir::Expr,
+ autoderef: &Autoderef<'a, 'tcx>,
+ needs: Needs,
+ index_ty: Ty<'tcx>,
+ ) -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)> {
let adjusted_ty = autoderef.unambiguous_final_ty(self);
debug!("try_index_step(expr={:?}, base_expr={:?}, adjusted_ty={:?}, \
index_ty={:?})",
@@ -2824,14 +2808,15 @@
method
}
- fn check_method_argument_types(&self,
- sp: Span,
- expr_sp: Span,
- method: Result<MethodCallee<'tcx>, ()>,
- args_no_rcvr: &'gcx [hir::Expr],
- tuple_arguments: TupleArgumentsFlag,
- expected: Expectation<'tcx>)
- -> Ty<'tcx> {
+ fn check_method_argument_types(
+ &self,
+ sp: Span,
+ expr_sp: Span,
+ method: Result<MethodCallee<'tcx>, ()>,
+ args_no_rcvr: &'tcx [hir::Expr],
+ tuple_arguments: TupleArgumentsFlag,
+ expected: Expectation<'tcx>,
+ ) -> Ty<'tcx> {
let has_error = match method {
Ok(method) => {
method.substs.references_error() || method.sig.references_error()
@@ -2887,10 +2872,12 @@
}
}
- fn obligations_for_self_ty<'b>(&'b self, self_ty: ty::TyVid)
- -> impl Iterator<Item=(ty::PolyTraitRef<'tcx>, traits::PredicateObligation<'tcx>)>
- + Captures<'gcx> + 'b
- {
+ fn obligations_for_self_ty<'b>(
+ &'b self,
+ self_ty: ty::TyVid,
+ ) -> impl Iterator<Item = (ty::PolyTraitRef<'tcx>, traits::PredicateObligation<'tcx>)>
+ + Captures<'tcx>
+ + 'b {
// FIXME: consider using `sub_root_var` here so we
// can see through subtyping.
let ty_var_root = self.root_var(self_ty);
@@ -2933,15 +2920,17 @@
/// Generic function that factors out common logic from function calls,
/// method calls and overloaded operators.
- fn check_argument_types(&self,
- sp: Span,
- expr_sp: Span,
- fn_inputs: &[Ty<'tcx>],
- expected_arg_tys: &[Ty<'tcx>],
- args: &'gcx [hir::Expr],
- c_variadic: bool,
- tuple_arguments: TupleArgumentsFlag,
- def_span: Option<Span>) {
+ fn check_argument_types(
+ &self,
+ sp: Span,
+ expr_sp: Span,
+ fn_inputs: &[Ty<'tcx>],
+ expected_arg_tys: &[Ty<'tcx>],
+ args: &'tcx [hir::Expr],
+ c_variadic: bool,
+ tuple_arguments: TupleArgumentsFlag,
+ def_span: Option<Span>,
+ ) {
let tcx = self.tcx;
// Grab the argument types, supplying fresh type variables
@@ -3205,82 +3194,6 @@
}
}
- fn check_expr_eq_type(&self,
- expr: &'gcx hir::Expr,
- expected: Ty<'tcx>) {
- let ty = self.check_expr_with_hint(expr, expected);
- self.demand_eqtype(expr.span, expected, ty);
- }
-
- pub fn check_expr_has_type_or_error(&self,
- expr: &'gcx hir::Expr,
- expected: Ty<'tcx>) -> Ty<'tcx> {
- self.check_expr_meets_expectation_or_error(expr, ExpectHasType(expected))
- }
-
- fn check_expr_meets_expectation_or_error(&self,
- expr: &'gcx hir::Expr,
- expected: Expectation<'tcx>) -> Ty<'tcx> {
- let expected_ty = expected.to_option(&self).unwrap_or(self.tcx.types.bool);
- let mut ty = self.check_expr_with_expectation(expr, expected);
-
- // While we don't allow *arbitrary* coercions here, we *do* allow
- // coercions from ! to `expected`.
- if ty.is_never() {
- assert!(!self.tables.borrow().adjustments().contains_key(expr.hir_id),
- "expression with never type wound up being adjusted");
- let adj_ty = self.next_diverging_ty_var(
- TypeVariableOrigin {
- kind: TypeVariableOriginKind::AdjustmentType,
- span: expr.span,
- },
- );
- self.apply_adjustments(expr, vec![Adjustment {
- kind: Adjust::NeverToAny,
- target: adj_ty
- }]);
- ty = adj_ty;
- }
-
- if let Some(mut err) = self.demand_suptype_diag(expr.span, expected_ty, ty) {
- let expr = match &expr.node {
- ExprKind::DropTemps(expr) => expr,
- _ => expr,
- };
- // Error possibly reported in `check_assign` so avoid emitting error again.
- err.emit_unless(self.is_assign_to_bool(expr, expected_ty));
- }
- ty
- }
-
- fn check_expr_coercable_to_type(&self,
- expr: &'gcx hir::Expr,
- expected: Ty<'tcx>) -> Ty<'tcx> {
- let ty = self.check_expr_with_hint(expr, expected);
- // checks don't need two phase
- self.demand_coerce(expr, ty, expected, AllowTwoPhase::No)
- }
-
- fn check_expr_with_hint(&self,
- expr: &'gcx hir::Expr,
- expected: Ty<'tcx>) -> Ty<'tcx> {
- self.check_expr_with_expectation(expr, ExpectHasType(expected))
- }
-
- fn check_expr_with_expectation(&self,
- expr: &'gcx hir::Expr,
- expected: Expectation<'tcx>) -> Ty<'tcx> {
- self.check_expr_with_expectation_and_needs(expr, expected, Needs::None)
- }
-
- fn check_expr(&self, expr: &'gcx hir::Expr) -> Ty<'tcx> {
- self.check_expr_with_expectation(expr, NoExpectation)
- }
-
- fn check_expr_with_needs(&self, expr: &'gcx hir::Expr, needs: Needs) -> Ty<'tcx> {
- self.check_expr_with_expectation_and_needs(expr, NoExpectation, needs)
- }
-
// Determine the `Self` type, using fresh variables for all variables
// declared on the impl declaration e.g., `impl<A,B> for Vec<(A,B)>`
// would return `($0, $1)` where `$0` and `$1` are freshly instantiated type
@@ -3350,462 +3263,6 @@
expect_args
}
- // Checks a method call.
- fn check_method_call(&self,
- expr: &'gcx hir::Expr,
- segment: &hir::PathSegment,
- span: Span,
- args: &'gcx [hir::Expr],
- expected: Expectation<'tcx>,
- needs: Needs) -> Ty<'tcx> {
- let rcvr = &args[0];
- let rcvr_t = self.check_expr_with_needs(&rcvr, needs);
- // no need to check for bot/err -- callee does that
- let rcvr_t = self.structurally_resolved_type(args[0].span, rcvr_t);
-
- let method = match self.lookup_method(rcvr_t,
- segment,
- span,
- expr,
- rcvr) {
- Ok(method) => {
- self.write_method_call(expr.hir_id, method);
- Ok(method)
- }
- Err(error) => {
- if segment.ident.name != kw::Invalid {
- self.report_method_error(span,
- rcvr_t,
- segment.ident,
- SelfSource::MethodCall(rcvr),
- error,
- Some(args));
- }
- Err(())
- }
- };
-
- // Call the generic checker.
- self.check_method_argument_types(span,
- expr.span,
- method,
- &args[1..],
- DontTupleArguments,
- expected)
- }
-
- fn check_return_expr(&self, return_expr: &'gcx hir::Expr) {
- let ret_coercion =
- self.ret_coercion
- .as_ref()
- .unwrap_or_else(|| span_bug!(return_expr.span,
- "check_return_expr called outside fn body"));
-
- let ret_ty = ret_coercion.borrow().expected_ty();
- let return_expr_ty = self.check_expr_with_hint(return_expr, ret_ty.clone());
- ret_coercion.borrow_mut()
- .coerce(self,
- &self.cause(return_expr.span,
- ObligationCauseCode::ReturnType(return_expr.hir_id)),
- return_expr,
- return_expr_ty);
- }
-
- // Check field access expressions
- fn check_field(&self,
- expr: &'gcx hir::Expr,
- needs: Needs,
- base: &'gcx hir::Expr,
- field: ast::Ident) -> Ty<'tcx> {
- let expr_t = self.check_expr_with_needs(base, needs);
- let expr_t = self.structurally_resolved_type(base.span,
- expr_t);
- let mut private_candidate = None;
- let mut autoderef = self.autoderef(expr.span, expr_t);
- while let Some((base_t, _)) = autoderef.next() {
- match base_t.sty {
- ty::Adt(base_def, substs) if !base_def.is_enum() => {
- debug!("struct named {:?}", base_t);
- let (ident, def_scope) =
- self.tcx.adjust_ident_and_get_scope(field, base_def.did, self.body_id);
- let fields = &base_def.non_enum_variant().fields;
- if let Some(index) = fields.iter().position(|f| f.ident.modern() == ident) {
- let field = &fields[index];
- let field_ty = self.field_ty(expr.span, field, substs);
- // Save the index of all fields regardless of their visibility in case
- // of error recovery.
- self.write_field_index(expr.hir_id, index);
- if field.vis.is_accessible_from(def_scope, self.tcx) {
- let adjustments = autoderef.adjust_steps(self, needs);
- self.apply_adjustments(base, adjustments);
- autoderef.finalize(self);
-
- self.tcx.check_stability(field.did, Some(expr.hir_id), expr.span);
- return field_ty;
- }
- private_candidate = Some((base_def.did, field_ty));
- }
- }
- ty::Tuple(ref tys) => {
- let fstr = field.as_str();
- if let Ok(index) = fstr.parse::<usize>() {
- if fstr == index.to_string() {
- if let Some(field_ty) = tys.get(index) {
- let adjustments = autoderef.adjust_steps(self, needs);
- self.apply_adjustments(base, adjustments);
- autoderef.finalize(self);
-
- self.write_field_index(expr.hir_id, index);
- return field_ty.expect_ty();
- }
- }
- }
- }
- _ => {}
- }
- }
- autoderef.unambiguous_final_ty(self);
-
- if let Some((did, field_ty)) = private_candidate {
- let struct_path = self.tcx().def_path_str(did);
- let mut err = struct_span_err!(self.tcx().sess, expr.span, E0616,
- "field `{}` of struct `{}` is private",
- field, struct_path);
- // Also check if an accessible method exists, which is often what is meant.
- if self.method_exists(field, expr_t, expr.hir_id, false)
- && !self.expr_in_place(expr.hir_id)
- {
- self.suggest_method_call(
- &mut err,
- &format!("a method `{}` also exists, call it with parentheses", field),
- field,
- expr_t,
- expr.hir_id,
- );
- }
- err.emit();
- field_ty
- } else if field.name == kw::Invalid {
- self.tcx().types.err
- } else if self.method_exists(field, expr_t, expr.hir_id, true) {
- let mut err = type_error_struct!(self.tcx().sess, field.span, expr_t, E0615,
- "attempted to take value of method `{}` on type `{}`",
- field, expr_t);
-
- if !self.expr_in_place(expr.hir_id) {
- self.suggest_method_call(
- &mut err,
- "use parentheses to call the method",
- field,
- expr_t,
- expr.hir_id
- );
- } else {
- err.help("methods are immutable and cannot be assigned to");
- }
-
- err.emit();
- self.tcx().types.err
- } else {
- if !expr_t.is_primitive_ty() {
- let mut err = self.no_such_field_err(field.span, field, expr_t);
-
- match expr_t.sty {
- ty::Adt(def, _) if !def.is_enum() => {
- if let Some(suggested_field_name) =
- Self::suggest_field_name(def.non_enum_variant(),
- &field.as_str(), vec![]) {
- err.span_suggestion(
- field.span,
- "a field with a similar name exists",
- suggested_field_name.to_string(),
- Applicability::MaybeIncorrect,
- );
- } else {
- err.span_label(field.span, "unknown field");
- let struct_variant_def = def.non_enum_variant();
- let field_names = self.available_field_names(struct_variant_def);
- if !field_names.is_empty() {
- err.note(&format!("available fields are: {}",
- self.name_series_display(field_names)));
- }
- };
- }
- ty::Array(_, len) => {
- if let (Some(len), Ok(user_index)) = (
- len.assert_usize(self.tcx),
- field.as_str().parse::<u64>()
- ) {
- let base = self.tcx.sess.source_map()
- .span_to_snippet(base.span)
- .unwrap_or_else(|_|
- self.tcx.hir().hir_to_pretty_string(base.hir_id));
- let help = "instead of using tuple indexing, use array indexing";
- let suggestion = format!("{}[{}]", base, field);
- let applicability = if len < user_index {
- Applicability::MachineApplicable
- } else {
- Applicability::MaybeIncorrect
- };
- err.span_suggestion(
- expr.span, help, suggestion, applicability
- );
- }
- }
- ty::RawPtr(..) => {
- let base = self.tcx.sess.source_map()
- .span_to_snippet(base.span)
- .unwrap_or_else(|_| self.tcx.hir().hir_to_pretty_string(base.hir_id));
- let msg = format!("`{}` is a raw pointer; try dereferencing it", base);
- let suggestion = format!("(*{}).{}", base, field);
- err.span_suggestion(
- expr.span,
- &msg,
- suggestion,
- Applicability::MaybeIncorrect,
- );
- }
- _ => {}
- }
- err
- } else {
- type_error_struct!(self.tcx().sess, field.span, expr_t, E0610,
- "`{}` is a primitive type and therefore doesn't have fields",
- expr_t)
- }.emit();
- self.tcx().types.err
- }
- }
-
- // Return an hint about the closest match in field names
- fn suggest_field_name(variant: &'tcx ty::VariantDef,
- field: &str,
- skip: Vec<LocalInternedString>)
- -> Option<Symbol> {
- let names = variant.fields.iter().filter_map(|field| {
- // ignore already set fields and private fields from non-local crates
- if skip.iter().any(|x| *x == field.ident.as_str()) ||
- (!variant.def_id.is_local() && field.vis != Visibility::Public)
- {
- None
- } else {
- Some(&field.ident.name)
- }
- });
-
- find_best_match_for_name(names, field, None)
- }
-
- fn available_field_names(&self, variant: &'tcx ty::VariantDef) -> Vec<ast::Name> {
- variant.fields.iter().filter(|field| {
- let def_scope =
- self.tcx.adjust_ident_and_get_scope(field.ident, variant.def_id, self.body_id).1;
- field.vis.is_accessible_from(def_scope, self.tcx)
- })
- .map(|field| field.ident.name)
- .collect()
- }
-
- fn name_series_display(&self, names: Vec<ast::Name>) -> String {
- // dynamic limit, to never omit just one field
- let limit = if names.len() == 6 { 6 } else { 5 };
- let mut display = names.iter().take(limit)
- .map(|n| format!("`{}`", n)).collect::<Vec<_>>().join(", ");
- if names.len() > limit {
- display = format!("{} ... and {} others", display, names.len() - limit);
- }
- display
- }
-
- fn no_such_field_err<T: Display>(&self, span: Span, field: T, expr_t: &ty::TyS<'_>)
- -> DiagnosticBuilder<'_> {
- type_error_struct!(self.tcx().sess, span, expr_t, E0609,
- "no field `{}` on type `{}`",
- field, expr_t)
- }
-
- fn report_unknown_field(
- &self,
- ty: Ty<'tcx>,
- variant: &'tcx ty::VariantDef,
- field: &hir::Field,
- skip_fields: &[hir::Field],
- kind_name: &str,
- ) {
- if variant.recovered {
- return;
- }
- let mut err = self.type_error_struct_with_diag(
- field.ident.span,
- |actual| match ty.sty {
- ty::Adt(adt, ..) if adt.is_enum() => {
- struct_span_err!(self.tcx.sess, field.ident.span, E0559,
- "{} `{}::{}` has no field named `{}`",
- kind_name, actual, variant.ident, field.ident)
- }
- _ => {
- struct_span_err!(self.tcx.sess, field.ident.span, E0560,
- "{} `{}` has no field named `{}`",
- kind_name, actual, field.ident)
- }
- },
- ty);
- // prevent all specified fields from being suggested
- let skip_fields = skip_fields.iter().map(|ref x| x.ident.as_str());
- if let Some(field_name) = Self::suggest_field_name(variant,
- &field.ident.as_str(),
- skip_fields.collect()) {
- err.span_suggestion(
- field.ident.span,
- "a field with a similar name exists",
- field_name.to_string(),
- Applicability::MaybeIncorrect,
- );
- } else {
- match ty.sty {
- ty::Adt(adt, ..) => {
- if adt.is_enum() {
- err.span_label(field.ident.span,
- format!("`{}::{}` does not have this field",
- ty, variant.ident));
- } else {
- err.span_label(field.ident.span,
- format!("`{}` does not have this field", ty));
- }
- let available_field_names = self.available_field_names(variant);
- if !available_field_names.is_empty() {
- err.note(&format!("available fields are: {}",
- self.name_series_display(available_field_names)));
- }
- }
- _ => bug!("non-ADT passed to report_unknown_field")
- }
- };
- err.emit();
- }
-
- fn check_expr_struct_fields(&self,
- adt_ty: Ty<'tcx>,
- expected: Expectation<'tcx>,
- expr_id: hir::HirId,
- span: Span,
- variant: &'tcx ty::VariantDef,
- ast_fields: &'gcx [hir::Field],
- check_completeness: bool) -> bool {
- let tcx = self.tcx;
-
- let adt_ty_hint =
- self.expected_inputs_for_expected_output(span, expected, adt_ty, &[adt_ty])
- .get(0).cloned().unwrap_or(adt_ty);
- // re-link the regions that EIfEO can erase.
- self.demand_eqtype(span, adt_ty_hint, adt_ty);
-
- let (substs, adt_kind, kind_name) = match &adt_ty.sty {
- &ty::Adt(adt, substs) => {
- (substs, adt.adt_kind(), adt.variant_descr())
- }
- _ => span_bug!(span, "non-ADT passed to check_expr_struct_fields")
- };
-
- let mut remaining_fields = variant.fields.iter().enumerate().map(|(i, field)|
- (field.ident.modern(), (i, field))
- ).collect::<FxHashMap<_, _>>();
-
- let mut seen_fields = FxHashMap::default();
-
- let mut error_happened = false;
-
- // Type-check each field.
- for field in ast_fields {
- let ident = tcx.adjust_ident(field.ident, variant.def_id);
- let field_type = if let Some((i, v_field)) = remaining_fields.remove(&ident) {
- seen_fields.insert(ident, field.span);
- self.write_field_index(field.hir_id, i);
-
- // We don't look at stability attributes on
- // struct-like enums (yet...), but it's definitely not
- // a bug to have constructed one.
- if adt_kind != AdtKind::Enum {
- tcx.check_stability(v_field.did, Some(expr_id), field.span);
- }
-
- self.field_ty(field.span, v_field, substs)
- } else {
- error_happened = true;
- if let Some(prev_span) = seen_fields.get(&ident) {
- let mut err = struct_span_err!(self.tcx.sess,
- field.ident.span,
- E0062,
- "field `{}` specified more than once",
- ident);
-
- err.span_label(field.ident.span, "used more than once");
- err.span_label(*prev_span, format!("first use of `{}`", ident));
-
- err.emit();
- } else {
- self.report_unknown_field(adt_ty, variant, field, ast_fields, kind_name);
- }
-
- tcx.types.err
- };
-
- // Make sure to give a type to the field even if there's
- // an error, so we can continue type-checking.
- self.check_expr_coercable_to_type(&field.expr, field_type);
- }
-
- // Make sure the programmer specified correct number of fields.
- if kind_name == "union" {
- if ast_fields.len() != 1 {
- tcx.sess.span_err(span, "union expressions should have exactly one field");
- }
- } else if check_completeness && !error_happened && !remaining_fields.is_empty() {
- let len = remaining_fields.len();
-
- let mut displayable_field_names = remaining_fields
- .keys()
- .map(|ident| ident.as_str())
- .collect::<Vec<_>>();
-
- displayable_field_names.sort();
-
- let truncated_fields_error = if len <= 3 {
- String::new()
- } else {
- format!(" and {} other field{}", (len - 3), if len - 3 == 1 {""} else {"s"})
- };
-
- let remaining_fields_names = displayable_field_names.iter().take(3)
- .map(|n| format!("`{}`", n))
- .collect::<Vec<_>>()
- .join(", ");
-
- struct_span_err!(tcx.sess, span, E0063,
- "missing field{} {}{} in initializer of `{}`",
- if remaining_fields.len() == 1 { "" } else { "s" },
- remaining_fields_names,
- truncated_fields_error,
- adt_ty)
- .span_label(span, format!("missing {}{}",
- remaining_fields_names,
- truncated_fields_error))
- .emit();
- }
- error_happened
- }
-
- fn check_struct_fields_on_error(&self,
- fields: &'gcx [hir::Field],
- base_expr: &'gcx Option<P<hir::Expr>>) {
- for field in fields {
- self.check_expr(&field.expr);
- }
- if let Some(ref base) = *base_expr {
- self.check_expr(&base);
- }
- }
-
pub fn check_struct_path(&self,
qpath: &QPath,
hir_id: hir::HirId)
@@ -3864,839 +3321,6 @@
}
}
- fn check_expr_struct(&self,
- expr: &hir::Expr,
- expected: Expectation<'tcx>,
- qpath: &QPath,
- fields: &'gcx [hir::Field],
- base_expr: &'gcx Option<P<hir::Expr>>) -> Ty<'tcx>
- {
- // Find the relevant variant
- let (variant, adt_ty) =
- if let Some(variant_ty) = self.check_struct_path(qpath, expr.hir_id) {
- variant_ty
- } else {
- self.check_struct_fields_on_error(fields, base_expr);
- return self.tcx.types.err;
- };
-
- let path_span = match *qpath {
- QPath::Resolved(_, ref path) => path.span,
- QPath::TypeRelative(ref qself, _) => qself.span
- };
-
- // Prohibit struct expressions when non-exhaustive flag is set.
- let adt = adt_ty.ty_adt_def().expect("`check_struct_path` returned non-ADT type");
- if !adt.did.is_local() && variant.is_field_list_non_exhaustive() {
- span_err!(self.tcx.sess, expr.span, E0639,
- "cannot create non-exhaustive {} using struct expression",
- adt.variant_descr());
- }
-
- let error_happened = self.check_expr_struct_fields(adt_ty, expected, expr.hir_id, path_span,
- variant, fields, base_expr.is_none());
- if let &Some(ref base_expr) = base_expr {
- // If check_expr_struct_fields hit an error, do not attempt to populate
- // the fields with the base_expr. This could cause us to hit errors later
- // when certain fields are assumed to exist that in fact do not.
- if !error_happened {
- self.check_expr_has_type_or_error(base_expr, adt_ty);
- match adt_ty.sty {
- ty::Adt(adt, substs) if adt.is_struct() => {
- let fru_field_types = adt.non_enum_variant().fields.iter().map(|f| {
- self.normalize_associated_types_in(expr.span, &f.ty(self.tcx, substs))
- }).collect();
-
- self.tables
- .borrow_mut()
- .fru_field_types_mut()
- .insert(expr.hir_id, fru_field_types);
- }
- _ => {
- span_err!(self.tcx.sess, base_expr.span, E0436,
- "functional record update syntax requires a struct");
- }
- }
- }
- }
- self.require_type_is_sized(adt_ty, expr.span, traits::StructInitializerSized);
- adt_ty
- }
-
-
- /// Invariant:
- /// If an expression has any sub-expressions that result in a type error,
- /// inspecting that expression's type with `ty.references_error()` will return
- /// true. Likewise, if an expression is known to diverge, inspecting its
- /// type with `ty::type_is_bot` will return true (n.b.: since Rust is
- /// strict, _|_ can appear in the type of an expression that does not,
- /// itself, diverge: for example, fn() -> _|_.)
- /// Note that inspecting a type's structure *directly* may expose the fact
- /// that there are actually multiple representations for `Error`, so avoid
- /// that when err needs to be handled differently.
- fn check_expr_with_expectation_and_needs(&self,
- expr: &'gcx hir::Expr,
- expected: Expectation<'tcx>,
- needs: Needs) -> Ty<'tcx> {
- debug!(">> type-checking: expr={:?} expected={:?}",
- expr, expected);
-
- // Warn for expressions after diverging siblings.
- self.warn_if_unreachable(expr.hir_id, expr.span, "expression");
-
- // Hide the outer diverging and has_errors flags.
- let old_diverges = self.diverges.get();
- let old_has_errors = self.has_errors.get();
- self.diverges.set(Diverges::Maybe);
- self.has_errors.set(false);
-
- let ty = self.check_expr_kind(expr, expected, needs);
-
- // Warn for non-block expressions with diverging children.
- match expr.node {
- ExprKind::Block(..) |
- ExprKind::Loop(..) | ExprKind::While(..) |
- ExprKind::Match(..) => {}
-
- _ => self.warn_if_unreachable(expr.hir_id, expr.span, "expression")
- }
-
- // Any expression that produces a value of type `!` must have diverged
- if ty.is_never() {
- self.diverges.set(self.diverges.get() | Diverges::Always);
- }
-
- // Record the type, which applies it effects.
- // We need to do this after the warning above, so that
- // we don't warn for the diverging expression itself.
- self.write_ty(expr.hir_id, ty);
-
- // Combine the diverging and has_error flags.
- self.diverges.set(self.diverges.get() | old_diverges);
- self.has_errors.set(self.has_errors.get() | old_has_errors);
-
- debug!("type of {} is...", self.tcx.hir().hir_to_string(expr.hir_id));
- debug!("... {:?}, expected is {:?}", ty, expected);
-
- ty
- }
-
- fn check_expr_kind(
- &self,
- expr: &'gcx hir::Expr,
- expected: Expectation<'tcx>,
- needs: Needs
- ) -> Ty<'tcx> {
- debug!(
- "check_expr_kind(expr={:?}, expected={:?}, needs={:?})",
- expr,
- expected,
- needs,
- );
-
- let tcx = self.tcx;
- let id = expr.hir_id;
- match expr.node {
- ExprKind::Box(ref subexpr) => {
- let expected_inner = expected.to_option(self).map_or(NoExpectation, |ty| {
- match ty.sty {
- ty::Adt(def, _) if def.is_box()
- => Expectation::rvalue_hint(self, ty.boxed_ty()),
- _ => NoExpectation
- }
- });
- let referent_ty = self.check_expr_with_expectation(subexpr, expected_inner);
- tcx.mk_box(referent_ty)
- }
-
- ExprKind::Lit(ref lit) => {
- self.check_lit(&lit, expected)
- }
- ExprKind::Binary(op, ref lhs, ref rhs) => {
- self.check_binop(expr, op, lhs, rhs)
- }
- ExprKind::AssignOp(op, ref lhs, ref rhs) => {
- self.check_binop_assign(expr, op, lhs, rhs)
- }
- ExprKind::Unary(unop, ref oprnd) => {
- let expected_inner = match unop {
- hir::UnNot | hir::UnNeg => {
- expected
- }
- hir::UnDeref => {
- NoExpectation
- }
- };
- let needs = match unop {
- hir::UnDeref => needs,
- _ => Needs::None
- };
- let mut oprnd_t = self.check_expr_with_expectation_and_needs(&oprnd,
- expected_inner,
- needs);
-
- if !oprnd_t.references_error() {
- oprnd_t = self.structurally_resolved_type(expr.span, oprnd_t);
- match unop {
- hir::UnDeref => {
- if let Some(mt) = oprnd_t.builtin_deref(true) {
- oprnd_t = mt.ty;
- } else if let Some(ok) = self.try_overloaded_deref(
- expr.span, oprnd_t, needs) {
- let method = self.register_infer_ok_obligations(ok);
- if let ty::Ref(region, _, mutbl) = method.sig.inputs()[0].sty {
- let mutbl = match mutbl {
- hir::MutImmutable => AutoBorrowMutability::Immutable,
- hir::MutMutable => AutoBorrowMutability::Mutable {
- // (It shouldn't actually matter for unary ops whether
- // we enable two-phase borrows or not, since a unary
- // op has no additional operands.)
- allow_two_phase_borrow: AllowTwoPhase::No,
- }
- };
- self.apply_adjustments(oprnd, vec![Adjustment {
- kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl)),
- target: method.sig.inputs()[0]
- }]);
- }
- oprnd_t = self.make_overloaded_place_return_type(method).ty;
- self.write_method_call(expr.hir_id, method);
- } else {
- let mut err = type_error_struct!(
- tcx.sess,
- expr.span,
- oprnd_t,
- E0614,
- "type `{}` cannot be dereferenced",
- oprnd_t,
- );
- let sp = tcx.sess.source_map().start_point(expr.span);
- if let Some(sp) = tcx.sess.parse_sess.ambiguous_block_expr_parse
- .borrow().get(&sp)
- {
- tcx.sess.parse_sess.expr_parentheses_needed(
- &mut err,
- *sp,
- None,
- );
- }
- err.emit();
- oprnd_t = tcx.types.err;
- }
- }
- hir::UnNot => {
- let result = self.check_user_unop(expr, oprnd_t, unop);
- // If it's builtin, we can reuse the type, this helps inference.
- if !(oprnd_t.is_integral() || oprnd_t.sty == ty::Bool) {
- oprnd_t = result;
- }
- }
- hir::UnNeg => {
- let result = self.check_user_unop(expr, oprnd_t, unop);
- // If it's builtin, we can reuse the type, this helps inference.
- if !(oprnd_t.is_integral() || oprnd_t.is_fp()) {
- oprnd_t = result;
- }
- }
- }
- }
- oprnd_t
- }
- ExprKind::AddrOf(mutbl, ref oprnd) => {
- let hint = expected.only_has_type(self).map_or(NoExpectation, |ty| {
- match ty.sty {
- ty::Ref(_, ty, _) | ty::RawPtr(ty::TypeAndMut { ty, .. }) => {
- if oprnd.is_place_expr() {
- // Places may legitimately have unsized types.
- // For example, dereferences of a fat pointer and
- // the last field of a struct can be unsized.
- ExpectHasType(ty)
- } else {
- Expectation::rvalue_hint(self, ty)
- }
- }
- _ => NoExpectation
- }
- });
- let needs = Needs::maybe_mut_place(mutbl);
- let ty = self.check_expr_with_expectation_and_needs(&oprnd, hint, needs);
-
- let tm = ty::TypeAndMut { ty: ty, mutbl: mutbl };
- if tm.ty.references_error() {
- tcx.types.err
- } else {
- // Note: at this point, we cannot say what the best lifetime
- // is to use for resulting pointer. We want to use the
- // shortest lifetime possible so as to avoid spurious borrowck
- // errors. Moreover, the longest lifetime will depend on the
- // precise details of the value whose address is being taken
- // (and how long it is valid), which we don't know yet until type
- // inference is complete.
- //
- // Therefore, here we simply generate a region variable. The
- // region inferencer will then select the ultimate value.
- // Finally, borrowck is charged with guaranteeing that the
- // value whose address was taken can actually be made to live
- // as long as it needs to live.
- let region = self.next_region_var(infer::AddrOfRegion(expr.span));
- tcx.mk_ref(region, tm)
- }
- }
- ExprKind::Path(ref qpath) => {
- let (res, opt_ty, segs) = self.resolve_ty_and_res_ufcs(qpath, expr.hir_id,
- expr.span);
- let ty = match res {
- Res::Err => {
- self.set_tainted_by_errors();
- tcx.types.err
- }
- Res::Def(DefKind::Ctor(_, CtorKind::Fictive), _) => {
- report_unexpected_variant_res(tcx, res, expr.span, qpath);
- tcx.types.err
- }
- _ => self.instantiate_value_path(segs, opt_ty, res, expr.span, id).0,
- };
-
- if let ty::FnDef(..) = ty.sty {
- let fn_sig = ty.fn_sig(tcx);
- if !tcx.features().unsized_locals {
- // We want to remove some Sized bounds from std functions,
- // but don't want to expose the removal to stable Rust.
- // i.e., we don't want to allow
- //
- // ```rust
- // drop as fn(str);
- // ```
- //
- // to work in stable even if the Sized bound on `drop` is relaxed.
- for i in 0..fn_sig.inputs().skip_binder().len() {
- // We just want to check sizedness, so instead of introducing
- // placeholder lifetimes with probing, we just replace higher lifetimes
- // with fresh vars.
- let input = self.replace_bound_vars_with_fresh_vars(
- expr.span,
- infer::LateBoundRegionConversionTime::FnCall,
- &fn_sig.input(i)).0;
- self.require_type_is_sized_deferred(input, expr.span,
- traits::SizedArgumentType);
- }
- }
- // Here we want to prevent struct constructors from returning unsized types.
- // There were two cases this happened: fn pointer coercion in stable
- // and usual function call in presense of unsized_locals.
- // Also, as we just want to check sizedness, instead of introducing
- // placeholder lifetimes with probing, we just replace higher lifetimes
- // with fresh vars.
- let output = self.replace_bound_vars_with_fresh_vars(
- expr.span,
- infer::LateBoundRegionConversionTime::FnCall,
- &fn_sig.output()).0;
- self.require_type_is_sized_deferred(output, expr.span, traits::SizedReturnType);
- }
-
- // We always require that the type provided as the value for
- // a type parameter outlives the moment of instantiation.
- let substs = self.tables.borrow().node_substs(expr.hir_id);
- self.add_wf_bounds(substs, expr);
-
- ty
- }
- ExprKind::InlineAsm(_, ref outputs, ref inputs) => {
- for expr in outputs.iter().chain(inputs.iter()) {
- self.check_expr(expr);
- }
- tcx.mk_unit()
- }
- ExprKind::Break(destination, ref expr_opt) => {
- if let Ok(target_id) = destination.target_id {
- let (e_ty, cause);
- if let Some(ref e) = *expr_opt {
- // If this is a break with a value, we need to type-check
- // the expression. Get an expected type from the loop context.
- let opt_coerce_to = {
- let mut enclosing_breakables = self.enclosing_breakables.borrow_mut();
- enclosing_breakables.find_breakable(target_id)
- .coerce
- .as_ref()
- .map(|coerce| coerce.expected_ty())
- };
-
- // If the loop context is not a `loop { }`, then break with
- // a value is illegal, and `opt_coerce_to` will be `None`.
- // Just set expectation to error in that case.
- let coerce_to = opt_coerce_to.unwrap_or(tcx.types.err);
-
- // Recurse without `enclosing_breakables` borrowed.
- e_ty = self.check_expr_with_hint(e, coerce_to);
- cause = self.misc(e.span);
- } else {
- // Otherwise, this is a break *without* a value. That's
- // always legal, and is equivalent to `break ()`.
- e_ty = tcx.mk_unit();
- cause = self.misc(expr.span);
- }
-
- // Now that we have type-checked `expr_opt`, borrow
- // the `enclosing_loops` field and let's coerce the
- // type of `expr_opt` into what is expected.
- let mut enclosing_breakables = self.enclosing_breakables.borrow_mut();
- let ctxt = enclosing_breakables.find_breakable(target_id);
- if let Some(ref mut coerce) = ctxt.coerce {
- if let Some(ref e) = *expr_opt {
- coerce.coerce(self, &cause, e, e_ty);
- } else {
- assert!(e_ty.is_unit());
- coerce.coerce_forced_unit(self, &cause, &mut |_| (), true);
- }
- } else {
- // If `ctxt.coerce` is `None`, we can just ignore
- // the type of the expresison. This is because
- // either this was a break *without* a value, in
- // which case it is always a legal type (`()`), or
- // else an error would have been flagged by the
- // `loops` pass for using break with an expression
- // where you are not supposed to.
- assert!(expr_opt.is_none() || self.tcx.sess.err_count() > 0);
- }
-
- ctxt.may_break = true;
-
- // the type of a `break` is always `!`, since it diverges
- tcx.types.never
- } else {
- // Otherwise, we failed to find the enclosing loop;
- // this can only happen if the `break` was not
- // inside a loop at all, which is caught by the
- // loop-checking pass.
- if self.tcx.sess.err_count() == 0 {
- self.tcx.sess.delay_span_bug(expr.span,
- "break was outside loop, but no error was emitted");
- }
-
- // We still need to assign a type to the inner expression to
- // prevent the ICE in #43162.
- if let Some(ref e) = *expr_opt {
- self.check_expr_with_hint(e, tcx.types.err);
-
- // ... except when we try to 'break rust;'.
- // ICE this expression in particular (see #43162).
- if let ExprKind::Path(QPath::Resolved(_, ref path)) = e.node {
- if path.segments.len() == 1 &&
- path.segments[0].ident.name == sym::rust {
- fatally_break_rust(self.tcx.sess);
- }
- }
- }
- // There was an error; make type-check fail.
- tcx.types.err
- }
-
- }
- ExprKind::Continue(destination) => {
- if destination.target_id.is_ok() {
- tcx.types.never
- } else {
- // There was an error; make type-check fail.
- tcx.types.err
- }
- }
- ExprKind::Ret(ref expr_opt) => {
- if self.ret_coercion.is_none() {
- struct_span_err!(self.tcx.sess, expr.span, E0572,
- "return statement outside of function body").emit();
- } else if let Some(ref e) = *expr_opt {
- if self.ret_coercion_span.borrow().is_none() {
- *self.ret_coercion_span.borrow_mut() = Some(e.span);
- }
- self.check_return_expr(e);
- } else {
- let mut coercion = self.ret_coercion.as_ref().unwrap().borrow_mut();
- if self.ret_coercion_span.borrow().is_none() {
- *self.ret_coercion_span.borrow_mut() = Some(expr.span);
- }
- let cause = self.cause(expr.span, ObligationCauseCode::ReturnNoExpression);
- if let Some((fn_decl, _)) = self.get_fn_decl(expr.hir_id) {
- coercion.coerce_forced_unit(
- self,
- &cause,
- &mut |db| {
- db.span_label(
- fn_decl.output.span(),
- format!(
- "expected `{}` because of this return type",
- fn_decl.output,
- ),
- );
- },
- true,
- );
- } else {
- coercion.coerce_forced_unit(self, &cause, &mut |_| (), true);
- }
- }
- tcx.types.never
- }
- ExprKind::Assign(ref lhs, ref rhs) => {
- self.check_assign(expr, expected, lhs, rhs)
- }
- ExprKind::While(ref cond, ref body, _) => {
- let ctxt = BreakableCtxt {
- // cannot use break with a value from a while loop
- coerce: None,
- may_break: false, // Will get updated if/when we find a `break`.
- };
-
- let (ctxt, ()) = self.with_breakable_ctxt(expr.hir_id, ctxt, || {
- self.check_expr_has_type_or_error(&cond, tcx.types.bool);
- let cond_diverging = self.diverges.get();
- self.check_block_no_value(&body);
-
- // We may never reach the body so it diverging means nothing.
- self.diverges.set(cond_diverging);
- });
-
- if ctxt.may_break {
- // No way to know whether it's diverging because
- // of a `break` or an outer `break` or `return`.
- self.diverges.set(Diverges::Maybe);
- }
-
- self.tcx.mk_unit()
- }
- ExprKind::Loop(ref body, _, source) => {
- let coerce = match source {
- // you can only use break with a value from a normal `loop { }`
- hir::LoopSource::Loop => {
- let coerce_to = expected.coercion_target_type(self, body.span);
- Some(CoerceMany::new(coerce_to))
- }
-
- hir::LoopSource::WhileLet |
- hir::LoopSource::ForLoop => {
- None
- }
- };
-
- let ctxt = BreakableCtxt {
- coerce,
- may_break: false, // Will get updated if/when we find a `break`.
- };
-
- let (ctxt, ()) = self.with_breakable_ctxt(expr.hir_id, ctxt, || {
- self.check_block_no_value(&body);
- });
-
- if ctxt.may_break {
- // No way to know whether it's diverging because
- // of a `break` or an outer `break` or `return`.
- self.diverges.set(Diverges::Maybe);
- }
-
- // If we permit break with a value, then result type is
- // the LUB of the breaks (possibly ! if none); else, it
- // is nil. This makes sense because infinite loops
- // (which would have type !) are only possible iff we
- // permit break with a value [1].
- if ctxt.coerce.is_none() && !ctxt.may_break {
- // [1]
- self.tcx.sess.delay_span_bug(body.span, "no coercion, but loop may not break");
- }
- ctxt.coerce.map(|c| c.complete(self)).unwrap_or_else(|| self.tcx.mk_unit())
- }
- ExprKind::Match(ref discrim, ref arms, match_src) => {
- self.check_match(expr, &discrim, arms, expected, match_src)
- }
- ExprKind::Closure(capture, ref decl, body_id, _, gen) => {
- self.check_expr_closure(expr, capture, &decl, body_id, gen, expected)
- }
- ExprKind::Block(ref body, _) => {
- self.check_block_with_expected(&body, expected)
- }
- ExprKind::Call(ref callee, ref args) => {
- self.check_call(expr, &callee, args, expected)
- }
- ExprKind::MethodCall(ref segment, span, ref args) => {
- self.check_method_call(expr, segment, span, args, expected, needs)
- }
- ExprKind::Cast(ref e, ref t) => {
- // Find the type of `e`. Supply hints based on the type we are casting to,
- // if appropriate.
- let t_cast = self.to_ty_saving_user_provided_ty(t);
- let t_cast = self.resolve_vars_if_possible(&t_cast);
- let t_expr = self.check_expr_with_expectation(e, ExpectCastableToType(t_cast));
- let t_cast = self.resolve_vars_if_possible(&t_cast);
-
- // Eagerly check for some obvious errors.
- if t_expr.references_error() || t_cast.references_error() {
- tcx.types.err
- } else {
- // Defer other checks until we're done type checking.
- let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut();
- match cast::CastCheck::new(self, e, t_expr, t_cast, t.span, expr.span) {
- Ok(cast_check) => {
- deferred_cast_checks.push(cast_check);
- t_cast
- }
- Err(ErrorReported) => {
- tcx.types.err
- }
- }
- }
- }
- ExprKind::Type(ref e, ref t) => {
- let ty = self.to_ty_saving_user_provided_ty(&t);
- self.check_expr_eq_type(&e, ty);
- ty
- }
- ExprKind::DropTemps(ref e) => {
- self.check_expr_with_expectation(e, expected)
- }
- ExprKind::Array(ref args) => {
- let uty = expected.to_option(self).and_then(|uty| {
- match uty.sty {
- ty::Array(ty, _) | ty::Slice(ty) => Some(ty),
- _ => None
- }
- });
-
- let element_ty = if !args.is_empty() {
- let coerce_to = uty.unwrap_or_else(|| {
- self.next_ty_var(TypeVariableOrigin {
- kind: TypeVariableOriginKind::TypeInference,
- span: expr.span,
- })
- });
- let mut coerce = CoerceMany::with_coercion_sites(coerce_to, args);
- assert_eq!(self.diverges.get(), Diverges::Maybe);
- for e in args {
- let e_ty = self.check_expr_with_hint(e, coerce_to);
- let cause = self.misc(e.span);
- coerce.coerce(self, &cause, e, e_ty);
- }
- coerce.complete(self)
- } else {
- self.next_ty_var(TypeVariableOrigin {
- kind: TypeVariableOriginKind::TypeInference,
- span: expr.span,
- })
- };
- tcx.mk_array(element_ty, args.len() as u64)
- }
- ExprKind::Repeat(ref element, ref count) => {
- let count_def_id = tcx.hir().local_def_id_from_hir_id(count.hir_id);
- let count = if self.const_param_def_id(count).is_some() {
- Ok(self.to_const(count, self.tcx.type_of(count_def_id)))
- } else {
- let param_env = ty::ParamEnv::empty();
- let substs = InternalSubsts::identity_for_item(tcx.global_tcx(), count_def_id);
- let instance = ty::Instance::resolve(
- tcx.global_tcx(),
- param_env,
- count_def_id,
- substs,
- ).unwrap();
- let global_id = GlobalId {
- instance,
- promoted: None
- };
-
- tcx.const_eval(param_env.and(global_id))
- };
-
- let uty = match expected {
- ExpectHasType(uty) => {
- match uty.sty {
- ty::Array(ty, _) | ty::Slice(ty) => Some(ty),
- _ => None
- }
- }
- _ => None
- };
-
- let (element_ty, t) = match uty {
- Some(uty) => {
- self.check_expr_coercable_to_type(&element, uty);
- (uty, uty)
- }
- None => {
- let ty = self.next_ty_var(TypeVariableOrigin {
- kind: TypeVariableOriginKind::MiscVariable,
- span: element.span,
- });
- let element_ty = self.check_expr_has_type_or_error(&element, ty);
- (element_ty, ty)
- }
- };
-
- if let Ok(count) = count {
- let zero_or_one = count.assert_usize(tcx).map_or(false, |count| count <= 1);
- if !zero_or_one {
- // For [foo, ..n] where n > 1, `foo` must have
- // Copy type:
- let lang_item = self.tcx.require_lang_item(lang_items::CopyTraitLangItem);
- self.require_type_meets(t, expr.span, traits::RepeatVec, lang_item);
- }
- }
-
- if element_ty.references_error() {
- tcx.types.err
- } else if let Ok(count) = count {
- tcx.mk_ty(ty::Array(t, count))
- } else {
- tcx.types.err
- }
- }
- ExprKind::Tup(ref elts) => {
- let flds = expected.only_has_type(self).and_then(|ty| {
- let ty = self.resolve_type_vars_with_obligations(ty);
- match ty.sty {
- ty::Tuple(ref flds) => Some(&flds[..]),
- _ => None
- }
- });
-
- let elt_ts_iter = elts.iter().enumerate().map(|(i, e)| {
- let t = match flds {
- Some(ref fs) if i < fs.len() => {
- let ety = fs[i].expect_ty();
- self.check_expr_coercable_to_type(&e, ety);
- ety
- }
- _ => {
- self.check_expr_with_expectation(&e, NoExpectation)
- }
- };
- t
- });
- let tuple = tcx.mk_tup(elt_ts_iter);
- if tuple.references_error() {
- tcx.types.err
- } else {
- self.require_type_is_sized(tuple, expr.span, traits::TupleInitializerSized);
- tuple
- }
- }
- ExprKind::Struct(ref qpath, ref fields, ref base_expr) => {
- self.check_expr_struct(expr, expected, qpath, fields, base_expr)
- }
- ExprKind::Field(ref base, field) => {
- self.check_field(expr, needs, &base, field)
- }
- ExprKind::Index(ref base, ref idx) => {
- let base_t = self.check_expr_with_needs(&base, needs);
- let idx_t = self.check_expr(&idx);
-
- if base_t.references_error() {
- base_t
- } else if idx_t.references_error() {
- idx_t
- } else {
- let base_t = self.structurally_resolved_type(base.span, base_t);
- match self.lookup_indexing(expr, base, base_t, idx_t, needs) {
- Some((index_ty, element_ty)) => {
- // two-phase not needed because index_ty is never mutable
- self.demand_coerce(idx, idx_t, index_ty, AllowTwoPhase::No);
- element_ty
- }
- None => {
- let mut err =
- type_error_struct!(tcx.sess, expr.span, base_t, E0608,
- "cannot index into a value of type `{}`",
- base_t);
- // Try to give some advice about indexing tuples.
- if let ty::Tuple(..) = base_t.sty {
- let mut needs_note = true;
- // If the index is an integer, we can show the actual
- // fixed expression:
- if let ExprKind::Lit(ref lit) = idx.node {
- if let ast::LitKind::Int(i,
- ast::LitIntType::Unsuffixed) = lit.node {
- let snip = tcx.sess.source_map().span_to_snippet(base.span);
- if let Ok(snip) = snip {
- err.span_suggestion(
- expr.span,
- "to access tuple elements, use",
- format!("{}.{}", snip, i),
- Applicability::MachineApplicable,
- );
- needs_note = false;
- }
- }
- }
- if needs_note {
- err.help("to access tuple elements, use tuple indexing \
- syntax (e.g., `tuple.0`)");
- }
- }
- err.emit();
- self.tcx.types.err
- }
- }
- }
- }
- ExprKind::Yield(ref value) => {
- match self.yield_ty {
- Some(ty) => {
- self.check_expr_coercable_to_type(&value, ty);
- }
- None => {
- struct_span_err!(self.tcx.sess, expr.span, E0627,
- "yield statement outside of generator literal").emit();
- }
- }
- tcx.mk_unit()
- }
- hir::ExprKind::Err => {
- tcx.types.err
- }
- }
- }
-
- /// Type check assignment expression `expr` of form `lhs = rhs`.
- /// The expected type is `()` and is passsed to the function for the purposes of diagnostics.
- fn check_assign(
- &self,
- expr: &'gcx hir::Expr,
- expected: Expectation<'tcx>,
- lhs: &'gcx hir::Expr,
- rhs: &'gcx hir::Expr,
- ) -> Ty<'tcx> {
- let lhs_ty = self.check_expr_with_needs(&lhs, Needs::MutPlace);
- let rhs_ty = self.check_expr_coercable_to_type(&rhs, lhs_ty);
-
- let expected_ty = expected.coercion_target_type(self, expr.span);
- if expected_ty == self.tcx.types.bool {
- // The expected type is `bool` but this will result in `()` so we can reasonably
- // say that the user intended to write `lhs == rhs` instead of `lhs = rhs`.
- // The likely cause of this is `if foo = bar { .. }`.
- let actual_ty = self.tcx.mk_unit();
- let mut err = self.demand_suptype_diag(expr.span, expected_ty, actual_ty).unwrap();
- let msg = "try comparing for equality";
- let left = self.tcx.sess.source_map().span_to_snippet(lhs.span);
- let right = self.tcx.sess.source_map().span_to_snippet(rhs.span);
- if let (Ok(left), Ok(right)) = (left, right) {
- let help = format!("{} == {}", left, right);
- err.span_suggestion(expr.span, msg, help, Applicability::MaybeIncorrect);
- } else {
- err.help(msg);
- }
- err.emit();
- } else if !lhs.is_place_expr() {
- struct_span_err!(self.tcx.sess, expr.span, E0070,
- "invalid left-hand side expression")
- .span_label(expr.span, "left-hand of expression not valid")
- .emit();
- }
-
- self.require_type_is_sized(lhs_ty, lhs.span, traits::AssignmentLhsSized);
-
- if lhs_ty.references_error() || rhs_ty.references_error() {
- self.tcx.types.err
- } else {
- self.tcx.mk_unit()
- }
- }
-
// Finish resolving a path in a struct expression or pattern `S::A { .. }` if necessary.
// The newly resolved definition is written into `type_dependent_defs`.
fn finish_resolving_struct_path(&self,
@@ -4732,14 +3356,14 @@
let result = result.map(|(_, kind, def_id)| (kind, def_id));
// Write back the new resolution.
- self.tables.borrow_mut().type_dependent_defs_mut().insert(hir_id, result);
+ self.write_resolution(hir_id, result);
(result.map(|(kind, def_id)| Res::Def(kind, def_id)).unwrap_or(Res::Err), ty)
}
}
}
- /// Resolves associated value path into a base type and associated constant or method
+ /// Resolves an associated value path into a base type and associated constant, or method
/// resolution. The newly resolved definition is written into `type_dependent_defs`.
pub fn resolve_ty_and_res_ufcs<'b>(&self,
qpath: &'b QPath,
@@ -4785,7 +3409,7 @@
});
// Write back the new resolution.
- self.tables.borrow_mut().type_dependent_defs_mut().insert(hir_id, result);
+ self.write_resolution(hir_id, result);
(
result.map(|(kind, def_id)| Res::Def(kind, def_id)).unwrap_or(Res::Err),
Some(ty),
@@ -4793,10 +3417,11 @@
)
}
- pub fn check_decl_initializer(&self,
- local: &'gcx hir::Local,
- init: &'gcx hir::Expr) -> Ty<'tcx>
- {
+ pub fn check_decl_initializer(
+ &self,
+ local: &'tcx hir::Local,
+ init: &'tcx hir::Expr,
+ ) -> Ty<'tcx> {
// FIXME(tschottdorf): `contains_explicit_ref_binding()` must be removed
// for #42640 (default match binding modes).
//
@@ -4821,7 +3446,7 @@
}
}
- pub fn check_decl_local(&self, local: &'gcx hir::Local) {
+ pub fn check_decl_local(&self, local: &'tcx hir::Local) {
let t = self.local_ty(local.span, local.hir_id).decl_ty;
self.write_ty(local.hir_id, t);
@@ -4844,7 +3469,7 @@
}
}
- pub fn check_stmt(&self, stmt: &'gcx hir::Stmt) {
+ pub fn check_stmt(&self, stmt: &'tcx hir::Stmt) {
// Don't do all the complex logic below for `DeclItem`.
match stmt.node {
hir::StmtKind::Item(..) => return,
@@ -4879,7 +3504,7 @@
self.has_errors.set(self.has_errors.get() | old_has_errors);
}
- pub fn check_block_no_value(&self, blk: &'gcx hir::Block) {
+ pub fn check_block_no_value(&self, blk: &'tcx hir::Block) {
let unit = self.tcx.mk_unit();
let ty = self.check_block_with_expected(blk, ExpectHasType(unit));
@@ -4890,9 +3515,11 @@
}
}
- fn check_block_with_expected(&self,
- blk: &'gcx hir::Block,
- expected: Expectation<'tcx>) -> Ty<'tcx> {
+ fn check_block_with_expected(
+ &self,
+ blk: &'tcx hir::Block,
+ expected: Expectation<'tcx>,
+ ) -> Ty<'tcx> {
let prev = {
let mut fcx_ps = self.ps.borrow_mut();
let unsafety_state = fcx_ps.recurse(blk);
@@ -5083,7 +3710,7 @@
pub fn suggest_mismatched_types_on_tail(
&self,
err: &mut DiagnosticBuilder<'tcx>,
- expression: &'gcx hir::Expr,
+ expression: &'tcx hir::Expr,
expected: Ty<'tcx>,
found: Ty<'tcx>,
cause_span: Span,
@@ -5164,11 +3791,13 @@
/// This routine checks if the return expression in a block would make sense on its own as a
/// statement and the return type has been left as default or has been specified as `()`. If so,
/// it suggests adding a semicolon.
- fn suggest_missing_semicolon(&self,
- err: &mut DiagnosticBuilder<'tcx>,
- expression: &'gcx hir::Expr,
- expected: Ty<'tcx>,
- cause_span: Span) {
+ fn suggest_missing_semicolon(
+ &self,
+ err: &mut DiagnosticBuilder<'tcx>,
+ expression: &'tcx hir::Expr,
+ expected: Ty<'tcx>,
+ cause_span: Span,
+ ) {
if expected.is_unit() {
// `BlockTailExpression` only relevant if the tail expr would be
// useful on its own.
@@ -5263,7 +3892,7 @@
/// with `expected_ty`. If so, it suggests removing the semicolon.
fn consider_hint_about_removing_semicolon(
&self,
- blk: &'gcx hir::Block,
+ blk: &'tcx hir::Block,
expected_ty: Ty<'tcx>,
err: &mut DiagnosticBuilder<'_>,
) {
@@ -5277,11 +3906,7 @@
}
}
- fn could_remove_semicolon(
- &self,
- blk: &'gcx hir::Block,
- expected_ty: Ty<'tcx>,
- ) -> Option<Span> {
+ fn could_remove_semicolon(&self, blk: &'tcx hir::Block, expected_ty: Ty<'tcx>) -> Option<Span> {
// Be helpful when the user wrote `{... expr;}` and
// taking the `;` off is enough to fix the error.
let last_stmt = blk.stmts.last()?;
@@ -5641,9 +4266,12 @@
}
}
- fn with_breakable_ctxt<F: FnOnce() -> R, R>(&self, id: hir::HirId,
- ctxt: BreakableCtxt<'gcx, 'tcx>, f: F)
- -> (BreakableCtxt<'gcx, 'tcx>, R) {
+ fn with_breakable_ctxt<F: FnOnce() -> R, R>(
+ &self,
+ id: hir::HirId,
+ ctxt: BreakableCtxt<'tcx>,
+ f: F,
+ ) -> (BreakableCtxt<'tcx>, R) {
let index;
{
let mut enclosing_breakables = self.enclosing_breakables.borrow_mut();
@@ -5700,9 +4328,7 @@
}
}
-pub fn check_bounds_are_used<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- generics: &ty::Generics,
- ty: Ty<'tcx>) {
+pub fn check_bounds_are_used<'tcx>(tcx: TyCtxt<'tcx>, generics: &ty::Generics, ty: Ty<'tcx>) {
let own_counts = generics.own_counts();
debug!(
"check_bounds_are_used(n_tys={}, n_cts={}, ty={:?})",
@@ -5737,7 +4363,7 @@
for (&used, param) in types_used.iter().zip(types) {
if !used {
let id = tcx.hir().as_local_hir_id(param.def_id).unwrap();
- let span = tcx.hir().span_by_hir_id(id);
+ let span = tcx.hir().span(id);
struct_span_err!(tcx.sess, span, E0091, "type parameter `{}` is unused", param.name)
.span_label(span, "unused type parameter")
.emit();
diff --git a/src/librustc_typeck/check/op.rs b/src/librustc_typeck/check/op.rs
index 51a9103..93855a3 100644
--- a/src/librustc_typeck/check/op.rs
+++ b/src/librustc_typeck/check/op.rs
@@ -11,14 +11,15 @@
use syntax::ast::Ident;
use rustc::hir;
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// Checks a `a <op>= b`
- pub fn check_binop_assign(&self,
- expr: &'gcx hir::Expr,
- op: hir::BinOp,
- lhs_expr: &'gcx hir::Expr,
- rhs_expr: &'gcx hir::Expr) -> Ty<'tcx>
- {
+ pub fn check_binop_assign(
+ &self,
+ expr: &'tcx hir::Expr,
+ op: hir::BinOp,
+ lhs_expr: &'tcx hir::Expr,
+ rhs_expr: &'tcx hir::Expr,
+ ) -> Ty<'tcx> {
let (lhs_ty, rhs_ty, return_ty) =
self.check_overloaded_binop(expr, lhs_expr, rhs_expr, op, IsAssign::Yes);
@@ -43,12 +44,13 @@
}
/// Checks a potentially overloaded binary operator.
- pub fn check_binop(&self,
- expr: &'gcx hir::Expr,
- op: hir::BinOp,
- lhs_expr: &'gcx hir::Expr,
- rhs_expr: &'gcx hir::Expr) -> Ty<'tcx>
- {
+ pub fn check_binop(
+ &self,
+ expr: &'tcx hir::Expr,
+ op: hir::BinOp,
+ lhs_expr: &'tcx hir::Expr,
+ rhs_expr: &'tcx hir::Expr,
+ ) -> Ty<'tcx> {
let tcx = self.tcx;
debug!("check_binop(expr.hir_id={}, expr={:?}, op={:?}, lhs_expr={:?}, rhs_expr={:?})",
@@ -104,14 +106,14 @@
}
}
- fn enforce_builtin_binop_types(&self,
- lhs_expr: &'gcx hir::Expr,
- lhs_ty: Ty<'tcx>,
- rhs_expr: &'gcx hir::Expr,
- rhs_ty: Ty<'tcx>,
- op: hir::BinOp)
- -> Ty<'tcx>
- {
+ fn enforce_builtin_binop_types(
+ &self,
+ lhs_expr: &'tcx hir::Expr,
+ lhs_ty: Ty<'tcx>,
+ rhs_expr: &'tcx hir::Expr,
+ rhs_ty: Ty<'tcx>,
+ op: hir::BinOp,
+ ) -> Ty<'tcx> {
debug_assert!(is_builtin_binop(lhs_ty, rhs_ty, op));
let tcx = self.tcx;
@@ -142,14 +144,14 @@
}
}
- fn check_overloaded_binop(&self,
- expr: &'gcx hir::Expr,
- lhs_expr: &'gcx hir::Expr,
- rhs_expr: &'gcx hir::Expr,
- op: hir::BinOp,
- is_assign: IsAssign)
- -> (Ty<'tcx>, Ty<'tcx>, Ty<'tcx>)
- {
+ fn check_overloaded_binop(
+ &self,
+ expr: &'tcx hir::Expr,
+ lhs_expr: &'tcx hir::Expr,
+ rhs_expr: &'tcx hir::Expr,
+ op: hir::BinOp,
+ is_assign: IsAssign,
+ ) -> (Ty<'tcx>, Ty<'tcx>, Ty<'tcx>) {
debug!("check_overloaded_binop(expr.hir_id={}, op={:?}, is_assign={:?})",
expr.hir_id,
op,
@@ -515,8 +517,8 @@
/// to print the normal "implementation of `std::ops::Add` might be missing" note
fn check_str_addition(
&self,
- lhs_expr: &'gcx hir::Expr,
- rhs_expr: &'gcx hir::Expr,
+ lhs_expr: &'tcx hir::Expr,
+ rhs_expr: &'tcx hir::Expr,
lhs_ty: Ty<'tcx>,
rhs_ty: Ty<'tcx>,
err: &mut errors::DiagnosticBuilder<'_>,
@@ -611,12 +613,12 @@
}
}
- pub fn check_user_unop(&self,
- ex: &'gcx hir::Expr,
- operand_ty: Ty<'tcx>,
- op: hir::UnOp)
- -> Ty<'tcx>
- {
+ pub fn check_user_unop(
+ &self,
+ ex: &'tcx hir::Expr,
+ operand_ty: Ty<'tcx>,
+ op: hir::UnOp,
+ ) -> Ty<'tcx> {
assert!(op.is_by_value());
match self.lookup_op_method(operand_ty, &[], Op::Unary(op, ex.span)) {
Ok(method) => {
diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs
index c3b6fb2..5c71039 100644
--- a/src/librustc_typeck/check/regionck.rs
+++ b/src/librustc_typeck/check/regionck.rs
@@ -107,8 +107,8 @@
///////////////////////////////////////////////////////////////////////////
// PUBLIC ENTRY POINTS
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
- pub fn regionck_expr(&self, body: &'gcx hir::Body) {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
+ pub fn regionck_expr(&self, body: &'tcx hir::Body) {
let subject = self.tcx.hir().body_owner_def_id(body.id());
let id = body.value.hir_id;
let mut rcx = RegionCtxt::new(
@@ -161,7 +161,7 @@
/// rest of type check and because sometimes we need type
/// inference to have completed before we can determine which
/// constraints to add.
- pub fn regionck_fn(&self, fn_id: hir::HirId, body: &'gcx hir::Body) {
+ pub fn regionck_fn(&self, fn_id: hir::HirId, body: &'tcx hir::Body) {
debug!("regionck_fn(id={})", fn_id);
let subject = self.tcx.hir().body_owner_def_id(body.id());
let hir_id = body.value.hir_id;
@@ -175,7 +175,7 @@
if self.err_count_since_creation() == 0 {
// regionck assumes typeck succeeded
- rcx.visit_fn_body(fn_id, body, self.tcx.hir().span_by_hir_id(fn_id));
+ rcx.visit_fn_body(fn_id, body, self.tcx.hir().span(fn_id));
}
rcx.resolve_regions_and_report_errors(SuppressRegionErrors::when_nll_is_enabled(self.tcx));
@@ -191,10 +191,10 @@
///////////////////////////////////////////////////////////////////////////
// INTERNALS
-pub struct RegionCtxt<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- pub fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
+pub struct RegionCtxt<'a, 'tcx> {
+ pub fcx: &'a FnCtxt<'a, 'tcx>,
- pub region_scope_tree: &'gcx region::ScopeTree,
+ pub region_scope_tree: &'tcx region::ScopeTree,
outlives_environment: OutlivesEnvironment<'tcx>,
@@ -212,8 +212,8 @@
subject_def_id: DefId,
}
-impl<'a, 'gcx, 'tcx> Deref for RegionCtxt<'a, 'gcx, 'tcx> {
- type Target = FnCtxt<'a, 'gcx, 'tcx>;
+impl<'a, 'tcx> Deref for RegionCtxt<'a, 'tcx> {
+ type Target = FnCtxt<'a, 'tcx>;
fn deref(&self) -> &Self::Target {
&self.fcx
}
@@ -222,14 +222,14 @@
pub struct RepeatingScope(hir::HirId);
pub struct Subject(DefId);
-impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> RegionCtxt<'a, 'tcx> {
pub fn new(
- fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
+ fcx: &'a FnCtxt<'a, 'tcx>,
RepeatingScope(initial_repeating_scope): RepeatingScope,
initial_body_id: hir::HirId,
Subject(subject): Subject,
param_env: ty::ParamEnv<'tcx>,
- ) -> RegionCtxt<'a, 'gcx, 'tcx> {
+ ) -> RegionCtxt<'a, 'tcx> {
let region_scope_tree = fcx.tcx.region_scope_tree(subject);
let outlives_environment = OutlivesEnvironment::new(param_env);
RegionCtxt {
@@ -302,7 +302,7 @@
fn visit_fn_body(
&mut self,
id: hir::HirId, // the id of the fn itself
- body: &'gcx hir::Body,
+ body: &'tcx hir::Body,
span: Span,
) {
// When we enter a function, we can derive
@@ -437,7 +437,7 @@
}
}
-impl<'a, 'gcx, 'tcx> Visitor<'gcx> for RegionCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> Visitor<'tcx> for RegionCtxt<'a, 'tcx> {
// (..) FIXME(#3238) should use visit_pat, not visit_arm/visit_local,
// However, right now we run into an issue whereby some free
// regions are not properly related if they appear within the
@@ -446,14 +446,14 @@
// hierarchy, and in particular the relationships between free
// regions, until regionck, as described in #3238.
- fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'gcx> {
+ fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::None
}
fn visit_fn(
&mut self,
- fk: intravisit::FnKind<'gcx>,
- _: &'gcx hir::FnDecl,
+ fk: intravisit::FnKind<'tcx>,
+ _: &'tcx hir::FnDecl,
body_id: hir::BodyId,
span: Span,
hir_id: hir::HirId,
@@ -486,7 +486,7 @@
//visit_pat: visit_pat, // (..) see above
- fn visit_arm(&mut self, arm: &'gcx hir::Arm) {
+ fn visit_arm(&mut self, arm: &'tcx hir::Arm) {
// see above
for p in &arm.pats {
self.constrain_bindings_in_pat(p);
@@ -494,14 +494,14 @@
intravisit::walk_arm(self, arm);
}
- fn visit_local(&mut self, l: &'gcx hir::Local) {
+ fn visit_local(&mut self, l: &'tcx hir::Local) {
// see above
self.constrain_bindings_in_pat(&l.pat);
self.link_local(l);
intravisit::walk_local(self, l);
}
- fn visit_expr(&mut self, expr: &'gcx hir::Expr) {
+ fn visit_expr(&mut self, expr: &'tcx hir::Expr) {
debug!(
"regionck::visit_expr(e={:?}, repeating_scope={:?})",
expr, self.repeating_scope
@@ -717,7 +717,7 @@
}
}
-impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> RegionCtxt<'a, 'tcx> {
fn constrain_cast(&mut self, cast_expr: &hir::Expr, source_expr: &hir::Expr) {
debug!(
"constrain_cast(cast_expr={:?}, source_expr={:?})",
@@ -758,7 +758,7 @@
}
}
- fn check_expr_fn_block(&mut self, expr: &'gcx hir::Expr, body_id: hir::BodyId) {
+ fn check_expr_fn_block(&mut self, expr: &'tcx hir::Expr, body_id: hir::BodyId) {
let repeating_scope = self.set_repeating_scope(body_id.hir_id);
intravisit::walk_expr(self, expr);
self.set_repeating_scope(repeating_scope);
@@ -830,7 +830,7 @@
/// Creates a temporary `MemCategorizationContext` and pass it to the closure.
fn with_mc<F, R>(&self, f: F) -> R
where
- F: for<'b> FnOnce(mc::MemCategorizationContext<'b, 'gcx, 'tcx>) -> R,
+ F: for<'b> FnOnce(mc::MemCategorizationContext<'b, 'tcx>) -> R,
{
f(mc::MemCategorizationContext::with_infer(
&self.infcx,
diff --git a/src/librustc_typeck/check/upvar.rs b/src/librustc_typeck/check/upvar.rs
index 82c173a..bba108a 100644
--- a/src/librustc_typeck/check/upvar.rs
+++ b/src/librustc_typeck/check/upvar.rs
@@ -45,8 +45,8 @@
use syntax::ast;
use syntax_pos::Span;
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
- pub fn closure_analyze(&self, body: &'gcx hir::Body) {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
+ pub fn closure_analyze(&self, body: &'tcx hir::Body) {
InferBorrowKindVisitor { fcx: self }.visit_body(body);
// it's our job to process these.
@@ -54,16 +54,16 @@
}
}
-struct InferBorrowKindVisitor<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
+struct InferBorrowKindVisitor<'a, 'tcx> {
+ fcx: &'a FnCtxt<'a, 'tcx>,
}
-impl<'a, 'gcx, 'tcx> Visitor<'gcx> for InferBorrowKindVisitor<'a, 'gcx, 'tcx> {
- fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'gcx> {
+impl<'a, 'tcx> Visitor<'tcx> for InferBorrowKindVisitor<'a, 'tcx> {
+ fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::None
}
- fn visit_expr(&mut self, expr: &'gcx hir::Expr) {
+ fn visit_expr(&mut self, expr: &'tcx hir::Expr) {
if let hir::ExprKind::Closure(cc, _, body_id, _, _) = expr.node {
let body = self.fcx.tcx.hir().body(body_id);
self.visit_body(body);
@@ -75,7 +75,7 @@
}
}
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
fn analyze_closure(
&self,
closure_hir_id: hir::HirId,
@@ -282,8 +282,8 @@
}
}
-struct InferBorrowKind<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
- fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
+struct InferBorrowKind<'a, 'tcx> {
+ fcx: &'a FnCtxt<'a, 'tcx>,
// The def-id of the closure whose kind and upvar accesses are being inferred.
closure_def_id: DefId,
@@ -305,7 +305,7 @@
adjust_upvar_captures: ty::UpvarCaptureMap<'tcx>,
}
-impl<'a, 'gcx, 'tcx> InferBorrowKind<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> InferBorrowKind<'a, 'tcx> {
fn adjust_upvar_borrow_kind_for_consume(
&mut self,
cmt: &mc::cmt_<'tcx>,
@@ -581,7 +581,7 @@
}
}
-impl<'a, 'gcx, 'tcx> euv::Delegate<'tcx> for InferBorrowKind<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> euv::Delegate<'tcx> for InferBorrowKind<'a, 'tcx> {
fn consume(
&mut self,
_consume_id: hir::HirId,
@@ -651,6 +651,6 @@
}
}
-fn var_name(tcx: TyCtxt<'_, '_, '_>, var_hir_id: hir::HirId) -> ast::Name {
+fn var_name(tcx: TyCtxt<'_>, var_hir_id: hir::HirId) -> ast::Name {
tcx.hir().name_by_hir_id(var_hir_id)
}
diff --git a/src/librustc_typeck/check/wfcheck.rs b/src/librustc_typeck/check/wfcheck.rs
index 2b627a6..034ff5f 100644
--- a/src/librustc_typeck/check/wfcheck.rs
+++ b/src/librustc_typeck/check/wfcheck.rs
@@ -23,19 +23,19 @@
/// This is necessary because we can't write the following bound:
///
/// ```rust
-/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(FnCtxt<'b, 'gcx, 'tcx>)
+/// F: for<'b, 'tcx> where 'tcx FnOnce(FnCtxt<'b, 'tcx>)
/// ```
-struct CheckWfFcxBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- inherited: super::InheritedBuilder<'a, 'gcx, 'tcx>,
+struct CheckWfFcxBuilder<'tcx> {
+ inherited: super::InheritedBuilder<'tcx>,
id: hir::HirId,
span: Span,
param_env: ty::ParamEnv<'tcx>,
}
-impl<'a, 'gcx, 'tcx> CheckWfFcxBuilder<'a, 'gcx, 'tcx> {
- fn with_fcx<F>(&'tcx mut self, f: F) where
- F: for<'b> FnOnce(&FnCtxt<'b, 'gcx, 'tcx>,
- TyCtxt<'b, 'gcx, 'gcx>) -> Vec<Ty<'tcx>>
+impl<'tcx> CheckWfFcxBuilder<'tcx> {
+ fn with_fcx<F>(&mut self, f: F)
+ where
+ F: for<'b> FnOnce(&FnCtxt<'b, 'tcx>, TyCtxt<'tcx>) -> Vec<Ty<'tcx>>,
{
let id = self.id;
let span = self.span;
@@ -68,9 +68,9 @@
/// We do this check as a pre-pass before checking fn bodies because if these constraints are
/// not included it frequently leads to confusing errors in fn bodies. So it's better to check
/// the types first.
-pub fn check_item_well_formed<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
+pub fn check_item_well_formed<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) {
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
- let item = tcx.hir().expect_item_by_hir_id(hir_id);
+ let item = tcx.hir().expect_item(hir_id);
debug!("check_item_well_formed(it.hir_id={:?}, it.name={})",
item.hir_id,
@@ -156,7 +156,7 @@
}
}
-pub fn check_trait_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
+pub fn check_trait_item<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) {
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
let trait_item = tcx.hir().expect_trait_item(hir_id);
@@ -167,7 +167,7 @@
check_associated_item(tcx, trait_item.hir_id, trait_item.span, method_sig);
}
-pub fn check_impl_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
+pub fn check_impl_item<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) {
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
let impl_item = tcx.hir().expect_impl_item(hir_id);
@@ -178,10 +178,12 @@
check_associated_item(tcx, impl_item.hir_id, impl_item.span, method_sig);
}
-fn check_associated_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- item_id: hir::HirId,
- span: Span,
- sig_if_method: Option<&hir::MethodSig>) {
+fn check_associated_item<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ item_id: hir::HirId,
+ span: Span,
+ sig_if_method: Option<&hir::MethodSig>,
+) {
debug!("check_associated_item: {:?}", item_id);
let code = ObligationCauseCode::MiscObligation;
@@ -225,13 +227,11 @@
})
}
-fn for_item<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'gcx>, item: &hir::Item)
- -> CheckWfFcxBuilder<'a, 'gcx, 'tcx> {
+fn for_item<'tcx>(tcx: TyCtxt<'tcx>, item: &hir::Item) -> CheckWfFcxBuilder<'tcx> {
for_id(tcx, item.hir_id, item.span)
}
-fn for_id<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'gcx>, id: hir::HirId, span: Span)
- -> CheckWfFcxBuilder<'a, 'gcx, 'tcx> {
+fn for_id<'tcx>(tcx: TyCtxt<'tcx>, id: hir::HirId, span: Span) -> CheckWfFcxBuilder<'tcx> {
let def_id = tcx.hir().local_def_id_from_hir_id(id);
CheckWfFcxBuilder {
inherited: Inherited::build(tcx, def_id),
@@ -242,9 +242,13 @@
}
/// In a type definition, we check that to ensure that the types of the fields are well-formed.
-fn check_type_defn<'a, 'tcx, F>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- item: &hir::Item, all_sized: bool, mut lookup_fields: F)
- where F: for<'fcx, 'gcx, 'tcx2> FnMut(&FnCtxt<'fcx, 'gcx, 'tcx2>) -> Vec<AdtVariant<'tcx2>>
+fn check_type_defn<'tcx, F>(
+ tcx: TyCtxt<'tcx>,
+ item: &hir::Item,
+ all_sized: bool,
+ mut lookup_fields: F,
+) where
+ F: for<'fcx> FnMut(&FnCtxt<'fcx, 'tcx>) -> Vec<AdtVariant<'tcx>>,
{
for_item(tcx, item).with_fcx(|fcx, fcx_tcx| {
let variants = lookup_fields(fcx);
@@ -312,7 +316,7 @@
});
}
-fn check_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item: &hir::Item) {
+fn check_trait<'tcx>(tcx: TyCtxt<'tcx>, item: &hir::Item) {
debug!("check_trait: {:?}", item.hir_id);
let trait_def_id = tcx.hir().local_def_id_from_hir_id(item.hir_id);
@@ -335,7 +339,7 @@
});
}
-fn check_item_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item: &hir::Item) {
+fn check_item_fn<'tcx>(tcx: TyCtxt<'tcx>, item: &hir::Item) {
for_item(tcx, item).with_fcx(|fcx, tcx| {
let def_id = fcx.tcx.hir().local_def_id_from_hir_id(item.hir_id);
let sig = fcx.tcx.fn_sig(def_id);
@@ -347,8 +351,8 @@
})
}
-fn check_item_type<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn check_item_type<'tcx>(
+ tcx: TyCtxt<'tcx>,
item_id: hir::HirId,
ty_span: Span,
allow_foreign_ty: bool,
@@ -380,11 +384,12 @@
});
}
-fn check_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- item: &hir::Item,
- ast_self_ty: &hir::Ty,
- ast_trait_ref: &Option<hir::TraitRef>)
-{
+fn check_impl<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ item: &hir::Item,
+ ast_self_ty: &hir::Ty,
+ ast_trait_ref: &Option<hir::TraitRef>,
+) {
debug!("check_impl: {:?}", item);
for_item(tcx, item).with_fcx(|fcx, tcx| {
@@ -421,9 +426,9 @@
}
/// Checks where-clauses and inline bounds that are declared on `def_id`.
-fn check_where_clauses<'a, 'gcx, 'fcx, 'tcx>(
- tcx: TyCtxt<'a, 'gcx, 'gcx>,
- fcx: &FnCtxt<'fcx, 'gcx, 'tcx>,
+fn check_where_clauses<'tcx, 'fcx>(
+ tcx: TyCtxt<'tcx>,
+ fcx: &FnCtxt<'fcx, 'tcx>,
span: Span,
def_id: DefId,
return_ty: Option<Ty<'tcx>>,
@@ -574,13 +579,14 @@
}
}
-fn check_fn_or_method<'a, 'fcx, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'gcx>,
- fcx: &FnCtxt<'fcx, 'gcx, 'tcx>,
- span: Span,
- sig: ty::PolyFnSig<'tcx>,
- def_id: DefId,
- implied_bounds: &mut Vec<Ty<'tcx>>)
-{
+fn check_fn_or_method<'fcx, 'tcx>(
+ tcx: TyCtxt<'tcx>,
+ fcx: &FnCtxt<'fcx, 'tcx>,
+ span: Span,
+ sig: ty::PolyFnSig<'tcx>,
+ def_id: DefId,
+ implied_bounds: &mut Vec<Ty<'tcx>>,
+) {
let sig = fcx.normalize_associated_types_in(span, &sig);
let sig = fcx.tcx.liberate_late_bound_regions(def_id, &sig);
@@ -616,9 +622,9 @@
/// fn b<T>() -> Foo<T, u32> { .. }
/// ```
///
-fn check_existential_types<'a, 'fcx, 'gcx, 'tcx>(
- tcx: TyCtxt<'a, 'gcx, 'gcx>,
- fcx: &FnCtxt<'fcx, 'gcx, 'tcx>,
+fn check_existential_types<'fcx, 'tcx>(
+ tcx: TyCtxt<'tcx>,
+ fcx: &FnCtxt<'fcx, 'tcx>,
fn_def_id: DefId,
span: Span,
ty: Ty<'tcx>,
@@ -755,11 +761,12 @@
substituted_predicates
}
-fn check_method_receiver<'fcx, 'gcx, 'tcx>(fcx: &FnCtxt<'fcx, 'gcx, 'tcx>,
- method_sig: &hir::MethodSig,
- method: &ty::AssocItem,
- self_ty: Ty<'tcx>)
-{
+fn check_method_receiver<'fcx, 'tcx>(
+ fcx: &FnCtxt<'fcx, 'tcx>,
+ method_sig: &hir::MethodSig,
+ method: &ty::AssocItem,
+ self_ty: Ty<'tcx>,
+) {
// Check that the method has a valid receiver type, given the type `Self`.
debug!("check_method_receiver({:?}, self_ty={:?})",
method, self_ty);
@@ -838,8 +845,8 @@
/// N.B., there are cases this function returns `true` but causes an error to be emitted,
/// particularly when `receiver_ty` derefs to a type that is the same as `self_ty` but has the
/// wrong lifetime. Be careful of this if you are calling this function speculatively.
-fn receiver_is_valid<'fcx, 'tcx, 'gcx>(
- fcx: &FnCtxt<'fcx, 'gcx, 'tcx>,
+fn receiver_is_valid<'fcx, 'tcx>(
+ fcx: &FnCtxt<'fcx, 'tcx>,
span: Span,
receiver_ty: Ty<'tcx>,
self_ty: Ty<'tcx>,
@@ -915,7 +922,7 @@
};
let obligation = traits::Obligation::new(
- cause.clone(),
+ cause,
fcx.param_env,
trait_ref.to_predicate()
);
@@ -930,10 +937,11 @@
true
}
-fn check_variances_for_type_defn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- item: &hir::Item,
- hir_generics: &hir::Generics)
-{
+fn check_variances_for_type_defn<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ item: &hir::Item,
+ hir_generics: &hir::Generics,
+) {
let item_def_id = tcx.hir().local_def_id_from_hir_id(item.hir_id);
let ty = tcx.type_of(item_def_id);
if tcx.has_error_field(ty) {
@@ -971,10 +979,7 @@
}
}
-fn report_bivariance<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- span: Span,
- param_name: ast::Name)
-{
+fn report_bivariance<'tcx>(tcx: TyCtxt<'tcx>, span: Span, param_name: ast::Name) {
let mut err = error_392(tcx, span, param_name);
let suggested_marker_id = tcx.lang_items().phantom_data();
@@ -987,7 +992,7 @@
err.emit();
}
-fn reject_shadowing_parameters(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) {
+fn reject_shadowing_parameters(tcx: TyCtxt<'_>, def_id: DefId) {
let generics = tcx.generics_of(def_id);
let parent = tcx.generics_of(generics.parent.unwrap());
let impl_params: FxHashMap<_, _> = parent.params.iter().flat_map(|param| match param.kind {
@@ -1017,11 +1022,7 @@
/// Feature gates RFC 2056 -- trivial bounds, checking for global bounds that
/// aren't true.
-fn check_false_global_bounds<'a, 'gcx, 'tcx>(
- fcx: &FnCtxt<'a, 'gcx, 'tcx>,
- span: Span,
- id: hir::HirId)
-{
+fn check_false_global_bounds<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, span: Span, id: hir::HirId) {
let empty_env = ty::ParamEnv::empty();
let def_id = fcx.tcx.hir().local_def_id_from_hir_id(id);
@@ -1052,20 +1053,19 @@
fcx.select_all_obligations_or_error();
}
-pub struct CheckTypeWellFormedVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub struct CheckTypeWellFormedVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'gcx> CheckTypeWellFormedVisitor<'a, 'gcx> {
- pub fn new(tcx: TyCtxt<'a, 'gcx, 'gcx>)
- -> CheckTypeWellFormedVisitor<'a, 'gcx> {
+impl CheckTypeWellFormedVisitor<'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>) -> CheckTypeWellFormedVisitor<'tcx> {
CheckTypeWellFormedVisitor {
tcx,
}
}
}
-impl<'a, 'tcx> ParItemLikeVisitor<'tcx> for CheckTypeWellFormedVisitor<'a, 'tcx> {
+impl ParItemLikeVisitor<'tcx> for CheckTypeWellFormedVisitor<'tcx> {
fn visit_item(&self, i: &'tcx hir::Item) {
debug!("visit_item: {:?}", i);
let def_id = self.tcx.hir().local_def_id_from_hir_id(i.hir_id);
@@ -1097,7 +1097,7 @@
span: Span,
}
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
fn non_enum_variant(&self, struct_def: &hir::VariantData) -> AdtVariant<'tcx> {
let fields = struct_def.fields().iter().map(|field| {
let field_ty = self.tcx.type_of(self.tcx.hir().local_def_id_from_hir_id(field.hir_id));
@@ -1134,15 +1134,18 @@
}
}
-fn error_392<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, span: Span, param_name: ast::Name)
- -> DiagnosticBuilder<'tcx> {
+fn error_392<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ span: Span,
+ param_name: ast::Name,
+) -> DiagnosticBuilder<'tcx> {
let mut err = struct_span_err!(tcx.sess, span, E0392,
"parameter `{}` is never used", param_name);
err.span_label(span, "unused parameter");
err
}
-fn error_194(tcx: TyCtxt<'_, '_, '_>, span: Span, trait_decl_span: Span, name: &str) {
+fn error_194(tcx: TyCtxt<'_>, span: Span, trait_decl_span: Span, name: &str) {
struct_span_err!(tcx.sess, span, E0194,
"type parameter `{}` shadows another type parameter of the same name",
name)
diff --git a/src/librustc_typeck/check/writeback.rs b/src/librustc_typeck/check/writeback.rs
index 1bc7119..ffc323f 100644
--- a/src/librustc_typeck/check/writeback.rs
+++ b/src/librustc_typeck/check/writeback.rs
@@ -31,10 +31,10 @@
// so instead all of the replacement happens at the end in
// resolve_type_vars_in_body, which creates a new TypeTables which
// doesn't contain any inference types.
-impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
- pub fn resolve_type_vars_in_body(&self, body: &'gcx hir::Body) -> &'gcx ty::TypeckTables<'gcx> {
+impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
+ pub fn resolve_type_vars_in_body(&self, body: &'tcx hir::Body) -> &'tcx ty::TypeckTables<'tcx> {
let item_id = self.tcx.hir().body_owner(body.id());
- let item_def_id = self.tcx.hir().local_def_id(item_id);
+ let item_def_id = self.tcx.hir().local_def_id_from_hir_id(item_id);
// This attribute causes us to dump some writeback information
// in the form of errors, which is useful for unit tests.
@@ -47,8 +47,7 @@
// Type only exists for constants and statics, not functions.
match self.tcx.hir().body_owner_kind(item_id) {
hir::BodyOwnerKind::Const | hir::BodyOwnerKind::Static(_) => {
- let item_hir_id = self.tcx.hir().node_to_hir_id(item_id);
- wbcx.visit_node_id(body.value.span, item_hir_id);
+ wbcx.visit_node_id(body.value.span, item_id);
}
hir::BodyOwnerKind::Closure | hir::BodyOwnerKind::Fn => (),
}
@@ -97,22 +96,22 @@
// there, it applies a few ad-hoc checks that were not convenient to
// do elsewhere.
-struct WritebackCx<'cx, 'gcx: 'cx + 'tcx, 'tcx: 'cx> {
- fcx: &'cx FnCtxt<'cx, 'gcx, 'tcx>,
+struct WritebackCx<'cx, 'tcx: 'cx> {
+ fcx: &'cx FnCtxt<'cx, 'tcx>,
- tables: ty::TypeckTables<'gcx>,
+ tables: ty::TypeckTables<'tcx>,
- body: &'gcx hir::Body,
+ body: &'tcx hir::Body,
rustc_dump_user_substs: bool,
}
-impl<'cx, 'gcx, 'tcx> WritebackCx<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
fn new(
- fcx: &'cx FnCtxt<'cx, 'gcx, 'tcx>,
- body: &'gcx hir::Body,
+ fcx: &'cx FnCtxt<'cx, 'tcx>,
+ body: &'tcx hir::Body,
rustc_dump_user_substs: bool,
- ) -> WritebackCx<'cx, 'gcx, 'tcx> {
+ ) -> WritebackCx<'cx, 'tcx> {
let owner = body.id().hir_id;
WritebackCx {
@@ -123,11 +122,11 @@
}
}
- fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.fcx.tcx
}
- fn write_ty_to_tables(&mut self, hir_id: hir::HirId, ty: Ty<'gcx>) {
+ fn write_ty_to_tables(&mut self, hir_id: hir::HirId, ty: Ty<'tcx>) {
debug!("write_ty_to_tables({:?}, {:?})", hir_id, ty);
assert!(!ty.needs_infer() && !ty.has_placeholders());
self.tables.node_types_mut().insert(hir_id, ty);
@@ -234,12 +233,12 @@
// below. In general, a function is made into a `visitor` if it must
// traffic in node-ids or update tables in the type context etc.
-impl<'cx, 'gcx, 'tcx> Visitor<'gcx> for WritebackCx<'cx, 'gcx, 'tcx> {
- fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'gcx> {
+impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {
+ fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::None
}
- fn visit_expr(&mut self, e: &'gcx hir::Expr) {
+ fn visit_expr(&mut self, e: &'tcx hir::Expr) {
self.fix_scalar_builtin_expr(e);
self.fix_index_builtin_expr(e);
@@ -268,12 +267,12 @@
intravisit::walk_expr(self, e);
}
- fn visit_block(&mut self, b: &'gcx hir::Block) {
+ fn visit_block(&mut self, b: &'tcx hir::Block) {
self.visit_node_id(b.span, b.hir_id);
intravisit::walk_block(self, b);
}
- fn visit_pat(&mut self, p: &'gcx hir::Pat) {
+ fn visit_pat(&mut self, p: &'tcx hir::Pat) {
match p.node {
hir::PatKind::Binding(..) => {
if let Some(&bm) = self.fcx.tables.borrow().pat_binding_modes().get(p.hir_id) {
@@ -298,14 +297,14 @@
intravisit::walk_pat(self, p);
}
- fn visit_local(&mut self, l: &'gcx hir::Local) {
+ fn visit_local(&mut self, l: &'tcx hir::Local) {
intravisit::walk_local(self, l);
let var_ty = self.fcx.local_ty(l.span, l.hir_id).decl_ty;
let var_ty = self.resolve(&var_ty, &l.span);
self.write_ty_to_tables(l.hir_id, var_ty);
}
- fn visit_ty(&mut self, hir_ty: &'gcx hir::Ty) {
+ fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty) {
intravisit::walk_ty(self, hir_ty);
let ty = self.fcx.node_ty(hir_ty.hir_id);
let ty = self.resolve(&ty, &hir_ty.span);
@@ -313,7 +312,7 @@
}
}
-impl<'cx, 'gcx, 'tcx> WritebackCx<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
fn visit_upvar_capture_map(&mut self) {
for (upvar_id, upvar_capture) in self.fcx.tables.borrow().upvar_capture_map.iter() {
let new_upvar_capture = match *upvar_capture {
@@ -399,7 +398,7 @@
if let ty::UserType::TypeOf(_, user_substs) = c_ty.value {
if self.rustc_dump_user_substs {
// This is a unit-testing mechanism.
- let span = self.tcx().hir().span_by_hir_id(hir_id);
+ let span = self.tcx().hir().span(hir_id);
// We need to buffer the errors in order to guarantee a consistent
// order when emitting them.
let err = self.tcx().sess.struct_span_err(
@@ -746,7 +745,7 @@
fn resolve<T>(&self, x: &T, span: &dyn Locatable) -> T::Lifted
where
- T: TypeFoldable<'tcx> + ty::Lift<'gcx>,
+ T: TypeFoldable<'tcx> + ty::Lift<'tcx>,
{
let x = x.fold_with(&mut Resolver::new(self.fcx, span, self.body));
if let Some(lifted) = self.tcx().lift_to_global(&x) {
@@ -762,25 +761,25 @@
}
trait Locatable {
- fn to_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span;
+ fn to_span(&self, tcx: TyCtxt<'_>) -> Span;
}
impl Locatable for Span {
- fn to_span(&self, _: TyCtxt<'_, '_, '_>) -> Span {
+ fn to_span(&self, _: TyCtxt<'_>) -> Span {
*self
}
}
impl Locatable for DefIndex {
- fn to_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span {
+ fn to_span(&self, tcx: TyCtxt<'_>) -> Span {
let hir_id = tcx.hir().def_index_to_hir_id(*self);
- tcx.hir().span_by_hir_id(hir_id)
+ tcx.hir().span(hir_id)
}
}
impl Locatable for hir::HirId {
- fn to_span(&self, tcx: TyCtxt<'_, '_, '_>) -> Span {
- tcx.hir().span_by_hir_id(*self)
+ fn to_span(&self, tcx: TyCtxt<'_>) -> Span {
+ tcx.hir().span(*self)
}
}
@@ -788,19 +787,19 @@
// The Resolver. This is the type folding engine that detects
// unresolved types and so forth.
-struct Resolver<'cx, 'gcx: 'cx + 'tcx, 'tcx: 'cx> {
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+struct Resolver<'cx, 'tcx: 'cx> {
+ tcx: TyCtxt<'tcx>,
+ infcx: &'cx InferCtxt<'cx, 'tcx>,
span: &'cx dyn Locatable,
- body: &'gcx hir::Body,
+ body: &'tcx hir::Body,
}
-impl<'cx, 'gcx, 'tcx> Resolver<'cx, 'gcx, 'tcx> {
+impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
fn new(
- fcx: &'cx FnCtxt<'cx, 'gcx, 'tcx>,
+ fcx: &'cx FnCtxt<'cx, 'tcx>,
span: &'cx dyn Locatable,
- body: &'gcx hir::Body,
- ) -> Resolver<'cx, 'gcx, 'tcx> {
+ body: &'tcx hir::Body,
+ ) -> Resolver<'cx, 'tcx> {
Resolver {
tcx: fcx.tcx,
infcx: fcx,
@@ -818,8 +817,8 @@
}
}
-impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Resolver<'cx, 'gcx, 'tcx> {
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'gcx, 'tcx> {
+impl<'cx, 'tcx> TypeFolder<'tcx> for Resolver<'cx, 'tcx> {
+ fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
self.tcx
}
diff --git a/src/librustc_typeck/check_unused.rs b/src/librustc_typeck/check_unused.rs
index 3c3509f..dda8677 100644
--- a/src/librustc_typeck/check_unused.rs
+++ b/src/librustc_typeck/check_unused.rs
@@ -13,7 +13,7 @@
use rustc_data_structures::fx::FxHashMap;
-pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn check_crate<'tcx>(tcx: TyCtxt<'tcx>) {
let mut used_trait_imports = DefIdSet::default();
for &body_id in tcx.hir().krate().bodies.keys() {
let item_def_id = tcx.hir().body_owner_def_id(body_id);
@@ -28,7 +28,7 @@
unused_crates_lint(tcx);
}
-impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for CheckVisitor<'a, 'tcx> {
+impl ItemLikeVisitor<'v> for CheckVisitor<'tcx> {
fn visit_item(&mut self, item: &hir::Item) {
if item.vis.node.is_pub() || item.span.is_dummy() {
return;
@@ -45,12 +45,12 @@
}
}
-struct CheckVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct CheckVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
used_trait_imports: DefIdSet,
}
-impl<'a, 'tcx> CheckVisitor<'a, 'tcx> {
+impl CheckVisitor<'tcx> {
fn check_import(&self, id: hir::HirId, span: Span) {
let def_id = self.tcx.hir().local_def_id_from_hir_id(id);
if !self.tcx.maybe_unused_trait_import(def_id) {
@@ -70,7 +70,7 @@
}
}
-fn unused_crates_lint<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>) {
+fn unused_crates_lint<'tcx>(tcx: TyCtxt<'tcx>) {
let lint = lint::builtin::UNUSED_EXTERN_CRATES;
// Collect first the crates that are completely unused. These we
@@ -121,7 +121,7 @@
for extern_crate in &crates_to_lint {
let id = tcx.hir().as_local_hir_id(extern_crate.def_id).unwrap();
- let item = tcx.hir().expect_item_by_hir_id(id);
+ let item = tcx.hir().expect_item(id);
// If the crate is fully unused, we suggest removing it altogether.
// We do this in any edition.
@@ -195,7 +195,7 @@
}
struct CollectExternCrateVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
crates_to_lint: &'a mut Vec<ExternCrateToLint>,
}
diff --git a/src/librustc_typeck/coherence/builtin.rs b/src/librustc_typeck/coherence/builtin.rs
index 1be0248..e392622 100644
--- a/src/librustc_typeck/coherence/builtin.rs
+++ b/src/librustc_typeck/coherence/builtin.rs
@@ -17,7 +17,7 @@
use hir::Node;
use rustc::hir::{self, ItemKind};
-pub fn check_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_def_id: DefId) {
+pub fn check_trait<'tcx>(tcx: TyCtxt<'tcx>, trait_def_id: DefId) {
Checker { tcx, trait_def_id }
.check(tcx.lang_items().drop_trait(), visit_implementation_of_drop)
.check(tcx.lang_items().copy_trait(), visit_implementation_of_copy)
@@ -26,14 +26,15 @@
visit_implementation_of_dispatch_from_dyn);
}
-struct Checker<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- trait_def_id: DefId
+struct Checker<'tcx> {
+ tcx: TyCtxt<'tcx>,
+ trait_def_id: DefId,
}
-impl<'a, 'tcx> Checker<'a, 'tcx> {
+impl<'tcx> Checker<'tcx> {
fn check<F>(&self, trait_def_id: Option<DefId>, mut f: F) -> &Self
- where F: FnMut(TyCtxt<'a, 'tcx, 'tcx>, DefId)
+ where
+ F: FnMut(TyCtxt<'tcx>, DefId),
{
if Some(self.trait_def_id) == trait_def_id {
for &impl_id in self.tcx.hir().trait_impls(self.trait_def_id) {
@@ -45,7 +46,7 @@
}
}
-fn visit_implementation_of_drop<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_did: DefId) {
+fn visit_implementation_of_drop<'tcx>(tcx: TyCtxt<'tcx>, impl_did: DefId) {
if let ty::Adt(..) = tcx.type_of(impl_did).sty {
/* do nothing */
} else {
@@ -73,7 +74,7 @@
}
}
-fn visit_implementation_of_copy<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_did: DefId) {
+fn visit_implementation_of_copy<'tcx>(tcx: TyCtxt<'tcx>, impl_did: DefId) {
debug!("visit_implementation_of_copy: impl_did={:?}", impl_did);
let impl_hir_id = if let Some(n) = tcx.hir().as_local_hir_id(impl_did) {
@@ -87,7 +88,7 @@
debug!("visit_implementation_of_copy: self_type={:?} (bound)",
self_type);
- let span = tcx.hir().span_by_hir_id(impl_hir_id);
+ let span = tcx.hir().span(impl_hir_id);
let param_env = tcx.param_env(impl_did);
assert!(!self_type.has_escaping_bound_vars());
@@ -97,7 +98,7 @@
match param_env.can_type_implement_copy(tcx, self_type) {
Ok(()) => {}
Err(CopyImplementationError::InfrigingFields(fields)) => {
- let item = tcx.hir().expect_item_by_hir_id(impl_hir_id);
+ let item = tcx.hir().expect_item(impl_hir_id);
let span = if let ItemKind::Impl(.., Some(ref tr), _, _) = item.node {
tr.path.span
} else {
@@ -114,7 +115,7 @@
err.emit()
}
Err(CopyImplementationError::NotAnAdt) => {
- let item = tcx.hir().expect_item_by_hir_id(impl_hir_id);
+ let item = tcx.hir().expect_item(impl_hir_id);
let span = if let ItemKind::Impl(.., ref ty, _) = item.node {
ty.span
} else {
@@ -140,7 +141,7 @@
}
}
-fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_did: DefId) {
+fn visit_implementation_of_coerce_unsized(tcx: TyCtxt<'tcx>, impl_did: DefId) {
debug!("visit_implementation_of_coerce_unsized: impl_did={:?}",
impl_did);
@@ -153,17 +154,14 @@
}
}
-fn visit_implementation_of_dispatch_from_dyn<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- impl_did: DefId,
-) {
+fn visit_implementation_of_dispatch_from_dyn<'tcx>(tcx: TyCtxt<'tcx>, impl_did: DefId) {
debug!("visit_implementation_of_dispatch_from_dyn: impl_did={:?}",
impl_did);
if impl_did.is_local() {
let dispatch_from_dyn_trait = tcx.lang_items().dispatch_from_dyn_trait().unwrap();
let impl_hir_id = tcx.hir().as_local_hir_id(impl_did).unwrap();
- let span = tcx.hir().span_by_hir_id(impl_hir_id);
+ let span = tcx.hir().span(impl_hir_id);
let source = tcx.type_of(impl_did);
assert!(!source.has_escaping_bound_vars());
@@ -324,9 +322,7 @@
}
}
-pub fn coerce_unsized_info<'a, 'gcx>(gcx: TyCtxt<'a, 'gcx, 'gcx>,
- impl_did: DefId)
- -> CoerceUnsizedInfo {
+pub fn coerce_unsized_info<'tcx>(gcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUnsizedInfo {
debug!("compute_coerce_unsized_info(impl_did={:?})", impl_did);
let coerce_unsized_trait = gcx.lang_items().coerce_unsized_trait().unwrap();
@@ -347,7 +343,7 @@
source,
target);
- let span = gcx.hir().span_by_hir_id(impl_hir_id);
+ let span = gcx.hir().span(impl_hir_id);
let param_env = gcx.param_env(impl_did);
assert!(!source.has_escaping_bound_vars());
@@ -359,9 +355,9 @@
gcx.infer_ctxt().enter(|infcx| {
let cause = ObligationCause::misc(span, impl_hir_id);
- let check_mutbl = |mt_a: ty::TypeAndMut<'gcx>,
- mt_b: ty::TypeAndMut<'gcx>,
- mk_ptr: &dyn Fn(Ty<'gcx>) -> Ty<'gcx>| {
+ let check_mutbl = |mt_a: ty::TypeAndMut<'tcx>,
+ mt_b: ty::TypeAndMut<'tcx>,
+ mk_ptr: &dyn Fn(Ty<'tcx>) -> Ty<'tcx>| {
if (mt_a.mutbl, mt_b.mutbl) == (hir::MutImmutable, hir::MutMutable) {
infcx.report_mismatched_types(&cause,
mk_ptr(mt_b.ty),
@@ -484,11 +480,11 @@
being coerced, none found");
return err_info;
} else if diff_fields.len() > 1 {
- let item = gcx.hir().expect_item_by_hir_id(impl_hir_id);
+ let item = gcx.hir().expect_item(impl_hir_id);
let span = if let ItemKind::Impl(.., Some(ref t), _, _) = item.node {
t.path.span
} else {
- gcx.hir().span_by_hir_id(impl_hir_id)
+ gcx.hir().span(impl_hir_id)
};
let mut err = struct_span_err!(gcx.sess,
diff --git a/src/librustc_typeck/coherence/inherent_impls.rs b/src/librustc_typeck/coherence/inherent_impls.rs
index 644d959..6088c03 100644
--- a/src/librustc_typeck/coherence/inherent_impls.rs
+++ b/src/librustc_typeck/coherence/inherent_impls.rs
@@ -17,9 +17,10 @@
use syntax_pos::Span;
/// On-demand query: yields a map containing all types mapped to their inherent impls.
-pub fn crate_inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- crate_num: CrateNum)
- -> &'tcx CrateInherentImpls {
+pub fn crate_inherent_impls<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ crate_num: CrateNum,
+) -> &'tcx CrateInherentImpls {
assert_eq!(crate_num, LOCAL_CRATE);
let krate = tcx.hir().krate();
@@ -32,9 +33,7 @@
}
/// On-demand query: yields a vector of the inherent impls for a specific type.
-pub fn inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty_def_id: DefId)
- -> &'tcx [DefId] {
+pub fn inherent_impls<'tcx>(tcx: TyCtxt<'tcx>, ty_def_id: DefId) -> &'tcx [DefId] {
assert!(ty_def_id.is_local());
// NB. Until we adopt the red-green dep-tracking algorithm (see
@@ -68,12 +67,12 @@
result
}
-struct InherentCollect<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct InherentCollect<'tcx> {
+ tcx: TyCtxt<'tcx>,
impls_map: CrateInherentImpls,
}
-impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentCollect<'a, 'tcx> {
+impl ItemLikeVisitor<'v> for InherentCollect<'tcx> {
fn visit_item(&mut self, item: &hir::Item) {
let ty = match item.node {
hir::ItemKind::Impl(.., None, ref ty, _) => ty,
@@ -277,7 +276,7 @@
}
}
-impl<'a, 'tcx> InherentCollect<'a, 'tcx> {
+impl InherentCollect<'tcx> {
fn check_def_id(&mut self, item: &hir::Item, def_id: DefId) {
if def_id.is_local() {
// Add the implementation to the mapping from implementation to base
diff --git a/src/librustc_typeck/coherence/inherent_impls_overlap.rs b/src/librustc_typeck/coherence/inherent_impls_overlap.rs
index a9951c7..aae1b17 100644
--- a/src/librustc_typeck/coherence/inherent_impls_overlap.rs
+++ b/src/librustc_typeck/coherence/inherent_impls_overlap.rs
@@ -5,18 +5,17 @@
use rustc::traits::{self, IntercrateMode};
use rustc::ty::TyCtxt;
-pub fn crate_inherent_impls_overlap_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- crate_num: CrateNum) {
+pub fn crate_inherent_impls_overlap_check<'tcx>(tcx: TyCtxt<'tcx>, crate_num: CrateNum) {
assert_eq!(crate_num, LOCAL_CRATE);
let krate = tcx.hir().krate();
krate.visit_all_item_likes(&mut InherentOverlapChecker { tcx });
}
-struct InherentOverlapChecker<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>
+struct InherentOverlapChecker<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> InherentOverlapChecker<'a, 'tcx> {
+impl InherentOverlapChecker<'tcx> {
fn check_for_common_items_in_impls(&self, impl1: DefId, impl2: DefId,
overlap: traits::OverlapResult<'_>) {
@@ -83,7 +82,7 @@
}
}
-impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentOverlapChecker<'a, 'tcx> {
+impl ItemLikeVisitor<'v> for InherentOverlapChecker<'tcx> {
fn visit_item(&mut self, item: &'v hir::Item) {
match item.node {
hir::ItemKind::Enum(..) |
diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs
index 40f01eb..4336e86 100644
--- a/src/librustc_typeck/coherence/mod.rs
+++ b/src/librustc_typeck/coherence/mod.rs
@@ -18,7 +18,7 @@
mod orphan;
mod unsafety;
-fn check_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, hir_id: HirId) {
+fn check_impl<'tcx>(tcx: TyCtxt<'tcx>, hir_id: HirId) {
let impl_def_id = tcx.hir().local_def_id_from_hir_id(hir_id);
// If there are no traits, then this implementation must have a
@@ -40,11 +40,7 @@
}
}
-fn enforce_trait_manually_implementable(
- tcx: TyCtxt<'_, '_, '_>,
- impl_def_id: DefId,
- trait_def_id: DefId
-) {
+fn enforce_trait_manually_implementable(tcx: TyCtxt<'_>, impl_def_id: DefId, trait_def_id: DefId) {
let did = Some(trait_def_id);
let li = tcx.lang_items();
let span = tcx.sess.source_map().def_span(tcx.span_of_impl(impl_def_id).unwrap());
@@ -96,11 +92,7 @@
/// We allow impls of marker traits to overlap, so they can't override impls
/// as that could make it ambiguous which associated item to use.
-fn enforce_empty_impls_for_marker_traits(
- tcx: TyCtxt<'_, '_, '_>,
- impl_def_id: DefId,
- trait_def_id: DefId
-) {
+fn enforce_empty_impls_for_marker_traits(tcx: TyCtxt<'_>, impl_def_id: DefId, trait_def_id: DefId) {
if !tcx.trait_def(trait_def_id).is_marker {
return;
}
@@ -132,7 +124,7 @@
};
}
-fn coherent_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
+fn coherent_trait<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) {
let impls = tcx.hir().trait_impls(def_id);
for &impl_id in impls {
check_impl(tcx, impl_id);
@@ -143,7 +135,7 @@
builtin::check_trait(tcx, def_id);
}
-pub fn check_coherence<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn check_coherence<'tcx>(tcx: TyCtxt<'tcx>) {
for &trait_def_id in tcx.hir().krate().trait_impls.keys() {
tcx.ensure().coherent_trait(trait_def_id);
}
@@ -159,7 +151,7 @@
/// Overlap: no two impls for the same trait are implemented for the
/// same type. Likewise, no two inherent impls for a given type
/// constructor provide a method with the same name.
-fn check_impl_overlap<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, hir_id: HirId) {
+fn check_impl_overlap<'tcx>(tcx: TyCtxt<'tcx>, hir_id: HirId) {
let impl_def_id = tcx.hir().local_def_id_from_hir_id(hir_id);
let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
let trait_def_id = trait_ref.def_id;
diff --git a/src/librustc_typeck/coherence/orphan.rs b/src/librustc_typeck/coherence/orphan.rs
index 7e1c38e..4e6fcfe 100644
--- a/src/librustc_typeck/coherence/orphan.rs
+++ b/src/librustc_typeck/coherence/orphan.rs
@@ -6,16 +6,16 @@
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::hir;
-pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn check<'tcx>(tcx: TyCtxt<'tcx>) {
let mut orphan = OrphanChecker { tcx };
tcx.hir().krate().visit_all_item_likes(&mut orphan);
}
-struct OrphanChecker<'cx, 'tcx: 'cx> {
- tcx: TyCtxt<'cx, 'tcx, 'tcx>,
+struct OrphanChecker<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'cx, 'tcx, 'v> ItemLikeVisitor<'v> for OrphanChecker<'cx, 'tcx> {
+impl ItemLikeVisitor<'v> for OrphanChecker<'tcx> {
/// Checks exactly one impl for orphan rules and other such
/// restrictions. In this fn, it can happen that multiple errors
/// apply to a specific impl, so just return after reporting one
@@ -26,7 +26,7 @@
// "Trait" impl
if let hir::ItemKind::Impl(.., Some(_), _, _) = item.node {
debug!("coherence2::orphan check: trait impl {}",
- self.tcx.hir().hir_to_string(item.hir_id));
+ self.tcx.hir().node_to_string(item.hir_id));
let trait_ref = self.tcx.impl_trait_ref(def_id).unwrap();
let trait_def_id = trait_ref.def_id;
let cm = self.tcx.sess.source_map();
diff --git a/src/librustc_typeck/coherence/unsafety.rs b/src/librustc_typeck/coherence/unsafety.rs
index 0b1de51..c41a0e1 100644
--- a/src/librustc_typeck/coherence/unsafety.rs
+++ b/src/librustc_typeck/coherence/unsafety.rs
@@ -5,16 +5,16 @@
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::hir::{self, Unsafety};
-pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn check<'tcx>(tcx: TyCtxt<'tcx>) {
let mut unsafety = UnsafetyChecker { tcx };
tcx.hir().krate().visit_all_item_likes(&mut unsafety);
}
-struct UnsafetyChecker<'cx, 'tcx: 'cx> {
- tcx: TyCtxt<'cx, 'tcx, 'tcx>,
+struct UnsafetyChecker<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'cx, 'tcx, 'v> UnsafetyChecker<'cx, 'tcx> {
+impl UnsafetyChecker<'tcx> {
fn check_unsafety_coherence(&mut self,
item: &'v hir::Item,
impl_generics: Option<&hir::Generics>,
@@ -69,7 +69,7 @@
}
}
-impl<'cx, 'tcx, 'v> ItemLikeVisitor<'v> for UnsafetyChecker<'cx, 'tcx> {
+impl ItemLikeVisitor<'v> for UnsafetyChecker<'tcx> {
fn visit_item(&mut self, item: &'v hir::Item) {
if let hir::ItemKind::Impl(unsafety, polarity, _, ref generics, ..) = item.node {
self.check_unsafety_coherence(item, Some(generics), unsafety, polarity);
diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs
index 5d91794..5606d9c 100644
--- a/src/librustc_typeck/collect.rs
+++ b/src/librustc_typeck/collect.rs
@@ -56,7 +56,7 @@
///////////////////////////////////////////////////////////////////////////
// Main entry point
-fn collect_mod_item_types<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) {
+fn collect_mod_item_types<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
tcx.hir().visit_item_likes_in_module(
module_def_id,
&mut CollectItemTypesVisitor { tcx }.as_deep_visitor()
@@ -96,18 +96,18 @@
/// `ItemCtxt` is parameterized by a `DefId` that it uses to satisfy
/// `get_type_parameter_bounds` requests, drawing the information from
/// the AST (`hir::Generics`), recursively.
-pub struct ItemCtxt<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+pub struct ItemCtxt<'tcx> {
+ tcx: TyCtxt<'tcx>,
item_def_id: DefId,
}
///////////////////////////////////////////////////////////////////////////
-struct CollectItemTypesVisitor<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct CollectItemTypesVisitor<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> Visitor<'tcx> for CollectItemTypesVisitor<'a, 'tcx> {
+impl Visitor<'tcx> for CollectItemTypesVisitor<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::OnlyBodies(&self.tcx.hir())
}
@@ -160,20 +160,18 @@
///////////////////////////////////////////////////////////////////////////
// Utility types and common code for the above passes.
-impl<'a, 'tcx> ItemCtxt<'a, 'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_def_id: DefId) -> ItemCtxt<'a, 'tcx> {
+impl ItemCtxt<'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, item_def_id: DefId) -> ItemCtxt<'tcx> {
ItemCtxt { tcx, item_def_id }
}
-}
-impl<'a, 'tcx> ItemCtxt<'a, 'tcx> {
pub fn to_ty(&self, ast_ty: &hir::Ty) -> Ty<'tcx> {
AstConv::ast_ty_to_ty(self, ast_ty)
}
}
-impl<'a, 'tcx> AstConv<'tcx, 'tcx> for ItemCtxt<'a, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
+impl AstConv<'tcx> for ItemCtxt<'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
@@ -255,8 +253,8 @@
}
}
-fn type_param_predicates<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn type_param_predicates<'tcx>(
+ tcx: TyCtxt<'tcx>,
(item_def_id, def_id): (DefId, DefId),
) -> &'tcx ty::GenericPredicates<'tcx> {
use rustc::hir::*;
@@ -333,7 +331,7 @@
tcx.arena.alloc(result)
}
-impl<'a, 'tcx> ItemCtxt<'a, 'tcx> {
+impl ItemCtxt<'tcx> {
/// Finds bounds from `hir::Generics`. This requires scanning through the
/// AST. We do this to avoid having to convert *all* the bounds, which
/// would create artificial cycles. Instead we can only convert the
@@ -383,11 +381,7 @@
/// parameter with ID `param_id`. We use this so as to avoid running
/// `ast_ty_to_ty`, because we want to avoid triggering an all-out
/// conversion of the type to avoid inducing unnecessary cycles.
-fn is_param<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ast_ty: &hir::Ty,
- param_id: hir::HirId,
-) -> bool {
+fn is_param<'tcx>(tcx: TyCtxt<'tcx>, ast_ty: &hir::Ty, param_id: hir::HirId) -> bool {
if let hir::TyKind::Path(hir::QPath::Resolved(None, ref path)) = ast_ty.node {
match path.res {
Res::SelfTy(Some(def_id), None) | Res::Def(DefKind::TyParam, def_id) => {
@@ -400,8 +394,8 @@
}
}
-fn convert_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_id: hir::HirId) {
- let it = tcx.hir().expect_item_by_hir_id(item_id);
+fn convert_item<'tcx>(tcx: TyCtxt<'tcx>, item_id: hir::HirId) {
+ let it = tcx.hir().expect_item(item_id);
debug!("convert: item {} with id {}", it.ident, it.hir_id);
let def_id = tcx.hir().local_def_id_from_hir_id(item_id);
match it.node {
@@ -482,7 +476,7 @@
}
}
-fn convert_trait_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_item_id: hir::HirId) {
+fn convert_trait_item<'tcx>(tcx: TyCtxt<'tcx>, trait_item_id: hir::HirId) {
let trait_item = tcx.hir().expect_trait_item(trait_item_id);
let def_id = tcx.hir().local_def_id_from_hir_id(trait_item.hir_id);
tcx.generics_of(def_id);
@@ -503,7 +497,7 @@
tcx.predicates_of(def_id);
}
-fn convert_impl_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_item_id: hir::HirId) {
+fn convert_impl_item<'tcx>(tcx: TyCtxt<'tcx>, impl_item_id: hir::HirId) {
let def_id = tcx.hir().local_def_id_from_hir_id(impl_item_id);
tcx.generics_of(def_id);
tcx.type_of(def_id);
@@ -513,18 +507,14 @@
}
}
-fn convert_variant_ctor<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ctor_id: hir::HirId) {
+fn convert_variant_ctor<'tcx>(tcx: TyCtxt<'tcx>, ctor_id: hir::HirId) {
let def_id = tcx.hir().local_def_id_from_hir_id(ctor_id);
tcx.generics_of(def_id);
tcx.type_of(def_id);
tcx.predicates_of(def_id);
}
-fn convert_enum_variant_types<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- variants: &[hir::Variant],
-) {
+fn convert_enum_variant_types<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, variants: &[hir::Variant]) {
let def = tcx.adt_def(def_id);
let repr_type = def.repr.discr_type();
let initial = repr_type.initial_discriminant(tcx);
@@ -572,15 +562,15 @@
}
}
-fn convert_variant<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn convert_variant<'tcx>(
+ tcx: TyCtxt<'tcx>,
variant_did: Option<DefId>,
ctor_did: Option<DefId>,
ident: Ident,
discr: ty::VariantDiscr,
def: &hir::VariantData,
adt_kind: ty::AdtKind,
- parent_did: DefId
+ parent_did: DefId,
) -> ty::VariantDef {
let mut seen_fields: FxHashMap<ast::Ident, Span> = Default::default();
let hir_id = tcx.hir().as_local_hir_id(variant_did.unwrap_or(parent_did)).unwrap();
@@ -629,7 +619,7 @@
)
}
-fn adt_def<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx ty::AdtDef {
+fn adt_def<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx ty::AdtDef {
use rustc::hir::*;
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
@@ -696,8 +686,8 @@
/// Ensures that the super-predicates of the trait with a `DefId`
/// of `trait_def_id` are converted and stored. This also ensures that
/// the transitive super-predicates are converted.
-fn super_predicates_of<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn super_predicates_of<'tcx>(
+ tcx: TyCtxt<'tcx>,
trait_def_id: DefId,
) -> &'tcx ty::GenericPredicates<'tcx> {
debug!("super_predicates(trait_def_id={:?})", trait_def_id);
@@ -750,9 +740,9 @@
})
}
-fn trait_def<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx ty::TraitDef {
+fn trait_def<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx ty::TraitDef {
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
- let item = tcx.hir().expect_item_by_hir_id(hir_id);
+ let item = tcx.hir().expect_item(hir_id);
let (is_auto, unsafety) = match item.node {
hir::ItemKind::Trait(is_auto, unsafety, ..) => (is_auto == hir::IsAuto::Yes, unsafety),
@@ -781,17 +771,14 @@
tcx.arena.alloc(def)
}
-fn has_late_bound_regions<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- node: Node<'tcx>,
-) -> Option<Span> {
- struct LateBoundRegionsDetector<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn has_late_bound_regions<'tcx>(tcx: TyCtxt<'tcx>, node: Node<'tcx>) -> Option<Span> {
+ struct LateBoundRegionsDetector<'tcx> {
+ tcx: TyCtxt<'tcx>,
outer_index: ty::DebruijnIndex,
has_late_bound_regions: Option<Span>,
}
- impl<'a, 'tcx> Visitor<'tcx> for LateBoundRegionsDetector<'a, 'tcx> {
+ impl Visitor<'tcx> for LateBoundRegionsDetector<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::None
}
@@ -842,8 +829,8 @@
}
}
- fn has_late_bound_regions<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ fn has_late_bound_regions<'tcx>(
+ tcx: TyCtxt<'tcx>,
generics: &'tcx hir::Generics,
decl: &'tcx hir::FnDecl,
) -> Option<Span> {
@@ -892,7 +879,7 @@
}
}
-fn generics_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx ty::Generics {
+fn generics_of<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx ty::Generics {
use rustc::hir::*;
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
@@ -1135,7 +1122,7 @@
})
}
-fn report_assoc_ty_on_inherent_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, span: Span) {
+fn report_assoc_ty_on_inherent_impl<'tcx>(tcx: TyCtxt<'tcx>, span: Span) {
span_err!(
tcx.sess,
span,
@@ -1144,7 +1131,7 @@
);
}
-fn type_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Ty<'tcx> {
+fn type_of<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Ty<'tcx> {
checked_type_of(tcx, def_id, true).unwrap()
}
@@ -1152,11 +1139,7 @@
///
/// If you want to fail anyway, you can set the `fail` parameter to true, but in this case,
/// you'd better just call [`type_of`] directly.
-pub fn checked_type_of<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- fail: bool,
-) -> Option<Ty<'tcx>> {
+pub fn checked_type_of<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, fail: bool) -> Option<Ty<'tcx>> {
use rustc::hir::*;
let hir_id = match tcx.hir().as_local_hir_id(def_id) {
@@ -1194,7 +1177,7 @@
ImplItemKind::Const(ref ty, _) => icx.to_ty(ty),
ImplItemKind::Existential(_) => {
if tcx
- .impl_trait_ref(tcx.hir().get_parent_did_by_hir_id(hir_id))
+ .impl_trait_ref(tcx.hir().get_parent_did(hir_id))
.is_none()
{
report_assoc_ty_on_inherent_impl(tcx, item.span);
@@ -1204,7 +1187,7 @@
}
ImplItemKind::Type(ref ty) => {
if tcx
- .impl_trait_ref(tcx.hir().get_parent_did_by_hir_id(hir_id))
+ .impl_trait_ref(tcx.hir().get_parent_did(hir_id))
.is_none()
{
report_assoc_ty_on_inherent_impl(tcx, item.span);
@@ -1289,7 +1272,7 @@
..
}) => match *def {
VariantData::Unit(..) | VariantData::Struct(..) => {
- tcx.type_of(tcx.hir().get_parent_did_by_hir_id(hir_id))
+ tcx.type_of(tcx.hir().get_parent_did(hir_id))
}
VariantData::Tuple(..) => {
let substs = InternalSubsts::identity_for_item(tcx, def_id);
@@ -1342,7 +1325,7 @@
..
}) if e.hir_id == hir_id =>
{
- tcx.adt_def(tcx.hir().get_parent_did_by_hir_id(hir_id))
+ tcx.adt_def(tcx.hir().get_parent_did(hir_id))
.repr
.discr_type()
.to_ty(tcx)
@@ -1481,16 +1464,13 @@
})
}
-fn find_existential_constraints<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
-) -> Ty<'tcx> {
+fn find_existential_constraints<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Ty<'tcx> {
use rustc::hir::{ImplItem, Item, TraitItem};
debug!("find_existential_constraints({:?})", def_id);
- struct ConstraintLocator<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ struct ConstraintLocator<'tcx> {
+ tcx: TyCtxt<'tcx>,
def_id: DefId,
// (first found type span, actual type, mapping from the existential type's generic
// parameters to the concrete type's generic parameters)
@@ -1501,7 +1481,7 @@
found: Option<(Span, Ty<'tcx>, Vec<usize>)>,
}
- impl<'a, 'tcx> ConstraintLocator<'a, 'tcx> {
+ impl ConstraintLocator<'tcx> {
fn check(&mut self, def_id: DefId) {
// Don't try to check items that cannot possibly constrain the type.
if !self.tcx.has_typeck_tables(def_id) {
@@ -1638,7 +1618,7 @@
}
}
- impl<'a, 'tcx> intravisit::Visitor<'tcx> for ConstraintLocator<'a, 'tcx> {
+ impl<'tcx> intravisit::Visitor<'tcx> for ConstraintLocator<'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'tcx> {
intravisit::NestedVisitorMap::All(&self.tcx.hir())
}
@@ -1702,7 +1682,7 @@
}
}
-fn fn_sig<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> ty::PolyFnSig<'tcx> {
+fn fn_sig<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> ty::PolyFnSig<'tcx> {
use rustc::hir::*;
use rustc::hir::Node::*;
@@ -1729,7 +1709,7 @@
node: ForeignItemKind::Fn(ref fn_decl, _, _),
..
}) => {
- let abi = tcx.hir().get_foreign_abi_by_hir_id(hir_id);
+ let abi = tcx.hir().get_foreign_abi(hir_id);
compute_sig_of_foreign_fn_decl(tcx, def_id, fn_decl, abi)
}
@@ -1737,7 +1717,7 @@
node: hir::VariantKind { data, .. },
..
}) if data.ctor_hir_id().is_some() => {
- let ty = tcx.type_of(tcx.hir().get_parent_did_by_hir_id(hir_id));
+ let ty = tcx.type_of(tcx.hir().get_parent_did(hir_id));
let inputs = data.fields()
.iter()
.map(|f| tcx.type_of(tcx.hir().local_def_id_from_hir_id(f.hir_id)));
@@ -1778,14 +1758,11 @@
}
}
-fn impl_trait_ref<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
-) -> Option<ty::TraitRef<'tcx>> {
+fn impl_trait_ref<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Option<ty::TraitRef<'tcx>> {
let icx = ItemCtxt::new(tcx, def_id);
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
- match tcx.hir().expect_item_by_hir_id(hir_id).node {
+ match tcx.hir().expect_item(hir_id).node {
hir::ItemKind::Impl(.., ref opt_trait_ref, _, _) => {
opt_trait_ref.as_ref().map(|ast_trait_ref| {
let selfty = tcx.type_of(def_id);
@@ -1796,9 +1773,9 @@
}
}
-fn impl_polarity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> hir::ImplPolarity {
+fn impl_polarity<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> hir::ImplPolarity {
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
- match tcx.hir().expect_item_by_hir_id(hir_id).node {
+ match tcx.hir().expect_item(hir_id).node {
hir::ItemKind::Impl(_, polarity, ..) => polarity,
ref item => bug!("impl_polarity: {:?} not an impl", item),
}
@@ -1809,8 +1786,8 @@
/// the lifetimes that are declared. For fns or methods, we have to
/// screen out those that do not appear in any where-clauses etc using
/// `resolve_lifetime::early_bound_lifetimes`.
-fn early_bound_lifetimes_from_generics<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn early_bound_lifetimes_from_generics<'a, 'tcx: 'a>(
+ tcx: TyCtxt<'tcx>,
generics: &'a hir::Generics,
) -> impl Iterator<Item = &'a hir::GenericParam> + Captures<'tcx> {
generics
@@ -1827,8 +1804,8 @@
/// Returns a list of type predicates for the definition with ID `def_id`, including inferred
/// lifetime constraints. This includes all predicates returned by `explicit_predicates_of`, plus
/// inferred constraints concerning which regions outlive other regions.
-fn predicates_defined_on<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn predicates_defined_on<'tcx>(
+ tcx: TyCtxt<'tcx>,
def_id: DefId,
) -> &'tcx ty::GenericPredicates<'tcx> {
debug!("predicates_defined_on({:?})", def_id);
@@ -1857,10 +1834,7 @@
/// Returns a list of all type predicates (explicit and implicit) for the definition with
/// ID `def_id`. This includes all predicates returned by `predicates_defined_on`, plus
/// `Self: Trait` predicates for traits.
-fn predicates_of<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
-) -> &'tcx ty::GenericPredicates<'tcx> {
+fn predicates_of<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx ty::GenericPredicates<'tcx> {
let mut result = tcx.predicates_defined_on(def_id);
if tcx.is_trait(def_id) {
@@ -1887,8 +1861,8 @@
/// Returns a list of user-specified type predicates for the definition with ID `def_id`.
/// N.B., this does not include any implied/inferred constraints.
-fn explicit_predicates_of<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn explicit_predicates_of<'tcx>(
+ tcx: TyCtxt<'tcx>,
def_id: DefId,
) -> &'tcx ty::GenericPredicates<'tcx> {
use rustc::hir::*;
@@ -2229,7 +2203,7 @@
/// predicates) to one (`T: Foo`) to many (`T: Bar<X=i32>` adds `T: Bar`
/// and `<T as Bar>::X == i32`).
fn predicates_from_bound<'tcx>(
- astconv: &dyn AstConv<'tcx, 'tcx>,
+ astconv: &dyn AstConv<'tcx>,
param_ty: Ty<'tcx>,
bound: &hir::GenericBound,
) -> Vec<(ty::Predicate<'tcx>, Span)> {
@@ -2250,8 +2224,8 @@
}
}
-fn compute_sig_of_foreign_fn_decl<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn compute_sig_of_foreign_fn_decl<'tcx>(
+ tcx: TyCtxt<'tcx>,
def_id: DefId,
decl: &hir::FnDecl,
abi: abi::Abi,
@@ -2295,7 +2269,7 @@
fty
}
-fn is_foreign_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> bool {
+fn is_foreign_item<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
match tcx.hir().get_if_local(def_id) {
Some(Node::ForeignItem(..)) => true,
Some(_) => false,
@@ -2303,10 +2277,7 @@
}
}
-fn static_mutability<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
-) -> Option<hir::Mutability> {
+fn static_mutability<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Option<hir::Mutability> {
match tcx.hir().get_if_local(def_id) {
Some(Node::Item(&hir::Item {
node: hir::ItemKind::Static(_, mutbl, _), ..
@@ -2320,7 +2291,7 @@
}
fn from_target_feature(
- tcx: TyCtxt<'_, '_, '_>,
+ tcx: TyCtxt<'_>,
id: DefId,
attr: &ast::Attribute,
whitelist: &FxHashMap<String, Option<Symbol>>,
@@ -2414,7 +2385,7 @@
}
}
-fn linkage_by_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId, name: &str) -> Linkage {
+fn linkage_by_name<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, name: &str) -> Linkage {
use rustc::mir::mono::Linkage::*;
// Use the names from src/llvm/docs/LangRef.rst here. Most types are only
@@ -2449,7 +2420,7 @@
}
}
-fn codegen_fn_attrs<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefId) -> CodegenFnAttrs {
+fn codegen_fn_attrs<'tcx>(tcx: TyCtxt<'tcx>, id: DefId) -> CodegenFnAttrs {
let attrs = tcx.get_attrs(id);
let mut codegen_fn_attrs = CodegenFnAttrs::new();
diff --git a/src/librustc_typeck/constrained_generic_params.rs b/src/librustc_typeck/constrained_generic_params.rs
index 49910e3..79a04b9 100644
--- a/src/librustc_typeck/constrained_generic_params.rs
+++ b/src/librustc_typeck/constrained_generic_params.rs
@@ -86,11 +86,12 @@
}
}
-pub fn identify_constrained_generic_params<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>,
- predicates: &ty::GenericPredicates<'tcx>,
- impl_trait_ref: Option<ty::TraitRef<'tcx>>,
- input_parameters: &mut FxHashSet<Parameter>)
-{
+pub fn identify_constrained_generic_params<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ predicates: &ty::GenericPredicates<'tcx>,
+ impl_trait_ref: Option<ty::TraitRef<'tcx>>,
+ input_parameters: &mut FxHashSet<Parameter>,
+) {
let mut predicates = predicates.predicates.clone();
setup_constraining_predicates(tcx, &mut predicates, impl_trait_ref, input_parameters);
}
@@ -136,11 +137,12 @@
/// which is determined by 1, which requires `U`, that is determined
/// by 0. I should probably pick a less tangled example, but I can't
/// think of any.
-pub fn setup_constraining_predicates<'tcx>(tcx: TyCtxt<'_, '_, '_>,
- predicates: &mut [(ty::Predicate<'tcx>, Span)],
- impl_trait_ref: Option<ty::TraitRef<'tcx>>,
- input_parameters: &mut FxHashSet<Parameter>)
-{
+pub fn setup_constraining_predicates<'tcx>(
+ tcx: TyCtxt<'_>,
+ predicates: &mut [(ty::Predicate<'tcx>, Span)],
+ impl_trait_ref: Option<ty::TraitRef<'tcx>>,
+ input_parameters: &mut FxHashSet<Parameter>,
+) {
// The canonical way of doing the needed topological sort
// would be a DFS, but getting the graph and its ownership
// right is annoying, so I am using an in-place fixed-point iteration,
diff --git a/src/librustc_typeck/error_codes.rs b/src/librustc_typeck/error_codes.rs
index 8fb7049..115ee0f 100644
--- a/src/librustc_typeck/error_codes.rs
+++ b/src/librustc_typeck/error_codes.rs
@@ -3938,7 +3938,7 @@
// Another example
-let v = 0 as *const u8; // So here, `v` is a `*const u8`.
+let v = core::ptr::null::<u8>(); // So here, `v` is a `*const u8`.
v as &u8; // error: non-primitive cast: `*const u8` as `&u8`
```
@@ -3948,7 +3948,7 @@
let x = 0u8;
x as u32; // ok!
-let v = 0 as *const u8;
+let v = core::ptr::null::<u8>();
v as *const i8; // ok!
```
@@ -3988,7 +3988,7 @@
Erroneous code example:
```compile_fail,E0607
-let v = 0 as *const u8;
+let v = core::ptr::null::<u8>();
v as *const [u8];
```
diff --git a/src/librustc_typeck/impl_wf_check.rs b/src/librustc_typeck/impl_wf_check.rs
index 87476d3..b833d85 100644
--- a/src/librustc_typeck/impl_wf_check.rs
+++ b/src/librustc_typeck/impl_wf_check.rs
@@ -49,7 +49,7 @@
/// impl<'a> Trait<Foo> for Bar { type X = &'a i32; }
/// // ^ 'a is unused and appears in assoc type, error
/// ```
-pub fn impl_wf_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn impl_wf_check<'tcx>(tcx: TyCtxt<'tcx>) {
// We will tag this as part of the WF check -- logically, it is,
// but it's one that we must perform earlier than the rest of
// WfCheck.
@@ -58,7 +58,7 @@
}
}
-fn check_mod_impl_wf<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) {
+fn check_mod_impl_wf<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
tcx.hir().visit_item_likes_in_module(
module_def_id,
&mut ImplWfCheck { tcx }
@@ -72,11 +72,11 @@
};
}
-struct ImplWfCheck<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct ImplWfCheck<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> ItemLikeVisitor<'tcx> for ImplWfCheck<'a, 'tcx> {
+impl ItemLikeVisitor<'tcx> for ImplWfCheck<'tcx> {
fn visit_item(&mut self, item: &'tcx hir::Item) {
if let hir::ItemKind::Impl(.., ref impl_item_refs) = item.node {
let impl_def_id = self.tcx.hir().local_def_id_from_hir_id(item.hir_id);
@@ -92,10 +92,11 @@
fn visit_impl_item(&mut self, _impl_item: &'tcx hir::ImplItem) { }
}
-fn enforce_impl_params_are_constrained<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- impl_def_id: DefId,
- impl_item_refs: &[hir::ImplItemRef])
-{
+fn enforce_impl_params_are_constrained<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ impl_def_id: DefId,
+ impl_item_refs: &[hir::ImplItemRef],
+) {
// Every lifetime used in an associated type must be constrained.
let impl_self_ty = tcx.type_of(impl_def_id);
let impl_generics = tcx.generics_of(impl_def_id);
@@ -171,11 +172,7 @@
// used elsewhere are not projected back out.
}
-fn report_unused_parameter(tcx: TyCtxt<'_, '_, '_>,
- span: Span,
- kind: &str,
- name: &str)
-{
+fn report_unused_parameter(tcx: TyCtxt<'_>, span: Span, kind: &str, name: &str) {
struct_span_err!(
tcx.sess, span, E0207,
"the {} parameter `{}` is not constrained by the \
@@ -186,9 +183,7 @@
}
/// Enforce that we do not have two items in an impl with the same name.
-fn enforce_impl_items_are_distinct<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- impl_item_refs: &[hir::ImplItemRef])
-{
+fn enforce_impl_items_are_distinct<'tcx>(tcx: TyCtxt<'tcx>, impl_item_refs: &[hir::ImplItemRef]) {
let mut seen_type_items = FxHashMap::default();
let mut seen_value_items = FxHashMap::default();
for impl_item_ref in impl_item_refs {
diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs
index 14dec42..cc6f7a0 100644
--- a/src/librustc_typeck/lib.rs
+++ b/src/librustc_typeck/lib.rs
@@ -63,10 +63,12 @@
#![feature(box_syntax)]
#![feature(crate_visibility_modifier)]
#![feature(exhaustive_patterns)]
+#![feature(in_band_lifetimes)]
#![feature(nll)]
#![feature(rustc_diagnostic_macros)]
#![feature(slice_patterns)]
#![feature(never_type)]
+#![feature(inner_deref)]
#![recursion_limit="256"]
@@ -123,8 +125,7 @@
ty: Ty<'tcx>,
}
-fn check_type_alias_enum_variants_enabled<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- span: Span) {
+fn check_type_alias_enum_variants_enabled<'tcx>(tcx: TyCtxt<'tcx>, span: Span) {
if !tcx.features().type_alias_enum_variants {
let mut err = tcx.sess.struct_span_err(
span,
@@ -139,10 +140,7 @@
}
}
-fn require_c_abi_if_c_variadic(tcx: TyCtxt<'_, '_, '_>,
- decl: &hir::FnDecl,
- abi: Abi,
- span: Span) {
+fn require_c_abi_if_c_variadic(tcx: TyCtxt<'_>, decl: &hir::FnDecl, abi: Abi, span: Span) {
if decl.c_variadic && !(abi == Abi::C || abi == Abi::Cdecl) {
let mut err = struct_span_err!(tcx.sess, span, E0045,
"C-variadic function must have C or cdecl calling convention");
@@ -150,11 +148,12 @@
}
}
-fn require_same_types<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- cause: &ObligationCause<'tcx>,
- expected: Ty<'tcx>,
- actual: Ty<'tcx>)
- -> bool {
+fn require_same_types<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ cause: &ObligationCause<'tcx>,
+ expected: Ty<'tcx>,
+ actual: Ty<'tcx>,
+) -> bool {
tcx.infer_ctxt().enter(|ref infcx| {
let param_env = ty::ParamEnv::empty();
let mut fulfill_cx = TraitEngine::new(infcx.tcx);
@@ -178,7 +177,7 @@
})
}
-fn check_main_fn_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, main_def_id: DefId) {
+fn check_main_fn_ty<'tcx>(tcx: TyCtxt<'tcx>, main_def_id: DefId) {
let main_id = tcx.hir().as_local_hir_id(main_def_id).unwrap();
let main_span = tcx.def_span(main_def_id);
let main_t = tcx.type_of(main_def_id);
@@ -243,7 +242,7 @@
}
}
-fn check_start_fn_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, start_def_id: DefId) {
+fn check_start_fn_ty<'tcx>(tcx: TyCtxt<'tcx>, start_def_id: DefId) {
let start_id = tcx.hir().as_local_hir_id(start_def_id).unwrap();
let start_span = tcx.def_span(start_def_id);
let start_t = tcx.type_of(start_def_id);
@@ -300,7 +299,7 @@
}
}
-fn check_for_entry_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+fn check_for_entry_fn<'tcx>(tcx: TyCtxt<'tcx>) {
match tcx.entry_fn(LOCAL_CRATE) {
Some((def_id, EntryFnType::Main)) => check_main_fn_ty(tcx, def_id),
Some((def_id, EntryFnType::Start)) => check_start_fn_ty(tcx, def_id),
@@ -317,9 +316,7 @@
impl_wf_check::provide(providers);
}
-pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> Result<(), ErrorReported>
-{
+pub fn check_crate<'tcx>(tcx: TyCtxt<'tcx>) -> Result<(), ErrorReported> {
tcx.sess.profiler(|p| p.start_activity("type-check crate"));
// this ensures that later parts of type checking can assume that items
@@ -380,7 +377,7 @@
/// A quasi-deprecated helper used in rustdoc and clippy to get
/// the type from a HIR node.
-pub fn hir_ty_to_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, hir_ty: &hir::Ty) -> Ty<'tcx> {
+pub fn hir_ty_to_ty<'tcx>(tcx: TyCtxt<'tcx>, hir_ty: &hir::Ty) -> Ty<'tcx> {
// In case there are any projections, etc., find the "environment"
// def-ID that will be used to determine the traits/predicates in
// scope. This is derived from the enclosing item-like thing.
@@ -391,8 +388,10 @@
astconv::AstConv::ast_ty_to_ty(&item_cx, hir_ty)
}
-pub fn hir_trait_to_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, hir_trait: &hir::TraitRef)
- -> (ty::PolyTraitRef<'tcx>, Bounds<'tcx>) {
+pub fn hir_trait_to_predicates<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ hir_trait: &hir::TraitRef,
+) -> (ty::PolyTraitRef<'tcx>, Bounds<'tcx>) {
// In case there are any projections, etc., find the "environment"
// def-ID that will be used to determine the traits/predicates in
// scope. This is derived from the enclosing item-like thing.
diff --git a/src/librustc_typeck/outlives/explicit.rs b/src/librustc_typeck/outlives/explicit.rs
index 574086f..40a5778 100644
--- a/src/librustc_typeck/outlives/explicit.rs
+++ b/src/librustc_typeck/outlives/explicit.rs
@@ -18,7 +18,7 @@
pub fn explicit_predicates_of(
&mut self,
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
def_id: DefId,
) -> &RequiredPredicates<'tcx> {
self.map.entry(def_id).or_insert_with(|| {
diff --git a/src/librustc_typeck/outlives/implicit_infer.rs b/src/librustc_typeck/outlives/implicit_infer.rs
index b560f3b..a2f9a2b 100644
--- a/src/librustc_typeck/outlives/implicit_infer.rs
+++ b/src/librustc_typeck/outlives/implicit_infer.rs
@@ -15,7 +15,7 @@
/// was generated by walking the items in the crate. This will
/// now be filled with inferred predicates.
pub fn infer_predicates<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
explicit_map: &mut ExplicitPredicatesMap<'tcx>,
) -> FxHashMap<DefId, RequiredPredicates<'tcx>> {
debug!("infer_predicates");
@@ -44,7 +44,7 @@
}
pub struct InferVisitor<'cx, 'tcx: 'cx> {
- tcx: TyCtxt<'cx, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
global_inferred_outlives: &'cx mut FxHashMap<DefId, RequiredPredicates<'tcx>>,
predicates_added: &'cx mut bool,
explicit_map: &'cx mut ExplicitPredicatesMap<'tcx>,
@@ -117,7 +117,7 @@
}
fn insert_required_predicates_to_be_wf<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
field_ty: Ty<'tcx>,
global_inferred_outlives: &FxHashMap<DefId, RequiredPredicates<'tcx>>,
required_predicates: &mut RequiredPredicates<'tcx>,
@@ -255,7 +255,7 @@
/// can ignore, but we will want to process `U: 'static`,
/// applying the substitution as above.
pub fn check_explicit_predicates<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
def_id: &DefId,
substs: &[Kind<'tcx>],
required_predicates: &mut RequiredPredicates<'tcx>,
diff --git a/src/librustc_typeck/outlives/mod.rs b/src/librustc_typeck/outlives/mod.rs
index 57787a7..ad538b0 100644
--- a/src/librustc_typeck/outlives/mod.rs
+++ b/src/librustc_typeck/outlives/mod.rs
@@ -20,8 +20,8 @@
};
}
-fn inferred_outlives_of<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+fn inferred_outlives_of<'tcx>(
+ tcx: TyCtxt<'tcx>,
item_def_id: DefId,
) -> &'tcx [ty::Predicate<'tcx>] {
let id = tcx
@@ -71,7 +71,7 @@
}
fn inferred_outlives_crate<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
crate_num: CrateNum,
) -> &'tcx CratePredicatesMap<'tcx> {
assert_eq!(crate_num, LOCAL_CRATE);
diff --git a/src/librustc_typeck/outlives/test.rs b/src/librustc_typeck/outlives/test.rs
index 54fd4fa..4690cb9 100644
--- a/src/librustc_typeck/outlives/test.rs
+++ b/src/librustc_typeck/outlives/test.rs
@@ -3,17 +3,17 @@
use rustc::ty::TyCtxt;
use syntax::symbol::sym;
-pub fn test_inferred_outlives<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn test_inferred_outlives<'tcx>(tcx: TyCtxt<'tcx>) {
tcx.hir()
.krate()
.visit_all_item_likes(&mut OutlivesTest { tcx });
}
-struct OutlivesTest<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+struct OutlivesTest<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> ItemLikeVisitor<'tcx> for OutlivesTest<'a, 'tcx> {
+impl ItemLikeVisitor<'tcx> for OutlivesTest<'tcx> {
fn visit_item(&mut self, item: &'tcx hir::Item) {
let item_def_id = self.tcx.hir().local_def_id_from_hir_id(item.hir_id);
diff --git a/src/librustc_typeck/outlives/utils.rs b/src/librustc_typeck/outlives/utils.rs
index ee552ca..c6b0da3 100644
--- a/src/librustc_typeck/outlives/utils.rs
+++ b/src/librustc_typeck/outlives/utils.rs
@@ -11,7 +11,7 @@
/// Given a requirement `T: 'a` or `'b: 'a`, deduce the
/// outlives_component and add it to `required_predicates`
pub fn insert_outlives_predicate<'tcx>(
- tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
kind: Kind<'tcx>,
outlived_region: Region<'tcx>,
required_predicates: &mut RequiredPredicates<'tcx>,
@@ -125,7 +125,7 @@
}
}
-fn is_free_region<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, region: Region<'_>) -> bool {
+fn is_free_region<'tcx>(tcx: TyCtxt<'tcx>, region: Region<'_>) -> bool {
// First, screen for regions that might appear in a type header.
match region {
// These correspond to `T: 'a` relationships:
diff --git a/src/librustc_typeck/variance/constraints.rs b/src/librustc_typeck/variance/constraints.rs
index 4f82978..59213ac 100644
--- a/src/librustc_typeck/variance/constraints.rs
+++ b/src/librustc_typeck/variance/constraints.rs
@@ -124,7 +124,7 @@
self.build_constraints_for_item(def_id);
}
- fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.terms_cx.tcx
}
diff --git a/src/librustc_typeck/variance/mod.rs b/src/librustc_typeck/variance/mod.rs
index 47c4a9b..5dbd667 100644
--- a/src/librustc_typeck/variance/mod.rs
+++ b/src/librustc_typeck/variance/mod.rs
@@ -34,8 +34,7 @@
};
}
-fn crate_variances<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum)
- -> &'tcx CrateVariancesMap<'tcx> {
+fn crate_variances<'tcx>(tcx: TyCtxt<'tcx>, crate_num: CrateNum) -> &'tcx CrateVariancesMap<'tcx> {
assert_eq!(crate_num, LOCAL_CRATE);
let mut arena = arena::TypedArena::default();
let terms_cx = terms::determine_parameters_to_be_inferred(tcx, &mut arena);
@@ -43,12 +42,11 @@
tcx.arena.alloc(solve::solve_constraints(constraints_cx))
}
-fn variances_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_def_id: DefId)
- -> &'tcx [ty::Variance] {
+fn variances_of<'tcx>(tcx: TyCtxt<'tcx>, item_def_id: DefId) -> &'tcx [ty::Variance] {
let id = tcx.hir().as_local_hir_id(item_def_id).expect("expected local def-id");
let unsupported = || {
// Variance not relevant.
- span_bug!(tcx.hir().span_by_hir_id(id), "asked to compute variance for wrong kind of item")
+ span_bug!(tcx.hir().span(id), "asked to compute variance for wrong kind of item")
};
match tcx.hir().get_by_hir_id(id) {
Node::Item(item) => match item.node {
diff --git a/src/librustc_typeck/variance/terms.rs b/src/librustc_typeck/variance/terms.rs
index efb221b..99f87cc 100644
--- a/src/librustc_typeck/variance/terms.rs
+++ b/src/librustc_typeck/variance/terms.rs
@@ -48,7 +48,7 @@
// The first pass over the crate simply builds up the set of inferreds.
pub struct TermsContext<'a, 'tcx: 'a> {
- pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ pub tcx: TyCtxt<'tcx>,
pub arena: &'a TypedArena<VarianceTerm<'a>>,
// For marker types, UnsafeCell, and other lang items where
@@ -64,9 +64,10 @@
pub inferred_terms: Vec<VarianceTermPtr<'a>>,
}
-pub fn determine_parameters_to_be_inferred<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- arena: &'a mut TypedArena<VarianceTerm<'a>>)
- -> TermsContext<'a, 'tcx> {
+pub fn determine_parameters_to_be_inferred<'a, 'tcx>(
+ tcx: TyCtxt<'tcx>,
+ arena: &'a mut TypedArena<VarianceTerm<'a>>,
+) -> TermsContext<'a, 'tcx> {
let mut terms_cx = TermsContext {
tcx,
arena,
@@ -85,7 +86,7 @@
terms_cx
}
-fn lang_items(tcx: TyCtxt<'_, '_, '_>) -> Vec<(hir::HirId, Vec<ty::Variance>)> {
+fn lang_items(tcx: TyCtxt<'_>) -> Vec<(hir::HirId, Vec<ty::Variance>)> {
let lang_items = tcx.lang_items();
let all = vec![
(lang_items.phantom_data(), vec![ty::Covariant]),
@@ -128,7 +129,7 @@
impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for TermsContext<'a, 'tcx> {
fn visit_item(&mut self, item: &hir::Item) {
debug!("add_inferreds for item {}",
- self.tcx.hir().hir_to_string(item.hir_id));
+ self.tcx.hir().node_to_string(item.hir_id));
match item.node {
hir::ItemKind::Struct(ref struct_def, _) |
diff --git a/src/librustc_typeck/variance/test.rs b/src/librustc_typeck/variance/test.rs
index b519582..cefc200 100644
--- a/src/librustc_typeck/variance/test.rs
+++ b/src/librustc_typeck/variance/test.rs
@@ -3,15 +3,15 @@
use rustc::ty::TyCtxt;
use syntax::symbol::sym;
-pub fn test_variance<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+pub fn test_variance<'tcx>(tcx: TyCtxt<'tcx>) {
tcx.hir().krate().visit_all_item_likes(&mut VarianceTest { tcx });
}
-struct VarianceTest<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>
+struct VarianceTest<'tcx> {
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'tcx> ItemLikeVisitor<'tcx> for VarianceTest<'a, 'tcx> {
+impl ItemLikeVisitor<'tcx> for VarianceTest<'tcx> {
fn visit_item(&mut self, item: &'tcx hir::Item) {
let item_def_id = self.tcx.hir().local_def_id_from_hir_id(item.hir_id);
diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs
index 7b58312..3dcf77e 100644
--- a/src/librustdoc/clean/auto_trait.rs
+++ b/src/librustdoc/clean/auto_trait.rs
@@ -7,7 +7,7 @@
pub struct AutoTraitFinder<'a, 'tcx> {
pub cx: &'a core::DocContext<'tcx>,
- pub f: auto_trait::AutoTraitFinder<'a, 'tcx>,
+ pub f: auto_trait::AutoTraitFinder<'tcx>,
}
impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
@@ -313,10 +313,10 @@
lifetime_predicates
}
- fn extract_for_generics<'b, 'c, 'd>(
+ fn extract_for_generics(
&self,
- tcx: TyCtxt<'b, 'c, 'd>,
- pred: ty::Predicate<'d>,
+ tcx: TyCtxt<'tcx>,
+ pred: ty::Predicate<'tcx>,
) -> FxHashSet<GenericParamDef> {
pred.walk_tys()
.flat_map(|t| {
@@ -448,13 +448,13 @@
// * Fn bounds are handled specially - instead of leaving it as 'T: Fn(), <T as Fn::Output> =
// K', we use the dedicated syntax 'T: Fn() -> K'
// * We explcitly add a '?Sized' bound if we didn't find any 'Sized' predicates for a type
- fn param_env_to_generics<'b, 'c, 'cx>(
+ fn param_env_to_generics(
&self,
- tcx: TyCtxt<'b, 'c, 'cx>,
+ tcx: TyCtxt<'tcx>,
param_env_def_id: DefId,
- param_env: ty::ParamEnv<'cx>,
+ param_env: ty::ParamEnv<'tcx>,
mut existing_predicates: Vec<WherePredicate>,
- vid_to_region: FxHashMap<ty::RegionVid, ty::Region<'cx>>,
+ vid_to_region: FxHashMap<ty::RegionVid, ty::Region<'tcx>>,
) -> Generics {
debug!(
"param_env_to_generics(param_env_def_id={:?}, param_env={:?}, \
@@ -776,7 +776,7 @@
vec.sort_by_cached_key(|x| format!("{:?}", x))
}
- fn is_fn_ty(&self, tcx: TyCtxt<'_, '_, '_>, ty: &Type) -> bool {
+ fn is_fn_ty(&self, tcx: TyCtxt<'_>, ty: &Type) -> bool {
match &ty {
&&Type::ResolvedPath { ref did, .. } => {
*did == tcx.require_lang_item(lang_items::FnTraitLangItem)
@@ -789,13 +789,13 @@
}
// Replaces all ReVars in a type with ty::Region's, using the provided map
-struct RegionReplacer<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
+struct RegionReplacer<'a, 'tcx> {
vid_to_region: &'a FxHashMap<ty::RegionVid, ty::Region<'tcx>>,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
}
-impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionReplacer<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
+impl<'a, 'tcx> TypeFolder<'tcx> for RegionReplacer<'a, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
self.tcx
}
diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs
index 5a5540e..c14ae59 100644
--- a/src/librustdoc/clean/inline.rs
+++ b/src/librustdoc/clean/inline.rs
@@ -305,7 +305,7 @@
}
let for_ = if let Some(hir_id) = tcx.hir().as_local_hir_id(did) {
- match tcx.hir().expect_item_by_hir_id(hir_id).node {
+ match tcx.hir().expect_item(hir_id).node {
hir::ItemKind::Impl(.., ref t, _) => {
t.clean(cx)
}
@@ -327,7 +327,7 @@
let predicates = tcx.explicit_predicates_of(did);
let (trait_items, generics) = if let Some(hir_id) = tcx.hir().as_local_hir_id(did) {
- match tcx.hir().expect_item_by_hir_id(hir_id).node {
+ match tcx.hir().expect_item(hir_id).node {
hir::ItemKind::Impl(.., ref gen, _, _, ref item_ids) => {
(
item_ids.iter()
diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
index e44c35b..4becb42 100644
--- a/src/librustdoc/clean/mod.rs
+++ b/src/librustdoc/clean/mod.rs
@@ -276,7 +276,7 @@
};
let primitives = if root.is_local() {
cx.tcx.hir().krate().module.item_ids.iter().filter_map(|&id| {
- let item = cx.tcx.hir().expect_item_by_hir_id(id.id);
+ let item = cx.tcx.hir().expect_item(id.id);
match item.node {
hir::ItemKind::Mod(_) => {
as_primitive(Res::Def(
@@ -320,7 +320,7 @@
};
let keywords = if root.is_local() {
cx.tcx.hir().krate().module.item_ids.iter().filter_map(|&id| {
- let item = cx.tcx.hir().expect_item_by_hir_id(id.id);
+ let item = cx.tcx.hir().expect_item(id.id);
match item.node {
hir::ItemKind::Mod(_) => {
as_keyword(Res::Def(
@@ -2777,7 +2777,7 @@
},
TyKind::Tup(ref tys) => Tuple(tys.clean(cx)),
TyKind::Def(item_id, _) => {
- let item = cx.tcx.hir().expect_item_by_hir_id(item_id.id);
+ let item = cx.tcx.hir().expect_item(item_id.id);
if let hir::ItemKind::Existential(ref ty) = item.node {
ImplTrait(ty.bounds.clean(cx))
} else {
@@ -2799,7 +2799,7 @@
// Substitute private type aliases
if let Some(hir_id) = cx.tcx.hir().as_local_hir_id(def_id) {
if !cx.renderinfo.borrow().access_levels.is_exported(def_id) {
- alias = Some(&cx.tcx.hir().expect_item_by_hir_id(hir_id).node);
+ alias = Some(&cx.tcx.hir().expect_item(hir_id).node);
}
}
};
@@ -4432,7 +4432,7 @@
// Start of code copied from rust-clippy
-pub fn path_to_def_local(tcx: TyCtxt<'_, '_, '_>, path: &[Symbol]) -> Option<DefId> {
+pub fn path_to_def_local(tcx: TyCtxt<'_>, path: &[Symbol]) -> Option<DefId> {
let krate = tcx.hir().krate();
let mut items = krate.module.item_ids.clone();
let mut path_it = path.iter().peekable();
@@ -4441,7 +4441,7 @@
let segment = path_it.next()?;
for item_id in mem::replace(&mut items, HirVec::new()).iter() {
- let item = tcx.hir().expect_item_by_hir_id(item_id.id);
+ let item = tcx.hir().expect_item(item_id.id);
if item.ident.name == *segment {
if path_it.peek().is_none() {
return Some(tcx.hir().local_def_id_from_hir_id(item_id.id))
@@ -4457,7 +4457,7 @@
}
}
-pub fn path_to_def(tcx: TyCtxt<'_, '_, '_>, path: &[Symbol]) -> Option<DefId> {
+pub fn path_to_def(tcx: TyCtxt<'_>, path: &[Symbol]) -> Option<DefId> {
let crates = tcx.crates();
let krate = crates
diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs
index 2a3bc5e..20a4f86 100644
--- a/src/librustdoc/core.rs
+++ b/src/librustdoc/core.rs
@@ -44,7 +44,7 @@
pub struct DocContext<'tcx> {
- pub tcx: TyCtxt<'tcx, 'tcx, 'tcx>,
+ pub tcx: TyCtxt<'tcx>,
pub resolver: Rc<Option<RefCell<interface::BoxedResolver>>>,
/// The stack of module NodeIds up till this point
pub crate_name: Option<String>,
@@ -317,7 +317,7 @@
// Ensure that rustdoc works even if rustc is feature-staged
unstable_features: UnstableFeatures::Allow,
actually_rustdoc: true,
- debugging_opts: debugging_options.clone(),
+ debugging_opts: debugging_options,
error_format,
edition,
describe_lints,
diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs
index 334b831..c698200 100644
--- a/src/librustdoc/html/markdown.rs
+++ b/src/librustdoc/html/markdown.rs
@@ -43,25 +43,30 @@
Options::ENABLE_TABLES | Options::ENABLE_FOOTNOTES
}
-/// A unit struct which has the `fmt::Display` trait implemented. When
-/// formatted, this struct will emit the HTML corresponding to the rendered
+/// A tuple struct that has the `fmt::Display` trait implemented.
+/// When formatted, this struct will emit the HTML corresponding to the rendered
/// version of the contained markdown string.
-///
-/// The second parameter is a list of link replacements.
-///
-/// The third is the current list of used header IDs.
-///
-/// The fourth is whether to allow the use of explicit error codes in doctest lang strings.
-///
-/// The fifth is what default edition to use when parsing doctests (to add a `fn main`).
pub struct Markdown<'a>(
- pub &'a str, pub &'a [(String, String)], pub RefCell<&'a mut IdMap>, pub ErrorCodes, pub Edition);
-/// A unit struct like `Markdown`, that renders the markdown with a
-/// table of contents.
-pub struct MarkdownWithToc<'a>(pub &'a str, pub RefCell<&'a mut IdMap>, pub ErrorCodes, pub Edition);
-/// A unit struct like `Markdown`, that renders the markdown escaping HTML tags.
+ pub &'a str,
+ /// A list of link replacements.
+ pub &'a [(String, String)],
+ /// The current list of used header IDs.
+ pub RefCell<&'a mut IdMap>,
+ /// Whether to allow the use of explicit error codes in doctest lang strings.
+ pub ErrorCodes,
+ /// Default edition to use when parsing doctests (to add a `fn main`).
+ pub Edition,
+);
+/// A tuple struct like `Markdown` that renders the markdown with a table of contents.
+pub struct MarkdownWithToc<'a>(
+ pub &'a str,
+ pub RefCell<&'a mut IdMap>,
+ pub ErrorCodes,
+ pub Edition,
+);
+/// A tuple struct like `Markdown` that renders the markdown escaping HTML tags.
pub struct MarkdownHtml<'a>(pub &'a str, pub RefCell<&'a mut IdMap>, pub ErrorCodes, pub Edition);
-/// A unit struct like `Markdown`, that renders only the first paragraph.
+/// A tuple struct like `Markdown` that renders only the first paragraph.
pub struct MarkdownSummaryLine<'a>(pub &'a str, pub &'a [(String, String)]);
#[derive(Copy, Clone, PartialEq, Debug)]
@@ -1050,130 +1055,4 @@
}
#[cfg(test)]
-mod tests {
- use super::{ErrorCodes, LangString, Markdown, MarkdownHtml, IdMap};
- use super::plain_summary_line;
- use std::cell::RefCell;
- use syntax::edition::{Edition, DEFAULT_EDITION};
-
- #[test]
- fn test_lang_string_parse() {
- fn t(s: &str,
- should_panic: bool, no_run: bool, ignore: bool, rust: bool, test_harness: bool,
- compile_fail: bool, allow_fail: bool, error_codes: Vec<String>,
- edition: Option<Edition>) {
- assert_eq!(LangString::parse(s, ErrorCodes::Yes), LangString {
- should_panic,
- no_run,
- ignore,
- rust,
- test_harness,
- compile_fail,
- error_codes,
- original: s.to_owned(),
- allow_fail,
- edition,
- })
- }
-
- fn v() -> Vec<String> {
- Vec::new()
- }
-
- // ignore-tidy-linelength
- // marker | should_panic | no_run | ignore | rust | test_harness
- // | compile_fail | allow_fail | error_codes | edition
- t("", false, false, false, true, false, false, false, v(), None);
- t("rust", false, false, false, true, false, false, false, v(), None);
- t("sh", false, false, false, false, false, false, false, v(), None);
- t("ignore", false, false, true, true, false, false, false, v(), None);
- t("should_panic", true, false, false, true, false, false, false, v(), None);
- t("no_run", false, true, false, true, false, false, false, v(), None);
- t("test_harness", false, false, false, true, true, false, false, v(), None);
- t("compile_fail", false, true, false, true, false, true, false, v(), None);
- t("allow_fail", false, false, false, true, false, false, true, v(), None);
- t("{.no_run .example}", false, true, false, true, false, false, false, v(), None);
- t("{.sh .should_panic}", true, false, false, false, false, false, false, v(), None);
- t("{.example .rust}", false, false, false, true, false, false, false, v(), None);
- t("{.test_harness .rust}", false, false, false, true, true, false, false, v(), None);
- t("text, no_run", false, true, false, false, false, false, false, v(), None);
- t("text,no_run", false, true, false, false, false, false, false, v(), None);
- t("edition2015", false, false, false, true, false, false, false, v(), Some(Edition::Edition2015));
- t("edition2018", false, false, false, true, false, false, false, v(), Some(Edition::Edition2018));
- }
-
- #[test]
- fn test_header() {
- fn t(input: &str, expect: &str) {
- let mut map = IdMap::new();
- let output = Markdown(input, &[], RefCell::new(&mut map),
- ErrorCodes::Yes, DEFAULT_EDITION).to_string();
- assert_eq!(output, expect, "original: {}", input);
- }
-
- t("# Foo bar", "<h1 id=\"foo-bar\" class=\"section-header\">\
- <a href=\"#foo-bar\">Foo bar</a></h1>");
- t("## Foo-bar_baz qux", "<h2 id=\"foo-bar_baz-qux\" class=\"section-\
- header\"><a href=\"#foo-bar_baz-qux\">Foo-bar_baz qux</a></h2>");
- t("### **Foo** *bar* baz!?!& -_qux_-%",
- "<h3 id=\"foo-bar-baz--qux-\" class=\"section-header\">\
- <a href=\"#foo-bar-baz--qux-\"><strong>Foo</strong> \
- <em>bar</em> baz!?!& -<em>qux</em>-%</a></h3>");
- t("#### **Foo?** & \\*bar?!* _`baz`_ ❤ #qux",
- "<h4 id=\"foo--bar--baz--qux\" class=\"section-header\">\
- <a href=\"#foo--bar--baz--qux\"><strong>Foo?</strong> & *bar?!* \
- <em><code>baz</code></em> ❤ #qux</a></h4>");
- }
-
- #[test]
- fn test_header_ids_multiple_blocks() {
- let mut map = IdMap::new();
- fn t(map: &mut IdMap, input: &str, expect: &str) {
- let output = Markdown(input, &[], RefCell::new(map),
- ErrorCodes::Yes, DEFAULT_EDITION).to_string();
- assert_eq!(output, expect, "original: {}", input);
- }
-
- t(&mut map, "# Example", "<h1 id=\"example\" class=\"section-header\">\
- <a href=\"#example\">Example</a></h1>");
- t(&mut map, "# Panics", "<h1 id=\"panics\" class=\"section-header\">\
- <a href=\"#panics\">Panics</a></h1>");
- t(&mut map, "# Example", "<h1 id=\"example-1\" class=\"section-header\">\
- <a href=\"#example-1\">Example</a></h1>");
- t(&mut map, "# Main", "<h1 id=\"main\" class=\"section-header\">\
- <a href=\"#main\">Main</a></h1>");
- t(&mut map, "# Example", "<h1 id=\"example-2\" class=\"section-header\">\
- <a href=\"#example-2\">Example</a></h1>");
- t(&mut map, "# Panics", "<h1 id=\"panics-1\" class=\"section-header\">\
- <a href=\"#panics-1\">Panics</a></h1>");
- }
-
- #[test]
- fn test_plain_summary_line() {
- fn t(input: &str, expect: &str) {
- let output = plain_summary_line(input);
- assert_eq!(output, expect, "original: {}", input);
- }
-
- t("hello [Rust](https://www.rust-lang.org) :)", "hello Rust :)");
- t("hello [Rust](https://www.rust-lang.org \"Rust\") :)", "hello Rust :)");
- t("code `let x = i32;` ...", "code `let x = i32;` ...");
- t("type `Type<'static>` ...", "type `Type<'static>` ...");
- t("# top header", "top header");
- t("## header", "header");
- }
-
- #[test]
- fn test_markdown_html_escape() {
- fn t(input: &str, expect: &str) {
- let mut idmap = IdMap::new();
- let output = MarkdownHtml(input, RefCell::new(&mut idmap),
- ErrorCodes::Yes, DEFAULT_EDITION).to_string();
- assert_eq!(output, expect, "original: {}", input);
- }
-
- t("`Struct<'a, T>`", "<p><code>Struct<'a, T></code></p>\n");
- t("Struct<'a, T>", "<p>Struct<'a, T></p>\n");
- t("Struct<br>", "<p>Struct<br></p>\n");
- }
-}
+mod tests;
diff --git a/src/librustdoc/html/markdown/tests.rs b/src/librustdoc/html/markdown/tests.rs
new file mode 100644
index 0000000..f470e64
--- /dev/null
+++ b/src/librustdoc/html/markdown/tests.rs
@@ -0,0 +1,125 @@
+use super::{ErrorCodes, LangString, Markdown, MarkdownHtml, IdMap};
+use super::plain_summary_line;
+use std::cell::RefCell;
+use syntax::edition::{Edition, DEFAULT_EDITION};
+
+#[test]
+fn test_lang_string_parse() {
+ fn t(s: &str,
+ should_panic: bool, no_run: bool, ignore: bool, rust: bool, test_harness: bool,
+ compile_fail: bool, allow_fail: bool, error_codes: Vec<String>,
+ edition: Option<Edition>) {
+ assert_eq!(LangString::parse(s, ErrorCodes::Yes), LangString {
+ should_panic,
+ no_run,
+ ignore,
+ rust,
+ test_harness,
+ compile_fail,
+ error_codes,
+ original: s.to_owned(),
+ allow_fail,
+ edition,
+ })
+ }
+
+ fn v() -> Vec<String> {
+ Vec::new()
+ }
+
+ // ignore-tidy-linelength
+ // marker | should_panic | no_run | ignore | rust | test_harness
+ // | compile_fail | allow_fail | error_codes | edition
+ t("", false, false, false, true, false, false, false, v(), None);
+ t("rust", false, false, false, true, false, false, false, v(), None);
+ t("sh", false, false, false, false, false, false, false, v(), None);
+ t("ignore", false, false, true, true, false, false, false, v(), None);
+ t("should_panic", true, false, false, true, false, false, false, v(), None);
+ t("no_run", false, true, false, true, false, false, false, v(), None);
+ t("test_harness", false, false, false, true, true, false, false, v(), None);
+ t("compile_fail", false, true, false, true, false, true, false, v(), None);
+ t("allow_fail", false, false, false, true, false, false, true, v(), None);
+ t("{.no_run .example}", false, true, false, true, false, false, false, v(), None);
+ t("{.sh .should_panic}", true, false, false, false, false, false, false, v(), None);
+ t("{.example .rust}", false, false, false, true, false, false, false, v(), None);
+ t("{.test_harness .rust}", false, false, false, true, true, false, false, v(), None);
+ t("text, no_run", false, true, false, false, false, false, false, v(), None);
+ t("text,no_run", false, true, false, false, false, false, false, v(), None);
+ t("edition2015", false, false, false, true, false, false, false, v(), Some(Edition::Edition2015));
+ t("edition2018", false, false, false, true, false, false, false, v(), Some(Edition::Edition2018));
+}
+
+#[test]
+fn test_header() {
+ fn t(input: &str, expect: &str) {
+ let mut map = IdMap::new();
+ let output = Markdown(input, &[], RefCell::new(&mut map),
+ ErrorCodes::Yes, DEFAULT_EDITION).to_string();
+ assert_eq!(output, expect, "original: {}", input);
+ }
+
+ t("# Foo bar", "<h1 id=\"foo-bar\" class=\"section-header\">\
+ <a href=\"#foo-bar\">Foo bar</a></h1>");
+ t("## Foo-bar_baz qux", "<h2 id=\"foo-bar_baz-qux\" class=\"section-\
+ header\"><a href=\"#foo-bar_baz-qux\">Foo-bar_baz qux</a></h2>");
+ t("### **Foo** *bar* baz!?!& -_qux_-%",
+ "<h3 id=\"foo-bar-baz--qux-\" class=\"section-header\">\
+ <a href=\"#foo-bar-baz--qux-\"><strong>Foo</strong> \
+ <em>bar</em> baz!?!& -<em>qux</em>-%</a></h3>");
+ t("#### **Foo?** & \\*bar?!* _`baz`_ ❤ #qux",
+ "<h4 id=\"foo--bar--baz--qux\" class=\"section-header\">\
+ <a href=\"#foo--bar--baz--qux\"><strong>Foo?</strong> & *bar?!* \
+ <em><code>baz</code></em> ❤ #qux</a></h4>");
+}
+
+#[test]
+fn test_header_ids_multiple_blocks() {
+ let mut map = IdMap::new();
+ fn t(map: &mut IdMap, input: &str, expect: &str) {
+ let output = Markdown(input, &[], RefCell::new(map),
+ ErrorCodes::Yes, DEFAULT_EDITION).to_string();
+ assert_eq!(output, expect, "original: {}", input);
+ }
+
+ t(&mut map, "# Example", "<h1 id=\"example\" class=\"section-header\">\
+ <a href=\"#example\">Example</a></h1>");
+ t(&mut map, "# Panics", "<h1 id=\"panics\" class=\"section-header\">\
+ <a href=\"#panics\">Panics</a></h1>");
+ t(&mut map, "# Example", "<h1 id=\"example-1\" class=\"section-header\">\
+ <a href=\"#example-1\">Example</a></h1>");
+ t(&mut map, "# Main", "<h1 id=\"main\" class=\"section-header\">\
+ <a href=\"#main\">Main</a></h1>");
+ t(&mut map, "# Example", "<h1 id=\"example-2\" class=\"section-header\">\
+ <a href=\"#example-2\">Example</a></h1>");
+ t(&mut map, "# Panics", "<h1 id=\"panics-1\" class=\"section-header\">\
+ <a href=\"#panics-1\">Panics</a></h1>");
+}
+
+#[test]
+fn test_plain_summary_line() {
+ fn t(input: &str, expect: &str) {
+ let output = plain_summary_line(input);
+ assert_eq!(output, expect, "original: {}", input);
+ }
+
+ t("hello [Rust](https://www.rust-lang.org) :)", "hello Rust :)");
+ t("hello [Rust](https://www.rust-lang.org \"Rust\") :)", "hello Rust :)");
+ t("code `let x = i32;` ...", "code `let x = i32;` ...");
+ t("type `Type<'static>` ...", "type `Type<'static>` ...");
+ t("# top header", "top header");
+ t("## header", "header");
+}
+
+#[test]
+fn test_markdown_html_escape() {
+ fn t(input: &str, expect: &str) {
+ let mut idmap = IdMap::new();
+ let output = MarkdownHtml(input, RefCell::new(&mut idmap),
+ ErrorCodes::Yes, DEFAULT_EDITION).to_string();
+ assert_eq!(output, expect, "original: {}", input);
+ }
+
+ t("`Struct<'a, T>`", "<p><code>Struct<'a, T></code></p>\n");
+ t("Struct<'a, T>", "<p>Struct<'a, T></p>\n");
+ t("Struct<br>", "<p>Struct<br></p>\n");
+}
diff --git a/src/librustdoc/html/toc.rs b/src/librustdoc/html/toc.rs
index 409f247..2564c61 100644
--- a/src/librustdoc/html/toc.rs
+++ b/src/librustdoc/html/toc.rs
@@ -188,85 +188,4 @@
}
#[cfg(test)]
-mod tests {
- use super::{TocBuilder, Toc, TocEntry};
-
- #[test]
- fn builder_smoke() {
- let mut builder = TocBuilder::new();
-
- // this is purposely not using a fancy macro like below so
- // that we're sure that this is doing the correct thing, and
- // there's been no macro mistake.
- macro_rules! push {
- ($level: expr, $name: expr) => {
- assert_eq!(builder.push($level,
- $name.to_string(),
- "".to_string()),
- $name);
- }
- }
- push!(2, "0.1");
- push!(1, "1");
- {
- push!(2, "1.1");
- {
- push!(3, "1.1.1");
- push!(3, "1.1.2");
- }
- push!(2, "1.2");
- {
- push!(3, "1.2.1");
- push!(3, "1.2.2");
- }
- }
- push!(1, "2");
- push!(1, "3");
- {
- push!(4, "3.0.0.1");
- {
- push!(6, "3.0.0.1.0.1");
- }
- push!(4, "3.0.0.2");
- push!(2, "3.1");
- {
- push!(4, "3.1.0.1");
- }
- }
-
- macro_rules! toc {
- ($(($level: expr, $name: expr, $(($sub: tt))* )),*) => {
- Toc {
- entries: vec![
- $(
- TocEntry {
- level: $level,
- name: $name.to_string(),
- sec_number: $name.to_string(),
- id: "".to_string(),
- children: toc!($($sub),*)
- }
- ),*
- ]
- }
- }
- }
- let expected = toc!(
- (2, "0.1", ),
-
- (1, "1",
- ((2, "1.1", ((3, "1.1.1", )) ((3, "1.1.2", ))))
- ((2, "1.2", ((3, "1.2.1", )) ((3, "1.2.2", ))))
- ),
-
- (1, "2", ),
-
- (1, "3",
- ((4, "3.0.0.1", ((6, "3.0.0.1.0.1", ))))
- ((4, "3.0.0.2", ))
- ((2, "3.1", ((4, "3.1.0.1", ))))
- )
- );
- assert_eq!(expected, builder.into_toc());
- }
-}
+mod tests;
diff --git a/src/librustdoc/html/toc/tests.rs b/src/librustdoc/html/toc/tests.rs
new file mode 100644
index 0000000..ef69ada
--- /dev/null
+++ b/src/librustdoc/html/toc/tests.rs
@@ -0,0 +1,80 @@
+use super::{TocBuilder, Toc, TocEntry};
+
+#[test]
+fn builder_smoke() {
+ let mut builder = TocBuilder::new();
+
+ // this is purposely not using a fancy macro like below so
+ // that we're sure that this is doing the correct thing, and
+ // there's been no macro mistake.
+ macro_rules! push {
+ ($level: expr, $name: expr) => {
+ assert_eq!(builder.push($level,
+ $name.to_string(),
+ "".to_string()),
+ $name);
+ }
+ }
+ push!(2, "0.1");
+ push!(1, "1");
+ {
+ push!(2, "1.1");
+ {
+ push!(3, "1.1.1");
+ push!(3, "1.1.2");
+ }
+ push!(2, "1.2");
+ {
+ push!(3, "1.2.1");
+ push!(3, "1.2.2");
+ }
+ }
+ push!(1, "2");
+ push!(1, "3");
+ {
+ push!(4, "3.0.0.1");
+ {
+ push!(6, "3.0.0.1.0.1");
+ }
+ push!(4, "3.0.0.2");
+ push!(2, "3.1");
+ {
+ push!(4, "3.1.0.1");
+ }
+ }
+
+ macro_rules! toc {
+ ($(($level: expr, $name: expr, $(($sub: tt))* )),*) => {
+ Toc {
+ entries: vec![
+ $(
+ TocEntry {
+ level: $level,
+ name: $name.to_string(),
+ sec_number: $name.to_string(),
+ id: "".to_string(),
+ children: toc!($($sub),*)
+ }
+ ),*
+ ]
+ }
+ }
+ }
+ let expected = toc!(
+ (2, "0.1", ),
+
+ (1, "1",
+ ((2, "1.1", ((3, "1.1.1", )) ((3, "1.1.2", ))))
+ ((2, "1.2", ((3, "1.2.1", )) ((3, "1.2.2", ))))
+ ),
+
+ (1, "2", ),
+
+ (1, "3",
+ ((4, "3.0.0.1", ((6, "3.0.0.1.0.1", ))))
+ ((4, "3.0.0.2", ))
+ ((2, "3.1", ((4, "3.1.0.1", ))))
+ )
+ );
+ assert_eq!(expected, builder.into_toc());
+}
diff --git a/src/librustdoc/passes/check_code_block_syntax.rs b/src/librustdoc/passes/check_code_block_syntax.rs
index 694843a..6d51278 100644
--- a/src/librustdoc/passes/check_code_block_syntax.rs
+++ b/src/librustdoc/passes/check_code_block_syntax.rs
@@ -2,7 +2,7 @@
use syntax::parse::lexer::{StringReader as Lexer};
use syntax::parse::{ParseSess, token};
use syntax::source_map::FilePathMapping;
-use syntax_pos::FileName;
+use syntax_pos::{InnerSpan, FileName};
use crate::clean;
use crate::core::DocContext;
@@ -63,7 +63,7 @@
}
if code_block.syntax.is_none() && code_block.is_fenced {
- let sp = sp.from_inner_byte_pos(0, 3);
+ let sp = sp.from_inner(InnerSpan::new(0, 3));
diag.span_suggestion(
sp,
"mark blocks that do not contain Rust code as text",
diff --git a/src/librustdoc/passes/mod.rs b/src/librustdoc/passes/mod.rs
index 018ab5d..8fc6b9f 100644
--- a/src/librustdoc/passes/mod.rs
+++ b/src/librustdoc/passes/mod.rs
@@ -6,7 +6,7 @@
use rustc::middle::privacy::AccessLevels;
use rustc::util::nodemap::DefIdSet;
use std::mem;
-use syntax_pos::{DUMMY_SP, Span};
+use syntax_pos::{DUMMY_SP, InnerSpan, Span};
use std::ops::Range;
use crate::clean::{self, GetDefId, Item};
@@ -440,10 +440,10 @@
}
}
- let sp = span_of_attrs(attrs).from_inner_byte_pos(
+ let sp = span_of_attrs(attrs).from_inner(InnerSpan::new(
md_range.start + start_bytes,
md_range.end + start_bytes + end_bytes,
- );
+ ));
Some(sp)
}
diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs
index 5bce5d6..9a9fd94 100644
--- a/src/librustdoc/test.rs
+++ b/src/librustdoc/test.rs
@@ -740,7 +740,7 @@
debug!("Creating test {}: {}", name, test);
self.tests.push(testing::TestDescAndFn {
desc: testing::TestDesc {
- name: testing::DynTestName(name.clone()),
+ name: testing::DynTestName(name),
ignore: config.ignore,
// compiler failures are test failures
should_panic: testing::ShouldPanic::No,
@@ -975,304 +975,4 @@
}
#[cfg(test)]
-mod tests {
- use super::{TestOptions, make_test};
- use syntax::edition::DEFAULT_EDITION;
-
- #[test]
- fn make_test_basic() {
- //basic use: wraps with `fn main`, adds `#![allow(unused)]`
- let opts = TestOptions::default();
- let input =
-"assert_eq!(2+2, 4);";
- let expected =
-"#![allow(unused)]
-fn main() {
-assert_eq!(2+2, 4);
-}".to_string();
- let output = make_test(input, None, false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 2));
- }
-
- #[test]
- fn make_test_crate_name_no_use() {
- // If you give a crate name but *don't* use it within the test, it won't bother inserting
- // the `extern crate` statement.
- let opts = TestOptions::default();
- let input =
-"assert_eq!(2+2, 4);";
- let expected =
-"#![allow(unused)]
-fn main() {
-assert_eq!(2+2, 4);
-}".to_string();
- let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 2));
- }
-
- #[test]
- fn make_test_crate_name() {
- // If you give a crate name and use it within the test, it will insert an `extern crate`
- // statement before `fn main`.
- let opts = TestOptions::default();
- let input =
-"use asdf::qwop;
-assert_eq!(2+2, 4);";
- let expected =
-"#![allow(unused)]
-extern crate asdf;
-fn main() {
-use asdf::qwop;
-assert_eq!(2+2, 4);
-}".to_string();
- let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 3));
- }
-
- #[test]
- fn make_test_no_crate_inject() {
- // Even if you do use the crate within the test, setting `opts.no_crate_inject` will skip
- // adding it anyway.
- let opts = TestOptions {
- no_crate_inject: true,
- display_warnings: false,
- attrs: vec![],
- };
- let input =
-"use asdf::qwop;
-assert_eq!(2+2, 4);";
- let expected =
-"#![allow(unused)]
-fn main() {
-use asdf::qwop;
-assert_eq!(2+2, 4);
-}".to_string();
- let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 2));
- }
-
- #[test]
- fn make_test_ignore_std() {
- // Even if you include a crate name, and use it in the doctest, we still won't include an
- // `extern crate` statement if the crate is "std" -- that's included already by the
- // compiler!
- let opts = TestOptions::default();
- let input =
-"use std::*;
-assert_eq!(2+2, 4);";
- let expected =
-"#![allow(unused)]
-fn main() {
-use std::*;
-assert_eq!(2+2, 4);
-}".to_string();
- let output = make_test(input, Some("std"), false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 2));
- }
-
- #[test]
- fn make_test_manual_extern_crate() {
- // When you manually include an `extern crate` statement in your doctest, `make_test`
- // assumes you've included one for your own crate too.
- let opts = TestOptions::default();
- let input =
-"extern crate asdf;
-use asdf::qwop;
-assert_eq!(2+2, 4);";
- let expected =
-"#![allow(unused)]
-extern crate asdf;
-fn main() {
-use asdf::qwop;
-assert_eq!(2+2, 4);
-}".to_string();
- let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 2));
- }
-
- #[test]
- fn make_test_manual_extern_crate_with_macro_use() {
- let opts = TestOptions::default();
- let input =
-"#[macro_use] extern crate asdf;
-use asdf::qwop;
-assert_eq!(2+2, 4);";
- let expected =
-"#![allow(unused)]
-#[macro_use] extern crate asdf;
-fn main() {
-use asdf::qwop;
-assert_eq!(2+2, 4);
-}".to_string();
- let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 2));
- }
-
- #[test]
- fn make_test_opts_attrs() {
- // If you supplied some doctest attributes with `#![doc(test(attr(...)))]`, it will use
- // those instead of the stock `#![allow(unused)]`.
- let mut opts = TestOptions::default();
- opts.attrs.push("feature(sick_rad)".to_string());
- let input =
-"use asdf::qwop;
-assert_eq!(2+2, 4);";
- let expected =
-"#![feature(sick_rad)]
-extern crate asdf;
-fn main() {
-use asdf::qwop;
-assert_eq!(2+2, 4);
-}".to_string();
- let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 3));
-
- // Adding more will also bump the returned line offset.
- opts.attrs.push("feature(hella_dope)".to_string());
- let expected =
-"#![feature(sick_rad)]
-#![feature(hella_dope)]
-extern crate asdf;
-fn main() {
-use asdf::qwop;
-assert_eq!(2+2, 4);
-}".to_string();
- let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 4));
- }
-
- #[test]
- fn make_test_crate_attrs() {
- // Including inner attributes in your doctest will apply them to the whole "crate", pasting
- // them outside the generated main function.
- let opts = TestOptions::default();
- let input =
-"#![feature(sick_rad)]
-assert_eq!(2+2, 4);";
- let expected =
-"#![allow(unused)]
-#![feature(sick_rad)]
-fn main() {
-assert_eq!(2+2, 4);
-}".to_string();
- let output = make_test(input, None, false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 2));
- }
-
- #[test]
- fn make_test_with_main() {
- // Including your own `fn main` wrapper lets the test use it verbatim.
- let opts = TestOptions::default();
- let input =
-"fn main() {
- assert_eq!(2+2, 4);
-}";
- let expected =
-"#![allow(unused)]
-fn main() {
- assert_eq!(2+2, 4);
-}".to_string();
- let output = make_test(input, None, false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 1));
- }
-
- #[test]
- fn make_test_fake_main() {
- // ... but putting it in a comment will still provide a wrapper.
- let opts = TestOptions::default();
- let input =
-"//Ceci n'est pas une `fn main`
-assert_eq!(2+2, 4);";
- let expected =
-"#![allow(unused)]
-//Ceci n'est pas une `fn main`
-fn main() {
-assert_eq!(2+2, 4);
-}".to_string();
- let output = make_test(input, None, false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 2));
- }
-
- #[test]
- fn make_test_dont_insert_main() {
- // Even with that, if you set `dont_insert_main`, it won't create the `fn main` wrapper.
- let opts = TestOptions::default();
- let input =
-"//Ceci n'est pas une `fn main`
-assert_eq!(2+2, 4);";
- let expected =
-"#![allow(unused)]
-//Ceci n'est pas une `fn main`
-assert_eq!(2+2, 4);".to_string();
- let output = make_test(input, None, true, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 1));
- }
-
- #[test]
- fn make_test_display_warnings() {
- // If the user is asking to display doctest warnings, suppress the default `allow(unused)`.
- let mut opts = TestOptions::default();
- opts.display_warnings = true;
- let input =
-"assert_eq!(2+2, 4);";
- let expected =
-"fn main() {
-assert_eq!(2+2, 4);
-}".to_string();
- let output = make_test(input, None, false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 1));
- }
-
- #[test]
- fn make_test_issues_21299_33731() {
- let opts = TestOptions::default();
-
- let input =
-"// fn main
-assert_eq!(2+2, 4);";
-
- let expected =
-"#![allow(unused)]
-// fn main
-fn main() {
-assert_eq!(2+2, 4);
-}".to_string();
-
- let output = make_test(input, None, false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 2));
-
- let input =
-"extern crate hella_qwop;
-assert_eq!(asdf::foo, 4);";
-
- let expected =
-"#![allow(unused)]
-extern crate hella_qwop;
-extern crate asdf;
-fn main() {
-assert_eq!(asdf::foo, 4);
-}".to_string();
-
- let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 3));
- }
-
- #[test]
- fn make_test_main_in_macro() {
- let opts = TestOptions::default();
- let input =
-"#[macro_use] extern crate my_crate;
-test_wrapper! {
- fn main() {}
-}";
- let expected =
-"#![allow(unused)]
-#[macro_use] extern crate my_crate;
-test_wrapper! {
- fn main() {}
-}".to_string();
-
- let output = make_test(input, Some("my_crate"), false, &opts, DEFAULT_EDITION);
- assert_eq!(output, (expected, 1));
- }
-}
+mod tests;
diff --git a/src/librustdoc/test/tests.rs b/src/librustdoc/test/tests.rs
new file mode 100644
index 0000000..d4d558b
--- /dev/null
+++ b/src/librustdoc/test/tests.rs
@@ -0,0 +1,299 @@
+use super::{TestOptions, make_test};
+use syntax::edition::DEFAULT_EDITION;
+
+#[test]
+fn make_test_basic() {
+ //basic use: wraps with `fn main`, adds `#![allow(unused)]`
+ let opts = TestOptions::default();
+ let input =
+"assert_eq!(2+2, 4);";
+ let expected =
+"#![allow(unused)]
+fn main() {
+assert_eq!(2+2, 4);
+}".to_string();
+ let output = make_test(input, None, false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 2));
+}
+
+#[test]
+fn make_test_crate_name_no_use() {
+ // If you give a crate name but *don't* use it within the test, it won't bother inserting
+ // the `extern crate` statement.
+ let opts = TestOptions::default();
+ let input =
+"assert_eq!(2+2, 4);";
+ let expected =
+"#![allow(unused)]
+fn main() {
+assert_eq!(2+2, 4);
+}".to_string();
+ let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 2));
+}
+
+#[test]
+fn make_test_crate_name() {
+ // If you give a crate name and use it within the test, it will insert an `extern crate`
+ // statement before `fn main`.
+ let opts = TestOptions::default();
+ let input =
+"use asdf::qwop;
+assert_eq!(2+2, 4);";
+ let expected =
+"#![allow(unused)]
+extern crate asdf;
+fn main() {
+use asdf::qwop;
+assert_eq!(2+2, 4);
+}".to_string();
+ let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 3));
+}
+
+#[test]
+fn make_test_no_crate_inject() {
+ // Even if you do use the crate within the test, setting `opts.no_crate_inject` will skip
+ // adding it anyway.
+ let opts = TestOptions {
+ no_crate_inject: true,
+ display_warnings: false,
+ attrs: vec![],
+ };
+ let input =
+"use asdf::qwop;
+assert_eq!(2+2, 4);";
+ let expected =
+"#![allow(unused)]
+fn main() {
+use asdf::qwop;
+assert_eq!(2+2, 4);
+}".to_string();
+ let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 2));
+}
+
+#[test]
+fn make_test_ignore_std() {
+ // Even if you include a crate name, and use it in the doctest, we still won't include an
+ // `extern crate` statement if the crate is "std" -- that's included already by the
+ // compiler!
+ let opts = TestOptions::default();
+ let input =
+"use std::*;
+assert_eq!(2+2, 4);";
+ let expected =
+"#![allow(unused)]
+fn main() {
+use std::*;
+assert_eq!(2+2, 4);
+}".to_string();
+ let output = make_test(input, Some("std"), false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 2));
+}
+
+#[test]
+fn make_test_manual_extern_crate() {
+ // When you manually include an `extern crate` statement in your doctest, `make_test`
+ // assumes you've included one for your own crate too.
+ let opts = TestOptions::default();
+ let input =
+"extern crate asdf;
+use asdf::qwop;
+assert_eq!(2+2, 4);";
+ let expected =
+"#![allow(unused)]
+extern crate asdf;
+fn main() {
+use asdf::qwop;
+assert_eq!(2+2, 4);
+}".to_string();
+ let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 2));
+}
+
+#[test]
+fn make_test_manual_extern_crate_with_macro_use() {
+ let opts = TestOptions::default();
+ let input =
+"#[macro_use] extern crate asdf;
+use asdf::qwop;
+assert_eq!(2+2, 4);";
+ let expected =
+"#![allow(unused)]
+#[macro_use] extern crate asdf;
+fn main() {
+use asdf::qwop;
+assert_eq!(2+2, 4);
+}".to_string();
+ let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 2));
+}
+
+#[test]
+fn make_test_opts_attrs() {
+ // If you supplied some doctest attributes with `#![doc(test(attr(...)))]`, it will use
+ // those instead of the stock `#![allow(unused)]`.
+ let mut opts = TestOptions::default();
+ opts.attrs.push("feature(sick_rad)".to_string());
+ let input =
+"use asdf::qwop;
+assert_eq!(2+2, 4);";
+ let expected =
+"#![feature(sick_rad)]
+extern crate asdf;
+fn main() {
+use asdf::qwop;
+assert_eq!(2+2, 4);
+}".to_string();
+ let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 3));
+
+ // Adding more will also bump the returned line offset.
+ opts.attrs.push("feature(hella_dope)".to_string());
+ let expected =
+"#![feature(sick_rad)]
+#![feature(hella_dope)]
+extern crate asdf;
+fn main() {
+use asdf::qwop;
+assert_eq!(2+2, 4);
+}".to_string();
+ let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 4));
+}
+
+#[test]
+fn make_test_crate_attrs() {
+ // Including inner attributes in your doctest will apply them to the whole "crate", pasting
+ // them outside the generated main function.
+ let opts = TestOptions::default();
+ let input =
+"#![feature(sick_rad)]
+assert_eq!(2+2, 4);";
+ let expected =
+"#![allow(unused)]
+#![feature(sick_rad)]
+fn main() {
+assert_eq!(2+2, 4);
+}".to_string();
+ let output = make_test(input, None, false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 2));
+}
+
+#[test]
+fn make_test_with_main() {
+ // Including your own `fn main` wrapper lets the test use it verbatim.
+ let opts = TestOptions::default();
+ let input =
+"fn main() {
+ assert_eq!(2+2, 4);
+}";
+ let expected =
+"#![allow(unused)]
+fn main() {
+ assert_eq!(2+2, 4);
+}".to_string();
+ let output = make_test(input, None, false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 1));
+}
+
+#[test]
+fn make_test_fake_main() {
+ // ... but putting it in a comment will still provide a wrapper.
+ let opts = TestOptions::default();
+ let input =
+"//Ceci n'est pas une `fn main`
+assert_eq!(2+2, 4);";
+ let expected =
+"#![allow(unused)]
+//Ceci n'est pas une `fn main`
+fn main() {
+assert_eq!(2+2, 4);
+}".to_string();
+ let output = make_test(input, None, false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 2));
+}
+
+#[test]
+fn make_test_dont_insert_main() {
+ // Even with that, if you set `dont_insert_main`, it won't create the `fn main` wrapper.
+ let opts = TestOptions::default();
+ let input =
+"//Ceci n'est pas une `fn main`
+assert_eq!(2+2, 4);";
+ let expected =
+"#![allow(unused)]
+//Ceci n'est pas une `fn main`
+assert_eq!(2+2, 4);".to_string();
+ let output = make_test(input, None, true, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 1));
+}
+
+#[test]
+fn make_test_display_warnings() {
+ // If the user is asking to display doctest warnings, suppress the default `allow(unused)`.
+ let mut opts = TestOptions::default();
+ opts.display_warnings = true;
+ let input =
+"assert_eq!(2+2, 4);";
+ let expected =
+"fn main() {
+assert_eq!(2+2, 4);
+}".to_string();
+ let output = make_test(input, None, false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 1));
+}
+
+#[test]
+fn make_test_issues_21299_33731() {
+ let opts = TestOptions::default();
+
+ let input =
+"// fn main
+assert_eq!(2+2, 4);";
+
+ let expected =
+"#![allow(unused)]
+// fn main
+fn main() {
+assert_eq!(2+2, 4);
+}".to_string();
+
+ let output = make_test(input, None, false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 2));
+
+ let input =
+"extern crate hella_qwop;
+assert_eq!(asdf::foo, 4);";
+
+ let expected =
+"#![allow(unused)]
+extern crate hella_qwop;
+extern crate asdf;
+fn main() {
+assert_eq!(asdf::foo, 4);
+}".to_string();
+
+ let output = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 3));
+}
+
+#[test]
+fn make_test_main_in_macro() {
+ let opts = TestOptions::default();
+ let input =
+"#[macro_use] extern crate my_crate;
+test_wrapper! {
+ fn main() {}
+}";
+ let expected =
+"#![allow(unused)]
+#[macro_use] extern crate my_crate;
+test_wrapper! {
+ fn main() {}
+}".to_string();
+
+ let output = make_test(input, Some("my_crate"), false, &opts, DEFAULT_EDITION);
+ assert_eq!(output, (expected, 1));
+}
diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs
index eb9de43..ff76579 100644
--- a/src/librustdoc/visit_ast.rs
+++ b/src/librustdoc/visit_ast.rs
@@ -248,7 +248,7 @@
let orig_inside_public_path = self.inside_public_path;
self.inside_public_path &= vis.node.is_pub();
for i in &m.item_ids {
- let item = self.cx.tcx.hir().expect_item_by_hir_id(i.id);
+ let item = self.cx.tcx.hir().expect_item(i.id);
self.visit_item(item, None, &mut om);
}
self.inside_public_path = orig_inside_public_path;
@@ -275,7 +275,7 @@
fn inherits_doc_hidden(cx: &core::DocContext<'_>, mut node: hir::HirId) -> bool {
while let Some(id) = cx.tcx.hir().get_enclosing_scope(node) {
node = id;
- if cx.tcx.hir().attrs_by_hir_id(node)
+ if cx.tcx.hir().attrs(node)
.lists(sym::doc).has_word(sym::hidden) {
return true;
}
@@ -295,7 +295,7 @@
return false;
};
- let use_attrs = tcx.hir().attrs_by_hir_id(id);
+ let use_attrs = tcx.hir().attrs(id);
// Don't inline `doc(hidden)` imports so they can be stripped at a later stage.
let is_no_inline = use_attrs.lists(sym::doc).has_word(sym::no_inline) ||
use_attrs.lists(sym::doc).has_word(sym::hidden);
@@ -346,7 +346,7 @@
Node::Item(&hir::Item { node: hir::ItemKind::Mod(ref m), .. }) if glob => {
let prev = mem::replace(&mut self.inlining, true);
for i in &m.item_ids {
- let i = self.cx.tcx.hir().expect_item_by_hir_id(i.id);
+ let i = self.cx.tcx.hir().expect_item(i.id);
self.visit_item(i, None, om);
}
self.inlining = prev;
@@ -361,7 +361,7 @@
Node::ForeignItem(it) if !glob => {
// Generate a fresh `extern {}` block if we want to inline a foreign item.
om.foreigns.push(hir::ForeignMod {
- abi: tcx.hir().get_foreign_abi_by_hir_id(it.hir_id),
+ abi: tcx.hir().get_foreign_abi(it.hir_id),
items: vec![hir::ForeignItem {
ident: renamed.unwrap_or(it.ident),
.. it.clone()
diff --git a/src/libserialize/hex.rs b/src/libserialize/hex.rs
index 8a7927e..95d92f3 100644
--- a/src/libserialize/hex.rs
+++ b/src/libserialize/hex.rs
@@ -143,79 +143,4 @@
}
#[cfg(test)]
-mod tests {
- extern crate test;
- use test::Bencher;
- use crate::hex::{FromHex, ToHex};
-
- #[test]
- pub fn test_to_hex() {
- assert_eq!("foobar".as_bytes().to_hex(), "666f6f626172");
- }
-
- #[test]
- pub fn test_from_hex_okay() {
- assert_eq!("666f6f626172".from_hex().unwrap(),
- b"foobar");
- assert_eq!("666F6F626172".from_hex().unwrap(),
- b"foobar");
- }
-
- #[test]
- pub fn test_from_hex_odd_len() {
- assert!("666".from_hex().is_err());
- assert!("66 6".from_hex().is_err());
- }
-
- #[test]
- pub fn test_from_hex_invalid_char() {
- assert!("66y6".from_hex().is_err());
- }
-
- #[test]
- pub fn test_from_hex_ignores_whitespace() {
- assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap(),
- b"foobar");
- }
-
- #[test]
- pub fn test_to_hex_all_bytes() {
- for i in 0..256 {
- assert_eq!([i as u8].to_hex(), format!("{:02x}", i as usize));
- }
- }
-
- #[test]
- pub fn test_from_hex_all_bytes() {
- for i in 0..256 {
- let ii: &[u8] = &[i as u8];
- assert_eq!(format!("{:02x}", i as usize).from_hex()
- .unwrap(),
- ii);
- assert_eq!(format!("{:02X}", i as usize).from_hex()
- .unwrap(),
- ii);
- }
- }
-
- #[bench]
- pub fn bench_to_hex(b: &mut Bencher) {
- let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
- ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
- b.iter(|| {
- s.as_bytes().to_hex();
- });
- b.bytes = s.len() as u64;
- }
-
- #[bench]
- pub fn bench_from_hex(b: &mut Bencher) {
- let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
- ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
- let sb = s.as_bytes().to_hex();
- b.iter(|| {
- sb.from_hex().unwrap();
- });
- b.bytes = sb.len() as u64;
- }
-}
+mod tests;
diff --git a/src/libserialize/hex/tests.rs b/src/libserialize/hex/tests.rs
new file mode 100644
index 0000000..471912c
--- /dev/null
+++ b/src/libserialize/hex/tests.rs
@@ -0,0 +1,74 @@
+extern crate test;
+use test::Bencher;
+use crate::hex::{FromHex, ToHex};
+
+#[test]
+pub fn test_to_hex() {
+ assert_eq!("foobar".as_bytes().to_hex(), "666f6f626172");
+}
+
+#[test]
+pub fn test_from_hex_okay() {
+ assert_eq!("666f6f626172".from_hex().unwrap(),
+ b"foobar");
+ assert_eq!("666F6F626172".from_hex().unwrap(),
+ b"foobar");
+}
+
+#[test]
+pub fn test_from_hex_odd_len() {
+ assert!("666".from_hex().is_err());
+ assert!("66 6".from_hex().is_err());
+}
+
+#[test]
+pub fn test_from_hex_invalid_char() {
+ assert!("66y6".from_hex().is_err());
+}
+
+#[test]
+pub fn test_from_hex_ignores_whitespace() {
+ assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap(),
+ b"foobar");
+}
+
+#[test]
+pub fn test_to_hex_all_bytes() {
+ for i in 0..256 {
+ assert_eq!([i as u8].to_hex(), format!("{:02x}", i as usize));
+ }
+}
+
+#[test]
+pub fn test_from_hex_all_bytes() {
+ for i in 0..256 {
+ let ii: &[u8] = &[i as u8];
+ assert_eq!(format!("{:02x}", i as usize).from_hex()
+ .unwrap(),
+ ii);
+ assert_eq!(format!("{:02X}", i as usize).from_hex()
+ .unwrap(),
+ ii);
+ }
+}
+
+#[bench]
+pub fn bench_to_hex(b: &mut Bencher) {
+ let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
+ ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
+ b.iter(|| {
+ s.as_bytes().to_hex();
+ });
+ b.bytes = s.len() as u64;
+}
+
+#[bench]
+pub fn bench_from_hex(b: &mut Bencher) {
+ let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
+ ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
+ let sb = s.as_bytes().to_hex();
+ b.iter(|| {
+ sb.from_hex().unwrap();
+ });
+ b.bytes = sb.len() as u64;
+}
diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs
index a4fd288..8f926e6 100644
--- a/src/libserialize/json.rs
+++ b/src/libserialize/json.rs
@@ -2582,139 +2582,4 @@
}
#[cfg(test)]
-mod tests {
- // Benchmarks and tests that require private items
-
- extern crate test;
- use test::Bencher;
- use super::{from_str, Parser, StackElement, Stack};
- use std::string;
-
- #[test]
- fn test_stack() {
- let mut stack = Stack::new();
-
- assert!(stack.is_empty());
- assert!(stack.is_empty());
- assert!(!stack.last_is_index());
-
- stack.push_index(0);
- stack.bump_index();
-
- assert!(stack.len() == 1);
- assert!(stack.is_equal_to(&[StackElement::Index(1)]));
- assert!(stack.starts_with(&[StackElement::Index(1)]));
- assert!(stack.ends_with(&[StackElement::Index(1)]));
- assert!(stack.last_is_index());
- assert!(stack.get(0) == StackElement::Index(1));
-
- stack.push_key("foo".to_string());
-
- assert!(stack.len() == 2);
- assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo")]));
- assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")]));
- assert!(stack.starts_with(&[StackElement::Index(1)]));
- assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo")]));
- assert!(stack.ends_with(&[StackElement::Key("foo")]));
- assert!(!stack.last_is_index());
- assert!(stack.get(0) == StackElement::Index(1));
- assert!(stack.get(1) == StackElement::Key("foo"));
-
- stack.push_key("bar".to_string());
-
- assert!(stack.len() == 3);
- assert!(stack.is_equal_to(&[StackElement::Index(1),
- StackElement::Key("foo"),
- StackElement::Key("bar")]));
- assert!(stack.starts_with(&[StackElement::Index(1)]));
- assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")]));
- assert!(stack.starts_with(&[StackElement::Index(1),
- StackElement::Key("foo"),
- StackElement::Key("bar")]));
- assert!(stack.ends_with(&[StackElement::Key("bar")]));
- assert!(stack.ends_with(&[StackElement::Key("foo"), StackElement::Key("bar")]));
- assert!(stack.ends_with(&[StackElement::Index(1),
- StackElement::Key("foo"),
- StackElement::Key("bar")]));
- assert!(!stack.last_is_index());
- assert!(stack.get(0) == StackElement::Index(1));
- assert!(stack.get(1) == StackElement::Key("foo"));
- assert!(stack.get(2) == StackElement::Key("bar"));
-
- stack.pop();
-
- assert!(stack.len() == 2);
- assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo")]));
- assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")]));
- assert!(stack.starts_with(&[StackElement::Index(1)]));
- assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo")]));
- assert!(stack.ends_with(&[StackElement::Key("foo")]));
- assert!(!stack.last_is_index());
- assert!(stack.get(0) == StackElement::Index(1));
- assert!(stack.get(1) == StackElement::Key("foo"));
- }
-
- #[bench]
- fn bench_streaming_small(b: &mut Bencher) {
- b.iter( || {
- let mut parser = Parser::new(
- r#"{
- "a": 1.0,
- "b": [
- true,
- "foo\nbar",
- { "c": {"d": null} }
- ]
- }"#.chars()
- );
- loop {
- match parser.next() {
- None => return,
- _ => {}
- }
- }
- });
- }
- #[bench]
- fn bench_small(b: &mut Bencher) {
- b.iter( || {
- let _ = from_str(r#"{
- "a": 1.0,
- "b": [
- true,
- "foo\nbar",
- { "c": {"d": null} }
- ]
- }"#);
- });
- }
-
- fn big_json() -> string::String {
- let mut src = "[\n".to_string();
- for _ in 0..500 {
- src.push_str(r#"{ "a": true, "b": null, "c":3.1415, "d": "Hello world", "e": \
- [1,2,3]},"#);
- }
- src.push_str("{}]");
- return src;
- }
-
- #[bench]
- fn bench_streaming_large(b: &mut Bencher) {
- let src = big_json();
- b.iter( || {
- let mut parser = Parser::new(src.chars());
- loop {
- match parser.next() {
- None => return,
- _ => {}
- }
- }
- });
- }
- #[bench]
- fn bench_large(b: &mut Bencher) {
- let src = big_json();
- b.iter( || { let _ = from_str(&src); });
- }
-}
+mod tests;
diff --git a/src/libserialize/json/tests.rs b/src/libserialize/json/tests.rs
new file mode 100644
index 0000000..a16b8bd
--- /dev/null
+++ b/src/libserialize/json/tests.rs
@@ -0,0 +1,134 @@
+// Benchmarks and tests that require private items
+
+extern crate test;
+use test::Bencher;
+use super::{from_str, Parser, StackElement, Stack};
+use std::string;
+
+#[test]
+fn test_stack() {
+ let mut stack = Stack::new();
+
+ assert!(stack.is_empty());
+ assert!(stack.is_empty());
+ assert!(!stack.last_is_index());
+
+ stack.push_index(0);
+ stack.bump_index();
+
+ assert!(stack.len() == 1);
+ assert!(stack.is_equal_to(&[StackElement::Index(1)]));
+ assert!(stack.starts_with(&[StackElement::Index(1)]));
+ assert!(stack.ends_with(&[StackElement::Index(1)]));
+ assert!(stack.last_is_index());
+ assert!(stack.get(0) == StackElement::Index(1));
+
+ stack.push_key("foo".to_string());
+
+ assert!(stack.len() == 2);
+ assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo")]));
+ assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")]));
+ assert!(stack.starts_with(&[StackElement::Index(1)]));
+ assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo")]));
+ assert!(stack.ends_with(&[StackElement::Key("foo")]));
+ assert!(!stack.last_is_index());
+ assert!(stack.get(0) == StackElement::Index(1));
+ assert!(stack.get(1) == StackElement::Key("foo"));
+
+ stack.push_key("bar".to_string());
+
+ assert!(stack.len() == 3);
+ assert!(stack.is_equal_to(&[StackElement::Index(1),
+ StackElement::Key("foo"),
+ StackElement::Key("bar")]));
+ assert!(stack.starts_with(&[StackElement::Index(1)]));
+ assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")]));
+ assert!(stack.starts_with(&[StackElement::Index(1),
+ StackElement::Key("foo"),
+ StackElement::Key("bar")]));
+ assert!(stack.ends_with(&[StackElement::Key("bar")]));
+ assert!(stack.ends_with(&[StackElement::Key("foo"), StackElement::Key("bar")]));
+ assert!(stack.ends_with(&[StackElement::Index(1),
+ StackElement::Key("foo"),
+ StackElement::Key("bar")]));
+ assert!(!stack.last_is_index());
+ assert!(stack.get(0) == StackElement::Index(1));
+ assert!(stack.get(1) == StackElement::Key("foo"));
+ assert!(stack.get(2) == StackElement::Key("bar"));
+
+ stack.pop();
+
+ assert!(stack.len() == 2);
+ assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo")]));
+ assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")]));
+ assert!(stack.starts_with(&[StackElement::Index(1)]));
+ assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo")]));
+ assert!(stack.ends_with(&[StackElement::Key("foo")]));
+ assert!(!stack.last_is_index());
+ assert!(stack.get(0) == StackElement::Index(1));
+ assert!(stack.get(1) == StackElement::Key("foo"));
+}
+
+#[bench]
+fn bench_streaming_small(b: &mut Bencher) {
+ b.iter( || {
+ let mut parser = Parser::new(
+ r#"{
+ "a": 1.0,
+ "b": [
+ true,
+ "foo\nbar",
+ { "c": {"d": null} }
+ ]
+ }"#.chars()
+ );
+ loop {
+ match parser.next() {
+ None => return,
+ _ => {}
+ }
+ }
+ });
+}
+#[bench]
+fn bench_small(b: &mut Bencher) {
+ b.iter( || {
+ let _ = from_str(r#"{
+ "a": 1.0,
+ "b": [
+ true,
+ "foo\nbar",
+ { "c": {"d": null} }
+ ]
+ }"#);
+ });
+}
+
+fn big_json() -> string::String {
+ let mut src = "[\n".to_string();
+ for _ in 0..500 {
+ src.push_str(r#"{ "a": true, "b": null, "c":3.1415, "d": "Hello world", "e": \
+ [1,2,3]},"#);
+ }
+ src.push_str("{}]");
+ return src;
+}
+
+#[bench]
+fn bench_streaming_large(b: &mut Bencher) {
+ let src = big_json();
+ b.iter( || {
+ let mut parser = Parser::new(src.chars());
+ loop {
+ match parser.next() {
+ None => return,
+ _ => {}
+ }
+ }
+ });
+}
+#[bench]
+fn bench_large(b: &mut Bencher) {
+ let src = big_json();
+ b.iter( || { let _ = from_str(&src); });
+}
diff --git a/src/libstd/Cargo.toml b/src/libstd/Cargo.toml
index 30e23f1..38df1f2 100644
--- a/src/libstd/Cargo.toml
+++ b/src/libstd/Cargo.toml
@@ -15,6 +15,7 @@
[dependencies]
alloc = { path = "../liballoc" }
+cfg-if = { version = "0.1.8", features = ['rustc-dep-of-std'] }
panic_unwind = { path = "../libpanic_unwind", optional = true }
panic_abort = { path = "../libpanic_abort" }
core = { path = "../libcore" }
diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs
index a3356e6..e0ffc9b 100644
--- a/src/libstd/lib.rs
+++ b/src/libstd/lib.rs
@@ -336,6 +336,12 @@
#[allow(unused_extern_crates)]
extern crate unwind;
+// Only needed for now for the `std_detect` module until that crate changes to
+// use `cfg_if::cfg_if!`
+#[macro_use]
+#[cfg(not(test))]
+extern crate cfg_if;
+
// During testing, this crate is not actually the "real" std library, but rather
// it links to the real std library, which was compiled from this same source
// code. So any lang items std defines are conditionally excluded (or else they
diff --git a/src/libstd/macros.rs b/src/libstd/macros.rs
index ef7179f..d695141 100644
--- a/src/libstd/macros.rs
+++ b/src/libstd/macros.rs
@@ -56,13 +56,13 @@
#[allow_internal_unstable(__rust_unstable_column, libstd_sys_internals)]
macro_rules! panic {
() => ({
- panic!("explicit panic")
+ $crate::panic!("explicit panic")
});
($msg:expr) => ({
$crate::rt::begin_panic($msg, &(file!(), line!(), __rust_unstable_column!()))
});
($msg:expr,) => ({
- panic!($msg)
+ $crate::panic!($msg)
});
($fmt:expr, $($arg:tt)+) => ({
$crate::rt::begin_panic_fmt(&format_args!($fmt, $($arg)+),
@@ -145,7 +145,7 @@
#[stable(feature = "rust1", since = "1.0.0")]
#[allow_internal_unstable(print_internals, format_args_nl)]
macro_rules! println {
- () => (print!("\n"));
+ () => ($crate::print!("\n"));
($($arg:tt)*) => ({
$crate::io::_print(format_args_nl!($($arg)*));
})
@@ -204,7 +204,7 @@
#[stable(feature = "eprint", since = "1.19.0")]
#[allow_internal_unstable(print_internals, format_args_nl)]
macro_rules! eprintln {
- () => (eprint!("\n"));
+ () => ($crate::eprint!("\n"));
($($arg:tt)*) => ({
$crate::io::_eprint(format_args_nl!($($arg)*));
})
@@ -337,23 +337,23 @@
#[stable(feature = "dbg_macro", since = "1.32.0")]
macro_rules! dbg {
() => {
- eprintln!("[{}:{}]", file!(), line!());
+ $crate::eprintln!("[{}:{}]", file!(), line!());
};
($val:expr) => {
// Use of `match` here is intentional because it affects the lifetimes
// of temporaries - https://stackoverflow.com/a/48732525/1063961
match $val {
tmp => {
- eprintln!("[{}:{}] {} = {:#?}",
+ $crate::eprintln!("[{}:{}] {} = {:#?}",
file!(), line!(), stringify!($val), &tmp);
tmp
}
}
};
// Trailing comma with single argument is ignored
- ($val:expr,) => { dbg!($val) };
+ ($val:expr,) => { $crate::dbg!($val) };
($($val:expr),+ $(,)?) => {
- ($(dbg!($val)),+,)
+ ($($crate::dbg!($val)),+,)
};
}
@@ -896,39 +896,3 @@
($cond:expr, $($arg:tt)+) => ({ /* compiler built-in */ });
}
}
-
-/// Defines `#[cfg]` if-else statements.
-///
-/// This is similar to the `if/elif` C preprocessor macro by allowing definition
-/// of a cascade of `#[cfg]` cases, emitting the implementation which matches
-/// first.
-///
-/// This allows you to conveniently provide a long list `#[cfg]`'d blocks of code
-/// without having to rewrite each clause multiple times.
-macro_rules! cfg_if {
- ($(
- if #[cfg($($meta:meta),*)] { $($it:item)* }
- ) else * else {
- $($it2:item)*
- }) => {
- __cfg_if_items! {
- () ;
- $( ( ($($meta),*) ($($it)*) ), )*
- ( () ($($it2)*) ),
- }
- }
-}
-
-macro_rules! __cfg_if_items {
- (($($not:meta,)*) ; ) => {};
- (($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => {
- __cfg_if_apply! { cfg(all(not(any($($not),*)), $($m,)*)), $($it)* }
- __cfg_if_items! { ($($not,)* $($m,)*) ; $($rest)* }
- }
-}
-
-macro_rules! __cfg_if_apply {
- ($m:meta, $($it:item)*) => {
- $(#[$m] $it)*
- }
-}
diff --git a/src/libstd/os/mod.rs b/src/libstd/os/mod.rs
index 44cbc18..94e8b78 100644
--- a/src/libstd/os/mod.rs
+++ b/src/libstd/os/mod.rs
@@ -3,7 +3,7 @@
#![stable(feature = "os", since = "1.0.0")]
#![allow(missing_docs, nonstandard_style, missing_debug_implementations)]
-cfg_if! {
+cfg_if::cfg_if! {
if #[cfg(rustdoc)] {
// When documenting libstd we want to show unix/windows/linux modules as
diff --git a/src/libstd/sync/mod.rs b/src/libstd/sync/mod.rs
index 809ee88..fd6e46f 100644
--- a/src/libstd/sync/mod.rs
+++ b/src/libstd/sync/mod.rs
@@ -163,6 +163,7 @@
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::mutex::{Mutex, MutexGuard};
#[stable(feature = "rust1", since = "1.0.0")]
+#[allow(deprecated)]
pub use self::once::{Once, OnceState, ONCE_INIT};
#[stable(feature = "rust1", since = "1.0.0")]
pub use crate::sys_common::poison::{PoisonError, TryLockError, TryLockResult, LockResult};
diff --git a/src/libstd/sync/once.rs b/src/libstd/sync/once.rs
index 0c91249..e529b8c 100644
--- a/src/libstd/sync/once.rs
+++ b/src/libstd/sync/once.rs
@@ -115,6 +115,11 @@
/// static START: Once = ONCE_INIT;
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_deprecated(
+ since = "1.38.0",
+ reason = "the `new` function is now preferred",
+ suggestion = "Once::new()",
+)]
pub const ONCE_INIT: Once = Once::new();
// Four states that a Once can be in, encoded into the lower bits of `state` in
diff --git a/src/libstd/sys/mod.rs b/src/libstd/sys/mod.rs
index 3f3cedc..21360e2 100644
--- a/src/libstd/sys/mod.rs
+++ b/src/libstd/sys/mod.rs
@@ -22,7 +22,7 @@
#![allow(missing_debug_implementations)]
-cfg_if! {
+cfg_if::cfg_if! {
if #[cfg(unix)] {
mod unix;
pub use self::unix::*;
@@ -54,7 +54,7 @@
// Windows when we're compiling for Linux.
#[cfg(rustdoc)]
-cfg_if! {
+cfg_if::cfg_if! {
if #[cfg(any(unix, target_os = "redox"))] {
// On unix we'll document what's already available
#[stable(feature = "rust1", since = "1.0.0")]
@@ -77,7 +77,7 @@
}
#[cfg(rustdoc)]
-cfg_if! {
+cfg_if::cfg_if! {
if #[cfg(windows)] {
// On windows we'll just be documenting what's already available
#[allow(missing_docs)]
diff --git a/src/libstd/sys/wasi/args.rs b/src/libstd/sys/wasi/args.rs
index 9c8e59e..8b4b354 100644
--- a/src/libstd/sys/wasi/args.rs
+++ b/src/libstd/sys/wasi/args.rs
@@ -32,7 +32,7 @@
let (mut argc, mut argv_buf_size) = (0, 0);
cvt_wasi(libc::__wasi_args_sizes_get(&mut argc, &mut argv_buf_size))?;
- let mut argc = vec![0 as *mut libc::c_char; argc];
+ let mut argc = vec![core::ptr::null_mut::<libc::c_char>(); argc];
let mut argv_buf = vec![0; argv_buf_size];
cvt_wasi(libc::__wasi_args_get(argc.as_mut_ptr(), argv_buf.as_mut_ptr()))?;
diff --git a/src/libstd/sys/wasm/mod.rs b/src/libstd/sys/wasm/mod.rs
index 9ea8bd1..7d15770 100644
--- a/src/libstd/sys/wasm/mod.rs
+++ b/src/libstd/sys/wasm/mod.rs
@@ -40,7 +40,7 @@
pub use crate::sys_common::os_str_bytes as os_str;
-cfg_if! {
+cfg_if::cfg_if! {
if #[cfg(target_feature = "atomics")] {
#[path = "condvar_atomics.rs"]
pub mod condvar;
diff --git a/src/libstd/sys/wasm/thread.rs b/src/libstd/sys/wasm/thread.rs
index 1dc786c..61b4003 100644
--- a/src/libstd/sys/wasm/thread.rs
+++ b/src/libstd/sys/wasm/thread.rs
@@ -59,7 +59,7 @@
pub unsafe fn init() -> Option<Guard> { None }
}
-cfg_if! {
+cfg_if::cfg_if! {
if #[cfg(all(target_feature = "atomics", feature = "wasm-bindgen-threads"))] {
#[link(wasm_import_module = "__wbindgen_thread_xform__")]
extern {
diff --git a/src/libstd/sys/wasm/thread_local_atomics.rs b/src/libstd/sys/wasm/thread_local_atomics.rs
index b408ad0..3dc0bb2 100644
--- a/src/libstd/sys/wasm/thread_local_atomics.rs
+++ b/src/libstd/sys/wasm/thread_local_atomics.rs
@@ -11,7 +11,7 @@
impl ThreadControlBlock {
fn new() -> ThreadControlBlock {
ThreadControlBlock {
- keys: [0 as *mut u8; MAX_KEYS],
+ keys: [core::ptr::null_mut(); MAX_KEYS],
}
}
diff --git a/src/libstd/sys_common/mod.rs b/src/libstd/sys_common/mod.rs
index c4daede..13a59f6 100644
--- a/src/libstd/sys_common/mod.rs
+++ b/src/libstd/sys_common/mod.rs
@@ -65,7 +65,7 @@
pub mod process;
pub mod fs;
-cfg_if! {
+cfg_if::cfg_if! {
if #[cfg(any(target_os = "cloudabi",
target_os = "l4re",
target_os = "redox",
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index 02fbcb1..54cd403 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -1181,7 +1181,7 @@
Field(P<Expr>, Ident),
/// An indexing operation (e.g., `foo[2]`).
Index(P<Expr>, P<Expr>),
- /// A range (e.g., `1..2`, `1..`, `..2`, `1...2`, `1...`, `...2`).
+ /// A range (e.g., `1..2`, `1..`, `..2`, `1..=2`, `..=2`).
Range(Option<P<Expr>>, Option<P<Expr>>, RangeLimits),
/// Variable reference, possibly containing `::` and/or type
@@ -1770,6 +1770,7 @@
/// E.g., `bar: usize` as in `fn foo(bar: usize)`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Arg {
+ pub attrs: ThinVec<Attribute>,
pub ty: P<Ty>,
pub pat: P<Pat>,
pub id: NodeId,
@@ -1817,7 +1818,7 @@
}
}
- pub fn from_self(eself: ExplicitSelf, eself_ident: Ident) -> Arg {
+ pub fn from_self(attrs: ThinVec<Attribute>, eself: ExplicitSelf, eself_ident: Ident) -> Arg {
let span = eself.span.to(eself_ident.span);
let infer_ty = P(Ty {
id: DUMMY_NODE_ID,
@@ -1825,6 +1826,7 @@
span,
});
let arg = |mutbl, ty| Arg {
+ attrs,
pat: P(Pat {
id: DUMMY_NODE_ID,
node: PatKind::Ident(BindingMode::ByValue(mutbl), eself_ident, None),
diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index d7e43f6..436620a 100644
--- a/src/libsyntax/attr/mod.rs
+++ b/src/libsyntax/attr/mod.rs
@@ -723,7 +723,7 @@
derive_has_attrs! {
Item, Expr, Local, ast::ForeignItem, ast::StructField, ast::ImplItem, ast::TraitItem, ast::Arm,
- ast::Field, ast::FieldPat, ast::Variant_
+ ast::Field, ast::FieldPat, ast::Variant_, ast::Arg
}
pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -> ast::Crate {
diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs
index 6123e95..1cc13ac 100644
--- a/src/libsyntax/config.rs
+++ b/src/libsyntax/config.rs
@@ -298,6 +298,10 @@
}
}
+ pub fn configure_fn_decl(&mut self, fn_decl: &mut ast::FnDecl) {
+ fn_decl.inputs.flat_map_in_place(|arg| self.configure(arg));
+ }
+
/// Denies `#[cfg]` on generic parameters until we decide what to do with it.
/// See issue #51279.
pub fn disallow_cfg_on_generic_param(&mut self, param: &ast::GenericParam) {
@@ -364,6 +368,11 @@
self.configure_pat(pat);
noop_visit_pat(pat, self)
}
+
+ fn visit_fn_decl(&mut self, mut fn_decl: &mut P<ast::FnDecl>) {
+ self.configure_fn_decl(&mut fn_decl);
+ noop_visit_fn_decl(fn_decl, self);
+ }
}
fn is_cfg(attr: &ast::Attribute) -> bool {
diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs
index 9f01b9b..ee640a1 100644
--- a/src/libsyntax/diagnostics/plugin.rs
+++ b/src/libsyntax/diagnostics/plugin.rs
@@ -33,8 +33,10 @@
span: Span,
token_tree: &[TokenTree])
-> Box<dyn MacResult+'cx> {
- let code = match (token_tree.len(), token_tree.get(0)) {
- (1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. }))) => code,
+ let code = match token_tree {
+ [
+ TokenTree::Token(Token { kind: token::Ident(code, _), .. })
+ ] => code,
_ => unreachable!()
};
@@ -66,22 +68,19 @@
span: Span,
token_tree: &[TokenTree])
-> Box<dyn MacResult+'cx> {
- let (code, description) = match (
- token_tree.len(),
- token_tree.get(0),
- token_tree.get(1),
- token_tree.get(2)
- ) {
- (1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })), None, None) => {
- (code, None)
+ let (code, description) = match token_tree {
+ [
+ TokenTree::Token(Token { kind: token::Ident(code, _), .. })
+ ] => {
+ (*code, None)
},
- (3, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })),
- Some(&TokenTree::Token(Token { kind: token::Comma, .. })),
- Some(&TokenTree::Token(Token {
- kind: token::Literal(token::Lit { symbol, .. }), ..
- }))) => {
- (code, Some(symbol))
- }
+ [
+ TokenTree::Token(Token { kind: token::Ident(code, _), .. }),
+ TokenTree::Token(Token { kind: token::Comma, .. }),
+ TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), ..})
+ ] => {
+ (*code, Some(*symbol))
+ },
_ => unreachable!()
};
@@ -121,19 +120,7 @@
}
});
- let span = span.apply_mark(ecx.current_expansion.mark);
-
- let name = Ident::from_str_and_span(&format!("__register_diagnostic_{}", code), span).gensym();
-
- MacEager::items(smallvec![
- ecx.item_mod(
- span,
- span,
- name,
- vec![],
- vec![],
- )
- ])
+ MacEager::items(smallvec![])
}
#[allow(deprecated)]
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 2a03e49..9d4bf7d 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -963,9 +963,10 @@
fn arg(&self, span: Span, ident: ast::Ident, ty: P<ast::Ty>) -> ast::Arg {
let arg_pat = self.pat_ident(span, ident);
ast::Arg {
- ty,
+ attrs: ThinVec::default(),
+ id: ast::DUMMY_NODE_ID,
pat: arg_pat,
- id: ast::DUMMY_NODE_ID
+ ty,
}
}
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 084d4fd..3fa96c6 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -1578,6 +1578,11 @@
*id = self.cx.resolver.next_node_id()
}
}
+
+ fn visit_fn_decl(&mut self, mut fn_decl: &mut P<ast::FnDecl>) {
+ self.cfg.configure_fn_decl(&mut fn_decl);
+ noop_visit_fn_decl(fn_decl, self);
+ }
}
pub struct ExpansionConfig<'feat> {
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 5dbf218..22745a1 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -23,6 +23,7 @@
use rustc_data_structures::fx::{FxHashMap};
use std::borrow::Cow;
use std::collections::hash_map::Entry;
+use std::slice;
use rustc_data_structures::sync::Lrc;
use errors::Applicability;
@@ -249,8 +250,9 @@
def: &ast::Item,
edition: Edition
) -> SyntaxExtension {
- let lhs_nm = ast::Ident::from_str("lhs").gensym();
- let rhs_nm = ast::Ident::from_str("rhs").gensym();
+ let lhs_nm = ast::Ident::new(sym::lhs, def.span);
+ let rhs_nm = ast::Ident::new(sym::rhs, def.span);
+ let tt_spec = ast::Ident::new(sym::tt, def.span);
// Parse the macro_rules! invocation
let body = match def.node {
@@ -266,9 +268,9 @@
let argument_gram = vec![
quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
tts: vec![
- quoted::TokenTree::MetaVarDecl(def.span, lhs_nm, ast::Ident::from_str("tt")),
+ quoted::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
quoted::TokenTree::token(token::FatArrow, def.span),
- quoted::TokenTree::MetaVarDecl(def.span, rhs_nm, ast::Ident::from_str("tt")),
+ quoted::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
],
separator: Some(Token::new(
if body.legacy { token::Semi } else { token::Comma }, def.span
@@ -358,10 +360,10 @@
// don't abort iteration early, so that errors for multiple lhses can be reported
for lhs in &lhses {
- valid &= check_lhs_no_empty_seq(sess, &[lhs.clone()]);
+ valid &= check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
valid &= check_lhs_duplicate_matcher_bindings(
sess,
- &[lhs.clone()],
+ slice::from_ref(lhs),
&mut FxHashMap::default(),
def.id
);
@@ -1115,10 +1117,9 @@
tok: "ed::TokenTree) -> Result<(), String> {
debug!("has_legal_fragment_specifier({:?})", tok);
if let quoted::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok {
- let frag_name = frag_spec.as_str();
let frag_span = tok.span();
- if !is_legal_fragment_specifier(sess, features, attrs, &frag_name, frag_span) {
- return Err(frag_name.to_string());
+ if !is_legal_fragment_specifier(sess, features, attrs, frag_spec.name, frag_span) {
+ return Err(frag_spec.to_string());
}
}
Ok(())
@@ -1127,7 +1128,7 @@
fn is_legal_fragment_specifier(_sess: &ParseSess,
_features: &Features,
_attrs: &[ast::Attribute],
- frag_name: &str,
+ frag_name: Symbol,
_frag_span: Span) -> bool {
/*
* If new fragment specifiers are invented in nightly, `_sess`,
@@ -1136,9 +1137,9 @@
* this function.
*/
match frag_name {
- "item" | "block" | "stmt" | "expr" | "pat" | "lifetime" |
- "path" | "ty" | "ident" | "meta" | "tt" | "vis" | "literal" |
- "" => true,
+ sym::item | sym::block | sym::stmt | sym::expr | sym::pat |
+ sym::lifetime | sym::path | sym::ty | sym::ident | sym::meta | sym::tt |
+ sym::vis | sym::literal | kw::Invalid => true,
_ => false,
}
}
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index 044c4b1..7f80e20 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -25,7 +25,7 @@
use crate::edition::{ALL_EDITIONS, Edition};
use crate::visit::{self, FnKind, Visitor};
use crate::parse::{token, ParseSess};
-use crate::symbol::{Symbol, kw, sym};
+use crate::symbol::{Symbol, sym};
use crate::tokenstream::TokenTree;
use errors::{Applicability, DiagnosticBuilder, Handler};
@@ -526,9 +526,6 @@
// Allows `impl Trait` in bindings (`let`, `const`, `static`).
(active, impl_trait_in_bindings, "1.30.0", Some(34511), None),
- // Allows `const _: TYPE = VALUE`.
- (active, underscore_const_names, "1.31.0", Some(54912), None),
-
// Allows using `reason` in lint attributes and the `#[expect(lint)]` lint check.
(active, lint_reasons, "1.31.0", Some(54503), None),
@@ -557,8 +554,10 @@
// Allows the user of associated type bounds.
(active, associated_type_bounds, "1.34.0", Some(52662), None),
- // Allows calling constructor functions in `const fn`
- // FIXME Create issue
+ // Attributes on formal function params.
+ (active, param_attrs, "1.36.0", Some(60406), None),
+
+ // Allows calling constructor functions in `const fn`.
(active, const_constructor, "1.37.0", Some(61456), None),
// #[repr(transparent)] on enums.
@@ -849,6 +848,8 @@
// Allows using `#[repr(align(X))]` on enums with equivalent semantics
// to wrapping an enum in a wrapper struct with `#[repr(align(X))]`.
(accepted, repr_align_enum, "1.37.0", Some(57996), None),
+ // Allows `const _: TYPE = VALUE`.
+ (accepted, underscore_const_names, "1.37.0", Some(54912), None),
// -------------------------------------------------------------------------
// feature-group-end: accepted features
@@ -1998,13 +1999,6 @@
fn visit_item(&mut self, i: &'a ast::Item) {
match i.node {
- ast::ItemKind::Const(_,_) => {
- if i.ident.name == kw::Underscore {
- gate_feature_post!(&self, underscore_const_names, i.span,
- "naming constants with `_` is unstable");
- }
- }
-
ast::ItemKind::ForeignMod(ref foreign_module) => {
self.check_abi(foreign_module.abi, i.span);
}
@@ -2511,6 +2505,18 @@
parse_sess: sess,
plugin_attributes,
};
+
+ sess
+ .param_attr_spans
+ .borrow()
+ .iter()
+ .for_each(|span| gate_feature!(
+ &ctx,
+ param_attrs,
+ *span,
+ "attributes on function parameters are unstable"
+ ));
+
let visitor = &mut PostExpansionVisitor {
context: &ctx,
builtin_attributes: &*BUILTIN_ATTRIBUTE_MAP,
diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs
index 2889f8e..02e2c96 100644
--- a/src/libsyntax/mut_visit.rs
+++ b/src/libsyntax/mut_visit.rs
@@ -568,8 +568,9 @@
vis.visit_span(span);
}
-pub fn noop_visit_arg<T: MutVisitor>(Arg { id, pat, ty }: &mut Arg, vis: &mut T) {
+pub fn noop_visit_arg<T: MutVisitor>(Arg { attrs, id, pat, ty }: &mut Arg, vis: &mut T) {
vis.visit_id(id);
+ visit_thin_attrs(attrs, vis);
vis.visit_pat(pat);
vis.visit_ty(ty);
}
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 77a87e2..b28d48b 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -18,6 +18,14 @@
permitted in this context";
impl<'a> Parser<'a> {
+ crate fn parse_arg_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
+ let attrs = self.parse_outer_attributes()?;
+ attrs.iter().for_each(|a|
+ self.sess.param_attr_spans.borrow_mut().push(a.span)
+ );
+ Ok(attrs)
+ }
+
/// Parse attributes that appear before an item
crate fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
let mut attrs: Vec<ast::Attribute> = Vec::new();
@@ -35,7 +43,8 @@
};
let inner_parse_policy =
InnerAttributeParsePolicy::NotPermitted { reason: inner_error_reason };
- attrs.push(self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?);
+ let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?;
+ attrs.push(attr);
just_parsed_doc_comment = false;
}
token::DocComment(s) => {
diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs
index 9d2ac5b..60544cc 100644
--- a/src/libsyntax/parse/diagnostics.rs
+++ b/src/libsyntax/parse/diagnostics.rs
@@ -28,7 +28,7 @@
span: ident.span,
id: ast::DUMMY_NODE_ID
};
- Arg { ty: P(ty), pat: pat, id: ast::DUMMY_NODE_ID }
+ Arg { attrs: ThinVec::default(), id: ast::DUMMY_NODE_ID, pat, ty: P(ty) }
}
pub enum Error {
@@ -1074,11 +1074,11 @@
Err(err)
}
- crate fn eat_incorrect_doc_comment(&mut self, applied_to: &str) {
+ crate fn eat_incorrect_doc_comment_for_arg_type(&mut self) {
if let token::DocComment(_) = self.token.kind {
let mut err = self.diagnostic().struct_span_err(
self.token.span,
- &format!("documentation comments cannot be applied to {}", applied_to),
+ "documentation comments cannot be applied to a function parameter's type",
);
err.span_label(self.token.span, "doc comments are not allowed here");
err.emit();
@@ -1095,7 +1095,7 @@
self.bump();
let mut err = self.diagnostic().struct_span_err(
sp,
- &format!("attributes cannot be applied to {}", applied_to),
+ "attributes cannot be applied to a function parameter's type",
);
err.span_label(sp, "attributes are not allowed here");
err.emit();
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 9df2898..1c44155 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1490,6 +1490,7 @@
buffered_lints: Lock::new(vec![]),
edition: Edition::from_session(),
ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
+ param_attr_spans: Lock::new(Vec::new()),
}
}
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index cde3568..ba5d1d0 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -54,6 +54,7 @@
/// operation token that followed it, but that the parser cannot identify without further
/// analysis.
pub ambiguous_block_expr_parse: Lock<FxHashMap<Span, Span>>,
+ pub param_attr_spans: Lock<Vec<Span>>
}
impl ParseSess {
@@ -79,6 +80,7 @@
buffered_lints: Lock::new(vec![]),
edition: Edition::from_session(),
ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
+ param_attr_spans: Lock::new(Vec::new()),
}
}
@@ -424,48 +426,38 @@
string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
let tts: &[TokenTree] = &tts[..];
- match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
- (
- 4,
- Some(&TokenTree::Token(Token {
- kind: token::Ident(name_macro_rules, false), ..
- })),
- Some(&TokenTree::Token(Token { kind: token::Not, .. })),
- Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
- Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
- )
- if name_macro_rules == sym::macro_rules && name_zip.as_str() == "zip" => {
+ match tts {
+ [
+ TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }),
+ TokenTree::Token(Token { kind: token::Not, .. }),
+ TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }),
+ TokenTree::Delimited(_, macro_delim, macro_tts)
+ ]
+ if name_macro_rules == &sym::macro_rules && name_zip.as_str() == "zip" => {
let tts = ¯o_tts.trees().collect::<Vec<_>>();
- match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
- (
- 3,
- Some(&TokenTree::Delimited(_, first_delim, ref first_tts)),
- Some(&TokenTree::Token(Token { kind: token::FatArrow, .. })),
- Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
- )
- if macro_delim == token::Paren => {
+ match &tts[..] {
+ [
+ TokenTree::Delimited(_, first_delim, first_tts),
+ TokenTree::Token(Token { kind: token::FatArrow, .. }),
+ TokenTree::Delimited(_, second_delim, second_tts),
+ ]
+ if macro_delim == &token::Paren => {
let tts = &first_tts.trees().collect::<Vec<_>>();
- match (tts.len(), tts.get(0), tts.get(1)) {
- (
- 2,
- Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
- Some(&TokenTree::Token(Token {
- kind: token::Ident(name, false), ..
- })),
- )
- if first_delim == token::Paren && name.as_str() == "a" => {},
+ match &tts[..] {
+ [
+ TokenTree::Token(Token { kind: token::Dollar, .. }),
+ TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
+ ]
+ if first_delim == &token::Paren && name.as_str() == "a" => {},
_ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
}
let tts = &second_tts.trees().collect::<Vec<_>>();
- match (tts.len(), tts.get(0), tts.get(1)) {
- (
- 2,
- Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
- Some(&TokenTree::Token(Token {
- kind: token::Ident(name, false), ..
- })),
- )
- if second_delim == token::Paren && name.as_str() == "a" => {},
+ match &tts[..] {
+ [
+ TokenTree::Token(Token { kind: token::Dollar, .. }),
+ TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
+ ]
+ if second_delim == &token::Paren && name.as_str() == "a" => {},
_ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
}
},
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index d9eba3b..78eeb51 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -1188,7 +1188,8 @@
// definition...
// We don't allow argument names to be left off in edition 2018.
- p.parse_arg_general(p.token.span.rust_2018(), true, false)
+ let is_name_required = p.token.span.rust_2018();
+ p.parse_arg_general(true, false, |_| is_name_required)
})?;
generics.where_clause = self.parse_where_clause()?;
@@ -1487,26 +1488,31 @@
/// Skips unexpected attributes and doc comments in this position and emits an appropriate
/// error.
/// This version of parse arg doesn't necessarily require identifier names.
- fn parse_arg_general(
+ fn parse_arg_general<F>(
&mut self,
- require_name: bool,
is_trait_item: bool,
allow_c_variadic: bool,
- ) -> PResult<'a, Arg> {
- if let Ok(Some(arg)) = self.parse_self_arg() {
+ is_name_required: F,
+ ) -> PResult<'a, Arg>
+ where
+ F: Fn(&token::Token) -> bool
+ {
+ let attrs = self.parse_arg_attributes()?;
+ if let Ok(Some(mut arg)) = self.parse_self_arg() {
+ arg.attrs = attrs.into();
return self.recover_bad_self_arg(arg, is_trait_item);
}
- let (pat, ty) = if require_name || self.is_named_argument() {
- debug!("parse_arg_general parse_pat (require_name:{})", require_name);
- self.eat_incorrect_doc_comment("method arguments");
- let pat = self.parse_pat(Some("argument name"))?;
+ let is_name_required = is_name_required(&self.token);
+ let (pat, ty) = if is_name_required || self.is_named_argument() {
+ debug!("parse_arg_general parse_pat (is_name_required:{})", is_name_required);
+ let pat = self.parse_pat(Some("argument name"))?;
if let Err(mut err) = self.expect(&token::Colon) {
if let Some(ident) = self.argument_without_type(
&mut err,
pat,
- require_name,
+ is_name_required,
is_trait_item,
) {
err.emit();
@@ -1516,12 +1522,12 @@
}
}
- self.eat_incorrect_doc_comment("a method argument's type");
+ self.eat_incorrect_doc_comment_for_arg_type();
(pat, self.parse_ty_common(true, true, allow_c_variadic)?)
} else {
debug!("parse_arg_general ident_to_pat");
let parser_snapshot_before_ty = self.clone();
- self.eat_incorrect_doc_comment("a method argument's type");
+ self.eat_incorrect_doc_comment_for_arg_type();
let mut ty = self.parse_ty_common(true, true, allow_c_variadic);
if ty.is_ok() && self.token != token::Comma &&
self.token != token::CloseDelim(token::Paren) {
@@ -1554,11 +1560,12 @@
}
};
- Ok(Arg { ty, pat, id: ast::DUMMY_NODE_ID })
+ Ok(Arg { attrs: attrs.into(), id: ast::DUMMY_NODE_ID, pat, ty })
}
/// Parses an argument in a lambda header (e.g., `|arg, arg|`).
fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> {
+ let attrs = self.parse_arg_attributes()?;
let pat = self.parse_pat(Some("argument name"))?;
let t = if self.eat(&token::Colon) {
self.parse_ty()?
@@ -1570,6 +1577,7 @@
})
};
Ok(Arg {
+ attrs: attrs.into(),
ty: t,
pat,
id: ast::DUMMY_NODE_ID
@@ -1986,8 +1994,29 @@
let ex: ExprKind;
+ macro_rules! parse_lit {
+ () => {
+ match self.parse_lit() {
+ Ok(literal) => {
+ hi = self.prev_span;
+ ex = ExprKind::Lit(literal);
+ }
+ Err(mut err) => {
+ self.cancel(&mut err);
+ return Err(self.expected_expression_found());
+ }
+ }
+ }
+ }
+
// Note: when adding new syntax here, don't forget to adjust TokenKind::can_begin_expr().
match self.token.kind {
+ // This match arm is a special-case of the `_` match arm below and
+ // could be removed without changing functionality, but it's faster
+ // to have it here, especially for programs with large constants.
+ token::Literal(_) => {
+ parse_lit!()
+ }
token::OpenDelim(token::Paren) => {
self.bump();
@@ -2233,16 +2262,7 @@
self.bump();
return Ok(self.mk_expr(self.token.span, ExprKind::Err, ThinVec::new()));
}
- match self.parse_literal_maybe_minus() {
- Ok(expr) => {
- hi = expr.span;
- ex = expr.node.clone();
- }
- Err(mut err) => {
- self.cancel(&mut err);
- return Err(self.expected_expression_found());
- }
- }
+ parse_lit!()
}
}
}
@@ -5413,15 +5433,19 @@
&token::CloseDelim(token::Paren),
SeqSep::trailing_allowed(token::Comma),
|p| {
- // If the argument is a C-variadic argument we should not
- // enforce named arguments.
- let enforce_named_args = if p.token == token::DotDotDot {
- false
- } else {
- named_args
- };
- match p.parse_arg_general(enforce_named_args, false,
- allow_c_variadic) {
+ let do_not_enforce_named_arguments_for_c_variadic =
+ |token: &token::Token| -> bool {
+ if token == &token::DotDotDot {
+ false
+ } else {
+ named_args
+ }
+ };
+ match p.parse_arg_general(
+ false,
+ allow_c_variadic,
+ do_not_enforce_named_arguments_for_c_variadic
+ ) {
Ok(arg) => {
if let TyKind::CVarArgs = arg.ty.node {
c_variadic = true;
@@ -5466,7 +5490,6 @@
/// Parses the argument list and result type of a function declaration.
fn parse_fn_decl(&mut self, allow_c_variadic: bool) -> PResult<'a, P<FnDecl>> {
-
let (args, c_variadic) = self.parse_fn_args(true, allow_c_variadic)?;
let ret_ty = self.parse_ret_ty(true)?;
@@ -5478,6 +5501,8 @@
}
/// Returns the parsed optional self argument and whether a self shortcut was used.
+ ///
+ /// See `parse_self_arg_with_attrs` to collect attributes.
fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
let expect_ident = |this: &mut Self| match this.token.kind {
// Preserve hygienic context.
@@ -5583,7 +5608,18 @@
};
let eself = source_map::respan(eself_lo.to(eself_hi), eself);
- Ok(Some(Arg::from_self(eself, eself_ident)))
+ Ok(Some(Arg::from_self(ThinVec::default(), eself, eself_ident)))
+ }
+
+ /// Returns the parsed optional self argument with attributes and whether a self
+ /// shortcut was used.
+ fn parse_self_arg_with_attrs(&mut self) -> PResult<'a, Option<Arg>> {
+ let attrs = self.parse_arg_attributes()?;
+ let arg_opt = self.parse_self_arg()?;
+ Ok(arg_opt.map(|mut arg| {
+ arg.attrs = attrs.into();
+ arg
+ }))
}
/// Parses the parameter list and result type of a function that may have a `self` parameter.
@@ -5593,7 +5629,7 @@
self.expect(&token::OpenDelim(token::Paren))?;
// Parse optional self argument.
- let self_arg = self.parse_self_arg()?;
+ let self_arg = self.parse_self_arg_with_attrs()?;
// Parse the rest of the function parameter list.
let sep = SeqSep::trailing_allowed(token::Comma);
@@ -5867,7 +5903,7 @@
let ident = self.parse_ident()?;
let mut generics = self.parse_generics()?;
let decl = self.parse_fn_decl_with_self(|p| {
- p.parse_arg_general(true, true, false)
+ p.parse_arg_general(true, false, |_| true)
})?;
generics.where_clause = self.parse_where_clause()?;
*at_end = true;
@@ -7443,7 +7479,7 @@
} else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) {
let ident = self.parse_ident().unwrap();
self.bump(); // `(`
- let kw_name = if let Ok(Some(_)) = self.parse_self_arg() {
+ let kw_name = if let Ok(Some(_)) = self.parse_self_arg_with_attrs() {
"method"
} else {
"function"
@@ -7494,7 +7530,7 @@
self.eat_to_tokens(&[&token::Gt]);
self.bump(); // `>`
let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(token::Paren)) {
- if let Ok(Some(_)) = self.parse_self_arg() {
+ if let Ok(Some(_)) = self.parse_self_arg_with_attrs() {
("fn", "method", false)
} else {
("fn", "function", false)
diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs
index a21d2df..c030726 100644
--- a/src/libsyntax/source_map.rs
+++ b/src/libsyntax/source_map.rs
@@ -191,6 +191,18 @@
/// If a file already exists in the source_map with the same id, that file is returned
/// unmodified
pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc<SourceFile> {
+ self.try_new_source_file(filename, src)
+ .unwrap_or_else(|OffsetOverflowError| {
+ eprintln!("fatal error: rustc does not support files larger than 4GB");
+ errors::FatalError.raise()
+ })
+ }
+
+ fn try_new_source_file(
+ &self,
+ filename: FileName,
+ src: String
+ ) -> Result<Lrc<SourceFile>, OffsetOverflowError> {
let start_pos = self.next_start_pos();
// The path is used to determine the directory for loading submodules and
@@ -212,7 +224,7 @@
was_remapped,
Some(&unmapped_path));
- return match self.source_file_by_stable_id(file_id) {
+ let lrc_sf = match self.source_file_by_stable_id(file_id) {
Some(lrc_sf) => lrc_sf,
None => {
let source_file = Lrc::new(SourceFile::new(
@@ -221,7 +233,7 @@
unmapped_path,
src,
Pos::from_usize(start_pos),
- ));
+ )?);
let mut files = self.files.borrow_mut();
@@ -230,7 +242,8 @@
source_file
}
- }
+ };
+ Ok(lrc_sf)
}
/// Allocates a new SourceFile representing a source file from an external
diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs
index 1998ec19..cbaf125 100644
--- a/src/libsyntax/test.rs
+++ b/src/libsyntax/test.rs
@@ -327,7 +327,7 @@
// }
let sp = ignored_span(cx, DUMMY_SP);
let ecx = &cx.ext_cx;
- let test_id = ecx.ident_of("test").gensym();
+ let test_id = Ident::with_empty_ctxt(sym::test);
// test::test_main_static(...)
let mut test_runner = cx.test_runner.clone().unwrap_or(
@@ -350,7 +350,7 @@
let test_extern_stmt = ecx.stmt_item(sp, ecx.item(sp,
test_id,
vec![],
- ast::ItemKind::ExternCrate(Some(sym::test))
+ ast::ItemKind::ExternCrate(None)
));
// pub fn main() { ... }
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 2daec97..cd906bb 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -65,9 +65,9 @@
// bounds without them.
// FIXME: Remove these impls when the compiler can compute the bounds quickly again.
// See https://github.com/rust-lang/rust/issues/60846
-#[cfg(parallel_compiler)]
+#[cfg(all(bootstrap, parallel_compiler))]
unsafe impl Send for TokenTree {}
-#[cfg(parallel_compiler)]
+#[cfg(all(bootstrap, parallel_compiler))]
unsafe impl Sync for TokenTree {}
impl TokenTree {
diff --git a/src/libsyntax/util/parser.rs b/src/libsyntax/util/parser.rs
index 69dd966..fcecee8 100644
--- a/src/libsyntax/util/parser.rs
+++ b/src/libsyntax/util/parser.rs
@@ -234,7 +234,7 @@
pub const PREC_CLOSURE: i8 = -40;
pub const PREC_JUMP: i8 = -30;
pub const PREC_RANGE: i8 = -10;
-// The range 2 ... 14 is reserved for AssocOp binary operator precedences.
+// The range 2..=14 is reserved for AssocOp binary operator precedences.
pub const PREC_PREFIX: i8 = 50;
pub const PREC_POSTFIX: i8 = 60;
pub const PREC_PAREN: i8 = 99;
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 24b0c37..8132024 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -550,8 +550,9 @@
pub fn walk_fn_decl<'a, V: Visitor<'a>>(visitor: &mut V, function_declaration: &'a FnDecl) {
for argument in &function_declaration.inputs {
+ walk_list!(visitor, visit_attribute, argument.attrs.iter());
visitor.visit_pat(&argument.pat);
- visitor.visit_ty(&argument.ty)
+ visitor.visit_ty(&argument.ty);
}
visitor.visit_fn_ret_ty(&function_declaration.output)
}
diff --git a/src/libsyntax_ext/deriving/cmp/ord.rs b/src/libsyntax_ext/deriving/cmp/ord.rs
index b25a9e4..844865d 100644
--- a/src/libsyntax_ext/deriving/cmp/ord.rs
+++ b/src/libsyntax_ext/deriving/cmp/ord.rs
@@ -82,8 +82,8 @@
// }
let new = {
- let other_f = match (other_fs.len(), other_fs.get(0)) {
- (1, Some(o_f)) => o_f,
+ let other_f = match other_fs {
+ [o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"),
};
diff --git a/src/libsyntax_ext/deriving/cmp/partial_eq.rs b/src/libsyntax_ext/deriving/cmp/partial_eq.rs
index 6172f27..732bb23 100644
--- a/src/libsyntax_ext/deriving/cmp/partial_eq.rs
+++ b/src/libsyntax_ext/deriving/cmp/partial_eq.rs
@@ -25,8 +25,8 @@
-> P<Expr>
{
let op = |cx: &mut ExtCtxt<'_>, span: Span, self_f: P<Expr>, other_fs: &[P<Expr>]| {
- let other_f = match (other_fs.len(), other_fs.get(0)) {
- (1, Some(o_f)) => o_f,
+ let other_f = match other_fs {
+ [o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`"),
};
diff --git a/src/libsyntax_ext/deriving/cmp/partial_ord.rs b/src/libsyntax_ext/deriving/cmp/partial_ord.rs
index 3980741..a30a7d7 100644
--- a/src/libsyntax_ext/deriving/cmp/partial_ord.rs
+++ b/src/libsyntax_ext/deriving/cmp/partial_ord.rs
@@ -143,8 +143,8 @@
// }
let new = {
- let other_f = match (other_fs.len(), other_fs.get(0)) {
- (1, Some(o_f)) => o_f,
+ let other_f = match other_fs {
+ [o_f] => o_f,
_ => {
cx.span_bug(span,
"not exactly 2 arguments in `derive(PartialOrd)`")
@@ -193,8 +193,8 @@
};
let par_cmp = |cx: &mut ExtCtxt<'_>, span, self_f: P<Expr>, other_fs: &[P<Expr>], default| {
- let other_f = match (other_fs.len(), other_fs.get(0)) {
- (1, Some(o_f)) => o_f,
+ let other_f = match other_fs {
+ [o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"),
};
diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs
index ffec667..8b719b5 100644
--- a/src/libsyntax_ext/deriving/generic/mod.rs
+++ b/src/libsyntax_ext/deriving/generic/mod.rs
@@ -928,7 +928,7 @@
let args = {
let self_args = explicit_self.map(|explicit_self| {
let ident = Ident::with_empty_ctxt(kw::SelfLower).with_span_pos(trait_.span);
- ast::Arg::from_self(explicit_self, ident)
+ ast::Arg::from_self(ThinVec::default(), explicit_self, ident)
});
let nonself_args = arg_types.into_iter()
.map(|(name, ty)| cx.arg(trait_.span, name, ty));
diff --git a/src/libsyntax_ext/deriving/hash.rs b/src/libsyntax_ext/deriving/hash.rs
index e7f99d4..7ad04ae 100644
--- a/src/libsyntax_ext/deriving/hash.rs
+++ b/src/libsyntax_ext/deriving/hash.rs
@@ -52,8 +52,8 @@
}
fn hash_substructure(cx: &mut ExtCtxt<'_>, trait_span: Span, substr: &Substructure<'_>) -> P<Expr> {
- let state_expr = match (substr.nonself_args.len(), substr.nonself_args.get(0)) {
- (1, Some(o_f)) => o_f,
+ let state_expr = match &substr.nonself_args {
+ &[o_f] => o_f,
_ => {
cx.span_bug(trait_span,
"incorrect number of arguments in `derive(Hash)`")
diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs
index 3771647..f44a6e7 100644
--- a/src/libsyntax_ext/format.rs
+++ b/src/libsyntax_ext/format.rs
@@ -28,7 +28,7 @@
enum Position {
Exact(usize),
- Named(String),
+ Named(Symbol),
}
struct Context<'a, 'b: 'a> {
@@ -57,7 +57,7 @@
/// Unique format specs seen for each argument.
arg_unique_types: Vec<Vec<ArgumentType>>,
/// Map from named arguments to their resolved indices.
- names: FxHashMap<String, usize>,
+ names: FxHashMap<Symbol, usize>,
/// The latest consecutive literal strings, or empty if there weren't any.
literal: String,
@@ -127,9 +127,9 @@
ecx: &mut ExtCtxt<'a>,
sp: Span,
tts: &[tokenstream::TokenTree]
-) -> Result<(P<ast::Expr>, Vec<P<ast::Expr>>, FxHashMap<String, usize>), DiagnosticBuilder<'a>> {
+) -> Result<(P<ast::Expr>, Vec<P<ast::Expr>>, FxHashMap<Symbol, usize>), DiagnosticBuilder<'a>> {
let mut args = Vec::<P<ast::Expr>>::new();
- let mut names = FxHashMap::<String, usize>::default();
+ let mut names = FxHashMap::<Symbol, usize>::default();
let mut p = ecx.new_parser_from_tts(tts);
@@ -158,11 +158,10 @@
"expected ident, positional arguments cannot follow named arguments",
));
};
- let name: &str = &name.as_str();
p.expect(&token::Eq)?;
let e = p.parse_expr()?;
- if let Some(prev) = names.get(name) {
+ if let Some(prev) = names.get(&name) {
ecx.struct_span_err(e.span, &format!("duplicate argument named `{}`", name))
.span_note(args[*prev].span, "previously here")
.emit();
@@ -174,7 +173,7 @@
// if the input is valid, we can simply append to the positional
// args. And remember the names.
let slot = args.len();
- names.insert(name.to_string(), slot);
+ names.insert(name, slot);
args.push(e);
} else {
let e = p.parse_expr()?;
@@ -188,7 +187,7 @@
fn resolve_name_inplace(&self, p: &mut parse::Piece<'_>) {
// NOTE: the `unwrap_or` branch is needed in case of invalid format
// arguments, e.g., `format_args!("{foo}")`.
- let lookup = |s| *self.names.get(s).unwrap_or(&0);
+ let lookup = |s: Symbol| *self.names.get(&s).unwrap_or(&0);
match *p {
parse::String(_) => {}
@@ -222,7 +221,7 @@
// it's written second, so it should come after width/precision.
let pos = match arg.position {
parse::ArgumentIs(i) | parse::ArgumentImplicitlyIs(i) => Exact(i),
- parse::ArgumentNamed(s) => Named(s.to_string()),
+ parse::ArgumentNamed(s) => Named(s),
};
let ty = Placeholder(arg.format.ty.to_string());
@@ -232,7 +231,7 @@
}
}
- fn verify_count(&mut self, c: parse::Count<'_>) {
+ fn verify_count(&mut self, c: parse::Count) {
match c {
parse::CountImplied |
parse::CountIs(..) => {}
@@ -240,7 +239,7 @@
self.verify_arg_type(Exact(i), Count);
}
parse::CountIsName(s) => {
- self.verify_arg_type(Named(s.to_string()), Count);
+ self.verify_arg_type(Named(s), Count);
}
}
}
@@ -390,7 +389,7 @@
ecx.std_path(&[sym::fmt, sym::rt, sym::v1, Symbol::intern(s)])
}
- fn build_count(&self, c: parse::Count<'_>) -> P<ast::Expr> {
+ fn build_count(&self, c: parse::Count) -> P<ast::Expr> {
let sp = self.macsp;
let count = |c, arg| {
let mut path = Context::rtpath(self.ecx, "Count");
@@ -739,7 +738,7 @@
sp: Span,
efmt: P<ast::Expr>,
args: Vec<P<ast::Expr>>,
- names: FxHashMap<String, usize>,
+ names: FxHashMap<Symbol, usize>,
append_newline: bool)
-> P<ast::Expr> {
// NOTE: this verbose way of initializing `Vec<Vec<ArgumentType>>` is because
@@ -888,7 +887,7 @@
};
let fmt_str = &*fmt.node.0.as_str(); // for the suggestions below
- let mut parser = parse::Parser::new(fmt_str, str_style, skips.clone(), append_newline);
+ let mut parser = parse::Parser::new(fmt_str, str_style, skips, append_newline);
let mut unverified_pieces = Vec::new();
while let Some(piece) = parser.next() {
@@ -901,15 +900,15 @@
if !parser.errors.is_empty() {
let err = parser.errors.remove(0);
- let sp = fmt.span.from_inner_byte_pos(err.start.unwrap(), err.end.unwrap());
+ let sp = fmt.span.from_inner(err.span);
let mut e = ecx.struct_span_err(sp, &format!("invalid format string: {}",
err.description));
e.span_label(sp, err.label + " in format string");
if let Some(note) = err.note {
e.note(¬e);
}
- if let Some((label, start, end)) = err.secondary_label {
- let sp = fmt.span.from_inner_byte_pos(start.unwrap(), end.unwrap());
+ if let Some((label, span)) = err.secondary_label {
+ let sp = fmt.span.from_inner(span);
e.span_label(sp, label);
}
e.emit();
@@ -917,9 +916,7 @@
}
let arg_spans = parser.arg_places.iter()
- .map(|&(parse::SpanIndex(start), parse::SpanIndex(end))| {
- fmt.span.from_inner_byte_pos(start, end)
- })
+ .map(|span| fmt.span.from_inner(*span))
.collect();
let mut cx = Context {
@@ -1044,7 +1041,9 @@
let mut show_doc_note = false;
let mut suggestions = vec![];
- for sub in foreign::$kind::iter_subs(fmt_str) {
+ // account for `"` and account for raw strings `r#`
+ let padding = str_style.map(|i| i + 2).unwrap_or(1);
+ for sub in foreign::$kind::iter_subs(fmt_str, padding) {
let trn = match sub.translate() {
Some(trn) => trn,
@@ -1064,10 +1063,8 @@
show_doc_note = true;
}
- if let Some((start, end)) = pos {
- // account for `"` and account for raw strings `r#`
- let padding = str_style.map(|i| i + 2).unwrap_or(1);
- let sp = fmt_sp.from_inner_byte_pos(start + padding, end + padding);
+ if let Some(inner_sp) = pos {
+ let sp = fmt_sp.from_inner(inner_sp);
suggestions.push((sp, trn));
} else {
diag.help(&format!("`{}` should be written as `{}`", sub, trn));
diff --git a/src/libsyntax_ext/format_foreign.rs b/src/libsyntax_ext/format_foreign.rs
index 261b2f3..3d4f827 100644
--- a/src/libsyntax_ext/format_foreign.rs
+++ b/src/libsyntax_ext/format_foreign.rs
@@ -1,5 +1,6 @@
pub mod printf {
use super::strcursor::StrCursor as Cur;
+ use syntax_pos::InnerSpan;
/// Represents a single `printf`-style substitution.
#[derive(Clone, PartialEq, Debug)]
@@ -18,7 +19,7 @@
}
}
- pub fn position(&self) -> Option<(usize, usize)> {
+ pub fn position(&self) -> Option<InnerSpan> {
match *self {
Substitution::Format(ref fmt) => Some(fmt.position),
_ => None,
@@ -28,7 +29,7 @@
pub fn set_position(&mut self, start: usize, end: usize) {
match self {
Substitution::Format(ref mut fmt) => {
- fmt.position = (start, end);
+ fmt.position = InnerSpan::new(start, end);
}
_ => {}
}
@@ -65,7 +66,7 @@
/// Type of parameter being converted.
pub type_: &'a str,
/// Byte offset for the start and end of this formatting directive.
- pub position: (usize, usize),
+ pub position: InnerSpan,
}
impl Format<'_> {
@@ -263,10 +264,10 @@
}
/// Returns an iterator over all substitutions in a given string.
- pub fn iter_subs(s: &str) -> Substitutions<'_> {
+ pub fn iter_subs(s: &str, start_pos: usize) -> Substitutions<'_> {
Substitutions {
s,
- pos: 0,
+ pos: start_pos,
}
}
@@ -282,9 +283,9 @@
let (mut sub, tail) = parse_next_substitution(self.s)?;
self.s = tail;
match sub {
- Substitution::Format(_) => if let Some((start, end)) = sub.position() {
- sub.set_position(start + self.pos, end + self.pos);
- self.pos += end;
+ Substitution::Format(_) => if let Some(inner_span) = sub.position() {
+ sub.set_position(inner_span.start + self.pos, inner_span.end + self.pos);
+ self.pos += inner_span.end;
}
Substitution::Escape => self.pos += 2,
}
@@ -373,7 +374,7 @@
precision: None,
length: None,
type_: at.slice_between(next).unwrap(),
- position: (start.at, next.at),
+ position: InnerSpan::new(start.at, next.at),
}),
next.slice_after()
));
@@ -560,7 +561,7 @@
drop(next);
end = at;
- let position = (start.at, end.at);
+ let position = InnerSpan::new(start.at, end.at);
let f = Format {
span: start.slice_between(end).unwrap(),
@@ -604,163 +605,12 @@
}
#[cfg(test)]
- mod tests {
- use super::{
- Format as F,
- Num as N,
- Substitution as S,
- iter_subs,
- parse_next_substitution as pns,
- };
-
- macro_rules! assert_eq_pnsat {
- ($lhs:expr, $rhs:expr) => {
- assert_eq!(
- pns($lhs).and_then(|(s, _)| s.translate()),
- $rhs.map(<String as From<&str>>::from)
- )
- };
- }
-
- #[test]
- fn test_escape() {
- assert_eq!(pns("has no escapes"), None);
- assert_eq!(pns("has no escapes, either %"), None);
- assert_eq!(pns("*so* has a %% escape"), Some((S::Escape," escape")));
- assert_eq!(pns("%% leading escape"), Some((S::Escape, " leading escape")));
- assert_eq!(pns("trailing escape %%"), Some((S::Escape, "")));
- }
-
- #[test]
- fn test_parse() {
- macro_rules! assert_pns_eq_sub {
- ($in_:expr, {
- $param:expr, $flags:expr,
- $width:expr, $prec:expr, $len:expr, $type_:expr,
- $pos:expr,
- }) => {
- assert_eq!(
- pns(concat!($in_, "!")),
- Some((
- S::Format(F {
- span: $in_,
- parameter: $param,
- flags: $flags,
- width: $width,
- precision: $prec,
- length: $len,
- type_: $type_,
- position: $pos,
- }),
- "!"
- ))
- )
- };
- }
-
- assert_pns_eq_sub!("%!",
- { None, "", None, None, None, "!", (0, 2), });
- assert_pns_eq_sub!("%c",
- { None, "", None, None, None, "c", (0, 2), });
- assert_pns_eq_sub!("%s",
- { None, "", None, None, None, "s", (0, 2), });
- assert_pns_eq_sub!("%06d",
- { None, "0", Some(N::Num(6)), None, None, "d", (0, 4), });
- assert_pns_eq_sub!("%4.2f",
- { None, "", Some(N::Num(4)), Some(N::Num(2)), None, "f", (0, 5), });
- assert_pns_eq_sub!("%#x",
- { None, "#", None, None, None, "x", (0, 3), });
- assert_pns_eq_sub!("%-10s",
- { None, "-", Some(N::Num(10)), None, None, "s", (0, 5), });
- assert_pns_eq_sub!("%*s",
- { None, "", Some(N::Next), None, None, "s", (0, 3), });
- assert_pns_eq_sub!("%-10.*s",
- { None, "-", Some(N::Num(10)), Some(N::Next), None, "s", (0, 7), });
- assert_pns_eq_sub!("%-*.*s",
- { None, "-", Some(N::Next), Some(N::Next), None, "s", (0, 6), });
- assert_pns_eq_sub!("%.6i",
- { None, "", None, Some(N::Num(6)), None, "i", (0, 4), });
- assert_pns_eq_sub!("%+i",
- { None, "+", None, None, None, "i", (0, 3), });
- assert_pns_eq_sub!("%08X",
- { None, "0", Some(N::Num(8)), None, None, "X", (0, 4), });
- assert_pns_eq_sub!("%lu",
- { None, "", None, None, Some("l"), "u", (0, 3), });
- assert_pns_eq_sub!("%Iu",
- { None, "", None, None, Some("I"), "u", (0, 3), });
- assert_pns_eq_sub!("%I32u",
- { None, "", None, None, Some("I32"), "u", (0, 5), });
- assert_pns_eq_sub!("%I64u",
- { None, "", None, None, Some("I64"), "u", (0, 5), });
- assert_pns_eq_sub!("%'d",
- { None, "'", None, None, None, "d", (0, 3), });
- assert_pns_eq_sub!("%10s",
- { None, "", Some(N::Num(10)), None, None, "s", (0, 4), });
- assert_pns_eq_sub!("%-10.10s",
- { None, "-", Some(N::Num(10)), Some(N::Num(10)), None, "s", (0, 8), });
- assert_pns_eq_sub!("%1$d",
- { Some(1), "", None, None, None, "d", (0, 4), });
- assert_pns_eq_sub!("%2$.*3$d",
- { Some(2), "", None, Some(N::Arg(3)), None, "d", (0, 8), });
- assert_pns_eq_sub!("%1$*2$.*3$d",
- { Some(1), "", Some(N::Arg(2)), Some(N::Arg(3)), None, "d", (0, 11), });
- assert_pns_eq_sub!("%-8ld",
- { None, "-", Some(N::Num(8)), None, Some("l"), "d", (0, 5), });
- }
-
- #[test]
- fn test_iter() {
- let s = "The %d'th word %% is: `%.*s` %!\n";
- let subs: Vec<_> = iter_subs(s).map(|sub| sub.translate()).collect();
- assert_eq!(
- subs.iter().map(|ms| ms.as_ref().map(|s| &s[..])).collect::<Vec<_>>(),
- vec![Some("{}"), None, Some("{:.*}"), None]
- );
- }
-
- /// Checks that the translations are what we expect.
- #[test]
- fn test_translation() {
- assert_eq_pnsat!("%c", Some("{}"));
- assert_eq_pnsat!("%d", Some("{}"));
- assert_eq_pnsat!("%u", Some("{}"));
- assert_eq_pnsat!("%x", Some("{:x}"));
- assert_eq_pnsat!("%X", Some("{:X}"));
- assert_eq_pnsat!("%e", Some("{:e}"));
- assert_eq_pnsat!("%E", Some("{:E}"));
- assert_eq_pnsat!("%f", Some("{}"));
- assert_eq_pnsat!("%g", Some("{:e}"));
- assert_eq_pnsat!("%G", Some("{:E}"));
- assert_eq_pnsat!("%s", Some("{}"));
- assert_eq_pnsat!("%p", Some("{:p}"));
-
- assert_eq_pnsat!("%06d", Some("{:06}"));
- assert_eq_pnsat!("%4.2f", Some("{:4.2}"));
- assert_eq_pnsat!("%#x", Some("{:#x}"));
- assert_eq_pnsat!("%-10s", Some("{:<10}"));
- assert_eq_pnsat!("%*s", None);
- assert_eq_pnsat!("%-10.*s", Some("{:<10.*}"));
- assert_eq_pnsat!("%-*.*s", None);
- assert_eq_pnsat!("%.6i", Some("{:06}"));
- assert_eq_pnsat!("%+i", Some("{:+}"));
- assert_eq_pnsat!("%08X", Some("{:08X}"));
- assert_eq_pnsat!("%lu", Some("{}"));
- assert_eq_pnsat!("%Iu", Some("{}"));
- assert_eq_pnsat!("%I32u", Some("{}"));
- assert_eq_pnsat!("%I64u", Some("{}"));
- assert_eq_pnsat!("%'d", None);
- assert_eq_pnsat!("%10s", Some("{:>10}"));
- assert_eq_pnsat!("%-10.10s", Some("{:<10.10}"));
- assert_eq_pnsat!("%1$d", Some("{0}"));
- assert_eq_pnsat!("%2$.*3$d", Some("{1:02$}"));
- assert_eq_pnsat!("%1$*2$.*3$s", Some("{0:>1$.2$}"));
- assert_eq_pnsat!("%-8ld", Some("{:<8}"));
- }
- }
+ mod tests;
}
pub mod shell {
use super::strcursor::StrCursor as Cur;
+ use syntax_pos::InnerSpan;
#[derive(Clone, PartialEq, Debug)]
pub enum Substitution<'a> {
@@ -778,11 +628,11 @@
}
}
- pub fn position(&self) -> Option<(usize, usize)> {
+ pub fn position(&self) -> Option<InnerSpan> {
match self {
Substitution::Ordinal(_, pos) |
Substitution::Name(_, pos) |
- Substitution::Escape(pos) => Some(*pos),
+ Substitution::Escape(pos) => Some(InnerSpan::new(pos.0, pos.1)),
}
}
@@ -804,10 +654,10 @@
}
/// Returns an iterator over all substitutions in a given string.
- pub fn iter_subs(s: &str) -> Substitutions<'_> {
+ pub fn iter_subs(s: &str, start_pos: usize) -> Substitutions<'_> {
Substitutions {
s,
- pos: 0,
+ pos: start_pos,
}
}
@@ -823,7 +673,7 @@
match parse_next_substitution(self.s) {
Some((mut sub, tail)) => {
self.s = tail;
- if let Some((start, end)) = sub.position() {
+ if let Some(InnerSpan { start, end }) = sub.position() {
sub.set_position(start + self.pos, end + self.pos);
self.pos += end;
}
@@ -897,68 +747,7 @@
}
#[cfg(test)]
- mod tests {
- use super::{
- Substitution as S,
- parse_next_substitution as pns,
- };
-
- macro_rules! assert_eq_pnsat {
- ($lhs:expr, $rhs:expr) => {
- assert_eq!(
- pns($lhs).and_then(|(f, _)| f.translate()),
- $rhs.map(<String as From<&str>>::from)
- )
- };
- }
-
- #[test]
- fn test_escape() {
- assert_eq!(pns("has no escapes"), None);
- assert_eq!(pns("has no escapes, either $"), None);
- assert_eq!(pns("*so* has a $$ escape"), Some((S::Escape((11, 13)), " escape")));
- assert_eq!(pns("$$ leading escape"), Some((S::Escape((0, 2)), " leading escape")));
- assert_eq!(pns("trailing escape $$"), Some((S::Escape((16, 18)), "")));
- }
-
- #[test]
- fn test_parse() {
- macro_rules! assert_pns_eq_sub {
- ($in_:expr, $kind:ident($arg:expr, $pos:expr)) => {
- assert_eq!(pns(concat!($in_, "!")), Some((S::$kind($arg.into(), $pos), "!")))
- };
- }
-
- assert_pns_eq_sub!("$0", Ordinal(0, (0, 2)));
- assert_pns_eq_sub!("$1", Ordinal(1, (0, 2)));
- assert_pns_eq_sub!("$9", Ordinal(9, (0, 2)));
- assert_pns_eq_sub!("$N", Name("N", (0, 2)));
- assert_pns_eq_sub!("$NAME", Name("NAME", (0, 5)));
- }
-
- #[test]
- fn test_iter() {
- use super::iter_subs;
- let s = "The $0'th word $$ is: `$WORD` $!\n";
- let subs: Vec<_> = iter_subs(s).map(|sub| sub.translate()).collect();
- assert_eq!(
- subs.iter().map(|ms| ms.as_ref().map(|s| &s[..])).collect::<Vec<_>>(),
- vec![Some("{0}"), None, Some("{WORD}")]
- );
- }
-
- #[test]
- fn test_translation() {
- assert_eq_pnsat!("$0", Some("{0}"));
- assert_eq_pnsat!("$9", Some("{9}"));
- assert_eq_pnsat!("$1", Some("{1}"));
- assert_eq_pnsat!("$10", Some("{1}"));
- assert_eq_pnsat!("$stuff", Some("{stuff}"));
- assert_eq_pnsat!("$NAME", Some("{NAME}"));
- assert_eq_pnsat!("$PREFIX/bin", Some("{PREFIX}"));
- }
-
- }
+ mod tests;
}
mod strcursor {
diff --git a/src/libsyntax_ext/format_foreign/printf/tests.rs b/src/libsyntax_ext/format_foreign/printf/tests.rs
new file mode 100644
index 0000000..87021f1
--- /dev/null
+++ b/src/libsyntax_ext/format_foreign/printf/tests.rs
@@ -0,0 +1,151 @@
+use super::{
+ Format as F,
+ Num as N,
+ Substitution as S,
+ iter_subs,
+ parse_next_substitution as pns,
+};
+
+macro_rules! assert_eq_pnsat {
+ ($lhs:expr, $rhs:expr) => {
+ assert_eq!(
+ pns($lhs).and_then(|(s, _)| s.translate()),
+ $rhs.map(<String as From<&str>>::from)
+ )
+ };
+}
+
+#[test]
+fn test_escape() {
+ assert_eq!(pns("has no escapes"), None);
+ assert_eq!(pns("has no escapes, either %"), None);
+ assert_eq!(pns("*so* has a %% escape"), Some((S::Escape," escape")));
+ assert_eq!(pns("%% leading escape"), Some((S::Escape, " leading escape")));
+ assert_eq!(pns("trailing escape %%"), Some((S::Escape, "")));
+}
+
+#[test]
+fn test_parse() {
+ macro_rules! assert_pns_eq_sub {
+ ($in_:expr, {
+ $param:expr, $flags:expr,
+ $width:expr, $prec:expr, $len:expr, $type_:expr,
+ $pos:expr,
+ }) => {
+ assert_eq!(
+ pns(concat!($in_, "!")),
+ Some((
+ S::Format(F {
+ span: $in_,
+ parameter: $param,
+ flags: $flags,
+ width: $width,
+ precision: $prec,
+ length: $len,
+ type_: $type_,
+ position: syntax_pos::InnerSpan::new($pos.0, $pos.1),
+ }),
+ "!"
+ ))
+ )
+ };
+ }
+
+ assert_pns_eq_sub!("%!",
+ { None, "", None, None, None, "!", (0, 2), });
+ assert_pns_eq_sub!("%c",
+ { None, "", None, None, None, "c", (0, 2), });
+ assert_pns_eq_sub!("%s",
+ { None, "", None, None, None, "s", (0, 2), });
+ assert_pns_eq_sub!("%06d",
+ { None, "0", Some(N::Num(6)), None, None, "d", (0, 4), });
+ assert_pns_eq_sub!("%4.2f",
+ { None, "", Some(N::Num(4)), Some(N::Num(2)), None, "f", (0, 5), });
+ assert_pns_eq_sub!("%#x",
+ { None, "#", None, None, None, "x", (0, 3), });
+ assert_pns_eq_sub!("%-10s",
+ { None, "-", Some(N::Num(10)), None, None, "s", (0, 5), });
+ assert_pns_eq_sub!("%*s",
+ { None, "", Some(N::Next), None, None, "s", (0, 3), });
+ assert_pns_eq_sub!("%-10.*s",
+ { None, "-", Some(N::Num(10)), Some(N::Next), None, "s", (0, 7), });
+ assert_pns_eq_sub!("%-*.*s",
+ { None, "-", Some(N::Next), Some(N::Next), None, "s", (0, 6), });
+ assert_pns_eq_sub!("%.6i",
+ { None, "", None, Some(N::Num(6)), None, "i", (0, 4), });
+ assert_pns_eq_sub!("%+i",
+ { None, "+", None, None, None, "i", (0, 3), });
+ assert_pns_eq_sub!("%08X",
+ { None, "0", Some(N::Num(8)), None, None, "X", (0, 4), });
+ assert_pns_eq_sub!("%lu",
+ { None, "", None, None, Some("l"), "u", (0, 3), });
+ assert_pns_eq_sub!("%Iu",
+ { None, "", None, None, Some("I"), "u", (0, 3), });
+ assert_pns_eq_sub!("%I32u",
+ { None, "", None, None, Some("I32"), "u", (0, 5), });
+ assert_pns_eq_sub!("%I64u",
+ { None, "", None, None, Some("I64"), "u", (0, 5), });
+ assert_pns_eq_sub!("%'d",
+ { None, "'", None, None, None, "d", (0, 3), });
+ assert_pns_eq_sub!("%10s",
+ { None, "", Some(N::Num(10)), None, None, "s", (0, 4), });
+ assert_pns_eq_sub!("%-10.10s",
+ { None, "-", Some(N::Num(10)), Some(N::Num(10)), None, "s", (0, 8), });
+ assert_pns_eq_sub!("%1$d",
+ { Some(1), "", None, None, None, "d", (0, 4), });
+ assert_pns_eq_sub!("%2$.*3$d",
+ { Some(2), "", None, Some(N::Arg(3)), None, "d", (0, 8), });
+ assert_pns_eq_sub!("%1$*2$.*3$d",
+ { Some(1), "", Some(N::Arg(2)), Some(N::Arg(3)), None, "d", (0, 11), });
+ assert_pns_eq_sub!("%-8ld",
+ { None, "-", Some(N::Num(8)), None, Some("l"), "d", (0, 5), });
+}
+
+#[test]
+fn test_iter() {
+ let s = "The %d'th word %% is: `%.*s` %!\n";
+ let subs: Vec<_> = iter_subs(s, 0).map(|sub| sub.translate()).collect();
+ assert_eq!(
+ subs.iter().map(|ms| ms.as_ref().map(|s| &s[..])).collect::<Vec<_>>(),
+ vec![Some("{}"), None, Some("{:.*}"), None]
+ );
+}
+
+/// Checks that the translations are what we expect.
+#[test]
+fn test_translation() {
+ assert_eq_pnsat!("%c", Some("{}"));
+ assert_eq_pnsat!("%d", Some("{}"));
+ assert_eq_pnsat!("%u", Some("{}"));
+ assert_eq_pnsat!("%x", Some("{:x}"));
+ assert_eq_pnsat!("%X", Some("{:X}"));
+ assert_eq_pnsat!("%e", Some("{:e}"));
+ assert_eq_pnsat!("%E", Some("{:E}"));
+ assert_eq_pnsat!("%f", Some("{}"));
+ assert_eq_pnsat!("%g", Some("{:e}"));
+ assert_eq_pnsat!("%G", Some("{:E}"));
+ assert_eq_pnsat!("%s", Some("{}"));
+ assert_eq_pnsat!("%p", Some("{:p}"));
+
+ assert_eq_pnsat!("%06d", Some("{:06}"));
+ assert_eq_pnsat!("%4.2f", Some("{:4.2}"));
+ assert_eq_pnsat!("%#x", Some("{:#x}"));
+ assert_eq_pnsat!("%-10s", Some("{:<10}"));
+ assert_eq_pnsat!("%*s", None);
+ assert_eq_pnsat!("%-10.*s", Some("{:<10.*}"));
+ assert_eq_pnsat!("%-*.*s", None);
+ assert_eq_pnsat!("%.6i", Some("{:06}"));
+ assert_eq_pnsat!("%+i", Some("{:+}"));
+ assert_eq_pnsat!("%08X", Some("{:08X}"));
+ assert_eq_pnsat!("%lu", Some("{}"));
+ assert_eq_pnsat!("%Iu", Some("{}"));
+ assert_eq_pnsat!("%I32u", Some("{}"));
+ assert_eq_pnsat!("%I64u", Some("{}"));
+ assert_eq_pnsat!("%'d", None);
+ assert_eq_pnsat!("%10s", Some("{:>10}"));
+ assert_eq_pnsat!("%-10.10s", Some("{:<10.10}"));
+ assert_eq_pnsat!("%1$d", Some("{0}"));
+ assert_eq_pnsat!("%2$.*3$d", Some("{1:02$}"));
+ assert_eq_pnsat!("%1$*2$.*3$s", Some("{0:>1$.2$}"));
+ assert_eq_pnsat!("%-8ld", Some("{:<8}"));
+}
diff --git a/src/libsyntax_ext/format_foreign/shell/tests.rs b/src/libsyntax_ext/format_foreign/shell/tests.rs
new file mode 100644
index 0000000..8ef58b8
--- /dev/null
+++ b/src/libsyntax_ext/format_foreign/shell/tests.rs
@@ -0,0 +1,59 @@
+use super::{
+ Substitution as S,
+ parse_next_substitution as pns,
+};
+
+macro_rules! assert_eq_pnsat {
+ ($lhs:expr, $rhs:expr) => {
+ assert_eq!(
+ pns($lhs).and_then(|(f, _)| f.translate()),
+ $rhs.map(<String as From<&str>>::from)
+ )
+ };
+}
+
+#[test]
+fn test_escape() {
+ assert_eq!(pns("has no escapes"), None);
+ assert_eq!(pns("has no escapes, either $"), None);
+ assert_eq!(pns("*so* has a $$ escape"), Some((S::Escape((11, 13)), " escape")));
+ assert_eq!(pns("$$ leading escape"), Some((S::Escape((0, 2)), " leading escape")));
+ assert_eq!(pns("trailing escape $$"), Some((S::Escape((16, 18)), "")));
+}
+
+#[test]
+fn test_parse() {
+ macro_rules! assert_pns_eq_sub {
+ ($in_:expr, $kind:ident($arg:expr, $pos:expr)) => {
+ assert_eq!(pns(concat!($in_, "!")), Some((S::$kind($arg.into(), $pos), "!")))
+ };
+ }
+
+ assert_pns_eq_sub!("$0", Ordinal(0, (0, 2)));
+ assert_pns_eq_sub!("$1", Ordinal(1, (0, 2)));
+ assert_pns_eq_sub!("$9", Ordinal(9, (0, 2)));
+ assert_pns_eq_sub!("$N", Name("N", (0, 2)));
+ assert_pns_eq_sub!("$NAME", Name("NAME", (0, 5)));
+}
+
+#[test]
+fn test_iter() {
+ use super::iter_subs;
+ let s = "The $0'th word $$ is: `$WORD` $!\n";
+ let subs: Vec<_> = iter_subs(s, 0).map(|sub| sub.translate()).collect();
+ assert_eq!(
+ subs.iter().map(|ms| ms.as_ref().map(|s| &s[..])).collect::<Vec<_>>(),
+ vec![Some("{0}"), None, Some("{WORD}")]
+ );
+}
+
+#[test]
+fn test_translation() {
+ assert_eq_pnsat!("$0", Some("{0}"));
+ assert_eq_pnsat!("$9", Some("{9}"));
+ assert_eq_pnsat!("$1", Some("{1}"));
+ assert_eq_pnsat!("$10", Some("{1}"));
+ assert_eq_pnsat!("$stuff", Some("{stuff}"));
+ assert_eq_pnsat!("$NAME", Some("{NAME}"));
+ assert_eq_pnsat!("$PREFIX/bin", Some("{PREFIX}"));
+}
diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs
index 00a420d..b5d5a38 100644
--- a/src/libsyntax_ext/proc_macro_server.rs
+++ b/src/libsyntax_ext/proc_macro_server.rs
@@ -409,7 +409,7 @@
}
fn from_str(&mut self, src: &str) -> Self::TokenStream {
parse::parse_stream_from_source_str(
- FileName::proc_macro_source_code(src.clone()),
+ FileName::proc_macro_source_code(src),
src.to_string(),
self.sess,
Some(self.call_site),
diff --git a/src/libsyntax_ext/trace_macros.rs b/src/libsyntax_ext/trace_macros.rs
index 6c74f77..512513e 100644
--- a/src/libsyntax_ext/trace_macros.rs
+++ b/src/libsyntax_ext/trace_macros.rs
@@ -16,11 +16,11 @@
feature_gate::EXPLAIN_TRACE_MACROS);
}
- match (tt.len(), tt.first()) {
- (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::True) => {
+ match tt {
+ [TokenTree::Token(token)] if token.is_keyword(kw::True) => {
cx.set_trace_macros(true);
}
- (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::False) => {
+ [TokenTree::Token(token)] if token.is_keyword(kw::False) => {
cx.set_trace_macros(false);
}
_ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs
index 649ab81..2dd409b 100644
--- a/src/libsyntax_pos/lib.rs
+++ b/src/libsyntax_pos/lib.rs
@@ -505,10 +505,10 @@
)
}
- pub fn from_inner_byte_pos(self, start: usize, end: usize) -> Span {
+ pub fn from_inner(self, inner: InnerSpan) -> Span {
let span = self.data();
- Span::new(span.lo + BytePos::from_usize(start),
- span.lo + BytePos::from_usize(end),
+ Span::new(span.lo + BytePos::from_usize(inner.start),
+ span.lo + BytePos::from_usize(inner.end),
span.ctxt)
}
@@ -859,6 +859,9 @@
}
}
+#[derive(Debug)]
+pub struct OffsetOverflowError;
+
/// A single source in the `SourceMap`.
#[derive(Clone)]
pub struct SourceFile {
@@ -1040,7 +1043,7 @@
name_was_remapped: bool,
unmapped_path: FileName,
mut src: String,
- start_pos: BytePos) -> SourceFile {
+ start_pos: BytePos) -> Result<SourceFile, OffsetOverflowError> {
remove_bom(&mut src);
let src_hash = {
@@ -1054,11 +1057,14 @@
hasher.finish()
};
let end_pos = start_pos.to_usize() + src.len();
+ if end_pos > u32::max_value() as usize {
+ return Err(OffsetOverflowError);
+ }
let (lines, multibyte_chars, non_narrow_chars) =
analyze_source_file::analyze_source_file(&src[..], start_pos);
- SourceFile {
+ Ok(SourceFile {
name,
name_was_remapped,
unmapped_path: Some(unmapped_path),
@@ -1072,7 +1078,7 @@
multibyte_chars,
non_narrow_chars,
name_hash,
- }
+ })
}
/// Returns the `BytePos` of the beginning of the current line.
@@ -1396,6 +1402,18 @@
pub end_pos: BytePos
}
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+pub struct InnerSpan {
+ pub start: usize,
+ pub end: usize,
+}
+
+impl InnerSpan {
+ pub fn new(start: usize, end: usize) -> InnerSpan {
+ InnerSpan { start, end }
+ }
+}
+
// Given a slice of line start positions and a position, returns the index of
// the line the position is on. Returns -1 if the position is located before
// the first line.
diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs
index 875c286..c25e65e 100644
--- a/src/libsyntax_pos/symbol.rs
+++ b/src/libsyntax_pos/symbol.rs
@@ -157,6 +157,7 @@
bin,
bind_by_move_pattern_guards,
block,
+ bool,
borrowck_graphviz_postflow,
borrowck_graphviz_preflow,
box_patterns,
@@ -171,6 +172,7 @@
cfg_target_has_atomic,
cfg_target_thread_local,
cfg_target_vendor,
+ char,
clone,
Clone,
clone_closures,
@@ -203,6 +205,7 @@
core_intrinsics,
crate_id,
crate_in_paths,
+ crate_local,
crate_name,
crate_type,
crate_visibility_modifier,
@@ -221,6 +224,7 @@
deref,
deref_mut,
derive,
+ direct,
doc,
doc_alias,
doc_cfg,
@@ -245,6 +249,7 @@
eh_personality,
eh_unwind_resume,
enable,
+ eq,
err,
Err,
Equal,
@@ -278,8 +283,10 @@
format_args_nl,
from,
From,
+ from_desugaring,
from_error,
from_generator,
+ from_method,
from_ok,
from_usize,
fundamental,
@@ -347,6 +354,7 @@
label_break_value,
lang,
lang_items,
+ lhs,
lib,
lifetime,
link,
@@ -443,7 +451,9 @@
panic_impl,
panic_implementation,
panic_runtime,
+ parent_trait,
partial_cmp,
+ param_attrs,
PartialOrd,
passes,
pat,
@@ -505,6 +515,7 @@
result,
Result,
Return,
+ rhs,
rlib,
rt,
rtm_target_feature,
@@ -569,6 +580,7 @@
__rust_unstable_column,
rvalue_static_promotion,
sanitizer_runtime,
+ _Self,
self_in_typedefs,
self_struct_ctor,
Send,
diff --git a/src/libtest/lib.rs b/src/libtest/lib.rs
index 810a98e..53bf67b 100644
--- a/src/libtest/lib.rs
+++ b/src/libtest/lib.rs
@@ -1772,458 +1772,4 @@
}
#[cfg(test)]
-mod tests {
- use crate::bench;
- use crate::test::{
- filter_tests, parse_opts, run_test, DynTestFn, DynTestName, MetricMap, RunIgnored,
- ShouldPanic, StaticTestName, TestDesc, TestDescAndFn, TestOpts, TrFailed, TrFailedMsg,
- TrIgnored, TrOk,
- };
- use crate::Bencher;
- use crate::Concurrent;
- use std::sync::mpsc::channel;
-
- fn one_ignored_one_unignored_test() -> Vec<TestDescAndFn> {
- vec![
- TestDescAndFn {
- desc: TestDesc {
- name: StaticTestName("1"),
- ignore: true,
- should_panic: ShouldPanic::No,
- allow_fail: false,
- },
- testfn: DynTestFn(Box::new(move || {})),
- },
- TestDescAndFn {
- desc: TestDesc {
- name: StaticTestName("2"),
- ignore: false,
- should_panic: ShouldPanic::No,
- allow_fail: false,
- },
- testfn: DynTestFn(Box::new(move || {})),
- },
- ]
- }
-
- #[test]
- pub fn do_not_run_ignored_tests() {
- fn f() {
- panic!();
- }
- let desc = TestDescAndFn {
- desc: TestDesc {
- name: StaticTestName("whatever"),
- ignore: true,
- should_panic: ShouldPanic::No,
- allow_fail: false,
- },
- testfn: DynTestFn(Box::new(f)),
- };
- let (tx, rx) = channel();
- run_test(&TestOpts::new(), false, desc, tx, Concurrent::No);
- let (_, res, _) = rx.recv().unwrap();
- assert!(res != TrOk);
- }
-
- #[test]
- pub fn ignored_tests_result_in_ignored() {
- fn f() {}
- let desc = TestDescAndFn {
- desc: TestDesc {
- name: StaticTestName("whatever"),
- ignore: true,
- should_panic: ShouldPanic::No,
- allow_fail: false,
- },
- testfn: DynTestFn(Box::new(f)),
- };
- let (tx, rx) = channel();
- run_test(&TestOpts::new(), false, desc, tx, Concurrent::No);
- let (_, res, _) = rx.recv().unwrap();
- assert!(res == TrIgnored);
- }
-
- #[test]
- fn test_should_panic() {
- fn f() {
- panic!();
- }
- let desc = TestDescAndFn {
- desc: TestDesc {
- name: StaticTestName("whatever"),
- ignore: false,
- should_panic: ShouldPanic::Yes,
- allow_fail: false,
- },
- testfn: DynTestFn(Box::new(f)),
- };
- let (tx, rx) = channel();
- run_test(&TestOpts::new(), false, desc, tx, Concurrent::No);
- let (_, res, _) = rx.recv().unwrap();
- assert!(res == TrOk);
- }
-
- #[test]
- fn test_should_panic_good_message() {
- fn f() {
- panic!("an error message");
- }
- let desc = TestDescAndFn {
- desc: TestDesc {
- name: StaticTestName("whatever"),
- ignore: false,
- should_panic: ShouldPanic::YesWithMessage("error message"),
- allow_fail: false,
- },
- testfn: DynTestFn(Box::new(f)),
- };
- let (tx, rx) = channel();
- run_test(&TestOpts::new(), false, desc, tx, Concurrent::No);
- let (_, res, _) = rx.recv().unwrap();
- assert!(res == TrOk);
- }
-
- #[test]
- fn test_should_panic_bad_message() {
- fn f() {
- panic!("an error message");
- }
- let expected = "foobar";
- let failed_msg = "panic did not include expected string";
- let desc = TestDescAndFn {
- desc: TestDesc {
- name: StaticTestName("whatever"),
- ignore: false,
- should_panic: ShouldPanic::YesWithMessage(expected),
- allow_fail: false,
- },
- testfn: DynTestFn(Box::new(f)),
- };
- let (tx, rx) = channel();
- run_test(&TestOpts::new(), false, desc, tx, Concurrent::No);
- let (_, res, _) = rx.recv().unwrap();
- assert!(res == TrFailedMsg(format!("{} '{}'", failed_msg, expected)));
- }
-
- #[test]
- fn test_should_panic_but_succeeds() {
- fn f() {}
- let desc = TestDescAndFn {
- desc: TestDesc {
- name: StaticTestName("whatever"),
- ignore: false,
- should_panic: ShouldPanic::Yes,
- allow_fail: false,
- },
- testfn: DynTestFn(Box::new(f)),
- };
- let (tx, rx) = channel();
- run_test(&TestOpts::new(), false, desc, tx, Concurrent::No);
- let (_, res, _) = rx.recv().unwrap();
- assert!(res == TrFailed);
- }
-
- #[test]
- fn parse_ignored_flag() {
- let args = vec![
- "progname".to_string(),
- "filter".to_string(),
- "--ignored".to_string(),
- ];
- let opts = parse_opts(&args).unwrap().unwrap();
- assert_eq!(opts.run_ignored, RunIgnored::Only);
- }
-
- #[test]
- fn parse_include_ignored_flag() {
- let args = vec![
- "progname".to_string(),
- "filter".to_string(),
- "-Zunstable-options".to_string(),
- "--include-ignored".to_string(),
- ];
- let opts = parse_opts(&args).unwrap().unwrap();
- assert_eq!(opts.run_ignored, RunIgnored::Yes);
- }
-
- #[test]
- pub fn filter_for_ignored_option() {
- // When we run ignored tests the test filter should filter out all the
- // unignored tests and flip the ignore flag on the rest to false
-
- let mut opts = TestOpts::new();
- opts.run_tests = true;
- opts.run_ignored = RunIgnored::Only;
-
- let tests = one_ignored_one_unignored_test();
- let filtered = filter_tests(&opts, tests);
-
- assert_eq!(filtered.len(), 1);
- assert_eq!(filtered[0].desc.name.to_string(), "1");
- assert!(!filtered[0].desc.ignore);
- }
-
- #[test]
- pub fn run_include_ignored_option() {
- // When we "--include-ignored" tests, the ignore flag should be set to false on
- // all tests and no test filtered out
-
- let mut opts = TestOpts::new();
- opts.run_tests = true;
- opts.run_ignored = RunIgnored::Yes;
-
- let tests = one_ignored_one_unignored_test();
- let filtered = filter_tests(&opts, tests);
-
- assert_eq!(filtered.len(), 2);
- assert!(!filtered[0].desc.ignore);
- assert!(!filtered[1].desc.ignore);
- }
-
- #[test]
- pub fn exclude_should_panic_option() {
- let mut opts = TestOpts::new();
- opts.run_tests = true;
- opts.exclude_should_panic = true;
-
- let mut tests = one_ignored_one_unignored_test();
- tests.push(TestDescAndFn {
- desc: TestDesc {
- name: StaticTestName("3"),
- ignore: false,
- should_panic: ShouldPanic::Yes,
- allow_fail: false,
- },
- testfn: DynTestFn(Box::new(move || {})),
- });
-
- let filtered = filter_tests(&opts, tests);
-
- assert_eq!(filtered.len(), 2);
- assert!(filtered.iter().all(|test| test.desc.should_panic == ShouldPanic::No));
- }
-
- #[test]
- pub fn exact_filter_match() {
- fn tests() -> Vec<TestDescAndFn> {
- vec!["base", "base::test", "base::test1", "base::test2"]
- .into_iter()
- .map(|name| TestDescAndFn {
- desc: TestDesc {
- name: StaticTestName(name),
- ignore: false,
- should_panic: ShouldPanic::No,
- allow_fail: false,
- },
- testfn: DynTestFn(Box::new(move || {})),
- })
- .collect()
- }
-
- let substr = filter_tests(
- &TestOpts {
- filter: Some("base".into()),
- ..TestOpts::new()
- },
- tests(),
- );
- assert_eq!(substr.len(), 4);
-
- let substr = filter_tests(
- &TestOpts {
- filter: Some("bas".into()),
- ..TestOpts::new()
- },
- tests(),
- );
- assert_eq!(substr.len(), 4);
-
- let substr = filter_tests(
- &TestOpts {
- filter: Some("::test".into()),
- ..TestOpts::new()
- },
- tests(),
- );
- assert_eq!(substr.len(), 3);
-
- let substr = filter_tests(
- &TestOpts {
- filter: Some("base::test".into()),
- ..TestOpts::new()
- },
- tests(),
- );
- assert_eq!(substr.len(), 3);
-
- let exact = filter_tests(
- &TestOpts {
- filter: Some("base".into()),
- filter_exact: true,
- ..TestOpts::new()
- },
- tests(),
- );
- assert_eq!(exact.len(), 1);
-
- let exact = filter_tests(
- &TestOpts {
- filter: Some("bas".into()),
- filter_exact: true,
- ..TestOpts::new()
- },
- tests(),
- );
- assert_eq!(exact.len(), 0);
-
- let exact = filter_tests(
- &TestOpts {
- filter: Some("::test".into()),
- filter_exact: true,
- ..TestOpts::new()
- },
- tests(),
- );
- assert_eq!(exact.len(), 0);
-
- let exact = filter_tests(
- &TestOpts {
- filter: Some("base::test".into()),
- filter_exact: true,
- ..TestOpts::new()
- },
- tests(),
- );
- assert_eq!(exact.len(), 1);
- }
-
- #[test]
- pub fn sort_tests() {
- let mut opts = TestOpts::new();
- opts.run_tests = true;
-
- let names = vec![
- "sha1::test".to_string(),
- "isize::test_to_str".to_string(),
- "isize::test_pow".to_string(),
- "test::do_not_run_ignored_tests".to_string(),
- "test::ignored_tests_result_in_ignored".to_string(),
- "test::first_free_arg_should_be_a_filter".to_string(),
- "test::parse_ignored_flag".to_string(),
- "test::parse_include_ignored_flag".to_string(),
- "test::filter_for_ignored_option".to_string(),
- "test::run_include_ignored_option".to_string(),
- "test::sort_tests".to_string(),
- ];
- let tests = {
- fn testfn() {}
- let mut tests = Vec::new();
- for name in &names {
- let test = TestDescAndFn {
- desc: TestDesc {
- name: DynTestName((*name).clone()),
- ignore: false,
- should_panic: ShouldPanic::No,
- allow_fail: false,
- },
- testfn: DynTestFn(Box::new(testfn)),
- };
- tests.push(test);
- }
- tests
- };
- let filtered = filter_tests(&opts, tests);
-
- let expected = vec![
- "isize::test_pow".to_string(),
- "isize::test_to_str".to_string(),
- "sha1::test".to_string(),
- "test::do_not_run_ignored_tests".to_string(),
- "test::filter_for_ignored_option".to_string(),
- "test::first_free_arg_should_be_a_filter".to_string(),
- "test::ignored_tests_result_in_ignored".to_string(),
- "test::parse_ignored_flag".to_string(),
- "test::parse_include_ignored_flag".to_string(),
- "test::run_include_ignored_option".to_string(),
- "test::sort_tests".to_string(),
- ];
-
- for (a, b) in expected.iter().zip(filtered) {
- assert!(*a == b.desc.name.to_string());
- }
- }
-
- #[test]
- pub fn test_metricmap_compare() {
- let mut m1 = MetricMap::new();
- let mut m2 = MetricMap::new();
- m1.insert_metric("in-both-noise", 1000.0, 200.0);
- m2.insert_metric("in-both-noise", 1100.0, 200.0);
-
- m1.insert_metric("in-first-noise", 1000.0, 2.0);
- m2.insert_metric("in-second-noise", 1000.0, 2.0);
-
- m1.insert_metric("in-both-want-downwards-but-regressed", 1000.0, 10.0);
- m2.insert_metric("in-both-want-downwards-but-regressed", 2000.0, 10.0);
-
- m1.insert_metric("in-both-want-downwards-and-improved", 2000.0, 10.0);
- m2.insert_metric("in-both-want-downwards-and-improved", 1000.0, 10.0);
-
- m1.insert_metric("in-both-want-upwards-but-regressed", 2000.0, -10.0);
- m2.insert_metric("in-both-want-upwards-but-regressed", 1000.0, -10.0);
-
- m1.insert_metric("in-both-want-upwards-and-improved", 1000.0, -10.0);
- m2.insert_metric("in-both-want-upwards-and-improved", 2000.0, -10.0);
- }
-
- #[test]
- pub fn test_bench_once_no_iter() {
- fn f(_: &mut Bencher) {}
- bench::run_once(f);
- }
-
- #[test]
- pub fn test_bench_once_iter() {
- fn f(b: &mut Bencher) {
- b.iter(|| {})
- }
- bench::run_once(f);
- }
-
- #[test]
- pub fn test_bench_no_iter() {
- fn f(_: &mut Bencher) {}
-
- let (tx, rx) = channel();
-
- let desc = TestDesc {
- name: StaticTestName("f"),
- ignore: false,
- should_panic: ShouldPanic::No,
- allow_fail: false,
- };
-
- crate::bench::benchmark(desc, tx, true, f);
- rx.recv().unwrap();
- }
-
- #[test]
- pub fn test_bench_iter() {
- fn f(b: &mut Bencher) {
- b.iter(|| {})
- }
-
- let (tx, rx) = channel();
-
- let desc = TestDesc {
- name: StaticTestName("f"),
- ignore: false,
- should_panic: ShouldPanic::No,
- allow_fail: false,
- };
-
- crate::bench::benchmark(desc, tx, true, f);
- rx.recv().unwrap();
- }
-}
+mod tests;
diff --git a/src/libtest/stats.rs b/src/libtest/stats.rs
index 5c9421d..32c3006 100644
--- a/src/libtest/stats.rs
+++ b/src/libtest/stats.rs
@@ -318,582 +318,7 @@
// Test vectors generated from R, using the script src/etc/stat-test-vectors.r.
#[cfg(test)]
-mod tests {
- use crate::stats::Stats;
- use crate::stats::Summary;
- use std::f64;
- use std::io::prelude::*;
- use std::io;
-
- macro_rules! assert_approx_eq {
- ($a: expr, $b: expr) => {{
- let (a, b) = (&$a, &$b);
- assert!(
- (*a - *b).abs() < 1.0e-6,
- "{} is not approximately equal to {}",
- *a,
- *b
- );
- }};
- }
-
- fn check(samples: &[f64], summ: &Summary) {
- let summ2 = Summary::new(samples);
-
- let mut w = io::sink();
- let w = &mut w;
- (write!(w, "\n")).unwrap();
-
- assert_eq!(summ.sum, summ2.sum);
- assert_eq!(summ.min, summ2.min);
- assert_eq!(summ.max, summ2.max);
- assert_eq!(summ.mean, summ2.mean);
- assert_eq!(summ.median, summ2.median);
-
- // We needed a few more digits to get exact equality on these
- // but they're within float epsilon, which is 1.0e-6.
- assert_approx_eq!(summ.var, summ2.var);
- assert_approx_eq!(summ.std_dev, summ2.std_dev);
- assert_approx_eq!(summ.std_dev_pct, summ2.std_dev_pct);
- assert_approx_eq!(summ.median_abs_dev, summ2.median_abs_dev);
- assert_approx_eq!(summ.median_abs_dev_pct, summ2.median_abs_dev_pct);
-
- assert_eq!(summ.quartiles, summ2.quartiles);
- assert_eq!(summ.iqr, summ2.iqr);
- }
-
- #[test]
- fn test_min_max_nan() {
- let xs = &[1.0, 2.0, f64::NAN, 3.0, 4.0];
- let summary = Summary::new(xs);
- assert_eq!(summary.min, 1.0);
- assert_eq!(summary.max, 4.0);
- }
-
- #[test]
- fn test_norm2() {
- let val = &[958.0000000000, 924.0000000000];
- let summ = &Summary {
- sum: 1882.0000000000,
- min: 924.0000000000,
- max: 958.0000000000,
- mean: 941.0000000000,
- median: 941.0000000000,
- var: 578.0000000000,
- std_dev: 24.0416305603,
- std_dev_pct: 2.5549022912,
- median_abs_dev: 25.2042000000,
- median_abs_dev_pct: 2.6784484591,
- quartiles: (932.5000000000, 941.0000000000, 949.5000000000),
- iqr: 17.0000000000,
- };
- check(val, summ);
- }
- #[test]
- fn test_norm10narrow() {
- let val = &[
- 966.0000000000,
- 985.0000000000,
- 1110.0000000000,
- 848.0000000000,
- 821.0000000000,
- 975.0000000000,
- 962.0000000000,
- 1157.0000000000,
- 1217.0000000000,
- 955.0000000000,
- ];
- let summ = &Summary {
- sum: 9996.0000000000,
- min: 821.0000000000,
- max: 1217.0000000000,
- mean: 999.6000000000,
- median: 970.5000000000,
- var: 16050.7111111111,
- std_dev: 126.6914010938,
- std_dev_pct: 12.6742097933,
- median_abs_dev: 102.2994000000,
- median_abs_dev_pct: 10.5408964451,
- quartiles: (956.7500000000, 970.5000000000, 1078.7500000000),
- iqr: 122.0000000000,
- };
- check(val, summ);
- }
- #[test]
- fn test_norm10medium() {
- let val = &[
- 954.0000000000,
- 1064.0000000000,
- 855.0000000000,
- 1000.0000000000,
- 743.0000000000,
- 1084.0000000000,
- 704.0000000000,
- 1023.0000000000,
- 357.0000000000,
- 869.0000000000,
- ];
- let summ = &Summary {
- sum: 8653.0000000000,
- min: 357.0000000000,
- max: 1084.0000000000,
- mean: 865.3000000000,
- median: 911.5000000000,
- var: 48628.4555555556,
- std_dev: 220.5186059170,
- std_dev_pct: 25.4846418487,
- median_abs_dev: 195.7032000000,
- median_abs_dev_pct: 21.4704552935,
- quartiles: (771.0000000000, 911.5000000000, 1017.2500000000),
- iqr: 246.2500000000,
- };
- check(val, summ);
- }
- #[test]
- fn test_norm10wide() {
- let val = &[
- 505.0000000000,
- 497.0000000000,
- 1591.0000000000,
- 887.0000000000,
- 1026.0000000000,
- 136.0000000000,
- 1580.0000000000,
- 940.0000000000,
- 754.0000000000,
- 1433.0000000000,
- ];
- let summ = &Summary {
- sum: 9349.0000000000,
- min: 136.0000000000,
- max: 1591.0000000000,
- mean: 934.9000000000,
- median: 913.5000000000,
- var: 239208.9888888889,
- std_dev: 489.0899599142,
- std_dev_pct: 52.3146817750,
- median_abs_dev: 611.5725000000,
- median_abs_dev_pct: 66.9482758621,
- quartiles: (567.2500000000, 913.5000000000, 1331.2500000000),
- iqr: 764.0000000000,
- };
- check(val, summ);
- }
- #[test]
- fn test_norm25verynarrow() {
- let val = &[
- 991.0000000000,
- 1018.0000000000,
- 998.0000000000,
- 1013.0000000000,
- 974.0000000000,
- 1007.0000000000,
- 1014.0000000000,
- 999.0000000000,
- 1011.0000000000,
- 978.0000000000,
- 985.0000000000,
- 999.0000000000,
- 983.0000000000,
- 982.0000000000,
- 1015.0000000000,
- 1002.0000000000,
- 977.0000000000,
- 948.0000000000,
- 1040.0000000000,
- 974.0000000000,
- 996.0000000000,
- 989.0000000000,
- 1015.0000000000,
- 994.0000000000,
- 1024.0000000000,
- ];
- let summ = &Summary {
- sum: 24926.0000000000,
- min: 948.0000000000,
- max: 1040.0000000000,
- mean: 997.0400000000,
- median: 998.0000000000,
- var: 393.2066666667,
- std_dev: 19.8294393937,
- std_dev_pct: 1.9888308788,
- median_abs_dev: 22.2390000000,
- median_abs_dev_pct: 2.2283567134,
- quartiles: (983.0000000000, 998.0000000000, 1013.0000000000),
- iqr: 30.0000000000,
- };
- check(val, summ);
- }
- #[test]
- fn test_exp10a() {
- let val = &[
- 23.0000000000,
- 11.0000000000,
- 2.0000000000,
- 57.0000000000,
- 4.0000000000,
- 12.0000000000,
- 5.0000000000,
- 29.0000000000,
- 3.0000000000,
- 21.0000000000,
- ];
- let summ = &Summary {
- sum: 167.0000000000,
- min: 2.0000000000,
- max: 57.0000000000,
- mean: 16.7000000000,
- median: 11.5000000000,
- var: 287.7888888889,
- std_dev: 16.9643416875,
- std_dev_pct: 101.5828843560,
- median_abs_dev: 13.3434000000,
- median_abs_dev_pct: 116.0295652174,
- quartiles: (4.2500000000, 11.5000000000, 22.5000000000),
- iqr: 18.2500000000,
- };
- check(val, summ);
- }
- #[test]
- fn test_exp10b() {
- let val = &[
- 24.0000000000,
- 17.0000000000,
- 6.0000000000,
- 38.0000000000,
- 25.0000000000,
- 7.0000000000,
- 51.0000000000,
- 2.0000000000,
- 61.0000000000,
- 32.0000000000,
- ];
- let summ = &Summary {
- sum: 263.0000000000,
- min: 2.0000000000,
- max: 61.0000000000,
- mean: 26.3000000000,
- median: 24.5000000000,
- var: 383.5666666667,
- std_dev: 19.5848580967,
- std_dev_pct: 74.4671410520,
- median_abs_dev: 22.9803000000,
- median_abs_dev_pct: 93.7971428571,
- quartiles: (9.5000000000, 24.5000000000, 36.5000000000),
- iqr: 27.0000000000,
- };
- check(val, summ);
- }
- #[test]
- fn test_exp10c() {
- let val = &[
- 71.0000000000,
- 2.0000000000,
- 32.0000000000,
- 1.0000000000,
- 6.0000000000,
- 28.0000000000,
- 13.0000000000,
- 37.0000000000,
- 16.0000000000,
- 36.0000000000,
- ];
- let summ = &Summary {
- sum: 242.0000000000,
- min: 1.0000000000,
- max: 71.0000000000,
- mean: 24.2000000000,
- median: 22.0000000000,
- var: 458.1777777778,
- std_dev: 21.4050876611,
- std_dev_pct: 88.4507754589,
- median_abs_dev: 21.4977000000,
- median_abs_dev_pct: 97.7168181818,
- quartiles: (7.7500000000, 22.0000000000, 35.0000000000),
- iqr: 27.2500000000,
- };
- check(val, summ);
- }
- #[test]
- fn test_exp25() {
- let val = &[
- 3.0000000000,
- 24.0000000000,
- 1.0000000000,
- 19.0000000000,
- 7.0000000000,
- 5.0000000000,
- 30.0000000000,
- 39.0000000000,
- 31.0000000000,
- 13.0000000000,
- 25.0000000000,
- 48.0000000000,
- 1.0000000000,
- 6.0000000000,
- 42.0000000000,
- 63.0000000000,
- 2.0000000000,
- 12.0000000000,
- 108.0000000000,
- 26.0000000000,
- 1.0000000000,
- 7.0000000000,
- 44.0000000000,
- 25.0000000000,
- 11.0000000000,
- ];
- let summ = &Summary {
- sum: 593.0000000000,
- min: 1.0000000000,
- max: 108.0000000000,
- mean: 23.7200000000,
- median: 19.0000000000,
- var: 601.0433333333,
- std_dev: 24.5161851301,
- std_dev_pct: 103.3565983562,
- median_abs_dev: 19.2738000000,
- median_abs_dev_pct: 101.4410526316,
- quartiles: (6.0000000000, 19.0000000000, 31.0000000000),
- iqr: 25.0000000000,
- };
- check(val, summ);
- }
- #[test]
- fn test_binom25() {
- let val = &[
- 18.0000000000,
- 17.0000000000,
- 27.0000000000,
- 15.0000000000,
- 21.0000000000,
- 25.0000000000,
- 17.0000000000,
- 24.0000000000,
- 25.0000000000,
- 24.0000000000,
- 26.0000000000,
- 26.0000000000,
- 23.0000000000,
- 15.0000000000,
- 23.0000000000,
- 17.0000000000,
- 18.0000000000,
- 18.0000000000,
- 21.0000000000,
- 16.0000000000,
- 15.0000000000,
- 31.0000000000,
- 20.0000000000,
- 17.0000000000,
- 15.0000000000,
- ];
- let summ = &Summary {
- sum: 514.0000000000,
- min: 15.0000000000,
- max: 31.0000000000,
- mean: 20.5600000000,
- median: 20.0000000000,
- var: 20.8400000000,
- std_dev: 4.5650848842,
- std_dev_pct: 22.2037202539,
- median_abs_dev: 5.9304000000,
- median_abs_dev_pct: 29.6520000000,
- quartiles: (17.0000000000, 20.0000000000, 24.0000000000),
- iqr: 7.0000000000,
- };
- check(val, summ);
- }
- #[test]
- fn test_pois25lambda30() {
- let val = &[
- 27.0000000000,
- 33.0000000000,
- 34.0000000000,
- 34.0000000000,
- 24.0000000000,
- 39.0000000000,
- 28.0000000000,
- 27.0000000000,
- 31.0000000000,
- 28.0000000000,
- 38.0000000000,
- 21.0000000000,
- 33.0000000000,
- 36.0000000000,
- 29.0000000000,
- 37.0000000000,
- 32.0000000000,
- 34.0000000000,
- 31.0000000000,
- 39.0000000000,
- 25.0000000000,
- 31.0000000000,
- 32.0000000000,
- 40.0000000000,
- 24.0000000000,
- ];
- let summ = &Summary {
- sum: 787.0000000000,
- min: 21.0000000000,
- max: 40.0000000000,
- mean: 31.4800000000,
- median: 32.0000000000,
- var: 26.5933333333,
- std_dev: 5.1568724372,
- std_dev_pct: 16.3814245145,
- median_abs_dev: 5.9304000000,
- median_abs_dev_pct: 18.5325000000,
- quartiles: (28.0000000000, 32.0000000000, 34.0000000000),
- iqr: 6.0000000000,
- };
- check(val, summ);
- }
- #[test]
- fn test_pois25lambda40() {
- let val = &[
- 42.0000000000,
- 50.0000000000,
- 42.0000000000,
- 46.0000000000,
- 34.0000000000,
- 45.0000000000,
- 34.0000000000,
- 49.0000000000,
- 39.0000000000,
- 28.0000000000,
- 40.0000000000,
- 35.0000000000,
- 37.0000000000,
- 39.0000000000,
- 46.0000000000,
- 44.0000000000,
- 32.0000000000,
- 45.0000000000,
- 42.0000000000,
- 37.0000000000,
- 48.0000000000,
- 42.0000000000,
- 33.0000000000,
- 42.0000000000,
- 48.0000000000,
- ];
- let summ = &Summary {
- sum: 1019.0000000000,
- min: 28.0000000000,
- max: 50.0000000000,
- mean: 40.7600000000,
- median: 42.0000000000,
- var: 34.4400000000,
- std_dev: 5.8685603004,
- std_dev_pct: 14.3978417577,
- median_abs_dev: 5.9304000000,
- median_abs_dev_pct: 14.1200000000,
- quartiles: (37.0000000000, 42.0000000000, 45.0000000000),
- iqr: 8.0000000000,
- };
- check(val, summ);
- }
- #[test]
- fn test_pois25lambda50() {
- let val = &[
- 45.0000000000,
- 43.0000000000,
- 44.0000000000,
- 61.0000000000,
- 51.0000000000,
- 53.0000000000,
- 59.0000000000,
- 52.0000000000,
- 49.0000000000,
- 51.0000000000,
- 51.0000000000,
- 50.0000000000,
- 49.0000000000,
- 56.0000000000,
- 42.0000000000,
- 52.0000000000,
- 51.0000000000,
- 43.0000000000,
- 48.0000000000,
- 48.0000000000,
- 50.0000000000,
- 42.0000000000,
- 43.0000000000,
- 42.0000000000,
- 60.0000000000,
- ];
- let summ = &Summary {
- sum: 1235.0000000000,
- min: 42.0000000000,
- max: 61.0000000000,
- mean: 49.4000000000,
- median: 50.0000000000,
- var: 31.6666666667,
- std_dev: 5.6273143387,
- std_dev_pct: 11.3913245723,
- median_abs_dev: 4.4478000000,
- median_abs_dev_pct: 8.8956000000,
- quartiles: (44.0000000000, 50.0000000000, 52.0000000000),
- iqr: 8.0000000000,
- };
- check(val, summ);
- }
- #[test]
- fn test_unif25() {
- let val = &[
- 99.0000000000,
- 55.0000000000,
- 92.0000000000,
- 79.0000000000,
- 14.0000000000,
- 2.0000000000,
- 33.0000000000,
- 49.0000000000,
- 3.0000000000,
- 32.0000000000,
- 84.0000000000,
- 59.0000000000,
- 22.0000000000,
- 86.0000000000,
- 76.0000000000,
- 31.0000000000,
- 29.0000000000,
- 11.0000000000,
- 41.0000000000,
- 53.0000000000,
- 45.0000000000,
- 44.0000000000,
- 98.0000000000,
- 98.0000000000,
- 7.0000000000,
- ];
- let summ = &Summary {
- sum: 1242.0000000000,
- min: 2.0000000000,
- max: 99.0000000000,
- mean: 49.6800000000,
- median: 45.0000000000,
- var: 1015.6433333333,
- std_dev: 31.8691595957,
- std_dev_pct: 64.1488719719,
- median_abs_dev: 45.9606000000,
- median_abs_dev_pct: 102.1346666667,
- quartiles: (29.0000000000, 45.0000000000, 79.0000000000),
- iqr: 50.0000000000,
- };
- check(val, summ);
- }
-
- #[test]
- fn test_sum_f64s() {
- assert_eq!([0.5f64, 3.2321f64, 1.5678f64].sum(), 5.2999);
- }
- #[test]
- fn test_sum_f64_between_ints_that_sum_to_0() {
- assert_eq!([1e30f64, 1.2f64, -1e30f64].sum(), 1.2);
- }
-}
+mod tests;
#[cfg(test)]
mod bench {
diff --git a/src/libtest/stats/tests.rs b/src/libtest/stats/tests.rs
new file mode 100644
index 0000000..59f9364
--- /dev/null
+++ b/src/libtest/stats/tests.rs
@@ -0,0 +1,574 @@
+use crate::stats::Stats;
+use crate::stats::Summary;
+use std::f64;
+use std::io::prelude::*;
+use std::io;
+
+macro_rules! assert_approx_eq {
+ ($a: expr, $b: expr) => {{
+ let (a, b) = (&$a, &$b);
+ assert!(
+ (*a - *b).abs() < 1.0e-6,
+ "{} is not approximately equal to {}",
+ *a,
+ *b
+ );
+ }};
+}
+
+fn check(samples: &[f64], summ: &Summary) {
+ let summ2 = Summary::new(samples);
+
+ let mut w = io::sink();
+ let w = &mut w;
+ (write!(w, "\n")).unwrap();
+
+ assert_eq!(summ.sum, summ2.sum);
+ assert_eq!(summ.min, summ2.min);
+ assert_eq!(summ.max, summ2.max);
+ assert_eq!(summ.mean, summ2.mean);
+ assert_eq!(summ.median, summ2.median);
+
+ // We needed a few more digits to get exact equality on these
+ // but they're within float epsilon, which is 1.0e-6.
+ assert_approx_eq!(summ.var, summ2.var);
+ assert_approx_eq!(summ.std_dev, summ2.std_dev);
+ assert_approx_eq!(summ.std_dev_pct, summ2.std_dev_pct);
+ assert_approx_eq!(summ.median_abs_dev, summ2.median_abs_dev);
+ assert_approx_eq!(summ.median_abs_dev_pct, summ2.median_abs_dev_pct);
+
+ assert_eq!(summ.quartiles, summ2.quartiles);
+ assert_eq!(summ.iqr, summ2.iqr);
+}
+
+#[test]
+fn test_min_max_nan() {
+ let xs = &[1.0, 2.0, f64::NAN, 3.0, 4.0];
+ let summary = Summary::new(xs);
+ assert_eq!(summary.min, 1.0);
+ assert_eq!(summary.max, 4.0);
+}
+
+#[test]
+fn test_norm2() {
+ let val = &[958.0000000000, 924.0000000000];
+ let summ = &Summary {
+ sum: 1882.0000000000,
+ min: 924.0000000000,
+ max: 958.0000000000,
+ mean: 941.0000000000,
+ median: 941.0000000000,
+ var: 578.0000000000,
+ std_dev: 24.0416305603,
+ std_dev_pct: 2.5549022912,
+ median_abs_dev: 25.2042000000,
+ median_abs_dev_pct: 2.6784484591,
+ quartiles: (932.5000000000, 941.0000000000, 949.5000000000),
+ iqr: 17.0000000000,
+ };
+ check(val, summ);
+}
+#[test]
+fn test_norm10narrow() {
+ let val = &[
+ 966.0000000000,
+ 985.0000000000,
+ 1110.0000000000,
+ 848.0000000000,
+ 821.0000000000,
+ 975.0000000000,
+ 962.0000000000,
+ 1157.0000000000,
+ 1217.0000000000,
+ 955.0000000000,
+ ];
+ let summ = &Summary {
+ sum: 9996.0000000000,
+ min: 821.0000000000,
+ max: 1217.0000000000,
+ mean: 999.6000000000,
+ median: 970.5000000000,
+ var: 16050.7111111111,
+ std_dev: 126.6914010938,
+ std_dev_pct: 12.6742097933,
+ median_abs_dev: 102.2994000000,
+ median_abs_dev_pct: 10.5408964451,
+ quartiles: (956.7500000000, 970.5000000000, 1078.7500000000),
+ iqr: 122.0000000000,
+ };
+ check(val, summ);
+}
+#[test]
+fn test_norm10medium() {
+ let val = &[
+ 954.0000000000,
+ 1064.0000000000,
+ 855.0000000000,
+ 1000.0000000000,
+ 743.0000000000,
+ 1084.0000000000,
+ 704.0000000000,
+ 1023.0000000000,
+ 357.0000000000,
+ 869.0000000000,
+ ];
+ let summ = &Summary {
+ sum: 8653.0000000000,
+ min: 357.0000000000,
+ max: 1084.0000000000,
+ mean: 865.3000000000,
+ median: 911.5000000000,
+ var: 48628.4555555556,
+ std_dev: 220.5186059170,
+ std_dev_pct: 25.4846418487,
+ median_abs_dev: 195.7032000000,
+ median_abs_dev_pct: 21.4704552935,
+ quartiles: (771.0000000000, 911.5000000000, 1017.2500000000),
+ iqr: 246.2500000000,
+ };
+ check(val, summ);
+}
+#[test]
+fn test_norm10wide() {
+ let val = &[
+ 505.0000000000,
+ 497.0000000000,
+ 1591.0000000000,
+ 887.0000000000,
+ 1026.0000000000,
+ 136.0000000000,
+ 1580.0000000000,
+ 940.0000000000,
+ 754.0000000000,
+ 1433.0000000000,
+ ];
+ let summ = &Summary {
+ sum: 9349.0000000000,
+ min: 136.0000000000,
+ max: 1591.0000000000,
+ mean: 934.9000000000,
+ median: 913.5000000000,
+ var: 239208.9888888889,
+ std_dev: 489.0899599142,
+ std_dev_pct: 52.3146817750,
+ median_abs_dev: 611.5725000000,
+ median_abs_dev_pct: 66.9482758621,
+ quartiles: (567.2500000000, 913.5000000000, 1331.2500000000),
+ iqr: 764.0000000000,
+ };
+ check(val, summ);
+}
+#[test]
+fn test_norm25verynarrow() {
+ let val = &[
+ 991.0000000000,
+ 1018.0000000000,
+ 998.0000000000,
+ 1013.0000000000,
+ 974.0000000000,
+ 1007.0000000000,
+ 1014.0000000000,
+ 999.0000000000,
+ 1011.0000000000,
+ 978.0000000000,
+ 985.0000000000,
+ 999.0000000000,
+ 983.0000000000,
+ 982.0000000000,
+ 1015.0000000000,
+ 1002.0000000000,
+ 977.0000000000,
+ 948.0000000000,
+ 1040.0000000000,
+ 974.0000000000,
+ 996.0000000000,
+ 989.0000000000,
+ 1015.0000000000,
+ 994.0000000000,
+ 1024.0000000000,
+ ];
+ let summ = &Summary {
+ sum: 24926.0000000000,
+ min: 948.0000000000,
+ max: 1040.0000000000,
+ mean: 997.0400000000,
+ median: 998.0000000000,
+ var: 393.2066666667,
+ std_dev: 19.8294393937,
+ std_dev_pct: 1.9888308788,
+ median_abs_dev: 22.2390000000,
+ median_abs_dev_pct: 2.2283567134,
+ quartiles: (983.0000000000, 998.0000000000, 1013.0000000000),
+ iqr: 30.0000000000,
+ };
+ check(val, summ);
+}
+#[test]
+fn test_exp10a() {
+ let val = &[
+ 23.0000000000,
+ 11.0000000000,
+ 2.0000000000,
+ 57.0000000000,
+ 4.0000000000,
+ 12.0000000000,
+ 5.0000000000,
+ 29.0000000000,
+ 3.0000000000,
+ 21.0000000000,
+ ];
+ let summ = &Summary {
+ sum: 167.0000000000,
+ min: 2.0000000000,
+ max: 57.0000000000,
+ mean: 16.7000000000,
+ median: 11.5000000000,
+ var: 287.7888888889,
+ std_dev: 16.9643416875,
+ std_dev_pct: 101.5828843560,
+ median_abs_dev: 13.3434000000,
+ median_abs_dev_pct: 116.0295652174,
+ quartiles: (4.2500000000, 11.5000000000, 22.5000000000),
+ iqr: 18.2500000000,
+ };
+ check(val, summ);
+}
+#[test]
+fn test_exp10b() {
+ let val = &[
+ 24.0000000000,
+ 17.0000000000,
+ 6.0000000000,
+ 38.0000000000,
+ 25.0000000000,
+ 7.0000000000,
+ 51.0000000000,
+ 2.0000000000,
+ 61.0000000000,
+ 32.0000000000,
+ ];
+ let summ = &Summary {
+ sum: 263.0000000000,
+ min: 2.0000000000,
+ max: 61.0000000000,
+ mean: 26.3000000000,
+ median: 24.5000000000,
+ var: 383.5666666667,
+ std_dev: 19.5848580967,
+ std_dev_pct: 74.4671410520,
+ median_abs_dev: 22.9803000000,
+ median_abs_dev_pct: 93.7971428571,
+ quartiles: (9.5000000000, 24.5000000000, 36.5000000000),
+ iqr: 27.0000000000,
+ };
+ check(val, summ);
+}
+#[test]
+fn test_exp10c() {
+ let val = &[
+ 71.0000000000,
+ 2.0000000000,
+ 32.0000000000,
+ 1.0000000000,
+ 6.0000000000,
+ 28.0000000000,
+ 13.0000000000,
+ 37.0000000000,
+ 16.0000000000,
+ 36.0000000000,
+ ];
+ let summ = &Summary {
+ sum: 242.0000000000,
+ min: 1.0000000000,
+ max: 71.0000000000,
+ mean: 24.2000000000,
+ median: 22.0000000000,
+ var: 458.1777777778,
+ std_dev: 21.4050876611,
+ std_dev_pct: 88.4507754589,
+ median_abs_dev: 21.4977000000,
+ median_abs_dev_pct: 97.7168181818,
+ quartiles: (7.7500000000, 22.0000000000, 35.0000000000),
+ iqr: 27.2500000000,
+ };
+ check(val, summ);
+}
+#[test]
+fn test_exp25() {
+ let val = &[
+ 3.0000000000,
+ 24.0000000000,
+ 1.0000000000,
+ 19.0000000000,
+ 7.0000000000,
+ 5.0000000000,
+ 30.0000000000,
+ 39.0000000000,
+ 31.0000000000,
+ 13.0000000000,
+ 25.0000000000,
+ 48.0000000000,
+ 1.0000000000,
+ 6.0000000000,
+ 42.0000000000,
+ 63.0000000000,
+ 2.0000000000,
+ 12.0000000000,
+ 108.0000000000,
+ 26.0000000000,
+ 1.0000000000,
+ 7.0000000000,
+ 44.0000000000,
+ 25.0000000000,
+ 11.0000000000,
+ ];
+ let summ = &Summary {
+ sum: 593.0000000000,
+ min: 1.0000000000,
+ max: 108.0000000000,
+ mean: 23.7200000000,
+ median: 19.0000000000,
+ var: 601.0433333333,
+ std_dev: 24.5161851301,
+ std_dev_pct: 103.3565983562,
+ median_abs_dev: 19.2738000000,
+ median_abs_dev_pct: 101.4410526316,
+ quartiles: (6.0000000000, 19.0000000000, 31.0000000000),
+ iqr: 25.0000000000,
+ };
+ check(val, summ);
+}
+#[test]
+fn test_binom25() {
+ let val = &[
+ 18.0000000000,
+ 17.0000000000,
+ 27.0000000000,
+ 15.0000000000,
+ 21.0000000000,
+ 25.0000000000,
+ 17.0000000000,
+ 24.0000000000,
+ 25.0000000000,
+ 24.0000000000,
+ 26.0000000000,
+ 26.0000000000,
+ 23.0000000000,
+ 15.0000000000,
+ 23.0000000000,
+ 17.0000000000,
+ 18.0000000000,
+ 18.0000000000,
+ 21.0000000000,
+ 16.0000000000,
+ 15.0000000000,
+ 31.0000000000,
+ 20.0000000000,
+ 17.0000000000,
+ 15.0000000000,
+ ];
+ let summ = &Summary {
+ sum: 514.0000000000,
+ min: 15.0000000000,
+ max: 31.0000000000,
+ mean: 20.5600000000,
+ median: 20.0000000000,
+ var: 20.8400000000,
+ std_dev: 4.5650848842,
+ std_dev_pct: 22.2037202539,
+ median_abs_dev: 5.9304000000,
+ median_abs_dev_pct: 29.6520000000,
+ quartiles: (17.0000000000, 20.0000000000, 24.0000000000),
+ iqr: 7.0000000000,
+ };
+ check(val, summ);
+}
+#[test]
+fn test_pois25lambda30() {
+ let val = &[
+ 27.0000000000,
+ 33.0000000000,
+ 34.0000000000,
+ 34.0000000000,
+ 24.0000000000,
+ 39.0000000000,
+ 28.0000000000,
+ 27.0000000000,
+ 31.0000000000,
+ 28.0000000000,
+ 38.0000000000,
+ 21.0000000000,
+ 33.0000000000,
+ 36.0000000000,
+ 29.0000000000,
+ 37.0000000000,
+ 32.0000000000,
+ 34.0000000000,
+ 31.0000000000,
+ 39.0000000000,
+ 25.0000000000,
+ 31.0000000000,
+ 32.0000000000,
+ 40.0000000000,
+ 24.0000000000,
+ ];
+ let summ = &Summary {
+ sum: 787.0000000000,
+ min: 21.0000000000,
+ max: 40.0000000000,
+ mean: 31.4800000000,
+ median: 32.0000000000,
+ var: 26.5933333333,
+ std_dev: 5.1568724372,
+ std_dev_pct: 16.3814245145,
+ median_abs_dev: 5.9304000000,
+ median_abs_dev_pct: 18.5325000000,
+ quartiles: (28.0000000000, 32.0000000000, 34.0000000000),
+ iqr: 6.0000000000,
+ };
+ check(val, summ);
+}
+#[test]
+fn test_pois25lambda40() {
+ let val = &[
+ 42.0000000000,
+ 50.0000000000,
+ 42.0000000000,
+ 46.0000000000,
+ 34.0000000000,
+ 45.0000000000,
+ 34.0000000000,
+ 49.0000000000,
+ 39.0000000000,
+ 28.0000000000,
+ 40.0000000000,
+ 35.0000000000,
+ 37.0000000000,
+ 39.0000000000,
+ 46.0000000000,
+ 44.0000000000,
+ 32.0000000000,
+ 45.0000000000,
+ 42.0000000000,
+ 37.0000000000,
+ 48.0000000000,
+ 42.0000000000,
+ 33.0000000000,
+ 42.0000000000,
+ 48.0000000000,
+ ];
+ let summ = &Summary {
+ sum: 1019.0000000000,
+ min: 28.0000000000,
+ max: 50.0000000000,
+ mean: 40.7600000000,
+ median: 42.0000000000,
+ var: 34.4400000000,
+ std_dev: 5.8685603004,
+ std_dev_pct: 14.3978417577,
+ median_abs_dev: 5.9304000000,
+ median_abs_dev_pct: 14.1200000000,
+ quartiles: (37.0000000000, 42.0000000000, 45.0000000000),
+ iqr: 8.0000000000,
+ };
+ check(val, summ);
+}
+#[test]
+fn test_pois25lambda50() {
+ let val = &[
+ 45.0000000000,
+ 43.0000000000,
+ 44.0000000000,
+ 61.0000000000,
+ 51.0000000000,
+ 53.0000000000,
+ 59.0000000000,
+ 52.0000000000,
+ 49.0000000000,
+ 51.0000000000,
+ 51.0000000000,
+ 50.0000000000,
+ 49.0000000000,
+ 56.0000000000,
+ 42.0000000000,
+ 52.0000000000,
+ 51.0000000000,
+ 43.0000000000,
+ 48.0000000000,
+ 48.0000000000,
+ 50.0000000000,
+ 42.0000000000,
+ 43.0000000000,
+ 42.0000000000,
+ 60.0000000000,
+ ];
+ let summ = &Summary {
+ sum: 1235.0000000000,
+ min: 42.0000000000,
+ max: 61.0000000000,
+ mean: 49.4000000000,
+ median: 50.0000000000,
+ var: 31.6666666667,
+ std_dev: 5.6273143387,
+ std_dev_pct: 11.3913245723,
+ median_abs_dev: 4.4478000000,
+ median_abs_dev_pct: 8.8956000000,
+ quartiles: (44.0000000000, 50.0000000000, 52.0000000000),
+ iqr: 8.0000000000,
+ };
+ check(val, summ);
+}
+#[test]
+fn test_unif25() {
+ let val = &[
+ 99.0000000000,
+ 55.0000000000,
+ 92.0000000000,
+ 79.0000000000,
+ 14.0000000000,
+ 2.0000000000,
+ 33.0000000000,
+ 49.0000000000,
+ 3.0000000000,
+ 32.0000000000,
+ 84.0000000000,
+ 59.0000000000,
+ 22.0000000000,
+ 86.0000000000,
+ 76.0000000000,
+ 31.0000000000,
+ 29.0000000000,
+ 11.0000000000,
+ 41.0000000000,
+ 53.0000000000,
+ 45.0000000000,
+ 44.0000000000,
+ 98.0000000000,
+ 98.0000000000,
+ 7.0000000000,
+ ];
+ let summ = &Summary {
+ sum: 1242.0000000000,
+ min: 2.0000000000,
+ max: 99.0000000000,
+ mean: 49.6800000000,
+ median: 45.0000000000,
+ var: 1015.6433333333,
+ std_dev: 31.8691595957,
+ std_dev_pct: 64.1488719719,
+ median_abs_dev: 45.9606000000,
+ median_abs_dev_pct: 102.1346666667,
+ quartiles: (29.0000000000, 45.0000000000, 79.0000000000),
+ iqr: 50.0000000000,
+ };
+ check(val, summ);
+}
+
+#[test]
+fn test_sum_f64s() {
+ assert_eq!([0.5f64, 3.2321f64, 1.5678f64].sum(), 5.2999);
+}
+#[test]
+fn test_sum_f64_between_ints_that_sum_to_0() {
+ assert_eq!([1e30f64, 1.2f64, -1e30f64].sum(), 1.2);
+}
diff --git a/src/libtest/tests.rs b/src/libtest/tests.rs
new file mode 100644
index 0000000..d8734d8
--- /dev/null
+++ b/src/libtest/tests.rs
@@ -0,0 +1,453 @@
+use crate::bench;
+use crate::test::{
+ filter_tests, parse_opts, run_test, DynTestFn, DynTestName, MetricMap, RunIgnored,
+ ShouldPanic, StaticTestName, TestDesc, TestDescAndFn, TestOpts, TrFailed, TrFailedMsg,
+ TrIgnored, TrOk,
+};
+use crate::Bencher;
+use crate::Concurrent;
+use std::sync::mpsc::channel;
+
+fn one_ignored_one_unignored_test() -> Vec<TestDescAndFn> {
+ vec![
+ TestDescAndFn {
+ desc: TestDesc {
+ name: StaticTestName("1"),
+ ignore: true,
+ should_panic: ShouldPanic::No,
+ allow_fail: false,
+ },
+ testfn: DynTestFn(Box::new(move || {})),
+ },
+ TestDescAndFn {
+ desc: TestDesc {
+ name: StaticTestName("2"),
+ ignore: false,
+ should_panic: ShouldPanic::No,
+ allow_fail: false,
+ },
+ testfn: DynTestFn(Box::new(move || {})),
+ },
+ ]
+}
+
+#[test]
+pub fn do_not_run_ignored_tests() {
+ fn f() {
+ panic!();
+ }
+ let desc = TestDescAndFn {
+ desc: TestDesc {
+ name: StaticTestName("whatever"),
+ ignore: true,
+ should_panic: ShouldPanic::No,
+ allow_fail: false,
+ },
+ testfn: DynTestFn(Box::new(f)),
+ };
+ let (tx, rx) = channel();
+ run_test(&TestOpts::new(), false, desc, tx, Concurrent::No);
+ let (_, res, _) = rx.recv().unwrap();
+ assert!(res != TrOk);
+}
+
+#[test]
+pub fn ignored_tests_result_in_ignored() {
+ fn f() {}
+ let desc = TestDescAndFn {
+ desc: TestDesc {
+ name: StaticTestName("whatever"),
+ ignore: true,
+ should_panic: ShouldPanic::No,
+ allow_fail: false,
+ },
+ testfn: DynTestFn(Box::new(f)),
+ };
+ let (tx, rx) = channel();
+ run_test(&TestOpts::new(), false, desc, tx, Concurrent::No);
+ let (_, res, _) = rx.recv().unwrap();
+ assert!(res == TrIgnored);
+}
+
+#[test]
+fn test_should_panic() {
+ fn f() {
+ panic!();
+ }
+ let desc = TestDescAndFn {
+ desc: TestDesc {
+ name: StaticTestName("whatever"),
+ ignore: false,
+ should_panic: ShouldPanic::Yes,
+ allow_fail: false,
+ },
+ testfn: DynTestFn(Box::new(f)),
+ };
+ let (tx, rx) = channel();
+ run_test(&TestOpts::new(), false, desc, tx, Concurrent::No);
+ let (_, res, _) = rx.recv().unwrap();
+ assert!(res == TrOk);
+}
+
+#[test]
+fn test_should_panic_good_message() {
+ fn f() {
+ panic!("an error message");
+ }
+ let desc = TestDescAndFn {
+ desc: TestDesc {
+ name: StaticTestName("whatever"),
+ ignore: false,
+ should_panic: ShouldPanic::YesWithMessage("error message"),
+ allow_fail: false,
+ },
+ testfn: DynTestFn(Box::new(f)),
+ };
+ let (tx, rx) = channel();
+ run_test(&TestOpts::new(), false, desc, tx, Concurrent::No);
+ let (_, res, _) = rx.recv().unwrap();
+ assert!(res == TrOk);
+}
+
+#[test]
+fn test_should_panic_bad_message() {
+ fn f() {
+ panic!("an error message");
+ }
+ let expected = "foobar";
+ let failed_msg = "panic did not include expected string";
+ let desc = TestDescAndFn {
+ desc: TestDesc {
+ name: StaticTestName("whatever"),
+ ignore: false,
+ should_panic: ShouldPanic::YesWithMessage(expected),
+ allow_fail: false,
+ },
+ testfn: DynTestFn(Box::new(f)),
+ };
+ let (tx, rx) = channel();
+ run_test(&TestOpts::new(), false, desc, tx, Concurrent::No);
+ let (_, res, _) = rx.recv().unwrap();
+ assert!(res == TrFailedMsg(format!("{} '{}'", failed_msg, expected)));
+}
+
+#[test]
+fn test_should_panic_but_succeeds() {
+ fn f() {}
+ let desc = TestDescAndFn {
+ desc: TestDesc {
+ name: StaticTestName("whatever"),
+ ignore: false,
+ should_panic: ShouldPanic::Yes,
+ allow_fail: false,
+ },
+ testfn: DynTestFn(Box::new(f)),
+ };
+ let (tx, rx) = channel();
+ run_test(&TestOpts::new(), false, desc, tx, Concurrent::No);
+ let (_, res, _) = rx.recv().unwrap();
+ assert!(res == TrFailed);
+}
+
+#[test]
+fn parse_ignored_flag() {
+ let args = vec![
+ "progname".to_string(),
+ "filter".to_string(),
+ "--ignored".to_string(),
+ ];
+ let opts = parse_opts(&args).unwrap().unwrap();
+ assert_eq!(opts.run_ignored, RunIgnored::Only);
+}
+
+#[test]
+fn parse_include_ignored_flag() {
+ let args = vec![
+ "progname".to_string(),
+ "filter".to_string(),
+ "-Zunstable-options".to_string(),
+ "--include-ignored".to_string(),
+ ];
+ let opts = parse_opts(&args).unwrap().unwrap();
+ assert_eq!(opts.run_ignored, RunIgnored::Yes);
+}
+
+#[test]
+pub fn filter_for_ignored_option() {
+ // When we run ignored tests the test filter should filter out all the
+ // unignored tests and flip the ignore flag on the rest to false
+
+ let mut opts = TestOpts::new();
+ opts.run_tests = true;
+ opts.run_ignored = RunIgnored::Only;
+
+ let tests = one_ignored_one_unignored_test();
+ let filtered = filter_tests(&opts, tests);
+
+ assert_eq!(filtered.len(), 1);
+ assert_eq!(filtered[0].desc.name.to_string(), "1");
+ assert!(!filtered[0].desc.ignore);
+}
+
+#[test]
+pub fn run_include_ignored_option() {
+ // When we "--include-ignored" tests, the ignore flag should be set to false on
+ // all tests and no test filtered out
+
+ let mut opts = TestOpts::new();
+ opts.run_tests = true;
+ opts.run_ignored = RunIgnored::Yes;
+
+ let tests = one_ignored_one_unignored_test();
+ let filtered = filter_tests(&opts, tests);
+
+ assert_eq!(filtered.len(), 2);
+ assert!(!filtered[0].desc.ignore);
+ assert!(!filtered[1].desc.ignore);
+}
+
+#[test]
+pub fn exclude_should_panic_option() {
+ let mut opts = TestOpts::new();
+ opts.run_tests = true;
+ opts.exclude_should_panic = true;
+
+ let mut tests = one_ignored_one_unignored_test();
+ tests.push(TestDescAndFn {
+ desc: TestDesc {
+ name: StaticTestName("3"),
+ ignore: false,
+ should_panic: ShouldPanic::Yes,
+ allow_fail: false,
+ },
+ testfn: DynTestFn(Box::new(move || {})),
+ });
+
+ let filtered = filter_tests(&opts, tests);
+
+ assert_eq!(filtered.len(), 2);
+ assert!(filtered.iter().all(|test| test.desc.should_panic == ShouldPanic::No));
+}
+
+#[test]
+pub fn exact_filter_match() {
+ fn tests() -> Vec<TestDescAndFn> {
+ vec!["base", "base::test", "base::test1", "base::test2"]
+ .into_iter()
+ .map(|name| TestDescAndFn {
+ desc: TestDesc {
+ name: StaticTestName(name),
+ ignore: false,
+ should_panic: ShouldPanic::No,
+ allow_fail: false,
+ },
+ testfn: DynTestFn(Box::new(move || {})),
+ })
+ .collect()
+ }
+
+ let substr = filter_tests(
+ &TestOpts {
+ filter: Some("base".into()),
+ ..TestOpts::new()
+ },
+ tests(),
+ );
+ assert_eq!(substr.len(), 4);
+
+ let substr = filter_tests(
+ &TestOpts {
+ filter: Some("bas".into()),
+ ..TestOpts::new()
+ },
+ tests(),
+ );
+ assert_eq!(substr.len(), 4);
+
+ let substr = filter_tests(
+ &TestOpts {
+ filter: Some("::test".into()),
+ ..TestOpts::new()
+ },
+ tests(),
+ );
+ assert_eq!(substr.len(), 3);
+
+ let substr = filter_tests(
+ &TestOpts {
+ filter: Some("base::test".into()),
+ ..TestOpts::new()
+ },
+ tests(),
+ );
+ assert_eq!(substr.len(), 3);
+
+ let exact = filter_tests(
+ &TestOpts {
+ filter: Some("base".into()),
+ filter_exact: true,
+ ..TestOpts::new()
+ },
+ tests(),
+ );
+ assert_eq!(exact.len(), 1);
+
+ let exact = filter_tests(
+ &TestOpts {
+ filter: Some("bas".into()),
+ filter_exact: true,
+ ..TestOpts::new()
+ },
+ tests(),
+ );
+ assert_eq!(exact.len(), 0);
+
+ let exact = filter_tests(
+ &TestOpts {
+ filter: Some("::test".into()),
+ filter_exact: true,
+ ..TestOpts::new()
+ },
+ tests(),
+ );
+ assert_eq!(exact.len(), 0);
+
+ let exact = filter_tests(
+ &TestOpts {
+ filter: Some("base::test".into()),
+ filter_exact: true,
+ ..TestOpts::new()
+ },
+ tests(),
+ );
+ assert_eq!(exact.len(), 1);
+}
+
+#[test]
+pub fn sort_tests() {
+ let mut opts = TestOpts::new();
+ opts.run_tests = true;
+
+ let names = vec![
+ "sha1::test".to_string(),
+ "isize::test_to_str".to_string(),
+ "isize::test_pow".to_string(),
+ "test::do_not_run_ignored_tests".to_string(),
+ "test::ignored_tests_result_in_ignored".to_string(),
+ "test::first_free_arg_should_be_a_filter".to_string(),
+ "test::parse_ignored_flag".to_string(),
+ "test::parse_include_ignored_flag".to_string(),
+ "test::filter_for_ignored_option".to_string(),
+ "test::run_include_ignored_option".to_string(),
+ "test::sort_tests".to_string(),
+ ];
+ let tests = {
+ fn testfn() {}
+ let mut tests = Vec::new();
+ for name in &names {
+ let test = TestDescAndFn {
+ desc: TestDesc {
+ name: DynTestName((*name).clone()),
+ ignore: false,
+ should_panic: ShouldPanic::No,
+ allow_fail: false,
+ },
+ testfn: DynTestFn(Box::new(testfn)),
+ };
+ tests.push(test);
+ }
+ tests
+ };
+ let filtered = filter_tests(&opts, tests);
+
+ let expected = vec![
+ "isize::test_pow".to_string(),
+ "isize::test_to_str".to_string(),
+ "sha1::test".to_string(),
+ "test::do_not_run_ignored_tests".to_string(),
+ "test::filter_for_ignored_option".to_string(),
+ "test::first_free_arg_should_be_a_filter".to_string(),
+ "test::ignored_tests_result_in_ignored".to_string(),
+ "test::parse_ignored_flag".to_string(),
+ "test::parse_include_ignored_flag".to_string(),
+ "test::run_include_ignored_option".to_string(),
+ "test::sort_tests".to_string(),
+ ];
+
+ for (a, b) in expected.iter().zip(filtered) {
+ assert!(*a == b.desc.name.to_string());
+ }
+}
+
+#[test]
+pub fn test_metricmap_compare() {
+ let mut m1 = MetricMap::new();
+ let mut m2 = MetricMap::new();
+ m1.insert_metric("in-both-noise", 1000.0, 200.0);
+ m2.insert_metric("in-both-noise", 1100.0, 200.0);
+
+ m1.insert_metric("in-first-noise", 1000.0, 2.0);
+ m2.insert_metric("in-second-noise", 1000.0, 2.0);
+
+ m1.insert_metric("in-both-want-downwards-but-regressed", 1000.0, 10.0);
+ m2.insert_metric("in-both-want-downwards-but-regressed", 2000.0, 10.0);
+
+ m1.insert_metric("in-both-want-downwards-and-improved", 2000.0, 10.0);
+ m2.insert_metric("in-both-want-downwards-and-improved", 1000.0, 10.0);
+
+ m1.insert_metric("in-both-want-upwards-but-regressed", 2000.0, -10.0);
+ m2.insert_metric("in-both-want-upwards-but-regressed", 1000.0, -10.0);
+
+ m1.insert_metric("in-both-want-upwards-and-improved", 1000.0, -10.0);
+ m2.insert_metric("in-both-want-upwards-and-improved", 2000.0, -10.0);
+}
+
+#[test]
+pub fn test_bench_once_no_iter() {
+ fn f(_: &mut Bencher) {}
+ bench::run_once(f);
+}
+
+#[test]
+pub fn test_bench_once_iter() {
+ fn f(b: &mut Bencher) {
+ b.iter(|| {})
+ }
+ bench::run_once(f);
+}
+
+#[test]
+pub fn test_bench_no_iter() {
+ fn f(_: &mut Bencher) {}
+
+ let (tx, rx) = channel();
+
+ let desc = TestDesc {
+ name: StaticTestName("f"),
+ ignore: false,
+ should_panic: ShouldPanic::No,
+ allow_fail: false,
+ };
+
+ crate::bench::benchmark(desc, tx, true, f);
+ rx.recv().unwrap();
+}
+
+#[test]
+pub fn test_bench_iter() {
+ fn f(b: &mut Bencher) {
+ b.iter(|| {})
+ }
+
+ let (tx, rx) = channel();
+
+ let desc = TestDesc {
+ name: StaticTestName("f"),
+ ignore: false,
+ should_panic: ShouldPanic::No,
+ allow_fail: false,
+ };
+
+ crate::bench::benchmark(desc, tx, true, f);
+ rx.recv().unwrap();
+}
diff --git a/src/libunwind/Cargo.toml b/src/libunwind/Cargo.toml
index 4ddc878..f0f1bab 100644
--- a/src/libunwind/Cargo.toml
+++ b/src/libunwind/Cargo.toml
@@ -19,6 +19,7 @@
core = { path = "../libcore" }
libc = { version = "0.2.43", features = ['rustc-dep-of-std'], default-features = false }
compiler_builtins = "0.1.0"
+cfg-if = "0.1.8"
[build-dependencies]
cc = { optional = true, version = "1.0.1" }
diff --git a/src/libunwind/lib.rs b/src/libunwind/lib.rs
index 0ccffea..9182e34 100644
--- a/src/libunwind/lib.rs
+++ b/src/libunwind/lib.rs
@@ -11,10 +11,7 @@
#![cfg_attr(not(target_env = "msvc"), feature(libc))]
-#[macro_use]
-mod macros;
-
-cfg_if! {
+cfg_if::cfg_if! {
if #[cfg(target_env = "msvc")] {
// no extra unwinder support needed
} else if #[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))] {
diff --git a/src/libunwind/libunwind.rs b/src/libunwind/libunwind.rs
index 339b554..5794e0b 100644
--- a/src/libunwind/libunwind.rs
+++ b/src/libunwind/libunwind.rs
@@ -1,10 +1,5 @@
#![allow(nonstandard_style)]
-macro_rules! cfg_if {
- ( $( if #[cfg( $meta:meta )] { $($it1:item)* } else { $($it2:item)* } )* ) =>
- ( $( $( #[cfg($meta)] $it1)* $( #[cfg(not($meta))] $it2)* )* )
-}
-
use libc::{c_int, c_void, uintptr_t};
#[repr(C)]
@@ -82,7 +77,7 @@
pub fn _Unwind_GetDataRelBase(ctx: *mut _Unwind_Context) -> _Unwind_Ptr;
}
-cfg_if! {
+cfg_if::cfg_if! {
if #[cfg(all(any(target_os = "ios", target_os = "netbsd", not(target_arch = "arm"))))] {
// Not ARM EHABI
#[repr(C)]
@@ -206,7 +201,9 @@
pc
}
}
+} // cfg_if!
+cfg_if::cfg_if! {
if #[cfg(not(all(target_os = "ios", target_arch = "arm")))] {
// Not 32-bit iOS
extern "C" {
diff --git a/src/libunwind/macros.rs b/src/libunwind/macros.rs
deleted file mode 100644
index 659e977..0000000
--- a/src/libunwind/macros.rs
+++ /dev/null
@@ -1,35 +0,0 @@
-/// A macro for defining `#[cfg]` if-else statements.
-///
-/// This is similar to the `if/elif` C preprocessor macro by allowing definition
-/// of a cascade of `#[cfg]` cases, emitting the implementation which matches
-/// first.
-///
-/// This allows you to conveniently provide a long list `#[cfg]`'d blocks of code
-/// without having to rewrite each clause multiple times.
-macro_rules! cfg_if {
- ($(
- if #[cfg($($meta:meta),*)] { $($it:item)* }
- ) else * else {
- $($it2:item)*
- }) => {
- __cfg_if_items! {
- () ;
- $( ( ($($meta),*) ($($it)*) ), )*
- ( () ($($it2)*) ),
- }
- }
-}
-
-macro_rules! __cfg_if_items {
- (($($not:meta,)*) ; ) => {};
- (($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => {
- __cfg_if_apply! { cfg(all(not(any($($not),*)), $($m,)*)), $($it)* }
- __cfg_if_items! { ($($not,)* $($m,)*) ; $($rest)* }
- }
-}
-
-macro_rules! __cfg_if_apply {
- ($m:meta, $($it:item)*) => {
- $(#[$m] $it)*
- }
-}
diff --git a/src/test/assembly/nvptx-arch-default.rs b/src/test/assembly/nvptx-arch-default.rs
index 7fe71c3..8a71a63 100644
--- a/src/test/assembly/nvptx-arch-default.rs
+++ b/src/test/assembly/nvptx-arch-default.rs
@@ -1,6 +1,7 @@
// assembly-output: ptx-linker
// compile-flags: --crate-type cdylib
// only-nvptx64
+// ignore-nvptx64
#![no_std]
diff --git a/src/test/assembly/nvptx-arch-emit-asm.rs b/src/test/assembly/nvptx-arch-emit-asm.rs
index 0ca1772..b252b45 100644
--- a/src/test/assembly/nvptx-arch-emit-asm.rs
+++ b/src/test/assembly/nvptx-arch-emit-asm.rs
@@ -1,6 +1,7 @@
// assembly-output: emit-asm
// compile-flags: --crate-type rlib
// only-nvptx64
+// ignore-nvptx64
#![no_std]
diff --git a/src/test/assembly/nvptx-arch-link-arg.rs b/src/test/assembly/nvptx-arch-link-arg.rs
index f6b6e8c..025a9ad 100644
--- a/src/test/assembly/nvptx-arch-link-arg.rs
+++ b/src/test/assembly/nvptx-arch-link-arg.rs
@@ -1,6 +1,7 @@
// assembly-output: ptx-linker
// compile-flags: --crate-type cdylib -C link-arg=--arch=sm_60
// only-nvptx64
+// ignore-nvptx64
#![no_std]
diff --git a/src/test/assembly/nvptx-arch-target-cpu.rs b/src/test/assembly/nvptx-arch-target-cpu.rs
index 08a7a19..824ee9c 100644
--- a/src/test/assembly/nvptx-arch-target-cpu.rs
+++ b/src/test/assembly/nvptx-arch-target-cpu.rs
@@ -1,6 +1,7 @@
// assembly-output: ptx-linker
// compile-flags: --crate-type cdylib -C target-cpu=sm_50
// only-nvptx64
+// ignore-nvptx64
#![no_std]
diff --git a/src/test/assembly/nvptx-atomics.rs b/src/test/assembly/nvptx-atomics.rs
index 3bbd7b3..f963980 100644
--- a/src/test/assembly/nvptx-atomics.rs
+++ b/src/test/assembly/nvptx-atomics.rs
@@ -1,6 +1,7 @@
// assembly-output: ptx-linker
// compile-flags: --crate-type cdylib
// only-nvptx64
+// ignore-nvptx64
#![feature(abi_ptx, core_intrinsics)]
#![no_std]
diff --git a/src/test/assembly/nvptx-internalizing.rs b/src/test/assembly/nvptx-internalizing.rs
index c9edc38..0004fce 100644
--- a/src/test/assembly/nvptx-internalizing.rs
+++ b/src/test/assembly/nvptx-internalizing.rs
@@ -1,6 +1,7 @@
// assembly-output: ptx-linker
// compile-flags: --crate-type cdylib
// only-nvptx64
+// ignore-nvptx64
#![feature(abi_ptx)]
#![no_std]
diff --git a/src/test/assembly/nvptx-linking-binary.rs b/src/test/assembly/nvptx-linking-binary.rs
index d88ed91..64b9c2f 100644
--- a/src/test/assembly/nvptx-linking-binary.rs
+++ b/src/test/assembly/nvptx-linking-binary.rs
@@ -1,6 +1,7 @@
// assembly-output: ptx-linker
// compile-flags: --crate-type bin
// only-nvptx64
+// ignore-nvptx64
#![feature(abi_ptx)]
#![no_main]
diff --git a/src/test/assembly/nvptx-linking-cdylib.rs b/src/test/assembly/nvptx-linking-cdylib.rs
index 1145f56..bdbc30e 100644
--- a/src/test/assembly/nvptx-linking-cdylib.rs
+++ b/src/test/assembly/nvptx-linking-cdylib.rs
@@ -1,6 +1,7 @@
// assembly-output: ptx-linker
// compile-flags: --crate-type cdylib
// only-nvptx64
+// ignore-nvptx64
#![feature(abi_ptx)]
#![no_std]
diff --git a/src/test/assembly/nvptx-safe-naming.rs b/src/test/assembly/nvptx-safe-naming.rs
index ab6f914..80bb04f 100644
--- a/src/test/assembly/nvptx-safe-naming.rs
+++ b/src/test/assembly/nvptx-safe-naming.rs
@@ -1,6 +1,7 @@
// assembly-output: ptx-linker
// compile-flags: --crate-type cdylib
// only-nvptx64
+// ignore-nvptx64
#![feature(abi_ptx)]
#![no_std]
diff --git a/src/test/codegen/match.rs b/src/test/codegen/match.rs
index 1b46bb3..145d4ba 100644
--- a/src/test/codegen/match.rs
+++ b/src/test/codegen/match.rs
@@ -14,12 +14,12 @@
// CHECK-NEXT: i[[TY:[0-9]+]] [[DISCR:[0-9]+]], label %[[A:[a-zA-Z0-9_]+]]
// CHECK-NEXT: i[[TY:[0-9]+]] [[DISCR:[0-9]+]], label %[[B:[a-zA-Z0-9_]+]]
// CHECK-NEXT: ]
+// CHECK: [[B]]:
+// CHECK-NEXT: br label %[[EXIT:[a-zA-Z0-9_]+]]
// CHECK: [[OTHERWISE]]:
// CHECK-NEXT: unreachable
// CHECK: [[A]]:
// CHECK-NEXT: br label %[[EXIT:[a-zA-Z0-9_]+]]
-// CHECK: [[B]]:
-// CHECK-NEXT: br label %[[EXIT:[a-zA-Z0-9_]+]]
match e {
E::A => unit,
E::B => unit,
diff --git a/src/test/codegen/slice-iter-len-eq-zero.rs b/src/test/codegen/slice-iter-len-eq-zero.rs
new file mode 100644
index 0000000..a551683
--- /dev/null
+++ b/src/test/codegen/slice-iter-len-eq-zero.rs
@@ -0,0 +1,14 @@
+// no-system-llvm
+// compile-flags: -O
+#![crate_type = "lib"]
+
+type Demo = [u8; 3];
+
+// CHECK-LABEL: @slice_iter_len_eq_zero
+#[no_mangle]
+pub fn slice_iter_len_eq_zero(y: std::slice::Iter<'_, Demo>) -> bool {
+ // CHECK-NOT: sub
+ // CHECK: %2 = icmp eq i8* %1, %0
+ // CHECK: ret i1 %2
+ y.len() == 0
+}
diff --git a/src/test/compile-fail/const-fn-error.rs b/src/test/compile-fail/const-fn-error.rs
index da6036a..87a9cf9 100644
--- a/src/test/compile-fail/const-fn-error.rs
+++ b/src/test/compile-fail/const-fn-error.rs
@@ -7,6 +7,7 @@
for i in 0..x {
//~^ ERROR E0015
//~| ERROR E0019
+ //~| ERROR E0019
//~| ERROR E0080
sum += i;
}
diff --git a/src/test/compile-fail/issue-52443.rs b/src/test/compile-fail/issue-52443.rs
index e1f07ff..0d69060 100644
--- a/src/test/compile-fail/issue-52443.rs
+++ b/src/test/compile-fail/issue-52443.rs
@@ -4,5 +4,6 @@
[(); {while true {break}; 0}]; //~ ERROR constant contains unimplemented expression type
[(); { for _ in 0usize.. {}; 0}]; //~ ERROR calls in constants are limited to constant functions
//~^ ERROR constant contains unimplemented expression type
+ //~| ERROR constant contains unimplemented expression type
//~| ERROR evaluation of constant value failed
}
diff --git a/src/test/mir-opt/const_prop/switch_int.rs b/src/test/mir-opt/const_prop/switch_int.rs
index 0df1112..904d303 100644
--- a/src/test/mir-opt/const_prop/switch_int.rs
+++ b/src/test/mir-opt/const_prop/switch_int.rs
@@ -13,26 +13,26 @@
// bb0: {
// ...
// _1 = const 1i32;
-// switchInt(_1) -> [1i32: bb1, otherwise: bb2];
+// switchInt(_1) -> [1i32: bb2, otherwise: bb1];
// }
// END rustc.main.ConstProp.before.mir
// START rustc.main.ConstProp.after.mir
// bb0: {
// ...
-// switchInt(const 1i32) -> [1i32: bb1, otherwise: bb2];
+// switchInt(const 1i32) -> [1i32: bb2, otherwise: bb1];
// }
// END rustc.main.ConstProp.after.mir
// START rustc.main.SimplifyBranches-after-const-prop.before.mir
// bb0: {
// ...
// _1 = const 1i32;
-// switchInt(const 1i32) -> [1i32: bb1, otherwise: bb2];
+// switchInt(const 1i32) -> [1i32: bb2, otherwise: bb1];
// }
// END rustc.main.SimplifyBranches-after-const-prop.before.mir
// START rustc.main.SimplifyBranches-after-const-prop.after.mir
// bb0: {
// ...
// _1 = const 1i32;
-// goto -> bb1;
+// goto -> bb2;
// }
// END rustc.main.SimplifyBranches-after-const-prop.after.mir
diff --git a/src/test/mir-opt/deaggregator_test_enum_2.rs b/src/test/mir-opt/deaggregator_test_enum_2.rs
index 59c7573..b39ad1b 100644
--- a/src/test/mir-opt/deaggregator_test_enum_2.rs
+++ b/src/test/mir-opt/deaggregator_test_enum_2.rs
@@ -21,30 +21,22 @@
// END RUST SOURCE
// START rustc.test1.Deaggregator.before.mir
// bb1: {
-// StorageLive(_4);
-// _4 = _2;
-// _0 = Foo::A(move _4,);
-// StorageDead(_4);
-// goto -> bb3;
-// }
-// bb2: {
// StorageLive(_5);
// _5 = _2;
// _0 = Foo::B(move _5,);
// StorageDead(_5);
// goto -> bb3;
// }
-// END rustc.test1.Deaggregator.before.mir
-// START rustc.test1.Deaggregator.after.mir
-// bb1: {
+// bb2: {
// StorageLive(_4);
// _4 = _2;
-// ((_0 as A).0: i32) = move _4;
-// discriminant(_0) = 0;
+// _0 = Foo::A(move _4,);
// StorageDead(_4);
// goto -> bb3;
// }
-// bb2: {
+// END rustc.test1.Deaggregator.before.mir
+// START rustc.test1.Deaggregator.after.mir
+// bb1: {
// StorageLive(_5);
// _5 = _2;
// ((_0 as B).0: i32) = move _5;
@@ -52,5 +44,13 @@
// StorageDead(_5);
// goto -> bb3;
// }
+// bb2: {
+// StorageLive(_4);
+// _4 = _2;
+// ((_0 as A).0: i32) = move _4;
+// discriminant(_0) = 0;
+// StorageDead(_4);
+// goto -> bb3;
+// }
// END rustc.test1.Deaggregator.after.mir
//
diff --git a/src/test/mir-opt/issue-38669.rs b/src/test/mir-opt/issue-38669.rs
index e8ab690..909f9b7 100644
--- a/src/test/mir-opt/issue-38669.rs
+++ b/src/test/mir-opt/issue-38669.rs
@@ -31,17 +31,17 @@
// switchInt(_4) -> [false: bb5, otherwise: bb4];
// }
// ...
-// bb7: {
-// _0 = ();
-// StorageDead(_4);
-// StorageDead(_1);
-// return;
-// }
-// bb8: {
+// bb5: {
// _3 = ();
// StorageDead(_4);
// _1 = const true;
// _2 = ();
// goto -> bb2;
// }
+// bb6: {
+// _0 = ();
+// StorageDead(_4);
+// StorageDead(_1);
+// return;
+// }
// END rustc.main.SimplifyCfg-initial.after.mir
diff --git a/src/test/mir-opt/issue-49232.rs b/src/test/mir-opt/issue-49232.rs
index 447f3a0..9dde6d8 100644
--- a/src/test/mir-opt/issue-49232.rs
+++ b/src/test/mir-opt/issue-49232.rs
@@ -32,76 +32,58 @@
// falseUnwind -> [real: bb3, cleanup: bb4];
// }
// bb2: {
-// goto -> bb20;
+// goto -> bb14;
// }
// bb3: {
// StorageLive(_2);
// StorageLive(_3);
// _3 = const true;
// FakeRead(ForMatchedPlace, _3);
-// switchInt(_3) -> [false: bb9, otherwise: bb8];
+// switchInt(_3) -> [false: bb5, otherwise: bb6];
// }
// bb4 (cleanup): {
// resume;
// }
// bb5: {
-// falseEdges -> [real: bb11, imaginary: bb6];
+// falseEdges -> [real: bb7, imaginary: bb6];
// }
// bb6: {
-// falseEdges -> [real: bb13, imaginary: bb7];
+// _0 = ();
+// goto -> bb8;
// }
// bb7: {
-// unreachable;
-// }
-// bb8: {
-// goto -> bb6;
-// }
-// bb9: {
-// goto -> bb5;
-// }
-// bb10: {
// _2 = const 4i32;
-// goto -> bb18;
-// }
-// bb11: {
-// goto -> bb10;
-// }
-// bb12: {
-// _0 = ();
-// goto -> bb14;
-// }
-// bb13: {
// goto -> bb12;
// }
-// bb14: {
+// bb8: {
// StorageDead(_3);
-// goto -> bb15;
+// goto -> bb9;
// }
-// bb15: {
+// bb9: {
// StorageDead(_2);
// goto -> bb2;
// }
-// bb16: {
+// bb10: {
// _4 = ();
// unreachable;
// }
-// bb17: {
-// goto -> bb18;
+// bb11: {
+// goto -> bb12;
// }
-// bb18: {
+// bb12: {
// FakeRead(ForLet, _2);
// StorageDead(_3);
// StorageLive(_6);
// _6 = &_2;
-// _5 = const std::mem::drop::<&i32>(move _6) -> [return: bb19, unwind: bb4];
+// _5 = const std::mem::drop::<&i32>(move _6) -> [return: bb13, unwind: bb4];
// }
-// bb19: {
+// bb13: {
// StorageDead(_6);
// _1 = ();
// StorageDead(_2);
// goto -> bb1;
// }
-// bb20: {
+// bb14: {
// return;
// }
// }
diff --git a/src/test/mir-opt/loop_test.rs b/src/test/mir-opt/loop_test.rs
index e75955b..68ea60d 100644
--- a/src/test/mir-opt/loop_test.rs
+++ b/src/test/mir-opt/loop_test.rs
@@ -22,20 +22,21 @@
// resume;
// }
// ...
-// bb6: { // Entry into the loop
+// bb3: { // Entry into the loop
// _1 = ();
// StorageDead(_2);
-// goto -> bb7;
+// goto -> bb5;
// }
-// bb7: { // The loop_block
-// falseUnwind -> [real: bb8, cleanup: bb1];
+// ...
+// bb5: { // The loop_block
+// falseUnwind -> [real: bb6, cleanup: bb1];
// }
-// bb8: { // The loop body (body_block)
+// bb6: { // The loop body (body_block)
// StorageLive(_6);
// _6 = const 1i32;
// FakeRead(ForLet, _6);
// StorageDead(_6);
-// goto -> bb7;
+// goto -> bb5;
// }
// ...
// END rustc.main.SimplifyCfg-qualify-consts.after.mir
diff --git a/src/test/mir-opt/match-arm-scopes.rs b/src/test/mir-opt/match-arm-scopes.rs
index 0f026b8..a2bc238 100644
--- a/src/test/mir-opt/match-arm-scopes.rs
+++ b/src/test/mir-opt/match-arm-scopes.rs
@@ -42,55 +42,49 @@
// let mut _0: i32;
// let mut _3: &bool; // Temp for fake borrow of `items.0`
// let mut _4: &bool; // Temp for fake borrow of `items.1`
-// let _5: bool; // `a` in arm
-// let _6: &bool; // `a` in guard
-// let _7: std::string::String; // `s` in arm
-// let _8: &std::string::String; // `s` in guard
+// let _5: bool; // `a` in arm
+// let _6: &bool; // `a` in guard
+// let _7: std::string::String; // `s` in arm
+// let _8: &std::string::String; // `s` in guard
// let mut _9: bool; // `if cond { return 3 } else { a }`
// let mut _10: bool; // `cond`
// let mut _11: !; // `return 3`
// let mut _12: bool; // `if cond { return 3 } else { a }`
// let mut _13: bool; // `cond`
// let mut _14: !; // `return 3`
-// let _15: bool; // `b`
-// let _16: std::string::String; // `t`
+// let _15: bool; // `b`
+// let _16: std::string::String; // `t`
// scope 1 {
// }
// scope 2 {
// }
// bb0: {
// FakeRead(ForMatchedPlace, _2);
-// switchInt((_2.0: bool)) -> [false: bb2, otherwise: bb7];
+// switchInt((_2.0: bool)) -> [false: bb2, otherwise: bb5];
// }
// bb1 (cleanup): {
// resume;
// }
// bb2: {
-// falseEdges -> [real: bb10, imaginary: bb3];
+// falseEdges -> [real: bb8, imaginary: bb3];
// }
// bb3: {
-// falseEdges -> [real: bb21, imaginary: bb4];
+// falseEdges -> [real: bb17, imaginary: bb4];
// }
// bb4: {
-// falseEdges -> [real: bb31, imaginary: bb5];
+// falseEdges -> [real: bb25, imaginary: bb26];
// }
// bb5: {
-// falseEdges -> [real: bb32, imaginary: bb6];
+// switchInt((_2.1: bool)) -> [false: bb3, otherwise: bb6];
// }
// bb6: {
-// unreachable;
+// switchInt((_2.0: bool)) -> [false: bb26, otherwise: bb4];
// }
-// bb7: {
-// switchInt((_2.1: bool)) -> [false: bb3, otherwise: bb8];
-// }
-// bb8: {
-// switchInt((_2.0: bool)) -> [false: bb5, otherwise: bb4];
-// }
-// bb9: { // arm 1
+// bb7: { // arm 1
// _0 = const 1i32;
-// drop(_7) -> [return: bb29, unwind: bb16];
+// drop(_7) -> [return: bb23, unwind: bb13];
// }
-// bb10: { // guard - first time
+// bb8: { // guard - first time
// StorageLive(_6);
// _6 = &(_2.1: bool);
// StorageLive(_8);
@@ -101,58 +95,52 @@
// StorageLive(_10);
// _10 = _1;
// FakeRead(ForMatchedPlace, _10);
-// switchInt(_10) -> [false: bb12, otherwise: bb11];
+// switchInt(_10) -> [false: bb10, otherwise: bb9];
// }
-// bb11: {
-// falseEdges -> [real: bb14, imaginary: bb12];
+// bb9: {
+// falseEdges -> [real: bb11, imaginary: bb10];
// }
-// bb12: {
-// falseEdges -> [real: bb18, imaginary: bb13];
-// }
-// bb13: {
-// unreachable;
-// }
-// bb14: { // `return 3` - first time
-// _0 = const 3i32;
-// StorageDead(_10);
-// StorageDead(_9);
-// StorageDead(_8);
-// StorageDead(_6);
-// goto -> bb17;
-// }
-// bb15: {
-// return;
-// }
-// bb16 (cleanup): {
-// drop(_2) -> bb1;
-// }
-// bb17: {
-// drop(_2) -> [return: bb15, unwind: bb1];
-// }
-// bb18: { // `else` block - first time
+// bb10: { // `else` block - first time
// _9 = (*_6);
// StorageDead(_10);
// FakeRead(ForMatchGuard, _3);
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForGuardBinding, _6);
// FakeRead(ForGuardBinding, _8);
-// switchInt(move _9) -> [false: bb20, otherwise: bb19];
+// switchInt(move _9) -> [false: bb16, otherwise: bb15];
// }
-// bb19: {
+// bb11: { // `return 3` - first time
+// _0 = const 3i32;
+// StorageDead(_10);
+// StorageDead(_9);
+// StorageDead(_8);
+// StorageDead(_6);
+// goto -> bb14;
+// }
+// bb12: {
+// return;
+// }
+// bb13 (cleanup): {
+// drop(_2) -> bb1;
+// }
+// bb14: {
+// drop(_2) -> [return: bb12, unwind: bb1];
+// }
+// bb15: {
// StorageDead(_9);
// StorageLive(_5);
// _5 = (_2.1: bool);
// StorageLive(_7);
// _7 = move (_2.2: std::string::String);
-// goto -> bb9;
+// goto -> bb7;
// }
-// bb20: { // guard otherwise case - first time
+// bb16: { // guard otherwise case - first time
// StorageDead(_9);
// StorageDead(_8);
// StorageDead(_6);
-// falseEdges -> [real: bb7, imaginary: bb3];
+// falseEdges -> [real: bb5, imaginary: bb3];
// }
-// bb21: { // guard - second time
+// bb17: { // guard - second time
// StorageLive(_6);
// _6 = &(_2.0: bool);
// StorageLive(_8);
@@ -163,80 +151,74 @@
// StorageLive(_13);
// _13 = _1;
// FakeRead(ForMatchedPlace, _13);
-// switchInt(_13) -> [false: bb23, otherwise: bb22];
+// switchInt(_13) -> [false: bb19, otherwise: bb18];
// }
-// bb22: {
-// falseEdges -> [real: bb25, imaginary: bb23];
+// bb18: {
+// falseEdges -> [real: bb20, imaginary: bb19];
// }
-// bb23: {
-// falseEdges -> [real: bb26, imaginary: bb24];
-// }
-// bb24: {
-// unreachable;
-// }
-// bb25: { // `return 3` - second time
-// _0 = const 3i32;
-// StorageDead(_13);
-// StorageDead(_12);
-// StorageDead(_8);
-// StorageDead(_6);
-// goto -> bb17;
-// }
-// bb26: { // `else` block - second time
+// bb19: { // `else` block - second time
// _12 = (*_6);
// StorageDead(_13);
// FakeRead(ForMatchGuard, _3);
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForGuardBinding, _6);
// FakeRead(ForGuardBinding, _8);
-// switchInt(move _12) -> [false: bb28, otherwise: bb27];
+// switchInt(move _12) -> [false: bb22, otherwise: bb21];
// }
-// bb27: { // Guard otherwise case - second time
+// bb20: {
+// _0 = const 3i32;
+// StorageDead(_13);
+// StorageDead(_12);
+// StorageDead(_8);
+// StorageDead(_6);
+// goto -> bb14;
+// }
+// bb21: { // bindings for arm 1
// StorageDead(_12);
// StorageLive(_5);
// _5 = (_2.0: bool);
// StorageLive(_7);
// _7 = move (_2.2: std::string::String);
-// goto -> bb9;
+// goto -> bb7;
// }
-// bb28: { // rest of arm 1
+// bb22: { // Guard otherwise case - second time
// StorageDead(_12);
// StorageDead(_8);
// StorageDead(_6);
-// falseEdges -> [real: bb8, imaginary: bb4];
+// falseEdges -> [real: bb6, imaginary: bb4];
// }
-// bb29: {
+// bb23: { // rest of arm 1
// StorageDead(_7);
// StorageDead(_5);
// StorageDead(_8);
// StorageDead(_6);
-// goto -> bb34;
+// goto -> bb28;
// }
-// bb30: { // arm 2
+// bb24: { // arm 2
// _0 = const 2i32;
-// drop(_16) -> [return: bb33, unwind: bb16];
+// drop(_16) -> [return: bb27, unwind: bb13];
// }
-// bb31: { // bindings for arm 2 - first pattern
+// bb25: { // bindings for arm 2 - first pattern
// StorageLive(_15);
// _15 = (_2.1: bool);
// StorageLive(_16);
// _16 = move (_2.2: std::string::String);
-// goto -> bb30;
+// goto -> bb24;
// }
-// bb32: { // bindings for arm 2 - first pattern
+// bb26: { // bindings for arm 2 - second pattern
// StorageLive(_15);
// _15 = (_2.1: bool);
// StorageLive(_16);
// _16 = move (_2.2: std::string::String);
-// goto -> bb30;
+// goto -> bb24;
// }
-// bb33: { // rest of arm 2
+// bb27: { // rest of arm 2
// StorageDead(_16);
// StorageDead(_15);
-// goto -> bb34;
+// goto -> bb28;
// }
-// bb34: { // end of match
-// drop(_2) -> [return: bb15, unwind: bb1];
+// bb28: {
+// drop(_2) -> [return: bb12, unwind: bb1];
// }
// END rustc.complicated_match.SimplifyCfg-initial.after.mir
// START rustc.complicated_match.ElaborateDrops.after.mir
diff --git a/src/test/mir-opt/match_false_edges.rs b/src/test/mir-opt/match_false_edges.rs
index 6979924..a62e1b2 100644
--- a/src/test/mir-opt/match_false_edges.rs
+++ b/src/test/mir-opt/match_false_edges.rs
@@ -45,39 +45,37 @@
// _2 = std::option::Option::<i32>::Some(const 42i32,);
// FakeRead(ForMatchedPlace, _2);
// _3 = discriminant(_2);
-// switchInt(move _3) -> [0isize: bb4, 1isize: bb2, otherwise: bb6];
+// switchInt(move _3) -> [0isize: bb4, 1isize: bb2, otherwise: bb5];
// }
// bb1 (cleanup): {
// resume;
// }
// bb2: {
-// falseEdges -> [real: bb7, imaginary: bb3]; //pre_binding1
+// falseEdges -> [real: bb6, imaginary: bb3]; //pre_binding1
// }
// bb3: {
-// falseEdges -> [real: bb11, imaginary: bb4]; //pre_binding2
+// falseEdges -> [real: bb10, imaginary: bb4]; //pre_binding2
// }
-// bb4: {
-// falseEdges -> [real: bb12, imaginary: bb5]; //pre_binding3
+// bb4: { //pre_binding3 and arm3
+// _1 = (const 3i32, const 3i32);
+// goto -> bb11;
// }
// bb5: {
// unreachable;
// }
-// bb6: {
-// unreachable;
-// }
-// bb7: { // binding1 and guard
+// bb6: { // binding1 and guard
// StorageLive(_6);
// _6 = &(((promoted[0]: std::option::Option<i32>) as Some).0: i32);
// _4 = &shallow _2;
// StorageLive(_7);
-// _7 = const guard() -> [return: bb8, unwind: bb1];
+// _7 = const guard() -> [return: bb7, unwind: bb1];
// }
-// bb8: { // end of guard
+// bb7: { // end of guard
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForGuardBinding, _6);
-// switchInt(move _7) -> [false: bb10, otherwise: bb9];
+// switchInt(move _7) -> [false: bb9, otherwise: bb8];
// }
-// bb9: { // arm1
+// bb8: { // arm1
// StorageDead(_7);
// StorageLive(_5);
// _5 = ((_2 as Some).0: i32);
@@ -87,14 +85,14 @@
// StorageDead(_8);
// StorageDead(_5);
// StorageDead(_6);
-// goto -> bb13;
+// goto -> bb11;
// }
-// bb10: { // to pre_binding2
+// bb9: { // to pre_binding2
// StorageDead(_7);
// StorageDead(_6);
-// falseEdges -> [real: bb3, imaginary: bb3];
+// goto -> bb3;
// }
-// bb11: { // arm2
+// bb10: { // arm2
// StorageLive(_9);
// _9 = ((_2 as Some).0: i32);
// StorageLive(_10);
@@ -102,13 +100,9 @@
// _1 = (const 2i32, move _10);
// StorageDead(_10);
// StorageDead(_9);
-// goto -> bb13;
+// goto -> bb11;
// }
-// bb12: { // arm3
-// _1 = (const 3i32, const 3i32);
-// goto -> bb13;
-// }
-// bb13: {
+// bb11: { // arm3
// StorageDead(_2);
// StorageDead(_1);
// _0 = ();
@@ -122,39 +116,33 @@
// _2 = std::option::Option::<i32>::Some(const 42i32,);
// FakeRead(ForMatchedPlace, _2);
// _3 = discriminant(_2);
-// switchInt(move _3) -> [0isize: bb3, 1isize: bb2, otherwise: bb6];
+// switchInt(move _3) -> [0isize: bb3, 1isize: bb2, otherwise: bb4];
// }
// bb1 (cleanup): {
// resume;
// }
// bb2: {
-// falseEdges -> [real: bb7, imaginary: bb3];
+// falseEdges -> [real: bb5, imaginary: bb3];
// }
// bb3: {
-// falseEdges -> [real: bb11, imaginary: bb4];
+// falseEdges -> [real: bb9, imaginary: bb10];
// }
-// bb4: {
-// falseEdges -> [real: bb12, imaginary: bb5];
-// }
-// bb5: {
+// bb4: { // to arm3 (can skip 2 since this is `Some`)
// unreachable;
// }
-// bb6: {
-// unreachable;
-// }
-// bb7: { // binding1 and guard
+// bb5: { // binding1 and guard
// StorageLive(_6);
// _6 = &((_2 as Some).0: i32);
// _4 = &shallow _2;
// StorageLive(_7);
-// _7 = const guard() -> [return: bb8, unwind: bb1];
+// _7 = const guard() -> [return: bb6, unwind: bb1];
// }
-// bb8: { // end of guard
+// bb6: { // end of guard
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForGuardBinding, _6);
-// switchInt(move _7) -> [false: bb10, otherwise: bb9];
+// switchInt(move _7) -> [false: bb8, otherwise: bb7];
// }
-// bb9: { // arm1
+// bb7: {
// StorageDead(_7);
// StorageLive(_5);
// _5 = ((_2 as Some).0: i32);
@@ -164,18 +152,18 @@
// StorageDead(_8);
// StorageDead(_5);
// StorageDead(_6);
-// goto -> bb13;
+// goto -> bb11;
// }
-// bb10: { // to pre_binding3 (can skip 2 since this is `Some`)
+// bb8: { // to pre_binding3 (can skip 2 since this is `Some`)
// StorageDead(_7);
// StorageDead(_6);
-// falseEdges -> [real: bb4, imaginary: bb3];
+// falseEdges -> [real: bb10, imaginary: bb3];
// }
-// bb11: { // arm2
+// bb9: { // arm2
// _1 = (const 3i32, const 3i32);
-// goto -> bb13;
+// goto -> bb11;
// }
-// bb12: { // binding3 and arm3
+// bb10: { // binding3 and arm3
// StorageLive(_9);
// _9 = ((_2 as Some).0: i32);
// StorageLive(_10);
@@ -183,9 +171,9 @@
// _1 = (const 2i32, move _10);
// StorageDead(_10);
// StorageDead(_9);
-// goto -> bb13;
+// goto -> bb11;
// }
-// bb13: {
+// bb11: {
// StorageDead(_2);
// StorageDead(_1);
// _0 = ();
@@ -198,97 +186,91 @@
// ...
// _2 = std::option::Option::<i32>::Some(const 1i32,);
// FakeRead(ForMatchedPlace, _2);
-// _3 = discriminant(_2);
-// switchInt(move _3) -> [1isize: bb2, otherwise: bb3];
+// _4 = discriminant(_2);
+// switchInt(move _4) -> [1isize: bb2, otherwise: bb3];
// }
// bb1 (cleanup): {
// resume;
// }
// bb2: {
-// falseEdges -> [real: bb7, imaginary: bb3];
+// falseEdges -> [real: bb5, imaginary: bb3];
// }
// bb3: {
-// falseEdges -> [real: bb11, imaginary: bb4];
+// falseEdges -> [real: bb9, imaginary: bb4];
// }
// bb4: {
-// falseEdges -> [real: bb12, imaginary: bb5];
+// falseEdges -> [real: bb10, imaginary: bb14];
// }
// bb5: {
-// falseEdges -> [real: bb16, imaginary: bb6];
-// }
-// bb6: {
-// unreachable;
-// }
-// bb7: { // binding1: Some(w) if guard()
// StorageLive(_7);
// _7 = &((_2 as Some).0: i32);
// _5 = &shallow _2;
// StorageLive(_8);
-// _8 = const guard() -> [return: bb8, unwind: bb1];
+// _8 = const guard() -> [return: bb6, unwind: bb1];
// }
-// bb8: { //end of guard1
+// bb6: { //end of guard1
// FakeRead(ForMatchGuard, _5);
// FakeRead(ForGuardBinding, _7);
-// switchInt(move _8) -> [false: bb10, otherwise: bb9];
+// switchInt(move _8) -> [false: bb8, otherwise: bb7];
// }
-// bb9: {
+// bb7: {
// StorageDead(_8);
// StorageLive(_6);
// _6 = ((_2 as Some).0: i32);
// _1 = const 1i32;
// StorageDead(_6);
// StorageDead(_7);
-// goto -> bb17;
+// goto -> bb15;
// }
-// bb10: {
+// bb8: {
// StorageDead(_8);
// StorageDead(_7);
// falseEdges -> [real: bb3, imaginary: bb3];
// }
-// bb11: { // binding2 & arm2
+// bb9: { // binding2 & arm2
// StorageLive(_9);
// _9 = _2;
// _1 = const 2i32;
// StorageDead(_9);
-// goto -> bb17;
+// goto -> bb15;
// }
-// bb12: { // binding3: Some(y) if guard2(y)
+// bb10: { // binding3: Some(y) if guard2(y)
// StorageLive(_11);
// _11 = &((_2 as Some).0: i32);
// _5 = &shallow _2;
// StorageLive(_12);
// StorageLive(_13);
// _13 = (*_11);
-// _12 = const guard2(move _13) -> [return: bb13, unwind: bb1];
+// _12 = const guard2(move _13) -> [return: bb11, unwind: bb1];
// }
-// bb13: { // end of guard2
+// bb11: { // end of guard2
// StorageDead(_13);
// FakeRead(ForMatchGuard, _5);
// FakeRead(ForGuardBinding, _11);
-// switchInt(move _12) -> [false: bb15, otherwise: bb14];
+// switchInt(move _12) -> [false: bb13, otherwise: bb12];
// }
-// bb14: { // binding4 & arm4
+// bb12: { // binding4 & arm4
// StorageDead(_12);
// StorageLive(_10);
// _10 = ((_2 as Some).0: i32);
// _1 = const 3i32;
// StorageDead(_10);
// StorageDead(_11);
-// goto -> bb17;
+// goto -> bb15;
// }
-// bb15: {
+// bb13: {
// StorageDead(_12);
// StorageDead(_11);
-// falseEdges -> [real: bb5, imaginary: bb5];
+// falseEdges -> [real: bb14, imaginary: bb14];
// }
-// bb16: {
+// bb14: {
// StorageLive(_14);
// _14 = _2;
// _1 = const 4i32;
// StorageDead(_14);
-// goto -> bb17;
+// goto -> bb15;
// }
-// bb17: {
+// bb15: {
// StorageDead(_2);
// StorageDead(_1);
// _0 = ();
diff --git a/src/test/mir-opt/match_test.rs b/src/test/mir-opt/match_test.rs
index 2ef9520..ef60a04 100644
--- a/src/test/mir-opt/match_test.rs
+++ b/src/test/mir-opt/match_test.rs
@@ -20,67 +20,61 @@
// START rustc.main.SimplifyCfg-initial.after.mir
// bb0: {
// ...
-// switchInt(move _4) -> [false: bb6, otherwise: bb7];
+// switchInt(move _6) -> [false: bb6, otherwise: bb5];
// }
// bb1: {
-// falseEdges -> [real: bb10, imaginary: bb2];
+// falseEdges -> [real: bb9, imaginary: bb2];
// }
// bb2: {
-// falseEdges -> [real: bb13, imaginary: bb3];
+// falseEdges -> [real: bb12, imaginary: bb3];
// }
// bb3: {
-// falseEdges -> [real: bb14, imaginary: bb4];
+// falseEdges -> [real: bb13, imaginary: bb4];
// }
// bb4: {
-// falseEdges -> [real: bb15, imaginary: bb5];
+// _3 = const 3i32;
+// goto -> bb14;
// }
// bb5: {
-// unreachable;
+// _7 = Lt(_1, const 10i32);
+// switchInt(move _7) -> [false: bb6, otherwise: bb1];
// }
// bb6: {
-// _6 = Le(const 10i32, _1);
-// switchInt(move _6) -> [false: bb8, otherwise: bb9];
+// _4 = Le(const 10i32, _1);
+// switchInt(move _4) -> [false: bb8, otherwise: bb7];
// }
// bb7: {
-// _5 = Lt(_1, const 10i32);
-// switchInt(move _5) -> [false: bb6, otherwise: bb1];
+// _5 = Le(_1, const 20i32);
+// switchInt(move _5) -> [false: bb8, otherwise: bb2];
// }
// bb8: {
// switchInt(_1) -> [-1i32: bb3, otherwise: bb4];
// }
// bb9: {
-// _7 = Le(_1, const 20i32);
-// switchInt(move _7) -> [false: bb8, otherwise: bb2];
-// }
-// bb10: {
// _8 = &shallow _1;
// StorageLive(_9);
// _9 = _2;
// FakeRead(ForMatchGuard, _8);
-// switchInt(move _9) -> [false: bb12, otherwise: bb11];
+// switchInt(move _9) -> [false: bb11, otherwise: bb10];
+// }
+// bb10: {
+// StorageDead(_9);
+// _3 = const 0i32;
+// goto -> bb14;
// }
// bb11: {
// StorageDead(_9);
-// _3 = const 0i32;
-// goto -> bb16;
-// }
-// bb12: {
-// StorageDead(_9);
// falseEdges -> [real: bb4, imaginary: bb2];
// }
-// bb13: {
+// bb12: {
// _3 = const 1i32;
-// goto -> bb16;
+// goto -> bb14;
+// }
+// bb13: {
+// _3 = const 2i32;
+// goto -> bb14;
// }
// bb14: {
-// _3 = const 2i32;
-// goto -> bb16;
-// }
-// bb15: {
-// _3 = const 3i32;
-// goto -> bb16;
-// }
-// bb16: {
// _0 = ();
// StorageDead(_2);
// StorageDead(_1);
diff --git a/src/test/mir-opt/nll/region-subtyping-basic.rs b/src/test/mir-opt/nll/region-subtyping-basic.rs
index 622cc99..fa0dbe5 100644
--- a/src/test/mir-opt/nll/region-subtyping-basic.rs
+++ b/src/test/mir-opt/nll/region-subtyping-basic.rs
@@ -22,9 +22,9 @@
// END RUST SOURCE
// START rustc.main.nll.0.mir
-// | '_#2r | U0 | {bb2[0..=8], bb3[0], bb6[0..=1]}
-// | '_#3r | U0 | {bb2[1..=8], bb3[0], bb6[0..=1]}
-// | '_#4r | U0 | {bb2[4..=8], bb3[0], bb6[0..=1]}
+// | '_#2r | U0 | {bb2[0..=8], bb3[0], bb5[0..=1]}
+// | '_#3r | U0 | {bb2[1..=8], bb3[0], bb5[0..=1]}
+// | '_#4r | U0 | {bb2[4..=8], bb3[0], bb5[0..=1]}
// END rustc.main.nll.0.mir
// START rustc.main.nll.0.mir
// let _2: &'_#3r usize;
diff --git a/src/test/mir-opt/remove_fake_borrows.rs b/src/test/mir-opt/remove_fake_borrows.rs
index 6ac9cee..0f9c6f6 100644
--- a/src/test/mir-opt/remove_fake_borrows.rs
+++ b/src/test/mir-opt/remove_fake_borrows.rs
@@ -19,21 +19,19 @@
// bb0: {
// FakeRead(ForMatchedPlace, _1);
// _3 = discriminant(_1);
-// switchInt(move _3) -> [1isize: bb4, otherwise: bb2];
+// switchInt(move _3) -> [1isize: bb3, otherwise: bb2];
// }
// bb1: {
-// goto -> bb5;
+// goto -> bb4;
// }
// bb2: {
-// goto -> bb8;
+// _0 = const 1i32;
+// goto -> bb7;
// }
// bb3: {
-// unreachable;
-// }
-// bb4: {
// switchInt((*(*((_1 as Some).0: &'<empty> &'<empty> i32)))) -> [0i32: bb1, otherwise: bb2];
// }
-// bb5: {
+// bb4: {
// _4 = &shallow _1;
// _5 = &shallow ((_1 as Some).0: &'<empty> &'<empty> i32);
// _6 = &shallow (*((_1 as Some).0: &'<empty> &'<empty> i32));
@@ -44,25 +42,21 @@
// FakeRead(ForMatchGuard, _5);
// FakeRead(ForMatchGuard, _6);
// FakeRead(ForMatchGuard, _7);
-// switchInt(move _8) -> [false: bb7, otherwise: bb6];
+// switchInt(move _8) -> [false: bb6, otherwise: bb5];
+// }
+// bb5: {
+// StorageDead(_8);
+// _0 = const 0i32;
+// goto -> bb7;
// }
// bb6: {
// StorageDead(_8);
-// _0 = const 0i32;
-// goto -> bb9;
-// }
-// bb7: {
-// StorageDead(_8);
// goto -> bb2;
// }
-// bb8: {
-// _0 = const 1i32;
-// goto -> bb9;
-// }
-// bb9: {
+// bb7: {
// return;
// }
-// bb10 (cleanup): {
+// bb8 (cleanup): {
// resume;
// }
// END rustc.match_guard.CleanupNonCodegenStatements.before.mir
@@ -71,21 +65,19 @@
// bb0: {
// nop;
// _3 = discriminant(_1);
-// switchInt(move _3) -> [1isize: bb4, otherwise: bb2];
+// switchInt(move _3) -> [1isize: bb3, otherwise: bb2];
// }
// bb1: {
-// goto -> bb5;
+// goto -> bb4;
// }
// bb2: {
-// goto -> bb8;
+// _0 = const 1i32;
+// goto -> bb7;
// }
// bb3: {
-// unreachable;
-// }
-// bb4: {
// switchInt((*(*((_1 as Some).0: &'<empty> &'<empty> i32)))) -> [0i32: bb1, otherwise: bb2];
// }
-// bb5: {
+// bb4: {
// nop;
// nop;
// nop;
@@ -96,25 +88,21 @@
// nop;
// nop;
// nop;
-// switchInt(move _8) -> [false: bb7, otherwise: bb6];
+// switchInt(move _8) -> [false: bb6, otherwise: bb5];
+// }
+// bb5: {
+// StorageDead(_8);
+// _0 = const 0i32;
+// goto -> bb7;
// }
// bb6: {
// StorageDead(_8);
-// _0 = const 0i32;
-// goto -> bb9;
-// }
-// bb7: {
-// StorageDead(_8);
// goto -> bb2;
// }
-// bb8: {
-// _0 = const 1i32;
-// goto -> bb9;
-// }
-// bb9: {
+// bb7: {
// return;
// }
-// bb10 (cleanup): {
+// bb8 (cleanup): {
// resume;
// }
// END rustc.match_guard.CleanupNonCodegenStatements.after.mir
diff --git a/src/test/mir-opt/simple-match.rs b/src/test/mir-opt/simple-match.rs
new file mode 100644
index 0000000..fc1a3bb
--- /dev/null
+++ b/src/test/mir-opt/simple-match.rs
@@ -0,0 +1,39 @@
+// Test that we don't generate unnecessarily large MIR for very simple matches
+
+fn match_bool(x: bool) -> usize {
+ match x {
+ true => 10,
+ _ => 20,
+ }
+}
+
+fn main() {}
+
+
+// END RUST SOURCE
+// START rustc.match_bool.mir_map.0.mir
+// bb0: {
+// FakeRead(ForMatchedPlace, _1);
+// switchInt(_1) -> [false: bb3, otherwise: bb2];
+// }
+// bb1 (cleanup): {
+// resume;
+// }
+// bb2: {
+// falseEdges -> [real: bb4, imaginary: bb3];
+// }
+// bb3: {
+// _0 = const 20usize;
+// goto -> bb5;
+// }
+// bb4: {
+// _0 = const 10usize;
+// goto -> bb5;
+// }
+// bb5: {
+// goto -> bb6;
+// }
+// bb6: {
+// return;
+// }
+// END rustc.match_bool.mir_map.0.mir
diff --git a/src/test/mir-opt/simplify_if.rs b/src/test/mir-opt/simplify_if.rs
index 35512b9..471c1df 100644
--- a/src/test/mir-opt/simplify_if.rs
+++ b/src/test/mir-opt/simplify_if.rs
@@ -8,12 +8,12 @@
// START rustc.main.SimplifyBranches-after-const-prop.before.mir
// bb0: {
// ...
-// switchInt(const false) -> [false: bb3, otherwise: bb1];
+// switchInt(const false) -> [false: bb1, otherwise: bb2];
// }
// END rustc.main.SimplifyBranches-after-const-prop.before.mir
// START rustc.main.SimplifyBranches-after-const-prop.after.mir
// bb0: {
// ...
-// goto -> bb3;
+// goto -> bb1;
// }
// END rustc.main.SimplifyBranches-after-const-prop.after.mir
diff --git a/src/test/mir-opt/simplify_match.rs b/src/test/mir-opt/simplify_match.rs
index 0192aa0..8624899 100644
--- a/src/test/mir-opt/simplify_match.rs
+++ b/src/test/mir-opt/simplify_match.rs
@@ -9,14 +9,14 @@
// START rustc.main.SimplifyBranches-after-copy-prop.before.mir
// bb0: {
// ...
-// switchInt(const false) -> [false: bb3, otherwise: bb1];
+// switchInt(const false) -> [false: bb1, otherwise: bb2];
// }
// bb1: {
// END rustc.main.SimplifyBranches-after-copy-prop.before.mir
// START rustc.main.SimplifyBranches-after-copy-prop.after.mir
// bb0: {
// ...
-// goto -> bb3;
+// goto -> bb1;
// }
// bb1: {
// END rustc.main.SimplifyBranches-after-copy-prop.after.mir
diff --git a/src/test/run-make-fulldeps/alloc-extern-crates/fakealloc.rs b/src/test/run-make-fulldeps/alloc-extern-crates/fakealloc.rs
index 625c3af..d4612c3 100644
--- a/src/test/run-make-fulldeps/alloc-extern-crates/fakealloc.rs
+++ b/src/test/run-make-fulldeps/alloc-extern-crates/fakealloc.rs
@@ -2,14 +2,16 @@
#![no_std]
#[inline]
-pub unsafe fn allocate(_size: usize, _align: usize) -> *mut u8 { 0 as *mut u8 }
+pub unsafe fn allocate(_size: usize, _align: usize) -> *mut u8 {
+ core::ptr::null_mut()
+}
#[inline]
pub unsafe fn deallocate(_ptr: *mut u8, _old_size: usize, _align: usize) { }
#[inline]
pub unsafe fn reallocate(_ptr: *mut u8, _old_size: usize, _size: usize, _align: usize) -> *mut u8 {
- 0 as *mut u8
+ core::ptr::null_mut()
}
#[inline]
diff --git a/src/test/run-make-fulldeps/hotplug_codegen_backend/the_backend.rs b/src/test/run-make-fulldeps/hotplug_codegen_backend/the_backend.rs
index 1bab3f0..75cad9e 100644
--- a/src/test/run-make-fulldeps/hotplug_codegen_backend/the_backend.rs
+++ b/src/test/run-make-fulldeps/hotplug_codegen_backend/the_backend.rs
@@ -60,7 +60,7 @@
fn codegen_crate<'a, 'tcx>(
&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tcx: TyCtxt<'tcx>,
_metadata: EncodedMetadata,
_need_metadata_module: bool,
_rx: mpsc::Receiver<Box<Any + Send>>
diff --git a/src/test/run-pass-fulldeps/auxiliary/issue-40001-plugin.rs b/src/test/run-pass-fulldeps/auxiliary/issue-40001-plugin.rs
index 40e0115..76554ea 100644
--- a/src/test/run-pass-fulldeps/auxiliary/issue-40001-plugin.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/issue-40001-plugin.rs
@@ -45,7 +45,7 @@
let item = match cx.tcx.hir().get_by_hir_id(id) {
Node::Item(item) => item,
- _ => cx.tcx.hir().expect_item_by_hir_id(cx.tcx.hir().get_parent_item(id)),
+ _ => cx.tcx.hir().expect_item(cx.tcx.hir().get_parent_item(id)),
};
if !attr::contains_name(&item.attrs, Symbol::intern("whitelisted_attr")) {
diff --git a/src/test/run-pass/cast.rs b/src/test/run-pass/cast.rs
index 37add84..c7977f4 100644
--- a/src/test/run-pass/cast.rs
+++ b/src/test/run-pass/cast.rs
@@ -15,5 +15,5 @@
// Test that `_` is correctly inferred.
let x = &"hello";
let mut y = x as *const _;
- y = 0 as *const _;
+ y = core::ptr::null_mut();
}
diff --git a/src/test/run-pass/cleanup-shortcircuit.rs b/src/test/run-pass/cleanup-shortcircuit.rs
index 118fa00..6e67a27 100644
--- a/src/test/run-pass/cleanup-shortcircuit.rs
+++ b/src/test/run-pass/cleanup-shortcircuit.rs
@@ -16,6 +16,6 @@
if args.len() >= 2 && args[1] == "signal" {
// Raise a segfault.
- unsafe { *(0 as *mut isize) = 0; }
+ unsafe { *std::ptr::null_mut::<isize>() = 0; }
}
}
diff --git a/src/test/run-pass/consts/const-block.rs b/src/test/run-pass/consts/const-block.rs
index 012523a..7172a34 100644
--- a/src/test/run-pass/consts/const-block.rs
+++ b/src/test/run-pass/consts/const-block.rs
@@ -21,7 +21,7 @@
static BLOCK_IMPLICIT_UNIT: () = { };
static BLOCK_FLOAT: f64 = { 1.0 };
static BLOCK_ENUM: Option<usize> = { Some(100) };
-static BLOCK_STRUCT: Foo = { Foo { a: 12, b: 0 as *const () } };
+static BLOCK_STRUCT: Foo = { Foo { a: 12, b: std::ptr::null::<()>() } };
static BLOCK_UNSAFE: usize = unsafe { 1000 };
static BLOCK_FN_INFERRED: fn(usize) -> usize = { foo };
@@ -36,7 +36,7 @@
assert_eq!(BLOCK_IMPLICIT_UNIT, ());
assert_eq!(BLOCK_FLOAT, 1.0_f64);
assert_eq!(BLOCK_STRUCT.a, 12);
- assert_eq!(BLOCK_STRUCT.b, 0 as *const ());
+ assert_eq!(BLOCK_STRUCT.b, std::ptr::null::<()>());
assert_eq!(BLOCK_ENUM, Some(100));
assert_eq!(BLOCK_UNSAFE, 1000);
assert_eq!(BLOCK_FN_INFERRED(300), 300);
diff --git a/src/test/run-pass/consts/const-enum-cast.rs b/src/test/run-pass/consts/const-enum-cast.rs
index 3b140d1..a3255c2 100644
--- a/src/test/run-pass/consts/const-enum-cast.rs
+++ b/src/test/run-pass/consts/const-enum-cast.rs
@@ -1,9 +1,8 @@
// run-pass
-#![allow(dead_code)]
#![allow(non_upper_case_globals)]
enum A { A1, A2 }
-enum B { B1=0, B2=2 }
+enum B { B1=4, B2=2 }
pub fn main () {
static c1: isize = A::A2 as isize;
@@ -14,4 +13,14 @@
assert_eq!(c2, 2);
assert_eq!(a1, 1);
assert_eq!(a2, 2);
+
+ // Turns out that adding a let-binding generates totally different MIR.
+ static c1_2: isize = { let v = A::A1; v as isize };
+ static c2_2: isize = { let v = B::B1; v as isize };
+ let a1_2 = { let v = A::A1; v as isize };
+ let a2_2 = { let v = B::B1; v as isize };
+ assert_eq!(c1_2, 0);
+ assert_eq!(c2_2, 4);
+ assert_eq!(a1_2, 0);
+ assert_eq!(a2_2, 4);
}
diff --git a/src/test/run-pass/issues/issue-13259-windows-tcb-trash.rs b/src/test/run-pass/issues/issue-13259-windows-tcb-trash.rs
index d79d34d..740e778 100644
--- a/src/test/run-pass/issues/issue-13259-windows-tcb-trash.rs
+++ b/src/test/run-pass/issues/issue-13259-windows-tcb-trash.rs
@@ -23,8 +23,8 @@
pub fn test() {
let mut buf: [u16; 50] = [0; 50];
let ret = unsafe {
- FormatMessageW(0x1000, 0 as *mut _, 1, 0x400,
- buf.as_mut_ptr(), buf.len() as u32, 0 as *const _)
+ FormatMessageW(0x1000, core::ptr::null_mut(), 1, 0x400,
+ buf.as_mut_ptr(), buf.len() as u32, core::ptr::null())
};
// On some 32-bit Windowses (Win7-8 at least) this will panic with segmented
// stacks taking control of pvArbitrary
diff --git a/src/test/run-pass/issues/issue-19001.rs b/src/test/run-pass/issues/issue-19001.rs
index 85f7a84..76c380c 100644
--- a/src/test/run-pass/issues/issue-19001.rs
+++ b/src/test/run-pass/issues/issue-19001.rs
@@ -7,5 +7,5 @@
}
fn main() {
- let _t = Loopy { ptr: 0 as *mut _ };
+ let _t = Loopy { ptr: core::ptr::null_mut() };
}
diff --git a/src/test/run-pass/issues/issue-39367.rs b/src/test/run-pass/issues/issue-39367.rs
index 484cd78..8314be3 100644
--- a/src/test/run-pass/issues/issue-39367.rs
+++ b/src/test/run-pass/issues/issue-39367.rs
@@ -11,13 +11,13 @@
ArenaSet(vec![], &Z)
}
unsafe {
- use std::sync::{Once, ONCE_INIT};
+ use std::sync::Once;
fn require_sync<T: Sync>(_: &T) { }
unsafe fn __stability() -> &'static ArenaSet<Vec<u8>> {
use std::mem::transmute;
- static mut DATA: *const ArenaSet<Vec<u8>> = 0 as *const ArenaSet<Vec<u8>>;
+ static mut DATA: *const ArenaSet<Vec<u8>> = std::ptr::null_mut();
- static mut ONCE: Once = ONCE_INIT;
+ static mut ONCE: Once = Once::new();
ONCE.call_once(|| {
DATA = transmute
::<Box<ArenaSet<Vec<u8>>>, *const ArenaSet<Vec<u8>>>
diff --git a/src/test/run-pass/issues/issue-46069.rs b/src/test/run-pass/issues/issue-46069.rs
index fba2a2e..1d4f789 100644
--- a/src/test/run-pass/issues/issue-46069.rs
+++ b/src/test/run-pass/issues/issue-46069.rs
@@ -17,7 +17,7 @@
}
fn main() {
- let _f = 0 as *mut <Fuse<Cloned<Iter<u8>>> as Iterator>::Item;
+ let _f: *mut <Fuse<Cloned<Iter<u8>>> as Iterator>::Item = std::ptr::null_mut();
copy_ex();
}
diff --git a/src/test/run-pass/structs-enums/enum-null-pointer-opt.rs b/src/test/run-pass/structs-enums/enum-null-pointer-opt.rs
index f871c21..32fdbf6 100644
--- a/src/test/run-pass/structs-enums/enum-null-pointer-opt.rs
+++ b/src/test/run-pass/structs-enums/enum-null-pointer-opt.rs
@@ -38,7 +38,7 @@
// The optimization can't apply to raw pointers or unions with a ZST field.
assert!(size_of::<Option<*const isize>>() != size_of::<*const isize>());
- assert!(Some(0 as *const isize).is_some()); // Can't collapse None to null
+ assert!(Some(std::ptr::null::<isize>()).is_some()); // Can't collapse None to null
assert_ne!(size_of::<fn(isize)>(), size_of::<Option<MaybeUninitUnion<fn(isize)>>>());
assert_ne!(size_of::<&str>(), size_of::<Option<MaybeUninitUnion<&str>>>());
assert_ne!(size_of::<NonNull<isize>>(), size_of::<Option<MaybeUninitUnion<NonNull<isize>>>>());
diff --git a/src/test/run-pass/weird-exprs.rs b/src/test/run-pass/weird-exprs.rs
index 02901db..7b2b46c 100644
--- a/src/test/run-pass/weird-exprs.rs
+++ b/src/test/run-pass/weird-exprs.rs
@@ -149,6 +149,14 @@
};
}
+fn match_nested_if() {
+ let val = match () {
+ () if if if if true {true} else {false} {true} else {false} {true} else {false} => true,
+ _ => false,
+ };
+ assert!(val);
+}
+
pub fn main() {
strange();
funny();
@@ -166,4 +174,5 @@
punch_card();
r#match();
i_yield();
+ match_nested_if();
}
diff --git a/src/test/ui-fulldeps/internal-lints/pass_ty_by_ref.rs b/src/test/ui-fulldeps/internal-lints/pass_ty_by_ref.rs
index 075ce8b..9534ddb 100644
--- a/src/test/ui-fulldeps/internal-lints/pass_ty_by_ref.rs
+++ b/src/test/ui-fulldeps/internal-lints/pass_ty_by_ref.rs
@@ -11,26 +11,26 @@
fn ty_by_ref(
ty_val: Ty<'_>,
ty_ref: &Ty<'_>, //~ ERROR passing `Ty<'_>` by reference
- ty_ctxt_val: TyCtxt<'_, '_, '_>,
- ty_ctxt_ref: &TyCtxt<'_, '_, '_>, //~ ERROR passing `TyCtxt<'_, '_, '_>` by reference
+ ty_ctxt_val: TyCtxt<'_>,
+ ty_ctxt_ref: &TyCtxt<'_>, //~ ERROR passing `TyCtxt<'_>` by reference
) {
}
-fn ty_multi_ref(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_, '_, '_>) {}
+fn ty_multi_ref(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_>) {}
//~^ ERROR passing `Ty<'_>` by reference
-//~^^ ERROR passing `TyCtxt<'_, '_, '_>` by reference
+//~^^ ERROR passing `TyCtxt<'_>` by reference
trait T {
fn ty_by_ref_in_trait(
ty_val: Ty<'_>,
ty_ref: &Ty<'_>, //~ ERROR passing `Ty<'_>` by reference
- ty_ctxt_val: TyCtxt<'_, '_, '_>,
- ty_ctxt_ref: &TyCtxt<'_, '_, '_>, //~ ERROR passing `TyCtxt<'_, '_, '_>` by reference
+ ty_ctxt_val: TyCtxt<'_>,
+ ty_ctxt_ref: &TyCtxt<'_>, //~ ERROR passing `TyCtxt<'_>` by reference
);
- fn ty_multi_ref_in_trait(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_, '_, '_>);
+ fn ty_multi_ref_in_trait(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_>);
//~^ ERROR passing `Ty<'_>` by reference
- //~^^ ERROR passing `TyCtxt<'_, '_, '_>` by reference
+ //~^^ ERROR passing `TyCtxt<'_>` by reference
}
struct Foo;
@@ -39,26 +39,26 @@
fn ty_by_ref_in_trait(
ty_val: Ty<'_>,
ty_ref: &Ty<'_>,
- ty_ctxt_val: TyCtxt<'_, '_, '_>,
- ty_ctxt_ref: &TyCtxt<'_, '_, '_>,
+ ty_ctxt_val: TyCtxt<'_>,
+ ty_ctxt_ref: &TyCtxt<'_>,
) {
}
- fn ty_multi_ref_in_trait(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_, '_, '_>) {}
+ fn ty_multi_ref_in_trait(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_>) {}
}
impl Foo {
fn ty_by_ref_assoc(
ty_val: Ty<'_>,
ty_ref: &Ty<'_>, //~ ERROR passing `Ty<'_>` by reference
- ty_ctxt_val: TyCtxt<'_, '_, '_>,
- ty_ctxt_ref: &TyCtxt<'_, '_, '_>, //~ ERROR passing `TyCtxt<'_, '_, '_>` by reference
+ ty_ctxt_val: TyCtxt<'_>,
+ ty_ctxt_ref: &TyCtxt<'_>, //~ ERROR passing `TyCtxt<'_>` by reference
) {
}
- fn ty_multi_ref_assoc(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_, '_, '_>) {}
+ fn ty_multi_ref_assoc(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_>) {}
//~^ ERROR passing `Ty<'_>` by reference
- //~^^ ERROR passing `TyCtxt<'_, '_, '_>` by reference
+ //~^^ ERROR passing `TyCtxt<'_>` by reference
}
fn main() {}
diff --git a/src/test/ui-fulldeps/internal-lints/pass_ty_by_ref.stderr b/src/test/ui-fulldeps/internal-lints/pass_ty_by_ref.stderr
index f3e630f..0f9f24b 100644
--- a/src/test/ui-fulldeps/internal-lints/pass_ty_by_ref.stderr
+++ b/src/test/ui-fulldeps/internal-lints/pass_ty_by_ref.stderr
@@ -10,23 +10,23 @@
LL | #![deny(ty_pass_by_reference)]
| ^^^^^^^^^^^^^^^^^^^^
-error: passing `TyCtxt<'_, '_, '_>` by reference
+error: passing `TyCtxt<'_>` by reference
--> $DIR/pass_ty_by_ref.rs:15:18
|
-LL | ty_ctxt_ref: &TyCtxt<'_, '_, '_>,
- | ^^^^^^^^^^^^^^^^^^^ help: try passing by value: `TyCtxt<'_, '_, '_>`
+LL | ty_ctxt_ref: &TyCtxt<'_>,
+ | ^^^^^^^^^^^ help: try passing by value: `TyCtxt<'_>`
error: passing `Ty<'_>` by reference
--> $DIR/pass_ty_by_ref.rs:19:28
|
-LL | fn ty_multi_ref(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_, '_, '_>) {}
+LL | fn ty_multi_ref(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_>) {}
| ^^^^^^^ help: try passing by value: `Ty<'_>`
-error: passing `TyCtxt<'_, '_, '_>` by reference
+error: passing `TyCtxt<'_>` by reference
--> $DIR/pass_ty_by_ref.rs:19:55
|
-LL | fn ty_multi_ref(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_, '_, '_>) {}
- | ^^^^^^^^^^^^^^^^^^^ help: try passing by value: `TyCtxt<'_, '_, '_>`
+LL | fn ty_multi_ref(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_>) {}
+ | ^^^^^^^^^^^ help: try passing by value: `TyCtxt<'_>`
error: passing `Ty<'_>` by reference
--> $DIR/pass_ty_by_ref.rs:26:17
@@ -34,23 +34,23 @@
LL | ty_ref: &Ty<'_>,
| ^^^^^^^ help: try passing by value: `Ty<'_>`
-error: passing `TyCtxt<'_, '_, '_>` by reference
+error: passing `TyCtxt<'_>` by reference
--> $DIR/pass_ty_by_ref.rs:28:22
|
-LL | ty_ctxt_ref: &TyCtxt<'_, '_, '_>,
- | ^^^^^^^^^^^^^^^^^^^ help: try passing by value: `TyCtxt<'_, '_, '_>`
+LL | ty_ctxt_ref: &TyCtxt<'_>,
+ | ^^^^^^^^^^^ help: try passing by value: `TyCtxt<'_>`
error: passing `Ty<'_>` by reference
--> $DIR/pass_ty_by_ref.rs:31:41
|
-LL | fn ty_multi_ref_in_trait(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_, '_, '_>);
+LL | fn ty_multi_ref_in_trait(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_>);
| ^^^^^^^ help: try passing by value: `Ty<'_>`
-error: passing `TyCtxt<'_, '_, '_>` by reference
+error: passing `TyCtxt<'_>` by reference
--> $DIR/pass_ty_by_ref.rs:31:68
|
-LL | fn ty_multi_ref_in_trait(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_, '_, '_>);
- | ^^^^^^^^^^^^^^^^^^^ help: try passing by value: `TyCtxt<'_, '_, '_>`
+LL | fn ty_multi_ref_in_trait(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_>);
+ | ^^^^^^^^^^^ help: try passing by value: `TyCtxt<'_>`
error: passing `Ty<'_>` by reference
--> $DIR/pass_ty_by_ref.rs:53:17
@@ -58,23 +58,23 @@
LL | ty_ref: &Ty<'_>,
| ^^^^^^^ help: try passing by value: `Ty<'_>`
-error: passing `TyCtxt<'_, '_, '_>` by reference
+error: passing `TyCtxt<'_>` by reference
--> $DIR/pass_ty_by_ref.rs:55:22
|
-LL | ty_ctxt_ref: &TyCtxt<'_, '_, '_>,
- | ^^^^^^^^^^^^^^^^^^^ help: try passing by value: `TyCtxt<'_, '_, '_>`
+LL | ty_ctxt_ref: &TyCtxt<'_>,
+ | ^^^^^^^^^^^ help: try passing by value: `TyCtxt<'_>`
error: passing `Ty<'_>` by reference
--> $DIR/pass_ty_by_ref.rs:59:38
|
-LL | fn ty_multi_ref_assoc(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_, '_, '_>) {}
+LL | fn ty_multi_ref_assoc(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_>) {}
| ^^^^^^^ help: try passing by value: `Ty<'_>`
-error: passing `TyCtxt<'_, '_, '_>` by reference
+error: passing `TyCtxt<'_>` by reference
--> $DIR/pass_ty_by_ref.rs:59:65
|
-LL | fn ty_multi_ref_assoc(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_, '_, '_>) {}
- | ^^^^^^^^^^^^^^^^^^^ help: try passing by value: `TyCtxt<'_, '_, '_>`
+LL | fn ty_multi_ref_assoc(ty_multi: &&Ty<'_>, ty_ctxt_multi: &&&&TyCtxt<'_>) {}
+ | ^^^^^^^^^^^ help: try passing by value: `TyCtxt<'_>`
error: aborting due to 12 previous errors
diff --git a/src/test/ui-fulldeps/internal-lints/qualified_ty_ty_ctxt.rs b/src/test/ui-fulldeps/internal-lints/qualified_ty_ty_ctxt.rs
index 5e10697..5603310 100644
--- a/src/test/ui-fulldeps/internal-lints/qualified_ty_ty_ctxt.rs
+++ b/src/test/ui-fulldeps/internal-lints/qualified_ty_ty_ctxt.rs
@@ -13,8 +13,8 @@
fn ty_in_macro(
ty_q: ty::Ty<'_>,
ty: Ty<'_>,
- ty_ctxt_q: ty::TyCtxt<'_, '_, '_>,
- ty_ctxt: TyCtxt<'_, '_, '_>,
+ ty_ctxt_q: ty::TyCtxt<'_>,
+ ty_ctxt: TyCtxt<'_>,
) {
println!("{}", stringify!($a));
}
@@ -24,8 +24,8 @@
fn ty_qualified(
ty_q: ty::Ty<'_>, //~ ERROR usage of qualified `ty::Ty<'_>`
ty: Ty<'_>,
- ty_ctxt_q: ty::TyCtxt<'_, '_, '_>, //~ ERROR usage of qualified `ty::TyCtxt<'_, '_, '_>`
- ty_ctxt: TyCtxt<'_, '_, '_>,
+ ty_ctxt_q: ty::TyCtxt<'_>, //~ ERROR usage of qualified `ty::TyCtxt<'_>`
+ ty_ctxt: TyCtxt<'_>,
) {
}
diff --git a/src/test/ui-fulldeps/internal-lints/qualified_ty_ty_ctxt.stderr b/src/test/ui-fulldeps/internal-lints/qualified_ty_ty_ctxt.stderr
index 31d776c..c3642e6 100644
--- a/src/test/ui-fulldeps/internal-lints/qualified_ty_ty_ctxt.stderr
+++ b/src/test/ui-fulldeps/internal-lints/qualified_ty_ty_ctxt.stderr
@@ -10,11 +10,11 @@
LL | #![deny(usage_of_qualified_ty)]
| ^^^^^^^^^^^^^^^^^^^^^
-error: usage of qualified `ty::TyCtxt<'_, '_, '_>`
+error: usage of qualified `ty::TyCtxt<'_>`
--> $DIR/qualified_ty_ty_ctxt.rs:27:16
|
-LL | ty_ctxt_q: ty::TyCtxt<'_, '_, '_>,
- | ^^^^^^^^^^^^^^^^^^^^^^ help: try using it unqualified: `TyCtxt<'_, '_, '_>`
+LL | ty_ctxt_q: ty::TyCtxt<'_>,
+ | ^^^^^^^^^^^^^^ help: try using it unqualified: `TyCtxt<'_>`
error: aborting due to 2 previous errors
diff --git a/src/test/ui/borrowck/borrowck-mut-borrow-linear-errors.stderr b/src/test/ui/borrowck/borrowck-mut-borrow-linear-errors.stderr
index a8d00d1..ca1496a 100644
--- a/src/test/ui/borrowck/borrowck-mut-borrow-linear-errors.stderr
+++ b/src/test/ui/borrowck/borrowck-mut-borrow-linear-errors.stderr
@@ -2,22 +2,22 @@
--> $DIR/borrowck-mut-borrow-linear-errors.rs:10:30
|
LL | 1 => { addr.push(&mut x); }
- | ---- ^^^^^^ second mutable borrow occurs here
- | |
- | first borrow later used here
+ | ^^^^^^ second mutable borrow occurs here
LL | 2 => { addr.push(&mut x); }
LL | _ => { addr.push(&mut x); }
- | ------ first mutable borrow occurs here
+ | ---- ------ first mutable borrow occurs here
+ | |
+ | first borrow later used here
error[E0499]: cannot borrow `x` as mutable more than once at a time
--> $DIR/borrowck-mut-borrow-linear-errors.rs:11:30
|
-LL | 1 => { addr.push(&mut x); }
- | ---- first borrow later used here
LL | 2 => { addr.push(&mut x); }
| ^^^^^^ second mutable borrow occurs here
LL | _ => { addr.push(&mut x); }
- | ------ first mutable borrow occurs here
+ | ---- ------ first mutable borrow occurs here
+ | |
+ | first borrow later used here
error[E0499]: cannot borrow `x` as mutable more than once at a time
--> $DIR/borrowck-mut-borrow-linear-errors.rs:12:30
diff --git a/src/test/ui/casts-issue-46365.rs b/src/test/ui/casts-issue-46365.rs
index 2e7c26b..3d0fea2 100644
--- a/src/test/ui/casts-issue-46365.rs
+++ b/src/test/ui/casts-issue-46365.rs
@@ -3,5 +3,5 @@
}
fn main() {
- let _foo: *mut Lorem = 0 as *mut _; // no error here
+ let _foo: *mut Lorem = core::ptr::null_mut(); // no error here
}
diff --git a/src/test/ui/const-generics/issue-61336-2.rs b/src/test/ui/const-generics/issue-61336-2.rs
new file mode 100644
index 0000000..604c14e
--- /dev/null
+++ b/src/test/ui/const-generics/issue-61336-2.rs
@@ -0,0 +1,16 @@
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete and may cause the compiler to crash
+
+fn f<T: Copy, const N: usize>(x: T) -> [T; N] {
+ [x; {N}]
+}
+
+fn g<T, const N: usize>(x: T) -> [T; N] {
+ [x; {N}]
+ //~^ ERROR the trait bound `T: std::marker::Copy` is not satisfied [E0277]
+}
+
+fn main() {
+ let x: [u32; 5] = f::<u32, 5>(3);
+ assert_eq!(x, [3u32; 5]);
+}
diff --git a/src/test/ui/const-generics/issue-61336-2.stderr b/src/test/ui/const-generics/issue-61336-2.stderr
new file mode 100644
index 0000000..a7135b6
--- /dev/null
+++ b/src/test/ui/const-generics/issue-61336-2.stderr
@@ -0,0 +1,18 @@
+warning: the feature `const_generics` is incomplete and may cause the compiler to crash
+ --> $DIR/issue-61336-2.rs:1:12
+ |
+LL | #![feature(const_generics)]
+ | ^^^^^^^^^^^^^^
+
+error[E0277]: the trait bound `T: std::marker::Copy` is not satisfied
+ --> $DIR/issue-61336-2.rs:9:5
+ |
+LL | [x; {N}]
+ | ^^^^^^^^ the trait `std::marker::Copy` is not implemented for `T`
+ |
+ = help: consider adding a `where T: std::marker::Copy` bound
+ = note: the `Copy` trait is required because the repeated element will be copied
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0277`.
diff --git a/src/test/ui/consts/array-literal-index-oob.rs b/src/test/ui/consts/array-literal-index-oob.rs
new file mode 100644
index 0000000..76013c7
--- /dev/null
+++ b/src/test/ui/consts/array-literal-index-oob.rs
@@ -0,0 +1,6 @@
+fn main() {
+ &{[1, 2, 3][4]};
+ //~^ ERROR index out of bounds
+ //~| ERROR reaching this expression at runtime will panic or abort
+ //~| ERROR this expression will panic at runtime
+}
diff --git a/src/test/ui/consts/array-literal-index-oob.stderr b/src/test/ui/consts/array-literal-index-oob.stderr
new file mode 100644
index 0000000..727ce9e
--- /dev/null
+++ b/src/test/ui/consts/array-literal-index-oob.stderr
@@ -0,0 +1,24 @@
+error: index out of bounds: the len is 3 but the index is 4
+ --> $DIR/array-literal-index-oob.rs:2:7
+ |
+LL | &{[1, 2, 3][4]};
+ | ^^^^^^^^^^^^
+ |
+ = note: #[deny(const_err)] on by default
+
+error: this expression will panic at runtime
+ --> $DIR/array-literal-index-oob.rs:2:5
+ |
+LL | &{[1, 2, 3][4]};
+ | ^^^^^^^^^^^^^^^ index out of bounds: the len is 3 but the index is 4
+
+error: reaching this expression at runtime will panic or abort
+ --> $DIR/array-literal-index-oob.rs:2:7
+ |
+LL | &{[1, 2, 3][4]};
+ | --^^^^^^^^^^^^-
+ | |
+ | index out of bounds: the len is 3 but the index is 4
+
+error: aborting due to 3 previous errors
+
diff --git a/src/test/ui/consts/const-eval/ice-generic-assoc-const.rs b/src/test/ui/consts/const-eval/ice-generic-assoc-const.rs
index e92de84..2ad1a63 100644
--- a/src/test/ui/consts/const-eval/ice-generic-assoc-const.rs
+++ b/src/test/ui/consts/const-eval/ice-generic-assoc-const.rs
@@ -7,7 +7,7 @@
}
impl<T> Nullable for *const T {
- const NULL: Self = 0 as *const T;
+ const NULL: Self = core::ptr::null::<T>();
fn is_null(&self) -> bool {
*self == Self::NULL
diff --git a/src/test/ui/consts/const-eval/match-test-ptr-null.rs b/src/test/ui/consts/const-eval/match-test-ptr-null.rs
index e0af01a..50757af 100644
--- a/src/test/ui/consts/const-eval/match-test-ptr-null.rs
+++ b/src/test/ui/consts/const-eval/match-test-ptr-null.rs
@@ -6,6 +6,7 @@
match &1 as *const i32 as usize {
//~^ ERROR casting pointers to integers in constants
//~| NOTE for more information, see
+ //~| ERROR constant contains unimplemented expression type
0 => 42, //~ ERROR constant contains unimplemented expression type
//~^ NOTE "pointer arithmetic or comparison" needs an rfc before being allowed
//~| ERROR evaluation of constant value failed
diff --git a/src/test/ui/consts/const-eval/match-test-ptr-null.stderr b/src/test/ui/consts/const-eval/match-test-ptr-null.stderr
index d005e09..167d5ad 100644
--- a/src/test/ui/consts/const-eval/match-test-ptr-null.stderr
+++ b/src/test/ui/consts/const-eval/match-test-ptr-null.stderr
@@ -8,18 +8,24 @@
= help: add #![feature(const_raw_ptr_to_usize_cast)] to the crate attributes to enable
error[E0019]: constant contains unimplemented expression type
- --> $DIR/match-test-ptr-null.rs:9:13
+ --> $DIR/match-test-ptr-null.rs:6:15
+ |
+LL | match &1 as *const i32 as usize {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error[E0019]: constant contains unimplemented expression type
+ --> $DIR/match-test-ptr-null.rs:10:13
|
LL | 0 => 42,
| ^
error[E0080]: evaluation of constant value failed
- --> $DIR/match-test-ptr-null.rs:9:13
+ --> $DIR/match-test-ptr-null.rs:10:13
|
LL | 0 => 42,
| ^ "pointer arithmetic or comparison" needs an rfc before being allowed inside constants
-error: aborting due to 3 previous errors
+error: aborting due to 4 previous errors
Some errors have detailed explanations: E0019, E0080, E0658.
For more information about an error, try `rustc --explain E0019`.
diff --git a/src/test/ui/consts/const-match-pattern-arm.rs b/src/test/ui/consts/const-match-pattern-arm.rs
index 3b98526..6ed3ac2 100644
--- a/src/test/ui/consts/const-match-pattern-arm.rs
+++ b/src/test/ui/consts/const-match-pattern-arm.rs
@@ -1,6 +1,7 @@
#![allow(warnings)]
const x: bool = match Some(true) {
+ //~^ ERROR: constant contains unimplemented expression type [E0019]
Some(value) => true,
//~^ ERROR: constant contains unimplemented expression type [E0019]
_ => false
@@ -8,6 +9,7 @@
const y: bool = {
match Some(true) {
+ //~^ ERROR: constant contains unimplemented expression type [E0019]
Some(value) => true,
//~^ ERROR: constant contains unimplemented expression type [E0019]
_ => false
diff --git a/src/test/ui/consts/const-match-pattern-arm.stderr b/src/test/ui/consts/const-match-pattern-arm.stderr
index c793cc0..709b66b 100644
--- a/src/test/ui/consts/const-match-pattern-arm.stderr
+++ b/src/test/ui/consts/const-match-pattern-arm.stderr
@@ -1,15 +1,27 @@
error[E0019]: constant contains unimplemented expression type
- --> $DIR/const-match-pattern-arm.rs:4:5
+ --> $DIR/const-match-pattern-arm.rs:3:23
+ |
+LL | const x: bool = match Some(true) {
+ | ^^^^^^^^^^
+
+error[E0019]: constant contains unimplemented expression type
+ --> $DIR/const-match-pattern-arm.rs:5:5
|
LL | Some(value) => true,
| ^^^^^^^^^^^
error[E0019]: constant contains unimplemented expression type
- --> $DIR/const-match-pattern-arm.rs:11:9
+ --> $DIR/const-match-pattern-arm.rs:11:11
+ |
+LL | match Some(true) {
+ | ^^^^^^^^^^
+
+error[E0019]: constant contains unimplemented expression type
+ --> $DIR/const-match-pattern-arm.rs:13:9
|
LL | Some(value) => true,
| ^^^^^^^^^^^
-error: aborting due to 2 previous errors
+error: aborting due to 4 previous errors
For more information about this error, try `rustc --explain E0019`.
diff --git a/src/test/ui/consts/const_short_circuit.rs b/src/test/ui/consts/const_short_circuit.rs
index 1e7b7ed..87b14a1 100644
--- a/src/test/ui/consts/const_short_circuit.rs
+++ b/src/test/ui/consts/const_short_circuit.rs
@@ -1,5 +1,3 @@
-#![feature(underscore_const_names)]
-
const _: bool = false && false;
const _: bool = true && false;
const _: bool = {
diff --git a/src/test/ui/consts/const_short_circuit.stderr b/src/test/ui/consts/const_short_circuit.stderr
index a67bb0b..0a6536c 100644
--- a/src/test/ui/consts/const_short_circuit.stderr
+++ b/src/test/ui/consts/const_short_circuit.stderr
@@ -1,23 +1,23 @@
error: new features like let bindings are not permitted in constants which also use short circuiting operators
- --> $DIR/const_short_circuit.rs:6:9
+ --> $DIR/const_short_circuit.rs:4:9
|
LL | let mut x = true && false;
| ^^^^^
|
note: use of `&&` operator here does not actually short circuit due to the const evaluator presently not being able to do control flow. See https://github.com/rust-lang/rust/issues/49146 for more information.
- --> $DIR/const_short_circuit.rs:6:22
+ --> $DIR/const_short_circuit.rs:4:22
|
LL | let mut x = true && false;
| ^^
error: new features like let bindings are not permitted in constants which also use short circuiting operators
- --> $DIR/const_short_circuit.rs:11:9
+ --> $DIR/const_short_circuit.rs:9:9
|
LL | let x = true && false;
| ^
|
note: use of `&&` operator here does not actually short circuit due to the const evaluator presently not being able to do control flow. See https://github.com/rust-lang/rust/issues/49146 for more information.
- --> $DIR/const_short_circuit.rs:11:18
+ --> $DIR/const_short_circuit.rs:9:18
|
LL | let x = true && false;
| ^^
diff --git a/src/test/ui/consts/min_const_fn/min_const_fn.rs b/src/test/ui/consts/min_const_fn/min_const_fn.rs
index 40e7107..9523fcb 100644
--- a/src/test/ui/consts/min_const_fn/min_const_fn.rs
+++ b/src/test/ui/consts/min_const_fn/min_const_fn.rs
@@ -69,8 +69,8 @@
const fn i32_ops4(c: i32, d: i32) -> i32 { c + d }
const fn char_cast(u: u8) -> char { u as char }
const unsafe fn ret_i32_no_unsafe() -> i32 { 42 }
-const unsafe fn ret_null_ptr_no_unsafe<T>() -> *const T { 0 as *const T }
-const unsafe fn ret_null_mut_ptr_no_unsafe<T>() -> *mut T { 0 as *mut T }
+const unsafe fn ret_null_ptr_no_unsafe<T>() -> *const T { core::ptr::null() }
+const unsafe fn ret_null_mut_ptr_no_unsafe<T>() -> *mut T { core::ptr::null_mut() }
// not ok
const fn foo11<T: std::fmt::Display>(t: T) -> T { t }
diff --git a/src/test/ui/consts/min_const_fn/min_const_fn_unsafe.rs b/src/test/ui/consts/min_const_fn/min_const_fn_unsafe.rs
index e25dafa..0152561 100644
--- a/src/test/ui/consts/min_const_fn/min_const_fn_unsafe.rs
+++ b/src/test/ui/consts/min_const_fn/min_const_fn_unsafe.rs
@@ -3,8 +3,8 @@
//------------------------------------------------------------------------------
const unsafe fn ret_i32_no_unsafe() -> i32 { 42 }
-const unsafe fn ret_null_ptr_no_unsafe<T>() -> *const T { 0 as *const T }
-const unsafe fn ret_null_mut_ptr_no_unsafe<T>() -> *mut T { 0 as *mut T }
+const unsafe fn ret_null_ptr_no_unsafe<T>() -> *const T { std::ptr::null() }
+const unsafe fn ret_null_mut_ptr_no_unsafe<T>() -> *mut T { std::ptr::null_mut() }
const fn no_unsafe() { unsafe {} }
const fn call_unsafe_const_fn() -> i32 {
diff --git a/src/test/ui/consts/single_variant_match_ice.rs b/src/test/ui/consts/single_variant_match_ice.rs
index 6002506..75793c9 100644
--- a/src/test/ui/consts/single_variant_match_ice.rs
+++ b/src/test/ui/consts/single_variant_match_ice.rs
@@ -2,12 +2,12 @@
Prob,
}
-const FOO: u32 = match Foo::Prob {
- Foo::Prob => 42, //~ ERROR unimplemented expression type
+const FOO: u32 = match Foo::Prob { //~ ERROR unimplemented expression type
+ Foo::Prob => 42,
};
-const BAR: u32 = match Foo::Prob {
- x => 42, //~ ERROR unimplemented expression type
+const BAR: u32 = match Foo::Prob { //~ ERROR unimplemented expression type
+ x => 42,
};
impl Foo {
@@ -15,7 +15,8 @@
use self::Foo::*;
match *self {
- Prob => 0x1, //~ ERROR loops and conditional expressions are not stable in const fn
+ //~^ ERROR loops and conditional expressions are not stable in const fn
+ Prob => 0x1,
}
}
}
diff --git a/src/test/ui/consts/single_variant_match_ice.stderr b/src/test/ui/consts/single_variant_match_ice.stderr
index 1e092c8..8517337 100644
--- a/src/test/ui/consts/single_variant_match_ice.stderr
+++ b/src/test/ui/consts/single_variant_match_ice.stderr
@@ -1,20 +1,20 @@
error[E0019]: constant contains unimplemented expression type
- --> $DIR/single_variant_match_ice.rs:6:5
+ --> $DIR/single_variant_match_ice.rs:5:24
|
-LL | Foo::Prob => 42,
- | ^^^^^^^^^
+LL | const FOO: u32 = match Foo::Prob {
+ | ^^^^^^^^^
error[E0019]: constant contains unimplemented expression type
- --> $DIR/single_variant_match_ice.rs:10:5
+ --> $DIR/single_variant_match_ice.rs:9:24
|
-LL | x => 42,
- | ^
+LL | const BAR: u32 = match Foo::Prob {
+ | ^^^^^^^^^
error[E0723]: loops and conditional expressions are not stable in const fn
- --> $DIR/single_variant_match_ice.rs:18:13
+ --> $DIR/single_variant_match_ice.rs:17:15
|
-LL | Prob => 0x1,
- | ^^^^
+LL | match *self {
+ | ^^^^^
|
= note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
= help: add #![feature(const_fn)] to the crate attributes to enable
diff --git a/src/test/ui/underscore_const_names.rs b/src/test/ui/consts/underscore_const_names.rs
similarity index 88%
rename from src/test/ui/underscore_const_names.rs
rename to src/test/ui/consts/underscore_const_names.rs
index 1db022e..8d57e50 100644
--- a/src/test/ui/underscore_const_names.rs
+++ b/src/test/ui/consts/underscore_const_names.rs
@@ -1,9 +1,9 @@
// compile-pass
-#![feature(underscore_const_names)]
+#![deny(unused)]
trait Trt {}
-struct Str {}
+pub struct Str {}
impl Trt for Str {}
macro_rules! check_impl {
@@ -17,7 +17,6 @@
}
}
-#[deny(unused)]
const _ : () = ();
const _ : i32 = 42;
diff --git a/src/test/ui/derived-errors/issue-31997.rs b/src/test/ui/derived-errors/issue-31997.rs
index 025e910..cfdee26 100644
--- a/src/test/ui/derived-errors/issue-31997.rs
+++ b/src/test/ui/derived-errors/issue-31997.rs
@@ -10,7 +10,7 @@
}
fn foo() -> Result<(), ()> {
- try!(closure(|| bar(0 as *mut _))); //~ ERROR cannot find function `bar` in this scope
+ try!(closure(|| bar(core::ptr::null_mut()))); //~ ERROR cannot find function `bar` in this scope
Ok(())
}
diff --git a/src/test/ui/derived-errors/issue-31997.stderr b/src/test/ui/derived-errors/issue-31997.stderr
index dbceba0..e9fe0d3 100644
--- a/src/test/ui/derived-errors/issue-31997.stderr
+++ b/src/test/ui/derived-errors/issue-31997.stderr
@@ -1,7 +1,7 @@
error[E0425]: cannot find function `bar` in this scope
--> $DIR/issue-31997.rs:13:21
|
-LL | try!(closure(|| bar(0 as *mut _)));
+LL | try!(closure(|| bar(core::ptr::null_mut())));
| ^^^ not found in this scope
error: aborting due to previous error
diff --git a/src/test/ui/error-codes/E0605.rs b/src/test/ui/error-codes/E0605.rs
index 0e86e36..cfbf1aa 100644
--- a/src/test/ui/error-codes/E0605.rs
+++ b/src/test/ui/error-codes/E0605.rs
@@ -2,6 +2,6 @@
let x = 0u8;
x as Vec<u8>; //~ ERROR E0605
- let v = 0 as *const u8;
+ let v = std::ptr::null::<u8>();
v as &u8; //~ ERROR E0605
}
diff --git a/src/test/ui/error-codes/E0607.rs b/src/test/ui/error-codes/E0607.rs
index ad9f870..65001c4 100644
--- a/src/test/ui/error-codes/E0607.rs
+++ b/src/test/ui/error-codes/E0607.rs
@@ -1,4 +1,4 @@
fn main() {
- let v = 0 as *const u8;
+ let v = core::ptr::null::<u8>();
v as *const [u8]; //~ ERROR E0607
}
diff --git a/src/test/ui/error-festival.rs b/src/test/ui/error-festival.rs
index e462824..356564e 100644
--- a/src/test/ui/error-festival.rs
+++ b/src/test/ui/error-festival.rs
@@ -37,7 +37,7 @@
let y: u32 = x as u32;
//~^ ERROR E0606
- let v = 0 as *const u8;
+ let v = core::ptr::null::<u8>();
v as *const [u8];
//~^ ERROR E0607
}
diff --git a/src/test/ui/explain.stdout b/src/test/ui/explain.stdout
index 411cdfb..9ea5627 100644
--- a/src/test/ui/explain.stdout
+++ b/src/test/ui/explain.stdout
@@ -53,7 +53,7 @@
let f: extern "C" fn(*mut i32) = transmute(foo as usize); // works too
```
-The same applies to transmutes to `*mut fn()`, which were observedin practice.
+The same applies to transmutes to `*mut fn()`, which were observed in practice.
Note though that use of this type is generally incorrect.
The intention is typically to describe a function pointer, but just `fn()`
alone suffices for that. `*mut fn()` is a pointer to a fn pointer.
diff --git a/src/test/ui/feature-gates/feature-gate-underscore_const_names.rs b/src/test/ui/feature-gates/feature-gate-underscore_const_names.rs
deleted file mode 100644
index 6b97c24..0000000
--- a/src/test/ui/feature-gates/feature-gate-underscore_const_names.rs
+++ /dev/null
@@ -1,14 +0,0 @@
-trait Trt {}
-struct Str {}
-
-impl Trt for Str {}
-
-const _ : () = {
-//~^ ERROR is unstable
- use std::marker::PhantomData;
- struct ImplementsTrait<T: Trt>(PhantomData<T>);
- let _ = ImplementsTrait::<Str>(PhantomData);
- ()
-};
-
-fn main() {}
diff --git a/src/test/ui/feature-gates/feature-gate-underscore_const_names.stderr b/src/test/ui/feature-gates/feature-gate-underscore_const_names.stderr
deleted file mode 100644
index 8d92542..0000000
--- a/src/test/ui/feature-gates/feature-gate-underscore_const_names.stderr
+++ /dev/null
@@ -1,18 +0,0 @@
-error[E0658]: naming constants with `_` is unstable
- --> $DIR/feature-gate-underscore_const_names.rs:6:1
- |
-LL | / const _ : () = {
-LL | |
-LL | | use std::marker::PhantomData;
-LL | | struct ImplementsTrait<T: Trt>(PhantomData<T>);
-LL | | let _ = ImplementsTrait::<Str>(PhantomData);
-LL | | ()
-LL | | };
- | |__^
- |
- = note: for more information, see https://github.com/rust-lang/rust/issues/54912
- = help: add #![feature(underscore_const_names)] to the crate attributes to enable
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/feature-gates/underscore_const_names_feature_gate.rs b/src/test/ui/feature-gates/underscore_const_names_feature_gate.rs
deleted file mode 100644
index e50bbf5..0000000
--- a/src/test/ui/feature-gates/underscore_const_names_feature_gate.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-const _: () = (); //~ ERROR is unstable
-
-fn main() {}
diff --git a/src/test/ui/feature-gates/underscore_const_names_feature_gate.stderr b/src/test/ui/feature-gates/underscore_const_names_feature_gate.stderr
deleted file mode 100644
index 0931145..0000000
--- a/src/test/ui/feature-gates/underscore_const_names_feature_gate.stderr
+++ /dev/null
@@ -1,12 +0,0 @@
-error[E0658]: naming constants with `_` is unstable
- --> $DIR/underscore_const_names_feature_gate.rs:1:1
- |
-LL | const _: () = ();
- | ^^^^^^^^^^^^^^^^^
- |
- = note: for more information, see https://github.com/rust-lang/rust/issues/54912
- = help: add #![feature(underscore_const_names)] to the crate attributes to enable
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/hygiene/no_implicit_prelude.rs b/src/test/ui/hygiene/no_implicit_prelude.rs
index 1cd05f4..890c830 100644
--- a/src/test/ui/hygiene/no_implicit_prelude.rs
+++ b/src/test/ui/hygiene/no_implicit_prelude.rs
@@ -13,7 +13,7 @@
}
fn f() {
::foo::m!();
- println!(); //~ ERROR cannot find macro `print!` in this scope
+ assert_eq!(0, 0); //~ ERROR cannot find macro `panic!` in this scope
}
}
diff --git a/src/test/ui/hygiene/no_implicit_prelude.stderr b/src/test/ui/hygiene/no_implicit_prelude.stderr
index dcb213f..737b375 100644
--- a/src/test/ui/hygiene/no_implicit_prelude.stderr
+++ b/src/test/ui/hygiene/no_implicit_prelude.stderr
@@ -7,11 +7,11 @@
LL | Vec::new();
| ^^^ use of undeclared type or module `Vec`
-error: cannot find macro `print!` in this scope
+error: cannot find macro `panic!` in this scope
--> $DIR/no_implicit_prelude.rs:16:9
|
-LL | println!();
- | ^^^^^^^^^^^
+LL | assert_eq!(0, 0);
+ | ^^^^^^^^^^^^^^^^^
|
= help: have you added the `#[macro_use]` on the module/import?
= note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
diff --git a/src/test/ui/imports/local-modularized-tricky-fail-1.rs b/src/test/ui/imports/local-modularized-tricky-fail-1.rs
index d1cb6b0..29e9b8e 100644
--- a/src/test/ui/imports/local-modularized-tricky-fail-1.rs
+++ b/src/test/ui/imports/local-modularized-tricky-fail-1.rs
@@ -33,7 +33,6 @@
fn main() {
panic!(); //~ ERROR `panic` is ambiguous
- //~| ERROR `panic` is ambiguous
}
mod inner3 {
diff --git a/src/test/ui/imports/local-modularized-tricky-fail-1.stderr b/src/test/ui/imports/local-modularized-tricky-fail-1.stderr
index d7ae8e6..13d3227 100644
--- a/src/test/ui/imports/local-modularized-tricky-fail-1.stderr
+++ b/src/test/ui/imports/local-modularized-tricky-fail-1.stderr
@@ -22,7 +22,7 @@
= help: consider adding an explicit import of `exported` to disambiguate
error[E0659]: `include` is ambiguous (macro-expanded name vs less macro-expanded name from outer scope during import/macro resolution)
- --> $DIR/local-modularized-tricky-fail-1.rs:47:1
+ --> $DIR/local-modularized-tricky-fail-1.rs:46:1
|
LL | include!();
| ^^^^^^^ ambiguous name
@@ -59,26 +59,6 @@
| ---------------- in this macro invocation
= help: use `crate::panic` to refer to this macro unambiguously
-error[E0659]: `panic` is ambiguous (macro-expanded name vs less macro-expanded name from outer scope during import/macro resolution)
- --> $DIR/local-modularized-tricky-fail-1.rs:35:5
- |
-LL | panic!();
- | ^^^^^^^^^ ambiguous name
- |
- = note: `panic` could refer to a macro from prelude
-note: `panic` could also refer to the macro defined here
- --> $DIR/local-modularized-tricky-fail-1.rs:11:5
- |
-LL | / macro_rules! panic {
-LL | | () => ()
-LL | | }
- | |_____^
-...
-LL | define_panic!();
- | ---------------- in this macro invocation
- = help: use `crate::panic` to refer to this macro unambiguously
- = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
-
-error: aborting due to 4 previous errors
+error: aborting due to 3 previous errors
For more information about this error, try `rustc --explain E0659`.
diff --git a/src/test/ui/in-band-lifetimes/issue-61124-anon-lifetime-in-struct-declaration.rs b/src/test/ui/in-band-lifetimes/issue-61124-anon-lifetime-in-struct-declaration.rs
new file mode 100644
index 0000000..cf08cb7
--- /dev/null
+++ b/src/test/ui/in-band-lifetimes/issue-61124-anon-lifetime-in-struct-declaration.rs
@@ -0,0 +1,10 @@
+#![deny(elided_lifetimes_in_paths)]
+
+// Previously, the elided-lifetimes-in-path lint would fire, but we don't want
+// that, because `'_` isn't legal in struct declarations.
+
+struct Betrayal<'a> { x: &'a u8 }
+
+struct Heartbreak(Betrayal); //~ ERROR missing lifetime specifier
+
+fn main() {}
diff --git a/src/test/ui/in-band-lifetimes/issue-61124-anon-lifetime-in-struct-declaration.stderr b/src/test/ui/in-band-lifetimes/issue-61124-anon-lifetime-in-struct-declaration.stderr
new file mode 100644
index 0000000..9579abb
--- /dev/null
+++ b/src/test/ui/in-band-lifetimes/issue-61124-anon-lifetime-in-struct-declaration.stderr
@@ -0,0 +1,9 @@
+error[E0106]: missing lifetime specifier
+ --> $DIR/issue-61124-anon-lifetime-in-struct-declaration.rs:8:19
+ |
+LL | struct Heartbreak(Betrayal);
+ | ^^^^^^^^ expected lifetime parameter
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0106`.
diff --git a/src/test/ui/issues/issue-17458.rs b/src/test/ui/issues/issue-17458.rs
index 444e94d..d56ffeb 100644
--- a/src/test/ui/issues/issue-17458.rs
+++ b/src/test/ui/issues/issue-17458.rs
@@ -1,4 +1,4 @@
-static X: usize = unsafe { 0 as *const usize as usize };
+static X: usize = unsafe { core::ptr::null::<usize>() as usize };
//~^ ERROR: casting pointers to integers in statics is unstable
fn main() {
diff --git a/src/test/ui/issues/issue-17458.stderr b/src/test/ui/issues/issue-17458.stderr
index 69b6ab7..d51d2f5 100644
--- a/src/test/ui/issues/issue-17458.stderr
+++ b/src/test/ui/issues/issue-17458.stderr
@@ -1,8 +1,8 @@
error[E0658]: casting pointers to integers in statics is unstable
--> $DIR/issue-17458.rs:1:28
|
-LL | static X: usize = unsafe { 0 as *const usize as usize };
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | static X: usize = unsafe { core::ptr::null::<usize>() as usize };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: for more information, see https://github.com/rust-lang/rust/issues/51910
= help: add #![feature(const_raw_ptr_to_usize_cast)] to the crate attributes to enable
diff --git a/src/test/ui/issues/issue-20801.rs b/src/test/ui/issues/issue-20801.rs
index 35d6f8c..c3f136f 100644
--- a/src/test/ui/issues/issue-20801.rs
+++ b/src/test/ui/issues/issue-20801.rs
@@ -15,11 +15,11 @@
}
fn mut_ptr() -> *mut T {
- unsafe { 0 as *mut T }
+ unsafe { core::ptr::null_mut() }
}
fn const_ptr() -> *const T {
- unsafe { 0 as *const T }
+ unsafe { core::ptr::null() }
}
pub fn main() {
diff --git a/src/test/ui/issues/issue-22034.rs b/src/test/ui/issues/issue-22034.rs
index 508c9c9..fab1cda 100644
--- a/src/test/ui/issues/issue-22034.rs
+++ b/src/test/ui/issues/issue-22034.rs
@@ -3,7 +3,7 @@
extern crate libc;
fn main() {
- let ptr: *mut () = 0 as *mut _;
+ let ptr: *mut () = core::ptr::null_mut();
let _: &mut dyn Fn() = unsafe {
&mut *(ptr as *mut dyn Fn())
//~^ ERROR expected a `std::ops::Fn<()>` closure, found `()`
diff --git a/src/test/ui/issues/issue-46843.rs b/src/test/ui/issues/issue-46843.rs
index a310de6..aa252ef 100644
--- a/src/test/ui/issues/issue-46843.rs
+++ b/src/test/ui/issues/issue-46843.rs
@@ -4,7 +4,9 @@
Thing::This
}
-pub const Q: i32 = match non_const() { //~ ERROR E0015
+pub const Q: i32 = match non_const() {
+ //~^ ERROR E0015
+ //~^^ ERROR unimplemented expression type
Thing::This => 1, //~ ERROR unimplemented expression type
Thing::That => 0
};
diff --git a/src/test/ui/issues/issue-46843.stderr b/src/test/ui/issues/issue-46843.stderr
index b7abf02..92ee154 100644
--- a/src/test/ui/issues/issue-46843.stderr
+++ b/src/test/ui/issues/issue-46843.stderr
@@ -5,12 +5,18 @@
| ^^^^^^^^^^^
error[E0019]: constant contains unimplemented expression type
- --> $DIR/issue-46843.rs:8:5
+ --> $DIR/issue-46843.rs:7:26
+ |
+LL | pub const Q: i32 = match non_const() {
+ | ^^^^^^^^^^^
+
+error[E0019]: constant contains unimplemented expression type
+ --> $DIR/issue-46843.rs:10:5
|
LL | Thing::This => 1,
| ^^^^^^^^^^^
-error: aborting due to 2 previous errors
+error: aborting due to 3 previous errors
Some errors have detailed explanations: E0015, E0019.
For more information about an error, try `rustc --explain E0015`.
diff --git a/src/test/ui/issues/issue-51301.rs b/src/test/ui/issues/issue-51301.rs
new file mode 100644
index 0000000..7e0a519
--- /dev/null
+++ b/src/test/ui/issues/issue-51301.rs
@@ -0,0 +1,35 @@
+use std::any::TypeId;
+use std::collections::HashMap;
+use std::hash::Hash;
+
+trait State {
+ type EventType;
+ fn get_type_id_of_state(&self) -> TypeId;
+}
+
+struct StateMachine<EventType: Hash + Eq> {
+ current_state: Box<dyn State<EventType = EventType>>,
+ transition_table:
+ HashMap<TypeId, HashMap<EventType, fn() -> Box<dyn State<EventType = EventType>>>>,
+}
+
+impl<EventType: Hash + Eq> StateMachine<EventType> {
+ fn inner_process_event(&mut self, event: EventType) -> Result<(), i8> {
+ let new_state_creation_function = self
+ .transition_table
+ .iter()
+ .find(|(&event_typeid, _)| event_typeid == self.current_state.get_type_id_of_state())
+ .ok_or(1)?
+ .1
+ .iter()
+ .find(|(&event_type, _)| event == event_type)
+ //~^ ERROR cannot move out of a shared reference
+ .ok_or(2)?
+ .1;
+
+ self.current_state = new_state_creation_function();
+ Ok(())
+ }
+}
+
+fn main() {}
diff --git a/src/test/ui/issues/issue-51301.stderr b/src/test/ui/issues/issue-51301.stderr
new file mode 100644
index 0000000..f3decf7
--- /dev/null
+++ b/src/test/ui/issues/issue-51301.stderr
@@ -0,0 +1,12 @@
+error[E0507]: cannot move out of a shared reference
+ --> $DIR/issue-51301.rs:25:20
+ |
+LL | .find(|(&event_type, _)| event == event_type)
+ | ^^----------^^^^
+ | |
+ | data moved here
+ | move occurs because `event_type` has type `EventType`, which does not implement the `Copy` trait
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0507`.
diff --git a/src/test/ui/issues/issue-61106.rs b/src/test/ui/issues/issue-61106.rs
new file mode 100644
index 0000000..308ef1d
--- /dev/null
+++ b/src/test/ui/issues/issue-61106.rs
@@ -0,0 +1,6 @@
+fn main() {
+ let x = String::new();
+ foo(x.clone()); //~ ERROR mismatched types
+}
+
+fn foo(_: &str) {}
diff --git a/src/test/ui/issues/issue-61106.stderr b/src/test/ui/issues/issue-61106.stderr
new file mode 100644
index 0000000..ca67d51
--- /dev/null
+++ b/src/test/ui/issues/issue-61106.stderr
@@ -0,0 +1,15 @@
+error[E0308]: mismatched types
+ --> $DIR/issue-61106.rs:3:9
+ |
+LL | foo(x.clone());
+ | ^^^^^^^^^
+ | |
+ | expected &str, found struct `std::string::String`
+ | help: consider borrowing here: `&x`
+ |
+ = note: expected type `&str`
+ found type `std::string::String`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0308`.
diff --git a/src/test/ui/lifetimes/lifetime-errors/liveness-assign-imm-local-notes.stderr b/src/test/ui/lifetimes/lifetime-errors/liveness-assign-imm-local-notes.stderr
index c646912..13b6a7b 100644
--- a/src/test/ui/lifetimes/lifetime-errors/liveness-assign-imm-local-notes.stderr
+++ b/src/test/ui/lifetimes/lifetime-errors/liveness-assign-imm-local-notes.stderr
@@ -28,6 +28,9 @@
...
LL | x = 1;
| ^^^^^ cannot assign twice to immutable variable
+LL | } else {
+LL | x = 2;
+ | ----- first assignment to `x`
error[E0384]: cannot assign twice to immutable variable `x`
--> $DIR/liveness-assign-imm-local-notes.rs:32:13
@@ -35,9 +38,6 @@
LL | let x;
| - help: make this binding mutable: `mut x`
...
-LL | x = 1;
- | ----- first assignment to `x`
-LL | } else {
LL | x = 2;
| ^^^^^ cannot assign twice to immutable variable
diff --git a/src/test/ui/mismatched_types/cast-rfc0401.rs b/src/test/ui/mismatched_types/cast-rfc0401.rs
index 2f88c64..b8d12fb 100644
--- a/src/test/ui/mismatched_types/cast-rfc0401.rs
+++ b/src/test/ui/mismatched_types/cast-rfc0401.rs
@@ -21,9 +21,9 @@
fn main()
{
let f: f32 = 1.2;
- let v = 0 as *const u8;
- let fat_v : *const [u8] = unsafe { &*(0 as *const [u8; 1])};
- let fat_sv : *const [i8] = unsafe { &*(0 as *const [i8; 1])};
+ let v = core::ptr::null::<u8>();
+ let fat_v : *const [u8] = unsafe { &*core::ptr::null::<[u8; 1]>()};
+ let fat_sv : *const [i8] = unsafe { &*core::ptr::null::<[i8; 1]>()};
let foo: &dyn Foo = &f;
let _ = v as &u8; //~ ERROR non-primitive cast
diff --git a/src/test/ui/parser/fn-arg-doc-comment.rs b/src/test/ui/parser/fn-arg-doc-comment.rs
index 22af94b..4a4f959 100644
--- a/src/test/ui/parser/fn-arg-doc-comment.rs
+++ b/src/test/ui/parser/fn-arg-doc-comment.rs
@@ -1,20 +1,20 @@
pub fn f(
/// Comment
- //~^ ERROR documentation comments cannot be applied to method arguments
+ //~^ ERROR documentation comments cannot be applied to function parameters
//~| NOTE doc comments are not allowed here
+ //~| ERROR attributes on function parameters are unstable
+ //~| NOTE https://github.com/rust-lang/rust/issues/60406
id: u8,
/// Other
- //~^ ERROR documentation comments cannot be applied to method arguments
+ //~^ ERROR documentation comments cannot be applied to function parameters
//~| NOTE doc comments are not allowed here
+ //~| ERROR attributes on function parameters are unstable
+ //~| NOTE https://github.com/rust-lang/rust/issues/60406
a: u8,
) {}
-fn foo(#[allow(dead_code)] id: i32) {}
-//~^ ERROR attributes cannot be applied to method arguments
-//~| NOTE attributes are not allowed here
-
fn bar(id: #[allow(dead_code)] i32) {}
-//~^ ERROR attributes cannot be applied to a method argument's type
+//~^ ERROR attributes cannot be applied to a function parameter's type
//~| NOTE attributes are not allowed here
fn main() {
@@ -26,10 +26,6 @@
//~| ERROR mismatched types
//~| NOTE expected u8, found reference
//~| NOTE expected
- foo("");
- //~^ ERROR mismatched types
- //~| NOTE expected i32, found reference
- //~| NOTE expected
bar("");
//~^ ERROR mismatched types
//~| NOTE expected i32, found reference
diff --git a/src/test/ui/parser/fn-arg-doc-comment.stderr b/src/test/ui/parser/fn-arg-doc-comment.stderr
index 73a24eeb..9058e88 100644
--- a/src/test/ui/parser/fn-arg-doc-comment.stderr
+++ b/src/test/ui/parser/fn-arg-doc-comment.stderr
@@ -1,26 +1,38 @@
-error: documentation comments cannot be applied to method arguments
+error: attributes cannot be applied to a function parameter's type
+ --> $DIR/fn-arg-doc-comment.rs:16:12
+ |
+LL | fn bar(id: #[allow(dead_code)] i32) {}
+ | ^^^^^^^^^^^^^^^^^^^ attributes are not allowed here
+
+error: documentation comments cannot be applied to function parameters
--> $DIR/fn-arg-doc-comment.rs:2:5
|
LL | /// Comment
| ^^^^^^^^^^^ doc comments are not allowed here
-error: documentation comments cannot be applied to method arguments
- --> $DIR/fn-arg-doc-comment.rs:6:5
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/fn-arg-doc-comment.rs:8:5
|
LL | /// Other
| ^^^^^^^^^ doc comments are not allowed here
-error: attributes cannot be applied to method arguments
- --> $DIR/fn-arg-doc-comment.rs:12:8
+error[E0658]: attributes on function parameters are unstable
+ --> $DIR/fn-arg-doc-comment.rs:2:5
|
-LL | fn foo(#[allow(dead_code)] id: i32) {}
- | ^^^^^^^^^^^^^^^^^^^ attributes are not allowed here
+LL | /// Comment
+ | ^^^^^^^^^^^
+ |
+ = note: for more information, see https://github.com/rust-lang/rust/issues/60406
+ = help: add #![feature(param_attrs)] to the crate attributes to enable
-error: attributes cannot be applied to a method argument's type
- --> $DIR/fn-arg-doc-comment.rs:16:12
+error[E0658]: attributes on function parameters are unstable
+ --> $DIR/fn-arg-doc-comment.rs:8:5
|
-LL | fn bar(id: #[allow(dead_code)] i32) {}
- | ^^^^^^^^^^^^^^^^^^^ attributes are not allowed here
+LL | /// Other
+ | ^^^^^^^^^
+ |
+ = note: for more information, see https://github.com/rust-lang/rust/issues/60406
+ = help: add #![feature(param_attrs)] to the crate attributes to enable
error[E0308]: mismatched types
--> $DIR/fn-arg-doc-comment.rs:22:7
@@ -43,15 +55,6 @@
error[E0308]: mismatched types
--> $DIR/fn-arg-doc-comment.rs:29:9
|
-LL | foo("");
- | ^^ expected i32, found reference
- |
- = note: expected type `i32`
- found type `&'static str`
-
-error[E0308]: mismatched types
- --> $DIR/fn-arg-doc-comment.rs:33:9
- |
LL | bar("");
| ^^ expected i32, found reference
|
@@ -60,4 +63,5 @@
error: aborting due to 8 previous errors
-For more information about this error, try `rustc --explain E0308`.
+Some errors have detailed explanations: E0308, E0658.
+For more information about an error, try `rustc --explain E0308`.
diff --git a/src/test/ui/parser/underscore_item_not_const.rs b/src/test/ui/parser/underscore_item_not_const.rs
new file mode 100644
index 0000000..375bdc3
--- /dev/null
+++ b/src/test/ui/parser/underscore_item_not_const.rs
@@ -0,0 +1,30 @@
+// Test that various non-const items and associated consts do not permit `_` as a name.
+
+// Associated `const`s:
+
+pub trait A {
+ const _: () = (); //~ ERROR expected identifier, found reserved identifier `_`
+}
+impl A for () {
+ const _: () = (); //~ ERROR expected identifier, found reserved identifier `_`
+}
+impl dyn A {
+ const _: () = (); //~ ERROR expected identifier, found reserved identifier `_`
+}
+
+// Other kinds of items:
+
+static _: () = (); //~ ERROR expected identifier, found reserved identifier `_`
+struct _(); //~ ERROR expected identifier, found reserved identifier `_`
+enum _ {} //~ ERROR expected identifier, found reserved identifier `_`
+fn _() {} //~ ERROR expected identifier, found reserved identifier `_`
+mod _ {} //~ ERROR expected identifier, found reserved identifier `_`
+type _ = (); //~ ERROR expected identifier, found reserved identifier `_`
+use _; //~ ERROR expected identifier, found reserved identifier `_`
+use _ as g; //~ ERROR expected identifier, found reserved identifier `_`
+trait _ {} //~ ERROR expected identifier, found reserved identifier `_`
+trait _ = Copy; //~ ERROR expected identifier, found reserved identifier `_`
+macro_rules! _ { () => {} } //~ ERROR expected identifier, found reserved identifier `_`
+union _ { f: u8 } //~ ERROR expected one of `!` or `::`, found `_`
+
+fn main() {}
diff --git a/src/test/ui/parser/underscore_item_not_const.stderr b/src/test/ui/parser/underscore_item_not_const.stderr
new file mode 100644
index 0000000..deb4a01
--- /dev/null
+++ b/src/test/ui/parser/underscore_item_not_const.stderr
@@ -0,0 +1,92 @@
+error: expected identifier, found reserved identifier `_`
+ --> $DIR/underscore_item_not_const.rs:6:11
+ |
+LL | const _: () = ();
+ | ^ expected identifier, found reserved identifier
+
+error: expected identifier, found reserved identifier `_`
+ --> $DIR/underscore_item_not_const.rs:9:11
+ |
+LL | const _: () = ();
+ | ^ expected identifier, found reserved identifier
+
+error: expected identifier, found reserved identifier `_`
+ --> $DIR/underscore_item_not_const.rs:12:11
+ |
+LL | const _: () = ();
+ | ^ expected identifier, found reserved identifier
+
+error: expected identifier, found reserved identifier `_`
+ --> $DIR/underscore_item_not_const.rs:17:8
+ |
+LL | static _: () = ();
+ | ^ expected identifier, found reserved identifier
+
+error: expected identifier, found reserved identifier `_`
+ --> $DIR/underscore_item_not_const.rs:18:8
+ |
+LL | struct _();
+ | ^ expected identifier, found reserved identifier
+
+error: expected identifier, found reserved identifier `_`
+ --> $DIR/underscore_item_not_const.rs:19:6
+ |
+LL | enum _ {}
+ | ^ expected identifier, found reserved identifier
+
+error: expected identifier, found reserved identifier `_`
+ --> $DIR/underscore_item_not_const.rs:20:4
+ |
+LL | fn _() {}
+ | ^ expected identifier, found reserved identifier
+
+error: expected identifier, found reserved identifier `_`
+ --> $DIR/underscore_item_not_const.rs:21:5
+ |
+LL | mod _ {}
+ | ^ expected identifier, found reserved identifier
+
+error: expected identifier, found reserved identifier `_`
+ --> $DIR/underscore_item_not_const.rs:22:6
+ |
+LL | type _ = ();
+ | ^ expected identifier, found reserved identifier
+
+error: expected identifier, found reserved identifier `_`
+ --> $DIR/underscore_item_not_const.rs:23:5
+ |
+LL | use _;
+ | ^ expected identifier, found reserved identifier
+
+error: expected identifier, found reserved identifier `_`
+ --> $DIR/underscore_item_not_const.rs:24:5
+ |
+LL | use _ as g;
+ | ^ expected identifier, found reserved identifier
+
+error: expected identifier, found reserved identifier `_`
+ --> $DIR/underscore_item_not_const.rs:25:7
+ |
+LL | trait _ {}
+ | ^ expected identifier, found reserved identifier
+
+error: expected identifier, found reserved identifier `_`
+ --> $DIR/underscore_item_not_const.rs:26:7
+ |
+LL | trait _ = Copy;
+ | ^ expected identifier, found reserved identifier
+
+error: expected identifier, found reserved identifier `_`
+ --> $DIR/underscore_item_not_const.rs:27:14
+ |
+LL | macro_rules! _ { () => {} }
+ | ^ expected identifier, found reserved identifier
+
+error: expected one of `!` or `::`, found `_`
+ --> $DIR/underscore_item_not_const.rs:28:7
+ |
+LL | union _ { f: u8 }
+ | ^ expected one of `!` or `::` here
+
+error: aborting due to 15 previous errors
+
diff --git a/src/test/ui/parser/underscore_static.rs b/src/test/ui/parser/underscore_static.rs
deleted file mode 100644
index 21d6a1b..0000000
--- a/src/test/ui/parser/underscore_static.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-static _: () = (); //~ ERROR expected identifier, found reserved identifier `_`
-
-fn main() {}
diff --git a/src/test/ui/parser/underscore_static.stderr b/src/test/ui/parser/underscore_static.stderr
deleted file mode 100644
index 4c41afd..0000000
--- a/src/test/ui/parser/underscore_static.stderr
+++ /dev/null
@@ -1,8 +0,0 @@
-error: expected identifier, found reserved identifier `_`
- --> $DIR/underscore_static.rs:1:8
- |
-LL | static _: () = ();
- | ^ expected identifier, found reserved identifier
-
-error: aborting due to previous error
-
diff --git a/src/test/ui/pattern/const-pat-ice.stderr b/src/test/ui/pattern/const-pat-ice.stderr
index c4f6e02..7ddeac4 100644
--- a/src/test/ui/pattern/const-pat-ice.stderr
+++ b/src/test/ui/pattern/const-pat-ice.stderr
@@ -1,4 +1,4 @@
-thread 'rustc' panicked at 'assertion failed: rows.iter().all(|r| r.len() == v.len())', src/librustc_mir/hair/pattern/_match.rs:1085:5
+thread 'rustc' panicked at 'assertion failed: rows.iter().all(|r| r.len() == v.len())', src/librustc_mir/hair/pattern/_match.rs:1084:5
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace.
error: internal compiler error: unexpected panic
diff --git a/src/test/ui/rfc-2565-param-attrs/param-attrs-2018.rs b/src/test/ui/rfc-2565-param-attrs/param-attrs-2018.rs
new file mode 100644
index 0000000..e900cca
--- /dev/null
+++ b/src/test/ui/rfc-2565-param-attrs/param-attrs-2018.rs
@@ -0,0 +1,8 @@
+// edition:2018
+
+#![feature(param_attrs)]
+
+trait Trait2015 { fn foo(#[allow(C)] i32); }
+//~^ ERROR expected one of `:` or `@`, found `)`
+
+fn main() {}
diff --git a/src/test/ui/rfc-2565-param-attrs/param-attrs-2018.stderr b/src/test/ui/rfc-2565-param-attrs/param-attrs-2018.stderr
new file mode 100644
index 0000000..d0ed65f
--- /dev/null
+++ b/src/test/ui/rfc-2565-param-attrs/param-attrs-2018.stderr
@@ -0,0 +1,18 @@
+error: expected one of `:` or `@`, found `)`
+ --> $DIR/param-attrs-2018.rs:5:41
+ |
+LL | trait Trait2015 { fn foo(#[allow(C)] i32); }
+ | ^ expected one of `:` or `@` here
+ |
+ = note: anonymous parameters are removed in the 2018 edition (see RFC 1685)
+help: if this was a parameter name, give it a type
+ |
+LL | trait Trait2015 { fn foo(#[allow(C)] i32: TypeName); }
+ | ^^^^^^^^^^^^^
+help: if this is a type, explicitly ignore the parameter name
+ |
+LL | trait Trait2015 { fn foo(#[allow(C)] _: i32); }
+ | ^^^^^^
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/rfc-2565-param-attrs/param-attrs-allowed.rs b/src/test/ui/rfc-2565-param-attrs/param-attrs-allowed.rs
new file mode 100644
index 0000000..c521d04
--- /dev/null
+++ b/src/test/ui/rfc-2565-param-attrs/param-attrs-allowed.rs
@@ -0,0 +1,225 @@
+// compile-flags: --cfg something
+// compile-pass
+
+#![feature(param_attrs)]
+
+extern "C" {
+ fn ffi(
+ #[allow(C)] a: i32,
+ #[cfg(something)] b: i32,
+ #[cfg_attr(something, cfg(nothing))] c: i32,
+ #[deny(C)] d: i32,
+ #[forbid(C)] #[warn(C)] ...
+ );
+}
+
+type FnType = fn(
+ #[allow(C)] a: i32,
+ #[cfg(something)] b: i32,
+ #[cfg_attr(something, cfg(nothing))] c: i32,
+ #[deny(C)] d: i32,
+ #[forbid(C)] #[warn(C)] e: i32
+);
+
+pub fn foo(
+ #[allow(C)] a: i32,
+ #[cfg(something)] b: i32,
+ #[cfg_attr(something, cfg(nothing))] c: i32,
+ #[deny(C)] d: i32,
+ #[forbid(C)] #[warn(C)] e: i32
+) {}
+
+// self, &self and &mut self
+
+struct SelfStruct {}
+impl SelfStruct {
+ fn foo(
+ #[allow(C)] self,
+ #[cfg(something)] a: i32,
+ #[cfg_attr(something, cfg(nothing))]
+ #[deny(C)] b: i32,
+ ) {}
+}
+
+struct RefStruct {}
+impl RefStruct {
+ fn foo(
+ #[allow(C)] &self,
+ #[cfg(something)] a: i32,
+ #[cfg_attr(something, cfg(nothing))]
+ #[deny(C)] b: i32,
+ ) {}
+}
+trait RefTrait {
+ fn foo(
+ #[forbid(C)] &self,
+ #[warn(C)] a: i32
+ ) {}
+}
+impl RefTrait for RefStruct {
+ fn foo(
+ #[forbid(C)] &self,
+ #[warn(C)] a: i32
+ ) {}
+}
+
+struct MutStruct {}
+impl MutStruct {
+ fn foo(
+ #[allow(C)] &mut self,
+ #[cfg(something)] a: i32,
+ #[cfg_attr(something, cfg(nothing))]
+ #[deny(C)] b: i32,
+ ) {}
+}
+trait MutTrait {
+ fn foo(
+ #[forbid(C)] &mut self,
+ #[warn(C)] a: i32
+ ) {}
+}
+impl MutTrait for MutStruct {
+ fn foo(
+ #[forbid(C)] &mut self,
+ #[warn(C)] a: i32
+ ) {}
+}
+
+// self: Self, self: &Self and self: &mut Self
+
+struct NamedSelfSelfStruct {}
+impl NamedSelfSelfStruct {
+ fn foo(
+ #[allow(C)] self: Self,
+ #[cfg(something)] a: i32,
+ #[cfg_attr(something, cfg(nothing))]
+ #[deny(C)] b: i32,
+ ) {}
+}
+
+struct NamedSelfRefStruct {}
+impl NamedSelfRefStruct {
+ fn foo(
+ #[allow(C)] self: &Self,
+ #[cfg(something)] a: i32,
+ #[cfg_attr(something, cfg(nothing))]
+ #[deny(C)] b: i32,
+ ) {}
+}
+trait NamedSelfRefTrait {
+ fn foo(
+ #[forbid(C)] self: &Self,
+ #[warn(C)] a: i32
+ ) {}
+}
+impl NamedSelfRefTrait for NamedSelfRefStruct {
+ fn foo(
+ #[forbid(C)] self: &Self,
+ #[warn(C)] a: i32
+ ) {}
+}
+
+struct NamedSelfMutStruct {}
+impl NamedSelfMutStruct {
+ fn foo(
+ #[allow(C)] self: &mut Self,
+ #[cfg(something)] a: i32,
+ #[cfg_attr(something, cfg(nothing))]
+ #[deny(C)] b: i32,
+ ) {}
+}
+trait NamedSelfMutTrait {
+ fn foo(
+ #[forbid(C)] self: &mut Self,
+ #[warn(C)] a: i32
+ ) {}
+}
+impl NamedSelfMutTrait for NamedSelfMutStruct {
+ fn foo(
+ #[forbid(C)] self: &mut Self,
+ #[warn(C)] a: i32
+ ) {}
+}
+
+// &'a self and &'a mut self
+
+struct NamedLifetimeRefStruct {}
+impl NamedLifetimeRefStruct {
+ fn foo<'a>(
+ #[allow(C)] self: &'a Self,
+ #[cfg(something)] a: i32,
+ #[cfg_attr(something, cfg(nothing))]
+ #[deny(C)] b: i32,
+ ) {}
+}
+trait NamedLifetimeRefTrait {
+ fn foo<'a>(
+ #[forbid(C)] &'a self,
+ #[warn(C)] a: i32
+ ) {}
+}
+impl NamedLifetimeRefTrait for NamedLifetimeRefStruct {
+ fn foo<'a>(
+ #[forbid(C)] &'a self,
+ #[warn(C)] a: i32
+ ) {}
+}
+
+struct NamedLifetimeMutStruct {}
+impl NamedLifetimeMutStruct {
+ fn foo<'a>(
+ #[allow(C)] self: &'a mut Self,
+ #[cfg(something)] a: i32,
+ #[cfg_attr(something, cfg(nothing))]
+ #[deny(C)] b: i32,
+ ) {}
+}
+trait NamedLifetimeMutTrait {
+ fn foo<'a>(
+ #[forbid(C)] &'a mut self,
+ #[warn(C)] a: i32
+ ) {}
+}
+impl NamedLifetimeMutTrait for NamedLifetimeMutStruct {
+ fn foo<'a>(
+ #[forbid(C)] &'a mut self,
+ #[warn(C)] a: i32
+ ) {}
+}
+
+// Box<Self>
+
+struct BoxSelfStruct {}
+impl BoxSelfStruct {
+ fn foo(
+ #[allow(C)] self: Box<Self>,
+ #[cfg(something)] a: i32,
+ #[cfg_attr(something, cfg(nothing))]
+ #[deny(C)] b: i32,
+ ) {}
+}
+trait BoxSelfTrait {
+ fn foo(
+ #[forbid(C)] self: Box<Self>,
+ #[warn(C)] a: i32
+ ) {}
+}
+impl BoxSelfTrait for BoxSelfStruct {
+ fn foo(
+ #[forbid(C)] self: Box<Self>,
+ #[warn(C)] a: i32
+ ) {}
+}
+
+fn main() {
+ let _: unsafe extern "C" fn(_, _, _, ...) = ffi;
+ let _: fn(_, _, _, _) = foo;
+ let _: FnType = |_, _, _, _| {};
+ let c = |
+ #[allow(C)] a: u32,
+ #[cfg(something)] b: i32,
+ #[cfg_attr(something, cfg(nothing))]
+ #[deny(C)] c: i32,
+ | {};
+ let _ = c(1, 2);
+}
diff --git a/src/test/ui/rfc-2565-param-attrs/param-attrs-builtin-attrs.rs b/src/test/ui/rfc-2565-param-attrs/param-attrs-builtin-attrs.rs
new file mode 100644
index 0000000..3523757
--- /dev/null
+++ b/src/test/ui/rfc-2565-param-attrs/param-attrs-builtin-attrs.rs
@@ -0,0 +1,145 @@
+#![feature(param_attrs)]
+
+extern "C" {
+ fn ffi(
+ /// Foo
+ //~^ ERROR documentation comments cannot be applied to function
+ #[test] a: i32,
+ //~^ ERROR The attribute `test` is currently unknown to the compiler and may have
+ /// Bar
+ //~^ ERROR documentation comments cannot be applied to function
+ #[must_use]
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+ /// Baz
+ //~^ ERROR documentation comments cannot be applied to function
+ #[no_mangle] b: i32,
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+ );
+}
+
+type FnType = fn(
+ /// Foo
+ //~^ ERROR documentation comments cannot be applied to function
+ #[test] a: u32,
+ //~^ ERROR The attribute `test` is currently unknown to the compiler and may have
+ /// Bar
+ //~^ ERROR documentation comments cannot be applied to function
+ #[must_use]
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+ /// Baz
+ //~^ ERROR documentation comments cannot be applied to function
+ #[no_mangle] b: i32,
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+);
+
+pub fn foo(
+ /// Foo
+ //~^ ERROR documentation comments cannot be applied to function
+ #[test] a: u32,
+ //~^ ERROR The attribute `test` is currently unknown to the compiler and may have
+ /// Bar
+ //~^ ERROR documentation comments cannot be applied to function
+ #[must_use]
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+ /// Baz
+ //~^ ERROR documentation comments cannot be applied to function
+ #[no_mangle] b: i32,
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+) {}
+
+struct SelfStruct {}
+impl SelfStruct {
+ fn foo(
+ /// Foo
+ //~^ ERROR documentation comments cannot be applied to function
+ self,
+ /// Bar
+ //~^ ERROR documentation comments cannot be applied to function
+ #[test] a: i32,
+ //~^ ERROR The attribute `test` is currently unknown to the compiler and may have
+ /// Baz
+ //~^ ERROR documentation comments cannot be applied to function
+ #[must_use]
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+ /// Qux
+ //~^ ERROR documentation comments cannot be applied to function
+ #[no_mangle] b: i32,
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+ ) {}
+}
+
+struct RefStruct {}
+impl RefStruct {
+ fn foo(
+ /// Foo
+ //~^ ERROR documentation comments cannot be applied to function
+ &self,
+ /// Bar
+ //~^ ERROR documentation comments cannot be applied to function
+ #[test] a: i32,
+ //~^ ERROR The attribute `test` is currently unknown to the compiler and may have
+ /// Baz
+ //~^ ERROR documentation comments cannot be applied to function
+ #[must_use]
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+ /// Qux
+ //~^ ERROR documentation comments cannot be applied to function
+ #[no_mangle] b: i32,
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+ ) {}
+}
+trait RefTrait {
+ fn foo(
+ /// Foo
+ //~^ ERROR documentation comments cannot be applied to function
+ &self,
+ /// Bar
+ //~^ ERROR documentation comments cannot be applied to function
+ #[test] a: i32,
+ //~^ ERROR The attribute `test` is currently unknown to the compiler and may have
+ /// Baz
+ //~^ ERROR documentation comments cannot be applied to function
+ #[must_use]
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+ /// Qux
+ //~^ ERROR documentation comments cannot be applied to function
+ #[no_mangle] b: i32,
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+ ) {}
+}
+impl RefTrait for RefStruct {
+ fn foo(
+ /// Foo
+ //~^ ERROR documentation comments cannot be applied to function
+ &self,
+ /// Bar
+ //~^ ERROR documentation comments cannot be applied to function
+ #[test] a: i32,
+ //~^ ERROR The attribute `test` is currently unknown to the compiler and may have
+ /// Baz
+ //~^ ERROR documentation comments cannot be applied to function
+ #[must_use]
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+ /// Qux
+ //~^ ERROR documentation comments cannot be applied to function
+ #[no_mangle] b: i32,
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+ ) {}
+}
+
+fn main() {
+ let _ = |
+ /// Foo
+ //~^ ERROR documentation comments cannot be applied to function
+ #[test] a: u32,
+ //~^ ERROR The attribute `test` is currently unknown to the compiler and may have
+ /// Bar
+ //~^ ERROR documentation comments cannot be applied to function
+ #[must_use]
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+ /// Baz
+ //~^ ERROR documentation comments cannot be applied to function
+ #[no_mangle] b: i32
+ //~^ ERROR allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in
+ | {};
+}
diff --git a/src/test/ui/rfc-2565-param-attrs/param-attrs-builtin-attrs.stderr b/src/test/ui/rfc-2565-param-attrs/param-attrs-builtin-attrs.stderr
new file mode 100644
index 0000000..e6f3efc
--- /dev/null
+++ b/src/test/ui/rfc-2565-param-attrs/param-attrs-builtin-attrs.stderr
@@ -0,0 +1,339 @@
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:5:9
+ |
+LL | /// Foo
+ | ^^^^^^^ doc comments are not allowed here
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:9:9
+ |
+LL | /// Bar
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:11:9
+ |
+LL | #[must_use]
+ | ^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:13:9
+ |
+LL | /// Baz
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:15:9
+ |
+LL | #[no_mangle] b: i32,
+ | ^^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:21:5
+ |
+LL | /// Foo
+ | ^^^^^^^ doc comments are not allowed here
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:25:5
+ |
+LL | /// Bar
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:27:5
+ |
+LL | #[must_use]
+ | ^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:29:5
+ |
+LL | /// Baz
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:31:5
+ |
+LL | #[no_mangle] b: i32,
+ | ^^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:36:5
+ |
+LL | /// Foo
+ | ^^^^^^^ doc comments are not allowed here
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:40:5
+ |
+LL | /// Bar
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:42:5
+ |
+LL | #[must_use]
+ | ^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:44:5
+ |
+LL | /// Baz
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:46:5
+ |
+LL | #[no_mangle] b: i32,
+ | ^^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:53:9
+ |
+LL | /// Foo
+ | ^^^^^^^ doc comments are not allowed here
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:56:9
+ |
+LL | /// Bar
+ | ^^^^^^^ doc comments are not allowed here
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:60:9
+ |
+LL | /// Baz
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:62:9
+ |
+LL | #[must_use]
+ | ^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:64:9
+ |
+LL | /// Qux
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:66:9
+ |
+LL | #[no_mangle] b: i32,
+ | ^^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:74:9
+ |
+LL | /// Foo
+ | ^^^^^^^ doc comments are not allowed here
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:77:9
+ |
+LL | /// Bar
+ | ^^^^^^^ doc comments are not allowed here
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:81:9
+ |
+LL | /// Baz
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:83:9
+ |
+LL | #[must_use]
+ | ^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:85:9
+ |
+LL | /// Qux
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:87:9
+ |
+LL | #[no_mangle] b: i32,
+ | ^^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:93:9
+ |
+LL | /// Foo
+ | ^^^^^^^ doc comments are not allowed here
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:96:9
+ |
+LL | /// Bar
+ | ^^^^^^^ doc comments are not allowed here
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:100:9
+ |
+LL | /// Baz
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:102:9
+ |
+LL | #[must_use]
+ | ^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:104:9
+ |
+LL | /// Qux
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:106:9
+ |
+LL | #[no_mangle] b: i32,
+ | ^^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:112:9
+ |
+LL | /// Foo
+ | ^^^^^^^ doc comments are not allowed here
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:115:9
+ |
+LL | /// Bar
+ | ^^^^^^^ doc comments are not allowed here
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:119:9
+ |
+LL | /// Baz
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:121:9
+ |
+LL | #[must_use]
+ | ^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:123:9
+ |
+LL | /// Qux
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:125:9
+ |
+LL | #[no_mangle] b: i32,
+ | ^^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:132:9
+ |
+LL | /// Foo
+ | ^^^^^^^ doc comments are not allowed here
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:136:9
+ |
+LL | /// Bar
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:138:9
+ |
+LL | #[must_use]
+ | ^^^^^^^^^^^
+
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:140:9
+ |
+LL | /// Baz
+ | ^^^^^^^ doc comments are not allowed here
+
+error: allow, cfg, cfg_attr, deny, forbid, and warn are the only allowed built-in attributes in function parameters
+ --> $DIR/param-attrs-builtin-attrs.rs:142:9
+ |
+LL | #[no_mangle] b: i32
+ | ^^^^^^^^^^^^
+
+error[E0658]: The attribute `test` is currently unknown to the compiler and may have meaning added to it in the future
+ --> $DIR/param-attrs-builtin-attrs.rs:7:9
+ |
+LL | #[test] a: i32,
+ | ^^^^^^^
+ |
+ = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+ = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error[E0658]: The attribute `test` is currently unknown to the compiler and may have meaning added to it in the future
+ --> $DIR/param-attrs-builtin-attrs.rs:23:5
+ |
+LL | #[test] a: u32,
+ | ^^^^^^^
+ |
+ = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+ = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error[E0658]: The attribute `test` is currently unknown to the compiler and may have meaning added to it in the future
+ --> $DIR/param-attrs-builtin-attrs.rs:38:5
+ |
+LL | #[test] a: u32,
+ | ^^^^^^^
+ |
+ = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+ = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error[E0658]: The attribute `test` is currently unknown to the compiler and may have meaning added to it in the future
+ --> $DIR/param-attrs-builtin-attrs.rs:58:9
+ |
+LL | #[test] a: i32,
+ | ^^^^^^^
+ |
+ = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+ = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error[E0658]: The attribute `test` is currently unknown to the compiler and may have meaning added to it in the future
+ --> $DIR/param-attrs-builtin-attrs.rs:79:9
+ |
+LL | #[test] a: i32,
+ | ^^^^^^^
+ |
+ = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+ = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error[E0658]: The attribute `test` is currently unknown to the compiler and may have meaning added to it in the future
+ --> $DIR/param-attrs-builtin-attrs.rs:98:9
+ |
+LL | #[test] a: i32,
+ | ^^^^^^^
+ |
+ = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+ = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error[E0658]: The attribute `test` is currently unknown to the compiler and may have meaning added to it in the future
+ --> $DIR/param-attrs-builtin-attrs.rs:117:9
+ |
+LL | #[test] a: i32,
+ | ^^^^^^^
+ |
+ = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+ = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error[E0658]: The attribute `test` is currently unknown to the compiler and may have meaning added to it in the future
+ --> $DIR/param-attrs-builtin-attrs.rs:134:9
+ |
+LL | #[test] a: u32,
+ | ^^^^^^^
+ |
+ = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+ = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error: aborting due to 52 previous errors
+
+For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/rfc-2565-param-attrs/param-attrs-cfg.rs b/src/test/ui/rfc-2565-param-attrs/param-attrs-cfg.rs
new file mode 100644
index 0000000..977b5d9
--- /dev/null
+++ b/src/test/ui/rfc-2565-param-attrs/param-attrs-cfg.rs
@@ -0,0 +1,79 @@
+// compile-flags: --cfg something
+
+#![feature(param_attrs)]
+#![deny(unused_variables)]
+
+extern "C" {
+ fn ffi(
+ #[cfg(nothing)] a: i32,
+ #[cfg(something)] b: i32,
+ #[cfg_attr(something, cfg(nothing))] c: i32,
+ #[cfg_attr(nothing, cfg(nothing))] ...
+ );
+}
+
+type FnType = fn(
+ #[cfg(nothing)] a: i32,
+ #[cfg(something)] b: i32,
+ #[cfg_attr(nothing, cfg(nothing))] c: i32,
+ #[cfg_attr(something, cfg(nothing))] d: i32,
+);
+
+fn foo(
+ #[cfg(nothing)] a: i32,
+ #[cfg(something)] b: i32,
+ //~^ ERROR unused variable: `b` [unused_variables]
+ #[cfg_attr(nothing, cfg(nothing))] c: i32,
+ //~^ ERROR unused variable: `c` [unused_variables]
+ #[cfg_attr(something, cfg(nothing))] d: i32,
+) {}
+
+struct RefStruct {}
+impl RefStruct {
+ fn bar(
+ &self,
+ #[cfg(nothing)] a: i32,
+ #[cfg(something)] b: i32,
+ //~^ ERROR unused variable: `b` [unused_variables]
+ #[cfg_attr(nothing, cfg(nothing))] c: i32,
+ //~^ ERROR unused variable: `c` [unused_variables]
+ #[cfg_attr(something, cfg(nothing))] d: i32,
+ ) {}
+}
+trait RefTrait {
+ fn bar(
+ &self,
+ #[cfg(nothing)] a: i32,
+ #[cfg(something)] b: i32,
+ //~^ ERROR unused variable: `b` [unused_variables]
+ #[cfg_attr(nothing, cfg(nothing))] c: i32,
+ //~^ ERROR unused variable: `c` [unused_variables]
+ #[cfg_attr(something, cfg(nothing))] d: i32,
+ ) {}
+}
+impl RefTrait for RefStruct {
+ fn bar(
+ &self,
+ #[cfg(nothing)] a: i32,
+ #[cfg(something)] b: i32,
+ //~^ ERROR unused variable: `b` [unused_variables]
+ #[cfg_attr(nothing, cfg(nothing))] c: i32,
+ //~^ ERROR unused variable: `c` [unused_variables]
+ #[cfg_attr(something, cfg(nothing))] d: i32,
+ ) {}
+}
+
+fn main() {
+ let _: unsafe extern "C" fn(_, ...) = ffi;
+ let _: fn(_, _) = foo;
+ let _: FnType = |_, _| {};
+ let c = |
+ #[cfg(nothing)] a: i32,
+ #[cfg(something)] b: i32,
+ //~^ ERROR unused variable: `b` [unused_variables]
+ #[cfg_attr(nothing, cfg(nothing))] c: i32,
+ //~^ ERROR unused variable: `c` [unused_variables]
+ #[cfg_attr(something, cfg(nothing))] d: i32,
+ | {};
+ let _ = c(1, 2);
+}
diff --git a/src/test/ui/rfc-2565-param-attrs/param-attrs-cfg.stderr b/src/test/ui/rfc-2565-param-attrs/param-attrs-cfg.stderr
new file mode 100644
index 0000000..c9719032
--- /dev/null
+++ b/src/test/ui/rfc-2565-param-attrs/param-attrs-cfg.stderr
@@ -0,0 +1,68 @@
+error: unused variable: `b`
+ --> $DIR/param-attrs-cfg.rs:24:23
+ |
+LL | #[cfg(something)] b: i32,
+ | ^ help: consider prefixing with an underscore: `_b`
+ |
+note: lint level defined here
+ --> $DIR/param-attrs-cfg.rs:4:9
+ |
+LL | #![deny(unused_variables)]
+ | ^^^^^^^^^^^^^^^^
+
+error: unused variable: `c`
+ --> $DIR/param-attrs-cfg.rs:26:40
+ |
+LL | #[cfg_attr(nothing, cfg(nothing))] c: i32,
+ | ^ help: consider prefixing with an underscore: `_c`
+
+error: unused variable: `b`
+ --> $DIR/param-attrs-cfg.rs:72:27
+ |
+LL | #[cfg(something)] b: i32,
+ | ^ help: consider prefixing with an underscore: `_b`
+
+error: unused variable: `c`
+ --> $DIR/param-attrs-cfg.rs:74:44
+ |
+LL | #[cfg_attr(nothing, cfg(nothing))] c: i32,
+ | ^ help: consider prefixing with an underscore: `_c`
+
+error: unused variable: `b`
+ --> $DIR/param-attrs-cfg.rs:47:27
+ |
+LL | #[cfg(something)] b: i32,
+ | ^ help: consider prefixing with an underscore: `_b`
+
+error: unused variable: `c`
+ --> $DIR/param-attrs-cfg.rs:49:44
+ |
+LL | #[cfg_attr(nothing, cfg(nothing))] c: i32,
+ | ^ help: consider prefixing with an underscore: `_c`
+
+error: unused variable: `b`
+ --> $DIR/param-attrs-cfg.rs:36:27
+ |
+LL | #[cfg(something)] b: i32,
+ | ^ help: consider prefixing with an underscore: `_b`
+
+error: unused variable: `c`
+ --> $DIR/param-attrs-cfg.rs:38:44
+ |
+LL | #[cfg_attr(nothing, cfg(nothing))] c: i32,
+ | ^ help: consider prefixing with an underscore: `_c`
+
+error: unused variable: `b`
+ --> $DIR/param-attrs-cfg.rs:58:27
+ |
+LL | #[cfg(something)] b: i32,
+ | ^ help: consider prefixing with an underscore: `_b`
+
+error: unused variable: `c`
+ --> $DIR/param-attrs-cfg.rs:60:44
+ |
+LL | #[cfg_attr(nothing, cfg(nothing))] c: i32,
+ | ^ help: consider prefixing with an underscore: `_c`
+
+error: aborting due to 10 previous errors
+
diff --git a/src/test/ui/rfc-2565-param-attrs/param-attrs-feature-gate.rs b/src/test/ui/rfc-2565-param-attrs/param-attrs-feature-gate.rs
new file mode 100644
index 0000000..c5a6514
--- /dev/null
+++ b/src/test/ui/rfc-2565-param-attrs/param-attrs-feature-gate.rs
@@ -0,0 +1,14 @@
+// gate-test-param_attrs
+
+fn foo(
+ /// Foo
+ //~^ ERROR documentation comments cannot be applied to function parameters
+ //~| NOTE doc comments are not allowed here
+ //~| ERROR attributes on function parameters are unstable
+ //~| NOTE https://github.com/rust-lang/rust/issues/60406
+ #[allow(C)] a: u8
+ //~^ ERROR attributes on function parameters are unstable
+ //~| NOTE https://github.com/rust-lang/rust/issues/60406
+) {}
+
+fn main() {}
diff --git a/src/test/ui/rfc-2565-param-attrs/param-attrs-feature-gate.stderr b/src/test/ui/rfc-2565-param-attrs/param-attrs-feature-gate.stderr
new file mode 100644
index 0000000..82f21e7
--- /dev/null
+++ b/src/test/ui/rfc-2565-param-attrs/param-attrs-feature-gate.stderr
@@ -0,0 +1,27 @@
+error: documentation comments cannot be applied to function parameters
+ --> $DIR/param-attrs-feature-gate.rs:4:5
+ |
+LL | /// Foo
+ | ^^^^^^^ doc comments are not allowed here
+
+error[E0658]: attributes on function parameters are unstable
+ --> $DIR/param-attrs-feature-gate.rs:4:5
+ |
+LL | /// Foo
+ | ^^^^^^^
+ |
+ = note: for more information, see https://github.com/rust-lang/rust/issues/60406
+ = help: add #![feature(param_attrs)] to the crate attributes to enable
+
+error[E0658]: attributes on function parameters are unstable
+ --> $DIR/param-attrs-feature-gate.rs:9:5
+ |
+LL | #[allow(C)] a: u8
+ | ^^^^^^^^^^^
+ |
+ = note: for more information, see https://github.com/rust-lang/rust/issues/60406
+ = help: add #![feature(param_attrs)] to the crate attributes to enable
+
+error: aborting due to 3 previous errors
+
+For more information about this error, try `rustc --explain E0658`.
diff --git a/src/test/ui/single-use-lifetime/one-use-in-struct.rs b/src/test/ui/single-use-lifetime/one-use-in-struct.rs
index e0328c9..6c4d2a4 100644
--- a/src/test/ui/single-use-lifetime/one-use-in-struct.rs
+++ b/src/test/ui/single-use-lifetime/one-use-in-struct.rs
@@ -18,4 +18,11 @@
trait Baz<'f> { }
+// `Derive`d impls shouldn't trigger a warning, either (Issue #53738).
+
+#[derive(Debug)]
+struct Quux<'a> {
+ priors: &'a u32,
+}
+
fn main() { }
diff --git a/src/test/ui/type-alias-enum-variants/issue-61801-path-pattern-can-infer.rs b/src/test/ui/type-alias-enum-variants/issue-61801-path-pattern-can-infer.rs
new file mode 100644
index 0000000..21be61a
--- /dev/null
+++ b/src/test/ui/type-alias-enum-variants/issue-61801-path-pattern-can-infer.rs
@@ -0,0 +1,30 @@
+// In this regression test we check that a path pattern referring to a unit variant
+// through a type alias is successful in inferring the generic argument.
+
+// compile-pass
+
+#![feature(type_alias_enum_variants)]
+
+enum Opt<T> {
+ N,
+ S(T),
+}
+
+type OptAlias<T> = Opt<T>;
+
+fn f1(x: OptAlias<u8>) {
+ match x {
+ OptAlias::N // We previously failed to infer `T` to `u8`.
+ => (),
+ _ => (),
+ }
+
+ match x {
+ <
+ OptAlias<_> // And we failed to infer this type also.
+ >::N => (),
+ _ => (),
+ }
+}
+
+fn main() {}
diff --git a/src/tools/build-manifest/src/main.rs b/src/tools/build-manifest/src/main.rs
index 5efd51b..b6e087c3 100644
--- a/src/tools/build-manifest/src/main.rs
+++ b/src/tools/build-manifest/src/main.rs
@@ -118,6 +118,7 @@
"x86_64-pc-windows-msvc",
"x86_64-rumprun-netbsd",
"x86_64-sun-solaris",
+ "x86_64-pc-solaris",
"x86_64-unknown-cloudabi",
"x86_64-unknown-freebsd",
"x86_64-unknown-linux-gnu",
diff --git a/src/tools/cargo b/src/tools/cargo
index 545f354..807429e 160000
--- a/src/tools/cargo
+++ b/src/tools/cargo
@@ -1 +1 @@
-Subproject commit 545f354259be4e9745ea00a524c0e4c51df01aa6
+Subproject commit 807429e1b6da4e2ec52488ef2f59e77068c31e1f
diff --git a/src/tools/clippy b/src/tools/clippy
index c0dbd34..be5d17f 160000
--- a/src/tools/clippy
+++ b/src/tools/clippy
@@ -1 +1 @@
-Subproject commit c0dbd34ba99a949ece25c297a4a377685eb89c7c
+Subproject commit be5d17feb20534d503c49722beecf5501d5d9c3a
diff --git a/src/tools/miri b/src/tools/miri
index e1a0f66..fd0dccd 160000
--- a/src/tools/miri
+++ b/src/tools/miri
@@ -1 +1 @@
-Subproject commit e1a0f66373a1a185334a6e3be24e94161e3b4a43
+Subproject commit fd0dccd4b12169e0aac42aff8addbb26b6d72197
diff --git a/src/tools/publish_toolstate.py b/src/tools/publish_toolstate.py
index 7d359fd..a777279 100755
--- a/src/tools/publish_toolstate.py
+++ b/src/tools/publish_toolstate.py
@@ -3,6 +3,7 @@
import sys
import re
+import os
import json
import datetime
import collections
@@ -53,6 +54,14 @@
return json.loads(status)
return {}
+def gh_url():
+ return os.environ['TOOLSTATE_ISSUES_API_URL']
+
+def maybe_delink(message):
+ if os.environ.get('TOOLSTATE_SKIP_MENTIONS') is not None:
+ return message.replace("@", "")
+ return message
+
def issue(
tool,
maintainers,
@@ -61,13 +70,12 @@
pr_reviewer,
):
# Open an issue about the toolstate failure.
- gh_url = 'https://api.github.com/repos/rust-lang/rust/issues'
assignees = [x.strip() for x in maintainers.split('@') if x != '']
assignees.append(relevant_pr_user)
response = urllib2.urlopen(urllib2.Request(
- gh_url,
+ gh_url(),
json.dumps({
- 'body': textwrap.dedent('''\
+ 'body': maybe_delink(textwrap.dedent('''\
Hello, this is your friendly neighborhood mergebot.
After merging PR {}, I observed that the tool {} no longer builds.
A follow-up PR to the repository {} is needed to fix the fallout.
@@ -77,7 +85,7 @@
cc @{}, the PR reviewer, and @rust-lang/compiler -- nominating for prioritization.
- ''').format(relevant_pr_number, tool, REPOS.get(tool), relevant_pr_user, pr_reviewer),
+ ''').format(relevant_pr_number, tool, REPOS.get(tool), relevant_pr_user, pr_reviewer)),
'title': '`{}` no longer builds after {}'.format(tool, relevant_pr_number),
'assignees': assignees,
'labels': ['T-compiler', 'I-nominated'],
@@ -216,11 +224,10 @@
f.write(message)
# Write the toolstate comment on the PR as well.
- gh_url = 'https://api.github.com/repos/rust-lang/rust/issues/{}/comments' \
- .format(number)
+ issue_url = gh_url() + '/{}/comments'.format(number)
response = urllib2.urlopen(urllib2.Request(
- gh_url,
- json.dumps({'body': message}),
+ issue_url,
+ json.dumps({'body': maybe_delink(message)}),
{
'Authorization': 'token ' + github_token,
'Content-Type': 'application/json',
diff --git a/src/tools/rustc-workspace-hack/Cargo.toml b/src/tools/rustc-workspace-hack/Cargo.toml
index 7472006..26b447d 100644
--- a/src/tools/rustc-workspace-hack/Cargo.toml
+++ b/src/tools/rustc-workspace-hack/Cargo.toml
@@ -69,8 +69,9 @@
serde = { version = "1.0.82", features = ['derive'] }
serde_json = { version = "1.0.31", features = ["raw_value"] }
smallvec = { version = "0.6", features = ['union', 'may_dangle'] }
-scopeguard = { version = "0.3.3", features = ["use_std", "default"]}
-byteorder = { version = "1.2.7", features = ["i128"]}
+scopeguard = { version = "0.3.3", features = ["use_std", "default"] }
+byteorder = { version = "1.2.7", features = ["i128"] }
+syn = { version = "0.15.35", features = ["extra-traits", "full"] }
[target.'cfg(not(windows))'.dependencies]
diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs
index 11f37b2..6169ffc 100644
--- a/src/tools/tidy/src/deps.rs
+++ b/src/tools/tidy/src/deps.rs
@@ -33,6 +33,8 @@
"is-match", // MPL-2.0, mdbook
"cssparser", // MPL-2.0, rustdoc
"smallvec", // MPL-2.0, rustdoc
+ "rdrand", // ISC, mdbook, rustfmt
+ "fuchsia-cprng", // BSD-3-Clause, mdbook, rustfmt
"fuchsia-zircon-sys", // BSD-3-Clause, rustdoc, rustc, cargo
"fuchsia-zircon", // BSD-3-Clause, rustdoc, rustc, cargo (jobserver & tempdir)
"cssparser-macros", // MPL-2.0, rustdoc