Auto merge of #112026 - saethlin:misaligned-addrof, r=pnkfelix
Don't check for misaligned raw pointer derefs inside Rvalue::AddressOf
From https://github.com/rust-lang/rust/pull/112026#issuecomment-1565686697:
rustc 1.70 (stable next week) added a MIR pass that inserts pointer alignment checks in debug mode. Adding these checks caused some crates to break, but that was expected, since they contain broken code (https://github.com/rust-lang/rust/issues/111487 tracks the breakage).
However, the checks added are slightly more aggressive than they should have been: they also check the place inside an `addr_of!` expression, and whether a lack of alignment there is UB remains unclear. This PR modifies the pass so that it no longer affects those cases.
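To make the distinction concrete, here is a minimal, hypothetical sketch (not taken from this PR or its tests; all names are mine) of the kind of code that is affected, assuming a debug build with debug assertions enabled:

```rust
// Hypothetical illustration; nothing here comes from the PR itself.
use std::ptr::addr_of;

fn main() {
    let buf = [0u8; 8];
    // Construct a 4-byte-typed pointer that is (usually) not 4-aligned by
    // offsetting one byte into the buffer.
    let misaligned: *const u32 = buf.as_ptr().wrapping_add(1).cast::<u32>();

    // `addr_of!(*p)` lowers to `Rvalue::AddressOf`: it only re-materializes the
    // address and never loads through it. After this PR, debug builds no longer
    // insert an alignment check for this place.
    let same: *const u32 = unsafe { addr_of!(*misaligned) };
    assert_eq!(same, misaligned);

    // An actual dereference of the misaligned pointer is still checked in debug
    // builds (and is UB); `read_unaligned` is the sound way to load the bytes.
    let _value: u32 = unsafe { misaligned.read_unaligned() };
}
```

With the pass as shipped in 1.70, the `addr_of!` line above could also trip the new debug assertion; with this change, only genuine loads and stores through misaligned raw pointers are checked.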
I spot-checked the crater regressions, and the ones I saw were not instances of the case this PR changes. Still, it seems better not to land anything overly aggressive.
diff --git a/Cargo.lock b/Cargo.lock
index 0369442..443e6d0 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4380,6 +4380,15 @@
]
[[package]]
+name = "rustdoc-gui-test"
+version = "0.1.0"
+dependencies = [
+ "compiletest",
+ "getopts",
+ "walkdir",
+]
+
+[[package]]
name = "rustdoc-json-types"
version = "0.1.0"
dependencies = [
@@ -4972,22 +4981,22 @@
[[package]]
name = "thiserror"
-version = "1.0.38"
+version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0"
+checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
-version = "1.0.38"
+version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f"
+checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.102",
+ "syn 2.0.8",
]
[[package]]
diff --git a/Cargo.toml b/Cargo.toml
index 53331e2..8eb378a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -40,6 +40,7 @@
"src/tools/generate-copyright",
"src/tools/suggest-tests",
"src/tools/generate-windows-sys",
+ "src/tools/rustdoc-gui-test",
]
exclude = [
diff --git a/RELEASES.md b/RELEASES.md
index 85266a1..fa95df6 100644
--- a/RELEASES.md
+++ b/RELEASES.md
@@ -1,3 +1,139 @@
+Version 1.70.0 (2023-06-01)
+==========================
+
+<a id="1.70.0-Language"></a>
+
+Language
+--------
+- [Relax ordering rules for `asm!` operands](https://github.com/rust-lang/rust/pull/105798/)
+- [Properly allow macro expanded `format_args` invocations to use captures](https://github.com/rust-lang/rust/pull/106505/)
+- [Lint ambiguous glob re-exports](https://github.com/rust-lang/rust/pull/107880/)
+- [Perform const and unsafe checking for expressions in `let _ = expr` position.](https://github.com/rust-lang/rust/pull/102256/)
+
+<a id="1.70.0-Compiler"></a>
+
+Compiler
+--------
+- [Extend -Cdebuginfo with new options and named aliases](https://github.com/rust-lang/rust/pull/109808/)
+ This provides a smaller version of debuginfo for cases that only need line number information
+ (`-Cdebuginfo=line-tables-only`), which may eventually become the default for `-Cdebuginfo=1`.
+- [Make `unused_allocation` lint against `Box::new` too](https://github.com/rust-lang/rust/pull/104363/)
+- [Detect uninhabited types early in const eval](https://github.com/rust-lang/rust/pull/109435/)
+- [Switch to LLD as default linker for {arm,thumb}v4t-none-eabi](https://github.com/rust-lang/rust/pull/109721/)
+- [Add tier 3 target `loongarch64-unknown-linux-gnu`](https://github.com/rust-lang/rust/pull/96971)
+- [Add tier 3 target for `i586-pc-nto-qnx700` (QNX Neutrino RTOS, version 7.0)](https://github.com/rust-lang/rust/pull/109173/)
+- [Insert alignment checks for pointer dereferences as debug assertions](https://github.com/rust-lang/rust/pull/98112)
+ This catches undefined behavior at runtime, and may cause existing code to fail.
+
+Refer to Rust's [platform support page][platform-support-doc]
+for more information on Rust's tiered platform support.
+
+<a id="1.70.0-Libraries"></a>
+
+Libraries
+---------
+- [Document NonZeroXxx layout guarantees](https://github.com/rust-lang/rust/pull/94786/)
+- [Windows: make `Command` prefer non-verbatim paths](https://github.com/rust-lang/rust/pull/96391/)
+- [Implement Default for some alloc/core iterators](https://github.com/rust-lang/rust/pull/99929/)
+- [Fix handling of trailing bare CR in str::lines](https://github.com/rust-lang/rust/pull/100311/)
+- [allow negative numeric literals in `concat!`](https://github.com/rust-lang/rust/pull/106844/)
+- [Add documentation about the memory layout of `Cell`](https://github.com/rust-lang/rust/pull/106921/)
+- [Use `partial_cmp` to implement tuple `lt`/`le`/`ge`/`gt`](https://github.com/rust-lang/rust/pull/108157/)
+- [Stabilize `atomic_as_ptr`](https://github.com/rust-lang/rust/pull/108419/)
+- [Stabilize `nonnull_slice_from_raw_parts`](https://github.com/rust-lang/rust/pull/97506/)
+- [Partial stabilization of `once_cell`](https://github.com/rust-lang/rust/pull/105587/)
+- [Stabilize `nonzero_min_max`](https://github.com/rust-lang/rust/pull/106633/)
+- [Flatten/inline format_args!() and (string and int) literal arguments into format_args!()](https://github.com/rust-lang/rust/pull/106824/)
+- [Stabilize movbe target feature](https://github.com/rust-lang/rust/pull/107711/)
+- [don't splice from files into pipes in io::copy](https://github.com/rust-lang/rust/pull/108283/)
+- [Add a builtin unstable `FnPtr` trait that is implemented for all function pointers](https://github.com/rust-lang/rust/pull/108080/)
+ This extends `Debug`, `Pointer`, `Hash`, `PartialEq`, `Eq`, `PartialOrd`, and `Ord`
+ implementations for function pointers with all ABIs.
+
+<a id="1.70.0-Stabilized-APIs"></a>
+
+Stabilized APIs
+---------------
+
+- [`NonZero*::MIN/MAX`](https://doc.rust-lang.org/stable/std/num/struct.NonZeroI8.html#associatedconstant.MIN)
+- [`BinaryHeap::retain`](https://doc.rust-lang.org/stable/std/collections/struct.BinaryHeap.html#method.retain)
+- [`Default for std::collections::binary_heap::IntoIter`](https://doc.rust-lang.org/stable/std/collections/binary_heap/struct.IntoIter.html)
+- [`Default for std::collections::btree_map::{IntoIter, Iter, IterMut}`](https://doc.rust-lang.org/stable/std/collections/btree_map/struct.IntoIter.html)
+- [`Default for std::collections::btree_map::{IntoKeys, Keys}`](https://doc.rust-lang.org/stable/std/collections/btree_map/struct.IntoKeys.html)
+- [`Default for std::collections::btree_map::{IntoValues, Values}`](https://doc.rust-lang.org/stable/std/collections/btree_map/struct.IntoValues.html)
+- [`Default for std::collections::btree_map::Range`](https://doc.rust-lang.org/stable/std/collections/btree_map/struct.Range.html)
+- [`Default for std::collections::btree_set::{IntoIter, Iter}`](https://doc.rust-lang.org/stable/std/collections/btree_set/struct.IntoIter.html)
+- [`Default for std::collections::btree_set::Range`](https://doc.rust-lang.org/stable/std/collections/btree_set/struct.Range.html)
+- [`Default for std::collections::linked_list::{IntoIter, Iter, IterMut}`](https://doc.rust-lang.org/stable/alloc/collections/linked_list/struct.IntoIter.html)
+- [`Default for std::vec::IntoIter`](https://doc.rust-lang.org/stable/alloc/vec/struct.IntoIter.html#impl-Default-for-IntoIter%3CT,+A%3E)
+- [`Default for std::iter::Chain`](https://doc.rust-lang.org/stable/std/iter/struct.Chain.html)
+- [`Default for std::iter::Cloned`](https://doc.rust-lang.org/stable/std/iter/struct.Cloned.html)
+- [`Default for std::iter::Copied`](https://doc.rust-lang.org/stable/std/iter/struct.Copied.html)
+- [`Default for std::iter::Enumerate`](https://doc.rust-lang.org/stable/std/iter/struct.Enumerate.html)
+- [`Default for std::iter::Flatten`](https://doc.rust-lang.org/stable/std/iter/struct.Flatten.html)
+- [`Default for std::iter::Fuse`](https://doc.rust-lang.org/stable/std/iter/struct.Fuse.html)
+- [`Default for std::iter::Rev`](https://doc.rust-lang.org/stable/std/iter/struct.Rev.html)
+- [`Default for std::slice::Iter`](https://doc.rust-lang.org/stable/std/slice/struct.Iter.html)
+- [`Default for std::slice::IterMut`](https://doc.rust-lang.org/stable/std/slice/struct.IterMut.html)
+- [`Rc::into_inner`](https://doc.rust-lang.org/stable/alloc/rc/struct.Rc.html#method.into_inner)
+- [`Arc::into_inner`](https://doc.rust-lang.org/stable/alloc/sync/struct.Arc.html#method.into_inner)
+- [`std::cell::OnceCell`](https://doc.rust-lang.org/stable/std/cell/struct.OnceCell.html)
+- [`Option::is_some_and`](https://doc.rust-lang.org/stable/std/option/enum.Option.html#method.is_some_and)
+- [`NonNull::slice_from_raw_parts`](https://doc.rust-lang.org/stable/std/ptr/struct.NonNull.html#method.slice_from_raw_parts)
+- [`Result::is_ok_and`](https://doc.rust-lang.org/stable/std/result/enum.Result.html#method.is_ok_and)
+- [`Result::is_err_and`](https://doc.rust-lang.org/stable/std/result/enum.Result.html#method.is_err_and)
+- [`std::sync::atomic::Atomic*::as_ptr`](https://doc.rust-lang.org/stable/std/sync/atomic/struct.AtomicU8.html#method.as_ptr)
+- [`std::io::IsTerminal`](https://doc.rust-lang.org/stable/std/io/trait.IsTerminal.html)
+- [`std::os::linux::net::SocketAddrExt`](https://doc.rust-lang.org/stable/std/os/linux/net/trait.SocketAddrExt.html)
+- [`std::os::unix::net::UnixDatagram::bind_addr`](https://doc.rust-lang.org/stable/std/os/unix/net/struct.UnixDatagram.html#method.bind_addr)
+- [`std::os::unix::net::UnixDatagram::connect_addr`](https://doc.rust-lang.org/stable/std/os/unix/net/struct.UnixDatagram.html#method.connect_addr)
+- [`std::os::unix::net::UnixDatagram::send_to_addr`](https://doc.rust-lang.org/stable/std/os/unix/net/struct.UnixDatagram.html#method.send_to_addr)
+- [`std::os::unix::net::UnixListener::bind_addr`](https://doc.rust-lang.org/stable/std/os/unix/net/struct.UnixListener.html#method.bind_addr)
+- [`std::path::Path::as_mut_os_str`](https://doc.rust-lang.org/stable/std/path/struct.Path.html#method.as_mut_os_str)
+- [`std::sync::OnceLock`](https://doc.rust-lang.org/stable/std/sync/struct.OnceLock.html)
+
+<a id="1.70.0-Cargo"></a>
+
+Cargo
+-----
+
+- [Add `CARGO_PKG_README`](https://github.com/rust-lang/cargo/pull/11645/)
+- [Make `sparse` the default protocol for crates.io](https://github.com/rust-lang/cargo/pull/11791/)
+- [Accurately show status when downgrading dependencies](https://github.com/rust-lang/cargo/pull/11839/)
+- [Use registry.default for login/logout](https://github.com/rust-lang/cargo/pull/11949/)
+- [Stabilize `cargo logout`](https://github.com/rust-lang/cargo/pull/11950/)
+
+<a id="1.70.0-Misc"></a>
+
+Misc
+----
+
+- [Stabilize rustdoc `--test-run-directory`](https://github.com/rust-lang/rust/pull/103682/)
+
+<a id="1.70.0-Compatibility-Notes"></a>
+
+Compatibility Notes
+-------------------
+
+- [Prevent stable `libtest` from supporting `-Zunstable-options`](https://github.com/rust-lang/rust/pull/109044/)
+- [Perform const and unsafe checking for expressions in `let _ = expr` position.](https://github.com/rust-lang/rust/pull/102256/)
+- [WebAssembly targets enable `sign-ext` and `mutable-globals` features in codegen](https://github.com/rust-lang/rust/issues/109807)
+ This may cause incompatibility with older execution environments.
+- [Insert alignment checks for pointer dereferences as debug assertions](https://github.com/rust-lang/rust/pull/98112)
+ This catches undefined behavior at runtime, and may cause existing code to fail.
+
+<a id="1.70.0-Internal-Changes"></a>
+
+Internal Changes
+----------------
+
+These changes do not affect any public interfaces of Rust, but they represent
+significant improvements to the performance or internals of rustc and related
+tools.
+
+- [Upgrade to LLVM 16](https://github.com/rust-lang/rust/pull/109474/)
+- [Use SipHash-1-3 instead of SipHash-2-4 for StableHasher](https://github.com/rust-lang/rust/pull/107925/)
+
Version 1.69.0 (2023-04-20)
==========================
diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs
index 7ef39f8..6646fa9 100644
--- a/compiler/rustc_ast/src/token.rs
+++ b/compiler/rustc_ast/src/token.rs
@@ -11,6 +11,7 @@
use rustc_data_structures::sync::Lrc;
use rustc_macros::HashStable_Generic;
use rustc_span::symbol::{kw, sym};
+#[cfg_attr(not(bootstrap), allow(hidden_glob_reexports))]
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::{self, edition::Edition, Span, DUMMY_SP};
use std::borrow::Cow;
diff --git a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs
index 04b8174..15d73ed 100644
--- a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs
@@ -1635,34 +1635,6 @@ pub(crate) fn describe_place_for_conflicting_borrow(
})
}
- /// Reports StorageDeadOrDrop of `place` conflicts with `borrow`.
- ///
- /// Depending on the origin of the StorageDeadOrDrop, this may be
- /// reported as either a drop or an illegal mutation of a borrowed value.
- /// The latter is preferred when the this is a drop triggered by a
- /// reassignment, as it's more user friendly to report a problem with the
- /// explicit assignment than the implicit drop.
- #[instrument(level = "debug", skip(self))]
- pub(crate) fn report_storage_dead_or_drop_of_borrowed(
- &mut self,
- location: Location,
- place_span: (Place<'tcx>, Span),
- borrow: &BorrowData<'tcx>,
- ) {
- // It's sufficient to check the last desugaring as Replace is the last
- // one to be applied.
- if let Some(DesugaringKind::Replace) = place_span.1.desugaring_kind() {
- self.report_illegal_mutation_of_borrowed(location, place_span, borrow)
- } else {
- self.report_borrowed_value_does_not_live_long_enough(
- location,
- borrow,
- place_span,
- Some(WriteKind::StorageDeadOrDrop),
- )
- }
- }
-
/// This means that some data referenced by `borrow` needs to live
/// past the point where the StorageDeadOrDrop of `place` occurs.
/// This is usually interpreted as meaning that `place` has too
diff --git a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs
index 4bde372..d0e17bf 100644
--- a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs
@@ -641,13 +641,8 @@ fn visit_stmt(&mut self, stmt: &'tcx hir::Stmt<'tcx>) {
let Some(hir::Node::Item(item)) = node else { return; };
let hir::ItemKind::Fn(.., body_id) = item.kind else { return; };
let body = self.infcx.tcx.hir().body(body_id);
- let mut assign_span = span;
- // Drop desugaring is done at MIR build so it's not in the HIR
- if let Some(DesugaringKind::Replace) = span.desugaring_kind() {
- assign_span.remove_mark();
- }
- let mut v = V { assign_span, err, ty, suggested: false };
+ let mut v = V { assign_span: span, err, ty, suggested: false };
v.visit_body(body);
if !v.suggested {
err.help(format!(
diff --git a/compiler/rustc_borrowck/src/invalidation.rs b/compiler/rustc_borrowck/src/invalidation.rs
index 036391d..b2ff25e 100644
--- a/compiler/rustc_borrowck/src/invalidation.rs
+++ b/compiler/rustc_borrowck/src/invalidation.rs
@@ -112,11 +112,13 @@ fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location
TerminatorKind::SwitchInt { discr, targets: _ } => {
self.consume_operand(location, discr);
}
- TerminatorKind::Drop { place: drop_place, target: _, unwind: _ } => {
+ TerminatorKind::Drop { place: drop_place, target: _, unwind: _, replace } => {
+ let write_kind =
+ if *replace { WriteKind::Replace } else { WriteKind::StorageDeadOrDrop };
self.access_place(
location,
*drop_place,
- (AccessDepth::Drop, Write(WriteKind::StorageDeadOrDrop)),
+ (AccessDepth::Drop, Write(write_kind)),
LocalMutationIsAllowed::Yes,
);
}
diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs
index 9277a26..a53ea10 100644
--- a/compiler/rustc_borrowck/src/lib.rs
+++ b/compiler/rustc_borrowck/src/lib.rs
@@ -685,17 +685,19 @@ fn visit_terminator_before_primary_effect(
TerminatorKind::SwitchInt { discr, targets: _ } => {
self.consume_operand(loc, (discr, span), flow_state);
}
- TerminatorKind::Drop { place, target: _, unwind: _ } => {
+ TerminatorKind::Drop { place, target: _, unwind: _, replace } => {
debug!(
"visit_terminator_drop \
loc: {:?} term: {:?} place: {:?} span: {:?}",
loc, term, place, span
);
+ let write_kind =
+ if *replace { WriteKind::Replace } else { WriteKind::StorageDeadOrDrop };
self.access_place(
loc,
(*place, span),
- (AccessDepth::Drop, Write(WriteKind::StorageDeadOrDrop)),
+ (AccessDepth::Drop, Write(write_kind)),
LocalMutationIsAllowed::Yes,
flow_state,
);
@@ -885,6 +887,7 @@ enum ReadKind {
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum WriteKind {
StorageDeadOrDrop,
+ Replace,
MutableBorrow(BorrowKind),
Mutate,
Move,
@@ -1132,13 +1135,21 @@ fn check_access_for_conflict(
this.buffer_error(err);
}
WriteKind::StorageDeadOrDrop => this
- .report_storage_dead_or_drop_of_borrowed(location, place_span, borrow),
+ .report_borrowed_value_does_not_live_long_enough(
+ location,
+ borrow,
+ place_span,
+ Some(WriteKind::StorageDeadOrDrop),
+ ),
WriteKind::Mutate => {
this.report_illegal_mutation_of_borrowed(location, place_span, borrow)
}
WriteKind::Move => {
this.report_move_out_while_borrowed(location, place_span, borrow)
}
+ WriteKind::Replace => {
+ this.report_illegal_mutation_of_borrowed(location, place_span, borrow)
+ }
}
Control::Break
}
@@ -1982,12 +1993,14 @@ fn check_access_permissions(
Reservation(
WriteKind::Move
+ | WriteKind::Replace
| WriteKind::StorageDeadOrDrop
| WriteKind::MutableBorrow(BorrowKind::Shared)
| WriteKind::MutableBorrow(BorrowKind::Shallow),
)
| Write(
WriteKind::Move
+ | WriteKind::Replace
| WriteKind::StorageDeadOrDrop
| WriteKind::MutableBorrow(BorrowKind::Shared)
| WriteKind::MutableBorrow(BorrowKind::Shallow),
diff --git a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs
index fd94ac8..eb02604 100644
--- a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs
+++ b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs
@@ -3,9 +3,9 @@
use rustc_index::interval::IntervalSet;
use rustc_infer::infer::canonical::QueryRegionConstraints;
use rustc_middle::mir::{BasicBlock, Body, ConstraintCategory, Local, Location};
+use rustc_middle::traits::query::DropckOutlivesResult;
use rustc_middle::ty::{Ty, TyCtxt, TypeVisitable, TypeVisitableExt};
use rustc_span::DUMMY_SP;
-use rustc_trait_selection::traits::query::dropck_outlives::DropckOutlivesResult;
use rustc_trait_selection::traits::query::type_op::outlives::DropckOutlives;
use rustc_trait_selection::traits::query::type_op::{TypeOp, TypeOpOutput};
use std::rc::Rc;
diff --git a/compiler/rustc_builtin_macros/src/assert/context.rs b/compiler/rustc_builtin_macros/src/assert/context.rs
index ea830a0..b619e80 100644
--- a/compiler/rustc_builtin_macros/src/assert/context.rs
+++ b/compiler/rustc_builtin_macros/src/assert/context.rs
@@ -233,10 +233,19 @@ fn manage_cond_expr(&mut self, expr: &mut P<Expr>) {
ExprKind::Cast(local_expr, _) => {
self.manage_cond_expr(local_expr);
}
+ ExprKind::If(local_expr, _, _) => {
+ self.manage_cond_expr(local_expr);
+ }
ExprKind::Index(prefix, suffix) => {
self.manage_cond_expr(prefix);
self.manage_cond_expr(suffix);
}
+ ExprKind::Let(_, local_expr, _) => {
+ self.manage_cond_expr(local_expr);
+ }
+ ExprKind::Match(local_expr, _) => {
+ self.manage_cond_expr(local_expr);
+ }
ExprKind::MethodCall(call) => {
for arg in &mut call.args {
self.manage_cond_expr(arg);
@@ -295,17 +304,14 @@ fn manage_cond_expr(&mut self, expr: &mut P<Expr>) {
| ExprKind::Continue(_)
| ExprKind::Err
| ExprKind::Field(_, _)
- | ExprKind::FormatArgs(_)
| ExprKind::ForLoop(_, _, _, _)
- | ExprKind::If(_, _, _)
+ | ExprKind::FormatArgs(_)
| ExprKind::IncludedBytes(..)
| ExprKind::InlineAsm(_)
- | ExprKind::OffsetOf(_, _)
- | ExprKind::Let(_, _, _)
| ExprKind::Lit(_)
| ExprKind::Loop(_, _, _)
| ExprKind::MacCall(_)
- | ExprKind::Match(_, _)
+ | ExprKind::OffsetOf(_, _)
| ExprKind::Path(_, _)
| ExprKind::Ret(_)
| ExprKind::Try(_)
diff --git a/compiler/rustc_codegen_cranelift/src/base.rs b/compiler/rustc_codegen_cranelift/src/base.rs
index 9c6a0fa..fcfa0b8 100644
--- a/compiler/rustc_codegen_cranelift/src/base.rs
+++ b/compiler/rustc_codegen_cranelift/src/base.rs
@@ -473,7 +473,7 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) {
| TerminatorKind::GeneratorDrop => {
bug!("shouldn't exist at codegen {:?}", bb_data.terminator());
}
- TerminatorKind::Drop { place, target, unwind: _ } => {
+ TerminatorKind::Drop { place, target, unwind: _, replace: _ } => {
let drop_place = codegen_place(fx, *place);
crate::abi::codegen_drop(fx, source_info, drop_place);
diff --git a/compiler/rustc_codegen_llvm/src/attributes.rs b/compiler/rustc_codegen_llvm/src/attributes.rs
index 651d644..6d00464 100644
--- a/compiler/rustc_codegen_llvm/src/attributes.rs
+++ b/compiler/rustc_codegen_llvm/src/attributes.rs
@@ -88,6 +88,9 @@ pub fn sanitize_attrs<'ll>(
attrs.push(llvm::AttributeKind::SanitizeMemTag.create_attr(cx.llcx));
}
+ if enabled.contains(SanitizerSet::SAFESTACK) {
+ attrs.push(llvm::AttributeKind::SanitizeSafeStack.create_attr(cx.llcx));
+ }
attrs
}
diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
index de93a64..6ef3418 100644
--- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
+++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
@@ -196,6 +196,7 @@ pub enum AttributeKind {
AllocSize = 37,
AllocatedPointer = 38,
AllocAlign = 39,
+ SanitizeSafeStack = 40,
}
/// LLVMIntPredicate
diff --git a/compiler/rustc_codegen_ssa/src/back/link.rs b/compiler/rustc_codegen_ssa/src/back/link.rs
index 8a00c42..5cc2342 100644
--- a/compiler/rustc_codegen_ssa/src/back/link.rs
+++ b/compiler/rustc_codegen_ssa/src/back/link.rs
@@ -1188,6 +1188,9 @@ fn add_sanitizer_libraries(sess: &Session, crate_type: CrateType, linker: &mut d
if sanitizer.contains(SanitizerSet::HWADDRESS) {
link_sanitizer_runtime(sess, linker, "hwasan");
}
+ if sanitizer.contains(SanitizerSet::SAFESTACK) {
+ link_sanitizer_runtime(sess, linker, "safestack");
+ }
}
fn link_sanitizer_runtime(sess: &Session, linker: &mut dyn Linker, name: &str) {
diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs
index d516ac4..3f0b64b 100644
--- a/compiler/rustc_codegen_ssa/src/mir/block.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/block.rs
@@ -1256,7 +1256,7 @@ fn codegen_terminator(
MergingSucc::False
}
- mir::TerminatorKind::Drop { place, target, unwind } => {
+ mir::TerminatorKind::Drop { place, target, unwind, replace: _ } => {
self.codegen_drop_terminator(helper, bx, place, target, unwind, mergeable_succ())
}
diff --git a/compiler/rustc_const_eval/src/interpret/terminator.rs b/compiler/rustc_const_eval/src/interpret/terminator.rs
index df38792..586e8f0 100644
--- a/compiler/rustc_const_eval/src/interpret/terminator.rs
+++ b/compiler/rustc_const_eval/src/interpret/terminator.rs
@@ -114,7 +114,7 @@ pub(super) fn eval_terminator(
}
}
- Drop { place, target, unwind } => {
+ Drop { place, target, unwind, replace: _ } => {
let frame = self.frame();
let ty = place.ty(&frame.body.local_decls, *self.tcx).ty;
let ty = self.subst_from_frame_and_normalize_erasing_regions(frame, ty)?;
diff --git a/compiler/rustc_driver_impl/src/args.rs b/compiler/rustc_driver_impl/src/args.rs
index a713aff..eb92ccc 100644
--- a/compiler/rustc_driver_impl/src/args.rs
+++ b/compiler/rustc_driver_impl/src/args.rs
@@ -18,6 +18,9 @@ fn arg_expand(arg: String) -> Result<Vec<String>, Error> {
}
}
+/// **Note:** This function doesn't interpret argument 0 in any special way.
+/// If this function is intended to be used with command line arguments,
+/// `argv[0]` must be removed manually before calling it.
pub fn arg_expand_all(at_args: &[String]) -> Vec<String> {
let mut args = Vec::new();
for arg in at_args {
diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs
index 40aa69e..0b5d737 100644
--- a/compiler/rustc_driver_impl/src/lib.rs
+++ b/compiler/rustc_driver_impl/src/lib.rs
@@ -250,6 +250,16 @@ fn run_compiler(
Box<dyn FnOnce(&config::Options) -> Box<dyn CodegenBackend> + Send>,
>,
) -> interface::Result<()> {
+ // Throw away the first argument, the name of the binary.
+ // If at_args is empty, as can happen when an empty argument array is
+ // passed to execve on some platforms, just use an empty slice.
+ //
+ // This situation was possible before due to arg_expand_all being
+ // called before removing the argument, enabling a crash by calling
+ // the compiler with @empty_file as argv[0] and no more arguments.
+ let at_args = at_args.get(1..).unwrap_or_default();
+
let args = args::arg_expand_all(at_args);
let Some(matches) = handle_options(&args) else { return Ok(()) };
@@ -1074,9 +1084,6 @@ fn print_flag_list<T>(
/// So with all that in mind, the comments below have some more detail about the
/// contortions done here to get things to work out correctly.
pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
- // Throw away the first argument, the name of the binary
- let args = &args[1..];
-
if args.is_empty() {
// user did not write `-v` nor `-Z unstable-options`, so do not
// include that extra information.
diff --git a/compiler/rustc_hir_analysis/src/astconv/mod.rs b/compiler/rustc_hir_analysis/src/astconv/mod.rs
index 5fb06cf..2c60a06 100644
--- a/compiler/rustc_hir_analysis/src/astconv/mod.rs
+++ b/compiler/rustc_hir_analysis/src/astconv/mod.rs
@@ -1159,7 +1159,7 @@ fn add_predicates_for_ast_type_binding(
// those that do.
self.one_bound_for_assoc_type(
|| traits::supertraits(tcx, trait_ref),
- trait_ref.print_only_trait_path(),
+ trait_ref.skip_binder().print_only_trait_name(),
binding.item_name,
path_span,
match binding.kind {
diff --git a/compiler/rustc_hir_typeck/messages.ftl b/compiler/rustc_hir_typeck/messages.ftl
index 9761b1d..aab432e 100644
--- a/compiler/rustc_hir_typeck/messages.ftl
+++ b/compiler/rustc_hir_typeck/messages.ftl
@@ -25,6 +25,8 @@
hir_typeck_convert_to_str = try converting the passed type into a `&str`
+hir_typeck_ctor_is_private = tuple struct constructor `{$def}` is private
+
hir_typeck_expected_default_return_type = expected `()` because of default return type
hir_typeck_expected_return_type = expected `{$expected}` because of return type
@@ -59,8 +61,8 @@
hir_typeck_lang_start_incorrect_ret_ty = the return type of the `start` lang item is incorrect
.suggestion = change the type from `{$found_ty}` to `{$expected_ty}`
-hir_typeck_method_call_on_unknown_type =
- the type of this value must be known to call a method on a raw pointer on it
+hir_typeck_method_call_on_unknown_raw_pointee =
+ cannot call a method on a raw pointer with an unknown pointee type
hir_typeck_missing_parentheses_in_range = can't call method `{$method_name}` on type `{$ty_str}`
diff --git a/compiler/rustc_hir_typeck/src/errors.rs b/compiler/rustc_hir_typeck/src/errors.rs
index 102a313..4222205 100644
--- a/compiler/rustc_hir_typeck/src/errors.rs
+++ b/compiler/rustc_hir_typeck/src/errors.rs
@@ -49,8 +49,8 @@ pub struct StructExprNonExhaustive {
}
#[derive(Diagnostic)]
-#[diag(hir_typeck_method_call_on_unknown_type, code = "E0699")]
-pub struct MethodCallOnUnknownType {
+#[diag(hir_typeck_method_call_on_unknown_raw_pointee, code = "E0699")]
+pub struct MethodCallOnUnknownRawPointee {
#[primary_span]
pub span: Span,
}
@@ -319,3 +319,11 @@ pub struct CandidateTraitNote {
pub item_name: Ident,
pub action_or_ty: String,
}
+
+#[derive(Diagnostic)]
+#[diag(hir_typeck_ctor_is_private, code = "E0603")]
+pub struct CtorIsPrivate {
+ #[primary_span]
+ pub span: Span,
+ pub def: String,
+}
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs
index 2fdcd09..5579503 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs
@@ -1,4 +1,5 @@
use crate::callee::{self, DeferredCallResolution};
+use crate::errors::CtorIsPrivate;
use crate::method::{self, MethodCallee, SelfSource};
use crate::rvalue_scopes;
use crate::{BreakableCtxt, Diverges, Expectation, FnCtxt, LocalTy, RawTy};
@@ -1207,6 +1208,12 @@ pub fn instantiate_value_path(
match ty.normalized.ty_adt_def() {
Some(adt_def) if adt_def.has_ctor() => {
let (ctor_kind, ctor_def_id) = adt_def.non_enum_variant().ctor.unwrap();
+ // Check the visibility of the ctor.
+ let vis = tcx.visibility(ctor_def_id);
+ if !vis.is_accessible_from(tcx.parent_module(hir_id).to_def_id(), tcx) {
+ tcx.sess
+ .emit_err(CtorIsPrivate { span, def: tcx.def_path_str(adt_def.did()) });
+ }
let new_res = Res::Def(DefKind::Ctor(CtorOf::Struct, ctor_kind), ctor_def_id);
let user_substs = Self::user_substs_for_adt(ty);
user_self_ty = user_substs.user_self_ty;
diff --git a/compiler/rustc_hir_typeck/src/method/probe.rs b/compiler/rustc_hir_typeck/src/method/probe.rs
index ba21ede..9f3d35a 100644
--- a/compiler/rustc_hir_typeck/src/method/probe.rs
+++ b/compiler/rustc_hir_typeck/src/method/probe.rs
@@ -3,7 +3,7 @@
use super::MethodError;
use super::NoMatchData;
-use crate::errors::MethodCallOnUnknownType;
+use crate::errors::MethodCallOnUnknownRawPointee;
use crate::FnCtxt;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
@@ -438,7 +438,7 @@ fn probe_op<OP, R>(
// so we do a future-compat lint here for the 2015 edition
// (see https://github.com/rust-lang/rust/issues/46906)
if self.tcx.sess.rust_2018() {
- self.tcx.sess.emit_err(MethodCallOnUnknownType { span });
+ self.tcx.sess.emit_err(MethodCallOnUnknownRawPointee { span });
} else {
self.tcx.struct_span_lint_hir(
lint::builtin::TYVAR_BEHIND_RAW_POINTER,
diff --git a/compiler/rustc_lint/src/context.rs b/compiler/rustc_lint/src/context.rs
index 1d0c43e..947530a 100644
--- a/compiler/rustc_lint/src/context.rs
+++ b/compiler/rustc_lint/src/context.rs
@@ -952,6 +952,10 @@ fn lookup_with_diagnostics(
db.span_label(first_reexport_span, format!("the name `{}` in the {} namespace is first re-exported here", name, namespace));
db.span_label(duplicate_reexport_span, format!("but the name `{}` in the {} namespace is also re-exported here", name, namespace));
}
+ BuiltinLintDiagnostics::HiddenGlobReexports { name, namespace, glob_reexport_span, private_item_span } => {
+ db.span_label(glob_reexport_span, format!("the name `{}` in the {} namespace is supposed to be publicly re-exported here", name, namespace));
+ db.span_label(private_item_span, "but the private item here shadows it");
+ }
}
// Rewrap `db`, and pass control to the user.
decorate(db)
diff --git a/compiler/rustc_lint/src/levels.rs b/compiler/rustc_lint/src/levels.rs
index b92ed11f..8376835 100644
--- a/compiler/rustc_lint/src/levels.rs
+++ b/compiler/rustc_lint/src/levels.rs
@@ -242,7 +242,9 @@ fn get_lint_level(&self, lint: &'static Lint, _: &Session) -> LevelAndSource {
struct QueryMapExpectationsWrapper<'tcx> {
tcx: TyCtxt<'tcx>,
+ /// HirId of the currently investigated element.
cur: HirId,
+ /// Level map for `cur`.
specs: ShallowLintLevelMap,
expectations: Vec<(LintExpectationId, LintExpectation)>,
unstable_to_stable_ids: FxHashMap<LintExpectationId, LintExpectationId>,
@@ -255,11 +257,11 @@ fn current_specs(&self) -> &FxHashMap<LintId, LevelAndSource> {
self.specs.specs.get(&self.cur.local_id).unwrap_or(&self.empty)
}
fn insert(&mut self, id: LintId, lvl: LevelAndSource) {
- let specs = self.specs.specs.get_mut_or_insert_default(self.cur.local_id);
- specs.clear();
- specs.insert(id, lvl);
+ self.specs.specs.get_mut_or_insert_default(self.cur.local_id).insert(id, lvl);
}
fn get_lint_level(&self, lint: &'static Lint, _: &Session) -> LevelAndSource {
+ // We cannot use `tcx.lint_level_at_node` because we want to know in which order the
+ // attributes have been inserted, in particular whether an `expect` follows a `forbid`.
self.specs.lint_level_id_at_node(self.tcx, LintId::of(lint), self.cur)
}
fn push_expectation(&mut self, id: LintExpectationId, expectation: LintExpectation) {
@@ -355,7 +357,9 @@ fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem<'tcx>) {
impl<'tcx> LintLevelsBuilder<'_, QueryMapExpectationsWrapper<'tcx>> {
fn add_id(&mut self, hir_id: HirId) {
+ // Change both the `HirId` and the associated specs.
self.provider.cur = hir_id;
+ self.provider.specs.specs.clear();
self.add(self.provider.tcx.hir().attrs(hir_id), hir_id == hir::CRATE_HIR_ID, Some(hir_id));
}
}
diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs
index 6e9dc88..1507087 100644
--- a/compiler/rustc_lint_defs/src/builtin.rs
+++ b/compiler/rustc_lint_defs/src/builtin.rs
@@ -3272,6 +3272,43 @@
"ambiguous glob re-exports",
}
+declare_lint! {
+ /// The `hidden_glob_reexports` lint detects cases where glob re-export items are shadowed by
+ /// private items.
+ ///
+ /// ### Example
+ ///
+ /// ```rust,compile_fail
+ /// #![deny(hidden_glob_reexports)]
+ ///
+ /// pub mod upstream {
+ /// mod inner { pub struct Foo {}; pub struct Bar {}; }
+ /// pub use self::inner::*;
+ /// struct Foo {} // private item shadows `inner::Foo`
+ /// }
+ ///
+ /// // mod downstream {
+ /// // fn test() {
+ /// // let _ = crate::upstream::Foo; // inaccessible
+ /// // }
+ /// // }
+ ///
+ /// pub fn main() {}
+ /// ```
+ ///
+ /// {{produces}}
+ ///
+ /// ### Explanation
+ ///
+ /// This was previously accepted without any errors or warnings, but it could silently break a
+ /// crate's downstream users. If the private `struct Foo` is added, the glob re-exported
+ /// `upstream::Foo` silently becomes inaccessible and triggers a "struct `Foo` is private"
+ /// visibility error at the downstream use site.
+ pub HIDDEN_GLOB_REEXPORTS,
+ Warn,
+ "name introduced by a private item shadows a name introduced by a public glob re-export",
+}
+
declare_lint_pass! {
/// Does nothing as a lint pass, but registers some `Lint`s
/// that are used by other parts of the compiler.
@@ -3304,6 +3341,7 @@
FORBIDDEN_LINT_GROUPS,
FUNCTION_ITEM_REFERENCES,
FUZZY_PROVENANCE_CASTS,
+ HIDDEN_GLOB_REEXPORTS,
ILL_FORMED_ATTRIBUTE_INPUT,
ILLEGAL_FLOATING_POINT_LITERAL_PATTERN,
IMPLIED_BOUNDS_ENTAILMENT,
diff --git a/compiler/rustc_lint_defs/src/lib.rs b/compiler/rustc_lint_defs/src/lib.rs
index e27e322..5a5031b 100644
--- a/compiler/rustc_lint_defs/src/lib.rs
+++ b/compiler/rustc_lint_defs/src/lib.rs
@@ -540,6 +540,16 @@ pub enum BuiltinLintDiagnostics {
/// Span where the same name is also re-exported.
duplicate_reexport_span: Span,
},
+ HiddenGlobReexports {
+ /// The name of the local binding which shadows the glob re-export.
+ name: String,
+ /// The namespace in which the shadowing occurred.
+ namespace: String,
+ /// The glob reexport that is shadowed by the local binding.
+ glob_reexport_span: Span,
+ /// The local binding that shadows the glob reexport.
+ private_item_span: Span,
+ },
}
/// Lints that are buffered up early on in the `Session` before the
diff --git a/compiler/rustc_llvm/llvm-wrapper/LLVMWrapper.h b/compiler/rustc_llvm/llvm-wrapper/LLVMWrapper.h
index 0589062..af6f4d5 100644
--- a/compiler/rustc_llvm/llvm-wrapper/LLVMWrapper.h
+++ b/compiler/rustc_llvm/llvm-wrapper/LLVMWrapper.h
@@ -96,6 +96,7 @@
AllocatedPointer = 38,
AllocAlign = 39,
#endif
+ SanitizeSafeStack = 40,
};
typedef struct OpaqueRustString *RustStringRef;
diff --git a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
index 49acd71..ea04899 100644
--- a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
+++ b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
@@ -234,6 +234,8 @@
case AllocAlign:
return Attribute::AllocAlign;
#endif
+ case SanitizeSafeStack:
+ return Attribute::SafeStack;
}
report_fatal_error("bad AttributeKind");
}
diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs
index 21faf19..6d6d71b 100644
--- a/compiler/rustc_middle/src/mir/syntax.rs
+++ b/compiler/rustc_middle/src/mir/syntax.rs
@@ -603,7 +603,11 @@ pub enum TerminatorKind<'tcx> {
/// > The drop glue is executed if, among all statements executed within this `Body`, an assignment to
/// > the place or one of its "parents" occurred more recently than a move out of it. This does not
/// > consider indirect assignments.
- Drop { place: Place<'tcx>, target: BasicBlock, unwind: UnwindAction },
+ ///
+ /// The `replace` flag indicates whether this terminator was created as part of an assignment.
+ /// This should only be used for diagnostic purposes, and does not have any operational
+ /// meaning.
+ Drop { place: Place<'tcx>, target: BasicBlock, unwind: UnwindAction, replace: bool },
/// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of
/// the referred to function. The operand types must match the argument types of the function.
diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs
index 596dd80..942654b 100644
--- a/compiler/rustc_middle/src/mir/visit.rs
+++ b/compiler/rustc_middle/src/mir/visit.rs
@@ -504,6 +504,7 @@ fn super_terminator(&mut self,
place,
target: _,
unwind: _,
+ replace: _,
} => {
self.visit_place(
place,
diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs
index a8d0dca..96023a6 100644
--- a/compiler/rustc_middle/src/ty/mod.rs
+++ b/compiler/rustc_middle/src/ty/mod.rs
@@ -53,7 +53,6 @@
use rustc_span::{ExpnId, ExpnKind, Span};
use rustc_target::abi::{Align, FieldIdx, Integer, IntegerType, VariantIdx};
pub use rustc_target::abi::{ReprFlags, ReprOptions};
-use rustc_type_ir::WithCachedTypeInfo;
pub use subst::*;
pub use vtable::*;
@@ -145,6 +144,7 @@
mod parameterized;
mod rvalue_scopes;
mod structural_impls;
+#[cfg_attr(not(bootstrap), allow(hidden_glob_reexports))]
mod sty;
mod typeck_results;
diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs
index a064174..d6c88ea 100644
--- a/compiler/rustc_middle/src/ty/print/pretty.rs
+++ b/compiler/rustc_middle/src/ty/print/pretty.rs
@@ -700,7 +700,7 @@ fn pretty_print_type(mut self, ty: Ty<'tcx>) -> Result<Self::Type, Self::Error>
if verbose { p!(write("{:?}", infer_ty)) } else { p!(write("{}", infer_ty)) }
}
}
- ty::Error(_) => p!("[type error]"),
+ ty::Error(_) => p!("{{type error}}"),
ty::Param(ref param_ty) => p!(print(param_ty)),
ty::Bound(debruijn, bound_ty) => match bound_ty.kind {
ty::BoundTyKind::Anon => debug_bound_var(&mut self, debruijn, bound_ty.var)?,
@@ -1379,8 +1379,8 @@ macro_rules! print_underscore {
},
// FIXME(generic_const_exprs):
// write out some legible representation of an abstract const?
- ty::ConstKind::Expr(_) => p!("[const expr]"),
- ty::ConstKind::Error(_) => p!("[const error]"),
+ ty::ConstKind::Expr(_) => p!("{{const expr}}"),
+ ty::ConstKind::Error(_) => p!("{{const error}}"),
};
Ok(self)
}
diff --git a/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs b/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs
index b744227..ebf830c 100644
--- a/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs
+++ b/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs
@@ -57,6 +57,7 @@ pub fn parse_terminator(&self, expr_id: ExprId) -> PResult<TerminatorKind<'tcx>>
place: self.parse_place(args[0])?,
target: self.parse_block(args[1])?,
unwind: UnwindAction::Continue,
+ replace: false,
})
},
@call("mir_call", args) => {
diff --git a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs
index bcab4c0..3742d64 100644
--- a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs
+++ b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs
@@ -725,6 +725,7 @@ fn build_zero_repeat(
place: to_drop,
target: success,
unwind: UnwindAction::Continue,
+ replace: false,
},
);
this.diverge_from(block);
diff --git a/compiler/rustc_mir_build/src/build/scope.rs b/compiler/rustc_mir_build/src/build/scope.rs
index 7331f8e..7c0fbc6 100644
--- a/compiler/rustc_mir_build/src/build/scope.rs
+++ b/compiler/rustc_mir_build/src/build/scope.rs
@@ -91,7 +91,7 @@
use rustc_middle::mir::*;
use rustc_middle::thir::{Expr, LintLevel};
-use rustc_span::{DesugaringKind, Span, DUMMY_SP};
+use rustc_span::{Span, DUMMY_SP};
#[derive(Debug)]
pub struct Scopes<'tcx> {
@@ -371,6 +371,7 @@ fn link_blocks<'tcx>(
// The caller will handle this if needed.
unwind: UnwindAction::Terminate,
place: drop_data.0.local.into(),
+ replace: false,
};
cfg.terminate(block, drop_data.0.source_info, terminator);
}
@@ -1128,9 +1129,6 @@ pub(crate) fn build_drop_and_replace(
place: Place<'tcx>,
value: Rvalue<'tcx>,
) -> BlockAnd<()> {
- let span = self.tcx.with_stable_hashing_context(|hcx| {
- span.mark_with_reason(None, DesugaringKind::Replace, self.tcx.sess.edition(), hcx)
- });
let source_info = self.source_info(span);
// create the new block for the assignment
@@ -1148,6 +1146,7 @@ pub(crate) fn build_drop_and_replace(
place,
target: assign,
unwind: UnwindAction::Cleanup(assign_unwind),
+ replace: true,
},
);
self.diverge_from(block);
@@ -1261,6 +1260,7 @@ fn build_scope_drops<'tcx>(
place: local.into(),
target: next,
unwind: UnwindAction::Continue,
+ replace: false,
},
);
block = next;
diff --git a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs
index 1889507..d615c83 100644
--- a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs
+++ b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs
@@ -237,6 +237,7 @@ pub fn elaborate_drop(&mut self, bb: BasicBlock) {
place: self.place,
target: self.succ,
unwind: self.unwind.into_action(),
+ replace: false,
},
);
}
@@ -719,6 +720,7 @@ fn drop_loop(
place: tcx.mk_place_deref(ptr),
target: loop_block,
unwind: unwind.into_action(),
+ replace: false,
},
);
@@ -963,8 +965,12 @@ fn unelaborated_free_block(
}
fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
- let block =
- TerminatorKind::Drop { place: self.place, target, unwind: unwind.into_action() };
+ let block = TerminatorKind::Drop {
+ place: self.place,
+ target,
+ unwind: unwind.into_action(),
+ replace: false,
+ };
self.new_block(unwind, block)
}
diff --git a/compiler/rustc_mir_dataflow/src/framework/direction.rs b/compiler/rustc_mir_dataflow/src/framework/direction.rs
index c8fe1af..ba328e7 100644
--- a/compiler/rustc_mir_dataflow/src/framework/direction.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/direction.rs
@@ -479,7 +479,7 @@ fn join_state_into_successors_of<'tcx, A>(
Goto { target } => propagate(target, exit_state),
Assert { target, unwind, expected: _, msg: _, cond: _ }
- | Drop { target, unwind, place: _ }
+ | Drop { target, unwind, place: _, replace: _ }
| FalseUnwind { real_target: target, unwind } => {
if let UnwindAction::Cleanup(unwind) = unwind {
propagate(unwind, exit_state);
diff --git a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
index b29ffcc..ef2a0c7 100644
--- a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
+++ b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
@@ -80,7 +80,7 @@ fn add_move_for_packed_drop<'tcx>(
is_cleanup: bool,
) {
debug!("add_move_for_packed_drop({:?} @ {:?})", terminator, loc);
- let TerminatorKind::Drop { ref place, target, unwind } = terminator.kind else {
+ let TerminatorKind::Drop { ref place, target, unwind, replace } = terminator.kind else {
unreachable!();
};
@@ -98,6 +98,11 @@ fn add_move_for_packed_drop<'tcx>(
patch.add_assign(loc, Place::from(temp), Rvalue::Use(Operand::Move(*place)));
patch.patch_terminator(
loc.block,
- TerminatorKind::Drop { place: Place::from(temp), target: storage_dead_block, unwind },
+ TerminatorKind::Drop {
+ place: Place::from(temp),
+ target: storage_dead_block,
+ unwind,
+ replace,
+ },
);
}
diff --git a/compiler/rustc_mir_transform/src/elaborate_drops.rs b/compiler/rustc_mir_transform/src/elaborate_drops.rs
index 98e7a51..fda0e10 100644
--- a/compiler/rustc_mir_transform/src/elaborate_drops.rs
+++ b/compiler/rustc_mir_transform/src/elaborate_drops.rs
@@ -14,7 +14,7 @@
use rustc_mir_dataflow::MoveDataParamEnv;
use rustc_mir_dataflow::{on_all_children_bits, on_all_drop_children_bits};
use rustc_mir_dataflow::{Analysis, ResultsCursor};
-use rustc_span::{DesugaringKind, Span};
+use rustc_span::Span;
use rustc_target::abi::{FieldIdx, VariantIdx};
use std::fmt;
@@ -401,7 +401,7 @@ fn elaborate_drops(&mut self) {
let terminator = data.terminator();
match terminator.kind {
- TerminatorKind::Drop { mut place, target, unwind } => {
+ TerminatorKind::Drop { mut place, target, unwind, replace } => {
if let Some(new_place) = self.un_derefer.derefer(place.as_ref(), self.body) {
place = new_place;
}
@@ -434,10 +434,7 @@ fn elaborate_drops(&mut self) {
)
}
LookupResult::Parent(..) => {
- if !matches!(
- terminator.source_info.span.desugaring_kind(),
- Some(DesugaringKind::Replace),
- ) {
+ if !replace {
self.tcx.sess.delay_span_bug(
terminator.source_info.span,
format!("drop of untracked value {:?}", bb),
diff --git a/compiler/rustc_mir_transform/src/generator.rs b/compiler/rustc_mir_transform/src/generator.rs
index 891e446..89567ed 100644
--- a/compiler/rustc_mir_transform/src/generator.rs
+++ b/compiler/rustc_mir_transform/src/generator.rs
@@ -1045,7 +1045,10 @@ fn elaborate_generator_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
for (block, block_data) in body.basic_blocks.iter_enumerated() {
let (target, unwind, source_info) = match block_data.terminator() {
- Terminator { source_info, kind: TerminatorKind::Drop { place, target, unwind } } => {
+ Terminator {
+ source_info,
+ kind: TerminatorKind::Drop { place, target, unwind, replace: _ },
+ } => {
if let Some(local) = place.as_local() {
if local == SELF_ARG {
(target, unwind, source_info)
@@ -1304,6 +1307,7 @@ fn insert_clean_drop(body: &mut Body<'_>) -> BasicBlock {
place: Place::from(SELF_ARG),
target: return_block,
unwind: UnwindAction::Continue,
+ replace: false,
};
let source_info = SourceInfo::outermost(body.span);
diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs
index 1748b1b..ca1e209 100644
--- a/compiler/rustc_mir_transform/src/inline.rs
+++ b/compiler/rustc_mir_transform/src/inline.rs
@@ -449,7 +449,7 @@ fn check_mir_body(
checker.visit_basic_block_data(bb, blk);
let term = blk.terminator();
- if let TerminatorKind::Drop { ref place, target, unwind } = term.kind {
+ if let TerminatorKind::Drop { ref place, target, unwind, replace: _ } = term.kind {
work_list.push(target);
// If the place doesn't actually need dropping, treat it like a regular goto.
@@ -457,8 +457,8 @@ fn check_mir_body(
.callee
.subst_mir(self.tcx, ty::EarlyBinder(&place.ty(callee_body, tcx).ty));
if ty.needs_drop(tcx, self.param_env) && let UnwindAction::Cleanup(unwind) = unwind {
- work_list.push(unwind);
- }
+ work_list.push(unwind);
+ }
} else if callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set
&& matches!(term.kind, TerminatorKind::InlineAsm { .. })
{
diff --git a/compiler/rustc_mir_transform/src/match_branches.rs b/compiler/rustc_mir_transform/src/match_branches.rs
index 59942dc..6eb4849 100644
--- a/compiler/rustc_mir_transform/src/match_branches.rs
+++ b/compiler/rustc_mir_transform/src/match_branches.rs
@@ -41,7 +41,7 @@
impl<'tcx> MirPass<'tcx> for MatchBranchSimplification {
fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
- sess.mir_opt_level() >= 3
+ sess.mir_opt_level() >= 1
}
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
@@ -62,7 +62,12 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
..
} if targets.iter().len() == 1 => {
let (value, target) = targets.iter().next().unwrap();
- if target == targets.otherwise() {
+ // We require that this block and the two possible target blocks all be
+ // distinct.
+ if target == targets.otherwise()
+ || bb_idx == target
+ || bb_idx == targets.otherwise()
+ {
continue;
}
(discr, value, target, targets.otherwise())
diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs
index 7c47d88..0eb27c2 100644
--- a/compiler/rustc_mir_transform/src/shim.rs
+++ b/compiler/rustc_mir_transform/src/shim.rs
@@ -544,6 +544,7 @@ fn clone_fields<I>(
place: dest_field,
target: unwind,
unwind: UnwindAction::Terminate,
+ replace: false,
},
true,
);
@@ -800,6 +801,7 @@ fn build_call_shim<'tcx>(
place: rcvr_place(),
target: BasicBlock::new(2),
unwind: UnwindAction::Continue,
+ replace: false,
},
false,
);
@@ -815,6 +817,7 @@ fn build_call_shim<'tcx>(
place: rcvr_place(),
target: BasicBlock::new(4),
unwind: UnwindAction::Terminate,
+ replace: false,
},
true,
);
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 0c265d7..c234206 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -536,7 +536,9 @@ pub fn expect_one_of(
} else if inedible.contains(&self.token.kind) {
// leave it in the input
Ok(false)
- } else if self.last_unexpected_token_span == Some(self.token.span) {
+ } else if self.token.kind != token::Eof
+ && self.last_unexpected_token_span == Some(self.token.span)
+ {
FatalError.raise();
} else {
self.expected_one_of_not_found(edible, inedible)
diff --git a/compiler/rustc_resolve/src/imports.rs b/compiler/rustc_resolve/src/imports.rs
index 7c4c05d..c1bb262 100644
--- a/compiler/rustc_resolve/src/imports.rs
+++ b/compiler/rustc_resolve/src/imports.rs
@@ -21,7 +21,8 @@
use rustc_middle::span_bug;
use rustc_middle::ty;
use rustc_session::lint::builtin::{
- AMBIGUOUS_GLOB_REEXPORTS, PUB_USE_OF_PRIVATE_EXTERN_CRATE, UNUSED_IMPORTS,
+ AMBIGUOUS_GLOB_REEXPORTS, HIDDEN_GLOB_REEXPORTS, PUB_USE_OF_PRIVATE_EXTERN_CRATE,
+ UNUSED_IMPORTS,
};
use rustc_session::lint::BuiltinLintDiagnostics;
use rustc_span::edit_distance::find_best_match_for_name;
@@ -526,31 +527,71 @@ pub(crate) fn finalize_imports(&mut self) {
}
}
- pub(crate) fn check_reexport_ambiguities(
+ pub(crate) fn check_hidden_glob_reexports(
&mut self,
exported_ambiguities: FxHashSet<Interned<'a, NameBinding<'a>>>,
) {
for module in self.arenas.local_modules().iter() {
- module.for_each_child(self, |this, ident, ns, binding| {
- if let NameBindingKind::Import { import, .. } = binding.kind
- && let Some((amb_binding, _)) = binding.ambiguity
- && binding.res() != Res::Err
- && exported_ambiguities.contains(&Interned::new_unchecked(binding))
- {
- this.lint_buffer.buffer_lint_with_diagnostic(
- AMBIGUOUS_GLOB_REEXPORTS,
- import.root_id,
- import.root_span,
- "ambiguous glob re-exports",
- BuiltinLintDiagnostics::AmbiguousGlobReexports {
- name: ident.to_string(),
- namespace: ns.descr().to_string(),
- first_reexport_span: import.root_span,
- duplicate_reexport_span: amb_binding.span,
- },
- );
+ for (key, resolution) in self.resolutions(module).borrow().iter() {
+ let resolution = resolution.borrow();
+
+ if let Some(binding) = resolution.binding {
+ if let NameBindingKind::Import { import, .. } = binding.kind
+ && let Some((amb_binding, _)) = binding.ambiguity
+ && binding.res() != Res::Err
+ && exported_ambiguities.contains(&Interned::new_unchecked(binding))
+ {
+ self.lint_buffer.buffer_lint_with_diagnostic(
+ AMBIGUOUS_GLOB_REEXPORTS,
+ import.root_id,
+ import.root_span,
+ "ambiguous glob re-exports",
+ BuiltinLintDiagnostics::AmbiguousGlobReexports {
+ name: key.ident.to_string(),
+ namespace: key.ns.descr().to_string(),
+ first_reexport_span: import.root_span,
+ duplicate_reexport_span: amb_binding.span,
+ },
+ );
+ }
+
+ if let Some(glob_binding) = resolution.shadowed_glob {
+ let binding_id = match binding.kind {
+ NameBindingKind::Res(res) => {
+ Some(self.def_id_to_node_id[res.def_id().expect_local()])
+ }
+ NameBindingKind::Module(module) => {
+ Some(self.def_id_to_node_id[module.def_id().expect_local()])
+ }
+ NameBindingKind::Import { import, .. } => import.id(),
+ };
+
+ if binding.res() != Res::Err
+ && glob_binding.res() != Res::Err
+ && let NameBindingKind::Import { import: glob_import, .. } = glob_binding.kind
+ && let Some(binding_id) = binding_id
+ && let Some(glob_import_id) = glob_import.id()
+ && let glob_import_def_id = self.local_def_id(glob_import_id)
+ && self.effective_visibilities.is_exported(glob_import_def_id)
+ && glob_binding.vis.is_public()
+ && !binding.vis.is_public()
+ {
+ self.lint_buffer.buffer_lint_with_diagnostic(
+ HIDDEN_GLOB_REEXPORTS,
+ binding_id,
+ binding.span,
+ "private item shadows public glob re-export",
+ BuiltinLintDiagnostics::HiddenGlobReexports {
+ name: key.ident.name.to_string(),
+ namespace: key.ns.descr().to_owned(),
+ glob_reexport_span: glob_binding.span,
+ private_item_span: binding.span,
+ },
+ );
+ }
+ }
}
- });
+ }
}
}
diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs
index 3d2bd84..fd977e8 100644
--- a/compiler/rustc_resolve/src/lib.rs
+++ b/compiler/rustc_resolve/src/lib.rs
@@ -1496,8 +1496,8 @@ pub fn resolve_crate(&mut self, krate: &Crate) {
let exported_ambiguities = self.tcx.sess.time("compute_effective_visibilities", || {
EffectiveVisibilitiesVisitor::compute_effective_visibilities(self, krate)
});
- self.tcx.sess.time("check_reexport_ambiguities", || {
- self.check_reexport_ambiguities(exported_ambiguities)
+ self.tcx.sess.time("check_hidden_glob_reexports", || {
+ self.check_hidden_glob_reexports(exported_ambiguities)
});
self.tcx.sess.time("finalize_macro_resolutions", || self.finalize_macro_resolutions());
self.tcx.sess.time("late_resolve_crate", || self.late_resolve_crate(krate));
diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs
index 2c4c4a7..007e720 100644
--- a/compiler/rustc_session/src/options.rs
+++ b/compiler/rustc_session/src/options.rs
@@ -372,7 +372,7 @@ mod desc {
pub const parse_opt_panic_strategy: &str = parse_panic_strategy;
pub const parse_oom_strategy: &str = "either `panic` or `abort`";
pub const parse_relro_level: &str = "one of: `full`, `partial`, or `off`";
- pub const parse_sanitizers: &str = "comma separated list of sanitizers: `address`, `cfi`, `hwaddress`, `kcfi`, `kernel-address`, `leak`, `memory`, `memtag`, `shadow-call-stack`, or `thread`";
+ pub const parse_sanitizers: &str = "comma separated list of sanitizers: `address`, `cfi`, `hwaddress`, `kcfi`, `kernel-address`, `leak`, `memory`, `memtag`, `safestack`, `shadow-call-stack`, or `thread`";
pub const parse_sanitizer_memory_track_origins: &str = "0, 1, or 2";
pub const parse_cfguard: &str =
"either a boolean (`yes`, `no`, `on`, `off`, etc), `checks`, or `nochecks`";
@@ -694,6 +694,7 @@ pub(crate) fn parse_sanitizers(slot: &mut SanitizerSet, v: Option<&str>) -> bool
"shadow-call-stack" => SanitizerSet::SHADOWCALLSTACK,
"thread" => SanitizerSet::THREAD,
"hwaddress" => SanitizerSet::HWADDRESS,
+ "safestack" => SanitizerSet::SAFESTACK,
_ => return false,
}
}
diff --git a/compiler/rustc_smir/src/rustc_smir/mod.rs b/compiler/rustc_smir/src/rustc_smir/mod.rs
index 6af43f5..5572108 100644
--- a/compiler/rustc_smir/src/rustc_smir/mod.rs
+++ b/compiler/rustc_smir/src/rustc_smir/mod.rs
@@ -309,7 +309,7 @@ fn rustc_terminator_to_terminator(
Terminate => Terminator::Abort,
Return => Terminator::Return,
Unreachable => Terminator::Unreachable,
- Drop { place, target, unwind } => Terminator::Drop {
+ Drop { place, target, unwind, replace: _ } => Terminator::Drop {
place: rustc_place_to_place(place),
target: target.as_usize(),
unwind: rustc_unwind_to_unwind(unwind),
diff --git a/compiler/rustc_span/src/hygiene.rs b/compiler/rustc_span/src/hygiene.rs
index 6755657..b219fde 100644
--- a/compiler/rustc_span/src/hygiene.rs
+++ b/compiler/rustc_span/src/hygiene.rs
@@ -1147,7 +1147,6 @@ pub enum DesugaringKind {
Await,
ForLoop,
WhileLoop,
- Replace,
}
impl DesugaringKind {
@@ -1163,7 +1162,6 @@ pub fn descr(self) -> &'static str {
DesugaringKind::OpaqueTy => "`impl Trait`",
DesugaringKind::ForLoop => "`for` loop",
DesugaringKind::WhileLoop => "`while` loop",
- DesugaringKind::Replace => "drop and replace",
}
}
}
diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs
index ba4b89c..62f9420 100644
--- a/compiler/rustc_target/src/spec/mod.rs
+++ b/compiler/rustc_target/src/spec/mod.rs
@@ -815,6 +815,7 @@ pub struct SanitizerSet: u16 {
const SHADOWCALLSTACK = 1 << 7;
const KCFI = 1 << 8;
const KERNELADDRESS = 1 << 9;
+ const SAFESTACK = 1 << 10;
}
}
@@ -831,6 +832,7 @@ pub fn as_str(self) -> Option<&'static str> {
SanitizerSet::LEAK => "leak",
SanitizerSet::MEMORY => "memory",
SanitizerSet::MEMTAG => "memtag",
+ SanitizerSet::SAFESTACK => "safestack",
SanitizerSet::SHADOWCALLSTACK => "shadow-call-stack",
SanitizerSet::THREAD => "thread",
SanitizerSet::HWADDRESS => "hwaddress",
@@ -871,6 +873,7 @@ fn into_iter(self) -> Self::IntoIter {
SanitizerSet::THREAD,
SanitizerSet::HWADDRESS,
SanitizerSet::KERNELADDRESS,
+ SanitizerSet::SAFESTACK,
]
.iter()
.copied()
@@ -2364,6 +2367,7 @@ macro_rules! key {
Some("leak") => SanitizerSet::LEAK,
Some("memory") => SanitizerSet::MEMORY,
Some("memtag") => SanitizerSet::MEMTAG,
+ Some("safestack") => SanitizerSet::SAFESTACK,
Some("shadow-call-stack") => SanitizerSet::SHADOWCALLSTACK,
Some("thread") => SanitizerSet::THREAD,
Some("hwaddress") => SanitizerSet::HWADDRESS,
diff --git a/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnu.rs b/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnu.rs
index 9af1049..deb15c0 100644
--- a/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnu.rs
+++ b/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnu.rs
@@ -11,6 +11,7 @@ pub fn target() -> Target {
| SanitizerSet::CFI
| SanitizerSet::LEAK
| SanitizerSet::MEMORY
+ | SanitizerSet::SAFESTACK
| SanitizerSet::THREAD;
base.supports_xray = true;
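The hunks above thread a new `safestack` value through `-Zsanitizer` parsing, the `SanitizerSet` bitflags, and the `x86_64-unknown-linux-gnu` target spec. A minimal usage sketch follows; the exact cargo invocation is my assumption and is not part of this diff.

```rust
// SafeStack needs no source changes; it is requested via the unstable
// `-Zsanitizer=safestack` flag on nightly, e.g. (illustrative command only):
//     RUSTFLAGS="-Zsanitizer=safestack" cargo +nightly build --target x86_64-unknown-linux-gnu
// The instrumentation is expected to move address-taken buffers such as
// `buf` onto a separate "unsafe stack", keeping return addresses and
// register spills on a stack that stray writes cannot reach.
fn main() {
    let mut buf = [0u8; 64];
    buf[0] = 42;
    println!("{}", buf[0]);
}
```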
diff --git a/compiler/rustc_trait_selection/src/solve/mod.rs b/compiler/rustc_trait_selection/src/solve/mod.rs
index 26ace28f..56a254d 100644
--- a/compiler/rustc_trait_selection/src/solve/mod.rs
+++ b/compiler/rustc_trait_selection/src/solve/mod.rs
@@ -231,13 +231,21 @@ enum Invert {
let mut candidates = Vec::new();
// LHS normalizes-to RHS
- candidates.extend(
- evaluate_normalizes_to(self, alias_lhs, rhs, direction, Invert::No).ok(),
- );
+ candidates.extend(evaluate_normalizes_to(
+ self,
+ alias_lhs,
+ rhs,
+ direction,
+ Invert::No,
+ ));
// RHS normalizes-to LHS
- candidates.extend(
- evaluate_normalizes_to(self, alias_rhs, lhs, direction, Invert::Yes).ok(),
- );
+ candidates.extend(evaluate_normalizes_to(
+ self,
+ alias_rhs,
+ lhs,
+ direction,
+ Invert::Yes,
+ ));
// Relate via substs
let subst_relate_response = self.probe(|ecx| {
let span = tracing::span!(
@@ -265,10 +273,18 @@ enum Invert {
if let Some(merged) = self.try_merge_responses(&candidates) {
Ok(merged)
- } else if let Ok(subst_relate_response) = subst_relate_response {
- Ok(subst_relate_response)
} else {
- self.flounder(&candidates)
+ // When relating two aliases and we have ambiguity, we prefer
+ // relating the generic arguments of the aliases over normalizing
+ // them. This is necessary for inference during typeck.
+ //
+ // As this is incomplete, we must not do so during coherence.
+ match (self.solver_mode(), subst_relate_response) {
+ (SolverMode::Normal, Ok(response)) => Ok(response),
+ (SolverMode::Normal, Err(NoSolution)) | (SolverMode::Coherence, _) => {
+ self.flounder(&candidates)
+ }
+ }
}
}
}
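To make the new control flow above easier to follow, here is a standalone sketch with hypothetical names (not the compiler's real types): a merged response wins outright; otherwise the substs-relate candidate is preferred only in normal mode, while coherence always falls back to ambiguity.

```rust
#[derive(Clone, Copy)]
enum SolverMode {
    Normal,
    Coherence,
}

// `merged` stands in for `try_merge_responses`, `subst_relate` for the
// `subst_relate_response` probe; both are plain strings here for brevity.
fn pick<'a>(mode: SolverMode, merged: Option<&'a str>, subst_relate: Option<&'a str>) -> &'a str {
    match (merged, mode, subst_relate) {
        (Some(r), _, _) => r,
        // Preferring substs-relate is incomplete but helps typeck inference,
        // so it is only done outside of coherence.
        (None, SolverMode::Normal, Some(r)) => r,
        (None, SolverMode::Coherence, _) | (None, _, None) => "flounder: ambiguous",
    }
}

fn main() {
    assert_eq!(pick(SolverMode::Normal, None, Some("substs-relate")), "substs-relate");
    assert_eq!(pick(SolverMode::Coherence, None, Some("substs-relate")), "flounder: ambiguous");
}
```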
diff --git a/compiler/rustc_trait_selection/src/traits/fulfill.rs b/compiler/rustc_trait_selection/src/traits/fulfill.rs
index 2f85c32..88d2091 100644
--- a/compiler/rustc_trait_selection/src/traits/fulfill.rs
+++ b/compiler/rustc_trait_selection/src/traits/fulfill.rs
@@ -116,6 +116,7 @@ fn select(&mut self, selcx: SelectionContext<'a, 'tcx>) -> Vec<FulfillmentError<
}
impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> {
+ #[inline]
fn register_predicate_obligation(
&mut self,
infcx: &InferCtxt<'tcx>,
diff --git a/compiler/rustc_trait_selection/src/traits/mod.rs b/compiler/rustc_trait_selection/src/traits/mod.rs
index f265230..a44d895 100644
--- a/compiler/rustc_trait_selection/src/traits/mod.rs
+++ b/compiler/rustc_trait_selection/src/traits/mod.rs
@@ -14,10 +14,12 @@
pub mod outlives_bounds;
mod project;
pub mod query;
+#[cfg_attr(not(bootstrap), allow(hidden_glob_reexports))]
mod select;
mod specialize;
mod structural_match;
mod structural_normalize;
+#[cfg_attr(not(bootstrap), allow(hidden_glob_reexports))]
mod util;
mod vtable;
pub mod wf;
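For context on the `allow(hidden_glob_reexports)` opt-outs above (and the `check_hidden_glob_reexports` pass at the top of this section), this is the pattern the new lint flags, shown as a minimal hypothetical example rather than code from this diff:

```rust
mod upstream {
    pub struct Widget;
}

// The glob re-export publicly exposes `upstream::Widget`...
pub use upstream::*;

// ...but this private `Widget` shadows it inside the crate, so local code and
// downstream users can disagree about which `Widget` a path refers to. That is
// what `hidden_glob_reexports` warns about; the `allow` attributes above
// silence it for two modules where the existing shadowing is accepted as-is.
struct Widget;
```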
diff --git a/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs b/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs
index 0e797a1..f8d056e 100644
--- a/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs
+++ b/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs
@@ -1,9 +1,9 @@
use crate::infer::InferCtxt;
-use crate::traits::query::type_op::{self, TypeOp, TypeOpOutput};
use crate::traits::{ObligationCause, ObligationCtxt};
use rustc_data_structures::fx::FxIndexSet;
-use rustc_errors::ErrorGuaranteed;
use rustc_infer::infer::resolve::OpportunisticRegionResolver;
+use rustc_infer::infer::InferOk;
+use rustc_middle::infer::canonical::{OriginalQueryValues, QueryRegionConstraints};
use rustc_middle::ty::{self, ParamEnv, Ty, TypeFolder, TypeVisitableExt};
use rustc_span::def_id::LocalDefId;
@@ -68,20 +68,29 @@ fn implied_outlives_bounds(
return vec![];
}
- let span = self.tcx.def_span(body_id);
- let result: Result<_, ErrorGuaranteed> = param_env
- .and(type_op::implied_outlives_bounds::ImpliedOutlivesBounds { ty })
- .fully_perform(self, span);
- let result = match result {
- Ok(r) => r,
- Err(_) => {
- return vec![];
- }
+ let mut canonical_var_values = OriginalQueryValues::default();
+ let canonical_ty =
+ self.canonicalize_query_keep_static(param_env.and(ty), &mut canonical_var_values);
+ let Ok(canonical_result) = self.tcx.implied_outlives_bounds(canonical_ty) else {
+ return vec![];
};
- let TypeOpOutput { output, constraints, .. } = result;
+ let mut constraints = QueryRegionConstraints::default();
+ let Ok(InferOk { value, obligations }) = self
+ .instantiate_nll_query_response_and_region_obligations(
+ &ObligationCause::dummy(),
+ param_env,
+ &canonical_var_values,
+ canonical_result,
+ &mut constraints,
+ ) else {
+ return vec![];
+ };
+ assert_eq!(&obligations, &[]);
- if let Some(constraints) = constraints {
+ if !constraints.is_empty() {
+ let span = self.tcx.def_span(body_id);
+
debug!(?constraints);
if !constraints.member_constraints.is_empty() {
span_bug!(span, "{:#?}", constraints.member_constraints);
@@ -108,7 +117,7 @@ fn implied_outlives_bounds(
}
};
- output
+ value
}
fn implied_bounds_tys(
diff --git a/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs b/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs
index 455b53b..4e4172e 100644
--- a/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs
+++ b/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs
@@ -1,6 +1,11 @@
-use rustc_middle::ty::{self, Ty, TyCtxt};
+use crate::traits::query::normalize::QueryNormalizeExt;
+use crate::traits::query::NoSolution;
+use crate::traits::{Normalized, ObligationCause, ObligationCtxt};
-pub use rustc_middle::traits::query::{DropckConstraint, DropckOutlivesResult};
+use rustc_data_structures::fx::FxHashSet;
+use rustc_middle::traits::query::{DropckConstraint, DropckOutlivesResult};
+use rustc_middle::ty::{self, EarlyBinder, ParamEnvAnd, Ty, TyCtxt};
+use rustc_span::source_map::{Span, DUMMY_SP};
/// This returns true if the type `ty` is "trivial" for
/// dropck-outlives -- that is, if it doesn't require any types to
@@ -71,3 +76,263 @@ pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool {
| ty::Generator(..) => false,
}
}
+
+pub fn compute_dropck_outlives_inner<'tcx>(
+ ocx: &ObligationCtxt<'_, 'tcx>,
+ goal: ParamEnvAnd<'tcx, Ty<'tcx>>,
+) -> Result<DropckOutlivesResult<'tcx>, NoSolution> {
+ let tcx = ocx.infcx.tcx;
+ let ParamEnvAnd { param_env, value: for_ty } = goal;
+
+ let mut result = DropckOutlivesResult { kinds: vec![], overflows: vec![] };
+
+ // A stack of types left to process. Each round, we pop
+ // something from the stack and invoke
+ // `dtorck_constraint_for_ty_inner`. This may produce new types that
+ // have to be pushed on the stack. This continues until we have explored
+ // all the reachable types from the type `for_ty`.
+ //
+ // Example: Imagine that we have the following code:
+ //
+ // ```rust
+ // struct A {
+ // value: B,
+ // children: Vec<A>,
+ // }
+ //
+ // struct B {
+ // value: u32
+ // }
+ //
+ // fn f() {
+ // let a: A = ...;
+ // ..
+ // } // here, `a` is dropped
+ // ```
+ //
+ // at the point where `a` is dropped, we need to figure out
+ // which types inside of `a` contain region data that may be
+ // accessed by any destructors in `a`. We begin by pushing `A`
+ // onto the stack, as that is the type of `a`. We will then
+ // invoke `dtorck_constraint_for_ty_inner` which will expand `A`
+ // into the types of its fields `(B, Vec<A>)`. These will get
+ // pushed onto the stack. Eventually, expanding `Vec<A>` will
+ // lead to us trying to push `A` a second time -- to prevent
+ // infinite recursion, we notice that `A` was already pushed
+ // once and stop.
+ let mut ty_stack = vec![(for_ty, 0)];
+
+ // Set used to detect infinite recursion.
+ let mut ty_set = FxHashSet::default();
+
+ let cause = ObligationCause::dummy();
+ let mut constraints = DropckConstraint::empty();
+ while let Some((ty, depth)) = ty_stack.pop() {
+ debug!(
+ "{} kinds, {} overflows, {} ty_stack",
+ result.kinds.len(),
+ result.overflows.len(),
+ ty_stack.len()
+ );
+ dtorck_constraint_for_ty_inner(tcx, DUMMY_SP, for_ty, depth, ty, &mut constraints)?;
+
+ // "outlives" represent types/regions that may be touched
+ // by a destructor.
+ result.kinds.append(&mut constraints.outlives);
+ result.overflows.append(&mut constraints.overflows);
+
+ // If we have even one overflow, we should stop trying to evaluate further --
+ // chances are, the subsequent overflows for this evaluation won't provide useful
+ // information and will just decrease the speed at which we can emit these errors
+ // (since we'll be printing for just that much longer for the often enormous types
+ // that result here).
+ if !result.overflows.is_empty() {
+ break;
+ }
+
+ // dtorck types are "types that will get dropped but which
+ // do not themselves define a destructor", more or less. We have
+ // to push them onto the stack to be expanded.
+ for ty in constraints.dtorck_types.drain(..) {
+ let Normalized { value: ty, obligations } =
+ ocx.infcx.at(&cause, param_env).query_normalize(ty)?;
+ ocx.register_obligations(obligations);
+
+ debug!("dropck_outlives: ty from dtorck_types = {:?}", ty);
+
+ match ty.kind() {
+ // All parameters live for the duration of the
+ // function.
+ ty::Param(..) => {}
+
+ // A projection that we couldn't resolve - it
+ // might have a destructor.
+ ty::Alias(..) => {
+ result.kinds.push(ty.into());
+ }
+
+ _ => {
+ if ty_set.insert(ty) {
+ ty_stack.push((ty, depth + 1));
+ }
+ }
+ }
+ }
+ }
+
+ debug!("dropck_outlives: result = {:#?}", result);
+ Ok(result)
+}
+
+/// Returns a set of constraints that needs to be satisfied in
+/// order for `ty` to be valid for destruction.
+pub fn dtorck_constraint_for_ty_inner<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ span: Span,
+ for_ty: Ty<'tcx>,
+ depth: usize,
+ ty: Ty<'tcx>,
+ constraints: &mut DropckConstraint<'tcx>,
+) -> Result<(), NoSolution> {
+ debug!("dtorck_constraint_for_ty_inner({:?}, {:?}, {:?}, {:?})", span, for_ty, depth, ty);
+
+ if !tcx.recursion_limit().value_within_limit(depth) {
+ constraints.overflows.push(ty);
+ return Ok(());
+ }
+
+ if trivial_dropck_outlives(tcx, ty) {
+ return Ok(());
+ }
+
+ match ty.kind() {
+ ty::Bool
+ | ty::Char
+ | ty::Int(_)
+ | ty::Uint(_)
+ | ty::Float(_)
+ | ty::Str
+ | ty::Never
+ | ty::Foreign(..)
+ | ty::RawPtr(..)
+ | ty::Ref(..)
+ | ty::FnDef(..)
+ | ty::FnPtr(_)
+ | ty::GeneratorWitness(..)
+ | ty::GeneratorWitnessMIR(..) => {
+ // these types never have a destructor
+ }
+
+ ty::Array(ety, _) | ty::Slice(ety) => {
+ // single-element containers, behave like their element
+ rustc_data_structures::stack::ensure_sufficient_stack(|| {
+ dtorck_constraint_for_ty_inner(tcx, span, for_ty, depth + 1, *ety, constraints)
+ })?;
+ }
+
+ ty::Tuple(tys) => rustc_data_structures::stack::ensure_sufficient_stack(|| {
+ for ty in tys.iter() {
+ dtorck_constraint_for_ty_inner(tcx, span, for_ty, depth + 1, ty, constraints)?;
+ }
+ Ok::<_, NoSolution>(())
+ })?,
+
+ ty::Closure(_, substs) => {
+ if !substs.as_closure().is_valid() {
+ // By the time this code runs, all type variables ought to
+ // be fully resolved.
+
+ tcx.sess.delay_span_bug(
+ span,
+ format!("upvar_tys for closure not found. Expected capture information for closure {ty}",),
+ );
+ return Err(NoSolution);
+ }
+
+ rustc_data_structures::stack::ensure_sufficient_stack(|| {
+ for ty in substs.as_closure().upvar_tys() {
+ dtorck_constraint_for_ty_inner(tcx, span, for_ty, depth + 1, ty, constraints)?;
+ }
+ Ok::<_, NoSolution>(())
+ })?
+ }
+
+ ty::Generator(_, substs, _movability) => {
+ // rust-lang/rust#49918: types can be constructed, stored
+ // in the interior, and sit idle when generator yields
+ // (and is subsequently dropped).
+ //
+ // It would be nice to descend into interior of a
+ // generator to determine what effects dropping it might
+ // have (by looking at any drop effects associated with
+ // its interior).
+ //
+ // However, the interior's representation uses things like
+ // GeneratorWitness that explicitly assume they are not
+ // traversed in such a manner. So instead, we will
+ // simplify things for now by treating all generators as
+ // if they were like trait objects, where its upvars must
+ // all be alive for the generator's (potential)
+ // destructor.
+ //
+ // In particular, skipping over `_interior` is safe
+ // because any side-effects from dropping `_interior` can
+ // only take place through references with lifetimes
+ // derived from lifetimes attached to the upvars and resume
+ // argument, and we *do* incorporate those here.
+
+ if !substs.as_generator().is_valid() {
+ // By the time this code runs, all type variables ought to
+ // be fully resolved.
+ tcx.sess.delay_span_bug(
+ span,
+ format!("upvar_tys for generator not found. Expected capture information for generator {ty}",),
+ );
+ return Err(NoSolution);
+ }
+
+ constraints.outlives.extend(
+ substs
+ .as_generator()
+ .upvar_tys()
+ .map(|t| -> ty::subst::GenericArg<'tcx> { t.into() }),
+ );
+ constraints.outlives.push(substs.as_generator().resume_ty().into());
+ }
+
+ ty::Adt(def, substs) => {
+ let DropckConstraint { dtorck_types, outlives, overflows } =
+ tcx.at(span).adt_dtorck_constraint(def.did())?;
+ // FIXME: we can try to recursively `dtorck_constraint_on_ty`
+ // there, but that needs some way to handle cycles.
+ constraints
+ .dtorck_types
+ .extend(dtorck_types.iter().map(|t| EarlyBinder(*t).subst(tcx, substs)));
+ constraints
+ .outlives
+ .extend(outlives.iter().map(|t| EarlyBinder(*t).subst(tcx, substs)));
+ constraints
+ .overflows
+ .extend(overflows.iter().map(|t| EarlyBinder(*t).subst(tcx, substs)));
+ }
+
+ // Objects must be alive in order for their destructor
+ // to be called.
+ ty::Dynamic(..) => {
+ constraints.outlives.push(ty.into());
+ }
+
+ // Types that can't be resolved. Pass them forward.
+ ty::Alias(..) | ty::Param(..) => {
+ constraints.dtorck_types.push(ty);
+ }
+
+ ty::Placeholder(..) | ty::Bound(..) | ty::Infer(..) | ty::Error(_) => {
+ // By the time this code runs, all type variables ought to
+ // be fully resolved.
+ return Err(NoSolution);
+ }
+ }
+
+ Ok(())
+}
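The moved-in `compute_dropck_outlives_inner` / `dtorck_constraint_for_ty_inner` pair computes which types and regions a value's destructor might touch. A small surface-level illustration of the property being enforced (my example, not taken from this diff):

```rust
struct PrintOnDrop<'a>(&'a str);

impl Drop for PrintOnDrop<'_> {
    fn drop(&mut self) {
        // The destructor reads borrowed data, so dropck-outlives requires the
        // borrowed region to live until `PrintOnDrop` is actually dropped.
        println!("dropping: {}", self.0);
    }
}

fn main() {
    let text = String::from("hello");
    let guard = PrintOnDrop(text.as_str());
    // Dropping `text` before `guard` would be rejected, because the computed
    // constraints say `guard`'s drop may observe the data behind `&text`.
    drop(guard);
    drop(text);
}
```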
diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs
index c61f545..01d7a1e 100644
--- a/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs
+++ b/compiler/rustc_trait_selection/src/traits/query/type_op/ascribe_user_type.rs
@@ -1,8 +1,13 @@
use crate::infer::canonical::{Canonical, CanonicalQueryResponse};
+use crate::traits::ObligationCtxt;
+use rustc_hir::def_id::{DefId, CRATE_DEF_ID};
+use rustc_infer::traits::Obligation;
use rustc_middle::traits::query::NoSolution;
-use rustc_middle::ty::{ParamEnvAnd, TyCtxt};
+use rustc_middle::traits::{ObligationCause, ObligationCauseCode};
+use rustc_middle::ty::{self, ParamEnvAnd, Ty, TyCtxt, UserSelfTy, UserSubsts, UserType};
pub use rustc_middle::traits::query::type_op::AscribeUserType;
+use rustc_span::{Span, DUMMY_SP};
impl<'tcx> super::QueryTypeOp<'tcx> for AscribeUserType<'tcx> {
type QueryResponse = ();
@@ -20,4 +25,116 @@ fn perform_query(
) -> Result<CanonicalQueryResponse<'tcx, ()>, NoSolution> {
tcx.type_op_ascribe_user_type(canonicalized)
}
+
+ fn perform_locally_in_new_solver(
+ ocx: &ObligationCtxt<'_, 'tcx>,
+ key: ParamEnvAnd<'tcx, Self>,
+ ) -> Result<Self::QueryResponse, NoSolution> {
+ type_op_ascribe_user_type_with_span(ocx, key, None)
+ }
+}
+
+/// The core of the `type_op_ascribe_user_type` query: for diagnostics purposes in NLL HRTB errors,
+/// this query can be re-run to better track the span of the obligation cause, and improve the error
+/// message. Do not call directly unless you're in that very specific context.
+pub fn type_op_ascribe_user_type_with_span<'tcx>(
+ ocx: &ObligationCtxt<'_, 'tcx>,
+ key: ParamEnvAnd<'tcx, AscribeUserType<'tcx>>,
+ span: Option<Span>,
+) -> Result<(), NoSolution> {
+ let (param_env, AscribeUserType { mir_ty, user_ty }) = key.into_parts();
+ debug!("type_op_ascribe_user_type: mir_ty={:?} user_ty={:?}", mir_ty, user_ty);
+ let span = span.unwrap_or(DUMMY_SP);
+ match user_ty {
+ UserType::Ty(user_ty) => relate_mir_and_user_ty(ocx, param_env, span, mir_ty, user_ty)?,
+ UserType::TypeOf(def_id, user_substs) => {
+ relate_mir_and_user_substs(ocx, param_env, span, mir_ty, def_id, user_substs)?
+ }
+ };
+ Ok(())
+}
+
+#[instrument(level = "debug", skip(ocx, param_env, span))]
+fn relate_mir_and_user_ty<'tcx>(
+ ocx: &ObligationCtxt<'_, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ span: Span,
+ mir_ty: Ty<'tcx>,
+ user_ty: Ty<'tcx>,
+) -> Result<(), NoSolution> {
+ let cause = ObligationCause::dummy_with_span(span);
+ let user_ty = ocx.normalize(&cause, param_env, user_ty);
+ ocx.eq(&cause, param_env, mir_ty, user_ty)?;
+
+ // FIXME(#104764): We should check well-formedness before normalization.
+ let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(user_ty.into()));
+ ocx.register_obligation(Obligation::new(ocx.infcx.tcx, cause, param_env, predicate));
+ Ok(())
+}
+
+#[instrument(level = "debug", skip(ocx, param_env, span))]
+fn relate_mir_and_user_substs<'tcx>(
+ ocx: &ObligationCtxt<'_, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ span: Span,
+ mir_ty: Ty<'tcx>,
+ def_id: DefId,
+ user_substs: UserSubsts<'tcx>,
+) -> Result<(), NoSolution> {
+ let param_env = param_env.without_const();
+ let UserSubsts { user_self_ty, substs } = user_substs;
+ let tcx = ocx.infcx.tcx;
+ let cause = ObligationCause::dummy_with_span(span);
+
+ let ty = tcx.type_of(def_id).subst(tcx, substs);
+ let ty = ocx.normalize(&cause, param_env, ty);
+ debug!("relate_type_and_user_type: ty of def-id is {:?}", ty);
+
+ ocx.eq(&cause, param_env, mir_ty, ty)?;
+
+ // Prove the predicates coming along with `def_id`.
+ //
+ // Also, normalize the `instantiated_predicates`
+ // because otherwise we wind up with duplicate "type
+ // outlives" error messages.
+ let instantiated_predicates = tcx.predicates_of(def_id).instantiate(tcx, substs);
+
+ debug!(?instantiated_predicates);
+ for (instantiated_predicate, predicate_span) in instantiated_predicates {
+ let span = if span == DUMMY_SP { predicate_span } else { span };
+ let cause = ObligationCause::new(
+ span,
+ CRATE_DEF_ID,
+ ObligationCauseCode::AscribeUserTypeProvePredicate(predicate_span),
+ );
+ let instantiated_predicate =
+ ocx.normalize(&cause.clone(), param_env, instantiated_predicate);
+
+ ocx.register_obligation(Obligation::new(tcx, cause, param_env, instantiated_predicate));
+ }
+
+ if let Some(UserSelfTy { impl_def_id, self_ty }) = user_self_ty {
+ let self_ty = ocx.normalize(&cause, param_env, self_ty);
+ let impl_self_ty = tcx.type_of(impl_def_id).subst(tcx, substs);
+ let impl_self_ty = ocx.normalize(&cause, param_env, impl_self_ty);
+
+ ocx.eq(&cause, param_env, self_ty, impl_self_ty)?;
+ let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(impl_self_ty.into()));
+ ocx.register_obligation(Obligation::new(tcx, cause.clone(), param_env, predicate));
+ }
+
+ // In addition to proving the predicates, we have to
+ // prove that `ty` is well-formed -- this is because
+ // the WF of `ty` is predicated on the substs being
+ // well-formed, and we haven't proven *that*. We don't
+ // want to prove the WF of types from `substs` directly because they
+ // haven't been normalized.
+ //
+ // FIXME(nmatsakis): Well, perhaps we should normalize
+ // them? This would only be relevant if some input
+ // type were ill-formed but did not appear in `ty`,
+ // which...could happen with normalization...
+ let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(ty.into()));
+ ocx.register_obligation(Obligation::new(tcx, cause, param_env, predicate));
+ Ok(())
}
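Purely for orientation (this example is mine, not from the diff): the obligations built by `type_op_ascribe_user_type_with_span` originate from user-written type annotations that NLL must relate back to the MIR types, as in:

```rust
fn retag<'a>(xs: Vec<&'a str>) -> Vec<&'a str> {
    // The explicit annotation creates a user-type ascription; borrowck has to
    // prove that the MIR type of `ys` matches `Vec<&'a str>`, including any
    // predicates and well-formedness obligations attached to it.
    let ys: Vec<&'a str> = xs;
    ys
}

fn main() {
    let v = vec!["a", "b"];
    assert_eq!(retag(v).len(), 2);
}
```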
diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/eq.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/eq.rs
index 40f8ecf..f658930 100644
--- a/compiler/rustc_trait_selection/src/traits/query/type_op/eq.rs
+++ b/compiler/rustc_trait_selection/src/traits/query/type_op/eq.rs
@@ -1,5 +1,7 @@
use crate::infer::canonical::{Canonical, CanonicalQueryResponse};
+use crate::traits::ObligationCtxt;
use rustc_middle::traits::query::NoSolution;
+use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::{ParamEnvAnd, TyCtxt};
pub use rustc_middle::traits::query::type_op::Eq;
@@ -20,4 +22,12 @@ fn perform_query(
) -> Result<CanonicalQueryResponse<'tcx, ()>, NoSolution> {
tcx.type_op_eq(canonicalized)
}
+
+ fn perform_locally_in_new_solver(
+ ocx: &ObligationCtxt<'_, 'tcx>,
+ key: ParamEnvAnd<'tcx, Self>,
+ ) -> Result<Self::QueryResponse, NoSolution> {
+ ocx.eq(&ObligationCause::dummy(), key.param_env, key.value.a, key.value.b)?;
+ Ok(())
+ }
}
diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs
index 26f0d55..9989fc9 100644
--- a/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs
+++ b/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs
@@ -1,7 +1,15 @@
-use crate::infer::canonical::{Canonical, CanonicalQueryResponse};
+use crate::traits::query::NoSolution;
+use crate::traits::wf;
+use crate::traits::ObligationCtxt;
+
+use rustc_infer::infer::canonical::Canonical;
+use rustc_infer::infer::outlives::components::{push_outlives_components, Component};
use rustc_infer::traits::query::OutlivesBound;
-use rustc_middle::traits::query::NoSolution;
-use rustc_middle::ty::{self, ParamEnvAnd, Ty, TyCtxt};
+use rustc_middle::infer::canonical::CanonicalQueryResponse;
+use rustc_middle::ty::{self, ParamEnvAnd, Ty, TyCtxt, TypeVisitableExt};
+use rustc_span::def_id::CRATE_DEF_ID;
+use rustc_span::source_map::DUMMY_SP;
+use smallvec::{smallvec, SmallVec};
#[derive(Copy, Clone, Debug, HashStable, TypeFoldable, TypeVisitable, Lift)]
pub struct ImpliedOutlivesBounds<'tcx> {
@@ -39,4 +47,169 @@ fn perform_query(
tcx.implied_outlives_bounds(canonicalized)
}
+
+ fn perform_locally_in_new_solver(
+ ocx: &ObligationCtxt<'_, 'tcx>,
+ key: ParamEnvAnd<'tcx, Self>,
+ ) -> Result<Self::QueryResponse, NoSolution> {
+ compute_implied_outlives_bounds_inner(ocx, key.param_env, key.value.ty)
+ }
+}
+
+pub fn compute_implied_outlives_bounds_inner<'tcx>(
+ ocx: &ObligationCtxt<'_, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ ty: Ty<'tcx>,
+) -> Result<Vec<OutlivesBound<'tcx>>, NoSolution> {
+ let tcx = ocx.infcx.tcx;
+
+ // Sometimes when we ask what it takes for T: WF, we get back that
+ // U: WF is required; in that case, we push U onto this stack and
+ // process it next. Because the resulting predicates aren't always
+ // guaranteed to be a subset of the original type, we need to store the
+ // WF args we've computed in a set.
+ let mut checked_wf_args = rustc_data_structures::fx::FxHashSet::default();
+ let mut wf_args = vec![ty.into()];
+
+ let mut outlives_bounds: Vec<ty::OutlivesPredicate<ty::GenericArg<'tcx>, ty::Region<'tcx>>> =
+ vec![];
+
+ while let Some(arg) = wf_args.pop() {
+ if !checked_wf_args.insert(arg) {
+ continue;
+ }
+
+ // Compute the obligations for `arg` to be well-formed. If `arg` is
+ // an unresolved inference variable, just substitute an empty set
+ // -- because the return type here is going to be things we *add*
+ // to the environment, it's always ok for this set to be smaller
+ // than the ultimate set. (Note: normally there won't be
+ // unresolved inference variables here anyway, but there might be
+ // during typeck under some circumstances.)
+ //
+ // FIXME(@lcnr): It's not really "always fine", having fewer implied
+ // bounds can be backward incompatible, e.g. #101951 was caused by
+ // us not dealing with inference vars in `TypeOutlives` predicates.
+ let obligations = wf::obligations(ocx.infcx, param_env, CRATE_DEF_ID, 0, arg, DUMMY_SP)
+ .unwrap_or_default();
+
+ for obligation in obligations {
+ debug!(?obligation);
+ assert!(!obligation.has_escaping_bound_vars());
+
+ // While these predicates should all be implied by other parts of
+ // the program, they are still relevant as they may constrain
+ // inference variables, which is necessary to add the correct
+ // implied bounds in some cases, mostly when dealing with projections.
+ //
+ // Another important point here: we only register `Projection`
+ // predicates, since otherwise we might register outlives
+ // predicates containing inference variables, and we don't
+ // learn anything new from those.
+ if obligation.predicate.has_non_region_infer() {
+ match obligation.predicate.kind().skip_binder() {
+ ty::PredicateKind::Clause(ty::Clause::Projection(..))
+ | ty::PredicateKind::AliasRelate(..) => {
+ ocx.register_obligation(obligation.clone());
+ }
+ _ => {}
+ }
+ }
+
+ let pred = match obligation.predicate.kind().no_bound_vars() {
+ None => continue,
+ Some(pred) => pred,
+ };
+ match pred {
+ ty::PredicateKind::Clause(ty::Clause::Trait(..))
+ // FIXME(const_generics): Make sure that `<'a, 'b, const N: &'a &'b u32>` is sound
+ // if we ever support that
+ | ty::PredicateKind::Clause(ty::Clause::ConstArgHasType(..))
+ | ty::PredicateKind::Subtype(..)
+ | ty::PredicateKind::Coerce(..)
+ | ty::PredicateKind::Clause(ty::Clause::Projection(..))
+ | ty::PredicateKind::ClosureKind(..)
+ | ty::PredicateKind::ObjectSafe(..)
+ | ty::PredicateKind::ConstEvaluatable(..)
+ | ty::PredicateKind::ConstEquate(..)
+ | ty::PredicateKind::Ambiguous
+ | ty::PredicateKind::AliasRelate(..)
+ | ty::PredicateKind::TypeWellFormedFromEnv(..) => {}
+
+ // We need to search through *all* WellFormed predicates
+ ty::PredicateKind::WellFormed(arg) => {
+ wf_args.push(arg);
+ }
+
+ // We need to register region relationships
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(ty::OutlivesPredicate(
+ r_a,
+ r_b,
+ ))) => outlives_bounds.push(ty::OutlivesPredicate(r_a.into(), r_b)),
+
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(
+ ty_a,
+ r_b,
+ ))) => outlives_bounds.push(ty::OutlivesPredicate(ty_a.into(), r_b)),
+ }
+ }
+ }
+
+ // This call to `select_all_or_error` is necessary to constrain inference variables, which we
+ // use further down when computing the implied bounds.
+ match ocx.select_all_or_error().as_slice() {
+ [] => (),
+ _ => return Err(NoSolution),
+ }
+
+ // We lazily compute the outlives components as
+ // `select_all_or_error` constrains inference variables.
+ let implied_bounds = outlives_bounds
+ .into_iter()
+ .flat_map(|ty::OutlivesPredicate(a, r_b)| match a.unpack() {
+ ty::GenericArgKind::Lifetime(r_a) => vec![OutlivesBound::RegionSubRegion(r_b, r_a)],
+ ty::GenericArgKind::Type(ty_a) => {
+ let ty_a = ocx.infcx.resolve_vars_if_possible(ty_a);
+ let mut components = smallvec![];
+ push_outlives_components(tcx, ty_a, &mut components);
+ implied_bounds_from_components(r_b, components)
+ }
+ ty::GenericArgKind::Const(_) => unreachable!(),
+ })
+ .collect();
+
+ Ok(implied_bounds)
+}
+
+/// When we have an implied bound that `T: 'a`, we can further break
+/// this down to determine what relationships would have to hold for
+/// `T: 'a` to hold. We get to assume that the caller has validated
+/// those relationships.
+fn implied_bounds_from_components<'tcx>(
+ sub_region: ty::Region<'tcx>,
+ sup_components: SmallVec<[Component<'tcx>; 4]>,
+) -> Vec<OutlivesBound<'tcx>> {
+ sup_components
+ .into_iter()
+ .filter_map(|component| {
+ match component {
+ Component::Region(r) => Some(OutlivesBound::RegionSubRegion(sub_region, r)),
+ Component::Param(p) => Some(OutlivesBound::RegionSubParam(sub_region, p)),
+ Component::Alias(p) => Some(OutlivesBound::RegionSubAlias(sub_region, p)),
+ Component::EscapingAlias(_) =>
+ // If the projection has escaping regions, don't
+ // try to infer any implied bounds even for its
+ // free components. This is conservative, because
+ // the caller will still have to prove that those
+ // free components outlive `sub_region`. But the
+ // idea is that the WAY that the caller proves
+ // that may change in the future and we want to
+ // give ourselves room to get smarter here.
+ {
+ None
+ }
+ Component::UnresolvedInferenceVariable(..) => None,
+ }
+ })
+ .collect()
}
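As a reminder of what these bounds buy at the surface level (my own illustration, not part of this diff): well-formedness of an argument type implies a region relationship the caller never spells out.

```rust
// `&'a &'b u32` is only well-formed if `'b: 'a`, so that bound is *implied*
// inside the body of `shorten`; this is exactly the kind of `OutlivesBound`
// that `compute_implied_outlives_bounds_inner` collects from WF obligations.
fn shorten<'a, 'b>(x: &'a &'b u32) -> &'a u32 {
    *x
}

fn main() {
    let n = 5u32;
    let r = &n;
    assert_eq!(*shorten(&r), 5);
}
```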
diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs
index 6423265..642fdec 100644
--- a/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs
+++ b/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs
@@ -2,7 +2,7 @@
Canonical, CanonicalQueryResponse, OriginalQueryValues, QueryRegionConstraints,
};
use crate::infer::{InferCtxt, InferOk};
-use crate::traits::ObligationCause;
+use crate::traits::{ObligationCause, ObligationCtxt};
use rustc_errors::ErrorGuaranteed;
use rustc_infer::infer::canonical::Certainty;
use rustc_infer::traits::PredicateObligations;
@@ -23,6 +23,8 @@
pub use rustc_middle::traits::query::type_op::*;
+use self::custom::scrape_region_constraints;
+
/// "Type ops" are used in NLL to perform some particular action and
/// extract out the resulting region constraints (or an error if it
/// cannot be completed).
@@ -81,6 +83,17 @@ fn perform_query(
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Self>>,
) -> Result<CanonicalQueryResponse<'tcx, Self::QueryResponse>, NoSolution>;
+ /// In the new trait solver, we already do caching in the solver itself,
+ /// so there's no need to canonicalize and cache via the query system.
+ /// Additionally, even if we were to canonicalize, we'd still need to
+ /// make sure to feed it predefined opaque types and the defining anchor
+ /// and that would require duplicating all of the tcx queries. Instead,
+ /// just perform these ops locally.
+ fn perform_locally_in_new_solver(
+ ocx: &ObligationCtxt<'_, 'tcx>,
+ key: ParamEnvAnd<'tcx, Self>,
+ ) -> Result<Self::QueryResponse, NoSolution>;
+
fn fully_perform_into(
query_key: ParamEnvAnd<'tcx, Self>,
infcx: &InferCtxt<'tcx>,
@@ -133,6 +146,16 @@ fn fully_perform(
infcx: &InferCtxt<'tcx>,
span: Span,
) -> Result<TypeOpOutput<'tcx, Self>, ErrorGuaranteed> {
+ if infcx.tcx.trait_solver_next() {
+ return Ok(scrape_region_constraints(
+ infcx,
+ |ocx| QueryTypeOp::perform_locally_in_new_solver(ocx, self),
+ "query type op",
+ span,
+ )?
+ .0);
+ }
+
let mut region_constraints = QueryRegionConstraints::default();
let (output, error_info, mut obligations, _) =
Q::fully_perform_into(self, infcx, &mut region_constraints).map_err(|_| {
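Usage note (my reading of the change, not stated in the diff itself): the `perform_locally_in_new_solver` path is only reached when `infcx.tcx.trait_solver_next()` is true, i.e. when the unstable `-Ztrait-solver=next` mode is enabled; every other configuration keeps going through the canonicalized `fully_perform_into` query machinery below, so default behaviour is unchanged.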
diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/normalize.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/normalize.rs
index 776c74f..57ca14a 100644
--- a/compiler/rustc_trait_selection/src/traits/query/type_op/normalize.rs
+++ b/compiler/rustc_trait_selection/src/traits/query/type_op/normalize.rs
@@ -1,5 +1,7 @@
use crate::infer::canonical::{Canonical, CanonicalQueryResponse};
+use crate::traits::ObligationCtxt;
use rustc_middle::traits::query::NoSolution;
+use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::{self, Lift, ParamEnvAnd, Ty, TyCtxt, TypeVisitableExt};
use std::fmt;
@@ -22,6 +24,14 @@ fn perform_query(
) -> Result<CanonicalQueryResponse<'tcx, Self::QueryResponse>, NoSolution> {
T::type_op_method(tcx, canonicalized)
}
+
+ fn perform_locally_in_new_solver(
+ ocx: &ObligationCtxt<'_, 'tcx>,
+ key: ParamEnvAnd<'tcx, Self>,
+ ) -> Result<Self::QueryResponse, NoSolution> {
+ // FIXME(-Ztrait-solver=next): shouldn't be using old normalizer
+ Ok(ocx.normalize(&ObligationCause::dummy(), key.param_env, key.value.value))
+ }
}
pub trait Normalizable<'tcx>: fmt::Debug + TypeFoldable<TyCtxt<'tcx>> + Lift<'tcx> + Copy {
diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/outlives.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/outlives.rs
index 7ce09bb..9889426 100644
--- a/compiler/rustc_trait_selection/src/traits/query/type_op/outlives.rs
+++ b/compiler/rustc_trait_selection/src/traits/query/type_op/outlives.rs
@@ -1,6 +1,9 @@
use crate::infer::canonical::{Canonical, CanonicalQueryResponse};
-use crate::traits::query::dropck_outlives::{trivial_dropck_outlives, DropckOutlivesResult};
-use rustc_middle::traits::query::NoSolution;
+use crate::traits::query::dropck_outlives::{
+ compute_dropck_outlives_inner, trivial_dropck_outlives,
+};
+use crate::traits::ObligationCtxt;
+use rustc_middle::traits::query::{DropckOutlivesResult, NoSolution};
use rustc_middle::ty::{ParamEnvAnd, Ty, TyCtxt};
#[derive(Copy, Clone, Debug, HashStable, TypeFoldable, TypeVisitable, Lift)]
@@ -48,4 +51,11 @@ fn perform_query(
tcx.dropck_outlives(canonicalized)
}
+
+ fn perform_locally_in_new_solver(
+ ocx: &ObligationCtxt<'_, 'tcx>,
+ key: ParamEnvAnd<'tcx, Self>,
+ ) -> Result<Self::QueryResponse, NoSolution> {
+ compute_dropck_outlives_inner(ocx, key.param_env.and(key.value.dropped_ty))
+ }
}
diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs
index 7c02f36..47850bc 100644
--- a/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs
+++ b/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs
@@ -1,5 +1,8 @@
use crate::infer::canonical::{Canonical, CanonicalQueryResponse};
+use crate::traits::ObligationCtxt;
+use rustc_infer::traits::Obligation;
use rustc_middle::traits::query::NoSolution;
+use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::{self, ParamEnvAnd, TyCtxt};
pub use rustc_middle::traits::query::type_op::ProvePredicate;
@@ -36,4 +39,17 @@ fn perform_query(
) -> Result<CanonicalQueryResponse<'tcx, ()>, NoSolution> {
tcx.type_op_prove_predicate(canonicalized)
}
+
+ fn perform_locally_in_new_solver(
+ ocx: &ObligationCtxt<'_, 'tcx>,
+ key: ParamEnvAnd<'tcx, Self>,
+ ) -> Result<Self::QueryResponse, NoSolution> {
+ ocx.register_obligation(Obligation::new(
+ ocx.infcx.tcx,
+ ObligationCause::dummy(),
+ key.param_env,
+ key.value.predicate,
+ ));
+ Ok(())
+ }
}
diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/subtype.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/subtype.rs
index 2f2b931..10976d5 100644
--- a/compiler/rustc_trait_selection/src/traits/query/type_op/subtype.rs
+++ b/compiler/rustc_trait_selection/src/traits/query/type_op/subtype.rs
@@ -1,5 +1,7 @@
use crate::infer::canonical::{Canonical, CanonicalQueryResponse};
+use crate::traits::ObligationCtxt;
use rustc_middle::traits::query::NoSolution;
+use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::{ParamEnvAnd, TyCtxt};
pub use rustc_middle::traits::query::type_op::Subtype;
@@ -17,4 +19,12 @@ fn perform_query(
) -> Result<CanonicalQueryResponse<'tcx, ()>, NoSolution> {
tcx.type_op_subtype(canonicalized)
}
+
+ fn perform_locally_in_new_solver(
+ ocx: &ObligationCtxt<'_, 'tcx>,
+ key: ParamEnvAnd<'tcx, Self>,
+ ) -> Result<Self::QueryResponse, NoSolution> {
+ ocx.sub(&ObligationCause::dummy(), key.param_env, key.value.sub, key.value.sup)?;
+ Ok(())
+ }
}
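An illustration (mine, not from the diff) of the relationship the `Subtype` op establishes, which the new-solver path above now proves locally via `ocx.sub(..)`:

```rust
// `&'static str` is a subtype of `&'a str` for any `'a`, so returning the
// longer-lived reference at the shorter lifetime is accepted.
fn accept<'a>(s: &'static str) -> &'a str {
    s
}

fn main() {
    assert_eq!(accept("hi"), "hi");
}
```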
diff --git a/compiler/rustc_traits/src/dropck_outlives.rs b/compiler/rustc_traits/src/dropck_outlives.rs
index 83f6c7d..f35c14e 100644
--- a/compiler/rustc_traits/src/dropck_outlives.rs
+++ b/compiler/rustc_traits/src/dropck_outlives.rs
@@ -3,17 +3,14 @@
use rustc_infer::infer::canonical::{Canonical, QueryResponse};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::query::Providers;
+use rustc_middle::traits::query::{DropckConstraint, DropckOutlivesResult};
use rustc_middle::ty::InternalSubsts;
-use rustc_middle::ty::{self, EarlyBinder, ParamEnvAnd, Ty, TyCtxt};
-use rustc_span::source_map::{Span, DUMMY_SP};
+use rustc_middle::ty::TyCtxt;
use rustc_trait_selection::infer::InferCtxtBuilderExt;
-use rustc_trait_selection::traits::query::dropck_outlives::trivial_dropck_outlives;
use rustc_trait_selection::traits::query::dropck_outlives::{
- DropckConstraint, DropckOutlivesResult,
+ compute_dropck_outlives_inner, dtorck_constraint_for_ty_inner,
};
-use rustc_trait_selection::traits::query::normalize::QueryNormalizeExt;
use rustc_trait_selection::traits::query::{CanonicalTyGoal, NoSolution};
-use rustc_trait_selection::traits::{Normalized, ObligationCause};
pub(crate) fn provide(p: &mut Providers) {
*p = Providers { dropck_outlives, adt_dtorck_constraint, ..*p };
@@ -26,263 +23,10 @@ fn dropck_outlives<'tcx>(
debug!("dropck_outlives(goal={:#?})", canonical_goal);
tcx.infer_ctxt().enter_canonical_trait_query(&canonical_goal, |ocx, goal| {
- let tcx = ocx.infcx.tcx;
- let ParamEnvAnd { param_env, value: for_ty } = goal;
-
- let mut result = DropckOutlivesResult { kinds: vec![], overflows: vec![] };
-
- // A stack of types left to process. Each round, we pop
- // something from the stack and invoke
- // `dtorck_constraint_for_ty`. This may produce new types that
- // have to be pushed on the stack. This continues until we have explored
- // all the reachable types from the type `for_ty`.
- //
- // Example: Imagine that we have the following code:
- //
- // ```rust
- // struct A {
- // value: B,
- // children: Vec<A>,
- // }
- //
- // struct B {
- // value: u32
- // }
- //
- // fn f() {
- // let a: A = ...;
- // ..
- // } // here, `a` is dropped
- // ```
- //
- // at the point where `a` is dropped, we need to figure out
- // which types inside of `a` contain region data that may be
- // accessed by any destructors in `a`. We begin by pushing `A`
- // onto the stack, as that is the type of `a`. We will then
- // invoke `dtorck_constraint_for_ty` which will expand `A`
- // into the types of its fields `(B, Vec<A>)`. These will get
- // pushed onto the stack. Eventually, expanding `Vec<A>` will
- // lead to us trying to push `A` a second time -- to prevent
- // infinite recursion, we notice that `A` was already pushed
- // once and stop.
- let mut ty_stack = vec![(for_ty, 0)];
-
- // Set used to detect infinite recursion.
- let mut ty_set = FxHashSet::default();
-
- let cause = ObligationCause::dummy();
- let mut constraints = DropckConstraint::empty();
- while let Some((ty, depth)) = ty_stack.pop() {
- debug!(
- "{} kinds, {} overflows, {} ty_stack",
- result.kinds.len(),
- result.overflows.len(),
- ty_stack.len()
- );
- dtorck_constraint_for_ty(tcx, DUMMY_SP, for_ty, depth, ty, &mut constraints)?;
-
- // "outlives" represent types/regions that may be touched
- // by a destructor.
- result.kinds.append(&mut constraints.outlives);
- result.overflows.append(&mut constraints.overflows);
-
- // If we have even one overflow, we should stop trying to evaluate further --
- // chances are, the subsequent overflows for this evaluation won't provide useful
- // information and will just decrease the speed at which we can emit these errors
- // (since we'll be printing for just that much longer for the often enormous types
- // that result here).
- if !result.overflows.is_empty() {
- break;
- }
-
- // dtorck types are "types that will get dropped but which
- // do not themselves define a destructor", more or less. We have
- // to push them onto the stack to be expanded.
- for ty in constraints.dtorck_types.drain(..) {
- let Normalized { value: ty, obligations } =
- ocx.infcx.at(&cause, param_env).query_normalize(ty)?;
- ocx.register_obligations(obligations);
-
- debug!("dropck_outlives: ty from dtorck_types = {:?}", ty);
-
- match ty.kind() {
- // All parameters live for the duration of the
- // function.
- ty::Param(..) => {}
-
- // A projection that we couldn't resolve - it
- // might have a destructor.
- ty::Alias(..) => {
- result.kinds.push(ty.into());
- }
-
- _ => {
- if ty_set.insert(ty) {
- ty_stack.push((ty, depth + 1));
- }
- }
- }
- }
- }
-
- debug!("dropck_outlives: result = {:#?}", result);
- Ok(result)
+ compute_dropck_outlives_inner(ocx, goal)
})
}
-/// Returns a set of constraints that needs to be satisfied in
-/// order for `ty` to be valid for destruction.
-fn dtorck_constraint_for_ty<'tcx>(
- tcx: TyCtxt<'tcx>,
- span: Span,
- for_ty: Ty<'tcx>,
- depth: usize,
- ty: Ty<'tcx>,
- constraints: &mut DropckConstraint<'tcx>,
-) -> Result<(), NoSolution> {
- debug!("dtorck_constraint_for_ty({:?}, {:?}, {:?}, {:?})", span, for_ty, depth, ty);
-
- if !tcx.recursion_limit().value_within_limit(depth) {
- constraints.overflows.push(ty);
- return Ok(());
- }
-
- if trivial_dropck_outlives(tcx, ty) {
- return Ok(());
- }
-
- match ty.kind() {
- ty::Bool
- | ty::Char
- | ty::Int(_)
- | ty::Uint(_)
- | ty::Float(_)
- | ty::Str
- | ty::Never
- | ty::Foreign(..)
- | ty::RawPtr(..)
- | ty::Ref(..)
- | ty::FnDef(..)
- | ty::FnPtr(_)
- | ty::GeneratorWitness(..)
- | ty::GeneratorWitnessMIR(..) => {
- // these types never have a destructor
- }
-
- ty::Array(ety, _) | ty::Slice(ety) => {
- // single-element containers, behave like their element
- rustc_data_structures::stack::ensure_sufficient_stack(|| {
- dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, *ety, constraints)
- })?;
- }
-
- ty::Tuple(tys) => rustc_data_structures::stack::ensure_sufficient_stack(|| {
- for ty in tys.iter() {
- dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty, constraints)?;
- }
- Ok::<_, NoSolution>(())
- })?,
-
- ty::Closure(_, substs) => {
- if !substs.as_closure().is_valid() {
- // By the time this code runs, all type variables ought to
- // be fully resolved.
-
- tcx.sess.delay_span_bug(
- span,
- format!("upvar_tys for closure not found. Expected capture information for closure {ty}",),
- );
- return Err(NoSolution);
- }
-
- rustc_data_structures::stack::ensure_sufficient_stack(|| {
- for ty in substs.as_closure().upvar_tys() {
- dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty, constraints)?;
- }
- Ok::<_, NoSolution>(())
- })?
- }
-
- ty::Generator(_, substs, _movability) => {
- // rust-lang/rust#49918: types can be constructed, stored
- // in the interior, and sit idle when generator yields
- // (and is subsequently dropped).
- //
- // It would be nice to descend into interior of a
- // generator to determine what effects dropping it might
- // have (by looking at any drop effects associated with
- // its interior).
- //
- // However, the interior's representation uses things like
- // GeneratorWitness that explicitly assume they are not
- // traversed in such a manner. So instead, we will
- // simplify things for now by treating all generators as
- // if they were like trait objects, where its upvars must
- // all be alive for the generator's (potential)
- // destructor.
- //
- // In particular, skipping over `_interior` is safe
- // because any side-effects from dropping `_interior` can
- // only take place through references with lifetimes
- // derived from lifetimes attached to the upvars and resume
- // argument, and we *do* incorporate those here.
-
- if !substs.as_generator().is_valid() {
- // By the time this code runs, all type variables ought to
- // be fully resolved.
- tcx.sess.delay_span_bug(
- span,
- format!("upvar_tys for generator not found. Expected capture information for generator {ty}",),
- );
- return Err(NoSolution);
- }
-
- constraints.outlives.extend(
- substs
- .as_generator()
- .upvar_tys()
- .map(|t| -> ty::subst::GenericArg<'tcx> { t.into() }),
- );
- constraints.outlives.push(substs.as_generator().resume_ty().into());
- }
-
- ty::Adt(def, substs) => {
- let DropckConstraint { dtorck_types, outlives, overflows } =
- tcx.at(span).adt_dtorck_constraint(def.did())?;
- // FIXME: we can try to recursively `dtorck_constraint_on_ty`
- // there, but that needs some way to handle cycles.
- constraints
- .dtorck_types
- .extend(dtorck_types.iter().map(|t| EarlyBinder(*t).subst(tcx, substs)));
- constraints
- .outlives
- .extend(outlives.iter().map(|t| EarlyBinder(*t).subst(tcx, substs)));
- constraints
- .overflows
- .extend(overflows.iter().map(|t| EarlyBinder(*t).subst(tcx, substs)));
- }
-
- // Objects must be alive in order for their destructor
- // to be called.
- ty::Dynamic(..) => {
- constraints.outlives.push(ty.into());
- }
-
- // Types that can't be resolved. Pass them forward.
- ty::Alias(..) | ty::Param(..) => {
- constraints.dtorck_types.push(ty);
- }
-
- ty::Placeholder(..) | ty::Bound(..) | ty::Infer(..) | ty::Error(_) => {
- // By the time this code runs, all type variables ought to
- // be fully resolved.
- return Err(NoSolution);
- }
- }
-
- Ok(())
-}
-
/// Calculates the dtorck constraint for a type.
pub(crate) fn adt_dtorck_constraint(
tcx: TyCtxt<'_>,
@@ -311,7 +55,7 @@ pub(crate) fn adt_dtorck_constraint(
let mut result = DropckConstraint::empty();
for field in def.all_fields() {
let fty = tcx.type_of(field.did).subst_identity();
- dtorck_constraint_for_ty(tcx, span, fty, 0, fty, &mut result)?;
+ dtorck_constraint_for_ty_inner(tcx, span, fty, 0, fty, &mut result)?;
}
result.outlives.extend(tcx.destructor_constraints(def));
dedup_dtorck_constraint(&mut result);
diff --git a/compiler/rustc_traits/src/implied_outlives_bounds.rs b/compiler/rustc_traits/src/implied_outlives_bounds.rs
index 49cbf9e..959838a 100644
--- a/compiler/rustc_traits/src/implied_outlives_bounds.rs
+++ b/compiler/rustc_traits/src/implied_outlives_bounds.rs
@@ -3,18 +3,13 @@
//! [`rustc_trait_selection::traits::query::type_op::implied_outlives_bounds`].
use rustc_infer::infer::canonical::{self, Canonical};
-use rustc_infer::infer::outlives::components::{push_outlives_components, Component};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_infer::traits::query::OutlivesBound;
use rustc_middle::query::Providers;
-use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt};
-use rustc_span::def_id::CRATE_DEF_ID;
-use rustc_span::source_map::DUMMY_SP;
+use rustc_middle::ty::TyCtxt;
use rustc_trait_selection::infer::InferCtxtBuilderExt;
+use rustc_trait_selection::traits::query::type_op::implied_outlives_bounds::compute_implied_outlives_bounds_inner;
use rustc_trait_selection::traits::query::{CanonicalTyGoal, NoSolution};
-use rustc_trait_selection::traits::wf;
-use rustc_trait_selection::traits::ObligationCtxt;
-use smallvec::{smallvec, SmallVec};
pub(crate) fn provide(p: &mut Providers) {
*p = Providers { implied_outlives_bounds, ..*p };
@@ -29,164 +24,6 @@ fn implied_outlives_bounds<'tcx>(
> {
tcx.infer_ctxt().enter_canonical_trait_query(&goal, |ocx, key| {
let (param_env, ty) = key.into_parts();
- compute_implied_outlives_bounds(ocx, param_env, ty)
+ compute_implied_outlives_bounds_inner(ocx, param_env, ty)
})
}
-
-fn compute_implied_outlives_bounds<'tcx>(
- ocx: &ObligationCtxt<'_, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- ty: Ty<'tcx>,
-) -> Result<Vec<OutlivesBound<'tcx>>, NoSolution> {
- let tcx = ocx.infcx.tcx;
-
- // Sometimes when we ask what it takes for T: WF, we get back that
- // U: WF is required; in that case, we push U onto this stack and
- // process it next. Because the resulting predicates aren't always
- // guaranteed to be a subset of the original type, so we need to store the
- // WF args we've computed in a set.
- let mut checked_wf_args = rustc_data_structures::fx::FxHashSet::default();
- let mut wf_args = vec![ty.into()];
-
- let mut outlives_bounds: Vec<ty::OutlivesPredicate<ty::GenericArg<'tcx>, ty::Region<'tcx>>> =
- vec![];
-
- while let Some(arg) = wf_args.pop() {
- if !checked_wf_args.insert(arg) {
- continue;
- }
-
- // Compute the obligations for `arg` to be well-formed. If `arg` is
- // an unresolved inference variable, just substituted an empty set
- // -- because the return type here is going to be things we *add*
- // to the environment, it's always ok for this set to be smaller
- // than the ultimate set. (Note: normally there won't be
- // unresolved inference variables here anyway, but there might be
- // during typeck under some circumstances.)
- //
- // FIXME(@lcnr): It's not really "always fine", having fewer implied
- // bounds can be backward incompatible, e.g. #101951 was caused by
- // us not dealing with inference vars in `TypeOutlives` predicates.
- let obligations = wf::obligations(ocx.infcx, param_env, CRATE_DEF_ID, 0, arg, DUMMY_SP)
- .unwrap_or_default();
-
- for obligation in obligations {
- debug!(?obligation);
- assert!(!obligation.has_escaping_bound_vars());
-
- // While these predicates should all be implied by other parts of
- // the program, they are still relevant as they may constrain
- // inference variables, which is necessary to add the correct
- // implied bounds in some cases, mostly when dealing with projections.
- //
- // Another important point here: we only register `Projection`
- // predicates, since otherwise we might register outlives
- // predicates containing inference variables, and we don't
- // learn anything new from those.
- if obligation.predicate.has_non_region_infer() {
- match obligation.predicate.kind().skip_binder() {
- ty::PredicateKind::Clause(ty::Clause::Projection(..))
- | ty::PredicateKind::AliasRelate(..) => {
- ocx.register_obligation(obligation.clone());
- }
- _ => {}
- }
- }
-
- let pred = match obligation.predicate.kind().no_bound_vars() {
- None => continue,
- Some(pred) => pred,
- };
- match pred {
- ty::PredicateKind::Clause(ty::Clause::Trait(..))
- // FIXME(const_generics): Make sure that `<'a, 'b, const N: &'a &'b u32>` is sound
- // if we ever support that
- | ty::PredicateKind::Clause(ty::Clause::ConstArgHasType(..))
- | ty::PredicateKind::Subtype(..)
- | ty::PredicateKind::Coerce(..)
- | ty::PredicateKind::Clause(ty::Clause::Projection(..))
- | ty::PredicateKind::ClosureKind(..)
- | ty::PredicateKind::ObjectSafe(..)
- | ty::PredicateKind::ConstEvaluatable(..)
- | ty::PredicateKind::ConstEquate(..)
- | ty::PredicateKind::Ambiguous
- | ty::PredicateKind::AliasRelate(..)
- | ty::PredicateKind::TypeWellFormedFromEnv(..) => {}
-
- // We need to search through *all* WellFormed predicates
- ty::PredicateKind::WellFormed(arg) => {
- wf_args.push(arg);
- }
-
- // We need to register region relationships
- ty::PredicateKind::Clause(ty::Clause::RegionOutlives(ty::OutlivesPredicate(
- r_a,
- r_b,
- ))) => outlives_bounds.push(ty::OutlivesPredicate(r_a.into(), r_b)),
-
- ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(
- ty_a,
- r_b,
- ))) => outlives_bounds.push(ty::OutlivesPredicate(ty_a.into(), r_b)),
- }
- }
- }
-
- // This call to `select_all_or_error` is necessary to constrain inference variables, which we
- // use further down when computing the implied bounds.
- match ocx.select_all_or_error().as_slice() {
- [] => (),
- _ => return Err(NoSolution),
- }
-
- // We lazily compute the outlives components as
- // `select_all_or_error` constrains inference variables.
- let implied_bounds = outlives_bounds
- .into_iter()
- .flat_map(|ty::OutlivesPredicate(a, r_b)| match a.unpack() {
- ty::GenericArgKind::Lifetime(r_a) => vec![OutlivesBound::RegionSubRegion(r_b, r_a)],
- ty::GenericArgKind::Type(ty_a) => {
- let ty_a = ocx.infcx.resolve_vars_if_possible(ty_a);
- let mut components = smallvec![];
- push_outlives_components(tcx, ty_a, &mut components);
- implied_bounds_from_components(r_b, components)
- }
- ty::GenericArgKind::Const(_) => unreachable!(),
- })
- .collect();
-
- Ok(implied_bounds)
-}
-
-/// When we have an implied bound that `T: 'a`, we can further break
-/// this down to determine what relationships would have to hold for
-/// `T: 'a` to hold. We get to assume that the caller has validated
-/// those relationships.
-fn implied_bounds_from_components<'tcx>(
- sub_region: ty::Region<'tcx>,
- sup_components: SmallVec<[Component<'tcx>; 4]>,
-) -> Vec<OutlivesBound<'tcx>> {
- sup_components
- .into_iter()
- .filter_map(|component| {
- match component {
- Component::Region(r) => Some(OutlivesBound::RegionSubRegion(sub_region, r)),
- Component::Param(p) => Some(OutlivesBound::RegionSubParam(sub_region, p)),
- Component::Alias(p) => Some(OutlivesBound::RegionSubAlias(sub_region, p)),
- Component::EscapingAlias(_) =>
- // If the projection has escaping regions, don't
- // try to infer any implied bounds even for its
- // free components. This is conservative, because
- // the caller will still have to prove that those
- // free components outlive `sub_region`. But the
- // idea is that the WAY that the caller proves
- // that may change in the future and we want to
- // give ourselves room to get smarter here.
- {
- None
- }
- Component::UnresolvedInferenceVariable(..) => None,
- }
- })
- .collect()
-}
diff --git a/compiler/rustc_traits/src/lib.rs b/compiler/rustc_traits/src/lib.rs
index b0f9c57..907e2d3 100644
--- a/compiler/rustc_traits/src/lib.rs
+++ b/compiler/rustc_traits/src/lib.rs
@@ -21,7 +21,8 @@
mod normalize_projection_ty;
mod type_op;
-pub use type_op::{type_op_ascribe_user_type_with_span, type_op_prove_predicate_with_cause};
+pub use rustc_trait_selection::traits::query::type_op::ascribe_user_type::type_op_ascribe_user_type_with_span;
+pub use type_op::type_op_prove_predicate_with_cause;
use rustc_middle::query::Providers;
diff --git a/compiler/rustc_traits/src/type_op.rs b/compiler/rustc_traits/src/type_op.rs
index faf9851..9904acb 100644
--- a/compiler/rustc_traits/src/type_op.rs
+++ b/compiler/rustc_traits/src/type_op.rs
@@ -1,17 +1,15 @@
-use rustc_hir as hir;
use rustc_infer::infer::canonical::{Canonical, QueryResponse};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::query::Providers;
use rustc_middle::traits::query::NoSolution;
-use rustc_middle::traits::{DefiningAnchor, ObligationCauseCode};
-use rustc_middle::ty::{self, FnSig, Lift, PolyFnSig, Ty, TyCtxt, TypeFoldable};
+use rustc_middle::traits::DefiningAnchor;
+use rustc_middle::ty::{FnSig, Lift, PolyFnSig, Ty, TyCtxt, TypeFoldable};
use rustc_middle::ty::{ParamEnvAnd, Predicate};
-use rustc_middle::ty::{UserSelfTy, UserSubsts, UserType};
-use rustc_span::def_id::CRATE_DEF_ID;
-use rustc_span::{Span, DUMMY_SP};
use rustc_trait_selection::infer::InferCtxtBuilderExt;
use rustc_trait_selection::traits::query::normalize::QueryNormalizeExt;
-use rustc_trait_selection::traits::query::type_op::ascribe_user_type::AscribeUserType;
+use rustc_trait_selection::traits::query::type_op::ascribe_user_type::{
+ type_op_ascribe_user_type_with_span, AscribeUserType,
+};
use rustc_trait_selection::traits::query::type_op::eq::Eq;
use rustc_trait_selection::traits::query::type_op::normalize::Normalize;
use rustc_trait_selection::traits::query::type_op::prove_predicate::ProvePredicate;
@@ -42,111 +40,6 @@ fn type_op_ascribe_user_type<'tcx>(
})
}
-/// The core of the `type_op_ascribe_user_type` query: for diagnostics purposes in NLL HRTB errors,
-/// this query can be re-run to better track the span of the obligation cause, and improve the error
-/// message. Do not call directly unless you're in that very specific context.
-pub fn type_op_ascribe_user_type_with_span<'tcx>(
- ocx: &ObligationCtxt<'_, 'tcx>,
- key: ParamEnvAnd<'tcx, AscribeUserType<'tcx>>,
- span: Option<Span>,
-) -> Result<(), NoSolution> {
- let (param_env, AscribeUserType { mir_ty, user_ty }) = key.into_parts();
- debug!("type_op_ascribe_user_type: mir_ty={:?} user_ty={:?}", mir_ty, user_ty);
- let span = span.unwrap_or(DUMMY_SP);
- match user_ty {
- UserType::Ty(user_ty) => relate_mir_and_user_ty(ocx, param_env, span, mir_ty, user_ty)?,
- UserType::TypeOf(def_id, user_substs) => {
- relate_mir_and_user_substs(ocx, param_env, span, mir_ty, def_id, user_substs)?
- }
- };
- Ok(())
-}
-
-#[instrument(level = "debug", skip(ocx, param_env, span))]
-fn relate_mir_and_user_ty<'tcx>(
- ocx: &ObligationCtxt<'_, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- span: Span,
- mir_ty: Ty<'tcx>,
- user_ty: Ty<'tcx>,
-) -> Result<(), NoSolution> {
- let cause = ObligationCause::dummy_with_span(span);
- let user_ty = ocx.normalize(&cause, param_env, user_ty);
- ocx.eq(&cause, param_env, mir_ty, user_ty)?;
-
- // FIXME(#104764): We should check well-formedness before normalization.
- let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(user_ty.into()));
- ocx.register_obligation(Obligation::new(ocx.infcx.tcx, cause, param_env, predicate));
- Ok(())
-}
-
-#[instrument(level = "debug", skip(ocx, param_env, span))]
-fn relate_mir_and_user_substs<'tcx>(
- ocx: &ObligationCtxt<'_, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- span: Span,
- mir_ty: Ty<'tcx>,
- def_id: hir::def_id::DefId,
- user_substs: UserSubsts<'tcx>,
-) -> Result<(), NoSolution> {
- let param_env = param_env.without_const();
- let UserSubsts { user_self_ty, substs } = user_substs;
- let tcx = ocx.infcx.tcx;
- let cause = ObligationCause::dummy_with_span(span);
-
- let ty = tcx.type_of(def_id).subst(tcx, substs);
- let ty = ocx.normalize(&cause, param_env, ty);
- debug!("relate_type_and_user_type: ty of def-id is {:?}", ty);
-
- ocx.eq(&cause, param_env, mir_ty, ty)?;
-
- // Prove the predicates coming along with `def_id`.
- //
- // Also, normalize the `instantiated_predicates`
- // because otherwise we wind up with duplicate "type
- // outlives" error messages.
- let instantiated_predicates = tcx.predicates_of(def_id).instantiate(tcx, substs);
-
- debug!(?instantiated_predicates);
- for (instantiated_predicate, predicate_span) in instantiated_predicates {
- let span = if span == DUMMY_SP { predicate_span } else { span };
- let cause = ObligationCause::new(
- span,
- CRATE_DEF_ID,
- ObligationCauseCode::AscribeUserTypeProvePredicate(predicate_span),
- );
- let instantiated_predicate =
- ocx.normalize(&cause.clone(), param_env, instantiated_predicate);
-
- ocx.register_obligation(Obligation::new(tcx, cause, param_env, instantiated_predicate));
- }
-
- if let Some(UserSelfTy { impl_def_id, self_ty }) = user_self_ty {
- let self_ty = ocx.normalize(&cause, param_env, self_ty);
- let impl_self_ty = tcx.type_of(impl_def_id).subst(tcx, substs);
- let impl_self_ty = ocx.normalize(&cause, param_env, impl_self_ty);
-
- ocx.eq(&cause, param_env, self_ty, impl_self_ty)?;
- let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(impl_self_ty.into()));
- ocx.register_obligation(Obligation::new(tcx, cause.clone(), param_env, predicate));
- }
-
- // In addition to proving the predicates, we have to
- // prove that `ty` is well-formed -- this is because
- // the WF of `ty` is predicated on the substs being
- // well-formed, and we haven't proven *that*. We don't
- // want to prove the WF of types from `substs` directly because they
- // haven't been normalized.
- //
- // FIXME(nmatsakis): Well, perhaps we should normalize
- // them? This would only be relevant if some input
- // type were ill-formed but did not appear in `ty`,
- // which...could happen with normalization...
- let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(ty.into()));
- ocx.register_obligation(Obligation::new(tcx, cause, param_env, predicate));
- Ok(())
-}
-
fn type_op_eq<'tcx>(
tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Eq<'tcx>>>,
diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs
index 498fbd9..c524d4c 100644
--- a/library/alloc/src/string.rs
+++ b/library/alloc/src/string.rs
@@ -1851,7 +1851,7 @@ pub fn into_boxed_str(self) -> Box<str> {
}
/// Consumes and leaks the `String`, returning a mutable reference to the contents,
- /// `&'static mut str`.
+ /// `&'a mut str`.
///
/// This is mainly useful for data that lives for the remainder of
/// the program's life. Dropping the returned reference will cause a memory
@@ -1874,7 +1874,7 @@ pub fn into_boxed_str(self) -> Box<str> {
/// ```
#[unstable(feature = "string_leak", issue = "102929")]
#[inline]
- pub fn leak(self) -> &'static mut str {
+ pub fn leak<'a>(self) -> &'a mut str {
let slice = self.vec.leak();
unsafe { from_utf8_unchecked_mut(slice) }
}
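
As context for the `String::leak` signature change above, a minimal caller sketch, assuming a nightly toolchain with the (still unstable) `string_leak` feature; `'static` remains a valid choice for the now caller-chosen lifetime:

```rust
#![feature(string_leak)]

fn main() {
    // The lifetime is now picked by the caller; 'static still works for data
    // that is intentionally leaked for the rest of the program.
    let leaked: &'static mut str = String::from("hello").leak();
    leaked.make_ascii_uppercase();
    assert_eq!(leaked, "HELLO");
}
```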
diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index 82f30a2..47661a3 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -2662,7 +2662,6 @@ fn clone_from(&mut self, other: &Self) {
/// as required by the `core::borrow::Borrow` implementation.
///
/// ```
-/// #![feature(build_hasher_simple_hash_one)]
/// use std::hash::BuildHasher;
///
/// let b = std::collections::hash_map::RandomState::new();
diff --git a/library/core/src/alloc/global.rs b/library/core/src/alloc/global.rs
index 18da704..c582111 100644
--- a/library/core/src/alloc/global.rs
+++ b/library/core/src/alloc/global.rs
@@ -235,7 +235,8 @@ unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
/// * `new_size` must be greater than zero.
///
/// * `new_size`, when rounded up to the nearest multiple of `layout.align()`,
- /// must not overflow (i.e., the rounded value must be less than `usize::MAX`).
+ /// must not overflow `isize` (i.e., the rounded value must be less than or
+ /// equal to `isize::MAX`).
///
/// (Extension subtraits might provide more specific bounds on
/// behavior, e.g., guarantee a sentinel address or a null pointer
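
The reworded `realloc` safety clause above matches the bound that `Layout` itself enforces; a small illustration (stable API, not part of this diff):

```rust
use std::alloc::Layout;

fn main() {
    // Layout::from_size_align rejects sizes that, once rounded up to the
    // alignment, would exceed isize::MAX -- the same condition the realloc
    // documentation now spells out.
    assert!(Layout::from_size_align(isize::MAX as usize, 8).is_err());
    assert!(Layout::from_size_align(4096, 8).is_ok());
}
```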
diff --git a/library/core/src/array/mod.rs b/library/core/src/array/mod.rs
index bdb4c97..fec9232 100644
--- a/library/core/src/array/mod.rs
+++ b/library/core/src/array/mod.rs
@@ -204,6 +204,7 @@ fn borrow_mut(&mut self) -> &mut [T] {
{
type Error = TryFromSliceError;
+ #[inline]
fn try_from(slice: &[T]) -> Result<[T; N], TryFromSliceError> {
<&Self>::try_from(slice).map(|r| *r)
}
@@ -228,6 +229,7 @@ fn borrow_mut(&mut self) -> &mut [T] {
{
type Error = TryFromSliceError;
+ #[inline]
fn try_from(slice: &mut [T]) -> Result<[T; N], TryFromSliceError> {
<Self>::try_from(&*slice)
}
@@ -249,6 +251,7 @@ fn borrow_mut(&mut self) -> &mut [T] {
impl<'a, T, const N: usize> TryFrom<&'a [T]> for &'a [T; N] {
type Error = TryFromSliceError;
+ #[inline]
fn try_from(slice: &'a [T]) -> Result<&'a [T; N], TryFromSliceError> {
if slice.len() == N {
let ptr = slice.as_ptr() as *const [T; N];
@@ -276,6 +279,7 @@ fn borrow_mut(&mut self) -> &mut [T] {
impl<'a, T, const N: usize> TryFrom<&'a mut [T]> for &'a mut [T; N] {
type Error = TryFromSliceError;
+ #[inline]
fn try_from(slice: &'a mut [T]) -> Result<&'a mut [T; N], TryFromSliceError> {
if slice.len() == N {
let ptr = slice.as_mut_ptr() as *mut [T; N];
@@ -291,7 +295,6 @@ fn borrow_mut(&mut self) -> &mut [T] {
/// as required by the `Borrow` implementation.
///
/// ```
-/// #![feature(build_hasher_simple_hash_one)]
/// use std::hash::BuildHasher;
///
/// let b = std::collections::hash_map::RandomState::new();
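
The `TryFrom` impls gaining `#[inline]` above are the usual slice-to-array conversions; a short usage sketch for reference (stable APIs, not part of this diff):

```rust
fn main() {
    let v = vec![1u8, 2, 3, 4];

    // Slice -> owned array (copies the elements; requires T: Copy).
    let arr: [u8; 4] = <[u8; 4]>::try_from(&v[..]).unwrap();
    assert_eq!(arr, [1, 2, 3, 4]);

    // Slice -> array reference (no copy, only a length check).
    let first_two: &[u8; 2] = <&[u8; 2]>::try_from(&v[..2]).unwrap();
    assert_eq!(first_two, &[1, 2]);

    // A length mismatch yields TryFromSliceError.
    assert!(<[u8; 3]>::try_from(&v[..]).is_err());
}
```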
diff --git a/library/core/src/hash/mod.rs b/library/core/src/hash/mod.rs
index a73b5b6..ca7c077 100644
--- a/library/core/src/hash/mod.rs
+++ b/library/core/src/hash/mod.rs
@@ -674,8 +674,6 @@ pub trait BuildHasher {
/// # Example
///
/// ```
- /// #![feature(build_hasher_simple_hash_one)]
- ///
/// use std::cmp::{max, min};
/// use std::hash::{BuildHasher, Hash, Hasher};
/// struct OrderAmbivalentPair<T: Ord>(T, T);
@@ -697,7 +695,7 @@ pub trait BuildHasher {
/// bh.hash_one(&OrderAmbivalentPair(2, 10))
/// );
/// ```
- #[unstable(feature = "build_hasher_simple_hash_one", issue = "86161")]
+ #[stable(feature = "build_hasher_simple_hash_one", since = "CURRENT_RUSTC_VERSION")]
fn hash_one<T: Hash>(&self, x: T) -> u64
where
Self: Sized,
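
With `BuildHasher::hash_one` stabilized above, callers no longer need a feature gate; a minimal sketch of what that enables, on a toolchain where this stabilization has landed:

```rust
use std::collections::hash_map::RandomState;
use std::hash::BuildHasher;

fn main() {
    let s = RandomState::new();
    // hash_one builds a fresh Hasher from this BuildHasher, hashes one value,
    // and returns the finished u64.
    let a = s.hash_one("hello");
    let b = s.hash_one("hello");
    assert_eq!(a, b); // same BuildHasher and input => same hash
}
```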
diff --git a/library/core/src/macros/mod.rs b/library/core/src/macros/mod.rs
index b24882d..c4134db 100644
--- a/library/core/src/macros/mod.rs
+++ b/library/core/src/macros/mod.rs
@@ -1427,7 +1427,7 @@ macro_rules! include {
#[rustc_builtin_macro]
#[macro_export]
#[rustc_diagnostic_item = "assert_macro"]
- #[allow_internal_unstable(core_panic, edition_panic)]
+ #[allow_internal_unstable(core_panic, edition_panic, generic_assert_internals)]
macro_rules! assert {
($cond:expr $(,)?) => {{ /* compiler built-in */ }};
($cond:expr, $($arg:tt)+) => {{ /* compiler built-in */ }};
diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs
index bd1b16e..1f19555 100644
--- a/library/core/src/slice/mod.rs
+++ b/library/core/src/slice/mod.rs
@@ -3005,8 +3005,9 @@ pub fn sort_unstable_by_key<K, F>(&mut self, mut f: F)
///
/// # Current implementation
///
- /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
- /// used for [`sort_unstable`].
+ /// The current algorithm is an introselect implementation based on Pattern Defeating Quicksort, which is also
+ /// the basis for [`sort_unstable`]. The fallback algorithm is Median of Medians using Tukey's Ninther for
+ /// pivot selection, which guarantees linear runtime for all inputs.
///
/// [`sort_unstable`]: slice::sort_unstable
///
@@ -3056,8 +3057,9 @@ pub fn select_nth_unstable(&mut self, index: usize) -> (&mut [T], &mut T, &mut [
///
/// # Current implementation
///
- /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
- /// used for [`sort_unstable`].
+ /// The current algorithm is an introselect implementation based on Pattern Defeating Quicksort, which is also
+ /// the basis for [`sort_unstable`]. The fallback algorithm is Median of Medians using Tukey's Ninther for
+ /// pivot selection, which guarantees linear runtime for all inputs.
///
/// [`sort_unstable`]: slice::sort_unstable
///
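
The documentation change above only describes the algorithm; the observable contract of `select_nth_unstable` is unchanged, as in this small example (stable API, not part of this diff):

```rust
fn main() {
    let mut v = [-5i32, 4, 1, -3, 2];
    // Place the element of sorted rank 2 (the median here) at index 2;
    // the neighbours are only partitioned around it, not sorted.
    let (before, median, after) = v.select_nth_unstable(2);
    assert_eq!(*median, 1);
    assert!(before.iter().all(|&x| x <= 1));
    assert!(after.iter().all(|&x| x >= 1));
}
```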
diff --git a/library/std/src/panicking.rs b/library/std/src/panicking.rs
index 6d59266..a6a3704 100644
--- a/library/std/src/panicking.rs
+++ b/library/std/src/panicking.rs
@@ -298,8 +298,18 @@ pub mod panic_count {
pub const ALWAYS_ABORT_FLAG: usize = 1 << (usize::BITS - 1);
- // Panic count for the current thread.
- thread_local! { static LOCAL_PANIC_COUNT: Cell<usize> = const { Cell::new(0) } }
+ /// A reason for forcing an immediate abort on panic.
+ #[derive(Debug)]
+ pub enum MustAbort {
+ AlwaysAbort,
+ PanicInHook,
+ }
+
+ // Panic count for the current thread and whether a panic hook is currently
+ // being executed.
+ thread_local! {
+ static LOCAL_PANIC_COUNT: Cell<(usize, bool)> = const { Cell::new((0, false)) }
+ }
// Sum of panic counts from all threads. The purpose of this is to have
// a fast path in `count_is_zero` (which is used by `panicking`). In any particular
@@ -328,34 +338,39 @@ pub mod panic_count {
// panicking thread consumes at least 2 bytes of address space.
static GLOBAL_PANIC_COUNT: AtomicUsize = AtomicUsize::new(0);
- // Return the state of the ALWAYS_ABORT_FLAG and number of panics.
+ // Increases the global and local panic count, and returns whether an
+ // immediate abort is required.
//
- // If ALWAYS_ABORT_FLAG is not set, the number is determined on a per-thread
- // base (stored in LOCAL_PANIC_COUNT), i.e. it is the amount of recursive calls
- // of the calling thread.
- // If ALWAYS_ABORT_FLAG is set, the number equals the *global* number of panic
- // calls. See above why LOCAL_PANIC_COUNT is not used.
- pub fn increase() -> (bool, usize) {
+ // This also updates thread-local state to keep track of whether a panic
+ // hook is currently executing.
+ pub fn increase(run_panic_hook: bool) -> Option<MustAbort> {
let global_count = GLOBAL_PANIC_COUNT.fetch_add(1, Ordering::Relaxed);
- let must_abort = global_count & ALWAYS_ABORT_FLAG != 0;
- let panics = if must_abort {
- global_count & !ALWAYS_ABORT_FLAG
- } else {
- LOCAL_PANIC_COUNT.with(|c| {
- let next = c.get() + 1;
- c.set(next);
- next
- })
- };
- (must_abort, panics)
+ if global_count & ALWAYS_ABORT_FLAG != 0 {
+ return Some(MustAbort::AlwaysAbort);
+ }
+
+ LOCAL_PANIC_COUNT.with(|c| {
+ let (count, in_panic_hook) = c.get();
+ if in_panic_hook {
+ return Some(MustAbort::PanicInHook);
+ }
+ c.set((count + 1, run_panic_hook));
+ None
+ })
+ }
+
+ pub fn finished_panic_hook() {
+ LOCAL_PANIC_COUNT.with(|c| {
+ let (count, _) = c.get();
+ c.set((count, false));
+ });
}
pub fn decrease() {
GLOBAL_PANIC_COUNT.fetch_sub(1, Ordering::Relaxed);
LOCAL_PANIC_COUNT.with(|c| {
- let next = c.get() - 1;
- c.set(next);
- next
+ let (count, _) = c.get();
+ c.set((count - 1, false));
});
}
@@ -366,7 +381,7 @@ pub fn set_always_abort() {
// Disregards ALWAYS_ABORT_FLAG
#[must_use]
pub fn get_count() -> usize {
- LOCAL_PANIC_COUNT.with(|c| c.get())
+ LOCAL_PANIC_COUNT.with(|c| c.get().0)
}
// Disregards ALWAYS_ABORT_FLAG
@@ -394,7 +409,7 @@ pub fn count_is_zero() -> bool {
#[inline(never)]
#[cold]
fn is_zero_slow_path() -> bool {
- LOCAL_PANIC_COUNT.with(|c| c.get() == 0)
+ LOCAL_PANIC_COUNT.with(|c| c.get().0 == 0)
}
}
@@ -655,23 +670,22 @@ fn rust_panic_with_hook(
location: &Location<'_>,
can_unwind: bool,
) -> ! {
- let (must_abort, panics) = panic_count::increase();
+ let must_abort = panic_count::increase(true);
- // If this is the third nested call (e.g., panics == 2, this is 0-indexed),
- // the panic hook probably triggered the last panic, otherwise the
- // double-panic check would have aborted the process. In this case abort the
- // process real quickly as we don't want to try calling it again as it'll
- // probably just panic again.
- if must_abort || panics > 2 {
- if panics > 2 {
- // Don't try to print the message in this case
- // - perhaps that is causing the recursive panics.
- rtprintpanic!("thread panicked while processing panic. aborting.\n");
- } else {
- // Unfortunately, this does not print a backtrace, because creating
- // a `Backtrace` will allocate, which we must to avoid here.
- let panicinfo = PanicInfo::internal_constructor(message, location, can_unwind);
- rtprintpanic!("{panicinfo}\npanicked after panic::always_abort(), aborting.\n");
+ // Check if we need to abort immediately.
+ if let Some(must_abort) = must_abort {
+ match must_abort {
+ panic_count::MustAbort::PanicInHook => {
+ // Don't try to print the message in this case
+ // - perhaps that is causing the recursive panics.
+ rtprintpanic!("thread panicked while processing panic. aborting.\n");
+ }
+ panic_count::MustAbort::AlwaysAbort => {
+ // Unfortunately, this does not print a backtrace, because creating
+ // a `Backtrace` will allocate, which we must to avoid here.
+ let panicinfo = PanicInfo::internal_constructor(message, location, can_unwind);
+ rtprintpanic!("{panicinfo}\npanicked after panic::always_abort(), aborting.\n");
+ }
}
crate::sys::abort_internal();
}
@@ -697,16 +711,16 @@ fn rust_panic_with_hook(
};
drop(hook);
- if panics > 1 || !can_unwind {
- // If a thread panics while it's already unwinding then we
- // have limited options. Currently our preference is to
- // just abort. In the future we may consider resuming
- // unwinding or otherwise exiting the thread cleanly.
- if !can_unwind {
- rtprintpanic!("thread caused non-unwinding panic. aborting.\n");
- } else {
- rtprintpanic!("thread panicked while panicking. aborting.\n");
- }
+ // Indicate that we have finished executing the panic hook. After this point
+ // it is fine if there is a panic while executing destructors, as long as it
+ // is contained within a `catch_unwind`.
+ panic_count::finished_panic_hook();
+
+ if !can_unwind {
+ // If a thread panics while running destructors or tries to unwind
+ // through a nounwind function (e.g. extern "C") then we cannot continue
+ // unwinding and have to abort immediately.
+ rtprintpanic!("thread caused non-unwinding panic. aborting.\n");
crate::sys::abort_internal();
}
@@ -716,7 +730,7 @@ fn rust_panic_with_hook(
/// This is the entry point for `resume_unwind`.
/// It just forwards the payload to the panic runtime.
pub fn rust_panic_without_hook(payload: Box<dyn Any + Send>) -> ! {
- panic_count::increase();
+ panic_count::increase(false);
struct RewrapBox(Box<dyn Any + Send>);
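
A sketch of the user-visible effect of the panic-count rework above: a panic raised during unwinding but fully contained in `catch_unwind` no longer aborts, because an abort is now forced only when a panic reaches the hook while the hook itself is running (or after `always_abort`). This is an illustration written for this note, not a test from the diff:

```rust
use std::panic;

struct CatchInDrop;

impl Drop for CatchInDrop {
    fn drop(&mut self) {
        // Runs while the outer panic is unwinding. Under the old per-thread
        // panic *count*, this second panic aborted the process even though it
        // is caught; with hook tracking it is allowed.
        let _ = panic::catch_unwind(|| panic!("panic while unwinding"));
    }
}

fn main() {
    let outer = panic::catch_unwind(|| {
        let _guard = CatchInDrop;
        panic!("outer panic");
    });
    assert!(outer.is_err());
}
```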
diff --git a/library/test/src/options.rs b/library/test/src/options.rs
index 75ec0b6..3eaad59 100644
--- a/library/test/src/options.rs
+++ b/library/test/src/options.rs
@@ -16,19 +16,21 @@ pub enum ShouldPanic {
}
/// Whether console output should be colored or not
-#[derive(Copy, Clone, Debug)]
+#[derive(Copy, Clone, Default, Debug)]
pub enum ColorConfig {
+ #[default]
AutoColor,
AlwaysColor,
NeverColor,
}
/// Format of the test results output
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum OutputFormat {
/// Verbose output
Pretty,
/// Quiet output
+ #[default]
Terse,
/// JSON output
Json,
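
The `Default` derives added above rely on the standard `#[default]` variant attribute (stable since Rust 1.62); the pattern in isolation, as a stand-alone copy rather than the libtest type itself:

```rust
#[allow(dead_code)] // only the default variant is constructed in this demo
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
enum OutputFormat {
    Pretty,
    #[default]
    Terse,
    Json,
}

fn main() {
    assert_eq!(OutputFormat::default(), OutputFormat::Terse);
}
```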
diff --git a/src/bootstrap/Cargo.lock b/src/bootstrap/Cargo.lock
index 8f8778e..311ac17 100644
--- a/src/bootstrap/Cargo.lock
+++ b/src/bootstrap/Cargo.lock
@@ -58,7 +58,6 @@
"once_cell",
"opener",
"pretty_assertions",
- "semver",
"serde",
"serde_derive",
"serde_json",
@@ -647,12 +646,6 @@
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
-name = "semver"
-version = "1.0.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
-
-[[package]]
name = "serde"
version = "1.0.160"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml
index 367c619..70ade77 100644
--- a/src/bootstrap/Cargo.toml
+++ b/src/bootstrap/Cargo.toml
@@ -57,7 +57,6 @@
sysinfo = { version = "0.26.0", optional = true }
clap = { version = "4.2.4", default-features = false, features = ["std", "usage", "help", "derive", "error-context"] }
clap_complete = "4.2.2"
-semver = "1.0.17"
# Solaris doesn't support flock() and thus fd-lock is not option now
[target.'cfg(not(target_os = "solaris"))'.dependencies]
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
index cf7c659..2fa4455 100644
--- a/src/bootstrap/builder.rs
+++ b/src/bootstrap/builder.rs
@@ -264,7 +264,7 @@ fn intersection_removing_matches(
/// A convenience wrapper for Steps which know they have no aliases and all their sets contain only a single path.
///
- /// This can be used with [`ShouldRun::krate`], [`ShouldRun::path`], or [`ShouldRun::alias`].
+ /// This can be used with [`ShouldRun::crate_or_deps`], [`ShouldRun::path`], or [`ShouldRun::alias`].
#[track_caller]
pub fn assert_single_path(&self) -> &TaskPath {
match self {
@@ -689,7 +689,8 @@ macro_rules! describe {
tool::Miri,
tool::CargoMiri,
llvm::Lld,
- llvm::CrtBeginEnd
+ llvm::CrtBeginEnd,
+ tool::RustdocGUITest,
),
Kind::Check | Kind::Clippy | Kind::Fix => describe!(
check::Std,
@@ -787,6 +788,7 @@ macro_rules! describe {
doc::EditionGuide,
doc::StyleGuide,
doc::Tidy,
+ doc::Bootstrap,
),
Kind::Dist => describe!(
dist::Docs,
@@ -1915,10 +1917,10 @@ pub fn cargo(
}
// For `cargo doc` invocations, make rustdoc print the Rust version into the docs
- // This replaces spaces with newlines because RUSTDOCFLAGS does not
+ // This replaces spaces with tabs because RUSTDOCFLAGS does not
// support arguments with regular spaces. Hopefully someday Cargo will
// have space support.
- let rust_version = self.rust_version().replace(' ', "\n");
+ let rust_version = self.rust_version().replace(' ', "\t");
rustdocflags.arg("--crate-version").arg(&rust_version);
// Environment variables *required* throughout the build
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
index e192cda..41aca02 100644
--- a/src/bootstrap/config.rs
+++ b/src/bootstrap/config.rs
@@ -24,7 +24,6 @@
use crate::flags::{Color, Flags, Warnings};
use crate::util::{exe, output, t};
use once_cell::sync::OnceCell;
-use semver::Version;
use serde::{Deserialize, Deserializer};
use serde_derive::Deserialize;
@@ -1118,7 +1117,6 @@ fn get_table(option: &str) -> Result<TomlConfig, toml::de::Error> {
config.download_beta_toolchain();
config.out.join(config.build.triple).join("stage0/bin/rustc")
});
-
config.initial_cargo = build
.cargo
.map(|cargo| {
@@ -1780,42 +1778,6 @@ pub fn default_codegen_backend(&self) -> Option<Interned<String>> {
self.rust_codegen_backends.get(0).cloned()
}
- pub fn check_build_rustc_version(&self) {
- if self.dry_run() {
- return;
- }
-
- // check rustc version is same or lower with 1 apart from the building one
- let mut cmd = Command::new(&self.initial_rustc);
- cmd.arg("--version");
- let rustc_output = output(&mut cmd)
- .lines()
- .next()
- .unwrap()
- .split(' ')
- .nth(1)
- .unwrap()
- .split('-')
- .next()
- .unwrap()
- .to_owned();
- let rustc_version = Version::parse(&rustc_output.trim()).unwrap();
- let source_version =
- Version::parse(&fs::read_to_string(self.src.join("src/version")).unwrap().trim())
- .unwrap();
- if !(source_version == rustc_version
- || (source_version.major == rustc_version.major
- && source_version.minor == rustc_version.minor + 1))
- {
- let prev_version = format!("{}.{}.x", source_version.major, source_version.minor - 1);
- eprintln!(
- "Unexpected rustc version: {}, we should use {}/{} to build source with {}",
- rustc_version, prev_version, source_version, source_version
- );
- crate::detail_exit(1);
- }
- }
-
/// Returns the commit to download, or `None` if we shouldn't download CI artifacts.
fn download_ci_rustc_commit(&self, download_rustc: Option<StringOrBool>) -> Option<String> {
// If `download-rustc` is not set, default to rebuilding.
diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs
index 8f5d9bb..b52c3b6 100644
--- a/src/bootstrap/doc.rs
+++ b/src/bootstrap/doc.rs
@@ -839,6 +839,8 @@ fn run(self, builder: &Builder<'_>) {
)+
cargo.rustdocflag("--document-private-items");
+ // Since we always pass --document-private-items, there's no need to warn about linking to private items.
+ cargo.rustdocflag("-Arustdoc::private-intra-doc-links");
cargo.rustdocflag("--enable-index-page");
cargo.rustdocflag("--show-type-layout");
cargo.rustdocflag("--generate-link-to-definition");
@@ -882,7 +884,8 @@ fn run(self, builder: &Builder<'_>) {
// "cargo-credential-wincred",
]
);
-tool_doc!(Tidy, "tidy", "src/tools/tidy", ["tidy"]);
+tool_doc!(Tidy, "tidy", "src/tools/tidy", rustc_tool = false, ["tidy"]);
+tool_doc!(Bootstrap, "bootstrap", "src/bootstrap", rustc_tool = false, ["bootstrap"]);
#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct ErrorIndex {
diff --git a/src/bootstrap/download.rs b/src/bootstrap/download.rs
index 25df5b2..c7969d2 100644
--- a/src/bootstrap/download.rs
+++ b/src/bootstrap/download.rs
@@ -123,7 +123,7 @@ fn should_fix_bins_and_dylibs(&self) -> bool {
/// This is only required on NixOS and uses the PatchELF utility to
/// change the interpreter/RPATH of ELF executables.
///
- /// Please see https://nixos.org/patchelf.html for more information
+ /// Please see <https://nixos.org/patchelf.html> for more information
fn fix_bin_or_dylib(&self, fname: &Path) {
assert_eq!(SHOULD_FIX_BINS_AND_DYLIBS.get(), Some(&true));
println!("attempting to patch {}", fname.display());
diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
index 6ee50ee..fb76dff 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
@@ -414,7 +414,6 @@ pub fn new(mut config: Config) -> Build {
bootstrap_out.display()
)
}
- config.check_build_rustc_version();
if rust_info.is_from_tarball() && config.description.is_none() {
config.description = Some("built from a source tarball".to_owned());
@@ -1011,6 +1010,8 @@ fn msg_build(
}
/// Return a `Group` guard for a [`Step`] that is built for each `--stage`.
+ ///
+ /// [`Step`]: crate::builder::Step
fn msg(
&self,
action: impl Into<Kind>,
@@ -1035,6 +1036,8 @@ fn msg(
}
/// Return a `Group` guard for a [`Step`] that is only built once and isn't affected by `--stage`.
+ ///
+ /// [`Step`]: crate::builder::Step
fn msg_unstaged(
&self,
action: impl Into<Kind>,
diff --git a/src/bootstrap/llvm.rs b/src/bootstrap/llvm.rs
index 040a12f..3fd0cca 100644
--- a/src/bootstrap/llvm.rs
+++ b/src/bootstrap/llvm.rs
@@ -1017,7 +1017,7 @@ fn supported_sanitizers(
"x86_64-unknown-illumos" => common_libs("illumos", "x86_64", &["asan"]),
"x86_64-pc-solaris" => common_libs("solaris", "x86_64", &["asan"]),
"x86_64-unknown-linux-gnu" => {
- common_libs("linux", "x86_64", &["asan", "lsan", "msan", "tsan"])
+ common_libs("linux", "x86_64", &["asan", "lsan", "msan", "safestack", "tsan"])
}
"x86_64-unknown-linux-musl" => {
common_libs("linux", "x86_64", &["asan", "lsan", "msan", "tsan"])
diff --git a/src/bootstrap/metrics.rs b/src/bootstrap/metrics.rs
index e19d56c..5990f33 100644
--- a/src/bootstrap/metrics.rs
+++ b/src/bootstrap/metrics.rs
@@ -14,6 +14,25 @@
use std::time::{Duration, Instant, SystemTime};
use sysinfo::{CpuExt, System, SystemExt};
+// Update this number whenever a breaking change is made to the build metrics.
+//
+// The output format is versioned for two reasons:
+//
+// - The metadata is intended to be consumed by external tooling, and exposing a format version
+// helps the tools determine whether they're compatible with a metrics file.
+//
+// - If a developer enables build metrics in their local checkout, making a breaking change to the
+// metrics format would result in a hard-to-diagnose error message when an existing metrics file
+// is not compatible with the new changes. With a format version number, bootstrap can discard
+// incompatible metrics files instead of appending metrics to them.
+//
+// Version changelog:
+//
+// - v0: initial version
+// - v1: replaced JsonNode::Test with JsonNode::TestSuite
+//
+const CURRENT_FORMAT_VERSION: usize = 1;
+
pub(crate) struct BuildMetrics {
state: RefCell<MetricsState>,
}
@@ -57,7 +76,7 @@ pub(crate) fn enter_step<S: Step>(&self, step: &S, builder: &Builder<'_>) {
duration_excluding_children_sec: Duration::ZERO,
children: Vec::new(),
- tests: Vec::new(),
+ test_suites: Vec::new(),
});
}
@@ -84,6 +103,17 @@ pub(crate) fn exit_step(&self, builder: &Builder<'_>) {
}
}
+ pub(crate) fn begin_test_suite(&self, metadata: TestSuiteMetadata, builder: &Builder<'_>) {
+ // Do not record dry runs, as they'd be duplicates of the actual steps.
+ if builder.config.dry_run() {
+ return;
+ }
+
+ let mut state = self.state.borrow_mut();
+ let step = state.running_steps.last_mut().unwrap();
+ step.test_suites.push(TestSuite { metadata, tests: Vec::new() });
+ }
+
pub(crate) fn record_test(&self, name: &str, outcome: TestOutcome, builder: &Builder<'_>) {
// Do not record dry runs, as they'd be duplicates of the actual steps.
if builder.config.dry_run() {
@@ -91,12 +121,13 @@ pub(crate) fn record_test(&self, name: &str, outcome: TestOutcome, builder: &Bui
}
let mut state = self.state.borrow_mut();
- state
- .running_steps
- .last_mut()
- .unwrap()
- .tests
- .push(Test { name: name.to_string(), outcome });
+ let step = state.running_steps.last_mut().unwrap();
+
+ if let Some(test_suite) = step.test_suites.last_mut() {
+ test_suite.tests.push(Test { name: name.to_string(), outcome });
+ } else {
+ panic!("metrics.record_test() called without calling metrics.begin_test_suite() first");
+ }
}
fn collect_stats(&self, state: &mut MetricsState) {
@@ -131,7 +162,20 @@ pub(crate) fn persist(&self, build: &Build) {
// Some of our CI builds consist of multiple independent CI invocations. Ensure all the
// previous invocations are still present in the resulting file.
let mut invocations = match std::fs::read(&dest) {
- Ok(contents) => t!(serde_json::from_slice::<JsonRoot>(&contents)).invocations,
+ Ok(contents) => {
+ // We first parse just the format_version field to have the check succeed even if
+ // the rest of the contents are not valid anymore.
+ let version: OnlyFormatVersion = t!(serde_json::from_slice(&contents));
+ if version.format_version == CURRENT_FORMAT_VERSION {
+ t!(serde_json::from_slice::<JsonRoot>(&contents)).invocations
+ } else {
+ println!(
+ "warning: overriding existing build/metrics.json, as it's not \
+ compatible with build metrics format version {CURRENT_FORMAT_VERSION}."
+ );
+ Vec::new()
+ }
+ }
Err(err) => {
if err.kind() != std::io::ErrorKind::NotFound {
panic!("failed to open existing metrics file at {}: {err}", dest.display());
@@ -149,7 +193,7 @@ pub(crate) fn persist(&self, build: &Build) {
children: steps.into_iter().map(|step| self.prepare_json_step(step)).collect(),
});
- let json = JsonRoot { system_stats, invocations };
+ let json = JsonRoot { format_version: CURRENT_FORMAT_VERSION, system_stats, invocations };
t!(std::fs::create_dir_all(dest.parent().unwrap()));
let mut file = BufWriter::new(t!(File::create(&dest)));
@@ -159,11 +203,7 @@ pub(crate) fn persist(&self, build: &Build) {
fn prepare_json_step(&self, step: StepMetrics) -> JsonNode {
let mut children = Vec::new();
children.extend(step.children.into_iter().map(|child| self.prepare_json_step(child)));
- children.extend(
- step.tests
- .into_iter()
- .map(|test| JsonNode::Test { name: test.name, outcome: test.outcome }),
- );
+ children.extend(step.test_suites.into_iter().map(JsonNode::TestSuite));
JsonNode::RustbuildStep {
type_: step.type_,
@@ -198,17 +238,14 @@ struct StepMetrics {
duration_excluding_children_sec: Duration,
children: Vec<StepMetrics>,
- tests: Vec<Test>,
-}
-
-struct Test {
- name: String,
- outcome: TestOutcome,
+ test_suites: Vec<TestSuite>,
}
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
struct JsonRoot {
+ #[serde(default)] // For version 0 the field was not present.
+ format_version: usize,
system_stats: JsonInvocationSystemStats,
invocations: Vec<JsonInvocation>,
}
@@ -237,11 +274,39 @@ enum JsonNode {
children: Vec<JsonNode>,
},
- Test {
- name: String,
- #[serde(flatten)]
- outcome: TestOutcome,
+ TestSuite(TestSuite),
+}
+
+#[derive(Serialize, Deserialize)]
+struct TestSuite {
+ metadata: TestSuiteMetadata,
+ tests: Vec<Test>,
+}
+
+#[derive(Serialize, Deserialize)]
+#[serde(tag = "kind", rename_all = "snake_case")]
+pub(crate) enum TestSuiteMetadata {
+ CargoPackage {
+ crates: Vec<String>,
+ target: String,
+ host: String,
+ stage: u32,
},
+ Compiletest {
+ suite: String,
+ mode: String,
+ compare_mode: Option<String>,
+ target: String,
+ host: String,
+ stage: u32,
+ },
+}
+
+#[derive(Serialize, Deserialize)]
+pub(crate) struct Test {
+ name: String,
+ #[serde(flatten)]
+ outcome: TestOutcome,
}
#[derive(Serialize, Deserialize)]
@@ -266,3 +331,9 @@ struct JsonInvocationSystemStats {
struct JsonStepSystemStats {
cpu_utilization_percent: f64,
}
+
+#[derive(Deserialize)]
+struct OnlyFormatVersion {
+ #[serde(default)] // For version 0 the field was not present.
+ format_version: usize,
+}
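
A minimal sketch of the two-phase parse introduced above: deserialize only `format_version` first, then decide whether the rest of the file is worth parsing. It assumes `serde` (with the `derive` feature) and `serde_json` as dependencies; the JSON inputs are made up for illustration:

```rust
use serde::Deserialize;

const CURRENT_FORMAT_VERSION: usize = 1;

#[derive(Deserialize)]
struct OnlyFormatVersion {
    #[serde(default)] // version 0 files did not carry the field
    format_version: usize,
}

fn main() {
    let v0 = br#"{"system_stats": {}, "invocations": []}"#;
    let v1 = br#"{"format_version": 1, "system_stats": {}, "invocations": []}"#;

    for contents in [&v0[..], &v1[..]] {
        let version: OnlyFormatVersion = serde_json::from_slice(contents).unwrap();
        if version.format_version == CURRENT_FORMAT_VERSION {
            println!("compatible metrics file, parsing in full");
        } else {
            println!("discarding metrics written with format v{}", version.format_version);
        }
    }
}
```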
diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs
index 2b72d6c..44cd84b 100644
--- a/src/bootstrap/test.rs
+++ b/src/bootstrap/test.rs
@@ -317,6 +317,17 @@ fn run(self, builder: &Builder<'_>) {
cargo.env("CARGO_TEST_DISABLE_NIGHTLY", "1");
cargo.env("PATH", &path_for_cargo(builder, compiler));
+ #[cfg(feature = "build-metrics")]
+ builder.metrics.begin_test_suite(
+ crate::metrics::TestSuiteMetadata::CargoPackage {
+ crates: vec!["cargo".into()],
+ target: self.host.triple.to_string(),
+ host: self.host.triple.to_string(),
+ stage: self.stage,
+ },
+ builder,
+ );
+
let _time = util::timeit(&builder);
add_flags_and_try_run_tests(builder, &mut cargo);
}
@@ -944,28 +955,6 @@ fn get_browser_ui_test_version(npm: &Path) -> Option<String> {
.or_else(|| get_browser_ui_test_version_inner(npm, true))
}
-fn compare_browser_ui_test_version(installed_version: &str, src: &Path) {
- match fs::read_to_string(
- src.join("src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version"),
- ) {
- Ok(v) => {
- if v.trim() != installed_version {
- eprintln!(
- "⚠️ Installed version of browser-ui-test (`{}`) is different than the \
- one used in the CI (`{}`)",
- installed_version, v
- );
- eprintln!(
- "You can install this version using `npm update browser-ui-test` or by using \
- `npm install browser-ui-test@{}`",
- v,
- );
- }
- }
- Err(e) => eprintln!("Couldn't find the CI browser-ui-test version: {:?}", e),
- }
-}
-
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RustdocGUI {
pub target: TargetSelection,
@@ -997,79 +986,30 @@ fn make_run(run: RunConfig<'_>) {
}
fn run(self, builder: &Builder<'_>) {
- let nodejs = builder.config.nodejs.as_ref().expect("nodejs isn't available");
- let npm = builder.config.npm.as_ref().expect("npm isn't available");
-
builder.ensure(compile::Std::new(self.compiler, self.target));
- // The goal here is to check if the necessary packages are installed, and if not, we
- // panic.
- match get_browser_ui_test_version(&npm) {
- Some(version) => {
- // We also check the version currently used in CI and emit a warning if it's not the
- // same one.
- compare_browser_ui_test_version(&version, &builder.build.src);
- }
- None => {
- eprintln!(
- "error: rustdoc-gui test suite cannot be run because npm `browser-ui-test` \
- dependency is missing",
- );
- eprintln!(
- "If you want to install the `{0}` dependency, run `npm install {0}`",
- "browser-ui-test",
- );
- panic!("Cannot run rustdoc-gui tests");
- }
- }
+ let mut cmd = builder.tool_cmd(Tool::RustdocGUITest);
let out_dir = builder.test_out(self.target).join("rustdoc-gui");
-
- // We remove existing folder to be sure there won't be artifacts remaining.
builder.clear_if_dirty(&out_dir, &builder.rustdoc(self.compiler));
- let src_path = builder.build.src.join("tests/rustdoc-gui/src");
- // We generate docs for the libraries present in the rustdoc-gui's src folder.
- for entry in src_path.read_dir().expect("read_dir call failed") {
- if let Ok(entry) = entry {
- let path = entry.path();
-
- if !path.is_dir() {
- continue;
- }
-
- let mut cargo = Command::new(&builder.initial_cargo);
- cargo
- .arg("doc")
- .arg("--target-dir")
- .arg(&out_dir)
- .env("RUSTC_BOOTSTRAP", "1")
- .env("RUSTDOC", builder.rustdoc(self.compiler))
- .env("RUSTC", builder.rustc(self.compiler))
- .current_dir(path);
- // FIXME: implement a `// compile-flags` command or similar
- // instead of hard-coding this test
- if entry.file_name() == "link_to_definition" {
- cargo.env("RUSTDOCFLAGS", "-Zunstable-options --generate-link-to-definition");
- } else if entry.file_name() == "scrape_examples" {
- cargo.arg("-Zrustdoc-scrape-examples");
- } else if entry.file_name() == "extend_css" {
- cargo.env("RUSTDOCFLAGS", &format!("--extend-css extra.css"));
- }
- builder.run(&mut cargo);
- }
+ if let Some(src) = builder.config.src.to_str() {
+ cmd.arg("--rust-src").arg(src);
}
- // We now run GUI tests.
- let mut command = Command::new(&nodejs);
- command
- .arg(builder.build.src.join("src/tools/rustdoc-gui/tester.js"))
- .arg("--jobs")
- .arg(&builder.jobs().to_string())
- .arg("--doc-folder")
- .arg(out_dir.join("doc"))
- .arg("--tests-folder")
- .arg(builder.build.src.join("tests/rustdoc-gui"));
+ if let Some(out_dir) = out_dir.to_str() {
+ cmd.arg("--out-dir").arg(out_dir);
+ }
+
+ if let Some(initial_cargo) = builder.config.initial_cargo.to_str() {
+ cmd.arg("--initial-cargo").arg(initial_cargo);
+ }
+
+ cmd.arg("--jobs").arg(builder.jobs().to_string());
+
+ cmd.env("RUSTDOC", builder.rustdoc(self.compiler))
+ .env("RUSTC", builder.rustc(self.compiler));
+
for path in &builder.paths {
if let Some(p) = util::is_valid_test_suite_arg(path, "tests/rustdoc-gui", builder) {
if !p.ends_with(".goml") {
@@ -1077,14 +1017,25 @@ fn run(self, builder: &Builder<'_>) {
panic!("Cannot run rustdoc-gui tests");
}
if let Some(name) = path.file_name().and_then(|f| f.to_str()) {
- command.arg("--file").arg(name);
+ cmd.arg("--goml-file").arg(name);
}
}
}
+
for test_arg in builder.config.test_args() {
- command.arg(test_arg);
+ cmd.arg("--test-arg").arg(test_arg);
}
- builder.run(&mut command);
+
+ if let Some(ref nodejs) = builder.config.nodejs {
+ cmd.arg("--nodejs").arg(nodejs);
+ }
+
+ if let Some(ref npm) = builder.config.npm {
+ cmd.arg("--npm").arg(npm);
+ }
+
+ let _time = util::timeit(&builder);
+ crate::render_tests::try_run_tests(builder, &mut cmd);
}
}
@@ -1759,6 +1710,19 @@ fn run(self, builder: &Builder<'_>) {
builder.ci_env.force_coloring_in_ci(&mut cmd);
+ #[cfg(feature = "build-metrics")]
+ builder.metrics.begin_test_suite(
+ crate::metrics::TestSuiteMetadata::Compiletest {
+ suite: suite.into(),
+ mode: mode.into(),
+ compare_mode: None,
+ target: self.target.triple.to_string(),
+ host: self.compiler.host.triple.to_string(),
+ stage: self.compiler.stage,
+ },
+ builder,
+ );
+
builder.info(&format!(
"Check compiletest suite={} mode={} ({} -> {})",
suite, mode, &compiler.host, target
@@ -1768,6 +1732,20 @@ fn run(self, builder: &Builder<'_>) {
if let Some(compare_mode) = compare_mode {
cmd.arg("--compare-mode").arg(compare_mode);
+
+ #[cfg(feature = "build-metrics")]
+ builder.metrics.begin_test_suite(
+ crate::metrics::TestSuiteMetadata::Compiletest {
+ suite: suite.into(),
+ mode: mode.into(),
+ compare_mode: Some(compare_mode.into()),
+ target: self.target.triple.to_string(),
+ host: self.compiler.host.triple.to_string(),
+ stage: self.compiler.stage,
+ },
+ builder,
+ );
+
builder.info(&format!(
"Check compiletest suite={} mode={} compare_mode={} ({} -> {})",
suite, mode, compare_mode, &compiler.host, target
@@ -2094,6 +2072,17 @@ fn run_cargo_test(
let mut cargo =
prepare_cargo_test(cargo, libtest_args, crates, primary_crate, compiler, target, builder);
let _time = util::timeit(&builder);
+
+ #[cfg(feature = "build-metrics")]
+ builder.metrics.begin_test_suite(
+ crate::metrics::TestSuiteMetadata::CargoPackage {
+ crates: crates.iter().map(|c| c.to_string()).collect(),
+ target: target.triple.to_string(),
+ host: compiler.host.triple.to_string(),
+ stage: compiler.stage,
+ },
+ builder,
+ );
add_flags_and_try_run_tests(builder, &mut cargo)
}
diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs
index f13d365..b3791ef 100644
--- a/src/bootstrap/tool.rs
+++ b/src/bootstrap/tool.rs
@@ -302,6 +302,7 @@ fn run(self, builder: &Builder<'_>) -> PathBuf {
GenerateCopyright, "src/tools/generate-copyright", "generate-copyright";
SuggestTests, "src/tools/suggest-tests", "suggest-tests";
GenerateWindowsSys, "src/tools/generate-windows-sys", "generate-windows-sys";
+ RustdocGUITest, "src/tools/rustdoc-gui-test", "rustdoc-gui-test", is_unstable_tool = true, allow_features = "test";
);
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs
index 2e1adbf..9bfdc77 100644
--- a/src/bootstrap/util.rs
+++ b/src/bootstrap/util.rs
@@ -488,7 +488,7 @@ fn GetFullPathNameW(
}
}
-/// Adapted from https://github.com/llvm/llvm-project/blob/782e91224601e461c019e0a4573bbccc6094fbcd/llvm/cmake/modules/HandleLLVMOptions.cmake#L1058-L1079
+/// Adapted from <https://github.com/llvm/llvm-project/blob/782e91224601e461c019e0a4573bbccc6094fbcd/llvm/cmake/modules/HandleLLVMOptions.cmake#L1058-L1079>
///
/// When `clang-cl` is used with instrumentation, we need to add clang's runtime library resource
/// directory to the linker flags, otherwise there will be linker errors about the profiler runtime
diff --git a/src/doc/rustc/src/exploit-mitigations.md b/src/doc/rustc/src/exploit-mitigations.md
index a82a532..1720487 100644
--- a/src/doc/rustc/src/exploit-mitigations.md
+++ b/src/doc/rustc/src/exploit-mitigations.md
@@ -55,88 +55,18 @@
Summary of exploit mitigations supported by the Rust compiler when building
programs for the Linux operating system on the AMD64 architecture and
equivalent.
-<table class="table">
- <tr>
- <td><strong>Exploit mitigation</strong>
- </td>
- <td><strong>Supported and enabled by default</strong>
- </td>
- <td><strong>Since</strong>
- </td>
- </tr>
- <tr>
- <td>Position-independent executable
- </td>
- <td>Yes
- </td>
- <td>0.12.0 (2014-10-09)
- </td>
- </tr>
- <tr>
- <td>Integer overflow checks
- </td>
- <td>Yes (enabled when debug assertions are enabled, and disabled when debug assertions are disabled)
- </td>
- <td>1.1.0 (2015-06-25)
- </td>
- </tr>
- <tr>
- <td>Non-executable memory regions
- </td>
- <td>Yes
- </td>
- <td>1.8.0 (2016-04-14)
- </td>
- </tr>
- <tr>
- <td>Stack clashing protection
- </td>
- <td>Yes
- </td>
- <td>1.20.0 (2017-08-31)
- </td>
- </tr>
- <tr>
- <td>Read-only relocations and immediate binding
- </td>
- <td>Yes
- </td>
- <td>1.21.0 (2017-10-12)
- </td>
- </tr>
- <tr>
- <td>Heap corruption protection
- </td>
- <td>Yes
- </td>
- <td>1.32.0 (2019-01-17) (via operating system default or specified allocator)
- </td>
- </tr>
- <tr>
- <td>Stack smashing protection
- </td>
- <td>Yes
- </td>
- <td>Nightly
- </td>
- </tr>
- <tr>
- <td>Forward-edge control flow protection
- </td>
- <td>Yes
- </td>
- <td>Nightly
- </td>
- </tr>
- <tr>
- <td>Backward-edge control flow protection (e.g., shadow and safe stack)
- </td>
- <td>No
- </td>
- <td>
- </td>
- </tr>
-</table>
+
+| Exploit mitigation | Supported and enabled by default | Since |
+| - | - | - |
+| Position-independent executable | Yes | 0.12.0 (2014-10-09) |
+| Integer overflow checks | Yes (enabled when debug assertions are enabled, and disabled when debug assertions are disabled) | 1.1.0 (2015-06-25) |
+| Non-executable memory regions | Yes | 1.8.0 (2016-04-14) |
+| Stack clashing protection | Yes | 1.20.0 (2017-08-31) |
+| Read-only relocations and immediate binding | Yes | 1.21.0 (2017-10-12) |
+| Heap corruption protection | Yes | 1.32.0 (2019-01-17) (via operating system default or specified allocator) |
+| Stack smashing protection | Yes | Nightly |
+| Forward-edge control flow protection | Yes | Nightly |
+| Backward-edge control flow protection (e.g., shadow and safe stack) | Yes | Nightly |
<small id="fn:1">1\. See
<https://github.com/rust-lang/rust/tree/master/compiler/rustc_target/src/spec>
@@ -513,20 +443,21 @@
protection, such as ARM Pointer Authentication, and Intel Shadow Stack as
part of Intel CET.
-The Rust compiler does not support shadow or safe stack. There is work
-currently ongoing to add support for the sanitizers[40], which may or may
-not include support for safe stack<sup id="fnref:7" role="doc-noteref"><a
-href="#fn:7" class="footnote">7</a></sup>.
+The Rust compiler supports shadow stack for aarch64 only
+<sup id="fnref:7" role="doc-noteref"><a href="#fn:7" class="footnote">7</a></sup>
+on nightly Rust compilers [43]-[44]. Safe stack is available on nightly
+Rust compilers [45]-[46].
```text
$ readelf -s target/release/hello-rust | grep __safestack_init
+ 1177: 00000000000057b0 444 FUNC GLOBAL DEFAULT 9 __safestack_init
```
Fig. 16. Checking if LLVM SafeStack is enabled for a given binary.
The presence of the `__safestack_init` symbol indicates that LLVM SafeStack
-is enabled for a given binary. Conversely, the absence of the
+is enabled for a given binary (see Fig. 16). Conversely, the absence of the
`__safestack_init` symbol indicates that LLVM SafeStack is not enabled for a
-given binary (see Fig. 16).
+given binary.
<small id="fn:7">7\. The shadow stack implementation for the AMD64
architecture and equivalent in LLVM was removed due to performance and
@@ -698,3 +629,15 @@
42. bbjornse. “add codegen option for using LLVM stack smash protection #84197.”
GitHub. <https://github.com/rust-lang/rust/pull/84197>
+
+43. ivanloz. “Add support for LLVM ShadowCallStack. #98208.” GitHub.
+ <https://github.com/rust-lang/rust/pull/98208>.
+
+44. “ShadowCallStack.” The Rust Unstable Book.
+ [https://doc.rust-lang.org/unstable-book/compiler-flags/sanitizer.html#shadowcallstack](../unstable-book/compiler-flags/sanitizer.html#shadowcallstack).
+
+45. W. Wiser. “Add support for LLVM SafeStack #112000.” GitHub.
+ <https://github.com/rust-lang/rust/pull/112000>
+
+46. “SafeStack.” The Rust Unstable Book.
+ [https://doc.rust-lang.org/unstable-book/compiler-flags/sanitizer.html#safestack](../unstable-book/compiler-flags/sanitizer.html#safestack).
diff --git a/src/doc/unstable-book/src/compiler-flags/sanitizer.md b/src/doc/unstable-book/src/compiler-flags/sanitizer.md
index aa776da..49389b2 100644
--- a/src/doc/unstable-book/src/compiler-flags/sanitizer.md
+++ b/src/doc/unstable-book/src/compiler-flags/sanitizer.md
@@ -21,7 +21,8 @@
* [MemorySanitizer](#memorysanitizer) a detector of uninitialized reads.
* [MemTagSanitizer](#memtagsanitizer) fast memory error detector based on
Armv8.5-A Memory Tagging Extension.
-* [ShadowCallStack](#shadowcallstack) provides backward-edge control flow protection.
+* [SafeStack](#safestack) provides backward-edge control flow protection by separating the stack into safe and unsafe regions.
+* [ShadowCallStack](#shadowcallstack) provides backward-edge control flow protection (aarch64 only).
* [ThreadSanitizer](#threadsanitizer) a fast data race detector.
To enable a sanitizer compile with `-Zsanitizer=address`,`-Zsanitizer=cfi`,
@@ -712,6 +713,16 @@
See the [LLVM MemTagSanitizer documentation][llvm-memtag] for more details.
+# SafeStack
+
+SafeStack provides backward edge control flow protection by separating the stack into data which is only accessed safely (the safe stack) and all other data (the unsafe stack).
+
+SafeStack can be enabled with the `-Zsanitizer=safestack` option and is supported on the following targets:
+
+* `x86_64-unknown-linux-gnu`
+
+See the [Clang SafeStack documentation][clang-safestack] for more details.
+
# ShadowCallStack
ShadowCallStack provides backward edge control flow protection by storing a function's return address in a separately allocated 'shadow call stack' and loading the return address from that shadow call stack.
@@ -828,6 +839,7 @@
[clang-kcfi]: https://clang.llvm.org/docs/ControlFlowIntegrity.html#fsanitize-kcfi
[clang-lsan]: https://clang.llvm.org/docs/LeakSanitizer.html
[clang-msan]: https://clang.llvm.org/docs/MemorySanitizer.html
+[clang-safestack]: https://clang.llvm.org/docs/SafeStack.html
[clang-scs]: https://clang.llvm.org/docs/ShadowCallStack.html
[clang-tsan]: https://clang.llvm.org/docs/ThreadSanitizer.html
[linux-kasan]: https://www.kernel.org/doc/html/latest/dev-tools/kasan.html
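
For the new SafeStack section above, a tiny end-to-end check one might run, assuming a nightly toolchain on `x86_64-unknown-linux-gnu`; the build and inspection commands are shown as comments and are illustrative, not taken from this diff:

```rust
// Build with the sanitizer enabled, e.g.:
//   RUSTFLAGS="-Zsanitizer=safestack" cargo +nightly run --release
// then verify the instrumentation as in the exploit-mitigations chapter:
//   readelf -s target/release/<bin> | grep __safestack_init
fn main() {
    // A local buffer; with SafeStack enabled, address-taken locals like this
    // are candidates for the separately allocated unsafe stack.
    let mut buf = [0u8; 64];
    buf[0] = 1;
    println!("{}", buf[0]);
}
```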
diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs
index c852f9c..7dc08b3 100644
--- a/src/librustdoc/clean/inline.rs
+++ b/src/librustdoc/clean/inline.rs
@@ -355,9 +355,9 @@ pub(crate) fn build_impl(
return;
}
- let _prof_timer = cx.tcx.sess.prof.generic_activity("build_impl");
-
let tcx = cx.tcx;
+ let _prof_timer = tcx.sess.prof.generic_activity("build_impl");
+
let associated_trait = tcx.impl_trait_ref(did).map(ty::EarlyBinder::skip_binder);
// Only inline impl if the implemented trait is
diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
index 59a3e63..03adc19 100644
--- a/src/librustdoc/clean/mod.rs
+++ b/src/librustdoc/clean/mod.rs
@@ -2592,7 +2592,8 @@ fn clean_use_statement_inner<'tcx>(
} else {
if inline_attr.is_none()
&& let Res::Def(DefKind::Mod, did) = path.res
- && !did.is_local() && did.is_crate_root()
+ && !did.is_local()
+ && did.is_crate_root()
{
// if we're `pub use`ing an extern crate root, don't inline it unless we
// were specifically asked for it
diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs
index 17aa6b3..366f939 100644
--- a/src/librustdoc/clean/utils.rs
+++ b/src/librustdoc/clean/utils.rs
@@ -193,7 +193,7 @@ pub(crate) fn build_deref_target_impls(
};
if let Some(prim) = target.primitive_type() {
- let _prof_timer = cx.tcx.sess.prof.generic_activity("build_primitive_inherent_impls");
+ let _prof_timer = tcx.sess.prof.generic_activity("build_primitive_inherent_impls");
for did in prim.impls(tcx).filter(|did| !did.is_local()) {
inline::build_impl(cx, did, None, ret);
}
diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs
index dfa4b09..9f08609 100644
--- a/src/librustdoc/config.rs
+++ b/src/librustdoc/config.rs
@@ -314,7 +314,6 @@ pub(crate) fn from_matches(
matches: &getopts::Matches,
args: Vec<String>,
) -> Result<(Options, RenderOptions), i32> {
- let args = &args[1..];
// Check for unstable options.
nightly_options::check_nightly_options(matches, &opts());
diff --git a/src/librustdoc/formats/cache.rs b/src/librustdoc/formats/cache.rs
index c4758fd..8aaad8b 100644
--- a/src/librustdoc/formats/cache.rs
+++ b/src/librustdoc/formats/cache.rs
@@ -147,7 +147,7 @@ pub(crate) fn populate(cx: &mut DocContext<'_>, mut krate: clean::Crate) -> clea
// Cache where all our extern crates are located
// FIXME: this part is specific to HTML so it'd be nice to remove it from the common code
- for &crate_num in cx.tcx.crates(()) {
+ for &crate_num in tcx.crates(()) {
let e = ExternalCrate { crate_num };
let name = e.name(tcx);
diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs
index 09e7ed2..9bb2002 100644
--- a/src/librustdoc/html/markdown.rs
+++ b/src/librustdoc/html/markdown.rs
@@ -1237,7 +1237,27 @@ pub(crate) fn plain_text_summary(md: &str, link_names: &[RenderedLink]) -> Strin
pub(crate) struct MarkdownLink {
pub kind: LinkType,
pub link: String,
- pub range: Range<usize>,
+ pub range: MarkdownLinkRange,
+}
+
+#[derive(Clone, Debug)]
+pub(crate) enum MarkdownLinkRange {
+ /// Normally, markdown link warnings point only at the destination.
+ Destination(Range<usize>),
+ /// In some cases, it's not possible to point at the destination.
+ /// Usually, this happens because backslashes `\\` are used.
+ /// When that happens, point at the whole link, and don't provide structured suggestions.
+ WholeLink(Range<usize>),
+}
+
+impl MarkdownLinkRange {
+ /// Extracts the inner range.
+ pub fn inner_range(&self) -> &Range<usize> {
+ match self {
+ MarkdownLinkRange::Destination(range) => range,
+ MarkdownLinkRange::WholeLink(range) => range,
+ }
+ }
}
pub(crate) fn markdown_links<R>(
@@ -1257,9 +1277,9 @@ pub(crate) fn markdown_links<R>(
if md_start <= s_start && s_end <= md_end {
let start = s_start.offset_from(md_start) as usize;
let end = s_end.offset_from(md_start) as usize;
- start..end
+ MarkdownLinkRange::Destination(start..end)
} else {
- fallback
+ MarkdownLinkRange::WholeLink(fallback)
}
};
@@ -1267,6 +1287,7 @@ pub(crate) fn markdown_links<R>(
// For diagnostics, we want to underline the link's definition but `span` will point at
// where the link is used. This is a problem for reference-style links, where the definition
// is separate from the usage.
+
match link {
// `Borrowed` variant means the string (the link's destination) may come directly from
// the markdown text and we can locate the original link destination.
@@ -1275,10 +1296,82 @@ pub(crate) fn markdown_links<R>(
CowStr::Borrowed(s) => locate(s, span),
// For anything else, we can only use the provided range.
- CowStr::Boxed(_) | CowStr::Inlined(_) => span,
+ CowStr::Boxed(_) | CowStr::Inlined(_) => MarkdownLinkRange::WholeLink(span),
}
};
+ let span_for_offset_backward = |span: Range<usize>, open: u8, close: u8| {
+ let mut open_brace = !0;
+ let mut close_brace = !0;
+ for (i, b) in md.as_bytes()[span.clone()].iter().copied().enumerate().rev() {
+ let i = i + span.start;
+ if b == close {
+ close_brace = i;
+ break;
+ }
+ }
+ if close_brace < span.start || close_brace >= span.end {
+ return MarkdownLinkRange::WholeLink(span);
+ }
+ let mut nesting = 1;
+ for (i, b) in md.as_bytes()[span.start..close_brace].iter().copied().enumerate().rev() {
+ let i = i + span.start;
+ if b == close {
+ nesting += 1;
+ }
+ if b == open {
+ nesting -= 1;
+ }
+ if nesting == 0 {
+ open_brace = i;
+ break;
+ }
+ }
+ assert!(open_brace != close_brace);
+ if open_brace < span.start || open_brace >= span.end {
+ return MarkdownLinkRange::WholeLink(span);
+ }
+ // do not actually include braces in the span
+ let range = (open_brace + 1)..close_brace;
+ MarkdownLinkRange::Destination(range.clone())
+ };
+
+ let span_for_offset_forward = |span: Range<usize>, open: u8, close: u8| {
+ let mut open_brace = !0;
+ let mut close_brace = !0;
+ for (i, b) in md.as_bytes()[span.clone()].iter().copied().enumerate() {
+ let i = i + span.start;
+ if b == open {
+ open_brace = i;
+ break;
+ }
+ }
+ if open_brace < span.start || open_brace >= span.end {
+ return MarkdownLinkRange::WholeLink(span);
+ }
+ let mut nesting = 0;
+ for (i, b) in md.as_bytes()[open_brace..span.end].iter().copied().enumerate() {
+ let i = i + open_brace;
+ if b == close {
+ nesting -= 1;
+ }
+ if b == open {
+ nesting += 1;
+ }
+ if nesting == 0 {
+ close_brace = i;
+ break;
+ }
+ }
+ assert!(open_brace != close_brace);
+ if open_brace < span.start || open_brace >= span.end {
+ return MarkdownLinkRange::WholeLink(span);
+ }
+ // do not actually include braces in the span
+ let range = (open_brace + 1)..close_brace;
+ MarkdownLinkRange::Destination(range.clone())
+ };
+
Parser::new_with_broken_link_callback(
md,
main_body_opts(),
@@ -1287,11 +1380,20 @@ pub(crate) fn markdown_links<R>(
.into_offset_iter()
.filter_map(|(event, span)| match event {
Event::Start(Tag::Link(link_type, dest, _)) if may_be_doc_link(link_type) => {
- preprocess_link(MarkdownLink {
- kind: link_type,
- range: span_for_link(&dest, span),
- link: dest.into_string(),
- })
+ let range = match link_type {
+ // Link is pulled from the link itself.
+ LinkType::ReferenceUnknown | LinkType::ShortcutUnknown => {
+ span_for_offset_backward(span, b'[', b']')
+ }
+ LinkType::CollapsedUnknown => span_for_offset_forward(span, b'[', b']'),
+ LinkType::Inline => span_for_offset_backward(span, b'(', b')'),
+ // Link is pulled from elsewhere in the document.
+ LinkType::Reference | LinkType::Collapsed | LinkType::Shortcut => {
+ span_for_link(&dest, span)
+ }
+ LinkType::Autolink | LinkType::Email => unreachable!(),
+ };
+ preprocess_link(MarkdownLink { kind: link_type, range, link: dest.into_string() })
}
_ => None,
})
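
A stand-alone sketch of the backward bracket scan used above for inline links: starting from the end of the link's span, find the closing `)`, walk back to the `(` that balances it, and report only the destination bytes. The helper name and the hard-coded span are hypothetical, not the rustdoc implementation:

```rust
use std::ops::Range;

fn inline_dest_range(md: &str, span: Range<usize>) -> Option<Range<usize>> {
    let bytes = md.as_bytes();
    // Last `)` inside the span.
    let close = (span.start..span.end).rev().find(|&i| bytes[i] == b')')?;
    // Walk backwards to the `(` that balances it, tracking nesting.
    let mut nesting = 1;
    let mut open = None;
    for i in (span.start..close).rev() {
        match bytes[i] {
            b')' => nesting += 1,
            b'(' => {
                nesting -= 1;
                if nesting == 0 {
                    open = Some(i);
                    break;
                }
            }
            _ => {}
        }
    }
    // The braces themselves are not part of the reported range.
    open.map(|open| open + 1..close)
}

fn main() {
    let md = "see [the docs](https://example.com) for details";
    let link_span = 4..35; // span of `[the docs](https://example.com)` in `md`
    let dest = inline_dest_range(md, link_span).unwrap();
    assert_eq!(&md[dest], "https://example.com");
}
```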
diff --git a/src/librustdoc/html/render/print_item.rs b/src/librustdoc/html/render/print_item.rs
index 76c8e08..d2dc47a 100644
--- a/src/librustdoc/html/render/print_item.rs
+++ b/src/librustdoc/html/render/print_item.rs
@@ -9,6 +9,8 @@
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::hygiene::MacroKind;
use rustc_span::symbol::{kw, sym, Symbol};
+use std::borrow::Borrow;
+use std::cell::{RefCell, RefMut};
use std::cmp::Ordering;
use std::fmt;
use std::rc::Rc;
@@ -216,6 +218,53 @@ fn toggle_close(mut w: impl fmt::Write) {
w.write_str("</details>").unwrap();
}
+trait ItemTemplate<'a, 'cx: 'a>: askama::Template + fmt::Display {
+ fn item_and_mut_cx(&self) -> (&'a clean::Item, RefMut<'_, &'a mut Context<'cx>>);
+}
+
+fn item_template_document<'a: 'b, 'b, 'cx: 'a>(
+ templ: &'b impl ItemTemplate<'a, 'cx>,
+) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let (item, mut cx) = templ.item_and_mut_cx();
+ let v = document(*cx, item, None, HeadingOffset::H2);
+ write!(f, "{v}")
+ })
+}
+
+fn item_template_document_type_layout<'a: 'b, 'b, 'cx: 'a>(
+ templ: &'b impl ItemTemplate<'a, 'cx>,
+) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let (item, cx) = templ.item_and_mut_cx();
+ let def_id = item.item_id.expect_def_id();
+ let v = document_type_layout(*cx, def_id);
+ write!(f, "{v}")
+ })
+}
+
+fn item_template_render_attributes_in_pre<'a: 'b, 'b, 'cx: 'a>(
+ templ: &'b impl ItemTemplate<'a, 'cx>,
+) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let (item, cx) = templ.item_and_mut_cx();
+ let tcx = cx.tcx();
+ let v = render_attributes_in_pre(item, "", tcx);
+ write!(f, "{v}")
+ })
+}
+
+fn item_template_render_assoc_items<'a: 'b, 'b, 'cx: 'a>(
+ templ: &'b impl ItemTemplate<'a, 'cx>,
+) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
+ display_fn(move |f| {
+ let (item, mut cx) = templ.item_and_mut_cx();
+ let def_id = item.item_id.expect_def_id();
+ let v = render_assoc_items(*cx, item, def_id, AssocItemRender::All);
+ write!(f, "{v}")
+ })
+}
+
fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items: &[clean::Item]) {
write!(w, "{}", document(cx, item, None, HeadingOffset::H2));
@@ -356,18 +405,18 @@ fn cmp(
clean::ImportItem(ref import) => {
let stab_tags = if let Some(import_def_id) = import.source.did {
- let ast_attrs = cx.tcx().get_attrs_unchecked(import_def_id);
+ let ast_attrs = tcx.get_attrs_unchecked(import_def_id);
let import_attrs = Box::new(clean::Attributes::from_ast(ast_attrs));
// Just need an item with the correct def_id and attrs
let import_item = clean::Item {
item_id: import_def_id.into(),
attrs: import_attrs,
- cfg: ast_attrs.cfg(cx.tcx(), &cx.cache().hidden_cfg),
+ cfg: ast_attrs.cfg(tcx, &cx.cache().hidden_cfg),
..myitem.clone()
};
- let stab_tags = Some(extra_info_tags(&import_item, item, cx.tcx()).to_string());
+ let stab_tags = Some(extra_info_tags(&import_item, item, tcx).to_string());
stab_tags
} else {
None
@@ -405,8 +454,7 @@ fn cmp(
let unsafety_flag = match *myitem.kind {
clean::FunctionItem(_) | clean::ForeignFunctionItem(_)
- if myitem.fn_header(cx.tcx()).unwrap().unsafety
- == hir::Unsafety::Unsafe =>
+ if myitem.fn_header(tcx).unwrap().unsafety == hir::Unsafety::Unsafe =>
{
"<sup title=\"unsafe function\">⚠</sup>"
}
@@ -439,7 +487,7 @@ fn cmp(
{docs_before}{docs}{docs_after}",
name = myitem.name.unwrap(),
visibility_emoji = visibility_emoji,
- stab_tags = extra_info_tags(myitem, item, cx.tcx()),
+ stab_tags = extra_info_tags(myitem, item, tcx),
class = myitem.type_(),
unsafety_flag = unsafety_flag,
href = item_path(myitem.type_(), myitem.name.unwrap().as_str()),
@@ -886,7 +934,7 @@ fn trait_item(w: &mut Buffer, cx: &mut Context<'_>, m: &clean::Item, t: &clean::
write_small_section_header(w, "foreign-impls", "Implementations on Foreign Types", "");
for implementor in foreign {
- let provided_methods = implementor.inner_impl().provided_trait_methods(cx.tcx());
+ let provided_methods = implementor.inner_impl().provided_trait_methods(tcx);
let assoc_link =
AssocItemLink::GotoSource(implementor.impl_item.item_id, &provided_methods);
render_impl(
@@ -919,7 +967,7 @@ fn trait_item(w: &mut Buffer, cx: &mut Context<'_>, m: &clean::Item, t: &clean::
}
w.write_str("</div>");
- if t.is_auto(cx.tcx()) {
+ if t.is_auto(tcx) {
write_small_section_header(
w,
"synthetic-implementors",
@@ -948,7 +996,7 @@ fn trait_item(w: &mut Buffer, cx: &mut Context<'_>, m: &clean::Item, t: &clean::
"<div id=\"implementors-list\"></div>",
);
- if t.is_auto(cx.tcx()) {
+ if t.is_auto(tcx) {
write_small_section_header(
w,
"synthetic-implementors",
@@ -1131,32 +1179,18 @@ fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean:
#[derive(Template)]
#[template(path = "item_union.html")]
struct ItemUnion<'a, 'cx> {
- cx: std::cell::RefCell<&'a mut Context<'cx>>,
+ cx: RefCell<&'a mut Context<'cx>>,
it: &'a clean::Item,
s: &'a clean::Union,
}
+ impl<'a, 'cx: 'a> ItemTemplate<'a, 'cx> for ItemUnion<'a, 'cx> {
+ fn item_and_mut_cx(&self) -> (&'a clean::Item, RefMut<'_, &'a mut Context<'cx>>) {
+ (self.it, self.cx.borrow_mut())
+ }
+ }
+
impl<'a, 'cx: 'a> ItemUnion<'a, 'cx> {
- fn render_assoc_items<'b>(
- &'b self,
- ) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
- display_fn(move |f| {
- let def_id = self.it.item_id.expect_def_id();
- let mut cx = self.cx.borrow_mut();
- let v = render_assoc_items(*cx, self.it, def_id, AssocItemRender::All);
- write!(f, "{v}")
- })
- }
- fn document_type_layout<'b>(
- &'b self,
- ) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
- display_fn(move |f| {
- let def_id = self.it.item_id.expect_def_id();
- let cx = self.cx.borrow_mut();
- let v = document_type_layout(*cx, def_id);
- write!(f, "{v}")
- })
- }
fn render_union<'b>(&'b self) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
display_fn(move |f| {
let cx = self.cx.borrow_mut();
@@ -1164,22 +1198,6 @@ fn render_union<'b>(&'b self) -> impl fmt::Display + Captures<'a> + 'b + Capture
write!(f, "{v}")
})
}
- fn render_attributes_in_pre<'b>(
- &'b self,
- ) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
- display_fn(move |f| {
- let tcx = self.cx.borrow().tcx();
- let v = render_attributes_in_pre(self.it, "", tcx);
- write!(f, "{v}")
- })
- }
- fn document<'b>(&'b self) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> {
- display_fn(move |f| {
- let mut cx = self.cx.borrow_mut();
- let v = document(*cx, self.it, None, HeadingOffset::H2);
- write!(f, "{v}")
- })
- }
fn document_field<'b>(
&'b self,
field: &'a clean::Item,
@@ -1219,7 +1237,7 @@ fn fields_iter(
}
}
- ItemUnion { cx: std::cell::RefCell::new(cx), it, s }.render_into(w).unwrap();
+ ItemUnion { cx: RefCell::new(cx), it, s }.render_into(w).unwrap();
}
fn print_tuple_struct_fields<'a, 'cx: 'a>(
@@ -1541,11 +1559,12 @@ fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean
write!(w, "{}", document_type_layout(cx, def_id));
}
-fn item_static(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Static) {
- wrap_item(w, |w| {
- render_attributes_in_code(w, it, cx.tcx());
+fn item_static(w: &mut impl fmt::Write, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Static) {
+ let mut buffer = Buffer::new();
+ wrap_item(&mut buffer, |buffer| {
+ render_attributes_in_code(buffer, it, cx.tcx());
write!(
- w,
+ buffer,
"{vis}static {mutability}{name}: {typ}",
vis = visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx),
mutability = s.mutability.print_with_space(),
@@ -1553,24 +1572,29 @@ fn item_static(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean
typ = s.type_.print(cx)
);
});
- write!(w, "{}", document(cx, it, None, HeadingOffset::H2))
+
+ write!(w, "{}", buffer.into_inner()).unwrap();
+
+ write!(w, "{}", document(cx, it, None, HeadingOffset::H2)).unwrap();
}
-fn item_foreign_type(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
- wrap_item(w, |w| {
- w.write_str("extern {\n");
- render_attributes_in_code(w, it, cx.tcx());
+fn item_foreign_type(w: &mut impl fmt::Write, cx: &mut Context<'_>, it: &clean::Item) {
+ let mut buffer = Buffer::new();
+ wrap_item(&mut buffer, |buffer| {
+ buffer.write_str("extern {\n");
+ render_attributes_in_code(buffer, it, cx.tcx());
write!(
- w,
+ buffer,
" {}type {};\n}}",
visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx),
it.name.unwrap(),
);
});
- write!(w, "{}", document(cx, it, None, HeadingOffset::H2));
+ write!(w, "{}{}", buffer.into_inner(), document(cx, it, None, HeadingOffset::H2)).unwrap();
write!(w, "{}", render_assoc_items(cx, it, it.item_id.expect_def_id(), AssocItemRender::All))
+ .unwrap();
}
fn item_keyword(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item) {
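`item_static` and `item_foreign_type` now accept any `fmt::Write` sink and render into a temporary `Buffer` first. A rough sketch of the pattern, assuming a plain `String` can stand in for rustdoc's `Buffer`:

```rust
use std::fmt::{self, Write};

// Render into an intermediate buffer, then emit the result into any
// `fmt::Write` sink. `String` stands in for rustdoc's `Buffer` here.
fn item_static(w: &mut impl fmt::Write, name: &str, ty: &str) -> fmt::Result {
    let mut buffer = String::new();
    write!(buffer, "<pre>static {name}: {ty}</pre>")?;
    // The real function also appends the item's documentation afterwards.
    write!(w, "{buffer}")
}

fn main() -> fmt::Result {
    let mut out = String::new();
    item_static(&mut out, "COUNTER", "usize")?;
    assert_eq!(out, "<pre>static COUNTER: usize</pre>");
    Ok(())
}
```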
diff --git a/src/librustdoc/html/render/type_layout.rs b/src/librustdoc/html/render/type_layout.rs
index 22aec62..c9b95b1 100644
--- a/src/librustdoc/html/render/type_layout.rs
+++ b/src/librustdoc/html/render/type_layout.rs
@@ -54,13 +54,13 @@ pub(crate) fn document_type_layout<'a, 'cx: 'a>(
} else if let Primitive::Int(i, _) = tag.primitive() {
i.size().bytes()
} else {
- span_bug!(cx.tcx().def_span(ty_def_id), "tag is neither niche nor int")
+ span_bug!(tcx.def_span(ty_def_id), "tag is neither niche nor int")
};
variants
.iter_enumerated()
.map(|(variant_idx, variant_layout)| {
let Adt(adt, _) = type_layout.ty.kind() else {
- span_bug!(cx.tcx().def_span(ty_def_id), "not an adt")
+ span_bug!(tcx.def_span(ty_def_id), "not an adt")
};
let name = adt.variant(variant_idx).name;
let is_unsized = variant_layout.abi.is_unsized();
diff --git a/src/librustdoc/html/templates/item_union.html b/src/librustdoc/html/templates/item_union.html
index a014579..c219670 100644
--- a/src/librustdoc/html/templates/item_union.html
+++ b/src/librustdoc/html/templates/item_union.html
@@ -1,8 +1,8 @@
<pre class="rust item-decl"><code>
- {{ self.render_attributes_in_pre() | safe }}
+ {{ self::item_template_render_attributes_in_pre(self.borrow()) | safe }}
{{ self.render_union() | safe }}
</code></pre>
-{{ self.document() | safe }}
+{{ self::item_template_document(self.borrow()) | safe }}
{% if self.fields_iter().peek().is_some() %}
<h2 id="fields" class="fields small-section-header">
Fields<a href="#fields" class="anchor">§</a>
@@ -19,5 +19,5 @@
{{ self.document_field(field) | safe }}
{% endfor %}
{% endif %}
-{{ self.render_assoc_items() | safe }}
-{{ self.document_type_layout() | safe }}
+{{ self::item_template_render_assoc_items(self.borrow()) | safe }}
+{{ self::item_template_document_type_layout(self.borrow()) | safe }}
diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs
index 0a56916..12c622e 100644
--- a/src/librustdoc/lib.rs
+++ b/src/librustdoc/lib.rs
@@ -712,13 +712,23 @@ fn run_renderer<'tcx, T: formats::FormatRenderer<'tcx>>(
}
fn main_args(at_args: &[String]) -> MainResult {
+ // Throw away the first argument, the name of the binary.
+ // If `at_args` is empty, as can happen when an empty argument array is
+ // passed to `execve` on some platforms, just use an empty slice.
+ //
+ // Previously, `arg_expand_all` was called before this argument was
+ // removed, which made it possible to crash the compiler by invoking it
+ // with `@empty_file` as argv[0] and no further arguments.
+ let at_args = at_args.get(1..).unwrap_or_default();
+
let args = rustc_driver::args::arg_expand_all(at_args);
let mut options = getopts::Options::new();
for option in opts() {
(option.apply)(&mut options);
}
- let matches = match options.parse(&args[1..]) {
+ let matches = match options.parse(&args) {
Ok(m) => m,
Err(err) => {
early_error(ErrorOutputType::default(), err.to_string());
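A small sketch of the slicing behaviour the new code relies on; `strip_argv0` is a hypothetical helper name, the semantics are just those of the standard library:

```rust
fn strip_argv0(at_args: &[String]) -> &[String] {
    // `get(1..)` returns `None` on an empty slice instead of panicking, so an
    // empty argv (possible via `execve`) degrades to an empty argument list.
    at_args.get(1..).unwrap_or_default()
}

fn main() {
    let args = vec!["rustdoc".to_string(), "@response_file".to_string()];
    // Normal case: the binary name is dropped.
    assert_eq!(strip_argv0(&args), &args[1..]);
    // Degenerate case: an empty argv no longer reaches `arg_expand_all`.
    assert!(strip_argv0(&[]).is_empty());
}
```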
diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs
index 9e6894a..061a572 100644
--- a/src/librustdoc/passes/collect_intra_doc_links.rs
+++ b/src/librustdoc/passes/collect_intra_doc_links.rs
@@ -31,7 +31,7 @@
use crate::clean::{self, utils::find_nearest_parent_module};
use crate::clean::{Crate, Item, ItemLink, PrimitiveType};
use crate::core::DocContext;
-use crate::html::markdown::{markdown_links, MarkdownLink};
+use crate::html::markdown::{markdown_links, MarkdownLink, MarkdownLinkRange};
use crate::lint::{BROKEN_INTRA_DOC_LINKS, PRIVATE_INTRA_DOC_LINKS};
use crate::passes::Pass;
use crate::visit::DocVisitor;
@@ -248,7 +248,7 @@ struct DiagnosticInfo<'a> {
item: &'a Item,
dox: &'a str,
ori_link: &'a str,
- link_range: Range<usize>,
+ link_range: MarkdownLinkRange,
}
struct LinkCollector<'a, 'tcx> {
@@ -723,7 +723,7 @@ fn resolve_associated_trait_item<'a>(
.iter()
.flat_map(|&(impl_, trait_)| {
filter_assoc_items_by_name_and_namespace(
- cx.tcx,
+ tcx,
trait_,
Ident::with_dummy_span(item_name),
ns,
@@ -833,7 +833,7 @@ fn visit_item(&mut self, item: &Item) {
enum PreprocessingError {
/// User error: `[std#x#y]` is not valid
MultipleAnchors,
- Disambiguator(Range<usize>, String),
+ Disambiguator(MarkdownLinkRange, String),
MalformedGenerics(MalformedGenerics, String),
}
@@ -873,6 +873,7 @@ pub(crate) struct PreprocessedMarkdownLink(
/// `link_buffer` is needed for lifetime reasons; it will always be overwritten and the contents ignored.
fn preprocess_link(
ori_link: &MarkdownLink,
+ dox: &str,
) -> Option<Result<PreprocessingInfo, PreprocessingError>> {
// [] is mostly likely not supposed to be a link
if ori_link.link.is_empty() {
@@ -906,9 +907,15 @@ fn preprocess_link(
Err((err_msg, relative_range)) => {
// Only report error if we would not have ignored this link. See issue #83859.
if !should_ignore_link_with_disambiguators(link) {
- let no_backticks_range = range_between_backticks(ori_link);
- let disambiguator_range = (no_backticks_range.start + relative_range.start)
- ..(no_backticks_range.start + relative_range.end);
+ let disambiguator_range = match range_between_backticks(&ori_link.range, dox) {
+ MarkdownLinkRange::Destination(no_backticks_range) => {
+ MarkdownLinkRange::Destination(
+ (no_backticks_range.start + relative_range.start)
+ ..(no_backticks_range.start + relative_range.end),
+ )
+ }
+ mdlr @ MarkdownLinkRange::WholeLink(_) => mdlr,
+ };
return Some(Err(PreprocessingError::Disambiguator(disambiguator_range, err_msg)));
} else {
return None;
@@ -947,7 +954,7 @@ fn preprocess_link(
fn preprocessed_markdown_links(s: &str) -> Vec<PreprocessedMarkdownLink> {
markdown_links(s, |link| {
- preprocess_link(&link).map(|pp_link| PreprocessedMarkdownLink(pp_link, link))
+ preprocess_link(&link, s).map(|pp_link| PreprocessedMarkdownLink(pp_link, link))
})
}
@@ -1060,22 +1067,12 @@ fn resolve_link(
// valid omission. See https://github.com/rust-lang/rust/pull/80660#discussion_r551585677
// for discussion on the matter.
let kind = self.cx.tcx.def_kind(id);
- self.verify_disambiguator(
- path_str,
- ori_link,
- kind,
- id,
- disambiguator,
- item,
- &diag_info,
- )?;
+ self.verify_disambiguator(path_str, kind, id, disambiguator, item, &diag_info)?;
} else {
match disambiguator {
Some(Disambiguator::Primitive | Disambiguator::Namespace(_)) | None => {}
Some(other) => {
- self.report_disambiguator_mismatch(
- path_str, ori_link, other, res, &diag_info,
- );
+ self.report_disambiguator_mismatch(path_str, other, res, &diag_info);
return None;
}
}
@@ -1096,7 +1093,6 @@ fn resolve_link(
};
self.verify_disambiguator(
path_str,
- ori_link,
kind_for_dis,
id_for_dis,
disambiguator,
@@ -1118,7 +1114,6 @@ fn resolve_link(
fn verify_disambiguator(
&self,
path_str: &str,
- ori_link: &MarkdownLink,
kind: DefKind,
id: DefId,
disambiguator: Option<Disambiguator>,
@@ -1142,7 +1137,7 @@ fn verify_disambiguator(
=> {}
(actual, Some(Disambiguator::Kind(expected))) if actual == expected => {}
(_, Some(specified @ Disambiguator::Kind(_) | specified @ Disambiguator::Primitive)) => {
- self.report_disambiguator_mismatch(path_str,ori_link,specified, Res::Def(kind, id),diag_info);
+ self.report_disambiguator_mismatch(path_str, specified, Res::Def(kind, id), diag_info);
return None;
}
}
@@ -1164,14 +1159,13 @@ fn verify_disambiguator(
fn report_disambiguator_mismatch(
&self,
path_str: &str,
- ori_link: &MarkdownLink,
specified: Disambiguator,
resolved: Res,
diag_info: &DiagnosticInfo<'_>,
) {
// The resolved item did not match the disambiguator; give a better error than 'not found'
let msg = format!("incompatible link kind for `{}`", path_str);
- let callback = |diag: &mut Diagnostic, sp: Option<rustc_span::Span>| {
+ let callback = |diag: &mut Diagnostic, sp: Option<rustc_span::Span>, link_range| {
let note = format!(
"this link resolved to {} {}, which is not {} {}",
resolved.article(),
@@ -1184,14 +1178,24 @@ fn report_disambiguator_mismatch(
} else {
diag.note(note);
}
- suggest_disambiguator(resolved, diag, path_str, &ori_link.link, sp);
+ suggest_disambiguator(resolved, diag, path_str, link_range, sp, diag_info);
};
report_diagnostic(self.cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, diag_info, callback);
}
- fn report_rawptr_assoc_feature_gate(&self, dox: &str, ori_link: &Range<usize>, item: &Item) {
- let span = super::source_span_for_markdown_range(self.cx.tcx, dox, ori_link, &item.attrs)
- .unwrap_or_else(|| item.attr_span(self.cx.tcx));
+ fn report_rawptr_assoc_feature_gate(
+ &self,
+ dox: &str,
+ ori_link: &MarkdownLinkRange,
+ item: &Item,
+ ) {
+ let span = super::source_span_for_markdown_range(
+ self.cx.tcx,
+ dox,
+ ori_link.inner_range(),
+ &item.attrs,
+ )
+ .unwrap_or_else(|| item.attr_span(self.cx.tcx));
rustc_session::parse::feature_err(
&self.cx.tcx.sess.parse_sess,
sym::intra_doc_pointers,
@@ -1371,16 +1375,23 @@ fn resolve_with_disambiguator(
/// [`Foo`]
/// ^^^
/// ```
-fn range_between_backticks(ori_link: &MarkdownLink) -> Range<usize> {
- let after_first_backtick_group = ori_link.link.bytes().position(|b| b != b'`').unwrap_or(0);
- let before_second_backtick_group = ori_link
- .link
+///
+/// This function does nothing if `ori_link.range` is a `MarkdownLinkRange::WholeLink`.
+fn range_between_backticks(ori_link_range: &MarkdownLinkRange, dox: &str) -> MarkdownLinkRange {
+ let range = match ori_link_range {
+ mdlr @ MarkdownLinkRange::WholeLink(_) => return mdlr.clone(),
+ MarkdownLinkRange::Destination(inner) => inner.clone(),
+ };
+ let ori_link_text = &dox[range.clone()];
+ let after_first_backtick_group = ori_link_text.bytes().position(|b| b != b'`').unwrap_or(0);
+ let before_second_backtick_group = ori_link_text
.bytes()
.skip(after_first_backtick_group)
.position(|b| b == b'`')
- .unwrap_or(ori_link.link.len());
- (ori_link.range.start + after_first_backtick_group)
- ..(ori_link.range.start + before_second_backtick_group)
+ .unwrap_or(ori_link_text.len());
+ MarkdownLinkRange::Destination(
+ (range.start + after_first_backtick_group)..(range.start + before_second_backtick_group),
+ )
}
/// Returns true if we should ignore `link` due to it being unlikely
@@ -1530,14 +1541,23 @@ fn as_help_span(
sp: rustc_span::Span,
) -> Vec<(rustc_span::Span, String)> {
let inner_sp = match ori_link.find('(') {
+ Some(index) if index != 0 && ori_link.as_bytes()[index - 1] == b'\\' => {
+ sp.with_hi(sp.lo() + BytePos((index - 1) as _))
+ }
Some(index) => sp.with_hi(sp.lo() + BytePos(index as _)),
None => sp,
};
let inner_sp = match ori_link.find('!') {
+ Some(index) if index != 0 && ori_link.as_bytes()[index - 1] == b'\\' => {
+ sp.with_hi(sp.lo() + BytePos((index - 1) as _))
+ }
Some(index) => inner_sp.with_hi(inner_sp.lo() + BytePos(index as _)),
None => inner_sp,
};
let inner_sp = match ori_link.find('@') {
+ Some(index) if index != 0 && ori_link.as_bytes()[index - 1] == b'\\' => {
+ sp.with_hi(sp.lo() + BytePos((index - 1) as _))
+ }
Some(index) => inner_sp.with_lo(inner_sp.lo() + BytePos(index as u32 + 1)),
None => inner_sp,
};
@@ -1584,7 +1604,7 @@ fn report_diagnostic(
lint: &'static Lint,
msg: impl Into<DiagnosticMessage> + Display,
DiagnosticInfo { item, ori_link: _, dox, link_range }: &DiagnosticInfo<'_>,
- decorate: impl FnOnce(&mut Diagnostic, Option<rustc_span::Span>),
+ decorate: impl FnOnce(&mut Diagnostic, Option<rustc_span::Span>, MarkdownLinkRange),
) {
let Some(hir_id) = DocContext::as_local_hir_id(tcx, item.item_id)
else {
@@ -1596,16 +1616,32 @@ fn report_diagnostic(
let sp = item.attr_span(tcx);
tcx.struct_span_lint_hir(lint, hir_id, sp, msg, |lint| {
- let span =
- super::source_span_for_markdown_range(tcx, dox, link_range, &item.attrs).map(|sp| {
- if dox.as_bytes().get(link_range.start) == Some(&b'`')
- && dox.as_bytes().get(link_range.end - 1) == Some(&b'`')
- {
- sp.with_lo(sp.lo() + BytePos(1)).with_hi(sp.hi() - BytePos(1))
- } else {
- sp
- }
- });
+ let (span, link_range) = match link_range {
+ MarkdownLinkRange::Destination(md_range) => {
+ let mut md_range = md_range.clone();
+ let sp = super::source_span_for_markdown_range(tcx, dox, &md_range, &item.attrs)
+ .map(|mut sp| {
+ while dox.as_bytes().get(md_range.start) == Some(&b' ')
+ || dox.as_bytes().get(md_range.start) == Some(&b'`')
+ {
+ md_range.start += 1;
+ sp = sp.with_lo(sp.lo() + BytePos(1));
+ }
+ while dox.as_bytes().get(md_range.end - 1) == Some(&b' ')
+ || dox.as_bytes().get(md_range.end - 1) == Some(&b'`')
+ {
+ md_range.end -= 1;
+ sp = sp.with_hi(sp.hi() - BytePos(1));
+ }
+ sp
+ });
+ (sp, MarkdownLinkRange::Destination(md_range))
+ }
+ MarkdownLinkRange::WholeLink(md_range) => (
+ super::source_span_for_markdown_range(tcx, dox, &md_range, &item.attrs),
+ link_range.clone(),
+ ),
+ };
if let Some(sp) = span {
lint.set_span(sp);
@@ -1614,21 +1650,22 @@ fn report_diagnostic(
// ^ ~~~~
// | link_range
// last_new_line_offset
- let last_new_line_offset = dox[..link_range.start].rfind('\n').map_or(0, |n| n + 1);
+ let md_range = link_range.inner_range().clone();
+ let last_new_line_offset = dox[..md_range.start].rfind('\n').map_or(0, |n| n + 1);
let line = dox[last_new_line_offset..].lines().next().unwrap_or("");
- // Print the line containing the `link_range` and manually mark it with '^'s.
+ // Print the line containing the `md_range` and manually mark it with '^'s.
lint.note(format!(
"the link appears in this line:\n\n{line}\n\
{indicator: <before$}{indicator:^<found$}",
line = line,
indicator = "",
- before = link_range.start - last_new_line_offset,
- found = link_range.len(),
+ before = md_range.start - last_new_line_offset,
+ found = md_range.len(),
));
}
- decorate(lint, span);
+ decorate(lint, span, link_range);
lint
});
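The loop above trims surrounding spaces and backticks from a `Destination` range so the lint span points at the link text itself. A standalone sketch of the same trimming over a plain `Range<usize>` (hypothetical helper, with an extra underflow guard):

```rust
use std::ops::Range;

// Shrink `range` so it no longer covers leading or trailing spaces/backticks,
// mirroring the span-trimming loop in `report_diagnostic`.
fn trim_range(dox: &str, mut range: Range<usize>) -> Range<usize> {
    let bytes = dox.as_bytes();
    while matches!(bytes.get(range.start), Some(b' ') | Some(b'`')) {
        range.start += 1;
    }
    while range.end > 0 && matches!(bytes.get(range.end - 1), Some(b' ') | Some(b'`')) {
        range.end -= 1;
    }
    range
}

fn main() {
    let dox = "See [` Foo `] for details.";
    // The destination range covers "` Foo `" (bytes 5..12); trimming leaves "Foo".
    assert_eq!(&dox[5..12], "` Foo `");
    assert_eq!(trim_range(dox, 5..12), 7..10);
    assert_eq!(&dox[7..10], "Foo");
}
```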
@@ -1652,7 +1689,7 @@ fn resolution_failure(
BROKEN_INTRA_DOC_LINKS,
format!("unresolved link to `{}`", path_str),
&diag_info,
- |diag, sp| {
+ |diag, sp, link_range| {
let item = |res: Res| format!("the {} `{}`", res.descr(), res.name(tcx),);
let assoc_item_not_allowed = |res: Res| {
let name = res.name(tcx);
@@ -1706,7 +1743,7 @@ fn split(path: &str) -> Option<(&str, &str)> {
if let Ok(v_res) = collector.resolve(start, ns, item_id, module_id) {
debug!("found partial_res={:?}", v_res);
if !v_res.is_empty() {
- *partial_res = Some(full_res(collector.cx.tcx, v_res[0]));
+ *partial_res = Some(full_res(tcx, v_res[0]));
*unresolved = end.into();
break 'outer;
}
@@ -1845,7 +1882,14 @@ fn split(path: &str) -> Option<(&str, &str)> {
let note = match failure {
ResolutionFailure::NotResolved { .. } => unreachable!("handled above"),
ResolutionFailure::WrongNamespace { res, expected_ns } => {
- suggest_disambiguator(res, diag, path_str, diag_info.ori_link, sp);
+ suggest_disambiguator(
+ res,
+ diag,
+ path_str,
+ link_range.clone(),
+ sp,
+ &diag_info,
+ );
format!(
"this link resolves to {}, which is not in the {} namespace",
@@ -1882,7 +1926,7 @@ fn anchor_failure(
msg: String,
anchor_idx: usize,
) {
- report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, sp| {
+ report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, sp, _link_range| {
if let Some(mut sp) = sp {
if let Some((fragment_offset, _)) =
diag_info.ori_link.char_indices().filter(|(_, x)| *x == '#').nth(anchor_idx)
@@ -1898,11 +1942,11 @@ fn anchor_failure(
fn disambiguator_error(
cx: &DocContext<'_>,
mut diag_info: DiagnosticInfo<'_>,
- disambiguator_range: Range<usize>,
+ disambiguator_range: MarkdownLinkRange,
msg: impl Into<DiagnosticMessage> + Display,
) {
diag_info.link_range = disambiguator_range;
- report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, _sp| {
+ report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, _sp, _link_range| {
let msg = format!(
"see {}/rustdoc/write-documentation/linking-to-items-by-name.html#namespaces-and-disambiguators for more info about disambiguators",
crate::DOC_RUST_LANG_ORG_CHANNEL
@@ -1922,7 +1966,7 @@ fn report_malformed_generics(
BROKEN_INTRA_DOC_LINKS,
format!("unresolved link to `{}`", path_str),
&diag_info,
- |diag, sp| {
+ |diag, sp, _link_range| {
let note = match err {
MalformedGenerics::UnbalancedAngleBrackets => "unbalanced angle brackets",
MalformedGenerics::MissingType => "missing type for generic parameters",
@@ -1995,7 +2039,7 @@ fn ambiguity_error(
}
}
- report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, diag_info, |diag, sp| {
+ report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, diag_info, |diag, sp, link_range| {
if let Some(sp) = sp {
diag.span_label(sp, "ambiguous link");
} else {
@@ -2003,7 +2047,7 @@ fn ambiguity_error(
}
for res in kinds {
- suggest_disambiguator(res, diag, path_str, diag_info.ori_link, sp);
+ suggest_disambiguator(res, diag, path_str, link_range.clone(), sp, diag_info);
}
});
true
@@ -2015,13 +2059,19 @@ fn suggest_disambiguator(
res: Res,
diag: &mut Diagnostic,
path_str: &str,
- ori_link: &str,
+ link_range: MarkdownLinkRange,
sp: Option<rustc_span::Span>,
+ diag_info: &DiagnosticInfo<'_>,
) {
let suggestion = res.disambiguator_suggestion();
let help = format!("to link to the {}, {}", res.descr(), suggestion.descr());
- if let Some(sp) = sp {
+ let ori_link = match link_range {
+ MarkdownLinkRange::Destination(range) => Some(&diag_info.dox[range]),
+ MarkdownLinkRange::WholeLink(_) => None,
+ };
+
+ if let (Some(sp), Some(ori_link)) = (sp, ori_link) {
let mut spans = suggestion.as_help_span(path_str, ori_link, sp);
if spans.len() > 1 {
diag.multipart_suggestion(help, spans, Applicability::MaybeIncorrect);
@@ -2047,7 +2097,7 @@ fn privacy_error(cx: &DocContext<'_>, diag_info: &DiagnosticInfo<'_>, path_str:
let msg =
format!("public documentation for `{}` links to private item `{}`", item_name, path_str);
- report_diagnostic(cx.tcx, PRIVATE_INTRA_DOC_LINKS, msg, diag_info, |diag, sp| {
+ report_diagnostic(cx.tcx, PRIVATE_INTRA_DOC_LINKS, msg, diag_info, |diag, sp, _link_range| {
if let Some(sp) = sp {
diag.span_label(sp, "this item is private");
}
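These diagnostics now carry a `MarkdownLinkRange` instead of a bare `Range<usize>`. The type is defined in `html::markdown` and is not part of this diff; the sketch below is the shape inferred from its usage here, written out as an assumption:

```rust
use std::ops::Range;

// Assumed shape, inferred from how the type is used in this file.
#[derive(Clone, Debug)]
enum MarkdownLinkRange {
    /// The range of the link destination, e.g. `Foo` in `[click here](Foo)`.
    Destination(Range<usize>),
    /// The range of the whole link, used when a more precise span isn't available.
    WholeLink(Range<usize>),
}

impl MarkdownLinkRange {
    fn inner_range(&self) -> &Range<usize> {
        match self {
            MarkdownLinkRange::Destination(r) | MarkdownLinkRange::WholeLink(r) => r,
        }
    }
}

fn main() {
    let dest = MarkdownLinkRange::Destination(5..10);
    let whole = MarkdownLinkRange::WholeLink(4..11);
    assert_eq!(*dest.inner_range(), 5..10);
    assert_eq!(*whole.inner_range(), 4..11);
}
```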
diff --git a/src/librustdoc/passes/collect_trait_impls.rs b/src/librustdoc/passes/collect_trait_impls.rs
index 8d204dd..fbf827c 100644
--- a/src/librustdoc/passes/collect_trait_impls.rs
+++ b/src/librustdoc/passes/collect_trait_impls.rs
@@ -19,9 +19,10 @@
};
pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) -> Crate {
+ let tcx = cx.tcx;
// We need to check if there are errors before running this pass because it would crash when
// we try to get auto and blanket implementations.
- if cx.tcx.sess.diagnostic().has_errors_or_lint_errors().is_some() {
+ if tcx.sess.diagnostic().has_errors_or_lint_errors().is_some() {
return krate;
}
@@ -32,8 +33,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
});
let local_crate = ExternalCrate { crate_num: LOCAL_CRATE };
- let prims: FxHashSet<PrimitiveType> =
- local_crate.primitives(cx.tcx).iter().map(|p| p.1).collect();
+ let prims: FxHashSet<PrimitiveType> = local_crate.primitives(tcx).iter().map(|p| p.1).collect();
let crate_items = {
let mut coll = ItemCollector::new();
@@ -46,9 +46,9 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
// External trait impls.
{
- let _prof_timer = cx.tcx.sess.prof.generic_activity("build_extern_trait_impls");
- for &cnum in cx.tcx.crates(()) {
- for &impl_def_id in cx.tcx.trait_impls_in_crate(cnum) {
+ let _prof_timer = tcx.sess.prof.generic_activity("build_extern_trait_impls");
+ for &cnum in tcx.crates(()) {
+ for &impl_def_id in tcx.trait_impls_in_crate(cnum) {
inline::build_impl(cx, impl_def_id, None, &mut new_items_external);
}
}
@@ -56,14 +56,13 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
// Local trait impls.
{
- let _prof_timer = cx.tcx.sess.prof.generic_activity("build_local_trait_impls");
+ let _prof_timer = tcx.sess.prof.generic_activity("build_local_trait_impls");
let mut attr_buf = Vec::new();
- for &impl_def_id in cx.tcx.trait_impls_in_crate(LOCAL_CRATE) {
- let mut parent = Some(cx.tcx.parent(impl_def_id));
+ for &impl_def_id in tcx.trait_impls_in_crate(LOCAL_CRATE) {
+ let mut parent = Some(tcx.parent(impl_def_id));
while let Some(did) = parent {
attr_buf.extend(
- cx.tcx
- .get_attrs(did, sym::doc)
+ tcx.get_attrs(did, sym::doc)
.filter(|attr| {
if let Some([attr]) = attr.meta_item_list().as_deref() {
attr.has_name(sym::cfg)
@@ -73,25 +72,24 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
})
.cloned(),
);
- parent = cx.tcx.opt_parent(did);
+ parent = tcx.opt_parent(did);
}
inline::build_impl(cx, impl_def_id, Some((&attr_buf, None)), &mut new_items_local);
attr_buf.clear();
}
}
- cx.tcx.sess.prof.generic_activity("build_primitive_trait_impls").run(|| {
- for def_id in PrimitiveType::all_impls(cx.tcx) {
+ tcx.sess.prof.generic_activity("build_primitive_trait_impls").run(|| {
+ for def_id in PrimitiveType::all_impls(tcx) {
// Try to inline primitive impls from other crates.
if !def_id.is_local() {
inline::build_impl(cx, def_id, None, &mut new_items_external);
}
}
- for (prim, did) in PrimitiveType::primitive_locations(cx.tcx) {
+ for (prim, did) in PrimitiveType::primitive_locations(tcx) {
// Do not calculate blanket impl list for docs that are not going to be rendered.
// While the `impl` blocks themselves are only in `libcore`, the module with `doc`
// attached is directly included in `libstd` as well.
- let tcx = cx.tcx;
if did.is_local() {
for def_id in prim.impls(tcx).filter(|def_id| {
// Avoid including impl blocks with filled-in generics.
@@ -157,7 +155,7 @@ fn add_deref_target(
// scan through included items ahead of time to splice in Deref targets to the "valid" sets
for it in new_items_external.iter().chain(new_items_local.iter()) {
if let ImplItem(box Impl { ref for_, ref trait_, ref items, .. }) = *it.kind &&
- trait_.as_ref().map(|t| t.def_id()) == cx.tcx.lang_items().deref_trait() &&
+ trait_.as_ref().map(|t| t.def_id()) == tcx.lang_items().deref_trait() &&
cleaner.keep_impl(for_, true)
{
let target = items
@@ -199,7 +197,7 @@ fn add_deref_target(
if let ImplItem(box Impl { ref for_, ref trait_, ref kind, .. }) = *it.kind {
cleaner.keep_impl(
for_,
- trait_.as_ref().map(|t| t.def_id()) == cx.tcx.lang_items().deref_trait(),
+ trait_.as_ref().map(|t| t.def_id()) == tcx.lang_items().deref_trait(),
) || trait_.as_ref().map_or(false, |t| cleaner.keep_impl_with_def_id(t.def_id().into()))
|| kind.is_blanket()
} else {
diff --git a/src/librustdoc/passes/lint/unescaped_backticks.rs b/src/librustdoc/passes/lint/unescaped_backticks.rs
index 683c224..8652122 100644
--- a/src/librustdoc/passes/lint/unescaped_backticks.rs
+++ b/src/librustdoc/passes/lint/unescaped_backticks.rs
@@ -56,7 +56,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item) {
)
.unwrap_or_else(|| item.attr_span(tcx));
- cx.tcx.struct_span_lint_hir(crate::lint::UNESCAPED_BACKTICKS, hir_id, span, "unescaped backtick", |lint| {
+ tcx.struct_span_lint_hir(crate::lint::UNESCAPED_BACKTICKS, hir_id, span, "unescaped backtick", |lint| {
let mut help_emitted = false;
match element.prev_code_guess {
diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs
index 8f8dc6b..6b7ad4c 100644
--- a/src/librustdoc/visit_ast.rs
+++ b/src/librustdoc/visit_ast.rs
@@ -280,9 +280,8 @@ fn maybe_inline_local(
return false;
};
- let is_private =
- !self.cx.cache.effective_visibilities.is_directly_public(self.cx.tcx, ori_res_did);
- let is_hidden = inherits_doc_hidden(self.cx.tcx, res_did, None);
+ let is_private = !self.cx.cache.effective_visibilities.is_directly_public(tcx, ori_res_did);
+ let is_hidden = inherits_doc_hidden(tcx, res_did, None);
// Only inline if requested or if the item would otherwise be stripped.
if (!please_inline && !is_private && !is_hidden) || is_no_inline {
@@ -290,7 +289,7 @@ fn maybe_inline_local(
}
if !please_inline &&
- let Some(item_def_id) = reexport_chain(self.cx.tcx, def_id, res_did).iter()
+ let Some(item_def_id) = reexport_chain(tcx, def_id, res_did).iter()
.flat_map(|reexport| reexport.id()).map(|id| id.expect_local())
.chain(iter::once(res_did)).nth(1) &&
item_def_id != def_id &&
@@ -298,22 +297,38 @@ fn maybe_inline_local(
.cx
.cache
.effective_visibilities
- .is_directly_public(self.cx.tcx, item_def_id.to_def_id()) &&
- !inherits_doc_hidden(self.cx.tcx, item_def_id, None)
+ .is_directly_public(tcx, item_def_id.to_def_id()) &&
+ !inherits_doc_hidden(tcx, item_def_id, None)
{
// The imported item is public and not `doc(hidden)` so no need to inline it.
return false;
}
- if !self.view_item_stack.insert(res_did) {
+ let is_bang_macro = matches!(
+ tcx.hir().get_by_def_id(res_did),
+ Node::Item(&hir::Item { kind: hir::ItemKind::Macro(_, MacroKind::Bang), .. })
+ );
+
+ if !self.view_item_stack.insert(res_did) && !is_bang_macro {
return false;
}
let ret = match tcx.hir().get_by_def_id(res_did) {
+ // Bang macros are handled a bit differently because of how the compiler
+ // treats them. If they have `#[doc(hidden)]` and the re-export doesn't have
+ // `#[doc(inline)]`, then we don't inline it.
+ Node::Item(_)
+ if is_bang_macro
+ && !please_inline
+ && renamed.is_some()
+ && self.cx.tcx.is_doc_hidden(ori_res_did) =>
+ {
+ return false;
+ }
Node::Item(&hir::Item { kind: hir::ItemKind::Mod(ref m), .. }) if glob => {
let prev = mem::replace(&mut self.inlining, true);
for &i in m.item_ids {
- let i = self.cx.tcx.hir().item(i);
+ let i = tcx.hir().item(i);
self.visit_item_inner(i, None, Some(def_id));
}
self.inlining = prev;
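The new branch concerns re-exports of bang (`macro_rules!`) macros. A hypothetical example of the shape of code it affects; whether the macro is actually inlined also depends on the other conditions in the guard (`please_inline`, `renamed`):

```rust
// lib.rs of a crate being documented (illustrative only).
mod imp {
    /// Not meant to show up in the rendered docs directly.
    #[doc(hidden)]
    #[macro_export]
    macro_rules! internal_helper {
        () => {};
    }
}

pub mod macros {
    // A plain re-export without `#[doc(inline)]`: with the change above,
    // rustdoc declines to inline the hidden macro's documentation here.
    pub use crate::internal_helper;
}
```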
diff --git a/src/tools/compiletest/Cargo.toml b/src/tools/compiletest/Cargo.toml
index e5297d4..d2f2583 100644
--- a/src/tools/compiletest/Cargo.toml
+++ b/src/tools/compiletest/Cargo.toml
@@ -3,6 +3,9 @@
version = "0.0.0"
edition = "2021"
+[lib]
+doctest = false
+
[dependencies]
colored = "2"
diff = "0.1.10"
diff --git a/src/tools/compiletest/src/common.rs b/src/tools/compiletest/src/common.rs
index ba68b5e..f796c89 100644
--- a/src/tools/compiletest/src/common.rs
+++ b/src/tools/compiletest/src/common.rs
@@ -69,6 +69,12 @@ pub enum Mode {
}
}
+impl Default for Mode {
+ fn default() -> Self {
+ Mode::Ui
+ }
+}
+
impl Mode {
pub fn disambiguator(self) -> &'static str {
// Pretty-printing tests could run concurrently, and if they do,
@@ -125,7 +131,7 @@ pub enum PanicStrategy {
}
/// Configuration for compiletest
-#[derive(Debug, Clone)]
+#[derive(Debug, Default, Clone)]
pub struct Config {
/// `true` to overwrite stderr/stdout files instead of complaining about changes in output.
pub bless: bool,
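`Mode` now has an explicit `Default` (`Mode::Ui`) and `Config` derives `Default`, presumably so users of the new compiletest library can build a `Config` without spelling out every field. A hypothetical usage sketch, assuming only field names that appear elsewhere in this diff:

```rust
use compiletest::common::{Config, Mode};

// Hypothetical: build a Config for an ad-hoc run, relying on `Default`
// for the many fields the caller does not care about.
fn demo_config() -> Config {
    Config { bless: true, edition: Some("2021".to_string()), ..Default::default() }
}

fn main() {
    // `Mode::default()` is `Mode::Ui`, so an unconfigured Config targets UI tests.
    assert_eq!(demo_config().mode, Mode::Ui);
}
```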
diff --git a/src/tools/compiletest/src/header/needs.rs b/src/tools/compiletest/src/header/needs.rs
index 4a57c61..18b3b91 100644
--- a/src/tools/compiletest/src/header/needs.rs
+++ b/src/tools/compiletest/src/header/needs.rs
@@ -71,6 +71,11 @@ pub(super) fn handle_needs(
ignore_reason: "ignored on targets without shadow call stacks",
},
Need {
+ name: "needs-sanitizer-safestack",
+ condition: cache.sanitizer_safestack,
+ ignore_reason: "ignored on targets without SafeStack support",
+ },
+ Need {
name: "needs-run-enabled",
condition: config.run_enabled(),
ignore_reason: "ignored when running the resulting test binaries is disabled",
@@ -184,6 +189,7 @@ pub(super) struct CachedNeedsConditions {
sanitizer_hwaddress: bool,
sanitizer_memtag: bool,
sanitizer_shadow_call_stack: bool,
+ sanitizer_safestack: bool,
xray: bool,
rust_lld: bool,
i686_dlltool: bool,
@@ -220,6 +226,7 @@ pub(super) fn load(config: &Config) -> Self {
sanitizer_hwaddress: util::HWASAN_SUPPORTED_TARGETS.contains(target),
sanitizer_memtag: util::MEMTAG_SUPPORTED_TARGETS.contains(target),
sanitizer_shadow_call_stack: util::SHADOWCALLSTACK_SUPPORTED_TARGETS.contains(target),
+ sanitizer_safestack: util::SAFESTACK_SUPPORTED_TARGETS.contains(target),
xray: util::XRAY_SUPPORTED_TARGETS.contains(target),
// For tests using the `needs-rust-lld` directive (e.g. for `-Zgcc-ld=lld`), we need to find
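With the cached `sanitizer_safestack` condition, a test can declare the requirement in its header. A hypothetical test sketch; the `-Zsanitizer=safestack` flag spelling is an assumption and not part of this diff:

```rust
// needs-sanitizer-support
// needs-sanitizer-safestack
// compile-flags: -Zsanitizer=safestack
// run-pass

fn main() {
    println!("built with SafeStack instrumentation");
}
```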
diff --git a/src/tools/compiletest/src/lib.rs b/src/tools/compiletest/src/lib.rs
new file mode 100644
index 0000000..fc48d01
--- /dev/null
+++ b/src/tools/compiletest/src/lib.rs
@@ -0,0 +1,1136 @@
+#![crate_name = "compiletest"]
+// The `test` crate is the only unstable feature
+// allowed here, just to share similar code.
+#![feature(test)]
+
+extern crate test;
+
+#[cfg(test)]
+mod tests;
+
+pub mod common;
+pub mod compute_diff;
+pub mod errors;
+pub mod header;
+mod json;
+mod raise_fd_limit;
+mod read2;
+pub mod runtest;
+pub mod util;
+
+use crate::common::{expected_output_path, output_base_dir, output_relative_path, UI_EXTENSIONS};
+use crate::common::{Config, Debugger, Mode, PassMode, TestPaths};
+use crate::util::logv;
+use build_helper::git::{get_git_modified_files, get_git_untracked_files};
+use core::panic;
+use getopts::Options;
+use lazycell::AtomicLazyCell;
+use std::collections::BTreeSet;
+use std::ffi::OsString;
+use std::fs;
+use std::io::{self, ErrorKind};
+use std::path::{Path, PathBuf};
+use std::process::{Command, Stdio};
+use std::time::SystemTime;
+use std::{env, vec};
+use test::ColorConfig;
+use tracing::*;
+use walkdir::WalkDir;
+
+use self::header::{make_test_description, EarlyProps};
+use crate::header::HeadersCache;
+use std::sync::Arc;
+
+pub fn parse_config(args: Vec<String>) -> Config {
+ let mut opts = Options::new();
+ opts.reqopt("", "compile-lib-path", "path to host shared libraries", "PATH")
+ .reqopt("", "run-lib-path", "path to target shared libraries", "PATH")
+ .reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH")
+ .optopt("", "rustdoc-path", "path to rustdoc to use for compiling", "PATH")
+ .optopt("", "rust-demangler-path", "path to rust-demangler to use in tests", "PATH")
+ .reqopt("", "python", "path to python to use for doc tests", "PATH")
+ .optopt("", "jsondocck-path", "path to jsondocck to use for doc tests", "PATH")
+ .optopt("", "jsondoclint-path", "path to jsondoclint to use for doc tests", "PATH")
+ .optopt("", "valgrind-path", "path to Valgrind executable for Valgrind tests", "PROGRAM")
+ .optflag("", "force-valgrind", "fail if Valgrind tests cannot be run under Valgrind")
+ .optopt("", "run-clang-based-tests-with", "path to Clang executable", "PATH")
+ .optopt("", "llvm-filecheck", "path to LLVM's FileCheck binary", "DIR")
+ .reqopt("", "src-base", "directory to scan for test files", "PATH")
+ .reqopt("", "build-base", "directory to deposit test outputs", "PATH")
+ .reqopt("", "sysroot-base", "directory containing the compiler sysroot", "PATH")
+ .reqopt("", "stage-id", "the target-stage identifier", "stageN-TARGET")
+ .reqopt(
+ "",
+ "mode",
+ "which sort of compile tests to run",
+ "run-pass-valgrind | pretty | debug-info | codegen | rustdoc \
+ | rustdoc-json | codegen-units | incremental | run-make | ui | js-doc-test | mir-opt | assembly",
+ )
+ .reqopt(
+ "",
+ "suite",
+ "which suite of compile tests to run. used for nicer error reporting.",
+ "SUITE",
+ )
+ .optopt(
+ "",
+ "pass",
+ "force {check,build,run}-pass tests to this mode.",
+ "check | build | run",
+ )
+ .optopt("", "run", "whether to execute run-* tests", "auto | always | never")
+ .optflag("", "ignored", "run tests marked as ignored")
+ .optmulti("", "skip", "skip tests matching SUBSTRING. Can be passed multiple times", "SUBSTRING")
+ .optflag("", "exact", "filters match exactly")
+ .optopt(
+ "",
+ "runtool",
+ "supervisor program to run tests under \
+ (eg. emulator, valgrind)",
+ "PROGRAM",
+ )
+ .optmulti("", "host-rustcflags", "flags to pass to rustc for host", "FLAGS")
+ .optmulti("", "target-rustcflags", "flags to pass to rustc for target", "FLAGS")
+ .optflag("", "optimize-tests", "run tests with optimizations enabled")
+ .optflag("", "verbose", "run tests verbosely, showing all output")
+ .optflag(
+ "",
+ "bless",
+ "overwrite stderr/stdout files instead of complaining about a mismatch",
+ )
+ .optflag("", "quiet", "print one character per test instead of one line")
+ .optopt("", "color", "coloring: auto, always, never", "WHEN")
+ .optflag("", "json", "emit json output instead of plaintext output")
+ .optopt("", "logfile", "file to log test execution to", "FILE")
+ .optopt("", "target", "the target to build for", "TARGET")
+ .optopt("", "host", "the host to build for", "HOST")
+ .optopt("", "cdb", "path to CDB to use for CDB debuginfo tests", "PATH")
+ .optopt("", "gdb", "path to GDB to use for GDB debuginfo tests", "PATH")
+ .optopt("", "lldb-version", "the version of LLDB used", "VERSION STRING")
+ .optopt("", "llvm-version", "the version of LLVM used", "VERSION STRING")
+ .optflag("", "system-llvm", "is LLVM the system LLVM")
+ .optopt("", "android-cross-path", "Android NDK standalone path", "PATH")
+ .optopt("", "adb-path", "path to the android debugger", "PATH")
+ .optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH")
+ .optopt("", "lldb-python-dir", "directory containing LLDB's python module", "PATH")
+ .reqopt("", "cc", "path to a C compiler", "PATH")
+ .reqopt("", "cxx", "path to a C++ compiler", "PATH")
+ .reqopt("", "cflags", "flags for the C compiler", "FLAGS")
+ .reqopt("", "cxxflags", "flags for the CXX compiler", "FLAGS")
+ .optopt("", "ar", "path to an archiver", "PATH")
+ .optopt("", "target-linker", "path to a linker for the target", "PATH")
+ .optopt("", "host-linker", "path to a linker for the host", "PATH")
+ .reqopt("", "llvm-components", "list of LLVM components built in", "LIST")
+ .optopt("", "llvm-bin-dir", "Path to LLVM's `bin` directory", "PATH")
+ .optopt("", "nodejs", "the name of nodejs", "PATH")
+ .optopt("", "npm", "the name of npm", "PATH")
+ .optopt("", "remote-test-client", "path to the remote test client", "PATH")
+ .optopt(
+ "",
+ "compare-mode",
+ "mode describing what file the actual ui output will be compared to",
+ "COMPARE MODE",
+ )
+ .optflag(
+ "",
+ "rustfix-coverage",
+ "enable this to generate a Rustfix coverage file, which is saved in \
+ `./<build_base>/rustfix_missing_coverage.txt`",
+ )
+ .optflag("", "force-rerun", "rerun tests even if the inputs are unchanged")
+ .optflag("", "only-modified", "only run tests that result been modified")
+ .optflag("", "nocapture", "")
+ .optflag("h", "help", "show this message")
+ .reqopt("", "channel", "current Rust channel", "CHANNEL")
+ .optflag("", "git-hash", "run tests which rely on commit version being compiled into the binaries")
+ .optopt("", "edition", "default Rust edition", "EDITION");
+
+ let (argv0, args_) = args.split_first().unwrap();
+ if args.len() == 1 || args[1] == "-h" || args[1] == "--help" {
+ let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
+ println!("{}", opts.usage(&message));
+ println!();
+ panic!()
+ }
+
+ let matches = &match opts.parse(args_) {
+ Ok(m) => m,
+ Err(f) => panic!("{:?}", f),
+ };
+
+ if matches.opt_present("h") || matches.opt_present("help") {
+ let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
+ println!("{}", opts.usage(&message));
+ println!();
+ panic!()
+ }
+
+ fn opt_path(m: &getopts::Matches, nm: &str) -> PathBuf {
+ match m.opt_str(nm) {
+ Some(s) => PathBuf::from(&s),
+ None => panic!("no option (=path) found for {}", nm),
+ }
+ }
+
+ fn make_absolute(path: PathBuf) -> PathBuf {
+ if path.is_relative() { env::current_dir().unwrap().join(path) } else { path }
+ }
+
+ let target = opt_str2(matches.opt_str("target"));
+ let android_cross_path = opt_path(matches, "android-cross-path");
+ let (cdb, cdb_version) = analyze_cdb(matches.opt_str("cdb"), &target);
+ let (gdb, gdb_version, gdb_native_rust) =
+ analyze_gdb(matches.opt_str("gdb"), &target, &android_cross_path);
+ let (lldb_version, lldb_native_rust) = matches
+ .opt_str("lldb-version")
+ .as_deref()
+ .and_then(extract_lldb_version)
+ .map(|(v, b)| (Some(v), b))
+ .unwrap_or((None, false));
+ let color = match matches.opt_str("color").as_deref() {
+ Some("auto") | None => ColorConfig::AutoColor,
+ Some("always") => ColorConfig::AlwaysColor,
+ Some("never") => ColorConfig::NeverColor,
+ Some(x) => panic!("argument for --color must be auto, always, or never, but found `{}`", x),
+ };
+ let llvm_version =
+ matches.opt_str("llvm-version").as_deref().and_then(header::extract_llvm_version).or_else(
+ || header::extract_llvm_version_from_binary(&matches.opt_str("llvm-filecheck")?),
+ );
+
+ let src_base = opt_path(matches, "src-base");
+ let run_ignored = matches.opt_present("ignored");
+ let mode = matches.opt_str("mode").unwrap().parse().expect("invalid mode");
+ let has_tidy = if mode == Mode::Rustdoc {
+ Command::new("tidy")
+ .arg("--version")
+ .stdout(Stdio::null())
+ .status()
+ .map_or(false, |status| status.success())
+ } else {
+ // Avoid spawning an external command when we know tidy won't be used.
+ false
+ };
+ Config {
+ bless: matches.opt_present("bless"),
+ compile_lib_path: make_absolute(opt_path(matches, "compile-lib-path")),
+ run_lib_path: make_absolute(opt_path(matches, "run-lib-path")),
+ rustc_path: opt_path(matches, "rustc-path"),
+ rustdoc_path: matches.opt_str("rustdoc-path").map(PathBuf::from),
+ rust_demangler_path: matches.opt_str("rust-demangler-path").map(PathBuf::from),
+ python: matches.opt_str("python").unwrap(),
+ jsondocck_path: matches.opt_str("jsondocck-path"),
+ jsondoclint_path: matches.opt_str("jsondoclint-path"),
+ valgrind_path: matches.opt_str("valgrind-path"),
+ force_valgrind: matches.opt_present("force-valgrind"),
+ run_clang_based_tests_with: matches.opt_str("run-clang-based-tests-with"),
+ llvm_filecheck: matches.opt_str("llvm-filecheck").map(PathBuf::from),
+ llvm_bin_dir: matches.opt_str("llvm-bin-dir").map(PathBuf::from),
+ src_base,
+ build_base: opt_path(matches, "build-base"),
+ sysroot_base: opt_path(matches, "sysroot-base"),
+ stage_id: matches.opt_str("stage-id").unwrap(),
+ mode,
+ suite: matches.opt_str("suite").unwrap(),
+ debugger: None,
+ run_ignored,
+ filters: matches.free.clone(),
+ skip: matches.opt_strs("skip"),
+ filter_exact: matches.opt_present("exact"),
+ force_pass_mode: matches.opt_str("pass").map(|mode| {
+ mode.parse::<PassMode>()
+ .unwrap_or_else(|_| panic!("unknown `--pass` option `{}` given", mode))
+ }),
+ run: matches.opt_str("run").and_then(|mode| match mode.as_str() {
+ "auto" => None,
+ "always" => Some(true),
+ "never" => Some(false),
+ _ => panic!("unknown `--run` option `{}` given", mode),
+ }),
+ logfile: matches.opt_str("logfile").map(|s| PathBuf::from(&s)),
+ runtool: matches.opt_str("runtool"),
+ host_rustcflags: matches.opt_strs("host-rustcflags"),
+ target_rustcflags: matches.opt_strs("target-rustcflags"),
+ optimize_tests: matches.opt_present("optimize-tests"),
+ target,
+ host: opt_str2(matches.opt_str("host")),
+ cdb,
+ cdb_version,
+ gdb,
+ gdb_version,
+ gdb_native_rust,
+ lldb_version,
+ lldb_native_rust,
+ llvm_version,
+ system_llvm: matches.opt_present("system-llvm"),
+ android_cross_path,
+ adb_path: opt_str2(matches.opt_str("adb-path")),
+ adb_test_dir: opt_str2(matches.opt_str("adb-test-dir")),
+ adb_device_status: opt_str2(matches.opt_str("target")).contains("android")
+ && "(none)" != opt_str2(matches.opt_str("adb-test-dir"))
+ && !opt_str2(matches.opt_str("adb-test-dir")).is_empty(),
+ lldb_python_dir: matches.opt_str("lldb-python-dir"),
+ verbose: matches.opt_present("verbose"),
+ format: match (matches.opt_present("quiet"), matches.opt_present("json")) {
+ (true, true) => panic!("--quiet and --json are incompatible"),
+ (true, false) => test::OutputFormat::Terse,
+ (false, true) => test::OutputFormat::Json,
+ (false, false) => test::OutputFormat::Pretty,
+ },
+ only_modified: matches.opt_present("only-modified"),
+ color,
+ remote_test_client: matches.opt_str("remote-test-client").map(PathBuf::from),
+ compare_mode: matches
+ .opt_str("compare-mode")
+ .map(|s| s.parse().expect("invalid --compare-mode provided")),
+ rustfix_coverage: matches.opt_present("rustfix-coverage"),
+ has_tidy,
+ channel: matches.opt_str("channel").unwrap(),
+ git_hash: matches.opt_present("git-hash"),
+ edition: matches.opt_str("edition"),
+
+ cc: matches.opt_str("cc").unwrap(),
+ cxx: matches.opt_str("cxx").unwrap(),
+ cflags: matches.opt_str("cflags").unwrap(),
+ cxxflags: matches.opt_str("cxxflags").unwrap(),
+ ar: matches.opt_str("ar").unwrap_or_else(|| String::from("ar")),
+ target_linker: matches.opt_str("target-linker"),
+ host_linker: matches.opt_str("host-linker"),
+ llvm_components: matches.opt_str("llvm-components").unwrap(),
+ nodejs: matches.opt_str("nodejs"),
+ npm: matches.opt_str("npm"),
+
+ force_rerun: matches.opt_present("force-rerun"),
+
+ target_cfgs: AtomicLazyCell::new(),
+
+ nocapture: matches.opt_present("nocapture"),
+ }
+}
+
+pub fn log_config(config: &Config) {
+ let c = config;
+ logv(c, "configuration:".to_string());
+ logv(c, format!("compile_lib_path: {:?}", config.compile_lib_path));
+ logv(c, format!("run_lib_path: {:?}", config.run_lib_path));
+ logv(c, format!("rustc_path: {:?}", config.rustc_path.display()));
+ logv(c, format!("rustdoc_path: {:?}", config.rustdoc_path));
+ logv(c, format!("rust_demangler_path: {:?}", config.rust_demangler_path));
+ logv(c, format!("src_base: {:?}", config.src_base.display()));
+ logv(c, format!("build_base: {:?}", config.build_base.display()));
+ logv(c, format!("stage_id: {}", config.stage_id));
+ logv(c, format!("mode: {}", config.mode));
+ logv(c, format!("run_ignored: {}", config.run_ignored));
+ logv(c, format!("filters: {:?}", config.filters));
+ logv(c, format!("skip: {:?}", config.skip));
+ logv(c, format!("filter_exact: {}", config.filter_exact));
+ logv(
+ c,
+ format!("force_pass_mode: {}", opt_str(&config.force_pass_mode.map(|m| format!("{}", m))),),
+ );
+ logv(c, format!("runtool: {}", opt_str(&config.runtool)));
+ logv(c, format!("host-rustcflags: {:?}", config.host_rustcflags));
+ logv(c, format!("target-rustcflags: {:?}", config.target_rustcflags));
+ logv(c, format!("target: {}", config.target));
+ logv(c, format!("host: {}", config.host));
+ logv(c, format!("android-cross-path: {:?}", config.android_cross_path.display()));
+ logv(c, format!("adb_path: {:?}", config.adb_path));
+ logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir));
+ logv(c, format!("adb_device_status: {}", config.adb_device_status));
+ logv(c, format!("ar: {}", config.ar));
+ logv(c, format!("target-linker: {:?}", config.target_linker));
+ logv(c, format!("host-linker: {:?}", config.host_linker));
+ logv(c, format!("verbose: {}", config.verbose));
+ logv(c, format!("format: {:?}", config.format));
+ logv(c, "\n".to_string());
+}
+
+pub fn opt_str(maybestr: &Option<String>) -> &str {
+ match *maybestr {
+ None => "(none)",
+ Some(ref s) => s,
+ }
+}
+
+pub fn opt_str2(maybestr: Option<String>) -> String {
+ match maybestr {
+ None => "(none)".to_owned(),
+ Some(s) => s,
+ }
+}
+
+pub fn run_tests(config: Arc<Config>) {
+ // If we want to collect rustfix coverage information,
+ // we first make sure that the coverage file does not exist.
+ // It will be created later on.
+ if config.rustfix_coverage {
+ let mut coverage_file_path = config.build_base.clone();
+ coverage_file_path.push("rustfix_missing_coverage.txt");
+ if coverage_file_path.exists() {
+ if let Err(e) = fs::remove_file(&coverage_file_path) {
+ panic!("Could not delete {} due to {}", coverage_file_path.display(), e)
+ }
+ }
+ }
+
+ // sadly osx needs some file descriptor limits raised for running tests in
+ // parallel (especially when we have lots and lots of child processes).
+ // For context, see #8904
+ unsafe {
+ raise_fd_limit::raise_fd_limit();
+ }
+ // Prevent issue #21352: UAC blocking `.exe` files containing 'patch' etc. on Windows.
+ // If #11207 is resolved (adding a manifest to the .exe), this becomes unnecessary.
+ env::set_var("__COMPAT_LAYER", "RunAsInvoker");
+
+ // Let tests know which target they're running as
+ env::set_var("TARGET", &config.target);
+
+ let opts = test_opts(&config);
+
+ let mut configs = Vec::new();
+ if let Mode::DebugInfo = config.mode {
+ // Debugging emscripten code doesn't make sense today
+ if !config.target.contains("emscripten") {
+ configs.extend(configure_cdb(&config));
+ configs.extend(configure_gdb(&config));
+ configs.extend(configure_lldb(&config));
+ }
+ } else {
+ configs.push(config.clone());
+ };
+
+ let mut tests = Vec::new();
+ for c in configs {
+ let mut found_paths = BTreeSet::new();
+ make_tests(c, &mut tests, &mut found_paths);
+ check_overlapping_tests(&found_paths);
+ }
+
+ tests.sort_by(|a, b| a.desc.name.as_slice().cmp(&b.desc.name.as_slice()));
+
+ let res = test::run_tests_console(&opts, tests);
+ match res {
+ Ok(true) => {}
+ Ok(false) => {
+ // We want to report that the tests failed, but we also want to give
+ // some indication of just what tests we were running. Especially on
+ // CI, where there can be cross-compiled tests for a lot of
+ // architectures, without this critical information it can be quite
+ // easy to miss which tests failed, and as such fail to reproduce
+ // the failure locally.
+
+ println!(
+ "Some tests failed in compiletest suite={}{} mode={} host={} target={}",
+ config.suite,
+ config
+ .compare_mode
+ .as_ref()
+ .map(|c| format!(" compare_mode={:?}", c))
+ .unwrap_or_default(),
+ config.mode,
+ config.host,
+ config.target
+ );
+
+ std::process::exit(1);
+ }
+ Err(e) => {
+ // We don't know if tests passed or not, but if there was an error
+ // during testing we don't want to just succeed (we may not have
+ // tested something), so fail.
+ //
+ // This should realistically "never" happen, so don't try to make
+ // this a pretty error message.
+ panic!("I/O failure during tests: {:?}", e);
+ }
+ }
+}
+
+fn configure_cdb(config: &Config) -> Option<Arc<Config>> {
+ config.cdb.as_ref()?;
+
+ Some(Arc::new(Config { debugger: Some(Debugger::Cdb), ..config.clone() }))
+}
+
+fn configure_gdb(config: &Config) -> Option<Arc<Config>> {
+ config.gdb_version?;
+
+ if config.matches_env("msvc") {
+ return None;
+ }
+
+ if config.remote_test_client.is_some() && !config.target.contains("android") {
+ println!(
+ "WARNING: debuginfo tests are not available when \
+ testing with remote"
+ );
+ return None;
+ }
+
+ if config.target.contains("android") {
+ println!(
+ "{} debug-info test uses tcp 5039 port.\
+ please reserve it",
+ config.target
+ );
+
+ // The Android debug-info tests use a remote debugger, so we run one test
+ // thread at a time, as they all share the same TCP port to communicate
+ // over.
+ //
+ // We should figure out how to lift this restriction (e.g. by running them
+ // on dynamically allocated ports).
+ env::set_var("RUST_TEST_THREADS", "1");
+ }
+
+ Some(Arc::new(Config { debugger: Some(Debugger::Gdb), ..config.clone() }))
+}
+
+fn configure_lldb(config: &Config) -> Option<Arc<Config>> {
+ config.lldb_python_dir.as_ref()?;
+
+ if let Some(350) = config.lldb_version {
+ println!(
+ "WARNING: The used version of LLDB (350) has a \
+ known issue that breaks debuginfo tests. See \
+ issue #32520 for more information. Skipping all \
+ LLDB-based tests!",
+ );
+ return None;
+ }
+
+ Some(Arc::new(Config { debugger: Some(Debugger::Lldb), ..config.clone() }))
+}
+
+pub fn test_opts(config: &Config) -> test::TestOpts {
+ if env::var("RUST_TEST_NOCAPTURE").is_ok() {
+ eprintln!(
+ "WARNING: RUST_TEST_NOCAPTURE is no longer used. \
+ Use the `--nocapture` flag instead."
+ );
+ }
+
+ test::TestOpts {
+ exclude_should_panic: false,
+ filters: config.filters.clone(),
+ filter_exact: config.filter_exact,
+ run_ignored: if config.run_ignored { test::RunIgnored::Yes } else { test::RunIgnored::No },
+ format: config.format,
+ logfile: config.logfile.clone(),
+ run_tests: true,
+ bench_benchmarks: true,
+ nocapture: config.nocapture,
+ color: config.color,
+ shuffle: false,
+ shuffle_seed: None,
+ test_threads: None,
+ skip: config.skip.clone(),
+ list: false,
+ options: test::Options::new(),
+ time_options: None,
+ force_run_in_process: false,
+ fail_fast: std::env::var_os("RUSTC_TEST_FAIL_FAST").is_some(),
+ }
+}
+
+pub fn make_tests(
+ config: Arc<Config>,
+ tests: &mut Vec<test::TestDescAndFn>,
+ found_paths: &mut BTreeSet<PathBuf>,
+) {
+ debug!("making tests from {:?}", config.src_base.display());
+ let inputs = common_inputs_stamp(&config);
+ let modified_tests = modified_tests(&config, &config.src_base).unwrap_or_else(|err| {
+ panic!("modified_tests got error from dir: {}, error: {}", config.src_base.display(), err)
+ });
+
+ let cache = HeadersCache::load(&config);
+ let mut poisoned = false;
+ collect_tests_from_dir(
+ config.clone(),
+ &cache,
+ &config.src_base,
+ &PathBuf::new(),
+ &inputs,
+ tests,
+ found_paths,
+ &modified_tests,
+ &mut poisoned,
+ )
+ .unwrap_or_else(|_| panic!("Could not read tests from {}", config.src_base.display()));
+
+ if poisoned {
+ eprintln!();
+ panic!("there are errors in tests");
+ }
+}
+
+/// Returns a stamp constructed from input files common to all test cases.
+fn common_inputs_stamp(config: &Config) -> Stamp {
+ let rust_src_dir = config.find_rust_src_root().expect("Could not find Rust source root");
+
+ let mut stamp = Stamp::from_path(&config.rustc_path);
+
+ // Relevant pretty printer files
+ let pretty_printer_files = [
+ "src/etc/rust_types.py",
+ "src/etc/gdb_load_rust_pretty_printers.py",
+ "src/etc/gdb_lookup.py",
+ "src/etc/gdb_providers.py",
+ "src/etc/lldb_batchmode.py",
+ "src/etc/lldb_lookup.py",
+ "src/etc/lldb_providers.py",
+ ];
+ for file in &pretty_printer_files {
+ let path = rust_src_dir.join(file);
+ stamp.add_path(&path);
+ }
+
+ stamp.add_dir(&rust_src_dir.join("src/etc/natvis"));
+
+ stamp.add_dir(&config.run_lib_path);
+
+ if let Some(ref rustdoc_path) = config.rustdoc_path {
+ stamp.add_path(&rustdoc_path);
+ stamp.add_path(&rust_src_dir.join("src/etc/htmldocck.py"));
+ }
+
+ // Compiletest itself.
+ stamp.add_dir(&rust_src_dir.join("src/tools/compiletest/"));
+
+ stamp
+}
+
+fn modified_tests(config: &Config, dir: &Path) -> Result<Vec<PathBuf>, String> {
+ if !config.only_modified {
+ return Ok(vec![]);
+ }
+ let files =
+ get_git_modified_files(Some(dir), &vec!["rs", "stderr", "fixed"])?.unwrap_or(vec![]);
+ // Also include untracked files so that newly added test cases are picked up;
+ // this is convenient during day-to-day development.
+ let untracked_files = get_git_untracked_files(None)?.unwrap_or(vec![]);
+
+ let all_paths = [&files[..], &untracked_files[..]].concat();
+ let full_paths = {
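+ // e.g. `tests/ui/foo.stderr` maps back to `tests/ui/foo.rs`, so a change to
+ // an expected-output file re-runs the corresponding test.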
+ let mut full_paths: Vec<PathBuf> = all_paths
+ .into_iter()
+ .map(|f| PathBuf::from(f).with_extension("").with_extension("rs"))
+ .filter_map(|f| if Path::new(&f).exists() { f.canonicalize().ok() } else { None })
+ .collect();
+ full_paths.dedup();
+ full_paths.sort_unstable();
+ full_paths
+ };
+ Ok(full_paths)
+}
+
+fn collect_tests_from_dir(
+ config: Arc<Config>,
+ cache: &HeadersCache,
+ dir: &Path,
+ relative_dir_path: &Path,
+ inputs: &Stamp,
+ tests: &mut Vec<test::TestDescAndFn>,
+ found_paths: &mut BTreeSet<PathBuf>,
+ modified_tests: &Vec<PathBuf>,
+ poisoned: &mut bool,
+) -> io::Result<()> {
+ // Ignore directories that contain a file named `compiletest-ignore-dir`.
+ if dir.join("compiletest-ignore-dir").exists() {
+ return Ok(());
+ }
+
+ if config.mode == Mode::RunMake && dir.join("Makefile").exists() {
+ let paths = TestPaths {
+ file: dir.to_path_buf(),
+ relative_dir: relative_dir_path.parent().unwrap().to_path_buf(),
+ };
+ tests.extend(make_test(config, cache, &paths, inputs, poisoned));
+ return Ok(());
+ }
+
+ // If we find a test foo/bar.rs, we have to build the
+ // output directory `$build/foo` so we can write
+ // `$build/foo/bar` into it. We do this *now* in this
+ // sequential loop because otherwise, if we do it in the
+ // tests themselves, they race for the privilege of
+ // creating the directories and sometimes fail randomly.
+ let build_dir = output_relative_path(&config, relative_dir_path);
+ fs::create_dir_all(&build_dir).unwrap();
+
+ // Add each `.rs` file as a test, and recurse further on any
+ // subdirectories we find, except for `auxiliary` directories.
+ for file in fs::read_dir(dir)? {
+ let file = file?;
+ let file_path = file.path();
+ let file_name = file.file_name();
+ if is_test(&file_name) && (!config.only_modified || modified_tests.contains(&file_path)) {
+ debug!("found test file: {:?}", file_path.display());
+ let rel_test_path = relative_dir_path.join(file_path.file_stem().unwrap());
+ found_paths.insert(rel_test_path);
+ let paths =
+ TestPaths { file: file_path, relative_dir: relative_dir_path.to_path_buf() };
+
+ tests.extend(make_test(config.clone(), cache, &paths, inputs, poisoned))
+ } else if file_path.is_dir() {
+ let relative_file_path = relative_dir_path.join(file.file_name());
+ if &file_name != "auxiliary" {
+ debug!("found directory: {:?}", file_path.display());
+ collect_tests_from_dir(
+ config.clone(),
+ cache,
+ &file_path,
+ &relative_file_path,
+ inputs,
+ tests,
+ found_paths,
+ modified_tests,
+ poisoned,
+ )?;
+ }
+ } else {
+ debug!("found other file/directory: {:?}", file_path.display());
+ }
+ }
+ Ok(())
+}
+
+/// Returns true if `file_name` looks like a proper test file name.
+pub fn is_test(file_name: &OsString) -> bool {
+ let file_name = file_name.to_str().unwrap();
+
+ if !file_name.ends_with(".rs") {
+ return false;
+ }
+
+ // `.`, `#`, and `~` are common temp-file prefixes.
+ let invalid_prefixes = &[".", "#", "~"];
+ !invalid_prefixes.iter().any(|p| file_name.starts_with(p))
+}
+
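
For illustration (not part of this diff): a hypothetical check of `is_test` against a few file names, assuming the function is in scope.

```rust
use std::ffi::OsString;

fn main() {
    assert!(is_test(&OsString::from("trait-bounds.rs")));
    assert!(!is_test(&OsString::from("notes.md")));     // not a `.rs` file
    assert!(!is_test(&OsString::from(".hidden.rs")));   // temp-file prefix `.`
    assert!(!is_test(&OsString::from("#scratch.rs")));  // editor temp file
}
```
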
+fn make_test(
+ config: Arc<Config>,
+ cache: &HeadersCache,
+ testpaths: &TestPaths,
+ inputs: &Stamp,
+ poisoned: &mut bool,
+) -> Vec<test::TestDescAndFn> {
+ let test_path = if config.mode == Mode::RunMake {
+ // Parse directives in the Makefile
+ testpaths.file.join("Makefile")
+ } else {
+ PathBuf::from(&testpaths.file)
+ };
+ let early_props = EarlyProps::from_file(&config, &test_path);
+
+ // Incremental tests are special, they inherently cannot be run in parallel.
+ // `runtest::run` will be responsible for iterating over revisions.
+ let revisions = if early_props.revisions.is_empty() || config.mode == Mode::Incremental {
+ vec![None]
+ } else {
+ early_props.revisions.iter().map(Some).collect()
+ };
+
+ revisions
+ .into_iter()
+ .map(|revision| {
+ let src_file =
+ std::fs::File::open(&test_path).expect("open test file to parse ignores");
+ let cfg = revision.map(|v| &**v);
+ let test_name = crate::make_test_name(&config, testpaths, revision);
+ let mut desc = make_test_description(
+ &config, cache, test_name, &test_path, src_file, cfg, poisoned,
+ );
+ // Ignore tests that have already run and are up to date with respect to their inputs.
+ if !config.force_rerun {
+ desc.ignore |= is_up_to_date(
+ &config,
+ testpaths,
+ &early_props,
+ revision.map(|s| s.as_str()),
+ inputs,
+ );
+ }
+ test::TestDescAndFn {
+ desc,
+ testfn: make_test_closure(config.clone(), testpaths, revision),
+ }
+ })
+ .collect()
+}
+
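
For illustration (not part of this diff): the revision handling above turns every declared revision into its own test, except for incremental tests, which keep a single entry. A standalone sketch of just that selection logic; the `select_revisions` helper is hypothetical.

```rust
// Hypothetical distillation of the revision expansion in `make_test`.
fn select_revisions(revisions: &[String], incremental: bool) -> Vec<Option<&String>> {
    if revisions.is_empty() || incremental {
        vec![None]
    } else {
        revisions.iter().map(Some).collect()
    }
}

fn main() {
    let revs = vec!["rpass1".to_string(), "rpass2".to_string()];
    // Normal tests: one test per revision.
    assert_eq!(select_revisions(&revs, false).len(), 2);
    // Incremental tests: a single test iterates over revisions internally.
    assert_eq!(select_revisions(&revs, true).len(), 1);
    // No revisions: a single unrevisioned test.
    let no_revs: &[String] = &[];
    assert_eq!(select_revisions(no_revs, false), vec![None::<&String>]);
}
```
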
+fn stamp(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf {
+ output_base_dir(config, testpaths, revision).join("stamp")
+}
+
+fn files_related_to_test(
+ config: &Config,
+ testpaths: &TestPaths,
+ props: &EarlyProps,
+ revision: Option<&str>,
+) -> Vec<PathBuf> {
+ let mut related = vec![];
+
+ if testpaths.file.is_dir() {
+ // run-make tests use their individual directory
+ for entry in WalkDir::new(&testpaths.file) {
+ let path = entry.unwrap().into_path();
+ if path.is_file() {
+ related.push(path);
+ }
+ }
+ } else {
+ related.push(testpaths.file.clone());
+ }
+
+ for aux in &props.aux {
+ let path = testpaths.file.parent().unwrap().join("auxiliary").join(aux);
+ related.push(path);
+ }
+
+ // UI test files.
+ for extension in UI_EXTENSIONS {
+ let path = expected_output_path(testpaths, revision, &config.compare_mode, extension);
+ related.push(path);
+ }
+
+ related
+}
+
+fn is_up_to_date(
+ config: &Config,
+ testpaths: &TestPaths,
+ props: &EarlyProps,
+ revision: Option<&str>,
+ inputs: &Stamp,
+) -> bool {
+ let stamp_name = stamp(config, testpaths, revision);
+ // Check hash.
+ let contents = match fs::read_to_string(&stamp_name) {
+ Ok(f) => f,
+ Err(ref e) if e.kind() == ErrorKind::InvalidData => panic!("Can't read stamp contents"),
+ Err(_) => return false,
+ };
+ let expected_hash = runtest::compute_stamp_hash(config);
+ if contents != expected_hash {
+ return false;
+ }
+
+ // Check timestamps.
+ let mut inputs = inputs.clone();
+ for path in files_related_to_test(config, testpaths, props, revision) {
+ inputs.add_path(&path);
+ }
+
+ inputs < Stamp::from_path(&stamp_name)
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
+struct Stamp {
+ time: SystemTime,
+}
+
+impl Stamp {
+ fn from_path(path: &Path) -> Self {
+ let mut stamp = Stamp { time: SystemTime::UNIX_EPOCH };
+ stamp.add_path(path);
+ stamp
+ }
+
+ fn add_path(&mut self, path: &Path) {
+ let modified = fs::metadata(path)
+ .and_then(|metadata| metadata.modified())
+ .unwrap_or(SystemTime::UNIX_EPOCH);
+ self.time = self.time.max(modified);
+ }
+
+ fn add_dir(&mut self, path: &Path) {
+ for entry in WalkDir::new(path) {
+ let entry = entry.unwrap();
+ if entry.file_type().is_file() {
+ let modified = entry
+ .metadata()
+ .ok()
+ .and_then(|metadata| metadata.modified().ok())
+ .unwrap_or(SystemTime::UNIX_EPOCH);
+ self.time = self.time.max(modified);
+ }
+ }
+ }
+}
+
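
For illustration (not part of this diff): the derived `Ord` on `Stamp` is what makes the `inputs < Stamp::from_path(&stamp_name)` comparison in `is_up_to_date` work. A stamp records the newest modification time it has seen, so a test is up to date only if every input is strictly older than the stamp file written after the last run. A minimal sketch, assuming access to the private `Stamp` type (for example from a unit test in this module).

```rust
use std::time::{Duration, SystemTime};

fn main() {
    let inputs = Stamp { time: SystemTime::UNIX_EPOCH };
    let stamp_file = Stamp { time: SystemTime::UNIX_EPOCH + Duration::from_secs(60) };
    // All inputs are older than the recorded stamp => the test is up to date.
    assert!(inputs < stamp_file);
    // A newer input flips the comparison and forces a rerun.
    let touched = Stamp { time: SystemTime::UNIX_EPOCH + Duration::from_secs(120) };
    assert!(!(touched < stamp_file));
}
```
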
+fn make_test_name(
+ config: &Config,
+ testpaths: &TestPaths,
+ revision: Option<&String>,
+) -> test::TestName {
+ // Print the name of the file, relative to the repository root.
+ // `src_base` looks like `/path/to/rust/tests/ui`
+ let root_directory = config.src_base.parent().unwrap().parent().unwrap();
+ let path = testpaths.file.strip_prefix(root_directory).unwrap();
+ let debugger = match config.debugger {
+ Some(d) => format!("-{}", d),
+ None => String::new(),
+ };
+ let mode_suffix = match config.compare_mode {
+ Some(ref mode) => format!(" ({})", mode.to_str()),
+ None => String::new(),
+ };
+
+ test::DynTestName(format!(
+ "[{}{}{}] {}{}",
+ config.mode,
+ debugger,
+ mode_suffix,
+ path.display(),
+ revision.map_or("".to_string(), |rev| format!("#{}", rev))
+ ))
+}
+
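
For illustration (not part of this diff): the formatted name produced above looks like `[ui] tests/ui/foo/bar.rs#rev1`. A hypothetical reconstruction of the format string with example values.

```rust
fn main() {
    let mode = "ui";
    let debugger = "";      // e.g. "-gdb" for debuginfo tests
    let mode_suffix = "";   // e.g. " (nll)" when a compare mode is active
    let path = "tests/ui/foo/bar.rs";
    let revision = "#rev1"; // empty when the test has no revisions
    let name = format!("[{}{}{}] {}{}", mode, debugger, mode_suffix, path, revision);
    assert_eq!(name, "[ui] tests/ui/foo/bar.rs#rev1");
}
```
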
+fn make_test_closure(
+ config: Arc<Config>,
+ testpaths: &TestPaths,
+ revision: Option<&String>,
+) -> test::TestFn {
+ let config = config.clone();
+ let testpaths = testpaths.clone();
+ let revision = revision.cloned();
+ test::DynTestFn(Box::new(move || {
+ runtest::run(config, &testpaths, revision.as_deref());
+ Ok(())
+ }))
+}
+
+/// Returns `true` if the given target is an Android target for the
+/// purposes of GDB testing.
+fn is_android_gdb_target(target: &str) -> bool {
+ matches!(
+ &target[..],
+ "arm-linux-androideabi" | "armv7-linux-androideabi" | "aarch64-linux-android"
+ )
+}
+
+/// Returns `true` if the given target is an MSVC target for the purposes of CDB testing.
+fn is_pc_windows_msvc_target(target: &str) -> bool {
+ target.ends_with("-pc-windows-msvc")
+}
+
+fn find_cdb(target: &str) -> Option<OsString> {
+ if !(cfg!(windows) && is_pc_windows_msvc_target(target)) {
+ return None;
+ }
+
+ let pf86 = env::var_os("ProgramFiles(x86)").or_else(|| env::var_os("ProgramFiles"))?;
+ let cdb_arch = if cfg!(target_arch = "x86") {
+ "x86"
+ } else if cfg!(target_arch = "x86_64") {
+ "x64"
+ } else if cfg!(target_arch = "aarch64") {
+ "arm64"
+ } else if cfg!(target_arch = "arm") {
+ "arm"
+ } else {
+ return None; // No compatible CDB.exe in the Windows 10 SDK
+ };
+
+ let mut path = PathBuf::new();
+ path.push(pf86);
+ path.push(r"Windows Kits\10\Debuggers"); // We could check 8.1 etc. too?
+ path.push(cdb_arch);
+ path.push(r"cdb.exe");
+
+ if !path.exists() {
+ return None;
+ }
+
+ Some(path.into_os_string())
+}
+
+/// Returns the path to CDB and, when it can be determined, its version.
+fn analyze_cdb(cdb: Option<String>, target: &str) -> (Option<OsString>, Option<[u16; 4]>) {
+ let cdb = cdb.map(OsString::from).or_else(|| find_cdb(target));
+
+ let mut version = None;
+ if let Some(cdb) = cdb.as_ref() {
+ if let Ok(output) = Command::new(cdb).arg("/version").output() {
+ if let Some(first_line) = String::from_utf8_lossy(&output.stdout).lines().next() {
+ version = extract_cdb_version(&first_line);
+ }
+ }
+ }
+
+ (cdb, version)
+}
+
+fn extract_cdb_version(full_version_line: &str) -> Option<[u16; 4]> {
+ // Example full_version_line: "cdb version 10.0.18362.1"
+ let version = full_version_line.rsplit(' ').next()?;
+ let mut components = version.split('.');
+ let major: u16 = components.next().unwrap().parse().unwrap();
+ let minor: u16 = components.next().unwrap().parse().unwrap();
+ let patch: u16 = components.next().unwrap_or("0").parse().unwrap();
+ let build: u16 = components.next().unwrap_or("0").parse().unwrap();
+ Some([major, minor, patch, build])
+}
+
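
For illustration (not part of this diff): a hypothetical check of the parser above on the version line from its own comment, assuming it is called from within this module.

```rust
fn main() {
    assert_eq!(
        extract_cdb_version("cdb version 10.0.18362.1"),
        Some([10, 0, 18362, 1])
    );
}
```
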
+/// Returns (path to GDB, GDB version, whether GDB has native Rust support).
+fn analyze_gdb(
+ gdb: Option<String>,
+ target: &str,
+ android_cross_path: &PathBuf,
+) -> (Option<String>, Option<u32>, bool) {
+ #[cfg(not(windows))]
+ const GDB_FALLBACK: &str = "gdb";
+ #[cfg(windows)]
+ const GDB_FALLBACK: &str = "gdb.exe";
+
+ const MIN_GDB_WITH_RUST: u32 = 7011010;
+
+ let fallback_gdb = || {
+ if is_android_gdb_target(target) {
+ let mut gdb_path = match android_cross_path.to_str() {
+ Some(x) => x.to_owned(),
+ None => panic!("cannot find android cross path"),
+ };
+ gdb_path.push_str("/bin/gdb");
+ gdb_path
+ } else {
+ GDB_FALLBACK.to_owned()
+ }
+ };
+
+ let gdb = match gdb {
+ None => fallback_gdb(),
+ Some(ref s) if s.is_empty() => fallback_gdb(), // may be empty if configure found no gdb
+ Some(ref s) => s.to_owned(),
+ };
+
+ let mut version_line = None;
+ if let Ok(output) = Command::new(&gdb).arg("--version").output() {
+ if let Some(first_line) = String::from_utf8_lossy(&output.stdout).lines().next() {
+ version_line = Some(first_line.to_string());
+ }
+ }
+
+ let version = match version_line {
+ Some(line) => extract_gdb_version(&line),
+ None => return (None, None, false),
+ };
+
+ let gdb_native_rust = version.map_or(false, |v| v >= MIN_GDB_WITH_RUST);
+
+ (Some(gdb), version, gdb_native_rust)
+}
+
+fn extract_gdb_version(full_version_line: &str) -> Option<u32> {
+ let full_version_line = full_version_line.trim();
+
+ // GDB versions look like this: "major.minor.patch?.yyyymmdd?", with both
+ // of the ? sections being optional
+
+ // We will parse up to 3 digits for each component, ignoring the date
+
+ // We skip text in parentheses. This avoids accidentally parsing
+ // the openSUSE version, which looks like:
+ // GNU gdb (GDB; openSUSE Leap 15.0) 8.1
+ // This particular form is documented in the GNU coding standards:
+ // https://www.gnu.org/prep/standards/html_node/_002d_002dversion.html#g_t_002d_002dversion
+
+ let unbracketed_part = full_version_line.split('[').next().unwrap();
+ let mut splits = unbracketed_part.trim_end().rsplit(' ');
+ let version_string = splits.next().unwrap();
+
+ let mut splits = version_string.split('.');
+ let major = splits.next().unwrap();
+ let minor = splits.next().unwrap();
+ let patch = splits.next();
+
+ let major: u32 = major.parse().unwrap();
+ let (minor, patch): (u32, u32) = match minor.find(not_a_digit) {
+ None => {
+ let minor = minor.parse().unwrap();
+ let patch: u32 = match patch {
+ Some(patch) => match patch.find(not_a_digit) {
+ None => patch.parse().unwrap(),
+ Some(idx) if idx > 3 => 0,
+ Some(idx) => patch[..idx].parse().unwrap(),
+ },
+ None => 0,
+ };
+ (minor, patch)
+ }
+ // There is no patch version after minor-date (e.g. "4-2012").
+ Some(idx) => {
+ let minor = minor[..idx].parse().unwrap();
+ (minor, 0)
+ }
+ };
+
+ Some(((major * 1000) + minor) * 1000 + patch)
+}
+
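
For illustration (not part of this diff): the returned number packs the version as `major * 1_000_000 + minor * 1_000 + patch`, which is why `MIN_GDB_WITH_RUST` is `7011010` (GDB 7.11.10). Hypothetical checks, including the openSUSE-style line mentioned in the comment.

```rust
fn main() {
    assert_eq!(extract_gdb_version("GNU gdb (GDB) 7.11.10"), Some(7011010));
    assert_eq!(extract_gdb_version("GNU gdb (GDB; openSUSE Leap 15.0) 8.1"), Some(8001000));
}
```
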
+/// Returns (LLDB version, LLDB is rust-enabled)
+fn extract_lldb_version(full_version_line: &str) -> Option<(u32, bool)> {
+ // Extract the major LLDB version from the given version string.
+ // LLDB version strings are different for Apple and non-Apple platforms.
+ // The Apple variant looks like this:
+ //
+ // LLDB-179.5 (older versions)
+ // lldb-300.2.51 (new versions)
+ //
+ // We are only interested in the major version number, so this function
+ // will return `Some(179)` and `Some(300)` respectively.
+ //
+ // Upstream versions look like:
+ // lldb version 6.0.1
+ //
+ // There doesn't seem to be a way to correlate the Apple version
+ // with the upstream version, and since the tests were originally
+ // written against Apple versions, we make a fake Apple version by
+ // multiplying the first number by 100. This is a hack, but
+ // normally fine because the only non-Apple version we test is
+ // rust-enabled.
+
+ let full_version_line = full_version_line.trim();
+
+ if let Some(apple_ver) =
+ full_version_line.strip_prefix("LLDB-").or_else(|| full_version_line.strip_prefix("lldb-"))
+ {
+ if let Some(idx) = apple_ver.find(not_a_digit) {
+ let version: u32 = apple_ver[..idx].parse().unwrap();
+ return Some((version, full_version_line.contains("rust-enabled")));
+ }
+ } else if let Some(lldb_ver) = full_version_line.strip_prefix("lldb version ") {
+ if let Some(idx) = lldb_ver.find(not_a_digit) {
+ let version: u32 = lldb_ver[..idx].parse().ok()?;
+ return Some((version * 100, full_version_line.contains("rust-enabled")));
+ }
+ }
+ None
+}
+
+fn not_a_digit(c: char) -> bool {
+ !c.is_digit(10)
+}
+
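
For illustration (not part of this diff): hypothetical checks of `extract_lldb_version` for the two version formats described in its comment, with Apple-style numbers taken as-is and upstream numbers scaled by 100.

```rust
fn main() {
    assert_eq!(extract_lldb_version("lldb-300.2.51"), Some((300, false)));
    assert_eq!(extract_lldb_version("lldb version 6.0.1"), Some((600, false)));
}
```
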
+fn check_overlapping_tests(found_paths: &BTreeSet<PathBuf>) {
+ let mut collisions = Vec::new();
+ for path in found_paths {
+ for ancestor in path.ancestors().skip(1) {
+ if found_paths.contains(ancestor) {
+ collisions.push((path, ancestor.clone()));
+ }
+ }
+ }
+ if !collisions.is_empty() {
+ let collisions: String = collisions
+ .into_iter()
+ .map(|(path, check_parent)| format!("test {path:?} clashes with {check_parent:?}\n"))
+ .collect();
+ panic!(
+ "{collisions}\n\
+ Tests cannot have overlapping names. Make sure they use unique prefixes."
+ );
+ }
+}
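
For illustration (not part of this diff): since collected test names are relative paths with the `.rs` extension stripped, a test file and a directory of tests sharing the same stem collide. A hypothetical set that would trigger the panic above.

```rust
use std::collections::BTreeSet;
use std::path::PathBuf;

fn main() {
    let mut found_paths = BTreeSet::new();
    found_paths.insert(PathBuf::from("ui/issue-123"));         // from ui/issue-123.rs
    found_paths.insert(PathBuf::from("ui/issue-123/variant")); // from ui/issue-123/variant.rs
    // `check_overlapping_tests(&found_paths)` would panic here, because
    // "ui/issue-123" is an ancestor of "ui/issue-123/variant".
}
```
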
diff --git a/src/tools/compiletest/src/main.rs b/src/tools/compiletest/src/main.rs
index c4bef99..34d4855 100644
--- a/src/tools/compiletest/src/main.rs
+++ b/src/tools/compiletest/src/main.rs
@@ -1,45 +1,6 @@
-#![crate_name = "compiletest"]
-// The `test` crate is the only unstable feature
-// allowed here, just to share similar code.
-#![feature(test)]
+use std::{env, sync::Arc};
-extern crate test;
-
-use crate::common::{expected_output_path, output_base_dir, output_relative_path, UI_EXTENSIONS};
-use crate::common::{Config, Debugger, Mode, PassMode, TestPaths};
-use crate::util::logv;
-use build_helper::git::{get_git_modified_files, get_git_untracked_files};
-use core::panic;
-use getopts::Options;
-use lazycell::AtomicLazyCell;
-use std::collections::BTreeSet;
-use std::ffi::OsString;
-use std::fs;
-use std::io::{self, ErrorKind};
-use std::path::{Path, PathBuf};
-use std::process::{Command, Stdio};
-use std::time::SystemTime;
-use std::{env, vec};
-use test::ColorConfig;
-use tracing::*;
-use walkdir::WalkDir;
-
-use self::header::{make_test_description, EarlyProps};
-use crate::header::HeadersCache;
-use std::sync::Arc;
-
-#[cfg(test)]
-mod tests;
-
-pub mod common;
-pub mod compute_diff;
-pub mod errors;
-pub mod header;
-mod json;
-mod raise_fd_limit;
-mod read2;
-pub mod runtest;
-pub mod util;
+use compiletest::{common::Mode, log_config, parse_config, run_tests};
fn main() {
tracing_subscriber::fmt::init();
@@ -57,1097 +18,3 @@ fn main() {
log_config(&config);
run_tests(config);
}
-
-pub fn parse_config(args: Vec<String>) -> Config {
- let mut opts = Options::new();
- opts.reqopt("", "compile-lib-path", "path to host shared libraries", "PATH")
- .reqopt("", "run-lib-path", "path to target shared libraries", "PATH")
- .reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH")
- .optopt("", "rustdoc-path", "path to rustdoc to use for compiling", "PATH")
- .optopt("", "rust-demangler-path", "path to rust-demangler to use in tests", "PATH")
- .reqopt("", "python", "path to python to use for doc tests", "PATH")
- .optopt("", "jsondocck-path", "path to jsondocck to use for doc tests", "PATH")
- .optopt("", "jsondoclint-path", "path to jsondoclint to use for doc tests", "PATH")
- .optopt("", "valgrind-path", "path to Valgrind executable for Valgrind tests", "PROGRAM")
- .optflag("", "force-valgrind", "fail if Valgrind tests cannot be run under Valgrind")
- .optopt("", "run-clang-based-tests-with", "path to Clang executable", "PATH")
- .optopt("", "llvm-filecheck", "path to LLVM's FileCheck binary", "DIR")
- .reqopt("", "src-base", "directory to scan for test files", "PATH")
- .reqopt("", "build-base", "directory to deposit test outputs", "PATH")
- .reqopt("", "sysroot-base", "directory containing the compiler sysroot", "PATH")
- .reqopt("", "stage-id", "the target-stage identifier", "stageN-TARGET")
- .reqopt(
- "",
- "mode",
- "which sort of compile tests to run",
- "run-pass-valgrind | pretty | debug-info | codegen | rustdoc \
- | rustdoc-json | codegen-units | incremental | run-make | ui | js-doc-test | mir-opt | assembly",
- )
- .reqopt(
- "",
- "suite",
- "which suite of compile tests to run. used for nicer error reporting.",
- "SUITE",
- )
- .optopt(
- "",
- "pass",
- "force {check,build,run}-pass tests to this mode.",
- "check | build | run",
- )
- .optopt("", "run", "whether to execute run-* tests", "auto | always | never")
- .optflag("", "ignored", "run tests marked as ignored")
- .optmulti("", "skip", "skip tests matching SUBSTRING. Can be passed multiple times", "SUBSTRING")
- .optflag("", "exact", "filters match exactly")
- .optopt(
- "",
- "runtool",
- "supervisor program to run tests under \
- (eg. emulator, valgrind)",
- "PROGRAM",
- )
- .optmulti("", "host-rustcflags", "flags to pass to rustc for host", "FLAGS")
- .optmulti("", "target-rustcflags", "flags to pass to rustc for target", "FLAGS")
- .optflag("", "optimize-tests", "run tests with optimizations enabled")
- .optflag("", "verbose", "run tests verbosely, showing all output")
- .optflag(
- "",
- "bless",
- "overwrite stderr/stdout files instead of complaining about a mismatch",
- )
- .optflag("", "quiet", "print one character per test instead of one line")
- .optopt("", "color", "coloring: auto, always, never", "WHEN")
- .optflag("", "json", "emit json output instead of plaintext output")
- .optopt("", "logfile", "file to log test execution to", "FILE")
- .optopt("", "target", "the target to build for", "TARGET")
- .optopt("", "host", "the host to build for", "HOST")
- .optopt("", "cdb", "path to CDB to use for CDB debuginfo tests", "PATH")
- .optopt("", "gdb", "path to GDB to use for GDB debuginfo tests", "PATH")
- .optopt("", "lldb-version", "the version of LLDB used", "VERSION STRING")
- .optopt("", "llvm-version", "the version of LLVM used", "VERSION STRING")
- .optflag("", "system-llvm", "is LLVM the system LLVM")
- .optopt("", "android-cross-path", "Android NDK standalone path", "PATH")
- .optopt("", "adb-path", "path to the android debugger", "PATH")
- .optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH")
- .optopt("", "lldb-python-dir", "directory containing LLDB's python module", "PATH")
- .reqopt("", "cc", "path to a C compiler", "PATH")
- .reqopt("", "cxx", "path to a C++ compiler", "PATH")
- .reqopt("", "cflags", "flags for the C compiler", "FLAGS")
- .reqopt("", "cxxflags", "flags for the CXX compiler", "FLAGS")
- .optopt("", "ar", "path to an archiver", "PATH")
- .optopt("", "target-linker", "path to a linker for the target", "PATH")
- .optopt("", "host-linker", "path to a linker for the host", "PATH")
- .reqopt("", "llvm-components", "list of LLVM components built in", "LIST")
- .optopt("", "llvm-bin-dir", "Path to LLVM's `bin` directory", "PATH")
- .optopt("", "nodejs", "the name of nodejs", "PATH")
- .optopt("", "npm", "the name of npm", "PATH")
- .optopt("", "remote-test-client", "path to the remote test client", "PATH")
- .optopt(
- "",
- "compare-mode",
- "mode describing what file the actual ui output will be compared to",
- "COMPARE MODE",
- )
- .optflag(
- "",
- "rustfix-coverage",
- "enable this to generate a Rustfix coverage file, which is saved in \
- `./<build_base>/rustfix_missing_coverage.txt`",
- )
- .optflag("", "force-rerun", "rerun tests even if the inputs are unchanged")
- .optflag("", "only-modified", "only run tests that result been modified")
- .optflag("", "nocapture", "")
- .optflag("h", "help", "show this message")
- .reqopt("", "channel", "current Rust channel", "CHANNEL")
- .optflag("", "git-hash", "run tests which rely on commit version being compiled into the binaries")
- .optopt("", "edition", "default Rust edition", "EDITION");
-
- let (argv0, args_) = args.split_first().unwrap();
- if args.len() == 1 || args[1] == "-h" || args[1] == "--help" {
- let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
- println!("{}", opts.usage(&message));
- println!();
- panic!()
- }
-
- let matches = &match opts.parse(args_) {
- Ok(m) => m,
- Err(f) => panic!("{:?}", f),
- };
-
- if matches.opt_present("h") || matches.opt_present("help") {
- let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
- println!("{}", opts.usage(&message));
- println!();
- panic!()
- }
-
- fn opt_path(m: &getopts::Matches, nm: &str) -> PathBuf {
- match m.opt_str(nm) {
- Some(s) => PathBuf::from(&s),
- None => panic!("no option (=path) found for {}", nm),
- }
- }
-
- fn make_absolute(path: PathBuf) -> PathBuf {
- if path.is_relative() { env::current_dir().unwrap().join(path) } else { path }
- }
-
- let target = opt_str2(matches.opt_str("target"));
- let android_cross_path = opt_path(matches, "android-cross-path");
- let (cdb, cdb_version) = analyze_cdb(matches.opt_str("cdb"), &target);
- let (gdb, gdb_version, gdb_native_rust) =
- analyze_gdb(matches.opt_str("gdb"), &target, &android_cross_path);
- let (lldb_version, lldb_native_rust) = matches
- .opt_str("lldb-version")
- .as_deref()
- .and_then(extract_lldb_version)
- .map(|(v, b)| (Some(v), b))
- .unwrap_or((None, false));
- let color = match matches.opt_str("color").as_deref() {
- Some("auto") | None => ColorConfig::AutoColor,
- Some("always") => ColorConfig::AlwaysColor,
- Some("never") => ColorConfig::NeverColor,
- Some(x) => panic!("argument for --color must be auto, always, or never, but found `{}`", x),
- };
- let llvm_version =
- matches.opt_str("llvm-version").as_deref().and_then(header::extract_llvm_version).or_else(
- || header::extract_llvm_version_from_binary(&matches.opt_str("llvm-filecheck")?),
- );
-
- let src_base = opt_path(matches, "src-base");
- let run_ignored = matches.opt_present("ignored");
- let mode = matches.opt_str("mode").unwrap().parse().expect("invalid mode");
- let has_tidy = if mode == Mode::Rustdoc {
- Command::new("tidy")
- .arg("--version")
- .stdout(Stdio::null())
- .status()
- .map_or(false, |status| status.success())
- } else {
- // Avoid spawning an external command when we know tidy won't be used.
- false
- };
- Config {
- bless: matches.opt_present("bless"),
- compile_lib_path: make_absolute(opt_path(matches, "compile-lib-path")),
- run_lib_path: make_absolute(opt_path(matches, "run-lib-path")),
- rustc_path: opt_path(matches, "rustc-path"),
- rustdoc_path: matches.opt_str("rustdoc-path").map(PathBuf::from),
- rust_demangler_path: matches.opt_str("rust-demangler-path").map(PathBuf::from),
- python: matches.opt_str("python").unwrap(),
- jsondocck_path: matches.opt_str("jsondocck-path"),
- jsondoclint_path: matches.opt_str("jsondoclint-path"),
- valgrind_path: matches.opt_str("valgrind-path"),
- force_valgrind: matches.opt_present("force-valgrind"),
- run_clang_based_tests_with: matches.opt_str("run-clang-based-tests-with"),
- llvm_filecheck: matches.opt_str("llvm-filecheck").map(PathBuf::from),
- llvm_bin_dir: matches.opt_str("llvm-bin-dir").map(PathBuf::from),
- src_base,
- build_base: opt_path(matches, "build-base"),
- sysroot_base: opt_path(matches, "sysroot-base"),
- stage_id: matches.opt_str("stage-id").unwrap(),
- mode,
- suite: matches.opt_str("suite").unwrap(),
- debugger: None,
- run_ignored,
- filters: matches.free.clone(),
- skip: matches.opt_strs("skip"),
- filter_exact: matches.opt_present("exact"),
- force_pass_mode: matches.opt_str("pass").map(|mode| {
- mode.parse::<PassMode>()
- .unwrap_or_else(|_| panic!("unknown `--pass` option `{}` given", mode))
- }),
- run: matches.opt_str("run").and_then(|mode| match mode.as_str() {
- "auto" => None,
- "always" => Some(true),
- "never" => Some(false),
- _ => panic!("unknown `--run` option `{}` given", mode),
- }),
- logfile: matches.opt_str("logfile").map(|s| PathBuf::from(&s)),
- runtool: matches.opt_str("runtool"),
- host_rustcflags: matches.opt_strs("host-rustcflags"),
- target_rustcflags: matches.opt_strs("target-rustcflags"),
- optimize_tests: matches.opt_present("optimize-tests"),
- target,
- host: opt_str2(matches.opt_str("host")),
- cdb,
- cdb_version,
- gdb,
- gdb_version,
- gdb_native_rust,
- lldb_version,
- lldb_native_rust,
- llvm_version,
- system_llvm: matches.opt_present("system-llvm"),
- android_cross_path,
- adb_path: opt_str2(matches.opt_str("adb-path")),
- adb_test_dir: opt_str2(matches.opt_str("adb-test-dir")),
- adb_device_status: opt_str2(matches.opt_str("target")).contains("android")
- && "(none)" != opt_str2(matches.opt_str("adb-test-dir"))
- && !opt_str2(matches.opt_str("adb-test-dir")).is_empty(),
- lldb_python_dir: matches.opt_str("lldb-python-dir"),
- verbose: matches.opt_present("verbose"),
- format: match (matches.opt_present("quiet"), matches.opt_present("json")) {
- (true, true) => panic!("--quiet and --json are incompatible"),
- (true, false) => test::OutputFormat::Terse,
- (false, true) => test::OutputFormat::Json,
- (false, false) => test::OutputFormat::Pretty,
- },
- only_modified: matches.opt_present("only-modified"),
- color,
- remote_test_client: matches.opt_str("remote-test-client").map(PathBuf::from),
- compare_mode: matches
- .opt_str("compare-mode")
- .map(|s| s.parse().expect("invalid --compare-mode provided")),
- rustfix_coverage: matches.opt_present("rustfix-coverage"),
- has_tidy,
- channel: matches.opt_str("channel").unwrap(),
- git_hash: matches.opt_present("git-hash"),
- edition: matches.opt_str("edition"),
-
- cc: matches.opt_str("cc").unwrap(),
- cxx: matches.opt_str("cxx").unwrap(),
- cflags: matches.opt_str("cflags").unwrap(),
- cxxflags: matches.opt_str("cxxflags").unwrap(),
- ar: matches.opt_str("ar").unwrap_or_else(|| String::from("ar")),
- target_linker: matches.opt_str("target-linker"),
- host_linker: matches.opt_str("host-linker"),
- llvm_components: matches.opt_str("llvm-components").unwrap(),
- nodejs: matches.opt_str("nodejs"),
- npm: matches.opt_str("npm"),
-
- force_rerun: matches.opt_present("force-rerun"),
-
- target_cfgs: AtomicLazyCell::new(),
-
- nocapture: matches.opt_present("nocapture"),
- }
-}
-
-pub fn log_config(config: &Config) {
- let c = config;
- logv(c, "configuration:".to_string());
- logv(c, format!("compile_lib_path: {:?}", config.compile_lib_path));
- logv(c, format!("run_lib_path: {:?}", config.run_lib_path));
- logv(c, format!("rustc_path: {:?}", config.rustc_path.display()));
- logv(c, format!("rustdoc_path: {:?}", config.rustdoc_path));
- logv(c, format!("rust_demangler_path: {:?}", config.rust_demangler_path));
- logv(c, format!("src_base: {:?}", config.src_base.display()));
- logv(c, format!("build_base: {:?}", config.build_base.display()));
- logv(c, format!("stage_id: {}", config.stage_id));
- logv(c, format!("mode: {}", config.mode));
- logv(c, format!("run_ignored: {}", config.run_ignored));
- logv(c, format!("filters: {:?}", config.filters));
- logv(c, format!("skip: {:?}", config.skip));
- logv(c, format!("filter_exact: {}", config.filter_exact));
- logv(
- c,
- format!("force_pass_mode: {}", opt_str(&config.force_pass_mode.map(|m| format!("{}", m))),),
- );
- logv(c, format!("runtool: {}", opt_str(&config.runtool)));
- logv(c, format!("host-rustcflags: {:?}", config.host_rustcflags));
- logv(c, format!("target-rustcflags: {:?}", config.target_rustcflags));
- logv(c, format!("target: {}", config.target));
- logv(c, format!("host: {}", config.host));
- logv(c, format!("android-cross-path: {:?}", config.android_cross_path.display()));
- logv(c, format!("adb_path: {:?}", config.adb_path));
- logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir));
- logv(c, format!("adb_device_status: {}", config.adb_device_status));
- logv(c, format!("ar: {}", config.ar));
- logv(c, format!("target-linker: {:?}", config.target_linker));
- logv(c, format!("host-linker: {:?}", config.host_linker));
- logv(c, format!("verbose: {}", config.verbose));
- logv(c, format!("format: {:?}", config.format));
- logv(c, "\n".to_string());
-}
-
-pub fn opt_str(maybestr: &Option<String>) -> &str {
- match *maybestr {
- None => "(none)",
- Some(ref s) => s,
- }
-}
-
-pub fn opt_str2(maybestr: Option<String>) -> String {
- match maybestr {
- None => "(none)".to_owned(),
- Some(s) => s,
- }
-}
-
-pub fn run_tests(config: Arc<Config>) {
- // If we want to collect rustfix coverage information,
- // we first make sure that the coverage file does not exist.
- // It will be created later on.
- if config.rustfix_coverage {
- let mut coverage_file_path = config.build_base.clone();
- coverage_file_path.push("rustfix_missing_coverage.txt");
- if coverage_file_path.exists() {
- if let Err(e) = fs::remove_file(&coverage_file_path) {
- panic!("Could not delete {} due to {}", coverage_file_path.display(), e)
- }
- }
- }
-
- // sadly osx needs some file descriptor limits raised for running tests in
- // parallel (especially when we have lots and lots of child processes).
- // For context, see #8904
- unsafe {
- raise_fd_limit::raise_fd_limit();
- }
- // Prevent issue #21352 UAC blocking .exe containing 'patch' etc. on Windows
- // If #11207 is resolved (adding manifest to .exe) this becomes unnecessary
- env::set_var("__COMPAT_LAYER", "RunAsInvoker");
-
- // Let tests know which target they're running as
- env::set_var("TARGET", &config.target);
-
- let opts = test_opts(&config);
-
- let mut configs = Vec::new();
- if let Mode::DebugInfo = config.mode {
- // Debugging emscripten code doesn't make sense today
- if !config.target.contains("emscripten") {
- configs.extend(configure_cdb(&config));
- configs.extend(configure_gdb(&config));
- configs.extend(configure_lldb(&config));
- }
- } else {
- configs.push(config.clone());
- };
-
- let mut tests = Vec::new();
- for c in configs {
- let mut found_paths = BTreeSet::new();
- make_tests(c, &mut tests, &mut found_paths);
- check_overlapping_tests(&found_paths);
- }
-
- tests.sort_by(|a, b| a.desc.name.as_slice().cmp(&b.desc.name.as_slice()));
-
- let res = test::run_tests_console(&opts, tests);
- match res {
- Ok(true) => {}
- Ok(false) => {
- // We want to report that the tests failed, but we also want to give
- // some indication of just what tests we were running. Especially on
- // CI, where there can be cross-compiled tests for a lot of
- // architectures, without this critical information it can be quite
- // easy to miss which tests failed, and as such fail to reproduce
- // the failure locally.
-
- println!(
- "Some tests failed in compiletest suite={}{} mode={} host={} target={}",
- config.suite,
- config
- .compare_mode
- .as_ref()
- .map(|c| format!(" compare_mode={:?}", c))
- .unwrap_or_default(),
- config.mode,
- config.host,
- config.target
- );
-
- std::process::exit(1);
- }
- Err(e) => {
- // We don't know if tests passed or not, but if there was an error
- // during testing we don't want to just succeed (we may not have
- // tested something), so fail.
- //
- // This should realistically "never" happen, so don't try to make
- // this a pretty error message.
- panic!("I/O failure during tests: {:?}", e);
- }
- }
-}
-
-fn configure_cdb(config: &Config) -> Option<Arc<Config>> {
- config.cdb.as_ref()?;
-
- Some(Arc::new(Config { debugger: Some(Debugger::Cdb), ..config.clone() }))
-}
-
-fn configure_gdb(config: &Config) -> Option<Arc<Config>> {
- config.gdb_version?;
-
- if config.matches_env("msvc") {
- return None;
- }
-
- if config.remote_test_client.is_some() && !config.target.contains("android") {
- println!(
- "WARNING: debuginfo tests are not available when \
- testing with remote"
- );
- return None;
- }
-
- if config.target.contains("android") {
- println!(
- "{} debug-info test uses tcp 5039 port.\
- please reserve it",
- config.target
- );
-
- // android debug-info test uses remote debugger so, we test 1 thread
- // at once as they're all sharing the same TCP port to communicate
- // over.
- //
- // we should figure out how to lift this restriction! (run them all
- // on different ports allocated dynamically).
- env::set_var("RUST_TEST_THREADS", "1");
- }
-
- Some(Arc::new(Config { debugger: Some(Debugger::Gdb), ..config.clone() }))
-}
-
-fn configure_lldb(config: &Config) -> Option<Arc<Config>> {
- config.lldb_python_dir.as_ref()?;
-
- if let Some(350) = config.lldb_version {
- println!(
- "WARNING: The used version of LLDB (350) has a \
- known issue that breaks debuginfo tests. See \
- issue #32520 for more information. Skipping all \
- LLDB-based tests!",
- );
- return None;
- }
-
- Some(Arc::new(Config { debugger: Some(Debugger::Lldb), ..config.clone() }))
-}
-
-pub fn test_opts(config: &Config) -> test::TestOpts {
- if env::var("RUST_TEST_NOCAPTURE").is_ok() {
- eprintln!(
- "WARNING: RUST_TEST_NOCAPTURE is no longer used. \
- Use the `--nocapture` flag instead."
- );
- }
-
- test::TestOpts {
- exclude_should_panic: false,
- filters: config.filters.clone(),
- filter_exact: config.filter_exact,
- run_ignored: if config.run_ignored { test::RunIgnored::Yes } else { test::RunIgnored::No },
- format: config.format,
- logfile: config.logfile.clone(),
- run_tests: true,
- bench_benchmarks: true,
- nocapture: config.nocapture,
- color: config.color,
- shuffle: false,
- shuffle_seed: None,
- test_threads: None,
- skip: config.skip.clone(),
- list: false,
- options: test::Options::new(),
- time_options: None,
- force_run_in_process: false,
- fail_fast: std::env::var_os("RUSTC_TEST_FAIL_FAST").is_some(),
- }
-}
-
-pub fn make_tests(
- config: Arc<Config>,
- tests: &mut Vec<test::TestDescAndFn>,
- found_paths: &mut BTreeSet<PathBuf>,
-) {
- debug!("making tests from {:?}", config.src_base.display());
- let inputs = common_inputs_stamp(&config);
- let modified_tests = modified_tests(&config, &config.src_base).unwrap_or_else(|err| {
- panic!("modified_tests got error from dir: {}, error: {}", config.src_base.display(), err)
- });
-
- let cache = HeadersCache::load(&config);
- let mut poisoned = false;
- collect_tests_from_dir(
- config.clone(),
- &cache,
- &config.src_base,
- &PathBuf::new(),
- &inputs,
- tests,
- found_paths,
- &modified_tests,
- &mut poisoned,
- )
- .unwrap_or_else(|_| panic!("Could not read tests from {}", config.src_base.display()));
-
- if poisoned {
- eprintln!();
- panic!("there are errors in tests");
- }
-}
-
-/// Returns a stamp constructed from input files common to all test cases.
-fn common_inputs_stamp(config: &Config) -> Stamp {
- let rust_src_dir = config.find_rust_src_root().expect("Could not find Rust source root");
-
- let mut stamp = Stamp::from_path(&config.rustc_path);
-
- // Relevant pretty printer files
- let pretty_printer_files = [
- "src/etc/rust_types.py",
- "src/etc/gdb_load_rust_pretty_printers.py",
- "src/etc/gdb_lookup.py",
- "src/etc/gdb_providers.py",
- "src/etc/lldb_batchmode.py",
- "src/etc/lldb_lookup.py",
- "src/etc/lldb_providers.py",
- ];
- for file in &pretty_printer_files {
- let path = rust_src_dir.join(file);
- stamp.add_path(&path);
- }
-
- stamp.add_dir(&rust_src_dir.join("src/etc/natvis"));
-
- stamp.add_dir(&config.run_lib_path);
-
- if let Some(ref rustdoc_path) = config.rustdoc_path {
- stamp.add_path(&rustdoc_path);
- stamp.add_path(&rust_src_dir.join("src/etc/htmldocck.py"));
- }
-
- // Compiletest itself.
- stamp.add_dir(&rust_src_dir.join("src/tools/compiletest/"));
-
- stamp
-}
-
-fn modified_tests(config: &Config, dir: &Path) -> Result<Vec<PathBuf>, String> {
- if !config.only_modified {
- return Ok(vec![]);
- }
- let files =
- get_git_modified_files(Some(dir), &vec!["rs", "stderr", "fixed"])?.unwrap_or(vec![]);
- // Add new test cases to the list, it will be convenient in daily development.
- let untracked_files = get_git_untracked_files(None)?.unwrap_or(vec![]);
-
- let all_paths = [&files[..], &untracked_files[..]].concat();
- let full_paths = {
- let mut full_paths: Vec<PathBuf> = all_paths
- .into_iter()
- .map(|f| PathBuf::from(f).with_extension("").with_extension("rs"))
- .filter_map(|f| if Path::new(&f).exists() { f.canonicalize().ok() } else { None })
- .collect();
- full_paths.dedup();
- full_paths.sort_unstable();
- full_paths
- };
- Ok(full_paths)
-}
-
-fn collect_tests_from_dir(
- config: Arc<Config>,
- cache: &HeadersCache,
- dir: &Path,
- relative_dir_path: &Path,
- inputs: &Stamp,
- tests: &mut Vec<test::TestDescAndFn>,
- found_paths: &mut BTreeSet<PathBuf>,
- modified_tests: &Vec<PathBuf>,
- poisoned: &mut bool,
-) -> io::Result<()> {
- // Ignore directories that contain a file named `compiletest-ignore-dir`.
- if dir.join("compiletest-ignore-dir").exists() {
- return Ok(());
- }
-
- if config.mode == Mode::RunMake && dir.join("Makefile").exists() {
- let paths = TestPaths {
- file: dir.to_path_buf(),
- relative_dir: relative_dir_path.parent().unwrap().to_path_buf(),
- };
- tests.extend(make_test(config, cache, &paths, inputs, poisoned));
- return Ok(());
- }
-
- // If we find a test foo/bar.rs, we have to build the
- // output directory `$build/foo` so we can write
- // `$build/foo/bar` into it. We do this *now* in this
- // sequential loop because otherwise, if we do it in the
- // tests themselves, they race for the privilege of
- // creating the directories and sometimes fail randomly.
- let build_dir = output_relative_path(&config, relative_dir_path);
- fs::create_dir_all(&build_dir).unwrap();
-
- // Add each `.rs` file as a test, and recurse further on any
- // subdirectories we find, except for `aux` directories.
- for file in fs::read_dir(dir)? {
- let file = file?;
- let file_path = file.path();
- let file_name = file.file_name();
- if is_test(&file_name) && (!config.only_modified || modified_tests.contains(&file_path)) {
- debug!("found test file: {:?}", file_path.display());
- let rel_test_path = relative_dir_path.join(file_path.file_stem().unwrap());
- found_paths.insert(rel_test_path);
- let paths =
- TestPaths { file: file_path, relative_dir: relative_dir_path.to_path_buf() };
-
- tests.extend(make_test(config.clone(), cache, &paths, inputs, poisoned))
- } else if file_path.is_dir() {
- let relative_file_path = relative_dir_path.join(file.file_name());
- if &file_name != "auxiliary" {
- debug!("found directory: {:?}", file_path.display());
- collect_tests_from_dir(
- config.clone(),
- cache,
- &file_path,
- &relative_file_path,
- inputs,
- tests,
- found_paths,
- modified_tests,
- poisoned,
- )?;
- }
- } else {
- debug!("found other file/directory: {:?}", file_path.display());
- }
- }
- Ok(())
-}
-
-/// Returns true if `file_name` looks like a proper test file name.
-pub fn is_test(file_name: &OsString) -> bool {
- let file_name = file_name.to_str().unwrap();
-
- if !file_name.ends_with(".rs") {
- return false;
- }
-
- // `.`, `#`, and `~` are common temp-file prefixes.
- let invalid_prefixes = &[".", "#", "~"];
- !invalid_prefixes.iter().any(|p| file_name.starts_with(p))
-}
-
-fn make_test(
- config: Arc<Config>,
- cache: &HeadersCache,
- testpaths: &TestPaths,
- inputs: &Stamp,
- poisoned: &mut bool,
-) -> Vec<test::TestDescAndFn> {
- let test_path = if config.mode == Mode::RunMake {
- // Parse directives in the Makefile
- testpaths.file.join("Makefile")
- } else {
- PathBuf::from(&testpaths.file)
- };
- let early_props = EarlyProps::from_file(&config, &test_path);
-
- // Incremental tests are special, they inherently cannot be run in parallel.
- // `runtest::run` will be responsible for iterating over revisions.
- let revisions = if early_props.revisions.is_empty() || config.mode == Mode::Incremental {
- vec![None]
- } else {
- early_props.revisions.iter().map(Some).collect()
- };
-
- revisions
- .into_iter()
- .map(|revision| {
- let src_file =
- std::fs::File::open(&test_path).expect("open test file to parse ignores");
- let cfg = revision.map(|v| &**v);
- let test_name = crate::make_test_name(&config, testpaths, revision);
- let mut desc = make_test_description(
- &config, cache, test_name, &test_path, src_file, cfg, poisoned,
- );
- // Ignore tests that already run and are up to date with respect to inputs.
- if !config.force_rerun {
- desc.ignore |= is_up_to_date(
- &config,
- testpaths,
- &early_props,
- revision.map(|s| s.as_str()),
- inputs,
- );
- }
- test::TestDescAndFn {
- desc,
- testfn: make_test_closure(config.clone(), testpaths, revision),
- }
- })
- .collect()
-}
-
-fn stamp(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf {
- output_base_dir(config, testpaths, revision).join("stamp")
-}
-
-fn files_related_to_test(
- config: &Config,
- testpaths: &TestPaths,
- props: &EarlyProps,
- revision: Option<&str>,
-) -> Vec<PathBuf> {
- let mut related = vec![];
-
- if testpaths.file.is_dir() {
- // run-make tests use their individual directory
- for entry in WalkDir::new(&testpaths.file) {
- let path = entry.unwrap().into_path();
- if path.is_file() {
- related.push(path);
- }
- }
- } else {
- related.push(testpaths.file.clone());
- }
-
- for aux in &props.aux {
- let path = testpaths.file.parent().unwrap().join("auxiliary").join(aux);
- related.push(path);
- }
-
- // UI test files.
- for extension in UI_EXTENSIONS {
- let path = expected_output_path(testpaths, revision, &config.compare_mode, extension);
- related.push(path);
- }
-
- related
-}
-
-fn is_up_to_date(
- config: &Config,
- testpaths: &TestPaths,
- props: &EarlyProps,
- revision: Option<&str>,
- inputs: &Stamp,
-) -> bool {
- let stamp_name = stamp(config, testpaths, revision);
- // Check hash.
- let contents = match fs::read_to_string(&stamp_name) {
- Ok(f) => f,
- Err(ref e) if e.kind() == ErrorKind::InvalidData => panic!("Can't read stamp contents"),
- Err(_) => return false,
- };
- let expected_hash = runtest::compute_stamp_hash(config);
- if contents != expected_hash {
- return false;
- }
-
- // Check timestamps.
- let mut inputs = inputs.clone();
- for path in files_related_to_test(config, testpaths, props, revision) {
- inputs.add_path(&path);
- }
-
- inputs < Stamp::from_path(&stamp_name)
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
-struct Stamp {
- time: SystemTime,
-}
-
-impl Stamp {
- fn from_path(path: &Path) -> Self {
- let mut stamp = Stamp { time: SystemTime::UNIX_EPOCH };
- stamp.add_path(path);
- stamp
- }
-
- fn add_path(&mut self, path: &Path) {
- let modified = fs::metadata(path)
- .and_then(|metadata| metadata.modified())
- .unwrap_or(SystemTime::UNIX_EPOCH);
- self.time = self.time.max(modified);
- }
-
- fn add_dir(&mut self, path: &Path) {
- for entry in WalkDir::new(path) {
- let entry = entry.unwrap();
- if entry.file_type().is_file() {
- let modified = entry
- .metadata()
- .ok()
- .and_then(|metadata| metadata.modified().ok())
- .unwrap_or(SystemTime::UNIX_EPOCH);
- self.time = self.time.max(modified);
- }
- }
- }
-}
-
-fn make_test_name(
- config: &Config,
- testpaths: &TestPaths,
- revision: Option<&String>,
-) -> test::TestName {
- // Print the name of the file, relative to the repository root.
- // `src_base` looks like `/path/to/rust/tests/ui`
- let root_directory = config.src_base.parent().unwrap().parent().unwrap();
- let path = testpaths.file.strip_prefix(root_directory).unwrap();
- let debugger = match config.debugger {
- Some(d) => format!("-{}", d),
- None => String::new(),
- };
- let mode_suffix = match config.compare_mode {
- Some(ref mode) => format!(" ({})", mode.to_str()),
- None => String::new(),
- };
-
- test::DynTestName(format!(
- "[{}{}{}] {}{}",
- config.mode,
- debugger,
- mode_suffix,
- path.display(),
- revision.map_or("".to_string(), |rev| format!("#{}", rev))
- ))
-}
-
-fn make_test_closure(
- config: Arc<Config>,
- testpaths: &TestPaths,
- revision: Option<&String>,
-) -> test::TestFn {
- let config = config.clone();
- let testpaths = testpaths.clone();
- let revision = revision.cloned();
- test::DynTestFn(Box::new(move || {
- runtest::run(config, &testpaths, revision.as_deref());
- Ok(())
- }))
-}
-
-/// Returns `true` if the given target is an Android target for the
-/// purposes of GDB testing.
-fn is_android_gdb_target(target: &str) -> bool {
- matches!(
- &target[..],
- "arm-linux-androideabi" | "armv7-linux-androideabi" | "aarch64-linux-android"
- )
-}
-
-/// Returns `true` if the given target is a MSVC target for the purpouses of CDB testing.
-fn is_pc_windows_msvc_target(target: &str) -> bool {
- target.ends_with("-pc-windows-msvc")
-}
-
-fn find_cdb(target: &str) -> Option<OsString> {
- if !(cfg!(windows) && is_pc_windows_msvc_target(target)) {
- return None;
- }
-
- let pf86 = env::var_os("ProgramFiles(x86)").or_else(|| env::var_os("ProgramFiles"))?;
- let cdb_arch = if cfg!(target_arch = "x86") {
- "x86"
- } else if cfg!(target_arch = "x86_64") {
- "x64"
- } else if cfg!(target_arch = "aarch64") {
- "arm64"
- } else if cfg!(target_arch = "arm") {
- "arm"
- } else {
- return None; // No compatible CDB.exe in the Windows 10 SDK
- };
-
- let mut path = PathBuf::new();
- path.push(pf86);
- path.push(r"Windows Kits\10\Debuggers"); // We could check 8.1 etc. too?
- path.push(cdb_arch);
- path.push(r"cdb.exe");
-
- if !path.exists() {
- return None;
- }
-
- Some(path.into_os_string())
-}
-
-/// Returns Path to CDB
-fn analyze_cdb(cdb: Option<String>, target: &str) -> (Option<OsString>, Option<[u16; 4]>) {
- let cdb = cdb.map(OsString::from).or_else(|| find_cdb(target));
-
- let mut version = None;
- if let Some(cdb) = cdb.as_ref() {
- if let Ok(output) = Command::new(cdb).arg("/version").output() {
- if let Some(first_line) = String::from_utf8_lossy(&output.stdout).lines().next() {
- version = extract_cdb_version(&first_line);
- }
- }
- }
-
- (cdb, version)
-}
-
-fn extract_cdb_version(full_version_line: &str) -> Option<[u16; 4]> {
- // Example full_version_line: "cdb version 10.0.18362.1"
- let version = full_version_line.rsplit(' ').next()?;
- let mut components = version.split('.');
- let major: u16 = components.next().unwrap().parse().unwrap();
- let minor: u16 = components.next().unwrap().parse().unwrap();
- let patch: u16 = components.next().unwrap_or("0").parse().unwrap();
- let build: u16 = components.next().unwrap_or("0").parse().unwrap();
- Some([major, minor, patch, build])
-}
-
-/// Returns (Path to GDB, GDB Version, GDB has Rust Support)
-fn analyze_gdb(
- gdb: Option<String>,
- target: &str,
- android_cross_path: &PathBuf,
-) -> (Option<String>, Option<u32>, bool) {
- #[cfg(not(windows))]
- const GDB_FALLBACK: &str = "gdb";
- #[cfg(windows)]
- const GDB_FALLBACK: &str = "gdb.exe";
-
- const MIN_GDB_WITH_RUST: u32 = 7011010;
-
- let fallback_gdb = || {
- if is_android_gdb_target(target) {
- let mut gdb_path = match android_cross_path.to_str() {
- Some(x) => x.to_owned(),
- None => panic!("cannot find android cross path"),
- };
- gdb_path.push_str("/bin/gdb");
- gdb_path
- } else {
- GDB_FALLBACK.to_owned()
- }
- };
-
- let gdb = match gdb {
- None => fallback_gdb(),
- Some(ref s) if s.is_empty() => fallback_gdb(), // may be empty if configure found no gdb
- Some(ref s) => s.to_owned(),
- };
-
- let mut version_line = None;
- if let Ok(output) = Command::new(&gdb).arg("--version").output() {
- if let Some(first_line) = String::from_utf8_lossy(&output.stdout).lines().next() {
- version_line = Some(first_line.to_string());
- }
- }
-
- let version = match version_line {
- Some(line) => extract_gdb_version(&line),
- None => return (None, None, false),
- };
-
- let gdb_native_rust = version.map_or(false, |v| v >= MIN_GDB_WITH_RUST);
-
- (Some(gdb), version, gdb_native_rust)
-}
-
-fn extract_gdb_version(full_version_line: &str) -> Option<u32> {
- let full_version_line = full_version_line.trim();
-
- // GDB versions look like this: "major.minor.patch?.yyyymmdd?", with both
- // of the ? sections being optional
-
- // We will parse up to 3 digits for each component, ignoring the date
-
- // We skip text in parentheses. This avoids accidentally parsing
- // the openSUSE version, which looks like:
- // GNU gdb (GDB; openSUSE Leap 15.0) 8.1
- // This particular form is documented in the GNU coding standards:
- // https://www.gnu.org/prep/standards/html_node/_002d_002dversion.html#g_t_002d_002dversion
-
- let unbracketed_part = full_version_line.split('[').next().unwrap();
- let mut splits = unbracketed_part.trim_end().rsplit(' ');
- let version_string = splits.next().unwrap();
-
- let mut splits = version_string.split('.');
- let major = splits.next().unwrap();
- let minor = splits.next().unwrap();
- let patch = splits.next();
-
- let major: u32 = major.parse().unwrap();
- let (minor, patch): (u32, u32) = match minor.find(not_a_digit) {
- None => {
- let minor = minor.parse().unwrap();
- let patch: u32 = match patch {
- Some(patch) => match patch.find(not_a_digit) {
- None => patch.parse().unwrap(),
- Some(idx) if idx > 3 => 0,
- Some(idx) => patch[..idx].parse().unwrap(),
- },
- None => 0,
- };
- (minor, patch)
- }
- // There is no patch version after minor-date (e.g. "4-2012").
- Some(idx) => {
- let minor = minor[..idx].parse().unwrap();
- (minor, 0)
- }
- };
-
- Some(((major * 1000) + minor) * 1000 + patch)
-}
-
-/// Returns (LLDB version, LLDB is rust-enabled)
-fn extract_lldb_version(full_version_line: &str) -> Option<(u32, bool)> {
- // Extract the major LLDB version from the given version string.
- // LLDB version strings are different for Apple and non-Apple platforms.
- // The Apple variant looks like this:
- //
- // LLDB-179.5 (older versions)
- // lldb-300.2.51 (new versions)
- //
- // We are only interested in the major version number, so this function
- // will return `Some(179)` and `Some(300)` respectively.
- //
- // Upstream versions look like:
- // lldb version 6.0.1
- //
- // There doesn't seem to be a way to correlate the Apple version
- // with the upstream version, and since the tests were originally
- // written against Apple versions, we make a fake Apple version by
- // multiplying the first number by 100. This is a hack, but
- // normally fine because the only non-Apple version we test is
- // rust-enabled.
-
- let full_version_line = full_version_line.trim();
-
- if let Some(apple_ver) =
- full_version_line.strip_prefix("LLDB-").or_else(|| full_version_line.strip_prefix("lldb-"))
- {
- if let Some(idx) = apple_ver.find(not_a_digit) {
- let version: u32 = apple_ver[..idx].parse().unwrap();
- return Some((version, full_version_line.contains("rust-enabled")));
- }
- } else if let Some(lldb_ver) = full_version_line.strip_prefix("lldb version ") {
- if let Some(idx) = lldb_ver.find(not_a_digit) {
- let version: u32 = lldb_ver[..idx].parse().ok()?;
- return Some((version * 100, full_version_line.contains("rust-enabled")));
- }
- }
- None
-}
-
-fn not_a_digit(c: char) -> bool {
- !c.is_digit(10)
-}
-
-fn check_overlapping_tests(found_paths: &BTreeSet<PathBuf>) {
- let mut collisions = Vec::new();
- for path in found_paths {
- for ancestor in path.ancestors().skip(1) {
- if found_paths.contains(ancestor) {
- collisions.push((path, ancestor.clone()));
- }
- }
- }
- if !collisions.is_empty() {
- let collisions: String = collisions
- .into_iter()
- .map(|(path, check_parent)| format!("test {path:?} clashes with {check_parent:?}\n"))
- .collect();
- panic!(
- "{collisions}\n\
- Tests cannot have overlapping names. Make sure they use unique prefixes."
- );
- }
-}
diff --git a/src/tools/compiletest/src/util.rs b/src/tools/compiletest/src/util.rs
index 748240c..17bed38 100644
--- a/src/tools/compiletest/src/util.rs
+++ b/src/tools/compiletest/src/util.rs
@@ -104,6 +104,8 @@
"x86_64-unknown-openbsd",
];
+pub const SAFESTACK_SUPPORTED_TARGETS: &[&str] = &["x86_64-unknown-linux-gnu"];
+
pub fn make_new_path(path: &str) -> String {
assert!(cfg!(windows));
// Windows just uses PATH as the library search path, so we have to
diff --git a/src/tools/miri/src/concurrency/thread.rs b/src/tools/miri/src/concurrency/thread.rs
index d85cac7..25c8df4 100644
--- a/src/tools/miri/src/concurrency/thread.rs
+++ b/src/tools/miri/src/concurrency/thread.rs
@@ -133,10 +133,15 @@ pub struct Thread<'mir, 'tcx> {
/// The join status.
join_status: ThreadJoinStatus,
- /// The temporary used for storing the argument of
- /// the call to `miri_start_panic` (the panic payload) when unwinding.
+ /// Stack of active panic payloads for the current thread. Used for storing
+ /// the argument of the call to `miri_start_panic` (the panic payload) when unwinding.
/// This is pointer-sized, and matches the `Payload` type in `src/libpanic_unwind/miri.rs`.
- pub(crate) panic_payload: Option<Scalar<Provenance>>,
+ ///
+ /// In real unwinding, the payload gets passed as an argument to the landing pad,
+ /// which then forwards it to 'Resume'. However this argument is implicit in MIR,
+ /// so we have to store it out-of-band. When there are multiple active unwinds,
+ /// the innermost one is always caught first, so we can store them as a stack.
+ pub(crate) panic_payloads: Vec<Scalar<Provenance>>,
/// Last OS error location in memory. It is a 32-bit integer.
pub(crate) last_error: Option<MPlaceTy<'tcx, Provenance>>,
@@ -206,7 +211,7 @@ fn new(name: Option<&str>, on_stack_empty: Option<StackEmptyCallback<'mir, 'tcx>
stack: Vec::new(),
top_user_relevant_frame: None,
join_status: ThreadJoinStatus::Joinable,
- panic_payload: None,
+ panic_payloads: Vec::new(),
last_error: None,
on_stack_empty,
}
@@ -216,7 +221,7 @@ fn new(name: Option<&str>, on_stack_empty: Option<StackEmptyCallback<'mir, 'tcx>
impl VisitTags for Thread<'_, '_> {
fn visit_tags(&self, visit: &mut dyn FnMut(BorTag)) {
let Thread {
- panic_payload,
+ panic_payloads: panic_payload,
last_error,
stack,
top_user_relevant_frame: _,
@@ -226,7 +231,9 @@ fn visit_tags(&self, visit: &mut dyn FnMut(BorTag)) {
on_stack_empty: _, // we assume the closure captures no GC-relevant state
} = self;
- panic_payload.visit_tags(visit);
+ for payload in panic_payload {
+ payload.visit_tags(visit);
+ }
last_error.visit_tags(visit);
for frame in stack {
frame.visit_tags(visit)
diff --git a/src/tools/miri/src/shims/panic.rs b/src/tools/miri/src/shims/panic.rs
index 18ae01a..7aefdfc 100644
--- a/src/tools/miri/src/shims/panic.rs
+++ b/src/tools/miri/src/shims/panic.rs
@@ -63,8 +63,7 @@ fn handle_miri_start_panic(
let [payload] = this.check_shim(abi, Abi::Rust, link_name, args)?;
let payload = this.read_scalar(payload)?;
let thread = this.active_thread_mut();
- assert!(thread.panic_payload.is_none(), "the panic runtime should avoid double-panics");
- thread.panic_payload = Some(payload);
+ thread.panic_payloads.push(payload);
// Jump to the unwind block to begin unwinding.
this.unwind_to_block(unwind)?;
@@ -146,7 +145,7 @@ fn handle_stack_pop_unwind(
// The Thread's `panic_payload` holds what was passed to `miri_start_panic`.
// This is exactly the second argument we need to pass to `catch_fn`.
- let payload = this.active_thread_mut().panic_payload.take().unwrap();
+ let payload = this.active_thread_mut().panic_payloads.pop().unwrap();
// Push the `catch_fn` stackframe.
let f_instance = this.get_ptr_fn(catch_unwind.catch_fn)?.as_instance()?;
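
For illustration (not part of this diff): the stack discipline relied on here is that unwinds are strictly nested, so the payload pushed last (by the innermost `miri_start_panic`) is the first one popped when its `catch_unwind` frame is reached. A minimal model of that pairing in plain Rust, not Miri interpreter code.

```rust
fn main() {
    let mut panic_payloads: Vec<&str> = Vec::new();
    panic_payloads.push("outer payload"); // the outer unwind starts
    panic_payloads.push("inner payload"); // a second panic from inside a Drop impl
    // The inner catch_unwind is reached first and takes the innermost payload...
    assert_eq!(panic_payloads.pop(), Some("inner payload"));
    // ...then the outer catch takes what is left.
    assert_eq!(panic_payloads.pop(), Some("outer payload"));
}
```
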
diff --git a/src/tools/miri/tests/fail/panic/double_panic.rs b/src/tools/miri/tests/fail/panic/double_panic.rs
index 9378adb..adb3071 100644
--- a/src/tools/miri/tests/fail/panic/double_panic.rs
+++ b/src/tools/miri/tests/fail/panic/double_panic.rs
@@ -1,6 +1,4 @@
-//@error-in-other-file: the program aborted
//@normalize-stderr-test: "\| +\^+" -> "| ^"
-//@normalize-stderr-test: "unsafe \{ libc::abort\(\) \}|crate::intrinsics::abort\(\);" -> "ABORT();"
//@normalize-stderr-test: "\n +[0-9]+:[^\n]+" -> "$1"
//@normalize-stderr-test: "\n at [^\n]+" -> "$1"
@@ -11,6 +9,7 @@ fn drop(&mut self) {
}
}
fn main() {
+ //~^ERROR: panic in a function that cannot unwind
let _foo = Foo;
panic!("first");
}
diff --git a/src/tools/miri/tests/fail/panic/double_panic.stderr b/src/tools/miri/tests/fail/panic/double_panic.stderr
index 77d5fc5..b6ac56f 100644
--- a/src/tools/miri/tests/fail/panic/double_panic.stderr
+++ b/src/tools/miri/tests/fail/panic/double_panic.stderr
@@ -2,30 +2,17 @@
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
thread 'main' panicked at 'second', $DIR/double_panic.rs:LL:CC
stack backtrace:
-thread panicked while panicking. aborting.
-error: abnormal termination: the program aborted execution
- --> RUSTLIB/std/src/sys/PLATFORM/mod.rs:LL:CC
- |
-LL | ABORT();
- | ^ the program aborted execution
- |
- = note: inside `std::sys::PLATFORM::abort_internal` at RUSTLIB/std/src/sys/PLATFORM/mod.rs:LL:CC
- = note: inside `std::panicking::rust_panic_with_hook` at RUSTLIB/std/src/panicking.rs:LL:CC
- = note: inside closure at RUSTLIB/std/src/panicking.rs:LL:CC
- = note: inside `std::sys_common::backtrace::__rust_end_short_backtrace::<[closure@std::panicking::begin_panic_handler::{closure#0}], !>` at RUSTLIB/std/src/sys_common/backtrace.rs:LL:CC
- = note: inside `std::panicking::begin_panic_handler` at RUSTLIB/std/src/panicking.rs:LL:CC
-note: inside `<Foo as std::ops::Drop>::drop`
+error: abnormal termination: panic in a function that cannot unwind
--> $DIR/double_panic.rs:LL:CC
|
-LL | panic!("second");
- | ^
- = note: inside `std::ptr::drop_in_place::<Foo> - shim(Some(Foo))` at RUSTLIB/core/src/ptr/mod.rs:LL:CC
-note: inside `main`
- --> $DIR/double_panic.rs:LL:CC
+LL | / fn main() {
+LL | |
+LL | | let _foo = Foo;
+LL | | panic!("first");
+LL | | }
+ | |_^ panic in a function that cannot unwind
|
-LL | }
- | ^
- = note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info)
+ = note: inside `main` at $DIR/double_panic.rs:LL:CC
note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace
diff --git a/src/tools/miri/tests/pass/panic/nested_panic_caught.rs b/src/tools/miri/tests/pass/panic/nested_panic_caught.rs
new file mode 100644
index 0000000..8848131
--- /dev/null
+++ b/src/tools/miri/tests/pass/panic/nested_panic_caught.rs
@@ -0,0 +1,25 @@
+//@normalize-stderr-test: "\| +\^+" -> "| ^"
+//@normalize-stderr-test: "\n +[0-9]+:[^\n]+" -> "$1"
+//@normalize-stderr-test: "\n at [^\n]+" -> "$1"
+
+// Checks that nested panics work correctly.
+
+use std::panic::catch_unwind;
+
+fn double() {
+ struct Double;
+
+ impl Drop for Double {
+ fn drop(&mut self) {
+ let _ = catch_unwind(|| panic!("twice"));
+ }
+ }
+
+ let _d = Double;
+
+ panic!("once");
+}
+
+fn main() {
+ assert!(catch_unwind(|| double()).is_err());
+}
diff --git a/src/tools/miri/tests/pass/panic/nested_panic_caught.stderr b/src/tools/miri/tests/pass/panic/nested_panic_caught.stderr
new file mode 100644
index 0000000..4e25932
--- /dev/null
+++ b/src/tools/miri/tests/pass/panic/nested_panic_caught.stderr
@@ -0,0 +1,4 @@
+thread 'main' panicked at 'once', $DIR/nested_panic_caught.rs:LL:CC
+note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
+thread 'main' panicked at 'twice', $DIR/nested_panic_caught.rs:LL:CC
+stack backtrace:
diff --git a/src/tools/rustdoc-gui-test/Cargo.toml b/src/tools/rustdoc-gui-test/Cargo.toml
new file mode 100644
index 0000000..f0c5b36
--- /dev/null
+++ b/src/tools/rustdoc-gui-test/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "rustdoc-gui-test"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+compiletest = { path = "../compiletest" }
+getopts = "0.2"
+walkdir = "2"
diff --git a/src/tools/rustdoc-gui-test/src/config.rs b/src/tools/rustdoc-gui-test/src/config.rs
new file mode 100644
index 0000000..dc4c56a
--- /dev/null
+++ b/src/tools/rustdoc-gui-test/src/config.rs
@@ -0,0 +1,62 @@
+use getopts::Options;
+use std::{env, path::PathBuf};
+
+pub(crate) struct Config {
+ pub(crate) nodejs: PathBuf,
+ pub(crate) npm: PathBuf,
+ pub(crate) rust_src: PathBuf,
+ pub(crate) out_dir: PathBuf,
+ pub(crate) initial_cargo: PathBuf,
+ pub(crate) jobs: String,
+ pub(crate) test_args: Vec<PathBuf>,
+ pub(crate) goml_files: Vec<PathBuf>,
+ pub(crate) rustc: PathBuf,
+ pub(crate) rustdoc: PathBuf,
+ pub(crate) verbose: bool,
+}
+
+impl Config {
+ pub(crate) fn from_args(args: Vec<String>) -> Self {
+ let mut opts = Options::new();
+ opts.reqopt("", "nodejs", "absolute path of nodejs", "PATH")
+ .reqopt("", "npm", "absolute path of npm", "PATH")
+ .reqopt("", "out-dir", "output path of doc compilation", "PATH")
+ .reqopt("", "rust-src", "root source of the rust source", "PATH")
+ .reqopt(
+ "",
+ "initial-cargo",
+ "path to cargo to use for compiling tests/rustdoc-gui/src/*",
+ "PATH",
+ )
+ .reqopt("", "jobs", "jobs arg of browser-ui-test", "JOBS")
+ .optflag("", "verbose", "run tests verbosely, showing all output")
+ .optmulti("", "test-arg", "args for browser-ui-test", "FLAGS")
+ .optmulti("", "goml-file", "goml files for testing with browser-ui-test", "LIST");
+
+ let (argv0, args_) = args.split_first().unwrap();
+ if args.len() == 1 || args[1] == "-h" || args[1] == "--help" {
+ let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
+ println!("{}", opts.usage(&message));
+ std::process::exit(1);
+ }
+
+ let matches = &match opts.parse(args_) {
+ Ok(m) => m,
+ Err(f) => panic!("{:?}", f),
+ };
+
+ Self {
+ nodejs: matches.opt_str("nodejs").map(PathBuf::from).expect("nodejs isn't available"),
+ npm: matches.opt_str("npm").map(PathBuf::from).expect("npm isn't available"),
+ rust_src: matches.opt_str("rust-src").map(PathBuf::from).unwrap(),
+ out_dir: matches.opt_str("out-dir").map(PathBuf::from).unwrap(),
+ initial_cargo: matches.opt_str("initial-cargo").map(PathBuf::from).unwrap(),
+ jobs: matches.opt_str("jobs").unwrap(),
+ goml_files: matches.opt_strs("goml-file").iter().map(PathBuf::from).collect(),
+ test_args: matches.opt_strs("test-arg").iter().map(PathBuf::from).collect(),
+ rustc: env::var("RUSTC").map(PathBuf::from).unwrap(),
+ rustdoc: env::var("RUSTDOC").map(PathBuf::from).unwrap(),
+ verbose: matches.opt_present("verbose"),
+ }
+ }
+}
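
`Config::from_args` above leans on the getopts crate (added as a dependency in the Cargo.toml hunk): `reqopt` makes `parse` fail when a flag is missing, `optmulti` collects repeated `--test-arg`/`--goml-file` flags, and `RUSTC`/`RUSTDOC` come from the environment rather than the command line. A self-contained sketch of that parsing pattern, using a hypothetical argument list (the option names are the real ones from the hunk; the values are made up):

    use getopts::Options;

    fn main() {
        let mut opts = Options::new();
        opts.reqopt("", "nodejs", "absolute path of nodejs", "PATH")
            .reqopt("", "jobs", "jobs arg of browser-ui-test", "JOBS")
            .optmulti("", "goml-file", "goml files for testing with browser-ui-test", "LIST")
            .optflag("", "verbose", "run tests verbosely, showing all output");

        // Hypothetical invocation; the build system supplies the real values.
        let argv = ["--nodejs", "/usr/bin/node", "--jobs", "1",
                    "--goml-file", "a.goml", "--goml-file", "b.goml", "--verbose"];
        let matches = opts.parse(argv).expect("all required options are present");

        assert_eq!(matches.opt_str("nodejs").as_deref(), Some("/usr/bin/node"));
        assert_eq!(matches.opt_strs("goml-file"), ["a.goml", "b.goml"]);
        assert!(matches.opt_present("verbose"));
    }
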
diff --git a/src/tools/rustdoc-gui-test/src/main.rs b/src/tools/rustdoc-gui-test/src/main.rs
new file mode 100644
index 0000000..8dc18df
--- /dev/null
+++ b/src/tools/rustdoc-gui-test/src/main.rs
@@ -0,0 +1,162 @@
+use compiletest::header::TestProps;
+use config::Config;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+use std::sync::Arc;
+use std::{env, fs};
+
+mod config;
+
+fn get_browser_ui_test_version_inner(npm: &Path, global: bool) -> Option<String> {
+ let mut command = Command::new(&npm);
+ command.arg("list").arg("--parseable").arg("--long").arg("--depth=0");
+ if global {
+ command.arg("--global");
+ }
+ let lines = command
+ .output()
+ .map(|output| String::from_utf8_lossy(&output.stdout).into_owned())
+ .unwrap_or(String::new());
+ lines
+ .lines()
+ .find_map(|l| l.split(':').nth(1)?.strip_prefix("browser-ui-test@"))
+ .map(|v| v.to_owned())
+}
+
+fn get_browser_ui_test_version(npm: &Path) -> Option<String> {
+ get_browser_ui_test_version_inner(npm, false)
+ .or_else(|| get_browser_ui_test_version_inner(npm, true))
+}
+
+fn compare_browser_ui_test_version(installed_version: &str, src: &Path) {
+ match fs::read_to_string(
+ src.join("src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version"),
+ ) {
+ Ok(v) => {
+ if v.trim() != installed_version {
+ eprintln!(
+ "⚠️ Installed version of browser-ui-test (`{}`) is different than the \
+ one used in the CI (`{}`)",
+ installed_version, v
+ );
+ eprintln!(
+ "You can install this version using `npm update browser-ui-test` or by using \
+ `npm install browser-ui-test@{}`",
+ v,
+ );
+ }
+ }
+ Err(e) => eprintln!("Couldn't find the CI browser-ui-test version: {:?}", e),
+ }
+}
+
+fn find_librs<P: AsRef<Path>>(path: P) -> Option<PathBuf> {
+ for entry in walkdir::WalkDir::new(path) {
+ let entry = entry.ok()?;
+ if entry.file_type().is_file() && entry.file_name() == "lib.rs" {
+ return Some(entry.path().to_path_buf());
+ }
+ }
+ None
+}
+
+// FIXME: move `bootstrap::util::try_run` into `build_helper` crate
+// and use that one instead of creating this function.
+fn try_run(cmd: &mut Command, print_cmd_on_fail: bool) -> bool {
+ let status = match cmd.status() {
+ Ok(status) => status,
+ Err(e) => panic!("failed to execute command: {:?}\nerror: {}", cmd, e),
+ };
+ if !status.success() && print_cmd_on_fail {
+ println!(
+ "\n\ncommand did not execute successfully: {:?}\n\
+ expected success, got: {}\n\n",
+ cmd, status
+ );
+ }
+ status.success()
+}
+
+fn main() {
+ let config = Arc::new(Config::from_args(env::args().collect()));
+
+ // The goal here is to check if the necessary packages are installed, and if not, we
+ // panic.
+ match get_browser_ui_test_version(&config.npm) {
+ Some(version) => {
+ // We also check the version currently used in CI and emit a warning if it's not the
+ // same one.
+ compare_browser_ui_test_version(&version, &config.rust_src);
+ }
+ None => {
+ eprintln!(
+ r#"
+error: rustdoc-gui test suite cannot be run because npm `browser-ui-test` dependency is missing.
+
+If you want to install the `browser-ui-test` dependency, run `npm install browser-ui-test`
+"#,
+ );
+
+ panic!("Cannot run rustdoc-gui tests");
+ }
+ }
+
+ let src_path = config.rust_src.join("tests/rustdoc-gui/src");
+ for entry in src_path.read_dir().expect("read_dir call failed") {
+ if let Ok(entry) = entry {
+ let path = entry.path();
+
+ if !path.is_dir() {
+ continue;
+ }
+
+ let mut cargo = Command::new(&config.initial_cargo);
+ cargo
+ .arg("doc")
+ .arg("--target-dir")
+ .arg(&config.out_dir)
+ .env("RUSTC_BOOTSTRAP", "1")
+ .env("RUSTDOC", &config.rustdoc)
+ .env("RUSTC", &config.rustc)
+ .current_dir(path);
+
+ if let Some(librs) = find_librs(entry.path()) {
+ let compiletest_c = compiletest::common::Config {
+ edition: None,
+ mode: compiletest::common::Mode::Rustdoc,
+ ..Default::default()
+ };
+
+ let test_props = TestProps::from_file(&librs, None, &compiletest_c);
+
+ if !test_props.compile_flags.is_empty() {
+ cargo.env("RUSTDOCFLAGS", test_props.compile_flags.join(" "));
+ }
+
+ if let Some(flags) = &test_props.run_flags {
+ cargo.arg(flags);
+ }
+ }
+
+ try_run(&mut cargo, config.verbose);
+ }
+ }
+
+ let mut command = Command::new(&config.nodejs);
+ command
+ .arg(config.rust_src.join("src/tools/rustdoc-gui/tester.js"))
+ .arg("--jobs")
+ .arg(&config.jobs)
+ .arg("--doc-folder")
+ .arg(config.out_dir.join("doc"))
+ .arg("--tests-folder")
+ .arg(config.rust_src.join("tests/rustdoc-gui"));
+
+ for file in &config.goml_files {
+ command.arg("--file").arg(file);
+ }
+
+ command.args(&config.test_args);
+
+ try_run(&mut command, config.verbose);
+}
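
`get_browser_ui_test_version_inner` above shells out to `npm list --parseable --long --depth=0` and scans its output for a `browser-ui-test@` entry after the first `:` on each line, returning the first version string it finds. A small self-contained sketch of that extraction; the sample lines mimic that shape, with invented paths and version numbers:

    // Same line-scanning logic as get_browser_ui_test_version_inner, applied to canned input.
    fn extract_version(lines: &str) -> Option<String> {
        lines
            .lines()
            .find_map(|l| l.split(':').nth(1)?.strip_prefix("browser-ui-test@"))
            .map(|v| v.to_owned())
    }

    fn main() {
        let sample = "/tmp/node_modules/browser-ui-test:browser-ui-test@0.16.0\n\
                      /tmp/node_modules/other-package:other-package@1.0.0";
        assert_eq!(extract_version(sample), Some("0.16.0".to_string()));
        assert_eq!(extract_version(""), None); // no matching entry => None
    }

If no version is found, `main()` prints the "browser-ui-test dependency is missing" error and panics; if one is found, `compare_browser_ui_test_version` only warns when it differs from the version pinned under src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version.
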
diff --git a/src/version b/src/version
index df484cb..0834888 100644
--- a/src/version
+++ b/src/version
@@ -1 +1 @@
-1.71.0
+1.72.0
diff --git a/tests/codegen/sanitizer-safestack-attr-check.rs b/tests/codegen/sanitizer-safestack-attr-check.rs
new file mode 100644
index 0000000..b73ed00
--- /dev/null
+++ b/tests/codegen/sanitizer-safestack-attr-check.rs
@@ -0,0 +1,11 @@
+// This tests that the safestack attribute is applied when enabling the safe-stack sanitizer.
+//
+// needs-sanitizer-safestack
+// compile-flags: -Zsanitizer=safestack
+
+#![crate_type = "lib"]
+
+// CHECK: ; Function Attrs:{{.*}}safestack
+pub fn tagged() {}
+
+// CHECK: attributes #0 = {{.*}}safestack
diff --git a/tests/mir-opt/simplify_locals_fixedpoint.foo.SimplifyLocals-final.diff b/tests/mir-opt/simplify_locals_fixedpoint.foo.SimplifyLocals-final.diff
index f908e8d..0b9ca29 100644
--- a/tests/mir-opt/simplify_locals_fixedpoint.foo.SimplifyLocals-final.diff
+++ b/tests/mir-opt/simplify_locals_fixedpoint.foo.SimplifyLocals-final.diff
@@ -8,8 +8,6 @@
let mut _3: std::option::Option<T>; // in scope 0 at $DIR/simplify_locals_fixedpoint.rs:+1:51: +1:68
let mut _4: isize; // in scope 0 at $DIR/simplify_locals_fixedpoint.rs:+1:22: +1:26
let mut _5: isize; // in scope 0 at $DIR/simplify_locals_fixedpoint.rs:+1:13: +1:20
-- let mut _7: bool; // in scope 0 at $DIR/simplify_locals_fixedpoint.rs:+2:12: +2:20
-- let mut _8: u8; // in scope 0 at $DIR/simplify_locals_fixedpoint.rs:+2:12: +2:13
scope 1 {
debug a => _6; // in scope 1 at $DIR/simplify_locals_fixedpoint.rs:+1:18: +1:19
let _6: u8; // in scope 1 at $DIR/simplify_locals_fixedpoint.rs:+1:18: +1:19
@@ -34,10 +32,9 @@
}
bb2: {
+ StorageLive(_6); // scope 1 at $DIR/simplify_locals_fixedpoint.rs:+1:18: +1:19
_6 = (((_1.0: std::option::Option<u8>) as Some).0: u8); // scope 1 at $DIR/simplify_locals_fixedpoint.rs:+1:18: +1:19
-- StorageLive(_7); // scope 1 at $DIR/simplify_locals_fixedpoint.rs:+2:12: +2:20
-- _7 = Gt(_6, const 42_u8); // scope 1 at $DIR/simplify_locals_fixedpoint.rs:+2:12: +2:20
-- StorageDead(_7); // scope 1 at $DIR/simplify_locals_fixedpoint.rs:+4:9: +4:10
+ StorageDead(_6); // scope 0 at $DIR/simplify_locals_fixedpoint.rs:+5:5: +5:6
goto -> bb3; // scope 0 at $DIR/simplify_locals_fixedpoint.rs:+1:5: +5:6
}
diff --git a/tests/mir-opt/switch_to_self.rs b/tests/mir-opt/switch_to_self.rs
new file mode 100644
index 0000000..6678e4b
--- /dev/null
+++ b/tests/mir-opt/switch_to_self.rs
@@ -0,0 +1,21 @@
+// Test that MatchBranchSimplification doesn't ICE on a SwitchInt where
+// one of the targets is the block that the SwitchInt terminates.
+#![crate_type = "lib"]
+#![feature(core_intrinsics, custom_mir)]
+use std::intrinsics::mir::*;
+
+// EMIT_MIR switch_to_self.test.MatchBranchSimplification.diff
+#[custom_mir(dialect = "runtime", phase = "post-cleanup")]
+pub fn test(x: bool) {
+ mir!(
+ {
+ Goto(bb0)
+ }
+ bb0 = {
+ match x { false => bb0, _ => bb1 }
+ }
+ bb1 = {
+ match x { false => bb0, _ => bb1 }
+ }
+ )
+}
diff --git a/tests/mir-opt/switch_to_self.test.MatchBranchSimplification.diff b/tests/mir-opt/switch_to_self.test.MatchBranchSimplification.diff
new file mode 100644
index 0000000..b0a4f9f
--- /dev/null
+++ b/tests/mir-opt/switch_to_self.test.MatchBranchSimplification.diff
@@ -0,0 +1,19 @@
+- // MIR for `test` before MatchBranchSimplification
++ // MIR for `test` after MatchBranchSimplification
+
+ fn test(_1: bool) -> () {
+ let mut _0: (); // return place in scope 0 at $DIR/switch_to_self.rs:+0:22: +0:22
+
+ bb0: {
+ goto -> bb1; // scope 0 at $DIR/switch_to_self.rs:+3:13: +3:22
+ }
+
+ bb1: {
+ switchInt(_1) -> [0: bb1, otherwise: bb2]; // scope 0 at $DIR/switch_to_self.rs:+6:13: +6:47
+ }
+
+ bb2: {
+ switchInt(_1) -> [0: bb1, otherwise: bb2]; // scope 0 at $DIR/switch_to_self.rs:+9:13: +9:47
+ }
+ }
+
diff --git a/tests/rustdoc-gui/source-anchor-scroll.goml b/tests/rustdoc-gui/source-anchor-scroll.goml
index 3d88d56..67f1497 100644
--- a/tests/rustdoc-gui/source-anchor-scroll.goml
+++ b/tests/rustdoc-gui/source-anchor-scroll.goml
@@ -8,13 +8,13 @@
assert-property: ("html", {"scrollTop": "0"})
click: '//a[text() = "barbar"]'
-assert-property: ("html", {"scrollTop": "125"})
+assert-property: ("html", {"scrollTop": "149"})
click: '//a[text() = "bar"]'
-assert-property: ("html", {"scrollTop": "156"})
+assert-property: ("html", {"scrollTop": "180"})
click: '//a[text() = "sub_fn"]'
-assert-property: ("html", {"scrollTop": "53"})
+assert-property: ("html", {"scrollTop": "77"})
// We now check that clicking on lines doesn't change the scroll
// Extra information: the "sub_fn" function header is on line 1.
click: '//*[@id="6"]'
-assert-property: ("html", {"scrollTop": "53"})
+assert-property: ("html", {"scrollTop": "77"})
diff --git a/tests/rustdoc-gui/src/extend_css/lib.rs b/tests/rustdoc-gui/src/extend_css/lib.rs
index 3a3babf..2308c09 100644
--- a/tests/rustdoc-gui/src/extend_css/lib.rs
+++ b/tests/rustdoc-gui/src/extend_css/lib.rs
@@ -1 +1,2 @@
+// compile-flags: --extend-css extra.css
//! <div class="extend">text in red</div>
diff --git a/tests/rustdoc-gui/src/link_to_definition/lib.rs b/tests/rustdoc-gui/src/link_to_definition/lib.rs
index 419a9cc..6fed79a 100644
--- a/tests/rustdoc-gui/src/link_to_definition/lib.rs
+++ b/tests/rustdoc-gui/src/link_to_definition/lib.rs
@@ -1,3 +1,4 @@
+// compile-flags: -Zunstable-options --generate-link-to-definition
pub fn sub_fn() {
barbar();
}
diff --git a/tests/rustdoc-gui/src/scrape_examples/src/lib.rs b/tests/rustdoc-gui/src/scrape_examples/src/lib.rs
index 88b03cf..6666587 100644
--- a/tests/rustdoc-gui/src/scrape_examples/src/lib.rs
+++ b/tests/rustdoc-gui/src/scrape_examples/src/lib.rs
@@ -1,3 +1,4 @@
+// run-flags:-Zrustdoc-scrape-examples
/// # Examples
///
/// ```
diff --git a/tests/rustdoc-ui/intra-doc/issue-110495-suffix-with-space.stderr b/tests/rustdoc-ui/intra-doc/issue-110495-suffix-with-space.stderr
index 8669b0c..6c834fd 100644
--- a/tests/rustdoc-ui/intra-doc/issue-110495-suffix-with-space.stderr
+++ b/tests/rustdoc-ui/intra-doc/issue-110495-suffix-with-space.stderr
@@ -36,7 +36,7 @@
help: to link to the trait, prefix with `trait@`
|
LL - //! [`Clone ()`].
-LL + //! [`trait@Clone (`].
+LL + //! [`trait@Clone `].
|
error: incompatible link kind for `Clone`
@@ -47,8 +47,9 @@
|
help: to link to the derive macro, prefix with `derive@`
|
-LL | //! [`derive@Clone !`].
- | +++++++
+LL - //! [`Clone !`].
+LL + //! [`derive@Clone `].
+ |
error: aborting due to 4 previous errors
diff --git a/tests/rustdoc-ui/intra-doc/weird-syntax.rs b/tests/rustdoc-ui/intra-doc/weird-syntax.rs
new file mode 100644
index 0000000..ca18842
--- /dev/null
+++ b/tests/rustdoc-ui/intra-doc/weird-syntax.rs
@@ -0,0 +1,140 @@
+// Many examples are from
+// https://github.com/rust-lang/rust/issues/110111#issuecomment-1517800781
+#![deny(rustdoc::broken_intra_doc_links)]
+
+//! This test case is closely linked to [raphlinus/pulldown-cmark#441], getting offsets of
+//! link components. In particular, pulldown-cmark doesn't provide the offsets of the contents
+//! of a link.
+//!
+//! To work around this, rustdoc parses parts of a link definition itself. This is basically a
+//! test suite for that link syntax parser.
+//!
+//! [raphlinus/pulldown-cmark#441]: https://github.com/raphlinus/pulldown-cmark/issues/441
+
+use std::clone::Clone;
+
+// Basic version //
+
+/// [`struct@Clone`] //~ERROR link
+pub struct LinkToCloneWithBackquotes;
+
+/// [```struct@Clone```] //~ERROR link
+pub struct LinkToCloneWithMultipleBackquotes;
+
+/// [ ` struct@Clone ` ] //~ERROR link
+pub struct LinkToCloneWithSpacesAndBackquotes;
+
+/// [ `Clone ()` ] //~ERROR link
+pub struct LinkToCloneWithSpacesBackquotesAndParens;
+
+/// [`Clone ()` ] //~ERROR link
+pub struct LinkToCloneWithSpacesEndBackquotesAndParens;
+
+/// [ `Clone ()`] //~ERROR link
+pub struct LinkToCloneWithSpacesStartBackquotesAndParens;
+
+/// [```Clone ()```] //~ERROR link
+pub struct LinkToCloneWithMultipleBackquotesAndParens;
+
+/// [```Clone \(\)```] // not URL-shaped enough
+pub struct LinkToCloneWithMultipleBackquotesAndEscapedParens;
+
+/// [ ``` Clone () ``` ] //~ERROR link
+pub struct LinkToCloneWithSpacesMultipleBackquotesAndParens;
+
+/// [ x \] ] // not URL-shaped enough
+pub struct LinkWithEscapedCloseBrace;
+
+/// [ x \[ ] // not URL-shaped enough
+pub struct LinkWithEscapedOpenBrace;
+
+/// [ x \( ] // not URL-shaped enough
+pub struct LinkWithEscapedCloseParen;
+
+/// [ x \) ] // not URL-shaped enough
+pub struct LinkWithEscapedOpenParen;
+
+/// [ Clone \(\) ] // not URL-shaped enough
+pub struct LinkWithEscapedParens;
+
+// [][] version //
+
+/// [x][ struct@Clone] //~ERROR link
+pub struct XLinkToCloneWithStartSpace;
+
+/// [x][struct@Clone ] //~ERROR link
+pub struct XLinkToCloneWithEndSpace;
+
+/// [x][Clone\(\)] not URL-shaped enough
+pub struct XLinkToCloneWithEscapedParens;
+
+/// [x][`Clone`] not URL-shaped enough
+pub struct XLinkToCloneWithBackquotes;
+
+/// [x][Clone()] //~ERROR link
+pub struct XLinkToCloneWithUnescapedParens;
+
+/// [x][Clone ()] //~ERROR link
+pub struct XLinkToCloneWithUnescapedParensAndDoubleSpace;
+
+/// [x][Clone [] //~ERROR unresolved link to `x`
+pub struct XLinkToCloneWithUnmatchedOpenParenAndDoubleSpace;
+
+/// [x][Clone \[] // not URL-shaped enough
+pub struct XLinkToCloneWithUnmatchedEscapedOpenParenAndDoubleSpace;
+
+/// [x][Clone \]] // not URL-shaped enough
+pub struct XLinkToCloneWithUnmatchedEscapedCloseParenAndDoubleSpace;
+
+// []() version //
+
+/// [w]( struct@Clone) //~ERROR link
+pub struct WLinkToCloneWithStartSpace;
+
+/// [w](struct@Clone ) //~ERROR link
+pub struct WLinkToCloneWithEndSpace;
+
+/// [w](Clone\(\)) //~ERROR link
+pub struct WLinkToCloneWithEscapedParens;
+
+/// [w](`Clone`) not URL-shaped enough
+pub struct WLinkToCloneWithBackquotes;
+
+/// [w](Clone()) //~ERROR link
+pub struct WLinkToCloneWithUnescapedParens;
+
+/// [w](Clone ()) not URL-shaped enough
+pub struct WLinkToCloneWithUnescapedParensAndDoubleSpace;
+
+/// [w](Clone () //~ERROR unresolved link to `w`
+pub struct WLinkToCloneWithUnmatchedOpenParenAndDoubleSpace;
+
+/// [w](Clone \() //~ERROR unresolved link to `w`
+pub struct WLinkToCloneWithUnmatchedEscapedOpenParenAndDoubleSpace;
+
+/// [w](Clone \)) //~ERROR unresolved link to `w`
+pub struct WLinkToCloneWithUnmatchedEscapedCloseParenAndDoubleSpace;
+
+// References
+
+/// The [cln][] link here is going to be unresolved, because `Clone()` gets rejected //~ERROR link
+/// in Markdown for not being URL-shaped enough.
+///
+/// [cln]: Clone() //~ERROR link
+pub struct LinkToCloneWithParensInReference;
+
+/// The [cln][] link here is going to be unresolved, because `struct@Clone` gets //~ERROR link
+/// rejected in Markdown for not being URL-shaped enough.
+///
+/// [cln]: struct@Clone //~ERROR link
+pub struct LinkToCloneWithWrongPrefix;
+
+/// The [cln][] link here will produce a plain text suggestion //~ERROR link
+///
+/// [cln]: Clone\(\)
+pub struct LinkToCloneWithEscapedParensInReference;
+
+/// The [cln][] link here will produce a plain text suggestion //~ERROR link
+///
+/// [cln]: struct\@Clone
+pub struct LinkToCloneWithEscapedAtsInReference;
diff --git a/tests/rustdoc-ui/intra-doc/weird-syntax.stderr b/tests/rustdoc-ui/intra-doc/weird-syntax.stderr
new file mode 100644
index 0000000..f50feb5
--- /dev/null
+++ b/tests/rustdoc-ui/intra-doc/weird-syntax.stderr
@@ -0,0 +1,272 @@
+error: incompatible link kind for `Clone`
+ --> $DIR/weird-syntax.rs:18:7
+ |
+LL | /// [`struct@Clone`]
+ | ^^^^^^^^^^^^ this link resolved to a trait, which is not a struct
+ |
+note: the lint level is defined here
+ --> $DIR/weird-syntax.rs:3:9
+ |
+LL | #![deny(rustdoc::broken_intra_doc_links)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: to link to the trait, prefix with `trait@`
+ |
+LL | /// [`trait@Clone`]
+ | ~~~~~~
+
+error: incompatible link kind for `Clone`
+ --> $DIR/weird-syntax.rs:21:9
+ |
+LL | /// [```struct@Clone```]
+ | ^^^^^^^^^^^^ this link resolved to a trait, which is not a struct
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL | /// [```trait@Clone```]
+ | ~~~~~~
+
+error: incompatible link kind for `Clone`
+ --> $DIR/weird-syntax.rs:24:11
+ |
+LL | /// [ ` struct@Clone ` ]
+ | ^^^^^^^^^^^^ this link resolved to a trait, which is not a struct
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL | /// [ ` trait@Clone ` ]
+ | ~~~~~~
+
+error: unresolved link to `Clone`
+ --> $DIR/weird-syntax.rs:27:9
+ |
+LL | /// [ `Clone ()` ]
+ | ^^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL - /// [ `Clone ()` ]
+LL + /// [ `trait@Clone ` ]
+ |
+
+error: unresolved link to `Clone`
+ --> $DIR/weird-syntax.rs:30:7
+ |
+LL | /// [`Clone ()` ]
+ | ^^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL - /// [`Clone ()` ]
+LL + /// [`trait@Clone ` ]
+ |
+
+error: unresolved link to `Clone`
+ --> $DIR/weird-syntax.rs:33:9
+ |
+LL | /// [ `Clone ()`]
+ | ^^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL - /// [ `Clone ()`]
+LL + /// [ `trait@Clone `]
+ |
+
+error: unresolved link to `Clone`
+ --> $DIR/weird-syntax.rs:36:9
+ |
+LL | /// [```Clone ()```]
+ | ^^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL - /// [```Clone ()```]
+LL + /// [```trait@Clone ```]
+ |
+
+error: unresolved link to `Clone`
+ --> $DIR/weird-syntax.rs:42:13
+ |
+LL | /// [ ``` Clone () ``` ]
+ | ^^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL - /// [ ``` Clone () ``` ]
+LL + /// [ ``` trait@Clone ``` ]
+ |
+
+error: incompatible link kind for `Clone`
+ --> $DIR/weird-syntax.rs:62:10
+ |
+LL | /// [x][ struct@Clone]
+ | ^^^^^^^^^^^^ this link resolved to a trait, which is not a struct
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL | /// [x][ trait@Clone]
+ | ~~~~~~
+
+error: incompatible link kind for `Clone`
+ --> $DIR/weird-syntax.rs:65:9
+ |
+LL | /// [x][struct@Clone ]
+ | ^^^^^^^^^^^^ this link resolved to a trait, which is not a struct
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL | /// [x][trait@Clone ]
+ | ~~~~~~
+
+error: unresolved link to `Clone`
+ --> $DIR/weird-syntax.rs:74:9
+ |
+LL | /// [x][Clone()]
+ | ^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL - /// [x][Clone()]
+LL + /// [x][trait@Clone]
+ |
+
+error: unresolved link to `Clone`
+ --> $DIR/weird-syntax.rs:77:9
+ |
+LL | /// [x][Clone ()]
+ | ^^^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL - /// [x][Clone ()]
+LL + /// [x][trait@Clone ]
+ |
+
+error: unresolved link to `x`
+ --> $DIR/weird-syntax.rs:80:6
+ |
+LL | /// [x][Clone []
+ | ^ no item named `x` in scope
+ |
+ = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]`
+
+error: incompatible link kind for `Clone`
+ --> $DIR/weird-syntax.rs:91:10
+ |
+LL | /// [w]( struct@Clone)
+ | ^^^^^^^^^^^^ this link resolved to a trait, which is not a struct
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL | /// [w]( trait@Clone)
+ | ~~~~~~
+
+error: incompatible link kind for `Clone`
+ --> $DIR/weird-syntax.rs:94:9
+ |
+LL | /// [w](struct@Clone )
+ | ^^^^^^^^^^^^ this link resolved to a trait, which is not a struct
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL | /// [w](trait@Clone )
+ | ~~~~~~
+
+error: unresolved link to `Clone`
+ --> $DIR/weird-syntax.rs:97:9
+ |
+LL | /// [w](Clone\(\))
+ | ^^^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL - /// [w](Clone\(\))
+LL + /// [w](trait@Clone)
+ |
+
+error: unresolved link to `Clone`
+ --> $DIR/weird-syntax.rs:103:9
+ |
+LL | /// [w](Clone())
+ | ^^^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace
+ |
+help: to link to the trait, prefix with `trait@`
+ |
+LL - /// [w](Clone())
+LL + /// [w](trait@Clone)
+ |
+
+error: unresolved link to `w`
+ --> $DIR/weird-syntax.rs:109:6
+ |
+LL | /// [w](Clone ()
+ | ^ no item named `w` in scope
+ |
+ = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]`
+
+error: unresolved link to `w`
+ --> $DIR/weird-syntax.rs:112:6
+ |
+LL | /// [w](Clone \()
+ | ^ no item named `w` in scope
+ |
+ = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]`
+
+error: unresolved link to `w`
+ --> $DIR/weird-syntax.rs:115:6
+ |
+LL | /// [w](Clone \))
+ | ^ no item named `w` in scope
+ |
+ = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]`
+
+error: unresolved link to `cln`
+ --> $DIR/weird-syntax.rs:120:10
+ |
+LL | /// The [cln][] link here is going to be unresolved, because `Clone()` gets rejected
+ | ^^^ no item named `cln` in scope
+ |
+ = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]`
+
+error: unresolved link to `cln`
+ --> $DIR/weird-syntax.rs:123:6
+ |
+LL | /// [cln]: Clone()
+ | ^^^ no item named `cln` in scope
+ |
+ = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]`
+
+error: unresolved link to `cln`
+ --> $DIR/weird-syntax.rs:126:10
+ |
+LL | /// The [cln][] link here is going to be unresolved, because `struct@Clone` gets
+ | ^^^ no item named `cln` in scope
+ |
+ = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]`
+
+error: unresolved link to `cln`
+ --> $DIR/weird-syntax.rs:129:6
+ |
+LL | /// [cln]: struct@Clone
+ | ^^^ no item named `cln` in scope
+ |
+ = help: to escape `[` and `]` characters, add '\' before them like `\[` or `\]`
+
+error: unresolved link to `Clone`
+ --> $DIR/weird-syntax.rs:132:9
+ |
+LL | /// The [cln][] link here will produce a plain text suggestion
+ | ^^^^^ this link resolves to the trait `Clone`, which is not in the value namespace
+ |
+ = help: to link to the trait, prefix with `trait@`: trait@Clone
+
+error: incompatible link kind for `Clone`
+ --> $DIR/weird-syntax.rs:137:9
+ |
+LL | /// The [cln][] link here will produce a plain text suggestion
+ | ^^^^^ this link resolved to a trait, which is not a struct
+ |
+ = help: to link to the trait, prefix with `trait@`: trait@Clone
+
+error: aborting due to 26 previous errors
+
diff --git a/tests/rustdoc/reexport-doc-hidden.rs b/tests/rustdoc/reexport-doc-hidden.rs
index 3ea5fde..d9ed954 100644
--- a/tests/rustdoc/reexport-doc-hidden.rs
+++ b/tests/rustdoc/reexport-doc-hidden.rs
@@ -21,6 +21,5 @@ macro_rules! foo {
() => {};
}
-// This is a bug: https://github.com/rust-lang/rust/issues/59368
-// @!has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;'
+// @has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;'
pub use crate::foo as Macro;
diff --git a/tests/rustdoc/reexport-hidden-macro.rs b/tests/rustdoc/reexport-hidden-macro.rs
index afcfa97..47a21e3 100644
--- a/tests/rustdoc/reexport-hidden-macro.rs
+++ b/tests/rustdoc/reexport-hidden-macro.rs
@@ -5,6 +5,7 @@
// @has 'foo/index.html'
// @has - '//*[@id="main-content"]//a[@href="macro.Macro2.html"]' 'Macro2'
+// @has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;'
// @has 'foo/macro.Macro2.html'
// @has - '//*[@class="docblock"]' 'Displayed'
@@ -15,7 +16,6 @@ macro_rules! foo {
() => {};
}
-/// not displayed
pub use crate::foo as Macro;
/// Displayed
#[doc(inline)]
diff --git a/tests/rustdoc/reexport-of-doc-hidden.rs b/tests/rustdoc/reexport-of-doc-hidden.rs
new file mode 100644
index 0000000..b733716
--- /dev/null
+++ b/tests/rustdoc/reexport-of-doc-hidden.rs
@@ -0,0 +1,42 @@
+// This test ensures that all re-exports of doc hidden elements are displayed.
+
+#![crate_name = "foo"]
+
+#[doc(hidden)]
+pub struct Bar;
+
+#[macro_export]
+#[doc(hidden)]
+macro_rules! foo {
+ () => {};
+}
+
+// @has 'foo/index.html'
+// @has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;'
+pub use crate::foo as Macro;
+// @has - '//*[@id="reexport.Macro2"]/code' 'pub use crate::foo as Macro2;'
+pub use crate::foo as Macro2;
+// @has - '//*[@id="reexport.Boo"]/code' 'pub use crate::Bar as Boo;'
+pub use crate::Bar as Boo;
+// @has - '//*[@id="reexport.Boo2"]/code' 'pub use crate::Bar as Boo2;'
+pub use crate::Bar as Boo2;
+
+pub fn fofo() {}
+
+// @has - '//*[@id="reexport.f1"]/code' 'pub use crate::fofo as f1;'
+pub use crate::fofo as f1;
+// @has - '//*[@id="reexport.f2"]/code' 'pub use crate::fofo as f2;'
+pub use crate::fofo as f2;
+
+pub mod sub {
+ // @has 'foo/sub/index.html'
+ // @has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;'
+ pub use crate::foo as Macro;
+ // @has - '//*[@id="reexport.Macro2"]/code' 'pub use crate::foo as Macro2;'
+ pub use crate::foo as Macro2;
+
+ // @has - '//*[@id="reexport.f1"]/code' 'pub use crate::fofo as f1;'
+ pub use crate::fofo as f1;
+ // @has - '//*[@id="reexport.f2"]/code' 'pub use crate::fofo as f2;'
+ pub use crate::fofo as f2;
+}
diff --git a/tests/ui/backtrace.rs b/tests/ui/backtrace.rs
index dd73dd9..66b378f 100644
--- a/tests/ui/backtrace.rs
+++ b/tests/ui/backtrace.rs
@@ -104,13 +104,17 @@ fn runtest(me: &str) {
"bad output3: {}", s);
// Make sure a stack trace isn't printed too many times
+ //
+ // Currently it is printed 3 times ("once", "twice" and "panic in a
+ // function that cannot unwind") but in the future the last one may be
+ // removed.
let p = template(me).arg("double-fail")
.env("RUST_BACKTRACE", "1").spawn().unwrap();
let out = p.wait_with_output().unwrap();
assert!(!out.status.success());
let s = str::from_utf8(&out.stderr).unwrap();
let mut i = 0;
- for _ in 0..2 {
+ for _ in 0..3 {
i += s[i + 10..].find("stack backtrace").unwrap() + 10;
}
assert!(s[i + 10..].find("stack backtrace").is_none(),
diff --git a/tests/ui/borrowck/borrowck-vec-pattern-nesting.rs b/tests/ui/borrowck/borrowck-vec-pattern-nesting.rs
index 127a3f5..1bda7a4 100644
--- a/tests/ui/borrowck/borrowck-vec-pattern-nesting.rs
+++ b/tests/ui/borrowck/borrowck-vec-pattern-nesting.rs
@@ -8,7 +8,6 @@ fn a() {
//~^ NOTE `vec[_]` is borrowed here
vec[0] = Box::new(4); //~ ERROR cannot assign
//~^ NOTE `vec[_]` is assigned to here
- //~| NOTE in this expansion of desugaring of drop and replace
_a.use_ref();
//~^ NOTE borrow later used here
}
@@ -23,7 +22,6 @@ fn b() {
//~^ `vec[_]` is borrowed here
vec[0] = Box::new(4); //~ ERROR cannot assign
//~^ NOTE `vec[_]` is assigned to here
- //~| NOTE in this expansion of desugaring of drop and replace
_b.use_ref();
//~^ NOTE borrow later used here
}
diff --git a/tests/ui/borrowck/borrowck-vec-pattern-nesting.stderr b/tests/ui/borrowck/borrowck-vec-pattern-nesting.stderr
index 5e1251b..70b9e4f 100644
--- a/tests/ui/borrowck/borrowck-vec-pattern-nesting.stderr
+++ b/tests/ui/borrowck/borrowck-vec-pattern-nesting.stderr
@@ -6,24 +6,24 @@
LL |
LL | vec[0] = Box::new(4);
| ^^^^^^ `vec[_]` is assigned to here but it was already borrowed
-...
+LL |
LL | _a.use_ref();
| ------------ borrow later used here
error[E0506]: cannot assign to `vec[_]` because it is borrowed
- --> $DIR/borrowck-vec-pattern-nesting.rs:24:13
+ --> $DIR/borrowck-vec-pattern-nesting.rs:23:13
|
LL | &mut [ref _b @ ..] => {
| ------ `vec[_]` is borrowed here
LL |
LL | vec[0] = Box::new(4);
| ^^^^^^ `vec[_]` is assigned to here but it was already borrowed
-...
+LL |
LL | _b.use_ref();
| ------------ borrow later used here
error[E0508]: cannot move out of type `[Box<isize>]`, a non-copy slice
- --> $DIR/borrowck-vec-pattern-nesting.rs:36:11
+ --> $DIR/borrowck-vec-pattern-nesting.rs:34:11
|
LL | match vec {
| ^^^ cannot move out of here
@@ -41,7 +41,7 @@
|
error[E0508]: cannot move out of type `[Box<isize>]`, a non-copy slice
- --> $DIR/borrowck-vec-pattern-nesting.rs:48:13
+ --> $DIR/borrowck-vec-pattern-nesting.rs:46:13
|
LL | let a = vec[0];
| ^^^^^^
@@ -55,7 +55,7 @@
| +
error[E0508]: cannot move out of type `[Box<isize>]`, a non-copy slice
- --> $DIR/borrowck-vec-pattern-nesting.rs:57:11
+ --> $DIR/borrowck-vec-pattern-nesting.rs:55:11
|
LL | match vec {
| ^^^ cannot move out of here
@@ -73,7 +73,7 @@
|
error[E0508]: cannot move out of type `[Box<isize>]`, a non-copy slice
- --> $DIR/borrowck-vec-pattern-nesting.rs:67:13
+ --> $DIR/borrowck-vec-pattern-nesting.rs:65:13
|
LL | let a = vec[0];
| ^^^^^^
@@ -87,7 +87,7 @@
| +
error[E0508]: cannot move out of type `[Box<isize>]`, a non-copy slice
- --> $DIR/borrowck-vec-pattern-nesting.rs:76:11
+ --> $DIR/borrowck-vec-pattern-nesting.rs:74:11
|
LL | match vec {
| ^^^ cannot move out of here
@@ -106,7 +106,7 @@
|
error[E0508]: cannot move out of type `[Box<isize>]`, a non-copy slice
- --> $DIR/borrowck-vec-pattern-nesting.rs:87:13
+ --> $DIR/borrowck-vec-pattern-nesting.rs:85:13
|
LL | let a = vec[0];
| ^^^^^^
diff --git a/tests/ui/borrowck/issue-45199.rs b/tests/ui/borrowck/issue-45199.rs
index 6a6b255..ded46e5 100644
--- a/tests/ui/borrowck/issue-45199.rs
+++ b/tests/ui/borrowck/issue-45199.rs
@@ -5,7 +5,6 @@
b = Box::new(1); //~ NOTE first assignment
b = Box::new(2); //~ ERROR cannot assign twice to immutable variable `b`
//~| NOTE cannot assign twice to immutable
- //~| NOTE in this expansion of desugaring of drop and replace
}
fn test_call() {
@@ -14,14 +13,12 @@ fn test_call() {
//~| SUGGESTION mut b
b = Box::new(2); //~ ERROR cannot assign twice to immutable variable `b`
//~| NOTE cannot assign twice to immutable
- //~| NOTE in this expansion of desugaring of drop and replace
}
fn test_args(b: Box<i32>) { //~ HELP consider making this binding mutable
//~| SUGGESTION mut b
b = Box::new(2); //~ ERROR cannot assign to immutable argument `b`
//~| NOTE cannot assign to immutable argument
- //~| NOTE in this expansion of desugaring of drop and replace
}
fn main() {}
diff --git a/tests/ui/borrowck/issue-45199.stderr b/tests/ui/borrowck/issue-45199.stderr
index 163f237..47aa309 100644
--- a/tests/ui/borrowck/issue-45199.stderr
+++ b/tests/ui/borrowck/issue-45199.stderr
@@ -10,7 +10,7 @@
| ^ cannot assign twice to immutable variable
error[E0384]: cannot assign twice to immutable variable `b`
- --> $DIR/issue-45199.rs:15:5
+ --> $DIR/issue-45199.rs:14:5
|
LL | let b = Box::new(1);
| -
@@ -22,7 +22,7 @@
| ^ cannot assign twice to immutable variable
error[E0384]: cannot assign to immutable argument `b`
- --> $DIR/issue-45199.rs:22:5
+ --> $DIR/issue-45199.rs:20:5
|
LL | fn test_args(b: Box<i32>) {
| - help: consider making this binding mutable: `mut b`
diff --git a/tests/ui/const-generics/transmute-fail.stderr b/tests/ui/const-generics/transmute-fail.stderr
index 41b0981..3d1197a 100644
--- a/tests/ui/const-generics/transmute-fail.stderr
+++ b/tests/ui/const-generics/transmute-fail.stderr
@@ -4,8 +4,8 @@
LL | std::mem::transmute(v)
| ^^^^^^^^^^^^^^^^^^^
|
- = note: source type: `[[u32; H+1]; W]` (generic size [const expr])
- = note: target type: `[[u32; W+1]; H]` (generic size [const expr])
+ = note: source type: `[[u32; H+1]; W]` (generic size {const expr})
+ = note: target type: `[[u32; W+1]; H]` (generic size {const expr})
error[E0512]: cannot transmute between types of different sizes, or dependently-sized types
--> $DIR/transmute-fail.rs:16:5
@@ -34,8 +34,8 @@
LL | std::mem::transmute(v)
| ^^^^^^^^^^^^^^^^^^^
|
- = note: source type: `[[u32; H]; W]` (generic size [const expr])
- = note: target type: `[u32; W * H * H]` (generic size [const expr])
+ = note: source type: `[[u32; H]; W]` (generic size {const expr})
+ = note: target type: `[u32; W * H * H]` (generic size {const expr})
error[E0512]: cannot transmute between types of different sizes, or dependently-sized types
--> $DIR/transmute-fail.rs:30:5
diff --git a/tests/ui/editions/edition-raw-pointer-method-2018.rs b/tests/ui/editions/edition-raw-pointer-method-2018.rs
index af0b2d6..0bae65a 100644
--- a/tests/ui/editions/edition-raw-pointer-method-2018.rs
+++ b/tests/ui/editions/edition-raw-pointer-method-2018.rs
@@ -7,5 +7,5 @@ fn main() {
let x = 0;
let y = &x as *const _;
let _ = y.is_null();
- //~^ error: the type of this value must be known to call a method on a raw pointer on it [E0699]
+ //~^ error: cannot call a method on a raw pointer with an unknown pointee type [E0699]
}
diff --git a/tests/ui/editions/edition-raw-pointer-method-2018.stderr b/tests/ui/editions/edition-raw-pointer-method-2018.stderr
index 2345249..b9afa01 100644
--- a/tests/ui/editions/edition-raw-pointer-method-2018.stderr
+++ b/tests/ui/editions/edition-raw-pointer-method-2018.stderr
@@ -1,4 +1,4 @@
-error[E0699]: the type of this value must be known to call a method on a raw pointer on it
+error[E0699]: cannot call a method on a raw pointer with an unknown pointee type
--> $DIR/edition-raw-pointer-method-2018.rs:9:15
|
LL | let _ = y.is_null();
diff --git a/tests/ui/imports/issue-55884-2.rs b/tests/ui/imports/issue-55884-2.rs
index 75bb420..6f8d0cf 100644
--- a/tests/ui/imports/issue-55884-2.rs
+++ b/tests/ui/imports/issue-55884-2.rs
@@ -6,6 +6,7 @@ mod parser {
pub use options::*;
// Private single import shadows public glob import, but arrives too late for initial
// resolution of `use parser::ParseOptions` because it depends on that resolution itself.
+ #[allow(hidden_glob_reexports)]
use ParseOptions;
}
diff --git a/tests/ui/imports/issue-55884-2.stderr b/tests/ui/imports/issue-55884-2.stderr
index 5adbc4b..67d4114 100644
--- a/tests/ui/imports/issue-55884-2.stderr
+++ b/tests/ui/imports/issue-55884-2.stderr
@@ -1,16 +1,16 @@
error[E0603]: struct import `ParseOptions` is private
- --> $DIR/issue-55884-2.rs:12:17
+ --> $DIR/issue-55884-2.rs:13:17
|
LL | pub use parser::ParseOptions;
| ^^^^^^^^^^^^ private struct import
|
note: the struct import `ParseOptions` is defined here...
- --> $DIR/issue-55884-2.rs:9:9
+ --> $DIR/issue-55884-2.rs:10:9
|
LL | use ParseOptions;
| ^^^^^^^^^^^^
note: ...and refers to the struct import `ParseOptions` which is defined here...
- --> $DIR/issue-55884-2.rs:12:9
+ --> $DIR/issue-55884-2.rs:13:9
|
LL | pub use parser::ParseOptions;
| ^^^^^^^^^^^^^^^^^^^^ consider importing it directly
diff --git a/tests/ui/lint/rfc-2383-lint-reason/root-attribute-confusion.rs b/tests/ui/lint/rfc-2383-lint-reason/root-attribute-confusion.rs
new file mode 100644
index 0000000..0cade7f
--- /dev/null
+++ b/tests/ui/lint/rfc-2383-lint-reason/root-attribute-confusion.rs
@@ -0,0 +1,7 @@
+// check-pass
+// compile-flags: -Dunused_attributes
+
+#![deny(unused_crate_dependencies)]
+#![feature(lint_reasons)]
+
+fn main() {}
diff --git a/tests/ui/liveness/liveness-assign/liveness-assign-imm-local-with-drop.rs b/tests/ui/liveness/liveness-assign/liveness-assign-imm-local-with-drop.rs
index 293fdca..c9b16e4 100644
--- a/tests/ui/liveness/liveness-assign/liveness-assign-imm-local-with-drop.rs
+++ b/tests/ui/liveness/liveness-assign/liveness-assign-imm-local-with-drop.rs
@@ -5,7 +5,6 @@
drop(b);
b = Box::new(2); //~ ERROR cannot assign twice to immutable variable `b`
//~| NOTE cannot assign twice to immutable
- //~| NOTE in this expansion of desugaring of drop and replace
drop(b);
}
diff --git a/tests/ui/macros/rfc-2011-nicer-assert-messages/all-expr-kinds.rs b/tests/ui/macros/rfc-2011-nicer-assert-messages/all-expr-kinds.rs
index e88e244..b1db05a 100644
--- a/tests/ui/macros/rfc-2011-nicer-assert-messages/all-expr-kinds.rs
+++ b/tests/ui/macros/rfc-2011-nicer-assert-messages/all-expr-kinds.rs
@@ -5,7 +5,7 @@
// needs-unwind Asserting on contents of error message
#![allow(path_statements, unused_allocation)]
-#![feature(core_intrinsics, generic_assert, generic_assert_internals)]
+#![feature(core_intrinsics, generic_assert)]
macro_rules! test {
(
@@ -51,6 +51,7 @@ macro_rules! tests {
const FOO: Foo = Foo { bar: 1 };
+
#[derive(Clone, Copy, Debug, PartialEq)]
struct Foo {
bar: i32
@@ -83,9 +84,18 @@ fn main() {
// cast
[ elem as i32 == 3 ] => "Assertion failed: elem as i32 == 3\nWith captures:\n elem = 1\n"
+ // if
+ [ if elem == 3 { true } else { false } ] => "Assertion failed: if elem == 3 { true } else { false }\nWith captures:\n elem = 1\n"
+
// index
[ [1i32, 1][elem as usize] == 3 ] => "Assertion failed: [1i32, 1][elem as usize] == 3\nWith captures:\n elem = 1\n"
+ // let
+ [ if let 3 = elem { true } else { false } ] => "Assertion failed: if let 3 = elem { true } else { false }\nWith captures:\n elem = 1\n"
+
+ // match
+ [ match elem { 3 => true, _ => false, } ] => "Assertion failed: match elem { 3 => true, _ => false, }\nWith captures:\n elem = 1\n"
+
// method call
[ FOO.add(elem, elem) == 3 ] => "Assertion failed: FOO.add(elem, elem) == 3\nWith captures:\n elem = 1\n"
@@ -107,77 +117,4 @@ fn main() {
// unary
[ -elem == -3 ] => "Assertion failed: -elem == -3\nWith captures:\n elem = 1\n"
);
-
- // ***** Disallowed *****
-
- tests!(
- let mut elem = 1i32;
-
- // assign
- [ { let local = elem; local } == 3 ] => "Assertion failed: { let local = elem; local } == 3"
-
- // assign op
- [ { elem += 1; elem } == 3 ] => "Assertion failed: { elem += 1; elem } == 3"
-
- // async
- [ { let _ = async { elem }; elem } == 3 ] => "Assertion failed: { let _ = async { elem }; elem } == 3"
-
- // await
-
- // block
- [ { elem } == 3 ] => "Assertion failed: { elem } == 3"
-
- // break
- [ loop { break elem; } == 3 ] => "Assertion failed: loop { break elem; } == 3"
-
- // closure
- [(|| elem)() == 3 ] => "Assertion failed: (|| elem)() == 3"
-
- // const block
-
- // continue
-
- // err
-
- // field
- [ FOO.bar == 3 ] => "Assertion failed: FOO.bar == 3"
-
- // for loop
- [ { for _ in 0..elem { elem; } elem } == 3 ] => "Assertion failed: { for _ in 0..elem { elem; } elem } == 3"
-
- // if
- [ if true { elem } else { elem } == 3 ] => "Assertion failed: if true { elem } else { elem } == 3"
-
- // inline asm
-
- // let
- [ if let true = true { elem } else { elem } == 3 ] => "Assertion failed: if let true = true { elem } else { elem } == 3"
-
- // lit
-
- // loop
- [ loop { elem; break elem; } == 3 ] => "Assertion failed: loop { elem; break elem; } == 3"
-
- // mac call
-
- // match
- [ match elem { _ => elem } == 3 ] => "Assertion failed: (match elem { _ => elem, }) == 3"
-
- // ret
- [ (|| { return elem; })() == 3 ] => "Assertion failed: (|| { return elem; })() == 3"
-
- // try
- [ (|| { Some(Some(elem)?) })() == Some(3) ] => "Assertion failed: (|| { Some(Some(elem)?) })() == Some(3)"
-
- // try block
-
- // underscore
-
- // while
- [ { while false { elem; break; } elem } == 3 ] => "Assertion failed: { while false { elem; break; } elem } == 3"
-
- // yeet
-
- // yield
- );
}
diff --git a/tests/ui/macros/rfc-2011-nicer-assert-messages/all-not-available-cases.rs b/tests/ui/macros/rfc-2011-nicer-assert-messages/all-not-available-cases.rs
index d46f396..fcf4f36 100644
--- a/tests/ui/macros/rfc-2011-nicer-assert-messages/all-not-available-cases.rs
+++ b/tests/ui/macros/rfc-2011-nicer-assert-messages/all-not-available-cases.rs
@@ -4,7 +4,7 @@
// run-pass
// needs-unwind Asserting on contents of error message
-#![feature(core_intrinsics, generic_assert, generic_assert_internals)]
+#![feature(core_intrinsics, generic_assert)]
extern crate common;
diff --git a/tests/ui/macros/rfc-2011-nicer-assert-messages/assert-with-custom-errors-does-not-create-unnecessary-code.rs b/tests/ui/macros/rfc-2011-nicer-assert-messages/assert-with-custom-errors-does-not-create-unnecessary-code.rs
index 6a1435f..c8408d1 100644
--- a/tests/ui/macros/rfc-2011-nicer-assert-messages/assert-with-custom-errors-does-not-create-unnecessary-code.rs
+++ b/tests/ui/macros/rfc-2011-nicer-assert-messages/assert-with-custom-errors-does-not-create-unnecessary-code.rs
@@ -1,7 +1,7 @@
// compile-flags: --test
// run-pass
-#![feature(core_intrinsics, generic_assert, generic_assert_internals)]
+#![feature(core_intrinsics, generic_assert)]
#[should_panic(expected = "Custom user message")]
#[test]
diff --git a/tests/ui/macros/rfc-2011-nicer-assert-messages/assert-without-captures-does-not-create-unnecessary-code.rs b/tests/ui/macros/rfc-2011-nicer-assert-messages/assert-without-captures-does-not-create-unnecessary-code.rs
index 1f5a29a..0e3c14a 100644
--- a/tests/ui/macros/rfc-2011-nicer-assert-messages/assert-without-captures-does-not-create-unnecessary-code.rs
+++ b/tests/ui/macros/rfc-2011-nicer-assert-messages/assert-without-captures-does-not-create-unnecessary-code.rs
@@ -3,7 +3,7 @@
// run-pass
// needs-unwind Asserting on contents of error message
-#![feature(core_intrinsics, generic_assert, generic_assert_internals)]
+#![feature(core_intrinsics, generic_assert)]
extern crate common;
diff --git a/tests/ui/macros/rfc-2011-nicer-assert-messages/feature-gate-generic_assert.rs b/tests/ui/macros/rfc-2011-nicer-assert-messages/feature-gate-generic_assert.rs
index 01860ad..0d2518d 100644
--- a/tests/ui/macros/rfc-2011-nicer-assert-messages/feature-gate-generic_assert.rs
+++ b/tests/ui/macros/rfc-2011-nicer-assert-messages/feature-gate-generic_assert.rs
@@ -2,7 +2,7 @@
// ignore-tidy-linelength
// run-pass
-#![feature(core_intrinsics, generic_assert, generic_assert_internals)]
+#![feature(core_intrinsics, generic_assert)]
use std::fmt::{Debug, Formatter};
diff --git a/tests/ui/macros/rfc-2011-nicer-assert-messages/non-consuming-methods-have-optimized-codegen.rs b/tests/ui/macros/rfc-2011-nicer-assert-messages/non-consuming-methods-have-optimized-codegen.rs
index 5ec84b0..57b79a5 100644
--- a/tests/ui/macros/rfc-2011-nicer-assert-messages/non-consuming-methods-have-optimized-codegen.rs
+++ b/tests/ui/macros/rfc-2011-nicer-assert-messages/non-consuming-methods-have-optimized-codegen.rs
@@ -1,7 +1,7 @@
// check-pass
// compile-flags: -Z unpretty=expanded
-#![feature(core_intrinsics, generic_assert, generic_assert_internals)]
+#![feature(core_intrinsics, generic_assert)]
fn arbitrary_consuming_method_for_demonstration_purposes() {
let elem = 1i32;
diff --git a/tests/ui/macros/rfc-2011-nicer-assert-messages/non-consuming-methods-have-optimized-codegen.stdout b/tests/ui/macros/rfc-2011-nicer-assert-messages/non-consuming-methods-have-optimized-codegen.stdout
index b69b5bc..66321bc 100644
--- a/tests/ui/macros/rfc-2011-nicer-assert-messages/non-consuming-methods-have-optimized-codegen.stdout
+++ b/tests/ui/macros/rfc-2011-nicer-assert-messages/non-consuming-methods-have-optimized-codegen.stdout
@@ -3,7 +3,7 @@
// check-pass
// compile-flags: -Z unpretty=expanded
-#![feature(core_intrinsics, generic_assert, generic_assert_internals)]
+#![feature(core_intrinsics, generic_assert)]
#[prelude_import]
use ::std::prelude::rust_2015::*;
#[macro_use]
diff --git a/tests/ui/methods/call_method_unknown_pointee.rs b/tests/ui/methods/call_method_unknown_pointee.rs
new file mode 100644
index 0000000..fe4275f
--- /dev/null
+++ b/tests/ui/methods/call_method_unknown_pointee.rs
@@ -0,0 +1,28 @@
+// edition: 2018
+
+// tests that the pointee type of a raw pointer must be known to call methods on it
+// see also: `tests/ui/editions/edition-raw-pointer-method-2018.rs`
+
+fn main() {
+ let val = 1_u32;
+ let ptr = &val as *const u32;
+ unsafe {
+ let _a: i32 = (ptr as *const _).read();
+ //~^ ERROR cannot call a method on a raw pointer with an unknown pointee type [E0699]
+ let b = ptr as *const _;
+ let _b: u8 = b.read();
+ //~^ ERROR cannot call a method on a raw pointer with an unknown pointee type [E0699]
+ let _c = (ptr as *const u8).read(); // we know the type here
+ }
+
+ let mut val = 2_u32;
+ let ptr = &mut val as *mut u32;
+ unsafe {
+ let _a: i32 = (ptr as *mut _).read();
+ //~^ ERROR cannot call a method on a raw pointer with an unknown pointee type [E0699]
+ let b = ptr as *mut _;
+ b.write(10);
+ //~^ ERROR cannot call a method on a raw pointer with an unknown pointee type [E0699]
+ (ptr as *mut i32).write(1000); // we know the type here
+ }
+}
diff --git a/tests/ui/methods/call_method_unknown_pointee.stderr b/tests/ui/methods/call_method_unknown_pointee.stderr
new file mode 100644
index 0000000..84ecf04
--- /dev/null
+++ b/tests/ui/methods/call_method_unknown_pointee.stderr
@@ -0,0 +1,27 @@
+error[E0699]: cannot call a method on a raw pointer with an unknown pointee type
+ --> $DIR/call_method_unknown_pointee.rs:10:41
+ |
+LL | let _a: i32 = (ptr as *const _).read();
+ | ^^^^
+
+error[E0699]: cannot call a method on a raw pointer with an unknown pointee type
+ --> $DIR/call_method_unknown_pointee.rs:13:24
+ |
+LL | let _b: u8 = b.read();
+ | ^^^^
+
+error[E0699]: cannot call a method on a raw pointer with an unknown pointee type
+ --> $DIR/call_method_unknown_pointee.rs:21:39
+ |
+LL | let _a: i32 = (ptr as *mut _).read();
+ | ^^^^
+
+error[E0699]: cannot call a method on a raw pointer with an unknown pointee type
+ --> $DIR/call_method_unknown_pointee.rs:24:11
+ |
+LL | b.write(10);
+ | ^^^^^
+
+error: aborting due to 4 previous errors
+
+For more information about this error, try `rustc --explain E0699`.
diff --git a/tests/ui/panics/nested_panic_caught.rs b/tests/ui/panics/nested_panic_caught.rs
new file mode 100644
index 0000000..d43886e
--- /dev/null
+++ b/tests/ui/panics/nested_panic_caught.rs
@@ -0,0 +1,24 @@
+// run-pass
+// needs-unwind
+
+// Checks that nested panics work correctly.
+
+use std::panic::catch_unwind;
+
+fn double() {
+ struct Double;
+
+ impl Drop for Double {
+ fn drop(&mut self) {
+ let _ = catch_unwind(|| panic!("twice"));
+ }
+ }
+
+ let _d = Double;
+
+ panic!("once");
+}
+
+fn main() {
+ assert!(catch_unwind(|| double()).is_err());
+}
diff --git a/tests/ui/parser/issues/issue-111148.rs b/tests/ui/parser/issues/issue-111148.rs
new file mode 100644
index 0000000..2502bea
--- /dev/null
+++ b/tests/ui/parser/issues/issue-111148.rs
@@ -0,0 +1,2 @@
+fn a<<i<Y<w<>#
+//~^ ERROR expected one of `#`, `>`, `const`, identifier, or lifetime, found `<`
diff --git a/tests/ui/parser/issues/issue-111148.stderr b/tests/ui/parser/issues/issue-111148.stderr
new file mode 100644
index 0000000..e6595a5
--- /dev/null
+++ b/tests/ui/parser/issues/issue-111148.stderr
@@ -0,0 +1,8 @@
+error: expected one of `#`, `>`, `const`, identifier, or lifetime, found `<`
+ --> $DIR/issue-111148.rs:1:6
+ |
+LL | fn a<<i<Y<w<>#
+ | ^ expected one of `#`, `>`, `const`, identifier, or lifetime
+
+error: aborting due to previous error
+
diff --git a/tests/ui/privacy/issue-111220-2-tuple-struct-fields-projection.rs b/tests/ui/privacy/issue-111220-2-tuple-struct-fields-projection.rs
new file mode 100644
index 0000000..f413b50
--- /dev/null
+++ b/tests/ui/privacy/issue-111220-2-tuple-struct-fields-projection.rs
@@ -0,0 +1,33 @@
+mod b {
+ pub struct A(u32);
+}
+
+trait Id {
+ type Assoc;
+}
+impl Id for b::A {
+ type Assoc = b::A;
+}
+impl Id for u32 {
+ type Assoc = u32;
+}
+
+
+trait Trait<T> {
+ fn method(&self)
+ where
+ T: Id<Assoc = b::A>;
+}
+
+impl<T: Id> Trait<T> for <T as Id>::Assoc {
+ fn method(&self)
+ where
+ T: Id<Assoc = b::A>,
+ {
+ let Self(a) = self;
+ //~^ ERROR: tuple struct constructor `A` is private
+ println!("{a}");
+ }
+}
+
+fn main() {}
diff --git a/tests/ui/privacy/issue-111220-2-tuple-struct-fields-projection.stderr b/tests/ui/privacy/issue-111220-2-tuple-struct-fields-projection.stderr
new file mode 100644
index 0000000..231a4da
--- /dev/null
+++ b/tests/ui/privacy/issue-111220-2-tuple-struct-fields-projection.stderr
@@ -0,0 +1,9 @@
+error[E0603]: tuple struct constructor `A` is private
+ --> $DIR/issue-111220-2-tuple-struct-fields-projection.rs:27:13
+ |
+LL | let Self(a) = self;
+ | ^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0603`.
diff --git a/tests/ui/privacy/issue-111220-tuple-struct-fields.rs b/tests/ui/privacy/issue-111220-tuple-struct-fields.rs
new file mode 100644
index 0000000..78d35fd
--- /dev/null
+++ b/tests/ui/privacy/issue-111220-tuple-struct-fields.rs
@@ -0,0 +1,46 @@
+mod b {
+ #[derive(Default)]
+ pub struct A(u32);
+}
+
+impl b::A {
+ fn inherent_bypass(&self) {
+ let Self(x) = self;
+ //~^ ERROR: tuple struct constructor `A` is private
+ println!("{x}");
+ }
+}
+
+pub trait B {
+ fn f(&self);
+}
+
+impl B for b::A {
+ fn f(&self) {
+ let Self(a) = self;
+ //~^ ERROR: tuple struct constructor `A` is private
+ println!("{}", a);
+ }
+}
+
+pub trait Projector {
+ type P;
+}
+
+impl Projector for () {
+ type P = b::A;
+}
+
+pub trait Bypass2 {
+ fn f2(&self);
+}
+
+impl Bypass2 for <() as Projector>::P {
+ fn f2(&self) {
+ let Self(a) = self;
+ //~^ ERROR: tuple struct constructor `A` is private
+ println!("{}", a);
+ }
+}
+
+fn main() {}
diff --git a/tests/ui/privacy/issue-111220-tuple-struct-fields.stderr b/tests/ui/privacy/issue-111220-tuple-struct-fields.stderr
new file mode 100644
index 0000000..17a3237
--- /dev/null
+++ b/tests/ui/privacy/issue-111220-tuple-struct-fields.stderr
@@ -0,0 +1,21 @@
+error[E0603]: tuple struct constructor `A` is private
+ --> $DIR/issue-111220-tuple-struct-fields.rs:8:13
+ |
+LL | let Self(x) = self;
+ | ^^^^^^^
+
+error[E0603]: tuple struct constructor `A` is private
+ --> $DIR/issue-111220-tuple-struct-fields.rs:20:13
+ |
+LL | let Self(a) = self;
+ | ^^^^^^^
+
+error[E0603]: tuple struct constructor `A` is private
+ --> $DIR/issue-111220-tuple-struct-fields.rs:40:13
+ |
+LL | let Self(a) = self;
+ | ^^^^^^^
+
+error: aborting due to 3 previous errors
+
+For more information about this error, try `rustc --explain E0603`.
diff --git a/tests/ui/resolve/hidden_glob_reexports.rs b/tests/ui/resolve/hidden_glob_reexports.rs
new file mode 100644
index 0000000..361243f
--- /dev/null
+++ b/tests/ui/resolve/hidden_glob_reexports.rs
@@ -0,0 +1,52 @@
+// check-pass
+
+pub mod upstream_a {
+ mod inner {
+ pub struct Foo {}
+ pub struct Bar {}
+ }
+
+ pub use self::inner::*;
+
+ struct Foo;
+ //~^ WARN private item shadows public glob re-export
+}
+
+pub mod upstream_b {
+ mod inner {
+ pub struct Foo {}
+ pub struct Qux {}
+ }
+
+ mod other {
+ pub struct Foo;
+ }
+
+ pub use self::inner::*;
+
+ use self::other::Foo;
+ //~^ WARN private item shadows public glob re-export
+}
+
+pub mod upstream_c {
+ mod no_def_id {
+ #![allow(non_camel_case_types)]
+ pub struct u8;
+ pub struct World;
+ }
+
+ pub use self::no_def_id::*;
+
+ use std::primitive::u8;
+ //~^ WARN private item shadows public glob re-export
+}
+
+// Downstream crate
+// mod downstream {
+// fn proof() {
+// let _ = crate::upstream_a::Foo;
+// let _ = crate::upstream_b::Foo;
+// }
+// }
+
+pub fn main() {}
diff --git a/tests/ui/resolve/hidden_glob_reexports.stderr b/tests/ui/resolve/hidden_glob_reexports.stderr
new file mode 100644
index 0000000..ddf7bcd
--- /dev/null
+++ b/tests/ui/resolve/hidden_glob_reexports.stderr
@@ -0,0 +1,31 @@
+warning: private item shadows public glob re-export
+ --> $DIR/hidden_glob_reexports.rs:11:5
+ |
+LL | pub use self::inner::*;
+ | -------------- the name `Foo` in the type namespace is supposed to be publicly re-exported here
+LL |
+LL | struct Foo;
+ | ^^^^^^^^^^^ but the private item here shadows it
+ |
+ = note: `#[warn(hidden_glob_reexports)]` on by default
+
+warning: private item shadows public glob re-export
+ --> $DIR/hidden_glob_reexports.rs:27:9
+ |
+LL | pub use self::inner::*;
+ | -------------- the name `Foo` in the type namespace is supposed to be publicly re-exported here
+LL |
+LL | use self::other::Foo;
+ | ^^^^^^^^^^^^^^^^ but the private item here shadows it
+
+warning: private item shadows public glob re-export
+ --> $DIR/hidden_glob_reexports.rs:40:9
+ |
+LL | pub use self::no_def_id::*;
+ | ------------------ the name `u8` in the type namespace is supposed to be publicly re-exported here
+LL |
+LL | use std::primitive::u8;
+ | ^^^^^^^^^^^^^^^^^^ but the private item here shadows it
+
+warning: 3 warnings emitted
+
diff --git a/tests/ui/unboxed-closures/unboxed-closure-sugar-wrong-number-number-type-parameters-1.rs b/tests/ui/unboxed-closures/unboxed-closure-sugar-wrong-number-number-type-parameters-1.rs
index a6c8631..e7f7fdc 100644
--- a/tests/ui/unboxed-closures/unboxed-closure-sugar-wrong-number-number-type-parameters-1.rs
+++ b/tests/ui/unboxed-closures/unboxed-closure-sugar-wrong-number-number-type-parameters-1.rs
@@ -2,7 +2,7 @@
trait One<A> { fn foo(&self) -> A; }
-fn foo(_: &dyn One()) //~ ERROR associated type `Output` not found for `One<()>`
+fn foo(_: &dyn One()) //~ ERROR associated type `Output` not found for `One`
{}
fn main() { }
diff --git a/tests/ui/unboxed-closures/unboxed-closure-sugar-wrong-number-number-type-parameters-1.stderr b/tests/ui/unboxed-closures/unboxed-closure-sugar-wrong-number-number-type-parameters-1.stderr
index 59e7bc8..e477247 100644
--- a/tests/ui/unboxed-closures/unboxed-closure-sugar-wrong-number-number-type-parameters-1.stderr
+++ b/tests/ui/unboxed-closures/unboxed-closure-sugar-wrong-number-number-type-parameters-1.stderr
@@ -1,4 +1,4 @@
-error[E0220]: associated type `Output` not found for `One<()>`
+error[E0220]: associated type `Output` not found for `One`
--> $DIR/unboxed-closure-sugar-wrong-number-number-type-parameters-1.rs:5:16
|
LL | fn foo(_: &dyn One())
diff --git a/tests/ui/unboxed-closures/unboxed-closure-sugar-wrong-number-number-type-parameters-3.stderr b/tests/ui/unboxed-closures/unboxed-closure-sugar-wrong-number-number-type-parameters-3.stderr
index 5d7fe3f..eb18b12 100644
--- a/tests/ui/unboxed-closures/unboxed-closure-sugar-wrong-number-number-type-parameters-3.stderr
+++ b/tests/ui/unboxed-closures/unboxed-closure-sugar-wrong-number-number-type-parameters-3.stderr
@@ -12,7 +12,7 @@
LL | trait Three<A,B,C> { fn dummy(&self) -> (A,B,C); }
| ^^^^^ - - -
-error[E0220]: associated type `Output` not found for `Three<(), [type error], [type error]>`
+error[E0220]: associated type `Output` not found for `Three`
--> $DIR/unboxed-closure-sugar-wrong-number-number-type-parameters-3.rs:5:16
|
LL | fn foo(_: &dyn Three())
diff --git a/triagebot.toml b/triagebot.toml
index d7cd3ea..c160c83 100644
--- a/triagebot.toml
+++ b/triagebot.toml
@@ -190,6 +190,7 @@
"src/stage0.json",
"src/tools/compiletest",
"src/tools/tidy",
+ "src/tools/rustdoc-gui-test",
]
[autolabel."T-infra"]
@@ -640,3 +641,4 @@
"/src/tools/rustdoc-themes" = ["rustdoc"]
"/src/tools/tidy" = ["bootstrap"]
"/src/tools/x" = ["bootstrap"]
+"/src/tools/rustdoc-gui-test" = ["bootstrap", "@ozkanonur"]