Rollup merge of #64119 - pietroalbini:validate-toolstate-maintainers, r=kennytm

ci: ensure all tool maintainers are assignable on issues

GitHub only allows people who are explicitly listed as collaborators on the repository, or who have commented on the issue/PR, to be assignees; creating an issue fails if a non-assignable person is assigned.

This adds an extra CI check to make sure all the people listed as tool maintainers can be assigned to toolstate issues. The check is skipped on PR builds, since those lack a valid token.
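For reference, the check boils down to asking the GitHub API whether each maintainer is assignable in the repository: `GET /repos/{owner}/{repo}/assignees/{username}` returns 204 for assignable users and 404 otherwise. A minimal sketch of that query (not the actual CI script; the `reqwest` dependency, the `TOOLSTATE_TOKEN` variable and the hard-coded maintainer list are illustrative assumptions):

```rust
use std::env;

/// Returns whether `user` can be assigned to issues in rust-lang/rust.
/// GitHub answers 204 No Content for assignable users and 404 otherwise.
/// Needs `reqwest` with the `blocking` feature enabled.
fn is_assignable(client: &reqwest::blocking::Client, user: &str) -> reqwest::Result<bool> {
    let url = format!("https://api.github.com/repos/rust-lang/rust/assignees/{}", user);
    let resp = client
        .get(url.as_str())
        .header("User-Agent", "toolstate-maintainer-check")
        // Hypothetical variable name; the real CI job provides its own secret.
        .bearer_auth(env::var("TOOLSTATE_TOKEN").unwrap_or_default())
        .send()?;
    Ok(resp.status().as_u16() == 204)
}

fn main() -> reqwest::Result<()> {
    let client = reqwest::blocking::Client::new();
    // Illustrative list; the real check reads the maintainers from the toolstate config.
    for maintainer in ["kennytm", "pietroalbini"].iter().copied() {
        if !is_assignable(&client, maintainer)? {
            eprintln!("error: {} cannot be assigned to toolstate issues", maintainer);
            std::process::exit(1);
        }
    }
    Ok(())
}
```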

r? @kennytm
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index 370e5cf..9e6ed92 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -117,7 +117,7 @@
 #![feature(allocator_internals)]
 #![feature(on_unimplemented)]
 #![feature(rustc_const_unstable)]
-#![feature(const_vec_new)]
+#![cfg_attr(bootstrap, feature(const_vec_new))]
 #![feature(slice_partition_dedup)]
 #![feature(maybe_uninit_extra, maybe_uninit_slice)]
 #![feature(alloc_layout_extra)]
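Context for the `cfg_attr(bootstrap, ...)` lines above: `bootstrap` is the cfg set when liballoc is built by the stage0 (previous beta) compiler, which still requires the old feature gates; the freshly built compiler no longer does. A generic illustration of the mechanism, using a made-up `old_api` cfg rather than the real `bootstrap` one:

```rust
// Apply an attribute only when a given cfg is set, e.g. `rustc --cfg old_api demo.rs`.
#[cfg_attr(old_api, allow(dead_code))]
fn helper() -> u32 {
    42
}

fn main() {
    // With `--cfg old_api` the helper goes unused, and the conditionally
    // applied `allow(dead_code)` silences the warning.
    #[cfg(not(old_api))]
    println!("helper() = {}", helper());
    #[cfg(old_api)]
    println!("built with --cfg old_api");
}
```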
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index cf025ee..ee75fc2 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -113,13 +113,38 @@
 }
 
 impl<T> RawVec<T, Global> {
+    /// HACK(Centril): This exists because `#[unstable]` `const fn`s needn't conform
+    /// to `min_const_fn` and so they cannot be called in `min_const_fn`s either.
+    ///
+    /// If you change `RawVec<T>::new` or dependencies, please take care to not
+    /// introduce anything that would truly violate `min_const_fn`.
+    ///
+    /// NOTE: We could avoid this hack and check conformance with some
+    /// `#[rustc_force_min_const_fn]` attribute which requires conformance
+    /// with `min_const_fn` but does not necessarily allow calling it in
+    /// `stable(...) const fn` / user code not enabling `foo` when
+    /// `#[rustc_const_unstable(feature = "foo", ..)]` is present.
+    pub const NEW: Self = Self::new();
+
     /// Creates the biggest possible `RawVec` (on the system heap)
     /// without allocating. If `T` has positive size, then this makes a
     /// `RawVec` with capacity `0`. If `T` is zero-sized, then it makes a
     /// `RawVec` with capacity `usize::MAX`. Useful for implementing
     /// delayed allocation.
     pub const fn new() -> Self {
-        Self::new_in(Global)
+        // FIXME(Centril): Reintegrate this with `fn new_in` when we can.
+
+        // `!0` is `usize::MAX`. This branch should be stripped at compile time.
+        // FIXME(mark-i-m): use this line when `if`s are allowed in `const`:
+        // let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
+
+        // `Unique::empty()` doubles as "unallocated" and "zero-sized allocation".
+        RawVec {
+            ptr: Unique::empty(),
+            // FIXME(mark-i-m): use `cap` when ifs are allowed in const
+            cap: [0, !0][(mem::size_of::<T>() == 0) as usize],
+            a: Global,
+        }
     }
 
     /// Creates a `RawVec` (on the system heap) with exactly the
diff --git a/src/liballoc/string.rs b/src/liballoc/string.rs
index b65f191..1166e7b 100644
--- a/src/liballoc/string.rs
+++ b/src/liballoc/string.rs
@@ -369,7 +369,7 @@
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[rustc_const_unstable(feature = "const_string_new")]
+    #[cfg_attr(bootstrap, rustc_const_unstable(feature = "const_string_new"))]
     pub const fn new() -> String {
         String { vec: Vec::new() }
     }
diff --git a/src/liballoc/vec.rs b/src/liballoc/vec.rs
index c513658..405969a 100644
--- a/src/liballoc/vec.rs
+++ b/src/liballoc/vec.rs
@@ -314,10 +314,10 @@
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[rustc_const_unstable(feature = "const_vec_new")]
+    #[cfg_attr(bootstrap, rustc_const_unstable(feature = "const_vec_new"))]
     pub const fn new() -> Vec<T> {
         Vec {
-            buf: RawVec::new(),
+            buf: RawVec::NEW,
             len: 0,
         }
     }
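The `RawVec::NEW` associated constant introduced above lets `Vec::new`, now a `const fn` callable on stable, obtain a `RawVec` without calling an `#[unstable]` `const fn`, which stable `const fn`s may not do under the `min_const_fn` rules. A standalone illustration of that shape with made-up types (`Inner`, `Wrapper`); note that in plain stable Rust both forms compile, since the restriction only applies once unstable const-qualification attributes are involved:

```rust
struct Inner {
    cap: usize,
}

impl Inner {
    // Stand-in for the constructor that (in liballoc) is an unstable const fn.
    const fn new() -> Inner {
        Inner { cap: 0 }
    }

    // The workaround: evaluate the constructor once and expose the result as an
    // associated constant, which a stable const fn can simply copy.
    const NEW: Inner = Inner::new();
}

struct Wrapper {
    inner: Inner,
}

impl Wrapper {
    // A stable `const fn` may use `Inner::NEW` (a constant) even where it would
    // not be allowed to call `Inner::new()` directly.
    pub const fn new() -> Wrapper {
        Wrapper { inner: Inner::NEW }
    }
}

const W: Wrapper = Wrapper::new();

fn main() {
    assert_eq!(W.inner.cap, 0);
}
```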
diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs
index 57ddaa4..3f53f84 100644
--- a/src/librustc_mir/const_eval.rs
+++ b/src/librustc_mir/const_eval.rs
@@ -134,9 +134,8 @@
     ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
     cid: GlobalId<'tcx>,
     body: &'mir mir::Body<'tcx>,
-    param_env: ty::ParamEnv<'tcx>,
 ) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
-    debug!("eval_body_using_ecx: {:?}, {:?}", cid, param_env);
+    debug!("eval_body_using_ecx: {:?}, {:?}", cid, ecx.param_env);
     let tcx = ecx.tcx.tcx;
     let layout = ecx.layout_of(body.return_ty().subst(tcx, cid.instance.substs))?;
     assert!(!layout.is_unsized());
@@ -162,7 +161,6 @@
         ecx,
         cid.instance.def_id(),
         ret,
-        param_env,
     )?;
 
     debug!("eval_body_using_ecx done: {:?}", *ret);
@@ -658,7 +656,7 @@
 
     let res = ecx.load_mir(cid.instance.def, cid.promoted);
     res.and_then(
-        |body| eval_body_using_ecx(&mut ecx, cid, body, key.param_env)
+        |body| eval_body_using_ecx(&mut ecx, cid, body)
     ).and_then(|place| {
         Ok(RawConst {
             alloc_id: place.ptr.assert_ptr().alloc_id,
diff --git a/src/librustc_mir/interpret/intern.rs b/src/librustc_mir/interpret/intern.rs
index 4cbbc0f..95647ce 100644
--- a/src/librustc_mir/interpret/intern.rs
+++ b/src/librustc_mir/interpret/intern.rs
@@ -3,7 +3,7 @@
 //! After a const evaluation has computed a value, before we destroy the const evaluator's session
 //! memory, we need to extract all memory allocations to the global memory pool so they stay around.
 
-use rustc::ty::{Ty, TyCtxt, ParamEnv, self};
+use rustc::ty::{Ty, self};
 use rustc::mir::interpret::{InterpResult, ErrorHandled};
 use rustc::hir;
 use rustc::hir::def_id::DefId;
@@ -11,32 +11,29 @@
 use rustc_data_structures::fx::FxHashSet;
 
 use syntax::ast::Mutability;
-use syntax_pos::Span;
 
 use super::{
-    ValueVisitor, MemoryKind, Pointer, AllocId, MPlaceTy, Scalar,
+    ValueVisitor, MemoryKind, AllocId, MPlaceTy, Scalar,
 };
 use crate::const_eval::{CompileTimeInterpreter, CompileTimeEvalContext};
 
 struct InternVisitor<'rt, 'mir, 'tcx> {
-    /// previously encountered safe references
-    ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, Mutability, InternMode)>,
+    /// The evaluation context from which we intern.
     ecx: &'rt mut CompileTimeEvalContext<'mir, 'tcx>,
-    param_env: ParamEnv<'tcx>,
+    /// Previously encountered safe references.
+    ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, Mutability, InternMode)>,
+    /// A list of all encountered allocations. After type-based interning, we traverse this list to
+    /// also intern allocations that are only referenced by a raw pointer or inside a union.
+    leftover_allocations: &'rt mut FxHashSet<AllocId>,
     /// The root node of the value that we're looking at. This field is never mutated and only used
     /// for sanity assertions that will ICE when `const_qualif` screws up.
     mode: InternMode,
     /// This field stores the mutability of the value *currently* being checked.
-    /// It is set to mutable when an `UnsafeCell` is encountered
-    /// When recursing across a reference, we don't recurse but store the
-    /// value to be checked in `ref_tracking` together with the mutability at which we are checking
-    /// the value.
-    /// When encountering an immutable reference, we treat everything as immutable that is behind
-    /// it.
+    /// When encountering a mutable reference, we determine the pointee mutability
+    /// taking into account the mutability of the context: `& &mut i32` is entirely immutable,
+    /// despite the nested mutable reference!
+    /// The field gets updated when an `UnsafeCell` is encountered.
     mutability: Mutability,
-    /// A list of all encountered relocations. After type-based interning, we traverse this list to
-    /// also intern allocations that are only referenced by a raw pointer or inside a union.
-    leftover_relocations: &'rt mut FxHashSet<AllocId>,
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
@@ -45,9 +42,10 @@
     /// `static`. In a `static mut` we start out as mutable and thus can also contain further `&mut`
     /// that will actually be treated as mutable.
     Static,
-    /// UnsafeCell is OK in the value of a constant, but not behind references in a constant
+    /// UnsafeCell is OK in the value of a constant: `const FOO: Cell<i32> = Cell::new(0)` creates
+    /// a new cell every time it is used.
     ConstBase,
-    /// `UnsafeCell` ICEs
+    /// `UnsafeCell` ICEs.
     Const,
 }
 
@@ -55,48 +53,100 @@
 /// into the memory of other constants or statics
 struct IsStaticOrFn;
 
+/// Intern an allocation without looking at its children.
+/// `mode` is the mode of the environment where we found this pointer.
+/// `mutability` is the mutability of the place to be interned; even if that says
+/// `immutable`, things might become mutable if `ty` is not frozen.
+/// `ty` can be `None` if there is no potential interior mutability
+/// to account for (e.g. for vtables).
+fn intern_shallow<'rt, 'mir, 'tcx>(
+    ecx: &'rt mut CompileTimeEvalContext<'mir, 'tcx>,
+    leftover_allocations: &'rt mut FxHashSet<AllocId>,
+    mode: InternMode,
+    alloc_id: AllocId,
+    mutability: Mutability,
+    ty: Option<Ty<'tcx>>,
+) -> InterpResult<'tcx, Option<IsStaticOrFn>> {
+    trace!(
+        "InternVisitor::intern {:?} with {:?}",
+        alloc_id, mutability,
+    );
+    // remove allocation
+    let tcx = ecx.tcx;
+    let memory = ecx.memory_mut();
+    let (kind, mut alloc) = match memory.alloc_map.remove(&alloc_id) {
+        Some(entry) => entry,
+        None => {
+            // Pointer not found in local memory map. It is either a pointer to the global
+            // map, or dangling.
+            // If the pointer is dangling (neither in local nor global memory), we leave it
+            // to validation to error. The `delay_span_bug` ensures that we don't forget such
+            // a check in validation.
+            if tcx.alloc_map.lock().get(alloc_id).is_none() {
+                tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer");
+            }
+            // treat dangling pointers like other statics
+            // just to stop trying to recurse into them
+            return Ok(Some(IsStaticOrFn));
+        },
+    };
+    // This match is just a canary for future changes to `MemoryKind`, which most likely need
+    // changes in this function.
+    match kind {
+        MemoryKind::Stack | MemoryKind::Vtable => {},
+    }
+    // Set allocation mutability as appropriate. This is used by LLVM to put things into
+    // read-only memory, and also by Miri when evaluating other constants/statics that
+    // access this one.
+    if mode == InternMode::Static {
+        // When `ty` is `None`, we assume no interior mutability.
+        let frozen = ty.map_or(true, |ty| ty.is_freeze(
+            ecx.tcx.tcx,
+            ecx.param_env,
+            ecx.tcx.span,
+        ));
+        // For statics, allocation mutability is the combination of the place mutability and
+        // the type mutability.
+        // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere.
+        if mutability == Mutability::Immutable && frozen {
+            alloc.mutability = Mutability::Immutable;
+        } else {
+            // Just making sure we are not "upgrading" an immutable allocation to mutable.
+            assert_eq!(alloc.mutability, Mutability::Mutable);
+        }
+    } else {
+        // We *could* be non-frozen at `ConstBase`, for constants like `Cell::new(0)`.
+        // But we still intern that as immutable as the memory cannot be changed once the
+        // initial value was computed.
+        // Constants are never mutable.
+        assert_eq!(
+            mutability, Mutability::Immutable,
+            "Something went very wrong: mutability requested for a constant"
+        );
+        alloc.mutability = Mutability::Immutable;
+    };
+    // link the alloc id to the actual allocation
+    let alloc = tcx.intern_const_alloc(alloc);
+    leftover_allocations.extend(alloc.relocations().iter().map(|&(_, ((), reloc))| reloc));
+    tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc);
+    Ok(None)
+}
+
 impl<'rt, 'mir, 'tcx> InternVisitor<'rt, 'mir, 'tcx> {
-    /// Intern an allocation without looking at its children
     fn intern_shallow(
         &mut self,
-        ptr: Pointer,
+        alloc_id: AllocId,
         mutability: Mutability,
+        ty: Option<Ty<'tcx>>,
     ) -> InterpResult<'tcx, Option<IsStaticOrFn>> {
-        trace!(
-            "InternVisitor::intern {:?} with {:?}",
-            ptr, mutability,
-        );
-        // remove allocation
-        let tcx = self.ecx.tcx;
-        let memory = self.ecx.memory_mut();
-        let (kind, mut alloc) = match memory.alloc_map.remove(&ptr.alloc_id) {
-            Some(entry) => entry,
-            None => {
-                // if the pointer is dangling (neither in local nor global memory), we leave it
-                // to validation to error. The `delay_span_bug` ensures that we don't forget such
-                // a check in validation.
-                if tcx.alloc_map.lock().get(ptr.alloc_id).is_none() {
-                    tcx.sess.delay_span_bug(self.ecx.tcx.span, "tried to intern dangling pointer");
-                }
-                // treat dangling pointers like other statics
-                // just to stop trying to recurse into them
-                return Ok(Some(IsStaticOrFn));
-            },
-        };
-        // This match is just a canary for future changes to `MemoryKind`, which most likely need
-        // changes in this function.
-        match kind {
-            MemoryKind::Stack | MemoryKind::Vtable => {},
-        }
-        // Ensure llvm knows to only put this into immutable memory if the value is immutable either
-        // by being behind a reference or by being part of a static or const without interior
-        // mutability
-        alloc.mutability = mutability;
-        // link the alloc id to the actual allocation
-        let alloc = tcx.intern_const_alloc(alloc);
-        self.leftover_relocations.extend(alloc.relocations().iter().map(|&(_, ((), reloc))| reloc));
-        tcx.alloc_map.lock().set_alloc_id_memory(ptr.alloc_id, alloc);
-        Ok(None)
+        intern_shallow(
+            self.ecx,
+            self.leftover_allocations,
+            self.mode,
+            alloc_id,
+            mutability,
+            ty,
+        )
     }
 }
 
@@ -119,14 +169,16 @@
     ) -> InterpResult<'tcx> {
         if let Some(def) = mplace.layout.ty.ty_adt_def() {
             if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() {
-                // We are crossing over an `UnsafeCell`, we can mutate again
+                // We are crossing over an `UnsafeCell`, so we can mutate again. This means
+                // that references we encounter inside here are interned as pointing to
+                // mutable allocations.
                 let old = std::mem::replace(&mut self.mutability, Mutability::Mutable);
                 assert_ne!(
                     self.mode, InternMode::Const,
                     "UnsafeCells are not allowed behind references in constants. This should have \
                     been prevented statically by const qualification. If this were allowed one \
-                    would be able to change a constant at one use site and other use sites may \
-                    arbitrarily decide to change, too.",
+                    would be able to change a constant at one use site and other use sites could \
+                    observe that mutation.",
                 );
                 let walked = self.walk_aggregate(mplace, fields);
                 self.mutability = old;
@@ -145,12 +197,13 @@
             // Handle trait object vtables
             if let Ok(meta) = value.to_meta() {
                 if let ty::Dynamic(..) =
-                    self.ecx.tcx.struct_tail_erasing_lifetimes(referenced_ty, self.param_env).sty
+                    self.ecx.tcx.struct_tail_erasing_lifetimes(
+                        referenced_ty, self.ecx.param_env).sty
                 {
                     if let Ok(vtable) = meta.unwrap().to_ptr() {
                         // explitly choose `Immutable` here, since vtables are immutable, even
                         // if the reference of the fat pointer is mutable
-                        self.intern_shallow(vtable, Mutability::Immutable)?;
+                        self.intern_shallow(vtable.alloc_id, Mutability::Immutable, None)?;
                     }
                 }
             }
@@ -177,7 +230,7 @@
                     (InternMode::Const, hir::Mutability::MutMutable) => {
                         match referenced_ty.sty {
                             ty::Array(_, n)
-                                if n.eval_usize(self.ecx.tcx.tcx, self.param_env) == 0 => {}
+                                if n.eval_usize(self.ecx.tcx.tcx, self.ecx.param_env) == 0 => {}
                             ty::Slice(_)
                                 if value.to_meta().unwrap().unwrap().to_usize(self.ecx)? == 0 => {}
                             _ => bug!("const qualif failed to prevent mutable references"),
@@ -195,21 +248,13 @@
                     (Mutability::Mutable, hir::Mutability::MutMutable) => Mutability::Mutable,
                     _ => Mutability::Immutable,
                 };
-                // Compute the mutability of the allocation
-                let intern_mutability = intern_mutability(
-                    self.ecx.tcx.tcx,
-                    self.param_env,
-                    mplace.layout.ty,
-                    self.ecx.tcx.span,
-                    mutability,
-                );
                 // Recursing behind references changes the intern mode for constants in order to
                 // cause assertions to trigger if we encounter any `UnsafeCell`s.
                 let mode = match self.mode {
                     InternMode::ConstBase => InternMode::Const,
                     other => other,
                 };
-                match self.intern_shallow(ptr, intern_mutability)? {
+                match self.intern_shallow(ptr.alloc_id, mutability, Some(mplace.layout.ty))? {
                     // No need to recurse, these are interned already and statics may have
                     // cycles, so we don't want to recurse there
                     Some(IsStaticOrFn) => {},
@@ -224,69 +269,45 @@
     }
 }
 
-/// Figure out the mutability of the allocation.
-/// Mutable if it has interior mutability *anywhere* in the type.
-fn intern_mutability<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    param_env: ParamEnv<'tcx>,
-    ty: Ty<'tcx>,
-    span: Span,
-    mutability: Mutability,
-) -> Mutability {
-    let has_interior_mutability = !ty.is_freeze(tcx, param_env, span);
-    if has_interior_mutability {
-        Mutability::Mutable
-    } else {
-        mutability
-    }
-}
-
 pub fn intern_const_alloc_recursive(
     ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
     def_id: DefId,
     ret: MPlaceTy<'tcx>,
-    // FIXME(oli-obk): can we scrap the param env? I think we can, the final value of a const eval
-    // must always be monomorphic, right?
-    param_env: ty::ParamEnv<'tcx>,
 ) -> InterpResult<'tcx> {
     let tcx = ecx.tcx;
     // this `mutability` is the mutability of the place, ignoring the type
-    let (mutability, base_intern_mode) = match tcx.static_mutability(def_id) {
+    let (base_mutability, base_intern_mode) = match tcx.static_mutability(def_id) {
         Some(hir::Mutability::MutImmutable) => (Mutability::Immutable, InternMode::Static),
-        None => (Mutability::Immutable, InternMode::ConstBase),
         // `static mut` doesn't care about interior mutability, it's mutable anyway
         Some(hir::Mutability::MutMutable) => (Mutability::Mutable, InternMode::Static),
+        // consts, promoteds. FIXME: what about array lengths, array initializers?
+        None => (Mutability::Immutable, InternMode::ConstBase),
     };
 
-    // type based interning
-    let mut ref_tracking = RefTracking::new((ret, mutability, base_intern_mode));
-    let leftover_relocations = &mut FxHashSet::default();
-
-    // This mutability is the combination of the place mutability and the type mutability. If either
-    // is mutable, `alloc_mutability` is mutable. This exists because the entire allocation needs
-    // to be mutable if it contains an `UnsafeCell` anywhere. The other `mutability` exists so that
-    // the visitor does not treat everything outside the `UnsafeCell` as mutable.
-    let alloc_mutability = intern_mutability(
-        tcx.tcx, param_env, ret.layout.ty, tcx.span, mutability,
-    );
+    // Type based interning.
+    // `ref_tracking` tracks typed references we have seen and still need to crawl for
+    // more typed information inside them.
+    // `leftover_allocations` collects *all* allocations we see, because some might not
+    // be available in a typed way. They get interned at the end.
+    let mut ref_tracking = RefTracking::new((ret, base_mutability, base_intern_mode));
+    let leftover_allocations = &mut FxHashSet::default();
 
     // start with the outermost allocation
-    InternVisitor {
-        ref_tracking: &mut ref_tracking,
+    intern_shallow(
         ecx,
-        mode: base_intern_mode,
-        leftover_relocations,
-        param_env,
-        mutability,
-    }.intern_shallow(ret.ptr.to_ptr()?, alloc_mutability)?;
+        leftover_allocations,
+        base_intern_mode,
+        ret.ptr.to_ptr()?.alloc_id,
+        base_mutability,
+        Some(ret.layout.ty)
+    )?;
 
     while let Some(((mplace, mutability, mode), _)) = ref_tracking.todo.pop() {
         let interned = InternVisitor {
             ref_tracking: &mut ref_tracking,
             ecx,
             mode,
-            leftover_relocations,
-            param_env,
+            leftover_allocations,
             mutability,
         }.visit_value(mplace);
         if let Err(error) = interned {
@@ -309,15 +330,23 @@
     // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw
     // pointers, ... So we can't intern them according to their type rules
 
-    let mut todo: Vec<_> = leftover_relocations.iter().cloned().collect();
+    let mut todo: Vec<_> = leftover_allocations.iter().cloned().collect();
     while let Some(alloc_id) = todo.pop() {
-        if let Some((_, alloc)) = ecx.memory_mut().alloc_map.remove(&alloc_id) {
-            // We can't call the `intern` method here, as its logic is tailored to safe references.
-            // So we hand-roll the interning logic here again
+        if let Some((_, mut alloc)) = ecx.memory_mut().alloc_map.remove(&alloc_id) {
+            // We can't call the `intern_shallow` method here, as its logic is tailored to safe
+            // references and to the `leftover_allocations` set (here we only have a todo list).
+            // So we hand-roll the interning logic here again.
+            if base_intern_mode != InternMode::Static {
+                // If it's not a static, it *must* be immutable.
+                // We cannot have mutable memory inside a constant.
+                // FIXME: ideally we would assert that they already are immutable, to double-
+                // check our static checks.
+                alloc.mutability = Mutability::Immutable;
+            }
             let alloc = tcx.intern_const_alloc(alloc);
             tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc);
             for &(_, ((), reloc)) in alloc.relocations().iter() {
-                if leftover_relocations.insert(reloc) {
+                if leftover_allocations.insert(reloc) {
                     todo.push(reloc);
                 }
             }
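Taken together, the `intern.rs` changes restructure `intern_const_alloc_recursive` into two phases: a type-driven walk that interns everything reachable through references while recording every allocation it sees in `leftover_allocations`, followed by a sweep that interns whatever is only reachable through raw pointers, unions or padding. A heavily condensed sketch of that shape, with stand-in types instead of the real rustc ones (the typed walk is reduced to a plain graph traversal and the mutability rules are simplified):

```rust
use std::collections::{HashMap, HashSet};

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct AllocId(u32);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Mutability { Mutable, Immutable }

/// Stand-in for the evaluator's local memory: each allocation knows which
/// other allocations its relocations point to.
struct LocalMemory { allocs: HashMap<AllocId, Vec<AllocId>> }

/// Stand-in for `tcx.alloc_map`, the global interned memory.
struct GlobalMemory { allocs: HashMap<AllocId, (Vec<AllocId>, Mutability)> }

fn intern_recursive(
    local: &mut LocalMemory,
    global: &mut GlobalMemory,
    root: AllocId,
    is_static: bool,
) {
    // Phase 1: the typed walk (`RefTracking` + `InternVisitor` in the real code).
    // Every relocation we see is also recorded in `leftover_allocations`.
    let mut leftover_allocations: HashSet<AllocId> = HashSet::new();
    let mut typed_todo = vec![root];
    while let Some(id) = typed_todo.pop() {
        if let Some(children) = local.allocs.remove(&id) {
            leftover_allocations.extend(children.iter().copied());
            // Simplified: the real `intern_shallow` combines place mutability,
            // the type's freeze-ness and the intern mode here.
            global.allocs.insert(id, (children.clone(), Mutability::Immutable));
            typed_todo.extend(children);
        }
        // Already-global or dangling allocations are not recursed into.
    }

    // Phase 2: sweep whatever the typed walk could not classify, i.e. memory
    // only reachable through raw pointers, unions or padding.
    let mut todo: Vec<AllocId> = leftover_allocations.iter().copied().collect();
    while let Some(id) = todo.pop() {
        if let Some(children) = local.allocs.remove(&id) {
            for &child in &children {
                if leftover_allocations.insert(child) {
                    todo.push(child);
                }
            }
            // Leftovers may stay mutable inside a `static`, but are forced
            // immutable in a constant: there is no mutable memory in consts.
            let m = if is_static { Mutability::Mutable } else { Mutability::Immutable };
            global.allocs.insert(id, (children, m));
        }
    }
}

fn main() {
    // Tiny demo: a root allocation pointing at one child allocation.
    let mut local = LocalMemory { allocs: HashMap::new() };
    let mut global = GlobalMemory { allocs: HashMap::new() };
    local.allocs.insert(AllocId(0), vec![AllocId(1)]);
    local.allocs.insert(AllocId(1), vec![]);
    intern_recursive(&mut local, &mut global, AllocId(0), false);
    assert_eq!(global.allocs.len(), 2);
}
```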
diff --git a/src/test/ui/collections-const-new.rs b/src/test/ui/collections-const-new.rs
index e01b0df..a93f9a1 100644
--- a/src/test/ui/collections-const-new.rs
+++ b/src/test/ui/collections-const-new.rs
@@ -1,15 +1,11 @@
-// run-pass
+// check-pass
 
-#![allow(dead_code)]
 // Test several functions can be used for constants
 // 1. Vec::new()
 // 2. String::new()
 
-#![feature(const_vec_new)]
-#![feature(const_string_new)]
-
 const MY_VEC: Vec<usize> = Vec::new();
 
 const MY_STRING: String = String::new();
 
-pub fn main() {}
+fn main() {}
diff --git a/src/test/ui/consts/miri_unleashed/feature-gate-unleash_the_miri_inside_of_you.rs b/src/test/ui/consts/miri_unleashed/feature-gate-unleash_the_miri_inside_of_you.rs
index 5fb9253..8b17f68 100644
--- a/src/test/ui/consts/miri_unleashed/feature-gate-unleash_the_miri_inside_of_you.rs
+++ b/src/test/ui/consts/miri_unleashed/feature-gate-unleash_the_miri_inside_of_you.rs
@@ -14,8 +14,9 @@
 impl Foo<u32> for () {
     const X: u32 = 42;
 }
+
 impl Foo<Vec<u32>> for String {
-    const X: Vec<u32> = Vec::new(); //~ ERROR not yet stable as a const fn
+    const X: Vec<u32> = Vec::new();
 }
 
 impl Bar<u32, ()> for () {}
diff --git a/src/test/ui/consts/miri_unleashed/feature-gate-unleash_the_miri_inside_of_you.stderr b/src/test/ui/consts/miri_unleashed/feature-gate-unleash_the_miri_inside_of_you.stderr
index c56ebf6..5bc7b70 100644
--- a/src/test/ui/consts/miri_unleashed/feature-gate-unleash_the_miri_inside_of_you.stderr
+++ b/src/test/ui/consts/miri_unleashed/feature-gate-unleash_the_miri_inside_of_you.stderr
@@ -4,13 +4,5 @@
 LL |     const F: u32 = (U::X, 42).1;
    |                    ^^^^^^^^^^ constants cannot evaluate destructors
 
-error: `std::vec::Vec::<T>::new` is not yet stable as a const fn
-  --> $DIR/feature-gate-unleash_the_miri_inside_of_you.rs:18:25
-   |
-LL |     const X: Vec<u32> = Vec::new();
-   |                         ^^^^^^^^^^
-   |
-   = help: add `#![feature(const_vec_new)]` to the crate attributes to enable
-
-error: aborting due to 2 previous errors
+error: aborting due to previous error
 
diff --git a/src/test/ui/consts/miri_unleashed/mutable_const.rs b/src/test/ui/consts/miri_unleashed/mutable_const.rs
new file mode 100644
index 0000000..b476e04
--- /dev/null
+++ b/src/test/ui/consts/miri_unleashed/mutable_const.rs
@@ -0,0 +1,20 @@
+// compile-flags: -Zunleash-the-miri-inside-of-you
+
+#![feature(const_raw_ptr_deref)]
+#![deny(const_err)]
+
+use std::cell::UnsafeCell;
+
+// make sure we do not just intern this as mutable
+const MUTABLE_BEHIND_RAW: *mut i32 = &UnsafeCell::new(42) as *const _ as *mut _;
+
+const MUTATING_BEHIND_RAW: () = {
+    // Test that `MUTABLE_BEHIND_RAW` is actually immutable, by doing this at const time.
+    unsafe {
+        *MUTABLE_BEHIND_RAW = 99 //~ WARN skipping const checks
+        //~^ ERROR any use of this value will cause an error
+        //~^^ tried to modify constant memory
+    }
+};
+
+fn main() {}
diff --git a/src/test/ui/consts/miri_unleashed/mutable_const.stderr b/src/test/ui/consts/miri_unleashed/mutable_const.stderr
new file mode 100644
index 0000000..507d4823
--- /dev/null
+++ b/src/test/ui/consts/miri_unleashed/mutable_const.stderr
@@ -0,0 +1,27 @@
+warning: skipping const checks
+  --> $DIR/mutable_const.rs:14:9
+   |
+LL |         *MUTABLE_BEHIND_RAW = 99
+   |         ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: any use of this value will cause an error
+  --> $DIR/mutable_const.rs:14:9
+   |
+LL | / const MUTATING_BEHIND_RAW: () = {
+LL | |     // Test that `MUTABLE_BEHIND_RAW` is actually immutable, by doing this at const time.
+LL | |     unsafe {
+LL | |         *MUTABLE_BEHIND_RAW = 99
+   | |         ^^^^^^^^^^^^^^^^^^^^^^^^ tried to modify constant memory
+...  |
+LL | |     }
+LL | | };
+   | |__-
+   |
+note: lint level defined here
+  --> $DIR/mutable_const.rs:4:9
+   |
+LL | #![deny(const_err)]
+   |         ^^^^^^^^^
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/consts/miri_unleashed/mutable_references_ice.stderr b/src/test/ui/consts/miri_unleashed/mutable_references_ice.stderr
index 82569e2..28cf353 100644
--- a/src/test/ui/consts/miri_unleashed/mutable_references_ice.stderr
+++ b/src/test/ui/consts/miri_unleashed/mutable_references_ice.stderr
@@ -6,7 +6,7 @@
 
 thread 'rustc' panicked at 'assertion failed: `(left != right)`
   left: `Const`,
- right: `Const`: UnsafeCells are not allowed behind references in constants. This should have been prevented statically by const qualification. If this were allowed one would be able to change a constant at one use site and other use sites may arbitrarily decide to change, too.', src/librustc_mir/interpret/intern.rs:LL:CC
+ right: `Const`: UnsafeCells are not allowed behind references in constants. This should have been prevented statically by const qualification. If this were allowed one would be able to change a constant at one use site and other use sites could observe that mutation.', src/librustc_mir/interpret/intern.rs:LL:CC
 note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace.
 
 error: internal compiler error: unexpected panic