Auto merge of #84088 - m-ou-se:stabilize-option-insert, r=m-ou-se
Stabilize option_insert.
FCP finished here: https://github.com/rust-lang/rust/issues/78271#issuecomment-817201319
diff --git a/Cargo.lock b/Cargo.lock
index 2b7fbf1..8fec4bf 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1909,9 +1909,9 @@
[[package]]
name = "libc"
-version = "0.2.88"
+version = "0.2.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "03b07a082330a35e43f63177cc01689da34fbffa0105e1246cf0311472cac73a"
+checksum = "9385f66bf6105b241aa65a61cb923ef20efc665cb9f9bb50ac2f0c4b7f378d41"
dependencies = [
"rustc-std-workspace-core",
]
@@ -2492,6 +2492,7 @@
name = "panic_abort"
version = "0.0.0"
dependencies = [
+ "alloc",
"cfg-if 0.1.10",
"compiler_builtins",
"core",
@@ -2875,9 +2876,9 @@
[[package]]
name = "racer"
-version = "2.1.44"
+version = "2.1.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7972a124e2b24dce35eb19f81eced829faec0e8227a7d744bbb1089934d05399"
+checksum = "15408926f6207643150e0fc2c54a75a689b192df03ac6c59d42ea99c6782c7f7"
dependencies = [
"bitflags",
"clap",
@@ -3117,6 +3118,7 @@
dependencies = [
"anyhow",
"cargo",
+ "cargo-util",
"cargo_metadata 0.8.2",
"clippy_lints",
"crossbeam-channel",
@@ -3262,18 +3264,19 @@
[[package]]
name = "rustc-ap-rustc_arena"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "93575affa286089b92c8208aea4e60fe9fdd251a619a09b566d6e4e2cc123212"
+checksum = "259cca0e975ecb05fd289ace45280c30ff792efc04e856a7f18b7fc86a3cb610"
dependencies = [
+ "rustc-ap-rustc_data_structures",
"smallvec 1.6.1",
]
[[package]]
name = "rustc-ap-rustc_ast"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c700f2d3b25aa8d6446dd2936048737b08b2d547bd86e2a70afa9fee4e9c522"
+checksum = "bb9be435d50c88e94bbad6ea468c8680b52c5043bb298ab8058d05251717f8f8"
dependencies = [
"bitflags",
"rustc-ap-rustc_data_structures",
@@ -3288,9 +3291,9 @@
[[package]]
name = "rustc-ap-rustc_ast_passes"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e01f63e5259ee397bbe2e395d34a2e6b6b24f10c184d30fbbee1dcd7117f4f3"
+checksum = "75246dd1a95a57f7767e53bde3971baa2d948078e180564709f5ea46cf863ddd"
dependencies = [
"itertools 0.9.0",
"rustc-ap-rustc_ast",
@@ -3307,9 +3310,9 @@
[[package]]
name = "rustc-ap-rustc_ast_pretty"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "99d644c69c55deb24257cb0cb5261265fe5134f6f545e9062e1c18b07e422c68"
+checksum = "79bede0b44bed453fd0034b7ba492840391f6486bf3e17a1af12922f0b98d4cc"
dependencies = [
"rustc-ap-rustc_ast",
"rustc-ap-rustc_span",
@@ -3318,9 +3321,9 @@
[[package]]
name = "rustc-ap-rustc_attr"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "797fc68816d5396870f04e03d35164f5275d2502403239d4caec7ce063683f41"
+checksum = "84a92a4a34b996694ca2dab70361c60d2d48c07adce57e8155b7ec75e069e3ea"
dependencies = [
"rustc-ap-rustc_ast",
"rustc-ap-rustc_ast_pretty",
@@ -3336,9 +3339,9 @@
[[package]]
name = "rustc-ap-rustc_data_structures"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d840c4e6198b57982a54543ae604d634c7ceb7107f0c75970b88ebaff077ac5"
+checksum = "9cbfa7f82517a1b2efe7106c864c3f930b1da8aff07a27fd317af2f36522fd2e"
dependencies = [
"arrayvec",
"bitflags",
@@ -3367,9 +3370,9 @@
[[package]]
name = "rustc-ap-rustc_errors"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f2f99bdc828ad417636d9016611dc9047b641fadcb7f533b8b0e9616d81f90b"
+checksum = "58a272a5101843bcb40900cc9ccf80ecfec62830bb1f4a242986da4a34c0da89"
dependencies = [
"annotate-snippets 0.8.0",
"atty",
@@ -3387,9 +3390,9 @@
[[package]]
name = "rustc-ap-rustc_expand"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "27008b4c7ded287bf5cb20b84d6d5a6566329140f2e2bc8f6e68b37a34898595"
+checksum = "3bc7988f3facf2402fe057405ef0f7fbacc7e7a483da25e35a35ac09491fbbfb"
dependencies = [
"rustc-ap-rustc_ast",
"rustc-ap-rustc_ast_passes",
@@ -3411,9 +3414,9 @@
[[package]]
name = "rustc-ap-rustc_feature"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6bb47b53670f1263ed1389dda932d5b5a6daf98579c1f076c2ee7d7f22709b7c"
+checksum = "5e931cd1580ae60c5737d3fa57633034935e885414e794d83b3e52a81021985c"
dependencies = [
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_span",
@@ -3421,21 +3424,21 @@
[[package]]
name = "rustc-ap-rustc_fs_util"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cdaddc4bae5ffab17037553e172f5014686db600050429aaa60aec14fe780e84"
+checksum = "8fe9422e10d5b441d2a78202667bc85d7cf713a087b9ae6cdea0dfc825d79f07"
[[package]]
name = "rustc-ap-rustc_graphviz"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d73c72543311e88786f7380a3bfd946395579c1a0c0441a879a97fcdea79130"
+checksum = "ffffffdef9fd51db69c1d4c045ced8aaab999be5627f2d3a0ce020d74c1f1e50"
[[package]]
name = "rustc-ap-rustc_index"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bba8d74ed4bad44a5b4264cf2a51ad0bd458ed56caa5bb090e989b8002ec6327"
+checksum = "7f6f53afc4f7111c82295cb7ea3878f520bbac6a2c5a12e125b4ca9156498cff"
dependencies = [
"arrayvec",
"rustc-ap-rustc_macros",
@@ -3444,18 +3447,18 @@
[[package]]
name = "rustc-ap-rustc_lexer"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3a030d00510966cd31e13dca5e6c1bd40d303a932c54eca40e854188bca8c49e"
+checksum = "8056b05346dff7e39164d0434c6ec443a14ab5fbf6221bd1a56e5abbeae5f60c"
dependencies = [
"unicode-xid",
]
[[package]]
name = "rustc-ap-rustc_lint_defs"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bdff95da1b5d979183ef5c285817ba6cc67a1ac11296ef1e87b1b5bbaf57213c"
+checksum = "364c3fb7b3cbdfe3fbb21d4078ff2cb3c58df63cda27995f8b064d21ee6dede5"
dependencies = [
"rustc-ap-rustc_ast",
"rustc-ap-rustc_data_structures",
@@ -3468,9 +3471,9 @@
[[package]]
name = "rustc-ap-rustc_macros"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fe3ed7401bf6f5a256d58cd0e1c1e2e77eec25e60a0d7ad75313962edcb4e396"
+checksum = "4607d6879cae3bae4d0369ca4b3a7510fd6295ac32eec088ac975208ba96ca45"
dependencies = [
"proc-macro2",
"quote",
@@ -3480,9 +3483,9 @@
[[package]]
name = "rustc-ap-rustc_parse"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "609a624baffa3f99847d57d30c96ee6732ce0912f8df4be239b6fd91533910d6"
+checksum = "78d22889bff7ca2346037c9df7ea55c66ffb714f5b50fb62b41975f8ac7a2d70"
dependencies = [
"bitflags",
"rustc-ap-rustc_ast",
@@ -3500,9 +3503,9 @@
[[package]]
name = "rustc-ap-rustc_serialize"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc232e2a351d8131c8f1386ce372ee22ef7b1b0b897bbf817a8ce4792029a564"
+checksum = "d33c710120953c0214f47a6caf42064d7e241003b4af36c98a6d6156e70335f1"
dependencies = [
"indexmap",
"smallvec 1.6.1",
@@ -3510,9 +3513,9 @@
[[package]]
name = "rustc-ap-rustc_session"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "18acf94c820cd0c64ee1cbd811fd1f4d5ba18987c457c88771359b90cb1a12f5"
+checksum = "6d35919041429a90713c8f704fa5209ba159cb554ce74d95722cbc18ac4b4c6f"
dependencies = [
"bitflags",
"getopts",
@@ -3532,9 +3535,9 @@
[[package]]
name = "rustc-ap-rustc_span"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d3479f453a38b6a5572938d035fc2b3cb6ec379c57f598b8682b512eb90c7858"
+checksum = "73b12170c69603c0bf4b50e5c25fd348aae13b8c6465aa0ef4389c9eaa568e51"
dependencies = [
"cfg-if 0.1.10",
"md-5",
@@ -3552,9 +3555,9 @@
[[package]]
name = "rustc-ap-rustc_target"
-version = "705.0.0"
+version = "712.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78cacaf829778cf07bb97a9f4604896789de12392175f3743e74a30ed370f1c1"
+checksum = "0a8329d92e7dc24b974f759e6c6e97e2bbc47b18d0573343028f8135ca367200"
dependencies = [
"bitflags",
"rustc-ap-rustc_data_structures",
@@ -4662,7 +4665,7 @@
[[package]]
name = "rustfmt-nightly"
-version = "1.4.36"
+version = "1.4.37"
dependencies = [
"annotate-snippets 0.6.1",
"anyhow",
@@ -4680,7 +4683,6 @@
"regex",
"rustc-ap-rustc_ast",
"rustc-ap-rustc_ast_pretty",
- "rustc-ap-rustc_attr",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_errors",
"rustc-ap-rustc_expand",
@@ -5032,11 +5034,23 @@
"profiler_builtins",
"rand 0.7.3",
"rustc-demangle",
+ "std_detect",
"unwind",
"wasi",
]
[[package]]
+name = "std_detect"
+version = "0.1.5"
+dependencies = [
+ "cfg-if 0.1.10",
+ "compiler_builtins",
+ "libc",
+ "rustc-std-workspace-alloc",
+ "rustc-std-workspace-core",
+]
+
+[[package]]
name = "string_cache"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml
index f961d3e..0201135 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -45,6 +45,8 @@
# not all `Cargo.toml` files are available, so we exclude the `x` binary,
# so it can be invoked before the current checkout is set up.
"src/tools/x",
+ # stdarch has its own Cargo workspace
+ "library/stdarch",
]
[profile.release.package.compiler_builtins]
@@ -88,6 +90,7 @@
# vendored copy.
[patch."https://github.com/rust-lang/cargo"]
cargo = { path = "src/tools/cargo" }
+cargo-util = { path = "src/tools/cargo/crates/cargo-util" }
[patch."https://github.com/rust-lang/rustfmt"]
# Similar to Cargo above we want the RLS to use a vendored version of `rustfmt`
diff --git a/RELEASES.md b/RELEASES.md
index c6c0007..024610b 100644
--- a/RELEASES.md
+++ b/RELEASES.md
@@ -50,6 +50,8 @@
- [`io::Empty` now implements `io::Seek`.][78044]
- [`rc::Weak<T>` and `sync::Weak<T>`'s methods such as `as_ptr` are now implemented for
`T: ?Sized` types.][80764]
+- [`Div` and `Rem` by their `NonZero` variant is now implemented for all unsigned integers.][79134]
+
Stabilized APIs
---------------
@@ -72,6 +74,8 @@
- [`str::split_inclusive`]
- [`sync::OnceState`]
- [`task::Wake`]
+- [`VecDeque::range`]
+- [`VecDeque::range_mut`]
Cargo
-----
@@ -115,6 +119,7 @@
- `thumbv7neon-unknown-linux-gnueabihf`
- `armv7-unknown-linux-gnueabi`
- `x86_64-unknown-linux-gnux32`
+- [`atomic::spin_loop_hint` has been deprecated.][80966] It's recommended to use `hint::spin_loop` instead.
Internal Only
-------------
@@ -145,6 +150,8 @@
[80764]: https://github.com/rust-lang/rust/pull/80764
[80749]: https://github.com/rust-lang/rust/pull/80749
[80662]: https://github.com/rust-lang/rust/pull/80662
+[79134]: https://github.com/rust-lang/rust/pull/79134
+[80966]: https://github.com/rust-lang/rust/pull/80966
[cargo/8997]: https://github.com/rust-lang/cargo/pull/8997
[cargo/9112]: https://github.com/rust-lang/cargo/pull/9112
[feature-resolver@2.0]: https://doc.rust-lang.org/nightly/cargo/reference/features.html#feature-resolver-version-2
@@ -166,6 +173,8 @@
[`Seek::stream_position`]: https://doc.rust-lang.org/nightly/std/io/trait.Seek.html#method.stream_position
[`Peekable::next_if`]: https://doc.rust-lang.org/nightly/std/iter/struct.Peekable.html#method.next_if
[`Peekable::next_if_eq`]: https://doc.rust-lang.org/nightly/std/iter/struct.Peekable.html#method.next_if_eq
+[`VecDeque::range`]: https://doc.rust-lang.org/nightly/std/collections/struct.VecDeque.html#method.range
+[`VecDeque::range_mut`]: https://doc.rust-lang.org/nightly/std/collections/struct.VecDeque.html#method.range_mut
Version 1.50.0 (2021-02-11)
============================
diff --git a/compiler/rustc_ast/src/ast_like.rs b/compiler/rustc_ast/src/ast_like.rs
index 63bc7c4..945a44a 100644
--- a/compiler/rustc_ast/src/ast_like.rs
+++ b/compiler/rustc_ast/src/ast_like.rs
@@ -1,20 +1,32 @@
use super::ptr::P;
+use super::token::Nonterminal;
use super::tokenstream::LazyTokenStream;
use super::{Arm, ExprField, FieldDef, GenericParam, Param, PatField, Variant};
-use super::{AssocItem, Expr, ForeignItem, Item, Local};
+use super::{AssocItem, Expr, ForeignItem, Item, Local, MacCallStmt};
use super::{AttrItem, AttrKind, Block, Pat, Path, Ty, Visibility};
use super::{AttrVec, Attribute, Stmt, StmtKind};
+use std::fmt::Debug;
+
/// An `AstLike` represents an AST node (or some wrapper around
/// and AST node) which stores some combination of attributes
/// and tokens.
-pub trait AstLike: Sized {
+pub trait AstLike: Sized + Debug {
+ /// This is `true` if this `AstLike` might support 'custom' (proc-macro) inner
+ /// attributes. Attributes like `#![cfg]` and `#![cfg_attr]` are not
+ /// considered 'custom' attributes
+ ///
+ /// If this is `false`, then this `AstLike` definitely does
+ /// not support 'custom' inner attributes, which enables some optimizations
+ /// during token collection.
+ const SUPPORTS_CUSTOM_INNER_ATTRS: bool;
fn attrs(&self) -> &[Attribute];
fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>));
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>>;
}
impl<T: AstLike + 'static> AstLike for P<T> {
+ const SUPPORTS_CUSTOM_INNER_ATTRS: bool = T::SUPPORTS_CUSTOM_INNER_ATTRS;
fn attrs(&self) -> &[Attribute] {
(**self).attrs()
}
@@ -26,6 +38,55 @@
}
}
+impl AstLike for crate::token::Nonterminal {
+ const SUPPORTS_CUSTOM_INNER_ATTRS: bool = true;
+ fn attrs(&self) -> &[Attribute] {
+ match self {
+ Nonterminal::NtItem(item) => item.attrs(),
+ Nonterminal::NtStmt(stmt) => stmt.attrs(),
+ Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.attrs(),
+ Nonterminal::NtPat(_)
+ | Nonterminal::NtTy(_)
+ | Nonterminal::NtMeta(_)
+ | Nonterminal::NtPath(_)
+ | Nonterminal::NtVis(_)
+ | Nonterminal::NtTT(_)
+ | Nonterminal::NtBlock(_)
+ | Nonterminal::NtIdent(..)
+ | Nonterminal::NtLifetime(_) => &[],
+ }
+ }
+ fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>)) {
+ match self {
+ Nonterminal::NtItem(item) => item.visit_attrs(f),
+ Nonterminal::NtStmt(stmt) => stmt.visit_attrs(f),
+ Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.visit_attrs(f),
+ Nonterminal::NtPat(_)
+ | Nonterminal::NtTy(_)
+ | Nonterminal::NtMeta(_)
+ | Nonterminal::NtPath(_)
+ | Nonterminal::NtVis(_)
+ | Nonterminal::NtTT(_)
+ | Nonterminal::NtBlock(_)
+ | Nonterminal::NtIdent(..)
+ | Nonterminal::NtLifetime(_) => {}
+ }
+ }
+ fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
+ match self {
+ Nonterminal::NtItem(item) => item.tokens_mut(),
+ Nonterminal::NtStmt(stmt) => stmt.tokens_mut(),
+ Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.tokens_mut(),
+ Nonterminal::NtPat(pat) => pat.tokens_mut(),
+ Nonterminal::NtTy(ty) => ty.tokens_mut(),
+ Nonterminal::NtMeta(attr_item) => attr_item.tokens_mut(),
+ Nonterminal::NtPath(path) => path.tokens_mut(),
+ Nonterminal::NtVis(vis) => vis.tokens_mut(),
+ _ => panic!("Called tokens_mut on {:?}", self),
+ }
+ }
+}
+
fn visit_attrvec(attrs: &mut AttrVec, f: impl FnOnce(&mut Vec<Attribute>)) {
crate::mut_visit::visit_clobber(attrs, |attrs| {
let mut vec = attrs.into();
@@ -35,6 +96,10 @@
}
impl AstLike for StmtKind {
+ // This might be an `StmtKind::Item`, which contains
+ // an item that supports inner attrs
+ const SUPPORTS_CUSTOM_INNER_ATTRS: bool = true;
+
fn attrs(&self) -> &[Attribute] {
match self {
StmtKind::Local(local) => local.attrs(),
@@ -66,6 +131,8 @@
}
impl AstLike for Stmt {
+ const SUPPORTS_CUSTOM_INNER_ATTRS: bool = StmtKind::SUPPORTS_CUSTOM_INNER_ATTRS;
+
fn attrs(&self) -> &[Attribute] {
self.kind.attrs()
}
@@ -79,6 +146,8 @@
}
impl AstLike for Attribute {
+ const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
+
fn attrs(&self) -> &[Attribute] {
&[]
}
@@ -94,6 +163,8 @@
}
impl<T: AstLike> AstLike for Option<T> {
+ const SUPPORTS_CUSTOM_INNER_ATTRS: bool = T::SUPPORTS_CUSTOM_INNER_ATTRS;
+
fn attrs(&self) -> &[Attribute] {
self.as_ref().map(|inner| inner.attrs()).unwrap_or(&[])
}
@@ -127,8 +198,13 @@
}
macro_rules! derive_has_tokens_and_attrs {
- ($($ty:path),*) => { $(
+ (
+ const SUPPORTS_CUSTOM_INNER_ATTRS: bool = $inner_attrs:literal;
+ $($ty:path),*
+ ) => { $(
impl AstLike for $ty {
+ const SUPPORTS_CUSTOM_INNER_ATTRS: bool = $inner_attrs;
+
fn attrs(&self) -> &[Attribute] {
&self.attrs
}
@@ -140,6 +216,7 @@
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
Some(&mut self.tokens)
}
+
}
)* }
}
@@ -147,6 +224,8 @@
macro_rules! derive_has_attrs_no_tokens {
($($ty:path),*) => { $(
impl AstLike for $ty {
+ const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
+
fn attrs(&self) -> &[Attribute] {
&self.attrs
}
@@ -165,12 +244,13 @@
macro_rules! derive_has_tokens_no_attrs {
($($ty:path),*) => { $(
impl AstLike for $ty {
+ const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
+
fn attrs(&self) -> &[Attribute] {
&[]
}
fn visit_attrs(&mut self, _f: impl FnOnce(&mut Vec<Attribute>)) {}
-
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
Some(&mut self.tokens)
}
@@ -178,10 +258,18 @@
)* }
}
-// These AST nodes support both inert and active
-// attributes, so they also have tokens.
+// These ast nodes support both active and inert attributes,
+// so they have tokens collected to pass to proc macros
derive_has_tokens_and_attrs! {
- Item, Expr, Local, AssocItem, ForeignItem
+ // Both `Item` and `AssocItem` can have bodies, which
+ // can contain inner attributes
+ const SUPPORTS_CUSTOM_INNER_ATTRS: bool = true;
+ Item, AssocItem, ForeignItem
+}
+
+derive_has_tokens_and_attrs! {
+ const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
+ Local, MacCallStmt, Expr
}
// These ast nodes only support inert attributes, so they don't
diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs
index 0fbe4d0..41121d0 100644
--- a/compiler/rustc_ast/src/attr/mod.rs
+++ b/compiler/rustc_ast/src/attr/mod.rs
@@ -6,7 +6,9 @@
use crate::ast::{MacArgs, MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem};
use crate::ast::{Path, PathSegment};
use crate::token::{self, CommentKind, Token};
-use crate::tokenstream::{DelimSpan, LazyTokenStream, TokenStream, TokenTree, TreeAndSpacing};
+use crate::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
+use crate::tokenstream::{DelimSpan, Spacing, TokenTree, TreeAndSpacing};
+use crate::tokenstream::{LazyTokenStream, TokenStream};
use rustc_index::bit_set::GrowableBitSet;
use rustc_span::source_map::BytePos;
@@ -268,14 +270,18 @@
}
}
- pub fn tokens(&self) -> TokenStream {
+ pub fn tokens(&self) -> AttrAnnotatedTokenStream {
match self.kind {
AttrKind::Normal(_, ref tokens) => tokens
.as_ref()
.unwrap_or_else(|| panic!("attribute is missing tokens: {:?}", self))
.create_token_stream(),
- AttrKind::DocComment(comment_kind, data) => TokenStream::from(TokenTree::Token(
- Token::new(token::DocComment(comment_kind, self.style, data), self.span),
+ AttrKind::DocComment(comment_kind, data) => AttrAnnotatedTokenStream::from((
+ AttrAnnotatedTokenTree::Token(Token::new(
+ token::DocComment(comment_kind, self.style, data),
+ self.span,
+ )),
+ Spacing::Alone,
)),
}
}
diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs
index b1840f4..05f57f9 100644
--- a/compiler/rustc_ast/src/mut_visit.rs
+++ b/compiler/rustc_ast/src/mut_visit.rs
@@ -631,6 +631,33 @@
}
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
+pub fn visit_attr_annotated_tt<T: MutVisitor>(tt: &mut AttrAnnotatedTokenTree, vis: &mut T) {
+ match tt {
+ AttrAnnotatedTokenTree::Token(token) => {
+ visit_token(token, vis);
+ }
+ AttrAnnotatedTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
+ vis.visit_span(open);
+ vis.visit_span(close);
+ visit_attr_annotated_tts(tts, vis);
+ }
+ AttrAnnotatedTokenTree::Attributes(data) => {
+ for attr in &mut *data.attrs {
+ match &mut attr.kind {
+ AttrKind::Normal(_, attr_tokens) => {
+ visit_lazy_tts(attr_tokens, vis);
+ }
+ AttrKind::DocComment(..) => {
+ vis.visit_span(&mut attr.span);
+ }
+ }
+ }
+ visit_lazy_tts_opt_mut(Some(&mut data.tokens), vis);
+ }
+ }
+}
+
+// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
pub fn visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
match tt {
TokenTree::Token(token) => {
@@ -652,16 +679,30 @@
}
}
-pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyTokenStream>, vis: &mut T) {
- if vis.token_visiting_enabled() {
- visit_opt(lazy_tts, |lazy_tts| {
- let mut tts = lazy_tts.create_token_stream();
- visit_tts(&mut tts, vis);
- *lazy_tts = LazyTokenStream::new(tts);
- })
+pub fn visit_attr_annotated_tts<T: MutVisitor>(
+ AttrAnnotatedTokenStream(tts): &mut AttrAnnotatedTokenStream,
+ vis: &mut T,
+) {
+ if vis.token_visiting_enabled() && !tts.is_empty() {
+ let tts = Lrc::make_mut(tts);
+ visit_vec(tts, |(tree, _is_joint)| visit_attr_annotated_tt(tree, vis));
}
}
+pub fn visit_lazy_tts_opt_mut<T: MutVisitor>(lazy_tts: Option<&mut LazyTokenStream>, vis: &mut T) {
+ if vis.token_visiting_enabled() {
+ if let Some(lazy_tts) = lazy_tts {
+ let mut tts = lazy_tts.create_token_stream();
+ visit_attr_annotated_tts(&mut tts, vis);
+ *lazy_tts = LazyTokenStream::new(tts);
+ }
+ }
+}
+
+pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyTokenStream>, vis: &mut T) {
+ visit_lazy_tts_opt_mut(lazy_tts.as_mut(), vis);
+}
+
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
// Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
// In practice the ident part is not actually used by specific visitors right now,
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index 1c26668..8318b24 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -14,6 +14,7 @@
//! ownership of the original.
use crate::token::{self, DelimToken, Token, TokenKind};
+use crate::AttrVec;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{self, Lrc};
@@ -123,11 +124,11 @@
}
pub trait CreateTokenStream: sync::Send + sync::Sync {
- fn create_token_stream(&self) -> TokenStream;
+ fn create_token_stream(&self) -> AttrAnnotatedTokenStream;
}
-impl CreateTokenStream for TokenStream {
- fn create_token_stream(&self) -> TokenStream {
+impl CreateTokenStream for AttrAnnotatedTokenStream {
+ fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
self.clone()
}
}
@@ -143,14 +144,14 @@
LazyTokenStream(Lrc::new(Box::new(inner)))
}
- pub fn create_token_stream(&self) -> TokenStream {
+ pub fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
self.0.create_token_stream()
}
}
impl fmt::Debug for LazyTokenStream {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Debug::fmt("LazyTokenStream", f)
+ write!(f, "LazyTokenStream({:?})", self.create_token_stream())
}
}
@@ -173,6 +174,145 @@
}
}
+/// An `AttrAnnotatedTokenStream` is similar to a `TokenStream`, but with extra
+/// information about the tokens for attribute targets. This is used
+/// during expansion to perform early cfg-expansion, and to process attributes
+/// during proc-macro invocations.
+#[derive(Clone, Debug, Default, Encodable, Decodable)]
+pub struct AttrAnnotatedTokenStream(pub Lrc<Vec<(AttrAnnotatedTokenTree, Spacing)>>);
+
+/// Like `TokenTree`, but for `AttrAnnotatedTokenStream`
+#[derive(Clone, Debug, Encodable, Decodable)]
+pub enum AttrAnnotatedTokenTree {
+ Token(Token),
+ Delimited(DelimSpan, DelimToken, AttrAnnotatedTokenStream),
+ /// Stores the attributes for an attribute target,
+ /// along with the tokens for that attribute target.
+ /// See `AttributesData` for more information
+ Attributes(AttributesData),
+}
+
+impl AttrAnnotatedTokenStream {
+ pub fn new(tokens: Vec<(AttrAnnotatedTokenTree, Spacing)>) -> AttrAnnotatedTokenStream {
+ AttrAnnotatedTokenStream(Lrc::new(tokens))
+ }
+
+    /// Converts this `AttrAnnotatedTokenStream` to a plain `TokenStream`.
+ /// During conversion, `AttrAnnotatedTokenTree::Attributes` get 'flattened'
+ /// back to a `TokenStream` of the form `outer_attr attr_target`.
+ /// If there are inner attributes, they are inserted into the proper
+ /// place in the attribute target tokens.
+ pub fn to_tokenstream(&self) -> TokenStream {
+ let trees: Vec<_> = self
+ .0
+ .iter()
+ .flat_map(|tree| match &tree.0 {
+ AttrAnnotatedTokenTree::Token(inner) => {
+ smallvec![(TokenTree::Token(inner.clone()), tree.1)].into_iter()
+ }
+ AttrAnnotatedTokenTree::Delimited(span, delim, stream) => smallvec![(
+ TokenTree::Delimited(*span, *delim, stream.to_tokenstream()),
+ tree.1,
+ )]
+ .into_iter(),
+ AttrAnnotatedTokenTree::Attributes(data) => {
+ let mut outer_attrs = Vec::new();
+ let mut inner_attrs = Vec::new();
+ let attrs: Vec<_> = data.attrs.clone().into();
+ for attr in attrs {
+ match attr.style {
+ crate::AttrStyle::Outer => {
+ assert!(
+ inner_attrs.len() == 0,
+ "Found outer attribute {:?} after inner attrs {:?}",
+ attr,
+ inner_attrs
+ );
+ outer_attrs.push(attr);
+ }
+ crate::AttrStyle::Inner => {
+ inner_attrs.push(attr);
+ }
+ }
+ }
+
+ let mut target_tokens: Vec<_> = data
+ .tokens
+ .create_token_stream()
+ .to_tokenstream()
+ .0
+ .iter()
+ .cloned()
+ .collect();
+ if !inner_attrs.is_empty() {
+ let mut found = false;
+ // Check the last two trees (to account for a trailing semi)
+ for (tree, _) in target_tokens.iter_mut().rev().take(2) {
+ if let TokenTree::Delimited(span, delim, delim_tokens) = tree {
+ // Inner attributes are only supported on extern blocks, functions, impls,
+ // and modules. All of these have their inner attributes placed at
+ // the beginning of the rightmost outermost braced group:
+                            // e.g. `fn foo() { #![my_attr] }`
+ //
+ // Therefore, we can insert them back into the right location
+ // without needing to do any extra position tracking.
+ //
+ // Note: Outline modules are an exception - they can
+ // have attributes like `#![my_attr]` at the start of a file.
+ // Support for custom attributes in this position is not
+ // properly implemented - we always synthesize fake tokens,
+ // so we never reach this code.
+
+ let mut builder = TokenStreamBuilder::new();
+ for inner_attr in &inner_attrs {
+ builder.push(inner_attr.tokens().to_tokenstream());
+ }
+ builder.push(delim_tokens.clone());
+ *tree = TokenTree::Delimited(*span, *delim, builder.build());
+ found = true;
+ break;
+ }
+ }
+
+ assert!(
+ found,
+ "Failed to find trailing delimited group in: {:?}",
+ target_tokens
+ );
+ }
+ let mut flat: SmallVec<[_; 1]> = SmallVec::new();
+ for attr in outer_attrs {
+ // FIXME: Make this more efficient
+ flat.extend(attr.tokens().to_tokenstream().0.clone().iter().cloned());
+ }
+ flat.extend(target_tokens);
+ flat.into_iter()
+ }
+ })
+ .collect();
+ TokenStream::new(trees)
+ }
+}
+
+/// Stores the tokens for an attribute target, along
+/// with its attributes.
+///
+/// This is constructed during parsing when we need to capture
+/// tokens.
+///
+/// For example, `#[cfg(FALSE)] struct Foo {}` would
+/// have an `attrs` field contaiing the `#[cfg(FALSE)]` attr,
+/// and a `tokens` field storing the (unparsed) tokens `struct Foo {}`
+#[derive(Clone, Debug, Encodable, Decodable)]
+pub struct AttributesData {
+ /// Attributes, both outer and inner.
+ /// These are stored in the original order that they were parsed in.
+ pub attrs: AttrVec,
+ /// The underlying tokens for the attribute target that `attrs`
+ /// are applied to
+ pub tokens: LazyTokenStream,
+}
+
/// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.
///
/// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
@@ -235,6 +375,12 @@
}
}
+impl From<(AttrAnnotatedTokenTree, Spacing)> for AttrAnnotatedTokenStream {
+ fn from((tree, spacing): (AttrAnnotatedTokenTree, Spacing)) -> AttrAnnotatedTokenStream {
+ AttrAnnotatedTokenStream::new(vec![(tree, spacing)])
+ }
+}
+
impl From<TokenTree> for TokenStream {
fn from(tree: TokenTree) -> TokenStream {
TokenStream::new(vec![(tree, Spacing::Alone)])
@@ -457,6 +603,10 @@
}
}
+ pub fn index(&self) -> usize {
+ self.index
+ }
+
pub fn append(&mut self, new_stream: TokenStream) {
if new_stream.is_empty() {
return;
diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs
index cd5d116..44056df 100644
--- a/compiler/rustc_ast_lowering/src/lib.rs
+++ b/compiler/rustc_ast_lowering/src/lib.rs
@@ -37,8 +37,8 @@
#![recursion_limit = "256"]
use rustc_ast::node_id::NodeMap;
-use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
-use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, DelimSpan, TokenStream, TokenTree};
+use rustc_ast::token::{self, Token};
+use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream, TokenTree};
use rustc_ast::visit::{self, AssocCtxt, Visitor};
use rustc_ast::walk_list;
use rustc_ast::{self as ast, *};
@@ -56,7 +56,7 @@
use rustc_index::vec::{Idx, IndexVec};
use rustc_session::lint::builtin::{BARE_TRAIT_OBJECTS, MISSING_ABI};
use rustc_session::lint::{BuiltinLintDiagnostics, LintBuffer};
-use rustc_session::parse::ParseSess;
+use rustc_session::utils::{FlattenNonterminals, NtToTokenstream};
use rustc_session::Session;
use rustc_span::hygiene::ExpnId;
use rustc_span::source_map::{respan, DesugaringKind};
@@ -213,8 +213,6 @@
) -> LocalDefId;
}
-type NtToTokenstream = fn(&Nonterminal, &ParseSess, CanSynthesizeMissingTokens) -> TokenStream;
-
/// Context of `impl Trait` in code, which determines whether it is allowed in an HIR subtree,
/// and if so, what meaning it has.
#[derive(Debug)]
@@ -403,42 +401,6 @@
PassThrough,
}
-struct TokenStreamLowering<'a> {
- parse_sess: &'a ParseSess,
- synthesize_tokens: CanSynthesizeMissingTokens,
- nt_to_tokenstream: NtToTokenstream,
-}
-
-impl<'a> TokenStreamLowering<'a> {
- fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
- tokens.into_trees().flat_map(|tree| self.lower_token_tree(tree).into_trees()).collect()
- }
-
- fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
- match tree {
- TokenTree::Token(token) => self.lower_token(token),
- TokenTree::Delimited(span, delim, tts) => {
- TokenTree::Delimited(span, delim, self.lower_token_stream(tts)).into()
- }
- }
- }
-
- fn lower_token(&mut self, token: Token) -> TokenStream {
- match token.kind {
- token::Interpolated(nt) => {
- let tts = (self.nt_to_tokenstream)(&nt, self.parse_sess, self.synthesize_tokens);
- TokenTree::Delimited(
- DelimSpan::from_single(token.span),
- DelimToken::NoDelim,
- self.lower_token_stream(tts),
- )
- .into()
- }
- _ => TokenTree::Token(token).into(),
- }
- }
-}
-
impl<'a, 'hir> LoweringContext<'a, 'hir> {
fn lower_crate(mut self, c: &Crate) -> hir::Crate<'hir> {
/// Full-crate AST visitor that inserts into a fresh
@@ -1037,12 +999,12 @@
}
}
- let tokens = TokenStreamLowering {
+ let tokens = FlattenNonterminals {
parse_sess: &self.sess.parse_sess,
synthesize_tokens: CanSynthesizeMissingTokens::Yes,
nt_to_tokenstream: self.nt_to_tokenstream,
}
- .lower_token(token.clone());
+ .process_token(token.clone());
MacArgs::Eq(eq_span, unwrap_single_token(self.sess, tokens, token.span))
}
}
@@ -1053,12 +1015,12 @@
tokens: TokenStream,
synthesize_tokens: CanSynthesizeMissingTokens,
) -> TokenStream {
- TokenStreamLowering {
+ FlattenNonterminals {
parse_sess: &self.sess.parse_sess,
synthesize_tokens,
nt_to_tokenstream: self.nt_to_tokenstream,
}
- .lower_token_stream(tokens)
+ .process_token_stream(tokens)
}
/// Given an associated type constraint like one of these:
diff --git a/compiler/rustc_builtin_macros/src/cfg_eval.rs b/compiler/rustc_builtin_macros/src/cfg_eval.rs
index 025872d..79dc857 100644
--- a/compiler/rustc_builtin_macros/src/cfg_eval.rs
+++ b/compiler/rustc_builtin_macros/src/cfg_eval.rs
@@ -1,11 +1,18 @@
use crate::util::check_builtin_macro_attribute;
-use rustc_ast::mut_visit::{self, MutVisitor};
-use rustc_ast::ptr::P;
-use rustc_ast::{self as ast, AstLike};
+use rustc_ast as ast;
+use rustc_ast::mut_visit::MutVisitor;
+use rustc_ast::tokenstream::CanSynthesizeMissingTokens;
+use rustc_ast::visit::Visitor;
+use rustc_ast::{mut_visit, visit};
+use rustc_ast::{AstLike, Attribute};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_expand::config::StripUnconfigured;
use rustc_expand::configure;
+use rustc_parse::parser::ForceCollect;
+use rustc_session::utils::FlattenNonterminals;
+
+use rustc_ast::ptr::P;
use rustc_span::symbol::sym;
use rustc_span::Span;
use smallvec::SmallVec;
@@ -22,74 +29,179 @@
crate fn cfg_eval(ecx: &ExtCtxt<'_>, annotatable: Annotatable) -> Vec<Annotatable> {
let mut visitor = CfgEval {
- cfg: StripUnconfigured { sess: ecx.sess, features: ecx.ecfg.features, modified: false },
+ cfg: &mut StripUnconfigured {
+ sess: ecx.sess,
+ features: ecx.ecfg.features,
+ config_tokens: true,
+ },
};
- let mut annotatable = visitor.configure_annotatable(annotatable);
- if visitor.cfg.modified {
- // Erase the tokens if cfg-stripping modified the item
- // This will cause us to synthesize fake tokens
- // when `nt_to_tokenstream` is called on this item.
- if let Some(tokens) = annotatable.tokens_mut() {
- *tokens = None;
- }
- }
+ let annotatable = visitor.configure_annotatable(annotatable);
vec![annotatable]
}
-struct CfgEval<'a> {
- cfg: StripUnconfigured<'a>,
+struct CfgEval<'a, 'b> {
+ cfg: &'a mut StripUnconfigured<'b>,
}
-impl CfgEval<'_> {
+fn flat_map_annotatable(vis: &mut impl MutVisitor, annotatable: Annotatable) -> Annotatable {
+ // Since the item itself has already been configured by the InvocationCollector,
+ // we know that fold result vector will contain exactly one element
+ match annotatable {
+ Annotatable::Item(item) => Annotatable::Item(vis.flat_map_item(item).pop().unwrap()),
+ Annotatable::TraitItem(item) => {
+ Annotatable::TraitItem(vis.flat_map_trait_item(item).pop().unwrap())
+ }
+ Annotatable::ImplItem(item) => {
+ Annotatable::ImplItem(vis.flat_map_impl_item(item).pop().unwrap())
+ }
+ Annotatable::ForeignItem(item) => {
+ Annotatable::ForeignItem(vis.flat_map_foreign_item(item).pop().unwrap())
+ }
+ Annotatable::Stmt(stmt) => {
+ Annotatable::Stmt(stmt.map(|stmt| vis.flat_map_stmt(stmt).pop().unwrap()))
+ }
+ Annotatable::Expr(mut expr) => Annotatable::Expr({
+ vis.visit_expr(&mut expr);
+ expr
+ }),
+ Annotatable::Arm(arm) => Annotatable::Arm(vis.flat_map_arm(arm).pop().unwrap()),
+ Annotatable::ExprField(field) => {
+ Annotatable::ExprField(vis.flat_map_expr_field(field).pop().unwrap())
+ }
+ Annotatable::PatField(fp) => {
+ Annotatable::PatField(vis.flat_map_pat_field(fp).pop().unwrap())
+ }
+ Annotatable::GenericParam(param) => {
+ Annotatable::GenericParam(vis.flat_map_generic_param(param).pop().unwrap())
+ }
+ Annotatable::Param(param) => Annotatable::Param(vis.flat_map_param(param).pop().unwrap()),
+ Annotatable::FieldDef(sf) => {
+ Annotatable::FieldDef(vis.flat_map_field_def(sf).pop().unwrap())
+ }
+ Annotatable::Variant(v) => Annotatable::Variant(vis.flat_map_variant(v).pop().unwrap()),
+ }
+}
+
+struct CfgFinder {
+ has_cfg_or_cfg_attr: bool,
+}
+
+impl CfgFinder {
+ fn has_cfg_or_cfg_attr(annotatable: &Annotatable) -> bool {
+ let mut finder = CfgFinder { has_cfg_or_cfg_attr: false };
+ match annotatable {
+ Annotatable::Item(item) => finder.visit_item(&item),
+ Annotatable::TraitItem(item) => finder.visit_assoc_item(&item, visit::AssocCtxt::Trait),
+ Annotatable::ImplItem(item) => finder.visit_assoc_item(&item, visit::AssocCtxt::Impl),
+ Annotatable::ForeignItem(item) => finder.visit_foreign_item(&item),
+ Annotatable::Stmt(stmt) => finder.visit_stmt(&stmt),
+ Annotatable::Expr(expr) => finder.visit_expr(&expr),
+ Annotatable::Arm(arm) => finder.visit_arm(&arm),
+ Annotatable::ExprField(field) => finder.visit_expr_field(&field),
+ Annotatable::PatField(field) => finder.visit_pat_field(&field),
+ Annotatable::GenericParam(param) => finder.visit_generic_param(¶m),
+ Annotatable::Param(param) => finder.visit_param(¶m),
+ Annotatable::FieldDef(field) => finder.visit_field_def(&field),
+ Annotatable::Variant(variant) => finder.visit_variant(&variant),
+ };
+ finder.has_cfg_or_cfg_attr
+ }
+}
+
+impl<'ast> visit::Visitor<'ast> for CfgFinder {
+ fn visit_attribute(&mut self, attr: &'ast Attribute) {
+ // We want short-circuiting behavior, so don't use the '|=' operator.
+ self.has_cfg_or_cfg_attr = self.has_cfg_or_cfg_attr
+ || attr
+ .ident()
+ .map_or(false, |ident| ident.name == sym::cfg || ident.name == sym::cfg_attr);
+ }
+}
+
+impl CfgEval<'_, '_> {
fn configure<T: AstLike>(&mut self, node: T) -> Option<T> {
self.cfg.configure(node)
}
- fn configure_annotatable(&mut self, annotatable: Annotatable) -> Annotatable {
- // Since the item itself has already been configured by the InvocationCollector,
- // we know that fold result vector will contain exactly one element
- match annotatable {
- Annotatable::Item(item) => Annotatable::Item(self.flat_map_item(item).pop().unwrap()),
- Annotatable::TraitItem(item) => {
- Annotatable::TraitItem(self.flat_map_trait_item(item).pop().unwrap())
- }
- Annotatable::ImplItem(item) => {
- Annotatable::ImplItem(self.flat_map_impl_item(item).pop().unwrap())
- }
- Annotatable::ForeignItem(item) => {
- Annotatable::ForeignItem(self.flat_map_foreign_item(item).pop().unwrap())
- }
- Annotatable::Stmt(stmt) => {
- Annotatable::Stmt(stmt.map(|stmt| self.flat_map_stmt(stmt).pop().unwrap()))
- }
- Annotatable::Expr(mut expr) => Annotatable::Expr({
- self.visit_expr(&mut expr);
- expr
- }),
- Annotatable::Arm(arm) => Annotatable::Arm(self.flat_map_arm(arm).pop().unwrap()),
- Annotatable::ExprField(field) => {
- Annotatable::ExprField(self.flat_map_expr_field(field).pop().unwrap())
- }
- Annotatable::PatField(fp) => {
- Annotatable::PatField(self.flat_map_pat_field(fp).pop().unwrap())
- }
- Annotatable::GenericParam(param) => {
- Annotatable::GenericParam(self.flat_map_generic_param(param).pop().unwrap())
- }
- Annotatable::Param(param) => {
- Annotatable::Param(self.flat_map_param(param).pop().unwrap())
- }
- Annotatable::FieldDef(sf) => {
- Annotatable::FieldDef(self.flat_map_field_def(sf).pop().unwrap())
- }
- Annotatable::Variant(v) => {
- Annotatable::Variant(self.flat_map_variant(v).pop().unwrap())
- }
+ pub fn configure_annotatable(&mut self, mut annotatable: Annotatable) -> Annotatable {
+ // Tokenizing and re-parsing the `Annotatable` can have a significant
+ // performance impact, so try to avoid it if possible
+ if !CfgFinder::has_cfg_or_cfg_attr(&annotatable) {
+ return annotatable;
}
+
+ // The majority of parsed attribute targets will never need to have early cfg-expansion
+ // run (e.g. they are not part of a `#[derive]` or `#[cfg_eval]` macro input).
+ // Therefore, we normally do not capture the necessary information about `#[cfg]`
+ // and `#[cfg_attr]` attributes during parsing.
+ //
+ // Therefore, when we actually *do* run early cfg-expansion, we need to tokenize
+ // and re-parse the attribute target, this time capturing information about
+ // the location of `#[cfg]` and `#[cfg_attr]` in the token stream. The tokenization
+ // process is lossless, so this process is invisible to proc-macros.
+
+ // FIXME - get rid of this clone
+ let nt = annotatable.clone().into_nonterminal();
+
+ let mut orig_tokens = rustc_parse::nt_to_tokenstream(
+ &nt,
+ &self.cfg.sess.parse_sess,
+ CanSynthesizeMissingTokens::No,
+ );
+
+ // 'Flatten' all nonterminals (i.e. `TokenKind::Interpolated`)
+ // to `None`-delimited groups containing the corresponding tokens. This
+ // is normally delayed until the proc-macro server actually needs to
+ // provide a `TokenKind::Interpolated` to a proc-macro. We do this earlier,
+ // so that we can handle cases like:
+ //
+ // ```rust
+ // #[cfg_eval] #[cfg] $item
+ //```
+ //
+ // where `$item` is `#[cfg_attr] struct Foo {}`. We want to make
+ // sure to evaluate *all* `#[cfg]` and `#[cfg_attr]` attributes - the simplest
+ // way to do this is to do a single parse of a stream without any nonterminals.
+ let mut flatten = FlattenNonterminals {
+ nt_to_tokenstream: rustc_parse::nt_to_tokenstream,
+ parse_sess: &self.cfg.sess.parse_sess,
+ synthesize_tokens: CanSynthesizeMissingTokens::No,
+ };
+ orig_tokens = flatten.process_token_stream(orig_tokens);
+
+ // Re-parse the tokens, setting the `capture_cfg` flag to save extra information
+ // to the captured `AttrAnnotatedTokenStream` (specifically, we capture
+ // `AttrAnnotatedTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
+ let mut parser =
+ rustc_parse::stream_to_parser(&self.cfg.sess.parse_sess, orig_tokens, None);
+ parser.capture_cfg = true;
+ annotatable = match annotatable {
+ Annotatable::Item(_) => {
+ Annotatable::Item(parser.parse_item(ForceCollect::Yes).unwrap().unwrap())
+ }
+ Annotatable::TraitItem(_) => Annotatable::TraitItem(
+ parser.parse_trait_item(ForceCollect::Yes).unwrap().unwrap().unwrap(),
+ ),
+ Annotatable::ImplItem(_) => Annotatable::ImplItem(
+ parser.parse_impl_item(ForceCollect::Yes).unwrap().unwrap().unwrap(),
+ ),
+ Annotatable::ForeignItem(_) => Annotatable::ForeignItem(
+ parser.parse_foreign_item(ForceCollect::Yes).unwrap().unwrap().unwrap(),
+ ),
+ Annotatable::Stmt(_) => {
+ Annotatable::Stmt(P(parser.parse_stmt(ForceCollect::Yes).unwrap().unwrap()))
+ }
+ Annotatable::Expr(_) => Annotatable::Expr(parser.parse_expr_force_collect().unwrap()),
+ _ => unreachable!(),
+ };
+
+ // Now that we have our re-parsed `AttrAnnotatedTokenStream`, recursively configuring
+ // our attribute target will correctly configure the tokens as well.
+ flat_map_annotatable(self, annotatable)
}
}
-impl MutVisitor for CfgEval<'_> {
+impl MutVisitor for CfgEval<'_, '_> {
fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
self.cfg.configure_expr(expr);
mut_visit::noop_visit_expr(expr, self);
diff --git a/compiler/rustc_codegen_ssa/src/back/write.rs b/compiler/rustc_codegen_ssa/src/back/write.rs
index 04d0686..c8688fa 100644
--- a/compiler/rustc_codegen_ssa/src/back/write.rs
+++ b/compiler/rustc_codegen_ssa/src/back/write.rs
@@ -1094,7 +1094,7 @@
// only place where we have access to the compiler `Session`.
// - LLVM work can be done on any thread.
// - Codegen can only happen on the main thread.
- // - Each thread doing substantial work most be in possession of a `Token`
+ // - Each thread doing substantial work must be in possession of a `Token`
// from the `Jobserver`.
// - The compiler process always holds one `Token`. Any additional `Tokens`
// have to be requested from the `Jobserver`.
@@ -1146,7 +1146,7 @@
// if possible. These two goals are at odds with each other: If memory
// consumption were not an issue, we could just let the main thread produce
// LLVM WorkItems at full speed, assuring maximal utilization of
- // Tokens/LLVM worker threads. However, since codegen usual is faster
+ // Tokens/LLVM worker threads. However, since codegen is usually faster
// than LLVM processing, the queue of LLVM WorkItems would fill up and each
// WorkItem potentially holds on to a substantial amount of memory.
//
diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs
index a2035ee..5950584 100644
--- a/compiler/rustc_expand/src/base.rs
+++ b/compiler/rustc_expand/src/base.rs
@@ -3,7 +3,7 @@
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Nonterminal};
-use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, LazyTokenStream, TokenStream};
+use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream};
use rustc_ast::visit::{AssocCtxt, Visitor};
use rustc_ast::{self as ast, AstLike, Attribute, Item, NodeId, PatKind};
use rustc_attr::{self as attr, Deprecation, Stability};
@@ -46,62 +46,6 @@
Variant(ast::Variant),
}
-impl AstLike for Annotatable {
- fn attrs(&self) -> &[Attribute] {
- match *self {
- Annotatable::Item(ref item) => &item.attrs,
- Annotatable::TraitItem(ref trait_item) => &trait_item.attrs,
- Annotatable::ImplItem(ref impl_item) => &impl_item.attrs,
- Annotatable::ForeignItem(ref foreign_item) => &foreign_item.attrs,
- Annotatable::Stmt(ref stmt) => stmt.attrs(),
- Annotatable::Expr(ref expr) => &expr.attrs,
- Annotatable::Arm(ref arm) => &arm.attrs,
- Annotatable::ExprField(ref field) => &field.attrs,
- Annotatable::PatField(ref fp) => &fp.attrs,
- Annotatable::GenericParam(ref gp) => &gp.attrs,
- Annotatable::Param(ref p) => &p.attrs,
- Annotatable::FieldDef(ref sf) => &sf.attrs,
- Annotatable::Variant(ref v) => &v.attrs(),
- }
- }
-
- fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>)) {
- match self {
- Annotatable::Item(item) => item.visit_attrs(f),
- Annotatable::TraitItem(trait_item) => trait_item.visit_attrs(f),
- Annotatable::ImplItem(impl_item) => impl_item.visit_attrs(f),
- Annotatable::ForeignItem(foreign_item) => foreign_item.visit_attrs(f),
- Annotatable::Stmt(stmt) => stmt.visit_attrs(f),
- Annotatable::Expr(expr) => expr.visit_attrs(f),
- Annotatable::Arm(arm) => arm.visit_attrs(f),
- Annotatable::ExprField(field) => field.visit_attrs(f),
- Annotatable::PatField(fp) => fp.visit_attrs(f),
- Annotatable::GenericParam(gp) => gp.visit_attrs(f),
- Annotatable::Param(p) => p.visit_attrs(f),
- Annotatable::FieldDef(sf) => sf.visit_attrs(f),
- Annotatable::Variant(v) => v.visit_attrs(f),
- }
- }
-
- fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
- match self {
- Annotatable::Item(item) => item.tokens_mut(),
- Annotatable::TraitItem(trait_item) => trait_item.tokens_mut(),
- Annotatable::ImplItem(impl_item) => impl_item.tokens_mut(),
- Annotatable::ForeignItem(foreign_item) => foreign_item.tokens_mut(),
- Annotatable::Stmt(stmt) => stmt.tokens_mut(),
- Annotatable::Expr(expr) => expr.tokens_mut(),
- Annotatable::Arm(arm) => arm.tokens_mut(),
- Annotatable::ExprField(field) => field.tokens_mut(),
- Annotatable::PatField(fp) => fp.tokens_mut(),
- Annotatable::GenericParam(gp) => gp.tokens_mut(),
- Annotatable::Param(p) => p.tokens_mut(),
- Annotatable::FieldDef(sf) => sf.tokens_mut(),
- Annotatable::Variant(v) => v.tokens_mut(),
- }
- }
-}
-
impl Annotatable {
pub fn span(&self) -> Span {
match *self {
@@ -121,6 +65,24 @@
}
}
+ pub fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>)) {
+ match self {
+ Annotatable::Item(item) => item.visit_attrs(f),
+ Annotatable::TraitItem(trait_item) => trait_item.visit_attrs(f),
+ Annotatable::ImplItem(impl_item) => impl_item.visit_attrs(f),
+ Annotatable::ForeignItem(foreign_item) => foreign_item.visit_attrs(f),
+ Annotatable::Stmt(stmt) => stmt.visit_attrs(f),
+ Annotatable::Expr(expr) => expr.visit_attrs(f),
+ Annotatable::Arm(arm) => arm.visit_attrs(f),
+ Annotatable::ExprField(field) => field.visit_attrs(f),
+ Annotatable::PatField(fp) => fp.visit_attrs(f),
+ Annotatable::GenericParam(gp) => gp.visit_attrs(f),
+ Annotatable::Param(p) => p.visit_attrs(f),
+ Annotatable::FieldDef(sf) => sf.visit_attrs(f),
+ Annotatable::Variant(v) => v.visit_attrs(f),
+ }
+ }
+
pub fn visit_with<'a, V: Visitor<'a>>(&'a self, visitor: &mut V) {
match self {
Annotatable::Item(item) => visitor.visit_item(item),
@@ -139,7 +101,7 @@
}
}
- crate fn into_nonterminal(self) -> Nonterminal {
+ pub fn into_nonterminal(self) -> Nonterminal {
match self {
Annotatable::Item(item) => token::NtItem(item),
Annotatable::TraitItem(item) | Annotatable::ImplItem(item) => {
@@ -161,10 +123,7 @@
}
crate fn into_tokens(self, sess: &ParseSess) -> TokenStream {
- // Tokens of an attribute target may be invalidated by some outer `#[derive]` performing
- // "full configuration" (attributes following derives on the same item should be the most
- // common case), that's why synthesizing tokens is allowed.
- nt_to_tokenstream(&self.into_nonterminal(), sess, CanSynthesizeMissingTokens::Yes)
+ nt_to_tokenstream(&self.into_nonterminal(), sess, CanSynthesizeMissingTokens::No)
}
pub fn expect_item(self) -> P<ast::Item> {
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs
index a23731c..03c83f9 100644
--- a/compiler/rustc_expand/src/config.rs
+++ b/compiler/rustc_expand/src/config.rs
@@ -2,8 +2,10 @@
use rustc_ast::ptr::P;
use rustc_ast::token::{DelimToken, Token, TokenKind};
-use rustc_ast::tokenstream::{DelimSpan, LazyTokenStream, Spacing, TokenStream, TokenTree};
-use rustc_ast::{self as ast, AstLike, AttrItem, Attribute, MetaItem};
+use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
+use rustc_ast::tokenstream::{DelimSpan, Spacing};
+use rustc_ast::tokenstream::{LazyTokenStream, TokenTree};
+use rustc_ast::{self as ast, AstLike, AttrItem, AttrStyle, Attribute, MetaItem};
use rustc_attr as attr;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::map_in_place::MapInPlace;
@@ -23,7 +25,10 @@
pub struct StripUnconfigured<'a> {
pub sess: &'a Session,
pub features: Option<&'a Features>,
- pub modified: bool,
+ /// If `true`, perform cfg-stripping on attached tokens.
+ /// This is only used for the input to derive macros,
+ /// which needs eager expansion of `cfg` and `cfg_attr`
+ pub config_tokens: bool,
}
fn get_features(
@@ -194,7 +199,7 @@
// `cfg_attr`-process the crate's attributes and compute the crate's features.
pub fn features(sess: &Session, mut krate: ast::Crate) -> (ast::Crate, Features) {
- let mut strip_unconfigured = StripUnconfigured { sess, features: None, modified: false };
+ let mut strip_unconfigured = StripUnconfigured { sess, features: None, config_tokens: false };
let unconfigured_attrs = krate.attrs.clone();
let diag = &sess.parse_sess.span_diagnostic;
@@ -241,24 +246,83 @@
pub fn configure<T: AstLike>(&mut self, mut node: T) -> Option<T> {
self.process_cfg_attrs(&mut node);
if self.in_cfg(node.attrs()) {
+ self.try_configure_tokens(&mut node);
Some(node)
} else {
- self.modified = true;
None
}
}
+ fn try_configure_tokens<T: AstLike>(&mut self, node: &mut T) {
+ if self.config_tokens {
+ if let Some(Some(tokens)) = node.tokens_mut() {
+ let attr_annotated_tokens = tokens.create_token_stream();
+ *tokens = LazyTokenStream::new(self.configure_tokens(&attr_annotated_tokens));
+ }
+ }
+ }
+
fn configure_krate_attrs(
&mut self,
mut attrs: Vec<ast::Attribute>,
) -> Option<Vec<ast::Attribute>> {
attrs.flat_map_in_place(|attr| self.process_cfg_attr(attr));
- if self.in_cfg(&attrs) {
- Some(attrs)
- } else {
- self.modified = true;
- None
+ if self.in_cfg(&attrs) { Some(attrs) } else { None }
+ }
+
+ /// Performs cfg-expansion on `stream`, producing a new `AttrAnnotatedTokenStream`.
+ /// This is only used during the invocation of `derive` proc-macros,
+ /// which require that we cfg-expand their entire input.
+ /// Normal cfg-expansion operates on parsed AST nodes via the `configure` method
+ fn configure_tokens(&mut self, stream: &AttrAnnotatedTokenStream) -> AttrAnnotatedTokenStream {
+ fn can_skip(stream: &AttrAnnotatedTokenStream) -> bool {
+ stream.0.iter().all(|(tree, _spacing)| match tree {
+ AttrAnnotatedTokenTree::Attributes(_) => false,
+ AttrAnnotatedTokenTree::Token(_) => true,
+ AttrAnnotatedTokenTree::Delimited(_, _, inner) => can_skip(inner),
+ })
}
+
+ if can_skip(stream) {
+ return stream.clone();
+ }
+
+ let trees: Vec<_> = stream
+ .0
+ .iter()
+ .flat_map(|(tree, spacing)| match tree.clone() {
+ AttrAnnotatedTokenTree::Attributes(mut data) => {
+ let mut attrs: Vec<_> = std::mem::take(&mut data.attrs).into();
+ attrs.flat_map_in_place(|attr| self.process_cfg_attr(attr));
+ data.attrs = attrs.into();
+
+ if self.in_cfg(&data.attrs) {
+ data.tokens = LazyTokenStream::new(
+ self.configure_tokens(&data.tokens.create_token_stream()),
+ );
+ Some((AttrAnnotatedTokenTree::Attributes(data), *spacing)).into_iter()
+ } else {
+ None.into_iter()
+ }
+ }
+ AttrAnnotatedTokenTree::Delimited(sp, delim, mut inner) => {
+ inner = self.configure_tokens(&inner);
+ Some((AttrAnnotatedTokenTree::Delimited(sp, delim, inner), *spacing))
+ .into_iter()
+ }
+ AttrAnnotatedTokenTree::Token(token) => {
+ if let TokenKind::Interpolated(nt) = token.kind {
+ panic!(
+ "Nonterminal should have been flattened at {:?}: {:?}",
+ token.span, nt
+ );
+ } else {
+ Some((AttrAnnotatedTokenTree::Token(token), *spacing)).into_iter()
+ }
+ }
+ })
+ .collect();
+ AttrAnnotatedTokenStream::new(trees)
}
/// Parse and expand all `cfg_attr` attributes into a list of attributes
@@ -285,9 +349,6 @@
return vec![attr];
}
- // A `#[cfg_attr]` either gets removed, or replaced with a new attribute
- self.modified = true;
-
let (cfg_predicate, expanded_attrs) = match self.parse_cfg_attr(&attr) {
None => return vec![],
Some(r) => r,
@@ -311,7 +372,7 @@
expanded_attrs
.into_iter()
.flat_map(|(item, span)| {
- let orig_tokens = attr.tokens();
+ let orig_tokens = attr.tokens().to_tokenstream();
// We are taking an attribute of the form `#[cfg_attr(pred, attr)]`
// and producing an attribute of the form `#[attr]`. We
@@ -321,25 +382,34 @@
// Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
// for `attr` when we expand it to `#[attr]`
- let pound_token = orig_tokens.trees().next().unwrap();
- if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) {
- panic!("Bad tokens for attribute {:?}", attr);
+ let mut orig_trees = orig_tokens.trees();
+ let pound_token = match orig_trees.next().unwrap() {
+ TokenTree::Token(token @ Token { kind: TokenKind::Pound, .. }) => token,
+ _ => panic!("Bad tokens for attribute {:?}", attr),
+ };
+ let pound_span = pound_token.span;
+
+ let mut trees = vec![(AttrAnnotatedTokenTree::Token(pound_token), Spacing::Alone)];
+ if attr.style == AttrStyle::Inner {
+ // For inner attributes, we do the same thing for the `!` in `#![some_attr]`
+ let bang_token = match orig_trees.next().unwrap() {
+ TokenTree::Token(token @ Token { kind: TokenKind::Not, .. }) => token,
+ _ => panic!("Bad tokens for attribute {:?}", attr),
+ };
+ trees.push((AttrAnnotatedTokenTree::Token(bang_token), Spacing::Alone));
}
// We don't really have a good span to use for the syntheized `[]`
// in `#[attr]`, so just use the span of the `#` token.
- let bracket_group = TokenTree::Delimited(
- DelimSpan::from_single(pound_token.span()),
+ let bracket_group = AttrAnnotatedTokenTree::Delimited(
+ DelimSpan::from_single(pound_span),
DelimToken::Bracket,
item.tokens
.as_ref()
.unwrap_or_else(|| panic!("Missing tokens for {:?}", item))
.create_token_stream(),
);
- let tokens = Some(LazyTokenStream::new(TokenStream::new(vec![
- (pound_token, Spacing::Alone),
- (bracket_group, Spacing::Alone),
- ])));
-
+ trees.push((bracket_group, Spacing::Alone));
+ let tokens = Some(LazyTokenStream::new(AttrAnnotatedTokenStream::new(trees)));
self.process_cfg_attr(attr::mk_attr_from_item(item, tokens, attr.style, span))
})
.collect()
@@ -457,7 +527,8 @@
self.sess.parse_sess.span_diagnostic.span_err(attr.span, msg);
}
- self.process_cfg_attrs(expr)
+ self.process_cfg_attrs(expr);
+ self.try_configure_tokens(&mut *expr);
}
}
diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs
index 27274f7..529ef7e 100644
--- a/compiler/rustc_expand/src/expand.rs
+++ b/compiler/rustc_expand/src/expand.rs
@@ -12,7 +12,7 @@
use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::visit::{self, AssocCtxt, Visitor};
-use rustc_ast::{AstLike, AttrItem, AttrStyle, Block, Inline, ItemKind, LitKind, MacArgs};
+use rustc_ast::{AstLike, AttrItem, Block, Inline, ItemKind, LitKind, MacArgs};
use rustc_ast::{MacCallStmt, MacStmtStyle, MetaItemKind, ModKind, NestedMetaItem};
use rustc_ast::{NodeId, PatKind, Path, StmtKind, Unsafe};
use rustc_ast_pretty::pprust;
@@ -611,10 +611,15 @@
let invocations = {
let mut collector = InvocationCollector {
+ // Non-derive macro invocations cannot see the results of cfg expansion - they
+ // will either be removed along with the item, or invoked before the cfg/cfg_attr
+ // attribute is expanded. Therefore, we don't need to configure the tokens
+ // Derive macros *can* see the results of cfg-expansion - they are handled
+ // specially in `fully_expand_fragment`
cfg: StripUnconfigured {
sess: &self.cx.sess,
features: self.cx.ecfg.features,
- modified: false,
+ config_tokens: false,
},
cx: self.cx,
invocations: Vec::new(),
@@ -709,13 +714,26 @@
SyntaxExtensionKind::Attr(expander) => {
self.gate_proc_macro_input(&item);
self.gate_proc_macro_attr_item(span, &item);
- let tokens = match attr.style {
- AttrStyle::Outer => item.into_tokens(&self.cx.sess.parse_sess),
- // FIXME: Properly collect tokens for inner attributes
- AttrStyle::Inner => rustc_parse::fake_token_stream(
+ let mut fake_tokens = false;
+ if let Annotatable::Item(item_inner) = &item {
+ if let ItemKind::Mod(_, mod_kind) = &item_inner.kind {
+ // FIXME: Collect tokens and use them instead of generating
+ // fake ones. These are unstable, so it needs to be
+ // fixed prior to stabilization
+ // Fake tokens when we are invoking an inner attribute, and:
+ fake_tokens = matches!(attr.style, ast::AttrStyle::Inner) &&
+ // We are invoking an attribute on the crate root, or an outline
+ // module
+ (item_inner.ident.name.is_empty() || !matches!(mod_kind, ast::ModKind::Loaded(_, Inline::Yes, _)));
+ }
+ }
+ let tokens = if fake_tokens {
+ rustc_parse::fake_token_stream(
&self.cx.sess.parse_sess,
&item.into_nonterminal(),
- ),
+ )
+ } else {
+ item.into_tokens(&self.cx.sess.parse_sess)
};
let attr_item = attr.unwrap_normal_item();
if let MacArgs::Eq(..) = attr_item.args {
@@ -897,21 +915,21 @@
}
AstFragmentKind::TraitItems => {
let mut items = SmallVec::new();
- while let Some(item) = this.parse_trait_item()? {
+ while let Some(item) = this.parse_trait_item(ForceCollect::No)? {
items.extend(item);
}
AstFragment::TraitItems(items)
}
AstFragmentKind::ImplItems => {
let mut items = SmallVec::new();
- while let Some(item) = this.parse_impl_item()? {
+ while let Some(item) = this.parse_impl_item(ForceCollect::No)? {
items.extend(item);
}
AstFragment::ImplItems(items)
}
AstFragmentKind::ForeignItems => {
let mut items = SmallVec::new();
- while let Some(item) = this.parse_foreign_item()? {
+ while let Some(item) = this.parse_foreign_item(ForceCollect::No)? {
items.extend(item);
}
AstFragment::ForeignItems(items)
diff --git a/compiler/rustc_expand/src/proc_macro.rs b/compiler/rustc_expand/src/proc_macro.rs
index 61b776f..3f84979 100644
--- a/compiler/rustc_expand/src/proc_macro.rs
+++ b/compiler/rustc_expand/src/proc_macro.rs
@@ -94,7 +94,7 @@
{
TokenTree::token(token::Interpolated(Lrc::new(item)), DUMMY_SP).into()
} else {
- nt_to_tokenstream(&item, &ecx.sess.parse_sess, CanSynthesizeMissingTokens::Yes)
+ nt_to_tokenstream(&item, &ecx.sess.parse_sess, CanSynthesizeMissingTokens::No)
};
let server = proc_macro_server::Rustc::new(ecx);
diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs
index 0b49e65..7f9e459 100644
--- a/compiler/rustc_lint/src/builtin.rs
+++ b/compiler/rustc_lint/src/builtin.rs
@@ -1,3 +1,5 @@
+// ignore-tidy-filelength
+
//! Lints in the Rust compiler.
//!
//! This contains lints which can feasibly be implemented as their own
@@ -857,11 +859,10 @@
/// ```
///
/// This syntax is now a hard error in the 2018 edition. In the 2015
- /// edition, this lint is "allow" by default, because the old code is
- /// still valid, and warning for all old code can be noisy. This lint
+ /// edition, this lint is "warn" by default. This lint
/// enables the [`cargo fix`] tool with the `--edition` flag to
/// automatically transition old code from the 2015 edition to 2018. The
- /// tool will switch this lint to "warn" and will automatically apply the
+ /// tool will run this lint and automatically apply the
/// suggested fix from the compiler (which is to add `_` to each
/// parameter). This provides a completely automated way to update old
/// code for a new edition. See [issue #41686] for more details.
@@ -869,7 +870,7 @@
/// [issue #41686]: https://github.com/rust-lang/rust/issues/41686
/// [`cargo fix`]: https://doc.rust-lang.org/cargo/commands/cargo-fix.html
pub ANONYMOUS_PARAMETERS,
- Allow,
+ Warn,
"detects anonymous parameters",
@future_incompatible = FutureIncompatibleInfo {
reference: "issue #41686 <https://github.com/rust-lang/rust/issues/41686>",
@@ -884,6 +885,10 @@
impl EarlyLintPass for AnonymousParameters {
fn check_trait_item(&mut self, cx: &EarlyContext<'_>, it: &ast::AssocItem) {
+ if cx.sess.edition() != Edition::Edition2015 {
+ // This is a hard error in future editions; avoid linting and erroring
+ return;
+ }
if let ast::AssocItemKind::Fn(box FnKind(_, ref sig, _, _)) = it.kind {
for arg in sig.decl.inputs.iter() {
if let ast::PatKind::Ident(_, ident, None) = arg.pat.kind {
@@ -2961,3 +2966,88 @@
}
}
}
+
+declare_lint! {
+ /// The `deref_nullptr` lint detects when a null pointer is dereferenced,
+ /// which causes [undefined behavior].
+ ///
+ /// ### Example
+ ///
+ /// ```rust,no_run
+ /// # #![allow(unused)]
+ /// use std::ptr;
+ /// unsafe {
+ /// let x = &*ptr::null::<i32>();
+ /// let x = ptr::addr_of!(*ptr::null::<i32>());
+ /// let x = *(0 as *const i32);
+ /// }
+ /// ```
+ ///
+ /// {{produces}}
+ ///
+ /// ### Explanation
+ ///
+ /// Dereferencing a null pointer causes [undefined behavior] even as a place expression,
+ /// like `&*(0 as *const i32)` or `addr_of!(*(0 as *const i32))`.
+ ///
+ /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+ pub DEREF_NULLPTR,
+ Warn,
+ "detects when an null pointer is dereferenced"
+}
+
+declare_lint_pass!(DerefNullPtr => [DEREF_NULLPTR]);
+
+impl<'tcx> LateLintPass<'tcx> for DerefNullPtr {
+ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &hir::Expr<'_>) {
+ /// test if expression is a null ptr
+ fn is_null_ptr(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
+ match &expr.kind {
+ rustc_hir::ExprKind::Cast(ref expr, ref ty) => {
+ if let rustc_hir::TyKind::Ptr(_) = ty.kind {
+ return is_zero(expr) || is_null_ptr(cx, expr);
+ }
+ }
+ // check for call to `core::ptr::null` or `core::ptr::null_mut`
+ rustc_hir::ExprKind::Call(ref path, _) => {
+ if let rustc_hir::ExprKind::Path(ref qpath) = path.kind {
+ if let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id() {
+ return cx.tcx.is_diagnostic_item(sym::ptr_null, def_id)
+ || cx.tcx.is_diagnostic_item(sym::ptr_null_mut, def_id);
+ }
+ }
+ }
+ _ => {}
+ }
+ false
+ }
+
+ /// test if expression is the literal `0`
+ fn is_zero(expr: &hir::Expr<'_>) -> bool {
+ match &expr.kind {
+ rustc_hir::ExprKind::Lit(ref lit) => {
+ if let LitKind::Int(a, _) = lit.node {
+ return a == 0;
+ }
+ }
+ _ => {}
+ }
+ false
+ }
+
+ if let rustc_hir::ExprKind::Unary(ref un_op, ref expr_deref) = expr.kind {
+ if let rustc_hir::UnOp::Deref = un_op {
+ if is_null_ptr(cx, expr_deref) {
+ cx.struct_span_lint(DEREF_NULLPTR, expr.span, |lint| {
+ let mut err = lint.build("dereferencing a null pointer");
+ err.span_label(
+ expr.span,
+ "this code causes undefined behavior when executed",
+ );
+ err.emit();
+ });
+ }
+ }
+ }
+ }
+}
diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs
index e2724b5..2f46969 100644
--- a/compiler/rustc_lint/src/lib.rs
+++ b/compiler/rustc_lint/src/lib.rs
@@ -206,6 +206,7 @@
UnreachablePub: UnreachablePub,
ExplicitOutlivesRequirements: ExplicitOutlivesRequirements,
InvalidValue: InvalidValue,
+ DerefNullPtr: DerefNullPtr,
]
);
};
diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs
index 27724b4..ad42366 100644
--- a/compiler/rustc_lint_defs/src/builtin.rs
+++ b/compiler/rustc_lint_defs/src/builtin.rs
@@ -2522,9 +2522,10 @@
///
/// The fix to this is to wrap the unsafe code in an `unsafe` block.
///
- /// This lint is "allow" by default because it has not yet been
- /// stabilized, and is not yet complete. See [RFC #2585] and [issue
- /// #71668] for more details
+ /// This lint is "allow" by default since this will affect a large amount
+ /// of existing code, and the exact plan for increasing the severity is
+ /// still being considered. See [RFC #2585] and [issue #71668] for more
+ /// details.
///
/// [`unsafe fn`]: https://doc.rust-lang.org/reference/unsafe-functions.html
/// [`unsafe` block]: https://doc.rust-lang.org/reference/expressions/block-expr.html#unsafe-blocks
@@ -2944,6 +2945,7 @@
NONTRIVIAL_STRUCTURAL_MATCH,
SOFT_UNSTABLE,
INLINE_NO_SANITIZE,
+ BAD_ASM_STYLE,
ASM_SUB_REGISTER,
UNSAFE_OP_IN_UNSAFE_FN,
INCOMPLETE_INCLUDE,
diff --git a/compiler/rustc_middle/src/ty/error.rs b/compiler/rustc_middle/src/ty/error.rs
index d295b17..008e6d0 100644
--- a/compiler/rustc_middle/src/ty/error.rs
+++ b/compiler/rustc_middle/src/ty/error.rs
@@ -36,6 +36,7 @@
UnsafetyMismatch(ExpectedFound<hir::Unsafety>),
AbiMismatch(ExpectedFound<abi::Abi>),
Mutability,
+ ArgumentMutability(usize),
TupleSize(ExpectedFound<usize>),
FixedArraySize(ExpectedFound<u64>),
ArgCount,
@@ -46,6 +47,7 @@
RegionsPlaceholderMismatch,
Sorts(ExpectedFound<Ty<'tcx>>),
+ ArgumentSorts(ExpectedFound<Ty<'tcx>>, usize),
IntMismatch(ExpectedFound<ty::IntVarValue>),
FloatMismatch(ExpectedFound<ty::FloatTy>),
Traits(ExpectedFound<DefId>),
@@ -110,7 +112,7 @@
AbiMismatch(values) => {
write!(f, "expected {} fn, found {} fn", values.expected, values.found)
}
- Mutability => write!(f, "types differ in mutability"),
+ ArgumentMutability(_) | Mutability => write!(f, "types differ in mutability"),
TupleSize(values) => write!(
f,
"expected a tuple with {} element{}, \
@@ -142,7 +144,7 @@
br_string(br)
),
RegionsPlaceholderMismatch => write!(f, "one type is more general than the other"),
- Sorts(values) => ty::tls::with(|tcx| {
+ ArgumentSorts(values, _) | Sorts(values) => ty::tls::with(|tcx| {
report_maybe_different(
f,
&values.expected.sort_string(tcx),
@@ -199,10 +201,11 @@
use self::TypeError::*;
match self {
CyclicTy(_) | CyclicConst(_) | UnsafetyMismatch(_) | Mismatch | AbiMismatch(_)
- | FixedArraySize(_) | Sorts(_) | IntMismatch(_) | FloatMismatch(_)
- | VariadicMismatch(_) | TargetFeatureCast(_) => false,
+ | FixedArraySize(_) | ArgumentSorts(..) | Sorts(_) | IntMismatch(_)
+ | FloatMismatch(_) | VariadicMismatch(_) | TargetFeatureCast(_) => false,
Mutability
+ | ArgumentMutability(_)
| TupleSize(_)
| ArgCount
| RegionsDoesNotOutlive(..)
@@ -339,7 +342,7 @@
use self::TypeError::*;
debug!("note_and_explain_type_err err={:?} cause={:?}", err, cause);
match err {
- Sorts(values) => {
+ ArgumentSorts(values, _) | Sorts(values) => {
match (values.expected.kind(), values.found.kind()) {
(ty::Closure(..), ty::Closure(..)) => {
db.note("no two closures, even if identical, have the same type");
diff --git a/compiler/rustc_middle/src/ty/relate.rs b/compiler/rustc_middle/src/ty/relate.rs
index ca60339..b6f93c9 100644
--- a/compiler/rustc_middle/src/ty/relate.rs
+++ b/compiler/rustc_middle/src/ty/relate.rs
@@ -179,6 +179,12 @@
} else {
relation.relate_with_variance(ty::Contravariant, a, b)
}
+ })
+ .enumerate()
+ .map(|(i, r)| match r {
+ Err(TypeError::Sorts(exp_found)) => Err(TypeError::ArgumentSorts(exp_found, i)),
+ Err(TypeError::Mutability) => Err(TypeError::ArgumentMutability(i)),
+ r => r,
});
Ok(ty::FnSig {
inputs_and_output: tcx.mk_type_list(inputs_and_output)?,
diff --git a/compiler/rustc_middle/src/ty/structural_impls.rs b/compiler/rustc_middle/src/ty/structural_impls.rs
index a969626..7290c41 100644
--- a/compiler/rustc_middle/src/ty/structural_impls.rs
+++ b/compiler/rustc_middle/src/ty/structural_impls.rs
@@ -587,6 +587,7 @@
UnsafetyMismatch(x) => UnsafetyMismatch(x),
AbiMismatch(x) => AbiMismatch(x),
Mutability => Mutability,
+ ArgumentMutability(i) => ArgumentMutability(i),
TupleSize(x) => TupleSize(x),
FixedArraySize(x) => FixedArraySize(x),
ArgCount => ArgCount,
@@ -607,6 +608,7 @@
CyclicTy(t) => return tcx.lift(t).map(|t| CyclicTy(t)),
CyclicConst(ct) => return tcx.lift(ct).map(|ct| CyclicConst(ct)),
ProjectionMismatched(x) => ProjectionMismatched(x),
+ ArgumentSorts(x, i) => return tcx.lift(x).map(|x| ArgumentSorts(x, i)),
Sorts(x) => return tcx.lift(x).map(Sorts),
ExistentialMismatch(x) => return tcx.lift(x).map(ExistentialMismatch),
ConstMismatch(x) => return tcx.lift(x).map(ConstMismatch),
diff --git a/compiler/rustc_mir_build/src/build/mod.rs b/compiler/rustc_mir_build/src/build/mod.rs
index 3a189e6..f944e5f 100644
--- a/compiler/rustc_mir_build/src/build/mod.rs
+++ b/compiler/rustc_mir_build/src/build/mod.rs
@@ -584,7 +584,7 @@
Some(UnwindAttr::Aborts) => true,
// If no attribute was found and the panic strategy is `unwind`, then we should examine
// the function's ABI string to determine whether it should abort upon panic.
- None => {
+ None if tcx.features().c_unwind => {
use Abi::*;
match abi {
// In the case of ABI's that have an `-unwind` equivalent, check whether the ABI
@@ -615,6 +615,10 @@
| Unadjusted => true,
}
}
+ // If the `c_unwind` feature gate is not active, follow the behavior that was in place
+ // prior to #76570. This is a special case: some functions have a C ABI but are meant to
+ // unwind anyway. Don't stop them.
+ None => false, // FIXME(#58794): should be `!(abi == Abi::Rust || abi == Abi::RustCall)`
}
}
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 9fead30..905077a 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -1,5 +1,6 @@
//! The main parser interface.
+#![feature(array_windows)]
#![feature(crate_visibility_modifier)]
#![feature(bindings_after_at)]
#![feature(iter_order_by)]
@@ -9,9 +10,12 @@
#![recursion_limit = "256"]
use rustc_ast as ast;
-use rustc_ast::token::{self, Nonterminal};
-use rustc_ast::tokenstream::{self, CanSynthesizeMissingTokens, LazyTokenStream, TokenStream};
+use rustc_ast::token::{self, Nonterminal, Token, TokenKind};
+use rustc_ast::tokenstream::{self, AttributesData, CanSynthesizeMissingTokens, LazyTokenStream};
+use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
+use rustc_ast::tokenstream::{Spacing, TokenStream};
use rustc_ast::AstLike;
+use rustc_ast::Attribute;
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Diagnostic, FatalError, Level, PResult};
@@ -21,8 +25,6 @@
use std::path::Path;
use std::str;
-use tracing::debug;
-
pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
#[macro_use]
@@ -255,19 +257,23 @@
// before we fall back to the stringification.
let convert_tokens =
- |tokens: Option<&LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
+ |tokens: Option<&LazyTokenStream>| Some(tokens?.create_token_stream().to_tokenstream());
let tokens = match *nt {
- Nonterminal::NtItem(ref item) => prepend_attrs(sess, &item.attrs, nt, item.tokens.as_ref()),
+ Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()),
Nonterminal::NtStmt(ref stmt) => {
- let do_prepend = |tokens| prepend_attrs(sess, stmt.attrs(), nt, tokens);
if let ast::StmtKind::Empty = stmt.kind {
- let tokens: TokenStream =
- tokenstream::TokenTree::token(token::Semi, stmt.span).into();
- do_prepend(Some(&LazyTokenStream::new(tokens)))
+ let tokens = AttrAnnotatedTokenStream::new(vec![(
+ tokenstream::AttrAnnotatedTokenTree::Token(Token::new(
+ TokenKind::Semi,
+ stmt.span,
+ )),
+ Spacing::Alone,
+ )]);
+ prepend_attrs(&stmt.attrs(), Some(&LazyTokenStream::new(tokens)))
} else {
- do_prepend(stmt.tokens())
+ prepend_attrs(&stmt.attrs(), stmt.tokens())
}
}
Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()),
@@ -283,10 +289,7 @@
Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.as_ref()),
Nonterminal::NtTT(ref tt) => Some(tt.clone().into()),
Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
- if expr.tokens.is_none() {
- debug!("missing tokens for expr {:?}", expr);
- }
- prepend_attrs(sess, &expr.attrs, nt, expr.tokens.as_ref())
+ prepend_attrs(&expr.attrs, expr.tokens.as_ref())
}
};
@@ -295,34 +298,30 @@
} else if matches!(synthesize_tokens, CanSynthesizeMissingTokens::Yes) {
return fake_token_stream(sess, nt);
} else {
- panic!("Missing tokens for nt at {:?}: {:?}", nt.span(), pprust::nonterminal_to_string(nt));
+ panic!(
+ "Missing tokens for nt {:?} at {:?}: {:?}",
+ nt,
+ nt.span(),
+ pprust::nonterminal_to_string(nt)
+ );
}
}
+fn prepend_attrs(attrs: &[Attribute], tokens: Option<&LazyTokenStream>) -> Option<TokenStream> {
+ let tokens = tokens?;
+ if attrs.is_empty() {
+ return Some(tokens.create_token_stream().to_tokenstream());
+ }
+ let attr_data = AttributesData { attrs: attrs.to_vec().into(), tokens: tokens.clone() };
+ let wrapped = AttrAnnotatedTokenStream::new(vec![(
+ AttrAnnotatedTokenTree::Attributes(attr_data),
+ Spacing::Alone,
+ )]);
+ Some(wrapped.to_tokenstream())
+}
+
pub fn fake_token_stream(sess: &ParseSess, nt: &Nonterminal) -> TokenStream {
let source = pprust::nonterminal_to_string(nt);
let filename = FileName::macro_expansion_source_code(&source);
parse_stream_from_source_str(filename, source, sess, Some(nt.span()))
}
-
-fn prepend_attrs(
- sess: &ParseSess,
- attrs: &[ast::Attribute],
- nt: &Nonterminal,
- tokens: Option<&tokenstream::LazyTokenStream>,
-) -> Option<tokenstream::TokenStream> {
- if attrs.is_empty() {
- return Some(tokens?.create_token_stream());
- }
- let mut builder = tokenstream::TokenStreamBuilder::new();
- for attr in attrs {
- // FIXME: Correctly handle tokens for inner attributes.
- // For now, we fall back to reparsing the original AST node
- if attr.style == ast::AttrStyle::Inner {
- return Some(fake_token_stream(sess, nt));
- }
- builder.push(attr.tokens());
- }
- builder.push(tokens?.create_token_stream());
- Some(builder.build())
-}
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 95d4a48..ee6ff4d 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -1,10 +1,11 @@
-use super::{AttrWrapper, Parser, PathStyle};
+use super::{AttrWrapper, Capturing, Parser, PathStyle};
use rustc_ast as ast;
use rustc_ast::attr;
use rustc_ast::token::{self, Nonterminal};
use rustc_ast_pretty::pprust;
use rustc_errors::{error_code, PResult};
use rustc_span::{sym, Span};
+use std::convert::TryInto;
use tracing::debug;
@@ -29,6 +30,7 @@
pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, AttrWrapper> {
let mut attrs: Vec<ast::Attribute> = Vec::new();
let mut just_parsed_doc_comment = false;
+ let start_pos = self.token_cursor.num_next_calls;
loop {
debug!("parse_outer_attributes: self.token={:?}", self.token);
let attr = if self.check(&token::Pound) {
@@ -74,7 +76,7 @@
break;
}
}
- Ok(AttrWrapper::new(attrs))
+ Ok(AttrWrapper::new(attrs.into(), start_pos))
}
/// Matches `attribute = # ! [ meta_item ]`.
@@ -177,6 +179,7 @@
crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
let mut attrs: Vec<ast::Attribute> = vec![];
loop {
+ let start_pos: u32 = self.token_cursor.num_next_calls.try_into().unwrap();
// Only try to parse if it is an inner attribute (has `!`).
let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
@@ -191,6 +194,18 @@
None
};
if let Some(attr) = attr {
+ let end_pos: u32 = self.token_cursor.num_next_calls.try_into().unwrap();
+ // If we are currently capturing tokens, mark the location of this inner attribute.
+ // If capturing ends up creating a `LazyTokenStream`, we will include
+ // this replace range with it, removing the inner attribute from the final
+ // `AttrAnnotatedTokenStream`. Inner attributes are stored in the parsed AST node.
+ // During macro expansion, they are selectively inserted back into the
+ // token stream (the first inner attribute is removed each time we invoke the
+ // corresponding macro).
+ let range = start_pos..end_pos;
+ if let Capturing::Yes = self.capture_state.capturing {
+ self.capture_state.inner_attr_ranges.insert(attr.id, (range, vec![]));
+ }
attrs.push(attr);
} else {
break;
@@ -311,6 +326,9 @@
// One of the attributes may either itself be a macro,
// or expand to macro attributes (`cfg_attr`).
attrs.iter().any(|attr| {
+ if attr.is_doc_comment() {
+ return false;
+ }
attr.ident().map_or(true, |ident| {
ident.name == sym::cfg_attr || !rustc_feature::is_builtin_attr_name(ident.name)
})
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 36a0fda..35759a3 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -1,12 +1,14 @@
-use super::attr;
-use super::{ForceCollect, Parser, TokenCursor, TrailingToken};
-use rustc_ast::token::{self, Token, TokenKind};
-use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
-use rustc_ast::tokenstream::{DelimSpan, LazyTokenStream, Spacing};
-use rustc_ast::AstLike;
+use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
+use rustc_ast::token::{self, DelimToken, Token, TokenKind};
+use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttributesData, CreateTokenStream};
+use rustc_ast::tokenstream::{AttrAnnotatedTokenTree, DelimSpan, LazyTokenStream, Spacing};
use rustc_ast::{self as ast};
+use rustc_ast::{AstLike, AttrVec, Attribute};
use rustc_errors::PResult;
-use rustc_span::{Span, DUMMY_SP};
+use rustc_span::{sym, Span, DUMMY_SP};
+
+use std::convert::TryInto;
+use std::ops::Range;
/// A wrapper type to ensure that the parser handles outer attributes correctly.
/// When we parse outer attributes, we need to ensure that we capture tokens
@@ -23,23 +25,158 @@
/// cannot directly access the `attrs` field
#[derive(Debug, Clone)]
pub struct AttrWrapper {
- attrs: Vec<ast::Attribute>,
+ attrs: AttrVec,
+ // The start of the outer attributes in the token cursor.
+ // This allows us to create a `ReplaceRange` for the entire attribute
+ // target, including outer attributes.
+ start_pos: usize,
}
+// This struct is passed around very frequently,
+// so make sure it doesn't accidentally get larger
+#[cfg(target_arch = "x86_64")]
+rustc_data_structures::static_assert_size!(AttrWrapper, 16);
+
impl AttrWrapper {
- pub fn empty() -> AttrWrapper {
- AttrWrapper { attrs: vec![] }
+ pub(super) fn new(attrs: AttrVec, start_pos: usize) -> AttrWrapper {
+ AttrWrapper { attrs, start_pos }
}
- pub fn new(attrs: Vec<ast::Attribute>) -> AttrWrapper {
- AttrWrapper { attrs }
+ pub fn empty() -> AttrWrapper {
+ AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX }
}
// FIXME: Delay span bug here?
- pub(crate) fn take_for_recovery(self) -> Vec<ast::Attribute> {
+ pub(crate) fn take_for_recovery(self) -> AttrVec {
self.attrs
}
+
+ // FIXME: require passing an NT to prevent misuse of this method
+ pub(crate) fn prepend_to_nt_inner(self, attrs: &mut Vec<Attribute>) {
+ let mut self_attrs: Vec<_> = self.attrs.into();
+ std::mem::swap(attrs, &mut self_attrs);
+ attrs.extend(self_attrs);
+ }
+
pub fn is_empty(&self) -> bool {
self.attrs.is_empty()
}
+
+ pub fn maybe_needs_tokens(&self) -> bool {
+ crate::parser::attr::maybe_needs_tokens(&self.attrs)
+ }
+}
+
+/// Returns `true` if `attrs` contains a `cfg` or `cfg_attr` attribute
+fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
+ // NOTE: Builtin attributes like `cfg` and `cfg_attr` cannot be renamed via imports.
+ // Therefore, the absence of a literal `cfg` or `cfg_attr` guarantees that
+ // we don't need to do any eager expansion.
+ attrs.iter().any(|attr| {
+ attr.ident().map_or(false, |ident| ident.name == sym::cfg || ident.name == sym::cfg_attr)
+ })
+}
+
+// Produces a `TokenStream` on-demand. Using `cursor_snapshot`
+// and `num_calls`, we can reconstruct the `TokenStream` seen
+// by the callback. This allows us to avoid producing a `TokenStream`
+// if it is never needed - for example, a captured `macro_rules!`
+// argument that is never passed to a proc macro.
+// In practice token stream creation happens rarely compared to
+// calls to `collect_tokens` (see some statistics in #78736),
+// so we are doing as little up-front work as possible.
+//
+// This also makes `Parser` very cheap to clone, since
+// there is no intermediate collection buffer to clone.
+#[derive(Clone)]
+struct LazyTokenStreamImpl {
+ start_token: (Token, Spacing),
+ cursor_snapshot: TokenCursor,
+ num_calls: usize,
+ break_last_token: bool,
+ replace_ranges: Box<[ReplaceRange]>,
+}
+
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
+rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144);
+
+impl CreateTokenStream for LazyTokenStreamImpl {
+ fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
+ // The token produced by the final call to `next` or `next_desugared`
+ // was not actually consumed by the callback. The combination
+ // of chaining the initial token and using `take` produces the desired
+ // result - we produce an empty `TokenStream` if no calls were made,
+ // and omit the final token otherwise.
+ let mut cursor_snapshot = self.cursor_snapshot.clone();
+ let tokens =
+ std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
+ .chain((0..self.num_calls).map(|_| {
+ let token = if cursor_snapshot.desugar_doc_comments {
+ cursor_snapshot.next_desugared()
+ } else {
+ cursor_snapshot.next()
+ };
+ (FlatToken::Token(token.0), token.1)
+ }))
+ .take(self.num_calls);
+
+ if !self.replace_ranges.is_empty() {
+ let mut tokens: Vec<_> = tokens.collect();
+ let mut replace_ranges = self.replace_ranges.clone();
+ replace_ranges.sort_by_key(|(range, _)| range.start);
+
+ #[cfg(debug_assertions)]
+ {
+ for [(range, tokens), (next_range, next_tokens)] in replace_ranges.array_windows() {
+ assert!(
+ range.end <= next_range.start || range.end >= next_range.end,
+ "Replace ranges should either be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
+ range,
+ tokens,
+ next_range,
+ next_tokens,
+ );
+ }
+ }
+
+ // Process the replace ranges, starting from the highest start
+ // position and working our way back. If we have tokens like:
+ //
+ // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
+ //
+ // Then we will generate replace ranges for both
+ // the `#[cfg(FALSE)] field: bool` and the entire
+ // `#[cfg(FALSE)]` struct Foo { #[cfg(FALSE)] field: bool }`
+ //
+ // By starting processing from the replace range with the greatest
+ // start position, we ensure that any replace range which encloses
+ // another replace range will capture the *replaced* tokens for the inner
+ // range, not the original tokens.
+ for (range, new_tokens) in replace_ranges.iter().rev() {
+ assert!(!range.is_empty(), "Cannot replace an empty range: {:?}", range);
+ // Replace ranges are only allowed to decrease the number of tokens.
+ assert!(
+ range.len() >= new_tokens.len(),
+ "Range {:?} has greater len than {:?}",
+ range,
+ new_tokens
+ );
+
+ // Replace any removed tokens with `FlatToken::Empty`.
+ // This keeps the total length of `tokens` constant throughout the
+ // replacement process, allowing us to use all of the `ReplaceRanges` entries
+ // without adjusting indices.
+ let filler = std::iter::repeat((FlatToken::Empty, Spacing::Alone))
+ .take(range.len() - new_tokens.len());
+
+ tokens.splice(
+ (range.start as usize)..(range.end as usize),
+ new_tokens.clone().into_iter().chain(filler),
+ );
+ }
+ make_token_stream(tokens.into_iter(), self.break_last_token)
+ } else {
+ make_token_stream(tokens, self.break_last_token)
+ }
+ }
}
impl<'a> Parser<'a> {
@@ -65,106 +202,195 @@
force_collect: ForceCollect,
f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, (R, TrailingToken)>,
) -> PResult<'a, R> {
- if matches!(force_collect, ForceCollect::No) && !attr::maybe_needs_tokens(&attrs.attrs) {
- return Ok(f(self, attrs.attrs)?.0);
+ // We only bail out when nothing could possibly observe the collected tokens:
+ // 1. We cannot be force collecting tokens (since force-collecting requires tokens
+ // by definition
+ if matches!(force_collect, ForceCollect::No)
+ // None of our outer attributes can require tokens (e.g. a proc-macro)
+ && !attrs.maybe_needs_tokens()
+ // If our target supports custom inner attributes, then we cannot bail
+ // out early, since we may need to capture tokens for a custom inner attribute
+ // invocation.
+ && !R::SUPPORTS_CUSTOM_INNER_ATTRS
+ // Never bail out early in `capture_cfg` mode, since there might be `#[cfg]`
+ // or `#[cfg_attr]` attributes.
+ && !self.capture_cfg
+ {
+ return Ok(f(self, attrs.attrs.into())?.0);
}
+
let start_token = (self.token.clone(), self.token_spacing);
let cursor_snapshot = self.token_cursor.clone();
- let (mut ret, trailing_token) = f(self, attrs.attrs)?;
- let tokens = match ret.tokens_mut() {
- Some(tokens) if tokens.is_none() => tokens,
- _ => return Ok(ret),
- };
+ let has_outer_attrs = !attrs.attrs.is_empty();
+ let prev_capturing = std::mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
+ let replace_ranges_start = self.capture_state.replace_ranges.len();
- // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
- // and `num_calls`, we can reconstruct the `TokenStream` seen
- // by the callback. This allows us to avoid producing a `TokenStream`
- // if it is never needed - for example, a captured `macro_rules!`
- // argument that is never passed to a proc macro.
- // In practice token stream creation happens rarely compared to
- // calls to `collect_tokens` (see some statistics in #78736),
- // so we are doing as little up-front work as possible.
- //
- // This also makes `Parser` very cheap to clone, since
- // there is no intermediate collection buffer to clone.
- #[derive(Clone)]
- struct LazyTokenStreamImpl {
- start_token: (Token, Spacing),
- cursor_snapshot: TokenCursor,
- num_calls: usize,
- desugar_doc_comments: bool,
- append_unglued_token: Option<TreeAndSpacing>,
+ let ret = f(self, attrs.attrs.into());
+
+ self.capture_state.capturing = prev_capturing;
+
+ let (mut ret, trailing) = ret?;
+
+ // When we're not in `capture-cfg` mode, then bail out early if:
+ // 1. Our target doesn't support tokens at all (e.g we're parsing an `NtIdent`)
+ // so there's nothing for us to do.
+ // 2. Our target already has tokens set (e.g. we've parsed something
+ // like `#[my_attr] $item`. The actual parsing code takes care of prepending
+ // any attributes to the nonterminal, so we don't need to modify the
+ // already captured tokens.
+ // Note that this check is independent of `force_collect`- if we already
+ // have tokens, or can't even store them, then there's never a need to
+ // force collection of new tokens.
+ if !self.capture_cfg && matches!(ret.tokens_mut(), None | Some(Some(_))) {
+ return Ok(ret);
}
- impl CreateTokenStream for LazyTokenStreamImpl {
- fn create_token_stream(&self) -> TokenStream {
- if self.num_calls == 0 {
- return TokenStream::new(vec![]);
- }
- let mut cursor_snapshot = self.cursor_snapshot.clone();
- // Don't skip `None` delimiters, since we want to pass them to
- // proc macros. Normally, we'll end up capturing `TokenKind::Interpolated`,
- // which gets converted to a `None`-delimited group when we invoke
- // a proc-macro. However, it's possible to already have a `None`-delimited
- // group in the stream (such as when parsing the output of a proc-macro,
- // or in certain unusual cases with cross-crate `macro_rules!` macros).
- cursor_snapshot.skip_none_delims = false;
+ // This is very similar to the bail out check at the start of this function.
+ // Now that we've parsed an AST node, we have more information available.
+ if matches!(force_collect, ForceCollect::No)
+ // We now have inner attributes available, so this check is more precise
+ // than `attrs.maybe_needs_tokens()` at the start of the function.
+ // As a result, we don't need to check `R::SUPPORTS_CUSTOM_INNER_ATTRS`
+ && !crate::parser::attr::maybe_needs_tokens(ret.attrs())
+ // Subtle: We call `has_cfg_or_cfg_attr` with the attrs from `ret`.
+ // This ensures that we consider inner attributes (e.g. `#![cfg]`),
+ // which require us to have tokens available
+ // We also call `has_cfg_or_cfg_attr` at the beginning of this function,
+ // but we only bail out if there's no possibility of inner attributes
+ // (!R::SUPPORTS_CUSTOM_INNER_ATTRS)
+ // We only care about `#[cfg]` or `#[cfg_attr]` in `capture_cfg`
+ // mode - during normal parsing, we don't need any special capturing
+ // for those attributes, since they're builtin.
+ && !(self.capture_cfg && has_cfg_or_cfg_attr(ret.attrs()))
+ {
+ return Ok(ret);
+ }
- // The token produced by the final call to `next` or `next_desugared`
- // was not actually consumed by the callback.
- let num_calls = self.num_calls - 1;
- let mut i = 0;
- let tokens =
- std::iter::once(self.start_token.clone()).chain(std::iter::from_fn(|| {
- if i >= num_calls {
- return None;
- }
-
- let token = if self.desugar_doc_comments {
- cursor_snapshot.next_desugared()
- } else {
- cursor_snapshot.next()
- };
-
- // When the `LazyTokenStreamImpl` was original produced, we did *not*
- // include `NoDelim` tokens in `num_calls`, since they are normally ignored
- // by the parser. Therefore, we only increment our counter for other types of tokens.
- if !matches!(
- token.0.kind,
- token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim)
- ) {
- i += 1;
- }
- Some(token)
- }));
-
- make_token_stream(tokens, self.append_unglued_token.clone())
+ let mut inner_attr_replace_ranges = Vec::new();
+ // Take the captured ranges for any inner attributes that we parsed.
+ for inner_attr in ret.attrs().iter().filter(|a| a.style == ast::AttrStyle::Inner) {
+ if let Some(attr_range) = self.capture_state.inner_attr_ranges.remove(&inner_attr.id) {
+ inner_attr_replace_ranges.push(attr_range);
+ } else {
+ self.sess
+ .span_diagnostic
+ .delay_span_bug(inner_attr.span, "Missing token range for attribute");
}
}
- let mut num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
- match trailing_token {
+ let replace_ranges_end = self.capture_state.replace_ranges.len();
+
+ let cursor_snapshot_next_calls = cursor_snapshot.num_next_calls;
+ let mut end_pos = self.token_cursor.num_next_calls;
+
+ // Capture a trailing token if requested by the callback 'f'
+ match trailing {
TrailingToken::None => {}
TrailingToken::Semi => {
assert_eq!(self.token.kind, token::Semi);
- num_calls += 1;
+ end_pos += 1;
}
TrailingToken::MaybeComma => {
if self.token.kind == token::Comma {
- num_calls += 1;
+ end_pos += 1;
}
}
}
- *tokens = Some(LazyTokenStream::new(LazyTokenStreamImpl {
+ // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens),
+ // then extend the range of captured tokens to include it, since the parser
+ // was not actually bumped past it. When the `LazyTokenStream` gets converted
+ // into a `AttrAnnotatedTokenStream`, we will create the proper token.
+ if self.token_cursor.break_last_token {
+ assert_eq!(
+ trailing,
+ TrailingToken::None,
+ "Cannot set `break_last_token` and have trailing token"
+ );
+ end_pos += 1;
+ }
+
+ let num_calls = end_pos - cursor_snapshot_next_calls;
+
+ // If we have no attributes, then we will never need to
+ // use any replace ranges.
+ let replace_ranges: Box<[ReplaceRange]> = if ret.attrs().is_empty() && !self.capture_cfg {
+ Box::new([])
+ } else {
+ // Grab any replace ranges that occur *inside* the current AST node.
+ // We will perform the actual replacement when we convert the `LazyTokenStream`
+ // to a `AttrAnnotatedTokenStream`
+ let start_calls: u32 = cursor_snapshot_next_calls.try_into().unwrap();
+ self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
+ .iter()
+ .cloned()
+ .chain(inner_attr_replace_ranges.clone().into_iter())
+ .map(|(range, tokens)| {
+ ((range.start - start_calls)..(range.end - start_calls), tokens)
+ })
+ .collect()
+ };
+
+ let tokens = LazyTokenStream::new(LazyTokenStreamImpl {
start_token,
num_calls,
cursor_snapshot,
- desugar_doc_comments: self.desugar_doc_comments,
- append_unglued_token: self.token_cursor.append_unglued_token.clone(),
- }));
+ break_last_token: self.token_cursor.break_last_token,
+ replace_ranges,
+ });
+ // If we support tokens at all
+ if let Some(target_tokens) = ret.tokens_mut() {
+ if let Some(target_tokens) = target_tokens {
+ assert!(
+ !self.capture_cfg,
+ "Encountered existing tokens with capture_cfg set: {:?}",
+ target_tokens
+ );
+ } else {
+ // Store our newly captured tokens into the AST node
+ *target_tokens = Some(tokens.clone());
+ };
+ }
+
+ let final_attrs = ret.attrs();
+
+ // If `capture_cfg` is set and we're inside a recursive call to
+ // `collect_tokens_trailing_token`, then we need to register a replace range
+ // if we have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager cfg-expansion
+ // on the captured token stream.
+ if self.capture_cfg
+ && matches!(self.capture_state.capturing, Capturing::Yes)
+ && has_cfg_or_cfg_attr(&final_attrs)
+ {
+ let attr_data = AttributesData { attrs: final_attrs.to_vec().into(), tokens };
+
+ // Replace the entire AST node that we just parsed, including attributes,
+ // with a `FlatToken::AttrTarget`. If this AST node is inside an item
+ // that has `#[derive]`, then this will allow us to cfg-expand this
+ // AST node.
+ let start_pos =
+ if has_outer_attrs { attrs.start_pos } else { cursor_snapshot_next_calls };
+ let new_tokens = vec![(FlatToken::AttrTarget(attr_data), Spacing::Alone)];
+
+ assert!(
+ !self.token_cursor.break_last_token,
+ "Should not have unglued last token with cfg attr"
+ );
+ let range: Range<u32> = (start_pos.try_into().unwrap())..(end_pos.try_into().unwrap());
+ self.capture_state.replace_ranges.push((range, new_tokens));
+ self.capture_state.replace_ranges.extend(inner_attr_replace_ranges);
+ }
+
+ // Only clear our `replace_ranges` when we're finished capturing entirely.
+ if matches!(self.capture_state.capturing, Capturing::No) {
+ self.capture_state.replace_ranges.clear();
+ // We don't clear `inner_attr_ranges`, as doing so repeatedly
+ // had a measurable performance impact. Most inner attributes that
+ // we insert will get removed - when we drop the parser, we'll free
+ // up the memory used by any attributes that we didn't remove from the map.
+ }
Ok(ret)
}
}
@@ -172,43 +398,112 @@
/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
/// of open and close delims.
+// FIXME(#67062): Currently, we don't parse `None`-delimited groups correctly,
+// which can cause us to end up with mismatched `None` delimiters in our
+// captured tokens. This function contains several hacks to work around this -
+// essentially, we throw away mismatched `None` delimiters when we encounter them.
+// Once we properly parse `None` delimiters, they can be captured just like any
+// other tokens, and these hacks can be removed.
fn make_token_stream(
- tokens: impl Iterator<Item = (Token, Spacing)>,
- append_unglued_token: Option<TreeAndSpacing>,
-) -> TokenStream {
+ mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
+ break_last_token: bool,
+) -> AttrAnnotatedTokenStream {
#[derive(Debug)]
struct FrameData {
open: Span,
- inner: Vec<(TokenTree, Spacing)>,
+ open_delim: DelimToken,
+ inner: Vec<(AttrAnnotatedTokenTree, Spacing)>,
}
- let mut stack = vec![FrameData { open: DUMMY_SP, inner: vec![] }];
- for (token, spacing) in tokens {
+ let mut stack =
+ vec![FrameData { open: DUMMY_SP, open_delim: DelimToken::NoDelim, inner: vec![] }];
+ let mut token_and_spacing = iter.next();
+ while let Some((token, spacing)) = token_and_spacing {
match token {
- Token { kind: TokenKind::OpenDelim(_), span } => {
- stack.push(FrameData { open: span, inner: vec![] });
+ FlatToken::Token(Token { kind: TokenKind::OpenDelim(delim), span }) => {
+ stack.push(FrameData { open: span, open_delim: delim, inner: vec![] });
}
- Token { kind: TokenKind::CloseDelim(delim), span } => {
- let frame_data = stack.pop().expect("Token stack was empty!");
+ FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
+ // HACK: If we encounter a mismatched `None` delimiter at the top
+ // level, just ignore it.
+ if matches!(delim, DelimToken::NoDelim)
+ && (stack.len() == 1
+ || !matches!(stack.last_mut().unwrap().open_delim, DelimToken::NoDelim))
+ {
+ token_and_spacing = iter.next();
+ continue;
+ }
+ let frame_data = stack
+ .pop()
+ .unwrap_or_else(|| panic!("Token stack was empty for token: {:?}", token));
+
+ // HACK: If our current frame has a mismatched opening `None` delimiter,
+ // merge our current frame with the one above it. That is, transform
+ // `[ { < first second } third ]` into `[ { first second } third ]`
+ if !matches!(delim, DelimToken::NoDelim)
+ && matches!(frame_data.open_delim, DelimToken::NoDelim)
+ {
+ stack.last_mut().unwrap().inner.extend(frame_data.inner);
+ // Process our closing delimiter again, this time at the previous
+ // frame in the stack
+ token_and_spacing = Some((token, spacing));
+ continue;
+ }
+
+ assert_eq!(
+ frame_data.open_delim, delim,
+ "Mismatched open/close delims: open={:?} close={:?}",
+ frame_data.open, span
+ );
let dspan = DelimSpan::from_pair(frame_data.open, span);
- let stream = TokenStream::new(frame_data.inner);
- let delimited = TokenTree::Delimited(dspan, delim, stream);
+ let stream = AttrAnnotatedTokenStream::new(frame_data.inner);
+ let delimited = AttrAnnotatedTokenTree::Delimited(dspan, delim, stream);
stack
.last_mut()
- .unwrap_or_else(|| panic!("Bottom token frame is missing for tokens!"))
+ .unwrap_or_else(|| {
+ panic!("Bottom token frame is missing for token: {:?}", token)
+ })
.inner
.push((delimited, Spacing::Alone));
}
- token => {
- stack
- .last_mut()
- .expect("Bottom token frame is missing!")
- .inner
- .push((TokenTree::Token(token), spacing));
- }
+ FlatToken::Token(token) => stack
+ .last_mut()
+ .expect("Bottom token frame is missing!")
+ .inner
+ .push((AttrAnnotatedTokenTree::Token(token), spacing)),
+ FlatToken::AttrTarget(data) => stack
+ .last_mut()
+ .expect("Bottom token frame is missing!")
+ .inner
+ .push((AttrAnnotatedTokenTree::Attributes(data), spacing)),
+ FlatToken::Empty => {}
}
+ token_and_spacing = iter.next();
+ }
+ // HACK: If we don't have a closing `None` delimiter for our last
+ // frame, merge the frame with the top-level frame. That is,
+ // turn `< first second` into `first second`
+ if stack.len() == 2 && stack[1].open_delim == DelimToken::NoDelim {
+ let temp_buf = stack.pop().unwrap();
+ stack.last_mut().unwrap().inner.extend(temp_buf.inner);
}
let mut final_buf = stack.pop().expect("Missing final buf!");
- final_buf.inner.extend(append_unglued_token);
+ if break_last_token {
+ let (last_token, spacing) = final_buf.inner.pop().unwrap();
+ if let AttrAnnotatedTokenTree::Token(last_token) = last_token {
+ let unglued_first = last_token.kind.break_two_token_op().unwrap().0;
+
+ // A 'unglued' token is always two ASCII characters
+ let mut first_span = last_token.span.shrink_to_lo();
+ first_span = first_span.with_hi(first_span.lo() + rustc_span::BytePos(1));
+
+ final_buf.inner.push((
+ AttrAnnotatedTokenTree::Token(Token::new(unglued_first, first_span)),
+ spacing,
+ ));
+ } else {
+ panic!("Unexpected last token {:?}", last_token)
+ }
+ }
assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
- TokenStream::new(final_buf.inner)
+ AttrAnnotatedTokenStream::new(final_buf.inner)
}
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 77e85c0..70a5ac6 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -666,21 +666,23 @@
);
match x {
Ok((_, _, false)) => {
- self.bump(); // `>`
- match self.parse_expr() {
- Ok(_) => {
- e.span_suggestion_verbose(
- binop.span.shrink_to_lo(),
- TURBOFISH_SUGGESTION_STR,
- "::".to_string(),
- Applicability::MaybeIncorrect,
- );
- e.emit();
- *expr = self.mk_expr_err(expr.span.to(self.prev_token.span));
- return Ok(());
- }
- Err(mut err) => {
- err.cancel();
+ if self.eat(&token::Gt) {
+ match self.parse_expr() {
+ Ok(_) => {
+ e.span_suggestion_verbose(
+ binop.span.shrink_to_lo(),
+ TURBOFISH_SUGGESTION_STR,
+ "::".to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ e.emit();
+ *expr =
+ self.mk_expr_err(expr.span.to(self.prev_token.span));
+ return Ok(());
+ }
+ Err(mut err) => {
+ err.cancel();
+ }
}
}
}
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 02ee268..e155b3f 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -2581,19 +2581,17 @@
attrs: AttrWrapper,
f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, P<Expr>>,
) -> PResult<'a, P<Expr>> {
- // FIXME - come up with a nice way to properly forward `ForceCollect`from
- // the nonterminal parsing code. TThis approach iscorrect, but will cause
- // us to unnecessarily capture tokens for exprs that have only builtin
- // attributes. Revisit this before #![feature(stmt_expr_attributes)] is stabilized
- let force_collect = if attrs.is_empty() { ForceCollect::No } else { ForceCollect::Yes };
- self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
+ self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let res = f(this, attrs)?;
let trailing = if this.restrictions.contains(Restrictions::STMT_EXPR)
&& this.token.kind == token::Semi
{
TrailingToken::Semi
} else {
- TrailingToken::None
+ // FIXME - pass this through from the place where we know
+ // we need a comma, rather than assuming that `#[attr] expr,`
+ // always captures a trailing comma
+ TrailingToken::MaybeComma
};
Ok((res, trailing))
})
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 70dbaa5..2b7b584 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -103,20 +103,11 @@
// over when we bump the parser
if let token::Interpolated(nt) = &self.token.kind {
if let token::NtItem(item) = &**nt {
- let item = item.clone();
+ let mut item = item.clone();
+ self.bump();
- return self.collect_tokens_trailing_token(
- attrs,
- force_collect,
- |this, mut attrs| {
- let mut item = item;
- mem::swap(&mut item.attrs, &mut attrs);
- item.attrs.extend(attrs);
- // Bump the parser so the we capture the token::Interpolated
- this.bump();
- Ok((Some(item.into_inner()), TrailingToken::None))
- },
- );
+ attrs.prepend_to_nt_inner(&mut item.attrs);
+ return Ok(Some(item.into_inner()));
}
};
@@ -530,7 +521,7 @@
generics.where_clause = self.parse_where_clause()?;
- let impl_items = self.parse_item_list(attrs, |p| p.parse_impl_item())?;
+ let impl_items = self.parse_item_list(attrs, |p| p.parse_impl_item(ForceCollect::No))?;
let item_kind = match ty_second {
Some(ty_second) => {
@@ -718,22 +709,32 @@
} else {
// It's a normal trait.
tps.where_clause = self.parse_where_clause()?;
- let items = self.parse_item_list(attrs, |p| p.parse_trait_item())?;
+ let items = self.parse_item_list(attrs, |p| p.parse_trait_item(ForceCollect::No))?;
Ok((ident, ItemKind::Trait(box TraitKind(is_auto, unsafety, tps, bounds, items))))
}
}
- pub fn parse_impl_item(&mut self) -> PResult<'a, Option<Option<P<AssocItem>>>> {
- self.parse_assoc_item(|_| true)
+ pub fn parse_impl_item(
+ &mut self,
+ force_collect: ForceCollect,
+ ) -> PResult<'a, Option<Option<P<AssocItem>>>> {
+ self.parse_assoc_item(|_| true, force_collect)
}
- pub fn parse_trait_item(&mut self) -> PResult<'a, Option<Option<P<AssocItem>>>> {
- self.parse_assoc_item(|edition| edition >= Edition::Edition2018)
+ pub fn parse_trait_item(
+ &mut self,
+ force_collect: ForceCollect,
+ ) -> PResult<'a, Option<Option<P<AssocItem>>>> {
+ self.parse_assoc_item(|edition| edition >= Edition::Edition2018, force_collect)
}
/// Parses associated items.
- fn parse_assoc_item(&mut self, req_name: ReqName) -> PResult<'a, Option<Option<P<AssocItem>>>> {
- Ok(self.parse_item_(req_name, ForceCollect::No)?.map(
+ fn parse_assoc_item(
+ &mut self,
+ req_name: ReqName,
+ force_collect: ForceCollect,
+ ) -> PResult<'a, Option<Option<P<AssocItem>>>> {
+ Ok(self.parse_item_(req_name, force_collect)?.map(
|Item { attrs, id, span, vis, ident, kind, tokens }| {
let kind = match AssocItemKind::try_from(kind) {
Ok(kind) => kind,
@@ -918,14 +919,17 @@
unsafety: Unsafe,
) -> PResult<'a, ItemInfo> {
let abi = self.parse_abi(); // ABI?
- let items = self.parse_item_list(attrs, |p| p.parse_foreign_item())?;
+ let items = self.parse_item_list(attrs, |p| p.parse_foreign_item(ForceCollect::No))?;
let module = ast::ForeignMod { unsafety, abi, items };
Ok((Ident::invalid(), ItemKind::ForeignMod(module)))
}
/// Parses a foreign item (one in an `extern { ... }` block).
- pub fn parse_foreign_item(&mut self) -> PResult<'a, Option<Option<P<ForeignItem>>>> {
- Ok(self.parse_item_(|_| true, ForceCollect::No)?.map(
+ pub fn parse_foreign_item(
+ &mut self,
+ force_collect: ForceCollect,
+ ) -> PResult<'a, Option<Option<P<ForeignItem>>>> {
+ Ok(self.parse_item_(|_| true, force_collect)?.map(
|Item { attrs, id, span, vis, ident, kind, tokens }| {
let kind = match ForeignItemKind::try_from(kind) {
Ok(kind) => kind,
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 2fa25e4..ed95a56 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -19,13 +19,16 @@
use rustc_ast::ptr::P;
use rustc_ast::token::{self, DelimToken, Token, TokenKind};
+use rustc_ast::tokenstream::AttributesData;
use rustc_ast::tokenstream::{self, DelimSpan, Spacing};
-use rustc_ast::tokenstream::{TokenStream, TokenTree, TreeAndSpacing};
+use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::AttrId;
use rustc_ast::DUMMY_NODE_ID;
use rustc_ast::{self as ast, AnonConst, AstLike, AttrStyle, AttrVec, Const, CrateSugar, Extern};
use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit, Unsafe};
use rustc_ast::{Visibility, VisibilityKind};
use rustc_ast_pretty::pprust;
+use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use rustc_errors::PResult;
use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, FatalError};
@@ -34,6 +37,7 @@
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use tracing::debug;
+use std::ops::Range;
use std::{cmp, mem, slice};
bitflags::bitflags! {
@@ -64,6 +68,7 @@
No,
}
+#[derive(Debug, Eq, PartialEq)]
pub enum TrailingToken {
None,
Semi,
@@ -111,6 +116,7 @@
pub token_spacing: Spacing,
/// The previous token.
pub prev_token: Token,
+ pub capture_cfg: bool,
restrictions: Restrictions,
expected_tokens: Vec<TokenType>,
// Important: This must only be advanced from `next_tok`
@@ -134,6 +140,44 @@
pub last_type_ascription: Option<(Span, bool /* likely path typo */)>,
/// If present, this `Parser` is not parsing Rust code but rather a macro call.
subparser_name: Option<&'static str>,
+ capture_state: CaptureState,
+}
+
+/// Indicates a range of tokens that should be replaced by
+/// the tokens in the provided vector. This is used in two
+/// places during token collection:
+///
+/// 1. During the parsing of an AST node that may have a `#[derive]`
+/// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`
+/// In this case, we use a `ReplaceRange` to replace the entire inner AST node
+/// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
+/// on a `AttrAnnotatedTokenStream`
+///
+/// 2. When we parse an inner attribute while collecting tokens. We
+/// remove inner attributes from the token stream entirely, and
+/// instead track them through the `attrs` field on the AST node.
+/// This allows us to easily manipulate them (for example, removing
+/// the first macro inner attribute to invoke a proc-macro).
+/// When we create a `TokenStream`, the inner attributes get inserted
+/// into the proper place in the token stream.
+pub type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);
+
+/// Controls how we capture tokens. Capturing can be expensive,
+/// so we try to avoid performing capturing in cases where
+/// we will never need an `AttrAnnotatedTokenStream`
+#[derive(Copy, Clone)]
+pub enum Capturing {
+ /// We aren't performing any capturing - this is the default mode.
+ No,
+ /// We are capturing tokens
+ Yes,
+}
+
+#[derive(Clone)]
+struct CaptureState {
+ capturing: Capturing,
+ replace_ranges: Vec<ReplaceRange>,
+ inner_attr_ranges: FxHashMap<AttrId, ReplaceRange>,
}
impl<'a> Drop for Parser<'a> {
@@ -167,18 +211,11 @@
// want to capture just the first 'unglued' token.
// For example, capturing the `Vec<u8>`
// in `Option<Vec<u8>>` requires us to unglue
- // the trailing `>>` token. The `append_unglued_token`
+ // the trailing `>>` token. The `break_last_token`
// field is used to track this token - it gets
// appended to the captured stream when
// we evaluate a `LazyTokenStream`
- append_unglued_token: Option<TreeAndSpacing>,
- // If `true`, skip the delimiters for `None`-delimited groups,
- // and just yield the inner tokens. This is `true` during
- // normal parsing, since the parser code is not currently prepared
- // to handle `None` delimiters. When capturing a `TokenStream`,
- // however, we want to handle `None`-delimiters, since
- // proc-macros always see `None`-delimited groups.
- skip_none_delims: bool,
+ break_last_token: bool,
}
#[derive(Clone)]
@@ -191,13 +228,13 @@
}
impl TokenCursorFrame {
- fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream, skip_none_delims: bool) -> Self {
+ fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self {
TokenCursorFrame {
delim,
span,
- open_delim: delim == token::NoDelim && skip_none_delims,
+ open_delim: false,
tree_cursor: tts.into_trees(),
- close_delim: delim == token::NoDelim && skip_none_delims,
+ close_delim: false,
}
}
}
@@ -225,7 +262,7 @@
return (token, spacing);
}
TokenTree::Delimited(sp, delim, tts) => {
- let frame = TokenCursorFrame::new(sp, delim, tts, self.skip_none_delims);
+ let frame = TokenCursorFrame::new(sp, delim, tts);
self.stack.push(mem::replace(&mut self.frame, frame));
}
}
@@ -283,7 +320,6 @@
.cloned()
.collect::<TokenStream>()
},
- self.skip_none_delims,
),
));
@@ -372,26 +408,24 @@
desugar_doc_comments: bool,
subparser_name: Option<&'static str>,
) -> Self {
+ let mut start_frame = TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens);
+ start_frame.open_delim = true;
+ start_frame.close_delim = true;
+
let mut parser = Parser {
sess,
token: Token::dummy(),
token_spacing: Spacing::Alone,
prev_token: Token::dummy(),
+ capture_cfg: false,
restrictions: Restrictions::empty(),
expected_tokens: Vec::new(),
- // Skip over the delimiters for `None`-delimited groups
token_cursor: TokenCursor {
- frame: TokenCursorFrame::new(
- DelimSpan::dummy(),
- token::NoDelim,
- tokens,
- /* skip_none_delims */ true,
- ),
+ frame: start_frame,
stack: Vec::new(),
num_next_calls: 0,
desugar_doc_comments,
- append_unglued_token: None,
- skip_none_delims: true,
+ break_last_token: false,
},
desugar_doc_comments,
unmatched_angle_bracket_count: 0,
@@ -400,6 +434,11 @@
last_unexpected_token_span: None,
last_type_ascription: None,
subparser_name,
+ capture_state: CaptureState {
+ capturing: Capturing::No,
+ replace_ranges: Vec::new(),
+ inner_attr_ranges: Default::default(),
+ },
};
// Make parser point to the first token.
@@ -409,21 +448,29 @@
}
fn next_tok(&mut self, fallback_span: Span) -> (Token, Spacing) {
- let (mut next, spacing) = if self.desugar_doc_comments {
- self.token_cursor.next_desugared()
- } else {
- self.token_cursor.next()
- };
- self.token_cursor.num_next_calls += 1;
- // We've retrieved an token from the underlying
- // cursor, so we no longer need to worry about
- // an unglued token. See `break_and_eat` for more details
- self.token_cursor.append_unglued_token = None;
- if next.span.is_dummy() {
- // Tweak the location for better diagnostics, but keep syntactic context intact.
- next.span = fallback_span.with_ctxt(next.span.ctxt());
+ loop {
+ let (mut next, spacing) = if self.desugar_doc_comments {
+ self.token_cursor.next_desugared()
+ } else {
+ self.token_cursor.next()
+ };
+ self.token_cursor.num_next_calls += 1;
+ // We've retrieved a token from the underlying
+ // cursor, so we no longer need to worry about
+ // an unglued token. See `break_and_eat` for more details
+ self.token_cursor.break_last_token = false;
+ if next.span.is_dummy() {
+ // Tweak the location for better diagnostics, but keep syntactic context intact.
+ next.span = fallback_span.with_ctxt(next.span.ctxt());
+ }
+ if matches!(
+ next.kind,
+ token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim)
+ ) {
+ continue;
+ }
+ return (next, spacing);
}
- (next, spacing)
}
pub fn unexpected<T>(&mut self) -> PResult<'a, T> {
@@ -621,8 +668,7 @@
// If we consume any additional tokens, then this token
// is not needed (we'll capture the entire 'glued' token),
// and `next_tok` will set this field to `None`
- self.token_cursor.append_unglued_token =
- Some((TokenTree::Token(self.token.clone()), Spacing::Alone));
+ self.token_cursor.break_last_token = true;
// Use the spacing of the glued token as the spacing
// of the unglued second token.
self.bump_with((Token::new(second, second_span), self.token_spacing));
@@ -884,15 +930,38 @@
}
let frame = &self.token_cursor.frame;
- match frame.tree_cursor.look_ahead(dist - 1) {
- Some(tree) => match tree {
- TokenTree::Token(token) => looker(token),
- TokenTree::Delimited(dspan, delim, _) => {
- looker(&Token::new(token::OpenDelim(*delim), dspan.open))
- }
- },
- None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)),
+ if frame.delim != DelimToken::NoDelim {
+ let all_normal = (0..dist).all(|i| {
+ let token = frame.tree_cursor.look_ahead(i);
+ !matches!(token, Some(TokenTree::Delimited(_, DelimToken::NoDelim, _)))
+ });
+ if all_normal {
+ return match frame.tree_cursor.look_ahead(dist - 1) {
+ Some(tree) => match tree {
+ TokenTree::Token(token) => looker(token),
+ TokenTree::Delimited(dspan, delim, _) => {
+ looker(&Token::new(token::OpenDelim(*delim), dspan.open))
+ }
+ },
+ None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)),
+ };
+ }
}
+
+ let mut cursor = self.token_cursor.clone();
+ let mut i = 0;
+ let mut token = Token::dummy();
+ while i < dist {
+ token = cursor.next().0;
+ if matches!(
+ token.kind,
+ token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim)
+ ) {
+ continue;
+ }
+ i += 1;
+ }
+ return looker(&token);
}
/// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
@@ -1304,3 +1373,24 @@
}
}
}
+
+/// A helper struct used when building a `AttrAnnotatedTokenStream` from
+/// a `LazyTokenStream`. Both delimiter and non-delimited tokens
+/// are stored as `FlatToken::Token`. A vector of `FlatToken`s
+/// is then 'parsed' to build up a `AttrAnnotatedTokenStream` with nested
+/// `AttrAnnotatedTokenTree::Delimited` tokens
+#[derive(Debug, Clone)]
+pub enum FlatToken {
+ /// A token - this holds both delimiter (e.g. '{' and '}')
+ /// and non-delimiter tokens
+ Token(Token),
+ /// Holds the `AttributesData` for an AST node. The
+ /// `AttributesData` is inserted directly into the
+ /// constructed `AttrAnnotatedTokenStream` as
+ /// a `AttrAnnotatedTokenTree::Attributes`
+ AttrTarget(AttributesData),
+ /// A special 'empty' token that is ignored during the conversion
+ /// to a `AttrAnnotatedTokenStream`. This is used to simplify the
+ /// handling of replace ranges.
+ Empty,
+}
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 0c49d10..5c4a278 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -153,9 +153,7 @@
NonterminalKind::Path => token::NtPath(
self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?,
),
- NonterminalKind::Meta => {
- token::NtMeta(P(self.collect_tokens_no_attrs(|this| this.parse_attr_item(false))?))
- }
+ NonterminalKind::Meta => token::NtMeta(P(self.parse_attr_item(true)?)),
NonterminalKind::TT => token::NtTT(self.parse_token_tree()),
NonterminalKind::Vis => token::NtVis(
self.collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?,
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 24fb430..592f64f 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -48,39 +48,26 @@
if let token::Interpolated(nt) = &self.token.kind {
if let token::NtStmt(stmt) = &**nt {
let mut stmt = stmt.clone();
- return self.collect_tokens_trailing_token(
- attrs,
- force_collect,
- |this, mut attrs| {
- stmt.visit_attrs(|stmt_attrs| {
- mem::swap(stmt_attrs, &mut attrs);
- stmt_attrs.extend(attrs);
- });
- // Make sure we capture the token::Interpolated
- this.bump();
- Ok((Some(stmt), TrailingToken::None))
- },
- );
+ self.bump();
+ stmt.visit_attrs(|stmt_attrs| {
+ attrs.prepend_to_nt_inner(stmt_attrs);
+ });
+ return Ok(Some(stmt));
}
}
Ok(Some(if self.token.is_keyword(kw::Let) {
self.parse_local_mk(lo, attrs, capture_semi, force_collect)?
} else if self.is_kw_followed_by_ident(kw::Mut) {
- self.recover_stmt_local(
- lo,
- attrs.take_for_recovery().into(),
- "missing keyword",
- "let mut",
- )?
+ self.recover_stmt_local(lo, attrs, "missing keyword", "let mut")?
} else if self.is_kw_followed_by_ident(kw::Auto) {
self.bump(); // `auto`
let msg = "write `let` instead of `auto` to introduce a new variable";
- self.recover_stmt_local(lo, attrs.take_for_recovery().into(), msg, "let")?
+ self.recover_stmt_local(lo, attrs, msg, "let")?
} else if self.is_kw_followed_by_ident(sym::var) {
self.bump(); // `var`
let msg = "write `let` instead of `var` to introduce a new variable";
- self.recover_stmt_local(lo, attrs.take_for_recovery().into(), msg, "let")?
+ self.recover_stmt_local(lo, attrs, msg, "let")?
} else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
// We have avoided contextual keywords like `union`, items with `crate` visibility,
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
@@ -112,7 +99,7 @@
attrs: AttrWrapper,
force_collect: ForceCollect,
) -> PResult<'a, Stmt> {
- self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
+ let stmt = self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
let path = this.parse_path(PathStyle::Expr)?;
if this.eat(&token::Not) {
@@ -132,14 +119,22 @@
};
let expr = this.with_res(Restrictions::STMT_EXPR, |this| {
- let expr = this.parse_dot_or_call_expr_with(expr, lo, attrs)?;
+ this.parse_dot_or_call_expr_with(expr, lo, attrs)
+ })?;
+ // `DUMMY_SP` will get overwritten later in this function
+ Ok((this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)), TrailingToken::None))
+ })?;
+
+ if let StmtKind::Expr(expr) = stmt.kind {
+ // Perform this outside of the `collect_tokens_trailing_token` closure,
+ // since our outer attributes do not apply to this part of the expression
+ let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr))
})?;
- Ok((
- this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Expr(expr)),
- TrailingToken::None,
- ))
- })
+ Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
+ } else {
+ Ok(stmt)
+ }
}
/// Parses a statement macro `mac!(args)` provided a `path` representing `mac`.
@@ -183,7 +178,7 @@
fn recover_stmt_local(
&mut self,
lo: Span,
- attrs: AttrVec,
+ attrs: AttrWrapper,
msg: &str,
sugg: &str,
) -> PResult<'a, Stmt> {
@@ -213,9 +208,15 @@
})
}
- fn recover_local_after_let(&mut self, lo: Span, attrs: AttrVec) -> PResult<'a, Stmt> {
- let local = self.parse_local(attrs)?;
- Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Local(local)))
+ fn recover_local_after_let(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
+ self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
+ let local = this.parse_local(attrs.into())?;
+ // FIXME - maybe capture semicolon in recovery?
+ Ok((
+ this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Local(local)),
+ TrailingToken::None,
+ ))
+ })
}
/// Parses a local variable declaration.
diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs
index 1377bb7..9321f11 100644
--- a/compiler/rustc_resolve/src/late.rs
+++ b/compiler/rustc_resolve/src/late.rs
@@ -1031,7 +1031,6 @@
}
ItemKind::Static(ref ty, _, ref expr) | ItemKind::Const(_, ref ty, ref expr) => {
- debug!("resolve_item ItemKind::Const");
self.with_item_rib(HasGenericParams::No, |this| {
this.visit_ty(ty);
if let Some(expr) = expr {
@@ -1597,6 +1596,7 @@
.try_resolve_as_non_binding(pat_src, pat, bmode, ident, has_sub)
.unwrap_or_else(|| self.fresh_binding(ident, pat.id, pat_src, bindings));
self.r.record_partial_res(pat.id, PartialRes::new(res));
+ self.r.record_pat_span(pat.id, pat.span);
}
PatKind::TupleStruct(ref path, ref sub_patterns) => {
self.smart_resolve_path(
diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs
index 3fdd47e..1299543 100644
--- a/compiler/rustc_resolve/src/lib.rs
+++ b/compiler/rustc_resolve/src/lib.rs
@@ -891,6 +891,10 @@
/// "self-confirming" import resolutions during import validation.
unusable_binding: Option<&'a NameBinding<'a>>,
+ // Spans for local variables found during pattern resolution.
+ // Used for suggestions during error reporting.
+ pat_span_map: NodeMap<Span>,
+
/// Resolutions for nodes that have a single resolution.
partial_res_map: NodeMap<PartialRes>,
/// Resolutions for import nodes, which have multiple resolutions in different namespaces.
@@ -1270,6 +1274,7 @@
last_import_segment: false,
unusable_binding: None,
+ pat_span_map: Default::default(),
partial_res_map: Default::default(),
import_res_map: Default::default(),
label_res_map: Default::default(),
@@ -1917,7 +1922,6 @@
return Some(LexicalScopeBinding::Item(binding));
}
}
-
self.early_resolve_ident_in_lexical_scope(
orig_ident,
ScopeSet::Late(ns, module, record_used_id),
@@ -2394,7 +2398,59 @@
.next()
.map_or(false, |c| c.is_ascii_uppercase())
{
- (format!("use of undeclared type `{}`", ident), None)
+ // Check whether the name refers to an item in the value namespace.
+ let suggestion = if ribs.is_some() {
+ let match_span = match self.resolve_ident_in_lexical_scope(
+ ident,
+ ValueNS,
+ parent_scope,
+ None,
+ path_span,
+ &ribs.unwrap()[ValueNS],
+ ) {
+ // Name matches a local variable. For example:
+ // ```
+ // fn f() {
+ // let Foo: &str = "";
+ // println!("{}", Foo::Bar); // Name refers to local
+ // // variable `Foo`.
+ // }
+ // ```
+ Some(LexicalScopeBinding::Res(Res::Local(id))) => {
+ Some(*self.pat_span_map.get(&id).unwrap())
+ }
+
+ // Name matches item from a local name binding
+ // created by `use` declaration. For example:
+ // ```
+ // pub Foo: &str = "";
+ //
+ // mod submod {
+ // use super::Foo;
+ // println!("{}", Foo::Bar); // Name refers to local
+ // // binding `Foo`.
+ // }
+ // ```
+ Some(LexicalScopeBinding::Item(name_binding)) => {
+ Some(name_binding.span)
+ }
+ _ => None,
+ };
+
+ if let Some(span) = match_span {
+ Some((
+ vec![(span, String::from(""))],
+ format!("`{}` is defined here, but is not a type", ident),
+ Applicability::MaybeIncorrect,
+ ))
+ } else {
+ None
+ }
+ } else {
+ None
+ };
+
+ (format!("use of undeclared type `{}`", ident), suggestion)
} else {
(format!("use of undeclared crate or module `{}`", ident), None)
}
@@ -2805,6 +2861,11 @@
}
}
+ fn record_pat_span(&mut self, node: NodeId, span: Span) {
+ debug!("(recording pat) recording {:?} for {:?}", node, span);
+ self.pat_span_map.insert(node, span);
+ }
+
fn is_accessible_from(&self, vis: ty::Visibility, module: Module<'a>) -> bool {
vis.is_accessible_from(module.nearest_parent_mod, self)
}
diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs
index 3488efa..cc2583b 100644
--- a/compiler/rustc_session/src/session.rs
+++ b/compiler/rustc_session/src/session.rs
@@ -807,8 +807,11 @@
// This is used to control the emission of the `uwtable` attribute on
// LLVM functions.
//
- // At the very least, unwind tables are needed when compiling with
- // `-C panic=unwind`.
+ // Unwind tables are needed when compiling with `-C panic=unwind`, but
+ // LLVM won't omit unwind tables unless the function is also marked as
+ // `nounwind`, so users are allowed to disable `uwtable` emission.
+ // Historically rustc always emits `uwtable` attributes by default, so
+ // even though they can be disabled, they're still emitted by default.
//
// On some targets (including windows), however, exceptions include
// other events such as illegal instructions, segfaults, etc. This means
@@ -821,13 +824,10 @@
// If a target requires unwind tables, then they must be emitted.
// Otherwise, we can defer to the `-C force-unwind-tables=<yes/no>`
// value, if it is provided, or disable them, if not.
- if self.panic_strategy() == PanicStrategy::Unwind {
- true
- } else if self.target.requires_uwtable {
- true
- } else {
- self.opts.cg.force_unwind_tables.unwrap_or(self.target.default_uwtable)
- }
+ self.target.requires_uwtable
+ || self.opts.cg.force_unwind_tables.unwrap_or(
+ self.panic_strategy() == PanicStrategy::Unwind || self.target.default_uwtable,
+ )
}
/// Returns the symbol name for the registrar function,
@@ -1483,13 +1483,6 @@
// Unwind tables cannot be disabled if the target requires them.
if let Some(include_uwtables) = sess.opts.cg.force_unwind_tables {
- if sess.panic_strategy() == PanicStrategy::Unwind && !include_uwtables {
- sess.err(
- "panic=unwind requires unwind tables, they cannot be disabled \
- with `-C force-unwind-tables=no`.",
- );
- }
-
if sess.target.requires_uwtable && !include_uwtables {
sess.err(
"target requires unwind tables, they cannot be disabled with \
diff --git a/compiler/rustc_session/src/utils.rs b/compiler/rustc_session/src/utils.rs
index f3d3330..e9d597d 100644
--- a/compiler/rustc_session/src/utils.rs
+++ b/compiler/rustc_session/src/utils.rs
@@ -1,7 +1,13 @@
+use crate::parse::ParseSess;
use crate::session::Session;
+use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
+use rustc_ast::tokenstream::CanSynthesizeMissingTokens;
+use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_data_structures::profiling::VerboseTimingGuard;
use std::path::{Path, PathBuf};
+pub type NtToTokenstream = fn(&Nonterminal, &ParseSess, CanSynthesizeMissingTokens) -> TokenStream;
+
impl Session {
pub fn timer<'a>(&'a self, what: &'static str) -> VerboseTimingGuard<'a> {
self.prof.verbose_generic_activity(what)
@@ -53,3 +59,52 @@
&self.original
}
}
+
+// FIXME: Find a better spot for this - it needs to be accessible from `rustc_ast_lowering`,
+// and needs to access `ParseSess`.
+pub struct FlattenNonterminals<'a> {
+ pub parse_sess: &'a ParseSess,
+ pub synthesize_tokens: CanSynthesizeMissingTokens,
+ pub nt_to_tokenstream: NtToTokenstream,
+}
+
+impl<'a> FlattenNonterminals<'a> {
+ pub fn process_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
+ fn can_skip(stream: &TokenStream) -> bool {
+ stream.trees().all(|tree| match tree {
+ TokenTree::Token(token) => !matches!(token.kind, token::Interpolated(_)),
+ TokenTree::Delimited(_, _, inner) => can_skip(&inner),
+ })
+ }
+
+ if can_skip(&tokens) {
+ return tokens;
+ }
+
+ tokens.into_trees().flat_map(|tree| self.process_token_tree(tree).into_trees()).collect()
+ }
+
+ pub fn process_token_tree(&mut self, tree: TokenTree) -> TokenStream {
+ match tree {
+ TokenTree::Token(token) => self.process_token(token),
+ TokenTree::Delimited(span, delim, tts) => {
+ TokenTree::Delimited(span, delim, self.process_token_stream(tts)).into()
+ }
+ }
+ }
+
+ pub fn process_token(&mut self, token: Token) -> TokenStream {
+ match token.kind {
+ token::Interpolated(nt) => {
+ let tts = (self.nt_to_tokenstream)(&nt, self.parse_sess, self.synthesize_tokens);
+ TokenTree::Delimited(
+ DelimSpan::from_single(token.span),
+ DelimToken::NoDelim,
+ self.process_token_stream(tts),
+ )
+ .into()
+ }
+ _ => TokenTree::Token(token).into(),
+ }
+ }
+}
diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs
index ee1d206..42e521a 100644
--- a/compiler/rustc_span/src/symbol.rs
+++ b/compiler/rustc_span/src/symbol.rs
@@ -900,6 +900,8 @@
profiler_runtime,
ptr_guaranteed_eq,
ptr_guaranteed_ne,
+ ptr_null,
+ ptr_null_mut,
ptr_offset_from,
pub_macro_rules,
pub_restricted,
diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs
index 57b0a36..2af4669 100644
--- a/compiler/rustc_target/src/spec/mod.rs
+++ b/compiler/rustc_target/src/spec/mod.rs
@@ -1446,8 +1446,8 @@
let get_req_field = |name: &str| {
obj.find(name)
- .map(|s| s.as_string())
- .and_then(|os| os.map(|s| s.to_string()))
+ .and_then(Json::as_string)
+ .map(str::to_string)
.ok_or_else(|| format!("Field {} in target specification is required", name))
};
diff --git a/compiler/rustc_typeck/src/astconv/generics.rs b/compiler/rustc_typeck/src/astconv/generics.rs
index 845375f..7a297f2 100644
--- a/compiler/rustc_typeck/src/astconv/generics.rs
+++ b/compiler/rustc_typeck/src/astconv/generics.rs
@@ -82,7 +82,7 @@
if param_type.is_suggestable() {
err.span_suggestion(
tcx.def_span(src_def_id),
- "consider changing this type paramater to a `const`-generic",
+ "consider changing this type parameter to be a `const` generic",
format!("const {}: {}", param_name, param_type),
Applicability::MaybeIncorrect,
);
diff --git a/compiler/rustc_typeck/src/check/compare_method.rs b/compiler/rustc_typeck/src/check/compare_method.rs
index f044daa..60ca562 100644
--- a/compiler/rustc_typeck/src/check/compare_method.rs
+++ b/compiler/rustc_typeck/src/check/compare_method.rs
@@ -278,9 +278,8 @@
if let Err(terr) = sub_result {
debug!("sub_types failed: impl ty {:?}, trait ty {:?}", impl_fty, trait_fty);
- let (impl_err_span, trait_err_span) = extract_spans_for_error_reporting(
- &infcx, param_env, &terr, &cause, impl_m, impl_sig, trait_m, trait_sig,
- );
+ let (impl_err_span, trait_err_span) =
+ extract_spans_for_error_reporting(&infcx, &terr, &cause, impl_m, trait_m);
cause.make_mut().span = impl_err_span;
@@ -291,18 +290,79 @@
"method `{}` has an incompatible type for trait",
trait_m.ident
);
- if let TypeError::Mutability = terr {
- if let Some(trait_err_span) = trait_err_span {
- if let Ok(trait_err_str) = tcx.sess.source_map().span_to_snippet(trait_err_span)
+ match &terr {
+ TypeError::ArgumentMutability(0) | TypeError::ArgumentSorts(_, 0)
+ if trait_m.fn_has_self_parameter =>
+ {
+ let ty = trait_sig.inputs()[0];
+ let sugg = match ExplicitSelf::determine(ty, |_| ty == impl_trait_ref.self_ty())
{
+ ExplicitSelf::ByValue => "self".to_owned(),
+ ExplicitSelf::ByReference(_, hir::Mutability::Not) => "&self".to_owned(),
+ ExplicitSelf::ByReference(_, hir::Mutability::Mut) => {
+ "&mut self".to_owned()
+ }
+ _ => format!("self: {}", ty),
+ };
+
+ // When the `impl` receiver is an arbitrary self type, like `self: Box<Self>`, the
+            // span points only at the type `Box<Self>`, but we want to cover the whole
+ // argument pattern and type.
+ let impl_m_hir_id =
+ tcx.hir().local_def_id_to_hir_id(impl_m.def_id.expect_local());
+ let span = match tcx.hir().expect_impl_item(impl_m_hir_id).kind {
+ ImplItemKind::Fn(ref sig, body) => tcx
+ .hir()
+ .body_param_names(body)
+ .zip(sig.decl.inputs.iter())
+ .map(|(param, ty)| param.span.to(ty.span))
+ .next()
+ .unwrap_or(impl_err_span),
+ _ => bug!("{:?} is not a method", impl_m),
+ };
+
+ diag.span_suggestion(
+ span,
+ "change the self-receiver type to match the trait",
+ sugg,
+ Applicability::MachineApplicable,
+ );
+ }
+ TypeError::ArgumentMutability(i) | TypeError::ArgumentSorts(_, i) => {
+ if trait_sig.inputs().len() == *i {
+ // Suggestion to change output type. We do not suggest in `async` functions
+ // to avoid complex logic or incorrect output.
+ let impl_m_hir_id =
+ tcx.hir().local_def_id_to_hir_id(impl_m.def_id.expect_local());
+ match tcx.hir().expect_impl_item(impl_m_hir_id).kind {
+ ImplItemKind::Fn(ref sig, _)
+ if sig.header.asyncness == hir::IsAsync::NotAsync =>
+ {
+ let msg = "change the output type to match the trait";
+ let ap = Applicability::MachineApplicable;
+ match sig.decl.output {
+ hir::FnRetTy::DefaultReturn(sp) => {
+ let sugg = format!("-> {} ", trait_sig.output());
+ diag.span_suggestion_verbose(sp, msg, sugg, ap);
+ }
+ hir::FnRetTy::Return(hir_ty) => {
+ let sugg = trait_sig.output().to_string();
+ diag.span_suggestion(hir_ty.span, msg, sugg, ap);
+ }
+ };
+ }
+ _ => {}
+ };
+ } else if let Some(trait_ty) = trait_sig.inputs().get(*i) {
diag.span_suggestion(
impl_err_span,
- "consider changing the mutability to match the trait",
- trait_err_str,
+ "change the parameter type to match the trait",
+ trait_ty.to_string(),
Applicability::MachineApplicable,
);
}
}
+ _ => {}
}
infcx.note_type_err(
@@ -385,86 +445,35 @@
fn extract_spans_for_error_reporting<'a, 'tcx>(
infcx: &infer::InferCtxt<'a, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
terr: &TypeError<'_>,
cause: &ObligationCause<'tcx>,
impl_m: &ty::AssocItem,
- impl_sig: ty::FnSig<'tcx>,
trait_m: &ty::AssocItem,
- trait_sig: ty::FnSig<'tcx>,
) -> (Span, Option<Span>) {
let tcx = infcx.tcx;
let impl_m_hir_id = tcx.hir().local_def_id_to_hir_id(impl_m.def_id.expect_local());
- let (impl_m_output, impl_m_iter) = match tcx.hir().expect_impl_item(impl_m_hir_id).kind {
- ImplItemKind::Fn(ref impl_m_sig, _) => {
- (&impl_m_sig.decl.output, impl_m_sig.decl.inputs.iter())
+ let mut impl_args = match tcx.hir().expect_impl_item(impl_m_hir_id).kind {
+ ImplItemKind::Fn(ref sig, _) => {
+ sig.decl.inputs.iter().map(|t| t.span).chain(iter::once(sig.decl.output.span()))
}
_ => bug!("{:?} is not a method", impl_m),
};
+ let trait_args = trait_m.def_id.as_local().map(|def_id| {
+ let trait_m_hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
+ match tcx.hir().expect_trait_item(trait_m_hir_id).kind {
+ TraitItemKind::Fn(ref sig, _) => {
+ sig.decl.inputs.iter().map(|t| t.span).chain(iter::once(sig.decl.output.span()))
+ }
+ _ => bug!("{:?} is not a TraitItemKind::Fn", trait_m),
+ }
+ });
match *terr {
- TypeError::Mutability => {
- if let Some(def_id) = trait_m.def_id.as_local() {
- let trait_m_hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
- let trait_m_iter = match tcx.hir().expect_trait_item(trait_m_hir_id).kind {
- TraitItemKind::Fn(ref trait_m_sig, _) => trait_m_sig.decl.inputs.iter(),
- _ => bug!("{:?} is not a TraitItemKind::Fn", trait_m),
- };
-
- iter::zip(impl_m_iter, trait_m_iter)
- .find(|&(ref impl_arg, ref trait_arg)| {
- match (&impl_arg.kind, &trait_arg.kind) {
- (
- &hir::TyKind::Rptr(_, ref impl_mt),
- &hir::TyKind::Rptr(_, ref trait_mt),
- )
- | (&hir::TyKind::Ptr(ref impl_mt), &hir::TyKind::Ptr(ref trait_mt)) => {
- impl_mt.mutbl != trait_mt.mutbl
- }
- _ => false,
- }
- })
- .map(|(ref impl_arg, ref trait_arg)| (impl_arg.span, Some(trait_arg.span)))
- .unwrap_or_else(|| (cause.span(tcx), tcx.hir().span_if_local(trait_m.def_id)))
- } else {
- (cause.span(tcx), tcx.hir().span_if_local(trait_m.def_id))
- }
+ TypeError::ArgumentMutability(i) => {
+ (impl_args.nth(i).unwrap(), trait_args.and_then(|mut args| args.nth(i)))
}
- TypeError::Sorts(ExpectedFound { .. }) => {
- if let Some(def_id) = trait_m.def_id.as_local() {
- let trait_m_hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
- let (trait_m_output, trait_m_iter) =
- match tcx.hir().expect_trait_item(trait_m_hir_id).kind {
- TraitItemKind::Fn(ref trait_m_sig, _) => {
- (&trait_m_sig.decl.output, trait_m_sig.decl.inputs.iter())
- }
- _ => bug!("{:?} is not a TraitItemKind::Fn", trait_m),
- };
-
- let impl_iter = impl_sig.inputs().iter();
- let trait_iter = trait_sig.inputs().iter();
- iter::zip(iter::zip(impl_iter, trait_iter), iter::zip(impl_m_iter, trait_m_iter))
- .find_map(|((&impl_arg_ty, &trait_arg_ty), (impl_arg, trait_arg))| match infcx
- .at(&cause, param_env)
- .sub(trait_arg_ty, impl_arg_ty)
- {
- Ok(_) => None,
- Err(_) => Some((impl_arg.span, Some(trait_arg.span))),
- })
- .unwrap_or_else(|| {
- if infcx
- .at(&cause, param_env)
- .sup(trait_sig.output(), impl_sig.output())
- .is_err()
- {
- (impl_m_output.span(), Some(trait_m_output.span()))
- } else {
- (cause.span(tcx), tcx.hir().span_if_local(trait_m.def_id))
- }
- })
- } else {
- (cause.span(tcx), tcx.hir().span_if_local(trait_m.def_id))
- }
+ TypeError::ArgumentSorts(ExpectedFound { .. }, i) => {
+ (impl_args.nth(i).unwrap(), trait_args.and_then(|mut args| args.nth(i)))
}
_ => (cause.span(tcx), tcx.hir().span_if_local(trait_m.def_id)),
}
@@ -514,8 +523,7 @@
tcx.sess,
impl_m_span,
E0185,
- "method `{}` has a `{}` declaration in the impl, but \
- not in the trait",
+ "method `{}` has a `{}` declaration in the impl, but not in the trait",
trait_m.ident,
self_descr
);
@@ -535,8 +543,7 @@
tcx.sess,
impl_m_span,
E0186,
- "method `{}` has a `{}` declaration in the trait, but \
- not in the impl",
+ "method `{}` has a `{}` declaration in the trait, but not in the impl",
trait_m.ident,
self_descr
);
@@ -993,8 +1000,7 @@
tcx.sess,
cause.span,
E0326,
- "implemented const `{}` has an incompatible type for \
- trait",
+ "implemented const `{}` has an incompatible type for trait",
trait_c.ident
);
diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
index a0dbb28..9712447 100644
--- a/library/alloc/src/collections/btree/map.rs
+++ b/library/alloc/src/collections/btree/map.rs
@@ -940,7 +940,6 @@
/// # Examples
///
/// ```
- /// #![feature(btree_retain)]
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<i32, i32> = (0..8).map(|x| (x, x*10)).collect();
@@ -949,7 +948,7 @@
/// assert!(map.into_iter().eq(vec![(0, 0), (2, 20), (4, 40), (6, 60)]));
/// ```
#[inline]
- #[unstable(feature = "btree_retain", issue = "79025")]
+ #[stable(feature = "btree_retain", since = "1.53.0")]
pub fn retain<F>(&mut self, mut f: F)
where
K: Ord,
diff --git a/library/alloc/src/collections/btree/set.rs b/library/alloc/src/collections/btree/set.rs
index a331b8d..737932d 100644
--- a/library/alloc/src/collections/btree/set.rs
+++ b/library/alloc/src/collections/btree/set.rs
@@ -851,7 +851,6 @@
/// # Examples
///
/// ```
- /// #![feature(btree_retain)]
/// use std::collections::BTreeSet;
///
/// let xs = [1, 2, 3, 4, 5, 6];
@@ -860,7 +859,7 @@
/// set.retain(|&k| k % 2 == 0);
/// assert!(set.iter().eq([2, 4, 6].iter()));
/// ```
- #[unstable(feature = "btree_retain", issue = "79025")]
+ #[stable(feature = "btree_retain", since = "1.53.0")]
pub fn retain<F>(&mut self, mut f: F)
where
T: Ord,
diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index 91c3b16..0dab035 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -2567,7 +2567,7 @@
/// # let some_predicate = |x: &mut i32| { *x == 2 || *x == 3 || *x == 6 };
/// # let mut vec = vec![1, 2, 3, 4, 5, 6];
/// let mut i = 0;
- /// while i != vec.len() {
+ /// while i < vec.len() {
/// if some_predicate(&mut vec[i]) {
/// let val = vec.remove(i);
/// // your code here
diff --git a/library/core/src/array/mod.rs b/library/core/src/array/mod.rs
index 8f52985..b6ce825 100644
--- a/library/core/src/array/mod.rs
+++ b/library/core/src/array/mod.rs
@@ -12,7 +12,6 @@
use crate::fmt;
use crate::hash::{self, Hash};
use crate::iter::TrustedLen;
-use crate::marker::Unsize;
use crate::mem::{self, MaybeUninit};
use crate::ops::{Index, IndexMut};
use crate::slice::{Iter, IterMut};
@@ -36,41 +35,6 @@
unsafe { &mut *(s as *mut T).cast::<[T; 1]>() }
}
-/// Utility trait implemented only on arrays of fixed size
-///
-/// This trait can be used to implement other traits on fixed-size arrays
-/// without causing much metadata bloat.
-///
-/// The trait is marked unsafe in order to restrict implementors to fixed-size
-/// arrays. A user of this trait can assume that implementors have the exact
-/// layout in memory of a fixed size array (for example, for unsafe
-/// initialization).
-///
-/// Note that the traits [`AsRef`] and [`AsMut`] provide similar methods for types that
-/// may not be fixed-size arrays. Implementors should prefer those traits
-/// instead.
-#[unstable(feature = "fixed_size_array", issue = "27778")]
-pub unsafe trait FixedSizeArray<T> {
- /// Converts the array to immutable slice
- #[unstable(feature = "fixed_size_array", issue = "27778")]
- fn as_slice(&self) -> &[T];
- /// Converts the array to mutable slice
- #[unstable(feature = "fixed_size_array", issue = "27778")]
- fn as_mut_slice(&mut self) -> &mut [T];
-}
-
-#[unstable(feature = "fixed_size_array", issue = "27778")]
-unsafe impl<T, A: Unsize<[T]>> FixedSizeArray<T> for A {
- #[inline]
- fn as_slice(&self) -> &[T] {
- self
- }
- #[inline]
- fn as_mut_slice(&mut self) -> &mut [T] {
- self
- }
-}
-
/// The error type returned when a conversion from a slice to an array fails.
#[stable(feature = "try_from", since = "1.34.0")]
#[derive(Debug, Copy, Clone)]
diff --git a/library/core/src/cmp.rs b/library/core/src/cmp.rs
index 1dbf472..67dd1d8 100644
--- a/library/core/src/cmp.rs
+++ b/library/core/src/cmp.rs
@@ -982,6 +982,9 @@
#[stable(feature = "rust1", since = "1.0.0")]
fn le(&self, other: &Rhs) -> bool {
// Pattern `Some(Less | Eq)` optimizes worse than negating `None | Some(Greater)`.
+ // FIXME: The root cause was fixed upstream in LLVM with:
+ // https://github.com/llvm/llvm-project/commit/9bad7de9a3fb844f1ca2965f35d0c2a3d1e11775
+ // Revert this workaround once support for LLVM 12 gets dropped.
!matches!(self.partial_cmp(other), None | Some(Greater))
}
diff --git a/library/core/src/fmt/mod.rs b/library/core/src/fmt/mod.rs
index d696ffa..59493bb 100644
--- a/library/core/src/fmt/mod.rs
+++ b/library/core/src/fmt/mod.rs
@@ -2268,7 +2268,7 @@
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
-impl<T: ?Sized + Debug> Debug for UnsafeCell<T> {
+impl<T: ?Sized> Debug for UnsafeCell<T> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.pad("UnsafeCell")
}
diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs
index b5371d6..1ba0b23 100644
--- a/library/core/src/intrinsics.rs
+++ b/library/core/src/intrinsics.rs
@@ -1543,7 +1543,7 @@
/// let num_trailing = unsafe { cttz_nonzero(x) };
/// assert_eq!(num_trailing, 3);
/// ```
- #[rustc_const_unstable(feature = "const_cttz", issue = "none")]
+ #[rustc_const_stable(feature = "const_cttz", since = "1.53.0")]
pub fn cttz_nonzero<T: Copy>(x: T) -> T;
/// Reverses the bytes in an integer type `T`.
diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs
index 760b8d8..d0c52a4 100644
--- a/library/core/src/lib.rs
+++ b/library/core/src/lib.rs
@@ -79,7 +79,6 @@
#![feature(const_int_unchecked_arith)]
#![feature(const_mut_refs)]
#![feature(const_refs_to_cell)]
-#![feature(const_cttz)]
#![feature(const_panic)]
#![feature(const_pin)]
#![feature(const_fn)]
@@ -112,7 +111,6 @@
#![cfg_attr(bootstrap, feature(doc_spotlight))]
#![cfg_attr(not(bootstrap), feature(doc_notable_trait))]
#![feature(duration_consts_2)]
-#![feature(duration_saturating_ops)]
#![feature(extended_key_value_attributes)]
#![feature(extern_types)]
#![feature(fundamental)]
diff --git a/library/core/src/num/f32.rs b/library/core/src/num/f32.rs
index 690247b..0d6d919 100644
--- a/library/core/src/num/f32.rs
+++ b/library/core/src/num/f32.rs
@@ -500,7 +500,6 @@
/// Returns `true` if the number is [subnormal].
///
/// ```
- /// #![feature(is_subnormal)]
/// let min = f32::MIN_POSITIVE; // 1.17549435e-38f32
/// let max = f32::MAX;
/// let lower_than_min = 1.0e-40_f32;
@@ -516,7 +515,7 @@
/// assert!(lower_than_min.is_subnormal());
/// ```
/// [subnormal]: https://en.wikipedia.org/wiki/Denormal_number
- #[unstable(feature = "is_subnormal", issue = "79288")]
+ #[stable(feature = "is_subnormal", since = "1.53.0")]
#[rustc_const_unstable(feature = "const_float_classify", issue = "72505")]
#[inline]
pub const fn is_subnormal(self) -> bool {
diff --git a/library/core/src/num/f64.rs b/library/core/src/num/f64.rs
index 6d37372..42214e7 100644
--- a/library/core/src/num/f64.rs
+++ b/library/core/src/num/f64.rs
@@ -499,7 +499,6 @@
/// Returns `true` if the number is [subnormal].
///
/// ```
- /// #![feature(is_subnormal)]
/// let min = f64::MIN_POSITIVE; // 2.2250738585072014e-308_f64
/// let max = f64::MAX;
/// let lower_than_min = 1.0e-308_f64;
@@ -515,7 +514,7 @@
/// assert!(lower_than_min.is_subnormal());
/// ```
/// [subnormal]: https://en.wikipedia.org/wiki/Denormal_number
- #[unstable(feature = "is_subnormal", issue = "79288")]
+ #[stable(feature = "is_subnormal", since = "1.53.0")]
#[rustc_const_unstable(feature = "const_float_classify", issue = "72505")]
#[inline]
pub const fn is_subnormal(self) -> bool {
diff --git a/library/core/src/num/nonzero.rs b/library/core/src/num/nonzero.rs
index 111feb7..81262a2 100644
--- a/library/core/src/num/nonzero.rs
+++ b/library/core/src/num/nonzero.rs
@@ -191,13 +191,12 @@
/// Basic usage:
///
/// ```
- /// #![feature(nonzero_leading_trailing_zeros)]
#[doc = concat!("let n = std::num::", stringify!($Ty), "::new(", stringify!($LeadingTestExpr), ").unwrap();")]
///
/// assert_eq!(n.leading_zeros(), 0);
/// ```
- #[unstable(feature = "nonzero_leading_trailing_zeros", issue = "79143")]
- #[rustc_const_unstable(feature = "nonzero_leading_trailing_zeros", issue = "79143")]
+ #[stable(feature = "nonzero_leading_trailing_zeros", since = "1.53.0")]
+ #[rustc_const_stable(feature = "nonzero_leading_trailing_zeros", since = "1.53.0")]
#[inline]
pub const fn leading_zeros(self) -> u32 {
// SAFETY: since `self` can not be zero it is safe to call ctlz_nonzero
@@ -214,13 +213,12 @@
/// Basic usage:
///
/// ```
- /// #![feature(nonzero_leading_trailing_zeros)]
#[doc = concat!("let n = std::num::", stringify!($Ty), "::new(0b0101000).unwrap();")]
///
/// assert_eq!(n.trailing_zeros(), 3);
/// ```
- #[unstable(feature = "nonzero_leading_trailing_zeros", issue = "79143")]
- #[rustc_const_unstable(feature = "nonzero_leading_trailing_zeros", issue = "79143")]
+ #[stable(feature = "nonzero_leading_trailing_zeros", since = "1.53.0")]
+ #[rustc_const_stable(feature = "nonzero_leading_trailing_zeros", since = "1.53.0")]
#[inline]
pub const fn trailing_zeros(self) -> u32 {
// SAFETY: since `self` can not be zero it is safe to call cttz_nonzero
diff --git a/library/core/src/ops/drop.rs b/library/core/src/ops/drop.rs
index ce7d1c3..f4b1ec3 100644
--- a/library/core/src/ops/drop.rs
+++ b/library/core/src/ops/drop.rs
@@ -78,7 +78,7 @@
///
/// In other words, if you tried to explicitly call `Drop::drop` in the above example, you'd get a compiler error.
///
-/// If you'd like explicitly call the destructor of a value, [`mem::drop`] can be used instead.
+/// If you'd like to explicitly call the destructor of a value, [`mem::drop`] can be used instead.
///
/// [`mem::drop`]: drop
///
diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs
index 6e20715..ad8696a 100644
--- a/library/core/src/ptr/mod.rs
+++ b/library/core/src/ptr/mod.rs
@@ -211,6 +211,7 @@
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_promotable]
#[rustc_const_stable(feature = "const_ptr_null", since = "1.24.0")]
+#[rustc_diagnostic_item = "ptr_null"]
pub const fn null<T>() -> *const T {
0 as *const T
}
@@ -229,6 +230,7 @@
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_promotable]
#[rustc_const_stable(feature = "const_ptr_null", since = "1.24.0")]
+#[rustc_diagnostic_item = "ptr_null_mut"]
pub const fn null_mut<T>() -> *mut T {
0 as *mut T
}
diff --git a/library/core/src/sync/atomic.rs b/library/core/src/sync/atomic.rs
index 9085d5c..f1a1155 100644
--- a/library/core/src/sync/atomic.rs
+++ b/library/core/src/sync/atomic.rs
@@ -839,7 +839,6 @@
/// # Examples
///
/// ```rust
- /// #![feature(atomic_fetch_update)]
/// use std::sync::atomic::{AtomicBool, Ordering};
///
/// let x = AtomicBool::new(false);
@@ -849,7 +848,7 @@
/// assert_eq!(x.load(Ordering::SeqCst), false);
/// ```
#[inline]
- #[unstable(feature = "atomic_fetch_update", reason = "recently added", issue = "78639")]
+ #[stable(feature = "atomic_fetch_update", since = "1.53.0")]
#[cfg(target_has_atomic = "8")]
pub fn fetch_update<F>(
&self,
@@ -1227,7 +1226,6 @@
/// # Examples
///
/// ```rust
- /// #![feature(atomic_fetch_update)]
/// use std::sync::atomic::{AtomicPtr, Ordering};
///
/// let ptr: *mut _ = &mut 5;
@@ -1246,7 +1244,7 @@
/// assert_eq!(some_ptr.load(Ordering::SeqCst), new);
/// ```
#[inline]
- #[unstable(feature = "atomic_fetch_update", reason = "recently added", issue = "78639")]
+ #[stable(feature = "atomic_fetch_update", since = "1.53.0")]
#[cfg(target_has_atomic = "ptr")]
pub fn fetch_update<F>(
&self,
diff --git a/library/core/src/time.rs b/library/core/src/time.rs
index 8c0848c..fa6a6c2 100644
--- a/library/core/src/time.rs
+++ b/library/core/src/time.rs
@@ -124,14 +124,13 @@
/// # Examples
///
/// ```
- /// #![feature(duration_zero)]
/// use std::time::Duration;
///
/// let duration = Duration::ZERO;
/// assert!(duration.is_zero());
/// assert_eq!(duration.as_nanos(), 0);
/// ```
- #[unstable(feature = "duration_zero", issue = "73544")]
+ #[stable(feature = "duration_zero", since = "1.53.0")]
pub const ZERO: Duration = Duration::from_nanos(0);
/// The maximum duration.
@@ -269,7 +268,6 @@
/// # Examples
///
/// ```
- /// #![feature(duration_zero)]
/// use std::time::Duration;
///
/// assert!(Duration::ZERO.is_zero());
@@ -281,7 +279,8 @@
/// assert!(!Duration::from_nanos(1).is_zero());
/// assert!(!Duration::from_secs(1).is_zero());
/// ```
- #[unstable(feature = "duration_zero", issue = "73544")]
+ #[stable(feature = "duration_zero", since = "1.53.0")]
+ #[rustc_const_stable(feature = "duration_zero", since = "1.53.0")]
#[inline]
pub const fn is_zero(&self) -> bool {
self.secs == 0 && self.nanos == 0
@@ -479,14 +478,13 @@
/// # Examples
///
/// ```
- /// #![feature(duration_saturating_ops)]
/// #![feature(duration_constants)]
/// use std::time::Duration;
///
/// assert_eq!(Duration::new(0, 0).saturating_add(Duration::new(0, 1)), Duration::new(0, 1));
/// assert_eq!(Duration::new(1, 0).saturating_add(Duration::new(u64::MAX, 0)), Duration::MAX);
/// ```
- #[unstable(feature = "duration_saturating_ops", issue = "76416")]
+ #[stable(feature = "duration_saturating_ops", since = "1.53.0")]
#[inline]
#[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")]
pub const fn saturating_add(self, rhs: Duration) -> Duration {
@@ -537,14 +535,12 @@
/// # Examples
///
/// ```
- /// #![feature(duration_saturating_ops)]
- /// #![feature(duration_zero)]
/// use std::time::Duration;
///
/// assert_eq!(Duration::new(0, 1).saturating_sub(Duration::new(0, 0)), Duration::new(0, 1));
/// assert_eq!(Duration::new(0, 0).saturating_sub(Duration::new(0, 1)), Duration::ZERO);
/// ```
- #[unstable(feature = "duration_saturating_ops", issue = "76416")]
+ #[stable(feature = "duration_saturating_ops", since = "1.53.0")]
#[inline]
#[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")]
pub const fn saturating_sub(self, rhs: Duration) -> Duration {
@@ -590,14 +586,13 @@
/// # Examples
///
/// ```
- /// #![feature(duration_saturating_ops)]
/// #![feature(duration_constants)]
/// use std::time::Duration;
///
/// assert_eq!(Duration::new(0, 500_000_001).saturating_mul(2), Duration::new(1, 2));
/// assert_eq!(Duration::new(u64::MAX - 1, 0).saturating_mul(2), Duration::MAX);
/// ```
- #[unstable(feature = "duration_saturating_ops", issue = "76416")]
+ #[stable(feature = "duration_saturating_ops", since = "1.53.0")]
#[inline]
#[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")]
pub const fn saturating_mul(self, rhs: u32) -> Duration {
diff --git a/library/core/tests/array.rs b/library/core/tests/array.rs
index 89c2a96..ce7480c 100644
--- a/library/core/tests/array.rs
+++ b/library/core/tests/array.rs
@@ -1,25 +1,7 @@
-use core::array::{self, FixedSizeArray, IntoIter};
+use core::array::{self, IntoIter};
use core::convert::TryFrom;
#[test]
-fn fixed_size_array() {
- let mut array = [0; 64];
- let mut zero_sized = [(); 64];
- let mut empty_array = [0; 0];
- let mut empty_zero_sized = [(); 0];
-
- assert_eq!(FixedSizeArray::as_slice(&array).len(), 64);
- assert_eq!(FixedSizeArray::as_slice(&zero_sized).len(), 64);
- assert_eq!(FixedSizeArray::as_slice(&empty_array).len(), 0);
- assert_eq!(FixedSizeArray::as_slice(&empty_zero_sized).len(), 0);
-
- assert_eq!(FixedSizeArray::as_mut_slice(&mut array).len(), 64);
- assert_eq!(FixedSizeArray::as_mut_slice(&mut zero_sized).len(), 64);
- assert_eq!(FixedSizeArray::as_mut_slice(&mut empty_array).len(), 0);
- assert_eq!(FixedSizeArray::as_mut_slice(&mut empty_zero_sized).len(), 0);
-}
-
-#[test]
fn array_from_ref() {
let value: String = "Hello World!".into();
let arr: &[String; 1] = array::from_ref(&value);
diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs
index 7dc6e22..f6bfe67 100644
--- a/library/core/tests/lib.rs
+++ b/library/core/tests/lib.rs
@@ -24,11 +24,8 @@
#![feature(div_duration)]
#![feature(duration_consts_2)]
#![feature(duration_constants)]
-#![feature(duration_saturating_ops)]
-#![feature(duration_zero)]
#![feature(exact_size_is_empty)]
#![feature(extern_types)]
-#![feature(fixed_size_array)]
#![feature(flt2dec)]
#![feature(fmt_internals)]
#![feature(hashmap_internals)]
@@ -69,7 +66,6 @@
#![feature(ptr_metadata)]
#![feature(once_cell)]
#![feature(unsized_tuple_coercion)]
-#![feature(nonzero_leading_trailing_zeros)]
#![feature(const_option)]
#![feature(integer_atomics)]
#![feature(slice_group_by)]
diff --git a/library/panic_abort/Cargo.toml b/library/panic_abort/Cargo.toml
index caa89aa..bdab664 100644
--- a/library/panic_abort/Cargo.toml
+++ b/library/panic_abort/Cargo.toml
@@ -13,6 +13,7 @@
doc = false
[dependencies]
+alloc = { path = "../alloc" }
cfg-if = { version = "0.1.8", features = ['rustc-dep-of-std'] }
core = { path = "../core" }
libc = { version = "0.2", default-features = false }
diff --git a/library/panic_abort/src/android.rs b/library/panic_abort/src/android.rs
new file mode 100644
index 0000000..34d7750
--- /dev/null
+++ b/library/panic_abort/src/android.rs
@@ -0,0 +1,49 @@
+use alloc::string::String;
+use core::mem::transmute;
+use core::panic::BoxMeUp;
+use core::ptr::copy_nonoverlapping;
+
+const ANDROID_SET_ABORT_MESSAGE: &[u8] = b"android_set_abort_message\0";
+type SetAbortMessageType = unsafe extern "C" fn(*const libc::c_char) -> ();
+
+// Forward the abort message to libc's android_set_abort_message. We try our best to populate the
+// message but as this function may already be called as part of a failed allocation, it may not be
+// possible to do so.
+//
+// Some methods of core are on purpose avoided (such as try_reserve) as these rely on the correct
+// resolution of rust_eh_personality which is loosely defined in panic_abort.
+//
+// Weakly resolve the symbol for android_set_abort_message. This function is only available
+// for API >= 21.
+pub(crate) unsafe fn android_set_abort_message(payload: *mut &mut dyn BoxMeUp) {
+ let func_addr =
+ libc::dlsym(libc::RTLD_DEFAULT, ANDROID_SET_ABORT_MESSAGE.as_ptr() as *const libc::c_char)
+ as usize;
+ if func_addr == 0 {
+ return;
+ }
+
+ let payload = (*payload).get();
+ let msg = match payload.downcast_ref::<&'static str>() {
+ Some(msg) => msg.as_bytes(),
+ None => match payload.downcast_ref::<String>() {
+ Some(msg) => msg.as_bytes(),
+ None => &[],
+ },
+ };
+ if msg.is_empty() {
+ return;
+ }
+
+ // Allocate a new buffer to append the null byte.
+ let size = msg.len() + 1usize;
+ let buf = libc::malloc(size) as *mut libc::c_char;
+ if buf.is_null() {
+ return; // allocation failure
+ }
+ copy_nonoverlapping(msg.as_ptr(), buf as *mut u8, msg.len());
+ buf.offset(msg.len() as isize).write(0);
+
+ let func = transmute::<usize, SetAbortMessageType>(func_addr);
+ func(buf);
+}
diff --git a/library/panic_abort/src/lib.rs b/library/panic_abort/src/lib.rs
index eb2277d..5dcd1e6 100644
--- a/library/panic_abort/src/lib.rs
+++ b/library/panic_abort/src/lib.rs
@@ -19,6 +19,9 @@
#![feature(rustc_attrs)]
#![feature(asm)]
+#[cfg(target_os = "android")]
+mod android;
+
use core::any::Any;
use core::panic::BoxMeUp;
@@ -31,6 +34,10 @@
// "Leak" the payload and shim to the relevant abort on the platform in question.
#[rustc_std_internal_symbol]
pub unsafe extern "C" fn __rust_start_panic(_payload: *mut &mut dyn BoxMeUp) -> u32 {
+ // Android has the ability to attach a message as part of the abort.
+ #[cfg(target_os = "android")]
+ android::android_set_abort_message(_payload);
+
abort();
cfg_if::cfg_if! {
diff --git a/library/std/Cargo.toml b/library/std/Cargo.toml
index 22ca7ed..ab7b142 100644
--- a/library/std/Cargo.toml
+++ b/library/std/Cargo.toml
@@ -16,11 +16,12 @@
panic_unwind = { path = "../panic_unwind", optional = true }
panic_abort = { path = "../panic_abort" }
core = { path = "../core" }
-libc = { version = "0.2.88", default-features = false, features = ['rustc-dep-of-std'] }
+libc = { version = "0.2.93", default-features = false, features = ['rustc-dep-of-std'] }
compiler_builtins = { version = "0.1.39" }
profiler_builtins = { path = "../profiler_builtins", optional = true }
unwind = { path = "../unwind" }
hashbrown = { version = "0.11", default-features = false, features = ['rustc-dep-of-std'] }
+std_detect = { path = "../stdarch/crates/std_detect", default-features = false, features = ['rustc-dep-of-std'] }
# Dependencies of the `backtrace` crate
addr2line = { version = "0.14.0", optional = true, default-features = false }
@@ -70,8 +71,8 @@
# Enable std_detect default features for stdarch/crates/std_detect:
# https://github.com/rust-lang/stdarch/blob/master/crates/std_detect/Cargo.toml
-std_detect_file_io = []
-std_detect_dlsym_getauxval = []
+std_detect_file_io = ["std_detect/std_detect_file_io"]
+std_detect_dlsym_getauxval = ["std_detect/std_detect_dlsym_getauxval"]
[package.metadata.fortanix-sgx]
# Maximum possible number of threads when testing
diff --git a/library/std/src/fs.rs b/library/std/src/fs.rs
index 860bc13..e6120b8 100644
--- a/library/std/src/fs.rs
+++ b/library/std/src/fs.rs
@@ -265,8 +265,9 @@
/// ```no_run
/// use std::fs;
/// use std::net::SocketAddr;
+/// use std::error::Error;
///
-/// fn main() -> Result<(), Box<dyn std::error::Error + 'static>> {
+/// fn main() -> Result<(), Box<dyn Error>> {
/// let foo: SocketAddr = fs::read_to_string("address.txt")?.parse()?;
/// Ok(())
/// }
diff --git a/library/std/src/io/buffered/bufreader.rs b/library/std/src/io/buffered/bufreader.rs
index 02b0fc0..d8021d3 100644
--- a/library/std/src/io/buffered/bufreader.rs
+++ b/library/std/src/io/buffered/bufreader.rs
@@ -234,7 +234,7 @@
/// the buffer will not be flushed, allowing for more efficient seeks.
/// This method does not return the location of the underlying reader, so the caller
/// must track this information themselves if it is required.
- #[unstable(feature = "bufreader_seek_relative", issue = "31100")]
+ #[stable(feature = "bufreader_seek_relative", since = "1.53.0")]
pub fn seek_relative(&mut self, offset: i64) -> io::Result<()> {
let pos = self.pos as u64;
if offset < 0 {
diff --git a/library/std/src/keyword_docs.rs b/library/std/src/keyword_docs.rs
index 2a3d44f..39ed624 100644
--- a/library/std/src/keyword_docs.rs
+++ b/library/std/src/keyword_docs.rs
@@ -1768,6 +1768,7 @@
/// In the 2015 edition the parameters pattern was not needed for traits:
///
/// ```rust,edition2015
+/// # #![allow(anonymous_parameters)]
/// trait Tr {
/// fn f(i32);
/// }
diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs
index 6baf9f2..90603cd 100644
--- a/library/std/src/lib.rs
+++ b/library/std/src/lib.rs
@@ -214,7 +214,6 @@
feature(slice_index_methods, coerce_unsized, sgx_platform)
)]
#![deny(rustc::existing_doc_keyword)]
-#![cfg_attr(all(test, target_vendor = "fortanix", target_env = "sgx"), feature(fixed_size_array))]
// std is implemented with unstable features, many of which are internal
// compiler details that will never be stable
// NB: the following list is sorted to minimize merge conflicts.
@@ -262,7 +261,6 @@
#![cfg_attr(not(bootstrap), feature(doc_notable_trait))]
#![feature(dropck_eyepatch)]
#![feature(duration_constants)]
-#![feature(duration_zero)]
#![feature(edition_panic)]
#![feature(exact_size_is_empty)]
#![feature(exhaustive_patterns)]
@@ -539,22 +537,16 @@
#[allow(dead_code, unused_attributes)]
mod backtrace_rs;
-// Pull in the `std_detect` crate directly into libstd. The contents of
-// `std_detect` are in a different repository: rust-lang/stdarch.
-//
-// `std_detect` depends on libstd, but the contents of this module are
-// set up in such a way that directly pulling it here works such that the
-// crate uses the this crate as its libstd.
-#[path = "../../stdarch/crates/std_detect/src/mod.rs"]
-#[allow(missing_debug_implementations, missing_docs, dead_code)]
-#[unstable(feature = "stdsimd", issue = "48556")]
-#[cfg(not(test))]
-mod std_detect;
-
+#[stable(feature = "simd_x86", since = "1.27.0")]
+pub use std_detect::is_x86_feature_detected;
#[doc(hidden)]
#[unstable(feature = "stdsimd", issue = "48556")]
-#[cfg(not(test))]
-pub use std_detect::detect;
+pub use std_detect::*;
+#[unstable(feature = "stdsimd", issue = "48556")]
+pub use std_detect::{
+ is_aarch64_feature_detected, is_arm_feature_detected, is_mips64_feature_detected,
+ is_mips_feature_detected, is_powerpc64_feature_detected, is_powerpc_feature_detected,
+};
// Re-export macros defined in libcore.
#[stable(feature = "rust1", since = "1.0.0")]
diff --git a/library/std/src/sys_common/alloc.rs b/library/std/src/sys/common/alloc.rs
similarity index 98%
rename from library/std/src/sys_common/alloc.rs
rename to library/std/src/sys/common/alloc.rs
index 6c1bc0d..2a54e99 100644
--- a/library/std/src/sys_common/alloc.rs
+++ b/library/std/src/sys/common/alloc.rs
@@ -1,5 +1,3 @@
-#![allow(dead_code)]
-
use crate::alloc::{GlobalAlloc, Layout, System};
use crate::cmp;
use crate::ptr;
diff --git a/library/std/src/sys/common/mod.rs b/library/std/src/sys/common/mod.rs
new file mode 100644
index 0000000..ff64d2a
--- /dev/null
+++ b/library/std/src/sys/common/mod.rs
@@ -0,0 +1,13 @@
+// This module contains code that is shared between all platforms, mostly utility or fallback code.
+// This explicitly does not include code that is shared between only a few platforms,
+// such as when reusing an implementation from `unix` or `unsupported`.
+// In those cases the desired code should be included directly using the #[path] attribute,
+// not moved to this module.
+//
+// Currently `sys_common` contains a lot of code that should live in this module,
+// ideally `sys_common` would only contain platform-independent abstractions on top of `sys`.
+// Progress on this is tracked in #84187.
+
+#![allow(dead_code)]
+
+pub mod alloc;
diff --git a/library/std/src/sys/mod.rs b/library/std/src/sys/mod.rs
index 9b35939..50c2660 100644
--- a/library/std/src/sys/mod.rs
+++ b/library/std/src/sys/mod.rs
@@ -22,6 +22,8 @@
#![allow(missing_debug_implementations)]
+mod common;
+
cfg_if::cfg_if! {
if #[cfg(target_os = "vxworks")] {
mod vxworks;
diff --git a/library/std/src/sys/unix/alloc.rs b/library/std/src/sys/unix/alloc.rs
index 964abe8..1b71905 100644
--- a/library/std/src/sys/unix/alloc.rs
+++ b/library/std/src/sys/unix/alloc.rs
@@ -1,6 +1,6 @@
use crate::alloc::{GlobalAlloc, Layout, System};
use crate::ptr;
-use crate::sys_common::alloc::{realloc_fallback, MIN_ALIGN};
+use crate::sys::common::alloc::{realloc_fallback, MIN_ALIGN};
#[stable(feature = "alloc_system_type", since = "1.28.0")]
unsafe impl GlobalAlloc for System {
diff --git a/library/std/src/sys/unix/os.rs b/library/std/src/sys/unix/os.rs
index 4a077e2..ce2c4e8 100644
--- a/library/std/src/sys/unix/os.rs
+++ b/library/std/src/sys/unix/os.rs
@@ -223,7 +223,7 @@
impl fmt::Display for JoinPathsError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "path segment contains separator `{}`", PATH_SEPARATOR)
+ write!(f, "path segment contains separator `{}`", char::from(PATH_SEPARATOR))
}
}
diff --git a/library/std/src/sys/windows/alloc.rs b/library/std/src/sys/windows/alloc.rs
index af93cd7..2fe71f9 100644
--- a/library/std/src/sys/windows/alloc.rs
+++ b/library/std/src/sys/windows/alloc.rs
@@ -5,7 +5,7 @@
use crate::ptr;
use crate::sync::atomic::{AtomicPtr, Ordering};
use crate::sys::c;
-use crate::sys_common::alloc::{realloc_fallback, MIN_ALIGN};
+use crate::sys::common::alloc::{realloc_fallback, MIN_ALIGN};
#[cfg(test)]
mod tests;
diff --git a/library/std/src/sys_common/mod.rs b/library/std/src/sys_common/mod.rs
index 660f0e0..23a3a0e 100644
--- a/library/std/src/sys_common/mod.rs
+++ b/library/std/src/sys_common/mod.rs
@@ -8,9 +8,11 @@
//! rest of `std` is complex, with dependencies going in all
//! directions: `std` depending on `sys_common`, `sys_common`
//! depending on `sys`, and `sys` depending on `sys_common` and `std`.
-//! Ideally `sys_common` would be split into two and the dependencies
-//! between them all would form a dag, facilitating the extraction of
-//! `std::sys` from the standard library.
+//! This is because `sys_common` not only contains platform-independent code,
+//! but also code that is shared between the different platforms in `sys`.
+//! Ideally all that shared code should be moved to `sys::common`,
+//! and the dependencies between `std`, `sys_common` and `sys` all would form a dag.
+//! Progress on this is tracked in #84187.
#![allow(missing_docs)]
#![allow(missing_debug_implementations)]
@@ -46,7 +48,6 @@
};
}
-pub mod alloc;
pub mod at_exit_imp;
pub mod backtrace;
pub mod bytestring;
diff --git a/library/stdarch b/library/stdarch
index 9c732a5..19f5459 160000
--- a/library/stdarch
+++ b/library/stdarch
@@ -1 +1 @@
-Subproject commit 9c732a56f67f54d12a0b4fd99993154906c95ea6
+Subproject commit 19f5459dd0f89e466b7bcaa0f69ecca90f21a4d1
diff --git a/library/test/src/bench.rs b/library/test/src/bench.rs
index d4b3728..1691541 100644
--- a/library/test/src/bench.rs
+++ b/library/test/src/bench.rs
@@ -2,7 +2,11 @@
pub use std::hint::black_box;
use super::{
- event::CompletedTest, options::BenchMode, test_result::TestResult, types::TestDesc, Sender,
+ event::CompletedTest,
+ options::BenchMode,
+ test_result::TestResult,
+ types::{TestDesc, TestId},
+ Sender,
};
use crate::stats;
@@ -177,8 +181,13 @@
}
}
-pub fn benchmark<F>(desc: TestDesc, monitor_ch: Sender<CompletedTest>, nocapture: bool, f: F)
-where
+pub fn benchmark<F>(
+ id: TestId,
+ desc: TestDesc,
+ monitor_ch: Sender<CompletedTest>,
+ nocapture: bool,
+ f: F,
+) where
F: FnMut(&mut Bencher),
{
let mut bs = Bencher { mode: BenchMode::Auto, summary: None, bytes: 0 };
@@ -213,7 +222,7 @@
};
let stdout = data.lock().unwrap().to_vec();
- let message = CompletedTest::new(desc, test_result, None, stdout);
+ let message = CompletedTest::new(id, desc, test_result, None, stdout);
monitor_ch.send(message).unwrap();
}
diff --git a/library/test/src/event.rs b/library/test/src/event.rs
index 2103a0d..206f3e1 100644
--- a/library/test/src/event.rs
+++ b/library/test/src/event.rs
@@ -3,10 +3,11 @@
use super::test_result::TestResult;
use super::time::TestExecTime;
-use super::types::TestDesc;
+use super::types::{TestDesc, TestId};
#[derive(Debug, Clone)]
pub struct CompletedTest {
+ pub id: TestId,
pub desc: TestDesc,
pub result: TestResult,
pub exec_time: Option<TestExecTime>,
@@ -15,12 +16,13 @@
impl CompletedTest {
pub fn new(
+ id: TestId,
desc: TestDesc,
result: TestResult,
exec_time: Option<TestExecTime>,
stdout: Vec<u8>,
) -> Self {
- Self { desc, result, exec_time, stdout }
+ Self { id, desc, result, exec_time, stdout }
}
}
diff --git a/library/test/src/lib.rs b/library/test/src/lib.rs
index 7683f79..2e0864f 100644
--- a/library/test/src/lib.rs
+++ b/library/test/src/lib.rs
@@ -54,7 +54,7 @@
time::{TestExecTime, TestTimeOptions},
types::{
DynTestFn, DynTestName, StaticBenchFn, StaticTestFn, StaticTestName, TestDesc,
- TestDescAndFn, TestName, TestType,
+ TestDescAndFn, TestId, TestName, TestType,
},
};
}
@@ -215,9 +215,10 @@
// Use a deterministic hasher
type TestMap =
- HashMap<TestDesc, RunningTest, BuildHasherDefault<collections::hash_map::DefaultHasher>>;
+ HashMap<TestId, RunningTest, BuildHasherDefault<collections::hash_map::DefaultHasher>>;
struct TimeoutEntry {
+ id: TestId,
desc: TestDesc,
timeout: Instant,
}
@@ -249,7 +250,9 @@
let (filtered_tests, filtered_benchs): (Vec<_>, _) = filtered_tests
.into_iter()
- .partition(|e| matches!(e.testfn, StaticTestFn(_) | DynTestFn(_)));
+ .enumerate()
+ .map(|(i, e)| (TestId(i), e))
+ .partition(|(_, e)| matches!(e.testfn, StaticTestFn(_) | DynTestFn(_)));
let concurrency = opts.test_threads.unwrap_or_else(get_concurrency);
@@ -278,7 +281,7 @@
break;
}
let timeout_entry = timeout_queue.pop_front().unwrap();
- if running_tests.contains_key(&timeout_entry.desc) {
+ if running_tests.contains_key(&timeout_entry.id) {
timed_out.push(timeout_entry.desc);
}
}
@@ -294,11 +297,11 @@
if concurrency == 1 {
while !remaining.is_empty() {
- let test = remaining.pop().unwrap();
+ let (id, test) = remaining.pop().unwrap();
let event = TestEvent::TeWait(test.desc.clone());
notify_about_test_event(event)?;
let join_handle =
- run_test(opts, !opts.run_tests, test, run_strategy, tx.clone(), Concurrent::No);
+ run_test(opts, !opts.run_tests, id, test, run_strategy, tx.clone(), Concurrent::No);
assert!(join_handle.is_none());
let completed_test = rx.recv().unwrap();
@@ -308,7 +311,7 @@
} else {
while pending > 0 || !remaining.is_empty() {
while pending < concurrency && !remaining.is_empty() {
- let test = remaining.pop().unwrap();
+ let (id, test) = remaining.pop().unwrap();
let timeout = time::get_default_test_timeout();
let desc = test.desc.clone();
@@ -317,13 +320,14 @@
let join_handle = run_test(
opts,
!opts.run_tests,
+ id,
test,
run_strategy,
tx.clone(),
Concurrent::Yes,
);
- running_tests.insert(desc.clone(), RunningTest { join_handle });
- timeout_queue.push_back(TimeoutEntry { desc, timeout });
+ running_tests.insert(id, RunningTest { join_handle });
+ timeout_queue.push_back(TimeoutEntry { id, desc, timeout });
pending += 1;
}
@@ -352,13 +356,12 @@
}
let mut completed_test = res.unwrap();
- if let Some(running_test) = running_tests.remove(&completed_test.desc) {
- if let Some(join_handle) = running_test.join_handle {
- if let Err(_) = join_handle.join() {
- if let TrOk = completed_test.result {
- completed_test.result =
- TrFailedMsg("panicked after reporting success".to_string());
- }
+ let running_test = running_tests.remove(&completed_test.id).unwrap();
+ if let Some(join_handle) = running_test.join_handle {
+ if let Err(_) = join_handle.join() {
+ if let TrOk = completed_test.result {
+ completed_test.result =
+ TrFailedMsg("panicked after reporting success".to_string());
}
}
}
@@ -371,10 +374,10 @@
if opts.bench_benchmarks {
// All benchmarks run at the end, in serial.
- for b in filtered_benchs {
+ for (id, b) in filtered_benchs {
let event = TestEvent::TeWait(b.desc.clone());
notify_about_test_event(event)?;
- run_test(opts, false, b, run_strategy, tx.clone(), Concurrent::No);
+ run_test(opts, false, id, b, run_strategy, tx.clone(), Concurrent::No);
let completed_test = rx.recv().unwrap();
let event = TestEvent::TeResult(completed_test);
@@ -448,6 +451,7 @@
pub fn run_test(
opts: &TestOpts,
force_ignore: bool,
+ id: TestId,
test: TestDescAndFn,
strategy: RunStrategy,
monitor_ch: Sender<CompletedTest>,
@@ -461,7 +465,7 @@
&& !cfg!(target_os = "emscripten");
if force_ignore || desc.ignore || ignore_because_no_process_support {
- let message = CompletedTest::new(desc, TrIgnored, None, Vec::new());
+ let message = CompletedTest::new(id, desc, TrIgnored, None, Vec::new());
monitor_ch.send(message).unwrap();
return None;
}
@@ -474,6 +478,7 @@
}
fn run_test_inner(
+ id: TestId,
desc: TestDesc,
monitor_ch: Sender<CompletedTest>,
testfn: Box<dyn FnOnce() + Send>,
@@ -484,6 +489,7 @@
let runtest = move || match opts.strategy {
RunStrategy::InProcess => run_test_in_process(
+ id,
desc,
opts.nocapture,
opts.time.is_some(),
@@ -492,6 +498,7 @@
opts.time,
),
RunStrategy::SpawnPrimary => spawn_test_subprocess(
+ id,
desc,
opts.nocapture,
opts.time.is_some(),
@@ -530,14 +537,14 @@
match testfn {
DynBenchFn(bencher) => {
// Benchmarks aren't expected to panic, so we run them all in-process.
- crate::bench::benchmark(desc, monitor_ch, opts.nocapture, |harness| {
+ crate::bench::benchmark(id, desc, monitor_ch, opts.nocapture, |harness| {
bencher.run(harness)
});
None
}
StaticBenchFn(benchfn) => {
// Benchmarks aren't expected to panic, so we run them all in-process.
- crate::bench::benchmark(desc, monitor_ch, opts.nocapture, benchfn);
+ crate::bench::benchmark(id, desc, monitor_ch, opts.nocapture, benchfn);
None
}
DynTestFn(f) => {
@@ -546,6 +553,7 @@
_ => panic!("Cannot run dynamic test fn out-of-process"),
};
run_test_inner(
+ id,
desc,
monitor_ch,
Box::new(move || __rust_begin_short_backtrace(f)),
@@ -553,6 +561,7 @@
)
}
StaticTestFn(f) => run_test_inner(
+ id,
desc,
monitor_ch,
Box::new(move || __rust_begin_short_backtrace(f)),
@@ -571,6 +580,7 @@
}
fn run_test_in_process(
+ id: TestId,
desc: TestDesc,
nocapture: bool,
report_time: bool,
@@ -599,11 +609,12 @@
Err(e) => calc_result(&desc, Err(e.as_ref()), &time_opts, &exec_time),
};
let stdout = data.lock().unwrap_or_else(|e| e.into_inner()).to_vec();
- let message = CompletedTest::new(desc, test_result, exec_time, stdout);
+ let message = CompletedTest::new(id, desc, test_result, exec_time, stdout);
monitor_ch.send(message).unwrap();
}
fn spawn_test_subprocess(
+ id: TestId,
desc: TestDesc,
nocapture: bool,
report_time: bool,
@@ -653,7 +664,7 @@
(result, test_output, exec_time)
})();
- let message = CompletedTest::new(desc, result, exec_time, test_output);
+ let message = CompletedTest::new(id, desc, result, exec_time, test_output);
monitor_ch.send(message).unwrap();
}
diff --git a/library/test/src/tests.rs b/library/test/src/tests.rs
index e3c9b38..6a3f31b 100644
--- a/library/test/src/tests.rs
+++ b/library/test/src/tests.rs
@@ -94,7 +94,7 @@
testfn: DynTestFn(Box::new(f)),
};
let (tx, rx) = channel();
- run_test(&TestOpts::new(), false, desc, RunStrategy::InProcess, tx, Concurrent::No);
+ run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
let result = rx.recv().unwrap().result;
assert_ne!(result, TrOk);
}
@@ -113,7 +113,7 @@
testfn: DynTestFn(Box::new(f)),
};
let (tx, rx) = channel();
- run_test(&TestOpts::new(), false, desc, RunStrategy::InProcess, tx, Concurrent::No);
+ run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
let result = rx.recv().unwrap().result;
assert_eq!(result, TrIgnored);
}
@@ -136,7 +136,7 @@
testfn: DynTestFn(Box::new(f)),
};
let (tx, rx) = channel();
- run_test(&TestOpts::new(), false, desc, RunStrategy::InProcess, tx, Concurrent::No);
+ run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
let result = rx.recv().unwrap().result;
assert_eq!(result, TrOk);
}
@@ -159,7 +159,7 @@
testfn: DynTestFn(Box::new(f)),
};
let (tx, rx) = channel();
- run_test(&TestOpts::new(), false, desc, RunStrategy::InProcess, tx, Concurrent::No);
+ run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
let result = rx.recv().unwrap().result;
assert_eq!(result, TrOk);
}
@@ -187,7 +187,7 @@
testfn: DynTestFn(Box::new(f)),
};
let (tx, rx) = channel();
- run_test(&TestOpts::new(), false, desc, RunStrategy::InProcess, tx, Concurrent::No);
+ run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
let result = rx.recv().unwrap().result;
assert_eq!(result, TrFailedMsg(failed_msg.to_string()));
}
@@ -219,7 +219,7 @@
testfn: DynTestFn(Box::new(f)),
};
let (tx, rx) = channel();
- run_test(&TestOpts::new(), false, desc, RunStrategy::InProcess, tx, Concurrent::No);
+ run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
let result = rx.recv().unwrap().result;
assert_eq!(result, TrFailedMsg(failed_msg));
}
@@ -243,7 +243,15 @@
testfn: DynTestFn(Box::new(f)),
};
let (tx, rx) = channel();
- run_test(&TestOpts::new(), false, desc, RunStrategy::InProcess, tx, Concurrent::No);
+ run_test(
+ &TestOpts::new(),
+ false,
+ TestId(0),
+ desc,
+ RunStrategy::InProcess,
+ tx,
+ Concurrent::No,
+ );
let result = rx.recv().unwrap().result;
assert_eq!(
result,
@@ -270,7 +278,7 @@
let test_opts = TestOpts { time_options, ..TestOpts::new() };
let (tx, rx) = channel();
- run_test(&test_opts, false, desc, RunStrategy::InProcess, tx, Concurrent::No);
+ run_test(&test_opts, false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
let exec_time = rx.recv().unwrap().exec_time;
exec_time
}
@@ -305,7 +313,7 @@
let test_opts = TestOpts { time_options: Some(time_options), ..TestOpts::new() };
let (tx, rx) = channel();
- run_test(&test_opts, false, desc, RunStrategy::InProcess, tx, Concurrent::No);
+ run_test(&test_opts, false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
let result = rx.recv().unwrap().result;
result
@@ -637,7 +645,7 @@
test_type: TestType::Unknown,
};
- crate::bench::benchmark(desc, tx, true, f);
+ crate::bench::benchmark(TestId(0), desc, tx, true, f);
rx.recv().unwrap();
}
@@ -657,7 +665,7 @@
test_type: TestType::Unknown,
};
- crate::bench::benchmark(desc, tx, true, f);
+ crate::bench::benchmark(TestId(0), desc, tx, true, f);
rx.recv().unwrap();
}
diff --git a/library/test/src/types.rs b/library/test/src/types.rs
index 5b75d2f3..c5d91f6 100644
--- a/library/test/src/types.rs
+++ b/library/test/src/types.rs
@@ -112,9 +112,13 @@
}
}
+// A unique integer associated with each test.
+#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
+pub struct TestId(pub usize);
+
// The definition of a single test. A test runner will run a list of
// these.
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(Clone, Debug)]
pub struct TestDesc {
pub name: TestName,
pub ignore: bool,
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
index 8244c77..66a88e8 100644
--- a/src/bootstrap/compile.rs
+++ b/src/bootstrap/compile.rs
@@ -462,11 +462,13 @@
let dst_file = &dst_dir.join(file.to_string() + ".o");
if !up_to_date(src_file, dst_file) {
let mut cmd = Command::new(&builder.initial_rustc);
+ cmd.env("RUSTC_BOOTSTRAP", "1");
+ if !builder.local_rebuild {
+ // a local_rebuild compiler already has stage1 features
+ cmd.arg("--cfg").arg("bootstrap");
+ }
builder.run(
- cmd.env("RUSTC_BOOTSTRAP", "1")
- .arg("--cfg")
- .arg("bootstrap")
- .arg("--target")
+ cmd.arg("--target")
.arg(target.rustc_target_arg())
.arg("--emit=obj")
.arg("-o")
diff --git a/src/doc/embedded-book b/src/doc/embedded-book
index d3f2ace..569c339 160000
--- a/src/doc/embedded-book
+++ b/src/doc/embedded-book
@@ -1 +1 @@
-Subproject commit d3f2ace94d51610cf3e3c265705bb8416d37f8e4
+Subproject commit 569c3391f5c0cc43433bc77831d17f8ff4d76602
diff --git a/src/doc/nomicon b/src/doc/nomicon
index 6fe4769..8551afb 160000
--- a/src/doc/nomicon
+++ b/src/doc/nomicon
@@ -1 +1 @@
-Subproject commit 6fe476943afd53a9a6e91f38a6ea7bb48811d8ff
+Subproject commit 8551afbb2ca6f5ea37fe58380318b209785e4e02
diff --git a/src/doc/reference b/src/doc/reference
index fd97729..e1abb17 160000
--- a/src/doc/reference
+++ b/src/doc/reference
@@ -1 +1 @@
-Subproject commit fd97729e2d82f8b08d68a31c9bfdf0c37a7fd542
+Subproject commit e1abb17cd94cd5a8a374b48e1bc8134a2208ed48
diff --git a/src/doc/rust-by-example b/src/doc/rust-by-example
index 29d91f5..c80f0b0 160000
--- a/src/doc/rust-by-example
+++ b/src/doc/rust-by-example
@@ -1 +1 @@
-Subproject commit 29d91f591c90dd18fdca6d23f1a9caf9c139d0d7
+Subproject commit c80f0b09fc15b9251825343be910c08531938ab2
diff --git a/src/doc/rustc-dev-guide b/src/doc/rustc-dev-guide
index 0687daa..a9bd2bb 160000
--- a/src/doc/rustc-dev-guide
+++ b/src/doc/rustc-dev-guide
@@ -1 +1 @@
-Subproject commit 0687daac28939c476df51778f5a1d1aff1a3fddf
+Subproject commit a9bd2bbf31e4f92b5d3d8e80b22839d0cc7a2022
diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs
index c9fdaa5..be7bff1 100644
--- a/src/librustdoc/core.rs
+++ b/src/librustdoc/core.rs
@@ -5,8 +5,8 @@
use rustc_errors::emitter::{Emitter, EmitterWriter};
use rustc_errors::json::JsonEmitter;
use rustc_feature::UnstableFeatures;
-use rustc_hir::def::{Namespace::TypeNS, Res};
-use rustc_hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE};
+use rustc_hir::def::Res;
+use rustc_hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, LOCAL_CRATE};
use rustc_hir::HirId;
use rustc_hir::{
intravisit::{self, NestedVisitorMap, Visitor},
@@ -356,55 +356,7 @@
let (krate, resolver, _) = &*parts;
let resolver = resolver.borrow().clone();
- // Letting the resolver escape at the end of the function leads to inconsistencies between the
- // crates the TyCtxt sees and the resolver sees (because the resolver could load more crates
- // after escaping). Hopefully `IntraLinkCrateLoader` gets all the crates we need ...
- struct IntraLinkCrateLoader {
- current_mod: DefId,
- resolver: Rc<RefCell<interface::BoxedResolver>>,
- }
- impl ast::visit::Visitor<'_> for IntraLinkCrateLoader {
- fn visit_attribute(&mut self, attr: &ast::Attribute) {
- use crate::html::markdown::{markdown_links, MarkdownLink};
- use crate::passes::collect_intra_doc_links::Disambiguator;
-
- if let Some(doc) = attr.doc_str() {
- for MarkdownLink { link, .. } in markdown_links(&doc.as_str()) {
- // FIXME: this misses a *lot* of the preprocessing done in collect_intra_doc_links
- // I think most of it shouldn't be necessary since we only need the crate prefix?
- let path_str = match Disambiguator::from_str(&link) {
- Ok(x) => x.map_or(link.as_str(), |(_, p)| p),
- Err(_) => continue,
- };
- self.resolver.borrow_mut().access(|resolver| {
- let _ = resolver.resolve_str_path_error(
- attr.span,
- path_str,
- TypeNS,
- self.current_mod,
- );
- });
- }
- }
- ast::visit::walk_attribute(self, attr);
- }
-
- fn visit_item(&mut self, item: &ast::Item) {
- use rustc_ast_lowering::ResolverAstLowering;
-
- if let ast::ItemKind::Mod(..) = item.kind {
- let new_mod =
- self.resolver.borrow_mut().access(|resolver| resolver.local_def_id(item.id));
- let old_mod = mem::replace(&mut self.current_mod, new_mod.to_def_id());
- ast::visit::walk_item(self, item);
- self.current_mod = old_mod;
- } else {
- ast::visit::walk_item(self, item);
- }
- }
- }
- let crate_id = LocalDefId { local_def_index: CRATE_DEF_INDEX }.to_def_id();
- let mut loader = IntraLinkCrateLoader { current_mod: crate_id, resolver };
+ let mut loader = crate::passes::collect_intra_doc_links::IntraLinkCrateLoader::new(resolver);
ast::visit::walk_crate(&mut loader, krate);
loader.resolver
diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs
index fbe799e..efd453f 100644
--- a/src/librustdoc/html/render/mod.rs
+++ b/src/librustdoc/html/render/mod.rs
@@ -912,10 +912,9 @@
let cache = cx.cache();
let tcx = cx.tcx();
let name = meth.name.as_ref().unwrap();
- let anchor = format!("#{}.{}", meth.type_(), name);
let href = match link {
AssocItemLink::Anchor(Some(ref id)) => format!("#{}", id),
- AssocItemLink::Anchor(None) => anchor,
+ AssocItemLink::Anchor(None) => format!("#{}.{}", meth.type_(), name),
AssocItemLink::GotoSource(did, provided_methods) => {
// We're creating a link from an impl-item to the corresponding
// trait-item and need to map the anchored type accordingly.
@@ -925,7 +924,9 @@
ItemType::TyMethod
};
- href(did, cache).map(|p| format!("{}#{}.{}", p.0, ty, name)).unwrap_or(anchor)
+ href(did, cache)
+ .map(|p| format!("{}#{}.{}", p.0, ty, name))
+ .unwrap_or_else(|| format!("#{}.{}", ty, name))
}
};
let vis = meth.visibility.print_with_space(tcx, meth.def_id, cache).to_string();
@@ -1452,14 +1453,32 @@
} else {
(true, " hidden")
};
+ let in_trait_class = if trait_.is_some() { " trait-impl" } else { "" };
match *item.kind {
clean::MethodItem(..) | clean::TyMethodItem(_) => {
// Only render when the method is not static or we allow static methods
if render_method_item {
let id = cx.derive_id(format!("{}.{}", item_type, name));
- write!(w, "<h4 id=\"{}\" class=\"{}{}\">", id, item_type, extra_class);
+ let source_id = trait_
+ .and_then(|trait_| {
+ trait_.items.iter().find(|item| {
+ item.name.map(|n| n.as_str().eq(&name.as_str())).unwrap_or(false)
+ })
+ })
+ .map(|item| format!("{}.{}", item.type_(), name));
+ write!(
+ w,
+ "<h4 id=\"{}\" class=\"{}{}{}\">",
+ id, item_type, extra_class, in_trait_class,
+ );
w.write_str("<code>");
- render_assoc_item(w, item, link.anchor(&id), ItemType::Impl, cx);
+ render_assoc_item(
+ w,
+ item,
+ link.anchor(source_id.as_ref().unwrap_or(&id)),
+ ItemType::Impl,
+ cx,
+ );
w.write_str("</code>");
render_stability_since_raw(
w,
@@ -1468,29 +1487,50 @@
outer_version,
outer_const_version,
);
+ write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
write_srclink(cx, item, w);
w.write_str("</h4>");
}
}
clean::TypedefItem(ref tydef, _) => {
- let id = cx.derive_id(format!("{}.{}", ItemType::AssocType, name));
- write!(w, "<h4 id=\"{}\" class=\"{}{}\"><code>", id, item_type, extra_class);
+ let source_id = format!("{}.{}", ItemType::AssocType, name);
+ let id = cx.derive_id(source_id.clone());
+ write!(
+ w,
+ "<h4 id=\"{}\" class=\"{}{}{}\"><code>",
+ id, item_type, extra_class, in_trait_class
+ );
assoc_type(
w,
item,
&Vec::new(),
Some(&tydef.type_),
- link.anchor(&id),
+ link.anchor(if trait_.is_some() { &source_id } else { &id }),
"",
cx.cache(),
tcx,
);
- w.write_str("</code></h4>");
+ w.write_str("</code>");
+ write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
+ w.write_str("</h4>");
}
clean::AssocConstItem(ref ty, ref default) => {
- let id = cx.derive_id(format!("{}.{}", item_type, name));
- write!(w, "<h4 id=\"{}\" class=\"{}{}\"><code>", id, item_type, extra_class);
- assoc_const(w, item, ty, default.as_ref(), link.anchor(&id), "", cx);
+ let source_id = format!("{}.{}", item_type, name);
+ let id = cx.derive_id(source_id.clone());
+ write!(
+ w,
+ "<h4 id=\"{}\" class=\"{}{}{}\"><code>",
+ id, item_type, extra_class, in_trait_class
+ );
+ assoc_const(
+ w,
+ item,
+ ty,
+ default.as_ref(),
+ link.anchor(if trait_.is_some() { &source_id } else { &id }),
+ "",
+ cx,
+ );
w.write_str("</code>");
render_stability_since_raw(
w,
@@ -1499,23 +1539,31 @@
outer_version,
outer_const_version,
);
+ write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
write_srclink(cx, item, w);
w.write_str("</h4>");
}
clean::AssocTypeItem(ref bounds, ref default) => {
- let id = cx.derive_id(format!("{}.{}", item_type, name));
- write!(w, "<h4 id=\"{}\" class=\"{}{}\"><code>", id, item_type, extra_class);
+ let source_id = format!("{}.{}", item_type, name);
+ let id = cx.derive_id(source_id.clone());
+ write!(
+ w,
+ "<h4 id=\"{}\" class=\"{}{}{}\"><code>",
+ id, item_type, extra_class, in_trait_class
+ );
assoc_type(
w,
item,
bounds,
default.as_ref(),
- link.anchor(&id),
+ link.anchor(if trait_.is_some() { &source_id } else { &id }),
"",
cx.cache(),
tcx,
);
- w.write_str("</code></h4>");
+ w.write_str("</code>");
+ write!(w, "<a href=\"#{}\" class=\"anchor\"></a>", id);
+ w.write_str("</h4>");
}
clean::StrippedItem(..) => return,
_ => panic!("can't make docs for trait item with name {:?}", item.name),
@@ -1605,7 +1653,7 @@
true,
outer_version,
outer_const_version,
- None,
+ Some(t),
show_def_docs,
);
}
diff --git a/src/librustdoc/html/static/rustdoc.css b/src/librustdoc/html/static/rustdoc.css
index 585b745..705ae17 100644
--- a/src/librustdoc/html/static/rustdoc.css
+++ b/src/librustdoc/html/static/rustdoc.css
@@ -133,7 +133,8 @@
margin-bottom: 10px;
position: relative;
}
-h3.impl, h3.method, h3.type {
+h3.impl, h3.method, h4.method.trait-impl, h3.type,
+h4.type.trait-impl, h4.associatedconstant.trait-impl {
padding-left: 15px;
}
@@ -655,7 +656,8 @@
display: initial;
}
-.in-band:hover > .anchor, .impl:hover > .anchor {
+.in-band:hover > .anchor, .impl:hover > .anchor, .method.trait-impl:hover > .anchor,
+.type.trait-impl:hover > .anchor, .associatedconstant.trait-impl:hover > .anchor {
display: inline-block;
position: absolute;
}
diff --git a/src/librustdoc/passes/bare_urls.rs b/src/librustdoc/passes/bare_urls.rs
index 524f266..ac0d74c 100644
--- a/src/librustdoc/passes/bare_urls.rs
+++ b/src/librustdoc/passes/bare_urls.rs
@@ -73,6 +73,7 @@
.unwrap_or(item.span.inner());
cx.tcx.struct_span_lint_hir(crate::lint::BARE_URLS, hir_id, sp, |lint| {
lint.build(msg)
+ .note("bare URLs are not automatically turned into clickable links")
.span_suggestion(
sp,
"use an automatic link instead",
diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs
index 4bc7544..4ce7c70 100644
--- a/src/librustdoc/passes/collect_intra_doc_links.rs
+++ b/src/librustdoc/passes/collect_intra_doc_links.rs
@@ -39,13 +39,16 @@
use super::span_of_attrs;
+mod early;
+crate use early::IntraLinkCrateLoader;
+
crate const COLLECT_INTRA_DOC_LINKS: Pass = Pass {
name: "collect-intra-doc-links",
run: collect_intra_doc_links,
description: "resolves intra-doc links",
};
-crate fn collect_intra_doc_links(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
+fn collect_intra_doc_links(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
LinkCollector {
cx,
mod_ids: Vec::new(),
@@ -892,6 +895,117 @@
}
}
+enum PreprocessingError<'a> {
+ Anchor(AnchorFailure),
+ Disambiguator(Range<usize>, String),
+ Resolution(ResolutionFailure<'a>, String, Option<Disambiguator>),
+}
+
+impl From<AnchorFailure> for PreprocessingError<'_> {
+ fn from(err: AnchorFailure) -> Self {
+ Self::Anchor(err)
+ }
+}
+
+struct PreprocessingInfo {
+ path_str: String,
+ disambiguator: Option<Disambiguator>,
+ extra_fragment: Option<String>,
+ link_text: String,
+}
+
+/// Returns:
+/// - `None` if the link should be ignored.
+/// - `Some(Err)` if the link should emit an error
+/// - `Some(Ok)` if the link is valid
+///
+/// The `'a` lifetime is needed because the returned `Err` variant may borrow from `ori_link`.
+fn preprocess_link<'a>(
+ ori_link: &'a MarkdownLink,
+) -> Option<Result<PreprocessingInfo, PreprocessingError<'a>>> {
+ // [] is most likely not supposed to be a link
+ if ori_link.link.is_empty() {
+ return None;
+ }
+
+ // Bail early for real links.
+ if ori_link.link.contains('/') {
+ return None;
+ }
+
+ let stripped = ori_link.link.replace("`", "");
+ let mut parts = stripped.split('#');
+
+ let link = parts.next().unwrap();
+ if link.trim().is_empty() {
+ // This is an anchor to an element of the current page, nothing to do in here!
+ return None;
+ }
+ let extra_fragment = parts.next();
+ if parts.next().is_some() {
+ // A valid link can't have multiple #'s
+ return Some(Err(AnchorFailure::MultipleAnchors.into()));
+ }
+
+ // Parse and strip the disambiguator from the link, if present.
+ let (path_str, disambiguator) = match Disambiguator::from_str(&link) {
+ Ok(Some((d, path))) => (path.trim(), Some(d)),
+ Ok(None) => (link.trim(), None),
+ Err((err_msg, relative_range)) => {
+ // Only report error if we would not have ignored this link. See issue #83859.
+ if !should_ignore_link_with_disambiguators(link) {
+ let no_backticks_range = range_between_backticks(&ori_link);
+ let disambiguator_range = (no_backticks_range.start + relative_range.start)
+ ..(no_backticks_range.start + relative_range.end);
+ return Some(Err(PreprocessingError::Disambiguator(disambiguator_range, err_msg)));
+ } else {
+ return None;
+ }
+ }
+ };
+
+ if should_ignore_link(path_str) {
+ return None;
+ }
+
+ // We stripped `()` and `!` when parsing the disambiguator.
+ // Add them back to be displayed, but not prefix disambiguators.
+ let link_text =
+ disambiguator.map(|d| d.display_for(path_str)).unwrap_or_else(|| path_str.to_owned());
+
+ // Strip generics from the path.
+ let path_str = if path_str.contains(['<', '>'].as_slice()) {
+ match strip_generics_from_path(&path_str) {
+ Ok(path) => path,
+ Err(err_kind) => {
+ debug!("link has malformed generics: {}", path_str);
+ return Some(Err(PreprocessingError::Resolution(
+ err_kind,
+ path_str.to_owned(),
+ disambiguator,
+ )));
+ }
+ }
+ } else {
+ path_str.to_owned()
+ };
+
+ // Sanity check to make sure we don't have any angle brackets after stripping generics.
+ assert!(!path_str.contains(['<', '>'].as_slice()));
+
+ // The link is not an intra-doc link if it still contains spaces after stripping generics.
+ if path_str.contains(' ') {
+ return None;
+ }
+
+ Some(Ok(PreprocessingInfo {
+ path_str,
+ disambiguator,
+ extra_fragment: extra_fragment.map(String::from),
+ link_text,
+ }))
+}
+
impl LinkCollector<'_, '_> {
/// This is the entry point for resolving an intra-doc link.
///
@@ -907,16 +1021,6 @@
) -> Option<ItemLink> {
trace!("considering link '{}'", ori_link.link);
- // Bail early for real links.
- if ori_link.link.contains('/') {
- return None;
- }
-
- // [] is mostly likely not supposed to be a link
- if ori_link.link.is_empty() {
- return None;
- }
-
let diag_info = DiagnosticInfo {
item,
dox,
@@ -924,47 +1028,29 @@
link_range: ori_link.range.clone(),
};
- let link = ori_link.link.replace("`", "");
- let no_backticks_range = range_between_backticks(&ori_link);
- let parts = link.split('#').collect::<Vec<_>>();
- let (link, extra_fragment) = if parts.len() > 2 {
- // A valid link can't have multiple #'s
- anchor_failure(self.cx, diag_info, AnchorFailure::MultipleAnchors);
- return None;
- } else if parts.len() == 2 {
- if parts[0].trim().is_empty() {
- // This is an anchor to an element of the current page, nothing to do in here!
- return None;
- }
- (parts[0], Some(parts[1].to_owned()))
- } else {
- (parts[0], None)
- };
-
- // Parse and strip the disambiguator from the link, if present.
- let (mut path_str, disambiguator) = match Disambiguator::from_str(&link) {
- Ok(Some((d, path))) => (path.trim(), Some(d)),
- Ok(None) => (link.trim(), None),
- Err((err_msg, relative_range)) => {
- if !should_ignore_link_with_disambiguators(link) {
- // Only report error if we would not have ignored this link.
- // See issue #83859.
- let disambiguator_range = (no_backticks_range.start + relative_range.start)
- ..(no_backticks_range.start + relative_range.end);
- disambiguator_error(self.cx, diag_info, disambiguator_range, &err_msg);
+ let PreprocessingInfo { path_str, disambiguator, extra_fragment, link_text } =
+ match preprocess_link(&ori_link)? {
+ Ok(x) => x,
+ Err(err) => {
+ match err {
+ PreprocessingError::Anchor(err) => anchor_failure(self.cx, diag_info, err),
+ PreprocessingError::Disambiguator(range, msg) => {
+ disambiguator_error(self.cx, diag_info, range, &msg)
+ }
+ PreprocessingError::Resolution(err, path_str, disambiguator) => {
+ resolution_failure(
+ self,
+ diag_info,
+ &path_str,
+ disambiguator,
+ smallvec![err],
+ );
+ }
+ }
+ return None;
}
- return None;
- }
- };
-
- if should_ignore_link(path_str) {
- return None;
- }
-
- // We stripped `()` and `!` when parsing the disambiguator.
- // Add them back to be displayed, but not prefix disambiguators.
- let link_text =
- disambiguator.map(|d| d.display_for(path_str)).unwrap_or_else(|| path_str.to_owned());
+ };
+ let mut path_str = &*path_str;
// In order to correctly resolve intra-doc links we need to
// pick a base AST node to work from. If the documentation for
@@ -1029,39 +1115,12 @@
module_id = DefId { krate, index: CRATE_DEF_INDEX };
}
- // Strip generics from the path.
- let stripped_path_string;
- if path_str.contains(['<', '>'].as_slice()) {
- stripped_path_string = match strip_generics_from_path(path_str) {
- Ok(path) => path,
- Err(err_kind) => {
- debug!("link has malformed generics: {}", path_str);
- resolution_failure(
- self,
- diag_info,
- path_str,
- disambiguator,
- smallvec![err_kind],
- );
- return None;
- }
- };
- path_str = &stripped_path_string;
- }
- // Sanity check to make sure we don't have any angle brackets after stripping generics.
- assert!(!path_str.contains(['<', '>'].as_slice()));
-
- // The link is not an intra-doc link if it still contains spaces after stripping generics.
- if path_str.contains(' ') {
- return None;
- }
-
let (mut res, mut fragment) = self.resolve_with_disambiguator_cached(
ResolutionInfo {
module_id,
dis: disambiguator,
path_str: path_str.to_owned(),
- extra_fragment,
+ extra_fragment: extra_fragment.map(String::from),
},
diag_info.clone(), // this struct should really be Copy, but Range is not :(
matches!(ori_link.kind, LinkType::Reference | LinkType::Shortcut),
@@ -1438,7 +1497,7 @@
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
/// Disambiguators for a link.
-crate enum Disambiguator {
+enum Disambiguator {
/// `prim@`
///
/// This is buggy, see <https://github.com/rust-lang/rust/pull/77875#discussion_r503583103>
@@ -1467,7 +1526,7 @@
/// This returns `Ok(Some(...))` if a disambiguator was found,
/// `Ok(None)` if no disambiguator was found, or `Err(...)`
/// if there was a problem with the disambiguator.
- crate fn from_str(link: &str) -> Result<Option<(Self, &str)>, (String, Range<usize>)> {
+ fn from_str(link: &str) -> Result<Option<(Self, &str)>, (String, Range<usize>)> {
use Disambiguator::{Kind, Namespace as NS, Primitive};
if let Some(idx) = link.find('@') {
@@ -1913,6 +1972,10 @@
if let Some(sp) = sp {
diag.span_label(sp, "contains invalid anchor");
}
+ if let AnchorFailure::RustdocAnchorConflict(Res::Primitive(_)) = failure {
+ diag.note("this restriction may be lifted in a future release");
+ diag.note("see https://github.com/rust-lang/rust/issues/83083 for more information");
+ }
});
}
diff --git a/src/librustdoc/passes/collect_intra_doc_links/early.rs b/src/librustdoc/passes/collect_intra_doc_links/early.rs
new file mode 100644
index 0000000..7cba252
--- /dev/null
+++ b/src/librustdoc/passes/collect_intra_doc_links/early.rs
@@ -0,0 +1,63 @@
+use rustc_ast as ast;
+use rustc_hir::def::Namespace::TypeNS;
+use rustc_hir::def_id::{DefId, LocalDefId, CRATE_DEF_INDEX};
+use rustc_interface::interface;
+
+use std::cell::RefCell;
+use std::mem;
+use std::rc::Rc;
+
+// Letting the resolver escape at the end of the function leads to inconsistencies between the
+// crates the TyCtxt sees and the resolver sees (because the resolver could load more crates
+// after escaping). Hopefully `IntraLinkCrateLoader` gets all the crates we need ...
+crate struct IntraLinkCrateLoader {
+ current_mod: DefId,
+ crate resolver: Rc<RefCell<interface::BoxedResolver>>,
+}
+
+impl IntraLinkCrateLoader {
+ crate fn new(resolver: Rc<RefCell<interface::BoxedResolver>>) -> Self {
+ let crate_id = LocalDefId { local_def_index: CRATE_DEF_INDEX }.to_def_id();
+ Self { current_mod: crate_id, resolver }
+ }
+}
+
+impl ast::visit::Visitor<'_> for IntraLinkCrateLoader {
+ fn visit_attribute(&mut self, attr: &ast::Attribute) {
+ use crate::html::markdown::markdown_links;
+ use crate::passes::collect_intra_doc_links::preprocess_link;
+
+ if let Some(doc) = attr.doc_str() {
+ for link in markdown_links(&doc.as_str()) {
+ let path_str = if let Some(Ok(x)) = preprocess_link(&link) {
+ x.path_str
+ } else {
+ continue;
+ };
+ self.resolver.borrow_mut().access(|resolver| {
+ let _ = resolver.resolve_str_path_error(
+ attr.span,
+ &path_str,
+ TypeNS,
+ self.current_mod,
+ );
+ });
+ }
+ }
+ ast::visit::walk_attribute(self, attr);
+ }
+
+ fn visit_item(&mut self, item: &ast::Item) {
+ use rustc_ast_lowering::ResolverAstLowering;
+
+ if let ast::ItemKind::Mod(..) = item.kind {
+ let new_mod =
+ self.resolver.borrow_mut().access(|resolver| resolver.local_def_id(item.id));
+ let old_mod = mem::replace(&mut self.current_mod, new_mod.to_def_id());
+ ast::visit::walk_item(self, item);
+ self.current_mod = old_mod;
+ } else {
+ ast::visit::walk_item(self, item);
+ }
+ }
+}
diff --git a/src/test/assembly/panic-no-unwind-no-uwtable.rs b/src/test/assembly/panic-no-unwind-no-uwtable.rs
new file mode 100644
index 0000000..499d4e6
--- /dev/null
+++ b/src/test/assembly/panic-no-unwind-no-uwtable.rs
@@ -0,0 +1,8 @@
+// assembly-output: emit-asm
+// only-x86_64-unknown-linux-gnu
+// compile-flags: -C panic=unwind -C force-unwind-tables=n -O
+
+#![crate_type = "lib"]
+
+// CHECK-NOT: .cfi_startproc
+pub fn foo() {}
diff --git a/src/test/assembly/panic-unwind-no-uwtable.rs b/src/test/assembly/panic-unwind-no-uwtable.rs
new file mode 100644
index 0000000..8eed72b
--- /dev/null
+++ b/src/test/assembly/panic-unwind-no-uwtable.rs
@@ -0,0 +1,12 @@
+// assembly-output: emit-asm
+// only-x86_64-unknown-linux-gnu
+// compile-flags: -C panic=unwind -C force-unwind-tables=n
+
+#![crate_type = "lib"]
+
+// CHECK-LABEL: foo:
+// CHECK: .cfi_startproc
+#[no_mangle]
+fn foo() {
+ panic!();
+}
diff --git a/src/test/codegen/force-no-unwind-tables.rs b/src/test/codegen/force-no-unwind-tables.rs
index dc77e6c..3ee23f0 100644
--- a/src/test/codegen/force-no-unwind-tables.rs
+++ b/src/test/codegen/force-no-unwind-tables.rs
@@ -3,5 +3,9 @@
#![crate_type="lib"]
+// CHECK-LABEL: define{{.*}}void @foo
// CHECK-NOT: attributes #{{.*}} uwtable
-pub fn foo() {}
+#[no_mangle]
+fn foo() {
+ panic!();
+}
diff --git a/src/test/codegen/panic-unwind-default-uwtable.rs b/src/test/codegen/panic-unwind-default-uwtable.rs
new file mode 100644
index 0000000..4c85008
--- /dev/null
+++ b/src/test/codegen/panic-unwind-default-uwtable.rs
@@ -0,0 +1,6 @@
+// compile-flags: -C panic=unwind -C no-prepopulate-passes
+
+#![crate_type = "lib"]
+
+// CHECK: attributes #{{.*}} uwtable
+pub fn foo() {}
diff --git a/src/test/rustdoc-ui/url-improvements.rs b/src/test/rustdoc-ui/bare-urls.fixed
similarity index 63%
copy from src/test/rustdoc-ui/url-improvements.rs
copy to src/test/rustdoc-ui/bare-urls.fixed
index 43a13b0..23aa5c4 100644
--- a/src/test/rustdoc-ui/url-improvements.rs
+++ b/src/test/rustdoc-ui/bare-urls.fixed
@@ -1,38 +1,40 @@
+// run-rustfix
+
#![deny(rustdoc::bare_urls)]
-/// https://somewhere.com
+/// <https://somewhere.com>
//~^ ERROR this URL is not a hyperlink
-/// https://somewhere.com/a
+/// <https://somewhere.com/a>
//~^ ERROR this URL is not a hyperlink
-/// https://www.somewhere.com
+/// <https://www.somewhere.com>
//~^ ERROR this URL is not a hyperlink
-/// https://www.somewhere.com/a
+/// <https://www.somewhere.com/a>
//~^ ERROR this URL is not a hyperlink
-/// https://subdomain.example.com
+/// <https://subdomain.example.com>
//~^ ERROR not a hyperlink
-/// https://somewhere.com?
+/// <https://somewhere.com?>
//~^ ERROR this URL is not a hyperlink
-/// https://somewhere.com/a?
+/// <https://somewhere.com/a?>
//~^ ERROR this URL is not a hyperlink
-/// https://somewhere.com?hello=12
+/// <https://somewhere.com?hello=12>
//~^ ERROR this URL is not a hyperlink
-/// https://somewhere.com/a?hello=12
+/// <https://somewhere.com/a?hello=12>
//~^ ERROR this URL is not a hyperlink
-/// https://example.com?hello=12#xyz
+/// <https://example.com?hello=12#xyz>
//~^ ERROR this URL is not a hyperlink
-/// https://example.com/a?hello=12#xyz
+/// <https://example.com/a?hello=12#xyz>
//~^ ERROR this URL is not a hyperlink
-/// https://example.com#xyz
+/// <https://example.com#xyz>
//~^ ERROR this URL is not a hyperlink
-/// https://example.com/a#xyz
+/// <https://example.com/a#xyz>
//~^ ERROR this URL is not a hyperlink
-/// https://somewhere.com?hello=12&bye=11
+/// <https://somewhere.com?hello=12&bye=11>
//~^ ERROR this URL is not a hyperlink
-/// https://somewhere.com/a?hello=12&bye=11
+/// <https://somewhere.com/a?hello=12&bye=11>
//~^ ERROR this URL is not a hyperlink
-/// https://somewhere.com?hello=12&bye=11#xyz
+/// <https://somewhere.com?hello=12&bye=11#xyz>
//~^ ERROR this URL is not a hyperlink
-/// hey! https://somewhere.com/a?hello=12&bye=11#xyz
+/// hey! <https://somewhere.com/a?hello=12&bye=11#xyz>
//~^ ERROR this URL is not a hyperlink
pub fn c() {}
diff --git a/src/test/rustdoc-ui/url-improvements.rs b/src/test/rustdoc-ui/bare-urls.rs
similarity index 98%
rename from src/test/rustdoc-ui/url-improvements.rs
rename to src/test/rustdoc-ui/bare-urls.rs
index 43a13b0..592f573 100644
--- a/src/test/rustdoc-ui/url-improvements.rs
+++ b/src/test/rustdoc-ui/bare-urls.rs
@@ -1,3 +1,5 @@
+// run-rustfix
+
#![deny(rustdoc::bare_urls)]
/// https://somewhere.com
diff --git a/src/test/rustdoc-ui/url-improvements.stderr b/src/test/rustdoc-ui/bare-urls.stderr
similarity index 65%
rename from src/test/rustdoc-ui/url-improvements.stderr
rename to src/test/rustdoc-ui/bare-urls.stderr
index 3d5ebd8..7097a8d 100644
--- a/src/test/rustdoc-ui/url-improvements.stderr
+++ b/src/test/rustdoc-ui/bare-urls.stderr
@@ -1,110 +1,143 @@
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:3:5
+ --> $DIR/bare-urls.rs:5:5
|
LL | /// https://somewhere.com
| ^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://somewhere.com>`
|
note: the lint level is defined here
- --> $DIR/url-improvements.rs:1:9
+ --> $DIR/bare-urls.rs:3:9
|
LL | #![deny(rustdoc::bare_urls)]
| ^^^^^^^^^^^^^^^^^^
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:5:5
+ --> $DIR/bare-urls.rs:7:5
|
LL | /// https://somewhere.com/a
| ^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://somewhere.com/a>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:7:5
+ --> $DIR/bare-urls.rs:9:5
|
LL | /// https://www.somewhere.com
| ^^^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://www.somewhere.com>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:9:5
+ --> $DIR/bare-urls.rs:11:5
|
LL | /// https://www.somewhere.com/a
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://www.somewhere.com/a>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:11:5
+ --> $DIR/bare-urls.rs:13:5
|
LL | /// https://subdomain.example.com
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://subdomain.example.com>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:13:5
+ --> $DIR/bare-urls.rs:15:5
|
LL | /// https://somewhere.com?
| ^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://somewhere.com?>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:15:5
+ --> $DIR/bare-urls.rs:17:5
|
LL | /// https://somewhere.com/a?
| ^^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://somewhere.com/a?>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:17:5
+ --> $DIR/bare-urls.rs:19:5
|
LL | /// https://somewhere.com?hello=12
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://somewhere.com?hello=12>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:19:5
+ --> $DIR/bare-urls.rs:21:5
|
LL | /// https://somewhere.com/a?hello=12
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://somewhere.com/a?hello=12>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:21:5
+ --> $DIR/bare-urls.rs:23:5
|
LL | /// https://example.com?hello=12#xyz
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://example.com?hello=12#xyz>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:23:5
+ --> $DIR/bare-urls.rs:25:5
|
LL | /// https://example.com/a?hello=12#xyz
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://example.com/a?hello=12#xyz>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:25:5
+ --> $DIR/bare-urls.rs:27:5
|
LL | /// https://example.com#xyz
| ^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://example.com#xyz>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:27:5
+ --> $DIR/bare-urls.rs:29:5
|
LL | /// https://example.com/a#xyz
| ^^^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://example.com/a#xyz>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:29:5
+ --> $DIR/bare-urls.rs:31:5
|
LL | /// https://somewhere.com?hello=12&bye=11
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://somewhere.com?hello=12&bye=11>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:31:5
+ --> $DIR/bare-urls.rs:33:5
|
LL | /// https://somewhere.com/a?hello=12&bye=11
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://somewhere.com/a?hello=12&bye=11>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:33:5
+ --> $DIR/bare-urls.rs:35:5
|
LL | /// https://somewhere.com?hello=12&bye=11#xyz
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://somewhere.com?hello=12&bye=11#xyz>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: this URL is not a hyperlink
- --> $DIR/url-improvements.rs:35:10
+ --> $DIR/bare-urls.rs:37:10
|
LL | /// hey! https://somewhere.com/a?hello=12&bye=11#xyz
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use an automatic link instead: `<https://somewhere.com/a?hello=12&bye=11#xyz>`
+ |
+ = note: bare URLs are not automatically turned into clickable links
error: aborting due to 17 previous errors
diff --git a/src/test/rustdoc-ui/intra-doc/anchors.stderr b/src/test/rustdoc-ui/intra-doc/anchors.stderr
index 787a68e..42a8832 100644
--- a/src/test/rustdoc-ui/intra-doc/anchors.stderr
+++ b/src/test/rustdoc-ui/intra-doc/anchors.stderr
@@ -9,6 +9,8 @@
|
LL | #![deny(rustdoc::broken_intra_doc_links)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = note: this restriction may be lifted in a future release
+ = note: see https://github.com/rust-lang/rust/issues/83083 for more information
error: `Foo::f#hola` contains an anchor, but links to fields are already anchored
--> $DIR/anchors.rs:25:15
@@ -33,6 +35,9 @@
|
LL | /// [u32#hello]
| ^^^^^^^^^ contains invalid anchor
+ |
+ = note: this restriction may be lifted in a future release
+ = note: see https://github.com/rust-lang/rust/issues/83083 for more information
error: aborting due to 5 previous errors
diff --git a/src/test/rustdoc-ui/renamed-lint-still-applies.stderr b/src/test/rustdoc-ui/renamed-lint-still-applies.stderr
index 19c253b..3040cad 100644
--- a/src/test/rustdoc-ui/renamed-lint-still-applies.stderr
+++ b/src/test/rustdoc-ui/renamed-lint-still-applies.stderr
@@ -31,6 +31,7 @@
|
LL | #![deny(rustdoc::non_autolinks)]
| ^^^^^^^^^^^^^^^^^^^^^^
+ = note: bare URLs are not automatically turned into clickable links
error: aborting due to 2 previous errors; 1 warning emitted
diff --git a/src/test/rustdoc/intra-doc/auxiliary/empty.rs b/src/test/rustdoc/intra-doc/auxiliary/empty.rs
new file mode 100644
index 0000000..d11c69f
--- /dev/null
+++ b/src/test/rustdoc/intra-doc/auxiliary/empty.rs
@@ -0,0 +1 @@
+// intentionally empty
diff --git a/src/test/rustdoc/intra-doc/auxiliary/empty2.rs b/src/test/rustdoc/intra-doc/auxiliary/empty2.rs
new file mode 100644
index 0000000..d11c69f
--- /dev/null
+++ b/src/test/rustdoc/intra-doc/auxiliary/empty2.rs
@@ -0,0 +1 @@
+// intentionally empty
diff --git a/src/test/rustdoc/intra-doc/extern-crate-only-used-in-link.rs b/src/test/rustdoc/intra-doc/extern-crate-only-used-in-link.rs
index 0964c79..5d8dcf8 100644
--- a/src/test/rustdoc/intra-doc/extern-crate-only-used-in-link.rs
+++ b/src/test/rustdoc/intra-doc/extern-crate-only-used-in-link.rs
@@ -1,8 +1,19 @@
+// This test is just a little cursed.
// aux-build:issue-66159-1.rs
// aux-crate:priv:issue_66159_1=issue-66159-1.rs
+// aux-build:empty.rs
+// aux-crate:priv:empty=empty.rs
+// aux-build:empty2.rs
+// aux-crate:priv:empty2=empty2.rs
// build-aux-docs
-// compile-flags:-Z unstable-options
+// compile-flags:-Z unstable-options --edition 2018
// @has extern_crate_only_used_in_link/index.html
// @has - '//a[@href="../issue_66159_1/struct.Something.html"]' 'issue_66159_1::Something'
//! [issue_66159_1::Something]
+
+// @has - '//a[@href="../empty/index.html"]' 'empty'
+//! [`empty`]
+
+// @has - '//a[@href="../empty2/index.html"]' 'empty2'
+//! [empty2<x>]
diff --git a/src/test/rustdoc/trait-impl-items-links-and-anchors.rs b/src/test/rustdoc/trait-impl-items-links-and-anchors.rs
new file mode 100644
index 0000000..6c09be1
--- /dev/null
+++ b/src/test/rustdoc/trait-impl-items-links-and-anchors.rs
@@ -0,0 +1,65 @@
+pub trait MyTrait {
+ type Assoc;
+ const VALUE: u32;
+ fn trait_function(&self);
+ fn defaulted(&self) {}
+ fn defaulted_override(&self) {}
+}
+
+
+impl MyTrait for String {
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="associatedtype.Assoc-1"]//a[@class="type"]/@href' #associatedtype.Assoc
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="associatedtype.Assoc-1"]//a[@class="anchor"]/@href' #associatedtype.Assoc-1
+ type Assoc = ();
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="associatedconstant.VALUE-1"]//a[@class="constant"]/@href' #associatedconstant.VALUE
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="associatedconstant.VALUE-1"]//a[@class="anchor"]/@href' #associatedconstant.VALUE-1
+ const VALUE: u32 = 5;
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="method.trait_function"]//a[@class="fnname"]/@href' #tymethod.trait_function
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="method.trait_function"]//a[@class="anchor"]/@href' #method.trait_function
+ fn trait_function(&self) {}
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="method.defaulted_override-1"]//a[@class="fnname"]/@href' #method.defaulted_override
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="method.defaulted_override-1"]//a[@class="anchor"]/@href' #method.defaulted_override-1
+ fn defaulted_override(&self) {}
+}
+
+impl MyTrait for Vec<u8> {
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="associatedtype.Assoc-2"]//a[@class="type"]/@href' #associatedtype.Assoc
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="associatedtype.Assoc-2"]//a[@class="anchor"]/@href' #associatedtype.Assoc-2
+ type Assoc = ();
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="associatedconstant.VALUE-2"]//a[@class="constant"]/@href' #associatedconstant.VALUE
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="associatedconstant.VALUE-2"]//a[@class="anchor"]/@href' #associatedconstant.VALUE-2
+ const VALUE: u32 = 5;
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="method.trait_function"]//a[@class="fnname"]/@href' #tymethod.trait_function
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="method.trait_function-1"]//a[@class="anchor"]/@href' #method.trait_function-1
+ fn trait_function(&self) {}
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="method.defaulted_override-2"]//a[@class="fnname"]/@href' #method.defaulted_override
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="method.defaulted_override-2"]//a[@class="anchor"]/@href' #method.defaulted_override-2
+ fn defaulted_override(&self) {}
+}
+
+impl MyTrait for MyStruct {
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="associatedtype.Assoc-3"]//a[@class="type"]/@href' #associatedtype.Assoc
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="associatedtype.Assoc-3"]//a[@class="anchor"]/@href' #associatedtype.Assoc-3
+ // @has trait_impl_items_links_and_anchors/struct.MyStruct.html '//h4[@id="associatedtype.Assoc"]//a[@class="type"]/@href' ../trait_impl_items_links_and_anchors/trait.MyTrait.html#associatedtype.Assoc
+ // @has trait_impl_items_links_and_anchors/struct.MyStruct.html '//h4[@id="associatedtype.Assoc"]//a[@class="anchor"]/@href' #associatedtype.Assoc
+ type Assoc = bool;
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="associatedconstant.VALUE-3"]//a[@class="constant"]/@href' #associatedconstant.VALUE
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="associatedconstant.VALUE-3"]//a[@class="anchor"]/@href' #associatedconstant.VALUE-3
+ // @has trait_impl_items_links_and_anchors/struct.MyStruct.html '//h4[@id="associatedconstant.VALUE"]//a[@class="constant"]/@href' ../trait_impl_items_links_and_anchors/trait.MyTrait.html#associatedconstant.VALUE
+ // @has trait_impl_items_links_and_anchors/struct.MyStruct.html '//h4[@id="associatedconstant.VALUE"]//a[@class="anchor"]/@href' #associatedconstant.VALUE
+ const VALUE: u32 = 20;
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="method.trait_function-2"]//a[@class="fnname"]/@href' #tymethod.trait_function
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="method.trait_function-2"]//a[@class="anchor"]/@href' #method.trait_function-2
+ // @has trait_impl_items_links_and_anchors/struct.MyStruct.html '//h4[@id="method.trait_function"]//a[@class="fnname"]/@href' ../trait_impl_items_links_and_anchors/trait.MyTrait.html#tymethod.trait_function
+ // @has trait_impl_items_links_and_anchors/struct.MyStruct.html '//h4[@id="method.trait_function"]//a[@class="anchor"]/@href' #method.trait_function
+ fn trait_function(&self) {}
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="method.defaulted_override-3"]//a[@class="fnname"]/@href' #method.defaulted_override
+ // @has trait_impl_items_links_and_anchors/trait.MyTrait.html '//h4[@id="method.defaulted_override-3"]//a[@class="anchor"]/@href' #method.defaulted_override-3
+ // @has trait_impl_items_links_and_anchors/struct.MyStruct.html '//h4[@id="method.defaulted_override"]//a[@class="fnname"]/@href' ../trait_impl_items_links_and_anchors/trait.MyTrait.html#method.defaulted_override
+ // @has trait_impl_items_links_and_anchors/struct.MyStruct.html '//h4[@id="method.defaulted_override"]//a[@class="anchor"]/@href' #method.defaulted_override
+ fn defaulted_override(&self) {}
+ // @has trait_impl_items_links_and_anchors/struct.MyStruct.html '//h4[@id="method.defaulted"]//a[@class="fnname"]/@href' ../trait_impl_items_links_and_anchors/trait.MyTrait.html#method.defaulted
+ // @has trait_impl_items_links_and_anchors/struct.MyStruct.html '//h4[@id="method.defaulted"]//a[@class="anchor"]/@href' #method.defaulted
+}
+
+pub struct MyStruct;
diff --git a/src/test/ui/anon-params/anon-params-edition-hygiene.rs b/src/test/ui/anon-params/anon-params-edition-hygiene.rs
index 14e11c5..6936205 100644
--- a/src/test/ui/anon-params/anon-params-edition-hygiene.rs
+++ b/src/test/ui/anon-params/anon-params-edition-hygiene.rs
@@ -2,6 +2,9 @@
// edition:2018
// aux-build:anon-params-edition-hygiene.rs
+// This warning is still surfaced
+#![allow(anonymous_parameters)]
+
#[macro_use]
extern crate anon_params_edition_hygiene;
diff --git a/src/test/ui/asm/inline-syntax.arm.stderr b/src/test/ui/asm/inline-syntax.arm.stderr
index 56e6572..bcae1d5 100644
--- a/src/test/ui/asm/inline-syntax.arm.stderr
+++ b/src/test/ui/asm/inline-syntax.arm.stderr
@@ -1,5 +1,5 @@
error: unknown directive
- --> $DIR/inline-syntax.rs:22:15
+ --> $DIR/inline-syntax.rs:25:15
|
LL | asm!(".intel_syntax noprefix", "nop");
| ^
@@ -11,7 +11,7 @@
| ^
error: unknown directive
- --> $DIR/inline-syntax.rs:25:15
+ --> $DIR/inline-syntax.rs:28:15
|
LL | asm!(".intel_syntax aaa noprefix", "nop");
| ^
@@ -23,7 +23,7 @@
| ^
error: unknown directive
- --> $DIR/inline-syntax.rs:28:15
+ --> $DIR/inline-syntax.rs:31:15
|
LL | asm!(".att_syntax noprefix", "nop");
| ^
@@ -35,7 +35,7 @@
| ^
error: unknown directive
- --> $DIR/inline-syntax.rs:31:15
+ --> $DIR/inline-syntax.rs:34:15
|
LL | asm!(".att_syntax bbb noprefix", "nop");
| ^
@@ -47,7 +47,7 @@
| ^
error: unknown directive
- --> $DIR/inline-syntax.rs:34:15
+ --> $DIR/inline-syntax.rs:37:15
|
LL | asm!(".intel_syntax noprefix; nop");
| ^
@@ -59,7 +59,7 @@
| ^
error: unknown directive
- --> $DIR/inline-syntax.rs:40:13
+ --> $DIR/inline-syntax.rs:43:13
|
LL | .intel_syntax noprefix
| ^
diff --git a/src/test/ui/asm/inline-syntax.rs b/src/test/ui/asm/inline-syntax.rs
index 78dde5a..13ded19 100644
--- a/src/test/ui/asm/inline-syntax.rs
+++ b/src/test/ui/asm/inline-syntax.rs
@@ -2,12 +2,15 @@
// revisions: x86_64 arm
//[x86_64] compile-flags: --target x86_64-unknown-linux-gnu
//[x86_64] check-pass
+//[x86_64_allowed] compile-flags: --target x86_64-unknown-linux-gnu
+//[x86_64_allowed] check-pass
//[arm] compile-flags: --target armv7-unknown-linux-gnueabihf
//[arm] build-fail
#![feature(no_core, lang_items, rustc_attrs)]
#![crate_type = "rlib"]
#![no_core]
+#![cfg_attr(x86_64_allowed, allow(bad_asm_style))]
#[rustc_builtin_macro]
macro_rules! asm {
diff --git a/src/test/ui/asm/inline-syntax.x86_64.stderr b/src/test/ui/asm/inline-syntax.x86_64.stderr
index 5c03d3a..02b29b0 100644
--- a/src/test/ui/asm/inline-syntax.x86_64.stderr
+++ b/src/test/ui/asm/inline-syntax.x86_64.stderr
@@ -1,5 +1,5 @@
warning: avoid using `.intel_syntax`, Intel syntax is the default
- --> $DIR/inline-syntax.rs:22:15
+ --> $DIR/inline-syntax.rs:25:15
|
LL | asm!(".intel_syntax noprefix", "nop");
| ^^^^^^^^^^^^^^^^^^^^^^
@@ -7,31 +7,31 @@
= note: `#[warn(bad_asm_style)]` on by default
warning: avoid using `.intel_syntax`, Intel syntax is the default
- --> $DIR/inline-syntax.rs:25:15
+ --> $DIR/inline-syntax.rs:28:15
|
LL | asm!(".intel_syntax aaa noprefix", "nop");
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
warning: avoid using `.att_syntax`, prefer using `options(att_syntax)` instead
- --> $DIR/inline-syntax.rs:28:15
+ --> $DIR/inline-syntax.rs:31:15
|
LL | asm!(".att_syntax noprefix", "nop");
| ^^^^^^^^^^^^^^^^^^^^
warning: avoid using `.att_syntax`, prefer using `options(att_syntax)` instead
- --> $DIR/inline-syntax.rs:31:15
+ --> $DIR/inline-syntax.rs:34:15
|
LL | asm!(".att_syntax bbb noprefix", "nop");
| ^^^^^^^^^^^^^^^^^^^^^^^^
warning: avoid using `.intel_syntax`, Intel syntax is the default
- --> $DIR/inline-syntax.rs:34:15
+ --> $DIR/inline-syntax.rs:37:15
|
LL | asm!(".intel_syntax noprefix; nop");
| ^^^^^^^^^^^^^^^^^^^^^^
warning: avoid using `.intel_syntax`, Intel syntax is the default
- --> $DIR/inline-syntax.rs:40:13
+ --> $DIR/inline-syntax.rs:43:13
|
LL | .intel_syntax noprefix
| ^^^^^^^^^^^^^^^^^^^^^^
diff --git a/src/test/ui/asm/naked-invalid-attr.rs b/src/test/ui/asm/naked-invalid-attr.rs
index cdb6c17..2576d11 100644
--- a/src/test/ui/asm/naked-invalid-attr.rs
+++ b/src/test/ui/asm/naked-invalid-attr.rs
@@ -1,6 +1,6 @@
// Checks that #[naked] attribute can be placed on function definitions only.
//
-// ignore-wasm32 asm unsupported
+// needs-asm-support
#![feature(asm)]
#![feature(naked_functions)]
#![naked] //~ ERROR should be applied to a function definition
diff --git a/src/test/ui/associated-types/defaults-specialization.stderr b/src/test/ui/associated-types/defaults-specialization.stderr
index 920f832..3c7dc1f 100644
--- a/src/test/ui/associated-types/defaults-specialization.stderr
+++ b/src/test/ui/associated-types/defaults-specialization.stderr
@@ -15,7 +15,10 @@
| -------- type in trait
...
LL | fn make() -> u8 { 0 }
- | ^^ expected associated type, found `u8`
+ | ^^
+ | |
+ | expected associated type, found `u8`
+ | help: change the output type to match the trait: `<A<T> as Tr>::Ty`
|
= note: expected fn pointer `fn() -> <A<T> as Tr>::Ty`
found fn pointer `fn() -> u8`
@@ -30,7 +33,10 @@
| ----------------------- expected this associated type
LL |
LL | fn make() -> bool { true }
- | ^^^^ expected associated type, found `bool`
+ | ^^^^
+ | |
+ | expected associated type, found `bool`
+ | help: change the output type to match the trait: `<B<T> as Tr>::Ty`
|
= note: expected fn pointer `fn() -> <B<T> as Tr>::Ty`
found fn pointer `fn() -> bool`
diff --git a/src/test/ui/cleanup-shortcircuit.rs b/src/test/ui/cleanup-shortcircuit.rs
index 4f5197a..fe867ce 100644
--- a/src/test/ui/cleanup-shortcircuit.rs
+++ b/src/test/ui/cleanup-shortcircuit.rs
@@ -3,6 +3,9 @@
// pretty-expanded FIXME #23616
+#![allow(deref_nullptr)]
+
+
use std::env;
pub fn main() {
diff --git a/src/test/ui/compare-method/bad-self-type.rs b/src/test/ui/compare-method/bad-self-type.rs
new file mode 100644
index 0000000..f42a9e4
--- /dev/null
+++ b/src/test/ui/compare-method/bad-self-type.rs
@@ -0,0 +1,26 @@
+use std::future::Future;
+use std::task::{Context, Poll};
+
+fn main() {}
+
+struct MyFuture {}
+
+impl Future for MyFuture {
+ type Output = ();
+ fn poll(self, _: &mut Context<'_>) -> Poll<()> {
+ //~^ ERROR method `poll` has an incompatible type for trait
+ todo!()
+ }
+}
+
+trait T {
+ fn foo(self);
+ fn bar(self) -> Option<()>;
+}
+
+impl T for MyFuture {
+ fn foo(self: Box<Self>) {}
+ //~^ ERROR method `foo` has an incompatible type for trait
+ fn bar(self) {}
+ //~^ ERROR method `bar` has an incompatible type for trait
+}
diff --git a/src/test/ui/compare-method/bad-self-type.stderr b/src/test/ui/compare-method/bad-self-type.stderr
new file mode 100644
index 0000000..76f91fb
--- /dev/null
+++ b/src/test/ui/compare-method/bad-self-type.stderr
@@ -0,0 +1,46 @@
+error[E0053]: method `poll` has an incompatible type for trait
+ --> $DIR/bad-self-type.rs:10:13
+ |
+LL | fn poll(self, _: &mut Context<'_>) -> Poll<()> {
+ | ^^^^
+ | |
+ | expected struct `Pin`, found struct `MyFuture`
+ | help: change the self-receiver type to match the trait: `self: Pin<&mut MyFuture>`
+ |
+ = note: expected fn pointer `fn(Pin<&mut MyFuture>, &mut Context<'_>) -> Poll<_>`
+ found fn pointer `fn(MyFuture, &mut Context<'_>) -> Poll<_>`
+
+error[E0053]: method `foo` has an incompatible type for trait
+ --> $DIR/bad-self-type.rs:22:18
+ |
+LL | fn foo(self);
+ | ---- type in trait
+...
+LL | fn foo(self: Box<Self>) {}
+ | ------^^^^^^^^^
+ | | |
+ | | expected struct `MyFuture`, found struct `Box`
+ | help: change the self-receiver type to match the trait: `self`
+ |
+ = note: expected fn pointer `fn(MyFuture)`
+ found fn pointer `fn(Box<MyFuture>)`
+
+error[E0053]: method `bar` has an incompatible type for trait
+ --> $DIR/bad-self-type.rs:24:18
+ |
+LL | fn bar(self) -> Option<()>;
+ | ---------- type in trait
+...
+LL | fn bar(self) {}
+ | ^ expected enum `Option`, found `()`
+ |
+ = note: expected fn pointer `fn(MyFuture) -> Option<()>`
+ found fn pointer `fn(MyFuture)`
+help: change the output type to match the trait
+ |
+LL | fn bar(self) -> Option<()> {}
+ | ^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
+For more information about this error, try `rustc --explain E0053`.
diff --git a/src/test/ui/compare-method/reordered-type-param.stderr b/src/test/ui/compare-method/reordered-type-param.stderr
index f1f8a66..d581628 100644
--- a/src/test/ui/compare-method/reordered-type-param.stderr
+++ b/src/test/ui/compare-method/reordered-type-param.stderr
@@ -5,8 +5,10 @@
| - type in trait
...
LL | fn b<F:Clone,G>(&self, _x: G) -> G { panic!() }
- | - - ^ expected type parameter `F`, found type parameter `G`
- | | |
+ | - - ^
+ | | | |
+ | | | expected type parameter `F`, found type parameter `G`
+ | | | help: change the parameter type to match the trait: `F`
| | found type parameter
| expected type parameter
|
diff --git a/src/test/ui/const-generics/diagnostics.stderr b/src/test/ui/const-generics/diagnostics.stderr
index 7d038ff..c8ee6ad 100644
--- a/src/test/ui/const-generics/diagnostics.stderr
+++ b/src/test/ui/const-generics/diagnostics.stderr
@@ -31,9 +31,12 @@
--> $DIR/diagnostics.rs:12:19
|
LL | impl<N> Foo for B<N> {}
- | - ^
- | |
- | help: consider changing this type paramater to a `const`-generic: `const N: u8`
+ | ^
+ |
+help: consider changing this type parameter to be a `const` generic
+ |
+LL | impl<const N: u8> Foo for B<N> {}
+ | ^^^^^^^^^^^
error[E0747]: unresolved item provided when a constant was expected
--> $DIR/diagnostics.rs:16:32
diff --git a/src/test/ui/consts/const-int-unchecked.rs b/src/test/ui/consts/const-int-unchecked.rs
index 41d8f7a..2ccc5d2 100644
--- a/src/test/ui/consts/const-int-unchecked.rs
+++ b/src/test/ui/consts/const-int-unchecked.rs
@@ -186,4 +186,13 @@
//~^ ERROR any use of this value will cause an error
//~| WARN this was previously accepted by the compiler but is being phased out
+// capture fault with zero value
+
+const _: u32 = unsafe { std::intrinsics::ctlz_nonzero(0) };
+//~^ ERROR any use of this value will cause an error
+//~| WARN this was previously accepted by the compiler but is being phased out
+const _: u32 = unsafe { std::intrinsics::cttz_nonzero(0) };
+//~^ ERROR any use of this value will cause an error
+//~| WARN this was previously accepted by the compiler but is being phased out
+
fn main() {}
diff --git a/src/test/ui/consts/const-int-unchecked.stderr b/src/test/ui/consts/const-int-unchecked.stderr
index e5ecbbc..999b265 100644
--- a/src/test/ui/consts/const-int-unchecked.stderr
+++ b/src/test/ui/consts/const-int-unchecked.stderr
@@ -516,5 +516,27 @@
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #71800 <https://github.com/rust-lang/rust/issues/71800>
-error: aborting due to 47 previous errors
+error: any use of this value will cause an error
+ --> $DIR/const-int-unchecked.rs:191:25
+ |
+LL | const _: u32 = unsafe { std::intrinsics::ctlz_nonzero(0) };
+ | ------------------------^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^---
+ | |
+ | `ctlz_nonzero` called on 0
+ |
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #71800 <https://github.com/rust-lang/rust/issues/71800>
+
+error: any use of this value will cause an error
+ --> $DIR/const-int-unchecked.rs:194:25
+ |
+LL | const _: u32 = unsafe { std::intrinsics::cttz_nonzero(0) };
+ | ------------------------^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^---
+ | |
+ | `cttz_nonzero` called on 0
+ |
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #71800 <https://github.com/rust-lang/rust/issues/71800>
+
+error: aborting due to 49 previous errors
diff --git a/src/test/ui/feature-gates/feature-gate-naked_functions.rs b/src/test/ui/feature-gates/feature-gate-naked_functions.rs
index 06bddc4..71ca5b9 100644
--- a/src/test/ui/feature-gates/feature-gate-naked_functions.rs
+++ b/src/test/ui/feature-gates/feature-gate-naked_functions.rs
@@ -1,3 +1,4 @@
+// needs-asm-support
#![feature(asm)]
#[naked]
diff --git a/src/test/ui/feature-gates/feature-gate-naked_functions.stderr b/src/test/ui/feature-gates/feature-gate-naked_functions.stderr
index d95561d..653d7b7 100644
--- a/src/test/ui/feature-gates/feature-gate-naked_functions.stderr
+++ b/src/test/ui/feature-gates/feature-gate-naked_functions.stderr
@@ -1,5 +1,5 @@
error[E0658]: the `#[naked]` attribute is an experimental feature
- --> $DIR/feature-gate-naked_functions.rs:3:1
+ --> $DIR/feature-gate-naked_functions.rs:4:1
|
LL | #[naked]
| ^^^^^^^^
@@ -8,7 +8,7 @@
= help: add `#![feature(naked_functions)]` to the crate attributes to enable
error[E0658]: the `#[naked]` attribute is an experimental feature
- --> $DIR/feature-gate-naked_functions.rs:9:1
+ --> $DIR/feature-gate-naked_functions.rs:10:1
|
LL | #[naked]
| ^^^^^^^^
diff --git a/src/test/ui/feature-gates/feature-gate-object_safe_for_dispatch.rs b/src/test/ui/feature-gates/feature-gate-object_safe_for_dispatch.rs
index 8945360..37348e4 100644
--- a/src/test/ui/feature-gates/feature-gate-object_safe_for_dispatch.rs
+++ b/src/test/ui/feature-gates/feature-gate-object_safe_for_dispatch.rs
@@ -12,7 +12,7 @@
}
trait NonObjectSafe4 {
- fn foo(&self, &Self);
+ fn foo(&self, s: &Self);
}
fn takes_non_object_safe_ref<T>(obj: &dyn NonObjectSafe1) {
diff --git a/src/test/ui/feature-gates/feature-gate-object_safe_for_dispatch.stderr b/src/test/ui/feature-gates/feature-gate-object_safe_for_dispatch.stderr
index b61d560..12195bc 100644
--- a/src/test/ui/feature-gates/feature-gate-object_safe_for_dispatch.stderr
+++ b/src/test/ui/feature-gates/feature-gate-object_safe_for_dispatch.stderr
@@ -57,12 +57,12 @@
|
= help: consider moving `foo` to another trait
note: for a trait to be "object safe" it needs to allow building a vtable to allow the call to be resolvable dynamically; for more information visit <https://doc.rust-lang.org/reference/items/traits.html#object-safety>
- --> $DIR/feature-gate-object_safe_for_dispatch.rs:15:19
+ --> $DIR/feature-gate-object_safe_for_dispatch.rs:15:22
|
LL | trait NonObjectSafe4 {
| -------------- this trait cannot be made into an object...
-LL | fn foo(&self, &Self);
- | ^^^^^ ...because method `foo` references the `Self` type in this parameter
+LL | fn foo(&self, s: &Self);
+ | ^^^^^ ...because method `foo` references the `Self` type in this parameter
error[E0038]: the trait `NonObjectSafe1` cannot be made into an object
--> $DIR/feature-gate-object_safe_for_dispatch.rs:38:16
diff --git a/src/test/ui/impl-trait/impl-generic-mismatch-ab.stderr b/src/test/ui/impl-trait/impl-generic-mismatch-ab.stderr
index 638a009..d37670d 100644
--- a/src/test/ui/impl-trait/impl-generic-mismatch-ab.stderr
+++ b/src/test/ui/impl-trait/impl-generic-mismatch-ab.stderr
@@ -5,8 +5,10 @@
| -- type in trait
...
LL | fn foo<B: Debug>(&self, a: &impl Debug, b: &B) { }
- | - ^^^^^^^^^^^ expected type parameter `B`, found type parameter `impl Debug`
- | |
+ | - ^^^^^^^^^^^
+ | | |
+ | | expected type parameter `B`, found type parameter `impl Debug`
+ | | help: change the parameter type to match the trait: `&B`
| expected type parameter
|
= note: expected fn pointer `fn(&(), &B, &impl Debug)`
diff --git a/src/test/ui/impl-trait/trait_type.stderr b/src/test/ui/impl-trait/trait_type.stderr
index 961bb73..bea2433 100644
--- a/src/test/ui/impl-trait/trait_type.stderr
+++ b/src/test/ui/impl-trait/trait_type.stderr
@@ -1,8 +1,11 @@
error[E0053]: method `fmt` has an incompatible type for trait
- --> $DIR/trait_type.rs:7:4
+ --> $DIR/trait_type.rs:7:21
|
LL | fn fmt(&self, x: &str) -> () { }
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ types differ in mutability
+ | ^^^^
+ | |
+ | types differ in mutability
+ | help: change the parameter type to match the trait: `&mut Formatter<'_>`
|
= note: expected fn pointer `fn(&MyType, &mut Formatter<'_>) -> Result<(), std::fmt::Error>`
found fn pointer `fn(&MyType, &str)`
diff --git a/src/test/ui/issues/issue-13033.stderr b/src/test/ui/issues/issue-13033.stderr
index 57447fa..6c3651f 100644
--- a/src/test/ui/issues/issue-13033.stderr
+++ b/src/test/ui/issues/issue-13033.stderr
@@ -8,7 +8,7 @@
| ^^^^^^^^
| |
| types differ in mutability
- | help: consider changing the mutability to match the trait: `&mut dyn Foo`
+ | help: change the parameter type to match the trait: `&mut dyn Foo`
|
= note: expected fn pointer `fn(&mut Baz, &mut dyn Foo)`
found fn pointer `fn(&mut Baz, &dyn Foo)`
diff --git a/src/test/ui/issues/issue-20225.stderr b/src/test/ui/issues/issue-20225.stderr
index 3bcc50d..6f4813c 100644
--- a/src/test/ui/issues/issue-20225.stderr
+++ b/src/test/ui/issues/issue-20225.stderr
@@ -1,33 +1,42 @@
error[E0053]: method `call` has an incompatible type for trait
- --> $DIR/issue-20225.rs:6:3
+ --> $DIR/issue-20225.rs:6:43
|
LL | impl<'a, T> Fn<(&'a T,)> for Foo {
| - this type parameter
LL | extern "rust-call" fn call(&self, (_,): (T,)) {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `&T`, found type parameter `T`
+ | ^^^^
+ | |
+ | expected `&T`, found type parameter `T`
+ | help: change the parameter type to match the trait: `(&'a T,)`
|
= note: expected fn pointer `extern "rust-call" fn(&Foo, (&'a T,))`
found fn pointer `extern "rust-call" fn(&Foo, (T,))`
error[E0053]: method `call_mut` has an incompatible type for trait
- --> $DIR/issue-20225.rs:11:3
+ --> $DIR/issue-20225.rs:11:51
|
LL | impl<'a, T> FnMut<(&'a T,)> for Foo {
| - this type parameter
LL | extern "rust-call" fn call_mut(&mut self, (_,): (T,)) {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `&T`, found type parameter `T`
+ | ^^^^
+ | |
+ | expected `&T`, found type parameter `T`
+ | help: change the parameter type to match the trait: `(&'a T,)`
|
= note: expected fn pointer `extern "rust-call" fn(&mut Foo, (&'a T,))`
found fn pointer `extern "rust-call" fn(&mut Foo, (T,))`
error[E0053]: method `call_once` has an incompatible type for trait
- --> $DIR/issue-20225.rs:18:3
+ --> $DIR/issue-20225.rs:18:47
|
LL | impl<'a, T> FnOnce<(&'a T,)> for Foo {
| - this type parameter
...
LL | extern "rust-call" fn call_once(self, (_,): (T,)) {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `&T`, found type parameter `T`
+ | ^^^^
+ | |
+ | expected `&T`, found type parameter `T`
+ | help: change the parameter type to match the trait: `(&'a T,)`
|
= note: expected fn pointer `extern "rust-call" fn(Foo, (&'a T,))`
found fn pointer `extern "rust-call" fn(Foo, (T,))`
diff --git a/src/test/ui/issues/issue-21332.stderr b/src/test/ui/issues/issue-21332.stderr
index 35863fb..d92966d 100644
--- a/src/test/ui/issues/issue-21332.stderr
+++ b/src/test/ui/issues/issue-21332.stderr
@@ -1,8 +1,11 @@
error[E0053]: method `next` has an incompatible type for trait
- --> $DIR/issue-21332.rs:5:5
+ --> $DIR/issue-21332.rs:5:27
|
LL | fn next(&mut self) -> Result<i32, i32> { Ok(7) }
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected enum `Option`, found enum `Result`
+ | ^^^^^^^^^^^^^^^^
+ | |
+ | expected enum `Option`, found enum `Result`
+ | help: change the output type to match the trait: `Option<i32>`
|
= note: expected fn pointer `fn(&mut S) -> Option<i32>`
found fn pointer `fn(&mut S) -> Result<i32, i32>`
diff --git a/src/test/ui/issues/issue-35869.stderr b/src/test/ui/issues/issue-35869.stderr
index f80561b..71b2a9d 100644
--- a/src/test/ui/issues/issue-35869.stderr
+++ b/src/test/ui/issues/issue-35869.stderr
@@ -5,7 +5,10 @@
| ------------ type in trait
...
LL | fn foo(_: fn(u16) -> ()) {}
- | ^^^^^^^^^^^^^ expected `u8`, found `u16`
+ | ^^^^^^^^^^^^^
+ | |
+ | expected `u8`, found `u16`
+ | help: change the parameter type to match the trait: `fn(u8)`
|
= note: expected fn pointer `fn(fn(u8))`
found fn pointer `fn(fn(u16))`
@@ -17,7 +20,10 @@
| ---------- type in trait
...
LL | fn bar(_: Option<u16>) {}
- | ^^^^^^^^^^^ expected `u8`, found `u16`
+ | ^^^^^^^^^^^
+ | |
+ | expected `u8`, found `u16`
+ | help: change the parameter type to match the trait: `Option<u8>`
|
= note: expected fn pointer `fn(Option<u8>)`
found fn pointer `fn(Option<u16>)`
@@ -29,7 +35,10 @@
| --------- type in trait
...
LL | fn baz(_: (u16, u16)) {}
- | ^^^^^^^^^^ expected `u8`, found `u16`
+ | ^^^^^^^^^^
+ | |
+ | expected `u8`, found `u16`
+ | help: change the parameter type to match the trait: `(u8, u16)`
|
= note: expected fn pointer `fn((u8, _))`
found fn pointer `fn((u16, _))`
@@ -41,7 +50,10 @@
| -- type in trait
...
LL | fn qux() -> u16 { 5u16 }
- | ^^^ expected `u8`, found `u16`
+ | ^^^
+ | |
+ | expected `u8`, found `u16`
+ | help: change the output type to match the trait: `u8`
|
= note: expected fn pointer `fn() -> u8`
found fn pointer `fn() -> u16`
diff --git a/src/test/ui/issues/issue-78720.rs b/src/test/ui/issues/issue-78720.rs
index 57615d1..4cdb9f4 100644
--- a/src/test/ui/issues/issue-78720.rs
+++ b/src/test/ui/issues/issue-78720.rs
@@ -4,7 +4,7 @@
}
trait FilterBase2 {
- fn map2<F>(self, F) -> Map2<F> {}
+ fn map2<F>(self, f: F) -> Map2<F> {}
//~^ ERROR mismatched types
//~^^ ERROR the size for values of type `Self` cannot be known at compilation time
}
diff --git a/src/test/ui/issues/issue-78720.stderr b/src/test/ui/issues/issue-78720.stderr
index a3a14e3..ee15826 100644
--- a/src/test/ui/issues/issue-78720.stderr
+++ b/src/test/ui/issues/issue-78720.stderr
@@ -25,10 +25,10 @@
| ^^^
error[E0308]: mismatched types
- --> $DIR/issue-78720.rs:7:36
+ --> $DIR/issue-78720.rs:7:39
|
-LL | fn map2<F>(self, F) -> Map2<F> {}
- | ^^ expected struct `Map2`, found `()`
+LL | fn map2<F>(self, f: F) -> Map2<F> {}
+ | ^^ expected struct `Map2`, found `()`
|
= note: expected struct `Map2<F>`
found unit type `()`
@@ -36,17 +36,17 @@
error[E0277]: the size for values of type `Self` cannot be known at compilation time
--> $DIR/issue-78720.rs:7:16
|
-LL | fn map2<F>(self, F) -> Map2<F> {}
+LL | fn map2<F>(self, f: F) -> Map2<F> {}
| ^^^^ doesn't have a size known at compile-time
|
= help: unsized fn params are gated as an unstable feature
help: consider further restricting `Self`
|
-LL | fn map2<F>(self, F) -> Map2<F> where Self: Sized {}
- | ^^^^^^^^^^^^^^^^^
+LL | fn map2<F>(self, f: F) -> Map2<F> where Self: Sized {}
+ | ^^^^^^^^^^^^^^^^^
help: function arguments must have a statically known size, borrowed types always have a known size
|
-LL | fn map2<F>(&self, F) -> Map2<F> {}
+LL | fn map2<F>(&self, f: F) -> Map2<F> {}
| ^
error: aborting due to 4 previous errors
diff --git a/src/test/ui/lint/lint-deref-nullptr.rs b/src/test/ui/lint/lint-deref-nullptr.rs
new file mode 100644
index 0000000..d052dbd
--- /dev/null
+++ b/src/test/ui/lint/lint-deref-nullptr.rs
@@ -0,0 +1,38 @@
+// test the deref_nullptr lint
+
+#![deny(deref_nullptr)]
+
+use std::ptr;
+
+struct Struct {
+ field: u8,
+}
+
+fn f() {
+ unsafe {
+ let a = 1;
+ let ub = *(a as *const i32);
+ let ub = *(0 as *const i32);
+ //~^ ERROR dereferencing a null pointer
+ let ub = *ptr::null::<i32>();
+ //~^ ERROR dereferencing a null pointer
+ let ub = *ptr::null_mut::<i32>();
+ //~^ ERROR dereferencing a null pointer
+ let ub = *(ptr::null::<i16>() as *const i32);
+ //~^ ERROR dereferencing a null pointer
+ let ub = *(ptr::null::<i16>() as *mut i32 as *mut usize as *const u8);
+ //~^ ERROR dereferencing a null pointer
+ let ub = &*ptr::null::<i32>();
+ //~^ ERROR dereferencing a null pointer
+ let ub = &*ptr::null_mut::<i32>();
+ //~^ ERROR dereferencing a null pointer
+ ptr::addr_of!(*ptr::null::<i32>());
+ //~^ ERROR dereferencing a null pointer
+ ptr::addr_of_mut!(*ptr::null_mut::<i32>());
+ //~^ ERROR dereferencing a null pointer
+ let offset = ptr::addr_of!((*ptr::null::<Struct>()).field);
+ //~^ ERROR dereferencing a null pointer
+ }
+}
+
+fn main() {}
diff --git a/src/test/ui/lint/lint-deref-nullptr.stderr b/src/test/ui/lint/lint-deref-nullptr.stderr
new file mode 100644
index 0000000..c6f432e
--- /dev/null
+++ b/src/test/ui/lint/lint-deref-nullptr.stderr
@@ -0,0 +1,68 @@
+error: dereferencing a null pointer
+ --> $DIR/lint-deref-nullptr.rs:15:18
+ |
+LL | let ub = *(0 as *const i32);
+ | ^^^^^^^^^^^^^^^^^^ this code causes undefined behavior when executed
+ |
+note: the lint level is defined here
+ --> $DIR/lint-deref-nullptr.rs:3:9
+ |
+LL | #![deny(deref_nullptr)]
+ | ^^^^^^^^^^^^^
+
+error: dereferencing a null pointer
+ --> $DIR/lint-deref-nullptr.rs:17:18
+ |
+LL | let ub = *ptr::null::<i32>();
+ | ^^^^^^^^^^^^^^^^^^^ this code causes undefined behavior when executed
+
+error: dereferencing a null pointer
+ --> $DIR/lint-deref-nullptr.rs:19:18
+ |
+LL | let ub = *ptr::null_mut::<i32>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^ this code causes undefined behavior when executed
+
+error: dereferencing a null pointer
+ --> $DIR/lint-deref-nullptr.rs:21:18
+ |
+LL | let ub = *(ptr::null::<i16>() as *const i32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this code causes undefined behavior when executed
+
+error: dereferencing a null pointer
+ --> $DIR/lint-deref-nullptr.rs:23:18
+ |
+LL | let ub = *(ptr::null::<i16>() as *mut i32 as *mut usize as *const u8);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this code causes undefined behavior when executed
+
+error: dereferencing a null pointer
+ --> $DIR/lint-deref-nullptr.rs:25:19
+ |
+LL | let ub = &*ptr::null::<i32>();
+ | ^^^^^^^^^^^^^^^^^^^ this code causes undefined behavior when executed
+
+error: dereferencing a null pointer
+ --> $DIR/lint-deref-nullptr.rs:27:19
+ |
+LL | let ub = &*ptr::null_mut::<i32>();
+ | ^^^^^^^^^^^^^^^^^^^^^^^ this code causes undefined behavior when executed
+
+error: dereferencing a null pointer
+ --> $DIR/lint-deref-nullptr.rs:29:23
+ |
+LL | ptr::addr_of!(*ptr::null::<i32>());
+ | ^^^^^^^^^^^^^^^^^^^ this code causes undefined behavior when executed
+
+error: dereferencing a null pointer
+ --> $DIR/lint-deref-nullptr.rs:31:27
+ |
+LL | ptr::addr_of_mut!(*ptr::null_mut::<i32>());
+ | ^^^^^^^^^^^^^^^^^^^^^^^ this code causes undefined behavior when executed
+
+error: dereferencing a null pointer
+ --> $DIR/lint-deref-nullptr.rs:33:36
+ |
+LL | let offset = ptr::addr_of!((*ptr::null::<Struct>()).field);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ this code causes undefined behavior when executed
+
+error: aborting due to 10 previous errors
+
diff --git a/src/test/ui/macros/macro-pat-pattern-followed-by-or-in-2021.rs b/src/test/ui/macros/macro-pat-pattern-followed-by-or-in-2021.rs
new file mode 100644
index 0000000..f5a97ec
--- /dev/null
+++ b/src/test/ui/macros/macro-pat-pattern-followed-by-or-in-2021.rs
@@ -0,0 +1,20 @@
+// edition:2021
+#![allow(unused_macros)]
+macro_rules! foo { ($x:pat | $y:pat) => {} } //~ ERROR `$x:pat` is followed by `|`, which is not allowed for `pat` fragments
+macro_rules! bar { ($($x:pat)+ | $($y:pat)+) => {} } //~ ERROR `$x:pat` is followed by `|`, which is not allowed for `pat` fragments
+macro_rules! qux { ($x:pat, $y:pat) => {} } // should be ok
+macro_rules! match_any {
+ ( $expr:expr , $( $( $pat:pat )|+ => $expr_arm:expr ),+ ) => { //~ ERROR `$pat:pat` may be followed by `|`, which is not allowed for `pat` fragments
+ match $expr {
+ $(
+ $( $pat => $expr_arm, )+
+ )+
+ }
+ };
+}
+
+fn main() {
+ let result: Result<i64, i32> = Err(42);
+ let int: i64 = match_any!(result, Ok(i) | Err(i) => i.into());
+ assert_eq!(int, 42);
+}
diff --git a/src/test/ui/macros/macro-pat-pattern-followed-by-or-in-2021.stderr b/src/test/ui/macros/macro-pat-pattern-followed-by-or-in-2021.stderr
new file mode 100644
index 0000000..a5987a2
--- /dev/null
+++ b/src/test/ui/macros/macro-pat-pattern-followed-by-or-in-2021.stderr
@@ -0,0 +1,26 @@
+error: `$x:pat` is followed by `|`, which is not allowed for `pat` fragments
+ --> $DIR/macro-pat-pattern-followed-by-or-in-2021.rs:3:28
+ |
+LL | macro_rules! foo { ($x:pat | $y:pat) => {} }
+ | ^ not allowed after `pat` fragments
+ |
+ = note: allowed there are: `=>`, `,`, `=`, `if` or `in`
+
+error: `$x:pat` is followed by `|`, which is not allowed for `pat` fragments
+ --> $DIR/macro-pat-pattern-followed-by-or-in-2021.rs:4:32
+ |
+LL | macro_rules! bar { ($($x:pat)+ | $($y:pat)+) => {} }
+ | ^ not allowed after `pat` fragments
+ |
+ = note: allowed there are: `=>`, `,`, `=`, `if` or `in`
+
+error: `$pat:pat` may be followed by `|`, which is not allowed for `pat` fragments
+ --> $DIR/macro-pat-pattern-followed-by-or-in-2021.rs:7:36
+ |
+LL | ( $expr:expr , $( $( $pat:pat )|+ => $expr_arm:expr ),+ ) => {
+ | ^ not allowed after `pat` fragments
+ |
+ = note: allowed there are: `=>`, `,`, `=`, `if` or `in`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/test/ui/macros/macro-pat-pattern-followed-by-or.rs b/src/test/ui/macros/macro-pat-pattern-followed-by-or.rs
new file mode 100644
index 0000000..54bd13d
--- /dev/null
+++ b/src/test/ui/macros/macro-pat-pattern-followed-by-or.rs
@@ -0,0 +1,20 @@
+// run-pass
+#![allow(unused_macros)]
+macro_rules! foo { ($x:pat | $y:pat) => {} } // should be ok
+macro_rules! bar { ($($x:pat)+ | $($y:pat)+) => {} } // should be ok
+macro_rules! qux { ($x:pat, $y:pat) => {} } // should be ok
+macro_rules! match_any {
+ ( $expr:expr , $( $( $pat:pat )|+ => $expr_arm:expr ),+ ) => { // should be ok
+ match $expr {
+ $(
+ $( $pat => $expr_arm, )+
+ )+
+ }
+ };
+}
+
+fn main() {
+ let result: Result<i64, i32> = Err(42);
+ let int: i64 = match_any!(result, Ok(i) | Err(i) => i.into());
+ assert_eq!(int, 42);
+}
diff --git a/src/test/ui/macros/macro-pat2021-pattern-followed-by-or.rs b/src/test/ui/macros/macro-pat2021-pattern-followed-by-or.rs
new file mode 100644
index 0000000..edd3f3e
--- /dev/null
+++ b/src/test/ui/macros/macro-pat2021-pattern-followed-by-or.rs
@@ -0,0 +1,21 @@
+#![feature(edition_macro_pats)]
+#![allow(unused_macros)]
+macro_rules! foo { ($x:pat2021 | $y:pat2021) => {} } //~ ERROR `$x:pat2021` is followed by `|`, which is not allowed for `pat2021` fragments
+macro_rules! baz { ($x:pat2015 | $y:pat2015) => {} } // should be ok
+macro_rules! qux { ($x:pat2015 | $y:pat2021) => {} } // should be ok
+macro_rules! ogg { ($x:pat2021 | $y:pat2015) => {} } //~ ERROR `$x:pat2021` is followed by `|`, which is not allowed for `pat2021` fragments
+macro_rules! match_any {
+ ( $expr:expr , $( $( $pat:pat2021 )|+ => $expr_arm:pat2021 ),+ ) => { //~ ERROR `$pat:pat2021` may be followed by `|`, which is not allowed for `pat2021` fragments
+ match $expr {
+ $(
+ $( $pat => $expr_arm, )+
+ )+
+ }
+ };
+}
+
+fn main() {
+ let result: Result<i64, i32> = Err(42);
+ let int: i64 = match_any!(result, Ok(i) | Err(i) => i.into());
+ assert_eq!(int, 42);
+}
diff --git a/src/test/ui/macros/macro-pat2021-pattern-followed-by-or.stderr b/src/test/ui/macros/macro-pat2021-pattern-followed-by-or.stderr
new file mode 100644
index 0000000..fe0b40c
--- /dev/null
+++ b/src/test/ui/macros/macro-pat2021-pattern-followed-by-or.stderr
@@ -0,0 +1,26 @@
+error: `$x:pat2021` is followed by `|`, which is not allowed for `pat2021` fragments
+ --> $DIR/macro-pat2021-pattern-followed-by-or.rs:3:32
+ |
+LL | macro_rules! foo { ($x:pat2021 | $y:pat2021) => {} }
+ | ^ not allowed after `pat2021` fragments
+ |
+ = note: allowed there are: `=>`, `,`, `=`, `if` or `in`
+
+error: `$x:pat2021` is followed by `|`, which is not allowed for `pat2021` fragments
+ --> $DIR/macro-pat2021-pattern-followed-by-or.rs:6:32
+ |
+LL | macro_rules! ogg { ($x:pat2021 | $y:pat2015) => {} }
+ | ^ not allowed after `pat2021` fragments
+ |
+ = note: allowed there are: `=>`, `,`, `=`, `if` or `in`
+
+error: `$pat:pat2021` may be followed by `|`, which is not allowed for `pat2021` fragments
+ --> $DIR/macro-pat2021-pattern-followed-by-or.rs:8:40
+ |
+LL | ( $expr:expr , $( $( $pat:pat2021 )|+ => $expr_arm:pat2021 ),+ ) => {
+ | ^ not allowed after `pat2021` fragments
+ |
+ = note: allowed there are: `=>`, `,`, `=`, `if` or `in`
+
+error: aborting due to 3 previous errors
+
diff --git a/src/test/ui/macros/none-delim-lookahead.rs b/src/test/ui/macros/none-delim-lookahead.rs
new file mode 100644
index 0000000..bf4fddea
--- /dev/null
+++ b/src/test/ui/macros/none-delim-lookahead.rs
@@ -0,0 +1,15 @@
+// check-pass
+
+macro_rules! make_struct {
+ ($name:ident) => {
+ #[derive(Debug)]
+ struct Foo {
+ #[cfg(not(FALSE))]
+ field: fn($name: bool)
+ }
+ }
+}
+
+make_struct!(param_name);
+
+fn main() {}
diff --git a/src/test/ui/mismatched_types/E0053.stderr b/src/test/ui/mismatched_types/E0053.stderr
index e0a3ce9..6ce8126 100644
--- a/src/test/ui/mismatched_types/E0053.stderr
+++ b/src/test/ui/mismatched_types/E0053.stderr
@@ -5,7 +5,10 @@
| --- type in trait
...
LL | fn foo(x: i16) { }
- | ^^^ expected `u16`, found `i16`
+ | ^^^
+ | |
+ | expected `u16`, found `i16`
+ | help: change the parameter type to match the trait: `u16`
|
= note: expected fn pointer `fn(u16)`
found fn pointer `fn(i16)`
@@ -20,7 +23,7 @@
| ^^^^^^^^^
| |
| types differ in mutability
- | help: consider changing the mutability to match the trait: `&self`
+ | help: change the self-receiver type to match the trait: `self: &Bar`
|
= note: expected fn pointer `fn(&Bar)`
found fn pointer `fn(&mut Bar)`
diff --git a/src/test/ui/mismatched_types/trait-impl-fn-incompatibility.stderr b/src/test/ui/mismatched_types/trait-impl-fn-incompatibility.stderr
index 1618434..2ac4d1c 100644
--- a/src/test/ui/mismatched_types/trait-impl-fn-incompatibility.stderr
+++ b/src/test/ui/mismatched_types/trait-impl-fn-incompatibility.stderr
@@ -5,7 +5,10 @@
| --- type in trait
...
LL | fn foo(x: i16) { }
- | ^^^ expected `u16`, found `i16`
+ | ^^^
+ | |
+ | expected `u16`, found `i16`
+ | help: change the parameter type to match the trait: `u16`
|
= note: expected fn pointer `fn(u16)`
found fn pointer `fn(i16)`
@@ -20,7 +23,7 @@
| ^^^^
| |
| types differ in mutability
- | help: consider changing the mutability to match the trait: `&mut Bar`
+ | help: change the parameter type to match the trait: `&mut Bar`
|
= note: expected fn pointer `fn(&mut Bar, &mut Bar)`
found fn pointer `fn(&mut Bar, &Bar)`
diff --git a/src/test/ui/panic-runtime/unwind-tables-panic-required.rs b/src/test/ui/panic-runtime/unwind-tables-panic-required.rs
deleted file mode 100644
index 79e9187..0000000
--- a/src/test/ui/panic-runtime/unwind-tables-panic-required.rs
+++ /dev/null
@@ -1,10 +0,0 @@
-// Tests that the compiler errors if the user tries to turn off unwind tables
-// when they are required.
-//
-// dont-check-compiler-stderr
-// compile-flags: -C panic=unwind -C force-unwind-tables=no
-//
-// error-pattern: panic=unwind requires unwind tables, they cannot be disabled with `-C force-unwind-tables=no`.
-
-pub fn main() {
-}
diff --git a/src/test/ui/parser/issue-84117.rs b/src/test/ui/parser/issue-84117.rs
new file mode 100644
index 0000000..0f20073
--- /dev/null
+++ b/src/test/ui/parser/issue-84117.rs
@@ -0,0 +1,9 @@
+fn main() {
+ let outer_local:e_outer<&str, { let inner_local:e_inner<&str, }
+ //~^ ERROR expected one of `>`, a const expression
+ //~| ERROR expected one of `>`, a const expression, lifetime, or type, found `}`
+ //~| ERROR expected one of `!`, `.`, `::`, `;`, `?`, `{`, or an operator, found `,`
+ //~| ERROR expected one of `!`, `.`, `::`, `;`, `?`, `{`, or an operator, found `,`
+ //~| ERROR expected one of `!`, `.`, `::`, `;`, `?`, `{`, or an operator, found `,`
+}
+//~^ ERROR expected one of `,`, `:`, `=`, or `>`, found `}`
diff --git a/src/test/ui/parser/issue-84117.stderr b/src/test/ui/parser/issue-84117.stderr
new file mode 100644
index 0000000..d667a49
--- /dev/null
+++ b/src/test/ui/parser/issue-84117.stderr
@@ -0,0 +1,49 @@
+error: expected one of `>`, a const expression, lifetime, or type, found `}`
+ --> $DIR/issue-84117.rs:2:67
+ |
+LL | let outer_local:e_outer<&str, { let inner_local:e_inner<&str, }
+ | ------------ ^ expected one of `>`, a const expression, lifetime, or type
+ | | |
+ | | help: use `=` if you meant to assign
+ | while parsing the type for `inner_local`
+
+error: expected one of `!`, `.`, `::`, `;`, `?`, `{`, or an operator, found `,`
+ --> $DIR/issue-84117.rs:2:65
+ |
+LL | let outer_local:e_outer<&str, { let inner_local:e_inner<&str, }
+ | ^ expected one of 7 possible tokens
+
+error: expected one of `,`, `:`, `=`, or `>`, found `}`
+ --> $DIR/issue-84117.rs:8:1
+ |
+LL | let outer_local:e_outer<&str, { let inner_local:e_inner<&str, }
+ | ------------ help: use `=` if you meant to assign - expected one of `,`, `:`, `=`, or `>`
+ | |
+ | while parsing the type for `outer_local`
+...
+LL | }
+ | ^ unexpected token
+
+error: expected one of `>`, a const expression, lifetime, or type, found `}`
+ --> $DIR/issue-84117.rs:2:67
+ |
+LL | let outer_local:e_outer<&str, { let inner_local:e_inner<&str, }
+ | ------------ ^ expected one of `>`, a const expression, lifetime, or type
+ | | |
+ | | help: use `=` if you meant to assign
+ | while parsing the type for `inner_local`
+
+error: expected one of `!`, `.`, `::`, `;`, `?`, `{`, or an operator, found `,`
+ --> $DIR/issue-84117.rs:2:65
+ |
+LL | let outer_local:e_outer<&str, { let inner_local:e_inner<&str, }
+ | ^ expected one of 7 possible tokens
+
+error: expected one of `!`, `.`, `::`, `;`, `?`, `{`, or an operator, found `,`
+ --> $DIR/issue-84117.rs:2:33
+ |
+LL | let outer_local:e_outer<&str, { let inner_local:e_inner<&str, }
+ | ^ expected one of 7 possible tokens
+
+error: aborting due to 6 previous errors
+
diff --git a/src/test/ui/parser/variadic-ffi-semantic-restrictions.rs b/src/test/ui/parser/variadic-ffi-semantic-restrictions.rs
index 404f409..fe993a6 100644
--- a/src/test/ui/parser/variadic-ffi-semantic-restrictions.rs
+++ b/src/test/ui/parser/variadic-ffi-semantic-restrictions.rs
@@ -1,4 +1,5 @@
#![feature(c_variadic)]
+#![allow(anonymous_parameters)]
fn main() {}
diff --git a/src/test/ui/parser/variadic-ffi-semantic-restrictions.stderr b/src/test/ui/parser/variadic-ffi-semantic-restrictions.stderr
index ebfe497..10fd05c 100644
--- a/src/test/ui/parser/variadic-ffi-semantic-restrictions.stderr
+++ b/src/test/ui/parser/variadic-ffi-semantic-restrictions.stderr
@@ -1,203 +1,203 @@
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:5:19
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:6:19
|
LL | fn f1_1(x: isize, ...) {}
| ^^^
error: C-variadic function must be declared with at least one named argument
- --> $DIR/variadic-ffi-semantic-restrictions.rs:8:9
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:9:9
|
LL | fn f1_2(...) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:8:9
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:9:9
|
LL | fn f1_2(...) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:12:30
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:13:30
|
LL | extern "C" fn f2_1(x: isize, ...) {}
| ^^^
error: C-variadic function must be declared with at least one named argument
- --> $DIR/variadic-ffi-semantic-restrictions.rs:15:20
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:16:20
|
LL | extern "C" fn f2_2(...) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:15:20
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:16:20
|
LL | extern "C" fn f2_2(...) {}
| ^^^
error: `...` must be the last argument of a C-variadic function
- --> $DIR/variadic-ffi-semantic-restrictions.rs:19:20
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:20:20
|
LL | extern "C" fn f2_3(..., x: isize) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:19:20
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:20:20
|
LL | extern "C" fn f2_3(..., x: isize) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:23:30
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:24:30
|
LL | extern "C" fn f3_1(x: isize, ...) {}
| ^^^
error: C-variadic function must be declared with at least one named argument
- --> $DIR/variadic-ffi-semantic-restrictions.rs:26:20
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:27:20
|
LL | extern "C" fn f3_2(...) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:26:20
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:27:20
|
LL | extern "C" fn f3_2(...) {}
| ^^^
error: `...` must be the last argument of a C-variadic function
- --> $DIR/variadic-ffi-semantic-restrictions.rs:30:20
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:31:20
|
LL | extern "C" fn f3_3(..., x: isize) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:30:20
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:31:20
|
LL | extern "C" fn f3_3(..., x: isize) {}
| ^^^
error: C-variadic function must be declared with at least one named argument
- --> $DIR/variadic-ffi-semantic-restrictions.rs:35:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:36:13
|
LL | fn e_f1(...);
| ^^^
error: `...` must be the last argument of a C-variadic function
- --> $DIR/variadic-ffi-semantic-restrictions.rs:37:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:38:13
|
LL | fn e_f2(..., x: isize);
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:44:23
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:45:23
|
LL | fn i_f1(x: isize, ...) {}
| ^^^
error: C-variadic function must be declared with at least one named argument
- --> $DIR/variadic-ffi-semantic-restrictions.rs:46:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:47:13
|
LL | fn i_f2(...) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:46:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:47:13
|
LL | fn i_f2(...) {}
| ^^^
error: `...` must be the last argument of a C-variadic function
- --> $DIR/variadic-ffi-semantic-restrictions.rs:49:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:50:13
|
LL | fn i_f3(..., x: isize, ...) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:49:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:50:13
|
LL | fn i_f3(..., x: isize, ...) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:49:28
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:50:28
|
LL | fn i_f3(..., x: isize, ...) {}
| ^^^
error: `...` must be the last argument of a C-variadic function
- --> $DIR/variadic-ffi-semantic-restrictions.rs:53:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:54:13
|
LL | fn i_f4(..., x: isize, ...) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:53:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:54:13
|
LL | fn i_f4(..., x: isize, ...) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:53:28
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:54:28
|
LL | fn i_f4(..., x: isize, ...) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:60:23
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:61:23
|
LL | fn t_f1(x: isize, ...) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:62:23
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:63:23
|
LL | fn t_f2(x: isize, ...);
| ^^^
error: C-variadic function must be declared with at least one named argument
- --> $DIR/variadic-ffi-semantic-restrictions.rs:64:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:65:13
|
LL | fn t_f3(...) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:64:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:65:13
|
LL | fn t_f3(...) {}
| ^^^
error: C-variadic function must be declared with at least one named argument
- --> $DIR/variadic-ffi-semantic-restrictions.rs:67:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:68:13
|
LL | fn t_f4(...);
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:67:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:68:13
|
LL | fn t_f4(...);
| ^^^
error: `...` must be the last argument of a C-variadic function
- --> $DIR/variadic-ffi-semantic-restrictions.rs:70:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:71:13
|
LL | fn t_f5(..., x: isize) {}
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:70:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:71:13
|
LL | fn t_f5(..., x: isize) {}
| ^^^
error: `...` must be the last argument of a C-variadic function
- --> $DIR/variadic-ffi-semantic-restrictions.rs:73:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:74:13
|
LL | fn t_f6(..., x: isize);
| ^^^
error: only foreign or `unsafe extern "C" functions may be C-variadic
- --> $DIR/variadic-ffi-semantic-restrictions.rs:73:13
+ --> $DIR/variadic-ffi-semantic-restrictions.rs:74:13
|
LL | fn t_f6(..., x: isize);
| ^^^
diff --git a/src/test/ui/proc-macro/attr-complex-fn.stdout b/src/test/ui/proc-macro/attr-complex-fn.stdout
index a395a9a..72783ef 100644
--- a/src/test/ui/proc-macro/attr-complex-fn.stdout
+++ b/src/test/ui/proc-macro/attr-complex-fn.stdout
@@ -80,67 +80,67 @@
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "impl",
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:1: 21:5 (#0),
},
Punct {
ch: '<',
spacing: Alone,
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:5: 21:6 (#0),
},
Ident {
ident: "T",
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:6: 21:7 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:7: 21:8 (#0),
},
Ident {
ident: "MyTrait",
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:9: 21:16 (#0),
},
Punct {
ch: '<',
spacing: Alone,
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:16: 21:17 (#0),
},
Ident {
ident: "T",
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:17: 21:18 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:18: 21:19 (#0),
},
Ident {
ident: "for",
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:20: 21:23 (#0),
},
Ident {
ident: "MyStruct",
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:24: 21:32 (#0),
},
Punct {
ch: '<',
spacing: Alone,
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:32: 21:33 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "true",
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:34: 21:38 (#0),
},
],
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:33: 21:39 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:39: 21:40 (#0),
},
Group {
delimiter: Brace,
@@ -148,24 +148,24 @@
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:23:5: 23:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:23:6: 23:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:23:8: 23:19 (#0),
},
],
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:23:7: 23:20 (#0),
},
],
- span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
+ span: $DIR/attr-complex-fn.rs:21:41: 24:2 (#0),
},
]
diff --git a/src/test/ui/proc-macro/attribute-after-derive.stdout b/src/test/ui/proc-macro/attribute-after-derive.stdout
index d0b6655..4c48e41 100644
--- a/src/test/ui/proc-macro/attribute-after-derive.stdout
+++ b/src/test/ui/proc-macro/attribute-after-derive.stdout
@@ -87,16 +87,16 @@
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
- span: $DIR/attribute-after-derive.rs:18:1: 21:2 (#0),
+ span: $DIR/attribute-after-derive.rs:18:1: 18:7 (#0),
},
Ident {
ident: "AttributeDerive",
- span: $DIR/attribute-after-derive.rs:18:1: 21:2 (#0),
+ span: $DIR/attribute-after-derive.rs:18:8: 18:23 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
- span: $DIR/attribute-after-derive.rs:18:1: 21:2 (#0),
+ span: $DIR/attribute-after-derive.rs:18:24: 21:2 (#0),
},
]
PRINT-DERIVE INPUT (DISPLAY): #[print_attr] struct DeriveAttribute { }
@@ -104,45 +104,45 @@
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
+ span: $DIR/attribute-after-derive.rs:24:1: 24:2 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_attr",
- span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
+ span: $DIR/attribute-after-derive.rs:24:3: 24:13 (#0),
},
],
- span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
+ span: $DIR/attribute-after-derive.rs:24:2: 24:14 (#0),
},
Ident {
ident: "struct",
- span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
+ span: $DIR/attribute-after-derive.rs:25:1: 25:7 (#0),
},
Ident {
ident: "DeriveAttribute",
- span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
+ span: $DIR/attribute-after-derive.rs:25:8: 25:23 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
- span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
+ span: $DIR/attribute-after-derive.rs:25:24: 28:2 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): struct DeriveAttribute { }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
- span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
+ span: $DIR/attribute-after-derive.rs:25:1: 25:7 (#0),
},
Ident {
ident: "DeriveAttribute",
- span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
+ span: $DIR/attribute-after-derive.rs:25:8: 25:23 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
- span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
+ span: $DIR/attribute-after-derive.rs:25:24: 28:2 (#0),
},
]
diff --git a/src/test/ui/proc-macro/cfg-eval-inner.stdout b/src/test/ui/proc-macro/cfg-eval-inner.stdout
index a9301d3..1f2b003 100644
--- a/src/test/ui/proc-macro/cfg-eval-inner.stdout
+++ b/src/test/ui/proc-macro/cfg-eval-inner.stdout
@@ -2,251 +2,246 @@
[u8 ;
{
# ! [rustc_dummy(cursed_inner)] # ! [allow(unused)] struct Inner
- { field : [u8 ; { # ! [rustc_dummy(another_cursed_inner)] 1 }], } 0
+ { field : [u8 ; { # ! [rustc_dummy(another_cursed_inner)] 1 }] } 0
}] > { # ! [rustc_dummy(evaluated_attr)] fn bar() { } }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "impl",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:18:1: 18:5 (#0),
},
Ident {
ident: "Foo",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:18:6: 18:9 (#0),
},
Punct {
ch: '<',
spacing: Alone,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:18:9: 18:10 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "u8",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:18:11: 18:13 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:18:13: 18:14 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Punct {
ch: '#',
- spacing: Joint,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ spacing: Alone,
+ span: $DIR/cfg-eval-inner.rs:19:5: 19:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:19:6: 19:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:19:29: 19:40 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "cursed_inner",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:19:41: 19:53 (#0),
},
],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:19:40: 19:54 (#0),
},
],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:19:5: 19:6 (#0),
},
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:20:5: 20:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:20:6: 20:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:20:8: 20:13 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "unused",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:20:14: 20:20 (#0),
},
],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:20:13: 20:21 (#0),
},
],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:20:7: 20:22 (#0),
},
Ident {
ident: "struct",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:21:5: 21:11 (#0),
},
Ident {
ident: "Inner",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:21:12: 21:17 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "field",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:22:9: 22:14 (#0),
},
Punct {
ch: ':',
spacing: Alone,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:22:14: 22:15 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "u8",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:22:17: 22:19 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:22:19: 22:20 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Punct {
ch: '#',
- spacing: Joint,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ spacing: Alone,
+ span: $DIR/cfg-eval-inner.rs:23:13: 23:14 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:23:14: 23:15 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:23:37: 23:48 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "another_cursed_inner",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:23:49: 23:69 (#0),
},
],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:23:48: 23:70 (#0),
},
],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:23:13: 23:14 (#0),
},
Literal {
kind: Integer,
symbol: "1",
suffix: None,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:24:13: 24:14 (#0),
},
],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:22:21: 25:10 (#0),
},
],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
- },
- Punct {
- ch: ',',
- spacing: Alone,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:22:16: 25:11 (#0),
},
],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:21:18: 26:6 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:28:5: 28:6 (#0),
},
],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:18:15: 29:2 (#0),
},
],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:18:10: 29:3 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:29:3: 29:4 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Punct {
ch: '#',
- spacing: Joint,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ spacing: Alone,
+ span: $DIR/cfg-eval-inner.rs:32:5: 32:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:32:6: 32:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:32:29: 32:40 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "evaluated_attr",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:32:41: 32:55 (#0),
},
],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:32:40: 32:56 (#0),
},
],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:32:5: 32:6 (#0),
},
Ident {
ident: "fn",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:34:5: 34:7 (#0),
},
Ident {
ident: "bar",
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:34:8: 34:11 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:34:11: 34:13 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:34:14: 36:6 (#0),
},
],
- span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
+ span: $DIR/cfg-eval-inner.rs:29:5: 37:2 (#0),
},
]
diff --git a/src/test/ui/proc-macro/cfg-eval.stdout b/src/test/ui/proc-macro/cfg-eval.stdout
index 1f78459..6732caf 100644
--- a/src/test/ui/proc-macro/cfg-eval.stdout
+++ b/src/test/ui/proc-macro/cfg-eval.stdout
@@ -2,11 +2,11 @@
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:17:1: 17:7 (#0),
},
Ident {
ident: "S1",
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:17:8: 17:10 (#0),
},
Group {
delimiter: Brace,
@@ -14,73 +14,73 @@
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:20:5: 20:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "cfg",
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:20:7: 20:10 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "all",
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:20:11: 20:14 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:20:14: 20:24 (#0),
},
],
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:20:10: 20:25 (#0),
},
],
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:20:6: 20:26 (#0),
},
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:22:5: 22:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:22:31: 22:36 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:22:36: 22:38 (#0),
},
],
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:22:5: 22:6 (#0),
},
Ident {
ident: "field_true",
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:23:5: 23:15 (#0),
},
Punct {
ch: ':',
spacing: Alone,
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:23:15: 23:16 (#0),
},
Ident {
ident: "u8",
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:23:17: 23:19 (#0),
},
Punct {
ch: ',',
spacing: Alone,
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:23:19: 23:20 (#0),
},
],
- span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
+ span: $DIR/cfg-eval.rs:17:11: 24:2 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] (#[cfg(all())] 1,)
@@ -88,17 +88,17 @@
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
+ span: $DIR/cfg-eval.rs:35:39: 35:40 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
- span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
+ span: $DIR/cfg-eval.rs:35:62: 35:73 (#0),
},
],
- span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
+ span: $DIR/cfg-eval.rs:35:39: 35:40 (#0),
},
Group {
delimiter: Parenthesis,
@@ -106,43 +106,43 @@
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
+ span: $DIR/cfg-eval.rs:36:23: 36:24 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "cfg",
- span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
+ span: $DIR/cfg-eval.rs:36:25: 36:28 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "all",
- span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
+ span: $DIR/cfg-eval.rs:36:29: 36:32 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
- span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
+ span: $DIR/cfg-eval.rs:36:32: 36:42 (#0),
},
],
- span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
+ span: $DIR/cfg-eval.rs:36:28: 36:43 (#0),
},
],
- span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
+ span: $DIR/cfg-eval.rs:36:24: 36:44 (#0),
},
Literal {
kind: Integer,
symbol: "1",
suffix: None,
- span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
+ span: $DIR/cfg-eval.rs:36:45: 36:46 (#0),
},
Punct {
ch: ',',
spacing: Alone,
- span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
+ span: $DIR/cfg-eval.rs:36:46: 36:47 (#0),
},
],
span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
diff --git a/src/test/ui/proc-macro/expand-to-derive.stdout b/src/test/ui/proc-macro/expand-to-derive.stdout
index 7eb6864..a643798 100644
--- a/src/test/ui/proc-macro/expand-to-derive.stdout
+++ b/src/test/ui/proc-macro/expand-to-derive.stdout
@@ -1,40 +1,40 @@
PRINT-DERIVE INPUT (DISPLAY): struct Foo
{
field :
- [bool ; { #[rustc_dummy] struct Inner { other_inner_field : u8, } 0 }],
+ [bool ; { #[rustc_dummy] struct Inner { other_inner_field : u8, } 0 }]
}
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:16:9: 16:15 (#4),
},
Ident {
ident: "Foo",
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:16:16: 16:19 (#4),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "field",
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:18:13: 18:18 (#4),
},
Punct {
ch: ':',
spacing: Alone,
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:18:18: 18:19 (#4),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "bool",
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:18:21: 18:25 (#4),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:18:25: 18:26 (#4),
},
Group {
delimiter: Brace,
@@ -42,68 +42,63 @@
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:27:5: 27:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:27:28: 27:39 (#0),
},
],
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:27:5: 27:6 (#0),
},
Ident {
ident: "struct",
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:28:5: 28:11 (#0),
},
Ident {
ident: "Inner",
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:28:12: 28:17 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "other_inner_field",
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:30:9: 30:26 (#0),
},
Punct {
ch: ':',
spacing: Alone,
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:30:26: 30:27 (#0),
},
Ident {
ident: "u8",
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:30:28: 30:30 (#0),
},
Punct {
ch: ',',
spacing: Alone,
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:30:30: 30:31 (#0),
},
],
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:28:18: 31:6 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:20:17: 20:18 (#4),
},
],
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:18:27: 21:14 (#4),
},
],
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
- },
- Punct {
- ch: ',',
- spacing: Alone,
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:18:20: 21:15 (#4),
},
],
- span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
+ span: $DIR/expand-to-derive.rs:16:20: 22:10 (#4),
},
]
diff --git a/src/test/ui/proc-macro/inner-attrs.rs b/src/test/ui/proc-macro/inner-attrs.rs
index 1ff97db..14ec57a 100644
--- a/src/test/ui/proc-macro/inner-attrs.rs
+++ b/src/test/ui/proc-macro/inner-attrs.rs
@@ -37,7 +37,7 @@
#![cfg_attr(not(FALSE), rustc_dummy(first))]
#![cfg_attr(not(FALSE), rustc_dummy(second))]
_ => {
- #![cfg_attr(not(FALSE), rustc_dummy(second))]
+ #![cfg_attr(not(FALSE), rustc_dummy(third))]
true
}
};
diff --git a/src/test/ui/proc-macro/inner-attrs.stdout b/src/test/ui/proc-macro/inner-attrs.stdout
index b44822f..2f442e8 100644
--- a/src/test/ui/proc-macro/inner-attrs.stdout
+++ b/src/test/ui/proc-macro/inner-attrs.stdout
@@ -11,40 +11,40 @@
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:16:1: 16:2 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:16:3: 16:24 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "second",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:16:25: 16:31 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:16:24: 16:32 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:16:2: 16:33 (#0),
},
Ident {
ident: "fn",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:1: 17:3 (#0),
},
Ident {
ident: "foo",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:4: 17:7 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:7: 17:9 (#0),
},
Group {
delimiter: Brace,
@@ -52,72 +52,72 @@
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:18:5: 18:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:18:6: 18:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:18:8: 18:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "third",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:18:30: 18:35 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:18:29: 18:36 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:18:7: 18:37 (#0),
},
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:5: 19:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:6: 19:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:8: 19:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "fourth",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:30: 19:36 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:29: 19:37 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:7: 19:38 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:10: 20:2 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): second
PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
Ident {
ident: "second",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:16:25: 16:31 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): fn foo()
@@ -125,16 +125,16 @@
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "fn",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:1: 17:3 (#0),
},
Ident {
ident: "foo",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:4: 17:7 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:7: 17:9 (#0),
},
Group {
delimiter: Brace,
@@ -142,88 +142,88 @@
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:18:5: 18:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:18:6: 18:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:18:8: 18:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "third",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:18:30: 18:35 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:18:29: 18:36 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:18:7: 18:37 (#0),
},
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:5: 19:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:6: 19:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:8: 19:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "fourth",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:30: 19:36 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:29: 19:37 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:7: 19:38 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:10: 20:2 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): third
PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
Ident {
ident: "third",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:18:30: 18:35 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): fn foo() { # ! [print_target_and_args(fourth)] }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "fn",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:1: 17:3 (#0),
},
Ident {
ident: "foo",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:4: 17:7 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:7: 17:9 (#0),
},
Group {
delimiter: Brace,
@@ -231,63 +231,63 @@
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:5: 19:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:6: 19:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:8: 19:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "fourth",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:30: 19:36 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:29: 19:37 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:7: 19:38 (#0),
},
],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:10: 20:2 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): fourth
PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
Ident {
ident: "fourth",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:19:30: 19:36 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): fn foo() { }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "fn",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:1: 17:3 (#0),
},
Ident {
ident: "foo",
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:4: 17:7 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:7: 17:9 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
- span: $DIR/inner-attrs.rs:17:1: 20:2 (#0),
+ span: $DIR/inner-attrs.rs:17:10: 20:2 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): mod_first
@@ -306,35 +306,35 @@
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:23:1: 23:2 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:23:3: 23:24 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "mod_second",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:23:25: 23:35 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:23:24: 23:36 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:23:2: 23:37 (#0),
},
Ident {
ident: "mod",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:24:1: 24:4 (#0),
},
Ident {
ident: "inline_mod",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:24:5: 24:15 (#0),
},
Group {
delimiter: Brace,
@@ -342,72 +342,72 @@
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:25:5: 25:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:25:6: 25:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:25:8: 25:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "mod_third",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:25:30: 25:39 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:25:29: 25:40 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:25:7: 25:41 (#0),
},
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:5: 26:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:6: 26:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:8: 26:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "mod_fourth",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:30: 26:40 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:29: 26:41 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:7: 26:42 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:24:16: 27:2 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): mod_second
PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
Ident {
ident: "mod_second",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:23:25: 23:35 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): mod inline_mod
@@ -418,11 +418,11 @@
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "mod",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:24:1: 24:4 (#0),
},
Ident {
ident: "inline_mod",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:24:5: 24:15 (#0),
},
Group {
delimiter: Brace,
@@ -430,83 +430,83 @@
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:25:5: 25:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:25:6: 25:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:25:8: 25:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "mod_third",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:25:30: 25:39 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:25:29: 25:40 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:25:7: 25:41 (#0),
},
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:5: 26:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:6: 26:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:8: 26:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "mod_fourth",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:30: 26:40 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:29: 26:41 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:7: 26:42 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:24:16: 27:2 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): mod_third
PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
Ident {
ident: "mod_third",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:25:30: 25:39 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): mod inline_mod { # ! [print_target_and_args(mod_fourth)] }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "mod",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:24:1: 24:4 (#0),
},
Ident {
ident: "inline_mod",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:24:5: 24:15 (#0),
},
Group {
delimiter: Brace,
@@ -514,58 +514,58 @@
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:5: 26:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:6: 26:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:8: 26:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "mod_fourth",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:30: 26:40 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:29: 26:41 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:7: 26:42 (#0),
},
],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:24:16: 27:2 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): mod_fourth
PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
Ident {
ident: "mod_fourth",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:26:30: 26:40 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): mod inline_mod { }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "mod",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:24:1: 24:4 (#0),
},
Ident {
ident: "inline_mod",
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:24:5: 24:15 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
- span: $DIR/inner-attrs.rs:24:1: 27:2 (#0),
+ span: $DIR/inner-attrs.rs:24:16: 27:2 (#0),
},
]
PRINT-DERIVE INPUT (DISPLAY): struct MyDerivePrint
@@ -574,168 +574,195 @@
[u8 ;
{
match true
- { # ! [rustc_dummy(first)] # ! [rustc_dummy(second)] _ => { true } }
- ; 0
- }],
+ {
+ # ! [rustc_dummy(first)] # ! [rustc_dummy(second)] _ =>
+ { # ! [rustc_dummy(third)] true }
+ } ; 0
+ }]
}
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:34:1: 34:7 (#0),
},
Ident {
ident: "MyDerivePrint",
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:34:8: 34:21 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "field",
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:35:5: 35:10 (#0),
},
Punct {
ch: ':',
spacing: Alone,
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:35:10: 35:11 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "u8",
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:35:13: 35:15 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:35:15: 35:16 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "match",
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:36:9: 36:14 (#0),
},
Ident {
ident: "true",
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:36:15: 36:19 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Punct {
ch: '#',
- spacing: Joint,
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ spacing: Alone,
+ span: $DIR/inner-attrs.rs:37:13: 37:14 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:37:14: 37:15 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:37:37: 37:48 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "first",
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:37:49: 37:54 (#0),
},
],
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:37:48: 37:55 (#0),
},
],
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:37:13: 37:14 (#0),
},
Punct {
ch: '#',
- spacing: Joint,
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ spacing: Alone,
+ span: $DIR/inner-attrs.rs:38:13: 38:14 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:38:14: 38:15 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:38:37: 38:48 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "second",
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:38:49: 38:55 (#0),
},
],
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:38:48: 38:56 (#0),
},
],
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:38:13: 38:14 (#0),
},
Ident {
ident: "_",
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:39:13: 39:14 (#0),
},
Punct {
ch: '=',
spacing: Joint,
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:39:15: 39:17 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:39:15: 39:17 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
+ Punct {
+ ch: '#',
+ spacing: Alone,
+ span: $DIR/inner-attrs.rs:40:17: 40:18 (#0),
+ },
+ Punct {
+ ch: '!',
+ spacing: Alone,
+ span: $DIR/inner-attrs.rs:40:18: 40:19 (#0),
+ },
+ Group {
+ delimiter: Bracket,
+ stream: TokenStream [
+ Ident {
+ ident: "rustc_dummy",
+ span: $DIR/inner-attrs.rs:40:41: 40:52 (#0),
+ },
+ Group {
+ delimiter: Parenthesis,
+ stream: TokenStream [
+ Ident {
+ ident: "third",
+ span: $DIR/inner-attrs.rs:40:53: 40:58 (#0),
+ },
+ ],
+ span: $DIR/inner-attrs.rs:40:52: 40:59 (#0),
+ },
+ ],
+ span: $DIR/inner-attrs.rs:40:17: 40:18 (#0),
+ },
Ident {
ident: "true",
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:41:17: 41:21 (#0),
},
],
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:39:18: 42:14 (#0),
},
],
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:36:20: 43:10 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:43:10: 43:11 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:44:9: 44:10 (#0),
},
],
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:35:17: 45:6 (#0),
},
],
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
- },
- Punct {
- ch: ',',
- spacing: Alone,
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:35:12: 45:7 (#0),
},
],
- span: $DIR/inner-attrs.rs:34:1: 46:2 (#0),
+ span: $DIR/inner-attrs.rs:34:22: 46:2 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): tuple_attrs
@@ -745,51 +772,11 @@
span: $DIR/inner-attrs.rs:52:29: 52:40 (#0),
},
]
-PRINT-ATTR INPUT (DISPLAY): (# ! [cfg_attr(FALSE, rustc_dummy)] 3, 4,
- { # ! [cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
+PRINT-ATTR INPUT (DISPLAY): (3, 4, { # ! [cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Group {
delimiter: Parenthesis,
stream: TokenStream [
- Punct {
- ch: '#',
- spacing: Joint,
- span: $DIR/inner-attrs.rs:53:9: 53:10 (#0),
- },
- Punct {
- ch: '!',
- spacing: Alone,
- span: $DIR/inner-attrs.rs:53:10: 53:11 (#0),
- },
- Group {
- delimiter: Bracket,
- stream: TokenStream [
- Ident {
- ident: "cfg_attr",
- span: $DIR/inner-attrs.rs:53:12: 53:20 (#0),
- },
- Group {
- delimiter: Parenthesis,
- stream: TokenStream [
- Ident {
- ident: "FALSE",
- span: $DIR/inner-attrs.rs:53:21: 53:26 (#0),
- },
- Punct {
- ch: ',',
- spacing: Alone,
- span: $DIR/inner-attrs.rs:53:26: 53:27 (#0),
- },
- Ident {
- ident: "rustc_dummy",
- span: $DIR/inner-attrs.rs:53:28: 53:39 (#0),
- },
- ],
- span: $DIR/inner-attrs.rs:53:20: 53:40 (#0),
- },
- ],
- span: $DIR/inner-attrs.rs:53:11: 53:41 (#0),
- },
Literal {
kind: Integer,
symbol: "3",
@@ -907,55 +894,55 @@
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/inner-attrs.rs:60:43: 63:7 (#0),
+ span: $DIR/inner-attrs.rs:61:9: 61:10 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/inner-attrs.rs:60:43: 63:7 (#0),
+ span: $DIR/inner-attrs.rs:61:10: 61:11 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
- span: $DIR/inner-attrs.rs:60:43: 63:7 (#0),
+ span: $DIR/inner-attrs.rs:61:12: 61:23 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "inner",
- span: $DIR/inner-attrs.rs:60:43: 63:7 (#0),
+ span: $DIR/inner-attrs.rs:61:24: 61:29 (#0),
},
],
- span: $DIR/inner-attrs.rs:60:43: 63:7 (#0),
+ span: $DIR/inner-attrs.rs:61:23: 61:30 (#0),
},
],
- span: $DIR/inner-attrs.rs:60:43: 63:7 (#0),
+ span: $DIR/inner-attrs.rs:61:11: 61:31 (#0),
},
Ident {
ident: "true",
- span: $DIR/inner-attrs.rs:60:43: 63:7 (#0),
+ span: $DIR/inner-attrs.rs:62:9: 62:13 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/inner-attrs.rs:60:43: 63:7 (#0),
+ span: $DIR/inner-attrs.rs:62:13: 62:14 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
- span: $DIR/inner-attrs.rs:60:43: 63:7 (#0),
+ span: $DIR/inner-attrs.rs:62:15: 62:16 (#0),
},
],
- span: $DIR/inner-attrs.rs:60:43: 63:7 (#0),
+ span: $DIR/inner-attrs.rs:60:43: 63:6 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/inner-attrs.rs:60:43: 63:7 (#0),
+ span: $DIR/inner-attrs.rs:63:6: 63:7 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): tuple_attrs
@@ -965,51 +952,11 @@
span: $DIR/inner-attrs.rs:65:29: 65:40 (#0),
},
]
-PRINT-ATTR INPUT (DISPLAY): (# ! [cfg_attr(FALSE, rustc_dummy)] 3, 4,
- { # ! [cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
+PRINT-ATTR INPUT (DISPLAY): (3, 4, { # ! [cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Group {
delimiter: Parenthesis,
stream: TokenStream [
- Punct {
- ch: '#',
- spacing: Joint,
- span: $DIR/inner-attrs.rs:66:9: 66:10 (#0),
- },
- Punct {
- ch: '!',
- spacing: Alone,
- span: $DIR/inner-attrs.rs:66:10: 66:11 (#0),
- },
- Group {
- delimiter: Bracket,
- stream: TokenStream [
- Ident {
- ident: "cfg_attr",
- span: $DIR/inner-attrs.rs:66:12: 66:20 (#0),
- },
- Group {
- delimiter: Parenthesis,
- stream: TokenStream [
- Ident {
- ident: "FALSE",
- span: $DIR/inner-attrs.rs:66:21: 66:26 (#0),
- },
- Punct {
- ch: ',',
- spacing: Alone,
- span: $DIR/inner-attrs.rs:66:26: 66:27 (#0),
- },
- Ident {
- ident: "rustc_dummy",
- span: $DIR/inner-attrs.rs:66:28: 66:39 (#0),
- },
- ],
- span: $DIR/inner-attrs.rs:66:20: 66:40 (#0),
- },
- ],
- span: $DIR/inner-attrs.rs:66:11: 66:41 (#0),
- },
Literal {
kind: Integer,
symbol: "3",
@@ -1127,55 +1074,55 @@
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/inner-attrs.rs:73:43: 76:7 (#0),
+ span: $DIR/inner-attrs.rs:74:9: 74:10 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/inner-attrs.rs:73:43: 76:7 (#0),
+ span: $DIR/inner-attrs.rs:74:10: 74:11 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
- span: $DIR/inner-attrs.rs:73:43: 76:7 (#0),
+ span: $DIR/inner-attrs.rs:74:12: 74:23 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "inner",
- span: $DIR/inner-attrs.rs:73:43: 76:7 (#0),
+ span: $DIR/inner-attrs.rs:74:24: 74:29 (#0),
},
],
- span: $DIR/inner-attrs.rs:73:43: 76:7 (#0),
+ span: $DIR/inner-attrs.rs:74:23: 74:30 (#0),
},
],
- span: $DIR/inner-attrs.rs:73:43: 76:7 (#0),
+ span: $DIR/inner-attrs.rs:74:11: 74:31 (#0),
},
Ident {
ident: "true",
- span: $DIR/inner-attrs.rs:73:43: 76:7 (#0),
+ span: $DIR/inner-attrs.rs:75:9: 75:13 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/inner-attrs.rs:73:43: 76:7 (#0),
+ span: $DIR/inner-attrs.rs:75:13: 75:14 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
- span: $DIR/inner-attrs.rs:73:43: 76:7 (#0),
+ span: $DIR/inner-attrs.rs:75:15: 75:16 (#0),
},
],
- span: $DIR/inner-attrs.rs:73:43: 76:7 (#0),
+ span: $DIR/inner-attrs.rs:73:43: 76:6 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/inner-attrs.rs:73:43: 76:7 (#0),
+ span: $DIR/inner-attrs.rs:76:6: 76:7 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): tenth
@@ -1189,20 +1136,20 @@
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "fn",
- span: $DIR/inner-attrs.rs:111:5: 113:6 (#0),
+ span: $DIR/inner-attrs.rs:111:5: 111:7 (#0),
},
Ident {
ident: "weird_extern",
- span: $DIR/inner-attrs.rs:111:5: 113:6 (#0),
+ span: $DIR/inner-attrs.rs:111:8: 111:20 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
- span: $DIR/inner-attrs.rs:111:5: 113:6 (#0),
+ span: $DIR/inner-attrs.rs:111:20: 111:22 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
- span: $DIR/inner-attrs.rs:111:5: 113:6 (#0),
+ span: $DIR/inner-attrs.rs:111:23: 113:6 (#0),
},
]
diff --git a/src/test/ui/proc-macro/issue-75930-derive-cfg.stdout b/src/test/ui/proc-macro/issue-75930-derive-cfg.stdout
index 7cbc0c6..d7adc53 100644
--- a/src/test/ui/proc-macro/issue-75930-derive-cfg.stdout
+++ b/src/test/ui/proc-macro/issue-75930-derive-cfg.stdout
@@ -1279,152 +1279,152 @@
[u8 ;
{
#[cfg(not(FALSE))] struct Inner ; match true
- { #[allow(warnings)] false => { } _ => { } } ; #[print_helper(c)]
+ { #[allow(warnings)] false => { }, _ => { } } ; #[print_helper(c)]
#[cfg(not(FALSE))] fn kept_fn()
{ # ! [cfg(not(FALSE))] let my_val = true ; } enum TupleEnum
- { Foo(#[cfg(not(FALSE))] i32, u8), } struct
+ { Foo(#[cfg(not(FALSE))] i32, u8) } struct
TupleStruct(#[cfg(not(FALSE))] i32, u8) ; 0
- }], #[print_helper(d)] fourth : B,
+ }], #[print_helper(d)] fourth : B
}
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:19:1: 19:2 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_helper",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:19:3: 19:15 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "a",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:19:16: 19:17 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:19:15: 19:18 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:19:2: 19:19 (#0),
},
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:21:1: 21:2 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:21:24: 21:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "dead_code",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:21:30: 21:39 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:21:29: 21:40 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:21:1: 21:2 (#0),
},
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:24:1: 24:2 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_helper",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:24:3: 24:15 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "b",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:24:16: 24:17 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:24:15: 24:18 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:24:2: 24:19 (#0),
},
Ident {
ident: "struct",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:25:1: 25:7 (#0),
},
Ident {
ident: "Foo",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:25:8: 25:11 (#0),
},
Punct {
ch: '<',
- spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ spacing: Joint,
+ span: $DIR/issue-75930-derive-cfg.rs:25:11: 25:12 (#0),
},
Ident {
ident: "B",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:25:29: 25:30 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:25:30: 25:31 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "second",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:27:40: 27:46 (#0),
},
Punct {
ch: ':',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:27:46: 27:47 (#0),
},
Ident {
ident: "bool",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:27:48: 27:52 (#0),
},
Punct {
ch: ',',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:27:52: 27:53 (#0),
},
Ident {
ident: "third",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:28:5: 28:10 (#0),
},
Punct {
ch: ':',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:28:10: 28:11 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "u8",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:28:13: 28:15 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:28:15: 28:16 (#0),
},
Group {
delimiter: Brace,
@@ -1432,58 +1432,58 @@
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:30:9: 30:10 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "cfg",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:30:11: 30:14 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "not",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:30:15: 30:18 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "FALSE",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:30:19: 30:24 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:30:18: 30:25 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:30:14: 30:26 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:30:10: 30:27 (#0),
},
Ident {
ident: "struct",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:30:28: 30:34 (#0),
},
Ident {
ident: "Inner",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:30:35: 30:40 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:30:40: 30:41 (#0),
},
Ident {
ident: "match",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:32:9: 32:14 (#0),
},
Ident {
ident: "true",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:32:15: 32:19 (#0),
},
Group {
delimiter: Brace,
@@ -1491,146 +1491,151 @@
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:34:13: 34:14 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:34:36: 34:41 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "warnings",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:34:42: 34:50 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:34:41: 34:51 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:34:13: 34:14 (#0),
},
Ident {
ident: "false",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:34:54: 34:59 (#0),
},
Punct {
ch: '=',
spacing: Joint,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:34:60: 34:62 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:34:60: 34:62 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:34:63: 34:65 (#0),
+ },
+ Punct {
+ ch: ',',
+ spacing: Alone,
+ span: $DIR/issue-75930-derive-cfg.rs:34:65: 34:66 (#0),
},
Ident {
ident: "_",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:35:13: 35:14 (#0),
},
Punct {
ch: '=',
spacing: Joint,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:35:15: 35:17 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:35:15: 35:17 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:35:18: 35:20 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:32:20: 36:10 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:36:10: 36:11 (#0),
},
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:9: 43:10 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_helper",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:11: 43:23 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "c",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:24: 43:25 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:23: 43:26 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:10: 43:27 (#0),
},
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:28: 43:29 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "cfg",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:30: 43:33 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "not",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:34: 43:37 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "FALSE",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:38: 43:43 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:37: 43:44 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:33: 43:45 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:29: 43:46 (#0),
},
Ident {
ident: "fn",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:47: 43:49 (#0),
},
Ident {
ident: "kept_fn",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:50: 43:57 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:57: 43:59 (#0),
},
Group {
delimiter: Brace,
@@ -1638,82 +1643,82 @@
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:44:13: 44:14 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:44:14: 44:15 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "cfg",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:44:16: 44:19 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "not",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:44:20: 44:23 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "FALSE",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:44:24: 44:29 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:44:23: 44:30 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:44:19: 44:31 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:44:15: 44:32 (#0),
},
Ident {
ident: "let",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:45:13: 45:16 (#0),
},
Ident {
ident: "my_val",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:45:17: 45:23 (#0),
},
Punct {
ch: '=',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:45:24: 45:25 (#0),
},
Ident {
ident: "true",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:45:26: 45:30 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:45:30: 45:31 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:43:60: 46:10 (#0),
},
Ident {
ident: "enum",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:48:9: 48:13 (#0),
},
Ident {
ident: "TupleEnum",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:48:14: 48:23 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "Foo",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:49:13: 49:16 (#0),
},
Group {
delimiter: Parenthesis,
@@ -1721,69 +1726,64 @@
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:52:17: 52:18 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "cfg",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:52:19: 52:22 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "not",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:52:23: 52:26 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "FALSE",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:52:27: 52:32 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:52:26: 52:33 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:52:22: 52:34 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:52:18: 52:35 (#0),
},
Ident {
ident: "i32",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:52:36: 52:39 (#0),
},
Punct {
ch: ',',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:52:39: 52:40 (#0),
},
Ident {
ident: "u8",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:53:39: 53:41 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
- },
- Punct {
- ch: ',',
- spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:49:16: 54:14 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:48:24: 55:10 (#0),
},
Ident {
ident: "struct",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:57:9: 57:15 (#0),
},
Ident {
ident: "TupleStruct",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:57:16: 57:27 (#0),
},
Group {
delimiter: Parenthesis,
@@ -1791,120 +1791,115 @@
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:59:13: 59:14 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "cfg",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:59:15: 59:18 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "not",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:59:19: 59:22 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "FALSE",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:59:23: 59:28 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:59:22: 59:29 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:59:18: 59:30 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:59:14: 59:31 (#0),
},
Ident {
ident: "i32",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:59:32: 59:35 (#0),
},
Punct {
ch: ',',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:59:35: 59:36 (#0),
},
Ident {
ident: "u8",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:61:13: 61:15 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:57:27: 62:10 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:62:10: 62:11 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:68:9: 68:10 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:28:17: 69:6 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:28:12: 69:7 (#0),
},
Punct {
ch: ',',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:69:7: 69:8 (#0),
},
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:70:5: 70:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_helper",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:70:7: 70:19 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "d",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:70:20: 70:21 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:70:19: 70:22 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:70:6: 70:23 (#0),
},
Ident {
ident: "fourth",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:71:5: 71:11 (#0),
},
Punct {
ch: ':',
spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:71:11: 71:12 (#0),
},
Ident {
ident: "B",
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
- },
- Punct {
- ch: ',',
- spacing: Alone,
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:71:13: 71:14 (#0),
},
],
- span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
+ span: $DIR/issue-75930-derive-cfg.rs:25:32: 72:2 (#0),
},
]
diff --git a/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout b/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout
index 9b467a5..607c295 100644
--- a/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout
+++ b/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout
@@ -35,48 +35,48 @@
stream: TokenStream [
Ident {
ident: "mod",
- span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
+ span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 27:8 (#0),
},
Ident {
ident: "bar",
- span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
+ span: $DIR/issue-78675-captured-inner-attrs.rs:27:9: 27:12 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Punct {
ch: '#',
- spacing: Joint,
- span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
+ spacing: Alone,
+ span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
+ span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "doc",
- span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
+ span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0),
},
Punct {
ch: '=',
spacing: Alone,
- span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
+ span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0),
},
Literal {
kind: StrRaw(0),
symbol: " Foo",
suffix: None,
- span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
+ span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0),
},
],
- span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
+ span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0),
},
],
- span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
+ span: $DIR/issue-78675-captured-inner-attrs.rs:27:13: 29:6 (#0),
},
],
span: $DIR/issue-78675-captured-inner-attrs.rs:22:13: 22:18 (#4),
diff --git a/src/test/ui/proc-macro/macro-rules-derive-cfg.stdout b/src/test/ui/proc-macro/macro-rules-derive-cfg.stdout
index 5db1859..a0b0cbb 100644
--- a/src/test/ui/proc-macro/macro-rules-derive-cfg.stdout
+++ b/src/test/ui/proc-macro/macro-rules-derive-cfg.stdout
@@ -5,172 +5,167 @@
{
let a = #[rustc_dummy(first)] #[rustc_dummy(second)]
{ # ! [allow(unused)] 30 } ; 0
- }],
+ }]
}
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:17:9: 17:15 (#4),
},
Ident {
ident: "Foo",
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:17:16: 17:19 (#4),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "val",
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:18:13: 18:16 (#4),
},
Punct {
ch: ':',
spacing: Alone,
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:18:16: 18:17 (#4),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "bool",
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:18:19: 18:23 (#4),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:18:23: 18:24 (#4),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "let",
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:19:17: 19:20 (#4),
},
Ident {
ident: "a",
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:19:21: 19:22 (#4),
},
Punct {
ch: '=',
spacing: Alone,
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:19:23: 19:24 (#4),
},
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:19:25: 19:26 (#4),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:19:48: 19:59 (#4),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "first",
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:19:60: 19:65 (#4),
},
],
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:19:59: 19:66 (#4),
},
],
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:19:25: 19:26 (#4),
},
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:26:13: 26:14 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:26:36: 26:47 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "second",
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:26:48: 26:54 (#0),
},
],
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:26:47: 26:55 (#0),
},
],
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:26:13: 26:14 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Punct {
ch: '#',
- spacing: Joint,
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ spacing: Alone,
+ span: $DIR/macro-rules-derive-cfg.rs:27:5: 27:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:27:6: 27:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:27:29: 27:34 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "unused",
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:27:35: 27:41 (#0),
},
],
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:27:34: 27:42 (#0),
},
],
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:27:5: 27:6 (#0),
},
Literal {
kind: Integer,
symbol: "30",
suffix: None,
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:28:5: 28:7 (#0),
},
],
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:26:58: 29:2 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:19:74: 19:75 (#4),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:20:17: 20:18 (#4),
},
],
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:18:25: 21:14 (#4),
},
],
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
- },
- Punct {
- ch: ',',
- spacing: Alone,
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:18:18: 21:15 (#4),
},
],
- span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
+ span: $DIR/macro-rules-derive-cfg.rs:17:20: 22:10 (#4),
},
]
diff --git a/src/test/ui/proc-macro/nested-derive-cfg.stdout b/src/test/ui/proc-macro/nested-derive-cfg.stdout
index cf4e5d9..9a562c9 100644
--- a/src/test/ui/proc-macro/nested-derive-cfg.stdout
+++ b/src/test/ui/proc-macro/nested-derive-cfg.stdout
@@ -1,94 +1,81 @@
PRINT-DERIVE INPUT (DISPLAY): struct Foo
-{
- my_array :
- [bool ; { struct Inner { non_removed_inner_field : usize, } 0 }],
-}
+{ my_array : [bool ; { struct Inner { non_removed_inner_field : usize } 0 }] }
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:12:1: 12:7 (#0),
},
Ident {
ident: "Foo",
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:12:8: 12:11 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "my_array",
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:14:5: 14:13 (#0),
},
Punct {
ch: ':',
spacing: Alone,
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:14:13: 14:14 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "bool",
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:14:16: 14:20 (#0),
},
Punct {
ch: ';',
spacing: Alone,
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:14:20: 14:21 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "struct",
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:15:9: 15:15 (#0),
},
Ident {
ident: "Inner",
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:15:16: 15:21 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "non_removed_inner_field",
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:17:13: 17:36 (#0),
},
Punct {
ch: ':',
spacing: Alone,
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:17:36: 17:37 (#0),
},
Ident {
ident: "usize",
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
- },
- Punct {
- ch: ',',
- spacing: Alone,
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:17:38: 17:43 (#0),
},
],
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:15:22: 18:10 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:19:9: 19:10 (#0),
},
],
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:14:22: 20:6 (#0),
},
],
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
- },
- Punct {
- ch: ',',
- spacing: Alone,
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:14:15: 20:7 (#0),
},
],
- span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
+ span: $DIR/nested-derive-cfg.rs:12:12: 21:2 (#0),
},
]
diff --git a/src/test/ui/proc-macro/simple-tuple.rs b/src/test/ui/proc-macro/simple-tuple.rs
new file mode 100644
index 0000000..c94c587
--- /dev/null
+++ b/src/test/ui/proc-macro/simple-tuple.rs
@@ -0,0 +1,19 @@
+// check-pass
+// compile-flags: -Z span-debug --error-format human
+// aux-build:test-macros.rs
+// edition:2018
+
+#![feature(proc_macro_hygiene)]
+
+#![no_std] // Don't load unnecessary hygiene information from std
+extern crate std;
+
+#[macro_use]
+extern crate test_macros;
+
+fn main() {
+ #[print_target_and_args(my_arg)] (
+ #![cfg_attr(not(FALSE), allow(unused))]
+ 1, 2, 3
+ );
+}
diff --git a/src/test/ui/proc-macro/simple-tuple.stdout b/src/test/ui/proc-macro/simple-tuple.stdout
new file mode 100644
index 0000000..1cc8579
--- /dev/null
+++ b/src/test/ui/proc-macro/simple-tuple.stdout
@@ -0,0 +1,79 @@
+PRINT-ATTR_ARGS INPUT (DISPLAY): my_arg
+PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
+ Ident {
+ ident: "my_arg",
+ span: $DIR/simple-tuple.rs:15:29: 15:35 (#0),
+ },
+]
+PRINT-ATTR INPUT (DISPLAY): (# ! [allow(unused)] 1, 2, 3) ;
+PRINT-ATTR INPUT (DEBUG): TokenStream [
+ Group {
+ delimiter: Parenthesis,
+ stream: TokenStream [
+ Punct {
+ ch: '#',
+ spacing: Alone,
+ span: $DIR/simple-tuple.rs:16:9: 16:10 (#0),
+ },
+ Punct {
+ ch: '!',
+ spacing: Alone,
+ span: $DIR/simple-tuple.rs:16:10: 16:11 (#0),
+ },
+ Group {
+ delimiter: Bracket,
+ stream: TokenStream [
+ Ident {
+ ident: "allow",
+ span: $DIR/simple-tuple.rs:16:33: 16:38 (#0),
+ },
+ Group {
+ delimiter: Parenthesis,
+ stream: TokenStream [
+ Ident {
+ ident: "unused",
+ span: $DIR/simple-tuple.rs:16:39: 16:45 (#0),
+ },
+ ],
+ span: $DIR/simple-tuple.rs:16:38: 16:46 (#0),
+ },
+ ],
+ span: $DIR/simple-tuple.rs:16:9: 16:10 (#0),
+ },
+ Literal {
+ kind: Integer,
+ symbol: "1",
+ suffix: None,
+ span: $DIR/simple-tuple.rs:17:9: 17:10 (#0),
+ },
+ Punct {
+ ch: ',',
+ spacing: Alone,
+ span: $DIR/simple-tuple.rs:17:10: 17:11 (#0),
+ },
+ Literal {
+ kind: Integer,
+ symbol: "2",
+ suffix: None,
+ span: $DIR/simple-tuple.rs:17:12: 17:13 (#0),
+ },
+ Punct {
+ ch: ',',
+ spacing: Alone,
+ span: $DIR/simple-tuple.rs:17:13: 17:14 (#0),
+ },
+ Literal {
+ kind: Integer,
+ symbol: "3",
+ suffix: None,
+ span: $DIR/simple-tuple.rs:17:15: 17:16 (#0),
+ },
+ ],
+ span: $DIR/simple-tuple.rs:15:38: 18:6 (#0),
+ },
+ Punct {
+ ch: ';',
+ spacing: Alone,
+ span: $DIR/simple-tuple.rs:18:6: 18:7 (#0),
+ },
+]
diff --git a/src/test/ui/proc-macro/trait-fn-args-2015.rs b/src/test/ui/proc-macro/trait-fn-args-2015.rs
index 3a448d4..6b8df78 100644
--- a/src/test/ui/proc-macro/trait-fn-args-2015.rs
+++ b/src/test/ui/proc-macro/trait-fn-args-2015.rs
@@ -3,6 +3,8 @@
// check-pass
// aux-build:test-macros.rs
+#![allow(anonymous_parameters)]
+
#[macro_use]
extern crate test_macros;
diff --git a/src/test/ui/proc-macro/weird-braces.stdout b/src/test/ui/proc-macro/weird-braces.stdout
index 25f0eaf..9908294 100644
--- a/src/test/ui/proc-macro/weird-braces.stdout
+++ b/src/test/ui/proc-macro/weird-braces.stdout
@@ -15,40 +15,40 @@
Punct {
ch: '#',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:17:1: 17:2 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:17:3: 17:24 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "second_outer",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:17:25: 17:37 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:17:24: 17:38 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:17:2: 17:39 (#0),
},
Ident {
ident: "impl",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:1: 18:5 (#0),
},
Ident {
ident: "Bar",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:6: 18:9 (#0),
},
Punct {
ch: '<',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:9: 18:10 (#0),
},
Group {
delimiter: Brace,
@@ -57,54 +57,54 @@
kind: Integer,
symbol: "1",
suffix: None,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:11: 18:12 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:13: 18:14 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:15: 18:16 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:10: 18:17 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:17: 18:18 (#0),
},
Ident {
ident: "for",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:19: 18:22 (#0),
},
Ident {
ident: "Foo",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:23: 18:26 (#0),
},
Punct {
ch: '<',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:26: 18:27 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "true",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:28: 18:32 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:27: 18:33 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:33: 18:34 (#0),
},
Group {
delimiter: Brace,
@@ -112,72 +112,72 @@
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:19:5: 19:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:19:6: 19:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:19:8: 19:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "first_inner",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:19:30: 19:41 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:19:29: 19:42 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:19:7: 19:43 (#0),
},
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:5: 20:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:6: 20:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:8: 20:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "second_inner",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:30: 20:42 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:29: 20:43 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:7: 20:44 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:35: 21:2 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): second_outer
PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
Ident {
ident: "second_outer",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:17:25: 17:37 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } >
@@ -188,16 +188,16 @@
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "impl",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:1: 18:5 (#0),
},
Ident {
ident: "Bar",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:6: 18:9 (#0),
},
Punct {
ch: '<',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:9: 18:10 (#0),
},
Group {
delimiter: Brace,
@@ -206,54 +206,54 @@
kind: Integer,
symbol: "1",
suffix: None,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:11: 18:12 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:13: 18:14 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:15: 18:16 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:10: 18:17 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:17: 18:18 (#0),
},
Ident {
ident: "for",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:19: 18:22 (#0),
},
Ident {
ident: "Foo",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:23: 18:26 (#0),
},
Punct {
ch: '<',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:26: 18:27 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "true",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:28: 18:32 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:27: 18:33 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:33: 18:34 (#0),
},
Group {
delimiter: Brace,
@@ -261,72 +261,72 @@
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:19:5: 19:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:19:6: 19:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:19:8: 19:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "first_inner",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:19:30: 19:41 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:19:29: 19:42 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:19:7: 19:43 (#0),
},
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:5: 20:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:6: 20:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:8: 20:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "second_inner",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:30: 20:42 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:29: 20:43 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:7: 20:44 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:35: 21:2 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): first_inner
PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
Ident {
ident: "first_inner",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:19:30: 19:41 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } >
@@ -334,16 +334,16 @@
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "impl",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:1: 18:5 (#0),
},
Ident {
ident: "Bar",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:6: 18:9 (#0),
},
Punct {
ch: '<',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:9: 18:10 (#0),
},
Group {
delimiter: Brace,
@@ -352,54 +352,54 @@
kind: Integer,
symbol: "1",
suffix: None,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:11: 18:12 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:13: 18:14 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:15: 18:16 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:10: 18:17 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:17: 18:18 (#0),
},
Ident {
ident: "for",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:19: 18:22 (#0),
},
Ident {
ident: "Foo",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:23: 18:26 (#0),
},
Punct {
ch: '<',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:26: 18:27 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "true",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:28: 18:32 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:27: 18:33 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:33: 18:34 (#0),
},
Group {
delimiter: Brace,
@@ -407,58 +407,58 @@
Punct {
ch: '#',
spacing: Joint,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:5: 20:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:6: 20:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:8: 20:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "second_inner",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:30: 20:42 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:29: 20:43 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:7: 20:44 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:35: 21:2 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): second_inner
PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
Ident {
ident: "second_inner",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:20:30: 20:42 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } > { }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "impl",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:1: 18:5 (#0),
},
Ident {
ident: "Bar",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:6: 18:9 (#0),
},
Punct {
ch: '<',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:9: 18:10 (#0),
},
Group {
delimiter: Brace,
@@ -467,58 +467,58 @@
kind: Integer,
symbol: "1",
suffix: None,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:11: 18:12 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:13: 18:14 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:15: 18:16 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:10: 18:17 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:17: 18:18 (#0),
},
Ident {
ident: "for",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:19: 18:22 (#0),
},
Ident {
ident: "Foo",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:23: 18:26 (#0),
},
Punct {
ch: '<',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:26: 18:27 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "true",
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:28: 18:32 (#0),
},
],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:27: 18:33 (#0),
},
Punct {
ch: '>',
spacing: Alone,
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:33: 18:34 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
- span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
+ span: $DIR/weird-braces.rs:18:35: 21:2 (#0),
},
]
diff --git a/src/test/ui/resolve/issue-81508.rs b/src/test/ui/resolve/issue-81508.rs
new file mode 100644
index 0000000..23605cd
--- /dev/null
+++ b/src/test/ui/resolve/issue-81508.rs
@@ -0,0 +1,22 @@
+// Confusing diagnostic when using variable as a type:
+//
+// Previous warnings indicate Foo is not used, when in fact it is
+// used improperly as a variable or constant. New warning points
+// out user may be trying to use variable as a type. Test demonstrates
+// cases for both local variable and const.
+
+fn main() {
+ let Baz: &str = "";
+
+ println!("{}", Baz::Bar); //~ ERROR: failed to resolve: use of undeclared type `Baz`
+}
+
+#[allow(non_upper_case_globals)]
+pub const Foo: &str = "";
+
+mod submod {
+ use super::Foo;
+ fn function() {
+ println!("{}", Foo::Bar); //~ ERROR: failed to resolve: use of undeclared type `Foo`
+ }
+}
diff --git a/src/test/ui/resolve/issue-81508.stderr b/src/test/ui/resolve/issue-81508.stderr
new file mode 100644
index 0000000..1555563
--- /dev/null
+++ b/src/test/ui/resolve/issue-81508.stderr
@@ -0,0 +1,21 @@
+error[E0433]: failed to resolve: use of undeclared type `Baz`
+ --> $DIR/issue-81508.rs:11:20
+ |
+LL | let Baz: &str = "";
+ | --- help: `Baz` is defined here, but is not a type
+LL |
+LL | println!("{}", Baz::Bar);
+ | ^^^ use of undeclared type `Baz`
+
+error[E0433]: failed to resolve: use of undeclared type `Foo`
+ --> $DIR/issue-81508.rs:20:24
+ |
+LL | use super::Foo;
+ | ---------- help: `Foo` is defined here, but is not a type
+LL | fn function() {
+LL | println!("{}", Foo::Bar);
+ | ^^^ use of undeclared type `Foo`
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0433`.
diff --git a/src/test/ui/rfc-2091-track-caller/error-with-naked.rs b/src/test/ui/rfc-2091-track-caller/error-with-naked.rs
index 70ec0e3..9464ffe 100644
--- a/src/test/ui/rfc-2091-track-caller/error-with-naked.rs
+++ b/src/test/ui/rfc-2091-track-caller/error-with-naked.rs
@@ -1,3 +1,4 @@
+// needs-asm-support
#![feature(asm, naked_functions)]
#[track_caller] //~ ERROR cannot use `#[track_caller]` with `#[naked]`
diff --git a/src/test/ui/rfc-2091-track-caller/error-with-naked.stderr b/src/test/ui/rfc-2091-track-caller/error-with-naked.stderr
index 1b49148..5f17d6b 100644
--- a/src/test/ui/rfc-2091-track-caller/error-with-naked.stderr
+++ b/src/test/ui/rfc-2091-track-caller/error-with-naked.stderr
@@ -1,11 +1,11 @@
error[E0736]: cannot use `#[track_caller]` with `#[naked]`
- --> $DIR/error-with-naked.rs:3:1
+ --> $DIR/error-with-naked.rs:4:1
|
LL | #[track_caller]
| ^^^^^^^^^^^^^^^
error[E0736]: cannot use `#[track_caller]` with `#[naked]`
- --> $DIR/error-with-naked.rs:12:5
+ --> $DIR/error-with-naked.rs:13:5
|
LL | #[track_caller]
| ^^^^^^^^^^^^^^^
diff --git a/src/test/ui/rfc-2565-param-attrs/proc-macro-cannot-be-used.rs b/src/test/ui/rfc-2565-param-attrs/proc-macro-cannot-be-used.rs
index fcfa610..54f2f45 100644
--- a/src/test/ui/rfc-2565-param-attrs/proc-macro-cannot-be-used.rs
+++ b/src/test/ui/rfc-2565-param-attrs/proc-macro-cannot-be-used.rs
@@ -1,6 +1,7 @@
// aux-build:ident-mac.rs
#![feature(c_variadic)]
+#![allow(anonymous_parameters)]
extern crate ident_mac;
use ident_mac::id;
diff --git a/src/test/ui/rfc-2565-param-attrs/proc-macro-cannot-be-used.stderr b/src/test/ui/rfc-2565-param-attrs/proc-macro-cannot-be-used.stderr
index 38c5050..e74d05d 100644
--- a/src/test/ui/rfc-2565-param-attrs/proc-macro-cannot-be-used.stderr
+++ b/src/test/ui/rfc-2565-param-attrs/proc-macro-cannot-be-used.stderr
@@ -1,173 +1,173 @@
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:10:23
+ --> $DIR/proc-macro-cannot-be-used.rs:11:23
|
LL | extern "C" { fn ffi(#[id] arg1: i32, #[id] ...); }
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:10:40
+ --> $DIR/proc-macro-cannot-be-used.rs:11:40
|
LL | extern "C" { fn ffi(#[id] arg1: i32, #[id] ...); }
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:14:40
+ --> $DIR/proc-macro-cannot-be-used.rs:15:40
|
LL | unsafe extern "C" fn cvar(arg1: i32, #[id] mut args: ...) {}
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:17:30
+ --> $DIR/proc-macro-cannot-be-used.rs:18:30
|
LL | type Alias = extern "C" fn(#[id] u8, #[id] ...);
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:17:40
+ --> $DIR/proc-macro-cannot-be-used.rs:18:40
|
LL | type Alias = extern "C" fn(#[id] u8, #[id] ...);
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:21:11
+ --> $DIR/proc-macro-cannot-be-used.rs:22:11
|
LL | fn free(#[id] arg1: u8) {
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:23:18
+ --> $DIR/proc-macro-cannot-be-used.rs:24:18
|
LL | let lam = |#[id] W(x), #[id] y: usize| ();
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:23:30
+ --> $DIR/proc-macro-cannot-be-used.rs:24:30
|
LL | let lam = |#[id] W(x), #[id] y: usize| ();
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:29:20
+ --> $DIR/proc-macro-cannot-be-used.rs:30:20
|
LL | fn inherent1(#[id] self, #[id] arg1: u8) {}
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:29:32
+ --> $DIR/proc-macro-cannot-be-used.rs:30:32
|
LL | fn inherent1(#[id] self, #[id] arg1: u8) {}
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:32:20
+ --> $DIR/proc-macro-cannot-be-used.rs:33:20
|
LL | fn inherent2(#[id] &self, #[id] arg1: u8) {}
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:32:33
+ --> $DIR/proc-macro-cannot-be-used.rs:33:33
|
LL | fn inherent2(#[id] &self, #[id] arg1: u8) {}
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:35:24
+ --> $DIR/proc-macro-cannot-be-used.rs:36:24
|
LL | fn inherent3<'a>(#[id] &'a mut self, #[id] arg1: u8) {}
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:35:44
+ --> $DIR/proc-macro-cannot-be-used.rs:36:44
|
LL | fn inherent3<'a>(#[id] &'a mut self, #[id] arg1: u8) {}
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:38:24
+ --> $DIR/proc-macro-cannot-be-used.rs:39:24
|
LL | fn inherent4<'a>(#[id] self: Box<Self>, #[id] arg1: u8) {}
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:38:47
+ --> $DIR/proc-macro-cannot-be-used.rs:39:47
|
LL | fn inherent4<'a>(#[id] self: Box<Self>, #[id] arg1: u8) {}
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:41:40
+ --> $DIR/proc-macro-cannot-be-used.rs:42:40
|
LL | fn issue_64682_associated_fn<'a>(#[id] arg1: u8, #[id] arg2: u8) {}
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:41:56
+ --> $DIR/proc-macro-cannot-be-used.rs:42:56
|
LL | fn issue_64682_associated_fn<'a>(#[id] arg1: u8, #[id] arg2: u8) {}
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:47:17
+ --> $DIR/proc-macro-cannot-be-used.rs:48:17
|
LL | fn trait1(#[id] self, #[id] arg1: u8);
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:47:29
+ --> $DIR/proc-macro-cannot-be-used.rs:48:29
|
LL | fn trait1(#[id] self, #[id] arg1: u8);
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:50:17
+ --> $DIR/proc-macro-cannot-be-used.rs:51:17
|
LL | fn trait2(#[id] &self, #[id] arg1: u8);
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:50:30
+ --> $DIR/proc-macro-cannot-be-used.rs:51:30
|
LL | fn trait2(#[id] &self, #[id] arg1: u8);
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:53:21
+ --> $DIR/proc-macro-cannot-be-used.rs:54:21
|
LL | fn trait3<'a>(#[id] &'a mut self, #[id] arg1: u8);
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:53:41
+ --> $DIR/proc-macro-cannot-be-used.rs:54:41
|
LL | fn trait3<'a>(#[id] &'a mut self, #[id] arg1: u8);
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:56:21
+ --> $DIR/proc-macro-cannot-be-used.rs:57:21
|
LL | fn trait4<'a>(#[id] self: Box<Self>, #[id] arg1: u8, #[id] Vec<u8>);
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:56:44
+ --> $DIR/proc-macro-cannot-be-used.rs:57:44
|
LL | fn trait4<'a>(#[id] self: Box<Self>, #[id] arg1: u8, #[id] Vec<u8>);
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:56:60
+ --> $DIR/proc-macro-cannot-be-used.rs:57:60
|
LL | fn trait4<'a>(#[id] self: Box<Self>, #[id] arg1: u8, #[id] Vec<u8>);
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:60:40
+ --> $DIR/proc-macro-cannot-be-used.rs:61:40
|
LL | fn issue_64682_associated_fn<'a>(#[id] arg1: u8, #[id] arg2: u8);
| ^^ not a non-macro attribute
error: expected non-macro attribute, found attribute macro `id`
- --> $DIR/proc-macro-cannot-be-used.rs:60:56
+ --> $DIR/proc-macro-cannot-be-used.rs:61:56
|
LL | fn issue_64682_associated_fn<'a>(#[id] arg1: u8, #[id] arg2: u8);
| ^^ not a non-macro attribute
diff --git a/src/test/ui/specialization/issue-39448.rs b/src/test/ui/specialization/issue-39448.rs
index 9dd47a4..a15c4bd 100644
--- a/src/test/ui/specialization/issue-39448.rs
+++ b/src/test/ui/specialization/issue-39448.rs
@@ -18,7 +18,7 @@
}
trait FromA<T> {
- fn from(T) -> Self;
+ fn from(t: T) -> Self;
}
impl<T: A, U: A + FromA<T>> FromA<T> for U {
diff --git a/src/test/ui/typeck/issue-65611.rs b/src/test/ui/typeck/issue-65611.rs
index b74ee1b..7645311 100644
--- a/src/test/ui/typeck/issue-65611.rs
+++ b/src/test/ui/typeck/issue-65611.rs
@@ -17,7 +17,7 @@
pub trait Index : PartialEq + Copy {
fn to_usize(self) -> usize;
- fn from(usize) -> Self;
+ fn from(i: usize) -> Self;
}
impl Index for usize {
diff --git a/src/test/ui/unwind-no-uwtable.rs b/src/test/ui/unwind-no-uwtable.rs
new file mode 100644
index 0000000..f249d3f
--- /dev/null
+++ b/src/test/ui/unwind-no-uwtable.rs
@@ -0,0 +1,34 @@
+// run-pass
+// ignore-windows target requires uwtable
+// ignore-wasm32-bare no proper panic=unwind support
+// compile-flags: -C panic=unwind -C force-unwind-tables=n
+
+use std::panic::{self, AssertUnwindSafe};
+
+struct Increase<'a>(&'a mut u8);
+
+impl Drop for Increase<'_> {
+ fn drop(&mut self) {
+ *self.0 += 1;
+ }
+}
+
+#[inline(never)]
+fn unwind() {
+ panic!();
+}
+
+#[inline(never)]
+fn increase(count: &mut u8) {
+ let _increase = Increase(count);
+ unwind();
+}
+
+fn main() {
+ let mut count = 0;
+ assert!(panic::catch_unwind(AssertUnwindSafe(
+ #[inline(never)]
+ || increase(&mut count)
+ )).is_err());
+ assert_eq!(count, 1);
+}
diff --git a/src/test/ui/wrong-mul-method-signature.stderr b/src/test/ui/wrong-mul-method-signature.stderr
index 4c367fb..9f8896f 100644
--- a/src/test/ui/wrong-mul-method-signature.stderr
+++ b/src/test/ui/wrong-mul-method-signature.stderr
@@ -1,26 +1,35 @@
error[E0053]: method `mul` has an incompatible type for trait
- --> $DIR/wrong-mul-method-signature.rs:16:5
+ --> $DIR/wrong-mul-method-signature.rs:16:21
|
LL | fn mul(self, s: &f64) -> Vec1 {
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `f64`, found `&f64`
+ | ^^^^
+ | |
+ | expected `f64`, found `&f64`
+ | help: change the parameter type to match the trait: `f64`
|
= note: expected fn pointer `fn(Vec1, f64) -> Vec1`
found fn pointer `fn(Vec1, &f64) -> Vec1`
error[E0053]: method `mul` has an incompatible type for trait
- --> $DIR/wrong-mul-method-signature.rs:33:5
+ --> $DIR/wrong-mul-method-signature.rs:33:21
|
LL | fn mul(self, s: f64) -> Vec2 {
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected struct `Vec2`, found `f64`
+ | ^^^
+ | |
+ | expected struct `Vec2`, found `f64`
+ | help: change the parameter type to match the trait: `Vec2`
|
= note: expected fn pointer `fn(Vec2, Vec2) -> f64`
found fn pointer `fn(Vec2, f64) -> Vec2`
error[E0053]: method `mul` has an incompatible type for trait
- --> $DIR/wrong-mul-method-signature.rs:52:5
+ --> $DIR/wrong-mul-method-signature.rs:52:29
|
LL | fn mul(self, s: f64) -> f64 {
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `i32`, found `f64`
+ | ^^^
+ | |
+ | expected `i32`, found `f64`
+ | help: change the output type to match the trait: `i32`
|
= note: expected fn pointer `fn(Vec3, _) -> i32`
found fn pointer `fn(Vec3, _) -> f64`
diff --git a/src/tools/compiletest/src/header.rs b/src/tools/compiletest/src/header.rs
index 531a23d..363105a 100644
--- a/src/tools/compiletest/src/header.rs
+++ b/src/tools/compiletest/src/header.rs
@@ -44,6 +44,7 @@
let mut props = EarlyProps::default();
let rustc_has_profiler_support = env::var_os("RUSTC_PROFILER_SUPPORT").is_some();
let rustc_has_sanitizer_support = env::var_os("RUSTC_SANITIZER_SUPPORT").is_some();
+ let has_asm_support = util::has_asm_support(&config.target);
let has_asan = util::ASAN_SUPPORTED_TARGETS.contains(&&*config.target);
let has_lsan = util::LSAN_SUPPORTED_TARGETS.contains(&&*config.target);
let has_msan = util::MSAN_SUPPORTED_TARGETS.contains(&&*config.target);
@@ -76,6 +77,10 @@
props.ignore = true;
}
+ if !has_asm_support && config.parse_name_directive(ln, "needs-asm-support") {
+ props.ignore = true;
+ }
+
if !rustc_has_profiler_support && config.parse_needs_profiler_support(ln) {
props.ignore = true;
}
diff --git a/src/tools/compiletest/src/header/tests.rs b/src/tools/compiletest/src/header/tests.rs
index ec99fde..c41b43c 100644
--- a/src/tools/compiletest/src/header/tests.rs
+++ b/src/tools/compiletest/src/header/tests.rs
@@ -224,6 +224,17 @@
}
#[test]
+fn asm_support() {
+ let mut config = config();
+
+ config.target = "avr-unknown-gnu-atmega328".to_owned();
+ assert!(parse_rs(&config, "// needs-asm-support").ignore);
+
+ config.target = "i686-unknown-netbsd".to_owned();
+ assert!(!parse_rs(&config, "// needs-asm-support").ignore);
+}
+
+#[test]
fn test_extract_version_range() {
use super::{extract_llvm_version, extract_version_range};
diff --git a/src/tools/compiletest/src/util.rs b/src/tools/compiletest/src/util.rs
index b302953..7dbd709 100644
--- a/src/tools/compiletest/src/util.rs
+++ b/src/tools/compiletest/src/util.rs
@@ -128,6 +128,15 @@
"sparcv9",
];
+static ASM_SUPPORTED_ARCHS: &[&str] = &[
+ "x86", "x86_64", "arm", "aarch64", "riscv32", "riscv64", "nvptx64", "hexagon", "mips",
+ "mips64", "spirv", "wasm32",
+];
+
+pub fn has_asm_support(triple: &str) -> bool {
+ ASM_SUPPORTED_ARCHS.contains(&get_arch(triple))
+}
+
pub fn matches_os(triple: &str, name: &str) -> bool {
// For the wasm32 bare target we ignore anything also ignored on emscripten
// and then we also recognize `wasm32-bare` as the os for the target
diff --git a/src/tools/miri b/src/tools/miri
index b9b2af9..b13b79d 160000
--- a/src/tools/miri
+++ b/src/tools/miri
@@ -1 +1 @@
-Subproject commit b9b2af9729243ab8d5b02cca2e19ce93cc23c1b1
+Subproject commit b13b79db73e3fa692fc648a8cd70f162a5eade34
diff --git a/src/tools/rls b/src/tools/rls
index fd1df15..32c0fe0 160000
--- a/src/tools/rls
+++ b/src/tools/rls
@@ -1 +1 @@
-Subproject commit fd1df1554a22accde727e8c4bdeb2a065627d10c
+Subproject commit 32c0fe006dcdc13e1ca0ca31de543e4436c1299e
diff --git a/src/tools/rust-analyzer b/src/tools/rust-analyzer
index 19e09a4..7be0613 160000
--- a/src/tools/rust-analyzer
+++ b/src/tools/rust-analyzer
@@ -1 +1 @@
-Subproject commit 19e09a4a54c75312aeaac04577f2d0e067463ab6
+Subproject commit 7be06139b632ee615fc18af04dd67947e2c794b2
diff --git a/src/tools/rustfmt b/src/tools/rustfmt
index 7de6968..0bd2b19 160000
--- a/src/tools/rustfmt
+++ b/src/tools/rustfmt
@@ -1 +1 @@
-Subproject commit 7de6968ee22696b7feb6b477a05656de89275291
+Subproject commit 0bd2b1927c2b02a6fe7447d58e897cf1f1a1d41f
diff --git a/src/tools/tidy/src/style.rs b/src/tools/tidy/src/style.rs
index e99dd45..3d5f39e 100644
--- a/src/tools/tidy/src/style.rs
+++ b/src/tools/tidy/src/style.rs
@@ -379,11 +379,9 @@
if let Directive::Ignore(false) = skip_tab {
tidy_error!(bad, "{}: ignoring tab characters unnecessarily", file.display());
}
- // FIXME: Temporarily disabled to simplify landing the ignore-rules for the line
- // length check (https://github.com/rust-lang/rust/issues/77548):
- //if let Directive::Ignore(false) = skip_line_length {
- // tidy_error!(bad, "{}: ignoring line length unnecessarily", file.display());
- //}
+ if let Directive::Ignore(false) = skip_line_length {
+ tidy_error!(bad, "{}: ignoring line length unnecessarily", file.display());
+ }
if let Directive::Ignore(false) = skip_file_length {
tidy_error!(bad, "{}: ignoring file length unnecessarily", file.display());
}