Auto merge of #146267 - matthiaskrgr:rollup-tbz7shx, r=matthiaskrgr

Rollup of 5 pull requests

Successful merges:

 - rust-lang/rust#127316 (move pinned version from tracing_core to tracing)
 - rust-lang/rust#144801 (Suggest bounds in more cases, accounting for type parameters referenced in predicate)
 - rust-lang/rust#146211 (Disallow shebang in `--cfg` and `--check-cfg` arguments)
 - rust-lang/rust#146263 (Fix `bump-stage0` build failure, and check-build `bump-stage0` in CI)
 - rust-lang/rust#146266 (miri std tests: skip all of sys::)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors
2025-09-06 12:10:21 +00:00
22 changed files with 232 additions and 53 deletions
+3 -4
View File
@@ -336,7 +336,7 @@ dependencies = [
"curl",
"indexmap",
"serde",
"toml 0.7.8",
"toml 0.8.23",
]
[[package]]
@@ -4072,7 +4072,6 @@ name = "rustc_log"
version = "0.0.0"
dependencies = [
"tracing",
"tracing-core",
"tracing-subscriber",
"tracing-tree",
]
@@ -5541,9 +5540,9 @@ dependencies = [
[[package]]
name = "tracing-core"
version = "0.1.30"
version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a"
checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
dependencies = [
"once_cell",
"valuable",
+2 -2
View File
@@ -5,8 +5,8 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
tracing = "0.1.28"
tracing-core = "=0.1.30" # FIXME(Nilstrieb) tracing has a deadlock: https://github.com/tokio-rs/tracing/issues/2635
# tracing versions > 0.1.37 have a huge binary size / instruction count regression
tracing = "=0.1.37"
tracing-subscriber = { version = "0.3.3", default-features = false, features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"] }
tracing-tree = "0.3.1"
# tidy-alphabetical-end
+1 -1
View File
@@ -38,7 +38,7 @@
use std::io::{self, IsTerminal};
use tracing::dispatcher::SetGlobalDefaultError;
use tracing_core::{Event, Subscriber};
use tracing::{Event, Subscriber};
use tracing_subscriber::filter::{Directive, EnvFilter, LevelFilter};
use tracing_subscriber::fmt::FmtContext;
use tracing_subscriber::fmt::format::{self, FormatEvent, FormatFields};
+30 -5
View File
@@ -44,19 +44,44 @@ pub(crate) struct UnmatchedDelim {
pub candidate_span: Option<Span>,
}
/// Which tokens should be stripped before lexing the tokens.
///
/// Note that there is deliberately no "strip frontmatter but not shebang"
/// variant: frontmatter is only ever stripped together with a (potential)
/// shebang.
pub(crate) enum StripTokens {
    /// Strip both shebang and frontmatter.
    ShebangAndFrontmatter,
    /// Strip the shebang but not frontmatter.
    ///
    /// That means that char sequences looking like frontmatter are simply
    /// interpreted as regular Rust lexemes.
    Shebang,
    /// Strip nothing.
    ///
    /// In other words, char sequences looking like a shebang or frontmatter
    /// are simply interpreted as regular Rust lexemes.
    Nothing,
}
pub(crate) fn lex_token_trees<'psess, 'src>(
psess: &'psess ParseSess,
mut src: &'src str,
mut start_pos: BytePos,
override_span: Option<Span>,
frontmatter_allowed: FrontmatterAllowed,
strip_tokens: StripTokens,
) -> Result<TokenStream, Vec<Diag<'psess>>> {
// Skip `#!`, if present.
if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
src = &src[shebang_len..];
start_pos = start_pos + BytePos::from_usize(shebang_len);
match strip_tokens {
StripTokens::Shebang | StripTokens::ShebangAndFrontmatter => {
if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
src = &src[shebang_len..];
start_pos = start_pos + BytePos::from_usize(shebang_len);
}
}
StripTokens::Nothing => {}
}
let frontmatter_allowed = match strip_tokens {
StripTokens::ShebangAndFrontmatter => FrontmatterAllowed::Yes,
StripTokens::Shebang | StripTokens::Nothing => FrontmatterAllowed::No,
};
let cursor = Cursor::new(src, frontmatter_allowed);
let mut lexer = Lexer {
psess,
+12 -17
View File
@@ -21,7 +21,6 @@
use rustc_ast::{AttrItem, Attribute, MetaItemInner, token};
use rustc_ast_pretty::pprust;
use rustc_errors::{Diag, EmissionGuarantee, FatalError, PResult, pluralize};
use rustc_lexer::FrontmatterAllowed;
use rustc_session::parse::ParseSess;
use rustc_span::source_map::SourceMap;
use rustc_span::{FileName, SourceFile, Span};
@@ -34,6 +33,8 @@
use parser::Parser;
use rustc_ast::token::Delimiter;
use crate::lexer::StripTokens;
pub mod lexer;
mod errors;
@@ -62,10 +63,10 @@ pub fn new_parser_from_source_str(
source: String,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
let source_file = psess.source_map().new_source_file(name, source);
new_parser_from_source_file(psess, source_file, FrontmatterAllowed::Yes)
new_parser_from_source_file(psess, source_file, StripTokens::ShebangAndFrontmatter)
}
/// Creates a new parser from a simple (no frontmatter) source string.
/// Creates a new parser from a simple (no shebang, no frontmatter) source string.
///
/// On failure, the errors must be consumed via `unwrap_or_emit_fatal`, `emit`, `cancel`,
/// etc., otherwise a panic will occur when they are dropped.
@@ -75,7 +76,7 @@ pub fn new_parser_from_simple_source_str(
source: String,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
let source_file = psess.source_map().new_source_file(name, source);
new_parser_from_source_file(psess, source_file, FrontmatterAllowed::No)
new_parser_from_source_file(psess, source_file, StripTokens::Nothing)
}
/// Creates a new parser from a filename. On failure, the errors must be consumed via
@@ -109,7 +110,7 @@ pub fn new_parser_from_file<'a>(
}
err.emit();
});
new_parser_from_source_file(psess, source_file, FrontmatterAllowed::Yes)
new_parser_from_source_file(psess, source_file, StripTokens::ShebangAndFrontmatter)
}
pub fn utf8_error<E: EmissionGuarantee>(
@@ -160,10 +161,10 @@ pub fn utf8_error<E: EmissionGuarantee>(
fn new_parser_from_source_file(
psess: &ParseSess,
source_file: Arc<SourceFile>,
frontmatter_allowed: FrontmatterAllowed,
strip_tokens: StripTokens,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
let end_pos = source_file.end_position();
let stream = source_file_to_stream(psess, source_file, None, frontmatter_allowed)?;
let stream = source_file_to_stream(psess, source_file, None, strip_tokens)?;
let mut parser = Parser::new(psess, stream, None);
if parser.token == token::Eof {
parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
@@ -179,8 +180,8 @@ pub fn source_str_to_stream(
) -> Result<TokenStream, Vec<Diag<'_>>> {
let source_file = psess.source_map().new_source_file(name, source);
// used mainly for `proc_macro` and the likes, not for our parsing purposes, so don't parse
// frontmatters as frontmatters.
source_file_to_stream(psess, source_file, override_span, FrontmatterAllowed::No)
// frontmatters as frontmatters, but for compatibility reasons still strip the shebang
source_file_to_stream(psess, source_file, override_span, StripTokens::Shebang)
}
/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
@@ -189,7 +190,7 @@ fn source_file_to_stream<'psess>(
psess: &'psess ParseSess,
source_file: Arc<SourceFile>,
override_span: Option<Span>,
frontmatter_allowed: FrontmatterAllowed,
strip_tokens: StripTokens,
) -> Result<TokenStream, Vec<Diag<'psess>>> {
let src = source_file.src.as_ref().unwrap_or_else(|| {
psess.dcx().bug(format!(
@@ -198,13 +199,7 @@ fn source_file_to_stream<'psess>(
));
});
lexer::lex_token_trees(
psess,
src.as_str(),
source_file.start_pos,
override_span,
frontmatter_allowed,
)
lexer::lex_token_trees(psess, src.as_str(), source_file.start_pos, override_span, strip_tokens)
}
/// Runs the given subparser `f` on the tokens of the given `attr`'s item.
@@ -32,8 +32,8 @@
};
use rustc_middle::ty::{
self, AdtKind, GenericArgs, InferTy, IsSuggestable, Ty, TyCtxt, TypeFoldable, TypeFolder,
TypeSuperFoldable, TypeVisitableExt, TypeckResults, Upcast, suggest_arbitrary_trait_bound,
suggest_constraining_type_param,
TypeSuperFoldable, TypeSuperVisitable, TypeVisitableExt, TypeVisitor, TypeckResults, Upcast,
suggest_arbitrary_trait_bound, suggest_constraining_type_param,
};
use rustc_middle::{bug, span_bug};
use rustc_span::def_id::LocalDefId;
@@ -263,6 +263,9 @@ pub fn suggest_restricting_param_bound(
_ => (false, None),
};
let mut finder = ParamFinder { .. };
finder.visit_binder(&trait_pred);
// FIXME: Add check for trait bound that is already present, particularly `?Sized` so we
// don't suggest `T: Sized + ?Sized`.
loop {
@@ -411,6 +414,26 @@ pub fn suggest_restricting_param_bound(
}
}
hir::Node::TraitItem(hir::TraitItem {
generics,
kind: hir::TraitItemKind::Fn(..),
..
})
| hir::Node::ImplItem(hir::ImplItem {
generics,
trait_item_def_id: None,
kind: hir::ImplItemKind::Fn(..),
..
}) if finder.can_suggest_bound(generics) => {
// Missing generic type parameter bound.
suggest_arbitrary_trait_bound(
self.tcx,
generics,
err,
trait_pred,
associated_ty,
);
}
hir::Node::Item(hir::Item {
kind:
hir::ItemKind::Struct(_, generics, _)
@@ -423,7 +446,7 @@ pub fn suggest_restricting_param_bound(
| hir::ItemKind::Const(_, generics, _, _)
| hir::ItemKind::TraitAlias(_, generics, _),
..
}) if !param_ty => {
}) if finder.can_suggest_bound(generics) => {
// Missing generic type parameter bound.
if suggest_arbitrary_trait_bound(
self.tcx,
@@ -5068,8 +5091,7 @@ fn suggest_indirection_for_unsized(
// Suggesting `T: ?Sized` is only valid in an ADT if `T` is only used in a
// borrow. `struct S<'a, T: ?Sized>(&'a T);` is valid, `struct S<T: ?Sized>(T);`
// is not. Look for invalid "bare" parameter uses, and suggest using indirection.
let mut visitor =
FindTypeParam { param: param.name.ident().name, invalid_spans: vec![], nested: false };
let mut visitor = FindTypeParam { param: param.name.ident().name, .. };
visitor.visit_item(item);
if visitor.invalid_spans.is_empty() {
return false;
@@ -5228,7 +5250,7 @@ fn hint_missing_borrow<'tcx>(
/// Used to suggest replacing associated types with an explicit type in `where` clauses.
#[derive(Debug)]
pub struct SelfVisitor<'v> {
pub paths: Vec<&'v hir::Ty<'v>>,
pub paths: Vec<&'v hir::Ty<'v>> = Vec::new(),
pub name: Option<Symbol>,
}
@@ -5599,7 +5621,7 @@ fn point_at_assoc_type_restriction<G: EmissionGuarantee>(
);
// Search for the associated type `Self::{name}`, get
// its type and suggest replacing the bound with it.
let mut visitor = SelfVisitor { paths: vec![], name: Some(name) };
let mut visitor = SelfVisitor { name: Some(name), .. };
visitor.visit_trait_ref(trait_ref);
for path in visitor.paths {
err.span_suggestion_verbose(
@@ -5610,7 +5632,7 @@ fn point_at_assoc_type_restriction<G: EmissionGuarantee>(
);
}
} else {
let mut visitor = SelfVisitor { paths: vec![], name: None };
let mut visitor = SelfVisitor { name: None, .. };
visitor.visit_trait_ref(trait_ref);
let span: MultiSpan =
visitor.paths.iter().map(|p| p.span).collect::<Vec<Span>>().into();
@@ -5640,8 +5662,8 @@ fn get_deref_type_and_refs(mut ty: Ty<'_>) -> (Ty<'_>, Vec<hir::Mutability>) {
/// `param: ?Sized` would be a valid constraint.
struct FindTypeParam {
param: rustc_span::Symbol,
invalid_spans: Vec<Span>,
nested: bool,
invalid_spans: Vec<Span> = Vec::new(),
nested: bool = false,
}
impl<'v> Visitor<'v> for FindTypeParam {
@@ -5679,3 +5701,38 @@ fn visit_ty(&mut self, ty: &hir::Ty<'_, AmbigArg>) {
}
}
}
/// Looks for type parameters referenced in predicates. We use this to identify whether a bound
/// is suitable to suggest on a given item.
struct ParamFinder {
    /// Names of the `ty::Param`s encountered while visiting a predicate
    /// (collected by the `TypeVisitor` impl below).
    params: Vec<Symbol> = Vec::new(),
}
impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for ParamFinder {
    fn visit_ty(&mut self, t: Ty<'tcx>) -> Self::Result {
        // Record the name of every type parameter we see; all other kinds of
        // types are of no interest at this level.
        match t.kind() {
            ty::Param(p) => self.params.push(p.name),
            _ => {}
        }
        // Recurse so that params nested inside other types are collected too.
        t.super_visit_with(self)
    }
}
impl ParamFinder {
    /// Whether the `hir::Generics` of the current item can suggest the evaluated bound because its
    /// references to type parameters are present in the generics.
    fn can_suggest_bound(&self, generics: &hir::Generics<'_>) -> bool {
        if self.params.is_empty() {
            // There are no references to type parameters at all, so suggesting the bound
            // would be reasonable.
            return true;
        }
        generics.params.iter().any(|p| match p.name {
            hir::ParamName::Plain(p_name) => {
                // All of the parameters in the bound can be referenced in the current item.
                // NOTE(review): despite the "all" wording above, this uses `any` twice — it
                // accepts the generics as soon as *one* collected param matches *one* generic
                // param (or is `Self`). Confirm that `any` rather than `all` is intended here.
                self.params.iter().any(|p| *p == p_name.name || *p == kw::SelfUpper)
            }
            // Non-plain param names are conservatively treated as matching, so the
            // suggestion is not suppressed for them.
            _ => true,
        })
    }
}
@@ -19,6 +19,7 @@
#![feature(assert_matches)]
#![feature(associated_type_defaults)]
#![feature(box_patterns)]
#![feature(default_field_values)]
#![feature(if_let_guard)]
#![feature(iter_intersperse)]
#![feature(iterator_try_reduce)]
+1 -1
View File
@@ -76,7 +76,7 @@ check-aux:
library/std \
$(BOOTSTRAP_ARGS) \
-- \
--skip fs:: --skip net:: --skip process:: --skip sys::fd:: --skip sys::pal::
--skip fs:: --skip net:: --skip process:: --skip sys::
# Also test some very target-specific modules on other targets
# (making sure to cover an i686 target as well).
$(Q)MIRIFLAGS="-Zmiri-disable-isolation" BOOTSTRAP_SKIP_TARGET_SANITY=1 \
@@ -839,3 +839,9 @@ fn run_tool_check_step(
mode: |_builder| Mode::ToolBootstrap,
default: false
});
tool_check_step!(BumpStage0 {
path: "src/tools/bump-stage0",
mode: |_builder| Mode::ToolBootstrap,
default: false
});
+1
View File
@@ -1061,6 +1061,7 @@ macro_rules! describe {
check::FeaturesStatusDump,
check::CoverageDump,
check::Linkchecker,
check::BumpStage0,
// This has special staging logic, it may run on stage 1 while others run on stage 0.
// It takes quite some time to build stage 1, so put this at the end.
//
@@ -28,6 +28,7 @@ RUN sh /scripts/sccache.sh
ENV SCRIPT \
python3 ../x.py check && \
python3 ../x.py check src/tools/bump-stage0 && \
python3 ../x.py clippy ci --stage 2 && \
python3 ../x.py test --stage 1 core alloc std test proc_macro && \
python3 ../x.py test --stage 1 src/tools/compiletest && \
+1 -1
View File
@@ -11,4 +11,4 @@ build_helper = { path = "../../build_helper" }
curl = "0.4.38"
indexmap = { version = "2.0.0", features = ["serde"] }
serde = { version = "1.0.125", features = ["derive"] }
toml = "0.7"
toml = "0.8.23"
+5 -1
View File
@@ -185,7 +185,11 @@ fn fetch_manifest(
format!("{}/dist/channel-rust-{}.toml", config.dist_server, channel)
};
Ok(toml::from_slice(&http_get(&url)?)?)
// FIXME: on newer `toml` (>= `0.9.*`), use `toml::from_slice`. For now, we use the most recent
// `toml` available in-tree, which is `0.8.*`, so we have to do an additional dance here.
let response = http_get(&url)?;
let response = String::from_utf8(response)?;
Ok(toml::from_str(&response)?)
}
fn http_get(url: &str) -> Result<Vec<u8>, Error> {
@@ -0,0 +1,3 @@
error: invalid `--cfg` argument: `#!/usr/bin/shebang
key` (expected `key` or `key="value"`)
@@ -0,0 +1,6 @@
error: invalid `--check-cfg` argument: `#!/usr/bin/shebang
cfg(key)`
|
= note: expected `cfg(name, values("value1", "value2", ... "valueN"))`
= note: visit <https://doc.rust-lang.org/nightly/rustc/check-cfg.html> for more details
+24 -7
View File
@@ -1,15 +1,14 @@
use run_make_support::{cwd, diff, rustc};
fn test_and_compare(flag: &str, val: &str) {
fn test_and_compare(test_name: &str, flag: &str, val: &str) {
let mut cmd = rustc();
let output =
cmd.input("").arg("--crate-type=lib").arg(&format!("--{flag}")).arg(val).run_fail();
let output = cmd.input("").arg("--crate-type=lib").arg(flag).arg(val).run_fail();
assert_eq!(output.stdout_utf8(), "");
diff()
.expected_file(format!("{flag}.stderr"))
.actual_text("output", output.stderr_utf8())
.expected_file(format!("{test_name}.stderr"))
.actual_text("stderr", output.stderr_utf8())
.run();
}
@@ -17,7 +16,8 @@ fn main() {
// Verify that frontmatter isn't allowed in `--cfg` arguments.
// https://github.com/rust-lang/rust/issues/146130
test_and_compare(
"cfg",
"cfg-frontmatter",
"--cfg",
r#"---
---
key"#,
@@ -26,9 +26,26 @@ fn main() {
// Verify that frontmatter isn't allowed in `--check-cfg` arguments.
// https://github.com/rust-lang/rust/issues/146130
test_and_compare(
"check-cfg",
"check-cfg-frontmatter",
"--check-cfg",
r#"---
---
cfg(key)"#,
);
// Verify that shebang isn't allowed in `--cfg` arguments.
test_and_compare(
"cfg-shebang",
"--cfg",
r#"#!/usr/bin/shebang
key"#,
);
// Verify that shebang isn't allowed in `--check-cfg` arguments.
test_and_compare(
"check-cfg-shebang",
"--check-cfg",
r#"#!/usr/bin/shebang
cfg(key)"#,
);
}
@@ -21,10 +21,6 @@ LL | [0u8; std::mem::size_of::<Self::A>()] == Self::P;
| ^^ no implementation for `[u8; std::mem::size_of::<Self::A>()] == <Self as T>::A`
|
= help: the trait `PartialEq<<Self as T>::A>` is not implemented for `[u8; std::mem::size_of::<Self::A>()]`
help: consider introducing a `where` clause, but there might be an alternative better way to express this requirement
|
LL | pub trait T where [u8; std::mem::size_of::<Self::A>()]: PartialEq<<Self as T>::A> {
| +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
error: aborting due to 3 previous errors
@@ -0,0 +1,23 @@
//@ run-rustfix
#![allow(dead_code)]
struct Application;
// https://github.com/rust-lang/rust/issues/144734
trait Trait {
type Error: std::error::Error;
fn run(&self) -> Result<(), Self::Error>;
}
#[derive(Debug)]
enum ApplicationError {
Quit,
}
impl Application {
fn thing<T: Trait>(&self, t: T) -> Result<(), ApplicationError> where ApplicationError: From<<T as Trait>::Error> {
t.run()?; //~ ERROR E0277
Ok(())
}
}
fn main() {}
@@ -0,0 +1,23 @@
//@ run-rustfix
#![allow(dead_code)]
struct Application;
// https://github.com/rust-lang/rust/issues/144734
trait Trait {
type Error: std::error::Error;
fn run(&self) -> Result<(), Self::Error>;
}
#[derive(Debug)]
enum ApplicationError {
Quit,
}
impl Application {
fn thing<T: Trait>(&self, t: T) -> Result<(), ApplicationError> {
t.run()?; //~ ERROR E0277
Ok(())
}
}
fn main() {}
@@ -0,0 +1,22 @@
error[E0277]: `?` couldn't convert the error to `ApplicationError`
--> $DIR/suggest-complex-bound-on-method.rs:18:16
|
LL | t.run()?;
| -----^ the trait `From<<T as Trait>::Error>` is not implemented for `ApplicationError`
| |
| this can't be annotated with `?` because it has type `Result<_, <T as Trait>::Error>`
|
note: `ApplicationError` needs to implement `From<<T as Trait>::Error>`
--> $DIR/suggest-complex-bound-on-method.rs:12:1
|
LL | enum ApplicationError {
| ^^^^^^^^^^^^^^^^^^^^^
= note: the question mark operation (`?`) implicitly performs a conversion on the error value using the `From` trait
help: consider introducing a `where` clause, but there might be an alternative better way to express this requirement
|
LL | fn thing<T: Trait>(&self, t: T) -> Result<(), ApplicationError> where ApplicationError: From<<T as Trait>::Error> {
| +++++++++++++++++++++++++++++++++++++++++++++++++
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0277`.