Auto merge of #153416 - JonathanBrouwer:rollup-eezxWTV, r=JonathanBrouwer

Rollup of 12 pull requests



Successful merges:

 - rust-lang/rust#153402 (miri subtree update)
 - rust-lang/rust#152164 (Lint unused features)
 - rust-lang/rust#152801 (Refactor WriteBackendMethods a bit)
 - rust-lang/rust#153196 (Update path separators to be available in const context)
 - rust-lang/rust#153204 (Add `#[must_use]` attribute to `HashMap` and `HashSet` constructors)
 - rust-lang/rust#153317 (Abort after `representability` errors)
 - rust-lang/rust#153276 (Remove `cycle_fatal` query modifier)
 - rust-lang/rust#153300 (Tweak some of our internal `#[rustc_*]` TEST attributes)
 - rust-lang/rust#153396 (use `minicore` in some `run-make` tests)
 - rust-lang/rust#153401 (Migration of `LintDiagnostic` - part 7)
 - rust-lang/rust#153406 (Remove a ping for myself)
 - rust-lang/rust#153414 (Rename translation -> formatting)
This commit is contained in:
bors
2026-03-05 00:14:57 +00:00
305 changed files with 1819 additions and 1657 deletions
@@ -1,5 +1,5 @@
use rustc_hir::Target;
use rustc_hir::attrs::AttributeKind;
use rustc_hir::{MethodKind, Target};
use rustc_span::{Span, Symbol, sym};
use crate::attributes::prelude::Allow;
@@ -25,6 +25,20 @@ impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpDefParentsParser {
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcDumpDefParents;
}
// Parser for the internal `#[rustc_dump_inferred_outlives]` TEST attribute (takes no arguments).
pub(crate) struct RustcDumpInferredOutlivesParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpInferredOutlivesParser {
// Attribute path: `rustc_dump_inferred_outlives`.
const PATH: &[Symbol] = &[sym::rustc_dump_inferred_outlives];
// Repeating the attribute on one item is downgraded to a warning.
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
// Only accepted on type definitions: structs, enums, unions, and type aliases.
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[
Allow(Target::Struct),
Allow(Target::Enum),
Allow(Target::Union),
Allow(Target::TyAlias),
]);
// The attribute carries no payload; the span argument is intentionally discarded.
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcDumpInferredOutlives;
}
pub(crate) struct RustcDumpItemBoundsParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpItemBoundsParser {
@@ -34,21 +48,88 @@ impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpItemBoundsParser {
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcDumpItemBounds;
}
// Parser for the internal `#[rustc_dump_object_lifetime_defaults]` TEST attribute
// (takes no arguments).
pub(crate) struct RustcDumpObjectLifetimeDefaultsParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpObjectLifetimeDefaultsParser {
// Attribute path: `rustc_dump_object_lifetime_defaults`.
const PATH: &[Symbol] = &[sym::rustc_dump_object_lifetime_defaults];
// Unlike the variance/outlives dump attributes above, a duplicate here is a hard error.
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
// Broad allow-list: any item that can introduce generic parameters with object
// lifetime defaults (consts, fns, impls, methods, ADTs, traits, aliases).
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[
Allow(Target::AssocConst),
Allow(Target::AssocTy),
Allow(Target::Const),
Allow(Target::Enum),
Allow(Target::Fn),
Allow(Target::ForeignFn),
// Both inherent impls and trait impls are permitted.
Allow(Target::Impl { of_trait: false }),
Allow(Target::Impl { of_trait: true }),
// All four method kinds: inherent, trait decl (with/without body), trait impl.
Allow(Target::Method(MethodKind::Inherent)),
Allow(Target::Method(MethodKind::Trait { body: false })),
Allow(Target::Method(MethodKind::Trait { body: true })),
Allow(Target::Method(MethodKind::TraitImpl)),
Allow(Target::Struct),
Allow(Target::Trait),
Allow(Target::TraitAlias),
Allow(Target::TyAlias),
Allow(Target::Union),
]);
// No payload; the span argument is discarded.
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcDumpObjectLifetimeDefaults;
}
// Parser for the internal `#[rustc_dump_predicates]` TEST attribute (takes no arguments).
pub(crate) struct RustcDumpPredicatesParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpPredicatesParser {
// Attribute path: `rustc_dump_predicates`.
const PATH: &[Symbol] = &[sym::rustc_dump_predicates];
// Duplicate occurrences are a hard error.
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[
// NOTE(review): Struct, Enum, Union, and Trait appear TWICE in this list
// (here and again below). This looks like residue of a rendered diff where
// the removed (old, short) allow-list and the added (new, long) one were
// fused together — confirm against the applied version of the file and
// drop the stale leading entries.
Allow(Target::Struct),
Allow(Target::Enum),
Allow(Target::Union),
Allow(Target::Trait),
Allow(Target::AssocConst),
Allow(Target::AssocTy),
Allow(Target::Const),
// Delegation items, whether macro-generated or not.
Allow(Target::Delegation { mac: false }),
Allow(Target::Delegation { mac: true }),
Allow(Target::Enum),
Allow(Target::Fn),
Allow(Target::Impl { of_trait: false }),
Allow(Target::Impl { of_trait: true }),
Allow(Target::Method(MethodKind::Inherent)),
Allow(Target::Method(MethodKind::Trait { body: false })),
Allow(Target::Method(MethodKind::Trait { body: true })),
Allow(Target::Method(MethodKind::TraitImpl)),
Allow(Target::Struct),
Allow(Target::Trait),
Allow(Target::TraitAlias),
Allow(Target::TyAlias),
Allow(Target::Union),
]);
// No payload; the span argument is discarded.
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcDumpPredicates;
}
// Parser for the internal `#[rustc_dump_variances]` TEST attribute (takes no arguments).
pub(crate) struct RustcDumpVariancesParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpVariancesParser {
// Attribute path: `rustc_dump_variances`.
const PATH: &[Symbol] = &[sym::rustc_dump_variances];
// A duplicate occurrence is only a warning.
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
// Accepted on items for which variance is computed: ADTs, free fns, and methods.
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[
Allow(Target::Enum),
Allow(Target::Fn),
Allow(Target::Method(MethodKind::Inherent)),
Allow(Target::Method(MethodKind::Trait { body: false })),
Allow(Target::Method(MethodKind::Trait { body: true })),
Allow(Target::Method(MethodKind::TraitImpl)),
Allow(Target::Struct),
Allow(Target::Union),
]);
// No payload; the span argument is discarded.
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcDumpVariances;
}
// Parser for the internal `#[rustc_dump_variances_of_opaques]` TEST attribute
// (takes no arguments).
pub(crate) struct RustcDumpVariancesOfOpaquesParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpVariancesOfOpaquesParser {
// Attribute path: `rustc_dump_variances_of_opaques`.
const PATH: &[Symbol] = &[sym::rustc_dump_variances_of_opaques];
// A duplicate occurrence is only a warning.
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
// Crate-level only: it dumps information about all opaque types in the crate.
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
// No payload; the span argument is discarded.
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcDumpVariancesOfOpaques;
}
pub(crate) struct RustcDumpVtableParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcDumpVtableParser {
@@ -588,15 +588,6 @@ impl<S: Stage> NoArgsAttributeParser<S> for RustcLintUntrackedQueryInformationPa
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcLintUntrackedQueryInformation;
}
// Parser for the internal `#[rustc_object_lifetime_default]` TEST attribute.
// NOTE(review): the surrounding hunk header (`@@ -588,15 +588,6`) suggests this
// parser was REMOVED by this commit (superseded by RustcDumpObjectLifetimeDefaultsParser
// above) — confirm before relying on it.
pub(crate) struct RustcObjectLifetimeDefaultParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcObjectLifetimeDefaultParser {
// Attribute path: `rustc_object_lifetime_default`.
const PATH: &[Symbol] = &[sym::rustc_object_lifetime_default];
// Duplicate occurrences are a hard error.
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
// Structs only.
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Struct)]);
// No payload; the span argument is discarded.
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcObjectLifetimeDefault;
}
pub(crate) struct RustcSimdMonomorphizeLaneLimitParser;
impl<S: Stage> SingleAttributeParser<S> for RustcSimdMonomorphizeLaneLimitParser {
@@ -93,28 +93,6 @@ fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser) -> Option<Attrib
}
}
// Parser for the internal `#[rustc_variance]` TEST attribute (takes no arguments).
// NOTE(review): the hunk header (`@@ -93,28 +93,6`) suggests this parser was REMOVED
// by this commit in favor of RustcDumpVariancesParser — confirm against the applied file.
pub(crate) struct RustcVarianceParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcVarianceParser {
// Attribute path: `rustc_variance`.
const PATH: &[Symbol] = &[sym::rustc_variance];
// A duplicate occurrence is only a warning.
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
// Accepted on ADT definitions only.
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[
Allow(Target::Struct),
Allow(Target::Enum),
Allow(Target::Union),
]);
// No payload; the span argument is discarded.
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcVariance;
}
// Parser for the internal `#[rustc_variance_of_opaques]` TEST attribute.
// NOTE(review): likely removed by this commit in favor of
// RustcDumpVariancesOfOpaquesParser — confirm against the applied file.
pub(crate) struct RustcVarianceOfOpaquesParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcVarianceOfOpaquesParser {
// Attribute path: `rustc_variance_of_opaques`.
const PATH: &[Symbol] = &[sym::rustc_variance_of_opaques];
// A duplicate occurrence is only a warning.
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
// Crate-level only.
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
// No payload; the span argument is discarded.
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcVarianceOfOpaques;
}
pub(crate) struct ReexportTestHarnessMainParser;
impl<S: Stage> SingleAttributeParser<S> for ReexportTestHarnessMainParser {
@@ -215,20 +193,6 @@ impl<S: Stage> NoArgsAttributeParser<S> for RustcEvaluateWhereClausesParser {
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcEvaluateWhereClauses;
}
// Parser for the internal `#[rustc_outlives]` TEST attribute (takes no arguments).
// NOTE(review): the hunk header (`@@ -215,20 +193,6`) suggests this parser was
// REMOVED by this commit in favor of RustcDumpInferredOutlivesParser — confirm.
pub(crate) struct RustcOutlivesParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcOutlivesParser {
// Attribute path: `rustc_outlives`.
const PATH: &[Symbol] = &[sym::rustc_outlives];
// A duplicate occurrence is only a warning.
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
// Accepted on type definitions: structs, enums, unions, and type aliases.
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[
Allow(Target::Struct),
Allow(Target::Enum),
Allow(Target::Union),
Allow(Target::TyAlias),
]);
// No payload; the span argument is discarded.
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcOutlives;
}
pub(crate) struct TestRunnerParser;
impl<S: Stage> SingleAttributeParser<S> for TestRunnerParser {
+4 -4
View File
@@ -281,9 +281,13 @@ mod late {
Single<WithoutArgs<RustcDenyExplicitImplParser>>,
Single<WithoutArgs<RustcDoNotConstCheckParser>>,
Single<WithoutArgs<RustcDumpDefParentsParser>>,
Single<WithoutArgs<RustcDumpInferredOutlivesParser>>,
Single<WithoutArgs<RustcDumpItemBoundsParser>>,
Single<WithoutArgs<RustcDumpObjectLifetimeDefaultsParser>>,
Single<WithoutArgs<RustcDumpPredicatesParser>>,
Single<WithoutArgs<RustcDumpUserArgsParser>>,
Single<WithoutArgs<RustcDumpVariancesOfOpaquesParser>>,
Single<WithoutArgs<RustcDumpVariancesParser>>,
Single<WithoutArgs<RustcDumpVtableParser>>,
Single<WithoutArgs<RustcDynIncompatibleTraitParser>>,
Single<WithoutArgs<RustcEffectiveVisibilityParser>>,
@@ -306,9 +310,7 @@ mod late {
Single<WithoutArgs<RustcNonConstTraitMethodParser>>,
Single<WithoutArgs<RustcNonnullOptimizationGuaranteedParser>>,
Single<WithoutArgs<RustcNounwindParser>>,
Single<WithoutArgs<RustcObjectLifetimeDefaultParser>>,
Single<WithoutArgs<RustcOffloadKernelParser>>,
Single<WithoutArgs<RustcOutlivesParser>>,
Single<WithoutArgs<RustcParenSugarParser>>,
Single<WithoutArgs<RustcPassByValueParser>>,
Single<WithoutArgs<RustcPassIndirectlyInNonRusticAbisParser>>,
@@ -323,8 +325,6 @@ mod late {
Single<WithoutArgs<RustcStrictCoherenceParser>>,
Single<WithoutArgs<RustcTrivialFieldReadsParser>>,
Single<WithoutArgs<RustcUnsafeSpecializationMarkerParser>>,
Single<WithoutArgs<RustcVarianceOfOpaquesParser>>,
Single<WithoutArgs<RustcVarianceParser>>,
Single<WithoutArgs<ThreadLocalParser>>,
Single<WithoutArgs<TrackCallerParser>>,
// tidy-alphabetical-end
@@ -1310,7 +1310,7 @@ fn explain_captures(
{
let mut span: MultiSpan = spans.clone().into();
err.arg("ty", param_ty.to_string());
let msg = err.dcx.eagerly_translate_to_string(
let msg = err.dcx.eagerly_format_to_string(
msg!("`{$ty}` is made to be an `FnOnce` closure here"),
err.args.iter(),
);
+3 -3
View File
@@ -764,7 +764,7 @@ pub(crate) struct FormatUnusedArg {
impl Subdiagnostic for FormatUnusedArg {
fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
diag.arg("named", self.named);
let msg = diag.eagerly_translate(msg!(
let msg = diag.eagerly_format(msg!(
"{$named ->
[true] named argument
*[false] argument
@@ -947,8 +947,8 @@ pub(crate) struct AsmClobberNoReg {
impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for AsmClobberNoReg {
fn into_diag(self, dcx: DiagCtxtHandle<'a>, level: Level) -> Diag<'a, G> {
// eager translation as `span_labels` takes `AsRef<str>`
let lbl1 = dcx.eagerly_translate_to_string(msg!("clobber_abi"), [].into_iter());
let lbl2 = dcx.eagerly_translate_to_string(msg!("generic outputs"), [].into_iter());
let lbl1 = dcx.eagerly_format_to_string(msg!("clobber_abi"), [].into_iter());
let lbl2 = dcx.eagerly_format_to_string(msg!("generic outputs"), [].into_iter());
Diag::new(
dcx,
level,
+6 -6
View File
@@ -26,7 +26,7 @@
use rustc_codegen_ssa::back::lto::SerializedModule;
use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput, SharedEmitter};
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::{ModuleCodegen, ModuleKind, looks_like_rust_object_file};
use rustc_codegen_ssa::{CompiledModule, ModuleCodegen, ModuleKind, looks_like_rust_object_file};
use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_errors::{DiagCtxt, DiagCtxtHandle};
@@ -34,7 +34,7 @@
use rustc_session::config::Lto;
use tempfile::{TempDir, tempdir};
use crate::back::write::save_temp_bitcode;
use crate::back::write::{codegen, save_temp_bitcode};
use crate::errors::LtoBitcodeFromRlib;
use crate::{GccCodegenBackend, GccContext, LtoMode, to_gcc_opt_level};
@@ -112,7 +112,7 @@ pub(crate) fn run_fat(
shared_emitter: &SharedEmitter,
each_linked_rlib_for_lto: &[PathBuf],
modules: Vec<FatLtoInput<GccCodegenBackend>>,
) -> ModuleCodegen<GccContext> {
) -> CompiledModule {
let dcx = DiagCtxt::new(Box::new(shared_emitter.clone()));
let dcx = dcx.handle();
let lto_data = prepare_lto(cgcx, each_linked_rlib_for_lto, dcx);
@@ -132,12 +132,12 @@ pub(crate) fn run_fat(
fn fat_lto(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
_dcx: DiagCtxtHandle<'_>,
dcx: DiagCtxtHandle<'_>,
modules: Vec<FatLtoInput<GccCodegenBackend>>,
mut serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
tmp_path: TempDir,
//symbols_below_threshold: &[String],
) -> ModuleCodegen<GccContext> {
) -> CompiledModule {
let _timer = prof.generic_activity("GCC_fat_lto_build_monolithic_module");
info!("going for a fat lto");
@@ -260,7 +260,7 @@ fn fat_lto(
// of now.
module.module_llvm.temp_dir = Some(tmp_path);
module
codegen(cgcx, prof, dcx, module, &cgcx.module_config)
}
pub struct ModuleBuffer(PathBuf);
+3 -8
View File
@@ -2,12 +2,10 @@
use gccjit::{Context, OutputKind};
use rustc_codegen_ssa::back::link::ensure_removed;
use rustc_codegen_ssa::back::write::{
BitcodeSection, CodegenContext, EmitObj, ModuleConfig, SharedEmitter,
};
use rustc_codegen_ssa::back::write::{BitcodeSection, CodegenContext, EmitObj, ModuleConfig};
use rustc_codegen_ssa::{CompiledModule, ModuleCodegen};
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_errors::DiagCtxt;
use rustc_errors::DiagCtxtHandle;
use rustc_fs_util::link_or_copy;
use rustc_log::tracing::debug;
use rustc_session::config::OutputType;
@@ -20,13 +18,10 @@
pub(crate) fn codegen(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
shared_emitter: &SharedEmitter,
dcx: DiagCtxtHandle<'_>,
module: ModuleCodegen<GccContext>,
config: &ModuleConfig,
) -> CompiledModule {
let dcx = DiagCtxt::new(Box::new(shared_emitter.clone()));
let dcx = dcx.handle();
let _timer = prof.generic_activity_with_arg("GCC_module_codegen", &*module.name);
{
let context = &module.module_llvm.context;
+18 -24
View File
@@ -92,7 +92,7 @@
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::sync::IntoDynSyncSend;
use rustc_errors::DiagCtxtHandle;
use rustc_errors::{DiagCtxt, DiagCtxtHandle};
use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
use rustc_middle::ty::TyCtxt;
use rustc_middle::util::Providers;
@@ -371,16 +371,6 @@ fn compile_codegen_unit(
self.lto_supported.load(Ordering::SeqCst),
)
}
fn target_machine_factory(
&self,
_sess: &Session,
_opt_level: OptLevel,
_features: &[String],
) -> TargetMachineFactoryFn<Self> {
// TODO(antoyo): set opt level.
Arc::new(|_, _| ())
}
}
#[derive(Clone, Copy, PartialEq)]
@@ -429,7 +419,17 @@ impl WriteBackendMethods for GccCodegenBackend {
type ModuleBuffer = ModuleBuffer;
type ThinData = ();
fn run_and_optimize_fat_lto(
fn target_machine_factory(
&self,
_sess: &Session,
_opt_level: OptLevel,
_features: &[String],
) -> TargetMachineFactoryFn<Self> {
// TODO(antoyo): set opt level.
Arc::new(|_, _| ())
}
fn optimize_and_codegen_fat_lto(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
shared_emitter: &SharedEmitter,
@@ -438,7 +438,7 @@ fn run_and_optimize_fat_lto(
_exported_symbols_for_lto: &[String],
each_linked_rlib_for_lto: &[PathBuf],
modules: Vec<FatLtoInput<Self>>,
) -> ModuleCodegen<Self::Module> {
) -> CompiledModule {
back::lto::run_fat(cgcx, prof, shared_emitter, each_linked_rlib_for_lto, modules)
}
@@ -455,14 +455,6 @@ fn run_thin_lto(
unreachable!()
}
fn print_pass_timings(&self) {
unimplemented!();
}
fn print_statistics(&self) {
unimplemented!()
}
fn optimize(
_cgcx: &CodegenContext,
_prof: &SelfProfilerRef,
@@ -473,13 +465,13 @@ fn optimize(
module.module_llvm.context.set_optimization_level(to_gcc_opt_level(config.opt_level));
}
fn optimize_thin(
fn optimize_and_codegen_thin(
_cgcx: &CodegenContext,
_prof: &SelfProfilerRef,
_shared_emitter: &SharedEmitter,
_tm_factory: TargetMachineFactoryFn<Self>,
_thin: ThinModule<Self>,
) -> ModuleCodegen<Self::Module> {
) -> CompiledModule {
unreachable!()
}
@@ -490,7 +482,9 @@ fn codegen(
module: ModuleCodegen<Self::Module>,
config: &ModuleConfig,
) -> CompiledModule {
back::write::codegen(cgcx, prof, shared_emitter, module, config)
let dcx = DiagCtxt::new(Box::new(shared_emitter.clone()));
let dcx = dcx.handle();
back::write::codegen(cgcx, prof, dcx, module, config)
}
fn serialize_module(_module: Self::Module, _is_thin: bool) -> Self::ModuleBuffer {
+6 -5
View File
@@ -12,7 +12,7 @@
CodegenContext, FatLtoInput, SharedEmitter, TargetMachineFactoryFn,
};
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::{ModuleCodegen, ModuleKind, looks_like_rust_object_file};
use rustc_codegen_ssa::{CompiledModule, ModuleCodegen, ModuleKind, looks_like_rust_object_file};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::profiling::SelfProfilerRef;
@@ -24,7 +24,8 @@
use tracing::{debug, info};
use crate::back::write::{
self, CodegenDiagnosticsStage, DiagnosticHandlers, bitcode_section_name, save_temp_bitcode,
self, CodegenDiagnosticsStage, DiagnosticHandlers, bitcode_section_name, codegen,
save_temp_bitcode,
};
use crate::errors::{LlvmError, LtoBitcodeFromRlib};
use crate::llvm::{self, build_string};
@@ -709,13 +710,13 @@ fn data(&self) -> &[u8] {
}
}
pub(crate) fn optimize_thin_module(
pub(crate) fn optimize_and_codegen_thin_module(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
shared_emitter: &SharedEmitter,
tm_factory: TargetMachineFactoryFn<LlvmCodegenBackend>,
thin_module: ThinModule<LlvmCodegenBackend>,
) -> ModuleCodegen<ModuleLlvm> {
) -> CompiledModule {
let dcx = DiagCtxt::new(Box::new(shared_emitter.clone()));
let dcx = dcx.handle();
@@ -794,7 +795,7 @@ pub(crate) fn optimize_thin_module(
save_temp_bitcode(cgcx, &module, "thin-lto-after-pm");
}
}
module
codegen(cgcx, prof, shared_emitter, module, &cgcx.module_config)
}
/// Maps LLVM module identifiers to their corresponding LLVM LTO cache keys
+5 -10
View File
@@ -335,13 +335,13 @@ pub(crate) fn save_temp_bitcode(
&module.name,
cgcx.invocation_temp.as_deref(),
);
write_bitcode_to_file(module, &path)
write_bitcode_to_file(&module.module_llvm, &path)
}
fn write_bitcode_to_file(module: &ModuleCodegen<ModuleLlvm>, path: &Path) {
fn write_bitcode_to_file(module: &ModuleLlvm, path: &Path) {
unsafe {
let path = path_to_c_string(&path);
let llmod = module.module_llvm.llmod();
let llmod = module.llmod();
llvm::LLVMWriteBitcodeToFile(llmod, path.as_ptr());
}
}
@@ -905,13 +905,8 @@ pub(crate) fn optimize(
let _handlers =
DiagnosticHandlers::new(cgcx, shared_emitter, llcx, module, CodegenDiagnosticsStage::Opt);
if config.emit_no_opt_bc {
let out = cgcx.output_filenames.temp_path_ext_for_cgu(
"no-opt.bc",
&module.name,
cgcx.invocation_temp.as_deref(),
);
write_bitcode_to_file(module, &out)
if module.kind == ModuleKind::Regular {
save_temp_bitcode(cgcx, module, "no-opt");
}
// FIXME(ZuseZ4): support SanitizeHWAddress and prevent illegal/unsupported opts
+1 -1
View File
@@ -24,7 +24,7 @@ impl<G: EmissionGuarantee> Diagnostic<'_, G> for ParseTargetMachineConfig<'_> {
fn into_diag(self, dcx: DiagCtxtHandle<'_>, level: Level) -> Diag<'_, G> {
let diag: Diag<'_, G> = self.0.into_diag(dcx, level);
let (message, _) = diag.messages.first().expect("`LlvmError` with no message");
let message = dcx.eagerly_translate_to_string(message.clone(), diag.args.iter());
let message = dcx.eagerly_format_to_string(message.clone(), diag.args.iter());
Diag::new(
dcx,
level,
+31 -48
View File
@@ -79,24 +79,18 @@
#[derive(Clone)]
pub struct LlvmCodegenBackend(());
struct TimeTraceProfiler {
enabled: bool,
}
struct TimeTraceProfiler {}
impl TimeTraceProfiler {
fn new(enabled: bool) -> Self {
if enabled {
unsafe { llvm::LLVMRustTimeTraceProfilerInitialize() }
}
TimeTraceProfiler { enabled }
fn new() -> Self {
unsafe { llvm::LLVMRustTimeTraceProfilerInitialize() }
TimeTraceProfiler {}
}
}
impl Drop for TimeTraceProfiler {
fn drop(&mut self) {
if self.enabled {
unsafe { llvm::LLVMRustTimeTraceProfilerFinishThread() }
}
unsafe { llvm::LLVMRustTimeTraceProfilerFinishThread() }
}
}
@@ -122,6 +116,16 @@ fn compile_codegen_unit(
) -> (ModuleCodegen<ModuleLlvm>, u64) {
base::compile_codegen_unit(tcx, cgu_name)
}
}
impl WriteBackendMethods for LlvmCodegenBackend {
type Module = ModuleLlvm;
type ModuleBuffer = back::lto::ModuleBuffer;
type TargetMachine = OwnedTargetMachine;
type ThinData = back::lto::ThinData;
fn thread_profiler() -> Box<dyn Any> {
Box::new(TimeTraceProfiler::new())
}
fn target_machine_factory(
&self,
sess: &Session,
@@ -130,38 +134,7 @@ fn target_machine_factory(
) -> TargetMachineFactoryFn<Self> {
back::write::target_machine_factory(sess, optlvl, target_features)
}
fn spawn_named_thread<F, T>(
time_trace: bool,
name: String,
f: F,
) -> std::io::Result<std::thread::JoinHandle<T>>
where
F: FnOnce() -> T,
F: Send + 'static,
T: Send + 'static,
{
std::thread::Builder::new().name(name).spawn(move || {
let _profiler = TimeTraceProfiler::new(time_trace);
f()
})
}
}
impl WriteBackendMethods for LlvmCodegenBackend {
type Module = ModuleLlvm;
type ModuleBuffer = back::lto::ModuleBuffer;
type TargetMachine = OwnedTargetMachine;
type ThinData = back::lto::ThinData;
fn print_pass_timings(&self) {
let timings = llvm::build_string(|s| unsafe { llvm::LLVMRustPrintPassTimings(s) }).unwrap();
print!("{timings}");
}
fn print_statistics(&self) {
let stats = llvm::build_string(|s| unsafe { llvm::LLVMRustPrintStatistics(s) }).unwrap();
print!("{stats}");
}
fn run_and_optimize_fat_lto(
fn optimize_and_codegen_fat_lto(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
shared_emitter: &SharedEmitter,
@@ -169,7 +142,7 @@ fn run_and_optimize_fat_lto(
exported_symbols_for_lto: &[String],
each_linked_rlib_for_lto: &[PathBuf],
modules: Vec<FatLtoInput<Self>>,
) -> ModuleCodegen<Self::Module> {
) -> CompiledModule {
let mut module = back::lto::run_fat(
cgcx,
prof,
@@ -184,7 +157,7 @@ fn run_and_optimize_fat_lto(
let dcx = dcx.handle();
back::lto::run_pass_manager(cgcx, prof, dcx, &mut module, false);
module
back::write::codegen(cgcx, prof, shared_emitter, module, &cgcx.module_config)
}
fn run_thin_lto(
cgcx: &CodegenContext,
@@ -214,14 +187,14 @@ fn optimize(
) {
back::write::optimize(cgcx, prof, shared_emitter, module, config)
}
fn optimize_thin(
fn optimize_and_codegen_thin(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
shared_emitter: &SharedEmitter,
tm_factory: TargetMachineFactoryFn<LlvmCodegenBackend>,
thin: ThinModule<Self>,
) -> ModuleCodegen<Self::Module> {
back::lto::optimize_thin_module(cgcx, prof, shared_emitter, tm_factory, thin)
) -> CompiledModule {
back::lto::optimize_and_codegen_thin_module(cgcx, prof, shared_emitter, tm_factory, thin)
}
fn codegen(
cgcx: &CodegenContext,
@@ -389,6 +362,16 @@ fn join_codegen(
(compiled_modules, work_products)
}
fn print_pass_timings(&self) {
let timings = llvm::build_string(|s| unsafe { llvm::LLVMRustPrintPassTimings(s) }).unwrap();
print!("{timings}");
}
fn print_statistics(&self) {
let stats = llvm::build_string(|s| unsafe { llvm::LLVMRustPrintStatistics(s) }).unwrap();
print!("{stats}");
}
fn link(
&self,
sess: &Session,
+38 -41
View File
@@ -94,7 +94,6 @@ pub struct ModuleConfig {
// Flags indicating which outputs to produce.
pub emit_pre_lto_bc: bool,
pub emit_no_opt_bc: bool,
pub emit_bc: bool,
pub emit_ir: bool,
pub emit_asm: bool,
@@ -195,7 +194,6 @@ macro_rules! if_regular {
save_temps || need_pre_lto_bitcode_for_incr_comp(sess),
false
),
emit_no_opt_bc: if_regular!(save_temps, false),
emit_bc: if_regular!(
save_temps || sess.opts.output_types.contains_key(&OutputType::Bitcode),
save_temps
@@ -356,7 +354,7 @@ pub struct CodegenContext {
pub parallel: bool,
}
fn generate_thin_lto_work<B: ExtraBackendMethods>(
fn generate_thin_lto_work<B: WriteBackendMethods>(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
dcx: DiagCtxtHandle<'_>,
@@ -824,7 +822,7 @@ pub(crate) fn compute_per_cgu_lto_type(
}
}
fn execute_optimize_work_item<B: ExtraBackendMethods>(
fn execute_optimize_work_item<B: WriteBackendMethods>(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
shared_emitter: SharedEmitter,
@@ -969,7 +967,7 @@ fn execute_copy_from_cache_work_item(
}
}
fn do_fat_lto<B: ExtraBackendMethods>(
fn do_fat_lto<B: WriteBackendMethods>(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
shared_emitter: SharedEmitter,
@@ -990,7 +988,7 @@ fn do_fat_lto<B: ExtraBackendMethods>(
needs_fat_lto.push(FatLtoInput::Serialized { name: wp.cgu_name, buffer: module })
}
let module = B::run_and_optimize_fat_lto(
B::optimize_and_codegen_fat_lto(
cgcx,
prof,
&shared_emitter,
@@ -998,11 +996,10 @@ fn do_fat_lto<B: ExtraBackendMethods>(
exported_symbols_for_lto,
each_linked_rlib_for_lto,
needs_fat_lto,
);
B::codegen(cgcx, prof, &shared_emitter, module, &cgcx.module_config)
)
}
fn do_thin_lto<B: ExtraBackendMethods>(
fn do_thin_lto<B: WriteBackendMethods>(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
shared_emitter: SharedEmitter,
@@ -1155,7 +1152,7 @@ fn do_thin_lto<B: ExtraBackendMethods>(
compiled_modules
}
fn execute_thin_lto_work_item<B: ExtraBackendMethods>(
fn execute_thin_lto_work_item<B: WriteBackendMethods>(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
shared_emitter: SharedEmitter,
@@ -1164,8 +1161,7 @@ fn execute_thin_lto_work_item<B: ExtraBackendMethods>(
) -> CompiledModule {
let _timer = prof.generic_activity_with_arg("codegen_module_perform_lto", module.name());
let module = B::optimize_thin(cgcx, prof, &shared_emitter, tm_factory, module);
B::codegen(cgcx, prof, &shared_emitter, module, &cgcx.module_config)
B::optimize_and_codegen_thin(cgcx, prof, &shared_emitter, tm_factory, module)
}
/// Messages sent to the coordinator.
@@ -1472,7 +1468,9 @@ fn start_executing_work<B: ExtraBackendMethods>(
// Each LLVM module is automatically sent back to the coordinator for LTO if
// necessary. There's already optimizations in place to avoid sending work
// back to the coordinator if LTO isn't requested.
return B::spawn_named_thread(cgcx.time_trace, "coordinator".to_string(), move || {
let f = move || {
let _profiler = if cgcx.time_trace { B::thread_profiler() } else { Box::new(()) };
// This is where we collect codegen units that have gone all the way
// through codegen and LLVM.
let mut compiled_modules = vec![];
@@ -1813,8 +1811,11 @@ enum CodegenState {
B::codegen(&cgcx, &prof, &shared_emitter, allocator_module, &allocator_config)
}),
}))
})
.expect("failed to spawn coordinator thread");
};
return std::thread::Builder::new()
.name("coordinator".to_owned())
.spawn(f)
.expect("failed to spawn coordinator thread");
// A heuristic that determines if we have enough LLVM WorkItems in the
// queue so that the main thread can do LLVM work instead of codegen
@@ -1878,7 +1879,7 @@ fn queue_full_enough(items_in_queue: usize, workers_running: usize) -> bool {
#[must_use]
pub(crate) struct WorkerFatalError;
fn spawn_work<'a, B: ExtraBackendMethods>(
fn spawn_work<'a, B: WriteBackendMethods>(
cgcx: &CodegenContext,
prof: &'a SelfProfilerRef,
shared_emitter: SharedEmitter,
@@ -1893,7 +1894,10 @@ fn spawn_work<'a, B: ExtraBackendMethods>(
let cgcx = cgcx.clone();
let prof = prof.clone();
B::spawn_named_thread(cgcx.time_trace, work.short_description(), move || {
let name = work.short_description();
let f = move || {
let _profiler = if cgcx.time_trace { B::thread_profiler() } else { Box::new(()) };
let result = std::panic::catch_unwind(AssertUnwindSafe(|| match work {
WorkItem::Optimize(m) => execute_optimize_work_item(&cgcx, &prof, shared_emitter, m),
WorkItem::CopyPostLtoArtifacts(m) => WorkItemResult::Finished(
@@ -1914,11 +1918,11 @@ fn spawn_work<'a, B: ExtraBackendMethods>(
Err(_) => Message::WorkItem::<B> { result: Err(None) },
};
drop(coordinator_send.send(msg));
})
.expect("failed to spawn work thread");
};
std::thread::Builder::new().name(name).spawn(f).expect("failed to spawn work thread");
}
fn spawn_thin_lto_work<B: ExtraBackendMethods>(
fn spawn_thin_lto_work<B: WriteBackendMethods>(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
shared_emitter: SharedEmitter,
@@ -1929,7 +1933,10 @@ fn spawn_thin_lto_work<B: ExtraBackendMethods>(
let cgcx = cgcx.clone();
let prof = prof.clone();
B::spawn_named_thread(cgcx.time_trace, work.short_description(), move || {
let name = work.short_description();
let f = move || {
let _profiler = if cgcx.time_trace { B::thread_profiler() } else { Box::new(()) };
let result = std::panic::catch_unwind(AssertUnwindSafe(|| match work {
ThinLtoWorkItem::CopyPostLtoArtifacts(m) => {
execute_copy_from_cache_work_item(&cgcx, &prof, shared_emitter, m)
@@ -1952,8 +1959,8 @@ fn spawn_thin_lto_work<B: ExtraBackendMethods>(
Err(_) => ThinLtoMessage::WorkItem { result: Err(None) },
};
drop(coordinator_send.send(msg));
})
.expect("failed to spawn work thread");
};
std::thread::Builder::new().name(name).spawn(f).expect("failed to spawn work thread");
}
enum SharedEmitterMessage {
@@ -2102,20 +2109,20 @@ fn check(&self, sess: &Session, blocking: bool) {
}
}
pub struct Coordinator<B: ExtraBackendMethods> {
pub struct Coordinator<B: WriteBackendMethods> {
sender: Sender<Message<B>>,
future: Option<thread::JoinHandle<Result<MaybeLtoModules<B>, ()>>>,
// Only used for the Message type.
phantom: PhantomData<B>,
}
impl<B: ExtraBackendMethods> Coordinator<B> {
impl<B: WriteBackendMethods> Coordinator<B> {
fn join(mut self) -> std::thread::Result<Result<MaybeLtoModules<B>, ()>> {
self.future.take().unwrap().join()
}
}
impl<B: ExtraBackendMethods> Drop for Coordinator<B> {
impl<B: WriteBackendMethods> Drop for Coordinator<B> {
fn drop(&mut self) {
if let Some(future) = self.future.take() {
// If we haven't joined yet, signal to the coordinator that it should spawn no more
@@ -2126,7 +2133,7 @@ fn drop(&mut self) {
}
}
pub struct OngoingCodegen<B: ExtraBackendMethods> {
pub struct OngoingCodegen<B: WriteBackendMethods> {
pub backend: B,
pub output_filenames: Arc<OutputFilenames>,
// Field order below is intended to terminate the coordinator thread before two fields below
@@ -2137,7 +2144,7 @@ pub struct OngoingCodegen<B: ExtraBackendMethods> {
pub shared_emitter_main: SharedEmitterMain,
}
impl<B: ExtraBackendMethods> OngoingCodegen<B> {
impl<B: WriteBackendMethods> OngoingCodegen<B> {
pub fn join(self, sess: &Session) -> (CompiledModules, FxIndexMap<WorkProductId, WorkProduct>) {
self.shared_emitter_main.check(sess, true);
@@ -2234,16 +2241,6 @@ pub fn join(self, sess: &Session) -> (CompiledModules, FxIndexMap<WorkProductId,
copy_all_cgu_workproducts_to_incr_comp_cache_dir(sess, &compiled_modules);
produce_final_output_artifacts(sess, &compiled_modules, &self.output_filenames);
// FIXME: time_llvm_passes support - does this use a global context or
// something?
if sess.codegen_units().as_usize() == 1 && sess.opts.unstable_opts.time_llvm_passes {
self.backend.print_pass_timings()
}
if sess.print_llvm_stats() {
self.backend.print_statistics()
}
(compiled_modules, work_products)
}
@@ -2270,7 +2267,7 @@ pub(crate) fn wait_for_signal_to_codegen_item(&self) {
}
}
pub(crate) fn submit_codegened_module_to_llvm<B: ExtraBackendMethods>(
pub(crate) fn submit_codegened_module_to_llvm<B: WriteBackendMethods>(
coordinator: &Coordinator<B>,
module: ModuleCodegen<B::Module>,
cost: u64,
@@ -2279,7 +2276,7 @@ pub(crate) fn submit_codegened_module_to_llvm<B: ExtraBackendMethods>(
drop(coordinator.sender.send(Message::CodegenDone::<B> { llvm_work_item, cost }));
}
pub(crate) fn submit_post_lto_module_to_llvm<B: ExtraBackendMethods>(
pub(crate) fn submit_post_lto_module_to_llvm<B: WriteBackendMethods>(
coordinator: &Coordinator<B>,
module: CachedModuleCodegen,
) {
@@ -2287,7 +2284,7 @@ pub(crate) fn submit_post_lto_module_to_llvm<B: ExtraBackendMethods>(
drop(coordinator.sender.send(Message::CodegenDone::<B> { llvm_work_item, cost: 0 }));
}
pub(crate) fn submit_pre_lto_module_to_llvm<B: ExtraBackendMethods>(
pub(crate) fn submit_pre_lto_module_to_llvm<B: WriteBackendMethods>(
tcx: TyCtxt<'_>,
coordinator: &Coordinator<B>,
module: CachedModuleCodegen,
@@ -10,14 +10,13 @@
use rustc_middle::ty::TyCtxt;
use rustc_middle::util::Providers;
use rustc_session::Session;
use rustc_session::config::{self, CrateType, OutputFilenames, PrintRequest};
use rustc_session::config::{CrateType, OutputFilenames, PrintRequest};
use rustc_span::Symbol;
use super::CodegenObject;
use super::write::WriteBackendMethods;
use crate::back::archive::ArArchiveBuilderBuilder;
use crate::back::link::link_binary;
use crate::back::write::TargetMachineFactoryFn;
use crate::{CompiledModules, CrateInfo, ModuleCodegen, TargetConfig};
pub trait BackendTypes {
@@ -119,6 +118,10 @@ fn join_codegen(
outputs: &OutputFilenames,
) -> (CompiledModules, FxIndexMap<WorkProductId, WorkProduct>);
fn print_pass_timings(&self) {}
fn print_statistics(&self) {}
/// This is called on the returned [`CompiledModules`] from [`join_codegen`](Self::join_codegen).
fn link(
&self,
@@ -158,26 +161,6 @@ fn compile_codegen_unit(
cgu_name: Symbol,
) -> (ModuleCodegen<Self::Module>, u64);
fn target_machine_factory(
&self,
sess: &Session,
opt_level: config::OptLevel,
target_features: &[String],
) -> TargetMachineFactoryFn<Self>;
fn spawn_named_thread<F, T>(
_time_trace: bool,
name: String,
f: F,
) -> std::io::Result<std::thread::JoinHandle<T>>
where
F: FnOnce() -> T,
F: Send + 'static,
T: Send + 'static,
{
std::thread::Builder::new().name(name).spawn(f)
}
/// Returns `true` if this backend can be safely called from multiple threads.
///
/// Defaults to `true`.
+15 -6
View File
@@ -1,8 +1,10 @@
use std::any::Any;
use std::path::PathBuf;
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_errors::DiagCtxtHandle;
use rustc_middle::dep_graph::WorkProduct;
use rustc_session::{Session, config};
use crate::back::lto::{SerializedModule, ThinModule};
use crate::back::write::{
@@ -16,9 +18,18 @@ pub trait WriteBackendMethods: Clone + 'static {
type ModuleBuffer: ModuleBufferMethods;
type ThinData: Send + Sync;
fn thread_profiler() -> Box<dyn Any> {
Box::new(())
}
fn target_machine_factory(
&self,
sess: &Session,
opt_level: config::OptLevel,
target_features: &[String],
) -> TargetMachineFactoryFn<Self>;
/// Performs fat LTO by merging all modules into a single one, running autodiff
/// if necessary and running any further optimizations
fn run_and_optimize_fat_lto(
fn optimize_and_codegen_fat_lto(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
shared_emitter: &SharedEmitter,
@@ -26,7 +37,7 @@ fn run_and_optimize_fat_lto(
exported_symbols_for_lto: &[String],
each_linked_rlib_for_lto: &[PathBuf],
modules: Vec<FatLtoInput<Self>>,
) -> ModuleCodegen<Self::Module>;
) -> CompiledModule;
/// Performs thin LTO by performing necessary global analysis and returning two
/// lists, one of the modules that need optimization and another for modules that
/// can simply be copied over from the incr. comp. cache.
@@ -39,8 +50,6 @@ fn run_thin_lto(
modules: Vec<(String, Self::ModuleBuffer)>,
cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>,
) -> (Vec<ThinModule<Self>>, Vec<WorkProduct>);
fn print_pass_timings(&self);
fn print_statistics(&self);
fn optimize(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
@@ -48,13 +57,13 @@ fn optimize(
module: &mut ModuleCodegen<Self::Module>,
config: &ModuleConfig,
);
fn optimize_thin(
fn optimize_and_codegen_thin(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
shared_emitter: &SharedEmitter,
tm_factory: TargetMachineFactoryFn<Self>,
thin: ThinModule<Self>,
) -> ModuleCodegen<Self::Module>;
) -> CompiledModule;
fn codegen(
cgcx: &CodegenContext,
prof: &SelfProfilerRef,
+6 -6
View File
@@ -366,7 +366,7 @@ fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
if self.has_label && !self.span.is_dummy() {
span.push_span_label(self.span, msg!("the failure occurred here"));
}
let msg = diag.eagerly_translate(msg!(
let msg = diag.eagerly_format(msg!(
r#"{$times ->
[0] inside {$where_ ->
[closure] closure
@@ -624,7 +624,7 @@ fn debug(self) -> String
let mut diag = dcx.struct_allow(DiagMessage::Str(String::new().into()));
let message = self.diagnostic_message();
self.add_args(&mut diag);
let s = dcx.eagerly_translate_to_string(message, diag.args.iter());
let s = dcx.eagerly_format_to_string(message, diag.args.iter());
diag.cancel();
s
})
@@ -1086,12 +1086,12 @@ fn add_args<G: EmissionGuarantee>(self, err: &mut Diag<'_, G>) {
}
let message = if let Some(path) = self.path {
err.dcx.eagerly_translate_to_string(
err.dcx.eagerly_format_to_string(
msg!("constructing invalid value at {$path}"),
[("path".into(), DiagArgValue::Str(path.into()))].iter().map(|(a, b)| (a, b)),
)
} else {
err.dcx.eagerly_translate_to_string(msg!("constructing invalid value"), [].into_iter())
err.dcx.eagerly_format_to_string(msg!("constructing invalid value"), [].into_iter())
};
err.arg("front_matter", message);
@@ -1122,7 +1122,7 @@ fn add_range_arg<G: EmissionGuarantee>(
("hi".into(), DiagArgValue::Str(hi.to_string().into())),
];
let args = args.iter().map(|(a, b)| (a, b));
let message = err.dcx.eagerly_translate_to_string(msg, args);
let message = err.dcx.eagerly_format_to_string(msg, args);
err.arg("in_range", message);
}
@@ -1144,7 +1144,7 @@ fn add_range_arg<G: EmissionGuarantee>(
ExpectedKind::EnumTag => msg!("expected a valid enum tag"),
ExpectedKind::Str => msg!("expected a string"),
};
let msg = err.dcx.eagerly_translate_to_string(msg, [].into_iter());
let msg = err.dcx.eagerly_format_to_string(msg, [].into_iter());
err.arg("expected", msg);
}
InvalidEnumTag { value }
@@ -235,7 +235,7 @@ pub fn format_interp_error<'tcx>(dcx: DiagCtxtHandle<'_>, e: InterpErrorInfo<'tc
let mut diag = dcx.struct_allow("");
let msg = e.diagnostic_message();
e.add_args(&mut diag);
let s = dcx.eagerly_translate_to_string(msg, diag.args.iter());
let s = dcx.eagerly_format_to_string(msg, diag.args.iter());
diag.cancel();
s
}
+5 -1
View File
@@ -338,7 +338,11 @@ pub fn run_compiler(at_args: &[String], callbacks: &mut (dyn Callbacks + Send))
}
}
Some(Linker::codegen_and_build_linker(tcx, &*compiler.codegen_backend))
let linker = Linker::codegen_and_build_linker(tcx, &*compiler.codegen_backend);
tcx.report_unused_features();
Some(linker)
});
// Linking is done outside the `compiler.enter()` so that the
@@ -1,47 +1,2 @@
#### This error code is internal to the compiler and will not be emitted with normal Rust code.
#### Note: this error code is no longer emitted by the compiler.
This error code shows the variance of a type's generic parameters.
Erroneous code example:
```compile_fail
// NOTE: this feature is perma-unstable and should *only* be used for
// testing purposes.
#![allow(internal_features)]
#![feature(rustc_attrs)]
#[rustc_variance]
struct Foo<'a, T> { // error: deliberate error to display type's variance
t: &'a mut T,
}
```
which produces the following error:
```text
error: [-, o]
--> <anon>:4:1
|
4 | struct Foo<'a, T> {
| ^^^^^^^^^^^^^^^^^
```
*Note that while `#[rustc_variance]` still exists and is used within the*
*compiler, it no longer is marked as `E0208` and instead has no error code.*
This error is deliberately triggered with the `#[rustc_variance]` attribute
(`#![feature(rustc_attrs)]` must be enabled) and helps to show you the variance
of the type's generic parameters. You can read more about variance and
subtyping in [this section of the Rustonomicon]. For a more in depth look at
variance (including a more complete list of common variances) see
[this section of the Reference]. For information on how variance is implemented
in the compiler, see [this section of `rustc-dev-guide`].
This error can be easily fixed by removing the `#[rustc_variance]` attribute,
the compiler's suggestion to comment it out can be applied automatically with
`rustfix`.
[this section of the Rustonomicon]: https://doc.rust-lang.org/nomicon/subtyping.html
[this section of the Reference]: https://doc.rust-lang.org/reference/subtyping.html#variance
[this section of `rustc-dev-guide`]: https://rustc-dev-guide.rust-lang.org/variance.html
@@ -26,7 +26,7 @@
ConfusionType, Destination, MAX_SUGGESTIONS, OutputTheme, detect_confusion_type, is_different,
normalize_whitespace, should_show_source_code,
};
use crate::translation::{format_diag_message, format_diag_messages};
use crate::formatting::{format_diag_message, format_diag_messages};
use crate::{
CodeSuggestion, DiagInner, DiagMessage, Emitter, ErrCode, Level, MultiSpan, Style, Subdiag,
SuggestionStyle, TerminalUrl,
+1 -1
View File
@@ -7,7 +7,7 @@
use crate::{Diag, DiagCtxtHandle, Diagnostic, Level};
/// We can't implement `LintDiagnostic` for `BuiltinLintDiag`, because decorating some of its
/// We can't implement `Diagnostic` for `BuiltinLintDiag`, because decorating some of its
/// variants requires types we don't have yet. So, handle that case separately.
pub enum DecorateDiagCompat {
Dynamic(Box<dyn for<'a> FnOnce(DiagCtxtHandle<'a>, Level) -> Diag<'a, ()> + DynSend + 'static>),
+4 -12
View File
@@ -138,14 +138,6 @@ pub trait Subdiagnostic
fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>);
}
/// Trait implemented by lint types. This should not be implemented manually. Instead, use
/// `#[derive(LintDiagnostic)]` -- see [rustc_macros::LintDiagnostic].
#[rustc_diagnostic_item = "LintDiagnostic"]
pub trait LintDiagnostic<'a, G: EmissionGuarantee> {
/// Decorate a lint with the information from this type.
fn decorate_lint<'b>(self, diag: &'b mut Diag<'a, G>);
}
#[derive(Clone, Debug, Encodable, Decodable)]
pub(crate) struct DiagLocation {
file: Cow<'static, str>,
@@ -1143,7 +1135,7 @@ pub fn tool_only_span_suggestion(
} }
/// Add a subdiagnostic from a type that implements `Subdiagnostic` (see
/// [rustc_macros::Subdiagnostic]). Performs eager translation of any translatable messages
/// [rustc_macros::Subdiagnostic]). Performs eager formatting of any messages
/// used in the subdiagnostic, so suitable for use with repeated messages (i.e. re-use of
/// interpolated variables).
pub fn subdiagnostic(&mut self, subdiagnostic: impl Subdiagnostic) -> &mut Self {
@@ -1153,12 +1145,12 @@ pub fn subdiagnostic(&mut self, subdiagnostic: impl Subdiagnostic) -> &mut Self
/// Fluent variables are not namespaced from each other, so when
/// `Diagnostic`s and `Subdiagnostic`s use the same variable name,
/// one value will clobber the other. Eagerly translating the
/// one value will clobber the other. Eagerly formatting the
/// diagnostic uses the variables defined right then, before the
/// clobbering occurs.
pub fn eagerly_translate(&self, msg: impl Into<DiagMessage>) -> DiagMessage {
pub fn eagerly_format(&self, msg: impl Into<DiagMessage>) -> DiagMessage {
let args = self.args.iter();
self.dcx.eagerly_translate(msg.into(), args)
self.dcx.eagerly_format(msg.into(), args)
}
with_fn! { with_span,
+1 -1
View File
@@ -23,8 +23,8 @@
use rustc_span::{FileName, SourceFile, Span};
use tracing::{debug, warn};
use crate::formatting::format_diag_message;
use crate::timings::TimingRecord;
use crate::translation::format_diag_message;
use crate::{
CodeSuggestion, DiagInner, DiagMessage, Level, MultiSpan, Style, Subdiag, SuggestionStyle,
};
@@ -8,10 +8,7 @@
use crate::{DiagArg, DiagMessage, Style, fluent_bundle};
/// Convert diagnostic arguments (a rustc internal type that exists to implement
/// `Encodable`/`Decodable`) into `FluentArgs` which is necessary to perform translation.
///
/// Typically performed once for each diagnostic at the start of `emit_diagnostic` and then
/// passed around as a reference thereafter.
/// `Encodable`/`Decodable`) into `FluentArgs` which is necessary to perform formatting.
fn to_fluent_args<'iter>(iter: impl Iterator<Item = DiagArg<'iter>>) -> FluentArgs<'static> {
let mut args = if let Some(size) = iter.size_hint().1 {
FluentArgs::with_capacity(size)
@@ -40,9 +37,6 @@ pub fn format_diag_message<'a>(message: &'a DiagMessage, args: &DiagArgMap) -> C
match message {
DiagMessage::Str(msg) => Cow::Borrowed(msg),
// This translates an inline fluent diagnostic message
// It does this by creating a new `FluentBundle` with only one message,
// and then translating using this bundle.
DiagMessage::Inline(msg) => {
const GENERATED_MSG_ID: &str = "generated_msg";
let resource =
@@ -56,10 +50,10 @@ pub fn format_diag_message<'a>(message: &'a DiagMessage, args: &DiagArgMap) -> C
let args = to_fluent_args(args.iter());
let mut errs = vec![];
let translated = bundle.format_pattern(value, Some(&args), &mut errs).to_string();
debug!(?translated, ?errs);
let formatted = bundle.format_pattern(value, Some(&args), &mut errs).to_string();
debug!(?formatted, ?errs);
if errs.is_empty() {
Cow::Owned(translated)
Cow::Owned(formatted)
} else {
panic!("Fluent errors while formatting message: {errs:?}");
}
+7 -7
View File
@@ -30,8 +30,8 @@
ColorConfig, Destination, Emitter, HumanReadableErrorType, OutputTheme, TimingEvent,
should_show_source_code,
};
use crate::formatting::{format_diag_message, format_diag_messages};
use crate::timings::{TimingRecord, TimingSection};
use crate::translation::{format_diag_message, format_diag_messages};
use crate::{CodeSuggestion, MultiSpan, SpanLabel, Subdiag, Suggestions, TerminalUrl};
#[cfg(test)]
@@ -299,9 +299,9 @@ impl Diagnostic {
/// Converts from `rustc_errors::DiagInner` to `Diagnostic`.
fn from_errors_diagnostic(diag: crate::DiagInner, je: &JsonEmitter) -> Diagnostic {
let sugg_to_diag = |sugg: &CodeSuggestion| {
let translated_message = format_diag_message(&sugg.msg, &diag.args);
let formatted_message = format_diag_message(&sugg.msg, &diag.args);
Diagnostic {
message: translated_message.to_string(),
message: formatted_message.to_string(),
code: None,
level: "help",
spans: DiagnosticSpan::from_suggestion(sugg, &diag.args, je),
@@ -330,7 +330,7 @@ fn flush(&mut self) -> io::Result<()> {
}
}
let translated_message = format_diag_messages(&diag.messages, &diag.args);
let formatted_message = format_diag_messages(&diag.messages, &diag.args);
let code = if let Some(code) = diag.code {
Some(DiagnosticCode {
@@ -380,7 +380,7 @@ fn flush(&mut self) -> io::Result<()> {
let buf = String::from_utf8(buf).unwrap();
Diagnostic {
message: translated_message.to_string(),
message: formatted_message.to_string(),
code,
level,
spans,
@@ -390,9 +390,9 @@ fn flush(&mut self) -> io::Result<()> {
}
fn from_sub_diagnostic(subdiag: &Subdiag, args: &DiagArgMap, je: &JsonEmitter) -> Diagnostic {
let translated_message = format_diag_messages(&subdiag.messages, args);
let formatted_message = format_diag_messages(&subdiag.messages, args);
Diagnostic {
message: translated_message.to_string(),
message: formatted_message.to_string(),
code: None,
level: subdiag.level.to_str(),
spans: DiagnosticSpan::from_multispan(&subdiag.span, args, je),
+18 -20
View File
@@ -7,9 +7,7 @@
#![allow(rustc::direct_use_of_rustc_type_ir)]
#![cfg_attr(bootstrap, feature(assert_matches))]
#![feature(associated_type_defaults)]
#![feature(box_patterns)]
#![feature(default_field_values)]
#![feature(error_reporter)]
#![feature(macro_metavar_expr_concat)]
#![feature(negative_impls)]
#![feature(never_type)]
@@ -40,7 +38,7 @@
pub use decorate_diag::{BufferedEarlyLint, DecorateDiagCompat, LintBuffer};
pub use diagnostic::{
BugAbort, Diag, DiagInner, DiagStyledString, Diagnostic, EmissionGuarantee, FatalAbort,
LintDiagnostic, StringPart, Subdiag, Subdiagnostic,
StringPart, Subdiag, Subdiagnostic,
};
pub use diagnostic_impls::{
DiagSymbolList, ElidedLifetimeInPathSubdiag, ExpectedLifetimeParameter,
@@ -68,8 +66,8 @@
use tracing::debug;
use crate::emitter::TimingEvent;
use crate::formatting::format_diag_message;
use crate::timings::TimingRecord;
use crate::translation::format_diag_message;
pub mod annotate_snippet_emitter_writer;
pub mod codes;
@@ -77,11 +75,11 @@
mod diagnostic;
mod diagnostic_impls;
pub mod emitter;
pub mod formatting;
pub mod json;
mod lock;
pub mod markdown;
pub mod timings;
pub mod translation;
pub type PResult<'a, T> = Result<T, Diag<'a>>;
@@ -484,24 +482,24 @@ pub fn set_emitter(&self, emitter: Box<dyn Emitter + DynSend>) {
self.inner.borrow_mut().emitter = emitter;
}
/// Translate `message` eagerly with `args` to `DiagMessage::Eager`.
pub fn eagerly_translate<'a>(
/// Format `message` eagerly with `args` to `DiagMessage::Eager`.
pub fn eagerly_format<'a>(
&self,
message: DiagMessage,
args: impl Iterator<Item = DiagArg<'a>>,
) -> DiagMessage {
let inner = self.inner.borrow();
inner.eagerly_translate(message, args)
inner.eagerly_format(message, args)
}
/// Translate `message` eagerly with `args` to `String`.
pub fn eagerly_translate_to_string<'a>(
/// Format `message` eagerly with `args` to `String`.
pub fn eagerly_format_to_string<'a>(
&self,
message: DiagMessage,
args: impl Iterator<Item = DiagArg<'a>>,
) -> String {
let inner = self.inner.borrow();
inner.eagerly_translate_to_string(message, args)
inner.eagerly_format_to_string(message, args)
}
// This is here to not allow mutation of flags;
@@ -1419,17 +1417,17 @@ fn has_errors_or_delayed_bugs(&self) -> Option<ErrorGuaranteed> {
self.has_errors().or_else(|| self.delayed_bugs.get(0).map(|(_, guar)| guar).copied())
}
/// Translate `message` eagerly with `args` to `DiagMessage::Eager`.
fn eagerly_translate<'a>(
/// Format `message` eagerly with `args` to `DiagMessage::Eager`.
fn eagerly_format<'a>(
&self,
message: DiagMessage,
args: impl Iterator<Item = DiagArg<'a>>,
) -> DiagMessage {
DiagMessage::Str(Cow::from(self.eagerly_translate_to_string(message, args)))
DiagMessage::Str(Cow::from(self.eagerly_format_to_string(message, args)))
}
/// Translate `message` eagerly with `args` to `String`.
fn eagerly_translate_to_string<'a>(
/// Format `message` eagerly with `args` to `String`.
fn eagerly_format_to_string<'a>(
&self,
message: DiagMessage,
args: impl Iterator<Item = DiagArg<'a>>,
@@ -1438,12 +1436,12 @@ fn eagerly_translate_to_string<'a>(
format_diag_message(&message, &args).to_string()
}
fn eagerly_translate_for_subdiag(
fn eagerly_format_for_subdiag(
&self,
diag: &DiagInner,
msg: impl Into<DiagMessage>,
) -> DiagMessage {
self.eagerly_translate(msg.into(), diag.args.iter())
self.eagerly_format(msg.into(), diag.args.iter())
}
fn flush_delayed(&mut self) {
@@ -1509,7 +1507,7 @@ fn flush_delayed(&mut self) {
let msg = msg!(
"`flushed_delayed` got diagnostic with level {$level}, instead of the expected `DelayedBug`"
);
let msg = self.eagerly_translate_for_subdiag(&bug, msg); // after the `arg` call
let msg = self.eagerly_format_for_subdiag(&bug, msg); // after the `arg` call
bug.sub(Note, msg, bug.span.primary_span().unwrap().into());
}
bug.level = Bug;
@@ -1560,7 +1558,7 @@ fn decorate(self, dcx: &DiagCtxtInner) -> DiagInner {
};
diag.arg("emitted_at", diag.emitted_at.clone());
diag.arg("note", self.note);
let msg = dcx.eagerly_translate_for_subdiag(&diag, msg); // after the `arg` calls
let msg = dcx.eagerly_format_for_subdiag(&diag, msg); // after the `arg` calls
diag.sub(Note, msg, diag.span.primary_span().unwrap_or(DUMMY_SP).into());
diag
}
+5 -5
View File
@@ -1330,7 +1330,7 @@ pub struct BuiltinAttribute {
safety: AttributeSafety::Normal,
template: template!(NameValueStr: "name"),
duplicates: ErrorFollowing,
gate: Gated{
gate: Gated {
feature: sym::rustc_attrs,
message: "use of an internal attribute",
check: Features::rustc_attrs,
@@ -1419,7 +1419,7 @@ pub struct BuiltinAttribute {
rustc_attr!(TEST, rustc_effective_visibility, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::Yes),
rustc_attr!(
TEST, rustc_outlives, Normal, template!(Word),
TEST, rustc_dump_inferred_outlives, Normal, template!(Word),
WarnFollowing, EncodeCrossCrate::No
),
rustc_attr!(
@@ -1439,11 +1439,11 @@ pub struct BuiltinAttribute {
WarnFollowing, EncodeCrossCrate::Yes
),
rustc_attr!(
TEST, rustc_variance, Normal, template!(Word),
TEST, rustc_dump_variances, Normal, template!(Word),
WarnFollowing, EncodeCrossCrate::No
),
rustc_attr!(
TEST, rustc_variance_of_opaques, Normal, template!(Word),
TEST, rustc_dump_variances_of_opaques, Normal, template!(Word),
WarnFollowing, EncodeCrossCrate::No
),
rustc_attr!(
@@ -1531,7 +1531,7 @@ pub struct BuiltinAttribute {
WarnFollowing, EncodeCrossCrate::No
),
rustc_attr!(
TEST, rustc_object_lifetime_default, Normal, template!(Word),
TEST, rustc_dump_object_lifetime_defaults, Normal, template!(Word),
WarnFollowing, EncodeCrossCrate::No
),
rustc_attr!(
+1 -1
View File
@@ -137,5 +137,5 @@ pub fn find_feature_issue(feature: Symbol, issue: GateIssue) -> Option<NonZero<u
pub use removed::REMOVED_LANG_FEATURES;
pub use unstable::{
DEPENDENT_FEATURES, EnabledLangFeature, EnabledLibFeature, Features, INCOMPATIBLE_FEATURES,
UNSTABLE_LANG_FEATURES,
TRACK_FEATURE, UNSTABLE_LANG_FEATURES,
};
+14 -2
View File
@@ -3,11 +3,18 @@
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};
use rustc_data_structures::AtomicRef;
use rustc_data_structures::fx::FxHashSet;
use rustc_span::{Span, Symbol, sym};
use super::{Feature, to_nonzero};
fn default_track_feature(_: Symbol) {}
/// Recording used features in the dependency graph so incremental can
/// replay used features when needed.
pub static TRACK_FEATURE: AtomicRef<fn(Symbol)> = AtomicRef::new(&(default_track_feature as _));
#[derive(PartialEq)]
enum FeatureStatus {
Default,
@@ -103,7 +110,12 @@ pub fn enabled_features_iter_stable_order(
/// Is the given feature enabled (via `#[feature(...)]`)?
pub fn enabled(&self, feature: Symbol) -> bool {
self.enabled_features.contains(&feature)
if self.enabled_features.contains(&feature) {
TRACK_FEATURE(feature);
true
} else {
false
}
}
}
@@ -124,7 +136,7 @@ macro_rules! declare_features {
impl Features {
$(
pub fn $feature(&self) -> bool {
self.enabled_features.contains(&sym::$feature)
self.enabled(sym::$feature)
}
)*
+12 -12
View File
@@ -1366,15 +1366,27 @@ pub enum AttributeKind {
/// Represents `#[rustc_dump_def_parents]`
RustcDumpDefParents,
/// Represents `#[rustc_dump_inferred_outlives]`
RustcDumpInferredOutlives,
/// Represents `#[rustc_dump_item_bounds]`
RustcDumpItemBounds,
/// Represents `#[rustc_dump_object_lifetime_defaults]`.
RustcDumpObjectLifetimeDefaults,
/// Represents `#[rustc_dump_predicates]`
RustcDumpPredicates,
/// Represents `#[rustc_dump_user_args]`
RustcDumpUserArgs,
/// Represents `#[rustc_dump_variances]`
RustcDumpVariances,
/// Represents `#[rustc_dump_variances_of_opaques]`
RustcDumpVariancesOfOpaques,
/// Represents `#[rustc_dump_vtable]`
RustcDumpVtable(Span),
@@ -1493,15 +1505,9 @@ pub enum AttributeKind {
span: Span,
},
/// Represents `#[rustc_object_lifetime_default]`.
RustcObjectLifetimeDefault,
/// Represents `#[rustc_offload_kernel]`
RustcOffloadKernel,
/// Represents `#[rustc_outlives]`
RustcOutlives,
/// Represents `#[rustc_paren_sugar]`.
RustcParenSugar(Span),
@@ -1574,12 +1580,6 @@ pub enum AttributeKind {
/// Represents `#[rustc_unsafe_specialization_marker]`.
RustcUnsafeSpecializationMarker(Span),
/// Represents `#[rustc_variance]`
RustcVariance,
/// Represents `#[rustc_variance_of_opaques]`
RustcVarianceOfOpaques,
/// Represents `#[sanitize]`
///
/// the on set and off set are distjoint since there's a third option: unset.
@@ -124,9 +124,13 @@ pub fn encode_cross_crate(&self) -> EncodeCrossCrate {
RustcDocPrimitive(..) => Yes,
RustcDummy => No,
RustcDumpDefParents => No,
RustcDumpInferredOutlives => No,
RustcDumpItemBounds => No,
RustcDumpObjectLifetimeDefaults => No,
RustcDumpPredicates => No,
RustcDumpUserArgs => No,
RustcDumpVariances => No,
RustcDumpVariancesOfOpaques => No,
RustcDumpVtable(..) => No,
RustcDynIncompatibleTrait(..) => No,
RustcEffectiveVisibility => Yes,
@@ -161,9 +165,7 @@ pub fn encode_cross_crate(&self) -> EncodeCrossCrate {
RustcNounwind => No,
RustcObjcClass { .. } => No,
RustcObjcSelector { .. } => No,
RustcObjectLifetimeDefault => No,
RustcOffloadKernel => Yes,
RustcOutlives => No,
RustcParenSugar(..) => No,
RustcPassByValue(..) => Yes,
RustcPassIndirectlyInNonRusticAbis(..) => No,
@@ -185,8 +187,6 @@ pub fn encode_cross_crate(&self) -> EncodeCrossCrate {
RustcThenThisWouldNeed(..) => No,
RustcTrivialFieldReads => Yes,
RustcUnsafeSpecializationMarker(..) => No,
RustcVariance => No,
RustcVarianceOfOpaques => No,
Sanitize { .. } => No,
ShouldPanic { .. } => No,
Stability { .. } => Yes,
@@ -997,7 +997,7 @@ fn check_type_defn<'tcx>(
item: &hir::Item<'tcx>,
all_sized: bool,
) -> Result<(), ErrorGuaranteed> {
let _ = tcx.representability(item.owner_id.def_id);
let _ = tcx.check_representability(item.owner_id.def_id);
let adt_def = tcx.adt_def(item.owner_id);
enter_wf_checking_ctxt(tcx, item.owner_id.def_id, |wfcx| {
@@ -1,4 +1,5 @@
use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::{find_attr, intravisit};
use rustc_middle::hir::nested_filter;
@@ -27,7 +28,10 @@ pub(crate) fn opaque_hidden_types(tcx: TyCtxt<'_>) {
pub(crate) fn predicates_and_item_bounds(tcx: TyCtxt<'_>) {
for id in tcx.hir_crate_items(()).owners() {
if find_attr!(tcx, id, RustcDumpPredicates) {
#[expect(deprecated)] // we don't want to unnecessarily retrieve the attrs twice in a row.
let attrs = tcx.get_all_attrs(id);
if find_attr!(attrs, RustcDumpPredicates) {
let preds = tcx.predicates_of(id).instantiate_identity(tcx).predicates;
let span = tcx.def_span(id);
@@ -37,15 +41,26 @@ pub(crate) fn predicates_and_item_bounds(tcx: TyCtxt<'_>) {
}
diag.emit();
}
if find_attr!(tcx, id, RustcDumpItemBounds) {
let bounds = tcx.item_bounds(id).instantiate_identity();
let span = tcx.def_span(id);
let mut diag = tcx.dcx().struct_span_err(span, sym::rustc_dump_item_bounds.as_str());
for bound in bounds {
diag.note(format!("{bound:?}"));
}
diag.emit();
if find_attr!(attrs, RustcDumpItemBounds) {
let name = sym::rustc_dump_item_bounds.as_str();
match tcx.def_kind(id) {
DefKind::AssocTy => {
let bounds = tcx.item_bounds(id).instantiate_identity();
let span = tcx.def_span(id);
let mut diag = tcx.dcx().struct_span_err(span, name);
for bound in bounds {
diag.note(format!("{bound:?}"));
}
diag.emit()
}
kind => tcx.dcx().span_delayed_bug(
tcx.def_span(id),
format!("attr parsing didn't report an error for `#[{name}]` on {kind:?}"),
),
};
}
}
}
@@ -735,14 +735,6 @@ pub(crate) enum CannotCaptureLateBound {
},
}
#[derive(Diagnostic)]
#[diag("{$variances}")]
pub(crate) struct VariancesOf {
#[primary_span]
pub span: Span,
pub variances: String,
}
#[derive(Diagnostic)]
#[diag("{$ty}")]
pub(crate) struct TypeOf<'tcx> {
@@ -5,7 +5,7 @@
pub(crate) fn inferred_outlives(tcx: TyCtxt<'_>) {
for id in tcx.hir_free_items() {
if !find_attr!(tcx, id.owner_id, RustcOutlives) {
if !find_attr!(tcx, id.owner_id, RustcDumpInferredOutlives) {
continue;
}
@@ -21,7 +21,7 @@ pub(crate) fn inferred_outlives(tcx: TyCtxt<'_>) {
preds.sort();
let span = tcx.def_span(id.owner_id);
let mut err = tcx.dcx().struct_span_err(span, sym::rustc_outlives.as_str());
let mut err = tcx.dcx().struct_span_err(span, sym::rustc_dump_inferred_outlives.as_str());
for pred in preds {
err.note(pred);
}
@@ -1,5 +1,6 @@
use std::fmt::Write;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::find_attr;
use rustc_middle::ty::{GenericArgs, TyCtxt};
@@ -25,23 +26,30 @@ fn format_variances(tcx: TyCtxt<'_>, def_id: LocalDefId) -> String {
pub(crate) fn variances(tcx: TyCtxt<'_>) {
let crate_items = tcx.hir_crate_items(());
if find_attr!(tcx, crate, RustcVarianceOfOpaques) {
if find_attr!(tcx, crate, RustcDumpVariancesOfOpaques) {
for id in crate_items.opaques() {
tcx.dcx().emit_err(crate::errors::VariancesOf {
span: tcx.def_span(id),
variances: format_variances(tcx, id),
});
tcx.dcx().span_err(tcx.def_span(id), format_variances(tcx, id));
}
}
for id in crate_items.free_items() {
if !find_attr!(tcx, id.owner_id, RustcVariance) {
for id in crate_items.owners() {
if !find_attr!(tcx, id, RustcDumpVariances) {
continue;
}
tcx.dcx().emit_err(crate::errors::VariancesOf {
span: tcx.def_span(id.owner_id),
variances: format_variances(tcx, id.owner_id.def_id),
});
match tcx.def_kind(id) {
DefKind::AssocFn | DefKind::Fn | DefKind::Enum | DefKind::Struct | DefKind::Union => {}
DefKind::TyAlias if tcx.type_alias_is_lazy(id) => {}
kind => {
let message = format!(
"attr parsing didn't report an error for `#[{}]` on {kind:?}",
rustc_span::sym::rustc_dump_variances,
);
tcx.dcx().span_delayed_bug(tcx.def_span(id), message);
continue;
}
}
tcx.dcx().span_err(tcx.def_span(id), format_variances(tcx, id.def_id));
}
}
+3 -3
View File
@@ -987,13 +987,13 @@ impl rustc_errors::Subdiagnostic for CastUnknownPointerSub {
fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
match self {
CastUnknownPointerSub::To(span) => {
let msg = diag.eagerly_translate(msg!("needs more type information"));
let msg = diag.eagerly_format(msg!("needs more type information"));
diag.span_label(span, msg);
let msg = diag.eagerly_translate(msg!("the type information given here is insufficient to check whether the pointer cast is valid"));
let msg = diag.eagerly_format(msg!("the type information given here is insufficient to check whether the pointer cast is valid"));
diag.note(msg);
}
CastUnknownPointerSub::From(span) => {
let msg = diag.eagerly_translate(msg!("the type information given here is insufficient to check whether the pointer cast is valid"));
let msg = diag.eagerly_format(msg!("the type information given here is insufficient to check whether the pointer cast is valid"));
diag.span_label(span, msg);
}
}
+22 -1
View File
@@ -12,8 +12,9 @@
use std::fmt;
use rustc_errors::DiagInner;
use rustc_middle::dep_graph::TaskDepsRef;
use rustc_middle::dep_graph::{DepNodeIndex, QuerySideEffect, TaskDepsRef};
use rustc_middle::ty::tls;
use rustc_span::Symbol;
fn track_span_parent(def_id: rustc_span::def_id::LocalDefId) {
tls::with_context_opt(|icx| {
@@ -51,6 +52,25 @@ fn track_diagnostic<R>(diagnostic: DiagInner, f: &mut dyn FnMut(DiagInner) -> R)
})
}
fn track_feature(feature: Symbol) {
tls::with_context_opt(|icx| {
let Some(icx) = icx else {
return;
};
let tcx = icx.tcx;
if let Some(dep_node_index) = tcx.sess.used_features.lock().get(&feature).copied() {
tcx.dep_graph.read_index(DepNodeIndex::from_u32(dep_node_index));
} else {
let dep_node_index = tcx
.dep_graph
.encode_side_effect(tcx, QuerySideEffect::CheckFeature { symbol: feature });
tcx.sess.used_features.lock().insert(feature, dep_node_index.as_u32());
tcx.dep_graph.read_index(dep_node_index);
}
})
}
/// This is a callback from `rustc_hir` as it cannot access the implicit state
/// in `rustc_middle` otherwise.
fn def_id_debug(def_id: rustc_hir::def_id::DefId, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -70,4 +90,5 @@ pub fn setup_callbacks() {
rustc_span::SPAN_TRACK.swap(&(track_span_parent as fn(_)));
rustc_hir::def_id::DEF_ID_DEBUG.swap(&(def_id_debug as fn(_, &mut fmt::Formatter<'_>) -> _));
rustc_errors::TRACK_DIAGNOSTIC.swap(&(track_diagnostic as _));
rustc_feature::TRACK_FEATURE.swap(&(track_feature as _));
}
+9
View File
@@ -58,6 +58,15 @@ pub fn link(self, sess: &Session, codegen_backend: &dyn CodegenBackend) {
}
}
});
if sess.codegen_units().as_usize() == 1 && sess.opts.unstable_opts.time_llvm_passes {
codegen_backend.print_pass_timings()
}
if sess.print_llvm_stats() {
codegen_backend.print_statistics()
}
sess.timings.end_section(sess.dcx(), TimingSection::Codegen);
if sess.opts.incremental.is_some()
+1 -1
View File
@@ -354,7 +354,7 @@ fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
.chain(repeat_n('}', closing_brackets.count))
.collect(),
));
let msg = diag.eagerly_translate(msg!(
let msg = diag.eagerly_format(msg!(
"a `match` with a single arm can preserve the drop order up to Edition 2021"
));
diag.multipart_suggestion_with_style(
+1 -1
View File
@@ -3629,7 +3629,7 @@ fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
Explicit { lifetime_name, suggestions, optional_alternative } => {
diag.arg("lifetime_name", lifetime_name);
let msg = diag.eagerly_translate(msg!("consistently use `{$lifetime_name}`"));
let msg = diag.eagerly_format(msg!("consistently use `{$lifetime_name}`"));
diag.remove_arg("lifetime_name");
diag.multipart_suggestion_with_style(
msg,
-5
View File
@@ -1088,11 +1088,6 @@
/// crate-level [`feature` attributes].
///
/// [`feature` attributes]: https://doc.rust-lang.org/nightly/unstable-book/
///
/// Note: This lint is currently not functional, see [issue #44232] for
/// more details.
///
/// [issue #44232]: https://github.com/rust-lang/rust/issues/44232
pub UNUSED_FEATURES,
Warn,
"unused features found in crate-level `#[feature]` directives"
@@ -4,7 +4,7 @@
use quote::quote;
use synstructure::Structure;
use crate::diagnostics::diagnostic_builder::DiagnosticDeriveKind;
use crate::diagnostics::diagnostic_builder::each_variant;
use crate::diagnostics::error::DiagnosticDeriveError;
/// The central struct for constructing the `into_diag` method from an annotated struct.
@@ -19,8 +19,7 @@ pub(crate) fn new(structure: Structure<'a>) -> Self {
pub(crate) fn into_tokens(self) -> TokenStream {
let DiagnosticDerive { mut structure } = self;
let kind = DiagnosticDeriveKind::Diagnostic;
let implementation = kind.each_variant(&mut structure, |mut builder, variant| {
let implementation = each_variant(&mut structure, |mut builder, variant| {
let preamble = builder.preamble(variant);
let body = builder.body(variant);
@@ -64,52 +63,3 @@ fn into_diag(
})
}
}
/// The central struct for constructing the `decorate_lint` method from an annotated struct.
pub(crate) struct LintDiagnosticDerive<'a> {
structure: Structure<'a>,
}
impl<'a> LintDiagnosticDerive<'a> {
pub(crate) fn new(structure: Structure<'a>) -> Self {
Self { structure }
}
pub(crate) fn into_tokens(self) -> TokenStream {
let LintDiagnosticDerive { mut structure } = self;
let kind = DiagnosticDeriveKind::LintDiagnostic;
let implementation = kind.each_variant(&mut structure, |mut builder, variant| {
let preamble = builder.preamble(variant);
let body = builder.body(variant);
let Some(message) = builder.primary_message() else {
return DiagnosticDeriveError::ErrorHandled.to_compile_error();
};
let message = message.diag_message(Some(variant));
let primary_message = quote! {
diag.primary_message(#message);
};
let formatting_init = &builder.formatting_init;
quote! {
#primary_message
#preamble
#formatting_init
#body
diag
}
});
structure.gen_impl(quote! {
gen impl<'__a> rustc_errors::LintDiagnostic<'__a, ()> for @Self {
#[track_caller]
fn decorate_lint<'__b>(
self,
diag: &'__b mut rustc_errors::Diag<'__a, ()>
) {
#implementation;
}
}
})
}
}
@@ -18,20 +18,54 @@
should_generate_arg, type_is_bool, type_is_unit, type_matches_path,
};
/// What kind of diagnostic is being derived - a fatal/error/warning or a lint?
#[derive(Clone, Copy, PartialEq, Eq)]
pub(crate) enum DiagnosticDeriveKind {
Diagnostic,
LintDiagnostic,
pub(crate) fn each_variant<'s, F>(structure: &mut Structure<'s>, f: F) -> TokenStream
where
F: for<'v> Fn(DiagnosticDeriveVariantBuilder, &VariantInfo<'v>) -> TokenStream,
{
let ast = structure.ast();
let span = ast.span().unwrap();
match ast.data {
syn::Data::Struct(..) | syn::Data::Enum(..) => (),
syn::Data::Union(..) => {
span_err(span, "diagnostic derives can only be used on structs and enums").emit();
}
}
if matches!(ast.data, syn::Data::Enum(..)) {
for attr in &ast.attrs {
span_err(attr.span().unwrap(), "unsupported type attribute for diagnostic derive enum")
.emit();
}
}
structure.bind_with(|_| synstructure::BindStyle::Move);
let variants = structure.each_variant(|variant| {
let span = match structure.ast().data {
syn::Data::Struct(..) => span,
// There isn't a good way to get the span of the variant, so the variant's
// name will need to do.
_ => variant.ast().ident.span().unwrap(),
};
let builder = DiagnosticDeriveVariantBuilder {
span,
field_map: build_field_mapping(variant),
formatting_init: TokenStream::new(),
message: None,
code: None,
};
f(builder, variant)
});
quote! {
match self {
#variants
}
}
}
/// Tracks persistent information required for a specific variant when building up individual calls
/// to diagnostic methods for generated diagnostic derives - both `Diagnostic` for
/// fatal/errors/warnings and `LintDiagnostic` for lints.
/// to diagnostic methods for generated diagnostic derives.
pub(crate) struct DiagnosticDeriveVariantBuilder {
/// The kind for the entire type.
pub kind: DiagnosticDeriveKind,
/// Initialization of format strings for code suggestions.
pub formatting_init: TokenStream,
@@ -51,60 +85,6 @@ pub(crate) struct DiagnosticDeriveVariantBuilder {
pub code: SpannedOption<()>,
}
impl DiagnosticDeriveKind {
/// Call `f` for the struct or for each variant of the enum, returning a `TokenStream` with the
/// tokens from `f` wrapped in an `match` expression. Emits errors for use of derive on unions
/// or attributes on the type itself when input is an enum.
pub(crate) fn each_variant<'s, F>(self, structure: &mut Structure<'s>, f: F) -> TokenStream
where
F: for<'v> Fn(DiagnosticDeriveVariantBuilder, &VariantInfo<'v>) -> TokenStream,
{
let ast = structure.ast();
let span = ast.span().unwrap();
match ast.data {
syn::Data::Struct(..) | syn::Data::Enum(..) => (),
syn::Data::Union(..) => {
span_err(span, "diagnostic derives can only be used on structs and enums").emit();
}
}
if matches!(ast.data, syn::Data::Enum(..)) {
for attr in &ast.attrs {
span_err(
attr.span().unwrap(),
"unsupported type attribute for diagnostic derive enum",
)
.emit();
}
}
structure.bind_with(|_| synstructure::BindStyle::Move);
let variants = structure.each_variant(|variant| {
let span = match structure.ast().data {
syn::Data::Struct(..) => span,
// There isn't a good way to get the span of the variant, so the variant's
// name will need to do.
_ => variant.ast().ident.span().unwrap(),
};
let builder = DiagnosticDeriveVariantBuilder {
kind: self,
span,
field_map: build_field_mapping(variant),
formatting_init: TokenStream::new(),
message: None,
code: None,
};
f(builder, variant)
});
quote! {
match self {
#variants
}
}
}
}
impl DiagnosticDeriveVariantBuilder {
pub(crate) fn primary_message(&self) -> Option<&Message> {
match self.message.as_ref() {
@@ -358,20 +338,11 @@ fn generate_inner_field_code(
// `arg` call will not be generated.
(Meta::Path(_), "skip_arg") => return Ok(quote! {}),
(Meta::Path(_), "primary_span") => {
match self.kind {
DiagnosticDeriveKind::Diagnostic => {
report_error_if_not_applied_to_span(attr, &info)?;
report_error_if_not_applied_to_span(attr, &info)?;
return Ok(quote! {
diag.span(#binding);
});
}
DiagnosticDeriveKind::LintDiagnostic => {
throw_invalid_attr!(attr, |diag| {
diag.help("the `primary_span` field attribute is not valid for lint diagnostics")
})
}
}
return Ok(quote! {
diag.span(#binding);
});
}
(Meta::Path(_), "subdiagnostic") => {
return Ok(quote! { diag.subdiagnostic(#binding); });
+2 -34
View File
@@ -6,7 +6,7 @@
mod subdiagnostic;
mod utils;
use diagnostic::{DiagnosticDerive, LintDiagnosticDerive};
use diagnostic::DiagnosticDerive;
pub(super) use msg_macro::msg_macro;
use proc_macro2::TokenStream;
use subdiagnostic::SubdiagnosticDerive;
@@ -51,38 +51,6 @@ pub(super) fn diagnostic_derive(s: Structure<'_>) -> TokenStream {
DiagnosticDerive::new(s).into_tokens()
}
/// Implements `#[derive(LintDiagnostic)]`, which allows for lints to be specified as a struct,
/// independent from the actual lint emitting code.
///
/// ```ignore (rust)
/// #[derive(LintDiagnostic)]
/// #[diag("unused attribute")]
/// pub(crate) struct UnusedAttribute {
/// #[suggestion("remove this attribute", code = "", applicability = "machine-applicable")]
/// pub this: Span,
/// #[note("attribute also specified here")]
/// pub other: Span,
/// #[warning(
/// "this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!"
/// )]
/// pub warning: bool,
/// }
/// ```
///
/// Then, later, to emit the error:
///
/// ```ignore (rust)
/// cx.emit_span_lint(UNUSED_ATTRIBUTES, span, UnusedAttribute {
/// ...
/// });
/// ```
///
/// See rustc dev guide for more examples on using the `#[derive(LintDiagnostic)]`:
/// <https://rustc-dev-guide.rust-lang.org/diagnostics/diagnostic-structs.html#reference>
pub(super) fn lint_diagnostic_derive(s: Structure<'_>) -> TokenStream {
LintDiagnosticDerive::new(s).into_tokens()
}
/// Implements `#[derive(Subdiagnostic)]`, which allows for labels, notes, helps and
/// suggestions to be specified as a structs or enums, independent from the actual diagnostics
/// emitting code or diagnostic derives.
@@ -99,7 +67,7 @@ pub(super) fn lint_diagnostic_derive(s: Structure<'_>) -> TokenStream {
/// Then, later, use the subdiagnostic in a diagnostic:
///
/// ```ignore (rust)
/// #[derive(LintDiagnostic)]
/// #[derive(Diagnostic)]
/// #[diag("unused doc comment")]
/// pub(crate) struct BuiltinUnusedDocComment<'a> {
/// pub kind: &'a str,
@@ -536,7 +536,7 @@ pub(crate) fn into_tokens(&mut self) -> Result<TokenStream, DiagnosticDeriveErro
for (kind, messages) in kind_messages {
let message = format_ident!("__message");
let message_stream = messages.diag_message(Some(self.variant));
calls.extend(quote! { let #message = #diag.eagerly_translate(#message_stream); });
calls.extend(quote! { let #message = #diag.eagerly_format(#message_stream); });
let name = format_ident!("{}{}", if span_field.is_some() { "span_" } else { "" }, kind);
let call = match kind {
-19
View File
@@ -196,25 +196,6 @@ pub fn extension(attr: TokenStream, input: TokenStream) -> TokenStream {
suggestion_hidden,
suggestion_verbose)] => diagnostics::diagnostic_derive
);
decl_derive!(
[LintDiagnostic, attributes(
// struct attributes
diag,
help,
help_once,
note,
note_once,
warning,
// field attributes
skip_arg,
primary_span,
label,
subdiagnostic,
suggestion,
suggestion_short,
suggestion_hidden,
suggestion_verbose)] => diagnostics::lint_diagnostic_derive
);
decl_derive!(
[Subdiagnostic, attributes(
// struct/variant attributes
-9
View File
@@ -144,7 +144,6 @@ struct QueryModifiers {
arena_cache: Option<Ident>,
cache_on_disk_if: Option<CacheOnDiskIf>,
cycle_delay_bug: Option<Ident>,
cycle_fatal: Option<Ident>,
cycle_stash: Option<Ident>,
depth_limit: Option<Ident>,
desc: Desc,
@@ -160,7 +159,6 @@ fn parse_query_modifiers(input: ParseStream<'_>) -> Result<QueryModifiers> {
let mut arena_cache = None;
let mut cache_on_disk_if = None;
let mut desc = None;
let mut cycle_fatal = None;
let mut cycle_delay_bug = None;
let mut cycle_stash = None;
let mut no_hash = None;
@@ -197,8 +195,6 @@ macro_rules! try_insert {
try_insert!(cache_on_disk_if = CacheOnDiskIf { modifier, block });
} else if modifier == "arena_cache" {
try_insert!(arena_cache = modifier);
} else if modifier == "cycle_fatal" {
try_insert!(cycle_fatal = modifier);
} else if modifier == "cycle_delay_bug" {
try_insert!(cycle_delay_bug = modifier);
} else if modifier == "cycle_stash" {
@@ -228,7 +224,6 @@ macro_rules! try_insert {
arena_cache,
cache_on_disk_if,
desc,
cycle_fatal,
cycle_delay_bug,
cycle_stash,
no_hash,
@@ -248,7 +243,6 @@ fn make_modifiers_stream(query: &Query, modifiers: &QueryModifiers) -> proc_macr
arena_cache,
cache_on_disk_if,
cycle_delay_bug,
cycle_fatal,
cycle_stash,
depth_limit,
desc: _,
@@ -266,8 +260,6 @@ fn make_modifiers_stream(query: &Query, modifiers: &QueryModifiers) -> proc_macr
let cycle_error_handling = if cycle_delay_bug.is_some() {
quote! { DelayBug }
} else if cycle_fatal.is_some() {
quote! { Fatal }
} else if cycle_stash.is_some() {
quote! { Stash }
} else {
@@ -407,7 +399,6 @@ macro_rules! doc_link {
doc_link!(
arena_cache,
cycle_fatal,
cycle_delay_bug,
cycle_stash,
no_hash,
+48 -24
View File
@@ -17,6 +17,7 @@
use rustc_macros::{Decodable, Encodable};
use rustc_serialize::opaque::{FileEncodeResult, FileEncoder};
use rustc_session::Session;
use rustc_span::Symbol;
use tracing::{debug, instrument};
#[cfg(debug_assertions)]
use {super::debug::EdgeFilter, std::env};
@@ -45,6 +46,11 @@ pub enum QuerySideEffect {
/// the query as green, as that query will have the side
/// effect dep node as a dependency.
Diagnostic(DiagInner),
/// Records the feature used during query execution.
/// This feature will be inserted into `sess.used_features`
/// if we mark the query as green, as that query will have
/// the side effect dep node as a dependency.
CheckFeature { symbol: Symbol },
}
#[derive(Clone)]
pub struct DepGraph {
@@ -514,29 +520,40 @@ pub fn read_index(&self, dep_node_index: DepNodeIndex) {
}
}
/// This encodes a diagnostic by creating a node with an unique index and associating
/// `diagnostic` with it, for use in the next session.
/// This encodes a side effect by creating a node with an unique index and associating
/// it with the node, for use in the next session.
#[inline]
pub fn record_diagnostic<'tcx>(&self, tcx: TyCtxt<'tcx>, diagnostic: &DiagInner) {
if let Some(ref data) = self.data {
read_deps(|task_deps| match task_deps {
TaskDepsRef::EvalAlways | TaskDepsRef::Ignore => return,
TaskDepsRef::Forbid | TaskDepsRef::Allow(..) => {
self.read_index(data.encode_diagnostic(tcx, diagnostic));
let dep_node_index = data
.encode_side_effect(tcx, QuerySideEffect::Diagnostic(diagnostic.clone()));
self.read_index(dep_node_index);
}
})
}
}
/// This forces a diagnostic node green by running its side effect. `prev_index` would
/// refer to a node created used `encode_diagnostic` in the previous session.
/// This forces a side effect node green by running its side effect. `prev_index` would
/// refer to a node created used `encode_side_effect` in the previous session.
#[inline]
pub fn force_diagnostic_node<'tcx>(
pub fn force_side_effect<'tcx>(&self, tcx: TyCtxt<'tcx>, prev_index: SerializedDepNodeIndex) {
if let Some(ref data) = self.data {
data.force_side_effect(tcx, prev_index);
}
}
#[inline]
pub fn encode_side_effect<'tcx>(
&self,
tcx: TyCtxt<'tcx>,
prev_index: SerializedDepNodeIndex,
) {
side_effect: QuerySideEffect,
) -> DepNodeIndex {
if let Some(ref data) = self.data {
data.force_diagnostic_node(tcx, prev_index);
data.encode_side_effect(tcx, side_effect)
} else {
self.next_virtual_depnode_index()
}
}
@@ -673,10 +690,14 @@ pub fn mark_debug_loaded_from_disk(&self, dep_node: DepNode) {
self.debug_loaded_from_disk.lock().insert(dep_node);
}
/// This encodes a diagnostic by creating a node with an unique index and associating
/// `diagnostic` with it, for use in the next session.
/// This encodes a side effect by creating a node with an unique index and associating
/// it with the node, for use in the next session.
#[inline]
fn encode_diagnostic<'tcx>(&self, tcx: TyCtxt<'tcx>, diagnostic: &DiagInner) -> DepNodeIndex {
fn encode_side_effect<'tcx>(
&self,
tcx: TyCtxt<'tcx>,
side_effect: QuerySideEffect,
) -> DepNodeIndex {
// Use `send_new` so we get an unique index, even though the dep node is not.
let dep_node_index = self.current.encoder.send_new(
DepNode {
@@ -684,28 +705,21 @@ fn encode_diagnostic<'tcx>(&self, tcx: TyCtxt<'tcx>, diagnostic: &DiagInner) ->
key_fingerprint: PackedFingerprint::from(Fingerprint::ZERO),
},
Fingerprint::ZERO,
// We want the side effect node to always be red so it will be forced and emit the
// diagnostic.
// We want the side effect node to always be red so it will be forced and run the
// side effect.
std::iter::once(DepNodeIndex::FOREVER_RED_NODE).collect(),
);
let side_effect = QuerySideEffect::Diagnostic(diagnostic.clone());
tcx.store_side_effect(dep_node_index, side_effect);
dep_node_index
}
/// This forces a diagnostic node green by running its side effect. `prev_index` would
/// refer to a node created used `encode_diagnostic` in the previous session.
/// This forces a side effect node green by running its side effect. `prev_index` would
/// refer to a node created used `encode_side_effect` in the previous session.
#[inline]
fn force_diagnostic_node<'tcx>(&self, tcx: TyCtxt<'tcx>, prev_index: SerializedDepNodeIndex) {
fn force_side_effect<'tcx>(&self, tcx: TyCtxt<'tcx>, prev_index: SerializedDepNodeIndex) {
with_deps(TaskDepsRef::Ignore, || {
let side_effect = tcx.load_side_effect(prev_index).unwrap();
match &side_effect {
QuerySideEffect::Diagnostic(diagnostic) => {
tcx.dcx().emit_diagnostic(diagnostic.clone());
}
}
// Use `send_and_color` as `promote_node_and_deps_to_current` expects all
// green dependencies. `send_and_color` will also prevent multiple nodes
// being encoded for concurrent calls.
@@ -720,6 +734,16 @@ fn force_diagnostic_node<'tcx>(&self, tcx: TyCtxt<'tcx>, prev_index: SerializedD
std::iter::once(DepNodeIndex::FOREVER_RED_NODE).collect(),
true,
);
match &side_effect {
QuerySideEffect::Diagnostic(diagnostic) => {
tcx.dcx().emit_diagnostic(diagnostic.clone());
}
QuerySideEffect::CheckFeature { symbol } => {
tcx.sess.used_features.lock().insert(*symbol, dep_node_index.as_u32());
}
}
// This will just overwrite the same value for concurrent calls.
tcx.store_side_effect(dep_node_index, side_effect);
})
+2 -2
View File
@@ -492,8 +492,8 @@ fn lint_level_impl(
/// - [`TyCtxt::node_lint`]
/// - `LintContext::opt_span_lint`
///
/// This function will replace `lint_level` once all `LintDiagnostic` items have been migrated to
/// `Diagnostic`.
/// This function will replace `lint_level` once all its callers have been replaced
/// with `diag_lint_level`.
#[track_caller]
pub fn diag_lint_level<'a, D: Diagnostic<'a, ()> + 'a>(
sess: &'a Session,
+10 -16
View File
@@ -4,7 +4,7 @@
use std::num::NonZero;
use rustc_ast::NodeId;
use rustc_errors::{Applicability, Diag, EmissionGuarantee, LintBuffer, LintDiagnostic, msg};
use rustc_errors::{Applicability, Diag, EmissionGuarantee, LintBuffer, msg};
use rustc_feature::GateIssue;
use rustc_hir::attrs::{DeprecatedSince, Deprecation};
use rustc_hir::def_id::{DefId, LocalDefId};
@@ -131,15 +131,8 @@ fn into_diag(
dcx: rustc_errors::DiagCtxtHandle<'a>,
level: rustc_errors::Level,
) -> Diag<'a, G> {
let mut diag = Diag::new(dcx, level, "");
self.decorate_lint(&mut diag);
diag
}
}
impl<'a, G: EmissionGuarantee> LintDiagnostic<'a, G> for Deprecated {
fn decorate_lint<'b>(self, diag: &'b mut Diag<'a, G>) {
diag.primary_message(match &self.since_kind {
let Self { sub, kind, path, note, since_kind } = self;
let mut diag = Diag::new(dcx, level, match &since_kind {
DeprecatedSinceKind::InEffect => msg!(
"use of deprecated {$kind} `{$path}`{$has_note ->
[true] : {$note}
@@ -160,21 +153,22 @@ fn decorate_lint<'b>(self, diag: &'b mut Diag<'a, G>) {
}"
)
}
});
diag.arg("kind", self.kind);
diag.arg("path", self.path);
if let DeprecatedSinceKind::InVersion(version) = self.since_kind {
})
.with_arg("kind", kind)
.with_arg("path", path);
if let DeprecatedSinceKind::InVersion(version) = since_kind {
diag.arg("version", version);
}
if let Some(note) = self.note {
if let Some(note) = note {
diag.arg("has_note", true);
diag.arg("note", note);
} else {
diag.arg("has_note", false);
}
if let Some(sub) = self.sub {
if let Some(sub) = sub {
diag.subdiagnostic(sub);
}
diag
}
}
+20 -28
View File
@@ -32,7 +32,6 @@
//! - `arena_cache`: Use an arena for in-memory caching of the query result.
//! - `cache_on_disk_if { ... }`: Cache the query result to disk if the provided block evaluates to
//! true. The query key identifier is available for use within the block, as is `tcx`.
//! - `cycle_fatal`: If a dependency cycle is detected, abort compilation with a fatal error.
//! - `cycle_delay_bug`: If a dependency cycle is detected, emit a delayed bug instead of aborting immediately.
//! - `cycle_stash`: If a dependency cycle is detected, stash the error for later handling.
//! - `no_hash`: Do not hash the query result for incremental compilation; just mark as dirty if recomputed.
@@ -149,11 +148,11 @@
// which memoizes and does dep-graph tracking, wrapping around the actual
// `Providers` that the driver creates (using several `rustc_*` crates).
//
// The result type of each query must implement `Clone`, and additionally
// `ty::query::from_cycle_error::FromCycleError`, which produces an appropriate
// The result type of each query must implement `Clone`. Additionally
// `ty::query::from_cycle_error::FromCycleError` can be implemented which produces an appropriate
// placeholder (error) value if the query resulted in a query cycle.
// Queries marked with `cycle_fatal` do not need the latter implementation,
// as they will raise a fatal error on query cycles instead.
// Queries without a `FromCycleError` implementation will raise a fatal error on query
// cycles instead.
rustc_queries! {
/// Caches the expansion of a derive proc macro, e.g. `#[derive(Serialize)]`.
/// The key is:
@@ -587,24 +586,28 @@
}
query is_panic_runtime(_: CrateNum) -> bool {
cycle_fatal
desc { "checking if the crate is_panic_runtime" }
separate_provide_extern
}
/// Checks whether a type is representable or infinitely sized
query representability(key: LocalDefId) -> rustc_middle::ty::Representability {
query check_representability(key: LocalDefId) -> rustc_middle::ty::Representability {
desc { "checking if `{}` is representable", tcx.def_path_str(key) }
// infinitely sized types will cause a cycle
// Infinitely sized types will cause a cycle. The custom `FromCycleError` impl for
// `Representability` will print a custom error about the infinite size and then abort
// compilation. (In the past we recovered and continued, but in practice that leads to
// confusing subsequent error messages about cycles that then abort.)
cycle_delay_bug
// we don't want recursive representability calls to be forced with
// We don't want recursive representability calls to be forced with
// incremental compilation because, if a cycle occurs, we need the
// entire cycle to be in memory for diagnostics
// entire cycle to be in memory for diagnostics. This means we can't
// use `ensure_ok()` with this query.
anon
}
/// An implementation detail for the `representability` query
query representability_adt_ty(key: Ty<'tcx>) -> rustc_middle::ty::Representability {
/// An implementation detail for the `check_representability` query. See that query for more
/// details, particularly on the modifiers.
query check_representability_adt_ty(key: Ty<'tcx>) -> rustc_middle::ty::Representability {
desc { "checking if `{}` is representable", key }
cycle_delay_bug
anon
@@ -838,8 +841,8 @@
///
/// E.g., for `struct Foo<'a, T> { x: &'a T }`, this would return `[T: 'a]`.
///
/// **Tip**: You can use `#[rustc_outlives]` on an item to basically print the
/// result of this query for use in UI tests or for debugging purposes.
/// **Tip**: You can use `#[rustc_dump_inferred_outlives]` on an item to basically
/// print the result of this query for use in UI tests or for debugging purposes.
query inferred_outlives_of(key: DefId) -> &'tcx [(ty::Clause<'tcx>, Span)] {
desc { "computing inferred outlives-predicates of `{}`", tcx.def_path_str(key) }
cache_on_disk_if { key.is_local() }
@@ -1046,8 +1049,8 @@
/// The list of variances corresponds to the list of (early-bound) generic
/// parameters of the item (including its parents).
///
/// **Tip**: You can use `#[rustc_variance]` on an item to basically print the
/// result of this query for use in UI tests or for debugging purposes.
/// **Tip**: You can use `#[rustc_dump_variances]` on an item to basically print
/// the result of this query for use in UI tests or for debugging purposes.
query variances_of(def_id: DefId) -> &'tcx [ty::Variance] {
desc { "computing the variances of `{}`", tcx.def_path_str(def_id) }
cache_on_disk_if { def_id.is_local() }
@@ -1318,7 +1321,6 @@
/// Return the set of (transitive) callees that may result in a recursive call to `key`,
/// if we were able to walk all callees.
query mir_callgraph_cyclic(key: LocalDefId) -> &'tcx Option<UnordSet<LocalDefId>> {
cycle_fatal
arena_cache
desc {
"computing (transitive) callees of `{}` that may recurse",
@@ -1329,7 +1331,6 @@
/// Obtain all the calls into other local functions
query mir_inliner_callees(key: ty::InstanceKind<'tcx>) -> &'tcx [(DefId, GenericArgsRef<'tcx>)] {
cycle_fatal
desc {
"computing all local function calls in `{}`",
tcx.def_path_str(key.def_id()),
@@ -1850,31 +1851,26 @@
}
query is_compiler_builtins(_: CrateNum) -> bool {
cycle_fatal
desc { "checking if the crate is_compiler_builtins" }
separate_provide_extern
}
query has_global_allocator(_: CrateNum) -> bool {
// This query depends on untracked global state in CStore
eval_always
cycle_fatal
desc { "checking if the crate has_global_allocator" }
separate_provide_extern
}
query has_alloc_error_handler(_: CrateNum) -> bool {
// This query depends on untracked global state in CStore
eval_always
cycle_fatal
desc { "checking if the crate has_alloc_error_handler" }
separate_provide_extern
}
query has_panic_handler(_: CrateNum) -> bool {
cycle_fatal
desc { "checking if the crate has_panic_handler" }
separate_provide_extern
}
query is_profiler_runtime(_: CrateNum) -> bool {
cycle_fatal
desc { "checking if a crate is `#![profiler_runtime]`" }
separate_provide_extern
}
@@ -1883,22 +1879,18 @@
cache_on_disk_if { true }
}
query required_panic_strategy(_: CrateNum) -> Option<PanicStrategy> {
cycle_fatal
desc { "getting a crate's required panic strategy" }
separate_provide_extern
}
query panic_in_drop_strategy(_: CrateNum) -> PanicStrategy {
cycle_fatal
desc { "getting a crate's configured panic-in-drop strategy" }
separate_provide_extern
}
query is_no_builtins(_: CrateNum) -> bool {
cycle_fatal
desc { "getting whether a crate has `#![no_builtins]`" }
separate_provide_extern
}
query symbol_mangling_version(_: CrateNum) -> SymbolManglingVersion {
cycle_fatal
desc { "getting a crate's symbol mangling version" }
separate_provide_extern
}
@@ -2145,7 +2137,7 @@
/// Returns the *default lifetime* to be used if a trait object type were to be passed for
/// the type parameter given by `DefId`.
///
/// **Tip**: You can use `#[rustc_object_lifetime_default]` on an item to basically
/// **Tip**: You can use `#[rustc_dump_object_lifetime_defaults]` on an item to basically
/// print the result of this query for use in UI tests or for debugging purposes.
///
/// # Examples
@@ -28,11 +28,6 @@
/// A cycle error results in a delay_bug call
pub(crate) struct cycle_delay_bug;
/// # `cycle_fatal` query modifier
///
/// A cycle error for this query aborting the compilation with a fatal error.
pub(crate) struct cycle_fatal;
/// # `cycle_stash` query modifier
///
/// A cycle error results in a stashed cycle error that can be unstashed and canceled later
@@ -57,7 +57,6 @@ pub enum ActiveKeyStatus<'tcx> {
#[derive(Copy, Clone)]
pub enum CycleErrorHandling {
Error,
Fatal,
DelayBug,
Stash,
}
+3 -4
View File
@@ -741,8 +741,7 @@ pub fn sizedness_constraint(
}
}
/// This type exists just so a `FromCycleError` impl can be made for the `check_representability`
/// query.
#[derive(Clone, Copy, Debug, HashStable)]
pub enum Representability {
Representable,
Infinite(ErrorGuaranteed),
}
pub struct Representability;
+31 -1
View File
@@ -36,7 +36,7 @@
use rustc_hir::intravisit::VisitorExt;
use rustc_hir::lang_items::LangItem;
use rustc_hir::limit::Limit;
use rustc_hir::{self as hir, HirId, Node, TraitCandidate, find_attr};
use rustc_hir::{self as hir, CRATE_HIR_ID, HirId, Node, TraitCandidate, find_attr};
use rustc_index::IndexVec;
use rustc_serialize::opaque::{FileEncodeResult, FileEncoder};
use rustc_session::Session;
@@ -1688,6 +1688,36 @@ pub fn finish(self) {
self.sess.dcx().emit_fatal(crate::error::FailedWritingFile { path: &path, error });
}
}
pub fn report_unused_features(self) {
// Collect first to avoid holding the lock while linting.
let used_features = self.sess.used_features.lock();
let unused_features = self
.features()
.enabled_features_iter_stable_order()
.filter(|(f, _)| {
!used_features.contains_key(f)
// FIXME: `restricted_std` is used to tell a standard library built
// for a platform that it doesn't know how to support. But it
// could only gate a private mod (see `__restricted_std_workaround`)
// with `cfg(not(restricted_std))`, so it cannot be recorded as used
// in downstream crates. It should never be linted, but should we
// hack this in the linter to ignore it?
&& f.as_str() != "restricted_std"
})
.collect::<Vec<_>>();
for (feature, span) in unused_features {
self.node_span_lint(
rustc_session::lint::builtin::UNUSED_FEATURES,
CRATE_HIR_ID,
span,
|lint| {
lint.primary_message(format!("feature `{}` is declared but not used", feature));
},
);
}
}
}
macro_rules! nop_lift {
@@ -61,10 +61,9 @@ pub(crate) fn provide(providers: &mut Providers) {
/// requires calling [`InhabitedPredicate::instantiate`]
fn inhabited_predicate_adt(tcx: TyCtxt<'_>, def_id: DefId) -> InhabitedPredicate<'_> {
if let Some(def_id) = def_id.as_local() {
if matches!(tcx.representability(def_id), ty::Representability::Infinite(_)) {
return InhabitedPredicate::True;
}
let _ = tcx.check_representability(def_id);
}
let adt = tcx.adt_def(def_id);
InhabitedPredicate::any(
tcx,
@@ -531,7 +531,7 @@ fn add_to_diag<G: rustc_errors::EmissionGuarantee>(self, diag: &mut rustc_errors
diag.arg("is_generated_name", self.is_generated_name);
diag.remove_arg("is_dropped_first_edition_2024");
diag.arg("is_dropped_first_edition_2024", self.is_dropped_first_edition_2024);
let msg = diag.eagerly_translate(msg!(
let msg = diag.eagerly_format(msg!(
"{$is_generated_name ->
[true] this value will be stored in a temporary; let us call it `{$name}`
*[false] `{$name}` calls a custom destructor
@@ -542,7 +542,7 @@ fn add_to_diag<G: rustc_errors::EmissionGuarantee>(self, diag: &mut rustc_errors
dtor.add_to_diag(diag);
}
let msg =
diag.eagerly_translate(msg!(
diag.eagerly_format(msg!(
"{$is_dropped_first_edition_2024 ->
[true] up until Edition 2021 `{$name}` is dropped last but will be dropped earlier in Edition 2024
*[false] `{$name}` will be dropped later as of Edition 2024
+8 -8
View File
@@ -184,8 +184,8 @@ fn check_attributes(
Attribute::Parsed(AttributeKind::TargetFeature{ attr_span, ..}) => {
self.check_target_feature(hir_id, *attr_span, target, attrs)
}
Attribute::Parsed(AttributeKind::RustcObjectLifetimeDefault) => {
self.check_object_lifetime_default(hir_id);
Attribute::Parsed(AttributeKind::RustcDumpObjectLifetimeDefaults) => {
self.check_dump_object_lifetime_defaults(hir_id);
}
&Attribute::Parsed(AttributeKind::RustcPubTransparent(attr_span)) => {
self.check_rustc_pub_transparent(attr_span, span, attrs)
@@ -319,9 +319,12 @@ fn check_attributes(
| AttributeKind::RustcDocPrimitive(..)
| AttributeKind::RustcDummy
| AttributeKind::RustcDumpDefParents
| AttributeKind::RustcDumpInferredOutlives
| AttributeKind::RustcDumpItemBounds
| AttributeKind::RustcDumpPredicates
| AttributeKind::RustcDumpUserArgs
| AttributeKind::RustcDumpVariances
| AttributeKind::RustcDumpVariancesOfOpaques
| AttributeKind::RustcDumpVtable(..)
| AttributeKind::RustcDynIncompatibleTrait(..)
| AttributeKind::RustcEffectiveVisibility
@@ -355,7 +358,6 @@ fn check_attributes(
| AttributeKind::RustcObjcClass { .. }
| AttributeKind::RustcObjcSelector { .. }
| AttributeKind::RustcOffloadKernel
| AttributeKind::RustcOutlives
| AttributeKind::RustcParenSugar(..)
| AttributeKind::RustcPassByValue (..)
| AttributeKind::RustcPassIndirectlyInNonRusticAbis(..)
@@ -376,8 +378,6 @@ fn check_attributes(
| AttributeKind::RustcThenThisWouldNeed(..)
| AttributeKind::RustcTrivialFieldReads
| AttributeKind::RustcUnsafeSpecializationMarker(..)
| AttributeKind::RustcVariance
| AttributeKind::RustcVarianceOfOpaques
| AttributeKind::ShouldPanic { .. }
| AttributeKind::TestRunner(..)
| AttributeKind::ThreadLocal
@@ -781,8 +781,8 @@ fn check_naked(&self, hir_id: HirId, target: Target) {
}
}
/// Debugging aid for `object_lifetime_default` query.
fn check_object_lifetime_default(&self, hir_id: HirId) {
/// Debugging aid for the `object_lifetime_default` query.
fn check_dump_object_lifetime_defaults(&self, hir_id: HirId) {
let tcx = self.tcx;
if let Some(owner_id) = hir_id.as_owner()
&& let Some(generics) = tcx.hir_get_generics(owner_id.def_id)
@@ -796,7 +796,7 @@ fn check_object_lifetime_default(&self, hir_id: HirId) {
ObjectLifetimeDefault::Param(def_id) => tcx.item_name(def_id).to_string(),
ObjectLifetimeDefault::Ambiguous => "Ambiguous".to_owned(),
};
tcx.dcx().emit_err(errors::ObjectLifetimeErr { span: p.span, repr });
tcx.dcx().span_err(p.span, repr);
}
}
}
-8
View File
@@ -814,14 +814,6 @@ pub(crate) struct UselessAssignment<'a> {
)]
pub(crate) struct InlineIgnoredForExported;
#[derive(Diagnostic)]
#[diag("{$repr}")]
pub(crate) struct ObjectLifetimeErr {
#[primary_span]
pub span: Span,
pub repr: String,
}
#[derive(Diagnostic)]
pub(crate) enum AttrApplication {
#[diag("attribute should be applied to an enum", code = E0517)]
@@ -1,5 +1,5 @@
use rustc_errors::{Diag, EmissionGuarantee, Subdiagnostic};
use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic};
use rustc_macros::{Diagnostic, Subdiagnostic};
use rustc_middle::ty::Ty;
use rustc_span::Span;
@@ -109,8 +109,7 @@ impl Subdiagnostic for GappedRange {
fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
let GappedRange { span, gap, first_range } = self;
// FIXME(mejrs) unfortunately `#[derive(LintDiagnostic)]`
// does not support `#[subdiagnostic(eager)]`...
// FIXME(mejrs) Use `#[subdiagnostic(eager)]` instead
let message = format!(
"this could appear to continue range `{first_range}`, but `{gap}` isn't matched by \
either of them"
@@ -131,10 +130,12 @@ pub(crate) struct NonExhaustiveOmittedPattern<'tcx> {
pub uncovered: Uncovered,
}
#[derive(LintDiagnostic)]
#[derive(Diagnostic)]
#[diag("the lint level must be set on the whole match")]
#[help("it no longer has any effect to set the lint level on an individual match arm")]
pub(crate) struct NonExhaustiveOmittedPatternLintOnArm {
#[primary_span]
pub span: Span,
#[label("remove this attribute")]
pub lint_span: Span,
#[suggestion(
+3 -7
View File
@@ -92,17 +92,13 @@ pub(crate) fn lint_nonexhaustive_missing_variants<'p, 'tcx>(
let LevelAndSource { level, src, .. } =
rcx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, arm.arm_data);
if !matches!(level, rustc_session::lint::Level::Allow) {
let decorator = NonExhaustiveOmittedPatternLintOnArm {
rcx.tcx.dcx().emit_warn(NonExhaustiveOmittedPatternLintOnArm {
span: arm.pat.data().span,
lint_span: src.span(),
suggest_lint_on_match: rcx.whole_match_span.map(|span| span.shrink_to_lo()),
lint_level: level.as_str(),
lint_name: "non_exhaustive_omitted_patterns",
};
use rustc_errors::LintDiagnostic;
let mut err = rcx.tcx.dcx().struct_span_warn(arm.pat.data().span, "");
decorator.decorate_lint(&mut err);
err.emit();
});
}
}
}
@@ -40,7 +40,7 @@ pub(crate) fn SideEffect<'tcx>() -> DepKindVTable<'tcx> {
is_eval_always: false,
key_fingerprint_style: KeyFingerprintStyle::Unit,
force_from_dep_node_fn: Some(|tcx, _, prev_index| {
tcx.dep_graph.force_diagnostic_node(tcx, prev_index);
tcx.dep_graph.force_side_effect(tcx, prev_index);
true
}),
promote_from_disk_fn: None,
@@ -131,10 +131,6 @@ fn mk_cycle<'tcx, C: QueryCache>(
let guar = error.emit();
query.value_from_cycle_error(tcx, cycle_error, guar)
}
CycleErrorHandling::Fatal => {
let guar = error.emit();
guar.raise_fatal();
}
CycleErrorHandling::DelayBug => {
let guar = error.delay_as_bug();
query.value_from_cycle_error(tcx, cycle_error, guar)
@@ -95,7 +95,7 @@ fn from_cycle_error(
let mut item_and_field_ids = Vec::new();
let mut representable_ids = FxHashSet::default();
for info in &cycle_error.cycle {
if info.frame.dep_kind == DepKind::representability
if info.frame.dep_kind == DepKind::check_representability
&& let Some(field_id) = info.frame.def_id
&& let Some(field_id) = field_id.as_local()
&& let Some(DefKind::Field) = info.frame.info.def_kind
@@ -109,7 +109,7 @@ fn from_cycle_error(
}
}
for info in &cycle_error.cycle {
if info.frame.dep_kind == DepKind::representability_adt_ty
if info.frame.dep_kind == DepKind::check_representability_adt_ty
&& let Some(def_id) = info.frame.def_id_for_ty_in_cycle
&& let Some(def_id) = def_id.as_local()
&& !item_and_field_ids.iter().any(|&(id, _)| id == def_id)
@@ -117,8 +117,10 @@ fn from_cycle_error(
representable_ids.insert(def_id);
}
}
// We used to continue here, but the cycle error printed next is actually less useful than
// the error produced by `recursive_type_error`.
let guar = recursive_type_error(tcx, item_and_field_ids, &representable_ids);
Representability::Infinite(guar)
guar.raise_fatal();
}
}
+1 -1
View File
@@ -1366,7 +1366,7 @@ fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
ItemWas::BehindFeature { feature, span } => {
let key = "feature".into();
let value = feature.into_diag_arg(&mut None);
let msg = diag.dcx.eagerly_translate_to_string(
let msg = diag.dcx.eagerly_format_to_string(
msg!("the item is gated behind the `{$feature}` feature"),
[(&key, &value)].into_iter(),
);
+6
View File
@@ -166,6 +166,11 @@ pub struct Session {
/// Used by `-Zmir-opt-bisect-limit` to assign an index to each
/// optimization-pass execution candidate during this compilation.
pub mir_opt_bisect_eval_count: AtomicUsize,
/// Enabled features that are used in the current compilation.
///
/// The value is the `DepNodeIndex` of the node that encodes the used feature.
pub used_features: Lock<FxHashMap<Symbol, u32>>,
}
#[derive(Clone, Copy)]
@@ -1096,6 +1101,7 @@ pub fn build_session(
replaced_intrinsics: FxHashSet::default(), // filled by `run_compiler`
thin_lto_supported: true, // filled by `run_compiler`
mir_opt_bisect_eval_count: AtomicUsize::new(0),
used_features: Lock::default(),
};
validate_commandline_args_with_session_available(&sess);
+4 -4
View File
@@ -1708,9 +1708,13 @@
rustc_driver,
rustc_dummy,
rustc_dump_def_parents,
rustc_dump_inferred_outlives,
rustc_dump_item_bounds,
rustc_dump_object_lifetime_defaults,
rustc_dump_predicates,
rustc_dump_user_args,
rustc_dump_variances,
rustc_dump_variances_of_opaques,
rustc_dump_vtable,
rustc_dyn_incompatible_trait,
rustc_effective_visibility,
@@ -1747,10 +1751,8 @@
rustc_nounwind,
rustc_objc_class,
rustc_objc_selector,
rustc_object_lifetime_default,
rustc_offload_kernel,
rustc_on_unimplemented,
rustc_outlives,
rustc_paren_sugar,
rustc_partition_codegened,
rustc_partition_reused,
@@ -1780,8 +1782,6 @@
rustc_then_this_would_need,
rustc_trivial_field_reads,
rustc_unsafe_specialization_marker,
rustc_variance,
rustc_variance_of_opaques,
rustdoc,
rustdoc_internals,
rustdoc_missing_doc_code_examples,
+8 -10
View File
@@ -456,7 +456,7 @@ fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
// See https://github.com/rust-lang/rust/issues/143872 for details.
diag.store_args();
diag.arg("requirement", requirement);
let msg = diag.eagerly_translate(msg!(
let msg = diag.eagerly_format(msg!(
"...so that the {$requirement ->
[method_compat] method type is compatible with trait
[type_compat] associated type is compatible with trait
@@ -482,7 +482,7 @@ fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
// *terrible*.
diag.store_args();
diag.arg("requirement", requirement);
let msg = diag.eagerly_translate(msg!(
let msg = diag.eagerly_format(msg!(
"...so that {$requirement ->
[method_compat] method type is compatible with trait
[type_compat] associated type is compatible with trait
@@ -1174,7 +1174,7 @@ fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
type_param_span
.push_span_label(span, msg!("consider borrowing this type parameter in the trait"));
}
let msg = diag.eagerly_translate(msg!("the lifetime requirements from the `impl` do not correspond to the requirements in the `trait`"));
let msg = diag.eagerly_format(msg!("the lifetime requirements from the `impl` do not correspond to the requirements in the `trait`"));
diag.span_help(type_param_span, msg);
}
}
@@ -1218,10 +1218,9 @@ fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
self.ident.span,
msg!("calling this method introduces the `impl`'s `'static` requirement"),
);
let msg = diag.eagerly_translate(msg!("the used `impl` has a `'static` requirement"));
let msg = diag.eagerly_format(msg!("the used `impl` has a `'static` requirement"));
diag.span_note(multi_span, msg);
let msg =
diag.eagerly_translate(msg!("consider relaxing the implicit `'static` requirement"));
let msg = diag.eagerly_format(msg!("consider relaxing the implicit `'static` requirement"));
diag.span_suggestion_verbose(
self.span.shrink_to_hi(),
msg,
@@ -1284,9 +1283,8 @@ fn add_to_diag<G: EmissionGuarantee>(mut self, diag: &mut Diag<'_, G>) {
);
}
self.span.push_span_label(self.cause_span, msg!("because of this returned expression"));
let msg = diag.eagerly_translate(msg!(
"\"`'static` lifetime requirement introduced by the return type"
));
let msg = diag
.eagerly_format(msg!("\"`'static` lifetime requirement introduced by the return type"));
diag.span_note(self.span, msg);
}
}
@@ -1727,7 +1725,7 @@ impl Subdiagnostic for SuggestTuplePatternMany {
fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
diag.arg("path", self.path);
let message =
diag.eagerly_translate(msg!("try wrapping the pattern in a variant of `{$path}`"));
diag.eagerly_format(msg!("try wrapping the pattern in a variant of `{$path}`"));
diag.multipart_suggestions(
message,
self.compatible_variants.into_iter().map(|variant| {
@@ -169,7 +169,7 @@ fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
diag.arg("desc_kind", self.desc.kind);
diag.arg("desc_arg", self.desc.arg);
let msg = diag.eagerly_translate(msg!(
let msg = diag.eagerly_format(msg!(
"{$pref_kind ->
*[should_not_happen] [{$pref_kind}]
[ref_valid_for] ...the reference is valid for
+42 -40
View File
@@ -6,69 +6,71 @@
use rustc_span::def_id::LocalDefId;
pub(crate) fn provide(providers: &mut Providers) {
*providers =
Providers { representability, representability_adt_ty, params_in_repr, ..*providers };
}
macro_rules! rtry {
($e:expr) => {
match $e {
e @ Representability::Infinite(_) => return e,
Representability::Representable => {}
}
*providers = Providers {
check_representability,
check_representability_adt_ty,
params_in_repr,
..*providers
};
}
fn representability(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Representability {
fn check_representability(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Representability {
match tcx.def_kind(def_id) {
DefKind::Struct | DefKind::Union | DefKind::Enum => {
for variant in tcx.adt_def(def_id).variants() {
for field in variant.fields.iter() {
rtry!(tcx.representability(field.did.expect_local()));
let _ = tcx.check_representability(field.did.expect_local());
}
}
Representability::Representable
}
DefKind::Field => representability_ty(tcx, tcx.type_of(def_id).instantiate_identity()),
DefKind::Field => {
check_representability_ty(tcx, tcx.type_of(def_id).instantiate_identity());
}
def_kind => bug!("unexpected {def_kind:?}"),
}
Representability
}
fn representability_ty<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Representability {
fn check_representability_ty<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) {
match *ty.kind() {
ty::Adt(..) => tcx.representability_adt_ty(ty),
// This one must be a query rather than a vanilla `check_representability_adt_ty` call. See
// the comment on `check_representability_adt_ty` below for why.
ty::Adt(..) => {
let _ = tcx.check_representability_adt_ty(ty);
}
// FIXME(#11924) allow zero-length arrays?
ty::Array(ty, _) => representability_ty(tcx, ty),
ty::Array(ty, _) => {
check_representability_ty(tcx, ty);
}
ty::Tuple(tys) => {
for ty in tys {
rtry!(representability_ty(tcx, ty));
check_representability_ty(tcx, ty);
}
Representability::Representable
}
_ => Representability::Representable,
_ => {}
}
}
/*
The reason for this being a separate query is very subtle:
Consider this infinitely sized struct: `struct Foo(Box<Foo>, Bar<Foo>)`:
When calling representability(Foo), a query cycle will occur:
representability(Foo)
-> representability_adt_ty(Bar<Foo>)
-> representability(Foo)
For the diagnostic output (in `Value::from_cycle_error`), we want to detect that
the `Foo` in the *second* field of the struct is culpable. This requires
traversing the HIR of the struct and calling `params_in_repr(Bar)`. But we can't
call params_in_repr for a given type unless it is known to be representable.
params_in_repr will cycle/panic on infinitely sized types. Looking at the query
cycle above, we know that `Bar` is representable because
representability_adt_ty(Bar<..>) is in the cycle and representability(Bar) is
*not* in the cycle.
*/
fn representability_adt_ty<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Representability {
// The reason for this being a separate query is very subtle. Consider this
// infinitely sized struct: `struct Foo(Box<Foo>, Bar<Foo>)`. When calling
// check_representability(Foo), a query cycle will occur:
//
// check_representability(Foo)
// -> check_representability_adt_ty(Bar<Foo>)
// -> check_representability(Foo)
//
// For the diagnostic output (in `Value::from_cycle_error`), we want to detect
// that the `Foo` in the *second* field of the struct is culpable. This
// requires traversing the HIR of the struct and calling `params_in_repr(Bar)`.
// But we can't call params_in_repr for a given type unless it is known to be
// representable. params_in_repr will cycle/panic on infinitely sized types.
// Looking at the query cycle above, we know that `Bar` is representable
// because `check_representability_adt_ty(Bar<..>)` is in the cycle and
// `check_representability(Bar)` is *not* in the cycle.
fn check_representability_adt_ty<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Representability {
let ty::Adt(adt, args) = ty.kind() else { bug!("expected adt") };
if let Some(def_id) = adt.did().as_local() {
rtry!(tcx.representability(def_id));
let _ = tcx.check_representability(def_id);
}
// At this point, we know that the item of the ADT type is representable;
// but the type parameters may cause a cycle with an upstream type
@@ -76,11 +78,11 @@ fn representability_adt_ty<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Representab
for (i, arg) in args.iter().enumerate() {
if let ty::GenericArgKind::Type(ty) = arg.kind() {
if params_in_repr.contains(i as u32) {
rtry!(representability_ty(tcx, ty));
check_representability_ty(tcx, ty);
}
}
}
Representability::Representable
Representability
}
fn params_in_repr(tcx: TyCtxt<'_>, def_id: LocalDefId) -> DenseBitSet<u32> {
+3 -4
View File
@@ -116,11 +116,10 @@ fn adt_sizedness_constraint<'tcx>(
tcx: TyCtxt<'tcx>,
(def_id, sizedness): (DefId, SizedTraitKind),
) -> Option<ty::EarlyBinder<'tcx, Ty<'tcx>>> {
if let Some(def_id) = def_id.as_local()
&& let ty::Representability::Infinite(_) = tcx.representability(def_id)
{
return None;
if let Some(def_id) = def_id.as_local() {
let _ = tcx.check_representability(def_id);
}
let def = tcx.adt_def(def_id);
if !def.is_struct() {
@@ -693,6 +693,7 @@ pub fn clear(&mut self) {
/// map.insert(1, "a");
/// ```
#[unstable(feature = "btreemap_alloc", issue = "32838")]
#[must_use]
pub const fn new_in(alloc: A) -> BTreeMap<K, V, A> {
BTreeMap { root: None, length: 0, alloc: ManuallyDrop::new(alloc), _marker: PhantomData }
}
@@ -361,6 +361,7 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// let mut set: BTreeSet<i32> = BTreeSet::new_in(Global);
/// ```
#[unstable(feature = "btreemap_alloc", issue = "32838")]
#[must_use]
pub const fn new_in(alloc: A) -> BTreeSet<T, A> {
BTreeSet { map: BTreeMap::new_in(alloc) }
}
-1
View File
@@ -1,6 +1,5 @@
#![feature(allocator_api)]
#![feature(binary_heap_pop_if)]
#![feature(btree_merge)]
#![feature(const_heap)]
#![feature(deque_extend_front)]
#![feature(iter_array_chunks)]
+5 -1
View File
@@ -1521,7 +1521,11 @@ pub const fn algebraic_rem(self, rhs: f16) -> f16 {
// Functions in this module fall into `core_float_math`
// #[unstable(feature = "core_float_math", issue = "137578")]
#[cfg(not(test))]
#[doc(test(attr(feature(cfg_target_has_reliable_f16_f128), expect(internal_features))))]
#[doc(test(attr(
feature(cfg_target_has_reliable_f16_f128),
expect(internal_features),
allow(unused_features)
)))]
impl f16 {
/// Returns the largest integer less than or equal to `self`.
///
+1
View File
@@ -393,6 +393,7 @@ pub mod consts {
pub const LN_10: f32 = 2.30258509299404568401799145468436421_f32;
}
#[doc(test(attr(allow(unused_features))))]
impl f32 {
/// The radix or base of the internal representation of `f32`.
#[stable(feature = "assoc_int_consts", since = "1.43.0")]
+1
View File
@@ -393,6 +393,7 @@ pub mod consts {
pub const LN_10: f64 = 2.30258509299404568401799145468436421_f64;
}
#[doc(test(attr(allow(unused_features))))]
impl f64 {
/// The radix or base of the internal representation of `f64`.
#[stable(feature = "assoc_int_consts", since = "1.43.0")]
-1
View File
@@ -50,7 +50,6 @@
#![feature(f16)]
#![feature(f128)]
#![feature(float_algebraic)]
#![feature(float_bits_const)]
#![feature(float_exact_integer_constants)]
#![feature(float_gamma)]
#![feature(float_minimum_maximum)]
+4
View File
@@ -357,6 +357,7 @@ impl<K, V, S> HashMap<K, V, S> {
/// map.insert(1, 2);
/// ```
#[inline]
#[must_use]
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
#[rustc_const_stable(feature = "const_collections_with_hasher", since = "1.85.0")]
pub const fn with_hasher(hash_builder: S) -> HashMap<K, V, S> {
@@ -389,6 +390,7 @@ pub const fn with_hasher(hash_builder: S) -> HashMap<K, V, S> {
/// map.insert(1, 2);
/// ```
#[inline]
#[must_use]
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> HashMap<K, V, S> {
HashMap { base: base::HashMap::with_capacity_and_hasher(capacity, hasher) }
@@ -409,6 +411,7 @@ impl<K, V, S, A: Allocator> HashMap<K, V, S, A> {
/// The `hash_builder` passed should implement the [`BuildHasher`] trait for
/// the `HashMap` to be useful, see its documentation for details.
#[inline]
#[must_use]
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn with_hasher_in(hash_builder: S, alloc: A) -> Self {
HashMap { base: base::HashMap::with_hasher_in(hash_builder, alloc) }
@@ -430,6 +433,7 @@ pub fn with_hasher_in(hash_builder: S, alloc: A) -> Self {
/// the `HashMap` to be useful, see its documentation for details.
///
#[inline]
#[must_use]
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn with_capacity_and_hasher_in(capacity: usize, hash_builder: S, alloc: A) -> Self {
HashMap { base: base::HashMap::with_capacity_and_hasher_in(capacity, hash_builder, alloc) }
+4
View File
@@ -229,6 +229,7 @@ impl<T, S> HashSet<T, S> {
/// set.insert(2);
/// ```
#[inline]
#[must_use]
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
#[rustc_const_stable(feature = "const_collections_with_hasher", since = "1.85.0")]
pub const fn with_hasher(hasher: S) -> HashSet<T, S> {
@@ -261,6 +262,7 @@ pub const fn with_hasher(hasher: S) -> HashSet<T, S> {
/// set.insert(1);
/// ```
#[inline]
#[must_use]
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> HashSet<T, S> {
HashSet { base: base::HashSet::with_capacity_and_hasher(capacity, hasher) }
@@ -281,6 +283,7 @@ impl<T, S, A: Allocator> HashSet<T, S, A> {
/// The `hash_builder` passed should implement the [`BuildHasher`] trait for
/// the `HashSet` to be useful, see its documentation for details.
#[inline]
#[must_use]
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn with_hasher_in(hasher: S, alloc: A) -> HashSet<T, S, A> {
HashSet { base: base::HashSet::with_hasher_in(hasher, alloc) }
@@ -301,6 +304,7 @@ pub fn with_hasher_in(hasher: S, alloc: A) -> HashSet<T, S, A> {
/// The `hash_builder` passed should implement the [`BuildHasher`] trait for
/// the `HashSet` to be useful, see its documentation for details.
#[inline]
#[must_use]
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn with_capacity_and_hasher_in(capacity: usize, hasher: S, alloc: A) -> HashSet<T, S, A> {
HashSet { base: base::HashSet::with_capacity_and_hasher_in(capacity, hasher, alloc) }
+1
View File
@@ -16,6 +16,7 @@
use crate::sys::cmath;
#[cfg(not(test))]
#[doc(test(attr(allow(unused_features))))]
impl f16 {
/// Raises a number to a floating point power.
///
+24 -13
View File
@@ -93,7 +93,7 @@
use crate::rc::Rc;
use crate::str::FromStr;
use crate::sync::Arc;
use crate::sys::path::{HAS_PREFIXES, MAIN_SEP_STR, is_sep_byte, is_verbatim_sep, parse_prefix};
use crate::sys::path::{HAS_PREFIXES, is_sep_byte, is_verbatim_sep, parse_prefix};
use crate::{cmp, fmt, fs, io, sys};
////////////////////////////////////////////////////////////////////////////////
@@ -266,22 +266,33 @@ fn has_implicit_root(&self) -> bool {
/// ```
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_separator(c: char) -> bool {
#[rustc_const_unstable(feature = "const_path_separators", issue = "153106")]
pub const fn is_separator(c: char) -> bool {
c.is_ascii() && is_sep_byte(c as u8)
}
/// The primary separator of path components for the current platform.
///
/// For example, `/` on Unix and `\` on Windows.
/// All path separators recognized on the current platform, represented as [`char`]s; for example,
/// this is `&['/'][..]` on Unix and `&['\\', '/'][..]` on Windows. The [primary
/// separator](MAIN_SEPARATOR) is always element 0 of the slice.
#[unstable(feature = "const_path_separators", issue = "153106")]
pub const SEPARATORS: &[char] = crate::sys::path::SEPARATORS;
/// All path separators recognized on the current platform, represented as [`&str`]s; for example,
/// this is `&["/"][..]` on Unix and `&["\\", "/"][..]` on Windows. The [primary
/// separator](MAIN_SEPARATOR_STR) is always element 0 of the slice.
#[unstable(feature = "const_path_separators", issue = "153106")]
pub const SEPARATORS_STR: &[&str] = crate::sys::path::SEPARATORS_STR;
/// The primary separator of path components for the current platform, represented as a [`char`];
/// for example, this is `'/'` on Unix and `'\\'` on Windows.
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "path_main_separator")]
pub const MAIN_SEPARATOR: char = crate::sys::path::MAIN_SEP;
pub const MAIN_SEPARATOR: char = SEPARATORS[0];
/// The primary separator of path components for the current platform.
///
/// For example, `/` on Unix and `\` on Windows.
/// The primary separator of path components for the current platform, represented as a [`&str`];
/// for example, this is `"/"` on Unix and `"\\"` on Windows.
#[stable(feature = "main_separator_str", since = "1.68.0")]
pub const MAIN_SEPARATOR_STR: &str = crate::sys::path::MAIN_SEP_STR;
pub const MAIN_SEPARATOR_STR: &str = SEPARATORS_STR[0];
////////////////////////////////////////////////////////////////////////////////
// Misc helpers
@@ -562,7 +573,7 @@ impl<'a> Component<'a> {
pub fn as_os_str(self) -> &'a OsStr {
match self {
Component::Prefix(p) => p.as_os_str(),
Component::RootDir => OsStr::new(MAIN_SEP_STR),
Component::RootDir => OsStr::new(MAIN_SEPARATOR_STR),
Component::CurDir => OsStr::new("."),
Component::ParentDir => OsStr::new(".."),
Component::Normal(path) => path,
@@ -1379,7 +1390,7 @@ fn _push(&mut self, path: &Path) {
for c in buf {
if need_sep && c != Component::RootDir {
res.push(MAIN_SEP_STR);
res.push(MAIN_SEPARATOR_STR);
}
res.push(c.as_os_str());
@@ -1402,7 +1413,7 @@ fn _push(&mut self, path: &Path) {
// `path` is a pure relative path
} else if need_sep {
self.inner.push(MAIN_SEP_STR);
self.inner.push(MAIN_SEPARATOR_STR);
}
self.inner.push(path);
+2
View File
@@ -1381,6 +1381,7 @@ impl Output {
/// # Examples
///
/// ```
/// # #![allow(unused_features)]
/// #![feature(exit_status_error)]
/// # #[cfg(all(unix, not(target_os = "android"), not(all(target_vendor = "apple", not(target_os = "macos")))))] {
/// use std::process::Command;
@@ -1960,6 +1961,7 @@ impl crate::sealed::Sealed for ExitStatusError {}
pub struct ExitStatusError(imp::ExitStatusError);
#[unstable(feature = "exit_status_error", issue = "84908")]
#[doc(test(attr(allow(unused_features))))]
impl ExitStatusError {
/// Reports the exit code, if applicable, from an `ExitStatusError`.
///
+3 -8
View File
@@ -5,25 +5,20 @@
use crate::sys::helpers::run_path_with_cstr;
use crate::{io, ptr};
#[inline]
pub fn is_sep_byte(b: u8) -> bool {
b == b'/' || b == b'\\'
}
path_separator_bytes!(b'/', b'\\');
/// Cygwin always prefers `/` over `\`, and it always converts all `/` to `\`
/// internally when calling Win32 APIs. Therefore, the server component of path
/// `\\?\UNC\localhost/share` is `localhost/share` on Win32, but `localhost`
/// on Cygwin.
#[inline]
pub fn is_verbatim_sep(b: u8) -> bool {
b == b'/' || b == b'\\'
pub const fn is_verbatim_sep(b: u8) -> bool {
is_sep_byte(b)
}
pub use super::windows_prefix::parse_prefix;
pub const HAS_PREFIXES: bool = true;
pub const MAIN_SEP_STR: &str = "/";
pub const MAIN_SEP: char = '/';
unsafe extern "C" {
// Doc: https://cygwin.com/cygwin-api/func-cygwin-conv-path.html
+19
View File
@@ -1,3 +1,22 @@
// There's a lot of necessary redundancy in separator definition. Consolidated into a macro to
// prevent transcription errors.
macro_rules! path_separator_bytes {
    // Takes one or more ASCII byte literals (e.g. `b'/', b'\\'`); element 0 is
    // the platform's primary separator. Expands to the `SEPARATORS`,
    // `SEPARATORS_STR`, and `is_sep_byte` items each platform `path` module
    // must provide.
    ($($sep:literal),+) => (
        // All recognized separator bytes, widened to `char`s.
        pub const SEPARATORS: &[char] = &[$($sep as char,)+];
        // The same separators as one-byte `&str`s. A single byte is valid
        // UTF-8 iff it is ASCII, so the `from_utf8` match rejects non-ASCII
        // separator bytes; because this is a `const`, the `panic!` fires
        // during const evaluation, i.e. at compile time.
        pub const SEPARATORS_STR: &[&str] = &[$(
            match str::from_utf8(&[$sep]) {
                Ok(s) => s,
                Err(_) => panic!("path_separator_bytes must be ASCII bytes"),
            }
        ),+];
        // True iff `b` equals any of the declared separator bytes.
        #[inline]
        pub const fn is_sep_byte(b: u8) -> bool {
            // `||` lexes as a single token, so it is a legal repetition
            // separator: expands to `b == SEP1 || b == SEP2 || ...`.
            $(b == $sep) ||+
        }
    )
}
cfg_select! {
target_os = "windows" => {
mod windows;
+3 -8
View File
@@ -3,14 +3,11 @@
use crate::path::{Path, PathBuf, Prefix};
use crate::sys::unsupported;
#[inline]
pub fn is_sep_byte(b: u8) -> bool {
b == b'/'
}
path_separator_bytes!(b'/');
#[inline]
pub fn is_verbatim_sep(b: u8) -> bool {
b == b'/'
pub const fn is_verbatim_sep(b: u8) -> bool {
is_sep_byte(b)
}
pub fn parse_prefix(_: &OsStr) -> Option<Prefix<'_>> {
@@ -18,8 +15,6 @@ pub fn parse_prefix(_: &OsStr) -> Option<Prefix<'_>> {
}
pub const HAS_PREFIXES: bool = false;
pub const MAIN_SEP_STR: &str = "/";
pub const MAIN_SEP: char = '/';
pub(crate) fn absolute(_path: &Path) -> io::Result<PathBuf> {
unsupported()
+4 -9
View File
@@ -5,17 +5,14 @@
use crate::sys::pal::helpers;
use crate::sys::unsupported_err;
path_separator_bytes!(b'\\');
const FORWARD_SLASH: u8 = b'/';
const COLON: u8 = b':';
#[inline]
pub fn is_sep_byte(b: u8) -> bool {
b == b'\\'
}
#[inline]
pub fn is_verbatim_sep(b: u8) -> bool {
b == b'\\'
pub const fn is_verbatim_sep(b: u8) -> bool {
is_sep_byte(b)
}
pub fn parse_prefix(_: &OsStr) -> Option<Prefix<'_>> {
@@ -23,8 +20,6 @@ pub fn parse_prefix(_: &OsStr) -> Option<Prefix<'_>> {
}
pub const HAS_PREFIXES: bool = true;
pub const MAIN_SEP_STR: &str = "\\";
pub const MAIN_SEP: char = '\\';
/// UEFI paths can be of 4 types:
///
+3 -8
View File
@@ -2,14 +2,11 @@
use crate::path::{Path, PathBuf, Prefix};
use crate::{env, io};
#[inline]
pub fn is_sep_byte(b: u8) -> bool {
b == b'/'
}
path_separator_bytes!(b'/');
#[inline]
pub fn is_verbatim_sep(b: u8) -> bool {
b == b'/'
pub const fn is_verbatim_sep(b: u8) -> bool {
is_sep_byte(b)
}
#[inline]
@@ -18,8 +15,6 @@ pub fn parse_prefix(_: &OsStr) -> Option<Prefix<'_>> {
}
pub const HAS_PREFIXES: bool = false;
pub const MAIN_SEP_STR: &str = "/";
pub const MAIN_SEP: char = '/';
/// Make a POSIX path absolute without changing its semantics.
pub(crate) fn absolute(path: &Path) -> io::Result<PathBuf> {
@@ -4,14 +4,11 @@
use crate::path::{Path, PathBuf, Prefix};
use crate::sys::unsupported;
#[inline]
pub fn is_sep_byte(b: u8) -> bool {
b == b'\\'
}
path_separator_bytes!(b'\\');
#[inline]
pub fn is_verbatim_sep(b: u8) -> bool {
b == b'\\'
pub const fn is_verbatim_sep(b: u8) -> bool {
is_sep_byte(b)
}
pub fn parse_prefix(_: &OsStr) -> Option<Prefix<'_>> {
@@ -19,8 +16,6 @@ pub fn parse_prefix(_: &OsStr) -> Option<Prefix<'_>> {
}
pub const HAS_PREFIXES: bool = true;
pub const MAIN_SEP_STR: &str = "\\";
pub const MAIN_SEP: char = '\\';
pub(crate) fn absolute(_path: &Path) -> io::Result<PathBuf> {
unsupported()
+3 -8
View File
@@ -9,9 +9,9 @@
pub use super::windows_prefix::parse_prefix;
path_separator_bytes!(b'\\', b'/');
pub const HAS_PREFIXES: bool = true;
pub const MAIN_SEP_STR: &str = "\\";
pub const MAIN_SEP: char = '\\';
/// A null terminated wide string.
#[repr(transparent)]
@@ -48,12 +48,7 @@ pub fn with_native_path<T>(path: &Path, f: &dyn Fn(&WCStr) -> io::Result<T>) ->
}
#[inline]
pub fn is_sep_byte(b: u8) -> bool {
b == b'/' || b == b'\\'
}
#[inline]
pub fn is_verbatim_sep(b: u8) -> bool {
pub const fn is_verbatim_sep(b: u8) -> bool {
b == b'\\'
}
@@ -275,16 +275,16 @@ Here are some notable ones:
|----------------|-------------|
| `rustc_def_path` | Dumps the [`def_path_str`] of an item. |
| `rustc_dump_def_parents` | Dumps the chain of `DefId` parents of certain definitions. |
| `rustc_dump_inferred_outlives` | Dumps implied bounds of an item. More precisely, the [`inferred_outlives_of`] an item. |
| `rustc_dump_item_bounds` | Dumps the [`item_bounds`] of an item. |
| `rustc_dump_object_lifetime_defaults` | Dumps the [object lifetime defaults] of an item. |
| `rustc_dump_predicates` | Dumps the [`predicates_of`] an item. |
| `rustc_dump_variances` | Dumps the [variances] of an item. |
| `rustc_dump_vtable` | Dumps the vtable layout of an impl, or a type alias of a dyn type. |
| `rustc_hidden_type_of_opaques` | Dumps the [hidden type of each opaque types][opaq] in the crate. |
| `rustc_layout` | [See this section](#debugging-type-layouts). |
| `rustc_object_lifetime_default` | Dumps the [object lifetime defaults] of an item. |
| `rustc_outlives` | Dumps implied bounds of an item. More precisely, the [`inferred_outlives_of`] an item. |
| `rustc_regions` | Dumps NLL closure region requirements. |
| `rustc_symbol_name` | Dumps the mangled & demangled [`symbol_name`] of an item. |
| `rustc_variances` | Dumps the [variances] of an item. |
Right below you can find elaborate explainers on a selected few.
@@ -1,6 +1,6 @@
# Diagnostic and subdiagnostic structs
rustc has three diagnostic traits that can be used to create diagnostics:
`Diagnostic`, `LintDiagnostic`, and `Subdiagnostic`.
rustc has two diagnostic traits that can be used to create diagnostics:
`Diagnostic` and `Subdiagnostic`.
For simple diagnostics,
derived impls can be used, e.g. `#[derive(Diagnostic)]`. They are only suitable for simple diagnostics that
@@ -8,12 +8,12 @@ don't require much logic in deciding whether or not to add additional subdiagnos
In cases where diagnostics require more complex or dynamic behavior, such as conditionally adding subdiagnostics,
customizing the rendering logic, or selecting messages at runtime, you will need to manually implement
the corresponding trait (`Diagnostic`, `LintDiagnostic`, or `Subdiagnostic`).
the corresponding trait (`Diagnostic` or `Subdiagnostic`).
This approach provides greater flexibility and is recommended for diagnostics that go beyond simple, static structures.
Diagnostic can be translated into different languages.
## `#[derive(Diagnostic)]` and `#[derive(LintDiagnostic)]`
## `#[derive(Diagnostic)]`
Consider the [definition][defn] of the "field already declared" diagnostic shown below:
@@ -123,8 +123,8 @@ tcx.dcx().emit_err(FieldAlreadyDeclared {
});
```
### Reference for `#[derive(Diagnostic)]` and `#[derive(LintDiagnostic)]`
`#[derive(Diagnostic)]` and `#[derive(LintDiagnostic)]` support the following attributes:
### Reference for `#[derive(Diagnostic)]`
`#[derive(Diagnostic)]` supports the following attributes:
- `#[diag("message", code = "...")]`
- _Applied to struct or enum variant._
@@ -171,7 +171,6 @@ tcx.dcx().emit_err(FieldAlreadyDeclared {
- Adds the subdiagnostic represented by the subdiagnostic struct.
- `#[primary_span]` (_Optional_)
- _Applied to `Span` fields on `Subdiagnostic`s.
Not used for `LintDiagnostic`s._
- Indicates the primary span of the diagnostic.
- `#[skip_arg]` (_Optional_)
- _Applied to any field._
@@ -32,7 +32,7 @@ There are two ways of writing translatable diagnostics:
deciding to emit subdiagnostics and can therefore be represented as diagnostic structs).
See [the diagnostic and subdiagnostic structs documentation](./diagnostic-structs.md).
2. Using typed identifiers with `Diag` APIs (in
`Diagnostic` or `Subdiagnostic` or `LintDiagnostic` implementations).
`Diagnostic` or `Subdiagnostic` implementations).
When adding or changing a translatable diagnostic,
you don't need to worry about the translations.
@@ -5,7 +5,7 @@
use rustc_data_structures::sync::Lock;
use rustc_errors::emitter::Emitter;
use rustc_errors::translation::format_diag_message;
use rustc_errors::formatting::format_diag_message;
use rustc_errors::{Applicability, DiagCtxt, DiagInner};
use rustc_parse::{source_str_to_stream, unwrap_or_emit_fatal};
use rustc_resolve::rustdoc::source_span_for_markdown_range;
+2
View File
@@ -189,6 +189,8 @@ you can visualize in [Perfetto](https://ui.perfetto.dev/). For example:
MIRI_TRACING=1 ./miri run --features=tracing tests/pass/hello.rs
```
See [doc/tracing.md](./doc/tracing.md) for more information.
### UI testing
We use ui-testing in Miri, meaning we generate `.stderr` and `.stdout` files for the output
+9 -3
View File
@@ -2,6 +2,9 @@
Miri can be traced to understand how much time is spent in its various components (e.g. borrow tracker, data race checker, etc.). When tracing is enabled, running Miri will create a `.json` trace file that can be opened and analyzed in [Perfetto](https://ui.perfetto.dev/). For any questions regarding this documentation you may contact [Stypox](https://rust-lang.zulipchat.com/#narrow/dm/627563-Stypox) on Zulip.
> [!WARNING]
> Tracing in Miri at the moment is broken due to two bugs in tracing libraries: https://github.com/tokio-rs/tracing/pull/3392 and https://github.com/davidbarsky/tracing-tree/issues/93. Also see https://github.com/rust-lang/miri/issues/4752.
## Obtaining a trace file
### From the Miri codebase
@@ -240,7 +243,7 @@ let _trace = enter_trace_span!(M, "borrow_tracker", borrow_tracker = "on_stack_p
### `tracing_separate_thread` parameter
Miri saves traces using the `tracing_chrome` `tracing::Layer` so that they can be visualized in Perfetto. To instruct `tracing_chrome` to put some spans on a separate trace thread/line than other spans when viewed in Perfetto, you can pass `tracing_separate_thread = tracing::field::Empty` to the tracing macros. This is useful to separate out spans which just indicate the current step or program frame being processed by the interpreter. As explained in [The timeline](#the-timeline), those spans end up under the "Global Legacy Events" track. You should use a value of `tracing::field::Empty` so that other tracing layers (e.g. the logger) will ignore the `tracing_separate_thread` field. For example:
Miri saves traces using the `tracing_chrome` `tracing::Layer` so that they can be visualized in Perfetto. To instruct `tracing_chrome` to put some spans on a separate trace thread/line than other spans when viewed in Perfetto, you can pass `tracing_separate_thread = tracing::field::Empty` to the tracing macros. This is useful to separate out spans which just indicate the current step or program frame being processed by the interpreter. As explained in [The timeline](#the-timeline), those spans end up under the "Global Legacy Events" track. You should use a value of `tracing::field::Empty` so that other tracing layers (e.g. the logger) will ignore the `tracing_separate_thread` field. For example:
```rust
let _trace = enter_trace_span!(M, step::eval_statement, tracing_separate_thread = tracing::field::Empty);
```
@@ -277,9 +280,12 @@ So the solution was to copy-paste [the only file](https://github.com/thoren-d/tr
### Time measurements
tracing-chrome originally used `std::time::Instant` to measure time, however on some x86/x86_64 Linux systems it might be unbearably slow since the underlying system call (`clock_gettime`) would take ≈1.3µs. Read more [here](https://btorpey.github.io/blog/2014/02/18/clock-sources-in-linux/) about how the Linux kernel chooses the clock source.
tracing-chrome uses `std::time::Instant` to measure time. On most modern systems this is ok, since the underlying system call (`clock_gettime`) uses very fast hardware counters (e.g. `tsc`) and has a latency of ≈16ns.
Therefore, on x86/x86_64 Linux systems with a CPU that has an invariant TSC counter, we read from that instead to measure time, which takes only ≈13ns. There are unfortunately a lot of caveats to this approach though, as explained [in the code](https://github.com/rust-lang/miri/blob/master/src/bin/log/tracing_chrome_instant.rs) and [in the PR](https://github.com/rust-lang/miri/pull/4524). The most impactful one is that every thread spawned in Miri that wants to trace something (which requires measuring time) needs to pin itself to a single CPU core (using `sched_setaffinity`).
On some x86/x86_64 Linux systems, however, `tsc` is not "reliable" and the system thus relies on other timers, e.g. `hpet` which takes ≈1.3µs. Read [here](https://btorpey.github.io/blog/2014/02/18/clock-sources-in-linux/) how the Linux kernel chooses the clock source, and how to check if your system is using `tsc`. If it doesn't use `tsc`, then expect most of the trace time being spent in time measurements, which degrades traces' usefulness... See [here](https://github.com/rust-lang/miri/issues/4563) for some discussion.
> [!WARNING]
> A (somewhat risky) workaround is to add `tsc=reliable clocksource=tsc hpet=disable` to the kernel boot parameters, which forces it to use `tsc` even if it is unreliable. But this may render the system unstable, so try it at your own risk!
## Other useful stuff
+1 -1
View File
@@ -1 +1 @@
7bee525095c0872e87c038c412c781b9bbb3f5dc
d933cf483edf1605142ac6899ff32536c0ad8b22
+5 -1
View File
@@ -168,7 +168,11 @@ fn addr_from_alloc_id_uncached(
if let Some(GlobalAlloc::Function { instance, .. }) =
this.tcx.try_get_global_alloc(alloc_id)
{
let fn_sig = this.tcx.fn_sig(instance.def_id()).skip_binder().skip_binder();
let fn_sig = this.tcx.instantiate_bound_regions_with_erased(
this.tcx
.fn_sig(instance.def_id())
.instantiate(*this.tcx, instance.args),
);
let fn_ptr = crate::shims::native_lib::build_libffi_closure(this, fn_sig)?;
#[expect(

Some files were not shown because too many files have changed in this diff Show More