Auto merge of #152965 - JonathanBrouwer:rollup-QnrcBRx, r=JonathanBrouwer

Rollup of 14 pull requests

Successful merges:

 - rust-lang/rust#150468 (rustc_target: callconv: powerpc64: Use the ABI set in target options instead of guessing)
 - rust-lang/rust#151628 (Fix ICE in const eval of packed SIMD types with non-power-of-two element counts)
 - rust-lang/rust#151871 (don't use env with infer vars)
 - rust-lang/rust#152591 (Simplify internals of `{Rc,Arc}::default`)
 - rust-lang/rust#152865 (Fixed ByteStr not padding within its Display trait when no specific alignment is mentioned)
 - rust-lang/rust#147859 (reduce the amount of panics in `{TokenStream, Literal}::from_str` calls)
 - rust-lang/rust#152705 (Test(lib/win/proc): Skip `raw_attributes` doctest under Win7)
 - rust-lang/rust#152767 (fix typo in `carryless_mul` macro invocation)
 - rust-lang/rust#152837 (fix(codegen): Use `body_codegen_attrs` For Caller In `adjust_target_feature_sig`)
 - rust-lang/rust#152871 (Fix warnings in rs{begin,end}.rs files)
 - rust-lang/rust#152879 (Remove `impl IntoQueryParam<P> for &'a P`.)
 - rust-lang/rust#152933 (Start migration for `LintDiagnostic` items by adding API and migrating `LinkerOutput` lint)
 - rust-lang/rust#152937 (remove unneeded reboxing)
 - rust-lang/rust#152953 (Fix typo in armv7a-vex-v5.md)
This commit is contained in:
bors
2026-02-22 13:27:26 +00:00
82 changed files with 596 additions and 310 deletions
+2 -4
View File
@@ -4211,9 +4211,7 @@ pub fn ident(&self) -> Option<Ident> {
impl From<ForeignItemKind> for ItemKind {
fn from(foreign_item_kind: ForeignItemKind) -> ItemKind {
match foreign_item_kind {
ForeignItemKind::Static(box static_foreign_item) => {
ItemKind::Static(Box::new(static_foreign_item))
}
ForeignItemKind::Static(static_foreign_item) => ItemKind::Static(static_foreign_item),
ForeignItemKind::Fn(fn_kind) => ItemKind::Fn(fn_kind),
ForeignItemKind::TyAlias(ty_alias_kind) => ItemKind::TyAlias(ty_alias_kind),
ForeignItemKind::MacCall(a) => ItemKind::MacCall(a),
@@ -4226,7 +4224,7 @@ impl TryFrom<ItemKind> for ForeignItemKind {
fn try_from(item_kind: ItemKind) -> Result<ForeignItemKind, ItemKind> {
Ok(match item_kind {
ItemKind::Static(box static_item) => ForeignItemKind::Static(Box::new(static_item)),
ItemKind::Static(static_item) => ForeignItemKind::Static(static_item),
ItemKind::Fn(fn_kind) => ForeignItemKind::Fn(fn_kind),
ItemKind::TyAlias(ty_alias_kind) => ForeignItemKind::TyAlias(ty_alias_kind),
ItemKind::MacCall(a) => ForeignItemKind::MacCall(a),
@@ -626,7 +626,7 @@ fn get_closure_bound_clause_span(
let predicates = match parent.kind {
hir::ExprKind::Call(callee, _) => {
let ty = typeck_result.node_type_opt(callee.hir_id)?;
let ty::FnDef(fn_def_id, args) = ty.kind() else { return None };
let ty::FnDef(fn_def_id, args) = *ty.kind() else { return None };
tcx.predicates_of(fn_def_id).instantiate(tcx, args)
}
hir::ExprKind::MethodCall(..) => {
@@ -614,7 +614,7 @@ pub(crate) fn report_mutability_error(
}
_ => {
let local = &self.body.local_decls[local];
match local.local_info() {
match *local.local_info() {
LocalInfo::StaticRef { def_id, .. } => {
let span = self.infcx.tcx.def_span(def_id);
err.span_label(span, format!("this `static` cannot be {acted_on}"));
@@ -926,7 +926,7 @@ fn maybe_suggest_constrain_dyn_trait_impl(
let tcx = self.infcx.tcx;
let ConstraintCategory::CallArgument(Some(func_ty)) = category else { return };
let ty::FnDef(fn_did, args) = func_ty.kind() else { return };
let ty::FnDef(fn_did, args) = *func_ty.kind() else { return };
debug!(?fn_did, ?args);
// Only suggest this on function calls, not closures
@@ -938,7 +938,7 @@ fn maybe_suggest_constrain_dyn_trait_impl(
let Ok(Some(instance)) = ty::Instance::try_resolve(
tcx,
self.infcx.typing_env(self.infcx.param_env),
*fn_did,
fn_did,
self.infcx.resolve_vars_if_possible(args),
) else {
return;
@@ -1013,12 +1013,12 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
let is_implicit_coercion = coercion_source == CoercionSource::Implicit;
let src_ty = op.ty(self.body, tcx);
let mut src_sig = src_ty.fn_sig(tcx);
if let ty::FnDef(def_id, _) = src_ty.kind()
if let ty::FnDef(def_id, _) = *src_ty.kind()
&& let ty::FnPtr(_, target_hdr) = *ty.kind()
&& tcx.codegen_fn_attrs(def_id).safe_target_features
&& target_hdr.safety.is_safe()
&& let Some(safe_sig) = tcx.adjust_target_feature_sig(
*def_id,
def_id,
src_sig,
self.body.source.def_id(),
)
+5 -7
View File
@@ -18,18 +18,18 @@
use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::temp_dir::MaybeTempDir;
use rustc_errors::{DiagCtxtHandle, LintDiagnostic};
use rustc_errors::DiagCtxtHandle;
use rustc_fs_util::{TempDirBuilder, fix_windows_verbatim_for_gcc, try_canonicalize};
use rustc_hir::attrs::NativeLibKind;
use rustc_hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc_macros::LintDiagnostic;
use rustc_macros::Diagnostic;
use rustc_metadata::fs::{METADATA_FILENAME, copy_to_stdout, emit_wrapper_file};
use rustc_metadata::{
EncodedMetadata, NativeLibSearchFallback, find_native_static_library,
walk_native_lib_search_dirs,
};
use rustc_middle::bug;
use rustc_middle::lint::lint_level;
use rustc_middle::lint::diag_lint_level;
use rustc_middle::middle::debugger_visualizer::DebuggerVisualizerFile;
use rustc_middle::middle::dependency_format::Linkage;
use rustc_middle::middle::exported_symbols::SymbolExportKind;
@@ -662,7 +662,7 @@ fn read_input(&self, path: &Path) -> std::io::Result<&[u8]> {
}
}
#[derive(LintDiagnostic)]
#[derive(Diagnostic)]
#[diag("{$inner}")]
/// Translating this is kind of useless. We don't pass translation flags to the linker, so we'd just
/// end up with inconsistent languages within the same diagnostic.
@@ -938,9 +938,7 @@ fn link_natively(
let level = codegen_results.crate_info.lint_levels.linker_messages;
let lint = |msg| {
lint_level(sess, LINKER_MESSAGES, level, None, |diag| {
LinkerOutput { inner: msg }.decorate_lint(diag)
})
diag_lint_level(sess, LINKER_MESSAGES, level, None, LinkerOutput { inner: msg });
};
if !prog.stderr.is_empty() {
@@ -577,7 +577,7 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
Rvalue::Aggregate(kind, ..) => {
if let AggregateKind::Coroutine(def_id, ..) = kind.as_ref()
&& let Some(coroutine_kind) = self.tcx.coroutine_kind(def_id)
&& let Some(coroutine_kind) = self.tcx.coroutine_kind(*def_id)
{
self.check_op(ops::Coroutine(coroutine_kind));
}
@@ -2,7 +2,6 @@
use rustc_abi::{BackendRepr, Endian};
use rustc_apfloat::ieee::{Double, Half, Quad, Single};
use rustc_apfloat::{Float, Round};
use rustc_data_structures::assert_matches;
use rustc_middle::mir::interpret::{InterpErrorKind, Pointer, UndefinedBehaviorInfo};
use rustc_middle::ty::{FloatTy, ScalarInt, SimdAlign};
use rustc_middle::{bug, err_ub_format, mir, span_bug, throw_unsup_format, ty};
@@ -838,7 +837,20 @@ fn check_simd_ptr_alignment(
vector_layout: TyAndLayout<'tcx>,
alignment: SimdAlign,
) -> InterpResult<'tcx> {
assert_matches!(vector_layout.backend_repr, BackendRepr::SimdVector { .. });
// Packed SIMD types with non-power-of-two element counts use BackendRepr::Memory
// instead of BackendRepr::SimdVector. We need to handle both cases.
// FIXME: remove the BackendRepr::Memory case when SIMD vectors are always passed as BackendRepr::SimdVector.
assert!(vector_layout.ty.is_simd(), "check_simd_ptr_alignment called on non-SIMD type");
match vector_layout.backend_repr {
BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => {}
_ => {
span_bug!(
self.cur_span(),
"SIMD type has unexpected backend_repr: {:?}",
vector_layout.backend_repr
);
}
}
let align = match alignment {
ty::SimdAlign::Unaligned => {
+13
View File
@@ -109,10 +109,18 @@ pub fn from_spans(mut vec: Vec<Span>) -> MultiSpan {
MultiSpan { primary_spans: vec, span_labels: vec![] }
}
/// Appends `primary_span` to this `MultiSpan`'s list of primary spans.
///
/// Does not touch the span labels; existing primary spans are kept.
pub fn push_primary_span(&mut self, primary_span: Span) {
self.primary_spans.push(primary_span);
}
/// Records `label` against `span`, converting it into a `DiagMessage` first.
pub fn push_span_label(&mut self, span: Span, label: impl Into<DiagMessage>) {
self.span_labels.push((span, label.into()));
}
/// Records a pre-built `DiagMessage` label against `span`.
///
/// Delegates to [`Self::push_span_label`] (every type converts into itself via
/// the blanket `Into` impl) so the two entry points cannot drift apart.
pub fn push_span_diag(&mut self, span: Span, diag: DiagMessage) {
    self.push_span_label(span, diag);
}
/// Selects the first primary span (if any).
pub fn primary_span(&self) -> Option<Span> {
self.primary_spans.first().cloned()
@@ -179,6 +187,11 @@ pub fn span_labels(&self) -> Vec<SpanLabel> {
span_labels
}
/// Returns the raw `(span, message)` label pairs held by this `MultiSpan`,
/// in insertion order and without any filtering.
pub fn span_labels_raw(&self) -> &[(Span, DiagMessage)] {
    self.span_labels.as_slice()
}
/// Returns `true` if any of the span labels is displayable.
pub fn has_span_labels(&self) -> bool {
self.span_labels.iter().any(|(sp, _)| !sp.is_dummy())
+7
View File
@@ -1324,6 +1324,13 @@ pub fn cancel(mut self) {
drop(self);
}
/// Cancels this diagnostic and returns its first message, if it exists.
///
/// The message (if any) is extracted first, then `cancel` runs
/// *unconditionally* — the previous `?`-based formulation returned early when
/// the inner diagnostic had no messages, leaving the diagnostic uncancelled
/// despite this method's contract. NOTE(review): depending on `Diag`'s `Drop`
/// impl, that early exit could also trip an unemitted-diagnostic check —
/// confirm against `Diag::drop`.
pub fn cancel_into_message(self) -> Option<String> {
    // Borrow the inner diagnostic to copy out the first message before `self`
    // is consumed by `cancel` below.
    let message = self
        .diag
        .as_ref()
        .and_then(|diag| diag.messages.first())
        .and_then(|(msg, _style)| msg.as_str().map(ToString::to_string));
    self.cancel();
    message
}
/// See `DiagCtxt::stash_diagnostic` for details.
pub fn stash(mut self, span: Span, key: StashKey) -> Option<ErrorGuaranteed> {
let diag = self.take_diag();
+22 -15
View File
@@ -10,7 +10,7 @@
use rustc_errors::{Diag, ErrorGuaranteed, MultiSpan};
use rustc_parse::lexer::{StripTokens, nfc_normalize};
use rustc_parse::parser::Parser;
use rustc_parse::{exp, new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
use rustc_parse::{exp, new_parser_from_source_str, source_str_to_stream};
use rustc_proc_macro::bridge::{
DelimSpan, Diagnostic, ExpnGlobals, Group, Ident, LitKind, Literal, Punct, TokenTree, server,
};
@@ -415,6 +415,13 @@ fn to_internal(self) -> rustc_errors::Level {
}
}
fn cancel_diags_into_string(diags: Vec<Diag<'_>>) -> String {
let mut messages = diags.into_iter().flat_map(Diag::cancel_into_message);
let msg = messages.next().expect("no diagnostic has a message");
messages.for_each(|_| ()); // consume iterator to cancel the remaining diagnostics
msg
}
pub(crate) struct Rustc<'a, 'b> {
ecx: &'a mut ExtCtxt<'b>,
def_site: Span,
@@ -478,35 +485,32 @@ fn track_path(&mut self, path: &str) {
self.psess().file_depinfo.borrow_mut().insert(Symbol::intern(path));
}
fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span, Self::Symbol>, ()> {
fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span, Self::Symbol>, String> {
let name = FileName::proc_macro_source_code(s);
let mut parser = unwrap_or_emit_fatal(new_parser_from_source_str(
self.psess(),
name,
s.to_owned(),
StripTokens::Nothing,
));
let mut parser =
new_parser_from_source_str(self.psess(), name, s.to_owned(), StripTokens::Nothing)
.map_err(cancel_diags_into_string)?;
let first_span = parser.token.span.data();
let minus_present = parser.eat(exp!(Minus));
let lit_span = parser.token.span.data();
let token::Literal(mut lit) = parser.token.kind else {
return Err(());
return Err("not a literal".to_string());
};
// Check no comment or whitespace surrounding the (possibly negative)
// literal, or more tokens after it.
if (lit_span.hi.0 - first_span.lo.0) as usize != s.len() {
return Err(());
return Err("comment or whitespace around literal".to_string());
}
if minus_present {
// If minus is present, check no comment or whitespace in between it
// and the literal token.
if first_span.hi.0 != lit_span.lo.0 {
return Err(());
return Err("comment or whitespace after minus".to_string());
}
// Check literal is a kind we allow to be negated in a proc macro token.
@@ -520,7 +524,9 @@ fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span, Self::Symb
| token::LitKind::ByteStrRaw(_)
| token::LitKind::CStr
| token::LitKind::CStrRaw(_)
| token::LitKind::Err(_) => return Err(()),
| token::LitKind::Err(_) => {
return Err("non-numeric literal may not be negated".to_string());
}
token::LitKind::Integer | token::LitKind::Float => {}
}
@@ -560,13 +566,14 @@ fn ts_is_empty(&mut self, stream: &Self::TokenStream) -> bool {
stream.is_empty()
}
fn ts_from_str(&mut self, src: &str) -> Self::TokenStream {
unwrap_or_emit_fatal(source_str_to_stream(
fn ts_from_str(&mut self, src: &str) -> Result<Self::TokenStream, String> {
source_str_to_stream(
self.psess(),
FileName::proc_macro_source_code(src),
src.to_string(),
Some(self.call_site),
))
)
.map_err(cancel_diags_into_string)
}
fn ts_to_string(&mut self, stream: &Self::TokenStream) -> String {
@@ -1221,7 +1221,7 @@ fn check_impl_items_against_trait<'tcx>(
match impl_trait_header.polarity {
ty::ImplPolarity::Reservation | ty::ImplPolarity::Positive => {}
ty::ImplPolarity::Negative => {
if let [first_item_ref, ..] = impl_item_refs {
if let [first_item_ref, ..] = *impl_item_refs {
let first_item_span = tcx.def_span(first_item_ref);
struct_span_code_err!(
tcx.dcx(),
@@ -1333,7 +1333,7 @@ fn check_region_late_boundedness<'tcx>(
.iter()
.map(|param| {
let (LateEarlyMismatch::EarlyInImpl(impl_param_def_id, ..)
| LateEarlyMismatch::LateInImpl(impl_param_def_id, ..)) = param;
| LateEarlyMismatch::LateInImpl(impl_param_def_id, ..)) = *param;
tcx.def_span(impl_param_def_id)
})
.collect();
@@ -2445,12 +2445,12 @@ fn visit_ty(&mut self, ty: &'v hir::Ty<'v, AmbigArg>) {
)) => {
// See comments on `ConstrainedCollectorPostHirTyLowering` for why this arm does not
// just consider args to be unconstrained.
let generics = self.tcx.generics_of(alias_def);
let generics = self.tcx.generics_of(*alias_def);
let mut walker = ConstrainedCollectorPostHirTyLowering {
arg_is_constrained: vec![false; generics.own_params.len()]
.into_boxed_slice(),
};
walker.visit_ty(self.tcx.type_of(alias_def).instantiate_identity());
walker.visit_ty(self.tcx.type_of(*alias_def).instantiate_identity());
match segments.last() {
Some(hir::PathSegment { args: Some(args), .. }) => {
@@ -214,7 +214,7 @@ pub(super) fn report_unresolved_assoc_item<I>(
if let [best_trait] = visible_traits
.iter()
.copied()
.filter(|trait_def_id| {
.filter(|&trait_def_id| {
tcx.associated_items(trait_def_id)
.filter_by_name_unhygienic(suggested_name)
.any(|item| item.tag() == assoc_tag)
@@ -1234,7 +1234,7 @@ pub(crate) fn maybe_report_similar_assoc_fn(
&& let name = Symbol::intern(&format!("{ident2}_{ident3}"))
&& let Some(item) = inherent_impls
.iter()
.flat_map(|inherent_impl| {
.flat_map(|&inherent_impl| {
tcx.associated_items(inherent_impl).filter_by_name_unhygienic(name)
})
.next()
@@ -101,7 +101,7 @@ pub(crate) fn enforce_impl_lifetime_params_are_constrained(
let lifetimes_in_associated_types: FxHashSet<_> = tcx
.associated_item_def_ids(impl_def_id)
.iter()
.flat_map(|def_id| {
.flat_map(|&def_id| {
let item = tcx.associated_item(def_id);
match item.kind {
ty::AssocKind::Type { .. } => {
+3 -3
View File
@@ -87,7 +87,7 @@ pub(crate) fn check_expr_call(
result = self.try_overloaded_call_step(call_expr, callee_expr, arg_exprs, &autoderef);
}
match autoderef.final_ty().kind() {
match *autoderef.final_ty().kind() {
ty::FnDef(def_id, _) => {
let abi = self.tcx.fn_sig(def_id).skip_binder().skip_binder().abi;
self.check_call_abi(abi, call_expr.span);
@@ -430,11 +430,11 @@ fn try_overloaded_call_traits(
}
fn is_scalable_vector_ctor(&self, callee_ty: Ty<'_>) -> bool {
if let ty::FnDef(def_id, _) = callee_ty.kind()
if let ty::FnDef(def_id, _) = *callee_ty.kind()
&& let def::DefKind::Ctor(def::CtorOf::Struct, _) = self.tcx.def_kind(def_id)
{
self.tcx
.opt_parent(*def_id)
.opt_parent(def_id)
.and_then(|id| self.tcx.adt_def(id).repr().scalable)
.is_some()
} else {
+2 -2
View File
@@ -2448,7 +2448,7 @@ fn report_private_fields(
.tcx
.inherent_impls(def_id)
.into_iter()
.flat_map(|i| self.tcx.associated_items(i).in_definition_order())
.flat_map(|&i| self.tcx.associated_items(i).in_definition_order())
// Only assoc fn with no receivers.
.filter(|item| item.is_fn() && !item.is_method())
.filter_map(|item| {
@@ -3183,7 +3183,7 @@ fn no_such_field_err(
// Check if there is an associated function with the same name.
if let Some(def_id) = base_ty.peel_refs().ty_adt_def().map(|d| d.did()) {
for impl_def_id in self.tcx.inherent_impls(def_id) {
for &impl_def_id in self.tcx.inherent_impls(def_id) {
for item in self.tcx.associated_items(impl_def_id).in_definition_order() {
if let ExprKind::Field(base_expr, _) = expr.kind
&& item.name() == field.name
@@ -1118,7 +1118,7 @@ pub(crate) fn instantiate_value_path(
// error in `validate_res_from_ribs` -- it's just difficult to tell whether the
// self type has any generic types during rustc_resolve, which is what we use
// to determine if this is a hard error or warning.
if std::iter::successors(Some(self.body_id.to_def_id()), |def_id| {
if std::iter::successors(Some(self.body_id.to_def_id()), |&def_id| {
self.tcx.generics_of(def_id).parent
})
.all(|def_id| def_id != impl_def_id)
@@ -567,7 +567,7 @@ pub(in super::super) fn suggest_boxing_when_appropriate(
return false;
}
if self.may_coerce(Ty::new_box(self.tcx, found), expected) {
let suggest_boxing = match found.kind() {
let suggest_boxing = match *found.kind() {
ty::Tuple(tuple) if tuple.is_empty() => {
errors::SuggestBoxing::Unit { start: span.shrink_to_lo(), end: span }
}
@@ -1414,7 +1414,7 @@ fn set_not_found_span_label(
})
.collect::<Vec<_>>();
if !inherent_impls_candidate.is_empty() {
inherent_impls_candidate.sort_by_key(|id| self.tcx.def_path_str(id));
inherent_impls_candidate.sort_by_key(|&id| self.tcx.def_path_str(id));
inherent_impls_candidate.dedup();
// number of types to show at most
@@ -2256,7 +2256,7 @@ pub(crate) fn confusable_method_name(
call_args: Option<Vec<Ty<'tcx>>>,
) -> Option<Symbol> {
if let ty::Adt(adt, adt_args) = rcvr_ty.kind() {
for inherent_impl_did in self.tcx.inherent_impls(adt.did()).into_iter() {
for &inherent_impl_did in self.tcx.inherent_impls(adt.did()).into_iter() {
for inherent_method in
self.tcx.associated_items(inherent_impl_did).in_definition_order()
{
@@ -2315,7 +2315,7 @@ fn note_candidates_on_method_error(
sources: &mut Vec<CandidateSource>,
sugg_span: Option<Span>,
) {
sources.sort_by_key(|source| match source {
sources.sort_by_key(|source| match *source {
CandidateSource::Trait(id) => (0, self.tcx.def_path_str(id)),
CandidateSource::Impl(id) => (1, self.tcx.def_path_str(id)),
});
@@ -2468,7 +2468,7 @@ fn find_builder_fn(&self, err: &mut Diag<'_>, rcvr_ty: Ty<'tcx>, expr_id: hir::H
.tcx
.inherent_impls(adt_def.did())
.iter()
.flat_map(|i| self.tcx.associated_items(i).in_definition_order())
.flat_map(|&i| self.tcx.associated_items(i).in_definition_order())
// Only assoc fn with no receivers and only if
// they are resolvable
.filter(|item| {
@@ -2521,7 +2521,7 @@ fn find_builder_fn(&self, err: &mut Diag<'_>, rcvr_ty: Ty<'tcx>, expr_id: hir::H
} else {
String::new()
};
match &items[..] {
match items[..] {
[] => {}
[(def_id, ret_ty)] => {
err.span_note(
@@ -2536,7 +2536,7 @@ fn find_builder_fn(&self, err: &mut Diag<'_>, rcvr_ty: Ty<'tcx>, expr_id: hir::H
_ => {
let span: MultiSpan = items
.iter()
.map(|(def_id, _)| self.tcx.def_span(def_id))
.map(|&(def_id, _)| self.tcx.def_span(def_id))
.collect::<Vec<Span>>()
.into();
err.span_note(
@@ -2546,7 +2546,7 @@ fn find_builder_fn(&self, err: &mut Diag<'_>, rcvr_ty: Ty<'tcx>, expr_id: hir::H
following associated functions:\n{}{post}",
items
.iter()
.map(|(def_id, _ret_ty)| self.tcx.def_path_str(def_id))
.map(|&(def_id, _ret_ty)| self.tcx.def_path_str(def_id))
.collect::<Vec<String>>()
.join("\n")
),
@@ -3518,7 +3518,7 @@ fn note_predicate_source_and_get_derives(
traits.push(trait_pred.def_id());
}
}
traits.sort_by_key(|id| self.tcx.def_path_str(id));
traits.sort_by_key(|&id| self.tcx.def_path_str(id));
traits.dedup();
let len = traits.len();
@@ -3920,7 +3920,7 @@ fn suggest_valid_traits(
valid_out_of_scope_traits.retain(|id| self.tcx.is_user_visible_dep(id.krate));
if !valid_out_of_scope_traits.is_empty() {
let mut candidates = valid_out_of_scope_traits;
candidates.sort_by_key(|id| self.tcx.def_path_str(id));
candidates.sort_by_key(|&id| self.tcx.def_path_str(id));
candidates.dedup();
// `TryFrom` and `FromIterator` have no methods
+2 -2
View File
@@ -380,8 +380,8 @@ fn check_overloaded_binop(
self.tcx
.associated_item_def_ids(def_id)
.iter()
.find(|item_def_id| {
self.tcx.associated_item(*item_def_id).name() == sym::Output
.find(|&&item_def_id| {
self.tcx.associated_item(item_def_id).name() == sym::Output
})
.cloned()
});
@@ -308,7 +308,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) {
return true;
};
// We only computed variance of lifetimes...
debug_assert_matches!(self.tcx.def_kind(def_id), DefKind::LifetimeParam);
debug_assert_matches!(self.tcx.def_kind(*def_id), DefKind::LifetimeParam);
let uncaptured = match *kind {
ParamKind::Early(name, index) => ty::Region::new_early_param(
self.tcx,
@@ -342,7 +342,7 @@ fn visit_ty(&mut self, t: Ty<'tcx>) {
let uncaptured_spans: Vec<_> = uncaptured_args
.into_iter()
.map(|(def_id, _)| self.tcx.def_span(def_id))
.map(|(&def_id, _)| self.tcx.def_span(def_id))
.collect();
self.tcx.emit_node_span_lint(
+2 -2
View File
@@ -282,9 +282,9 @@ fn lint_ty_kind_usage(cx: &LateContext<'_>, res: &Res) -> bool {
}
fn is_ty_or_ty_ctxt(cx: &LateContext<'_>, path: &hir::Path<'_>) -> Option<String> {
match &path.res {
match path.res {
Res::Def(_, def_id) => {
if let Some(name @ (sym::Ty | sym::TyCtxt)) = cx.tcx.get_diagnostic_name(*def_id) {
if let Some(name @ (sym::Ty | sym::TyCtxt)) = cx.tcx.get_diagnostic_name(def_id) {
return Some(format!("{}{}", name, gen_args(path.segments.last().unwrap())));
}
}
+203 -1
View File
@@ -2,7 +2,7 @@
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::sorted_map::SortedMap;
use rustc_errors::{Diag, MultiSpan};
use rustc_errors::{Diag, Diagnostic, MultiSpan};
use rustc_hir::{HirId, ItemLocalId};
use rustc_lint_defs::EditionFcw;
use rustc_macros::{Decodable, Encodable, HashStable};
@@ -482,3 +482,205 @@ fn lint_level_impl(
}
lint_level_impl(sess, lint, level, span, Box::new(decorate))
}
/// The innermost function for emitting lints implementing the [`trait@Diagnostic`] trait.
///
/// If you are looking to implement a lint, look for higher level functions,
/// for example:
///
/// - [`TyCtxt::emit_node_span_lint`]
/// - [`TyCtxt::node_span_lint`]
/// - [`TyCtxt::emit_node_lint`]
/// - [`TyCtxt::node_lint`]
/// - `LintContext::opt_span_lint`
///
/// This function will replace `lint_level` once all `LintDiagnostic` items have been migrated to
/// `Diagnostic`.
#[track_caller]
pub fn diag_lint_level<'a, D: Diagnostic<'a, ()> + 'a>(
sess: &'a Session,
lint: &'static Lint,
level: LevelAndSource,
span: Option<MultiSpan>,
decorate: D,
) {
// Avoid codegen bloat from monomorphization by immediately doing dyn dispatch of `decorate` to
// the "real" work.
#[track_caller]
fn diag_lint_level_impl<'a>(
sess: &'a Session,
lint: &'static Lint,
level: LevelAndSource,
span: Option<MultiSpan>,
decorate: Box<
dyn FnOnce(rustc_errors::DiagCtxtHandle<'a>, rustc_errors::Level) -> Diag<'a, ()> + 'a,
>,
) {
let LevelAndSource { level, lint_id, src } = level;
// Check for future incompatibility lints and issue a stronger warning.
let future_incompatible = lint.future_incompatible;
let has_future_breakage = future_incompatible.map_or(
// Default allow lints trigger too often for testing.
sess.opts.unstable_opts.future_incompat_test && lint.default_level != Level::Allow,
|incompat| incompat.report_in_deps,
);
// Convert lint level to error level.
let err_level = match level {
Level::Allow => {
if has_future_breakage {
rustc_errors::Level::Allow
} else {
return;
}
}
Level::Expect => {
// This case is special as we actually allow the lint itself in this context, but
// we can't return early like in the case for `Level::Allow` because we still
// need the lint diagnostic to be emitted to `rustc_error::DiagCtxtInner`.
//
// We can also not mark the lint expectation as fulfilled here right away, as it
// can still be cancelled in the decorate function. All of this means that we simply
// create a `Diag` and continue as we would for warnings.
rustc_errors::Level::Expect
}
Level::ForceWarn => rustc_errors::Level::ForceWarning,
Level::Warn => rustc_errors::Level::Warning,
Level::Deny | Level::Forbid => rustc_errors::Level::Error,
};
// Finally, run `decorate`. `decorate` can call `trimmed_path_str` (directly or indirectly),
// so we need to make sure when we do call `decorate` that the diagnostic is eventually
// emitted or we'll get a `must_produce_diag` ICE.
//
// When is a diagnostic *eventually* emitted? Well, that is determined by 2 factors:
// 1. If the corresponding `rustc_errors::Level` is beyond warning, i.e. `ForceWarning(_)`
// or `Error`, then the diagnostic will be emitted regardless of CLI options.
// 2. If the corresponding `rustc_errors::Level` is warning, then that can be affected by
// `-A warnings` or `--cap-lints=xxx` on the command line. In which case, the diagnostic
// will be emitted if `can_emit_warnings` is true.
let skip = err_level == rustc_errors::Level::Warning && !sess.dcx().can_emit_warnings();
let disable_suggestions = if let Some(ref span) = span
// If this code originates in a foreign macro, aka something that this crate
// did not itself author, then it's likely that there's nothing this crate
// can do about it. We probably want to skip the lint entirely.
&& span.primary_spans().iter().any(|s| s.in_external_macro(sess.source_map()))
{
true
} else {
false
};
// Either run the decorator for real, or — when the warning would be discarded
// anyway — build an empty placeholder `Diag` so the emit/cancel bookkeeping
// below still has a diagnostic to drive.
let mut err: Diag<'_, ()> = if !skip {
decorate(sess.dcx(), err_level)
} else {
Diag::new(sess.dcx(), err_level, "")
};
if let Some(span) = span
&& err.span.primary_span().is_none()
{
// We can't use `err.span()` because it overwrites the labels, so we need to do it manually.
for primary in span.primary_spans() {
err.span.push_primary_span(*primary);
}
for (label_span, label) in span.span_labels_raw() {
err.span.push_span_diag(*label_span, label.clone());
}
}
if let Some(lint_id) = lint_id {
err.lint_id(lint_id);
}
if disable_suggestions {
// Any suggestions made here are likely to be incorrect, so anything we
// emit shouldn't be automatically fixed by rustfix.
err.disable_suggestions();
// If this is a future incompatible that is not an edition fixing lint
// it'll become a hard error, so we have to emit *something*. Also,
// if this lint occurs in the expansion of a macro from an external crate,
// allow individual lints to opt-out from being reported.
let incompatible = future_incompatible.is_some_and(|f| f.reason.edition().is_none());
if !incompatible && !lint.report_in_external_macro {
err.cancel();
// Don't continue further, since we don't want to have
// `diag_span_note_once` called for a diagnostic that isn't emitted.
return;
}
}
// Record the lint's name and its future-breakage status on the diagnostic.
err.is_lint(lint.name_lower(), has_future_breakage);
// Lint diagnostics that are covered by the expect level will not be emitted outside
// the compiler. It is therefore not necessary to add any information for the user.
// This will therefore directly call the decorate function which will in turn emit
// the diagnostic.
if let Level::Expect = level {
err.emit();
return;
}
// Attach the future-incompatibility explanation (as a warning note) and the
// documentation reference link, when this lint has such metadata.
if let Some(future_incompatible) = future_incompatible {
let explanation = match future_incompatible.reason {
FutureIncompatibilityReason::FutureReleaseError(_) => {
"this was previously accepted by the compiler but is being phased out; \
it will become a hard error in a future release!"
.to_owned()
}
FutureIncompatibilityReason::FutureReleaseSemanticsChange(_) => {
"this will change its meaning in a future release!".to_owned()
}
FutureIncompatibilityReason::EditionError(EditionFcw { edition, .. }) => {
let current_edition = sess.edition();
format!(
"this is accepted in the current edition (Rust {current_edition}) but is a hard error in Rust {edition}!"
)
}
FutureIncompatibilityReason::EditionSemanticsChange(EditionFcw {
edition, ..
}) => {
format!("this changes meaning in Rust {edition}")
}
FutureIncompatibilityReason::EditionAndFutureReleaseError(EditionFcw {
edition,
..
}) => {
format!(
"this was previously accepted by the compiler but is being phased out; \
it will become a hard error in Rust {edition} and in a future release in all editions!"
)
}
FutureIncompatibilityReason::EditionAndFutureReleaseSemanticsChange(
EditionFcw { edition, .. },
) => {
format!(
"this changes meaning in Rust {edition} and in a future release in all editions!"
)
}
FutureIncompatibilityReason::Custom(reason, _) => reason.to_owned(),
FutureIncompatibilityReason::Unreachable => unreachable!(),
};
if future_incompatible.explain_reason {
err.warn(explanation);
}
let citation =
format!("for more information, see {}", future_incompatible.reason.reference());
err.note(citation);
}
explain_lint_level_source(sess, lint, level, src, &mut err);
err.emit();
}
diag_lint_level_impl(
sess,
lint,
level,
span,
Box::new(move |dcx, level| decorate.into_diag(dcx, level)),
);
}
+5 -5
View File
@@ -16,12 +16,12 @@ pub fn write_mir_graphviz<W>(tcx: TyCtxt<'_>, single: Option<DefId>, w: &mut W)
let mirs = def_ids
.iter()
.filter(|def_id| !tcx.is_trivial_const(*def_id))
.flat_map(|def_id| {
if tcx.is_const_fn(*def_id) {
vec![tcx.optimized_mir(*def_id), tcx.mir_for_ctfe(*def_id)]
.filter(|&&def_id| !tcx.is_trivial_const(def_id))
.flat_map(|&def_id| {
if tcx.is_const_fn(def_id) {
vec![tcx.optimized_mir(def_id), tcx.mir_for_ctfe(def_id)]
} else {
vec![tcx.instance_mir(ty::InstanceKind::Item(*def_id))]
vec![tcx.instance_mir(ty::InstanceKind::Item(def_id))]
}
})
.collect::<Vec<_>>();
@@ -309,7 +309,7 @@ pub fn address_space(&self, cx: &impl HasDataLayout) -> AddressSpace {
pub fn mutability(&self, tcx: TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>) -> Mutability {
// Let's see what kind of memory we are.
match self {
match *self {
GlobalAlloc::Static(did) => {
let DefKind::Static { safety: _, mutability, nested } = tcx.def_kind(did) else {
bug!()
@@ -351,7 +351,7 @@ pub fn size_and_align(
tcx: TyCtxt<'tcx>,
typing_env: ty::TypingEnv<'tcx>,
) -> (Size, Align) {
match self {
match *self {
GlobalAlloc::Static(def_id) => {
let DefKind::Static { nested, .. } = tcx.def_kind(def_id) else {
bug!("GlobalAlloc::Static is not a static")
@@ -633,13 +633,6 @@ fn into_query_param(self) -> P {
}
}
impl<'a, P: Copy> IntoQueryParam<P> for &'a P {
#[inline(always)]
fn into_query_param(self) -> P {
*self
}
}
impl IntoQueryParam<LocalDefId> for OwnerId {
#[inline(always)]
fn into_query_param(self) -> LocalDefId {
+3 -3
View File
@@ -1331,8 +1331,8 @@ pub fn adjust_target_feature_sig(
caller: DefId,
) -> Option<ty::Binder<'tcx, ty::FnSig<'tcx>>> {
let fun_features = &self.codegen_fn_attrs(fun_def).target_features;
let callee_features = &self.codegen_fn_attrs(caller).target_features;
if self.is_target_feature_call_safe(&fun_features, &callee_features) {
let caller_features = &self.body_codegen_attrs(caller).target_features;
if self.is_target_feature_call_safe(&fun_features, &caller_features) {
return Some(fun_sig.map_bound(|sig| ty::FnSig { safety: hir::Safety::Safe, ..sig }));
}
None
@@ -2086,7 +2086,7 @@ pub fn trait_may_define_assoc_item(self, trait_def_id: DefId, assoc_name: Ident)
/// Given a `ty`, return whether it's an `impl Future<...>`.
pub fn ty_is_opaque_future(self, ty: Ty<'_>) -> bool {
let ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }) = ty.kind() else { return false };
let ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }) = *ty.kind() else { return false };
let future_trait = self.require_lang_item(LangItem::Future, DUMMY_SP);
self.explicit_item_self_bounds(def_id).skip_binder().iter().any(|&(predicate, _)| {
@@ -119,7 +119,7 @@ pub fn extract_component_with_significant_dtor<'tcx>(
/// when we are working with current local crate.
#[instrument(level = "trace", skip(tcx))]
pub fn ty_dtor_span<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Option<Span> {
match ty.kind() {
match *ty.kind() {
ty::Bool
| ty::Char
| ty::Int(_)
+2 -2
View File
@@ -417,7 +417,7 @@ pub fn calculate_dtor(
continue;
}
let Some(item_id) = self.associated_item_def_ids(impl_did).first() else {
let Some(&item_id) = self.associated_item_def_ids(impl_did).first() else {
self.dcx()
.span_delayed_bug(self.def_span(impl_did), "Drop impl without drop function");
continue;
@@ -435,7 +435,7 @@ pub fn calculate_dtor(
.delay_as_bug();
}
dtor_candidate = Some(*item_id);
dtor_candidate = Some(item_id);
}
let did = dtor_candidate?;
@@ -370,7 +370,7 @@ fn is_supported_loop_match_type(ty: Ty<'_>) -> bool {
// Some intrinsics are handled here because they desperately want to avoid introducing
// unnecessary copies.
ExprKind::Call { ty, fun, ref args, .. }
if let ty::FnDef(def_id, generic_args) = ty.kind()
if let ty::FnDef(def_id, generic_args) = *ty.kind()
&& let Some(intrinsic) = this.tcx.intrinsic(def_id)
&& matches!(intrinsic.name, sym::write_via_move | sym::write_box_via_move) =>
{
@@ -471,7 +471,7 @@ fn visit_expr(&mut self, expr: &'a Expr<'tcx>) {
ExprKind::Call { fun, ty: _, args: _, from_hir_call: _, fn_span: _ } => {
let fn_ty = self.thir[fun].ty;
let sig = fn_ty.fn_sig(self.tcx);
let (callee_features, safe_target_features): (&[_], _) = match fn_ty.kind() {
let (callee_features, safe_target_features): (&[_], _) = match *fn_ty.kind() {
ty::FnDef(func_id, ..) => {
let cg_attrs = self.tcx.codegen_fn_attrs(func_id);
(&cg_attrs.target_features, cg_attrs.safe_target_features)
@@ -251,19 +251,16 @@ fn build_async_drop(
// impl_item_refs may be empty if drop fn is not implemented in 'impl AsyncDrop for ...'
// (#140974).
// Such code will report error, so just generate sync drop here and return
let Some(drop_fn_def_id) = tcx
.associated_item_def_ids(drop_trait)
.first()
.and_then(|def_id| {
let Some(drop_fn_def_id) =
tcx.associated_item_def_ids(drop_trait).first().and_then(|&def_id| {
if tcx.def_kind(def_id) == DefKind::AssocFn
&& tcx.check_args_compatible(*def_id, trait_args)
&& tcx.check_args_compatible(def_id, trait_args)
{
Some(def_id)
} else {
None
}
})
.copied()
else {
tcx.dcx().span_delayed_bug(
self.elaborator.body().span,
+1 -1
View File
@@ -440,7 +440,7 @@ fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location
}
}
if let ty::FnDef(did, ..) = func.ty(&self.body.local_decls, self.tcx).kind()
if let ty::FnDef(did, ..) = *func.ty(&self.body.local_decls, self.tcx).kind()
&& self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
&& matches!(self.tcx.codegen_fn_attrs(did).inline, InlineAttr::Force { .. })
{
@@ -28,13 +28,13 @@ fn collect_autodiff_fn_from_arg<'tcx>(
output: &mut MonoItems<'tcx>,
) {
let (instance, span) = match arg.kind() {
ty::GenericArgKind::Type(ty) => match ty.kind() {
ty::GenericArgKind::Type(ty) => match *ty.kind() {
ty::FnDef(def_id, substs) => {
let span = tcx.def_span(def_id);
let instance = ty::Instance::expect_resolve(
tcx,
ty::TypingEnv::non_body_analysis(tcx, def_id),
*def_id,
def_id,
substs,
span,
);
+1 -1
View File
@@ -37,7 +37,7 @@ fn custom_coerce_unsize_info<'tcx>(
Ok(traits::ImplSource::UserDefined(traits::ImplSourceUserDefinedData {
impl_def_id,
..
})) => Ok(tcx.coerce_unsized_info(impl_def_id)?.custom_kind.unwrap()),
})) => Ok(tcx.coerce_unsized_info(*impl_def_id)?.custom_kind.unwrap()),
impl_source => {
bug!(
"invalid `CoerceUnsized` from {source_ty} to {target_ty}: impl_source: {:?}",
@@ -190,7 +190,7 @@ fn lint_large_assignment(
}
fn assoc_fn_of_type<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, fn_ident: Ident) -> Option<DefId> {
for impl_def_id in tcx.inherent_impls(def_id) {
for &impl_def_id in tcx.inherent_impls(def_id) {
if let Some(new) = tcx.associated_items(impl_def_id).find_by_ident_and_kind(
tcx,
fn_ident,
+1 -1
View File
@@ -2207,7 +2207,7 @@ fn report_privacy_error(&mut self, privacy_error: &PrivacyError<'ra>) {
})
})
.collect();
if let Some(def_id) = path.get(0)
if let Some(&def_id) = path.get(0)
&& let Some(path) = path_names
{
if let Some(def_id) = def_id.as_local() {
+1 -1
View File
@@ -1295,7 +1295,7 @@ fn finalize_module_binding(
.tcx
.associated_item_def_ids(def_id)
.iter()
.map(|field_id| self.tcx.visibility(field_id))
.map(|&field_id| self.tcx.visibility(field_id))
.collect();
(ctor_res, ctor_vis, field_visibilities)
})
@@ -2359,7 +2359,7 @@ fn smart_resolve_context_dependent_help(
.tcx
.associated_item_def_ids(def_id)
.iter()
.map(|field_id| self.r.tcx.visibility(field_id))
.map(|&field_id| self.r.tcx.visibility(field_id))
.collect();
(ctor_res, ctor_vis, field_visibilities)
})
@@ -2594,7 +2594,7 @@ fn suggest_alternative_construction_methods(
.tcx
.inherent_impls(def_id)
.iter()
.flat_map(|i| self.r.tcx.associated_items(i).in_definition_order())
.flat_map(|&i| self.r.tcx.associated_items(i).in_definition_order())
// Only assoc fn with no receivers.
.filter(|item| item.is_fn() && !item.is_method())
.filter_map(|item| {
@@ -2702,7 +2702,7 @@ fn has_private_fields(&self, def_id: DefId) -> bool {
.tcx
.associated_item_def_ids(def_id)
.iter()
.map(|field_id| self.r.tcx.visibility(field_id))
.map(|&field_id| self.r.tcx.visibility(field_id))
.collect(),
),
};
@@ -5,7 +5,7 @@
use rustc_abi::{Endian, HasDataLayout, TyAbiInterface};
use crate::callconv::{Align, ArgAbi, FnAbi, Reg, RegKind, Uniform};
use crate::spec::{Env, HasTargetSpec, Os};
use crate::spec::{Abi, HasTargetSpec, Os};
#[derive(Debug, Clone, Copy, PartialEq)]
enum ABI {
@@ -106,8 +106,10 @@ pub(crate) fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
Ty: TyAbiInterface<'a, C> + Copy,
C: HasDataLayout + HasTargetSpec,
{
let abi = if cx.target_spec().env == Env::Musl || cx.target_spec().os == Os::FreeBsd {
let abi = if cx.target_spec().options.abi == Abi::ElfV2 {
ELFv2
} else if cx.target_spec().options.abi == Abi::ElfV1 {
ELFv1
} else if cx.target_spec().os == Os::Aix {
AIX
} else {
+21
View File
@@ -3194,6 +3194,27 @@ macro_rules! check_matches {
"ARM targets must set `llvm-floatabi` to `hard` or `soft`",
)
}
// PowerPC64 targets that are not AIX must set their ABI to either ELFv1 or ELFv2
Arch::PowerPC64 => {
if self.os == Os::Aix {
check!(
self.llvm_abiname.is_empty(),
"AIX targets always use the AIX ABI and `llvm_abiname` should be left empty",
);
} else if self.endian == Endian::Big {
check_matches!(
&*self.llvm_abiname,
"elfv1" | "elfv2",
"invalid PowerPC64 ABI name: {}",
self.llvm_abiname,
);
} else {
check!(
self.llvm_abiname == "elfv2",
"little-endian PowerPC64 targets only support the `elfv2` ABI",
);
}
}
_ => {}
}
@@ -362,7 +362,7 @@ pub(super) fn maybe_report_ambiguity(
&& self.tcx.trait_of_assoc(*item_id) == Some(*trait_id)
&& let None = self.tainted_by_errors()
{
let assoc_item = self.tcx.associated_item(item_id);
let assoc_item = self.tcx.associated_item(*item_id);
let (verb, noun) = match assoc_item.kind {
ty::AssocKind::Const { .. } => ("refer to the", "constant"),
ty::AssocKind::Fn { .. } => ("call", "function"),
@@ -394,7 +394,7 @@ pub(super) fn maybe_report_ambiguity(
let trait_impls = self.tcx.trait_impls_of(data.trait_ref.def_id);
if let Some(impl_def_id) =
if let Some(&impl_def_id) =
trait_impls.non_blanket_impls().values().flatten().next()
{
let non_blanket_impl_count =
@@ -418,7 +418,7 @@ pub(super) fn maybe_report_ambiguity(
.non_blanket_impls()
.values()
.flatten()
.map(|id| {
.map(|&id| {
format!(
"{}",
self.tcx.type_of(id).instantiate_identity()
@@ -1799,7 +1799,7 @@ fn maybe_detailed_projection_msg(
with_forced_trimmed_paths! {
if self.tcx.is_lang_item(projection_term.def_id, LangItem::FnOnceOutput) {
let (span, closure_span) = if let ty::Closure(def_id, _) = self_ty.kind() {
let (span, closure_span) = if let ty::Closure(def_id, _) = *self_ty.kind() {
let def_span = self.tcx.def_span(def_id);
if let Some(local_def_id) = def_id.as_local()
&& let node = self.tcx.hir_node_by_def_id(local_def_id)
@@ -2305,7 +2305,7 @@ pub(super) fn report_similar_impl_candidates(
};
if candidates.len() < 5 {
let spans: Vec<_> =
candidates.iter().map(|(_, def_id)| self.tcx.def_span(def_id)).collect();
candidates.iter().map(|&(_, def_id)| self.tcx.def_span(def_id)).collect();
let mut span: MultiSpan = spans.into();
for (c, def_id) in &candidates {
let msg = if all_traits_equal {
@@ -2317,7 +2317,7 @@ pub(super) fn report_similar_impl_candidates(
self.tcx.short_string(c.print_only_trait_path(), err.long_ty_path()),
)
};
span.push_span_label(self.tcx.def_span(def_id), msg);
span.push_span_label(self.tcx.def_span(*def_id), msg);
}
err.span_help(
span,
@@ -2631,16 +2631,16 @@ fn check_same_name_different_path(
)
};
let trait_name = self.tcx.item_name(trait_def_id);
if let Some(other_trait_def_id) = self.tcx.all_traits_including_private().find(|def_id| {
trait_def_id != *def_id
if let Some(other_trait_def_id) = self.tcx.all_traits_including_private().find(|&def_id| {
trait_def_id != def_id
&& trait_name == self.tcx.item_name(def_id)
&& trait_has_same_params(*def_id)
&& trait_has_same_params(def_id)
&& self.predicate_must_hold_modulo_regions(&Obligation::new(
self.tcx,
obligation.cause.clone(),
obligation.param_env,
trait_pred.map_bound(|tr| ty::TraitPredicate {
trait_ref: ty::TraitRef::new(self.tcx, *def_id, tr.trait_ref.args),
trait_ref: ty::TraitRef::new(self.tcx, def_id, tr.trait_ref.args),
..tr
}),
))
@@ -3314,7 +3314,7 @@ fn report_cyclic_signature_error(
terr: TypeError<'tcx>,
) -> Diag<'a> {
let self_ty = found_trait_ref.self_ty();
let (cause, terr) = if let ty::Closure(def_id, _) = self_ty.kind() {
let (cause, terr) = if let ty::Closure(def_id, _) = *self_ty.kind() {
(
ObligationCause::dummy_with_span(self.tcx.def_span(def_id)),
TypeError::CyclicTy(self_ty),
@@ -373,12 +373,12 @@ fn extern_crates_with_the_same_name(
self.tcx.extern_crate(trait_def_id.krate),
) {
(
Some(ExternCrate {
Some(&ExternCrate {
src: ExternCrateSource::Extern(expected_def_id),
dependency_of: LOCAL_CRATE,
..
}),
Some(ExternCrate {
Some(&ExternCrate {
src: ExternCrateSource::Extern(trait_def_id),
dependency_of: LOCAL_CRATE,
..
@@ -402,9 +402,9 @@ pub fn check_same_definition_different_crate<F>(
let krate = self.tcx.crate_name(expected_did.krate);
let name = self.tcx.item_name(expected_did);
let definitions_with_same_path: UnordSet<_> = found_dids
.filter(|def_id| {
.filter(|&def_id| {
def_id.krate != expected_did.krate
&& (self.extern_crates_with_the_same_name(expected_did, *def_id)
&& (self.extern_crates_with_the_same_name(expected_did, def_id)
|| self.tcx.crate_name(def_id.krate) == krate)
&& self.tcx.item_name(def_id) == name
})
@@ -207,7 +207,7 @@ pub(crate) fn on_unimplemented_components(
if self_ty.is_fn() {
let fn_sig = self_ty.fn_sig(self.tcx);
let shortname = if let ty::FnDef(def_id, _) = self_ty.kind()
let shortname = if let ty::FnDef(def_id, _) = *self_ty.kind()
&& self.tcx.codegen_fn_attrs(def_id).safe_target_features
{
"#[target_feature] fn"
@@ -231,7 +231,7 @@ fn parse_arg<'tcx>(
is_source_literal: bool,
) -> FormatArg {
let (Ctx::RustcOnUnimplemented { tcx, trait_def_id }
| Ctx::DiagnosticOnUnimplemented { tcx, trait_def_id }) = ctx;
| Ctx::DiagnosticOnUnimplemented { tcx, trait_def_id }) = *ctx;
let span = slice_span(input_span, arg.position_span.clone(), is_source_literal);
@@ -2244,9 +2244,9 @@ fn note_conflicting_closure_bounds(
// First, look for an `WhereClauseInExpr`, which means we can get
// the uninstantiated predicate list of the called function. And check
// that the predicate that we failed to satisfy is a `Fn`-like trait.
if let ObligationCauseCode::WhereClauseInExpr(def_id, _, _, idx) = cause
if let ObligationCauseCode::WhereClauseInExpr(def_id, _, _, idx) = *cause
&& let predicates = self.tcx.predicates_of(def_id).instantiate_identity(self.tcx)
&& let Some(pred) = predicates.predicates.get(*idx)
&& let Some(pred) = predicates.predicates.get(idx)
&& let ty::ClauseKind::Trait(trait_pred) = pred.kind().skip_binder()
&& self.tcx.is_fn_trait(trait_pred.def_id())
{
@@ -2257,7 +2257,7 @@ fn note_conflicting_closure_bounds(
// Find another predicate whose self-type is equal to the expected self type,
// but whose args don't match.
let other_pred = predicates.into_iter().enumerate().find(|(other_idx, (pred, _))| {
let other_pred = predicates.into_iter().enumerate().find(|&(other_idx, (pred, _))| {
match pred.kind().skip_binder() {
ty::ClauseKind::Trait(trait_pred)
if self.tcx.is_fn_trait(trait_pred.def_id())
@@ -3029,10 +3029,10 @@ pub(super) fn note_obligation_cause_code<G: EmissionGuarantee, T>(
let len = impls.len();
let mut types = impls
.iter()
.map(|t| {
.map(|&&t| {
with_no_trimmed_paths!(format!(
" {}",
tcx.type_of(*t).instantiate_identity(),
tcx.type_of(t).instantiate_identity(),
))
})
.collect::<Vec<_>>();
@@ -3421,7 +3421,7 @@ pub(super) fn note_obligation_cause_code<G: EmissionGuarantee, T>(
let ty_str = tcx.short_string(ty, err.long_ty_path());
format!("required because it appears within the type `{ty_str}`")
};
match ty.kind() {
match *ty.kind() {
ty::Adt(def, _) => {
let msg = msg();
match tcx.opt_item_ident(def.did()) {
@@ -4216,11 +4216,11 @@ fn note_function_argument_obligation<G: EmissionGuarantee>(
// to an associated type (as seen from `trait_pred`) in the predicate. Like in
// trait_pred `S: Sum<<Self as Iterator>::Item>` and predicate `i32: Sum<&()>`
let mut type_diffs = vec![];
if let ObligationCauseCode::WhereClauseInExpr(def_id, _, _, idx) = parent_code
if let ObligationCauseCode::WhereClauseInExpr(def_id, _, _, idx) = *parent_code
&& let Some(node_args) = typeck_results.node_args_opt(call_hir_id)
&& let where_clauses =
self.tcx.predicates_of(def_id).instantiate(self.tcx, node_args)
&& let Some(where_pred) = where_clauses.predicates.get(*idx)
&& let Some(where_pred) = where_clauses.predicates.get(idx)
{
if let Some(where_pred) = where_pred.as_trait_clause()
&& let Some(failed_pred) = failed_pred.as_trait_clause()
@@ -497,22 +497,19 @@ fn impl_intersection_has_negative_obligation(
) -> bool {
debug!("negative_impl(impl1_def_id={:?}, impl2_def_id={:?})", impl1_def_id, impl2_def_id);
// N.B. We need to unify impl headers *with* intercrate mode, even if proving negative predicates
// do not need intercrate mode enabled.
// N.B. We need to unify impl headers *with* `TypingMode::Coherence`,
// even if proving negative predicates doesn't need `TypingMode::Coherence`.
let ref infcx = tcx.infer_ctxt().with_next_trait_solver(true).build(TypingMode::Coherence);
let root_universe = infcx.universe();
assert_eq!(root_universe, ty::UniverseIndex::ROOT);
let impl1_header = fresh_impl_header(infcx, impl1_def_id, is_of_trait);
let param_env =
ty::EarlyBinder::bind(tcx.param_env(impl1_def_id)).instantiate(tcx, impl1_header.impl_args);
let impl2_header = fresh_impl_header(infcx, impl2_def_id, is_of_trait);
// Equate the headers to find their intersection (the general type, with infer vars,
// that may apply both impls).
let Some(equate_obligations) =
equate_impl_headers(infcx, param_env, &impl1_header, &impl2_header)
equate_impl_headers(infcx, ty::ParamEnv::empty(), &impl1_header, &impl2_header)
else {
return false;
};
@@ -530,7 +527,16 @@ fn impl_intersection_has_negative_obligation(
root_universe,
(impl1_header.impl_args, impl2_header.impl_args),
);
let param_env = infcx.resolve_vars_if_possible(param_env);
// Right above we plug inference variables with placeholders,
// this gets us new impl1_header_args with the inference variables actually resolved
// to those placeholders.
let impl1_header_args = infcx.resolve_vars_if_possible(impl1_header.impl_args);
// So there are no infer variables left now, except regions which aren't resolved by `resolve_vars_if_possible`.
assert!(!impl1_header_args.has_non_region_infer());
let param_env =
ty::EarlyBinder::bind(tcx.param_env(impl1_def_id)).instantiate(tcx, impl1_header_args);
util::elaborate(tcx, tcx.predicates_of(impl2_def_id).instantiate(tcx, impl2_header.impl_args))
.elaborate_sized()
@@ -267,7 +267,7 @@ pub fn dtorck_constraint_for_ty_inner<'tcx>(
return;
}
match ty.kind() {
match *ty.kind() {
ty::Bool
| ty::Char
| ty::Int(_)
@@ -287,7 +287,7 @@ pub fn dtorck_constraint_for_ty_inner<'tcx>(
ty::Pat(ety, _) | ty::Array(ety, _) | ty::Slice(ety) => {
// single-element containers, behave like their element
rustc_data_structures::stack::ensure_sufficient_stack(|| {
dtorck_constraint_for_ty_inner(tcx, typing_env, span, depth + 1, *ety, constraints)
dtorck_constraint_for_ty_inner(tcx, typing_env, span, depth + 1, ety, constraints)
});
}
+18 -7
View File
@@ -289,6 +289,7 @@ struct RcInner<T: ?Sized> {
}
/// Calculate layout for `RcInner<T>` using the inner value's layout
#[inline]
fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout {
// Calculate layout using the given value layout.
// Previously, layout was calculated on the expression
@@ -2518,15 +2519,25 @@ impl<T: Default> Default for Rc<T> {
/// ```
#[inline]
fn default() -> Self {
// First create an uninitialized allocation before creating an instance
// of `T`. This avoids having `T` on the stack and avoids the need to
// codegen a call to the destructor for `T` leading to generally better
// codegen. See #131460 for some more details.
let mut rc = Rc::new_uninit();
// SAFETY: this is a freshly allocated `Rc` so it's guaranteed there are
// no other strong or weak pointers other than `rc` itself.
unsafe {
Self::from_inner(
Box::leak(Box::write(
Box::new_uninit(),
RcInner { strong: Cell::new(1), weak: Cell::new(1), value: T::default() },
))
.into(),
)
let raw = Rc::get_mut_unchecked(&mut rc);
// Note that `ptr::write` here is used specifically instead of
// `MaybeUninit::write` to avoid creating an extra stack copy of `T`
// in debug mode. See #136043 for more context.
ptr::write(raw.as_mut_ptr(), T::default());
}
// SAFETY: this allocation was just initialized above.
unsafe { rc.assume_init() }
}
}
+18 -11
View File
@@ -392,6 +392,7 @@ struct ArcInner<T: ?Sized> {
}
/// Calculate layout for `ArcInner<T>` using the inner value's layout
#[inline]
fn arcinner_layout_for_value_layout(layout: Layout) -> Layout {
// Calculate layout using the given value layout.
// Previously, layout was calculated on the expression
@@ -3724,19 +3725,25 @@ impl<T: Default> Default for Arc<T> {
/// assert_eq!(*x, 0);
/// ```
fn default() -> Arc<T> {
// First create an uninitialized allocation before creating an instance
// of `T`. This avoids having `T` on the stack and avoids the need to
// codegen a call to the destructor for `T` leading to generally better
// codegen. See #131460 for some more details.
let mut arc = Arc::new_uninit();
// SAFETY: this is a freshly allocated `Arc` so it's guaranteed there
// are no other strong or weak pointers other than `arc` itself.
unsafe {
Self::from_inner(
Box::leak(Box::write(
Box::new_uninit(),
ArcInner {
strong: atomic::AtomicUsize::new(1),
weak: atomic::AtomicUsize::new(1),
data: T::default(),
},
))
.into(),
)
let raw = Arc::get_mut_unchecked(&mut arc);
// Note that `ptr::write` here is used specifically instead of
// `MaybeUninit::write` to avoid creating an extra stack copy of `T`
// in debug mode. See #136043 for more context.
ptr::write(raw.as_mut_ptr(), T::default());
}
// SAFETY: this allocation was just initialized above.
unsafe { arc.assume_init() }
}
}
+17 -18
View File
@@ -172,39 +172,38 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
#[unstable(feature = "bstr", issue = "134915")]
impl fmt::Display for ByteStr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fn fmt_nopad(this: &ByteStr, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for chunk in this.utf8_chunks() {
f.write_str(chunk.valid())?;
if !chunk.invalid().is_empty() {
f.write_str("\u{FFFD}")?;
}
}
Ok(())
}
let Some(align) = f.align() else {
return fmt_nopad(self, f);
};
let nchars: usize = self
.utf8_chunks()
.map(|chunk| {
chunk.valid().chars().count() + if chunk.invalid().is_empty() { 0 } else { 1 }
})
.sum();
let padding = f.width().unwrap_or(0).saturating_sub(nchars);
let fill = f.fill();
let (lpad, rpad) = match align {
fmt::Alignment::Left => (0, padding),
fmt::Alignment::Right => (padding, 0),
fmt::Alignment::Center => {
let (lpad, rpad) = match f.align() {
Some(fmt::Alignment::Right) => (padding, 0),
Some(fmt::Alignment::Center) => {
let half = padding / 2;
(half, half + padding % 2)
}
// Either alignment is not specified or it's left aligned
// which behaves the same with padding
_ => (0, padding),
};
for _ in 0..lpad {
write!(f, "{fill}")?;
}
fmt_nopad(self, f)?;
for chunk in self.utf8_chunks() {
f.write_str(chunk.valid())?;
if !chunk.invalid().is_empty() {
f.write_str("\u{FFFD}")?;
}
}
for _ in 0..rpad {
write!(f, "{fill}")?;
}
+2 -2
View File
@@ -17,8 +17,8 @@ macro_rules! uint_impl {
fsh_op = $fsh_op:literal,
fshl_result = $fshl_result:literal,
fshr_result = $fshr_result:literal,
clmul_lhs = $clmul_rhs:literal,
clmul_rhs = $clmul_lhs:literal,
clmul_lhs = $clmul_lhs:literal,
clmul_rhs = $clmul_rhs:literal,
clmul_result = $clmul_result:literal,
swap_op = $swap_op:literal,
swapped = $swapped:literal,
+2 -2
View File
@@ -37,14 +37,14 @@ macro_rules! with_api {
fn injected_env_var(var: &str) -> Option<String>;
fn track_env_var(var: &str, value: Option<&str>);
fn track_path(path: &str);
fn literal_from_str(s: &str) -> Result<Literal<$Span, $Symbol>, ()>;
fn literal_from_str(s: &str) -> Result<Literal<$Span, $Symbol>, String>;
fn emit_diagnostic(diagnostic: Diagnostic<$Span>);
fn ts_drop(stream: $TokenStream);
fn ts_clone(stream: &$TokenStream) -> $TokenStream;
fn ts_is_empty(stream: &$TokenStream) -> bool;
fn ts_expand_expr(stream: &$TokenStream) -> Result<$TokenStream, ()>;
fn ts_from_str(src: &str) -> $TokenStream;
fn ts_from_str(src: &str) -> Result<$TokenStream, String>;
fn ts_to_string(stream: &$TokenStream) -> String;
fn ts_from_token_tree(
tree: TokenTree<$TokenStream, $Span, $Symbol>,
+7 -4
View File
@@ -110,15 +110,18 @@ impl !Send for TokenStream {}
impl !Sync for TokenStream {}
/// Error returned from `TokenStream::from_str`.
///
/// The contained error message is explicitly not guaranteed to be stable in any way,
/// and may change between Rust versions or across compilations.
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[non_exhaustive]
#[derive(Debug)]
pub struct LexError;
pub struct LexError(String);
#[stable(feature = "proc_macro_lexerror_impls", since = "1.44.0")]
impl fmt::Display for LexError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("cannot parse string into token stream")
f.write_str(&self.0)
}
}
@@ -197,7 +200,7 @@ impl FromStr for TokenStream {
type Err = LexError;
fn from_str(src: &str) -> Result<TokenStream, LexError> {
Ok(TokenStream(Some(BridgeMethods::ts_from_str(src))))
Ok(TokenStream(Some(BridgeMethods::ts_from_str(src).map_err(LexError)?)))
}
}
@@ -1594,7 +1597,7 @@ impl FromStr for Literal {
fn from_str(src: &str) -> Result<Self, LexError> {
match BridgeMethods::literal_from_str(src) {
Ok(literal) => Ok(Literal(literal)),
Err(()) => Err(LexError),
Err(msg) => Err(LexError(msg)),
}
}
}
+2 -2
View File
@@ -90,12 +90,12 @@ impl ::Copy for $t {}
unsafe extern "C" fn init() {
// register unwind info on module startup
__register_frame_info(&__EH_FRAME_BEGIN__ as *const u8, &mut OBJ as *mut _ as *mut u8);
__register_frame_info(&__EH_FRAME_BEGIN__ as *const u8, &raw mut OBJ as *mut u8);
}
unsafe extern "C" fn uninit() {
// unregister on shutdown
__deregister_frame_info(&__EH_FRAME_BEGIN__ as *const u8, &mut OBJ as *mut _ as *mut u8);
__deregister_frame_info(&__EH_FRAME_BEGIN__ as *const u8, &raw mut OBJ as *mut u8);
}
// MinGW-specific init/uninit routine registration
+2 -1
View File
@@ -573,7 +573,8 @@ pub fn attribute<T>(self, attribute: usize, value: &'a T) -> Self {
///
/// # Example
///
/// ```
#[cfg_attr(target_vendor = "win7", doc = "```no_run")]
#[cfg_attr(not(target_vendor = "win7"), doc = "```")]
/// #![feature(windows_process_extensions_raw_attribute)]
/// use std::ffi::c_void;
/// use std::os::windows::process::{CommandExt, ProcThreadAttributeList};
+20
View File
@@ -2291,6 +2291,26 @@ fn display_format_flags() {
assert_eq!(format!("a{:#<5}b", Path::new("a").display()), "aa####b");
}
#[test]
fn display_path_with_padding_no_align() {
assert_eq!(format!("{:10}", Path::new("/foo/bar").display()), "/foo/bar ");
}
#[test]
fn display_path_with_padding_align_left() {
assert_eq!(format!("{:<10}", Path::new("/foo/bar").display()), "/foo/bar ");
}
#[test]
fn display_path_with_padding_align_right() {
assert_eq!(format!("{:>10}", Path::new("/foo/bar").display()), " /foo/bar");
}
#[test]
fn display_path_with_padding_align_center() {
assert_eq!(format!("{:^10}", Path::new("/foo/bar").display()), " /foo/bar ");
}
#[test]
fn into_rc() {
let orig = "hello/world";
@@ -24,7 +24,7 @@ This target is cross-compiled. Dynamic linking is unsupported.
`std` has only partial support due to platform limitations. Notably:
- `std::process` and `std::net` are unimplemented. `std::thread` only supports sleeping and yielding, as this is a single-threaded environment.
- `std::time` has full support for `Instant`, but no support for `SystemTime`.
- `std::io` has full support for `stdin`/`stdout`/`stderr`. `stdout` and `stderr` both write to to USB channel 1 on this platform and are not differentiated.
- `std::io` has full support for `stdin`/`stdout`/`stderr`. `stdout` and `stderr` both write to USB channel 1 on this platform and are not differentiated.
- `std::fs` has limited support for reading or writing to files. Directory operations, file deletion, and some file opening features are unsupported and will return errors.
- A global allocator implemented on top of `dlmalloc` is provided.
- Modules that do not need to interact with the OS beyond allocation such as `std::collections`, `std::hash`, `std::future`, `std::sync`, etc are fully supported.
+1 -1
View File
@@ -191,7 +191,7 @@ pub(crate) fn try_inline_glob(
.iter()
.filter(|child| !child.reexport_chain.is_empty())
.filter_map(|child| child.res.opt_def_id())
.filter(|def_id| !cx.tcx.is_doc_hidden(def_id))
.filter(|&def_id| !cx.tcx.is_doc_hidden(def_id))
.collect();
let attrs = cx.tcx.hir_attrs(import.hir_id());
let mut items = build_module_items(
+1 -1
View File
@@ -94,7 +94,7 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
// While the `impl` blocks themselves are only in `libcore`, the module with `doc`
// attached is directly included in `libstd` as well.
if did.is_local() {
for def_id in prim.impls(tcx).filter(|def_id| {
for def_id in prim.impls(tcx).filter(|&def_id| {
// Avoid including impl blocks with filled-in generics.
// https://github.com/rust-lang/rust/issues/94937
//
@@ -150,16 +150,16 @@ fn check_closure<'tcx>(cx: &LateContext<'tcx>, outer_receiver: Option<&Expr<'tcx
let callee_ty_adjustments = typeck.expr_adjustments(callee);
let callee_ty_adjusted = callee_ty_adjustments.last().map_or(callee_ty, |a| a.target);
let sig = match callee_ty_adjusted.kind() {
let sig = match *callee_ty_adjusted.kind() {
ty::FnDef(def, _) => {
// Rewriting `x(|| f())` to `x(f)` where f is marked `#[track_caller]` moves the `Location`
if find_attr!(cx.tcx, *def, TrackCaller(..)) {
if find_attr!(cx.tcx, def, TrackCaller(..)) {
return;
}
cx.tcx.fn_sig(def).skip_binder().skip_binder()
},
ty::FnPtr(sig_tys, hdr) => sig_tys.with(*hdr).skip_binder(),
ty::FnPtr(sig_tys, hdr) => sig_tys.with(hdr).skip_binder(),
ty::Closure(_, subs) => cx
.tcx
.signature_unclosure(subs.as_closure().sig(), Safety::Safe)
@@ -133,7 +133,7 @@ fn check_unwrap_or_default(
let output_type_implements_default = |fun| {
let fun_ty = cx.typeck_results().expr_ty(fun);
if let ty::FnDef(def_id, args) = fun_ty.kind() {
if let ty::FnDef(def_id, args) = *fun_ty.kind() {
let output_ty = cx.tcx.fn_sig(def_id).instantiate(cx.tcx, args).skip_binder().output();
cx.tcx
.get_diagnostic_item(sym::Default)
@@ -153,7 +153,7 @@ fn check_unwrap_or_default(
cx.tcx
.inherent_impls(adt_def.did())
.iter()
.flat_map(|impl_id| cx.tcx.associated_items(impl_id).filter_by_name_unhygienic(sugg))
.flat_map(|&impl_id| cx.tcx.associated_items(impl_id).filter_by_name_unhygienic(sugg))
.find_map(|assoc| {
if assoc.is_method() && cx.tcx.fn_sig(assoc.def_id).skip_binder().inputs().skip_binder().len() == 1 {
Some(assoc.def_id)
@@ -287,7 +287,7 @@ fn is_assoc_fn_without_type_instance<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'
..
},
)) = func.kind
&& let output_ty = cx.tcx.fn_sig(def_id).instantiate_identity().skip_binder().output()
&& let output_ty = cx.tcx.fn_sig(*def_id).instantiate_identity().skip_binder().output()
&& let ty::Param(ty::ParamTy {
name: kw::SelfUpper, ..
}) = output_ty.kind()
@@ -335,8 +335,8 @@ fn init_default_impl_for_type_if_needed(&mut self, cx: &LateContext<'_>) {
let impls = cx.tcx.trait_impls_of(default_trait_id);
for (ty, impl_def_ids) in impls.non_blanket_impls() {
let Some(self_def_id) = ty.def() else { continue };
for impl_def_id in impl_def_ids {
if !cx.tcx.is_automatically_derived(*impl_def_id) &&
for &impl_def_id in impl_def_ids {
if !cx.tcx.is_automatically_derived(impl_def_id) &&
let Some(assoc_item) = cx
.tcx
.associated_items(impl_def_id)
+2 -2
View File
@@ -601,9 +601,9 @@ pub fn is_default_equivalent_call(
&& let StatementKind::Assign(assign) = &block_data.statements[0].kind
&& assign.0.local == RETURN_PLACE
&& let Rvalue::Aggregate(kind, _places) = &assign.1
&& let AggregateKind::Adt(did, variant_index, _, _, _) = &**kind
&& let AggregateKind::Adt(did, variant_index, _, _, _) = **kind
&& let def = cx.tcx.adt_def(did)
&& let variant = &def.variant(*variant_index)
&& let variant = &def.variant(variant_index)
&& variant.fields.is_empty()
&& let Some((_, did)) = variant.ctor
&& did == repl_def_id
+1 -1
View File
@@ -335,7 +335,7 @@ fn non_local_item_child_by_name(tcx: TyCtxt<'_>, def_id: DefId, ns: PathNS, name
.associated_item_def_ids(def_id)
.iter()
.copied()
.find(|assoc_def_id| tcx.item_name(*assoc_def_id) == name && ns.matches(tcx.def_kind(assoc_def_id).ns())),
.find(|&assoc_def_id| tcx.item_name(assoc_def_id) == name && ns.matches(tcx.def_kind(assoc_def_id).ns())),
_ => None,
}
}
+4 -4
View File
@@ -310,13 +310,13 @@ pub fn has_drop<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
// Returns whether the type has #[must_use] attribute
pub fn is_must_use_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
match ty.kind() {
match *ty.kind() {
ty::Adt(adt, _) => find_attr!(cx.tcx, adt.did(), MustUse { .. }),
ty::Foreign(did) => find_attr!(cx.tcx, *did, MustUse { .. }),
ty::Foreign(did) => find_attr!(cx.tcx, did, MustUse { .. }),
ty::Slice(ty) | ty::Array(ty, _) | ty::RawPtr(ty, _) | ty::Ref(_, ty, _) => {
// for the Array case we don't need to care for the len == 0 case
// because we don't want to lint functions returning empty arrays
is_must_use_ty(cx, *ty)
is_must_use_ty(cx, ty)
},
ty::Tuple(args) => args.iter().any(|ty| is_must_use_ty(cx, ty)),
ty::Alias(ty::Opaque, AliasTy { def_id, .. }) => {
@@ -330,7 +330,7 @@ pub fn is_must_use_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
false
},
ty::Dynamic(binder, _) => {
for predicate in *binder {
for predicate in binder {
if let ty::ExistentialPredicate::Trait(ref trait_ref) = predicate.skip_binder()
&& find_attr!(cx.tcx, trait_ref.def_id, MustUse { .. })
{
+2 -2
View File
@@ -62,7 +62,7 @@ fn find_children<'tcx: 'a, 'a>(
// Go over the modules.
for &segment in modules {
let Some(next_item) = find_children(tcx, cur_item, segment)
.find(|item| tcx.def_kind(item) == DefKind::Mod)
.find(|&item| tcx.def_kind(item) == DefKind::Mod)
else {
continue 'crates;
};
@@ -72,7 +72,7 @@ fn find_children<'tcx: 'a, 'a>(
match item {
Some((item_name, namespace)) => {
let Some(item) = find_children(tcx, cur_item, item_name)
.find(|item| tcx.def_kind(item).ns() == Some(namespace))
.find(|&item| tcx.def_kind(item).ns() == Some(namespace))
else {
continue 'crates;
};
@@ -62,8 +62,9 @@ fn track_path(&mut self, path: &str) {
self.tracked_paths.insert(path.into());
}
fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, ()> {
fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, String> {
literal_from_str(s, self.call_site)
.map_err(|()| "cannot parse string into literal".to_string())
}
fn emit_diagnostic(&mut self, _: Diagnostic<Self::Span>) {
@@ -81,14 +82,9 @@ fn ts_clone(&mut self, stream: &Self::TokenStream) -> Self::TokenStream {
fn ts_is_empty(&mut self, stream: &Self::TokenStream) -> bool {
stream.is_empty()
}
fn ts_from_str(&mut self, src: &str) -> Self::TokenStream {
Self::TokenStream::from_str(src, self.call_site).unwrap_or_else(|e| {
Self::TokenStream::from_str(
&format!("compile_error!(\"failed to parse str to token stream: {e}\")"),
self.call_site,
)
.unwrap()
})
fn ts_from_str(&mut self, src: &str) -> Result<Self::TokenStream, String> {
Self::TokenStream::from_str(src, self.call_site)
.map_err(|e| format!("failed to parse str to token stream: {e}"))
}
fn ts_to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
@@ -67,8 +67,9 @@ fn track_path(&mut self, path: &str) {
self.tracked_paths.insert(path.into());
}
fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, ()> {
fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, String> {
literal_from_str(s, self.call_site)
.map_err(|()| "cannot parse string into literal".to_string())
}
fn emit_diagnostic(&mut self, _: Diagnostic<Self::Span>) {}
@@ -84,14 +85,9 @@ fn ts_clone(&mut self, stream: &Self::TokenStream) -> Self::TokenStream {
fn ts_is_empty(&mut self, stream: &Self::TokenStream) -> bool {
stream.is_empty()
}
fn ts_from_str(&mut self, src: &str) -> Self::TokenStream {
Self::TokenStream::from_str(src, self.call_site).unwrap_or_else(|e| {
Self::TokenStream::from_str(
&format!("compile_error!(\"failed to parse str to token stream: {e}\")"),
self.call_site,
)
.unwrap()
})
fn ts_from_str(&mut self, src: &str) -> Result<Self::TokenStream, String> {
Self::TokenStream::from_str(src, self.call_site)
.map_err(|e| format!("failed to parse str to token stream: {e}"))
}
fn ts_to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
+6 -6
View File
@@ -10,9 +10,9 @@
pub fn new_from_array(x: u64) -> Arc<[u64]> {
// Ensure that we only generate one alloca for the array.
// CHECK: alloca
// CHECK: %[[A:.+]] = alloca
// CHECK-SAME: [8000 x i8]
// CHECK-NOT: alloca
// CHECK-NOT: %[[B:.+]] = alloca
let array = [x; 1000];
Arc::new(array)
}
@@ -20,8 +20,9 @@ pub fn new_from_array(x: u64) -> Arc<[u64]> {
// CHECK-LABEL: @new_uninit
#[no_mangle]
pub fn new_uninit(x: u64) -> Arc<[u64; 1000]> {
// CHECK: call alloc::sync::arcinner_layout_for_value_layout
// CHECK-NOT: call alloc::sync::arcinner_layout_for_value_layout
// CHECK: %[[A:.+]] = alloca
// CHECK-SAME: [8000 x i8]
// CHECK-NOT: %[[B:.+]] = alloca
let mut arc = Arc::new_uninit();
unsafe { Arc::get_mut_unchecked(&mut arc) }.write([x; 1000]);
unsafe { arc.assume_init() }
@@ -30,8 +31,7 @@ pub fn new_from_array(x: u64) -> Arc<[u64]> {
// CHECK-LABEL: @new_uninit_slice
#[no_mangle]
pub fn new_uninit_slice(x: u64) -> Arc<[u64]> {
// CHECK: call alloc::sync::arcinner_layout_for_value_layout
// CHECK-NOT: call alloc::sync::arcinner_layout_for_value_layout
// CHECK-NOT: %[[B:.+]] = alloca
let mut arc = Arc::new_uninit_slice(1000);
for elem in unsafe { Arc::get_mut_unchecked(&mut arc) } {
elem.write(x);
@@ -0,0 +1,18 @@
//@ check-pass
// Fixes #151537
#![feature(portable_simd, core_intrinsics)]
use std::intrinsics::simd::SimdAlign;
use std::{ptr::null, simd::prelude::*};
const _: () = {
let c = Simd::from_array([0; 3]);
unsafe {
core::intrinsics::simd::simd_masked_store::<_, _, _, { SimdAlign::Element }>(
c,
null::<i32>(),
c,
)
};
};
fn main() {}
@@ -20,5 +20,9 @@ pub fn invalid_raw_ident(_: TokenStream) -> TokenStream {
#[proc_macro]
pub fn lexer_failure(_: TokenStream) -> TokenStream {
"a b ) c".parse().expect("parsing failed without panic")
assert_eq!(
"a b ) c".parse::<TokenStream>().unwrap_err().to_string(),
"unexpected closing delimiter: `)`"
);
TokenStream::new()
}
@@ -110,6 +110,10 @@ pub fn run() {
lit("3//\n4", NormalErr);
lit("18.u8E", NormalErr);
lit("/*a*/ //", NormalErr);
stream("1 ) 2", NormalErr);
stream("( x [ ) ]", NormalErr);
lit("1 ) 2", NormalErr);
lit("( x [ ) ]", NormalErr);
// FIXME: all of the cases below should return an Err and emit no diagnostics, but don't yet.
// emits diagnostics and returns LexError
@@ -122,8 +126,6 @@ pub fn run() {
for parse in [stream as fn(&str, Mode), lit] {
// emits diagnostic(s), then panics
parse("1 ) 2", OtherWithPanic);
parse("( x [ ) ]", OtherWithPanic);
parse("r#", OtherWithPanic);
// emits diagnostic(s), then returns Ok(Literal { kind: ErrWithGuar, .. })
+2 -6
View File
@@ -1,13 +1,9 @@
//@ proc-macro: invalid-punct-ident.rs
//@ needs-unwind proc macro panics to report errors
//@ check-pass
#[macro_use]
extern crate invalid_punct_ident;
lexer_failure!();
//~^ ERROR proc macro panicked
//~| ERROR unexpected closing delimiter: `)`
fn main() {
let _recovery_witness: () = 0; //~ ERROR mismatched types
}
fn main() {}
@@ -1,25 +0,0 @@
error: unexpected closing delimiter: `)`
--> $DIR/invalid-punct-ident-4.rs:7:1
|
LL | lexer_failure!();
| ^^^^^^^^^^^^^^^^ unexpected closing delimiter
|
= note: this error originates in the macro `lexer_failure` (in Nightly builds, run with -Z macro-backtrace for more info)
error: proc macro panicked
--> $DIR/invalid-punct-ident-4.rs:7:1
|
LL | lexer_failure!();
| ^^^^^^^^^^^^^^^^
error[E0308]: mismatched types
--> $DIR/invalid-punct-ident-4.rs:12:33
|
LL | let _recovery_witness: () = 0;
| -- ^ expected `()`, found integer
| |
| expected due to this
error: aborting due to 3 previous errors
For more information about this error, try `rustc --explain E0308`.
+1 -36
View File
@@ -40,26 +40,6 @@ LL | nonfatal_parsing::run!();
= note: prefixed identifiers and literals are reserved since Rust 2021
= note: this error originates in the macro `nonfatal_parsing::run` (in Nightly builds, run with -Z macro-backtrace for more info)
error: unexpected closing delimiter: `)`
--> $DIR/nonfatal-parsing.rs:15:5
|
LL | nonfatal_parsing::run!();
| ^^^^^^^^^^^^^^^^^^^^^^^^ unexpected closing delimiter
|
= note: this error originates in the macro `nonfatal_parsing::run` (in Nightly builds, run with -Z macro-backtrace for more info)
error: unexpected closing delimiter: `]`
--> $DIR/nonfatal-parsing.rs:15:5
|
LL | nonfatal_parsing::run!();
| -^^^^^^^^^^^^^^^^^^^^^^^
| |
| the nearest open delimiter
| missing open `(` for this delimiter
| unexpected closing delimiter
|
= note: this error originates in the macro `nonfatal_parsing::run` (in Nightly builds, run with -Z macro-backtrace for more info)
error: found invalid character; only `#` is allowed in raw string delimitation: \u{0}
--> $DIR/nonfatal-parsing.rs:15:5
|
@@ -135,21 +115,6 @@ LL | nonfatal_parsing::run!();
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
= note: this error originates in the macro `nonfatal_parsing::run` (in Nightly builds, run with -Z macro-backtrace for more info)
error: unexpected closing delimiter: `)`
--> <proc-macro source code>:1:3
|
LL | 1 ) 2
| ^ unexpected closing delimiter
error: unexpected closing delimiter: `]`
--> <proc-macro source code>:1:10
|
LL | ( x [ ) ]
| - - ^ unexpected closing delimiter
| | |
| | missing open `(` for this delimiter
| the nearest open delimiter
error: found invalid character; only `#` is allowed in raw string delimitation: \u{0}
--> <proc-macro source code>:1:1
|
@@ -210,6 +175,6 @@ error: invalid digit for a base 2 literal
LL | /*a*/ 0b2 //
| ^
error: aborting due to 24 previous errors
error: aborting due to 20 previous errors
For more information about this error, try `rustc --explain E0768`.
+14 -10
View File
@@ -29,15 +29,19 @@ Ok(TokenStream [Literal { kind: Integer, symbol: "3", suffix: None, span: #44 by
Ok(TokenStream [Literal { kind: Char, symbol: "c", suffix: None, span: #44 bytes(361..385) }])
Ok(TokenStream [])
### ERRORS
Err(LexError)
Err(LexError)
Err(LexError)
Err(LexError)
Err(LexError)
Err(LexError)
Err(LexError)
Err(LexError)
Err(LexError)
Err(LexError("comment or whitespace around literal"))
Err(LexError("comment or whitespace around literal"))
Err(LexError("comment or whitespace around literal"))
Err(LexError("comment or whitespace around literal"))
Err(LexError("comment or whitespace around literal"))
Err(LexError("comment or whitespace around literal"))
Err(LexError("not a literal"))
Err(LexError("unexpected closing delimiter: `)`"))
Err(LexError("unexpected closing delimiter: `]`"))
Err(LexError("unexpected closing delimiter: `)`"))
Err(LexError("unexpected closing delimiter: `]`"))
Err(LexError("not a literal"))
Err(LexError("not a literal"))
Ok(TokenStream [Ident { ident: "r", span: #44 bytes(361..385) }, Literal { kind: Char, symbol: "r", suffix: None, span: #44 bytes(361..385) }])
Ok(TokenStream [Ident { ident: "c", span: #44 bytes(361..385) }, Literal { kind: Char, symbol: "r", suffix: None, span: #44 bytes(361..385) }])
Ok(TokenStream [Literal { kind: ErrWithGuar, symbol: "0b2", suffix: None, span: #44 bytes(361..385) }])
@@ -51,4 +55,4 @@ Ok(Literal { kind: ErrWithGuar, symbol: "0b", suffix: Some("f32"), span: #44 byt
Ok(Literal { kind: ErrWithGuar, symbol: "0b0.0", suffix: Some("f32"), span: #44 bytes(361..385) })
Ok(Literal { kind: ErrWithGuar, symbol: "'''", suffix: None, span: #44 bytes(361..385) })
Ok(Literal { kind: ErrWithGuar, symbol: "'\n'", suffix: None, span: #44 bytes(361..385) })
Err(LexError)
Err(LexError("comment or whitespace around literal"))
@@ -0,0 +1,33 @@
//@ only-x86_64
//@ check-pass
//
// Regression test for <https://github.com/rust-lang/rust/issues/152340>.
#![allow(dead_code)]
#[target_feature(enable = "sse2")]
const fn foo() {}
// DefKind::Const
const _: () = unsafe {
let _: unsafe fn() = foo;
};
// DefKind::AssocConst
struct S;
impl S {
const C: () = unsafe {
let _: unsafe fn() = foo;
};
}
// DefKind::InlineConst
fn bar() {
let _ = const {
unsafe {
let _: unsafe fn() = foo;
}
};
}
fn main() {}