Auto merge of #155655 - JonathanBrouwer:rollup-KFUw3UR, r=JonathanBrouwer

Rollup of 10 pull requests

Successful merges:

 - rust-lang/rust#154794 (Add on_unmatch_args)
 - rust-lang/rust#155133 (Document precision considerations of `Duration`-float methods)
 - rust-lang/rust#154283 (Remove `nodes_in_current_session` field and related assertions)
 - rust-lang/rust#155374 (rustdoc: fix a few spots where emit isn't respected)
 - rust-lang/rust#155587 (Immediately feed visibility on DefId creation)
 - rust-lang/rust#155622 (c-variadic: `va_arg` fixes )
 - rust-lang/rust#155629 (rustc_public: Add `constness` & `asyncness` in `FnDef`)
 - rust-lang/rust#155632 (Some metadata cleanups)
 - rust-lang/rust#155639 (BinOpAssign always returns unit)
 - rust-lang/rust#155647 (rustc-dev-guide subtree update)
This commit is contained in:
bors
2026-04-22 19:16:27 +00:00
84 changed files with 1536 additions and 591 deletions
@@ -25,6 +25,7 @@
pub(crate) mod on_move;
pub(crate) mod on_unimplemented;
pub(crate) mod on_unknown;
pub(crate) mod on_unmatch_args;
#[derive(Copy, Clone)]
pub(crate) enum Mode {
@@ -38,6 +39,8 @@ pub(crate) enum Mode {
DiagnosticOnMove,
/// `#[diagnostic::on_unknown]`
DiagnosticOnUnknown,
/// `#[diagnostic::on_unmatch_args]`
DiagnosticOnUnmatchArgs,
}
impl Mode {
@@ -48,6 +51,7 @@ fn as_str(&self) -> &'static str {
Self::DiagnosticOnConst => "diagnostic::on_const",
Self::DiagnosticOnMove => "diagnostic::on_move",
Self::DiagnosticOnUnknown => "diagnostic::on_unknown",
Self::DiagnosticOnUnmatchArgs => "diagnostic::on_unmatch_args",
}
}
@@ -62,6 +66,7 @@ fn expected_options(&self) -> &'static str {
Self::DiagnosticOnConst => DEFAULT,
Self::DiagnosticOnMove => DEFAULT,
Self::DiagnosticOnUnknown => DEFAULT,
Self::DiagnosticOnUnmatchArgs => DEFAULT,
}
}
@@ -75,6 +80,7 @@ fn allowed_options(&self) -> &'static str {
Self::DiagnosticOnConst => DEFAULT,
Self::DiagnosticOnMove => DEFAULT,
Self::DiagnosticOnUnknown => DEFAULT,
Self::DiagnosticOnUnmatchArgs => DEFAULT,
}
}
}
@@ -398,7 +404,9 @@ fn parse_arg(
Position::ArgumentNamed(name) => match (mode, Symbol::intern(name)) {
// Only `#[rustc_on_unimplemented]` can use these
(Mode::RustcOnUnimplemented { .. }, sym::ItemContext) => FormatArg::ItemContext,
(Mode::RustcOnUnimplemented { .. }, sym::This) => FormatArg::This,
(Mode::RustcOnUnimplemented { .. } | Mode::DiagnosticOnUnmatchArgs, sym::This) => {
FormatArg::This
}
(Mode::RustcOnUnimplemented { .. }, sym::Trait) => FormatArg::Trait,
// Any attribute can use these
(_, kw::SelfUpper) => FormatArg::SelfUpper,
@@ -0,0 +1,58 @@
use rustc_errors::Diagnostic;
use rustc_hir::attrs::diagnostic::Directive;
use rustc_session::lint::builtin::MISPLACED_DIAGNOSTIC_ATTRIBUTES;
use crate::attributes::diagnostic::*;
use crate::attributes::prelude::*;
use crate::errors::DiagnosticOnUnmatchArgsOnlyForMacros;
/// Parser for the `#[diagnostic::on_unmatch_args]` attribute, which lets a
/// macro definition customize the diagnostic emitted when an invocation of
/// the macro fails to match any of its rules.
#[derive(Default)]
pub(crate) struct OnUnmatchArgsParser {
    // Span of the attribute itself, if one was seen. Set even when the
    // directive contents turn out to be malformed, so the attribute's
    // presence is still recorded.
    span: Option<Span>,
    // The successfully parsed directive, paired with the span of the
    // attribute that produced it; `None` when no well-formed directive
    // was parsed.
    directive: Option<(Span, Directive)>,
}
impl<S: Stage> AttributeParser<S> for OnUnmatchArgsParser {
    const ATTRIBUTES: AcceptMapping<Self, S> = &[(
        &[sym::diagnostic, sym::on_unmatch_args],
        template!(List: &[r#"/*opt*/ message = "...", /*opt*/ label = "...", /*opt*/ note = "...""#]),
        |this, cx, args| {
            // Silently ignore the attribute when its feature gate is off.
            if !cx.features().diagnostic_on_unmatch_args() {
                return;
            }
            let span = cx.attr_span;
            // Record the attribute's presence before any validation, so a
            // misplaced or malformed attribute still surfaces in `finalize`.
            this.span = Some(span);
            // The attribute is only meaningful on macro definitions; lint
            // (rather than hard-error) when it appears anywhere else.
            if !matches!(cx.target, Target::MacroDef) {
                cx.emit_dyn_lint(
                    MISPLACED_DIAGNOSTIC_ATTRIBUTES,
                    move |dcx, level| DiagnosticOnUnmatchArgsOnlyForMacros.into_diag(dcx, level),
                    span,
                );
                return;
            }
            let mode = Mode::DiagnosticOnUnmatchArgs;
            // Both parse steps bail out early on malformed input (any error
            // reporting happens inside them); `this.span` stays set so the
            // attribute is still recorded.
            let Some(items) = parse_list(cx, args, mode) else { return };
            let Some(directive) = parse_directive_items(cx, mode, items.mixed(), true) else {
                return;
            };
            merge_directives(cx, &mut this.directive, (span, directive));
        },
    )];

    // Target validation is handled manually in the accept closure above (via
    // the misplaced-attribute lint), so all targets are allowed here.
    const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(ALL_TARGETS);

    /// Produces `AttributeKind::OnUnmatchArgs` if the attribute was seen at
    /// all; `directive` is `None` when the attribute was misplaced or its
    /// contents were malformed.
    fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> {
        if let Some(span) = self.span {
            Some(AttributeKind::OnUnmatchArgs {
                span,
                // Drop the stored span; only the directive itself is kept.
                directive: self.directive.map(|d| Box::new(d.1)),
            })
        } else {
            None
        }
    }
}
@@ -33,6 +33,7 @@
use crate::attributes::diagnostic::on_move::*;
use crate::attributes::diagnostic::on_unimplemented::*;
use crate::attributes::diagnostic::on_unknown::*;
use crate::attributes::diagnostic::on_unmatch_args::*;
use crate::attributes::doc::*;
use crate::attributes::dummy::*;
use crate::attributes::inline::*;
@@ -159,6 +160,7 @@ mod late {
OnMoveParser,
OnUnimplementedParser,
OnUnknownParser,
OnUnmatchArgsParser,
RustcAlignParser,
RustcAlignStaticParser,
RustcCguTestAttributeParser,
@@ -315,6 +315,10 @@ pub(crate) struct DiagnosticOnUnknownOnlyForImports {
pub target_span: Span,
}
/// Diagnostic emitted (via the `MISPLACED_DIAGNOSTIC_ATTRIBUTES` lint) when
/// `#[diagnostic::on_unmatch_args]` is applied to anything other than a
/// macro definition.
#[derive(Diagnostic)]
#[diag("`#[diagnostic::on_unmatch_args]` can only be applied to macro definitions")]
pub(crate) struct DiagnosticOnUnmatchArgsOnlyForMacros;
#[derive(Diagnostic)]
#[diag("`#[diagnostic::do_not_recommend]` can only be placed on trait implementations")]
pub(crate) struct IncorrectDoNotRecommendLocation {
+13 -16
View File
@@ -11,7 +11,7 @@
use rustc_target::spec::{Arch, Env, LlvmAbi, RustcAbi};
use crate::builder::Builder;
use crate::llvm::{Type, Value};
use crate::llvm::Value;
use crate::type_of::LayoutLlvmExt;
fn round_up_to_alignment<'ll>(
@@ -27,13 +27,14 @@ fn round_pointer_up_to_alignment<'ll>(
bx: &mut Builder<'_, 'll, '_>,
addr: &'ll Value,
align: Align,
ptr_ty: &'ll Type,
) -> &'ll Value {
let ptr = bx.inbounds_ptradd(addr, bx.const_i32(align.bytes() as i32 - 1));
let pointer_width = bx.tcx().sess.target.pointer_width;
let mask = align.bytes().wrapping_neg() & (u64::MAX >> (64 - pointer_width));
bx.call_intrinsic(
"llvm.ptrmask",
&[ptr_ty, bx.type_i32()],
&[ptr, bx.const_int(bx.isize_ty, -(align.bytes() as isize) as i64)],
&[bx.type_ptr(), bx.type_isize()],
&[ptr, bx.const_usize(mask)],
)
}
@@ -53,7 +54,7 @@ fn emit_direct_ptr_va_arg<'ll, 'tcx>(
let ptr = bx.load(va_list_ty, va_list_addr, ptr_align_abi);
let (addr, addr_align) = if allow_higher_align && align > slot_size {
(round_pointer_up_to_alignment(bx, ptr, align, bx.type_ptr()), align)
(round_pointer_up_to_alignment(bx, ptr, align), align)
} else {
(ptr, slot_size)
};
@@ -69,7 +70,8 @@ fn emit_direct_ptr_va_arg<'ll, 'tcx>(
{
let adjusted_size = bx.cx().const_i32((slot_size.bytes() - size.bytes()) as i32);
let adjusted = bx.inbounds_ptradd(addr, adjusted_size);
(adjusted, addr_align)
// We're in the middle of a slot now, so use the type's alignment, not the slot's.
(adjusted, align)
} else {
(addr, addr_align)
}
@@ -357,12 +359,8 @@ fn emit_powerpc_va_arg<'ll, 'tcx>(
// Round up address of argument to alignment
if layout.layout.align.abi > overflow_area_align {
overflow_area = round_pointer_up_to_alignment(
bx,
overflow_area,
layout.layout.align.abi,
bx.type_ptr(),
);
overflow_area =
round_pointer_up_to_alignment(bx, overflow_area, layout.layout.align.abi);
}
let mem_addr = overflow_area;
@@ -827,7 +825,7 @@ fn emit_hexagon_va_arg_musl<'ll, 'tcx>(
} else {
Align::from_bytes(4).unwrap()
};
let aligned_current = round_pointer_up_to_alignment(bx, current_ptr, arg_align, bx.type_ptr());
let aligned_current = round_pointer_up_to_alignment(bx, current_ptr, arg_align);
// Calculate next pointer position (following LLVM's logic)
// Arguments <= 32 bits take 4 bytes, > 32 bits take 8 bytes
@@ -849,8 +847,7 @@ fn emit_hexagon_va_arg_musl<'ll, 'tcx>(
bx.switch_to_block(from_overflow);
// Align overflow pointer using the same alignment rules
let aligned_overflow =
round_pointer_up_to_alignment(bx, overflow_ptr, arg_align, bx.type_ptr());
let aligned_overflow = round_pointer_up_to_alignment(bx, overflow_ptr, arg_align);
let overflow_value_addr = aligned_overflow;
// Update overflow pointer - use the same size calculation
@@ -890,7 +887,7 @@ fn emit_hexagon_va_arg_bare_metal<'ll, 'tcx>(
let aligned_ptr = if ty_align.bytes() > 4 {
// Ensure alignment is a power of 2
debug_assert!(ty_align.bytes().is_power_of_two(), "Alignment is not power of 2!");
round_pointer_up_to_alignment(bx, current_ptr, ty_align, bx.type_ptr())
round_pointer_up_to_alignment(bx, current_ptr, ty_align)
} else {
current_ptr
};
+28 -2
View File
@@ -3,6 +3,7 @@
use rustc_ast::token::{self, Token};
use rustc_ast::tokenstream::TokenStream;
use rustc_errors::{Applicability, Diag, DiagCtxtHandle, DiagMessage};
use rustc_hir::attrs::diagnostic::{CustomDiagnostic, Directive, FormatArgs};
use rustc_macros::Subdiagnostic;
use rustc_parse::parser::{Parser, Recovery, token_descr};
use rustc_session::parse::ParseSess;
@@ -32,6 +33,7 @@ pub(super) fn failed_to_match_macro(
args: FailedMacro<'_>,
body: &TokenStream,
rules: &[MacroRule],
on_unmatch_args: Option<&Directive>,
) -> (Span, ErrorGuaranteed) {
debug!("failed to match macro");
let def_head_span = if !def_span.is_dummy() && !psess.source_map().is_imported(def_span) {
@@ -72,9 +74,30 @@ pub(super) fn failed_to_match_macro(
};
let span = token.span.substitute_dummy(sp);
let CustomDiagnostic {
message: custom_message, label: custom_label, notes: custom_notes, ..
} = {
let macro_name = name.to_string();
on_unmatch_args
.map(|directive| {
directive.eval(
None,
&FormatArgs {
this: macro_name.clone(),
this_sugared: macro_name,
item_context: "macro invocation",
generic_args: Vec::new(),
},
)
})
.unwrap_or_default()
};
let mut err = psess.dcx().struct_span_err(span, parse_failure_msg(&token, None));
err.span_label(span, label);
let mut err = match custom_message {
Some(message) => psess.dcx().struct_span_err(span, message),
None => psess.dcx().struct_span_err(span, parse_failure_msg(&token, None)),
};
err.span_label(span, custom_label.unwrap_or_else(|| label.to_string()));
if !def_head_span.is_dummy() {
err.span_label(def_head_span, "when calling this macro");
}
@@ -86,6 +109,9 @@ pub(super) fn failed_to_match_macro(
} else {
err.note(format!("while trying to match {remaining_matcher}"));
}
for note in custom_notes {
err.note(note);
}
if let MatcherLoc::Token { token: expected_token } = &remaining_matcher
&& (matches!(expected_token.kind, token::OpenInvisible(_))
+31 -6
View File
@@ -14,6 +14,7 @@
use rustc_errors::{Applicability, Diag, ErrorGuaranteed, MultiSpan};
use rustc_feature::Features;
use rustc_hir as hir;
use rustc_hir::attrs::diagnostic::Directive;
use rustc_hir::def::MacroKinds;
use rustc_hir::find_attr;
use rustc_lint_defs::builtin::{
@@ -164,6 +165,7 @@ pub struct MacroRulesMacroExpander {
node_id: NodeId,
name: Ident,
span: Span,
on_unmatch_args: Option<Directive>,
transparency: Transparency,
kinds: MacroKinds,
rules: Vec<MacroRule>,
@@ -194,7 +196,8 @@ pub fn expand_derive(
) -> Result<TokenStream, ErrorGuaranteed> {
// This is similar to `expand_macro`, but they have very different signatures, and will
// diverge further once derives support arguments.
let Self { name, ref rules, node_id, .. } = *self;
let name = self.name;
let rules = &self.rules;
let psess = &cx.sess.psess;
if cx.trace_macros() {
@@ -220,8 +223,8 @@ pub fn expand_derive(
trace_macros_note(&mut cx.expansions, sp, msg);
}
if is_defined_in_current_crate(node_id) {
cx.resolver.record_macro_rule_usage(node_id, rule_index);
if is_defined_in_current_crate(self.node_id) {
cx.resolver.record_macro_rule_usage(self.node_id, rule_index);
}
Ok(tts)
@@ -236,6 +239,7 @@ pub fn expand_derive(
FailedMacro::Derive,
body,
rules,
self.on_unmatch_args.as_ref(),
);
cx.macro_error_and_trace_macros_diag();
Err(guar)
@@ -260,6 +264,7 @@ fn expand<'cx, 'a: 'cx>(
self.transparency,
input,
&self.rules,
self.on_unmatch_args.as_ref(),
))
}
}
@@ -294,6 +299,7 @@ fn expand_with_safety(
args,
body,
&self.rules,
self.on_unmatch_args.as_ref(),
)
}
}
@@ -355,7 +361,7 @@ fn description() -> &'static str {
}
/// Expands the rules based macro defined by `rules` for a given input `arg`.
#[instrument(skip(cx, transparency, arg, rules))]
#[instrument(skip(cx, transparency, arg, rules, on_unmatch_args))]
fn expand_macro<'cx, 'a: 'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
@@ -365,6 +371,7 @@ fn expand_macro<'cx, 'a: 'cx>(
transparency: Transparency,
arg: TokenStream,
rules: &'a [MacroRule],
on_unmatch_args: Option<&Directive>,
) -> Box<dyn MacResult + 'cx> {
let psess = &cx.sess.psess;
@@ -423,6 +430,7 @@ fn expand_macro<'cx, 'a: 'cx>(
FailedMacro::Func,
&arg,
rules,
on_unmatch_args,
);
cx.macro_error_and_trace_macros_diag();
DummyResult::any(span, guar)
@@ -431,7 +439,7 @@ fn expand_macro<'cx, 'a: 'cx>(
}
/// Expands the rules based macro defined by `rules` for a given attribute `args` and `body`.
#[instrument(skip(cx, transparency, args, body, rules))]
#[instrument(skip(cx, transparency, args, body, rules, on_unmatch_args))]
fn expand_macro_attr(
cx: &mut ExtCtxt<'_>,
sp: Span,
@@ -443,6 +451,7 @@ fn expand_macro_attr(
args: TokenStream,
body: TokenStream,
rules: &[MacroRule],
on_unmatch_args: Option<&Directive>,
) -> Result<TokenStream, ErrorGuaranteed> {
let psess = &cx.sess.psess;
// Macros defined in the current crate have a real node id,
@@ -507,6 +516,7 @@ fn expand_macro_attr(
FailedMacro::Attr(&args),
&body,
rules,
on_unmatch_args,
);
cx.trace_macros_diag();
Err(guar)
@@ -849,7 +859,22 @@ pub fn compile_declarative_macro(
// Return the number of rules for unused rule linting, if this is a local macro.
let nrules = if is_defined_in_current_crate(node_id) { rules.len() } else { 0 };
let exp = MacroRulesMacroExpander { name: ident, kinds, span, node_id, transparency, rules };
let on_unmatch_args = find_attr!(
attrs,
OnUnmatchArgs { directive, .. } => directive.clone()
)
.flatten()
.map(|directive| *directive);
let exp = MacroRulesMacroExpander {
name: ident,
kinds,
span,
node_id,
on_unmatch_args,
transparency,
rules,
};
(mk_syn_ext(SyntaxExtensionKind::MacroRules(Arc::new(exp))), nrules)
}
+2
View File
@@ -480,6 +480,8 @@ pub fn internal(&self, feature: Symbol) -> bool {
(unstable, diagnostic_on_move, "1.96.0", Some(154181)),
/// Allows giving unresolved imports a custom diagnostic message
(unstable, diagnostic_on_unknown, "1.96.0", Some(152900)),
/// Allows macros to customize macro argument matcher diagnostics.
(unstable, diagnostic_on_unmatch_args, "CURRENT_RUSTC_VERSION", Some(155642)),
/// Allows `#[doc(cfg(...))]`.
(unstable, doc_cfg, "1.21.0", Some(43781)),
/// Allows `#[doc(masked)]`.
@@ -1208,6 +1208,13 @@ pub enum AttributeKind {
directive: Option<Box<Directive>>,
},
/// Represents `#[diagnostic::on_unmatch_args]`.
OnUnmatchArgs {
span: Span,
/// None if the directive was malformed in some way.
directive: Option<Box<Directive>>,
},
/// Represents `#[optimize(size|speed)]`
Optimize(OptimizeAttr, Span),
@@ -80,6 +80,7 @@ pub fn encode_cross_crate(&self) -> EncodeCrossCrate {
OnMove { .. } => Yes,
OnUnimplemented { .. } => Yes,
OnUnknown { .. } => Yes,
OnUnmatchArgs { .. } => Yes,
Optimize(..) => No,
PanicRuntime => No,
PatchableFunctionEntry { .. } => Yes,
+11 -21
View File
@@ -34,19 +34,14 @@ pub(crate) fn check_expr_assign_op(
rhs: &'tcx Expr<'tcx>,
expected: Expectation<'tcx>,
) -> Ty<'tcx> {
let (lhs_ty, rhs_ty, return_ty) =
let (lhs_ty, rhs_ty, _return_ty) =
self.check_overloaded_binop(expr, lhs, rhs, Op::AssignOp(op), expected);
let category = BinOpCategory::from(op.node);
let ty = if !lhs_ty.is_ty_var()
&& !rhs_ty.is_ty_var()
&& is_builtin_binop(lhs_ty, rhs_ty, category)
if !lhs_ty.is_ty_var() && !rhs_ty.is_ty_var() && is_builtin_binop(lhs_ty, rhs_ty, category)
{
self.enforce_builtin_binop_types(lhs.span, lhs_ty, rhs.span, rhs_ty, category);
self.tcx.types.unit
} else {
return_ty
};
}
self.check_lhs_assignable(lhs, E0067, op.span, |err| {
if let Some(lhs_deref_ty) = self.deref_once_mutably_for_diagnostic(lhs_ty) {
@@ -86,7 +81,7 @@ pub(crate) fn check_expr_assign_op(
}
});
ty
self.tcx.types.unit
}
/// Checks a potentially overloaded binary operator.
@@ -1311,21 +1306,16 @@ fn deref_ty_if_possible(ty: Ty<'_>) -> Ty<'_> {
}
/// Returns `true` if this is a built-in arithmetic operation (e.g.,
/// u32 + u32, i16x4 == i16x4) and false if these types would have to be
/// overloaded to be legal. There are two reasons that we distinguish
/// u32 + u32) and false if these types would have to be
/// overloaded to be legal. The reason that we distinguish
/// builtin operations from overloaded ones (vs trying to drive
/// everything uniformly through the trait system and intrinsics or
/// something like that):
/// something like that) is that builtin operations can trivially
/// be evaluated in constants on stable, while the traits and their
/// impls for these primitive types are not yet const-stable.
///
/// 1. Builtin operations can trivially be evaluated in constants.
/// 2. For comparison operators applied to SIMD types the result is
/// not of type `bool`. For example, `i16x4 == i16x4` yields a
/// type like `i16x4`. This means that the overloaded trait
/// `PartialEq` is not applicable.
///
/// Reason #2 is the killer. I tried for a while to always use
/// overloaded logic and just check the types in constants/codegen after
/// the fact, and it worked fine, except for SIMD types. -nmatsakis
/// FIXME(const_trait_impls): once the traits and their impls are const stable
/// remove this function and the builtin-specific checks.
fn is_builtin_binop<'tcx>(lhs: Ty<'tcx>, rhs: Ty<'tcx>, category: BinOpCategory) -> bool {
// Special-case a single layer of referencing, so that things like `5.0 + &6.0f32` work.
// (See https://github.com/rust-lang/rust/issues/57447.)
@@ -52,7 +52,6 @@ fn into_diag(self, dcx: DiagCtxtHandle<'a>, level: Level) -> Diag<'a, ()> {
lints::MalFormedDiagnosticAttributeLint { attribute, options, span }
.into_diag(dcx, level)
}
AttributeLintKind::MalformedDiagnosticFormat { warning } => match warning {
FormatWarning::PositionalArgument { .. } => {
lints::DisallowedPositionalArgument.into_diag(dcx, level)
+70 -69
View File
@@ -221,7 +221,7 @@ fn get_lazy_state(&self) -> LazyState {
/// This is the decode context used when crate metadata was already read.
/// Decoding of some types, like `Span`, requires some information to have already been read.
/// Can be constructed from a [`TyCtxt`] and [`CrateMetadataRef`] (see the [`Metadata`] trait)
/// Can be constructed from a [`TyCtxt`] and [`CrateMetadataRef`] (see the [`MetaDecoder`] trait)
pub(super) struct MetadataDecodeContext<'a, 'tcx> {
blob_decoder: BlobDecodeContext<'a>,
cdata: CrateMetadataRef<'a>,
@@ -255,19 +255,24 @@ fn deref(&self) -> &Self::Target {
}
}
pub(super) trait Metadata<'a>: Copy {
pub(super) trait MetaBlob<'a>: Copy {
fn blob(&self) -> &'a MetadataBlob;
}
pub(super) trait MetaDecoder: Copy {
type Context: BlobDecoder + LazyDecoder;
fn blob(self) -> &'a MetadataBlob;
fn decoder(self, pos: usize) -> Self::Context;
}
impl<'a> Metadata<'a> for &'a MetadataBlob {
type Context = BlobDecodeContext<'a>;
fn blob(self) -> &'a MetadataBlob {
impl<'a> MetaBlob<'a> for &'a MetadataBlob {
fn blob(&self) -> &'a MetadataBlob {
self
}
}
impl<'a> MetaDecoder for &'a MetadataBlob {
type Context = BlobDecodeContext<'a>;
fn decoder(self, pos: usize) -> Self::Context {
BlobDecodeContext {
@@ -285,16 +290,18 @@ fn decoder(self, pos: usize) -> Self::Context {
}
}
impl<'a, 'tcx> Metadata<'a> for (CrateMetadataRef<'a>, TyCtxt<'tcx>) {
type Context = MetadataDecodeContext<'a, 'tcx>;
fn blob(self) -> &'a MetadataBlob {
&self.0.cdata.blob
impl<'a> MetaBlob<'a> for &'a CrateMetadata {
fn blob(&self) -> &'a MetadataBlob {
&self.blob
}
}
impl<'a, 'tcx> MetaDecoder for (CrateMetadataRef<'a>, TyCtxt<'tcx>) {
type Context = MetadataDecodeContext<'a, 'tcx>;
fn decoder(self, pos: usize) -> MetadataDecodeContext<'a, 'tcx> {
MetadataDecodeContext {
blob_decoder: self.blob().decoder(pos),
blob_decoder: self.0.cdata.blob().decoder(pos),
cdata: self.0,
tcx: self.1,
alloc_decoding_session: self.0.cdata.alloc_decoding_state.new_decoding_session(),
@@ -304,7 +311,7 @@ fn decoder(self, pos: usize) -> MetadataDecodeContext<'a, 'tcx> {
impl<T: ParameterizedOverTcx> LazyValue<T> {
#[inline]
fn decode<'a, 'tcx, M: Metadata<'a>>(self, metadata: M) -> T::Value<'tcx>
fn decode<'tcx, M: MetaDecoder>(self, metadata: M) -> T::Value<'tcx>
where
T::Value<'tcx>: Decodable<M::Context>,
{
@@ -344,10 +351,7 @@ unsafe impl<D: Decoder, T: Decodable<D>> TrustedLen for DecodeIterator<T, D> {}
impl<T: ParameterizedOverTcx> LazyArray<T> {
#[inline]
fn decode<'a, 'tcx, M: Metadata<'a>>(
self,
metadata: M,
) -> DecodeIterator<T::Value<'tcx>, M::Context>
fn decode<'tcx, M: MetaDecoder>(self, metadata: M) -> DecodeIterator<T::Value<'tcx>, M::Context>
where
T::Value<'tcx>: Decodable<M::Context>,
{
@@ -468,7 +472,7 @@ fn decode_syntax_context(&mut self) -> SyntaxContext {
cdata
.root
.syntax_contexts
.get((cdata, tcx), id)
.get(cdata.cdata, id)
.unwrap_or_else(|| panic!("Missing SyntaxContext {id:?} for crate {cname:?}"))
.decode((cdata, tcx))
})
@@ -494,13 +498,13 @@ fn decode_expn_id(&mut self) -> ExpnId {
let expn_data = crate_data
.root
.expn_data
.get((crate_data, tcx), index)
.get(crate_data.cdata, index)
.unwrap()
.decode((crate_data, tcx));
let expn_hash = crate_data
.root
.expn_hashes
.get((crate_data, tcx), index)
.get(crate_data.cdata, index)
.unwrap()
.decode((crate_data, tcx));
(expn_data, expn_hash)
@@ -1017,7 +1021,7 @@ fn opt_item_ident(self, tcx: TyCtxt<'_>, item_index: DefIndex) -> Option<Ident>
.root
.tables
.def_ident_span
.get((self, tcx), item_index)
.get(self.cdata, item_index)
.unwrap_or_else(|| self.missing("def_ident_span", item_index))
.decode((self, tcx));
Some(Ident::new(name, span))
@@ -1032,11 +1036,11 @@ pub(super) fn map_encoded_cnum_to_current(self, cnum: CrateNum) -> CrateNum {
if cnum == LOCAL_CRATE { self.cnum } else { self.cnum_map[cnum] }
}
fn def_kind(self, tcx: TyCtxt<'_>, item_id: DefIndex) -> DefKind {
fn def_kind(self, item_id: DefIndex) -> DefKind {
self.root
.tables
.def_kind
.get((self, tcx), item_id)
.get(self.cdata, item_id)
.unwrap_or_else(|| self.missing("def_kind", item_id))
}
@@ -1044,7 +1048,7 @@ fn get_span(self, tcx: TyCtxt<'_>, index: DefIndex) -> Span {
self.root
.tables
.def_span
.get((self, tcx), index)
.get(self.cdata, index)
.unwrap_or_else(|| self.missing("def_span", index))
.decode((self, tcx))
}
@@ -1097,7 +1101,7 @@ fn get_variant(
};
let data =
self.root.tables.variant_data.get((self, tcx), index).unwrap().decode((self, tcx));
self.root.tables.variant_data.get(self.cdata, index).unwrap().decode((self, tcx));
let variant_did =
if adt_kind == ty::AdtKind::Enum { Some(self.local_def_id(index)) } else { None };
@@ -1115,7 +1119,7 @@ fn get_variant(
did,
name: self.item_name(did.index),
vis: self.get_visibility(tcx, did.index),
safety: self.get_safety(tcx, did.index),
safety: self.get_safety(did.index),
value: self.get_default_field(tcx, did.index),
})
.collect(),
@@ -1127,7 +1131,7 @@ fn get_variant(
}
fn get_adt_def<'tcx>(self, tcx: TyCtxt<'tcx>, item_id: DefIndex) -> ty::AdtDef<'tcx> {
let kind = self.def_kind(tcx, item_id);
let kind = self.def_kind(item_id);
let did = self.local_def_id(item_id);
let adt_kind = match kind {
@@ -1137,17 +1141,17 @@ fn get_adt_def<'tcx>(self, tcx: TyCtxt<'tcx>, item_id: DefIndex) -> ty::AdtDef<'
_ => bug!("get_adt_def called on a non-ADT {:?}", did),
};
let repr =
self.root.tables.repr_options.get((self, tcx), item_id).unwrap().decode((self, tcx));
self.root.tables.repr_options.get(self.cdata, item_id).unwrap().decode((self, tcx));
let mut variants: Vec<_> = if let ty::AdtKind::Enum = adt_kind {
self.root
.tables
.module_children_non_reexports
.get((self, tcx), item_id)
.get(self.cdata, item_id)
.expect("variants are not encoded for an enum")
.decode((self, tcx))
.filter_map(|index| {
let kind = self.def_kind(tcx, index);
let kind = self.def_kind(index);
match kind {
DefKind::Ctor(..) => None,
_ => Some(self.get_variant(tcx, kind, index, did)),
@@ -1172,25 +1176,25 @@ fn get_visibility(self, tcx: TyCtxt<'_>, id: DefIndex) -> Visibility<DefId> {
self.root
.tables
.visibility
.get((self, tcx), id)
.get(self.cdata, id)
.unwrap_or_else(|| self.missing("visibility", id))
.decode((self, tcx))
.map_id(|index| self.local_def_id(index))
}
fn get_safety(self, tcx: TyCtxt<'_>, id: DefIndex) -> Safety {
self.root.tables.safety.get((self, tcx), id)
fn get_safety(self, id: DefIndex) -> Safety {
self.root.tables.safety.get(self.cdata, id)
}
fn get_default_field(self, tcx: TyCtxt<'_>, id: DefIndex) -> Option<DefId> {
self.root.tables.default_fields.get((self, tcx), id).map(|d| d.decode((self, tcx)))
self.root.tables.default_fields.get(self.cdata, id).map(|d| d.decode((self, tcx)))
}
fn get_expn_that_defined(self, tcx: TyCtxt<'_>, id: DefIndex) -> ExpnId {
self.root
.tables
.expn_that_defined
.get((self, tcx), id)
.get(self.cdata, id)
.unwrap_or_else(|| self.missing("expn_that_defined", id))
.decode((self, tcx))
}
@@ -1259,7 +1263,7 @@ fn get_diagnostic_items(self, tcx: TyCtxt<'_>) -> DiagnosticItems {
fn get_mod_child(self, tcx: TyCtxt<'_>, id: DefIndex) -> ModChild {
let ident = self.item_ident(tcx, id);
let res = Res::Def(self.def_kind(tcx, id), self.local_def_id(id));
let res = Res::Def(self.def_kind(id), self.local_def_id(id));
let vis = self.get_visibility(tcx, id);
ModChild { ident, res, vis, reexport_chain: Default::default() }
@@ -1282,12 +1286,12 @@ fn get_module_children(self, tcx: TyCtxt<'_>, id: DefIndex) -> impl Iterator<Ite
} else {
// Iterate over all children.
let non_reexports =
self.root.tables.module_children_non_reexports.get((self, tcx), id);
self.root.tables.module_children_non_reexports.get(self.cdata, id);
for child_index in non_reexports.unwrap().decode((self, tcx)) {
yield self.get_mod_child(tcx, child_index);
}
let reexports = self.root.tables.module_children_reexports.get((self, tcx), id);
let reexports = self.root.tables.module_children_reexports.get(self.cdata, id);
if !reexports.is_default() {
for reexport in reexports.decode((self, tcx)) {
yield reexport;
@@ -1303,7 +1307,7 @@ fn get_ambig_module_children(
id: DefIndex,
) -> impl Iterator<Item = AmbigModChild> {
gen move {
let children = self.root.tables.ambig_module_children.get((self, tcx), id);
let children = self.root.tables.ambig_module_children.get(self.cdata, id);
if !children.is_default() {
for child in children.decode((self, tcx)) {
yield child;
@@ -1312,15 +1316,15 @@ fn get_ambig_module_children(
}
}
fn is_item_mir_available(self, tcx: TyCtxt<'_>, id: DefIndex) -> bool {
self.root.tables.optimized_mir.get((self, tcx), id).is_some()
fn is_item_mir_available(self, id: DefIndex) -> bool {
self.root.tables.optimized_mir.get(self.cdata, id).is_some()
}
fn get_fn_has_self_parameter(self, tcx: TyCtxt<'_>, id: DefIndex) -> bool {
self.root
.tables
.fn_arg_idents
.get((self, tcx), id)
.get(self.cdata, id)
.expect("argument names not encoded for a function")
.decode((self, tcx))
.nth(0)
@@ -1335,14 +1339,14 @@ fn get_associated_item_or_field_def_ids(
self.root
.tables
.associated_item_or_field_def_ids
.get((self, tcx), id)
.get(self.cdata, id)
.unwrap_or_else(|| self.missing("associated_item_or_field_def_ids", id))
.decode((self, tcx))
.map(move |child_index| self.local_def_id(child_index))
}
fn get_associated_item(self, tcx: TyCtxt<'_>, id: DefIndex) -> ty::AssocItem {
let kind = match self.def_kind(tcx, id) {
let kind = match self.def_kind(id) {
DefKind::AssocConst { is_type_const } => {
ty::AssocKind::Const { name: self.item_name(id), is_type_const }
}
@@ -1352,7 +1356,7 @@ fn get_associated_item(self, tcx: TyCtxt<'_>, id: DefIndex) -> ty::AssocItem {
},
DefKind::AssocTy => {
let data = if let Some(rpitit_info) =
self.root.tables.opt_rpitit_info.get((self, tcx), id)
self.root.tables.opt_rpitit_info.get(self.cdata, id)
{
ty::AssocTypeData::Rpitit(rpitit_info.decode((self, tcx)))
} else {
@@ -1363,19 +1367,19 @@ fn get_associated_item(self, tcx: TyCtxt<'_>, id: DefIndex) -> ty::AssocItem {
_ => bug!("cannot get associated-item of `{:?}`", self.def_key(id)),
};
let container =
self.root.tables.assoc_container.get((self, tcx), id).unwrap().decode((self, tcx));
self.root.tables.assoc_container.get(self.cdata, id).unwrap().decode((self, tcx));
ty::AssocItem { kind, def_id: self.local_def_id(id), container }
}
fn get_ctor(self, tcx: TyCtxt<'_>, node_id: DefIndex) -> Option<(CtorKind, DefId)> {
match self.def_kind(tcx, node_id) {
match self.def_kind(node_id) {
DefKind::Struct | DefKind::Variant => {
let vdata = self
.root
.tables
.variant_data
.get((self, tcx), node_id)
.get(self.cdata, node_id)
.unwrap()
.decode((self, tcx));
vdata.ctor.map(|(kind, index)| (kind, self.local_def_id(index)))
@@ -1388,7 +1392,7 @@ fn get_item_attrs(self, tcx: TyCtxt<'_>, id: DefIndex) -> impl Iterator<Item = h
self.root
.tables
.attributes
.get((self, tcx), id)
.get(self.cdata, id)
.unwrap_or_else(|| {
// Structure and variant constructors don't have any attributes encoded for them,
// but we assume that someone passing a constructor ID actually wants to look at
@@ -1399,7 +1403,7 @@ fn get_item_attrs(self, tcx: TyCtxt<'_>, id: DefIndex) -> impl Iterator<Item = h
self.root
.tables
.attributes
.get((self, tcx), parent_id)
.get(self.cdata, parent_id)
.expect("no encoded attributes for a structure or variant")
})
.decode((self, tcx))
@@ -1414,7 +1418,7 @@ fn get_inherent_implementations_for_type<'tcx>(
self.root
.tables
.inherent_impls
.get((self, tcx), id)
.get(self.cdata, id)
.decode((self, tcx))
.map(|index| self.local_def_id(index)),
)
@@ -1475,7 +1479,7 @@ fn get_proc_macro_quoted_span(self, tcx: TyCtxt<'_>, index: usize) -> Span {
self.root
.tables
.proc_macro_quoted_spans
.get((self, tcx), index)
.get(self.cdata, index)
.unwrap_or_else(|| panic!("Missing proc macro quoted span: {index:?}"))
.decode((self, tcx))
}
@@ -1538,14 +1542,14 @@ fn exported_generic_symbols<'tcx>(
}
fn get_macro(self, tcx: TyCtxt<'_>, id: DefIndex) -> ast::MacroDef {
match self.def_kind(tcx, id) {
match self.def_kind(id) {
DefKind::Macro(_) => {
let macro_rules = self.root.tables.is_macro_rules.get((self, tcx), id);
let macro_rules = self.root.tables.is_macro_rules.get(self.cdata, id);
let body = self
.root
.tables
.macro_definition
.get((self, tcx), id)
.get(self.cdata, id)
.unwrap()
.decode((self, tcx));
ast::MacroDef { macro_rules, body: Box::new(body), eii_declaration: None }
@@ -1586,11 +1590,8 @@ fn def_path_hash_to_def_index(self, hash: DefPathHash) -> Option<DefIndex> {
fn expn_hash_to_expn_id(self, tcx: TyCtxt<'_>, index_guess: u32, hash: ExpnHash) -> ExpnId {
let index_guess = ExpnIndex::from_u32(index_guess);
let old_hash = self
.root
.expn_hashes
.get((self, tcx), index_guess)
.map(|lazy| lazy.decode((self, tcx)));
let old_hash =
self.root.expn_hashes.get(self.cdata, index_guess).map(|lazy| lazy.decode((self, tcx)));
let index = if old_hash == Some(hash) {
// Fast path: the expn and its index is unchanged from the
@@ -1607,7 +1608,7 @@ fn expn_hash_to_expn_id(self, tcx: TyCtxt<'_>, index_guess: u32, hash: ExpnHash)
UnhashMap::with_capacity_and_hasher(end_id as usize, Default::default());
for i in 0..end_id {
let i = ExpnIndex::from_u32(i);
if let Some(hash) = self.root.expn_hashes.get((self, tcx), i) {
if let Some(hash) = self.root.expn_hashes.get(self.cdata, i) {
map.insert(hash.decode((self, tcx)), i);
}
}
@@ -1616,7 +1617,7 @@ fn expn_hash_to_expn_id(self, tcx: TyCtxt<'_>, index_guess: u32, hash: ExpnHash)
map[&hash]
};
let data = self.root.expn_data.get((self, tcx), index).unwrap().decode((self, tcx));
let data = self.root.expn_data.get(self.cdata, index).unwrap().decode((self, tcx));
rustc_span::hygiene::register_expn_id(self.cnum, index, data, hash)
}
@@ -1753,7 +1754,7 @@ fn filter<'a>(
let source_file_to_import = self
.root
.source_map
.get((self, tcx), source_file_index)
.get(self.cdata, source_file_index)
.expect("missing source file")
.decode((self, tcx));
@@ -1854,19 +1855,19 @@ fn filter<'a>(
.clone()
}
fn get_attr_flags(self, tcx: TyCtxt<'_>, index: DefIndex) -> AttrFlags {
self.root.tables.attr_flags.get((self, tcx), index)
fn get_attr_flags(self, index: DefIndex) -> AttrFlags {
self.root.tables.attr_flags.get(self.cdata, index)
}
fn get_intrinsic(self, tcx: TyCtxt<'_>, index: DefIndex) -> Option<ty::IntrinsicDef> {
self.root.tables.intrinsic.get((self, tcx), index).map(|d| d.decode((self, tcx)))
self.root.tables.intrinsic.get(self.cdata, index).map(|d| d.decode((self, tcx)))
}
fn get_doc_link_resolutions(self, tcx: TyCtxt<'_>, index: DefIndex) -> DocLinkResMap {
self.root
.tables
.doc_link_resolutions
.get((self, tcx), index)
.get(self.cdata, index)
.expect("no resolutions for a doc link")
.decode((self, tcx))
}
@@ -1879,7 +1880,7 @@ fn get_doc_link_traits_in_scope(
self.root
.tables
.doc_link_traits_in_scope
.get((self, tcx), index)
.get(self.cdata, index)
.expect("no traits in scope for a doc link")
.decode((self, tcx))
}
@@ -100,7 +100,7 @@ macro_rules! provide_one {
.root
.tables
.$name
.get(($cdata, $tcx), $def_id.index)
.get($cdata.cdata, $def_id.index)
.map(|lazy| lazy.decode(($cdata, $tcx)))
.process_decoded($tcx, || panic!("{:?} does not have a {:?}", $def_id, stringify!($name)))
}
@@ -109,7 +109,7 @@ macro_rules! provide_one {
($tcx:ident, $def_id:ident, $other:ident, $cdata:ident, $name:ident => { table_defaulted_array }) => {
provide_one! {
$tcx, $def_id, $other, $cdata, $name => {
let lazy = $cdata.root.tables.$name.get(($cdata, $tcx), $def_id.index);
let lazy = $cdata.root.tables.$name.get($cdata.cdata, $def_id.index);
let value = if lazy.is_default() {
&[] as &[_]
} else {
@@ -127,7 +127,7 @@ macro_rules! provide_one {
.root
.tables
.$name
.get(($cdata, $tcx), $def_id.index)
.get($cdata.cdata, $def_id.index)
.process_decoded($tcx, || panic!("{:?} does not have a {:?}", $def_id, stringify!($name)))
}
}
@@ -253,7 +253,7 @@ fn into_args(self) -> (DefId, SimplifiedType) {
lookup_default_body_stability => { table }
lookup_deprecation_entry => { table }
params_in_repr => { table }
def_kind => { cdata.def_kind(tcx, def_id.index) }
def_kind => { cdata.def_kind(def_id.index) }
impl_parent => { table }
defaultness => { table_direct }
constness => { table_direct }
@@ -264,7 +264,7 @@ fn into_args(self) -> (DefId, SimplifiedType) {
.root
.tables
.coerce_unsized_info
.get((cdata, tcx), def_id.index)
.get(cdata.cdata, def_id.index)
.map(|lazy| lazy.decode((cdata, tcx)))
.process_decoded(tcx, || panic!("{def_id:?} does not have coerce_unsized_info"))) }
mir_const_qualif => { table }
@@ -280,7 +280,7 @@ fn into_args(self) -> (DefId, SimplifiedType) {
.root
.tables
.eval_static_initializer
.get((cdata, tcx), def_id.index)
.get(cdata.cdata, def_id.index)
.map(|lazy| lazy.decode((cdata, tcx)))
.unwrap_or_else(|| panic!("{def_id:?} does not have eval_static_initializer")))
}
@@ -293,7 +293,7 @@ fn into_args(self) -> (DefId, SimplifiedType) {
.root
.tables
.deduced_param_attrs
.get((cdata, tcx), def_id.index)
.get(cdata.cdata, def_id.index)
.map(|lazy| {
&*tcx.arena.alloc_from_iter(lazy.decode((cdata, tcx)))
})
@@ -306,7 +306,7 @@ fn into_args(self) -> (DefId, SimplifiedType) {
.root
.tables
.trait_impl_trait_tys
.get((cdata, tcx), def_id.index)
.get(cdata.cdata, def_id.index)
.map(|lazy| lazy.decode((cdata, tcx)))
.process_decoded(tcx, || panic!("{def_id:?} does not have trait_impl_trait_tys")))
}
@@ -323,7 +323,7 @@ fn into_args(self) -> (DefId, SimplifiedType) {
associated_item => { cdata.get_associated_item(tcx, def_id.index) }
inherent_impls => { cdata.get_inherent_implementations_for_type(tcx, def_id.index) }
attrs_for_def => { tcx.arena.alloc_from_iter(cdata.get_item_attrs(tcx, def_id.index)) }
is_mir_available => { cdata.is_item_mir_available(tcx, def_id.index) }
is_mir_available => { cdata.is_item_mir_available(def_id.index) }
cross_crate_inlinable => { table_direct }
dylib_dependency_formats => { cdata.get_dylib_dependency_formats(tcx) }
@@ -411,7 +411,7 @@ fn into_args(self) -> (DefId, SimplifiedType) {
crate_extern_paths => { cdata.source().paths().cloned().collect() }
expn_that_defined => { cdata.get_expn_that_defined(tcx, def_id.index) }
default_field => { cdata.get_default_field(tcx, def_id.index) }
is_doc_hidden => { cdata.get_attr_flags(tcx,def_id.index).contains(AttrFlags::IS_DOC_HIDDEN) }
is_doc_hidden => { cdata.get_attr_flags(def_id.index).contains(AttrFlags::IS_DOC_HIDDEN) }
doc_link_resolutions => { tcx.arena.alloc(cdata.get_doc_link_resolutions(tcx, def_id.index)) }
doc_link_traits_in_scope => {
tcx.arena.alloc_from_iter(cdata.get_doc_link_traits_in_scope(tcx, def_id.index))
@@ -613,8 +613,8 @@ pub fn def_span_untracked(&self, tcx: TyCtxt<'_>, def_id: DefId) -> Span {
self.get_crate_data(def_id.krate).get_span(tcx, def_id.index)
}
pub fn def_kind_untracked(&self, tcx: TyCtxt<'_>, def: DefId) -> DefKind {
self.get_crate_data(def.krate).def_kind(tcx, def.index)
pub fn def_kind_untracked(&self, def: DefId) -> DefKind {
self.get_crate_data(def.krate).def_kind(def.index)
}
pub fn expn_that_defined_untracked(&self, tcx: TyCtxt<'_>, def_id: DefId) -> ExpnId {
+2 -2
View File
@@ -1,7 +1,7 @@
use rustc_hir::def::CtorOf;
use rustc_index::Idx;
use crate::rmeta::decoder::Metadata;
use crate::rmeta::decoder::MetaBlob;
use crate::rmeta::*;
pub(super) trait IsDefault: Default {
@@ -515,7 +515,7 @@ fn trailing_zeros(x: &[u8]) -> usize {
for<'tcx> T::Value<'tcx>: FixedSizeEncoding<ByteArray = [u8; N]>,
{
/// Given the metadata, extract out the value at a particular index (if any).
pub(super) fn get<'a, 'tcx, M: Metadata<'a>>(&self, metadata: M, i: I) -> T::Value<'tcx> {
pub(super) fn get<'a, 'tcx, M: MetaBlob<'a>>(&self, metadata: M, i: I) -> T::Value<'tcx> {
// Access past the end of the table returns a Default
if i.index() >= self.len {
return Default::default();
+6 -76
View File
@@ -5,12 +5,11 @@
use std::sync::atomic::{AtomicU32, Ordering};
use rustc_data_structures::fingerprint::{Fingerprint, PackedFingerprint};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::outline;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::profiling::QueryInvocationId;
use rustc_data_structures::sharded::{self, ShardedHashMap};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{AtomicU64, Lock, is_dyn_thread_safe};
use rustc_data_structures::sync::{AtomicU64, Lock};
use rustc_data_structures::unord::UnordMap;
use rustc_errors::DiagInner;
use rustc_index::IndexVec;
@@ -635,11 +634,6 @@ fn assert_dep_node_not_yet_allocated_in_current_session<S: std::fmt::Display>(
if !ok {
panic!("{}", msg())
}
} else if let Some(nodes_in_current_session) = &self.current.nodes_in_current_session {
outline(|| {
let seen = nodes_in_current_session.lock().contains_key(dep_node);
assert!(!seen, "{}", msg());
});
}
}
@@ -775,7 +769,8 @@ fn alloc_and_color_node(
is_green,
);
self.current.record_node(dep_node_index, key, value_fingerprint);
#[cfg(debug_assertions)]
self.current.record_edge(dep_node_index, key, value_fingerprint);
dep_node_index
} else {
@@ -787,8 +782,6 @@ fn promote_node_and_deps_to_current(
&self,
prev_index: SerializedDepNodeIndex,
) -> Option<DepNodeIndex> {
self.current.debug_assert_not_in_new_nodes(&self.previous, prev_index);
let dep_node_index = self.current.encoder.send_promoted(prev_index, &self.colors);
#[cfg(debug_assertions)]
@@ -1113,13 +1106,6 @@ pub(super) struct CurrentDepGraph {
#[cfg(debug_assertions)]
forbidden_edge: Option<EdgeFilter>,
/// Used to verify the absence of hash collisions among DepNodes.
/// This field is only `Some` if the `-Z incremental_verify_ich` option is present
/// or if `debug_assertions` are enabled.
///
/// The map contains all DepNodes that have been allocated in the current session so far.
nodes_in_current_session: Option<Lock<FxHashMap<DepNode, DepNodeIndex>>>,
/// Anonymous `DepNode`s are nodes whose IDs we compute from the list of
/// their edges. This has the beneficial side-effect that multiple anonymous
/// nodes can be coalesced into one without changing the semantics of the
@@ -1160,9 +1146,6 @@ fn new(
let new_node_count_estimate = 102 * prev_graph_node_count / 100 + 200;
let new_node_dbg =
session.opts.unstable_opts.incremental_verify_ich || cfg!(debug_assertions);
CurrentDepGraph {
encoder: GraphEncoder::new(session, encoder, prev_graph_node_count, previous),
anon_node_to_index: ShardedHashMap::with_capacity(
@@ -1174,12 +1157,6 @@ fn new(
forbidden_edge,
#[cfg(debug_assertions)]
value_fingerprints: Lock::new(IndexVec::from_elem_n(None, new_node_count_estimate)),
nodes_in_current_session: new_node_dbg.then(|| {
Lock::new(FxHashMap::with_capacity_and_hasher(
new_node_count_estimate,
Default::default(),
))
}),
total_read_count: AtomicU64::new(0),
total_duplicate_read_count: AtomicU64::new(0),
}
@@ -1202,25 +1179,6 @@ fn record_edge(
assert_eq!(prior_value_fingerprint, value_fingerprint, "Unstable fingerprints for {key:?}");
}
#[inline(always)]
fn record_node(
&self,
dep_node_index: DepNodeIndex,
key: DepNode,
_value_fingerprint: Fingerprint,
) {
#[cfg(debug_assertions)]
self.record_edge(dep_node_index, key, _value_fingerprint);
if let Some(ref nodes_in_current_session) = self.nodes_in_current_session {
outline(|| {
if nodes_in_current_session.lock().insert(key, dep_node_index).is_some() {
panic!("Found duplicate dep-node {key:?}");
}
});
}
}
/// Writes the node to the current dep-graph and allocates a `DepNodeIndex` for it.
/// Assumes that this is a node that has no equivalent in the previous dep-graph.
#[inline(always)]
@@ -1232,28 +1190,11 @@ fn alloc_new_node(
) -> DepNodeIndex {
let dep_node_index = self.encoder.send_new(key, value_fingerprint, edges);
self.record_node(dep_node_index, key, value_fingerprint);
#[cfg(debug_assertions)]
self.record_edge(dep_node_index, key, value_fingerprint);
dep_node_index
}
#[inline]
fn debug_assert_not_in_new_nodes(
&self,
prev_graph: &SerializedDepGraph,
prev_index: SerializedDepNodeIndex,
) {
if !is_dyn_thread_safe()
&& let Some(ref nodes_in_current_session) = self.nodes_in_current_session
{
debug_assert!(
!nodes_in_current_session
.lock()
.contains_key(&prev_graph.index_to_node(prev_index)),
"node from previous graph present in new node collection"
);
}
}
}
#[derive(Debug, Clone, Copy)]
@@ -1430,17 +1371,6 @@ fn panic_on_forbidden_read(data: &DepGraphData, dep_node_index: DepNodeIndex) ->
}
}
if dep_node.is_none()
&& let Some(nodes) = &data.current.nodes_in_current_session
{
// Try to find it among the nodes allocated so far in this session
// This is OK, there's only ever one node result possible so this is deterministic.
#[allow(rustc::potential_query_instability)]
if let Some((node, _)) = nodes.lock().iter().find(|&(_, index)| *index == dep_node_index) {
dep_node = Some(*node);
}
}
let dep_node = dep_node.map_or_else(
|| format!("with index {:?}", dep_node_index),
|dep_node| format!("`{:?}`", dep_node),
+17
View File
@@ -703,6 +703,23 @@ pub fn feed_delayed_owner(self, key: LocalDefId, owner: MaybeOwner<'tcx>) {
self.dep_graph.assert_ignored();
TyCtxtFeed { tcx: self, key }.delayed_owner(owner);
}
// Trait impl item visibility is inherited from its trait when not specified
// explicitly. In that case we cannot determine it in early resolve,
// but instead are feeding it in late resolve, where we don't have access to the
// `TyCtxtFeed` anymore.
// To avoid having to hash the `LocalDefId` multiple times for inserting and removing the
// `TyCtxtFeed` from a hash table, we add this hack to feed the visibility.
// Do not use outside of the resolver query.
pub fn feed_visibility_for_trait_impl_item(self, key: LocalDefId, vis: ty::Visibility) {
if cfg!(debug_assertions) {
match self.def_kind(self.local_parent(key)) {
DefKind::Impl { of_trait: true } => {}
other => bug!("{key:?} is not an assoc item of a trait impl: {other:?}"),
}
}
TyCtxtFeed { tcx: self, key }.visibility(vis.to_def_id())
}
}
impl<'tcx, KEY: Copy> TyCtxtFeed<'tcx, KEY> {
+18 -1
View File
@@ -200,7 +200,10 @@ fn check_attributes(
self.check_rustc_must_implement_one_of(*attr_span, fn_names, hir_id,target)
},
Attribute::Parsed(AttributeKind::OnUnimplemented{directive,..}) => {self.check_diagnostic_on_unimplemented(hir_id, directive.as_deref())},
Attribute::Parsed(AttributeKind::OnConst{span, ..}) => {self.check_diagnostic_on_const(*span, hir_id, target, item)}
Attribute::Parsed(AttributeKind::OnConst{span, ..}) => {self.check_diagnostic_on_const(*span, hir_id, target, item)},
Attribute::Parsed(AttributeKind::OnUnmatchArgs { directive, .. }) => {
self.check_diagnostic_on_unmatch_args(hir_id, directive.as_deref())
},
Attribute::Parsed(AttributeKind::OnMove { directive , .. }) => {
self.check_diagnostic_on_move(hir_id, directive.as_deref())
},
@@ -559,6 +562,20 @@ fn check_diagnostic_on_const(
// ...whose generics would that be, anyway? The traits' or the impls'?
}
/// Checks use of generic formatting parameters in `#[diagnostic::on_unmatch_args]`.
fn check_diagnostic_on_unmatch_args(&self, hir_id: HirId, directive: Option<&Directive>) {
if let Some(directive) = directive {
directive.visit_params(&mut |argument_name, span| {
self.tcx.emit_node_span_lint(
MALFORMED_DIAGNOSTIC_FORMAT_LITERALS,
hir_id,
span,
errors::OnUnmatchArgsMalformedFormatLiterals { name: argument_name },
)
});
}
}
/// Checks use of generic formatting parameters in `#[diagnostic::on_move]`
fn check_diagnostic_on_move(&self, hir_id: HirId, directive: Option<&Directive>) {
if let Some(directive) = directive {
+7
View File
@@ -1303,3 +1303,10 @@ pub(crate) struct UnknownFormatParameterForOnUnimplementedAttr {
pub(crate) struct OnMoveMalformedFormatLiterals {
pub name: Symbol,
}
#[derive(Diagnostic)]
#[diag("unknown parameter `{$name}`")]
#[help(r#"use {"`{This}`"} to refer to the macro name"#)]
pub(crate) struct OnUnmatchArgsMalformedFormatLiterals {
pub name: Symbol,
}
@@ -17,11 +17,11 @@
use crate::mir::{BinOp, Body, Place, UnOp};
use crate::target::{MachineInfo, MachineSize};
use crate::ty::{
AdtDef, AdtKind, Allocation, ClosureDef, ClosureKind, CoroutineDef, Discr, FieldDef, FnDef,
ForeignDef, ForeignItemKind, ForeignModule, ForeignModuleDef, GenericArgs, GenericPredicates,
Generics, ImplDef, ImplTrait, IntrinsicDef, LineInfo, MirConst, PolyFnSig, RigidTy, Span,
TraitDecl, TraitDef, TraitRef, Ty, TyConst, TyConstId, TyKind, UintTy, VariantDef, VariantIdx,
VtblEntry,
AdtDef, AdtKind, Allocation, Asyncness, ClosureDef, ClosureKind, Constness, CoroutineDef,
Discr, FieldDef, FnDef, ForeignDef, ForeignItemKind, ForeignModule, ForeignModuleDef,
GenericArgs, GenericPredicates, Generics, ImplDef, ImplTrait, IntrinsicDef, LineInfo, MirConst,
PolyFnSig, RigidTy, Span, TraitDecl, TraitDef, TraitRef, Ty, TyConst, TyConstId, TyKind,
UintTy, VariantDef, VariantIdx, VtblEntry,
};
use crate::unstable::{RustcInternal, Stable, new_item_kind};
use crate::{
@@ -388,6 +388,22 @@ pub(crate) fn fn_sig(&self, def: FnDef, args: &GenericArgs) -> PolyFnSig {
cx.fn_sig(def_id, args_ref).stable(&mut *tables, cx)
}
/// Retrieve the constness for the given function definition.
pub(crate) fn constness(&self, def: FnDef) -> Constness {
let mut tables = self.tables.borrow_mut();
let cx = &*self.cx.borrow();
let def_id = def.0.internal(&mut *tables, cx.tcx);
cx.constness(def_id).stable(&mut *tables, cx)
}
/// Retrieve the asyncness for the given function definition.
pub(crate) fn asyncness(&self, def: FnDef) -> Asyncness {
let mut tables = self.tables.borrow_mut();
let cx = &*self.cx.borrow();
let def_id = def.0.internal(&mut *tables, cx.tcx);
cx.asyncness(def_id).stable(&mut *tables, cx)
}
/// Retrieve the intrinsic definition if the item corresponds one.
pub(crate) fn intrinsic(&self, item: DefId) -> Option<IntrinsicDef> {
let mut tables = self.tables.borrow_mut();
+34
View File
@@ -710,6 +710,16 @@ pub fn is_intrinsic(&self) -> bool {
self.as_intrinsic().is_some()
}
/// Get the constness of this function definition.
pub fn constness(&self) -> Constness {
with(|cx| cx.constness(*self))
}
/// Get the asyncness of this function definition.
pub fn asyncness(&self) -> Asyncness {
with(|cx| cx.asyncness(*self))
}
/// Get the function signature for this function definition.
pub fn fn_sig(&self) -> PolyFnSig {
let kind = self.ty().kind();
@@ -1103,6 +1113,30 @@ pub fn inputs(&self) -> &[Ty] {
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)]
pub enum Constness {
Const,
NotConst,
}
impl Constness {
pub fn is_const(self) -> bool {
matches!(self, Constness::Const)
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)]
pub enum Asyncness {
Async,
NotAsync,
}
impl Asyncness {
pub fn is_async(self) -> bool {
matches!(self, Asyncness::Async)
}
}
#[derive(Clone, PartialEq, Eq, Debug, Serialize)]
pub enum Abi {
Rust,
@@ -14,8 +14,8 @@
use crate::mir::mono::{Instance, MonoItem, StaticDef};
use crate::mir::{BinOp, Mutability, Place, ProjectionElem, RawPtrKind, Safety, UnOp};
use crate::ty::{
Abi, AdtDef, Binder, BoundRegionKind, BoundTyKind, BoundVariableKind, ClosureKind,
ExistentialPredicate, ExistentialProjection, ExistentialTraitRef, FloatTy, FnSig,
Abi, AdtDef, Asyncness, Binder, BoundRegionKind, BoundTyKind, BoundVariableKind, ClosureKind,
Constness, ExistentialPredicate, ExistentialProjection, ExistentialTraitRef, FloatTy, FnSig,
GenericArgKind, GenericArgs, IntTy, MirConst, Movability, Pattern, Region, RigidTy, Span,
TermKind, TraitRef, Ty, TyConst, UintTy, VariantDef, VariantIdx,
};
@@ -637,6 +637,36 @@ fn internal<'tcx>(
}
}
}
impl RustcInternal for Constness {
type T<'tcx> = rustc_hir::Constness;
fn internal<'tcx>(
&self,
_tables: &mut Tables<'_, BridgeTys>,
_tcx: impl InternalCx<'tcx>,
) -> Self::T<'tcx> {
match self {
Constness::Const => rustc_hir::Constness::Const,
Constness::NotConst => rustc_hir::Constness::NotConst,
}
}
}
impl RustcInternal for Asyncness {
type T<'tcx> = rustc_ty::Asyncness;
fn internal<'tcx>(
&self,
_tables: &mut Tables<'_, BridgeTys>,
_tcx: impl InternalCx<'tcx>,
) -> Self::T<'tcx> {
match self {
Asyncness::Async => rustc_ty::Asyncness::Yes,
Asyncness::NotAsync => rustc_ty::Asyncness::No,
}
}
}
impl RustcInternal for Span {
type T<'tcx> = rustc_span::Span;
@@ -21,6 +21,26 @@ fn stable(&self, _: &mut Tables<'_, BridgeTys>, _: &CompilerCtxt<'_, BridgeTys>)
}
}
impl<'tcx> Stable<'tcx> for rustc_hir::Constness {
type T = crate::ty::Constness;
fn stable(&self, _: &mut Tables<'_, BridgeTys>, _: &CompilerCtxt<'_, BridgeTys>) -> Self::T {
match self {
rustc_hir::Constness::Const => crate::ty::Constness::Const,
rustc_hir::Constness::NotConst => crate::ty::Constness::NotConst,
}
}
}
impl<'tcx> Stable<'tcx> for rustc_middle::ty::Asyncness {
type T = crate::ty::Asyncness;
fn stable(&self, _: &mut Tables<'_, BridgeTys>, _: &CompilerCtxt<'_, BridgeTys>) -> Self::T {
match self {
rustc_middle::ty::Asyncness::Yes => crate::ty::Asyncness::Async,
rustc_middle::ty::Asyncness::No => crate::ty::Asyncness::NotAsync,
}
}
}
impl<'tcx> Stable<'tcx> for FieldIdx {
type T = usize;
fn stable(&self, _: &mut Tables<'_, BridgeTys>, _: &CompilerCtxt<'_, BridgeTys>) -> Self::T {
@@ -383,6 +383,16 @@ pub fn fn_sig(
sig
}
/// Retrieve the constness for the given function definition.
pub fn constness(&self, def_id: DefId) -> rustc_hir::Constness {
self.tcx.constness(def_id)
}
/// Retrieve the asyncness for the given function definition.
pub fn asyncness(&self, def_id: DefId) -> ty::Asyncness {
self.tcx.asyncness(def_id)
}
/// Retrieve the intrinsic definition if the item corresponds one.
pub fn intrinsic(&self, def_id: DefId) -> Option<IntrinsicDef> {
let intrinsic = self.tcx.intrinsic_raw(def_id);
@@ -21,7 +21,7 @@
use rustc_index::bit_set::DenseBitSet;
use rustc_metadata::creader::LoadedMacro;
use rustc_middle::metadata::{ModChild, Reexport};
use rustc_middle::ty::{Feed, Visibility};
use rustc_middle::ty::{TyCtxtFeed, Visibility};
use rustc_middle::{bug, span_bug};
use rustc_span::hygiene::{ExpnId, LocalExpnId, MacroKind};
use rustc_span::{Ident, Span, Symbol, kw, sym};
@@ -155,7 +155,7 @@ pub(crate) fn get_module(&self, def_id: DefId) -> Option<Module<'ra>> {
}
// Query `def_kind` is not used because query system overhead is too expensive here.
let def_kind = self.cstore().def_kind_untracked(self.tcx, def_id);
let def_kind = self.cstore().def_kind_untracked(def_id);
if def_kind.is_module_like() {
let parent = self.tcx.opt_parent(def_id).map(|parent_id| {
self.get_nearest_non_block_module(parent_id).expect_extern()
@@ -563,6 +563,7 @@ fn build_reduced_graph_for_use_tree(
item: &Item,
vis: Visibility,
root_span: Span,
feed: TyCtxtFeed<'tcx, LocalDefId>,
) {
debug!(
"build_reduced_graph_for_use_tree(parent_prefix={:?}, use_tree={:?}, nested={})",
@@ -572,7 +573,7 @@ fn build_reduced_graph_for_use_tree(
// Top level use tree reuses the item's id and list stems reuse their parent
// use tree's ids, so in both cases their visibilities are already filled.
if nested && !list_stem {
self.r.feed_visibility(self.r.feed(id), vis);
self.r.feed_visibility(feed, vis);
}
let mut prefix_iter = parent_prefix
@@ -735,11 +736,11 @@ fn build_reduced_graph_for_use_tree(
}
ast::UseTreeKind::Nested { ref items, .. } => {
for &(ref tree, id) in items {
self.create_def(id, None, DefKind::Use, use_tree.span());
let feed = self.create_def(id, None, DefKind::Use, use_tree.span());
self.build_reduced_graph_for_use_tree(
// This particular use tree
tree, id, &prefix, true, false, // The whole `use` item
item, vis, root_span,
item, vis, root_span, feed,
);
}
@@ -768,6 +769,7 @@ fn build_reduced_graph_for_use_tree(
self.parent_scope.module.nearest_parent_mod().expect_local(),
),
root_span,
feed,
);
}
}
@@ -778,7 +780,7 @@ fn build_reduced_graph_for_struct_variant(
&mut self,
fields: &[ast::FieldDef],
ident: Ident,
feed: Feed<'tcx, LocalDefId>,
feed: TyCtxtFeed<'tcx, LocalDefId>,
adt_res: Res,
adt_vis: Visibility,
adt_span: Span,
@@ -798,13 +800,12 @@ fn build_reduced_graph_for_struct_variant(
}
/// Constructs the reduced graph for one item.
fn build_reduced_graph_for_item(&mut self, item: &'a Item) {
fn build_reduced_graph_for_item(&mut self, item: &'a Item, feed: TyCtxtFeed<'tcx, LocalDefId>) {
let parent_scope = &self.parent_scope;
let parent = parent_scope.module.expect_local();
let expansion = parent_scope.expansion;
let sp = item.span;
let vis = self.resolve_visibility(&item.vis);
let feed = self.r.feed(item.id);
let local_def_id = feed.key();
let def_id = local_def_id.to_def_id();
let def_kind = self.r.tcx.def_kind(def_id);
@@ -825,6 +826,7 @@ fn build_reduced_graph_for_item(&mut self, item: &'a Item) {
item,
vis,
use_tree.span(),
feed,
);
}
@@ -867,7 +869,7 @@ fn build_reduced_graph_for_item(&mut self, item: &'a Item) {
// Functions introducing procedural macros reserve a slot
// in the macro namespace as well (see #52225).
self.define_macro(item);
self.define_macro(item, feed);
}
// These items live in the type namespace.
@@ -902,7 +904,7 @@ fn build_reduced_graph_for_item(&mut self, item: &'a Item) {
// If this is a tuple or unit struct, define a name
// in the value namespace as well.
if let Some(ctor_node_id) = vdata.ctor_node_id() {
if let Some((ctor_kind, ctor_node_id)) = CtorKind::from_ast(vdata) {
// If the structure is marked as non_exhaustive then lower the visibility
// to within the crate.
let mut ctor_vis = if vis.is_public()
@@ -927,7 +929,14 @@ fn build_reduced_graph_for_item(&mut self, item: &'a Item) {
}
field_visibilities.push(field_vis.to_def_id());
}
let feed = self.r.feed(ctor_node_id);
// If this is a unit or tuple-like struct, register the constructor.
let feed = self.create_def(
ctor_node_id,
None,
DefKind::Ctor(CtorOf::Struct, ctor_kind),
item.span,
);
let ctor_def_id = feed.key();
let ctor_res = self.res(ctor_def_id);
self.r.define_local(parent, ident, ValueNS, ctor_res, ctor_vis, sp, expansion);
@@ -1062,8 +1071,8 @@ pub(crate) fn build_reduced_graph_for_foreign_item(
&mut self,
item: &ForeignItem,
ident: Ident,
feed: TyCtxtFeed<'tcx, LocalDefId>,
) {
let feed = self.r.feed(item.id);
let local_def_id = feed.key();
let def_id = local_def_id.to_def_id();
let ns = match item.kind {
@@ -1259,10 +1268,13 @@ fn insert_unused_macro(&mut self, ident: Ident, def_id: LocalDefId, node_id: Nod
}
}
fn define_macro(&mut self, item: &ast::Item) -> MacroRulesScopeRef<'ra> {
fn define_macro(
&mut self,
item: &ast::Item,
feed: TyCtxtFeed<'tcx, LocalDefId>,
) -> MacroRulesScopeRef<'ra> {
let parent_scope = self.parent_scope;
let expansion = parent_scope.expansion;
let feed = self.r.feed(item.id);
let def_id = feed.key();
let (res, orig_ident, span, macro_rules) = match &item.kind {
ItemKind::MacroDef(ident, def) => {
@@ -1361,18 +1373,17 @@ fn define_macro(&mut self, item: &ast::Item) -> MacroRulesScopeRef<'ra> {
}
impl<'a, 'ra, 'tcx> DefCollector<'a, 'ra, 'tcx> {
pub(crate) fn brg_visit_item(&mut self, item: &'a Item) {
pub(crate) fn brg_visit_item(&mut self, item: &'a Item, feed: TyCtxtFeed<'tcx, LocalDefId>) {
let orig_module_scope = self.parent_scope.module;
self.parent_scope.macro_rules = match item.kind {
ItemKind::MacroDef(..) => {
let macro_rules_scope = self.define_macro(item);
let macro_rules_scope = self.define_macro(item, feed);
visit::walk_item(self, item);
macro_rules_scope
}
ItemKind::MacCall(..) => self.visit_invoc_in_module(item.id),
_ => {
let orig_macro_rules_scope = self.parent_scope.macro_rules;
self.build_reduced_graph_for_item(item);
self.build_reduced_graph_for_item(item, feed);
match item.kind {
ItemKind::Mod(..) => {
// Visit attributes after items for backward compatibility.
@@ -1394,8 +1405,10 @@ pub(crate) fn brg_visit_item(&mut self, item: &'a Item) {
self.parent_scope.module = orig_module_scope;
}
pub(crate) fn brg_visit_stmt_mac_call(&mut self, stmt: &'a ast::Stmt) {
self.parent_scope.macro_rules = self.visit_invoc_in_module(stmt.id);
/// Handle a macro call that itself can produce new `macro_rules` items
/// in the current module.
pub(crate) fn brg_visit_mac_call_in_module(&mut self, id: NodeId) {
self.parent_scope.macro_rules = self.visit_invoc_in_module(id);
}
pub(crate) fn brg_visit_block(&mut self, block: &'a Block) {
@@ -1413,9 +1426,9 @@ pub(crate) fn brg_visit_assoc_item(
ctxt: AssocCtxt,
ident: Ident,
ns: Namespace,
feed: TyCtxtFeed<'tcx, LocalDefId>,
) {
let vis = self.resolve_visibility(&item.vis);
let feed = self.r.feed(item.id);
let local_def_id = feed.key();
let def_id = local_def_id.to_def_id();
@@ -1467,21 +1480,28 @@ pub(crate) fn visit_assoc_item_mac_call(
}
}
pub(crate) fn brg_visit_field_def(&mut self, sf: &'a ast::FieldDef) {
pub(crate) fn brg_visit_field_def(
&mut self,
sf: &'a ast::FieldDef,
feed: TyCtxtFeed<'tcx, LocalDefId>,
) {
let vis = self.resolve_visibility(&sf.vis);
self.r.feed_visibility(self.r.feed(sf.id), vis);
self.r.feed_visibility(feed, vis);
visit::walk_field_def(self, sf);
}
// Constructs the reduced graph for one variant. Variants exist in the
// type and value namespaces.
pub(crate) fn brg_visit_variant(&mut self, variant: &'a ast::Variant) {
pub(crate) fn brg_visit_variant(
&mut self,
variant: &'a ast::Variant,
feed: TyCtxtFeed<'tcx, LocalDefId>,
) {
let parent = self.parent_scope.module.expect_local();
let expn_id = self.parent_scope.expansion;
let ident = variant.ident;
// Define a name in the type namespace.
let feed = self.r.feed(variant.id);
let def_id = feed.key();
let vis = self.resolve_visibility(&variant.vis);
self.r.define_local(parent, ident, TypeNS, self.res(def_id), vis, variant.span, expn_id);
@@ -1496,8 +1516,13 @@ pub(crate) fn brg_visit_variant(&mut self, variant: &'a ast::Variant) {
};
// Define a constructor name in the value namespace.
if let Some(ctor_node_id) = variant.data.ctor_node_id() {
let feed = self.r.feed(ctor_node_id);
if let Some((ctor_kind, ctor_node_id)) = CtorKind::from_ast(&variant.data) {
let feed = self.create_def(
ctor_node_id,
None,
DefKind::Ctor(CtorOf::Variant, ctor_kind),
variant.span,
);
let ctor_def_id = feed.key();
let ctor_res = self.res(ctor_def_id);
self.r.define_local(parent, ident, ValueNS, ctor_res, ctor_vis, variant.span, expn_id);
+54 -74
View File
@@ -7,10 +7,11 @@
use rustc_expand::expand::AstFragment;
use rustc_hir as hir;
use rustc_hir::Target;
use rustc_hir::def::DefKind;
use rustc_hir::def::Namespace::{TypeNS, ValueNS};
use rustc_hir::def::{CtorKind, CtorOf, DefKind};
use rustc_hir::def_id::LocalDefId;
use rustc_middle::span_bug;
use rustc_middle::ty::TyCtxtFeed;
use rustc_span::{Span, Symbol, sym};
use tracing::{debug, instrument};
@@ -43,22 +44,20 @@ pub(super) fn create_def(
name: Option<Symbol>,
def_kind: DefKind,
span: Span,
) -> LocalDefId {
) -> TyCtxtFeed<'tcx, LocalDefId> {
let parent_def = self.invocation_parent.parent_def;
debug!(
"create_def(node_id={:?}, def_kind={:?}, parent_def={:?})",
node_id, def_kind, parent_def
);
self.r
.create_def(
parent_def,
node_id,
name,
def_kind,
self.parent_scope.expansion.to_expn_id(),
span.with_parent(None),
)
.def_id()
self.r.create_def(
parent_def,
node_id,
name,
def_kind,
self.parent_scope.expansion.to_expn_id(),
span.with_parent(None),
)
}
fn with_parent<F: FnOnce(&mut Self)>(&mut self, parent_def: LocalDefId, f: F) {
@@ -100,7 +99,7 @@ fn collect_field(&mut self, field: &'a FieldDef, index: Option<usize>) {
} else {
let name = field.ident.map_or_else(|| sym::integer(index(self)), |ident| ident.name);
let def = self.create_def(field.id, Some(name), DefKind::Field, field.span);
self.with_parent(def, |this| this.brg_visit_field_def(field));
self.with_parent(def.def_id(), |this| this.brg_visit_field_def(field, def));
}
}
@@ -173,42 +172,25 @@ fn visit_item(&mut self, i: &'a Item) {
}
ItemKind::GlobalAsm(..) => DefKind::GlobalAsm,
ItemKind::Use(_) => {
self.create_def(i.id, None, DefKind::Use, i.span);
self.brg_visit_item(i);
let feed = self.create_def(i.id, None, DefKind::Use, i.span);
self.brg_visit_item(i, feed);
return;
}
ItemKind::MacCall(..) | ItemKind::DelegationMac(..) => {
ItemKind::MacCall(..) => {
self.visit_macro_invoc(i.id);
self.brg_visit_item(i);
self.brg_visit_mac_call_in_module(i.id);
return;
}
ItemKind::DelegationMac(..) => unreachable!(),
};
let def_id =
self.create_def(i.id, i.kind.ident().map(|ident| ident.name), def_kind, i.span);
let feed = self.create_def(i.id, i.kind.ident().map(|ident| ident.name), def_kind, i.span);
if let Some(macro_data) = opt_macro_data {
self.r.new_local_macro(def_id, macro_data);
self.r.new_local_macro(feed.def_id(), macro_data);
}
self.with_parent(def_id, |this| {
this.with_impl_trait(ImplTraitContext::Existential, |this| {
match i.kind {
ItemKind::Struct(_, _, ref struct_def)
| ItemKind::Union(_, _, ref struct_def) => {
// If this is a unit or tuple-like struct, register the constructor.
if let Some((ctor_kind, ctor_node_id)) = CtorKind::from_ast(struct_def) {
this.create_def(
ctor_node_id,
None,
DefKind::Ctor(CtorOf::Struct, ctor_kind),
i.span,
);
}
}
_ => {}
}
this.brg_visit_item(i);
})
self.with_parent(feed.def_id(), |this| {
this.with_impl_trait(ImplTraitContext::Existential, |this| this.brg_visit_item(i, feed))
});
}
@@ -244,15 +226,17 @@ fn visit_fn(&mut self, fn_kind: FnKind<'a>, _: &AttrVec, span: Span, _: NodeId)
}
let (return_id, return_span) = coroutine_kind.return_id();
let return_def = self.create_def(return_id, None, DefKind::OpaqueTy, return_span);
let return_def =
self.create_def(return_id, None, DefKind::OpaqueTy, return_span).def_id();
self.with_parent(return_def, |this| this.visit_fn_ret_ty(output));
// If this async fn has no body (i.e. it's an async fn signature in a trait)
// then the closure_def will never be used, and we should avoid generating a
// def-id for it.
if let Some(body) = body {
let closure_def =
self.create_def(coroutine_kind.closure_id(), None, DefKind::Closure, span);
let closure_def = self
.create_def(coroutine_kind.closure_id(), None, DefKind::Closure, span)
.def_id();
self.with_parent(closure_def, |this| this.visit_block(body));
}
}
@@ -262,8 +246,9 @@ fn visit_fn(&mut self, fn_kind: FnKind<'a>, _: &AttrVec, span: Span, _: NodeId)
// Async closures desugar to closures inside of closures, so
// we must create two defs.
let coroutine_def =
self.create_def(coroutine_kind.closure_id(), None, DefKind::Closure, span);
let coroutine_def = self
.create_def(coroutine_kind.closure_id(), None, DefKind::Closure, span)
.def_id();
self.with_parent(coroutine_def, |this| this.visit_expr(body));
}
_ => visit::walk_fn(self, fn_kind),
@@ -299,8 +284,8 @@ fn visit_foreign_item(&mut self, fi: &'a ForeignItem) {
let def = self.create_def(fi.id, Some(ident.name), def_kind, fi.span);
self.with_parent(def, |this| {
this.build_reduced_graph_for_foreign_item(fi, ident);
self.with_parent(def.def_id(), |this| {
this.build_reduced_graph_for_foreign_item(fi, ident, def);
visit::walk_item(this, fi)
});
}
@@ -311,18 +296,8 @@ fn visit_variant(&mut self, v: &'a Variant) {
self.visit_invoc_in_module(v.id);
return;
}
let def = self.create_def(v.id, Some(v.ident.name), DefKind::Variant, v.span);
self.with_parent(def, |this| {
if let Some((ctor_kind, ctor_node_id)) = CtorKind::from_ast(&v.data) {
this.create_def(
ctor_node_id,
None,
DefKind::Ctor(CtorOf::Variant, ctor_kind),
v.span,
);
}
this.brg_visit_variant(v);
});
let feed = self.create_def(v.id, Some(v.ident.name), DefKind::Variant, v.span);
self.with_parent(feed.def_id(), |this| this.brg_visit_variant(v, feed));
}
fn visit_where_predicate(&mut self, pred: &'a WherePredicate) {
@@ -391,8 +366,8 @@ fn visit_assoc_item(&mut self, i: &'a AssocItem, ctxt: visit::AssocCtxt) {
}
};
let def = self.create_def(i.id, Some(ident.name), def_kind, i.span);
self.with_parent(def, |this| this.brg_visit_assoc_item(i, ctxt, ident, ns));
let feed = self.create_def(i.id, Some(ident.name), def_kind, i.span);
self.with_parent(feed.def_id(), |this| this.brg_visit_assoc_item(i, ctxt, ident, ns, feed));
}
fn visit_pat(&mut self, pat: &'a Pat) {
@@ -410,8 +385,9 @@ fn visit_anon_const(&mut self, constant: &'a AnonConst) {
// to avoid affecting stable we have to feature gate the not creating
// anon consts
if !self.r.tcx.features().min_generic_const_args() {
let parent =
self.create_def(constant.id, None, DefKind::AnonConst, constant.value.span);
let parent = self
.create_def(constant.id, None, DefKind::AnonConst, constant.value.span)
.def_id();
return self.with_parent(parent, |this| visit::walk_anon_const(this, constant));
}
@@ -421,8 +397,9 @@ fn visit_anon_const(&mut self, constant: &'a AnonConst) {
}),
MgcaDisambiguation::AnonConst => {
self.with_const_arg(ConstArgContext::NonDirect, |this| {
let parent =
this.create_def(constant.id, None, DefKind::AnonConst, constant.value.span);
let parent = this
.create_def(constant.id, None, DefKind::AnonConst, constant.value.span)
.def_id();
this.with_parent(parent, |this| visit::walk_anon_const(this, constant));
})
}
@@ -440,7 +417,7 @@ fn visit_expr(&mut self, expr: &'a Expr) {
return;
}
ExprKind::Closure(..) | ExprKind::Gen(..) => {
self.create_def(expr.id, None, DefKind::Closure, expr.span)
self.create_def(expr.id, None, DefKind::Closure, expr.span).def_id()
}
ExprKind::ConstBlock(constant) => {
// Under `min_generic_const_args` a `const { }` block sometimes
@@ -455,7 +432,8 @@ fn visit_expr(&mut self, expr: &'a Expr) {
visit::walk_attribute(this, attr);
}
let def = this.create_def(constant.id, None, def_kind, constant.value.span);
let def =
this.create_def(constant.id, None, def_kind, constant.value.span).def_id();
this.with_parent(def, |this| visit::walk_anon_const(this, constant));
});
}
@@ -505,7 +483,7 @@ fn visit_ty(&mut self, ty: &'a Ty) {
ImplTraitContext::Existential => DefKind::OpaqueTy,
ImplTraitContext::InBinding => return visit::walk_ty(self, ty),
};
let id = self.create_def(opaque_id, Some(name), kind, ty.span);
let id = self.create_def(opaque_id, Some(name), kind, ty.span).def_id();
match self.invocation_parent.impl_trait_context {
// Do not nest APIT, as we desugar them as `impl_trait: bounds`,
// so the `impl_trait` node is not a parent to `bounds`.
@@ -523,7 +501,7 @@ fn visit_ty(&mut self, ty: &'a Ty) {
fn visit_stmt(&mut self, stmt: &'a Stmt) {
match stmt.kind {
StmtKind::MacCall(..) => {
self.brg_visit_stmt_mac_call(stmt);
self.brg_visit_mac_call_in_module(stmt.id);
self.visit_macro_invoc(stmt.id)
}
// FIXME(impl_trait_in_bindings): We don't really have a good way of
@@ -628,12 +606,14 @@ fn visit_inline_asm(&mut self, asm: &'a InlineAsm) {
}
}
InlineAsmOperand::Const { anon_const } => {
let def = self.create_def(
anon_const.id,
None,
DefKind::InlineConst,
anon_const.value.span,
);
let def = self
.create_def(
anon_const.id,
None,
DefKind::InlineConst,
anon_const.value.span,
)
.def_id();
self.with_parent(def, |this| visit::walk_anon_const(this, anon_const));
}
InlineAsmOperand::Sym { sym } => self.visit_inline_asm_sym(sym),
+4 -1
View File
@@ -3765,7 +3765,10 @@ fn check_trait_item<F>(
);
Visibility::Public
};
this.r.feed_visibility(this.r.feed(id), vis);
// HACK: because we don't want to track the `TyCtxtFeed` through the resolver to here
// in a hash-map, we instead conjure a `TyCtxtFeed` for any `DefId` here, but prevent
// it from being used generally.
this.r.tcx.feed_visibility_for_trait_impl_item(this.r.local_def_id(id), vis);
};
let Some(decl) = decl else {
+12 -27
View File
@@ -66,8 +66,8 @@
use rustc_middle::middle::privacy::EffectiveVisibilities;
use rustc_middle::query::Providers;
use rustc_middle::ty::{
self, DelegationInfo, Feed, MainDefinition, RegisteredTools, ResolverAstLowering,
ResolverGlobalCtxt, TyCtxt, TyCtxtFeed, Visibility,
self, DelegationInfo, MainDefinition, RegisteredTools, ResolverAstLowering, ResolverGlobalCtxt,
TyCtxt, TyCtxtFeed, Visibility,
};
use rustc_session::config::CrateType;
use rustc_session::lint::builtin::PRIVATE_MACRO_USE;
@@ -1422,7 +1422,7 @@ pub struct Resolver<'ra, 'tcx> {
next_node_id: NodeId = CRATE_NODE_ID,
node_id_to_def_id: NodeMap<Feed<'tcx, LocalDefId>>,
node_id_to_def_id: NodeMap<LocalDefId>,
disambiguators: LocalDefIdMap<PerParentDisambiguatorState>,
@@ -1589,19 +1589,11 @@ fn as_ref(&self) -> &Resolver<'ra, 'tcx> {
impl<'tcx> Resolver<'_, 'tcx> {
fn opt_local_def_id(&self, node: NodeId) -> Option<LocalDefId> {
self.opt_feed(node).map(|f| f.key())
}
fn local_def_id(&self, node: NodeId) -> LocalDefId {
self.feed(node).key()
}
fn opt_feed(&self, node: NodeId) -> Option<Feed<'tcx, LocalDefId>> {
self.node_id_to_def_id.get(&node).copied()
}
fn feed(&self, node: NodeId) -> Feed<'tcx, LocalDefId> {
self.opt_feed(node).unwrap_or_else(|| panic!("no entry for node id: `{node:?}`"))
fn local_def_id(&self, node: NodeId) -> LocalDefId {
self.opt_local_def_id(node).unwrap_or_else(|| panic!("no entry for node id: `{node:?}`"))
}
fn local_def_kind(&self, node: NodeId) -> DefKind {
@@ -1624,7 +1616,7 @@ fn create_def(
node_id,
name,
def_kind,
self.tcx.definitions_untracked().def_key(self.node_id_to_def_id[&node_id].key()),
self.tcx.definitions_untracked().def_key(self.node_id_to_def_id[&node_id]),
);
let disambiguator = self.disambiguators.get_or_create(parent);
@@ -1648,7 +1640,7 @@ fn create_def(
// we don't need a mapping from `NodeId` to `LocalDefId`.
if node_id != ast::DUMMY_NODE_ID {
debug!("create_def: def_id_to_node_id[{:?}] <-> {:?}", def_id, node_id);
self.node_id_to_def_id.insert(node_id, feed.downgrade());
self.node_id_to_def_id.insert(node_id, def_id);
}
feed
@@ -1699,7 +1691,7 @@ pub fn tcx(&self) -> TyCtxt<'tcx> {
fn def_id_to_node_id(&self, def_id: LocalDefId) -> NodeId {
self.node_id_to_def_id
.items()
.filter(|(_, v)| v.key() == def_id)
.filter(|(_, v)| **v == def_id)
.map(|(k, _)| *k)
.get_only()
.unwrap()
@@ -1740,8 +1732,7 @@ pub fn new(
let crate_feed = tcx.create_local_crate_def_id(crate_span);
crate_feed.def_kind(DefKind::Mod);
let crate_feed = crate_feed.downgrade();
node_id_to_def_id.insert(CRATE_NODE_ID, crate_feed);
node_id_to_def_id.insert(CRATE_NODE_ID, CRATE_DEF_ID);
let mut invocation_parents = FxHashMap::default();
invocation_parents.insert(LocalExpnId::ROOT, InvocationParent::ROOT);
@@ -1894,8 +1885,7 @@ pub fn arenas() -> ResolverArenas<'ra> {
Default::default()
}
fn feed_visibility(&mut self, feed: Feed<'tcx, LocalDefId>, vis: Visibility) {
let feed = feed.upgrade(self.tcx);
fn feed_visibility(&mut self, feed: TyCtxtFeed<'tcx, LocalDefId>, vis: Visibility) {
feed.visibility(vis.to_def_id());
self.visibilities_for_hashing.push((feed.def_id(), vis));
}
@@ -1914,8 +1904,7 @@ pub fn into_outputs(self) -> ResolverOutputs<'tcx> {
.stripped_cfg_items
.into_iter()
.filter_map(|item| {
let parent_scope =
self.node_id_to_def_id.get(&item.parent_scope)?.key().to_def_id();
let parent_scope = self.node_id_to_def_id.get(&item.parent_scope)?.to_def_id();
Some(StrippedCfgItem { parent_scope, ident: item.ident, cfg: item.cfg })
})
.collect();
@@ -1945,11 +1934,7 @@ pub fn into_outputs(self) -> ResolverOutputs<'tcx> {
lifetimes_res_map: self.lifetimes_res_map,
extra_lifetime_params_map: self.extra_lifetime_params_map,
next_node_id: self.next_node_id,
node_id_to_def_id: self
.node_id_to_def_id
.into_items()
.map(|(k, f)| (k, f.key()))
.collect(),
node_id_to_def_id: self.node_id_to_def_id,
trait_map: self.trait_map,
lifetime_elision_allowed: self.lifetime_elision_allowed,
lint_buffer: Steal::new(self.lint_buffer),
+1
View File
@@ -710,6 +710,7 @@ fn smart_resolve_macro_path(
(sym::on_move, Some(sym::diagnostic_on_move)),
(sym::on_const, Some(sym::diagnostic_on_const)),
(sym::on_unknown, Some(sym::diagnostic_on_unknown)),
(sym::on_unmatch_args, Some(sym::diagnostic_on_unmatch_args)),
];
if res == Res::NonMacroAttr(NonMacroAttrKind::Tool)
+2
View File
@@ -803,6 +803,7 @@
diagnostic_on_const,
diagnostic_on_move,
diagnostic_on_unknown,
diagnostic_on_unmatch_args,
dialect,
direct,
discriminant_kind,
@@ -1427,6 +1428,7 @@
on_move,
on_unimplemented,
on_unknown,
on_unmatch_args,
opaque,
opaque_module_name_placeholder: "<opaque>",
ops,
+3
View File
@@ -45,6 +45,9 @@ fn clone(&self) -> Self {
/// variant must also be specified. Only a single field is supported.
#[unstable(feature = "field_projections", issue = "145383")]
#[allow_internal_unstable(field_representing_type_raw, builtin_syntax)]
#[diagnostic::on_unmatch_args(
note = "this macro expects a container type and a field path, like `field_of!(Type, field)` or `field_of!(Enum, Variant.field)`"
)]
// NOTE: when stabilizing this macro, we can never add new trait impls for `FieldRepresentingType`,
// since it is `#[fundamental]` and thus could break users of this macro, since the compiler expands
// it to `FieldRepresentingType<...>`. Thus stabilizing this requires careful thought about the
+1
View File
@@ -131,6 +131,7 @@
#![feature(deprecated_suggestion)]
#![feature(derive_const)]
#![feature(diagnostic_on_const)]
#![feature(diagnostic_on_unmatch_args)]
#![feature(doc_cfg)]
#![feature(doc_notable_trait)]
#![feature(extern_types)]
+3
View File
@@ -1567,6 +1567,9 @@ impl<T> SizedTypeProperties for T {}
/// [`offset_of_enum`]: https://doc.rust-lang.org/nightly/unstable-book/language-features/offset-of-enum.html
/// [`offset_of_slice`]: https://doc.rust-lang.org/nightly/unstable-book/language-features/offset-of-slice.html
#[stable(feature = "offset_of", since = "1.77.0")]
#[diagnostic::on_unmatch_args(
note = "this macro expects a container type and a (nested) field path, like `offset_of!(Type, field)`"
)]
#[allow_internal_unstable(builtin_syntax, core_intrinsics)]
pub macro offset_of($Container:ty, $($fields:expr)+ $(,)?) {
// The `{}` is for better error messages
+82 -6
View File
@@ -1006,6 +1006,7 @@ pub fn from_secs_f32(secs: f32) -> Duration {
/// This method will panic if the result is negative, overflows `Duration`, or is not finite.
///
/// # Examples
///
/// ```
/// use std::time::Duration;
///
@@ -1013,6 +1014,37 @@ pub fn from_secs_f32(secs: f32) -> Duration {
/// assert_eq!(dur.mul_f64(3.14), Duration::new(8, 478_000_000));
/// assert_eq!(dur.mul_f64(3.14e5), Duration::new(847_800, 0));
/// ```
///
/// Note that `f64` does not have enough bits ([`f64::MANTISSA_DIGITS`]) to represent the full
/// range of possible `Duration` with nanosecond precision, so rounding may occur even for
/// trivial operations like multiplying by 1.
///
/// ```
/// # #![feature(float_exact_integer_constants)]
/// use std::time::Duration;
///
/// // This is about 14.9 weeks, remaining precise to the nanosecond:
/// let weeks = Duration::from_nanos(f64::MAX_EXACT_INTEGER as u64);
/// assert_eq!(weeks, weeks.mul_f64(1.0));
///
/// // A larger value incurs rounding in the floating-point operation:
/// let weeks = Duration::from_nanos(u64::MAX);
/// assert_ne!(weeks, weeks.mul_f64(1.0));
///
/// // This is over 285 million years, remaining precise to the second:
/// let years = Duration::from_secs(f64::MAX_EXACT_INTEGER as u64);
/// assert_eq!(years, years.mul_f64(1.0));
///
/// // And again larger values incur rounding:
/// let years = Duration::from_secs(u64::MAX / 2);
/// assert_ne!(years, years.mul_f64(1.0));
/// ```
///
/// ```should_panic
/// # use std::time::Duration;
/// // In the extreme, rounding can even overflow `Duration`, which panics.
/// let _ = Duration::from_secs(u64::MAX).mul_f64(1.0);
/// ```
#[stable(feature = "duration_float", since = "1.38.0")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
@@ -1023,6 +1055,10 @@ pub fn mul_f64(self, rhs: f64) -> Duration {
/// Multiplies `Duration` by `f32`.
///
/// Since the significand of `f32` is quite limited compared to the range of `Duration`
/// -- only about 16.8ms of exact nanosecond precision -- this method currently forwards
/// to [`mul_f64`][Self::mul_f64] for greater accuracy.
///
/// # Panics
/// This method will panic if the result is negative, overflows `Duration`, or is not finite.
///
@@ -1031,7 +1067,10 @@ pub fn mul_f64(self, rhs: f64) -> Duration {
/// use std::time::Duration;
///
/// let dur = Duration::new(2, 700_000_000);
/// assert_eq!(dur.mul_f32(3.14), Duration::new(8, 478_000_641));
/// // Note that this `3.14_f32` argument already has more floating-point
/// // representation error than a direct `3.14_f64` would, so the result
/// // is slightly different from the ideal 8.478s.
/// assert_eq!(dur.mul_f32(3.14), Duration::new(8, 478_000_283));
/// assert_eq!(dur.mul_f32(3.14e5), Duration::new(847_800, 0));
/// ```
#[stable(feature = "duration_float", since = "1.38.0")]
@@ -1039,7 +1078,7 @@ pub fn mul_f64(self, rhs: f64) -> Duration {
without modifying the original"]
#[inline]
pub fn mul_f32(self, rhs: f32) -> Duration {
Duration::from_secs_f32(rhs * self.as_secs_f32())
self.mul_f64(rhs.into())
}
/// Divides `Duration` by `f64`.
@@ -1048,6 +1087,7 @@ pub fn mul_f32(self, rhs: f32) -> Duration {
/// This method will panic if the result is negative, overflows `Duration`, or is not finite.
///
/// # Examples
///
/// ```
/// use std::time::Duration;
///
@@ -1055,6 +1095,37 @@ pub fn mul_f32(self, rhs: f32) -> Duration {
/// assert_eq!(dur.div_f64(3.14), Duration::new(0, 859_872_611));
/// assert_eq!(dur.div_f64(3.14e5), Duration::new(0, 8_599));
/// ```
///
/// Note that `f64` does not have enough bits ([`f64::MANTISSA_DIGITS`]) to represent the full
/// range of possible `Duration` with nanosecond precision, so rounding may occur even for
/// trivial operations like dividing by 1.
///
/// ```
/// # #![feature(float_exact_integer_constants)]
/// use std::time::Duration;
///
/// // This is about 14.9 weeks, remaining precise to the nanosecond:
/// let weeks = Duration::from_nanos(f64::MAX_EXACT_INTEGER as u64);
/// assert_eq!(weeks, weeks.div_f64(1.0));
///
/// // A larger value incurs rounding in the floating-point operation:
/// let weeks = Duration::from_nanos(u64::MAX);
/// assert_ne!(weeks, weeks.div_f64(1.0));
///
/// // This is over 285 million years, remaining precise to the second:
/// let years = Duration::from_secs(f64::MAX_EXACT_INTEGER as u64);
/// assert_eq!(years, years.div_f64(1.0));
///
/// // And again larger values incur rounding:
/// let years = Duration::from_secs(u64::MAX / 2);
/// assert_ne!(years, years.div_f64(1.0));
/// ```
///
/// ```should_panic
/// # use std::time::Duration;
/// // In the extreme, rounding can even overflow `Duration`, which panics.
/// let _ = Duration::from_secs(u64::MAX).div_f64(1.0);
/// ```
#[stable(feature = "duration_float", since = "1.38.0")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
@@ -1065,6 +1136,10 @@ pub fn div_f64(self, rhs: f64) -> Duration {
/// Divides `Duration` by `f32`.
///
/// Since the significand of `f32` is quite limited compared to the range of `Duration`
/// -- only about 16.8ms of exact nanosecond precision -- this method currently forwards
/// to [`div_f64`][Self::div_f64] for greater accuracy.
///
/// # Panics
/// This method will panic if the result is negative, overflows `Duration`, or is not finite.
///
@@ -1073,9 +1148,10 @@ pub fn div_f64(self, rhs: f64) -> Duration {
/// use std::time::Duration;
///
/// let dur = Duration::new(2, 700_000_000);
/// // note that due to rounding errors result is slightly
/// // different from 0.859_872_611
/// assert_eq!(dur.div_f32(3.14), Duration::new(0, 859_872_580));
/// // Note that this `3.14_f32` argument already has more floating-point
/// // representation error than a direct `3.14_f64` would, so the result
/// // is slightly different from the ideally rounded 0.859_872_611.
/// assert_eq!(dur.div_f32(3.14), Duration::new(0, 859_872_583));
/// assert_eq!(dur.div_f32(3.14e5), Duration::new(0, 8_599));
/// ```
#[stable(feature = "duration_float", since = "1.38.0")]
@@ -1083,7 +1159,7 @@ pub fn div_f64(self, rhs: f64) -> Duration {
without modifying the original"]
#[inline]
pub fn div_f32(self, rhs: f32) -> Duration {
Duration::from_secs_f32(self.as_secs_f32() / rhs)
self.div_f64(rhs.into())
}
/// Divides `Duration` by `Duration` and returns `f64`.
+1 -1
View File
@@ -1 +1 @@
30d0309fa821f7a0984a9629e0d227ca3c0d2eda
cf1817bc6ecd2d14ca492247c804bad31948dd56
@@ -98,7 +98,7 @@ You might also find the following sites useful:
[gsearchdocs]: https://www.google.com/search?q=site:doc.rust-lang.org+your+query+here
[stddocs]: https://doc.rust-lang.org/std
[rif]: http://internals.rust-lang.org
[rr]: https://doc.rust-lang.org/book/
[rr]: https://doc.rust-lang.org/reference/
[rustforge]: https://forge.rust-lang.org/
[tlgba]: https://tomlee.co/2014/04/a-more-detailed-tour-of-the-rust-compiler/
[ro]: https://www.rustaceans.org/
@@ -144,12 +144,19 @@ If running `./x check` on save is inconvenient, in VS Code you can use a [Build
### Neovim
For Neovim users, there are a few options.
The easiest way is by using [neoconf.nvim](https://github.com/folke/neoconf.nvim/),
which allows for project-local configuration files with the native LSP.
The steps for how to use it are below.
Note that they require rust-analyzer to already be configured with Neovim.
Steps for this can be [found here](https://rust-analyzer.github.io/manual.html#nvim-lsp).
For Neovim users, there are a few options:
1. The easiest way is using [neoconf.nvim][neoconf.nvim] but it uses the
deprecated `require('lspconfig')` API which displays a warning on neovim 0.11+.
2. Using `coc.nvim` is another option, but it requires Node.js to be installed.
3. Using a custom script to load rust-analyzer settings.
#### neoconf.nvim
[neoconf.nvim][neoconf.nvim] allows for project-local configuration
files with the native LSP. The steps for how to use it are below. Note that they require
rust-analyzer to already be configured with Neovim. Steps for this can be
[found here][r-a nvim lsp].
1. First install the plugin.
This can be done by following the steps in the README.
@@ -157,64 +164,62 @@ Steps for this can be [found here](https://rust-analyzer.github.io/manual.html#n
`neoconf` is able to read and update
rust-analyzer settings automatically when the project is opened when this file is detected.
#### coc.nvim
If you're using `coc.nvim`, you can run `./x setup editor` and select `vim` to
create a `.vim/coc-settings.json`.
The settings can be edited with `:CocLocalConfig`.
The recommended settings live at [`src/etc/rust_analyzer_settings.json`].
Another way is without a plugin, and creating your own logic in your configuration.
The following code will work for any checkout of rust-lang/rust (newer than February 2025):
#### Custom LSP settings
If you're running neovim 0.11+, you can configure rust-analyzer with just
[nvim-lspconfig](https://github.com/neovim/nvim-lspconfig) and a custom script.
1. Make sure [rust-analyzer LSP][r-a nvim lsp] is set up
2. Create `$HOME/.config/nvim/after/plugged/rust_analyzer.lua` with the following content:
```lua
local function expand_config_variables(option)
local var_placeholders = {
['${workspaceFolder}'] = function(_)
return vim.lsp.buf.list_workspace_folders()[1]
end,
}
-- Capture the default functions from nvim-lspconfig/lsp/rust_analyzer.lua before overriding it.
-- This file is in after/plugin to guarantee nvim-lspconfig has been initialised already.
local default_root_dir = vim.lsp.config["rust_analyzer"].root_dir
local default_before_init = vim.lsp.config["rust_analyzer"].before_init
if type(option) == "table" then
local mt = getmetatable(option)
local result = {}
for k, v in pairs(option) do
result[expand_config_variables(k)] = expand_config_variables(v)
vim.lsp.config("rust_analyzer", {
cmd = { "rust-analyzer" },
filetypes = { "rust" },
-- To support rust-lang/rust, we need to detect when we're in the rust repo and use the git root
-- instead of cargo project root.
root_dir = function(bufnr, on_dir)
local git_root = vim.fs.root(bufnr, { ".git" })
if git_root then
if vim.uv.fs_stat(vim.fs.joinpath(git_root, "src/etc/rust_analyzer_zed.json")) then
on_dir(git_root)
return
end
end
return setmetatable(result, mt)
end
if type(option) ~= "string" then
return option
end
local ret = option
for key, fn in pairs(var_placeholders) do
ret = ret:gsub(key, fn)
end
return ret
end
lspconfig.rust_analyzer.setup {
root_dir = function()
local default = lspconfig.rust_analyzer.config_def.default_config.root_dir()
-- the default root detection uses the cargo workspace root.
-- but for rust-lang/rust, the standard library is in its own workspace.
-- use the git root instead.
local compiler_config = vim.fs.joinpath(default, "../src/bootstrap/defaults/config.compiler.toml")
if vim.fs.basename(default) == "library" and vim.uv.fs_stat(compiler_config) then
return vim.fs.dirname(default)
end
return default
-- For anything that doesn't match rust-lang/rust, fallback to default root_dir
default_root_dir(bufnr, on_dir)
end,
on_init = function(client)
local path = client.workspace_folders[1].name
local config = vim.fs.joinpath(path, "src/etc/rust_analyzer_zed.json")
if vim.uv.fs_stat(config) then
-- load rust-lang/rust settings
local file = io.open(config)
local json = vim.json.decode(file:read("*a"))
client.config.settings["rust-analyzer"] = expand_config_variables(json.lsp["rust-analyzer"].initialization_options)
client.notify("workspace/didChangeConfiguration", { settings = client.config.settings })
before_init = function(init_params, config)
-- When inside rust-lang/rust, we need to use the special rust-analyzer settings.
local settings = vim.fs.joinpath(config.root_dir, "src/etc/rust_analyzer_zed.json")
if vim.uv.fs_stat(settings) then
local file = io.open(settings)
-- nvim 0.12+ supports comments; otherwise you'll need content:gsub("//[^\n]*", "").
local json = vim.json.decode(file:read("*a"), { skip_comments = true })
file:close()
config.settings["rust-analyzer"] = vim.tbl_deep_extend(
"force", -- Overwrite with the special settings when there is a conflict.
config.settings["rust-analyzer"] or {},
json.lsp["rust-analyzer"].initialization_options
)
end
return true
end
}
default_before_init(init_params, config)
end,
})
vim.lsp.enable("rust_analyzer")
```
If you would like to use the build task that is described above, you may either
@@ -223,6 +228,9 @@ make your own command in your config, or you can install a plugin such as
files](https://github.com/stevearc/overseer.nvim/blob/master/doc/guides.md#vs-code-tasks),
and follow the same instructions as above.
[neoconf.nvim]: https://github.com/folke/neoconf.nvim/
[r-a nvim lsp]: https://rust-analyzer.github.io/book/other_editors.html#nvim-lsp
### Emacs
Emacs provides support for rust-analyzer with project-local configuration
+11 -11
View File
@@ -8,13 +8,14 @@ Coherence checking is what detects both of trait impls and inherent impls overla
Overlapping trait impls always produce an error,
while overlapping inherent impls result in an error only if they have methods with the same name.
Checking for overlaps is split in two parts. First there's the [overlap check(s)](#overlap-checks),
Checking for overlaps is split in two parts.
First there's the [overlap check(s)](#overlap-checks),
which finds overlaps between trait and inherent implementations that the compiler currently knows about.
However, Coherence also results in an error if any other impls **could** exist,
even if they are currently unknown.
even if they are currently unknown.
This affects impls which may get added to upstream crates in a backwards compatible way,
and impls from downstream crates.
and impls from downstream crates.
This is called the Orphan check.
## Overlap checks
@@ -25,7 +26,7 @@ Overlap checks always consider pairs of implementations, comparing them to each
Overlap checking for inherent impl blocks is done through `fn check_item` (in coherence/inherent_impls_overlap.rs),
where you can very clearly see that (at least for small `n`), the check really performs `n^2`
comparisons between impls.
comparisons between impls.
In the case of traits, this check is currently done as part of building the [specialization graph](traits/specialization.md),
to handle specializing impls overlapping with their parent, but this may change in the future.
@@ -37,7 +38,7 @@ Overlapping is sometimes partially allowed:
1. for marker traits
2. under [specialization](traits/specialization.md)
but normally isn't.
It normally isn't.
The overlap check has various modes (see [`OverlapMode`]).
Importantly, there's the explicit negative impl check, and the implicit negative impl check.
@@ -47,9 +48,9 @@ Both try to prove that an overlap is definitely impossible.
### The explicit negative impl check
This check is done in [`impl_intersection_has_negative_obligation`].
This check is done in [`impl_intersection_has_negative_obligation`].
This check tries to find a negative trait implementation.
This check tries to find a negative trait implementation.
For example:
```rust
@@ -64,7 +65,7 @@ In this example, we'd get:
`MyCustomErrorType: From<&str>` and `MyCustomErrorType: From<?E>`, giving `?E = &str`.
And thus, these two implementations would overlap.
However, libstd provides `&str: !Error`, and therefore guarantees that there
However, libstd provides `&str: !Error`, and therefore guarantees that there
will never be a positive implementation of `&str: Error`, and thus there is no overlap.
Note that for this kind of negative impl check, we must have explicit negative implementations provided.
@@ -77,13 +78,13 @@ This is not currently stable.
This check is done in [`impl_intersection_has_impossible_obligation`],
and does not rely on negative trait implementations and is stable.
Let's say there's a
Let's say there's a
```rust
impl From<MyLocalType> for Box<dyn Error> {} // in your own crate
impl<E> From<E> for Box<dyn Error> where E: Error {} // in std
```
This would give: `Box<dyn Error>: From<MyLocalType>`, and `Box<dyn Error>: From<?E>`,
This would give: `Box<dyn Error>: From<MyLocalType>`, and `Box<dyn Error>: From<?E>`,
giving `?E = MyLocalType`.
In your crate there's no `MyLocalType: Error`, and downstream crates cannot implement `Error` (a remote trait) for `MyLocalType` (a remote type).
@@ -91,4 +92,3 @@ Therefore, these two impls do not overlap.
Importantly, this works even if there isn't a `impl !Error for MyLocalType`.
[`impl_intersection_has_impossible_obligation`]: https://doc.rust-lang.org/beta/nightly-rustc/rustc_trait_selection/traits/coherence/fn.impl_intersection_has_impossible_obligation.html
+14 -14
View File
@@ -2,16 +2,16 @@
## The `HostEffect` predicate
[`HostEffectPredicate`]s are a kind of predicate from `~const Tr` or `const Tr` bounds.
[`HostEffectPredicate`]s are a kind of predicate from `[const] Tr` or `const Tr` bounds.
It has a trait reference, and a `constness` which could be `Maybe` or
`Const` depending on the bound.
Because `~const Tr`, or rather `Maybe` bounds
Because `[const] Tr`, or rather `Maybe` bounds
apply differently based on whichever contexts they are in, they have different
behavior than normal bounds.
Where normal trait bounds on a function such as
`T: Tr` are collected within the [`predicates_of`] query to be proven when a
function is called and to be assumed within the function, bounds such as
`T: ~const Tr` will behave as a normal trait bound and add `T: Tr` to the result
`T: [const] Tr` will behave as a normal trait bound and add `T: Tr` to the result
from `predicates_of`, but also adds a `HostEffectPredicate` to the [`const_conditions`] query.
On the other hand, `T: const Tr` bounds do not change meaning across contexts,
@@ -37,7 +37,7 @@ In a similar vein,
an item *in const contexts*. If we adjust the example above to use `const` trait bounds:
```rust
const fn foo<T>() where T: ~const Default {}
const fn foo<T>() where T: [const] Default {}
```
Then `foo` would get a `HostEffect(T: Default, maybe)` in the `const_conditions`
@@ -55,7 +55,7 @@ Note that we don't check
if the function is only referred to but not called, as the following code needs to compile:
```rust
const fn hi<T: ~const Default>() -> T {
const fn hi<T: [const] Default>() -> T {
T::default()
}
const X: fn() -> u32 = hi::<u32>;
@@ -69,7 +69,7 @@ Here's an example:
```rust
const trait Bar {}
const trait Foo: ~const Bar {}
const trait Foo: [const] Bar {}
// `const_conditions` contains `HostEffect(Self: Bar, maybe)`
impl const Bar for () {}
@@ -86,13 +86,13 @@ We do the same for `const_conditions`:
```rust
const trait Foo {
fn hi<T: ~const Default>();
fn hi<T: [const] Default>();
}
impl<T: ~const Clone> Foo for Vec<T> {
fn hi<T: ~const PartialEq>();
// ^ we can't prove `T: ~const PartialEq` given `T: ~const Clone` and
// `T: ~const Default`, therefore we know that the method on the impl
impl<T: [const] Clone> Foo for Vec<T> {
fn hi<T: [const] PartialEq>();
// ^ we can't prove `T: [const] PartialEq` given `T: [const] Clone` and
// `T: [const] Default`, therefore we know that the method on the impl
// is stricter than the method on the trait.
}
```
@@ -117,11 +117,11 @@ Bounds on associated types, opaque types, and supertraits such as the following
have their bounds represented differently:
```rust
trait Foo: ~const PartialEq {
type X: ~const PartialEq;
trait Foo: [const] PartialEq {
type X: [const] PartialEq;
}
fn foo() -> impl ~const PartialEq {
fn foo() -> impl [const] PartialEq {
// ^ unimplemented syntax
}
```
+2 -2
View File
@@ -385,8 +385,8 @@ the history becomes easier to work with.
The easiest way to squash your commits in a PR on the `rust-lang/rust` repository is to use the `@bors squash` command in a comment on the PR.
By default, [bors] combines all commit messages of the PR into the squashed commit message.
To customize the commit message, use `@bors squash msg=<commit message>`.
To customize the commit message, use `@bors squash msg="<commit message>"`.
For example, `@bors squash msg="Improve diagnostics for missing lifetime parameter"`.
If you want to squash commits using local git operations, read on below.
+30 -21
View File
@@ -2,8 +2,8 @@
The AST lowering step converts AST to [HIR](../hir.md).
This means many structures are removed if they are irrelevant
for type analysis or similar syntax agnostic analyses. Examples
of such structures include but are not limited to
for type analysis or similar syntax-agnostic analyses.
Examples of such structures include but are not limited to
* Parenthesis
* Removed without replacement, the tree structure makes order explicit
@@ -19,16 +19,19 @@ The implementation of AST lowering is in the [`rustc_ast_lowering`] crate.
The entry point is [`lower_to_hir`], which retrieves the post-expansion AST
and resolver data from [`TyCtxt`] and builds the [`hir::Crate`] for the whole crate.
Lowering is organized around HIR owners. [`lower_to_hir`] first indexes the
Lowering is organized around HIR owners.
[`lower_to_hir`] first indexes the
crate and then [`ItemLowerer::lower_node`] lowers each crate, item, associated
item, and foreign item.
Most of the lowering logic lives on [`LoweringContext`]. The implementation is
Most of the lowering logic lives on [`LoweringContext`].
The implementation is
split across multiple files in the [`rustc_ast_lowering`] crate such as `item.rs`,
`expr.rs`, `pat.rs`, `path.rs`, and others, but they all share the same [`LoweringContext`]
state and ID-lowering machinery.
Each owner is lowered in its own [`with_hir_id_owner`] scope. This is why the
Each owner is lowered in its own [`with_hir_id_owner`] scope.
This is why the
`HirId` invariants below matter: `lower_node_id` maps AST `NodeId`s into the
current owner, while `next_id` creates fresh HIR-only nodes introduced during
desugaring.
@@ -36,25 +39,27 @@ desugaring.
Lowering needs to uphold several invariants in order to not trigger the
sanity checks in [`compiler/rustc_passes/src/hir_id_validator.rs`][hir_id_validator]:
1. A `HirId` must be used if created. So if you use the `lower_node_id`,
you *must* use the resulting `NodeId` or `HirId` (either is fine, since
any `NodeId`s in the `HIR` are checked for existing `HirId`s)
1. A `HirId` must be used if created.
So, if you use the `lower_node_id`,
you *must* use the resulting `NodeId` or `HirId` (either is fine, since
any `NodeId`s in the `HIR` are checked for existing `HirId`s).
2. Lowering a `HirId` must be done in the scope of the *owning* item.
This means you need to use `with_hir_id_owner` if you are creating parts
of an item other than the one being currently lowered. This happens for
example during the lowering of existential `impl Trait`
This means you need to use `with_hir_id_owner` if you are creating parts
of an item other than the one being currently lowered.
This happens, for example, during the lowering of existential `impl Trait`.
3. A `NodeId` that will be placed into a HIR structure must be lowered,
even if its `HirId` is unused. Calling
`let _ = self.lower_node_id(node_id);` is perfectly legitimate.
even if its `HirId` is unused.
Calling `let _ = self.lower_node_id(node_id);` is perfectly legitimate.
4. If you are creating new nodes that didn't exist in the `AST`, you *must*
create new ids for them. This is done by calling the `next_id` method,
which produces both a new `NodeId` as well as automatically lowering it
for you so you also get the `HirId`.
create new ids for them.
This is done by calling the `next_id` method,
which produces both a new `NodeId` as well as automatically lowering it
for you so you also get the `HirId`.
[`rustc_ast_lowering`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_ast_lowering/index.html
[`lower_to_hir`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_ast_lowering/fn.lower_to_hir.html
[`TyCtxt`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TyCtxt.html
[`hir::Crate`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/hir/struct.Crate.html
[`hir::Crate`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/hir/struct.Crate.html
[`ItemLowerer::lower_node`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_ast_lowering/item/struct.ItemLowerer.html
[`LoweringContext`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_ast_lowering/struct.LoweringContext.html
[`with_hir_id_owner`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_ast_lowering/struct.LoweringContext.html#method.with_hir_id_owner
@@ -62,12 +67,16 @@ sanity checks in [`compiler/rustc_passes/src/hir_id_validator.rs`][hir_id_valida
If you are creating new `DefId`s, since each `DefId` needs to have a
corresponding `NodeId`, it is advisable to add these `NodeId`s to the
`AST` so you don't have to generate new ones during lowering. This has
`AST` so you don't have to generate new ones during lowering.
This has
the advantage of creating a way to find the `DefId` of something via its
`NodeId`. If lowering needs this `DefId` in multiple places, you can't
`NodeId`.
If lowering needs this `DefId` in multiple places, you can't
generate a new `NodeId` in all those places because you'd also get a new
`DefId` then. With a `NodeId` from the `AST` this is not an issue.
`DefId` then.
With a `NodeId` from the `AST`, this is not an issue.
Having the `NodeId` also allows the `DefCollector` to generate the `DefId`s
instead of lowering having to do it on the fly. Centralizing the `DefId`
instead of lowering having to do it on the fly.
Centralizing the `DefId`
generation in one place makes it easier to refactor and reason about.
@@ -162,7 +162,7 @@ The below steps needs to be followed in order to implement a new unstable featur
```rust ignore
/// Allows defining identifiers beyond ASCII.
(unstable, non_ascii_idents, "CURRENT_RUSTC_VERSION", Some(55467), None),
(unstable, non_ascii_idents, "CURRENT_RUSTC_VERSION", Some(55467)),
```
Features can be marked as incomplete,
@@ -173,7 +173,7 @@ The below steps needs to be followed in order to implement a new unstable featur
```rust ignore
/// Allows deref patterns.
(incomplete, deref_patterns, "CURRENT_RUSTC_VERSION", Some(87121), None),
(incomplete, deref_patterns, "CURRENT_RUSTC_VERSION", Some(87121)),
```
Feature flags related to a lang experiment must be marked as `incomplete`
+69 -47
View File
@@ -1,36 +1,42 @@
# Name resolution
In the previous chapters, we saw how the [*Abstract Syntax Tree* (`AST`)][ast]
is built with all macros expanded. We saw how doing that requires doing some
name resolution to resolve imports and macro names. In this chapter, we show
how this is actually done and more.
is built with all macros expanded.
We saw how doing that requires doing some
name resolution to resolve imports and macro names.
In this chapter, we show how this is actually done and more.
[ast]: ./ast-validation.md
In fact, we don't do full name resolution during macro expansion -- we only
resolve imports and macros at that time. This is required to know what to even
expand. Later, after we have the whole AST, we do full name resolution to
resolve all names in the crate. This happens in [`rustc_resolve::late`][late].
resolve imports and macros at that time.
This is required to know what to even expand.
Later, after we have the whole AST, we do full name resolution to
resolve all names in the crate.
This happens in [`rustc_resolve::late`][late].
Unlike during macro expansion, in this late resolution pass, we only need to try to
resolve a name once, since no new names can be added. If we fail to resolve a
name, then it is a compiler error.
resolve a name once, since no new names can be added.
If we fail to resolve a name, then it is a compiler error.
Name resolution is complex. There are different namespaces (e.g.
macros, values, types, lifetimes), and names may be valid at different (nested)
scopes. Also, different types of names can fail resolution differently, and
failures can happen differently at different scopes. For example, in a module
scope, failure means no unexpanded macros and no unresolved glob imports in
that module. On the other hand, in a function body scope, failure requires that a
name be absent from the block we are in, all outer scopes, and the global
scope.
scopes.
Also, different types of names can fail resolution differently, and
failures can happen differently at different scopes.
For example, in a module scope,
failure means no unexpanded macros and no unresolved glob imports in
that module.
On the other hand, in a function body scope, failure requires that a
name be absent from the block we are in, all outer scopes, and the global scope.
[late]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_resolve/late/index.html
## Basics
In our programs we refer to variables, types, functions, etc, by giving them
a name. These names are not always unique. For example, take this valid Rust
program:
a name.
These names are not always unique.
For example, take this valid Rust program:
```rust
type x = u32;
@@ -38,20 +44,25 @@ let x: x = 1;
let y: x = 2;
```
How do we know on line 3 whether `x` is a type (`u32`) or a value (1)? These
conflicts are resolved during name resolution. In this specific case, name
resolution defines that type names and variable names live in separate
How do we know on line 3 whether `x` is a type (`u32`) or a value (1)?
These conflicts are resolved during name resolution.
In this specific case,
name resolution defines that type names and variable names live in separate
namespaces and therefore can co-exist.
The name resolution in Rust is a two-phase process. In the first phase, which runs
during `macro` expansion, we build a tree of modules and resolve imports. Macro
expansion and name resolution communicate with each other via the
The name resolution in Rust is a two-phase process.
In the first phase,
which runs during `macro` expansion,
we build a tree of modules and resolve imports.
Macro expansion and name resolution communicate with each other via the
[`ResolverAstLoweringExt`] trait.
The input to the second phase is the syntax tree, produced by parsing input
files and expanding `macros`. This phase produces links from all the names in the
source to relevant places where the name was introduced. It also generates
helpful error messages, like typo suggestions, traits to import or lints about
files and expanding `macros`.
This phase produces links from all the names in the
source to relevant places where the name was introduced.
It also generates helpful error messages,
like typo suggestions, traits to import or lints about
unused items.
A successful run of the second phase ([`Resolver::resolve_crate`]) creates kind
@@ -68,9 +79,11 @@ The name resolution lives in the [`rustc_resolve`] crate, with the bulk in
## Namespaces
Different kinds of symbols live in different namespaces, e.g. types don't
clash with variables. This usually doesn't happen, because variables start with
clash with variables.
This usually doesn't happen, because variables start with
lower-case letter while types with upper-case one, but this is only a
convention. This is legal Rust code that will compile (with warnings):
convention.
This is legal Rust code that will compile (with warnings):
```rust
type x = u32;
@@ -83,19 +96,21 @@ namespaces, the resolver keeps them separated and builds separate structures for
them.
In other words, when the code talks about namespaces, it doesn't mean the module
hierarchy, it's types vs. values vs. macros.
hierarchy, it's types versus values versus macros.
## Scopes and ribs
A name is visible only in certain area in the source code. This forms a
hierarchical structure, but not necessarily a simple one if one scope is
A name is visible only in a certain area in the source code.
This forms a hierarchical structure,
but not necessarily a simple one if one scope is
part of another, it doesn't mean a name visible in the outer scope is also
visible in the inner scope, or that it refers to the same thing.
To cope with that, the compiler introduces the concept of [`Rib`]s. This is
an abstraction of a scope. Every time the set of visible names potentially changes,
a new [`Rib`] is pushed onto a stack. The places where this can happen include for
example:
To cope with that, the compiler introduces the concept of [`Rib`]s.
This is an abstraction of a scope.
Every time the set of visible names potentially changes,
a new [`Rib`] is pushed onto a stack.
The places where this can happen include for example:
[`Rib`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_resolve/late/struct.Rib.html
@@ -106,11 +121,14 @@ example:
* Macro expansion border to cope with macro hygiene.
When searching for a name, the stack of [`ribs`] is traversed from the innermost
outwards. This helps to find the closest meaning of the name (the one not
shadowed by anything else). The transition to outer [`Rib`] may also affect
outwards.
This helps to find the closest meaning of the name (the one not
shadowed by anything else).
The transition to outer [`Rib`] may also affect
what names are usable: if there are nested functions (not closures),
the inner one can't access parameters and local bindings of the outer one,
even though they should be visible by ordinary scoping rules. An example:
even though they should be visible by ordinary scoping rules.
An example:
[`ribs`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_resolve/late/struct.LateResolutionVisitor.html#structfield.ribs
@@ -139,11 +157,13 @@ blocks), which isn't a full namespace in its own right.
## Overall strategy
To perform the name resolution of the whole crate, the syntax tree is traversed
top-down and every encountered name is resolved. This works for most kinds of
names, because at the point of use of a name it is already introduced in the [`Rib`]
top-down and every encountered name is resolved.
This works for most kinds of names,
because at the point of use of a name it is already introduced in the [`Rib`]
hierarchy.
There are some exceptions to this. Items are bit tricky, because they can be
There are some exceptions to this.
Items are a bit tricky, because they can be
used even before being encountered; therefore, every block needs to be first scanned
for items to fill in its [`Rib`].
@@ -156,14 +176,15 @@ Therefore, the resolution is performed in multiple stages.
## Speculative crate loading
To give useful errors, rustc suggests importing paths into scope if they're
not found. How does it do this? It looks through every module of every crate
and looks for possible matches. This even includes crates that haven't yet
been loaded!
not found.
How does it do this?
It looks through every module of every crate and looks for possible matches.
This even includes crates that haven't yet been loaded!
Eagerly loading crates to include import suggestions that haven't yet been
loaded is called _speculative crate loading_, because any errors it encounters
shouldn't be reported: [`rustc_resolve`] decided to load them, not the user. The function
that does this is [`lookup_import_candidates`] and lives in
shouldn't be reported: [`rustc_resolve`] decided to load them, not the user.
The function that does this is [`lookup_import_candidates`] and lives in
[`rustc_resolve::diagnostics`].
[`rustc_resolve`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_resolve/index.html
@@ -176,8 +197,9 @@ the load is speculative.
## TODO: [#16](https://github.com/rust-lang/rustc-dev-guide/issues/16)
This is a result of the first pass of learning the code. It is definitely
incomplete and not detailed enough. It also might be inaccurate in places.
This is a result of the first pass of learning the code.
It is definitely incomplete and not detailed enough.
It also might be inaccurate in places.
Still, it probably provides useful first guidepost to what happens in there.
* What exactly does it link to and how is that published and consumed by
@@ -6,13 +6,13 @@
This notification group deals with GPU-related issues and their integration
within the compiler.
The group also has an associated Zulip stream ([`#t-compiler/linker`])
The group also has an associated Zulip stream ([`#t-compiler/gpgpu-backend`])
where people can go to ask questions and discuss GPU-related topics and issues.
If you're interested in participating, feel free to sign up for this group! To
do so, open a PR against the [rust-lang/team] repository and add your GitHub
user to [this file][gpu-target-team].
[`#t-compiler/linker`]: https://rust-lang.zulipchat.com/#narrow/channel/585172-t-compiler.2Flinker
[`#t-compiler/gpgpu-backend`]: https://rust-lang.zulipchat.com/#narrow/channel/422870-t-compiler.2Fgpgpu-backend
[rust-lang/team]: https://github.com/rust-lang/team
[gpu-target-team]: https://github.com/rust-lang/team/blob/main/teams/gpu-target.toml
@@ -6,14 +6,14 @@
The `rustc_private` feature allows external crates to use compiler internals.
### Using `rustc-private` with Official Toolchains
### Using `rustc_private` with official toolchains
When using the `rustc_private` feature with official Rust toolchains distributed via rustup, you need to install two additional components:
1. **`rustc-dev`**: Provides compiler libraries
2. **`llvm-tools`**: Provides LLVM libraries required for linking
#### Installation Steps
#### Installation steps
Install both components using rustup:
@@ -21,7 +21,7 @@ Install both components using rustup:
rustup component add rustc-dev llvm-tools
```
#### Common Error
#### Common error
Without the `llvm-tools` component, you'll encounter linking errors like:
@@ -40,7 +40,7 @@ For custom-built toolchains or environments not using rustup, additional configu
- LLVM libraries must be available in your system's library search paths
- The LLVM version must match the one used to build your Rust toolchain
#### Troubleshooting Steps
#### Troubleshooting steps
1. Verify LLVM is installed and accessible
2. Ensure that library paths are set:
@@ -53,9 +53,10 @@ For custom-built toolchains or environments not using rustup, additional configu
When developing out-of-tree projects that use `rustc_private` crates, you can configure `rust-analyzer` to recognize these crates.
#### Configuration Steps
#### Configuration steps
1. Configure `rust-analyzer.rustc.source` to `"discover"` in your editor settings.
1. Configure `rust-analyzer.rustc.source` to `"discover"` in your editor settings.
For VS Code, add to `rust_analyzer_settings.json`:
```json
{
@@ -69,9 +70,39 @@ When developing out-of-tree projects that use `rustc_private` crates, you can co
rustc_private = true
```
This configuration allows `rust-analyzer` to properly recognize and provide IDE support for `rustc_private` crates in out-of-tree projects.
This configuration allows `rust-analyzer` to properly recognize and provide IDE support for `rustc_private` crates in out-of-tree projects.
### Additional Resources
### Getting nightly documentation for `rustc_private`
#### Latest nightly
For the latest nightly, you can install the `rustc-docs` component and open it directly in your browser:
```sh
rustup component add rustc-docs
rustup doc --rustc-docs
```
> Note: The `rustc-docs` component is only available for recent nightly toolchains and may not be present for every nightly date. It was first introduced in [PR #75560](https://github.com/rust-lang/rust/pull/75560) (August 2020).
#### Older nightlies
If you depend on compiler internals from an older nightly, you may want to refer to the internal documentation from that particular nightly.
The only way to do this is to generate the documentation locally.
For example, to get documentation for `nightly-2025-11-08`:
Get the Git commit hash for that nightly:
```sh
rustup toolchain install nightly-2025-11-08
rustc +nightly-2025-11-08 --version --verbose
```
The output will include a `commit-hash` line identifying the exact source revision.
Check out `rust-lang/rust` at that commit, then follow the steps in [compiler documentation](../building/compiler-documenting.md).
### Additional resources
- [GitHub Issue #137421] explains that `rustc_private` linker failures often occur because `llvm-tools` is not installed
+63 -40
View File
@@ -3,45 +3,51 @@
This section is about the stability attributes and schemes that allow stable
APIs to use unstable APIs internally in the rustc standard library.
**NOTE**: this section is for *library* features, not *language* features. For instructions on
stabilizing a language feature see [Stabilizing Features](./stabilization-guide.md).
**NOTE**: this section is for *library* features, not *language* features.
For instructions on stabilizing a language feature,
see [Stabilizing Features](./stabilization-guide.md).
## unstable
The `#[unstable(feature = "foo", issue = "1234", reason = "lorem ipsum")]`
attribute explicitly marks an item as unstable. Items that are marked as
attribute explicitly marks an item as unstable.
Items that are marked as
"unstable" cannot be used without a corresponding `#![feature]` attribute on
the crate, even on a nightly compiler. This restriction only applies across
crate boundaries, unstable items may be used within the crate that defines
them.
the crate, even on a nightly compiler.
This restriction only applies across
crate boundaries; unstable items may be used within the crate that defines them.
The `issue` field specifies the associated GitHub [issue number]. This field is
required and all unstable features should have an associated tracking issue. In
rare cases where there is no sensible value `issue = "none"` is used.
The `issue` field specifies the associated GitHub [issue number].
This field is required,
and all unstable features should have an associated tracking issue.
In rare cases where there is no sensible value, `issue = "none"` is used.
The `unstable` attribute infects all sub-items, where the attribute doesn't
have to be reapplied. So if you apply this to a module, all items in the module
will be unstable.
have to be reapplied.
So, if you apply this to a module, all items in the module will be unstable.
If you rename a feature, you can add `old_name = "old_name"` to produce a
If you rename a feature, you can add `old_name = "old_name"` to produce a
useful error message.
You can make specific sub-items stable by using the `#[stable]` attribute on
them. The stability scheme works similarly to how `pub` works. You can have
public functions of nonpublic modules and you can have stable functions in
unstable modules or vice versa.
them.
The stability scheme works similarly to how `pub` works.
You can have public functions of non-public modules,
and you can have stable functions in unstable modules or vice versa.
Previously, due to a [rustc bug], stable items inside unstable modules were
available to stable code in that location.
As of <!-- date-check --> September 2024, items with [accidentally stabilized
paths] are marked with the `#[rustc_allowed_through_unstable_modules]` attribute
to prevent code dependent on those paths from breaking. Do *not* add this attribute
to any more items unless that is needed to avoid breaking changes.
to prevent code dependent on those paths from breaking.
Do *not* add this attribute to any more items,
unless that is needed to avoid breaking changes.
The `unstable` attribute may also have the `soft` value, which makes it a
future-incompatible deny-by-default lint instead of a hard error. This is used
by the `bench` attribute which was accidentally accepted in the past. This
prevents breaking dependencies by leveraging Cargo's lint capping.
future-incompatible deny-by-default lint instead of a hard error.
This is used
by the `bench` attribute which was accidentally accepted in the past.
This prevents breaking dependencies by leveraging Cargo's lint capping.
[issue number]: https://github.com/rust-lang/rust/issues
[rustc bug]: https://github.com/rust-lang/rust/issues/15702
@@ -49,22 +55,26 @@ prevents breaking dependencies by leveraging Cargo's lint capping.
## stable
The `#[stable(feature = "foo", since = "1.420.69")]` attribute explicitly
marks an item as stabilized. Note that stable functions may use unstable things in their body.
marks an item as stabilized.
Note that stable functions may use unstable things in their body.
## rustc_const_unstable
The `#[rustc_const_unstable(feature = "foo", issue = "1234", reason = "lorem
ipsum")]` has the same interface as the `unstable` attribute. It is used to mark
`const fn` as having their constness be unstable. This is only needed in rare cases:
ipsum")]` has the same interface as the `unstable` attribute.
It is used to mark `const fn` as having their constness be unstable.
This is only needed in rare cases:
- If a `const fn` makes use of unstable language features or intrinsics.
(The compiler will tell you to add the attribute if you run into this.)
- If a `const fn` is `#[stable]` but not yet intended to be const-stable.
- To change the feature gate that is required to call a const-unstable intrinsic.
Const-stability differs from regular stability in that it is *recursive*: a
`#[rustc_const_unstable(...)]` function cannot even be indirectly called from stable code. This is
`#[rustc_const_unstable(...)]` function cannot even be indirectly called from stable code.
This is
to avoid accidentally leaking unstable compiler implementation artifacts to stable code or locking
us into the accidental quirks of an incomplete implementation. See the rustc_const_stable_indirect
us into the accidental quirks of an incomplete implementation.
See the rustc_const_stable_indirect
and rustc_allow_const_fn_unstable attributes below for how to fine-tune this check.
## rustc_const_stable
@@ -75,7 +85,8 @@ a `const fn` as having its constness be `stable`.
## rustc_const_stable_indirect
The `#[rustc_const_stable_indirect]` attribute can be added to a `#[rustc_const_unstable(...)]`
function to make it callable from `#[rustc_const_stable(...)]` functions. This indicates that the
function to make it callable from `#[rustc_const_stable(...)]` functions.
This indicates that the
function is ready for stable in terms of its implementation (i.e., it doesn't use any unstable
compiler features); the only reason it is not const-stable yet are API concerns.
@@ -105,7 +116,8 @@ To stabilize a feature, follow these steps:
1. Ask a **@T-libs-api** member to start an FCP on the tracking issue and wait for
the FCP to complete (with `disposition-merge`).
2. Change `#[unstable(...)]` to `#[stable(since = "CURRENT_RUSTC_VERSION")]`.
3. Remove `#![feature(...)]` from any test or doc-test for this API. If the feature is used in the
3. Remove `#![feature(...)]` from any test or doc-test for this API.
If the feature is used in the
compiler or tools, remove it from there as well.
4. If this is a `const fn`, add `#[rustc_const_stable(since = "CURRENT_RUSTC_VERSION")]`.
Alternatively, if this is not supposed to be const-stabilized yet,
@@ -121,14 +133,15 @@ and the associated
## allow_internal_unstable
Macros and compiler desugarings expose their bodies to the call
site. To work around not being able to use unstable things in the standard
Macros and compiler desugarings expose their bodies to the call site.
To work around not being able to use unstable things in the standard
library's macros, there's the `#[allow_internal_unstable(feature1, feature2)]`
attribute that allows the given features to be used in stable macros.
Note that if a macro is used in const context and generates a call to a
`#[rustc_const_unstable(...)]` function, that will *still* be rejected even with
`allow_internal_unstable`. Add `#[rustc_const_stable_indirect]` to the function to ensure the macro
`allow_internal_unstable`.
Add `#[rustc_const_stable_indirect]` to the function to ensure the macro
cannot accidentally bypass the recursive const stability checks.
## rustc_allow_const_fn_unstable
@@ -138,14 +151,16 @@ indirectly.
However, sometimes we do know that a feature will get stabilized, just not when, or there is a
stable (but e.g. runtime-slow) workaround, so we could always fall back to some stable version if we
scrapped the unstable feature. In those cases, the `[rustc_allow_const_fn_unstable(feature1,
scrapped the unstable feature.
In those cases, the `[rustc_allow_const_fn_unstable(feature1,
feature2)]` attribute can be used to allow some unstable features in the body of a stable (or
indirectly stable) `const fn`.
You also need to take care to uphold the `const fn` invariant that calling it at runtime and
compile-time needs to behave the same (see also [this blog post][blog]). This means that you
compile-time needs to behave the same (see also [this blog post][blog]).
This means that you
may not create a `const fn` that e.g. transmutes a memory address to an integer,
because the addresses of things are nondeterministic and often unknown at
because the addresses of things are non-deterministic and often unknown at
compile-time.
**Always ping @rust-lang/wg-const-eval if you are adding more
@@ -159,7 +174,8 @@ Any crate that uses the `stable` or `unstable` attributes must include the
## deprecated
Deprecations in the standard library are nearly identical to deprecations in
user code. When `#[deprecated]` is used on an item, it must also have a `stable`
user code.
When `#[deprecated]` is used on an item, it must also have a `stable`
or `unstable` attribute.
`deprecated` has the following form:
@@ -172,20 +188,26 @@ or `unstable `attribute.
)]
```
The `suggestion` field is optional. If given, it should be a string that can be
used as a machine-applicable suggestion to correct the warning. This is
The `suggestion` field is optional.
If given, it should be a string that can be
used as a machine-applicable suggestion to correct the warning.
This is
typically used when the identifier is renamed, but no other significant changes
are necessary. When the `suggestion` field is used, you need to have
are necessary.
When the `suggestion` field is used, you need to have
`#![feature(deprecated_suggestion)]` at the crate root.
Another difference from user code is that the `since` field is actually checked
against the current version of `rustc`. If `since` is in a future version, then
against the current version of `rustc`.
If `since` is in a future version, then
the `deprecated_in_future` lint is triggered which is default `allow`, but most
of the standard library raises it to a warning with
`#![warn(deprecated_in_future)]`.
## unstable_feature_bound
The `#[unstable_feature_bound(foo)]` attribute can be used together with `#[unstable]` attribute to mark an `impl` of stable type and stable trait as unstable. In std/core, an item annotated with `#[unstable_feature_bound(foo)]` can only be used by another item that is also annotated with `#[unstable_feature_bound(foo)]`. Outside of std/core, using an item with `#[unstable_feature_bound(foo)]` requires the feature to be enabled with `#![feature(foo)]` attribute on the crate.
The `#[unstable_feature_bound(foo)]` attribute can be used together with `#[unstable]` attribute to mark an `impl` of stable type and stable trait as unstable.
In std/core, an item annotated with `#[unstable_feature_bound(foo)]` can only be used by another item that is also annotated with `#[unstable_feature_bound(foo)]`.
Outside of std/core, using an item with `#[unstable_feature_bound(foo)]` requires the feature to be enabled with `#![feature(foo)]` attribute on the crate.
Currently, the items that can be annotated with `#[unstable_feature_bound]` are:
- `impl`
@@ -193,7 +215,8 @@ Currently, the items that can be annotated with `#[unstable_feature_bound]` are:
- trait
## renamed and removed features
Unstable features can get renamed and removed. If you rename a feature, you can add `old_name = "old_name"` to the `#[unstable]` attribute.
Unstable features can get renamed and removed.
If you rename a feature, you can add `old_name = "old_name"` to the `#[unstable]` attribute.
If you remove a feature, the `#![unstable_removed(feature = "foo", reason = "brief description", link = "link", since = "1.90.0")]`
attribute should be used to produce a good error message for users of the removed feature.
+17
View File
@@ -454,6 +454,23 @@ More information is available in the [toolstate documentation].
[rust-toolstate]: https://rust-lang-nursery.github.io/rust-toolstate
[toolstate documentation]: https://forge.rust-lang.org/infra/toolstate.html
## Public CI dashboard
To monitor the Rust CI, you can have a look at the [public dashboard] maintained by the infra team.
These are some useful panels from the dashboard:
- Pipeline duration: check how long the auto builds take to run.
- Top slowest jobs: check which jobs are taking the longest to run.
- Change in median job duration: check which jobs are slower than before.
This is useful for detecting regressions.
- Top failed jobs: check which jobs are failing the most.
To learn more about the dashboard, see the [Datadog CI docs].
[Datadog CI docs]: https://docs.datadoghq.com/continuous_integration/
[public dashboard]: https://p.datadoghq.com/sb/3a172e20-e9e1-11ed-80e3-da7ad0900002-b5f7bb7e08b664a06b08527da85f7e30
## Determining the CI configuration
If you want to determine which `bootstrap.toml` settings are used in CI for a
@@ -458,7 +458,8 @@ as they must be compilable by a stage 0 rustc that may be a beta or even stable
By default, run-make tests print each subprocess command and its stdout/stderr.
When running with `--no-capture` on `panic=abort` test suites (such as `cg_clif`),
this can flood the terminal. Omit `--verbose-run-make-subprocess-output` to
this can flood the terminal.
Omit `--verbose-run-make-subprocess-output` to
suppress this output for passing tests — failing tests always print regardless:
```bash
@@ -0,0 +1,53 @@
# `diagnostic_on_unmatch_args`
The tracking issue for this feature is: [#155642]
[#155642]: https://github.com/rust-lang/rust/issues/155642
------------------------
The `diagnostic_on_unmatch_args` feature adds the
`#[diagnostic::on_unmatch_args(...)]` attribute for declarative macros.
It lets a macro definition customize diagnostics for matcher failures after all arms have been
tried, such as incomplete invocations or trailing extra arguments.
This attribute currently applies to declarative macros such as `macro_rules!` and `pub macro`.
It is currently used for errors emitted by declarative macro matching itself; fragment parser
errors still use their existing diagnostics.
```rust,compile_fail
#![feature(diagnostic_on_unmatch_args)]
#[diagnostic::on_unmatch_args(
message = "invalid arguments to {This} macro invocation",
label = "expected a type and value here",
note = "this macro expects a type and a value, like `pair!(u8, 0)`",
note = "see <link/to/docs>",
)]
macro_rules! pair {
($ty:ty, $value:expr) => {};
}
pair!(u8);
```
This emits output like:
```text
error: invalid arguments to pair macro invocation
--> example.rs:13:9
|
9 | macro_rules! pair {
| ----------------- when calling this macro
...
13 | pair!(u8);
| ^ expected a type and value here
|
note: while trying to match `,`
--> example.rs:10:12
|
10 | ($ty:ty, $value:expr) => {};
| ^
= note: this macro expects a type and a value, like `pair!(u8, 0)`
= note: see <link/to/docs>
```
+27 -25
View File
@@ -153,7 +153,7 @@ pub(crate) fn write_not_crate_specific(
include_sources: bool,
) -> Result<(), Error> {
write_rendered_cross_crate_info(crates, dst, opt, include_sources, resource_suffix)?;
write_static_files(dst, opt, style_files, css_file_extension, resource_suffix)?;
write_resources(dst, opt, style_files, css_file_extension, resource_suffix)?;
Ok(())
}
@@ -183,43 +183,45 @@ fn write_rendered_cross_crate_info(
/// Writes the static files, the style files, and the css extensions.
/// Have to be careful about these, because they write to the root out dir.
fn write_static_files(
fn write_resources(
dst: &Path,
opt: &RenderOptions,
style_files: &[StylePath],
css_file_extension: Option<&Path>,
resource_suffix: &str,
) -> Result<(), Error> {
let static_dir = dst.join("static.files");
try_err!(fs::create_dir_all(&static_dir), &static_dir);
if opt.emit.is_empty() || opt.emit.contains(&EmitType::HtmlNonStaticFiles) {
// Handle added third-party themes
for entry in style_files {
let theme = entry.basename()?;
let extension =
try_none!(try_none!(entry.path.extension(), &entry.path).to_str(), &entry.path);
// Handle added third-party themes
for entry in style_files {
let theme = entry.basename()?;
let extension =
try_none!(try_none!(entry.path.extension(), &entry.path).to_str(), &entry.path);
// Skip the official themes. They are written below as part of STATIC_FILES_LIST.
if matches!(theme.as_str(), "light" | "dark" | "ayu") {
continue;
}
// Skip the official themes. They are written below as part of STATIC_FILES_LIST.
if matches!(theme.as_str(), "light" | "dark" | "ayu") {
continue;
let bytes = try_err!(fs::read(&entry.path), &entry.path);
let filename = format!("{theme}{resource_suffix}.{extension}");
let dst_filename = dst.join(filename);
try_err!(fs::write(&dst_filename, bytes), &dst_filename);
}
let bytes = try_err!(fs::read(&entry.path), &entry.path);
let filename = format!("{theme}{resource_suffix}.{extension}");
let dst_filename = dst.join(filename);
try_err!(fs::write(&dst_filename, bytes), &dst_filename);
}
// When the user adds their own CSS files with --extend-css, we write that as an
// invocation-specific file (that is, with a resource suffix).
if let Some(css) = css_file_extension {
let buffer = try_err!(fs::read_to_string(css), css);
let path = static_files::suffix_path("theme.css", resource_suffix);
let dst_path = dst.join(path);
try_err!(fs::write(&dst_path, buffer), &dst_path);
// When the user adds their own CSS files with --extend-css, we write that as an
// invocation-specific file (that is, with a resource suffix).
if let Some(css) = css_file_extension {
let buffer = try_err!(fs::read_to_string(css), css);
let path = static_files::suffix_path("theme.css", resource_suffix);
let dst_path = dst.join(path);
try_err!(fs::write(&dst_path, buffer), &dst_path);
}
}
if opt.emit.is_empty() || opt.emit.contains(&EmitType::HtmlStaticFiles) {
let static_dir = dst.join("static.files");
try_err!(fs::create_dir_all(&static_dir), &static_dir);
static_files::for_each(|f: &static_files::StaticFile| {
let filename = static_dir.join(f.output_filename());
let contents: &[u8] =
+6 -1
View File
@@ -81,6 +81,7 @@
use tracing::info;
use crate::clean::utils::DOC_RUST_LANG_ORG_VERSION;
use crate::config::EmitType;
use crate::error::Error;
use crate::formats::cache::Cache;
@@ -868,7 +869,11 @@ fn main_args(early_dcx: &mut EarlyDiagCtxt, at_args: &[String]) {
};
rustc_interface::create_and_enter_global_ctxt(compiler, krate, |tcx| {
let has_dep_info = render_options.dep_info().is_some();
markdown::render_and_write(file, render_options, edition)?;
if render_options.emit.contains(&EmitType::HtmlNonStaticFiles)
|| render_options.emit.is_empty()
{
markdown::render_and_write(file, render_options, edition)?;
}
if has_dep_info {
// Register the loaded external files in the source map so they show up in depinfo.
// We can't load them via the source map because it gets created after we process the options.
+44 -9
View File
@@ -9,8 +9,6 @@
fn main() {
rfs::create_dir("doc");
// We're only emitting dep info, so we shouldn't be running static analysis to
// figure out that this program is erroneous.
// Ensure that all kinds of input reading flags end up in dep-info.
rustdoc()
.input("lib.rs")
@@ -18,7 +16,7 @@ fn main() {
.arg("--html-before-content=before.html")
.arg("--markdown-after-content=after.md")
.arg("--extend-css=extend.css")
.arg("--theme=theme.css")
.arg("--theme=custom_theme.css")
.arg("--index-page=index-page.md")
.emit("dep-info")
.run();
@@ -31,8 +29,31 @@ fn main() {
assert_contains(&content, "after.md:");
assert_contains(&content, "before.html:");
assert_contains(&content, "extend.css:");
assert_contains(&content, "theme.css:");
assert_contains(&content, "custom_theme.css:");
assert_contains(&content, "index-page.md:");
// Only emit dep-info. Don't emit the actual page.
assert!(!path("doc/foo/index.html").exists());
assert!(!path("doc/custom_theme.css").exists());
// weird that --extend-css generates a file named theme.css
assert!(!path("doc/theme.css").exists());
// Now try emitting dep-info and html files at the same time.
rustdoc()
.input("lib.rs")
.arg("-Zunstable-options")
.arg("--html-before-content=before.html")
.arg("--markdown-after-content=after.md")
.arg("--extend-css=extend.css")
.arg("--theme=custom_theme.css")
.arg("--index-page=index-page.md")
.emit("dep-info,html-non-static-files,html-static-files")
.run();
assert!(path("doc/foo/index.html").exists());
// These files are copied into the doc output folder,
// which is why they show up in dep-info.
assert!(path("doc/custom_theme.css").exists());
// weird that --extend-css generates a file named theme.css
assert!(path("doc/theme.css").exists());
// Now we check that we can provide a file name to the `dep-info` argument.
rustdoc().input("lib.rs").arg("-Zunstable-options").emit("dep-info=bla.d").run();
@@ -72,7 +93,9 @@ fn main() {
assert_not_contains(&content, "after.md:");
assert_not_contains(&content, "before.html:");
assert_not_contains(&content, "extend.css:");
assert_not_contains(&content, "theme.css:");
assert_not_contains(&content, "custom_theme.css:");
// Only emit dep-info, not the actual html.
assert!(!path("doc/example.html").exists());
// combine --emit=dep-info=filename with plain markdown input
rustdoc()
@@ -81,10 +104,12 @@ fn main() {
.arg("--html-before-content=before.html")
.arg("--markdown-after-content=after.md")
.arg("--extend-css=extend.css")
.arg("--theme=theme.css")
.arg("--theme=custom_theme.css")
.arg("--markdown-css=markdown.css")
.arg("--index-page=index-page.md")
.emit("dep-info=example.d")
.emit("dep-info=example.d,html-non-static-files,html-static-files")
.run();
assert!(path("doc/example.html").exists());
let content = rfs::read_to_string("example.d");
assert_contains(&content, "example.md:");
assert_not_contains(&content, "lib.rs:");
@@ -93,7 +118,17 @@ fn main() {
assert_not_contains(&content, "doc.md:");
assert_contains(&content, "after.md:");
assert_contains(&content, "before.html:");
assert_contains(&content, "extend.css:");
assert_contains(&content, "theme.css:");
assert_contains(&content, "index-page.md:");
// This is a hotlink, not a file that gets copied,
// so it shouldn't add to the dep-info, it shouldn't be copied,
// and it shouldn't be resolved relative to the root path.
//
// It's weird that this is different from the other two css
// files, but it's stable, so I can't change it.
assert!(!path("doc/markdown.css").exists());
assert_not_contains(&content, "markdown.css:");
// These files aren't actually used, and the fact that they show up
// is arguably a bug, but test it anyway.
assert_contains(&content, "extend.css:");
assert_contains(&content, "custom_theme.css:");
}
@@ -0,0 +1,93 @@
//@ run-pass
//! Test that users are able to query function-level constness and asyncness.
//@ ignore-stage1
//@ ignore-cross-compile
//@ ignore-remote
//@ edition: 2021
#![feature(rustc_private)]
extern crate rustc_driver;
extern crate rustc_interface;
extern crate rustc_middle;
#[macro_use]
extern crate rustc_public;
use rustc_public::crate_def::CrateDef;
use rustc_public::ty::{Asyncness, Constness, FnDef};
use std::io::Write;
use std::ops::ControlFlow;
const CRATE_NAME: &str = "input";
/// Queries every function defined in the local crate and verifies that the
/// constness/asyncness reported by `rustc_public` matches each declaration
/// in the generated input crate.
fn test_stable_mir() -> ControlFlow<()> {
    let fns = rustc_public::local_crate().fn_defs();
    // (fully-qualified name, expected constness, expected asyncness)
    let cases = [
        ("input::const_sync", Constness::Const, Asyncness::NotAsync),
        ("input::async_fn", Constness::NotConst, Asyncness::Async),
        ("input::plain", Constness::NotConst, Asyncness::NotAsync),
        ("input::Widget::assoc_const", Constness::Const, Asyncness::NotAsync),
        ("input::Widget::assoc_async", Constness::NotConst, Asyncness::Async),
        ("input::Widget::assoc_plain", Constness::NotConst, Asyncness::NotAsync),
    ];
    for (name, constness, asyncness) in cases {
        check_fn(&fns, name, constness, asyncness);
    }
    ControlFlow::Continue(())
}
/// Asserts that the function named `name` exists in `fns` and carries the
/// expected constness and asyncness; panics with a descriptive message otherwise.
fn check_fn(fns: &[FnDef], name: &str, constness: Constness, asyncness: Asyncness) {
    // Locate the definition by its fully-qualified path; fail loudly if absent.
    let Some(fn_def) = fns.iter().find(|def| def.name() == name) else {
        panic!("missing {name}")
    };
    assert_eq!(fn_def.constness(), constness, "wrong constness for {}", fn_def.name());
    assert_eq!(fn_def.asyncness(), asyncness, "wrong asyncness for {}", fn_def.name());
}
/// Emits the input crate to disk, then compiles and inspects it via the
/// `run!` harness, which invokes `test_stable_mir` inside the compiler session.
fn main() {
    let path = "fn_attrs_input.rs";
    generate_input(&path).unwrap();
    // Arguments mirror a plain `rustc` invocation building the input as a lib.
    let args = &[
        String::from("rustc"),
        String::from("--edition=2021"),
        String::from("--crate-type=lib"),
        String::from("--crate-name"),
        String::from(CRATE_NAME),
        String::from(path),
    ];
    run!(args, test_stable_mir).unwrap();
}
/// Writes the source of the `input` crate (compiled by `main`) to `path`.
///
/// The generated items cover const/async/plain free functions plus the
/// matching associated functions on `Widget`, so `test_stable_mir` can probe
/// every constness/asyncness combination. Braces in the source text are
/// doubled because it passes through the `write!` format machinery.
fn generate_input(path: &str) -> std::io::Result<()> {
// `File::create` truncates any leftover file from a previous run.
let mut file = std::fs::File::create(path)?;
write!(
file,
r#"
pub const fn const_sync() -> u32 {{
1
}}
pub async fn async_fn() -> u32 {{
2
}}
pub fn plain() -> u32 {{
3
}}
pub struct Widget;
impl Widget {{
pub const fn assoc_const() -> u32 {{
4
}}
pub async fn assoc_async() -> u32 {{
5
}}
pub fn assoc_plain() -> u32 {{
6
}}
}}
"#
)?;
Ok(())
}
@@ -0,0 +1,12 @@
#![feature(diagnostic_on_unmatch_args)]
#[macro_export]
#[diagnostic::on_unmatch_args(
message = "invalid arguments to {This} macro invocation",
label = "expected a type and value here",
note = "this macro expects a type and a value, like `pair!(u8, 0)`",
note = "see the macro documentation for accepted forms",
)]
macro_rules! pair {
($ty:ty, $value:expr) => {};
}
@@ -0,0 +1,12 @@
//@ aux-build:other.rs
extern crate other;
fn main() {
other::pair!(u8);
//~^ ERROR invalid arguments to pair macro invocation
//~| NOTE expected a type and value here
//~| NOTE while trying to match `,`
//~| NOTE this macro expects a type and a value, like `pair!(u8, 0)`
//~| NOTE see the macro documentation for accepted forms
}
@@ -0,0 +1,16 @@
error: invalid arguments to pair macro invocation
--> $DIR/error_is_shown_in_downstream_crates.rs:6:20
|
LL | other::pair!(u8);
| ^ expected a type and value here
|
note: while trying to match `,`
--> $DIR/auxiliary/other.rs:11:12
|
LL | ($ty:ty, $value:expr) => {};
| ^
= note: this macro expects a type and a value, like `pair!(u8, 0)`
= note: see the macro documentation for accepted forms
error: aborting due to 1 previous error
@@ -0,0 +1,21 @@
#![feature(diagnostic_on_unmatch_args)]
#[diagnostic::on_unmatch_args(
message = "invalid arguments to {This} macro invocation",
label = "expected a type and value here",
note = "this macro expects a type and a value, like `pair!(u8, 0)`",
note = "see the macro documentation for accepted forms",
)]
macro_rules! pair {
//~^ NOTE when calling this macro
($ty:ty, $value:expr) => {};
//~^ NOTE while trying to match `,`
}
fn main() {
pair!(u8);
//~^ ERROR invalid arguments to pair macro invocation
//~| NOTE expected a type and value here
//~| NOTE this macro expects a type and a value, like `pair!(u8, 0)`
//~| NOTE see the macro documentation for accepted forms
}
@@ -0,0 +1,19 @@
error: invalid arguments to pair macro invocation
--> $DIR/message_and_label.rs:16:13
|
LL | macro_rules! pair {
| ----------------- when calling this macro
...
LL | pair!(u8);
| ^ expected a type and value here
|
note: while trying to match `,`
--> $DIR/message_and_label.rs:11:12
|
LL | ($ty:ty, $value:expr) => {};
| ^
= note: this macro expects a type and a value, like `pair!(u8, 0)`
= note: see the macro documentation for accepted forms
error: aborting due to 1 previous error
@@ -0,0 +1,21 @@
#![feature(diagnostic_on_unmatch_args)]
#[diagnostic::on_unmatch_args(
message = "{This}! expects exactly two arguments",
label = "unexpected extra input starts here",
note = "this macro expects a type and a value, like `pair!(u8, 0)`",
note = "make sure to pass both arguments",
)]
macro_rules! pair {
//~^ NOTE when calling this macro
($ty:ty, $value:expr) => {};
//~^ NOTE while trying to match meta-variable `$value:expr`
}
fn main() {
pair!(u8, 0, 42);
//~^ ERROR pair! expects exactly two arguments
//~| NOTE unexpected extra input starts here
//~| NOTE this macro expects a type and a value, like `pair!(u8, 0)`
//~| NOTE make sure to pass both arguments
}
@@ -0,0 +1,19 @@
error: pair! expects exactly two arguments
--> $DIR/notes_on_extra_args.rs:16:16
|
LL | macro_rules! pair {
| ----------------- when calling this macro
...
LL | pair!(u8, 0, 42);
| ^ unexpected extra input starts here
|
note: while trying to match meta-variable `$value:expr`
--> $DIR/notes_on_extra_args.rs:11:14
|
LL | ($ty:ty, $value:expr) => {};
| ^^^^^^^^^^^
= note: this macro expects a type and a value, like `pair!(u8, 0)`
= note: make sure to pass both arguments
error: aborting due to 1 previous error
@@ -0,0 +1,19 @@
#![feature(diagnostic_on_unmatch_args)]
#[diagnostic::on_unmatch_args(
note = "this macro expects a type and a value, like `pair!(u8, 0)`",
note = "make sure to pass both arguments",
)]
macro_rules! pair {
//~^ NOTE when calling this macro
($ty:ty, $value:expr) => {};
//~^ NOTE while trying to match `,`
}
fn main() {
pair!(u8);
//~^ ERROR unexpected end of macro invocation
//~| NOTE missing tokens in macro arguments
//~| NOTE this macro expects a type and a value, like `pair!(u8, 0)`
//~| NOTE make sure to pass both arguments
}
@@ -0,0 +1,19 @@
error: unexpected end of macro invocation
--> $DIR/on_unmatch_args.rs:14:13
|
LL | macro_rules! pair {
| ----------------- when calling this macro
...
LL | pair!(u8);
| ^ missing tokens in macro arguments
|
note: while trying to match `,`
--> $DIR/on_unmatch_args.rs:9:12
|
LL | ($ty:ty, $value:expr) => {};
| ^
= note: this macro expects a type and a value, like `pair!(u8, 0)`
= note: make sure to pass both arguments
error: aborting due to 1 previous error
@@ -0,0 +1,14 @@
#![feature(diagnostic_on_unmatch_args)]
#[diagnostic::on_unmatch_args(
message = "invalid route method",
note = "this macro expects a action, like `{This}!(get \"/hello\")`"
)]
macro_rules! route {
(get $path:literal) => {};
}
fn main() {
route!(post "/");
//~^ ERROR invalid route method
}
@@ -0,0 +1,18 @@
error: invalid route method
--> $DIR/other_match_macro_error.rs:12:12
|
LL | macro_rules! route {
| ------------------ when calling this macro
...
LL | route!(post "/");
| ^^^^ no rules expected this token in macro call
|
note: while trying to match `get`
--> $DIR/other_match_macro_error.rs:8:6
|
LL | (get $path:literal) => {};
| ^^^
= note: this macro expects a action, like `route!(get "/hello")`
error: aborting due to 1 previous error
@@ -0,0 +1,14 @@
//@ check-pass
#![feature(diagnostic_on_unmatch_args)]
#[diagnostic::on_unmatch_args(
message = "{T}! is missing arguments",
//~^ WARN unknown parameter `T`
)]
macro_rules! pair {
($ty:ty, $value:expr) => {};
}
fn main() {
pair!(u8, 0);
}
@@ -0,0 +1,11 @@
warning: unknown parameter `T`
--> $DIR/report_warning_on_invalid_formats.rs:5:17
|
LL | message = "{T}! is missing arguments",
| ^
|
= help: use `{This}` to refer to the macro name
= note: `#[warn(malformed_diagnostic_format_literals)]` (part of `#[warn(unknown_or_malformed_diagnostic_attributes)]`) on by default
warning: 1 warning emitted
@@ -0,0 +1,12 @@
//@ check-pass
#![feature(diagnostic_on_unmatch_args)]
#[diagnostic::on_unmatch_args = "foo"]
//~^ WARN malformed `diagnostic::on_unmatch_args` attribute [malformed_diagnostic_attributes]
macro_rules! pair {
($ty:ty, $value:expr) => {};
}
fn main() {
pair!(u8, 0);
}
@@ -0,0 +1,11 @@
warning: malformed `diagnostic::on_unmatch_args` attribute
--> $DIR/report_warning_on_invalid_meta_item_syntax.rs:4:1
|
LL | #[diagnostic::on_unmatch_args = "foo"]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ invalid option found here
|
= help: only `message`, `note` and `label` are allowed as options
= note: `#[warn(malformed_diagnostic_attributes)]` (part of `#[warn(unknown_or_malformed_diagnostic_attributes)]`) on by default
warning: 1 warning emitted
@@ -0,0 +1,12 @@
//@ check-pass
#![feature(diagnostic_on_unmatch_args)]
#[diagnostic::on_unmatch_args]
//~^ WARN missing options for `diagnostic::on_unmatch_args` attribute [malformed_diagnostic_attributes]
macro_rules! pair {
($ty:ty, $value:expr) => {};
}
fn main() {
pair!(u8, 0);
}
@@ -0,0 +1,11 @@
warning: missing options for `diagnostic::on_unmatch_args` attribute
--> $DIR/report_warning_on_missing_options.rs:4:1
|
LL | #[diagnostic::on_unmatch_args]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: at least one of the `message`, `note` and `label` options are expected
= note: `#[warn(malformed_diagnostic_attributes)]` (part of `#[warn(unknown_or_malformed_diagnostic_attributes)]`) on by default
warning: 1 warning emitted
@@ -0,0 +1,10 @@
//@ check-pass
#![feature(diagnostic_on_unmatch_args)]
#[diagnostic::on_unmatch_args(message = "not allowed here")]
//~^ WARN `#[diagnostic::on_unmatch_args]` can only be applied to macro definitions
struct Foo;
fn main() {
let _ = Foo;
}
@@ -0,0 +1,10 @@
warning: `#[diagnostic::on_unmatch_args]` can only be applied to macro definitions
--> $DIR/report_warning_on_non_macro.rs:4:1
|
LL | #[diagnostic::on_unmatch_args(message = "not allowed here")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: `#[warn(misplaced_diagnostic_attributes)]` (part of `#[warn(unknown_or_malformed_diagnostic_attributes)]`) on by default
warning: 1 warning emitted
@@ -0,0 +1,12 @@
//@ check-pass
#![feature(diagnostic_on_unmatch_args)]
#[diagnostic::on_unmatch_args(unsupported = "foo")]
//~^ WARN malformed `diagnostic::on_unmatch_args` attribute [malformed_diagnostic_attributes]
macro_rules! pair {
($ty:ty, $value:expr) => {};
}
fn main() {
pair!(u8, 0);
}
@@ -0,0 +1,11 @@
warning: malformed `diagnostic::on_unmatch_args` attribute
--> $DIR/report_warning_on_unknown_options.rs:4:31
|
LL | #[diagnostic::on_unmatch_args(unsupported = "foo")]
| ^^^^^^^^^^^^^^^^^^^ invalid option found here
|
= help: only `message`, `note` and `label` are allowed as options
= note: `#[warn(malformed_diagnostic_attributes)]` (part of `#[warn(unknown_or_malformed_diagnostic_attributes)]`) on by default
warning: 1 warning emitted
@@ -0,0 +1,15 @@
//! This is an unusual feature gate test, as it doesn't test the feature
//! gate, but the fact that not adding the feature gate will cause the
//! diagnostic to not emit the custom diagnostic message.
#[diagnostic::on_unmatch_args(note = "custom note")]
macro_rules! pair {
//~^ NOTE when calling this macro
($ty:ty, $value:expr) => {};
//~^ NOTE while trying to match `,`
}
fn main() {
pair!(u8);
//~^ ERROR unexpected end of macro invocation
//~| NOTE missing tokens in macro arguments
}
@@ -0,0 +1,17 @@
error: unexpected end of macro invocation
--> $DIR/feature-gate-diagnostic-on-unmatch-args.rs:12:13
|
LL | macro_rules! pair {
| ----------------- when calling this macro
...
LL | pair!(u8);
| ^ missing tokens in macro arguments
|
note: while trying to match `,`
--> $DIR/feature-gate-diagnostic-on-unmatch-args.rs:7:12
|
LL | ($ty:ty, $value:expr) => {};
| ^
error: aborting due to 1 previous error
@@ -6,6 +6,7 @@ LL | let _: field_of!(Struct);
|
note: while trying to match `,`
--> $SRC_DIR/core/src/field.rs:LL:COL
= note: this macro expects a container type and a field path, like `field_of!(Type, field)` or `field_of!(Enum, Variant.field)`
error: unexpected end of macro invocation
--> $DIR/invalid.rs:24:29
@@ -15,6 +16,7 @@ LL | let _: field_of!(Struct,);
|
note: while trying to match meta-variable `$fields:expr`
--> $SRC_DIR/core/src/field.rs:LL:COL
= note: this macro expects a container type and a field path, like `field_of!(Type, field)` or `field_of!(Enum, Variant.field)`
error: no rules expected `extra`
--> $DIR/invalid.rs:25:37
@@ -23,6 +25,7 @@ LL | let _: field_of!(Struct, field, extra);
| ^^^^^ no rules expected this token in macro call
|
= note: while trying to match sequence end
= note: this macro expects a container type and a field path, like `field_of!(Type, field)` or `field_of!(Enum, Variant.field)`
error: offset_of expects dot-separated field and variant names
--> $DIR/invalid.rs:27:28
@@ -6,6 +6,7 @@ LL | let _: field_of!(Struct);
|
note: while trying to match `,`
--> $SRC_DIR/core/src/field.rs:LL:COL
= note: this macro expects a container type and a field path, like `field_of!(Type, field)` or `field_of!(Enum, Variant.field)`
error: unexpected end of macro invocation
--> $DIR/invalid.rs:24:29
@@ -15,6 +16,7 @@ LL | let _: field_of!(Struct,);
|
note: while trying to match meta-variable `$fields:expr`
--> $SRC_DIR/core/src/field.rs:LL:COL
= note: this macro expects a container type and a field path, like `field_of!(Type, field)` or `field_of!(Enum, Variant.field)`
error: no rules expected `extra`
--> $DIR/invalid.rs:25:37
@@ -23,6 +25,7 @@ LL | let _: field_of!(Struct, field, extra);
| ^^^^^ no rules expected this token in macro call
|
= note: while trying to match sequence end
= note: this macro expects a container type and a field path, like `field_of!(Type, field)` or `field_of!(Enum, Variant.field)`
error: offset_of expects dot-separated field and variant names
--> $DIR/invalid.rs:27:28
@@ -6,6 +6,7 @@ LL | offset_of!(NotEnoughArguments);
|
note: while trying to match `,`
--> $SRC_DIR/core/src/mem/mod.rs:LL:COL
= note: this macro expects a container type and a (nested) field path, like `offset_of!(Type, field)`
error: unexpected end of macro invocation
--> $DIR/offset-of-arg-count.rs:5:45
@@ -15,6 +16,7 @@ LL | offset_of!(NotEnoughArgumentsWithAComma, );
|
note: while trying to match meta-variable `$fields:expr`
--> $SRC_DIR/core/src/mem/mod.rs:LL:COL
= note: this macro expects a container type and a (nested) field path, like `offset_of!(Type, field)`
error: no rules expected `too`
--> $DIR/offset-of-arg-count.rs:6:34
@@ -23,6 +25,7 @@ LL | offset_of!(Container, field, too many arguments);
| ^^^ no rules expected this token in macro call
|
= note: while trying to match sequence end
= note: this macro expects a container type and a (nested) field path, like `offset_of!(Type, field)`
error: unexpected token: `)`
--> $DIR/offset-of-arg-count.rs:9:21
@@ -72,6 +72,7 @@ LL | offset_of!((u8, u8), +1);
|
note: while trying to match meta-variable `$fields:expr`
--> $SRC_DIR/core/src/mem/mod.rs:LL:COL
= note: this macro expects a container type and a (nested) field path, like `offset_of!(Type, field)`
error: offset_of expects dot-separated field and variant names
--> $DIR/offset-of-tuple.rs:7:26
@@ -15,6 +15,8 @@ fn test_where_left_is_not_let() {
//~| ERROR binary assignment operation `+=` cannot be used in a let chain
//~| NOTE cannot use `+=` in a let chain
//~| HELP you might have meant to compare with `==` instead of assigning with `+=`
//~| ERROR mismatched types
//~| NOTE expected `bool`, found `()`
}
fn test_where_left_is_let() {
@@ -28,6 +30,8 @@ fn test_where_left_is_let() {
//~| ERROR binary assignment operation `+=` cannot be used in a let chain
//~| NOTE cannot use `+=` in a let chain
//~| HELP you might have meant to compare with `==` instead of assigning with `+=`
//~| ERROR mismatched types
//~| NOTE expected `bool`, found `()`
}
fn main() {
@@ -15,6 +15,8 @@ fn test_where_left_is_not_let() {
//~| ERROR binary assignment operation `+=` cannot be used in a let chain
//~| NOTE cannot use `+=` in a let chain
//~| HELP you might have meant to compare with `==` instead of assigning with `+=`
//~| ERROR mismatched types
//~| NOTE expected `bool`, found `()`
}
fn test_where_left_is_let() {
@@ -28,6 +30,8 @@ fn test_where_left_is_let() {
//~| ERROR binary assignment operation `+=` cannot be used in a let chain
//~| NOTE cannot use `+=` in a let chain
//~| HELP you might have meant to compare with `==` instead of assigning with `+=`
//~| ERROR mismatched types
//~| NOTE expected `bool`, found `()`
}
fn main() {
@@ -7,7 +7,7 @@ LL | if let _ = 1 && true && y += 2 {};
= note: only supported directly in conditions of `if` and `while` expressions
error: expected expression, found `let` statement
--> $DIR/let-chains-assign-add-incorrect.rs:22:8
--> $DIR/let-chains-assign-add-incorrect.rs:24:8
|
LL | if let _ = 1 && y += 2 {};
| ^^^^^^^^^
@@ -37,13 +37,19 @@ LL + if let _ = 1 && true && y == 2 {};
|
error[E0308]: mismatched types
--> $DIR/let-chains-assign-add-incorrect.rs:22:21
--> $DIR/let-chains-assign-add-incorrect.rs:8:8
|
LL | if let _ = 1 && true && y += 2 {};
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `bool`, found `()`
error[E0308]: mismatched types
--> $DIR/let-chains-assign-add-incorrect.rs:24:21
|
LL | if let _ = 1 && y += 2 {};
| ^ expected `bool`, found integer
error: binary assignment operation `+=` cannot be used in a let chain
--> $DIR/let-chains-assign-add-incorrect.rs:22:23
--> $DIR/let-chains-assign-add-incorrect.rs:24:23
|
LL | if let _ = 1 && y += 2 {};
| -------------- ^^ cannot use `+=` in a let chain
@@ -56,6 +62,12 @@ LL - if let _ = 1 && y += 2 {};
LL + if let _ = 1 && y == 2 {};
|
error: aborting due to 6 previous errors
error[E0308]: mismatched types
--> $DIR/let-chains-assign-add-incorrect.rs:24:8
|
LL | if let _ = 1 && y += 2 {};
| ^^^^^^^^^^^^^^^^^^^ expected `bool`, found `()`
error: aborting due to 8 previous errors
For more information about this error, try `rustc --explain E0308`.