Auto merge of #155239 - JonathanBrouwer:rollup-XUNKT4X, r=JonathanBrouwer

Rollup of 10 pull requests

Successful merges:

 - rust-lang/rust#155227 (`rust-analyzer` subtree update)
 - rust-lang/rust#153335 (Add #![unstable_removed(..)] attribute to track removed features)
 - rust-lang/rust#154932 (Handle RTN projections in assoc type restriction diagnostics)
 - rust-lang/rust#155096 (delegation: support proper interaction of user-specified args and impl Traits)
 - rust-lang/rust#155106 (cg_llvm: scalable vectors with `simd_cast` and `simd_select`)
 - rust-lang/rust#155140 (add regression test for OpenOptionsExt downstream compat)
 - rust-lang/rust#155182 (Make the expansion of guard metavars begin guard non-terminals)
 - rust-lang/rust#155226 (delegation: revert execution of hir_crate_items before delayed lowering)
 - rust-lang/rust#153997 (Use closures more consistently in `dep_graph.rs`.)
 - rust-lang/rust#155003 (update thin-vec)
This commit is contained in:
bors
2026-04-13 15:18:43 +00:00
199 changed files with 8885 additions and 3894 deletions
+2 -2
View File
@@ -5540,9 +5540,9 @@ dependencies = [
[[package]]
name = "thin-vec"
version = "0.2.14"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "144f754d318415ac792f9d69fc87abbbfc043ce2ef041c60f16ad828f638717d"
checksum = "da322882471314edc77fa5232c587bcb87c9df52bfd0d7d4826f8868ead61899"
[[package]]
name = "thiserror"
+20
View File
@@ -47,6 +47,8 @@
#[cfg(feature = "nightly")]
use rustc_data_structures::stable_hasher::StableOrd;
#[cfg(feature = "nightly")]
use rustc_error_messages::{DiagArgValue, IntoDiagArg};
#[cfg(feature = "nightly")]
use rustc_errors::{Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, msg};
use rustc_hashes::Hash64;
use rustc_index::{Idx, IndexSlice, IndexVec};
@@ -1775,6 +1777,24 @@ pub fn from_field_count(count: usize) -> Option<Self> {
}
}
#[cfg(feature = "nightly")]
impl IntoDiagArg for NumScalableVectors {
fn into_diag_arg(self, _: &mut Option<std::path::PathBuf>) -> DiagArgValue {
DiagArgValue::Str(std::borrow::Cow::Borrowed(match self.0 {
0 => panic!("`NumScalableVectors(0)` is illformed"),
1 => "one",
2 => "two",
3 => "three",
4 => "four",
5 => "five",
6 => "six",
7 => "seven",
8 => "eight",
_ => panic!("`NumScalableVectors(N)` for N>8 is illformed"),
}))
}
}
/// The way we represent values to the backend
///
/// Previously this was conflated with the "ABI" a type is given, as in the platform-specific ABI.
+1 -1
View File
@@ -15,6 +15,6 @@ rustc_macros = { path = "../rustc_macros" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_span = { path = "../rustc_span" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec = "0.2.12"
thin-vec = "0.2.15"
tracing = "0.1"
# tidy-alphabetical-end
+1 -1
View File
@@ -23,6 +23,6 @@ rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec = "0.2.12"
thin-vec = "0.2.15"
tracing = "0.1"
# tidy-alphabetical-end
@@ -433,7 +433,6 @@ fn lower_delegation_body(
// also nested delegations may need to access information about this code (#154332),
// so it is better to leave this code as opposed to bodies of extern functions,
// which are completely erased from existence.
// FIXME(fn_delegation): fix `help` in error message (see `inner-attr.stderr`)
if param_count == 0
&& let Some(block) = block
{
@@ -10,18 +10,38 @@
use crate::{LoweringContext, ResolverAstLoweringExt};
pub(super) enum DelegationGenerics<T> {
#[derive(Clone, Copy)]
pub(super) enum DelegationGenericsKind {
/// User-specified args are present: `reuse foo::<String>;`.
UserSpecified,
/// The default case when no user-specified args are present: `reuse Trait::foo;`.
Default(T),
Default,
/// In free-to-trait reuse, when the user specified args for the trait (`reuse Trait::<i32>::foo;`),
/// we need to both generate `Self` and process the user-specified args.
SelfAndUserSpecified(T),
SelfAndUserSpecified,
/// In delegations from trait impl to other entities like free functions or trait functions,
/// we want to generate a function whose generics match the generics of the signature
/// function in the trait.
TraitImpl(T, bool /* Has user-specified args */),
TraitImpl(bool /* Has user-specified args */),
}
pub(super) struct DelegationGenerics<T> {
generics: T,
kind: DelegationGenericsKind,
}
impl<'hir> DelegationGenerics<&'hir [ty::GenericParamDef]> {
fn default(generics: &'hir [ty::GenericParamDef]) -> Self {
DelegationGenerics { generics, kind: DelegationGenericsKind::Default }
}
fn user_specified(generics: &'hir [ty::GenericParamDef]) -> Self {
DelegationGenerics { generics, kind: DelegationGenericsKind::UserSpecified }
}
fn trait_impl(generics: &'hir [ty::GenericParamDef], user_specified: bool) -> Self {
DelegationGenerics { generics, kind: DelegationGenericsKind::TraitImpl(user_specified) }
}
}
/// Used for storing either ty generics or their uplifted HIR version. First we obtain
@@ -54,20 +74,19 @@ pub(super) struct GenericArgsPropagationDetails {
pub(super) use_args_in_sig_inheritance: bool,
}
impl<T> DelegationGenerics<T> {
fn args_propagation_details(&self) -> GenericArgsPropagationDetails {
impl DelegationGenericsKind {
fn args_propagation_details(self) -> GenericArgsPropagationDetails {
match self {
DelegationGenerics::UserSpecified | DelegationGenerics::SelfAndUserSpecified { .. } => {
GenericArgsPropagationDetails {
should_propagate: false,
use_args_in_sig_inheritance: true,
}
}
DelegationGenerics::TraitImpl(_, user_specified) => GenericArgsPropagationDetails {
should_propagate: !*user_specified,
DelegationGenericsKind::UserSpecified
| DelegationGenericsKind::SelfAndUserSpecified => GenericArgsPropagationDetails {
should_propagate: false,
use_args_in_sig_inheritance: true,
},
DelegationGenericsKind::TraitImpl(user_specified) => GenericArgsPropagationDetails {
should_propagate: !user_specified,
use_args_in_sig_inheritance: false,
},
DelegationGenerics::Default(_) => GenericArgsPropagationDetails {
DelegationGenericsKind::Default => GenericArgsPropagationDetails {
should_propagate: true,
use_args_in_sig_inheritance: false,
},
@@ -81,25 +100,9 @@ pub(super) fn into_hir_generics(
ctx: &mut LoweringContext<'_, 'hir, impl ResolverAstLoweringExt<'hir>>,
span: Span,
) -> &mut HirOrTyGenerics<'hir> {
if let HirOrTyGenerics::Ty(params) = self {
let mut uplift_params = |generics: &'hir [ty::GenericParamDef]| {
ctx.uplift_delegation_generic_params(span, generics)
};
let hir_generics = match params {
DelegationGenerics::UserSpecified => DelegationGenerics::UserSpecified,
DelegationGenerics::Default(params) => {
DelegationGenerics::Default(uplift_params(params))
}
DelegationGenerics::SelfAndUserSpecified(params) => {
DelegationGenerics::SelfAndUserSpecified(uplift_params(params))
}
DelegationGenerics::TraitImpl(params, user_specified) => {
DelegationGenerics::TraitImpl(uplift_params(params), *user_specified)
}
};
*self = HirOrTyGenerics::Hir(hir_generics);
if let HirOrTyGenerics::Ty(ty) = self {
let params = ctx.uplift_delegation_generic_params(span, ty.generics);
*self = HirOrTyGenerics::Hir(DelegationGenerics { generics: params, kind: ty.kind });
}
self
@@ -108,12 +111,7 @@ pub(super) fn into_hir_generics(
fn hir_generics_or_empty(&self) -> &'hir hir::Generics<'hir> {
match self {
HirOrTyGenerics::Ty(_) => hir::Generics::empty(),
HirOrTyGenerics::Hir(hir_generics) => match hir_generics {
DelegationGenerics::UserSpecified => hir::Generics::empty(),
DelegationGenerics::Default(generics)
| DelegationGenerics::SelfAndUserSpecified(generics)
| DelegationGenerics::TraitImpl(generics, _) => generics,
},
HirOrTyGenerics::Hir(hir) => hir.generics,
}
}
@@ -127,21 +125,16 @@ pub(super) fn into_generic_args(
HirOrTyGenerics::Ty(_) => {
bug!("Attempting to get generic args before uplifting to HIR")
}
HirOrTyGenerics::Hir(hir_generics) => match hir_generics {
DelegationGenerics::UserSpecified => hir::GenericArgs::NONE,
DelegationGenerics::Default(generics)
| DelegationGenerics::SelfAndUserSpecified(generics)
| DelegationGenerics::TraitImpl(generics, _) => {
ctx.create_generics_args_from_params(generics.params, add_lifetimes, span)
}
},
HirOrTyGenerics::Hir(hir) => {
ctx.create_generics_args_from_params(hir.generics.params, add_lifetimes, span)
}
}
}
pub(super) fn args_propagation_details(&self) -> GenericArgsPropagationDetails {
match self {
HirOrTyGenerics::Ty(ty_generics) => ty_generics.args_propagation_details(),
HirOrTyGenerics::Hir(hir_generics) => hir_generics.args_propagation_details(),
HirOrTyGenerics::Ty(ty) => ty.kind.args_propagation_details(),
HirOrTyGenerics::Hir(hir) => hir.kind.args_propagation_details(),
}
}
}
@@ -231,9 +224,10 @@ pub(super) fn uplift_delegation_generics(
if matches!(delegation_parent_kind, DefKind::Impl { of_trait: true }) {
// Considering parent generics, during signature inheritance
// we will take those args that are in trait impl header trait ref.
let parent = GenericsGenerationResult::new(DelegationGenerics::TraitImpl(&[], true));
let parent = DelegationGenerics::trait_impl(&[], true);
let parent = GenericsGenerationResult::new(parent);
let child = DelegationGenerics::TraitImpl(sig_params, child_user_specified);
let child = DelegationGenerics::trait_impl(sig_params, child_user_specified);
let child = GenericsGenerationResult::new(child);
return GenericsGenerationResults { parent, child };
@@ -257,22 +251,28 @@ pub(super) fn uplift_delegation_generics(
if segments[len - 2].args.is_some() {
if generate_self {
// Take only the first `Self` parameter; this is a trait, so `Self` must be present.
DelegationGenerics::SelfAndUserSpecified(&sig_parent_params[..1])
DelegationGenerics {
kind: DelegationGenericsKind::SelfAndUserSpecified,
generics: &sig_parent_params[..1],
}
} else {
DelegationGenerics::UserSpecified
DelegationGenerics::user_specified(&[])
}
} else {
let skip_self = usize::from(!generate_self);
DelegationGenerics::Default(&sig_parent_params[skip_self..])
DelegationGenerics::default(&sig_parent_params[skip_self..])
}
} else {
DelegationGenerics::<&'hir [ty::GenericParamDef]>::Default(&[])
DelegationGenerics::default(&[])
};
let child_generics = if child_user_specified {
DelegationGenerics::UserSpecified
let synth_params_index =
sig_params.iter().position(|p| p.kind.is_synthetic()).unwrap_or(sig_params.len());
DelegationGenerics::user_specified(&sig_params[synth_params_index..])
} else {
DelegationGenerics::Default(sig_params)
DelegationGenerics::default(sig_params)
};
GenericsGenerationResults {
+1 -1
View File
@@ -38,7 +38,7 @@ pub(super) enum Owners<'a, 'hir> {
}
impl<'hir> Owners<'_, 'hir> {
pub(super) fn get_or_insert_mut(&mut self, def_id: LocalDefId) -> &mut hir::MaybeOwner<'hir> {
fn get_or_insert_mut(&mut self, def_id: LocalDefId) -> &mut hir::MaybeOwner<'hir> {
match self {
Owners::IndexVec(index_vec) => {
index_vec.ensure_contains_elem(def_id, || hir::MaybeOwner::Phantom)
+5 -22
View File
@@ -39,7 +39,6 @@
use std::sync::Arc;
use rustc_ast::node_id::NodeMap;
use rustc_ast::visit::AssocCtxt;
use rustc_ast::{self as ast, *};
use rustc_attr_parsing::{AttributeParser, Late, OmitDoc};
use rustc_data_structures::fingerprint::Fingerprint;
@@ -634,29 +633,13 @@ pub fn lower_to_hir(tcx: TyCtxt<'_>, (): ()) -> mid_hir::Crate<'_> {
let mut delayed_ids: FxIndexSet<LocalDefId> = Default::default();
for def_id in ast_index.indices() {
let delayed_owner_kind = match &ast_index[def_id] {
AstOwner::Item(Item { kind: ItemKind::Delegation(_), .. }) => {
Some(hir::DelayedOwnerKind::Item)
match &ast_index[def_id] {
AstOwner::Item(Item { kind: ItemKind::Delegation { .. }, .. })
| AstOwner::AssocItem(Item { kind: AssocItemKind::Delegation { .. }, .. }, _) => {
delayed_ids.insert(def_id);
}
AstOwner::AssocItem(Item { kind: AssocItemKind::Delegation(_), .. }, ctx) => {
Some(match ctx {
AssocCtxt::Trait => hir::DelayedOwnerKind::TraitItem,
AssocCtxt::Impl { .. } => hir::DelayedOwnerKind::ImplItem,
})
}
_ => None,
_ => lowerer.lower_node(def_id),
};
if let Some(kind) = delayed_owner_kind {
delayed_ids.insert(def_id);
let owner = lowerer.owners.get_or_insert_mut(def_id);
if let hir::MaybeOwner::Phantom = owner {
*owner = hir::MaybeOwner::Delayed(kind)
}
} else {
lowerer.lower_node(def_id);
}
}
// Don't hash unless necessary, because it's expensive.
+1 -1
View File
@@ -18,5 +18,5 @@ rustc_macros = { path = "../rustc_macros" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
thin-vec = "0.2.12"
thin-vec = "0.2.15"
# tidy-alphabetical-end
+1 -1
View File
@@ -13,5 +13,5 @@ rustc_span = { path = "../rustc_span" }
[dev-dependencies]
# tidy-alphabetical-start
thin-vec = "0.2.12"
thin-vec = "0.2.15"
# tidy-alphabetical-end
+1 -1
View File
@@ -19,5 +19,5 @@ rustc_parse_format = { path = "../rustc_parse_format" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
thin-vec = "0.2.12"
thin-vec = "0.2.15"
# tidy-alphabetical-end
@@ -2,6 +2,7 @@
use rustc_errors::ErrorGuaranteed;
use rustc_feature::ACCEPTED_LANG_FEATURES;
use rustc_hir::attrs::UnstableRemovedFeature;
use rustc_hir::target::GenericParamKind;
use rustc_hir::{
DefaultBodyStability, MethodKind, PartialConstStability, Stability, StabilityLevel,
@@ -476,3 +477,89 @@ pub(crate) fn parse_unstability<S: Stage>(
(Err(ErrorGuaranteed { .. }), _) | (_, Err(ErrorGuaranteed { .. })) => None,
}
}
pub(crate) struct UnstableRemovedParser;
impl<S: Stage> CombineAttributeParser<S> for UnstableRemovedParser {
type Item = UnstableRemovedFeature;
const PATH: &[Symbol] = &[sym::unstable_removed];
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
const TEMPLATE: AttributeTemplate =
template!(List: &[r#"feature = "name", reason = "...", link = "...", since = "version""#]);
const CONVERT: ConvertFn<Self::Item> = |items, _| AttributeKind::UnstableRemoved(items);
fn extend(
cx: &mut AcceptContext<'_, '_, S>,
args: &ArgParser,
) -> impl IntoIterator<Item = Self::Item> {
let mut feature = None;
let mut reason = None;
let mut link = None;
let mut since = None;
if !cx.features().staged_api() {
cx.emit_err(session_diagnostics::StabilityOutsideStd { span: cx.attr_span });
return None;
}
let ArgParser::List(list) = args else {
let attr_span = cx.attr_span;
cx.adcx().expected_list(attr_span, args);
return None;
};
for param in list.mixed() {
let Some(param) = param.meta_item() else {
cx.adcx().expected_not_literal(param.span());
return None;
};
let Some(word) = param.path().word() else {
cx.adcx().expected_specific_argument(
param.span(),
&[sym::feature, sym::reason, sym::link, sym::since],
);
return None;
};
match word.name {
sym::feature => insert_value_into_option_or_error(cx, &param, &mut feature, word)?,
sym::since => insert_value_into_option_or_error(cx, &param, &mut since, word)?,
sym::reason => insert_value_into_option_or_error(cx, &param, &mut reason, word)?,
sym::link => insert_value_into_option_or_error(cx, &param, &mut link, word)?,
_ => {
cx.adcx().expected_specific_argument(
param.span(),
&[sym::feature, sym::reason, sym::link, sym::since],
);
return None;
}
}
}
// Check all the arguments are present
let Some(feature) = feature else {
cx.adcx().missing_name_value(list.span, sym::feature);
return None;
};
let Some(reason) = reason else {
cx.adcx().missing_name_value(list.span, sym::reason);
return None;
};
let Some(link) = link else {
cx.adcx().missing_name_value(list.span, sym::link);
return None;
};
let Some(since) = since else {
cx.adcx().missing_name_value(list.span, sym::since);
return None;
};
let Some(version) = parse_version(since) else {
cx.emit_err(session_diagnostics::InvalidSince { span: cx.attr_span });
return None;
};
Some(UnstableRemovedFeature { feature, reason, link, since: version })
}
}
@@ -179,6 +179,7 @@ mod late {
Combine<RustcThenThisWouldNeedParser>,
Combine<TargetFeatureParser>,
Combine<UnstableFeatureBoundParser>,
Combine<UnstableRemovedParser>,
// tidy-alphabetical-end
// tidy-alphabetical-start
@@ -776,6 +777,11 @@ pub(crate) fn expected_name_value(
self.emit_parse_error(span, AttributeParseErrorReason::ExpectedNameValue(name))
}
/// Emit an error that a `name = value` argument is missing in a list of name-value pairs.
pub(crate) fn missing_name_value(&mut self, span: Span, name: Symbol) -> ErrorGuaranteed {
self.emit_parse_error(span, AttributeParseErrorReason::MissingNameValue(name))
}
/// Emit an error that a `name = value` pair was found where that name was already seen.
pub(crate) fn duplicate_key(&mut self, span: Span, key: Symbol) -> ErrorGuaranteed {
self.emit_parse_error(span, AttributeParseErrorReason::DuplicateKey(key))
@@ -568,6 +568,7 @@ pub(crate) enum AttributeParseErrorReason<'a> {
ExpectedNonEmptyStringLiteral,
ExpectedNotLiteral,
ExpectedNameValue(Option<Symbol>),
MissingNameValue(Symbol),
DuplicateKey(Symbol),
ExpectedSpecificArgument {
possibilities: &'a [Symbol],
@@ -823,6 +824,9 @@ fn into_diag(self, dcx: DiagCtxtHandle<'a>, level: Level) -> Diag<'a, G> {
format!("expected this to be of the form `{name} = \"...\"`"),
);
}
AttributeParseErrorReason::MissingNameValue(name) => {
diag.span_label(self.span, format!("missing argument `{name} = \"...\"`"));
}
AttributeParseErrorReason::ExpectedSpecificArgument {
possibilities,
strings,
+1 -1
View File
@@ -29,7 +29,7 @@ rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec = "0.2.12"
thin-vec = "0.2.15"
tracing = "0.1"
# tidy-alphabetical-end
@@ -380,11 +380,9 @@ fn codegen_cgu_content(
fn module_codegen(
tcx: TyCtxt<'_>,
(global_asm_config, cgu_name, token): (
Arc<GlobalAsmConfig>,
rustc_span::Symbol,
ConcurrencyLimiterToken,
),
global_asm_config: Arc<GlobalAsmConfig>,
cgu_name: rustc_span::Symbol,
token: ConcurrencyLimiterToken,
) -> OngoingModuleCodegen {
let mut module = make_module(tcx.sess, cgu_name.as_str().to_string());
@@ -513,8 +511,14 @@ pub(crate) fn run_aot(tcx: TyCtxt<'_>) -> Box<OngoingCodegen> {
let (module, _) = tcx.dep_graph.with_task(
dep_node,
tcx,
(global_asm_config.clone(), cgu.name(), concurrency_limiter.acquire(tcx.dcx())),
module_codegen,
|| {
module_codegen(
tcx,
global_asm_config.clone(),
cgu.name(),
concurrency_limiter.acquire(tcx.dcx()),
)
},
Some(rustc_middle::dep_graph::hash_result),
);
IntoDynSyncSend(module)
+4 -3
View File
@@ -83,8 +83,7 @@ pub fn compile_codegen_unit(
let (module, _) = tcx.dep_graph.with_task(
dep_node,
tcx,
(cgu_name, target_info, lto_supported),
module_codegen,
|| module_codegen(tcx, cgu_name, target_info, lto_supported),
Some(dep_graph::hash_result),
);
let time_to_codegen = start_time.elapsed();
@@ -96,7 +95,9 @@ pub fn compile_codegen_unit(
fn module_codegen(
tcx: TyCtxt<'_>,
(cgu_name, target_info, lto_supported): (Symbol, LockedTargetInfo, bool),
cgu_name: Symbol,
target_info: LockedTargetInfo,
lto_supported: bool,
) -> ModuleCodegen<GccContext> {
let cgu = tcx.codegen_unit(cgu_name);
// Instantiate monomorphizations without filling out definitions yet...
+1 -2
View File
@@ -65,8 +65,7 @@ pub(crate) fn compile_codegen_unit(
let (module, _) = tcx.dep_graph.with_task(
dep_node,
tcx,
cgu_name,
module_codegen,
|| module_codegen(tcx, cgu_name),
Some(dep_graph::hash_result),
);
let time_to_codegen = start_time.elapsed();
+125 -111
View File
@@ -606,27 +606,6 @@ fn codegen_intrinsic_call(
self.pointercast(val, self.type_ptr())
}
sym::sve_cast => {
let Some((in_cnt, in_elem, in_num_vecs)) =
args[0].layout.ty.scalable_vector_parts(self.cx.tcx)
else {
bug!("input parameter to `sve_cast` was not scalable vector");
};
let out_layout = self.layout_of(fn_args.type_at(1));
let Some((out_cnt, out_elem, out_num_vecs)) =
out_layout.ty.scalable_vector_parts(self.cx.tcx)
else {
bug!("output parameter to `sve_cast` was not scalable vector");
};
assert_eq!(in_cnt, out_cnt);
assert_eq!(in_num_vecs, out_num_vecs);
let out_llty = self.backend_type(out_layout);
match simd_cast(self, sym::simd_cast, args, out_llty, in_elem, out_elem) {
Some(val) => val,
_ => bug!("could not cast scalable vectors"),
}
}
sym::sve_tuple_create2 => {
assert_matches!(
self.layout_of(fn_args.type_at(0)).backend_repr,
@@ -1668,6 +1647,23 @@ macro_rules! require_simd {
}};
}
macro_rules! require_simd_or_scalable {
($ty: expr, $variant:ident) => {{
require!(
$ty.is_simd() || $ty.is_scalable_vector(),
InvalidMonomorphization::$variant { span, name, ty: $ty }
);
if $ty.is_simd() {
let (len, ty) = $ty.simd_size_and_type(bx.tcx());
(len, ty, None)
} else {
let (count, ty, num_vecs) =
$ty.scalable_vector_parts(bx.tcx()).expect("`is_scalable_vector` was wrong");
(count as u64, ty, Some(num_vecs))
}
}};
}
/// Returns the bitwidth of the `$ty` argument if it is an `Int` or `Uint` type.
macro_rules! require_int_or_uint_ty {
($ty: expr, $diag: expr) => {
@@ -1787,8 +1783,19 @@ fn vector_mask_to_bitmask<'a, 'll, 'tcx>(
return Ok(splat);
}
// every intrinsic below takes a SIMD vector as its first argument
let (in_len, in_elem) = require_simd!(args[0].layout.ty, SimdInput);
let supports_scalable = match name {
sym::simd_cast | sym::simd_select => true,
_ => false,
};
// Every intrinsic below takes a SIMD vector as its first argument. Some intrinsics also accept
// scalable vectors. `require_simd_or_scalable` is used regardless as it'll do the right thing
// for non-scalable vectors, and an additional check to prohibit scalable vectors for those
// intrinsics that do not support them is added.
if !supports_scalable {
let _ = require_simd!(args[0].layout.ty, SimdInput);
}
let (in_len, in_elem, in_num_vecs) = require_simd_or_scalable!(args[0].layout.ty, SimdInput);
let in_ty = args[0].layout.ty;
let comparison = match name {
@@ -1977,7 +1984,7 @@ fn vector_mask_to_bitmask<'a, 'll, 'tcx>(
if name == sym::simd_select {
let m_elem_ty = in_elem;
let m_len = in_len;
let (v_len, _) = require_simd!(args[1].layout.ty, SimdArgument);
let (v_len, _, _) = require_simd_or_scalable!(args[1].layout.ty, SimdArgument);
require!(
m_len == v_len,
InvalidMonomorphization::MismatchedLengths { span, name, m_len, v_len }
@@ -2781,7 +2788,7 @@ macro_rules! bitwise_red {
}
if name == sym::simd_cast || name == sym::simd_as {
let (out_len, out_elem) = require_simd!(ret_ty, SimdReturn);
let (out_len, out_elem, out_num_vecs) = require_simd_or_scalable!(ret_ty, SimdReturn);
require!(
in_len == out_len,
InvalidMonomorphization::ReturnLengthInputType {
@@ -2793,9 +2800,99 @@ macro_rules! bitwise_red {
out_len
}
);
match simd_cast(bx, name, args, llret_ty, in_elem, out_elem) {
Some(val) => return Ok(val),
None => return_error!(InvalidMonomorphization::UnsupportedCast {
require!(
in_num_vecs == out_num_vecs,
InvalidMonomorphization::ReturnNumVecsInputType {
span,
name,
in_num_vecs: in_num_vecs.unwrap_or(NumScalableVectors(1)),
in_ty,
ret_ty,
out_num_vecs: out_num_vecs.unwrap_or(NumScalableVectors(1))
}
);
// Casting cares about nominal type, not just structural type
if in_elem == out_elem {
return Ok(args[0].immediate());
}
#[derive(Copy, Clone)]
enum Sign {
Unsigned,
Signed,
}
use Sign::*;
enum Style {
Float,
Int(Sign),
Unsupported,
}
let (in_style, in_width) = match in_elem.kind() {
// vectors of pointer-sized integers should've been
// disallowed before here, so this unwrap is safe.
ty::Int(i) => (
Style::Int(Signed),
i.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Uint(u) => (
Style::Int(Unsigned),
u.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Float(f) => (Style::Float, f.bit_width()),
_ => (Style::Unsupported, 0),
};
let (out_style, out_width) = match out_elem.kind() {
ty::Int(i) => (
Style::Int(Signed),
i.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Uint(u) => (
Style::Int(Unsigned),
u.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Float(f) => (Style::Float, f.bit_width()),
_ => (Style::Unsupported, 0),
};
match (in_style, out_style) {
(Style::Int(sign), Style::Int(_)) => {
return Ok(match in_width.cmp(&out_width) {
Ordering::Greater => bx.trunc(args[0].immediate(), llret_ty),
Ordering::Equal => args[0].immediate(),
Ordering::Less => match sign {
Sign::Signed => bx.sext(args[0].immediate(), llret_ty),
Sign::Unsigned => bx.zext(args[0].immediate(), llret_ty),
},
});
}
(Style::Int(Sign::Signed), Style::Float) => {
return Ok(bx.sitofp(args[0].immediate(), llret_ty));
}
(Style::Int(Sign::Unsigned), Style::Float) => {
return Ok(bx.uitofp(args[0].immediate(), llret_ty));
}
(Style::Float, Style::Int(sign)) => {
return Ok(match (sign, name == sym::simd_as) {
(Sign::Unsigned, false) => bx.fptoui(args[0].immediate(), llret_ty),
(Sign::Signed, false) => bx.fptosi(args[0].immediate(), llret_ty),
(_, true) => bx.cast_float_to_int(
matches!(sign, Sign::Signed),
args[0].immediate(),
llret_ty,
),
});
}
(Style::Float, Style::Float) => {
return Ok(match in_width.cmp(&out_width) {
Ordering::Greater => bx.fptrunc(args[0].immediate(), llret_ty),
Ordering::Equal => args[0].immediate(),
Ordering::Less => bx.fpext(args[0].immediate(), llret_ty),
});
}
_ => return_error!(InvalidMonomorphization::UnsupportedCast {
span,
name,
in_ty,
@@ -2977,86 +3074,3 @@ macro_rules! arith_unary {
span_bug!(span, "unknown SIMD intrinsic");
}
/// Implementation of `core::intrinsics::simd_cast`, re-used by `core::scalable::sve_cast`.
fn simd_cast<'ll, 'tcx>(
bx: &mut Builder<'_, 'll, 'tcx>,
name: Symbol,
args: &[OperandRef<'tcx, &'ll Value>],
llret_ty: &'ll Type,
in_elem: Ty<'tcx>,
out_elem: Ty<'tcx>,
) -> Option<&'ll Value> {
// Casting cares about nominal type, not just structural type
if in_elem == out_elem {
return Some(args[0].immediate());
}
#[derive(Copy, Clone)]
enum Sign {
Unsigned,
Signed,
}
use Sign::*;
enum Style {
Float,
Int(Sign),
Unsupported,
}
let (in_style, in_width) = match in_elem.kind() {
// vectors of pointer-sized integers should've been
// disallowed before here, so this unwrap is safe.
ty::Int(i) => (
Style::Int(Signed),
i.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Uint(u) => (
Style::Int(Unsigned),
u.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Float(f) => (Style::Float, f.bit_width()),
_ => (Style::Unsupported, 0),
};
let (out_style, out_width) = match out_elem.kind() {
ty::Int(i) => (
Style::Int(Signed),
i.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Uint(u) => (
Style::Int(Unsigned),
u.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Float(f) => (Style::Float, f.bit_width()),
_ => (Style::Unsupported, 0),
};
match (in_style, out_style) {
(Style::Int(sign), Style::Int(_)) => Some(match in_width.cmp(&out_width) {
Ordering::Greater => bx.trunc(args[0].immediate(), llret_ty),
Ordering::Equal => args[0].immediate(),
Ordering::Less => match sign {
Sign::Signed => bx.sext(args[0].immediate(), llret_ty),
Sign::Unsigned => bx.zext(args[0].immediate(), llret_ty),
},
}),
(Style::Int(Sign::Signed), Style::Float) => Some(bx.sitofp(args[0].immediate(), llret_ty)),
(Style::Int(Sign::Unsigned), Style::Float) => {
Some(bx.uitofp(args[0].immediate(), llret_ty))
}
(Style::Float, Style::Int(sign)) => Some(match (sign, name == sym::simd_as) {
(Sign::Unsigned, false) => bx.fptoui(args[0].immediate(), llret_ty),
(Sign::Signed, false) => bx.fptosi(args[0].immediate(), llret_ty),
(_, true) => {
bx.cast_float_to_int(matches!(sign, Sign::Signed), args[0].immediate(), llret_ty)
}
}),
(Style::Float, Style::Float) => Some(match in_width.cmp(&out_width) {
Ordering::Greater => bx.fptrunc(args[0].immediate(), llret_ty),
Ordering::Equal => args[0].immediate(),
Ordering::Less => bx.fpext(args[0].immediate(), llret_ty),
}),
_ => None,
}
}
+12
View File
@@ -6,6 +6,7 @@
use std::path::{Path, PathBuf};
use std::process::ExitStatus;
use rustc_abi::NumScalableVectors;
use rustc_errors::codes::*;
use rustc_errors::{
Diag, DiagArgValue, DiagCtxtHandle, DiagSymbolList, Diagnostic, EmissionGuarantee, IntoDiagArg,
@@ -809,6 +810,17 @@ pub enum InvalidMonomorphization<'tcx> {
out_len: u64,
},
#[diag("invalid monomorphization of `{$name}` intrinsic: expected return type with {$in_num_vecs} vectors (same as input type `{$in_ty}`), found `{$ret_ty}` with length {$out_num_vecs}", code = E0511)]
ReturnNumVecsInputType {
#[primary_span]
span: Span,
name: Symbol,
in_num_vecs: NumScalableVectors,
in_ty: Ty<'tcx>,
ret_ty: Ty<'tcx>,
out_num_vecs: NumScalableVectors,
},
#[diag("invalid monomorphization of `{$name}` intrinsic: expected second argument with length {$in_len} (same as input type `{$in_ty}`), found `{$arg_ty}` with length {$out_len}", code = E0511)]
SecondArgumentLength {
#[primary_span]
+7 -3
View File
@@ -23,17 +23,21 @@ rustc_index = { path = "../rustc_index", package = "rustc_index" }
rustc_macros = { path = "../rustc_macros" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_thread_pool = { path = "../rustc_thread_pool" }
smallvec = { version = "1.8.1", features = ["const_generics", "union", "may_dangle"] }
smallvec = { version = "1.8.1", features = [
"const_generics",
"union",
"may_dangle",
] }
stacker = "0.1.17"
tempfile = "3.2"
thin-vec = "0.2.12"
thin-vec = "0.2.15"
tracing = "0.1"
# tidy-alphabetical-end
[dependencies.hashbrown]
version = "0.16.1"
default-features = false
features = ["nightly"] # for may_dangle
features = ["nightly"] # for may_dangle
[target.'cfg(windows)'.dependencies.windows]
version = "0.61.0"
@@ -80,7 +80,6 @@
pub mod sync;
pub mod tagged_ptr;
pub mod temp_dir;
pub mod thinvec;
pub mod thousands;
pub mod transitive_relation;
pub mod unhash;
@@ -1,92 +0,0 @@
//! This is a copy-paste of `Vec::extract_if` for `ThinVec`.
//!
//! FIXME: once <https://github.com/Gankra/thin-vec/pull/66> is merged, this can be removed.
use std::{ptr, slice};
use thin_vec::ThinVec;
/// An iterator for [`ThinVec`] which uses a closure to determine if an element should be removed.
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct ExtractIf<'a, T, F> {
vec: &'a mut ThinVec<T>,
/// The index of the item that will be inspected by the next call to `next`.
idx: usize,
/// The number of items that have been drained (removed) thus far.
del: usize,
/// The original length of `vec` prior to draining.
old_len: usize,
/// The filter test predicate.
pred: F,
}
impl<'a, T, F> ExtractIf<'a, T, F>
where
F: FnMut(&mut T) -> bool,
{
pub fn new(vec: &'a mut ThinVec<T>, filter: F) -> Self {
let old_len = vec.len();
// Guard against us getting leaked (leak amplification)
unsafe {
vec.set_len(0);
}
ExtractIf { vec, idx: 0, del: 0, old_len, pred: filter }
}
}
impl<T, F> Iterator for ExtractIf<'_, T, F>
where
F: FnMut(&mut T) -> bool,
{
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
unsafe {
while self.idx < self.old_len {
let i = self.idx;
let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
let drained = (self.pred)(&mut v[i]);
// Update the index *after* the predicate is called. If the index
// is updated prior and the predicate panics, the element at this
// index would be leaked.
self.idx += 1;
if drained {
self.del += 1;
return Some(ptr::read(&v[i]));
} else if self.del > 0 {
let del = self.del;
let src: *const T = &v[i];
let dst: *mut T = &mut v[i - del];
ptr::copy_nonoverlapping(src, dst, 1);
}
}
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
(0, Some(self.old_len - self.idx))
}
}
// Note: no `FnMut` bound here — drop glue must run for the iterator
// regardless of the predicate type.
impl<A, F> Drop for ExtractIf<'_, A, F> {
    /// Restores the vector to a consistent state: backshifts any unprocessed
    /// tail over the holes left by drained elements (relevant when iteration
    /// stopped early, e.g. the predicate panicked or the iterator was dropped
    /// mid-way), then sets the length to the number of surviving elements.
    fn drop(&mut self) {
        unsafe {
            if self.idx < self.old_len && self.del > 0 {
                // This is a pretty messed up state, and there isn't really an
                // obviously right thing to do. We don't want to keep trying
                // to execute `pred`, so we just backshift all the unprocessed
                // elements and tell the vec that they still exist. The backshift
                // is required to prevent a double-drop of the last successfully
                // drained item prior to a panic in the predicate.
                let ptr = self.vec.as_mut_ptr();
                let src = ptr.add(self.idx);
                let dst = src.sub(self.del);
                let tail_len = self.old_len - self.idx;
                // Ranges may overlap; `copy_to` is a (potentially overlapping)
                // memmove, unlike the per-element `copy_nonoverlapping` in `next`.
                src.copy_to(dst, tail_len);
            }
            // Re-expose the kept elements: everything that wasn't drained.
            self.vec.set_len(self.old_len - self.del);
        }
    }
}
+1 -1
View File
@@ -30,6 +30,6 @@ rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
scoped-tls = "1.0"
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec = "0.2.12"
thin-vec = "0.2.15"
tracing = "0.1"
# tidy-alphabetical-end
@@ -928,6 +928,11 @@ pub struct BuiltinAttribute {
unstable_feature_bound, Normal, template!(Word, List: &["feat1, feat2, ..."]),
DuplicatesOk, EncodeCrossCrate::No,
),
ungated!(
unstable_removed, CrateLevel,
template!(List: &[r#"feature = "name", reason = "...", link = "...", since = "version""#]),
DuplicatesOk, EncodeCrossCrate::Yes
),
ungated!(
rustc_const_unstable, Normal, template!(List: &[r#"feature = "name""#]),
DuplicatesOk, EncodeCrossCrate::Yes
-14
View File
@@ -310,18 +310,4 @@ macro_rules! declare_features {
// -------------------------------------------------------------------------
// feature-group-end: removed features
// -------------------------------------------------------------------------
// -------------------------------------------------------------------------
// feature-group-start: removed library features
// -------------------------------------------------------------------------
//
// FIXME(#141617): we should have a better way to track removed library features, but we reuse
// the infrastructure here so users still get hints. The symbols used here can be remove from
// `symbol.rs` when that happens.
(removed, concat_idents, "1.90.0", Some(29599),
Some("use the `${concat(..)}` metavariable expression instead"), 142704),
// -------------------------------------------------------------------------
// feature-group-end: removed library features
// -------------------------------------------------------------------------
);
+1 -1
View File
@@ -22,6 +22,6 @@ rustc_serialize = { path = "../rustc_serialize" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec = "0.2.12"
thin-vec = "0.2.15"
tracing = "0.1"
# tidy-alphabetical-end
@@ -894,6 +894,14 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
}
}
#[derive(Clone, Debug, HashStable_Generic, Encodable, Decodable, PrintAttribute)]
pub struct UnstableRemovedFeature {
pub feature: Symbol,
pub reason: Symbol,
pub link: Symbol,
pub since: RustcVersion,
}
/// Represents parsed *built-in* inert attributes.
///
/// ## Overview
@@ -1648,6 +1656,9 @@ pub enum AttributeKind {
/// Represents `#[unstable_feature_bound]`.
UnstableFeatureBound(ThinVec<(Symbol, Span)>),
/// Represents all `#![unstable_removed(...)]` features
UnstableRemoved(ThinVec<UnstableRemovedFeature>),
/// Represents `#[used]`
Used {
used_by: UsedBy,
@@ -199,6 +199,7 @@ pub fn encode_cross_crate(&self) -> EncodeCrossCrate {
TrackCaller(..) => Yes,
TypeLengthLimit { .. } => No,
UnstableFeatureBound(..) => No,
UnstableRemoved(..) => Yes,
Used { .. } => No,
WindowsSubsystem(..) => No,
// tidy-alphabetical-end
+2 -17
View File
@@ -1641,18 +1641,10 @@ pub fn node(&self) -> OwnerNode<'tcx> {
}
}
#[derive(Copy, Clone, Debug, HashStable_Generic)]
pub enum DelayedOwnerKind {
Item,
ImplItem,
TraitItem,
}
#[derive(Copy, Clone, Debug, HashStable_Generic)]
pub enum MaybeOwner<'tcx> {
Owner(&'tcx OwnerInfo<'tcx>),
NonOwner(HirId),
Delayed(DelayedOwnerKind),
/// Used as a placeholder for unused LocalDefId.
Phantom,
}
@@ -1661,19 +1653,12 @@ impl<'tcx> MaybeOwner<'tcx> {
pub fn as_owner(self) -> Option<&'tcx OwnerInfo<'tcx>> {
match self {
MaybeOwner::Owner(i) => Some(i),
_ => None,
MaybeOwner::NonOwner(_) | MaybeOwner::Phantom => None,
}
}
pub fn unwrap(self) -> &'tcx OwnerInfo<'tcx> {
self.as_owner().unwrap_or_else(|| panic!("not a HIR owner"))
}
pub fn expect_delayed(self) -> DelayedOwnerKind {
match self {
MaybeOwner::Delayed(delayed_owner) => delayed_owner,
_ => panic!("not a delayed owner"),
}
self.as_owner().unwrap_or_else(|| panic!("Not a HIR owner"))
}
}
+3 -13
View File
@@ -226,11 +226,6 @@ pub trait Visitor<'v>: Sized {
/// or `ControlFlow<T>`.
type Result: VisitorResult = ();
#[inline]
fn visit_if_delayed(&self, _: LocalDefId) -> bool {
true
}
/// If `type NestedFilter` is set to visit nested items, this method
/// must also be overridden to provide a map to retrieve nested items.
fn maybe_tcx(&mut self) -> Self::MaybeTyCtxt {
@@ -249,23 +244,18 @@ fn maybe_tcx(&mut self) -> Self::MaybeTyCtxt {
/// this method is if you want a nested pattern but cannot supply a
/// `TyCtxt`; see `maybe_tcx` for advice.
fn visit_nested_item(&mut self, id: ItemId) -> Self::Result {
if self.should_visit_maybe_delayed_inter(id.owner_id.def_id) {
if Self::NestedFilter::INTER {
let item = self.maybe_tcx().hir_item(id);
try_visit!(self.visit_item(item));
}
Self::Result::output()
}
// Now delayed owners are only delegations, which are either item, trait item or impl item.
fn should_visit_maybe_delayed_inter(&mut self, id: LocalDefId) -> bool {
Self::NestedFilter::INTER && self.visit_if_delayed(id)
}
/// Like `visit_nested_item()`, but for trait items. See
/// `visit_nested_item()` for advice on when to override this
/// method.
fn visit_nested_trait_item(&mut self, id: TraitItemId) -> Self::Result {
if self.should_visit_maybe_delayed_inter(id.owner_id.def_id) {
if Self::NestedFilter::INTER {
let item = self.maybe_tcx().hir_trait_item(id);
try_visit!(self.visit_trait_item(item));
}
@@ -276,7 +266,7 @@ fn visit_nested_trait_item(&mut self, id: TraitItemId) -> Self::Result {
/// `visit_nested_item()` for advice on when to override this
/// method.
fn visit_nested_impl_item(&mut self, id: ImplItemId) -> Self::Result {
if self.should_visit_maybe_delayed_inter(id.owner_id.def_id) {
if Self::NestedFilter::INTER {
let item = self.maybe_tcx().hir_impl_item(id);
try_visit!(self.visit_impl_item(item));
}
+13 -5
View File
@@ -318,11 +318,15 @@ fn create_generic_args<'tcx>(
let (caller_kind, callee_kind) = (fn_kind(tcx, delegation_id), fn_kind(tcx, sig_id));
let delegation_args = ty::GenericArgs::identity_for_item(tcx, delegation_id);
let delegation_parent_args_count = tcx.generics_of(delegation_id).parent_count;
let deleg_parent_args_without_self_count =
get_delegation_parent_args_count_without_self(tcx, delegation_id, sig_id);
let delegation_generics = tcx.generics_of(delegation_id);
let real_args_count = delegation_args.len() - delegation_generics.own_synthetic_params_count();
let synth_args = &delegation_args[real_args_count..];
let delegation_args = &delegation_args[..real_args_count];
let args = match (caller_kind, callee_kind) {
(FnKind::Free, FnKind::Free)
| (FnKind::Free, FnKind::AssocTrait)
@@ -339,14 +343,15 @@ fn create_generic_args<'tcx>(
assert!(child_args.is_empty(), "Child args can not be used in trait impl case");
tcx.mk_args(&delegation_args[delegation_parent_args_count..])
tcx.mk_args(&delegation_args[delegation_generics.parent_count..])
}
(FnKind::AssocInherentImpl, FnKind::AssocTrait) => {
let self_ty = tcx.type_of(tcx.local_parent(delegation_id)).instantiate_identity();
tcx.mk_args_from_iter(
std::iter::once(ty::GenericArg::from(self_ty)).chain(delegation_args.iter()),
std::iter::once(ty::GenericArg::from(self_ty))
.chain(delegation_args.iter().copied()),
)
}
@@ -411,7 +416,7 @@ fn create_generic_args<'tcx>(
new_args.extend_from_slice(&child_args[child_lifetimes_count..]);
} else if !parent_args.is_empty() {
let child_args = &delegation_args[delegation_parent_args_count..];
let child_args = &delegation_args[delegation_generics.parent_count..];
let child_lifetimes_count =
child_args.iter().take_while(|a| a.as_region().is_some()).count();
@@ -424,6 +429,8 @@ fn create_generic_args<'tcx>(
new_args.extend(&child_args[child_lifetimes_count + skip_self as usize..]);
}
new_args.extend(synth_args);
new_args
}
@@ -606,7 +613,8 @@ fn get_delegation_user_specified_args<'tcx>(
.lower_generic_args_of_path(segment.ident.span, def_id, parent_args, segment, None)
.0;
&args[parent_args.len()..]
let synth_params_count = tcx.generics_of(def_id).own_synthetic_params_count();
&args[parent_args.len()..args.len() - synth_params_count]
});
(parent_args.unwrap_or_default(), child_args.unwrap_or_default())
@@ -747,7 +747,7 @@ fn inferred_kind(
GenericParamDefKind::Lifetime => {
self.lowerer.re_infer(self.span, RegionInferReason::Param(param)).into()
}
GenericParamDefKind::Type { has_default, .. } => {
GenericParamDefKind::Type { has_default, synthetic } => {
if !infer_args && has_default {
// No type parameter provided, but a default exists.
if let Some(prev) =
@@ -763,6 +763,8 @@ fn inferred_kind(
.type_of(param.def_id)
.instantiate(tcx, preceding_args)
.into()
} else if synthetic {
Ty::new_param(tcx, param.index, param.name).into()
} else if infer_args {
self.lowerer.ty_infer(Some(param), self.span).into()
} else {
+1 -1
View File
@@ -17,6 +17,6 @@ rustc_middle = { path = "../rustc_middle" }
rustc_span = { path = "../rustc_span" }
rustc_type_ir = { path = "../rustc_type_ir" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec = "0.2.12"
thin-vec = "0.2.15"
tracing = "0.1"
# tidy-alphabetical-end
+5 -4
View File
@@ -1054,10 +1054,6 @@ pub fn emit_delayed_lints(tcx: TyCtxt<'_>) {
/// Runs all analyses that we guarantee to run, even if errors were reported in earlier analyses.
/// This function never fails.
fn run_required_analyses(tcx: TyCtxt<'_>) {
// Forces all delayed owners to be lowered and drops AST crate after it.
// Also refetches hir_crate_items to prevent multiple threads from blocking on it later.
tcx.force_delayed_owners_lowering();
if tcx.sess.opts.unstable_opts.input_stats {
rustc_passes::input_stats::print_hir_stats(tcx);
}
@@ -1066,6 +1062,11 @@ fn run_required_analyses(tcx: TyCtxt<'_>) {
#[cfg(all(not(doc), debug_assertions))]
rustc_passes::hir_id_validator::check_crate(tcx);
// Prefetch this to prevent multiple threads from blocking on it later.
// This is needed since the `hir_id_validator::check_crate` call above is not guaranteed
// to use `hir_crate_items`.
tcx.ensure_done().hir_crate_items(());
let sess = tcx.sess;
sess.time("misc_checking_1", || {
par_fns(&mut [
+1 -2
View File
@@ -2477,8 +2477,7 @@ pub fn encode_metadata(tcx: TyCtxt<'_>, path: &Path, ref_path: Option<&Path>) {
tcx.dep_graph.with_task(
dep_node,
tcx,
path,
|tcx, path| {
|| {
with_encode_metadata_header(tcx, path, |ecx| {
// Encode all the entries and extra information in the crate,
// culminating in the `CrateRoot` which points to all of it.
+2 -2
View File
@@ -16,7 +16,7 @@ rustc_arena = { path = "../rustc_arena" }
rustc_ast = { path = "../rustc_ast" }
rustc_ast_ir = { path = "../rustc_ast_ir" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_error_messages = { path = "../rustc_error_messages" } # Used for intra-doc links
rustc_error_messages = { path = "../rustc_error_messages" } # Used for intra-doc links
rustc_errors = { path = "../rustc_errors" }
rustc_feature = { path = "../rustc_feature" }
rustc_graphviz = { path = "../rustc_graphviz" }
@@ -33,7 +33,7 @@ rustc_target = { path = "../rustc_target" }
rustc_thread_pool = { path = "../rustc_thread_pool" }
rustc_type_ir = { path = "../rustc_type_ir" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec = "0.2.12"
thin-vec = "0.2.15"
tracing = "0.1"
# tidy-alphabetical-end
+17 -31
View File
@@ -52,6 +52,7 @@ pub enum QuerySideEffect {
/// the side effect dep node as a dependency.
CheckFeature { symbol: Symbol },
}
#[derive(Clone)]
pub struct DepGraph {
data: Option<Arc<DepGraphData>>,
@@ -274,17 +275,19 @@ pub fn with_query_deserialization<OP, R>(&self, op: OP) -> R
}
#[inline(always)]
pub fn with_task<'tcx, A: Debug, R>(
pub fn with_task<'tcx, OP, R>(
&self,
dep_node: DepNode,
tcx: TyCtxt<'tcx>,
task_arg: A,
task_fn: fn(tcx: TyCtxt<'tcx>, task_arg: A) -> R,
op: OP,
hash_result: Option<fn(&mut StableHashingContext<'_>, &R) -> Fingerprint>,
) -> (R, DepNodeIndex) {
) -> (R, DepNodeIndex)
where
OP: FnOnce() -> R,
{
match self.data() {
Some(data) => data.with_task(dep_node, tcx, task_arg, task_fn, hash_result),
None => (task_fn(tcx, task_arg), self.next_virtual_depnode_index()),
Some(data) => data.with_task(dep_node, tcx, op, hash_result),
None => (op(), self.next_virtual_depnode_index()),
}
}
@@ -309,44 +312,27 @@ pub fn with_anon_task<'tcx, OP, R>(
}
impl DepGraphData {
/// Starts a new dep-graph task. Dep-graph tasks are specified
/// using a free function (`task`) and **not** a closure -- this
/// is intentional because we want to exercise tight control over
/// what state they have access to. In particular, we want to
/// prevent implicit 'leaks' of tracked state into the task (which
/// could then be read without generating correct edges in the
/// dep-graph -- see the [rustc dev guide] for more details on
/// the dep-graph).
///
/// Therefore, the task function takes a `TyCtxt`, plus exactly one
/// additional argument, `task_arg`. The additional argument type can be
/// `()` if no argument is needed, or a tuple if multiple arguments are
/// needed.
///
/// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/queries/incremental-compilation.html
#[inline(always)]
pub fn with_task<'tcx, A: Debug, R>(
pub fn with_task<'tcx, OP, R>(
&self,
dep_node: DepNode,
tcx: TyCtxt<'tcx>,
task_arg: A,
task_fn: fn(tcx: TyCtxt<'tcx>, task_arg: A) -> R,
op: OP,
hash_result: Option<fn(&mut StableHashingContext<'_>, &R) -> Fingerprint>,
) -> (R, DepNodeIndex) {
) -> (R, DepNodeIndex)
where
OP: FnOnce() -> R,
{
// If the following assertion triggers, it can have two reasons:
// 1. Something is wrong with DepNode creation, either here or
// in `DepGraph::try_mark_green()`.
// 2. Two distinct query keys get mapped to the same `DepNode`
// (see for example #48923).
self.assert_dep_node_not_yet_allocated_in_current_session(tcx.sess, &dep_node, || {
format!(
"forcing query with already existing `DepNode`\n\
- query-key: {task_arg:?}\n\
- dep-node: {dep_node:?}"
)
format!("forcing query with already existing `DepNode`: {dep_node:?}")
});
let with_deps = |task_deps| with_deps(task_deps, || task_fn(tcx, task_arg));
let with_deps = |task_deps| with_deps(task_deps, op);
let (result, edges) = if tcx.is_eval_always(dep_node.kind) {
(with_deps(TaskDepsRef::EvalAlways), EdgesVec::new())
} else {
+20 -37
View File
@@ -5,10 +5,9 @@
use rustc_abi::ExternAbi;
use rustc_ast::visit::{VisitorResult, walk_list};
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::FxIndexSet;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::svh::Svh;
use rustc_data_structures::sync::{DynSend, DynSync, par_for_each_in, try_par_for_each_in};
use rustc_data_structures::sync::{DynSend, DynSync, par_for_each_in, spawn, try_par_for_each_in};
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{DefId, LOCAL_CRATE, LocalDefId, LocalModDefId};
use rustc_hir::definitions::{DefKey, DefPath, DefPathHash};
@@ -1246,7 +1245,25 @@ pub(super) fn hir_module_items(tcx: TyCtxt<'_>, module_id: LocalModDefId) -> Mod
}
}
fn force_delayed_owners_lowering(tcx: TyCtxt<'_>) {
let krate = tcx.hir_crate(());
for &id in &krate.delayed_ids {
tcx.ensure_done().lower_delayed_owner(id);
}
let (_, krate) = krate.delayed_resolver.steal();
let prof = tcx.sess.prof.clone();
// Drop AST to free memory. It can be expensive so try to drop it on a separate thread.
spawn(move || {
let _timer = prof.verbose_generic_activity("drop_ast");
drop(krate);
});
}
pub(crate) fn hir_crate_items(tcx: TyCtxt<'_>, _: ()) -> ModuleItems {
force_delayed_owners_lowering(tcx);
let mut collector = ItemCollector::new(tcx, true);
// A "crate collector" and "module collector" start at a
@@ -1307,12 +1324,11 @@ struct ItemCollector<'tcx> {
nested_bodies: Vec<LocalDefId>,
delayed_lint_items: Vec<OwnerId>,
eiis: Vec<LocalDefId>,
delayed_ids: Option<&'tcx FxIndexSet<LocalDefId>>,
}
impl<'tcx> ItemCollector<'tcx> {
fn new(tcx: TyCtxt<'tcx>, crate_collector: bool) -> ItemCollector<'tcx> {
let mut collector = ItemCollector {
ItemCollector {
crate_collector,
tcx,
submodules: Vec::default(),
@@ -1325,46 +1341,13 @@ fn new(tcx: TyCtxt<'tcx>, crate_collector: bool) -> ItemCollector<'tcx> {
nested_bodies: Vec::default(),
delayed_lint_items: Vec::default(),
eiis: Vec::default(),
delayed_ids: None,
};
if crate_collector {
let krate = tcx.hir_crate(());
collector.delayed_ids = Some(&krate.delayed_ids);
let delayed_kinds =
krate.delayed_ids.iter().copied().map(|id| (id, krate.owners[id].expect_delayed()));
// FIXME(fn_delegation): need to add delayed lints, eiis
for (def_id, kind) in delayed_kinds {
let owner_id = OwnerId { def_id };
match kind {
DelayedOwnerKind::Item => collector.items.push(ItemId { owner_id }),
DelayedOwnerKind::ImplItem => {
collector.impl_items.push(ImplItemId { owner_id })
}
DelayedOwnerKind::TraitItem => {
collector.trait_items.push(TraitItemId { owner_id })
}
};
collector.body_owners.push(def_id);
}
}
collector
}
}
impl<'hir> Visitor<'hir> for ItemCollector<'hir> {
type NestedFilter = nested_filter::All;
#[inline]
fn visit_if_delayed(&self, def_id: LocalDefId) -> bool {
!self.crate_collector || self.delayed_ids.is_none_or(|ids| !ids.contains(&def_id))
}
fn maybe_tcx(&mut self) -> Self::MaybeTyCtxt {
self.tcx
}
+3 -23
View File
@@ -14,7 +14,7 @@
use rustc_data_structures::sorted_map::SortedMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::steal::Steal;
use rustc_data_structures::sync::{DynSend, DynSync, spawn, try_par_for_each_in};
use rustc_data_structures::sync::{DynSend, DynSync, try_par_for_each_in};
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId};
use rustc_hir::lints::DelayedLint;
@@ -64,8 +64,7 @@ pub fn owner(&self, tcx: TyCtxt<'hir>, def_id: LocalDefId) -> MaybeOwner<'hir> {
// which is greater than delayed LocalDefId, we use IndexVec for owners,
// so we will call ensure_contains_elem which will grow it.
if let Some(owner) = self.owners.get(def_id)
&& (self.delayed_ids.is_empty()
|| !matches!(owner, MaybeOwner::Phantom | MaybeOwner::Delayed(_)))
&& (self.delayed_ids.is_empty() || !matches!(owner, MaybeOwner::Phantom))
{
return *owner;
}
@@ -208,24 +207,6 @@ pub fn par_opaques(
}
impl<'tcx> TyCtxt<'tcx> {
pub fn force_delayed_owners_lowering(self) {
let krate = self.hir_crate(());
self.ensure_done().hir_crate_items(());
for &id in &krate.delayed_ids {
self.ensure_done().lower_delayed_owner(id);
}
let (_, krate) = krate.delayed_resolver.steal();
let prof = self.sess.prof.clone();
// Drop AST to free memory. It can be expensive so try to drop it on a separate thread.
spawn(move || {
let _timer = prof.verbose_generic_activity("drop_ast");
drop(krate);
});
}
pub fn parent_module(self, id: HirId) -> LocalModDefId {
if !id.is_owner() && self.def_kind(id.owner) == DefKind::Mod {
LocalModDefId::new_unchecked(id.owner.def_id)
@@ -494,8 +475,7 @@ pub fn provide(providers: &mut Providers) {
providers.local_def_id_to_hir_id = |tcx, def_id| match tcx.hir_crate(()).owner(tcx, def_id) {
MaybeOwner::Owner(_) => HirId::make_owner(def_id),
MaybeOwner::NonOwner(hir_id) => hir_id,
MaybeOwner::Phantom => bug!("no HirId for {:?}", def_id),
MaybeOwner::Delayed(_) => bug!("delayed owner should be lowered {:?}", def_id),
MaybeOwner::Phantom => bug!("No HirId for {:?}", def_id),
};
providers.opt_hir_owner_nodes =
|tcx, id| tcx.hir_crate(()).owner(tcx, id).as_owner().map(|i| &i.nodes);
+4
View File
@@ -274,6 +274,10 @@ pub fn has_impl_trait(&'tcx self) -> bool {
})
}
pub fn own_synthetic_params_count(&'tcx self) -> usize {
self.own_params.iter().filter(|p| p.kind.is_synthetic()).count()
}
/// Returns the args corresponding to the generic parameters
/// of this item, excluding `Self`.
///
+1 -1
View File
@@ -17,7 +17,7 @@ rustc_lexer = { path = "../rustc_lexer" }
rustc_macros = { path = "../rustc_macros" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
thin-vec = "0.2.12"
thin-vec = "0.2.15"
tracing = "0.1"
unicode-normalization = "0.1.25"
unicode-width = "0.2.2"
+1 -5
View File
@@ -3464,11 +3464,7 @@ pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
}
pub(crate) fn eat_metavar_guard(&mut self) -> Option<Box<Guard>> {
self.eat_metavar_seq_with_matcher(
|mv_kind| matches!(mv_kind, MetaVarKind::Guard),
|this| this.parse_match_arm_guard(),
)
.flatten()
self.eat_metavar_seq(MetaVarKind::Guard, |this| this.parse_match_arm_guard()).flatten()
}
fn parse_match_arm_guard(&mut self) -> PResult<'a, Option<Box<Guard>>> {
@@ -105,7 +105,10 @@ fn may_be_ident(kind: MetaVarKind) -> bool {
token::Lifetime(..) | token::NtLifetime(..) => true,
_ => false,
},
NonterminalKind::Guard => token.is_keyword(kw::If),
NonterminalKind::Guard => match token.kind {
token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Guard)) => true,
_ => token.is_keyword(kw::If),
},
NonterminalKind::TT | NonterminalKind::Item | NonterminalKind::Stmt => {
token.kind.close_delim().is_none()
}
+1
View File
@@ -385,6 +385,7 @@ fn check_attributes(
| AttributeKind::ThreadLocal
| AttributeKind::TypeLengthLimit { .. }
| AttributeKind::UnstableFeatureBound(..)
| AttributeKind::UnstableRemoved(..)
| AttributeKind::Used { .. }
| AttributeKind::WindowsSubsystem(..)
// tidy-alphabetical-end
+14
View File
@@ -895,6 +895,20 @@ pub(crate) struct ImpliedFeatureNotExist {
pub implied_by: Symbol,
}
#[derive(Diagnostic)]
#[diag("feature `{$feature}` has been removed", code = E0557)]
#[note("removed in {$since}; see <{$link}> for more information")]
#[note("{$reason}")]
pub(crate) struct FeatureRemoved {
#[primary_span]
#[label("feature has been removed")]
pub span: Span,
pub feature: Symbol,
pub reason: Symbol,
pub since: String,
pub link: Symbol,
}
#[derive(Diagnostic)]
#[diag(
"attributes `#[rustc_const_unstable]`, `#[rustc_const_stable]` and `#[rustc_const_stable_indirect]` require the function or method to be `const`"
+27 -5
View File
@@ -1097,7 +1097,7 @@ fn check_features<'tcx>(
let lang_features =
UNSTABLE_LANG_FEATURES.iter().map(|feature| feature.name).collect::<Vec<_>>();
let lib_features = crates
.into_iter()
.iter()
.flat_map(|&cnum| {
tcx.lib_features(cnum).stability.keys().copied().into_sorted_stable_ord()
})
@@ -1105,11 +1105,33 @@ fn check_features<'tcx>(
let valid_feature_names = [lang_features, lib_features].concat();
// Collect all of the marked as "removed" features
let unstable_removed_features = crates
.iter()
.flat_map(|&cnum| {
find_attr!(tcx, cnum.as_def_id(), UnstableRemoved(rem_features) => rem_features)
.into_iter()
.flatten()
})
.collect::<Vec<_>>();
for (feature, span) in remaining_lib_features {
let suggestion = feature
.find_similar(&valid_feature_names)
.map(|(actual_name, _)| errors::MisspelledFeature { span, actual_name });
tcx.dcx().emit_err(errors::UnknownFeature { span, feature, suggestion });
if let Some(removed) =
unstable_removed_features.iter().find(|removed| removed.feature == feature)
{
tcx.dcx().emit_err(errors::FeatureRemoved {
span,
feature,
reason: removed.reason,
link: removed.link,
since: removed.since.to_string(),
});
} else {
let suggestion = feature
.find_similar(&valid_feature_names)
.map(|(actual_name, _)| errors::MisspelledFeature { span, actual_name });
tcx.dcx().emit_err(errors::UnknownFeature { span, feature, suggestion });
}
}
}
}
+1 -2
View File
@@ -447,8 +447,7 @@ fn execute_job_incr<'tcx, C: QueryCache>(
dep_graph_data.with_task(
dep_node,
tcx,
(query, key),
|tcx, (query, key)| (query.invoke_provider_fn)(tcx, key),
|| (query.invoke_provider_fn)(tcx, key),
query.hash_value_fn,
)
});
+4 -2
View File
@@ -7,7 +7,9 @@ edition = "2024"
# tidy-alphabetical-start
indexmap = "2.4.0"
itertools = "0.12"
pulldown-cmark = { version = "0.11", features = ["html"], default-features = false }
pulldown-cmark = { version = "0.11", features = [
"html",
], default-features = false }
rustc_arena = { path = "../rustc_arena" }
rustc_ast = { path = "../rustc_ast" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
@@ -24,6 +26,6 @@ rustc_middle = { path = "../rustc_middle" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec = "0.2.12"
thin-vec = "0.2.15"
tracing = "0.1"
# tidy-alphabetical-end
+1 -1
View File
@@ -8,7 +8,7 @@ edition = "2024"
indexmap = "2.0.0"
rustc_hashes = { path = "../rustc_hashes" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec = "0.2.12"
thin-vec = "0.2.15"
# tidy-alphabetical-end
[dev-dependencies]
+1 -1
View File
@@ -641,7 +641,6 @@
compiler_move,
concat,
concat_bytes,
concat_idents,
conservative_impl_trait,
console,
const_allocate,
@@ -2185,6 +2184,7 @@
unstable_location_reason_default: "this crate is being loaded from the sysroot, an \
unstable location; did you mean to load this crate \
from crates.io via `Cargo.toml` instead?",
unstable_removed,
untagged_unions,
unused_imports,
unwind,
+1 -1
View File
@@ -19,6 +19,6 @@ rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_transmute = { path = "../rustc_transmute", features = ["rustc"] }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec = "0.2"
thin-vec = "0.2.15"
tracing = "0.1"
# tidy-alphabetical-end
@@ -6074,7 +6074,16 @@ fn point_at_assoc_type_restriction<G: EmissionGuarantee>(
let ty::ClauseKind::Projection(proj) = clause else {
return;
};
let name = tcx.item_name(proj.projection_term.def_id);
let Some(name) = tcx
.opt_rpitit_info(proj.projection_term.def_id)
.and_then(|data| match data {
ty::ImplTraitInTraitData::Trait { fn_def_id, .. } => Some(tcx.item_name(fn_def_id)),
ty::ImplTraitInTraitData::Impl { .. } => None,
})
.or_else(|| tcx.opt_item_name(proj.projection_term.def_id))
else {
return;
};
let mut predicates = generics.predicates.iter().peekable();
let mut prev: Option<(&hir::WhereBoundPredicate<'_>, Span)> = None;
while let Some(pred) = predicates.next() {
@@ -2,7 +2,6 @@
use std::mem;
use std::ops::ControlFlow;
use rustc_data_structures::thinvec::ExtractIf;
use rustc_hir::def_id::LocalDefId;
use rustc_infer::infer::InferCtxt;
use rustc_infer::traits::query::NoSolution;
@@ -103,18 +102,18 @@ fn on_fulfillment_overflow(&mut self, infcx: &InferCtxt<'tcx>) {
// we get all obligations involved in the overflow. We pretty much check: if
// we were to do another step of `try_evaluate_obligations`, which goals would
// change.
// FIXME: <https://github.com/Gankra/thin-vec/pull/66> is merged, this can be removed.
self.overflowed.extend(
ExtractIf::new(&mut self.pending, |(o, stalled_on)| {
let goal = o.as_goal();
let result = <&SolverDelegate<'tcx>>::from(infcx).evaluate_root_goal(
goal,
o.cause.span,
stalled_on.take(),
);
matches!(result, Ok(GoalEvaluation { has_changed: HasChanged::Yes, .. }))
})
.map(|(o, _)| o),
self.pending
.extract_if(.., |(o, stalled_on)| {
let goal = o.as_goal();
let result = <&SolverDelegate<'tcx>>::from(infcx).evaluate_root_goal(
goal,
o.cause.span,
stalled_on.take(),
);
matches!(result, Ok(GoalEvaluation { has_changed: HasChanged::Yes, .. }))
})
.map(|(o, _)| o),
);
})
}
+4 -2
View File
@@ -20,8 +20,10 @@ rustc_macros = { path = "../rustc_macros", optional = true }
rustc_serialize = { path = "../rustc_serialize", optional = true }
rustc_span = { path = "../rustc_span", optional = true }
rustc_type_ir_macros = { path = "../rustc_type_ir_macros" }
smallvec = { version = "1.8.1", default-features = false, features = ["const_generics"] }
thin-vec = "0.2.12"
smallvec = { version = "1.8.1", default-features = false, features = [
"const_generics",
] }
thin-vec = "0.2.15"
tracing = "0.1"
# tidy-alphabetical-end
@@ -2,27 +2,6 @@
//!
//! In this module, a "vector" is any `#[rustc_scalable_vector]`-annotated type.
/// Numerically casts a vector, elementwise.
///
/// `T` and `U` must be vectors of integers or floats, and must have the same length.
///
/// When casting floats to integers, the result is truncated. Out-of-bounds result lead to UB.
/// When casting integers to floats, the result is rounded.
/// Otherwise, truncates or extends the value, maintaining the sign for signed integers.
///
/// # Safety
/// Casting from integer types is always safe.
/// Casting between two float types is also always safe.
///
/// Casting floats to integers truncates, following the same rules as `to_int_unchecked`.
/// Specifically, each element must:
/// * Not be `NaN`
/// * Not be infinite
/// * Be representable in the return type, after truncating off its fractional part
#[rustc_intrinsic]
#[rustc_nounwind]
pub unsafe fn sve_cast<T, U>(x: T) -> U;
/// Create a tuple of two vectors.
///
/// `SVecTup` must be a scalable vector tuple (`#[rustc_scalable_vector]`) and `SVec` must be a
+7
View File
@@ -418,6 +418,13 @@
// tidy-alphabetical-end
//
#![default_lib_allocator]
// Removed features
#![unstable_removed(
feature = "concat_idents",
reason = "Replaced by the macro_metavar_expr_concat feature",
link = "https://github.com/rust-lang/rust/issues/29599#issuecomment-2986866250",
since = "1.90.0"
)]
// The Rust prelude
// The compiler expects the prelude definition to be defined before its use statement.
+10
View File
@@ -23,6 +23,9 @@ The `unstable` attribute infects all sub-items, where the attribute doesn't
have to be reapplied. So if you apply this to a module, all items in the module
will be unstable.
If you rename a feature, you can add `old_name = "old_name"` to produce a
useful error message.
You can make specific sub-items stable by using the `#[stable]` attribute on
them. The stability scheme works similarly to how `pub` works. You can have
public functions of nonpublic modules and you can have stable functions in
@@ -189,4 +192,11 @@ Currently, the items that can be annotated with `#[unstable_feature_bound]` are:
- free function
- trait
## renamed and removed features
Unstable features can get renamed and removed. If you rename a feature, you can add `old_name = "old_name"` to the `#[unstable]` attribute.
If you remove a feature, the `#![unstable_removed(feature = "foo", reason = "brief description", link = "link", since = "1.90.0")]`
attribute should be used to produce a good error message for users of the removed feature.
The `link` field can be used to link to the most relevant information on the removal of the feature such as a GitHub issue, comment or PR.
[blog]: https://www.ralfj.de/blog/2018/07/19/const.html
-1
View File
@@ -904,7 +904,6 @@ fn main_args(early_dcx: &mut EarlyDiagCtxt, at_args: &[String]) {
return;
}
tcx.force_delayed_owners_lowering();
rustc_interface::passes::emit_delayed_lints(tcx);
if render_opts.dep_info().is_some() {
@@ -10,7 +10,6 @@
use rustc_ast::{self as ast, DUMMY_NODE_ID, Mutability, Pat, PatKind, Pinnedness};
use rustc_ast_pretty::pprust;
use rustc_data_structures::thin_vec::{ThinVec, thin_vec};
use rustc_data_structures::thinvec::ExtractIf;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_session::impl_lint_pass;
@@ -422,9 +421,7 @@ fn drain_matching(
let mut tail_or = ThinVec::new();
let mut idx = 0;
// FIXME: once `thin-vec` releases a new version, change this to `alternatives.extract_if()`
// See https://github.com/mozilla/thin-vec/issues/77
for pat in ExtractIf::new(alternatives, |p| {
for pat in alternatives.extract_if(.., |p| {
// Check if we should extract, but only if `idx >= start`.
idx += 1;
idx > start && predicate(&p.kind)
@@ -13,6 +13,8 @@ jobs:
lints-gen:
name: Generate lints
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v6
@@ -23,9 +25,16 @@ jobs:
- name: Generate lints/feature flags
run: cargo codegen lint-definitions
- uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0
id: app-token
with:
app-id: ${{ vars.APP_CLIENT_ID }}
private-key: ${{ secrets.APP_PRIVATE_KEY }}
- name: Submit PR
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
with:
token: ${{ steps.app-token.outputs.token }}
commit-message: "internal: update generated lints"
branch: "ci/gen-lints"
delete-branch: true
+1 -2
View File
@@ -234,7 +234,6 @@ dependencies = [
"intern",
"oorandom",
"rustc-hash 2.1.1",
"span",
"syntax",
"syntax-bridge",
"tracing",
@@ -846,7 +845,6 @@ dependencies = [
name = "hir-expand"
version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
"cfg",
"cov-mark",
@@ -2733,6 +2731,7 @@ dependencies = [
"rustc-hash 2.1.1",
"rustc-literal-escaper 0.0.4",
"rustc_apfloat",
"smallvec",
"smol_str 0.3.2",
"stdx",
"test-utils",
@@ -19,7 +19,6 @@ tracing.workspace = true
# locals deps
tt = { workspace = true, optional = true }
syntax = { workspace = true, optional = true }
span = { path = "../span", version = "0.0", optional = true }
intern.workspace = true
[dev-dependencies]
@@ -36,7 +35,7 @@ cfg = { path = ".", default-features = false, features = ["tt"] }
[features]
default = []
syntax = ["dep:syntax", "dep:span"]
syntax = ["dep:syntax"]
tt = ["dep:tt"]
in-rust-tree = []
@@ -106,10 +106,54 @@ pub fn parse_from_iter(tt: &mut tt::iter::TtIter<'_>) -> CfgExpr {
}
#[cfg(feature = "syntax")]
pub fn parse_from_ast(
ast: &mut std::iter::Peekable<syntax::ast::TokenTreeChildren>,
) -> CfgExpr {
next_cfg_expr_from_ast(ast).unwrap_or(CfgExpr::Invalid)
pub fn parse_from_ast(ast: syntax::ast::CfgPredicate) -> CfgExpr {
use intern::sym;
use syntax::ast::{self, AstToken};
match ast {
ast::CfgPredicate::CfgAtom(atom) => {
let atom = match atom.key() {
Some(ast::CfgAtomKey::True) => CfgAtom::Flag(sym::true_),
Some(ast::CfgAtomKey::False) => CfgAtom::Flag(sym::false_),
Some(ast::CfgAtomKey::Ident(key)) => {
let key = Symbol::intern(key.text());
match atom.string_token().and_then(ast::String::cast) {
Some(value) => {
if let Ok(value) = value.value() {
CfgAtom::KeyValue { key, value: Symbol::intern(&value) }
} else {
return CfgExpr::Invalid;
}
}
None => CfgAtom::Flag(key),
}
}
None => return CfgExpr::Invalid,
};
CfgExpr::Atom(atom)
}
ast::CfgPredicate::CfgComposite(composite) => {
let Some(keyword) = composite.keyword() else {
return CfgExpr::Invalid;
};
match keyword.text() {
"all" => CfgExpr::All(
composite.cfg_predicates().map(CfgExpr::parse_from_ast).collect(),
),
"any" => CfgExpr::Any(
composite.cfg_predicates().map(CfgExpr::parse_from_ast).collect(),
),
"not" => {
let mut inner = composite.cfg_predicates();
let (Some(inner), None) = (inner.next(), inner.next()) else {
return CfgExpr::Invalid;
};
CfgExpr::Not(Box::new(CfgExpr::parse_from_ast(inner)))
}
_ => CfgExpr::Invalid,
}
}
}
}
/// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates.
@@ -128,65 +172,6 @@ pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option<bool> {
}
}
#[cfg(feature = "syntax")]
fn next_cfg_expr_from_ast(
it: &mut std::iter::Peekable<syntax::ast::TokenTreeChildren>,
) -> Option<CfgExpr> {
use intern::sym;
use syntax::{NodeOrToken, SyntaxKind, T, ast};
let name = match it.next() {
None => return None,
Some(NodeOrToken::Token(ident)) if ident.kind().is_any_identifier() => {
Symbol::intern(ident.text())
}
Some(_) => return Some(CfgExpr::Invalid),
};
let ret = match it.peek() {
Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
it.next();
if let Some(NodeOrToken::Token(literal)) = it.peek()
&& matches!(literal.kind(), SyntaxKind::STRING)
{
let dummy_span = span::Span {
range: span::TextRange::empty(span::TextSize::new(0)),
anchor: span::SpanAnchor {
file_id: span::EditionedFileId::from_raw(0),
ast_id: span::FIXUP_ERASED_FILE_AST_ID_MARKER,
},
ctx: span::SyntaxContext::root(span::Edition::Edition2015),
};
let literal =
Symbol::intern(tt::token_to_literal(literal.text(), dummy_span).text());
it.next();
CfgAtom::KeyValue { key: name, value: literal.clone() }.into()
} else {
return Some(CfgExpr::Invalid);
}
}
Some(NodeOrToken::Node(subtree)) => {
let mut subtree_iter = ast::TokenTreeChildren::new(subtree).peekable();
it.next();
let mut subs = std::iter::from_fn(|| next_cfg_expr_from_ast(&mut subtree_iter));
match name {
s if s == sym::all => CfgExpr::All(subs.collect()),
s if s == sym::any => CfgExpr::Any(subs.collect()),
s if s == sym::not => {
CfgExpr::Not(Box::new(subs.next().unwrap_or(CfgExpr::Invalid)))
}
_ => CfgExpr::Invalid,
}
}
_ => CfgAtom::Flag(name).into(),
};
// Eat comma separator
while it.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {}
Some(ret)
}
#[cfg(feature = "tt")]
fn next_cfg_expr(it: &mut tt::iter::TtIter<'_>) -> Option<CfgExpr> {
use intern::sym;
+16 -33
View File
@@ -1,10 +1,7 @@
use arbitrary::{Arbitrary, Unstructured};
use expect_test::{Expect, expect};
use intern::Symbol;
use syntax::{
AstNode, Edition,
ast::{self, TokenTreeChildren},
};
use syntax::{AstNode, Edition, ast};
use syntax_bridge::{
DocCommentDesugarMode,
dummy_test_span_utils::{DUMMY, DummyTestSpanMap},
@@ -14,32 +11,32 @@
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
#[track_caller]
fn parse_ast_cfg(tt: &ast::TokenTree) -> CfgExpr {
CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable())
fn parse_ast_cfg(pred: ast::CfgPredicate) -> CfgExpr {
CfgExpr::parse_from_ast(pred)
}
#[track_caller]
fn assert_parse_result(input: &str, expected: CfgExpr) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).syntax_node();
let pred_ast = source_file.descendants().find_map(ast::CfgPredicate::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt_ast.syntax(),
pred_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
);
let cfg = CfgExpr::parse(&tt);
assert_eq!(cfg, expected);
let cfg = parse_ast_cfg(&tt_ast);
let cfg = parse_ast_cfg(pred_ast);
assert_eq!(cfg, expected);
}
#[track_caller]
fn check_dnf(input: &str, expect: Expect) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let pred_ast = source_file.syntax().descendants().find_map(ast::CfgPredicate::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt_ast.syntax(),
pred_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
@@ -47,7 +44,7 @@ fn check_dnf(input: &str, expect: Expect) {
let cfg = CfgExpr::parse(&tt);
let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
expect.assert_eq(&actual);
let cfg = parse_ast_cfg(&tt_ast);
let cfg = parse_ast_cfg(pred_ast);
let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
expect.assert_eq(&actual);
}
@@ -55,9 +52,9 @@ fn check_dnf(input: &str, expect: Expect) {
#[track_caller]
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let pred_ast = source_file.syntax().descendants().find_map(ast::CfgPredicate::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt_ast.syntax(),
pred_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
@@ -66,7 +63,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let dnf = DnfExpr::new(&cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
expect.assert_eq(&why_inactive);
let cfg = parse_ast_cfg(&tt_ast);
let cfg = parse_ast_cfg(pred_ast);
let dnf = DnfExpr::new(&cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
expect.assert_eq(&why_inactive);
@@ -75,9 +72,9 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
#[track_caller]
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let pred_ast = source_file.syntax().descendants().find_map(ast::CfgPredicate::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt_ast.syntax(),
pred_ast.syntax(),
DummyTestSpanMap,
DUMMY,
DocCommentDesugarMode::ProcMacro,
@@ -86,7 +83,7 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let dnf = DnfExpr::new(&cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
assert_eq!(hints, expected_hints);
let cfg = parse_ast_cfg(&tt_ast);
let cfg = parse_ast_cfg(pred_ast);
let dnf = DnfExpr::new(&cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
assert_eq!(hints, expected_hints);
@@ -119,20 +116,6 @@ fn test_cfg_expr_parser() {
.into_boxed_slice(),
),
);
assert_parse_result(
r#"#![cfg(any(not(), all(), , bar = "baz",))]"#,
CfgExpr::Any(
vec![
CfgExpr::Not(Box::new(CfgExpr::Invalid)),
CfgExpr::All(Box::new([])),
CfgExpr::Invalid,
CfgAtom::KeyValue { key: Symbol::intern("bar"), value: Symbol::intern("baz") }
.into(),
]
.into_boxed_slice(),
),
);
}
#[test]
@@ -22,7 +22,7 @@
use either::Either;
use hir_expand::{
InFile, Lookup,
attrs::{Meta, expand_cfg_attr},
attrs::{AstKeyValueMetaExt, AstPathExt, expand_cfg_attr},
};
use intern::Symbol;
use itertools::Itertools;
@@ -128,63 +128,89 @@ fn extract_rustc_skip_during_method_dispatch(attr_flags: &mut AttrFlags, tt: ast
}
#[inline]
fn match_attr_flags(attr_flags: &mut AttrFlags, attr: Meta) -> ControlFlow<Infallible> {
fn match_attr_flags(attr_flags: &mut AttrFlags, attr: ast::Meta) -> ControlFlow<Infallible> {
match attr {
Meta::NamedKeyValue { name: Some(name), value, .. } => match name.text() {
"deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
"ignore" => attr_flags.insert(AttrFlags::IS_IGNORE),
"lang" => attr_flags.insert(AttrFlags::LANG_ITEM),
"path" => attr_flags.insert(AttrFlags::HAS_PATH),
"unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE),
"export_name" => {
if let Some(value) = value
&& let Some(value) = ast::String::cast(value)
&& let Ok(value) = value.value()
&& *value == *"main"
{
attr_flags.insert(AttrFlags::IS_EXPORT_NAME_MAIN);
}
}
_ => {}
},
Meta::TokenTree { path, tt } => match path.segments.len() {
1 => match path.segments[0].text() {
ast::Meta::CfgMeta(_) => attr_flags.insert(AttrFlags::HAS_CFG),
ast::Meta::KeyValueMeta(attr) => {
let Some(key) = attr.path().as_one_segment() else { return ControlFlow::Continue(()) };
match &*key {
"deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
"cfg" => attr_flags.insert(AttrFlags::HAS_CFG),
"doc" => extract_doc_tt_attr(attr_flags, tt),
"repr" => attr_flags.insert(AttrFlags::HAS_REPR),
"target_feature" => attr_flags.insert(AttrFlags::HAS_TARGET_FEATURE),
"proc_macro_derive" | "rustc_builtin_macro" => {
attr_flags.insert(AttrFlags::IS_DERIVE_OR_BUILTIN_MACRO)
}
"ignore" => attr_flags.insert(AttrFlags::IS_IGNORE),
"lang" => attr_flags.insert(AttrFlags::LANG_ITEM),
"path" => attr_flags.insert(AttrFlags::HAS_PATH),
"unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE),
"rustc_layout_scalar_valid_range_start" | "rustc_layout_scalar_valid_range_end" => {
attr_flags.insert(AttrFlags::RUSTC_LAYOUT_SCALAR_VALID_RANGE)
}
"rustc_legacy_const_generics" => {
attr_flags.insert(AttrFlags::HAS_LEGACY_CONST_GENERICS)
}
"rustc_skip_during_method_dispatch" => {
extract_rustc_skip_during_method_dispatch(attr_flags, tt)
}
"rustc_deprecated_safe_2024" => {
attr_flags.insert(AttrFlags::RUSTC_DEPRECATED_SAFE_2024)
"export_name" => {
if let Some(value) = attr.value_string()
&& *value == *"main"
{
attr_flags.insert(AttrFlags::IS_EXPORT_NAME_MAIN);
}
}
_ => {}
},
2 => match path.segments[0].text() {
"rust_analyzer" => match path.segments[1].text() {
"completions" => extract_ra_completions(attr_flags, tt),
"macro_style" => extract_ra_macro_style(attr_flags, tt),
}
}
ast::Meta::TokenTreeMeta(attr) => {
let (Some((first_segment, second_segment)), Some(tt)) =
(attr.path().as_up_to_two_segment(), attr.token_tree())
else {
return ControlFlow::Continue(());
};
match second_segment {
None => match &*first_segment {
"deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
"doc" => extract_doc_tt_attr(attr_flags, tt),
"repr" => attr_flags.insert(AttrFlags::HAS_REPR),
"target_feature" => attr_flags.insert(AttrFlags::HAS_TARGET_FEATURE),
"proc_macro_derive" | "rustc_builtin_macro" => {
attr_flags.insert(AttrFlags::IS_DERIVE_OR_BUILTIN_MACRO)
}
"unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE),
"rustc_layout_scalar_valid_range_start"
| "rustc_layout_scalar_valid_range_end" => {
attr_flags.insert(AttrFlags::RUSTC_LAYOUT_SCALAR_VALID_RANGE)
}
"rustc_legacy_const_generics" => {
attr_flags.insert(AttrFlags::HAS_LEGACY_CONST_GENERICS)
}
"rustc_skip_during_method_dispatch" => {
extract_rustc_skip_during_method_dispatch(attr_flags, tt)
}
"rustc_deprecated_safe_2024" => {
attr_flags.insert(AttrFlags::RUSTC_DEPRECATED_SAFE_2024)
}
_ => {}
},
_ => {}
},
_ => {}
},
Meta::Path { path } => {
match path.segments.len() {
1 => match path.segments[0].text() {
Some(second_segment) => match &*first_segment {
"rust_analyzer" => match &*second_segment {
"completions" => extract_ra_completions(attr_flags, tt),
"macro_style" => extract_ra_macro_style(attr_flags, tt),
_ => {}
},
_ => {}
},
}
}
ast::Meta::PathMeta(attr) => {
let is_test = attr.path().is_some_and(|path| {
let Some(segment1) = (|| path.segment()?.name_ref())() else { return false };
let segment2 = path.qualifier();
let segment3 = segment2.as_ref().and_then(|it| it.qualifier());
let segment4 = segment3.as_ref().and_then(|it| it.qualifier());
let segment3 = segment3.and_then(|it| it.segment()?.name_ref());
let segment4 = segment4.and_then(|it| it.segment()?.name_ref());
segment1.text() == "test"
&& segment3.is_none_or(|it| it.text() == "prelude")
&& segment4.is_none_or(|it| it.text() == "core")
});
if is_test {
attr_flags.insert(AttrFlags::IS_TEST);
}
let Some((first_segment, second_segment)) = attr.path().as_up_to_two_segment() else {
return ControlFlow::Continue(());
};
match second_segment {
None => match &*first_segment {
"rustc_has_incoherent_inherent_impls" => {
attr_flags.insert(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS)
}
@@ -228,18 +254,13 @@ fn match_attr_flags(attr_flags: &mut AttrFlags, attr: Meta) -> ControlFlow<Infal
}
_ => {}
},
2 => match path.segments[0].text() {
"rust_analyzer" => match path.segments[1].text() {
Some(second_segment) => match &*first_segment {
"rust_analyzer" => match &*second_segment {
"skip" => attr_flags.insert(AttrFlags::RUST_ANALYZER_SKIP),
_ => {}
},
_ => {}
},
_ => {}
}
if path.is_test {
attr_flags.insert(AttrFlags::IS_TEST);
}
}
_ => {}
@@ -420,7 +441,7 @@ fn resolver_for_attr_def_id(db: &dyn DefDatabase, owner: AttrDefId) -> Resolver<
fn collect_attrs<BreakValue>(
db: &dyn DefDatabase,
owner: AttrDefId,
mut callback: impl FnMut(Meta) -> ControlFlow<BreakValue>,
mut callback: impl FnMut(ast::Meta) -> ControlFlow<BreakValue>,
) -> Option<BreakValue> {
let (source, outer_mod_decl, extra_crate_attrs, krate) = attrs_source(db, owner);
let extra_attrs = extra_crate_attrs
@@ -432,7 +453,7 @@ fn collect_attrs<BreakValue>(
expand_cfg_attr(
extra_attrs.chain(ast::attrs_including_inner(&source.value)),
|| cfg_options.get_or_insert_with(|| krate.cfg_options(db)),
move |meta, _, _, _| callback(meta),
move |meta, _| callback(meta),
)
}
@@ -500,9 +521,10 @@ pub struct DeriveInfo {
pub helpers: Box<[Symbol]>,
}
fn extract_doc_aliases(result: &mut Vec<Symbol>, attr: Meta) -> ControlFlow<Infallible> {
if let Meta::TokenTree { path, tt } = attr
&& path.is1("doc")
fn extract_doc_aliases(result: &mut Vec<Symbol>, attr: ast::Meta) -> ControlFlow<Infallible> {
if let ast::Meta::TokenTreeMeta(attr) = attr
&& attr.path().is1("doc")
&& let Some(tt) = attr.token_tree()
{
for atom in DocAtom::parse(tt) {
match atom {
@@ -519,11 +541,11 @@ fn extract_doc_aliases(result: &mut Vec<Symbol>, attr: Meta) -> ControlFlow<Infa
ControlFlow::Continue(())
}
fn extract_cfgs(result: &mut Vec<CfgExpr>, attr: Meta) -> ControlFlow<Infallible> {
if let Meta::TokenTree { path, tt } = attr
&& path.is1("cfg")
fn extract_cfgs(result: &mut Vec<CfgExpr>, attr: ast::Meta) -> ControlFlow<Infallible> {
if let ast::Meta::CfgMeta(attr) = attr
&& let Some(cfg_predicate) = attr.cfg_predicate()
{
result.push(CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(&tt).peekable()));
result.push(CfgExpr::parse_from_ast(cfg_predicate));
}
ControlFlow::Continue(())
}
@@ -554,7 +576,7 @@ fn field_attr_flags(
expand_cfg_attr(
field.value.attrs(),
|| cfg_options,
|attr, _, _, _| match_attr_flags(&mut attr_flags, attr),
|attr, _| match_attr_flags(&mut attr_flags, attr),
);
attr_flags
})
@@ -591,7 +613,7 @@ fn generic_params_attr_flags(
let lifetimes_source = HasChildSource::<LocalLifetimeParamId>::child_source(&def, db);
for (lifetime_id, lifetime) in lifetimes_source.value.iter() {
let mut attr_flags = AttrFlags::empty();
expand_cfg_attr(lifetime.attrs(), &mut cfg_options, |attr, _, _, _| {
expand_cfg_attr(lifetime.attrs(), &mut cfg_options, |attr, _| {
match_attr_flags(&mut attr_flags, attr)
});
if !attr_flags.is_empty() {
@@ -603,7 +625,7 @@ fn generic_params_attr_flags(
HasChildSource::<LocalTypeOrConstParamId>::child_source(&def, db);
for (type_or_const_id, type_or_const) in type_and_consts_source.value.iter() {
let mut attr_flags = AttrFlags::empty();
expand_cfg_attr(type_or_const.attrs(), &mut cfg_options, |attr, _, _, _| {
expand_cfg_attr(type_or_const.attrs(), &mut cfg_options, |attr, _| {
match_attr_flags(&mut attr_flags, attr)
});
if !attr_flags.is_empty() {
@@ -642,11 +664,10 @@ pub(crate) fn is_cfg_enabled_for(
let result = expand_cfg_attr(
attrs,
|| cfg_options,
|attr, _, _, _| {
if let Meta::TokenTree { path, tt } = attr
&& path.is1("cfg")
&& let cfg =
CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(&tt).peekable())
|attr, _| {
if let ast::Meta::CfgMeta(attr) = attr
&& let Some(cfg_predicate) = attr.cfg_predicate()
&& let cfg = CfgExpr::parse_from_ast(cfg_predicate)
&& cfg_options.check(&cfg) == Some(false)
{
ControlFlow::Break(cfg)
@@ -678,10 +699,9 @@ pub fn lang_item_with_attrs(self, db: &dyn DefDatabase, owner: AttrDefId) -> Opt
#[salsa::tracked]
fn lang_item(db: &dyn DefDatabase, owner: AttrDefId) -> Option<Symbol> {
collect_attrs(db, owner, |attr| {
if let Meta::NamedKeyValue { name: Some(name), value: Some(value), .. } = attr
&& name.text() == "lang"
&& let Some(value) = ast::String::cast(value)
&& let Ok(value) = value.value()
if let ast::Meta::KeyValueMeta(attr) = attr
&& attr.path().is1("lang")
&& let Some(value) = attr.value_string()
{
ControlFlow::Break(Symbol::intern(&value))
} else {
@@ -704,8 +724,9 @@ pub fn repr(db: &dyn DefDatabase, owner: AdtId) -> Option<ReprOptions> {
fn repr(db: &dyn DefDatabase, owner: AdtId) -> Option<ReprOptions> {
let mut result = None;
collect_attrs::<Infallible>(db, owner.into(), |attr| {
if let Meta::TokenTree { path, tt } = attr
&& path.is1("repr")
if let ast::Meta::TokenTreeMeta(attr) = attr
&& attr.path().is1("repr")
&& let Some(tt) = attr.token_tree()
&& let Some(repr) = parse_repr_tt(&tt)
{
match &mut result {
@@ -726,8 +747,9 @@ pub(crate) fn legacy_const_generic_indices(
owner: FunctionId,
) -> Option<Box<[u32]>> {
let result = collect_attrs(db, owner.into(), |attr| {
if let Meta::TokenTree { path, tt } = attr
&& path.is1("rustc_legacy_const_generics")
if let ast::Meta::TokenTreeMeta(attr) = attr
&& attr.path().is1("rustc_legacy_const_generics")
&& let Some(tt) = attr.token_tree()
{
let result = parse_rustc_legacy_const_generics(tt);
ControlFlow::Break(result)
@@ -750,9 +772,10 @@ pub fn doc_html_root_url(db: &dyn DefDatabase, krate: Crate) -> Option<SmolStr>
expand_cfg_attr(
extra_crate_attrs.chain(syntax.attrs()),
|| cfg_options.get_or_insert(krate.cfg_options(db)),
|attr, _, _, _| {
if let Meta::TokenTree { path, tt } = attr
&& path.is1("doc")
|attr, _| {
if let ast::Meta::TokenTreeMeta(attr) = attr
&& attr.path().is1("doc")
&& let Some(tt) = attr.token_tree()
&& let Some(result) = DocAtom::parse(tt).into_iter().find_map(|atom| {
if let DocAtom::KeyValue { key, value } = atom
&& key == "html_root_url"
@@ -783,8 +806,9 @@ pub fn target_features(db: &dyn DefDatabase, owner: FunctionId) -> &FxHashSet<Sy
fn target_features(db: &dyn DefDatabase, owner: FunctionId) -> FxHashSet<Symbol> {
let mut result = FxHashSet::default();
collect_attrs::<Infallible>(db, owner.into(), |attr| {
if let Meta::TokenTree { path, tt } = attr
&& path.is1("target_feature")
if let ast::Meta::TokenTreeMeta(attr) = attr
&& attr.path().is1("target_feature")
&& let Some(tt) = attr.token_tree()
{
let mut tt = TokenTreeChildren::new(&tt);
while let Some(NodeOrToken::Token(enable_ident)) = tt.next()
@@ -831,9 +855,11 @@ fn rustc_layout_scalar_valid_range(
) -> RustcLayoutScalarValidRange {
let mut result = RustcLayoutScalarValidRange::default();
collect_attrs::<Infallible>(db, owner.into(), |attr| {
if let Meta::TokenTree { path, tt } = attr
if let ast::Meta::TokenTreeMeta(attr) = attr
&& let path = attr.path()
&& (path.is1("rustc_layout_scalar_valid_range_start")
|| path.is1("rustc_layout_scalar_valid_range_end"))
&& let Some(tt) = attr.token_tree()
&& let tt = TokenTreeChildren::new(&tt)
&& let Ok(NodeOrToken::Token(value)) = Itertools::exactly_one(tt)
&& let Some(value) = ast::IntNumber::cast(value)
@@ -881,7 +907,7 @@ fn fields_doc_aliases(
expand_cfg_attr(
field.value.attrs(),
|| cfg_options,
|attr, _, _, _| extract_doc_aliases(&mut result, attr),
|attr, _| extract_doc_aliases(&mut result, attr),
);
result.into_boxed_slice()
})
@@ -923,7 +949,7 @@ fn fields_cfgs(
expand_cfg_attr(
field.value.attrs(),
|| cfg_options,
|attr, _, _, _| extract_cfgs(&mut result, attr),
|attr, _| extract_cfgs(&mut result, attr),
);
match result.len() {
0 => None,
@@ -944,8 +970,9 @@ pub fn doc_keyword(db: &dyn DefDatabase, owner: ModuleId) -> Option<Symbol> {
#[salsa::tracked]
fn doc_keyword(db: &dyn DefDatabase, owner: ModuleId) -> Option<Symbol> {
collect_attrs(db, AttrDefId::ModuleId(owner), |attr| {
if let Meta::TokenTree { path, tt } = attr
&& path.is1("doc")
if let ast::Meta::TokenTreeMeta(attr) = attr
&& attr.path().is1("doc")
&& let Some(tt) = attr.token_tree()
{
for atom in DocAtom::parse(tt) {
if let DocAtom::KeyValue { key, value } = atom
@@ -1015,12 +1042,10 @@ pub fn derive_info(db: &dyn DefDatabase, owner: MacroId) -> Option<&DeriveInfo>
#[salsa::tracked(returns(ref))]
fn derive_info(db: &dyn DefDatabase, owner: MacroId) -> Option<DeriveInfo> {
collect_attrs(db, owner.into(), |attr| {
if let Meta::TokenTree { path, tt } = attr
&& path.segments.len() == 1
&& matches!(
path.segments[0].text(),
"proc_macro_derive" | "rustc_builtin_macro"
)
if let ast::Meta::TokenTreeMeta(attr) = attr
&& (attr.path().is1("proc_macro_derive")
|| attr.path().is1("rustc_builtin_macro"))
&& let Some(tt) = attr.token_tree()
&& let mut tt = TokenTreeChildren::new(&tt)
&& let Some(NodeOrToken::Token(trait_name)) = tt.next()
&& trait_name.kind().is_any_identifier()
@@ -16,7 +16,7 @@
use either::Either;
use hir_expand::{
AstId, ExpandTo, HirFileId, InFile,
attrs::{Meta, expand_cfg_attr_with_doc_comments},
attrs::{AstPathExt, expand_cfg_attr_with_doc_comments},
mod_path::ModPath,
span_map::SpanMap,
};
@@ -182,8 +182,7 @@ fn extend_with_doc_comment(&mut self, comment: ast::Comment, indent: &mut usize)
self.extend_with_doc_str(doc, comment.syntax().text_range().start() + offset, indent);
}
fn extend_with_doc_attr(&mut self, value: syntax::SyntaxToken, indent: &mut usize) {
let Some(value) = ast::String::cast(value) else { return };
fn extend_with_doc_attr(&mut self, value: ast::String, indent: &mut usize) {
let Some(value_offset) = value.text_range_between_quotes() else { return };
let value_offset = value_offset.start();
let Ok(value) = value.value() else { return };
@@ -423,10 +422,6 @@ fn extend_with_attrs<'a, 'db>(
// Lazily initialised when we first encounter a `#[doc = macro!()]`.
let mut expander: Option<(DocMacroExpander<'db>, DocExprSourceCtx<'db>)> = None;
// FIXME: `#[cfg_attr(..., doc = macro!())]` skips macro expansion because
// `top_attr` points to the `cfg_attr` node, not the inner `doc = macro!()`.
// Fixing this is difficult as we need an `Expr` that doesn't exist here for
// the ast id and for sanely parsing the macro call.
expand_cfg_attr_with_doc_comments::<_, Infallible>(
AttrDocCommentIter::from_syntax_node(node).filter(|attr| match attr {
Either::Left(attr) => attr.kind().is_inner() == expect_inner_attrs,
@@ -439,46 +434,38 @@ fn extend_with_attrs<'a, 'db>(
|attr| {
match attr {
Either::Right(doc_comment) => result.extend_with_doc_comment(doc_comment, indent),
Either::Left((attr, _, _, top_attr)) => match attr {
Meta::NamedKeyValue { name: Some(name), value: Some(value), .. }
if name.text() == "doc" =>
{
result.extend_with_doc_attr(value, indent);
}
Meta::NamedKeyValue { name: Some(name), value: None, .. }
if name.text() == "doc" =>
{
// When the doc attribute comes from inside a `cfg_attr`,
// `top_attr` points to the `cfg_attr(...)` node, not the
// inner `doc = macro!()`. In that case `top_attr.expr()`
// would not yield the macro expression we need, so skip
// expansion (see FIXME above).
let is_from_cfg_attr =
top_attr.as_simple_call().is_some_and(|(name, _)| name == "cfg_attr");
if !is_from_cfg_attr && let Some(expr) = top_attr.expr() {
let (exp, ctx) = expander.get_or_insert_with(|| {
let resolver = make_resolver();
let def_map = resolver.top_level_def_map();
let recursion_limit = def_map.recursion_limit() as usize;
(
DocMacroExpander {
db,
krate,
recursion_depth: 0,
recursion_limit,
},
DocExprSourceCtx {
resolver,
file_id,
ast_id_map: db.ast_id_map(file_id),
span_map: db.span_map(file_id),
},
)
});
if let Some(expanded) =
expand_doc_expr_via_macro_pipeline(exp, ctx, expr)
Either::Left((attr, _)) => match attr {
ast::Meta::KeyValueMeta(attr) if attr.path().is1("doc") => {
if let Some(value) = attr.expr() {
if let ast::Expr::Literal(value) = &value
&& let ast::LiteralKind::String(value) = value.kind()
{
result.extend_with_unmapped_doc_str(&expanded, indent);
result.extend_with_doc_attr(value, indent);
} else {
let (exp, ctx) = expander.get_or_insert_with(|| {
let resolver = make_resolver();
let def_map = resolver.top_level_def_map();
let recursion_limit = def_map.recursion_limit() as usize;
(
DocMacroExpander {
db,
krate,
recursion_depth: 0,
recursion_limit,
},
DocExprSourceCtx {
resolver,
file_id,
ast_id_map: db.ast_id_map(file_id),
span_map: db.span_map(file_id),
},
)
});
if let Some(expanded) =
expand_doc_expr_via_macro_pipeline(exp, ctx, value)
{
result.extend_with_unmapped_doc_str(&expanded, indent);
}
}
}
}
@@ -68,7 +68,7 @@ pub mod keys {
pub const MACRO_CALL: Key<ast::MacroCall, MacroCallId> = Key::new();
pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
pub const DERIVE_MACRO_CALL: Key<
ast::Attr,
ast::Meta,
(
AttrId,
/* derive() */ MacroCallId,
@@ -13,12 +13,12 @@
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
attrs::{Attr, AttrId, AttrInput, Meta, collect_item_tree_attrs},
attrs::{Attr, AttrId, AttrInput, collect_item_tree_attrs},
mod_path::ModPath,
name::Name,
};
use intern::{Interned, Symbol, sym};
use syntax::{AstNode, T, ast};
use syntax::{AstNode, ast};
use syntax_bridge::DocCommentDesugarMode;
use tt::token_to_literal;
@@ -51,58 +51,62 @@ pub(crate) fn lower<'a, S>(
S: syntax_bridge::SpanMapper + Copy,
{
let mut attrs = Vec::new();
let result =
collect_item_tree_attrs::<Infallible>(owner, cfg_options, |meta, container, _, _| {
// NOTE: We cannot early return from this function, *every* attribute must be pushed, otherwise we'll mess the `AttrId`
// tracking.
let (span, path_range, input) = match meta {
Meta::NamedKeyValue { path_range, name: _, value } => {
let span = span_map.span_for(path_range);
let input = value.map(|value| {
Box::new(AttrInput::Literal(token_to_literal(
value.text(),
span_map.span_for(value.text_range()),
)))
});
(span, path_range, input)
}
Meta::TokenTree { path, tt } => {
let span = span_map.span_for(path.range);
let tt = syntax_bridge::syntax_node_to_token_tree(
tt.syntax(),
span_map,
span,
DocCommentDesugarMode::ProcMacro,
);
let input = Some(Box::new(AttrInput::TokenTree(tt)));
(span, path.range, input)
}
Meta::Path { path } => {
let span = span_map.span_for(path.range);
(span, path.range, None)
}
};
let result = collect_item_tree_attrs::<Infallible>(owner, cfg_options, |meta, _| {
// NOTE: We cannot early return from this function, *every* attribute must be pushed, otherwise we'll mess the `AttrId`
// tracking.
let path = meta.path();
let path_range = path
.as_ref()
.map(|path| path.syntax().text_range())
.unwrap_or_else(|| meta.syntax().text_range());
let (span, input) = match &meta {
ast::Meta::KeyValueMeta(meta) => {
let span = span_map.span_for(path_range);
let input = meta.expr().and_then(|value| {
if let ast::Expr::Literal(value) = value {
Some(Box::new(AttrInput::Literal(token_to_literal(
value.token().text(),
span_map.span_for(value.syntax().text_range()),
))))
} else {
None
}
});
(span, input)
}
ast::Meta::TokenTreeMeta(meta) => {
let span = span_map.span_for(path_range);
let tt = syntax_bridge::syntax_node_to_token_tree(
&meta
.token_tree()
.map(|it| it.syntax().clone())
.unwrap_or_else(|| meta.syntax().clone()),
span_map,
span,
DocCommentDesugarMode::ProcMacro,
);
let input = Some(Box::new(AttrInput::TokenTree(tt)));
(span, input)
}
ast::Meta::PathMeta(_) => {
let span = span_map.span_for(path_range);
(span, None)
}
ast::Meta::CfgMeta(_) | ast::Meta::CfgAttrMeta(_) | ast::Meta::UnsafeMeta(_) => {
unreachable!(
"`cfg`, `cfg_attr` and `unsafe(...)` are handled in `collect_item_tree_attrs()`"
)
}
};
let path = container.token_at_offset(path_range.start()).right_biased().and_then(
|first_path_token| {
let is_abs = matches!(first_path_token.kind(), T![:] | T![::]);
let segments =
std::iter::successors(Some(first_path_token), |it| it.next_token())
.take_while(|it| it.text_range().end() <= path_range.end())
.filter(|it| it.kind().is_any_identifier());
ModPath::from_tokens(
db,
&mut |range| span_map.span_for(range).ctx,
is_abs,
segments,
)
},
);
let path = path.unwrap_or_else(|| Name::missing().into());
attrs.push(Attr { path: Interned::new(path), input, ctxt: span.ctx });
ControlFlow::Continue(())
let path = path.and_then(|path| {
ModPath::from_src(db, path, &mut |range| span_map.span_for(range).ctx)
});
let path = path.unwrap_or_else(|| Name::missing().into());
attrs.push(Attr { path: Interned::new(path), input, ctxt: span.ctx });
ControlFlow::Continue(())
});
let attrs = AttrsOwned(attrs.into_boxed_slice());
match result {
Some(Either::Right(cfg)) => AttrsOrCfg::CfgDisabled(Box::new((cfg, attrs))),
@@ -1198,7 +1198,7 @@ macro_rules! m {
macro_rules! m {
($m:meta) => ( #[$m] fn bar() {} )
}
#[cfg(target_os = "windows")] fn bar() {}
#[cfg (target_os = "windows")] fn bar() {}
#[hello::world] fn bar() {}
"#]],
);
@@ -205,7 +205,7 @@ impl Clone for D3DVSHADERCAPS2_0 {
*self
}
}
#[cfg(feature = "impl-default")] impl Default for D3DVSHADERCAPS2_0 {
#[cfg (feature = "impl-default")] impl Default for D3DVSHADERCAPS2_0 {
#[inline] fn default() -> D3DVSHADERCAPS2_0 {
unsafe {
$crate::_core::mem::zeroed()
@@ -215,7 +215,7 @@ impl Clone for D3DVSHADERCAPS2_0 {
#[repr(C)]
#[derive(Copy)]
#[cfg_attr(target_arch = "x86", repr(packed))] pub struct D3DCONTENTPROTECTIONCAPS {
#[cfg_attr (target_arch = "x86", repr(packed))] pub struct D3DCONTENTPROTECTIONCAPS {
pub Caps: u8,
}
impl Clone for D3DCONTENTPROTECTIONCAPS {
@@ -223,7 +223,7 @@ impl Clone for D3DCONTENTPROTECTIONCAPS {
*self
}
}
#[cfg(feature = "impl-default")] impl Default for D3DCONTENTPROTECTIONCAPS {
#[cfg (feature = "impl-default")] impl Default for D3DCONTENTPROTECTIONCAPS {
#[inline] fn default() -> D3DCONTENTPROTECTIONCAPS {
unsafe {
$crate::_core::mem::zeroed()
@@ -1001,8 +1001,8 @@ macro_rules! with_std {
($($i:item)*) => ($(#[cfg(feature = "std")]$i)*)
}
#[cfg(feature = "std")] mod m;
#[cfg(feature = "std")] mod f;
#[cfg (feature = "std")] mod m;
#[cfg (feature = "std")] mod f;
"#]],
)
}
@@ -55,8 +55,8 @@ fn bar() {}
# ![doc = "123..."]
# ![attr2]
# ![attr3]
#[cfg_attr(true , cfg(false ))] fn foo() {}
#[cfg(true )] fn bar() {}
#[cfg_attr (true , cfg (false ))] fn foo() {}
#[cfg (true )] fn bar() {}
}"##]],
);
}
@@ -23,7 +23,6 @@ triomphe.workspace = true
query-group.workspace = true
salsa.workspace = true
salsa-macros.workspace = true
arrayvec.workspace = true
thin-vec.workspace = true
# local deps
@@ -4,20 +4,8 @@
//! [`expand_cfg_attr_with_doc_comments()`]. It is used to implement all attribute lowering
//! in r-a. Its basic job is to list attributes; however, attributes do not necessarily map
//! into [`ast::Attr`], because `cfg_attr` can map to zero, one, or more attributes
//! (`#[cfg_attr(predicate, attr1, attr2, ...)]`). To bridge this gap, this module defines
//! [`Meta`], which represents a desugared attribute. Various bits of r-a need different
//! things from [`Meta`], therefore it contains many parts. The basic idea is:
//!
//! - There are three kinds of attributes, `path = value`, `path`, and `path(token_tree)`.
//! - Most bits of rust-analyzer only need to deal with some paths. Therefore, we keep
//! the path only if it has up to 2 segments, or one segment for `path = value`.
//! We also only keep the value in `path = value` if it is a literal. However, we always
//! save the all relevant ranges of attributes (the path range, and the full attribute range)
//! for parts of r-a (e.g. name resolution) that need a faithful representation of the
//! attribute.
//!
//! [`expand_cfg_attr()`] expands `cfg_attr`s as it goes (as its name implies), to list
//! all attributes.
//! (`#[cfg_attr(predicate, attr1, attr2, ...)]`). [`expand_cfg_attr()`] expands `cfg_attr`s
//! as it goes (as its name implies), to list all attributes.
//!
//! Another thing to note is that we need to be able to map an attribute back to a range
//! (for diagnostic purposes etc.). This is only ever needed for attributes that participate
@@ -26,26 +14,18 @@
//! place (here) and one function ([`is_item_tree_filtered_attr()`]) that decides whether
//! an attribute participate in name resolution.
use std::{
borrow::Cow, cell::OnceCell, convert::Infallible, fmt, iter::Peekable, ops::ControlFlow,
};
use std::{borrow::Cow, cell::OnceCell, convert::Infallible, fmt, ops::ControlFlow};
use ::tt::{TextRange, TextSize};
use arrayvec::ArrayVec;
use ::tt::TextRange;
use base_db::Crate;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use intern::Interned;
use itertools::Itertools;
use mbe::{DelimiterKind, Punct};
use parser::T;
use smallvec::SmallVec;
use span::{RealSpanMap, Span, SyntaxContext};
use syntax::{
AstNode, NodeOrToken, SyntaxNode, SyntaxToken,
ast::{self, TokenTreeChildren},
unescape,
};
use syntax::{AstNode, SmolStr, ast, unescape};
use syntax_bridge::DocCommentDesugarMode;
use crate::{
@@ -56,207 +36,75 @@
tt::{self, TopSubtree},
};
#[derive(Debug)]
pub struct AttrPath {
/// This can be empty if the path is not of 1 or 2 segments exactly.
pub segments: ArrayVec<SyntaxToken, 2>,
pub range: TextRange,
// FIXME: This shouldn't be textual, `#[test]` needs name resolution.
// And if textual, it shouldn't be here, it should be in hir-def/src/attrs.rs. But some macros
// fully qualify `test` as `core::prelude::vX::test`, and this is more than 2 segments, so hir-def
// attrs can't find it. But this will mean we have to push every up-to-4-segments path, which
// may impact perf. So it was easier to just hack it here.
pub is_test: bool,
pub trait AstPathExt {
fn is1(&self, segment: &str) -> bool;
fn as_one_segment(&self) -> Option<SmolStr>;
fn as_up_to_two_segment(&self) -> Option<(SmolStr, Option<SmolStr>)>;
}
impl AttrPath {
#[inline]
fn extract(path: &ast::Path) -> Self {
let mut is_test = false;
let segments = (|| {
let mut segments = ArrayVec::new();
let segment2 = path.segment()?.name_ref()?.syntax().first_token()?;
if segment2.text() == "test" {
// `#[test]` or `#[core::prelude::vX::test]`.
is_test = true;
}
let segment1 = path.qualifier();
if let Some(segment1) = segment1 {
if segment1.qualifier().is_some() {
None
} else {
let segment1 = segment1.segment()?.name_ref()?.syntax().first_token()?;
segments.push(segment1);
segments.push(segment2);
Some(segments)
}
} else {
segments.push(segment2);
Some(segments)
}
})();
AttrPath {
segments: segments.unwrap_or(ArrayVec::new()),
range: path.syntax().text_range(),
is_test,
}
impl AstPathExt for ast::Path {
fn is1(&self, segment: &str) -> bool {
self.as_one_segment().is_some_and(|it| it == segment)
}
#[inline]
pub fn is1(&self, segment: &str) -> bool {
self.segments.len() == 1 && self.segments[0].text() == segment
fn as_one_segment(&self) -> Option<SmolStr> {
Some(self.as_single_name_ref()?.text().into())
}
fn as_up_to_two_segment(&self) -> Option<(SmolStr, Option<SmolStr>)> {
let parent = self.qualifier().as_one_segment();
let this = self.segment()?.name_ref()?.text().into();
if let Some(parent) = parent { Some((parent, Some(this))) } else { Some((this, None)) }
}
}
#[derive(Debug)]
pub enum Meta {
/// `name` is `None` if not a single token. `value` is a literal or `None`.
NamedKeyValue {
path_range: TextRange,
name: Option<SyntaxToken>,
value: Option<SyntaxToken>,
},
TokenTree {
path: AttrPath,
tt: ast::TokenTree,
},
Path {
path: AttrPath,
},
}
impl Meta {
#[inline]
pub fn path_range(&self) -> TextRange {
match self {
Meta::NamedKeyValue { path_range, .. } => *path_range,
Meta::TokenTree { path, .. } | Meta::Path { path } => path.range,
}
impl AstPathExt for Option<ast::Path> {
fn is1(&self, segment: &str) -> bool {
self.as_ref().is_some_and(|it| it.is1(segment))
}
fn extract(iter: &mut Peekable<TokenTreeChildren>) -> Option<(Self, TextSize)> {
let mut start_offset = None;
if let Some(NodeOrToken::Token(colon1)) = iter.peek()
&& colon1.kind() == T![:]
fn as_one_segment(&self) -> Option<SmolStr> {
self.as_ref().and_then(|it| it.as_one_segment())
}
fn as_up_to_two_segment(&self) -> Option<(SmolStr, Option<SmolStr>)> {
self.as_ref().and_then(|it| it.as_up_to_two_segment())
}
}
pub trait AstKeyValueMetaExt {
fn value_string(&self) -> Option<SmolStr>;
}
impl AstKeyValueMetaExt for ast::KeyValueMeta {
fn value_string(&self) -> Option<SmolStr> {
if let Some(ast::Expr::Literal(value)) = self.expr()
&& let ast::LiteralKind::String(value) = value.kind()
&& let Ok(value) = value.value()
{
start_offset = Some(colon1.text_range().start());
iter.next();
iter.next_if(|it| it.as_token().is_some_and(|it| it.kind() == T![:]));
}
let first_segment = iter
.next_if(|it| it.as_token().is_some_and(|it| it.kind().is_any_identifier()))?
.into_token()?;
let mut is_test = first_segment.text() == "test";
let start_offset = start_offset.unwrap_or_else(|| first_segment.text_range().start());
let mut segments_len = 1;
let mut second_segment = None;
let mut path_range = first_segment.text_range();
while iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:])
&& let _ = iter.next()
&& iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:])
&& let _ = iter.next()
&& let Some(NodeOrToken::Token(segment)) = iter.peek()
&& segment.kind().is_any_identifier()
{
segments_len += 1;
is_test = segment.text() == "test";
second_segment = Some(segment.clone());
path_range = TextRange::new(path_range.start(), segment.text_range().end());
iter.next();
}
let segments = |first, second| {
let mut segments = ArrayVec::new();
if segments_len <= 2 {
segments.push(first);
if let Some(second) = second {
segments.push(second);
}
}
segments
};
let meta = match iter.peek() {
Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
iter.next();
let value = match iter.peek() {
Some(NodeOrToken::Token(token)) if token.kind().is_literal() => {
// No need to consume it, it will be consumed by `extract_and_eat_comma()`.
Some(token.clone())
}
_ => None,
};
let name = if second_segment.is_none() { Some(first_segment) } else { None };
Meta::NamedKeyValue { path_range, name, value }
}
Some(NodeOrToken::Node(tt)) => Meta::TokenTree {
path: AttrPath {
segments: segments(first_segment, second_segment),
range: path_range,
is_test,
},
tt: tt.clone(),
},
_ => Meta::Path {
path: AttrPath {
segments: segments(first_segment, second_segment),
range: path_range,
is_test,
},
},
};
Some((meta, start_offset))
}
fn extract_possibly_unsafe(
iter: &mut Peekable<TokenTreeChildren>,
container: &ast::TokenTree,
) -> Option<(Self, TextRange)> {
if iter.peek().is_some_and(|it| it.as_token().is_some_and(|it| it.kind() == T![unsafe])) {
iter.next();
let tt = iter.next()?.into_node()?;
let result = Self::extract(&mut TokenTreeChildren::new(&tt).peekable()).map(
|(meta, start_offset)| (meta, TextRange::new(start_offset, tt_end_offset(&tt))),
);
while iter.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {}
result
Some((*value).into())
} else {
Self::extract(iter).map(|(meta, start_offset)| {
let end_offset = 'find_end_offset: {
for it in iter {
if let NodeOrToken::Token(it) = it
&& it.kind() == T![,]
{
break 'find_end_offset it.text_range().start();
}
}
tt_end_offset(container)
};
(meta, TextRange::new(start_offset, end_offset))
})
None
}
}
}
fn tt_end_offset(tt: &ast::TokenTree) -> TextSize {
tt.syntax().last_token().unwrap().text_range().start()
}
/// The callback is passed a desugared form of the attribute ([`Meta`]), a [`SyntaxNode`] fully containing it
/// (note: it may not be the direct parent), the range within the [`SyntaxNode`] bounding the attribute,
/// and the outermost `ast::Attr`. Note that one node may map to multiple [`Meta`]s due to `cfg_attr`.
/// The callback is passed the attribute and the outermost `ast::Attr`.
/// Note that one node may map to multiple [`Meta`]s due to `cfg_attr`.
///
/// `unsafe(attr)` are passed the inner attribute for now.
#[inline]
pub fn expand_cfg_attr<'a, BreakValue>(
attrs: impl Iterator<Item = ast::Attr>,
cfg_options: impl FnMut() -> &'a CfgOptions,
mut callback: impl FnMut(Meta, &SyntaxNode, TextRange, &ast::Attr) -> ControlFlow<BreakValue>,
mut callback: impl FnMut(ast::Meta, ast::Attr) -> ControlFlow<BreakValue>,
) -> Option<BreakValue> {
expand_cfg_attr_with_doc_comments::<Infallible, _>(
attrs.map(Either::Left),
cfg_options,
move |Either::Left((meta, container, range, top_attr))| {
callback(meta, container, range, top_attr)
},
move |Either::Left((meta, top_attr))| callback(meta, top_attr),
)
}
@@ -264,66 +112,47 @@ pub fn expand_cfg_attr<'a, BreakValue>(
pub fn expand_cfg_attr_with_doc_comments<'a, DocComment, BreakValue>(
mut attrs: impl Iterator<Item = Either<ast::Attr, DocComment>>,
mut cfg_options: impl FnMut() -> &'a CfgOptions,
mut callback: impl FnMut(
Either<(Meta, &SyntaxNode, TextRange, &ast::Attr), DocComment>,
) -> ControlFlow<BreakValue>,
mut callback: impl FnMut(Either<(ast::Meta, ast::Attr), DocComment>) -> ControlFlow<BreakValue>,
) -> Option<BreakValue> {
let mut stack = SmallVec::<[_; 1]>::new();
let result = attrs.try_for_each(|top_attr| {
let top_attr = match top_attr {
Either::Left(it) => it,
Either::Right(comment) => return callback(Either::Right(comment)),
};
if let Some((attr_name, tt)) = top_attr.as_simple_call()
&& attr_name == "cfg_attr"
{
let mut tt_iter = TokenTreeChildren::new(&tt).peekable();
let cfg = cfg::CfgExpr::parse_from_ast(&mut tt_iter);
if cfg_options().check(&cfg) != Some(false) {
stack.push((tt_iter, tt));
while let Some((tt_iter, tt)) = stack.last_mut() {
let Some((attr, range)) = Meta::extract_possibly_unsafe(tt_iter, tt) else {
stack.pop();
continue;
};
if let Meta::TokenTree { path, tt: nested_tt } = &attr
&& path.is1("cfg_attr")
{
let mut nested_tt_iter = TokenTreeChildren::new(nested_tt).peekable();
let cfg = cfg::CfgExpr::parse_from_ast(&mut nested_tt_iter);
if cfg_options().check(&cfg) != Some(false) {
stack.push((nested_tt_iter, nested_tt.clone()));
}
} else {
callback(Either::Left((attr, tt.syntax(), range, &top_attr)))?;
loop {
let (mut meta, top_attr) = if let Some(it) = stack.pop() {
it
} else {
let attr = attrs.next()?;
match attr {
Either::Left(attr) => {
let Some(meta) = attr.meta() else { continue };
stack.push((meta, attr));
}
Either::Right(doc_comment) => {
if let ControlFlow::Break(break_value) = callback(Either::Right(doc_comment)) {
return Some(break_value);
}
}
}
} else if let Some(ast_meta) = top_attr.meta()
&& let Some(path) = ast_meta.path()
{
let path = AttrPath::extract(&path);
let meta = if let Some(tt) = ast_meta.token_tree() {
Meta::TokenTree { path, tt }
} else if let Some(value) = ast_meta.expr() {
let value =
if let ast::Expr::Literal(value) = value { Some(value.token()) } else { None };
let name =
if path.segments.len() == 1 { Some(path.segments[0].clone()) } else { None };
Meta::NamedKeyValue { name, value, path_range: path.range }
} else {
Meta::Path { path }
};
callback(Either::Left((
meta,
ast_meta.syntax(),
ast_meta.syntax().text_range(),
&top_attr,
)))?;
continue;
};
while let ast::Meta::UnsafeMeta(unsafe_meta) = &meta {
let Some(inner) = unsafe_meta.meta() else { continue };
meta = inner;
}
ControlFlow::Continue(())
});
result.break_value()
if let ast::Meta::CfgAttrMeta(meta) = meta {
let Some(cfg_predicate) = meta.cfg_predicate() else { continue };
let cfg_predicate = CfgExpr::parse_from_ast(cfg_predicate);
if cfg_options().check(&cfg_predicate) != Some(false) {
let prev_stack_len = stack.len();
stack.extend(meta.metas().map(|meta| (meta, top_attr.clone())));
stack[prev_stack_len..].reverse();
}
} else {
if let ControlFlow::Break(break_value) = callback(Either::Left((meta, top_attr))) {
return Some(break_value);
}
}
}
}
#[inline]
@@ -351,39 +180,33 @@ pub(crate) fn is_item_tree_filtered_attr(name: &str) -> bool {
pub fn collect_item_tree_attrs<'a, BreakValue>(
owner: &dyn ast::HasAttrs,
cfg_options: impl Fn() -> &'a CfgOptions,
mut on_attr: impl FnMut(Meta, &SyntaxNode, &ast::Attr, TextRange) -> ControlFlow<BreakValue>,
mut on_attr: impl FnMut(ast::Meta, ast::Attr) -> ControlFlow<BreakValue>,
) -> Option<Either<BreakValue, CfgExpr>> {
let attrs = ast::attrs_including_inner(owner);
expand_cfg_attr(
attrs,
|| cfg_options(),
|attr, container, range, top_attr| {
|attr, top_attr| {
// We filter builtin attributes that we don't need for nameres, because this saves memory.
// I only put the most common attributes, but if some attribute becomes common feel free to add it.
// Notice, however: for an attribute to be filtered out, it *must* not be shadowable with a macro!
let filter = match &attr {
Meta::NamedKeyValue { name: Some(name), .. } => {
is_item_tree_filtered_attr(name.text())
}
Meta::TokenTree { path, tt } if path.segments.len() == 1 => {
let name = path.segments[0].text();
if name == "cfg" {
let cfg =
CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable());
if cfg_options().check(&cfg) == Some(false) {
return ControlFlow::Break(Either::Right(cfg));
}
true
} else {
is_item_tree_filtered_attr(name)
ast::Meta::CfgMeta(attr) => {
let Some(cfg_predicate) = attr.cfg_predicate() else {
return ControlFlow::Continue(());
};
let cfg = CfgExpr::parse_from_ast(cfg_predicate);
if cfg_options().check(&cfg) == Some(false) {
return ControlFlow::Break(Either::Right(cfg));
}
true
}
Meta::Path { path } => {
path.segments.len() == 1 && is_item_tree_filtered_attr(path.segments[0].text())
}
_ => false,
_ => attr
.path()
.and_then(|path| path.as_one_segment())
.is_some_and(|segment| is_item_tree_filtered_attr(&segment)),
};
if !filter && let ControlFlow::Break(v) = on_attr(attr, container, top_attr, range) {
if !filter && let ControlFlow::Break(v) = on_attr(attr, top_attr) {
return ControlFlow::Break(Either::Left(v));
}
ControlFlow::Continue(())
@@ -540,34 +363,32 @@ pub fn item_tree_index(self) -> u32 {
}
/// Returns the containing `ast::Attr` (note that it may contain other attributes as well due
/// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the
/// attribute, and its desugared [`Meta`].
/// to `cfg_attr`) and its [`ast::Meta`].
pub fn find_attr_range<N: ast::HasAttrs>(
self,
db: &dyn ExpandDatabase,
krate: Crate,
owner: AstId<N>,
) -> (ast::Attr, SyntaxNode, TextRange, Meta) {
) -> (ast::Attr, ast::Meta) {
self.find_attr_range_with_source(db, krate, &owner.to_node(db))
}
/// Returns the containing `ast::Attr` (note that it may contain other attributes as well due
/// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the
/// attribute, and its desugared [`Meta`].
/// to `cfg_attr`) and its [`ast::Meta`].
pub fn find_attr_range_with_source(
self,
db: &dyn ExpandDatabase,
krate: Crate,
owner: &dyn ast::HasAttrs,
) -> (ast::Attr, SyntaxNode, TextRange, Meta) {
) -> (ast::Attr, ast::Meta) {
let cfg_options = OnceCell::new();
let mut index = 0;
let result = collect_item_tree_attrs(
owner,
|| cfg_options.get_or_init(|| krate.cfg_options(db)),
|meta, container, top_attr, range| {
|meta, top_attr| {
if index == self.id {
return ControlFlow::Break((top_attr.clone(), container.clone(), range, meta));
return ControlFlow::Break((top_attr, meta));
}
index += 1;
ControlFlow::Continue(())
@@ -588,9 +409,12 @@ pub fn find_derive_range(
owner: AstId<ast::Adt>,
derive_index: u32,
) -> TextRange {
let (_, _, derive_attr_range, derive_attr) = self.find_attr_range(db, krate, owner);
let Meta::TokenTree { tt, .. } = derive_attr else {
return derive_attr_range;
let (_, derive_attr) = self.find_attr_range(db, krate, owner);
let ast::Meta::TokenTreeMeta(derive_attr) = derive_attr else {
return derive_attr.syntax().text_range();
};
let Some(tt) = derive_attr.token_tree() else {
return derive_attr.syntax().text_range();
};
// Fake the span map, as we don't really need spans here, just the offsets of the node in the file.
let span_map = RealSpanMap::absolute(span::EditionedFileId::current_edition(
@@ -605,11 +429,11 @@ pub fn find_derive_range(
let Some((_, _, derive_tts)) =
parse_path_comma_token_tree(db, &tt).nth(derive_index as usize)
else {
return derive_attr_range;
return derive_attr.syntax().text_range();
};
let (Some(first_span), Some(last_span)) = (derive_tts.first_span(), derive_tts.last_span())
else {
return derive_attr_range;
return derive_attr.syntax().text_range();
};
let start = first_span.range.start();
let end = last_span.range.end();
@@ -8,12 +8,12 @@
use smallvec::SmallVec;
use syntax::{
AstNode, PreorderWithTokens, SyntaxElement, SyntaxNode, SyntaxToken, WalkEvent,
ast::{self, HasAttrs, TokenTreeChildren},
ast::{self, HasAttrs},
};
use syntax_bridge::DocCommentDesugarMode;
use crate::{
attrs::{AttrId, Meta, expand_cfg_attr, is_item_tree_filtered_attr},
attrs::{AstPathExt, AttrId, expand_cfg_attr, is_item_tree_filtered_attr},
db::ExpandDatabase,
fixup::{self, SyntaxFixupUndoInfo},
span_map::SpanMapRef,
@@ -24,7 +24,7 @@
#[derive(Debug)]
struct ExpandedAttrToProcess {
range: TextRange,
attr: ast::Meta,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -143,42 +143,29 @@ fn macro_input_callback(
});
attrs_idx = 0;
let strip_current_item = expand_cfg_attr(
node_attrs,
&cfg_options,
|attr, _container, range, top_attr| {
let strip_current_item =
expand_cfg_attr(node_attrs, &cfg_options, |attr, top_attr| {
// Find the attr.
while attrs[attrs_idx].range != top_attr.syntax().text_range() {
attrs_idx += 1;
}
let mut strip_current_attr = false;
match attr {
Meta::NamedKeyValue { name, .. } => {
if name
.is_none_or(|name| !is_item_tree_filtered_attr(name.text()))
{
strip_current_attr = should_strip_attr();
}
}
Meta::TokenTree { path, tt } => {
if path.is1("cfg") {
let cfg_expr = CfgExpr::parse_from_ast(
&mut TokenTreeChildren::new(&tt).peekable(),
);
match &attr {
ast::Meta::CfgMeta(attr) => {
if let Some(cfg_predicate) = attr.cfg_predicate() {
let cfg_expr = CfgExpr::parse_from_ast(cfg_predicate);
if cfg_options().check(&cfg_expr) == Some(false) {
return ControlFlow::Break(ItemIsCfgedOut);
}
strip_current_attr = true;
} else if path.segments.len() != 1
|| !is_item_tree_filtered_attr(path.segments[0].text())
{
strip_current_attr = should_strip_attr();
}
}
Meta::Path { path } => {
if path.segments.len() != 1
|| !is_item_tree_filtered_attr(path.segments[0].text())
_ => {
if attr
.path()
.as_one_segment()
.is_none_or(|name| !is_item_tree_filtered_attr(&name))
{
strip_current_attr = should_strip_attr();
}
@@ -188,12 +175,11 @@ fn macro_input_callback(
if !strip_current_attr {
attrs[attrs_idx]
.expanded_attrs
.push(ExpandedAttrToProcess { range });
.push(ExpandedAttrToProcess { attr });
}
ControlFlow::Continue(())
},
);
});
attrs_idx = 0;
if strip_current_item.is_some() {
@@ -248,7 +234,7 @@ fn macro_input_callback(
};
match ast_attr.next_expanded_attr {
NextExpandedAttrState::NotStarted => {
if token_range.start() >= expanded_attr.range.start() {
if token_range.start() >= expanded_attr.attr.syntax().text_range().start() {
// We started the next attribute.
let mut insert_tokens = Vec::with_capacity(3);
insert_tokens.push(tt::Leaf::Punct(tt::Punct {
@@ -278,7 +264,7 @@ fn macro_input_callback(
}
}
NextExpandedAttrState::InTheMiddle => {
if token_range.start() >= expanded_attr.range.end() {
if token_range.start() >= expanded_attr.attr.syntax().text_range().end() {
// Finished the current attribute.
let insert_tokens = vec![tt::Leaf::Punct(tt::Punct {
char: ']',
@@ -329,12 +315,3 @@ pub(crate) fn attr_macro_input_to_token_tree(
fixups.undo_info,
)
}
pub fn check_cfg_attr_value(
db: &dyn ExpandDatabase,
attr: &ast::TokenTree,
krate: Crate,
) -> Option<bool> {
let cfg_expr = CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(attr).peekable());
krate.cfg_options(db).check(&cfg_expr)
}
@@ -11,7 +11,6 @@
AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, FileRange, HirFileId,
MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
attrs::Meta,
builtin::pseudo_derive_attr_expansion,
cfg_process::attr_macro_input_to_token_tree,
declarative::DeclarativeMacroExpander,
@@ -239,8 +238,15 @@ pub fn expand_speculative(
MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } => {
if loc.def.is_attribute_derive() {
// for pseudo-derive expansion we actually pass the attribute itself only
ast::Attr::cast(speculative_args.clone()).and_then(|attr| attr.token_tree()).map(
|token_tree| {
ast::Attr::cast(speculative_args.clone())
.and_then(|attr| {
if let ast::Meta::TokenTreeMeta(meta) = attr.meta()? {
meta.token_tree()
} else {
None
}
})
.map(|token_tree| {
let mut tree = syntax_node_to_token_tree(
token_tree.syntax(),
span_map,
@@ -250,26 +256,26 @@ pub fn expand_speculative(
tree.set_top_subtree_delimiter_kind(tt::DelimiterKind::Invisible);
tree.set_top_subtree_delimiter_span(tt::DelimSpan::from_single(span));
tree
},
)
})
} else {
// Attributes may have an input token tree, build the subtree and map for this as well
// then try finding a token id for our token if it is inside this input subtree.
let item = ast::Item::cast(speculative_args.clone())?;
let (_, _, _, meta) =
let (_, meta) =
attr_ids.invoc_attr().find_attr_range_with_source(db, loc.krate, &item);
match meta {
Meta::TokenTree { tt, .. } => {
let mut attr_arg = syntax_bridge::syntax_node_to_token_tree(
tt.syntax(),
span_map,
span,
DocCommentDesugarMode::ProcMacro,
);
attr_arg.set_top_subtree_delimiter_kind(tt::DelimiterKind::Invisible);
Some(attr_arg)
}
_ => None,
if let ast::Meta::TokenTreeMeta(meta) = meta
&& let Some(tt) = meta.token_tree()
{
let mut attr_arg = syntax_bridge::syntax_node_to_token_tree(
tt.syntax(),
span_map,
span,
DocCommentDesugarMode::ProcMacro,
);
attr_arg.set_top_subtree_delimiter_kind(tt::DelimiterKind::Invisible);
Some(attr_arg)
} else {
None
}
}
}
@@ -501,11 +507,11 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
}
MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
let node = ast_id.to_ptr(db).to_node(&root);
let range = attr_ids
.invoc_attr()
.find_attr_range_with_source(db, loc.krate, &node)
.3
.path_range();
let (_, attr) = attr_ids.invoc_attr().find_attr_range_with_source(db, loc.krate, &node);
let range = attr
.path()
.map(|path| path.syntax().text_range())
.unwrap_or_else(|| attr.syntax().text_range());
let span = map.span_for_range(range);
let is_derive = matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive());
@@ -6,7 +6,7 @@
use span::{Edition, Span, SyntaxContext};
use stdx::TupleExt;
use syntax::{
AstNode, AstToken,
AstNode,
ast::{self, HasAttrs},
};
use syntax_bridge::DocCommentDesugarMode;
@@ -15,7 +15,7 @@
use crate::{
AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId,
MacroCallStyle,
attrs::{Meta, expand_cfg_attr},
attrs::{AstKeyValueMetaExt, AstPathExt, expand_cfg_attr},
db::ExpandDatabase,
hygiene::{Transparency, apply_mark},
tt,
@@ -92,11 +92,10 @@ pub(crate) fn expander(
expand_cfg_attr(
node.attrs(),
|| cfg_options.get_or_init(|| def_crate.cfg_options(db)),
|attr, _, _, _| {
if let Meta::NamedKeyValue { name: Some(name), value, .. } = attr
&& name.text() == "rustc_macro_transparency"
&& let Some(value) = value.and_then(ast::String::cast)
&& let Ok(value) = value.value()
|attr, _| {
if let ast::Meta::KeyValueMeta(attr) = attr
&& attr.path().is1("rustc_macro_transparency")
&& let Some(value) = attr.value_string()
{
match &*value {
"transparent" => ControlFlow::Break(Transparency::Transparent),
@@ -58,7 +58,6 @@
};
pub use crate::{
cfg_process::check_cfg_attr_value,
files::{AstId, ErasedAstId, FileRange, InFile, InMacroFile, InRealFile},
prettify_macro_expansion_::prettify_macro_expansion,
};
@@ -635,14 +634,12 @@ pub fn to_node(&self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
ast_id.with_value(ast_id.to_node(db).syntax().clone())
}
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: handle `cfg_attr`
let (attr, _, _, _) = derive_attr_index.find_attr_range(db, self.krate, *ast_id);
let (_, attr) = derive_attr_index.find_attr_range(db, self.krate, *ast_id);
ast_id.with_value(attr.syntax().clone())
}
MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
if self.def.is_attribute_derive() {
let (attr, _, _, _) =
attr_ids.invoc_attr().find_attr_range(db, self.krate, *ast_id);
let (_, attr) = attr_ids.invoc_attr().find_attr_range(db, self.krate, *ast_id);
ast_id.with_value(attr.syntax().clone())
} else {
ast_id.with_value(ast_id.to_node(db).syntax().clone())
@@ -770,11 +767,11 @@ pub fn original_call_range(self, db: &dyn ExpandDatabase, krate: Crate) -> FileR
}
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: should be the range of the macro name, not the whole derive
derive_attr_index.find_attr_range(db, krate, ast_id).2
derive_attr_index.find_attr_range(db, krate, ast_id).1.syntax().text_range()
}
// FIXME: handle `cfg_attr`
MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
attr_ids.invoc_attr().find_attr_range(db, krate, ast_id).2
attr_ids.invoc_attr().find_attr_range(db, krate, ast_id).1.syntax().text_range()
}
};
@@ -1239,11 +1239,15 @@ fn emit_def_diagnostic_<'db>(
);
}
DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
let derive = id.find_attr_range(db, krate, *ast).3.path_range();
let (_, attr) = id.find_attr_range(db, krate, *ast);
let derive = attr
.path()
.map(|path| path.syntax().text_range())
.unwrap_or_else(|| attr.syntax().text_range());
acc.push(InvalidDeriveTarget { range: ast.with_value(derive) }.into());
}
DefDiagnosticKind::MalformedDerive { ast, id } => {
let derive = id.find_attr_range(db, krate, *ast).2;
let derive = id.find_attr_range(db, krate, *ast).1.syntax().text_range();
acc.push(MalformedDerive { range: ast.with_value(derive) }.into());
}
DefDiagnosticKind::MacroDefError { ast, message } => {
@@ -1283,7 +1287,8 @@ fn precise_macro_call_location(
ast_id.with_value(range)
}
MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
let attr_range = attr_ids.invoc_attr().find_attr_range(db, krate, *ast_id).2;
let attr_range =
attr_ids.invoc_attr().find_attr_range(db, krate, *ast_id).1.syntax().text_range();
ast_id.with_value(attr_range)
}
}
@@ -24,6 +24,7 @@
};
use hir_expand::{
EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
attrs::AstPathExt,
builtin::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
files::{FileRangeWrapper, HirFileRange, InRealFile},
@@ -298,14 +299,15 @@ pub fn lint_attrs(
hir_expand::attrs::expand_cfg_attr::<Infallible>(
extra_crate_attrs.chain(ast::attrs_including_inner(&item)),
cfg_options,
|attr, _, _, _| {
let hir_expand::attrs::Meta::TokenTree { path, tt } = attr else {
|attr, _| {
let ast::Meta::TokenTreeMeta(attr) = attr else {
return ControlFlow::Continue(());
};
if path.segments.len() != 1 {
let (Some(segment), Some(tt)) = (attr.path().as_one_segment(), attr.token_tree())
else {
return ControlFlow::Continue(());
}
let lint_attr = match path.segments[0].text() {
};
let lint_attr = match &*segment {
"allow" => LintAttr::Allow,
"expect" => LintAttr::Expect,
"warn" => LintAttr::Warn,
@@ -554,17 +556,6 @@ pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<InFile<Sy
Some(InFile::new(file_id.into(), node))
}
pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
let file_id = self.find_file(attr.syntax()).file_id;
let krate = match file_id {
HirFileId::FileId(file_id) => {
self.file_to_module_defs(file_id.file_id(self.db)).next()?.krate(self.db).id
}
HirFileId::MacroFile(macro_file) => self.db.lookup_intern_macro_call(macro_file).krate,
};
hir_expand::check_cfg_attr_value(self.db, attr, krate)
}
/// Expands the macro if it isn't one of the built-in ones that expand to custom syntax or dummy
/// expansions.
pub fn expand_allowed_builtins(
@@ -608,8 +599,8 @@ pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<InFile<
Some(self.expand(macro_call_id).map(|it| InFile::new(macro_call_id.into(), it)))
}
pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Meta) -> Option<SyntaxNode> {
let adt = attr.parent_attr()?.syntax().parent().and_then(ast::Adt::cast)?;
let src = self.wrap_node_infile(attr.clone());
let call_id = self.with_ctx(|ctx| {
ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
@@ -617,7 +608,7 @@ pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<Syn
Some(self.parse_or_expand(call_id.into()))
}
pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
pub fn resolve_derive_macro(&self, attr: &ast::Meta) -> Option<Vec<Option<Macro>>> {
let calls = self.derive_macro_calls(attr)?;
self.with_ctx(|ctx| {
Some(
@@ -644,7 +635,7 @@ pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>
pub fn expand_derive_macro(
&self,
attr: &ast::Attr,
attr: &ast::Meta,
) -> Option<Vec<Option<ExpandResult<SyntaxNode>>>> {
let res: Vec<_> = self
.derive_macro_calls(attr)?
@@ -662,9 +653,9 @@ pub fn expand_derive_macro(
fn derive_macro_calls(
&self,
attr: &ast::Attr,
attr: &ast::Meta,
) -> Option<Vec<Option<Either<MacroCallId, BuiltinDeriveImplId>>>> {
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
let adt = attr.parent_attr()?.syntax().parent().and_then(ast::Adt::cast)?;
let file_id = self.find_file(adt.syntax()).file_id;
let adt = InFile::new(file_id, &adt);
let src = InFile::new(file_id, attr.clone());
@@ -773,7 +764,11 @@ pub fn speculative_expand_derive_as_pseudo_attr_macro(
let attr = self.wrap_node_infile(actual_macro_call.clone());
let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
let macro_call_id = self.with_ctx(|ctx| {
ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
ctx.attr_to_derive_macro_call(
attr.with_value(&adt),
attr.with_value(attr.value.meta()?),
)
.map(|(_, it, _)| it)
})?;
hir_expand::db::expand_speculative(
self.db,
@@ -1328,7 +1323,7 @@ fn descend_into_macros_impl<T>(
// text ranges of the outer ones, and then all of the inner ones up
// to the invoking attribute so that the inbetween is ignored.
// FIXME: Should cfg_attr be handled differently?
let (attr, _, _, _) = attr_ids
let (attr, _) = attr_ids
.invoc_attr()
.find_attr_range_with_source(db, loc.krate, &item);
let start = attr.syntax().text_range().start();
@@ -1435,7 +1430,7 @@ fn descend_into_macros_impl<T>(
let derive_call = ctx
.attr_to_derive_macro_call(
InFile::new(expansion, &adt),
InFile::new(expansion, attr.clone()),
InFile::new(expansion, meta.clone()),
)?
.1;
@@ -126,8 +126,7 @@ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: Hi
calls.for_each(|(attr_id, call_id, calls)| {
// FIXME: Is this the right crate?
let krate = call_id.lookup(db).krate;
// FIXME: Fix cfg_attr handling.
let (attr, _, _, _) = attr_id.find_attr_range_with_source(db, krate, &adt);
let (_, attr) = attr_id.find_attr_range_with_source(db, krate, &adt);
res[keys::DERIVE_MACRO_CALL]
.insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
});
@@ -398,7 +398,7 @@ pub(super) fn label_ref_to_def(
pub(super) fn attr_to_derive_macro_call(
&mut self,
item: InFile<&ast::Adt>,
src: InFile<ast::Attr>,
src: InFile<ast::Meta>,
) -> Option<(AttrId, MacroCallId, &[Option<Either<MacroCallId, BuiltinDeriveImplId>>])> {
let map = self.dyn_map(item)?;
map[keys::DERIVE_MACRO_CALL]
@@ -423,6 +423,7 @@ impl Iterator<
let dyn_map = &map[keys::DERIVE_MACRO_CALL];
adt.value
.attrs()
.flat_map(|attr| attr.skip_cfg_attrs())
.filter_map(move |attr| dyn_map.get(&AstPtr::new(&attr)))
.map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids))
})
@@ -279,7 +279,7 @@ fn todo_fn(f: &ast::Fn, config: &AssistConfig) -> ast::Fn {
}
fn cfg_attrs(node: &impl HasAttrs) -> impl Iterator<Item = ast::Attr> {
node.attrs().filter(|attr| attr.as_simple_call().is_some_and(|(name, _arg)| name == "cfg"))
node.attrs().filter(|attr| matches!(attr.meta(), Some(ast::Meta::CfgMeta(_))))
}
#[cfg(test)]
@@ -68,9 +68,11 @@ pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
],
);
let delimiter = derive
.meta()
.expect("make::attr_outer was expected to have Meta")
let meta = derive.meta().expect("make::attr_outer was expected to have Meta");
let ast::Meta::TokenTreeMeta(meta) = meta else {
unreachable!("make::attr_outer was passed a token tree meta");
};
let delimiter = meta
.token_tree()
.expect("failed to get token tree out of Meta")
.r_paren_token()
@@ -121,9 +121,8 @@ pub(crate) fn generate_single_field_struct_from(
)
.indent_with_mapping(1.into(), &make);
let cfg_attrs = strukt
.attrs()
.filter(|attr| attr.as_simple_call().is_some_and(|(name, _arg)| name == "cfg"));
let cfg_attrs =
strukt.attrs().filter(|attr| matches!(attr.meta(), Some(ast::Meta::CfgMeta(_))));
let impl_ = make.impl_trait(
cfg_attrs,
@@ -64,9 +64,10 @@ pub(crate) fn replace_derive_with_manual_impl(
.filter_map(|attr| attr.path())
.collect::<Vec<_>>();
let adt = value.parent().and_then(ast::Adt::cast)?;
let attr = ast::Attr::cast(value)?;
let args = attr.token_tree()?;
let attr = ast::Meta::cast(value)?.parent_attr()?;
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
let ast::Meta::TokenTreeMeta(meta) = attr.meta()? else { return None };
let args = meta.token_tree()?;
let current_module = ctx.sema.scope(adt.syntax())?.module();
let current_crate = current_module.krate(ctx.db());
@@ -19,7 +19,7 @@
// ```
// ->
// ```
// #[cfg_attr($0, derive(Debug))]
// #[cfg_attr(${0:cfg}, derive(Debug))]
// struct S {
// field: i32
// }
@@ -147,12 +147,15 @@ pub(crate) fn wrap_unwrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>) -
}
}?;
match option {
WrapUnwrapOption::WrapAttr(attrs) => match &attrs[..] {
[attr] if attr.simple_name().as_deref() == Some("cfg_attr") => {
unwrap_cfg_attr(acc, attrs.into_iter().next().unwrap())
WrapUnwrapOption::WrapAttr(attrs) => {
if let [attr] = &attrs[..]
&& let Some(ast::Meta::CfgAttrMeta(meta)) = attr.meta()
{
unwrap_cfg_attr(acc, meta)
} else {
wrap_cfg_attrs(acc, ctx, attrs)
}
_ => wrap_cfg_attrs(acc, ctx, attrs),
},
}
WrapUnwrapOption::WrapDerive { derive, attr } => wrap_derive(acc, ctx, attr, derive),
}
}
@@ -164,7 +167,8 @@ fn wrap_derive(
derive_element: TextRange,
) -> Option<()> {
let range = attr.syntax().text_range();
let token_tree = attr.token_tree()?;
let ast::Meta::TokenTreeMeta(meta) = attr.meta()? else { return None };
let token_tree = meta.token_tree()?;
let mut path_text = String::new();
let mut cfg_derive_tokens = Vec::new();
@@ -193,20 +197,15 @@ fn wrap_derive(
let new_derive = make.attr_outer(
make.meta_token_tree(make.ident_path("derive"), make.token_tree(T!['('], new_derive)),
);
let meta = make.meta_token_tree(
make.ident_path("cfg_attr"),
make.token_tree(
T!['('],
vec![
NodeOrToken::Token(make.token(T![,])),
NodeOrToken::Token(make.whitespace(" ")),
NodeOrToken::Token(make.ident("derive")),
NodeOrToken::Node(make.token_tree(T!['('], cfg_derive_tokens)),
],
),
let meta = make.cfg_attr_meta(
make.cfg_flag("cfg"),
[make.meta_token_tree(
make.ident_path("derive"),
make.token_tree(T!['('], cfg_derive_tokens),
)],
);
let cfg_attr = make.attr_outer(meta);
let cfg_attr = make.attr_outer(meta.clone().into());
editor.replace_with_many(
attr.syntax(),
vec![
@@ -217,11 +216,10 @@ fn wrap_derive(
);
if let Some(snippet_cap) = ctx.config.snippet_cap
&& let Some(first_meta) =
cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
&& let Some(cfg_predicate) = meta.cfg_predicate()
{
let tabstop = edit.make_tabstop_after(snippet_cap);
editor.add_annotation(first_meta, tabstop);
let tabstop = edit.make_placeholder_snippet(snippet_cap);
editor.add_annotation(cfg_predicate.syntax(), tabstop);
}
editor.add_mappings(make.finish_with_mappings());
@@ -236,58 +234,29 @@ fn wrap_derive(
);
Some(())
}
fn wrap_cfg_attrs(acc: &mut Assists, ctx: &AssistContext<'_>, attrs: Vec<ast::Attr>) -> Option<()> {
let (first_attr, last_attr) = (attrs.first()?, attrs.last()?);
let range = first_attr.syntax().text_range().cover(last_attr.syntax().text_range());
let path_attrs =
attrs.iter().map(|attr| Some((attr.path()?, attr.clone()))).collect::<Option<Vec<_>>>()?;
let handle_source_change = |edit: &mut SourceChangeBuilder| {
let make = SyntaxFactory::with_mappings();
let mut editor = edit.make_editor(first_attr.syntax());
let mut raw_tokens = vec![];
for (path, attr) in path_attrs {
raw_tokens.extend([
NodeOrToken::Token(make.token(T![,])),
NodeOrToken::Token(make.whitespace(" ")),
]);
path.syntax().descendants_with_tokens().for_each(|it| {
if let NodeOrToken::Token(token) = it {
raw_tokens.push(NodeOrToken::Token(token));
}
});
if let Some(meta) = attr.meta() {
if let (Some(eq), Some(expr)) = (meta.eq_token(), meta.expr()) {
raw_tokens.push(NodeOrToken::Token(make.whitespace(" ")));
raw_tokens.push(NodeOrToken::Token(eq));
raw_tokens.push(NodeOrToken::Token(make.whitespace(" ")));
expr.syntax().descendants_with_tokens().for_each(|it| {
if let NodeOrToken::Token(token) = it {
raw_tokens.push(NodeOrToken::Token(token));
}
});
} else if let Some(tt) = meta.token_tree() {
raw_tokens.extend(tt.token_trees_and_tokens());
}
}
}
let meta =
make.meta_token_tree(make.ident_path("cfg_attr"), make.token_tree(T!['('], raw_tokens));
make.cfg_attr_meta(make.cfg_flag("cfg"), attrs.iter().filter_map(|attr| attr.meta()));
let cfg_attr = if first_attr.excl_token().is_some() {
make.attr_inner(meta)
make.attr_inner(meta.clone().into())
} else {
make.attr_outer(meta)
make.attr_outer(meta.clone().into())
};
let syntax_range = first_attr.syntax().clone().into()..=last_attr.syntax().clone().into();
editor.replace_all(syntax_range, vec![cfg_attr.syntax().clone().into()]);
if let Some(snippet_cap) = ctx.config.snippet_cap
&& let Some(first_meta) =
cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
&& let Some(cfg_flag) = meta.cfg_predicate()
{
let tabstop = edit.make_tabstop_after(snippet_cap);
editor.add_annotation(first_meta, tabstop);
let tabstop = edit.make_placeholder_snippet(snippet_cap);
editor.add_annotation(cfg_flag.syntax(), tabstop);
}
editor.add_mappings(make.finish_with_mappings());
@@ -301,66 +270,28 @@ fn wrap_cfg_attrs(acc: &mut Assists, ctx: &AssistContext<'_>, attrs: Vec<ast::At
);
Some(())
}
fn unwrap_cfg_attr(acc: &mut Assists, attr: ast::Attr) -> Option<()> {
let range = attr.syntax().text_range();
let meta = attr.meta()?;
let meta_tt = meta.token_tree()?;
let mut inner_attrs = Vec::with_capacity(1);
let mut found_comma = false;
let mut iter = meta_tt.token_trees_and_tokens().skip(1).peekable();
while let Some(tt) = iter.next() {
if let NodeOrToken::Token(token) = &tt {
if token.kind() == T![')'] {
break;
fn unwrap_cfg_attr(acc: &mut Assists, meta: ast::CfgAttrMeta) -> Option<()> {
let top_attr = ast::Meta::from(meta.clone()).parent_attr()?;
let range = top_attr.syntax().text_range();
let inner_attrs = meta
.metas()
.map(|meta| {
if top_attr.excl_token().is_some() {
make::attr_inner(meta)
} else {
make::attr_outer(meta)
}
if token.kind() == T![,] {
found_comma = true;
continue;
}
}
if !found_comma {
continue;
}
let Some(attr_name) = tt.into_token().and_then(|token| {
if token.kind() == T![ident] { Some(make::ext::ident_path(token.text())) } else { None }
}) else {
continue;
};
let next_tt = iter.next()?;
let meta = match next_tt {
NodeOrToken::Node(tt) => make::meta_token_tree(attr_name, tt),
NodeOrToken::Token(token) if token.kind() == T![,] || token.kind() == T![')'] => {
make::meta_path(attr_name)
}
NodeOrToken::Token(token) => {
let equals = algo::skip_trivia_token(token, syntax::Direction::Next)?;
if equals.kind() != T![=] {
return None;
}
let expr_token =
algo::skip_trivia_token(equals.next_token()?, syntax::Direction::Next)
.and_then(|it| {
if it.kind().is_literal() {
Some(make::expr_literal(it.text()))
} else {
None
}
})?;
make::meta_expr(attr_name, ast::Expr::Literal(expr_token))
}
};
if attr.excl_token().is_some() {
inner_attrs.push(make::attr_inner(meta));
} else {
inner_attrs.push(make::attr_outer(meta));
}
}
})
.collect::<Vec<_>>();
if inner_attrs.is_empty() {
return None;
}
let handle_source_change = |f: &mut SourceChangeBuilder| {
let inner_attrs =
inner_attrs.iter().map(|it| it.to_string()).join(&format!("\n{}", attr.indent_level()));
let inner_attrs = inner_attrs
.iter()
.map(|it| it.to_string())
.join(&format!("\n{}", top_attr.indent_level()));
f.replace(range, inner_attrs);
};
acc.add(
@@ -388,7 +319,7 @@ pub struct Test {
}
"#,
r#"
#[cfg_attr($0, derive(Debug))]
#[cfg_attr(${0:cfg}, derive(Debug))]
pub struct Test {
test: u32,
}
@@ -422,7 +353,7 @@ pub struct Test {
"#,
r#"
pub struct Test {
#[cfg_attr($0, foo)]
#[cfg_attr(${0:cfg}, foo)]
test: u32,
}
"#,
@@ -456,7 +387,7 @@ pub struct Test {
r#"
pub struct Test {
#[other_attr]
#[cfg_attr($0, foo, bar)]
#[cfg_attr(${0:cfg}, foo, bar)]
#[other_attr]
test: u32,
}
@@ -491,7 +422,7 @@ pub struct Test {
"#,
r#"
pub struct Test {
#[cfg_attr($0, foo = "bar")]
#[cfg_attr(${0:cfg}, foo = "bar")]
test: u32,
}
"#,
@@ -520,7 +451,7 @@ fn inner_attrs() {
#![no_std$0]
"#,
r#"
#![cfg_attr($0, no_std)]
#![cfg_attr(${0:cfg}, no_std)]
"#,
);
check_assist(
@@ -545,7 +476,7 @@ pub struct Test {
"#,
r#"
#[derive( Clone, Copy)]
#[cfg_attr($0, derive(Debug))]
#[cfg_attr(${0:cfg}, derive(Debug))]
pub struct Test {
test: u32,
}
@@ -561,7 +492,7 @@ pub struct Test {
"#,
r#"
#[derive(Clone, Copy)]
#[cfg_attr($0, derive(Debug))]
#[cfg_attr(${0:cfg}, derive(Debug))]
pub struct Test {
test: u32,
}
@@ -580,7 +511,7 @@ pub struct Test {
"#,
r#"
#[derive( Clone, Copy)]
#[cfg_attr($0, derive(std::fmt::Debug))]
#[cfg_attr(${0:cfg}, derive(std::fmt::Debug))]
pub struct Test {
test: u32,
}
@@ -596,7 +527,7 @@ pub struct Test {
"#,
r#"
#[derive(Clone, Copy)]
#[cfg_attr($0, derive(std::fmt::Debug))]
#[cfg_attr(${0:cfg}, derive(std::fmt::Debug))]
pub struct Test {
test: u32,
}
@@ -615,7 +546,7 @@ pub struct Test {
"#,
r#"
#[derive(std::fmt::Debug, Clone)]
#[cfg_attr($0, derive(Copy))]
#[cfg_attr(${0:cfg}, derive(Copy))]
pub struct Test {
test: u32,
}
@@ -631,7 +562,7 @@ pub struct Test {
"#,
r#"
#[derive(Clone, Copy)]
#[cfg_attr($0, derive(std::fmt::Debug))]
#[cfg_attr(${0:cfg}, derive(std::fmt::Debug))]
pub struct Test {
test: u32,
}
@@ -3852,7 +3852,7 @@ struct S {
}
"#####,
r#####"
#[cfg_attr($0, derive(Debug))]
#[cfg_attr(${0:cfg}, derive(Debug))]
struct S {
field: i32
}
@@ -598,9 +598,7 @@ fn generate_impl_text_inner(
// Copy any cfg attrs from the original adt
buf.push_str("\n\n");
let cfg_attrs = adt
.attrs()
.filter(|attr| attr.as_simple_call().map(|(name, _arg)| name == "cfg").unwrap_or(false));
let cfg_attrs = adt.attrs().filter(|attr| matches!(attr.meta(), Some(ast::Meta::CfgMeta(_))));
cfg_attrs.for_each(|attr| buf.push_str(&format!("{attr}\n")));
// `impl{generic_params} {trait_text} for {name}{generic_params.to_generic_args()}`
@@ -740,8 +738,7 @@ fn generate_impl_inner(
let ty = make::ty_path(make::ext::ident_path(&adt.name().unwrap().text()));
let cfg_attrs =
adt.attrs().filter(|attr| attr.as_simple_call().is_some_and(|(name, _arg)| name == "cfg"));
let cfg_attrs = adt.attrs().filter(|attr| matches!(attr.meta(), Some(ast::Meta::CfgMeta(_))));
match trait_ {
Some(trait_) => make::impl_trait(
cfg_attrs,
@@ -811,8 +808,7 @@ fn generate_impl_inner_with_factory(
let ty: ast::Type = make.ty_path(make.ident_path(&adt.name().unwrap().text())).into();
let cfg_attrs =
adt.attrs().filter(|attr| attr.as_simple_call().is_some_and(|(name, _arg)| name == "cfg"));
let cfg_attrs = adt.attrs().filter(|attr| matches!(attr.meta(), Some(ast::Meta::CfgMeta(_))));
match trait_ {
Some(trait_) => make.impl_trait(
cfg_attrs,
@@ -756,7 +756,7 @@ pub(super) fn complete_name_ref(
match &path_ctx.kind {
PathKind::Expr { expr_ctx } => {
expr::complete_expr_path(acc, ctx, path_ctx, expr_ctx);
expr::complete_expr(acc, ctx);
expr::complete_expr(acc, ctx, path_ctx);
dot::complete_undotted_self(acc, ctx, path_ctx, expr_ctx);
item_list::complete_item_list_in_expr(acc, ctx, path_ctx, expr_ctx);
@@ -30,6 +30,7 @@
mod macro_use;
mod repr;
pub(crate) use self::cfg::complete_cfg;
pub(crate) use self::derive::complete_derive_path;
/// Complete inputs to known builtin attributes as well as derive attributes
@@ -37,7 +38,7 @@ pub(crate) fn complete_known_attribute_input(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
&colon_prefix: &bool,
fake_attribute_under_caret: &ast::Attr,
fake_attribute_under_caret: &ast::TokenTreeMeta,
extern_crate: Option<&ast::ExternCrate>,
) -> Option<()> {
let attribute = fake_attribute_under_caret;
@@ -70,7 +71,6 @@ pub(crate) fn complete_known_attribute_input(
lint::complete_lint(acc, ctx, colon_prefix, &existing_lints, &lints);
}
["cfg"] | ["cfg_attr"] => cfg::complete_cfg(acc, ctx),
["macro_use"] => macro_use::complete_macro_use(
acc,
ctx,
@@ -451,7 +451,11 @@ pub(crate) fn complete_expr_path(
}
}
pub(crate) fn complete_expr(acc: &mut Completions, ctx: &CompletionContext<'_>) {
pub(crate) fn complete_expr(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
) {
let _p = tracing::info_span!("complete_expr").entered();
if !ctx.config.enable_term_search {
@@ -462,6 +466,10 @@ pub(crate) fn complete_expr(acc: &mut Completions, ctx: &CompletionContext<'_>)
return;
}
if !matches!(qualified, Qualified::No) {
return;
}
if let Some(ty) = &ctx.expected_type {
// Ignore unit types as they are not very interesting
if ty.is_unit() || ty.is_unknown() {
@@ -133,7 +133,8 @@ pub(crate) fn import_on_the_fly_path(
let potential_import_name = import_name(ctx);
let qualifier = match qualified {
Qualified::With { path, .. } => Some(path.clone()),
_ => None,
Qualified::TypeAnchor { .. } => return None,
Qualified::No | Qualified::Absolute => None,
};
let import_assets = import_assets_for_path(
ctx,
@@ -408,9 +408,11 @@ pub(crate) enum CompletionAnalysis<'db> {
/// Set if we are currently completing in an unexpanded attribute, this usually implies a builtin attribute like `allow($0)`
UnexpandedAttrTT {
colon_prefix: bool,
fake_attribute_under_caret: Option<ast::Attr>,
fake_attribute_under_caret: Option<ast::TokenTreeMeta>,
extern_crate: Option<ast::ExternCrate>,
},
/// Set if we are inside the predicate of a #[cfg] or #[cfg_attr].
CfgPredicate,
MacroSegment,
}
@@ -284,9 +284,12 @@ fn expand(
};
// Expand pseudo-derive expansion aka `derive(Debug$0)`
if let Some((orig_attr, spec_attr)) = attrs {
if let Some((orig_attr, spec_attr)) = attrs
&& let Some(orig_meta) = orig_attr.meta()
{
// FIXME: Support speculative expansion with `cfg_attr`.
if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) = (
sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
sema.expand_derive_as_pseudo_attr_macro(&orig_meta),
sema.speculative_expand_derive_as_pseudo_attr_macro(
&orig_attr,
&spec_attr,
@@ -463,7 +466,9 @@ fn analyze<'db>(
}
// Overwrite the path kind for derives
if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx
&& let Some(origin_meta) = origin_attr.meta()
{
if let Some(ast::NameLike::NameRef(name_ref)) =
find_node_at_offset(&file_with_fake_ident, offset)
{
@@ -473,7 +478,7 @@ fn analyze<'db>(
if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
path_ctx.kind = PathKind::Derive {
existing_derives: sema
.resolve_derive_macro(&origin_attr)
.resolve_derive_macro(&origin_meta)
.into_iter()
.flatten()
.flatten()
@@ -498,7 +503,7 @@ fn analyze<'db>(
let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
let p = token.parent()?;
if p.kind() == SyntaxKind::TOKEN_TREE
&& p.ancestors().any(|it| it.kind() == SyntaxKind::META)
&& p.ancestors().any(|it| it.kind() == SyntaxKind::TOKEN_TREE_META)
{
let colon_prefix = previous_non_trivia_token(self_token.clone())
.is_some_and(|it| T![:] == it.kind());
@@ -506,7 +511,7 @@ fn analyze<'db>(
CompletionAnalysis::UnexpandedAttrTT {
fake_attribute_under_caret: fake_ident_token
.parent_ancestors()
.find_map(ast::Attr::cast),
.find_map(ast::TokenTreeMeta::cast),
colon_prefix,
extern_crate: p.ancestors().find_map(ast::ExternCrate::cast),
}
@@ -525,6 +530,13 @@ fn analyze<'db>(
} else {
return None;
}
} else if find_node_at_offset::<ast::CfgPredicate>(
&speculative_file,
speculative_offset,
)
.is_some()
{
CompletionAnalysis::CfgPredicate
} else {
return None;
}
@@ -263,6 +263,7 @@ pub fn completions(
extern_crate.as_ref(),
);
}
CompletionAnalysis::CfgPredicate => completions::attribute::complete_cfg(acc, ctx),
CompletionAnalysis::MacroSegment => {
completions::macro_def::complete_macro_segment(acc, ctx);
}
@@ -2211,7 +2211,6 @@ fn bb()-> &'static aa {
}
"#,
expect![[r#"
ex bb() [type]
fn from_bytes() fn(&[u8]) -> &aa [type_could_unify]
"#]],
);
@@ -1030,8 +1030,6 @@ fn main() {
"#,
expect![[r#"
fn test() fn() -> Zulu
ex Zulu
ex Zulu::test()
"#]],
);
}
@@ -1242,6 +1242,39 @@ impl Bar for Foo {
);
}
#[test]
fn no_flyimports_type_anchor() {
check(
r#"
mod m {
pub fn foo() {}
}
struct Bar;
trait Foo {}
impl Foo for Bar {}
fn main() {
<Bar as Foo>::foo$0
}
"#,
expect![[r#""#]],
);
check(
r#"
mod m {
pub fn foo() {}
}
struct Bar;
trait Foo {}
impl Foo for Bar {}
fn main() {
<Bar>::foo$0
}
"#,
expect![[r#""#]],
);
}
#[test]
fn no_inherent_candidates_proposed() {
check(
@@ -896,9 +896,6 @@ fn bar() -> Bar {
"#,
expect![[r#"
fn foo() (as Foo) fn() -> Self
ex Bar
ex Bar::foo()
ex bar()
"#]],
);
}
@@ -926,9 +923,6 @@ fn bar() -> Bar {
expect![[r#"
fn bar() fn()
fn foo() (as Foo) fn() -> Self
ex Bar
ex Bar::foo()
ex bar()
"#]],
);
}
@@ -955,9 +949,6 @@ fn bar() -> Bar {
"#,
expect![[r#"
fn foo() (as Foo) fn() -> Self
ex Bar
ex Bar::foo()
ex bar()
"#]],
);
}

Some files were not shown because too many files have changed in this diff Show More