Auto merge of #152399 - matthiaskrgr:rollup-uDIDnAN, r=matthiaskrgr

Rollup of 12 pull requests

Successful merges:

 - rust-lang/rust#152388 (`rust-analyzer` subtree update)
 - rust-lang/rust#151613 (Align `ArrayWindows` trait impls with `Windows`)
 - rust-lang/rust#152134 (Set crt_static_allow_dylibs to true for Emscripten target)
 - rust-lang/rust#152166 (cleanup some more things in `proc_macro::bridge`)
 - rust-lang/rust#152236 (compiletest: `-Zunstable-options` for json targets)
 - rust-lang/rust#152287 (Fix an ICE in the vtable iteration for a trait reference in const eval when a supertrait is not implemented)
 - rust-lang/rust#142957 (std: introduce path normalize methods at top of `std::path`)
 - rust-lang/rust#145504 (Add some conversion trait impls)
 - rust-lang/rust#152131 (Port rustc_no_implicit_bounds attribute to parser.)
 - rust-lang/rust#152315 (fix: rhs_span to rhs_span_new)
 - rust-lang/rust#152327 (Check stalled coroutine obligations eagerly)
 - rust-lang/rust#152377 (Rename the query system's `JobOwner` to `ActiveJobGuard`, and include `key_hash`)
This commit is contained in:
bors
2026-02-09 19:19:04 +00:00
118 changed files with 1862 additions and 919 deletions
@@ -283,3 +283,12 @@ impl<S: Stage> NoArgsAttributeParser<S> for RustcPreserveUbChecksParser {
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcPreserveUbChecks;
}
pub(crate) struct RustcNoImplicitBoundsParser;
impl<S: Stage> NoArgsAttributeParser<S> for RustcNoImplicitBoundsParser {
const PATH: &[Symbol] = &[sym::rustc_no_implicit_bounds];
const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Warn;
const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::RustcNoImplicitBounds;
}
@@ -282,6 +282,7 @@ mod late {
Single<WithoutArgs<RustcMainParser>>,
Single<WithoutArgs<RustcNeverReturnsNullPointerParser>>,
Single<WithoutArgs<RustcNoImplicitAutorefsParser>>,
Single<WithoutArgs<RustcNoImplicitBoundsParser>>,
Single<WithoutArgs<RustcNonConstTraitMethodParser>>,
Single<WithoutArgs<RustcNounwindParser>>,
Single<WithoutArgs<RustcOffloadKernelParser>>,
@@ -1695,7 +1695,7 @@ fn suggest_ampmut<'tcx>(
&& let Either::Left(rhs_stmt_new) = body.stmt_at(*assign)
&& let StatementKind::Assign(box (_, rvalue_new)) = &rhs_stmt_new.kind
&& let rhs_span_new = rhs_stmt_new.source_info.span
&& let Ok(rhs_str_new) = tcx.sess.source_map().span_to_snippet(rhs_span)
&& let Ok(rhs_str_new) = tcx.sess.source_map().span_to_snippet(rhs_span_new)
{
(rvalue, rhs_span, rhs_str) = (rvalue_new, rhs_span_new, rhs_str_new);
}
+5
View File
@@ -121,6 +121,11 @@ fn mir_borrowck(
let (input_body, _) = tcx.mir_promoted(def);
debug!("run query mir_borrowck: {}", tcx.def_path_str(def));
// We should eagerly check stalled coroutine obligations from HIR typeck.
// Not doing so leads to silent normalization failures later, which will
// fail to register opaque types in the next solver.
tcx.check_coroutine_obligations(def)?;
let input_body: &Body<'_> = &input_body.borrow();
if let Some(guar) = input_body.tainted_by_errors {
debug!("Skipping borrowck because of tainted body");
@@ -1177,6 +1177,9 @@ pub enum AttributeKind {
/// Represents `#[rustc_no_implicit_autorefs]`
RustcNoImplicitAutorefs,
/// Represents `#[rustc_no_implicit_bounds]`
RustcNoImplicitBounds,
/// Represents `#[rustc_non_const_trait_method]`.
RustcNonConstTraitMethod,
@@ -137,6 +137,7 @@ pub fn encode_cross_crate(&self) -> EncodeCrossCrate {
RustcMustImplementOneOf { .. } => No,
RustcNeverReturnsNullPointer => Yes,
RustcNoImplicitAutorefs => Yes,
RustcNoImplicitBounds => No,
RustcNonConstTraitMethod => No, // should be reported via other queries like `constness`
RustcNounwind => No,
RustcObjcClass { .. } => No,
@@ -4,15 +4,16 @@
use rustc_errors::codes::*;
use rustc_errors::struct_span_code_err;
use rustc_hir as hir;
use rustc_hir::PolyTraitRef;
use rustc_hir::attrs::AttributeKind;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{CRATE_DEF_ID, DefId};
use rustc_hir::{PolyTraitRef, find_attr};
use rustc_middle::bug;
use rustc_middle::ty::{
self as ty, IsSuggestable, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt,
TypeVisitor, Upcast,
};
use rustc_span::{ErrorGuaranteed, Ident, Span, kw, sym};
use rustc_span::{ErrorGuaranteed, Ident, Span, kw};
use rustc_trait_selection::traits;
use smallvec::SmallVec;
use tracing::{debug, instrument};
@@ -170,7 +171,7 @@ pub(crate) fn add_implicit_sizedness_bounds(
let tcx = self.tcx();
// Skip adding any default bounds if `#![rustc_no_implicit_bounds]`
if tcx.has_attr(CRATE_DEF_ID, sym::rustc_no_implicit_bounds) {
if find_attr!(tcx.get_all_attrs(CRATE_DEF_ID), AttributeKind::RustcNoImplicitBounds) {
return;
}
@@ -284,7 +285,8 @@ fn should_add_default_traits<'a>(
context: ImpliedBoundsContext<'tcx>,
) -> bool {
let collected = collect_bounds(hir_bounds, context, trait_def_id);
!self.tcx().has_attr(CRATE_DEF_ID, sym::rustc_no_implicit_bounds) && !collected.any()
!find_attr!(self.tcx().get_all_attrs(CRATE_DEF_ID), AttributeKind::RustcNoImplicitBounds)
&& !collected.any()
}
fn reject_duplicate_relaxed_bounds(&self, relaxed_bounds: SmallVec<[&PolyTraitRef<'_>; 1]>) {
+7 -11
View File
@@ -1116,18 +1116,14 @@ fn run_required_analyses(tcx: TyCtxt<'_>) {
{
tcx.ensure_ok().mir_drops_elaborated_and_const_checked(def_id);
}
if tcx.is_coroutine(def_id.to_def_id()) {
tcx.ensure_ok().mir_coroutine_witnesses(def_id);
let _ = tcx.ensure_ok().check_coroutine_obligations(
tcx.typeck_root_def_id(def_id.to_def_id()).expect_local(),
if tcx.is_coroutine(def_id.to_def_id())
&& (!tcx.is_async_drop_in_place_coroutine(def_id.to_def_id()))
{
// Eagerly check the unsubstituted layout for cycles.
tcx.ensure_ok().layout_of(
ty::TypingEnv::post_analysis(tcx, def_id.to_def_id())
.as_query_input(tcx.type_of(def_id).instantiate_identity()),
);
if !tcx.is_async_drop_in_place_coroutine(def_id.to_def_id()) {
// Eagerly check the unsubstituted layout for cycles.
tcx.ensure_ok().layout_of(
ty::TypingEnv::post_analysis(tcx, def_id.to_def_id())
.as_query_input(tcx.type_of(def_id).instantiate_identity()),
);
}
}
});
});
+1 -2
View File
@@ -16,8 +16,7 @@ pub struct ImplicitCtxt<'a, 'tcx> {
/// The current `TyCtxt`.
pub tcx: TyCtxt<'tcx>,
/// The current query job, if any. This is updated by `JobOwner::start` in
/// `ty::query::plumbing` when executing a query.
/// The current query job, if any.
pub query: Option<QueryJobId>,
/// Used to prevent queries from calling too deeply.
+1 -1
View File
@@ -330,6 +330,7 @@ fn check_attributes(
| AttributeKind::RustcMir(_)
| AttributeKind::RustcNeverReturnsNullPointer
| AttributeKind::RustcNoImplicitAutorefs
| AttributeKind::RustcNoImplicitBounds
| AttributeKind::RustcNonConstTraitMethod
| AttributeKind::RustcNounwind
| AttributeKind::RustcObjcClass { .. }
@@ -413,7 +414,6 @@ fn check_attributes(
// crate-level attrs, are checked below
| sym::feature
| sym::register_tool
| sym::rustc_no_implicit_bounds
| sym::test_runner,
..
] => {}
+23 -17
View File
@@ -83,14 +83,18 @@ pub(crate) fn gather_active_jobs_inner<'tcx, K: Copy>(
Some(())
}
/// A type representing the responsibility to execute the job in the `job` field.
/// This will poison the relevant query if dropped.
struct JobOwner<'tcx, K>
/// Guard object representing the responsibility to execute a query job and
/// mark it as completed.
///
/// This will poison the relevant query key if it is dropped without calling
/// [`Self::complete`].
struct ActiveJobGuard<'tcx, K>
where
K: Eq + Hash + Copy,
{
state: &'tcx QueryState<'tcx, K>,
key: K,
key_hash: u64,
}
#[cold]
@@ -137,20 +141,19 @@ fn handle_cycle_error<'tcx, C: QueryCache, const FLAGS: QueryFlags>(
}
}
impl<'tcx, K> JobOwner<'tcx, K>
impl<'tcx, K> ActiveJobGuard<'tcx, K>
where
K: Eq + Hash + Copy,
{
/// Completes the query by updating the query cache with the `result`,
/// signals the waiter and forgets the JobOwner, so it won't poison the query
fn complete<C>(self, cache: &C, key_hash: u64, result: C::Value, dep_node_index: DepNodeIndex)
/// signals the waiter, and forgets the guard so it won't poison the query.
fn complete<C>(self, cache: &C, result: C::Value, dep_node_index: DepNodeIndex)
where
C: QueryCache<Key = K>,
{
let key = self.key;
let state = self.state;
// Forget ourself so our destructor won't poison the query
// Forget ourself so our destructor won't poison the query.
// (Extract fields by value first to make sure we don't leak anything.)
let Self { state, key, key_hash }: Self = self;
mem::forget(self);
// Mark as complete before we remove the job from the active state
@@ -174,7 +177,7 @@ fn complete<C>(self, cache: &C, key_hash: u64, result: C::Value, dep_node_index:
}
}
impl<'tcx, K> Drop for JobOwner<'tcx, K>
impl<'tcx, K> Drop for ActiveJobGuard<'tcx, K>
where
K: Eq + Hash + Copy,
{
@@ -182,11 +185,10 @@ impl<'tcx, K> Drop for JobOwner<'tcx, K>
#[cold]
fn drop(&mut self) {
// Poison the query so jobs waiting on it panic.
let state = self.state;
let Self { state, key, key_hash } = *self;
let job = {
let key_hash = sharded::make_hash(&self.key);
let mut shard = state.active.lock_shard_by_hash(key_hash);
match shard.find_entry(key_hash, equivalent_key(&self.key)) {
match shard.find_entry(key_hash, equivalent_key(&key)) {
Err(_) => panic!(),
Ok(occupied) => {
let ((key, value), vacant) = occupied.remove();
@@ -342,11 +344,13 @@ fn execute_job<'tcx, C: QueryCache, const FLAGS: QueryFlags, const INCR: bool>(
id: QueryJobId,
dep_node: Option<DepNode>,
) -> (C::Value, Option<DepNodeIndex>) {
// Use `JobOwner` so the query will be poisoned if executing it panics.
let job_owner = JobOwner { state, key };
// Set up a guard object that will automatically poison the query if a
// panic occurs while executing the query (or any intermediate plumbing).
let job_guard = ActiveJobGuard { state, key, key_hash };
debug_assert_eq!(qcx.tcx.dep_graph.is_fully_enabled(), INCR);
// Delegate to another function to actually execute the query job.
let (result, dep_node_index) = if INCR {
execute_job_incr(query, qcx, qcx.tcx.dep_graph.data().unwrap(), key, dep_node, id)
} else {
@@ -388,7 +392,9 @@ fn execute_job<'tcx, C: QueryCache, const FLAGS: QueryFlags, const INCR: bool>(
}
}
}
job_owner.complete(cache, key_hash, result, dep_node_index);
// Tell the guard to perform completion bookkeeping, and also to not poison the query.
job_guard.complete(cache, result, dep_node_index);
(result, Some(dep_node_index))
}
@@ -19,8 +19,15 @@ pub(crate) fn target() -> Target {
pre_link_args,
post_link_args,
relocation_model: RelocModel::Pic,
// crt_static should always be true for an executable and always false
// for a shared library. There is no easy way to indicate this and it
// doesn't seem to matter much so we set crt_static_allows_dylibs to
// true and leave crt_static as true when linking dynamic libraries.
// wasi also sets crt_static_allows_dylibs: true so this is at least
// aligned between wasm targets.
crt_static_respected: true,
crt_static_default: true,
crt_static_allows_dylibs: true,
panic_strategy: PanicStrategy::Unwind,
no_default_libraries: false,
families: cvs!["unix", "wasm"],
@@ -12,7 +12,7 @@
use smallvec::{SmallVec, smallvec};
use tracing::debug;
use crate::traits::{impossible_predicates, is_vtable_safe_method};
use crate::traits::is_vtable_safe_method;
#[derive(Clone, Debug)]
pub enum VtblSegment<'tcx> {
@@ -271,11 +271,7 @@ fn vtable_entries<'tcx>(
// do not hold for this particular set of type parameters.
// Note that this method could then never be called, so we
// do not want to try and codegen it, in that case (see #23435).
let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, args);
if impossible_predicates(
tcx,
predicates.map(|(predicate, _)| predicate).collect(),
) {
if tcx.instantiate_and_check_impossible_predicates((def_id, args)) {
debug!("vtable_entries: predicates do not hold");
return VtblEntry::Vacant;
}
+24
View File
@@ -689,6 +689,30 @@ impl<T: CoerceUnsized<U>, U> CoerceUnsized<Cell<U>> for Cell<T> {}
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: DispatchFromDyn<U>, U> DispatchFromDyn<Cell<U>> for Cell<T> {}
#[stable(feature = "more_conversion_trait_impls", since = "CURRENT_RUSTC_VERSION")]
impl<T, const N: usize> AsRef<[Cell<T>; N]> for Cell<[T; N]> {
#[inline]
fn as_ref(&self) -> &[Cell<T>; N] {
self.as_array_of_cells()
}
}
#[stable(feature = "more_conversion_trait_impls", since = "CURRENT_RUSTC_VERSION")]
impl<T, const N: usize> AsRef<[Cell<T>]> for Cell<[T; N]> {
#[inline]
fn as_ref(&self) -> &[Cell<T>] {
&*self.as_array_of_cells()
}
}
#[stable(feature = "more_conversion_trait_impls", since = "CURRENT_RUSTC_VERSION")]
impl<T> AsRef<[Cell<T>]> for Cell<[T]> {
#[inline]
fn as_ref(&self) -> &[Cell<T>] {
self.as_slice_of_cells()
}
}
impl<T> Cell<[T]> {
/// Returns a `&[Cell<T>]` from a `&Cell<[T]>`
///
+50
View File
@@ -1532,6 +1532,56 @@ pub const fn as_bytes_mut(&mut self) -> &mut [MaybeUninit<u8>] {
}
}
#[stable(feature = "more_conversion_trait_impls", since = "CURRENT_RUSTC_VERSION")]
impl<T, const N: usize> From<[MaybeUninit<T>; N]> for MaybeUninit<[T; N]> {
#[inline]
fn from(arr: [MaybeUninit<T>; N]) -> Self {
arr.transpose()
}
}
#[stable(feature = "more_conversion_trait_impls", since = "CURRENT_RUSTC_VERSION")]
impl<T, const N: usize> AsRef<[MaybeUninit<T>; N]> for MaybeUninit<[T; N]> {
#[inline]
fn as_ref(&self) -> &[MaybeUninit<T>; N] {
// SAFETY: T and MaybeUninit<T> have the same layout
unsafe { &*ptr::from_ref(self).cast() }
}
}
#[stable(feature = "more_conversion_trait_impls", since = "CURRENT_RUSTC_VERSION")]
impl<T, const N: usize> AsRef<[MaybeUninit<T>]> for MaybeUninit<[T; N]> {
#[inline]
fn as_ref(&self) -> &[MaybeUninit<T>] {
&*AsRef::<[MaybeUninit<T>; N]>::as_ref(self)
}
}
#[stable(feature = "more_conversion_trait_impls", since = "CURRENT_RUSTC_VERSION")]
impl<T, const N: usize> AsMut<[MaybeUninit<T>; N]> for MaybeUninit<[T; N]> {
#[inline]
fn as_mut(&mut self) -> &mut [MaybeUninit<T>; N] {
// SAFETY: T and MaybeUninit<T> have the same layout
unsafe { &mut *ptr::from_mut(self).cast() }
}
}
#[stable(feature = "more_conversion_trait_impls", since = "CURRENT_RUSTC_VERSION")]
impl<T, const N: usize> AsMut<[MaybeUninit<T>]> for MaybeUninit<[T; N]> {
#[inline]
fn as_mut(&mut self) -> &mut [MaybeUninit<T>] {
&mut *AsMut::<[MaybeUninit<T>; N]>::as_mut(self)
}
}
#[stable(feature = "more_conversion_trait_impls", since = "CURRENT_RUSTC_VERSION")]
impl<T, const N: usize> From<MaybeUninit<[T; N]>> for [MaybeUninit<T>; N] {
#[inline]
fn from(arr: MaybeUninit<[T; N]>) -> Self {
arr.transpose()
}
}
impl<T, const N: usize> [MaybeUninit<T>; N] {
/// Transposes a `[MaybeUninit<T>; N]` into a `MaybeUninit<[T; N]>`.
///
+33 -1
View File
@@ -2175,7 +2175,7 @@ unsafe impl<T> Sync for ChunksExactMut<'_, T> where T: Sync {}
///
/// [`array_windows`]: slice::array_windows
/// [slices]: slice
#[derive(Debug, Clone, Copy)]
#[derive(Debug)]
#[stable(feature = "array_windows", since = "1.94.0")]
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct ArrayWindows<'a, T: 'a, const N: usize> {
@@ -2189,6 +2189,14 @@ pub(super) const fn new(slice: &'a [T]) -> Self {
}
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "array_windows", since = "1.94.0")]
impl<T, const N: usize> Clone for ArrayWindows<'_, T, N> {
fn clone(&self) -> Self {
Self { v: self.v }
}
}
#[stable(feature = "array_windows", since = "1.94.0")]
impl<'a, T, const N: usize> Iterator for ArrayWindows<'a, T, N> {
type Item = &'a [T; N];
@@ -2224,6 +2232,14 @@ fn nth(&mut self, n: usize) -> Option<Self::Item> {
fn last(self) -> Option<Self::Item> {
self.v.last_chunk()
}
unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
// SAFETY: since the caller guarantees that `idx` is in bounds,
// which means that `idx` cannot overflow an `isize`, and the
// "slice" created by `cast_array` is a subslice of `self.v`
// thus is guaranteed to be valid for the lifetime `'a` of `self.v`.
unsafe { &*self.v.as_ptr().add(idx).cast_array() }
}
}
#[stable(feature = "array_windows", since = "1.94.0")]
@@ -2252,6 +2268,22 @@ fn is_empty(&self) -> bool {
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, const N: usize> TrustedLen for ArrayWindows<'_, T, N> {}
#[stable(feature = "array_windows", since = "1.94.0")]
impl<T, const N: usize> FusedIterator for ArrayWindows<'_, T, N> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<T, const N: usize> TrustedRandomAccess for ArrayWindows<'_, T, N> {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
unsafe impl<T, const N: usize> TrustedRandomAccessNoCoerce for ArrayWindows<'_, T, N> {
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
/// An iterator over a slice in (non-overlapping) chunks (`chunk_size` elements at a
/// time), starting at the end of the slice.
///
+2 -2
View File
@@ -121,7 +121,7 @@ impl Methods {
}
}
}
with_api!(self, define_client_side);
with_api!(define_client_side, TokenStream, Span, Symbol);
struct Bridge<'a> {
/// Reusable buffer (only `clear`-ed, never shrunk), primarily
@@ -129,7 +129,7 @@ struct Bridge<'a> {
cached_buffer: Buffer,
/// Server-side function that the client uses to make requests.
dispatch: closure::Closure<'a, Buffer, Buffer>,
dispatch: closure::Closure<'a>,
/// Provided globals for this macro expansion.
globals: ExpnGlobals<Span>,
+11 -9
View File
@@ -1,10 +1,12 @@
//! Closure type (equivalent to `&mut dyn FnMut(A) -> R`) that's `repr(C)`.
//! Closure type (equivalent to `&mut dyn FnMut(Buffer) -> Buffer`) that's `repr(C)`.
use std::marker::PhantomData;
use super::Buffer;
#[repr(C)]
pub(super) struct Closure<'a, A, R> {
call: unsafe extern "C" fn(*mut Env, A) -> R,
pub(super) struct Closure<'a> {
call: extern "C" fn(*mut Env, Buffer) -> Buffer,
env: *mut Env,
// Prevent Send and Sync impls.
//
@@ -14,17 +16,17 @@ pub(super) struct Closure<'a, A, R> {
struct Env;
impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for Closure<'a, A, R> {
impl<'a, F: FnMut(Buffer) -> Buffer> From<&'a mut F> for Closure<'a> {
fn from(f: &'a mut F) -> Self {
unsafe extern "C" fn call<A, R, F: FnMut(A) -> R>(env: *mut Env, arg: A) -> R {
extern "C" fn call<F: FnMut(Buffer) -> Buffer>(env: *mut Env, arg: Buffer) -> Buffer {
unsafe { (*(env as *mut _ as *mut F))(arg) }
}
Closure { call: call::<A, R, F>, env: f as *mut _ as *mut Env, _marker: PhantomData }
Closure { call: call::<F>, env: f as *mut _ as *mut Env, _marker: PhantomData }
}
}
impl<'a, A, R> Closure<'a, A, R> {
pub(super) fn call(&mut self, arg: A) -> R {
unsafe { (self.call)(self.env, arg) }
impl<'a> Closure<'a> {
pub(super) fn call(&mut self, arg: Buffer) -> Buffer {
(self.call)(self.env, arg)
}
}
+51 -53
View File
@@ -18,71 +18,67 @@
/// Higher-order macro describing the server RPC API, allowing automatic
/// generation of type-safe Rust APIs, both client-side and server-side.
///
/// `with_api!(MySelf, my_macro)` expands to:
/// `with_api!(my_macro, MyTokenStream, MySpan, MySymbol)` expands to:
/// ```rust,ignore (pseudo-code)
/// my_macro! {
/// fn lit_character(ch: char) -> MySelf::Literal;
/// fn lit_span(lit: &MySelf::Literal) -> MySelf::Span;
/// fn lit_set_span(lit: &mut MySelf::Literal, span: MySelf::Span);
/// fn ts_clone(stream: &MyTokenStream) -> MyTokenStream;
/// fn span_debug(span: &MySpan) -> String;
/// // ...
/// }
/// ```
///
/// The first argument serves to customize the argument/return types,
/// to enable several different usecases:
///
/// If `MySelf` is just `Self`, then the types are only valid inside
/// a trait or a trait impl, where the trait has associated types
/// for each of the API types. If non-associated types are desired,
/// a module name (`self` in practice) can be used instead of `Self`.
/// The second (`TokenStream`), third (`Span`) and fourth (`Symbol`)
/// argument serve to customize the argument/return types that need
/// special handling, to enable several different representations of
/// these types.
macro_rules! with_api {
($S:ident, $m:ident) => {
($m:ident, $TokenStream: path, $Span: path, $Symbol: path) => {
$m! {
fn injected_env_var(var: &str) -> Option<String>;
fn track_env_var(var: &str, value: Option<&str>);
fn track_path(path: &str);
fn literal_from_str(s: &str) -> Result<Literal<$S::Span, $S::Symbol>, ()>;
fn emit_diagnostic(diagnostic: Diagnostic<$S::Span>);
fn literal_from_str(s: &str) -> Result<Literal<$Span, $Symbol>, ()>;
fn emit_diagnostic(diagnostic: Diagnostic<$Span>);
fn ts_drop(stream: $S::TokenStream);
fn ts_clone(stream: &$S::TokenStream) -> $S::TokenStream;
fn ts_is_empty(stream: &$S::TokenStream) -> bool;
fn ts_expand_expr(stream: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
fn ts_from_str(src: &str) -> $S::TokenStream;
fn ts_to_string(stream: &$S::TokenStream) -> String;
fn ts_drop(stream: $TokenStream);
fn ts_clone(stream: &$TokenStream) -> $TokenStream;
fn ts_is_empty(stream: &$TokenStream) -> bool;
fn ts_expand_expr(stream: &$TokenStream) -> Result<$TokenStream, ()>;
fn ts_from_str(src: &str) -> $TokenStream;
fn ts_to_string(stream: &$TokenStream) -> String;
fn ts_from_token_tree(
tree: TokenTree<$S::TokenStream, $S::Span, $S::Symbol>,
) -> $S::TokenStream;
tree: TokenTree<$TokenStream, $Span, $Symbol>,
) -> $TokenStream;
fn ts_concat_trees(
base: Option<$S::TokenStream>,
trees: Vec<TokenTree<$S::TokenStream, $S::Span, $S::Symbol>>,
) -> $S::TokenStream;
base: Option<$TokenStream>,
trees: Vec<TokenTree<$TokenStream, $Span, $Symbol>>,
) -> $TokenStream;
fn ts_concat_streams(
base: Option<$S::TokenStream>,
streams: Vec<$S::TokenStream>,
) -> $S::TokenStream;
base: Option<$TokenStream>,
streams: Vec<$TokenStream>,
) -> $TokenStream;
fn ts_into_trees(
stream: $S::TokenStream
) -> Vec<TokenTree<$S::TokenStream, $S::Span, $S::Symbol>>;
stream: $TokenStream
) -> Vec<TokenTree<$TokenStream, $Span, $Symbol>>;
fn span_debug(span: $S::Span) -> String;
fn span_parent(span: $S::Span) -> Option<$S::Span>;
fn span_source(span: $S::Span) -> $S::Span;
fn span_byte_range(span: $S::Span) -> Range<usize>;
fn span_start(span: $S::Span) -> $S::Span;
fn span_end(span: $S::Span) -> $S::Span;
fn span_line(span: $S::Span) -> usize;
fn span_column(span: $S::Span) -> usize;
fn span_file(span: $S::Span) -> String;
fn span_local_file(span: $S::Span) -> Option<String>;
fn span_join(span: $S::Span, other: $S::Span) -> Option<$S::Span>;
fn span_subspan(span: $S::Span, start: Bound<usize>, end: Bound<usize>) -> Option<$S::Span>;
fn span_resolved_at(span: $S::Span, at: $S::Span) -> $S::Span;
fn span_source_text(span: $S::Span) -> Option<String>;
fn span_save_span(span: $S::Span) -> usize;
fn span_recover_proc_macro_span(id: usize) -> $S::Span;
fn span_debug(span: $Span) -> String;
fn span_parent(span: $Span) -> Option<$Span>;
fn span_source(span: $Span) -> $Span;
fn span_byte_range(span: $Span) -> Range<usize>;
fn span_start(span: $Span) -> $Span;
fn span_end(span: $Span) -> $Span;
fn span_line(span: $Span) -> usize;
fn span_column(span: $Span) -> usize;
fn span_file(span: $Span) -> String;
fn span_local_file(span: $Span) -> Option<String>;
fn span_join(span: $Span, other: $Span) -> Option<$Span>;
fn span_subspan(span: $Span, start: Bound<usize>, end: Bound<usize>) -> Option<$Span>;
fn span_resolved_at(span: $Span, at: $Span) -> $Span;
fn span_source_text(span: $Span) -> Option<String>;
fn span_save_span(span: $Span) -> usize;
fn span_recover_proc_macro_span(id: usize) -> $Span;
fn symbol_normalize_and_validate_ident(string: &str) -> Result<$S::Symbol, ()>;
fn symbol_normalize_and_validate_ident(string: &str) -> Result<$Symbol, ()>;
}
};
}
@@ -126,7 +122,7 @@ pub struct BridgeConfig<'a> {
input: Buffer,
/// Server-side function that the client uses to make requests.
dispatch: closure::Closure<'a, Buffer, Buffer>,
dispatch: closure::Closure<'a>,
/// If 'true', always invoke the default panic hook
force_show_panics: bool,
@@ -146,7 +142,7 @@ pub(super) enum ApiTags {
rpc_encode_decode!(enum ApiTags { $($method),* });
}
}
with_api!(self, declare_tags);
with_api!(declare_tags, __, __, __);
/// Helper to wrap associated types to allow trait impl dispatch.
/// That is, normally a pair of impls for `T::Foo` and `T::Bar`
@@ -173,7 +169,7 @@ fn unmark(self) -> Self::Unmarked {
self.value
}
}
impl<'a, T, M> Mark for &'a Marked<T, M> {
impl<'a, T> Mark for &'a Marked<T, client::TokenStream> {
type Unmarked = &'a T;
fn mark(_: Self::Unmarked) -> Self {
unreachable!()
@@ -220,6 +216,8 @@ fn unmark(self) -> Self::Unmarked {
Delimiter,
LitKind,
Level,
Bound<usize>,
Range<usize>,
}
rpc_encode_decode!(
@@ -318,7 +316,7 @@ macro_rules! compound_traits {
};
}
compound_traits!(
rpc_encode_decode!(
enum Bound<T> {
Included(x),
Excluded(x),
@@ -390,7 +388,7 @@ pub struct Literal<Span, Symbol> {
pub span: Span,
}
compound_traits!(struct Literal<Sp, Sy> { kind, symbol, suffix, span });
compound_traits!(struct Literal<Span, Symbol> { kind, symbol, suffix, span });
#[derive(Clone)]
pub enum TokenTree<TokenStream, Span, Symbol> {
@@ -434,6 +432,6 @@ pub struct ExpnGlobals<Span> {
struct ExpnGlobals<Span> { def_site, call_site, mixed_site }
);
compound_traits!(
rpc_encode_decode!(
struct Range<T> { start, end }
);
+25 -33
View File
@@ -52,45 +52,37 @@ fn decode(r: &mut &'a [u8], s: &mut S) -> Self {
}
};
(enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => {
impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
fn encode(self, w: &mut Buffer, s: &mut S) {
// HACK(eddyb): `Tag` enum duplicated between the
// two impls as there's no other place to stash it.
#[allow(non_camel_case_types)]
#[repr(u8)]
enum Tag { $($variant),* }
#[allow(non_upper_case_globals, non_camel_case_types)]
const _: () = {
#[repr(u8)] enum Tag { $($variant),* }
match self {
$($name::$variant $(($field))* => {
(Tag::$variant as u8).encode(w, s);
$($field.encode(w, s);)*
})*
$(const $variant: u8 = Tag::$variant as u8;)*
impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
fn encode(self, w: &mut Buffer, s: &mut S) {
match self {
$($name::$variant $(($field))* => {
$variant.encode(w, s);
$($field.encode(w, s);)*
})*
}
}
}
}
impl<'a, S, $($($T: for<'s> Decode<'a, 's, S>),+)?> Decode<'a, '_, S>
for $name $(<$($T),+>)?
{
fn decode(r: &mut &'a [u8], s: &mut S) -> Self {
// HACK(eddyb): `Tag` enum duplicated between the
// two impls as there's no other place to stash it.
#[allow(non_upper_case_globals, non_camel_case_types)]
mod tag {
#[repr(u8)] enum Tag { $($variant),* }
$(pub(crate) const $variant: u8 = Tag::$variant as u8;)*
}
match u8::decode(r, s) {
$(tag::$variant => {
$(let $field = Decode::decode(r, s);)*
$name::$variant $(($field))*
})*
_ => unreachable!(),
impl<'a, S, $($($T: for<'s> Decode<'a, 's, S>),+)?> Decode<'a, '_, S>
for $name $(<$($T),+>)?
{
fn decode(r: &mut &'a [u8], s: &mut S) -> Self {
match u8::decode(r, s) {
$($variant => {
$(let $field = Decode::decode(r, s);)*
$name::$variant $(($field))*
})*
_ => unreachable!(),
}
}
}
}
};
}
}
+22 -42
View File
@@ -38,47 +38,27 @@
use std::mem;
// FIXME(eddyb) this could be `trait` impls except for the `const fn` requirement.
macro_rules! define_reify_functions {
($(
fn $name:ident $(<$($param:ident),*>)?
for $(extern $abi:tt)? fn($($arg:ident: $arg_ty:ty),*) -> $ret_ty:ty;
)+) => {
$(pub(super) const fn $name<
$($($param,)*)?
F: Fn($($arg_ty),*) -> $ret_ty + Copy
>(f: F) -> $(extern $abi)? fn($($arg_ty),*) -> $ret_ty {
// FIXME(eddyb) describe the `F` type (e.g. via `type_name::<F>`) once panic
// formatting becomes possible in `const fn`.
const { assert!(size_of::<F>() == 0, "selfless_reify: closure must be zero-sized"); }
$(extern $abi)? fn wrapper<
$($($param,)*)?
F: Fn($($arg_ty),*) -> $ret_ty + Copy
>($($arg: $arg_ty),*) -> $ret_ty {
let f = unsafe {
// SAFETY: `F` satisfies all criteria for "out of thin air"
// reconstructability (see module-level doc comment).
mem::MaybeUninit::<F>::uninit().assume_init()
};
f($($arg),*)
}
let _f_proof = f;
wrapper::<
$($($param,)*)?
F
>
})+
pub(super) const fn reify_to_extern_c_fn_hrt_bridge<
R,
F: Fn(super::BridgeConfig<'_>) -> R + Copy,
>(
f: F,
) -> extern "C" fn(super::BridgeConfig<'_>) -> R {
// FIXME(eddyb) describe the `F` type (e.g. via `type_name::<F>`) once panic
// formatting becomes possible in `const fn`.
const {
assert!(size_of::<F>() == 0, "selfless_reify: closure must be zero-sized");
}
}
define_reify_functions! {
fn _reify_to_extern_c_fn_unary<A, R> for extern "C" fn(arg: A) -> R;
// HACK(eddyb) this abstraction is used with `for<'a> fn(BridgeConfig<'a>)
// -> T` but that doesn't work with just `reify_to_extern_c_fn_unary`
// because of the `fn` pointer type being "higher-ranked" (i.e. the
// `for<'a>` binder).
// FIXME(eddyb) try to remove the lifetime from `BridgeConfig`, that'd help.
fn reify_to_extern_c_fn_hrt_bridge<R> for extern "C" fn(bridge: super::BridgeConfig<'_>) -> R;
extern "C" fn wrapper<R, F: Fn(super::BridgeConfig<'_>) -> R + Copy>(
bridge: super::BridgeConfig<'_>,
) -> R {
let f = unsafe {
// SAFETY: `F` satisfies all criteria for "out of thin air"
// reconstructability (see module-level doc comment).
mem::conjure_zst::<F>()
};
f(bridge)
}
let _f_proof = f;
wrapper::<R, F>
}
+10 -14
View File
@@ -58,12 +58,12 @@ struct Dispatcher<S: Server> {
server: S,
}
macro_rules! define_server_dispatcher_impl {
macro_rules! define_server {
(
$(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
) => {
pub trait Server {
type TokenStream: 'static + Clone;
type TokenStream: 'static + Clone + Default;
type Span: 'static + Copy + Eq + Hash;
type Symbol: 'static;
@@ -77,22 +77,20 @@ pub trait Server {
$(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?;)*
}
}
}
with_api!(define_server, Self::TokenStream, Self::Span, Self::Symbol);
macro_rules! define_dispatcher {
(
$(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
) => {
// FIXME(eddyb) `pub` only for `ExecutionStrategy` below.
pub trait DispatcherTrait {
// HACK(eddyb) these are here to allow `Self::$name` to work below.
type TokenStream;
type Span;
type Symbol;
fn dispatch(&mut self, buf: Buffer) -> Buffer;
}
impl<S: Server> DispatcherTrait for Dispatcher<S> {
type TokenStream = MarkedTokenStream<S>;
type Span = MarkedSpan<S>;
type Symbol = MarkedSymbol<S>;
fn dispatch(&mut self, mut buf: Buffer) -> Buffer {
let Dispatcher { handle_store, server } = self;
@@ -127,7 +125,7 @@ fn dispatch(&mut self, mut buf: Buffer) -> Buffer {
}
}
}
with_api!(Self, define_server_dispatcher_impl);
with_api!(define_dispatcher, MarkedTokenStream<S>, MarkedSpan<S>, MarkedSymbol<S>);
pub trait ExecutionStrategy {
fn run_bridge_and_client(
@@ -312,7 +310,6 @@ pub fn run<S>(
) -> Result<S::TokenStream, PanicMessage>
where
S: Server,
S::TokenStream: Default,
{
let client::Client { handle_counters, run, _marker } = *self;
run_server(
@@ -338,7 +335,6 @@ pub fn run<S>(
) -> Result<S::TokenStream, PanicMessage>
where
S: Server,
S::TokenStream: Default,
{
let client::Client { handle_counters, run, _marker } = *self;
run_server(
+1
View File
@@ -27,6 +27,7 @@
#![feature(restricted_std)]
#![feature(rustc_attrs)]
#![feature(extend_one)]
#![feature(mem_conjure_zst)]
#![recursion_limit = "256"]
#![allow(internal_features)]
#![deny(ffi_unwind_calls)]
+14
View File
@@ -19,6 +19,20 @@
//! matter the platform or filesystem. An exception to this is made for Windows
//! drive letters.
//!
//! ## Path normalization
//!
//! Several methods in this module perform basic path normalization by disregarding
//! repeated separators, non-leading `.` components, and trailing separators. These include:
//! - Methods for iteration, such as [`Path::components`] and [`Path::iter`]
//! - Methods for inspection, such as [`Path::has_root`]
//! - Comparisons using [`PartialEq`], [`PartialOrd`], and [`Ord`]
//!
//! [`Path::join`] and [`PathBuf::push`] also disregard trailing slashes.
//!
// FIXME(normalize_lexically): mention normalize_lexically once stable
//! These methods **do not** resolve `..` components or symlinks. For full normalization
//! including `..` resolution, use [`Path::canonicalize`] (which does access the filesystem).
//!
//! ## Simple usage
//!
//! Path manipulation includes both parsing components from slices and building
+3 -1
View File
@@ -949,7 +949,9 @@ fn get_current_target_config(
// actually be changed with `-C` flags.
for config in query_rustc_output(
config,
&["--print=cfg", "--target", &config.target],
// `-Zunstable-options` is necessary when compiletest is running with custom targets
// (such as synthetic targets used to bless mir-opt tests).
&["-Zunstable-options", "--print=cfg", "--target", &config.target],
Default::default(),
)
.trim()
@@ -249,6 +249,7 @@
"only-unix",
"only-visionos",
"only-wasm32",
"only-wasm32-unknown-emscripten",
"only-wasm32-unknown-unknown",
"only-wasm32-wasip1",
"only-watchos",
+5
View File
@@ -1670,6 +1670,11 @@ fn make_compile_args(
if self.props.force_host { &*self.config.host } else { &*self.config.target };
compiler.arg(&format!("--target={}", target));
if target.ends_with(".json") {
// `-Zunstable-options` is necessary when compiletest is running with custom targets
// (such as synthetic targets used to bless mir-opt tests).
compiler.arg("-Zunstable-options");
}
}
self.set_revision_flags(&mut compiler);
+15 -15
View File
@@ -1614,9 +1614,9 @@ dependencies = [
[[package]]
name = "num-conv"
version = "0.1.0"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
checksum = "cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050"
[[package]]
name = "num-traits"
@@ -2453,9 +2453,9 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.25.2"
version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2e2aa2fca57727371eeafc975acc8e6f4c52f8166a78035543f6ee1c74c2dcc"
checksum = "f77debccd43ba198e9cee23efd7f10330ff445e46a98a2b107fed9094a1ee676"
dependencies = [
"boxcar",
"crossbeam-queue",
@@ -2478,15 +2478,15 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.25.2"
version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfc2a1e7bf06964105515451d728f2422dedc3a112383324a00b191a5c397a3"
checksum = "ea07adbf42d91cc076b7daf3b38bc8168c19eb362c665964118a89bc55ef19a5"
[[package]]
name = "salsa-macros"
version = "0.25.2"
version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d844c1aa34946da46af683b5c27ec1088a3d9d84a2b837a108223fd830220e1"
checksum = "d16d4d8b66451b9c75ddf740b7fc8399bc7b8ba33e854a5d7526d18708f67b05"
dependencies = [
"proc-macro2",
"quote",
@@ -2914,9 +2914,9 @@ dependencies = [
[[package]]
name = "time"
version = "0.3.44"
version = "0.3.47"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d"
checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c"
dependencies = [
"deranged",
"itoa",
@@ -2924,22 +2924,22 @@ dependencies = [
"num-conv",
"num_threads",
"powerfmt",
"serde",
"serde_core",
"time-core",
"time-macros",
]
[[package]]
name = "time-core"
version = "0.1.6"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b"
checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca"
[[package]]
name = "time-macros"
version = "0.2.24"
version = "0.2.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3"
checksum = "2e70e4c5a0e0a8a4823ad65dfe1a6930e4f4d756dcd9dd7939022b5e8c501215"
dependencies = [
"num-conv",
"time-core",
+2 -4
View File
@@ -135,13 +135,13 @@ rayon = "1.10.0"
rowan = "=0.15.17"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
salsa = { version = "0.25.2", default-features = false, features = [
salsa = { version = "0.26", default-features = false, features = [
"rayon",
"salsa_unstable",
"macros",
"inventory",
] }
salsa-macros = "0.25.2"
salsa-macros = "0.26"
semver = "1.0.26"
serde = { version = "1.0.219" }
serde_derive = { version = "1.0.219" }
@@ -192,8 +192,6 @@ unused_lifetimes = "warn"
unreachable_pub = "warn"
[workspace.lints.clippy]
# FIXME Remove the tidy test once the lint table is stable
## lint groups
complexity = { level = "warn", priority = -1 }
correctness = { level = "deny", priority = -1 }
@@ -60,7 +60,7 @@ fn hash<H: Hasher>(&self, state: &mut H) {
}
}
impl zalsa_struct_::HashEqLike<WithoutCrate> for EditionedFileIdData {
impl zalsa_::HashEqLike<WithoutCrate> for EditionedFileIdData {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
Hash::hash(self, state);
@@ -16,8 +16,6 @@ impl Edition {
pub const DEFAULT: Edition = Edition::Edition2015;
pub const LATEST: Edition = Edition::Edition2024;
pub const CURRENT: Edition = Edition::Edition2024;
/// The current latest stable edition, note this is usually not the right choice in code.
pub const CURRENT_FIXME: Edition = Edition::Edition2024;
pub fn from_u32(u32: u32) -> Edition {
match u32 {
@@ -426,7 +426,7 @@ pub struct ExprCollector<'db> {
/// and we need to find the current definition. So we track the number of definitions we saw.
current_block_legacy_macro_defs_count: FxHashMap<Name, usize>,
current_try_block_label: Option<LabelId>,
current_try_block: Option<TryBlock>,
label_ribs: Vec<LabelRib>,
unowned_bindings: Vec<BindingId>,
@@ -472,6 +472,13 @@ enum Awaitable {
No(&'static str),
}
enum TryBlock {
// `try { ... }`
Homogeneous { label: LabelId },
// `try bikeshed Ty { ... }`
Heterogeneous { label: LabelId },
}
#[derive(Debug, Default)]
struct BindingList {
map: FxHashMap<(Name, HygieneId), BindingId>,
@@ -532,7 +539,7 @@ pub fn new(
lang_items: OnceCell::new(),
store: ExpressionStoreBuilder::default(),
expander,
current_try_block_label: None,
current_try_block: None,
is_lowering_coroutine: false,
label_ribs: Vec::new(),
unowned_bindings: Vec::new(),
@@ -1069,7 +1076,9 @@ fn maybe_collect_expr(&mut self, expr: ast::Expr) -> Option<ExprId> {
self.alloc_expr(Expr::Let { pat, expr }, syntax_ptr)
}
ast::Expr::BlockExpr(e) => match e.modifier() {
Some(ast::BlockModifier::Try(_)) => self.desugar_try_block(e),
Some(ast::BlockModifier::Try { try_token: _, bikeshed_token: _, result_type }) => {
self.desugar_try_block(e, result_type)
}
Some(ast::BlockModifier::Unsafe(_)) => {
self.collect_block_(e, |id, statements, tail| Expr::Unsafe {
id,
@@ -1344,7 +1353,7 @@ fn maybe_collect_expr(&mut self, expr: ast::Expr) -> Option<ExprId> {
.map(|it| this.lower_type_ref_disallow_impl_trait(it));
let prev_is_lowering_coroutine = mem::take(&mut this.is_lowering_coroutine);
let prev_try_block_label = this.current_try_block_label.take();
let prev_try_block = this.current_try_block.take();
let awaitable = if e.async_token().is_some() {
Awaitable::Yes
@@ -1369,7 +1378,7 @@ fn maybe_collect_expr(&mut self, expr: ast::Expr) -> Option<ExprId> {
let capture_by =
if e.move_token().is_some() { CaptureBy::Value } else { CaptureBy::Ref };
this.is_lowering_coroutine = prev_is_lowering_coroutine;
this.current_try_block_label = prev_try_block_label;
this.current_try_block = prev_try_block;
this.alloc_expr(
Expr::Closure {
args: args.into(),
@@ -1686,11 +1695,15 @@ fn with_binding_owner(&mut self, create_expr: impl FnOnce(&mut Self) -> ExprId)
/// Desugar `try { <stmts>; <expr> }` into `'<new_label>: { <stmts>; ::std::ops::Try::from_output(<expr>) }`,
/// `try { <stmts>; }` into `'<new_label>: { <stmts>; ::std::ops::Try::from_output(()) }`
/// and save the `<new_label>` to use it as a break target for desugaring of the `?` operator.
fn desugar_try_block(&mut self, e: BlockExpr) -> ExprId {
fn desugar_try_block(&mut self, e: BlockExpr, result_type: Option<ast::Type>) -> ExprId {
let try_from_output = self.lang_path(self.lang_items().TryTraitFromOutput);
let label = self.generate_new_name();
let label = self.alloc_label_desugared(Label { name: label }, AstPtr::new(&e).wrap_right());
let old_label = self.current_try_block_label.replace(label);
let try_block_info = match result_type {
Some(_) => TryBlock::Heterogeneous { label },
None => TryBlock::Homogeneous { label },
};
let old_try_block = self.current_try_block.replace(try_block_info);
let ptr = AstPtr::new(&e).upcast();
let (btail, expr_id) = self.with_labeled_rib(label, HygieneId::ROOT, |this| {
@@ -1720,8 +1733,38 @@ fn desugar_try_block(&mut self, e: BlockExpr) -> ExprId {
unreachable!("block was lowered to non-block");
};
*tail = Some(next_tail);
self.current_try_block_label = old_label;
expr_id
self.current_try_block = old_try_block;
match result_type {
Some(ty) => {
// `{ let <name>: <ty> = <expr>; <name> }`
let name = self.generate_new_name();
let type_ref = self.lower_type_ref_disallow_impl_trait(ty);
let binding = self.alloc_binding(
name.clone(),
BindingAnnotation::Unannotated,
HygieneId::ROOT,
);
let pat = self.alloc_pat_desugared(Pat::Bind { id: binding, subpat: None });
self.add_definition_to_binding(binding, pat);
let tail_expr =
self.alloc_expr_desugared_with_ptr(Expr::Path(Path::from(name)), ptr);
self.alloc_expr_desugared_with_ptr(
Expr::Block {
id: None,
statements: Box::new([Statement::Let {
pat,
type_ref: Some(type_ref),
initializer: Some(expr_id),
else_branch: None,
}]),
tail: Some(tail_expr),
label: None,
},
ptr,
)
}
None => expr_id,
}
}
/// Desugar `ast::WhileExpr` from: `[opt_ident]: while <cond> <body>` into:
@@ -1863,6 +1906,8 @@ fn collect_for_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::ForExpr) -
/// ControlFlow::Continue(val) => val,
/// ControlFlow::Break(residual) =>
/// // If there is an enclosing `try {...}`:
/// break 'catch_target Residual::into_try_type(residual),
/// // If there is an enclosing `try bikeshed Ty {...}`:
/// break 'catch_target Try::from_residual(residual),
/// // Otherwise:
/// return Try::from_residual(residual),
@@ -1873,7 +1918,6 @@ fn collect_try_operator(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::TryExp
let try_branch = self.lang_path(lang_items.TryTraitBranch);
let cf_continue = self.lang_path(lang_items.ControlFlowContinue);
let cf_break = self.lang_path(lang_items.ControlFlowBreak);
let try_from_residual = self.lang_path(lang_items.TryTraitFromResidual);
let operand = self.collect_expr_opt(e.expr());
let try_branch = self.alloc_expr(try_branch.map_or(Expr::Missing, Expr::Path), syntax_ptr);
let expr = self
@@ -1910,13 +1954,23 @@ fn collect_try_operator(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::TryExp
guard: None,
expr: {
let it = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr);
let callee = self
.alloc_expr(try_from_residual.map_or(Expr::Missing, Expr::Path), syntax_ptr);
let convert_fn = match self.current_try_block {
Some(TryBlock::Homogeneous { .. }) => {
self.lang_path(lang_items.ResidualIntoTryType)
}
Some(TryBlock::Heterogeneous { .. }) | None => {
self.lang_path(lang_items.TryTraitFromResidual)
}
};
let callee =
self.alloc_expr(convert_fn.map_or(Expr::Missing, Expr::Path), syntax_ptr);
let result =
self.alloc_expr(Expr::Call { callee, args: Box::new([it]) }, syntax_ptr);
self.alloc_expr(
match self.current_try_block_label {
Some(label) => Expr::Break { expr: Some(result), label: Some(label) },
match self.current_try_block {
Some(
TryBlock::Heterogeneous { label } | TryBlock::Homogeneous { label },
) => Expr::Break { expr: Some(result), label: Some(label) },
None => Expr::Return { expr: Some(result) },
},
syntax_ptr,
@@ -893,6 +893,24 @@ pub(crate) fn update_visibility_macros(&mut self, name: &Name, vis: Visibility)
self.macros.get_mut(name).expect("tried to update visibility of non-existent macro");
res.vis = vis;
}
pub(crate) fn update_def_types(&mut self, name: &Name, def: ModuleDefId, vis: Visibility) {
let res = self.types.get_mut(name).expect("tried to update def of non-existent type");
res.def = def;
res.vis = vis;
}
pub(crate) fn update_def_values(&mut self, name: &Name, def: ModuleDefId, vis: Visibility) {
let res = self.values.get_mut(name).expect("tried to update def of non-existent value");
res.def = def;
res.vis = vis;
}
pub(crate) fn update_def_macros(&mut self, name: &Name, def: MacroId, vis: Visibility) {
let res = self.macros.get_mut(name).expect("tried to update def of non-existent macro");
res.def = def;
res.vis = vis;
}
}
impl PerNs {
@@ -456,6 +456,7 @@ pub fn from_symbol(symbol: &Symbol) -> Option<Self> {
TryTraitFromOutput, sym::from_output, FunctionId;
TryTraitBranch, sym::branch, FunctionId;
TryTraitFromYeet, sym::from_yeet, FunctionId;
ResidualIntoTryType, sym::into_try_type, FunctionId;
PointerLike, sym::pointer_like, TraitId;
@@ -1209,42 +1209,69 @@ fn push_res_and_update_glob_vis(
// `ItemScope::push_res_with_import()`.
if let Some(def) = defs.types
&& let Some(prev_def) = prev_defs.types
&& def.def == prev_def.def
&& self.from_glob_import.contains_type(module_id, name.clone())
&& def.vis != prev_def.vis
&& def.vis.max(self.db, prev_def.vis, &self.def_map) == Some(def.vis)
{
changed = true;
// This import is being handled here, don't pass it down to
// `ItemScope::push_res_with_import()`.
defs.types = None;
self.def_map.modules[module_id].scope.update_visibility_types(name, def.vis);
if def.def == prev_def.def
&& self.from_glob_import.contains_type(module_id, name.clone())
&& def.vis != prev_def.vis
&& def.vis.max(self.db, prev_def.vis, &self.def_map) == Some(def.vis)
{
changed = true;
// This import is being handled here, don't pass it down to
// `ItemScope::push_res_with_import()`.
defs.types = None;
self.def_map.modules[module_id].scope.update_visibility_types(name, def.vis);
}
// When the source module's definition changed (e.g., due to an explicit import
// shadowing a glob), propagate the new definition to modules that glob-import from it.
// We check that the previous definition came from the same glob import to avoid
// incorrectly overwriting definitions from different glob sources.
//
// Note this is not a perfect fix, but it makes
// https://github.com/rust-lang/rust-analyzer/issues/19224 work for now until we
// implement a proper glob graph
else if def.def != prev_def.def && prev_def.import == def_import_type {
changed = true;
defs.types = None;
self.def_map.modules[module_id].scope.update_def_types(name, def.def, def.vis);
}
}
if let Some(def) = defs.values
&& let Some(prev_def) = prev_defs.values
&& def.def == prev_def.def
&& self.from_glob_import.contains_value(module_id, name.clone())
&& def.vis != prev_def.vis
&& def.vis.max(self.db, prev_def.vis, &self.def_map) == Some(def.vis)
{
changed = true;
// See comment above.
defs.values = None;
self.def_map.modules[module_id].scope.update_visibility_values(name, def.vis);
if def.def == prev_def.def
&& self.from_glob_import.contains_value(module_id, name.clone())
&& def.vis != prev_def.vis
&& def.vis.max(self.db, prev_def.vis, &self.def_map) == Some(def.vis)
{
changed = true;
defs.values = None;
self.def_map.modules[module_id].scope.update_visibility_values(name, def.vis);
} else if def.def != prev_def.def
&& prev_def.import.map(ImportOrExternCrate::from) == def_import_type
{
changed = true;
defs.values = None;
self.def_map.modules[module_id].scope.update_def_values(name, def.def, def.vis);
}
}
if let Some(def) = defs.macros
&& let Some(prev_def) = prev_defs.macros
&& def.def == prev_def.def
&& self.from_glob_import.contains_macro(module_id, name.clone())
&& def.vis != prev_def.vis
&& def.vis.max(self.db, prev_def.vis, &self.def_map) == Some(def.vis)
{
changed = true;
// See comment above.
defs.macros = None;
self.def_map.modules[module_id].scope.update_visibility_macros(name, def.vis);
if def.def == prev_def.def
&& self.from_glob_import.contains_macro(module_id, name.clone())
&& def.vis != prev_def.vis
&& def.vis.max(self.db, prev_def.vis, &self.def_map) == Some(def.vis)
{
changed = true;
defs.macros = None;
self.def_map.modules[module_id].scope.update_visibility_macros(name, def.vis);
} else if def.def != prev_def.def && prev_def.import == def_import_type {
changed = true;
defs.macros = None;
self.def_map.modules[module_id].scope.update_def_macros(name, def.def, def.vis);
}
}
}
@@ -830,15 +830,18 @@ fn include_bytes_expand(
span: Span,
) -> ExpandResult<tt::TopSubtree> {
// FIXME: actually read the file here if the user asked for macro expansion
let res = tt::TopSubtree::invisible_from_leaves(
let underscore = sym::underscore;
let zero = tt::Literal {
text_and_suffix: sym::_0_u8,
span,
[tt::Leaf::Literal(tt::Literal {
text_and_suffix: Symbol::empty(),
span,
kind: tt::LitKind::ByteStrRaw(1),
suffix_len: 0,
})],
);
kind: tt::LitKind::Integer,
suffix_len: 3,
};
// We don't use a real length since we can't know the file length, so we use an underscore
// to infer it.
let res = quote! {span =>
&[#zero; #underscore]
};
ExpandResult::ok(res)
}
@@ -133,7 +133,7 @@ pub fn impl_trait<'db>(
}
}
#[salsa::tracked(returns(ref), unsafe(non_update_types))]
#[salsa::tracked(returns(ref))]
pub fn predicates<'db>(db: &'db dyn HirDatabase, impl_: BuiltinDeriveImplId) -> GenericPredicates {
let loc = impl_.loc(db);
let generic_params = GenericParams::new(db, loc.adt.into());
@@ -12,7 +12,6 @@
use hir_def::{
FindPathConfig, GenericDefId, GenericParamId, HasModule, LocalFieldId, Lookup, ModuleDefId,
ModuleId, TraitId,
db::DefDatabase,
expr_store::{ExpressionStore, path::Path},
find_path::{self, PrefixKind},
hir::generics::{TypeOrConstParamData, TypeParamProvenance, WherePredicate},
@@ -100,6 +99,9 @@ pub struct HirFormatter<'a, 'db> {
display_kind: DisplayKind,
display_target: DisplayTarget,
bounds_formatting_ctx: BoundsFormattingCtx<'db>,
/// Whether formatting `impl Trait1 + Trait2` or `dyn Trait1 + Trait2` needs parentheses around it,
/// for example when formatting `&(impl Trait1 + Trait2)`.
trait_bounds_need_parens: bool,
}
// FIXME: To consider, ref and dyn trait lifetimes can be omitted if they are `'_`, path args should
@@ -331,6 +333,7 @@ fn display_source_code<'a>(
show_container_bounds: false,
display_lifetimes: DisplayLifetime::OnlyNamedOrStatic,
bounds_formatting_ctx: Default::default(),
trait_bounds_need_parens: false,
}) {
Ok(()) => {}
Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"),
@@ -566,6 +569,7 @@ pub fn write_to<F: HirWrite>(&self, f: &mut F) -> Result {
show_container_bounds: self.show_container_bounds,
display_lifetimes: self.display_lifetimes,
bounds_formatting_ctx: Default::default(),
trait_bounds_need_parens: false,
})
}
@@ -612,7 +616,11 @@ fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result {
}
}
fn write_projection<'db>(f: &mut HirFormatter<'_, 'db>, alias: &AliasTy<'db>) -> Result {
fn write_projection<'db>(
f: &mut HirFormatter<'_, 'db>,
alias: &AliasTy<'db>,
needs_parens_if_multi: bool,
) -> Result {
if f.should_truncate() {
return write!(f, "{TYPE_HINT_TRUNCATION}");
}
@@ -650,6 +658,7 @@ fn write_projection<'db>(f: &mut HirFormatter<'_, 'db>, alias: &AliasTy<'db>) ->
Either::Left(Ty::new_alias(f.interner, AliasTyKind::Projection, *alias)),
&bounds,
SizedByDefault::NotSized,
needs_parens_if_multi,
)
});
}
@@ -1056,7 +1065,7 @@ fn hir_fmt(&self, f @ &mut HirFormatter { db, .. }: &mut HirFormatter<'_, 'db>)
return write!(f, "{TYPE_HINT_TRUNCATION}");
}
use TyKind;
let trait_bounds_need_parens = mem::replace(&mut f.trait_bounds_need_parens, false);
match self.kind() {
TyKind::Never => write!(f, "!")?,
TyKind::Str => write!(f, "str")?,
@@ -1077,103 +1086,34 @@ fn hir_fmt(&self, f @ &mut HirFormatter { db, .. }: &mut HirFormatter<'_, 'db>)
c.hir_fmt(f)?;
write!(f, "]")?;
}
kind @ (TyKind::RawPtr(t, m) | TyKind::Ref(_, t, m)) => {
if let TyKind::Ref(l, _, _) = kind {
f.write_char('&')?;
if f.render_region(l) {
l.hir_fmt(f)?;
f.write_char(' ')?;
}
TyKind::Ref(l, t, m) => {
f.write_char('&')?;
if f.render_region(l) {
l.hir_fmt(f)?;
f.write_char(' ')?;
}
match m {
rustc_ast_ir::Mutability::Not => (),
rustc_ast_ir::Mutability::Mut => f.write_str("mut ")?,
}
f.trait_bounds_need_parens = true;
t.hir_fmt(f)?;
f.trait_bounds_need_parens = false;
}
TyKind::RawPtr(t, m) => {
write!(
f,
"*{}",
match m {
rustc_ast_ir::Mutability::Not => (),
rustc_ast_ir::Mutability::Mut => f.write_str("mut ")?,
rustc_ast_ir::Mutability::Not => "const ",
rustc_ast_ir::Mutability::Mut => "mut ",
}
} else {
write!(
f,
"*{}",
match m {
rustc_ast_ir::Mutability::Not => "const ",
rustc_ast_ir::Mutability::Mut => "mut ",
}
)?;
}
)?;
// FIXME: all this just to decide whether to use parentheses...
let (preds_to_print, has_impl_fn_pred) = match t.kind() {
TyKind::Dynamic(bounds, region) => {
let contains_impl_fn =
bounds.iter().any(|bound| match bound.skip_binder() {
ExistentialPredicate::Trait(trait_ref) => {
let trait_ = trait_ref.def_id.0;
fn_traits(f.lang_items()).any(|it| it == trait_)
}
_ => false,
});
let render_lifetime = f.render_region(region);
(bounds.len() + render_lifetime as usize, contains_impl_fn)
}
TyKind::Alias(AliasTyKind::Opaque, ty) => {
let opaque_ty_id = match ty.def_id {
SolverDefId::InternedOpaqueTyId(id) => id,
_ => unreachable!(),
};
let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty_id);
if let ImplTraitId::ReturnTypeImplTrait(func, _) = impl_trait_id {
let data = impl_trait_id.predicates(db);
let bounds =
|| data.iter_instantiated_copied(f.interner, ty.args.as_slice());
let mut len = bounds().count();
// Don't count Sized but count when it absent
// (i.e. when explicit ?Sized bound is set).
let default_sized = SizedByDefault::Sized { anchor: func.krate(db) };
let sized_bounds = bounds()
.filter(|b| {
matches!(
b.kind().skip_binder(),
ClauseKind::Trait(trait_ref)
if default_sized.is_sized_trait(
trait_ref.def_id().0,
db,
),
)
})
.count();
match sized_bounds {
0 => len += 1,
_ => {
len = len.saturating_sub(sized_bounds);
}
}
let contains_impl_fn = bounds().any(|bound| {
if let ClauseKind::Trait(trait_ref) = bound.kind().skip_binder() {
let trait_ = trait_ref.def_id().0;
fn_traits(f.lang_items()).any(|it| it == trait_)
} else {
false
}
});
(len, contains_impl_fn)
} else {
(0, false)
}
}
_ => (0, false),
};
if has_impl_fn_pred && preds_to_print <= 2 {
return t.hir_fmt(f);
}
if preds_to_print > 1 {
write!(f, "(")?;
t.hir_fmt(f)?;
write!(f, ")")?;
} else {
t.hir_fmt(f)?;
}
f.trait_bounds_need_parens = true;
t.hir_fmt(f)?;
f.trait_bounds_need_parens = false;
}
TyKind::Tuple(tys) => {
if tys.len() == 1 {
@@ -1328,7 +1268,9 @@ fn hir_fmt(&self, f @ &mut HirFormatter { db, .. }: &mut HirFormatter<'_, 'db>)
hir_fmt_generics(f, parameters.as_slice(), Some(def.def_id().0.into()), None)?;
}
TyKind::Alias(AliasTyKind::Projection, alias_ty) => write_projection(f, &alias_ty)?,
TyKind::Alias(AliasTyKind::Projection, alias_ty) => {
write_projection(f, &alias_ty, trait_bounds_need_parens)?
}
TyKind::Foreign(alias) => {
let type_alias = db.type_alias_signature(alias.0);
f.start_location_link(alias.0.into());
@@ -1363,6 +1305,7 @@ fn hir_fmt(&self, f @ &mut HirFormatter { db, .. }: &mut HirFormatter<'_, 'db>)
Either::Left(*self),
&bounds,
SizedByDefault::Sized { anchor: krate },
trait_bounds_need_parens,
)?;
}
TyKind::Closure(id, substs) => {
@@ -1525,6 +1468,7 @@ fn hir_fmt(&self, f @ &mut HirFormatter { db, .. }: &mut HirFormatter<'_, 'db>)
Either::Left(*self),
&bounds,
SizedByDefault::Sized { anchor: krate },
trait_bounds_need_parens,
)?;
}
},
@@ -1567,6 +1511,7 @@ fn hir_fmt(&self, f @ &mut HirFormatter { db, .. }: &mut HirFormatter<'_, 'db>)
Either::Left(*self),
&bounds_to_display,
SizedByDefault::NotSized,
trait_bounds_need_parens,
)?;
}
TyKind::Error(_) => {
@@ -1806,11 +1751,11 @@ pub enum SizedByDefault {
}
impl SizedByDefault {
fn is_sized_trait(self, trait_: TraitId, db: &dyn DefDatabase) -> bool {
fn is_sized_trait(self, trait_: TraitId, interner: DbInterner<'_>) -> bool {
match self {
Self::NotSized => false,
Self::Sized { anchor } => {
let sized_trait = hir_def::lang_item::lang_items(db, anchor).Sized;
Self::Sized { .. } => {
let sized_trait = interner.lang_items().Sized;
Some(trait_) == sized_trait
}
}
@@ -1823,16 +1768,62 @@ pub fn write_bounds_like_dyn_trait_with_prefix<'db>(
this: Either<Ty<'db>, Region<'db>>,
predicates: &[Clause<'db>],
default_sized: SizedByDefault,
needs_parens_if_multi: bool,
) -> Result {
let needs_parens =
needs_parens_if_multi && trait_bounds_need_parens(f, this, predicates, default_sized);
if needs_parens {
write!(f, "(")?;
}
write!(f, "{prefix}")?;
if !predicates.is_empty()
|| predicates.is_empty() && matches!(default_sized, SizedByDefault::Sized { .. })
{
write!(f, " ")?;
write_bounds_like_dyn_trait(f, this, predicates, default_sized)
} else {
Ok(())
write_bounds_like_dyn_trait(f, this, predicates, default_sized)?;
}
if needs_parens {
write!(f, ")")?;
}
Ok(())
}
fn trait_bounds_need_parens<'db>(
f: &mut HirFormatter<'_, 'db>,
this: Either<Ty<'db>, Region<'db>>,
predicates: &[Clause<'db>],
default_sized: SizedByDefault,
) -> bool {
// This needs to be kept in sync with `write_bounds_like_dyn_trait()`.
let mut distinct_bounds = 0usize;
let mut is_sized = false;
for p in predicates {
match p.kind().skip_binder() {
ClauseKind::Trait(trait_ref) => {
let trait_ = trait_ref.def_id().0;
if default_sized.is_sized_trait(trait_, f.interner) {
is_sized = true;
if matches!(default_sized, SizedByDefault::Sized { .. }) {
// Don't print +Sized, but rather +?Sized if absent.
continue;
}
}
distinct_bounds += 1;
}
ClauseKind::TypeOutlives(to) if Either::Left(to.0) == this => distinct_bounds += 1,
ClauseKind::RegionOutlives(lo) if Either::Right(lo.0) == this => distinct_bounds += 1,
_ => {}
}
}
if let SizedByDefault::Sized { .. } = default_sized
&& !is_sized
{
distinct_bounds += 1;
}
distinct_bounds > 1
}
fn write_bounds_like_dyn_trait<'db>(
@@ -1855,7 +1846,7 @@ fn write_bounds_like_dyn_trait<'db>(
match p.kind().skip_binder() {
ClauseKind::Trait(trait_ref) => {
let trait_ = trait_ref.def_id().0;
if default_sized.is_sized_trait(trait_, f.db) {
if default_sized.is_sized_trait(trait_, f.interner) {
is_sized = true;
if matches!(default_sized, SizedByDefault::Sized { .. }) {
// Don't print +Sized, but rather +?Sized if absent.
@@ -831,6 +831,8 @@ fn lower_dyn_trait(&mut self, bounds: &[TypeBound]) -> Ty<'db> {
let mut ordered_associated_types = vec![];
if let Some(principal_trait) = principal {
// Generally we should not elaborate in lowering as this can lead to cycles, but
// here rustc cycles as well.
for clause in elaborate::elaborate(
interner,
[Clause::upcast_from(
@@ -1897,7 +1899,7 @@ impl<'db> GenericPredicates {
/// Resolve the where clause(s) of an item with generics.
///
/// Diagnostics are computed only for this item's predicates, not for parents.
#[salsa::tracked(returns(ref))]
#[salsa::tracked(returns(ref), cycle_result=generic_predicates_cycle_result)]
pub fn query_with_diagnostics(
db: &'db dyn HirDatabase,
def: GenericDefId,
@@ -1906,6 +1908,20 @@ pub fn query_with_diagnostics(
}
}
/// A cycle can occur from malformed code.
fn generic_predicates_cycle_result(
_db: &dyn HirDatabase,
_: salsa::Id,
_def: GenericDefId,
) -> (GenericPredicates, Diagnostics) {
(
GenericPredicates::from_explicit_own_predicates(StoredEarlyBinder::bind(
Clauses::default().store(),
)),
None,
)
}
impl GenericPredicates {
#[inline]
pub(crate) fn from_explicit_own_predicates(
@@ -2590,11 +2606,13 @@ pub(crate) fn associated_type_by_name_including_super_traits<'db>(
) -> Option<(TraitRef<'db>, TypeAliasId)> {
let module = trait_ref.def_id.0.module(db);
let interner = DbInterner::new_with(db, module.krate(db));
rustc_type_ir::elaborate::supertraits(interner, Binder::dummy(trait_ref)).find_map(|t| {
let trait_id = t.as_ref().skip_binder().def_id.0;
let assoc_type = trait_id.trait_items(db).associated_type_by_name(name)?;
Some((t.skip_binder(), assoc_type))
})
all_supertraits_trait_refs(db, trait_ref.def_id.0)
.map(|t| t.instantiate(interner, trait_ref.args))
.find_map(|t| {
let trait_id = t.def_id.0;
let assoc_type = trait_id.trait_items(db).associated_type_by_name(name)?;
Some((t, assoc_type))
})
}
pub fn associated_type_shorthand_candidates(
@@ -2723,3 +2741,96 @@ fn named_associated_type_shorthand_candidates<'db, R>(
_ => None,
}
}
/// During lowering, elaborating supertraits can cause cycles. To avoid that, we have a separate query
/// to only collect supertraits.
///
/// Technically, it is possible to avoid even more cycles by only collecting the `TraitId` of supertraits
/// without their args. However rustc doesn't do that, so we don't either.
pub(crate) fn all_supertraits_trait_refs(
db: &dyn HirDatabase,
trait_: TraitId,
) -> impl ExactSizeIterator<Item = EarlyBinder<'_, TraitRef<'_>>> {
let interner = DbInterner::new_no_crate(db);
return all_supertraits_trait_refs_query(db, trait_).iter().map(move |trait_ref| {
trait_ref.get_with(|(trait_, args)| {
TraitRef::new_from_args(interner, (*trait_).into(), args.as_ref())
})
});
#[salsa_macros::tracked(returns(deref), cycle_result = all_supertraits_trait_refs_cycle_result)]
pub(crate) fn all_supertraits_trait_refs_query(
db: &dyn HirDatabase,
trait_: TraitId,
) -> Box<[StoredEarlyBinder<(TraitId, StoredGenericArgs)>]> {
let resolver = trait_.resolver(db);
let signature = db.trait_signature(trait_);
let mut ctx = TyLoweringContext::new(
db,
&resolver,
&signature.store,
trait_.into(),
LifetimeElisionKind::AnonymousReportError,
);
let interner = ctx.interner;
let self_param_ty = Ty::new_param(
interner,
TypeParamId::from_unchecked(TypeOrConstParamId {
parent: trait_.into(),
local_id: Idx::from_raw(la_arena::RawIdx::from_u32(0)),
}),
0,
);
let mut supertraits = FxHashSet::default();
supertraits.insert(StoredEarlyBinder::bind((
trait_,
GenericArgs::identity_for_item(interner, trait_.into()).store(),
)));
for pred in signature.generic_params.where_predicates() {
let WherePredicate::TypeBound { target, bound } = pred else {
continue;
};
let target = &signature.store[*target];
if let TypeRef::TypeParam(param_id) = target
&& param_id.local_id().into_raw().into_u32() == 0
{
// This is `Self`.
} else if let TypeRef::Path(path) = target
&& path.is_self_type()
{
// Also `Self`.
} else {
// Not `Self`!
continue;
}
ctx.lower_type_bound(bound, self_param_ty, true).for_each(|(clause, _)| {
if let ClauseKind::Trait(trait_ref) = clause.kind().skip_binder() {
supertraits.extend(
all_supertraits_trait_refs(db, trait_ref.trait_ref.def_id.0).map(|t| {
let trait_ref = t.instantiate(interner, trait_ref.trait_ref.args);
StoredEarlyBinder::bind((trait_ref.def_id.0, trait_ref.args.store()))
}),
);
}
});
}
Box::from_iter(supertraits)
}
pub(crate) fn all_supertraits_trait_refs_cycle_result(
db: &dyn HirDatabase,
_: salsa::Id,
trait_: TraitId,
) -> Box<[StoredEarlyBinder<(TraitId, StoredGenericArgs)>]> {
let interner = DbInterner::new_no_crate(db);
Box::new([StoredEarlyBinder::bind((
trait_,
GenericArgs::identity_for_item(interner, trait_.into()).store(),
))])
}
}
@@ -608,7 +608,7 @@ trait Foo {}
fn test(f: impl Foo, g: &(impl Foo + ?Sized)) {
let _: &dyn Foo = &f;
let _: &dyn Foo = g;
//^ expected &'? (dyn Foo + 'static), got &'? impl Foo + ?Sized
//^ expected &'? (dyn Foo + 'static), got &'? (impl Foo + ?Sized)
}
"#,
);
@@ -111,7 +111,7 @@ fn test(
b;
//^ impl Foo
c;
//^ &impl Foo + ?Sized
//^ &(impl Foo + ?Sized)
d;
//^ S<impl Foo>
ref_any;
@@ -192,7 +192,7 @@ fn test(
b;
//^ fn(impl Foo) -> impl Foo
c;
} //^ fn(&impl Foo + ?Sized) -> &impl Foo + ?Sized
} //^ fn(&(impl Foo + ?Sized)) -> &(impl Foo + ?Sized)
"#,
);
}
@@ -2363,6 +2363,7 @@ fn test() {
}
"#,
expect![[r#"
46..49 'Foo': Foo<N>
93..97 'self': Foo<N>
108..125 '{ ... }': usize
118..119 'N': usize
@@ -2645,3 +2646,45 @@ fn bar(v: Self::Assoc) {
"#,
);
}
// Regression test: `HRESULT` is reachable both through glob re-exports
// (`use bindings::*`, `pub use error::*`) and through an explicit
// `pub use hresult::HRESULT`. Inference over `crate::HRESULT` must resolve
// consistently and report no type mismatches.
// NOTE(review): named after an upstream issue number — presumably
// rust-analyzer issue #21560; confirm against the tracker.
#[test]
fn issue_21560() {
check_no_mismatches(
r#"
mod bindings {
use super::*;
pub type HRESULT = i32;
}
use bindings::*;
mod error {
use super::*;
pub fn nonzero_hresult(hr: HRESULT) -> crate::HRESULT {
hr
}
}
pub use error::*;
mod hresult {
use super::*;
pub struct HRESULT(pub i32);
}
pub use hresult::HRESULT;
"#,
);
}
// Regression test: a trait whose generic parameter defaults to `Self` and
// whose bound refers back to the trait's own associated type
// (`F: FilterT<F, V = Self::V> = Self`) must be handled without reporting
// mismatches (and, implicitly, without diverging while resolving the
// self-referential default).
#[test]
fn regression_21577() {
check_no_mismatches(
r#"
pub trait FilterT<F: FilterT<F, V = Self::V> = Self> {
type V;
fn foo() {}
}
"#,
);
}
@@ -2152,10 +2152,11 @@ async fn main() {
let z: core::ops::ControlFlow<(), _> = try { () };
let w = const { 92 };
let t = 'a: { 92 };
let u = try bikeshed core::ops::ControlFlow<(), _> { () };
}
"#,
expect![[r#"
16..193 '{ ...2 }; }': ()
16..256 '{ ...) }; }': ()
26..27 'x': i32
30..43 'unsafe { 92 }': i32
39..41 '92': i32
@@ -2176,6 +2177,13 @@ async fn main() {
176..177 't': i32
180..190 ''a: { 92 }': i32
186..188 '92': i32
200..201 'u': ControlFlow<(), ()>
204..253 'try bi...{ () }': ControlFlow<(), ()>
204..253 'try bi...{ () }': fn from_output<ControlFlow<(), ()>>(<ControlFlow<(), ()> as Try>::Output) -> ControlFlow<(), ()>
204..253 'try bi...{ () }': ControlFlow<(), ()>
204..253 'try bi...{ () }': ControlFlow<(), ()>
204..253 'try bi...{ () }': ControlFlow<(), ()>
249..251 '()': ()
"#]],
)
}
@@ -4056,3 +4064,13 @@ fn foo() {
"#]],
);
}
// The declared array length (158) cannot be verified because the included
// file does not exist in the fixture; `include_bytes!` must not produce a
// spurious type-mismatch diagnostic in that situation.
#[test]
fn include_bytes_len_mismatch() {
check_no_mismatches(
r#"
//- minicore: include_bytes
static S: &[u8; 158] = include_bytes!("/foo/bar/baz.txt");
"#,
);
}
@@ -219,14 +219,16 @@ fn test() {
#[test]
fn infer_try_block() {
// FIXME: We should test more cases, but it currently doesn't work, since
// our labeled block type inference is broken.
check_types(
r#"
//- minicore: try, option
//- minicore: try, option, result, from
fn test() {
let x: Option<_> = try { Some(2)?; };
//^ Option<()>
let homogeneous = try { Ok::<(), u32>(())?; "hi" };
//^^^^^^^^^^^ Result<&'? str, u32>
let heterogeneous = try bikeshed Result<_, u64> { 1 };
//^^^^^^^^^^^^^ Result<i32, u64>
}
"#,
);
@@ -4819,7 +4821,7 @@ fn allowed3(baz: impl Baz<Assoc = Qux<impl Foo>>) {}
431..433 '{}': ()
447..450 'baz': impl Baz<Assoc = impl Foo>
480..482 '{}': ()
500..503 'baz': impl Baz<Assoc = &'a impl Foo + 'a>
500..503 'baz': impl Baz<Assoc = &'a (impl Foo + 'a)>
544..546 '{}': ()
560..563 'baz': impl Baz<Assoc = Qux<impl Foo>>
598..600 '{}': ()
@@ -22,6 +22,7 @@
TargetFeatures,
db::HirDatabase,
layout::{Layout, TagEncoding},
lower::all_supertraits_trait_refs,
mir::pad16,
};
@@ -62,23 +63,13 @@ pub(crate) fn fn_traits(lang_items: &LangItems) -> impl Iterator<Item = TraitId>
/// Returns an iterator over the whole super trait hierarchy (including the
/// trait itself).
pub fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> {
// we need to take care a bit here to avoid infinite loops in case of cycles
// (i.e. if we have `trait A: B; trait B: A;`)
let mut result = smallvec![trait_];
let mut i = 0;
while let Some(&t) = result.get(i) {
// yeah this is quadratic, but trait hierarchies should be flat
// enough that this doesn't matter
direct_super_traits_cb(db, t, |tt| {
if !result.contains(&tt) {
result.push(tt);
}
});
i += 1;
}
result
pub fn all_super_traits(db: &dyn HirDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> {
let mut supertraits = all_supertraits_trait_refs(db, trait_)
.map(|trait_ref| trait_ref.skip_binder().def_id.0)
.collect::<SmallVec<[_; _]>>();
supertraits.sort_unstable();
supertraits.dedup();
supertraits
}
fn direct_super_traits_cb(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(TraitId)) {
@@ -587,6 +587,7 @@ fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result {
Either::Left(ty),
&predicates,
SizedByDefault::Sized { anchor: krate },
false,
);
}
},
@@ -614,6 +615,7 @@ fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result {
Either::Left(ty),
&predicates,
default_sized,
false,
)?;
}
Ok(())
@@ -4233,6 +4233,10 @@ pub fn module(self, db: &dyn HirDatabase) -> Module {
self.parent(db).module(db)
}
pub fn as_id(self) -> u32 {
self.binding_id.into_raw().into_u32()
}
pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
let def = self.parent;
let infer = InferenceResult::for_body(db, def);
@@ -18,7 +18,6 @@
use itertools::Itertools;
use rustc_hash::FxHashSet;
use rustc_type_ir::inherent::Ty as _;
use span::Edition;
use crate::{
Adt, AssocItem, GenericDef, GenericParam, HasAttrs, HasVisibility, Impl, ModuleDef, ScopeDef,
@@ -367,7 +366,11 @@ pub(super) fn free_function<'a, 'lt, 'db, DB: HirDatabase>(
let ret_ty = it.ret_type_with_args(db, generics.iter().cloned());
// Filter out private and unsafe functions
if !it.is_visible_from(db, module)
|| it.is_unsafe_to_call(db, None, Edition::CURRENT_FIXME)
|| it.is_unsafe_to_call(
db,
None,
crate::Crate::from(ctx.scope.resolver().krate()).edition(db),
)
|| it.is_unstable(db)
|| ctx.config.enable_borrowcheck && ret_ty.contains_reference(db)
|| ret_ty.is_raw_ptr()
@@ -473,7 +476,11 @@ pub(super) fn impl_method<'a, 'lt, 'db, DB: HirDatabase>(
// Filter out private and unsafe functions
if !it.is_visible_from(db, module)
|| it.is_unsafe_to_call(db, None, Edition::CURRENT_FIXME)
|| it.is_unsafe_to_call(
db,
None,
crate::Crate::from(ctx.scope.resolver().krate()).edition(db),
)
|| it.is_unstable(db)
{
return None;
@@ -667,7 +674,11 @@ pub(super) fn impl_static_method<'a, 'lt, 'db, DB: HirDatabase>(
// Filter out private and unsafe functions
if !it.is_visible_from(db, module)
|| it.is_unsafe_to_call(db, None, Edition::CURRENT_FIXME)
|| it.is_unsafe_to_call(
db,
None,
crate::Crate::from(ctx.scope.resolver().krate()).edition(db),
)
|| it.is_unstable(db)
{
return None;
@@ -155,6 +155,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
&scope,
mod_path_to_ast(&import_path, edition),
&ctx.config.insert_use,
edition,
);
},
);
@@ -132,7 +132,7 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>)
);
}
if block.try_token().is_none()
if block.try_block_modifier().is_none()
&& block.unsafe_token().is_none()
&& block.label().is_none()
&& block.const_token().is_none()
@@ -859,7 +859,7 @@ fn analyze_container<'db>(
ast::BlockExpr(block_expr) => {
let (constness, block) = match block_expr.modifier() {
Some(ast::BlockModifier::Const(_)) => (true, block_expr),
Some(ast::BlockModifier::Try(_)) => (false, block_expr),
Some(ast::BlockModifier::Try { .. }) => (false, block_expr),
Some(ast::BlockModifier::Label(label)) if label.lifetime().is_some() => (false, block_expr),
_ => continue,
};
@@ -1147,14 +1147,7 @@ fn maybe_displayed_type(
if ty.is_reference() || ty.is_mutable_reference() {
let famous_defs = &FamousDefs(&ctx.sema, ctx.sema.scope(fn_arg.syntax())?.krate());
convert_reference_type(ty.strip_references(), ctx.db(), famous_defs)
.map(|conversion| {
conversion
.convert_type(
ctx.db(),
target_module.krate(ctx.db()).to_display_target(ctx.db()),
)
.to_string()
})
.map(|conversion| conversion.convert_type(ctx.db(), target_module).to_string())
.or_else(|| ty.display_source_code(ctx.db(), target_module.into(), true).ok())
} else {
ty.display_source_code(ctx.db(), target_module.into(), true).ok()
@@ -3187,6 +3180,28 @@ fn main() {
r#"
fn main() {
s.self$0();
}
"#,
);
}
#[test]
fn regression_21288() {
check_assist(
generate_function,
r#"
//- minicore: copy
fn foo() {
$0bar(&|x| true)
}
"#,
r#"
fn foo() {
bar(&|x| true)
}
fn bar(arg: impl Fn(_) -> bool) {
${0:todo!()}
}
"#,
);
@@ -226,15 +226,15 @@ fn generate_getter_from_info(
)
} else {
(|| {
let krate = ctx.sema.scope(record_field_info.field_ty.syntax())?.krate();
let famous_defs = &FamousDefs(&ctx.sema, krate);
let module = ctx.sema.scope(record_field_info.field_ty.syntax())?.module();
let famous_defs = &FamousDefs(&ctx.sema, module.krate(ctx.db()));
ctx.sema
.resolve_type(&record_field_info.field_ty)
.and_then(|ty| convert_reference_type(ty, ctx.db(), famous_defs))
.map(|conversion| {
cov_mark::hit!(convert_reference_type);
(
conversion.convert_type(ctx.db(), krate.to_display_target(ctx.db())),
conversion.convert_type(ctx.db(), module),
conversion.getter(record_field_info.field_name.to_string()),
)
})
@@ -1,6 +1,7 @@
use itertools::Itertools;
use itertools::{Itertools, chain};
use syntax::{
SyntaxKind::WHITESPACE,
TextRange,
ast::{
AstNode, BlockExpr, ElseBranch, Expr, IfExpr, MatchArm, Pat, edit::AstNodeEdit, make,
prec::ExprPrecedence, syntax_factory::SyntaxFactory,
@@ -44,13 +45,26 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext<'_>)
cov_mark::hit!(move_guard_inapplicable_in_arm_body);
return None;
}
let space_before_guard = guard.syntax().prev_sibling_or_token();
let rest_arms = rest_arms(&match_arm, ctx.selection_trimmed())?;
let space_before_delete = chain(
guard.syntax().prev_sibling_or_token(),
rest_arms.iter().filter_map(|it| it.syntax().prev_sibling_or_token()),
);
let space_after_arrow = match_arm.fat_arrow_token()?.next_sibling_or_token();
let guard_condition = guard.condition()?.reset_indent();
let arm_expr = match_arm.expr()?;
let then_branch = crate::utils::wrap_block(&arm_expr);
let if_expr = make::expr_if(guard_condition, then_branch, None).indent(arm_expr.indent_level());
let if_branch = chain([&match_arm], &rest_arms)
.rfold(None, |else_branch, arm| {
if let Some(guard) = arm.guard() {
let then_branch = crate::utils::wrap_block(&arm.expr()?);
let guard_condition = guard.condition()?.reset_indent();
Some(make::expr_if(guard_condition, then_branch, else_branch).into())
} else {
arm.expr().map(|it| crate::utils::wrap_block(&it).into())
}
})?
.indent(arm_expr.indent_level());
let ElseBranch::IfExpr(if_expr) = if_branch else { return None };
let target = guard.syntax().text_range();
acc.add(
@@ -59,10 +73,13 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext<'_>)
target,
|builder| {
let mut edit = builder.make_editor(match_arm.syntax());
if let Some(element) = space_before_guard
&& element.kind() == WHITESPACE
{
edit.delete(element);
for element in space_before_delete {
if element.kind() == WHITESPACE {
edit.delete(element);
}
}
for rest_arm in &rest_arms {
edit.delete(rest_arm.syntax());
}
if let Some(element) = space_after_arrow
&& element.kind() == WHITESPACE
@@ -221,6 +238,25 @@ pub(crate) fn move_arm_cond_to_match_guard(
)
}
/// Collects the arms that immediately follow `match_arm` in its match arm
/// list and can be folded into one `if`/`else if` chain: consecutive arms
/// that are covered by the user's selection (or everything, when the
/// selection is empty) and whose pattern text is identical to
/// `match_arm`'s pattern.
///
/// Returns `None` when the enclosing arm list cannot be found.
fn rest_arms(match_arm: &MatchArm, selection: TextRange) -> Option<Vec<MatchArm>> {
    let all_arms = match_arm.parent_match().match_arm_list()?.arms();

    // Drop everything up to and including the arm the assist was invoked on.
    let following = all_arms.skip_while(|arm| arm != match_arm).skip(1);

    // Keep only arms inside the selection; an empty selection selects all.
    let in_selection = following.take_while(move |arm| {
        selection.is_empty() || crate::utils::is_selected(arm, selection, false)
    });

    // Stop as soon as an arm's pattern differs textually from ours.
    let mergeable = in_selection.take_while(move |arm| {
        match arm.pat().zip(match_arm.pat()) {
            Some((theirs, ours)) => theirs.syntax().text() == ours.syntax().text(),
            None => false,
        }
    });

    Some(mergeable.collect())
}
// Parses an if-else-if chain to get the conditions and the then branches until we encounter an else
// branch or the end.
fn parse_if_chain(if_expr: IfExpr) -> Option<(Vec<(Expr, BlockExpr)>, Option<BlockExpr>)> {
@@ -344,6 +380,115 @@ fn main() {
);
}
// Exercises moving a guard into the arm body when several consecutive arms
// share the same pattern: their guards are merged into one if/else-if chain
// and the duplicate arms are removed.
#[test]
fn move_multiple_guard_to_arm_body_works() {
// Two guarded arms with identical patterns merge into `if ... else if ...`.
check_assist(
move_guard_to_arm_body,
r#"
fn main() {
match 92 {
x @ 0..30 $0if x % 3 == 0 => false,
x @ 0..30 if x % 2 == 0 => true,
_ => false
}
}
"#,
r#"
fn main() {
match 92 {
x @ 0..30 => if x % 3 == 0 {
false
} else if x % 2 == 0 {
true
},
_ => false
}
}
"#,
);
// A trailing unguarded arm with the same pattern becomes the final `else`.
check_assist(
move_guard_to_arm_body,
r#"
fn main() {
match 92 {
x @ 0..30 $0if x % 3 == 0 => false,
x @ 0..30 if x % 2 == 0 => true,
x @ 0..30 => false,
_ => true
}
}
"#,
r#"
fn main() {
match 92 {
x @ 0..30 => if x % 3 == 0 {
false
} else if x % 2 == 0 {
true
} else {
false
},
_ => true
}
}
"#,
);
// A selection covering only the second guard leaves the first arm intact
// and only rewrites the selected arm.
check_assist(
move_guard_to_arm_body,
r#"
fn main() {
match 92 {
x @ 0..30 if x % 3 == 0 => false,
x @ 0..30 $0if x % 2 == 0$0 => true,
x @ 0..30 => false,
_ => true
}
}
"#,
r#"
fn main() {
match 92 {
x @ 0..30 if x % 3 == 0 => false,
x @ 0..30 => if x % 2 == 0 {
true
},
x @ 0..30 => false,
_ => true
}
}
"#,
);
// A selection spanning both guarded arms merges exactly those two,
// leaving the later same-pattern unguarded arm alone.
check_assist(
move_guard_to_arm_body,
r#"
fn main() {
match 92 {
x @ 0..30 $0if x % 3 == 0 => false,
x @ 0..30 $0if x % 2 == 0 => true,
x @ 0..30 => false,
_ => true
}
}
"#,
r#"
fn main() {
match 92 {
x @ 0..30 => if x % 3 == 0 {
false
} else if x % 2 == 0 {
true
},
x @ 0..30 => false,
_ => true
}
}
"#,
);
}
#[test]
fn move_guard_to_block_arm_body_works() {
check_assist(
@@ -4,8 +4,7 @@
pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
use hir::{
DisplayTarget, HasAttrs as HirHasAttrs, HirDisplay, InFile, ModuleDef, PathResolution,
Semantics,
HasAttrs as HirHasAttrs, HirDisplay, InFile, ModuleDef, PathResolution, Semantics,
db::{ExpandDatabase, HirDatabase},
};
use ide_db::{
@@ -836,13 +835,12 @@ enum ReferenceConversionType {
}
impl<'db> ReferenceConversion<'db> {
pub(crate) fn convert_type(
&self,
db: &'db dyn HirDatabase,
display_target: DisplayTarget,
) -> ast::Type {
pub(crate) fn convert_type(&self, db: &'db dyn HirDatabase, module: hir::Module) -> ast::Type {
let ty = match self.conversion {
ReferenceConversionType::Copy => self.ty.display(db, display_target).to_string(),
ReferenceConversionType::Copy => self
.ty
.display_source_code(db, module.into(), true)
.unwrap_or_else(|_| "_".to_owned()),
ReferenceConversionType::AsRefStr => "&str".to_owned(),
ReferenceConversionType::AsRefSlice => {
let type_argument_name = self
@@ -850,8 +848,8 @@ pub(crate) fn convert_type(
.type_arguments()
.next()
.unwrap()
.display(db, display_target)
.to_string();
.display_source_code(db, module.into(), true)
.unwrap_or_else(|_| "_".to_owned());
format!("&[{type_argument_name}]")
}
ReferenceConversionType::Dereferenced => {
@@ -860,8 +858,8 @@ pub(crate) fn convert_type(
.type_arguments()
.next()
.unwrap()
.display(db, display_target)
.to_string();
.display_source_code(db, module.into(), true)
.unwrap_or_else(|_| "_".to_owned());
format!("&{type_argument_name}")
}
ReferenceConversionType::Option => {
@@ -870,16 +868,22 @@ pub(crate) fn convert_type(
.type_arguments()
.next()
.unwrap()
.display(db, display_target)
.to_string();
.display_source_code(db, module.into(), true)
.unwrap_or_else(|_| "_".to_owned());
format!("Option<&{type_argument_name}>")
}
ReferenceConversionType::Result => {
let mut type_arguments = self.ty.type_arguments();
let first_type_argument_name =
type_arguments.next().unwrap().display(db, display_target).to_string();
let second_type_argument_name =
type_arguments.next().unwrap().display(db, display_target).to_string();
let first_type_argument_name = type_arguments
.next()
.unwrap()
.display_source_code(db, module.into(), true)
.unwrap_or_else(|_| "_".to_owned());
let second_type_argument_name = type_arguments
.next()
.unwrap()
.display_source_code(db, module.into(), true)
.unwrap_or_else(|_| "_".to_owned());
format!("Result<&{first_type_argument_name}, &{second_type_argument_name}>")
}
};
@@ -151,6 +151,10 @@ pub(crate) fn complete_postfix(
.add_to(acc, ctx.db);
}
},
_ if is_in_cond => {
postfix_snippet("let", "let", &format!("let $1 = {receiver_text}"))
.add_to(acc, ctx.db);
}
_ if matches!(second_ancestor.kind(), STMT_LIST | EXPR_STMT) => {
postfix_snippet("let", "let", &format!("let $0 = {receiver_text};"))
.add_to(acc, ctx.db);
@@ -253,7 +257,6 @@ pub(crate) fn complete_postfix(
&format!("while {receiver_text} {{\n $0\n}}"),
)
.add_to(acc, ctx.db);
postfix_snippet("not", "!expr", &format!("!{receiver_text}")).add_to(acc, ctx.db);
} else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator()
&& receiver_ty.impls_trait(ctx.db, trait_, &[])
{
@@ -266,6 +269,10 @@ pub(crate) fn complete_postfix(
}
}
if receiver_ty.is_bool() || receiver_ty.is_unknown() {
postfix_snippet("not", "!expr", &format!("!{receiver_text}")).add_to(acc, ctx.db);
}
let block_should_be_wrapped = if let ast::Expr::BlockExpr(block) = dot_receiver {
block.modifier().is_some() || !block.is_standalone()
} else {
@@ -585,6 +592,31 @@ fn main() {
);
}
// Postfix snippets must be offered on an expression in `if`-condition
// position; in particular the `let` snippet (expanding to an `if let`
// binding) and `not` should both appear in the completion list.
// NOTE(review): the expected-output spacing below is copied verbatim from
// the rendered source; the expect! block is whitespace-sensitive.
#[test]
fn postfix_completion_works_in_if_condition() {
check(
r#"
fn foo(cond: bool) {
if cond.$0
}
"#,
expect![[r#"
sn box Box::new(expr)
sn call function(expr)
sn const const {}
sn dbg dbg!(expr)
sn dbgr dbg!(&expr)
sn deref *expr
sn let let
sn not !expr
sn ref &expr
sn refm &mut expr
sn return return expr
sn unsafe unsafe {}
"#]],
);
}
#[test]
fn postfix_type_filtering() {
check(
@@ -744,6 +776,25 @@ fn main() {
);
}
// Applying the `let` postfix snippet inside an `if` condition expands to
// `if let $1 = <receiver>` — with a tab-stop for the pattern and no
// trailing semicolon, unlike the statement-position expansion.
#[test]
fn iflet_fallback_cond() {
check_edit(
"let",
r#"
fn main() {
let bar = 2;
if bar.$0
}
"#,
r#"
fn main() {
let bar = 2;
if let $1 = bar
}
"#,
);
}
#[test]
fn option_letelse() {
check_edit(
@@ -146,9 +146,14 @@ pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
insert_use_with_alias_option(scope, path, cfg, None);
}
pub fn insert_use_as_alias(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
pub fn insert_use_as_alias(
scope: &ImportScope,
path: ast::Path,
cfg: &InsertUseConfig,
edition: span::Edition,
) {
let text: &str = "use foo as _";
let parse = syntax::SourceFile::parse(text, span::Edition::CURRENT_FIXME);
let parse = syntax::SourceFile::parse(text, edition);
let node = parse
.tree()
.syntax()
@@ -49,7 +49,7 @@ pub fn is_closure_or_blk_with_modif(expr: &ast::Expr) -> bool {
block_expr.modifier(),
Some(
ast::BlockModifier::Async(_)
| ast::BlockModifier::Try(_)
| ast::BlockModifier::Try { .. }
| ast::BlockModifier::Const(_)
)
)
@@ -148,7 +148,7 @@ pub fn walk_patterns_in_expr(start: &ast::Expr, cb: &mut dyn FnMut(ast::Pat)) {
block_expr.modifier(),
Some(
ast::BlockModifier::Async(_)
| ast::BlockModifier::Try(_)
| ast::BlockModifier::Try { .. }
| ast::BlockModifier::Const(_)
)
)
@@ -291,7 +291,7 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
match b.modifier() {
Some(
ast::BlockModifier::Async(_)
| ast::BlockModifier::Try(_)
| ast::BlockModifier::Try { .. }
| ast::BlockModifier::Const(_),
) => return cb(expr),
@@ -206,17 +206,18 @@ pub fn try_for_variable(
expr: &ast::Expr,
sema: &Semantics<'_, RootDatabase>,
) -> Option<SmolStr> {
let edition = sema.scope(expr.syntax())?.krate().edition(sema.db);
// `from_param` does not benefit from stripping it need the largest
// context possible so we check firstmost
if let Some(name) = from_param(expr, sema) {
if let Some(name) = from_param(expr, sema, edition) {
return Some(self.suggest_name(&name));
}
let mut next_expr = Some(expr.clone());
while let Some(expr) = next_expr {
let name = from_call(&expr)
.or_else(|| from_type(&expr, sema))
.or_else(|| from_field_name(&expr));
let name = from_call(&expr, edition)
.or_else(|| from_type(&expr, sema, edition))
.or_else(|| from_field_name(&expr, edition));
if let Some(name) = name {
return Some(self.suggest_name(&name));
}
@@ -270,7 +271,7 @@ fn split_numeric_suffix(name: &str) -> (&str, Option<usize>) {
}
}
fn normalize(name: &str) -> Option<SmolStr> {
fn normalize(name: &str, edition: syntax::Edition) -> Option<SmolStr> {
let name = to_lower_snake_case(name).to_smolstr();
if USELESS_NAMES.contains(&name.as_str()) {
@@ -281,16 +282,16 @@ fn normalize(name: &str) -> Option<SmolStr> {
return None;
}
if !is_valid_name(&name) {
if !is_valid_name(&name, edition) {
return None;
}
Some(name)
}
fn is_valid_name(name: &str) -> bool {
fn is_valid_name(name: &str, edition: syntax::Edition) -> bool {
matches!(
super::LexedStr::single_token(syntax::Edition::CURRENT_FIXME, name),
super::LexedStr::single_token(edition, name),
Some((syntax::SyntaxKind::IDENT, _error))
)
}
@@ -304,11 +305,11 @@ fn is_useless_method(method: &ast::MethodCallExpr) -> bool {
}
}
fn from_call(expr: &ast::Expr) -> Option<SmolStr> {
from_func_call(expr).or_else(|| from_method_call(expr))
fn from_call(expr: &ast::Expr, edition: syntax::Edition) -> Option<SmolStr> {
from_func_call(expr, edition).or_else(|| from_method_call(expr, edition))
}
fn from_func_call(expr: &ast::Expr) -> Option<SmolStr> {
fn from_func_call(expr: &ast::Expr, edition: syntax::Edition) -> Option<SmolStr> {
let call = match expr {
ast::Expr::CallExpr(call) => call,
_ => return None,
@@ -318,10 +319,10 @@ fn from_func_call(expr: &ast::Expr) -> Option<SmolStr> {
_ => return None,
};
let ident = func.path()?.segment()?.name_ref()?.ident_token()?;
normalize(ident.text())
normalize(ident.text(), edition)
}
fn from_method_call(expr: &ast::Expr) -> Option<SmolStr> {
fn from_method_call(expr: &ast::Expr, edition: syntax::Edition) -> Option<SmolStr> {
let method = match expr {
ast::Expr::MethodCallExpr(call) => call,
_ => return None,
@@ -340,10 +341,14 @@ fn from_method_call(expr: &ast::Expr) -> Option<SmolStr> {
}
}
normalize(name)
normalize(name, edition)
}
fn from_param(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<SmolStr> {
fn from_param(
expr: &ast::Expr,
sema: &Semantics<'_, RootDatabase>,
edition: Edition,
) -> Option<SmolStr> {
let arg_list = expr.syntax().parent().and_then(ast::ArgList::cast)?;
let args_parent = arg_list.syntax().parent()?;
let func = match_ast! {
@@ -362,7 +367,7 @@ fn from_param(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<Sm
let param = func.params().into_iter().nth(idx)?;
let pat = sema.source(param)?.value.right()?.pat()?;
let name = var_name_from_pat(&pat)?;
normalize(&name.to_smolstr())
normalize(&name.to_smolstr(), edition)
}
fn var_name_from_pat(pat: &ast::Pat) -> Option<ast::Name> {
@@ -374,10 +379,13 @@ fn var_name_from_pat(pat: &ast::Pat) -> Option<ast::Name> {
}
}
fn from_type(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<SmolStr> {
fn from_type(
expr: &ast::Expr,
sema: &Semantics<'_, RootDatabase>,
edition: Edition,
) -> Option<SmolStr> {
let ty = sema.type_of_expr(expr)?.adjusted();
let ty = ty.remove_ref().unwrap_or(ty);
let edition = sema.scope(expr.syntax())?.krate().edition(sema.db);
name_of_type(&ty, sema.db, edition)
}
@@ -417,7 +425,7 @@ fn name_of_type<'db>(
} else {
return None;
};
normalize(&name)
normalize(&name, edition)
}
fn sequence_name<'db>(
@@ -450,13 +458,13 @@ fn trait_name(trait_: &hir::Trait, db: &RootDatabase, edition: Edition) -> Optio
Some(name)
}
fn from_field_name(expr: &ast::Expr) -> Option<SmolStr> {
fn from_field_name(expr: &ast::Expr, edition: syntax::Edition) -> Option<SmolStr> {
let field = match expr {
ast::Expr::FieldExpr(field) => field,
_ => return None,
};
let ident = field.name_ref()?.ident_token()?;
normalize(ident.text())
normalize(ident.text(), edition)
}
#[cfg(test)]
@@ -332,7 +332,7 @@ pub(crate) fn find_fn_or_blocks(
ast::BlockExpr(blk) => {
match blk.modifier() {
Some(ast::BlockModifier::Async(_)) => blk.syntax().clone(),
Some(ast::BlockModifier::Try(_)) if token_kind != T![return] => blk.syntax().clone(),
Some(ast::BlockModifier::Try { .. }) if token_kind != T![return] => blk.syntax().clone(),
_ => continue,
}
},
@@ -404,8 +404,8 @@ fn nav_for_exit_points(
let blk_in_file = InFile::new(file_id, blk.into());
Some(expr_to_nav(db, blk_in_file, Some(async_tok)))
},
Some(ast::BlockModifier::Try(_)) if token_kind != T![return] => {
let try_tok = blk.try_token()?.text_range();
Some(ast::BlockModifier::Try { .. }) if token_kind != T![return] => {
let try_tok = blk.try_block_modifier()?.try_token()?.text_range();
let blk_in_file = InFile::new(file_id, blk.into());
Some(expr_to_nav(db, blk_in_file, Some(try_tok)))
},
@@ -473,7 +473,7 @@ pub(crate) fn highlight_exit_points(
},
ast::BlockExpr(blk) => match blk.modifier() {
Some(ast::BlockModifier::Async(t)) => hl_exit_points(sema, Some(t), blk.into()),
Some(ast::BlockModifier::Try(t)) if token.kind() != T![return] => {
Some(ast::BlockModifier::Try { try_token: t, .. }) if token.kind() != T![return] => {
hl_exit_points(sema, Some(t), blk.into())
},
_ => continue,
@@ -74,7 +74,7 @@ pub(super) fn try_expr(
ast::Fn(fn_) => sema.to_def(&fn_)?.ret_type(sema.db),
ast::Item(__) => return None,
ast::ClosureExpr(closure) => sema.type_of_expr(&closure.body()?)?.original,
ast::BlockExpr(block_expr) => if matches!(block_expr.modifier(), Some(ast::BlockModifier::Async(_) | ast::BlockModifier::Try(_)| ast::BlockModifier::Const(_))) {
ast::BlockExpr(block_expr) => if matches!(block_expr.modifier(), Some(ast::BlockModifier::Async(_) | ast::BlockModifier::Try { .. } | ast::BlockModifier::Const(_))) {
sema.type_of_expr(&block_expr.into())?.original
} else {
continue;
@@ -1382,4 +1382,21 @@ fn f<'a>() {
"#]],
);
}
// A reference to an `impl Trait` with multiple bounds must render the inlay
// type hint with parentheses — `&(impl Eq + Ord)`, not `&impl Eq + Ord` —
// so the hint text is valid, unambiguous Rust syntax.
#[test]
fn ref_multi_trait_impl_trait() {
check_with_config(
InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
r#"
//- minicore: sized
trait Eq {}
trait Ord {}
fn foo(argument: &(impl Eq + Ord)) {
let x = argument;
// ^ &(impl Eq + Ord)
}
"#,
);
}
}
@@ -67,7 +67,7 @@
FxHashMap, FxIndexSet, LineIndexDatabase,
base_db::{
CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, VfsPath,
salsa::{Cancelled, Database},
salsa::{CancellationToken, Cancelled, Database},
},
prime_caches, symbol_index,
};
@@ -947,6 +947,10 @@ fn with_db<F, T>(&self, f: F) -> Cancellable<T>
// We use `attach_db_allow_change()` and not `attach_db()` because fixture injection can change the database.
hir::attach_db_allow_change(&self.db, || Cancelled::catch(|| f(&self.db)))
}
pub fn cancellation_token(&self) -> CancellationToken {
self.db.cancellation_token()
}
}
#[test]
@@ -1975,8 +1975,8 @@ trait Sub: Super + Super {
fn f() -> impl Sub<$0
"#,
expect![[r#"
trait Sub<SubTy = , SuperTy = >
^^^^^^^^^ -----------
trait Sub<SuperTy = , SubTy = >
^^^^^^^^^^^ ---------
"#]],
);
}
@@ -14,7 +14,7 @@
use std::ops::ControlFlow;
use either::Either;
use hir::{DefWithBody, EditionedFileId, InFile, InRealFile, MacroKind, Name, Semantics};
use hir::{DefWithBody, EditionedFileId, InFile, InRealFile, MacroKind, Semantics};
use ide_db::{FxHashMap, FxHashSet, MiniCore, Ranker, RootDatabase, SymbolKind};
use syntax::{
AstNode, AstToken, NodeOrToken,
@@ -257,8 +257,7 @@ fn item(&self) -> &ast::Item {
// FIXME: accommodate range highlighting
let mut body_stack: Vec<Option<DefWithBody>> = vec![];
let mut per_body_cache: FxHashMap<DefWithBody, (FxHashSet<_>, FxHashMap<Name, u32>)> =
FxHashMap::default();
let mut per_body_cache: FxHashMap<DefWithBody, FxHashSet<_>> = FxHashMap::default();
// Walk all nodes, keeping track of whether we are inside a macro or not.
// If in macro, expand it first and highlight the expanded code.
@@ -422,14 +421,11 @@ fn item(&self) -> &ast::Item {
}
let edition = descended_element.file_id.edition(sema.db);
let (unsafe_ops, bindings_shadow_count) = match current_body {
Some(current_body) => {
let (ops, bindings) = per_body_cache
.entry(current_body)
.or_insert_with(|| (sema.get_unsafe_ops(current_body), Default::default()));
(&*ops, Some(bindings))
}
None => (&empty, None),
let unsafe_ops = match current_body {
Some(current_body) => per_body_cache
.entry(current_body)
.or_insert_with(|| sema.get_unsafe_ops(current_body)),
None => &empty,
};
let is_unsafe_node =
|node| unsafe_ops.contains(&InFile::new(descended_element.file_id, node));
@@ -438,7 +434,6 @@ fn item(&self) -> &ast::Item {
let hl = highlight::name_like(
sema,
krate,
bindings_shadow_count,
&is_unsafe_node,
config.syntactic_name_ref_highlighting,
name_like,
@@ -5,12 +5,11 @@
use either::Either;
use hir::{AsAssocItem, HasAttrs, HasVisibility, Semantics};
use ide_db::{
FxHashMap, RootDatabase, SymbolKind,
RootDatabase, SymbolKind,
defs::{Definition, IdentClass, NameClass, NameRefClass},
syntax_helpers::node_ext::walk_pat,
};
use span::Edition;
use stdx::hash_once;
use syntax::{
AstNode, AstPtr, AstToken, NodeOrToken,
SyntaxKind::{self, *},
@@ -64,7 +63,6 @@ pub(super) fn token(
pub(super) fn name_like(
sema: &Semantics<'_, RootDatabase>,
krate: Option<hir::Crate>,
bindings_shadow_count: Option<&mut FxHashMap<hir::Name, u32>>,
is_unsafe_node: &impl Fn(AstPtr<Either<ast::Expr, ast::Pat>>) -> bool,
syntactic_name_ref_highlighting: bool,
name_like: ast::NameLike,
@@ -75,22 +73,15 @@ pub(super) fn name_like(
ast::NameLike::NameRef(name_ref) => highlight_name_ref(
sema,
krate,
bindings_shadow_count,
&mut binding_hash,
is_unsafe_node,
syntactic_name_ref_highlighting,
name_ref,
edition,
),
ast::NameLike::Name(name) => highlight_name(
sema,
bindings_shadow_count,
&mut binding_hash,
is_unsafe_node,
krate,
name,
edition,
),
ast::NameLike::Name(name) => {
highlight_name(sema, &mut binding_hash, is_unsafe_node, krate, name, edition)
}
ast::NameLike::Lifetime(lifetime) => match IdentClass::classify_lifetime(sema, &lifetime) {
Some(IdentClass::NameClass(NameClass::Definition(def))) => {
highlight_def(sema, krate, def, edition, false) | HlMod::Definition
@@ -273,7 +264,6 @@ fn keyword(token: SyntaxToken, kind: SyntaxKind) -> Highlight {
fn highlight_name_ref(
sema: &Semantics<'_, RootDatabase>,
krate: Option<hir::Crate>,
bindings_shadow_count: Option<&mut FxHashMap<hir::Name, u32>>,
binding_hash: &mut Option<u64>,
is_unsafe_node: &impl Fn(AstPtr<Either<ast::Expr, ast::Pat>>) -> bool,
syntactic_name_ref_highlighting: bool,
@@ -306,12 +296,8 @@ fn highlight_name_ref(
};
let mut h = match name_class {
NameRefClass::Definition(def, _) => {
if let Definition::Local(local) = &def
&& let Some(bindings_shadow_count) = bindings_shadow_count
{
let name = local.name(sema.db);
let shadow_count = bindings_shadow_count.entry(name.clone()).or_default();
*binding_hash = Some(calc_binding_hash(&name, *shadow_count))
if let Definition::Local(local) = &def {
*binding_hash = Some(local.as_id() as u64);
};
let mut h = highlight_def(sema, krate, def, edition, true);
@@ -432,7 +418,6 @@ fn highlight_name_ref(
fn highlight_name(
sema: &Semantics<'_, RootDatabase>,
bindings_shadow_count: Option<&mut FxHashMap<hir::Name, u32>>,
binding_hash: &mut Option<u64>,
is_unsafe_node: &impl Fn(AstPtr<Either<ast::Expr, ast::Pat>>) -> bool,
krate: Option<hir::Crate>,
@@ -440,13 +425,8 @@ fn highlight_name(
edition: Edition,
) -> Highlight {
let name_kind = NameClass::classify(sema, &name);
if let Some(NameClass::Definition(Definition::Local(local))) = &name_kind
&& let Some(bindings_shadow_count) = bindings_shadow_count
{
let name = local.name(sema.db);
let shadow_count = bindings_shadow_count.entry(name.clone()).or_default();
*shadow_count += 1;
*binding_hash = Some(calc_binding_hash(&name, *shadow_count))
if let Some(NameClass::Definition(Definition::Local(local))) = &name_kind {
*binding_hash = Some(local.as_id() as u64);
};
match name_kind {
Some(NameClass::Definition(def)) => {
@@ -474,10 +454,6 @@ fn highlight_name(
}
}
fn calc_binding_hash(name: &hir::Name, shadow_count: u32) -> u64 {
hash_once::<ide_db::FxHasher>((name.as_str(), shadow_count))
}
pub(super) fn highlight_def(
sema: &Semantics<'_, RootDatabase>,
krate: Option<hir::Crate>,
@@ -42,14 +42,14 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration" data-binding-hash="5697120079570210533" style="color: hsl(268,86%,80%);">x</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration" data-binding-hash="4222724691718692706" style="color: hsl(156,71%,51%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="0" style="color: hsl(74,59%,48%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration" data-binding-hash="1" style="color: hsl(152,51%,64%);">x</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="0" style="color: hsl(74,59%,48%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration" data-binding-hash="2" style="color: hsl(272,82%,82%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="0" style="color: hsl(74,59%,48%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="17855021198829413584" style="color: hsl(230,76%,79%);">x</span> <span class="operator">=</span> <span class="string_literal">"other color please!"</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration" data-binding-hash="16380625810977895757" style="color: hsl(262,75%,75%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="17855021198829413584" style="color: hsl(230,76%,79%);">x</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="3" style="color: hsl(107,98%,81%);">x</span> <span class="operator">=</span> <span class="string_literal">"other color please!"</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration" data-binding-hash="4" style="color: hsl(241,93%,64%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="3" style="color: hsl(107,98%,81%);">x</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">fn</span> <span class="function declaration">bar</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable reference" data-binding-hash="0" style="color: hsl(74,59%,48%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
<span class="brace">}</span></code></pre>
@@ -17,7 +17,10 @@
use either::Either;
use hir::EditionedFileId;
use ide_db::{FilePosition, RootDatabase, base_db::RootQueryDb};
use ide_db::{
FilePosition, RootDatabase,
base_db::{RootQueryDb, SourceDatabase},
};
use span::Edition;
use std::iter;
@@ -70,11 +73,12 @@ pub(crate) fn on_char_typed(
if !TRIGGER_CHARS.contains(&char_typed) {
return None;
}
// FIXME: We need to figure out the edition of the file here, but that means hitting the
// database for more than just parsing the file which is bad.
let edition = db
.source_root_crates(db.file_source_root(position.file_id).source_root_id(db))
.first()
.map_or(Edition::CURRENT, |crates| crates.data(db).edition);
// FIXME: We are hitting the database here, if we are unlucky this call might block momentarily
// causing the editor to feel sluggish!
let edition = Edition::CURRENT_FIXME;
// causing the editor to feel sluggish! We need to make this bail if it would block too long?
let editioned_file_id_wrapper = EditionedFileId::from_span_guess_origin(
db,
span::EditionedFileId::new(position.file_id, edition),
@@ -457,8 +461,8 @@ fn do_type_char(char_typed: char, before: &str) -> Option<String> {
let (offset, mut before) = extract_offset(before);
let edit = TextEdit::insert(offset, char_typed.to_string());
edit.apply(&mut before);
let parse = SourceFile::parse(&before, span::Edition::CURRENT_FIXME);
on_char_typed_(&parse, offset, char_typed, span::Edition::CURRENT_FIXME).map(|it| {
let parse = SourceFile::parse(&before, span::Edition::CURRENT);
on_char_typed_(&parse, offset, char_typed, span::Edition::CURRENT).map(|it| {
it.apply(&mut before);
before.to_string()
})
@@ -110,6 +110,7 @@ pub(super) fn prefill() -> DashMap<Symbol, (), BuildHasherDefault<FxHasher>> {
win64_dash_unwind = "win64-unwind",
x86_dash_interrupt = "x86-interrupt",
rust_dash_preserve_dash_none = "preserve-none",
_0_u8 = "0_u8",
@PLAIN:
__ra_fixup,
@@ -285,6 +286,7 @@ pub(super) fn prefill() -> DashMap<Symbol, (), BuildHasherDefault<FxHasher>> {
Into,
into_future,
into_iter,
into_try_type,
IntoFuture,
IntoIter,
IntoIterator,
@@ -976,11 +976,17 @@ fn break_expr(p: &mut Parser<'_>, r: Restrictions) -> CompletedMarker {
// test try_block_expr
// fn foo() {
// let _ = try {};
// let _ = try bikeshed T<U> {};
// }
fn try_block_expr(p: &mut Parser<'_>, m: Option<Marker>) -> CompletedMarker {
assert!(p.at(T![try]));
let m = m.unwrap_or_else(|| p.start());
let try_modifier = p.start();
p.bump(T![try]);
if p.eat_contextual_kw(T![bikeshed]) {
type_(p);
}
try_modifier.complete(p, TRY_BLOCK_MODIFIER);
if p.at(T!['{']) {
stmt_list(p);
} else {
@@ -114,6 +114,7 @@ pub enum SyntaxKind {
ATT_SYNTAX_KW,
AUTO_KW,
AWAIT_KW,
BIKESHED_KW,
BUILTIN_KW,
CLOBBER_ABI_KW,
DEFAULT_KW,
@@ -285,6 +286,7 @@ pub enum SyntaxKind {
STRUCT,
TOKEN_TREE,
TRAIT,
TRY_BLOCK_MODIFIER,
TRY_EXPR,
TUPLE_EXPR,
TUPLE_FIELD,
@@ -458,6 +460,7 @@ pub const fn text(self) -> &'static str {
| STRUCT
| TOKEN_TREE
| TRAIT
| TRY_BLOCK_MODIFIER
| TRY_EXPR
| TUPLE_EXPR
| TUPLE_FIELD
@@ -596,6 +599,7 @@ pub const fn text(self) -> &'static str {
ASM_KW => "asm",
ATT_SYNTAX_KW => "att_syntax",
AUTO_KW => "auto",
BIKESHED_KW => "bikeshed",
BUILTIN_KW => "builtin",
CLOBBER_ABI_KW => "clobber_abi",
DEFAULT_KW => "default",
@@ -698,6 +702,7 @@ pub fn is_contextual_keyword(self, edition: Edition) -> bool {
ASM_KW => true,
ATT_SYNTAX_KW => true,
AUTO_KW => true,
BIKESHED_KW => true,
BUILTIN_KW => true,
CLOBBER_ABI_KW => true,
DEFAULT_KW => true,
@@ -788,6 +793,7 @@ pub fn is_keyword(self, edition: Edition) -> bool {
ASM_KW => true,
ATT_SYNTAX_KW => true,
AUTO_KW => true,
BIKESHED_KW => true,
BUILTIN_KW => true,
CLOBBER_ABI_KW => true,
DEFAULT_KW => true,
@@ -941,6 +947,7 @@ pub fn from_contextual_keyword(ident: &str, edition: Edition) -> Option<SyntaxKi
"asm" => ASM_KW,
"att_syntax" => ATT_SYNTAX_KW,
"auto" => AUTO_KW,
"bikeshed" => BIKESHED_KW,
"builtin" => BUILTIN_KW,
"clobber_abi" => CLOBBER_ABI_KW,
"default" => DEFAULT_KW,
@@ -1112,6 +1119,7 @@ pub fn from_char(c: char) -> Option<SyntaxKind> {
[asm] => { $ crate :: SyntaxKind :: ASM_KW };
[att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW };
[auto] => { $ crate :: SyntaxKind :: AUTO_KW };
[bikeshed] => { $ crate :: SyntaxKind :: BIKESHED_KW };
[builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW };
[clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW };
[default] => { $ crate :: SyntaxKind :: DEFAULT_KW };
@@ -45,7 +45,8 @@ SOURCE_FILE
WHITESPACE " "
EXPR_STMT
BLOCK_EXPR
TRY_KW "try"
TRY_BLOCK_MODIFIER
TRY_KW "try"
WHITESPACE " "
LITERAL
INT_NUMBER "92"
@@ -21,7 +21,42 @@ SOURCE_FILE
EQ "="
WHITESPACE " "
BLOCK_EXPR
TRY_KW "try"
TRY_BLOCK_MODIFIER
TRY_KW "try"
WHITESPACE " "
STMT_LIST
L_CURLY "{"
R_CURLY "}"
SEMICOLON ";"
WHITESPACE "\n "
LET_STMT
LET_KW "let"
WHITESPACE " "
WILDCARD_PAT
UNDERSCORE "_"
WHITESPACE " "
EQ "="
WHITESPACE " "
BLOCK_EXPR
TRY_BLOCK_MODIFIER
TRY_KW "try"
WHITESPACE " "
BIKESHED_KW "bikeshed"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "T"
GENERIC_ARG_LIST
L_ANGLE "<"
TYPE_ARG
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "U"
R_ANGLE ">"
WHITESPACE " "
STMT_LIST
L_CURLY "{"
@@ -1,3 +1,4 @@
fn foo() {
let _ = try {};
let _ = try bikeshed T<U> {};
}
@@ -275,7 +275,10 @@ pub fn load_workspace(
}
tracing::debug!("Stitching sysroot library: {src_root}");
let mut stitched = stitched::Stitched { crates: Default::default() };
let mut stitched = stitched::Stitched {
crates: Default::default(),
edition: span::Edition::Edition2024,
};
for path in stitched::SYSROOT_CRATES.trim().lines() {
let name = path.split('/').next_back().unwrap();
@@ -511,6 +514,7 @@ pub(crate) mod stitched {
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Stitched {
pub(super) crates: Arena<RustLibSrcCrateData>,
pub(crate) edition: span::Edition,
}
impl ops::Index<RustLibSrcCrate> for Stitched {
@@ -1831,7 +1831,7 @@ fn sysroot_to_crate_graph(
let display_name = CrateDisplayName::from_canonical_name(&stitched[krate].name);
let crate_id = crate_graph.add_crate_root(
file_id,
Edition::CURRENT_FIXME,
stitched.edition,
Some(display_name),
None,
cfg_options.clone(),
@@ -22,7 +22,6 @@
pub(crate) use cargo_metadata::diagnostic::{
Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan,
};
use toolchain::DISPLAY_COMMAND_IGNORE_ENVS;
use toolchain::Tool;
use triomphe::Arc;
@@ -144,6 +143,7 @@ pub(crate) fn invocation_strategy(&self) -> InvocationStrategy {
}
impl fmt::Display for FlycheckConfig {
/// Show a shortened version of the check command.
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
FlycheckConfig::Automatic { cargo_options, .. } => {
@@ -153,12 +153,23 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Don't show `my_custom_check --foo $saved_file` literally to the user, as it
// looks like we've forgotten to substitute $saved_file.
//
// `my_custom_check --foo /home/user/project/src/dir/foo.rs` is too verbose.
//
// Instead, show `my_custom_check --foo ...`. The
// actual path is often too long to be worth showing
// in the IDE (e.g. in the VS Code status bar).
let display_args = args
.iter()
.map(|arg| if arg == SAVED_FILE_PLACEHOLDER_DOLLAR { "..." } else { arg })
.map(|arg| {
if (arg == SAVED_FILE_PLACEHOLDER_DOLLAR)
|| (arg == SAVED_FILE_INLINE)
|| arg.ends_with(".rs")
{
"..."
} else {
arg
}
})
.collect::<Vec<_>>();
write!(f, "{command} {}", display_args.join(" "))
@@ -403,24 +414,30 @@ struct FlycheckActor {
/// doesn't provide a way to read sub-process output without blocking, so we
/// have to wrap sub-processes output handling in a thread and pass messages
/// back over a channel.
command_handle: Option<CommandHandle<CargoCheckMessage>>,
command_handle: Option<CommandHandle<CheckMessage>>,
/// The receiver side of the channel mentioned above.
command_receiver: Option<Receiver<CargoCheckMessage>>,
command_receiver: Option<Receiver<CheckMessage>>,
diagnostics_cleared_for: FxHashSet<PackageSpecifier>,
diagnostics_received: DiagnosticsReceived,
}
#[derive(PartialEq)]
#[derive(PartialEq, Debug)]
enum DiagnosticsReceived {
Yes,
No,
YesAndClearedForAll,
/// We started a flycheck, but we haven't seen any diagnostics yet.
NotYet,
/// We received a non-zero number of diagnostics from rustc or clippy (via
/// cargo or custom check command). This means there were errors or
/// warnings.
AtLeastOne,
/// We received a non-zero number of diagnostics, and the scope is
/// workspace, so we've discarded the previous workspace diagnostics.
AtLeastOneAndClearedWorkspace,
}
#[allow(clippy::large_enum_variant)]
enum Event {
RequestStateChange(StateChange),
CheckEvent(Option<CargoCheckMessage>),
CheckEvent(Option<CheckMessage>),
}
/// This is stable behaviour. Don't change.
@@ -511,7 +528,7 @@ fn new(
command_handle: None,
command_receiver: None,
diagnostics_cleared_for: Default::default(),
diagnostics_received: DiagnosticsReceived::No,
diagnostics_received: DiagnosticsReceived::NotYet,
}
}
@@ -563,23 +580,13 @@ fn run(mut self, inbox: Receiver<StateChange>) {
};
let debug_command = format!("{command:?}");
let user_facing_command = match origin {
// Don't show all the --format=json-with-blah-blah args, just the simple
// version
FlycheckCommandOrigin::Cargo => self.config.to_string(),
// show them the full command but pretty printed. advanced user
FlycheckCommandOrigin::ProjectJsonRunnable
| FlycheckCommandOrigin::CheckOverrideCommand => display_command(
&command,
Some(std::path::Path::new(self.root.as_path())),
),
};
let user_facing_command = self.config.to_string();
tracing::debug!(?origin, ?command, "will restart flycheck");
let (sender, receiver) = unbounded();
match CommandHandle::spawn(
command,
CargoCheckParser,
CheckParser,
sender,
match &self.config {
FlycheckConfig::Automatic { cargo_options, .. } => {
@@ -640,7 +647,7 @@ fn run(mut self, inbox: Receiver<StateChange>) {
error
);
}
if self.diagnostics_received == DiagnosticsReceived::No {
if self.diagnostics_received == DiagnosticsReceived::NotYet {
tracing::trace!(flycheck_id = self.id, "clearing diagnostics");
// We finished without receiving any diagnostics.
// Clear everything for good measure
@@ -699,7 +706,7 @@ fn run(mut self, inbox: Receiver<StateChange>) {
self.report_progress(Progress::DidFinish(res));
}
Event::CheckEvent(Some(message)) => match message {
CargoCheckMessage::CompilerArtifact(msg) => {
CheckMessage::CompilerArtifact(msg) => {
tracing::trace!(
flycheck_id = self.id,
artifact = msg.target.name,
@@ -729,46 +736,75 @@ fn run(mut self, inbox: Receiver<StateChange>) {
});
}
}
CargoCheckMessage::Diagnostic { diagnostic, package_id } => {
CheckMessage::Diagnostic { diagnostic, package_id } => {
tracing::trace!(
flycheck_id = self.id,
message = diagnostic.message,
package_id = package_id.as_ref().map(|it| it.as_str()),
scope = ?self.scope,
"diagnostic received"
);
if self.diagnostics_received == DiagnosticsReceived::No {
self.diagnostics_received = DiagnosticsReceived::Yes;
}
if let Some(package_id) = &package_id {
if self.diagnostics_cleared_for.insert(package_id.clone()) {
tracing::trace!(
flycheck_id = self.id,
package_id = package_id.as_str(),
"clearing diagnostics"
);
self.send(FlycheckMessage::ClearDiagnostics {
match &self.scope {
FlycheckScope::Workspace => {
if self.diagnostics_received == DiagnosticsReceived::NotYet {
self.send(FlycheckMessage::ClearDiagnostics {
id: self.id,
kind: ClearDiagnosticsKind::All(ClearScope::Workspace),
});
self.diagnostics_received =
DiagnosticsReceived::AtLeastOneAndClearedWorkspace;
}
if let Some(package_id) = package_id {
tracing::warn!(
"Ignoring package label {:?} and applying diagnostics to the whole workspace",
package_id
);
}
self.send(FlycheckMessage::AddDiagnostic {
id: self.id,
kind: ClearDiagnosticsKind::All(ClearScope::Package(
package_id.clone(),
)),
generation: self.generation,
package_id: None,
workspace_root: self.root.clone(),
diagnostic,
});
}
FlycheckScope::Package { package: flycheck_package, .. } => {
if self.diagnostics_received == DiagnosticsReceived::NotYet {
self.diagnostics_received = DiagnosticsReceived::AtLeastOne;
}
// If the package has been set in the diagnostic JSON, respect that. Otherwise, use the
// package that the current flycheck is scoped to. This is useful when a project is
// directly using rustc for its checks (e.g. custom check commands in rust-project.json).
let package_id = package_id.unwrap_or(flycheck_package.clone());
if self.diagnostics_cleared_for.insert(package_id.clone()) {
tracing::trace!(
flycheck_id = self.id,
package_id = package_id.as_str(),
"clearing diagnostics"
);
self.send(FlycheckMessage::ClearDiagnostics {
id: self.id,
kind: ClearDiagnosticsKind::All(ClearScope::Package(
package_id.clone(),
)),
});
}
self.send(FlycheckMessage::AddDiagnostic {
id: self.id,
generation: self.generation,
package_id: Some(package_id),
workspace_root: self.root.clone(),
diagnostic,
});
}
} else if self.diagnostics_received
!= DiagnosticsReceived::YesAndClearedForAll
{
self.diagnostics_received = DiagnosticsReceived::YesAndClearedForAll;
self.send(FlycheckMessage::ClearDiagnostics {
id: self.id,
kind: ClearDiagnosticsKind::All(ClearScope::Workspace),
});
}
self.send(FlycheckMessage::AddDiagnostic {
id: self.id,
generation: self.generation,
package_id,
workspace_root: self.root.clone(),
diagnostic,
});
}
},
}
@@ -792,7 +828,7 @@ fn cancel_check_process(&mut self) {
fn clear_diagnostics_state(&mut self) {
self.diagnostics_cleared_for.clear();
self.diagnostics_received = DiagnosticsReceived::No;
self.diagnostics_received = DiagnosticsReceived::NotYet;
}
fn explicit_check_command(
@@ -942,15 +978,18 @@ fn send(&self, check_task: FlycheckMessage) {
}
#[allow(clippy::large_enum_variant)]
enum CargoCheckMessage {
enum CheckMessage {
/// A message from `cargo check`, including details like the path
/// to the relevant `Cargo.toml`.
CompilerArtifact(cargo_metadata::Artifact),
/// A diagnostic message from rustc itself.
Diagnostic { diagnostic: Diagnostic, package_id: Option<PackageSpecifier> },
}
struct CargoCheckParser;
struct CheckParser;
impl JsonLinesParser<CargoCheckMessage> for CargoCheckParser {
fn from_line(&self, line: &str, error: &mut String) -> Option<CargoCheckMessage> {
impl JsonLinesParser<CheckMessage> for CheckParser {
fn from_line(&self, line: &str, error: &mut String) -> Option<CheckMessage> {
let mut deserializer = serde_json::Deserializer::from_str(line);
deserializer.disable_recursion_limit();
if let Ok(message) = JsonMessage::deserialize(&mut deserializer) {
@@ -958,10 +997,10 @@ fn from_line(&self, line: &str, error: &mut String) -> Option<CargoCheckMessage>
// Skip certain kinds of messages to only spend time on what's useful
JsonMessage::Cargo(message) => match message {
cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => {
Some(CargoCheckMessage::CompilerArtifact(artifact))
Some(CheckMessage::CompilerArtifact(artifact))
}
cargo_metadata::Message::CompilerMessage(msg) => {
Some(CargoCheckMessage::Diagnostic {
Some(CheckMessage::Diagnostic {
diagnostic: msg.message,
package_id: Some(PackageSpecifier::Cargo {
package_id: Arc::new(msg.package_id),
@@ -971,7 +1010,7 @@ fn from_line(&self, line: &str, error: &mut String) -> Option<CargoCheckMessage>
_ => None,
},
JsonMessage::Rustc(message) => {
Some(CargoCheckMessage::Diagnostic { diagnostic: message, package_id: None })
Some(CheckMessage::Diagnostic { diagnostic: message, package_id: None })
}
};
}
@@ -981,7 +1020,7 @@ fn from_line(&self, line: &str, error: &mut String) -> Option<CargoCheckMessage>
None
}
fn from_eof(&self) -> Option<CargoCheckMessage> {
fn from_eof(&self) -> Option<CheckMessage> {
None
}
}
@@ -993,64 +1032,14 @@ enum JsonMessage {
Rustc(Diagnostic),
}
/// Not good enough to execute in a shell, but good enough to show the user without all the noisy
/// quotes
///
/// Pass implicit_cwd if there is one regarded as the obvious by the user, so we can skip showing it.
/// Compactness is the aim of the game, the output typically gets truncated quite a lot.
fn display_command(c: &Command, implicit_cwd: Option<&std::path::Path>) -> String {
let mut o = String::new();
use std::fmt::Write;
let lossy = std::ffi::OsStr::to_string_lossy;
if let Some(dir) = c.get_current_dir() {
if Some(dir) == implicit_cwd.map(std::path::Path::new) {
// pass
} else if dir.to_string_lossy().contains(" ") {
write!(o, "cd {:?} && ", dir).unwrap();
} else {
write!(o, "cd {} && ", dir.display()).unwrap();
}
}
for (env, val) in c.get_envs() {
let (env, val) = (lossy(env), val.map(lossy).unwrap_or(std::borrow::Cow::Borrowed("")));
if DISPLAY_COMMAND_IGNORE_ENVS.contains(&env.as_ref()) {
continue;
}
if env.contains(" ") {
write!(o, "\"{}={}\" ", env, val).unwrap();
} else if val.contains(" ") {
write!(o, "{}=\"{}\" ", env, val).unwrap();
} else {
write!(o, "{}={} ", env, val).unwrap();
}
}
let prog = lossy(c.get_program());
if prog.contains(" ") {
write!(o, "{:?}", prog).unwrap();
} else {
write!(o, "{}", prog).unwrap();
}
for arg in c.get_args() {
let arg = lossy(arg);
if arg.contains(" ") {
write!(o, " \"{}\"", arg).unwrap();
} else {
write!(o, " {}", arg).unwrap();
}
}
o
}
#[cfg(test)]
mod tests {
use super::*;
use ide_db::FxHashMap;
use itertools::Itertools;
use paths::Utf8Path;
use project_model::project_json;
use crate::flycheck::Substitutions;
use crate::flycheck::display_command;
#[test]
fn test_substitutions() {
let label = ":label";
@@ -1139,34 +1128,47 @@ fn test_substitute(
}
#[test]
fn test_display_command() {
use std::path::Path;
let workdir = Path::new("workdir");
let mut cmd = toolchain::command("command", workdir, &FxHashMap::default());
assert_eq!(display_command(cmd.arg("--arg"), Some(workdir)), "command --arg");
assert_eq!(
display_command(cmd.arg("spaced arg"), Some(workdir)),
"command --arg \"spaced arg\""
);
assert_eq!(
display_command(cmd.env("ENVIRON", "yeah"), Some(workdir)),
"ENVIRON=yeah command --arg \"spaced arg\""
);
assert_eq!(
display_command(cmd.env("OTHER", "spaced env"), Some(workdir)),
"ENVIRON=yeah OTHER=\"spaced env\" command --arg \"spaced arg\""
);
assert_eq!(
display_command(cmd.current_dir("/tmp"), Some(workdir)),
"cd /tmp && ENVIRON=yeah OTHER=\"spaced env\" command --arg \"spaced arg\""
);
assert_eq!(
display_command(cmd.current_dir("/tmp and/thing"), Some(workdir)),
"cd \"/tmp and/thing\" && ENVIRON=yeah OTHER=\"spaced env\" command --arg \"spaced arg\""
);
assert_eq!(
display_command(cmd.current_dir("/tmp and/thing"), Some(Path::new("/tmp and/thing"))),
"ENVIRON=yeah OTHER=\"spaced env\" command --arg \"spaced arg\""
);
fn test_flycheck_config_display() {
let clippy = FlycheckConfig::Automatic {
cargo_options: CargoOptions {
subcommand: "clippy".to_owned(),
target_tuples: vec![],
all_targets: false,
set_test: false,
no_default_features: false,
all_features: false,
features: vec![],
extra_args: vec![],
extra_test_bin_args: vec![],
extra_env: FxHashMap::default(),
target_dir_config: TargetDirectoryConfig::default(),
},
ansi_color_output: true,
};
assert_eq!(clippy.to_string(), "cargo clippy");
let custom_dollar = FlycheckConfig::CustomCommand {
command: "check".to_owned(),
args: vec!["--input".to_owned(), "$saved_file".to_owned()],
extra_env: FxHashMap::default(),
invocation_strategy: InvocationStrategy::Once,
};
assert_eq!(custom_dollar.to_string(), "check --input ...");
let custom_inline = FlycheckConfig::CustomCommand {
command: "check".to_owned(),
args: vec!["--input".to_owned(), "{saved_file}".to_owned()],
extra_env: FxHashMap::default(),
invocation_strategy: InvocationStrategy::Once,
};
assert_eq!(custom_inline.to_string(), "check --input ...");
let custom_rs = FlycheckConfig::CustomCommand {
command: "check".to_owned(),
args: vec!["--input".to_owned(), "/path/to/file.rs".to_owned()],
extra_env: FxHashMap::default(),
invocation_strategy: InvocationStrategy::Once,
};
assert_eq!(custom_rs.to_string(), "check --input ...");
}
}
@@ -14,7 +14,7 @@
use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId};
use ide_db::{
MiniCore,
base_db::{Crate, ProcMacroPaths, SourceDatabase, salsa::Revision},
base_db::{Crate, ProcMacroPaths, SourceDatabase, salsa::CancellationToken, salsa::Revision},
};
use itertools::Itertools;
use load_cargo::SourceRootConfig;
@@ -88,6 +88,7 @@ pub(crate) struct GlobalState {
pub(crate) task_pool: Handle<TaskPool<Task>, Receiver<Task>>,
pub(crate) fmt_pool: Handle<TaskPool<Task>, Receiver<Task>>,
pub(crate) cancellation_pool: thread::Pool,
pub(crate) cancellation_tokens: FxHashMap<lsp_server::RequestId, CancellationToken>,
pub(crate) config: Arc<Config>,
pub(crate) config_errors: Option<ConfigErrors>,
@@ -265,6 +266,7 @@ pub(crate) fn new(sender: Sender<lsp_server::Message>, config: Config) -> Global
task_pool,
fmt_pool,
cancellation_pool,
cancellation_tokens: Default::default(),
loader,
config: Arc::new(config.clone()),
analysis_host,
@@ -617,6 +619,7 @@ pub(crate) fn register_request(
}
pub(crate) fn respond(&mut self, response: lsp_server::Response) {
self.cancellation_tokens.remove(&response.id);
if let Some((method, start)) = self.req_queue.incoming.complete(&response.id) {
if let Some(err) = &response.error
&& err.message.starts_with("server panicked")
@@ -631,6 +634,9 @@ pub(crate) fn respond(&mut self, response: lsp_server::Response) {
}
pub(crate) fn cancel(&mut self, request_id: lsp_server::RequestId) {
if let Some(token) = self.cancellation_tokens.remove(&request_id) {
token.cancel();
}
if let Some(response) = self.req_queue.incoming.cancel(request_id) {
self.send(response.into());
}
@@ -253,6 +253,9 @@ fn on_with_thread_intent<const RUSTFMT: bool, const ALLOW_RETRYING: bool, R>(
tracing::debug!(?params);
let world = self.global_state.snapshot();
self.global_state
.cancellation_tokens
.insert(req.id.clone(), world.analysis.cancellation_token());
if RUSTFMT {
&mut self.global_state.fmt_pool.handle
} else {
@@ -265,7 +268,19 @@ fn on_with_thread_intent<const RUSTFMT: bool, const ALLOW_RETRYING: bool, R>(
});
match thread_result_to_response::<R>(req.id.clone(), result) {
Ok(response) => Task::Response(response),
Err(_cancelled) if ALLOW_RETRYING => Task::Retry(req),
Err(HandlerCancelledError::Inner(
Cancelled::PendingWrite | Cancelled::PropagatedPanic,
)) if ALLOW_RETRYING => Task::Retry(req),
// Note: Technically the return value here does not matter as we have already responded to the client with this error.
Err(HandlerCancelledError::Inner(Cancelled::Local)) => Task::Response(Response {
id: req.id,
result: None,
error: Some(ResponseError {
code: lsp_server::ErrorCode::RequestCanceled as i32,
message: "canceled by client".to_owned(),
data: None,
}),
}),
Err(_cancelled) => {
let error = on_cancelled();
Task::Response(Response { id: req.id, result: None, error: Some(error) })
@@ -81,25 +81,24 @@ impl zalsa_::HasJar for SyntaxContext {
#[derive(Hash)]
struct StructKey<'db, T0, T1, T2, T3>(T0, T1, T2, T3, std::marker::PhantomData<&'db ()>);
impl<'db, T0, T1, T2, T3> zalsa_::interned::HashEqLike<StructKey<'db, T0, T1, T2, T3>>
for SyntaxContextData
impl<'db, T0, T1, T2, T3> zalsa_::HashEqLike<StructKey<'db, T0, T1, T2, T3>> for SyntaxContextData
where
Option<MacroCallId>: zalsa_::interned::HashEqLike<T0>,
Transparency: zalsa_::interned::HashEqLike<T1>,
Edition: zalsa_::interned::HashEqLike<T2>,
SyntaxContext: zalsa_::interned::HashEqLike<T3>,
Option<MacroCallId>: zalsa_::HashEqLike<T0>,
Transparency: zalsa_::HashEqLike<T1>,
Edition: zalsa_::HashEqLike<T2>,
SyntaxContext: zalsa_::HashEqLike<T3>,
{
fn hash<H: std::hash::Hasher>(&self, h: &mut H) {
zalsa_::interned::HashEqLike::<T0>::hash(&self.outer_expn, &mut *h);
zalsa_::interned::HashEqLike::<T1>::hash(&self.outer_transparency, &mut *h);
zalsa_::interned::HashEqLike::<T2>::hash(&self.edition, &mut *h);
zalsa_::interned::HashEqLike::<T3>::hash(&self.parent, &mut *h);
zalsa_::HashEqLike::<T0>::hash(&self.outer_expn, &mut *h);
zalsa_::HashEqLike::<T1>::hash(&self.outer_transparency, &mut *h);
zalsa_::HashEqLike::<T2>::hash(&self.edition, &mut *h);
zalsa_::HashEqLike::<T3>::hash(&self.parent, &mut *h);
}
fn eq(&self, data: &StructKey<'db, T0, T1, T2, T3>) -> bool {
zalsa_::interned::HashEqLike::<T0>::eq(&self.outer_expn, &data.0)
&& zalsa_::interned::HashEqLike::<T1>::eq(&self.outer_transparency, &data.1)
&& zalsa_::interned::HashEqLike::<T2>::eq(&self.edition, &data.2)
&& zalsa_::interned::HashEqLike::<T3>::eq(&self.parent, &data.3)
zalsa_::HashEqLike::<T0>::eq(&self.outer_expn, &data.0)
&& zalsa_::HashEqLike::<T1>::eq(&self.outer_transparency, &data.1)
&& zalsa_::HashEqLike::<T2>::eq(&self.edition, &data.2)
&& zalsa_::HashEqLike::<T3>::eq(&self.parent, &data.3)
}
}
impl zalsa_struct_::Configuration for SyntaxContext {
@@ -203,10 +202,10 @@ unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
impl<'db> SyntaxContext {
pub fn new<
Db,
T0: zalsa_::interned::Lookup<Option<MacroCallId>> + std::hash::Hash,
T1: zalsa_::interned::Lookup<Transparency> + std::hash::Hash,
T2: zalsa_::interned::Lookup<Edition> + std::hash::Hash,
T3: zalsa_::interned::Lookup<SyntaxContext> + std::hash::Hash,
T0: zalsa_::Lookup<Option<MacroCallId>> + std::hash::Hash,
T1: zalsa_::Lookup<Transparency> + std::hash::Hash,
T2: zalsa_::Lookup<Edition> + std::hash::Hash,
T3: zalsa_::Lookup<SyntaxContext> + std::hash::Hash,
>(
db: &'db Db,
outer_expn: T0,
@@ -218,10 +217,10 @@ pub fn new<
) -> Self
where
Db: ?Sized + salsa::Database,
Option<MacroCallId>: zalsa_::interned::HashEqLike<T0>,
Transparency: zalsa_::interned::HashEqLike<T1>,
Edition: zalsa_::interned::HashEqLike<T2>,
SyntaxContext: zalsa_::interned::HashEqLike<T3>,
Option<MacroCallId>: zalsa_::HashEqLike<T0>,
Transparency: zalsa_::HashEqLike<T1>,
Edition: zalsa_::HashEqLike<T2>,
SyntaxContext: zalsa_::HashEqLike<T3>,
{
let (zalsa, zalsa_local) = db.zalsas();
@@ -236,10 +235,10 @@ pub fn new<
std::marker::PhantomData,
),
|id, data| SyntaxContextData {
outer_expn: zalsa_::interned::Lookup::into_owned(data.0),
outer_transparency: zalsa_::interned::Lookup::into_owned(data.1),
edition: zalsa_::interned::Lookup::into_owned(data.2),
parent: zalsa_::interned::Lookup::into_owned(data.3),
outer_expn: zalsa_::Lookup::into_owned(data.0),
outer_transparency: zalsa_::Lookup::into_owned(data.1),
edition: zalsa_::Lookup::into_owned(data.2),
parent: zalsa_::Lookup::into_owned(data.3),
opaque: opaque(zalsa_::FromId::from_id(id)),
opaque_and_semiopaque: opaque_and_semiopaque(zalsa_::FromId::from_id(id)),
},
@@ -472,8 +472,11 @@ RefExpr =
TryExpr =
Attr* Expr '?'
TryBlockModifier =
'try' ('bikeshed' Type)?
BlockExpr =
Attr* Label? ('try' | 'unsafe' | ('async' 'move'?) | ('gen' 'move'?) | 'const') StmtList
Attr* Label? (TryBlockModifier | 'unsafe' | ('async' 'move'?) | ('gen' 'move'?) | 'const') StmtList
PrefixExpr =
Attr* op:('-' | '!' | '*') Expr
@@ -375,7 +375,11 @@ pub fn kind(&self) -> LiteralKind {
pub enum BlockModifier {
Async(SyntaxToken),
Unsafe(SyntaxToken),
Try(SyntaxToken),
Try {
try_token: SyntaxToken,
bikeshed_token: Option<SyntaxToken>,
result_type: Option<ast::Type>,
},
Const(SyntaxToken),
AsyncGen(SyntaxToken),
Gen(SyntaxToken),
@@ -394,7 +398,13 @@ pub fn modifier(&self) -> Option<BlockModifier> {
})
.or_else(|| self.async_token().map(BlockModifier::Async))
.or_else(|| self.unsafe_token().map(BlockModifier::Unsafe))
.or_else(|| self.try_token().map(BlockModifier::Try))
.or_else(|| {
let modifier = self.try_block_modifier()?;
let try_token = modifier.try_token()?;
let bikeshed_token = modifier.bikeshed_token();
let result_type = modifier.ty();
Some(BlockModifier::Try { try_token, bikeshed_token, result_type })
})
.or_else(|| self.const_token().map(BlockModifier::Const))
.or_else(|| self.label().map(BlockModifier::Label))
}
@@ -323,6 +323,8 @@ pub fn label(&self) -> Option<Label> { support::child(&self.syntax) }
#[inline]
pub fn stmt_list(&self) -> Option<StmtList> { support::child(&self.syntax) }
#[inline]
pub fn try_block_modifier(&self) -> Option<TryBlockModifier> { support::child(&self.syntax) }
#[inline]
pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
#[inline]
pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
@@ -331,8 +333,6 @@ pub fn gen_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!
#[inline]
pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) }
#[inline]
pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
#[inline]
pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
}
pub struct BoxPat {
@@ -1630,6 +1630,19 @@ pub fn trait_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax,
#[inline]
pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
}
pub struct TryBlockModifier {
pub(crate) syntax: SyntaxNode,
}
impl TryBlockModifier {
#[inline]
pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
#[inline]
pub fn bikeshed_token(&self) -> Option<SyntaxToken> {
support::token(&self.syntax, T![bikeshed])
}
#[inline]
pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
}
pub struct TryExpr {
pub(crate) syntax: SyntaxNode,
}
@@ -6320,6 +6333,38 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Trait").field("syntax", &self.syntax).finish()
}
}
impl AstNode for TryBlockModifier {
#[inline]
fn kind() -> SyntaxKind
where
Self: Sized,
{
TRY_BLOCK_MODIFIER
}
#[inline]
fn can_cast(kind: SyntaxKind) -> bool { kind == TRY_BLOCK_MODIFIER }
#[inline]
fn cast(syntax: SyntaxNode) -> Option<Self> {
if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
}
#[inline]
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl hash::Hash for TryBlockModifier {
fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
}
impl Eq for TryBlockModifier {}
impl PartialEq for TryBlockModifier {
fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
}
impl Clone for TryBlockModifier {
fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
}
impl fmt::Debug for TryBlockModifier {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("TryBlockModifier").field("syntax", &self.syntax).finish()
}
}
impl AstNode for TryExpr {
#[inline]
fn kind() -> SyntaxKind
@@ -9979,6 +10024,11 @@ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for TryBlockModifier {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for TryExpr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
@@ -9,16 +9,6 @@
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct SyntaxError(String, TextRange);
// FIXME: there was an unused SyntaxErrorKind previously (before this enum was removed)
// It was introduced in this PR: https://github.com/rust-lang/rust-analyzer/pull/846/files#diff-827da9b03b8f9faa1bade5cdd44d5dafR95
// but it was not removed by a mistake.
//
// So, we need to find a place where to stick validation for attributes in match clauses.
// Code before refactor:
// InvalidMatchInnerAttr => {
// write!(f, "Inner attributes are only allowed directly after the opening brace of the match expression")
// }
impl SyntaxError {
pub fn new(message: impl Into<String>, range: TextRange) -> Self {
Self(message.into(), range)
@@ -43,6 +43,7 @@
//! dispatch_from_dyn: unsize, pin
//! hash: sized
//! include:
//! include_bytes:
//! index: sized
//! infallible:
//! int_impl: size_of, transmute
@@ -953,6 +954,9 @@ pub trait FromResidual<R = <Self as Try>::Residual> {
#[lang = "from_residual"]
fn from_residual(residual: R) -> Self;
}
pub const trait Residual<O>: Sized {
type TryType: [const] Try<Output = O, Residual = Self>;
}
#[lang = "Try"]
pub trait Try: FromResidual<Self::Residual> {
type Output;
@@ -962,6 +966,12 @@ pub trait Try: FromResidual<Self::Residual> {
#[lang = "branch"]
fn branch(self) -> ControlFlow<Self::Residual, Self::Output>;
}
#[lang = "into_try_type"]
pub const fn residual_into_try_type<R: [const] Residual<O>, O>(
r: R,
) -> <R as Residual<O>>::TryType {
FromResidual::from_residual(r)
}
impl<B, C> Try for ControlFlow<B, C> {
type Output = C;
@@ -985,6 +995,10 @@ fn from_residual(residual: ControlFlow<B, Infallible>) -> Self {
}
}
}
impl<B, C> Residual<C> for ControlFlow<B, Infallible> {
type TryType = ControlFlow<B, C>;
}
// region:option
impl<T> Try for Option<T> {
type Output = T;
@@ -1008,6 +1022,10 @@ fn from_residual(x: Option<Infallible>) -> Self {
}
}
}
impl<T> const Residual<T> for Option<Infallible> {
type TryType = Option<T>;
}
// endregion:option
// region:result
// region:from
@@ -1037,10 +1055,14 @@ fn from_residual(residual: Result<Infallible, E>) -> Self {
}
}
}
impl<T, E> const Residual<T> for Result<Infallible, E> {
type TryType = Result<T, E>;
}
// endregion:from
// endregion:result
}
pub use self::try_::{ControlFlow, FromResidual, Try};
pub use self::try_::{ControlFlow, FromResidual, Residual, Try};
// endregion:try
// region:add
@@ -2040,6 +2062,14 @@ macro_rules! include {
}
// endregion:include
// region:include_bytes
#[rustc_builtin_macro]
#[macro_export]
macro_rules! include_bytes {
($file:expr $(,)?) => {{ /* compiler built-in */ }};
}
// endregion:include_bytes
// region:concat
#[rustc_builtin_macro]
#[macro_export]
@@ -74,9 +74,6 @@ pub fn name(self) -> &'static str {
// Prevent rustup from automatically installing toolchains, see https://github.com/rust-lang/rust-analyzer/issues/20719.
pub const NO_RUSTUP_AUTO_INSTALL_ENV: (&str, &str) = ("RUSTUP_AUTO_INSTALL", "0");
// These get ignored when displaying what command is running in LSP status messages.
pub const DISPLAY_COMMAND_IGNORE_ENVS: &[&str] = &[NO_RUSTUP_AUTO_INSTALL_ENV.0];
#[allow(clippy::disallowed_types)] /* generic parameter allows for FxHashMap */
pub fn command<H>(
cmd: impl AsRef<OsStr>,
+1 -1
View File
@@ -6,7 +6,7 @@ The rust analyzer manual uses [mdbook](https://rust-lang.github.io/mdBook/).
To run the documentation site locally:
```shell
```bash
cargo install mdbook
cargo xtask codegen
cd docs/book
@@ -4,7 +4,7 @@ rust-analyzer is an ordinary Rust project, which is organized as a Cargo workspa
So, just
```bash
$ cargo test
cargo test
```
should be enough to get you started!
@@ -203,14 +203,14 @@ It is enabled by `RA_COUNT=1`.
To measure time for from-scratch analysis, use something like this:
```bash
$ cargo run --release -p rust-analyzer -- analysis-stats ../chalk/
cargo run --release -p rust-analyzer -- analysis-stats ../chalk/
```
For measuring time of incremental analysis, use either of these:
```bash
$ cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --highlight ../chalk/chalk-engine/src/logic.rs
$ cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --complete ../chalk/chalk-engine/src/logic.rs:94:0
cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --highlight ../chalk/chalk-engine/src/logic.rs
cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --complete ../chalk/chalk-engine/src/logic.rs:94:0
```
Look for `fn benchmark_xxx` tests for a quick way to reproduce performance problems.
@@ -283,7 +283,8 @@ repository. We use the [rustc-josh-sync](https://github.com/rust-lang/josh-sync)
repositories. You can find documentation of the tool [here](https://github.com/rust-lang/josh-sync).
You can install the synchronization tool using the following commands:
```
```bash
cargo install --locked --git https://github.com/rust-lang/josh-sync
```
@@ -68,7 +68,7 @@ while d == 4 { // set a breakpoint here and change the value
However for this to work, you will need to enable debug_assertions in your build
```rust
```bash
RUSTFLAGS='--cfg debug_assertions' cargo build --release
```
@@ -13,7 +13,9 @@ editor](./other_editors.html).
rust-analyzer will attempt to install the standard library source code
automatically. You can also install it manually with `rustup`.
$ rustup component add rust-src
```bash
rustup component add rust-src
```
Only the latest stable standard library source is officially supported
for use with rust-analyzer. If you are using an older toolchain or have
@@ -11,9 +11,11 @@ your `$PATH`.
On Linux to install the `rust-analyzer` binary into `~/.local/bin`,
these commands should work:
$ mkdir -p ~/.local/bin
$ curl -L https://github.com/rust-lang/rust-analyzer/releases/latest/download/rust-analyzer-x86_64-unknown-linux-gnu.gz | gunzip -c - > ~/.local/bin/rust-analyzer
$ chmod +x ~/.local/bin/rust-analyzer
```bash
mkdir -p ~/.local/bin
curl -L https://github.com/rust-lang/rust-analyzer/releases/latest/download/rust-analyzer-x86_64-unknown-linux-gnu.gz | gunzip -c - > ~/.local/bin/rust-analyzer
chmod +x ~/.local/bin/rust-analyzer
```
Make sure that `~/.local/bin` is listed in the `$PATH` variable and use
the appropriate URL if youre not on a `x86-64` system.
@@ -24,8 +26,10 @@ or `/usr/local/bin` will work just as well.
Alternatively, you can install it from source using the command below.
Youll need the latest stable version of the Rust toolchain.
$ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
$ cargo xtask install --server
```bash
git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
cargo xtask install --server
```
If your editor cant find the binary even though the binary is on your
`$PATH`, the likely explanation is that it doesnt see the same `$PATH`
@@ -38,7 +42,9 @@ the environment should help.
`rust-analyzer` is available in `rustup`:
$ rustup component add rust-analyzer
```bash
rustup component add rust-analyzer
```
### Arch Linux
@@ -53,7 +59,9 @@ User Repository):
Install it with pacman, for example:
$ pacman -S rust-analyzer
```bash
pacman -S rust-analyzer
```
### Gentoo Linux
@@ -64,7 +72,9 @@ Install it with pacman, for example:
The `rust-analyzer` binary can be installed via
[Homebrew](https://brew.sh/).
$ brew install rust-analyzer
```bash
brew install rust-analyzer
```
### Windows
@@ -37,13 +37,13 @@ bypassing LSP machinery.
When filing issues, it is useful (but not necessary) to try to minimize
examples. An ideal bug reproduction looks like this:
```shell
$ git clone https://github.com/username/repo.git && cd repo && git switch --detach commit-hash
$ rust-analyzer --version
```bash
git clone https://github.com/username/repo.git && cd repo && git switch --detach commit-hash
rust-analyzer --version
rust-analyzer dd12184e4 2021-05-08 dev
$ rust-analyzer analysis-stats .
💀 💀 💀
rust-analyzer analysis-stats .
```
💀 💀 💀
It is especially useful when the `repo` doesnt use external crates or
the standard library.
@@ -49,7 +49,9 @@ Alternatively, download a VSIX corresponding to your platform from the
Install the extension with the `Extensions: Install from VSIX` command
within VS Code, or from the command line via:
$ code --install-extension /path/to/rust-analyzer.vsix
```bash
code --install-extension /path/to/rust-analyzer.vsix
```
If you are running an unsupported platform, you can install
`rust-analyzer-no-server.vsix` and compile or obtain a server binary.
@@ -64,8 +66,10 @@ example:
Both the server and the Code plugin can be installed from source:
$ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
$ cargo xtask install
```bash
git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
cargo xtask install
```
Youll need Cargo, nodejs (matching a supported version of VS Code) and
npm for this.
@@ -76,7 +80,9 @@ Remote, instead youll need to install the `.vsix` manually.
If youre not using Code, you can compile and install only the LSP
server:
$ cargo xtask install --server
```bash
cargo xtask install --server
```
Make sure that `.cargo/bin` is in `$PATH` and precedes paths where
`rust-analyzer` may also be installed. Specifically, `rustup` includes a
@@ -118,4 +124,3 @@ steps might help:
A C compiler should already be available via `org.freedesktop.Sdk`. Any
other tools or libraries you will need to acquire from Flatpak.
@@ -112,7 +112,7 @@ fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
// keywords that are keywords only in specific parse contexts
#[doc(alias = "WEAK_KEYWORDS")]
const CONTEXTUAL_KEYWORDS: &[&str] =
&["macro_rules", "union", "default", "raw", "dyn", "auto", "yeet", "safe"];
&["macro_rules", "union", "default", "raw", "dyn", "auto", "yeet", "safe", "bikeshed"];
// keywords we use for special macro expansions
const CONTEXTUAL_BUILTIN_KEYWORDS: &[&str] = &[
"asm",
-18
View File
@@ -1,18 +0,0 @@
//@ known-bug: #137190
trait Supertrait<T> {
fn method(&self) {}
}
trait Trait<P>: Supertrait<()> {}
impl<P> Trait<P> for () {}
const fn upcast<P>(x: &dyn Trait<P>) -> &dyn Supertrait<()> {
x
}
const fn foo() -> &'static dyn Supertrait<()> {
upcast::<()>(&())
}
const _: &'static dyn Supertrait<()> = foo();
-10
View File
@@ -1,10 +0,0 @@
//@ known-bug: #137190
trait Supertrait {
fn method(&self) {}
}
trait Trait: Supertrait {}
impl Trait for () {}
const _: &dyn Supertrait = &() as &dyn Trait as &dyn Supertrait;
-13
View File
@@ -1,13 +0,0 @@
//@ known-bug: #137916
//@ edition: 2021
use std::ptr::null;
async fn a() -> Box<dyn Send> {
Box::new(async {
let non_send = null::<()>();
&non_send;
async {}.await
})
}
fn main() {}
-18
View File
@@ -1,18 +0,0 @@
//@ known-bug: #138274
//@ edition: 2021
//@ compile-flags: --crate-type=lib
trait Trait {}
fn foo() -> Box<dyn Trait> {
todo!()
}
fn fetch() {
async {
let fut = async {
let _x = foo();
async {}.await;
};
let _: Box<dyn Send> = Box::new(fut);
};
}

Some files were not shown because too many files have changed in this diff Show More