Auto merge of #153943 - lnicola:sync-from-ra, r=lnicola

`rust-analyzer` subtree update

Subtree update of `rust-analyzer` to https://github.com/rust-lang/rust-analyzer/commit/90c8906e6443e7cee18cece9c2621a8b1c10794c.

Created using https://github.com/rust-lang/josh-sync.

r? @ghost
This commit is contained in:
bors
2026-03-16 10:36:21 +00:00
74 changed files with 1163 additions and 687 deletions
@@ -1,305 +1,46 @@
//! Defines [`EditionedFileId`], an interned wrapper around [`span::EditionedFileId`] that
//! is interned (so queries can take it) and remembers its crate.
//! is interned (so queries can take it) and stores only the underlying `span::EditionedFileId`.
use core::fmt;
use std::hash::{Hash, Hasher};
use std::hash::Hash;
use salsa::Database;
use span::Edition;
use vfs::FileId;
use crate::{Crate, RootQueryDb};
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct EditionedFileId(
salsa::Id,
std::marker::PhantomData<&'static salsa::plumbing::interned::Value<EditionedFileId>>,
);
const _: () = {
use salsa::plumbing as zalsa_;
use zalsa_::interned as zalsa_struct_;
type Configuration_ = EditionedFileId;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct EditionedFileIdData {
editioned_file_id: span::EditionedFileId,
krate: Crate,
}
// FIXME: This poses an invalidation problem, if one constructs an `EditionedFileId` with a
// different crate then whatever the input of a memo used, it will invalidate the memo causing
// it to recompute even if the crate is not really used.
/// We like to include the origin crate in an `EditionedFileId` (for use in the item tree),
/// but this poses us a problem.
///
/// Spans contain `EditionedFileId`s, and we don't want to make them store the crate too
/// because that will increase their size, which will increase memory usage significantly.
/// Furthermore, things using spans do not generally need the crate: they are using the
/// file id for queries like `ast_id_map` or `parse`, which do not care about the crate.
///
/// To solve this, we hash **only the `span::EditionedFileId`**, but still compare
/// the crate in the equality check. This preserves the invariant of `Hash` and `Eq` -
/// although same hashes can be used for different items, same file ids used for multiple
/// crates is a rare thing, and different items always have different hashes. Then,
/// when we only have a `span::EditionedFileId`, we use the `intern()` method to
/// reuse existing file ids, and create a new one only if needed. See [`from_span_guess_origin`].
///
/// See this for more info: https://rust-lang.zulipchat.com/#narrow/channel/185405-t-compiler.2Frust-analyzer/topic/Letting.20EditionedFileId.20know.20its.20crate/near/530189401
///
/// [`from_span_guess_origin`]: EditionedFileId::from_span_guess_origin
#[derive(Hash, PartialEq, Eq)]
struct WithoutCrate {
editioned_file_id: span::EditionedFileId,
}
impl Hash for EditionedFileIdData {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
let EditionedFileIdData { editioned_file_id, krate: _ } = *self;
editioned_file_id.hash(state);
}
}
impl zalsa_struct_::HashEqLike<WithoutCrate> for EditionedFileIdData {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
Hash::hash(self, state);
}
#[inline]
fn eq(&self, data: &WithoutCrate) -> bool {
let EditionedFileIdData { editioned_file_id, krate: _ } = *self;
editioned_file_id == data.editioned_file_id
}
}
impl zalsa_::HasJar for EditionedFileId {
type Jar = zalsa_struct_::JarImpl<EditionedFileId>;
const KIND: zalsa_::JarKind = zalsa_::JarKind::Struct;
}
zalsa_::register_jar! {
zalsa_::ErasedJar::erase::<EditionedFileId>()
}
impl zalsa_struct_::Configuration for EditionedFileId {
const LOCATION: salsa::plumbing::Location =
salsa::plumbing::Location { file: file!(), line: line!() };
const DEBUG_NAME: &'static str = "EditionedFileId";
const REVISIONS: std::num::NonZeroUsize = std::num::NonZeroUsize::MAX;
const PERSIST: bool = false;
type Fields<'a> = EditionedFileIdData;
type Struct<'db> = EditionedFileId;
fn serialize<S>(_: &Self::Fields<'_>, _: S) -> Result<S::Ok, S::Error>
where
S: zalsa_::serde::Serializer,
{
unimplemented!("attempted to serialize value that set `PERSIST` to false")
}
fn deserialize<'de, D>(_: D) -> Result<Self::Fields<'static>, D::Error>
where
D: zalsa_::serde::Deserializer<'de>,
{
unimplemented!("attempted to deserialize value that cannot set `PERSIST` to false");
}
}
impl Configuration_ {
pub fn ingredient(zalsa: &zalsa_::Zalsa) -> &zalsa_struct_::IngredientImpl<Self> {
static CACHE: zalsa_::IngredientCache<zalsa_struct_::IngredientImpl<EditionedFileId>> =
zalsa_::IngredientCache::new();
// SAFETY: `lookup_jar_by_type` returns a valid ingredient index, and the only
// ingredient created by our jar is the struct ingredient.
unsafe {
CACHE.get_or_create(zalsa, || {
zalsa.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>()
})
}
}
}
impl zalsa_::AsId for EditionedFileId {
fn as_id(&self) -> salsa::Id {
self.0.as_id()
}
}
impl zalsa_::FromId for EditionedFileId {
fn from_id(id: salsa::Id) -> Self {
Self(<salsa::Id>::from_id(id), std::marker::PhantomData)
}
}
unsafe impl Send for EditionedFileId {}
unsafe impl Sync for EditionedFileId {}
impl std::fmt::Debug for EditionedFileId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Self::default_debug_fmt(*self, f)
}
}
impl zalsa_::SalsaStructInDb for EditionedFileId {
type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex;
fn lookup_ingredient_index(aux: &zalsa_::Zalsa) -> salsa::plumbing::IngredientIndices {
aux.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>().into()
}
fn entries(zalsa: &zalsa_::Zalsa) -> impl Iterator<Item = zalsa_::DatabaseKeyIndex> + '_ {
let _ingredient_index =
zalsa.lookup_jar_by_type::<zalsa_struct_::JarImpl<EditionedFileId>>();
<EditionedFileId>::ingredient(zalsa).entries(zalsa).map(|entry| entry.key())
}
#[inline]
fn cast(id: salsa::Id, type_id: std::any::TypeId) -> Option<Self> {
if type_id == std::any::TypeId::of::<EditionedFileId>() {
Some(<Self as salsa::plumbing::FromId>::from_id(id))
} else {
None
}
}
#[inline]
unsafe fn memo_table(
zalsa: &zalsa_::Zalsa,
id: zalsa_::Id,
current_revision: zalsa_::Revision,
) -> zalsa_::MemoTableWithTypes<'_> {
// SAFETY: Guaranteed by caller.
unsafe {
zalsa.table().memos::<zalsa_struct_::Value<EditionedFileId>>(id, current_revision)
}
}
}
unsafe impl zalsa_::Update for EditionedFileId {
unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
if unsafe { *old_pointer } != new_value {
unsafe { *old_pointer = new_value };
true
} else {
false
}
}
}
impl EditionedFileId {
pub fn from_span(
db: &(impl salsa::Database + ?Sized),
editioned_file_id: span::EditionedFileId,
krate: Crate,
) -> Self {
let (zalsa, zalsa_local) = db.zalsas();
Configuration_::ingredient(zalsa).intern(
zalsa,
zalsa_local,
EditionedFileIdData { editioned_file_id, krate },
|_, data| data,
)
}
/// Guesses the crate for the file.
///
/// Only use this if you cannot precisely determine the origin. This can happen in one of two cases:
///
/// 1. The file is not in the module tree.
/// 2. You are latency sensitive and cannot afford calling the def map to precisely compute the origin
/// (e.g. on enter feature, folding, etc.).
pub fn from_span_guess_origin(
db: &dyn RootQueryDb,
editioned_file_id: span::EditionedFileId,
) -> Self {
let (zalsa, zalsa_local) = db.zalsas();
Configuration_::ingredient(zalsa).intern(
zalsa,
zalsa_local,
WithoutCrate { editioned_file_id },
|_, _| {
// FileId not in the database.
let krate = db
.relevant_crates(editioned_file_id.file_id())
.first()
.copied()
.or_else(|| db.all_crates().first().copied())
.unwrap_or_else(|| {
// What we're doing here is a bit fishy. We rely on the fact that we only need
// the crate in the item tree, and we should not create an `EditionedFileId`
// without a crate except in cases where it does not matter. The chances that
// `all_crates()` will be empty are also very slim, but it can occur during startup.
// In the very unlikely case that there is a bug and we'll use this crate, Salsa
// will panic.
// SAFETY: 0 is less than `Id::MAX_U32`.
salsa::plumbing::FromId::from_id(unsafe { salsa::Id::from_index(0) })
});
EditionedFileIdData { editioned_file_id, krate }
},
)
}
pub fn editioned_file_id(self, db: &dyn salsa::Database) -> span::EditionedFileId {
let zalsa = db.zalsa();
let fields = Configuration_::ingredient(zalsa).fields(zalsa, self);
fields.editioned_file_id
}
pub fn krate(self, db: &dyn salsa::Database) -> Crate {
let zalsa = db.zalsa();
let fields = Configuration_::ingredient(zalsa).fields(zalsa, self);
fields.krate
}
/// Default debug formatting for this struct (may be useful if you define your own `Debug` impl)
pub fn default_debug_fmt(this: Self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
zalsa_::with_attached_database(|db| {
let zalsa = db.zalsa();
let fields = Configuration_::ingredient(zalsa).fields(zalsa, this);
fmt::Debug::fmt(fields, f)
})
.unwrap_or_else(|| {
f.debug_tuple("EditionedFileId").field(&zalsa_::AsId::as_id(&this)).finish()
})
}
}
};
/// Interned wrapper around [`span::EditionedFileId`] so that salsa queries can
/// take it by value.
#[salsa::interned(debug, constructor = from_span_file_id, no_lifetime)]
#[derive(PartialOrd, Ord)]
pub struct EditionedFileId {
field: span::EditionedFileId,
}
impl EditionedFileId {
#[inline]
pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition, krate: Crate) -> Self {
EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition), krate)
}
/// Attaches the current edition and guesses the crate for the file.
///
/// Only use this if you cannot precisely determine the origin. This can happen in one of two cases:
///
/// 1. The file is not in the module tree.
/// 2. You are latency sensitive and cannot afford calling the def map to precisely compute the origin
/// (e.g. on enter feature, folding, etc.).
#[inline]
pub fn current_edition_guess_origin(db: &dyn RootQueryDb, file_id: FileId) -> Self {
Self::from_span_guess_origin(db, span::EditionedFileId::current_edition(file_id))
pub fn new(db: &dyn Database, file_id: FileId, edition: Edition) -> Self {
Self::from_span_file_id(db, span::EditionedFileId::new(file_id, edition))
}
#[inline]
pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
let id = self.editioned_file_id(db);
id.file_id()
pub fn current_edition(db: &dyn Database, file_id: FileId) -> Self {
Self::from_span_file_id(db, span::EditionedFileId::current_edition(file_id))
}
#[inline]
pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
let id = self.editioned_file_id(db);
(id.file_id(), id.edition())
pub fn file_id(self, db: &dyn Database) -> vfs::FileId {
self.field(db).file_id()
}
#[inline]
pub fn edition(self, db: &dyn salsa::Database) -> Edition {
self.editioned_file_id(db).edition()
pub fn span_file_id(self, db: &dyn Database) -> span::EditionedFileId {
self.field(db)
}
#[inline]
pub fn unpack(self, db: &dyn Database) -> (vfs::FileId, span::Edition) {
self.field(db).unpack()
}
#[inline]
pub fn edition(self, db: &dyn Database) -> Edition {
self.field(db).edition()
}
}
@@ -870,7 +870,7 @@ pub fn shrink_to_fit(&mut self) {
impl Crate {
pub fn root_file_id(self, db: &dyn salsa::Database) -> EditionedFileId {
let data = self.data(db);
EditionedFileId::new(db, data.root_file_id, data.edition, self)
EditionedFileId::new(db, data.root_file_id, data.edition)
}
}
@@ -96,7 +96,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
/// Computes an [`ItemTree`] for the given file or macro expansion.
#[salsa::invoke(file_item_tree_query)]
#[salsa::transparent]
fn file_item_tree(&self, file_id: HirFileId) -> &ItemTree;
fn file_item_tree(&self, file_id: HirFileId, krate: Crate) -> &ItemTree;
/// Turns a MacroId into a MacroDefId, describing the macro's definition post name resolution.
#[salsa::invoke(macro_def)]
@@ -196,7 +196,7 @@ fn f() {
),
block: Some(
BlockId(
4401,
4801,
),
),
}"#]],
@@ -44,6 +44,7 @@
};
use ast::{AstNode, StructKind};
use base_db::Crate;
use cfg::CfgOptions;
use hir_expand::{
ExpandTo, HirFileId,
@@ -121,21 +122,23 @@ fn span_for(&self, range: TextRange) -> Span {
}
#[salsa_macros::tracked(returns(deref))]
pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
pub(crate) fn file_item_tree_query(
db: &dyn DefDatabase,
file_id: HirFileId,
krate: Crate,
) -> Arc<ItemTree> {
let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered();
static EMPTY: OnceLock<Arc<ItemTree>> = OnceLock::new();
let ctx = lower::Ctx::new(db, file_id);
let ctx = lower::Ctx::new(db, file_id, krate);
let syntax = db.parse_or_expand(file_id);
let mut item_tree = match_ast! {
match syntax {
ast::SourceFile(file) => {
let krate = file_id.krate(db);
let root_file_id = krate.root_file_id(db);
let extra_top_attrs = (file_id == root_file_id).then(|| {
parse_extra_crate_attrs(db, krate).map(|crate_attrs| {
let file_id = root_file_id.editioned_file_id(db);
lower_extra_crate_attrs(db, crate_attrs, file_id, &|| ctx.cfg_options())
lower_extra_crate_attrs(db, crate_attrs, root_file_id.span_file_id(db), &|| ctx.cfg_options())
})
}).flatten();
let top_attrs = match extra_top_attrs {
@@ -190,14 +193,18 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) ->
}
#[salsa_macros::tracked(returns(deref))]
pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> {
pub(crate) fn block_item_tree_query(
db: &dyn DefDatabase,
block: BlockId,
krate: Crate,
) -> Arc<ItemTree> {
let _p = tracing::info_span!("block_item_tree_query", ?block).entered();
static EMPTY: OnceLock<Arc<ItemTree>> = OnceLock::new();
let loc = block.lookup(db);
let block = loc.ast_id.to_node(db);
let ctx = lower::Ctx::new(db, loc.ast_id.file_id);
let ctx = lower::Ctx::new(db, loc.ast_id.file_id, krate);
let mut item_tree = ctx.lower_block(&block);
let ItemTree { top_level, top_attrs, attrs, vis, big_data, small_data } = &item_tree;
if small_data.is_empty()
@@ -356,10 +363,10 @@ pub(crate) fn new(file: HirFileId, block: Option<BlockId>) -> Self {
Self { file, block }
}
pub(crate) fn item_tree<'db>(&self, db: &'db dyn DefDatabase) -> &'db ItemTree {
pub(crate) fn item_tree<'db>(&self, db: &'db dyn DefDatabase, krate: Crate) -> &'db ItemTree {
match self.block {
Some(block) => block_item_tree_query(db, block),
None => file_item_tree_query(db, self.file),
Some(block) => block_item_tree_query(db, block, krate),
None => file_item_tree_query(db, self.file, krate),
}
}
@@ -2,7 +2,7 @@
use std::cell::OnceCell;
use base_db::FxIndexSet;
use base_db::{Crate, FxIndexSet};
use cfg::CfgOptions;
use hir_expand::{
HirFileId,
@@ -36,12 +36,13 @@ pub(super) struct Ctx<'a> {
span_map: OnceCell<SpanMap>,
file: HirFileId,
cfg_options: OnceCell<&'a CfgOptions>,
krate: Crate,
top_level: Vec<ModItemId>,
visibilities: FxIndexSet<RawVisibility>,
}
impl<'a> Ctx<'a> {
pub(super) fn new(db: &'a dyn DefDatabase, file: HirFileId) -> Self {
pub(super) fn new(db: &'a dyn DefDatabase, file: HirFileId, krate: Crate) -> Self {
Self {
db,
tree: ItemTree::default(),
@@ -51,12 +52,13 @@ pub(super) fn new(db: &'a dyn DefDatabase, file: HirFileId) -> Self {
span_map: OnceCell::new(),
visibilities: FxIndexSet::default(),
top_level: Vec::new(),
krate,
}
}
#[inline]
pub(super) fn cfg_options(&self) -> &'a CfgOptions {
self.cfg_options.get_or_init(|| self.file.krate(self.db).cfg_options(self.db))
self.cfg_options.get_or_init(|| self.krate.cfg_options(self.db))
}
pub(super) fn span_map(&self) -> SpanMapRef<'_> {
@@ -6,7 +6,7 @@
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let item_tree = db.file_item_tree(file_id.into());
let item_tree = db.file_item_tree(file_id.into(), db.test_crate());
let pretty = item_tree.pretty_print(&db, Edition::CURRENT);
expect.assert_eq(&pretty);
}
@@ -568,6 +568,12 @@ fn cfg_select() {
_ => { fn true_2() {} }
}
const _: ((),) = cfg_select! { _ => ((), ) };
const _: i32 = cfg_select! { true => 2 + 3, _ => 3 + 4 };
const _: i32 = cfg_select! { false => 2 + 3, _ => 3 + 4 };
const _: bool = cfg_select! { _ => 2 < 3 };
const _: bool = cfg_select! { true => foo::<(), fn() -> Foo<i32, i64>>(1,), _ => false };
cfg_select! {
false => { fn false_3() {} }
}
@@ -589,6 +595,12 @@ fn true_1() {}
fn true_2() {}
const _: ((),) = ((), );
const _: i32 = 2+3;
const _: i32 = 3+4;
const _: bool = 2<3;
const _: bool = foo::<(), fn() -> Foo<i32, i64>>(1, );
/* error: none of the predicates in this `cfg_select` evaluated to true */
/* error: expected `=>` after cfg expression */
@@ -35,9 +35,9 @@ struct $ident {
};
}
struct#0:MacroRules[BE8F, 0]@58..64#17408# MyTraitMap2#0:MacroCall[BE8F, 0]@31..42#ROOT2024# {#0:MacroRules[BE8F, 0]@72..73#17408#
map#0:MacroRules[BE8F, 0]@86..89#17408#:#0:MacroRules[BE8F, 0]@89..90#17408# #0:MacroRules[BE8F, 0]@89..90#17408#::#0:MacroRules[BE8F, 0]@91..93#17408#std#0:MacroRules[BE8F, 0]@93..96#17408#::#0:MacroRules[BE8F, 0]@96..98#17408#collections#0:MacroRules[BE8F, 0]@98..109#17408#::#0:MacroRules[BE8F, 0]@109..111#17408#HashSet#0:MacroRules[BE8F, 0]@111..118#17408#<#0:MacroRules[BE8F, 0]@118..119#17408#(#0:MacroRules[BE8F, 0]@119..120#17408#)#0:MacroRules[BE8F, 0]@120..121#17408#>#0:MacroRules[BE8F, 0]@121..122#17408#,#0:MacroRules[BE8F, 0]@122..123#17408#
}#0:MacroRules[BE8F, 0]@132..133#17408#
struct#0:MacroRules[BE8F, 0]@58..64#18432# MyTraitMap2#0:MacroCall[BE8F, 0]@31..42#ROOT2024# {#0:MacroRules[BE8F, 0]@72..73#18432#
map#0:MacroRules[BE8F, 0]@86..89#18432#:#0:MacroRules[BE8F, 0]@89..90#18432# #0:MacroRules[BE8F, 0]@89..90#18432#::#0:MacroRules[BE8F, 0]@91..93#18432#std#0:MacroRules[BE8F, 0]@93..96#18432#::#0:MacroRules[BE8F, 0]@96..98#18432#collections#0:MacroRules[BE8F, 0]@98..109#18432#::#0:MacroRules[BE8F, 0]@109..111#18432#HashSet#0:MacroRules[BE8F, 0]@111..118#18432#<#0:MacroRules[BE8F, 0]@118..119#18432#(#0:MacroRules[BE8F, 0]@119..120#18432#)#0:MacroRules[BE8F, 0]@120..121#18432#>#0:MacroRules[BE8F, 0]@121..122#18432#,#0:MacroRules[BE8F, 0]@122..123#18432#
}#0:MacroRules[BE8F, 0]@132..133#18432#
"#]],
);
}
@@ -197,7 +197,7 @@ macro_rules! mk_struct {
#[macro_use]
mod foo;
struct#1:MacroRules[DB0C, 0]@59..65#17408# Foo#0:MacroCall[DB0C, 0]@32..35#ROOT2024#(#1:MacroRules[DB0C, 0]@70..71#17408#u32#0:MacroCall[DB0C, 0]@41..44#ROOT2024#)#1:MacroRules[DB0C, 0]@74..75#17408#;#1:MacroRules[DB0C, 0]@75..76#17408#
struct#1:MacroRules[DB0C, 0]@59..65#18432# Foo#0:MacroCall[DB0C, 0]@32..35#ROOT2024#(#1:MacroRules[DB0C, 0]@70..71#18432#u32#0:MacroCall[DB0C, 0]@41..44#ROOT2024#)#1:MacroRules[DB0C, 0]@74..75#18432#;#1:MacroRules[DB0C, 0]@75..76#18432#
"#]],
);
}
@@ -423,10 +423,10 @@ macro_rules! m {
macro_rules! m {
($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
}
impl#\17408# Bar#\17408# {#\17408#
fn#\17408# foo#\ROOT2024#(#\17408#)#\17408# {#\17408#}#\17408#
fn#\17408# bar#\ROOT2024#(#\17408#)#\17408# {#\17408#}#\17408#
}#\17408#
impl#\18432# Bar#\18432# {#\18432#
fn#\18432# foo#\ROOT2024#(#\18432#)#\18432# {#\18432#}#\18432#
fn#\18432# bar#\ROOT2024#(#\18432#)#\18432# {#\18432#}#\18432#
}#\18432#
"#]],
);
}
@@ -458,7 +458,7 @@ fn $func_name() { todo!() }
"#;
let (db, file_id) = TestDB::with_single_file(fixture);
let krate = file_id.krate(&db);
let krate = db.test_crate();
let def_map = crate_def_map(&db, krate);
let source = def_map[def_map.root].definition_source(&db);
let source_file = match source.value {
@@ -279,7 +279,7 @@ fn seed_with_top_level(&mut self) {
let _p = tracing::info_span!("seed_with_top_level").entered();
let file_id = self.def_map.krate.root_file_id(self.db);
let item_tree = self.db.file_item_tree(file_id.into());
let item_tree = self.db.file_item_tree(file_id.into(), self.def_map.krate);
let attrs = match item_tree.top_level_attrs() {
AttrsOrCfg::Enabled { attrs } => attrs.as_ref(),
AttrsOrCfg::CfgDisabled(it) => it.1.as_ref(),
@@ -387,7 +387,7 @@ fn seed_with_top_level(&mut self) {
}
fn seed_with_inner(&mut self, tree_id: TreeId) {
let item_tree = tree_id.item_tree(self.db);
let item_tree = tree_id.item_tree(self.db, self.def_map.krate);
let is_cfg_enabled = matches!(item_tree.top_level_attrs(), AttrsOrCfg::Enabled { .. });
if is_cfg_enabled {
self.inject_prelude();
@@ -1708,7 +1708,7 @@ fn collect_macro_expansion(
}
let file_id = macro_call_id.into();
let item_tree = self.db.file_item_tree(file_id);
let item_tree = self.db.file_item_tree(file_id, self.def_map.krate);
// Derive helpers that are in scope for an item are also in scope for attribute macro expansions
// of that item (but not derive or fn like macros).
@@ -2335,10 +2335,10 @@ fn collect_module(&mut self, module_ast_id: ItemTreeAstId<Mod>, attrs: Attrs<'_>
self.file_id(),
&module.name,
path_attr.as_deref(),
self.def_collector.def_map.krate,
) {
Ok((file_id, is_mod_rs, mod_dir)) => {
let item_tree = db.file_item_tree(file_id.into());
let item_tree =
db.file_item_tree(file_id.into(), self.def_collector.def_map.krate);
match item_tree.top_level_attrs() {
AttrsOrCfg::CfgDisabled(cfg) => {
self.emit_unconfigured_diagnostic(
@@ -2828,8 +2828,8 @@ fn crate_attrs() {
let fixture = r#"
//- /lib.rs crate:foo crate-attr:recursion_limit="4" crate-attr:no_core crate-attr:no_std crate-attr:feature(register_tool)
"#;
let (db, file_id) = TestDB::with_single_file(fixture);
let def_map = crate_def_map(&db, file_id.krate(&db));
let (db, _) = TestDB::with_single_file(fixture);
let def_map = crate_def_map(&db, db.test_crate());
assert_eq!(def_map.recursion_limit(), 4);
assert!(def_map.is_no_core());
assert!(def_map.is_no_std());
@@ -1,6 +1,6 @@
//! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec;
use base_db::{AnchoredPath, Crate};
use base_db::AnchoredPath;
use hir_expand::{EditionedFileId, name::Name};
use crate::{HirFileId, db::DefDatabase};
@@ -62,7 +62,6 @@ pub(super) fn resolve_declaration(
file_id: HirFileId,
name: &Name,
attr_path: Option<&str>,
krate: Crate,
) -> Result<(EditionedFileId, bool, ModDir), Box<[String]>> {
let name = name.as_str();
@@ -92,7 +91,7 @@ pub(super) fn resolve_declaration(
if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) {
return Ok((
// FIXME: Edition, is this right?
EditionedFileId::new(db, file_id, orig_file_id.edition(db), krate),
EditionedFileId::new(db, file_id, orig_file_id.edition(db)),
is_mod_rs,
mod_dir,
));
@@ -604,7 +604,7 @@ impl Tr for () {}
execute_assert_events(
&db,
|| {
db.file_item_tree(pos.file_id.into());
db.file_item_tree(pos.file_id.into(), db.test_crate());
},
&[("file_item_tree_query", 1), ("parse", 1)],
expect![[r#"
@@ -624,7 +624,7 @@ impl Tr for () {}
execute_assert_events(
&db,
|| {
db.file_item_tree(pos.file_id.into());
db.file_item_tree(pos.file_id.into(), db.test_crate());
},
&[("file_item_tree_query", 1), ("parse", 1)],
expect![[r#"
@@ -381,16 +381,40 @@ fn cfg_select_expand(
);
}
}
let expand_to_if_active = match iter.next() {
Some(tt::TtElement::Subtree(_, tt)) => tt.remaining(),
_ => {
let expand_to_if_active = match iter.peek() {
Some(tt::TtElement::Subtree(sub, tt)) if sub.delimiter.kind == DelimiterKind::Brace => {
iter.next();
tt.remaining()
}
None | Some(TtElement::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))) => {
let err_span = iter.peek().map(|it| it.first_span()).unwrap_or(span);
iter.next();
return ExpandResult::new(
tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
ExpandError::other(err_span, "expected a token tree after `=>`"),
);
}
Some(_) => {
let expr = expect_fragment(
db,
&mut iter,
parser::PrefixEntryPoint::Expr,
tt.top_subtree().delimiter.delim_span(),
);
if let Some(err) = expr.err {
return ExpandResult::new(
tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
err.into(),
);
}
expr.value
}
};
if let Some(TtElement::Leaf(tt::Leaf::Punct(p))) = iter.peek()
&& p.char == ','
{
iter.next();
}
if expand_to.is_none() && active {
expand_to = Some(expand_to_if_active);
@@ -750,7 +774,7 @@ fn relative_file(
if res == call_site && !allow_recursion {
Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`")))
} else {
Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition, lookup.krate))
Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition))
}
}
@@ -162,7 +162,7 @@ fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId, edition: Edition) ->
}
fn resolve_span(db: &dyn ExpandDatabase, Span { range, anchor, ctx: _ }: Span) -> FileRange {
let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id);
let file_id = EditionedFileId::from_span_file_id(db, anchor.file_id);
let anchor_offset =
db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
FileRange { file_id, range: range + anchor_offset }
@@ -208,7 +208,6 @@ pub(crate) fn fixup_syntax(
]);
}
},
// FIXME: foo::
ast::MatchExpr(it) => {
if it.expr().is_none() {
let match_token = match it.match_token() {
@@ -386,7 +386,7 @@ pub(crate) fn call_style(&self) -> MacroCallStyle {
impl HirFileId {
pub fn edition(self, db: &dyn ExpandDatabase) -> Edition {
match self {
HirFileId::FileId(file_id) => file_id.editioned_file_id(db).edition(),
HirFileId::FileId(file_id) => file_id.edition(db),
HirFileId::MacroFile(m) => db.lookup_intern_macro_call(m).def.edition,
}
}
@@ -1118,14 +1118,6 @@ pub fn file_id(self) -> Option<EditionedFileId> {
HirFileId::MacroFile(_) => None,
}
}
#[inline]
pub fn krate(self, db: &dyn ExpandDatabase) -> Crate {
match self {
HirFileId::FileId(it) => it.krate(db),
HirFileId::MacroFile(it) => it.loc(db).krate,
}
}
}
impl PartialEq<EditionedFileId> for HirFileId {
@@ -135,7 +135,7 @@ pub(crate) fn real_span_map(
});
Arc::new(RealSpanMap::from_file(
editioned_file_id.editioned_file_id(db),
editioned_file_id.span_file_id(db),
pairs.into_boxed_slice(),
tree.syntax().text_range().end(),
))
@@ -28,7 +28,7 @@
traits::StoredParamEnvAndCrate,
};
use super::mir::{interpret_mir, lower_to_mir, pad16};
use super::mir::{interpret_mir, lower_body_to_mir, pad16};
pub fn unknown_const<'db>(_ty: Ty<'db>) -> Const<'db> {
Const::new(DbInterner::conjure(), rustc_type_ir::ConstKind::Error(ErrorGuaranteed))
@@ -333,7 +333,7 @@ fn has_closure(body: &Body, expr: ExprId) -> bool {
return c;
}
}
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, ctx.body, &infer, expr)
if let Ok(mir_body) = lower_body_to_mir(ctx.db, ctx.owner, ctx.body, &infer, expr)
&& let Ok((Ok(result), _)) = interpret_mir(ctx.db, Arc::new(mir_body), true, None)
{
return result;
@@ -17,7 +17,7 @@
use hir_def::{
AdtId, ConstId, EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, attrs::AttrFlags,
ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, UnionId, attrs::AttrFlags,
db::DefDatabase, hir::Pat, item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
};
use hir_expand::{
@@ -77,6 +77,7 @@ pub enum IdentType {
Structure,
Trait,
TypeAlias,
Union,
Variable,
Variant,
}
@@ -94,6 +95,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
IdentType::Structure => "Structure",
IdentType::Trait => "Trait",
IdentType::TypeAlias => "Type alias",
IdentType::Union => "Union",
IdentType::Variable => "Variable",
IdentType::Variant => "Variant",
};
@@ -146,9 +148,7 @@ fn validate_adt(&mut self, adt: AdtId) {
match adt {
AdtId::StructId(struct_id) => self.validate_struct(struct_id),
AdtId::EnumId(enum_id) => self.validate_enum(enum_id),
AdtId::UnionId(_) => {
// FIXME: Unions aren't yet supported by this validator.
}
AdtId::UnionId(union_id) => self.validate_union(union_id),
}
}
@@ -383,6 +383,94 @@ fn validate_struct_fields(&mut self, struct_id: StructId) {
}
}
/// Checks the union's name for upper-camel-case, then checks its field names.
///
/// `repr(C)` unions are exempt from the name check (mirroring rustc, since
/// C unions predominantly don't use camel case).
fn validate_union(&mut self, union_id: UnionId) {
// Check the union name.
let data = self.db.union_signature(union_id);
// rustc implementation excuses repr(C) since C unions predominantly don't
// use camel case.
let has_repr_c = AttrFlags::repr(self.db, union_id.into()).is_some_and(|repr| repr.c());
if !has_repr_c {
self.create_incorrect_case_diagnostic_for_item_name(
union_id,
&data.name,
CaseType::UpperCamelCase,
IdentType::Union,
);
}
// Check the field names.
self.validate_union_fields(union_id);
}
/// Check incorrect names for union fields.
///
/// Lazily collects lower-snake-case rename suggestions for every mis-cased
/// field; only if at least one exists does it touch the syntax tree, walking
/// the AST field list to attach a diagnostic to each offending field node.
fn validate_union_fields(&mut self, union_id: UnionId) {
let data = union_id.fields(self.db);
let edition = self.edition(union_id);
// `to_lower_snake_case` returns `None` for names that are already correct,
// so only the offending fields produce a `Replacement`.
let mut union_fields_replacements = data
.fields()
.iter()
.filter_map(|(_, field)| {
to_lower_snake_case(&field.name.display_no_db(edition).to_smolstr()).map(
|new_name| Replacement {
current_name: field.name.clone(),
suggested_text: new_name,
expected_case: CaseType::LowerSnakeCase,
},
)
})
.peekable();
// XXX: Only look at sources if we do have incorrect names.
if union_fields_replacements.peek().is_none() {
return;
}
let union_loc = union_id.lookup(self.db);
let union_src = union_loc.source(self.db);
let Some(union_fields_list) = union_src.value.record_field_list() else {
// A replacement implies at least one field existed, so a missing field
// list means the HIR and the source are out of sync — report and bail.
always!(
union_fields_replacements.peek().is_none(),
"Replacements ({:?}) were generated for a union fields \
which had no fields list: {:?}",
union_fields_replacements.collect::<Vec<_>>(),
union_src
);
return;
};
let mut union_fields_iter = union_fields_list.fields();
for field_replacement in union_fields_replacements {
// We assume the replacements are in the same order as the actual field
// list, just with some entries (the correctly-named fields) skipped.
let field = loop {
if let Some(field) = union_fields_iter.next() {
let Some(field_name) = field.name() else {
// Nameless (malformed) AST field — cannot match, keep scanning.
continue;
};
if field_name.as_name() == field_replacement.current_name {
break field;
}
} else {
// Ran out of AST fields without finding the one we diagnosed.
never!(
"Replacement ({:?}) was generated for a union field \
which was not found: {:?}",
field_replacement,
union_src
);
return;
}
};
self.create_incorrect_case_diagnostic_for_ast_node(
field_replacement,
union_src.file_id,
&field,
IdentType::Field,
);
}
}
fn validate_enum(&mut self, enum_id: EnumId) {
// Check the enum name.
let data = self.db.enum_signature(enum_id);
@@ -1805,18 +1805,27 @@ fn resolve_type_param_assoc_type_shorthand(
}
AssocTypeShorthandResolution::Cycle => return AssocTypeShorthandResolution::Cycle,
};
let (assoc_type, args) = assoc_type_and_args
.get_with(|(assoc_type, args)| (*assoc_type, args.as_ref()))
.skip_binder();
let args = EarlyBinder::bind(args).instantiate(interner, bounded_trait_ref.args);
let current_result = StoredEarlyBinder::bind((assoc_type, args.store()));
if let Some(this_trait_resolution) = this_trait_resolution {
return AssocTypeShorthandResolution::Ambiguous {
sub_trait_resolution: Some(this_trait_resolution),
};
} else if supertraits_resolution.is_some() {
return AssocTypeShorthandResolution::Ambiguous { sub_trait_resolution: None };
} else if let Some(prev_resolution) = &supertraits_resolution {
if let AssocTypeShorthandResolution::Ambiguous {
sub_trait_resolution: Some(prev_resolution),
}
| AssocTypeShorthandResolution::Resolved(prev_resolution) = prev_resolution
&& *prev_resolution == current_result
{
continue;
} else {
return AssocTypeShorthandResolution::Ambiguous { sub_trait_resolution: None };
}
} else {
let (assoc_type, args) = assoc_type_and_args
.get_with(|(assoc_type, args)| (*assoc_type, args.as_ref()))
.skip_binder();
let args = EarlyBinder::bind(args).instantiate(interner, bounded_trait_ref.args);
let current_result = StoredEarlyBinder::bind((assoc_type, args.store()));
supertraits_resolution = Some(match lookup_on_bounded_trait {
AssocTypeShorthandResolution::Resolved(_) => {
AssocTypeShorthandResolution::Resolved(current_result)
@@ -183,7 +183,7 @@ pub(crate) fn lower_partly_resolved_path(
let trait_ref = self.lower_trait_ref_from_resolved_path(
trait_,
Ty::new_error(self.ctx.interner, ErrorGuaranteed),
false,
infer_args,
);
tracing::debug!(?trait_ref);
self.skip_resolved_segment();
@@ -201,7 +201,7 @@ pub(crate) fn lower_partly_resolved_path(
// this point (`trait_ref.substitution`).
let substitution = self.substs_from_path_segment(
associated_ty.into(),
false,
infer_args,
None,
true,
);
@@ -40,7 +40,10 @@
pub use eval::{
Evaluator, MirEvalError, VTableMap, interpret_mir, pad16, render_const_using_debug_impl,
};
pub use lower::{MirLowerError, lower_to_mir, mir_body_for_closure_query, mir_body_query};
pub use lower::{
MirLowerError, lower_body_to_mir, lower_to_mir_with_store, mir_body_for_closure_query,
mir_body_query,
};
pub use monomorphization::{
monomorphized_mir_body_for_closure_query, monomorphized_mir_body_query,
};
@@ -82,7 +82,7 @@ struct MirLowerCtx<'a, 'db> {
labeled_loop_blocks: FxHashMap<LabelId, LoopBlocks>,
discr_temp: Option<Place>,
db: &'db dyn HirDatabase,
body: &'a Body,
store: &'a ExpressionStore,
infer: &'a InferenceResult,
types: &'db crate::next_solver::DefaultAny<'db>,
resolver: Resolver<'db>,
@@ -285,7 +285,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn new(
db: &'db dyn HirDatabase,
owner: DefWithBodyId,
body: &'a Body,
store: &'a ExpressionStore,
infer: &'a InferenceResult,
) -> Self {
let mut basic_blocks = Arena::new();
@@ -316,7 +316,7 @@ fn new(
result: mir,
db,
infer,
body,
store,
types: crate::next_solver::default_types(db),
owner,
resolver,
@@ -354,7 +354,7 @@ fn lower_expr_to_some_operand(
current: BasicBlockId,
) -> Result<'db, Option<(Operand, BasicBlockId)>> {
if !self.has_adjustments(expr_id)
&& let Expr::Literal(l) = &self.body[expr_id]
&& let Expr::Literal(l) = &self.store[expr_id]
{
let ty = self.expr_ty_without_adjust(expr_id);
return Ok(Some((self.lower_literal_to_operand(ty, l)?, current)));
@@ -461,7 +461,7 @@ fn lower_expr_to_place_without_adjust(
place: Place,
mut current: BasicBlockId,
) -> Result<'db, Option<BasicBlockId>> {
match &self.body[expr_id] {
match &self.store[expr_id] {
Expr::OffsetOf(_) => {
not_supported!("builtin#offset_of")
}
@@ -500,7 +500,7 @@ fn lower_expr_to_place_without_adjust(
} else {
let resolver_guard =
self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
let hygiene = self.body.expr_path_hygiene(expr_id);
let hygiene = self.store.expr_path_hygiene(expr_id);
let result = self
.resolver
.resolve_path_in_value_ns_fully(self.db, p, hygiene)
@@ -509,7 +509,7 @@ fn lower_expr_to_place_without_adjust(
self.db,
p,
DisplayTarget::from_crate(self.db, self.krate()),
self.body,
self.store,
)
})?;
self.resolver.reset_to_guard(resolver_guard);
@@ -882,7 +882,7 @@ fn lower_expr_to_place_without_adjust(
let variant_id =
self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path {
Some(p) => MirLowerError::UnresolvedName(
hir_display_with_store(&**p, self.body)
hir_display_with_store(&**p, self.store)
.display(self.db, self.display_target())
.to_string(),
),
@@ -1382,7 +1382,7 @@ fn lower_expr_to_place_without_adjust(
}
fn push_field_projection(&mut self, place: &mut Place, expr_id: ExprId) -> Result<'db, ()> {
if let Expr::Field { expr, name } = &self.body[expr_id] {
if let Expr::Field { expr, name } = &self.store[expr_id] {
if let TyKind::Tuple(..) = self.expr_ty_after_adjustments(*expr).kind() {
let index =
name.as_tuple_index().ok_or(MirLowerError::TypeError("named field on tuple"))?
@@ -1411,7 +1411,7 @@ fn lower_literal_or_const_to_operand(
ty: Ty<'db>,
loc: &ExprId,
) -> Result<'db, Operand> {
match &self.body[*loc] {
match &self.store[*loc] {
Expr::Literal(l) => self.lower_literal_to_operand(ty, l),
Expr::Path(c) => {
let owner = self.owner;
@@ -1421,7 +1421,7 @@ fn lower_literal_or_const_to_operand(
self.db,
c,
DisplayTarget::from_crate(db, owner.krate(db)),
self.body,
self.store,
)
};
let pr = self
@@ -1859,7 +1859,7 @@ fn lower_block_to_place(
}
} else {
let mut err = None;
self.body.walk_bindings_in_pat(*pat, |b| {
self.store.walk_bindings_in_pat(*pat, |b| {
if let Err(e) = self.push_storage_live(b, current) {
err = Some(e);
}
@@ -1913,9 +1913,9 @@ fn lower_params_and_bindings(
self.result.param_locals.extend(params.clone().map(|(it, ty)| {
let local_id = self.result.locals.alloc(Local { ty: ty.store() });
self.drop_scopes.last_mut().unwrap().locals.push(local_id);
if let Pat::Bind { id, subpat: None } = self.body[it]
if let Pat::Bind { id, subpat: None } = self.store[it]
&& matches!(
self.body[id].mode,
self.store[id].mode,
BindingAnnotation::Unannotated | BindingAnnotation::Mutable
)
{
@@ -1924,7 +1924,7 @@ fn lower_params_and_bindings(
local_id
}));
// and then rest of bindings
for (id, _) in self.body.bindings() {
for (id, _) in self.store.bindings() {
if !pick_binding(id) {
continue;
}
@@ -1953,7 +1953,7 @@ fn lower_params_and_bindings(
.into_iter()
.skip(base_param_count + self_binding.is_some() as usize);
for ((param, _), local) in params.zip(local_params) {
if let Pat::Bind { id, .. } = self.body[param]
if let Pat::Bind { id, .. } = self.store[param]
&& local == self.binding_local(id)?
{
continue;
@@ -2115,7 +2115,7 @@ pub fn mir_body_for_closure_query<'db>(
implementation_error!("closure expression is not closure");
};
let (captures, kind) = infer.closure_info(closure);
let mut ctx = MirLowerCtx::new(db, owner, &body, infer);
let mut ctx = MirLowerCtx::new(db, owner, &body.store, infer);
// 0 is return local
ctx.result.locals.alloc(Local { ty: infer.expr_ty(*root).store() });
let closure_local = ctx.result.locals.alloc(Local {
@@ -2205,7 +2205,7 @@ pub fn mir_body_for_closure_query<'db>(
.result
.binding_locals
.into_iter()
.filter(|it| ctx.body.binding_owner(it.0) == Some(expr))
.filter(|it| ctx.store.binding_owner(it.0) == Some(expr))
.collect();
if let Some(err) = err {
return Err(MirLowerError::UnresolvedUpvar(err));
@@ -2245,7 +2245,7 @@ pub fn mir_body_query<'db>(
let _p = tracing::info_span!("mir_body_query", ?detail).entered();
let body = db.body(def);
let infer = InferenceResult::for_body(db, def);
let mut result = lower_to_mir(db, def, &body, infer, body.body_expr)?;
let mut result = lower_body_to_mir(db, def, &body, infer, body.body_expr)?;
result.shrink_to_fit();
Ok(Arc::new(result))
}
@@ -2258,44 +2258,74 @@ pub(crate) fn mir_body_cycle_result<'db>(
Err(MirLowerError::Loop)
}
pub fn lower_to_mir<'db>(
/// Extracts params from `body.params`/`body.self_param` and the callable signature,
/// then delegates to [`lower_to_mir_with_store`].
pub fn lower_body_to_mir<'db>(
db: &'db dyn HirDatabase,
owner: DefWithBodyId,
body: &Body,
infer: &InferenceResult,
// FIXME: root_expr should always be the body.body_expr, but since `X` in `[(); X]` doesn't have its own specific body yet, we
// need to take this input explicitly.
// FIXME: root_expr should always be the body.body_expr,
// but this is currently also used for `X` in `[(); X]`, which lives in the same expression store
root_expr: ExprId,
) -> Result<'db, MirBody> {
let is_root = root_expr == body.body_expr;
// Extract params and self_param only when lowering the body's root expression for a function.
if is_root && let DefWithBodyId::FunctionId(fid) = owner {
let callable_sig =
db.callable_item_signature(fid.into()).instantiate_identity().skip_binder();
let mut param_tys = callable_sig.inputs().iter().copied();
let self_param = body.self_param.and_then(|id| Some((id, param_tys.next()?)));
lower_to_mir_with_store(
db,
owner,
&body.store,
infer,
root_expr,
body.params.iter().copied().zip(param_tys),
self_param,
is_root,
)
} else {
lower_to_mir_with_store(
db,
owner,
&body.store,
infer,
root_expr,
iter::empty(),
None,
is_root,
)
}
}
/// # Parameters
/// - `is_root`: `true` when `root_expr` is the body's top-level expression (picks
/// bindings with no owner); `false` when lowering an inline const or anonymous
/// const (picks bindings owned by `root_expr`).
pub fn lower_to_mir_with_store<'db>(
db: &'db dyn HirDatabase,
owner: DefWithBodyId,
store: &ExpressionStore,
infer: &InferenceResult,
root_expr: ExprId,
params: impl Iterator<Item = (PatId, Ty<'db>)> + Clone,
self_param: Option<(BindingId, Ty<'db>)>,
is_root: bool,
) -> Result<'db, MirBody> {
if infer.type_mismatches().next().is_some() || infer.is_erroneous() {
return Err(MirLowerError::HasErrors);
}
let mut ctx = MirLowerCtx::new(db, owner, body, infer);
let mut ctx = MirLowerCtx::new(db, owner, store, infer);
// 0 is return local
ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr).store() });
let binding_picker = |b: BindingId| {
let owner = ctx.body.binding_owner(b);
if root_expr == body.body_expr { owner.is_none() } else { owner == Some(root_expr) }
};
// 1 to param_len is for params
// FIXME: replace with let chain once it becomes stable
let current = 'b: {
if body.body_expr == root_expr {
// otherwise it's an inline const, and has no parameter
if let DefWithBodyId::FunctionId(fid) = owner {
let callable_sig =
db.callable_item_signature(fid.into()).instantiate_identity().skip_binder();
let mut params = callable_sig.inputs().iter().copied();
let self_param = body.self_param.and_then(|id| Some((id, params.next()?)));
break 'b ctx.lower_params_and_bindings(
body.params.iter().zip(params).map(|(it, y)| (*it, y)),
self_param,
binding_picker,
)?;
}
}
ctx.lower_params_and_bindings([].into_iter(), None, binding_picker)?
let owner = ctx.store.binding_owner(b);
if is_root { owner.is_none() } else { owner == Some(root_expr) }
};
let current = ctx.lower_params_and_bindings(params, self_param, binding_picker)?;
if let Some(current) = ctx.lower_expr_to_place(root_expr, return_slot().into(), current)? {
let current = ctx.pop_drop_scope_assert_finished(current, root_expr.into())?;
ctx.set_terminator(current, TerminatorKind::Return, root_expr.into());
@@ -137,11 +137,11 @@ pub(super) fn lower_expr_as_place_without_adjust(
}
this.lower_expr_to_some_place_without_adjust(expr_id, current)
};
match &self.body[expr_id] {
match &self.store[expr_id] {
Expr::Path(p) => {
let resolver_guard =
self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
let hygiene = self.body.expr_path_hygiene(expr_id);
let hygiene = self.store.expr_path_hygiene(expr_id);
let resolved = self.resolver.resolve_path_in_value_ns_fully(self.db, p, hygiene);
self.resolver.reset_to_guard(resolver_guard);
let Some(pr) = resolved else {
@@ -131,7 +131,7 @@ fn pattern_match_inner(
.collect::<Vec<_>>()
.into(),
);
Ok(match &self.body[pattern] {
Ok(match &self.store[pattern] {
Pat::Missing => return Err(MirLowerError::IncompletePattern),
Pat::Wild => (current, current_else),
Pat::Tuple { args, ellipsis } => {
@@ -322,7 +322,7 @@ fn pattern_match_inner(
}
if let &Some(slice) = slice
&& mode != MatchingMode::Check
&& let Pat::Bind { id, subpat: _ } = self.body[slice]
&& let Pat::Bind { id, subpat: _ } = self.store[slice]
{
let next_place = cond_place.project(
ProjectionElem::Subslice {
@@ -363,9 +363,14 @@ fn pattern_match_inner(
)?,
None => {
let unresolved_name = || {
MirLowerError::unresolved_path(self.db, p, self.display_target(), self.body)
MirLowerError::unresolved_path(
self.db,
p,
self.display_target(),
self.store,
)
};
let hygiene = self.body.pat_path_hygiene(pattern);
let hygiene = self.store.pat_path_hygiene(pattern);
let pr = self
.resolver
.resolve_path_in_value_ns(self.db, p, hygiene)
@@ -432,7 +437,7 @@ fn pattern_match_inner(
(next, Some(else_target))
}
},
Pat::Lit(l) => match &self.body[*l] {
Pat::Lit(l) => match &self.store[*l] {
Expr::Literal(l) => {
if mode == MatchingMode::Check {
let c = self.lower_literal_to_operand(self.infer.pat_ty(pattern), l)?;
@@ -31,6 +31,7 @@ fn foo() -> i32 {
&[("InferenceResult::for_body_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@@ -118,6 +119,7 @@ fn baz() -> i32 {
&[("InferenceResult::for_body_", 3)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@@ -237,6 +239,7 @@ fn bar() -> f32 {
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@@ -311,6 +314,7 @@ fn bar() -> f32 {
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@@ -386,6 +390,7 @@ fn bar() -> f32 {
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@@ -462,6 +467,7 @@ pub struct SomeStruct {
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@@ -562,6 +568,7 @@ fn main() {
&[("trait_solve_shim", 0)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
"crate_local_def_map",
"file_item_tree_query",
"ast_id_map_shim",
@@ -9,16 +9,16 @@
fn cfg_impl_def() {
check_types(
r#"
//- /main.rs crate:main deps:foo cfg:test
//- /main.rs crate:main deps:foo cfg:some_cfg
use foo::S as T;
struct S;
#[cfg(test)]
#[cfg(some_cfg)]
impl S {
fn foo1(&self) -> i32 { 0 }
}
#[cfg(not(test))]
#[cfg(not(some_cfg))]
impl S {
fn foo2(&self) -> i32 { 0 }
}
@@ -31,12 +31,12 @@ fn test() {
//- /foo.rs crate:foo
pub struct S;
#[cfg(not(test))]
#[cfg(not(some_cfg))]
impl S {
pub fn foo3(&self) -> i32 { 0 }
}
#[cfg(test)]
#[cfg(some_cfg)]
impl S {
pub fn foo4(&self) -> i32 { 0 }
}
@@ -2815,3 +2815,28 @@ fn contains_0<S: Collection<Item = i32>>(points: &S) {
"#,
);
}
// Regression test (named after issue #21773): associated-type shorthand
// resolution through layered supertrait bounds. `SelfAbs` requires
// `Abs + Neg` and additionally bounds `Self::Output` itself with
// `Neg<Output = Self::Output> + Abs`; with `T: SelfAbs<Output = T>`,
// calling `v.abs()` must resolve `Self::Output` to `T` without the
// inference engine reporting a type mismatch.
// NOTE(review): presumably this previously hit the ambiguous-shorthand
// path changed in this commit — confirm against the linked issue.
#[test]
fn regression_21773() {
check_no_mismatches(
r#"
trait Neg {
type Output;
}
trait Abs: Neg {
fn abs(&self) -> Self::Output;
}
trait SelfAbs: Abs + Neg
where
Self::Output: Neg<Output = Self::Output> + Abs,
{
}
fn wrapped_abs<T: SelfAbs<Output = T>>(v: T) -> T {
v.abs()
}
"#,
);
}
@@ -1091,7 +1091,7 @@ fn macro_call_diagnostics<'db>(
let file_id = loc.kind.file_id();
let mut range = precise_macro_call_location(&loc.kind, db, loc.krate);
let RenderedExpandError { message, error, kind } = err.render_to_string(db);
if Some(err.span().anchor.file_id) == file_id.file_id().map(|it| it.editioned_file_id(db)) {
if Some(err.span().anchor.file_id) == file_id.file_id().map(|it| it.span_file_id(db)) {
range.value = err.span().range
+ db.ast_id_map(file_id).get_erased(err.span().anchor.ast_id).text_range().start();
}
@@ -472,12 +472,12 @@ pub fn first_crate(&self, file: FileId) -> Option<Crate> {
pub fn attach_first_edition_opt(&self, file: FileId) -> Option<EditionedFileId> {
let krate = self.file_to_module_defs(file).next()?.krate(self.db);
Some(EditionedFileId::new(self.db, file, krate.edition(self.db), krate.id))
Some(EditionedFileId::new(self.db, file, krate.edition(self.db)))
}
pub fn attach_first_edition(&self, file: FileId) -> EditionedFileId {
self.attach_first_edition_opt(file)
.unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(self.db, file))
.unwrap_or_else(|| EditionedFileId::current_edition(self.db, file))
}
pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
@@ -93,7 +93,6 @@ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: Hi
impl ChildBySource for ItemScope {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let krate = file_id.krate(db);
self.declarations().for_each(|item| add_module_def(db, res, file_id, item));
self.impls().for_each(|imp| insert_item_loc(db, res, file_id, imp, keys::IMPL));
self.extern_blocks().for_each(|extern_block| {
@@ -123,6 +122,8 @@ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: Hi
|(ast_id, calls)| {
let adt = ast_id.to_node(db);
calls.for_each(|(attr_id, call_id, calls)| {
// FIXME: Is this the right crate?
let krate = call_id.lookup(db).krate;
// FIXME: Fix cfg_attr handling.
let (attr, _, _, _) = attr_id.find_attr_range_with_source(db, krate, &adt);
res[keys::DERIVE_MACRO_CALL]
@@ -2,7 +2,7 @@
use ide_db::defs::{Definition, NameRefClass};
use syntax::{
AstNode,
ast::{self, HasArgList, HasGenericArgs, make, syntax_factory::SyntaxFactory},
ast::{self, HasArgList, HasGenericArgs, syntax_factory::SyntaxFactory},
syntax_editor::Position,
};
@@ -94,20 +94,21 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
ident.text_range(),
|builder| {
let mut editor = builder.make_editor(let_stmt.syntax());
let make = SyntaxFactory::without_mappings();
if let_stmt.semicolon_token().is_none() {
editor.insert(
Position::last_child_of(let_stmt.syntax()),
make::tokens::semicolon(),
make.token(syntax::SyntaxKind::SEMICOLON),
);
}
let placeholder_ty = make::ty_placeholder().clone_for_update();
let placeholder_ty = make.ty_placeholder();
if let Some(pat) = let_stmt.pat() {
let elements = vec![
make::token(syntax::SyntaxKind::COLON).into(),
make::token(syntax::SyntaxKind::WHITESPACE).into(),
make.token(syntax::SyntaxKind::COLON).into(),
make.whitespace(" ").into(),
placeholder_ty.syntax().clone().into(),
];
editor.insert_all(Position::after(pat.syntax()), elements);
@@ -188,7 +189,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
/// This will create a turbofish generic arg list corresponding to the number of arguments
fn get_fish_head(make: &SyntaxFactory, number_of_arguments: usize) -> ast::GenericArgList {
let args = (0..number_of_arguments).map(|_| make::type_arg(make::ty_placeholder()).into());
let args = (0..number_of_arguments).map(|_| make.type_arg(make.ty_placeholder()).into());
make.generic_arg_list(args, true)
}
@@ -102,11 +102,7 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
ast::Expr::BlockExpr(block) => unwrap_trivial_block(block),
e => e,
};
let cond = if invert_cond {
invert_boolean_expression(&make, cond)
} else {
cond.clone_for_update()
};
let cond = if invert_cond { invert_boolean_expression(&make, cond) } else { cond };
let parenthesize = matches!(
cond,
@@ -12,9 +12,10 @@
};
use itertools::Itertools;
use syntax::ast::edit::AstNodeEdit;
use syntax::ast::syntax_factory::SyntaxFactory;
use syntax::{
AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T,
ast::{self, HasName, edit::IndentLevel, make},
ast::{self, HasName, edit::IndentLevel},
};
use crate::{
@@ -62,19 +63,28 @@ pub(crate) fn convert_bool_to_enum(acc: &mut Assists, ctx: &AssistContext<'_>) -
"Convert boolean to enum",
target,
|edit| {
let make = SyntaxFactory::without_mappings();
if let Some(ty) = &ty_annotation {
cov_mark::hit!(replaces_ty_annotation);
edit.replace(ty.syntax().text_range(), "Bool");
}
if let Some(initializer) = initializer {
replace_bool_expr(edit, initializer);
replace_bool_expr(edit, initializer, &make);
}
let usages = definition.usages(&ctx.sema).all();
add_enum_def(edit, ctx, &usages, target_node, &target_module);
add_enum_def(edit, ctx, &usages, target_node, &target_module, &make);
let mut delayed_mutations = Vec::new();
replace_usages(edit, ctx, usages, definition, &target_module, &mut delayed_mutations);
replace_usages(
edit,
ctx,
usages,
definition,
&target_module,
&mut delayed_mutations,
&make,
);
for (scope, path) in delayed_mutations {
insert_use(&scope, path, &ctx.config.insert_use);
}
@@ -168,16 +178,16 @@ fn find_bool_node(ctx: &AssistContext<'_>) -> Option<BoolNodeData> {
}
}
fn replace_bool_expr(edit: &mut SourceChangeBuilder, expr: ast::Expr) {
fn replace_bool_expr(edit: &mut SourceChangeBuilder, expr: ast::Expr, make: &SyntaxFactory) {
let expr_range = expr.syntax().text_range();
let enum_expr = bool_expr_to_enum_expr(expr);
let enum_expr = bool_expr_to_enum_expr(expr, make);
edit.replace(expr_range, enum_expr.syntax().text())
}
/// Converts an expression of type `bool` to one of the new enum type.
fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr {
let true_expr = make::expr_path(make::path_from_text("Bool::True"));
let false_expr = make::expr_path(make::path_from_text("Bool::False"));
fn bool_expr_to_enum_expr(expr: ast::Expr, make: &SyntaxFactory) -> ast::Expr {
let true_expr = make.expr_path(make.path_from_text("Bool::True"));
let false_expr = make.expr_path(make.path_from_text("Bool::False"));
if let ast::Expr::Literal(literal) = &expr {
match literal.kind() {
@@ -186,10 +196,10 @@ fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr {
_ => expr,
}
} else {
make::expr_if(
make.expr_if(
expr,
make::tail_only_block_expr(true_expr),
Some(ast::ElseBranch::Block(make::tail_only_block_expr(false_expr))),
make.tail_only_block_expr(true_expr),
Some(ast::ElseBranch::Block(make.tail_only_block_expr(false_expr))),
)
.into()
}
@@ -203,11 +213,13 @@ fn replace_usages(
target_definition: Definition,
target_module: &hir::Module,
delayed_mutations: &mut Vec<(ImportScope, ast::Path)>,
make: &SyntaxFactory,
) {
for (file_id, references) in usages {
edit.edit_file(file_id.file_id(ctx.db()));
let refs_with_imports = augment_references_with_imports(ctx, references, target_module);
let refs_with_imports =
augment_references_with_imports(ctx, references, target_module, make);
refs_with_imports.into_iter().rev().for_each(
|FileReferenceWithImport { range, name, import_data }| {
@@ -224,12 +236,13 @@ fn replace_usages(
target_definition,
target_module,
delayed_mutations,
make,
)
}
} else if let Some(initializer) = find_assignment_usage(&name) {
cov_mark::hit!(replaces_assignment);
replace_bool_expr(edit, initializer);
replace_bool_expr(edit, initializer, make);
} else if let Some((prefix_expr, inner_expr)) = find_negated_usage(&name) {
cov_mark::hit!(replaces_negation);
@@ -247,7 +260,7 @@ fn replace_usages(
{
cov_mark::hit!(replaces_record_expr);
let enum_expr = bool_expr_to_enum_expr(initializer);
let enum_expr = bool_expr_to_enum_expr(initializer, make);
utils::replace_record_field_expr(ctx, edit, record_field, enum_expr);
} else if let Some(pat) = find_record_pat_field_usage(&name) {
match pat {
@@ -263,6 +276,7 @@ fn replace_usages(
target_definition,
target_module,
delayed_mutations,
make,
)
}
}
@@ -272,14 +286,14 @@ fn replace_usages(
if let Some(expr) = literal_pat.literal().and_then(|literal| {
literal.syntax().ancestors().find_map(ast::Expr::cast)
}) {
replace_bool_expr(edit, expr);
replace_bool_expr(edit, expr, make);
}
}
_ => (),
}
} else if let Some((ty_annotation, initializer)) = find_assoc_const_usage(&name) {
edit.replace(ty_annotation.syntax().text_range(), "Bool");
replace_bool_expr(edit, initializer);
replace_bool_expr(edit, initializer, make);
} else if let Some(receiver) = find_method_call_expr_usage(&name) {
edit.replace(
receiver.syntax().text_range(),
@@ -296,10 +310,10 @@ fn replace_usages(
ctx,
edit,
record_field,
make::expr_bin_op(
make.expr_bin_op(
expr,
ast::BinaryOp::CmpOp(ast::CmpOp::Eq { negated: false }),
make::expr_path(make::path_from_text("Bool::True")),
make.expr_path(make.path_from_text("Bool::True")),
),
);
} else {
@@ -327,6 +341,7 @@ fn augment_references_with_imports(
ctx: &AssistContext<'_>,
references: Vec<FileReference>,
target_module: &hir::Module,
make: &SyntaxFactory,
) -> Vec<FileReferenceWithImport> {
let mut visited_modules = FxHashSet::default();
@@ -357,9 +372,9 @@ fn augment_references_with_imports(
cfg,
)
.map(|mod_path| {
make::path_concat(
make.path_concat(
mod_path_to_ast(&mod_path, edition),
make::path_from_text("Bool"),
make.path_from_text("Bool"),
)
})?;
@@ -458,6 +473,7 @@ fn add_enum_def(
usages: &UsageSearchResult,
target_node: SyntaxNode,
target_module: &hir::Module,
make: &SyntaxFactory,
) -> Option<()> {
let insert_before = node_to_insert_before(target_node);
@@ -482,7 +498,7 @@ fn add_enum_def(
.any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);
let indent = IndentLevel::from_node(&insert_before);
let enum_def = make_bool_enum(make_enum_pub).reset_indent().indent(indent);
let enum_def = make_bool_enum(make_enum_pub, make).reset_indent().indent(indent);
edit.insert(
insert_before.text_range().start(),
@@ -504,31 +520,30 @@ fn node_to_insert_before(target_node: SyntaxNode) -> SyntaxNode {
.unwrap_or(target_node)
}
fn make_bool_enum(make_pub: bool) -> ast::Enum {
let derive_eq = make::attr_outer(make::meta_token_tree(
make::ext::ident_path("derive"),
make::token_tree(
fn make_bool_enum(make_pub: bool, make: &SyntaxFactory) -> ast::Enum {
let derive_eq = make.attr_outer(make.meta_token_tree(
make.ident_path("derive"),
make.token_tree(
T!['('],
vec![
NodeOrToken::Token(make::tokens::ident("PartialEq")),
NodeOrToken::Token(make::token(T![,])),
NodeOrToken::Token(make::tokens::single_space()),
NodeOrToken::Token(make::tokens::ident("Eq")),
NodeOrToken::Token(make.ident("PartialEq")),
NodeOrToken::Token(make.token(T![,])),
NodeOrToken::Token(make.whitespace(" ")),
NodeOrToken::Token(make.ident("Eq")),
],
),
));
make::enum_(
make.enum_(
[derive_eq],
if make_pub { Some(make::visibility_pub()) } else { None },
make::name("Bool"),
if make_pub { Some(make.visibility_pub()) } else { None },
make.name("Bool"),
None,
None,
make::variant_list(vec![
make::variant(None, make::name("True"), None, None),
make::variant(None, make::name("False"), None, None),
make.variant_list(vec![
make.variant(None, make.name("True"), None, None),
make.variant(None, make.name("False"), None, None),
]),
)
.clone_for_update()
}
#[cfg(test)]
@@ -2,7 +2,10 @@
use hir::HirDisplay;
use ide_db::syntax_helpers::node_ext::walk_ty;
use syntax::{
ast::{self, AstNode, HasGenericArgs, HasGenericParams, HasName, edit::IndentLevel, make},
ast::{
self, AstNode, HasGenericArgs, HasGenericParams, HasName, edit::IndentLevel,
syntax_factory::SyntaxFactory,
},
syntax_editor,
};
@@ -43,10 +46,9 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let resolved_ty = ctx.sema.resolve_type(&ty)?;
let resolved_ty = if !resolved_ty.contains_unknown() {
let module = ctx.sema.scope(ty.syntax())?.module();
let resolved_ty = resolved_ty.display_source_code(ctx.db(), module.into(), false).ok()?;
make::ty(&resolved_ty)
resolved_ty.display_source_code(ctx.db(), module.into(), false).ok()?
} else {
ty.clone()
ty.to_string()
};
acc.add(
@@ -55,6 +57,9 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
target,
|builder| {
let mut edit = builder.make_editor(node);
let make = SyntaxFactory::without_mappings();
let resolved_ty = make.ty(&resolved_ty);
let mut known_generics = match item.generic_param_list() {
Some(it) => it.generic_params().collect(),
@@ -68,22 +73,20 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
}
let generics = collect_used_generics(&ty, &known_generics);
let generic_params =
generics.map(|it| make::generic_param_list(it.into_iter().cloned()));
generics.map(|it| make.generic_param_list(it.into_iter().cloned()));
// Replace original type with the alias
let ty_args = generic_params.as_ref().map(|it| it.to_generic_args().generic_args());
let new_ty = if let Some(ty_args) = ty_args {
make::generic_ty_path_segment(make::name_ref("Type"), ty_args)
make.generic_ty_path_segment(make.name_ref("Type"), ty_args)
} else {
make::path_segment(make::name_ref("Type"))
}
.clone_for_update();
make.path_segment(make.name_ref("Type"))
};
edit.replace(ty.syntax(), new_ty.syntax());
// Insert new alias
let ty_alias =
make::ty_alias(None, "Type", generic_params, None, None, Some((resolved_ty, None)))
.clone_for_update();
make.ty_alias(None, "Type", generic_params, None, None, Some((resolved_ty, None)));
if let Some(cap) = ctx.config.snippet_cap
&& let Some(name) = ty_alias.name()
@@ -96,7 +99,7 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
syntax_editor::Position::before(node),
vec![
ty_alias.syntax().clone().into(),
make::tokens::whitespace(&format!("\n\n{indent}")).into(),
make.whitespace(&format!("\n\n{indent}")).into(),
],
);
@@ -2,7 +2,7 @@
use stdx::format_to;
use syntax::{
AstNode,
ast::{self, HasGenericParams, HasName, HasTypeBounds, Impl, make},
ast::{self, HasGenericParams, HasName, HasTypeBounds, Impl, syntax_factory::SyntaxFactory},
};
use crate::{
@@ -72,7 +72,9 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
let default_code = " fn default() -> Self {
Self::new()
}";
let code = generate_trait_impl_text_from_impl(&impl_, self_ty, "Default", default_code);
let make = SyntaxFactory::without_mappings();
let code =
generate_trait_impl_text_from_impl(&impl_, self_ty, "Default", default_code, &make);
builder.insert(insert_location.end(), code);
},
)
@@ -84,6 +86,7 @@ fn generate_trait_impl_text_from_impl(
self_ty: ast::Type,
trait_text: &str,
code: &str,
make: &SyntaxFactory,
) -> String {
let generic_params = impl_.generic_param_list().map(|generic_params| {
let lifetime_params =
@@ -92,18 +95,18 @@ fn generate_trait_impl_text_from_impl(
// remove defaults since they can't be specified in impls
let param = match param {
ast::TypeOrConstParam::Type(param) => {
let param = make::type_param(param.name()?, param.type_bound_list());
let param = make.type_param(param.name()?, param.type_bound_list());
ast::GenericParam::TypeParam(param)
}
ast::TypeOrConstParam::Const(param) => {
let param = make::const_param(param.name()?, param.ty()?);
let param = make.const_param(param.name()?, param.ty()?);
ast::GenericParam::ConstParam(param)
}
};
Some(param)
});
make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))
make.generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))
});
let mut buf = String::with_capacity(code.len());
@@ -2,7 +2,7 @@
use ide_db::assists::{AssistId, GroupLabel};
use syntax::{
AstNode,
ast::{self, HasGenericParams, HasName, edit::IndentLevel, make},
ast::{self, HasGenericParams, HasName, edit::IndentLevel, syntax_factory::SyntaxFactory},
syntax_editor,
};
@@ -56,6 +56,7 @@ pub(crate) fn generate_fn_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>)
func_node.syntax().text_range(),
|builder| {
let mut edit = builder.make_editor(func);
let make = SyntaxFactory::without_mappings();
let alias_name = format!("{}Fn", stdx::to_camel_case(&name.to_string()));
@@ -68,24 +69,24 @@ pub(crate) fn generate_fn_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>)
let is_mut = self_ty.is_mutable_reference();
if let Some(adt) = self_ty.strip_references().as_adt() {
let inner_type = make::ty(adt.name(ctx.db()).as_str());
let inner_type = make.ty(adt.name(ctx.db()).as_str());
let ast_self_ty =
if is_ref { make::ty_ref(inner_type, is_mut) } else { inner_type };
if is_ref { make.ty_ref(inner_type, is_mut) } else { inner_type };
fn_params_vec.push(make::unnamed_param(ast_self_ty));
fn_params_vec.push(make.unnamed_param(ast_self_ty));
}
}
fn_params_vec.extend(param_list.params().filter_map(|p| match style {
ParamStyle::Named => Some(p),
ParamStyle::Unnamed => p.ty().map(make::unnamed_param),
ParamStyle::Unnamed => p.ty().map(|ty| make.unnamed_param(ty)),
}));
let generic_params = func_node.generic_param_list();
let is_unsafe = func_node.unsafe_token().is_some();
let ty = make::ty_fn_ptr(
let ty = make.ty_fn_ptr(
is_unsafe,
func_node.abi(),
fn_params_vec.into_iter(),
@@ -93,22 +94,21 @@ pub(crate) fn generate_fn_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>)
);
// Insert new alias
let ty_alias = make::ty_alias(
let ty_alias = make.ty_alias(
None,
&alias_name,
generic_params,
None,
None,
Some((ast::Type::FnPtrType(ty), None)),
)
.clone_for_update();
);
let indent = IndentLevel::from_node(insertion_node);
edit.insert_all(
syntax_editor::Position::before(insertion_node),
vec![
ty_alias.syntax().clone().into(),
make::tokens::whitespace(&format!("\n\n{indent}")).into(),
make.whitespace(&format!("\n\n{indent}")).into(),
],
);
@@ -1,11 +1,8 @@
use either::Either;
use syntax::{
ast::{
self, AstNode, HasName, HasTypeBounds,
edit_in_place::{GenericParamsOwnerEdit, Removable},
make,
},
ast::{self, AstNode, HasName, HasTypeBounds, syntax_factory::SyntaxFactory},
match_ast,
syntax_editor::{GetOrCreateWhereClause, Removable},
};
use crate::{AssistContext, AssistId, Assists};
@@ -47,18 +44,23 @@ pub(crate) fn move_bounds_to_where_clause(
AssistId::refactor_rewrite("move_bounds_to_where_clause"),
"Move to where clause",
target,
|edit| {
let type_param_list = edit.make_mut(type_param_list);
let parent = edit.make_syntax_mut(parent);
|builder| {
let mut edit = builder.make_editor(&parent);
let make = SyntaxFactory::without_mappings();
let where_clause: ast::WhereClause = match_ast! {
match parent {
ast::Fn(it) => it.get_or_create_where_clause(),
ast::Trait(it) => it.get_or_create_where_clause(),
ast::Impl(it) => it.get_or_create_where_clause(),
ast::Enum(it) => it.get_or_create_where_clause(),
ast::Struct(it) => it.get_or_create_where_clause(),
ast::TypeAlias(it) => it.get_or_create_where_clause(),
let new_preds: Vec<ast::WherePred> = type_param_list
.generic_params()
.filter_map(|param| build_predicate(param, &make))
.collect();
match_ast! {
match (&parent) {
ast::Fn(it) => it.get_or_create_where_clause(&mut edit, &make, new_preds.into_iter()),
ast::Trait(it) => it.get_or_create_where_clause(&mut edit, &make, new_preds.into_iter()),
ast::Impl(it) => it.get_or_create_where_clause(&mut edit, &make, new_preds.into_iter()),
ast::Enum(it) => it.get_or_create_where_clause(&mut edit, &make, new_preds.into_iter()),
ast::Struct(it) => it.get_or_create_where_clause(&mut edit, &make, new_preds.into_iter()),
ast::TypeAlias(it) => it.get_or_create_where_clause(&mut edit, &make, new_preds.into_iter()),
_ => return,
}
};
@@ -70,25 +72,22 @@ pub(crate) fn move_bounds_to_where_clause(
ast::GenericParam::ConstParam(_) => continue,
};
if let Some(tbl) = param.type_bound_list() {
if let Some(predicate) = build_predicate(generic_param) {
where_clause.add_predicate(predicate)
}
tbl.remove()
tbl.remove(&mut edit);
}
}
builder.add_file_edits(ctx.vfs_file_id(), edit);
},
)
}
fn build_predicate(param: ast::GenericParam) -> Option<ast::WherePred> {
fn build_predicate(param: ast::GenericParam, make: &SyntaxFactory) -> Option<ast::WherePred> {
let target = match &param {
ast::GenericParam::TypeParam(t) => {
Either::Right(make::ty_path(make::ext::ident_path(&t.name()?.to_string())))
}
ast::GenericParam::TypeParam(t) => Either::Right(make.ty(&t.name()?.to_string())),
ast::GenericParam::LifetimeParam(l) => Either::Left(l.lifetime()?),
ast::GenericParam::ConstParam(_) => return None,
};
let predicate = make::where_pred(
let predicate = make.where_pred(
target,
match param {
ast::GenericParam::TypeParam(t) => t.type_bound_list()?,
@@ -97,7 +96,7 @@ fn build_predicate(param: ast::GenericParam) -> Option<ast::WherePred> {
}
.bounds(),
);
Some(predicate.clone_for_update())
Some(predicate)
}
#[cfg(test)]
@@ -5,9 +5,10 @@
defs::Definition,
search::{SearchScope, UsageSearchResult},
};
use syntax::ast::syntax_factory::SyntaxFactory;
use syntax::{
AstNode,
ast::{self, HasGenericParams, HasName, HasTypeBounds, Name, NameLike, PathType, make},
ast::{self, HasGenericParams, HasName, HasTypeBounds, Name, NameLike, PathType},
match_ast,
};
@@ -72,6 +73,7 @@ pub(crate) fn replace_named_generic_with_impl(
target,
|edit| {
let mut editor = edit.make_editor(type_param.syntax());
let make = SyntaxFactory::without_mappings();
// remove trait from generic param list
if let Some(generic_params) = fn_.generic_param_list() {
@@ -83,17 +85,14 @@ pub(crate) fn replace_named_generic_with_impl(
if params.is_empty() {
editor.delete(generic_params.syntax());
} else {
let new_generic_param_list = make::generic_param_list(params);
editor.replace(
generic_params.syntax(),
new_generic_param_list.syntax().clone_for_update(),
);
let new_generic_param_list = make.generic_param_list(params);
editor.replace(generic_params.syntax(), new_generic_param_list.syntax());
}
}
let new_bounds = make::impl_trait_type(type_bound_list);
let new_bounds = make.impl_trait_type(type_bound_list);
for path_type in path_types_to_replace.iter().rev() {
editor.replace(path_type.syntax(), new_bounds.clone_for_update().syntax());
editor.replace(path_type.syntax(), new_bounds.syntax());
}
edit.add_file_edits(ctx.vfs_file_id(), editor);
},
@@ -1,8 +1,5 @@
use hir::AsAssocItem;
use syntax::{
TextRange,
ast::{self, AstNode, HasArgList, prec::ExprPrecedence},
};
use syntax::ast::{self, AstNode, HasArgList, prec::ExprPrecedence, syntax_factory::SyntaxFactory};
use crate::{AssistContext, AssistId, Assists};
@@ -36,10 +33,7 @@ pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>)
}
let args = call.arg_list()?;
let l_paren = args.l_paren_token()?;
let mut args_iter = args.args();
let first_arg = args_iter.next()?;
let second_arg = args_iter.next();
let first_arg = args.args().next()?;
let qualifier = path.qualifier()?;
let method_name = path.segment()?.name_ref()?;
@@ -51,43 +45,33 @@ pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>)
return None;
}
// `core::ops::Add::add(` -> ``
let delete_path =
TextRange::new(path.syntax().text_range().start(), l_paren.text_range().end());
// Parens around `expr` if needed
let parens = first_arg.precedence().needs_parentheses_in(ExprPrecedence::Postfix).then(|| {
let range = first_arg.syntax().text_range();
(range.start(), range.end())
});
// `, ` -> `.add(`
let replace_comma = TextRange::new(
first_arg.syntax().text_range().end(),
second_arg
.map(|a| a.syntax().text_range().start())
.unwrap_or_else(|| first_arg.syntax().text_range().end()),
);
acc.add(
AssistId::refactor_rewrite("unqualify_method_call"),
"Unqualify method call",
call.syntax().text_range(),
|edit| {
edit.delete(delete_path);
if let Some((open, close)) = parens {
edit.insert(open, "(");
edit.insert(close, ")");
}
edit.replace(replace_comma, format!(".{method_name}("));
|builder| {
let make = SyntaxFactory::with_mappings();
let mut editor = builder.make_editor(call.syntax());
let new_arg_list = make.arg_list(args.args().skip(1));
let receiver = if first_arg.precedence().needs_parentheses_in(ExprPrecedence::Postfix) {
ast::Expr::from(make.expr_paren(first_arg.clone()))
} else {
first_arg.clone()
};
let method_call = make.expr_method_call(receiver, method_name, new_arg_list);
editor.replace(call.syntax(), method_call.syntax());
if let Some(fun) = fun.as_assoc_item(ctx.db())
&& let Some(trait_) = fun.container_or_implemented_trait(ctx.db())
&& !scope.can_use_trait_methods(trait_)
{
// Only add an import for trait methods that are not already imported.
add_import(qualifier, ctx, edit);
add_import(qualifier, ctx, &make, &mut editor);
}
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -95,7 +79,8 @@ pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>)
fn add_import(
qualifier: ast::Path,
ctx: &AssistContext<'_>,
edit: &mut ide_db::source_change::SourceChangeBuilder,
make: &SyntaxFactory,
editor: &mut syntax::syntax_editor::SyntaxEditor,
) {
if let Some(path_segment) = qualifier.segment() {
// for `<i32 as std::ops::Add>`
@@ -122,8 +107,13 @@ fn add_import(
);
if let Some(scope) = scope {
let scope = edit.make_import_scope_mut(scope);
ide_db::imports::insert_use::insert_use(&scope, import, &ctx.config.insert_use);
ide_db::imports::insert_use::insert_use_with_editor(
&scope,
import,
&ctx.config.insert_use,
editor,
make,
);
}
}
}
@@ -169,7 +169,7 @@ fn crate_graph(db: &RootDatabase) -> SearchScope {
entries.extend(
source_root
.iter()
.map(|id| (EditionedFileId::new(db, id, crate_data.edition, krate), None)),
.map(|id| (EditionedFileId::new(db, id, crate_data.edition), None)),
);
}
SearchScope { entries }
@@ -183,9 +183,11 @@ fn reverse_dependencies(db: &RootDatabase, of: hir::Crate) -> SearchScope {
let source_root = db.file_source_root(root_file).source_root_id(db);
let source_root = db.source_root(source_root).source_root(db);
entries.extend(source_root.iter().map(|id| {
(EditionedFileId::new(db, id, rev_dep.edition(db), rev_dep.into()), None)
}));
entries.extend(
source_root
.iter()
.map(|id| (EditionedFileId::new(db, id, rev_dep.edition(db)), None)),
);
}
SearchScope { entries }
}
@@ -199,7 +201,7 @@ fn krate(db: &RootDatabase, of: hir::Crate) -> SearchScope {
SearchScope {
entries: source_root
.iter()
.map(|id| (EditionedFileId::new(db, id, of.edition(db), of.into()), None))
.map(|id| (EditionedFileId::new(db, id, of.edition(db)), None))
.collect(),
}
}
@@ -89,6 +89,12 @@
///
/// assert_eq!(generator.suggest_name("b2"), "b2");
/// assert_eq!(generator.suggest_name("b"), "b3");
///
/// // Multi-byte UTF-8 identifiers (e.g. CJK) are handled correctly
/// assert_eq!(generator.suggest_name("日本語"), "日本語");
/// assert_eq!(generator.suggest_name("日本語"), "日本語1");
/// assert_eq!(generator.suggest_name("données3"), "données3");
/// assert_eq!(generator.suggest_name("données"), "données4");
/// ```
#[derive(Debug, Default)]
pub struct NameGenerator {
@@ -262,11 +268,15 @@ fn insert(&mut self, name: &str) {
/// Remove the numeric suffix from the name
///
/// # Examples
/// `a1b2c3` -> `a1b2c`
/// `a1b2c3` -> (`a1b2c`, Some(3))
fn split_numeric_suffix(name: &str) -> (&str, Option<usize>) {
let pos =
name.rfind(|c: char| !c.is_numeric()).expect("Name cannot be empty or all-numeric");
let (prefix, suffix) = name.split_at(pos + 1);
// `rfind` returns the byte offset of the matched character, which may be
// multi-byte (e.g. CJK identifiers). Use `ceil_char_boundary` to advance
// past the full character to the next valid split point.
let split = name.ceil_char_boundary(pos + 1);
let (prefix, suffix) = name.split_at(split);
(prefix, suffix.parse().ok())
}
}
@@ -2,7 +2,7 @@
(
Module {
id: ModuleIdLt {
[salsa id]: Id(3800),
[salsa id]: Id(3400),
},
},
[
@@ -2,7 +2,7 @@
(
Module {
id: ModuleIdLt {
[salsa id]: Id(3800),
[salsa id]: Id(3400),
},
},
[
@@ -671,7 +671,7 @@
def: Module(
Module {
id: ModuleIdLt {
[salsa id]: Id(3801),
[salsa id]: Id(3401),
},
},
),
@@ -706,7 +706,7 @@
def: Module(
Module {
id: ModuleIdLt {
[salsa id]: Id(3802),
[salsa id]: Id(3402),
},
},
),
@@ -998,7 +998,7 @@
(
Module {
id: ModuleIdLt {
[salsa id]: Id(3801),
[salsa id]: Id(3401),
},
},
[
@@ -1044,7 +1044,7 @@
(
Module {
id: ModuleIdLt {
[salsa id]: Id(3802),
[salsa id]: Id(3402),
},
},
[
@@ -5,7 +5,7 @@
Struct(
Struct {
id: StructId(
3c00,
4000,
),
},
),
@@ -5,7 +5,7 @@
Struct(
Struct {
id: StructId(
3c00,
4000,
),
},
),
@@ -42,7 +42,7 @@
Struct(
Struct {
id: StructId(
3c00,
4000,
),
},
),
@@ -130,7 +130,8 @@ pub(crate) fn position(
database.apply_change(change_fixture.change);
let (file_id, range_or_offset) =
change_fixture.file_position.expect("expected a marker ($0)");
let file_id = EditionedFileId::from_span_guess_origin(&database, file_id);
let file_id = EditionedFileId::from_span_file_id(&database, file_id);
let offset = range_or_offset.expect_offset();
(database, FilePosition { file_id, offset })
}
@@ -262,6 +262,48 @@ struct SomeStruct { SomeField: u8 }
);
}
#[test]
fn incorrect_union_names() {
check_diagnostics(
r#"
union non_camel_case_name { field: u8 }
// ^^^^^^^^^^^^^^^^^^^ 💡 warn: Union `non_camel_case_name` should have UpperCamelCase name, e.g. `NonCamelCaseName`
union SCREAMING_CASE { field: u8 }
// ^^^^^^^^^^^^^^ 💡 warn: Union `SCREAMING_CASE` should have UpperCamelCase name, e.g. `ScreamingCase`
"#,
);
}
#[test]
fn no_diagnostic_for_camel_cased_acronyms_in_union_name() {
check_diagnostics(
r#"
union AABB { field: u8 }
"#,
);
}
#[test]
fn no_diagnostic_for_repr_c_union() {
check_diagnostics(
r#"
#[repr(C)]
union my_union { field: u8 }
"#,
);
}
#[test]
fn incorrect_union_field() {
check_diagnostics(
r#"
union SomeUnion { SomeField: u8 }
// ^^^^^^^^^ 💡 warn: Field `SomeField` should have snake_case name, e.g. `some_field`
"#,
);
}
#[test]
fn incorrect_enum_names() {
check_diagnostics(
@@ -220,6 +220,23 @@ fn enum_type_alias_default_param() {
fn main() {
let _ = Result::<()>::Ok(());
}
"#,
);
}
#[test]
fn type_as_trait_does_not_count() {
check_diagnostics(
r#"
pub trait Lock<T> {
fn new(b: T) -> Self;
}
pub trait LockChoice {
type Lock<T>: Lock<T>;
}
fn f<L: LockChoice>() {
<L as LockChoice>::Lock::new(());
}
"#,
);
@@ -17,7 +17,7 @@ pub fn ssr_from_comment(
frange: FileRange,
) -> Option<(MatchFinder<'_>, TextRange)> {
let comment = {
let file_id = EditionedFileId::current_edition_guess_origin(db, frange.file_id);
let file_id = EditionedFileId::current_edition(db, frange.file_id);
let file = db.parse(file_id);
file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast)
@@ -339,8 +339,7 @@ pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> {
pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
// FIXME edition
self.with_db(|db| {
let editioned_file_id_wrapper =
EditionedFileId::current_edition_guess_origin(&self.db, file_id);
let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
db.parse(editioned_file_id_wrapper).tree()
})
@@ -369,7 +368,7 @@ pub fn extend_selection(&self, frange: FileRange) -> Cancellable<TextRange> {
/// supported).
pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
self.with_db(|db| {
let file_id = EditionedFileId::current_edition_guess_origin(&self.db, position.file_id);
let file_id = EditionedFileId::current_edition(&self.db, position.file_id);
let parse = db.parse(file_id);
let file = parse.tree();
matching_brace::matching_brace(&file, position.offset)
@@ -430,7 +429,7 @@ pub fn expand_macro(&self, position: FilePosition) -> Cancellable<Option<Expande
pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> {
self.with_db(|db| {
let editioned_file_id_wrapper =
EditionedFileId::current_edition_guess_origin(&self.db, frange.file_id);
EditionedFileId::current_edition(&self.db, frange.file_id);
let parse = db.parse(editioned_file_id_wrapper);
join_lines::join_lines(config, &parse.tree(), frange.range)
})
@@ -471,8 +470,7 @@ pub fn file_structure(
) -> Cancellable<Vec<StructureNode>> {
// FIXME: Edition
self.with_db(|db| {
let editioned_file_id_wrapper =
EditionedFileId::current_edition_guess_origin(&self.db, file_id);
let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
let source_file = db.parse(editioned_file_id_wrapper).tree();
file_structure::file_structure(&source_file, config)
})
@@ -503,8 +501,7 @@ pub fn inlay_hints_resolve(
/// Returns the set of folding ranges.
pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
self.with_db(|db| {
let editioned_file_id_wrapper =
EditionedFileId::current_edition_guess_origin(&self.db, file_id);
let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
folding_ranges::folding_ranges(&db.parse(editioned_file_id_wrapper).tree())
})
@@ -1151,10 +1151,7 @@ pub fn quux$0() {}
check_with_scope(
code,
Some(&mut |db| {
SearchScope::single_file(EditionedFileId::current_edition_guess_origin(
db,
FileId::from_raw(2),
))
SearchScope::single_file(EditionedFileId::current_edition(db, FileId::from_raw(2)))
}),
expect![[r#"
quux Function FileId(0) 19..35 26..30
@@ -1975,8 +1975,8 @@ trait Sub: Super + Super {
fn f() -> impl Sub<$0
"#,
expect![[r#"
trait Sub<SubTy = , SuperTy = >
^^^^^^^^^ -----------
trait Sub<SuperTy = , SubTy = >
^^^^^^^^^^^ ---------
"#]],
);
}
@@ -17,10 +17,7 @@
use either::Either;
use hir::EditionedFileId;
use ide_db::{
FilePosition, RootDatabase,
base_db::{RootQueryDb, SourceDatabase},
};
use ide_db::{FilePosition, RootDatabase, base_db::RootQueryDb};
use span::Edition;
use std::iter;
@@ -74,15 +71,11 @@ pub(crate) fn on_char_typed(
return None;
}
let edition = db
.source_root_crates(db.file_source_root(position.file_id).source_root_id(db))
.relevant_crates(position.file_id)
.first()
.map_or(Edition::CURRENT, |crates| crates.data(db).edition);
// FIXME: We are hitting the database here, if we are unlucky this call might block momentarily
// causing the editor to feel sluggish! We need to make this bail if it would block too long?
let editioned_file_id_wrapper = EditionedFileId::from_span_guess_origin(
db,
span::EditionedFileId::new(position.file_id, edition),
);
.copied()
.map_or(Edition::CURRENT, |krate| krate.data(db).edition);
let editioned_file_id_wrapper = EditionedFileId::new(db, position.file_id, edition);
let file = &db.parse(editioned_file_id_wrapper);
let char_matches_position =
file.tree().syntax().text().char_at(position.offset) == Some(char_typed);
@@ -51,7 +51,7 @@
// ![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif)
pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> {
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::current_edition_guess_origin(db, position.file_id);
ide_db::base_db::EditionedFileId::current_edition(db, position.file_id);
let parse = db.parse(editioned_file_id_wrapper);
let file = parse.tree();
let token = file.syntax().token_at_offset(position.offset).left_biased()?;
@@ -10,6 +10,9 @@
// | VS Code | **rust-analyzer: Debug ItemTree** |
pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
let sema = Semantics::new(db);
let Some(krate) = sema.first_crate(file_id) else {
return String::new();
};
let file_id = sema.attach_first_edition(file_id);
db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition(db))
db.file_item_tree(file_id.into(), krate.into()).pretty_print(db, file_id.edition(db))
}
@@ -638,7 +638,7 @@ fn expand(
current_span = Span {
range: resolved.range,
anchor: SpanAnchor {
file_id: resolved.file_id.editioned_file_id(db),
file_id: resolved.file_id.span_file_id(db),
ast_id: span::ROOT_ERASED_FILE_AST_ID,
},
ctx: current_ctx,
@@ -652,7 +652,7 @@ fn expand(
let resolved = db.resolve_span(current_span);
Ok(SubResponse::SpanSourceResult {
file_id: resolved.file_id.editioned_file_id(db).as_u32(),
file_id: resolved.file_id.span_file_id(db).as_u32(),
ast_id: span::ROOT_ERASED_FILE_AST_ID.into_raw(),
start: u32::from(resolved.range.start()),
end: u32::from(resolved.range.end()),
@@ -684,7 +684,7 @@ fn expand(
.text_range();
let parent_span = Some(ParentSpan {
file_id: editioned_file_id.editioned_file_id(db).as_u32(),
file_id: editioned_file_id.span_file_id(db).as_u32(),
ast_id: span::ROOT_ERASED_FILE_AST_ID.into_raw(),
start: u32::from(range.start()),
end: u32::from(range.end()),
@@ -328,7 +328,7 @@ fn apply(
let prev_working_dir = std::env::current_dir().ok();
if let Err(err) = std::env::set_current_dir(dir) {
eprintln!(
"Failed to set the current working dir to {}. Error: {err:?}",
"Failed to change the current working dir to {}. Error: {err:?}",
dir.display()
)
}
@@ -370,7 +370,7 @@ fn drop(&mut self) {
&& let Err(err) = std::env::set_current_dir(dir)
{
eprintln!(
"Failed to set the current working dir to {}. Error: {:?}",
"Failed to change the current working dir back to {}. Error: {:?}",
dir.display(),
err
)
@@ -126,8 +126,8 @@ pub fn run(self, verbosity: Verbosity) -> anyhow::Result<()> {
let source_roots = krates
.iter()
.cloned()
.map(|krate| db.file_source_root(krate.root_file(db)).source_root_id(db))
.unique();
.map(|krate| (db.file_source_root(krate.root_file(db)).source_root_id(db), krate))
.unique_by(|(source_root_id, _)| *source_root_id);
let mut dep_loc = 0;
let mut workspace_loc = 0;
@@ -137,7 +137,7 @@ pub fn run(self, verbosity: Verbosity) -> anyhow::Result<()> {
let mut workspace_item_stats = PrettyItemStats::default();
let mut dep_item_stats = PrettyItemStats::default();
for source_root_id in source_roots {
for (source_root_id, krate) in source_roots {
let source_root = db.source_root(source_root_id).source_root(db);
for file_id in source_root.iter() {
if let Some(p) = source_root.path_for_file(&file_id)
@@ -148,7 +148,8 @@ pub fn run(self, verbosity: Verbosity) -> anyhow::Result<()> {
let length = db.file_text(file_id).text(db).lines().count();
let item_stats = db
.file_item_tree(
EditionedFileId::current_edition_guess_origin(db, file_id).into(),
EditionedFileId::current_edition(db, file_id).into(),
krate.into(),
)
.item_tree_stats()
.into();
@@ -160,7 +161,8 @@ pub fn run(self, verbosity: Verbosity) -> anyhow::Result<()> {
let length = db.file_text(file_id).text(db).lines().count();
let item_stats = db
.file_item_tree(
EditionedFileId::current_edition_guess_origin(db, file_id).into(),
EditionedFileId::current_edition(db, file_id).into(),
krate.into(),
)
.item_tree_stats()
.into();
@@ -492,7 +494,7 @@ struct Acc {
let mut sw = self.stop_watch();
for &file_id in file_ids {
let file_id = file_id.editioned_file_id(db);
let file_id = file_id.span_file_id(db);
let sema = hir::Semantics::new(db);
let display_target = match sema.first_crate(file_id.file_id()) {
Some(krate) => krate.to_display_target(sema.db),
@@ -40,6 +40,8 @@
cmd parse {
/// Suppress printing.
optional --no-dump
/// Output as JSON.
optional --json
}
/// Parse stdin and print the list of symbols.
@@ -233,6 +235,7 @@ pub struct LspServer {
#[derive(Debug)]
pub struct Parse {
pub no_dump: bool,
pub json: bool,
}
#[derive(Debug)]
@@ -257,8 +260,8 @@ pub struct AnalysisStats {
pub disable_build_scripts: bool,
pub disable_proc_macros: bool,
pub proc_macro_srv: Option<PathBuf>,
pub skip_lowering: bool,
pub skip_lang_items: bool,
pub skip_lowering: bool,
pub skip_inference: bool,
pub skip_mir_stats: bool,
pub skip_data_layout: bool,
@@ -1,18 +1,101 @@
//! Read Rust code on stdin, print syntax tree on stdout.
use ide::Edition;
use syntax::{AstNode, SourceFile};
use ide_db::line_index::LineIndex;
use serde::Serialize;
use syntax::{AstNode, NodeOrToken, SourceFile, SyntaxNode, SyntaxToken};
use crate::cli::{flags, read_stdin};
#[derive(Serialize)]
struct JsonNode {
kind: String,
#[serde(rename = "type")]
node_type: &'static str,
start: [u32; 3],
end: [u32; 3],
#[serde(skip_serializing_if = "Option::is_none")]
text: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
children: Option<Vec<JsonNode>>,
}
fn pos(line_index: &LineIndex, offset: syntax::TextSize) -> [u32; 3] {
let offset_u32 = u32::from(offset);
let line_col = line_index.line_col(offset);
[offset_u32, line_col.line, line_col.col]
}
impl flags::Parse {
pub fn run(self) -> anyhow::Result<()> {
let _p = tracing::info_span!("flags::Parse::run").entered();
let text = read_stdin()?;
let line_index = LineIndex::new(&text);
let file = SourceFile::parse(&text, Edition::CURRENT).tree();
if !self.no_dump {
println!("{:#?}", file.syntax());
if self.json {
let json_tree = node_to_json(NodeOrToken::Node(file.syntax().clone()), &line_index);
println!("{}", serde_json::to_string(&json_tree)?);
} else {
println!("{:#?}", file.syntax());
}
}
std::mem::forget(file);
Ok(())
}
}
fn node_to_json(node: NodeOrToken<SyntaxNode, SyntaxToken>, line_index: &LineIndex) -> JsonNode {
let range = node.text_range();
let kind = format!("{:?}", node.kind());
match node {
NodeOrToken::Node(n) => {
let children: Vec<_> =
n.children_with_tokens().map(|it| node_to_json(it, line_index)).collect();
JsonNode {
kind,
node_type: "Node",
start: pos(line_index, range.start()),
end: pos(line_index, range.end()),
text: None,
children: Some(children),
}
}
NodeOrToken::Token(t) => JsonNode {
kind,
node_type: "Token",
start: pos(line_index, range.start()),
end: pos(line_index, range.end()),
text: Some(t.text().to_owned()),
children: None,
},
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::cli::flags;
#[test]
fn test_parse_json_output() {
let text = "fn main() {}".to_owned();
let flags = flags::Parse { json: true, no_dump: false };
let line_index = LineIndex::new(&text);
let file = SourceFile::parse(&text, Edition::CURRENT).tree();
let output = if flags.json {
let json_tree = node_to_json(NodeOrToken::Node(file.syntax().clone()), &line_index);
serde_json::to_string(&json_tree).unwrap()
} else {
format!("{:#?}", file.syntax())
};
assert!(output.contains(r#""kind":"SOURCE_FILE""#));
assert!(output.contains(r#""text":"main""#));
assert!(output.contains(r#""start":[0,0,0]"#));
}
}
@@ -74,7 +74,7 @@ pub fn run(self) -> anyhow::Result<()> {
let sr = db.source_root(root).source_root(db);
for file_id in sr.iter() {
for debug_info in match_finder.debug_where_text_equal(
EditionedFileId::current_edition_guess_origin(db, file_id),
EditionedFileId::current_edition(db, file_id),
debug_snippet,
) {
println!("{debug_info:#?}");
@@ -23,6 +23,3 @@ path = "fuzz_targets/parser.rs"
[[bin]]
name = "reparse"
path = "fuzz_targets/reparse.rs"
[lints]
workspace = true
@@ -1,9 +1,11 @@
//! Wrappers over [`make`] constructors
use either::Either;
use crate::{
AstNode, NodeOrToken, SyntaxKind, SyntaxNode, SyntaxToken,
ast::{
self, HasArgList, HasAttrs, HasGenericArgs, HasGenericParams, HasLoopBody, HasName,
HasTypeBounds, HasVisibility, RangeItem, make,
HasTypeBounds, HasVisibility, Param, RangeItem, make,
},
syntax_editor::SyntaxMappingBuilder,
};
@@ -97,6 +99,52 @@ pub fn struct_(
make::struct_(visibility, strukt_name, generic_param_list, field_list).clone_for_update()
}
pub fn enum_(
&self,
attrs: impl IntoIterator<Item = ast::Attr>,
visibility: Option<ast::Visibility>,
enum_name: ast::Name,
generic_param_list: Option<ast::GenericParamList>,
where_clause: Option<ast::WhereClause>,
variant_list: ast::VariantList,
) -> ast::Enum {
make::enum_(attrs, visibility, enum_name, generic_param_list, where_clause, variant_list)
.clone_for_update()
}
pub fn unnamed_param(&self, ty: ast::Type) -> ast::Param {
make::unnamed_param(ty).clone_for_update()
}
pub fn ty_fn_ptr<I: Iterator<Item = Param>>(
&self,
is_unsafe: bool,
abi: Option<ast::Abi>,
params: I,
ret_type: Option<ast::RetType>,
) -> ast::FnPtrType {
make::ty_fn_ptr(is_unsafe, abi, params, ret_type).clone_for_update()
}
pub fn where_pred(
&self,
path: Either<ast::Lifetime, ast::Type>,
bounds: impl IntoIterator<Item = ast::TypeBound>,
) -> ast::WherePred {
make::where_pred(path, bounds).clone_for_update()
}
pub fn where_clause(
&self,
predicates: impl IntoIterator<Item = ast::WherePred>,
) -> ast::WhereClause {
make::where_clause(predicates).clone_for_update()
}
pub fn impl_trait_type(&self, bounds: ast::TypeBoundList) -> ast::ImplTraitType {
make::impl_trait_type(bounds).clone_for_update()
}
pub fn expr_field(&self, receiver: ast::Expr, field: &str) -> ast::FieldExpr {
let ast::Expr::FieldExpr(ast) =
make::expr_field(receiver.clone(), field).clone_for_update()
@@ -287,6 +335,26 @@ pub fn path_segment(&self, name_ref: ast::NameRef) -> ast::PathSegment {
ast
}
pub fn generic_ty_path_segment(
&self,
name_ref: ast::NameRef,
generic_args: impl IntoIterator<Item = ast::GenericArg>,
) -> ast::PathSegment {
make::generic_ty_path_segment(name_ref, generic_args).clone_for_update()
}
pub fn tail_only_block_expr(&self, tail_expr: ast::Expr) -> ast::BlockExpr {
make::tail_only_block_expr(tail_expr)
}
pub fn expr_bin_op(&self, lhs: ast::Expr, op: ast::BinaryOp, rhs: ast::Expr) -> ast::Expr {
make::expr_bin_op(lhs, op, rhs)
}
pub fn ty_placeholder(&self) -> ast::Type {
make::ty_placeholder().clone_for_update()
}
pub fn path_segment_generics(
&self,
name_ref: ast::NameRef,
@@ -20,7 +20,7 @@
mod edits;
mod mapping;
pub use edits::Removable;
pub use edits::{GetOrCreateWhereClause, Removable};
pub use mapping::{SyntaxMapping, SyntaxMappingBuilder};
#[derive(Debug)]
@@ -10,6 +10,107 @@
syntax_editor::{Position, SyntaxEditor},
};
pub trait GetOrCreateWhereClause: ast::HasGenericParams {
fn where_clause_position(&self) -> Option<Position>;
fn get_or_create_where_clause(
&self,
editor: &mut SyntaxEditor,
make: &SyntaxFactory,
new_preds: impl Iterator<Item = ast::WherePred>,
) {
let existing = self.where_clause();
let all_preds: Vec<_> =
existing.iter().flat_map(|wc| wc.predicates()).chain(new_preds).collect();
let new_where = make.where_clause(all_preds);
if let Some(existing) = &existing {
editor.replace(existing.syntax(), new_where.syntax());
} else if let Some(pos) = self.where_clause_position() {
editor.insert_all(
pos,
vec![make.whitespace(" ").into(), new_where.syntax().clone().into()],
);
}
}
}
impl GetOrCreateWhereClause for ast::Fn {
fn where_clause_position(&self) -> Option<Position> {
if let Some(ty) = self.ret_type() {
Some(Position::after(ty.syntax()))
} else if let Some(param_list) = self.param_list() {
Some(Position::after(param_list.syntax()))
} else {
Some(Position::last_child_of(self.syntax()))
}
}
}
impl GetOrCreateWhereClause for ast::Impl {
fn where_clause_position(&self) -> Option<Position> {
if let Some(ty) = self.self_ty() {
Some(Position::after(ty.syntax()))
} else {
Some(Position::last_child_of(self.syntax()))
}
}
}
impl GetOrCreateWhereClause for ast::Trait {
fn where_clause_position(&self) -> Option<Position> {
if let Some(gpl) = self.generic_param_list() {
Some(Position::after(gpl.syntax()))
} else if let Some(name) = self.name() {
Some(Position::after(name.syntax()))
} else {
Some(Position::last_child_of(self.syntax()))
}
}
}
impl GetOrCreateWhereClause for ast::TypeAlias {
fn where_clause_position(&self) -> Option<Position> {
if let Some(gpl) = self.generic_param_list() {
Some(Position::after(gpl.syntax()))
} else if let Some(name) = self.name() {
Some(Position::after(name.syntax()))
} else {
Some(Position::last_child_of(self.syntax()))
}
}
}
impl GetOrCreateWhereClause for ast::Struct {
fn where_clause_position(&self) -> Option<Position> {
let tfl = self.field_list().and_then(|fl| match fl {
ast::FieldList::RecordFieldList(_) => None,
ast::FieldList::TupleFieldList(it) => Some(it),
});
if let Some(tfl) = tfl {
Some(Position::after(tfl.syntax()))
} else if let Some(gpl) = self.generic_param_list() {
Some(Position::after(gpl.syntax()))
} else if let Some(name) = self.name() {
Some(Position::after(name.syntax()))
} else {
Some(Position::last_child_of(self.syntax()))
}
}
}
impl GetOrCreateWhereClause for ast::Enum {
fn where_clause_position(&self) -> Option<Position> {
if let Some(gpl) = self.generic_param_list() {
Some(Position::after(gpl.syntax()))
} else if let Some(name) = self.name() {
Some(Position::after(name.syntax()))
} else {
Some(Position::last_child_of(self.syntax()))
}
}
}
impl SyntaxEditor {
/// Adds a new generic param to the function using `SyntaxEditor`
pub fn add_generic_param(&mut self, function: &Fn, new_param: GenericParam) {
@@ -149,8 +149,8 @@ fn with_single_file(
let fixture = ChangeFixture::parse(ra_fixture);
fixture.change.apply(&mut db);
assert_eq!(fixture.files.len(), 1, "Multiple file found in the fixture");
let file = EditionedFileId::from_span_guess_origin(&db, fixture.files[0]);
(db, file)
let file_id = EditionedFileId::from_span_file_id(&db, fixture.files[0]);
(db, file_id)
}
/// See the trait documentation for more information on fixtures.
@@ -165,7 +165,7 @@ fn with_many_files(
let files = fixture
.files
.into_iter()
.map(|file| EditionedFileId::from_span_guess_origin(&db, file))
.map(|file| EditionedFileId::from_span_file_id(&db, file))
.collect();
(db, files)
}
@@ -222,7 +222,7 @@ fn with_range_or_offset(
let (file_id, range_or_offset) = fixture
.file_position
.expect("Could not find file position in fixture. Did you forget to add an `$0`?");
let file_id = EditionedFileId::from_span_guess_origin(&db, file_id);
let file_id = EditionedFileId::from_span_file_id(&db, file_id);
(db, file_id, range_or_offset)
}
@@ -208,6 +208,22 @@ fn run(mut self, inbox: Receiver<Message>) {
)
})
.filter_map(|path| -> Option<(AbsPathBuf, Option<Vec<u8>>)> {
// Ignore events for files/directories that we're not watching.
if !(self.watched_file_entries.contains(&path)
|| self
.watched_dir_entries
.iter()
.any(|dir| dir.contains_file(&path)))
{
return None;
}
// For removed files, fs::metadata() will return Err, but
// we still want to update the VFS.
if matches!(event.kind, EventKind::Remove(_)) {
return Some((path, None));
}
let meta = fs::metadata(&path).ok()?;
if meta.file_type().is_dir()
&& self
@@ -223,15 +239,6 @@ fn run(mut self, inbox: Receiver<Message>) {
return None;
}
if !(self.watched_file_entries.contains(&path)
|| self
.watched_dir_entries
.iter()
.any(|dir| dir.contains_file(&path)))
{
return None;
}
let contents = read(&path);
Some((path, contents))
})
@@ -317,7 +324,7 @@ fn load_entry(
fn watch(&mut self, path: &Path) {
if let Some((watcher, _)) = &mut self.watcher {
log_notify_error(watcher.watch(path, RecursiveMode::NonRecursive));
log_notify_error(watcher.watch(path, RecursiveMode::Recursive));
}
}
@@ -16,7 +16,7 @@ impl BorshDeserialize for SmolStr {
#[inline]
fn deserialize_reader<R: Read>(reader: &mut R) -> borsh::io::Result<Self> {
let len = u32::deserialize_reader(reader)?;
if (len as usize) < INLINE_CAP {
if (len as usize) <= INLINE_CAP {
let mut buf = [0u8; INLINE_CAP];
reader.read_exact(&mut buf[..len as usize])?;
_ = core::str::from_utf8(&buf[..len as usize]).map_err(|err| {
@@ -29,9 +29,8 @@ fn deserialize_reader<R: Read>(reader: &mut R) -> borsh::io::Result<Self> {
}))
} else {
// u8::vec_from_reader always returns Some on success in current implementation
let vec = u8::vec_from_reader(len, reader)?.ok_or_else(|| {
Error::new(ErrorKind::Other, "u8::vec_from_reader unexpectedly returned None")
})?;
let vec = u8::vec_from_reader(len, reader)?
.ok_or_else(|| Error::other("u8::vec_from_reader unexpectedly returned None"))?;
Ok(SmolStr::from(String::from_utf8(vec).map_err(|err| {
let msg = err.to_string();
Error::new(ErrorKind::InvalidData, msg)
+219 -39
View File
@@ -34,13 +34,17 @@
pub struct SmolStr(Repr);
impl SmolStr {
/// The maximum byte length of a string that can be stored inline
/// without heap allocation.
pub const INLINE_CAP: usize = INLINE_CAP;
/// Constructs an inline variant of `SmolStr`.
///
/// This never allocates.
///
/// # Panics
///
/// Panics if `text.len() > 23`.
/// Panics if `text.len() > `[`SmolStr::INLINE_CAP`].
#[inline]
pub const fn new_inline(text: &str) -> SmolStr {
assert!(text.len() <= INLINE_CAP); // avoids bounds checks in loop
@@ -100,6 +104,24 @@ pub fn is_empty(&self) -> bool {
pub const fn is_heap_allocated(&self) -> bool {
matches!(self.0, Repr::Heap(..))
}
/// Constructs a `SmolStr` from a byte slice, returning an error if the slice is not valid
/// UTF-8.
#[inline]
pub fn from_utf8(bytes: &[u8]) -> Result<SmolStr, core::str::Utf8Error> {
core::str::from_utf8(bytes).map(SmolStr::new)
}
/// Constructs a `SmolStr` from a byte slice without checking that the bytes are valid UTF-8.
///
/// # Safety
///
/// `bytes` must be valid UTF-8.
#[inline]
pub unsafe fn from_utf8_unchecked(bytes: &[u8]) -> SmolStr {
// SAFETY: caller guarantees bytes are valid UTF-8
SmolStr::new(unsafe { core::str::from_utf8_unchecked(bytes) })
}
}
impl Clone for SmolStr {
@@ -116,7 +138,10 @@ fn cold_clone(v: &SmolStr) -> SmolStr {
return cold_clone(self);
}
// SAFETY: We verified that the payload of `Repr` is a POD
// SAFETY: The non-heap variants (`Repr::Inline` and `Repr::Static`) contain only
// `Copy` data (a `[u8; 23]` + `InlineSize` enum, or a `&'static str` fat pointer)
// and carry no drop glue, so a raw `ptr::read` bitwise copy is sound.
// The heap variant (`Repr::Heap`) is excluded above.
unsafe { core::ptr::read(self as *const SmolStr) }
}
}
@@ -142,7 +167,12 @@ fn deref(&self) -> &str {
impl Eq for SmolStr {}
impl PartialEq<SmolStr> for SmolStr {
fn eq(&self, other: &SmolStr) -> bool {
self.0.ptr_eq(&other.0) || self.as_str() == other.as_str()
match (&self.0, &other.0) {
(Repr::Inline { len: l_len, buf: l_buf }, Repr::Inline { len: r_len, buf: r_buf }) => {
l_len == r_len && l_buf == r_buf
}
_ => self.as_str() == other.as_str(),
}
}
}
@@ -215,6 +245,48 @@ fn partial_cmp(&self, other: &SmolStr) -> Option<Ordering> {
}
}
impl PartialOrd<str> for SmolStr {
fn partial_cmp(&self, other: &str) -> Option<Ordering> {
Some(self.as_str().cmp(other))
}
}
impl<'a> PartialOrd<&'a str> for SmolStr {
fn partial_cmp(&self, other: &&'a str) -> Option<Ordering> {
Some(self.as_str().cmp(*other))
}
}
impl PartialOrd<SmolStr> for &str {
fn partial_cmp(&self, other: &SmolStr) -> Option<Ordering> {
Some((*self).cmp(other.as_str()))
}
}
impl PartialOrd<String> for SmolStr {
fn partial_cmp(&self, other: &String) -> Option<Ordering> {
Some(self.as_str().cmp(other.as_str()))
}
}
impl PartialOrd<SmolStr> for String {
fn partial_cmp(&self, other: &SmolStr) -> Option<Ordering> {
Some(self.as_str().cmp(other.as_str()))
}
}
impl<'a> PartialOrd<&'a String> for SmolStr {
fn partial_cmp(&self, other: &&'a String) -> Option<Ordering> {
Some(self.as_str().cmp(other.as_str()))
}
}
impl PartialOrd<SmolStr> for &String {
fn partial_cmp(&self, other: &SmolStr) -> Option<Ordering> {
Some(self.as_str().cmp(other.as_str()))
}
}
impl hash::Hash for SmolStr {
fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
self.as_str().hash(hasher);
@@ -359,6 +431,20 @@ fn as_ref(&self) -> &std::path::Path {
}
}
impl From<char> for SmolStr {
#[inline]
fn from(c: char) -> SmolStr {
let mut buf = [0; INLINE_CAP];
let len = c.len_utf8();
c.encode_utf8(&mut buf);
SmolStr(Repr::Inline {
// SAFETY: A char is at most 4 bytes, which is always <= INLINE_CAP (23).
len: unsafe { InlineSize::transmute_from_u8(len as u8) },
buf,
})
}
}
impl From<&str> for SmolStr {
#[inline]
fn from(s: &str) -> SmolStr {
@@ -483,11 +569,15 @@ enum InlineSize {
}
impl InlineSize {
/// SAFETY: `value` must be less than or equal to [`INLINE_CAP`]
/// # Safety
///
/// `value` must be in the range `0..=23` (i.e. a valid `InlineSize` discriminant).
/// Values outside this range would produce an invalid enum discriminant, which is UB.
#[inline(always)]
const unsafe fn transmute_from_u8(value: u8) -> Self {
debug_assert!(value <= InlineSize::_V23 as u8);
// SAFETY: The caller is responsible to uphold this invariant
// SAFETY: The caller guarantees `value` is a valid discriminant for this
// `#[repr(u8)]` enum (0..=23), so the transmute produces a valid `InlineSize`.
unsafe { mem::transmute::<u8, Self>(value) }
}
}
@@ -563,24 +653,15 @@ fn as_str(&self) -> &str {
Repr::Static(data) => data,
Repr::Inline { len, buf } => {
let len = *len as usize;
// SAFETY: len is guaranteed to be <= INLINE_CAP
// SAFETY: `len` is an `InlineSize` discriminant (0..=23) which is always
// <= INLINE_CAP (23), so `..len` is always in bounds of `buf: [u8; 23]`.
let buf = unsafe { buf.get_unchecked(..len) };
// SAFETY: buf is guaranteed to be valid utf8 for ..len bytes
// SAFETY: All constructors that produce `Repr::Inline` copy from valid
// UTF-8 sources (`&str` or char encoding), so `buf[..len]` is valid UTF-8.
unsafe { ::core::str::from_utf8_unchecked(buf) }
}
}
}
fn ptr_eq(&self, other: &Self) -> bool {
match (self, other) {
(Self::Heap(l0), Self::Heap(r0)) => Arc::ptr_eq(l0, r0),
(Self::Static(l0), Self::Static(r0)) => core::ptr::eq(l0, r0),
(Self::Inline { len: l_len, buf: l_buf }, Self::Inline { len: r_len, buf: r_buf }) => {
l_len == r_len && l_buf == r_buf
}
_ => false,
}
}
}
/// Convert value to [`SmolStr`] using [`fmt::Display`], potentially without allocating.
@@ -666,7 +747,7 @@ fn to_ascii_lowercase_smolstr(&self) -> SmolStr {
buf[..len].copy_from_slice(self.as_bytes());
buf[..len].make_ascii_lowercase();
SmolStr(Repr::Inline {
// SAFETY: `len` is in bounds
// SAFETY: `len` is guarded to be <= INLINE_CAP (23), a valid `InlineSize` discriminant.
len: unsafe { InlineSize::transmute_from_u8(len as u8) },
buf,
})
@@ -683,7 +764,7 @@ fn to_ascii_uppercase_smolstr(&self) -> SmolStr {
buf[..len].copy_from_slice(self.as_bytes());
buf[..len].make_ascii_uppercase();
SmolStr(Repr::Inline {
// SAFETY: `len` is in bounds
// SAFETY: `len` is guarded to be <= INLINE_CAP (23), a valid `InlineSize` discriminant.
len: unsafe { InlineSize::transmute_from_u8(len as u8) },
buf,
})
@@ -703,8 +784,11 @@ fn replacen_smolstr(&self, from: &str, to: &str, mut count: usize) -> SmolStr {
if let [from_u8] = from.as_bytes()
&& let [to_u8] = to.as_bytes()
{
// SAFETY: `from` and `to` are single-byte `&str`s. In valid UTF-8, a single-byte
// code unit is always in the range 0x00..=0x7F (i.e. ASCII). The closure only
// replaces the matching ASCII byte with another ASCII byte, and returns all
// other bytes unchanged, so UTF-8 validity is preserved.
return if self.len() <= count {
// SAFETY: `from_u8` & `to_u8` are ascii
unsafe { replacen_1_ascii(self, |b| if b == from_u8 { *to_u8 } else { *b }) }
} else {
unsafe {
@@ -736,7 +820,11 @@ fn replacen_smolstr(&self, from: &str, to: &str, mut count: usize) -> SmolStr {
}
}
/// SAFETY: `map` fn must only replace ascii with ascii or return unchanged bytes.
/// # Safety
///
/// `map` must satisfy: for every byte `b` in `src`, if `b <= 0x7F` (ASCII) then `map(b)` must
/// also be `<= 0x7F` (ASCII). If `b > 0x7F` (part of a multi-byte UTF-8 sequence), `map` must
/// return `b` unchanged. This ensures the output is valid UTF-8 whenever the input is.
#[inline]
unsafe fn replacen_1_ascii(src: &str, mut map: impl FnMut(&u8) -> u8) -> SmolStr {
if src.len() <= INLINE_CAP {
@@ -745,13 +833,16 @@ unsafe fn replacen_1_ascii(src: &str, mut map: impl FnMut(&u8) -> u8) -> SmolStr
buf[idx] = map(b);
}
SmolStr(Repr::Inline {
// SAFETY: `len` is in bounds
// SAFETY: `src` is a `&str` so `src.len()` <= INLINE_CAP <= 23, which is a
// valid `InlineSize` discriminant.
len: unsafe { InlineSize::transmute_from_u8(src.len() as u8) },
buf,
})
} else {
let out = src.as_bytes().iter().map(map).collect();
// SAFETY: We replaced ascii with ascii on valid utf8 strings.
// SAFETY: The caller guarantees `map` only substitutes ASCII bytes with ASCII
// bytes and leaves multi-byte UTF-8 continuation bytes untouched, so the
// output byte sequence is valid UTF-8.
unsafe { String::from_utf8_unchecked(out).into() }
}
}
@@ -773,9 +864,11 @@ unsafe fn replacen_1_ascii(src: &str, mut map: impl FnMut(&u8) -> u8) -> SmolStr
let mut is_ascii = [false; N];
while slice.len() >= N {
// SAFETY: checked in loop condition
// SAFETY: The loop condition guarantees `slice.len() >= N`, so `..N` is in bounds.
let chunk = unsafe { slice.get_unchecked(..N) };
// SAFETY: out_slice has at least same length as input slice and gets sliced with the same offsets
// SAFETY: `out_slice` starts with the same length as `slice` (both derived from
// `s.len()`) and both are advanced by the same offset `N` each iteration, so
// `out_slice.len() >= N` holds whenever `slice.len() >= N`.
let out_chunk = unsafe { out_slice.get_unchecked_mut(..N) };
for j in 0..N {
@@ -794,6 +887,7 @@ unsafe fn replacen_1_ascii(src: &str, mut map: impl FnMut(&u8) -> u8) -> SmolStr
out_chunk[j] = convert(&chunk[j]);
}
// SAFETY: Same reasoning as above — both slices have len >= N at this point.
slice = unsafe { slice.get_unchecked(N..) };
out_slice = unsafe { out_slice.get_unchecked_mut(N..) };
}
@@ -804,7 +898,9 @@ unsafe fn replacen_1_ascii(src: &str, mut map: impl FnMut(&u8) -> u8) -> SmolStr
if byte > 127 {
break;
}
// SAFETY: out_slice has at least same length as input slice
// SAFETY: `out_slice` is always the same length as `slice` (both start equal and
// are advanced by 1 together), and `slice` is non-empty per the loop condition,
// so index 0 and `1..` are in bounds for both.
unsafe {
*out_slice.get_unchecked_mut(0) = convert(&byte);
}
@@ -813,8 +909,10 @@ unsafe fn replacen_1_ascii(src: &str, mut map: impl FnMut(&u8) -> u8) -> SmolStr
}
unsafe {
// SAFETY: we know this is a valid char boundary
// since we only skipped over leading ascii bytes
// SAFETY: We only advanced past bytes that satisfy `b <= 127`, i.e. ASCII bytes.
// In UTF-8, ASCII bytes (0x00..=0x7F) are always single-byte code points and
// never appear as continuation bytes, so the remaining `slice` starts at a valid
// UTF-8 char boundary.
let rest = core::str::from_utf8_unchecked(slice);
(out, rest)
}
@@ -850,10 +948,18 @@ macro_rules! format_smolstr {
/// A builder that can be used to efficiently build a [`SmolStr`].
///
/// This won't allocate if the final string fits into the inline buffer.
#[derive(Clone, Default, Debug, PartialEq, Eq)]
#[derive(Clone, Default, Debug)]
pub struct SmolStrBuilder(SmolStrBuilderRepr);
#[derive(Clone, Debug, PartialEq, Eq)]
impl PartialEq for SmolStrBuilder {
fn eq(&self, other: &Self) -> bool {
self.as_str() == other.as_str()
}
}
impl Eq for SmolStrBuilder {}
#[derive(Clone, Debug)]
enum SmolStrBuilderRepr {
Inline { len: usize, buf: [u8; INLINE_CAP] },
Heap(String),
@@ -873,11 +979,57 @@ pub const fn new() -> Self {
Self(SmolStrBuilderRepr::Inline { buf: [0; INLINE_CAP], len: 0 })
}
/// Creates a new empty [`SmolStrBuilder`] with at least the specified capacity.
///
/// If `capacity` is less than or equal to [`SmolStr::INLINE_CAP`], the builder
/// will use inline storage and not allocate. Otherwise, it will pre-allocate a
/// heap buffer of the requested capacity.
#[must_use]
pub fn with_capacity(capacity: usize) -> Self {
if capacity <= INLINE_CAP {
Self::new()
} else {
Self(SmolStrBuilderRepr::Heap(String::with_capacity(capacity)))
}
}
/// Returns the number of bytes accumulated in the builder so far.
#[inline]
pub fn len(&self) -> usize {
match &self.0 {
SmolStrBuilderRepr::Inline { len, .. } => *len,
SmolStrBuilderRepr::Heap(heap) => heap.len(),
}
}
/// Returns `true` if the builder has a length of zero bytes.
#[inline]
pub fn is_empty(&self) -> bool {
match &self.0 {
SmolStrBuilderRepr::Inline { len, .. } => *len == 0,
SmolStrBuilderRepr::Heap(heap) => heap.is_empty(),
}
}
/// Returns a `&str` slice of the builder's current contents.
#[inline]
pub fn as_str(&self) -> &str {
match &self.0 {
SmolStrBuilderRepr::Inline { len, buf } => {
// SAFETY: `buf[..*len]` was built by prior `push`/`push_str` calls
// that only wrote valid UTF-8, and `*len <= INLINE_CAP` is maintained
// by the inline branch logic.
unsafe { core::str::from_utf8_unchecked(&buf[..*len]) }
}
SmolStrBuilderRepr::Heap(heap) => heap.as_str(),
}
}
/// Builds a [`SmolStr`] from `self`.
#[must_use]
pub fn finish(&self) -> SmolStr {
SmolStr(match &self.0 {
&SmolStrBuilderRepr::Inline { len, buf } => {
pub fn finish(self) -> SmolStr {
SmolStr(match self.0 {
SmolStrBuilderRepr::Inline { len, buf } => {
debug_assert!(len <= INLINE_CAP);
Repr::Inline {
// SAFETY: We know that `value.len` is less than or equal to the maximum value of `InlineSize`
@@ -885,7 +1037,7 @@ pub fn finish(&self) -> SmolStr {
buf,
}
}
SmolStrBuilderRepr::Heap(heap) => Repr::new(heap),
SmolStrBuilderRepr::Heap(heap) => Repr::new(&heap),
})
}
@@ -900,8 +1052,10 @@ pub fn push(&mut self, c: char) {
*len += char_len;
} else {
let mut heap = String::with_capacity(new_len);
// copy existing inline bytes over to the heap
// SAFETY: inline data is guaranteed to be valid utf8 for `old_len` bytes
// SAFETY: `buf[..*len]` was built by prior `push`/`push_str` calls
// that only wrote valid UTF-8 (from `char::encode_utf8` or `&str`
// byte copies), so extending the Vec with these bytes preserves the
// String's UTF-8 invariant.
unsafe { heap.as_mut_vec().extend_from_slice(&buf[..*len]) };
heap.push(c);
self.0 = SmolStrBuilderRepr::Heap(heap);
@@ -926,8 +1080,10 @@ pub fn push_str(&mut self, s: &str) {
let mut heap = String::with_capacity(*len);
// copy existing inline bytes over to the heap
// SAFETY: inline data is guaranteed to be valid utf8 for `old_len` bytes
// SAFETY: `buf[..old_len]` was built by prior `push`/`push_str` calls
// that only wrote valid UTF-8 (from `char::encode_utf8` or `&str` byte
// copies), so extending the Vec with these bytes preserves the String's
// UTF-8 invariant.
unsafe { heap.as_mut_vec().extend_from_slice(&buf[..old_len]) };
heap.push_str(s);
self.0 = SmolStrBuilderRepr::Heap(heap);
@@ -945,6 +1101,30 @@ fn write_str(&mut self, s: &str) -> fmt::Result {
}
}
impl iter::Extend<char> for SmolStrBuilder {
fn extend<I: iter::IntoIterator<Item = char>>(&mut self, iter: I) {
for c in iter {
self.push(c);
}
}
}
impl<'a> iter::Extend<&'a str> for SmolStrBuilder {
fn extend<I: iter::IntoIterator<Item = &'a str>>(&mut self, iter: I) {
for s in iter {
self.push_str(s);
}
}
}
impl<'a> iter::Extend<&'a String> for SmolStrBuilder {
fn extend<I: iter::IntoIterator<Item = &'a String>>(&mut self, iter: I) {
for s in iter {
self.push_str(s);
}
}
}
impl From<SmolStrBuilder> for SmolStr {
fn from(value: SmolStrBuilder) -> Self {
value.finish()
@@ -16,7 +16,7 @@ fn smol_str<'de: 'a, 'a, D>(deserializer: D) -> Result<SmolStr, D::Error>
impl<'a> Visitor<'a> for SmolStrVisitor {
type Value = SmolStr;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("a string")
}
@@ -10,6 +10,7 @@
#[cfg(target_pointer_width = "64")]
fn smol_str_is_smol() {
assert_eq!(::std::mem::size_of::<SmolStr>(), ::std::mem::size_of::<String>(),);
assert_eq!(::std::mem::size_of::<Option<SmolStr>>(), ::std::mem::size_of::<SmolStr>(),);
}
#[test]
@@ -332,6 +333,29 @@ fn test_builder_push() {
assert_eq!("a".repeat(24), s);
}
#[test]
fn test_from_char() {
// ASCII char
let s: SmolStr = 'a'.into();
assert_eq!(s, "a");
assert!(!s.is_heap_allocated());
// Multi-byte char (2 bytes)
let s: SmolStr = SmolStr::from('ñ');
assert_eq!(s, "ñ");
assert!(!s.is_heap_allocated());
// 3-byte char
let s: SmolStr = '€'.into();
assert_eq!(s, "");
assert!(!s.is_heap_allocated());
// 4-byte char (emoji)
let s: SmolStr = '🦀'.into();
assert_eq!(s, "🦀");
assert!(!s.is_heap_allocated());
}
#[cfg(test)]
mod test_str_ext {
use smol_str::StrExt;
+1 -1
View File
@@ -1 +1 @@
f8704be04fe1150527fc2cf21dd44327f0fe87fb
eda4fc7733ee89e484d7120cafbd80dcb2fce66e