Unnamed repository; edit this file 'description' to name the repository.
Merge commit 'cd3bf9fe51676b520c546460e6d8919b8c8ff99f' into sync-from-ra
Laurențiu Nicola 2023-06-19
parent bbd6955 · commit 9326cf7
-rw-r--r--.github/workflows/autopublish.yaml4
-rw-r--r--.github/workflows/ci.yaml8
-rw-r--r--Cargo.lock48
-rw-r--r--crates/base-db/src/fixture.rs6
-rw-r--r--crates/base-db/src/input.rs23
-rw-r--r--crates/hir-def/src/body.rs50
-rw-r--r--crates/hir-def/src/body/lower.rs72
-rw-r--r--crates/hir-def/src/body/pretty.rs1
-rw-r--r--crates/hir-def/src/db.rs23
-rw-r--r--crates/hir-def/src/expander.rs12
-rw-r--r--crates/hir-def/src/find_path.rs4
-rw-r--r--crates/hir-def/src/hir.rs20
-rw-r--r--crates/hir-def/src/hir/type_ref.rs115
-rw-r--r--crates/hir-def/src/item_scope.rs4
-rw-r--r--crates/hir-def/src/item_tree.rs6
-rw-r--r--crates/hir-def/src/lib.rs301
-rw-r--r--crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs70
-rw-r--r--crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs48
-rw-r--r--crates/hir-def/src/nameres.rs23
-rw-r--r--crates/hir-def/src/nameres/collector.rs102
-rw-r--r--crates/hir-def/src/path.rs4
-rw-r--r--crates/hir-def/src/path/lower.rs4
-rw-r--r--crates/hir-def/src/resolver.rs38
-rw-r--r--crates/hir-expand/src/ast_id_map.rs7
-rw-r--r--crates/hir-expand/src/attrs.rs10
-rw-r--r--crates/hir-expand/src/builtin_derive_macro.rs154
-rw-r--r--crates/hir-expand/src/builtin_fn_macro.rs147
-rw-r--r--crates/hir-expand/src/db.rs108
-rw-r--r--crates/hir-expand/src/eager.rs84
-rw-r--r--crates/hir-expand/src/lib.rs72
-rw-r--r--crates/hir-expand/src/proc_macro.rs11
-rw-r--r--crates/hir-ty/Cargo.toml8
-rw-r--r--crates/hir-ty/src/autoderef.rs24
-rw-r--r--crates/hir-ty/src/chalk_db.rs4
-rw-r--r--crates/hir-ty/src/consteval.rs36
-rw-r--r--crates/hir-ty/src/consteval/tests.rs11
-rw-r--r--crates/hir-ty/src/db.rs1
-rw-r--r--crates/hir-ty/src/diagnostics/unsafe_check.rs7
-rw-r--r--crates/hir-ty/src/display.rs43
-rw-r--r--crates/hir-ty/src/infer.rs47
-rw-r--r--crates/hir-ty/src/infer/closure.rs3
-rw-r--r--crates/hir-ty/src/infer/expr.rs5
-rw-r--r--crates/hir-ty/src/infer/mutability.rs4
-rw-r--r--crates/hir-ty/src/infer/path.rs17
-rw-r--r--crates/hir-ty/src/interner.rs2
-rw-r--r--crates/hir-ty/src/layout/tests.rs35
-rw-r--r--crates/hir-ty/src/lower.rs119
-rw-r--r--crates/hir-ty/src/method_resolution.rs2
-rw-r--r--crates/hir-ty/src/mir/borrowck.rs16
-rw-r--r--crates/hir-ty/src/mir/eval.rs18
-rw-r--r--crates/hir-ty/src/mir/lower.rs8
-rw-r--r--crates/hir-ty/src/mir/pretty.rs3
-rw-r--r--crates/hir-ty/src/tests.rs2
-rw-r--r--crates/hir-ty/src/tests/regression.rs23
-rw-r--r--crates/hir-ty/src/tests/simple.rs48
-rw-r--r--crates/hir-ty/src/tests/traits.rs10
-rw-r--r--crates/hir-ty/src/utils.rs30
-rw-r--r--crates/hir/src/from_id.rs3
-rw-r--r--crates/hir/src/lib.rs92
-rw-r--r--crates/hir/src/semantics.rs18
-rw-r--r--crates/hir/src/source_analyzer.rs7
-rw-r--r--crates/hir/src/symbols.rs1
-rw-r--r--crates/ide-assists/src/handlers/add_missing_impl_members.rs363
-rw-r--r--crates/ide-assists/src/handlers/extract_type_alias.rs51
-rw-r--r--crates/ide-assists/src/handlers/generate_delegate_methods.rs25
-rw-r--r--crates/ide-assists/src/handlers/generate_function.rs102
-rw-r--r--crates/ide-assists/src/handlers/inline_call.rs3
-rw-r--r--crates/ide-assists/src/handlers/inline_const_as_literal.rs722
-rw-r--r--crates/ide-assists/src/handlers/raw_string.rs3
-rw-r--r--crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs28
-rw-r--r--crates/ide-assists/src/lib.rs2
-rw-r--r--crates/ide-assists/src/tests/generated.rs21
-rw-r--r--crates/ide-assists/src/utils.rs95
-rw-r--r--crates/ide-completion/src/completions/dot.rs122
-rw-r--r--crates/ide-completion/src/completions/item_list/trait_impl.rs108
-rw-r--r--crates/ide-db/src/apply_change.rs34
-rw-r--r--crates/ide-db/src/path_transform.rs169
-rw-r--r--crates/ide-db/src/search.rs2
-rw-r--r--crates/ide-db/src/test_data/test_doc_alias.txt42
-rw-r--r--crates/ide-db/src/test_data/test_symbol_index_collection.txt134
-rw-r--r--crates/ide/src/hover/tests.rs43
-rw-r--r--crates/ide/src/inlay_hints/chaining.rs12
-rw-r--r--crates/ide/src/lib.rs2
-rw-r--r--crates/ide/src/syntax_highlighting.rs3
-rw-r--r--crates/ide/src/syntax_highlighting/test_data/highlight_strings.html3
-rw-r--r--crates/ide/src/syntax_highlighting/tests.rs3
-rw-r--r--crates/parser/src/grammar/types.rs2
-rw-r--r--crates/parser/test_data/parser/inline/ok/0017_array_type.rast5
-rw-r--r--crates/parser/test_data/parser/ok/0030_traits.rast5
-rw-r--r--crates/parser/test_data/parser/ok/0043_complex_assignment.rast5
-rw-r--r--crates/proc-macro-srv/src/server.rs6
-rw-r--r--crates/project-model/src/workspace.rs6
-rw-r--r--crates/rust-analyzer/Cargo.toml3
-rw-r--r--crates/rust-analyzer/src/cli.rs17
-rw-r--r--crates/rust-analyzer/src/cli/analysis_stats.rs328
-rw-r--r--crates/rust-analyzer/src/cli/diagnostics.rs8
-rw-r--r--crates/rust-analyzer/src/cli/flags.rs20
-rw-r--r--crates/rust-analyzer/src/cli/progress_report.rs41
-rw-r--r--crates/rust-analyzer/src/dispatch.rs48
-rw-r--r--crates/rust-analyzer/src/global_state.rs35
-rw-r--r--crates/rust-analyzer/src/handlers/notification.rs4
-rw-r--r--crates/rust-analyzer/src/handlers/request.rs24
-rw-r--r--crates/rust-analyzer/src/main_loop.rs82
-rw-r--r--crates/rust-analyzer/src/reload.rs45
-rw-r--r--crates/rust-analyzer/src/to_proto.rs33
-rw-r--r--crates/rust-analyzer/tests/slow-tests/main.rs13
-rw-r--r--crates/syntax/rust.ungram2
-rw-r--r--crates/syntax/src/ast/generated/nodes.rs2
-rw-r--r--crates/syntax/src/ast/make.rs6
-rw-r--r--crates/test-utils/src/fixture.rs4
-rw-r--r--crates/test-utils/src/minicore.rs51
-rw-r--r--docs/user/manual.adoc21
-rw-r--r--editors/code/.vscodeignore1
-rw-r--r--lib/la-arena/src/lib.rs6
114 files changed, 3891 insertions, 1250 deletions
diff --git a/.github/workflows/autopublish.yaml b/.github/workflows/autopublish.yaml
index 7090c94d93..15cedab127 100644
--- a/.github/workflows/autopublish.yaml
+++ b/.github/workflows/autopublish.yaml
@@ -28,7 +28,7 @@ jobs:
- name: Publish Crates
env:
CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
- PATCH: ${{ github.run_number }}
+ RUN_NUMBER: ${{ github.run_number }}
shell: bash
run: |
git config --global user.email "[email protected]"
@@ -53,4 +53,4 @@ jobs:
# Remove library crates from the workspaces so we don't auto-publish them as well
sed -i 's/ "lib\/\*",//' ./Cargo.toml
find crates/rust-analyzer -type f -name '*.rs' -exec sed -i 's/rust_analyzer/ra_ap_rust_analyzer/g' {} +
- cargo workspaces publish --yes --force '*' --exact --no-git-commit --allow-dirty --skip-published custom 0.0.$PATCH
+ cargo workspaces publish --yes --force '*' --exact --no-git-commit --allow-dirty --skip-published custom 0.0.$(($RUN_NUMBER + 133))
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 622da105fd..31bb7eed8d 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -24,6 +24,7 @@ jobs:
pull-requests: read
outputs:
typescript: ${{ steps.filter.outputs.typescript }}
+ proc_macros: ${{ steps.filter.outputs.proc_macros }}
steps:
- uses: actions/checkout@v3
- uses: dorny/paths-filter@4067d885736b84de7c414f582ac45897079b0a78
@@ -45,8 +46,8 @@ jobs:
runs-on: ${{ matrix.os }}
env:
CC: deny_c
- RUST_CHANNEL: "${{ needs.changes.outputs.proc_macros == 'true' && 'nightly' || 'stable'}}"
- USE_SYSROOT_ABI: "${{ needs.changes.outputs.proc_macros == 'true' && '--features sysroot-abi' || ''}}"
+ RUST_CHANNEL: "${{ needs.changes.outputs.proc_macros == 'true' && 'nightly' || 'stable' }}"
+ USE_SYSROOT_ABI: "${{ needs.changes.outputs.proc_macros == 'true' && '--features sysroot-abi' || '' }}"
strategy:
fail-fast: false
@@ -62,7 +63,8 @@ jobs:
- name: Install Rust toolchain
run: |
rustup update --no-self-update ${{ env.RUST_CHANNEL }}
- rustup component add rustfmt rust-src
+ rustup component add --toolchain ${{ env.RUST_CHANNEL }} rustfmt rust-src
+ rustup default ${{ env.RUST_CHANNEL }}
- name: Cache Dependencies
uses: Swatinem/rust-cache@988c164c3d0e93c4dbab36aaf5bbeb77425b2894
diff --git a/Cargo.lock b/Cargo.lock
index e36aef6a6a..50c81ca279 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -177,21 +177,21 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chalk-derive"
-version = "0.89.0"
+version = "0.91.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea176c50987dc4765961aa165001e8eb5a722a26308c5797a47303ea91686aab"
+checksum = "c59178fded594fe78c47b841520e5a4399d00fe15fffee19b945958a878cd02d"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.15",
"synstructure",
]
[[package]]
name = "chalk-ir"
-version = "0.89.0"
+version = "0.91.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "473b480241695428c14e8f84f1c9a47ef232450a50faf3a4041e5c9dc11e0a3b"
+checksum = "8824be92876823b828d551bb792f79eb1f69c69d1948abf69fccbf84e448e57b"
dependencies = [
"bitflags 1.3.2",
"chalk-derive",
@@ -200,9 +200,9 @@ dependencies = [
[[package]]
name = "chalk-recursive"
-version = "0.89.0"
+version = "0.91.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6764b4fe67cac3a3758185084efbfbd39bf0352795824ba849ddd2b64cd4bb28"
+checksum = "1e110d1260809c238072d1c8ef84060e39983e8ea9d4c6f74b19b0ebbf8904dc"
dependencies = [
"chalk-derive",
"chalk-ir",
@@ -213,9 +213,9 @@ dependencies = [
[[package]]
name = "chalk-solve"
-version = "0.89.0"
+version = "0.91.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "55a7e6160966eceb6e7dcc2f479a2af4c477aaf5bccbc640d82515995ab1a6cc"
+checksum = "12200b19abf4b0633095f7bd099f3ef609d314754b6adb358c68cc04d10589e5"
dependencies = [
"chalk-derive",
"chalk-ir",
@@ -327,7 +327,7 @@ checksum = "f3cdeb9ec472d588e539a818b2dee436825730da08ad0017c4b1a17676bdc8b7"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 1.0.109",
]
[[package]]
@@ -1505,7 +1505,6 @@ dependencies = [
"parking_lot 0.12.1",
"parking_lot_core 0.9.6",
"proc-macro-api",
- "proc-macro-srv-cli",
"profile",
"project-model",
"rayon",
@@ -1578,7 +1577,7 @@ dependencies = [
"heck",
"proc-macro2",
"quote",
- "syn",
+ "syn 1.0.109",
]
[[package]]
@@ -1637,7 +1636,7 @@ checksum = "d7e29c4601e36bcec74a223228dce795f4cd3616341a4af93520ca1a837c087d"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 1.0.109",
]
[[package]]
@@ -1660,7 +1659,7 @@ checksum = "395627de918015623b32e7669714206363a7fc00382bf477e72c1f7533e8eafc"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 1.0.109",
]
[[package]]
@@ -1731,14 +1730,25 @@ dependencies = [
]
[[package]]
+name = "syn"
+version = "2.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
name = "synstructure"
-version = "0.12.6"
+version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
+checksum = "285ba80e733fac80aa4270fbcdf83772a79b80aa35c97075320abfee4a915b06"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 2.0.15",
"unicode-xid",
]
@@ -1811,7 +1821,7 @@ checksum = "5420d42e90af0c38c3290abcca25b9b3bdf379fc9f55c528f53a269d9c9a267e"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 1.0.109",
]
[[package]]
@@ -1913,7 +1923,7 @@ checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 1.0.109",
]
[[package]]
diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs
index 5b11343173..d3abc3870b 100644
--- a/crates/base-db/src/fixture.rs
+++ b/crates/base-db/src/fixture.rs
@@ -215,7 +215,7 @@ impl ChangeFixture {
None,
default_cfg,
Default::default(),
- Env::default(),
+ Env::new_for_test_fixture(),
false,
CrateOrigin::Local { repo: None, name: None },
default_target_data_layout
@@ -259,7 +259,7 @@ impl ChangeFixture {
None,
Default::default(),
Default::default(),
- Env::default(),
+ Env::new_for_test_fixture(),
false,
CrateOrigin::Lang(LangCrateOrigin::Core),
target_layout.clone(),
@@ -298,7 +298,7 @@ impl ChangeFixture {
None,
Default::default(),
Default::default(),
- Env::default(),
+ Env::new_for_test_fixture(),
true,
CrateOrigin::Local { repo: None, name: None },
target_layout,
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index e8d521b42f..f2e523675b 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -151,6 +151,12 @@ pub enum CrateOrigin {
Lang(LangCrateOrigin),
}
+impl CrateOrigin {
+ pub fn is_local(&self) -> bool {
+ matches!(self, CrateOrigin::Local { .. })
+ }
+}
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum LangCrateOrigin {
Alloc,
@@ -333,6 +339,17 @@ pub struct Env {
entries: FxHashMap<String, String>,
}
+impl Env {
+ pub fn new_for_test_fixture() -> Self {
+ Env {
+ entries: FxHashMap::from_iter([(
+ String::from("__ra_is_test_fixture"),
+ String::from("__ra_is_test_fixture"),
+ )]),
+ }
+ }
+}
+
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Dependency {
pub crate_id: CrateId,
@@ -456,6 +473,12 @@ impl CrateGraph {
self.arena.iter().map(|(idx, _)| idx)
}
+ // FIXME: used for `handle_hack_cargo_workspace`, should be removed later
+ #[doc(hidden)]
+ pub fn iter_mut(&mut self) -> impl Iterator<Item = (CrateId, &mut CrateData)> + '_ {
+ self.arena.iter_mut()
+ }
+
/// Returns an iterator over all transitive dependencies of the given crate,
/// including the crate itself.
pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs
index 36626ed1a9..94dc39b117 100644
--- a/crates/hir-def/src/body.rs
+++ b/crates/hir-def/src/body.rs
@@ -37,6 +37,9 @@ pub struct Body {
pub pats: Arena<Pat>,
pub bindings: Arena<Binding>,
pub labels: Arena<Label>,
+ /// Id of the closure/generator that owns the corresponding binding. If a binding is owned by the
+ /// top level expression, it will not be listed in here.
+ pub binding_owners: FxHashMap<BindingId, ExprId>,
/// The patterns for the function's parameters. While the parameter types are
/// part of the function signature, the patterns are not (they don't change
/// the external type of the function).
@@ -118,7 +121,8 @@ impl Body {
let _p = profile::span("body_with_source_map_query");
let mut params = None;
- let (file_id, module, body, is_async_fn) = {
+ let mut is_async_fn = false;
+ let InFile { file_id, value: body } = {
match def {
DefWithBodyId::FunctionId(f) => {
let data = db.function_data(f);
@@ -138,31 +142,27 @@ impl Body {
}),
)
});
- (
- src.file_id,
- f.module(db),
- src.value.body().map(ast::Expr::from),
- data.has_async_kw(),
- )
+ is_async_fn = data.has_async_kw();
+ src.map(|it| it.body().map(ast::Expr::from))
}
DefWithBodyId::ConstId(c) => {
let c = c.lookup(db);
let src = c.source(db);
- (src.file_id, c.module(db), src.value.body(), false)
+ src.map(|it| it.body())
}
DefWithBodyId::StaticId(s) => {
let s = s.lookup(db);
let src = s.source(db);
- (src.file_id, s.module(db), src.value.body(), false)
+ src.map(|it| it.body())
}
DefWithBodyId::VariantId(v) => {
- let e = v.parent.lookup(db);
let src = v.parent.child_source(db);
- let variant = &src.value[v.local_id];
- (src.file_id, e.container, variant.expr(), false)
+ src.map(|it| it[v.local_id].expr())
}
+ DefWithBodyId::InTypeConstId(c) => c.lookup(db).id.map(|_| c.source(db).expr()),
}
};
+ let module = def.module(db);
let expander = Expander::new(db, file_id, module);
let (mut body, source_map) =
Body::new(db, def, expander, params, body, module.krate, is_async_fn);
@@ -209,14 +209,24 @@ impl Body {
}
fn shrink_to_fit(&mut self) {
- let Self { _c: _, body_expr: _, block_scopes, exprs, labels, params, pats, bindings } =
- self;
+ let Self {
+ _c: _,
+ body_expr: _,
+ block_scopes,
+ exprs,
+ labels,
+ params,
+ pats,
+ bindings,
+ binding_owners,
+ } = self;
block_scopes.shrink_to_fit();
exprs.shrink_to_fit();
labels.shrink_to_fit();
params.shrink_to_fit();
pats.shrink_to_fit();
bindings.shrink_to_fit();
+ binding_owners.shrink_to_fit();
}
pub fn walk_bindings_in_pat(&self, pat_id: PatId, mut f: impl FnMut(BindingId)) {
@@ -260,6 +270,17 @@ impl Body {
f(pat_id);
self.walk_pats_shallow(pat_id, |p| self.walk_pats(p, f));
}
+
+ pub fn is_binding_upvar(&self, binding: BindingId, relative_to: ExprId) -> bool {
+ match self.binding_owners.get(&binding) {
+ Some(x) => {
+ // We assign expression ids in a way that outer closures will receive
+ // a lower id
+ x.into_raw() < relative_to.into_raw()
+ }
+ None => true,
+ }
+ }
}
impl Default for Body {
@@ -272,6 +293,7 @@ impl Default for Body {
labels: Default::default(),
params: Default::default(),
block_scopes: Default::default(),
+ binding_owners: Default::default(),
_c: Default::default(),
}
}
diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs
index 7b88e525bf..b375ec63a6 100644
--- a/crates/hir-def/src/body/lower.rs
+++ b/crates/hir-def/src/body/lower.rs
@@ -11,7 +11,6 @@ use hir_expand::{
AstId, ExpandError, InFile,
};
use intern::Interned;
-use la_arena::Arena;
use profile::Count;
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
@@ -40,7 +39,7 @@ use crate::{
nameres::{DefMap, MacroSubNs},
path::{GenericArgs, Path},
type_ref::{Mutability, Rawness, TypeRef},
- AdtId, BlockId, BlockLoc, DefWithBodyId, ModuleDefId, UnresolvedMacro,
+ AdtId, BlockId, BlockLoc, ConstBlockLoc, DefWithBodyId, ModuleDefId, UnresolvedMacro,
};
pub(super) fn lower(
@@ -60,10 +59,11 @@ pub(super) fn lower(
source_map: BodySourceMap::default(),
ast_id_map: db.ast_id_map(expander.current_file_id),
body: Body {
- exprs: Arena::default(),
- pats: Arena::default(),
- bindings: Arena::default(),
- labels: Arena::default(),
+ exprs: Default::default(),
+ pats: Default::default(),
+ bindings: Default::default(),
+ binding_owners: Default::default(),
+ labels: Default::default(),
params: Vec::new(),
body_expr: dummy_expr_id(),
block_scopes: Vec::new(),
@@ -188,7 +188,7 @@ impl ExprCollector<'_> {
param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false))
{
let ptr = AstPtr::new(&self_param);
- let binding_id = self.alloc_binding(
+ let binding_id: la_arena::Idx<Binding> = self.alloc_binding(
name![self],
BindingAnnotation::new(
self_param.mut_token().is_some() && self_param.amp_token().is_none(),
@@ -297,7 +297,10 @@ impl ExprCollector<'_> {
let (result_expr_id, prev_binding_owner) =
this.initialize_binding_owner(syntax_ptr);
let inner_expr = this.collect_block(e);
- let x = this.db.intern_anonymous_const((this.owner, inner_expr));
+ let x = this.db.intern_anonymous_const(ConstBlockLoc {
+ parent: this.owner,
+ root: inner_expr,
+ });
this.body.exprs[result_expr_id] = Expr::Const(x);
this.current_binding_owner = prev_binding_owner;
result_expr_id
@@ -742,16 +745,14 @@ impl ExprCollector<'_> {
/// }
/// ```
fn collect_for_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::ForExpr) -> ExprId {
- let (into_iter_fn, iter_next_fn, option_some, option_none) = 'if_chain: {
- if let Some(into_iter_fn) = LangItem::IntoIterIntoIter.path(self.db, self.krate) {
- if let Some(iter_next_fn) = LangItem::IteratorNext.path(self.db, self.krate) {
- if let Some(option_some) = LangItem::OptionSome.path(self.db, self.krate) {
- if let Some(option_none) = LangItem::OptionNone.path(self.db, self.krate) {
- break 'if_chain (into_iter_fn, iter_next_fn, option_some, option_none);
- }
- }
- }
- }
+ let Some((into_iter_fn, iter_next_fn, option_some, option_none)) = (|| {
+ Some((
+ LangItem::IntoIterIntoIter.path(self.db, self.krate)?,
+ LangItem::IteratorNext.path(self.db, self.krate)?,
+ LangItem::OptionSome.path(self.db, self.krate)?,
+ LangItem::OptionNone.path(self.db, self.krate)?,
+ ))
+ })() else {
// Some of the needed lang items are missing, so we can't desugar
return self.alloc_expr(Expr::Missing, syntax_ptr);
};
@@ -784,8 +785,8 @@ impl ExprCollector<'_> {
}),
};
let iter_name = Name::generate_new_name();
- let iter_binding = self.alloc_binding(iter_name.clone(), BindingAnnotation::Mutable);
- let iter_expr = self.alloc_expr(Expr::Path(Path::from(iter_name)), syntax_ptr.clone());
+ let iter_expr =
+ self.alloc_expr(Expr::Path(Path::from(iter_name.clone())), syntax_ptr.clone());
let iter_expr_mut = self.alloc_expr(
Expr::Ref { expr: iter_expr, rawness: Rawness::Ref, mutability: Mutability::Mut },
syntax_ptr.clone(),
@@ -805,7 +806,9 @@ impl ExprCollector<'_> {
);
let loop_outer =
self.alloc_expr(Expr::Loop { body: loop_inner, label }, syntax_ptr.clone());
+ let iter_binding = self.alloc_binding(iter_name, BindingAnnotation::Mutable);
let iter_pat = self.alloc_pat_desugared(Pat::Bind { id: iter_binding, subpat: None });
+ self.add_definition_to_binding(iter_binding, iter_pat);
self.alloc_expr(
Expr::Match {
expr: iterator,
@@ -827,18 +830,14 @@ impl ExprCollector<'_> {
/// }
/// ```
fn collect_try_operator(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::TryExpr) -> ExprId {
- let (try_branch, cf_continue, cf_break, try_from_residual) = 'if_chain: {
- if let Some(try_branch) = LangItem::TryTraitBranch.path(self.db, self.krate) {
- if let Some(cf_continue) = LangItem::ControlFlowContinue.path(self.db, self.krate) {
- if let Some(cf_break) = LangItem::ControlFlowBreak.path(self.db, self.krate) {
- if let Some(try_from_residual) =
- LangItem::TryTraitFromResidual.path(self.db, self.krate)
- {
- break 'if_chain (try_branch, cf_continue, cf_break, try_from_residual);
- }
- }
- }
- }
+ let Some((try_branch, cf_continue, cf_break, try_from_residual)) = (|| {
+ Some((
+ LangItem::TryTraitBranch.path(self.db, self.krate)?,
+ LangItem::ControlFlowContinue.path(self.db, self.krate)?,
+ LangItem::ControlFlowBreak.path(self.db, self.krate)?,
+ LangItem::TryTraitFromResidual.path(self.db, self.krate)?,
+ ))
+ })() else {
// Some of the needed lang items are missing, so we can't desugar
return self.alloc_expr(Expr::Missing, syntax_ptr);
};
@@ -1541,13 +1540,16 @@ impl ExprCollector<'_> {
}
fn alloc_binding(&mut self, name: Name, mode: BindingAnnotation) -> BindingId {
- self.body.bindings.alloc(Binding {
+ let binding = self.body.bindings.alloc(Binding {
name,
mode,
definitions: SmallVec::new(),
- owner: self.current_binding_owner,
problems: None,
- })
+ });
+ if let Some(owner) = self.current_binding_owner {
+ self.body.binding_owners.insert(binding, owner);
+ }
+ binding
}
fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs
index 88380aa355..cd6df0e632 100644
--- a/crates/hir-def/src/body/pretty.rs
+++ b/crates/hir-def/src/body/pretty.rs
@@ -40,6 +40,7 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
};
format!("const {name} = ")
}
+ DefWithBodyId::InTypeConstId(_) => format!("In type const = "),
DefWithBodyId::VariantId(it) => {
let src = it.parent.child_source(db);
let variant = &src.value[it.local_id];
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index 6d18e3f56c..04ec47f84c 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -16,21 +16,22 @@ use crate::{
TraitAliasData, TraitData, TypeAliasData,
},
generics::GenericParams,
- hir::ExprId,
import_map::ImportMap,
item_tree::{AttrOwner, ItemTree},
lang_item::{LangItem, LangItemTarget, LangItems},
nameres::{diagnostics::DefDiagnostic, DefMap},
visibility::{self, Visibility},
- AnonymousConstId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId,
- EnumLoc, ExternBlockId, ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc,
- LocalEnumVariantId, LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc,
- ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitAliasId,
- TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, VariantId,
+ AttrDefId, BlockId, BlockLoc, ConstBlockId, ConstBlockLoc, ConstId, ConstLoc, DefWithBodyId,
+ EnumId, EnumLoc, ExternBlockId, ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId,
+ ImplLoc, InTypeConstId, InTypeConstLoc, LocalEnumVariantId, LocalFieldId, Macro2Id, Macro2Loc,
+ MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId,
+ StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId,
+ UnionLoc, VariantId,
};
#[salsa::query_group(InternDatabaseStorage)]
pub trait InternDatabase: SourceDatabase {
+ // region: items
#[salsa::interned]
fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
#[salsa::interned]
@@ -54,15 +55,19 @@ pub trait InternDatabase: SourceDatabase {
#[salsa::interned]
fn intern_extern_block(&self, loc: ExternBlockLoc) -> ExternBlockId;
#[salsa::interned]
- fn intern_block(&self, loc: BlockLoc) -> BlockId;
- #[salsa::interned]
fn intern_macro2(&self, loc: Macro2Loc) -> Macro2Id;
#[salsa::interned]
fn intern_proc_macro(&self, loc: ProcMacroLoc) -> ProcMacroId;
#[salsa::interned]
fn intern_macro_rules(&self, loc: MacroRulesLoc) -> MacroRulesId;
+ // endregion: items
+
+ #[salsa::interned]
+ fn intern_block(&self, loc: BlockLoc) -> BlockId;
+ #[salsa::interned]
+ fn intern_anonymous_const(&self, id: ConstBlockLoc) -> ConstBlockId;
#[salsa::interned]
- fn intern_anonymous_const(&self, id: (DefWithBodyId, ExprId)) -> AnonymousConstId;
+ fn intern_in_type_const(&self, id: InTypeConstLoc) -> InTypeConstId;
}
#[salsa::query_group(DefDatabaseStorage)]
diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs
index 34ed1e72f2..a588827c8d 100644
--- a/crates/hir-def/src/expander.rs
+++ b/crates/hir-def/src/expander.rs
@@ -113,10 +113,10 @@ impl Expander {
call_id: MacroCallId,
error: Option<ExpandError>,
) -> ExpandResult<Option<InFile<Parse<SyntaxNode>>>> {
- let file_id = call_id.as_file();
- let ExpandResult { value, err } = db.parse_or_expand_with_err(file_id);
+ let macro_file = call_id.as_macro_file();
+ let ExpandResult { value, err } = db.parse_macro_expansion(macro_file);
- ExpandResult { value: Some(InFile::new(file_id, value)), err: error.or(err) }
+ ExpandResult { value: Some(InFile::new(macro_file.into(), value.0)), err: error.or(err) }
}
pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
@@ -155,7 +155,7 @@ impl Expander {
}
pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
- let ctx = LowerCtx::with_hygiene(db, &self.cfg_expander.hygiene);
+ let ctx = LowerCtx::new(db, &self.cfg_expander.hygiene, self.current_file_id);
Path::from_src(path, &ctx)
}
@@ -179,8 +179,8 @@ impl Expander {
} else if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() {
self.recursion_depth = u32::MAX;
cov_mark::hit!(your_stack_belongs_to_me);
- return ExpandResult::only_err(ExpandError::Other(
- "reached recursion limit during macro expansion".into(),
+ return ExpandResult::only_err(ExpandError::other(
+ "reached recursion limit during macro expansion",
));
}
diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs
index e8cc2eab46..8c49ae1c4a 100644
--- a/crates/hir-def/src/find_path.rs
+++ b/crates/hir-def/src/find_path.rs
@@ -81,7 +81,7 @@ fn find_path_inner(
}
let def_map = from.def_map(db);
- let crate_root = def_map.crate_root();
+ let crate_root = def_map.crate_root().into();
// - if the item is a module, jump straight to module search
if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item {
let mut visited_modules = FxHashSet::default();
@@ -374,7 +374,7 @@ fn calculate_best_path(
}
}
if let Some(module) = item.module(db) {
- if module.def_map(db).block_id().is_some() && prefixed.is_some() {
+ if module.containing_block().is_some() && prefixed.is_some() {
cov_mark::hit!(prefixed_in_block_expression);
prefixed = Some(PrefixKind::Plain);
}
diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs
index 4ad8a7aa8e..500e880061 100644
--- a/crates/hir-def/src/hir.rs
+++ b/crates/hir-def/src/hir.rs
@@ -26,7 +26,7 @@ use crate::{
builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint},
path::{GenericArgs, Path},
type_ref::{Mutability, Rawness, TypeRef},
- AnonymousConstId, BlockId,
+ BlockId, ConstBlockId,
};
pub use syntax::ast::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp};
@@ -181,7 +181,7 @@ pub enum Expr {
statements: Box<[Statement]>,
tail: Option<ExprId>,
},
- Const(AnonymousConstId),
+ Const(ConstBlockId),
Unsafe {
id: Option<BlockId>,
statements: Box<[Statement]>,
@@ -501,25 +501,9 @@ pub struct Binding {
pub name: Name,
pub mode: BindingAnnotation,
pub definitions: SmallVec<[PatId; 1]>,
- /// Id of the closure/generator that owns this binding. If it is owned by the
- /// top level expression, this field would be `None`.
- pub owner: Option<ExprId>,
pub problems: Option<BindingProblems>,
}
-impl Binding {
- pub fn is_upvar(&self, relative_to: ExprId) -> bool {
- match self.owner {
- Some(x) => {
- // We assign expression ids in a way that outer closures will receive
- // a lower id
- x.into_raw() < relative_to.into_raw()
- }
- None => true,
- }
- }
-}
-
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct RecordFieldPat {
pub name: Name,
diff --git a/crates/hir-def/src/hir/type_ref.rs b/crates/hir-def/src/hir/type_ref.rs
index 0573c9a6f8..fa1f4933a2 100644
--- a/crates/hir-def/src/hir/type_ref.rs
+++ b/crates/hir-def/src/hir/type_ref.rs
@@ -118,7 +118,7 @@ pub enum TypeRef {
Reference(Box<TypeRef>, Option<LifetimeRef>, Mutability),
// FIXME: for full const generics, the latter element (length) here is going to have to be an
// expression that is further lowered later in hir_ty.
- Array(Box<TypeRef>, ConstRefOrPath),
+ Array(Box<TypeRef>, ConstRef),
Slice(Box<TypeRef>),
/// A fn pointer. Last element of the vector is the return type.
Fn(Vec<(Option<Name>, TypeRef)>, bool /*varargs*/, bool /*is_unsafe*/),
@@ -186,11 +186,7 @@ impl TypeRef {
TypeRef::RawPtr(Box::new(inner_ty), mutability)
}
ast::Type::ArrayType(inner) => {
- // FIXME: This is a hack. We should probably reuse the machinery of
- // `hir_def::body::lower` to lower this into an `Expr` and then evaluate it at the
- // `hir_ty` level, which would allow knowing the type of:
- // let v: [u8; 2 + 2] = [0u8; 4];
- let len = ConstRefOrPath::from_expr_opt(inner.expr());
+ let len = ConstRef::from_const_arg(ctx, inner.const_arg());
TypeRef::Array(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())), len)
}
ast::Type::SliceType(inner) => {
@@ -380,73 +376,84 @@ impl TypeBound {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum ConstRefOrPath {
- Scalar(ConstRef),
+pub enum ConstRef {
+ Scalar(LiteralConstRef),
Path(Name),
+ Complex(AstId<ast::ConstArg>),
}
-impl ConstRefOrPath {
- pub(crate) fn from_expr_opt(expr: Option<ast::Expr>) -> Self {
- match expr {
- Some(x) => Self::from_expr(x),
- None => Self::Scalar(ConstRef::Unknown),
+impl ConstRef {
+ pub(crate) fn from_const_arg(lower_ctx: &LowerCtx<'_>, arg: Option<ast::ConstArg>) -> Self {
+ if let Some(arg) = arg {
+ let ast_id = lower_ctx.ast_id(&arg);
+ if let Some(expr) = arg.expr() {
+ return Self::from_expr(expr, ast_id);
+ }
}
+ Self::Scalar(LiteralConstRef::Unknown)
}
pub fn display<'a>(&'a self, db: &'a dyn ExpandDatabase) -> impl fmt::Display + 'a {
- struct Display<'a>(&'a dyn ExpandDatabase, &'a ConstRefOrPath);
+ struct Display<'a>(&'a dyn ExpandDatabase, &'a ConstRef);
impl fmt::Display for Display<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.1 {
- ConstRefOrPath::Scalar(s) => s.fmt(f),
- ConstRefOrPath::Path(n) => n.display(self.0).fmt(f),
+ ConstRef::Scalar(s) => s.fmt(f),
+ ConstRef::Path(n) => n.display(self.0).fmt(f),
+ ConstRef::Complex(_) => f.write_str("{const}"),
}
}
}
Display(db, self)
}
- // FIXME: as per the comments on `TypeRef::Array`, this evaluation should not happen at this
- // parse stage.
- fn from_expr(expr: ast::Expr) -> Self {
+ // We special case literals and single identifiers, to speed up things.
+ fn from_expr(expr: ast::Expr, ast_id: Option<AstId<ast::ConstArg>>) -> Self {
+ fn is_path_ident(p: &ast::PathExpr) -> bool {
+ let Some(path) = p.path() else {
+ return false;
+ };
+ if path.coloncolon_token().is_some() {
+ return false;
+ }
+ if let Some(s) = path.segment() {
+ if s.coloncolon_token().is_some() || s.generic_arg_list().is_some() {
+ return false;
+ }
+ }
+ true
+ }
match expr {
- ast::Expr::PathExpr(p) => {
+ ast::Expr::PathExpr(p) if is_path_ident(&p) => {
match p.path().and_then(|x| x.segment()).and_then(|x| x.name_ref()) {
Some(x) => Self::Path(x.as_name()),
- None => Self::Scalar(ConstRef::Unknown),
+ None => Self::Scalar(LiteralConstRef::Unknown),
}
}
- ast::Expr::PrefixExpr(prefix_expr) => match prefix_expr.op_kind() {
- Some(ast::UnaryOp::Neg) => {
- let unsigned = Self::from_expr_opt(prefix_expr.expr());
- // Add sign
- match unsigned {
- Self::Scalar(ConstRef::UInt(num)) => {
- Self::Scalar(ConstRef::Int(-(num as i128)))
- }
- other => other,
- }
- }
- _ => Self::from_expr_opt(prefix_expr.expr()),
- },
ast::Expr::Literal(literal) => Self::Scalar(match literal.kind() {
ast::LiteralKind::IntNumber(num) => {
- num.value().map(ConstRef::UInt).unwrap_or(ConstRef::Unknown)
+ num.value().map(LiteralConstRef::UInt).unwrap_or(LiteralConstRef::Unknown)
}
ast::LiteralKind::Char(c) => {
- c.value().map(ConstRef::Char).unwrap_or(ConstRef::Unknown)
+ c.value().map(LiteralConstRef::Char).unwrap_or(LiteralConstRef::Unknown)
}
- ast::LiteralKind::Bool(f) => ConstRef::Bool(f),
- _ => ConstRef::Unknown,
+ ast::LiteralKind::Bool(f) => LiteralConstRef::Bool(f),
+ _ => LiteralConstRef::Unknown,
}),
- _ => Self::Scalar(ConstRef::Unknown),
+ _ => {
+ if let Some(ast_id) = ast_id {
+ Self::Complex(ast_id)
+ } else {
+ Self::Scalar(LiteralConstRef::Unknown)
+ }
+ }
}
}
}
-/// A concrete constant value
+/// A literal constant value
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum ConstRef {
+pub enum LiteralConstRef {
Int(i128),
UInt(u128),
Bool(bool),
@@ -460,18 +467,20 @@ pub enum ConstRef {
Unknown,
}
-impl ConstRef {
+impl LiteralConstRef {
pub fn builtin_type(&self) -> BuiltinType {
match self {
- ConstRef::UInt(_) | ConstRef::Unknown => BuiltinType::Uint(BuiltinUint::U128),
- ConstRef::Int(_) => BuiltinType::Int(BuiltinInt::I128),
- ConstRef::Char(_) => BuiltinType::Char,
- ConstRef::Bool(_) => BuiltinType::Bool,
+ LiteralConstRef::UInt(_) | LiteralConstRef::Unknown => {
+ BuiltinType::Uint(BuiltinUint::U128)
+ }
+ LiteralConstRef::Int(_) => BuiltinType::Int(BuiltinInt::I128),
+ LiteralConstRef::Char(_) => BuiltinType::Char,
+ LiteralConstRef::Bool(_) => BuiltinType::Bool,
}
}
}
-impl From<Literal> for ConstRef {
+impl From<Literal> for LiteralConstRef {
fn from(literal: Literal) -> Self {
match literal {
Literal::Char(c) => Self::Char(c),
@@ -483,14 +492,14 @@ impl From<Literal> for ConstRef {
}
}
-impl std::fmt::Display for ConstRef {
+impl std::fmt::Display for LiteralConstRef {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
match self {
- ConstRef::Int(num) => num.fmt(f),
- ConstRef::UInt(num) => num.fmt(f),
- ConstRef::Bool(flag) => flag.fmt(f),
- ConstRef::Char(c) => write!(f, "'{c}'"),
- ConstRef::Unknown => f.write_char('_'),
+ LiteralConstRef::Int(num) => num.fmt(f),
+ LiteralConstRef::UInt(num) => num.fmt(f),
+ LiteralConstRef::Bool(flag) => flag.fmt(f),
+ LiteralConstRef::Char(c) => write!(f, "'{c}'"),
+ LiteralConstRef::Unknown => f.write_char('_'),
}
}
}
diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs
index 3ed321d189..2001fb29a9 100644
--- a/crates/hir-def/src/item_scope.rs
+++ b/crates/hir-def/src/item_scope.rs
@@ -334,10 +334,6 @@ impl ItemScope {
)
}
- pub(crate) fn collect_legacy_macros(&self) -> FxHashMap<Name, SmallVec<[MacroId; 1]>> {
- self.legacy_macros.clone()
- }
-
/// Marks everything that is not a procedural macro as private to `this_module`.
pub(crate) fn censor_non_proc_macros(&mut self, this_module: ModuleId) {
self.types
diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs
index 590ed64af5..e74b71888c 100644
--- a/crates/hir-def/src/item_tree.rs
+++ b/crates/hir-def/src/item_tree.rs
@@ -101,7 +101,6 @@ pub struct ItemTree {
top_level: SmallVec<[ModItem; 1]>,
attrs: FxHashMap<AttrOwner, RawAttrs>,
- // FIXME: Remove this indirection, an item tree is almost always non-empty?
data: Option<Box<ItemTreeData>>,
}
@@ -718,7 +717,6 @@ pub struct Mod {
pub enum ModKind {
/// `mod m { ... }`
Inline { items: Box<[ModItem]> },
-
/// `mod m;`
Outline,
}
@@ -892,10 +890,6 @@ impl ModItem {
}
}
- pub fn downcast<N: ItemTreeNode>(self) -> Option<FileItemTreeId<N>> {
- N::id_from_mod_item(self)
- }
-
pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> {
match self {
ModItem::Import(it) => tree[it.index].ast_id().upcast(),
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index 9cd3dfd6f7..9d8b57a0da 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -57,13 +57,12 @@ mod test_db;
mod macro_expansion_tests;
mod pretty;
-use std::hash::{Hash, Hasher};
-
-use base_db::{
- impl_intern_key,
- salsa::{self, InternId},
- CrateId, ProcMacroKind,
+use std::{
+ hash::{Hash, Hasher},
+ panic::{RefUnwindSafe, UnwindSafe},
};
+
+use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind};
use hir_expand::{
ast_id_map::FileAstId,
attrs::{Attr, AttrId, AttrInput},
@@ -71,7 +70,7 @@ use hir_expand::{
builtin_derive_macro::BuiltinDeriveExpander,
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
- eager::expand_eager_macro,
+ eager::expand_eager_macro_input,
hygiene::Hygiene,
proc_macro::ProcMacroExpander,
AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
@@ -89,11 +88,51 @@ use crate::{
builtin_type::BuiltinType,
data::adt::VariantData,
item_tree::{
- Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, ModItem,
- Static, Struct, Trait, TraitAlias, TypeAlias, Union,
+ Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, Static,
+ Struct, Trait, TraitAlias, TypeAlias, Union,
},
};
+/// A `ModuleId` that is always a crate's root module.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct CrateRootModuleId {
+ krate: CrateId,
+}
+
+impl CrateRootModuleId {
+ pub fn def_map(&self, db: &dyn db::DefDatabase) -> Arc<DefMap> {
+ db.crate_def_map(self.krate)
+ }
+
+ pub fn krate(self) -> CrateId {
+ self.krate
+ }
+}
+
+impl From<CrateRootModuleId> for ModuleId {
+ fn from(CrateRootModuleId { krate }: CrateRootModuleId) -> Self {
+ ModuleId { krate, block: None, local_id: DefMap::ROOT }
+ }
+}
+
+impl From<CrateRootModuleId> for ModuleDefId {
+ fn from(value: CrateRootModuleId) -> Self {
+ ModuleDefId::ModuleId(value.into())
+ }
+}
+
+impl TryFrom<ModuleId> for CrateRootModuleId {
+ type Error = ();
+
+ fn try_from(ModuleId { krate, block, local_id }: ModuleId) -> Result<Self, Self::Error> {
+ if block.is_none() && local_id == DefMap::ROOT {
+ Ok(CrateRootModuleId { krate })
+ } else {
+ Err(())
+ }
+ }
+}
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ModuleId {
krate: CrateId,
@@ -315,8 +354,7 @@ impl_intern!(MacroRulesId, MacroRulesLoc, intern_macro_rules, lookup_intern_macr
pub struct ProcMacroId(salsa::InternId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ProcMacroLoc {
- // FIXME: this should be a crate? or just a crate-root module
- pub container: ModuleId,
+ pub container: CrateRootModuleId,
pub id: ItemTreeId<Function>,
pub expander: ProcMacroExpander,
pub kind: ProcMacroKind,
@@ -476,29 +514,199 @@ impl_from!(
for ModuleDefId
);
-// FIXME: make this a DefWithBodyId
+/// Id of the anonymous const block expression and patterns. This is very similar to `ClosureId` and
+/// shouldn't be a `DefWithBodyId` since its type inference is dependent on its parent.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
-pub struct AnonymousConstId(InternId);
-impl_intern_key!(AnonymousConstId);
+pub struct ConstBlockId(salsa::InternId);
+impl_intern!(ConstBlockId, ConstBlockLoc, intern_anonymous_const, lookup_intern_anonymous_const);
+
+#[derive(Debug, Hash, PartialEq, Eq, Clone)]
+pub struct ConstBlockLoc {
+ /// The parent of the anonymous const block.
+ pub parent: DefWithBodyId,
+ /// The root expression of this const block in the parent body.
+ pub root: hir::ExprId,
+}
+
+#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
+pub enum TypeOwnerId {
+ FunctionId(FunctionId),
+ StaticId(StaticId),
+ ConstId(ConstId),
+ InTypeConstId(InTypeConstId),
+ AdtId(AdtId),
+ TraitId(TraitId),
+ TraitAliasId(TraitAliasId),
+ TypeAliasId(TypeAliasId),
+ ImplId(ImplId),
+ EnumVariantId(EnumVariantId),
+ // FIXME(const-generic-body): ModuleId should not be a type owner. This needs to be fixed to make `TypeOwnerId` actually
+ // useful for assigning ids to in type consts.
+ ModuleId(ModuleId),
+}
+
+impl TypeOwnerId {
+ fn as_generic_def_id(self) -> Option<GenericDefId> {
+ Some(match self {
+ TypeOwnerId::FunctionId(x) => GenericDefId::FunctionId(x),
+ TypeOwnerId::ConstId(x) => GenericDefId::ConstId(x),
+ TypeOwnerId::AdtId(x) => GenericDefId::AdtId(x),
+ TypeOwnerId::TraitId(x) => GenericDefId::TraitId(x),
+ TypeOwnerId::TraitAliasId(x) => GenericDefId::TraitAliasId(x),
+ TypeOwnerId::TypeAliasId(x) => GenericDefId::TypeAliasId(x),
+ TypeOwnerId::ImplId(x) => GenericDefId::ImplId(x),
+ TypeOwnerId::EnumVariantId(x) => GenericDefId::EnumVariantId(x),
+ TypeOwnerId::InTypeConstId(_) | TypeOwnerId::ModuleId(_) | TypeOwnerId::StaticId(_) => {
+ return None
+ }
+ })
+ }
+}
+
+impl_from!(
+ FunctionId,
+ StaticId,
+ ConstId,
+ InTypeConstId,
+ AdtId,
+ TraitId,
+ TraitAliasId,
+ TypeAliasId,
+ ImplId,
+ EnumVariantId,
+ ModuleId
+ for TypeOwnerId
+);
+
+// Every `DefWithBodyId` is a type owner, since bodies can contain type (e.g. `{ let x: Type = _; }`)
+impl From<DefWithBodyId> for TypeOwnerId {
+ fn from(value: DefWithBodyId) -> Self {
+ match value {
+ DefWithBodyId::FunctionId(x) => x.into(),
+ DefWithBodyId::StaticId(x) => x.into(),
+ DefWithBodyId::ConstId(x) => x.into(),
+ DefWithBodyId::InTypeConstId(x) => x.into(),
+ DefWithBodyId::VariantId(x) => x.into(),
+ }
+ }
+}
+
+impl From<GenericDefId> for TypeOwnerId {
+ fn from(value: GenericDefId) -> Self {
+ match value {
+ GenericDefId::FunctionId(x) => x.into(),
+ GenericDefId::AdtId(x) => x.into(),
+ GenericDefId::TraitId(x) => x.into(),
+ GenericDefId::TraitAliasId(x) => x.into(),
+ GenericDefId::TypeAliasId(x) => x.into(),
+ GenericDefId::ImplId(x) => x.into(),
+ GenericDefId::EnumVariantId(x) => x.into(),
+ GenericDefId::ConstId(x) => x.into(),
+ }
+ }
+}
+
+// FIXME: This should not be a thing
+/// A thing that we want to store in interned ids, but we don't know its type in `hir-def`. This is
+/// currently only used in `InTypeConstId` for storing the type (which has type `Ty` defined in
+/// the `hir-ty` crate) of the constant in its id, which is a temporary hack so we may want
+/// to remove this after removing that.
+pub trait OpaqueInternableThing:
+ std::any::Any + std::fmt::Debug + Sync + Send + UnwindSafe + RefUnwindSafe
+{
+ fn as_any(&self) -> &dyn std::any::Any;
+ fn box_any(&self) -> Box<dyn std::any::Any>;
+ fn dyn_hash(&self, state: &mut dyn Hasher);
+ fn dyn_eq(&self, other: &dyn OpaqueInternableThing) -> bool;
+ fn dyn_clone(&self) -> Box<dyn OpaqueInternableThing>;
+}
+
+impl Hash for dyn OpaqueInternableThing {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.dyn_hash(state);
+ }
+}
+
+impl PartialEq for dyn OpaqueInternableThing {
+ fn eq(&self, other: &Self) -> bool {
+ self.dyn_eq(other)
+ }
+}
+
+impl Eq for dyn OpaqueInternableThing {}
+
+impl Clone for Box<dyn OpaqueInternableThing> {
+ fn clone(&self) -> Self {
+ self.dyn_clone()
+ }
+}
+
+// FIXME(const-generic-body): Use an stable id for in type consts.
+//
+// The current id uses `AstId<ast::ConstArg>` which will be changed by every change in the code. Ideally
+// we should use an id which is relative to the type owner, so that every change will only invalidate the
+// id if it happens inside of the type owner.
+//
+// The solution probably is to have some query on `TypeOwnerId` to traverse its constant children and store
+// their `AstId` in a list (vector or arena), and use the index of that list in the id here. That query probably
+// needs name resolution, and might go far and handles the whole path lowering or type lowering for a `TypeOwnerId`.
+//
+// Whatever path the solution takes, it should answer 3 questions at the same time:
+// * Is the id stable enough?
+// * How to find a constant id using an ast node / position in the source code? This is needed when we want to
+// provide ide functionalities inside an in type const (which we currently don't support) e.g. go to definition
+// for a local defined there. A complex id might have some trouble in this reverse mapping.
+// * How to find the return type of a constant using its id? We have this data when we are doing type lowering
+// and the name of the struct that contains this constant is resolved, so a query that only traverses the
+// type owner by its syntax tree might have a hard time here.
+
+/// A constant in a type as a substitution for const generics (like `Foo<{ 2 + 2 }>`) or as an array
+/// length (like `[u8; 2 + 2]`). These constants are body owner and are a variant of `DefWithBodyId`. These
+/// are not called `AnonymousConstId` to prevent confusion with [`ConstBlockId`].
+#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
+pub struct InTypeConstId(salsa::InternId);
+impl_intern!(InTypeConstId, InTypeConstLoc, intern_in_type_const, lookup_intern_in_type_const);
+
+#[derive(Debug, Hash, Eq, Clone)]
+pub struct InTypeConstLoc {
+ pub id: AstId<ast::ConstArg>,
+ /// The thing this const arg appears in
+ pub owner: TypeOwnerId,
+ pub thing: Box<dyn OpaqueInternableThing>,
+}
+
+impl PartialEq for InTypeConstLoc {
+ fn eq(&self, other: &Self) -> bool {
+ self.id == other.id && self.owner == other.owner && &*self.thing == &*other.thing
+ }
+}
+
+impl InTypeConstId {
+ pub fn source(&self, db: &dyn db::DefDatabase) -> ast::ConstArg {
+ let src = self.lookup(db).id;
+ let file_id = src.file_id;
+ let root = &db.parse_or_expand(file_id);
+ db.ast_id_map(file_id).get(src.value).to_node(root)
+ }
+}
/// A constant, which might appear as a const item, an anonymous const block in expressions
/// or patterns, or as a constant in types with const generics.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum GeneralConstId {
ConstId(ConstId),
- AnonymousConstId(AnonymousConstId),
+ ConstBlockId(ConstBlockId),
+ InTypeConstId(InTypeConstId),
}
-impl_from!(ConstId, AnonymousConstId for GeneralConstId);
+impl_from!(ConstId, ConstBlockId, InTypeConstId for GeneralConstId);
impl GeneralConstId {
pub fn generic_def(self, db: &dyn db::DefDatabase) -> Option<GenericDefId> {
match self {
- GeneralConstId::ConstId(x) => Some(x.into()),
- GeneralConstId::AnonymousConstId(x) => {
- let (parent, _) = db.lookup_intern_anonymous_const(x);
- parent.as_generic_def_id()
- }
+ GeneralConstId::ConstId(it) => Some(it.into()),
+ GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(),
+ GeneralConstId::InTypeConstId(it) => it.lookup(db).owner.as_generic_def_id(),
}
}
@@ -511,7 +719,8 @@ impl GeneralConstId {
.and_then(|x| x.as_str())
.unwrap_or("_")
.to_owned(),
- GeneralConstId::AnonymousConstId(id) => format!("{{anonymous const {id:?}}}"),
+ GeneralConstId::ConstBlockId(id) => format!("{{anonymous const {id:?}}}"),
+ GeneralConstId::InTypeConstId(id) => format!("{{in type const {id:?}}}"),
}
}
}
@@ -522,10 +731,11 @@ pub enum DefWithBodyId {
FunctionId(FunctionId),
StaticId(StaticId),
ConstId(ConstId),
+ InTypeConstId(InTypeConstId),
VariantId(EnumVariantId),
}
-impl_from!(FunctionId, ConstId, StaticId for DefWithBodyId);
+impl_from!(FunctionId, ConstId, StaticId, InTypeConstId for DefWithBodyId);
impl From<EnumVariantId> for DefWithBodyId {
fn from(id: EnumVariantId) -> Self {
@@ -540,6 +750,9 @@ impl DefWithBodyId {
DefWithBodyId::StaticId(_) => None,
DefWithBodyId::ConstId(c) => Some(c.into()),
DefWithBodyId::VariantId(c) => Some(c.into()),
+ // FIXME: stable rust doesn't allow generics in constants, but we should
+ // use `TypeOwnerId::as_generic_def_id` when it does.
+ DefWithBodyId::InTypeConstId(_) => None,
}
}
}
@@ -729,29 +942,37 @@ impl HasModule for MacroId {
match self {
MacroId::MacroRulesId(it) => it.lookup(db).container,
MacroId::Macro2Id(it) => it.lookup(db).container,
- MacroId::ProcMacroId(it) => it.lookup(db).container,
+ MacroId::ProcMacroId(it) => it.lookup(db).container.into(),
}
}
}
-impl HasModule for DefWithBodyId {
+impl HasModule for TypeOwnerId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
match self {
- DefWithBodyId::FunctionId(it) => it.lookup(db).module(db),
- DefWithBodyId::StaticId(it) => it.lookup(db).module(db),
- DefWithBodyId::ConstId(it) => it.lookup(db).module(db),
- DefWithBodyId::VariantId(it) => it.parent.lookup(db).container,
+ TypeOwnerId::FunctionId(x) => x.lookup(db).module(db),
+ TypeOwnerId::StaticId(x) => x.lookup(db).module(db),
+ TypeOwnerId::ConstId(x) => x.lookup(db).module(db),
+ TypeOwnerId::InTypeConstId(x) => x.lookup(db).owner.module(db),
+ TypeOwnerId::AdtId(x) => x.module(db),
+ TypeOwnerId::TraitId(x) => x.lookup(db).container,
+ TypeOwnerId::TraitAliasId(x) => x.lookup(db).container,
+ TypeOwnerId::TypeAliasId(x) => x.lookup(db).module(db),
+ TypeOwnerId::ImplId(x) => x.lookup(db).container,
+ TypeOwnerId::EnumVariantId(x) => x.parent.lookup(db).container,
+ TypeOwnerId::ModuleId(x) => *x,
}
}
}
-impl DefWithBodyId {
- pub fn as_mod_item(self, db: &dyn db::DefDatabase) -> ModItem {
+impl HasModule for DefWithBodyId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
match self {
- DefWithBodyId::FunctionId(it) => it.lookup(db).id.value.into(),
- DefWithBodyId::StaticId(it) => it.lookup(db).id.value.into(),
- DefWithBodyId::ConstId(it) => it.lookup(db).id.value.into(),
- DefWithBodyId::VariantId(it) => it.parent.lookup(db).id.value.into(),
+ DefWithBodyId::FunctionId(it) => it.lookup(db).module(db),
+ DefWithBodyId::StaticId(it) => it.lookup(db).module(db),
+ DefWithBodyId::ConstId(it) => it.lookup(db).module(db),
+ DefWithBodyId::VariantId(it) => it.parent.lookup(db).container,
+ DefWithBodyId::InTypeConstId(it) => it.lookup(db).owner.module(db),
}
}
}
@@ -865,7 +1086,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
let path = self.value.path().and_then(|path| path::ModPath::from_src(db, path, &h));
let Some(path) = path else {
- return Ok(ExpandResult::only_err(ExpandError::Other("malformed macro invocation".into())));
+ return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
};
macro_call_as_call_id_(
@@ -913,7 +1134,7 @@ fn macro_call_as_call_id_(
let res = if let MacroDefKind::BuiltInEager(..) = def.kind {
let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
- expand_eager_macro(db, krate, macro_call, def, &resolver)?
+ expand_eager_macro_input(db, krate, macro_call, def, &resolver)?
} else {
ExpandResult {
value: Some(def.as_lazy_macro(
@@ -1028,13 +1249,13 @@ fn attr_macro_as_call_id(
def: MacroDefId,
) -> MacroCallId {
let arg = match macro_attr.input.as_deref() {
- Some(AttrInput::TokenTree(tt, map)) => (
+ Some(AttrInput::TokenTree(tt)) => (
{
- let mut tt = tt.clone();
+ let mut tt = tt.0.clone();
tt.delimiter = tt::Delimiter::UNSPECIFIED;
tt
},
- map.clone(),
+ tt.1.clone(),
),
_ => (tt::Subtree::empty(), Default::default()),
};
diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
index 80474bc154..f41f971904 100644
--- a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
+++ b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
@@ -115,6 +115,66 @@ impl <A: core::clone::Clone, B: core::clone::Clone, > core::clone::Clone for Com
}
#[test]
+fn test_clone_expand_with_associated_types() {
+ check(
+ r#"
+//- minicore: derive, clone
+trait Trait {
+ type InWc;
+ type InFieldQualified;
+ type InFieldShorthand;
+ type InGenericArg;
+}
+trait Marker {}
+struct Vec<T>(T);
+
+#[derive(Clone)]
+struct Foo<T: Trait>
+where
+ <T as Trait>::InWc: Marker,
+{
+ qualified: <T as Trait>::InFieldQualified,
+ shorthand: T::InFieldShorthand,
+ generic: Vec<T::InGenericArg>,
+}
+"#,
+ expect![[r#"
+trait Trait {
+ type InWc;
+ type InFieldQualified;
+ type InFieldShorthand;
+ type InGenericArg;
+}
+trait Marker {}
+struct Vec<T>(T);
+
+#[derive(Clone)]
+struct Foo<T: Trait>
+where
+ <T as Trait>::InWc: Marker,
+{
+ qualified: <T as Trait>::InFieldQualified,
+ shorthand: T::InFieldShorthand,
+ generic: Vec<T::InGenericArg>,
+}
+
+impl <T: core::clone::Clone, > core::clone::Clone for Foo<T, > where T: Trait, T::InFieldShorthand: core::clone::Clone, T::InGenericArg: core::clone::Clone, {
+ fn clone(&self ) -> Self {
+ match self {
+ Foo {
+ qualified: qualified, shorthand: shorthand, generic: generic,
+ }
+ =>Foo {
+ qualified: qualified.clone(), shorthand: shorthand.clone(), generic: generic.clone(),
+ }
+ ,
+ }
+ }
+}"#]],
+ );
+}
+
+#[test]
fn test_clone_expand_with_const_generics() {
check(
r#"
@@ -336,18 +396,18 @@ enum Command {
}
impl < > core::hash::Hash for Command< > where {
- fn hash<H: core::hash::Hasher>(&self , state: &mut H) {
- core::mem::discriminant(self ).hash(state);
+ fn hash<H: core::hash::Hasher>(&self , ra_expand_state: &mut H) {
+ core::mem::discriminant(self ).hash(ra_expand_state);
match self {
Command::Move {
x: x, y: y,
}
=> {
- x.hash(state);
- y.hash(state);
+ x.hash(ra_expand_state);
+ y.hash(ra_expand_state);
}
, Command::Do(f0, )=> {
- f0.hash(state);
+ f0.hash(ra_expand_state);
}
, Command::Jump=> {}
,
diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
index 977f300636..07d9baa589 100644
--- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
+++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
@@ -79,7 +79,7 @@ fn main() { env!("TEST_ENV_VAR"); }
#[rustc_builtin_macro]
macro_rules! env {() => {}}
-fn main() { "__RA_UNIMPLEMENTED__"; }
+fn main() { "UNRESOLVED_ENV_VAR"; }
"##]],
);
}
@@ -208,6 +208,44 @@ fn main() {
}
#[test]
+fn regression_15002() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ format_args!(x = 2);
+ format_args!(x =);
+ format_args!(x =, x = 2);
+ format_args!("{}", x =);
+ format_args!(=, "{}", x =);
+ format_args!(x = 2, "{}", 5);
+}
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+
+fn main() {
+ /* error: no rule matches input tokens */;
+ /* error: no rule matches input tokens */;
+ /* error: no rule matches input tokens */;
+ /* error: no rule matches input tokens */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::Argument::new(&(), ::core::fmt::Display::fmt), ]);
+ /* error: no rule matches input tokens */;
+ ::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::Argument::new(&(5), ::core::fmt::Display::fmt), ]);
+}
+"##]],
+ );
+}
+
+#[test]
fn test_format_args_expand_with_comma_exprs() {
check(
r#"
@@ -404,10 +442,6 @@ macro_rules! surprise {
() => { "s" };
}
-macro_rules! stuff {
- ($string:expr) => { concat!($string) };
-}
-
fn main() { concat!(surprise!()); }
"##,
expect![[r##"
@@ -418,10 +452,6 @@ macro_rules! surprise {
() => { "s" };
}
-macro_rules! stuff {
- ($string:expr) => { concat!($string) };
-}
-
fn main() { "s"; }
"##]],
);
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index 9b520bc303..0ab1bd8490 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -77,8 +77,8 @@ use crate::{
path::ModPath,
per_ns::PerNs,
visibility::Visibility,
- AstId, BlockId, BlockLoc, FunctionId, LocalModuleId, Lookup, MacroExpander, MacroId, ModuleId,
- ProcMacroId,
+ AstId, BlockId, BlockLoc, CrateRootModuleId, FunctionId, LocalModuleId, Lookup, MacroExpander,
+ MacroId, ModuleId, ProcMacroId,
};
/// Contains the results of (early) name resolution.
@@ -93,7 +93,10 @@ use crate::{
#[derive(Debug, PartialEq, Eq)]
pub struct DefMap {
_c: Count<Self>,
+ /// When this is a block def map, this will hold the block id of the block and module that
+ /// contains this block.
block: Option<BlockInfo>,
+ /// The modules and their data declared in this crate.
modules: Arena<ModuleData>,
krate: CrateId,
/// The prelude module for this crate. This either comes from an import
@@ -111,15 +114,18 @@ pub struct DefMap {
/// attributes.
derive_helpers_in_scope: FxHashMap<AstId<ast::Item>, Vec<(Name, MacroId, MacroCallId)>>,
+ /// The diagnostics that need to be emitted for this crate.
diagnostics: Vec<DefDiagnostic>,
+ /// The crate data that is shared between a crate's def map and all its block def maps.
data: Arc<DefMapCrateData>,
}
/// Data that belongs to a crate which is shared between a crate's def map and all its block def maps.
#[derive(Clone, Debug, PartialEq, Eq)]
struct DefMapCrateData {
- extern_prelude: FxHashMap<Name, ModuleId>,
+ /// The extern prelude which contains all root modules of external crates that are in scope.
+ extern_prelude: FxHashMap<Name, CrateRootModuleId>,
/// Side table for resolving derive helpers.
exported_derives: FxHashMap<MacroDefId, Box<[Name]>>,
@@ -279,6 +285,7 @@ pub struct ModuleData {
}
impl DefMap {
+ /// The module id of a crate or block root.
pub const ROOT: LocalModuleId = LocalModuleId::from_raw(la_arena::RawIdx::from_u32(0));
pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
@@ -419,11 +426,11 @@ impl DefMap {
}
pub(crate) fn extern_prelude(&self) -> impl Iterator<Item = (&Name, ModuleId)> + '_ {
- self.data.extern_prelude.iter().map(|(name, def)| (name, *def))
+ self.data.extern_prelude.iter().map(|(name, &def)| (name, def.into()))
}
pub(crate) fn macro_use_prelude(&self) -> impl Iterator<Item = (&Name, MacroId)> + '_ {
- self.macro_use_prelude.iter().map(|(name, def)| (name, *def))
+ self.macro_use_prelude.iter().map(|(name, &def)| (name, def))
}
pub fn module_id(&self, local_id: LocalModuleId) -> ModuleId {
@@ -431,8 +438,8 @@ impl DefMap {
ModuleId { krate: self.krate, local_id, block }
}
- pub(crate) fn crate_root(&self) -> ModuleId {
- ModuleId { krate: self.krate, block: None, local_id: DefMap::ROOT }
+ pub fn crate_root(&self) -> CrateRootModuleId {
+ CrateRootModuleId { krate: self.krate }
}
pub(crate) fn resolve_path(
@@ -476,7 +483,7 @@ impl DefMap {
///
/// If `f` returns `Some(val)`, iteration is stopped and `Some(val)` is returned. If `f` returns
/// `None`, iteration continues.
- pub fn with_ancestor_maps<T>(
+ pub(crate) fn with_ancestor_maps<T>(
&self,
db: &dyn DefDatabase,
local_mod: LocalModuleId,
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 06542b4b1e..62fb3c7882 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -3,7 +3,7 @@
//! `DefCollector::collect` contains the fixed-point iteration loop which
//! resolves imports and expands macros.
-use std::{iter, mem};
+use std::{cmp::Ordering, iter, mem};
use base_db::{CrateId, Dependency, Edition, FileId};
use cfg::{CfgExpr, CfgOptions};
@@ -51,11 +51,11 @@ use crate::{
per_ns::PerNs,
tt,
visibility::{RawVisibility, Visibility},
- AdtId, AstId, AstIdWithPath, ConstLoc, EnumLoc, EnumVariantId, ExternBlockLoc, FunctionId,
- FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId, Macro2Id, Macro2Loc,
- MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, ModuleDefId, ModuleId, ProcMacroId,
- ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc,
- UnresolvedMacro,
+ AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantId,
+ ExternBlockLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId,
+ Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, ModuleDefId,
+ ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc, TraitLoc,
+ TypeAliasLoc, UnionLoc, UnresolvedMacro,
};
static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
@@ -274,8 +274,6 @@ impl DefCollector<'_> {
let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id;
let item_tree = self.db.file_item_tree(file_id.into());
- let module_id = DefMap::ROOT;
-
let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
@@ -285,10 +283,9 @@ impl DefCollector<'_> {
for (name, dep) in &self.deps {
if dep.is_prelude() {
- crate_data.extern_prelude.insert(
- name.clone(),
- ModuleId { krate: dep.crate_id, block: None, local_id: DefMap::ROOT },
- );
+ crate_data
+ .extern_prelude
+ .insert(name.clone(), CrateRootModuleId { krate: dep.crate_id });
}
}
@@ -374,7 +371,7 @@ impl DefCollector<'_> {
ModCollector {
def_collector: self,
macro_depth: 0,
- module_id,
+ module_id: DefMap::ROOT,
tree_id: TreeId::new(file_id.into(), None),
item_tree: &item_tree,
mod_dir: ModDir::root(),
@@ -384,8 +381,6 @@ impl DefCollector<'_> {
fn seed_with_inner(&mut self, tree_id: TreeId) {
let item_tree = tree_id.item_tree(self.db);
- let module_id = DefMap::ROOT;
-
let is_cfg_enabled = item_tree
.top_level_attrs(self.db, self.def_map.krate)
.cfg()
@@ -394,7 +389,7 @@ impl DefCollector<'_> {
ModCollector {
def_collector: self,
macro_depth: 0,
- module_id,
+ module_id: DefMap::ROOT,
tree_id,
item_tree: &item_tree,
mod_dir: ModDir::root(),
@@ -604,8 +599,6 @@ impl DefCollector<'_> {
if self.def_map.block.is_some() {
return;
}
- let crate_root = self.def_map.module_id(DefMap::ROOT);
-
let kind = def.kind.to_basedb_kind();
let (expander, kind) =
match self.proc_macros.as_ref().map(|it| it.iter().find(|(n, _)| n == &def.name)) {
@@ -614,7 +607,8 @@ impl DefCollector<'_> {
};
let proc_macro_id =
- ProcMacroLoc { container: crate_root, id, expander, kind }.intern(self.db);
+ ProcMacroLoc { container: self.def_map.crate_root(), id, expander, kind }
+ .intern(self.db);
self.define_proc_macro(def.name.clone(), proc_macro_id);
let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
if let ProcMacroKind::CustomDerive { helpers } = def.kind {
@@ -831,16 +825,12 @@ impl DefCollector<'_> {
}
}
- fn resolve_extern_crate(&self, name: &Name) -> Option<ModuleId> {
+ fn resolve_extern_crate(&self, name: &Name) -> Option<CrateRootModuleId> {
if *name == name!(self) {
cov_mark::hit!(extern_crate_self_as);
Some(self.def_map.crate_root())
} else {
- self.deps.get(name).map(|dep| ModuleId {
- krate: dep.crate_id,
- block: None,
- local_id: DefMap::ROOT,
- })
+ self.deps.get(name).map(|dep| CrateRootModuleId { krate: dep.crate_id })
}
}
@@ -883,10 +873,12 @@ impl DefCollector<'_> {
{
if let (Some(ModuleDefId::ModuleId(def)), Some(name)) = (def.take_types(), name)
{
- Arc::get_mut(&mut self.def_map.data)
- .unwrap()
- .extern_prelude
- .insert(name.clone(), def);
+ if let Ok(def) = def.try_into() {
+ Arc::get_mut(&mut self.def_map.data)
+ .unwrap()
+ .extern_prelude
+ .insert(name.clone(), def);
+ }
}
}
@@ -1791,13 +1783,11 @@ impl ModCollector<'_, '_> {
let target_crate =
match self.def_collector.resolve_extern_crate(&self.item_tree[extern_crate].name) {
- Some(m) => {
- if m == self.def_collector.def_map.module_id(self.module_id) {
- cov_mark::hit!(ignore_macro_use_extern_crate_self);
- return;
- }
- m.krate
+ Some(m) if m.krate == self.def_collector.def_map.krate => {
+ cov_mark::hit!(ignore_macro_use_extern_crate_self);
+ return;
}
+ Some(m) => m.krate,
None => return,
};
@@ -1938,9 +1928,13 @@ impl ModCollector<'_, '_> {
let modules = &mut def_map.modules;
let res = modules.alloc(ModuleData::new(origin, vis));
modules[res].parent = Some(self.module_id);
- for (name, mac) in modules[self.module_id].scope.collect_legacy_macros() {
- for &mac in &mac {
- modules[res].scope.define_legacy_macro(name.clone(), mac);
+
+ if let Some((target, source)) = Self::borrow_modules(modules.as_mut(), res, self.module_id)
+ {
+ for (name, macs) in source.scope.legacy_macros() {
+ for &mac in macs {
+ target.scope.define_legacy_macro(name.clone(), mac);
+ }
}
}
modules[self.module_id].children.insert(name.clone(), res);
@@ -2236,14 +2230,40 @@ impl ModCollector<'_, '_> {
}
fn import_all_legacy_macros(&mut self, module_id: LocalModuleId) {
- let macros = self.def_collector.def_map[module_id].scope.collect_legacy_macros();
- for (name, macs) in macros {
+ let Some((source, target)) = Self::borrow_modules(self.def_collector.def_map.modules.as_mut(), module_id, self.module_id) else {
+ return
+ };
+
+ for (name, macs) in source.scope.legacy_macros() {
macs.last().map(|&mac| {
- self.def_collector.define_legacy_macro(self.module_id, name.clone(), mac)
+ target.scope.define_legacy_macro(name.clone(), mac);
});
}
}
+ /// Mutably borrow two modules at once, retu
+ fn borrow_modules(
+ modules: &mut [ModuleData],
+ a: LocalModuleId,
+ b: LocalModuleId,
+ ) -> Option<(&mut ModuleData, &mut ModuleData)> {
+ let a = a.into_raw().into_u32() as usize;
+ let b = b.into_raw().into_u32() as usize;
+
+ let (a, b) = match a.cmp(&b) {
+ Ordering::Equal => return None,
+ Ordering::Less => {
+ let (prefix, b) = modules.split_at_mut(b);
+ (&mut prefix[a], &mut b[0])
+ }
+ Ordering::Greater => {
+ let (prefix, a) = modules.split_at_mut(a);
+ (&mut a[0], &mut prefix[b])
+ }
+ };
+ Some((a, b))
+ }
+
fn is_cfg_enabled(&self, cfg: &CfgExpr) -> bool {
self.def_collector.cfg_options.check(cfg) != Some(false)
}
diff --git a/crates/hir-def/src/path.rs b/crates/hir-def/src/path.rs
index b9b8082549..ff4ae69546 100644
--- a/crates/hir-def/src/path.rs
+++ b/crates/hir-def/src/path.rs
@@ -9,7 +9,7 @@ use std::{
use crate::{
lang_item::LangItemTarget,
lower::LowerCtx,
- type_ref::{ConstRefOrPath, LifetimeRef, TypeBound, TypeRef},
+ type_ref::{ConstRef, LifetimeRef, TypeBound, TypeRef},
};
use hir_expand::name::Name;
use intern::Interned;
@@ -90,7 +90,7 @@ pub struct AssociatedTypeBinding {
pub enum GenericArg {
Type(TypeRef),
Lifetime(LifetimeRef),
- Const(ConstRefOrPath),
+ Const(ConstRef),
}
impl Path {
diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs
index 26d2706175..1cb17ff0d2 100644
--- a/crates/hir-def/src/path/lower.rs
+++ b/crates/hir-def/src/path/lower.rs
@@ -2,7 +2,7 @@
use std::iter;
-use crate::{lower::LowerCtx, type_ref::ConstRefOrPath};
+use crate::{lower::LowerCtx, type_ref::ConstRef};
use either::Either;
use hir_expand::name::{name, AsName};
@@ -217,7 +217,7 @@ pub(super) fn lower_generic_args(
}
}
ast::GenericArg::ConstArg(arg) => {
- let arg = ConstRefOrPath::from_expr_opt(arg.expr());
+ let arg = ConstRef::from_const_arg(lower_ctx, Some(arg));
args.push(GenericArg::Const(arg))
}
}
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index 06f5b2526a..0d6f55411c 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -21,11 +21,11 @@ use crate::{
path::{ModPath, Path, PathKind},
per_ns::PerNs,
visibility::{RawVisibility, Visibility},
- AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId,
- FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
- LocalModuleId, Lookup, Macro2Id, MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId,
- StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId,
- VariantId,
+ AdtId, AssocItemId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId,
+ EnumVariantId, ExternBlockId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId,
+ ItemContainerId, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId, MacroRulesId,
+ ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
+ TypeOrConstParamId, TypeOwnerId, TypeParamId, VariantId,
};
#[derive(Debug, Clone)]
@@ -946,6 +946,15 @@ impl HasResolver for ModuleId {
}
}
+impl HasResolver for CrateRootModuleId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ Resolver {
+ scopes: vec![],
+ module_scope: ModuleItemMap { def_map: self.def_map(db), module_id: DefMap::ROOT },
+ }
+ }
+}
+
impl HasResolver for TraitId {
fn resolver(self, db: &dyn DefDatabase) -> Resolver {
self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
@@ -1009,6 +1018,24 @@ impl HasResolver for ExternBlockId {
}
}
+impl HasResolver for TypeOwnerId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ match self {
+ TypeOwnerId::FunctionId(x) => x.resolver(db),
+ TypeOwnerId::StaticId(x) => x.resolver(db),
+ TypeOwnerId::ConstId(x) => x.resolver(db),
+ TypeOwnerId::InTypeConstId(x) => x.lookup(db).owner.resolver(db),
+ TypeOwnerId::AdtId(x) => x.resolver(db),
+ TypeOwnerId::TraitId(x) => x.resolver(db),
+ TypeOwnerId::TraitAliasId(x) => x.resolver(db),
+ TypeOwnerId::TypeAliasId(x) => x.resolver(db),
+ TypeOwnerId::ImplId(x) => x.resolver(db),
+ TypeOwnerId::EnumVariantId(x) => x.resolver(db),
+ TypeOwnerId::ModuleId(x) => x.resolver(db),
+ }
+ }
+}
+
impl HasResolver for DefWithBodyId {
fn resolver(self, db: &dyn DefDatabase) -> Resolver {
match self {
@@ -1016,6 +1043,7 @@ impl HasResolver for DefWithBodyId {
DefWithBodyId::FunctionId(f) => f.resolver(db),
DefWithBodyId::StaticId(s) => s.resolver(db),
DefWithBodyId::VariantId(v) => v.parent.resolver(db),
+ DefWithBodyId::InTypeConstId(c) => c.lookup(db).owner.resolver(db),
}
}
}
diff --git a/crates/hir-expand/src/ast_id_map.rs b/crates/hir-expand/src/ast_id_map.rs
index 400442de94..c2b0d5985e 100644
--- a/crates/hir-expand/src/ast_id_map.rs
+++ b/crates/hir-expand/src/ast_id_map.rs
@@ -20,7 +20,7 @@ use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
/// `AstId` points to an AST node in a specific file.
pub struct FileAstId<N: AstNode> {
raw: ErasedFileAstId,
- _ty: PhantomData<fn() -> N>,
+ covariant: PhantomData<fn() -> N>,
}
impl<N: AstNode> Clone for FileAstId<N> {
@@ -54,7 +54,7 @@ impl<N: AstNode> FileAstId<N> {
where
N: Into<M>,
{
- FileAstId { raw: self.raw, _ty: PhantomData }
+ FileAstId { raw: self.raw, covariant: PhantomData }
}
}
@@ -98,6 +98,7 @@ impl AstIdMap {
|| ast::Variant::can_cast(kind)
|| ast::RecordField::can_cast(kind)
|| ast::TupleField::can_cast(kind)
+ || ast::ConstArg::can_cast(kind)
{
res.alloc(&it);
true
@@ -121,7 +122,7 @@ impl AstIdMap {
pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
let raw = self.erased_ast_id(item.syntax());
- FileAstId { raw, _ty: PhantomData }
+ FileAstId { raw, covariant: PhantomData }
}
pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index 0c369a18bb..4c918e55b9 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -192,14 +192,14 @@ pub enum AttrInput {
/// `#[attr = "string"]`
Literal(SmolStr),
/// `#[attr(subtree)]`
- TokenTree(tt::Subtree, mbe::TokenMap),
+ TokenTree(Box<(tt::Subtree, mbe::TokenMap)>),
}
impl fmt::Display for AttrInput {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
- AttrInput::TokenTree(subtree, _) => subtree.fmt(f),
+ AttrInput::TokenTree(tt) => tt.0.fmt(f),
}
}
}
@@ -220,7 +220,7 @@ impl Attr {
Some(Interned::new(AttrInput::Literal(value)))
} else if let Some(tt) = ast.token_tree() {
let (tree, map) = syntax_node_to_token_tree(tt.syntax());
- Some(Interned::new(AttrInput::TokenTree(tree, map)))
+ Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map)))))
} else {
None
};
@@ -256,7 +256,7 @@ impl Attr {
/// #[path(ident)]
pub fn single_ident_value(&self) -> Option<&tt::Ident> {
match self.input.as_deref()? {
- AttrInput::TokenTree(subtree, _) => match &*subtree.token_trees {
+ AttrInput::TokenTree(tt) => match &*tt.0.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
_ => None,
},
@@ -267,7 +267,7 @@ impl Attr {
/// #[path TokenTree]
pub fn token_tree_value(&self) -> Option<&Subtree> {
match self.input.as_deref()? {
- AttrInput::TokenTree(subtree, _) => Some(subtree),
+ AttrInput::TokenTree(tt) => Some(&tt.0),
_ => None,
}
}
diff --git a/crates/hir-expand/src/builtin_derive_macro.rs b/crates/hir-expand/src/builtin_derive_macro.rs
index 54706943ac..3d1e272b90 100644
--- a/crates/hir-expand/src/builtin_derive_macro.rs
+++ b/crates/hir-expand/src/builtin_derive_macro.rs
@@ -4,17 +4,16 @@ use ::tt::Ident;
use base_db::{CrateOrigin, LangCrateOrigin};
use itertools::izip;
use mbe::TokenMap;
-use std::collections::HashSet;
+use rustc_hash::FxHashSet;
use stdx::never;
use tracing::debug;
-use crate::tt::{self, TokenId};
-use syntax::{
- ast::{
- self, AstNode, FieldList, HasAttrs, HasGenericParams, HasModuleItem, HasName,
- HasTypeBounds, PathType,
- },
- match_ast,
+use crate::{
+ name::{AsName, Name},
+ tt::{self, TokenId},
+};
+use syntax::ast::{
+ self, AstNode, FieldList, HasAttrs, HasGenericParams, HasModuleItem, HasName, HasTypeBounds,
};
use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};
@@ -195,39 +194,52 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems);
let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
debug!("derive node didn't parse");
- ExpandError::Other("invalid item definition".into())
+ ExpandError::other("invalid item definition")
})?;
let item = macro_items.items().next().ok_or_else(|| {
debug!("no module item parsed");
- ExpandError::Other("no item found".into())
+ ExpandError::other("no item found")
})?;
- let node = item.syntax();
- let (name, params, shape) = match_ast! {
- match node {
- ast::Struct(it) => (it.name(), it.generic_param_list(), AdtShape::Struct(VariantShape::from(it.field_list(), &token_map)?)),
- ast::Enum(it) => {
- let default_variant = it.variant_list().into_iter().flat_map(|x| x.variants()).position(|x| x.attrs().any(|x| x.simple_name() == Some("default".into())));
- (
- it.name(),
- it.generic_param_list(),
- AdtShape::Enum {
- default_variant,
- variants: it.variant_list()
- .into_iter()
- .flat_map(|x| x.variants())
- .map(|x| Ok((name_to_token(&token_map,x.name())?, VariantShape::from(x.field_list(), &token_map)?))).collect::<Result<_, ExpandError>>()?
- }
- )
- },
- ast::Union(it) => (it.name(), it.generic_param_list(), AdtShape::Union),
- _ => {
- debug!("unexpected node is {:?}", node);
- return Err(ExpandError::Other("expected struct, enum or union".into()))
- },
+ let adt = ast::Adt::cast(item.syntax().clone()).ok_or_else(|| {
+ debug!("expected adt, found: {:?}", item);
+ ExpandError::other("expected struct, enum or union")
+ })?;
+ let (name, generic_param_list, shape) = match &adt {
+ ast::Adt::Struct(it) => (
+ it.name(),
+ it.generic_param_list(),
+ AdtShape::Struct(VariantShape::from(it.field_list(), &token_map)?),
+ ),
+ ast::Adt::Enum(it) => {
+ let default_variant = it
+ .variant_list()
+ .into_iter()
+ .flat_map(|x| x.variants())
+ .position(|x| x.attrs().any(|x| x.simple_name() == Some("default".into())));
+ (
+ it.name(),
+ it.generic_param_list(),
+ AdtShape::Enum {
+ default_variant,
+ variants: it
+ .variant_list()
+ .into_iter()
+ .flat_map(|x| x.variants())
+ .map(|x| {
+ Ok((
+ name_to_token(&token_map, x.name())?,
+ VariantShape::from(x.field_list(), &token_map)?,
+ ))
+ })
+ .collect::<Result<_, ExpandError>>()?,
+ },
+ )
}
+ ast::Adt::Union(it) => (it.name(), it.generic_param_list(), AdtShape::Union),
};
- let mut param_type_set: HashSet<String> = HashSet::new();
- let param_types = params
+
+ let mut param_type_set: FxHashSet<Name> = FxHashSet::default();
+ let param_types = generic_param_list
.into_iter()
.flat_map(|param_list| param_list.type_or_const_params())
.map(|param| {
@@ -235,7 +247,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
let this = param.name();
match this {
Some(x) => {
- param_type_set.insert(x.to_string());
+ param_type_set.insert(x.as_name());
mbe::syntax_node_to_token_tree(x.syntax()).0
}
None => tt::Subtree::empty(),
@@ -259,37 +271,33 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
(name, ty, bounds)
})
.collect();
- let is_associated_type = |p: &PathType| {
- if let Some(p) = p.path() {
- if let Some(parent) = p.qualifier() {
- if let Some(x) = parent.segment() {
- if let Some(x) = x.path_type() {
- if let Some(x) = x.path() {
- if let Some(pname) = x.as_single_name_ref() {
- if param_type_set.contains(&pname.to_string()) {
- // <T as Trait>::Assoc
- return true;
- }
- }
- }
- }
- }
- if let Some(pname) = parent.as_single_name_ref() {
- if param_type_set.contains(&pname.to_string()) {
- // T::Assoc
- return true;
- }
- }
- }
- }
- false
+
+ // For a generic parameter `T`, when shorthand associated type `T::Assoc` appears in field
+ // types (of any variant for enums), we generate trait bound for it. It sounds reasonable to
+ // also generate trait bound for qualified associated type `<T as Trait>::Assoc`, but rustc
+ // does not do that for some unknown reason.
+ //
+ // See the analogous function in rustc [find_type_parameters()] and rust-lang/rust#50730.
+ // [find_type_parameters()]: https://github.com/rust-lang/rust/blob/1.70.0/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs#L378
+
+ // It's cumbersome to deal with the distinct structures of ADTs, so let's just get untyped
+ // `SyntaxNode` that contains fields and look for descendant `ast::PathType`s. Of note is that
+ // we should not inspect `ast::PathType`s in parameter bounds and where clauses.
+ let field_list = match adt {
+ ast::Adt::Enum(it) => it.variant_list().map(|list| list.syntax().clone()),
+ ast::Adt::Struct(it) => it.field_list().map(|list| list.syntax().clone()),
+ ast::Adt::Union(it) => it.record_field_list().map(|list| list.syntax().clone()),
};
- let associated_types = node
- .descendants()
- .filter_map(PathType::cast)
- .filter(is_associated_type)
+ let associated_types = field_list
+ .into_iter()
+ .flat_map(|it| it.descendants())
+ .filter_map(ast::PathType::cast)
+ .filter_map(|p| {
+ let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
+ param_type_set.contains(&name).then_some(p)
+ })
.map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0)
- .collect::<Vec<_>>();
+ .collect();
let name_token = name_to_token(&token_map, name)?;
Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
}
@@ -297,7 +305,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Ident, ExpandError> {
let name = name.ok_or_else(|| {
debug!("parsed item has no name");
- ExpandError::Other("missing name".into())
+ ExpandError::other("missing name")
})?;
let name_token_id =
token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
@@ -334,18 +342,18 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
/// }
/// ```
///
-/// where B1, ..., BN are the bounds given by `bounds_paths`.'. Z is a phantom type, and
+/// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
/// therefore does not get bound by the derived trait.
fn expand_simple_derive(
tt: &tt::Subtree,
trait_path: tt::Subtree,
- trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
+ make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let info = match parse_adt(tt) {
Ok(info) => info,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
};
- let trait_body = trait_body(&info);
+ let trait_body = make_trait_body(&info);
let mut where_block = vec![];
let (params, args): (Vec<_>, Vec<_>) = info
.param_types
@@ -605,7 +613,7 @@ fn hash_expand(
span: tt::TokenId::unspecified(),
};
return quote! {
- fn hash<H: #krate::hash::Hasher>(&self, state: &mut H) {
+ fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
match #star self {}
}
};
@@ -613,7 +621,7 @@ fn hash_expand(
let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map(
|(pat, names)| {
let expr = {
- let it = names.iter().map(|x| quote! { #x . hash(state); });
+ let it = names.iter().map(|x| quote! { #x . hash(ra_expand_state); });
quote! { {
##it
} }
@@ -625,8 +633,8 @@ fn hash_expand(
},
);
quote! {
- fn hash<H: #krate::hash::Hasher>(&self, state: &mut H) {
- #krate::mem::discriminant(self).hash(state);
+ fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
+ #krate::mem::discriminant(self).hash(ra_expand_state);
match self {
##arms
}
diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs
index c7643bd0a1..a9f0c154b0 100644
--- a/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/crates/hir-expand/src/builtin_fn_macro.rs
@@ -14,7 +14,8 @@ use syntax::{
};
use crate::{
- db::ExpandDatabase, name, quote, tt, ExpandError, ExpandResult, MacroCallId, MacroCallLoc,
+ db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId,
+ MacroCallLoc,
};
macro_rules! register_builtin {
@@ -49,7 +50,7 @@ macro_rules! register_builtin {
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
- ) -> ExpandResult<ExpandedEager> {
+ ) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( EagerExpander::$e_kind => $e_expand, )*
};
@@ -67,16 +68,9 @@ macro_rules! register_builtin {
};
}
-#[derive(Debug)]
-pub struct ExpandedEager {
- pub(crate) subtree: tt::Subtree,
- /// The included file ID of the include macro.
- pub(crate) included_file: Option<(FileId, TokenMap)>,
-}
-
-impl ExpandedEager {
- fn new(subtree: tt::Subtree) -> Self {
- ExpandedEager { subtree, included_file: None }
+impl EagerExpander {
+ pub fn is_include(&self) -> bool {
+ matches!(self, EagerExpander::Include)
}
}
@@ -237,18 +231,16 @@ fn format_args_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
format_args_expand_general(db, id, tt, "")
- .map(|x| ExpandedEager { subtree: x, included_file: None })
}
fn format_args_nl_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
format_args_expand_general(db, id, tt, "\\n")
- .map(|x| ExpandedEager { subtree: x, included_file: None })
}
fn format_args_expand_general(
@@ -262,9 +254,6 @@ fn format_args_expand_general(
let expand_error =
ExpandResult::new(tt::Subtree::empty(), mbe::ExpandError::NoMatchingRule.into());
- if args.is_empty() {
- return expand_error;
- }
let mut key_args = FxHashMap::default();
let mut args = args.into_iter().filter_map(|mut arg| {
// Remove `key =`.
@@ -281,7 +270,9 @@ fn format_args_expand_general(
Some(arg)
}).collect::<Vec<_>>().into_iter();
// ^^^^^^^ we need this collect, to enforce the side effect of the filter_map closure (building the `key_args`)
- let format_subtree = args.next().unwrap();
+ let Some(format_subtree) = args.next() else {
+ return expand_error;
+ };
let format_string = (|| {
let token_tree = format_subtree.token_trees.get(0)?;
match token_tree {
@@ -510,23 +501,23 @@ fn compile_error_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
let err = match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {
- Some(unquoted) => ExpandError::Other(unquoted.into()),
- None => ExpandError::Other("`compile_error!` argument must be a string".into()),
+ Some(unquoted) => ExpandError::other(unquoted),
+ None => ExpandError::other("`compile_error!` argument must be a string"),
},
- _ => ExpandError::Other("`compile_error!` argument must be a string".into()),
+ _ => ExpandError::other("`compile_error!` argument must be a string"),
};
- ExpandResult { value: ExpandedEager::new(quote! {}), err: Some(err) }
+ ExpandResult { value: quote! {}, err: Some(err) }
}
fn concat_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut text = String::new();
for (i, mut t) in tt.token_trees.iter().enumerate() {
@@ -565,14 +556,14 @@ fn concat_expand(
}
}
}
- ExpandResult { value: ExpandedEager::new(quote!(#text)), err }
+ ExpandResult { value: quote!(#text), err }
}
fn concat_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
let mut bytes = Vec::new();
let mut err = None;
for (i, t) in tt.token_trees.iter().enumerate() {
@@ -605,7 +596,7 @@ fn concat_bytes_expand(
}
}
let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() };
- ExpandResult { value: ExpandedEager::new(quote!([#ident])), err }
+ ExpandResult { value: quote!([#ident]), err }
}
fn concat_bytes_expand_subtree(
@@ -638,7 +629,7 @@ fn concat_idents_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut ident = String::new();
for (i, t) in tt.token_trees.iter().enumerate() {
@@ -653,7 +644,7 @@ fn concat_idents_expand(
}
}
let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() };
- ExpandResult { value: ExpandedEager::new(quote!(#ident)), err }
+ ExpandResult { value: quote!(#ident), err }
}
fn relative_file(
@@ -666,10 +657,10 @@ fn relative_file(
let path = AnchoredPath { anchor: call_site, path: path_str };
let res = db
.resolve_path(path)
- .ok_or_else(|| ExpandError::Other(format!("failed to load file `{path_str}`").into()))?;
+ .ok_or_else(|| ExpandError::other(format!("failed to load file `{path_str}`")))?;
// Prevent include itself
if res == call_site && !allow_recursion {
- Err(ExpandError::Other(format!("recursive inclusion of `{path_str}`").into()))
+ Err(ExpandError::other(format!("recursive inclusion of `{path_str}`")))
} else {
Ok(res)
}
@@ -688,38 +679,37 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
fn include_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
- tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
- let res = (|| {
- let path = parse_string(tt)?;
- let file_id = relative_file(db, arg_id, &path, false)?;
-
- let (subtree, map) =
- parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?;
- Ok((subtree, map, file_id))
- })();
-
- match res {
- Ok((subtree, map, file_id)) => {
- ExpandResult::ok(ExpandedEager { subtree, included_file: Some((file_id, map)) })
- }
- Err(e) => ExpandResult::new(
- ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
- e,
- ),
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ match db.include_expand(arg_id) {
+ Ok((res, _)) => ExpandResult::ok(res.0.clone()),
+ Err(e) => ExpandResult::new(tt::Subtree::empty(), e),
}
}
+pub(crate) fn include_arg_to_tt(
+ db: &dyn ExpandDatabase,
+ arg_id: MacroCallId,
+) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
+ let loc = db.lookup_intern_macro_call(arg_id);
+ let Some(EagerCallInfo {arg, arg_id: Some(arg_id), .. }) = loc.eager.as_deref() else {
+ panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
+ };
+ let path = parse_string(&arg.0)?;
+ let file_id = relative_file(db, *arg_id, &path, false)?;
+
+ let (subtree, map) =
+ parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?;
+ Ok((triomphe::Arc::new((subtree, map)), file_id))
+}
+
fn include_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
if let Err(e) = parse_string(tt) {
- return ExpandResult::new(
- ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
- e,
- );
+ return ExpandResult::new(tt::Subtree::empty(), e);
}
// FIXME: actually read the file here if the user asked for macro expansion
@@ -730,22 +720,17 @@ fn include_bytes_expand(
span: tt::TokenId::unspecified(),
}))],
};
- ExpandResult::ok(ExpandedEager::new(res))
+ ExpandResult::ok(res)
}
fn include_str_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
let path = match parse_string(tt) {
Ok(it) => it,
- Err(e) => {
- return ExpandResult::new(
- ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
- e,
- )
- }
+ Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
};
// FIXME: we're not able to read excluded files (which is most of them because
@@ -755,14 +740,14 @@ fn include_str_expand(
let file_id = match relative_file(db, arg_id, &path, true) {
Ok(file_id) => file_id,
Err(_) => {
- return ExpandResult::ok(ExpandedEager::new(quote!("")));
+ return ExpandResult::ok(quote!(""));
}
};
let text = db.file_text(file_id);
let text = &*text;
- ExpandResult::ok(ExpandedEager::new(quote!(#text)))
+ ExpandResult::ok(quote!(#text))
}
fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
@@ -774,15 +759,10 @@ fn env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
- Err(e) => {
- return ExpandResult::new(
- ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
- e,
- )
- }
+ Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
};
let mut err = None;
@@ -790,35 +770,28 @@ fn env_expand(
// The only variable rust-analyzer ever sets is `OUT_DIR`, so only diagnose that to avoid
// unnecessary diagnostics for eg. `CARGO_PKG_NAME`.
if key == "OUT_DIR" {
- err = Some(ExpandError::Other(
- r#"`OUT_DIR` not set, enable "build scripts" to fix"#.into(),
- ));
+ err = Some(ExpandError::other(r#"`OUT_DIR` not set, enable "build scripts" to fix"#));
}
// If the variable is unset, still return a dummy string to help type inference along.
// We cannot use an empty string here, because for
// `include!(concat!(env!("OUT_DIR"), "/foo.rs"))` will become
// `include!("foo.rs"), which might go to infinite loop
- "__RA_UNIMPLEMENTED__".to_string()
+ "UNRESOLVED_ENV_VAR".to_string()
});
let expanded = quote! { #s };
- ExpandResult { value: ExpandedEager::new(expanded), err }
+ ExpandResult { value: expanded, err }
}
fn option_env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
- Err(e) => {
- return ExpandResult::new(
- ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
- e,
- )
- }
+ Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
};
// FIXME: Use `DOLLAR_CRATE` when that works in eager macros.
let expanded = match get_env_inner(db, arg_id, &key) {
@@ -826,5 +799,5 @@ fn option_env_expand(
Some(s) => quote! { ::core::option::Option::Some(#s) },
};
- ExpandResult::ok(ExpandedEager::new(expanded))
+ ExpandResult::ok(expanded)
}
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 965dfa824d..78b2db7306 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -14,9 +14,9 @@ use triomphe::Arc;
use crate::{
ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, BuiltinAttrExpander,
- BuiltinDeriveExpander, BuiltinFnLikeExpander, ExpandError, ExpandResult, ExpandTo, HirFileId,
- HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile,
- ProcMacroExpander,
+ BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
+ ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
+ MacroDefKind, MacroFile, ProcMacroExpander,
};
/// Total limit on the number of tokens produced by any macro invocation.
@@ -53,9 +53,7 @@ impl TokenExpander {
match self {
TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt).map_err(Into::into),
TokenExpander::Builtin(it) => it.expand(db, id, tt).map_err(Into::into),
- TokenExpander::BuiltinEager(it) => {
- it.expand(db, id, tt).map_err(Into::into).map(|res| res.subtree)
- }
+ TokenExpander::BuiltinEager(it) => it.expand(db, id, tt).map_err(Into::into),
TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt),
TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
TokenExpander::ProcMacro(_) => {
@@ -132,6 +130,14 @@ pub trait ExpandDatabase: SourceDatabase {
/// Expand macro call to a token tree.
// This query is LRU cached
fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
+ #[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)]
+ fn include_expand(
+ &self,
+ arg_id: MacroCallId,
+ ) -> Result<
+ (triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId),
+ ExpandError,
+ >;
/// Special case of the previous query for procedural macros. We can't LRU
/// proc macros, since they are not deterministic in general, and
/// non-determinism breaks salsa in a very, very, very bad way.
@@ -281,31 +287,6 @@ fn parse_macro_expansion(
let _p = profile::span("parse_macro_expansion");
let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);
- if let Some(err) = &err {
- if tracing::enabled!(tracing::Level::DEBUG) {
- // Note:
- // The final goal we would like to make all parse_macro success,
- // such that the following log will not call anyway.
- let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- let node = loc.to_node(db);
-
- // collect parent information for warning log
- let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| {
- it.file_id.call_node(db)
- })
- .map(|n| format!("{:#}", n.value))
- .collect::<Vec<_>>()
- .join("\n");
-
- tracing::debug!(
- "fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}",
- err,
- node.value,
- parents
- );
- }
- }
-
let expand_to = macro_expand_to(db, macro_file.macro_call_id);
tracing::debug!("expanded = {}", tt.as_debug_string());
@@ -320,9 +301,14 @@ fn macro_arg(
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
- let arg = db.macro_arg_text(id)?;
let loc = db.lookup_intern_macro_call(id);
+ if let Some(EagerCallInfo { arg, arg_id: Some(_), error: _ }) = loc.eager.as_deref() {
+ return Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default())));
+ }
+
+ let arg = db.macro_arg_text(id)?;
+
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
let mut fixups = fixup::fixup_syntax(&node);
@@ -398,7 +384,17 @@ fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode>
return None;
}
}
- Some(arg.green().into())
+ if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
+ Some(
+ mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr)
+ .0
+ .syntax_node()
+ .green()
+ .into(),
+ )
+ } else {
+ Some(arg.green().into())
+ }
}
fn macro_def(
@@ -445,23 +441,21 @@ fn macro_def(
fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let _p = profile::span("macro_expand");
let loc = db.lookup_intern_macro_call(id);
- if let Some(eager) = &loc.eager {
- return ExpandResult { value: eager.arg_or_expansion.clone(), err: eager.error.clone() };
+ if let Some(EagerCallInfo { arg, arg_id: None, error }) = loc.eager.as_deref() {
+ // This is an input expansion for an eager macro. These are already pre-expanded
+ return ExpandResult { value: Arc::new(arg.0.clone()), err: error.clone() };
}
-
let expander = match db.macro_def(loc.def) {
Ok(it) => it,
- // FIXME: This is weird -- we effectively report macro *definition*
- // errors lazily, when we try to expand the macro. Instead, they should
- // be reported at the definition site when we construct a def map.
- // (Note we do report them also at the definition site in the late diagnostic pass)
+ // FIXME: We should make sure to enforce a variant that invalid macro
+ // definitions do not get expanders that could reach this call path!
Err(err) => {
return ExpandResult {
value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
token_trees: vec![],
}),
- err: Some(ExpandError::Other(format!("invalid macro definition: {err}").into())),
+ err: Some(ExpandError::other(format!("invalid macro definition: {err}"))),
}
}
};
@@ -473,13 +467,21 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
token_trees: Vec::new(),
},
),
- err: Some(ExpandError::Other(
+ // FIXME: We should make sure to enforce a variant that invalid macro
+ // calls do not reach this call path!
+ err: Some(ExpandError::other(
"invalid token tree"
- .into(),
)),
};
};
- let ExpandResult { value: mut tt, err } = expander.expand(db, id, &macro_arg.0);
+ let (arg_tt, arg_tm, undo_info) = &*macro_arg;
+ let ExpandResult { value: mut tt, mut err } = expander.expand(db, id, arg_tt);
+
+ if let Some(EagerCallInfo { error, .. }) = loc.eager.as_deref() {
+ // FIXME: We should report both errors!
+ err = error.clone().or(err);
+ }
+
// Set a hard limit for the expanded tt
let count = tt.count();
if TOKEN_LIMIT.check(count).is_err() {
@@ -488,18 +490,15 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
delimiter: tt::Delimiter::UNSPECIFIED,
token_trees: vec![],
}),
- err: Some(ExpandError::Other(
- format!(
- "macro invocation exceeds token limit: produced {} tokens, limit is {}",
- count,
- TOKEN_LIMIT.inner(),
- )
- .into(),
- )),
+ err: Some(ExpandError::other(format!(
+ "macro invocation exceeds token limit: produced {} tokens, limit is {}",
+ count,
+ TOKEN_LIMIT.inner(),
+ ))),
};
}
- fixup::reverse_fixups(&mut tt, &macro_arg.1, &macro_arg.2);
+ fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
ExpandResult { value: Arc::new(tt), err }
}
@@ -520,9 +519,8 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<t
delimiter: tt::Delimiter::UNSPECIFIED,
token_trees: Vec::new(),
},
- err: Some(ExpandError::Other(
+ err: Some(ExpandError::other(
"invalid token tree"
- .into(),
)),
};
};
diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs
index 59a92ff0ab..7ee3fd375f 100644
--- a/crates/hir-expand/src/eager.rs
+++ b/crates/hir-expand/src/eager.rs
@@ -31,22 +31,24 @@ use crate::{
MacroCallLoc, MacroDefId, MacroDefKind, UnresolvedMacro,
};
-pub fn expand_eager_macro(
+pub fn expand_eager_macro_input(
db: &dyn ExpandDatabase,
krate: CrateId,
macro_call: InFile<ast::MacroCall>,
def: MacroDefId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
- let MacroDefKind::BuiltInEager(eager, _) = def.kind else {
- panic!("called `expand_eager_macro` on non-eager macro def {def:?}")
+ assert!(matches!(def.kind, MacroDefKind::BuiltInEager(..)));
+ let token_tree = macro_call.value.token_tree();
+
+ let Some(token_tree) = token_tree else {
+ return Ok(ExpandResult { value: None, err:
+ Some(ExpandError::other(
+ "invalid token tree"
+ )),
+ });
};
- let hygiene = Hygiene::new(db, macro_call.file_id);
- let parsed_args = macro_call
- .value
- .token_tree()
- .map(|tt| mbe::syntax_node_to_token_tree(tt.syntax()).0)
- .unwrap_or_else(tt::Subtree::empty);
+ let (parsed_args, arg_token_map) = mbe::syntax_node_to_token_tree(token_tree.syntax());
let ast_map = db.ast_id_map(macro_call.file_id);
let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));
@@ -60,41 +62,40 @@ pub fn expand_eager_macro(
def,
krate,
eager: Some(Box::new(EagerCallInfo {
- arg_or_expansion: Arc::new(parsed_args.clone()),
- included_file: None,
+ arg: Arc::new((parsed_args, arg_token_map)),
+ arg_id: None,
error: None,
})),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
});
-
- let parsed_args = mbe::token_tree_to_syntax_node(&parsed_args, mbe::TopEntryPoint::Expr).0;
- let ExpandResult { value, mut err } = eager_macro_recur(
+ let arg_as_expr = match db.macro_arg_text(arg_id) {
+ Some(it) => it,
+ None => {
+ return Ok(ExpandResult {
+ value: None,
+ err: Some(ExpandError::other("invalid token tree")),
+ })
+ }
+ };
+ let ExpandResult { value: expanded_eager_input, err } = eager_macro_recur(
db,
- &hygiene,
- InFile::new(arg_id.as_file(), parsed_args.syntax_node()),
+ &Hygiene::new(db, macro_call.file_id),
+ InFile::new(arg_id.as_file(), SyntaxNode::new_root(arg_as_expr)),
krate,
resolver,
)?;
- let Some(value ) = value else {
+ let Some(expanded_eager_input) = expanded_eager_input else {
return Ok(ExpandResult { value: None, err })
};
- let subtree = {
- let mut subtree = mbe::syntax_node_to_token_tree(&value).0;
- subtree.delimiter = crate::tt::Delimiter::unspecified();
- subtree
- };
-
- let res = eager.expand(db, arg_id, &subtree);
- if err.is_none() {
- err = res.err;
- }
+ let (mut subtree, token_map) = mbe::syntax_node_to_token_tree(&expanded_eager_input);
+ subtree.delimiter = crate::tt::Delimiter::unspecified();
let loc = MacroCallLoc {
def,
krate,
eager: Some(Box::new(EagerCallInfo {
- arg_or_expansion: Arc::new(res.value.subtree),
- included_file: res.value.included_file,
+ arg: Arc::new((subtree, token_map)),
+ arg_id: Some(arg_id),
error: err.clone(),
})),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
@@ -118,8 +119,9 @@ fn lazy_expand(
MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), expand_to },
);
- let file_id = id.as_file();
- db.parse_or_expand_with_err(file_id).map(|parse| InFile::new(file_id, parse))
+ let macro_file = id.as_macro_file();
+
+ db.parse_macro_expansion(macro_file).map(|parse| InFile::new(macro_file.into(), parse.0))
}
fn eager_macro_recur(
@@ -142,13 +144,13 @@ fn eager_macro_recur(
let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?,
None => {
- error = Some(ExpandError::Other("malformed macro invocation".into()));
+ error = Some(ExpandError::other("malformed macro invocation"));
continue;
}
};
let ExpandResult { value, err } = match def.kind {
MacroDefKind::BuiltInEager(..) => {
- let id = match expand_eager_macro(
+ let ExpandResult { value, err } = match expand_eager_macro_input(
db,
krate,
curr.with_value(child.clone()),
@@ -158,9 +160,17 @@ fn eager_macro_recur(
Ok(it) => it,
Err(err) => return Err(err),
};
- id.map(|call| {
- call.map(|call| db.parse_or_expand(call.as_file()).clone_for_update())
- })
+ match value {
+ Some(call) => {
+ let ExpandResult { value, err: err2 } =
+ db.parse_macro_expansion(call.as_macro_file());
+ ExpandResult {
+ value: Some(value.0.syntax_node().clone_for_update()),
+ err: err.or(err2),
+ }
+ }
+ None => ExpandResult { value: None, err },
+ }
}
MacroDefKind::Declarative(_)
| MacroDefKind::BuiltIn(..)
@@ -180,7 +190,7 @@ fn eager_macro_recur(
krate,
macro_resolver,
)?;
- let err = if err.is_none() { error } else { err };
+ let err = err.or(error);
ExpandResult { value, err }
}
};
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index c8373778d3..e0c199328e 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -58,7 +58,13 @@ pub enum ExpandError {
UnresolvedProcMacro(CrateId),
Mbe(mbe::ExpandError),
RecursionOverflowPoisoned,
- Other(Box<str>),
+ Other(Box<Box<str>>),
+}
+
+impl ExpandError {
+ pub fn other(msg: impl Into<Box<str>>) -> Self {
+ ExpandError::Other(Box::new(msg.into()))
+ }
}
impl From<mbe::ExpandError> for ExpandError {
@@ -97,9 +103,15 @@ impl fmt::Display for ExpandError {
/// The two variants are encoded in a single u32 which are differentiated by the MSB.
/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
/// `MacroCallId`.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct HirFileId(u32);
+impl fmt::Debug for HirFileId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.repr().fmt(f)
+ }
+}
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroFile {
pub macro_call_id: MacroCallId,
@@ -115,6 +127,7 @@ impl_intern_key!(MacroCallId);
pub struct MacroCallLoc {
pub def: MacroDefId,
pub(crate) krate: CrateId,
+ /// Some if `def` is a builtin eager macro.
eager: Option<Box<EagerCallInfo>>,
pub kind: MacroCallKind,
}
@@ -140,8 +153,10 @@ pub enum MacroDefKind {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct EagerCallInfo {
/// NOTE: This can be *either* the expansion result, *or* the argument to the eager macro!
- arg_or_expansion: Arc<tt::Subtree>,
- included_file: Option<(FileId, TokenMap)>,
+ arg: Arc<(tt::Subtree, TokenMap)>,
+ /// call id of the eager macro's input file. If this is none, macro call containing this call info
+ /// is an eager macro's input, otherwise it is its output.
+ arg_id: Option<MacroCallId>,
error: Option<ExpandError>,
}
@@ -206,10 +221,15 @@ impl HirFileId {
HirFileIdRepr::FileId(id) => break id,
HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
- file_id = match loc.eager.as_deref() {
- Some(&EagerCallInfo { included_file: Some((file, _)), .. }) => file.into(),
+ let is_include_expansion = loc.def.is_include()
+ && matches!(
+ loc.eager.as_deref(),
+ Some(EagerCallInfo { arg_id: Some(_), .. })
+ );
+ file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) {
+ Some(Ok((_, file))) => file.into(),
_ => loc.kind.file_id(),
- };
+ }
}
}
}
@@ -325,7 +345,17 @@ impl HirFileId {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- matches!(loc.eager.as_deref(), Some(EagerCallInfo { included_file: Some(..), .. }))
+ loc.def.is_include()
+ }
+ _ => false,
+ }
+ }
+
+ pub fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool {
+ match self.macro_file() {
+ Some(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.eager.as_deref(), Some(EagerCallInfo { .. }))
}
_ => false,
}
@@ -423,6 +453,10 @@ impl MacroDefId {
pub fn is_attribute_derive(&self) -> bool {
matches!(self.kind, MacroDefKind::BuiltInAttr(expander, ..) if expander.is_derive())
}
+
+ pub fn is_include(&self) -> bool {
+ matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_include())
+ }
}
impl MacroCallLoc {
@@ -569,6 +603,10 @@ impl MacroCallId {
pub fn as_file(self) -> HirFileId {
MacroFile { macro_call_id: self }.into()
}
+
+ pub fn as_macro_file(self) -> MacroFile {
+ MacroFile { macro_call_id: self }
+ }
}
/// ExpansionInfo mainly describes how to map text range between src and expanded macro
@@ -662,7 +700,7 @@ impl ExpansionInfo {
let token_id = match token_id_in_attr_input {
Some(token_id) => token_id,
- // the token is not inside an attribute's input so do the lookup in the macro_arg as usual
+ // the token is not inside `an attribute's input so do the lookup in the macro_arg as usual
None => {
let relative_range =
token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
@@ -694,14 +732,18 @@ impl ExpansionInfo {
let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
let loc = db.lookup_intern_macro_call(call_id);
- if let Some((file, map)) = loc.eager.and_then(|e| e.included_file) {
- // Special case: map tokens from `include!` expansions to the included file
- let range = map.first_range_by_token(token_id, token.value.kind())?;
- let source = db.parse(file);
+ // Special case: map tokens from `include!` expansions to the included file
+ if loc.def.is_include()
+ && matches!(loc.eager.as_deref(), Some(EagerCallInfo { arg_id: Some(_), .. }))
+ {
+ if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) {
+ let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?;
+ let source = db.parse(file_id);
- let token = source.syntax_node().covering_element(range).into_token()?;
+ let token = source.syntax_node().covering_element(range).into_token()?;
- return Some((InFile::new(file.into(), token), Origin::Call));
+ return Some((InFile::new(file_id.into(), token), Origin::Call));
+ }
}
// Attributes are a bit special for us, they have two inputs, the input tokentree and the annotated item.
diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs
index c9539210ab..41675c630d 100644
--- a/crates/hir-expand/src/proc_macro.rs
+++ b/crates/hir-expand/src/proc_macro.rs
@@ -46,7 +46,7 @@ impl ProcMacroExpander {
never!("Non-dummy expander even though there are no proc macros");
return ExpandResult::new(
tt::Subtree::empty(),
- ExpandError::Other("Internal error".into()),
+ ExpandError::other("Internal error"),
);
}
};
@@ -60,7 +60,7 @@ impl ProcMacroExpander {
);
return ExpandResult::new(
tt::Subtree::empty(),
- ExpandError::Other("Internal error".into()),
+ ExpandError::other("Internal error"),
);
}
};
@@ -75,14 +75,11 @@ impl ProcMacroExpander {
ProcMacroExpansionError::System(text)
if proc_macro.kind == ProcMacroKind::Attr =>
{
- ExpandResult {
- value: tt.clone(),
- err: Some(ExpandError::Other(text.into())),
- }
+ ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) }
}
ProcMacroExpansionError::System(text)
| ProcMacroExpansionError::Panic(text) => {
- ExpandResult::new(tt::Subtree::empty(), ExpandError::Other(text.into()))
+ ExpandResult::new(tt::Subtree::empty(), ExpandError::other(text))
}
},
}
diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml
index 6ca0dbb850..c8bea34507 100644
--- a/crates/hir-ty/Cargo.toml
+++ b/crates/hir-ty/Cargo.toml
@@ -22,10 +22,10 @@ either = "1.7.0"
tracing = "0.1.35"
rustc-hash = "1.1.0"
scoped-tls = "1.0.0"
-chalk-solve = { version = "0.89.0", default-features = false }
-chalk-ir = "0.89.0"
-chalk-recursive = { version = "0.89.0", default-features = false }
-chalk-derive = "0.89.0"
+chalk-solve = { version = "0.91.0", default-features = false }
+chalk-ir = "0.91.0"
+chalk-recursive = { version = "0.91.0", default-features = false }
+chalk-derive = "0.91.0"
la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
once_cell = "1.17.0"
triomphe.workspace = true
diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs
index f5b3f176b1..3860bccec8 100644
--- a/crates/hir-ty/src/autoderef.rs
+++ b/crates/hir-ty/src/autoderef.rs
@@ -22,17 +22,37 @@ pub(crate) enum AutoderefKind {
Overloaded,
}
+/// Returns types that `ty` transitively dereferences to. This function is only meant to be used
+/// outside `hir-ty`.
+///
+/// It is guaranteed that:
+/// - the yielded types don't contain inference variables (but may contain `TyKind::Error`).
+/// - a type won't be yielded more than once; in other words, the returned iterator will stop if it
+/// detects a cycle in the deref chain.
pub fn autoderef(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
ty: Canonical<Ty>,
-) -> impl Iterator<Item = Canonical<Ty>> + '_ {
+) -> impl Iterator<Item = Ty> {
let mut table = InferenceTable::new(db, env);
let ty = table.instantiate_canonical(ty);
let mut autoderef = Autoderef::new(&mut table, ty);
let mut v = Vec::new();
while let Some((ty, _steps)) = autoderef.next() {
- v.push(autoderef.table.canonicalize(ty).value);
+ // `ty` may contain unresolved inference variables. Since there's no chance they would be
+ // resolved, just replace with fallback type.
+ let resolved = autoderef.table.resolve_completely(ty);
+
+ // If the deref chain contains a cycle (e.g. `A` derefs to `B` and `B` derefs to `A`), we
+ // would revisit some already visited types. Stop here to avoid duplication.
+ //
+ // XXX: The recursion limit for `Autoderef` is currently 10, so `Vec::contains()` shouldn't
+ // be too expensive. Replace this duplicate check with `FxHashSet` if it proves to be more
+ // performant.
+ if v.contains(&resolved) {
+ break;
+ }
+ v.push(resolved);
}
v.into_iter()
}
diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs
index ac962c9e3e..5dd8e2719a 100644
--- a/crates/hir-ty/src/chalk_db.rs
+++ b/crates/hir-ty/src/chalk_db.rs
@@ -497,7 +497,7 @@ pub(crate) fn associated_ty_data_query(
let generic_params = generics(db.upcast(), type_alias.into());
// let bound_vars = generic_params.bound_vars_subst(DebruijnIndex::INNERMOST);
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
- let ctx = crate::TyLoweringContext::new(db, &resolver)
+ let ctx = crate::TyLoweringContext::new(db, &resolver, type_alias.into())
.with_type_param_mode(crate::lower::ParamLoweringMode::Variable);
let trait_subst = TyBuilder::subst_for_def(db, trait_, None)
@@ -592,6 +592,7 @@ fn well_known_trait_from_lang_item(item: LangItem) -> Option<WellKnownTrait> {
LangItem::Unpin => WellKnownTrait::Unpin,
LangItem::Unsize => WellKnownTrait::Unsize,
LangItem::Tuple => WellKnownTrait::Tuple,
+ LangItem::PointeeTrait => WellKnownTrait::Pointee,
_ => return None,
})
}
@@ -612,6 +613,7 @@ fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem {
WellKnownTrait::Tuple => LangItem::Tuple,
WellKnownTrait::Unpin => LangItem::Unpin,
WellKnownTrait::Unsize => LangItem::Unsize,
+ WellKnownTrait::Pointee => LangItem::PointeeTrait,
}
}
diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index 40b63b17b5..262341c6e9 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -6,8 +6,8 @@ use hir_def::{
hir::Expr,
path::Path,
resolver::{Resolver, ValueNs},
- type_ref::ConstRef,
- EnumVariantId, GeneralConstId, StaticId,
+ type_ref::LiteralConstRef,
+ ConstBlockLoc, EnumVariantId, GeneralConstId, StaticId,
};
use la_arena::{Idx, RawIdx};
use stdx::never;
@@ -129,23 +129,28 @@ pub fn intern_const_scalar(value: ConstScalar, ty: Ty) -> Const {
}
/// Interns a constant scalar with the given type
-pub fn intern_const_ref(db: &dyn HirDatabase, value: &ConstRef, ty: Ty, krate: CrateId) -> Const {
+pub fn intern_const_ref(
+ db: &dyn HirDatabase,
+ value: &LiteralConstRef,
+ ty: Ty,
+ krate: CrateId,
+) -> Const {
let layout = db.layout_of_ty(ty.clone(), krate);
let bytes = match value {
- ConstRef::Int(i) => {
+ LiteralConstRef::Int(i) => {
// FIXME: We should handle failure of layout better.
let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
}
- ConstRef::UInt(i) => {
+ LiteralConstRef::UInt(i) => {
let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
}
- ConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()),
- ConstRef::Char(c) => {
+ LiteralConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()),
+ LiteralConstRef::Char(c) => {
ConstScalar::Bytes((*c as u32).to_le_bytes().to_vec(), MemoryMap::default())
}
- ConstRef::Unknown => ConstScalar::Unknown,
+ LiteralConstRef::Unknown => ConstScalar::Unknown,
};
intern_const_scalar(bytes, ty)
}
@@ -154,7 +159,7 @@ pub fn intern_const_ref(db: &dyn HirDatabase, value: &ConstRef, ty: Ty, krate: C
pub fn usize_const(db: &dyn HirDatabase, value: Option<u128>, krate: CrateId) -> Const {
intern_const_ref(
db,
- &value.map_or(ConstRef::Unknown, ConstRef::UInt),
+ &value.map_or(LiteralConstRef::Unknown, LiteralConstRef::UInt),
TyBuilder::usize(),
krate,
)
@@ -210,17 +215,18 @@ pub(crate) fn const_eval_query(
GeneralConstId::ConstId(c) => {
db.monomorphized_mir_body(c.into(), subst, db.trait_environment(c.into()))?
}
- GeneralConstId::AnonymousConstId(c) => {
- let (def, root) = db.lookup_intern_anonymous_const(c);
- let body = db.body(def);
- let infer = db.infer(def);
+ GeneralConstId::ConstBlockId(c) => {
+ let ConstBlockLoc { parent, root } = db.lookup_intern_anonymous_const(c);
+ let body = db.body(parent);
+ let infer = db.infer(parent);
Arc::new(monomorphize_mir_body_bad(
db,
- lower_to_mir(db, def, &body, &infer, root)?,
+ lower_to_mir(db, parent, &body, &infer, root)?,
subst,
- db.trait_environment_for_body(def),
+ db.trait_environment_for_body(parent),
)?)
}
+ GeneralConstId::InTypeConstId(c) => db.mir_body(c.into())?,
};
let c = interpret_mir(db, &body, false).0?;
Ok(c)
diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs
index 06fff08b7d..0db1fefbfe 100644
--- a/crates/hir-ty/src/consteval/tests.rs
+++ b/crates/hir-ty/src/consteval/tests.rs
@@ -2052,6 +2052,17 @@ fn extern_weak_statics() {
}
#[test]
+fn from_ne_bytes() {
+ check_number(
+ r#"
+//- minicore: int_impl
+const GOAL: u32 = u32::from_ne_bytes([44, 1, 0, 0]);
+ "#,
+ 300,
+ );
+}
+
+#[test]
fn enums() {
check_number(
r#"
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index ca8a394e36..9dd810f844 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -278,6 +278,7 @@ fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult>
DefWithBodyId::VariantId(it) => {
db.enum_data(it.parent).variants[it.local_id].name.display(db.upcast()).to_string()
}
+ DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
});
db.infer_query(def)
}
diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 7c38e6583a..9f9a56ffab 100644
--- a/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -18,9 +18,10 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> {
let is_unsafe = match def {
DefWithBodyId::FunctionId(it) => db.function_data(it).has_unsafe_kw(),
- DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) | DefWithBodyId::VariantId(_) => {
- false
- }
+ DefWithBodyId::StaticId(_)
+ | DefWithBodyId::ConstId(_)
+ | DefWithBodyId::VariantId(_)
+ | DefWithBodyId::InTypeConstId(_) => false,
};
if is_unsafe {
return res;
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index f90e025c7c..c1df24d172 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -2,7 +2,10 @@
//! HIR back into source code, and just displaying them for debugging/testing
//! purposes.
-use std::fmt::{self, Debug};
+use std::{
+ fmt::{self, Debug},
+ mem::size_of,
+};
use base_db::CrateId;
use chalk_ir::{BoundVar, TyKind};
@@ -536,8 +539,44 @@ fn render_const_scalar(
}
f.write_str("]")
}
+ TyKind::Dyn(_) => {
+ let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
+ let ty_id = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap());
+ let Ok(t) = memory_map.vtable.ty(ty_id) else {
+ return f.write_str("<ty-missing-in-vtable-map>");
+ };
+ let Ok(layout) = f.db.layout_of_ty(t.clone(), krate) else {
+ return f.write_str("<layout-error>");
+ };
+ let size = layout.size.bytes_usize();
+ let Some(bytes) = memory_map.get(addr, size) else {
+ return f.write_str("<ref-data-not-available>");
+ };
+ f.write_str("&")?;
+ render_const_scalar(f, bytes, memory_map, t)
+ }
+ TyKind::Adt(adt, _) if b.len() == 2 * size_of::<usize>() => match adt.0 {
+ hir_def::AdtId::StructId(s) => {
+ let data = f.db.struct_data(s);
+ write!(f, "&{}", data.name.display(f.db.upcast()))?;
+ Ok(())
+ }
+ _ => {
+ return f.write_str("<unsized-enum-or-union>");
+ }
+ },
_ => {
- let addr = usize::from_le_bytes(b.try_into().unwrap());
+ let addr = usize::from_le_bytes(match b.try_into() {
+ Ok(b) => b,
+ Err(_) => {
+ never!(
+ "tried rendering ty {:?} in const ref with incorrect byte count {}",
+ t,
+ b.len()
+ );
+ return f.write_str("<layout-error>");
+ }
+ });
let Ok(layout) = f.db.layout_of_ty(t.clone(), krate) else {
return f.write_str("<layout-error>");
};
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 80f32e96ee..1ac0837b5b 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -41,10 +41,15 @@ use stdx::{always, never};
use triomphe::Arc;
use crate::{
- db::HirDatabase, fold_tys, infer::coerce::CoerceMany, lower::ImplTraitLoweringMode,
- static_lifetime, to_assoc_type_id, traits::FnTrait, AliasEq, AliasTy, ClosureId, DomainGoal,
- GenericArg, Goal, ImplTraitId, InEnvironment, Interner, ProjectionTy, RpitId, Substitution,
- TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt,
+ db::HirDatabase,
+ fold_tys,
+ infer::coerce::CoerceMany,
+ lower::ImplTraitLoweringMode,
+ static_lifetime, to_assoc_type_id,
+ traits::FnTrait,
+ utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},
+ AliasEq, AliasTy, ClosureId, DomainGoal, GenericArg, Goal, ImplTraitId, InEnvironment,
+ Interner, ProjectionTy, RpitId, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt,
};
// This lint has a false positive here. See the link below for details.
@@ -102,6 +107,16 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
},
});
}
+ DefWithBodyId::InTypeConstId(c) => {
+ // FIXME(const-generic-body): We should not get the return type in this way.
+ ctx.return_ty = c
+ .lookup(db.upcast())
+ .thing
+ .box_any()
+ .downcast::<InTypeConstIdMetadata>()
+ .unwrap()
+ .0;
+ }
}
ctx.infer_body();
@@ -684,7 +699,7 @@ impl<'a> InferenceContext<'a> {
fn collect_fn(&mut self, func: FunctionId) {
let data = self.db.function_data(func);
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, func.into())
.with_impl_trait_mode(ImplTraitLoweringMode::Param);
let mut param_tys =
data.params.iter().map(|type_ref| ctx.lower_ty(type_ref)).collect::<Vec<_>>();
@@ -708,7 +723,7 @@ impl<'a> InferenceContext<'a> {
}
let return_ty = &*data.ret_type;
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into())
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque);
let return_ty = ctx.lower_ty(return_ty);
let return_ty = self.insert_type_vars(return_ty);
@@ -823,7 +838,7 @@ impl<'a> InferenceContext<'a> {
}
fn make_ty(&mut self, type_ref: &TypeRef) -> Ty {
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
let ty = ctx.lower_ty(type_ref);
let ty = self.insert_type_vars(ty);
self.normalize_associated_types_in(ty)
@@ -850,7 +865,21 @@ impl<'a> InferenceContext<'a> {
}
fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
- self.table.unify(ty1, ty2)
+ let ty1 = ty1
+ .clone()
+ .try_fold_with(
+ &mut UnevaluatedConstEvaluatorFolder { db: self.db },
+ DebruijnIndex::INNERMOST,
+ )
+ .unwrap();
+ let ty2 = ty2
+ .clone()
+ .try_fold_with(
+ &mut UnevaluatedConstEvaluatorFolder { db: self.db },
+ DebruijnIndex::INNERMOST,
+ )
+ .unwrap();
+ self.table.unify(&ty1, &ty2)
}
/// Attempts to returns the deeply last field of nested structures, but
@@ -973,7 +1002,7 @@ impl<'a> InferenceContext<'a> {
Some(path) => path,
None => return (self.err_ty(), None),
};
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
let (resolution, unresolved) = if value_ns {
match self.resolver.resolve_path_in_value_ns(self.db.upcast(), path) {
Some(ResolveValueResult::ValueNs(value)) => match value {
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index 23189f383e..ff64ae252b 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -715,10 +715,9 @@ impl InferenceContext<'_> {
}
fn is_upvar(&self, place: &HirPlace) -> bool {
- let b = &self.body[place.local];
if let Some(c) = self.current_closure {
let (_, root) = self.db.lookup_intern_closure(c.into());
- return b.is_upvar(root);
+ return self.body.is_binding_upvar(place.local, root);
}
false
}
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index 33e98ac86c..194471f004 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -173,8 +173,8 @@ impl<'a> InferenceContext<'a> {
}
Expr::Const(id) => {
self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
- let (_, expr) = this.db.lookup_intern_anonymous_const(*id);
- this.infer_expr(expr, expected)
+ let loc = this.db.lookup_intern_anonymous_const(*id);
+ this.infer_expr(loc.root, expected)
})
.1
}
@@ -1715,6 +1715,7 @@ impl<'a> InferenceContext<'a> {
const_or_path_to_chalk(
this.db,
&this.resolver,
+ this.owner.into(),
ty,
c,
ParamLoweringMode::Placeholder,
diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs
index 4478342439..46f2e1d7d1 100644
--- a/crates/hir-ty/src/infer/mutability.rs
+++ b/crates/hir-ty/src/infer/mutability.rs
@@ -43,8 +43,8 @@ impl<'a> InferenceContext<'a> {
}
}
Expr::Const(id) => {
- let (_, expr) = self.db.lookup_intern_anonymous_const(*id);
- self.infer_mut_expr(expr, Mutability::Not);
+ let loc = self.db.lookup_intern_anonymous_const(*id);
+ self.infer_mut_expr(loc.root, Mutability::Not);
}
Expr::Let { pat, expr } => self.infer_mut_expr(*expr, self.pat_bound_mutability(*pat)),
Expr::Block { id: _, statements, tail, label: _ }
diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs
index 95a20f983f..79d9e21e79 100644
--- a/crates/hir-ty/src/infer/path.rs
+++ b/crates/hir-ty/src/infer/path.rs
@@ -44,7 +44,8 @@ impl InferenceContext<'_> {
let last = path.segments().last()?;
// Don't use `self.make_ty()` here as we need `orig_ns`.
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let ctx =
+ crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
let (ty, orig_ns) = ctx.lower_ty_ext(type_ref);
let ty = self.table.insert_type_vars(ty);
let ty = self.table.normalize_associated_types_in(ty);
@@ -108,7 +109,7 @@ impl InferenceContext<'_> {
}
};
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
let substs = ctx.substs_from_path(path, value_def, true);
let substs = substs.as_slice(Interner);
let parent_substs = self_subst.or_else(|| {
@@ -190,7 +191,11 @@ impl InferenceContext<'_> {
(TypeNs::TraitId(trait_), true) => {
let segment =
remaining_segments.last().expect("there should be at least one segment here");
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let ctx = crate::lower::TyLoweringContext::new(
+ self.db,
+ &self.resolver,
+ self.owner.into(),
+ );
let trait_ref =
ctx.lower_trait_ref_from_resolved_path(trait_, resolved_segment, None);
self.resolve_trait_assoc_item(trait_ref, segment, id)
@@ -202,7 +207,11 @@ impl InferenceContext<'_> {
// as Iterator>::Item::default`)
let remaining_segments_for_ty =
remaining_segments.take(remaining_segments.len() - 1);
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let ctx = crate::lower::TyLoweringContext::new(
+ self.db,
+ &self.resolver,
+ self.owner.into(),
+ );
let (ty, _) = ctx.lower_partly_resolved_path(
def,
resolved_segment,
diff --git a/crates/hir-ty/src/interner.rs b/crates/hir-ty/src/interner.rs
index 89f7d9c4f4..e4dd4b86cf 100644
--- a/crates/hir-ty/src/interner.rs
+++ b/crates/hir-ty/src/interner.rs
@@ -266,7 +266,7 @@ impl chalk_ir::interner::Interner for Interner {
c1: &Self::InternedConcreteConst,
c2: &Self::InternedConcreteConst,
) -> bool {
- (c1 == &ConstScalar::Unknown) || (c2 == &ConstScalar::Unknown) || (c1 == c2)
+ !matches!(c1, ConstScalar::Bytes(..)) || !matches!(c2, ConstScalar::Bytes(..)) || (c1 == c2)
}
fn intern_generic_arg(
diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs
index fca2e09ff0..0ff8c532d4 100644
--- a/crates/hir-ty/src/layout/tests.rs
+++ b/crates/hir-ty/src/layout/tests.rs
@@ -2,6 +2,7 @@ use std::collections::HashMap;
use base_db::fixture::WithFixture;
use chalk_ir::{AdtId, TyKind};
+use either::Either;
use hir_def::db::DefDatabase;
use triomphe::Arc;
@@ -25,27 +26,38 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
);
let (db, file_ids) = TestDB::with_many_files(&ra_fixture);
- let (adt_id, module_id) = file_ids
+ let (adt_or_type_alias_id, module_id) = file_ids
.into_iter()
.find_map(|file_id| {
let module_id = db.module_for_file(file_id);
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
- let adt_id = scope.declarations().find_map(|x| match x {
+ let adt_or_type_alias_id = scope.declarations().find_map(|x| match x {
hir_def::ModuleDefId::AdtId(x) => {
let name = match x {
hir_def::AdtId::StructId(x) => db.struct_data(x).name.to_smol_str(),
hir_def::AdtId::UnionId(x) => db.union_data(x).name.to_smol_str(),
hir_def::AdtId::EnumId(x) => db.enum_data(x).name.to_smol_str(),
};
- (name == "Goal").then_some(x)
+ (name == "Goal").then_some(Either::Left(x))
+ }
+ hir_def::ModuleDefId::TypeAliasId(x) => {
+ let name = db.type_alias_data(x).name.to_smol_str();
+ (name == "Goal").then_some(Either::Right(x))
}
_ => None,
})?;
- Some((adt_id, module_id))
+ Some((adt_or_type_alias_id, module_id))
})
.unwrap();
- let goal_ty = TyKind::Adt(AdtId(adt_id), Substitution::empty(Interner)).intern(Interner);
+ let goal_ty = match adt_or_type_alias_id {
+ Either::Left(adt_id) => {
+ TyKind::Adt(AdtId(adt_id), Substitution::empty(Interner)).intern(Interner)
+ }
+ Either::Right(ty_id) => {
+ db.ty(ty_id.into()).substitute(Interner, &Substitution::empty(Interner))
+ }
+ };
db.layout_of_ty(goal_ty, module_id.krate())
}
@@ -380,9 +392,22 @@ fn niche_optimization() {
#[test]
fn const_eval() {
size_and_align! {
+ struct Goal([i32; 2 + 2]);
+ }
+ size_and_align! {
const X: usize = 5;
struct Goal([i32; X]);
}
+ size_and_align! {
+ mod foo {
+ pub(super) const BAR: usize = 5;
+ }
+ struct Ar<T>([T; foo::BAR]);
+ struct Goal(Ar<Ar<i32>>);
+ }
+ size_and_align! {
+ type Goal = [u8; 2 + 2];
+ }
}
#[test]
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 0c68891fe4..9951a1c750 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -27,10 +27,11 @@ use hir_def::{
nameres::MacroSubNs,
path::{GenericArg, GenericArgs, ModPath, Path, PathKind, PathSegment, PathSegments},
resolver::{HasResolver, Resolver, TypeNs},
- type_ref::{ConstRefOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef},
+ type_ref::{ConstRef, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef},
AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FunctionId,
- GenericDefId, HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StaticId,
- StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
+ GenericDefId, HasModule, ImplId, InTypeConstLoc, ItemContainerId, LocalFieldId, Lookup,
+ ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeOwnerId,
+ TypeParamId, UnionId, VariantId,
};
use hir_expand::{name::Name, ExpandResult};
use intern::Interned;
@@ -43,17 +44,24 @@ use triomphe::Arc;
use crate::{
all_super_traits,
- consteval::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic},
+ consteval::{
+ intern_const_ref, intern_const_scalar, path_to_const, unknown_const,
+ unknown_const_as_generic,
+ },
db::HirDatabase,
make_binders,
mapping::{from_chalk_trait_id, ToChalk},
static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
utils::Generics,
- utils::{all_super_trait_refs, associated_type_by_name_including_super_traits, generics},
- AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, DebruijnIndex, DynTy, FnPointer,
- FnSig, FnSubst, GenericArgData, ImplTraitId, Interner, ParamKind, PolyFnSig, ProjectionTy,
- QuantifiedWhereClause, QuantifiedWhereClauses, ReturnTypeImplTrait, ReturnTypeImplTraits,
- Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyKind, WhereClause,
+ utils::{
+ all_super_trait_refs, associated_type_by_name_including_super_traits, generics,
+ InTypeConstIdMetadata,
+ },
+ AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy,
+ FnPointer, FnSig, FnSubst, GenericArgData, ImplTraitId, Interner, ParamKind, PolyFnSig,
+ ProjectionTy, QuantifiedWhereClause, QuantifiedWhereClauses, ReturnTypeImplTrait,
+ ReturnTypeImplTraits, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder,
+ TyKind, WhereClause,
};
#[derive(Debug)]
@@ -106,6 +114,7 @@ pub struct TyLoweringContext<'a> {
pub db: &'a dyn HirDatabase,
resolver: &'a Resolver,
in_binders: DebruijnIndex,
+ owner: TypeOwnerId,
/// Note: Conceptually, it's thinkable that we could be in a location where
/// some type params should be represented as placeholders, and others
/// should be converted to variables. I think in practice, this isn't
@@ -118,13 +127,14 @@ pub struct TyLoweringContext<'a> {
}
impl<'a> TyLoweringContext<'a> {
- pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver) -> Self {
+ pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver, owner: TypeOwnerId) -> Self {
let impl_trait_mode = ImplTraitLoweringState::Disallowed;
let type_param_mode = ParamLoweringMode::Placeholder;
let in_binders = DebruijnIndex::INNERMOST;
Self {
db,
resolver,
+ owner,
in_binders,
impl_trait_mode,
type_param_mode,
@@ -235,6 +245,7 @@ impl<'a> TyLoweringContext<'a> {
let const_len = const_or_path_to_chalk(
self.db,
self.resolver,
+ self.owner,
TyBuilder::usize(),
len,
self.type_param_mode,
@@ -840,6 +851,7 @@ impl<'a> TyLoweringContext<'a> {
const_or_path_to_chalk(
self.db,
self.resolver,
+ self.owner,
ty,
c,
self.type_param_mode,
@@ -1356,8 +1368,8 @@ pub(crate) fn field_types_query(
};
let generics = generics(db.upcast(), def);
let mut res = ArenaMap::default();
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let ctx = TyLoweringContext::new(db, &resolver, GenericDefId::from(variant_id.adt_id()).into())
+ .with_type_param_mode(ParamLoweringMode::Variable);
for (field_id, field_data) in var_data.fields().iter() {
res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(&field_data.type_ref)));
}
@@ -1379,8 +1391,8 @@ pub(crate) fn generic_predicates_for_param_query(
assoc_name: Option<Name>,
) -> Arc<[Binders<QuantifiedWhereClause>]> {
let resolver = def.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let ctx = TyLoweringContext::new(db, &resolver, def.into())
+ .with_type_param_mode(ParamLoweringMode::Variable);
let generics = generics(db.upcast(), def);
let mut predicates: Vec<_> = resolver
.where_predicates_in_scope()
@@ -1468,8 +1480,8 @@ pub(crate) fn trait_environment_query(
def: GenericDefId,
) -> Arc<TraitEnvironment> {
let resolver = def.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Placeholder);
+ let ctx = TyLoweringContext::new(db, &resolver, def.into())
+ .with_type_param_mode(ParamLoweringMode::Placeholder);
let mut traits_in_scope = Vec::new();
let mut clauses = Vec::new();
for pred in resolver.where_predicates_in_scope() {
@@ -1527,8 +1539,8 @@ pub(crate) fn generic_predicates_query(
def: GenericDefId,
) -> Arc<[Binders<QuantifiedWhereClause>]> {
let resolver = def.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let ctx = TyLoweringContext::new(db, &resolver, def.into())
+ .with_type_param_mode(ParamLoweringMode::Variable);
let generics = generics(db.upcast(), def);
let mut predicates = resolver
@@ -1582,8 +1594,8 @@ pub(crate) fn generic_defaults_query(
def: GenericDefId,
) -> Arc<[Binders<chalk_ir::GenericArg<Interner>>]> {
let resolver = def.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let ctx = TyLoweringContext::new(db, &resolver, def.into())
+ .with_type_param_mode(ParamLoweringMode::Variable);
let generic_params = generics(db.upcast(), def);
let parent_start_idx = generic_params.len_self();
@@ -1648,11 +1660,11 @@ pub(crate) fn generic_defaults_recover(
fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
let data = db.function_data(def);
let resolver = def.resolver(db.upcast());
- let ctx_params = TyLoweringContext::new(db, &resolver)
+ let ctx_params = TyLoweringContext::new(db, &resolver, def.into())
.with_impl_trait_mode(ImplTraitLoweringMode::Variable)
.with_type_param_mode(ParamLoweringMode::Variable);
let params = data.params.iter().map(|tr| ctx_params.lower_ty(tr)).collect::<Vec<_>>();
- let ctx_ret = TyLoweringContext::new(db, &resolver)
+ let ctx_ret = TyLoweringContext::new(db, &resolver, def.into())
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
.with_type_param_mode(ParamLoweringMode::Variable);
let ret = ctx_ret.lower_ty(&data.ret_type);
@@ -1683,8 +1695,8 @@ fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
let data = db.const_data(def);
let generics = generics(db.upcast(), def.into());
let resolver = def.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let ctx = TyLoweringContext::new(db, &resolver, def.into())
+ .with_type_param_mode(ParamLoweringMode::Variable);
make_binders(db, &generics, ctx.lower_ty(&data.type_ref))
}
@@ -1693,7 +1705,7 @@ fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders<Ty> {
let data = db.static_data(def);
let resolver = def.resolver(db.upcast());
- let ctx = TyLoweringContext::new(db, &resolver);
+ let ctx = TyLoweringContext::new(db, &resolver, def.into());
Binders::empty(Interner, ctx.lower_ty(&data.type_ref))
}
@@ -1702,8 +1714,8 @@ fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnS
let struct_data = db.struct_data(def);
let fields = struct_data.variant_data.fields();
let resolver = def.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let ctx = TyLoweringContext::new(db, &resolver, AdtId::from(def).into())
+ .with_type_param_mode(ParamLoweringMode::Variable);
let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
let (ret, binders) = type_for_adt(db, def.into()).into_value_and_skipped_binders();
Binders::new(binders, CallableSig::from_params_and_return(params, ret, false, Safety::Safe))
@@ -1715,7 +1727,7 @@ fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Binders<T
if let StructKind::Unit = struct_data.variant_data.kind() {
return type_for_adt(db, def.into());
}
- let generics = generics(db.upcast(), def.into());
+ let generics = generics(db.upcast(), AdtId::from(def).into());
let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
make_binders(
db,
@@ -1729,8 +1741,8 @@ fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId)
let var_data = &enum_data.variants[def.local_id];
let fields = var_data.variant_data.fields();
let resolver = def.parent.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let ctx = TyLoweringContext::new(db, &resolver, DefWithBodyId::VariantId(def).into())
+ .with_type_param_mode(ParamLoweringMode::Variable);
let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
let (ret, binders) = type_for_adt(db, def.parent.into()).into_value_and_skipped_binders();
Binders::new(binders, CallableSig::from_params_and_return(params, ret, false, Safety::Safe))
@@ -1762,8 +1774,8 @@ fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
let generics = generics(db.upcast(), t.into());
let resolver = t.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let ctx = TyLoweringContext::new(db, &resolver, t.into())
+ .with_type_param_mode(ParamLoweringMode::Variable);
if db.type_alias_data(t).is_extern {
Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner))
} else {
@@ -1884,8 +1896,8 @@ pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binde
"impl_self_ty_query({impl_id:?} -> {impl_loc:?} -> {impl_data:?})"
));
let generics = generics(db.upcast(), impl_id.into());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let ctx = TyLoweringContext::new(db, &resolver, impl_id.into())
+ .with_type_param_mode(ParamLoweringMode::Variable);
make_binders(db, &generics, ctx.lower_ty(&impl_data.self_ty))
}
@@ -1894,7 +1906,7 @@ pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> T
let parent_data = db.generic_params(def.parent());
let data = &parent_data.type_or_consts[def.local_id()];
let resolver = def.parent().resolver(db.upcast());
- let ctx = TyLoweringContext::new(db, &resolver);
+ let ctx = TyLoweringContext::new(db, &resolver, def.parent().into());
match data {
TypeOrConstParamData::TypeParamData(_) => {
never!();
@@ -1920,8 +1932,8 @@ pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<
let _cx = stdx::panic_context::enter(format!(
"impl_trait_query({impl_id:?} -> {impl_loc:?} -> {impl_data:?})"
));
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let ctx = TyLoweringContext::new(db, &resolver, impl_id.into())
+ .with_type_param_mode(ParamLoweringMode::Variable);
let (self_ty, binders) = db.impl_self_ty(impl_id).into_value_and_skipped_binders();
let target_trait = impl_data.target_trait.as_ref()?;
Some(Binders::new(binders, ctx.lower_trait_ref(target_trait, Some(self_ty))?))
@@ -1934,7 +1946,7 @@ pub(crate) fn return_type_impl_traits(
// FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
let data = db.function_data(def);
let resolver = def.resolver(db.upcast());
- let ctx_ret = TyLoweringContext::new(db, &resolver)
+ let ctx_ret = TyLoweringContext::new(db, &resolver, def.into())
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
.with_type_param_mode(ParamLoweringMode::Variable);
let _ret = ctx_ret.lower_ty(&data.ret_type);
@@ -1969,7 +1981,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
arg: &'a GenericArg,
this: &mut T,
for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a,
- for_const: impl FnOnce(&mut T, &ConstRefOrPath, Ty) -> Const + 'a,
+ for_const: impl FnOnce(&mut T, &ConstRef, Ty) -> Const + 'a,
) -> Option<crate::GenericArg> {
let kind = match kind_id {
Either::Left(_) => ParamKind::Type,
@@ -1997,7 +2009,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
let p = p.mod_path()?;
if p.kind == PathKind::Plain {
if let [n] = p.segments() {
- let c = ConstRefOrPath::Path(n.clone());
+ let c = ConstRef::Path(n.clone());
return Some(
GenericArgData::Const(for_const(this, &c, c_ty)).intern(Interner),
);
@@ -2013,15 +2025,16 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
pub(crate) fn const_or_path_to_chalk(
db: &dyn HirDatabase,
resolver: &Resolver,
+ owner: TypeOwnerId,
expected_ty: Ty,
- value: &ConstRefOrPath,
+ value: &ConstRef,
mode: ParamLoweringMode,
args: impl FnOnce() -> Generics,
debruijn: DebruijnIndex,
) -> Const {
match value {
- ConstRefOrPath::Scalar(s) => intern_const_ref(db, s, expected_ty, resolver.krate()),
- ConstRefOrPath::Path(n) => {
+ ConstRef::Scalar(s) => intern_const_ref(db, s, expected_ty, resolver.krate()),
+ ConstRef::Path(n) => {
let path = ModPath::from_segments(PathKind::Plain, Some(n.clone()));
path_to_const(
db,
@@ -2034,6 +2047,26 @@ pub(crate) fn const_or_path_to_chalk(
)
.unwrap_or_else(|| unknown_const(expected_ty))
}
+ &ConstRef::Complex(it) => {
+ let crate_data = &db.crate_graph()[owner.module(db.upcast()).krate()];
+ if crate_data.env.get("__ra_is_test_fixture").is_none() && crate_data.origin.is_local()
+ {
+ // FIXME: current `InTypeConstId` is very unstable, so we only use it in non local crate
+ // that are unlikely to be edited.
+ return unknown_const(expected_ty);
+ }
+ let c = db
+ .intern_in_type_const(InTypeConstLoc {
+ id: it,
+ owner,
+ thing: Box::new(InTypeConstIdMetadata(expected_ty.clone())),
+ })
+ .into();
+ intern_const_scalar(
+ ConstScalar::UnevaluatedConst(c, Substitution::empty(Interner)),
+ expected_ty,
+ )
+ }
}
}
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index 6fa3d1351a..ab6430e8f1 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -570,7 +570,7 @@ impl ReceiverAdjustments {
.intern(Interner);
}
}
- never!("unsize_array with non-reference-to-array {:?}", ty);
+ // FIXME: report diagnostic if array unsizing happens without indirection.
ty
};
adjust.push(Adjustment {
diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs
index a0ea1cc5ef..a5dd0182eb 100644
--- a/crates/hir-ty/src/mir/borrowck.rs
+++ b/crates/hir-ty/src/mir/borrowck.rs
@@ -240,10 +240,14 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio
/// Returns a map from basic blocks to the set of locals that might be ever initialized before
/// the start of the block. Only `StorageDead` can remove something from this map, and we ignore
/// `Uninit` and `drop` and similar after initialization.
-fn ever_initialized_map(body: &MirBody) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> {
+fn ever_initialized_map(
+ db: &dyn HirDatabase,
+ body: &MirBody,
+) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> {
let mut result: ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> =
body.basic_blocks.iter().map(|x| (x.0, ArenaMap::default())).collect();
fn dfs(
+ db: &dyn HirDatabase,
body: &MirBody,
b: BasicBlockId,
l: LocalId,
@@ -267,7 +271,7 @@ fn ever_initialized_map(body: &MirBody) -> ArenaMap<BasicBlockId, ArenaMap<Local
}
}
let Some(terminator) = &block.terminator else {
- never!("Terminator should be none only in construction");
+ never!("Terminator should be none only in construction.\nThe body:\n{}", body.pretty_print(db));
return;
};
let targets = match &terminator.kind {
@@ -299,18 +303,18 @@ fn ever_initialized_map(body: &MirBody) -> ArenaMap<BasicBlockId, ArenaMap<Local
for target in targets {
if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized {
result[target].insert(l, is_ever_initialized);
- dfs(body, target, l, result);
+ dfs(db, body, target, l, result);
}
}
}
for &l in &body.param_locals {
result[body.start_block].insert(l, true);
- dfs(body, body.start_block, l, &mut result);
+ dfs(db, body, body.start_block, l, &mut result);
}
for l in body.locals.iter().map(|x| x.0) {
if !result[body.start_block].contains_idx(l) {
result[body.start_block].insert(l, false);
- dfs(body, body.start_block, l, &mut result);
+ dfs(db, body, body.start_block, l, &mut result);
}
}
result
@@ -326,7 +330,7 @@ fn mutability_of_locals(
MutabilityReason::Mut { spans } => spans.push(span),
x @ MutabilityReason::Not => *x = MutabilityReason::Mut { spans: vec![span] },
};
- let ever_init_maps = ever_initialized_map(body);
+ let ever_init_maps = ever_initialized_map(db, body);
for (block_id, mut ever_init_map) in ever_init_maps.into_iter() {
let block = &body.basic_blocks[block_id];
for statement in &block.statements {
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index ce14f6dbad..9acf9d39e5 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -77,7 +77,7 @@ impl VTableMap {
id
}
- fn ty(&self, id: usize) -> Result<&Ty> {
+ pub(crate) fn ty(&self, id: usize) -> Result<&Ty> {
self.id_to_ty.get(id).ok_or(MirEvalError::InvalidVTableId(id))
}
@@ -1571,16 +1571,24 @@ impl Evaluator<'_> {
}
None => {
let mut check_inner = None;
+ let (addr, meta) = bytes.split_at(bytes.len() / 2);
let element_size = match t.kind(Interner) {
TyKind::Str => 1,
TyKind::Slice(t) => {
check_inner = Some(t);
this.size_of_sized(t, locals, "slice inner type")?
}
- _ => return Ok(()), // FIXME: support other kind of unsized types
+ TyKind::Dyn(_) => {
+ let t = this.vtable_map.ty_of_bytes(meta)?;
+ check_inner = Some(t);
+ this.size_of_sized(t, locals, "dyn concrete type")?
+ }
+ _ => return Ok(()),
+ };
+ let count = match t.kind(Interner) {
+ TyKind::Dyn(_) => 1,
+ _ => from_bytes!(usize, meta),
};
- let (addr, meta) = bytes.split_at(bytes.len() / 2);
- let count = from_bytes!(usize, meta);
let size = element_size * count;
let addr = Address::from_bytes(addr)?;
let b = this.read_memory(addr, size)?;
@@ -1588,7 +1596,7 @@ impl Evaluator<'_> {
if let Some(ty) = check_inner {
for i in 0..count {
let offset = element_size * i;
- rec(this, &b[offset..offset + element_size], ty, locals, mm)?;
+ rec(this, &b[offset..offset + element_size], &ty, locals, mm)?;
}
}
}
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index aad1a82f29..2cb29b4ab9 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -1853,7 +1853,7 @@ pub fn mir_body_for_closure_query(
.result
.binding_locals
.into_iter()
- .filter(|x| ctx.body[x.0].owner == Some(expr))
+ .filter(|it| ctx.body.binding_owners.get(&it.0).copied() == Some(expr))
.collect();
if let Some(err) = err {
return Err(MirLowerError::UnresolvedUpvar(err));
@@ -1876,6 +1876,7 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<Mi
DefWithBodyId::VariantId(it) => {
db.enum_data(it.parent).variants[it.local_id].name.display(db.upcast()).to_string()
}
+ DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
});
let body = db.body(def);
let infer = db.infer(def);
@@ -1908,10 +1909,11 @@ pub fn lower_to_mir(
// 0 is return local
ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr) });
let binding_picker = |b: BindingId| {
+ let owner = ctx.body.binding_owners.get(&b).copied();
if root_expr == body.body_expr {
- body[b].owner.is_none()
+ owner.is_none()
} else {
- body[b].owner == Some(root_expr)
+ owner == Some(root_expr)
}
};
// 1 to param_len is for params
diff --git a/crates/hir-ty/src/mir/pretty.rs b/crates/hir-ty/src/mir/pretty.rs
index 58662b01b9..ac23e77bd2 100644
--- a/crates/hir-ty/src/mir/pretty.rs
+++ b/crates/hir-ty/src/mir/pretty.rs
@@ -60,6 +60,9 @@ impl MirBody {
let data = db.enum_data(id.parent);
w!(this, "enum {} = ", data.name.display(db.upcast()));
}
+ hir_def::DefWithBodyId::InTypeConstId(id) => {
+ w!(this, "in type const {id:?} = ");
+ }
});
ctx.result
}
diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs
index 2db04024b7..8571412800 100644
--- a/crates/hir-ty/src/tests.rs
+++ b/crates/hir-ty/src/tests.rs
@@ -146,6 +146,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
let loc = db.lookup_intern_enum(it.parent);
loc.source(&db).value.syntax().text_range().start()
}
+ DefWithBodyId::InTypeConstId(it) => it.source(&db).syntax().text_range().start(),
});
let mut unexpected_type_mismatches = String::new();
for def in defs {
@@ -391,6 +392,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
let loc = db.lookup_intern_enum(it.parent);
loc.source(&db).value.syntax().text_range().start()
}
+ DefWithBodyId::InTypeConstId(it) => it.source(&db).syntax().text_range().start(),
});
for def in defs {
let (body, source_map) = db.body_with_source_map(def);
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs
index f18c953a7a..047900a324 100644
--- a/crates/hir-ty/src/tests/regression.rs
+++ b/crates/hir-ty/src/tests/regression.rs
@@ -1955,3 +1955,26 @@ impl Inner<1> {
"#,
);
}
+
+#[test]
+fn dont_crash_on_slice_unsizing() {
+ check_no_mismatches(
+ r#"
+//- minicore: slice, unsize, coerce_unsized
+trait Tr {
+ fn f(self);
+}
+
+impl Tr for [i32] {
+ fn f(self) {
+ let t;
+ x(t);
+ }
+}
+
+fn x(a: [i32; 4]) {
+ let b = a.f();
+}
+ "#,
+ );
+}
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs
index 3ece40486d..a0ff628435 100644
--- a/crates/hir-ty/src/tests/simple.rs
+++ b/crates/hir-ty/src/tests/simple.rs
@@ -1829,6 +1829,38 @@ impl Foo for u8 {
}
#[test]
+fn const_eval_in_function_signature() {
+ check_types(
+ r#"
+const fn foo() -> usize {
+ 5
+}
+
+fn f() -> [u8; foo()] {
+ loop {}
+}
+
+fn main() {
+ let t = f();
+ //^ [u8; 5]
+}"#,
+ );
+ check_types(
+ r#"
+//- minicore: default, builtin_impls
+fn f() -> [u8; Default::default()] {
+ loop {}
+}
+
+fn main() {
+ let t = f();
+ //^ [u8; 0]
+}
+ "#,
+ );
+}
+
+#[test]
fn shadowing_primitive_with_inner_items() {
check_types(
r#"
@@ -3465,6 +3497,22 @@ fn func() {
);
}
+#[test]
+fn pointee_trait() {
+ check_types(
+ r#"
+//- minicore: pointee
+use core::ptr::Pointee;
+fn func() {
+ let x: <u8 as Pointee>::Metadata;
+ //^ ()
+ let x: <[u8] as Pointee>::Metadata;
+ //^ usize
+}
+ "#,
+ );
+}
+
// FIXME
#[test]
fn castable_to() {
diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs
index 829a6ab189..97ae732a90 100644
--- a/crates/hir-ty/src/tests/traits.rs
+++ b/crates/hir-ty/src/tests/traits.rs
@@ -4335,8 +4335,9 @@ fn derive_macro_bounds() {
#[derive(Clone)]
struct AssocGeneric<T: Tr>(T::Assoc);
- #[derive(Clone)]
- struct AssocGeneric2<T: Tr>(<T as Tr>::Assoc);
+ // Currently rustc does not accept this.
+ // #[derive(Clone)]
+ // struct AssocGeneric2<T: Tr>(<T as Tr>::Assoc);
#[derive(Clone)]
struct AssocGeneric3<T: Tr>(Generic<T::Assoc>);
@@ -4361,9 +4362,8 @@ fn derive_macro_bounds() {
let x: &AssocGeneric<Copy> = &AssocGeneric(NotCopy);
let x = x.clone();
//^ &AssocGeneric<Copy>
- let x: &AssocGeneric2<Copy> = &AssocGeneric2(NotCopy);
- let x = x.clone();
- //^ &AssocGeneric2<Copy>
+ // let x: &AssocGeneric2<Copy> = &AssocGeneric2(NotCopy);
+ // let x = x.clone();
let x: &AssocGeneric3<Copy> = &AssocGeneric3(Generic(NotCopy));
let x = x.clone();
//^ &AssocGeneric3<Copy>
diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs
index 681d087ede..3636580630 100644
--- a/crates/hir-ty/src/utils.rs
+++ b/crates/hir-ty/src/utils.rs
@@ -1,7 +1,7 @@
//! Helper functions for working with def, which don't need to be a separate
//! query, but can't be computed directly from `*Data` (ie, which need a `db`).
-use std::iter;
+use std::{hash::Hash, iter};
use base_db::CrateId;
use chalk_ir::{
@@ -20,7 +20,8 @@ use hir_def::{
resolver::{HasResolver, TypeNs},
type_ref::{TraitBoundModifier, TypeRef},
ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId, ItemContainerId,
- LocalEnumVariantId, Lookup, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId,
+ LocalEnumVariantId, Lookup, OpaqueInternableThing, TraitId, TypeAliasId, TypeOrConstParamId,
+ TypeParamId,
};
use hir_expand::name::Name;
use intern::Interned;
@@ -464,3 +465,28 @@ pub(crate) fn detect_variant_from_bytes<'a>(
};
Some((var_id, var_layout))
}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub(crate) struct InTypeConstIdMetadata(pub(crate) Ty);
+
+impl OpaqueInternableThing for InTypeConstIdMetadata {
+ fn dyn_hash(&self, mut state: &mut dyn std::hash::Hasher) {
+ self.hash(&mut state);
+ }
+
+ fn dyn_eq(&self, other: &dyn OpaqueInternableThing) -> bool {
+ other.as_any().downcast_ref::<Self>().map_or(false, |x| self == x)
+ }
+
+ fn dyn_clone(&self) -> Box<dyn OpaqueInternableThing> {
+ Box::new(self.clone())
+ }
+
+ fn as_any(&self) -> &dyn std::any::Any {
+ self
+ }
+
+ fn box_any(&self) -> Box<dyn std::any::Any> {
+ Box::new(self.clone())
+ }
+}
diff --git a/crates/hir/src/from_id.rs b/crates/hir/src/from_id.rs
index 883e6a29b0..de23902199 100644
--- a/crates/hir/src/from_id.rs
+++ b/crates/hir/src/from_id.rs
@@ -40,6 +40,7 @@ from_id![
(hir_def::TraitAliasId, crate::TraitAlias),
(hir_def::StaticId, crate::Static),
(hir_def::ConstId, crate::Const),
+ (hir_def::InTypeConstId, crate::InTypeConst),
(hir_def::FunctionId, crate::Function),
(hir_def::ImplId, crate::Impl),
(hir_def::TypeOrConstParamId, crate::TypeOrConstParam),
@@ -144,6 +145,7 @@ impl From<DefWithBody> for DefWithBodyId {
DefWithBody::Static(it) => DefWithBodyId::StaticId(it.id),
DefWithBody::Const(it) => DefWithBodyId::ConstId(it.id),
DefWithBody::Variant(it) => DefWithBodyId::VariantId(it.into()),
+ DefWithBody::InTypeConst(it) => DefWithBodyId::InTypeConstId(it.id),
}
}
}
@@ -155,6 +157,7 @@ impl From<DefWithBodyId> for DefWithBody {
DefWithBodyId::StaticId(it) => DefWithBody::Static(it.into()),
DefWithBodyId::ConstId(it) => DefWithBody::Const(it.into()),
DefWithBodyId::VariantId(it) => DefWithBody::Variant(it.into()),
+ DefWithBodyId::InTypeConstId(it) => DefWithBody::InTypeConst(it.into()),
}
}
}
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 5926d86542..6df625380f 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -52,9 +52,10 @@ use hir_def::{
resolver::{HasResolver, Resolver},
src::HasSource as _,
AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
- EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
- LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId,
- TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
+ EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, InTypeConstId, ItemContainerId,
+ LifetimeParamId, LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId,
+ StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId,
+ UnionId,
};
use hir_expand::{name::name, MacroCallKind};
use hir_ty::{
@@ -202,7 +203,7 @@ impl Crate {
pub fn root_module(self, db: &dyn HirDatabase) -> Module {
let def_map = db.crate_def_map(self.id);
- Module { id: def_map.module_id(DefMap::ROOT) }
+ Module { id: def_map.crate_root().into() }
}
pub fn modules(self, db: &dyn HirDatabase) -> Vec<Module> {
@@ -475,7 +476,7 @@ impl Module {
/// in the module tree of any target in `Cargo.toml`.
pub fn crate_root(self, db: &dyn HirDatabase) -> Module {
let def_map = db.crate_def_map(self.id.krate());
- Module { id: def_map.module_id(DefMap::ROOT) }
+ Module { id: def_map.crate_root().into() }
}
pub fn is_crate_root(self) -> bool {
@@ -1375,8 +1376,9 @@ pub enum DefWithBody {
Static(Static),
Const(Const),
Variant(Variant),
+ InTypeConst(InTypeConst),
}
-impl_from!(Function, Const, Static, Variant for DefWithBody);
+impl_from!(Function, Const, Static, Variant, InTypeConst for DefWithBody);
impl DefWithBody {
pub fn module(self, db: &dyn HirDatabase) -> Module {
@@ -1385,6 +1387,7 @@ impl DefWithBody {
DefWithBody::Function(f) => f.module(db),
DefWithBody::Static(s) => s.module(db),
DefWithBody::Variant(v) => v.module(db),
+ DefWithBody::InTypeConst(c) => c.module(db),
}
}
@@ -1394,6 +1397,7 @@ impl DefWithBody {
DefWithBody::Static(s) => Some(s.name(db)),
DefWithBody::Const(c) => c.name(db),
DefWithBody::Variant(v) => Some(v.name(db)),
+ DefWithBody::InTypeConst(_) => None,
}
}
@@ -1404,6 +1408,11 @@ impl DefWithBody {
DefWithBody::Static(it) => it.ty(db),
DefWithBody::Const(it) => it.ty(db),
DefWithBody::Variant(it) => it.parent.variant_body_ty(db),
+ DefWithBody::InTypeConst(it) => Type::new_with_resolver_inner(
+ db,
+ &DefWithBodyId::from(it.id).resolver(db.upcast()),
+ TyKind::Error.intern(Interner),
+ ),
}
}
@@ -1413,6 +1422,7 @@ impl DefWithBody {
DefWithBody::Static(it) => it.id.into(),
DefWithBody::Const(it) => it.id.into(),
DefWithBody::Variant(it) => it.into(),
+ DefWithBody::InTypeConst(it) => it.id.into(),
}
}
@@ -1797,6 +1807,8 @@ impl DefWithBody {
DefWithBody::Static(it) => it.into(),
DefWithBody::Const(it) => it.into(),
DefWithBody::Variant(it) => it.into(),
+ // FIXME: don't ignore diagnostics for in type const
+ DefWithBody::InTypeConst(_) => return,
};
for diag in hir_ty::diagnostics::incorrect_case(db, krate, def.into()) {
acc.push(diag.into())
@@ -2086,6 +2098,17 @@ impl HasVisibility for Function {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InTypeConst {
+ pub(crate) id: InTypeConstId,
+}
+
+impl InTypeConst {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).owner.module(db.upcast()) }
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Const {
pub(crate) id: ConstId,
}
@@ -2515,7 +2538,7 @@ impl AsAssocItem for DefWithBody {
match self {
DefWithBody::Function(it) => it.as_assoc_item(db),
DefWithBody::Const(it) => it.as_assoc_item(db),
- DefWithBody::Static(_) | DefWithBody::Variant(_) => None,
+ DefWithBody::Static(_) | DefWithBody::Variant(_) | DefWithBody::InTypeConst(_) => None,
}
}
}
@@ -2641,14 +2664,22 @@ impl GenericDef {
Either::Right(x) => GenericParam::TypeParam(x),
}
});
- let lt_params = generics
+ self.lifetime_params(db)
+ .into_iter()
+ .map(GenericParam::LifetimeParam)
+ .chain(ty_params)
+ .collect()
+ }
+
+ pub fn lifetime_params(self, db: &dyn HirDatabase) -> Vec<LifetimeParam> {
+ let generics = db.generic_params(self.into());
+ generics
.lifetimes
.iter()
.map(|(local_id, _)| LifetimeParam {
id: LifetimeParamId { parent: self.into(), local_id },
})
- .map(GenericParam::LifetimeParam);
- lt_params.chain(ty_params).collect()
+ .collect()
}
pub fn type_params(self, db: &dyn HirDatabase) -> Vec<TypeOrConstParam> {
@@ -2769,6 +2800,22 @@ impl Local {
/// All definitions for this local. Example: `let (a$0, _) | (_, a$0) = x;`
pub fn sources(self, db: &dyn HirDatabase) -> Vec<LocalSource> {
let (body, source_map) = db.body_with_source_map(self.parent);
+ self.sources_(db, &body, &source_map).collect()
+ }
+
+ /// The leftmost definition for this local. Example: `let (a$0, _) | (_, a) = x;`
+ pub fn primary_source(self, db: &dyn HirDatabase) -> LocalSource {
+ let (body, source_map) = db.body_with_source_map(self.parent);
+ let src = self.sources_(db, &body, &source_map).next().unwrap();
+ src
+ }
+
+ fn sources_<'a>(
+ self,
+ db: &'a dyn HirDatabase,
+ body: &'a hir_def::body::Body,
+ source_map: &'a hir_def::body::BodySourceMap,
+ ) -> impl Iterator<Item = LocalSource> + 'a {
body[self.binding_id]
.definitions
.iter()
@@ -2781,14 +2828,7 @@ impl Local {
Either::Right(it) => Either::Right(it.to_node(&root)),
})
})
- .map(|source| LocalSource { local: self, source })
- .collect()
- }
-
- /// The leftmost definition for this local. Example: `let (a$0, _) | (_, a) = x;`
- pub fn primary_source(self, db: &dyn HirDatabase) -> LocalSource {
- let all_sources = self.sources(db);
- all_sources.into_iter().next().unwrap()
+ .map(move |source| LocalSource { local: self, source })
}
}
@@ -3494,6 +3534,14 @@ impl Type {
}
}
+ pub fn is_scalar(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Scalar(_))
+ }
+
+ pub fn is_tuple(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Tuple(..))
+ }
+
pub fn remove_ref(&self) -> Option<Type> {
match &self.ty.kind(Interner) {
TyKind::Ref(.., ty) => Some(self.derived(ty.clone())),
@@ -3779,14 +3827,16 @@ impl Type {
}
}
- pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
+ /// Returns types that this type dereferences to (including this type itself). The returned
+ /// iterator won't yield the same type more than once even if the deref chain contains a cycle.
+ pub fn autoderef(&self, db: &dyn HirDatabase) -> impl Iterator<Item = Type> + '_ {
self.autoderef_(db).map(move |ty| self.derived(ty))
}
- fn autoderef_<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Ty> + 'a {
+ fn autoderef_(&self, db: &dyn HirDatabase) -> impl Iterator<Item = Ty> {
// There should be no inference vars in types passed here
let canonical = hir_ty::replace_errors_with_variables(&self.ty);
- autoderef(db, self.env.clone(), canonical).map(|canonical| canonical.value)
+ autoderef(db, self.env.clone(), canonical)
}
// This would be nicer if it just returned an iterator, but that runs into
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 2d2b00b147..5a76a9185a 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -483,10 +483,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.scope_at_offset(node, offset)
}
- pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
- self.imp.scope_for_def(def)
- }
-
pub fn assert_contains_node(&self, node: &SyntaxNode) {
self.imp.assert_contains_node(node)
}
@@ -1074,8 +1070,12 @@ impl<'db> SemanticsImpl<'db> {
fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
let analyze = self.analyze(ty.syntax())?;
let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id);
- let ty = hir_ty::TyLoweringContext::new(self.db, &analyze.resolver)
- .lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
+ let ty = hir_ty::TyLoweringContext::new(
+ self.db,
+ &analyze.resolver,
+ analyze.resolver.module().into(),
+ )
+ .lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
}
@@ -1307,12 +1307,6 @@ impl<'db> SemanticsImpl<'db> {
)
}
- fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
- let file_id = self.db.lookup_intern_trait(def.id).id.file_id();
- let resolver = def.id.resolver(self.db.upcast());
- SemanticsScope { db: self.db, file_id, resolver }
- }
-
fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
where
Def::Ast: AstNode,
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 1374fa332c..ecb1b306a6 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -38,8 +38,8 @@ use hir_ty::{
UnsafeExpr,
},
lang_items::lang_items_for_bin_op,
- method_resolution::{self},
- Adjustment, InferenceResult, Interner, Substitution, Ty, TyExt, TyKind, TyLoweringContext,
+ method_resolution, Adjustment, InferenceResult, Interner, Substitution, Ty, TyExt, TyKind,
+ TyLoweringContext,
};
use itertools::Itertools;
use smallvec::SmallVec;
@@ -978,7 +978,8 @@ fn resolve_hir_path_(
let types = || {
let (ty, unresolved) = match path.type_anchor() {
Some(type_ref) => {
- let (_, res) = TyLoweringContext::new(db, resolver).lower_ty_ext(type_ref);
+ let (_, res) = TyLoweringContext::new(db, resolver, resolver.module().into())
+ .lower_ty_ext(type_ref);
res.map(|ty_ns| (ty_ns, path.segments().first()))
}
None => {
diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs
index 207e8206c9..43d957412b 100644
--- a/crates/hir/src/symbols.rs
+++ b/crates/hir/src/symbols.rs
@@ -233,6 +233,7 @@ impl<'a> SymbolCollector<'a> {
DefWithBodyId::VariantId(id) => {
Some(self.db.enum_data(id.parent).variants[id.local_id].name.to_smol_str())
}
+ DefWithBodyId::InTypeConstId(_) => Some("in type const".into()),
}
}
diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index 6340feda45..d07c637262 100644
--- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -1,13 +1,9 @@
use hir::HasSource;
-use ide_db::syntax_helpers::insert_whitespace_into_node::insert_ws_into;
use syntax::ast::{self, make, AstNode};
use crate::{
assist_context::{AssistContext, Assists},
- utils::{
- add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, render_snippet,
- Cursor, DefaultMethods,
- },
+ utils::{add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, DefaultMethods},
AssistId, AssistKind,
};
@@ -130,50 +126,36 @@ fn add_missing_impl_members_inner(
}
let target = impl_def.syntax().text_range();
- acc.add(AssistId(assist_id, AssistKind::QuickFix), label, target, |builder| {
- let missing_items = missing_items
- .into_iter()
- .map(|it| {
- if ctx.sema.hir_file_for(it.syntax()).is_macro() {
- if let Some(it) = ast::AssocItem::cast(insert_ws_into(it.syntax().clone())) {
- return it;
- }
- }
- it.clone_for_update()
- })
- .collect();
- let (new_impl_def, first_new_item) = add_trait_assoc_items_to_impl(
+ acc.add(AssistId(assist_id, AssistKind::QuickFix), label, target, |edit| {
+ let new_impl_def = edit.make_mut(impl_def.clone());
+ let first_new_item = add_trait_assoc_items_to_impl(
&ctx.sema,
- missing_items,
+ &missing_items,
trait_,
- impl_def.clone(),
+ &new_impl_def,
target_scope,
);
- match ctx.config.snippet_cap {
- None => builder.replace(target, new_impl_def.to_string()),
- Some(cap) => {
- let mut cursor = Cursor::Before(first_new_item.syntax());
- let placeholder;
- if let DefaultMethods::No = mode {
- if let ast::AssocItem::Fn(func) = &first_new_item {
- if try_gen_trait_body(ctx, func, trait_ref, &impl_def).is_none() {
- if let Some(m) =
- func.syntax().descendants().find_map(ast::MacroCall::cast)
- {
- if m.syntax().text() == "todo!()" {
- placeholder = m;
- cursor = Cursor::Replace(placeholder.syntax());
- }
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ let mut placeholder = None;
+ if let DefaultMethods::No = mode {
+ if let ast::AssocItem::Fn(func) = &first_new_item {
+ if try_gen_trait_body(ctx, func, trait_ref, &impl_def).is_none() {
+ if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
+ {
+ if m.syntax().text() == "todo!()" {
+ placeholder = Some(m);
}
}
}
}
- builder.replace_snippet(
- cap,
- target,
- render_snippet(cap, new_impl_def.syntax(), cursor),
- )
}
+
+ if let Some(macro_call) = placeholder {
+ edit.add_placeholder_snippet(cap, macro_call);
+ } else {
+ edit.add_tabstop_before(cap, first_new_item);
+ };
};
})
}
@@ -366,6 +348,125 @@ impl<U> Foo<U> for S {
}
#[test]
+ fn test_lifetime_substitution() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Trait<'a, 'b, A, B, C> {
+ fn foo(&self, one: &'a A, anoter: &'b B) -> &'a C;
+}
+
+impl<'x, 'y, T, V, U> Trait<'x, 'y, T, V, U> for () {$0}"#,
+ r#"
+pub trait Trait<'a, 'b, A, B, C> {
+ fn foo(&self, one: &'a A, anoter: &'b B) -> &'a C;
+}
+
+impl<'x, 'y, T, V, U> Trait<'x, 'y, T, V, U> for () {
+ fn foo(&self, one: &'x T, anoter: &'y V) -> &'x U {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_lifetime_substitution_with_body() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+pub trait Trait<'a, 'b, A, B, C: Default> {
+ fn foo(&self, _one: &'a A, _anoter: &'b B) -> (C, &'a i32) {
+ let value: &'a i32 = &0;
+ (C::default(), value)
+ }
+}
+
+impl<'x, 'y, T, V, U: Default> Trait<'x, 'y, T, V, U> for () {$0}"#,
+ r#"
+pub trait Trait<'a, 'b, A, B, C: Default> {
+ fn foo(&self, _one: &'a A, _anoter: &'b B) -> (C, &'a i32) {
+ let value: &'a i32 = &0;
+ (C::default(), value)
+ }
+}
+
+impl<'x, 'y, T, V, U: Default> Trait<'x, 'y, T, V, U> for () {
+ $0fn foo(&self, _one: &'x T, _anoter: &'y V) -> (U, &'x i32) {
+ let value: &'x i32 = &0;
+ (<U>::default(), value)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_const_substitution() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+struct Bar<const: N: bool> {
+ bar: [i32, N]
+}
+
+trait Foo<const N: usize, T> {
+ fn get_n_sq(&self, arg: &T) -> usize { N * N }
+ fn get_array(&self, arg: Bar<N>) -> [i32; N] { [1; N] }
+}
+
+struct S<T> {
+ wrapped: T
+}
+
+impl<const X: usize, Y, Z> Foo<X, Z> for S<Y> {
+ $0
+}"#,
+ r#"
+struct Bar<const: N: bool> {
+ bar: [i32, N]
+}
+
+trait Foo<const N: usize, T> {
+ fn get_n_sq(&self, arg: &T) -> usize { N * N }
+ fn get_array(&self, arg: Bar<N>) -> [i32; N] { [1; N] }
+}
+
+struct S<T> {
+ wrapped: T
+}
+
+impl<const X: usize, Y, Z> Foo<X, Z> for S<Y> {
+ $0fn get_n_sq(&self, arg: &Z) -> usize { X * X }
+
+ fn get_array(&self, arg: Bar<X>) -> [i32; X] { [1; X] }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_const_substitution_2() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+trait Foo<const N: usize, const M: usize, T> {
+ fn get_sum(&self, arg: &T) -> usize { N + M }
+}
+
+impl<X> Foo<42, {20 + 22}, X> for () {
+ $0
+}"#,
+ r#"
+trait Foo<const N: usize, const M: usize, T> {
+ fn get_sum(&self, arg: &T) -> usize { N + M }
+}
+
+impl<X> Foo<42, {20 + 22}, X> for () {
+ $0fn get_sum(&self, arg: &X) -> usize { 42 + {20 + 22} }
+}"#,
+ )
+ }
+
+ #[test]
fn test_cursor_after_empty_impl_def() {
check_assist(
add_missing_impl_members,
@@ -747,6 +848,115 @@ impl Foo<T> for S<T> {
}
#[test]
+ fn test_qualify_generic_default_parameter() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod m {
+ pub struct S;
+ pub trait Foo<T = S> {
+ fn bar(&self, other: &T);
+ }
+}
+
+struct S;
+impl m::Foo for S { $0 }"#,
+ r#"
+mod m {
+ pub struct S;
+ pub trait Foo<T = S> {
+ fn bar(&self, other: &T);
+ }
+}
+
+struct S;
+impl m::Foo for S {
+ fn bar(&self, other: &m::S) {
+ ${0:todo!()}
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_qualify_generic_default_parameter_2() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod m {
+ pub struct Wrapper<T, V> {
+ one: T,
+ another: V
+ };
+ pub struct S;
+ pub trait Foo<T = Wrapper<S, bool>> {
+ fn bar(&self, other: &T);
+ }
+}
+
+struct S;
+impl m::Foo for S { $0 }"#,
+ r#"
+mod m {
+ pub struct Wrapper<T, V> {
+ one: T,
+ another: V
+ };
+ pub struct S;
+ pub trait Foo<T = Wrapper<S, bool>> {
+ fn bar(&self, other: &T);
+ }
+}
+
+struct S;
+impl m::Foo for S {
+ fn bar(&self, other: &m::Wrapper<m::S, bool>) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_generic_default_parameter_3() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod m {
+ pub struct Wrapper<T, V> {
+ one: T,
+ another: V
+ };
+ pub struct S;
+ pub trait Foo<T = S, V = Wrapper<T, S>> {
+ fn bar(&self, other: &V);
+ }
+}
+
+struct S;
+impl m::Foo for S { $0 }"#,
+ r#"
+mod m {
+ pub struct Wrapper<T, V> {
+ one: T,
+ another: V
+ };
+ pub struct S;
+ pub trait Foo<T = S, V = Wrapper<T, S>> {
+ fn bar(&self, other: &V);
+ }
+}
+
+struct S;
+impl m::Foo for S {
+ fn bar(&self, other: &m::Wrapper<m::S, m::S>) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
fn test_assoc_type_bounds_are_removed() {
check_assist(
add_missing_impl_members,
@@ -1683,4 +1893,77 @@ impl m::Foo for S {
}"#,
)
}
+
+ #[test]
+ fn nested_macro_should_not_cause_crash() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+macro_rules! ty { () => { i32 } }
+trait SomeTrait { type Output; }
+impl SomeTrait for i32 { type Output = i64; }
+macro_rules! define_method {
+ () => {
+ fn method(&mut self, params: <ty!() as SomeTrait>::Output);
+ };
+}
+trait AnotherTrait { define_method!(); }
+impl $0AnotherTrait for () {
+}
+"#,
+ r#"
+macro_rules! ty { () => { i32 } }
+trait SomeTrait { type Output; }
+impl SomeTrait for i32 { type Output = i64; }
+macro_rules! define_method {
+ () => {
+ fn method(&mut self, params: <ty!() as SomeTrait>::Output);
+ };
+}
+trait AnotherTrait { define_method!(); }
+impl AnotherTrait for () {
+ $0fn method(&mut self,params: <ty!()as SomeTrait>::Output) {
+ todo!()
+ }
+}
+"#,
+ );
+ }
+
+ // FIXME: `T` in `ty!(T)` should be replaced by `PathTransform`.
+ #[test]
+ fn paths_in_nested_macro_should_get_transformed() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+macro_rules! ty { ($me:ty) => { $me } }
+trait SomeTrait { type Output; }
+impl SomeTrait for i32 { type Output = i64; }
+macro_rules! define_method {
+ ($t:ty) => {
+ fn method(&mut self, params: <ty!($t) as SomeTrait>::Output);
+ };
+}
+trait AnotherTrait<T: SomeTrait> { define_method!(T); }
+impl $0AnotherTrait<i32> for () {
+}
+"#,
+ r#"
+macro_rules! ty { ($me:ty) => { $me } }
+trait SomeTrait { type Output; }
+impl SomeTrait for i32 { type Output = i64; }
+macro_rules! define_method {
+ ($t:ty) => {
+ fn method(&mut self, params: <ty!($t) as SomeTrait>::Output);
+ };
+}
+trait AnotherTrait<T: SomeTrait> { define_method!(T); }
+impl AnotherTrait<i32> for () {
+ $0fn method(&mut self,params: <ty!(T)as SomeTrait>::Output) {
+ todo!()
+ }
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/extract_type_alias.rs b/crates/ide-assists/src/handlers/extract_type_alias.rs
index b310c2db9f..b6e7d6209c 100644
--- a/crates/ide-assists/src/handlers/extract_type_alias.rs
+++ b/crates/ide-assists/src/handlers/extract_type_alias.rs
@@ -1,6 +1,9 @@
use either::Either;
use ide_db::syntax_helpers::node_ext::walk_ty;
-use syntax::ast::{self, edit::IndentLevel, make, AstNode, HasGenericParams, HasName};
+use syntax::{
+ ast::{self, edit::IndentLevel, make, AstNode, HasGenericParams, HasName},
+ ted,
+};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -34,14 +37,16 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
|| item.syntax(),
|impl_| impl_.as_ref().either(AstNode::syntax, AstNode::syntax),
);
- let insert_pos = node.text_range().start();
let target = ty.syntax().text_range();
acc.add(
AssistId("extract_type_alias", AssistKind::RefactorExtract),
"Extract type as type alias",
target,
- |builder| {
+ |edit| {
+ let node = edit.make_syntax_mut(node.clone());
+ let target_ty = edit.make_mut(ty.clone());
+
let mut known_generics = match item.generic_param_list() {
Some(it) => it.generic_params().collect(),
None => Vec::new(),
@@ -56,27 +61,29 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let generic_params =
generics.map(|it| make::generic_param_list(it.into_iter().cloned()));
+ // Replace original type with the alias
let ty_args = generic_params
.as_ref()
.map_or(String::new(), |it| it.to_generic_args().to_string());
- let replacement = format!("Type{ty_args}");
- builder.replace(target, replacement);
-
- let indent = IndentLevel::from_node(node);
- let generic_params = generic_params.map_or(String::new(), |it| it.to_string());
- match ctx.config.snippet_cap {
- Some(cap) => {
- builder.insert_snippet(
- cap,
- insert_pos,
- format!("type $0Type{generic_params} = {ty};\n\n{indent}"),
- );
- }
- None => {
- builder.insert(
- insert_pos,
- format!("type Type{generic_params} = {ty};\n\n{indent}"),
- );
+ // FIXME: replace with a `ast::make` constructor
+ let new_ty = make::ty(&format!("Type{ty_args}")).clone_for_update();
+ ted::replace(target_ty.syntax(), new_ty.syntax());
+
+ // Insert new alias
+ let indent = IndentLevel::from_node(&node);
+ let ty_alias = make::ty_alias("Type", generic_params, None, None, Some((ty, None)))
+ .clone_for_update();
+ ted::insert_all(
+ ted::Position::before(node),
+ vec![
+ ty_alias.syntax().clone().into(),
+ make::tokens::whitespace(&format!("\n\n{indent}")).into(),
+ ],
+ );
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ if let Some(name) = ty_alias.name() {
+ edit.add_tabstop_before(cap, name);
}
}
},
@@ -151,7 +158,7 @@ fn collect_used_generics<'gp>(
.and_then(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
),
ast::Type::ArrayType(ar) => {
- if let Some(ast::Expr::PathExpr(p)) = ar.expr() {
+ if let Some(ast::Expr::PathExpr(p)) = ar.const_arg().and_then(|x| x.expr()) {
if let Some(path) = p.path() {
if let Some(name_ref) = path.as_single_name_ref() {
if let Some(param) = known_generics.iter().find(|gp| {
diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index 3667fc375b..b68c766e64 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -72,29 +72,27 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
let krate = ty.krate(ctx.db());
ty.iterate_assoc_items(ctx.db(), krate, |item| {
if let hir::AssocItem::Function(f) = item {
+ let name = f.name(ctx.db());
if f.self_param(ctx.db()).is_some()
&& f.is_visible_from(ctx.db(), current_module)
- && seen_names.insert(f.name(ctx.db()))
+ && seen_names.insert(name.clone())
{
- methods.push(f)
+ methods.push((name, f))
}
}
Option::<()>::None
});
}
-
- for method in methods {
+ methods.sort_by(|(a, _), (b, _)| a.cmp(b));
+ for (name, method) in methods {
let adt = ast::Adt::Struct(strukt.clone());
- let name = method.name(ctx.db()).display(ctx.db()).to_string();
+ let name = name.display(ctx.db()).to_string();
// if `find_struct_impl` returns None, that means that a function named `name` already exists.
- let Some(impl_def) = find_struct_impl(ctx, &adt, &[name]) else { continue; };
+ let Some(impl_def) = find_struct_impl(ctx, &adt, std::slice::from_ref(&name)) else { continue; };
acc.add_group(
&GroupLabel("Generate delegate methods…".to_owned()),
AssistId("generate_delegate_methods", AssistKind::Generate),
- format!(
- "Generate delegate for `{field_name}.{}()`",
- method.name(ctx.db()).display(ctx.db())
- ),
+ format!("Generate delegate for `{field_name}.{name}()`",),
target,
|builder| {
// Create the function
@@ -102,9 +100,8 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
Some(source) => source.value,
None => return,
};
- let method_name = method.name(ctx.db());
let vis = method_source.visibility();
- let name = make::name(&method.name(ctx.db()).display(ctx.db()).to_string());
+ let fn_name = make::name(&name);
let params =
method_source.param_list().unwrap_or_else(|| make::param_list(None, []));
let type_params = method_source.generic_param_list();
@@ -114,7 +111,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
};
let tail_expr = make::expr_method_call(
make::ext::field_from_idents(["self", &field_name]).unwrap(), // This unwrap is ok because we have at least 1 arg in the list
- make::name_ref(&method_name.display(ctx.db()).to_string()),
+ make::name_ref(&name),
arg_list,
);
let ret_type = method_source.ret_type();
@@ -126,7 +123,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
let body = make::block_expr([], Some(tail_expr_finished));
let f = make::fn_(
vis,
- name,
+ fn_name,
type_params,
None,
params,
diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs
index 850be21c30..c579f6780d 100644
--- a/crates/ide-assists/src/handlers/generate_function.rs
+++ b/crates/ide-assists/src/handlers/generate_function.rs
@@ -291,12 +291,9 @@ impl FunctionBuilder {
let await_expr = call.syntax().parent().and_then(ast::AwaitExpr::cast);
let is_async = await_expr.is_some();
- let (ret_type, should_focus_return_type) = make_return_type(
- ctx,
- &ast::Expr::CallExpr(call.clone()),
- target_module,
- &mut necessary_generic_params,
- );
+ let expr_for_ret_ty = await_expr.map_or_else(|| call.clone().into(), |it| it.into());
+ let (ret_type, should_focus_return_type) =
+ make_return_type(ctx, &expr_for_ret_ty, target_module, &mut necessary_generic_params);
let (generic_param_list, where_clause) =
fn_generic_params(ctx, necessary_generic_params, &target)?;
@@ -338,12 +335,9 @@ impl FunctionBuilder {
let await_expr = call.syntax().parent().and_then(ast::AwaitExpr::cast);
let is_async = await_expr.is_some();
- let (ret_type, should_focus_return_type) = make_return_type(
- ctx,
- &ast::Expr::MethodCallExpr(call.clone()),
- target_module,
- &mut necessary_generic_params,
- );
+ let expr_for_ret_ty = await_expr.map_or_else(|| call.clone().into(), |it| it.into());
+ let (ret_type, should_focus_return_type) =
+ make_return_type(ctx, &expr_for_ret_ty, target_module, &mut necessary_generic_params);
let (generic_param_list, where_clause) =
fn_generic_params(ctx, necessary_generic_params, &target)?;
@@ -429,12 +423,12 @@ impl FunctionBuilder {
/// user can change the `todo!` function body.
fn make_return_type(
ctx: &AssistContext<'_>,
- call: &ast::Expr,
+ expr: &ast::Expr,
target_module: Module,
necessary_generic_params: &mut FxHashSet<hir::GenericParam>,
) -> (Option<ast::RetType>, bool) {
let (ret_ty, should_focus_return_type) = {
- match ctx.sema.type_of_expr(call).map(TypeInfo::original) {
+ match ctx.sema.type_of_expr(expr).map(TypeInfo::original) {
Some(ty) if ty.is_unknown() => (Some(make::ty_placeholder()), true),
None => (Some(make::ty_placeholder()), true),
Some(ty) if ty.is_unit() => (None, false),
@@ -2268,13 +2262,13 @@ impl Foo {
check_assist(
generate_function,
r"
-fn foo() {
- $0bar(42).await();
+async fn foo() {
+ $0bar(42).await;
}
",
r"
-fn foo() {
- bar(42).await();
+async fn foo() {
+ bar(42).await;
}
async fn bar(arg: i32) ${0:-> _} {
@@ -2285,6 +2279,28 @@ async fn bar(arg: i32) ${0:-> _} {
}
#[test]
+ fn return_type_for_async_fn() {
+ check_assist(
+ generate_function,
+ r"
+//- minicore: result
+async fn foo() {
+ if Err(()) = $0bar(42).await {}
+}
+",
+ r"
+async fn foo() {
+ if Err(()) = bar(42).await {}
+}
+
+async fn bar(arg: i32) -> Result<_, ()> {
+ ${0:todo!()}
+}
+",
+ );
+ }
+
+ #[test]
fn create_method() {
check_assist(
generate_function,
@@ -2402,6 +2418,31 @@ fn foo() {S.bar();}
}
#[test]
+ fn create_async_method() {
+ check_assist(
+ generate_function,
+ r"
+//- minicore: result
+struct S;
+async fn foo() {
+ if let Err(()) = S.$0bar(42).await {}
+}
+",
+ r"
+struct S;
+impl S {
+ async fn bar(&self, arg: i32) -> Result<_, ()> {
+ ${0:todo!()}
+ }
+}
+async fn foo() {
+ if let Err(()) = S.bar(42).await {}
+}
+",
+ )
+ }
+
+ #[test]
fn create_static_method() {
check_assist(
generate_function,
@@ -2422,6 +2463,31 @@ fn foo() {S::bar();}
}
#[test]
+ fn create_async_static_method() {
+ check_assist(
+ generate_function,
+ r"
+//- minicore: result
+struct S;
+async fn foo() {
+ if let Err(()) = S::$0bar(42).await {}
+}
+",
+ r"
+struct S;
+impl S {
+ async fn bar(arg: i32) -> Result<_, ()> {
+ ${0:todo!()}
+ }
+}
+async fn foo() {
+ if let Err(()) = S::bar(42).await {}
+}
+",
+ )
+ }
+
+ #[test]
fn create_generic_static_method() {
check_assist(
generate_function,
diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs
index 28d815e81b..797180fa18 100644
--- a/crates/ide-assists/src/handlers/inline_call.rs
+++ b/crates/ide-assists/src/handlers/inline_call.rs
@@ -958,7 +958,6 @@ fn main() {
);
}
- // FIXME: const generics aren't being substituted, this is blocked on better support for them
#[test]
fn inline_substitutes_generics() {
check_assist(
@@ -982,7 +981,7 @@ fn foo<T, const N: usize>() {
fn bar<U, const M: usize>() {}
fn main() {
- bar::<usize, N>();
+ bar::<usize, {0}>();
}
"#,
);
diff --git a/crates/ide-assists/src/handlers/inline_const_as_literal.rs b/crates/ide-assists/src/handlers/inline_const_as_literal.rs
new file mode 100644
index 0000000000..5b1540b50c
--- /dev/null
+++ b/crates/ide-assists/src/handlers/inline_const_as_literal.rs
@@ -0,0 +1,722 @@
+use syntax::{ast, AstNode};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: inline_const_as_literal
+//
+// Evaluate and inline const variable as literal.
+//
+// ```
+// const STRING: &str = "Hello, World!";
+//
+// fn something() -> &'static str {
+// STRING$0
+// }
+// ```
+// ->
+// ```
+// const STRING: &str = "Hello, World!";
+//
+// fn something() -> &'static str {
+// "Hello, World!"
+// }
+// ```
+pub(crate) fn inline_const_as_literal(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let variable = ctx.find_node_at_offset::<ast::PathExpr>()?;
+
+ if let hir::PathResolution::Def(hir::ModuleDef::Const(konst)) =
+ ctx.sema.resolve_path(&variable.path()?)?
+ {
+ let konst_ty = konst.ty(ctx.sema.db);
+
+ // Used as the upper limit for recursive calls if no TCO is available
+ let fuel = 20;
+
+        // There is no way to have a const static reference to a type that contains an interior
+ // mutability cell.
+
+ // FIXME: Add support to handle type aliases for builtin scalar types.
+ validate_type_recursively(ctx, Some(&konst_ty), false, fuel)?;
+
+ let expr = konst.value(ctx.sema.db)?;
+
+ let value = match expr {
+ ast::Expr::BlockExpr(_)
+ | ast::Expr::Literal(_)
+ | ast::Expr::RefExpr(_)
+ | ast::Expr::ArrayExpr(_)
+ | ast::Expr::TupleExpr(_)
+ | ast::Expr::IfExpr(_)
+ | ast::Expr::ParenExpr(_)
+ | ast::Expr::MatchExpr(_)
+ | ast::Expr::MacroExpr(_)
+ | ast::Expr::BinExpr(_)
+ | ast::Expr::CallExpr(_) => match konst.render_eval(ctx.sema.db) {
+ Ok(result) => result,
+ Err(_) => return None,
+ },
+ _ => return None,
+ };
+
+ let id = AssistId("inline_const_as_literal", AssistKind::RefactorInline);
+
+ let label = format!("Inline const as literal");
+ let target = variable.syntax().text_range();
+
+ return acc.add(id, label, target, |edit| {
+ edit.replace(variable.syntax().text_range(), value);
+ });
+ }
+ None
+}
+
+fn validate_type_recursively(
+ ctx: &AssistContext<'_>,
+ ty_hir: Option<&hir::Type>,
+ refed: bool,
+ fuel: i32,
+) -> Option<()> {
+ match (fuel > 0, ty_hir) {
+ (true, Some(ty)) if ty.is_reference() => validate_type_recursively(
+ ctx,
+ ty.as_reference().map(|(ty, _)| ty).as_ref(),
+ true,
+ // FIXME: Saving fuel when `&` repeating might not be a good idea if there's no TCO.
+ if refed { fuel } else { fuel - 1 },
+ ),
+ (true, Some(ty)) if ty.is_array() => validate_type_recursively(
+ ctx,
+ ty.as_array(ctx.db()).map(|(ty, _)| ty).as_ref(),
+ false,
+ fuel - 1,
+ ),
+ (true, Some(ty)) if ty.is_tuple() => ty
+ .tuple_fields(ctx.db())
+ .iter()
+ .all(|ty| validate_type_recursively(ctx, Some(ty), false, fuel - 1).is_some())
+ .then_some(()),
+ (true, Some(ty)) if refed && ty.is_slice() => {
+ validate_type_recursively(ctx, ty.as_slice().as_ref(), false, fuel - 1)
+ }
+ (_, Some(ty)) => match ty.as_builtin() {
+ // `const A: str` is not correct, but `const A: &builtin` is.
+ Some(builtin) if refed || (!refed && !builtin.is_str()) => Some(()),
+ _ => None,
+ },
+ _ => None,
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ const NUMBER: u8 = 1;
+ const BOOL: u8 = 2;
+ const STR: u8 = 4;
+ const CHAR: u8 = 8;
+
+ const TEST_PAIRS: &[(&str, &str, u8)] = &[
+ ("u8", "0", NUMBER),
+ ("u16", "0", NUMBER),
+ ("u32", "0", NUMBER),
+ ("u64", "0", NUMBER),
+ ("u128", "0", NUMBER),
+ ("usize", "0", NUMBER),
+ ("i8", "0", NUMBER),
+ ("i16", "0", NUMBER),
+ ("i32", "0", NUMBER),
+ ("i64", "0", NUMBER),
+ ("i128", "0", NUMBER),
+ ("isize", "0", NUMBER),
+ ("bool", "false", BOOL),
+ ("&str", "\"str\"", STR),
+ ("char", "'c'", CHAR),
+ ];
+
+ // -----------Not supported-----------
+ #[test]
+ fn inline_const_as_literal_const_fn_call_slice() {
+ TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
+ check_assist_not_applicable(
+ inline_const_as_literal,
+ &format!(
+ r#"
+ const fn abc() -> &[{ty}] {{ &[{val}] }}
+ const ABC: &[{ty}] = abc();
+ fn a() {{ A$0BC }}
+ "#
+ ),
+ );
+ });
+ }
+
+ #[test]
+ fn inline_const_as_literal_expr_as_str_lit_not_applicable_const() {
+ check_assist_not_applicable(
+ inline_const_as_literal,
+ r#"
+ const STR$0ING: &str = "Hello, World!";
+
+ fn something() -> &'static str {
+ STRING
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_struct_() {
+ check_assist_not_applicable(
+ inline_const_as_literal,
+ r#"
+ struct A;
+ const STRUKT: A = A;
+
+ fn something() -> A {
+ STRU$0KT
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_enum_() {
+ check_assist_not_applicable(
+ inline_const_as_literal,
+ r#"
+ enum A { A, B, C }
+ const ENUM: A = A::A;
+
+ fn something() -> A {
+ EN$0UM
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_tuple_closure() {
+ check_assist_not_applicable(
+ inline_const_as_literal,
+ r#"
+ const CLOSURE: (&dyn Fn(i32) -> i32) = (&|num| -> i32 { num });
+ fn something() -> (&dyn Fn(i32) -> i32) {
+ STRU$0KT
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_closure_() {
+ check_assist_not_applicable(
+ inline_const_as_literal,
+ r#"
+ const CLOSURE: &dyn Fn(i32) -> i32 = &|num| -> i32 { num };
+ fn something() -> &dyn Fn(i32) -> i32 {
+ STRU$0KT
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_fn_() {
+ check_assist_not_applicable(
+ inline_const_as_literal,
+ r#"
+ struct S(i32);
+ const CON: fn(i32) -> S = S;
+ fn something() {
+ let x = CO$0N;
+ }
+ "#,
+ );
+ }
+
+ // ----------------------------
+
+ #[test]
+ fn inline_const_as_literal_const_expr() {
+ TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
+ check_assist(
+ inline_const_as_literal,
+ &format!(
+ r#"
+ const ABC: {ty} = {val};
+ fn a() {{ A$0BC }}
+ "#
+ ),
+ &format!(
+ r#"
+ const ABC: {ty} = {val};
+ fn a() {{ {val} }}
+ "#
+ ),
+ );
+ });
+ }
+
+ #[test]
+ fn inline_const_as_literal_const_block_expr() {
+ TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
+ check_assist(
+ inline_const_as_literal,
+ &format!(
+ r#"
+ const ABC: {ty} = {{ {val} }};
+ fn a() {{ A$0BC }}
+ "#
+ ),
+ &format!(
+ r#"
+ const ABC: {ty} = {{ {val} }};
+ fn a() {{ {val} }}
+ "#
+ ),
+ );
+ });
+ }
+
+ #[test]
+ fn inline_const_as_literal_const_block_eval_expr() {
+ TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
+ check_assist(
+ inline_const_as_literal,
+ &format!(
+ r#"
+ const ABC: {ty} = {{ true; {val} }};
+ fn a() {{ A$0BC }}
+ "#
+ ),
+ &format!(
+ r#"
+ const ABC: {ty} = {{ true; {val} }};
+ fn a() {{ {val} }}
+ "#
+ ),
+ );
+ });
+ }
+
+ #[test]
+ fn inline_const_as_literal_const_block_eval_block_expr() {
+ TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
+ check_assist(
+ inline_const_as_literal,
+ &format!(
+ r#"
+ const ABC: {ty} = {{ true; {{ {val} }} }};
+ fn a() {{ A$0BC }}
+ "#
+ ),
+ &format!(
+ r#"
+ const ABC: {ty} = {{ true; {{ {val} }} }};
+ fn a() {{ {val} }}
+ "#
+ ),
+ );
+ });
+ }
+
+ #[test]
+ fn inline_const_as_literal_const_fn_call_block_nested_builtin() {
+ TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
+ check_assist(
+ inline_const_as_literal,
+ &format!(
+ r#"
+ const fn abc() -> {ty} {{ {{ {{ {{ {val} }} }} }} }}
+ const ABC: {ty} = abc();
+ fn a() {{ A$0BC }}
+ "#
+ ),
+ &format!(
+ r#"
+ const fn abc() -> {ty} {{ {{ {{ {{ {val} }} }} }} }}
+ const ABC: {ty} = abc();
+ fn a() {{ {val} }}
+ "#
+ ),
+ );
+ });
+ }
+
+ #[test]
+ fn inline_const_as_literal_const_fn_call_tuple() {
+ TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
+ check_assist(
+ inline_const_as_literal,
+ &format!(
+ r#"
+ const fn abc() -> ({ty}, {ty}) {{ ({val}, {val}) }}
+ const ABC: ({ty}, {ty}) = abc();
+ fn a() {{ A$0BC }}
+ "#
+ ),
+ &format!(
+ r#"
+ const fn abc() -> ({ty}, {ty}) {{ ({val}, {val}) }}
+ const ABC: ({ty}, {ty}) = abc();
+ fn a() {{ ({val}, {val}) }}
+ "#
+ ),
+ );
+ });
+ }
+
+ #[test]
+ fn inline_const_as_literal_const_fn_call_builtin() {
+ TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
+ check_assist(
+ inline_const_as_literal,
+ &format!(
+ r#"
+ const fn abc() -> {ty} {{ {val} }}
+ const ABC: {ty} = abc();
+ fn a() {{ A$0BC }}
+ "#
+ ),
+ &format!(
+ r#"
+ const fn abc() -> {ty} {{ {val} }}
+ const ABC: {ty} = abc();
+ fn a() {{ {val} }}
+ "#
+ ),
+ );
+ });
+ }
+
+ #[test]
+ fn inline_const_as_literal_scalar_operators() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ const ABC: i32 = 1 + 2 + 3;
+ fn a() { A$0BC }
+ "#,
+ r#"
+ const ABC: i32 = 1 + 2 + 3;
+ fn a() { 6 }
+ "#,
+ );
+ }
+ #[test]
+ fn inline_const_as_literal_block_scalar_calculate_expr() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ const ABC: i32 = { 1 + 2 + 3 };
+ fn a() { A$0BC }
+ "#,
+ r#"
+ const ABC: i32 = { 1 + 2 + 3 };
+ fn a() { 6 }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_literal_block_scalar_calculate_param_expr() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ const ABC: i32 = { (1 + 2 + 3) };
+ fn a() { A$0BC }
+ "#,
+ r#"
+ const ABC: i32 = { (1 + 2 + 3) };
+ fn a() { 6 }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_literal_block_tuple_scalar_calculate_block_expr() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ const ABC: (i32, i32) = { (1, { 2 + 3 }) };
+ fn a() { A$0BC }
+ "#,
+ r#"
+ const ABC: (i32, i32) = { (1, { 2 + 3 }) };
+ fn a() { (1, 5) }
+ "#,
+ );
+ }
+
+ // FIXME: Add support for nested ref slices when using `render_eval`
+ #[test]
+ fn inline_const_as_literal_block_slice() {
+ check_assist_not_applicable(
+ inline_const_as_literal,
+ r#"
+ const ABC: &[&[&[&[&[&[i32]]]]]] = { &[&[&[&[&[&[10, 20, 30]]]]]] };
+ fn a() { A$0BC }
+ "#,
+ );
+ }
+
+ // FIXME: Add support for unary tuple expressions when using `render_eval`.
+    // `const fn abc() -> (i32) { (1) }` will result in `1` instead of `(1)` because it's evaluated
+ // as a paren expr.
+ #[test]
+ fn inline_const_as_literal_block_tuple() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ const ABC: (([i32; 3]), (i32), ((&str, i32), i32), i32) = { (([1, 2, 3]), (10), (("hello", 10), 20), 30) };
+ fn a() { A$0BC }
+ "#,
+ r#"
+ const ABC: (([i32; 3]), (i32), ((&str, i32), i32), i32) = { (([1, 2, 3]), (10), (("hello", 10), 20), 30) };
+ fn a() { ([1, 2, 3], 10, (("hello", 10), 20), 30) }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_literal_block_slice_single() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ const ABC: [i32; 1] = { [10] };
+ fn a() { A$0BC }
+ "#,
+ r#"
+ const ABC: [i32; 1] = { [10] };
+ fn a() { [10] }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_literal_block_array() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ const ABC: [[[i32; 1]; 1]; 1] = { [[[10]]] };
+ fn a() { A$0BC }
+ "#,
+ r#"
+ const ABC: [[[i32; 1]; 1]; 1] = { [[[10]]] };
+ fn a() { [[[10]]] }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_literal_block_recursive() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ const ABC: &str = { { { { "hello" } } } };
+ fn a() { A$0BC }
+ "#,
+ r#"
+ const ABC: &str = { { { { "hello" } } } };
+ fn a() { "hello" }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_literal_expr_as_str_lit() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ const STRING: &str = "Hello, World!";
+
+ fn something() -> &'static str {
+ STR$0ING
+ }
+ "#,
+ r#"
+ const STRING: &str = "Hello, World!";
+
+ fn something() -> &'static str {
+ "Hello, World!"
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_literal_eval_const_block_expr_to_str_lit() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ const STRING: &str = {
+ let x = 9;
+ if x + 10 == 21 {
+ "Hello, World!"
+ } else {
+ "World, Hello!"
+ }
+ };
+
+ fn something() -> &'static str {
+ STR$0ING
+ }
+ "#,
+ r#"
+ const STRING: &str = {
+ let x = 9;
+ if x + 10 == 21 {
+ "Hello, World!"
+ } else {
+ "World, Hello!"
+ }
+ };
+
+ fn something() -> &'static str {
+ "World, Hello!"
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_literal_eval_const_block_macro_expr_to_str_lit() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ macro_rules! co {() => {"World, Hello!"};}
+ const STRING: &str = { co!() };
+
+ fn something() -> &'static str {
+ STR$0ING
+ }
+ "#,
+ r#"
+ macro_rules! co {() => {"World, Hello!"};}
+ const STRING: &str = { co!() };
+
+ fn something() -> &'static str {
+ "World, Hello!"
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_literal_eval_const_match_expr_to_str_lit() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ const STRING: &str = match 9 + 10 {
+ 0..18 => "Hello, World!",
+ _ => "World, Hello!"
+ };
+
+ fn something() -> &'static str {
+ STR$0ING
+ }
+ "#,
+ r#"
+ const STRING: &str = match 9 + 10 {
+ 0..18 => "Hello, World!",
+ _ => "World, Hello!"
+ };
+
+ fn something() -> &'static str {
+ "World, Hello!"
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_literal_eval_const_if_expr_to_str_lit() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ const STRING: &str = if 1 + 2 == 4 {
+ "Hello, World!"
+ } else {
+ "World, Hello!"
+ }
+
+ fn something() -> &'static str {
+ STR$0ING
+ }
+ "#,
+ r#"
+ const STRING: &str = if 1 + 2 == 4 {
+ "Hello, World!"
+ } else {
+ "World, Hello!"
+ }
+
+ fn something() -> &'static str {
+ "World, Hello!"
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_literal_eval_const_macro_expr_to_str_lit() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ macro_rules! co {() => {"World, Hello!"};}
+ const STRING: &str = co!();
+
+ fn something() -> &'static str {
+ STR$0ING
+ }
+ "#,
+ r#"
+ macro_rules! co {() => {"World, Hello!"};}
+ const STRING: &str = co!();
+
+ fn something() -> &'static str {
+ "World, Hello!"
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_literal_eval_const_call_expr_to_str_lit() {
+ check_assist(
+ inline_const_as_literal,
+ r#"
+ const fn const_call() -> &'static str {"World, Hello!"}
+ const STRING: &str = const_call();
+
+ fn something() -> &'static str {
+ STR$0ING
+ }
+ "#,
+ r#"
+ const fn const_call() -> &'static str {"World, Hello!"}
+ const STRING: &str = const_call();
+
+ fn something() -> &'static str {
+ "World, Hello!"
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn inline_const_as_literal_expr_as_str_lit_not_applicable() {
+ check_assist_not_applicable(
+ inline_const_as_literal,
+ r#"
+ const STRING: &str = "Hello, World!";
+
+ fn something() -> &'static str {
+ STRING $0
+ }
+ "#,
+ );
+ }
+}
diff --git a/crates/ide-assists/src/handlers/raw_string.rs b/crates/ide-assists/src/handlers/raw_string.rs
index 40ee4771d1..63db606336 100644
--- a/crates/ide-assists/src/handlers/raw_string.rs
+++ b/crates/ide-assists/src/handlers/raw_string.rs
@@ -158,9 +158,8 @@ pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
#[cfg(test)]
mod tests {
- use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
-
use super::*;
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
#[test]
fn make_raw_string_target() {
diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index 36ac8c71d8..3bdd795bea 100644
--- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -1,8 +1,5 @@
use hir::{InFile, ModuleDef};
-use ide_db::{
- helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator,
- syntax_helpers::insert_whitespace_into_node::insert_ws_into,
-};
+use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator};
use itertools::Itertools;
use syntax::{
ast::{self, AstNode, HasName},
@@ -182,7 +179,11 @@ fn impl_def_from_trait(
let impl_def = {
use syntax::ast::Impl;
let text = generate_trait_impl_text(adt, trait_path.to_string().as_str(), "");
- let parse = syntax::SourceFile::parse(&text);
+ // FIXME: `generate_trait_impl_text` currently generates two newlines
+ // at the front, but these leading newlines should really instead be
+ // inserted at the same time the impl is inserted
+ assert_eq!(&text[..2], "\n\n", "`generate_trait_impl_text` output changed");
+ let parse = syntax::SourceFile::parse(&text[2..]);
let node = match parse.tree().syntax().descendants().find_map(Impl::cast) {
Some(it) => it,
None => {
@@ -193,24 +194,13 @@ fn impl_def_from_trait(
)
}
};
- let node = node.clone_subtree();
+ let node = node.clone_for_update();
assert_eq!(node.syntax().text_range().start(), 0.into());
node
};
- let trait_items = trait_items
- .into_iter()
- .map(|it| {
- if sema.hir_file_for(it.syntax()).is_macro() {
- if let Some(it) = ast::AssocItem::cast(insert_ws_into(it.syntax().clone())) {
- return it;
- }
- }
- it.clone_for_update()
- })
- .collect();
- let (impl_def, first_assoc_item) =
- add_trait_assoc_items_to_impl(sema, trait_items, trait_, impl_def, target_scope);
+ let first_assoc_item =
+ add_trait_assoc_items_to_impl(sema, &trait_items, trait_, &impl_def, target_scope);
// Generate a default `impl` function body for the derived trait.
if let ast::AssocItem::Fn(ref func) = first_assoc_item {
diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs
index bd282e5343..111753bf30 100644
--- a/crates/ide-assists/src/lib.rs
+++ b/crates/ide-assists/src/lib.rs
@@ -161,6 +161,7 @@ mod handlers {
mod generate_delegate_methods;
mod add_return_type;
mod inline_call;
+ mod inline_const_as_literal;
mod inline_local_variable;
mod inline_macro;
mod inline_type_alias;
@@ -265,6 +266,7 @@ mod handlers {
generate_new::generate_new,
inline_call::inline_call,
inline_call::inline_into_callers,
+ inline_const_as_literal::inline_const_as_literal,
inline_local_variable::inline_local_variable,
inline_type_alias::inline_type_alias,
inline_type_alias::inline_type_alias_uses,
diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs
index 8a35fd290e..c097e07398 100644
--- a/crates/ide-assists/src/tests/generated.rs
+++ b/crates/ide-assists/src/tests/generated.rs
@@ -1480,6 +1480,27 @@ fn foo(name: Option<&str>) {
}
#[test]
+fn doctest_inline_const_as_literal() {
+ check_doc_test(
+ "inline_const_as_literal",
+ r#####"
+const STRING: &str = "Hello, World!";
+
+fn something() -> &'static str {
+ STRING$0
+}
+"#####,
+ r#####"
+const STRING: &str = "Hello, World!";
+
+fn something() -> &'static str {
+ "Hello, World!"
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_inline_into_callers() {
check_doc_test(
"inline_into_callers",
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index 8f7ea26306..03d8553506 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -3,8 +3,11 @@
use std::ops;
pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
-use hir::{db::HirDatabase, HirDisplay, Semantics};
-use ide_db::{famous_defs::FamousDefs, path_transform::PathTransform, RootDatabase, SnippetCap};
+use hir::{db::HirDatabase, HirDisplay, InFile, Semantics};
+use ide_db::{
+ famous_defs::FamousDefs, path_transform::PathTransform,
+ syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, SnippetCap,
+};
use stdx::format_to;
use syntax::{
ast::{
@@ -91,30 +94,21 @@ pub fn filter_assoc_items(
sema: &Semantics<'_, RootDatabase>,
items: &[hir::AssocItem],
default_methods: DefaultMethods,
-) -> Vec<ast::AssocItem> {
- fn has_def_name(item: &ast::AssocItem) -> bool {
- match item {
- ast::AssocItem::Fn(def) => def.name(),
- ast::AssocItem::TypeAlias(def) => def.name(),
- ast::AssocItem::Const(def) => def.name(),
- ast::AssocItem::MacroCall(_) => None,
- }
- .is_some()
- }
-
- items
+) -> Vec<InFile<ast::AssocItem>> {
+ return items
.iter()
// Note: This throws away items with no source.
- .filter_map(|&i| {
- let item = match i {
- hir::AssocItem::Function(i) => ast::AssocItem::Fn(sema.source(i)?.value),
- hir::AssocItem::TypeAlias(i) => ast::AssocItem::TypeAlias(sema.source(i)?.value),
- hir::AssocItem::Const(i) => ast::AssocItem::Const(sema.source(i)?.value),
+ .copied()
+ .filter_map(|assoc_item| {
+ let item = match assoc_item {
+ hir::AssocItem::Function(it) => sema.source(it)?.map(ast::AssocItem::Fn),
+ hir::AssocItem::TypeAlias(it) => sema.source(it)?.map(ast::AssocItem::TypeAlias),
+ hir::AssocItem::Const(it) => sema.source(it)?.map(ast::AssocItem::Const),
};
Some(item)
})
.filter(has_def_name)
- .filter(|it| match it {
+ .filter(|it| match &it.value {
ast::AssocItem::Fn(def) => matches!(
(default_methods, def.body()),
(DefaultMethods::Only, Some(_)) | (DefaultMethods::No, None)
@@ -125,31 +119,58 @@ pub fn filter_assoc_items(
),
_ => default_methods == DefaultMethods::No,
})
- .collect::<Vec<_>>()
+ .collect();
+
+ fn has_def_name(item: &InFile<ast::AssocItem>) -> bool {
+ match &item.value {
+ ast::AssocItem::Fn(def) => def.name(),
+ ast::AssocItem::TypeAlias(def) => def.name(),
+ ast::AssocItem::Const(def) => def.name(),
+ ast::AssocItem::MacroCall(_) => None,
+ }
+ .is_some()
+ }
}
+/// Given `original_items` retrieved from the trait definition (usually by
+/// [`filter_assoc_items()`]), clones each item for update and applies path transformation to it,
+/// then inserts into `impl_`. Returns the modified `impl_` and the first associated item that got
+/// inserted.
pub fn add_trait_assoc_items_to_impl(
sema: &Semantics<'_, RootDatabase>,
- items: Vec<ast::AssocItem>,
+ original_items: &[InFile<ast::AssocItem>],
trait_: hir::Trait,
- impl_: ast::Impl,
+ impl_: &ast::Impl,
target_scope: hir::SemanticsScope<'_>,
-) -> (ast::Impl, ast::AssocItem) {
- let source_scope = sema.scope_for_def(trait_);
-
- let transform = PathTransform::trait_impl(&target_scope, &source_scope, trait_, impl_.clone());
-
+) -> ast::AssocItem {
let new_indent_level = IndentLevel::from_node(impl_.syntax()) + 1;
- let items = items.into_iter().map(|assoc_item| {
- transform.apply(assoc_item.syntax());
- assoc_item.remove_attrs_and_docs();
- assoc_item.reindent_to(new_indent_level);
- assoc_item
- });
+ let items = original_items.into_iter().map(|InFile { file_id, value: original_item }| {
+ let cloned_item = {
+ if file_id.is_macro() {
+ if let Some(formatted) =
+ ast::AssocItem::cast(insert_ws_into(original_item.syntax().clone()))
+ {
+ return formatted;
+ } else {
+ stdx::never!("formatted `AssocItem` could not be cast back to `AssocItem`");
+ }
+ }
+ original_item.clone_for_update()
+ };
- let res = impl_.clone_for_update();
+ if let Some(source_scope) = sema.scope(original_item.syntax()) {
+ // FIXME: Paths in nested macros are not handled well. See
+ // `add_missing_impl_members::paths_in_nested_macro_should_get_transformed` test.
+ let transform =
+ PathTransform::trait_impl(&target_scope, &source_scope, trait_, impl_.clone());
+ transform.apply(cloned_item.syntax());
+ }
+ cloned_item.remove_attrs_and_docs();
+ cloned_item.reindent_to(new_indent_level);
+ cloned_item
+ });
- let assoc_item_list = res.get_or_create_assoc_item_list();
+ let assoc_item_list = impl_.get_or_create_assoc_item_list();
let mut first_item = None;
for item in items {
first_item.get_or_insert_with(|| item.clone());
@@ -172,7 +193,7 @@ pub fn add_trait_assoc_items_to_impl(
assoc_item_list.add_item(item)
}
- (res, first_item.unwrap())
+ first_item.unwrap()
}
#[derive(Clone, Copy, Debug)]
diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs
index 57a784c45b..c5bbb7f8d7 100644
--- a/crates/ide-completion/src/completions/dot.rs
+++ b/crates/ide-completion/src/completions/dot.rs
@@ -105,13 +105,20 @@ fn complete_fields(
mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type),
mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type),
) {
+ let mut seen_names = FxHashSet::default();
for receiver in receiver.autoderef(ctx.db) {
for (field, ty) in receiver.fields(ctx.db) {
- named_field(acc, field, ty);
+ if seen_names.insert(field.name(ctx.db)) {
+ named_field(acc, field, ty);
+ }
}
for (i, ty) in receiver.tuple_fields(ctx.db).into_iter().enumerate() {
- // Tuple fields are always public (tuple struct fields are handled above).
- tuple_index(acc, i, ty);
+ // Tuples are always the last type in a deref chain, so just check if the name is
+ // already seen without inserting into the hashset.
+ if !seen_names.contains(&hir::Name::new_tuple_field(i)) {
+ // Tuple fields are always public (tuple struct fields are handled above).
+ tuple_index(acc, i, ty);
+ }
}
}
}
@@ -672,6 +679,74 @@ impl T {
}
#[test]
+ fn test_field_no_same_name() {
+ check(
+ r#"
+//- minicore: deref
+struct A { field: u8 }
+struct B { field: u16, another: u32 }
+impl core::ops::Deref for A {
+ type Target = B;
+ fn deref(&self) -> &Self::Target { loop {} }
+}
+fn test(a: A) {
+ a.$0
+}
+"#,
+ expect![[r#"
+ fd another u32
+ fd field u8
+ me deref() (use core::ops::Deref) fn(&self) -> &<Self as Deref>::Target
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_tuple_field_no_same_index() {
+ check(
+ r#"
+//- minicore: deref
+struct A(u8);
+struct B(u16, u32);
+impl core::ops::Deref for A {
+ type Target = B;
+ fn deref(&self) -> &Self::Target { loop {} }
+}
+fn test(a: A) {
+ a.$0
+}
+"#,
+ expect![[r#"
+ fd 0 u8
+ fd 1 u32
+ me deref() (use core::ops::Deref) fn(&self) -> &<Self as Deref>::Target
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_tuple_struct_deref_to_tuple_no_same_index() {
+ check(
+ r#"
+//- minicore: deref
+struct A(u8);
+impl core::ops::Deref for A {
+ type Target = (u16, u32);
+ fn deref(&self) -> &Self::Target { loop {} }
+}
+fn test(a: A) {
+ a.$0
+}
+"#,
+ expect![[r#"
+ fd 0 u8
+ fd 1 u32
+ me deref() (use core::ops::Deref) fn(&self) -> &<Self as Deref>::Target
+ "#]],
+ );
+ }
+
+ #[test]
fn test_completion_works_in_consts() {
check(
r#"
@@ -979,4 +1054,45 @@ fn test(thing: impl Encrypt) {
"#]],
)
}
+
+ #[test]
+ fn only_consider_same_type_once() {
+ check(
+ r#"
+//- minicore: deref
+struct A(u8);
+struct B(u16);
+impl core::ops::Deref for A {
+ type Target = B;
+ fn deref(&self) -> &Self::Target { loop {} }
+}
+impl core::ops::Deref for B {
+ type Target = A;
+ fn deref(&self) -> &Self::Target { loop {} }
+}
+fn test(a: A) {
+ a.$0
+}
+"#,
+ expect![[r#"
+ fd 0 u8
+ me deref() (use core::ops::Deref) fn(&self) -> &<Self as Deref>::Target
+ "#]],
+ );
+ }
+
+ #[test]
+ fn no_inference_var_in_completion() {
+ check(
+ r#"
+struct S<T>(T);
+fn test(s: S<Unknown>) {
+ s.$0
+}
+"#,
+ expect![[r#"
+ fd 0 {unknown}
+ "#]],
+ );
+ }
}
diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs
index 7de1bf2dc1..269e40e6ef 100644
--- a/crates/ide-completion/src/completions/item_list/trait_impl.rs
+++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -227,9 +227,8 @@ fn get_transformed_assoc_item(
assoc_item: ast::AssocItem,
impl_def: hir::Impl,
) -> Option<ast::AssocItem> {
- let assoc_item = assoc_item.clone_for_update();
let trait_ = impl_def.trait_(ctx.db)?;
- let source_scope = &ctx.sema.scope_for_def(trait_);
+ let source_scope = &ctx.sema.scope(assoc_item.syntax())?;
let target_scope = &ctx.sema.scope(ctx.sema.source(impl_def)?.syntax().value)?;
let transform = PathTransform::trait_impl(
target_scope,
@@ -238,6 +237,9 @@ fn get_transformed_assoc_item(
ctx.sema.source(impl_def)?.value,
);
+ let assoc_item = assoc_item.clone_for_update();
+ // FIXME: Paths in nested macros are not handled well. See
+ // `macro_generated_assoc_item2` test.
transform.apply(assoc_item.syntax());
assoc_item.remove_attrs_and_docs();
Some(assoc_item)
@@ -834,6 +836,33 @@ impl Test for () {
}
#[test]
+ fn fn_with_lifetimes() {
+ check_edit(
+ "fn foo",
+ r#"
+trait Test<'a, 'b, T> {
+ fn foo(&self, a: &'a T, b: &'b T) -> &'a T;
+}
+
+impl<'x, 'y, A> Test<'x, 'y, A> for () {
+ t$0
+}
+"#,
+ r#"
+trait Test<'a, 'b, T> {
+ fn foo(&self, a: &'a T, b: &'b T) -> &'a T;
+}
+
+impl<'x, 'y, A> Test<'x, 'y, A> for () {
+ fn foo(&self, a: &'x A, b: &'y A) -> &'x A {
+ $0
+}
+}
+"#,
+ );
+ }
+
+ #[test]
fn complete_without_name() {
let test = |completion: &str, hint: &str, completed: &str, next_sibling: &str| {
check_edit(
@@ -1194,6 +1223,81 @@ impl Foo for Test {
}
#[test]
+ fn macro_generated_assoc_item() {
+ check_edit(
+ "fn method",
+ r#"
+macro_rules! ty { () => { i32 } }
+trait SomeTrait { type Output; }
+impl SomeTrait for i32 { type Output = i64; }
+macro_rules! define_method {
+ () => {
+ fn method(&mut self, params: <ty!() as SomeTrait>::Output);
+ };
+}
+trait AnotherTrait { define_method!(); }
+impl AnotherTrait for () {
+ $0
+}
+"#,
+ r#"
+macro_rules! ty { () => { i32 } }
+trait SomeTrait { type Output; }
+impl SomeTrait for i32 { type Output = i64; }
+macro_rules! define_method {
+ () => {
+ fn method(&mut self, params: <ty!() as SomeTrait>::Output);
+ };
+}
+trait AnotherTrait { define_method!(); }
+impl AnotherTrait for () {
+ fn method(&mut self,params: <ty!()as SomeTrait>::Output) {
+ $0
+}
+}
+"#,
+ );
+ }
+
+ // FIXME: `T` in `ty!(T)` should be replaced by `PathTransform`.
+ #[test]
+ fn macro_generated_assoc_item2() {
+ check_edit(
+ "fn method",
+ r#"
+macro_rules! ty { ($me:ty) => { $me } }
+trait SomeTrait { type Output; }
+impl SomeTrait for i32 { type Output = i64; }
+macro_rules! define_method {
+ ($t:ty) => {
+ fn method(&mut self, params: <ty!($t) as SomeTrait>::Output);
+ };
+}
+trait AnotherTrait<T: SomeTrait> { define_method!(T); }
+impl AnotherTrait<i32> for () {
+ $0
+}
+"#,
+ r#"
+macro_rules! ty { ($me:ty) => { $me } }
+trait SomeTrait { type Output; }
+impl SomeTrait for i32 { type Output = i64; }
+macro_rules! define_method {
+ ($t:ty) => {
+ fn method(&mut self, params: <ty!($t) as SomeTrait>::Output);
+ };
+}
+trait AnotherTrait<T: SomeTrait> { define_method!(T); }
+impl AnotherTrait<i32> for () {
+ fn method(&mut self,params: <ty!(T)as SomeTrait>::Output) {
+ $0
+}
+}
+"#,
+ );
+ }
+
+ #[test]
fn includes_gat_generics() {
check_edit(
"type Ty",
diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs
index 8edda432ce..0dd544d0ae 100644
--- a/crates/ide-db/src/apply_change.rs
+++ b/crates/ide-db/src/apply_change.rs
@@ -1,7 +1,10 @@
//! Applies changes to the IDE state transactionally.
use base_db::{
- salsa::{Database, Durability},
+ salsa::{
+ debug::{DebugQueryTable, TableEntry},
+ Database, Durability, Query, QueryTable,
+ },
Change, SourceRootId,
};
use profile::{memory_usage, Bytes};
@@ -47,16 +50,37 @@ impl RootDatabase {
// | VS Code | **rust-analyzer: Memory Usage (Clears Database)**
// |===
// image::https://user-images.githubusercontent.com/48062697/113065592-08559f00-91b1-11eb-8c96-64b88068ec02.gif[]
- pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> {
- let mut acc: Vec<(String, Bytes)> = vec![];
+ pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes, usize)> {
+ let mut acc: Vec<(String, Bytes, usize)> = vec![];
+
+ fn collect_query_count<'q, Q>(table: &QueryTable<'q, Q>) -> usize
+ where
+ QueryTable<'q, Q>: DebugQueryTable,
+ Q: Query,
+ <Q as Query>::Storage: 'q,
+ {
+ struct EntryCounter(usize);
+ impl<K, V> FromIterator<TableEntry<K, V>> for EntryCounter {
+ fn from_iter<T>(iter: T) -> EntryCounter
+ where
+ T: IntoIterator<Item = TableEntry<K, V>>,
+ {
+ EntryCounter(iter.into_iter().count())
+ }
+ }
+ table.entries::<EntryCounter>().0
+ }
+
macro_rules! purge_each_query {
($($q:path)*) => {$(
let before = memory_usage().allocated;
- $q.in_db(self).purge();
+ let table = $q.in_db(self);
+ let count = collect_query_count(&table);
+ table.purge();
let after = memory_usage().allocated;
let q: $q = Default::default();
let name = format!("{:?}", q);
- acc.push((name, before - after));
+ acc.push((name, before - after, count));
)*}
}
purge_each_query![
diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs
index 0ee627a44c..73e6a920ee 100644
--- a/crates/ide-db/src/path_transform.rs
+++ b/crates/ide-db/src/path_transform.rs
@@ -9,6 +9,19 @@ use syntax::{
ted, SyntaxNode,
};
+#[derive(Default)]
+struct AstSubsts {
+ types_and_consts: Vec<TypeOrConst>,
+ lifetimes: Vec<ast::LifetimeArg>,
+}
+
+enum TypeOrConst {
+ Either(ast::TypeArg), // indistinguishable type or const param
+ Const(ast::ConstArg),
+}
+
+type LifetimeName = String;
+
/// `PathTransform` substitutes path in SyntaxNodes in bulk.
///
/// This is mostly useful for IDE code generation. If you paste some existing
@@ -34,7 +47,7 @@ use syntax::{
/// ```
pub struct PathTransform<'a> {
generic_def: Option<hir::GenericDef>,
- substs: Vec<ast::Type>,
+ substs: AstSubsts,
target_scope: &'a SemanticsScope<'a>,
source_scope: &'a SemanticsScope<'a>,
}
@@ -72,7 +85,12 @@ impl<'a> PathTransform<'a> {
target_scope: &'a SemanticsScope<'a>,
source_scope: &'a SemanticsScope<'a>,
) -> PathTransform<'a> {
- PathTransform { source_scope, target_scope, generic_def: None, substs: Vec::new() }
+ PathTransform {
+ source_scope,
+ target_scope,
+ generic_def: None,
+ substs: AstSubsts::default(),
+ }
}
pub fn apply(&self, syntax: &SyntaxNode) {
@@ -91,12 +109,14 @@ impl<'a> PathTransform<'a> {
let target_module = self.target_scope.module();
let source_module = self.source_scope.module();
let skip = match self.generic_def {
- // this is a trait impl, so we need to skip the first type parameter -- this is a bit hacky
+ // this is a trait impl, so we need to skip the first type parameter (i.e. Self) -- this is a bit hacky
Some(hir::GenericDef::Trait(_)) => 1,
_ => 0,
};
- let substs_by_param: FxHashMap<_, _> = self
- .generic_def
+ let mut type_substs: FxHashMap<hir::TypeParam, ast::Type> = Default::default();
+ let mut const_substs: FxHashMap<hir::ConstParam, SyntaxNode> = Default::default();
+ let mut default_types: Vec<hir::TypeParam> = Default::default();
+ self.generic_def
.into_iter()
.flat_map(|it| it.type_params(db))
.skip(skip)
@@ -106,53 +126,105 @@ impl<'a> PathTransform<'a> {
// can still hit those trailing values and check if they actually have
// a default type. If they do, go for that type from `hir` to `ast` so
// the resulting change can be applied correctly.
- .zip(self.substs.iter().map(Some).chain(std::iter::repeat(None)))
- .filter_map(|(k, v)| match k.split(db) {
- Either::Left(_) => None,
- Either::Right(t) => match v {
- Some(v) => Some((k, v.clone())),
- None => {
- let default = t.default(db)?;
- Some((
- k,
- ast::make::ty(
- &default
- .display_source_code(db, source_module.into(), false)
- .ok()?,
- ),
- ))
+ .zip(self.substs.types_and_consts.iter().map(Some).chain(std::iter::repeat(None)))
+ .for_each(|(k, v)| match (k.split(db), v) {
+ (Either::Right(k), Some(TypeOrConst::Either(v))) => {
+ if let Some(ty) = v.ty() {
+ type_substs.insert(k, ty.clone());
+ }
+ }
+ (Either::Right(k), None) => {
+ if let Some(default) = k.default(db) {
+ if let Some(default) =
+ &default.display_source_code(db, source_module.into(), false).ok()
+ {
+ type_substs.insert(k, ast::make::ty(default).clone_for_update());
+ default_types.push(k);
+ }
+ }
+ }
+ (Either::Left(k), Some(TypeOrConst::Either(v))) => {
+ if let Some(ty) = v.ty() {
+ const_substs.insert(k, ty.syntax().clone());
}
- },
- })
+ }
+ (Either::Left(k), Some(TypeOrConst::Const(v))) => {
+ if let Some(expr) = v.expr() {
+ // FIXME: expressions in curly brackets can cause ambiguity after insertion
+ // (e.g. `N * 2` -> `{1 + 1} * 2`; it's unclear whether `{1 + 1}`
+ // is a standalone statement or a part of another expression)
+ // and sometimes require slight modifications; see
+ // https://doc.rust-lang.org/reference/statements.html#expression-statements
+ const_substs.insert(k, expr.syntax().clone());
+ }
+ }
+ (Either::Left(_), None) => (), // FIXME: get default const value
+ _ => (), // ignore mismatching params
+ });
+ let lifetime_substs: FxHashMap<_, _> = self
+ .generic_def
+ .into_iter()
+ .flat_map(|it| it.lifetime_params(db))
+ .zip(self.substs.lifetimes.clone())
+ .filter_map(|(k, v)| Some((k.name(db).display(db.upcast()).to_string(), v.lifetime()?)))
.collect();
- Ctx { substs: substs_by_param, target_module, source_scope: self.source_scope }
+ let ctx = Ctx {
+ type_substs,
+ const_substs,
+ lifetime_substs,
+ target_module,
+ source_scope: self.source_scope,
+ };
+ ctx.transform_default_type_substs(default_types);
+ ctx
}
}
struct Ctx<'a> {
- substs: FxHashMap<hir::TypeOrConstParam, ast::Type>,
+ type_substs: FxHashMap<hir::TypeParam, ast::Type>,
+ const_substs: FxHashMap<hir::ConstParam, SyntaxNode>,
+ lifetime_substs: FxHashMap<LifetimeName, ast::Lifetime>,
target_module: hir::Module,
source_scope: &'a SemanticsScope<'a>,
}
+fn postorder(item: &SyntaxNode) -> impl Iterator<Item = SyntaxNode> {
+ item.preorder().filter_map(|event| match event {
+ syntax::WalkEvent::Enter(_) => None,
+ syntax::WalkEvent::Leave(node) => Some(node),
+ })
+}
+
impl<'a> Ctx<'a> {
fn apply(&self, item: &SyntaxNode) {
// `transform_path` may update a node's parent and that would break the
// tree traversal. Thus all paths in the tree are collected into a vec
// so that such operation is safe.
- let paths = item
- .preorder()
- .filter_map(|event| match event {
- syntax::WalkEvent::Enter(_) => None,
- syntax::WalkEvent::Leave(node) => Some(node),
- })
- .filter_map(ast::Path::cast)
- .collect::<Vec<_>>();
-
+ let paths = postorder(item).filter_map(ast::Path::cast).collect::<Vec<_>>();
for path in paths {
self.transform_path(path);
}
+
+ postorder(item).filter_map(ast::Lifetime::cast).for_each(|lifetime| {
+ if let Some(subst) = self.lifetime_substs.get(&lifetime.syntax().text().to_string()) {
+ ted::replace(lifetime.syntax(), subst.clone_subtree().clone_for_update().syntax());
+ }
+ });
}
+
+ fn transform_default_type_substs(&self, default_types: Vec<hir::TypeParam>) {
+ for k in default_types {
+ let v = self.type_substs.get(&k).unwrap();
+ // `transform_path` may update a node's parent and that would break the
+ // tree traversal. Thus all paths in the tree are collected into a vec
+ // so that such operation is safe.
+ let paths = postorder(&v.syntax()).filter_map(ast::Path::cast).collect::<Vec<_>>();
+ for path in paths {
+ self.transform_path(path);
+ }
+ }
+ }
+
fn transform_path(&self, path: ast::Path) -> Option<()> {
if path.qualifier().is_some() {
return None;
@@ -169,7 +241,7 @@ impl<'a> Ctx<'a> {
match resolution {
hir::PathResolution::TypeParam(tp) => {
- if let Some(subst) = self.substs.get(&tp.merge()) {
+ if let Some(subst) = self.type_substs.get(&tp) {
let parent = path.syntax().parent()?;
if let Some(parent) = ast::Path::cast(parent.clone()) {
// Path inside path means that there is an associated
@@ -236,8 +308,12 @@ impl<'a> Ctx<'a> {
}
ted::replace(path.syntax(), res.syntax())
}
+ hir::PathResolution::ConstParam(cp) => {
+ if let Some(subst) = self.const_substs.get(&cp) {
+ ted::replace(path.syntax(), subst.clone_subtree().clone_for_update());
+ }
+ }
hir::PathResolution::Local(_)
- | hir::PathResolution::ConstParam(_)
| hir::PathResolution::SelfType(_)
| hir::PathResolution::Def(_)
| hir::PathResolution::BuiltinAttr(_)
@@ -250,7 +326,7 @@ impl<'a> Ctx<'a> {
// FIXME: It would probably be nicer if we could get this via HIR (i.e. get the
// trait ref, and then go from the types in the substs back to the syntax).
-fn get_syntactic_substs(impl_def: ast::Impl) -> Option<Vec<ast::Type>> {
+fn get_syntactic_substs(impl_def: ast::Impl) -> Option<AstSubsts> {
let target_trait = impl_def.trait_()?;
let path_type = match target_trait {
ast::Type::PathType(path) => path,
@@ -261,13 +337,22 @@ fn get_syntactic_substs(impl_def: ast::Impl) -> Option<Vec<ast::Type>> {
get_type_args_from_arg_list(generic_arg_list)
}
-fn get_type_args_from_arg_list(generic_arg_list: ast::GenericArgList) -> Option<Vec<ast::Type>> {
- let mut result = Vec::new();
- for generic_arg in generic_arg_list.generic_args() {
- if let ast::GenericArg::TypeArg(type_arg) = generic_arg {
- result.push(type_arg.ty()?)
+fn get_type_args_from_arg_list(generic_arg_list: ast::GenericArgList) -> Option<AstSubsts> {
+ let mut result = AstSubsts::default();
+ generic_arg_list.generic_args().for_each(|generic_arg| match generic_arg {
+ // Const params are marked as consts on definition only,
+ // being passed to the trait they are indistinguishable from type params;
+ // anyway, we don't really need to distinguish them here.
+ ast::GenericArg::TypeArg(type_arg) => {
+ result.types_and_consts.push(TypeOrConst::Either(type_arg))
}
- }
+ // Some const values are recognized correctly.
+ ast::GenericArg::ConstArg(const_arg) => {
+ result.types_and_consts.push(TypeOrConst::Const(const_arg));
+ }
+ ast::GenericArg::LifetimeArg(l_arg) => result.lifetimes.push(l_arg),
+ _ => (),
+ });
Some(result)
}
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 73cd5dcaf2..e8ff107bd4 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -243,6 +243,8 @@ impl Definition {
DefWithBody::Const(c) => c.source(db).map(|src| src.syntax().cloned()),
DefWithBody::Static(s) => s.source(db).map(|src| src.syntax().cloned()),
DefWithBody::Variant(v) => v.source(db).map(|src| src.syntax().cloned()),
+ // FIXME: implement
+ DefWithBody::InTypeConst(_) => return SearchScope::empty(),
};
return match def {
Some(def) => SearchScope::file_range(def.as_ref().original_file_range_full(db)),
diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt
index 77714efa35..7834c66033 100644
--- a/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -20,8 +20,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -47,8 +49,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -74,8 +78,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -101,8 +107,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -128,8 +136,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -155,8 +165,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -182,8 +194,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: STRUCT,
diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index b5adfc13d9..1a00e29384 100644
--- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -18,8 +18,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: TYPE_ALIAS,
@@ -43,8 +45,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: CONST,
@@ -68,8 +72,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: CONST,
@@ -95,8 +101,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: ENUM,
@@ -122,8 +130,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: MACRO_DEF,
@@ -147,8 +157,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: STATIC,
@@ -174,8 +186,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -201,8 +215,12 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 2147483648,
+ hir_file_id: MacroFile(
+ MacroFile {
+ macro_call_id: MacroCallId(
+ 0,
+ ),
+ },
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -228,8 +246,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -257,8 +277,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -286,8 +308,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -311,8 +335,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: TRAIT,
@@ -338,8 +364,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: UNION,
@@ -365,8 +393,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: MODULE,
@@ -392,8 +422,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: MODULE,
@@ -419,8 +451,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: MACRO_RULES,
@@ -444,8 +478,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: FN,
@@ -471,8 +507,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: MACRO_RULES,
@@ -496,8 +534,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: FN,
@@ -521,8 +561,10 @@
},
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: FN,
@@ -561,8 +603,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 0,
+ hir_file_id: FileId(
+ FileId(
+ 0,
+ ),
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@@ -599,8 +643,10 @@
),
),
loc: DeclarationLocation {
- hir_file_id: HirFileId(
- 1,
+ hir_file_id: FileId(
+ FileId(
+ 1,
+ ),
),
ptr: SyntaxNodePtr {
kind: STRUCT,
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index a2f9697758..f75ebfa12e 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -4423,6 +4423,29 @@ const FOO$0: Option<&i32> = Some(2).as_ref();
}
#[test]
+fn hover_const_eval_dyn_trait() {
+ check(
+ r#"
+//- minicore: fmt, coerce_unsized, builtin_impls
+use core::fmt::Debug;
+
+const FOO$0: &dyn Debug = &2i32;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: &dyn Debug = &2
+ ```
+ "#]],
+ );
+}
+
+#[test]
fn hover_const_eval_slice() {
check(
r#"
@@ -4502,6 +4525,26 @@ const FOO$0: Tree = {
```
"#]],
);
+ // FIXME: Show the data of unsized structs
+ check(
+ r#"
+//- minicore: slice, index, coerce_unsized, transmute
+#[repr(transparent)]
+struct S<T: ?Sized>(T);
+const FOO$0: &S<[u8]> = core::mem::transmute::<&[u8], _>(&[1, 2, 3]);
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: &S<[u8]> = &S
+ ```
+ "#]],
+ );
}
#[test]
diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs
index ce1e03a069..84eac16b9f 100644
--- a/crates/ide/src/inlay_hints/chaining.rs
+++ b/crates/ide/src/inlay_hints/chaining.rs
@@ -474,7 +474,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9332..9340,
+ range: 9287..9295,
},
),
tooltip: "",
@@ -487,7 +487,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9364..9368,
+ range: 9319..9323,
},
),
tooltip: "",
@@ -511,7 +511,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9332..9340,
+ range: 9287..9295,
},
),
tooltip: "",
@@ -524,7 +524,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9364..9368,
+ range: 9319..9323,
},
),
tooltip: "",
@@ -548,7 +548,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9332..9340,
+ range: 9287..9295,
},
),
tooltip: "",
@@ -561,7 +561,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9364..9368,
+ range: 9319..9323,
},
),
tooltip: "",
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 87e769e423..f195f78b3a 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -181,7 +181,7 @@ impl AnalysisHost {
}
/// NB: this clears the database
- pub fn per_query_memory_usage(&mut self) -> Vec<(String, profile::Bytes)> {
+ pub fn per_query_memory_usage(&mut self) -> Vec<(String, profile::Bytes, usize)> {
self.db.per_query_memory_usage()
}
pub fn request_cancellation(&mut self) {
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index 8c02fe8164..dc06591ffe 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -243,6 +243,9 @@ fn traverse(
let mut attr_or_derive_item = None;
let mut current_macro: Option<ast::Macro> = None;
let mut macro_highlighter = MacroHighlighter::default();
+
+ // FIXME: these are not perfectly accurate, we determine them by the real file's syntax tree
+ // and an attribute nested in a macro call will not emit `inside_attribute`
let mut inside_attribute = false;
let mut inside_macro_call = false;
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index 327e1502d1..fa374b04f1 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -86,6 +86,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">assert</span> <span class="brace">{</span><span class="brace">}</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">asm</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">concat</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">toho</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panic<span class="punctuation">!</span><span class="parenthesis">(</span><span class="string_literal">"not yet implemented"</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
@@ -172,4 +174,5 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro">toho</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">fmt"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"mov eax, </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">format_args</span><span class="operator macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index 887d18b989..497992f684 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -432,6 +432,8 @@ macro_rules! panic {}
macro_rules! assert {}
#[rustc_builtin_macro]
macro_rules! asm {}
+#[rustc_builtin_macro]
+macro_rules! concat {}
macro_rules! toho {
() => ($crate::panic!("not yet implemented"));
@@ -518,6 +520,7 @@ fn main() {
toho!("{}fmt", 0);
asm!("mov eax, {0}");
format_args!(concat!("{}"), "{}");
+ format_args!("{}", format_args!("{}", 0));
}"#,
expect_file!["./test_data/highlight_strings.html"],
false,
diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs
index 93ef483502..96a6cdeaaf 100644
--- a/crates/parser/src/grammar/types.rs
+++ b/crates/parser/src/grammar/types.rs
@@ -153,7 +153,9 @@ fn array_or_slice_type(p: &mut Parser<'_>) {
// type T = [(); 92];
T![;] => {
p.bump(T![;]);
+ let m = p.start();
expressions::expr(p);
+ m.complete(p, CONST_ARG);
p.expect(T![']']);
ARRAY_TYPE
}
diff --git a/crates/parser/test_data/parser/inline/ok/0017_array_type.rast b/crates/parser/test_data/parser/inline/ok/0017_array_type.rast
index 2a5c644d46..0d50144b73 100644
--- a/crates/parser/test_data/parser/inline/ok/0017_array_type.rast
+++ b/crates/parser/test_data/parser/inline/ok/0017_array_type.rast
@@ -14,8 +14,9 @@ SOURCE_FILE
R_PAREN ")"
SEMICOLON ";"
WHITESPACE " "
- LITERAL
- INT_NUMBER "92"
+ CONST_ARG
+ LITERAL
+ INT_NUMBER "92"
R_BRACK "]"
SEMICOLON ";"
WHITESPACE "\n"
diff --git a/crates/parser/test_data/parser/ok/0030_traits.rast b/crates/parser/test_data/parser/ok/0030_traits.rast
index 44423581e6..3965ae9596 100644
--- a/crates/parser/test_data/parser/ok/0030_traits.rast
+++ b/crates/parser/test_data/parser/ok/0030_traits.rast
@@ -51,8 +51,9 @@ SOURCE_FILE
IDENT "i32"
SEMICOLON ";"
WHITESPACE " "
- LITERAL
- INT_NUMBER "1"
+ CONST_ARG
+ LITERAL
+ INT_NUMBER "1"
R_BRACK "]"
R_PAREN ")"
SEMICOLON ";"
diff --git a/crates/parser/test_data/parser/ok/0043_complex_assignment.rast b/crates/parser/test_data/parser/ok/0043_complex_assignment.rast
index 3b02c3f96a..f3c85b45b6 100644
--- a/crates/parser/test_data/parser/ok/0043_complex_assignment.rast
+++ b/crates/parser/test_data/parser/ok/0043_complex_assignment.rast
@@ -24,8 +24,9 @@ SOURCE_FILE
IDENT "u8"
SEMICOLON ";"
WHITESPACE " "
- LITERAL
- INT_NUMBER "1"
+ CONST_ARG
+ LITERAL
+ INT_NUMBER "1"
R_BRACK "]"
WHITESPACE " "
R_CURLY "}"
diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs
index 37a45bcbb8..6fd8de5934 100644
--- a/crates/proc-macro-srv/src/server.rs
+++ b/crates/proc-macro-srv/src/server.rs
@@ -97,7 +97,7 @@ impl server::TokenStream for RustAnalyzer {
match tree {
bridge::TokenTree::Group(group) => {
let group = Group {
- delimiter: delim_to_internal(group.delimiter),
+ delimiter: delim_to_internal(group.delimiter, group.span),
token_trees: match group.stream {
Some(stream) => stream.into_iter().collect(),
None => Vec::new(),
@@ -221,14 +221,14 @@ impl server::TokenStream for RustAnalyzer {
}
}
-fn delim_to_internal(d: proc_macro::Delimiter) -> tt::Delimiter {
+fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan<Span>) -> tt::Delimiter {
let kind = match d {
proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace,
proc_macro::Delimiter::Bracket => tt::DelimiterKind::Bracket,
proc_macro::Delimiter::None => tt::DelimiterKind::Invisible,
};
- tt::Delimiter { open: tt::TokenId::unspecified(), close: tt::TokenId::unspecified(), kind }
+ tt::Delimiter { open: span.open, close: span.close, kind }
}
fn delim_to_external(d: tt::Delimiter) -> proc_macro::Delimiter {
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index 0aca620a67..b5fe237fc4 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -1403,6 +1403,12 @@ fn handle_hack_cargo_workspace(
.unwrap();
crate_graph.remove_and_replace(fake, original).unwrap();
}
+ for (_, c) in crate_graph.iter_mut() {
+ if c.origin.is_local() {
+ // LangCrateOrigin::Other is good enough for a hack.
+ c.origin = CrateOrigin::Lang(LangCrateOrigin::Other);
+ }
+ }
sysroot
.crates()
.filter_map(|krate| {
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 77f02a8310..5b72d57560 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -68,7 +68,6 @@ ide-db.workspace = true
ide-ssr.workspace = true
ide.workspace = true
proc-macro-api.workspace = true
-proc-macro-srv-cli.workspace = true
profile.workspace = true
project-model.workspace = true
stdx.workspace = true
@@ -95,5 +94,5 @@ mbe.workspace = true
[features]
jemalloc = ["jemallocator", "profile/jemalloc"]
force-always-assert = ["always-assert/force"]
-sysroot-abi = ["proc-macro-srv-cli/sysroot-abi"]
+sysroot-abi = []
in-rust-tree = ["sysroot-abi", "ide/in-rust-tree", "syntax/in-rust-tree"]
diff --git a/crates/rust-analyzer/src/cli.rs b/crates/rust-analyzer/src/cli.rs
index d5d877680a..e352019211 100644
--- a/crates/rust-analyzer/src/cli.rs
+++ b/crates/rust-analyzer/src/cli.rs
@@ -50,21 +50,24 @@ fn report_metric(metric: &str, value: u64, unit: &str) {
}
fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) {
- let mut mem = host.per_query_memory_usage();
+ let mem = host.per_query_memory_usage();
let before = profile::memory_usage();
drop(vfs);
let vfs = before.allocated - profile::memory_usage().allocated;
- mem.push(("VFS".into(), vfs));
let before = profile::memory_usage();
drop(host);
- mem.push(("Unaccounted".into(), before.allocated - profile::memory_usage().allocated));
+ let unaccounted = before.allocated - profile::memory_usage().allocated;
+ let remaining = profile::memory_usage().allocated;
- mem.push(("Remaining".into(), profile::memory_usage().allocated));
-
- for (name, bytes) in mem {
+ for (name, bytes, entries) in mem {
// NOTE: Not a debug print, so avoid going through the `eprintln` defined above.
- eprintln!("{bytes:>8} {name}");
+ eprintln!("{bytes:>8} {entries:>6} {name}");
}
+ eprintln!("{vfs:>8} VFS");
+
+ eprintln!("{unaccounted:>8} Unaccounted");
+
+ eprintln!("{remaining:>8} Remaining");
}
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 2c2a9a18d2..4cb917ce29 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -8,18 +8,20 @@ use std::{
use hir::{
db::{DefDatabase, ExpandDatabase, HirDatabase},
- AssocItem, Crate, Function, HasCrate, HasSource, HirDisplay, ModuleDef,
+ Adt, AssocItem, Crate, DefWithBody, HasCrate, HasSource, HirDisplay, ModuleDef, Name,
};
use hir_def::{
body::{BodySourceMap, SyntheticSyntax},
hir::{ExprId, PatId},
- FunctionId,
};
use hir_ty::{Interner, Substitution, TyExt, TypeFlags};
-use ide::{Analysis, AnalysisHost, LineCol, RootDatabase};
-use ide_db::base_db::{
- salsa::{self, debug::DebugQueryTable, ParallelDatabase},
- SourceDatabase, SourceDatabaseExt,
+use ide::{LineCol, RootDatabase};
+use ide_db::{
+ base_db::{
+ salsa::{self, debug::DebugQueryTable, ParallelDatabase},
+ SourceDatabase, SourceDatabaseExt,
+ },
+ LineIndexDatabase,
};
use itertools::Itertools;
use oorandom::Rand32;
@@ -27,7 +29,6 @@ use profile::{Bytes, StopWatch};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use rayon::prelude::*;
use rustc_hash::FxHashSet;
-use stdx::format_to;
use syntax::{AstNode, SyntaxNode};
use vfs::{AbsPathBuf, Vfs, VfsPath};
@@ -120,7 +121,7 @@ impl flags::AnalysisStats {
eprint!(" crates: {num_crates}");
let mut num_decls = 0;
- let mut funcs = Vec::new();
+ let mut bodies = Vec::new();
let mut adts = Vec::new();
let mut consts = Vec::new();
while let Some(module) = visit_queue.pop() {
@@ -130,40 +131,71 @@ impl flags::AnalysisStats {
for decl in module.declarations(db) {
num_decls += 1;
match decl {
- ModuleDef::Function(f) => funcs.push(f),
- ModuleDef::Adt(a) => adts.push(a),
- ModuleDef::Const(c) => consts.push(c),
+ ModuleDef::Function(f) => bodies.push(DefWithBody::from(f)),
+ ModuleDef::Adt(a) => {
+ if let Adt::Enum(e) = a {
+ for v in e.variants(db) {
+ bodies.push(DefWithBody::from(v));
+ }
+ }
+ adts.push(a)
+ }
+ ModuleDef::Const(c) => {
+ bodies.push(DefWithBody::from(c));
+ consts.push(c)
+ }
+ ModuleDef::Static(s) => bodies.push(DefWithBody::from(s)),
_ => (),
- }
+ };
}
for impl_def in module.impl_defs(db) {
for item in impl_def.items(db) {
num_decls += 1;
- if let AssocItem::Function(f) = item {
- funcs.push(f);
+ match item {
+ AssocItem::Function(f) => bodies.push(DefWithBody::from(f)),
+ AssocItem::Const(c) => {
+ bodies.push(DefWithBody::from(c));
+ consts.push(c);
+ }
+ _ => (),
}
}
}
}
}
- eprintln!(", mods: {}, decls: {num_decls}, fns: {}", visited_modules.len(), funcs.len());
+ eprintln!(
+ ", mods: {}, decls: {num_decls}, bodies: {}, adts: {}, consts: {}",
+ visited_modules.len(),
+ bodies.len(),
+ adts.len(),
+ consts.len(),
+ );
eprintln!("{:<20} {}", "Item Collection:", analysis_sw.elapsed());
if self.randomize {
- shuffle(&mut rng, &mut funcs);
+ shuffle(&mut rng, &mut bodies);
+ }
+
+ if !self.skip_lowering {
+ self.run_body_lowering(db, &vfs, &bodies, verbosity);
}
if !self.skip_inference {
- self.run_inference(&host, db, &vfs, &funcs, verbosity);
+ self.run_inference(db, &vfs, &bodies, verbosity);
}
if !self.skip_mir_stats {
- self.run_mir_lowering(db, &funcs, verbosity);
+ self.run_mir_lowering(db, &bodies, verbosity);
}
- self.run_data_layout(db, &adts, verbosity);
- self.run_const_eval(db, &consts, verbosity);
+ if !self.skip_data_layout {
+ self.run_data_layout(db, &adts, verbosity);
+ }
+
+ if !self.skip_const_eval {
+ self.run_const_eval(db, &consts, verbosity);
+ }
let total_span = analysis_sw.elapsed();
eprintln!("{:<20} {total_span}", "Total:");
@@ -210,8 +242,10 @@ impl flags::AnalysisStats {
continue;
}
all += 1;
- let Err(e) = db.layout_of_adt(hir_def::AdtId::from(a).into(), Substitution::empty(Interner), a.krate(db).into()) else {
- continue;
+ let Err(e)
+ = db.layout_of_adt(hir_def::AdtId::from(a).into(), Substitution::empty(Interner), a.krate(db).into())
+ else {
+ continue
};
if verbosity.is_spammy() {
let full_name = a
@@ -227,9 +261,11 @@ impl flags::AnalysisStats {
}
fail += 1;
}
- eprintln!("{:<20} {}", "Data layouts:", sw.elapsed());
+ let data_layout_time = sw.elapsed();
+ eprintln!("{:<20} {}", "Data layouts:", data_layout_time);
eprintln!("Failed data layouts: {fail} ({}%)", percentage(fail, all));
report_metric("failed data layouts", fail, "#");
+ report_metric("data layout time", data_layout_time.time.as_millis() as u64, "ms");
}
fn run_const_eval(&self, db: &RootDatabase, consts: &[hir::Const], verbosity: Verbosity) {
@@ -255,61 +291,63 @@ impl flags::AnalysisStats {
}
fail += 1;
}
- eprintln!("{:<20} {}", "Const evaluation:", sw.elapsed());
+ let const_eval_time = sw.elapsed();
+ eprintln!("{:<20} {}", "Const evaluation:", const_eval_time);
eprintln!("Failed const evals: {fail} ({}%)", percentage(fail, all));
report_metric("failed const evals", fail, "#");
+ report_metric("const eval time", const_eval_time.time.as_millis() as u64, "ms");
}
- fn run_mir_lowering(&self, db: &RootDatabase, funcs: &[Function], verbosity: Verbosity) {
+ fn run_mir_lowering(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) {
let mut sw = self.stop_watch();
- let all = funcs.len() as u64;
+ let all = bodies.len() as u64;
let mut fail = 0;
- for f in funcs {
- let Err(e) = db.mir_body(FunctionId::from(*f).into()) else {
+ for &body in bodies {
+ let Err(e) = db.mir_body(body.into()) else {
continue;
};
if verbosity.is_spammy() {
- let full_name = f
+ let full_name = body
.module(db)
.path_to_root(db)
.into_iter()
.rev()
.filter_map(|it| it.name(db))
- .chain(Some(f.name(db)))
+ .chain(Some(body.name(db).unwrap_or_else(Name::missing)))
.map(|it| it.display(db).to_string())
.join("::");
println!("Mir body for {full_name} failed due {e:?}");
}
fail += 1;
}
- eprintln!("{:<20} {}", "MIR lowering:", sw.elapsed());
+ let mir_lowering_time = sw.elapsed();
+ eprintln!("{:<20} {}", "MIR lowering:", mir_lowering_time);
eprintln!("Mir failed bodies: {fail} ({}%)", percentage(fail, all));
report_metric("mir failed bodies", fail, "#");
+ report_metric("mir lowering time", mir_lowering_time.time.as_millis() as u64, "ms");
}
fn run_inference(
&self,
- host: &AnalysisHost,
db: &RootDatabase,
vfs: &Vfs,
- funcs: &[Function],
+ bodies: &[DefWithBody],
verbosity: Verbosity,
) {
let mut bar = match verbosity {
Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
_ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
- _ => ProgressReport::new(funcs.len() as u64),
+ _ => ProgressReport::new(bodies.len() as u64),
};
if self.parallel {
let mut inference_sw = self.stop_watch();
let snap = Snap(db.snapshot());
- funcs
+ bodies
.par_iter()
- .map_with(snap, |snap, &f| {
- let f_id = FunctionId::from(f);
- snap.0.body(f_id.into());
- snap.0.infer(f_id.into());
+ .map_with(snap, |snap, &body| {
+ snap.0.body(body.into());
+ snap.0.infer(body.into());
})
.count();
eprintln!("{:<20} {}", "Parallel Inference:", inference_sw.elapsed());
@@ -325,39 +363,58 @@ impl flags::AnalysisStats {
let mut num_pats_unknown = 0;
let mut num_pats_partially_unknown = 0;
let mut num_pat_type_mismatches = 0;
- let analysis = host.analysis();
- for f in funcs.iter().copied() {
- let name = f.name(db);
- let full_name = f
- .module(db)
- .path_to_root(db)
- .into_iter()
- .rev()
- .filter_map(|it| it.name(db))
- .chain(Some(f.name(db)))
- .map(|it| it.display(db).to_string())
- .join("::");
+ for &body_id in bodies {
+ let name = body_id.name(db).unwrap_or_else(Name::missing);
+ let module = body_id.module(db);
+ let full_name = move || {
+ module
+ .krate()
+ .display_name(db)
+ .map(|it| it.canonical_name().to_string())
+ .into_iter()
+ .chain(
+ module
+ .path_to_root(db)
+ .into_iter()
+ .filter_map(|it| it.name(db))
+ .rev()
+ .chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
+ .map(|it| it.display(db).to_string()),
+ )
+ .join("::")
+ };
if let Some(only_name) = self.only.as_deref() {
- if name.display(db).to_string() != only_name && full_name != only_name {
+ if name.display(db).to_string() != only_name && full_name() != only_name {
continue;
}
}
- let mut msg = format!("processing: {full_name}");
- if verbosity.is_verbose() {
- if let Some(src) = f.source(db) {
- let original_file = src.file_id.original_file(db);
- let path = vfs.file_path(original_file);
- let syntax_range = src.value.syntax().text_range();
- format_to!(msg, " ({} {:?})", path, syntax_range);
+ let msg = move || {
+ if verbosity.is_verbose() {
+ let source = match body_id {
+ DefWithBody::Function(it) => it.source(db).map(|it| it.syntax().cloned()),
+ DefWithBody::Static(it) => it.source(db).map(|it| it.syntax().cloned()),
+ DefWithBody::Const(it) => it.source(db).map(|it| it.syntax().cloned()),
+ DefWithBody::Variant(it) => it.source(db).map(|it| it.syntax().cloned()),
+ DefWithBody::InTypeConst(_) => unimplemented!(),
+ };
+ if let Some(src) = source {
+ let original_file = src.file_id.original_file(db);
+ let path = vfs.file_path(original_file);
+ let syntax_range = src.value.text_range();
+ format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
+ } else {
+ format!("processing: {}", full_name())
+ }
+ } else {
+ format!("processing: {}", full_name())
}
- }
+ };
if verbosity.is_spammy() {
- bar.println(msg.to_string());
+ bar.println(msg());
}
- bar.set_message(&msg);
- let f_id = FunctionId::from(f);
- let (body, sm) = db.body_with_source_map(f_id.into());
- let inference_result = db.infer(f_id.into());
+ bar.set_message(msg);
+ let (body, sm) = db.body_with_source_map(body_id.into());
+ let inference_result = db.infer(body_id.into());
// region:expressions
let (previous_exprs, previous_unknown, previous_partially_unknown) =
@@ -368,9 +425,7 @@ impl flags::AnalysisStats {
let unknown_or_partial = if ty.is_unknown() {
num_exprs_unknown += 1;
if verbosity.is_spammy() {
- if let Some((path, start, end)) =
- expr_syntax_range(db, &analysis, vfs, &sm, expr_id)
- {
+ if let Some((path, start, end)) = expr_syntax_range(db, vfs, &sm, expr_id) {
bar.println(format!(
"{} {}:{}-{}:{}: Unknown type",
path,
@@ -394,9 +449,7 @@ impl flags::AnalysisStats {
};
if self.only.is_some() && verbosity.is_spammy() {
// in super-verbose mode for just one function, we print every single expression
- if let Some((_, start, end)) =
- expr_syntax_range(db, &analysis, vfs, &sm, expr_id)
- {
+ if let Some((_, start, end)) = expr_syntax_range(db, vfs, &sm, expr_id) {
bar.println(format!(
"{}:{}-{}:{}: {}",
start.line + 1,
@@ -412,16 +465,14 @@ impl flags::AnalysisStats {
if unknown_or_partial && self.output == Some(OutputFormat::Csv) {
println!(
r#"{},type,"{}""#,
- location_csv_expr(db, &analysis, vfs, &sm, expr_id),
+ location_csv_expr(db, vfs, &sm, expr_id),
ty.display(db)
);
}
if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
num_expr_type_mismatches += 1;
if verbosity.is_verbose() {
- if let Some((path, start, end)) =
- expr_syntax_range(db, &analysis, vfs, &sm, expr_id)
- {
+ if let Some((path, start, end)) = expr_syntax_range(db, vfs, &sm, expr_id) {
bar.println(format!(
"{} {}:{}-{}:{}: Expected {}, got {}",
path,
@@ -444,7 +495,7 @@ impl flags::AnalysisStats {
if self.output == Some(OutputFormat::Csv) {
println!(
r#"{},mismatch,"{}","{}""#,
- location_csv_expr(db, &analysis, vfs, &sm, expr_id),
+ location_csv_expr(db, vfs, &sm, expr_id),
mismatch.expected.display(db),
mismatch.actual.display(db)
);
@@ -454,7 +505,7 @@ impl flags::AnalysisStats {
if verbosity.is_spammy() {
bar.println(format!(
"In {}: {} exprs, {} unknown, {} partial",
- full_name,
+ full_name(),
num_exprs - previous_exprs,
num_exprs_unknown - previous_unknown,
num_exprs_partially_unknown - previous_partially_unknown
@@ -471,9 +522,7 @@ impl flags::AnalysisStats {
let unknown_or_partial = if ty.is_unknown() {
num_pats_unknown += 1;
if verbosity.is_spammy() {
- if let Some((path, start, end)) =
- pat_syntax_range(db, &analysis, vfs, &sm, pat_id)
- {
+ if let Some((path, start, end)) = pat_syntax_range(db, vfs, &sm, pat_id) {
bar.println(format!(
"{} {}:{}-{}:{}: Unknown type",
path,
@@ -497,8 +546,7 @@ impl flags::AnalysisStats {
};
if self.only.is_some() && verbosity.is_spammy() {
// in super-verbose mode for just one function, we print every single pattern
- if let Some((_, start, end)) = pat_syntax_range(db, &analysis, vfs, &sm, pat_id)
- {
+ if let Some((_, start, end)) = pat_syntax_range(db, vfs, &sm, pat_id) {
bar.println(format!(
"{}:{}-{}:{}: {}",
start.line + 1,
@@ -514,16 +562,14 @@ impl flags::AnalysisStats {
if unknown_or_partial && self.output == Some(OutputFormat::Csv) {
println!(
r#"{},type,"{}""#,
- location_csv_pat(db, &analysis, vfs, &sm, pat_id),
+ location_csv_pat(db, vfs, &sm, pat_id),
ty.display(db)
);
}
if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat_id) {
num_pat_type_mismatches += 1;
if verbosity.is_verbose() {
- if let Some((path, start, end)) =
- pat_syntax_range(db, &analysis, vfs, &sm, pat_id)
- {
+ if let Some((path, start, end)) = pat_syntax_range(db, vfs, &sm, pat_id) {
bar.println(format!(
"{} {}:{}-{}:{}: Expected {}, got {}",
path,
@@ -546,7 +592,7 @@ impl flags::AnalysisStats {
if self.output == Some(OutputFormat::Csv) {
println!(
r#"{},mismatch,"{}","{}""#,
- location_csv_pat(db, &analysis, vfs, &sm, pat_id),
+ location_csv_pat(db, vfs, &sm, pat_id),
mismatch.expected.display(db),
mismatch.actual.display(db)
);
@@ -556,7 +602,7 @@ impl flags::AnalysisStats {
if verbosity.is_spammy() {
bar.println(format!(
"In {}: {} pats, {} unknown, {} partial",
- full_name,
+ full_name(),
num_pats - previous_pats,
num_pats_unknown - previous_unknown,
num_pats_partially_unknown - previous_partially_unknown
@@ -567,6 +613,7 @@ impl flags::AnalysisStats {
}
bar.finish_and_clear();
+ let inference_time = inference_sw.elapsed();
eprintln!(
" exprs: {}, ??ty: {} ({}%), ?ty: {} ({}%), !ty: {}",
num_exprs,
@@ -585,12 +632,89 @@ impl flags::AnalysisStats {
percentage(num_pats_partially_unknown, num_pats),
num_pat_type_mismatches
);
+ eprintln!("{:<20} {}", "Inference:", inference_time);
report_metric("unknown type", num_exprs_unknown, "#");
report_metric("type mismatches", num_expr_type_mismatches, "#");
report_metric("pattern unknown type", num_pats_unknown, "#");
report_metric("pattern type mismatches", num_pat_type_mismatches, "#");
+ report_metric("inference time", inference_time.time.as_millis() as u64, "ms");
+ }
+
+ fn run_body_lowering(
+ &self,
+ db: &RootDatabase,
+ vfs: &Vfs,
+ bodies: &[DefWithBody],
+ verbosity: Verbosity,
+ ) {
+ let mut bar = match verbosity {
+ Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
+ _ if self.output.is_some() => ProgressReport::hidden(),
+ _ => ProgressReport::new(bodies.len() as u64),
+ };
- eprintln!("{:<20} {}", "Inference:", inference_sw.elapsed());
+ let mut sw = self.stop_watch();
+ bar.tick();
+ for &body_id in bodies {
+ let module = body_id.module(db);
+ let full_name = move || {
+ module
+ .krate()
+ .display_name(db)
+ .map(|it| it.canonical_name().to_string())
+ .into_iter()
+ .chain(
+ module
+ .path_to_root(db)
+ .into_iter()
+ .filter_map(|it| it.name(db))
+ .rev()
+ .chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
+ .map(|it| it.display(db).to_string()),
+ )
+ .join("::")
+ };
+ if let Some(only_name) = self.only.as_deref() {
+ if body_id.name(db).unwrap_or_else(Name::missing).display(db).to_string()
+ != only_name
+ && full_name() != only_name
+ {
+ continue;
+ }
+ }
+ let msg = move || {
+ if verbosity.is_verbose() {
+ let source = match body_id {
+ DefWithBody::Function(it) => it.source(db).map(|it| it.syntax().cloned()),
+ DefWithBody::Static(it) => it.source(db).map(|it| it.syntax().cloned()),
+ DefWithBody::Const(it) => it.source(db).map(|it| it.syntax().cloned()),
+ DefWithBody::Variant(it) => it.source(db).map(|it| it.syntax().cloned()),
+ DefWithBody::InTypeConst(_) => unimplemented!(),
+ };
+ if let Some(src) = source {
+ let original_file = src.file_id.original_file(db);
+ let path = vfs.file_path(original_file);
+ let syntax_range = src.value.text_range();
+ format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
+ } else {
+ format!("processing: {}", full_name())
+ }
+ } else {
+ format!("processing: {}", full_name())
+ }
+ };
+ if verbosity.is_spammy() {
+ bar.println(msg());
+ }
+ bar.set_message(msg);
+ db.body_with_source_map(body_id.into());
+ bar.inc(1);
+ }
+
+ bar.finish_and_clear();
+ let body_lowering_time = sw.elapsed();
+ eprintln!("{:<20} {}", "Body lowering:", body_lowering_time);
+ report_metric("body lowering time", body_lowering_time.time.as_millis() as u64, "ms");
}
fn stop_watch(&self) -> StopWatch {
@@ -598,13 +722,7 @@ impl flags::AnalysisStats {
}
}
-fn location_csv_expr(
- db: &RootDatabase,
- analysis: &Analysis,
- vfs: &Vfs,
- sm: &BodySourceMap,
- expr_id: ExprId,
-) -> String {
+fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id: ExprId) -> String {
let src = match sm.expr_syntax(expr_id) {
Ok(s) => s,
Err(SyntheticSyntax) => return "synthetic,,".to_string(),
@@ -613,20 +731,14 @@ fn location_csv_expr(
let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range(db);
let path = vfs.file_path(original_range.file_id);
- let line_index = analysis.file_line_index(original_range.file_id).unwrap();
+ let line_index = db.line_index(original_range.file_id);
let text_range = original_range.range;
let (start, end) =
(line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col)
}
-fn location_csv_pat(
- db: &RootDatabase,
- analysis: &Analysis,
- vfs: &Vfs,
- sm: &BodySourceMap,
- pat_id: PatId,
-) -> String {
+fn location_csv_pat(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, pat_id: PatId) -> String {
let src = match sm.pat_syntax(pat_id) {
Ok(s) => s,
Err(SyntheticSyntax) => return "synthetic,,".to_string(),
@@ -637,7 +749,7 @@ fn location_csv_pat(
});
let original_range = node.as_ref().original_file_range(db);
let path = vfs.file_path(original_range.file_id);
- let line_index = analysis.file_line_index(original_range.file_id).unwrap();
+ let line_index = db.line_index(original_range.file_id);
let text_range = original_range.range;
let (start, end) =
(line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
@@ -646,7 +758,6 @@ fn location_csv_pat(
fn expr_syntax_range(
db: &RootDatabase,
- analysis: &Analysis,
vfs: &Vfs,
sm: &BodySourceMap,
expr_id: ExprId,
@@ -657,7 +768,7 @@ fn expr_syntax_range(
let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range(db);
let path = vfs.file_path(original_range.file_id);
- let line_index = analysis.file_line_index(original_range.file_id).unwrap();
+ let line_index = db.line_index(original_range.file_id);
let text_range = original_range.range;
let (start, end) =
(line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
@@ -668,7 +779,6 @@ fn expr_syntax_range(
}
fn pat_syntax_range(
db: &RootDatabase,
- analysis: &Analysis,
vfs: &Vfs,
sm: &BodySourceMap,
pat_id: PatId,
@@ -684,7 +794,7 @@ fn pat_syntax_range(
});
let original_range = node.as_ref().original_file_range(db);
let path = vfs.file_path(original_range.file_id);
- let line_index = analysis.file_line_index(original_range.file_id).unwrap();
+ let line_index = db.line_index(original_range.file_id);
let text_range = original_range.range;
let (start, end) =
(line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs
index 4006d023de..4306d72129 100644
--- a/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -17,9 +17,15 @@ impl flags::Diagnostics {
pub fn run(self) -> anyhow::Result<()> {
let mut cargo_config = CargoConfig::default();
cargo_config.sysroot = Some(RustLibSource::Discover);
+ let with_proc_macro_server = if let Some(p) = &self.proc_macro_srv {
+ let path = vfs::AbsPathBuf::assert(std::env::current_dir()?.join(&p));
+ ProcMacroServerChoice::Explicit(path)
+ } else {
+ ProcMacroServerChoice::Sysroot
+ };
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: !self.disable_build_scripts,
- with_proc_macro_server: ProcMacroServerChoice::Sysroot,
+ with_proc_macro_server,
prefill_caches: false,
};
let (host, _vfs, _proc_macro) =
diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs
index 31012c01b9..208a4e6ecd 100644
--- a/crates/rust-analyzer/src/cli/flags.rs
+++ b/crates/rust-analyzer/src/cli/flags.rs
@@ -66,8 +66,6 @@ xflags::xflags! {
optional --memory-usage
/// Print the total length of all source and macro files (whitespace is not counted).
optional --source-stats
- /// Only type check, skip lowering to mir
- optional --skip-mir-stats
/// Only analyze items matching this path.
optional -o, --only path: String
@@ -80,8 +78,16 @@ xflags::xflags! {
optional --disable-build-scripts
/// Don't use expand proc macros.
optional --disable-proc-macros
- /// Only resolve names, don't run type inference.
+ /// Skip body lowering.
+ optional --skip-lowering
+ /// Skip type inference.
optional --skip-inference
+ /// Skip lowering to mir
+ optional --skip-mir-stats
+ /// Skip data layout calculation
+ optional --skip-data-layout
+ /// Skip const evaluation
+ optional --skip-const-eval
}
cmd diagnostics {
@@ -92,6 +98,8 @@ xflags::xflags! {
optional --disable-build-scripts
/// Don't use expand proc macros.
optional --disable-proc-macros
+ /// Run a custom proc-macro-srv binary.
+ optional --proc-macro-srv path: PathBuf
}
cmd ssr {
@@ -174,13 +182,16 @@ pub struct AnalysisStats {
pub parallel: bool,
pub memory_usage: bool,
pub source_stats: bool,
+ pub skip_lowering: bool,
+ pub skip_inference: bool,
pub skip_mir_stats: bool,
+ pub skip_data_layout: bool,
+ pub skip_const_eval: bool,
pub only: Option<String>,
pub with_deps: bool,
pub no_sysroot: bool,
pub disable_build_scripts: bool,
pub disable_proc_macros: bool,
- pub skip_inference: bool,
}
#[derive(Debug)]
@@ -189,6 +200,7 @@ pub struct Diagnostics {
pub disable_build_scripts: bool,
pub disable_proc_macros: bool,
+ pub proc_macro_srv: Option<PathBuf>,
}
#[derive(Debug)]
diff --git a/crates/rust-analyzer/src/cli/progress_report.rs b/crates/rust-analyzer/src/cli/progress_report.rs
index d459dd115c..c236f9c7fe 100644
--- a/crates/rust-analyzer/src/cli/progress_report.rs
+++ b/crates/rust-analyzer/src/cli/progress_report.rs
@@ -4,41 +4,29 @@
use std::io::{self, Write};
/// A Simple ASCII Progress Bar
-pub(crate) struct ProgressReport {
+pub(crate) struct ProgressReport<'a> {
curr: f32,
text: String,
hidden: bool,
len: u64,
pos: u64,
- msg: String,
+ msg: Option<Box<dyn Fn() -> String + 'a>>,
}
-impl ProgressReport {
- pub(crate) fn new(len: u64) -> ProgressReport {
- ProgressReport {
- curr: 0.0,
- text: String::new(),
- hidden: false,
- len,
- pos: 0,
- msg: String::new(),
- }
+impl<'a> ProgressReport<'a> {
+ pub(crate) fn new(len: u64) -> ProgressReport<'a> {
+ ProgressReport { curr: 0.0, text: String::new(), hidden: false, len, pos: 0, msg: None }
}
- pub(crate) fn hidden() -> ProgressReport {
- ProgressReport {
- curr: 0.0,
- text: String::new(),
- hidden: true,
- len: 0,
- pos: 0,
- msg: String::new(),
- }
+ pub(crate) fn hidden() -> ProgressReport<'a> {
+ ProgressReport { curr: 0.0, text: String::new(), hidden: true, len: 0, pos: 0, msg: None }
}
- pub(crate) fn set_message(&mut self, msg: &str) {
- self.msg = msg.to_string();
+ pub(crate) fn set_message(&mut self, msg: impl Fn() -> String + 'a) {
+ if !self.hidden {
+ self.msg = Some(Box::new(msg));
+ }
self.tick();
}
@@ -67,7 +55,12 @@ impl ProgressReport {
return;
}
let percent = (self.curr * 100.0) as u32;
- let text = format!("{}/{} {percent:3>}% {}", self.pos, self.len, self.msg);
+ let text = format!(
+ "{}/{} {percent:3>}% {}",
+ self.pos,
+ self.len,
+ self.msg.as_ref().map_or_else(|| String::new(), |it| it())
+ );
self.update_text(&text);
}
diff --git a/crates/rust-analyzer/src/dispatch.rs b/crates/rust-analyzer/src/dispatch.rs
index ebe77b8dfe..4e57c6eb65 100644
--- a/crates/rust-analyzer/src/dispatch.rs
+++ b/crates/rust-analyzer/src/dispatch.rs
@@ -135,7 +135,7 @@ impl<'a> RequestDispatcher<'a> {
R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
R::Result: Serialize,
{
- self.on_with_thread_intent::<R>(ThreadIntent::Worker, f)
+ self.on_with_thread_intent::<true, R>(ThreadIntent::Worker, f)
}
/// Dispatches a latency-sensitive request onto the thread pool.
@@ -148,7 +148,22 @@ impl<'a> RequestDispatcher<'a> {
R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
R::Result: Serialize,
{
- self.on_with_thread_intent::<R>(ThreadIntent::LatencySensitive, f)
+ self.on_with_thread_intent::<true, R>(ThreadIntent::LatencySensitive, f)
+ }
+
+ /// Formatting requests should never block on waiting a for task thread to open up, editors will wait
+ /// on the response and a late formatting update might mess with the document and user.
+ /// We can't run this on the main thread though as we invoke rustfmt which may take arbitrary time to complete!
+ pub(crate) fn on_fmt_thread<R>(
+ &mut self,
+ f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ ) -> &mut Self
+ where
+ R: lsp_types::request::Request + 'static,
+ R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
+ R::Result: Serialize,
+ {
+ self.on_with_thread_intent::<false, R>(ThreadIntent::LatencySensitive, f)
}
pub(crate) fn finish(&mut self) {
@@ -163,7 +178,7 @@ impl<'a> RequestDispatcher<'a> {
}
}
- fn on_with_thread_intent<R>(
+ fn on_with_thread_intent<const MAIN_POOL: bool, R>(
&mut self,
intent: ThreadIntent,
f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
@@ -178,17 +193,20 @@ impl<'a> RequestDispatcher<'a> {
None => return self,
};
- self.global_state.task_pool.handle.spawn(intent, {
- let world = self.global_state.snapshot();
- move || {
- let result = panic::catch_unwind(move || {
- let _pctx = stdx::panic_context::enter(panic_context);
- f(world, params)
- });
- match thread_result_to_response::<R>(req.id.clone(), result) {
- Ok(response) => Task::Response(response),
- Err(_) => Task::Retry(req),
- }
+ let world = self.global_state.snapshot();
+ if MAIN_POOL {
+ &mut self.global_state.task_pool.handle
+ } else {
+ &mut self.global_state.fmt_pool.handle
+ }
+ .spawn(intent, move || {
+ let result = panic::catch_unwind(move || {
+ let _pctx = stdx::panic_context::enter(panic_context);
+ f(world, params)
+ });
+ match thread_result_to_response::<R>(req.id.clone(), result) {
+ Ok(response) => Task::Response(response),
+ Err(_) => Task::Retry(req),
}
});
@@ -289,7 +307,7 @@ pub(crate) struct NotificationDispatcher<'a> {
}
impl<'a> NotificationDispatcher<'a> {
- pub(crate) fn on<N>(
+ pub(crate) fn on_sync_mut<N>(
&mut self,
f: fn(&mut GlobalState, N::Params) -> Result<()>,
) -> Result<&mut Self>
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index 9f4dc44402..d5b0e3a570 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -14,7 +14,7 @@ use nohash_hasher::IntMap;
use parking_lot::{Mutex, RwLock};
use proc_macro_api::ProcMacroServer;
use project_model::{CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts};
-use rustc_hash::FxHashMap;
+use rustc_hash::{FxHashMap, FxHashSet};
use triomphe::Arc;
use vfs::AnchoredPathBuf;
@@ -54,6 +54,7 @@ pub(crate) struct GlobalState {
req_queue: ReqQueue,
pub(crate) task_pool: Handle<TaskPool<Task>, Receiver<Task>>,
+ pub(crate) fmt_pool: Handle<TaskPool<Task>, Receiver<Task>>,
pub(crate) config: Arc<Config>,
pub(crate) config_errors: Option<ConfigError>,
@@ -111,9 +112,11 @@ pub(crate) struct GlobalState {
/// the user just adds comments or whitespace to Cargo.toml, we do not want
/// to invalidate any salsa caches.
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
+ pub(crate) crate_graph_file_dependencies: FxHashSet<vfs::VfsPath>,
// op queues
- pub(crate) fetch_workspaces_queue: OpQueue<(), Option<Vec<anyhow::Result<ProjectWorkspace>>>>,
+ pub(crate) fetch_workspaces_queue:
+ OpQueue<bool, Option<(Vec<anyhow::Result<ProjectWorkspace>>, bool)>>,
pub(crate) fetch_build_data_queue:
OpQueue<(), (Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
pub(crate) fetch_proc_macros_queue: OpQueue<Vec<ProcMacroPaths>, bool>,
@@ -151,6 +154,11 @@ impl GlobalState {
let handle = TaskPool::new_with_threads(sender, config.main_loop_num_threads());
Handle { handle, receiver }
};
+ let fmt_pool = {
+ let (sender, receiver) = unbounded();
+ let handle = TaskPool::new_with_threads(sender, 1);
+ Handle { handle, receiver }
+ };
let mut analysis_host = AnalysisHost::new(config.lru_parse_query_capacity());
if let Some(capacities) = config.lru_query_capacities() {
@@ -161,6 +169,7 @@ impl GlobalState {
sender,
req_queue: ReqQueue::default(),
task_pool,
+ fmt_pool,
loader,
config: Arc::new(config.clone()),
analysis_host,
@@ -189,6 +198,7 @@ impl GlobalState {
vfs_progress_n_done: 0,
workspaces: Arc::new(Vec::new()),
+ crate_graph_file_dependencies: FxHashSet::default(),
fetch_workspaces_queue: OpQueue::default(),
fetch_build_data_queue: OpQueue::default(),
fetch_proc_macros_queue: OpQueue::default(),
@@ -202,10 +212,9 @@ impl GlobalState {
pub(crate) fn process_changes(&mut self) -> bool {
let _p = profile::span("GlobalState::process_changes");
- let mut workspace_structure_change = None;
let mut file_changes = FxHashMap::default();
- let (change, changed_files) = {
+ let (change, changed_files, workspace_structure_change) = {
let mut change = Change::new();
let (vfs, line_endings_map) = &mut *self.vfs.write();
let changed_files = vfs.take_changes();
@@ -260,16 +269,20 @@ impl GlobalState {
.map(|(file_id, (change_kind, _))| vfs::ChangedFile { file_id, change_kind })
.collect();
+ let mut workspace_structure_change = None;
// A file was added or deleted
let mut has_structure_changes = false;
for file in &changed_files {
- if let Some(path) = vfs.file_path(file.file_id).as_path() {
+ let vfs_path = &vfs.file_path(file.file_id);
+ if let Some(path) = vfs_path.as_path() {
let path = path.to_path_buf();
if reload::should_refresh_for_change(&path, file.change_kind) {
- workspace_structure_change = Some(path);
+ workspace_structure_change = Some((path.clone(), false));
}
if file.is_created_or_deleted() {
has_structure_changes = true;
+ workspace_structure_change =
+ Some((path, self.crate_graph_file_dependencies.contains(vfs_path)));
}
}
@@ -294,7 +307,7 @@ impl GlobalState {
let roots = self.source_root_config.partition(vfs);
change.set_roots(roots);
}
- (change, changed_files)
+ (change, changed_files, workspace_structure_change)
};
self.analysis_host.apply_change(change);
@@ -304,9 +317,11 @@ impl GlobalState {
// FIXME: ideally we should only trigger a workspace fetch for non-library changes
// but something's going wrong with the source root business when we add a new local
// crate see https://github.com/rust-lang/rust-analyzer/issues/13029
- if let Some(path) = workspace_structure_change {
- self.fetch_workspaces_queue
- .request_op(format!("workspace vfs file change: {}", path.display()), ());
+ if let Some((path, force_crate_graph_reload)) = workspace_structure_change {
+ self.fetch_workspaces_queue.request_op(
+ format!("workspace vfs file change: {}", path.display()),
+ force_crate_graph_reload,
+ );
}
self.proc_macro_changed =
changed_files.iter().filter(|file| !file.is_created_or_deleted()).any(|file| {
diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs
index 09de6900c8..ae1dc23153 100644
--- a/crates/rust-analyzer/src/handlers/notification.rs
+++ b/crates/rust-analyzer/src/handlers/notification.rs
@@ -127,7 +127,7 @@ pub(crate) fn handle_did_save_text_document(
if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) {
state
.fetch_workspaces_queue
- .request_op(format!("DidSaveTextDocument {}", abs_path.display()), ());
+ .request_op(format!("DidSaveTextDocument {}", abs_path.display()), false);
}
}
@@ -205,7 +205,7 @@ pub(crate) fn handle_did_change_workspace_folders(
if !config.has_linked_projects() && config.detached_files().is_empty() {
config.rediscover_workspaces();
- state.fetch_workspaces_queue.request_op("client workspaces changed".to_string(), ())
+ state.fetch_workspaces_queue.request_op("client workspaces changed".to_string(), false)
}
Ok(())
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index 3f365c0594..a6a72552d5 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -18,12 +18,11 @@ use lsp_server::ErrorCode;
use lsp_types::{
CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem,
CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
- CodeLens, CompletionItem, DocumentFormattingParams, FoldingRange, FoldingRangeParams,
- HoverContents, InlayHint, InlayHintParams, Location, LocationLink, Position,
- PrepareRenameResponse, Range, RenameParams, SemanticTokensDeltaParams,
- SemanticTokensFullDeltaResult, SemanticTokensParams, SemanticTokensRangeParams,
- SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation, SymbolTag,
- TextDocumentIdentifier, Url, WorkspaceEdit,
+ CodeLens, CompletionItem, FoldingRange, FoldingRangeParams, HoverContents, InlayHint,
+ InlayHintParams, Location, LocationLink, Position, PrepareRenameResponse, Range, RenameParams,
+ SemanticTokensDeltaParams, SemanticTokensFullDeltaResult, SemanticTokensParams,
+ SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
+ SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
};
use project_model::{ManifestPath, ProjectWorkspace, TargetKind};
use serde_json::json;
@@ -52,7 +51,7 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<
state.proc_macro_clients = Arc::from(Vec::new());
state.proc_macro_changed = false;
- state.fetch_workspaces_queue.request_op("reload workspace request".to_string(), ());
+ state.fetch_workspaces_queue.request_op("reload workspace request".to_string(), false);
Ok(())
}
@@ -115,13 +114,14 @@ pub(crate) fn handle_analyzer_status(
pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> Result<String> {
let _p = profile::span("handle_memory_usage");
- let mut mem = state.analysis_host.per_query_memory_usage();
- mem.push(("Remaining".into(), profile::memory_usage().allocated));
+ let mem = state.analysis_host.per_query_memory_usage();
let mut out = String::new();
- for (name, bytes) in mem {
- format_to!(out, "{:>8} {}\n", bytes, name);
+ for (name, bytes, entries) in mem {
+ format_to!(out, "{:>8} {:>6} {}\n", bytes, entries, name);
}
+ format_to!(out, "{:>8} Remaining\n", profile::memory_usage().allocated);
+
Ok(out)
}
@@ -1076,7 +1076,7 @@ pub(crate) fn handle_references(
pub(crate) fn handle_formatting(
snap: GlobalStateSnapshot,
- params: DocumentFormattingParams,
+ params: lsp_types::DocumentFormattingParams,
) -> Result<Option<Vec<lsp_types::TextEdit>>> {
let _p = profile::span("handle_formatting");
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 12bc638929..02dd94e5fa 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -11,6 +11,7 @@ use flycheck::FlycheckHandle;
use ide_db::base_db::{SourceDatabaseExt, VfsPath};
use lsp_server::{Connection, Notification, Request};
use lsp_types::notification::Notification as _;
+use stdx::thread::ThreadIntent;
use triomphe::Arc;
use vfs::FileId;
@@ -115,9 +116,11 @@ impl GlobalState {
self.register_did_save_capability();
}
- self.fetch_workspaces_queue.request_op("startup".to_string(), ());
- if let Some((cause, ())) = self.fetch_workspaces_queue.should_start_op() {
- self.fetch_workspaces(cause);
+ self.fetch_workspaces_queue.request_op("startup".to_string(), false);
+ if let Some((cause, force_crate_graph_reload)) =
+ self.fetch_workspaces_queue.should_start_op()
+ {
+ self.fetch_workspaces(cause, force_crate_graph_reload);
}
while let Some(event) = self.next_event(&inbox) {
@@ -177,6 +180,9 @@ impl GlobalState {
recv(self.task_pool.receiver) -> task =>
Some(Event::Task(task.unwrap())),
+ recv(self.fmt_pool.receiver) -> task =>
+ Some(Event::Task(task.unwrap())),
+
recv(self.loader.receiver) -> task =>
Some(Event::Vfs(task.unwrap())),
@@ -277,6 +283,7 @@ impl GlobalState {
}
}
}
+ let event_handling_duration = loop_start.elapsed();
let state_changed = self.process_changes();
let memdocs_added_or_removed = self.mem_docs.take_changes();
@@ -364,8 +371,10 @@ impl GlobalState {
}
if self.config.cargo_autoreload() {
- if let Some((cause, ())) = self.fetch_workspaces_queue.should_start_op() {
- self.fetch_workspaces(cause);
+ if let Some((cause, force_crate_graph_reload)) =
+ self.fetch_workspaces_queue.should_start_op()
+ {
+ self.fetch_workspaces(cause, force_crate_graph_reload);
}
}
@@ -385,9 +394,9 @@ impl GlobalState {
let loop_duration = loop_start.elapsed();
if loop_duration > Duration::from_millis(100) && was_quiescent {
- tracing::warn!("overly long loop turn took {loop_duration:?}: {event_dbg_msg}");
+ tracing::warn!("overly long loop turn took {loop_duration:?} (event handling took {event_handling_duration:?}): {event_dbg_msg}");
self.poke_rust_analyzer_developer(format!(
- "overly long loop turn took {loop_duration:?}: {event_dbg_msg}"
+ "overly long loop turn took {loop_duration:?} (event handling took {event_handling_duration:?}): {event_dbg_msg}"
));
}
Ok(())
@@ -397,7 +406,7 @@ impl GlobalState {
tracing::debug!(%cause, "will prime caches");
let num_worker_threads = self.config.prime_caches_num_threads();
- self.task_pool.handle.spawn_with_sender(stdx::thread::ThreadIntent::Worker, {
+ self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, {
let analysis = self.snapshot().analysis;
move |sender| {
sender.send(Task::PrimeCaches(PrimeCachesProgress::Begin)).unwrap();
@@ -468,8 +477,9 @@ impl GlobalState {
let (state, msg) = match progress {
ProjectWorkspaceProgress::Begin => (Progress::Begin, None),
ProjectWorkspaceProgress::Report(msg) => (Progress::Report, Some(msg)),
- ProjectWorkspaceProgress::End(workspaces) => {
- self.fetch_workspaces_queue.op_completed(Some(workspaces));
+ ProjectWorkspaceProgress::End(workspaces, force_reload_crate_graph) => {
+ self.fetch_workspaces_queue
+ .op_completed(Some((workspaces, force_reload_crate_graph)));
if let Err(e) = self.fetch_workspace_error() {
tracing::error!("FetchWorkspaceError:\n{e}");
}
@@ -546,7 +556,6 @@ impl GlobalState {
self.vfs_progress_n_total = n_total;
self.vfs_progress_n_done = n_done;
- // if n_total != 0 {
let state = if n_done == 0 {
Progress::Begin
} else if n_done < n_total {
@@ -562,7 +571,6 @@ impl GlobalState {
Some(Progress::fraction(n_done, n_total)),
None,
);
- // }
}
}
}
@@ -678,6 +686,12 @@ impl GlobalState {
.on_sync::<lsp_types::request::SelectionRangeRequest>(handlers::handle_selection_range)
.on_sync::<lsp_ext::MatchingBrace>(handlers::handle_matching_brace)
.on_sync::<lsp_ext::OnTypeFormatting>(handlers::handle_on_type_formatting)
+ // Formatting should be done immediately as the editor might wait on it, but we can't
+ // put it on the main thread as we do not want the main thread to block on rustfmt.
+ // So we have an extra thread just for formatting requests to make sure it gets handled
+ // as fast as possible.
+ .on_fmt_thread::<lsp_types::request::Formatting>(handlers::handle_formatting)
+ .on_fmt_thread::<lsp_types::request::RangeFormatting>(handlers::handle_range_formatting)
// We can’t run latency-sensitive request handlers which do semantic
// analysis on the main thread because that would block other
// requests. Instead, we run these request handlers on higher priority
@@ -695,14 +709,6 @@ impl GlobalState {
.on_latency_sensitive::<lsp_types::request::SemanticTokensRangeRequest>(
handlers::handle_semantic_tokens_range,
)
- // Formatting is not caused by the user typing,
- // but it does qualify as latency-sensitive
- // because a delay before formatting is applied
- // can be confusing for the user.
- .on_latency_sensitive::<lsp_types::request::Formatting>(handlers::handle_formatting)
- .on_latency_sensitive::<lsp_types::request::RangeFormatting>(
- handlers::handle_range_formatting,
- )
// All other request handlers
.on::<lsp_ext::FetchDependencyList>(handlers::fetch_dependency_list)
.on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)
@@ -757,18 +763,28 @@ impl GlobalState {
use lsp_types::notification as notifs;
NotificationDispatcher { not: Some(not), global_state: self }
- .on::<notifs::Cancel>(handlers::handle_cancel)?
- .on::<notifs::WorkDoneProgressCancel>(handlers::handle_work_done_progress_cancel)?
- .on::<notifs::DidOpenTextDocument>(handlers::handle_did_open_text_document)?
- .on::<notifs::DidChangeTextDocument>(handlers::handle_did_change_text_document)?
- .on::<notifs::DidCloseTextDocument>(handlers::handle_did_close_text_document)?
- .on::<notifs::DidSaveTextDocument>(handlers::handle_did_save_text_document)?
- .on::<notifs::DidChangeConfiguration>(handlers::handle_did_change_configuration)?
- .on::<notifs::DidChangeWorkspaceFolders>(handlers::handle_did_change_workspace_folders)?
- .on::<notifs::DidChangeWatchedFiles>(handlers::handle_did_change_watched_files)?
- .on::<lsp_ext::CancelFlycheck>(handlers::handle_cancel_flycheck)?
- .on::<lsp_ext::ClearFlycheck>(handlers::handle_clear_flycheck)?
- .on::<lsp_ext::RunFlycheck>(handlers::handle_run_flycheck)?
+ .on_sync_mut::<notifs::Cancel>(handlers::handle_cancel)?
+ .on_sync_mut::<notifs::WorkDoneProgressCancel>(
+ handlers::handle_work_done_progress_cancel,
+ )?
+ .on_sync_mut::<notifs::DidOpenTextDocument>(handlers::handle_did_open_text_document)?
+ .on_sync_mut::<notifs::DidChangeTextDocument>(
+ handlers::handle_did_change_text_document,
+ )?
+ .on_sync_mut::<notifs::DidCloseTextDocument>(handlers::handle_did_close_text_document)?
+ .on_sync_mut::<notifs::DidSaveTextDocument>(handlers::handle_did_save_text_document)?
+ .on_sync_mut::<notifs::DidChangeConfiguration>(
+ handlers::handle_did_change_configuration,
+ )?
+ .on_sync_mut::<notifs::DidChangeWorkspaceFolders>(
+ handlers::handle_did_change_workspace_folders,
+ )?
+ .on_sync_mut::<notifs::DidChangeWatchedFiles>(
+ handlers::handle_did_change_watched_files,
+ )?
+ .on_sync_mut::<lsp_ext::CancelFlycheck>(handlers::handle_cancel_flycheck)?
+ .on_sync_mut::<lsp_ext::ClearFlycheck>(handlers::handle_clear_flycheck)?
+ .on_sync_mut::<lsp_ext::RunFlycheck>(handlers::handle_run_flycheck)?
.finish();
Ok(())
}
@@ -796,7 +812,7 @@ impl GlobalState {
// Diagnostics are triggered by the user typing
// so we run them on a latency sensitive thread.
- self.task_pool.handle.spawn(stdx::thread::ThreadIntent::LatencySensitive, move || {
+ self.task_pool.handle.spawn(ThreadIntent::LatencySensitive, move || {
let _p = profile::span("publish_diagnostics");
let _ctx = stdx::panic_context::enter("publish_diagnostics".to_owned());
let diagnostics = subscriptions
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 6e8c8ea91a..310c6b076c 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -27,6 +27,7 @@ use ide_db::{
use itertools::Itertools;
use proc_macro_api::{MacroDylib, ProcMacroServer};
use project_model::{PackageRoot, ProjectWorkspace, WorkspaceBuildScripts};
+use rustc_hash::FxHashSet;
use stdx::{format_to, thread::ThreadIntent};
use syntax::SmolStr;
use triomphe::Arc;
@@ -46,7 +47,7 @@ use ::tt::token_id as tt;
pub(crate) enum ProjectWorkspaceProgress {
Begin,
Report(String),
- End(Vec<anyhow::Result<ProjectWorkspace>>),
+ End(Vec<anyhow::Result<ProjectWorkspace>>, bool),
}
#[derive(Debug)]
@@ -85,7 +86,7 @@ impl GlobalState {
);
}
if self.config.linked_projects() != old_config.linked_projects() {
- self.fetch_workspaces_queue.request_op("linked projects changed".to_string(), ())
+ self.fetch_workspaces_queue.request_op("linked projects changed".to_string(), false)
} else if self.config.flycheck() != old_config.flycheck() {
self.reload_flycheck();
}
@@ -110,7 +111,7 @@ impl GlobalState {
if self.proc_macro_changed {
status.health = lsp_ext::Health::Warning;
- message.push_str("Proc-macros have changed and need to be rebuild.\n\n");
+ message.push_str("Proc-macros have changed and need to be rebuilt.\n\n");
}
if let Err(_) = self.fetch_build_data_error() {
status.health = lsp_ext::Health::Warning;
@@ -182,7 +183,7 @@ impl GlobalState {
status
}
- pub(crate) fn fetch_workspaces(&mut self, cause: Cause) {
+ pub(crate) fn fetch_workspaces(&mut self, cause: Cause, force_crate_graph_reload: bool) {
tracing::info!(%cause, "will fetch workspaces");
self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, {
@@ -250,7 +251,10 @@ impl GlobalState {
tracing::info!("did fetch workspaces {:?}", workspaces);
sender
- .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(workspaces)))
+ .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(
+ workspaces,
+ force_crate_graph_reload,
+ )))
.unwrap();
}
});
@@ -336,15 +340,19 @@ impl GlobalState {
let _p = profile::span("GlobalState::switch_workspaces");
tracing::info!(%cause, "will switch workspaces");
+ let Some((workspaces, force_reload_crate_graph)) = self.fetch_workspaces_queue.last_op_result() else { return; };
+
if let Err(_) = self.fetch_workspace_error() {
if !self.workspaces.is_empty() {
+ if *force_reload_crate_graph {
+ self.recreate_crate_graph(cause);
+ }
// It only makes sense to switch to a partially broken workspace
// if we don't have any workspace at all yet.
return;
}
}
- let Some(workspaces) = self.fetch_workspaces_queue.last_op_result() else { return; };
let workspaces =
workspaces.iter().filter_map(|res| res.as_ref().ok().cloned()).collect::<Vec<_>>();
@@ -373,6 +381,9 @@ impl GlobalState {
self.workspaces = Arc::new(workspaces);
} else {
tracing::info!("build scripts do not match the version of the active workspace");
+ if *force_reload_crate_graph {
+ self.recreate_crate_graph(cause);
+ }
// Current build scripts do not match the version of the active
// workspace, so there's nothing for us to update.
return;
@@ -467,13 +478,24 @@ impl GlobalState {
});
self.source_root_config = project_folders.source_root_config;
+ self.recreate_crate_graph(cause);
+
+ tracing::info!("did switch workspaces");
+ }
+
+ fn recreate_crate_graph(&mut self, cause: String) {
// Create crate graph from all the workspaces
- let (crate_graph, proc_macro_paths) = {
+ let (crate_graph, proc_macro_paths, crate_graph_file_dependencies) = {
let vfs = &mut self.vfs.write().0;
let loader = &mut self.loader;
+ // Crate graph construction relies on these paths; record them so that when one of them
+ // gets deleted or created we trigger a reconstruction of the crate graph.
+ let mut crate_graph_file_dependencies = FxHashSet::default();
+
let mut load = |path: &AbsPath| {
let _p = profile::span("switch_workspaces::load");
let vfs_path = vfs::VfsPath::from(path.to_path_buf());
+ crate_graph_file_dependencies.insert(vfs_path.clone());
match vfs.file_id(&vfs_path) {
Some(file_id) => Some(file_id),
None => {
@@ -494,26 +516,25 @@ impl GlobalState {
crate_graph.extend(other, &mut crate_proc_macros);
proc_macros.push(crate_proc_macros);
}
- (crate_graph, proc_macros)
+ (crate_graph, proc_macros, crate_graph_file_dependencies)
};
- let mut change = Change::new();
if self.config.expand_proc_macros() {
self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths);
}
+ let mut change = Change::new();
change.set_crate_graph(crate_graph);
self.analysis_host.apply_change(change);
+ self.crate_graph_file_dependencies = crate_graph_file_dependencies;
self.process_changes();
self.reload_flycheck();
-
- tracing::info!("did switch workspaces");
}
pub(super) fn fetch_workspace_error(&self) -> Result<(), String> {
let mut buf = String::new();
- let Some(last_op_result) = self.fetch_workspaces_queue.last_op_result() else { return Ok(()) };
+ let Some((last_op_result, _)) = self.fetch_workspaces_queue.last_op_result() else { return Ok(()) };
if last_op_result.is_empty() {
stdx::format_to!(buf, "rust-analyzer failed to discover workspace");
} else {
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs
index 8a9e947ded..648bc995ad 100644
--- a/crates/rust-analyzer/src/to_proto.rs
+++ b/crates/rust-analyzer/src/to_proto.rs
@@ -410,7 +410,7 @@ pub(crate) fn signature_help(
let documentation = call_info.doc.filter(|_| config.docs).map(|doc| {
lsp_types::Documentation::MarkupContent(lsp_types::MarkupContent {
kind: lsp_types::MarkupKind::Markdown,
- value: doc,
+ value: crate::markdown::format_docs(&doc),
})
});
@@ -1410,7 +1410,8 @@ pub(crate) fn rename_error(err: RenameError) -> crate::LspError {
#[cfg(test)]
mod tests {
- use ide::Analysis;
+ use ide::{Analysis, FilePosition};
+ use test_utils::extract_offset;
use triomphe::Arc;
use super::*;
@@ -1451,6 +1452,34 @@ fn main() {
}
}
+ #[test]
+ fn calling_function_with_ignored_code_in_signature() {
+ let text = r#"
+fn foo() {
+ bar($0);
+}
+/// ```
+/// # use crate::bar;
+/// bar(5);
+/// ```
+fn bar(_: usize) {}
+"#;
+
+ let (offset, text) = extract_offset(text);
+ let (analysis, file_id) = Analysis::from_single_file(text);
+ let help = signature_help(
+ analysis.signature_help(FilePosition { file_id, offset }).unwrap().unwrap(),
+ CallInfoConfig { params_only: false, docs: true },
+ false,
+ );
+ let docs = match &help.signatures[help.active_signature.unwrap() as usize].documentation {
+ Some(lsp_types::Documentation::MarkupContent(content)) => &content.value,
+ _ => panic!("documentation contains markup"),
+ };
+ assert!(docs.contains("bar(5)"));
+ assert!(!docs.contains("use crate::bar"));
+ }
+
// `Url` is not able to parse windows paths on unix machines.
#[test]
#[cfg(target_os = "windows")]
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index e130c762fc..0bb29e7080 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -839,6 +839,17 @@ fn resolve_proc_macro() {
return;
}
+ // skip using the sysroot config so as to prevent us from loading the sysroot sources
+ let mut rustc = std::process::Command::new(toolchain::rustc());
+ rustc.args(["--print", "sysroot"]);
+ let output = rustc.output().unwrap();
+ let sysroot =
+ vfs::AbsPathBuf::try_from(std::str::from_utf8(&output.stdout).unwrap().trim()).unwrap();
+
+ let standalone_server_name =
+ format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
+ let proc_macro_server_path = sysroot.join("libexec").join(&standalone_server_name);
+
let server = Project::with_fixture(
r###"
//- /foo/Cargo.toml
@@ -916,7 +927,7 @@ pub fn foo(_input: TokenStream) -> TokenStream {
},
"procMacro": {
"enable": true,
- "server": PathBuf::from(env!("CARGO_BIN_EXE_rust-analyzer")),
+ "server": proc_macro_server_path.as_path().as_ref(),
}
}))
.root("foo")
diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram
index 4c9027dec6..b096c99744 100644
--- a/crates/syntax/rust.ungram
+++ b/crates/syntax/rust.ungram
@@ -565,7 +565,7 @@ RefType =
'&' Lifetime? 'mut'? Type
ArrayType =
- '[' Type ';' Expr ']'
+ '[' Type ';' ConstArg ']'
SliceType =
'[' Type ']'
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs
index 61f6a04c98..e520801ea2 100644
--- a/crates/syntax/src/ast/generated/nodes.rs
+++ b/crates/syntax/src/ast/generated/nodes.rs
@@ -1207,7 +1207,7 @@ impl ArrayType {
pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
- pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn const_arg(&self) -> Option<ConstArg> { support::child(&self.syntax) }
pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
}
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index a07561e79a..3c2b7e56b0 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -166,7 +166,7 @@ pub fn ty_alias(
assignment: Option<(ast::Type, Option<ast::WhereClause>)>,
) -> ast::TypeAlias {
let mut s = String::new();
- s.push_str(&format!("type {} ", ident));
+ s.push_str(&format!("type {}", ident));
if let Some(list) = generic_param_list {
s.push_str(&list.to_string());
@@ -182,9 +182,9 @@ pub fn ty_alias(
if let Some(exp) = assignment {
if let Some(cl) = exp.1 {
- s.push_str(&format!("= {} {}", &exp.0.to_string(), &cl.to_string()));
+ s.push_str(&format!(" = {} {}", &exp.0.to_string(), &cl.to_string()));
} else {
- s.push_str(&format!("= {}", &exp.0.to_string()));
+ s.push_str(&format!(" = {}", &exp.0.to_string()));
}
}
diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs
index 05f32f8e51..602baed370 100644
--- a/crates/test-utils/src/fixture.rs
+++ b/crates/test-utils/src/fixture.rs
@@ -387,6 +387,10 @@ impl MiniCore {
}
}
+ if !active_regions.is_empty() {
+ panic!("unclosed regions: {:?} Add an `endregion` comment", active_regions);
+ }
+
for flag in &self.valid_flags {
if !seen_regions.iter().any(|it| it == flag) {
panic!("unused minicore flag: {flag:?}");
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index c79b4e966d..266bc2391f 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -32,6 +32,7 @@
//! include:
//! index: sized
//! infallible:
+//! int_impl: size_of, transmute
//! iterator: option
//! iterators: iterator, fn
//! manually_drop: drop
@@ -41,9 +42,11 @@
//! panic: fmt
//! phantom_data:
//! pin:
+//! pointee:
//! range:
//! result:
//! send: sized
+//! size_of: sized
//! sized:
//! slice:
//! sync: sized
@@ -345,6 +348,12 @@ pub mod mem {
pub fn transmute<Src, Dst>(src: Src) -> Dst;
}
// endregion:transmute
+
+ // region:size_of
+ extern "rust-intrinsic" {
+ pub fn size_of<T>() -> usize;
+ }
+ // endregion:size_of
}
pub mod ptr {
@@ -360,6 +369,14 @@ pub mod ptr {
*dst = src;
}
// endregion:drop
+
+ // region:pointee
+ #[lang = "pointee_trait"]
+ pub trait Pointee {
+ #[lang = "metadata_type"]
+ type Metadata;
+ }
+ // endregion:pointee
}
pub mod ops {
@@ -859,29 +876,26 @@ pub mod fmt {
}
#[lang = "format_argument"]
- pub struct ArgumentV1<'a> {
+ pub struct Argument<'a> {
value: &'a Opaque,
formatter: fn(&Opaque, &mut Formatter<'_>) -> Result,
}
- impl<'a> ArgumentV1<'a> {
- pub fn new<'b, T>(x: &'b T, f: fn(&T, &mut Formatter<'_>) -> Result) -> ArgumentV1<'b> {
+ impl<'a> Argument<'a> {
+ pub fn new<'b, T>(x: &'b T, f: fn(&T, &mut Formatter<'_>) -> Result) -> Argument<'b> {
use crate::mem::transmute;
- unsafe { ArgumentV1 { formatter: transmute(f), value: transmute(x) } }
+ unsafe { Argument { formatter: transmute(f), value: transmute(x) } }
}
}
#[lang = "format_arguments"]
pub struct Arguments<'a> {
pieces: &'a [&'static str],
- args: &'a [ArgumentV1<'a>],
+ args: &'a [Argument<'a>],
}
impl<'a> Arguments<'a> {
- pub const fn new_v1(
- pieces: &'a [&'static str],
- args: &'a [ArgumentV1<'a>],
- ) -> Arguments<'a> {
+ pub const fn new_v1(pieces: &'a [&'static str], args: &'a [Argument<'a>]) -> Arguments<'a> {
Arguments { pieces, args }
}
}
@@ -1307,6 +1321,25 @@ impl bool {
}
// endregion:bool_impl
+// region:int_impl
+macro_rules! impl_int {
+ ($($t:ty)*) => {
+ $(
+ impl $t {
+ pub const fn from_ne_bytes(bytes: [u8; mem::size_of::<Self>()]) -> Self {
+ unsafe { mem::transmute(bytes) }
+ }
+ }
+ )*
+ }
+}
+
+impl_int! {
+ usize u8 u16 u32 u64 u128
+ isize i8 i16 i32 i64 i128
+}
+// endregion:int_impl
+
// region:error
pub mod error {
#[rustc_has_incoherent_inherent_impls]
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc
index 5b9db10b09..b5c095fd9d 100644
--- a/docs/user/manual.adoc
+++ b/docs/user/manual.adoc
@@ -244,7 +244,7 @@ Any other tools or libraries you will need to acquire from Flatpak.
Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
-To use `rust-analyzer`, you need to install and enable one of the two popular two popular LSP client implementations for Emacs, https://github.com/joaotavora/eglot[Eglot] or https://github.com/emacs-lsp/lsp-mode[LSP Mode]. Both enable `rust-analyzer` by default in rust buffers if it is available.
+To use `rust-analyzer`, you need to install and enable one of the two popular LSP client implementations for Emacs, https://github.com/joaotavora/eglot[Eglot] or https://github.com/emacs-lsp/lsp-mode[LSP Mode]. Both enable `rust-analyzer` by default in rust buffers if it is available.
==== Eglot
@@ -653,9 +653,28 @@ However, if you use some other build system, you'll have to describe the structu
[source,TypeScript]
----
interface JsonProject {
+ /// Path to the sysroot directory.
+ ///
+ /// The sysroot is where rustc looks for the
+ /// crates that are built into Rust, such as
+ /// std.
+ ///
+ /// https://doc.rust-lang.org/rustc/command-line-arguments.html#--sysroot-override-the-system-root
+ ///
+ /// To see the current value of sysroot, you
+ /// can query rustc:
+ ///
+ /// ```
+ /// $ rustc --print sysroot
+ /// /Users/yourname/.rustup/toolchains/stable-x86_64-apple-darwin
+ /// ```
+ sysroot?: string;
/// Path to the directory with *source code* of
/// sysroot crates.
///
+ /// By default, this is `lib/rustlib/src/rust/library`
+ /// relative to the sysroot.
+ ///
/// It should point to the directory where std,
/// core, and friends can be found:
///
diff --git a/editors/code/.vscodeignore b/editors/code/.vscodeignore
index 09dc27056b..6e118f0b3a 100644
--- a/editors/code/.vscodeignore
+++ b/editors/code/.vscodeignore
@@ -10,5 +10,6 @@
!package-lock.json
!package.json
!ra_syntax_tree.tmGrammar.json
+!rustdoc.markdown.injection.tmGrammar.json
!server
!README.md
diff --git a/lib/la-arena/src/lib.rs b/lib/la-arena/src/lib.rs
index 5107f29439..f39c3a3e4c 100644
--- a/lib/la-arena/src/lib.rs
+++ b/lib/la-arena/src/lib.rs
@@ -451,6 +451,12 @@ impl<T> Arena<T> {
}
}
+impl<T> AsMut<[T]> for Arena<T> {
+ fn as_mut(&mut self) -> &mut [T] {
+ self.data.as_mut()
+ }
+}
+
impl<T> Default for Arena<T> {
fn default() -> Arena<T> {
Arena { data: Vec::new() }