Unnamed repository; edit this file 'description' to name the repository.
Merge ref '82310651b93a' from rust-lang/rust
Pull recent changes from https://github.com/rust-lang/rust via Josh. Upstream ref: 82310651b93a594a3fd69015e1562186a080d94c Filtered ref: e13c0be8f13737c64082b89ce834546079767ac4 This merge was created using https://github.com/rust-lang/josh-sync.
The rustc-josh-sync Cronjob Bot 9 months ago
parent e6efb03 · parent f2840cd · commit 347aefa
-rw-r--r--.github/workflows/ci.yaml39
-rw-r--r--.github/workflows/release.yaml4
-rw-r--r--.typos.toml1
-rw-r--r--Cargo.lock643
-rw-r--r--Cargo.toml54
-rw-r--r--crates/base-db/Cargo.toml1
-rw-r--r--crates/base-db/src/input.rs47
-rw-r--r--crates/base-db/src/lib.rs13
-rw-r--r--crates/cfg/Cargo.toml1
-rw-r--r--crates/hir-def/Cargo.toml3
-rw-r--r--crates/hir-def/src/attr.rs177
-rw-r--r--crates/hir-def/src/db.rs111
-rw-r--r--crates/hir-def/src/expr_store.rs433
-rw-r--r--crates/hir-def/src/expr_store/body.rs6
-rw-r--r--crates/hir-def/src/expr_store/expander.rs7
-rw-r--r--crates/hir-def/src/expr_store/lower.rs405
-rw-r--r--crates/hir-def/src/expr_store/lower/asm.rs15
-rw-r--r--crates/hir-def/src/expr_store/lower/path/tests.rs2
-rw-r--r--crates/hir-def/src/expr_store/path.rs4
-rw-r--r--crates/hir-def/src/expr_store/pretty.rs86
-rw-r--r--crates/hir-def/src/expr_store/scope.rs13
-rw-r--r--crates/hir-def/src/expr_store/tests/body.rs149
-rw-r--r--crates/hir-def/src/expr_store/tests/body/block.rs2
-rw-r--r--crates/hir-def/src/find_path.rs4
-rw-r--r--crates/hir-def/src/hir.rs11
-rw-r--r--crates/hir-def/src/hir/generics.rs6
-rw-r--r--crates/hir-def/src/hir/type_ref.rs2
-rw-r--r--crates/hir-def/src/import_map.rs8
-rw-r--r--crates/hir-def/src/item_scope.rs58
-rw-r--r--crates/hir-def/src/item_tree.rs898
-rw-r--r--crates/hir-def/src/item_tree/lower.rs403
-rw-r--r--crates/hir-def/src/item_tree/pretty.rs172
-rw-r--r--crates/hir-def/src/item_tree/tests.rs101
-rw-r--r--crates/hir-def/src/lang_item.rs29
-rw-r--r--crates/hir-def/src/lib.rs299
-rw-r--r--crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs80
-rw-r--r--crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs24
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mbe.rs44
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs4
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mbe/regression.rs2
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mod.rs59
-rw-r--r--crates/hir-def/src/macro_expansion_tests/proc_macros.rs6
-rw-r--r--crates/hir-def/src/nameres.rs33
-rw-r--r--crates/hir-def/src/nameres/assoc.rs240
-rw-r--r--crates/hir-def/src/nameres/collector.rs372
-rw-r--r--crates/hir-def/src/nameres/diagnostics.rs25
-rw-r--r--crates/hir-def/src/nameres/path_resolution.rs82
-rw-r--r--crates/hir-def/src/nameres/tests/incremental.rs385
-rw-r--r--crates/hir-def/src/resolver.rs51
-rw-r--r--crates/hir-def/src/signatures.rs457
-rw-r--r--crates/hir-def/src/src.rs106
-rw-r--r--crates/hir-def/src/visibility.rs181
-rw-r--r--crates/hir-expand/Cargo.toml1
-rw-r--r--crates/hir-expand/src/attrs.rs25
-rw-r--r--crates/hir-expand/src/builtin/derive_macro.rs140
-rw-r--r--crates/hir-expand/src/builtin/fn_macro.rs95
-rw-r--r--crates/hir-expand/src/builtin/quote.rs6
-rw-r--r--crates/hir-expand/src/db.rs49
-rw-r--r--crates/hir-expand/src/files.rs29
-rw-r--r--crates/hir-expand/src/inert_attr_macro.rs2
-rw-r--r--crates/hir-expand/src/lib.rs28
-rw-r--r--crates/hir-expand/src/prettify_macro_expansion_.rs2
-rw-r--r--crates/hir-expand/src/proc_macro.rs18
-rw-r--r--crates/hir-ty/Cargo.toml3
-rw-r--r--crates/hir-ty/src/autoderef.rs2
-rw-r--r--crates/hir-ty/src/chalk_db.rs42
-rw-r--r--crates/hir-ty/src/chalk_ext.rs12
-rw-r--r--crates/hir-ty/src/consteval.rs6
-rw-r--r--crates/hir-ty/src/consteval/tests/intrinsics.rs10
-rw-r--r--crates/hir-ty/src/db.rs42
-rw-r--r--crates/hir-ty/src/diagnostics/decl_check.rs15
-rw-r--r--crates/hir-ty/src/diagnostics/expr.rs16
-rw-r--r--crates/hir-ty/src/diagnostics/match_check.rs16
-rw-r--r--crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs104
-rw-r--r--crates/hir-ty/src/diagnostics/unsafe_check.rs15
-rw-r--r--crates/hir-ty/src/display.rs25
-rw-r--r--crates/hir-ty/src/drop.rs6
-rw-r--r--crates/hir-ty/src/dyn_compatibility.rs6
-rw-r--r--crates/hir-ty/src/generics.rs13
-rw-r--r--crates/hir-ty/src/infer.rs54
-rw-r--r--crates/hir-ty/src/infer/cast.rs4
-rw-r--r--crates/hir-ty/src/infer/closure.rs24
-rw-r--r--crates/hir-ty/src/infer/expr.rs55
-rw-r--r--crates/hir-ty/src/infer/mutability.rs12
-rw-r--r--crates/hir-ty/src/infer/pat.rs10
-rw-r--r--crates/hir-ty/src/infer/path.rs4
-rw-r--r--crates/hir-ty/src/infer/unify.rs4
-rw-r--r--crates/hir-ty/src/inhabitedness.rs6
-rw-r--r--crates/hir-ty/src/layout.rs8
-rw-r--r--crates/hir-ty/src/layout/adt.rs10
-rw-r--r--crates/hir-ty/src/layout/target.rs5
-rw-r--r--crates/hir-ty/src/layout/tests.rs3
-rw-r--r--crates/hir-ty/src/lib.rs8
-rw-r--r--crates/hir-ty/src/lower.rs88
-rw-r--r--crates/hir-ty/src/lower/path.rs10
-rw-r--r--crates/hir-ty/src/mapping.rs41
-rw-r--r--crates/hir-ty/src/method_resolution.rs16
-rw-r--r--crates/hir-ty/src/mir.rs7
-rw-r--r--crates/hir-ty/src/mir/eval.rs55
-rw-r--r--crates/hir-ty/src/mir/eval/shim.rs26
-rw-r--r--crates/hir-ty/src/mir/eval/shim/simd.rs2
-rw-r--r--crates/hir-ty/src/mir/eval/tests.rs14
-rw-r--r--crates/hir-ty/src/mir/lower.rs32
-rw-r--r--crates/hir-ty/src/mir/lower/as_place.rs21
-rw-r--r--crates/hir-ty/src/mir/lower/pattern_matching.rs8
-rw-r--r--crates/hir-ty/src/mir/pretty.rs20
-rw-r--r--crates/hir-ty/src/test_db.rs9
-rw-r--r--crates/hir-ty/src/tests.rs10
-rw-r--r--crates/hir-ty/src/tests/closure_captures.rs43
-rw-r--r--crates/hir-ty/src/tests/coercion.rs2
-rw-r--r--crates/hir-ty/src/tests/display_source_code.rs8
-rw-r--r--crates/hir-ty/src/tests/incremental.rs598
-rw-r--r--crates/hir-ty/src/tests/method_resolution.rs8
-rw-r--r--crates/hir-ty/src/tests/never_type.rs28
-rw-r--r--crates/hir-ty/src/tests/regression.rs4
-rw-r--r--crates/hir-ty/src/tests/simple.rs26
-rw-r--r--crates/hir-ty/src/tests/traits.rs76
-rw-r--r--crates/hir-ty/src/tls.rs2
-rw-r--r--crates/hir-ty/src/utils.rs17
-rw-r--r--crates/hir-ty/src/variance.rs2
-rw-r--r--crates/hir/Cargo.toml1
-rw-r--r--crates/hir/src/attrs.rs14
-rw-r--r--crates/hir/src/diagnostics.rs86
-rw-r--r--crates/hir/src/display.rs10
-rw-r--r--crates/hir/src/has_source.rs2
-rw-r--r--crates/hir/src/lib.rs818
-rw-r--r--crates/hir/src/semantics.rs109
-rw-r--r--crates/hir/src/semantics/child_by_source.rs31
-rw-r--r--crates/hir/src/source_analyzer.rs109
-rw-r--r--crates/hir/src/symbols.rs22
-rw-r--r--crates/hir/src/term_search.rs48
-rw-r--r--crates/hir/src/term_search/expr.rs33
-rw-r--r--crates/hir/src/term_search/tactics.rs98
-rw-r--r--crates/ide-assists/Cargo.toml1
-rw-r--r--crates/ide-assists/src/assist_config.rs1
-rw-r--r--crates/ide-assists/src/assist_context.rs2
-rw-r--r--crates/ide-assists/src/handlers/add_missing_impl_members.rs2
-rw-r--r--crates/ide-assists/src/handlers/add_missing_match_arms.rs207
-rw-r--r--crates/ide-assists/src/handlers/auto_import.rs10
-rw-r--r--crates/ide-assists/src/handlers/convert_match_to_let_else.rs28
-rw-r--r--crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs15
-rw-r--r--crates/ide-assists/src/handlers/expand_rest_pattern.rs2
-rw-r--r--crates/ide-assists/src/handlers/extract_function.rs149
-rw-r--r--crates/ide-assists/src/handlers/generate_default_from_new.rs15
-rw-r--r--crates/ide-assists/src/handlers/generate_enum_is_method.rs4
-rw-r--r--crates/ide-assists/src/handlers/generate_enum_projection_method.rs4
-rw-r--r--crates/ide-assists/src/handlers/generate_function.rs2
-rw-r--r--crates/ide-assists/src/handlers/generate_getter_or_setter.rs2
-rw-r--r--crates/ide-assists/src/handlers/generate_impl.rs25
-rw-r--r--crates/ide-assists/src/handlers/generate_mut_trait_impl.rs142
-rw-r--r--crates/ide-assists/src/handlers/generate_new.rs409
-rw-r--r--crates/ide-assists/src/handlers/generate_single_field_struct_from.rs1000
-rw-r--r--crates/ide-assists/src/handlers/inline_call.rs8
-rw-r--r--crates/ide-assists/src/handlers/inline_const_as_literal.rs2
-rw-r--r--crates/ide-assists/src/handlers/merge_match_arms.rs16
-rw-r--r--crates/ide-assists/src/handlers/promote_local_to_const.rs22
-rw-r--r--crates/ide-assists/src/handlers/pull_assignment_up.rs69
-rw-r--r--crates/ide-assists/src/handlers/qualify_path.rs4
-rw-r--r--crates/ide-assists/src/handlers/remove_dbg.rs87
-rw-r--r--crates/ide-assists/src/handlers/remove_unused_imports.rs2
-rw-r--r--crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs34
-rw-r--r--crates/ide-assists/src/handlers/term_search.rs6
-rw-r--r--crates/ide-assists/src/handlers/toggle_macro_delimiter.rs26
-rw-r--r--crates/ide-assists/src/handlers/unmerge_match_arm.rs63
-rw-r--r--crates/ide-assists/src/handlers/wrap_return_type.rs163
-rw-r--r--crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs86
-rw-r--r--crates/ide-assists/src/lib.rs2
-rw-r--r--crates/ide-assists/src/tests.rs21
-rw-r--r--crates/ide-assists/src/tests/generated.rs30
-rw-r--r--crates/ide-assists/src/utils.rs101
-rw-r--r--crates/ide-assists/src/utils/gen_trait_fn_body.rs6
-rw-r--r--crates/ide-completion/Cargo.toml1
-rw-r--r--crates/ide-completion/src/completions.rs38
-rw-r--r--crates/ide-completion/src/completions/attribute.rs2
-rw-r--r--crates/ide-completion/src/completions/attribute/derive.rs2
-rw-r--r--crates/ide-completion/src/completions/dot.rs14
-rw-r--r--crates/ide-completion/src/completions/expr.rs18
-rw-r--r--crates/ide-completion/src/completions/field.rs2
-rw-r--r--crates/ide-completion/src/completions/flyimport.rs20
-rw-r--r--crates/ide-completion/src/completions/fn_param.rs2
-rw-r--r--crates/ide-completion/src/completions/item_list.rs6
-rw-r--r--crates/ide-completion/src/completions/item_list/trait_impl.rs55
-rw-r--r--crates/ide-completion/src/completions/pattern.rs2
-rw-r--r--crates/ide-completion/src/completions/postfix.rs326
-rw-r--r--crates/ide-completion/src/completions/record.rs4
-rw-r--r--crates/ide-completion/src/completions/snippet.rs6
-rw-r--r--crates/ide-completion/src/completions/type.rs4
-rw-r--r--crates/ide-completion/src/completions/use_.rs2
-rw-r--r--crates/ide-completion/src/completions/vis.rs2
-rw-r--r--crates/ide-completion/src/context.rs50
-rw-r--r--crates/ide-completion/src/context/analysis.rs43
-rw-r--r--crates/ide-completion/src/item.rs2
-rw-r--r--crates/ide-completion/src/render.rs112
-rw-r--r--crates/ide-completion/src/render/function.rs24
-rw-r--r--crates/ide-completion/src/render/literal.rs8
-rw-r--r--crates/ide-completion/src/render/macro_.rs2
-rw-r--r--crates/ide-completion/src/render/pattern.rs4
-rw-r--r--crates/ide-completion/src/tests/attribute.rs4
-rw-r--r--crates/ide-completion/src/tests/expression.rs38
-rw-r--r--crates/ide-completion/src/tests/item_list.rs54
-rw-r--r--crates/ide-completion/src/tests/type_pos.rs18
-rw-r--r--crates/ide-completion/src/tests/visibility.rs89
-rw-r--r--crates/ide-db/Cargo.toml3
-rw-r--r--crates/ide-db/src/active_parameter.rs32
-rw-r--r--crates/ide-db/src/defs.rs59
-rw-r--r--crates/ide-db/src/famous_defs.rs12
-rw-r--r--crates/ide-db/src/generated/lints.rs4
-rw-r--r--crates/ide-db/src/imports/import_assets.rs60
-rw-r--r--crates/ide-db/src/path_transform.rs31
-rw-r--r--crates/ide-db/src/prime_caches.rs2
-rw-r--r--crates/ide-db/src/search.rs16
-rw-r--r--crates/ide-db/src/symbol_index.rs39
-rw-r--r--crates/ide-db/src/syntax_helpers/suggest_name.rs18
-rw-r--r--crates/ide-db/src/test_data/test_doc_alias.txt7
-rw-r--r--crates/ide-db/src/test_data/test_symbol_index_collection.txt99
-rw-r--r--crates/ide-db/src/test_data/test_symbols_exclude_imports.txt36
-rw-r--r--crates/ide-db/src/test_data/test_symbols_with_imports.txt70
-rw-r--r--crates/ide-db/src/ty_filter.rs4
-rw-r--r--crates/ide-db/src/use_trivial_constructor.rs2
-rw-r--r--crates/ide-diagnostics/Cargo.toml1
-rw-r--r--crates/ide-diagnostics/src/handlers/expected_function.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs24
-rw-r--r--crates/ide-diagnostics/src/handlers/invalid_cast.rs7
-rw-r--r--crates/ide-diagnostics/src/handlers/macro_error.rs4
-rw-r--r--crates/ide-diagnostics/src/handlers/missing_fields.rs8
-rw-r--r--crates/ide-diagnostics/src/handlers/missing_unsafe.rs40
-rw-r--r--crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs42
-rw-r--r--crates/ide-diagnostics/src/handlers/no_such_field.rs33
-rw-r--r--crates/ide-diagnostics/src/handlers/private_field.rs124
-rw-r--r--crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs29
-rw-r--r--crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs9
-rw-r--r--crates/ide-diagnostics/src/handlers/type_mismatch.rs24
-rw-r--r--crates/ide-diagnostics/src/handlers/typed_hole.rs6
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_field.rs6
-rw-r--r--crates/ide-diagnostics/src/handlers/unresolved_method.rs26
-rw-r--r--crates/ide-ssr/Cargo.toml1
-rw-r--r--crates/ide-ssr/src/lib.rs2
-rw-r--r--crates/ide-ssr/src/matching.rs22
-rw-r--r--crates/ide-ssr/src/replacing.rs28
-rw-r--r--crates/ide-ssr/src/resolving.rs26
-rw-r--r--crates/ide-ssr/src/search.rs16
-rw-r--r--crates/ide/Cargo.toml3
-rw-r--r--crates/ide/src/annotations.rs82
-rw-r--r--crates/ide/src/call_hierarchy.rs2
-rw-r--r--crates/ide/src/doc_links.rs33
-rw-r--r--crates/ide/src/file_structure.rs50
-rwxr-xr-xcrates/ide/src/folding_ranges.rs146
-rw-r--r--crates/ide/src/goto_declaration.rs2
-rw-r--r--crates/ide/src/goto_definition.rs317
-rw-r--r--crates/ide/src/goto_implementation.rs2
-rw-r--r--crates/ide/src/goto_type_definition.rs8
-rw-r--r--crates/ide/src/highlight_related.rs352
-rw-r--r--crates/ide/src/hover.rs16
-rw-r--r--crates/ide/src/hover/render.rs10
-rw-r--r--crates/ide/src/hover/tests.rs96
-rw-r--r--crates/ide/src/inlay_hints.rs16
-rw-r--r--crates/ide/src/inlay_hints/adjustment.rs108
-rw-r--r--crates/ide/src/inlay_hints/bind_pat.rs6
-rw-r--r--crates/ide/src/inlay_hints/bounds.rs2
-rw-r--r--crates/ide/src/inlay_hints/closing_brace.rs4
-rw-r--r--crates/ide/src/inlay_hints/implicit_drop.rs2
-rw-r--r--crates/ide/src/inlay_hints/implied_dyn_trait.rs48
-rw-r--r--crates/ide/src/inlay_hints/param_name.rs6
-rw-r--r--crates/ide/src/lib.rs4
-rw-r--r--crates/ide/src/navigation_target.rs4
-rw-r--r--crates/ide/src/references.rs223
-rw-r--r--crates/ide/src/runnables.rs24
-rw-r--r--crates/ide/src/signature_help.rs14
-rw-r--r--crates/ide/src/typing.rs88
-rw-r--r--crates/ide/src/view_crate_graph.rs2
-rw-r--r--crates/ide/src/view_memory_layout.rs2
-rw-r--r--crates/intern/Cargo.toml1
-rw-r--r--crates/intern/src/symbol/symbols.rs4
-rw-r--r--crates/load-cargo/src/lib.rs103
-rw-r--r--crates/mbe/Cargo.toml1
-rw-r--r--crates/mbe/src/benchmark.rs4
-rw-r--r--crates/mbe/src/expander/matcher.rs11
-rw-r--r--crates/mbe/src/expander/transcriber.rs4
-rw-r--r--crates/mbe/src/parser.rs19
-rw-r--r--crates/mbe/src/tests.rs232
-rw-r--r--crates/parser/Cargo.toml1
-rw-r--r--crates/parser/src/grammar/expressions.rs2
-rw-r--r--crates/parser/src/grammar/expressions/atom.rs27
-rw-r--r--crates/parser/src/grammar/generic_params.rs7
-rw-r--r--crates/parser/src/grammar/items.rs13
-rw-r--r--crates/parser/src/lexed_str.rs137
-rw-r--r--crates/parser/src/parser.rs7
-rw-r--r--crates/parser/src/syntax_kind/generated.rs12
-rw-r--r--crates/parser/test_data/generated/runner.rs4
-rw-r--r--crates/parser/test_data/parser/inline/ok/asm_kinds.rast48
-rw-r--r--crates/parser/test_data/parser/inline/ok/asm_kinds.rs5
-rw-r--r--crates/parser/test_data/parser/inline/ok/global_asm.rast10
-rw-r--r--crates/parser/test_data/parser/inline/ok/global_asm.rs1
-rw-r--r--crates/parser/test_data/parser/inline/ok/type_param_bounds.rast3
-rw-r--r--crates/parser/test_data/parser/inline/ok/type_param_bounds.rs2
-rw-r--r--crates/paths/Cargo.toml1
-rw-r--r--crates/proc-macro-api/Cargo.toml1
-rw-r--r--crates/proc-macro-api/src/legacy_protocol/msg.rs9
-rw-r--r--crates/proc-macro-api/src/lib.rs70
-rw-r--r--crates/proc-macro-srv/Cargo.toml1
-rw-r--r--crates/proc-macro-srv/proc-macro-test/Cargo.toml3
-rw-r--r--crates/proc-macro-srv/proc-macro-test/build.rs12
-rw-r--r--crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml1
-rw-r--r--crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs1
-rw-r--r--crates/proc-macro-srv/src/server_impl.rs56
-rw-r--r--crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs4
-rw-r--r--crates/proc-macro-srv/src/tests/mod.rs344
-rw-r--r--crates/proc-macro-srv/src/tests/utils.rs8
-rw-r--r--crates/profile/Cargo.toml5
-rw-r--r--crates/project-model/Cargo.toml1
-rw-r--r--crates/project-model/src/build_dependencies.rs76
-rw-r--r--crates/project-model/src/cargo_config_file.rs34
-rw-r--r--crates/project-model/src/cargo_workspace.rs377
-rw-r--r--crates/project-model/src/env.rs115
-rw-r--r--crates/project-model/src/lib.rs5
-rw-r--r--crates/project-model/src/manifest_path.rs8
-rw-r--r--crates/project-model/src/sysroot.rs100
-rw-r--r--crates/project-model/src/tests.rs18
-rw-r--r--crates/project-model/src/toolchain_info/rustc_cfg.rs4
-rw-r--r--crates/project-model/src/toolchain_info/target_data_layout.rs4
-rw-r--r--crates/project-model/src/toolchain_info/target_tuple.rs50
-rw-r--r--crates/project-model/src/toolchain_info/version.rs4
-rw-r--r--crates/project-model/src/workspace.rs309
-rw-r--r--crates/query-group-macro/Cargo.toml1
-rw-r--r--crates/query-group-macro/src/queries.rs8
-rw-r--r--crates/rust-analyzer/Cargo.toml15
-rw-r--r--crates/rust-analyzer/src/cli/analysis_stats.rs6
-rw-r--r--crates/rust-analyzer/src/cli/rustc_tests.rs14
-rw-r--r--crates/rust-analyzer/src/cli/scip.rs2
-rw-r--r--crates/rust-analyzer/src/config.rs21
-rw-r--r--crates/rust-analyzer/src/flycheck.rs7
-rw-r--r--crates/rust-analyzer/src/global_state.rs2
-rw-r--r--crates/rust-analyzer/src/handlers/dispatch.rs5
-rw-r--r--crates/rust-analyzer/src/handlers/notification.rs2
-rw-r--r--crates/rust-analyzer/src/handlers/request.rs22
-rw-r--r--crates/rust-analyzer/src/lsp/capabilities.rs8
-rw-r--r--crates/rust-analyzer/src/lsp/to_proto.rs8
-rw-r--r--crates/rust-analyzer/src/main_loop.rs11
-rw-r--r--crates/rust-analyzer/src/reload.rs49
-rw-r--r--crates/rust-analyzer/src/test_runner.rs1
-rw-r--r--crates/rust-analyzer/tests/slow-tests/main.rs3
-rw-r--r--crates/span/Cargo.toml3
-rw-r--r--crates/span/src/ast_id.rs923
-rw-r--r--crates/span/src/hygiene.rs16
-rw-r--r--crates/span/src/lib.rs24
-rw-r--r--crates/span/src/map.rs2
-rw-r--r--crates/stdx/Cargo.toml5
-rw-r--r--crates/stdx/src/lib.rs1
-rw-r--r--crates/stdx/src/variance.rs270
-rw-r--r--crates/syntax-bridge/Cargo.toml1
-rw-r--r--crates/syntax/Cargo.toml3
-rw-r--r--crates/syntax/rust.ungram6
-rw-r--r--crates/syntax/src/ast/edit_in_place.rs36
-rw-r--r--crates/syntax/src/ast/generated/nodes.rs23
-rw-r--r--crates/syntax/src/ast/make.rs16
-rw-r--r--crates/syntax/src/ast/node_ext.rs10
-rw-r--r--crates/syntax/src/ast/syntax_factory/constructors.rs54
-rw-r--r--crates/syntax/src/ast/token_ext.rs94
-rw-r--r--crates/syntax/src/syntax_editor.rs2
-rw-r--r--crates/syntax/src/validation.rs28
-rw-r--r--crates/test-fixture/src/lib.rs42
-rw-r--r--crates/test-utils/Cargo.toml1
-rw-r--r--crates/test-utils/src/fixture.rs46
-rw-r--r--crates/test-utils/src/minicore.rs207
-rw-r--r--crates/toolchain/Cargo.toml1
-rw-r--r--crates/tt/Cargo.toml1
-rw-r--r--crates/tt/src/iter.rs1
-rw-r--r--crates/tt/src/lib.rs2
-rw-r--r--crates/vfs-notify/Cargo.toml1
-rw-r--r--crates/vfs/Cargo.toml1
-rw-r--r--crates/vfs/src/file_set.rs6
-rw-r--r--docs/book/README.md2
-rw-r--r--docs/book/src/SUMMARY.md1
-rw-r--r--docs/book/src/configuration_generated.md25
-rw-r--r--docs/book/src/contributing/README.md4
-rw-r--r--docs/book/src/contributing/lsp-extensions.md18
-rw-r--r--docs/book/src/faq.md7
-rw-r--r--docs/book/src/non_cargo_based_projects.md3
-rw-r--r--docs/book/src/troubleshooting.md6
-rw-r--r--editors/code/package-lock.json30
-rw-r--r--editors/code/package.json30
-rw-r--r--editors/code/src/client.ts114
-rw-r--r--editors/code/src/commands.ts4
-rw-r--r--editors/code/src/config.ts14
-rw-r--r--editors/code/src/main.ts2
-rw-r--r--editors/code/src/run.ts57
-rw-r--r--lib/lsp-server/Cargo.toml7
-rw-r--r--lib/lsp-server/examples/goto_def.rs132
-rwxr-xr-xlib/lsp-server/examples/manual_test.sh53
-rw-r--r--lib/lsp-server/examples/minimal_lsp.rs335
-rw-r--r--lib/lsp-server/src/msg.rs4
-rw-r--r--lib/lsp-server/src/stdio.rs2
-rw-r--r--rust-version2
-rw-r--r--xtask/Cargo.toml6
-rw-r--r--xtask/src/codegen/grammar/ast_src.rs2
395 files changed, 14369 insertions, 6996 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 79fb7a2d2e..770652494f 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -17,6 +17,10 @@ env:
RUST_BACKTRACE: short
RUSTUP_MAX_RETRIES: 10
+defaults:
+ run:
+ shell: bash
+
jobs:
changes:
runs-on: ubuntu-latest
@@ -80,6 +84,7 @@ jobs:
CC: deny_c
strategy:
+ fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
@@ -99,7 +104,7 @@ jobs:
rustup toolchain install nightly --profile minimal --component rustfmt
# https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/rust.json
- name: Install Rust Problem Matcher
- if: matrix.os == 'ubuntu-latest'
+ if: matrix.os == 'macos-latest'
run: echo "::add-matcher::.github/rust.json"
# - name: Cache Dependencies
@@ -116,23 +121,9 @@ jobs:
if: matrix.os == 'ubuntu-latest'
run: cargo codegen --check
- - name: Compile tests
- run: cargo test --no-run
-
- name: Run tests
run: cargo nextest run --no-fail-fast --hide-progress-bar --status-level fail
- - name: Cancel parallel jobs
- if: failure()
- run: |
- # https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#cancel-a-workflow-run
- curl -L \
- -X POST \
- -H "Accept: application/vnd.github.v3+json" \
- -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \
- -H "X-GitHub-Api-Version: 2022-11-28" \
- https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/cancel
-
- name: Run Clippy
if: matrix.os == 'macos-latest'
run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr
@@ -333,3 +324,21 @@ jobs:
jq -C <<< '${{ toJson(needs) }}'
# Check if all jobs that we depend on (in the needs array) were successful (or have been skipped).
jq --exit-status 'all(.result == "success" or .result == "skipped")' <<< '${{ toJson(needs) }}'
+
+ cancel-if-matrix-failed:
+ needs: rust
+ if: ${{ always() }}
+ runs-on: ubuntu-latest
+ steps:
+ - name: Cancel parallel jobs
+ run: |
+ if jq --exit-status 'all(.result == "success" or .result == "skipped")' <<< '${{ toJson(needs) }}'; then
+ exit 0
+ fi
+ # https://docs.github.com/en/rest/actions/workflow-runs?apiVersion=2022-11-28#cancel-a-workflow-run
+ curl -L \
+ -X POST \
+ -H "Accept: application/vnd.github.v3+json" \
+ -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \
+ -H "X-GitHub-Api-Version: 2022-11-28" \
+ https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/cancel
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index a758ecfd46..5bd90130f4 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -134,13 +134,13 @@ jobs:
- name: Run analysis-stats on rust-analyzer
if: matrix.target == 'x86_64-unknown-linux-gnu'
- run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats .
+ run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats . -q
- name: Run analysis-stats on rust std library
if: matrix.target == 'x86_64-unknown-linux-gnu'
env:
RUSTC_BOOTSTRAP: 1
- run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std
+ run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std -q
- name: Upload artifacts
uses: actions/upload-artifact@v4
diff --git a/.typos.toml b/.typos.toml
index e938bddd4b..cdbc003a80 100644
--- a/.typos.toml
+++ b/.typos.toml
@@ -18,6 +18,7 @@ extend-ignore-re = [
"INOUT",
"optin",
"=Pn",
+ "\\[[0-9A-F]{4},", # AstId hex hashes
# ignore `// spellchecker:off` until `// spellchecker:on`
"(?s)(#|//)\\s*spellchecker:off.*?\\n\\s*(#|//)\\s*spellchecker:on",
]
diff --git a/Cargo.lock b/Cargo.lock
index 01de430925..e55cd80943 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -13,9 +13,9 @@ dependencies = [
[[package]]
name = "adler2"
-version = "2.0.0"
+version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
+checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
[[package]]
name = "allocator-api2"
@@ -25,9 +25,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "anyhow"
-version = "1.0.97"
+version = "1.0.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f"
+checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
[[package]]
name = "arbitrary"
@@ -100,51 +100,99 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
-version = "2.9.0"
+version = "2.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd"
+checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
[[package]]
name = "borsh"
-version = "1.5.5"
+version = "1.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5430e3be710b68d984d1391c854eb431a9d548640711faa54eecb1df93db91cc"
+checksum = "ad8646f98db542e39fc66e68a20b2144f6a732636df7c2354e74645faaa433ce"
dependencies = [
"cfg_aliases",
]
[[package]]
name = "boxcar"
-version = "0.2.12"
+version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66bb12751a83493ef4b8da1120451a262554e216a247f14b48cb5e8fe7ed8bdf"
+checksum = "26c4925bc979b677330a8c7fe7a8c94af2dbb4a2d37b4a20a80d884400f46baa"
[[package]]
name = "camino"
-version = "1.1.9"
+version = "1.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3"
+checksum = "0da45bc31171d8d6960122e222a67740df867c1dd53b4d51caa297084c185cab"
dependencies = [
"serde",
]
[[package]]
name = "cargo-platform"
-version = "0.1.9"
+version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea"
+checksum = "84982c6c0ae343635a3a4ee6dedef965513735c8b183caa7289fa6e27399ebd4"
dependencies = [
"serde",
]
[[package]]
+name = "cargo-util-schemas"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e63d2780ac94487eb9f1fea7b0d56300abc9eb488800854ca217f102f5caccca"
+dependencies = [
+ "semver",
+ "serde",
+ "serde-untagged",
+ "serde-value",
+ "thiserror 1.0.69",
+ "toml",
+ "unicode-xid",
+ "url",
+]
+
+[[package]]
+name = "cargo-util-schemas"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7dc1a6f7b5651af85774ae5a34b4e8be397d9cf4bc063b7e6dbd99a841837830"
+dependencies = [
+ "semver",
+ "serde",
+ "serde-untagged",
+ "serde-value",
+ "thiserror 2.0.12",
+ "toml",
+ "unicode-xid",
+ "url",
+]
+
+[[package]]
+name = "cargo_metadata"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4f7835cfc6135093070e95eb2b53e5d9b5c403dc3a6be6040ee026270aa82502"
+dependencies = [
+ "camino",
+ "cargo-platform",
+ "cargo-util-schemas 0.2.0",
+ "semver",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.12",
+]
+
+[[package]]
name = "cargo_metadata"
-version = "0.19.2"
+version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba"
+checksum = "5cfca2aaa699835ba88faf58a06342a314a950d2b9686165e038286c30316868"
dependencies = [
"camino",
"cargo-platform",
+ "cargo-util-schemas 0.8.2",
"semver",
"serde",
"serde_json",
@@ -153,9 +201,9 @@ dependencies = [
[[package]]
name = "cc"
-version = "1.2.16"
+version = "1.2.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c"
+checksum = "956a5e21988b87f372569b66183b78babf23ebc2e744b733e4350a752c4dafac"
dependencies = [
"shlex",
]
@@ -178,9 +226,9 @@ dependencies = [
[[package]]
name = "cfg-if"
-version = "1.0.0"
+version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268"
[[package]]
name = "cfg_aliases"
@@ -190,9 +238,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "chalk-derive"
-version = "0.102.0"
+version = "0.103.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "feb14e3ff0ebac26d8e58b6ed1417afb60c4a0a44b6425546ee7eb9c75ebb336"
+checksum = "eb4899682de915ca7c0b025bdd0a3d34c75fe12184122fda6805a7baddaa293c"
dependencies = [
"proc-macro2",
"quote",
@@ -202,19 +250,19 @@ dependencies = [
[[package]]
name = "chalk-ir"
-version = "0.102.0"
+version = "0.103.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72f0a61621a088af69fee8df39ec63cf5b6d0b9ab663a740cdeb376aabf2f244"
+checksum = "90a37d2ab99352b4caca135061e7b4ac67024b648c28ed0b787feec4bea4caed"
dependencies = [
- "bitflags 2.9.0",
+ "bitflags 2.9.1",
"chalk-derive",
]
[[package]]
name = "chalk-recursive"
-version = "0.102.0"
+version = "0.103.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cbd3415cc540015533aa4a8ad007696d585dd9c5f81e7c099872f1dd4bf14894"
+checksum = "c855be60e646664bc37c2496d3dc81ca5ef60520930e5e0f0057a0575aff6c19"
dependencies = [
"chalk-derive",
"chalk-ir",
@@ -225,9 +273,9 @@ dependencies = [
[[package]]
name = "chalk-solve"
-version = "0.102.0"
+version = "0.103.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "747707b0c082b3ecf4b1ae28d0d8df708a46cddd22a386f9cc85a312a4de25ff"
+checksum = "477ac6cdfd2013e9f93b09b036c2b607a67b2e728f4777b8422d55a79e9e3a34"
dependencies = [
"chalk-derive",
"chalk-ir",
@@ -374,7 +422,7 @@ dependencies = [
"libc",
"option-ext",
"redox_users",
- "windows-sys 0.59.0",
+ "windows-sys 0.60.2",
]
[[package]]
@@ -432,6 +480,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
+name = "erased-serde"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e004d887f51fcb9fef17317a2f3525c887d8aa3f4f50fed920816a688284a5b7"
+dependencies = [
+ "serde",
+ "typeid",
+]
+
+[[package]]
name = "expect-test"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -461,9 +519,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flate2"
-version = "1.1.1"
+version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece"
+checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d"
dependencies = [
"crc32fast",
"miniz_oxide",
@@ -471,9 +529,9 @@ dependencies = [
[[package]]
name = "foldhash"
-version = "0.1.4"
+version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f"
+checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "form_urlencoded"
@@ -501,9 +559,9 @@ checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a"
[[package]]
name = "getrandom"
-version = "0.2.15"
+version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
+checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
dependencies = [
"cfg-if",
"libc",
@@ -524,9 +582,9 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
[[package]]
name = "hashbrown"
-version = "0.15.2"
+version = "0.15.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
+checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5"
dependencies = [
"allocator-api2",
"equivalent",
@@ -539,20 +597,14 @@ version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
dependencies = [
- "hashbrown 0.15.2",
+ "hashbrown 0.15.4",
]
[[package]]
-name = "heck"
-version = "0.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
-
-[[package]]
name = "hermit-abi"
-version = "0.3.9"
+version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
+checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c"
[[package]]
name = "hir"
@@ -588,7 +640,7 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
- "bitflags 2.9.0",
+ "bitflags 2.9.1",
"cfg",
"cov-mark",
"drop_bomb",
@@ -655,7 +707,7 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
- "bitflags 2.9.0",
+ "bitflags 2.9.1",
"chalk-derive",
"chalk-ir",
"chalk-recursive",
@@ -705,21 +757,22 @@ dependencies = [
[[package]]
name = "icu_collections"
-version = "1.5.0"
+version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526"
+checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
dependencies = [
"displaydoc",
+ "potential_utf",
"yoke",
"zerofrom",
"zerovec",
]
[[package]]
-name = "icu_locid"
-version = "1.5.0"
+name = "icu_locale_core"
+version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637"
+checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
dependencies = [
"displaydoc",
"litemap",
@@ -729,30 +782,10 @@ dependencies = [
]
[[package]]
-name = "icu_locid_transform"
-version = "1.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e"
-dependencies = [
- "displaydoc",
- "icu_locid",
- "icu_locid_transform_data",
- "icu_provider",
- "tinystr",
- "zerovec",
-]
-
-[[package]]
-name = "icu_locid_transform_data"
-version = "1.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e"
-
-[[package]]
name = "icu_normalizer"
-version = "1.5.0"
+version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f"
+checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979"
dependencies = [
"displaydoc",
"icu_collections",
@@ -760,68 +793,55 @@ dependencies = [
"icu_properties",
"icu_provider",
"smallvec",
- "utf16_iter",
- "utf8_iter",
- "write16",
"zerovec",
]
[[package]]
name = "icu_normalizer_data"
-version = "1.5.0"
+version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516"
+checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"
[[package]]
name = "icu_properties"
-version = "1.5.1"
+version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5"
+checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
dependencies = [
"displaydoc",
"icu_collections",
- "icu_locid_transform",
+ "icu_locale_core",
"icu_properties_data",
"icu_provider",
- "tinystr",
+ "potential_utf",
+ "zerotrie",
"zerovec",
]
[[package]]
name = "icu_properties_data"
-version = "1.5.0"
+version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569"
+checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"
[[package]]
name = "icu_provider"
-version = "1.5.0"
+version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9"
+checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
dependencies = [
"displaydoc",
- "icu_locid",
- "icu_provider_macros",
+ "icu_locale_core",
"stable_deref_trait",
"tinystr",
"writeable",
"yoke",
"zerofrom",
+ "zerotrie",
"zerovec",
]
[[package]]
-name = "icu_provider_macros"
-version = "1.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
name = "ide"
version = "0.0.0"
dependencies = [
@@ -898,7 +918,7 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
- "bitflags 2.9.0",
+ "bitflags 2.9.1",
"cov-mark",
"crossbeam-channel",
"either",
@@ -976,9 +996,9 @@ dependencies = [
[[package]]
name = "idna_adapter"
-version = "1.2.0"
+version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71"
+checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
dependencies = [
"icu_normalizer",
"icu_properties",
@@ -991,7 +1011,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
dependencies = [
"equivalent",
- "hashbrown 0.15.2",
+ "hashbrown 0.15.4",
"serde",
]
@@ -1001,7 +1021,7 @@ version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3"
dependencies = [
- "bitflags 2.9.0",
+ "bitflags 2.9.1",
"inotify-sys",
"libc",
]
@@ -1026,6 +1046,15 @@ dependencies = [
]
[[package]]
+name = "intrusive-collections"
+version = "0.9.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "189d0897e4cbe8c75efedf3502c18c887b05046e59d28404d4d8e46cbc4d1e86"
+dependencies = [
+ "memoffset",
+]
+
+[[package]]
name = "itertools"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1057,9 +1086,9 @@ checksum = "a037eddb7d28de1d0fc42411f501b53b75838d313908078d6698d064f3029b24"
[[package]]
name = "kqueue"
-version = "1.0.8"
+version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7447f1ca1b7b563588a205fe93dea8df60fd981423a768bc1c0ded35ed147d0c"
+checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a"
dependencies = [
"kqueue-sys",
"libc",
@@ -1099,19 +1128,19 @@ checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
[[package]]
name = "libloading"
-version = "0.8.7"
+version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6a793df0d7afeac54f95b471d3af7f0d4fb975699f972341a4b76988d49cdf0c"
+checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667"
dependencies = [
"cfg-if",
- "windows-targets 0.53.0",
+ "windows-targets 0.53.2",
]
[[package]]
name = "libmimalloc-sys"
-version = "0.1.40"
+version = "0.1.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07d0e07885d6a754b9c7993f2625187ad694ee985d60f23355ff0e7077261502"
+checksum = "ec9d6fac27761dabcd4ee73571cdb06b7022dc99089acbe5435691edffaac0f4"
dependencies = [
"cc",
"libc",
@@ -1123,7 +1152,7 @@ version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
dependencies = [
- "bitflags 2.9.0",
+ "bitflags 2.9.1",
"libc",
"redox_syscall",
]
@@ -1149,9 +1178,9 @@ dependencies = [
[[package]]
name = "litemap"
-version = "0.7.5"
+version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856"
+checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
[[package]]
name = "load-cargo"
@@ -1174,9 +1203,9 @@ dependencies = [
[[package]]
name = "lock_api"
-version = "0.4.12"
+version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17"
+checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765"
dependencies = [
"autocfg",
"scopeguard",
@@ -1184,21 +1213,24 @@ dependencies = [
[[package]]
name = "log"
-version = "0.4.26"
+version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e"
+checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
[[package]]
name = "lsp-server"
version = "0.7.8"
dependencies = [
+ "anyhow",
"crossbeam-channel",
"ctrlc",
"log",
"lsp-types",
+ "rustc-hash 2.1.1",
"serde",
"serde_derive",
"serde_json",
+ "toolchain",
]
[[package]]
@@ -1249,9 +1281,9 @@ dependencies = [
[[package]]
name = "memchr"
-version = "2.7.4"
+version = "2.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
+checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
[[package]]
name = "memmap2"
@@ -1273,32 +1305,32 @@ dependencies = [
[[package]]
name = "mimalloc"
-version = "0.1.44"
+version = "0.1.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "99585191385958383e13f6b822e6b6d8d9cf928e7d286ceb092da92b43c87bc1"
+checksum = "995942f432bbb4822a7e9c3faa87a695185b0d09273ba85f097b54f4e458f2af"
dependencies = [
"libmimalloc-sys",
]
[[package]]
name = "miniz_oxide"
-version = "0.8.5"
+version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5"
+checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
dependencies = [
"adler2",
]
[[package]]
name = "mio"
-version = "1.0.3"
+version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd"
+checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c"
dependencies = [
"libc",
"log",
"wasi",
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
]
[[package]]
@@ -1316,7 +1348,7 @@ version = "0.30.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
dependencies = [
- "bitflags 2.9.0",
+ "bitflags 2.9.1",
"cfg-if",
"cfg_aliases",
"libc",
@@ -1334,7 +1366,7 @@ version = "8.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fee8403b3d66ac7b26aee6e40a897d85dc5ce26f44da36b8b73e987cc52e943"
dependencies = [
- "bitflags 2.9.0",
+ "bitflags 2.9.1",
"filetime",
"fsevent-sys",
"inotify",
@@ -1369,10 +1401,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
[[package]]
+name = "num-traits"
+version = "0.2.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
name = "num_cpus"
-version = "1.16.0"
+version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
+checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b"
dependencies = [
"hermit-abi",
"libc",
@@ -1398,9 +1439,9 @@ dependencies = [
[[package]]
name = "once_cell"
-version = "1.21.1"
+version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d75b0bedcc4fe52caa0e03d9f1151a323e4aa5e2d78ba3580400cd3c9e2bc4bc"
+checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "oorandom"
@@ -1415,10 +1456,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
+name = "ordered-float"
+version = "2.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c"
+dependencies = [
+ "num-traits",
+]
+
+[[package]]
+name = "papaya"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f92dd0b07c53a0a0c764db2ace8c541dc47320dad97c2200c2a637ab9dd2328f"
+dependencies = [
+ "equivalent",
+ "seize",
+]
+
+[[package]]
name = "parking_lot"
-version = "0.12.3"
+version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
+checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13"
dependencies = [
"lock_api",
"parking_lot_core",
@@ -1426,9 +1486,9 @@ dependencies = [
[[package]]
name = "parking_lot_core"
-version = "0.9.10"
+version = "0.9.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
+checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5"
dependencies = [
"cfg-if",
"libc",
@@ -1506,9 +1566,18 @@ checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
[[package]]
name = "portable-atomic"
-version = "1.11.0"
+version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e"
+checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
+
+[[package]]
+name = "potential_utf"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585"
+dependencies = [
+ "zerovec",
+]
[[package]]
name = "powerfmt"
@@ -1564,14 +1633,14 @@ dependencies = [
name = "proc-macro-test"
version = "0.0.0"
dependencies = [
- "cargo_metadata",
+ "cargo_metadata 0.20.0",
]
[[package]]
name = "proc-macro2"
-version = "1.0.94"
+version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84"
+checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
dependencies = [
"unicode-ident",
]
@@ -1596,7 +1665,7 @@ dependencies = [
"libc",
"perf-event",
"tikv-jemalloc-ctl",
- "windows-sys 0.59.0",
+ "windows-sys 0.60.2",
]
[[package]]
@@ -1605,7 +1674,7 @@ version = "0.0.0"
dependencies = [
"anyhow",
"base-db",
- "cargo_metadata",
+ "cargo_metadata 0.21.0",
"cfg",
"expect-test",
"intern",
@@ -1650,7 +1719,7 @@ version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b"
dependencies = [
- "bitflags 2.9.0",
+ "bitflags 2.9.1",
"memchr",
"unicase",
]
@@ -1687,11 +1756,11 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_abi"
-version = "0.113.0"
+version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c33b8fa229789975647ca5426be432c7c327ebde89ab15889928185dbcee3230"
+checksum = "3ee51482d1c9d3e538acda8cce723db8eea1a81540544bf362bf4c3d841b2329"
dependencies = [
- "bitflags 2.9.0",
+ "bitflags 2.9.1",
"ra-ap-rustc_hashes",
"ra-ap-rustc_index",
"tracing",
@@ -1699,18 +1768,18 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_hashes"
-version = "0.113.0"
+version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0d68a3e389927002f552938a90b04787f6435f55b46fc5691360470d1cb2e99d"
+checksum = "19c8f1e0c28e24e1b4c55dc08058c6c9829df2204497d4034259f491d348c204"
dependencies = [
"rustc-stable-hash",
]
[[package]]
name = "ra-ap-rustc_index"
-version = "0.113.0"
+version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32502273df2838d0ca13f1c67e2a48feef940e591f9771869f07e2db2acede53"
+checksum = "5f33f429cec6b92fa2c7243883279fb29dd233fdc3e94099aff32aa91aa87f50"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",
@@ -1718,9 +1787,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index_macros"
-version = "0.113.0"
+version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a32f081864ae34c7ae6634edfa7a95ab9260ba85015e8b1d347580eda79d14f"
+checksum = "b9b55910dbe1fe7ef34bdc1d1bcb41e99b377eb680ea58a1218d95d6b4152257"
dependencies = [
"proc-macro2",
"quote",
@@ -1729,9 +1798,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_lexer"
-version = "0.113.0"
+version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed34c51974718c5bd90d876d1364d9725159fc8030c2382b9cb837034152ed68"
+checksum = "22944e31fb91e9b3e75bcbc91e37d958b8c0825a6160927f2856831d2ce83b36"
dependencies = [
"memchr",
"unicode-properties",
@@ -1740,9 +1809,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_parse_format"
-version = "0.113.0"
+version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ff0440e5d27facbf4ff13ea651e48c2f6e360b3dbfc56251b41d60719b965fb8"
+checksum = "81057891bc2063ad9e353f29462fbc47a0f5072560af34428ae9313aaa5e9d97"
dependencies = [
"ra-ap-rustc_lexer",
"rustc-literal-escaper",
@@ -1750,9 +1819,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_pattern_analysis"
-version = "0.113.0"
+version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6056efa57aba3aa0cc69a0bf1a8281624c23ad25b05748d11ebcd4668037bfc"
+checksum = "fe21a3542980d56d2435e96c2720773cac1c63fd4db666417e414729da192eb3"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.1.1",
@@ -1783,11 +1852,11 @@ dependencies = [
[[package]]
name = "redox_syscall"
-version = "0.5.10"
+version = "0.5.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1"
+checksum = "0d04b7d0ee6b4a0207a0a7adb104d23ecb0b47d6beae7152d0fa34b692b29fd6"
dependencies = [
- "bitflags 2.9.0",
+ "bitflags 2.9.1",
]
[[package]]
@@ -1820,7 +1889,7 @@ version = "0.0.0"
dependencies = [
"anyhow",
"base64",
- "cargo_metadata",
+ "cargo_metadata 0.21.0",
"cfg",
"crossbeam-channel",
"dirs",
@@ -1874,16 +1943,16 @@ dependencies = [
"vfs",
"vfs-notify",
"walkdir",
- "windows-sys 0.59.0",
+ "windows-sys 0.60.2",
"xflags",
"xshell",
]
[[package]]
name = "rustc-demangle"
-version = "0.1.24"
+version = "0.1.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"
+checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f"
[[package]]
name = "rustc-hash"
@@ -1899,9 +1968,9 @@ checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
[[package]]
name = "rustc-literal-escaper"
-version = "0.0.2"
+version = "0.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0041b6238913c41fe704213a4a9329e2f685a156d1781998128b4149c230ad04"
+checksum = "ab03008eb631b703dd16978282ae36c73282e7922fe101a4bd072a40ecea7b8b"
[[package]]
name = "rustc-stable-hash"
@@ -1911,11 +1980,11 @@ checksum = "781442f29170c5c93b7185ad559492601acdc71d5bb0706f5868094f45cfcd08"
[[package]]
name = "rustc_apfloat"
-version = "0.2.2+llvm-462a31f5a5ab"
+version = "0.2.3+llvm-462a31f5a5ab"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "121e2195ff969977a4e2b5c9965ea867fce7e4cb5aee5b09dee698a7932d574f"
+checksum = "486c2179b4796f65bfe2ee33679acf0927ac83ecf583ad6c91c3b4570911b9ad"
dependencies = [
- "bitflags 2.9.0",
+ "bitflags 2.9.1",
"smallvec",
]
@@ -1927,16 +1996,18 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8fff508e3d6ef42a32607f7538e17171a877a12015e32036f46e99d00c95781"
+checksum = "2e235afdb8e510f38a07138fbe5a0b64691894358a9c0cbd813b1aade110efc9"
dependencies = [
"boxcar",
"crossbeam-queue",
- "dashmap",
- "hashbrown 0.15.2",
+ "crossbeam-utils",
+ "hashbrown 0.15.4",
"hashlink",
"indexmap",
+ "intrusive-collections",
+ "papaya",
"parking_lot",
"portable-atomic",
"rayon",
@@ -1950,17 +2021,16 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ea72b3c06f2ce6350fe3a0eeb7aaaf842d1d8352b706973c19c4f02e298a87c"
+checksum = "2edb86a7e9c91f6d30c9ce054312721dbe773a162db27bbfae834d16177b30ce"
[[package]]
name = "salsa-macros"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ce92025bc160b27814a207cb78d680973af17f863c7f4fc56cf3a535e22f378"
+checksum = "d0778d6e209051bc4e75acfe83bcd7848601ec3dbe9c3dbb982829020e9128af"
dependencies = [
- "heck",
"proc-macro2",
"quote",
"syn",
@@ -1998,6 +2068,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
+name = "seize"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e4b8d813387d566f627f3ea1b914c068aac94c40ae27ec43f5f33bde65abefe7"
+dependencies = [
+ "libc",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
name = "semver"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2016,6 +2096,27 @@ dependencies = [
]
[[package]]
+name = "serde-untagged"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "299d9c19d7d466db4ab10addd5703e4c615dec2a5a16dbbafe191045e87ee66e"
+dependencies = [
+ "erased-serde",
+ "serde",
+ "typeid",
+]
+
+[[package]]
+name = "serde-value"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c"
+dependencies = [
+ "ordered-float",
+ "serde",
+]
+
+[[package]]
name = "serde_derive"
version = "1.0.219"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2052,9 +2153,9 @@ dependencies = [
[[package]]
name = "serde_spanned"
-version = "0.6.8"
+version = "0.6.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1"
+checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3"
dependencies = [
"serde",
]
@@ -2076,9 +2177,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "smallvec"
-version = "1.14.0"
+version = "1.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd"
+checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
[[package]]
name = "smol_str"
@@ -2122,14 +2223,14 @@ dependencies = [
"libc",
"miow",
"tracing",
- "windows-sys 0.59.0",
+ "windows-sys 0.60.2",
]
[[package]]
name = "syn"
-version = "2.0.100"
+version = "2.0.103"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0"
+checksum = "e4307e30089d6fd6aff212f2da3a1f9e32f3223b1f010fb09b7c95f90f3ca1e8"
dependencies = [
"proc-macro2",
"quote",
@@ -2138,9 +2239,9 @@ dependencies = [
[[package]]
name = "synstructure"
-version = "0.13.1"
+version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971"
+checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
dependencies = [
"proc-macro2",
"quote",
@@ -2183,9 +2284,9 @@ dependencies = [
[[package]]
name = "tenthash"
-version = "1.0.0"
+version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d092d622df8bb64e5de8dc86a3667702d5f1e0fe2f0604c6035540703c8cd1e"
+checksum = "e5c4bcc0a4fa333239f43662d15fbf995f384b2aeaf89c4ab4c83353d6cbb952"
[[package]]
name = "test-fixture"
@@ -2270,12 +2371,11 @@ dependencies = [
[[package]]
name = "thread_local"
-version = "1.1.8"
+version = "1.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c"
+checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185"
dependencies = [
"cfg-if",
- "once_cell",
]
[[package]]
@@ -2311,9 +2411,9 @@ dependencies = [
[[package]]
name = "time"
-version = "0.3.40"
+version = "0.3.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d9c75b47bdff86fa3334a3db91356b8d7d86a9b839dab7d0bdc5c3d3a077618"
+checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40"
dependencies = [
"deranged",
"itoa",
@@ -2334,9 +2434,9 @@ checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c"
[[package]]
name = "time-macros"
-version = "0.2.21"
+version = "0.2.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "29aa485584182073ed57fd5004aa09c371f021325014694e432313345865fd04"
+checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49"
dependencies = [
"num-conv",
"time-core",
@@ -2344,9 +2444,9 @@ dependencies = [
[[package]]
name = "tinystr"
-version = "0.7.6"
+version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f"
+checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b"
dependencies = [
"displaydoc",
"zerovec",
@@ -2354,9 +2454,9 @@ dependencies = [
[[package]]
name = "toml"
-version = "0.8.20"
+version = "0.8.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148"
+checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362"
dependencies = [
"serde",
"serde_spanned",
@@ -2366,27 +2466,34 @@ dependencies = [
[[package]]
name = "toml_datetime"
-version = "0.6.8"
+version = "0.6.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41"
+checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
-version = "0.22.24"
+version = "0.22.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474"
+checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
dependencies = [
"indexmap",
"serde",
"serde_spanned",
"toml_datetime",
+ "toml_write",
"winnow",
]
[[package]]
+name = "toml_write"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"
+
+[[package]]
name = "toolchain"
version = "0.0.0"
dependencies = [
@@ -2407,9 +2514,9 @@ dependencies = [
[[package]]
name = "tracing-attributes"
-version = "0.1.28"
+version = "0.1.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
+checksum = "1b1ffbcf9c6f6b99d386e7444eb608ba646ae452a36b39737deb9663b610f662"
dependencies = [
"proc-macro2",
"quote",
@@ -2418,9 +2525,9 @@ dependencies = [
[[package]]
name = "tracing-core"
-version = "0.1.33"
+version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
+checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
dependencies = [
"once_cell",
"valuable",
@@ -2486,6 +2593,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"
[[package]]
+name = "typeid"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c"
+
+[[package]]
name = "ungrammar"
version = "1.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2528,12 +2641,6 @@ dependencies = [
]
[[package]]
-name = "utf16_iter"
-version = "1.0.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246"
-
-[[package]]
name = "utf8_iter"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2586,9 +2693,9 @@ dependencies = [
[[package]]
name = "wasi"
-version = "0.11.0+wasi-snapshot-preview1"
+version = "0.11.1+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
[[package]]
name = "winapi-util"
@@ -2601,9 +2708,9 @@ dependencies = [
[[package]]
name = "windows"
-version = "0.61.1"
+version = "0.61.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5ee8f3d025738cb02bad7868bbb5f8a6327501e870bf51f1b455b0a2454a419"
+checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893"
dependencies = [
"windows-collections",
"windows-core",
@@ -2623,9 +2730,9 @@ dependencies = [
[[package]]
name = "windows-core"
-version = "0.61.0"
+version = "0.61.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980"
+checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3"
dependencies = [
"windows-implement",
"windows-interface",
@@ -2636,12 +2743,13 @@ dependencies = [
[[package]]
name = "windows-future"
-version = "0.2.0"
+version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a1d6bbefcb7b60acd19828e1bc965da6fcf18a7e39490c5f8be71e54a19ba32"
+checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e"
dependencies = [
"windows-core",
"windows-link",
+ "windows-threading",
]
[[package]]
@@ -2668,9 +2776,9 @@ dependencies = [
[[package]]
name = "windows-link"
-version = "0.1.1"
+version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38"
+checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
[[package]]
name = "windows-numerics"
@@ -2684,18 +2792,18 @@ dependencies = [
[[package]]
name = "windows-result"
-version = "0.3.2"
+version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252"
+checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6"
dependencies = [
"windows-link",
]
[[package]]
name = "windows-strings"
-version = "0.4.0"
+version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97"
+checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57"
dependencies = [
"windows-link",
]
@@ -2728,6 +2836,15 @@ dependencies = [
]
[[package]]
+name = "windows-sys"
+version = "0.60.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
+dependencies = [
+ "windows-targets 0.53.2",
+]
+
+[[package]]
name = "windows-targets"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2760,9 +2877,9 @@ dependencies = [
[[package]]
name = "windows-targets"
-version = "0.53.0"
+version = "0.53.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b"
+checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef"
dependencies = [
"windows_aarch64_gnullvm 0.53.0",
"windows_aarch64_msvc 0.53.0",
@@ -2775,6 +2892,15 @@ dependencies = [
]
[[package]]
+name = "windows-threading"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6"
+dependencies = [
+ "windows-link",
+]
+
+[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2914,9 +3040,9 @@ checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
[[package]]
name = "winnow"
-version = "0.7.3"
+version = "0.7.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1"
+checksum = "74c7b26e3480b707944fc872477815d29a8e429d2f93a1ce000f5fa84a15cbcd"
dependencies = [
"memchr",
]
@@ -2928,16 +3054,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23f6174b2566cc4a74f95e1367ec343e7fa80c93cc8087f5c4a3d6a1088b2118"
[[package]]
-name = "write16"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936"
-
-[[package]]
name = "writeable"
-version = "0.5.5"
+version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
+checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
[[package]]
name = "xflags"
@@ -2992,9 +3112,9 @@ dependencies = [
[[package]]
name = "yoke"
-version = "0.7.5"
+version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40"
+checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
dependencies = [
"serde",
"stable_deref_trait",
@@ -3004,9 +3124,9 @@ dependencies = [
[[package]]
name = "yoke-derive"
-version = "0.7.5"
+version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154"
+checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
dependencies = [
"proc-macro2",
"quote",
@@ -3036,10 +3156,21 @@ dependencies = [
]
[[package]]
+name = "zerotrie"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595"
+dependencies = [
+ "displaydoc",
+ "yoke",
+ "zerofrom",
+]
+
+[[package]]
name = "zerovec"
-version = "0.10.4"
+version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079"
+checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428"
dependencies = [
"yoke",
"zerofrom",
@@ -3048,9 +3179,9 @@ dependencies = [
[[package]]
name = "zerovec-derive"
-version = "0.10.3"
+version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6"
+checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f"
dependencies = [
"proc-macro2",
"quote",
@@ -3059,9 +3190,9 @@ dependencies = [
[[package]]
name = "zip"
-version = "3.0.0"
+version = "4.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "12598812502ed0105f607f941c386f43d441e00148fce9dec3ca5ffb0bde9308"
+checksum = "153a6fff49d264c4babdcfa6b4d534747f520e56e8f0f384f3b808c4b64cc1fd"
dependencies = [
"arbitrary",
"crc32fast",
diff --git a/Cargo.toml b/Cargo.toml
index 975fe277b2..41fa06a76a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"]
resolver = "2"
[workspace.package]
-rust-version = "1.86"
+rust-version = "1.88"
edition = "2024"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer team"]
@@ -49,6 +49,8 @@ debug = 2
# ungrammar = { path = "../ungrammar" }
# salsa = { path = "../salsa" }
+# salsa-macros = { path = "../salsa/components/salsa-macros" }
+# salsa-macro-rules = { path = "../salsa/components/salsa-macro-rules" }
[workspace.dependencies]
# local crates
@@ -87,11 +89,11 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
-ra-ap-rustc_lexer = { version = "0.113", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.113", default-features = false }
-ra-ap-rustc_index = { version = "0.113", default-features = false }
-ra-ap-rustc_abi = { version = "0.113", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.113", default-features = false }
+ra-ap-rustc_lexer = { version = "0.121", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.121", default-features = false }
+ra-ap-rustc_index = { version = "0.121", default-features = false }
+ra-ap-rustc_abi = { version = "0.121", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.121", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
@@ -101,24 +103,24 @@ la-arena = { version = "0.3.1" }
lsp-server = { version = "0.7.8" }
# non-local crates
-anyhow = "1.0.97"
+anyhow = "1.0.98"
arrayvec = "0.7.6"
-bitflags = "2.9.0"
-cargo_metadata = "0.19.2"
-camino = "1.1.9"
-chalk-solve = { version = "0.102.0", default-features = false }
-chalk-ir = "0.102.0"
-chalk-recursive = { version = "0.102.0", default-features = false }
-chalk-derive = "0.102.0"
+bitflags = "2.9.1"
+cargo_metadata = "0.21.0"
+camino = "1.1.10"
+chalk-solve = { version = "0.103.0", default-features = false }
+chalk-ir = "0.103.0"
+chalk-recursive = { version = "0.103.0", default-features = false }
+chalk-derive = "0.103.0"
crossbeam-channel = "0.5.15"
dissimilar = "1.0.10"
dot = "0.1.4"
either = "1.15.0"
expect-test = "1.5.1"
-indexmap = { version = "2.8.0", features = ["serde"] }
+indexmap = { version = "2.9.0", features = ["serde"] }
itertools = "0.14.0"
-libc = "0.2.171"
-libloading = "0.8.6"
+libc = "0.2.172"
+libloading = "0.8.8"
memmap2 = "0.9.5"
nohash-hasher = "0.2.0"
oorandom = "11.1.5"
@@ -129,20 +131,26 @@ object = { version = "0.36.7", default-features = false, features = [
"macho",
"pe",
] }
-process-wrap = { version = "8.2.0", features = ["std"] }
+process-wrap = { version = "8.2.1", features = ["std"] }
pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.6", default-features = false }
rayon = "1.10.0"
rowan = "=0.15.15"
-salsa = { version = "0.22.0", default-features = false, features = ["rayon","salsa_unstable"] }
-salsa-macros = "0.22.0"
+# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
+# on impls without it
+salsa = { version = "0.23.0", default-features = true, features = [
+ "rayon",
+ "salsa_unstable",
+ "macros",
+] }
+salsa-macros = "0.23.0"
semver = "1.0.26"
serde = { version = "1.0.219" }
serde_derive = { version = "1.0.219" }
serde_json = "1.0.140"
rustc-hash = "2.1.1"
-rustc-literal-escaper = "0.0.2"
-smallvec = { version = "1.14.0", features = [
+rustc-literal-escaper = "0.0.4"
+smallvec = { version = "1.15.1", features = [
"const_new",
"union",
"const_generics",
@@ -166,7 +174,7 @@ xshell = "0.2.7"
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap = { version = "=6.1.0", features = ["raw-api", "inline"] }
# We need to freeze the version of the crate, as it needs to match with dashmap
-hashbrown = { version = "0.14.0", features = [
+hashbrown = { version = "0.14.*", features = [
"inline-more",
], default-features = false }
diff --git a/crates/base-db/Cargo.toml b/crates/base-db/Cargo.toml
index 3b423a86f9..ea06fd9c48 100644
--- a/crates/base-db/Cargo.toml
+++ b/crates/base-db/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
la-arena.workspace = true
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 745238167b..8c9393bcc9 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -6,6 +6,7 @@
//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
//! actual IO is done and lowered to input.
+use std::error::Error;
use std::hash::BuildHasherDefault;
use std::{fmt, mem, ops};
@@ -20,11 +21,51 @@ use span::Edition;
use triomphe::Arc;
use vfs::{AbsPathBuf, AnchoredPath, FileId, VfsPath, file_set::FileSet};
-use crate::{CrateWorkspaceData, EditionedFileId, RootQueryDb};
+use crate::{CrateWorkspaceData, EditionedFileId, FxIndexSet, RootQueryDb};
-pub type ProcMacroPaths = FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), String>>;
+pub type ProcMacroPaths =
+ FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), ProcMacroLoadingError>>;
-type FxIndexSet<T> = indexmap::IndexSet<T, FxBuildHasher>;
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ProcMacroLoadingError {
+ Disabled,
+ FailedToBuild,
+ MissingDylibPath,
+ NotYetBuilt,
+ NoProcMacros,
+ ProcMacroSrvError(Box<str>),
+}
+impl ProcMacroLoadingError {
+ pub fn is_hard_error(&self) -> bool {
+ match self {
+ ProcMacroLoadingError::Disabled | ProcMacroLoadingError::NotYetBuilt => false,
+ ProcMacroLoadingError::FailedToBuild
+ | ProcMacroLoadingError::MissingDylibPath
+ | ProcMacroLoadingError::NoProcMacros
+ | ProcMacroLoadingError::ProcMacroSrvError(_) => true,
+ }
+ }
+}
+
+impl Error for ProcMacroLoadingError {}
+impl fmt::Display for ProcMacroLoadingError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ProcMacroLoadingError::Disabled => write!(f, "proc-macro expansion is disabled"),
+ ProcMacroLoadingError::FailedToBuild => write!(f, "proc-macro failed to build"),
+ ProcMacroLoadingError::MissingDylibPath => {
+ write!(f, "proc-macro crate build data is missing a dylib path")
+ }
+ ProcMacroLoadingError::NotYetBuilt => write!(f, "proc-macro not yet built"),
+ ProcMacroLoadingError::NoProcMacros => {
+ write!(f, "proc macro library has no proc macros")
+ }
+ ProcMacroLoadingError::ProcMacroSrvError(msg) => {
+ write!(f, "proc macro server error: {msg}")
+ }
+ }
+ }
+}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 4d4e6cae03..ad17f1730b 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -14,8 +14,9 @@ pub use crate::{
input::{
BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder,
CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap,
- DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroPaths, ReleaseChannel,
- SourceRoot, SourceRootId, TargetLayoutLoadResult, UniqueCrateData,
+ DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroLoadingError,
+ ProcMacroPaths, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
+ UniqueCrateData,
},
};
use dashmap::{DashMap, mapref::entry::Entry};
@@ -28,10 +29,12 @@ use syntax::{Parse, SyntaxError, ast};
use triomphe::Arc;
pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet};
+pub type FxIndexSet<T> = indexmap::IndexSet<T, rustc_hash::FxBuildHasher>;
+
#[macro_export]
macro_rules! impl_intern_key {
($id:ident, $loc:ident) => {
- #[salsa_macros::interned(no_lifetime)]
+ #[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct $id {
pub loc: $loc,
@@ -41,7 +44,7 @@ macro_rules! impl_intern_key {
impl ::std::fmt::Debug for $id {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
f.debug_tuple(stringify!($id))
- .field(&format_args!("{:04x}", self.0.as_u32()))
+ .field(&format_args!("{:04x}", self.0.index()))
.finish()
}
}
@@ -165,7 +168,7 @@ impl Files {
}
}
-#[salsa_macros::interned(no_lifetime, debug, constructor=from_span)]
+#[salsa_macros::interned(no_lifetime, debug, constructor=from_span, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct EditionedFileId {
pub editioned_file_id: span::EditionedFileId,
diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml
index d7764a16c0..ba34966614 100644
--- a/crates/cfg/Cargo.toml
+++ b/crates/cfg/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
rustc-hash.workspace = true
diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml
index c1c89e8d1c..abb4819a76 100644
--- a/crates/hir-def/Cargo.toml
+++ b/crates/hir-def/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
arrayvec.workspace = true
@@ -25,7 +26,7 @@ rustc-hash.workspace = true
tracing.workspace = true
smallvec.workspace = true
triomphe.workspace = true
-rustc_apfloat = "0.2.2"
+rustc_apfloat = "0.2.3"
text-size.workspace = true
salsa.workspace = true
salsa-macros.workspace = true
diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs
index bb6222b1d4..b509e69b0d 100644
--- a/crates/hir-def/src/attr.rs
+++ b/crates/hir-def/src/attr.rs
@@ -14,6 +14,7 @@ use intern::{Symbol, sym};
use la_arena::{ArenaMap, Idx, RawIdx};
use mbe::DelimiterKind;
use rustc_abi::ReprOptions;
+use span::AstIdNode;
use syntax::{
AstPtr,
ast::{self, HasAttrs},
@@ -22,10 +23,10 @@ use triomphe::Arc;
use tt::iter::{TtElement, TtIter};
use crate::{
- AdtId, AttrDefId, GenericParamId, HasModule, ItemTreeLoc, LocalFieldId, Lookup, MacroId,
+ AdtId, AstIdLoc, AttrDefId, GenericParamId, HasModule, LocalFieldId, Lookup, MacroId,
VariantId,
db::DefDatabase,
- item_tree::{AttrOwner, FieldParent, ItemTreeNode},
+ item_tree::block_item_tree_query,
lang_item::LangItem,
nameres::{ModuleOrigin, ModuleSource},
src::{HasChildSource, HasSource},
@@ -42,6 +43,15 @@ pub struct AttrsWithOwner {
}
impl Attrs {
+ pub fn new(
+ db: &dyn DefDatabase,
+ owner: &dyn ast::HasAttrs,
+ span_map: SpanMapRef<'_>,
+ cfg_options: &CfgOptions,
+ ) -> Self {
+ Attrs(RawAttrs::new_expanded(db, owner, span_map, cfg_options))
+ }
+
pub fn get(&self, id: AttrId) -> Option<&Attr> {
(**self).iter().find(|attr| attr.id == id)
}
@@ -94,44 +104,64 @@ impl Attrs {
v: VariantId,
) -> Arc<ArenaMap<LocalFieldId, Attrs>> {
let _p = tracing::info_span!("fields_attrs_query").entered();
- // FIXME: There should be some proper form of mapping between item tree field ids and hir field ids
let mut res = ArenaMap::default();
- let item_tree;
- let (parent, fields, krate) = match v {
+ let (fields, file_id, krate) = match v {
VariantId::EnumVariantId(it) => {
let loc = it.lookup(db);
let krate = loc.parent.lookup(db).container.krate;
- item_tree = loc.id.item_tree(db);
- let variant = &item_tree[loc.id.value];
- (FieldParent::EnumVariant(loc.id.value), &variant.fields, krate)
+ let source = loc.source(db);
+ (source.value.field_list(), source.file_id, krate)
}
VariantId::StructId(it) => {
let loc = it.lookup(db);
let krate = loc.container.krate;
- item_tree = loc.id.item_tree(db);
- let struct_ = &item_tree[loc.id.value];
- (FieldParent::Struct(loc.id.value), &struct_.fields, krate)
+ let source = loc.source(db);
+ (source.value.field_list(), source.file_id, krate)
}
VariantId::UnionId(it) => {
let loc = it.lookup(db);
let krate = loc.container.krate;
- item_tree = loc.id.item_tree(db);
- let union_ = &item_tree[loc.id.value];
- (FieldParent::Union(loc.id.value), &union_.fields, krate)
+ let source = loc.source(db);
+ (
+ source.value.record_field_list().map(ast::FieldList::RecordFieldList),
+ source.file_id,
+ krate,
+ )
}
};
+ let Some(fields) = fields else {
+ return Arc::new(res);
+ };
let cfg_options = krate.cfg_options(db);
-
- let mut idx = 0;
- for (id, _field) in fields.iter().enumerate() {
- let attrs = item_tree.attrs(db, krate, AttrOwner::make_field_indexed(parent, id));
- if attrs.is_cfg_enabled(cfg_options) {
- res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
- idx += 1;
+ let span_map = db.span_map(file_id);
+
+ match fields {
+ ast::FieldList::RecordFieldList(fields) => {
+ let mut idx = 0;
+ for field in fields.fields() {
+ let attrs =
+ Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options));
+ if attrs.is_cfg_enabled(cfg_options).is_ok() {
+ res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
+ idx += 1;
+ }
+ }
+ }
+ ast::FieldList::TupleFieldList(fields) => {
+ let mut idx = 0;
+ for field in fields.fields() {
+ let attrs =
+ Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options));
+ if attrs.is_cfg_enabled(cfg_options).is_ok() {
+ res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
+ idx += 1;
+ }
+ }
}
}
+ res.shrink_to_fit();
Arc::new(res)
}
}
@@ -167,11 +197,10 @@ impl Attrs {
}
#[inline]
- pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool {
- match self.cfg() {
- None => true,
- Some(cfg) => cfg_options.check(&cfg) != Some(false),
- }
+ pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> Result<(), CfgExpr> {
+ self.cfgs().try_for_each(|cfg| {
+ if cfg_options.check(&cfg) != Some(false) { Ok(()) } else { Err(cfg) }
+ })
}
#[inline]
@@ -488,61 +517,59 @@ impl AttrsWithOwner {
pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs {
let _p = tracing::info_span!("attrs_query").entered();
// FIXME: this should use `Trace` to avoid duplication in `source_map` below
- let raw_attrs = match def {
+ match def {
AttrDefId::ModuleId(module) => {
let def_map = module.def_map(db);
let mod_data = &def_map[module.local_id];
- match mod_data.origin {
- ModuleOrigin::File { definition, declaration_tree_id, .. } => {
+ let raw_attrs = match mod_data.origin {
+ ModuleOrigin::File { definition, declaration_tree_id, declaration, .. } => {
let decl_attrs = declaration_tree_id
.item_tree(db)
- .raw_attrs(AttrOwner::ModItem(declaration_tree_id.value.into()))
+ .raw_attrs(declaration.upcast())
.clone();
let tree = db.file_item_tree(definition.into());
- let def_attrs = tree.raw_attrs(AttrOwner::TopLevel).clone();
+ let def_attrs = tree.top_level_raw_attrs().clone();
decl_attrs.merge(def_attrs)
}
ModuleOrigin::CrateRoot { definition } => {
let tree = db.file_item_tree(definition.into());
- tree.raw_attrs(AttrOwner::TopLevel).clone()
+ tree.top_level_raw_attrs().clone()
+ }
+ ModuleOrigin::Inline { definition_tree_id, definition } => {
+ definition_tree_id.item_tree(db).raw_attrs(definition.upcast()).clone()
}
- ModuleOrigin::Inline { definition_tree_id, .. } => definition_tree_id
- .item_tree(db)
- .raw_attrs(AttrOwner::ModItem(definition_tree_id.value.into()))
- .clone(),
ModuleOrigin::BlockExpr { id, .. } => {
- let tree = db.block_item_tree(id);
- tree.raw_attrs(AttrOwner::TopLevel).clone()
+ let tree = block_item_tree_query(db, id);
+ tree.top_level_raw_attrs().clone()
}
- }
- }
- AttrDefId::FieldId(it) => {
- return db.fields_attrs(it.parent)[it.local_id].clone();
+ };
+ Attrs::expand_cfg_attr(db, module.krate, raw_attrs)
}
- AttrDefId::EnumVariantId(it) => attrs_from_item_tree_loc(db, it),
+ AttrDefId::FieldId(it) => db.fields_attrs(it.parent)[it.local_id].clone(),
+ AttrDefId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it),
AttrDefId::AdtId(it) => match it {
- AdtId::StructId(it) => attrs_from_item_tree_loc(db, it),
- AdtId::EnumId(it) => attrs_from_item_tree_loc(db, it),
- AdtId::UnionId(it) => attrs_from_item_tree_loc(db, it),
+ AdtId::StructId(it) => attrs_from_ast_id_loc(db, it),
+ AdtId::EnumId(it) => attrs_from_ast_id_loc(db, it),
+ AdtId::UnionId(it) => attrs_from_ast_id_loc(db, it),
},
- AttrDefId::TraitId(it) => attrs_from_item_tree_loc(db, it),
- AttrDefId::TraitAliasId(it) => attrs_from_item_tree_loc(db, it),
+ AttrDefId::TraitId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::TraitAliasId(it) => attrs_from_ast_id_loc(db, it),
AttrDefId::MacroId(it) => match it {
- MacroId::Macro2Id(it) => attrs_from_item_tree_loc(db, it),
- MacroId::MacroRulesId(it) => attrs_from_item_tree_loc(db, it),
- MacroId::ProcMacroId(it) => attrs_from_item_tree_loc(db, it),
+ MacroId::Macro2Id(it) => attrs_from_ast_id_loc(db, it),
+ MacroId::MacroRulesId(it) => attrs_from_ast_id_loc(db, it),
+ MacroId::ProcMacroId(it) => attrs_from_ast_id_loc(db, it),
},
- AttrDefId::ImplId(it) => attrs_from_item_tree_loc(db, it),
- AttrDefId::ConstId(it) => attrs_from_item_tree_loc(db, it),
- AttrDefId::StaticId(it) => attrs_from_item_tree_loc(db, it),
- AttrDefId::FunctionId(it) => attrs_from_item_tree_loc(db, it),
- AttrDefId::TypeAliasId(it) => attrs_from_item_tree_loc(db, it),
+ AttrDefId::ImplId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::ConstId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::StaticId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::FunctionId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::TypeAliasId(it) => attrs_from_ast_id_loc(db, it),
AttrDefId::GenericParamId(it) => match it {
GenericParamId::ConstParamId(it) => {
let src = it.parent().child_source(db);
// FIXME: We should be never getting `None` here.
- return Attrs(match src.value.get(it.local_id()) {
+ Attrs(match src.value.get(it.local_id()) {
Some(val) => RawAttrs::new_expanded(
db,
val,
@@ -550,12 +577,12 @@ impl AttrsWithOwner {
def.krate(db).cfg_options(db),
),
None => RawAttrs::EMPTY,
- });
+ })
}
GenericParamId::TypeParamId(it) => {
let src = it.parent().child_source(db);
// FIXME: We should be never getting `None` here.
- return Attrs(match src.value.get(it.local_id()) {
+ Attrs(match src.value.get(it.local_id()) {
Some(val) => RawAttrs::new_expanded(
db,
val,
@@ -563,12 +590,12 @@ impl AttrsWithOwner {
def.krate(db).cfg_options(db),
),
None => RawAttrs::EMPTY,
- });
+ })
}
GenericParamId::LifetimeParamId(it) => {
let src = it.parent.child_source(db);
// FIXME: We should be never getting `None` here.
- return Attrs(match src.value.get(it.local_id) {
+ Attrs(match src.value.get(it.local_id) {
Some(val) => RawAttrs::new_expanded(
db,
val,
@@ -576,16 +603,13 @@ impl AttrsWithOwner {
def.krate(db).cfg_options(db),
),
None => RawAttrs::EMPTY,
- });
+ })
}
},
- AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),
- AttrDefId::ExternCrateId(it) => attrs_from_item_tree_loc(db, it),
- AttrDefId::UseId(it) => attrs_from_item_tree_loc(db, it),
- };
-
- let attrs = raw_attrs.expand_cfg_attr(db, def.krate(db));
- Attrs(attrs)
+ AttrDefId::ExternBlockId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it),
+ AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it),
+ }
}
pub fn source_map(&self, db: &dyn DefDatabase) -> AttrSourceMap {
@@ -787,14 +811,15 @@ fn any_has_attrs<'db>(
id.lookup(db).source(db).map(ast::AnyHasAttrs::new)
}
-fn attrs_from_item_tree_loc<'db, N: ItemTreeNode>(
+fn attrs_from_ast_id_loc<'db, N: AstIdNode + HasAttrs>(
db: &(dyn DefDatabase + 'db),
- lookup: impl Lookup<Database = dyn DefDatabase, Data = impl ItemTreeLoc<Id = N>>,
-) -> RawAttrs {
- let id = lookup.lookup(db).item_tree_id();
- let tree = id.item_tree(db);
- let attr_owner = N::attr_owner(id.value);
- tree.raw_attrs(attr_owner).clone()
+ lookup: impl Lookup<Database = dyn DefDatabase, Data = impl AstIdLoc<Ast = N> + HasModule>,
+) -> Attrs {
+ let loc = lookup.lookup(db);
+ let source = loc.source(db);
+ let span_map = db.span_map(source.file_id);
+ let cfg_options = loc.krate(db).cfg_options(db);
+ Attrs(RawAttrs::new_expanded(db, &source.value, span_map.as_ref(), cfg_options))
}
pub(crate) fn fields_attrs_source_map(
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index 4a9a3b12cf..c67bb2422a 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -1,37 +1,34 @@
//! Defines database & queries for name resolution.
use base_db::{Crate, RootQueryDb, SourceDatabase};
use either::Either;
-use hir_expand::{EditionedFileId, HirFileId, MacroCallId, MacroDefId, db::ExpandDatabase};
+use hir_expand::{
+ EditionedFileId, HirFileId, InFile, Lookup, MacroCallId, MacroDefId, MacroDefKind,
+ db::ExpandDatabase,
+};
use intern::sym;
use la_arena::ArenaMap;
use syntax::{AstPtr, ast};
-use thin_vec::ThinVec;
use triomphe::Arc;
use crate::{
- AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, EnumVariantId,
- EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId,
- FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalFieldId, Macro2Id, Macro2Loc, MacroId,
- MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ProcMacroId, ProcMacroLoc, StaticId,
- StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc, TypeAliasId,
- TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId,
+ AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc,
+ EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc,
+ FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalFieldId, Macro2Id, Macro2Loc,
+ MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ProcMacroId,
+ ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId,
+ TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId,
attr::{Attrs, AttrsWithOwner},
expr_store::{
Body, BodySourceMap, ExpressionStore, ExpressionStoreSourceMap, scope::ExprScopes,
},
hir::generics::GenericParams,
import_map::ImportMap,
- item_tree::{AttrOwner, ItemTree},
+ item_tree::{ItemTree, file_item_tree_query},
lang_item::{self, LangItem},
- nameres::{
- assoc::{ImplItems, TraitItems},
- crate_def_map,
- diagnostics::DefDiagnostics,
- },
+ nameres::crate_def_map,
signatures::{
- ConstSignature, EnumSignature, EnumVariants, FunctionSignature, ImplSignature,
- InactiveEnumVariantCode, StaticSignature, StructSignature, TraitAliasSignature,
- TraitSignature, TypeAliasSignature, UnionSignature, VariantFields,
+ ConstSignature, EnumSignature, FunctionSignature, ImplSignature, StaticSignature,
+ StructSignature, TraitAliasSignature, TraitSignature, TypeAliasSignature, UnionSignature,
},
tt,
visibility::{self, Visibility},
@@ -92,7 +89,7 @@ pub trait InternDatabase: RootQueryDb {
#[salsa::interned]
fn intern_macro_rules(&self, loc: MacroRulesLoc) -> MacroRulesId;
- // // endregion: items
+ // endregion: items
#[salsa::interned]
fn intern_block(&self, loc: BlockLoc) -> BlockId;
@@ -105,11 +102,9 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
fn expand_proc_attr_macros(&self) -> bool;
/// Computes an [`ItemTree`] for the given file or macro expansion.
- #[salsa::invoke(ItemTree::file_item_tree_query)]
- fn file_item_tree(&self, file_id: HirFileId) -> Arc<ItemTree>;
-
- #[salsa::invoke(ItemTree::block_item_tree_query)]
- fn block_item_tree(&self, block_id: BlockId) -> Arc<ItemTree>;
+ #[salsa::invoke(file_item_tree_query)]
+ #[salsa::transparent]
+ fn file_item_tree(&self, file_id: HirFileId) -> &ItemTree;
/// Turns a MacroId into a MacroDefId, describing the macro's definition post name resolution.
#[salsa::invoke(macro_def)]
@@ -117,42 +112,6 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
// region:data
- #[salsa::invoke(VariantFields::query)]
- fn variant_fields_with_source_map(
- &self,
- id: VariantId,
- ) -> (Arc<VariantFields>, Arc<ExpressionStoreSourceMap>);
-
- #[salsa::tracked]
- fn enum_variants(&self, id: EnumId) -> Arc<EnumVariants> {
- self.enum_variants_with_diagnostics(id).0
- }
-
- #[salsa::invoke(EnumVariants::enum_variants_query)]
- fn enum_variants_with_diagnostics(
- &self,
- id: EnumId,
- ) -> (Arc<EnumVariants>, Option<Arc<ThinVec<InactiveEnumVariantCode>>>);
-
- #[salsa::transparent]
- #[salsa::invoke(ImplItems::impl_items_query)]
- fn impl_items(&self, e: ImplId) -> Arc<ImplItems>;
-
- #[salsa::invoke(ImplItems::impl_items_with_diagnostics_query)]
- fn impl_items_with_diagnostics(&self, e: ImplId) -> (Arc<ImplItems>, DefDiagnostics);
-
- #[salsa::transparent]
- #[salsa::invoke(TraitItems::trait_items_query)]
- fn trait_items(&self, e: TraitId) -> Arc<TraitItems>;
-
- #[salsa::invoke(TraitItems::trait_items_with_diagnostics_query)]
- fn trait_items_with_diagnostics(&self, tr: TraitId) -> (Arc<TraitItems>, DefDiagnostics);
-
- #[salsa::tracked]
- fn variant_fields(&self, id: VariantId) -> Arc<VariantFields> {
- self.variant_fields_with_source_map(id).0
- }
-
#[salsa::tracked]
fn trait_signature(&self, trait_: TraitId) -> Arc<TraitSignature> {
self.trait_signature_with_source_map(trait_).0
@@ -323,16 +282,8 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
#[salsa::invoke(visibility::field_visibilities_query)]
fn field_visibilities(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Visibility>>;
- // FIXME: unify function_visibility and const_visibility?
-
- #[salsa::invoke(visibility::function_visibility_query)]
- fn function_visibility(&self, def: FunctionId) -> Visibility;
-
- #[salsa::invoke(visibility::const_visibility_query)]
- fn const_visibility(&self, def: ConstId) -> Visibility;
-
- #[salsa::invoke(visibility::type_alias_visibility_query)]
- fn type_alias_visibility(&self, def: TypeAliasId) -> Visibility;
+ #[salsa::invoke(visibility::assoc_visibility_query)]
+ fn assoc_visibility(&self, def: AssocItemId) -> Visibility;
// endregion:visibilities
@@ -368,7 +319,7 @@ fn include_macro_invoc(
fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: Crate) -> bool {
let file = crate_id.data(db).root_file_id(db);
let item_tree = db.file_item_tree(file.into());
- let attrs = item_tree.raw_attrs(AttrOwner::TopLevel);
+ let attrs = item_tree.top_level_raw_attrs();
for attr in &**attrs {
match attr.path().as_ident() {
Some(ident) if *ident == sym::no_std => return true,
@@ -399,10 +350,6 @@ fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: Crate) -> bool {
}
fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
- use hir_expand::InFile;
-
- use crate::{Lookup, MacroDefKind, MacroExpander};
-
let kind = |expander, file_id, m| {
let in_file = InFile::new(file_id, m);
match expander {
@@ -418,11 +365,9 @@ fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
MacroId::Macro2Id(it) => {
let loc: Macro2Loc = it.lookup(db);
- let item_tree = loc.id.item_tree(db);
- let makro = &item_tree[loc.id.value];
MacroDefId {
krate: loc.container.krate,
- kind: kind(loc.expander, loc.id.file_id(), makro.ast_id.upcast()),
+ kind: kind(loc.expander, loc.id.file_id, loc.id.value.upcast()),
local_inner: false,
allow_internal_unsafe: loc.allow_internal_unsafe,
edition: loc.edition,
@@ -431,11 +376,9 @@ fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
MacroId::MacroRulesId(it) => {
let loc: MacroRulesLoc = it.lookup(db);
- let item_tree = loc.id.item_tree(db);
- let makro = &item_tree[loc.id.value];
MacroDefId {
krate: loc.container.krate,
- kind: kind(loc.expander, loc.id.file_id(), makro.ast_id.upcast()),
+ kind: kind(loc.expander, loc.id.file_id, loc.id.value.upcast()),
local_inner: loc.flags.contains(MacroRulesLocFlags::LOCAL_INNER),
allow_internal_unsafe: loc
.flags
@@ -446,15 +389,9 @@ fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
MacroId::ProcMacroId(it) => {
let loc = it.lookup(db);
- let item_tree = loc.id.item_tree(db);
- let makro = &item_tree[loc.id.value];
MacroDefId {
krate: loc.container.krate,
- kind: MacroDefKind::ProcMacro(
- InFile::new(loc.id.file_id(), makro.ast_id),
- loc.expander,
- loc.kind,
- ),
+ kind: MacroDefKind::ProcMacro(loc.id, loc.expander, loc.kind),
local_inner: false,
allow_internal_unsafe: false,
edition: loc.edition,
diff --git a/crates/hir-def/src/expr_store.rs b/crates/hir-def/src/expr_store.rs
index f617c3225a..d3dfc05eb2 100644
--- a/crates/hir-def/src/expr_store.rs
+++ b/crates/hir-def/src/expr_store.rs
@@ -9,7 +9,10 @@ pub mod scope;
#[cfg(test)]
mod tests;
-use std::ops::{Deref, Index};
+use std::{
+ ops::{Deref, Index},
+ sync::LazyLock,
+};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
@@ -19,6 +22,8 @@ use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::{Edition, SyntaxContext};
use syntax::{AstPtr, SyntaxNodePtr, ast};
+use thin_vec::ThinVec;
+use triomphe::Arc;
use tt::TextRange;
use crate::{
@@ -89,17 +94,17 @@ pub type TypeSource = InFile<TypePtr>;
pub type LifetimePtr = AstPtr<ast::Lifetime>;
pub type LifetimeSource = InFile<LifetimePtr>;
-#[derive(Debug, Eq, PartialEq)]
-pub struct ExpressionStore {
- pub exprs: Arena<Expr>,
- pub pats: Arena<Pat>,
- pub bindings: Arena<Binding>,
- pub labels: Arena<Label>,
- pub types: Arena<TypeRef>,
- pub lifetimes: Arena<LifetimeRef>,
+// We split the store into types-only and expressions, because most stores (e.g. generics)
+// don't store any expressions and this saves memory. Same thing for the source map.
+#[derive(Debug, PartialEq, Eq)]
+struct ExpressionOnlyStore {
+ exprs: Arena<Expr>,
+ pats: Arena<Pat>,
+ bindings: Arena<Binding>,
+ labels: Arena<Label>,
/// Id of the closure/coroutine that owns the corresponding binding. If a binding is owned by the
/// top level expression, it will not be listed in here.
- pub binding_owners: FxHashMap<BindingId, ExprId>,
+ binding_owners: FxHashMap<BindingId, ExprId>,
/// Block expressions in this store that may contain inner items.
block_scopes: Box<[BlockId]>,
@@ -110,8 +115,15 @@ pub struct ExpressionStore {
ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
}
-#[derive(Debug, Eq, PartialEq, Default)]
-pub struct ExpressionStoreSourceMap {
+#[derive(Debug, PartialEq, Eq)]
+pub struct ExpressionStore {
+ expr_only: Option<Box<ExpressionOnlyStore>>,
+ pub types: Arena<TypeRef>,
+ pub lifetimes: Arena<LifetimeRef>,
+}
+
+#[derive(Debug, Eq, Default)]
+struct ExpressionOnlySourceMap {
// AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map
// to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected).
expr_map: FxHashMap<ExprSource, ExprOrPatId>,
@@ -123,26 +135,83 @@ pub struct ExpressionStoreSourceMap {
label_map: FxHashMap<LabelSource, LabelId>,
label_map_back: ArenaMap<LabelId, LabelSource>,
- binding_definitions: FxHashMap<BindingId, SmallVec<[PatId; 4]>>,
+ binding_definitions:
+ ArenaMap<BindingId, SmallVec<[PatId; 2 * size_of::<usize>() / size_of::<PatId>()]>>,
/// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
/// Instead, we use id of expression (`92`) to identify the field.
field_map_back: FxHashMap<ExprId, FieldSource>,
pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
+ template_map: Option<Box<FormatTemplate>>,
+
+ expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
+
+ /// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
+ /// the source map (since they're just as volatile).
+ //
+ // We store diagnostics on the `ExpressionOnlySourceMap` because diagnostics are rare (except
+ // maybe for cfgs, and they are also not common in type places).
+ diagnostics: ThinVec<ExpressionStoreDiagnostics>,
+}
+
+impl PartialEq for ExpressionOnlySourceMap {
+ fn eq(&self, other: &Self) -> bool {
+ // we only need to compare one of the two mappings
+ // as the other is a reverse mapping and thus will compare
+ // the same as normal mapping
+ let Self {
+ expr_map: _,
+ expr_map_back,
+ pat_map: _,
+ pat_map_back,
+ label_map: _,
+ label_map_back,
+ // If this changed, our pattern data must have changed
+ binding_definitions: _,
+ // If this changed, our expression data must have changed
+ field_map_back: _,
+ // If this changed, our pattern data must have changed
+ pat_field_map_back: _,
+ template_map,
+ expansions,
+ diagnostics,
+ } = self;
+ *expr_map_back == other.expr_map_back
+ && *pat_map_back == other.pat_map_back
+ && *label_map_back == other.label_map_back
+ && *template_map == other.template_map
+ && *expansions == other.expansions
+ && *diagnostics == other.diagnostics
+ }
+}
+
+#[derive(Debug, Eq, Default)]
+pub struct ExpressionStoreSourceMap {
+ expr_only: Option<Box<ExpressionOnlySourceMap>>,
+
types_map_back: ArenaMap<TypeRefId, TypeSource>,
types_map: FxHashMap<TypeSource, TypeRefId>,
lifetime_map_back: ArenaMap<LifetimeRefId, LifetimeSource>,
+ #[expect(
+ unused,
+ reason = "this is here for completeness, and maybe we'll need it in the future"
+ )]
lifetime_map: FxHashMap<LifetimeSource, LifetimeRefId>,
+}
- template_map: Option<Box<FormatTemplate>>,
-
- pub expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
-
- /// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
- /// the source map (since they're just as volatile).
- pub diagnostics: Vec<ExpressionStoreDiagnostics>,
+impl PartialEq for ExpressionStoreSourceMap {
+ fn eq(&self, other: &Self) -> bool {
+ // we only need to compare one of the two mappings
+ // as the other is a reverse mapping and thus will compare
+ // the same as normal mapping
+ let Self { expr_only, types_map_back, types_map: _, lifetime_map_back, lifetime_map: _ } =
+ self;
+ *expr_only == other.expr_only
+ && *types_map_back == other.types_map_back
+ && *lifetime_map_back == other.lifetime_map_back
+ }
}
/// The body of an item (function, const etc.).
@@ -157,6 +226,42 @@ pub struct ExpressionStoreBuilder {
pub types: Arena<TypeRef>,
block_scopes: Vec<BlockId>,
ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
+
+ // AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map
+ // to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected).
+ expr_map: FxHashMap<ExprSource, ExprOrPatId>,
+ expr_map_back: ArenaMap<ExprId, ExprOrPatSource>,
+
+ pat_map: FxHashMap<PatSource, ExprOrPatId>,
+ pat_map_back: ArenaMap<PatId, ExprOrPatSource>,
+
+ label_map: FxHashMap<LabelSource, LabelId>,
+ label_map_back: ArenaMap<LabelId, LabelSource>,
+
+ types_map_back: ArenaMap<TypeRefId, TypeSource>,
+ types_map: FxHashMap<TypeSource, TypeRefId>,
+
+ lifetime_map_back: ArenaMap<LifetimeRefId, LifetimeSource>,
+ lifetime_map: FxHashMap<LifetimeSource, LifetimeRefId>,
+
+ binding_definitions:
+ ArenaMap<BindingId, SmallVec<[PatId; 2 * size_of::<usize>() / size_of::<PatId>()]>>,
+
+ /// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
+ /// Instead, we use id of expression (`92`) to identify the field.
+ field_map_back: FxHashMap<ExprId, FieldSource>,
+ pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
+
+ template_map: Option<Box<FormatTemplate>>,
+
+ expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
+
+ /// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
+ /// the source map (since they're just as volatile).
+ //
+ // We store diagnostics on the `ExpressionOnlySourceMap` because diagnostics are rare (except
+ // maybe for cfgs, and they are also not common in type places).
+ pub(crate) diagnostics: Vec<ExpressionStoreDiagnostics>,
}
#[derive(Default, Debug, Eq, PartialEq)]
@@ -184,7 +289,7 @@ pub enum ExpressionStoreDiagnostics {
}
impl ExpressionStoreBuilder {
- pub fn finish(self) -> ExpressionStore {
+ pub fn finish(self) -> (ExpressionStore, ExpressionStoreSourceMap) {
let Self {
block_scopes,
mut exprs,
@@ -195,6 +300,23 @@ impl ExpressionStoreBuilder {
mut ident_hygiene,
mut types,
mut lifetimes,
+
+ mut expr_map,
+ mut expr_map_back,
+ mut pat_map,
+ mut pat_map_back,
+ mut label_map,
+ mut label_map_back,
+ mut types_map_back,
+ mut types_map,
+ mut lifetime_map_back,
+ mut lifetime_map,
+ mut binding_definitions,
+ mut field_map_back,
+ mut pat_field_map_back,
+ mut template_map,
+ mut expansions,
+ diagnostics,
} = self;
exprs.shrink_to_fit();
labels.shrink_to_fit();
@@ -205,27 +327,104 @@ impl ExpressionStoreBuilder {
types.shrink_to_fit();
lifetimes.shrink_to_fit();
- ExpressionStore {
- exprs,
- pats,
- bindings,
- labels,
- binding_owners,
- types,
- lifetimes,
- block_scopes: block_scopes.into_boxed_slice(),
- ident_hygiene,
+ expr_map.shrink_to_fit();
+ expr_map_back.shrink_to_fit();
+ pat_map.shrink_to_fit();
+ pat_map_back.shrink_to_fit();
+ label_map.shrink_to_fit();
+ label_map_back.shrink_to_fit();
+ types_map_back.shrink_to_fit();
+ types_map.shrink_to_fit();
+ lifetime_map_back.shrink_to_fit();
+ lifetime_map.shrink_to_fit();
+ binding_definitions.shrink_to_fit();
+ field_map_back.shrink_to_fit();
+ pat_field_map_back.shrink_to_fit();
+ if let Some(template_map) = &mut template_map {
+ let FormatTemplate {
+ format_args_to_captures,
+ asm_to_captures,
+ implicit_capture_to_source,
+ } = &mut **template_map;
+ format_args_to_captures.shrink_to_fit();
+ asm_to_captures.shrink_to_fit();
+ implicit_capture_to_source.shrink_to_fit();
}
+ expansions.shrink_to_fit();
+
+ let has_exprs =
+ !exprs.is_empty() || !labels.is_empty() || !pats.is_empty() || !bindings.is_empty();
+
+ let store = {
+ let expr_only = if has_exprs {
+ Some(Box::new(ExpressionOnlyStore {
+ exprs,
+ pats,
+ bindings,
+ labels,
+ binding_owners,
+ block_scopes: block_scopes.into_boxed_slice(),
+ ident_hygiene,
+ }))
+ } else {
+ None
+ };
+ ExpressionStore { expr_only, types, lifetimes }
+ };
+
+ let source_map = {
+ let expr_only = if has_exprs || !expansions.is_empty() || !diagnostics.is_empty() {
+ Some(Box::new(ExpressionOnlySourceMap {
+ expr_map,
+ expr_map_back,
+ pat_map,
+ pat_map_back,
+ label_map,
+ label_map_back,
+ binding_definitions,
+ field_map_back,
+ pat_field_map_back,
+ template_map,
+ expansions,
+ diagnostics: ThinVec::from_iter(diagnostics),
+ }))
+ } else {
+ None
+ };
+ ExpressionStoreSourceMap {
+ expr_only,
+ types_map_back,
+ types_map,
+ lifetime_map_back,
+ lifetime_map,
+ }
+ };
+
+ (store, source_map)
}
}
impl ExpressionStore {
+ pub fn empty_singleton() -> (Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>) {
+ static EMPTY: LazyLock<(Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>)> =
+ LazyLock::new(|| {
+ let (store, source_map) = ExpressionStoreBuilder::default().finish();
+ (Arc::new(store), Arc::new(source_map))
+ });
+ EMPTY.clone()
+ }
+
/// Returns an iterator over all block expressions in this store that define inner items.
pub fn blocks<'a>(
&'a self,
db: &'a dyn DefDatabase,
) -> impl Iterator<Item = (BlockId, &'a DefMap)> + 'a {
- self.block_scopes.iter().map(move |&block| (block, block_def_map(db, block)))
+ self.expr_only
+ .as_ref()
+ .map(|it| &*it.block_scopes)
+ .unwrap_or_default()
+ .iter()
+ .map(move |&block| (block, block_def_map(db, block)))
}
pub fn walk_bindings_in_pat(&self, pat_id: PatId, mut f: impl FnMut(BindingId)) {
@@ -272,7 +471,8 @@ impl ExpressionStore {
}
pub fn is_binding_upvar(&self, binding: BindingId, relative_to: ExprId) -> bool {
- match self.binding_owners.get(&binding) {
+ let Some(expr_only) = &self.expr_only else { return false };
+ match expr_only.binding_owners.get(&binding) {
Some(it) => {
// We assign expression ids in a way that outer closures will receive
// a lower id
@@ -282,6 +482,11 @@ impl ExpressionStore {
}
}
+ #[inline]
+ pub fn binding_owner(&self, id: BindingId) -> Option<ExprId> {
+ self.expr_only.as_ref()?.binding_owners.get(&id).copied()
+ }
+
/// Walks the immediate children expressions and calls `f` for each child expression.
///
/// Note that this does not walk const blocks.
@@ -553,16 +758,22 @@ impl ExpressionStore {
});
}
+ #[inline]
+ #[track_caller]
+ fn assert_expr_only(&self) -> &ExpressionOnlyStore {
+ self.expr_only.as_ref().expect("should have `ExpressionStore::expr_only`")
+ }
+
fn binding_hygiene(&self, binding: BindingId) -> HygieneId {
- self.bindings[binding].hygiene
+ self.assert_expr_only().bindings[binding].hygiene
}
pub fn expr_path_hygiene(&self, expr: ExprId) -> HygieneId {
- self.ident_hygiene.get(&expr.into()).copied().unwrap_or(HygieneId::ROOT)
+ self.assert_expr_only().ident_hygiene.get(&expr.into()).copied().unwrap_or(HygieneId::ROOT)
}
pub fn pat_path_hygiene(&self, pat: PatId) -> HygieneId {
- self.ident_hygiene.get(&pat.into()).copied().unwrap_or(HygieneId::ROOT)
+ self.assert_expr_only().ident_hygiene.get(&pat.into()).copied().unwrap_or(HygieneId::ROOT)
}
pub fn expr_or_pat_path_hygiene(&self, id: ExprOrPatId) -> HygieneId {
@@ -571,43 +782,72 @@ impl ExpressionStore {
ExprOrPatId::PatId(id) => self.pat_path_hygiene(id),
}
}
+
+ #[inline]
+ pub fn exprs(&self) -> impl Iterator<Item = (ExprId, &Expr)> {
+ match &self.expr_only {
+ Some(it) => it.exprs.iter(),
+ None => const { &Arena::new() }.iter(),
+ }
+ }
+
+ #[inline]
+ pub fn pats(&self) -> impl Iterator<Item = (PatId, &Pat)> {
+ match &self.expr_only {
+ Some(it) => it.pats.iter(),
+ None => const { &Arena::new() }.iter(),
+ }
+ }
+
+ #[inline]
+ pub fn bindings(&self) -> impl Iterator<Item = (BindingId, &Binding)> {
+ match &self.expr_only {
+ Some(it) => it.bindings.iter(),
+ None => const { &Arena::new() }.iter(),
+ }
+ }
}
impl Index<ExprId> for ExpressionStore {
type Output = Expr;
+ #[inline]
fn index(&self, expr: ExprId) -> &Expr {
- &self.exprs[expr]
+ &self.assert_expr_only().exprs[expr]
}
}
impl Index<PatId> for ExpressionStore {
type Output = Pat;
+ #[inline]
fn index(&self, pat: PatId) -> &Pat {
- &self.pats[pat]
+ &self.assert_expr_only().pats[pat]
}
}
impl Index<LabelId> for ExpressionStore {
type Output = Label;
+ #[inline]
fn index(&self, label: LabelId) -> &Label {
- &self.labels[label]
+ &self.assert_expr_only().labels[label]
}
}
impl Index<BindingId> for ExpressionStore {
type Output = Binding;
+ #[inline]
fn index(&self, b: BindingId) -> &Binding {
- &self.bindings[b]
+ &self.assert_expr_only().bindings[b]
}
}
impl Index<TypeRefId> for ExpressionStore {
type Output = TypeRef;
+ #[inline]
fn index(&self, b: TypeRefId) -> &TypeRef {
&self.types[b]
}
@@ -616,6 +856,7 @@ impl Index<TypeRefId> for ExpressionStore {
impl Index<LifetimeRefId> for ExpressionStore {
type Output = LifetimeRef;
+ #[inline]
fn index(&self, b: LifetimeRefId) -> &LifetimeRef {
&self.lifetimes[b]
}
@@ -643,30 +884,46 @@ impl ExpressionStoreSourceMap {
}
}
+ #[inline]
+ fn expr_or_synthetic(&self) -> Result<&ExpressionOnlySourceMap, SyntheticSyntax> {
+ self.expr_only.as_deref().ok_or(SyntheticSyntax)
+ }
+
+ #[inline]
+ fn expr_only(&self) -> Option<&ExpressionOnlySourceMap> {
+ self.expr_only.as_deref()
+ }
+
+ #[inline]
+ #[track_caller]
+ fn assert_expr_only(&self) -> &ExpressionOnlySourceMap {
+ self.expr_only.as_ref().expect("should have `ExpressionStoreSourceMap::expr_only`")
+ }
+
pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprOrPatSource, SyntheticSyntax> {
- self.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax)
+ self.expr_or_synthetic()?.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax)
}
pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option<ExprOrPatId> {
let src = node.map(AstPtr::new);
- self.expr_map.get(&src).cloned()
+ self.expr_only()?.expr_map.get(&src).cloned()
}
pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
let src = node.map(AstPtr::new);
- self.expansions.get(&src).cloned()
+ self.expr_only()?.expansions.get(&src).cloned()
}
pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroCallId)> + '_ {
- self.expansions.iter().map(|(&a, &b)| (a, b))
+ self.expr_only().into_iter().flat_map(|it| it.expansions.iter().map(|(&a, &b)| (a, b)))
}
pub fn pat_syntax(&self, pat: PatId) -> Result<ExprOrPatSource, SyntheticSyntax> {
- self.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax)
+ self.expr_or_synthetic()?.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax)
}
pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<ExprOrPatId> {
- self.pat_map.get(&node.map(AstPtr::new)).cloned()
+ self.expr_only()?.pat_map.get(&node.map(AstPtr::new)).cloned()
}
pub fn type_syntax(&self, id: TypeRefId) -> Result<TypeSource, SyntheticSyntax> {
@@ -678,49 +935,50 @@ impl ExpressionStoreSourceMap {
}
pub fn label_syntax(&self, label: LabelId) -> LabelSource {
- self.label_map_back[label]
+ self.assert_expr_only().label_map_back[label]
}
pub fn patterns_for_binding(&self, binding: BindingId) -> &[PatId] {
- self.binding_definitions.get(&binding).map_or(&[], Deref::deref)
+ self.assert_expr_only().binding_definitions.get(binding).map_or(&[], Deref::deref)
}
pub fn node_label(&self, node: InFile<&ast::Label>) -> Option<LabelId> {
let src = node.map(AstPtr::new);
- self.label_map.get(&src).cloned()
+ self.expr_only()?.label_map.get(&src).cloned()
}
pub fn field_syntax(&self, expr: ExprId) -> FieldSource {
- self.field_map_back[&expr]
+ self.assert_expr_only().field_map_back[&expr]
}
pub fn pat_field_syntax(&self, pat: PatId) -> PatFieldSource {
- self.pat_field_map_back[&pat]
+ self.assert_expr_only().pat_field_map_back[&pat]
}
pub fn macro_expansion_expr(&self, node: InFile<&ast::MacroExpr>) -> Option<ExprOrPatId> {
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::MacroExpr>).map(AstPtr::upcast);
- self.expr_map.get(&src).copied()
+ self.expr_only()?.expr_map.get(&src).copied()
}
pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroCallId)> {
- self.expansions.iter()
+ self.expr_only().into_iter().flat_map(|it| it.expansions.iter())
}
pub fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
- self.expansions.get(&node.map(AstPtr::new)).copied()
+ self.expr_only()?.expansions.get(&node.map(AstPtr::new)).copied()
}
pub fn implicit_format_args(
&self,
node: InFile<&ast::FormatArgsExpr>,
) -> Option<(HygieneId, &[(syntax::TextRange, Name)])> {
+ let expr_only = self.expr_only()?;
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
- let (hygiene, names) = self
+ let (hygiene, names) = expr_only
.template_map
.as_ref()?
.format_args_to_captures
- .get(&self.expr_map.get(&src)?.as_expr()?)?;
+ .get(&expr_only.expr_map.get(&src)?.as_expr()?)?;
Some((*hygiene, &**names))
}
@@ -728,67 +986,28 @@ impl ExpressionStoreSourceMap {
&self,
capture_expr: ExprId,
) -> Option<InFile<(ExprPtr, TextRange)>> {
- self.template_map.as_ref()?.implicit_capture_to_source.get(&capture_expr).copied()
+ self.expr_only()?
+ .template_map
+ .as_ref()?
+ .implicit_capture_to_source
+ .get(&capture_expr)
+ .copied()
}
pub fn asm_template_args(
&self,
node: InFile<&ast::AsmExpr>,
) -> Option<(ExprId, &[Vec<(syntax::TextRange, usize)>])> {
+ let expr_only = self.expr_only()?;
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
- let expr = self.expr_map.get(&src)?.as_expr()?;
- Some(expr)
- .zip(self.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref))
+ let expr = expr_only.expr_map.get(&src)?.as_expr()?;
+ Some(expr).zip(
+ expr_only.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref),
+ )
}
/// Get a reference to the source map's diagnostics.
pub fn diagnostics(&self) -> &[ExpressionStoreDiagnostics] {
- &self.diagnostics
- }
-
- fn shrink_to_fit(&mut self) {
- let Self {
- expr_map,
- expr_map_back,
- pat_map,
- pat_map_back,
- label_map,
- label_map_back,
- field_map_back,
- pat_field_map_back,
- expansions,
- template_map,
- diagnostics,
- binding_definitions,
- types_map,
- types_map_back,
- lifetime_map_back,
- lifetime_map,
- } = self;
- if let Some(template_map) = template_map {
- let FormatTemplate {
- format_args_to_captures,
- asm_to_captures,
- implicit_capture_to_source,
- } = &mut **template_map;
- format_args_to_captures.shrink_to_fit();
- asm_to_captures.shrink_to_fit();
- implicit_capture_to_source.shrink_to_fit();
- }
- expr_map.shrink_to_fit();
- expr_map_back.shrink_to_fit();
- pat_map.shrink_to_fit();
- pat_map_back.shrink_to_fit();
- label_map.shrink_to_fit();
- label_map_back.shrink_to_fit();
- field_map_back.shrink_to_fit();
- pat_field_map_back.shrink_to_fit();
- expansions.shrink_to_fit();
- diagnostics.shrink_to_fit();
- binding_definitions.shrink_to_fit();
- types_map.shrink_to_fit();
- types_map_back.shrink_to_fit();
- lifetime_map.shrink_to_fit();
- lifetime_map_back.shrink_to_fit();
+ self.expr_only().map(|it| &*it.diagnostics).unwrap_or_default()
}
}
diff --git a/crates/hir-def/src/expr_store/body.rs b/crates/hir-def/src/expr_store/body.rs
index fb6d931e0e..c955393b9c 100644
--- a/crates/hir-def/src/expr_store/body.rs
+++ b/crates/hir-def/src/expr_store/body.rs
@@ -36,6 +36,7 @@ pub struct Body {
impl ops::Deref for Body {
type Target = ExpressionStore;
+ #[inline]
fn deref(&self) -> &Self::Target {
&self.store
}
@@ -61,6 +62,7 @@ pub struct BodySourceMap {
impl ops::Deref for BodySourceMap {
type Target = ExpressionStoreSourceMap;
+ #[inline]
fn deref(&self) -> &Self::Target {
&self.store
}
@@ -102,9 +104,7 @@ impl Body {
}
};
let module = def.module(db);
- let (body, mut source_map) =
- lower_body(db, def, file_id, module, params, body, is_async_fn);
- source_map.store.shrink_to_fit();
+ let (body, source_map) = lower_body(db, def, file_id, module, params, body, is_async_fn);
(Arc::new(body), Arc::new(source_map))
}
diff --git a/crates/hir-def/src/expr_store/expander.rs b/crates/hir-def/src/expr_store/expander.rs
index 3823fb5a1e..23b9712d1e 100644
--- a/crates/hir-def/src/expr_store/expander.rs
+++ b/crates/hir-def/src/expr_store/expander.rs
@@ -6,6 +6,7 @@ use base_db::Crate;
use cfg::CfgOptions;
use drop_bomb::DropBomb;
use hir_expand::AstId;
+use hir_expand::span_map::SpanMapRef;
use hir_expand::{
ExpandError, ExpandErrorKind, ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
eager::EagerCallBackFn, mod_path::ModPath, span_map::SpanMap,
@@ -223,9 +224,15 @@ impl Expander {
}
}
+ #[inline]
pub(super) fn ast_id_map(&self) -> &AstIdMap {
&self.ast_id_map
}
+
+ #[inline]
+ pub(super) fn span_map(&self) -> SpanMapRef<'_> {
+ self.span_map.as_ref()
+ }
}
#[derive(Debug)]
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index 29871f5e04..4e877748ca 100644
--- a/crates/hir-def/src/expr_store/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -7,12 +7,14 @@ mod path;
use std::mem;
+use base_db::FxIndexSet;
use cfg::CfgOptions;
use either::Either;
use hir_expand::{
- HirFileId, InFile, Lookup, MacroDefId,
+ HirFileId, InFile, MacroDefId,
mod_path::tool_path,
name::{AsName, Name},
+ span_map::SpanMapRef,
};
use intern::{Symbol, sym};
use rustc_hash::FxHashMap;
@@ -30,8 +32,8 @@ use triomphe::Arc;
use tt::TextRange;
use crate::{
- AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, GenericDefId, ImplId, ItemTreeLoc,
- MacroId, ModuleDefId, ModuleId, TraitAliasId, TraitId, TypeAliasId, UnresolvedMacro,
+ AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, GenericDefId, ImplId, MacroId,
+ ModuleDefId, ModuleId, TraitAliasId, TraitId, TypeAliasId, UnresolvedMacro,
builtin_type::BuiltinUint,
db::DefDatabase,
expr_store::{
@@ -65,8 +67,6 @@ use crate::{
pub use self::path::hir_segment_to_ast_segment;
-type FxIndexSet<K> = indexmap::IndexSet<K, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
-
pub(super) fn lower_body(
db: &dyn DefDatabase,
owner: DefWithBodyId,
@@ -121,14 +121,10 @@ pub(super) fn lower_body(
params = (0..count).map(|_| collector.missing_pat()).collect();
};
let body_expr = collector.missing_expr();
+ let (store, source_map) = collector.store.finish();
return (
- Body {
- store: collector.store.finish(),
- params: params.into_boxed_slice(),
- self_param,
- body_expr,
- },
- BodySourceMap { self_param: source_map_self_param, store: collector.source_map },
+ Body { store, params: params.into_boxed_slice(), self_param, body_expr },
+ BodySourceMap { self_param: source_map_self_param, store: source_map },
);
}
@@ -171,14 +167,10 @@ pub(super) fn lower_body(
},
);
+ let (store, source_map) = collector.store.finish();
(
- Body {
- store: collector.store.finish(),
- params: params.into_boxed_slice(),
- self_param,
- body_expr,
- },
- BodySourceMap { self_param: source_map_self_param, store: collector.source_map },
+ Body { store, params: params.into_boxed_slice(), self_param, body_expr },
+ BodySourceMap { self_param: source_map_self_param, store: source_map },
)
}
@@ -190,7 +182,8 @@ pub(crate) fn lower_type_ref(
let mut expr_collector = ExprCollector::new(db, module, type_ref.file_id);
let type_ref =
expr_collector.lower_type_ref_opt(type_ref.value, &mut ExprCollector::impl_trait_allocator);
- (expr_collector.store.finish(), expr_collector.source_map, type_ref)
+ let (store, source_map) = expr_collector.store.finish();
+ (store, source_map, type_ref)
}
pub(crate) fn lower_generic_params(
@@ -205,7 +198,8 @@ pub(crate) fn lower_generic_params(
let mut collector = generics::GenericParamsCollector::new(def);
collector.lower(&mut expr_collector, param_list, where_clause);
let params = collector.finish();
- (Arc::new(expr_collector.store.finish()), params, expr_collector.source_map)
+ let (store, source_map) = expr_collector.store.finish();
+ (Arc::new(store), params, source_map)
}
pub(crate) fn lower_impl(
@@ -232,7 +226,8 @@ pub(crate) fn lower_impl(
impl_syntax.value.where_clause(),
);
let params = collector.finish();
- (expr_collector.store.finish(), expr_collector.source_map, self_ty, trait_, params)
+ let (store, source_map) = expr_collector.store.finish();
+ (store, source_map, self_ty, trait_, params)
}
pub(crate) fn lower_trait(
@@ -253,7 +248,8 @@ pub(crate) fn lower_trait(
trait_syntax.value.where_clause(),
);
let params = collector.finish();
- (expr_collector.store.finish(), expr_collector.source_map, params)
+ let (store, source_map) = expr_collector.store.finish();
+ (store, source_map, params)
}
pub(crate) fn lower_trait_alias(
@@ -274,7 +270,8 @@ pub(crate) fn lower_trait_alias(
trait_syntax.value.where_clause(),
);
let params = collector.finish();
- (expr_collector.store.finish(), expr_collector.source_map, params)
+ let (store, source_map) = expr_collector.store.finish();
+ (store, source_map, params)
}
pub(crate) fn lower_type_alias(
@@ -313,7 +310,8 @@ pub(crate) fn lower_type_alias(
.value
.ty()
.map(|ty| expr_collector.lower_type_ref(ty, &mut ExprCollector::impl_trait_allocator));
- (expr_collector.store.finish(), expr_collector.source_map, params, bounds, type_ref)
+ let (store, source_map) = expr_collector.store.finish();
+ (store, source_map, params, bounds, type_ref)
}
pub(crate) fn lower_function(
@@ -421,9 +419,10 @@ pub(crate) fn lower_function(
} else {
return_type
};
+ let (store, source_map) = expr_collector.store.finish();
(
- expr_collector.store.finish(),
- expr_collector.source_map,
+ store,
+ source_map,
generics,
params.into_boxed_slice(),
return_type,
@@ -440,7 +439,6 @@ pub struct ExprCollector<'db> {
local_def_map: &'db LocalDefMap,
module: ModuleId,
pub store: ExpressionStoreBuilder,
- pub(crate) source_map: ExpressionStoreSourceMap,
// state stuff
// Prevent nested impl traits like `impl Foo<impl Bar>`.
@@ -551,7 +549,6 @@ impl ExprCollector<'_> {
module,
def_map,
local_def_map,
- source_map: ExpressionStoreSourceMap::default(),
store: ExpressionStoreBuilder::default(),
expander,
current_try_block_label: None,
@@ -564,6 +561,11 @@ impl ExprCollector<'_> {
}
}
+ #[inline]
+ pub(crate) fn span_map(&self) -> SpanMapRef<'_> {
+ self.expander.span_map()
+ }
+
pub fn lower_lifetime_ref(&mut self, lifetime: ast::Lifetime) -> LifetimeRefId {
// FIXME: Keyword check?
let lifetime_ref = match &*lifetime.text() {
@@ -693,7 +695,7 @@ impl ExprCollector<'_> {
let id = self.collect_macro_call(mcall, macro_ptr, true, |this, expansion| {
this.lower_type_ref_opt(expansion, impl_trait_lower_fn)
});
- self.source_map.types_map.insert(src, id);
+ self.store.types_map.insert(src, id);
return id;
}
None => TypeRef::Error,
@@ -727,8 +729,8 @@ impl ExprCollector<'_> {
fn alloc_type_ref(&mut self, type_ref: TypeRef, node: TypePtr) -> TypeRefId {
let id = self.store.types.alloc(type_ref);
let ptr = self.expander.in_file(node);
- self.source_map.types_map_back.insert(id, ptr);
- self.source_map.types_map.insert(ptr, id);
+ self.store.types_map_back.insert(id, ptr);
+ self.store.types_map.insert(ptr, id);
id
}
@@ -739,8 +741,8 @@ impl ExprCollector<'_> {
) -> LifetimeRefId {
let id = self.store.lifetimes.alloc(lifetime_ref);
let ptr = self.expander.in_file(node);
- self.source_map.lifetime_map_back.insert(id, ptr);
- self.source_map.lifetime_map.insert(ptr, id);
+ self.store.lifetime_map_back.insert(id, ptr);
+ self.store.lifetime_map.insert(ptr, id);
id
}
@@ -1185,14 +1187,14 @@ impl ExprCollector<'_> {
}
ast::Expr::ContinueExpr(e) => {
let label = self.resolve_label(e.lifetime()).unwrap_or_else(|e| {
- self.source_map.diagnostics.push(e);
+ self.store.diagnostics.push(e);
None
});
self.alloc_expr(Expr::Continue { label }, syntax_ptr)
}
ast::Expr::BreakExpr(e) => {
let label = self.resolve_label(e.lifetime()).unwrap_or_else(|e| {
- self.source_map.diagnostics.push(e);
+ self.store.diagnostics.push(e);
None
});
let expr = e.expr().map(|e| self.collect_expr(e));
@@ -1202,7 +1204,7 @@ impl ExprCollector<'_> {
let inner = self.collect_expr_opt(e.expr());
// make the paren expr point to the inner expression as well for IDE resolution
let src = self.expander.in_file(syntax_ptr);
- self.source_map.expr_map.insert(src, inner.into());
+ self.store.expr_map.insert(src, inner.into());
inner
}
ast::Expr::ReturnExpr(e) => {
@@ -1243,7 +1245,7 @@ impl ExprCollector<'_> {
None => self.missing_expr(),
};
let src = self.expander.in_file(AstPtr::new(&field));
- self.source_map.field_map_back.insert(expr, src);
+ self.store.field_map_back.insert(expr, src);
Some(RecordLitField { name, expr })
})
.collect();
@@ -1266,12 +1268,10 @@ impl ExprCollector<'_> {
ast::Expr::AwaitExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
if let Awaitable::No(location) = self.is_lowering_awaitable_block() {
- self.source_map.diagnostics.push(
- ExpressionStoreDiagnostics::AwaitOutsideOfAsync {
- node: self.expander.in_file(AstPtr::new(&e)),
- location: location.to_string(),
- },
- );
+ self.store.diagnostics.push(ExpressionStoreDiagnostics::AwaitOutsideOfAsync {
+ node: self.expander.in_file(AstPtr::new(&e)),
+ location: location.to_string(),
+ });
}
self.alloc_expr(Expr::Await { expr }, syntax_ptr)
}
@@ -1437,7 +1437,7 @@ impl ExprCollector<'_> {
// Make the macro-call point to its expanded expression so we can query
// semantics on syntax pointers to the macro
let src = self.expander.in_file(syntax_ptr);
- self.source_map.expr_map.insert(src, id.into());
+ self.store.expr_map.insert(src, id.into());
id
}
None => self.alloc_expr(Expr::Missing, syntax_ptr),
@@ -1481,7 +1481,7 @@ impl ExprCollector<'_> {
let expr = self.collect_expr(expr);
// Do not use `alloc_pat_from_expr()` here, it will override the entry in `expr_map`.
let id = self.store.pats.alloc(Pat::Expr(expr));
- self.source_map.pat_map_back.insert(id, src);
+ self.store.pat_map_back.insert(id, src);
id
})
}
@@ -1550,7 +1550,7 @@ impl ExprCollector<'_> {
let id = self.collect_macro_call(e, macro_ptr, true, |this, expansion| {
this.collect_expr_as_pat_opt(expansion)
});
- self.source_map.expr_map.insert(src, id.into());
+ self.store.expr_map.insert(src, id.into());
id
}
ast::Expr::RecordExpr(e) => {
@@ -1571,7 +1571,7 @@ impl ExprCollector<'_> {
let pat = self.collect_expr_as_pat(field_expr);
let name = f.field_name()?.as_name();
let src = self.expander.in_file(AstPtr::new(&f).wrap_left());
- self.source_map.pat_field_map_back.insert(pat, src);
+ self.store.pat_field_map_back.insert(pat, src);
Some(RecordFieldPat { name, pat })
})
.collect();
@@ -1617,7 +1617,7 @@ impl ExprCollector<'_> {
);
if let Either::Left(pat) = pat {
let src = this.expander.in_file(AstPtr::new(&expr).wrap_left());
- this.source_map.pat_map_back.insert(pat, src);
+ this.store.pat_map_back.insert(pat, src);
}
pat
}
@@ -1963,7 +1963,7 @@ impl ExprCollector<'_> {
self.module.krate(),
resolver,
&mut |ptr, call| {
- _ = self.source_map.expansions.insert(ptr.map(|(it, _)| it), call);
+ _ = self.store.expansions.insert(ptr.map(|(it, _)| it), call);
},
)
}
@@ -1973,19 +1973,17 @@ impl ExprCollector<'_> {
Ok(res) => res,
Err(UnresolvedMacro { path }) => {
if record_diagnostics {
- self.source_map.diagnostics.push(
- ExpressionStoreDiagnostics::UnresolvedMacroCall {
- node: self.expander.in_file(syntax_ptr),
- path,
- },
- );
+ self.store.diagnostics.push(ExpressionStoreDiagnostics::UnresolvedMacroCall {
+ node: self.expander.in_file(syntax_ptr),
+ path,
+ });
}
return collector(self, None);
}
};
if record_diagnostics {
if let Some(err) = res.err {
- self.source_map
+ self.store
.diagnostics
.push(ExpressionStoreDiagnostics::MacroError { node: macro_call_ptr, err });
}
@@ -1996,7 +1994,7 @@ impl ExprCollector<'_> {
// Keep collecting even with expansion errors so we can provide completions and
// other services in incomplete macro expressions.
if let Some(macro_file) = self.expander.current_file_id().macro_file() {
- self.source_map.expansions.insert(macro_call_ptr, macro_file);
+ self.store.expansions.insert(macro_call_ptr, macro_file);
}
if record_diagnostics {
@@ -2045,7 +2043,7 @@ impl ExprCollector<'_> {
// Make the macro-call point to its expanded expression so we can query
// semantics on syntax pointers to the macro
let src = self.expander.in_file(syntax_ptr);
- self.source_map.expr_map.insert(src, tail.into());
+ self.store.expr_map.insert(src, tail.into());
})
}
@@ -2141,26 +2139,10 @@ impl ExprCollector<'_> {
block: ast::BlockExpr,
mk_block: impl FnOnce(Option<BlockId>, Box<[Statement]>, Option<ExprId>) -> Expr,
) -> ExprId {
- let block_has_items = {
- let statement_has_item = block.statements().any(|stmt| match stmt {
- ast::Stmt::Item(_) => true,
- // Macro calls can be both items and expressions. The syntax library always treats
- // them as expressions here, so we undo that.
- ast::Stmt::ExprStmt(es) => matches!(es.expr(), Some(ast::Expr::MacroExpr(_))),
- _ => false,
- });
- statement_has_item
- || matches!(block.tail_expr(), Some(ast::Expr::MacroExpr(_)))
- || (block.may_carry_attributes() && block.attrs().next().is_some())
- };
-
- let block_id = if block_has_items {
- let file_local_id = self.expander.ast_id_map().ast_id(&block);
+ let block_id = self.expander.ast_id_map().ast_id_for_block(&block).map(|file_local_id| {
let ast_id = self.expander.in_file(file_local_id);
- Some(self.db.intern_block(BlockLoc { ast_id, module: self.module }))
- } else {
- None
- };
+ self.db.intern_block(BlockLoc { ast_id, module: self.module })
+ });
let (module, def_map) =
match block_id.map(|block_id| (block_def_map(self.db, block_id), block_id)) {
@@ -2260,11 +2242,8 @@ impl ExprCollector<'_> {
match resolved.take_values() {
Some(ModuleDefId::ConstId(_)) => (None, Pat::Path(name.into())),
Some(ModuleDefId::EnumVariantId(variant))
- if {
- let loc = variant.lookup(self.db);
- let tree = loc.item_tree_id().item_tree(self.db);
- tree[loc.id.value].shape != FieldsShape::Record
- } =>
+ // FIXME: This can cause a cycle if the user is writing invalid code
+ if variant.fields(self.db).shape != FieldsShape::Record =>
{
(None, Pat::Path(name.into()))
}
@@ -2375,7 +2354,7 @@ impl ExprCollector<'_> {
let pat = self.collect_pat(ast_pat, binding_list);
let name = f.field_name()?.as_name();
let src = self.expander.in_file(AstPtr::new(&f).wrap_right());
- self.source_map.pat_field_map_back.insert(pat, src);
+ self.store.pat_field_map_back.insert(pat, src);
Some(RecordFieldPat { name, pat })
})
.collect();
@@ -2438,7 +2417,7 @@ impl ExprCollector<'_> {
self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
this.collect_pat_opt(expanded_pat, binding_list)
});
- self.source_map.pat_map.insert(src, pat.into());
+ self.store.pat_map.insert(src, pat.into());
return pat;
}
None => Pat::Missing,
@@ -2529,7 +2508,7 @@ impl ExprCollector<'_> {
}
});
if let Some(pat) = pat.left() {
- self.source_map.pat_map.insert(src, pat.into());
+ self.store.pat_map.insert(src, pat.into());
}
pat
}
@@ -2551,7 +2530,7 @@ impl ExprCollector<'_> {
match enabled {
Ok(()) => true,
Err(cfg) => {
- self.source_map.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode {
+ self.store.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode {
node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())),
cfg,
opts: self.cfg_options.clone(),
@@ -2562,7 +2541,7 @@ impl ExprCollector<'_> {
}
fn add_definition_to_binding(&mut self, binding_id: BindingId, pat_id: PatId) {
- self.source_map.binding_definitions.entry(binding_id).or_default().push(pat_id);
+ self.store.binding_definitions.entry(binding_id).or_default().push(pat_id);
}
// region: labels
@@ -2738,7 +2717,7 @@ impl ExprCollector<'_> {
|name, range| {
let expr_id = self.alloc_expr_desugared(Expr::Path(Path::from(name)));
if let Some(range) = range {
- self.source_map
+ self.store
.template_map
.get_or_insert_with(Default::default)
.implicit_capture_to_source
@@ -2829,6 +2808,44 @@ impl ExprCollector<'_> {
mutability: Mutability::Shared,
})
};
+
+ // Assume that rustc version >= 1.89.0 iff lang item `format_arguments` exists
+ // but `format_unsafe_arg` does not
+ let fmt_args =
+ || crate::lang_item::lang_item(self.db, self.module.krate(), LangItem::FormatArguments);
+ let fmt_unsafe_arg =
+ || crate::lang_item::lang_item(self.db, self.module.krate(), LangItem::FormatUnsafeArg);
+ let use_format_args_since_1_89_0 = fmt_args().is_some() && fmt_unsafe_arg().is_none();
+
+ let idx = if use_format_args_since_1_89_0 {
+ self.collect_format_args_impl(syntax_ptr, fmt, argmap, lit_pieces, format_options)
+ } else {
+ self.collect_format_args_before_1_89_0_impl(
+ syntax_ptr,
+ fmt,
+ argmap,
+ lit_pieces,
+ format_options,
+ )
+ };
+
+ self.store
+ .template_map
+ .get_or_insert_with(Default::default)
+ .format_args_to_captures
+ .insert(idx, (hygiene, mappings));
+ idx
+ }
+
+ /// `format_args!` expansion implementation for rustc versions < `1.89.0`
+ fn collect_format_args_before_1_89_0_impl(
+ &mut self,
+ syntax_ptr: AstPtr<ast::Expr>,
+ fmt: FormatArgs,
+ argmap: FxIndexSet<(usize, ArgumentType)>,
+ lit_pieces: ExprId,
+ format_options: ExprId,
+ ) -> ExprId {
let arguments = &*fmt.arguments.arguments;
let args = if arguments.is_empty() {
@@ -2916,19 +2933,189 @@ impl ExprCollector<'_> {
});
}
- let idx = self.alloc_expr(
+ self.alloc_expr(
Expr::Call {
callee: new_v1_formatted,
args: Box::new([lit_pieces, args, format_options, unsafe_arg_new]),
},
syntax_ptr,
- );
- self.source_map
- .template_map
- .get_or_insert_with(Default::default)
- .format_args_to_captures
- .insert(idx, (hygiene, mappings));
- idx
+ )
+ }
+
+ /// `format_args!` expansion implementation for rustc versions >= `1.89.0`,
+ /// especially since [this PR](https://github.com/rust-lang/rust/pull/140748)
+ fn collect_format_args_impl(
+ &mut self,
+ syntax_ptr: AstPtr<ast::Expr>,
+ fmt: FormatArgs,
+ argmap: FxIndexSet<(usize, ArgumentType)>,
+ lit_pieces: ExprId,
+ format_options: ExprId,
+ ) -> ExprId {
+ let arguments = &*fmt.arguments.arguments;
+
+ let (let_stmts, args) = if arguments.is_empty() {
+ (
+ // Generate:
+ // []
+ vec![],
+ self.alloc_expr_desugared(Expr::Array(Array::ElementList {
+ elements: Box::default(),
+ })),
+ )
+ } else if argmap.len() == 1 && arguments.len() == 1 {
+ // Only one argument, so we don't need to make the `args` tuple.
+ //
+ // Generate:
+ // super let args = [<core::fmt::Arguments>::new_display(&arg)];
+ let args = argmap
+ .iter()
+ .map(|&(arg_index, ty)| {
+ let ref_arg = self.alloc_expr_desugared(Expr::Ref {
+ expr: arguments[arg_index].expr,
+ rawness: Rawness::Ref,
+ mutability: Mutability::Shared,
+ });
+ self.make_argument(ref_arg, ty)
+ })
+ .collect();
+ let args =
+ self.alloc_expr_desugared(Expr::Array(Array::ElementList { elements: args }));
+ let args_name = Name::new_symbol_root(sym::args);
+ let args_binding = self.alloc_binding(
+ args_name.clone(),
+ BindingAnnotation::Unannotated,
+ HygieneId::ROOT,
+ );
+ let args_pat = self.alloc_pat_desugared(Pat::Bind { id: args_binding, subpat: None });
+ self.add_definition_to_binding(args_binding, args_pat);
+ // TODO: We don't have `super let` yet.
+ let let_stmt = Statement::Let {
+ pat: args_pat,
+ type_ref: None,
+ initializer: Some(args),
+ else_branch: None,
+ };
+ (vec![let_stmt], self.alloc_expr_desugared(Expr::Path(args_name.into())))
+ } else {
+ // Generate:
+ // super let args = (&arg0, &arg1, &...);
+ let args_name = Name::new_symbol_root(sym::args);
+ let args_binding = self.alloc_binding(
+ args_name.clone(),
+ BindingAnnotation::Unannotated,
+ HygieneId::ROOT,
+ );
+ let args_pat = self.alloc_pat_desugared(Pat::Bind { id: args_binding, subpat: None });
+ self.add_definition_to_binding(args_binding, args_pat);
+ let elements = arguments
+ .iter()
+ .map(|arg| {
+ self.alloc_expr_desugared(Expr::Ref {
+ expr: arg.expr,
+ rawness: Rawness::Ref,
+ mutability: Mutability::Shared,
+ })
+ })
+ .collect();
+ let args_tuple = self.alloc_expr_desugared(Expr::Tuple { exprs: elements });
+ // TODO: We don't have `super let` yet
+ let let_stmt1 = Statement::Let {
+ pat: args_pat,
+ type_ref: None,
+ initializer: Some(args_tuple),
+ else_branch: None,
+ };
+
+ // Generate:
+ // super let args = [
+ // <core::fmt::Argument>::new_display(args.0),
+ // <core::fmt::Argument>::new_lower_hex(args.1),
+ // <core::fmt::Argument>::new_debug(args.0),
+ // …
+ // ];
+ let args = argmap
+ .iter()
+ .map(|&(arg_index, ty)| {
+ let args_ident_expr =
+ self.alloc_expr_desugared(Expr::Path(args_name.clone().into()));
+ let arg = self.alloc_expr_desugared(Expr::Field {
+ expr: args_ident_expr,
+ name: Name::new_tuple_field(arg_index),
+ });
+ self.make_argument(arg, ty)
+ })
+ .collect();
+ let array =
+ self.alloc_expr_desugared(Expr::Array(Array::ElementList { elements: args }));
+ let args_binding = self.alloc_binding(
+ args_name.clone(),
+ BindingAnnotation::Unannotated,
+ HygieneId::ROOT,
+ );
+ let args_pat = self.alloc_pat_desugared(Pat::Bind { id: args_binding, subpat: None });
+ self.add_definition_to_binding(args_binding, args_pat);
+ let let_stmt2 = Statement::Let {
+ pat: args_pat,
+ type_ref: None,
+ initializer: Some(array),
+ else_branch: None,
+ };
+ (vec![let_stmt1, let_stmt2], self.alloc_expr_desugared(Expr::Path(args_name.into())))
+ };
+
+ // Generate:
+ // &args
+ let args = self.alloc_expr_desugared(Expr::Ref {
+ expr: args,
+ rawness: Rawness::Ref,
+ mutability: Mutability::Shared,
+ });
+
+ let call_block = {
+ // Generate:
+ // unsafe {
+ // <core::fmt::Arguments>::new_v1_formatted(
+ // lit_pieces,
+ // args,
+ // format_options,
+ // )
+ // }
+
+ let new_v1_formatted = LangItem::FormatArguments.ty_rel_path(
+ self.db,
+ self.module.krate(),
+ Name::new_symbol_root(sym::new_v1_formatted),
+ );
+ let new_v1_formatted =
+ self.alloc_expr_desugared(new_v1_formatted.map_or(Expr::Missing, Expr::Path));
+ let args = [lit_pieces, args, format_options];
+ let call = self
+ .alloc_expr_desugared(Expr::Call { callee: new_v1_formatted, args: args.into() });
+
+ Expr::Unsafe { id: None, statements: Box::default(), tail: Some(call) }
+ };
+
+ if !let_stmts.is_empty() {
+ // Generate:
+ // {
+ // super let …
+ // super let …
+ // <core::fmt::Arguments>::new_…(…)
+ // }
+ let call = self.alloc_expr_desugared(call_block);
+ self.alloc_expr(
+ Expr::Block {
+ id: None,
+ statements: let_stmts.into(),
+ tail: Some(call),
+ label: None,
+ },
+ syntax_ptr,
+ )
+ } else {
+ self.alloc_expr(call_block, syntax_ptr)
+ }
}
/// Generate a hir expression for a format_args placeholder specification.
@@ -3192,8 +3379,8 @@ impl ExprCollector<'_> {
fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
let src = self.expander.in_file(ptr);
let id = self.store.exprs.alloc(expr);
- self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
- self.source_map.expr_map.insert(src, id.into());
+ self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
+ self.store.expr_map.insert(src, id.into());
id
}
// FIXME: desugared exprs don't have ptr, that's wrong and should be fixed.
@@ -3204,9 +3391,9 @@ impl ExprCollector<'_> {
fn alloc_expr_desugared_with_ptr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
let src = self.expander.in_file(ptr);
let id = self.store.exprs.alloc(expr);
- self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
+ self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
// We intentionally don't fill this as it could overwrite a non-desugared entry
- // self.source_map.expr_map.insert(src, id);
+ // self.store.expr_map.insert(src, id);
id
}
fn missing_expr(&mut self) -> ExprId {
@@ -3229,24 +3416,24 @@ impl ExprCollector<'_> {
fn alloc_pat_from_expr(&mut self, pat: Pat, ptr: ExprPtr) -> PatId {
let src = self.expander.in_file(ptr);
let id = self.store.pats.alloc(pat);
- self.source_map.expr_map.insert(src, id.into());
- self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_left));
+ self.store.expr_map.insert(src, id.into());
+ self.store.pat_map_back.insert(id, src.map(AstPtr::wrap_left));
id
}
fn alloc_expr_from_pat(&mut self, expr: Expr, ptr: PatPtr) -> ExprId {
let src = self.expander.in_file(ptr);
let id = self.store.exprs.alloc(expr);
- self.source_map.pat_map.insert(src, id.into());
- self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_right));
+ self.store.pat_map.insert(src, id.into());
+ self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_right));
id
}
fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
let src = self.expander.in_file(ptr);
let id = self.store.pats.alloc(pat);
- self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_right));
- self.source_map.pat_map.insert(src, id.into());
+ self.store.pat_map_back.insert(id, src.map(AstPtr::wrap_right));
+ self.store.pat_map.insert(src, id.into());
id
}
// FIXME: desugared pats don't have ptr, that's wrong and should be fixed somehow.
@@ -3260,8 +3447,8 @@ impl ExprCollector<'_> {
fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId {
let src = self.expander.in_file(ptr);
let id = self.store.labels.alloc(label);
- self.source_map.label_map_back.insert(id, src);
- self.source_map.label_map.insert(src, id);
+ self.store.label_map_back.insert(id, src);
+ self.store.label_map.insert(src, id);
id
}
// FIXME: desugared labels don't have ptr, that's wrong and should be fixed somehow.
diff --git a/crates/hir-def/src/expr_store/lower/asm.rs b/crates/hir-def/src/expr_store/lower/asm.rs
index d36e5205c7..3bc4afb5c8 100644
--- a/crates/hir-def/src/expr_store/lower/asm.rs
+++ b/crates/hir-def/src/expr_store/lower/asm.rs
@@ -10,7 +10,7 @@ use tt::TextRange;
use crate::{
expr_store::lower::{ExprCollector, FxIndexSet},
- hir::{AsmOperand, AsmOptions, Expr, ExprId, InlineAsm, InlineAsmRegOrRegClass},
+ hir::{AsmOperand, AsmOptions, Expr, ExprId, InlineAsm, InlineAsmKind, InlineAsmRegOrRegClass},
};
impl ExprCollector<'_> {
@@ -269,11 +269,20 @@ impl ExprCollector<'_> {
}
})
};
+
+ let kind = if asm.global_asm_token().is_some() {
+ InlineAsmKind::GlobalAsm
+ } else if asm.naked_asm_token().is_some() {
+ InlineAsmKind::NakedAsm
+ } else {
+ InlineAsmKind::Asm
+ };
+
let idx = self.alloc_expr(
- Expr::InlineAsm(InlineAsm { operands: operands.into_boxed_slice(), options }),
+ Expr::InlineAsm(InlineAsm { operands: operands.into_boxed_slice(), options, kind }),
syntax_ptr,
);
- self.source_map
+ self.store
.template_map
.get_or_insert_with(Default::default)
.asm_to_captures
diff --git a/crates/hir-def/src/expr_store/lower/path/tests.rs b/crates/hir-def/src/expr_store/lower/path/tests.rs
index 8fd81c7b3d..f507841a91 100644
--- a/crates/hir-def/src/expr_store/lower/path/tests.rs
+++ b/crates/hir-def/src/expr_store/lower/path/tests.rs
@@ -23,7 +23,7 @@ fn lower_path(path: ast::Path) -> (TestDB, ExpressionStore, Option<Path>) {
let mut ctx =
ExprCollector::new(&db, crate_def_map(&db, krate).root_module_id(), file_id.into());
let lowered_path = ctx.lower_path(path, &mut ExprCollector::impl_trait_allocator);
- let store = ctx.store.finish();
+ let (store, _) = ctx.store.finish();
(db, store, lowered_path)
}
diff --git a/crates/hir-def/src/expr_store/path.rs b/crates/hir-def/src/expr_store/path.rs
index db83e73a0b..19c7ce0ce0 100644
--- a/crates/hir-def/src/expr_store/path.rs
+++ b/crates/hir-def/src/expr_store/path.rs
@@ -29,8 +29,8 @@ pub enum Path {
// This type is being used a lot, make sure it doesn't grow unintentionally.
#[cfg(target_arch = "x86_64")]
const _: () = {
- assert!(size_of::<Path>() == 16);
- assert!(size_of::<Option<Path>>() == 16);
+ assert!(size_of::<Path>() == 24);
+ assert!(size_of::<Option<Path>>() == 24);
};
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
diff --git a/crates/hir-def/src/expr_store/pretty.rs b/crates/hir-def/src/expr_store/pretty.rs
index f12a9b7a54..f1b011333d 100644
--- a/crates/hir-def/src/expr_store/pretty.rs
+++ b/crates/hir-def/src/expr_store/pretty.rs
@@ -9,9 +9,10 @@ use std::{
use hir_expand::{Lookup, mod_path::PathKind};
use itertools::Itertools;
use span::Edition;
+use syntax::ast::HasName;
use crate::{
- AdtId, DefWithBodyId, GenericDefId, ItemTreeLoc, TypeParamId, VariantId,
+ AdtId, DefWithBodyId, GenericDefId, TypeParamId, VariantId,
expr_store::path::{GenericArg, GenericArgs},
hir::{
Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement,
@@ -19,6 +20,7 @@ use crate::{
},
lang_item::LangItemTarget,
signatures::{FnFlags, FunctionSignature, StructSignature},
+ src::HasSource,
type_ref::{ConstRef, LifetimeRef, Mutability, TraitBoundModifier, TypeBound, UseArgRef},
};
use crate::{LifetimeParamId, signatures::StructFlags};
@@ -48,6 +50,17 @@ pub enum LineFormat {
Indentation,
}
+fn item_name<Id, Loc>(db: &dyn DefDatabase, id: Id, default: &str) -> String
+where
+ Id: Lookup<Database = dyn DefDatabase, Data = Loc>,
+ Loc: HasSource,
+ Loc::Value: ast::HasName,
+{
+ let loc = id.lookup(db);
+ let source = loc.source(db);
+ source.value.name().map_or_else(|| default.to_owned(), |name| name.to_string())
+}
+
pub fn print_body_hir(
db: &dyn DefDatabase,
body: &Body,
@@ -55,31 +68,14 @@ pub fn print_body_hir(
edition: Edition,
) -> String {
let header = match owner {
- DefWithBodyId::FunctionId(it) => {
- it.lookup(db).id.resolved(db, |it| format!("fn {}", it.name.display(db, edition)))
- }
- DefWithBodyId::StaticId(it) => it
- .lookup(db)
- .id
- .resolved(db, |it| format!("static {} = ", it.name.display(db, edition))),
- DefWithBodyId::ConstId(it) => it.lookup(db).id.resolved(db, |it| {
- format!(
- "const {} = ",
- match &it.name {
- Some(name) => name.display(db, edition).to_string(),
- None => "_".to_owned(),
- }
- )
- }),
- DefWithBodyId::VariantId(it) => {
- let loc = it.lookup(db);
- let enum_loc = loc.parent.lookup(db);
- format!(
- "enum {}::{}",
- enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition),
- loc.id.item_tree(db)[loc.id.value].name.display(db, edition),
- )
- }
+ DefWithBodyId::FunctionId(it) => format!("fn {}", item_name(db, it, "<missing>")),
+ DefWithBodyId::StaticId(it) => format!("static {} = ", item_name(db, it, "<missing>")),
+ DefWithBodyId::ConstId(it) => format!("const {} = ", item_name(db, it, "_")),
+ DefWithBodyId::VariantId(it) => format!(
+ "enum {}::{}",
+ item_name(db, it.lookup(db).parent, "<missing>"),
+ item_name(db, it, "<missing>")
+ ),
};
let mut p = Printer {
@@ -116,25 +112,16 @@ pub fn print_body_hir(
pub fn print_variant_body_hir(db: &dyn DefDatabase, owner: VariantId, edition: Edition) -> String {
let header = match owner {
- VariantId::StructId(it) => {
- it.lookup(db).id.resolved(db, |it| format!("struct {}", it.name.display(db, edition)))
- }
- VariantId::EnumVariantId(enum_variant_id) => {
- let loc = enum_variant_id.lookup(db);
- let enum_loc = loc.parent.lookup(db);
- format!(
- "enum {}::{}",
- enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition),
- loc.id.item_tree(db)[loc.id.value].name.display(db, edition),
- )
- }
- VariantId::UnionId(union_id) => union_id
- .lookup(db)
- .id
- .resolved(db, |it| format!("union {}", it.name.display(db, edition))),
+ VariantId::StructId(it) => format!("struct {}", item_name(db, it, "<missing>")),
+ VariantId::EnumVariantId(it) => format!(
+ "enum {}::{}",
+ item_name(db, it.lookup(db).parent, "<missing>"),
+ item_name(db, it, "<missing>")
+ ),
+ VariantId::UnionId(it) => format!("union {}", item_name(db, it, "<missing>")),
};
- let fields = db.variant_fields(owner);
+ let fields = owner.fields(db);
let mut p = Printer {
db,
@@ -154,9 +141,11 @@ pub fn print_variant_body_hir(db: &dyn DefDatabase, owner: VariantId, edition: E
let FieldData { name, type_ref, visibility, is_unsafe } = data;
match visibility {
crate::item_tree::RawVisibility::Module(interned, _visibility_explicitness) => {
- w!(p, "{}", interned.display(db, p.edition))
+ w!(p, "pub(in {})", interned.display(db, p.edition))
}
crate::item_tree::RawVisibility::Public => w!(p, "pub "),
+ crate::item_tree::RawVisibility::PubCrate => w!(p, "pub(crate) "),
+ crate::item_tree::RawVisibility::PubSelf(_) => w!(p, "pub(self) "),
}
if *is_unsafe {
w!(p, "unsafe ");
@@ -913,7 +902,7 @@ impl Printer<'_> {
let mut same_name = false;
if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] {
if let Binding { name, mode: BindingAnnotation::Unannotated, .. } =
- &self.store.bindings[*id]
+ &self.store.assert_expr_only().bindings[*id]
{
if name.as_str() == field_name {
same_name = true;
@@ -1074,7 +1063,7 @@ impl Printer<'_> {
}
fn print_binding(&mut self, id: BindingId) {
- let Binding { name, mode, .. } = &self.store.bindings[id];
+ let Binding { name, mode, .. } = &self.store.assert_expr_only().bindings[id];
let mode = match mode {
BindingAnnotation::Unannotated => "",
BindingAnnotation::Mutable => "mut ",
@@ -1089,10 +1078,7 @@ impl Printer<'_> {
w!(self, "builtin#lang(");
macro_rules! write_name {
($it:ident) => {{
- let loc = $it.lookup(self.db);
- let tree = loc.item_tree_id().item_tree(self.db);
- let name = &tree[loc.id.value].name;
- w!(self, "{}", name.display(self.db, self.edition));
+ w!(self, "{}", item_name(self.db, $it, "<missing>"));
}};
}
match *it {
diff --git a/crates/hir-def/src/expr_store/scope.rs b/crates/hir-def/src/expr_store/scope.rs
index a46711c67e..1952dae9d7 100644
--- a/crates/hir-def/src/expr_store/scope.rs
+++ b/crates/hir-def/src/expr_store/scope.rs
@@ -106,7 +106,9 @@ impl ExprScopes {
let mut scopes = ExprScopes {
scopes: Arena::default(),
scope_entries: Arena::default(),
- scope_by_expr: ArenaMap::with_capacity(body.exprs.len()),
+ scope_by_expr: ArenaMap::with_capacity(
+ body.expr_only.as_ref().map_or(0, |it| it.exprs.len()),
+ ),
};
let mut root = scopes.root_scope();
if let Some(self_param) = body.self_param {
@@ -179,7 +181,7 @@ impl ExprScopes {
binding: BindingId,
hygiene: HygieneId,
) {
- let Binding { name, .. } = &store.bindings[binding];
+ let Binding { name, .. } = &store[binding];
let entry = self.scope_entries.alloc(ScopeEntry { name: name.clone(), binding, hygiene });
self.scopes[scope].entries =
IdxRange::new_inclusive(self.scopes[scope].entries.start()..=entry);
@@ -251,7 +253,7 @@ fn compute_expr_scopes(
scope: &mut ScopeId,
) {
let make_label =
- |label: &Option<LabelId>| label.map(|label| (label, store.labels[label].name.clone()));
+ |label: &Option<LabelId>| label.map(|label| (label, store[label].name.clone()));
let compute_expr_scopes = |scopes: &mut ExprScopes, expr: ExprId, scope: &mut ScopeId| {
compute_expr_scopes(expr, store, scopes, scope)
@@ -534,9 +536,8 @@ fn foo() {
};
let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
- let pat_src = source_map
- .pat_syntax(*source_map.binding_definitions[&resolved.binding()].first().unwrap())
- .unwrap();
+ let pat_src =
+ source_map.pat_syntax(source_map.patterns_for_binding(resolved.binding())[0]).unwrap();
let local_name = pat_src.value.syntax_node_ptr().to_node(file.syntax());
assert_eq!(local_name.text_range(), expected_name.syntax().text_range());
diff --git a/crates/hir-def/src/expr_store/tests/body.rs b/crates/hir-def/src/expr_store/tests/body.rs
index 29e249b07a..c31428be28 100644
--- a/crates/hir-def/src/expr_store/tests/body.rs
+++ b/crates/hir-def/src/expr_store/tests/body.rs
@@ -178,14 +178,14 @@ fn main() {
}
#[test]
-fn desugar_builtin_format_args() {
+fn desugar_builtin_format_args_before_1_89_0() {
let (db, body, def) = lower(
r#"
-//- minicore: fmt
+//- minicore: fmt_before_1_89_0
fn main() {
let are = "are";
let count = 10;
- builtin#format_args("\u{1b}hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!");
+ builtin#format_args("\u{1b}hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", orphan = (), last = "!");
}
"#,
);
@@ -249,8 +249,11 @@ fn main() {
builtin#lang(Count::Implied),
),
],
- unsafe {
- builtin#lang(UnsafeArg::new)()
+ {
+ ();
+ unsafe {
+ builtin#lang(UnsafeArg::new)()
+ }
},
);
}"#]]
@@ -258,6 +261,89 @@ fn main() {
}
#[test]
+fn desugar_builtin_format_args() {
+ let (db, body, def) = lower(
+ r#"
+//- minicore: fmt
+fn main() {
+ let are = "are";
+ let count = 10;
+ builtin#format_args("\u{1b}hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", orphan = (), last = "!");
+}
+"#,
+ );
+
+ expect![[r#"
+ fn main() {
+ let are = "are";
+ let count = 10;
+ {
+ let args = (&"fancy", &(), &"!", &count, &are, );
+ let args = [
+ builtin#lang(Argument::new_display)(
+ args.3,
+ ), builtin#lang(Argument::new_display)(
+ args.0,
+ ), builtin#lang(Argument::new_debug)(
+ args.4,
+ ), builtin#lang(Argument::new_display)(
+ args.2,
+ ),
+ ];
+ unsafe {
+ builtin#lang(Arguments::new_v1_formatted)(
+ &[
+ "\u{1b}hello ", " ", " friends, we ", " ", "",
+ ],
+ &args,
+ &[
+ builtin#lang(Placeholder::new)(
+ 0usize,
+ ' ',
+ builtin#lang(Alignment::Unknown),
+ 8u32,
+ builtin#lang(Count::Implied),
+ builtin#lang(Count::Is)(
+ 2,
+ ),
+ ), builtin#lang(Placeholder::new)(
+ 1usize,
+ ' ',
+ builtin#lang(Alignment::Unknown),
+ 0u32,
+ builtin#lang(Count::Implied),
+ builtin#lang(Count::Implied),
+ ), builtin#lang(Placeholder::new)(
+ 2usize,
+ ' ',
+ builtin#lang(Alignment::Unknown),
+ 0u32,
+ builtin#lang(Count::Implied),
+ builtin#lang(Count::Implied),
+ ), builtin#lang(Placeholder::new)(
+ 1usize,
+ ' ',
+ builtin#lang(Alignment::Unknown),
+ 0u32,
+ builtin#lang(Count::Implied),
+ builtin#lang(Count::Implied),
+ ), builtin#lang(Placeholder::new)(
+ 3usize,
+ ' ',
+ builtin#lang(Alignment::Unknown),
+ 0u32,
+ builtin#lang(Count::Implied),
+ builtin#lang(Count::Implied),
+ ),
+ ],
+ )
+ }
+ };
+ }"#]]
+ .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
+}
+
+#[test]
fn test_macro_hygiene() {
let (db, body, def) = lower(
r##"
@@ -295,29 +381,31 @@ impl SsrError {
expect![[r#"
fn main() {
_ = ra_test_fixture::error::SsrError::new(
- builtin#lang(Arguments::new_v1_formatted)(
- &[
- "Failed to resolve path `", "`",
- ],
- &[
+ {
+ let args = [
builtin#lang(Argument::new_display)(
&node.text(),
),
- ],
- &[
- builtin#lang(Placeholder::new)(
- 0usize,
- ' ',
- builtin#lang(Alignment::Unknown),
- 0u32,
- builtin#lang(Count::Implied),
- builtin#lang(Count::Implied),
- ),
- ],
+ ];
unsafe {
- builtin#lang(UnsafeArg::new)()
- },
- ),
+ builtin#lang(Arguments::new_v1_formatted)(
+ &[
+ "Failed to resolve path `", "`",
+ ],
+ &args,
+ &[
+ builtin#lang(Placeholder::new)(
+ 0usize,
+ ' ',
+ builtin#lang(Alignment::Unknown),
+ 0u32,
+ builtin#lang(Count::Implied),
+ builtin#lang(Count::Implied),
+ ),
+ ],
+ )
+ }
+ },
);
}"#]]
.assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
@@ -327,7 +415,7 @@ impl SsrError {
fn regression_10300() {
let (db, body, def) = lower(
r#"
-//- minicore: concat, panic
+//- minicore: concat, panic, fmt_before_1_89_0
mod private {
pub use core::concat;
}
@@ -420,9 +508,9 @@ fn f() {
}
"#,
);
- assert_eq!(body.bindings.len(), 1, "should have a binding for `B`");
+ assert_eq!(body.assert_expr_only().bindings.len(), 1, "should have a binding for `B`");
assert_eq!(
- body.bindings[BindingId::from_raw(RawIdx::from_u32(0))].name.as_str(),
+ body[BindingId::from_raw(RawIdx::from_u32(0))].name.as_str(),
"B",
"should have a binding for `B`",
);
@@ -478,6 +566,7 @@ const fn f(x: i32) -> i32 {
);
let mtch_arms = body
+ .assert_expr_only()
.exprs
.iter()
.find_map(|(_, expr)| {
@@ -490,10 +579,10 @@ const fn f(x: i32) -> i32 {
.unwrap();
let MatchArm { pat, .. } = mtch_arms[1];
- match body.pats[pat] {
+ match body[pat] {
Pat::Range { start, end } => {
- let hir_start = &body.exprs[start.unwrap()];
- let hir_end = &body.exprs[end.unwrap()];
+ let hir_start = &body[start.unwrap()];
+ let hir_end = &body[end.unwrap()];
assert!(matches!(hir_start, Expr::Path { .. }));
assert!(matches!(hir_end, Expr::Path { .. }));
diff --git a/crates/hir-def/src/expr_store/tests/body/block.rs b/crates/hir-def/src/expr_store/tests/body/block.rs
index 5f7b510bba..c7707378a5 100644
--- a/crates/hir-def/src/expr_store/tests/body/block.rs
+++ b/crates/hir-def/src/expr_store/tests/body/block.rs
@@ -397,7 +397,6 @@ fn main() {
fn underscore_import() {
// This used to panic, because the default (private) visibility inside block expressions would
// point into the containing `DefMap`, which visibilities should never be able to do.
- cov_mark::check!(adjust_vis_in_block_def_map);
check_at(
r#"
mod m {
@@ -457,7 +456,6 @@ fn foo() {
#[test]
fn is_visible_from_same_def_map() {
// Regression test for https://github.com/rust-lang/rust-analyzer/issues/9481
- cov_mark::check!(is_visible_from_same_block_def_map);
check_at(
r#"
fn outer() {
diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs
index bb75621c7e..dccfff002f 100644
--- a/crates/hir-def/src/find_path.rs
+++ b/crates/hir-def/src/find_path.rs
@@ -137,7 +137,7 @@ fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Opt
let loc = variant.lookup(ctx.db);
if let Some(mut path) = find_path_inner(ctx, ItemInNs::Types(loc.parent.into()), max_len) {
path.push_segment(
- ctx.db.enum_variants(loc.parent).variants[loc.index as usize].1.clone(),
+ loc.parent.enum_variants(ctx.db).variants[loc.index as usize].1.clone(),
);
return Some(path);
}
@@ -615,6 +615,7 @@ fn find_local_import_locations(
cov_mark::hit!(discount_private_imports);
false
}
+ Visibility::PubCrate(_) => true,
Visibility::Public => true,
};
@@ -1286,7 +1287,6 @@ $0
#[test]
fn explicit_private_imports_crate() {
- cov_mark::check!(explicit_private_imports);
check_found_path(
r#"
//- /main.rs
diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs
index 0fc7857d97..e70cd2cd6c 100644
--- a/crates/hir-def/src/hir.rs
+++ b/crates/hir-def/src/hir.rs
@@ -332,6 +332,17 @@ pub struct OffsetOf {
pub struct InlineAsm {
pub operands: Box<[(Option<Name>, AsmOperand)]>,
pub options: AsmOptions,
+ pub kind: InlineAsmKind,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum InlineAsmKind {
+ /// `asm!()`.
+ Asm,
+ /// `global_asm!()`.
+ GlobalAsm,
+ /// `naked_asm!()`.
+ NakedAsm,
}
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
diff --git a/crates/hir-def/src/hir/generics.rs b/crates/hir-def/src/hir/generics.rs
index a9a0e36312..94e683cb0f 100644
--- a/crates/hir-def/src/hir/generics.rs
+++ b/crates/hir-def/src/hir/generics.rs
@@ -331,13 +331,13 @@ impl GenericParams {
}
#[inline]
- pub fn no_predicates(&self) -> bool {
+ pub fn has_no_predicates(&self) -> bool {
self.where_predicates.is_empty()
}
#[inline]
- pub fn where_predicates(&self) -> std::slice::Iter<'_, WherePredicate> {
- self.where_predicates.iter()
+ pub fn where_predicates(&self) -> &[WherePredicate] {
+ &self.where_predicates
}
/// Iterator of type_or_consts field
diff --git a/crates/hir-def/src/hir/type_ref.rs b/crates/hir-def/src/hir/type_ref.rs
index eb3b92d31f..eacc3f3ced 100644
--- a/crates/hir-def/src/hir/type_ref.rs
+++ b/crates/hir-def/src/hir/type_ref.rs
@@ -149,7 +149,7 @@ pub enum TypeRef {
}
#[cfg(target_arch = "x86_64")]
-const _: () = assert!(size_of::<TypeRef>() == 16);
+const _: () = assert!(size_of::<TypeRef>() == 24);
pub type TypeRefId = Idx<TypeRef>;
diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs
index a6138fb682..f31f355cfa 100644
--- a/crates/hir-def/src/import_map.rs
+++ b/crates/hir-def/src/import_map.rs
@@ -16,7 +16,7 @@ use crate::{
AssocItemId, AttrDefId, Complete, FxIndexMap, ModuleDefId, ModuleId, TraitId,
db::DefDatabase,
item_scope::{ImportOrExternCrate, ItemInNs},
- nameres::{DefMap, crate_def_map},
+ nameres::{DefMap, assoc::TraitItems, crate_def_map},
visibility::Visibility,
};
@@ -221,7 +221,7 @@ impl ImportMap {
trait_import_info: &ImportInfo,
) {
let _p = tracing::info_span!("collect_trait_assoc_items").entered();
- for &(ref assoc_item_name, item) in &db.trait_items(tr).items {
+ for &(ref assoc_item_name, item) in &TraitItems::query(db, tr).items {
let module_def_id = match item {
AssocItemId::FunctionId(f) => ModuleDefId::from(f),
AssocItemId::ConstId(c) => ModuleDefId::from(c),
@@ -482,7 +482,7 @@ mod tests {
use expect_test::{Expect, expect};
use test_fixture::WithFixture;
- use crate::{ItemContainerId, Lookup, test_db::TestDB};
+ use crate::{ItemContainerId, Lookup, nameres::assoc::TraitItems, test_db::TestDB};
use super::*;
@@ -580,7 +580,7 @@ mod tests {
let trait_info = dependency_imports.import_info_for(ItemInNs::Types(trait_id.into()))?;
- let trait_items = db.trait_items(trait_id);
+ let trait_items = TraitItems::query(db, trait_id);
let (assoc_item_name, _) = trait_items
.items
.iter()
diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs
index 5362c0588d..efa4399468 100644
--- a/crates/hir-def/src/item_scope.rs
+++ b/crates/hir-def/src/item_scope.rs
@@ -13,13 +13,14 @@ use smallvec::{SmallVec, smallvec};
use span::Edition;
use stdx::format_to;
use syntax::ast;
+use thin_vec::ThinVec;
use crate::{
AdtId, BuiltinType, ConstId, ExternBlockId, ExternCrateId, FxIndexMap, HasModule, ImplId,
LocalModuleId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId,
db::DefDatabase,
per_ns::{Item, MacrosItem, PerNs, TypesItem, ValuesItem},
- visibility::{Visibility, VisibilityExplicitness},
+ visibility::Visibility,
};
#[derive(Debug, Default)]
@@ -155,22 +156,21 @@ pub struct ItemScope {
/// The defs declared in this scope. Each def has a single scope where it is
/// declared.
- declarations: Vec<ModuleDefId>,
+ declarations: ThinVec<ModuleDefId>,
- impls: Vec<ImplId>,
- #[allow(clippy::box_collection)]
- extern_blocks: Option<Box<Vec<ExternBlockId>>>,
- unnamed_consts: Vec<ConstId>,
+ impls: ThinVec<ImplId>,
+ extern_blocks: ThinVec<ExternBlockId>,
+ unnamed_consts: ThinVec<ConstId>,
/// Traits imported via `use Trait as _;`.
- unnamed_trait_imports: FxHashMap<TraitId, Item<()>>,
+ unnamed_trait_imports: ThinVec<(TraitId, Item<()>)>,
// the resolutions of the imports of this scope
use_imports_types: FxHashMap<ImportOrExternCrate, ImportOrDef>,
use_imports_values: FxHashMap<ImportOrGlob, ImportOrDef>,
use_imports_macros: FxHashMap<ImportOrExternCrate, ImportOrDef>,
- use_decls: Vec<UseId>,
- extern_crate_decls: Vec<ExternCrateId>,
+ use_decls: ThinVec<UseId>,
+ extern_crate_decls: ThinVec<ExternCrateId>,
/// Macros visible in current module in legacy textual scope
///
/// For macros invoked by an unqualified identifier like `bar!()`, `legacy_macros` will be searched in first.
@@ -183,7 +183,7 @@ pub struct ItemScope {
/// Module scoped macros will be inserted into `items` instead of here.
// FIXME: Macro shadowing in one module is not properly handled. Non-item place macros will
// be all resolved to the last one defined if shadowing happens.
- legacy_macros: FxHashMap<Name, SmallVec<[MacroId; 1]>>,
+ legacy_macros: FxHashMap<Name, SmallVec<[MacroId; 2]>>,
/// The attribute macro invocations in this scope.
attr_macros: FxHashMap<AstId<ast::Item>, MacroCallId>,
/// The macro invocations in this scope.
@@ -198,7 +198,7 @@ struct DeriveMacroInvocation {
attr_id: AttrId,
/// The `#[derive]` call
attr_call_id: MacroCallId,
- derive_call_ids: SmallVec<[Option<MacroCallId>; 1]>,
+ derive_call_ids: SmallVec<[Option<MacroCallId>; 4]>,
}
pub(crate) static BUILTIN_SCOPE: LazyLock<FxIndexMap<Name, PerNs>> = LazyLock::new(|| {
@@ -322,7 +322,7 @@ impl ItemScope {
}
pub fn extern_blocks(&self) -> impl Iterator<Item = ExternBlockId> + '_ {
- self.extern_blocks.iter().flat_map(|it| it.iter()).copied()
+ self.extern_blocks.iter().copied()
}
pub fn use_decls(&self) -> impl ExactSizeIterator<Item = UseId> + '_ {
@@ -435,7 +435,7 @@ impl ItemScope {
ModuleDefId::TraitId(t) => Some(t),
_ => None,
})
- .chain(self.unnamed_trait_imports.keys().copied())
+ .chain(self.unnamed_trait_imports.iter().map(|&(t, _)| t))
}
pub(crate) fn resolutions(&self) -> impl Iterator<Item = (Option<Name>, PerNs)> + '_ {
@@ -476,7 +476,7 @@ impl ItemScope {
}
pub(crate) fn define_extern_block(&mut self, extern_block: ExternBlockId) {
- self.extern_blocks.get_or_insert_default().push(extern_block);
+ self.extern_blocks.push(extern_block);
}
pub(crate) fn define_extern_crate_decl(&mut self, extern_crate: ExternCrateId) {
@@ -564,7 +564,7 @@ impl ItemScope {
// FIXME: This is only used in collection, we should move the relevant parts of it out of ItemScope
pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> {
- self.unnamed_trait_imports.get(&tr).map(|trait_| trait_.vis)
+ self.unnamed_trait_imports.iter().find(|&&(t, _)| t == tr).map(|(_, trait_)| trait_.vis)
}
pub(crate) fn push_unnamed_trait(
@@ -573,7 +573,7 @@ impl ItemScope {
vis: Visibility,
import: Option<ImportId>,
) {
- self.unnamed_trait_imports.insert(tr, Item { def: (), vis, import });
+ self.unnamed_trait_imports.push((tr, Item { def: (), vis, import }));
}
pub(crate) fn push_res_with_import(
@@ -720,33 +720,19 @@ impl ItemScope {
}
/// Marks everything that is not a procedural macro as private to `this_module`.
- pub(crate) fn censor_non_proc_macros(&mut self, this_module: ModuleId) {
+ pub(crate) fn censor_non_proc_macros(&mut self, krate: Crate) {
self.types
.values_mut()
.map(|def| &mut def.vis)
.chain(self.values.values_mut().map(|def| &mut def.vis))
- .chain(self.unnamed_trait_imports.values_mut().map(|def| &mut def.vis))
- .for_each(|vis| match vis {
- &mut Visibility::Module(_, visibility_explicitness) => {
- *vis = Visibility::Module(this_module, visibility_explicitness)
- }
- Visibility::Public => {
- *vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit)
- }
- });
+ .chain(self.unnamed_trait_imports.iter_mut().map(|(_, def)| &mut def.vis))
+ .for_each(|vis| *vis = Visibility::PubCrate(krate));
for mac in self.macros.values_mut() {
if matches!(mac.def, MacroId::ProcMacroId(_) if mac.import.is_none()) {
continue;
}
- match mac.vis {
- Visibility::Module(_, visibility_explicitness) => {
- mac.vis = Visibility::Module(this_module, visibility_explicitness)
- }
- Visibility::Public => {
- mac.vis = Visibility::Module(this_module, VisibilityExplicitness::Implicit)
- }
- }
+ mac.vis = Visibility::PubCrate(krate)
}
}
@@ -817,9 +803,7 @@ impl ItemScope {
macro_invocations,
extern_blocks,
} = self;
- if let Some(it) = extern_blocks {
- it.shrink_to_fit();
- }
+ extern_blocks.shrink_to_fit();
types.shrink_to_fit();
values.shrink_to_fit();
macros.shrink_to_fit();
diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs
index 1b97eb72b6..c633339857 100644
--- a/crates/hir-def/src/item_tree.rs
+++ b/crates/hir-def/src/item_tree.rs
@@ -29,7 +29,6 @@
//!
//! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its
//! surface syntax.
-#![allow(unexpected_cfgs)]
mod lower;
mod pretty;
@@ -38,38 +37,40 @@ mod tests;
use std::{
fmt::{self, Debug},
- hash::{Hash, Hasher},
- ops::{Index, Range},
+ hash::Hash,
+ ops::Index,
sync::OnceLock,
};
use ast::{AstNode, StructKind};
use base_db::Crate;
use hir_expand::{
- ExpandTo, HirFileId, InFile,
+ ExpandTo, HirFileId,
attrs::RawAttrs,
mod_path::{ModPath, PathKind},
name::Name,
};
-use intern::{Interned, Symbol};
-use la_arena::{Arena, Idx, RawIdx};
+use intern::Interned;
+use la_arena::{Idx, RawIdx};
use rustc_hash::FxHashMap;
-use smallvec::SmallVec;
use span::{AstIdNode, Edition, FileAstId, SyntaxContext};
use stdx::never;
use syntax::{SyntaxKind, ast, match_ast};
+use thin_vec::ThinVec;
use triomphe::Arc;
use crate::{BlockId, Lookup, attr::Attrs, db::DefDatabase};
+pub(crate) use crate::item_tree::lower::{lower_use_tree, visibility_from_ast};
+
#[derive(Copy, Clone, Eq, PartialEq)]
-pub struct RawVisibilityId(u32);
+pub(crate) struct RawVisibilityId(u32);
impl RawVisibilityId {
- pub const PUB: Self = RawVisibilityId(u32::MAX);
- pub const PRIV_IMPLICIT: Self = RawVisibilityId(u32::MAX - 1);
- pub const PRIV_EXPLICIT: Self = RawVisibilityId(u32::MAX - 2);
- pub const PUB_CRATE: Self = RawVisibilityId(u32::MAX - 3);
+ const PUB: Self = RawVisibilityId(u32::MAX);
+ const PRIV_IMPLICIT: Self = RawVisibilityId(u32::MAX - 1);
+ const PRIV_EXPLICIT: Self = RawVisibilityId(u32::MAX - 2);
+ const PUB_CRATE: Self = RawVisibilityId(u32::MAX - 3);
}
impl fmt::Debug for RawVisibilityId {
@@ -85,112 +86,136 @@ impl fmt::Debug for RawVisibilityId {
}
}
-/// The item tree of a source file.
-#[derive(Debug, Default, Eq, PartialEq)]
-pub struct ItemTree {
- top_level: SmallVec<[ModItem; 1]>,
- attrs: FxHashMap<AttrOwner, RawAttrs>,
-
- data: Option<Box<ItemTreeData>>,
-}
-
-impl ItemTree {
- pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
- let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered();
- static EMPTY: OnceLock<Arc<ItemTree>> = OnceLock::new();
-
- let ctx = lower::Ctx::new(db, file_id);
- let syntax = db.parse_or_expand(file_id);
- let mut top_attrs = None;
- let mut item_tree = match_ast! {
- match syntax {
- ast::SourceFile(file) => {
- top_attrs = Some(RawAttrs::new(db, &file, ctx.span_map()));
- ctx.lower_module_items(&file)
- },
- ast::MacroItems(items) => {
- ctx.lower_module_items(&items)
- },
- ast::MacroStmts(stmts) => {
- // The produced statements can include items, which should be added as top-level
- // items.
- ctx.lower_macro_stmts(stmts)
- },
- _ => {
- if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax) {
- return Default::default();
- }
- panic!("cannot create item tree for file {file_id:?} from {syntax:?} {syntax}");
- },
- }
- };
+#[salsa_macros::tracked(returns(deref))]
+pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
+ let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered();
+ static EMPTY: OnceLock<Arc<ItemTree>> = OnceLock::new();
- if let Some(attrs) = top_attrs {
- item_tree.attrs.insert(AttrOwner::TopLevel, attrs);
+ let ctx = lower::Ctx::new(db, file_id);
+ let syntax = db.parse_or_expand(file_id);
+ let mut item_tree = match_ast! {
+ match syntax {
+ ast::SourceFile(file) => {
+ let top_attrs = RawAttrs::new(db, &file, ctx.span_map());
+ let mut item_tree = ctx.lower_module_items(&file);
+ item_tree.top_attrs = top_attrs;
+ item_tree
+ },
+ ast::MacroItems(items) => {
+ ctx.lower_module_items(&items)
+ },
+ ast::MacroStmts(stmts) => {
+ // The produced statements can include items, which should be added as top-level
+ // items.
+ ctx.lower_macro_stmts(stmts)
+ },
+ _ => {
+ if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax) {
+ return Default::default();
+ }
+ panic!("cannot create item tree for file {file_id:?} from {syntax:?} {syntax}");
+ },
}
- if item_tree.data.is_none() && item_tree.top_level.is_empty() && item_tree.attrs.is_empty()
- {
- EMPTY
- .get_or_init(|| {
- Arc::new(ItemTree {
- top_level: SmallVec::new_const(),
- attrs: FxHashMap::default(),
- data: None,
- })
+ };
+ let ItemTree { top_level, top_attrs, attrs, vis, big_data, small_data } = &item_tree;
+ if small_data.is_empty()
+ && big_data.is_empty()
+ && top_level.is_empty()
+ && attrs.is_empty()
+ && top_attrs.is_empty()
+ && vis.arena.is_empty()
+ {
+ EMPTY
+ .get_or_init(|| {
+ Arc::new(ItemTree {
+ top_level: Box::new([]),
+ attrs: FxHashMap::default(),
+ small_data: FxHashMap::default(),
+ big_data: FxHashMap::default(),
+ top_attrs: RawAttrs::EMPTY,
+ vis: ItemVisibilities { arena: ThinVec::new() },
})
- .clone()
- } else {
- item_tree.shrink_to_fit();
- Arc::new(item_tree)
- }
- }
-
- pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> {
- let _p = tracing::info_span!("block_item_tree_query", ?block).entered();
- static EMPTY: OnceLock<Arc<ItemTree>> = OnceLock::new();
-
- let loc = block.lookup(db);
- let block = loc.ast_id.to_node(db);
-
- let ctx = lower::Ctx::new(db, loc.ast_id.file_id);
- let mut item_tree = ctx.lower_block(&block);
- if item_tree.data.is_none() && item_tree.top_level.is_empty() && item_tree.attrs.is_empty()
- {
- EMPTY
- .get_or_init(|| {
- Arc::new(ItemTree {
- top_level: SmallVec::new_const(),
- attrs: FxHashMap::default(),
- data: None,
- })
+ })
+ .clone()
+ } else {
+ item_tree.shrink_to_fit();
+ Arc::new(item_tree)
+ }
+}
+
+#[salsa_macros::tracked(returns(deref))]
+pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> {
+ let _p = tracing::info_span!("block_item_tree_query", ?block).entered();
+ static EMPTY: OnceLock<Arc<ItemTree>> = OnceLock::new();
+
+ let loc = block.lookup(db);
+ let block = loc.ast_id.to_node(db);
+
+ let ctx = lower::Ctx::new(db, loc.ast_id.file_id);
+ let mut item_tree = ctx.lower_block(&block);
+ let ItemTree { top_level, top_attrs, attrs, vis, big_data, small_data } = &item_tree;
+ if small_data.is_empty()
+ && big_data.is_empty()
+ && top_level.is_empty()
+ && attrs.is_empty()
+ && top_attrs.is_empty()
+ && vis.arena.is_empty()
+ {
+ EMPTY
+ .get_or_init(|| {
+ Arc::new(ItemTree {
+ top_level: Box::new([]),
+ attrs: FxHashMap::default(),
+ small_data: FxHashMap::default(),
+ big_data: FxHashMap::default(),
+ top_attrs: RawAttrs::EMPTY,
+ vis: ItemVisibilities { arena: ThinVec::new() },
})
- .clone()
- } else {
- item_tree.shrink_to_fit();
- Arc::new(item_tree)
- }
+ })
+ .clone()
+ } else {
+ item_tree.shrink_to_fit();
+ Arc::new(item_tree)
}
+}
+/// The item tree of a source file.
+#[derive(Debug, Default, Eq, PartialEq)]
+pub struct ItemTree {
+ top_level: Box<[ModItemId]>,
+ top_attrs: RawAttrs,
+ attrs: FxHashMap<FileAstId<ast::Item>, RawAttrs>,
+ vis: ItemVisibilities,
+ big_data: FxHashMap<FileAstId<ast::Item>, BigModItem>,
+ small_data: FxHashMap<FileAstId<ast::Item>, SmallModItem>,
+}
+impl ItemTree {
/// Returns an iterator over all items located at the top level of the `HirFileId` this
/// `ItemTree` was created from.
- pub fn top_level_items(&self) -> &[ModItem] {
+ pub(crate) fn top_level_items(&self) -> &[ModItemId] {
&self.top_level
}
/// Returns the inner attributes of the source file.
- pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: Crate) -> Attrs {
- Attrs::expand_cfg_attr(
- db,
- krate,
- self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone(),
- )
+ pub(crate) fn top_level_raw_attrs(&self) -> &RawAttrs {
+ &self.top_attrs
+ }
+
+ /// Returns the inner attributes of the source file.
+ pub(crate) fn top_level_attrs(&self, db: &dyn DefDatabase, krate: Crate) -> Attrs {
+ Attrs::expand_cfg_attr(db, krate, self.top_attrs.clone())
}
- pub(crate) fn raw_attrs(&self, of: AttrOwner) -> &RawAttrs {
+ pub(crate) fn raw_attrs(&self, of: FileAstId<ast::Item>) -> &RawAttrs {
self.attrs.get(&of).unwrap_or(&RawAttrs::EMPTY)
}
- pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: Crate, of: AttrOwner) -> Attrs {
+ pub(crate) fn attrs(
+ &self,
+ db: &dyn DefDatabase,
+ krate: Crate,
+ of: FileAstId<ast::Item>,
+ ) -> Attrs {
Attrs::expand_cfg_attr(db, krate, self.raw_attrs(of).clone())
}
@@ -198,131 +223,79 @@ impl ItemTree {
///
/// For more detail, see [`ItemTreeDataStats`].
pub fn item_tree_stats(&self) -> ItemTreeDataStats {
- match self.data {
- Some(ref data) => ItemTreeDataStats {
- traits: data.traits.len(),
- impls: data.impls.len(),
- mods: data.mods.len(),
- macro_calls: data.macro_calls.len(),
- macro_rules: data.macro_rules.len(),
- },
- None => ItemTreeDataStats::default(),
+ let mut traits = 0;
+ let mut impls = 0;
+ let mut mods = 0;
+ let mut macro_calls = 0;
+ let mut macro_rules = 0;
+ for item in self.small_data.values() {
+ match item {
+ SmallModItem::Trait(_) => traits += 1,
+ SmallModItem::Impl(_) => impls += 1,
+ SmallModItem::MacroRules(_) => macro_rules += 1,
+ SmallModItem::MacroCall(_) => macro_calls += 1,
+ _ => {}
+ }
+ }
+ for item in self.big_data.values() {
+ match item {
+ BigModItem::Mod(_) => mods += 1,
+ _ => {}
+ }
}
+ ItemTreeDataStats { traits, impls, mods, macro_calls, macro_rules }
}
pub fn pretty_print(&self, db: &dyn DefDatabase, edition: Edition) -> String {
pretty::print_item_tree(db, self, edition)
}
- fn data(&self) -> &ItemTreeData {
- self.data.as_ref().expect("attempted to access data of empty ItemTree")
- }
-
- fn data_mut(&mut self) -> &mut ItemTreeData {
- self.data.get_or_insert_with(Box::default)
- }
-
fn shrink_to_fit(&mut self) {
- let ItemTree { top_level, attrs, data } = self;
- top_level.shrink_to_fit();
+ let ItemTree { top_level: _, attrs, big_data, small_data, vis: _, top_attrs: _ } = self;
attrs.shrink_to_fit();
- if let Some(data) = data {
- let ItemTreeData {
- uses,
- extern_crates,
- extern_blocks,
- functions,
- structs,
- unions,
- enums,
- variants,
- consts,
- statics,
- traits,
- trait_aliases,
- impls,
- type_aliases,
- mods,
- macro_calls,
- macro_rules,
- macro_defs,
- vis,
- } = &mut **data;
-
- uses.shrink_to_fit();
- extern_crates.shrink_to_fit();
- extern_blocks.shrink_to_fit();
- functions.shrink_to_fit();
- structs.shrink_to_fit();
- unions.shrink_to_fit();
- enums.shrink_to_fit();
- variants.shrink_to_fit();
- consts.shrink_to_fit();
- statics.shrink_to_fit();
- traits.shrink_to_fit();
- trait_aliases.shrink_to_fit();
- impls.shrink_to_fit();
- type_aliases.shrink_to_fit();
- mods.shrink_to_fit();
- macro_calls.shrink_to_fit();
- macro_rules.shrink_to_fit();
- macro_defs.shrink_to_fit();
-
- vis.arena.shrink_to_fit();
- }
+ big_data.shrink_to_fit();
+ small_data.shrink_to_fit();
}
}
#[derive(Default, Debug, Eq, PartialEq)]
struct ItemVisibilities {
- arena: Arena<RawVisibility>,
+ arena: ThinVec<RawVisibility>,
}
-impl ItemVisibilities {
- fn alloc(&mut self, vis: RawVisibility) -> RawVisibilityId {
- match &vis {
- RawVisibility::Public => RawVisibilityId::PUB,
- RawVisibility::Module(path, explicitiy) if path.segments().is_empty() => {
- match (path.kind, explicitiy) {
- (PathKind::SELF, VisibilityExplicitness::Explicit) => {
- RawVisibilityId::PRIV_EXPLICIT
- }
- (PathKind::SELF, VisibilityExplicitness::Implicit) => {
- RawVisibilityId::PRIV_IMPLICIT
- }
- (PathKind::Crate, _) => RawVisibilityId::PUB_CRATE,
- _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()),
- }
- }
- _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()),
- }
- }
+#[derive(Debug, Clone, Eq, PartialEq)]
+enum SmallModItem {
+ Const(Const),
+ Enum(Enum),
+ ExternBlock(ExternBlock),
+ Function(Function),
+ Impl(Impl),
+ Macro2(Macro2),
+ MacroCall(MacroCall),
+ MacroRules(MacroRules),
+ Static(Static),
+ Struct(Struct),
+ Trait(Trait),
+ TraitAlias(TraitAlias),
+ TypeAlias(TypeAlias),
+ Union(Union),
}
-#[derive(Default, Debug, Eq, PartialEq)]
-struct ItemTreeData {
- uses: Arena<Use>,
- extern_crates: Arena<ExternCrate>,
- extern_blocks: Arena<ExternBlock>,
- functions: Arena<Function>,
- structs: Arena<Struct>,
- unions: Arena<Union>,
- enums: Arena<Enum>,
- variants: Arena<Variant>,
- consts: Arena<Const>,
- statics: Arena<Static>,
- traits: Arena<Trait>,
- trait_aliases: Arena<TraitAlias>,
- impls: Arena<Impl>,
- type_aliases: Arena<TypeAlias>,
- mods: Arena<Mod>,
- macro_calls: Arena<MacroCall>,
- macro_rules: Arena<MacroRules>,
- macro_defs: Arena<Macro2>,
-
- vis: ItemVisibilities,
+#[derive(Debug, Clone, Eq, PartialEq)]
+enum BigModItem {
+ ExternCrate(ExternCrate),
+ Mod(Mod),
+ Use(Use),
}
+// `ModItem` is stored a bunch in `ItemTree`'s so we pay the max for each item. It should stay as
+// small as possible which is why we split them in two, most common ones are 3 usize but some rarer
+// ones are 5.
+#[cfg(target_pointer_width = "64")]
+const _: [(); std::mem::size_of::<BigModItem>()] = [(); std::mem::size_of::<[usize; 5]>()];
+#[cfg(target_pointer_width = "64")]
+const _: [(); std::mem::size_of::<SmallModItem>()] = [(); std::mem::size_of::<[usize; 3]>()];
+
#[derive(Default, Debug, Eq, PartialEq)]
pub struct ItemTreeDataStats {
pub traits: usize,
@@ -332,100 +305,13 @@ pub struct ItemTreeDataStats {
pub macro_rules: usize,
}
-#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
-pub enum AttrOwner {
- /// Attributes on an item.
- ModItem(ModItem),
- /// Inner attributes of the source file.
- TopLevel,
-
- Variant(FileItemTreeId<Variant>),
- // while not relevant to early name resolution, fields can contain visibility
- Field(FieldParent, ItemTreeFieldId),
-}
-
-impl AttrOwner {
- pub fn make_field_indexed(parent: FieldParent, idx: usize) -> Self {
- AttrOwner::Field(parent, ItemTreeFieldId::from_raw(RawIdx::from_u32(idx as u32)))
- }
-}
-
-#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
-pub enum FieldParent {
- Struct(FileItemTreeId<Struct>),
- Union(FileItemTreeId<Union>),
- EnumVariant(FileItemTreeId<Variant>),
-}
-
-pub type ItemTreeFieldId = Idx<Field>;
-
-macro_rules! from_attrs {
- ( $( $var:ident($t:ty) ),+ $(,)? ) => {
- $(
- impl From<$t> for AttrOwner {
- fn from(t: $t) -> AttrOwner {
- AttrOwner::$var(t)
- }
- }
- )+
- };
-}
-
-from_attrs!(ModItem(ModItem), Variant(FileItemTreeId<Variant>));
-
/// Trait implemented by all nodes in the item tree.
-pub trait ItemTreeNode: Clone {
+pub(crate) trait ItemTreeNode: Clone {
type Source: AstIdNode;
-
- fn ast_id(&self) -> FileAstId<Self::Source>;
-
- /// Looks up an instance of `Self` in an item tree.
- fn lookup(tree: &ItemTree, index: Idx<Self>) -> &Self;
- fn attr_owner(id: FileItemTreeId<Self>) -> AttrOwner;
-}
-
-pub struct FileItemTreeId<N>(Idx<N>);
-
-impl<N> FileItemTreeId<N> {
- pub fn range_iter(range: Range<Self>) -> impl Iterator<Item = Self> + Clone {
- (range.start.index().into_raw().into_u32()..range.end.index().into_raw().into_u32())
- .map(RawIdx::from_u32)
- .map(Idx::from_raw)
- .map(Self)
- }
-}
-
-impl<N> FileItemTreeId<N> {
- pub fn index(&self) -> Idx<N> {
- self.0
- }
-}
-
-impl<N> Clone for FileItemTreeId<N> {
- fn clone(&self) -> Self {
- *self
- }
-}
-impl<N> Copy for FileItemTreeId<N> {}
-
-impl<N> PartialEq for FileItemTreeId<N> {
- fn eq(&self, other: &FileItemTreeId<N>) -> bool {
- self.0 == other.0
- }
-}
-impl<N> Eq for FileItemTreeId<N> {}
-
-impl<N> Hash for FileItemTreeId<N> {
- fn hash<H: Hasher>(&self, state: &mut H) {
- self.0.hash(state)
- }
}
-impl<N> fmt::Debug for FileItemTreeId<N> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.0.fmt(f)
- }
-}
+#[allow(type_alias_bounds)]
+pub(crate) type ItemTreeAstId<T: ItemTreeNode> = FileAstId<T::Source>;
/// Identifies a particular [`ItemTree`].
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
@@ -435,100 +321,48 @@ pub struct TreeId {
}
impl TreeId {
- pub fn new(file: HirFileId, block: Option<BlockId>) -> Self {
+ pub(crate) fn new(file: HirFileId, block: Option<BlockId>) -> Self {
Self { file, block }
}
- pub fn item_tree(&self, db: &dyn DefDatabase) -> Arc<ItemTree> {
+ pub(crate) fn item_tree<'db>(&self, db: &'db dyn DefDatabase) -> &'db ItemTree {
match self.block {
- Some(block) => db.block_item_tree(block),
- None => db.file_item_tree(self.file),
+ Some(block) => block_item_tree_query(db, block),
+ None => file_item_tree_query(db, self.file),
}
}
+ #[inline]
pub fn file_id(self) -> HirFileId {
self.file
}
- pub fn is_block(self) -> bool {
+ pub(crate) fn is_block(self) -> bool {
self.block.is_some()
}
}
-#[derive(Debug)]
-pub struct ItemTreeId<N> {
- tree: TreeId,
- pub value: FileItemTreeId<N>,
-}
-
-impl<N> ItemTreeId<N> {
- pub fn new(tree: TreeId, idx: FileItemTreeId<N>) -> Self {
- Self { tree, value: idx }
- }
-
- pub fn file_id(self) -> HirFileId {
- self.tree.file
- }
-
- pub fn tree_id(self) -> TreeId {
- self.tree
- }
-
- pub fn item_tree(self, db: &dyn DefDatabase) -> Arc<ItemTree> {
- self.tree.item_tree(db)
- }
-
- pub fn resolved<R>(self, db: &dyn DefDatabase, cb: impl FnOnce(&N) -> R) -> R
- where
- ItemTree: Index<FileItemTreeId<N>, Output = N>,
- {
- cb(&self.tree.item_tree(db)[self.value])
- }
-}
-
-impl<N> Copy for ItemTreeId<N> {}
-impl<N> Clone for ItemTreeId<N> {
- fn clone(&self) -> Self {
- *self
- }
-}
-
-impl<N> PartialEq for ItemTreeId<N> {
- fn eq(&self, other: &Self) -> bool {
- self.tree == other.tree && self.value == other.value
- }
-}
-
-impl<N> Eq for ItemTreeId<N> {}
-
-impl<N> Hash for ItemTreeId<N> {
- fn hash<H: Hasher>(&self, state: &mut H) {
- self.tree.hash(state);
- self.value.hash(state);
- }
-}
-
macro_rules! mod_items {
- ( $( $typ:ident in $fld:ident -> $ast:ty ),+ $(,)? ) => {
+ ($mod_item:ident -> $( $typ:ident in $fld:ident -> $ast:ty ),+ $(,)? ) => {
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
- pub enum ModItem {
+ pub(crate) enum $mod_item {
$(
- $typ(FileItemTreeId<$typ>),
+ $typ(FileAstId<$ast>),
)+
}
- impl ModItem {
- pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> {
+ impl $mod_item {
+ pub(crate) fn ast_id(self) -> FileAstId<ast::Item> {
match self {
- $(ModItem::$typ(it) => tree[it.index()].ast_id().upcast()),+
+ $($mod_item::$typ(it) => it.upcast()),+
}
}
}
$(
- impl From<FileItemTreeId<$typ>> for ModItem {
- fn from(id: FileItemTreeId<$typ>) -> ModItem {
- ModItem::$typ(id)
+ impl From<FileAstId<$ast>> for $mod_item {
+ fn from(id: FileAstId<$ast>) -> $mod_item {
+ ModItemId::$typ(id)
}
}
)+
@@ -536,25 +370,19 @@ macro_rules! mod_items {
$(
impl ItemTreeNode for $typ {
type Source = $ast;
-
- fn ast_id(&self) -> FileAstId<Self::Source> {
- self.ast_id
- }
-
- fn lookup(tree: &ItemTree, index: Idx<Self>) -> &Self {
- &tree.data().$fld[index]
- }
-
- fn attr_owner(id: FileItemTreeId<Self>) -> AttrOwner {
- AttrOwner::ModItem(ModItem::$typ(id))
- }
}
- impl Index<Idx<$typ>> for ItemTree {
+ impl Index<FileAstId<$ast>> for ItemTree {
type Output = $typ;
- fn index(&self, index: Idx<$typ>) -> &Self::Output {
- &self.data().$fld[index]
+ #[allow(unused_imports)]
+ fn index(&self, index: FileAstId<$ast>) -> &Self::Output {
+ use BigModItem::*;
+ use SmallModItem::*;
+ match &self.$fld[&index.upcast()] {
+ $typ(item) => item,
+ _ => panic!("expected item of type `{}` at index `{:?}`", stringify!($typ), index),
+ }
}
}
)+
@@ -562,94 +390,59 @@ macro_rules! mod_items {
}
mod_items! {
- Use in uses -> ast::Use,
- ExternCrate in extern_crates -> ast::ExternCrate,
- ExternBlock in extern_blocks -> ast::ExternBlock,
- Function in functions -> ast::Fn,
- Struct in structs -> ast::Struct,
- Union in unions -> ast::Union,
- Enum in enums -> ast::Enum,
- Const in consts -> ast::Const,
- Static in statics -> ast::Static,
- Trait in traits -> ast::Trait,
- TraitAlias in trait_aliases -> ast::TraitAlias,
- Impl in impls -> ast::Impl,
- TypeAlias in type_aliases -> ast::TypeAlias,
- Mod in mods -> ast::Module,
- MacroCall in macro_calls -> ast::MacroCall,
- MacroRules in macro_rules -> ast::MacroRules,
- Macro2 in macro_defs -> ast::MacroDef,
+ModItemId ->
+ Const in small_data -> ast::Const,
+ Enum in small_data -> ast::Enum,
+ ExternBlock in small_data -> ast::ExternBlock,
+ ExternCrate in big_data -> ast::ExternCrate,
+ Function in small_data -> ast::Fn,
+ Impl in small_data -> ast::Impl,
+ Macro2 in small_data -> ast::MacroDef,
+ MacroCall in small_data -> ast::MacroCall,
+ MacroRules in small_data -> ast::MacroRules,
+ Mod in big_data -> ast::Module,
+ Static in small_data -> ast::Static,
+ Struct in small_data -> ast::Struct,
+ Trait in small_data -> ast::Trait,
+ TraitAlias in small_data -> ast::TraitAlias,
+ TypeAlias in small_data -> ast::TypeAlias,
+ Union in small_data -> ast::Union,
+ Use in big_data -> ast::Use,
}
impl Index<RawVisibilityId> for ItemTree {
type Output = RawVisibility;
fn index(&self, index: RawVisibilityId) -> &Self::Output {
static VIS_PUB: RawVisibility = RawVisibility::Public;
- static VIS_PRIV_IMPLICIT: OnceLock<RawVisibility> = OnceLock::new();
- static VIS_PRIV_EXPLICIT: OnceLock<RawVisibility> = OnceLock::new();
- static VIS_PUB_CRATE: OnceLock<RawVisibility> = OnceLock::new();
+ static VIS_PRIV_IMPLICIT: RawVisibility =
+ RawVisibility::PubSelf(VisibilityExplicitness::Implicit);
+ static VIS_PRIV_EXPLICIT: RawVisibility =
+ RawVisibility::PubSelf(VisibilityExplicitness::Explicit);
+ static VIS_PUB_CRATE: RawVisibility = RawVisibility::PubCrate;
match index {
- RawVisibilityId::PRIV_IMPLICIT => VIS_PRIV_IMPLICIT.get_or_init(|| {
- RawVisibility::Module(
- Interned::new(ModPath::from_kind(PathKind::SELF)),
- VisibilityExplicitness::Implicit,
- )
- }),
- RawVisibilityId::PRIV_EXPLICIT => VIS_PRIV_EXPLICIT.get_or_init(|| {
- RawVisibility::Module(
- Interned::new(ModPath::from_kind(PathKind::SELF)),
- VisibilityExplicitness::Explicit,
- )
- }),
+ RawVisibilityId::PRIV_IMPLICIT => &VIS_PRIV_IMPLICIT,
+ RawVisibilityId::PRIV_EXPLICIT => &VIS_PRIV_EXPLICIT,
RawVisibilityId::PUB => &VIS_PUB,
- RawVisibilityId::PUB_CRATE => VIS_PUB_CRATE.get_or_init(|| {
- RawVisibility::Module(
- Interned::new(ModPath::from_kind(PathKind::Crate)),
- VisibilityExplicitness::Explicit,
- )
- }),
- _ => &self.data().vis.arena[Idx::from_raw(index.0.into())],
+ RawVisibilityId::PUB_CRATE => &VIS_PUB_CRATE,
+ _ => &self.vis.arena[index.0 as usize],
}
}
}
-impl<N: ItemTreeNode> Index<FileItemTreeId<N>> for ItemTree {
- type Output = N;
- fn index(&self, id: FileItemTreeId<N>) -> &N {
- N::lookup(self, id.index())
- }
-}
-
-impl ItemTreeNode for Variant {
- type Source = ast::Variant;
-
- fn ast_id(&self) -> FileAstId<Self::Source> {
- self.ast_id
- }
-
- fn lookup(tree: &ItemTree, index: Idx<Self>) -> &Self {
- &tree.data().variants[index]
- }
-
- fn attr_owner(id: FileItemTreeId<Self>) -> AttrOwner {
- AttrOwner::Variant(id)
- }
-}
-
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Use {
- pub visibility: RawVisibilityId,
- pub ast_id: FileAstId<ast::Use>,
- pub use_tree: UseTree,
+ pub(crate) visibility: RawVisibilityId,
+ pub(crate) use_tree: UseTree,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct UseTree {
- pub index: Idx<ast::UseTree>,
kind: UseTreeKind,
}
+// FIXME: Would be nice to encode `None` into this
+// We could just use a `Name` where `_` well means `_` ..
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ImportAlias {
/// Unnamed alias, as in `use Foo as _;`
@@ -703,55 +496,37 @@ pub enum UseTreeKind {
pub struct ExternCrate {
pub name: Name,
pub alias: Option<ImportAlias>,
- pub visibility: RawVisibilityId,
- pub ast_id: FileAstId<ast::ExternCrate>,
+ pub(crate) visibility: RawVisibilityId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ExternBlock {
- pub abi: Option<Symbol>,
- pub ast_id: FileAstId<ast::ExternBlock>,
- pub children: Box<[ModItem]>,
+ pub(crate) children: Box<[ModItemId]>,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Function {
pub name: Name,
- pub visibility: RawVisibilityId,
- pub ast_id: FileAstId<ast::Fn>,
+ pub(crate) visibility: RawVisibilityId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Struct {
pub name: Name,
- pub visibility: RawVisibilityId,
- pub fields: Box<[Field]>,
+ pub(crate) visibility: RawVisibilityId,
pub shape: FieldsShape,
- pub ast_id: FileAstId<ast::Struct>,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Union {
pub name: Name,
- pub visibility: RawVisibilityId,
- pub fields: Box<[Field]>,
- pub ast_id: FileAstId<ast::Union>,
+ pub(crate) visibility: RawVisibilityId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Enum {
pub name: Name,
- pub visibility: RawVisibilityId,
- pub variants: Range<FileItemTreeId<Variant>>,
- pub ast_id: FileAstId<ast::Enum>,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct Variant {
- pub name: Name,
- pub fields: Box<[Field]>,
- pub shape: FieldsShape,
- pub ast_id: FileAstId<ast::Variant>,
+ pub(crate) visibility: RawVisibilityId,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
@@ -762,11 +537,15 @@ pub enum FieldsShape {
}
/// Visibility of an item, not yet resolved.
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum RawVisibility {
/// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is
/// equivalent to `pub(self)`.
Module(Interned<ModPath>, VisibilityExplicitness),
+ /// `pub(self)`.
+ PubSelf(VisibilityExplicitness),
+ /// `pub(crate)`.
+ PubCrate,
/// `pub`.
Public,
}
@@ -785,71 +564,51 @@ impl VisibilityExplicitness {
}
}
-// FIXME: Remove this from item tree?
-/// A single field of an enum variant or struct
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct Field {
- pub name: Name,
- pub visibility: RawVisibilityId,
- // FIXME: Not an item tree property
- pub is_unsafe: bool,
-}
-
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Const {
/// `None` for `const _: () = ();`
pub name: Option<Name>,
- pub visibility: RawVisibilityId,
- pub ast_id: FileAstId<ast::Const>,
+ pub(crate) visibility: RawVisibilityId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Static {
pub name: Name,
- pub visibility: RawVisibilityId,
- pub ast_id: FileAstId<ast::Static>,
+ pub(crate) visibility: RawVisibilityId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Trait {
pub name: Name,
- pub visibility: RawVisibilityId,
- pub items: Box<[AssocItem]>,
- pub ast_id: FileAstId<ast::Trait>,
+ pub(crate) visibility: RawVisibilityId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TraitAlias {
pub name: Name,
- pub visibility: RawVisibilityId,
- pub ast_id: FileAstId<ast::TraitAlias>,
+ pub(crate) visibility: RawVisibilityId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
-pub struct Impl {
- pub items: Box<[AssocItem]>,
- pub ast_id: FileAstId<ast::Impl>,
-}
+pub struct Impl {}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TypeAlias {
pub name: Name,
- pub visibility: RawVisibilityId,
- pub ast_id: FileAstId<ast::TypeAlias>,
+ pub(crate) visibility: RawVisibilityId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Mod {
pub name: Name,
- pub visibility: RawVisibilityId,
- pub kind: ModKind,
- pub ast_id: FileAstId<ast::Module>,
+ pub(crate) visibility: RawVisibilityId,
+ pub(crate) kind: ModKind,
}
#[derive(Debug, Clone, Eq, PartialEq)]
-pub enum ModKind {
+pub(crate) enum ModKind {
/// `mod m { ... }`
- Inline { items: Box<[ModItem]> },
+ Inline { items: Box<[ModItemId]> },
/// `mod m;`
Outline,
}
@@ -858,7 +617,6 @@ pub enum ModKind {
pub struct MacroCall {
/// Path to the called macro.
pub path: Interned<ModPath>,
- pub ast_id: FileAstId<ast::MacroCall>,
pub expand_to: ExpandTo,
pub ctxt: SyntaxContext,
}
@@ -867,52 +625,13 @@ pub struct MacroCall {
pub struct MacroRules {
/// The name of the declared macro.
pub name: Name,
- pub ast_id: FileAstId<ast::MacroRules>,
}
/// "Macros 2.0" macro definition.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Macro2 {
pub name: Name,
- pub visibility: RawVisibilityId,
- pub ast_id: FileAstId<ast::MacroDef>,
-}
-
-impl Use {
- /// Maps a `UseTree` contained in this import back to its AST node.
- pub fn use_tree_to_ast(
- &self,
- db: &dyn DefDatabase,
- file_id: HirFileId,
- index: Idx<ast::UseTree>,
- ) -> ast::UseTree {
- // Re-lower the AST item and get the source map.
- // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
- let ast = InFile::new(file_id, self.ast_id).to_node(db);
- let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
- let (_, source_map) = lower::lower_use_tree(db, ast_use_tree, &mut |range| {
- db.span_map(file_id).span_for_range(range).ctx
- })
- .expect("failed to lower use tree");
- source_map[index].clone()
- }
-
- /// Maps a `UseTree` contained in this import back to its AST node.
- pub fn use_tree_source_map(
- &self,
- db: &dyn DefDatabase,
- file_id: HirFileId,
- ) -> Arena<ast::UseTree> {
- // Re-lower the AST item and get the source map.
- // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
- let ast = InFile::new(file_id, self.ast_id).to_node(db);
- let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
- lower::lower_use_tree(db, ast_use_tree, &mut |range| {
- db.span_map(file_id).span_for_range(range).ctx
- })
- .expect("failed to lower use tree")
- .1
- }
+ pub(crate) visibility: RawVisibilityId,
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
@@ -925,15 +644,17 @@ pub enum ImportKind {
TypeOnly,
}
-impl UseTree {
+impl Use {
/// Expands the `UseTree` into individually imported `ModPath`s.
pub fn expand(
&self,
mut cb: impl FnMut(Idx<ast::UseTree>, ModPath, ImportKind, Option<ImportAlias>),
) {
- self.expand_impl(None, &mut cb)
+ self.use_tree.expand_impl(None, &mut 0, &mut cb)
}
+}
+impl UseTree {
/// The [`UseTreeKind`] of this `UseTree`.
pub fn kind(&self) -> &UseTreeKind {
&self.kind
@@ -942,6 +663,7 @@ impl UseTree {
fn expand_impl(
&self,
prefix: Option<ModPath>,
+ counting_index: &mut u32,
cb: &mut impl FnMut(Idx<ast::UseTree>, ModPath, ImportKind, Option<ImportAlias>),
) {
fn concat_mod_paths(
@@ -977,17 +699,27 @@ impl UseTree {
match &self.kind {
UseTreeKind::Single { path, alias } => {
if let Some((path, kind)) = concat_mod_paths(prefix, path) {
- cb(self.index, path, kind, alias.clone());
+ cb(Idx::from_raw(RawIdx::from_u32(*counting_index)), path, kind, alias.clone());
}
}
UseTreeKind::Glob { path: Some(path) } => {
if let Some((path, _)) = concat_mod_paths(prefix, path) {
- cb(self.index, path, ImportKind::Glob, None);
+ cb(
+ Idx::from_raw(RawIdx::from_u32(*counting_index)),
+ path,
+ ImportKind::Glob,
+ None,
+ );
}
}
UseTreeKind::Glob { path: None } => {
if let Some(prefix) = prefix {
- cb(self.index, prefix, ImportKind::Glob, None);
+ cb(
+ Idx::from_raw(RawIdx::from_u32(*counting_index)),
+ prefix,
+ ImportKind::Glob,
+ None,
+ );
}
}
UseTreeKind::Prefixed { prefix: additional_prefix, list } => {
@@ -999,82 +731,10 @@ impl UseTree {
None => prefix,
};
for tree in &**list {
- tree.expand_impl(prefix.clone(), cb);
+ *counting_index += 1;
+ tree.expand_impl(prefix.clone(), counting_index, cb);
}
}
}
}
}
-
-macro_rules! impl_froms {
- ($e:ident { $($v:ident ($t:ty)),* $(,)? }) => {
- $(
- impl From<$t> for $e {
- fn from(it: $t) -> $e {
- $e::$v(it)
- }
- }
- )*
- }
-}
-
-impl ModItem {
- pub fn as_assoc_item(&self) -> Option<AssocItem> {
- match self {
- ModItem::Use(_)
- | ModItem::ExternCrate(_)
- | ModItem::ExternBlock(_)
- | ModItem::Struct(_)
- | ModItem::Union(_)
- | ModItem::Enum(_)
- | ModItem::Static(_)
- | ModItem::Trait(_)
- | ModItem::TraitAlias(_)
- | ModItem::Impl(_)
- | ModItem::Mod(_)
- | ModItem::MacroRules(_)
- | ModItem::Macro2(_) => None,
- &ModItem::MacroCall(call) => Some(AssocItem::MacroCall(call)),
- &ModItem::Const(konst) => Some(AssocItem::Const(konst)),
- &ModItem::TypeAlias(alias) => Some(AssocItem::TypeAlias(alias)),
- &ModItem::Function(func) => Some(AssocItem::Function(func)),
- }
- }
-}
-
-#[derive(Debug, Copy, Clone, Eq, PartialEq)]
-pub enum AssocItem {
- Function(FileItemTreeId<Function>),
- TypeAlias(FileItemTreeId<TypeAlias>),
- Const(FileItemTreeId<Const>),
- MacroCall(FileItemTreeId<MacroCall>),
-}
-
-impl_froms!(AssocItem {
- Function(FileItemTreeId<Function>),
- TypeAlias(FileItemTreeId<TypeAlias>),
- Const(FileItemTreeId<Const>),
- MacroCall(FileItemTreeId<MacroCall>),
-});
-
-impl From<AssocItem> for ModItem {
- fn from(item: AssocItem) -> Self {
- match item {
- AssocItem::Function(it) => it.into(),
- AssocItem::TypeAlias(it) => it.into(),
- AssocItem::Const(it) => it.into(),
- AssocItem::MacroCall(it) => it.into(),
- }
- }
-}
-
-impl AssocItem {
- pub fn ast_id(self, tree: &ItemTree) -> FileAstId<ast::AssocItem> {
- match self {
- AssocItem::Function(id) => tree[id].ast_id.upcast(),
- AssocItem::TypeAlias(id) => tree[id].ast_id.upcast(),
- AssocItem::Const(id) => tree[id].ast_id.upcast(),
- AssocItem::MacroCall(id) => tree[id].ast_id.upcast(),
- }
- }
-}
diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs
index b490e1683c..5ab61c8939 100644
--- a/crates/hir-def/src/item_tree/lower.rs
+++ b/crates/hir-def/src/item_tree/lower.rs
@@ -2,42 +2,40 @@
use std::{cell::OnceCell, collections::hash_map::Entry};
+use base_db::FxIndexSet;
use hir_expand::{
HirFileId,
mod_path::PathKind,
name::AsName,
span_map::{SpanMap, SpanMapRef},
};
-use intern::{Symbol, sym};
use la_arena::Arena;
-use span::{AstIdMap, SyntaxContext};
+use span::{AstIdMap, FileAstId, SyntaxContext};
use syntax::{
AstNode,
- ast::{self, HasModuleItem, HasName, IsString},
+ ast::{self, HasModuleItem, HasName},
};
use triomphe::Arc;
use crate::{
db::DefDatabase,
item_tree::{
- AssocItem, AttrOwner, Const, Enum, ExternBlock, ExternCrate, Field, FieldParent,
- FieldsShape, FileItemTreeId, Function, Idx, Impl, ImportAlias, Interned, ItemTree,
- ItemTreeData, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, ModPath, Name, Range,
- RawAttrs, RawIdx, RawVisibility, RawVisibilityId, Static, Struct, StructKind, Trait,
- TraitAlias, TypeAlias, Union, Use, UseTree, UseTreeKind, Variant, VisibilityExplicitness,
+ BigModItem, Const, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl,
+ ImportAlias, Interned, ItemTree, ItemTreeAstId, Macro2, MacroCall, MacroRules, Mod,
+ ModItemId, ModKind, ModPath, RawAttrs, RawVisibility, RawVisibilityId, SmallModItem,
+ Static, Struct, StructKind, Trait, TraitAlias, TypeAlias, Union, Use, UseTree, UseTreeKind,
+ VisibilityExplicitness,
},
};
-fn id<N>(index: Idx<N>) -> FileItemTreeId<N> {
- FileItemTreeId(index)
-}
-
pub(super) struct Ctx<'a> {
db: &'a dyn DefDatabase,
tree: ItemTree,
source_ast_id_map: Arc<AstIdMap>,
span_map: OnceCell<SpanMap>,
file: HirFileId,
+ top_level: Vec<ModItemId>,
+ visibilities: FxIndexSet<RawVisibility>,
}
impl<'a> Ctx<'a> {
@@ -48,6 +46,8 @@ impl<'a> Ctx<'a> {
source_ast_id_map: db.ast_id_map(file),
file,
span_map: OnceCell::new(),
+ visibilities: FxIndexSet::default(),
+ top_level: Vec::new(),
}
}
@@ -56,13 +56,14 @@ impl<'a> Ctx<'a> {
}
pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree {
- self.tree.top_level =
- item_owner.items().flat_map(|item| self.lower_mod_item(&item)).collect();
+ self.top_level = item_owner.items().flat_map(|item| self.lower_mod_item(&item)).collect();
+ self.tree.vis.arena = self.visibilities.into_iter().collect();
+ self.tree.top_level = self.top_level.into_boxed_slice();
self.tree
}
pub(super) fn lower_macro_stmts(mut self, stmts: ast::MacroStmts) -> ItemTree {
- self.tree.top_level = stmts
+ self.top_level = stmts
.statements()
.filter_map(|stmt| {
match stmt {
@@ -86,17 +87,19 @@ impl<'a> Ctx<'a> {
if let Some(call) = tail_macro.macro_call() {
cov_mark::hit!(macro_stmt_with_trailing_macro_expr);
if let Some(mod_item) = self.lower_mod_item(&call.into()) {
- self.tree.top_level.push(mod_item);
+ self.top_level.push(mod_item);
}
}
}
+ self.tree.vis.arena = self.visibilities.into_iter().collect();
+ self.tree.top_level = self.top_level.into_boxed_slice();
self.tree
}
pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
- self.tree.attrs.insert(AttrOwner::TopLevel, RawAttrs::new(self.db, block, self.span_map()));
- self.tree.top_level = block
+ self.tree.top_attrs = RawAttrs::new(self.db, block, self.span_map());
+ self.top_level = block
.statements()
.filter_map(|stmt| match stmt {
ast::Stmt::Item(item) => self.lower_mod_item(&item),
@@ -112,20 +115,17 @@ impl<'a> Ctx<'a> {
if let Some(ast::Expr::MacroExpr(expr)) = block.tail_expr() {
if let Some(call) = expr.macro_call() {
if let Some(mod_item) = self.lower_mod_item(&call.into()) {
- self.tree.top_level.push(mod_item);
+ self.top_level.push(mod_item);
}
}
}
-
+ self.tree.vis.arena = self.visibilities.into_iter().collect();
+ self.tree.top_level = self.top_level.into_boxed_slice();
self.tree
}
- fn data(&mut self) -> &mut ItemTreeData {
- self.tree.data_mut()
- }
-
- fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItem> {
- let mod_item: ModItem = match item {
+ fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItemId> {
+ let mod_item: ModItemId = match item {
ast::Item::Struct(ast) => self.lower_struct(ast)?.into(),
ast::Item::Union(ast) => self.lower_union(ast)?.into(),
ast::Item::Enum(ast) => self.lower_enum(ast)?.into(),
@@ -143,14 +143,16 @@ impl<'a> Ctx<'a> {
ast::Item::MacroRules(ast) => self.lower_macro_rules(ast)?.into(),
ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
+ // FIXME: Handle `global_asm!()`.
+ ast::Item::AsmExpr(_) => return None,
};
let attrs = RawAttrs::new(self.db, item, self.span_map());
- self.add_attrs(mod_item.into(), attrs);
+ self.add_attrs(mod_item.ast_id(), attrs);
Some(mod_item)
}
- fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) {
+ fn add_attrs(&mut self, item: FileAstId<ast::Item>, attrs: RawAttrs) {
if !attrs.is_empty() {
match self.tree.attrs.entry(item) {
Entry::Occupied(mut entry) => {
@@ -163,208 +165,78 @@ impl<'a> Ctx<'a> {
}
}
- fn lower_assoc_item(&mut self, item_node: &ast::AssocItem) -> Option<AssocItem> {
- let item: AssocItem = match item_node {
- ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into),
- ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into),
- ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()),
- ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
- }?;
- let attrs = RawAttrs::new(self.db, item_node, self.span_map());
- self.add_attrs(
- match item {
- AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)),
- AssocItem::TypeAlias(it) => AttrOwner::ModItem(ModItem::TypeAlias(it)),
- AssocItem::Const(it) => AttrOwner::ModItem(ModItem::Const(it)),
- AssocItem::MacroCall(it) => AttrOwner::ModItem(ModItem::MacroCall(it)),
- },
- attrs,
- );
- Some(item)
- }
-
- fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
+ fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<ItemTreeAstId<Struct>> {
let visibility = self.lower_visibility(strukt);
let name = strukt.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(strukt);
- let (fields, kind, attrs) = self.lower_fields(&strukt.kind());
- let res = Struct { name, visibility, fields, shape: kind, ast_id };
- let id = id(self.data().structs.alloc(res));
-
- for (idx, attr) in attrs {
- self.add_attrs(
- AttrOwner::Field(
- FieldParent::Struct(id),
- Idx::from_raw(RawIdx::from_u32(idx as u32)),
- ),
- attr,
- );
- }
- Some(id)
- }
+ let shape = adt_shape(strukt.kind());
+ let res = Struct { name, visibility, shape };
+ self.tree.small_data.insert(ast_id.upcast(), SmallModItem::Struct(res));
- fn lower_fields(
- &mut self,
- strukt_kind: &ast::StructKind,
- ) -> (Box<[Field]>, FieldsShape, Vec<(usize, RawAttrs)>) {
- match strukt_kind {
- ast::StructKind::Record(it) => {
- let mut fields = vec![];
- let mut attrs = vec![];
-
- for (i, field) in it.fields().enumerate() {
- let data = self.lower_record_field(&field);
- fields.push(data);
- let attr = RawAttrs::new(self.db, &field, self.span_map());
- if !attr.is_empty() {
- attrs.push((i, attr))
- }
- }
- (fields.into(), FieldsShape::Record, attrs)
- }
- ast::StructKind::Tuple(it) => {
- let mut fields = vec![];
- let mut attrs = vec![];
-
- for (i, field) in it.fields().enumerate() {
- let data = self.lower_tuple_field(i, &field);
- fields.push(data);
- let attr = RawAttrs::new(self.db, &field, self.span_map());
- if !attr.is_empty() {
- attrs.push((i, attr))
- }
- }
- (fields.into(), FieldsShape::Tuple, attrs)
- }
- ast::StructKind::Unit => (Box::default(), FieldsShape::Unit, Vec::default()),
- }
- }
-
- fn lower_record_field(&mut self, field: &ast::RecordField) -> Field {
- let name = match field.name() {
- Some(name) => name.as_name(),
- None => Name::missing(),
- };
- let visibility = self.lower_visibility(field);
-
- Field { name, visibility, is_unsafe: field.unsafe_token().is_some() }
+ Some(ast_id)
}
- fn lower_tuple_field(&mut self, idx: usize, field: &ast::TupleField) -> Field {
- let name = Name::new_tuple_field(idx);
- let visibility = self.lower_visibility(field);
- Field { name, visibility, is_unsafe: false }
- }
-
- fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
+ fn lower_union(&mut self, union: &ast::Union) -> Option<ItemTreeAstId<Union>> {
let visibility = self.lower_visibility(union);
let name = union.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(union);
- let (fields, _, attrs) = match union.record_field_list() {
- Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
- None => (Box::default(), FieldsShape::Record, Vec::default()),
- };
- let res = Union { name, visibility, fields, ast_id };
- let id = id(self.data().unions.alloc(res));
- for (idx, attr) in attrs {
- self.add_attrs(
- AttrOwner::Field(
- FieldParent::Union(id),
- Idx::from_raw(RawIdx::from_u32(idx as u32)),
- ),
- attr,
- );
- }
- Some(id)
+ let res = Union { name, visibility };
+ self.tree.small_data.insert(ast_id.upcast(), SmallModItem::Union(res));
+ Some(ast_id)
}
- fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
+ fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<ItemTreeAstId<Enum>> {
let visibility = self.lower_visibility(enum_);
let name = enum_.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(enum_);
- let variants = match &enum_.variant_list() {
- Some(variant_list) => self.lower_variants(variant_list),
- None => {
- FileItemTreeId(self.next_variant_idx())..FileItemTreeId(self.next_variant_idx())
- }
- };
- let res = Enum { name, visibility, variants, ast_id };
- let id = id(self.data().enums.alloc(res));
- Some(id)
+ let res = Enum { name, visibility };
+ self.tree.small_data.insert(ast_id.upcast(), SmallModItem::Enum(res));
+ Some(ast_id)
}
- fn lower_variants(&mut self, variants: &ast::VariantList) -> Range<FileItemTreeId<Variant>> {
- let start = self.next_variant_idx();
- for variant in variants.variants() {
- let idx = self.lower_variant(&variant);
- self.add_attrs(id(idx).into(), RawAttrs::new(self.db, &variant, self.span_map()));
- }
- let end = self.next_variant_idx();
- FileItemTreeId(start)..FileItemTreeId(end)
- }
-
- fn lower_variant(&mut self, variant: &ast::Variant) -> Idx<Variant> {
- let name = match variant.name() {
- Some(name) => name.as_name(),
- None => Name::missing(),
- };
- let (fields, kind, attrs) = self.lower_fields(&variant.kind());
- let ast_id = self.source_ast_id_map.ast_id(variant);
- let res = Variant { name, fields, shape: kind, ast_id };
- let id = self.data().variants.alloc(res);
- for (idx, attr) in attrs {
- self.add_attrs(
- AttrOwner::Field(
- FieldParent::EnumVariant(FileItemTreeId(id)),
- Idx::from_raw(RawIdx::from_u32(idx as u32)),
- ),
- attr,
- );
- }
- id
- }
-
- fn lower_function(&mut self, func: &ast::Fn) -> Option<FileItemTreeId<Function>> {
+ fn lower_function(&mut self, func: &ast::Fn) -> Option<ItemTreeAstId<Function>> {
let visibility = self.lower_visibility(func);
let name = func.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(func);
- let res = Function { name, visibility, ast_id };
+ let res = Function { name, visibility };
- let id = id(self.data().functions.alloc(res));
- Some(id)
+ self.tree.small_data.insert(ast_id.upcast(), SmallModItem::Function(res));
+ Some(ast_id)
}
fn lower_type_alias(
&mut self,
type_alias: &ast::TypeAlias,
- ) -> Option<FileItemTreeId<TypeAlias>> {
+ ) -> Option<ItemTreeAstId<TypeAlias>> {
let name = type_alias.name()?.as_name();
let visibility = self.lower_visibility(type_alias);
let ast_id = self.source_ast_id_map.ast_id(type_alias);
- let res = TypeAlias { name, visibility, ast_id };
- let id = id(self.data().type_aliases.alloc(res));
- Some(id)
+ let res = TypeAlias { name, visibility };
+ self.tree.small_data.insert(ast_id.upcast(), SmallModItem::TypeAlias(res));
+ Some(ast_id)
}
- fn lower_static(&mut self, static_: &ast::Static) -> Option<FileItemTreeId<Static>> {
+ fn lower_static(&mut self, static_: &ast::Static) -> Option<ItemTreeAstId<Static>> {
let name = static_.name()?.as_name();
let visibility = self.lower_visibility(static_);
let ast_id = self.source_ast_id_map.ast_id(static_);
- let res = Static { name, visibility, ast_id };
- Some(id(self.data().statics.alloc(res)))
+ let res = Static { name, visibility };
+ self.tree.small_data.insert(ast_id.upcast(), SmallModItem::Static(res));
+ Some(ast_id)
}
- fn lower_const(&mut self, konst: &ast::Const) -> FileItemTreeId<Const> {
+ fn lower_const(&mut self, konst: &ast::Const) -> ItemTreeAstId<Const> {
let name = konst.name().map(|it| it.as_name());
let visibility = self.lower_visibility(konst);
let ast_id = self.source_ast_id_map.ast_id(konst);
- let res = Const { name, visibility, ast_id };
- id(self.data().consts.alloc(res))
+ let res = Const { name, visibility };
+ self.tree.small_data.insert(ast_id.upcast(), SmallModItem::Const(res));
+ ast_id
}
- fn lower_module(&mut self, module: &ast::Module) -> Option<FileItemTreeId<Mod>> {
+ fn lower_module(&mut self, module: &ast::Module) -> Option<ItemTreeAstId<Mod>> {
let name = module.name()?.as_name();
let visibility = self.lower_visibility(module);
let kind = if module.semicolon_token().is_some() {
@@ -381,70 +253,59 @@ impl<'a> Ctx<'a> {
}
};
let ast_id = self.source_ast_id_map.ast_id(module);
- let res = Mod { name, visibility, kind, ast_id };
- Some(id(self.data().mods.alloc(res)))
+ let res = Mod { name, visibility, kind };
+ self.tree.big_data.insert(ast_id.upcast(), BigModItem::Mod(res));
+ Some(ast_id)
}
- fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> {
+ fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<ItemTreeAstId<Trait>> {
let name = trait_def.name()?.as_name();
let visibility = self.lower_visibility(trait_def);
let ast_id = self.source_ast_id_map.ast_id(trait_def);
- let items = trait_def
- .assoc_item_list()
- .into_iter()
- .flat_map(|list| list.assoc_items())
- .filter_map(|item_node| self.lower_assoc_item(&item_node))
- .collect();
-
- let def = Trait { name, visibility, items, ast_id };
- let id = id(self.data().traits.alloc(def));
- Some(id)
+ let def = Trait { name, visibility };
+ self.tree.small_data.insert(ast_id.upcast(), SmallModItem::Trait(def));
+ Some(ast_id)
}
fn lower_trait_alias(
&mut self,
trait_alias_def: &ast::TraitAlias,
- ) -> Option<FileItemTreeId<TraitAlias>> {
+ ) -> Option<ItemTreeAstId<TraitAlias>> {
let name = trait_alias_def.name()?.as_name();
let visibility = self.lower_visibility(trait_alias_def);
let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
- let alias = TraitAlias { name, visibility, ast_id };
- let id = id(self.data().trait_aliases.alloc(alias));
- Some(id)
+ let alias = TraitAlias { name, visibility };
+ self.tree.small_data.insert(ast_id.upcast(), SmallModItem::TraitAlias(alias));
+ Some(ast_id)
}
- fn lower_impl(&mut self, impl_def: &ast::Impl) -> FileItemTreeId<Impl> {
+ fn lower_impl(&mut self, impl_def: &ast::Impl) -> ItemTreeAstId<Impl> {
let ast_id = self.source_ast_id_map.ast_id(impl_def);
- // We cannot use `assoc_items()` here as that does not include macro calls.
- let items = impl_def
- .assoc_item_list()
- .into_iter()
- .flat_map(|it| it.assoc_items())
- .filter_map(|item| self.lower_assoc_item(&item))
- .collect();
// Note that trait impls don't get implicit `Self` unlike traits, because here they are a
// type alias rather than a type parameter, so this is handled by the resolver.
- let res = Impl { items, ast_id };
- id(self.data().impls.alloc(res))
+ let res = Impl {};
+ self.tree.small_data.insert(ast_id.upcast(), SmallModItem::Impl(res));
+ ast_id
}
- fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Use>> {
+ fn lower_use(&mut self, use_item: &ast::Use) -> Option<ItemTreeAstId<Use>> {
let visibility = self.lower_visibility(use_item);
let ast_id = self.source_ast_id_map.ast_id(use_item);
let (use_tree, _) = lower_use_tree(self.db, use_item.use_tree()?, &mut |range| {
self.span_map().span_for_range(range).ctx
})?;
- let res = Use { visibility, ast_id, use_tree };
- Some(id(self.data().uses.alloc(res)))
+ let res = Use { visibility, use_tree };
+ self.tree.big_data.insert(ast_id.upcast(), BigModItem::Use(res));
+ Some(ast_id)
}
fn lower_extern_crate(
&mut self,
extern_crate: &ast::ExternCrate,
- ) -> Option<FileItemTreeId<ExternCrate>> {
+ ) -> Option<ItemTreeAstId<ExternCrate>> {
let name = extern_crate.name_ref()?.as_name();
let alias = extern_crate.rename().map(|a| {
a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias)
@@ -452,11 +313,12 @@ impl<'a> Ctx<'a> {
let visibility = self.lower_visibility(extern_crate);
let ast_id = self.source_ast_id_map.ast_id(extern_crate);
- let res = ExternCrate { name, alias, visibility, ast_id };
- Some(id(self.data().extern_crates.alloc(res)))
+ let res = ExternCrate { name, alias, visibility };
+ self.tree.big_data.insert(ast_id.upcast(), BigModItem::ExternCrate(res));
+ Some(ast_id)
}
- fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
+ fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<ItemTreeAstId<MacroCall>> {
let span_map = self.span_map();
let path = m.path()?;
let range = path.syntax().text_range();
@@ -465,31 +327,33 @@ impl<'a> Ctx<'a> {
})?);
let ast_id = self.source_ast_id_map.ast_id(m);
let expand_to = hir_expand::ExpandTo::from_call_site(m);
- let res = MacroCall { path, ast_id, expand_to, ctxt: span_map.span_for_range(range).ctx };
- Some(id(self.data().macro_calls.alloc(res)))
+ let res = MacroCall { path, expand_to, ctxt: span_map.span_for_range(range).ctx };
+ self.tree.small_data.insert(ast_id.upcast(), SmallModItem::MacroCall(res));
+ Some(ast_id)
}
- fn lower_macro_rules(&mut self, m: &ast::MacroRules) -> Option<FileItemTreeId<MacroRules>> {
+ fn lower_macro_rules(&mut self, m: &ast::MacroRules) -> Option<ItemTreeAstId<MacroRules>> {
let name = m.name()?;
let ast_id = self.source_ast_id_map.ast_id(m);
- let res = MacroRules { name: name.as_name(), ast_id };
- Some(id(self.data().macro_rules.alloc(res)))
+ let res = MacroRules { name: name.as_name() };
+ self.tree.small_data.insert(ast_id.upcast(), SmallModItem::MacroRules(res));
+ Some(ast_id)
}
- fn lower_macro_def(&mut self, m: &ast::MacroDef) -> Option<FileItemTreeId<Macro2>> {
+ fn lower_macro_def(&mut self, m: &ast::MacroDef) -> Option<ItemTreeAstId<Macro2>> {
let name = m.name()?;
let ast_id = self.source_ast_id_map.ast_id(m);
let visibility = self.lower_visibility(m);
- let res = Macro2 { name: name.as_name(), ast_id, visibility };
- Some(id(self.data().macro_defs.alloc(res)))
+ let res = Macro2 { name: name.as_name(), visibility };
+ self.tree.small_data.insert(ast_id.upcast(), SmallModItem::Macro2(res));
+ Some(ast_id)
}
- fn lower_extern_block(&mut self, block: &ast::ExternBlock) -> FileItemTreeId<ExternBlock> {
+ fn lower_extern_block(&mut self, block: &ast::ExternBlock) -> ItemTreeAstId<ExternBlock> {
let ast_id = self.source_ast_id_map.ast_id(block);
- let abi = block.abi().map(lower_abi);
let children: Box<[_]> = block.extern_item_list().map_or(Box::new([]), |list| {
list.extern_items()
.filter_map(|item| {
@@ -497,42 +361,44 @@ impl<'a> Ctx<'a> {
// (in other words, the knowledge that they're in an extern block must not be used).
// This is because an extern block can contain macros whose ItemTree's top-level items
// should be considered to be in an extern block too.
- let mod_item: ModItem = match &item {
+ let mod_item: ModItemId = match &item {
ast::ExternItem::Fn(ast) => self.lower_function(ast)?.into(),
ast::ExternItem::Static(ast) => self.lower_static(ast)?.into(),
ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
};
let attrs = RawAttrs::new(self.db, &item, self.span_map());
- self.add_attrs(mod_item.into(), attrs);
+ self.add_attrs(mod_item.ast_id(), attrs);
Some(mod_item)
})
.collect()
});
- let res = ExternBlock { abi, ast_id, children };
- id(self.data().extern_blocks.alloc(res))
+ let res = ExternBlock { children };
+ self.tree.small_data.insert(ast_id.upcast(), SmallModItem::ExternBlock(res));
+ ast_id
}
fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId {
let vis = visibility_from_ast(self.db, item.visibility(), &mut |range| {
self.span_map().span_for_range(range).ctx
});
- self.data().vis.alloc(vis)
- }
-
- fn next_variant_idx(&self) -> Idx<Variant> {
- Idx::from_raw(RawIdx::from(
- self.tree.data.as_ref().map_or(0, |data| data.variants.len() as u32),
- ))
- }
-}
-
-fn lower_abi(abi: ast::Abi) -> Symbol {
- match abi.abi_string() {
- Some(tok) => Symbol::intern(tok.text_without_quotes()),
- // `extern` default to be `extern "C"`.
- _ => sym::C,
+ match &vis {
+ RawVisibility::Public => RawVisibilityId::PUB,
+ RawVisibility::Module(path, explicitness) if path.segments().is_empty() => {
+ match (path.kind, explicitness) {
+ (PathKind::SELF, VisibilityExplicitness::Explicit) => {
+ RawVisibilityId::PRIV_EXPLICIT
+ }
+ (PathKind::SELF, VisibilityExplicitness::Implicit) => {
+ RawVisibilityId::PRIV_IMPLICIT
+ }
+ (PathKind::Crate, _) => RawVisibilityId::PUB_CRATE,
+ _ => RawVisibilityId(self.visibilities.insert_full(vis).0 as u32),
+ }
+ }
+ _ => RawVisibilityId(self.visibilities.insert_full(vis).0 as u32),
+ }
}
}
@@ -561,17 +427,15 @@ impl UseTreeLowering<'_> {
}
};
+ self.mapping.alloc(tree.clone());
let list = use_tree_list
.use_trees()
.filter_map(|tree| self.lower_use_tree(tree, span_for_range))
.collect();
- Some(
- self.use_tree(
- UseTreeKind::Prefixed { prefix: prefix.map(Interned::new), list },
- tree,
- ),
- )
+ Some(UseTree {
+ kind: UseTreeKind::Prefixed { prefix: prefix.map(Interned::new), list },
+ })
} else {
let is_glob = tree.star_token().is_some();
let path = match tree.path() {
@@ -590,23 +454,20 @@ impl UseTreeLowering<'_> {
if path.is_none() {
cov_mark::hit!(glob_enum_group);
}
- Some(self.use_tree(UseTreeKind::Glob { path: path.map(Interned::new) }, tree))
+ self.mapping.alloc(tree.clone());
+ Some(UseTree { kind: UseTreeKind::Glob { path: path.map(Interned::new) } })
}
// Globs can't be renamed
(_, Some(_), true) | (None, None, false) => None,
// `bla::{ as Name}` is invalid
(None, Some(_), false) => None,
- (Some(path), alias, false) => Some(
- self.use_tree(UseTreeKind::Single { path: Interned::new(path), alias }, tree),
- ),
+ (Some(path), alias, false) => {
+ self.mapping.alloc(tree.clone());
+ Some(UseTree { kind: UseTreeKind::Single { path: Interned::new(path), alias } })
+ }
}
}
}
-
- fn use_tree(&mut self, kind: UseTreeKind, ast: ast::UseTree) -> UseTree {
- let index = self.mapping.alloc(ast);
- UseTree { index, kind }
- }
}
pub(crate) fn lower_use_tree(
@@ -626,7 +487,7 @@ fn private_vis() -> RawVisibility {
)
}
-fn visibility_from_ast(
+pub(crate) fn visibility_from_ast(
db: &dyn DefDatabase,
node: Option<ast::Visibility>,
span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext,
@@ -647,3 +508,11 @@ fn visibility_from_ast(
};
RawVisibility::Module(Interned::new(path), VisibilityExplicitness::Explicit)
}
+
+fn adt_shape(kind: StructKind) -> FieldsShape {
+ match kind {
+ StructKind::Record(_) => FieldsShape::Record,
+ StructKind::Tuple(_) => FieldsShape::Tuple,
+ StructKind::Unit => FieldsShape::Unit,
+ }
+}
diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs
index 47c6eb1329..696174cb07 100644
--- a/crates/hir-def/src/item_tree/pretty.rs
+++ b/crates/hir-def/src/item_tree/pretty.rs
@@ -2,15 +2,13 @@
use std::fmt::{self, Write};
-use la_arena::{Idx, RawIdx};
use span::{Edition, ErasedFileAstId};
use crate::{
item_tree::{
- AttrOwner, Const, DefDatabase, Enum, ExternBlock, ExternCrate, Field, FieldParent,
- FieldsShape, FileItemTreeId, Function, Impl, ItemTree, Macro2, MacroCall, MacroRules, Mod,
- ModItem, ModKind, RawAttrs, RawVisibilityId, Static, Struct, Trait, TraitAlias, TypeAlias,
- Union, Use, UseTree, UseTreeKind, Variant,
+ Const, DefDatabase, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl, ItemTree,
+ Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawAttrs, RawVisibilityId, Static,
+ Struct, Trait, TraitAlias, TypeAlias, Union, Use, UseTree, UseTreeKind,
},
visibility::RawVisibility,
};
@@ -19,9 +17,7 @@ pub(super) fn print_item_tree(db: &dyn DefDatabase, tree: &ItemTree, edition: Ed
let mut p =
Printer { db, tree, buf: String::new(), indent_level: 0, needs_indent: true, edition };
- if let Some(attrs) = tree.attrs.get(&AttrOwner::TopLevel) {
- p.print_attrs(attrs, true, "\n");
- }
+ p.print_attrs(&tree.top_attrs, true, "\n");
p.blank();
for item in tree.top_level_items() {
@@ -103,8 +99,8 @@ impl Printer<'_> {
}
}
- fn print_attrs_of(&mut self, of: impl Into<AttrOwner>, separated_by: &str) {
- if let Some(attrs) = self.tree.attrs.get(&of.into()) {
+ fn print_attrs_of(&mut self, of: ModItemId, separated_by: &str) {
+ if let Some(attrs) = self.tree.attrs.get(&of.ast_id()) {
self.print_attrs(attrs, false, separated_by);
}
}
@@ -112,50 +108,22 @@ impl Printer<'_> {
fn print_visibility(&mut self, vis: RawVisibilityId) {
match &self.tree[vis] {
RawVisibility::Module(path, _expl) => {
- w!(self, "pub({}) ", path.display(self.db, self.edition))
+ w!(self, "pub(in {}) ", path.display(self.db, self.edition))
}
RawVisibility::Public => w!(self, "pub "),
+ RawVisibility::PubCrate => w!(self, "pub(crate) "),
+ RawVisibility::PubSelf(_) => w!(self, "pub(self) "),
};
}
- fn print_fields(&mut self, parent: FieldParent, kind: FieldsShape, fields: &[Field]) {
- let edition = self.edition;
+ fn print_fields(&mut self, kind: FieldsShape) {
match kind {
FieldsShape::Record => {
self.whitespace();
- w!(self, "{{");
- self.indented(|this| {
- for (idx, Field { name, visibility, is_unsafe }) in fields.iter().enumerate() {
- this.print_attrs_of(
- AttrOwner::Field(parent, Idx::from_raw(RawIdx::from(idx as u32))),
- "\n",
- );
- this.print_visibility(*visibility);
- if *is_unsafe {
- w!(this, "unsafe ");
- }
-
- wln!(this, "{},", name.display(self.db, edition));
- }
- });
- w!(self, "}}");
+ w!(self, "{{ ... }}");
}
FieldsShape::Tuple => {
- w!(self, "(");
- self.indented(|this| {
- for (idx, Field { name, visibility, is_unsafe }) in fields.iter().enumerate() {
- this.print_attrs_of(
- AttrOwner::Field(parent, Idx::from_raw(RawIdx::from(idx as u32))),
- "\n",
- );
- this.print_visibility(*visibility);
- if *is_unsafe {
- w!(this, "unsafe ");
- }
- wln!(this, "{},", name.display(self.db, edition));
- }
- });
- w!(self, ")");
+ w!(self, "(...)");
}
FieldsShape::Unit => {}
}
@@ -191,20 +159,20 @@ impl Printer<'_> {
}
}
- fn print_mod_item(&mut self, item: ModItem) {
+ fn print_mod_item(&mut self, item: ModItemId) {
self.print_attrs_of(item, "\n");
match item {
- ModItem::Use(it) => {
- let Use { visibility, use_tree, ast_id } = &self.tree[it];
+ ModItemId::Use(ast_id) => {
+ let Use { visibility, use_tree } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "use ");
self.print_use_tree(use_tree);
wln!(self, ";");
}
- ModItem::ExternCrate(it) => {
- let ExternCrate { name, alias, visibility, ast_id } = &self.tree[it];
+ ModItemId::ExternCrate(ast_id) => {
+ let ExternCrate { name, alias, visibility } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "extern crate {}", name.display(self.db, self.edition));
@@ -213,14 +181,10 @@ impl Printer<'_> {
}
wln!(self, ";");
}
- ModItem::ExternBlock(it) => {
- let ExternBlock { abi, ast_id, children } = &self.tree[it];
+ ModItemId::ExternBlock(ast_id) => {
+ let ExternBlock { children } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
- w!(self, "extern ");
- if let Some(abi) = abi {
- w!(self, "\"{}\" ", abi);
- }
- w!(self, "{{");
+ w!(self, "extern {{");
self.indented(|this| {
for child in &**children {
this.print_mod_item(*child);
@@ -228,52 +192,40 @@ impl Printer<'_> {
});
wln!(self, "}}");
}
- ModItem::Function(it) => {
- let Function { name, visibility, ast_id } = &self.tree[it];
+ ModItemId::Function(ast_id) => {
+ let Function { name, visibility } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
wln!(self, "fn {};", name.display(self.db, self.edition));
}
- ModItem::Struct(it) => {
- let Struct { visibility, name, fields, shape: kind, ast_id } = &self.tree[it];
+ ModItemId::Struct(ast_id) => {
+ let Struct { visibility, name, shape: kind } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "struct {}", name.display(self.db, self.edition));
- self.print_fields(FieldParent::Struct(it), *kind, fields);
+ self.print_fields(*kind);
if matches!(kind, FieldsShape::Record) {
wln!(self);
} else {
wln!(self, ";");
}
}
- ModItem::Union(it) => {
- let Union { name, visibility, fields, ast_id } = &self.tree[it];
+ ModItemId::Union(ast_id) => {
+ let Union { name, visibility } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "union {}", name.display(self.db, self.edition));
- self.print_fields(FieldParent::Union(it), FieldsShape::Record, fields);
+ self.print_fields(FieldsShape::Record);
wln!(self);
}
- ModItem::Enum(it) => {
- let Enum { name, visibility, variants, ast_id } = &self.tree[it];
+ ModItemId::Enum(ast_id) => {
+ let Enum { name, visibility } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
- w!(self, "enum {}", name.display(self.db, self.edition));
- let edition = self.edition;
- self.indented(|this| {
- for variant in FileItemTreeId::range_iter(variants.clone()) {
- let Variant { name, fields, shape: kind, ast_id } = &this.tree[variant];
- this.print_ast_id(ast_id.erase());
- this.print_attrs_of(variant, "\n");
- w!(this, "{}", name.display(self.db, edition));
- this.print_fields(FieldParent::EnumVariant(variant), *kind, fields);
- wln!(this, ",");
- }
- });
- wln!(self, "}}");
+ w!(self, "enum {} {{ ... }}", name.display(self.db, self.edition));
}
- ModItem::Const(it) => {
- let Const { name, visibility, ast_id } = &self.tree[it];
+ ModItemId::Const(ast_id) => {
+ let Const { name, visibility } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "const ");
@@ -283,8 +235,8 @@ impl Printer<'_> {
}
wln!(self, " = _;");
}
- ModItem::Static(it) => {
- let Static { name, visibility, ast_id } = &self.tree[it];
+ ModItemId::Static(ast_id) => {
+ let Static { name, visibility } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "static ");
@@ -292,45 +244,33 @@ impl Printer<'_> {
w!(self, " = _;");
wln!(self);
}
- ModItem::Trait(it) => {
- let Trait { name, visibility, items, ast_id } = &self.tree[it];
+ ModItemId::Trait(ast_id) => {
+ let Trait { name, visibility } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
- w!(self, "trait {} {{", name.display(self.db, self.edition));
- self.indented(|this| {
- for item in &**items {
- this.print_mod_item((*item).into());
- }
- });
- wln!(self, "}}");
+ w!(self, "trait {} {{ ... }}", name.display(self.db, self.edition));
}
- ModItem::TraitAlias(it) => {
- let TraitAlias { name, visibility, ast_id } = &self.tree[it];
+ ModItemId::TraitAlias(ast_id) => {
+ let TraitAlias { name, visibility } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
wln!(self, "trait {} = ..;", name.display(self.db, self.edition));
}
- ModItem::Impl(it) => {
- let Impl { items, ast_id } = &self.tree[it];
+ ModItemId::Impl(ast_id) => {
+ let Impl {} = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
- w!(self, "impl {{");
- self.indented(|this| {
- for item in &**items {
- this.print_mod_item((*item).into());
- }
- });
- wln!(self, "}}");
+ w!(self, "impl {{ ... }}");
}
- ModItem::TypeAlias(it) => {
- let TypeAlias { name, visibility, ast_id } = &self.tree[it];
+ ModItemId::TypeAlias(ast_id) => {
+ let TypeAlias { name, visibility } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "type {}", name.display(self.db, self.edition));
w!(self, ";");
wln!(self);
}
- ModItem::Mod(it) => {
- let Mod { name, visibility, kind, ast_id } = &self.tree[it];
+ ModItemId::Mod(ast_id) => {
+ let Mod { name, visibility, kind } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "mod {}", name.display(self.db, self.edition));
@@ -349,24 +289,24 @@ impl Printer<'_> {
}
}
}
- ModItem::MacroCall(it) => {
- let MacroCall { path, ast_id, expand_to, ctxt } = &self.tree[it];
+ ModItemId::MacroCall(ast_id) => {
+ let MacroCall { path, expand_to, ctxt } = &self.tree[ast_id];
let _ = writeln!(
self,
- "// AstId: {:?}, SyntaxContextId: {}, ExpandTo: {:?}",
- ast_id.erase().into_raw(),
+ "// AstId: {:#?}, SyntaxContextId: {}, ExpandTo: {:?}",
+ ast_id.erase(),
ctxt,
expand_to
);
wln!(self, "{}!(...);", path.display(self.db, self.edition));
}
- ModItem::MacroRules(it) => {
- let MacroRules { name, ast_id } = &self.tree[it];
+ ModItemId::MacroRules(ast_id) => {
+ let MacroRules { name } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
wln!(self, "macro_rules! {} {{ ... }}", name.display(self.db, self.edition));
}
- ModItem::Macro2(it) => {
- let Macro2 { name, visibility, ast_id } = &self.tree[it];
+ ModItemId::Macro2(ast_id) => {
+ let Macro2 { name, visibility } = &self.tree[ast_id];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
wln!(self, "macro {} {{ ... }}", name.display(self.db, self.edition));
@@ -377,7 +317,7 @@ impl Printer<'_> {
}
fn print_ast_id(&mut self, ast_id: ErasedFileAstId) {
- wln!(self, "// AstId: {:?}", ast_id.into_raw());
+ wln!(self, "// AstId: {ast_id:#?}");
}
}
diff --git a/crates/hir-def/src/item_tree/tests.rs b/crates/hir-def/src/item_tree/tests.rs
index 824fbfa592..91b42bef8f 100644
--- a/crates/hir-def/src/item_tree/tests.rs
+++ b/crates/hir-def/src/item_tree/tests.rs
@@ -35,23 +35,23 @@ use a::{c, d::{e}};
#![no_std]
#![doc = " another file comment"]
- // AstId: 1
+ // AstId: ExternCrate[070B, 0]
pub(self) extern crate self as renamed;
- // AstId: 2
- pub(super) extern crate bli;
+ // AstId: ExternCrate[1EA5, 0]
+ pub(in super) extern crate bli;
- // AstId: 3
+ // AstId: Use[0000, 0]
pub use crate::path::{nested, items as renamed, Trait as _};
- // AstId: 4
+ // AstId: Use[0000, 1]
pub(self) use globs::*;
#[doc = " docs on import"]
- // AstId: 5
+ // AstId: Use[0000, 2]
pub(self) use crate::{A, B};
- // AstId: 6
+ // AstId: Use[0000, 3]
pub(self) use a::{c, d::{e}};
"##]],
);
@@ -73,23 +73,23 @@ extern "C" {
fn ex_fn();
}
"#,
- expect![[r##"
+ expect![[r#"
#[on_extern_block]
- // AstId: 1
- extern "C" {
+ // AstId: ExternBlock[0000, 0]
+ extern {
#[on_extern_type]
- // AstId: 2
+ // AstId: TypeAlias[A09C, 0]
pub(self) type ExType;
#[on_extern_static]
- // AstId: 3
+ // AstId: Static[D85E, 0]
pub(self) static EX_STATIC = _;
#[on_extern_fn]
- // AstId: 4
+ // AstId: Fn[B240, 0]
pub(self) fn ex_fn;
}
- "##]],
+ "#]],
);
}
@@ -124,44 +124,21 @@ enum E {
}
"#,
expect![[r#"
- // AstId: 1
+ // AstId: Struct[ED35, 0]
pub(self) struct Unit;
#[derive(Debug)]
- // AstId: 2
- pub(self) struct Struct {
- #[doc = " fld docs"]
- pub(self) fld,
- }
+ // AstId: Struct[A47C, 0]
+ pub(self) struct Struct { ... }
- // AstId: 3
- pub(self) struct Tuple(
- #[attr]
- pub(self) 0,
- );
+ // AstId: Struct[C8C9, 0]
+ pub(self) struct Tuple(...);
- // AstId: 4
- pub(self) union Ize {
- pub(self) a,
- pub(self) b,
- }
+ // AstId: Union[2797, 0]
+ pub(self) union Ize { ... }
- // AstId: 5
- pub(self) enum E
- // AstId: 6
- #[doc = " comment on Unit"]
- Unit,
- // AstId: 7
- #[doc = " comment on Tuple"]
- Tuple(
- pub(self) 0,
- ),
- // AstId: 8
- Struct {
- #[doc = " comment on a: u8"]
- pub(self) a,
- },
- }
+ // AstId: Enum[7D23, 0]
+ pub(self) enum E { ... }
"#]],
);
}
@@ -185,25 +162,19 @@ trait Tr: SuperTrait + 'lifetime {
}
"#,
expect![[r#"
- // AstId: 1
+ // AstId: Static[F7C1, 0]
pub static ST = _;
- // AstId: 2
+ // AstId: Const[84BB, 0]
pub(self) const _ = _;
#[attr]
#[inner_attr_in_fn]
- // AstId: 3
+ // AstId: Fn[BE8F, 0]
pub(self) fn f;
- // AstId: 4
- pub(self) trait Tr {
- // AstId: 6
- pub(self) type Assoc;
-
- // AstId: 7
- pub(self) fn method;
- }
+ // AstId: Trait[9320, 0]
+ pub(self) trait Tr { ... }
"#]],
);
}
@@ -226,16 +197,16 @@ mod outline;
expect![[r##"
#[doc = " outer"]
#[doc = " inner"]
- // AstId: 1
+ // AstId: Module[03AE, 0]
pub(self) mod inline {
- // AstId: 3
+ // AstId: Use[0000, 0]
pub(self) use super::*;
- // AstId: 4
+ // AstId: Fn[2A78, 0]
pub(self) fn fn_in_module;
}
- // AstId: 2
+ // AstId: Module[C08B, 0]
pub(self) mod outline;
"##]],
);
@@ -254,13 +225,13 @@ pub macro m2() {}
m!();
"#,
expect![[r#"
- // AstId: 1
+ // AstId: MacroRules[7E68, 0]
macro_rules! m { ... }
- // AstId: 2
+ // AstId: MacroDef[1C1E, 0]
pub macro m2 { ... }
- // AstId: 3, SyntaxContextId: ROOT2024, ExpandTo: Items
+ // AstId: MacroCall[7E68, 0], SyntaxContextId: ROOT2024, ExpandTo: Items
m!(...);
"#]],
);
@@ -273,7 +244,7 @@ fn pub_self() {
pub(self) struct S;
"#,
expect![[r#"
- // AstId: 1
+ // AstId: Struct[5024, 0]
pub(self) struct S;
"#]],
)
diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs
index 4ad44775ea..750308026e 100644
--- a/crates/hir-def/src/lang_item.rs
+++ b/crates/hir-def/src/lang_item.rs
@@ -9,8 +9,10 @@ use triomphe::Arc;
use crate::{
AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId,
- StaticId, StructId, TraitId, TypeAliasId, UnionId, db::DefDatabase, expr_store::path::Path,
- nameres::crate_def_map,
+ StaticId, StructId, TraitId, TypeAliasId, UnionId,
+ db::DefDatabase,
+ expr_store::path::Path,
+ nameres::{assoc::TraitItems, crate_def_map},
};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -96,7 +98,7 @@ pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangIt
for (_, module_data) in crate_def_map.modules() {
for impl_def in module_data.scope.impls() {
lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDef);
- for &(_, assoc) in db.impl_items(impl_def).items.iter() {
+ for &(_, assoc) in impl_def.impl_items(db).items.iter() {
match assoc {
AssocItemId::FunctionId(f) => {
lang_items.collect_lang_item(db, f, LangItemTarget::Function)
@@ -113,19 +115,21 @@ pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangIt
match def {
ModuleDefId::TraitId(trait_) => {
lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait);
- db.trait_items(trait_).items.iter().for_each(|&(_, assoc_id)| match assoc_id {
- AssocItemId::FunctionId(f) => {
- lang_items.collect_lang_item(db, f, LangItemTarget::Function);
+ TraitItems::query(db, trait_).items.iter().for_each(|&(_, assoc_id)| {
+ match assoc_id {
+ AssocItemId::FunctionId(f) => {
+ lang_items.collect_lang_item(db, f, LangItemTarget::Function);
+ }
+ AssocItemId::TypeAliasId(alias) => {
+ lang_items.collect_lang_item(db, alias, LangItemTarget::TypeAlias)
+ }
+ AssocItemId::ConstId(_) => {}
}
- AssocItemId::TypeAliasId(alias) => {
- lang_items.collect_lang_item(db, alias, LangItemTarget::TypeAlias)
- }
- AssocItemId::ConstId(_) => {}
});
}
ModuleDefId::AdtId(AdtId::EnumId(e)) => {
lang_items.collect_lang_item(db, e, LangItemTarget::EnumId);
- db.enum_variants(e).variants.iter().for_each(|&(id, _)| {
+ e.enum_variants(db).variants.iter().for_each(|&(id, _, _)| {
lang_items.collect_lang_item(db, id, LangItemTarget::EnumVariant);
});
}
@@ -304,6 +308,8 @@ impl LangItem {
language_item_table! {
// Variant name, Name, Getter method name, Target Generic requirements;
Sized, sym::sized, sized_trait, Target::Trait, GenericRequirement::Exact(0);
+ MetaSized, sym::meta_sized, sized_trait, Target::Trait, GenericRequirement::Exact(0);
+ PointeeSized, sym::pointee_sized, sized_trait, Target::Trait, GenericRequirement::Exact(0);
Unsize, sym::unsize, unsize_trait, Target::Trait, GenericRequirement::Minimum(1);
/// Trait injected by `#[derive(PartialEq)]`, (i.e. "Partial EQ").
StructuralPeq, sym::structural_peq, structural_peq_trait, Target::Trait, GenericRequirement::None;
@@ -377,6 +383,7 @@ language_item_table! {
AsyncFnMut, sym::async_fn_mut, async_fn_mut_trait, Target::Trait, GenericRequirement::Exact(1);
AsyncFnOnce, sym::async_fn_once, async_fn_once_trait, Target::Trait, GenericRequirement::Exact(1);
+ AsyncFnOnceOutput, sym::async_fn_once_output,async_fn_once_output, Target::AssocTy, GenericRequirement::None;
FnOnceOutput, sym::fn_once_output, fn_once_output, Target::AssocTy, GenericRequirement::None;
Future, sym::future_trait, future_trait, Target::Trait, GenericRequirement::Exact(0);
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index b41ff026bc..bdf8b453e2 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -49,8 +49,9 @@ pub mod find_path;
pub mod import_map;
pub mod visibility;
-use intern::{Interned, sym};
+use intern::{Interned, Symbol, sym};
pub use rustc_abi as layout;
+use thin_vec::ThinVec;
use triomphe::Arc;
pub use crate::signatures::LocalFieldId;
@@ -74,12 +75,11 @@ use hir_expand::{
name::Name,
proc_macro::{CustomProcMacroExpander, ProcMacroKind},
};
-use item_tree::ExternBlock;
use la_arena::Idx;
use nameres::DefMap;
use span::{AstIdNode, Edition, FileAstId, SyntaxContext};
use stdx::impl_from;
-use syntax::ast;
+use syntax::{AstNode, ast};
pub use hir_expand::{Intern, Lookup, tt};
@@ -87,13 +87,15 @@ use crate::{
attr::Attrs,
builtin_type::BuiltinType,
db::DefDatabase,
+ expr_store::ExpressionStoreSourceMap,
hir::generics::{LocalLifetimeParamId, LocalTypeOrConstParamId},
- item_tree::{
- Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeNode, Macro2, MacroRules,
- Static, Struct, Trait, TraitAlias, TypeAlias, Union, Use, Variant,
+ nameres::{
+ LocalDefMap,
+ assoc::{ImplItems, TraitItems},
+ block_def_map, crate_def_map, crate_local_def_map,
+ diagnostics::DefDiagnostics,
},
- nameres::{LocalDefMap, block_def_map, crate_def_map, crate_local_def_map},
- signatures::VariantFields,
+ signatures::{EnumVariants, InactiveEnumVariantCode, VariantFields},
};
type FxIndexMap<K, V> = indexmap::IndexMap<K, V, rustc_hash::FxBuildHasher>;
@@ -113,70 +115,111 @@ pub struct ImportPathConfig {
}
#[derive(Debug)]
-pub struct ItemLoc<N: ItemTreeNode> {
+pub struct ItemLoc<N: AstIdNode> {
pub container: ModuleId,
- pub id: ItemTreeId<N>,
+ pub id: AstId<N>,
}
-impl<N: ItemTreeNode> Clone for ItemLoc<N> {
+impl<N: AstIdNode> Clone for ItemLoc<N> {
fn clone(&self) -> Self {
*self
}
}
-impl<N: ItemTreeNode> Copy for ItemLoc<N> {}
+impl<N: AstIdNode> Copy for ItemLoc<N> {}
-impl<N: ItemTreeNode> PartialEq for ItemLoc<N> {
+impl<N: AstIdNode> PartialEq for ItemLoc<N> {
fn eq(&self, other: &Self) -> bool {
self.container == other.container && self.id == other.id
}
}
-impl<N: ItemTreeNode> Eq for ItemLoc<N> {}
+impl<N: AstIdNode> Eq for ItemLoc<N> {}
-impl<N: ItemTreeNode> Hash for ItemLoc<N> {
+impl<N: AstIdNode> Hash for ItemLoc<N> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.container.hash(state);
self.id.hash(state);
}
}
+impl<N: AstIdNode> HasModule for ItemLoc<N> {
+ #[inline]
+ fn module(&self, _db: &dyn DefDatabase) -> ModuleId {
+ self.container
+ }
+}
+
#[derive(Debug)]
-pub struct AssocItemLoc<N: ItemTreeNode> {
+pub struct AssocItemLoc<N: AstIdNode> {
+ // FIXME: Store this as an erased `salsa::Id` to save space
pub container: ItemContainerId,
- pub id: ItemTreeId<N>,
+ pub id: AstId<N>,
}
-impl<N: ItemTreeNode> Clone for AssocItemLoc<N> {
+impl<N: AstIdNode> Clone for AssocItemLoc<N> {
fn clone(&self) -> Self {
*self
}
}
-impl<N: ItemTreeNode> Copy for AssocItemLoc<N> {}
+impl<N: AstIdNode> Copy for AssocItemLoc<N> {}
-impl<N: ItemTreeNode> PartialEq for AssocItemLoc<N> {
+impl<N: AstIdNode> PartialEq for AssocItemLoc<N> {
fn eq(&self, other: &Self) -> bool {
self.container == other.container && self.id == other.id
}
}
-impl<N: ItemTreeNode> Eq for AssocItemLoc<N> {}
+impl<N: AstIdNode> Eq for AssocItemLoc<N> {}
-impl<N: ItemTreeNode> Hash for AssocItemLoc<N> {
+impl<N: AstIdNode> Hash for AssocItemLoc<N> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.container.hash(state);
self.id.hash(state);
}
}
-pub trait ItemTreeLoc {
+impl<N: AstIdNode> HasModule for AssocItemLoc<N> {
+ #[inline]
+ fn module(&self, db: &dyn DefDatabase) -> ModuleId {
+ self.container.module(db)
+ }
+}
+
+pub trait AstIdLoc {
type Container;
- type Id;
- fn item_tree_id(&self) -> ItemTreeId<Self::Id>;
+ type Ast: AstNode;
+ fn ast_id(&self) -> AstId<Self::Ast>;
fn container(&self) -> Self::Container;
}
+impl<N: AstIdNode> AstIdLoc for ItemLoc<N> {
+ type Container = ModuleId;
+ type Ast = N;
+ #[inline]
+ fn ast_id(&self) -> AstId<Self::Ast> {
+ self.id
+ }
+ #[inline]
+ fn container(&self) -> Self::Container {
+ self.container
+ }
+}
+
+impl<N: AstIdNode> AstIdLoc for AssocItemLoc<N> {
+ type Container = ItemContainerId;
+ type Ast = N;
+ #[inline]
+ fn ast_id(&self) -> AstId<Self::Ast> {
+ self.id
+ }
+ #[inline]
+ fn container(&self) -> Self::Container {
+ self.container
+ }
+}
+
macro_rules! impl_intern {
($id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
impl_intern_key!($id, $loc);
@@ -186,94 +229,170 @@ macro_rules! impl_intern {
macro_rules! impl_loc {
($loc:ident, $id:ident: $id_ty:ident, $container:ident: $container_type:ident) => {
- impl ItemTreeLoc for $loc {
+ impl AstIdLoc for $loc {
type Container = $container_type;
- type Id = $id_ty;
- fn item_tree_id(&self) -> ItemTreeId<Self::Id> {
+ type Ast = ast::$id_ty;
+ fn ast_id(&self) -> AstId<Self::Ast> {
self.$id
}
fn container(&self) -> Self::Container {
self.$container
}
}
+
+ impl HasModule for $loc {
+ #[inline]
+ fn module(&self, db: &dyn DefDatabase) -> ModuleId {
+ self.$container.module(db)
+ }
+ }
};
}
-type FunctionLoc = AssocItemLoc<Function>;
+type FunctionLoc = AssocItemLoc<ast::Fn>;
impl_intern!(FunctionId, FunctionLoc, intern_function, lookup_intern_function);
-impl_loc!(FunctionLoc, id: Function, container: ItemContainerId);
-type StructLoc = ItemLoc<Struct>;
+type StructLoc = ItemLoc<ast::Struct>;
impl_intern!(StructId, StructLoc, intern_struct, lookup_intern_struct);
-impl_loc!(StructLoc, id: Struct, container: ModuleId);
-pub type UnionLoc = ItemLoc<Union>;
+impl StructId {
+ pub fn fields(self, db: &dyn DefDatabase) -> &VariantFields {
+ VariantFields::firewall(db, self.into())
+ }
+
+ pub fn fields_with_source_map(
+ self,
+ db: &dyn DefDatabase,
+ ) -> (Arc<VariantFields>, Arc<ExpressionStoreSourceMap>) {
+ VariantFields::query(db, self.into())
+ }
+}
+
+pub type UnionLoc = ItemLoc<ast::Union>;
impl_intern!(UnionId, UnionLoc, intern_union, lookup_intern_union);
-impl_loc!(UnionLoc, id: Union, container: ModuleId);
-pub type EnumLoc = ItemLoc<Enum>;
+impl UnionId {
+ pub fn fields(self, db: &dyn DefDatabase) -> &VariantFields {
+ VariantFields::firewall(db, self.into())
+ }
+
+ pub fn fields_with_source_map(
+ self,
+ db: &dyn DefDatabase,
+ ) -> (Arc<VariantFields>, Arc<ExpressionStoreSourceMap>) {
+ VariantFields::query(db, self.into())
+ }
+}
+
+pub type EnumLoc = ItemLoc<ast::Enum>;
impl_intern!(EnumId, EnumLoc, intern_enum, lookup_intern_enum);
-impl_loc!(EnumLoc, id: Enum, container: ModuleId);
-type ConstLoc = AssocItemLoc<Const>;
+impl EnumId {
+ #[inline]
+ pub fn enum_variants(self, db: &dyn DefDatabase) -> &EnumVariants {
+ &self.enum_variants_with_diagnostics(db).0
+ }
+
+ #[inline]
+ pub fn enum_variants_with_diagnostics(
+ self,
+ db: &dyn DefDatabase,
+ ) -> &(EnumVariants, Option<ThinVec<InactiveEnumVariantCode>>) {
+ EnumVariants::of(db, self)
+ }
+}
+
+type ConstLoc = AssocItemLoc<ast::Const>;
impl_intern!(ConstId, ConstLoc, intern_const, lookup_intern_const);
-impl_loc!(ConstLoc, id: Const, container: ItemContainerId);
-pub type StaticLoc = AssocItemLoc<Static>;
+pub type StaticLoc = AssocItemLoc<ast::Static>;
impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static);
-impl_loc!(StaticLoc, id: Static, container: ItemContainerId);
-pub type TraitLoc = ItemLoc<Trait>;
+pub type TraitLoc = ItemLoc<ast::Trait>;
impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait);
-impl_loc!(TraitLoc, id: Trait, container: ModuleId);
-pub type TraitAliasLoc = ItemLoc<TraitAlias>;
+impl TraitId {
+ #[inline]
+ pub fn trait_items(self, db: &dyn DefDatabase) -> &TraitItems {
+ TraitItems::query(db, self)
+ }
+}
+
+pub type TraitAliasLoc = ItemLoc<ast::TraitAlias>;
impl_intern!(TraitAliasId, TraitAliasLoc, intern_trait_alias, lookup_intern_trait_alias);
-impl_loc!(TraitAliasLoc, id: TraitAlias, container: ModuleId);
-type TypeAliasLoc = AssocItemLoc<TypeAlias>;
+type TypeAliasLoc = AssocItemLoc<ast::TypeAlias>;
impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias);
-impl_loc!(TypeAliasLoc, id: TypeAlias, container: ItemContainerId);
-type ImplLoc = ItemLoc<Impl>;
+type ImplLoc = ItemLoc<ast::Impl>;
impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl);
-impl_loc!(ImplLoc, id: Impl, container: ModuleId);
-type UseLoc = ItemLoc<Use>;
+impl ImplId {
+ #[inline]
+ pub fn impl_items(self, db: &dyn DefDatabase) -> &ImplItems {
+ &self.impl_items_with_diagnostics(db).0
+ }
+
+ #[inline]
+ pub fn impl_items_with_diagnostics(self, db: &dyn DefDatabase) -> &(ImplItems, DefDiagnostics) {
+ ImplItems::of(db, self)
+ }
+}
+
+type UseLoc = ItemLoc<ast::Use>;
impl_intern!(UseId, UseLoc, intern_use, lookup_intern_use);
-impl_loc!(UseLoc, id: Use, container: ModuleId);
-type ExternCrateLoc = ItemLoc<ExternCrate>;
+type ExternCrateLoc = ItemLoc<ast::ExternCrate>;
impl_intern!(ExternCrateId, ExternCrateLoc, intern_extern_crate, lookup_intern_extern_crate);
-impl_loc!(ExternCrateLoc, id: ExternCrate, container: ModuleId);
-type ExternBlockLoc = ItemLoc<ExternBlock>;
+type ExternBlockLoc = ItemLoc<ast::ExternBlock>;
impl_intern!(ExternBlockId, ExternBlockLoc, intern_extern_block, lookup_intern_extern_block);
-impl_loc!(ExternBlockLoc, id: ExternBlock, container: ModuleId);
+
+#[salsa::tracked]
+impl ExternBlockId {
+ #[salsa::tracked]
+ pub fn abi(self, db: &dyn DefDatabase) -> Option<Symbol> {
+ signatures::extern_block_abi(db, self)
+ }
+}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EnumVariantLoc {
- pub id: ItemTreeId<Variant>,
+ pub id: AstId<ast::Variant>,
pub parent: EnumId,
pub index: u32,
}
impl_intern!(EnumVariantId, EnumVariantLoc, intern_enum_variant, lookup_intern_enum_variant);
impl_loc!(EnumVariantLoc, id: Variant, parent: EnumId);
+
+impl EnumVariantId {
+ pub fn fields(self, db: &dyn DefDatabase) -> &VariantFields {
+ VariantFields::firewall(db, self.into())
+ }
+
+ pub fn fields_with_source_map(
+ self,
+ db: &dyn DefDatabase,
+ ) -> (Arc<VariantFields>, Arc<ExpressionStoreSourceMap>) {
+ VariantFields::query(db, self.into())
+ }
+}
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Macro2Loc {
pub container: ModuleId,
- pub id: ItemTreeId<Macro2>,
+ pub id: AstId<ast::MacroDef>,
pub expander: MacroExpander,
pub allow_internal_unsafe: bool,
pub edition: Edition,
}
impl_intern!(Macro2Id, Macro2Loc, intern_macro2, lookup_intern_macro2);
-impl_loc!(Macro2Loc, id: Macro2, container: ModuleId);
+impl_loc!(Macro2Loc, id: MacroDef, container: ModuleId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroRulesLoc {
pub container: ModuleId,
- pub id: ItemTreeId<MacroRules>,
+ pub id: AstId<ast::MacroRules>,
pub expander: MacroExpander,
pub flags: MacroRulesLocFlags,
pub edition: Edition,
@@ -301,13 +420,13 @@ pub enum MacroExpander {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ProcMacroLoc {
pub container: CrateRootModuleId,
- pub id: ItemTreeId<Function>,
+ pub id: AstId<ast::Fn>,
pub expander: CustomProcMacroExpander,
pub kind: ProcMacroKind,
pub edition: Edition,
}
impl_intern!(ProcMacroId, ProcMacroLoc, intern_proc_macro, lookup_intern_proc_macro);
-impl_loc!(ProcMacroLoc, id: Function, container: CrateRootModuleId);
+impl_loc!(ProcMacroLoc, id: Fn, container: CrateRootModuleId);
#[derive(Debug, Hash, PartialEq, Eq, Clone)]
pub struct BlockLoc {
@@ -338,6 +457,18 @@ impl CrateRootModuleId {
}
}
+impl HasModule for CrateRootModuleId {
+ #[inline]
+ fn module(&self, _db: &dyn DefDatabase) -> ModuleId {
+ ModuleId { krate: self.krate, block: None, local_id: DefMap::ROOT }
+ }
+
+ #[inline]
+ fn krate(&self, _db: &dyn DefDatabase) -> Crate {
+ self.krate
+ }
+}
+
impl PartialEq<ModuleId> for CrateRootModuleId {
fn eq(&self, other: &ModuleId) -> bool {
other.block.is_none() && other.local_id == DefMap::ROOT && self.krate == other.krate
@@ -466,11 +597,19 @@ impl ModuleId {
}
}
+impl HasModule for ModuleId {
+ #[inline]
+ fn module(&self, _db: &dyn DefDatabase) -> ModuleId {
+ *self
+ }
+}
+
/// An ID of a module, **local** to a `DefMap`.
pub type LocalModuleId = Idx<nameres::ModuleData>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct FieldId {
+ // FIXME: Store this as an erased `salsa::Id` to save space
pub parent: VariantId,
pub local_id: LocalFieldId,
}
@@ -486,6 +625,7 @@ pub struct TupleFieldId {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct TypeOrConstParamId {
+ // FIXME: Store this as an erased `salsa::Id` to save space
pub parent: GenericDefId,
pub local_id: LocalTypeOrConstParamId,
}
@@ -544,6 +684,7 @@ impl From<ConstParamId> for TypeOrConstParamId {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct LifetimeParamId {
+ // FIXME: Store this as an erased `salsa::Id` to save space
pub parent: GenericDefId,
pub local_id: LocalLifetimeParamId,
}
@@ -642,15 +783,10 @@ impl GeneralConstId {
pub fn name(self, db: &dyn DefDatabase) -> String {
match self {
GeneralConstId::StaticId(it) => {
- let loc = it.lookup(db);
- let tree = loc.item_tree_id().item_tree(db);
- let name = tree[loc.id.value].name.display(db, Edition::CURRENT);
- name.to_string()
+ db.static_signature(it).name.display(db, Edition::CURRENT).to_string()
}
GeneralConstId::ConstId(const_id) => {
- let loc = const_id.lookup(db);
- let tree = loc.item_tree_id().item_tree(db);
- tree[loc.id.value].name.as_ref().map_or_else(
+ db.const_signature(const_id).name.as_ref().map_or_else(
|| "_".to_owned(),
|name| name.display(db, Edition::CURRENT).to_string(),
)
@@ -692,7 +828,7 @@ impl DefWithBodyId {
}
}
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, salsa_macros::Supertype)]
pub enum AssocItemId {
FunctionId(FunctionId),
ConstId(ConstId),
@@ -768,8 +904,8 @@ impl GenericDefId {
GenericDefId::TraitId(it) => file_id_and_params_of_item_loc(db, it),
GenericDefId::TraitAliasId(it) => file_id_and_params_of_item_loc(db, it),
GenericDefId::ImplId(it) => file_id_and_params_of_item_loc(db, it),
- GenericDefId::ConstId(it) => (it.lookup(db).id.file_id(), None),
- GenericDefId::StaticId(it) => (it.lookup(db).id.file_id(), None),
+ GenericDefId::ConstId(it) => (it.lookup(db).id.file_id, None),
+ GenericDefId::StaticId(it) => (it.lookup(db).id.file_id, None),
}
}
@@ -929,15 +1065,22 @@ pub enum VariantId {
impl_from!(EnumVariantId, StructId, UnionId for VariantId);
impl VariantId {
- pub fn variant_data(self, db: &dyn DefDatabase) -> Arc<VariantFields> {
- db.variant_fields(self)
+ pub fn fields(self, db: &dyn DefDatabase) -> &VariantFields {
+ VariantFields::firewall(db, self)
+ }
+
+ pub fn fields_with_source_map(
+ self,
+ db: &dyn DefDatabase,
+ ) -> (Arc<VariantFields>, Arc<ExpressionStoreSourceMap>) {
+ VariantFields::query(db, self)
}
pub fn file_id(self, db: &dyn DefDatabase) -> HirFileId {
match self {
- VariantId::EnumVariantId(it) => it.lookup(db).id.file_id(),
- VariantId::StructId(it) => it.lookup(db).id.file_id(),
- VariantId::UnionId(it) => it.lookup(db).id.file_id(),
+ VariantId::EnumVariantId(it) => it.lookup(db).id.file_id,
+ VariantId::StructId(it) => it.lookup(db).id.file_id,
+ VariantId::UnionId(it) => it.lookup(db).id.file_id,
}
}
@@ -977,7 +1120,7 @@ pub trait HasModule {
impl<N, ItemId> HasModule for ItemId
where
- N: ItemTreeNode,
+ N: AstIdNode,
ItemId: Lookup<Database = dyn DefDatabase, Data = ItemLoc<N>> + Copy,
{
#[inline]
@@ -1003,7 +1146,7 @@ where
#[inline]
fn module_for_assoc_item_loc<'db>(
db: &(dyn 'db + DefDatabase),
- id: impl Lookup<Database = dyn DefDatabase, Data = AssocItemLoc<impl ItemTreeNode>>,
+ id: impl Lookup<Database = dyn DefDatabase, Data = AssocItemLoc<impl AstIdNode>>,
) -> ModuleId {
id.lookup(db).container.module(db)
}
@@ -1245,7 +1388,7 @@ pub struct SyntheticSyntax;
// Crate authors can opt their type out of completions in some cases.
// This is done with the `#[rust_analyzer::completions(...)]` attribute.
//
-// All completeable things support `#[rust_analyzer::completions(ignore_flyimport)]`,
+// All completable things support `#[rust_analyzer::completions(ignore_flyimport)]`,
// which causes the thing to get excluded from flyimport completion. It will still
// be completed when in scope. This is analogous to the setting `rust-analyzer.completion.autoimport.exclude`
// with `"type": "always"`.
diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
index 777953d3f2..0013c2a256 100644
--- a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
+++ b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
@@ -746,3 +746,83 @@ struct Struct9<#[pointee] T, U>(T) where T: ?Sized;
623..690: `derive(CoercePointee)` requires `T` to be marked `?Sized`"#]],
);
}
+
+#[test]
+fn union_derive() {
+ check_errors(
+ r#"
+//- minicore: clone, copy, default, fmt, hash, ord, eq, derive
+
+#[derive(Copy)]
+union Foo1 { _v: () }
+#[derive(Clone)]
+union Foo2 { _v: () }
+#[derive(Default)]
+union Foo3 { _v: () }
+#[derive(Debug)]
+union Foo4 { _v: () }
+#[derive(Hash)]
+union Foo5 { _v: () }
+#[derive(Ord)]
+union Foo6 { _v: () }
+#[derive(PartialOrd)]
+union Foo7 { _v: () }
+#[derive(Eq)]
+union Foo8 { _v: () }
+#[derive(PartialEq)]
+union Foo9 { _v: () }
+ "#,
+ expect![[r#"
+ 78..118: this trait cannot be derived for unions
+ 119..157: this trait cannot be derived for unions
+ 158..195: this trait cannot be derived for unions
+ 196..232: this trait cannot be derived for unions
+ 233..276: this trait cannot be derived for unions
+ 313..355: this trait cannot be derived for unions"#]],
+ );
+}
+
+#[test]
+fn default_enum_without_default_attr() {
+ check_errors(
+ r#"
+//- minicore: default, derive
+
+#[derive(Default)]
+enum Foo {
+ Bar,
+}
+ "#,
+ expect!["1..41: `#[derive(Default)]` on enum with no `#[default]`"],
+ );
+}
+
+#[test]
+fn generic_enum_default() {
+ check(
+ r#"
+//- minicore: default, derive
+
+#[derive(Default)]
+enum Foo<T> {
+ Bar(T),
+ #[default]
+ Baz,
+}
+"#,
+ expect![[r#"
+
+#[derive(Default)]
+enum Foo<T> {
+ Bar(T),
+ #[default]
+ Baz,
+}
+
+impl <T, > $crate::default::Default for Foo<T, > where {
+ fn default() -> Self {
+ Foo::Baz
+ }
+}"#]],
+ );
+}
diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
index 293868df61..1c3af47d52 100644
--- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
+++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
@@ -28,6 +28,19 @@ fn test_asm_expand() {
r#"
#[rustc_builtin_macro]
macro_rules! asm {() => {}}
+#[rustc_builtin_macro]
+macro_rules! global_asm {() => {}}
+#[rustc_builtin_macro]
+macro_rules! naked_asm {() => {}}
+
+global_asm! {
+ ""
+}
+
+#[unsafe(naked)]
+extern "C" fn foo() {
+ naked_asm!("");
+}
fn main() {
let i: u64 = 3;
@@ -45,6 +58,17 @@ fn main() {
expect![[r##"
#[rustc_builtin_macro]
macro_rules! asm {() => {}}
+#[rustc_builtin_macro]
+macro_rules! global_asm {() => {}}
+#[rustc_builtin_macro]
+macro_rules! naked_asm {() => {}}
+
+builtin #global_asm ("")
+
+#[unsafe(naked)]
+extern "C" fn foo() {
+ builtin #naked_asm ("");
+}
fn main() {
let i: u64 = 3;
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs
index 38fc4b3d11..c489c1f7c1 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -35,9 +35,9 @@ macro_rules! f {
};
}
-struct#0:[email protected]#14336# MyTraitMap2#0:[email protected]#ROOT2024# {#0:[email protected]#14336#
- map#0:[email protected]#14336#:#0:[email protected]#14336# #0:[email protected]#14336#::#0:[email protected]#14336#std#0:[email protected]#14336#::#0:[email protected]#14336#collections#0:[email protected]#14336#::#0:[email protected]#14336#HashSet#0:[email protected]#14336#<#0:[email protected]#14336#(#0:[email protected]#14336#)#0:[email protected]#14336#>#0:[email protected]#14336#,#0:[email protected]#14336#
-}#0:[email protected]#14336#
+struct#0:MacroRules[BE8F, 0]@58..64#14336# MyTraitMap2#0:MacroCall[BE8F, 0]@31..42#ROOT2024# {#0:MacroRules[BE8F, 0]@72..73#14336#
+ map#0:MacroRules[BE8F, 0]@86..89#14336#:#0:MacroRules[BE8F, 0]@89..90#14336# #0:MacroRules[BE8F, 0]@89..90#14336#::#0:MacroRules[BE8F, 0]@91..93#14336#std#0:MacroRules[BE8F, 0]@93..96#14336#::#0:MacroRules[BE8F, 0]@96..98#14336#collections#0:MacroRules[BE8F, 0]@98..109#14336#::#0:MacroRules[BE8F, 0]@109..111#14336#HashSet#0:MacroRules[BE8F, 0]@111..118#14336#<#0:MacroRules[BE8F, 0]@118..119#14336#(#0:MacroRules[BE8F, 0]@119..120#14336#)#0:MacroRules[BE8F, 0]@120..121#14336#>#0:MacroRules[BE8F, 0]@121..122#14336#,#0:MacroRules[BE8F, 0]@122..123#14336#
+}#0:MacroRules[BE8F, 0]@132..133#14336#
"#]],
);
}
@@ -75,12 +75,12 @@ macro_rules! f {
};
}
-fn#0:[email protected]#ROOT2024# main#0:[email protected]#ROOT2024#(#0:[email protected]#ROOT2024#)#0:[email protected]#ROOT2024# {#0:[email protected]#ROOT2024#
- 1#0:[email protected]#ROOT2024#;#0:[email protected]#ROOT2024#
- 1.0#0:[email protected]#ROOT2024#;#0:[email protected]#ROOT2024#
- (#0:[email protected]#ROOT2024#(#0:[email protected]#ROOT2024#1#0:[email protected]#ROOT2024#,#0:[email protected]#ROOT2024# )#0:[email protected]#ROOT2024#,#0:[email protected]#ROOT2024# )#0:[email protected]#ROOT2024#.#0:[email protected]#ROOT2024#0#0:[email protected]#ROOT2024#.#0:[email protected]#ROOT2024#0#0:[email protected]#ROOT2024#;#0:[email protected]#ROOT2024#
- let#0:[email protected]#ROOT2024# x#0:[email protected]#ROOT2024# =#0:[email protected]#ROOT2024# 1#0:[email protected]#ROOT2024#;#0:[email protected]#ROOT2024#
-}#0:[email protected]#ROOT2024#
+fn#0:MacroCall[BE8F, 0]@30..32#ROOT2024# main#0:MacroCall[BE8F, 0]@33..37#ROOT2024#(#0:MacroCall[BE8F, 0]@37..38#ROOT2024#)#0:MacroCall[BE8F, 0]@38..39#ROOT2024# {#0:MacroCall[BE8F, 0]@40..41#ROOT2024#
+ 1#0:MacroCall[BE8F, 0]@50..51#ROOT2024#;#0:MacroCall[BE8F, 0]@51..52#ROOT2024#
+ 1.0#0:MacroCall[BE8F, 0]@61..64#ROOT2024#;#0:MacroCall[BE8F, 0]@64..65#ROOT2024#
+ (#0:MacroCall[BE8F, 0]@74..75#ROOT2024#(#0:MacroCall[BE8F, 0]@75..76#ROOT2024#1#0:MacroCall[BE8F, 0]@76..77#ROOT2024#,#0:MacroCall[BE8F, 0]@77..78#ROOT2024# )#0:MacroCall[BE8F, 0]@78..79#ROOT2024#,#0:MacroCall[BE8F, 0]@79..80#ROOT2024# )#0:MacroCall[BE8F, 0]@80..81#ROOT2024#.#0:MacroCall[BE8F, 0]@81..82#ROOT2024#0#0:MacroCall[BE8F, 0]@82..85#ROOT2024#.#0:MacroCall[BE8F, 0]@82..85#ROOT2024#0#0:MacroCall[BE8F, 0]@82..85#ROOT2024#;#0:MacroCall[BE8F, 0]@85..86#ROOT2024#
+ let#0:MacroCall[BE8F, 0]@95..98#ROOT2024# x#0:MacroCall[BE8F, 0]@99..100#ROOT2024# =#0:MacroCall[BE8F, 0]@101..102#ROOT2024# 1#0:MacroCall[BE8F, 0]@103..104#ROOT2024#;#0:MacroCall[BE8F, 0]@104..105#ROOT2024#
+}#0:MacroCall[BE8F, 0]@110..111#ROOT2024#
"#]],
@@ -171,7 +171,7 @@ fn main(foo: ()) {
}
fn main(foo: ()) {
- /* error: unresolved macro unresolved */"helloworld!"#0:[email protected]#ROOT2024#;
+ /* error: unresolved macro unresolved */"helloworld!"#0:Fn[15AE, 0]@236..321#ROOT2024#;
}
}
@@ -197,7 +197,7 @@ macro_rules! mk_struct {
#[macro_use]
mod foo;
-struct#1:[email protected]#14336# Foo#0:[email protected]#ROOT2024#(#1:[email protected]#14336#u32#0:[email protected]#ROOT2024#)#1:[email protected]#14336#;#1:[email protected]#14336#
+struct#1:MacroRules[DB0C, 0]@59..65#14336# Foo#0:MacroCall[DB0C, 0]@32..35#ROOT2024#(#1:MacroRules[DB0C, 0]@70..71#14336#u32#0:MacroCall[DB0C, 0]@41..44#ROOT2024#)#1:MacroRules[DB0C, 0]@74..75#14336#;#1:MacroRules[DB0C, 0]@75..76#14336#
"#]],
);
}
@@ -2029,3 +2029,25 @@ fn f() {
"#]],
);
}
+
+#[test]
+fn lifetime_repeat() {
+ check(
+ r#"
+macro_rules! m {
+ ($($x:expr)'a*) => (stringify!($($x)'b*));
+}
+fn f() {
+ let _ = m!(0 'a 1 'a 2);
+}
+ "#,
+ expect![[r#"
+macro_rules! m {
+ ($($x:expr)'a*) => (stringify!($($x)'b*));
+}
+fn f() {
+ let _ = stringify!(0 'b 1 'b 2);
+}
+ "#]],
+ );
+}
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs b/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs
index 2d289b7683..2c94f0e8d0 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs
@@ -13,6 +13,8 @@ macro_rules! m {
($(x),*) => ();
($(x)_*) => ();
($(x)i*) => ();
+ ($(x)'a*) => ();
+ ($(x)'_*) => ();
($($i:ident)*) => ($_);
($($true:ident)*) => ($true);
($($false:ident)*) => ($false);
@@ -28,6 +30,8 @@ macro_rules! m {
($(x),*) => ();
($(x)_*) => ();
($(x)i*) => ();
+ ($(x)'a*) => ();
+ ($(x)'_*) => ();
($($i:ident)*) => ($_);
($($true:ident)*) => ($true);
($($false:ident)*) => ($false);
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
index 2cc3ca8c75..e2022c7967 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
@@ -784,7 +784,7 @@ macro_rules! delegate_impl {
}
}
}
-impl <> Data for &'amut G where G: Data {}
+impl <> Data for &'a mut G where G: Data {}
"#]],
);
}
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index dc4334ee08..5e95b06139 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -14,19 +14,20 @@ mod builtin_fn_macro;
mod mbe;
mod proc_macros;
-use std::{iter, ops::Range, sync};
+use std::{any::TypeId, iter, ops::Range, sync};
use base_db::RootQueryDb;
use expect_test::Expect;
use hir_expand::{
AstId, InFile, MacroCallId, MacroCallKind, MacroKind,
+ builtin::quote::quote,
db::ExpandDatabase,
proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind},
span_map::SpanMapRef,
};
-use intern::Symbol;
+use intern::{Symbol, sym};
use itertools::Itertools;
-use span::{Edition, Span};
+use span::{Edition, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext};
use stdx::{format_to, format_to_acc};
use syntax::{
AstNode, AstPtr,
@@ -34,7 +35,9 @@ use syntax::{
SyntaxNode, T,
ast::{self, edit::IndentLevel},
};
+use syntax_bridge::token_tree_to_syntax_node;
use test_fixture::WithFixture;
+use tt::{TextRange, TextSize};
use crate::{
AdtId, Lookup, ModuleDefId,
@@ -302,14 +305,15 @@ fn pretty_print_macro_expansion(
(_, T!['{']) => " ",
(T![;] | T!['{'] | T!['}'], _) => "\n",
(_, T!['}']) => "\n",
- (IDENT | LIFETIME_IDENT, IDENT | LIFETIME_IDENT) => " ",
- _ if prev_kind.is_keyword(Edition::CURRENT)
- && curr_kind.is_keyword(Edition::CURRENT) =>
+ _ if (prev_kind.is_any_identifier()
+ || prev_kind == LIFETIME_IDENT
+ || prev_kind.is_literal())
+ && (curr_kind.is_any_identifier()
+ || curr_kind == LIFETIME_IDENT
+ || curr_kind.is_literal()) =>
{
" "
}
- (IDENT, _) if curr_kind.is_keyword(Edition::CURRENT) => " ",
- (_, IDENT) if prev_kind.is_keyword(Edition::CURRENT) => " ",
(T![>], IDENT) => " ",
(T![>], _) if curr_kind.is_keyword(Edition::CURRENT) => " ",
(T![->], _) | (_, T![->]) => " ",
@@ -380,4 +384,43 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
panic!("got invalid macro input: {:?}", parse.errors());
}
}
+
+ fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+ other.type_id() == TypeId::of::<Self>()
+ }
+}
+
+#[test]
+fn regression_20171() {
+ // This really isn't the appropriate place to put this test, but it's convenient with access to `quote!`.
+ let span = Span {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor: SpanAnchor {
+ file_id: span::EditionedFileId::current_edition(span::FileId::from_raw(0)),
+ ast_id: ROOT_ERASED_FILE_AST_ID,
+ },
+ ctx: SyntaxContext::root(Edition::CURRENT),
+ };
+ let close_brace = tt::Punct { char: '}', spacing: tt::Spacing::Alone, span };
+ let dotdot1 = tt::Punct { char: '.', spacing: tt::Spacing::Joint, span };
+ let dotdot2 = tt::Punct { char: '.', spacing: tt::Spacing::Alone, span };
+ let dollar_crate = sym::dollar_crate;
+ let tt = quote! {
+ span => {
+ if !((matches!(
+ drive_parser(&mut parser, data, false),
+ Err(TarParserError::CorruptField {
+ field: CorruptFieldContext::PaxKvLength,
+ error: GeneralParseError::ParseInt(ParseIntError { #dotdot1 #dotdot2 })
+ })
+ #close_brace ))) {
+ #dollar_crate::panic::panic_2021!();
+ }}
+ };
+ token_tree_to_syntax_node(
+ &tt,
+ syntax_bridge::TopEntryPoint::MacroStmts,
+ &mut |_| Edition::CURRENT,
+ Edition::CURRENT,
+ );
}
diff --git a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
index b2e1adc365..6952a9da10 100644
--- a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
+++ b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -181,9 +181,9 @@ fn foo(&self) {
self.0. 1;
}
-fn#0:[email protected]#ROOT2024# foo#0:[email protected]#ROOT2024#(#0:[email protected]#ROOT2024#&#0:[email protected]#ROOT2024#self#0:[email protected]#ROOT2024# )#0:[email protected]#ROOT2024# {#0:[email protected]#ROOT2024#
- self#0:[email protected]#ROOT2024# .#0:[email protected]#ROOT2024#0#0:[email protected]#ROOT2024#.#0:[email protected]#ROOT2024#1#0:[email protected]#ROOT2024#;#0:[email protected]#ROOT2024#
-}#0:[email protected]#ROOT2024#"#]],
+fn#0:Fn[8A31, 0]@45..47#ROOT2024# foo#0:Fn[8A31, 0]@48..51#ROOT2024#(#0:Fn[8A31, 0]@51..52#ROOT2024#&#0:Fn[8A31, 0]@52..53#ROOT2024#self#0:Fn[8A31, 0]@53..57#ROOT2024# )#0:Fn[8A31, 0]@57..58#ROOT2024# {#0:Fn[8A31, 0]@59..60#ROOT2024#
+ self#0:Fn[8A31, 0]@65..69#ROOT2024# .#0:Fn[8A31, 0]@69..70#ROOT2024#0#0:Fn[8A31, 0]@70..71#ROOT2024#.#0:Fn[8A31, 0]@71..72#ROOT2024#1#0:Fn[8A31, 0]@73..74#ROOT2024#;#0:Fn[8A31, 0]@74..75#ROOT2024#
+}#0:Fn[8A31, 0]@76..77#ROOT2024#"#]],
);
}
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index f337f83156..5030585147 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -62,8 +62,8 @@ use std::ops::Deref;
use base_db::Crate;
use hir_expand::{
- EditionedFileId, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, mod_path::ModPath,
- name::Name, proc_macro::ProcMacroKind,
+ EditionedFileId, ErasedAstId, HirFileId, InFile, MacroCallId, mod_path::ModPath, name::Name,
+ proc_macro::ProcMacroKind,
};
use intern::Symbol;
use itertools::Itertools;
@@ -80,7 +80,7 @@ use crate::{
LocalModuleId, Lookup, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId,
db::DefDatabase,
item_scope::{BuiltinShadowMode, ItemScope},
- item_tree::{ItemTreeId, Mod, TreeId},
+ item_tree::TreeId,
nameres::{diagnostics::DefDiagnostic, path_resolution::ResolveMode},
per_ns::PerNs,
visibility::{Visibility, VisibilityExplicitness},
@@ -171,12 +171,10 @@ pub struct DefMap {
/// ExternCrateId being None implies it being imported from the general prelude import.
macro_use_prelude: FxHashMap<Name, (MacroId, Option<ExternCrateId>)>,
- // FIXME: AstId's are fairly unstable
/// Tracks which custom derives are in scope for an item, to allow resolution of derive helper
/// attributes.
// FIXME: Figure out a better way for the IDE layer to resolve these?
derive_helpers_in_scope: FxHashMap<AstId<ast::Item>, Vec<(Name, MacroId, MacroCallId)>>,
- // FIXME: AstId's are fairly unstable
/// A mapping from [`hir_expand::MacroDefId`] to [`crate::MacroId`].
pub macro_def_to_macro_id: FxHashMap<ErasedAstId, MacroId>,
@@ -191,7 +189,7 @@ pub struct DefMap {
#[derive(Clone, Debug, PartialEq, Eq)]
struct DefMapCrateData {
/// Side table for resolving derive helpers.
- exported_derives: FxHashMap<MacroDefId, Box<[Name]>>,
+ exported_derives: FxHashMap<MacroId, Box<[Name]>>,
fn_proc_macro_mapping: FxHashMap<FunctionId, ProcMacroId>,
/// Custom attributes registered with `#![register_attr]`.
@@ -291,11 +289,11 @@ pub enum ModuleOrigin {
File {
is_mod_rs: bool,
declaration: FileAstId<ast::Module>,
- declaration_tree_id: ItemTreeId<Mod>,
+ declaration_tree_id: TreeId,
definition: EditionedFileId,
},
Inline {
- definition_tree_id: ItemTreeId<Mod>,
+ definition_tree_id: TreeId,
definition: FileAstId<ast::Module>,
},
/// Pseudo-module introduced by a block scope (contains only inner items).
@@ -375,19 +373,14 @@ pub fn crate_def_map(db: &dyn DefDatabase, crate_id: Crate) -> &DefMap {
crate_local_def_map(db, crate_id).def_map(db)
}
-#[allow(unused_lifetimes)]
-mod __ {
- use super::*;
- #[salsa_macros::tracked]
- pub(crate) struct DefMapPair<'db> {
- #[tracked]
- #[returns(ref)]
- pub(crate) def_map: DefMap,
- #[returns(ref)]
- pub(crate) local: LocalDefMap,
- }
+#[salsa_macros::tracked]
+pub(crate) struct DefMapPair<'db> {
+ #[tracked]
+ #[returns(ref)]
+ pub(crate) def_map: DefMap,
+ #[returns(ref)]
+ pub(crate) local: LocalDefMap,
}
-pub(crate) use __::DefMapPair;
#[salsa_macros::tracked(returns(ref))]
pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefMapPair<'_> {
diff --git a/crates/hir-def/src/nameres/assoc.rs b/crates/hir-def/src/nameres/assoc.rs
index 86225d33b4..07210df887 100644
--- a/crates/hir-def/src/nameres/assoc.rs
+++ b/crates/hir-def/src/nameres/assoc.rs
@@ -1,14 +1,28 @@
//! Expansion of associated items
-use hir_expand::{AstId, InFile, Intern, Lookup, MacroCallKind, MacroDefKind, name::Name};
-use syntax::ast;
+use std::mem;
+
+use cfg::CfgOptions;
+use hir_expand::{
+ AstId, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind, MacroDefKind,
+ mod_path::ModPath,
+ name::{AsName, Name},
+ span_map::SpanMap,
+};
+use intern::Interned;
+use span::AstIdMap;
+use syntax::{
+ AstNode,
+ ast::{self, HasModuleItem, HasName},
+};
+use thin_vec::ThinVec;
use triomphe::Arc;
use crate::{
AssocItemId, AstIdWithPath, ConstLoc, FunctionId, FunctionLoc, ImplId, ItemContainerId,
ItemLoc, MacroCallId, ModuleId, TraitId, TypeAliasId, TypeAliasLoc,
+ attr::Attrs,
db::DefDatabase,
- item_tree::{AssocItem, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId},
macro_call_as_call_id,
nameres::{
DefMap, LocalDefMap, MacroSubNs,
@@ -20,29 +34,30 @@ use crate::{
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TraitItems {
pub items: Box<[(Name, AssocItemId)]>,
- // box it as the vec is usually empty anyways
- // FIXME: AstIds are rather unstable...
- pub macro_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
+ // `ThinVec` as the vec is usually empty anyways
+ pub macro_calls: ThinVec<(AstId<ast::Item>, MacroCallId)>,
}
+#[salsa::tracked]
impl TraitItems {
#[inline]
- pub(crate) fn trait_items_query(db: &dyn DefDatabase, tr: TraitId) -> Arc<TraitItems> {
- db.trait_items_with_diagnostics(tr).0
+ pub(crate) fn query(db: &dyn DefDatabase, tr: TraitId) -> &TraitItems {
+ &Self::query_with_diagnostics(db, tr).0
}
- pub(crate) fn trait_items_with_diagnostics_query(
+ #[salsa::tracked(returns(ref))]
+ pub fn query_with_diagnostics(
db: &dyn DefDatabase,
tr: TraitId,
- ) -> (Arc<TraitItems>, DefDiagnostics) {
- let ItemLoc { container: module_id, id: tree_id } = tr.lookup(db);
+ ) -> (TraitItems, DefDiagnostics) {
+ let ItemLoc { container: module_id, id: ast_id } = tr.lookup(db);
- let collector = AssocItemCollector::new(db, module_id, ItemContainerId::TraitId(tr));
- let item_tree = tree_id.item_tree(db);
- let (items, macro_calls, diagnostics) =
- collector.collect(&item_tree, tree_id.tree_id(), &item_tree[tree_id.value].items);
+ let collector =
+ AssocItemCollector::new(db, module_id, ItemContainerId::TraitId(tr), ast_id.file_id);
+ let source = ast_id.with_value(collector.ast_id_map.get(ast_id.value)).to_node(db);
+ let (items, macro_calls, diagnostics) = collector.collect(source.assoc_item_list());
- (Arc::new(TraitItems { macro_calls, items }), DefDiagnostics::new(diagnostics))
+ (TraitItems { macro_calls, items }, DefDiagnostics::new(diagnostics))
}
pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ {
@@ -76,41 +91,36 @@ impl TraitItems {
}
pub fn macro_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
- self.macro_calls.iter().flat_map(|it| it.iter()).copied()
+ self.macro_calls.iter().copied()
}
}
#[derive(Debug, PartialEq, Eq)]
pub struct ImplItems {
pub items: Box<[(Name, AssocItemId)]>,
- // box it as the vec is usually empty anyways
- // FIXME: AstIds are rather unstable...
- pub macro_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
+ // `ThinVec` as the vec is usually empty anyways
+ pub macro_calls: ThinVec<(AstId<ast::Item>, MacroCallId)>,
}
+#[salsa::tracked]
impl ImplItems {
- #[inline]
- pub(crate) fn impl_items_query(db: &dyn DefDatabase, id: ImplId) -> Arc<ImplItems> {
- db.impl_items_with_diagnostics(id).0
- }
-
- pub(crate) fn impl_items_with_diagnostics_query(
- db: &dyn DefDatabase,
- id: ImplId,
- ) -> (Arc<ImplItems>, DefDiagnostics) {
+ #[salsa::tracked(returns(ref))]
+ pub fn of(db: &dyn DefDatabase, id: ImplId) -> (ImplItems, DefDiagnostics) {
let _p = tracing::info_span!("impl_items_with_diagnostics_query").entered();
- let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);
+ let ItemLoc { container: module_id, id: ast_id } = id.lookup(db);
- let collector = AssocItemCollector::new(db, module_id, ItemContainerId::ImplId(id));
- let item_tree = tree_id.item_tree(db);
- let (items, macro_calls, diagnostics) =
- collector.collect(&item_tree, tree_id.tree_id(), &item_tree[tree_id.value].items);
+ let collector =
+ AssocItemCollector::new(db, module_id, ItemContainerId::ImplId(id), ast_id.file_id);
+ let source = ast_id.with_value(collector.ast_id_map.get(ast_id.value)).to_node(db);
+ let (items, macro_calls, diagnostics) = collector.collect(source.assoc_item_list());
- (Arc::new(ImplItems { items, macro_calls }), DefDiagnostics::new(diagnostics))
+ (ImplItems { items, macro_calls }, DefDiagnostics::new(diagnostics))
}
+}
+impl ImplItems {
pub fn macro_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
- self.macro_calls.iter().flat_map(|it| it.iter()).copied()
+ self.macro_calls.iter().copied()
}
}
@@ -119,67 +129,73 @@ struct AssocItemCollector<'a> {
module_id: ModuleId,
def_map: &'a DefMap,
local_def_map: &'a LocalDefMap,
+ ast_id_map: Arc<AstIdMap>,
+ span_map: SpanMap,
+ cfg_options: &'a CfgOptions,
+ file_id: HirFileId,
diagnostics: Vec<DefDiagnostic>,
container: ItemContainerId,
depth: usize,
items: Vec<(Name, AssocItemId)>,
- macro_calls: Vec<(AstId<ast::Item>, MacroCallId)>,
+ macro_calls: ThinVec<(AstId<ast::Item>, MacroCallId)>,
}
impl<'a> AssocItemCollector<'a> {
- fn new(db: &'a dyn DefDatabase, module_id: ModuleId, container: ItemContainerId) -> Self {
+ fn new(
+ db: &'a dyn DefDatabase,
+ module_id: ModuleId,
+ container: ItemContainerId,
+ file_id: HirFileId,
+ ) -> Self {
let (def_map, local_def_map) = module_id.local_def_map(db);
Self {
db,
module_id,
def_map,
local_def_map,
+ ast_id_map: db.ast_id_map(file_id),
+ span_map: db.span_map(file_id),
+ cfg_options: module_id.krate.cfg_options(db),
+ file_id,
container,
items: Vec::new(),
depth: 0,
- macro_calls: Vec::new(),
+ macro_calls: ThinVec::new(),
diagnostics: Vec::new(),
}
}
fn collect(
mut self,
- item_tree: &ItemTree,
- tree_id: TreeId,
- assoc_items: &[AssocItem],
- ) -> (
- Box<[(Name, AssocItemId)]>,
- Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
- Vec<DefDiagnostic>,
- ) {
- self.items.reserve(assoc_items.len());
- for &item in assoc_items {
- self.collect_item(item_tree, tree_id, item);
+ item_list: Option<ast::AssocItemList>,
+ ) -> (Box<[(Name, AssocItemId)]>, ThinVec<(AstId<ast::Item>, MacroCallId)>, Vec<DefDiagnostic>)
+ {
+ if let Some(item_list) = item_list {
+ for item in item_list.assoc_items() {
+ self.collect_item(item);
+ }
}
- (
- self.items.into_boxed_slice(),
- if self.macro_calls.is_empty() { None } else { Some(Box::new(self.macro_calls)) },
- self.diagnostics,
- )
+ self.macro_calls.shrink_to_fit();
+ (self.items.into_boxed_slice(), self.macro_calls, self.diagnostics)
}
- fn collect_item(&mut self, item_tree: &ItemTree, tree_id: TreeId, item: AssocItem) {
- let attrs = item_tree.attrs(self.db, self.module_id.krate, ModItem::from(item).into());
- if !attrs.is_cfg_enabled(self.module_id.krate.cfg_options(self.db)) {
+ fn collect_item(&mut self, item: ast::AssocItem) {
+ let ast_id = self.ast_id_map.ast_id(&item);
+ let attrs = Attrs::new(self.db, &item, self.span_map.as_ref(), self.cfg_options);
+ if let Err(cfg) = attrs.is_cfg_enabled(self.cfg_options) {
self.diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id.local_id,
- tree_id,
- ModItem::from(item).into(),
- attrs.cfg().unwrap(),
- self.module_id.krate.cfg_options(self.db).clone(),
+ InFile::new(self.file_id, ast_id.erase()),
+ cfg,
+ self.cfg_options.clone(),
));
return;
}
+ let ast_id = InFile::new(self.file_id, ast_id.upcast());
'attrs: for attr in &*attrs {
- let ast_id = AstId::new(tree_id.file_id(), item.ast_id(item_tree).upcast());
let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id };
match self.def_map.resolve_attr_macro(
@@ -223,34 +239,51 @@ impl<'a> AssocItemCollector<'a> {
}
}
- self.record_item(item_tree, tree_id, item);
+ self.record_item(item);
}
- fn record_item(&mut self, item_tree: &ItemTree, tree_id: TreeId, item: AssocItem) {
+ fn record_item(&mut self, item: ast::AssocItem) {
match item {
- AssocItem::Function(id) => {
- let item = &item_tree[id];
- let def =
- FunctionLoc { container: self.container, id: ItemTreeId::new(tree_id, id) }
- .intern(self.db);
- self.items.push((item.name.clone(), def.into()));
+ ast::AssocItem::Fn(function) => {
+ let Some(name) = function.name() else { return };
+ let ast_id = self.ast_id_map.ast_id(&function);
+ let def = FunctionLoc {
+ container: self.container,
+ id: InFile::new(self.file_id, ast_id),
+ }
+ .intern(self.db);
+ self.items.push((name.as_name(), def.into()));
}
- AssocItem::TypeAlias(id) => {
- let item = &item_tree[id];
+ ast::AssocItem::TypeAlias(type_alias) => {
+ let Some(name) = type_alias.name() else { return };
+ let ast_id = self.ast_id_map.ast_id(&type_alias);
+ let def = TypeAliasLoc {
+ container: self.container,
+ id: InFile::new(self.file_id, ast_id),
+ }
+ .intern(self.db);
+ self.items.push((name.as_name(), def.into()));
+ }
+ ast::AssocItem::Const(konst) => {
+ let Some(name) = konst.name() else { return };
+ let ast_id = self.ast_id_map.ast_id(&konst);
let def =
- TypeAliasLoc { container: self.container, id: ItemTreeId::new(tree_id, id) }
+ ConstLoc { container: self.container, id: InFile::new(self.file_id, ast_id) }
.intern(self.db);
- self.items.push((item.name.clone(), def.into()));
- }
- AssocItem::Const(id) => {
- let item = &item_tree[id];
- let Some(name) = item.name.clone() else { return };
- let def = ConstLoc { container: self.container, id: ItemTreeId::new(tree_id, id) }
- .intern(self.db);
- self.items.push((name, def.into()));
+ self.items.push((name.as_name(), def.into()));
}
- AssocItem::MacroCall(call) => {
- let MacroCall { ast_id, expand_to, ctxt, ref path } = item_tree[call];
+ ast::AssocItem::MacroCall(call) => {
+ let ast_id = self.ast_id_map.ast_id(&call);
+ let ast_id = InFile::new(self.file_id, ast_id);
+ let Some(path) = call.path() else { return };
+ let range = path.syntax().text_range();
+ let Some(path) = ModPath::from_src(self.db, path, &mut |range| {
+ self.span_map.span_for_range(range).ctx
+ }) else {
+ return;
+ };
+ let path = Interned::new(path);
+ let ctxt = self.span_map.span_for_range(range).ctx;
let resolver = |path: &_| {
self.def_map
@@ -268,10 +301,10 @@ impl<'a> AssocItemCollector<'a> {
};
match macro_call_as_call_id(
self.db,
- InFile::new(tree_id.file_id(), ast_id),
- path,
+ ast_id,
+ &path,
ctxt,
- expand_to,
+ ExpandTo::Items,
self.module_id.krate(),
resolver,
&mut |ptr, call_id| {
@@ -281,8 +314,7 @@ impl<'a> AssocItemCollector<'a> {
// FIXME: Expansion error?
Ok(call_id) => match call_id.value {
Some(call_id) => {
- self.macro_calls
- .push((InFile::new(tree_id.file_id(), ast_id.upcast()), call_id));
+ self.macro_calls.push((ast_id.upcast(), call_id));
self.collect_macro_items(call_id);
}
None => (),
@@ -291,11 +323,11 @@ impl<'a> AssocItemCollector<'a> {
self.diagnostics.push(DefDiagnostic::unresolved_macro_call(
self.module_id.local_id,
MacroCallKind::FnLike {
- ast_id: InFile::new(tree_id.file_id(), ast_id),
- expand_to,
+ ast_id,
+ expand_to: ExpandTo::Items,
eager: None,
},
- Clone::clone(path),
+ (*path).clone(),
));
}
}
@@ -308,13 +340,29 @@ impl<'a> AssocItemCollector<'a> {
tracing::warn!("macro expansion is too deep");
return;
}
- let tree_id = TreeId::new(macro_call_id.into(), None);
- let item_tree = self.db.file_item_tree(macro_call_id.into());
+ let (syntax, span_map) = self.db.parse_macro_expansion(macro_call_id).value;
+ let old_file_id = mem::replace(&mut self.file_id, macro_call_id.into());
+ let old_ast_id_map = mem::replace(&mut self.ast_id_map, self.db.ast_id_map(self.file_id));
+ let old_span_map = mem::replace(&mut self.span_map, SpanMap::ExpansionSpanMap(span_map));
self.depth += 1;
- for item in item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item) {
- self.collect_item(&item_tree, tree_id, item);
+
+ let items = ast::MacroItems::cast(syntax.syntax_node()).expect("not `MacroItems`");
+ for item in items.items() {
+ let item = match item {
+ ast::Item::Fn(it) => ast::AssocItem::from(it),
+ ast::Item::Const(it) => it.into(),
+ ast::Item::TypeAlias(it) => it.into(),
+ ast::Item::MacroCall(it) => it.into(),
+ // FIXME: Should error on disallowed item kinds.
+ _ => continue,
+ };
+ self.collect_item(item);
}
+
self.depth -= 1;
+ self.file_id = old_file_id;
+ self.ast_id_map = old_ast_id_map;
+ self.span_map = old_span_map;
}
}
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 350c97c398..0c3274d849 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -9,8 +9,8 @@ use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin};
use cfg::{CfgAtom, CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
- EditionedFileId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId,
- MacroDefKind,
+ EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
+ MacroDefId, MacroDefKind,
attrs::{Attr, AttrId},
builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro},
mod_path::{ModPath, PathKind},
@@ -35,13 +35,13 @@ use crate::{
db::DefDatabase,
item_scope::{GlobId, ImportId, ImportOrExternCrate, PerNsGlobImports},
item_tree::{
- self, AttrOwner, FieldsShape, FileItemTreeId, ImportAlias, ImportKind, ItemTree,
- ItemTreeId, ItemTreeNode, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId,
- UseTreeKind,
+ self, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId, Macro2, MacroCall,
+ MacroRules, Mod, ModItemId, ModKind, TreeId,
},
macro_call_as_call_id,
nameres::{
BuiltinShadowMode, DefMap, LocalDefMap, MacroSubNs, ModuleData, ModuleOrigin, ResolveMode,
+ assoc::TraitItems,
attr_resolution::{attr_macro_as_call_id, derive_macro_as_call_id},
crate_def_map,
diagnostics::DefDiagnostic,
@@ -154,14 +154,14 @@ struct Import {
impl Import {
fn from_use(
tree: &ItemTree,
- item_tree_id: ItemTreeId<item_tree::Use>,
+ item: FileAstId<ast::Use>,
id: UseId,
is_prelude: bool,
mut cb: impl FnMut(Self),
) {
- let it = &tree[item_tree_id.value];
+ let it = &tree[item];
let visibility = &tree[it.visibility];
- it.use_tree.expand(|idx, path, kind, alias| {
+ it.expand(|idx, path, kind, alias| {
cb(Self {
path,
alias,
@@ -181,15 +181,15 @@ struct ImportDirective {
}
#[derive(Clone, Debug, Eq, PartialEq)]
-struct MacroDirective {
+struct MacroDirective<'db> {
module_id: LocalModuleId,
depth: usize,
- kind: MacroDirectiveKind,
+ kind: MacroDirectiveKind<'db>,
container: ItemContainerId,
}
#[derive(Clone, Debug, Eq, PartialEq)]
-enum MacroDirectiveKind {
+enum MacroDirectiveKind<'db> {
FnLike {
ast_id: AstIdWithPath<ast::MacroCall>,
expand_to: ExpandTo,
@@ -206,30 +206,31 @@ enum MacroDirectiveKind {
Attr {
ast_id: AstIdWithPath<ast::Item>,
attr: Attr,
- mod_item: ModItem,
+ mod_item: ModItemId,
/* is this needed? */ tree: TreeId,
+ item_tree: &'db ItemTree,
},
}
/// Walks the tree of module recursively
-struct DefCollector<'a> {
- db: &'a dyn DefDatabase,
+struct DefCollector<'db> {
+ db: &'db dyn DefDatabase,
def_map: DefMap,
local_def_map: LocalDefMap,
/// Set only in case of blocks.
- crate_local_def_map: Option<&'a LocalDefMap>,
+ crate_local_def_map: Option<&'db LocalDefMap>,
// The dependencies of the current crate, including optional deps like `test`.
deps: FxHashMap<Name, BuiltDependency>,
glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, Visibility, GlobId)>>,
unresolved_imports: Vec<ImportDirective>,
indeterminate_imports: Vec<(ImportDirective, PerNs)>,
- unresolved_macros: Vec<MacroDirective>,
+ unresolved_macros: Vec<MacroDirective<'db>>,
// We'd like to avoid emitting a diagnostics avalanche when some `extern crate` doesn't
// resolve. When we emit diagnostics for unresolved imports, we only do so if the import
// doesn't start with an unresolved crate's name.
unresolved_extern_crates: FxHashSet<Name>,
mod_dirs: FxHashMap<LocalModuleId, ModDir>,
- cfg_options: &'a CfgOptions,
+ cfg_options: &'db CfgOptions,
/// List of procedural macros defined by this crate. This is read from the dynamic library
/// built by the build system, and is the list of proc-macros we can actually expand. It is
/// empty when proc-macro support is disabled (in which case we still do name resolution for
@@ -244,10 +245,10 @@ struct DefCollector<'a> {
/// This also stores the attributes to skip when we resolve derive helpers and non-macro
/// non-builtin attributes in general.
// FIXME: There has to be a better way to do this
- skip_attrs: FxHashMap<InFile<ModItem>, AttrId>,
+ skip_attrs: FxHashMap<InFile<FileAstId<ast::Item>>, AttrId>,
}
-impl DefCollector<'_> {
+impl<'db> DefCollector<'db> {
fn seed_with_top_level(&mut self) {
let _p = tracing::info_span!("seed_with_top_level").entered();
@@ -355,7 +356,7 @@ impl DefCollector<'_> {
macro_depth: 0,
module_id: DefMap::ROOT,
tree_id: TreeId::new(file_id.into(), None),
- item_tree: &item_tree,
+ item_tree,
mod_dir: ModDir::root(),
}
.collect_in_top_module(item_tree.top_level_items());
@@ -376,7 +377,7 @@ impl DefCollector<'_> {
macro_depth: 0,
module_id: DefMap::ROOT,
tree_id,
- item_tree: &item_tree,
+ item_tree,
mod_dir: ModDir::root(),
}
.collect_in_top_module(item_tree.top_level_items());
@@ -437,9 +438,8 @@ impl DefCollector<'_> {
// Additionally, while the proc macro entry points must be `pub`, they are not publicly
// exported in type/value namespace. This function reduces the visibility of all items
// in the crate root that aren't proc macros.
- let module_id = self.def_map.module_id(DefMap::ROOT);
let root = &mut self.def_map.modules[DefMap::ROOT];
- root.scope.censor_non_proc_macros(module_id);
+ root.scope.censor_non_proc_macros(self.def_map.krate);
}
}
@@ -459,7 +459,7 @@ impl DefCollector<'_> {
self.unresolved_macros.iter().enumerate().find_map(|(idx, directive)| match &directive
.kind
{
- MacroDirectiveKind::Attr { ast_id, mod_item, attr, tree } => {
+ MacroDirectiveKind::Attr { ast_id, mod_item, attr, tree, item_tree } => {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
directive.module_id,
MacroCallKind::Attr {
@@ -470,16 +470,22 @@ impl DefCollector<'_> {
attr.path().clone(),
));
- self.skip_attrs.insert(ast_id.ast_id.with_value(*mod_item), attr.id);
+ self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), attr.id);
- Some((idx, directive, *mod_item, *tree))
+ Some((idx, directive, *mod_item, *tree, *item_tree))
}
_ => None,
});
match unresolved_attr {
- Some((pos, &MacroDirective { module_id, depth, container, .. }, mod_item, tree_id)) => {
- let item_tree = &tree_id.item_tree(self.db);
+ Some((
+ pos,
+ &MacroDirective { module_id, depth, container, .. },
+ mod_item,
+ tree_id,
+ item_tree,
+ )) => {
+ // FIXME: Remove this clone
let mod_dir = self.mod_dirs[&module_id].clone();
ModCollector {
def_collector: self,
@@ -576,13 +582,7 @@ impl DefCollector<'_> {
/// use a dummy expander that always errors. This comes with the drawback of macros potentially
/// going out of sync with what the build system sees (since we resolve using VFS state, but
/// Cargo builds only on-disk files). We could and probably should add diagnostics for that.
- fn export_proc_macro(
- &mut self,
- def: ProcMacroDef,
- id: ItemTreeId<item_tree::Function>,
- ast_id: AstId<ast::Fn>,
- fn_id: FunctionId,
- ) {
+ fn export_proc_macro(&mut self, def: ProcMacroDef, ast_id: AstId<ast::Fn>, fn_id: FunctionId) {
let kind = def.kind.to_basedb_kind();
let (expander, kind) = match self.proc_macros.iter().find(|(n, _, _)| n == &def.name) {
Some(_)
@@ -598,7 +598,7 @@ impl DefCollector<'_> {
let proc_macro_id = ProcMacroLoc {
container: self.def_map.crate_root(),
- id,
+ id: ast_id,
expander,
kind,
edition: self.def_map.data.edition,
@@ -609,7 +609,7 @@ impl DefCollector<'_> {
self.define_proc_macro(def.name.clone(), proc_macro_id);
let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
if let ProcMacroKind::Derive { helpers } = def.kind {
- crate_data.exported_derives.insert(self.db.macro_def(proc_macro_id.into()), helpers);
+ crate_data.exported_derives.insert(proc_macro_id.into(), helpers);
}
crate_data.fn_proc_macro_mapping.insert(fn_id, proc_macro_id);
}
@@ -887,9 +887,31 @@ impl DefCollector<'_> {
let imp = ImportOrExternCrate::Import(ImportId { use_: id, idx: use_tree });
tracing::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);
+ // `extern crate crate_name` things can be re-exported as `pub use crate_name`.
+ // But they cannot be re-exported as `pub use self::crate_name`, `pub use crate::crate_name`
+ // or `pub use ::crate_name`.
+ //
+ // This has been historically allowed, but may be not allowed in future
+ // https://github.com/rust-lang/rust/issues/127909
+ if let Some(def) = def.types.as_mut() {
+ let is_extern_crate_reimport_without_prefix = || {
+ let Some(ImportOrExternCrate::ExternCrate(_)) = def.import else {
+ return false;
+ };
+ if kind == ImportKind::Glob {
+ return false;
+ }
+ matches!(import.path.kind, PathKind::Plain | PathKind::SELF)
+ && import.path.segments().len() < 2
+ };
+ if is_extern_crate_reimport_without_prefix() {
+ def.vis = vis;
+ }
+ }
+
self.update(module_id, &[(name.cloned(), def)], vis, Some(imp));
}
- ImportSource { kind: ImportKind::Glob, id, is_prelude, use_tree } => {
+ ImportSource { kind: ImportKind::Glob, id, is_prelude, use_tree, .. } => {
tracing::debug!("glob import: {:?}", import);
let glob = GlobId { use_: id, idx: use_tree };
match def.take_types() {
@@ -973,12 +995,11 @@ impl DefCollector<'_> {
Some(ModuleDefId::AdtId(AdtId::EnumId(e))) => {
cov_mark::hit!(glob_enum);
// glob import from enum => just import all the variants
- let resolutions = self
- .db
- .enum_variants(e)
+ let resolutions = e
+ .enum_variants(self.db)
.variants
.iter()
- .map(|&(variant, ref name)| {
+ .map(|&(variant, ref name, _)| {
let res = PerNs::both(variant.into(), variant.into(), vis, None);
(Some(name.clone()), res)
})
@@ -1000,8 +1021,7 @@ impl DefCollector<'_> {
let resolutions = if true {
vec![]
} else {
- self.db
- .trait_items(it)
+ TraitItems::query(self.db, it)
.items
.iter()
.map(|&(ref name, variant)| {
@@ -1150,33 +1170,8 @@ impl DefCollector<'_> {
vis: Visibility,
def_import_type: Option<ImportOrExternCrate>,
) -> bool {
- // `extern crate crate_name` things can be re-exported as `pub use crate_name`.
- // But they cannot be re-exported as `pub use self::crate_name`, `pub use crate::crate_name`
- // or `pub use ::crate_name`.
- //
- // This has been historically allowed, but may be not allowed in future
- // https://github.com/rust-lang/rust/issues/127909
if let Some(def) = defs.types.as_mut() {
- let is_extern_crate_reimport_without_prefix = || {
- let Some(ImportOrExternCrate::ExternCrate(_)) = def.import else {
- return false;
- };
- let Some(ImportOrExternCrate::Import(id)) = def_import_type else {
- return false;
- };
- let use_id = id.use_.lookup(self.db).id;
- let item_tree = use_id.item_tree(self.db);
- let use_kind = item_tree[use_id.value].use_tree.kind();
- let UseTreeKind::Single { path, .. } = use_kind else {
- return false;
- };
- path.segments().len() < 2
- };
- if is_extern_crate_reimport_without_prefix() {
- def.vis = vis;
- } else {
- def.vis = def.vis.min(vis, &self.def_map).unwrap_or(vis);
- }
+ def.vis = def.vis.min(vis, &self.def_map).unwrap_or(vis);
}
if let Some(def) = defs.values.as_mut() {
def.vis = def.vis.min(vis, &self.def_map).unwrap_or(vis);
@@ -1259,7 +1254,7 @@ impl DefCollector<'_> {
fn resolve_macros(&mut self) -> ReachedFixedPoint {
let mut macros = mem::take(&mut self.unresolved_macros);
let mut resolved = Vec::new();
- let mut push_resolved = |directive: &MacroDirective, call_id| {
+ let mut push_resolved = |directive: &MacroDirective<'_>, call_id| {
resolved.push((directive.module_id, directive.depth, directive.container, call_id));
};
@@ -1272,7 +1267,7 @@ impl DefCollector<'_> {
let mut eager_callback_buffer = vec![];
let mut res = ReachedFixedPoint::Yes;
// Retain unresolved macros after this round of resolution.
- let mut retain = |directive: &MacroDirective| {
+ let mut retain = |directive: &MacroDirective<'db>| {
let subns = match &directive.kind {
MacroDirectiveKind::FnLike { .. } => MacroSubNs::Bang,
MacroDirectiveKind::Attr { .. } | MacroDirectiveKind::Derive { .. } => {
@@ -1349,7 +1344,7 @@ impl DefCollector<'_> {
// Record its helper attributes.
if def_id.krate != self.def_map.krate {
let def_map = crate_def_map(self.db, def_id.krate);
- if let Some(helpers) = def_map.data.exported_derives.get(&def_id) {
+ if let Some(helpers) = def_map.data.exported_derives.get(&macro_id) {
self.def_map
.derive_helpers_in_scope
.entry(ast_id.ast_id.map(|it| it.upcast()))
@@ -1367,22 +1362,29 @@ impl DefCollector<'_> {
return Resolved::Yes;
}
}
- MacroDirectiveKind::Attr { ast_id: file_ast_id, mod_item, attr, tree } => {
+ MacroDirectiveKind::Attr {
+ ast_id: file_ast_id,
+ mod_item,
+ attr,
+ tree,
+ item_tree,
+ } => {
let &AstIdWithPath { ast_id, ref path } = file_ast_id;
let file_id = ast_id.file_id;
let mut recollect_without = |collector: &mut Self| {
// Remove the original directive since we resolved it.
let mod_dir = collector.mod_dirs[&directive.module_id].clone();
- collector.skip_attrs.insert(InFile::new(file_id, *mod_item), attr.id);
+ collector
+ .skip_attrs
+ .insert(InFile::new(file_id, mod_item.ast_id()), attr.id);
- let item_tree = tree.item_tree(self.db);
ModCollector {
def_collector: collector,
macro_depth: directive.depth,
module_id: directive.module_id,
tree_id: *tree,
- item_tree: &item_tree,
+ item_tree,
mod_dir,
}
.collect(&[*mod_item], directive.container);
@@ -1435,11 +1437,10 @@ impl DefCollector<'_> {
// normal (as that would just be an identity expansion with extra output)
// Instead we treat derive attributes special and apply them separately.
- let item_tree = tree.item_tree(self.db);
let ast_adt_id: FileAstId<ast::Adt> = match *mod_item {
- ModItem::Struct(strukt) => item_tree[strukt].ast_id().upcast(),
- ModItem::Union(union) => item_tree[union].ast_id().upcast(),
- ModItem::Enum(enum_) => item_tree[enum_].ast_id().upcast(),
+ ModItemId::Struct(ast_id) => ast_id.upcast(),
+ ModItemId::Union(ast_id) => ast_id.upcast(),
+ ModItemId::Enum(ast_id) => ast_id.upcast(),
_ => {
let diag = DefDiagnostic::invalid_derive_target(
directive.module_id,
@@ -1571,7 +1572,7 @@ impl DefCollector<'_> {
macro_depth: depth,
tree_id: TreeId::new(file_id, None),
module_id,
- item_tree: &item_tree,
+ item_tree,
mod_dir,
}
.collect(item_tree.top_level_items(), container);
@@ -1672,22 +1673,22 @@ impl DefCollector<'_> {
}
/// Walks a single module, populating defs, imports and macros
-struct ModCollector<'a, 'b> {
- def_collector: &'a mut DefCollector<'b>,
+struct ModCollector<'a, 'db> {
+ def_collector: &'a mut DefCollector<'db>,
macro_depth: usize,
module_id: LocalModuleId,
tree_id: TreeId,
- item_tree: &'a ItemTree,
+ item_tree: &'db ItemTree,
mod_dir: ModDir,
}
impl ModCollector<'_, '_> {
- fn collect_in_top_module(&mut self, items: &[ModItem]) {
+ fn collect_in_top_module(&mut self, items: &[ModItemId]) {
let module = self.def_collector.def_map.module_id(self.module_id);
self.collect(items, module.into())
}
- fn collect(&mut self, items: &[ModItem], container: ItemContainerId) {
+ fn collect(&mut self, items: &[ModItemId], container: ItemContainerId) {
let krate = self.def_collector.def_map.krate;
let is_crate_root =
self.module_id == DefMap::ROOT && self.def_collector.def_map.block.is_none();
@@ -1726,11 +1727,12 @@ impl ModCollector<'_, '_> {
.unwrap_or(Visibility::Public)
};
- let mut process_mod_item = |item: ModItem| {
- let attrs = self.item_tree.attrs(db, krate, item.into());
+ let mut process_mod_item = |item: ModItemId| {
+ let attrs = self.item_tree.attrs(db, krate, item.ast_id());
if let Some(cfg) = attrs.cfg() {
if !self.is_cfg_enabled(&cfg) {
- self.emit_unconfigured_diagnostic(self.tree_id, item.into(), &cfg);
+ let ast_id = item.ast_id().erase();
+ self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg);
return;
}
}
@@ -1747,39 +1749,31 @@ impl ModCollector<'_, '_> {
self.def_collector.crate_local_def_map.unwrap_or(&self.def_collector.local_def_map);
match item {
- ModItem::Mod(m) => self.collect_module(m, &attrs),
- ModItem::Use(item_tree_id) => {
- let id = UseLoc {
- container: module,
- id: ItemTreeId::new(self.tree_id, item_tree_id),
- }
- .intern(db);
+ ModItemId::Mod(m) => self.collect_module(m, &attrs),
+ ModItemId::Use(item_tree_id) => {
+ let id =
+ UseLoc { container: module, id: InFile::new(self.file_id(), item_tree_id) }
+ .intern(db);
let is_prelude = attrs.by_key(sym::prelude_import).exists();
- Import::from_use(
- self.item_tree,
- ItemTreeId::new(self.tree_id, item_tree_id),
- id,
- is_prelude,
- |import| {
- self.def_collector.unresolved_imports.push(ImportDirective {
- module_id: self.module_id,
- import,
- status: PartialResolvedImport::Unresolved,
- });
- },
- )
+ Import::from_use(self.item_tree, item_tree_id, id, is_prelude, |import| {
+ self.def_collector.unresolved_imports.push(ImportDirective {
+ module_id: self.module_id,
+ import,
+ status: PartialResolvedImport::Unresolved,
+ });
+ })
}
- ModItem::ExternCrate(item_tree_id) => {
+ ModItemId::ExternCrate(item_tree_id) => {
+ let item_tree::ExternCrate { name, visibility, alias } =
+ &self.item_tree[item_tree_id];
+
let id = ExternCrateLoc {
container: module,
- id: ItemTreeId::new(self.tree_id, item_tree_id),
+ id: InFile::new(self.tree_id.file_id(), item_tree_id),
}
.intern(db);
def_map.modules[self.module_id].scope.define_extern_crate_decl(id);
- let item_tree::ExternCrate { name, visibility, alias, ast_id } =
- &self.item_tree[item_tree_id];
-
let is_self = *name == sym::self_;
let resolved = if is_self {
cov_mark::hit!(extern_crate_self_as);
@@ -1838,15 +1832,15 @@ impl ModCollector<'_, '_> {
self.def_collector.def_map.diagnostics.push(
DefDiagnostic::unresolved_extern_crate(
module_id,
- InFile::new(self.file_id(), *ast_id),
+ InFile::new(self.file_id(), item_tree_id),
),
);
}
}
- ModItem::ExternBlock(block) => {
+ ModItemId::ExternBlock(block) => {
let extern_block_id = ExternBlockLoc {
container: module,
- id: ItemTreeId::new(self.tree_id, block),
+ id: InFile::new(self.file_id(), block),
}
.intern(db);
self.def_collector.def_map.modules[self.module_id]
@@ -1857,19 +1851,20 @@ impl ModCollector<'_, '_> {
ItemContainerId::ExternBlockId(extern_block_id),
)
}
- ModItem::MacroCall(mac) => self.collect_macro_call(&self.item_tree[mac], container),
- ModItem::MacroRules(id) => self.collect_macro_rules(id, module),
- ModItem::Macro2(id) => self.collect_macro_def(id, module),
- ModItem::Impl(imp) => {
+ ModItemId::MacroCall(mac) => self.collect_macro_call(mac, container),
+ ModItemId::MacroRules(id) => self.collect_macro_rules(id, module),
+ ModItemId::Macro2(id) => self.collect_macro_def(id, module),
+ ModItemId::Impl(imp) => {
let impl_id =
- ImplLoc { container: module, id: ItemTreeId::new(self.tree_id, imp) }
+ ImplLoc { container: module, id: InFile::new(self.file_id(), imp) }
.intern(db);
self.def_collector.def_map.modules[self.module_id].scope.define_impl(impl_id)
}
- ModItem::Function(id) => {
+ ModItemId::Function(id) => {
let it = &self.item_tree[id];
let fn_id =
- FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db);
+ FunctionLoc { container, id: InFile::new(self.tree_id.file_id(), id) }
+ .intern(db);
let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
@@ -1880,8 +1875,7 @@ impl ModCollector<'_, '_> {
if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) {
self.def_collector.export_proc_macro(
proc_macro,
- ItemTreeId::new(self.tree_id, id),
- InFile::new(self.file_id(), self.item_tree[id].ast_id()),
+ InFile::new(self.file_id(), id),
fn_id,
);
}
@@ -1889,13 +1883,13 @@ impl ModCollector<'_, '_> {
update_def(self.def_collector, fn_id.into(), &it.name, vis, false);
}
- ModItem::Struct(id) => {
+ ModItemId::Struct(id) => {
let it = &self.item_tree[id];
let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
update_def(
self.def_collector,
- StructLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+ StructLoc { container: module, id: InFile::new(self.file_id(), id) }
.intern(db)
.into(),
&it.name,
@@ -1903,13 +1897,13 @@ impl ModCollector<'_, '_> {
!matches!(it.shape, FieldsShape::Record),
);
}
- ModItem::Union(id) => {
+ ModItemId::Union(id) => {
let it = &self.item_tree[id];
let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
update_def(
self.def_collector,
- UnionLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+ UnionLoc { container: module, id: InFile::new(self.file_id(), id) }
.intern(db)
.into(),
&it.name,
@@ -1917,19 +1911,20 @@ impl ModCollector<'_, '_> {
false,
);
}
- ModItem::Enum(id) => {
+ ModItemId::Enum(id) => {
let it = &self.item_tree[id];
let enum_ =
- EnumLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+ EnumLoc { container: module, id: InFile::new(self.tree_id.file_id(), id) }
.intern(db);
let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
update_def(self.def_collector, enum_.into(), &it.name, vis, false);
}
- ModItem::Const(id) => {
+ ModItemId::Const(id) => {
let it = &self.item_tree[id];
let const_id =
- ConstLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db);
+ ConstLoc { container, id: InFile::new(self.tree_id.file_id(), id) }
+ .intern(db);
match &it.name {
Some(name) => {
@@ -1945,13 +1940,13 @@ impl ModCollector<'_, '_> {
}
}
}
- ModItem::Static(id) => {
+ ModItemId::Static(id) => {
let it = &self.item_tree[id];
let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
update_def(
self.def_collector,
- StaticLoc { container, id: ItemTreeId::new(self.tree_id, id) }
+ StaticLoc { container, id: InFile::new(self.file_id(), id) }
.intern(db)
.into(),
&it.name,
@@ -1959,13 +1954,13 @@ impl ModCollector<'_, '_> {
false,
);
}
- ModItem::Trait(id) => {
+ ModItemId::Trait(id) => {
let it = &self.item_tree[id];
let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
update_def(
self.def_collector,
- TraitLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+ TraitLoc { container: module, id: InFile::new(self.file_id(), id) }
.intern(db)
.into(),
&it.name,
@@ -1973,13 +1968,13 @@ impl ModCollector<'_, '_> {
false,
);
}
- ModItem::TraitAlias(id) => {
+ ModItemId::TraitAlias(id) => {
let it = &self.item_tree[id];
let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
update_def(
self.def_collector,
- TraitAliasLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+ TraitAliasLoc { container: module, id: InFile::new(self.file_id(), id) }
.intern(db)
.into(),
&it.name,
@@ -1987,13 +1982,13 @@ impl ModCollector<'_, '_> {
false,
);
}
- ModItem::TypeAlias(id) => {
+ ModItemId::TypeAlias(id) => {
let it = &self.item_tree[id];
let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]);
update_def(
self.def_collector,
- TypeAliasLoc { container, id: ItemTreeId::new(self.tree_id, id) }
+ TypeAliasLoc { container, id: InFile::new(self.file_id(), id) }
.intern(db)
.into(),
&it.name,
@@ -2010,12 +2005,12 @@ impl ModCollector<'_, '_> {
if is_crate_root {
items
.iter()
- .filter(|it| matches!(it, ModItem::ExternCrate(..)))
+ .filter(|it| matches!(it, ModItemId::ExternCrate(..)))
.copied()
.for_each(&mut process_mod_item);
items
.iter()
- .filter(|it| !matches!(it, ModItem::ExternCrate(..)))
+ .filter(|it| !matches!(it, ModItemId::ExternCrate(..)))
.copied()
.for_each(process_mod_item);
} else {
@@ -2056,19 +2051,18 @@ impl ModCollector<'_, '_> {
);
}
- fn collect_module(&mut self, module_id: FileItemTreeId<Mod>, attrs: &Attrs) {
+ fn collect_module(&mut self, module_ast_id: ItemTreeAstId<Mod>, attrs: &Attrs) {
let path_attr = attrs.by_key(sym::path).string_value_unescape();
let is_macro_use = attrs.by_key(sym::macro_use).exists();
- let module = &self.item_tree[module_id];
+ let module = &self.item_tree[module_ast_id];
match &module.kind {
// inline module, just recurse
ModKind::Inline { items } => {
let module_id = self.push_child_module(
module.name.clone(),
- module.ast_id,
+ module_ast_id,
None,
&self.item_tree[module.visibility],
- module_id,
);
let Some(mod_dir) =
@@ -2091,7 +2085,7 @@ impl ModCollector<'_, '_> {
}
// out of line module, resolve, parse and recurse
ModKind::Outline => {
- let ast_id = AstId::new(self.file_id(), module.ast_id);
+ let ast_id = AstId::new(self.file_id(), module_ast_id);
let db = self.def_collector.db;
match self.mod_dir.resolve_declaration(
db,
@@ -2110,8 +2104,7 @@ impl ModCollector<'_, '_> {
match is_enabled {
Err(cfg) => {
self.emit_unconfigured_diagnostic(
- self.tree_id,
- AttrOwner::ModItem(module_id.into()),
+ InFile::new(self.file_id(), module_ast_id.erase()),
&cfg,
);
}
@@ -2121,14 +2114,13 @@ impl ModCollector<'_, '_> {
ast_id.value,
Some((file_id, is_mod_rs)),
&self.item_tree[module.visibility],
- module_id,
);
ModCollector {
def_collector: self.def_collector,
macro_depth: self.macro_depth,
module_id,
tree_id: TreeId::new(file_id.into(), None),
- item_tree: &item_tree,
+ item_tree,
mod_dir,
}
.collect_in_top_module(item_tree.top_level_items());
@@ -2149,7 +2141,6 @@ impl ModCollector<'_, '_> {
ast_id.value,
None,
&self.item_tree[module.visibility],
- module_id,
);
self.def_collector.def_map.diagnostics.push(
DefDiagnostic::unresolved_module(self.module_id, ast_id, candidates),
@@ -2166,7 +2157,6 @@ impl ModCollector<'_, '_> {
declaration: FileAstId<ast::Module>,
definition: Option<(EditionedFileId, bool)>,
visibility: &crate::visibility::RawVisibility,
- mod_tree_id: FileItemTreeId<Mod>,
) -> LocalModuleId {
let def_map = &mut self.def_collector.def_map;
let vis = def_map
@@ -2179,15 +2169,14 @@ impl ModCollector<'_, '_> {
)
.unwrap_or(Visibility::Public);
let origin = match definition {
- None => ModuleOrigin::Inline {
- definition: declaration,
- definition_tree_id: ItemTreeId::new(self.tree_id, mod_tree_id),
- },
+ None => {
+ ModuleOrigin::Inline { definition: declaration, definition_tree_id: self.tree_id }
+ }
Some((definition, is_mod_rs)) => ModuleOrigin::File {
declaration,
definition,
is_mod_rs,
- declaration_tree_id: ItemTreeId::new(self.tree_id, mod_tree_id),
+ declaration_tree_id: self.tree_id,
},
};
@@ -2228,11 +2217,14 @@ impl ModCollector<'_, '_> {
fn resolve_attributes(
&mut self,
attrs: &Attrs,
- mod_item: ModItem,
+ mod_item: ModItemId,
container: ItemContainerId,
) -> Result<(), ()> {
- let mut ignore_up_to =
- self.def_collector.skip_attrs.get(&InFile::new(self.file_id(), mod_item)).copied();
+ let mut ignore_up_to = self
+ .def_collector
+ .skip_attrs
+ .get(&InFile::new(self.file_id(), mod_item.ast_id()))
+ .copied();
let iter = attrs
.iter()
.dedup_by(|a, b| {
@@ -2262,11 +2254,7 @@ impl ModCollector<'_, '_> {
attr.path.display(self.def_collector.db, Edition::LATEST)
);
- let ast_id = AstIdWithPath::new(
- self.file_id(),
- mod_item.ast_id(self.item_tree),
- attr.path.clone(),
- );
+ let ast_id = AstIdWithPath::new(self.file_id(), mod_item.ast_id(), attr.path.clone());
self.def_collector.unresolved_macros.push(MacroDirective {
module_id: self.module_id,
depth: self.macro_depth + 1,
@@ -2275,6 +2263,7 @@ impl ModCollector<'_, '_> {
attr: attr.clone(),
mod_item,
tree: self.tree_id,
+ item_tree: self.item_tree,
},
container,
});
@@ -2285,11 +2274,11 @@ impl ModCollector<'_, '_> {
Ok(())
}
- fn collect_macro_rules(&mut self, id: FileItemTreeId<MacroRules>, module: ModuleId) {
+ fn collect_macro_rules(&mut self, ast_id: ItemTreeAstId<MacroRules>, module: ModuleId) {
let krate = self.def_collector.def_map.krate;
- let mac = &self.item_tree[id];
- let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into());
- let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast());
+ let mac = &self.item_tree[ast_id];
+ let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast());
+ let f_ast_id = InFile::new(self.file_id(), ast_id.upcast());
let export_attr = || attrs.by_key(sym::macro_export);
@@ -2336,7 +2325,7 @@ impl ModCollector<'_, '_> {
self.def_collector
.def_map
.diagnostics
- .push(DefDiagnostic::unimplemented_builtin_macro(self.module_id, ast_id));
+ .push(DefDiagnostic::unimplemented_builtin_macro(self.module_id, f_ast_id));
return;
}
}
@@ -2352,16 +2341,13 @@ impl ModCollector<'_, '_> {
let macro_id = MacroRulesLoc {
container: module,
- id: ItemTreeId::new(self.tree_id, id),
+ id: InFile::new(self.file_id(), ast_id),
flags,
expander,
edition: self.def_collector.def_map.data.edition,
}
.intern(self.def_collector.db);
- self.def_collector.def_map.macro_def_to_macro_id.insert(
- InFile::new(self.file_id(), self.item_tree[id].ast_id()).erase(),
- macro_id.into(),
- );
+ self.def_collector.def_map.macro_def_to_macro_id.insert(f_ast_id.erase(), macro_id.into());
self.def_collector.define_macro_rules(
self.module_id,
mac.name.clone(),
@@ -2370,14 +2356,14 @@ impl ModCollector<'_, '_> {
);
}
- fn collect_macro_def(&mut self, id: FileItemTreeId<Macro2>, module: ModuleId) {
+ fn collect_macro_def(&mut self, ast_id: ItemTreeAstId<Macro2>, module: ModuleId) {
let krate = self.def_collector.def_map.krate;
- let mac = &self.item_tree[id];
- let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast());
+ let mac = &self.item_tree[ast_id];
+ let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast());
+ let f_ast_id = InFile::new(self.file_id(), ast_id.upcast());
// Case 1: builtin macros
let mut helpers_opt = None;
- let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into());
let expander = if attrs.by_key(sym::rustc_builtin_macro).exists() {
if let Some(expander) = find_builtin_macro(&mac.name) {
match expander {
@@ -2409,7 +2395,7 @@ impl ModCollector<'_, '_> {
self.def_collector
.def_map
.diagnostics
- .push(DefDiagnostic::unimplemented_builtin_macro(self.module_id, ast_id));
+ .push(DefDiagnostic::unimplemented_builtin_macro(self.module_id, f_ast_id));
return;
}
} else {
@@ -2420,16 +2406,13 @@ impl ModCollector<'_, '_> {
let macro_id = Macro2Loc {
container: module,
- id: ItemTreeId::new(self.tree_id, id),
+ id: InFile::new(self.file_id(), ast_id),
expander,
allow_internal_unsafe,
edition: self.def_collector.def_map.data.edition,
}
.intern(self.def_collector.db);
- self.def_collector.def_map.macro_def_to_macro_id.insert(
- InFile::new(self.file_id(), self.item_tree[id].ast_id()).erase(),
- macro_id.into(),
- );
+ self.def_collector.def_map.macro_def_to_macro_id.insert(f_ast_id.erase(), macro_id.into());
self.def_collector.define_macro_def(
self.module_id,
mac.name.clone(),
@@ -2441,16 +2424,17 @@ impl ModCollector<'_, '_> {
Arc::get_mut(&mut self.def_collector.def_map.data)
.unwrap()
.exported_derives
- .insert(self.def_collector.db.macro_def(macro_id.into()), helpers);
+ .insert(macro_id.into(), helpers);
}
}
}
fn collect_macro_call(
&mut self,
- &MacroCall { ref path, ast_id, expand_to, ctxt }: &MacroCall,
+ ast_id: FileAstId<ast::MacroCall>,
container: ItemContainerId,
) {
+ let &MacroCall { ref path, expand_to, ctxt } = &self.item_tree[ast_id];
let ast_id = AstIdWithPath::new(self.file_id(), ast_id, path.clone());
let db = self.def_collector.db;
@@ -2565,16 +2549,16 @@ impl ModCollector<'_, '_> {
self.def_collector.cfg_options.check(cfg) != Some(false)
}
- fn emit_unconfigured_diagnostic(&mut self, tree_id: TreeId, item: AttrOwner, cfg: &CfgExpr) {
+ fn emit_unconfigured_diagnostic(&mut self, ast_id: ErasedAstId, cfg: &CfgExpr) {
self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id,
- tree_id,
- item,
+ ast_id,
cfg.clone(),
self.def_collector.cfg_options.clone(),
));
}
+ #[inline]
fn file_id(&self) -> HirFileId {
self.tree_id.file_id()
}
diff --git a/crates/hir-def/src/nameres/diagnostics.rs b/crates/hir-def/src/nameres/diagnostics.rs
index de3d2f4836..c495a07449 100644
--- a/crates/hir-def/src/nameres/diagnostics.rs
+++ b/crates/hir-def/src/nameres/diagnostics.rs
@@ -3,22 +3,18 @@
use std::ops::Not;
use cfg::{CfgExpr, CfgOptions};
-use hir_expand::{ExpandErrorKind, MacroCallKind, attrs::AttrId, mod_path::ModPath};
+use hir_expand::{ErasedAstId, ExpandErrorKind, MacroCallKind, attrs::AttrId, mod_path::ModPath};
use la_arena::Idx;
use syntax::ast;
-use crate::{
- AstId,
- item_tree::{self, AttrOwner, ItemTreeId, TreeId},
- nameres::LocalModuleId,
-};
+use crate::{AstId, nameres::LocalModuleId};
#[derive(Debug, PartialEq, Eq)]
pub enum DefDiagnosticKind {
UnresolvedModule { ast: AstId<ast::Module>, candidates: Box<[String]> },
UnresolvedExternCrate { ast: AstId<ast::ExternCrate> },
- UnresolvedImport { id: ItemTreeId<item_tree::Use>, index: Idx<ast::UseTree> },
- UnconfiguredCode { tree: TreeId, item: AttrOwner, cfg: CfgExpr, opts: CfgOptions },
+ UnresolvedImport { id: AstId<ast::Use>, index: Idx<ast::UseTree> },
+ UnconfiguredCode { ast_id: ErasedAstId, cfg: CfgExpr, opts: CfgOptions },
UnresolvedMacroCall { ast: MacroCallKind, path: ModPath },
UnimplementedBuiltinMacro { ast: AstId<ast::Macro> },
InvalidDeriveTarget { ast: AstId<ast::Item>, id: usize },
@@ -28,7 +24,7 @@ pub enum DefDiagnosticKind {
}
#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct DefDiagnostics(Option<triomphe::Arc<Box<[DefDiagnostic]>>>);
+pub struct DefDiagnostics(Option<triomphe::ThinArc<(), DefDiagnostic>>);
impl DefDiagnostics {
pub fn new(diagnostics: Vec<DefDiagnostic>) -> Self {
@@ -36,12 +32,12 @@ impl DefDiagnostics {
diagnostics
.is_empty()
.not()
- .then(|| triomphe::Arc::new(diagnostics.into_boxed_slice())),
+ .then(|| triomphe::ThinArc::from_header_and_iter((), diagnostics.into_iter())),
)
}
pub fn iter(&self) -> impl Iterator<Item = &DefDiagnostic> {
- self.0.as_ref().into_iter().flat_map(|it| &***it)
+ self.0.as_ref().into_iter().flat_map(|it| &it.slice)
}
}
@@ -75,7 +71,7 @@ impl DefDiagnostic {
pub(super) fn unresolved_import(
container: LocalModuleId,
- id: ItemTreeId<item_tree::Use>,
+ id: AstId<ast::Use>,
index: Idx<ast::UseTree>,
) -> Self {
Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } }
@@ -92,14 +88,13 @@ impl DefDiagnostic {
pub fn unconfigured_code(
container: LocalModuleId,
- tree: TreeId,
- item: AttrOwner,
+ ast_id: ErasedAstId,
cfg: CfgExpr,
opts: CfgOptions,
) -> Self {
Self {
in_module: container,
- kind: DefDiagnosticKind::UnconfiguredCode { tree, item, cfg, opts },
+ kind: DefDiagnosticKind::UnconfiguredCode { ast_id, cfg, opts },
}
}
diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs
index 74ce33a641..4641b220da 100644
--- a/crates/hir-def/src/nameres/path_resolution.rs
+++ b/crates/hir-def/src/nameres/path_resolution.rs
@@ -12,7 +12,6 @@
use either::Either;
use hir_expand::{
- Lookup,
mod_path::{ModPath, PathKind},
name::Name,
};
@@ -25,8 +24,8 @@ use crate::{
item_scope::{BUILTIN_SCOPE, ImportOrExternCrate},
item_tree::FieldsShape,
nameres::{
- BlockInfo, BuiltinShadowMode, DefMap, LocalDefMap, MacroSubNs, crate_def_map,
- sub_namespace_match,
+ BlockInfo, BuiltinShadowMode, DefMap, LocalDefMap, MacroSubNs, assoc::TraitItems,
+ crate_def_map, sub_namespace_match,
},
per_ns::PerNs,
visibility::{RawVisibility, Visibility},
@@ -107,7 +106,7 @@ impl DefMap {
visibility: &RawVisibility,
within_impl: bool,
) -> Option<Visibility> {
- let mut vis = match visibility {
+ let vis = match visibility {
RawVisibility::Module(path, explicitness) => {
let (result, remaining) = self.resolve_path(
local_def_map,
@@ -121,29 +120,36 @@ impl DefMap {
return None;
}
let types = result.take_types()?;
- match types {
+ let mut vis = match types {
ModuleDefId::ModuleId(m) => Visibility::Module(m, *explicitness),
// error: visibility needs to refer to module
_ => {
return None;
}
+ };
+
+ // In block expressions, `self` normally refers to the containing non-block module, and
+ // `super` to its parent (etc.). However, visibilities must only refer to a module in the
+ // DefMap they're written in, so we restrict them when that happens.
+ if let Visibility::Module(m, mv) = vis {
+ // ...unless we're resolving visibility for an associated item in an impl.
+ if self.block_id() != m.block && !within_impl {
+ vis = Visibility::Module(self.module_id(Self::ROOT), mv);
+ tracing::debug!(
+ "visibility {:?} points outside DefMap, adjusting to {:?}",
+ m,
+ vis
+ );
+ }
}
+ vis
+ }
+ RawVisibility::PubSelf(explicitness) => {
+ Visibility::Module(self.module_id(original_module), *explicitness)
}
RawVisibility::Public => Visibility::Public,
+ RawVisibility::PubCrate => Visibility::PubCrate(self.krate),
};
-
- // In block expressions, `self` normally refers to the containing non-block module, and
- // `super` to its parent (etc.). However, visibilities must only refer to a module in the
- // DefMap they're written in, so we restrict them when that happens.
- if let Visibility::Module(m, mv) = vis {
- // ...unless we're resolving visibility for an associated item in an impl.
- if self.block_id() != m.block && !within_impl {
- cov_mark::hit!(adjust_vis_in_block_def_map);
- vis = Visibility::Module(self.module_id(Self::ROOT), mv);
- tracing::debug!("visibility {:?} points outside DefMap, adjusting to {:?}", m, vis);
- }
- }
-
Some(vis)
}
@@ -529,23 +535,22 @@ impl DefMap {
// enum variant
cov_mark::hit!(can_import_enum_variant);
- let res =
- db.enum_variants(e).variants.iter().find(|(_, name)| name == segment).map(
- |&(variant, _)| {
- let item_tree_id = variant.lookup(db).id;
- match item_tree_id.item_tree(db)[item_tree_id.value].shape {
- FieldsShape::Record => {
- PerNs::types(variant.into(), Visibility::Public, None)
- }
- FieldsShape::Tuple | FieldsShape::Unit => PerNs::both(
- variant.into(),
- variant.into(),
- Visibility::Public,
- None,
- ),
- }
- },
- );
+ let res = e
+ .enum_variants(db)
+ .variants
+ .iter()
+ .find(|(_, name, _)| name == segment)
+ .map(|&(variant, _, shape)| match shape {
+ FieldsShape::Record => {
+ PerNs::types(variant.into(), Visibility::Public, None)
+ }
+ FieldsShape::Tuple | FieldsShape::Unit => PerNs::both(
+ variant.into(),
+ variant.into(),
+ Visibility::Public,
+ None,
+ ),
+ });
// FIXME: Need to filter visibility here and below? Not sure.
return match res {
Some(res) => {
@@ -579,8 +584,11 @@ impl DefMap {
// now resulting in a cycle.
// To properly implement this, trait item collection needs to be done in def map
// collection...
- let item =
- if true { None } else { db.trait_items(t).assoc_item_by_name(segment) };
+ let item = if true {
+ None
+ } else {
+ TraitItems::query(db, t).assoc_item_by_name(segment)
+ };
return match item {
Some(item) => ResolvePathResult::new(
match item {
diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs
index 948e8bed66..338851b715 100644
--- a/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/crates/hir-def/src/nameres/tests/incremental.rs
@@ -2,13 +2,13 @@ use base_db::{
CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData,
DependencyBuilder, Env, RootQueryDb, SourceDatabase,
};
+use expect_test::{Expect, expect};
use intern::Symbol;
use span::Edition;
use test_fixture::WithFixture;
use triomphe::Arc;
use crate::{
- AdtId, ModuleDefId,
db::DefDatabase,
nameres::{crate_def_map, tests::TestDB},
};
@@ -16,29 +16,29 @@ use crate::{
fn check_def_map_is_not_recomputed(
#[rust_analyzer::rust_fixture] ra_fixture_initial: &str,
#[rust_analyzer::rust_fixture] ra_fixture_change: &str,
+ expecta: Expect,
+ expectb: Expect,
) {
let (mut db, pos) = TestDB::with_position(ra_fixture_initial);
let krate = db.fetch_test_crate();
- {
- let events = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
crate_def_map(&db, krate);
- });
- assert!(
- format!("{events:?}").contains("crate_local_def_map"),
- "no crate def map computed:\n{events:#?}",
- )
- }
+ },
+ &[],
+ expecta,
+ );
db.set_file_text(pos.file_id.file_id(&db), ra_fixture_change);
- {
- let events = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
crate_def_map(&db, krate);
- });
- assert!(
- !format!("{events:?}").contains("crate_local_def_map"),
- "crate def map invalidated:\n{events:#?}",
- )
- }
+ },
+ &[("crate_local_def_map", 0)],
+ expectb,
+ );
}
#[test]
@@ -104,15 +104,20 @@ pub const BAZ: u32 = 0;
Arc::ptr_eq(&all_crates_before, &all_crates_after),
"the all_crates list should not have been invalidated"
);
-
- let events = db.log_executed(|| {
- for &krate in db.all_crates().iter() {
- crate_def_map(&db, krate);
- }
- });
- let invalidated_def_maps =
- events.iter().filter(|event| event.contains("crate_local_def_map")).count();
- assert_eq!(invalidated_def_maps, 1, "{events:#?}")
+ execute_assert_events(
+ &db,
+ || {
+ for &krate in db.all_crates().iter() {
+ crate_def_map(&db, krate);
+ }
+ },
+ &[("crate_local_def_map", 1)],
+ expect![[r#"
+ [
+ "crate_local_def_map",
+ ]
+ "#]],
+ );
}
#[test]
@@ -152,6 +157,33 @@ fn foo() -> i32 { 92 }
#[cfg(never)]
fn no() {}
",
+ expect![[r#"
+ [
+ "crate_local_def_map",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "EnumVariants::of_",
+ ]
+ "#]],
+ expect![[r#"
+ [
+ "parse_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ "real_span_map_shim",
+ "EnumVariants::of_",
+ ]
+ "#]],
);
}
@@ -183,6 +215,41 @@ m!(Y);
pub struct S {}
",
+ expect![[r#"
+ [
+ "crate_local_def_map",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "macro_def_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_macro_expansion_shim",
+ "macro_arg_shim",
+ "decl_macro_expander_shim",
+ ]
+ "#]],
+ expect![[r#"
+ [
+ "parse_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ "real_span_map_shim",
+ "macro_arg_shim",
+ "parse_macro_expansion_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ ]
+ "#]],
);
}
@@ -206,6 +273,49 @@ fn f() {}
#[proc_macros::identity]
fn f() { foo }
",
+ expect![[r#"
+ [
+ "crate_local_def_map",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "crate_local_def_map",
+ "proc_macros_for_crate_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "macro_def_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_macro_expansion_shim",
+ "expand_proc_macro_shim",
+ "macro_arg_shim",
+ "proc_macro_span_shim",
+ ]
+ "#]],
+ expect![[r#"
+ [
+ "parse_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ "real_span_map_shim",
+ "macro_arg_shim",
+ "expand_proc_macro_shim",
+ "parse_macro_expansion_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ ]
+ "#]],
);
}
@@ -287,6 +397,60 @@ m2!(X);
#[derive(proc_macros::DeriveIdentity)]
pub struct S {}
",
+ expect![[r#"
+ [
+ "crate_local_def_map",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "macro_def_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_macro_expansion_shim",
+ "macro_arg_shim",
+ "decl_macro_expander_shim",
+ "macro_def_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_macro_expansion_shim",
+ "macro_arg_shim",
+ "decl_macro_expander_shim",
+ "crate_local_def_map",
+ "proc_macros_for_crate_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "macro_def_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_macro_expansion_shim",
+ "expand_proc_macro_shim",
+ "macro_arg_shim",
+ "proc_macro_span_shim",
+ ]
+ "#]],
+ expect![[r#"
+ [
+ "parse_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ "real_span_map_shim",
+ "macro_arg_shim",
+ "macro_arg_shim",
+ "decl_macro_expander_shim",
+ "macro_arg_shim",
+ ]
+ "#]],
);
}
@@ -341,19 +505,46 @@ m!(Z);
"#,
);
let krate = db.test_crate();
- {
- let events = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
let crate_def_map = crate_def_map(&db, krate);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 4);
- });
- let n_recalculated_item_trees =
- events.iter().filter(|it| it.contains("file_item_tree_shim")).count();
- assert_eq!(n_recalculated_item_trees, 6);
- let n_reparsed_macros =
- events.iter().filter(|it| it.contains("parse_macro_expansion_shim")).count();
- assert_eq!(n_reparsed_macros, 3);
- }
+ },
+ &[("file_item_tree_query", 6), ("parse_macro_expansion_shim", 3)],
+ expect![[r#"
+ [
+ "crate_local_def_map",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "macro_def_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_macro_expansion_shim",
+ "macro_arg_shim",
+ "decl_macro_expander_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_macro_expansion_shim",
+ "macro_arg_shim",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_macro_expansion_shim",
+ "macro_arg_shim",
+ ]
+ "#]],
+ );
let new_text = r#"
m!(X);
@@ -363,28 +554,31 @@ m!(Z);
"#;
db.set_file_text(pos.file_id.file_id(&db), new_text);
- {
- let events = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
let crate_def_map = crate_def_map(&db, krate);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 4);
- });
- let n_recalculated_item_trees =
- events.iter().filter(|it| it.contains("file_item_tree_shim")).count();
- assert_eq!(n_recalculated_item_trees, 1, "{events:#?}");
- let n_reparsed_macros =
- events.iter().filter(|it| it.contains("parse_macro_expansion_shim")).count();
- assert_eq!(n_reparsed_macros, 0);
- }
+ },
+ &[("file_item_tree_query", 1), ("parse_macro_expansion_shim", 0)],
+ expect![[r#"
+ [
+ "parse_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ "real_span_map_shim",
+ "macro_arg_shim",
+ "macro_arg_shim",
+ "macro_arg_shim",
+ ]
+ "#]],
+ );
}
#[test]
fn item_tree_prevents_reparsing() {
- // The `ItemTree` is used by both name resolution and the various queries in `adt.rs` and
- // `data.rs`. After computing the `ItemTree` and deleting the parse tree, we should be able to
- // run those other queries without triggering a reparse.
-
- let (db, pos) = TestDB::with_position(
+ let (mut db, pos) = TestDB::with_position(
r#"
pub struct S;
pub union U {}
@@ -399,53 +593,54 @@ pub static ST: u8 = 0;
pub type Ty = ();
"#,
);
- let krate = db.test_crate();
- {
- let events = db.log_executed(|| {
- db.file_item_tree(pos.file_id.into());
- });
- let n_calculated_item_trees =
- events.iter().filter(|it| it.contains("file_item_tree_shim")).count();
- assert_eq!(n_calculated_item_trees, 1);
- let n_parsed_files = events.iter().filter(|it| it.contains("parse")).count();
- assert_eq!(n_parsed_files, 1);
- }
- // FIXME(salsa-transition): bring this back
- // base_db::ParseQuery.in_db(&db).purge();
+ execute_assert_events(
+ &db,
+ || {
+ db.file_item_tree(pos.file_id.into());
+ },
+ &[("file_item_tree_query", 1), ("parse", 1)],
+ expect![[r#"
+ [
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ ]
+ "#]],
+ );
- {
- let events = db.log_executed(|| {
- let crate_def_map = crate_def_map(&db, krate);
- let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
- assert_eq!(module_data.scope.resolutions().count(), 8);
- assert_eq!(module_data.scope.impls().count(), 1);
+ let file_id = pos.file_id.file_id(&db);
+ let file_text = db.file_text(file_id).text(&db);
+ db.set_file_text(file_id, &format!("{file_text}\n"));
- for imp in module_data.scope.impls() {
- db.impl_signature(imp);
- }
+ execute_assert_events(
+ &db,
+ || {
+ db.file_item_tree(pos.file_id.into());
+ },
+ &[("file_item_tree_query", 1), ("parse", 1)],
+ expect![[r#"
+ [
+ "parse_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ "real_span_map_shim",
+ ]
+ "#]],
+ );
+}
- for (_, res) in module_data.scope.resolutions() {
- match res.values.map(|it| it.def).or(res.types.map(|it| it.def)).unwrap() {
- ModuleDefId::FunctionId(f) => _ = db.function_signature(f),
- ModuleDefId::AdtId(adt) => match adt {
- AdtId::StructId(it) => _ = db.struct_signature(it),
- AdtId::UnionId(it) => _ = db.union_signature(it),
- AdtId::EnumId(it) => _ = db.enum_signature(it),
- },
- ModuleDefId::ConstId(it) => _ = db.const_signature(it),
- ModuleDefId::StaticId(it) => _ = db.static_signature(it),
- ModuleDefId::TraitId(it) => _ = db.trait_signature(it),
- ModuleDefId::TraitAliasId(it) => _ = db.trait_alias_signature(it),
- ModuleDefId::TypeAliasId(it) => _ = db.type_alias_signature(it),
- ModuleDefId::EnumVariantId(_)
- | ModuleDefId::ModuleId(_)
- | ModuleDefId::MacroId(_)
- | ModuleDefId::BuiltinType(_) => unreachable!(),
- }
- }
- });
- let n_reparsed_files = events.iter().filter(|it| it.contains("parse(")).count();
- assert_eq!(n_reparsed_files, 0);
+fn execute_assert_events(
+ db: &TestDB,
+ f: impl FnOnce(),
+ required: &[(&str, usize)],
+ expect: Expect,
+) {
+ let events = db.log_executed(f);
+ for (event, count) in required {
+ let n = events.iter().filter(|it| it.contains(event)).count();
+ assert_eq!(n, *count, "Expected {event} to be executed {count} times, but only got {n}");
}
+ expect.assert_debug_eq(&events);
}
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index 16988ddf04..316ad5dae6 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -5,21 +5,22 @@ use base_db::Crate;
use hir_expand::{
MacroDefId,
mod_path::{ModPath, PathKind},
- name::Name,
+ name::{AsName, Name},
};
use intern::{Symbol, sym};
use itertools::Itertools as _;
use rustc_hash::FxHashSet;
use smallvec::{SmallVec, smallvec};
use span::SyntaxContext;
+use syntax::ast::HasName;
use triomphe::Arc;
use crate::{
- AdtId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId,
- ExternBlockId, ExternCrateId, FunctionId, FxIndexMap, GenericDefId, GenericParamId, HasModule,
- ImplId, ItemContainerId, ItemTreeLoc, LifetimeParamId, LocalModuleId, Lookup, Macro2Id,
- MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId,
- TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UseId, VariantId,
+ AdtId, AstIdLoc, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId,
+ EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, FxIndexMap, GenericDefId,
+ GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId, LocalModuleId, Lookup,
+ Macro2Id, MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId,
+ TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UseId, VariantId,
builtin_type::BuiltinType,
db::DefDatabase,
expr_store::{
@@ -32,10 +33,10 @@ use crate::{
generics::{GenericParams, TypeOrConstParamData},
},
item_scope::{BUILTIN_SCOPE, BuiltinShadowMode, ImportOrExternCrate, ImportOrGlob, ItemScope},
- item_tree::ImportAlias,
lang_item::LangItemTarget,
nameres::{DefMap, LocalDefMap, MacroSubNs, ResolvePathResultPrefixInfo, block_def_map},
per_ns::PerNs,
+ src::HasSource,
type_ref::LifetimeRef,
visibility::{RawVisibility, Visibility},
};
@@ -304,6 +305,10 @@ impl<'db> Resolver<'db> {
}),
)
}
+ RawVisibility::PubSelf(explicitness) => {
+ Some(Visibility::Module(self.module(), *explicitness))
+ }
+ RawVisibility::PubCrate => Some(Visibility::PubCrate(self.krate())),
RawVisibility::Public => Some(Visibility::Public),
}
}
@@ -627,14 +632,14 @@ impl<'db> Resolver<'db> {
.extern_crate_decls()
.filter_map(|id| {
let loc = id.lookup(db);
- let tree = loc.item_tree_id().item_tree(db);
- match &tree[loc.id.value].alias {
- Some(alias) => match alias {
- ImportAlias::Underscore => None,
- ImportAlias::Alias(name) => Some(name.clone()),
- },
- None => Some(tree[loc.id.value].name.clone()),
- }
+ let extern_crate = loc.source(db);
+ // If there is a rename (`as x`), extract the renamed name, or remove the `extern crate`
+ // if it is an underscore.
+ extern_crate
+ .value
+ .rename()
+ .map(|a| a.name().map(|it| it.as_name()))
+ .unwrap_or_else(|| extern_crate.value.name_ref().map(|it| it.as_name()))
})
}
@@ -1047,17 +1052,6 @@ impl<'db> Scope<'db> {
}
}
-pub fn resolver_for_expr(
- db: &dyn DefDatabase,
- owner: DefWithBodyId,
- expr_id: ExprId,
-) -> Resolver<'_> {
- let r = owner.resolver(db);
- let scopes = db.expr_scopes(owner);
- let scope_id = scopes.scope_for(expr_id);
- resolver_for_scope_(db, scopes, scope_id, r, owner)
-}
-
pub fn resolver_for_scope(
db: &dyn DefDatabase,
owner: DefWithBodyId,
@@ -1471,10 +1465,7 @@ impl HasResolver for MacroRulesId {
fn lookup_resolver(
db: &dyn DefDatabase,
- lookup: impl Lookup<
- Database = dyn DefDatabase,
- Data = impl ItemTreeLoc<Container = impl HasResolver>,
- >,
+ lookup: impl Lookup<Database = dyn DefDatabase, Data = impl AstIdLoc<Container = impl HasResolver>>,
) -> Resolver<'_> {
lookup.lookup(db).container().resolver(db)
}
diff --git a/crates/hir-def/src/signatures.rs b/crates/hir-def/src/signatures.rs
index 44cfd72c48..92e610b36a 100644
--- a/crates/hir-def/src/signatures.rs
+++ b/crates/hir-def/src/signatures.rs
@@ -1,24 +1,28 @@
//! Item signature IR definitions
-use std::ops::Not as _;
+use std::{cell::LazyCell, ops::Not as _};
use bitflags::bitflags;
use cfg::{CfgExpr, CfgOptions};
-use either::Either;
-use hir_expand::{InFile, Intern, Lookup, name::Name};
+use hir_expand::{
+ InFile, Intern, Lookup,
+ name::{AsName, Name},
+};
use intern::{Symbol, sym};
use la_arena::{Arena, Idx};
use rustc_abi::{IntegerType, ReprOptions};
use syntax::{
- AstNode, SyntaxNodePtr,
- ast::{self, HasGenericParams, IsString},
+ NodeOrToken, SyntaxNodePtr, T,
+ ast::{self, HasGenericParams, HasName, HasVisibility, IsString},
};
use thin_vec::ThinVec;
use triomphe::Arc;
use crate::{
- ConstId, EnumId, EnumVariantId, EnumVariantLoc, FunctionId, HasModule, ImplId, ItemContainerId,
- ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, UnionId, VariantId,
+ ConstId, EnumId, EnumVariantId, EnumVariantLoc, ExternBlockId, FunctionId, HasModule, ImplId,
+ ItemContainerId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, UnionId,
+ VariantId,
+ attr::Attrs,
db::DefDatabase,
expr_store::{
ExpressionStore, ExpressionStoreSourceMap,
@@ -28,15 +32,17 @@ use crate::{
},
},
hir::{ExprId, PatId, generics::GenericParams},
- item_tree::{
- AttrOwner, Field, FieldParent, FieldsShape, FileItemTreeId, ItemTree, ItemTreeId, ModItem,
- RawVisibility, RawVisibilityId,
- },
+ item_tree::{FieldsShape, RawVisibility, visibility_from_ast},
lang_item::LangItem,
src::HasSource,
type_ref::{TraitRef, TypeBound, TypeRefId},
};
+#[inline]
+fn as_name_opt(name: Option<ast::Name>) -> Name {
+ name.map_or_else(Name::missing, |it| it.as_name())
+}
+
#[derive(Debug, PartialEq, Eq)]
pub struct StructSignature {
pub name: Name,
@@ -70,8 +76,8 @@ bitflags! {
impl StructSignature {
pub fn query(db: &dyn DefDatabase, id: StructId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
+ let InFile { file_id, value: source } = loc.source(db);
+ let attrs = db.attrs(id.into());
let mut flags = StructFlags::empty();
if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
@@ -91,23 +97,23 @@ impl StructSignature {
}
}
let repr = attrs.repr();
+ let shape = adt_shape(source.kind());
- let hir_expand::files::InFileWrapper { file_id, value } = loc.source(db);
let (store, generic_params, source_map) = lower_generic_params(
db,
loc.container,
id.into(),
file_id,
- value.generic_param_list(),
- value.where_clause(),
+ source.generic_param_list(),
+ source.where_clause(),
);
(
Arc::new(StructSignature {
generic_params,
store,
flags,
- shape: item_tree[loc.id.value].shape,
- name: item_tree[loc.id.value].name.clone(),
+ shape,
+ name: as_name_opt(source.name()),
repr,
}),
Arc::new(source_map),
@@ -115,6 +121,15 @@ impl StructSignature {
}
}
+#[inline]
+fn adt_shape(adt_kind: ast::StructKind) -> FieldsShape {
+ match adt_kind {
+ ast::StructKind::Record(_) => FieldsShape::Record,
+ ast::StructKind::Tuple(_) => FieldsShape::Tuple,
+ ast::StructKind::Unit => FieldsShape::Unit,
+ }
+}
+
#[derive(Debug, PartialEq, Eq)]
pub struct UnionSignature {
pub name: Name,
@@ -127,9 +142,7 @@ pub struct UnionSignature {
impl UnionSignature {
pub fn query(db: &dyn DefDatabase, id: UnionId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
- let krate = loc.container.krate;
- let item_tree = loc.id.item_tree(db);
- let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
+ let attrs = db.attrs(id.into());
let mut flags = StructFlags::empty();
if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
@@ -140,14 +153,14 @@ impl UnionSignature {
let repr = attrs.repr();
- let hir_expand::files::InFileWrapper { file_id, value } = loc.source(db);
+ let InFile { file_id, value: source } = loc.source(db);
let (store, generic_params, source_map) = lower_generic_params(
db,
loc.container,
id.into(),
file_id,
- value.generic_param_list(),
- value.where_clause(),
+ source.generic_param_list(),
+ source.where_clause(),
);
(
Arc::new(UnionSignature {
@@ -155,7 +168,7 @@ impl UnionSignature {
store,
flags,
repr,
- name: item_tree[loc.id.value].name.clone(),
+ name: as_name_opt(source.name()),
}),
Arc::new(source_map),
)
@@ -181,8 +194,7 @@ pub struct EnumSignature {
impl EnumSignature {
pub fn query(db: &dyn DefDatabase, id: EnumId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
+ let attrs = db.attrs(id.into());
let mut flags = EnumFlags::empty();
if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
flags |= EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
@@ -190,14 +202,14 @@ impl EnumSignature {
let repr = attrs.repr();
- let hir_expand::files::InFileWrapper { file_id, value } = loc.source(db);
+ let InFile { file_id, value: source } = loc.source(db);
let (store, generic_params, source_map) = lower_generic_params(
db,
loc.container,
id.into(),
file_id,
- value.generic_param_list(),
- value.where_clause(),
+ source.generic_param_list(),
+ source.where_clause(),
);
(
@@ -206,7 +218,7 @@ impl EnumSignature {
store,
flags,
repr,
- name: item_tree[loc.id.value].name.clone(),
+ name: as_name_opt(source.name()),
}),
Arc::new(source_map),
)
@@ -239,10 +251,9 @@ pub struct ConstSignature {
impl ConstSignature {
pub fn query(db: &dyn DefDatabase, id: ConstId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
- let item_tree = loc.id.item_tree(db);
let module = loc.container.module(db);
- let attrs = item_tree.attrs(db, module.krate, ModItem::from(loc.id.value).into());
+ let attrs = db.attrs(id.into());
let mut flags = ConstFlags::empty();
if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
flags |= ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
@@ -253,14 +264,14 @@ impl ConstSignature {
}
let (store, source_map, type_ref) =
- crate::expr_store::lower::lower_type_ref(db, module, source.map(|it| it.ty()));
+ crate::expr_store::lower::lower_type_ref(db, module, source.as_ref().map(|it| it.ty()));
(
Arc::new(ConstSignature {
store: Arc::new(store),
type_ref,
flags,
- name: item_tree[loc.id.value].name.clone(),
+ name: source.value.name().map(|it| it.as_name()),
}),
Arc::new(source_map),
)
@@ -295,10 +306,9 @@ pub struct StaticSignature {
impl StaticSignature {
pub fn query(db: &dyn DefDatabase, id: StaticId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
- let item_tree = loc.id.item_tree(db);
let module = loc.container.module(db);
- let attrs = item_tree.attrs(db, module.krate, ModItem::from(loc.id.value).into());
+ let attrs = db.attrs(id.into());
let mut flags = StaticFlags::empty();
if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
flags |= StaticFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
@@ -323,14 +333,14 @@ impl StaticSignature {
}
let (store, source_map, type_ref) =
- crate::expr_store::lower::lower_type_ref(db, module, source.map(|it| it.ty()));
+ crate::expr_store::lower::lower_type_ref(db, module, source.as_ref().map(|it| it.ty()));
(
Arc::new(StaticSignature {
store: Arc::new(store),
type_ref,
flags,
- name: item_tree[loc.id.value].name.clone(),
+ name: as_name_opt(source.value.name()),
}),
Arc::new(source_map),
)
@@ -407,10 +417,9 @@ pub struct TraitSignature {
impl TraitSignature {
pub fn query(db: &dyn DefDatabase, id: TraitId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
- let item_tree = loc.id.item_tree(db);
let mut flags = TraitFlags::empty();
- let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
+ let attrs = db.attrs(id.into());
let source = loc.source(db);
if source.value.auto_token().is_some() {
flags.insert(TraitFlags::AUTO);
@@ -446,15 +455,11 @@ impl TraitSignature {
flags |= TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH;
}
+ let name = as_name_opt(source.value.name());
let (store, source_map, generic_params) = lower_trait(db, loc.container, source, id);
(
- Arc::new(TraitSignature {
- store: Arc::new(store),
- generic_params,
- flags,
- name: item_tree[loc.id.value].name.clone(),
- }),
+ Arc::new(TraitSignature { store: Arc::new(store), generic_params, flags, name }),
Arc::new(source_map),
)
}
@@ -473,17 +478,13 @@ impl TraitAliasSignature {
id: TraitAliasId,
) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
- let item_tree = loc.id.item_tree(db);
let source = loc.source(db);
+ let name = as_name_opt(source.value.name());
let (store, source_map, generic_params) = lower_trait_alias(db, loc.container, source, id);
(
- Arc::new(TraitAliasSignature {
- generic_params,
- store: Arc::new(store),
- name: item_tree[loc.id.value].name.clone(),
- }),
+ Arc::new(TraitAliasSignature { generic_params, store: Arc::new(store), name }),
Arc::new(source_map),
)
}
@@ -530,10 +531,9 @@ impl FunctionSignature {
) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
let module = loc.container.module(db);
- let item_tree = loc.id.item_tree(db);
let mut flags = FnFlags::empty();
- let attrs = item_tree.attrs(db, module.krate, ModItem::from(loc.id.value).into());
+ let attrs = db.attrs(id.into());
if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
flags.insert(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL);
}
@@ -568,6 +568,7 @@ impl FunctionSignature {
flags.insert(FnFlags::HAS_BODY);
}
+ let name = as_name_opt(source.value.name());
let abi = source.value.abi().map(|abi| {
abi.abi_string().map_or_else(|| sym::C, |it| Symbol::intern(it.text_without_quotes()))
});
@@ -588,7 +589,7 @@ impl FunctionSignature {
abi,
flags,
legacy_const_generics_indices,
- name: item_tree[loc.id.value].name.clone(),
+ name,
}),
Arc::new(source_map),
)
@@ -662,14 +663,9 @@ impl TypeAliasSignature {
id: TypeAliasId,
) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
let loc = id.lookup(db);
- let item_tree = loc.id.item_tree(db);
let mut flags = TypeAliasFlags::empty();
- let attrs = item_tree.attrs(
- db,
- loc.container.module(db).krate(),
- ModItem::from(loc.id.value).into(),
- );
+ let attrs = db.attrs(id.into());
if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
flags.insert(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPL);
}
@@ -680,6 +676,7 @@ impl TypeAliasSignature {
flags.insert(TypeAliasFlags::IS_EXTERN);
}
let source = loc.source(db);
+ let name = as_name_opt(source.value.name());
let (store, source_map, generic_params, bounds, ty) =
lower_type_alias(db, loc.container.module(db), source, id);
@@ -689,7 +686,7 @@ impl TypeAliasSignature {
generic_params,
flags,
bounds,
- name: item_tree[loc.id.value].name.clone(),
+ name,
ty,
}),
Arc::new(source_map),
@@ -734,119 +731,68 @@ pub struct VariantFields {
pub store: Arc<ExpressionStore>,
pub shape: FieldsShape,
}
+
+#[salsa::tracked]
impl VariantFields {
- #[inline]
+ #[salsa::tracked(returns(clone))]
pub(crate) fn query(
db: &dyn DefDatabase,
id: VariantId,
) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
- let (shape, (fields, store, source_map)) = match id {
+ let (shape, result) = match id {
VariantId::EnumVariantId(id) => {
let loc = id.lookup(db);
- let item_tree = loc.id.item_tree(db);
let parent = loc.parent.lookup(db);
- let variant = &item_tree[loc.id.value];
- (
- variant.shape,
- lower_fields(
- db,
- parent.container,
- &item_tree,
- FieldParent::EnumVariant(loc.id.value),
- loc.source(db).map(|src| {
- variant.fields.iter().zip(
- src.field_list()
- .map(|it| {
- match it {
- ast::FieldList::RecordFieldList(record_field_list) => {
- Either::Left(record_field_list.fields().map(|it| {
- (SyntaxNodePtr::new(it.syntax()), it.ty())
- }))
- }
- ast::FieldList::TupleFieldList(field_list) => {
- Either::Right(field_list.fields().map(|it| {
- (SyntaxNodePtr::new(it.syntax()), it.ty())
- }))
- }
- }
- .into_iter()
- })
- .into_iter()
- .flatten(),
- )
- }),
- Some(item_tree[parent.id.value].visibility),
- ),
- )
+ let source = loc.source(db);
+ let shape = adt_shape(source.value.kind());
+ let enum_vis = Some(source.value.parent_enum().visibility());
+ let fields = lower_field_list(
+ db,
+ parent.container,
+ source.map(|src| src.field_list()),
+ enum_vis,
+ );
+ (shape, fields)
}
VariantId::StructId(id) => {
let loc = id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- let strukt = &item_tree[loc.id.value];
- (
- strukt.shape,
- lower_fields(
- db,
- loc.container,
- &item_tree,
- FieldParent::Struct(loc.id.value),
- loc.source(db).map(|src| {
- strukt.fields.iter().zip(
- src.field_list()
- .map(|it| {
- match it {
- ast::FieldList::RecordFieldList(record_field_list) => {
- Either::Left(record_field_list.fields().map(|it| {
- (SyntaxNodePtr::new(it.syntax()), it.ty())
- }))
- }
- ast::FieldList::TupleFieldList(field_list) => {
- Either::Right(field_list.fields().map(|it| {
- (SyntaxNodePtr::new(it.syntax()), it.ty())
- }))
- }
- }
- .into_iter()
- })
- .into_iter()
- .flatten(),
- )
- }),
- None,
- ),
- )
+ let source = loc.source(db);
+ let shape = adt_shape(source.value.kind());
+ let fields =
+ lower_field_list(db, loc.container, source.map(|src| src.field_list()), None);
+ (shape, fields)
}
VariantId::UnionId(id) => {
let loc = id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- let union = &item_tree[loc.id.value];
- (
- FieldsShape::Record,
- lower_fields(
- db,
- loc.container,
- &item_tree,
- FieldParent::Union(loc.id.value),
- loc.source(db).map(|src| {
- union.fields.iter().zip(
- src.record_field_list()
- .map(|it| {
- it.fields()
- .map(|it| (SyntaxNodePtr::new(it.syntax()), it.ty()))
- })
- .into_iter()
- .flatten(),
- )
- }),
- None,
- ),
- )
+ let source = loc.source(db);
+ let fields = lower_field_list(
+ db,
+ loc.container,
+ source.map(|src| src.record_field_list().map(ast::FieldList::RecordFieldList)),
+ None,
+ );
+ (FieldsShape::Record, fields)
}
};
+ match result {
+ Some((fields, store, source_map)) => (
+ Arc::new(VariantFields { fields, store: Arc::new(store), shape }),
+ Arc::new(source_map),
+ ),
+ None => {
+ let (store, source_map) = ExpressionStore::empty_singleton();
+ (Arc::new(VariantFields { fields: Arena::default(), store, shape }), source_map)
+ }
+ }
+ }
- (Arc::new(VariantFields { fields, store: Arc::new(store), shape }), Arc::new(source_map))
+ #[salsa::tracked(returns(deref))]
+ pub(crate) fn firewall(db: &dyn DefDatabase, id: VariantId) -> Arc<Self> {
+ Self::query(db, id).0
}
+}
+impl VariantFields {
pub fn len(&self) -> usize {
self.fields.len()
}
@@ -860,40 +806,90 @@ impl VariantFields {
}
}
-fn lower_fields<'a>(
+fn lower_field_list(
db: &dyn DefDatabase,
module: ModuleId,
- item_tree: &ItemTree,
- parent: FieldParent,
- fields: InFile<impl Iterator<Item = (&'a Field, (SyntaxNodePtr, Option<ast::Type>))>>,
- override_visibility: Option<RawVisibilityId>,
-) -> (Arena<FieldData>, ExpressionStore, ExpressionStoreSourceMap) {
- let mut arena = Arena::new();
+ fields: InFile<Option<ast::FieldList>>,
+ override_visibility: Option<Option<ast::Visibility>>,
+) -> Option<(Arena<FieldData>, ExpressionStore, ExpressionStoreSourceMap)> {
+ let file_id = fields.file_id;
+ match fields.value? {
+ ast::FieldList::RecordFieldList(fields) => lower_fields(
+ db,
+ module,
+ InFile::new(file_id, fields.fields().map(|field| (field.ty(), field))),
+ |_, field| as_name_opt(field.name()),
+ override_visibility,
+ ),
+ ast::FieldList::TupleFieldList(fields) => lower_fields(
+ db,
+ module,
+ InFile::new(file_id, fields.fields().map(|field| (field.ty(), field))),
+ |idx, _| Name::new_tuple_field(idx),
+ override_visibility,
+ ),
+ }
+}
+
+fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>(
+ db: &dyn DefDatabase,
+ module: ModuleId,
+ fields: InFile<impl Iterator<Item = (Option<ast::Type>, Field)>>,
+ mut field_name: impl FnMut(usize, &Field) -> Name,
+ override_visibility: Option<Option<ast::Visibility>>,
+) -> Option<(Arena<FieldData>, ExpressionStore, ExpressionStoreSourceMap)> {
let cfg_options = module.krate.cfg_options(db);
let mut col = ExprCollector::new(db, module, fields.file_id);
- for (idx, (field, (ptr, ty))) in fields.value.enumerate() {
- let attr_owner = AttrOwner::make_field_indexed(parent, idx);
- let attrs = item_tree.attrs(db, module.krate, attr_owner);
- if attrs.is_cfg_enabled(cfg_options) {
- arena.alloc(FieldData {
- name: field.name.clone(),
- type_ref: col
- .lower_type_ref_opt(ty, &mut ExprCollector::impl_trait_error_allocator),
- visibility: item_tree[override_visibility.unwrap_or(field.visibility)].clone(),
- is_unsafe: field.is_unsafe,
- });
- } else {
- col.source_map.diagnostics.push(
- crate::expr_store::ExpressionStoreDiagnostics::InactiveCode {
- node: InFile::new(fields.file_id, ptr),
- cfg: attrs.cfg().unwrap(),
- opts: cfg_options.clone(),
- },
- );
- }
- }
- let store = col.store.finish();
- (arena, store, col.source_map)
+ let override_visibility = override_visibility.map(|vis| {
+ LazyCell::new(|| {
+ let span_map = db.span_map(fields.file_id);
+ visibility_from_ast(db, vis, &mut |range| span_map.span_for_range(range).ctx)
+ })
+ });
+
+ let mut arena = Arena::new();
+ let mut idx = 0;
+ let mut has_fields = false;
+ for (ty, field) in fields.value {
+ has_fields = true;
+ match Attrs::is_cfg_enabled_for(db, &field, col.span_map(), cfg_options) {
+ Ok(()) => {
+ let type_ref =
+ col.lower_type_ref_opt(ty, &mut ExprCollector::impl_trait_error_allocator);
+ let visibility = override_visibility.as_ref().map_or_else(
+ || {
+ visibility_from_ast(db, field.visibility(), &mut |range| {
+ col.span_map().span_for_range(range).ctx
+ })
+ },
+ |it| RawVisibility::clone(it),
+ );
+ let is_unsafe = field
+ .syntax()
+ .children_with_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .any(|token| token.kind() == T![unsafe]);
+ let name = field_name(idx, &field);
+ arena.alloc(FieldData { name, type_ref, visibility, is_unsafe });
+ idx += 1;
+ }
+ Err(cfg) => {
+ col.store.diagnostics.push(
+ crate::expr_store::ExpressionStoreDiagnostics::InactiveCode {
+ node: InFile::new(fields.file_id, SyntaxNodePtr::new(field.syntax())),
+ cfg,
+ opts: cfg_options.clone(),
+ },
+ );
+ }
+ }
+ }
+ if !has_fields {
+ return None;
+ }
+ let (store, source_map) = col.store.finish();
+ arena.shrink_to_fit();
+ Some((arena, store, source_map))
}
#[derive(Debug, PartialEq, Eq)]
@@ -905,59 +901,74 @@ pub struct InactiveEnumVariantCode {
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct EnumVariants {
- pub variants: Box<[(EnumVariantId, Name)]>,
+ pub variants: Box<[(EnumVariantId, Name, FieldsShape)]>,
}
+#[salsa::tracked]
impl EnumVariants {
- pub(crate) fn enum_variants_query(
+ #[salsa::tracked(returns(ref))]
+ pub(crate) fn of(
db: &dyn DefDatabase,
e: EnumId,
- ) -> (Arc<EnumVariants>, Option<Arc<ThinVec<InactiveEnumVariantCode>>>) {
+ ) -> (EnumVariants, Option<ThinVec<InactiveEnumVariantCode>>) {
let loc = e.lookup(db);
- let item_tree = loc.id.item_tree(db);
+ let source = loc.source(db);
+ let ast_id_map = db.ast_id_map(source.file_id);
+ let span_map = db.span_map(source.file_id);
let mut diagnostics = ThinVec::new();
let cfg_options = loc.container.krate.cfg_options(db);
let mut index = 0;
- let variants = FileItemTreeId::range_iter(item_tree[loc.id.value].variants.clone())
+ let Some(variants) = source.value.variant_list() else {
+ return (EnumVariants { variants: Box::default() }, None);
+ };
+ let variants = variants
+ .variants()
.filter_map(|variant| {
- let attrs = item_tree.attrs(db, loc.container.krate, variant.into());
- if attrs.is_cfg_enabled(cfg_options) {
- let enum_variant = EnumVariantLoc {
- id: ItemTreeId::new(loc.id.tree_id(), variant),
- parent: e,
- index,
+ let ast_id = ast_id_map.ast_id(&variant);
+ match Attrs::is_cfg_enabled_for(db, &variant, span_map.as_ref(), cfg_options) {
+ Ok(()) => {
+ let enum_variant =
+ EnumVariantLoc { id: source.with_value(ast_id), parent: e, index }
+ .intern(db);
+ index += 1;
+ let name = as_name_opt(variant.name());
+ let shape = adt_shape(variant.kind());
+ Some((enum_variant, name, shape))
+ }
+ Err(cfg) => {
+ diagnostics.push(InactiveEnumVariantCode {
+ ast_id,
+ cfg,
+ opts: cfg_options.clone(),
+ });
+ None
}
- .intern(db);
- index += 1;
- Some((enum_variant, item_tree[variant].name.clone()))
- } else {
- diagnostics.push(InactiveEnumVariantCode {
- ast_id: item_tree[variant].ast_id,
- cfg: attrs.cfg().unwrap(),
- opts: cfg_options.clone(),
- });
- None
}
})
.collect();
- (
- Arc::new(EnumVariants { variants }),
- diagnostics.is_empty().not().then(|| Arc::new(diagnostics)),
- )
+ (EnumVariants { variants }, diagnostics.is_empty().not().then_some(diagnostics))
}
+}
+impl EnumVariants {
pub fn variant(&self, name: &Name) -> Option<EnumVariantId> {
- self.variants.iter().find_map(|(v, n)| if n == name { Some(*v) } else { None })
+ self.variants.iter().find_map(|(v, n, _)| if n == name { Some(*v) } else { None })
+ }
+
+ pub fn variant_name_by_id(&self, variant_id: EnumVariantId) -> Option<Name> {
+ self.variants
+ .iter()
+ .find_map(|(id, name, _)| if *id == variant_id { Some(name.clone()) } else { None })
}
// [Adopted from rustc](https://github.com/rust-lang/rust/blob/bd53aa3bf7a24a70d763182303bd75e5fc51a9af/compiler/rustc_middle/src/ty/adt.rs#L446-L448)
pub fn is_payload_free(&self, db: &dyn DefDatabase) -> bool {
- self.variants.iter().all(|&(v, _)| {
+ self.variants.iter().all(|&(v, _, _)| {
// The condition check order is slightly modified from rustc
// to improve performance by early returning with relatively fast checks
- let variant = &db.variant_fields(v.into());
+ let variant = v.fields(db);
if !variant.fields().is_empty() {
return false;
}
@@ -965,7 +976,7 @@ impl EnumVariants {
if !matches!(variant.shape, FieldsShape::Unit) {
let body = db.body(v.into());
// A variant with explicit discriminant
- if body.exprs[body.body_expr] != crate::hir::Expr::Missing {
+ if !matches!(body[body.body_expr], crate::hir::Expr::Missing) {
return false;
}
}
@@ -973,3 +984,17 @@ impl EnumVariants {
})
}
}
+
+pub(crate) fn extern_block_abi(
+ db: &dyn DefDatabase,
+ extern_block: ExternBlockId,
+) -> Option<Symbol> {
+ let source = extern_block.lookup(db).source(db);
+ source.value.abi().map(|abi| {
+ match abi.abi_string() {
+ Some(tok) => Symbol::intern(tok.text_without_quotes()),
+ // `extern` default to be `extern "C"`.
+ _ => sym::C,
+ }
+ })
+}
diff --git a/crates/hir-def/src/src.rs b/crates/hir-def/src/src.rs
index 3867f39b8b..aa373a27b0 100644
--- a/crates/hir-def/src/src.rs
+++ b/crates/hir-def/src/src.rs
@@ -1,15 +1,13 @@
//! Utilities for mapping between hir IDs and the surface syntax.
use either::Either;
-use hir_expand::InFile;
-use la_arena::ArenaMap;
+use hir_expand::{AstId, InFile};
+use la_arena::{Arena, ArenaMap, Idx};
use syntax::{AstNode, AstPtr, ast};
use crate::{
- GenericDefId, ItemTreeLoc, LocalFieldId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup,
- UseId, VariantId,
- db::DefDatabase,
- item_tree::{AttrOwner, FieldParent, ItemTreeNode},
+ AstIdLoc, GenericDefId, LocalFieldId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup,
+ UseId, VariantId, attr::Attrs, db::DefDatabase,
};
pub trait HasSource {
@@ -23,18 +21,13 @@ pub trait HasSource {
impl<T> HasSource for T
where
- T: ItemTreeLoc,
- T::Id: ItemTreeNode,
+ T: AstIdLoc,
{
- type Value = <T::Id as ItemTreeNode>::Source;
+ type Value = T::Ast;
fn ast_ptr(&self, db: &dyn DefDatabase) -> InFile<AstPtr<Self::Value>> {
- let id = self.item_tree_id();
- let file_id = id.file_id();
- let tree = id.item_tree(db);
- let ast_id_map = db.ast_id_map(file_id);
- let node = &tree[id.value];
-
- InFile::new(file_id, ast_id_map.get(node.ast_id()))
+ let id = self.ast_id();
+ let ast_id_map = db.ast_id_map(id.file_id);
+ InFile::new(id.file_id, ast_id_map.get(id.value))
}
}
@@ -43,18 +36,37 @@ pub trait HasChildSource<ChildId> {
fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<ChildId, Self::Value>>;
}
+/// Maps a `UseTree` contained in this import back to its AST node.
+pub fn use_tree_to_ast(
+ db: &dyn DefDatabase,
+ use_ast_id: AstId<ast::Use>,
+ index: Idx<ast::UseTree>,
+) -> ast::UseTree {
+ use_tree_source_map(db, use_ast_id)[index].clone()
+}
+
+/// Maps a `UseTree` contained in this import back to its AST node.
+fn use_tree_source_map(db: &dyn DefDatabase, use_ast_id: AstId<ast::Use>) -> Arena<ast::UseTree> {
+ // Re-lower the AST item and get the source map.
+ // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
+ let ast = use_ast_id.to_node(db);
+ let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
+ let mut span_map = None;
+ crate::item_tree::lower_use_tree(db, ast_use_tree, &mut |range| {
+ span_map.get_or_insert_with(|| db.span_map(use_ast_id.file_id)).span_for_range(range).ctx
+ })
+ .expect("failed to lower use tree")
+ .1
+}
+
impl HasChildSource<la_arena::Idx<ast::UseTree>> for UseId {
type Value = ast::UseTree;
fn child_source(
&self,
db: &dyn DefDatabase,
) -> InFile<ArenaMap<la_arena::Idx<ast::UseTree>, Self::Value>> {
- let loc = &self.lookup(db);
- let use_ = &loc.id.item_tree(db)[loc.id.value];
- InFile::new(
- loc.id.file_id(),
- use_.use_tree_source_map(db, loc.id.file_id()).into_iter().collect(),
- )
+ let loc = self.lookup(db);
+ InFile::new(loc.id.file_id, use_tree_source_map(db, loc.id).into_iter().collect())
}
}
@@ -124,49 +136,30 @@ impl HasChildSource<LocalFieldId> for VariantId {
type Value = Either<ast::TupleField, ast::RecordField>;
fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<LocalFieldId, Self::Value>> {
- let item_tree;
- let (src, parent, container) = match *self {
+ let (src, container) = match *self {
VariantId::EnumVariantId(it) => {
let lookup = it.lookup(db);
- item_tree = lookup.id.item_tree(db);
- (
- lookup.source(db).map(|it| it.kind()),
- FieldParent::EnumVariant(lookup.id.value),
- lookup.parent.lookup(db).container,
- )
+ (lookup.source(db).map(|it| it.kind()), lookup.parent.lookup(db).container)
}
VariantId::StructId(it) => {
let lookup = it.lookup(db);
- item_tree = lookup.id.item_tree(db);
- (
- lookup.source(db).map(|it| it.kind()),
- FieldParent::Struct(lookup.id.value),
- lookup.container,
- )
+ (lookup.source(db).map(|it| it.kind()), lookup.container)
}
VariantId::UnionId(it) => {
let lookup = it.lookup(db);
- item_tree = lookup.id.item_tree(db);
- (
- lookup.source(db).map(|it| it.kind()),
- FieldParent::Union(lookup.id.value),
- lookup.container,
- )
+ (lookup.source(db).map(|it| it.kind()), lookup.container)
}
};
-
+ let span_map = db.span_map(src.file_id);
let mut map = ArenaMap::new();
match &src.value {
ast::StructKind::Tuple(fl) => {
let cfg_options = container.krate.cfg_options(db);
let mut idx = 0;
- for (i, fd) in fl.fields().enumerate() {
- let attrs = item_tree.attrs(
- db,
- container.krate,
- AttrOwner::make_field_indexed(parent, i),
- );
- if !attrs.is_cfg_enabled(cfg_options) {
+ for fd in fl.fields() {
+ let enabled =
+ Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok();
+ if !enabled {
continue;
}
map.insert(
@@ -179,13 +172,10 @@ impl HasChildSource<LocalFieldId> for VariantId {
ast::StructKind::Record(fl) => {
let cfg_options = container.krate.cfg_options(db);
let mut idx = 0;
- for (i, fd) in fl.fields().enumerate() {
- let attrs = item_tree.attrs(
- db,
- container.krate,
- AttrOwner::make_field_indexed(parent, i),
- );
- if !attrs.is_cfg_enabled(cfg_options) {
+ for fd in fl.fields() {
+ let enabled =
+ Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok();
+ if !enabled {
continue;
}
map.insert(
@@ -195,7 +185,7 @@ impl HasChildSource<LocalFieldId> for VariantId {
idx += 1;
}
}
- _ => (),
+ ast::StructKind::Unit => (),
}
InFile::new(src.file_id, map)
}
diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs
index 3c67ee9fe5..b5eb84c25f 100644
--- a/crates/hir-def/src/visibility.rs
+++ b/crates/hir-def/src/visibility.rs
@@ -2,16 +2,15 @@
use std::iter;
-use hir_expand::Lookup;
+use base_db::Crate;
+use hir_expand::{InFile, Lookup};
use la_arena::ArenaMap;
+use syntax::ast::{self, HasVisibility};
use triomphe::Arc;
use crate::{
- ConstId, FunctionId, HasModule, ItemContainerId, ItemLoc, ItemTreeLoc, LocalFieldId,
- LocalModuleId, ModuleId, TraitId, TypeAliasId, VariantId,
- db::DefDatabase,
- nameres::DefMap,
- resolver::{HasResolver, Resolver},
+ AssocItemId, HasModule, ItemContainerId, LocalFieldId, LocalModuleId, ModuleId, TraitId,
+ VariantId, db::DefDatabase, nameres::DefMap, resolver::HasResolver, src::HasSource,
};
pub use crate::item_tree::{RawVisibility, VisibilityExplicitness};
@@ -21,6 +20,8 @@ pub use crate::item_tree::{RawVisibility, VisibilityExplicitness};
pub enum Visibility {
/// Visibility is restricted to a certain module.
Module(ModuleId, VisibilityExplicitness),
+ /// Visibility is restricted to the crate.
+ PubCrate(Crate),
/// Visibility is unrestricted.
Public,
}
@@ -43,8 +44,13 @@ impl Visibility {
pub fn is_visible_from(self, db: &dyn DefDatabase, from_module: ModuleId) -> bool {
let to_module = match self {
Visibility::Module(m, _) => m,
+ Visibility::PubCrate(krate) => return from_module.krate == krate,
Visibility::Public => return true,
};
+ if from_module == to_module {
+ // if the modules are the same, visibility is trivially satisfied
+ return true;
+ }
// if they're not in the same crate, it can't be visible
if from_module.krate != to_module.krate {
return false;
@@ -61,12 +67,18 @@ impl Visibility {
) -> bool {
let to_module = match self {
Visibility::Module(m, _) => m,
+ Visibility::PubCrate(krate) => return def_map.krate() == krate,
Visibility::Public => return true,
};
// if they're not in the same crate, it can't be visible
if def_map.krate() != to_module.krate {
return false;
}
+
+ if from_module == to_module.local_id && def_map.block_id() == to_module.block {
+ // if the modules are the same, visibility is trivially satisfied
+ return true;
+ }
Self::is_visible_from_def_map_(db, def_map, to_module, from_module)
}
@@ -90,9 +102,7 @@ impl Visibility {
// `to_module` is not a block, so there is no parent def map to use.
(None, _) => (),
// `to_module` is at `def_map`'s block, no need to move further.
- (Some(a), Some(b)) if a == b => {
- cov_mark::hit!(is_visible_from_same_block_def_map);
- }
+ (Some(a), Some(b)) if a == b => {}
_ => {
if let Some(parent) = to_module.def_map(db).parent() {
to_module = parent;
@@ -134,26 +144,56 @@ impl Visibility {
pub(crate) fn max(self, other: Visibility, def_map: &DefMap) -> Option<Visibility> {
match (self, other) {
(_, Visibility::Public) | (Visibility::Public, _) => Some(Visibility::Public),
+ (Visibility::PubCrate(krate), Visibility::PubCrate(krateb)) => {
+ if krate == krateb {
+ Some(Visibility::PubCrate(krate))
+ } else {
+ None
+ }
+ }
+ (Visibility::Module(mod_, _), Visibility::PubCrate(krate))
+ | (Visibility::PubCrate(krate), Visibility::Module(mod_, _)) => {
+ if mod_.krate == krate {
+ Some(Visibility::PubCrate(krate))
+ } else {
+ None
+ }
+ }
(Visibility::Module(mod_a, expl_a), Visibility::Module(mod_b, expl_b)) => {
- if mod_a.krate != mod_b.krate {
+ if mod_a == mod_b {
+ // Most module visibilities are `pub(self)`, and assuming no errors
+ // this will be the common and thus fast path.
+ return Some(Visibility::Module(
+ mod_a,
+ match (expl_a, expl_b) {
+ (VisibilityExplicitness::Explicit, _)
+ | (_, VisibilityExplicitness::Explicit) => {
+ VisibilityExplicitness::Explicit
+ }
+ _ => VisibilityExplicitness::Implicit,
+ },
+ ));
+ }
+
+ if mod_a.krate() != def_map.krate() || mod_b.krate() != def_map.krate() {
return None;
}
let def_block = def_map.block_id();
- if (mod_a.containing_block(), mod_b.containing_block()) != (def_block, def_block) {
+ if mod_a.containing_block() != def_block || mod_b.containing_block() != def_block {
return None;
}
let mut a_ancestors =
iter::successors(Some(mod_a.local_id), |&m| def_map[m].parent);
- let mut b_ancestors =
- iter::successors(Some(mod_b.local_id), |&m| def_map[m].parent);
if a_ancestors.any(|m| m == mod_b.local_id) {
// B is above A
return Some(Visibility::Module(mod_b, expl_b));
}
+ let mut b_ancestors =
+ iter::successors(Some(mod_b.local_id), |&m| def_map[m].parent);
if b_ancestors.any(|m| m == mod_a.local_id) {
// A is above B
return Some(Visibility::Module(mod_a, expl_a));
@@ -171,26 +211,52 @@ impl Visibility {
pub(crate) fn min(self, other: Visibility, def_map: &DefMap) -> Option<Visibility> {
match (self, other) {
(vis, Visibility::Public) | (Visibility::Public, vis) => Some(vis),
+ (Visibility::PubCrate(krate), Visibility::PubCrate(krateb)) => {
+ if krate == krateb {
+ Some(Visibility::PubCrate(krate))
+ } else {
+ None
+ }
+ }
+ (Visibility::Module(mod_, exp), Visibility::PubCrate(krate))
+ | (Visibility::PubCrate(krate), Visibility::Module(mod_, exp)) => {
+ if mod_.krate == krate { Some(Visibility::Module(mod_, exp)) } else { None }
+ }
(Visibility::Module(mod_a, expl_a), Visibility::Module(mod_b, expl_b)) => {
- if mod_a.krate != mod_b.krate {
+ if mod_a == mod_b {
+ // Most module visibilities are `pub(self)`, and assuming no errors
+ // this will be the common and thus fast path.
+ return Some(Visibility::Module(
+ mod_a,
+ match (expl_a, expl_b) {
+ (VisibilityExplicitness::Explicit, _)
+ | (_, VisibilityExplicitness::Explicit) => {
+ VisibilityExplicitness::Explicit
+ }
+ _ => VisibilityExplicitness::Implicit,
+ },
+ ));
+ }
+
+ if mod_a.krate() != def_map.krate() || mod_b.krate() != def_map.krate() {
return None;
}
let def_block = def_map.block_id();
- if (mod_a.containing_block(), mod_b.containing_block()) != (def_block, def_block) {
+ if mod_a.containing_block() != def_block || mod_b.containing_block() != def_block {
return None;
}
let mut a_ancestors =
iter::successors(Some(mod_a.local_id), |&m| def_map[m].parent);
- let mut b_ancestors =
- iter::successors(Some(mod_b.local_id), |&m| def_map[m].parent);
if a_ancestors.any(|m| m == mod_b.local_id) {
// B is above A
return Some(Visibility::Module(mod_a, expl_a));
}
+ let mut b_ancestors =
+ iter::successors(Some(mod_b.local_id), |&m| def_map[m].parent);
if b_ancestors.any(|m| m == mod_a.local_id) {
// A is above B
return Some(Visibility::Module(mod_b, expl_b));
@@ -207,7 +273,7 @@ pub(crate) fn field_visibilities_query(
db: &dyn DefDatabase,
variant_id: VariantId,
) -> Arc<ArenaMap<LocalFieldId, Visibility>> {
- let variant_fields = db.variant_fields(variant_id);
+ let variant_fields = variant_id.fields(db);
let fields = variant_fields.fields();
if fields.is_empty() {
return Arc::default();
@@ -217,49 +283,62 @@ pub(crate) fn field_visibilities_query(
for (field_id, field_data) in fields.iter() {
res.insert(field_id, Visibility::resolve(db, &resolver, &field_data.visibility));
}
+ res.shrink_to_fit();
Arc::new(res)
}
-/// Resolve visibility of a function.
-pub(crate) fn function_visibility_query(db: &dyn DefDatabase, def: FunctionId) -> Visibility {
- let resolver = def.resolver(db);
- let loc = def.lookup(db);
- let tree = loc.item_tree_id().item_tree(db);
- if let ItemContainerId::TraitId(trait_id) = loc.container {
- trait_vis(db, &resolver, trait_id)
- } else {
- Visibility::resolve(db, &resolver, &tree[tree[loc.id.value].visibility])
+pub fn visibility_from_ast(
+ db: &dyn DefDatabase,
+ has_resolver: impl HasResolver,
+ ast_vis: InFile<Option<ast::Visibility>>,
+) -> Visibility {
+ let mut span_map = None;
+ let raw_vis = crate::item_tree::visibility_from_ast(db, ast_vis.value, &mut |range| {
+ span_map.get_or_insert_with(|| db.span_map(ast_vis.file_id)).span_for_range(range).ctx
+ });
+ if raw_vis == RawVisibility::Public {
+ return Visibility::Public;
}
+
+ Visibility::resolve(db, &has_resolver.resolver(db), &raw_vis)
}
-/// Resolve visibility of a const.
-pub(crate) fn const_visibility_query(db: &dyn DefDatabase, def: ConstId) -> Visibility {
- let resolver = def.resolver(db);
- let loc = def.lookup(db);
- let tree = loc.item_tree_id().item_tree(db);
- if let ItemContainerId::TraitId(trait_id) = loc.container {
- trait_vis(db, &resolver, trait_id)
- } else {
- Visibility::resolve(db, &resolver, &tree[tree[loc.id.value].visibility])
+/// Resolve visibility of a type alias.
+pub(crate) fn assoc_visibility_query(db: &dyn DefDatabase, def: AssocItemId) -> Visibility {
+ match def {
+ AssocItemId::FunctionId(function_id) => {
+ let loc = function_id.lookup(db);
+ trait_item_visibility(db, loc.container).unwrap_or_else(|| {
+ let source = loc.source(db);
+ visibility_from_ast(db, function_id, source.map(|src| src.visibility()))
+ })
+ }
+ AssocItemId::ConstId(const_id) => {
+ let loc = const_id.lookup(db);
+ trait_item_visibility(db, loc.container).unwrap_or_else(|| {
+ let source = loc.source(db);
+ visibility_from_ast(db, const_id, source.map(|src| src.visibility()))
+ })
+ }
+ AssocItemId::TypeAliasId(type_alias_id) => {
+ let loc = type_alias_id.lookup(db);
+ trait_item_visibility(db, loc.container).unwrap_or_else(|| {
+ let source = loc.source(db);
+ visibility_from_ast(db, type_alias_id, source.map(|src| src.visibility()))
+ })
+ }
}
}
-/// Resolve visibility of a type alias.
-pub(crate) fn type_alias_visibility_query(db: &dyn DefDatabase, def: TypeAliasId) -> Visibility {
- let resolver = def.resolver(db);
- let loc = def.lookup(db);
- let tree = loc.item_tree_id().item_tree(db);
- if let ItemContainerId::TraitId(trait_id) = loc.container {
- trait_vis(db, &resolver, trait_id)
- } else {
- Visibility::resolve(db, &resolver, &tree[tree[loc.id.value].visibility])
+fn trait_item_visibility(db: &dyn DefDatabase, container: ItemContainerId) -> Option<Visibility> {
+ match container {
+ ItemContainerId::TraitId(trait_) => Some(trait_visibility(db, trait_)),
+ _ => None,
}
}
-#[inline]
-fn trait_vis(db: &dyn DefDatabase, resolver: &Resolver<'_>, trait_id: TraitId) -> Visibility {
- let ItemLoc { id: tree_id, .. } = trait_id.lookup(db);
- let item_tree = tree_id.item_tree(db);
- let tr_def = &item_tree[tree_id.value];
- Visibility::resolve(db, resolver, &item_tree[tr_def.visibility])
+fn trait_visibility(db: &dyn DefDatabase, def: TraitId) -> Visibility {
+ let loc = def.lookup(db);
+ let source = loc.source(db);
+ visibility_from_ast(db, def, source.map(|src| src.visibility()))
}
diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml
index ed818c5be3..80a3c08486 100644
--- a/crates/hir-expand/Cargo.toml
+++ b/crates/hir-expand/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index 94c97713f0..986f8764f5 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -433,20 +433,19 @@ fn unescape(s: &str) -> Option<Cow<'_, str>> {
let mut buf = String::new();
let mut prev_end = 0;
let mut has_error = false;
- unescape::unescape_unicode(s, unescape::Mode::Str, &mut |char_range, unescaped_char| match (
- unescaped_char,
- buf.capacity() == 0,
- ) {
- (Ok(c), false) => buf.push(c),
- (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
- prev_end = char_range.end
- }
- (Ok(c), true) => {
- buf.reserve_exact(s.len());
- buf.push_str(&s[..prev_end]);
- buf.push(c);
+ unescape::unescape_str(s, |char_range, unescaped_char| {
+ match (unescaped_char, buf.capacity() == 0) {
+ (Ok(c), false) => buf.push(c),
+ (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
+ prev_end = char_range.end
+ }
+ (Ok(c), true) => {
+ buf.reserve_exact(s.len());
+ buf.push_str(&s[..prev_end]);
+ buf.push(c);
+ }
+ (Err(_), _) => has_error = true,
}
- (Err(_), _) => has_error = true,
});
match (has_error, buf.capacity() == 0) {
diff --git a/crates/hir-expand/src/builtin/derive_macro.rs b/crates/hir-expand/src/builtin/derive_macro.rs
index d135584a08..15e68ff95c 100644
--- a/crates/hir-expand/src/builtin/derive_macro.rs
+++ b/crates/hir-expand/src/builtin/derive_macro.rs
@@ -458,6 +458,7 @@ fn expand_simple_derive(
invoc_span: Span,
tt: &tt::TopSubtree,
trait_path: tt::TopSubtree,
+ allow_unions: bool,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let info = match parse_adt(db, tt, invoc_span) {
@@ -469,6 +470,12 @@ fn expand_simple_derive(
);
}
};
+ if !allow_unions && matches!(info.shape, AdtShape::Union) {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(invoc_span)),
+ ExpandError::other(invoc_span, "this trait cannot be derived for unions"),
+ );
+ }
ExpandResult::ok(expand_simple_derive_with_parsed(
invoc_span,
info,
@@ -535,7 +542,14 @@ fn copy_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
+ expand_simple_derive(
+ db,
+ span,
+ tt,
+ quote! {span => #krate::marker::Copy },
+ true,
+ |_| quote! {span =>},
+ )
}
fn clone_expand(
@@ -544,7 +558,7 @@ fn clone_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::clone::Clone }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::clone::Clone }, true, |adt| {
if matches!(adt.shape, AdtShape::Union) {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
@@ -599,41 +613,63 @@ fn default_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::default::Default }, |adt| {
- let body = match &adt.shape {
- AdtShape::Struct(fields) => {
- let name = &adt.name;
- fields.as_pattern_map(
- quote!(span =>#name),
+ let adt = match parse_adt(db, tt, span) {
+ Ok(info) => info,
+ Err(e) => {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan { open: span, close: span }),
+ e,
+ );
+ }
+ };
+ let (body, constrain_to_trait) = match &adt.shape {
+ AdtShape::Struct(fields) => {
+ let name = &adt.name;
+ let body = fields.as_pattern_map(
+ quote!(span =>#name),
+ span,
+ |_| quote!(span =>#krate::default::Default::default()),
+ );
+ (body, true)
+ }
+ AdtShape::Enum { default_variant, variants } => {
+ if let Some(d) = default_variant {
+ let (name, fields) = &variants[*d];
+ let adt_name = &adt.name;
+ let body = fields.as_pattern_map(
+ quote!(span =>#adt_name :: #name),
span,
|_| quote!(span =>#krate::default::Default::default()),
- )
+ );
+ (body, false)
+ } else {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ExpandError::other(span, "`#[derive(Default)]` on enum with no `#[default]`"),
+ );
}
- AdtShape::Enum { default_variant, variants } => {
- if let Some(d) = default_variant {
- let (name, fields) = &variants[*d];
- let adt_name = &adt.name;
- fields.as_pattern_map(
- quote!(span =>#adt_name :: #name),
- span,
- |_| quote!(span =>#krate::default::Default::default()),
- )
- } else {
- // FIXME: Return expand error here
- quote!(span =>)
+ }
+ AdtShape::Union => {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ExpandError::other(span, "this trait cannot be derived for unions"),
+ );
+ }
+ };
+ ExpandResult::ok(expand_simple_derive_with_parsed(
+ span,
+ adt,
+ quote! {span => #krate::default::Default },
+ |_adt| {
+ quote! {span =>
+ fn default() -> Self {
+ #body
}
}
- AdtShape::Union => {
- // FIXME: Return expand error here
- quote!(span =>)
- }
- };
- quote! {span =>
- fn default() -> Self {
- #body
- }
- }
- })
+ },
+ constrain_to_trait,
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ))
}
fn debug_expand(
@@ -642,7 +678,7 @@ fn debug_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::fmt::Debug }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::fmt::Debug }, false, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| {
@@ -697,10 +733,7 @@ fn debug_expand(
}
})
.collect(),
- AdtShape::Union => {
- // FIXME: Return expand error here
- vec![]
- }
+ AdtShape::Union => unreachable!(),
};
quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
@@ -718,11 +751,7 @@ fn hash_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::hash::Hash }, |adt| {
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote! {span =>};
- }
+ expand_simple_derive(db, span, tt, quote! {span => #krate::hash::Hash }, false, |adt| {
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
@@ -769,7 +798,14 @@ fn eq_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
+ expand_simple_derive(
+ db,
+ span,
+ tt,
+ quote! {span => #krate::cmp::Eq },
+ true,
+ |_| quote! {span =>},
+ )
}
fn partial_eq_expand(
@@ -778,11 +814,7 @@ fn partial_eq_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialEq }, |adt| {
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote! {span =>};
- }
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialEq }, false, |adt| {
let name = &adt.name;
let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span);
@@ -854,7 +886,7 @@ fn ord_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Ord }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Ord }, false, |adt| {
fn compare(
krate: &tt::Ident,
left: tt::TopSubtree,
@@ -873,10 +905,6 @@ fn ord_expand(
}
}
}
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote!(span =>);
- }
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
@@ -916,7 +944,7 @@ fn partial_ord_expand(
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialOrd }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialOrd }, false, |adt| {
fn compare(
krate: &tt::Ident,
left: tt::TopSubtree,
@@ -935,10 +963,6 @@ fn partial_ord_expand(
}
}
}
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote!(span =>);
- }
let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
diff --git a/crates/hir-expand/src/builtin/fn_macro.rs b/crates/hir-expand/src/builtin/fn_macro.rs
index 3180b8dae1..60fbc66065 100644
--- a/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/crates/hir-expand/src/builtin/fn_macro.rs
@@ -7,12 +7,13 @@ use intern::{
Symbol,
sym::{self},
};
+use itertools::Itertools;
use mbe::{DelimiterKind, expect_fragment};
use span::{Edition, FileId, Span};
use stdx::format_to;
use syntax::{
format_smolstr,
- unescape::{Mode, unescape_byte, unescape_char, unescape_unicode},
+ unescape::{unescape_byte, unescape_char, unescape_str},
};
use syntax_bridge::syntax_node_to_token_tree;
@@ -124,8 +125,8 @@ register_builtin! {
(assert, Assert) => assert_expand,
(stringify, Stringify) => stringify_expand,
(asm, Asm) => asm_expand,
- (global_asm, GlobalAsm) => asm_expand,
- (naked_asm, NakedAsm) => asm_expand,
+ (global_asm, GlobalAsm) => global_asm_expand,
+ (naked_asm, NakedAsm) => naked_asm_expand,
(cfg, Cfg) => cfg_expand,
(core_panic, CorePanic) => panic_expand,
(std_panic, StdPanic) => panic_expand,
@@ -324,6 +325,36 @@ fn asm_expand(
ExpandResult::ok(expanded)
}
+fn global_asm_expand(
+ _db: &dyn ExpandDatabase,
+ _id: MacroCallId,
+ tt: &tt::TopSubtree,
+ span: Span,
+) -> ExpandResult<tt::TopSubtree> {
+ let mut tt = tt.clone();
+ tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis;
+ let pound = mk_pound(span);
+ let expanded = quote! {span =>
+ builtin #pound global_asm #tt
+ };
+ ExpandResult::ok(expanded)
+}
+
+fn naked_asm_expand(
+ _db: &dyn ExpandDatabase,
+ _id: MacroCallId,
+ tt: &tt::TopSubtree,
+ span: Span,
+) -> ExpandResult<tt::TopSubtree> {
+ let mut tt = tt.clone();
+ tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis;
+ let pound = mk_pound(span);
+ let expanded = quote! {span =>
+ builtin #pound naked_asm #tt
+ };
+ ExpandResult::ok(expanded)
+}
+
fn cfg_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
@@ -430,7 +461,7 @@ fn compile_error_expand(
kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
suffix: _,
})),
- ] => ExpandError::other(span, Box::from(unescape_str(text).as_str())),
+ ] => ExpandError::other(span, Box::from(unescape_symbol(text).as_str())),
_ => ExpandError::other(span, "`compile_error!` argument must be a string"),
};
@@ -481,7 +512,7 @@ fn concat_expand(
format_to!(text, "{}", it.symbol.as_str())
}
tt::LitKind::Str => {
- text.push_str(unescape_str(&it.symbol).as_str());
+ text.push_str(unescape_symbol(&it.symbol).as_str());
record_span(it.span);
}
tt::LitKind::StrRaw(_) => {
@@ -681,52 +712,36 @@ fn relative_file(
}
fn parse_string(tt: &tt::TopSubtree) -> Result<(Symbol, Span), ExpandError> {
- let delimiter = tt.top_subtree().delimiter;
- tt.iter()
- .next()
- .ok_or(delimiter.open.cover(delimiter.close))
- .and_then(|tt| match tt {
+ let mut tt = TtElement::Subtree(tt.top_subtree(), tt.iter());
+ (|| {
+ // FIXME: We wrap expression fragments in parentheses which can break this expectation
+ // here
+ // Remove this once we handle none delims correctly
+ while let TtElement::Subtree(sub, tt_iter) = &mut tt
+ && let DelimiterKind::Parenthesis | DelimiterKind::Invisible = sub.delimiter.kind
+ {
+ tt =
+ tt_iter.exactly_one().map_err(|_| sub.delimiter.open.cover(sub.delimiter.close))?;
+ }
+
+ match tt {
TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
symbol: text,
span,
kind: tt::LitKind::Str,
suffix: _,
- })) => Ok((unescape_str(text), *span)),
+ })) => Ok((unescape_symbol(text), *span)),
TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
symbol: text,
span,
kind: tt::LitKind::StrRaw(_),
suffix: _,
})) => Ok((text.clone(), *span)),
- // FIXME: We wrap expression fragments in parentheses which can break this expectation
- // here
- // Remove this once we handle none delims correctly
- TtElement::Subtree(tt, mut tt_iter)
- if tt.delimiter.kind == DelimiterKind::Parenthesis =>
- {
- tt_iter
- .next()
- .and_then(|tt| match tt {
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span,
- kind: tt::LitKind::Str,
- suffix: _,
- })) => Some((unescape_str(text), *span)),
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span,
- kind: tt::LitKind::StrRaw(_),
- suffix: _,
- })) => Some((text.clone(), *span)),
- _ => None,
- })
- .ok_or(delimiter.open.cover(delimiter.close))
- }
TtElement::Leaf(l) => Err(*l.span()),
TtElement::Subtree(tt, _) => Err(tt.delimiter.open.cover(tt.delimiter.close)),
- })
- .map_err(|span| ExpandError::other(span, "expected string literal"))
+ }
+ })()
+ .map_err(|span| ExpandError::other(span, "expected string literal"))
}
fn include_expand(
@@ -897,11 +912,11 @@ fn quote_expand(
)
}
-fn unescape_str(s: &Symbol) -> Symbol {
+fn unescape_symbol(s: &Symbol) -> Symbol {
if s.as_str().contains('\\') {
let s = s.as_str();
let mut buf = String::with_capacity(s.len());
- unescape_unicode(s, Mode::Str, &mut |_, c| {
+ unescape_str(s, |_, c| {
if let Ok(c) = c {
buf.push(c)
}
diff --git a/crates/hir-expand/src/builtin/quote.rs b/crates/hir-expand/src/builtin/quote.rs
index 62b7b638e7..70c38d4d7c 100644
--- a/crates/hir-expand/src/builtin/quote.rs
+++ b/crates/hir-expand/src/builtin/quote.rs
@@ -129,7 +129,7 @@ macro_rules! quote {
}
}
}
-pub(super) use quote;
+pub use quote;
pub trait ToTokenTree {
fn to_tokens(self, span: Span, builder: &mut TopSubtreeBuilder);
@@ -277,8 +277,8 @@ mod tests {
assert_eq!(quoted.to_string(), "hello");
let t = format!("{quoted:#?}");
expect![[r#"
- SUBTREE $$ 937550:[email protected]#ROOT2024 937550:[email protected]#ROOT2024
- IDENT hello 937550:[email protected]#ROOT2024"#]]
+ SUBTREE $$ 937550:Root[0000, 0]@0..0#ROOT2024 937550:Root[0000, 0]@0..0#ROOT2024
+ IDENT hello 937550:Root[0000, 0]@0..0#ROOT2024"#]]
.assert_eq(&t);
}
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 7cb1b6c020..888c1405a6 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -13,7 +13,7 @@ use crate::{
AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
- attrs::{AttrId, collect_attrs},
+ attrs::{AttrId, AttrInput, RawAttrs, collect_attrs},
builtin::pseudo_derive_attr_expansion,
cfg_process,
declarative::DeclarativeMacroExpander,
@@ -60,6 +60,7 @@ pub trait ExpandDatabase: RootQueryDb {
fn proc_macros_for_crate(&self, krate: Crate) -> Option<Arc<CrateProcMacros>>;
#[salsa::invoke(ast_id_map)]
+ #[salsa::lru(1024)]
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
#[salsa::transparent]
@@ -144,7 +145,7 @@ pub trait ExpandDatabase: RootQueryDb {
fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContext;
}
-#[salsa_macros::interned(no_lifetime, id = span::SyntaxContext)]
+#[salsa_macros::interned(no_lifetime, id = span::SyntaxContext, revisions = usize::MAX)]
pub struct SyntaxContextWrapper {
pub data: SyntaxContext,
}
@@ -241,30 +242,36 @@ pub fn expand_speculative(
let attr_arg = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => {
- let attr = if loc.def.is_attribute_derive() {
+ if loc.def.is_attribute_derive() {
// for pseudo-derive expansion we actually pass the attribute itself only
- ast::Attr::cast(speculative_args.clone())
+ ast::Attr::cast(speculative_args.clone()).and_then(|attr| attr.token_tree()).map(
+ |token_tree| {
+ let mut tree = syntax_node_to_token_tree(
+ token_tree.syntax(),
+ span_map,
+ span,
+ DocCommentDesugarMode::ProcMacro,
+ );
+ *tree.top_subtree_delimiter_mut() = tt::Delimiter::invisible_spanned(span);
+ tree
+ },
+ )
} else {
// Attributes may have an input token tree, build the subtree and map for this as well
// then try finding a token id for our token if it is inside this input subtree.
let item = ast::Item::cast(speculative_args.clone())?;
- collect_attrs(&item)
- .nth(invoc_attr_index.ast_index())
- .and_then(|x| Either::left(x.1))
- }?;
- match attr.token_tree() {
- Some(token_tree) => {
- let mut tree = syntax_node_to_token_tree(
- token_tree.syntax(),
- span_map,
- span,
- DocCommentDesugarMode::ProcMacro,
- );
- *tree.top_subtree_delimiter_mut() = tt::Delimiter::invisible_spanned(span);
-
- Some(tree)
- }
- _ => None,
+ let attrs = RawAttrs::new_expanded(db, &item, span_map, loc.krate.cfg_options(db));
+ attrs.iter().find(|attr| attr.id == invoc_attr_index).and_then(|attr| {
+ match attr.input.as_deref()? {
+ AttrInput::TokenTree(tt) => {
+ let mut attr_arg = tt.clone();
+ attr_arg.top_subtree_delimiter_mut().kind =
+ tt::DelimiterKind::Invisible;
+ Some(attr_arg)
+ }
+ AttrInput::Literal(_) => None,
+ }
+ })
}
}
_ => None,
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index 8024823cbc..6730b337d3 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -106,7 +106,7 @@ impl FileRange {
/// It is stable across reparses, and can be used as salsa key/value.
pub type AstId<N> = crate::InFile<FileAstId<N>>;
-impl<N: AstIdNode> AstId<N> {
+impl<N: AstNode> AstId<N> {
pub fn to_node(&self, db: &dyn ExpandDatabase) -> N {
self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
}
@@ -122,6 +122,13 @@ impl<N: AstIdNode> AstId<N> {
pub fn erase(&self) -> ErasedAstId {
crate::InFile::new(self.file_id, self.value.erase())
}
+ #[inline]
+ pub fn upcast<M: AstIdNode>(self) -> AstId<M>
+ where
+ N: Into<M>,
+ {
+ self.map(|it| it.upcast())
+ }
}
pub type ErasedAstId = crate::InFile<ErasedFileAstId>;
@@ -308,11 +315,11 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
- pub fn original_file_range_with_macro_call_body(
+ pub fn original_file_range_with_macro_call_input(
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
- self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
+ self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_input(db)
}
pub fn original_syntax_node_rooted(
@@ -458,7 +465,7 @@ impl InFile<TextRange> {
}
}
- pub fn original_node_file_range_with_macro_call_body(
+ pub fn original_node_file_range_with_macro_call_input(
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
@@ -469,7 +476,7 @@ impl InFile<TextRange> {
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file);
- loc.kind.original_call_range_with_body(db)
+ loc.kind.original_call_range_with_input(db)
}
}
}
@@ -490,6 +497,18 @@ impl InFile<TextRange> {
}
}
}
+
+ pub fn original_node_file_range_rooted_opt(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<FileRange> {
+ match self.file_id {
+ HirFileId::FileId(file_id) => Some(FileRange { file_id, range: self.value }),
+ HirFileId::MacroFile(mac_file) => {
+ map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value)
+ }
+ }
+ }
}
impl<N: AstNode> InFile<N> {
diff --git a/crates/hir-expand/src/inert_attr_macro.rs b/crates/hir-expand/src/inert_attr_macro.rs
index 543ac0619d..385c98ef87 100644
--- a/crates/hir-expand/src/inert_attr_macro.rs
+++ b/crates/hir-expand/src/inert_attr_macro.rs
@@ -486,7 +486,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_legacy_const_generics, Normal, template!(List: "N"), ErrorFollowing,
INTERNAL_UNSTABLE
),
- // Do not const-check this function's body. It will always get replaced during CTFE.
+ // Do not const-check this function's body. It will always get replaced during CTFE via `hook_special_const_fn`.
rustc_attr!(
rustc_do_not_const_check, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
),
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 6ecac1463f..ac61b22009 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -199,9 +199,9 @@ impl ExpandErrorKind {
},
&ExpandErrorKind::MissingProcMacroExpander(def_crate) => {
match db.proc_macros_for_crate(def_crate).as_ref().and_then(|it| it.get_error()) {
- Some((e, hard_err)) => RenderedExpandError {
- message: e.to_owned(),
- error: hard_err,
+ Some(e) => RenderedExpandError {
+ message: e.to_string(),
+ error: e.is_hard_error(),
kind: RenderedExpandError::GENERAL_KIND,
},
None => RenderedExpandError {
@@ -688,8 +688,11 @@ impl MacroCallKind {
/// Returns the original file range that best describes the location of this macro call.
///
- /// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives.
- pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange {
+ /// This spans the entire macro call, including its input. That is for
+ /// - fn_like! {}, it spans the path and token tree
+ /// - #\[derive], it spans the `#[derive(...)]` attribute and the annotated item
+ /// - #\[attr], it spans the `#[attr(...)]` attribute and the annotated item
+ pub fn original_call_range_with_input(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id() {
@@ -712,8 +715,8 @@ impl MacroCallKind {
/// Returns the original file range that best describes the location of this macro call.
///
/// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
- /// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives
- /// get only the specific derive that is being referred to.
+ /// get the macro path (rustc shows the whole `ast::MacroCall`), attribute macros get the
+ /// attribute's range, and derives get only the specific derive that is being referred to.
pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
@@ -726,7 +729,14 @@ impl MacroCallKind {
};
let range = match kind {
- MacroCallKind::FnLike { ast_id, .. } => ast_id.to_ptr(db).text_range(),
+ MacroCallKind::FnLike { ast_id, .. } => {
+ let node = ast_id.to_node(db);
+ node.path()
+ .unwrap()
+ .syntax()
+ .text_range()
+ .cover(node.excl_token().unwrap().text_range())
+ }
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: should be the range of the macro name, not the whole derive
// FIXME: handle `cfg_attr`
@@ -1056,7 +1066,7 @@ impl ExpandTo {
intern::impl_internable!(ModPath, attrs::AttrInput);
-#[salsa_macros::interned(no_lifetime, debug)]
+#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
#[doc(alias = "MacroFileId")]
pub struct MacroCallId {
pub loc: MacroCallLoc,
diff --git a/crates/hir-expand/src/prettify_macro_expansion_.rs b/crates/hir-expand/src/prettify_macro_expansion_.rs
index 6134c3a36b..6431d46d39 100644
--- a/crates/hir-expand/src/prettify_macro_expansion_.rs
+++ b/crates/hir-expand/src/prettify_macro_expansion_.rs
@@ -46,7 +46,7 @@ pub fn prettify_macro_expansion(
} else if let Some(crate_name) = &macro_def_crate.extra_data(db).display_name {
make::tokens::ident(crate_name.crate_name().as_str())
} else {
- return dollar_crate.clone();
+ dollar_crate.clone()
}
});
if replacement.text() == "$crate" {
diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs
index 1cd975b980..f97d721dfa 100644
--- a/crates/hir-expand/src/proc_macro.rs
+++ b/crates/hir-expand/src/proc_macro.rs
@@ -4,7 +4,7 @@ use core::fmt;
use std::any::Any;
use std::{panic::RefUnwindSafe, sync};
-use base_db::{Crate, CrateBuilderId, CratesIdMap, Env};
+use base_db::{Crate, CrateBuilderId, CratesIdMap, Env, ProcMacroLoadingError};
use intern::Symbol;
use rustc_hash::FxHashMap;
use span::Span;
@@ -34,9 +34,7 @@ pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe + Any {
current_dir: String,
) -> Result<tt::TopSubtree, ProcMacroExpansionError>;
- fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
- other.type_id() == self.type_id()
- }
+ fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool;
}
impl PartialEq for dyn ProcMacroExpander {
@@ -55,8 +53,8 @@ pub enum ProcMacroExpansionError {
System(String),
}
-pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, (String, bool)>;
-type StoredProcMacroLoadResult = Result<Box<[ProcMacro]>, (Box<str>, bool)>;
+pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, ProcMacroLoadingError>;
+type StoredProcMacroLoadResult = Result<Box<[ProcMacro]>, ProcMacroLoadingError>;
#[derive(Default, Debug)]
pub struct ProcMacrosBuilder(FxHashMap<CrateBuilderId, Arc<CrateProcMacros>>);
@@ -79,9 +77,7 @@ impl ProcMacrosBuilder {
proc_macros_crate,
match proc_macro {
Ok(it) => Arc::new(CrateProcMacros(Ok(it.into_boxed_slice()))),
- Err((e, hard_err)) => {
- Arc::new(CrateProcMacros(Err((e.into_boxed_str(), hard_err))))
- }
+ Err(e) => Arc::new(CrateProcMacros(Err(e))),
},
);
}
@@ -141,8 +137,8 @@ impl CrateProcMacros {
)
}
- pub fn get_error(&self) -> Option<(&str, bool)> {
- self.0.as_ref().err().map(|(e, hard_err)| (&**e, *hard_err))
+ pub fn get_error(&self) -> Option<&ProcMacroLoadingError> {
+ self.0.as_ref().err()
}
/// Fetch the [`CustomProcMacroExpander`]s and their corresponding names for the given crate.
diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml
index efa544cf39..7cc0a26d37 100644
--- a/crates/hir-ty/Cargo.toml
+++ b/crates/hir-ty/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
@@ -31,7 +32,7 @@ la-arena.workspace = true
triomphe.workspace = true
typed-arena = "2.0.2"
indexmap.workspace = true
-rustc_apfloat = "0.2.2"
+rustc_apfloat = "0.2.3"
query-group.workspace = true
salsa.workspace = true
salsa-macros.workspace = true
diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs
index 7acc9456ec..cc8f7bf04a 100644
--- a/crates/hir-ty/src/autoderef.rs
+++ b/crates/hir-ty/src/autoderef.rs
@@ -208,7 +208,7 @@ pub(crate) fn deref_by_trait(
};
let trait_id = trait_id()?;
let target =
- db.trait_items(trait_id).associated_type_by_name(&Name::new_symbol_root(sym::Target))?;
+ trait_id.trait_items(db).associated_type_by_name(&Name::new_symbol_root(sym::Target))?;
let projection = {
let b = TyBuilder::subst_for_def(db, trait_id, None);
diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs
index 22b96b55cb..26b635298a 100644
--- a/crates/hir-ty/src/chalk_db.rs
+++ b/crates/hir-ty/src/chalk_db.rs
@@ -63,7 +63,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
) -> Option<rust_ir::AssociatedTyValueId<Interner>> {
let alias_id = from_assoc_type_id(assoc_type_id);
let trait_sig = self.db.type_alias_signature(alias_id);
- self.db.impl_items(hir_def::ImplId::from_chalk(self.db, impl_id)).items.iter().find_map(
+ hir_def::ImplId::from_chalk(self.db, impl_id).impl_items(self.db).items.iter().find_map(
|(name, item)| match item {
AssocItemId::TypeAliasId(alias) if &trait_sig.name == name => {
Some(TypeAliasAsValue(*alias).to_chalk(self.db))
@@ -261,10 +261,20 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
&self,
well_known_trait: WellKnownTrait,
) -> Option<chalk_ir::TraitId<Interner>> {
- let lang_attr = lang_item_from_well_known_trait(well_known_trait);
- let trait_ = lang_attr.resolve_trait(self.db, self.krate)?;
+ let lang_item = lang_item_from_well_known_trait(well_known_trait);
+ let trait_ = lang_item.resolve_trait(self.db, self.krate)?;
Some(to_chalk_trait_id(trait_))
}
+ fn well_known_assoc_type_id(
+ &self,
+ assoc_type: rust_ir::WellKnownAssocType,
+ ) -> Option<chalk_ir::AssocTypeId<Interner>> {
+ let lang_item = match assoc_type {
+ rust_ir::WellKnownAssocType::AsyncFnOnceOutput => LangItem::AsyncFnOnceOutput,
+ };
+ let alias = lang_item.resolve_type_alias(self.db, self.krate)?;
+ Some(to_assoc_type_id(alias))
+ }
fn program_clauses_for_env(
&self,
@@ -305,9 +315,8 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
crate::ImplTraitId::AsyncBlockTypeImplTrait(..) => {
if let Some((future_trait, future_output)) =
LangItem::Future.resolve_trait(self.db, self.krate).and_then(|trait_| {
- let alias = self
- .db
- .trait_items(trait_)
+ let alias = trait_
+ .trait_items(self.db)
.associated_type_by_name(&Name::new_symbol_root(sym::Output))?;
Some((trait_, alias))
})
@@ -701,7 +710,7 @@ pub(crate) fn trait_datum_query(
};
let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
let associated_ty_ids =
- db.trait_items(trait_).associated_types().map(to_assoc_type_id).collect();
+ trait_.trait_items(db).associated_types().map(to_assoc_type_id).collect();
let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
let well_known = db.lang_attr(trait_.into()).and_then(well_known_trait_from_lang_item);
let trait_datum = TraitDatum {
@@ -792,7 +801,7 @@ pub(crate) fn adt_datum_query(
// this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it
let _variant_id_to_fields = |id: VariantId| {
- let variant_data = &id.variant_data(db);
+ let variant_data = &id.fields(db);
let fields = if variant_data.fields().is_empty() {
vec![]
} else {
@@ -813,11 +822,11 @@ pub(crate) fn adt_datum_query(
(rust_ir::AdtKind::Struct, vec![variant_id_to_fields(id.into())])
}
hir_def::AdtId::EnumId(id) => {
- let variants = db
- .enum_variants(id)
+ let variants = id
+ .enum_variants(db)
.variants
.iter()
- .map(|&(variant_id, _)| variant_id_to_fields(variant_id.into()))
+ .map(|&(variant_id, _, _)| variant_id_to_fields(variant_id.into()))
.collect();
(rust_ir::AdtKind::Enum, variants)
}
@@ -869,9 +878,9 @@ fn impl_def_datum(db: &dyn HirDatabase, krate: Crate, impl_id: hir_def::ImplId)
let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive };
let impl_datum_bound = rust_ir::ImplDatumBound { trait_ref, where_clauses };
- let trait_data = db.trait_items(trait_);
- let associated_ty_value_ids = db
- .impl_items(impl_id)
+ let trait_data = trait_.trait_items(db);
+ let associated_ty_value_ids = impl_id
+ .impl_items(db)
.items
.iter()
.filter_map(|(_, item)| match item {
@@ -921,8 +930,9 @@ fn type_alias_associated_ty_value(
.into_value_and_skipped_binders()
.0; // we don't return any assoc ty values if the impl'd trait can't be resolved
- let assoc_ty = db
- .trait_items(trait_ref.hir_trait_id())
+ let assoc_ty = trait_ref
+ .hir_trait_id()
+ .trait_items(db)
.associated_type_by_name(&type_alias_data.name)
.expect("assoc ty value should not exist"); // validated when building the impl data as well
let (ty, binders) = db.ty(type_alias.into()).into_value_and_skipped_binders();
diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs
index aabc4c4234..836cc96233 100644
--- a/crates/hir-ty/src/chalk_ext.rs
+++ b/crates/hir-ty/src/chalk_ext.rs
@@ -16,7 +16,8 @@ use crate::{
ClosureId, DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy,
QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause,
db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
- from_placeholder_idx, generics::generics, to_chalk_trait_id, utils::ClosureSubst,
+ from_placeholder_idx, generics::generics, mapping::ToChalk, to_chalk_trait_id,
+ utils::ClosureSubst,
};
pub trait TyExt {
@@ -190,10 +191,9 @@ impl TyExt for Ty {
fn as_generic_def(&self, db: &dyn HirDatabase) -> Option<GenericDefId> {
match *self.kind(Interner) {
TyKind::Adt(AdtId(adt), ..) => Some(adt.into()),
- TyKind::FnDef(callable, ..) => Some(GenericDefId::from_callable(
- db,
- db.lookup_intern_callable_def(callable.into()),
- )),
+ TyKind::FnDef(callable, ..) => {
+ Some(GenericDefId::from_callable(db, ToChalk::from_chalk(db, callable)))
+ }
TyKind::AssociatedType(type_alias, ..) => Some(from_assoc_type_id(type_alias).into()),
TyKind::Foreign(type_alias, ..) => Some(from_foreign_def_id(type_alias).into()),
_ => None,
@@ -202,7 +202,7 @@ impl TyExt for Ty {
fn callable_def(&self, db: &dyn HirDatabase) -> Option<CallableDefId> {
match self.kind(Interner) {
- &TyKind::FnDef(def, ..) => Some(db.lookup_intern_callable_def(def.into())),
+ &TyKind::FnDef(def, ..) => Some(ToChalk::from_chalk(db, def)),
_ => None,
}
}
diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index f903b06d65..14b9cd203f 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -281,12 +281,12 @@ pub(crate) fn const_eval_discriminant_variant(
let def = variant_id.into();
let body = db.body(def);
let loc = variant_id.lookup(db);
- if body.exprs[body.body_expr] == Expr::Missing {
+ if matches!(body[body.body_expr], Expr::Missing) {
let prev_idx = loc.index.checked_sub(1);
let value = match prev_idx {
Some(prev_idx) => {
1 + db.const_eval_discriminant(
- db.enum_variants(loc.parent).variants[prev_idx as usize].0,
+ loc.parent.enum_variants(db).variants[prev_idx as usize].0,
)?
}
_ => 0,
@@ -334,7 +334,7 @@ pub(crate) fn eval_to_const(
// Type checking clousres need an isolated body (See the above FIXME). Bail out early to prevent panic.
return unknown_const(infer[expr].clone());
}
- if let Expr::Path(p) = &ctx.body.exprs[expr] {
+ if let Expr::Path(p) = &ctx.body[expr] {
let resolver = &ctx.resolver;
if let Some(c) =
path_to_const(db, resolver, p, mode, || ctx.generics(), debruijn, infer[expr].clone())
diff --git a/crates/hir-ty/src/consteval/tests/intrinsics.rs b/crates/hir-ty/src/consteval/tests/intrinsics.rs
index ee375d60de..5e85978e29 100644
--- a/crates/hir-ty/src/consteval/tests/intrinsics.rs
+++ b/crates/hir-ty/src/consteval/tests/intrinsics.rs
@@ -112,16 +112,16 @@ fn size_of_val() {
}
#[test]
-fn min_align_of_val() {
+fn align_of_val() {
check_number(
r#"
//- minicore: coerce_unsized
#[rustc_intrinsic]
- pub fn min_align_of_val<T: ?Sized>(_: *const T) -> usize;
+ pub fn align_of_val<T: ?Sized>(_: *const T) -> usize;
struct X(i32, u8);
- const GOAL: usize = min_align_of_val(&X(1, 2));
+ const GOAL: usize = align_of_val(&X(1, 2));
"#,
4,
);
@@ -129,11 +129,11 @@ fn min_align_of_val() {
r#"
//- minicore: coerce_unsized
#[rustc_intrinsic]
- pub fn min_align_of_val<T: ?Sized>(_: *const T) -> usize;
+ pub fn align_of_val<T: ?Sized>(_: *const T) -> usize;
const GOAL: usize = {
let x: &[i32] = &[1, 2, 3];
- min_align_of_val(x)
+ align_of_val(x)
};
"#,
4,
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index 1e985dc604..b3d46845c4 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -237,18 +237,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
// Interned IDs for Chalk integration
#[salsa::interned]
- fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId;
-
- #[salsa::interned]
- fn intern_type_or_const_param_id(
- &self,
- param_id: TypeOrConstParamId,
- ) -> InternedTypeOrConstParamId;
-
- #[salsa::interned]
- fn intern_lifetime_param_id(&self, param_id: LifetimeParamId) -> InternedLifetimeParamId;
-
- #[salsa::interned]
fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
#[salsa::interned]
@@ -332,9 +320,31 @@ fn hir_database_is_dyn_compatible() {
fn _assert_dyn_compatible(_: &dyn HirDatabase) {}
}
-impl_intern_key!(InternedTypeOrConstParamId, TypeOrConstParamId);
+#[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
+#[derive(PartialOrd, Ord)]
+pub struct InternedTypeOrConstParamId {
+ pub loc: TypeOrConstParamId,
+}
+impl ::std::fmt::Debug for InternedTypeOrConstParamId {
+ fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+ f.debug_tuple(stringify!(InternedTypeOrConstParamId))
+ .field(&format_args!("{:04x}", self.0.index()))
+ .finish()
+ }
+}
-impl_intern_key!(InternedLifetimeParamId, LifetimeParamId);
+#[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
+#[derive(PartialOrd, Ord)]
+pub struct InternedLifetimeParamId {
+ pub loc: LifetimeParamId,
+}
+impl ::std::fmt::Debug for InternedLifetimeParamId {
+ fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+ f.debug_tuple(stringify!(InternedLifetimeParamId))
+ .field(&format_args!("{:04x}", self.0.index()))
+ .finish()
+ }
+}
impl_intern_key!(InternedConstParamId, ConstParamId);
@@ -347,7 +357,3 @@ impl_intern_key!(InternedClosureId, InternedClosure);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct InternedCoroutine(pub DefWithBodyId, pub ExprId);
impl_intern_key!(InternedCoroutineId, InternedCoroutine);
-
-// This exists just for Chalk, because Chalk just has a single `FnDefId` where
-// we have different IDs for struct and enum variant constructors.
-impl_intern_key!(InternedCallableDefId, CallableDefId);
diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs
index 099100a732..40fe3073cf 100644
--- a/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -226,11 +226,10 @@ impl<'a> DeclValidator<'a> {
let body = self.db.body(func.into());
let edition = self.edition(func);
let mut pats_replacements = body
- .pats
- .iter()
+ .pats()
.filter_map(|(pat_id, pat)| match pat {
Pat::Bind { id, .. } => {
- let bind_name = &body.bindings[*id].name;
+ let bind_name = &body[*id].name;
let mut suggested_text = to_lower_snake_case(bind_name.as_str())?;
if is_raw_identifier(&suggested_text, edition) {
suggested_text.insert_str(0, "r#");
@@ -307,7 +306,7 @@ impl<'a> DeclValidator<'a> {
/// Check incorrect names for struct fields.
fn validate_struct_fields(&mut self, struct_id: StructId) {
- let data = self.db.variant_fields(struct_id.into());
+ let data = struct_id.fields(self.db);
if data.shape != FieldsShape::Record {
return;
};
@@ -395,9 +394,9 @@ impl<'a> DeclValidator<'a> {
/// Check incorrect names for enum variants.
fn validate_enum_variants(&mut self, enum_id: EnumId) {
- let data = self.db.enum_variants(enum_id);
+ let data = enum_id.enum_variants(self.db);
- for (variant_id, _) in data.variants.iter() {
+ for (variant_id, _, _) in data.variants.iter() {
self.validate_enum_variant_fields(*variant_id);
}
@@ -405,7 +404,7 @@ impl<'a> DeclValidator<'a> {
let mut enum_variants_replacements = data
.variants
.iter()
- .filter_map(|(_, name)| {
+ .filter_map(|(_, name, _)| {
to_camel_case(&name.display_no_db(edition).to_smolstr()).map(|new_name| {
Replacement {
current_name: name.clone(),
@@ -468,7 +467,7 @@ impl<'a> DeclValidator<'a> {
/// Check incorrect names for fields of enum variant.
fn validate_enum_variant_fields(&mut self, variant_id: EnumVariantId) {
- let variant_data = self.db.variant_fields(variant_id.into());
+ let variant_data = variant_id.fields(self.db);
if variant_data.shape != FieldsShape::Record {
return;
};
diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs
index 9eb7ffe1c7..5ae6bf6dff 100644
--- a/crates/hir-ty/src/diagnostics/expr.rs
+++ b/crates/hir-ty/src/diagnostics/expr.rs
@@ -101,7 +101,7 @@ impl ExprValidator {
self.check_for_trailing_return(body.body_expr, &body);
}
- for (id, expr) in body.exprs.iter() {
+ for (id, expr) in body.exprs() {
if let Some((variant, missed_fields, true)) =
record_literal_missing_fields(db, &self.infer, id, expr)
{
@@ -132,7 +132,7 @@ impl ExprValidator {
}
}
- for (id, pat) in body.pats.iter() {
+ for (id, pat) in body.pats() {
if let Some((variant, missed_fields, true)) =
record_pattern_missing_fields(db, &self.infer, id, pat)
{
@@ -389,7 +389,7 @@ impl ExprValidator {
if !self.validate_lints {
return;
}
- match &body.exprs[body_expr] {
+ match &body[body_expr] {
Expr::Block { statements, tail, .. } => {
let last_stmt = tail.or_else(|| match statements.last()? {
Statement::Expr { expr, .. } => Some(*expr),
@@ -428,7 +428,7 @@ impl ExprValidator {
if else_branch.is_none() {
return;
}
- if let Expr::Block { statements, tail, .. } = &self.body.exprs[*then_branch] {
+ if let Expr::Block { statements, tail, .. } = &self.body[*then_branch] {
let last_then_expr = tail.or_else(|| match statements.last()? {
Statement::Expr { expr, .. } => Some(*expr),
_ => None,
@@ -494,7 +494,7 @@ impl FilterMapNextChecker {
Some(next_function_id),
match next_function_id.lookup(db).container {
ItemContainerId::TraitId(iterator_trait_id) => {
- let iterator_trait_items = &db.trait_items(iterator_trait_id).items;
+ let iterator_trait_items = &iterator_trait_id.trait_items(db).items;
iterator_trait_items.iter().find_map(|(name, it)| match it {
&AssocItemId::FunctionId(id) if *name == sym::filter_map => Some(id),
_ => None,
@@ -558,7 +558,7 @@ pub fn record_literal_missing_fields(
return None;
}
- let variant_data = variant_def.variant_data(db);
+ let variant_data = variant_def.fields(db);
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<LocalFieldId> = variant_data
@@ -588,7 +588,7 @@ pub fn record_pattern_missing_fields(
return None;
}
- let variant_data = variant_def.variant_data(db);
+ let variant_data = variant_def.fields(db);
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<LocalFieldId> = variant_data
@@ -642,7 +642,7 @@ fn missing_match_arms<'p>(
}
let non_empty_enum = match scrut_ty.as_adt() {
- Some((AdtId::EnumId(e), _)) => !cx.db.enum_variants(e).variants.is_empty(),
+ Some((AdtId::EnumId(e), _)) => !e.enum_variants(cx.db).variants.is_empty(),
_ => false,
};
let display_target = DisplayTarget::from_crate(cx.db, krate);
diff --git a/crates/hir-ty/src/diagnostics/match_check.rs b/crates/hir-ty/src/diagnostics/match_check.rs
index 7df22a45cb..ca132fbdc4 100644
--- a/crates/hir-ty/src/diagnostics/match_check.rs
+++ b/crates/hir-ty/src/diagnostics/match_check.rs
@@ -25,7 +25,6 @@ use crate::{
db::HirDatabase,
display::{HirDisplay, HirDisplayError, HirFormatter},
infer::BindingMode,
- lang_items::is_box,
};
use self::pat_util::EnumerateAndAdjustIterator;
@@ -77,7 +76,7 @@ pub(crate) enum PatKind {
subpatterns: Vec<FieldPat>,
},
- /// `box P`, `&P`, `&mut P`, etc.
+ /// `&P`, `&mut P`, etc.
Deref {
subpattern: Pat,
},
@@ -151,7 +150,7 @@ impl<'a> PatCtxt<'a> {
hir_def::hir::Pat::Bind { id, subpat, .. } => {
let bm = self.infer.binding_modes[pat];
ty = &self.infer[id];
- let name = &self.body.bindings[id].name;
+ let name = &self.body[id].name;
match (bm, ty.kind(Interner)) {
(BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty,
(BindingMode::Ref(_), _) => {
@@ -169,13 +168,13 @@ impl<'a> PatCtxt<'a> {
}
hir_def::hir::Pat::TupleStruct { ref args, ellipsis, .. } if variant.is_some() => {
- let expected_len = variant.unwrap().variant_data(self.db).fields().len();
+ let expected_len = variant.unwrap().fields(self.db).fields().len();
let subpatterns = self.lower_tuple_subpats(args, expected_len, ellipsis);
self.lower_variant_or_leaf(pat, ty, subpatterns)
}
hir_def::hir::Pat::Record { ref args, .. } if variant.is_some() => {
- let variant_data = variant.unwrap().variant_data(self.db);
+ let variant_data = variant.unwrap().fields(self.db);
let subpatterns = args
.iter()
.map(|field| {
@@ -328,7 +327,7 @@ impl HirDisplay for Pat {
write!(
f,
"{}",
- f.db.enum_variants(loc.parent).variants[loc.index as usize]
+ loc.parent.enum_variants(f.db).variants[loc.index as usize]
.1
.display(f.db, f.edition())
)?;
@@ -345,7 +344,7 @@ impl HirDisplay for Pat {
)?,
};
- let variant_data = variant.variant_data(f.db);
+ let variant_data = variant.fields(f.db);
if variant_data.shape == FieldsShape::Record {
write!(f, " {{ ")?;
@@ -377,7 +376,7 @@ impl HirDisplay for Pat {
}
let num_fields =
- variant.map_or(subpatterns.len(), |v| v.variant_data(f.db).fields().len());
+ variant.map_or(subpatterns.len(), |v| v.fields(f.db).fields().len());
if num_fields != 0 || variant.is_none() {
write!(f, "(")?;
let subpats = (0..num_fields).map(|i| {
@@ -406,7 +405,6 @@ impl HirDisplay for Pat {
}
PatKind::Deref { subpattern } => {
match self.ty.kind(Interner) {
- TyKind::Adt(adt, _) if is_box(f.db, adt.0) => write!(f, "box ")?,
&TyKind::Ref(mutbl, ..) => {
write!(f, "&{}", if mutbl == Mutability::Mut { "mut " } else { "" })?
}
diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
index dd82a0f45c..56fd12e1f2 100644
--- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
+++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -6,7 +6,7 @@ use std::fmt;
use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
use intern::sym;
use rustc_pattern_analysis::{
- Captures, IndexVec, PatCx, PrivateUninhabitedField,
+ IndexVec, PatCx, PrivateUninhabitedField,
constructor::{Constructor, ConstructorSet, VariantVisibility},
usefulness::{PlaceValidity, UsefulnessReport, compute_match_usefulness},
};
@@ -21,7 +21,7 @@ use crate::{
inhabitedness::{is_enum_variant_uninhabited_from, is_ty_uninhabited_from},
};
-use super::{FieldPat, Pat, PatKind, is_box};
+use super::{FieldPat, Pat, PatKind};
use Constructor::*;
@@ -50,7 +50,7 @@ impl EnumVariantContiguousIndex {
}
fn to_enum_variant_id(self, db: &dyn HirDatabase, eid: EnumId) -> EnumVariantId {
- db.enum_variants(eid).variants[self.0].0
+ eid.enum_variants(db).variants[self.0].0
}
}
@@ -138,15 +138,15 @@ impl<'db> MatchCheckCtx<'db> {
}
// This lists the fields of a variant along with their types.
- fn list_variant_fields<'a>(
- &'a self,
- ty: &'a Ty,
+ fn list_variant_fields(
+ &self,
+ ty: &Ty,
variant: VariantId,
- ) -> impl Iterator<Item = (LocalFieldId, Ty)> + Captures<'a> + Captures<'db> {
+ ) -> impl Iterator<Item = (LocalFieldId, Ty)> {
let (_, substs) = ty.as_adt().unwrap();
let field_tys = self.db.field_types(variant);
- let fields_len = variant.variant_data(self.db).fields().len() as u32;
+ let fields_len = variant.fields(self.db).fields().len() as u32;
(0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).map(move |fid| {
let ty = field_tys[fid].clone().substitute(Interner, substs);
@@ -170,8 +170,6 @@ impl<'db> MatchCheckCtx<'db> {
}
PatKind::Deref { subpattern } => {
ctor = match pat.ty.kind(Interner) {
- // This is a box pattern.
- TyKind::Adt(adt, _) if is_box(self.db, adt.0) => Struct,
TyKind::Ref(..) => Ref,
_ => {
never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
@@ -194,23 +192,6 @@ impl<'db> MatchCheckCtx<'db> {
ctor = Struct;
arity = substs.len(Interner);
}
- TyKind::Adt(adt, _) if is_box(self.db, adt.0) => {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
- // _)` or a box pattern. As a hack to avoid an ICE with the former, we
- // ignore other fields than the first one. This will trigger an error later
- // anyway.
- // See https://github.com/rust-lang/rust/issues/82772 ,
- // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
- // The problem is that we can't know from the type whether we'll match
- // normally or through box-patterns. We'll have to figure out a proper
- // solution when we introduce generalized deref patterns. Also need to
- // prevent mixing of those two options.
- fields.retain(|ipat| ipat.idx == 0);
- ctor = Struct;
- arity = 1;
- }
&TyKind::Adt(AdtId(adt), _) => {
ctor = match pat.kind.as_ref() {
PatKind::Leaf { .. } if matches!(adt, hir_def::AdtId::UnionId(_)) => {
@@ -229,7 +210,7 @@ impl<'db> MatchCheckCtx<'db> {
}
};
let variant = Self::variant_id_for_adt(self.db, &ctor, adt).unwrap();
- arity = variant.variant_data(self.db).fields().len();
+ arity = variant.fields(self.db).fields().len();
}
_ => {
never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
@@ -277,12 +258,6 @@ impl<'db> MatchCheckCtx<'db> {
})
.collect(),
},
- TyKind::Adt(adt, _) if is_box(self.db, adt.0) => {
- // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
- // of `std`). So this branch is only reachable when the feature is enabled and
- // the pattern is a box pattern.
- PatKind::Deref { subpattern: subpatterns.next().unwrap() }
- }
TyKind::Adt(adt, substs) => {
let variant = Self::variant_id_for_adt(self.db, pat.ctor(), adt.0).unwrap();
let subpatterns = self
@@ -343,14 +318,8 @@ impl PatCx for MatchCheckCtx<'_> {
Struct | Variant(_) | UnionField => match *ty.kind(Interner) {
TyKind::Tuple(arity, ..) => arity,
TyKind::Adt(AdtId(adt), ..) => {
- if is_box(self.db, adt) {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- 1
- } else {
- let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
- variant.variant_data(self.db).fields().len()
- }
+ let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
+ variant.fields(self.db).fields().len()
}
_ => {
never!("Unexpected type for `Single` constructor: {:?}", ty);
@@ -383,29 +352,22 @@ impl PatCx for MatchCheckCtx<'_> {
tys.cloned().map(|ty| (ty, PrivateUninhabitedField(false))).collect()
}
TyKind::Ref(.., rty) => single(rty.clone()),
- &TyKind::Adt(AdtId(adt), ref substs) => {
- if is_box(self.db, adt) {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- let subst_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
- single(subst_ty)
- } else {
- let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
-
- let visibilities = LazyCell::new(|| self.db.field_visibilities(variant));
-
- self.list_variant_fields(ty, variant)
- .map(move |(fid, ty)| {
- let is_visible = || {
- matches!(adt, hir_def::AdtId::EnumId(..))
- || visibilities[fid].is_visible_from(self.db, self.module)
- };
- let is_uninhabited = self.is_uninhabited(&ty);
- let private_uninhabited = is_uninhabited && !is_visible();
- (ty, PrivateUninhabitedField(private_uninhabited))
- })
- .collect()
- }
+ &TyKind::Adt(AdtId(adt), ..) => {
+ let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
+
+ let visibilities = LazyCell::new(|| self.db.field_visibilities(variant));
+
+ self.list_variant_fields(ty, variant)
+ .map(move |(fid, ty)| {
+ let is_visible = || {
+ matches!(adt, hir_def::AdtId::EnumId(..))
+ || visibilities[fid].is_visible_from(self.db, self.module)
+ };
+ let is_uninhabited = self.is_uninhabited(&ty);
+ let private_uninhabited = is_uninhabited && !is_visible();
+ (ty, PrivateUninhabitedField(private_uninhabited))
+ })
+ .collect()
}
ty_kind => {
never!("Unexpected type for `{:?}` constructor: {:?}", ctor, ty_kind);
@@ -458,14 +420,14 @@ impl PatCx for MatchCheckCtx<'_> {
TyKind::Scalar(Scalar::Int(..) | Scalar::Uint(..)) => unhandled(),
TyKind::Array(..) | TyKind::Slice(..) => unhandled(),
&TyKind::Adt(AdtId(adt @ hir_def::AdtId::EnumId(enum_id)), ref subst) => {
- let enum_data = cx.db.enum_variants(enum_id);
+ let enum_data = enum_id.enum_variants(cx.db);
let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive(adt);
if enum_data.variants.is_empty() && !is_declared_nonexhaustive {
ConstructorSet::NoConstructors
} else {
let mut variants = IndexVec::with_capacity(enum_data.variants.len());
- for &(variant, _) in enum_data.variants.iter() {
+ for &(variant, _, _) in enum_data.variants.iter() {
let is_uninhabited = is_enum_variant_uninhabited_from(
cx.db,
variant,
@@ -527,6 +489,14 @@ impl PatCx for MatchCheckCtx<'_> {
fn complexity_exceeded(&self) -> Result<(), Self::Error> {
Err(())
}
+
+ fn report_mixed_deref_pat_ctors(
+ &self,
+ _deref_pat: &DeconstructedPat<'_>,
+ _normal_pat: &DeconstructedPat<'_>,
+ ) {
+ // FIXME(deref_patterns): This could report an error comparable to the one in rustc.
+ }
}
impl fmt::Debug for MatchCheckCtx<'_> {
diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 20cf3c7811..f6ad3c7aae 100644
--- a/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -7,7 +7,7 @@ use either::Either;
use hir_def::{
AdtId, DefWithBodyId, FieldId, FunctionId, VariantId,
expr_store::{Body, path::Path},
- hir::{AsmOperand, Expr, ExprId, ExprOrPatId, Pat, PatId, Statement, UnaryOp},
+ hir::{AsmOperand, Expr, ExprId, ExprOrPatId, InlineAsmKind, Pat, PatId, Statement, UnaryOp},
resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs},
signatures::StaticFlags,
type_ref::Rawness,
@@ -217,7 +217,7 @@ impl<'db> UnsafeVisitor<'db> {
}
fn walk_pat(&mut self, current: PatId) {
- let pat = &self.body.pats[current];
+ let pat = &self.body[current];
if self.inside_union_destructure {
match pat {
@@ -264,7 +264,7 @@ impl<'db> UnsafeVisitor<'db> {
}
fn walk_expr(&mut self, current: ExprId) {
- let expr = &self.body.exprs[current];
+ let expr = &self.body[current];
let inside_assignment = mem::replace(&mut self.inside_assignment, false);
match expr {
&Expr::Call { callee, .. } => {
@@ -284,7 +284,7 @@ impl<'db> UnsafeVisitor<'db> {
self.resolver.reset_to_guard(guard);
}
Expr::Ref { expr, rawness: Rawness::RawPtr, mutability: _ } => {
- match self.body.exprs[*expr] {
+ match self.body[*expr] {
// Do not report unsafe for `addr_of[_mut]!(EXTERN_OR_MUT_STATIC)`,
// see https://github.com/rust-lang/rust/pull/125834.
Expr::Path(_) => return,
@@ -315,7 +315,12 @@ impl<'db> UnsafeVisitor<'db> {
self.inside_assignment = old_inside_assignment;
}
Expr::InlineAsm(asm) => {
- self.on_unsafe_op(current.into(), UnsafetyReason::InlineAsm);
+ if asm.kind == InlineAsmKind::Asm {
+ // `naked_asm!()` requires `unsafe` on the attribute (`#[unsafe(naked)]`),
+ // and `global_asm!()` doesn't require it at all.
+ self.on_unsafe_op(current.into(), UnsafetyReason::InlineAsm);
+ }
+
asm.operands.iter().for_each(|(_, op)| match op {
AsmOperand::In { expr, .. }
| AsmOperand::Out { expr: Some(expr), .. }
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index f210dd8799..b3760e3a38 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -795,6 +795,14 @@ fn render_const_scalar(
let Some(bytes) = memory_map.get(addr, size_one * count) else {
return f.write_str("<ref-data-not-available>");
};
+ let expected_len = count * size_one;
+ if bytes.len() < expected_len {
+ never!(
+ "Memory map size is too small. Expected {expected_len}, got {}",
+ bytes.len(),
+ );
+ return f.write_str("<layout-error>");
+ }
f.write_str("&[")?;
let mut first = true;
for i in 0..count {
@@ -888,7 +896,7 @@ fn render_const_scalar(
write!(f, "{}", data.name.display(f.db, f.edition()))?;
let field_types = f.db.field_types(s.into());
render_variant_after_name(
- &f.db.variant_fields(s.into()),
+ s.fields(f.db),
f,
&field_types,
f.db.trait_environment(adt.0.into()),
@@ -914,13 +922,13 @@ fn render_const_scalar(
write!(
f,
"{}",
- f.db.enum_variants(loc.parent).variants[loc.index as usize]
+ loc.parent.enum_variants(f.db).variants[loc.index as usize]
.1
.display(f.db, f.edition())
)?;
let field_types = f.db.field_types(var_id.into());
render_variant_after_name(
- &f.db.variant_fields(var_id.into()),
+ var_id.fields(f.db),
f,
&field_types,
f.db.trait_environment(adt.0.into()),
@@ -1208,7 +1216,7 @@ impl HirDisplay for Ty {
write!(
f,
"{}",
- db.enum_variants(loc.parent).variants[loc.index as usize]
+ loc.parent.enum_variants(db).variants[loc.index as usize]
.1
.display(db, f.edition())
)?
@@ -1394,7 +1402,7 @@ impl HirDisplay for Ty {
let future_trait =
LangItem::Future.resolve_trait(db, body.module(db).krate());
let output = future_trait.and_then(|t| {
- db.trait_items(t)
+ t.trait_items(db)
.associated_type_by_name(&Name::new_symbol_root(sym::Output))
});
write!(f, "impl ")?;
@@ -1432,10 +1440,10 @@ impl HirDisplay for Ty {
match f.closure_style {
ClosureStyle::Hide => return write!(f, "{TYPE_HINT_TRUNCATION}"),
ClosureStyle::ClosureWithId => {
- return write!(f, "{{closure#{:?}}}", id.0.as_u32());
+ return write!(f, "{{closure#{:?}}}", id.0.index());
}
ClosureStyle::ClosureWithSubst => {
- write!(f, "{{closure#{:?}}}", id.0.as_u32())?;
+ write!(f, "{{closure#{:?}}}", id.0.index())?;
return hir_fmt_generics(f, substs.as_slice(Interner), None, None);
}
_ => (),
@@ -2082,6 +2090,7 @@ pub fn write_visibility(
) -> Result<(), HirDisplayError> {
match vis {
Visibility::Public => write!(f, "pub "),
+ Visibility::PubCrate(_) => write!(f, "pub(crate) "),
Visibility::Module(vis_id, _) => {
let def_map = module_id.def_map(f.db);
let root_module_id = def_map.module_id(DefMap::ROOT);
@@ -2177,6 +2186,7 @@ impl HirDisplayWithExpressionStore for TypeRefId {
f.write_joined(
generic_params
.where_predicates()
+ .iter()
.filter_map(|it| match it {
WherePredicate::TypeBound { target, bound }
| WherePredicate::ForLifetime { lifetimes: _, target, bound }
@@ -2326,6 +2336,7 @@ impl HirDisplayWithExpressionStore for TypeBound {
store[*path].hir_fmt(f, store)
}
TypeBound::Use(args) => {
+ write!(f, "use<")?;
let edition = f.edition();
let last = args.len().saturating_sub(1);
for (idx, arg) in args.iter().enumerate() {
diff --git a/crates/hir-ty/src/drop.rs b/crates/hir-ty/src/drop.rs
index 70763759ef..5577be890d 100644
--- a/crates/hir-ty/src/drop.rs
+++ b/crates/hir-ty/src/drop.rs
@@ -67,11 +67,11 @@ pub(crate) fn has_drop_glue(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironm
}
// Unions cannot have fields with destructors.
AdtId::UnionId(_) => DropGlue::None,
- AdtId::EnumId(id) => db
- .enum_variants(id)
+ AdtId::EnumId(id) => id
+ .enum_variants(db)
.variants
.iter()
- .map(|&(variant, _)| {
+ .map(|&(variant, _, _)| {
db.field_types(variant.into())
.iter()
.map(|(_, field_ty)| {
diff --git a/crates/hir-ty/src/dyn_compatibility.rs b/crates/hir-ty/src/dyn_compatibility.rs
index ed8d8dc262..30949c83bf 100644
--- a/crates/hir-ty/src/dyn_compatibility.rs
+++ b/crates/hir-ty/src/dyn_compatibility.rs
@@ -101,7 +101,7 @@ where
// rustc checks for non-lifetime binders here, but we don't support HRTB yet
- let trait_data = db.trait_items(trait_);
+ let trait_data = trait_.trait_items(db);
for (_, assoc_item) in &trait_data.items {
dyn_compatibility_violation_for_assoc_item(db, trait_, *assoc_item, cb)?;
}
@@ -122,7 +122,7 @@ pub fn dyn_compatibility_of_trait_query(
res
}
-fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> bool {
+pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> bool {
let krate = def.module(db).krate();
let Some(sized) = LangItem::Sized.resolve_trait(db, krate) else {
return false;
@@ -164,7 +164,7 @@ fn predicates_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool {
// Same as the above, `predicates_reference_self`
fn bounds_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool {
- let trait_data = db.trait_items(trait_);
+ let trait_data = trait_.trait_items(db);
trait_data
.items
.iter()
diff --git a/crates/hir-ty/src/generics.rs b/crates/hir-ty/src/generics.rs
index bb4aaf7889..f14872e68c 100644
--- a/crates/hir-ty/src/generics.rs
+++ b/crates/hir-ty/src/generics.rs
@@ -60,7 +60,16 @@ impl Generics {
}
pub(crate) fn where_predicates(&self) -> impl Iterator<Item = &WherePredicate> {
- self.params.where_predicates()
+ self.params.where_predicates().iter()
+ }
+
+ pub(crate) fn has_no_predicates(&self) -> bool {
+ self.params.has_no_predicates()
+ && self.parent_generics.as_ref().is_none_or(|g| g.params.has_no_predicates())
+ }
+
+ pub(crate) fn is_empty(&self) -> bool {
+ self.params.is_empty() && self.parent_generics.as_ref().is_none_or(|g| g.params.is_empty())
}
pub(crate) fn iter_id(&self) -> impl Iterator<Item = GenericParamId> + '_ {
@@ -229,7 +238,7 @@ impl Generics {
}
/// Returns a Substitution that replaces each parameter by itself (i.e. `Ty::Param`).
- pub(crate) fn placeholder_subst(&self, db: &dyn HirDatabase) -> Substitution {
+ pub fn placeholder_subst(&self, db: &dyn HirDatabase) -> Substitution {
Substitution::from_iter(
Interner,
self.iter_id().map(|id| match id {
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 14eb716075..e880438e3a 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -460,19 +460,17 @@ pub struct InferenceResult {
/// Whenever a tuple field expression access a tuple field, we allocate a tuple id in
/// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of
/// that which allows us to resolve a [`TupleFieldId`]s type.
- pub tuple_field_access_types: FxHashMap<TupleId, Substitution>,
+ tuple_field_access_types: FxHashMap<TupleId, Substitution>,
/// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead.
- pub diagnostics: Vec<InferenceDiagnostic>,
- pub type_of_expr: ArenaMap<ExprId, Ty>,
+ diagnostics: Vec<InferenceDiagnostic>,
+ pub(crate) type_of_expr: ArenaMap<ExprId, Ty>,
/// For each pattern record the type it resolves to.
///
/// **Note**: When a pattern type is resolved it may still contain
/// unresolved or missing subpatterns or subpatterns of mismatched types.
- pub type_of_pat: ArenaMap<PatId, Ty>,
- pub type_of_binding: ArenaMap<BindingId, Ty>,
- pub type_of_rpit: ArenaMap<ImplTraitIdx, Ty>,
- /// Type of the result of `.into_iter()` on the for. `ExprId` is the one of the whole for loop.
- pub type_of_for_iterator: FxHashMap<ExprId, Ty>,
+ pub(crate) type_of_pat: ArenaMap<PatId, Ty>,
+ pub(crate) type_of_binding: ArenaMap<BindingId, Ty>,
+ pub(crate) type_of_rpit: ArenaMap<ImplTraitIdx, Ty>,
type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
/// Whether there are any type-mismatching errors in the result.
// FIXME: This isn't as useful as initially thought due to us falling back placeholders to
@@ -483,7 +481,7 @@ pub struct InferenceResult {
// FIXME: Move this into `InferenceContext`
standard_types: InternedStandardTypes,
/// Stores the types which were implicitly dereferenced in pattern binding modes.
- pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
+ pub(crate) pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
/// Stores the binding mode (`ref` in `let ref x = 2`) of bindings.
///
/// This one is tied to the `PatId` instead of `BindingId`, because in some rare cases, a binding in an
@@ -497,12 +495,12 @@ pub struct InferenceResult {
/// }
/// ```
/// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
- pub binding_modes: ArenaMap<PatId, BindingMode>,
- pub expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
+ pub(crate) binding_modes: ArenaMap<PatId, BindingMode>,
+ pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
pub(crate) closure_info: FxHashMap<ClosureId, (Vec<CapturedItem>, FnTrait)>,
// FIXME: remove this field
pub mutated_bindings_in_closure: FxHashSet<BindingId>,
- pub coercion_casts: FxHashSet<ExprId>,
+ pub(crate) coercion_casts: FxHashSet<ExprId>,
}
impl InferenceResult {
@@ -566,6 +564,26 @@ impl InferenceResult {
pub fn is_erroneous(&self) -> bool {
self.has_errors && self.type_of_expr.iter().count() == 0
}
+
+ pub fn diagnostics(&self) -> &[InferenceDiagnostic] {
+ &self.diagnostics
+ }
+
+ pub fn tuple_field_access_type(&self, id: TupleId) -> &Substitution {
+ &self.tuple_field_access_types[&id]
+ }
+
+ pub fn pat_adjustment(&self, id: PatId) -> Option<&[Ty]> {
+ self.pat_adjustments.get(&id).map(|it| &**it)
+ }
+
+ pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment]> {
+ self.expr_adjustments.get(&id).map(|it| &**it)
+ }
+
+ pub fn binding_mode(&self, id: PatId) -> Option<BindingMode> {
+ self.binding_modes.get(id).copied()
+ }
}
impl Index<ExprId> for InferenceResult {
@@ -772,7 +790,6 @@ impl<'db> InferenceContext<'db> {
type_of_pat,
type_of_binding,
type_of_rpit,
- type_of_for_iterator,
type_mismatches,
has_errors,
standard_types: _,
@@ -832,11 +849,6 @@ impl<'db> InferenceContext<'db> {
*has_errors = *has_errors || ty.contains_unknown();
}
type_of_rpit.shrink_to_fit();
- for ty in type_of_for_iterator.values_mut() {
- *ty = table.resolve_completely(ty.clone());
- *has_errors = *has_errors || ty.contains_unknown();
- }
- type_of_for_iterator.shrink_to_fit();
*has_errors |= !type_mismatches.is_empty();
@@ -1673,7 +1685,7 @@ impl<'db> InferenceContext<'db> {
// If we can resolve to an enum variant, it takes priority over associated type
// of the same name.
if let Some((AdtId::EnumId(id), _)) = ty.as_adt() {
- let enum_data = self.db.enum_variants(id);
+ let enum_data = id.enum_variants(self.db);
if let Some(variant) = enum_data.variant(current_segment.name) {
return if remaining_segments.len() == 1 {
(ty, Some(variant.into()))
@@ -1792,7 +1804,7 @@ impl<'db> InferenceContext<'db> {
let segment = path.segments().last().unwrap();
// this could be an enum variant or associated type
if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() {
- let enum_data = self.db.enum_variants(enum_id);
+ let enum_data = enum_id.enum_variants(self.db);
if let Some(variant) = enum_data.variant(segment) {
return (ty, Some(variant.into()));
}
@@ -1813,7 +1825,7 @@ impl<'db> InferenceContext<'db> {
}
fn resolve_output_on(&self, trait_: TraitId) -> Option<TypeAliasId> {
- self.db.trait_items(trait_).associated_type_by_name(&Name::new_symbol_root(sym::Output))
+ trait_.trait_items(self.db).associated_type_by_name(&Name::new_symbol_root(sym::Output))
}
fn resolve_lang_trait(&self, lang: LangItem) -> Option<TraitId> {
diff --git a/crates/hir-ty/src/infer/cast.rs b/crates/hir-ty/src/infer/cast.rs
index 10d85792c9..4e95eca3f9 100644
--- a/crates/hir-ty/src/infer/cast.rs
+++ b/crates/hir-ty/src/infer/cast.rs
@@ -43,7 +43,7 @@ impl CastTy {
let (AdtId::EnumId(id), _) = t.as_adt()? else {
return None;
};
- let enum_data = table.db.enum_variants(id);
+ let enum_data = id.enum_variants(table.db);
if enum_data.is_payload_free(table.db) { Some(Self::Int(Int::CEnum)) } else { None }
}
TyKind::Raw(m, ty) => Some(Self::Ptr(ty.clone(), *m)),
@@ -382,7 +382,7 @@ fn pointer_kind(ty: &Ty, table: &mut InferenceTable<'_>) -> Result<Option<Pointe
return Err(());
};
- let struct_data = table.db.variant_fields(id.into());
+ let struct_data = id.fields(table.db);
if let Some((last_field, _)) = struct_data.fields().iter().last() {
let last_field_ty =
table.db.field_types(id.into())[last_field].clone().substitute(Interner, subst);
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index bd57ca8916..c3029bf2b5 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -677,7 +677,7 @@ impl CapturedItem {
match proj {
ProjectionElem::Deref => {}
ProjectionElem::Field(Either::Left(f)) => {
- let variant_data = f.parent.variant_data(db);
+ let variant_data = f.parent.fields(db);
match variant_data.shape {
FieldsShape::Record => {
result.push('_');
@@ -720,7 +720,7 @@ impl CapturedItem {
// In source code autoderef kicks in.
ProjectionElem::Deref => {}
ProjectionElem::Field(Either::Left(f)) => {
- let variant_data = f.parent.variant_data(db);
+ let variant_data = f.parent.fields(db);
match variant_data.shape {
FieldsShape::Record => format_to!(
result,
@@ -782,7 +782,7 @@ impl CapturedItem {
if field_need_paren {
result = format!("({result})");
}
- let variant_data = f.parent.variant_data(db);
+ let variant_data = f.parent.fields(db);
let field = match variant_data.shape {
FieldsShape::Record => {
variant_data.fields()[f.local_id].name.as_str().to_owned()
@@ -1210,9 +1210,8 @@ impl InferenceContext<'_> {
if let Some(deref_trait) =
self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait())
{
- if let Some(deref_fn) = self
- .db
- .trait_items(deref_trait)
+ if let Some(deref_fn) = deref_trait
+ .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::deref_mut))
{
break 'b deref_fn == f;
@@ -1230,11 +1229,16 @@ impl InferenceContext<'_> {
self.select_from_expr(*expr);
}
}
+ Expr::Let { pat, expr } => {
+ self.walk_expr(*expr);
+ if let Some(place) = self.place_of_expr(*expr) {
+ self.consume_with_pat(place, *pat);
+ }
+ }
Expr::UnaryOp { expr, op: _ }
| Expr::Array(Array::Repeat { initializer: expr, repeat: _ })
| Expr::Await { expr }
| Expr::Loop { body: expr, label: _ }
- | Expr::Let { pat: _, expr }
| Expr::Box { expr }
| Expr::Cast { expr, type_ref: _ } => {
self.consume_expr(*expr);
@@ -1360,7 +1364,7 @@ impl InferenceContext<'_> {
if let Some(variant) = self.result.variant_resolution_for_pat(p) {
let adt = variant.adt_id(self.db);
let is_multivariant = match adt {
- hir_def::AdtId::EnumId(e) => self.db.enum_variants(e).variants.len() != 1,
+ hir_def::AdtId::EnumId(e) => e.enum_variants(self.db).variants.len() != 1,
_ => false,
};
if is_multivariant {
@@ -1556,7 +1560,7 @@ impl InferenceContext<'_> {
self.consume_place(place)
}
VariantId::StructId(s) => {
- let vd = &*self.db.variant_fields(s.into());
+ let vd = s.fields(self.db);
for field_pat in args.iter() {
let arg = field_pat.pat;
let Some(local_id) = vd.field(&field_pat.name) else {
@@ -1608,7 +1612,7 @@ impl InferenceContext<'_> {
self.consume_place(place)
}
VariantId::StructId(s) => {
- let vd = &*self.db.variant_fields(s.into());
+ let vd = s.fields(self.db);
let (al, ar) =
args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
let fields = vd.fields().iter();
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index 6403127929..d43c99fc28 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -542,7 +542,7 @@ impl InferenceContext<'_> {
_ if fields.is_empty() => {}
Some(def) => {
let field_types = self.db.field_types(def);
- let variant_data = def.variant_data(self.db);
+ let variant_data = def.fields(self.db);
let visibilities = self.db.field_visibilities(def);
for field in fields.iter() {
let field_def = {
@@ -654,9 +654,8 @@ impl InferenceContext<'_> {
match op {
UnaryOp::Deref => {
if let Some(deref_trait) = self.resolve_lang_trait(LangItem::Deref) {
- if let Some(deref_fn) = self
- .db
- .trait_items(deref_trait)
+ if let Some(deref_fn) = deref_trait
+ .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::deref))
{
// FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that
@@ -732,9 +731,32 @@ impl InferenceContext<'_> {
&Pat::Expr(expr) => {
Some(self.infer_expr(expr, &Expectation::none(), ExprIsRead::No))
}
- Pat::Path(path) => Some(self.infer_expr_path(path, target.into(), tgt_expr)),
+ Pat::Path(path) => {
+ let resolver_guard =
+ self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
+ let resolution = self.resolver.resolve_path_in_value_ns_fully(
+ self.db,
+ path,
+ self.body.pat_path_hygiene(target),
+ );
+ self.resolver.reset_to_guard(resolver_guard);
+
+ if matches!(
+ resolution,
+ Some(
+ ValueNs::ConstId(_)
+ | ValueNs::StructId(_)
+ | ValueNs::EnumVariantId(_)
+ )
+ ) {
+ None
+ } else {
+ Some(self.infer_expr_path(path, target.into(), tgt_expr))
+ }
+ }
_ => None,
};
+ let is_destructuring_assignment = lhs_ty.is_none();
if let Some(lhs_ty) = lhs_ty {
self.write_pat_ty(target, lhs_ty.clone());
@@ -748,7 +770,15 @@ impl InferenceContext<'_> {
self.inside_assignment = false;
self.resolver.reset_to_guard(resolver_guard);
}
- self.result.standard_types.unit.clone()
+ if is_destructuring_assignment && self.diverges.is_always() {
+ // Ordinary assignments always return `()`, even when they diverge.
+ // However, rustc lowers destructuring assignments into blocks, and blocks return `!` if they have no tail
+ // expression and they diverge. Therefore, we have to do the same here, even though we don't lower destructuring
+ // assignments into blocks.
+ self.table.new_maybe_never_var()
+ } else {
+ self.result.standard_types.unit.clone()
+ }
}
Expr::Range { lhs, rhs, range_type } => {
let lhs_ty =
@@ -813,9 +843,8 @@ impl InferenceContext<'_> {
self.table.new_lifetime_var(),
));
self.write_expr_adj(*base, adj.into_boxed_slice());
- if let Some(func) = self
- .db
- .trait_items(index_trait)
+ if let Some(func) = index_trait
+ .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::index))
{
let subst = TyBuilder::subst_for_def(self.db, index_trait, None);
@@ -1148,7 +1177,7 @@ impl InferenceContext<'_> {
let Some(trait_) = fn_x.get_id(self.db, self.table.trait_env.krate) else {
return;
};
- let trait_data = self.db.trait_items(trait_);
+ let trait_data = trait_.trait_items(self.db);
if let Some(func) = trait_data.method_by_name(&fn_x.method_name()) {
let subst = TyBuilder::subst_for_def(self.db, trait_, None)
.push(callee_ty.clone())
@@ -1316,7 +1345,7 @@ impl InferenceContext<'_> {
let trait_func = lang_items_for_bin_op(op).and_then(|(name, lang_item)| {
let trait_id = self.resolve_lang_item(lang_item)?.as_trait()?;
- let func = self.db.trait_items(trait_id).method_by_name(&name)?;
+ let func = trait_id.trait_items(self.db).method_by_name(&name)?;
Some((trait_id, func))
});
let (trait_, func) = match trait_func {
@@ -1568,12 +1597,12 @@ impl InferenceContext<'_> {
});
}
&TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref parameters) => {
- let local_id = self.db.variant_fields(s.into()).field(name)?;
+ let local_id = s.fields(self.db).field(name)?;
let field = FieldId { parent: s.into(), local_id };
(field, parameters.clone())
}
&TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), ref parameters) => {
- let local_id = self.db.variant_fields(u.into()).field(name)?;
+ let local_id = u.fields(self.db).field(name)?;
let field = FieldId { parent: u.into(), local_id };
(field, parameters.clone())
}
diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs
index ac450c0b55..3f7eba9dd1 100644
--- a/crates/hir-ty/src/infer/mutability.rs
+++ b/crates/hir-ty/src/infer/mutability.rs
@@ -129,9 +129,8 @@ impl InferenceContext<'_> {
if let Some(index_trait) =
LangItem::IndexMut.resolve_trait(self.db, self.table.trait_env.krate)
{
- if let Some(index_fn) = self
- .db
- .trait_items(index_trait)
+ if let Some(index_fn) = index_trait
+ .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::index_mut))
{
*f = index_fn;
@@ -194,9 +193,8 @@ impl InferenceContext<'_> {
});
if is_mut_ptr {
mutability = Mutability::Not;
- } else if let Some(deref_fn) = self
- .db
- .trait_items(deref_trait)
+ } else if let Some(deref_fn) = deref_trait
+ .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::deref_mut))
{
*f = deref_fn;
@@ -275,7 +273,7 @@ impl InferenceContext<'_> {
fn pat_bound_mutability(&self, pat: PatId) -> Mutability {
let mut r = Mutability::Not;
self.body.walk_bindings_in_pat(pat, |b| {
- if self.body.bindings[b].mode == BindingAnnotation::RefMut {
+ if self.body[b].mode == BindingAnnotation::RefMut {
r = Mutability::Mut;
}
});
diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs
index 4bc3e167eb..18288b718f 100644
--- a/crates/hir-ty/src/infer/pat.rs
+++ b/crates/hir-ty/src/infer/pat.rs
@@ -38,7 +38,7 @@ impl InferenceContext<'_> {
decl: Option<DeclContext>,
) -> Ty {
let (ty, def) = self.resolve_variant(id.into(), path, true);
- let var_data = def.map(|it| it.variant_data(self.db));
+ let var_data = def.map(|it| it.fields(self.db));
if let Some(variant) = def {
self.write_variant_resolution(id.into(), variant);
}
@@ -60,7 +60,7 @@ impl InferenceContext<'_> {
_ if subs.is_empty() => {}
Some(def) => {
let field_types = self.db.field_types(def);
- let variant_data = def.variant_data(self.db);
+ let variant_data = def.fields(self.db);
let visibilities = self.db.field_visibilities(def);
let (pre, post) = match ellipsis {
@@ -129,7 +129,7 @@ impl InferenceContext<'_> {
_ if subs.len() == 0 => {}
Some(def) => {
let field_types = self.db.field_types(def);
- let variant_data = def.variant_data(self.db);
+ let variant_data = def.fields(self.db);
let visibilities = self.db.field_visibilities(def);
let substs = ty.as_adt().map(TupleExt::tail);
@@ -459,7 +459,7 @@ impl InferenceContext<'_> {
expected: &Ty,
decl: Option<DeclContext>,
) -> Ty {
- let Binding { mode, .. } = self.body.bindings[binding];
+ let Binding { mode, .. } = self.body[binding];
let mode = if mode == BindingAnnotation::Unannotated {
default_bm
} else {
@@ -639,7 +639,7 @@ impl InferenceContext<'_> {
pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool {
let mut res = false;
body.walk_pats(pat_id, &mut |pat| {
- res |= matches!(body[pat], Pat::Bind { id, .. } if body.bindings[id].mode == BindingAnnotation::Ref);
+ res |= matches!(body[pat], Pat::Bind { id, .. } if body[id].mode == BindingAnnotation::Ref);
});
res
}
diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs
index 9d4bbe5346..bc8648ecdd 100644
--- a/crates/hir-ty/src/infer/path.rs
+++ b/crates/hir-ty/src/infer/path.rs
@@ -278,7 +278,7 @@ impl InferenceContext<'_> {
) -> Option<(ValueNs, Substitution)> {
let trait_ = trait_ref.hir_trait_id();
let item =
- self.db.trait_items(trait_).items.iter().map(|(_name, id)| *id).find_map(|item| {
+ trait_.trait_items(self.db).items.iter().map(|(_name, id)| *id).find_map(|item| {
match item {
AssocItemId::FunctionId(func) => {
if segment.name == &self.db.function_signature(func).name {
@@ -397,7 +397,7 @@ impl InferenceContext<'_> {
Some((AdtId::EnumId(e), subst)) => (e, subst),
_ => return None,
};
- let enum_data = self.db.enum_variants(enum_id);
+ let enum_data = enum_id.enum_variants(self.db);
let variant = enum_data.variant(name)?;
self.write_variant_resolution(id, variant.into());
Some((ValueNs::EnumVariantId(variant), subst.clone()))
diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs
index 631b571465..c07755535f 100644
--- a/crates/hir-ty/src/infer/unify.rs
+++ b/crates/hir-ty/src/infer/unify.rs
@@ -859,7 +859,7 @@ impl<'a> InferenceTable<'a> {
] {
let krate = self.trait_env.krate;
let fn_trait = fn_trait_name.get_id(self.db, krate)?;
- let trait_data = self.db.trait_items(fn_trait);
+ let trait_data = fn_trait.trait_items(self.db);
let output_assoc_type =
trait_data.associated_type_by_name(&Name::new_symbol_root(output_assoc_name))?;
@@ -1001,7 +1001,7 @@ impl<'a> InferenceTable<'a> {
// Must use a loop here and not recursion because otherwise users will conduct completely
// artificial examples of structs that have themselves as the tail field and complain r-a crashes.
while let Some((AdtId::StructId(id), subst)) = ty.as_adt() {
- let struct_data = self.db.variant_fields(id.into());
+ let struct_data = id.fields(self.db);
if let Some((last_field, _)) = struct_data.fields().iter().next_back() {
let last_field_ty = self.db.field_types(id.into())[last_field]
.clone()
diff --git a/crates/hir-ty/src/inhabitedness.rs b/crates/hir-ty/src/inhabitedness.rs
index e81a5e3c31..b16b6a1178 100644
--- a/crates/hir-ty/src/inhabitedness.rs
+++ b/crates/hir-ty/src/inhabitedness.rs
@@ -113,9 +113,9 @@ impl UninhabitedFrom<'_> {
AdtId::UnionId(_) => CONTINUE_OPAQUELY_INHABITED,
AdtId::StructId(s) => self.visit_variant(s.into(), subst),
AdtId::EnumId(e) => {
- let enum_data = self.db.enum_variants(e);
+ let enum_data = e.enum_variants(self.db);
- for &(variant, _) in enum_data.variants.iter() {
+ for &(variant, _, _) in enum_data.variants.iter() {
let variant_inhabitedness = self.visit_variant(variant.into(), subst);
match variant_inhabitedness {
Break(VisiblyUninhabited) => (),
@@ -132,7 +132,7 @@ impl UninhabitedFrom<'_> {
variant: VariantId,
subst: &Substitution,
) -> ControlFlow<VisiblyUninhabited> {
- let variant_data = self.db.variant_fields(variant);
+ let variant_data = variant.fields(self.db);
let fields = variant_data.fields();
if fields.is_empty() {
return CONTINUE_OPAQUELY_INHABITED;
diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs
index c253fe2567..107da6a5af 100644
--- a/crates/hir-ty/src/layout.rs
+++ b/crates/hir-ty/src/layout.rs
@@ -261,14 +261,14 @@ pub fn layout_of_ty_query(
}
// Potentially-wide pointers.
TyKind::Ref(_, _, pointee) | TyKind::Raw(_, pointee) => {
- let mut data_ptr = scalar_unit(dl, Primitive::Pointer(AddressSpace::DATA));
+ let mut data_ptr = scalar_unit(dl, Primitive::Pointer(AddressSpace::ZERO));
if matches!(ty.kind(Interner), TyKind::Ref(..)) {
data_ptr.valid_range_mut().start = 1;
}
// let pointee = tcx.normalize_erasing_regions(param_env, pointee);
// if pointee.is_sized(tcx.at(DUMMY_SP), param_env) {
- // return Ok(tcx.mk_layout(LayoutS::scalar(cx, data_ptr)));
+ // return Ok(tcx.mk_layout(LayoutData::scalar(cx, data_ptr)));
// }
let mut unsized_part = struct_tail_erasing_lifetimes(db, pointee.clone());
@@ -285,7 +285,7 @@ pub fn layout_of_ty_query(
scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false))
}
TyKind::Dyn(..) => {
- let mut vtable = scalar_unit(dl, Primitive::Pointer(AddressSpace::DATA));
+ let mut vtable = scalar_unit(dl, Primitive::Pointer(AddressSpace::ZERO));
vtable.valid_range_mut().start = 1;
vtable
}
@@ -375,7 +375,7 @@ pub(crate) fn layout_of_ty_cycle_result(
fn struct_tail_erasing_lifetimes(db: &dyn HirDatabase, pointee: Ty) -> Ty {
match pointee.kind(Interner) {
&TyKind::Adt(AdtId(hir_def::AdtId::StructId(i)), ref subst) => {
- let data = db.variant_fields(i.into());
+ let data = i.fields(db);
let mut it = data.fields().iter().rev();
match it.next() {
Some((f, _)) => {
diff --git a/crates/hir-ty/src/layout/adt.rs b/crates/hir-ty/src/layout/adt.rs
index 3a020bf050..236f316366 100644
--- a/crates/hir-ty/src/layout/adt.rs
+++ b/crates/hir-ty/src/layout/adt.rs
@@ -42,7 +42,7 @@ pub fn layout_of_adt_query(
AdtId::StructId(s) => {
let sig = db.struct_signature(s);
let mut r = SmallVec::<[_; 1]>::new();
- r.push(handle_variant(s.into(), &db.variant_fields(s.into()))?);
+ r.push(handle_variant(s.into(), s.fields(db))?);
(
r,
sig.repr.unwrap_or_default(),
@@ -52,15 +52,15 @@ pub fn layout_of_adt_query(
AdtId::UnionId(id) => {
let data = db.union_signature(id);
let mut r = SmallVec::new();
- r.push(handle_variant(id.into(), &db.variant_fields(id.into()))?);
+ r.push(handle_variant(id.into(), id.fields(db))?);
(r, data.repr.unwrap_or_default(), false)
}
AdtId::EnumId(e) => {
- let variants = db.enum_variants(e);
+ let variants = e.enum_variants(db);
let r = variants
.variants
.iter()
- .map(|&(v, _)| handle_variant(v.into(), &db.variant_fields(v.into())))
+ .map(|&(v, _, _)| handle_variant(v.into(), v.fields(db)))
.collect::<Result<SmallVec<_>, _>>()?;
(r, db.enum_signature(e).repr.unwrap_or_default(), false)
}
@@ -82,7 +82,7 @@ pub fn layout_of_adt_query(
|min, max| repr_discr(dl, &repr, min, max).unwrap_or((Integer::I8, false)),
variants.iter_enumerated().filter_map(|(id, _)| {
let AdtId::EnumId(e) = def else { return None };
- let d = db.const_eval_discriminant(db.enum_variants(e).variants[id.0].0).ok()?;
+ let d = db.const_eval_discriminant(e.enum_variants(db).variants[id.0].0).ok()?;
Some((id, d))
}),
// FIXME: The current code for niche-filling relies on variant indices
diff --git a/crates/hir-ty/src/layout/target.rs b/crates/hir-ty/src/layout/target.rs
index e1e1c44996..82d0ed4f19 100644
--- a/crates/hir-ty/src/layout/target.rs
+++ b/crates/hir-ty/src/layout/target.rs
@@ -2,7 +2,7 @@
use base_db::Crate;
use hir_def::layout::TargetDataLayout;
-use rustc_abi::{AlignFromBytesError, TargetDataLayoutErrors};
+use rustc_abi::{AddressSpace, AlignFromBytesError, TargetDataLayoutErrors};
use triomphe::Arc;
use crate::db::HirDatabase;
@@ -12,7 +12,7 @@ pub fn target_data_layout_query(
krate: Crate,
) -> Result<Arc<TargetDataLayout>, Arc<str>> {
match &krate.workspace_data(db).data_layout {
- Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) {
+ Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it, AddressSpace::ZERO) {
Ok(it) => Ok(Arc::new(it)),
Err(e) => {
Err(match e {
@@ -39,6 +39,7 @@ pub fn target_data_layout_query(
target,
} => format!(r#"inconsistent target specification: "data-layout" claims pointers are {pointer_size}-bit, while "target-pointer-width" is `{target}`"#),
TargetDataLayoutErrors::InvalidBitsSize { err } => err,
+ TargetDataLayoutErrors::UnknownPointerSpecification { err } => format!(r#"use of unknown pointer specifer in "data-layout": {err}"#),
}.into())
}
},
diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs
index cc7d74f4fb..b3bc226ec9 100644
--- a/crates/hir-ty/src/layout/tests.rs
+++ b/crates/hir-ty/src/layout/tests.rs
@@ -119,8 +119,7 @@ fn eval_expr(
.unwrap();
let hir_body = db.body(function_id.into());
let b = hir_body
- .bindings
- .iter()
+ .bindings()
.find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal")
.unwrap()
.0;
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index 128569d55d..e787fd9b1e 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -98,7 +98,7 @@ pub use lower::{
ValueTyDefId, associated_type_shorthand_candidates, diagnostics::*,
};
pub use mapping::{
- from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
+ ToChalk, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
lt_from_placeholder_idx, lt_to_placeholder_idx, to_assoc_type_id, to_chalk_trait_id,
to_foreign_def_id, to_placeholder_idx,
};
@@ -542,7 +542,7 @@ impl CallableSig {
}
pub fn from_def(db: &dyn HirDatabase, def: FnDefId, substs: &Substitution) -> CallableSig {
- let callable_def = db.lookup_intern_callable_def(def.into());
+ let callable_def = ToChalk::from_chalk(db, def);
let sig = db.callable_item_signature(callable_def);
sig.substitute(Interner, substs)
}
@@ -891,8 +891,8 @@ pub fn callable_sig_from_fn_trait(
) -> Option<(FnTrait, CallableSig)> {
let krate = trait_env.krate;
let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?;
- let output_assoc_type = db
- .trait_items(fn_once_trait)
+ let output_assoc_type = fn_once_trait
+ .trait_items(db)
.associated_type_by_name(&Name::new_symbol_root(sym::Output))?;
let mut table = InferenceTable::new(db, trait_env.clone());
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 0a546768da..f32b6af4d8 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -581,11 +581,28 @@ impl<'a> TyLoweringContext<'a> {
match bound {
&TypeBound::Path(path, TraitBoundModifier::None) | &TypeBound::ForLifetime(_, path) => {
// FIXME Don't silently drop the hrtb lifetimes here
- if let Some((trait_ref, ctx)) = self.lower_trait_ref_from_path(path, self_ty) {
- if !ignore_bindings {
- assoc_bounds = ctx.assoc_type_bindings_from_type_bound(trait_ref.clone());
+ if let Some((trait_ref, mut ctx)) =
+ self.lower_trait_ref_from_path(path, self_ty.clone())
+ {
+ // FIXME(sized-hierarchy): Remove this bound modifications once we have implemented
+ // sized-hierarchy correctly.
+ let meta_sized = LangItem::MetaSized
+ .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate());
+ let pointee_sized = LangItem::PointeeSized
+ .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate());
+ if meta_sized.is_some_and(|it| it == trait_ref.hir_trait_id()) {
+ // Ignore this bound
+ } else if pointee_sized.is_some_and(|it| it == trait_ref.hir_trait_id()) {
+ // Regard this as `?Sized` bound
+ ctx.ty_ctx().unsized_types.insert(self_ty);
+ } else {
+ if !ignore_bindings {
+ assoc_bounds =
+ ctx.assoc_type_bindings_from_type_bound(trait_ref.clone());
+ }
+ clause =
+ Some(crate::wrap_empty_binders(WhereClause::Implemented(trait_ref)));
}
- clause = Some(crate::wrap_empty_binders(WhereClause::Implemented(trait_ref)));
}
}
&TypeBound::Path(path, TraitBoundModifier::Maybe) => {
@@ -711,7 +728,7 @@ impl<'a> TyLoweringContext<'a> {
.unwrap_or(it),
None => it,
},
- None => static_lifetime(),
+ None => error_lifetime(),
},
})
.intern(Interner)
@@ -805,7 +822,7 @@ fn named_associated_type_shorthand_candidates<R>(
) -> Option<R> {
let mut search = |t| {
all_super_trait_refs(db, t, |t| {
- let data = db.trait_items(t.hir_trait_id());
+ let data = t.hir_trait_id().trait_items(db);
for (name, assoc_id) in &data.items {
if let AssocItemId::TypeAliasId(alias) = assoc_id {
@@ -883,7 +900,12 @@ pub(crate) fn field_types_with_diagnostics_query(
db: &dyn HirDatabase,
variant_id: VariantId,
) -> (Arc<ArenaMap<LocalFieldId, Binders<Ty>>>, Diagnostics) {
- let var_data = db.variant_fields(variant_id);
+ let var_data = variant_id.fields(db);
+ let fields = var_data.fields();
+ if fields.is_empty() {
+ return (Arc::new(ArenaMap::default()), None);
+ }
+
let (resolver, def): (_, GenericDefId) = match variant_id {
VariantId::StructId(it) => (it.resolver(db), it.into()),
VariantId::UnionId(it) => (it.resolver(db), it.into()),
@@ -899,7 +921,7 @@ pub(crate) fn field_types_with_diagnostics_query(
LifetimeElisionKind::AnonymousReportError,
)
.with_type_param_mode(ParamLoweringMode::Variable);
- for (field_id, field_data) in var_data.fields().iter() {
+ for (field_id, field_data) in fields.iter() {
res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(field_data.type_ref)));
}
(Arc::new(res), create_diagnostics(ctx.diagnostics))
@@ -920,6 +942,10 @@ pub(crate) fn generic_predicates_for_param_query(
assoc_name: Option<Name>,
) -> GenericPredicates {
let generics = generics(db, def);
+ if generics.has_no_predicates() && generics.is_empty() {
+ return GenericPredicates(None);
+ }
+
let resolver = def.resolver(db);
let mut ctx = TyLoweringContext::new(
db,
@@ -936,8 +962,32 @@ pub(crate) fn generic_predicates_for_param_query(
| WherePredicate::TypeBound { target, bound, .. } => {
let invalid_target = { ctx.lower_ty_only_param(*target) != Some(param_id) };
if invalid_target {
- // If this is filtered out without lowering, `?Sized` is not gathered into `ctx.unsized_types`
- if let TypeBound::Path(_, TraitBoundModifier::Maybe) = bound {
+ // FIXME(sized-hierarchy): Revisit and adjust this properly once we have implemented
+ // sized-hierarchy correctly.
+ // If this is filtered out without lowering, `?Sized` or `PointeeSized` is not gathered into
+ // `ctx.unsized_types`
+ let lower = || -> bool {
+ match bound {
+ TypeBound::Path(_, TraitBoundModifier::Maybe) => true,
+ TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => {
+ let TypeRef::Path(path) = &ctx.store[path.type_ref()] else {
+ return false;
+ };
+ let Some(pointee_sized) =
+ LangItem::PointeeSized.resolve_trait(ctx.db, ctx.resolver.krate())
+ else {
+ return false;
+ };
+ // Lower the path directly with `Resolver` instead of PathLoweringContext`
+ // to prevent diagnostics duplications.
+ ctx.resolver.resolve_path_in_type_ns_fully(ctx.db, path).is_some_and(
+ |it| matches!(it, TypeNs::TraitId(tr) if tr == pointee_sized),
+ )
+ }
+ _ => false,
+ }
+ }();
+ if lower {
ctx.lower_where_predicate(pred, true).for_each(drop);
}
return false;
@@ -957,7 +1007,7 @@ pub(crate) fn generic_predicates_for_param_query(
};
all_super_traits(db, tr).iter().any(|tr| {
- db.trait_items(*tr).items.iter().any(|(name, item)| {
+ tr.trait_items(db).items.iter().any(|(name, item)| {
matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
})
})
@@ -1025,6 +1075,10 @@ pub(crate) fn trait_environment_query(
def: GenericDefId,
) -> Arc<TraitEnvironment> {
let generics = generics(db, def);
+ if generics.has_no_predicates() && generics.is_empty() {
+ return TraitEnvironment::empty(def.krate(db));
+ }
+
let resolver = def.resolver(db);
let mut ctx = TyLoweringContext::new(
db,
@@ -1128,6 +1182,10 @@ where
F: Fn(&WherePredicate, GenericDefId) -> bool,
{
let generics = generics(db, def);
+ if generics.has_no_predicates() && generics.is_empty() {
+ return (GenericPredicates(None), None);
+ }
+
let resolver = def.resolver(db);
let mut ctx = TyLoweringContext::new(
db,
@@ -1154,7 +1212,7 @@ where
}
}
- if generics.len() > 0 {
+ if !generics.is_empty() {
let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
let explicitly_unsized_tys = ctx.unsized_types;
if let Some(implicitly_sized_predicates) =
@@ -1229,7 +1287,7 @@ pub(crate) fn generic_defaults_with_diagnostics_query(
def: GenericDefId,
) -> (GenericDefaults, Diagnostics) {
let generic_params = generics(db, def);
- if generic_params.len() == 0 {
+ if generic_params.is_empty() {
return (GenericDefaults(None), None);
}
let resolver = def.resolver(db);
@@ -1418,7 +1476,7 @@ fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnS
/// Build the type of a tuple struct constructor.
fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Option<Binders<Ty>> {
- let struct_data = db.variant_fields(def.into());
+ let struct_data = def.fields(db);
match struct_data.shape {
FieldsShape::Record => None,
FieldsShape::Unit => Some(type_for_adt(db, def.into())),
@@ -1451,7 +1509,7 @@ fn type_for_enum_variant_constructor(
def: EnumVariantId,
) -> Option<Binders<Ty>> {
let e = def.lookup(db).parent;
- match db.variant_fields(def.into()).shape {
+ match def.fields(db).shape {
FieldsShape::Record => None,
FieldsShape::Unit => Some(type_for_adt(db, e.into())),
FieldsShape::Tuple => {
diff --git a/crates/hir-ty/src/lower/path.rs b/crates/hir-ty/src/lower/path.rs
index 726eaf8b0a..5c06234fa0 100644
--- a/crates/hir-ty/src/lower/path.rs
+++ b/crates/hir-ty/src/lower/path.rs
@@ -173,7 +173,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
self.skip_resolved_segment();
let segment = self.current_or_prev_segment;
let found =
- self.ctx.db.trait_items(trait_).associated_type_by_name(segment.name);
+ trait_.trait_items(self.ctx.db).associated_type_by_name(segment.name);
match found {
Some(associated_ty) => {
@@ -1018,8 +1018,12 @@ fn check_generic_args_len(
}
let lifetime_args_len = def_generics.len_lifetimes_self();
- if provided_lifetimes_count == 0 && lifetime_args_len > 0 && !lowering_assoc_type_generics {
- // In generic associated types, we never allow inferring the lifetimes.
+ if provided_lifetimes_count == 0
+ && lifetime_args_len > 0
+ && (!lowering_assoc_type_generics || infer_args)
+ {
+ // In generic associated types, we never allow inferring the lifetimes, but only in type context, that is
+ // when `infer_args == false`. In expression/pattern context we always allow inferring them, even for GATs.
match lifetime_elision {
&LifetimeElisionKind::AnonymousCreateParameter { report_in_path } => {
ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, report_in_path);
diff --git a/crates/hir-ty/src/mapping.rs b/crates/hir-ty/src/mapping.rs
index 2abc1ac62a..9d3d2044c4 100644
--- a/crates/hir-ty/src/mapping.rs
+++ b/crates/hir-ty/src/mapping.rs
@@ -13,10 +13,11 @@ use salsa::{
use crate::{
AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId, Interner, OpaqueTyId,
- PlaceholderIndex, chalk_db, db::HirDatabase,
+ PlaceholderIndex, chalk_db,
+ db::{HirDatabase, InternedLifetimeParamId, InternedTypeOrConstParamId},
};
-pub(crate) trait ToChalk {
+pub trait ToChalk {
type Chalk;
fn to_chalk(self, db: &dyn HirDatabase) -> Self::Chalk;
fn from_chalk(db: &dyn HirDatabase, chalk: Self::Chalk) -> Self;
@@ -44,12 +45,12 @@ impl ToChalk for hir_def::ImplId {
impl ToChalk for CallableDefId {
type Chalk = FnDefId;
- fn to_chalk(self, db: &dyn HirDatabase) -> FnDefId {
- db.intern_callable_def(self).into()
+ fn to_chalk(self, _db: &dyn HirDatabase) -> FnDefId {
+ chalk_ir::FnDefId(salsa::plumbing::AsId::as_id(&self))
}
fn from_chalk(db: &dyn HirDatabase, fn_def_id: FnDefId) -> CallableDefId {
- db.lookup_intern_callable_def(fn_def_id.into())
+ salsa::plumbing::FromIdWithDb::from_id(fn_def_id.0, db.zalsa())
}
}
@@ -70,18 +71,6 @@ impl ToChalk for TypeAliasAsValue {
}
}
-impl From<FnDefId> for crate::db::InternedCallableDefId {
- fn from(fn_def_id: FnDefId) -> Self {
- Self::from_id(fn_def_id.0)
- }
-}
-
-impl From<crate::db::InternedCallableDefId> for FnDefId {
- fn from(callable_def_id: crate::db::InternedCallableDefId) -> Self {
- chalk_ir::FnDefId(callable_def_id.as_id())
- }
-}
-
impl From<OpaqueTyId> for crate::db::InternedOpaqueTyId {
fn from(id: OpaqueTyId) -> Self {
FromId::from_id(id.0)
@@ -137,30 +126,32 @@ pub fn from_assoc_type_id(id: AssocTypeId) -> TypeAliasId {
pub fn from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> TypeOrConstParamId {
assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
// SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound.
- let interned_id = FromId::from_id(unsafe { Id::from_u32(idx.idx.try_into().unwrap()) });
- db.lookup_intern_type_or_const_param_id(interned_id)
+ let interned_id =
+ InternedTypeOrConstParamId::from_id(unsafe { Id::from_index(idx.idx.try_into().unwrap()) });
+ interned_id.loc(db)
}
pub fn to_placeholder_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> PlaceholderIndex {
- let interned_id = db.intern_type_or_const_param_id(id);
+ let interned_id = InternedTypeOrConstParamId::new(db, id);
PlaceholderIndex {
ui: chalk_ir::UniverseIndex::ROOT,
- idx: interned_id.as_id().as_u32() as usize,
+ idx: interned_id.as_id().index() as usize,
}
}
pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> LifetimeParamId {
assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
// SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound.
- let interned_id = FromId::from_id(unsafe { Id::from_u32(idx.idx.try_into().unwrap()) });
- db.lookup_intern_lifetime_param_id(interned_id)
+ let interned_id =
+ InternedLifetimeParamId::from_id(unsafe { Id::from_index(idx.idx.try_into().unwrap()) });
+ interned_id.loc(db)
}
pub fn lt_to_placeholder_idx(db: &dyn HirDatabase, id: LifetimeParamId) -> PlaceholderIndex {
- let interned_id = db.intern_lifetime_param_id(id);
+ let interned_id = InternedLifetimeParamId::new(db, id);
PlaceholderIndex {
ui: chalk_ir::UniverseIndex::ROOT,
- idx: interned_id.as_id().as_u32() as usize,
+ idx: interned_id.as_id().index() as usize,
}
}
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index 3b295d41e6..a6150a9bc1 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -790,7 +790,7 @@ fn find_matching_impl(
mut impls: impl Iterator<Item = ImplId>,
mut table: InferenceTable<'_>,
actual_trait_ref: TraitRef,
-) -> Option<(Arc<ImplItems>, Substitution)> {
+) -> Option<(&ImplItems, Substitution)> {
let db = table.db;
impls.find_map(|impl_| {
table.run_in_snapshot(|table| {
@@ -811,7 +811,7 @@ fn find_matching_impl(
let goal = crate::Goal::all(Interner, wcs);
table.try_obligation(goal.clone())?;
table.register_obligation(goal);
- Some((db.impl_items(impl_), table.resolve_completely(impl_substs)))
+ Some((impl_.impl_items(db), table.resolve_completely(impl_substs)))
})
})
}
@@ -875,7 +875,7 @@ fn is_inherent_impl_coherent(
_ => false,
};
- let items = db.impl_items(impl_id);
+ let items = impl_id.impl_items(db);
rustc_has_incoherent_inherent_impls
&& !items.items.is_empty()
&& items.items.iter().all(|&(_, assoc)| match assoc {
@@ -1302,7 +1302,7 @@ fn iterate_trait_method_candidates(
// trait, but if we find out it doesn't, we'll skip the rest of the
// iteration
let mut known_implemented = false;
- for &(_, item) in db.trait_items(t).items.iter() {
+ for &(_, item) in t.trait_items(db).items.iter() {
// Don't pass a `visible_from_module` down to `is_valid_candidate`,
// since only inherent methods should be included into visibility checking.
let visible =
@@ -1429,7 +1429,7 @@ fn iterate_inherent_methods(
) -> ControlFlow<()> {
let db = table.db;
for t in traits {
- let data = db.trait_items(t);
+ let data = t.trait_items(db);
for &(_, item) in data.items.iter() {
// We don't pass `visible_from_module` as all trait items should be visible.
let visible = match is_valid_trait_method_candidate(
@@ -1462,7 +1462,7 @@ fn iterate_inherent_methods(
callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
) -> ControlFlow<()> {
for &impl_id in impls.for_self_ty(self_ty) {
- for &(ref item_name, item) in table.db.impl_items(impl_id).items.iter() {
+ for &(ref item_name, item) in impl_id.impl_items(table.db).items.iter() {
let visible = match is_valid_impl_method_candidate(
table,
self_ty,
@@ -1550,7 +1550,7 @@ fn is_valid_impl_method_candidate(
check_that!(name.is_none_or(|n| n == item_name));
if let Some(from_module) = visible_from_module {
- if !db.const_visibility(c).is_visible_from(db, from_module) {
+ if !db.assoc_visibility(c.into()).is_visible_from(db, from_module) {
cov_mark::hit!(const_candidate_not_visible);
return IsValidCandidate::NotVisible;
}
@@ -1639,7 +1639,7 @@ fn is_valid_impl_fn_candidate(
let data = db.function_signature(fn_id);
if let Some(from_module) = visible_from_module {
- if !db.function_visibility(fn_id).is_visible_from(db, from_module) {
+ if !db.assoc_visibility(fn_id.into()).is_visible_from(db, from_module) {
cov_mark::hit!(autoderef_candidate_not_visible);
return IsValidCandidate::NotVisible;
}
diff --git a/crates/hir-ty/src/mir.rs b/crates/hir-ty/src/mir.rs
index bf80ed7967..482b420279 100644
--- a/crates/hir-ty/src/mir.rs
+++ b/crates/hir-ty/src/mir.rs
@@ -1212,10 +1212,9 @@ impl MirSpan {
match *self {
MirSpan::ExprId(expr) => matches!(body[expr], Expr::Ref { .. }),
// FIXME: Figure out if this is correct wrt. match ergonomics.
- MirSpan::BindingId(binding) => matches!(
- body.bindings[binding].mode,
- BindingAnnotation::Ref | BindingAnnotation::RefMut
- ),
+ MirSpan::BindingId(binding) => {
+ matches!(body[binding].mode, BindingAnnotation::Ref | BindingAnnotation::RefMut)
+ }
MirSpan::PatId(_) | MirSpan::SelfParam | MirSpan::Unknown => false,
}
}
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index 21e5428520..9a97bd6dbe 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -31,8 +31,8 @@ use syntax::{SyntaxNodePtr, TextRange};
use triomphe::Arc;
use crate::{
- CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId, Interner,
- MemoryMap, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
+ AliasTy, CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId,
+ Interner, MemoryMap, Substitution, ToChalk, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
consteval::{ConstEvalError, intern_const_scalar, try_const_usize},
db::{HirDatabase, InternedClosure},
display::{ClosureStyle, DisplayTarget, HirDisplay},
@@ -630,7 +630,7 @@ impl Evaluator<'_> {
Ok(target_data_layout) => target_data_layout,
Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)),
};
- let cached_ptr_size = target_data_layout.pointer_size.bytes_usize();
+ let cached_ptr_size = target_data_layout.pointer_size().bytes_usize();
Ok(Evaluator {
target_data_layout,
stack: vec![0],
@@ -657,12 +657,12 @@ impl Evaluator<'_> {
cached_ptr_size,
cached_fn_trait_func: LangItem::Fn
.resolve_trait(db, crate_id)
- .and_then(|x| db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call))),
+ .and_then(|x| x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call))),
cached_fn_mut_trait_func: LangItem::FnMut.resolve_trait(db, crate_id).and_then(|x| {
- db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call_mut))
+ x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call_mut))
}),
cached_fn_once_trait_func: LangItem::FnOnce.resolve_trait(db, crate_id).and_then(|x| {
- db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call_once))
+ x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call_once))
}),
})
}
@@ -1631,7 +1631,7 @@ impl Evaluator<'_> {
Variants::Empty => unreachable!(),
Variants::Single { index } => {
let r =
- self.const_eval_discriminant(self.db.enum_variants(e).variants[index.0].0)?;
+ self.const_eval_discriminant(e.enum_variants(self.db).variants[index.0].0)?;
Ok(r)
}
Variants::Multiple { tag, tag_encoding, variants, .. } => {
@@ -1656,7 +1656,7 @@ impl Evaluator<'_> {
.unwrap_or(*untagged_variant)
.0;
let result =
- self.const_eval_discriminant(self.db.enum_variants(e).variants[idx].0)?;
+ self.const_eval_discriminant(e.enum_variants(self.db).variants[idx].0)?;
Ok(result)
}
}
@@ -1749,8 +1749,7 @@ impl Evaluator<'_> {
AdtId::UnionId(_) => not_supported!("unsizing unions"),
AdtId::EnumId(_) => not_supported!("unsizing enums"),
};
- let Some((last_field, _)) =
- self.db.variant_fields(id.into()).fields().iter().next_back()
+ let Some((last_field, _)) = id.fields(self.db).fields().iter().next_back()
else {
not_supported!("unsizing struct without field");
};
@@ -2196,7 +2195,7 @@ impl Evaluator<'_> {
}
}
}
- chalk_ir::TyKind::Array(inner, len) => {
+ TyKind::Array(inner, len) => {
let len = match try_const_usize(this.db, len) {
Some(it) => it as usize,
None => not_supported!("non evaluatable array len in patching addresses"),
@@ -2214,7 +2213,7 @@ impl Evaluator<'_> {
)?;
}
}
- chalk_ir::TyKind::Tuple(_, subst) => {
+ TyKind::Tuple(_, subst) => {
let layout = this.layout(ty)?;
for (id, ty) in subst.iter(Interner).enumerate() {
let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
@@ -2230,9 +2229,9 @@ impl Evaluator<'_> {
)?;
}
}
- chalk_ir::TyKind::Adt(adt, subst) => match adt.0 {
+ TyKind::Adt(adt, subst) => match adt.0 {
AdtId::StructId(s) => {
- let data = this.db.variant_fields(s.into());
+ let data = s.fields(this.db);
let layout = this.layout(ty)?;
let field_types = this.db.field_types(s.into());
for (f, _) in data.fields().iter() {
@@ -2261,7 +2260,7 @@ impl Evaluator<'_> {
bytes,
e,
) {
- let data = &this.db.variant_fields(v.into());
+ let data = v.fields(this.db);
let field_types = this.db.field_types(v.into());
for (f, _) in data.fields().iter() {
let offset =
@@ -2281,6 +2280,10 @@ impl Evaluator<'_> {
}
AdtId::UnionId(_) => (),
},
+ TyKind::Alias(AliasTy::Projection(proj)) => {
+ let ty = this.db.normalize_projection(proj.clone(), this.trait_env.clone());
+ rec(this, bytes, &ty, locals, mm, stack_depth_limit - 1)?;
+ }
_ => (),
}
Ok(())
@@ -2771,12 +2774,15 @@ impl Evaluator<'_> {
Err(e) => {
let db = self.db;
let loc = variant.lookup(db);
- let enum_loc = loc.parent.lookup(db);
let edition = self.crate_id.data(self.db).edition;
let name = format!(
"{}::{}",
- enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition),
- loc.id.item_tree(db)[loc.id.value].name.display(db, edition),
+ self.db.enum_signature(loc.parent).name.display(db, edition),
+ loc.parent
+ .enum_variants(self.db)
+ .variant_name_by_id(variant)
+ .unwrap()
+ .display(db, edition),
);
Err(MirEvalError::ConstEvalError(name, Box::new(e)))
}
@@ -2805,7 +2811,7 @@ impl Evaluator<'_> {
) -> Result<()> {
let Some(drop_fn) = (|| {
let drop_trait = LangItem::Drop.resolve_trait(self.db, self.crate_id)?;
- self.db.trait_items(drop_trait).method_by_name(&Name::new_symbol_root(sym::drop))
+ drop_trait.trait_items(self.db).method_by_name(&Name::new_symbol_root(sym::drop))
})() else {
// in some tests we don't have drop trait in minicore, and
// we can ignore drop in them.
@@ -2835,7 +2841,7 @@ impl Evaluator<'_> {
return Ok(());
}
let layout = self.layout_adt(id.0, subst.clone())?;
- let variant_fields = self.db.variant_fields(s.into());
+ let variant_fields = s.fields(self.db);
match variant_fields.shape {
FieldsShape::Record | FieldsShape::Tuple => {
let field_types = self.db.field_types(s.into());
@@ -2915,7 +2921,7 @@ pub fn render_const_using_debug_impl(
not_supported!("core::fmt::Debug not found");
};
let Some(debug_fmt_fn) =
- db.trait_items(debug_trait).method_by_name(&Name::new_symbol_root(sym::fmt))
+ debug_trait.trait_items(db).method_by_name(&Name::new_symbol_root(sym::fmt))
else {
not_supported!("core::fmt::Debug::fmt not found");
};
@@ -2927,7 +2933,7 @@ pub fn render_const_using_debug_impl(
let a2 = evaluator.heap_allocate(evaluator.ptr_size() * 2, evaluator.ptr_size())?;
evaluator.write_memory(a2, &data.addr.to_bytes())?;
let debug_fmt_fn_ptr = evaluator.vtable_map.id(TyKind::FnDef(
- db.intern_callable_def(debug_fmt_fn.into()).into(),
+ CallableDefId::FunctionId(debug_fmt_fn).to_chalk(db),
Substitution::from1(Interner, c.data(Interner).ty.clone()),
)
.intern(Interner));
@@ -3042,7 +3048,10 @@ impl IntValue {
(8, true) => Self::I64(i64::from_le_bytes(bytes.try_into().unwrap())),
(16, false) => Self::U128(u128::from_le_bytes(bytes.try_into().unwrap())),
(16, true) => Self::I128(i128::from_le_bytes(bytes.try_into().unwrap())),
- _ => panic!("invalid integer size"),
+ (len, is_signed) => {
+ never!("invalid integer size: {len}, signed: {is_signed}");
+ Self::I32(0)
+ }
}
}
diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs
index 90c52ee96f..e9665d5ae9 100644
--- a/crates/hir-ty/src/mir/eval/shim.rs
+++ b/crates/hir-ty/src/mir/eval/shim.rs
@@ -65,9 +65,7 @@ impl Evaluator<'_> {
Some(abi) => *abi == sym::rust_dash_intrinsic,
None => match def.lookup(self.db).container {
hir_def::ItemContainerId::ExternBlockId(block) => {
- let id = block.lookup(self.db).id;
- id.item_tree(self.db)[id.value].abi.as_ref()
- == Some(&sym::rust_dash_intrinsic)
+ block.abi(self.db) == Some(sym::rust_dash_intrinsic)
}
_ => false,
},
@@ -86,10 +84,7 @@ impl Evaluator<'_> {
);
}
let is_extern_c = match def.lookup(self.db).container {
- hir_def::ItemContainerId::ExternBlockId(block) => {
- let id = block.lookup(self.db).id;
- id.item_tree(self.db)[id.value].abi.as_ref() == Some(&sym::C)
- }
+ hir_def::ItemContainerId::ExternBlockId(block) => block.abi(self.db) == Some(sym::C),
_ => false,
};
if is_extern_c {
@@ -764,7 +759,9 @@ impl Evaluator<'_> {
let size = self.size_of_sized(ty, locals, "size_of arg")?;
destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size])
}
- "min_align_of" | "pref_align_of" => {
+ // FIXME: `min_align_of` was renamed to `align_of` in Rust 1.89
+ // (https://github.com/rust-lang/rust/pull/142410)
+ "min_align_of" | "align_of" => {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
@@ -796,17 +793,19 @@ impl Evaluator<'_> {
destination.write_from_bytes(self, &size.to_le_bytes())
}
}
- "min_align_of_val" => {
+ // FIXME: `min_align_of_val` was renamed to `align_of_val` in Rust 1.89
+ // (https://github.com/rust-lang/rust/pull/142410)
+ "min_align_of_val" | "align_of_val" => {
let Some(ty) =
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::InternalError(
- "min_align_of_val generic arg is not provided".into(),
+ "align_of_val generic arg is not provided".into(),
));
};
let [arg] = args else {
return Err(MirEvalError::InternalError(
- "min_align_of_val args are not provided".into(),
+ "align_of_val args are not provided".into(),
));
};
if let Some((_, align)) = self.size_align_of(ty, locals)? {
@@ -1258,9 +1257,8 @@ impl Evaluator<'_> {
args.push(IntervalAndTy::new(addr, field, self, locals)?);
}
if let Some(target) = LangItem::FnOnce.resolve_trait(self.db, self.crate_id) {
- if let Some(def) = self
- .db
- .trait_items(target)
+ if let Some(def) = target
+ .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::call_once))
{
self.exec_fn_trait(
diff --git a/crates/hir-ty/src/mir/eval/shim/simd.rs b/crates/hir-ty/src/mir/eval/shim/simd.rs
index 984648cfec..bc331a23d9 100644
--- a/crates/hir-ty/src/mir/eval/shim/simd.rs
+++ b/crates/hir-ty/src/mir/eval/shim/simd.rs
@@ -31,7 +31,7 @@ impl Evaluator<'_> {
Some(len) => len,
_ => {
if let AdtId::StructId(id) = id.0 {
- let struct_data = self.db.variant_fields(id.into());
+ let struct_data = id.fields(self.db);
let fields = struct_data.fields();
let Some((first_field, _)) = fields.iter().next() else {
not_supported!("simd type with no field");
diff --git a/crates/hir-ty/src/mir/eval/tests.rs b/crates/hir-ty/src/mir/eval/tests.rs
index 3abbbe45e6..c1f86960e1 100644
--- a/crates/hir-ty/src/mir/eval/tests.rs
+++ b/crates/hir-ty/src/mir/eval/tests.rs
@@ -984,3 +984,17 @@ fn main<'a, T: Foo + Bar + Baz>(
|e| matches!(e, MirEvalError::MirLowerError(_, MirLowerError::GenericArgNotProvided(..))),
);
}
+
+#[test]
+fn format_args_pass() {
+ check_pass(
+ r#"
+//- minicore: fmt
+fn main() {
+ let x1 = format_args!("");
+ let x2 = format_args!("{}", x1);
+ let x3 = format_args!("{} {}", x1, x2);
+}
+"#,
+ );
+}
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 99d9351530..07d8147272 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -321,7 +321,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
current: BasicBlockId,
) -> Result<Option<(Operand, BasicBlockId)>> {
if !self.has_adjustments(expr_id) {
- if let Expr::Literal(l) = &self.body.exprs[expr_id] {
+ if let Expr::Literal(l) = &self.body[expr_id] {
let ty = self.expr_ty_without_adjust(expr_id);
return Ok(Some((self.lower_literal_to_operand(ty, l)?, current)));
}
@@ -411,7 +411,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
place: Place,
mut current: BasicBlockId,
) -> Result<Option<BasicBlockId>> {
- match &self.body.exprs[expr_id] {
+ match &self.body[expr_id] {
Expr::OffsetOf(_) => {
not_supported!("builtin#offset_of")
}
@@ -503,7 +503,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(Some(current))
}
ValueNs::EnumVariantId(variant_id) => {
- let variant_fields = &self.db.variant_fields(variant_id.into());
+ let variant_fields = variant_id.fields(self.db);
if variant_fields.shape == FieldsShape::Unit {
let ty = self.infer.type_of_expr[expr_id].clone();
current = self.lower_enum_variant(
@@ -856,7 +856,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
TyKind::Adt(_, s) => s.clone(),
_ => not_supported!("Non ADT record literal"),
};
- let variant_fields = self.db.variant_fields(variant_id);
+ let variant_fields = variant_id.fields(self.db);
match variant_id {
VariantId::EnumVariantId(_) | VariantId::StructId(_) => {
let mut operands = vec![None; variant_fields.fields().len()];
@@ -1176,8 +1176,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
place,
Rvalue::Aggregate(
AggregateKind::Adt(st.into(), subst.clone()),
- self.db
- .variant_fields(st.into())
+ st.fields(self.db)
.fields()
.iter()
.map(|it| {
@@ -1375,7 +1374,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
fn lower_literal_or_const_to_operand(&mut self, ty: Ty, loc: &ExprId) -> Result<Operand> {
- match &self.body.exprs[*loc] {
+ match &self.body[*loc] {
Expr::Literal(l) => self.lower_literal_to_operand(ty, l),
Expr::Path(c) => {
let owner = self.owner;
@@ -1851,7 +1850,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.drop_scopes.last_mut().unwrap().locals.push(local_id);
if let Pat::Bind { id, subpat: None } = self.body[it] {
if matches!(
- self.body.bindings[id].mode,
+ self.body[id].mode,
BindingAnnotation::Unannotated | BindingAnnotation::Mutable
) {
self.result.binding_locals.insert(id, local_id);
@@ -1860,7 +1859,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
local_id
}));
// and then rest of bindings
- for (id, _) in self.body.bindings.iter() {
+ for (id, _) in self.body.bindings() {
if !pick_binding(id) {
continue;
}
@@ -1922,11 +1921,14 @@ impl<'ctx> MirLowerCtx<'ctx> {
let edition = self.edition();
let db = self.db;
let loc = variant.lookup(db);
- let enum_loc = loc.parent.lookup(db);
let name = format!(
"{}::{}",
- enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition),
- loc.id.item_tree(db)[loc.id.value].name.display(db, edition),
+ self.db.enum_signature(loc.parent).name.display(db, edition),
+ loc.parent
+ .enum_variants(self.db)
+ .variant_name_by_id(variant)
+ .unwrap()
+ .display(db, edition),
);
Err(MirLowerError::ConstEvalError(name.into(), Box::new(e)))
}
@@ -2124,7 +2126,7 @@ pub fn mir_body_for_closure_query(
.result
.binding_locals
.into_iter()
- .filter(|it| ctx.body.binding_owners.get(&it.0).copied() == Some(expr))
+ .filter(|it| ctx.body.binding_owner(it.0) == Some(expr))
.collect();
if let Some(err) = err {
return Err(MirLowerError::UnresolvedUpvar(err));
@@ -2152,7 +2154,7 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<Mi
.to_string(),
DefWithBodyId::VariantId(it) => {
let loc = it.lookup(db);
- db.enum_variants(loc.parent).variants[loc.index as usize]
+ loc.parent.enum_variants(db).variants[loc.index as usize]
.1
.display(db, edition)
.to_string()
@@ -2189,7 +2191,7 @@ pub fn lower_to_mir(
// 0 is return local
ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr) });
let binding_picker = |b: BindingId| {
- let owner = ctx.body.binding_owners.get(&b).copied();
+ let owner = ctx.body.binding_owner(b);
if root_expr == body.body_expr { owner.is_none() } else { owner == Some(root_expr) }
};
// 1 to param_len is for params
diff --git a/crates/hir-ty/src/mir/lower/as_place.rs b/crates/hir-ty/src/mir/lower/as_place.rs
index c22bada7a9..e074c2d558 100644
--- a/crates/hir-ty/src/mir/lower/as_place.rs
+++ b/crates/hir-ty/src/mir/lower/as_place.rs
@@ -133,7 +133,7 @@ impl MirLowerCtx<'_> {
}
this.lower_expr_to_some_place_without_adjust(expr_id, current)
};
- match &self.body.exprs[expr_id] {
+ match &self.body[expr_id] {
Expr::Path(p) => {
let resolver_guard =
self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
@@ -193,9 +193,8 @@ impl MirLowerCtx<'_> {
if let Some(deref_trait) =
self.resolve_lang_item(LangItem::DerefMut)?.as_trait()
{
- if let Some(deref_fn) = self
- .db
- .trait_items(deref_trait)
+ if let Some(deref_fn) = deref_trait
+ .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::deref_mut))
{
break 'b deref_fn == f;
@@ -297,11 +296,8 @@ impl MirLowerCtx<'_> {
let result_ref = TyKind::Ref(mutability, error_lifetime(), result_ty).intern(Interner);
let mut result: Place = self.temp(result_ref, current, span)?.into();
let index_fn_op = Operand::const_zst(
- TyKind::FnDef(
- self.db.intern_callable_def(CallableDefId::FunctionId(index_fn.0)).into(),
- index_fn.1,
- )
- .intern(Interner),
+ TyKind::FnDef(CallableDefId::FunctionId(index_fn.0).to_chalk(self.db), index_fn.1)
+ .intern(Interner),
);
let Some(current) = self.lower_call(
index_fn_op,
@@ -350,14 +346,13 @@ impl MirLowerCtx<'_> {
.resolve_lang_item(trait_lang_item)?
.as_trait()
.ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
- let deref_fn = self
- .db
- .trait_items(deref_trait)
+ let deref_fn = deref_trait
+ .trait_items(self.db)
.method_by_name(&trait_method_name)
.ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
let deref_fn_op = Operand::const_zst(
TyKind::FnDef(
- self.db.intern_callable_def(CallableDefId::FunctionId(deref_fn)).into(),
+ CallableDefId::FunctionId(deref_fn).to_chalk(self.db),
Substitution::from1(Interner, source_ty),
)
.intern(Interner),
diff --git a/crates/hir-ty/src/mir/lower/pattern_matching.rs b/crates/hir-ty/src/mir/lower/pattern_matching.rs
index b3c1f6f387..3325226b1d 100644
--- a/crates/hir-ty/src/mir/lower/pattern_matching.rs
+++ b/crates/hir-ty/src/mir/lower/pattern_matching.rs
@@ -130,7 +130,7 @@ impl MirLowerCtx<'_> {
.collect::<Vec<_>>()
.into(),
);
- Ok(match &self.body.pats[pattern] {
+ Ok(match &self.body[pattern] {
Pat::Missing => return Err(MirLowerError::IncompletePattern),
Pat::Wild => (current, current_else),
Pat::Tuple { args, ellipsis } => {
@@ -436,7 +436,7 @@ impl MirLowerCtx<'_> {
(next, Some(else_target))
}
},
- Pat::Lit(l) => match &self.body.exprs[*l] {
+ Pat::Lit(l) => match &self.body[*l] {
Expr::Literal(l) => {
if mode == MatchingMode::Check {
let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?;
@@ -609,7 +609,7 @@ impl MirLowerCtx<'_> {
}
self.pattern_matching_variant_fields(
shape,
- &self.db.variant_fields(v.into()),
+ v.fields(self.db),
variant,
current,
current_else,
@@ -619,7 +619,7 @@ impl MirLowerCtx<'_> {
}
VariantId::StructId(s) => self.pattern_matching_variant_fields(
shape,
- &self.db.variant_fields(s.into()),
+ s.fields(self.db),
variant,
current,
current_else,
diff --git a/crates/hir-ty/src/mir/pretty.rs b/crates/hir-ty/src/mir/pretty.rs
index 7ae6e907e7..aad54f8843 100644
--- a/crates/hir-ty/src/mir/pretty.rs
+++ b/crates/hir-ty/src/mir/pretty.rs
@@ -63,16 +63,16 @@ impl MirBody {
}
hir_def::DefWithBodyId::VariantId(id) => {
let loc = id.lookup(db);
- let enum_loc = loc.parent.lookup(db);
+ let edition = this.display_target.edition;
w!(
this,
"enum {}::{} = ",
- enum_loc.id.item_tree(db)[enum_loc.id.value]
- .name
- .display(db, this.display_target.edition),
- loc.id.item_tree(db)[loc.id.value]
- .name
- .display(db, this.display_target.edition),
+ db.enum_signature(loc.parent).name.display(db, edition),
+ loc.parent
+ .enum_variants(db)
+ .variant_name_by_id(id)
+ .unwrap()
+ .display(db, edition),
)
}
});
@@ -219,7 +219,7 @@ impl<'a> MirPrettyCtx<'a> {
fn local_name(&self, local: LocalId) -> LocalName {
match self.local_to_binding.get(local) {
- Some(b) => LocalName::Binding(self.hir_body.bindings[*b].name.clone(), local),
+ Some(b) => LocalName::Binding(self.hir_body[*b].name.clone(), local),
None => LocalName::Unknown(local),
}
}
@@ -326,7 +326,7 @@ impl<'a> MirPrettyCtx<'a> {
w!(this, ")");
}
ProjectionElem::Field(Either::Left(field)) => {
- let variant_fields = this.db.variant_fields(field.parent);
+ let variant_fields = field.parent.fields(this.db);
let name = &variant_fields.fields()[field.local_id].name;
match field.parent {
hir_def::VariantId::EnumVariantId(e) => {
@@ -336,7 +336,7 @@ impl<'a> MirPrettyCtx<'a> {
w!(
this,
" as {}).{}",
- this.db.enum_variants(loc.parent).variants[loc.index as usize]
+ loc.parent.enum_variants(this.db).variants[loc.index as usize]
.1
.display(this.db, this.display_target.edition),
name.display(this.db, this.display_target.edition)
diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs
index d049c678e2..b5de0e52f5 100644
--- a/crates/hir-ty/src/test_db.rs
+++ b/crates/hir-ty/src/test_db.rs
@@ -166,10 +166,10 @@ impl TestDB {
self.events.lock().unwrap().take().unwrap()
}
- pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
+ pub(crate) fn log_executed(&self, f: impl FnOnce()) -> (Vec<String>, Vec<salsa::Event>) {
let events = self.log(f);
- events
- .into_iter()
+ let executed = events
+ .iter()
.filter_map(|e| match e.kind {
// This is pretty horrible, but `Debug` is the only way to inspect
// QueryDescriptor at the moment.
@@ -181,6 +181,7 @@ impl TestDB {
}
_ => None,
})
- .collect()
+ .collect();
+ (executed, events)
}
}
diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs
index 2b75bd6f16..9605a0b412 100644
--- a/crates/hir-ty/src/tests.rs
+++ b/crates/hir-ty/src/tests.rs
@@ -168,7 +168,7 @@ fn check_impl(
let inference_result = db.infer(def);
for (pat, mut ty) in inference_result.type_of_pat.iter() {
- if let Pat::Bind { id, .. } = body.pats[pat] {
+ if let Pat::Bind { id, .. } = body[pat] {
ty = &inference_result.type_of_binding[id];
}
let node = match pat_node(&body_source_map, pat, &db) {
@@ -316,7 +316,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
}
for (pat, mut ty) in inference_result.type_of_pat.iter() {
- if let Pat::Bind { id, .. } = body.pats[pat] {
+ if let Pat::Bind { id, .. } = body[pat] {
ty = &inference_result.type_of_binding[id];
}
let node = match body_source_map.pat_syntax(pat) {
@@ -437,7 +437,7 @@ pub(crate) fn visit_module(
) {
visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb);
for impl_id in crate_def_map[module_id].scope.impls() {
- let impl_data = db.impl_items(impl_id);
+ let impl_data = impl_id.impl_items(db);
for &(_, item) in impl_data.items.iter() {
match item {
AssocItemId::FunctionId(it) => {
@@ -479,14 +479,14 @@ pub(crate) fn visit_module(
visit_body(db, &body, cb);
}
ModuleDefId::AdtId(hir_def::AdtId::EnumId(it)) => {
- db.enum_variants(it).variants.iter().for_each(|&(it, _)| {
+ it.enum_variants(db).variants.iter().for_each(|&(it, _, _)| {
let body = db.body(it.into());
cb(it.into());
visit_body(db, &body, cb);
});
}
ModuleDefId::TraitId(it) => {
- let trait_data = db.trait_items(it);
+ let trait_data = it.trait_items(db);
for &(_, item) in trait_data.items.iter() {
match item {
AssocItemId::FunctionId(it) => cb(it.into()),
diff --git a/crates/hir-ty/src/tests/closure_captures.rs b/crates/hir-ty/src/tests/closure_captures.rs
index 88d21be81e..dbc68eeba1 100644
--- a/crates/hir-ty/src/tests/closure_captures.rs
+++ b/crates/hir-ty/src/tests/closure_captures.rs
@@ -444,3 +444,46 @@ fn main() {
expect!["99..165;49..54;120..121,133..134 ByRef(Mut { kind: Default }) a &'? mut A"],
);
}
+
+#[test]
+fn let_binding_is_a_ref_capture_in_ref_binding() {
+ check_closure_captures(
+ r#"
+//- minicore:copy
+struct S;
+fn main() {
+ let mut s = S;
+ let s_ref = &mut s;
+ let mut s2 = S;
+ let s_ref2 = &mut s2;
+ let closure = || {
+ if let ref cb = s_ref {
+ } else if let ref mut cb = s_ref2 {
+ }
+ };
+}
+"#,
+ expect![[r#"
+ 129..225;49..54;149..155 ByRef(Shared) s_ref &'? &'? mut S
+ 129..225;93..99;188..198 ByRef(Mut { kind: Default }) s_ref2 &'? mut &'? mut S"#]],
+ );
+}
+
+#[test]
+fn let_binding_is_a_value_capture_in_binding() {
+ check_closure_captures(
+ r#"
+//- minicore:copy, option
+struct Box(i32);
+fn main() {
+ let b = Some(Box(0));
+ let closure = || {
+ if let Some(b) = b {
+ let _move = b;
+ }
+ };
+}
+"#,
+ expect!["73..149;37..38;103..104 ByValue b Option<Box>"],
+ );
+}
diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs
index ddc5b71519..3894b4b6f7 100644
--- a/crates/hir-ty/src/tests/coercion.rs
+++ b/crates/hir-ty/src/tests/coercion.rs
@@ -561,7 +561,7 @@ trait Foo {}
fn test(f: impl Foo, g: &(impl Foo + ?Sized)) {
let _: &dyn Foo = &f;
let _: &dyn Foo = g;
- //^ expected &'? (dyn Foo + 'static), got &'? impl Foo + ?Sized
+ //^ expected &'? (dyn Foo + '?), got &'? impl Foo + ?Sized
}
"#,
);
diff --git a/crates/hir-ty/src/tests/display_source_code.rs b/crates/hir-ty/src/tests/display_source_code.rs
index a986b54a7b..6e3faa05a6 100644
--- a/crates/hir-ty/src/tests/display_source_code.rs
+++ b/crates/hir-ty/src/tests/display_source_code.rs
@@ -67,11 +67,11 @@ trait B: A {}
fn test<'a>(
_: &(dyn A<Assoc = ()> + Send),
- //^ &(dyn A<Assoc = ()> + Send + 'static)
+ //^ &(dyn A<Assoc = ()> + Send)
_: &'a (dyn Send + A<Assoc = ()>),
- //^ &'a (dyn A<Assoc = ()> + Send + 'static)
+ //^ &'a (dyn A<Assoc = ()> + Send)
_: &dyn B<Assoc = ()>,
- //^ &(dyn B<Assoc = ()> + 'static)
+ //^ &(dyn B<Assoc = ()>)
) {}
"#,
);
@@ -85,7 +85,7 @@ fn render_dyn_for_ty() {
trait Foo<'a> {}
fn foo(foo: &dyn for<'a> Foo<'a>) {}
- // ^^^ &(dyn Foo<'?> + 'static)
+ // ^^^ &dyn Foo<'?>
"#,
);
}
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index e8e3812c69..3159499e86 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -1,5 +1,7 @@
use base_db::SourceDatabase;
-use hir_def::ModuleDefId;
+use expect_test::Expect;
+use hir_def::{DefWithBodyId, ModuleDefId};
+use salsa::EventKind;
use test_fixture::WithFixture;
use crate::{db::HirDatabase, test_db::TestDB};
@@ -15,8 +17,9 @@ fn foo() -> i32 {
$01 + 1
}",
);
- {
- let events = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, crate_def_map, module.local_id, &mut |def| {
@@ -24,9 +27,31 @@ fn foo() -> i32 {
db.infer(it.into());
}
});
- });
- assert!(format!("{events:?}").contains("infer_shim"))
- }
+ },
+ &[("infer_shim", 1)],
+ expect_test::expect![[r#"
+ [
+ "source_root_crates_shim",
+ "crate_local_def_map",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "infer_shim",
+ "function_signature_shim",
+ "function_signature_with_source_map_shim",
+ "attrs_shim",
+ "body_shim",
+ "body_with_source_map_shim",
+ "trait_environment_shim",
+ "return_type_impl_traits_shim",
+ "expr_scopes_shim",
+ "lang_item",
+ "crate_lang_items",
+ "lang_item",
+ ]
+ "#]],
+ );
let new_text = "
fn foo() -> i32 {
@@ -37,8 +62,9 @@ fn foo() -> i32 {
db.set_file_text(pos.file_id.file_id(&db), new_text);
- {
- let events = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, crate_def_map, module.local_id, &mut |def| {
@@ -46,9 +72,22 @@ fn foo() -> i32 {
db.infer(it.into());
}
});
- });
- assert!(!format!("{events:?}").contains("infer_shim"), "{events:#?}")
- }
+ },
+ &[("infer_shim", 0)],
+ expect_test::expect![[r#"
+ [
+ "parse_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ "real_span_map_shim",
+ "attrs_shim",
+ "function_signature_with_source_map_shim",
+ "function_signature_shim",
+ "body_with_source_map_shim",
+ "body_shim",
+ ]
+ "#]],
+ );
}
#[test]
@@ -66,8 +105,9 @@ fn baz() -> i32 {
1 + 1
}",
);
- {
- let events = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, crate_def_map, module.local_id, &mut |def| {
@@ -75,9 +115,49 @@ fn baz() -> i32 {
db.infer(it.into());
}
});
- });
- assert!(format!("{events:?}").contains("infer_shim"))
- }
+ },
+ &[("infer_shim", 3)],
+ expect_test::expect![[r#"
+ [
+ "source_root_crates_shim",
+ "crate_local_def_map",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "infer_shim",
+ "function_signature_shim",
+ "function_signature_with_source_map_shim",
+ "attrs_shim",
+ "body_shim",
+ "body_with_source_map_shim",
+ "trait_environment_shim",
+ "return_type_impl_traits_shim",
+ "expr_scopes_shim",
+ "lang_item",
+ "crate_lang_items",
+ "attrs_shim",
+ "attrs_shim",
+ "lang_item",
+ "infer_shim",
+ "function_signature_shim",
+ "function_signature_with_source_map_shim",
+ "body_shim",
+ "body_with_source_map_shim",
+ "trait_environment_shim",
+ "return_type_impl_traits_shim",
+ "expr_scopes_shim",
+ "infer_shim",
+ "function_signature_shim",
+ "function_signature_with_source_map_shim",
+ "body_shim",
+ "body_with_source_map_shim",
+ "trait_environment_shim",
+ "return_type_impl_traits_shim",
+ "expr_scopes_shim",
+ ]
+ "#]],
+ );
let new_text = "
fn foo() -> f32 {
@@ -93,8 +173,9 @@ fn baz() -> i32 {
db.set_file_text(pos.file_id.file_id(&db), new_text);
- {
- let events = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, crate_def_map, module.local_id, &mut |def| {
@@ -102,9 +183,34 @@ fn baz() -> i32 {
db.infer(it.into());
}
});
- });
- assert_eq!(format!("{events:?}").matches("infer_shim").count(), 1, "{events:#?}")
- }
+ },
+ &[("infer_shim", 1)],
+ expect_test::expect![[r#"
+ [
+ "parse_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ "real_span_map_shim",
+ "attrs_shim",
+ "function_signature_with_source_map_shim",
+ "function_signature_shim",
+ "body_with_source_map_shim",
+ "body_shim",
+ "attrs_shim",
+ "attrs_shim",
+ "function_signature_with_source_map_shim",
+ "function_signature_shim",
+ "body_with_source_map_shim",
+ "body_shim",
+ "infer_shim",
+ "expr_scopes_shim",
+ "function_signature_with_source_map_shim",
+ "function_signature_shim",
+ "body_with_source_map_shim",
+ "body_shim",
+ ]
+ "#]],
+ );
}
#[test]
@@ -121,14 +227,26 @@ fn bar() -> f32 {
}
$0",
);
- {
- let events = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
- });
- assert!(format!("{events:?}").contains("trait_impls_in_crate_shim"))
- }
+ },
+ &[("trait_impls_in_crate_shim", 1)],
+ expect_test::expect![[r#"
+ [
+ "source_root_crates_shim",
+ "crate_local_def_map",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "trait_impls_in_crate_shim",
+ ]
+ "#]],
+ );
let new_text = "
fn foo() -> i32 {
@@ -146,24 +264,25 @@ pub struct NewStruct {
db.set_file_text(pos.file_id.file_id(&db), new_text);
- {
- let actual = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
- });
-
- let expected = vec![
- "parse_shim".to_owned(),
- "ast_id_map_shim".to_owned(),
- "file_item_tree_shim".to_owned(),
- "real_span_map_shim".to_owned(),
- "crate_local_def_map".to_owned(),
- "trait_impls_in_crate_shim".to_owned(),
- ];
-
- assert_eq!(expected, actual);
- }
+ },
+ &[("trait_impls_in_crate_shim", 1)],
+ expect_test::expect![[r#"
+ [
+ "parse_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ "real_span_map_shim",
+ "crate_local_def_map",
+ "trait_impls_in_crate_shim",
+ ]
+ "#]],
+ );
}
#[test]
@@ -180,14 +299,26 @@ fn bar() -> f32 {
}
$0",
);
- {
- let events = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
- });
- assert!(format!("{events:?}").contains("trait_impls_in_crate_shim"))
- }
+ },
+ &[("trait_impls_in_crate_shim", 1)],
+ expect_test::expect![[r#"
+ [
+ "source_root_crates_shim",
+ "crate_local_def_map",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "trait_impls_in_crate_shim",
+ ]
+ "#]],
+ );
let new_text = "
fn foo() -> i32 {
@@ -206,24 +337,25 @@ pub enum SomeEnum {
db.set_file_text(pos.file_id.file_id(&db), new_text);
- {
- let actual = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
- });
-
- let expected = vec![
- "parse_shim".to_owned(),
- "ast_id_map_shim".to_owned(),
- "file_item_tree_shim".to_owned(),
- "real_span_map_shim".to_owned(),
- "crate_local_def_map".to_owned(),
- "trait_impls_in_crate_shim".to_owned(),
- ];
-
- assert_eq!(expected, actual);
- }
+ },
+ &[("trait_impls_in_crate_shim", 1)],
+ expect_test::expect![[r#"
+ [
+ "parse_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ "real_span_map_shim",
+ "crate_local_def_map",
+ "trait_impls_in_crate_shim",
+ ]
+ "#]],
+ );
}
#[test]
@@ -240,14 +372,26 @@ fn bar() -> f32 {
}
$0",
);
- {
- let events = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
- });
- assert!(format!("{events:?}").contains("trait_impls_in_crate_shim"))
- }
+ },
+ &[("trait_impls_in_crate_shim", 1)],
+ expect_test::expect![[r#"
+ [
+ "source_root_crates_shim",
+ "crate_local_def_map",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "trait_impls_in_crate_shim",
+ ]
+ "#]],
+ );
let new_text = "
use std::collections::HashMap;
@@ -263,24 +407,25 @@ fn bar() -> f32 {
db.set_file_text(pos.file_id.file_id(&db), new_text);
- {
- let actual = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
- });
-
- let expected = vec![
- "parse_shim".to_owned(),
- "ast_id_map_shim".to_owned(),
- "file_item_tree_shim".to_owned(),
- "real_span_map_shim".to_owned(),
- "crate_local_def_map".to_owned(),
- "trait_impls_in_crate_shim".to_owned(),
- ];
-
- assert_eq!(expected, actual);
- }
+ },
+ &[("trait_impls_in_crate_shim", 1)],
+ expect_test::expect![[r#"
+ [
+ "parse_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ "real_span_map_shim",
+ "crate_local_def_map",
+ "trait_impls_in_crate_shim",
+ ]
+ "#]],
+ );
}
#[test]
@@ -301,14 +446,26 @@ pub struct SomeStruct {
}
$0",
);
- {
- let events = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
- });
- assert!(format!("{events:?}").contains("trait_impls_in_crate_shim"))
- }
+ },
+ &[("trait_impls_in_crate_shim", 1)],
+ expect_test::expect![[r#"
+ [
+ "source_root_crates_shim",
+ "crate_local_def_map",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "trait_impls_in_crate_shim",
+ ]
+ "#]],
+ );
let new_text = "
fn foo() -> i32 {
@@ -332,30 +489,253 @@ impl SomeStruct {
db.set_file_text(pos.file_id.file_id(&db), new_text);
- {
- let actual = db.log_executed(|| {
+ execute_assert_events(
+ &db,
+ || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
- });
-
- let expected = vec![
- "parse_shim".to_owned(),
- "ast_id_map_shim".to_owned(),
- "file_item_tree_shim".to_owned(),
- "real_span_map_shim".to_owned(),
- "crate_local_def_map".to_owned(),
- "trait_impls_in_crate_shim".to_owned(),
- "attrs_shim".to_owned(),
- "impl_trait_with_diagnostics_shim".to_owned(),
- "impl_signature_shim".to_owned(),
- "impl_signature_with_source_map_shim".to_owned(),
- "impl_self_ty_with_diagnostics_shim".to_owned(),
- "struct_signature_shim".to_owned(),
- "struct_signature_with_source_map_shim".to_owned(),
- "type_for_adt_tracked".to_owned(),
- ];
-
- assert_eq!(expected, actual);
- }
+ },
+ &[("trait_impls_in_crate_shim", 1)],
+ expect_test::expect![[r#"
+ [
+ "parse_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ "real_span_map_shim",
+ "crate_local_def_map",
+ "trait_impls_in_crate_shim",
+ "attrs_shim",
+ "impl_trait_with_diagnostics_shim",
+ "impl_signature_shim",
+ "impl_signature_with_source_map_shim",
+ "impl_self_ty_with_diagnostics_shim",
+ "struct_signature_shim",
+ "struct_signature_with_source_map_shim",
+ "attrs_shim",
+ "type_for_adt_tracked",
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn add_struct_invalidates_trait_solve() {
+ let (mut db, file_id) = TestDB::with_single_file(
+ "
+//- /main.rs crate:main
+struct SomeStruct;
+
+trait Trait<T> {
+ fn method(&self) -> T;
+}
+impl Trait<u32> for SomeStruct {}
+
+fn main() {
+ let s = SomeStruct;
+ s.method();
+ s.$0
+}",
+ );
+
+ execute_assert_events(
+ &db,
+ || {
+ let module = db.module_for_file(file_id.file_id(&db));
+ let crate_def_map = module.def_map(&db);
+ let mut defs: Vec<DefWithBodyId> = vec![];
+ visit_module(&db, crate_def_map, module.local_id, &mut |it| {
+ let def = match it {
+ ModuleDefId::FunctionId(it) => it.into(),
+ ModuleDefId::EnumVariantId(it) => it.into(),
+ ModuleDefId::ConstId(it) => it.into(),
+ ModuleDefId::StaticId(it) => it.into(),
+ _ => return,
+ };
+ defs.push(def);
+ });
+
+ for def in defs {
+ let _inference_result = db.infer(def);
+ }
+ },
+ &[("trait_solve_shim", 2)],
+ expect_test::expect![[r#"
+ [
+ "source_root_crates_shim",
+ "crate_local_def_map",
+ "file_item_tree_query",
+ "ast_id_map_shim",
+ "parse_shim",
+ "real_span_map_shim",
+ "TraitItems::query_with_diagnostics_",
+ "body_shim",
+ "body_with_source_map_shim",
+ "attrs_shim",
+ "ImplItems::of_",
+ "infer_shim",
+ "trait_signature_shim",
+ "trait_signature_with_source_map_shim",
+ "attrs_shim",
+ "function_signature_shim",
+ "function_signature_with_source_map_shim",
+ "attrs_shim",
+ "body_shim",
+ "body_with_source_map_shim",
+ "trait_environment_shim",
+ "lang_item",
+ "crate_lang_items",
+ "attrs_shim",
+ "attrs_shim",
+ "return_type_impl_traits_shim",
+ "infer_shim",
+ "function_signature_shim",
+ "function_signature_with_source_map_shim",
+ "trait_environment_shim",
+ "expr_scopes_shim",
+ "struct_signature_shim",
+ "struct_signature_with_source_map_shim",
+ "generic_predicates_shim",
+ "value_ty_shim",
+ "VariantFields::firewall_",
+ "VariantFields::query_",
+ "lang_item",
+ "inherent_impls_in_crate_shim",
+ "impl_signature_shim",
+ "impl_signature_with_source_map_shim",
+ "callable_item_signature_shim",
+ "adt_variance_shim",
+ "variances_of_shim",
+ "trait_solve_shim",
+ "trait_datum_shim",
+ "generic_predicates_shim",
+ "adt_datum_shim",
+ "trait_impls_in_deps_shim",
+ "trait_impls_in_crate_shim",
+ "impl_trait_with_diagnostics_shim",
+ "impl_self_ty_with_diagnostics_shim",
+ "type_for_adt_tracked",
+ "impl_datum_shim",
+ "generic_predicates_shim",
+ "program_clauses_for_chalk_env_shim",
+ "value_ty_shim",
+ "generic_predicates_shim",
+ "trait_solve_shim",
+ "lang_item",
+ ]
+ "#]],
+ );
+
+ let new_text = "
+//- /main.rs crate:main
+struct AnotherStruct;
+
+struct SomeStruct;
+
+trait Trait<T> {
+ fn method(&self) -> T;
+}
+impl Trait<u32> for SomeStruct {}
+
+fn main() {
+ let s = SomeStruct;
+ s.method();
+ s.$0
+}";
+
+ db.set_file_text(file_id.file_id(&db), new_text);
+
+ execute_assert_events(
+ &db,
+ || {
+ let module = db.module_for_file(file_id.file_id(&db));
+ let crate_def_map = module.def_map(&db);
+ let mut defs: Vec<DefWithBodyId> = vec![];
+
+ visit_module(&db, crate_def_map, module.local_id, &mut |it| {
+ let def = match it {
+ ModuleDefId::FunctionId(it) => it.into(),
+ ModuleDefId::EnumVariantId(it) => it.into(),
+ ModuleDefId::ConstId(it) => it.into(),
+ ModuleDefId::StaticId(it) => it.into(),
+ _ => return,
+ };
+ defs.push(def);
+ });
+
+ for def in defs {
+ let _inference_result = db.infer(def);
+ }
+ },
+ &[("trait_solve_shim", 0)],
+ expect_test::expect![[r#"
+ [
+ "parse_shim",
+ "ast_id_map_shim",
+ "file_item_tree_query",
+ "real_span_map_shim",
+ "crate_local_def_map",
+ "TraitItems::query_with_diagnostics_",
+ "body_with_source_map_shim",
+ "attrs_shim",
+ "body_shim",
+ "ImplItems::of_",
+ "infer_shim",
+ "attrs_shim",
+ "trait_signature_with_source_map_shim",
+ "attrs_shim",
+ "function_signature_with_source_map_shim",
+ "function_signature_shim",
+ "body_with_source_map_shim",
+ "body_shim",
+ "trait_environment_shim",
+ "crate_lang_items",
+ "attrs_shim",
+ "attrs_shim",
+ "attrs_shim",
+ "return_type_impl_traits_shim",
+ "infer_shim",
+ "function_signature_with_source_map_shim",
+ "expr_scopes_shim",
+ "struct_signature_with_source_map_shim",
+ "VariantFields::query_",
+ "inherent_impls_in_crate_shim",
+ "impl_signature_with_source_map_shim",
+ "impl_signature_shim",
+ "callable_item_signature_shim",
+ "generic_predicates_shim",
+ "trait_impls_in_crate_shim",
+ "impl_trait_with_diagnostics_shim",
+ "impl_self_ty_with_diagnostics_shim",
+ "generic_predicates_shim",
+ ]
+ "#]],
+ );
+}
+
+fn execute_assert_events(
+ db: &TestDB,
+ f: impl FnOnce(),
+ required: &[(&str, usize)],
+ expect: Expect,
+) {
+ let (executed, events) = db.log_executed(f);
+ salsa::attach(db, || {
+ for (event, count) in required {
+ let n = executed.iter().filter(|it| it.contains(event)).count();
+ assert_eq!(
+ n,
+ *count,
+ "Expected {event} to be executed {count} times, but only got {n}:\n \
+ Executed: {executed:#?}\n \
+ Event log: {events:#?}",
+ events = events
+ .iter()
+ .filter(|event| !matches!(event.kind, EventKind::WillCheckCancellation))
+ .map(|event| { format!("{:?}", event.kind) })
+ .collect::<Vec<_>>(),
+ );
+ }
+ expect.assert_debug_eq(&executed);
+ });
}
diff --git a/crates/hir-ty/src/tests/method_resolution.rs b/crates/hir-ty/src/tests/method_resolution.rs
index 94826acca3..c58ca6c67a 100644
--- a/crates/hir-ty/src/tests/method_resolution.rs
+++ b/crates/hir-ty/src/tests/method_resolution.rs
@@ -1153,9 +1153,9 @@ fn dyn_trait_super_trait_not_in_scope() {
51..55 'self': &'? Self
64..69 '{ 0 }': u32
66..67 '0': u32
- 176..177 'd': &'? (dyn Trait + 'static)
+ 176..177 'd': &'? (dyn Trait + '?)
191..207 '{ ...o(); }': ()
- 197..198 'd': &'? (dyn Trait + 'static)
+ 197..198 'd': &'? (dyn Trait + '?)
197..204 'd.foo()': u32
"#]],
);
@@ -2019,10 +2019,10 @@ impl dyn Error + Send {
/// Attempts to downcast the box to a concrete type.
pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<dyn Error + Send>> {
let err: Box<dyn Error> = self;
- // ^^^^ expected Box<dyn Error + 'static>, got Box<dyn Error + Send + 'static>
+ // ^^^^ expected Box<dyn Error + '?>, got Box<dyn Error + Send + '?>
// FIXME, type mismatch should not occur
<dyn Error>::downcast(err).map_err(|_| loop {})
- //^^^^^^^^^^^^^^^^^^^^^ type: fn downcast<{unknown}>(Box<dyn Error + 'static>) -> Result<Box<{unknown}>, Box<dyn Error + 'static>>
+ //^^^^^^^^^^^^^^^^^^^^^ type: fn downcast<{unknown}>(Box<dyn Error + '?>) -> Result<Box<{unknown}>, Box<dyn Error + '?>>
}
}
"#,
diff --git a/crates/hir-ty/src/tests/never_type.rs b/crates/hir-ty/src/tests/never_type.rs
index 1ca4c9b2ad..6a9135622d 100644
--- a/crates/hir-ty/src/tests/never_type.rs
+++ b/crates/hir-ty/src/tests/never_type.rs
@@ -785,3 +785,31 @@ fn make_up_a_pointer<T>() -> *const T {
"#]],
)
}
+
+#[test]
+fn diverging_destructuring_assignment() {
+ check_infer_with_mismatches(
+ r#"
+fn foo() {
+ let n = match 42 {
+ 0 => _ = loop {},
+ _ => 0,
+ };
+}
+ "#,
+ expect![[r#"
+ 9..84 '{ ... }; }': ()
+ 19..20 'n': i32
+ 23..81 'match ... }': i32
+ 29..31 '42': i32
+ 42..43 '0': i32
+ 42..43 '0': i32
+ 47..48 '_': !
+ 47..58 '_ = loop {}': i32
+ 51..58 'loop {}': !
+ 56..58 '{}': ()
+ 68..69 '_': i32
+ 73..74 '0': i32
+ "#]],
+ );
+}
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs
index ff8adeef1d..238753e12e 100644
--- a/crates/hir-ty/src/tests/regression.rs
+++ b/crates/hir-ty/src/tests/regression.rs
@@ -629,7 +629,7 @@ fn issue_4053_diesel_where_clauses() {
488..522 '{ ... }': ()
498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
498..508 'self.order': O
- 498..515 'self.o...into()': dyn QueryFragment<DB> + 'static
+ 498..515 'self.o...into()': dyn QueryFragment<DB> + '?
"#]],
);
}
@@ -773,7 +773,7 @@ fn issue_4800() {
"#,
expect![[r#"
379..383 'self': &'? mut PeerSet<D>
- 401..424 '{ ... }': dyn Future<Output = ()> + 'static
+ 401..424 '{ ... }': dyn Future<Output = ()> + '?
411..418 'loop {}': !
416..418 '{}': ()
575..579 'self': &'? mut Self
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs
index cf51671afb..b154e59878 100644
--- a/crates/hir-ty/src/tests/simple.rs
+++ b/crates/hir-ty/src/tests/simple.rs
@@ -2741,11 +2741,11 @@ impl B for Astruct {}
715..744 '#[rust...1i32])': Box<[i32; 1], Global>
737..743 '[1i32]': [i32; 1]
738..742 '1i32': i32
- 755..756 'v': Vec<Box<dyn B + 'static, Global>, Global>
- 776..793 '<[_]> ...to_vec': fn into_vec<Box<dyn B + 'static, Global>, Global>(Box<[Box<dyn B + 'static, Global>], Global>) -> Vec<Box<dyn B + 'static, Global>, Global>
- 776..850 '<[_]> ...ct)]))': Vec<Box<dyn B + 'static, Global>, Global>
- 794..849 '#[rust...uct)])': Box<[Box<dyn B + 'static, Global>; 1], Global>
- 816..848 '[#[rus...ruct)]': [Box<dyn B + 'static, Global>; 1]
+ 755..756 'v': Vec<Box<dyn B + '?, Global>, Global>
+ 776..793 '<[_]> ...to_vec': fn into_vec<Box<dyn B + '?, Global>, Global>(Box<[Box<dyn B + '?, Global>], Global>) -> Vec<Box<dyn B + '?, Global>, Global>
+ 776..850 '<[_]> ...ct)]))': Vec<Box<dyn B + '?, Global>, Global>
+ 794..849 '#[rust...uct)])': Box<[Box<dyn B + '?, Global>; 1], Global>
+ 816..848 '[#[rus...ruct)]': [Box<dyn B + '?, Global>; 1]
817..847 '#[rust...truct)': Box<Astruct, Global>
839..846 'Astruct': Astruct
"#]],
@@ -3751,7 +3751,7 @@ fn foo() {
}
let v: bool = true;
m!();
- // ^^^^ i32
+ // ^^ i32
}
"#,
);
@@ -3765,39 +3765,39 @@ fn foo() {
let v: bool;
macro_rules! m { () => { v } }
m!();
- // ^^^^ bool
+ // ^^ bool
let v: char;
macro_rules! m { () => { v } }
m!();
- // ^^^^ char
+ // ^^ char
{
let v: u8;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u8
+ // ^^ u8
let v: i8;
macro_rules! m { () => { v } }
m!();
- // ^^^^ i8
+ // ^^ i8
let v: i16;
macro_rules! m { () => { v } }
m!();
- // ^^^^ i16
+ // ^^ i16
{
let v: u32;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u32
+ // ^^ u32
let v: u64;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u64
+ // ^^ u64
}
}
}
diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs
index e5d1fbe9de..56e31a1af1 100644
--- a/crates/hir-ty/src/tests/traits.rs
+++ b/crates/hir-ty/src/tests/traits.rs
@@ -1475,26 +1475,26 @@ fn test(x: Box<dyn Trait<u64>>, y: &dyn Trait<u64>) {
expect![[r#"
29..33 'self': &'? Self
54..58 'self': &'? Self
- 198..200 '{}': Box<dyn Trait<u64> + 'static>
- 210..211 'x': Box<dyn Trait<u64> + 'static>
- 234..235 'y': &'? (dyn Trait<u64> + 'static)
+ 198..200 '{}': Box<dyn Trait<u64> + '?>
+ 210..211 'x': Box<dyn Trait<u64> + '?>
+ 234..235 'y': &'? (dyn Trait<u64> + '?)
254..371 '{ ...2(); }': ()
- 260..261 'x': Box<dyn Trait<u64> + 'static>
- 267..268 'y': &'? (dyn Trait<u64> + 'static)
- 278..279 'z': Box<dyn Trait<u64> + 'static>
- 282..285 'bar': fn bar() -> Box<dyn Trait<u64> + 'static>
- 282..287 'bar()': Box<dyn Trait<u64> + 'static>
- 293..294 'x': Box<dyn Trait<u64> + 'static>
+ 260..261 'x': Box<dyn Trait<u64> + '?>
+ 267..268 'y': &'? (dyn Trait<u64> + '?)
+ 278..279 'z': Box<dyn Trait<u64> + '?>
+ 282..285 'bar': fn bar() -> Box<dyn Trait<u64> + '?>
+ 282..287 'bar()': Box<dyn Trait<u64> + '?>
+ 293..294 'x': Box<dyn Trait<u64> + '?>
293..300 'x.foo()': u64
- 306..307 'y': &'? (dyn Trait<u64> + 'static)
+ 306..307 'y': &'? (dyn Trait<u64> + '?)
306..313 'y.foo()': u64
- 319..320 'z': Box<dyn Trait<u64> + 'static>
+ 319..320 'z': Box<dyn Trait<u64> + '?>
319..326 'z.foo()': u64
- 332..333 'x': Box<dyn Trait<u64> + 'static>
+ 332..333 'x': Box<dyn Trait<u64> + '?>
332..340 'x.foo2()': i64
- 346..347 'y': &'? (dyn Trait<u64> + 'static)
+ 346..347 'y': &'? (dyn Trait<u64> + '?)
346..354 'y.foo2()': i64
- 360..361 'z': Box<dyn Trait<u64> + 'static>
+ 360..361 'z': Box<dyn Trait<u64> + '?>
360..368 'z.foo2()': i64
"#]],
);
@@ -1523,14 +1523,14 @@ fn test(s: S<u32, i32>) {
expect![[r#"
32..36 'self': &'? Self
102..106 'self': &'? S<T, U>
- 128..139 '{ loop {} }': &'? (dyn Trait<T, U> + 'static)
+ 128..139 '{ loop {} }': &'? (dyn Trait<T, U> + '?)
130..137 'loop {}': !
135..137 '{}': ()
175..179 'self': &'? Self
251..252 's': S<u32, i32>
267..289 '{ ...z(); }': ()
273..274 's': S<u32, i32>
- 273..280 's.bar()': &'? (dyn Trait<u32, i32> + 'static)
+ 273..280 's.bar()': &'? (dyn Trait<u32, i32> + '?)
273..286 's.bar().baz()': (u32, i32)
"#]],
);
@@ -1556,20 +1556,20 @@ fn test(x: Trait, y: &Trait) -> u64 {
}"#,
expect![[r#"
26..30 'self': &'? Self
- 60..62 '{}': dyn Trait + 'static
- 72..73 'x': dyn Trait + 'static
- 82..83 'y': &'? (dyn Trait + 'static)
+ 60..62 '{}': dyn Trait + '?
+ 72..73 'x': dyn Trait + '?
+ 82..83 'y': &'? (dyn Trait + '?)
100..175 '{ ...o(); }': u64
- 106..107 'x': dyn Trait + 'static
- 113..114 'y': &'? (dyn Trait + 'static)
- 124..125 'z': dyn Trait + 'static
- 128..131 'bar': fn bar() -> dyn Trait + 'static
- 128..133 'bar()': dyn Trait + 'static
- 139..140 'x': dyn Trait + 'static
+ 106..107 'x': dyn Trait + '?
+ 113..114 'y': &'? (dyn Trait + '?)
+ 124..125 'z': dyn Trait + '?
+ 128..131 'bar': fn bar() -> dyn Trait + '?
+ 128..133 'bar()': dyn Trait + '?
+ 139..140 'x': dyn Trait + '?
139..146 'x.foo()': u64
- 152..153 'y': &'? (dyn Trait + 'static)
+ 152..153 'y': &'? (dyn Trait + '?)
152..159 'y.foo()': u64
- 165..166 'z': dyn Trait + 'static
+ 165..166 'z': dyn Trait + '?
165..172 'z.foo()': u64
"#]],
);
@@ -1589,10 +1589,10 @@ fn main() {
expect![[r#"
31..35 'self': &'? S
37..39 '{}': ()
- 47..48 '_': &'? (dyn Fn(S) + 'static)
+ 47..48 '_': &'? (dyn Fn(S) + '?)
58..60 '{}': ()
71..105 '{ ...()); }': ()
- 77..78 'f': fn f(&'? (dyn Fn(S) + 'static))
+ 77..78 'f': fn f(&'? (dyn Fn(S) + '?))
77..102 'f(&|nu...foo())': ()
79..101 '&|numb....foo()': &'? impl Fn(S)
80..101 '|numbe....foo()': impl Fn(S)
@@ -2927,13 +2927,13 @@ fn test(x: &dyn Foo) {
foo(x);
}"#,
expect![[r#"
- 21..22 'x': &'? (dyn Foo + 'static)
+ 21..22 'x': &'? (dyn Foo + '?)
34..36 '{}': ()
- 46..47 'x': &'? (dyn Foo + 'static)
+ 46..47 'x': &'? (dyn Foo + '?)
59..74 '{ foo(x); }': ()
- 65..68 'foo': fn foo(&'? (dyn Foo + 'static))
+ 65..68 'foo': fn foo(&'? (dyn Foo + '?))
65..71 'foo(x)': ()
- 69..70 'x': &'? (dyn Foo + 'static)
+ 69..70 'x': &'? (dyn Foo + '?)
"#]],
);
}
@@ -3210,13 +3210,13 @@ fn foo() {
218..324 '{ ...&s); }': ()
228..229 's': Option<i32>
232..236 'None': Option<i32>
- 246..247 'f': Box<dyn FnOnce(&'? Option<i32>) + 'static>
- 281..310 'Box { ... {}) }': Box<dyn FnOnce(&'? Option<i32>) + 'static>
+ 246..247 'f': Box<dyn FnOnce(&'? Option<i32>) + '?>
+ 281..310 'Box { ... {}) }': Box<dyn FnOnce(&'? Option<i32>) + '?>
294..308 '&mut (|ps| {})': &'? mut impl FnOnce(&'? Option<i32>)
300..307 '|ps| {}': impl FnOnce(&'? Option<i32>)
301..303 'ps': &'? Option<i32>
305..307 '{}': ()
- 316..317 'f': Box<dyn FnOnce(&'? Option<i32>) + 'static>
+ 316..317 'f': Box<dyn FnOnce(&'? Option<i32>) + '?>
316..321 'f(&s)': ()
318..320 '&s': &'? Option<i32>
319..320 's': Option<i32>
@@ -4252,9 +4252,9 @@ fn f<'a>(v: &dyn Trait<Assoc<i32> = &'a i32>) {
"#,
expect![[r#"
90..94 'self': &'? Self
- 127..128 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + 'static)
+ 127..128 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + '?)
164..195 '{ ...f(); }': ()
- 170..171 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + 'static)
+ 170..171 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + '?)
170..184 'v.get::<i32>()': &'? i32
170..192 'v.get:...eref()': &'? i32
"#]],
diff --git a/crates/hir-ty/src/tls.rs b/crates/hir-ty/src/tls.rs
index f5911e2161..f53409af2b 100644
--- a/crates/hir-ty/src/tls.rs
+++ b/crates/hir-ty/src/tls.rs
@@ -109,7 +109,7 @@ impl DebugContext<'_> {
CallableDefId::StructId(s) => self.0.struct_signature(s).name.clone(),
CallableDefId::EnumVariantId(e) => {
let loc = e.lookup(self.0);
- self.0.enum_variants(loc.parent).variants[loc.index as usize].1.clone()
+ loc.parent.enum_variants(self.0).variants[loc.index as usize].1.clone()
}
};
match def {
diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs
index 1e0ff423de..d07c1aa33b 100644
--- a/crates/hir-ty/src/utils.rs
+++ b/crates/hir-ty/src/utils.rs
@@ -1,7 +1,7 @@
//! Helper functions for working with def, which don't need to be a separate
//! query, but can't be computed directly from `*Data` (ie, which need a `db`).
-use std::iter;
+use std::{cell::LazyCell, iter};
use base_db::Crate;
use chalk_ir::{
@@ -161,11 +161,12 @@ impl Iterator for ClauseElaborator<'_> {
}
fn direct_super_traits_cb(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(TraitId)) {
- let resolver = trait_.resolver(db);
+ let resolver = LazyCell::new(|| trait_.resolver(db));
let (generic_params, store) = db.generic_params_and_store(trait_.into());
let trait_self = generic_params.trait_self_param();
generic_params
.where_predicates()
+ .iter()
.filter_map(|pred| match pred {
WherePredicate::ForLifetime { target, bound, .. }
| WherePredicate::TypeBound { target, bound } => {
@@ -218,7 +219,7 @@ pub(super) fn associated_type_by_name_including_super_traits(
name: &Name,
) -> Option<(TraitRef, TypeAliasId)> {
all_super_trait_refs(db, trait_ref, |t| {
- let assoc_type = db.trait_items(t.hir_trait_id()).associated_type_by_name(name)?;
+ let assoc_type = t.hir_trait_id().trait_items(db).associated_type_by_name(name)?;
Some((t, assoc_type))
})
}
@@ -293,9 +294,7 @@ pub fn is_fn_unsafe_to_call(
let loc = func.lookup(db);
match loc.container {
hir_def::ItemContainerId::ExternBlockId(block) => {
- let id = block.lookup(db).id;
- let is_intrinsic_block =
- id.item_tree(db)[id.value].abi.as_ref() == Some(&sym::rust_dash_intrinsic);
+ let is_intrinsic_block = block.abi(db) == Some(sym::rust_dash_intrinsic);
if is_intrinsic_block {
// legacy intrinsics
// extern "rust-intrinsic" intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute
@@ -357,7 +356,7 @@ pub(crate) fn detect_variant_from_bytes<'a>(
let (var_id, var_layout) = match &layout.variants {
hir_def::layout::Variants::Empty => unreachable!(),
hir_def::layout::Variants::Single { index } => {
- (db.enum_variants(e).variants[index.0].0, layout)
+ (e.enum_variants(db).variants[index.0].0, layout)
}
hir_def::layout::Variants::Multiple { tag, tag_encoding, variants, .. } => {
let size = tag.size(target_data_layout).bytes_usize();
@@ -367,7 +366,7 @@ pub(crate) fn detect_variant_from_bytes<'a>(
TagEncoding::Direct => {
let (var_idx, layout) =
variants.iter_enumerated().find_map(|(var_idx, v)| {
- let def = db.enum_variants(e).variants[var_idx.0].0;
+ let def = e.enum_variants(db).variants[var_idx.0].0;
(db.const_eval_discriminant(def) == Ok(tag)).then_some((def, v))
})?;
(var_idx, layout)
@@ -380,7 +379,7 @@ pub(crate) fn detect_variant_from_bytes<'a>(
.filter(|x| x != untagged_variant)
.nth(candidate_tag)
.unwrap_or(*untagged_variant);
- (db.enum_variants(e).variants[variant.0].0, &variants[variant])
+ (e.enum_variants(db).variants[variant.0].0, &variants[variant])
}
}
}
diff --git a/crates/hir-ty/src/variance.rs b/crates/hir-ty/src/variance.rs
index d6b43aeed4..08a215fecf 100644
--- a/crates/hir-ty/src/variance.rs
+++ b/crates/hir-ty/src/variance.rs
@@ -213,7 +213,7 @@ impl Context<'_> {
AdtId::StructId(s) => add_constraints_from_variant(VariantId::StructId(s)),
AdtId::UnionId(u) => add_constraints_from_variant(VariantId::UnionId(u)),
AdtId::EnumId(e) => {
- db.enum_variants(e).variants.iter().for_each(|&(variant, _)| {
+ e.enum_variants(db).variants.iter().for_each(|&(variant, _, _)| {
add_constraints_from_variant(VariantId::EnumVariantId(variant))
});
}
diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml
index 2af3c2e4c3..c68ff706e4 100644
--- a/crates/hir/Cargo.toml
+++ b/crates/hir/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
rustc-hash.workspace = true
diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs
index b1cf30b98f..c8645b6282 100644
--- a/crates/hir/src/attrs.rs
+++ b/crates/hir/src/attrs.rs
@@ -207,7 +207,7 @@ fn resolve_assoc_or_field(
// Doc paths in this context may only resolve to an item of this trait
// (i.e. no items of its supertraits), so we need to handle them here
// independently of others.
- return db.trait_items(id).items.iter().find(|it| it.0 == name).map(|(_, assoc_id)| {
+ return id.trait_items(db).items.iter().find(|it| it.0 == name).map(|(_, assoc_id)| {
let def = match *assoc_id {
AssocItemId::FunctionId(it) => ModuleDef::Function(it.into()),
AssocItemId::ConstId(it) => ModuleDef::Const(it.into()),
@@ -242,9 +242,9 @@ fn resolve_assoc_or_field(
resolve_field(db, variant_def, name, ns)
}
-fn resolve_assoc_item(
- db: &dyn HirDatabase,
- ty: &Type,
+fn resolve_assoc_item<'db>(
+ db: &'db dyn HirDatabase,
+ ty: &Type<'db>,
name: &Name,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
@@ -256,10 +256,10 @@ fn resolve_assoc_item(
})
}
-fn resolve_impl_trait_item(
- db: &dyn HirDatabase,
+fn resolve_impl_trait_item<'db>(
+ db: &'db dyn HirDatabase,
resolver: Resolver<'_>,
- ty: &Type,
+ ty: &Type<'db>,
name: &Name,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index f7b140e03d..c1e814ec22 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -36,16 +36,16 @@ pub use hir_ty::{
};
macro_rules! diagnostics {
- ($($diag:ident,)*) => {
+ ($AnyDiagnostic:ident <$db:lifetime> -> $($diag:ident $(<$lt:lifetime>)?,)*) => {
#[derive(Debug)]
- pub enum AnyDiagnostic {$(
- $diag(Box<$diag>),
+ pub enum $AnyDiagnostic<$db> {$(
+ $diag(Box<$diag $(<$lt>)?>),
)*}
$(
- impl From<$diag> for AnyDiagnostic {
- fn from(d: $diag) -> AnyDiagnostic {
- AnyDiagnostic::$diag(Box::new(d))
+ impl<$db> From<$diag $(<$lt>)?> for $AnyDiagnostic<$db> {
+ fn from(d: $diag $(<$lt>)?) -> $AnyDiagnostic<$db> {
+ $AnyDiagnostic::$diag(Box::new(d))
}
}
)*
@@ -66,15 +66,15 @@ macro_rules! diagnostics {
// }, ...
// ]
-diagnostics![
+diagnostics![AnyDiagnostic<'db> ->
AwaitOutsideOfAsync,
BreakOutsideOfLoop,
- CastToUnsized,
- ExpectedFunction,
+ CastToUnsized<'db>,
+ ExpectedFunction<'db>,
InactiveCode,
IncoherentImpl,
IncorrectCase,
- InvalidCast,
+ InvalidCast<'db>,
InvalidDeriveTarget,
MacroDefError,
MacroError,
@@ -85,7 +85,7 @@ diagnostics![
MissingFields,
MissingMatchArms,
MissingUnsafe,
- MovedOutOfRef,
+ MovedOutOfRef<'db>,
NeedMut,
NonExhaustiveLet,
NoSuchField,
@@ -98,17 +98,17 @@ diagnostics![
TraitImplMissingAssocItems,
TraitImplOrphan,
TraitImplRedundantAssocItems,
- TypedHole,
- TypeMismatch,
+ TypedHole<'db>,
+ TypeMismatch<'db>,
UndeclaredLabel,
UnimplementedBuiltinMacro,
UnreachableLabel,
UnresolvedAssocItem,
UnresolvedExternCrate,
- UnresolvedField,
+ UnresolvedField<'db>,
UnresolvedImport,
UnresolvedMacroCall,
- UnresolvedMethodCall,
+ UnresolvedMethodCall<'db>,
UnresolvedModule,
UnresolvedIdent,
UnusedMut,
@@ -130,9 +130,9 @@ pub struct BreakOutsideOfLoop {
}
#[derive(Debug)]
-pub struct TypedHole {
+pub struct TypedHole<'db> {
pub expr: InFile<ExprOrPatPtr>,
- pub expected: Type,
+ pub expected: Type<'db>,
}
#[derive(Debug)]
@@ -242,25 +242,25 @@ pub struct MismatchedTupleStructPatArgCount {
}
#[derive(Debug)]
-pub struct ExpectedFunction {
+pub struct ExpectedFunction<'db> {
pub call: InFile<ExprOrPatPtr>,
- pub found: Type,
+ pub found: Type<'db>,
}
#[derive(Debug)]
-pub struct UnresolvedField {
+pub struct UnresolvedField<'db> {
pub expr: InFile<ExprOrPatPtr>,
- pub receiver: Type,
+ pub receiver: Type<'db>,
pub name: Name,
pub method_with_same_name_exists: bool,
}
#[derive(Debug)]
-pub struct UnresolvedMethodCall {
+pub struct UnresolvedMethodCall<'db> {
pub expr: InFile<ExprOrPatPtr>,
- pub receiver: Type,
+ pub receiver: Type<'db>,
pub name: Name,
- pub field_with_same_name: Option<Type>,
+ pub field_with_same_name: Option<Type<'db>>,
pub assoc_func_with_same_name: Option<Function>,
}
@@ -329,10 +329,10 @@ pub struct NonExhaustiveLet {
}
#[derive(Debug)]
-pub struct TypeMismatch {
+pub struct TypeMismatch<'db> {
pub expr_or_pat: InFile<ExprOrPatPtr>,
- pub expected: Type,
- pub actual: Type,
+ pub expected: Type<'db>,
+ pub actual: Type<'db>,
}
#[derive(Debug)]
@@ -352,8 +352,8 @@ pub struct UnusedVariable {
}
#[derive(Debug)]
-pub struct MovedOutOfRef {
- pub ty: Type,
+pub struct MovedOutOfRef<'db> {
+ pub ty: Type<'db>,
pub span: InFile<SyntaxNodePtr>,
}
@@ -403,17 +403,17 @@ pub struct RemoveUnnecessaryElse {
}
#[derive(Debug)]
-pub struct CastToUnsized {
+pub struct CastToUnsized<'db> {
pub expr: InFile<ExprOrPatPtr>,
- pub cast_ty: Type,
+ pub cast_ty: Type<'db>,
}
#[derive(Debug)]
-pub struct InvalidCast {
+pub struct InvalidCast<'db> {
pub expr: InFile<ExprOrPatPtr>,
pub error: CastError,
- pub expr_ty: Type,
- pub cast_ty: Type,
+ pub expr_ty: Type<'db>,
+ pub cast_ty: Type<'db>,
}
#[derive(Debug)]
@@ -482,15 +482,15 @@ pub struct IncorrectGenericsOrder {
pub expected_kind: GenericArgKind,
}
-impl AnyDiagnostic {
+impl<'db> AnyDiagnostic<'db> {
pub(crate) fn body_validation_diagnostic(
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
diagnostic: BodyValidationDiagnostic,
source_map: &hir_def::expr_store::BodySourceMap,
- ) -> Option<AnyDiagnostic> {
+ ) -> Option<AnyDiagnostic<'db>> {
match diagnostic {
BodyValidationDiagnostic::RecordMissingFields { record, variant, missed_fields } => {
- let variant_data = variant.variant_data(db);
+ let variant_data = variant.fields(db);
let missed_fields = missed_fields
.into_iter()
.map(|idx| variant_data.fields()[idx].name.clone())
@@ -618,12 +618,12 @@ impl AnyDiagnostic {
}
pub(crate) fn inference_diagnostic(
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
def: DefWithBodyId,
d: &InferenceDiagnostic,
source_map: &hir_def::expr_store::BodySourceMap,
sig_map: &hir_def::expr_store::ExpressionStoreSourceMap,
- ) -> Option<AnyDiagnostic> {
+ ) -> Option<AnyDiagnostic<'db>> {
let expr_syntax = |expr| {
source_map
.expr_syntax(expr)
@@ -819,7 +819,7 @@ impl AnyDiagnostic {
fn path_diagnostic(
diag: &PathLoweringDiagnostic,
path: InFile<ast::Path>,
- ) -> Option<AnyDiagnostic> {
+ ) -> Option<AnyDiagnostic<'db>> {
Some(match *diag {
PathLoweringDiagnostic::GenericArgsProhibited { segment, reason } => {
let segment = hir_segment_to_ast_segment(&path.value, segment)?;
@@ -912,8 +912,8 @@ impl AnyDiagnostic {
pub(crate) fn ty_diagnostic(
diag: &TyLoweringDiagnostic,
source_map: &ExpressionStoreSourceMap,
- db: &dyn HirDatabase,
- ) -> Option<AnyDiagnostic> {
+ db: &'db dyn HirDatabase,
+ ) -> Option<AnyDiagnostic<'db>> {
let Ok(source) = source_map.type_syntax(diag.source) else {
stdx::never!("error on synthetic type syntax");
return None;
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index 124ab8e274..2960ebedf3 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -404,7 +404,7 @@ impl HirDisplay for TupleField {
impl HirDisplay for Variant {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?;
- let data = f.db.variant_fields(self.id.into());
+ let data = self.id.fields(f.db);
match data.shape {
FieldsShape::Unit => {}
FieldsShape::Tuple => {
@@ -431,7 +431,7 @@ impl HirDisplay for Variant {
}
}
-impl HirDisplay for Type {
+impl HirDisplay for Type<'_> {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
self.ty.hir_fmt(f)
}
@@ -633,7 +633,7 @@ fn has_disaplayable_predicates(
params: &GenericParams,
store: &ExpressionStore,
) -> bool {
- params.where_predicates().any(|pred| {
+ params.where_predicates().iter().any(|pred| {
!matches!(
pred,
WherePredicate::TypeBound { target, .. }
@@ -668,7 +668,7 @@ fn write_where_predicates(
_ => false,
};
- let mut iter = params.where_predicates().peekable();
+ let mut iter = params.where_predicates().iter().peekable();
while let Some(pred) = iter.next() {
if matches!(pred, TypeBound { target, .. } if is_unnamed_type_target(*target)) {
continue;
@@ -743,7 +743,7 @@ impl HirDisplay for Static {
}
}
-impl HirDisplay for TraitRef {
+impl HirDisplay for TraitRef<'_> {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
self.trait_ref.hir_fmt(f)
}
diff --git a/crates/hir/src/has_source.rs b/crates/hir/src/has_source.rs
index fe7429c867..4767d4792e 100644
--- a/crates/hir/src/has_source.rs
+++ b/crates/hir/src/has_source.rs
@@ -225,7 +225,7 @@ impl HasSource for LocalSource {
}
}
-impl HasSource for Param {
+impl HasSource for Param<'_> {
type Ast = Either<ast::SelfParam, ast::Param>;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index e8218cf861..1b2b76999f 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -52,12 +52,14 @@ use hir_def::{
BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat,
generics::{LifetimeParamData, TypeOrConstParamData, TypeParamProvenance},
},
- item_tree::{AttrOwner, FieldParent, ImportAlias, ItemTreeFieldId, ItemTreeNode},
+ item_tree::ImportAlias,
layout::{self, ReprOptions, TargetDataLayout},
- nameres::{self, diagnostics::DefDiagnostic},
+ nameres::{self, assoc::TraitItems, diagnostics::DefDiagnostic},
per_ns::PerNs,
resolver::{HasResolver, Resolver},
signatures::{ImplFlags, StaticFlags, TraitFlags, VariantFields},
+ src::HasSource as _,
+ visibility::visibility_from_ast,
};
use hir_expand::{
AstId, MacroCallKind, RenderedExpandError, ValueResult, attrs::collect_attrs,
@@ -81,11 +83,11 @@ use itertools::Itertools;
use nameres::diagnostics::DefDiagnosticKind;
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
-use span::{Edition, FileId};
-use stdx::{format_to, impl_from, never};
+use span::{AstIdNode, Edition, FileId};
+use stdx::{format_to, impl_from, never, variance::PhantomCovariantLifetime};
use syntax::{
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr,
- ast::{self, HasAttrs as _, HasName},
+ ast::{self, HasAttrs as _, HasName, HasVisibility as _},
format_smolstr,
};
use triomphe::{Arc, ThinArc};
@@ -398,7 +400,11 @@ impl ModuleDef {
Some(name)
}
- pub fn diagnostics(self, db: &dyn HirDatabase, style_lints: bool) -> Vec<AnyDiagnostic> {
+ pub fn diagnostics<'db>(
+ self,
+ db: &'db dyn HirDatabase,
+ style_lints: bool,
+ ) -> Vec<AnyDiagnostic<'db>> {
let id = match self {
ModuleDef::Adt(it) => match it {
Adt::Struct(it) => it.id.into(),
@@ -610,10 +616,10 @@ impl Module {
}
/// Fills `acc` with the module's diagnostics.
- pub fn diagnostics(
+ pub fn diagnostics<'db>(
self,
- db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic>,
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
style_lints: bool,
) {
let _p = tracing::info_span!("diagnostics", name = ?self.name(db)).entered();
@@ -643,7 +649,7 @@ impl Module {
acc.extend(def.diagnostics(db, style_lints))
}
ModuleDef::Trait(t) => {
- for diag in db.trait_items_with_diagnostics(t.id).1.iter() {
+ for diag in TraitItems::query_with_diagnostics(db, t.id).1.iter() {
emit_def_diagnostic(db, acc, diag, edition);
}
@@ -662,32 +668,32 @@ impl Module {
Adt::Struct(s) => {
let source_map = db.struct_signature_with_source_map(s.id).1;
expr_store_diagnostics(db, acc, &source_map);
- let source_map = db.variant_fields_with_source_map(s.id.into()).1;
- expr_store_diagnostics(db, acc, &source_map);
+ let source_map = &s.id.fields_with_source_map(db).1;
+ expr_store_diagnostics(db, acc, source_map);
push_ty_diagnostics(
db,
acc,
db.field_types_with_diagnostics(s.id.into()).1,
- &source_map,
+ source_map,
);
}
Adt::Union(u) => {
let source_map = db.union_signature_with_source_map(u.id).1;
expr_store_diagnostics(db, acc, &source_map);
- let source_map = db.variant_fields_with_source_map(u.id.into()).1;
- expr_store_diagnostics(db, acc, &source_map);
+ let source_map = &u.id.fields_with_source_map(db).1;
+ expr_store_diagnostics(db, acc, source_map);
push_ty_diagnostics(
db,
acc,
db.field_types_with_diagnostics(u.id.into()).1,
- &source_map,
+ source_map,
);
}
Adt::Enum(e) => {
let source_map = db.enum_signature_with_source_map(e.id).1;
expr_store_diagnostics(db, acc, &source_map);
- let (variants, diagnostics) = db.enum_variants_with_diagnostics(e.id);
- let file = e.id.lookup(db).id.file_id();
+ let (variants, diagnostics) = e.id.enum_variants_with_diagnostics(db);
+ let file = e.id.lookup(db).id.file_id;
let ast_id_map = db.ast_id_map(file);
if let Some(diagnostics) = &diagnostics {
for diag in diagnostics.iter() {
@@ -704,15 +710,15 @@ impl Module {
);
}
}
- for &(v, _) in &variants.variants {
- let source_map = db.variant_fields_with_source_map(v.into()).1;
+ for &(v, _, _) in &variants.variants {
+ let source_map = &v.fields_with_source_map(db).1;
push_ty_diagnostics(
db,
acc,
db.field_types_with_diagnostics(v.into()).1,
- &source_map,
+ source_map,
);
- expr_store_diagnostics(db, acc, &source_map);
+ expr_store_diagnostics(db, acc, source_map);
}
}
}
@@ -742,12 +748,10 @@ impl Module {
GenericDef::Impl(impl_def).diagnostics(db, acc);
let loc = impl_def.id.lookup(db);
- let tree = loc.id.item_tree(db);
let source_map = db.impl_signature_with_source_map(impl_def.id).1;
expr_store_diagnostics(db, acc, &source_map);
- let node = &tree[loc.id.value];
- let file_id = loc.id.file_id();
+ let file_id = loc.id.file_id;
if file_id.macro_file().is_some_and(|it| it.kind(db) == MacroKind::DeriveBuiltIn) {
// these expansion come from us, diagnosing them is a waste of resources
// FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow
@@ -760,16 +764,16 @@ impl Module {
let ast_id_map = db.ast_id_map(file_id);
- for diag in db.impl_items_with_diagnostics(impl_def.id).1.iter() {
+ for diag in impl_def.id.impl_items_with_diagnostics(db).1.iter() {
emit_def_diagnostic(db, acc, diag, edition);
}
if inherent_impls.invalid_impls().contains(&impl_def.id) {
- acc.push(IncoherentImpl { impl_: ast_id_map.get(node.ast_id()), file_id }.into())
+ acc.push(IncoherentImpl { impl_: ast_id_map.get(loc.id.value), file_id }.into())
}
if !impl_def.check_orphan_rules(db) {
- acc.push(TraitImplOrphan { impl_: ast_id_map.get(node.ast_id()), file_id }.into())
+ acc.push(TraitImplOrphan { impl_: ast_id_map.get(loc.id.value), file_id }.into())
}
let trait_ = impl_def.trait_(db);
@@ -808,23 +812,23 @@ impl Module {
// unsafe negative impl
(true, _, true, _) |
// unsafe impl for safe trait
- (true, false, _, false) => acc.push(TraitImplIncorrectSafety { impl_: ast_id_map.get(node.ast_id()), file_id, should_be_safe: true }.into()),
+ (true, false, _, false) => acc.push(TraitImplIncorrectSafety { impl_: ast_id_map.get(loc.id.value), file_id, should_be_safe: true }.into()),
// safe impl for unsafe trait
(false, true, false, _) |
// safe impl of dangling drop
- (false, false, _, true) => acc.push(TraitImplIncorrectSafety { impl_: ast_id_map.get(node.ast_id()), file_id, should_be_safe: false }.into()),
+ (false, false, _, true) => acc.push(TraitImplIncorrectSafety { impl_: ast_id_map.get(loc.id.value), file_id, should_be_safe: false }.into()),
_ => (),
};
// Negative impls can't have items, don't emit missing items diagnostic for them
if let (false, Some(trait_)) = (impl_is_negative, trait_) {
- let items = &db.trait_items(trait_.into()).items;
+ let items = &trait_.id.trait_items(db).items;
let required_items = items.iter().filter(|&(_, assoc)| match *assoc {
AssocItemId::FunctionId(it) => !db.function_signature(it).has_body(),
AssocItemId::ConstId(id) => !db.const_signature(id).has_body(),
AssocItemId::TypeAliasId(it) => db.type_alias_signature(it).ty.is_none(),
});
- impl_assoc_items_scratch.extend(db.impl_items(impl_def.id).items.iter().cloned());
+ impl_assoc_items_scratch.extend(impl_def.id.impl_items(db).items.iter().cloned());
let redundant = impl_assoc_items_scratch
.iter()
@@ -839,14 +843,14 @@ impl Module {
TraitImplRedundantAssocItems {
trait_,
file_id,
- impl_: ast_id_map.get(node.ast_id()),
+ impl_: ast_id_map.get(loc.id.value),
assoc_item: (name, assoc_item),
}
.into(),
)
}
- let missing: Vec<_> = required_items
+ let mut missing: Vec<_> = required_items
.filter(|(name, id)| {
!impl_assoc_items_scratch.iter().any(|(impl_name, impl_item)| {
discriminant(impl_item) == discriminant(id) && impl_name == name
@@ -854,10 +858,42 @@ impl Module {
})
.map(|(name, item)| (name.clone(), AssocItem::from(*item)))
.collect();
+
+ if !missing.is_empty() {
+ let self_ty = db.impl_self_ty(impl_def.id).substitute(
+ Interner,
+ &hir_ty::generics::generics(db, impl_def.id.into()).placeholder_subst(db),
+ );
+ let self_ty = if let TyKind::Alias(AliasTy::Projection(projection)) =
+ self_ty.kind(Interner)
+ {
+ db.normalize_projection(
+ projection.clone(),
+ db.trait_environment(impl_def.id.into()),
+ )
+ } else {
+ self_ty
+ };
+ let self_ty_is_guaranteed_unsized = matches!(
+ self_ty.kind(Interner),
+ TyKind::Dyn(..) | TyKind::Slice(..) | TyKind::Str
+ );
+ if self_ty_is_guaranteed_unsized {
+ missing.retain(|(_, assoc_item)| {
+ let assoc_item = match *assoc_item {
+ AssocItem::Function(it) => it.id.into(),
+ AssocItem::Const(it) => it.id.into(),
+ AssocItem::TypeAlias(it) => it.id.into(),
+ };
+ !hir_ty::dyn_compatibility::generics_require_sized_self(db, assoc_item)
+ });
+ }
+ }
+
if !missing.is_empty() {
acc.push(
TraitImplMissingAssocItems {
- impl_: ast_id_map.get(node.ast_id()),
+ impl_: ast_id_map.get(loc.id.value),
file_id,
missing,
}
@@ -880,7 +916,7 @@ impl Module {
&source_map,
);
- for &(_, item) in db.impl_items(impl_def.id).items.iter() {
+ for &(_, item) in impl_def.id.impl_items(db).items.iter() {
AssocItem::from(item).diagnostics(db, acc, style_lints);
}
}
@@ -938,10 +974,10 @@ impl Module {
}
}
-fn macro_call_diagnostics(
- db: &dyn HirDatabase,
+fn macro_call_diagnostics<'db>(
+ db: &'db dyn HirDatabase,
macro_call_id: MacroCallId,
- acc: &mut Vec<AnyDiagnostic>,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
) {
let Some(e) = db.parse_macro_expansion_error(macro_call_id) else {
return;
@@ -978,7 +1014,11 @@ fn macro_call_diagnostics(
}
}
-fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, m: Macro) {
+fn emit_macro_def_diagnostics<'db>(
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
+ m: Macro,
+) {
let id = db.macro_def(m.id);
if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) {
if let Some(e) = expander.mac.err() {
@@ -998,18 +1038,18 @@ fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>
}
}
-fn emit_def_diagnostic(
- db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic>,
+fn emit_def_diagnostic<'db>(
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
diag: &DefDiagnostic,
edition: Edition,
) {
emit_def_diagnostic_(db, acc, &diag.kind, edition)
}
-fn emit_def_diagnostic_(
- db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic>,
+fn emit_def_diagnostic_<'db>(
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
diag: &DefDiagnosticKind,
edition: Edition,
) {
@@ -1044,73 +1084,25 @@ fn emit_def_diagnostic_(
)
}
DefDiagnosticKind::UnresolvedImport { id, index } => {
- let file_id = id.file_id();
- let item_tree = id.item_tree(db);
- let import = &item_tree[id.value];
+ let file_id = id.file_id;
- let use_tree = import.use_tree_to_ast(db, file_id, *index);
+ let use_tree = hir_def::src::use_tree_to_ast(db, *id, *index);
acc.push(
UnresolvedImport { decl: InFile::new(file_id, AstPtr::new(&use_tree)) }.into(),
);
}
- DefDiagnosticKind::UnconfiguredCode { tree, item, cfg, opts } => {
- let item_tree = tree.item_tree(db);
- let ast_id_map = db.ast_id_map(tree.file_id());
- // FIXME: This parses... We could probably store relative ranges for the children things
- // here in the item tree?
- (|| {
- let process_field_list =
- |field_list: Option<_>, idx: ItemTreeFieldId| match field_list? {
- ast::FieldList::RecordFieldList(it) => Some(SyntaxNodePtr::new(
- it.fields().nth(idx.into_raw().into_u32() as usize)?.syntax(),
- )),
- ast::FieldList::TupleFieldList(it) => Some(SyntaxNodePtr::new(
- it.fields().nth(idx.into_raw().into_u32() as usize)?.syntax(),
- )),
- };
- let ptr = match *item {
- AttrOwner::ModItem(it) => {
- ast_id_map.get(it.ast_id(&item_tree)).syntax_node_ptr()
- }
- AttrOwner::TopLevel => ast_id_map.root(),
- AttrOwner::Variant(it) => {
- ast_id_map.get(item_tree[it].ast_id).syntax_node_ptr()
- }
- AttrOwner::Field(FieldParent::EnumVariant(parent), idx) => process_field_list(
- ast_id_map
- .get(item_tree[parent].ast_id)
- .to_node(&db.parse_or_expand(tree.file_id()))
- .field_list(),
- idx,
- )?,
- AttrOwner::Field(FieldParent::Struct(parent), idx) => process_field_list(
- ast_id_map
- .get(item_tree[parent.index()].ast_id)
- .to_node(&db.parse_or_expand(tree.file_id()))
- .field_list(),
- idx,
- )?,
- AttrOwner::Field(FieldParent::Union(parent), idx) => SyntaxNodePtr::new(
- ast_id_map
- .get(item_tree[parent.index()].ast_id)
- .to_node(&db.parse_or_expand(tree.file_id()))
- .record_field_list()?
- .fields()
- .nth(idx.into_raw().into_u32() as usize)?
- .syntax(),
- ),
- };
- acc.push(
- InactiveCode {
- node: InFile::new(tree.file_id(), ptr),
- cfg: cfg.clone(),
- opts: opts.clone(),
- }
- .into(),
- );
- Some(())
- })();
+ DefDiagnosticKind::UnconfiguredCode { ast_id, cfg, opts } => {
+ let ast_id_map = db.ast_id_map(ast_id.file_id);
+ let ptr = ast_id_map.get_erased(ast_id.value);
+ acc.push(
+ InactiveCode {
+ node: InFile::new(ast_id.file_id, ptr),
+ cfg: cfg.clone(),
+ opts: opts.clone(),
+ }
+ .into(),
+ );
}
DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
let (node, precise_location) = precise_macro_call_location(ast, db);
@@ -1267,14 +1259,20 @@ impl TupleField {
Name::new_tuple_field(self.index as usize)
}
- pub fn ty(&self, db: &dyn HirDatabase) -> Type {
- let ty = db.infer(self.owner).tuple_field_access_types[&self.tuple]
+ pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> Type<'db> {
+ let ty = db
+ .infer(self.owner)
+ .tuple_field_access_type(self.tuple)
.as_slice(Interner)
.get(self.index as usize)
.and_then(|arg| arg.ty(Interner))
.cloned()
.unwrap_or_else(|| TyKind::Error.intern(Interner));
- Type { env: db.trait_environment_for_body(self.owner), ty }
+ Type {
+ env: db.trait_environment_for_body(self.owner),
+ ty,
+ _pd: PhantomCovariantLifetime::new(),
+ }
}
}
@@ -1315,7 +1313,7 @@ impl AstNode for FieldSource {
impl Field {
pub fn name(&self, db: &dyn HirDatabase) -> Name {
- db.variant_fields(self.parent.into()).fields()[self.id].name.clone()
+ VariantId::from(self.parent).fields(db).fields()[self.id].name.clone()
}
pub fn index(&self) -> usize {
@@ -1325,7 +1323,7 @@ impl Field {
/// Returns the type as in the signature of the struct (i.e., with
/// placeholder types for type parameters). Only use this in the context of
/// the field definition.
- pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+ pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> Type<'db> {
let var_id = self.parent.into();
let generic_def_id: GenericDefId = match self.parent {
VariantDef::Struct(it) => it.id.into(),
@@ -1338,7 +1336,11 @@ impl Field {
}
// FIXME: Find better API to also handle const generics
- pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator<Item = Type>) -> Type {
+ pub fn ty_with_args<'db>(
+ &self,
+ db: &'db dyn HirDatabase,
+ generics: impl Iterator<Item = Type<'db>>,
+ ) -> Type<'db> {
let var_id = self.parent.into();
let def_id: AdtId = match self.parent {
VariantDef::Struct(it) => it.id.into(),
@@ -1380,7 +1382,7 @@ impl Field {
impl HasVisibility for Field {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
- let variant_data = db.variant_fields(self.parent.into());
+ let variant_data = VariantId::from(self.parent).fields(db);
let visibility = &variant_data.fields()[self.id].visibility;
let parent_id: hir_def::VariantId = self.parent.into();
// FIXME: RawVisibility::Public doesn't need to construct a resolver
@@ -1403,22 +1405,23 @@ impl Struct {
}
pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
- db.variant_fields(self.id.into())
+ self.id
+ .fields(db)
.fields()
.iter()
.map(|(id, _)| Field { parent: self.into(), id })
.collect()
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_def(db, self.id)
}
- pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_def_placeholders(db, self.id)
}
- pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_value_def(db, self.id)
}
@@ -1434,8 +1437,8 @@ impl Struct {
}
}
- fn variant_fields(self, db: &dyn HirDatabase) -> Arc<VariantFields> {
- db.variant_fields(self.id.into())
+ fn variant_fields(self, db: &dyn HirDatabase) -> &VariantFields {
+ self.id.fields(db)
}
pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
@@ -1446,12 +1449,8 @@ impl Struct {
impl HasVisibility for Struct {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
let loc = self.id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- Visibility::resolve(
- db,
- &self.id.resolver(db),
- &item_tree[item_tree[loc.id.value].visibility],
- )
+ let source = loc.source(db);
+ visibility_from_ast(db, self.id, source.map(|src| src.visibility()))
}
}
@@ -1469,20 +1468,20 @@ impl Union {
Module { id: self.id.lookup(db).container }
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_def(db, self.id)
}
- pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_def_placeholders(db, self.id)
}
- pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_value_def(db, self.id)
}
pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
- match db.variant_fields(self.id.into()).shape {
+ match self.id.fields(db).shape {
hir_def::item_tree::FieldsShape::Record => StructKind::Record,
hir_def::item_tree::FieldsShape::Tuple => StructKind::Tuple,
hir_def::item_tree::FieldsShape::Unit => StructKind::Unit,
@@ -1490,7 +1489,8 @@ impl Union {
}
pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
- db.variant_fields(self.id.into())
+ self.id
+ .fields(db)
.fields()
.iter()
.map(|(id, _)| Field { parent: self.into(), id })
@@ -1504,12 +1504,8 @@ impl Union {
impl HasVisibility for Union {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
let loc = self.id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- Visibility::resolve(
- db,
- &self.id.resolver(db),
- &item_tree[item_tree[loc.id.value].visibility],
- )
+ let source = loc.source(db);
+ visibility_from_ast(db, self.id, source.map(|src| src.visibility()))
}
}
@@ -1528,27 +1524,27 @@ impl Enum {
}
pub fn variants(self, db: &dyn HirDatabase) -> Vec<Variant> {
- db.enum_variants(self.id).variants.iter().map(|&(id, _)| Variant { id }).collect()
+ self.id.enum_variants(db).variants.iter().map(|&(id, _, _)| Variant { id }).collect()
}
pub fn num_variants(self, db: &dyn HirDatabase) -> usize {
- db.enum_variants(self.id).variants.len()
+ self.id.enum_variants(db).variants.len()
}
pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprOptions> {
db.enum_signature(self.id).repr
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
Type::from_def(db, self.id)
}
- pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty_placeholders<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
Type::from_def_placeholders(db, self.id)
}
/// The type of the enum variant bodies.
- pub fn variant_body_ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn variant_body_ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
Type::new_for_crate(
self.id.lookup(db).container.krate(),
TyBuilder::builtin(match db.enum_signature(self.id).variant_body_type() {
@@ -1597,12 +1593,8 @@ impl Enum {
impl HasVisibility for Enum {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
let loc = self.id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- Visibility::resolve(
- db,
- &self.id.resolver(db),
- &item_tree[item_tree[loc.id.value].visibility],
- )
+ let source = loc.source(db);
+ visibility_from_ast(db, self.id, source.map(|src| src.visibility()))
}
}
@@ -1627,18 +1619,19 @@ impl Variant {
self.id.lookup(db).parent.into()
}
- pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_value_def(db, self.id)
}
pub fn name(self, db: &dyn HirDatabase) -> Name {
let lookup = self.id.lookup(db);
let enum_ = lookup.parent;
- db.enum_variants(enum_).variants[lookup.index as usize].1.clone()
+ enum_.enum_variants(db).variants[lookup.index as usize].1.clone()
}
pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
- db.variant_fields(self.id.into())
+ self.id
+ .fields(db)
.fields()
.iter()
.map(|(id, _)| Field { parent: self.into(), id })
@@ -1646,7 +1639,7 @@ impl Variant {
}
pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
- match db.variant_fields(self.id.into()).shape {
+ match self.id.fields(db).shape {
hir_def::item_tree::FieldsShape::Record => StructKind::Record,
hir_def::item_tree::FieldsShape::Tuple => StructKind::Tuple,
hir_def::item_tree::FieldsShape::Unit => StructKind::Unit,
@@ -1729,16 +1722,20 @@ impl Adt {
/// Turns this ADT into a type. Any type parameters of the ADT will be
/// turned into unknown types, which is good for e.g. finding the most
/// general set of completions, but will not look very nice when printed.
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
let id = AdtId::from(self);
Type::from_def(db, id)
}
/// Turns this ADT into a type with the given type parameters. This isn't
/// the greatest API, FIXME find a better one.
- pub fn ty_with_args(self, db: &dyn HirDatabase, args: impl Iterator<Item = Type>) -> Type {
+ pub fn ty_with_args<'db>(
+ self,
+ db: &'db dyn HirDatabase,
+ args: impl IntoIterator<Item = Type<'db>>,
+ ) -> Type<'db> {
let id = AdtId::from(self);
- let mut it = args.map(|t| t.ty);
+ let mut it = args.into_iter().map(|t| t.ty);
let ty = TyBuilder::def_ty(db, id.into(), None)
.fill(|x| {
let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
@@ -1869,7 +1866,7 @@ impl DefWithBody {
}
/// Returns the type this def's body has to evaluate to.
- pub fn body_type(self, db: &dyn HirDatabase) -> Type {
+ pub fn body_type(self, db: &dyn HirDatabase) -> Type<'_> {
match self {
DefWithBody::Function(it) => it.ret_type(db),
DefWithBody::Static(it) => it.ty(db),
@@ -1902,10 +1899,10 @@ impl DefWithBody {
}
}
- pub fn diagnostics(
+ pub fn diagnostics<'db>(
self,
- db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic>,
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
style_lints: bool,
) {
let krate = self.module(db).id.krate();
@@ -1932,7 +1929,7 @@ impl DefWithBody {
expr_store_diagnostics(db, acc, &source_map);
let infer = db.infer(self.into());
- for d in &infer.diagnostics {
+ for d in infer.diagnostics() {
acc.extend(AnyDiagnostic::inference_diagnostic(
db,
self.into(),
@@ -2039,7 +2036,7 @@ impl DefWithBody {
)
}
let mol = &borrowck_result.mutability_of_locals;
- for (binding_id, binding_data) in body.bindings.iter() {
+ for (binding_id, binding_data) in body.bindings() {
if binding_data.problems.is_some() {
// We should report specific diagnostics for these problems, not `need-mut` and `unused-mut`.
continue;
@@ -2135,7 +2132,7 @@ impl DefWithBody {
fn expr_store_diagnostics(
db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic>,
+ acc: &mut Vec<AnyDiagnostic<'_>>,
source_map: &ExpressionStoreSourceMap,
) {
for diag in source_map.diagnostics() {
@@ -2200,11 +2197,11 @@ impl Function {
db.function_signature(self.id).name.clone()
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_value_def(db, self.id)
}
- pub fn fn_ptr_type(self, db: &dyn HirDatabase) -> Type {
+ pub fn fn_ptr_type(self, db: &dyn HirDatabase) -> Type<'_> {
let resolver = self.id.resolver(db);
let substs = TyBuilder::placeholder_subst(db, self.id);
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
@@ -2213,7 +2210,7 @@ impl Function {
}
/// Get this function's return type
- pub fn ret_type(self, db: &dyn HirDatabase) -> Type {
+ pub fn ret_type(self, db: &dyn HirDatabase) -> Type<'_> {
let resolver = self.id.resolver(db);
let substs = TyBuilder::placeholder_subst(db, self.id);
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
@@ -2222,11 +2219,11 @@ impl Function {
}
// FIXME: Find better API to also handle const generics
- pub fn ret_type_with_args(
+ pub fn ret_type_with_args<'db>(
self,
- db: &dyn HirDatabase,
- generics: impl Iterator<Item = Type>,
- ) -> Type {
+ db: &'db dyn HirDatabase,
+ generics: impl Iterator<Item = Type<'db>>,
+ ) -> Type<'db> {
let resolver = self.id.resolver(db);
let parent_id: Option<GenericDefId> = match self.id.lookup(db).container {
ItemContainerId::ImplId(it) => Some(it.into()),
@@ -2251,7 +2248,7 @@ impl Function {
Type::new_with_resolver_inner(db, &resolver, ty)
}
- pub fn async_ret_type(self, db: &dyn HirDatabase) -> Option<Type> {
+ pub fn async_ret_type<'db>(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
if !self.is_async(db) {
return None;
}
@@ -2275,7 +2272,7 @@ impl Function {
self.has_self_param(db).then_some(SelfParam { func: self.id })
}
- pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param> {
+ pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param<'_>> {
let environment = db.trait_environment(self.id.into());
let substs = TyBuilder::placeholder_subst(db, self.id);
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
@@ -2284,7 +2281,11 @@ impl Function {
.iter()
.enumerate()
.map(|(idx, ty)| {
- let ty = Type { env: environment.clone(), ty: ty.clone() };
+ let ty = Type {
+ env: environment.clone(),
+ ty: ty.clone(),
+ _pd: PhantomCovariantLifetime::new(),
+ };
Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
})
.collect()
@@ -2294,12 +2295,12 @@ impl Function {
db.function_signature(self.id).params.len()
}
- pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param>> {
+ pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param<'_>>> {
self.self_param(db)?;
Some(self.params_without_self(db))
}
- pub fn params_without_self(self, db: &dyn HirDatabase) -> Vec<Param> {
+ pub fn params_without_self(self, db: &dyn HirDatabase) -> Vec<Param<'_>> {
let environment = db.trait_environment(self.id.into());
let substs = TyBuilder::placeholder_subst(db, self.id);
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
@@ -2310,18 +2311,22 @@ impl Function {
.enumerate()
.skip(skip)
.map(|(idx, ty)| {
- let ty = Type { env: environment.clone(), ty: ty.clone() };
+ let ty = Type {
+ env: environment.clone(),
+ ty: ty.clone(),
+ _pd: PhantomCovariantLifetime::new(),
+ };
Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
})
.collect()
}
// FIXME: Find better API to also handle const generics
- pub fn params_without_self_with_args(
+ pub fn params_without_self_with_args<'db>(
self,
- db: &dyn HirDatabase,
- generics: impl Iterator<Item = Type>,
- ) -> Vec<Param> {
+ db: &'db dyn HirDatabase,
+ generics: impl Iterator<Item = Type<'db>>,
+ ) -> Vec<Param<'db>> {
let environment = db.trait_environment(self.id.into());
let parent_id: Option<GenericDefId> = match self.id.lookup(db).container {
ItemContainerId::ImplId(it) => Some(it.into()),
@@ -2356,7 +2361,11 @@ impl Function {
.enumerate()
.skip(skip)
.map(|(idx, ty)| {
- let ty = Type { env: environment.clone(), ty: ty.clone() };
+ let ty = Type {
+ env: environment.clone(),
+ ty: ty.clone(),
+ _pd: PhantomCovariantLifetime::new(),
+ };
Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
})
.collect()
@@ -2386,7 +2395,8 @@ impl Function {
return true;
}
- let Some(impl_traits) = self.ret_type(db).as_impl_traits(db) else { return false };
+ let ret_type = self.ret_type(db);
+ let Some(impl_traits) = ret_type.as_impl_traits(db) else { return false };
let Some(future_trait_id) = LangItem::Future.resolve_trait(db, self.ty(db).env.krate)
else {
return false;
@@ -2529,14 +2539,14 @@ impl From<hir_ty::Mutability> for Access {
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
-pub struct Param {
+pub struct Param<'db> {
func: Callee,
/// The index in parameter list, including self parameter.
idx: usize,
- ty: Type,
+ ty: Type<'db>,
}
-impl Param {
+impl<'db> Param<'db> {
pub fn parent_fn(&self) -> Option<Function> {
match self.func {
Callee::Def(CallableDefId::FunctionId(f)) => Some(f.into()),
@@ -2552,7 +2562,7 @@ impl Param {
self.idx
}
- pub fn ty(&self) -> &Type {
+ pub fn ty(&self) -> &Type<'db> {
&self.ty
}
@@ -2619,17 +2629,21 @@ impl SelfParam {
Function::from(self.func)
}
- pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+ pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> Type<'db> {
let substs = TyBuilder::placeholder_subst(db, self.func);
let callable_sig =
db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
let environment = db.trait_environment(self.func.into());
let ty = callable_sig.params()[0].clone();
- Type { env: environment, ty }
+ Type { env: environment, ty, _pd: PhantomCovariantLifetime::new() }
}
// FIXME: Find better API to also handle const generics
- pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator<Item = Type>) -> Type {
+ pub fn ty_with_args<'db>(
+ &self,
+ db: &'db dyn HirDatabase,
+ generics: impl Iterator<Item = Type<'db>>,
+ ) -> Type<'db> {
let parent_id: GenericDefId = match self.func.lookup(db).container {
ItemContainerId::ImplId(it) => it.into(),
ItemContainerId::TraitId(it) => it.into(),
@@ -2654,13 +2668,13 @@ impl SelfParam {
db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
let environment = db.trait_environment(self.func.into());
let ty = callable_sig.params()[0].clone();
- Type { env: environment, ty }
+ Type { env: environment, ty, _pd: PhantomCovariantLifetime::new() }
}
}
impl HasVisibility for Function {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
- db.function_visibility(self.id)
+ db.assoc_visibility(self.id.into())
}
}
@@ -2676,10 +2690,9 @@ impl ExternCrateDecl {
pub fn resolved_crate(self, db: &dyn HirDatabase) -> Option<Crate> {
let loc = self.id.lookup(db);
- let item_tree = loc.id.item_tree(db);
let krate = loc.container.krate();
- let name = &item_tree[loc.id.value].name;
- if *name == sym::self_ {
+ let name = self.name(db);
+ if name == sym::self_ {
Some(krate.into())
} else {
krate.data(db).dependencies.iter().find_map(|dep| {
@@ -2690,25 +2703,29 @@ impl ExternCrateDecl {
pub fn name(self, db: &dyn HirDatabase) -> Name {
let loc = self.id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- item_tree[loc.id.value].name.clone()
+ let source = loc.source(db);
+ as_name_opt(source.value.name_ref())
}
pub fn alias(self, db: &dyn HirDatabase) -> Option<ImportAlias> {
let loc = self.id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- item_tree[loc.id.value].alias.clone()
+ let source = loc.source(db);
+ let rename = source.value.rename()?;
+ if let Some(name) = rename.name() {
+ Some(ImportAlias::Alias(name.as_name()))
+ } else if rename.underscore_token().is_some() {
+ Some(ImportAlias::Underscore)
+ } else {
+ None
+ }
}
/// Returns the name under which this crate is made accessible, taking `_` into account.
pub fn alias_or_name(self, db: &dyn HirDatabase) -> Option<Name> {
- let loc = self.id.lookup(db);
- let item_tree = loc.id.item_tree(db);
-
- match &item_tree[loc.id.value].alias {
+ match self.alias(db) {
Some(ImportAlias::Underscore) => None,
- Some(ImportAlias::Alias(alias)) => Some(alias.clone()),
- None => Some(item_tree[loc.id.value].name.clone()),
+ Some(ImportAlias::Alias(alias)) => Some(alias),
+ None => Some(self.name(db)),
}
}
}
@@ -2716,12 +2733,8 @@ impl ExternCrateDecl {
impl HasVisibility for ExternCrateDecl {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
let loc = self.id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- Visibility::resolve(
- db,
- &self.id.resolver(db),
- &item_tree[item_tree[loc.id.value].visibility],
- )
+ let source = loc.source(db);
+ visibility_from_ast(db, self.id, source.map(|src| src.visibility()))
}
}
@@ -2743,7 +2756,7 @@ impl Const {
self.source(db)?.value.body()
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_value_def(db, self.id)
}
@@ -2756,7 +2769,7 @@ impl Const {
impl HasVisibility for Const {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
- db.const_visibility(self.id)
+ db.assoc_visibility(self.id.into())
}
}
@@ -2820,7 +2833,7 @@ impl Static {
self.source(db)?.value.body()
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_value_def(db, self.id)
}
@@ -2841,12 +2854,8 @@ impl Static {
impl HasVisibility for Static {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
let loc = self.id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- Visibility::resolve(
- db,
- &self.id.resolver(db),
- &item_tree[item_tree[loc.id.value].visibility],
- )
+ let source = loc.source(db);
+ visibility_from_ast(db, self.id, source.map(|src| src.visibility()))
}
}
@@ -2879,7 +2888,7 @@ impl Trait {
}
pub fn function(self, db: &dyn HirDatabase, name: impl PartialEq<Name>) -> Option<Function> {
- db.trait_items(self.id).items.iter().find(|(n, _)| name == *n).and_then(|&(_, it)| match it
+ self.id.trait_items(db).items.iter().find(|(n, _)| name == *n).and_then(|&(_, it)| match it
{
AssocItemId::FunctionId(id) => Some(Function { id }),
_ => None,
@@ -2887,7 +2896,7 @@ impl Trait {
}
pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
- db.trait_items(self.id).items.iter().map(|(_name, it)| (*it).into()).collect()
+ self.id.trait_items(db).items.iter().map(|(_name, it)| (*it).into()).collect()
}
pub fn items_with_supertraits(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
@@ -2935,11 +2944,7 @@ impl Trait {
}
fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId<ast::Item>, MacroCallId)]> {
- db.trait_items(self.id)
- .macro_calls
- .as_ref()
- .map(|it| it.as_ref().clone().into_boxed_slice())
- .unwrap_or_default()
+ self.id.trait_items(db).macro_calls.to_vec().into_boxed_slice()
}
/// `#[rust_analyzer::completions(...)]` mode.
@@ -2951,12 +2956,8 @@ impl Trait {
impl HasVisibility for Trait {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
let loc = self.id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- Visibility::resolve(
- db,
- &self.id.resolver(db),
- &item_tree[item_tree[loc.id.value].visibility],
- )
+ let source = loc.source(db);
+ visibility_from_ast(db, self.id, source.map(|src| src.visibility()))
}
}
@@ -2978,12 +2979,8 @@ impl TraitAlias {
impl HasVisibility for TraitAlias {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
let loc = self.id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- Visibility::resolve(
- db,
- &self.id.resolver(db),
- &item_tree[item_tree[loc.id.value].visibility],
- )
+ let source = loc.source(db);
+ visibility_from_ast(db, self.id, source.map(|src| src.visibility()))
}
}
@@ -3006,11 +3003,11 @@ impl TypeAlias {
Module { id: self.id.module(db) }
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_def(db, self.id)
}
- pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_def_placeholders(db, self.id)
}
@@ -3021,7 +3018,7 @@ impl TypeAlias {
impl HasVisibility for TypeAlias {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
- db.type_alias_visibility(self.id)
+ db.assoc_visibility(self.id.into())
}
}
@@ -3051,11 +3048,18 @@ pub struct BuiltinType {
}
impl BuiltinType {
+ // Constructors are added on demand, feel free to add more.
pub fn str() -> BuiltinType {
BuiltinType { inner: hir_def::builtin_type::BuiltinType::Str }
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn i32() -> BuiltinType {
+ BuiltinType {
+ inner: hir_def::builtin_type::BuiltinType::Int(hir_ty::primitive::BuiltinInt::I32),
+ }
+ }
+
+ pub fn ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
let core = Crate::core(db).map(|core| core.id).unwrap_or_else(|| db.all_crates()[0]);
Type::new_for_crate(core, TyBuilder::builtin(self.inner))
}
@@ -3131,25 +3135,23 @@ impl Macro {
match self.id {
MacroId::Macro2Id(id) => {
let loc = id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- item_tree[loc.id.value].name.clone()
+ let source = loc.source(db);
+ as_name_opt(source.value.name())
}
MacroId::MacroRulesId(id) => {
let loc = id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- item_tree[loc.id.value].name.clone()
+ let source = loc.source(db);
+ as_name_opt(source.value.name())
}
MacroId::ProcMacroId(id) => {
let loc = id.lookup(db);
- let item_tree = loc.id.item_tree(db);
+ let source = loc.source(db);
match loc.kind {
ProcMacroKind::CustomDerive => db
.attrs(id.into())
.parse_proc_macro_derive()
- .map_or_else(|| item_tree[loc.id.value].name.clone(), |(it, _)| it),
- ProcMacroKind::Bang | ProcMacroKind::Attr => {
- item_tree[loc.id.value].name.clone()
- }
+ .map_or_else(|| as_name_opt(source.value.name()), |(it, _)| it),
+ ProcMacroKind::Bang | ProcMacroKind::Attr => as_name_opt(source.value.name()),
}
}
}
@@ -3220,7 +3222,8 @@ impl Macro {
}
}
- pub fn is_asm_or_global_asm(&self, db: &dyn HirDatabase) -> bool {
+ /// Is this `asm!()`, or a variant of it (e.g. `global_asm!()`)?
+ pub fn is_asm_like(&self, db: &dyn HirDatabase) -> bool {
match self.id {
MacroId::Macro2Id(it) => {
matches!(it.lookup(db).expander, MacroExpander::BuiltIn(m) if m.is_asm())
@@ -3246,12 +3249,8 @@ impl HasVisibility for Macro {
match self.id {
MacroId::Macro2Id(id) => {
let loc = id.lookup(db);
- let item_tree = loc.id.item_tree(db);
- Visibility::resolve(
- db,
- &id.resolver(db),
- &item_tree[item_tree[loc.id.value].visibility],
- )
+ let source = loc.source(db);
+ visibility_from_ast(db, id, source.map(|src| src.visibility()))
}
MacroId::MacroRulesId(_) => Visibility::Public,
MacroId::ProcMacroId(_) => Visibility::Public,
@@ -3405,7 +3404,7 @@ fn as_assoc_item<'db, ID, DEF, LOC>(
where
ID: Lookup<Database = dyn DefDatabase, Data = AssocItemLoc<LOC>>,
DEF: From<ID>,
- LOC: ItemTreeNode,
+ LOC: AstIdNode,
{
match id.lookup(db).container {
ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) => Some(ctor(DEF::from(id))),
@@ -3421,7 +3420,7 @@ fn as_extern_assoc_item<'db, ID, DEF, LOC>(
where
ID: Lookup<Database = dyn DefDatabase, Data = AssocItemLoc<LOC>>,
DEF: From<ID>,
- LOC: ItemTreeNode,
+ LOC: AstIdNode,
{
match id.lookup(db).container {
ItemContainerId::ExternBlockId(_) => Some(ctor(DEF::from(id))),
@@ -3523,7 +3522,7 @@ impl AssocItem {
}
}
- pub fn implementing_ty(self, db: &dyn HirDatabase) -> Option<Type> {
+ pub fn implementing_ty(self, db: &dyn HirDatabase) -> Option<Type<'_>> {
match self.container(db) {
AssocItemContainer::Impl(i) => Some(i.self_ty(db)),
_ => None,
@@ -3551,10 +3550,10 @@ impl AssocItem {
}
}
- pub fn diagnostics(
+ pub fn diagnostics<'db>(
self,
- db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic>,
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
style_lints: bool,
) {
match self {
@@ -3676,12 +3675,12 @@ impl GenericDef {
}
}
- pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
+ pub fn diagnostics<'db>(self, db: &'db dyn HirDatabase, acc: &mut Vec<AnyDiagnostic<'db>>) {
let def = self.id();
let generics = db.generic_params(def);
- if generics.is_empty() && generics.no_predicates() {
+ if generics.is_empty() && generics.has_no_predicates() {
return;
}
@@ -3741,18 +3740,19 @@ impl GenericDef {
// We cannot call this `Substitution` unfortunately...
#[derive(Debug)]
-pub struct GenericSubstitution {
+pub struct GenericSubstitution<'db> {
def: GenericDefId,
subst: Substitution,
env: Arc<TraitEnvironment>,
+ _pd: PhantomCovariantLifetime<'db>,
}
-impl GenericSubstitution {
+impl<'db> GenericSubstitution<'db> {
fn new(def: GenericDefId, subst: Substitution, env: Arc<TraitEnvironment>) -> Self {
- Self { def, subst, env }
+ Self { def, subst, env, _pd: PhantomCovariantLifetime::new() }
}
- pub fn types(&self, db: &dyn HirDatabase) -> Vec<(Symbol, Type)> {
+ pub fn types(&self, db: &'db dyn HirDatabase) -> Vec<(Symbol, Type<'db>)> {
let container = match self.def {
GenericDefId::ConstId(id) => Some(id.lookup(db).container),
GenericDefId::FunctionId(id) => Some(id.lookup(db).container),
@@ -3795,7 +3795,10 @@ impl GenericSubstitution {
container_params
.chain(self_params)
.filter_map(|(ty, name)| {
- Some((name?.symbol().clone(), Type { ty, env: self.env.clone() }))
+ Some((
+ name?.symbol().clone(),
+ Type { ty, env: self.env.clone(), _pd: PhantomCovariantLifetime::new() },
+ ))
})
.collect()
}
@@ -3898,7 +3901,7 @@ impl Local {
self.parent(db).module(db)
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
let def = self.parent;
let infer = db.infer(def);
let ty = infer[self.binding_id].clone();
@@ -4160,6 +4163,10 @@ impl TypeParam {
self.merge().name(db)
}
+ pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
+ self.id.parent().into()
+ }
+
pub fn module(self, db: &dyn HirDatabase) -> Module {
self.id.parent().module(db).into()
}
@@ -4175,7 +4182,7 @@ impl TypeParam {
}
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
let resolver = self.id.parent().resolver(db);
let ty =
TyKind::Placeholder(hir_ty::to_placeholder_idx(db, self.id.into())).intern(Interner);
@@ -4197,7 +4204,7 @@ impl TypeParam {
.collect()
}
- pub fn default(self, db: &dyn HirDatabase) -> Option<Type> {
+ pub fn default(self, db: &dyn HirDatabase) -> Option<Type<'_>> {
let ty = generic_arg_from_param(db, self.id.into())?;
let resolver = self.id.parent().resolver(db);
match ty.data(Interner) {
@@ -4262,7 +4269,7 @@ impl ConstParam {
self.id.parent().into()
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::new(db, self.id.parent(), db.const_param_ty(self.id))
}
@@ -4319,7 +4326,7 @@ impl TypeOrConstParam {
}
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
match self.split(db) {
Either::Left(it) => it.ty(db),
Either::Right(it) => it.ty(db),
@@ -4364,7 +4371,10 @@ impl Impl {
module.id.def_map(db)[module.id.local_id].scope.impls().map(Into::into).collect()
}
- pub fn all_for_type(db: &dyn HirDatabase, Type { ty, env }: Type) -> Vec<Impl> {
+ pub fn all_for_type<'db>(
+ db: &'db dyn HirDatabase,
+ Type { ty, env, _pd: _ }: Type<'db>,
+ ) -> Vec<Impl> {
let def_crates = match method_resolution::def_crates(db, &ty, env.krate) {
Some(def_crates) => def_crates,
None => return Vec::new(),
@@ -4449,14 +4459,14 @@ impl Impl {
Some(Trait { id })
}
- pub fn trait_ref(self, db: &dyn HirDatabase) -> Option<TraitRef> {
+ pub fn trait_ref(self, db: &dyn HirDatabase) -> Option<TraitRef<'_>> {
let substs = TyBuilder::placeholder_subst(db, self.id);
let trait_ref = db.impl_trait(self.id)?.substitute(Interner, &substs);
let resolver = self.id.resolver(db);
Some(TraitRef::new_with_resolver(db, &resolver, trait_ref))
}
- pub fn self_ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn self_ty(self, db: &dyn HirDatabase) -> Type<'_> {
let resolver = self.id.resolver(db);
let substs = TyBuilder::placeholder_subst(db, self.id);
let ty = db.impl_self_ty(self.id).substitute(Interner, &substs);
@@ -4464,7 +4474,7 @@ impl Impl {
}
pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
- db.impl_items(self.id).items.iter().map(|&(_, it)| it.into()).collect()
+ self.id.impl_items(db).items.iter().map(|&(_, it)| it.into()).collect()
}
pub fn is_negative(self, db: &dyn HirDatabase) -> bool {
@@ -4513,30 +4523,27 @@ impl Impl {
}
fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId<ast::Item>, MacroCallId)]> {
- db.impl_items(self.id)
- .macro_calls
- .as_ref()
- .map(|it| it.as_ref().clone().into_boxed_slice())
- .unwrap_or_default()
+ self.id.impl_items(db).macro_calls.to_vec().into_boxed_slice()
}
}
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct TraitRef {
+pub struct TraitRef<'db> {
env: Arc<TraitEnvironment>,
trait_ref: hir_ty::TraitRef,
+ _pd: PhantomCovariantLifetime<'db>,
}
-impl TraitRef {
+impl<'db> TraitRef<'db> {
pub(crate) fn new_with_resolver(
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
resolver: &Resolver<'_>,
trait_ref: hir_ty::TraitRef,
- ) -> TraitRef {
+ ) -> Self {
let env = resolver
.generic_def()
.map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
- TraitRef { env, trait_ref }
+ TraitRef { env, trait_ref, _pd: PhantomCovariantLifetime::new() }
}
pub fn trait_(&self) -> Trait {
@@ -4544,21 +4551,21 @@ impl TraitRef {
Trait { id }
}
- pub fn self_ty(&self) -> Type {
+ pub fn self_ty(&self) -> Type<'_> {
let ty = self.trait_ref.self_type_parameter(Interner);
- Type { env: self.env.clone(), ty }
+ Type { env: self.env.clone(), ty, _pd: PhantomCovariantLifetime::new() }
}
/// Returns `idx`-th argument of this trait reference if it is a type argument. Note that the
/// first argument is the `Self` type.
- pub fn get_type_argument(&self, idx: usize) -> Option<Type> {
+ pub fn get_type_argument(&self, idx: usize) -> Option<Type<'db>> {
self.trait_ref
.substitution
.as_slice(Interner)
.get(idx)
.and_then(|arg| arg.ty(Interner))
.cloned()
- .map(|ty| Type { env: self.env.clone(), ty })
+ .map(|ty| Type { env: self.env.clone(), ty, _pd: PhantomCovariantLifetime::new() })
}
}
@@ -4606,7 +4613,7 @@ impl Closure {
.collect()
}
- pub fn capture_types(&self, db: &dyn HirDatabase) -> Vec<Type> {
+ pub fn capture_types<'db>(&self, db: &'db dyn HirDatabase) -> Vec<Type<'db>> {
let owner = db.lookup_intern_closure((self.id).into()).0;
let infer = &db.infer(owner);
let (captures, _) = infer.closure_info(&self.id);
@@ -4615,6 +4622,7 @@ impl Closure {
.map(|capture| Type {
env: db.trait_environment_for_body(owner),
ty: capture.ty(&self.subst),
+ _pd: PhantomCovariantLifetime::new(),
})
.collect()
}
@@ -4746,40 +4754,45 @@ impl CaptureUsageSource {
}
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct Type {
+pub struct Type<'db> {
env: Arc<TraitEnvironment>,
ty: Ty,
+ _pd: PhantomCovariantLifetime<'db>,
}
-impl Type {
- pub(crate) fn new_with_resolver(db: &dyn HirDatabase, resolver: &Resolver<'_>, ty: Ty) -> Type {
+impl<'db> Type<'db> {
+ pub(crate) fn new_with_resolver(
+ db: &'db dyn HirDatabase,
+ resolver: &Resolver<'_>,
+ ty: Ty,
+ ) -> Self {
Type::new_with_resolver_inner(db, resolver, ty)
}
pub(crate) fn new_with_resolver_inner(
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
resolver: &Resolver<'_>,
ty: Ty,
- ) -> Type {
+ ) -> Self {
let environment = resolver
.generic_def()
.map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
- Type { env: environment, ty }
+ Type { env: environment, ty, _pd: PhantomCovariantLifetime::new() }
}
- pub(crate) fn new_for_crate(krate: base_db::Crate, ty: Ty) -> Type {
- Type { env: TraitEnvironment::empty(krate), ty }
+ pub(crate) fn new_for_crate(krate: base_db::Crate, ty: Ty) -> Self {
+ Type { env: TraitEnvironment::empty(krate), ty, _pd: PhantomCovariantLifetime::new() }
}
- fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
+ fn new(db: &'db dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Self {
let resolver = lexical_env.resolver(db);
let environment = resolver
.generic_def()
.map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
- Type { env: environment, ty }
+ Type { env: environment, ty, _pd: PhantomCovariantLifetime::new() }
}
- fn from_def(db: &dyn HirDatabase, def: impl Into<TyDefId> + HasResolver) -> Type {
+ fn from_def(db: &'db dyn HirDatabase, def: impl Into<TyDefId> + HasResolver) -> Self {
let ty = db.ty(def.into());
let substs = TyBuilder::unknown_subst(
db,
@@ -4792,7 +4805,10 @@ impl Type {
Type::new(db, def, ty.substitute(Interner, &substs))
}
- fn from_def_placeholders(db: &dyn HirDatabase, def: impl Into<TyDefId> + HasResolver) -> Type {
+ fn from_def_placeholders(
+ db: &'db dyn HirDatabase,
+ def: impl Into<TyDefId> + HasResolver,
+ ) -> Self {
let ty = db.ty(def.into());
let substs = TyBuilder::placeholder_subst(
db,
@@ -4805,7 +4821,10 @@ impl Type {
Type::new(db, def, ty.substitute(Interner, &substs))
}
- fn from_value_def(db: &dyn HirDatabase, def: impl Into<ValueTyDefId> + HasResolver) -> Type {
+ fn from_value_def(
+ db: &'db dyn HirDatabase,
+ def: impl Into<ValueTyDefId> + HasResolver,
+ ) -> Self {
let Some(ty) = db.value_ty(def.into()) else {
return Type::new(db, def, TyKind::Error.intern(Interner));
};
@@ -4825,13 +4844,17 @@ impl Type {
Type::new(db, def, ty.substitute(Interner, &substs))
}
- pub fn new_slice(ty: Type) -> Type {
- Type { env: ty.env, ty: TyBuilder::slice(ty.ty) }
+ pub fn new_slice(ty: Self) -> Self {
+ Type { env: ty.env, ty: TyBuilder::slice(ty.ty), _pd: PhantomCovariantLifetime::new() }
}
- pub fn new_tuple(krate: base_db::Crate, tys: &[Type]) -> Type {
+ pub fn new_tuple(krate: base_db::Crate, tys: &[Self]) -> Self {
let tys = tys.iter().map(|it| it.ty.clone());
- Type { env: TraitEnvironment::empty(krate), ty: TyBuilder::tuple_with(tys) }
+ Type {
+ env: TraitEnvironment::empty(krate),
+ ty: TyBuilder::tuple_with(tys),
+ _pd: PhantomCovariantLifetime::new(),
+ }
}
pub fn is_unit(&self) -> bool {
@@ -4858,7 +4881,7 @@ impl Type {
matches!(self.ty.kind(Interner), TyKind::Ref(..))
}
- pub fn contains_reference(&self, db: &dyn HirDatabase) -> bool {
+ pub fn contains_reference(&self, db: &'db dyn HirDatabase) -> bool {
return go(db, self.env.krate, &self.ty);
fn go(db: &dyn HirDatabase, krate: base_db::Crate, ty: &Ty) -> bool {
@@ -4902,13 +4925,13 @@ impl Type {
}
}
- pub fn as_reference(&self) -> Option<(Type, Mutability)> {
+ pub fn as_reference(&self) -> Option<(Type<'db>, Mutability)> {
let (ty, _lt, m) = self.ty.as_reference()?;
let m = Mutability::from_mutable(matches!(m, hir_ty::Mutability::Mut));
Some((self.derived(ty.clone()), m))
}
- pub fn add_reference(&self, mutability: Mutability) -> Type {
+ pub fn add_reference(&self, mutability: Mutability) -> Self {
let ty_mutability = match mutability {
Mutability::Shared => hir_ty::Mutability::Not,
Mutability::Mut => hir_ty::Mutability::Mut,
@@ -4944,25 +4967,25 @@ impl Type {
matches!(self.ty.kind(Interner), TyKind::Tuple(..))
}
- pub fn remove_ref(&self) -> Option<Type> {
+ pub fn remove_ref(&self) -> Option<Type<'db>> {
match &self.ty.kind(Interner) {
TyKind::Ref(.., ty) => Some(self.derived(ty.clone())),
_ => None,
}
}
- pub fn as_slice(&self) -> Option<Type> {
+ pub fn as_slice(&self) -> Option<Type<'db>> {
match &self.ty.kind(Interner) {
TyKind::Slice(ty) => Some(self.derived(ty.clone())),
_ => None,
}
}
- pub fn strip_references(&self) -> Type {
+ pub fn strip_references(&self) -> Self {
self.derived(self.ty.strip_references().clone())
}
- pub fn strip_reference(&self) -> Type {
+ pub fn strip_reference(&self) -> Self {
self.derived(self.ty.strip_reference().clone())
}
@@ -4973,7 +4996,7 @@ impl Type {
/// Checks that particular type `ty` implements `std::future::IntoFuture` or
/// `std::future::Future` and returns the `Output` associated type.
/// This function is used in `.await` syntax completion.
- pub fn into_future_output(&self, db: &dyn HirDatabase) -> Option<Type> {
+ pub fn into_future_output(&self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
let trait_ = LangItem::IntoFutureIntoFuture
.resolve_function(db, self.env.krate)
.and_then(|into_future_fn| {
@@ -4990,26 +5013,26 @@ impl Type {
}
let output_assoc_type =
- db.trait_items(trait_).associated_type_by_name(&Name::new_symbol_root(sym::Output))?;
+ trait_.trait_items(db).associated_type_by_name(&Name::new_symbol_root(sym::Output))?;
self.normalize_trait_assoc_type(db, &[], output_assoc_type.into())
}
/// This does **not** resolve `IntoFuture`, only `Future`.
- pub fn future_output(self, db: &dyn HirDatabase) -> Option<Type> {
+ pub fn future_output(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
let future_output = LangItem::FutureOutput.resolve_type_alias(db, self.env.krate)?;
self.normalize_trait_assoc_type(db, &[], future_output.into())
}
/// This does **not** resolve `IntoIterator`, only `Iterator`.
- pub fn iterator_item(self, db: &dyn HirDatabase) -> Option<Type> {
+ pub fn iterator_item(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
let iterator_trait = LangItem::Iterator.resolve_trait(db, self.env.krate)?;
- let iterator_item = db
- .trait_items(iterator_trait)
+ let iterator_item = iterator_trait
+ .trait_items(db)
.associated_type_by_name(&Name::new_symbol_root(sym::Item))?;
self.normalize_trait_assoc_type(db, &[], iterator_item.into())
}
- pub fn impls_iterator(self, db: &dyn HirDatabase) -> bool {
+ pub fn impls_iterator(self, db: &'db dyn HirDatabase) -> bool {
let Some(iterator_trait) = LangItem::Iterator.resolve_trait(db, self.env.krate) else {
return false;
};
@@ -5019,7 +5042,7 @@ impl Type {
}
/// Resolves the projection `<Self as IntoIterator>::IntoIter` and returns the resulting type
- pub fn into_iterator_iter(self, db: &dyn HirDatabase) -> Option<Type> {
+ pub fn into_iterator_iter(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
let trait_ = LangItem::IntoIterIntoIter.resolve_function(db, self.env.krate).and_then(
|into_iter_fn| {
let assoc_item = as_assoc_item(db, AssocItem::Function, into_iter_fn)?;
@@ -5034,8 +5057,8 @@ impl Type {
return None;
}
- let into_iter_assoc_type = db
- .trait_items(trait_)
+ let into_iter_assoc_type = trait_
+ .trait_items(db)
.associated_type_by_name(&Name::new_symbol_root(sym::IntoIter))?;
self.normalize_trait_assoc_type(db, &[], into_iter_assoc_type.into())
}
@@ -5044,7 +5067,7 @@ impl Type {
///
/// This function can be used to check if a particular type is callable, since FnOnce is a
/// supertrait of Fn and FnMut, so all callable types implements at least FnOnce.
- pub fn impls_fnonce(&self, db: &dyn HirDatabase) -> bool {
+ pub fn impls_fnonce(&self, db: &'db dyn HirDatabase) -> bool {
let fnonce_trait = match FnTrait::FnOnce.get_id(db, self.env.krate) {
Some(it) => it,
None => return false,
@@ -5056,7 +5079,7 @@ impl Type {
}
// FIXME: Find better API that also handles const generics
- pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool {
+ pub fn impls_trait(&self, db: &'db dyn HirDatabase, trait_: Trait, args: &[Type<'db>]) -> bool {
let mut it = args.iter().map(|t| t.ty.clone());
let trait_ref = TyBuilder::trait_ref(db, trait_.id)
.push(self.ty.clone())
@@ -5084,10 +5107,10 @@ impl Type {
pub fn normalize_trait_assoc_type(
&self,
- db: &dyn HirDatabase,
- args: &[Type],
+ db: &'db dyn HirDatabase,
+ args: &[Type<'db>],
alias: TypeAlias,
- ) -> Option<Type> {
+ ) -> Option<Type<'db>> {
let mut args = args.iter();
let trait_id = match alias.id.lookup(db).container {
ItemContainerId::TraitId(id) => id,
@@ -5111,14 +5134,14 @@ impl Type {
if ty.is_unknown() { None } else { Some(self.derived(ty)) }
}
- pub fn is_copy(&self, db: &dyn HirDatabase) -> bool {
+ pub fn is_copy(&self, db: &'db dyn HirDatabase) -> bool {
let Some(copy_trait) = LangItem::Copy.resolve_trait(db, self.env.krate) else {
return false;
};
self.impls_trait(db, copy_trait.into(), &[])
}
- pub fn as_callable(&self, db: &dyn HirDatabase) -> Option<Callable> {
+ pub fn as_callable(&self, db: &'db dyn HirDatabase) -> Option<Callable<'db>> {
let callee = match self.ty.kind(Interner) {
TyKind::Closure(id, subst) => Callee::Closure(*id, subst.clone()),
TyKind::Function(_) => Callee::FnPtr,
@@ -5172,7 +5195,7 @@ impl Type {
matches!(self.ty.kind(Interner), TyKind::Array(..))
}
- pub fn is_packed(&self, db: &dyn HirDatabase) -> bool {
+ pub fn is_packed(&self, db: &'db dyn HirDatabase) -> bool {
let adt_id = match *self.ty.kind(Interner) {
TyKind::Adt(hir_ty::AdtId(adt_id), ..) => adt_id,
_ => return false,
@@ -5189,7 +5212,7 @@ impl Type {
matches!(self.ty.kind(Interner), TyKind::Raw(..))
}
- pub fn remove_raw_ptr(&self) -> Option<Type> {
+ pub fn remove_raw_ptr(&self) -> Option<Type<'db>> {
if let TyKind::Raw(_, ty) = self.ty.kind(Interner) {
Some(self.derived(ty.clone()))
} else {
@@ -5237,7 +5260,7 @@ impl Type {
}
}
- pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> {
+ pub fn fields(&self, db: &'db dyn HirDatabase) -> Vec<(Field, Self)> {
let (variant_id, substs) = match self.ty.kind(Interner) {
TyKind::Adt(hir_ty::AdtId(AdtId::StructId(s)), substs) => ((*s).into(), substs),
TyKind::Adt(hir_ty::AdtId(AdtId::UnionId(u)), substs) => ((*u).into(), substs),
@@ -5254,7 +5277,7 @@ impl Type {
.collect()
}
- pub fn tuple_fields(&self, _db: &dyn HirDatabase) -> Vec<Type> {
+ pub fn tuple_fields(&self, _db: &'db dyn HirDatabase) -> Vec<Self> {
if let TyKind::Tuple(_, substs) = &self.ty.kind(Interner) {
substs
.iter(Interner)
@@ -5265,7 +5288,7 @@ impl Type {
}
}
- pub fn as_array(&self, db: &dyn HirDatabase) -> Option<(Type, usize)> {
+ pub fn as_array(&self, db: &'db dyn HirDatabase) -> Option<(Self, usize)> {
if let TyKind::Array(ty, len) = &self.ty.kind(Interner) {
try_const_usize(db, len).map(|it| (self.derived(ty.clone()), it as usize))
} else {
@@ -5283,14 +5306,14 @@ impl Type {
/// Returns types that this type dereferences to (including this type itself). The returned
/// iterator won't yield the same type more than once even if the deref chain contains a cycle.
- pub fn autoderef<'db>(
+ pub fn autoderef(
&self,
db: &'db dyn HirDatabase,
- ) -> impl Iterator<Item = Type> + use<'_, 'db> {
+ ) -> impl Iterator<Item = Type<'db>> + use<'_, 'db> {
self.autoderef_(db).map(move |ty| self.derived(ty))
}
- fn autoderef_(&self, db: &dyn HirDatabase) -> impl Iterator<Item = Ty> {
+ fn autoderef_(&self, db: &'db dyn HirDatabase) -> impl Iterator<Item = Ty> {
// There should be no inference vars in types passed here
let canonical = hir_ty::replace_errors_with_variables(&self.ty);
autoderef(db, self.env.clone(), canonical)
@@ -5300,7 +5323,7 @@ impl Type {
// lifetime problems, because we need to borrow temp `CrateImplDefs`.
pub fn iterate_assoc_items<T>(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
krate: Crate,
mut callback: impl FnMut(AssocItem) -> Option<T>,
) -> Option<T> {
@@ -5314,7 +5337,7 @@ impl Type {
fn iterate_assoc_items_dyn(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
krate: Crate,
callback: &mut dyn FnMut(AssocItemId) -> bool,
) {
@@ -5326,7 +5349,7 @@ impl Type {
let impls = db.inherent_impls_in_crate(krate);
for impl_def in impls.for_self_ty(&self.ty) {
- for &(_, item) in db.impl_items(*impl_def).items.iter() {
+ for &(_, item) in impl_def.impl_items(db).items.iter() {
if callback(item) {
return;
}
@@ -5353,7 +5376,7 @@ impl Type {
/// - "String"
/// - "U"
/// ```
- pub fn type_arguments(&self) -> impl Iterator<Item = Type> + '_ {
+ pub fn type_arguments(&self) -> impl Iterator<Item = Type<'db>> + '_ {
self.ty
.strip_references()
.as_adt()
@@ -5423,7 +5446,7 @@ impl Type {
pub fn iterate_method_candidates_with_traits<T>(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
with_local_impls: Option<Module>,
@@ -5451,7 +5474,7 @@ impl Type {
pub fn iterate_method_candidates<T>(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
with_local_impls: Option<Module>,
name: Option<&Name>,
@@ -5473,7 +5496,7 @@ impl Type {
/// are considered inherent methods.
pub fn iterate_method_candidates_split_inherent(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
with_local_impls: Option<Module>,
@@ -5541,7 +5564,7 @@ impl Type {
#[tracing::instrument(skip_all, fields(name = ?name))]
pub fn iterate_path_candidates<T>(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
with_local_impls: Option<Module>,
@@ -5576,7 +5599,7 @@ impl Type {
#[tracing::instrument(skip_all, fields(name = ?name))]
pub fn iterate_path_candidates_split_inherent(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
with_local_impls: Option<Module>,
@@ -5639,10 +5662,10 @@ impl Type {
/// If a type can be represented as `dyn Trait`, returns all traits accessible via this type,
/// or an empty iterator otherwise.
- pub fn applicable_inherent_traits<'a>(
- &'a self,
- db: &'a dyn HirDatabase,
- ) -> impl Iterator<Item = Trait> + 'a {
+ pub fn applicable_inherent_traits(
+ &self,
+ db: &'db dyn HirDatabase,
+ ) -> impl Iterator<Item = Trait> {
let _p = tracing::info_span!("applicable_inherent_traits").entered();
self.autoderef_(db)
.filter_map(|ty| ty.dyn_trait())
@@ -5650,7 +5673,7 @@ impl Type {
.map(Trait::from)
}
- pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a {
+ pub fn env_traits(&self, db: &'db dyn HirDatabase) -> impl Iterator<Item = Trait> {
let _p = tracing::info_span!("env_traits").entered();
self.autoderef_(db)
.filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_)))
@@ -5662,10 +5685,7 @@ impl Type {
.map(Trait::from)
}
- pub fn as_impl_traits(
- &self,
- db: &dyn HirDatabase,
- ) -> Option<impl Iterator<Item = Trait> + use<>> {
+ pub fn as_impl_traits(&self, db: &'db dyn HirDatabase) -> Option<impl Iterator<Item = Trait>> {
self.ty.impl_trait_bounds(db).map(|it| {
it.into_iter().filter_map(|pred| match pred.skip_binders() {
hir_ty::WhereClause::Implemented(trait_ref) => {
@@ -5676,33 +5696,33 @@ impl Type {
})
}
- pub fn as_associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<Trait> {
+ pub fn as_associated_type_parent_trait(&self, db: &'db dyn HirDatabase) -> Option<Trait> {
self.ty.associated_type_parent_trait(db).map(Into::into)
}
- fn derived(&self, ty: Ty) -> Type {
- Type { env: self.env.clone(), ty }
+ fn derived(&self, ty: Ty) -> Self {
+ Type { env: self.env.clone(), ty, _pd: PhantomCovariantLifetime::new() }
}
/// Visits every type, including generic arguments, in this type. `cb` is called with type
/// itself first, and then with its generic arguments.
- pub fn walk(&self, db: &dyn HirDatabase, mut cb: impl FnMut(Type)) {
- fn walk_substs(
- db: &dyn HirDatabase,
- type_: &Type,
+ pub fn walk(&self, db: &'db dyn HirDatabase, mut cb: impl FnMut(Type<'db>)) {
+ fn walk_substs<'db>(
+ db: &'db dyn HirDatabase,
+ type_: &Type<'db>,
substs: &Substitution,
- cb: &mut impl FnMut(Type),
+ cb: &mut impl FnMut(Type<'db>),
) {
for ty in substs.iter(Interner).filter_map(|a| a.ty(Interner)) {
walk_type(db, &type_.derived(ty.clone()), cb);
}
}
- fn walk_bounds(
- db: &dyn HirDatabase,
- type_: &Type,
+ fn walk_bounds<'db>(
+ db: &'db dyn HirDatabase,
+ type_: &Type<'db>,
bounds: &[QuantifiedWhereClause],
- cb: &mut impl FnMut(Type),
+ cb: &mut impl FnMut(Type<'db>),
) {
for pred in bounds {
if let WhereClause::Implemented(trait_ref) = pred.skip_binders() {
@@ -5719,7 +5739,11 @@ impl Type {
}
}
- fn walk_type(db: &dyn HirDatabase, type_: &Type, cb: &mut impl FnMut(Type)) {
+ fn walk_type<'db>(
+ db: &'db dyn HirDatabase,
+ type_: &Type<'db>,
+ cb: &mut impl FnMut(Type<'db>),
+ ) {
let ty = type_.ty.strip_references();
match ty.kind(Interner) {
TyKind::Adt(_, substs) => {
@@ -5787,7 +5811,7 @@ impl Type {
///
/// Note that we consider placeholder types to unify with everything.
/// For example `Option<T>` and `Option<U>` unify although there is unresolved goal `T = U`.
- pub fn could_unify_with(&self, db: &dyn HirDatabase, other: &Type) -> bool {
+ pub fn could_unify_with(&self, db: &'db dyn HirDatabase, other: &Type<'db>) -> bool {
let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone()));
hir_ty::could_unify(db, self.env.clone(), &tys)
}
@@ -5796,17 +5820,17 @@ impl Type {
///
/// This means that placeholder types are not considered to unify if there are any bounds set on
/// them. For example `Option<T>` and `Option<U>` do not unify as we cannot show that `T = U`
- pub fn could_unify_with_deeply(&self, db: &dyn HirDatabase, other: &Type) -> bool {
+ pub fn could_unify_with_deeply(&self, db: &'db dyn HirDatabase, other: &Type<'db>) -> bool {
let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone()));
hir_ty::could_unify_deeply(db, self.env.clone(), &tys)
}
- pub fn could_coerce_to(&self, db: &dyn HirDatabase, to: &Type) -> bool {
+ pub fn could_coerce_to(&self, db: &'db dyn HirDatabase, to: &Type<'db>) -> bool {
let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), to.ty.clone()));
hir_ty::could_coerce(db, self.env.clone(), &tys)
}
- pub fn as_type_param(&self, db: &dyn HirDatabase) -> Option<TypeParam> {
+ pub fn as_type_param(&self, db: &'db dyn HirDatabase) -> Option<TypeParam> {
match self.ty.kind(Interner) {
TyKind::Placeholder(p) => Some(TypeParam {
id: TypeParamId::from_unchecked(hir_ty::from_placeholder_idx(db, *p)),
@@ -5816,19 +5840,19 @@ impl Type {
}
/// Returns unique `GenericParam`s contained in this type.
- pub fn generic_params(&self, db: &dyn HirDatabase) -> FxHashSet<GenericParam> {
+ pub fn generic_params(&self, db: &'db dyn HirDatabase) -> FxHashSet<GenericParam> {
hir_ty::collect_placeholders(&self.ty, db)
.into_iter()
.map(|id| TypeOrConstParam { id }.split(db).either_into())
.collect()
}
- pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
+ pub fn layout(&self, db: &'db dyn HirDatabase) -> Result<Layout, LayoutError> {
db.layout_of_ty(self.ty.clone(), self.env.clone())
.map(|layout| Layout(layout, db.target_data_layout(self.env.krate).unwrap()))
}
- pub fn drop_glue(&self, db: &dyn HirDatabase) -> DropGlue {
+ pub fn drop_glue(&self, db: &'db dyn HirDatabase) -> DropGlue {
db.has_drop_glue(self.ty.clone(), self.env.clone())
}
}
@@ -5855,8 +5879,8 @@ impl InlineAsmOperand {
// FIXME: Document this
#[derive(Debug)]
-pub struct Callable {
- ty: Type,
+pub struct Callable<'db> {
+ ty: Type<'db>,
sig: CallableSig,
callee: Callee,
/// Whether this is a method that was called with method call syntax.
@@ -5880,7 +5904,7 @@ pub enum CallableKind {
FnImpl(FnTrait),
}
-impl Callable {
+impl<'db> Callable<'db> {
pub fn kind(&self) -> CallableKind {
match self.callee {
Callee::Def(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()),
@@ -5895,7 +5919,7 @@ impl Callable {
Callee::FnImpl(fn_) => CallableKind::FnImpl(fn_),
}
}
- pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<(SelfParam, Type)> {
+ pub fn receiver_param(&self, db: &'db dyn HirDatabase) -> Option<(SelfParam, Type<'db>)> {
let func = match self.callee {
Callee::Def(CallableDefId::FunctionId(it)) if self.is_bound_method => it,
_ => return None,
@@ -5906,7 +5930,7 @@ impl Callable {
pub fn n_params(&self) -> usize {
self.sig.params().len() - if self.is_bound_method { 1 } else { 0 }
}
- pub fn params(&self) -> Vec<Param> {
+ pub fn params(&self) -> Vec<Param<'db>> {
self.sig
.params()
.iter()
@@ -5916,14 +5940,14 @@ impl Callable {
.map(|(idx, ty)| Param { func: self.callee.clone(), idx, ty })
.collect()
}
- pub fn return_type(&self) -> Type {
+ pub fn return_type(&self) -> Type<'db> {
self.ty.derived(self.sig.ret().clone())
}
pub fn sig(&self) -> &CallableSig {
&self.sig
}
- pub fn ty(&self) -> &Type {
+ pub fn ty(&self) -> &Type<'db> {
&self.ty
}
}
@@ -6125,9 +6149,9 @@ impl From<ItemInNs> for ScopeDef {
}
#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct Adjustment {
- pub source: Type,
- pub target: Type,
+pub struct Adjustment<'db> {
+ pub source: Type<'db>,
+ pub target: Type<'db>,
pub kind: Adjust,
}
@@ -6226,7 +6250,7 @@ impl HasCrate for TypeAlias {
}
}
-impl HasCrate for Type {
+impl HasCrate for Type<'_> {
fn krate(&self, _db: &dyn HirDatabase) -> Crate {
self.env.krate.into()
}
@@ -6380,9 +6404,9 @@ pub enum DocLinkDef {
SelfType(Trait),
}
-fn push_ty_diagnostics(
- db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic>,
+fn push_ty_diagnostics<'db>(
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
diagnostics: Option<ThinArc<(), TyLoweringDiagnostic>>,
source_map: &ExpressionStoreSourceMap,
) {
@@ -6478,3 +6502,7 @@ pub fn resolve_absolute_path<'a, I: Iterator<Item = Symbol> + Clone + 'a>(
})
.flatten()
}
+
+fn as_name_opt(name: Option<impl AsName>) -> Name {
+ name.map_or_else(Name::missing, |name| name.as_name())
+}
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 4a2e8e379f..adba59236a 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -25,7 +25,6 @@ use hir_expand::{
builtin::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
files::{FileRangeWrapper, HirFileRange, InRealFile},
- inert_attr_macro::find_builtin_attr_idx,
mod_path::{ModPath, PathKind},
name::AsName,
};
@@ -124,15 +123,15 @@ impl PathResolutionPerNs {
}
#[derive(Debug)]
-pub struct TypeInfo {
+pub struct TypeInfo<'db> {
/// The original type of the expression or pattern.
- pub original: Type,
+ pub original: Type<'db>,
/// The adjusted type, if an adjustment happened.
- pub adjusted: Option<Type>,
+ pub adjusted: Option<Type<'db>>,
}
-impl TypeInfo {
- pub fn original(self) -> Type {
+impl<'db> TypeInfo<'db> {
+ pub fn original(self) -> Type<'db> {
self.original
}
@@ -141,7 +140,7 @@ impl TypeInfo {
}
/// The adjusted type, or the original in case no adjustments occurred.
- pub fn adjusted(self) -> Type {
+ pub fn adjusted(self) -> Type<'db> {
self.adjusted.unwrap_or(self.original)
}
}
@@ -159,13 +158,13 @@ pub struct SemanticsImpl<'db> {
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroCallId>>,
}
-impl<DB> fmt::Debug for Semantics<'_, DB> {
+impl<DB: ?Sized> fmt::Debug for Semantics<'_, DB> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Semantics {{ ... }}")
}
}
-impl<'db, DB> ops::Deref for Semantics<'db, DB> {
+impl<'db, DB: ?Sized> ops::Deref for Semantics<'db, DB> {
type Target = SemanticsImpl<'db>;
fn deref(&self) -> &Self::Target {
@@ -173,12 +172,28 @@ impl<'db, DB> ops::Deref for Semantics<'db, DB> {
}
}
+// Note: while this variant of `Semantics<'_, _>` might seem unused, as it does not
+// find actual use within the rust-analyzer project itself, it exists to enable the use
+// within e.g. tracked salsa functions in third-party crates that build upon `ra_ap_hir`.
+impl Semantics<'_, dyn HirDatabase> {
+ /// Creates an instance that's weakly coupled to its underlying database type.
+ pub fn new_dyn(db: &'_ dyn HirDatabase) -> Semantics<'_, dyn HirDatabase> {
+ let impl_ = SemanticsImpl::new(db);
+ Semantics { db, imp: impl_ }
+ }
+}
+
impl<DB: HirDatabase> Semantics<'_, DB> {
+ /// Creates an instance that's strongly coupled to its underlying database type.
pub fn new(db: &DB) -> Semantics<'_, DB> {
let impl_ = SemanticsImpl::new(db);
Semantics { db, imp: impl_ }
}
+}
+// Note: We take `DB` as `?Sized` here in order to support type-erased
+// use of `Semantics` via `Semantics<'_, dyn HirDatabase>`:
+impl<DB: HirDatabase + ?Sized> Semantics<'_, DB> {
pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
self.imp.find_file(syntax_node).file_id
}
@@ -229,7 +244,7 @@ impl<DB: HirDatabase> Semantics<'_, DB> {
offset: TextSize,
) -> impl Iterator<Item = ast::NameLike> + 'slf {
node.token_at_offset(offset)
- .map(move |token| self.descend_into_macros_no_opaque(token))
+ .map(move |token| self.descend_into_macros_no_opaque(token, true))
.map(|descendants| descendants.into_iter().filter_map(move |it| it.value.parent()))
// re-order the tokens from token_at_offset by returning the ancestors with the smaller first nodes first
// See algo::ancestors_at_offset, which uses the same approach
@@ -662,8 +677,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &Name) -> Vec<Local> {
let body = self.db.body(to_be_renamed.parent);
let resolver = to_be_renamed.parent.resolver(self.db);
- let starting_expr =
- body.binding_owners.get(&to_be_renamed.binding_id).copied().unwrap_or(body.body_expr);
+ let starting_expr = body.binding_owner(to_be_renamed.binding_id).unwrap_or(body.body_expr);
let mut visitor = RenameConflictsVisitor {
body: &body,
conflicts: FxHashSet::default(),
@@ -953,13 +967,6 @@ impl<'db> SemanticsImpl<'db> {
let Some(item) = ast::Item::cast(ancestor) else {
return false;
};
- // Optimization to skip the semantic check.
- if item.attrs().all(|attr| {
- attr.simple_name()
- .is_some_and(|attr| find_builtin_attr_idx(&Symbol::intern(&attr)).is_some())
- }) {
- return false;
- }
self.with_ctx(|ctx| {
if ctx.item_to_macro_call(token.with_value(&item)).is_some() {
return true;
@@ -1001,10 +1008,11 @@ impl<'db> SemanticsImpl<'db> {
pub fn descend_into_macros_no_opaque(
&self,
token: SyntaxToken,
+ always_descend_into_derives: bool,
) -> SmallVec<[InFile<SyntaxToken>; 1]> {
let mut res = smallvec![];
let token = self.wrap_token_infile(token);
- self.descend_into_macros_all(token.clone(), true, &mut |t, ctx| {
+ self.descend_into_macros_all(token.clone(), always_descend_into_derives, &mut |t, ctx| {
if !ctx.is_opaque(self.db) {
// Don't descend into opaque contexts
res.push(t);
@@ -1525,7 +1533,7 @@ impl<'db> SemanticsImpl<'db> {
Some(Label { parent, label_id })
}
- pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+ pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type<'db>> {
let analyze = self.analyze(ty.syntax())?;
analyze.type_of_type(self.db, ty)
}
@@ -1544,7 +1552,7 @@ impl<'db> SemanticsImpl<'db> {
}
}
- pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment>> {
+ pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment<'db>>> {
let mutability = |m| match m {
hir_ty::Mutability::Not => Mutability::Shared,
hir_ty::Mutability::Mut => Mutability::Mut,
@@ -1587,13 +1595,13 @@ impl<'db> SemanticsImpl<'db> {
})
}
- pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+ pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo<'db>> {
self.analyze(expr.syntax())?
.type_of_expr(self.db, expr)
.map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
}
- pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+ pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo<'db>> {
self.analyze(pat.syntax())?
.type_of_pat(self.db, pat)
.map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
@@ -1602,15 +1610,15 @@ impl<'db> SemanticsImpl<'db> {
/// It also includes the changes that binding mode makes in the type. For example in
/// `let ref x @ Some(_) = None` the result of `type_of_pat` is `Option<T>` but the result
/// of this function is `&mut Option<T>`
- pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type> {
+ pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type<'db>> {
self.analyze(pat.syntax())?.type_of_binding_in_pat(self.db, pat)
}
- pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+ pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type<'db>> {
self.analyze(param.syntax())?.type_of_self(self.db, param)
}
- pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+ pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type<'db>; 1]> {
self.analyze(pat.syntax())
.and_then(|it| it.pattern_adjustments(self.db, pat))
.unwrap_or_default()
@@ -1620,7 +1628,7 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
}
- pub fn resolve_expr_as_callable(&self, call: &ast::Expr) -> Option<Callable> {
+ pub fn resolve_expr_as_callable(&self, call: &ast::Expr) -> Option<Callable<'db>> {
self.analyze(call.syntax())?.resolve_expr_as_callable(self.db, call)
}
@@ -1632,7 +1640,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_method_call_fallback(
&self,
call: &ast::MethodCallExpr,
- ) -> Option<(Either<Function, Field>, Option<GenericSubstitution>)> {
+ ) -> Option<(Either<Function, Field>, Option<GenericSubstitution<'db>>)> {
self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
}
@@ -1640,10 +1648,10 @@ impl<'db> SemanticsImpl<'db> {
// FIXME: better api for the trait environment
pub fn resolve_trait_impl_method(
&self,
- env: Type,
+ env: Type<'db>,
trait_: Trait,
func: Function,
- subst: impl IntoIterator<Item = Type>,
+ subst: impl IntoIterator<Item = Type<'db>>,
) -> Option<Function> {
let mut substs = hir_ty::TyBuilder::subst_for_def(self.db, TraitId::from(trait_), None);
for s in subst {
@@ -1682,7 +1690,10 @@ impl<'db> SemanticsImpl<'db> {
// This does not resolve the method call to the correct trait impl!
// We should probably fix that.
- pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+ pub fn resolve_method_call_as_callable(
+ &self,
+ call: &ast::MethodCallExpr,
+ ) -> Option<Callable<'db>> {
self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
}
@@ -1693,14 +1704,15 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_field_fallback(
&self,
field: &ast::FieldExpr,
- ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution>)> {
+ ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution<'db>>)>
+ {
self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
}
pub fn resolve_record_field(
&self,
field: &ast::RecordExprField,
- ) -> Option<(Field, Option<Local>, Type)> {
+ ) -> Option<(Field, Option<Local>, Type<'db>)> {
self.resolve_record_field_with_substitution(field)
.map(|(field, local, ty, _)| (field, local, ty))
}
@@ -1708,18 +1720,21 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_record_field_with_substitution(
&self,
field: &ast::RecordExprField,
- ) -> Option<(Field, Option<Local>, Type, GenericSubstitution)> {
+ ) -> Option<(Field, Option<Local>, Type<'db>, GenericSubstitution<'db>)> {
self.analyze(field.syntax())?.resolve_record_field(self.db, field)
}
- pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
+ pub fn resolve_record_pat_field(
+ &self,
+ field: &ast::RecordPatField,
+ ) -> Option<(Field, Type<'db>)> {
self.resolve_record_pat_field_with_subst(field).map(|(field, ty, _)| (field, ty))
}
pub fn resolve_record_pat_field_with_subst(
&self,
field: &ast::RecordPatField,
- ) -> Option<(Field, Type, GenericSubstitution)> {
+ ) -> Option<(Field, Type<'db>, GenericSubstitution<'db>)> {
self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
}
@@ -1760,7 +1775,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
let Some(mac) = self.resolve_macro_call(macro_call) else { return false };
- if mac.is_asm_or_global_asm(self.db) {
+ if mac.is_asm_like(self.db) {
return true;
}
@@ -1792,7 +1807,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_path_with_subst(
&self,
path: &ast::Path,
- ) -> Option<(PathResolution, Option<GenericSubstitution>)> {
+ ) -> Option<(PathResolution, Option<GenericSubstitution<'db>>)> {
self.analyze(path.syntax())?.resolve_path(self.db, path)
}
@@ -1803,7 +1818,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_offset_of_field(
&self,
name_ref: &ast::NameRef,
- ) -> Option<(Either<Variant, Field>, GenericSubstitution)> {
+ ) -> Option<(Either<Variant, Field>, GenericSubstitution<'db>)> {
self.analyze_no_infer(name_ref.syntax())?.resolve_offset_of_field(self.db, name_ref)
}
@@ -1825,13 +1840,19 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
}
- pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+ pub fn record_literal_missing_fields(
+ &self,
+ literal: &ast::RecordExpr,
+ ) -> Vec<(Field, Type<'db>)> {
self.analyze(literal.syntax())
.and_then(|it| it.record_literal_missing_fields(self.db, literal))
.unwrap_or_default()
}
- pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+ pub fn record_pattern_missing_fields(
+ &self,
+ pattern: &ast::RecordPat,
+ ) -> Vec<(Field, Type<'db>)> {
self.analyze(pattern.syntax())
.and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
.unwrap_or_default()
@@ -2177,6 +2198,10 @@ pub struct SemanticsScope<'db> {
}
impl<'db> SemanticsScope<'db> {
+ pub fn file_id(&self) -> HirFileId {
+ self.file_id
+ }
+
pub fn module(&self) -> Module {
Module { id: self.resolver.module() }
}
diff --git a/crates/hir/src/semantics/child_by_source.rs b/crates/hir/src/semantics/child_by_source.rs
index 1a6d63c88c..e7db93d375 100644
--- a/crates/hir/src/semantics/child_by_source.rs
+++ b/crates/hir/src/semantics/child_by_source.rs
@@ -6,10 +6,11 @@
use either::Either;
use hir_expand::{HirFileId, attrs::collect_attrs};
+use span::AstIdNode;
use syntax::{AstPtr, ast};
use hir_def::{
- AdtId, AssocItemId, DefWithBodyId, EnumId, FieldId, GenericDefId, ImplId, ItemTreeLoc,
+ AdtId, AssocItemId, AstIdLoc, DefWithBodyId, EnumId, FieldId, GenericDefId, ImplId,
LifetimeParamId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, TypeOrConstParamId,
VariantId,
db::DefDatabase,
@@ -19,7 +20,6 @@ use hir_def::{
},
hir::generics::GenericParams,
item_scope::ItemScope,
- item_tree::ItemTreeNode,
nameres::DefMap,
src::{HasChildSource, HasSource},
};
@@ -35,7 +35,7 @@ pub(crate) trait ChildBySource {
impl ChildBySource for TraitId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
- let data = db.trait_items(*self);
+ let data = self.trait_items(db);
data.macro_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| {
@@ -61,7 +61,7 @@ impl ChildBySource for TraitId {
impl ChildBySource for ImplId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
- let data = db.impl_items(*self);
+ let data = self.impl_items(db);
data.macro_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| {
let ptr = ast_id.to_ptr(db);
@@ -113,7 +113,7 @@ impl ChildBySource for ItemScope {
ids.iter().for_each(|&id| {
if let MacroId::MacroRulesId(id) = id {
let loc = id.lookup(db);
- if loc.id.file_id() == file_id {
+ if loc.id.file_id == file_id {
res[keys::MACRO_RULES].insert(loc.ast_ptr(db).value, id);
}
}
@@ -191,7 +191,7 @@ impl ChildBySource for VariantId {
Either::Right(source) => res[keys::RECORD_FIELD].insert(AstPtr::new(&source), id),
}
}
- let (_, sm) = db.variant_fields_with_source_map(*self);
+ let (_, sm) = self.fields_with_source_map(db);
sm.expansions().for_each(|(ast, &exp_id)| res[keys::MACRO_CALL].insert(ast.value, exp_id));
}
}
@@ -199,16 +199,14 @@ impl ChildBySource for VariantId {
impl ChildBySource for EnumId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let loc = &self.lookup(db);
- if file_id != loc.id.file_id() {
+ if file_id != loc.id.file_id {
return;
}
- let tree = loc.id.item_tree(db);
- let ast_id_map = db.ast_id_map(loc.id.file_id());
+ let ast_id_map = db.ast_id_map(loc.id.file_id);
- db.enum_variants(*self).variants.iter().for_each(|&(variant, _)| {
- res[keys::ENUM_VARIANT]
- .insert(ast_id_map.get(tree[variant.lookup(db).id.value].ast_id), variant);
+ self.enum_variants(db).variants.iter().for_each(|&(variant, _, _)| {
+ res[keys::ENUM_VARIANT].insert(ast_id_map.get(variant.lookup(db).id.value), variant);
});
let (_, source_map) = db.enum_signature_with_source_map(*self);
source_map
@@ -287,15 +285,14 @@ fn insert_item_loc<ID, N, Data>(
res: &mut DynMap,
file_id: HirFileId,
id: ID,
- key: Key<N::Source, ID>,
+ key: Key<N, ID>,
) where
ID: Lookup<Database = dyn DefDatabase, Data = Data> + 'static,
- Data: ItemTreeLoc<Id = N>,
- N: ItemTreeNode,
- N::Source: 'static,
+ Data: AstIdLoc<Ast = N>,
+ N: AstIdNode + 'static,
{
let loc = id.lookup(db);
- if loc.item_tree_id().file_id() == file_id {
+ if loc.ast_id().file_id == file_id {
res[key].insert(loc.ast_ptr(db).value, id)
}
}
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index ec2ccf8cba..ecc6e5f3d0 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -37,7 +37,7 @@ use hir_expand::{
};
use hir_ty::{
Adjustment, AliasTy, InferenceResult, Interner, LifetimeElisionKind, ProjectionTy,
- Substitution, TraitEnvironment, Ty, TyExt, TyKind, TyLoweringContext,
+ Substitution, ToChalk, TraitEnvironment, Ty, TyExt, TyKind, TyLoweringContext,
diagnostics::{
InsideUnsafeBlock, record_literal_missing_fields, record_pattern_missing_fields,
unsafe_operations,
@@ -156,14 +156,14 @@ impl<'db> SourceAnalyzer<'db> {
InFile { file_id, .. }: InFile<&SyntaxNode>,
_offset: Option<TextSize>,
) -> SourceAnalyzer<'db> {
- let (fields, source_map) = db.variant_fields_with_source_map(def);
+ let (fields, source_map) = def.fields_with_source_map(db);
let resolver = def.resolver(db);
SourceAnalyzer {
resolver,
body_or_sig: Some(BodyOrSig::VariantFields {
def,
store: fields.store.clone(),
- source_map,
+ source_map: source_map.clone(),
}),
file_id,
}
@@ -242,11 +242,7 @@ impl<'db> SourceAnalyzer<'db> {
fn binding_id_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingId> {
let pat_id = self.pat_id(&pat.clone().into())?;
- if let Pat::Bind { id, .. } = self.store()?.pats[pat_id.as_pat()?] {
- Some(id)
- } else {
- None
- }
+ if let Pat::Bind { id, .. } = self.store()?[pat_id.as_pat()?] { Some(id) } else { None }
}
pub(crate) fn expr_adjustments(&self, expr: &ast::Expr) -> Option<&[Adjustment]> {
@@ -254,10 +250,14 @@ impl<'db> SourceAnalyzer<'db> {
// expressions nor patterns).
let expr_id = self.expr_id(expr.clone())?.as_expr()?;
let infer = self.infer()?;
- infer.expr_adjustments.get(&expr_id).map(|v| &**v)
+ infer.expr_adjustment(expr_id)
}
- pub(crate) fn type_of_type(&self, db: &'db dyn HirDatabase, ty: &ast::Type) -> Option<Type> {
+ pub(crate) fn type_of_type(
+ &self,
+ db: &'db dyn HirDatabase,
+ ty: &ast::Type,
+ ) -> Option<Type<'db>> {
let type_ref = self.type_id(ty)?;
let ty = TyLoweringContext::new(
db,
@@ -277,12 +277,12 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
expr: &ast::Expr,
- ) -> Option<(Type, Option<Type>)> {
+ ) -> Option<(Type<'db>, Option<Type<'db>>)> {
let expr_id = self.expr_id(expr.clone())?;
let infer = self.infer()?;
let coerced = expr_id
.as_expr()
- .and_then(|expr_id| infer.expr_adjustments.get(&expr_id))
+ .and_then(|expr_id| infer.expr_adjustment(expr_id))
.and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.clone()));
let ty = infer[expr_id].clone();
let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
@@ -293,17 +293,16 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
pat: &ast::Pat,
- ) -> Option<(Type, Option<Type>)> {
+ ) -> Option<(Type<'db>, Option<Type<'db>>)> {
let expr_or_pat_id = self.pat_id(pat)?;
let infer = self.infer()?;
let coerced = match expr_or_pat_id {
ExprOrPatId::ExprId(idx) => infer
- .expr_adjustments
- .get(&idx)
+ .expr_adjustment(idx)
.and_then(|adjusts| adjusts.last().cloned())
.map(|adjust| adjust.target),
ExprOrPatId::PatId(idx) => {
- infer.pat_adjustments.get(&idx).and_then(|adjusts| adjusts.last().cloned())
+ infer.pat_adjustment(idx).and_then(|adjusts| adjusts.last().cloned())
}
};
@@ -316,7 +315,7 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
pat: &ast::IdentPat,
- ) -> Option<Type> {
+ ) -> Option<Type<'db>> {
let binding_id = self.binding_id_of_pat(pat)?;
let infer = self.infer()?;
let ty = infer[binding_id].clone();
@@ -328,7 +327,7 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
_param: &ast::SelfParam,
- ) -> Option<Type> {
+ ) -> Option<Type<'db>> {
let binding = self.body()?.self_param?;
let ty = self.infer()?[binding].clone();
Some(Type::new_with_resolver(db, &self.resolver, ty))
@@ -341,7 +340,7 @@ impl<'db> SourceAnalyzer<'db> {
) -> Option<BindingMode> {
let id = self.pat_id(&pat.clone().into())?;
let infer = self.infer()?;
- infer.binding_modes.get(id.as_pat()?).map(|bm| match bm {
+ infer.binding_mode(id.as_pat()?).map(|bm| match bm {
hir_ty::BindingMode::Move => BindingMode::Move,
hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
@@ -353,13 +352,12 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
pat: &ast::Pat,
- ) -> Option<SmallVec<[Type; 1]>> {
+ ) -> Option<SmallVec<[Type<'db>; 1]>> {
let pat_id = self.pat_id(pat)?;
let infer = self.infer()?;
Some(
infer
- .pat_adjustments
- .get(&pat_id.as_pat()?)?
+ .pat_adjustment(pat_id.as_pat()?)?
.iter()
.map(|ty| Type::new_with_resolver(db, &self.resolver, ty.clone()))
.collect(),
@@ -370,7 +368,7 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
call: &ast::MethodCallExpr,
- ) -> Option<Callable> {
+ ) -> Option<Callable<'db>> {
let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
let (func, substs) = self.infer()?.method_resolution(expr_id)?;
let ty = db.value_ty(func.into())?.substitute(Interner, &substs);
@@ -395,7 +393,7 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
call: &ast::MethodCallExpr,
- ) -> Option<(Either<Function, Field>, Option<GenericSubstitution>)> {
+ ) -> Option<(Either<Function, Field>, Option<GenericSubstitution<'db>>)> {
let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
let inference_result = self.infer()?;
match inference_result.method_resolution(expr_id) {
@@ -419,7 +417,7 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
call: &ast::Expr,
- ) -> Option<Callable> {
+ ) -> Option<Callable<'db>> {
let (orig, adjusted) = self.type_of_expr(db, &call.clone())?;
adjusted.unwrap_or(orig).as_callable(db)
}
@@ -440,7 +438,7 @@ impl<'db> SourceAnalyzer<'db> {
field_expr: ExprId,
infer: &InferenceResult,
db: &'db dyn HirDatabase,
- ) -> Option<GenericSubstitution> {
+ ) -> Option<GenericSubstitution<'db>> {
let body = self.store()?;
if let Expr::Field { expr: object_expr, name: _ } = body[field_expr] {
let (adt, subst) = type_of_expr_including_adjust(infer, object_expr)?.as_adt()?;
@@ -457,7 +455,8 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
field: &ast::FieldExpr,
- ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution>)> {
+ ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution<'db>>)>
+ {
let (def, ..) = self.body_()?;
let expr_id = self.expr_id(field.clone().into())?.as_expr()?;
let inference_result = self.infer()?;
@@ -680,7 +679,7 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
field: &ast::RecordExprField,
- ) -> Option<(Field, Option<Local>, Type, GenericSubstitution)> {
+ ) -> Option<(Field, Option<Local>, Type<'db>, GenericSubstitution<'db>)> {
let record_expr = ast::RecordExpr::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
let expr = ast::Expr::from(record_expr);
let expr_id = self.store_sm()?.node_expr(InFile::new(self.file_id, &expr))?;
@@ -708,7 +707,7 @@ impl<'db> SourceAnalyzer<'db> {
};
let (adt, subst) = self.infer()?.type_of_expr_or_pat(expr_id)?.as_adt()?;
let variant = self.infer()?.variant_resolution_for_expr_or_pat(expr_id)?;
- let variant_data = variant.variant_data(db);
+ let variant_data = variant.fields(db);
let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? };
let field_ty =
db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
@@ -724,14 +723,14 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
field: &ast::RecordPatField,
- ) -> Option<(Field, Type, GenericSubstitution)> {
+ ) -> Option<(Field, Type<'db>, GenericSubstitution<'db>)> {
let field_name = field.field_name()?.as_name();
let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
let pat_id = self.pat_id(&record_pat.into())?;
let variant = self.infer()?.variant_resolution_for_pat(pat_id.as_pat()?)?;
- let variant_data = variant.variant_data(db);
+ let variant_data = variant.fields(db);
let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
- let (adt, subst) = self.infer()?.type_of_pat.get(pat_id.as_pat()?)?.as_adt()?;
+ let (adt, subst) = self.infer()?[pat_id.as_pat()?].as_adt()?;
let field_ty =
db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
Some((
@@ -760,7 +759,8 @@ impl<'db> SourceAnalyzer<'db> {
},
};
- let res = resolve_hir_path(db, &self.resolver, path, HygieneId::ROOT, Some(store))?;
+ let body_owner = self.resolver.body_owner();
+ let res = resolve_hir_value_path(db, &self.resolver, body_owner, path, HygieneId::ROOT)?;
match res {
PathResolution::Def(def) => Some(def),
_ => None,
@@ -779,7 +779,7 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
name_ref: &ast::NameRef,
- ) -> Option<(Either<crate::Variant, crate::Field>, GenericSubstitution)> {
+ ) -> Option<(Either<crate::Variant, crate::Field>, GenericSubstitution<'db>)> {
let offset_of_expr = ast::OffsetOfExpr::cast(name_ref.syntax().parent()?)?;
let container = offset_of_expr.ty()?;
let container = self.type_of_type(db, &container)?;
@@ -798,8 +798,8 @@ impl<'db> SourceAnalyzer<'db> {
};
container = Either::Right(db.normalize_projection(projection, trait_env.clone()));
}
- let handle_variants = |variant, subst: &Substitution, container: &mut _| {
- let fields = db.variant_fields(variant);
+ let handle_variants = |variant: VariantId, subst: &Substitution, container: &mut _| {
+ let fields = variant.fields(db);
let field = fields.field(&field_name.as_name())?;
let field_types = db.field_types(variant);
*container = Either::Right(field_types[field].clone().substitute(Interner, subst));
@@ -829,7 +829,7 @@ impl<'db> SourceAnalyzer<'db> {
handle_variants(id.into(), subst, &mut container)?
}
AdtId::EnumId(id) => {
- let variants = db.enum_variants(id);
+ let variants = id.enum_variants(db);
let variant = variants.variant(&field_name.as_name())?;
container = Either::Left((variant, subst.clone()));
(Either::Left(Variant { id: variant }), id.into(), subst.clone())
@@ -851,7 +851,7 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
path: &ast::Path,
- ) -> Option<(PathResolution, Option<GenericSubstitution>)> {
+ ) -> Option<(PathResolution, Option<GenericSubstitution<'db>>)> {
let parent = path.syntax().parent();
let parent = || parent.clone();
@@ -991,7 +991,7 @@ impl<'db> SourceAnalyzer<'db> {
let parent_hir_path = path
.parent_path()
.and_then(|p| collector.lower_path(p, &mut ExprCollector::impl_trait_error_allocator));
- let store = collector.store.finish();
+ let (store, _) = collector.store.finish();
// Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
// trying to resolve foo::bar.
@@ -1169,8 +1169,7 @@ impl<'db> SourceAnalyzer<'db> {
)
}
TyKind::FnDef(fn_id, subst) => {
- let fn_id = hir_ty::db::InternedCallableDefId::from(*fn_id);
- let fn_id = db.lookup_intern_callable_def(fn_id);
+ let fn_id = ToChalk::from_chalk(db, *fn_id);
let generic_def_id = match fn_id {
CallableDefId::StructId(id) => id.into(),
CallableDefId::FunctionId(id) => id.into(),
@@ -1201,7 +1200,7 @@ impl<'db> SourceAnalyzer<'db> {
let mut collector = ExprCollector::new(db, self.resolver.module(), self.file_id);
let hir_path =
collector.lower_path(path.clone(), &mut ExprCollector::impl_trait_error_allocator)?;
- let store = collector.store.finish();
+ let (store, _) = collector.store.finish();
Some(resolve_hir_path_(
db,
&self.resolver,
@@ -1217,7 +1216,7 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
literal: &ast::RecordExpr,
- ) -> Option<Vec<(Field, Type)>> {
+ ) -> Option<Vec<(Field, Type<'db>)>> {
let body = self.store()?;
let infer = self.infer()?;
@@ -1240,12 +1239,12 @@ impl<'db> SourceAnalyzer<'db> {
&self,
db: &'db dyn HirDatabase,
pattern: &ast::RecordPat,
- ) -> Option<Vec<(Field, Type)>> {
+ ) -> Option<Vec<(Field, Type<'db>)>> {
let body = self.store()?;
let infer = self.infer()?;
let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
- let substs = infer.type_of_pat[pat_id].as_adt()?.1;
+ let substs = infer[pat_id].as_adt()?.1;
let (variant, missing_fields, _exhaustive) =
record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
@@ -1259,7 +1258,7 @@ impl<'db> SourceAnalyzer<'db> {
substs: &Substitution,
variant: VariantId,
missing_fields: Vec<LocalFieldId>,
- ) -> Vec<(Field, Type)> {
+ ) -> Vec<(Field, Type<'db>)> {
let field_types = db.field_types(variant);
missing_fields
@@ -1419,7 +1418,7 @@ impl<'db> SourceAnalyzer<'db> {
method_name: &Name,
) -> Option<(TraitId, FunctionId)> {
let trait_id = lang_trait.resolve_trait(db, self.resolver.krate())?;
- let fn_id = db.trait_items(trait_id).method_by_name(method_name)?;
+ let fn_id = trait_id.trait_items(db).method_by_name(method_name)?;
Some((trait_id, fn_id))
}
@@ -1436,9 +1435,11 @@ fn scope_for(
) -> Option<ScopeId> {
node.ancestors_with_macros(db)
.take_while(|it| {
- !ast::Item::can_cast(it.kind())
- || ast::MacroCall::can_cast(it.kind())
- || ast::Use::can_cast(it.kind())
+ let kind = it.kind();
+ !ast::Item::can_cast(kind)
+ || ast::MacroCall::can_cast(kind)
+ || ast::Use::can_cast(kind)
+ || ast::AsmExpr::can_cast(kind)
})
.filter_map(|it| it.map(ast::Expr::cast).transpose())
.filter_map(|it| source_map.node_expr(it.as_ref())?.as_expr())
@@ -1576,7 +1577,7 @@ fn resolve_hir_path_(
// within the trait's associated types.
if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
if let Some(type_alias_id) =
- db.trait_items(trait_id).associated_type_by_name(unresolved.name)
+ trait_id.trait_items(db).associated_type_by_name(unresolved.name)
{
return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
}
@@ -1727,7 +1728,7 @@ fn resolve_hir_path_qualifier(
// within the trait's associated types.
if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
if let Some(type_alias_id) =
- db.trait_items(trait_id).associated_type_by_name(unresolved.name)
+ trait_id.trait_items(db).associated_type_by_name(unresolved.name)
{
return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
}
@@ -1781,8 +1782,8 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H
}
fn type_of_expr_including_adjust(infer: &InferenceResult, id: ExprId) -> Option<&Ty> {
- match infer.expr_adjustments.get(&id).and_then(|adjustments| adjustments.last()) {
+ match infer.expr_adjustment(id).and_then(|adjustments| adjustments.last()) {
Some(adjustment) => Some(&adjustment.target),
- None => infer.type_of_expr.get(id),
+ None => Some(&infer[id]),
}
}
diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs
index e87ab87407..dca10193e2 100644
--- a/crates/hir/src/symbols.rs
+++ b/crates/hir/src/symbols.rs
@@ -1,5 +1,6 @@
//! File symbol extraction.
+use base_db::FxIndexSet;
use either::Either;
use hir_def::{
AdtId, AssocItemId, Complete, DefWithBodyId, ExternCrateId, HasModule, ImplId, Lookup, MacroId,
@@ -21,8 +22,6 @@ use syntax::{AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr, ast
use crate::{HasCrate, Module, ModuleDef, Semantics};
-pub type FxIndexSet<T> = indexmap::IndexSet<T, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
-
/// The actual data that is stored in the index. It should be as compact as
/// possible.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -34,6 +33,7 @@ pub struct FileSymbol {
/// Whether this symbol is a doc alias for the original symbol.
pub is_alias: bool,
pub is_assoc: bool,
+ pub is_import: bool,
pub do_not_complete: Complete,
}
@@ -125,6 +125,13 @@ impl<'a> SymbolCollector<'a> {
}
ModuleDefId::AdtId(AdtId::EnumId(id)) => {
this.push_decl(id, name, false, None);
+ let enum_name = this.db.enum_signature(id).name.as_str().to_smolstr();
+ this.with_container_name(Some(enum_name), |this| {
+ let variants = id.enum_variants(this.db);
+ for (variant_id, variant_name, _) in &variants.variants {
+ this.push_decl(*variant_id, variant_name, true, None);
+ }
+ });
}
ModuleDefId::AdtId(AdtId::UnionId(id)) => {
this.push_decl(id, name, false, None);
@@ -165,6 +172,7 @@ impl<'a> SymbolCollector<'a> {
let is_explicit_import = |vis| match vis {
Visibility::Public => true,
+ Visibility::PubCrate(_) => true,
Visibility::Module(_, VisibilityExplicitness::Explicit) => true,
Visibility::Module(_, VisibilityExplicitness::Implicit) => false,
};
@@ -197,6 +205,7 @@ impl<'a> SymbolCollector<'a> {
loc: dec_loc,
is_alias: false,
is_assoc: false,
+ is_import: true,
do_not_complete: Complete::Yes,
});
};
@@ -227,6 +236,7 @@ impl<'a> SymbolCollector<'a> {
loc: dec_loc,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Complete::Yes,
});
};
@@ -322,7 +332,7 @@ impl<'a> SymbolCollector<'a> {
.to_smolstr(),
);
self.with_container_name(impl_name, |s| {
- for &(ref name, assoc_item_id) in &self.db.impl_items(impl_id).items {
+ for &(ref name, assoc_item_id) in &impl_id.impl_items(self.db).items {
s.push_assoc_item(assoc_item_id, name, None)
}
})
@@ -331,7 +341,7 @@ impl<'a> SymbolCollector<'a> {
fn collect_from_trait(&mut self, trait_id: TraitId, trait_do_not_complete: Complete) {
let trait_data = self.db.trait_signature(trait_id);
self.with_container_name(Some(trait_data.name.as_str().into()), |s| {
- for &(ref name, assoc_item_id) in &self.db.trait_items(trait_id).items {
+ for &(ref name, assoc_item_id) in &trait_id.trait_items(self.db).items {
s.push_assoc_item(assoc_item_id, name, Some(trait_do_not_complete));
}
});
@@ -398,6 +408,7 @@ impl<'a> SymbolCollector<'a> {
container_name: self.current_container_name.clone(),
is_alias: true,
is_assoc,
+ is_import: false,
do_not_complete,
});
}
@@ -410,6 +421,7 @@ impl<'a> SymbolCollector<'a> {
loc: dec_loc,
is_alias: false,
is_assoc,
+ is_import: false,
do_not_complete,
});
@@ -442,6 +454,7 @@ impl<'a> SymbolCollector<'a> {
container_name: self.current_container_name.clone(),
is_alias: true,
is_assoc: false,
+ is_import: false,
do_not_complete,
});
}
@@ -454,6 +467,7 @@ impl<'a> SymbolCollector<'a> {
loc: dec_loc,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete,
});
}
diff --git a/crates/hir/src/term_search.rs b/crates/hir/src/term_search.rs
index af72179305..4b354e6406 100644
--- a/crates/hir/src/term_search.rs
+++ b/crates/hir/src/term_search.rs
@@ -22,20 +22,20 @@ enum NewTypesKey {
/// Helper enum to squash big number of alternative trees into `Many` variant as there is too many
/// to take into account.
#[derive(Debug)]
-enum AlternativeExprs {
+enum AlternativeExprs<'db> {
/// There are few trees, so we keep track of them all
- Few(FxHashSet<Expr>),
+ Few(FxHashSet<Expr<'db>>),
/// There are too many trees to keep track of
Many,
}
-impl AlternativeExprs {
+impl<'db> AlternativeExprs<'db> {
/// Construct alternative trees
///
/// # Arguments
/// `threshold` - threshold value for many trees (more than that is many)
/// `exprs` - expressions iterator
- fn new(threshold: usize, exprs: impl Iterator<Item = Expr>) -> AlternativeExprs {
+ fn new(threshold: usize, exprs: impl Iterator<Item = Expr<'db>>) -> AlternativeExprs<'db> {
let mut it = AlternativeExprs::Few(Default::default());
it.extend_with_threshold(threshold, exprs);
it
@@ -45,7 +45,7 @@ impl AlternativeExprs {
///
/// # Arguments
/// `ty` - Type of expressions queried (this is used to give type to `Expr::Many`)
- fn exprs(&self, ty: &Type) -> Vec<Expr> {
+ fn exprs(&self, ty: &Type<'db>) -> Vec<Expr<'db>> {
match self {
AlternativeExprs::Few(exprs) => exprs.iter().cloned().collect(),
AlternativeExprs::Many => vec![Expr::Many(ty.clone())],
@@ -57,7 +57,7 @@ impl AlternativeExprs {
/// # Arguments
/// `threshold` - threshold value for many trees (more than that is many)
/// `exprs` - expressions iterator
- fn extend_with_threshold(&mut self, threshold: usize, exprs: impl Iterator<Item = Expr>) {
+ fn extend_with_threshold(&mut self, threshold: usize, exprs: impl Iterator<Item = Expr<'db>>) {
match self {
AlternativeExprs::Few(tts) => {
for it in exprs {
@@ -88,20 +88,20 @@ impl AlternativeExprs {
/// Both of them are to speed up the term search by leaving out types / ScopeDefs that likely do
/// not produce any new results.
#[derive(Default, Debug)]
-struct LookupTable {
+struct LookupTable<'db> {
/// All the `Expr`s in "value" produce the type of "key"
- data: FxHashMap<Type, AlternativeExprs>,
+ data: FxHashMap<Type<'db>, AlternativeExprs<'db>>,
/// New types reached since last query by the `NewTypesKey`
- new_types: FxHashMap<NewTypesKey, Vec<Type>>,
+ new_types: FxHashMap<NewTypesKey, Vec<Type<'db>>>,
/// Types queried but not present
- types_wishlist: FxHashSet<Type>,
+ types_wishlist: FxHashSet<Type<'db>>,
/// Threshold to squash trees to `Many`
many_threshold: usize,
}
-impl LookupTable {
+impl<'db> LookupTable<'db> {
/// Initialize lookup table
- fn new(many_threshold: usize, goal: Type) -> Self {
+ fn new(many_threshold: usize, goal: Type<'db>) -> Self {
let mut res = Self { many_threshold, ..Default::default() };
res.new_types.insert(NewTypesKey::ImplMethod, Vec::new());
res.new_types.insert(NewTypesKey::StructProjection, Vec::new());
@@ -110,7 +110,7 @@ impl LookupTable {
}
/// Find all `Expr`s that unify with the `ty`
- fn find(&mut self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<Expr>> {
+ fn find(&mut self, db: &'db dyn HirDatabase, ty: &Type<'db>) -> Option<Vec<Expr<'db>>> {
let res = self
.data
.iter()
@@ -135,7 +135,7 @@ impl LookupTable {
///
/// For example if we have type `i32` in data and we query for `&i32` it map all the type
/// trees we have for `i32` with `Expr::Reference` and returns them.
- fn find_autoref(&mut self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<Expr>> {
+ fn find_autoref(&mut self, db: &'db dyn HirDatabase, ty: &Type<'db>) -> Option<Vec<Expr<'db>>> {
let res = self
.data
.iter()
@@ -174,7 +174,7 @@ impl LookupTable {
/// Note that the types have to be the same, unification is not enough as unification is not
/// transitive. For example Vec<i32> and FxHashSet<i32> both unify with Iterator<Item = i32>,
/// but they clearly do not unify themselves.
- fn insert(&mut self, ty: Type, exprs: impl Iterator<Item = Expr>) {
+ fn insert(&mut self, ty: Type<'db>, exprs: impl Iterator<Item = Expr<'db>>) {
match self.data.get_mut(&ty) {
Some(it) => {
it.extend_with_threshold(self.many_threshold, exprs);
@@ -192,14 +192,14 @@ impl LookupTable {
}
/// Iterate all the reachable types
- fn iter_types(&self) -> impl Iterator<Item = Type> + '_ {
+ fn iter_types(&self) -> impl Iterator<Item = Type<'db>> + '_ {
self.data.keys().cloned()
}
/// Query new types reached since last query by key
///
/// Create new key if you wish to query it to avoid conflicting with existing queries.
- fn new_types(&mut self, key: NewTypesKey) -> Vec<Type> {
+ fn new_types(&mut self, key: NewTypesKey) -> Vec<Type<'db>> {
match self.new_types.get_mut(&key) {
Some(it) => std::mem::take(it),
None => Vec::new(),
@@ -207,20 +207,20 @@ impl LookupTable {
}
/// Types queried but not found
- fn types_wishlist(&mut self) -> &FxHashSet<Type> {
+ fn types_wishlist(&mut self) -> &FxHashSet<Type<'db>> {
&self.types_wishlist
}
}
/// Context for the `term_search` function
#[derive(Debug)]
-pub struct TermSearchCtx<'a, DB: HirDatabase> {
+pub struct TermSearchCtx<'db, DB: HirDatabase> {
/// Semantics for the program
- pub sema: &'a Semantics<'a, DB>,
+ pub sema: &'db Semantics<'db, DB>,
/// Semantic scope, captures context for the term search
- pub scope: &'a SemanticsScope<'a>,
+ pub scope: &'db SemanticsScope<'db>,
/// Target / expected output type
- pub goal: Type,
+ pub goal: Type<'db>,
/// Configuration for term search
pub config: TermSearchConfig,
}
@@ -263,7 +263,7 @@ impl Default for TermSearchConfig {
/// Note that there are usually more ways we can get to the `goal` type but some are discarded to
/// reduce the memory consumption. It is also unlikely anyone is willing ti browse through
/// thousands of possible responses so we currently take first 10 from every tactic.
-pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> {
+pub fn term_search<'db, DB: HirDatabase>(ctx: &'db TermSearchCtx<'db, DB>) -> Vec<Expr<'db>> {
let module = ctx.scope.module();
let mut defs = FxHashSet::default();
defs.insert(ScopeDef::ModuleDef(ModuleDef::Module(module)));
@@ -285,7 +285,7 @@ pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> {
};
// Try trivial tactic first, also populates lookup table
- let mut solutions: Vec<Expr> = tactics::trivial(ctx, &defs, &mut lookup).collect();
+ let mut solutions: Vec<Expr<'db>> = tactics::trivial(ctx, &defs, &mut lookup).collect();
// Use well known types tactic before iterations as it does not depend on other tactics
solutions.extend(tactics::famous_types(ctx, &defs, &mut lookup));
solutions.extend(tactics::assoc_const(ctx, &defs, &mut lookup));
diff --git a/crates/hir/src/term_search/expr.rs b/crates/hir/src/term_search/expr.rs
index 78ee3b5aa6..843831948a 100644
--- a/crates/hir/src/term_search/expr.rs
+++ b/crates/hir/src/term_search/expr.rs
@@ -59,7 +59,7 @@ fn mod_item_path_str(
/// So in short it pretty much gives us a way to get type `Option<i32>` using the items we have in
/// scope.
#[derive(Debug, Clone, Eq, Hash, PartialEq)]
-pub enum Expr {
+pub enum Expr<'db> {
/// Constant
Const(Const),
/// Static variable
@@ -69,26 +69,31 @@ pub enum Expr {
/// Constant generic parameter
ConstParam(ConstParam),
/// Well known type (such as `true` for bool)
- FamousType { ty: Type, value: &'static str },
+ FamousType { ty: Type<'db>, value: &'static str },
/// Function call (does not take self param)
- Function { func: Function, generics: Vec<Type>, params: Vec<Expr> },
+ Function { func: Function, generics: Vec<Type<'db>>, params: Vec<Expr<'db>> },
/// Method call (has self param)
- Method { func: Function, generics: Vec<Type>, target: Box<Expr>, params: Vec<Expr> },
+ Method {
+ func: Function,
+ generics: Vec<Type<'db>>,
+ target: Box<Expr<'db>>,
+ params: Vec<Expr<'db>>,
+ },
/// Enum variant construction
- Variant { variant: Variant, generics: Vec<Type>, params: Vec<Expr> },
+ Variant { variant: Variant, generics: Vec<Type<'db>>, params: Vec<Expr<'db>> },
/// Struct construction
- Struct { strukt: Struct, generics: Vec<Type>, params: Vec<Expr> },
+ Struct { strukt: Struct, generics: Vec<Type<'db>>, params: Vec<Expr<'db>> },
/// Tuple construction
- Tuple { ty: Type, params: Vec<Expr> },
+ Tuple { ty: Type<'db>, params: Vec<Expr<'db>> },
/// Struct field access
- Field { expr: Box<Expr>, field: Field },
+ Field { expr: Box<Expr<'db>>, field: Field },
/// Passing type as reference (with `&`)
- Reference(Box<Expr>),
+ Reference(Box<Expr<'db>>),
/// Indicates possibility of many different options that all evaluate to `ty`
- Many(Type),
+ Many(Type<'db>),
}
-impl Expr {
+impl<'db> Expr<'db> {
/// Generate source code for type tree.
///
/// Note that trait imports are not added to generated code.
@@ -96,8 +101,8 @@ impl Expr {
/// by `traits_used` method are also imported.
pub fn gen_source_code(
&self,
- sema_scope: &SemanticsScope<'_>,
- many_formatter: &mut dyn FnMut(&Type) -> String,
+ sema_scope: &SemanticsScope<'db>,
+ many_formatter: &mut dyn FnMut(&Type<'db>) -> String,
cfg: ImportPathConfig,
display_target: DisplayTarget,
) -> Result<String, DisplaySourceCodeError> {
@@ -298,7 +303,7 @@ impl Expr {
/// Get type of the type tree.
///
/// Same as getting the type of root node
- pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(&self, db: &'db dyn HirDatabase) -> Type<'db> {
match self {
Expr::Const(it) => it.ty(db),
Expr::Static(it) => it.ty(db),
diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs
index bcff44fcd0..9df131f90e 100644
--- a/crates/hir/src/term_search/tactics.rs
+++ b/crates/hir/src/term_search/tactics.rs
@@ -40,11 +40,11 @@ use super::{LookupTable, NewTypesKey, TermSearchCtx};
///
/// _Note that there is no use of calling this tactic in every iteration as the output does not
/// depend on the current state of `lookup`_
-pub(super) fn trivial<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn trivial<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
-) -> impl Iterator<Item = Expr> + 'a {
+ lookup: &'lt mut LookupTable<'db>,
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
defs.iter().filter_map(|def| {
let expr = match def {
@@ -104,11 +104,11 @@ pub(super) fn trivial<'a, DB: HirDatabase>(
///
/// _Note that there is no use of calling this tactic in every iteration as the output does not
/// depend on the current state of `lookup`_
-pub(super) fn assoc_const<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn assoc_const<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
-) -> impl Iterator<Item = Expr> + 'a {
+ lookup: &'lt mut LookupTable<'db>,
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
@@ -152,12 +152,12 @@ pub(super) fn assoc_const<'a, DB: HirDatabase>(
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn data_constructor<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn data_constructor<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
_defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
+ lookup: &'lt mut LookupTable<'db>,
should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
lookup
@@ -199,14 +199,14 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>(
let generics: Vec<_> = ty.type_arguments().collect();
// Early exit if some param cannot be filled from lookup
- let param_exprs: Vec<Vec<Expr>> = fields
+ let param_exprs: Vec<Vec<Expr<'_>>> = fields
.into_iter()
.map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned())))
.collect::<Option<_>>()?;
// Note that we need special case for 0 param constructors because of multi cartesian
// product
- let exprs: Vec<Expr> = if param_exprs.is_empty() {
+ let exprs: Vec<Expr<'_>> = if param_exprs.is_empty() {
vec![Expr::Struct { strukt, generics, params: Vec::new() }]
} else {
param_exprs
@@ -247,7 +247,7 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>(
.into_iter()
.filter_map(|variant| {
// Early exit if some param cannot be filled from lookup
- let param_exprs: Vec<Vec<Expr>> = variant
+ let param_exprs: Vec<Vec<Expr<'_>>> = variant
.fields(db)
.into_iter()
.map(|field| {
@@ -257,7 +257,7 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>(
// Note that we need special case for 0 param constructors because of multi cartesian
// product
- let variant_exprs: Vec<Expr> = if param_exprs.is_empty() {
+ let variant_exprs: Vec<Expr<'_>> = if param_exprs.is_empty() {
vec![Expr::Variant {
variant,
generics: generics.clone(),
@@ -301,12 +301,12 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>(
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn free_function<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn free_function<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
+ lookup: &'lt mut LookupTable<'db>,
should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
defs.iter()
@@ -375,7 +375,7 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
}
// Early exit if some param cannot be filled from lookup
- let param_exprs: Vec<Vec<Expr>> = it
+ let param_exprs: Vec<Vec<Expr<'_>>> = it
.params_without_self_with_args(db, generics.iter().cloned())
.into_iter()
.map(|field| {
@@ -389,7 +389,7 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
// Note that we need special case for 0 param constructors because of multi cartesian
// product
- let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
+ let fn_exprs: Vec<Expr<'_>> = if param_exprs.is_empty() {
vec![Expr::Function { func: *it, generics, params: Vec::new() }]
} else {
param_exprs
@@ -432,12 +432,12 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn impl_method<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn impl_method<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
_defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
+ lookup: &'lt mut LookupTable<'db>,
should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
lookup
@@ -507,14 +507,14 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup");
// Early exit if some param cannot be filled from lookup
- let param_exprs: Vec<Vec<Expr>> = it
+ let param_exprs: Vec<Vec<Expr<'_>>> = it
.params_without_self_with_args(db, ty.type_arguments())
.into_iter()
.map(|field| lookup.find_autoref(db, field.ty()))
.collect::<Option<_>>()?;
let generics: Vec<_> = ty.type_arguments().collect();
- let fn_exprs: Vec<Expr> = std::iter::once(target_type_exprs)
+ let fn_exprs: Vec<Expr<'_>> = std::iter::once(target_type_exprs)
.chain(param_exprs)
.multi_cartesian_product()
.map(|params| {
@@ -547,12 +547,12 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn struct_projection<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn struct_projection<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
_defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
+ lookup: &'lt mut LookupTable<'db>,
should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
lookup
@@ -589,11 +589,11 @@ pub(super) fn struct_projection<'a, DB: HirDatabase>(
/// * `ctx` - Context for the term search
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
-pub(super) fn famous_types<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn famous_types<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
_defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
-) -> impl Iterator<Item = Expr> + 'a {
+ lookup: &'lt mut LookupTable<'db>,
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
[
@@ -620,12 +620,12 @@ pub(super) fn famous_types<'a, DB: HirDatabase>(
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn impl_static_method<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn impl_static_method<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
_defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
+ lookup: &'lt mut LookupTable<'db>,
should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
lookup
@@ -683,7 +683,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
}
// Early exit if some param cannot be filled from lookup
- let param_exprs: Vec<Vec<Expr>> = it
+ let param_exprs: Vec<Vec<Expr<'_>>> = it
.params_without_self_with_args(db, ty.type_arguments())
.into_iter()
.map(|field| lookup.find_autoref(db, field.ty()))
@@ -692,7 +692,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
// Note that we need special case for 0 param constructors because of multi cartesian
// product
let generics = ty.type_arguments().collect();
- let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
+ let fn_exprs: Vec<Expr<'_>> = if param_exprs.is_empty() {
vec![Expr::Function { func: it, generics, params: Vec::new() }]
} else {
param_exprs
@@ -722,12 +722,12 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn make_tuple<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn make_tuple<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
_defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
+ lookup: &'lt mut LookupTable<'db>,
should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
@@ -749,15 +749,15 @@ pub(super) fn make_tuple<'a, DB: HirDatabase>(
}
// Early exit if some param cannot be filled from lookup
- let param_exprs: Vec<Vec<Expr>> =
+ let param_exprs: Vec<Vec<Expr<'db>>> =
ty.type_arguments().map(|field| lookup.find(db, &field)).collect::<Option<_>>()?;
- let exprs: Vec<Expr> = param_exprs
+ let exprs: Vec<Expr<'db>> = param_exprs
.into_iter()
.multi_cartesian_product()
.filter(|_| should_continue())
.map(|params| {
- let tys: Vec<Type> = params.iter().map(|it| it.ty(db)).collect();
+ let tys: Vec<Type<'_>> = params.iter().map(|it| it.ty(db)).collect();
let tuple_ty = Type::new_tuple(module.krate().into(), &tys);
let expr = Expr::Tuple { ty: tuple_ty.clone(), params };
diff --git a/crates/ide-assists/Cargo.toml b/crates/ide-assists/Cargo.toml
index 53af980c19..385b0e1eb7 100644
--- a/crates/ide-assists/Cargo.toml
+++ b/crates/ide-assists/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/ide-assists/src/assist_config.rs b/crates/ide-assists/src/assist_config.rs
index fb569f8cda..57ced8d853 100644
--- a/crates/ide-assists/src/assist_config.rs
+++ b/crates/ide-assists/src/assist_config.rs
@@ -22,6 +22,7 @@ pub struct AssistConfig {
pub term_search_borrowck: bool,
pub code_action_grouping: bool,
pub expr_fill_default: ExprFillDefaultMode,
+ pub prefer_self_ty: bool,
}
impl AssistConfig {
diff --git a/crates/ide-assists/src/assist_context.rs b/crates/ide-assists/src/assist_context.rs
index 9eb9452a2b..207a7548f4 100644
--- a/crates/ide-assists/src/assist_context.rs
+++ b/crates/ide-assists/src/assist_context.rs
@@ -95,7 +95,7 @@ impl<'a> AssistContext<'a> {
}
}
- pub(crate) fn db(&self) -> &RootDatabase {
+ pub(crate) fn db(&self) -> &'a RootDatabase {
self.sema.db
}
diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index 6a55f39e69..9f9d21923f 100644
--- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -192,7 +192,7 @@ fn add_missing_impl_members_inner(
fn try_gen_trait_body(
ctx: &AssistContext<'_>,
func: &ast::Fn,
- trait_ref: hir::TraitRef,
+ trait_ref: hir::TraitRef<'_>,
impl_def: &ast::Impl,
edition: Edition,
) -> Option<()> {
diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 858d436991..1ece7ddab1 100644
--- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -1,12 +1,13 @@
use std::iter::{self, Peekable};
use either::Either;
-use hir::{Adt, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics, sym};
+use hir::{Adt, AsAssocItem, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics, sym};
use ide_db::RootDatabase;
use ide_db::assists::ExprFillDefaultMode;
use ide_db::syntax_helpers::suggest_name;
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
use itertools::Itertools;
+use syntax::ToSmolStr;
use syntax::ast::edit::IndentLevel;
use syntax::ast::edit_in_place::Indent;
use syntax::ast::syntax_factory::SyntaxFactory;
@@ -79,12 +80,20 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
let make = SyntaxFactory::with_mappings();
- let module = ctx.sema.scope(expr.syntax())?.module();
+ let scope = ctx.sema.scope(expr.syntax())?;
+ let module = scope.module();
+ let self_ty = if ctx.config.prefer_self_ty {
+ scope
+ .containing_function()
+ .and_then(|function| function.as_assoc_item(ctx.db())?.implementing_ty(ctx.db()))
+ } else {
+ None
+ };
let (mut missing_pats, is_non_exhaustive, has_hidden_variants): (
Peekable<Box<dyn Iterator<Item = (ast::Pat, bool)>>>,
bool,
bool,
- ) = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr) {
+ ) = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr, self_ty.as_ref()) {
let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db(), module.krate());
let variants = enum_def.variants(ctx.db());
@@ -102,8 +111,9 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
})
.filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
- let option_enum = FamousDefs(&ctx.sema, module.krate()).core_option_Option().map(lift_enum);
- let missing_pats: Box<dyn Iterator<Item = _>> = if Some(enum_def) == option_enum {
+ let option_enum = FamousDefs(&ctx.sema, module.krate()).core_option_Option();
+ let missing_pats: Box<dyn Iterator<Item = _>> = if matches!(enum_def, ExtendedEnum::Enum { enum_: e, .. } if Some(e) == option_enum)
+ {
// Match `Some` variant first.
cov_mark::hit!(option_order);
Box::new(missing_pats.rev())
@@ -111,7 +121,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
Box::new(missing_pats)
};
(missing_pats.peekable(), is_non_exhaustive, has_hidden_variants)
- } else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr) {
+ } else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr, self_ty.as_ref()) {
let is_non_exhaustive =
enum_defs.iter().any(|enum_def| enum_def.is_non_exhaustive(ctx.db(), module.krate()));
@@ -159,7 +169,9 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
is_non_exhaustive,
has_hidden_variants,
)
- } else if let Some((enum_def, len)) = resolve_array_of_enum_def(&ctx.sema, &expr) {
+ } else if let Some((enum_def, len)) =
+ resolve_array_of_enum_def(&ctx.sema, &expr, self_ty.as_ref())
+ {
let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db(), module.krate());
let variants = enum_def.variants(ctx.db());
@@ -373,23 +385,23 @@ fn does_pat_match_variant(pat: &Pat, var: &Pat) -> bool {
}
}
-#[derive(Eq, PartialEq, Clone, Copy)]
+#[derive(Eq, PartialEq, Clone)]
enum ExtendedEnum {
Bool,
- Enum(hir::Enum),
+ Enum { enum_: hir::Enum, use_self: bool },
}
#[derive(Eq, PartialEq, Clone, Copy, Debug)]
enum ExtendedVariant {
True,
False,
- Variant(hir::Variant),
+ Variant { variant: hir::Variant, use_self: bool },
}
impl ExtendedVariant {
fn should_be_hidden(self, db: &RootDatabase, krate: Crate) -> bool {
match self {
- ExtendedVariant::Variant(var) => {
+ ExtendedVariant::Variant { variant: var, .. } => {
var.attrs(db).has_doc_hidden() && var.module(db).krate() != krate
}
_ => false,
@@ -397,25 +409,35 @@ impl ExtendedVariant {
}
}
-fn lift_enum(e: hir::Enum) -> ExtendedEnum {
- ExtendedEnum::Enum(e)
-}
-
impl ExtendedEnum {
- fn is_non_exhaustive(self, db: &RootDatabase, krate: Crate) -> bool {
+ fn enum_(
+ db: &RootDatabase,
+ enum_: hir::Enum,
+ enum_ty: &hir::Type<'_>,
+ self_ty: Option<&hir::Type<'_>>,
+ ) -> Self {
+ ExtendedEnum::Enum {
+ enum_,
+ use_self: self_ty.is_some_and(|self_ty| self_ty.could_unify_with_deeply(db, enum_ty)),
+ }
+ }
+
+ fn is_non_exhaustive(&self, db: &RootDatabase, krate: Crate) -> bool {
match self {
- ExtendedEnum::Enum(e) => {
+ ExtendedEnum::Enum { enum_: e, .. } => {
e.attrs(db).by_key(sym::non_exhaustive).exists() && e.module(db).krate() != krate
}
_ => false,
}
}
- fn variants(self, db: &RootDatabase) -> Vec<ExtendedVariant> {
- match self {
- ExtendedEnum::Enum(e) => {
- e.variants(db).into_iter().map(ExtendedVariant::Variant).collect::<Vec<_>>()
- }
+ fn variants(&self, db: &RootDatabase) -> Vec<ExtendedVariant> {
+ match *self {
+ ExtendedEnum::Enum { enum_: e, use_self } => e
+ .variants(db)
+ .into_iter()
+ .map(|variant| ExtendedVariant::Variant { variant, use_self })
+ .collect::<Vec<_>>(),
ExtendedEnum::Bool => {
Vec::<ExtendedVariant>::from([ExtendedVariant::True, ExtendedVariant::False])
}
@@ -423,9 +445,13 @@ impl ExtendedEnum {
}
}
-fn resolve_enum_def(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> Option<ExtendedEnum> {
+fn resolve_enum_def(
+ sema: &Semantics<'_, RootDatabase>,
+ expr: &ast::Expr,
+ self_ty: Option<&hir::Type<'_>>,
+) -> Option<ExtendedEnum> {
sema.type_of_expr(expr)?.adjusted().autoderef(sema.db).find_map(|ty| match ty.as_adt() {
- Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)),
+ Some(Adt::Enum(e)) => Some(ExtendedEnum::enum_(sema.db, e, &ty, self_ty)),
_ => ty.is_bool().then_some(ExtendedEnum::Bool),
})
}
@@ -433,6 +459,7 @@ fn resolve_enum_def(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> Opt
fn resolve_tuple_of_enum_def(
sema: &Semantics<'_, RootDatabase>,
expr: &ast::Expr,
+ self_ty: Option<&hir::Type<'_>>,
) -> Option<Vec<ExtendedEnum>> {
sema.type_of_expr(expr)?
.adjusted()
@@ -441,7 +468,7 @@ fn resolve_tuple_of_enum_def(
.map(|ty| {
ty.autoderef(sema.db).find_map(|ty| {
match ty.as_adt() {
- Some(Adt::Enum(e)) => Some(lift_enum(e)),
+ Some(Adt::Enum(e)) => Some(ExtendedEnum::enum_(sema.db, e, &ty, self_ty)),
// For now we only handle expansion for a tuple of enums. Here
// we map non-enum items to None and rely on `collect` to
// convert Vec<Option<hir::Enum>> into Option<Vec<hir::Enum>>.
@@ -456,10 +483,11 @@ fn resolve_tuple_of_enum_def(
fn resolve_array_of_enum_def(
sema: &Semantics<'_, RootDatabase>,
expr: &ast::Expr,
+ self_ty: Option<&hir::Type<'_>>,
) -> Option<(ExtendedEnum, usize)> {
sema.type_of_expr(expr)?.adjusted().as_array(sema.db).and_then(|(ty, len)| {
ty.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
- Some(Adt::Enum(e)) => Some((lift_enum(e), len)),
+ Some(Adt::Enum(e)) => Some((ExtendedEnum::enum_(sema.db, e, &ty, self_ty), len)),
_ => ty.is_bool().then_some((ExtendedEnum::Bool, len)),
})
})
@@ -474,9 +502,21 @@ fn build_pat(
) -> Option<ast::Pat> {
let db = ctx.db();
match var {
- ExtendedVariant::Variant(var) => {
+ ExtendedVariant::Variant { variant: var, use_self } => {
let edition = module.krate().edition(db);
- let path = mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?, edition);
+ let path = if use_self {
+ make::path_from_segments(
+ [
+ make::path_segment(make::name_ref_self_ty()),
+ make::path_segment(make::name_ref(
+ &var.name(db).display(db, edition).to_smolstr(),
+ )),
+ ],
+ false,
+ )
+ } else {
+ mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?, edition)
+ };
let fields = var.fields(db);
let pat: ast::Pat = match var.kind(db) {
hir::StructKind::Tuple => {
@@ -509,8 +549,10 @@ fn build_pat(
#[cfg(test)]
mod tests {
+ use crate::AssistConfig;
use crate::tests::{
- check_assist, check_assist_not_applicable, check_assist_target, check_assist_unresolved,
+ TEST_CONFIG, check_assist, check_assist_not_applicable, check_assist_target,
+ check_assist_unresolved, check_assist_with_config,
};
use super::add_missing_match_arms;
@@ -2095,4 +2137,111 @@ fn f() {
"#,
);
}
+
+ #[test]
+ fn prefer_self() {
+ check_assist_with_config(
+ add_missing_match_arms,
+ AssistConfig { prefer_self_ty: true, ..TEST_CONFIG },
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl Foo {
+ fn qux(&self) {
+ match self {
+ $0_ => {}
+ }
+ }
+}
+ "#,
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl Foo {
+ fn qux(&self) {
+ match self {
+ Self::Bar => ${1:todo!()},
+ Self::Baz => ${2:todo!()},$0
+ }
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn prefer_self_with_generics() {
+ check_assist_with_config(
+ add_missing_match_arms,
+ AssistConfig { prefer_self_ty: true, ..TEST_CONFIG },
+ r#"
+enum Foo<T> {
+ Bar(T),
+ Baz,
+}
+
+impl<T> Foo<T> {
+ fn qux(&self) {
+ match self {
+ $0_ => {}
+ }
+ }
+}
+ "#,
+ r#"
+enum Foo<T> {
+ Bar(T),
+ Baz,
+}
+
+impl<T> Foo<T> {
+ fn qux(&self) {
+ match self {
+ Self::Bar(${1:_}) => ${2:todo!()},
+ Self::Baz => ${3:todo!()},$0
+ }
+ }
+}
+ "#,
+ );
+ check_assist_with_config(
+ add_missing_match_arms,
+ AssistConfig { prefer_self_ty: true, ..TEST_CONFIG },
+ r#"
+enum Foo<T> {
+ Bar(T),
+ Baz,
+}
+
+impl<T> Foo<T> {
+ fn qux(v: Foo<i32>) {
+ match v {
+ $0_ => {}
+ }
+ }
+}
+ "#,
+ r#"
+enum Foo<T> {
+ Bar(T),
+ Baz,
+}
+
+impl<T> Foo<T> {
+ fn qux(v: Foo<i32>) {
+ match v {
+ Foo::Bar(${1:_}) => ${2:todo!()},
+ Foo::Baz => ${3:todo!()},$0
+ }
+ }
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/auto_import.rs b/crates/ide-assists/src/handlers/auto_import.rs
index f3243d369a..bb6a10d40b 100644
--- a/crates/ide-assists/src/handlers/auto_import.rs
+++ b/crates/ide-assists/src/handlers/auto_import.rs
@@ -164,9 +164,9 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
Some(())
}
-pub(super) fn find_importable_node(
- ctx: &AssistContext<'_>,
-) -> Option<(ImportAssets, SyntaxNode, Option<Type>)> {
+pub(super) fn find_importable_node<'a: 'db, 'db>(
+ ctx: &'a AssistContext<'db>,
+) -> Option<(ImportAssets<'db>, SyntaxNode, Option<Type<'db>>)> {
// Deduplicate this with the `expected_type_and_name` logic for completions
let expected = |expr_or_pat: Either<ast::Expr, ast::Pat>| match expr_or_pat {
Either::Left(expr) => {
@@ -226,7 +226,7 @@ pub(super) fn find_importable_node(
}
}
-fn group_label(import_candidate: &ImportCandidate) -> GroupLabel {
+fn group_label(import_candidate: &ImportCandidate<'_>) -> GroupLabel {
let name = match import_candidate {
ImportCandidate::Path(candidate) => format!("Import {}", candidate.name.text()),
ImportCandidate::TraitAssocItem(candidate) => {
@@ -244,7 +244,7 @@ fn group_label(import_candidate: &ImportCandidate) -> GroupLabel {
pub(crate) fn relevance_score(
ctx: &AssistContext<'_>,
import: &LocatedImport,
- expected: Option<&Type>,
+ expected: Option<&Type<'_>>,
current_module: Option<&Module>,
) -> i32 {
let mut score = 0;
diff --git a/crates/ide-assists/src/handlers/convert_match_to_let_else.rs b/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
index efcbcef00e..9126e869b9 100644
--- a/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
+++ b/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
@@ -1,8 +1,8 @@
use ide_db::defs::{Definition, NameRefClass};
use syntax::{
AstNode, SyntaxNode,
- ast::{self, HasName, Name},
- ted,
+ ast::{self, HasName, Name, syntax_factory::SyntaxFactory},
+ syntax_editor::SyntaxEditor,
};
use crate::{
@@ -121,34 +121,36 @@ fn find_extracted_variable(ctx: &AssistContext<'_>, arm: &ast::MatchArm) -> Opti
// Rename `extracted` with `binding` in `pat`.
fn rename_variable(pat: &ast::Pat, extracted: &[Name], binding: ast::Pat) -> SyntaxNode {
- let syntax = pat.syntax().clone_for_update();
+ let syntax = pat.syntax().clone_subtree();
+ let mut editor = SyntaxEditor::new(syntax.clone());
+ let make = SyntaxFactory::with_mappings();
let extracted = extracted
.iter()
- .map(|e| syntax.covering_element(e.syntax().text_range()))
+ .map(|e| e.syntax().text_range() - pat.syntax().text_range().start())
+ .map(|r| syntax.covering_element(r))
.collect::<Vec<_>>();
for extracted_syntax in extracted {
// If `extracted` variable is a record field, we should rename it to `binding`,
// otherwise we just need to replace `extracted` with `binding`.
-
if let Some(record_pat_field) =
extracted_syntax.ancestors().find_map(ast::RecordPatField::cast)
{
if let Some(name_ref) = record_pat_field.field_name() {
- ted::replace(
+ editor.replace(
record_pat_field.syntax(),
- ast::make::record_pat_field(
- ast::make::name_ref(&name_ref.text()),
- binding.clone(),
+ make.record_pat_field(
+ make.name_ref(&name_ref.text()),
+ binding.clone_for_update(),
)
- .syntax()
- .clone_for_update(),
+ .syntax(),
);
}
} else {
- ted::replace(extracted_syntax, binding.clone().syntax().clone_for_update());
+ editor.replace(extracted_syntax, binding.syntax().clone_for_update());
}
}
- syntax
+ editor.add_mappings(make.finish_with_mappings());
+ editor.finish().new_root().clone()
}
#[cfg(test)]
diff --git a/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
index 32c4ae2e86..8d27574eb2 100644
--- a/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
+++ b/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
@@ -4,7 +4,8 @@ use itertools::Itertools;
use syntax::{
SyntaxKind,
ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility},
- match_ast, ted,
+ match_ast,
+ syntax_editor::{Position, SyntaxEditor},
};
use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder};
@@ -97,11 +98,14 @@ fn edit_struct_def(
// Note that we don't need to consider macro files in this function because this is
// currently not triggered for struct definitions inside macro calls.
let tuple_fields = record_fields.fields().filter_map(|f| {
- let field = ast::make::tuple_field(f.visibility(), f.ty()?).clone_for_update();
- ted::insert_all(
- ted::Position::first_child_of(field.syntax()),
+ let field = ast::make::tuple_field(f.visibility(), f.ty()?);
+ let mut editor = SyntaxEditor::new(field.syntax().clone());
+ editor.insert_all(
+ Position::first_child_of(field.syntax()),
f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(),
);
+ let field_syntax = editor.finish().new_root().clone();
+ let field = ast::TupleField::cast(field_syntax)?;
Some(field)
});
let tuple_fields = ast::make::tuple_field_list(tuple_fields);
@@ -1086,8 +1090,7 @@ pub struct $0Foo {
}
"#,
r#"
-pub struct Foo(#[my_custom_attr]
-u32);
+pub struct Foo(#[my_custom_attr]u32);
"#,
);
}
diff --git a/crates/ide-assists/src/handlers/expand_rest_pattern.rs b/crates/ide-assists/src/handlers/expand_rest_pattern.rs
index b71de5e00c..c80b78fd97 100644
--- a/crates/ide-assists/src/handlers/expand_rest_pattern.rs
+++ b/crates/ide-assists/src/handlers/expand_rest_pattern.rs
@@ -175,7 +175,7 @@ pub(crate) fn expand_rest_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) ->
// ast::TuplePat(it) => (),
// FIXME
// ast::SlicePat(it) => (),
- _ => return None,
+ _ => None,
}
}
}
diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs
index cf45ea0a30..00cbef1c01 100644
--- a/crates/ide-assists/src/handlers/extract_function.rs
+++ b/crates/ide-assists/src/handlers/extract_function.rs
@@ -309,23 +309,23 @@ fn extraction_target(node: &SyntaxNode, selection_range: TextRange) -> Option<Fu
}
#[derive(Debug)]
-struct Function {
+struct Function<'db> {
name: ast::NameRef,
self_param: Option<ast::SelfParam>,
- params: Vec<Param>,
- control_flow: ControlFlow,
- ret_ty: RetType,
+ params: Vec<Param<'db>>,
+ control_flow: ControlFlow<'db>,
+ ret_ty: RetType<'db>,
body: FunctionBody,
outliving_locals: Vec<OutlivedLocal>,
/// Whether at least one of the container's tail expr is contained in the range we're extracting.
contains_tail_expr: bool,
- mods: ContainerInfo,
+ mods: ContainerInfo<'db>,
}
#[derive(Debug)]
-struct Param {
+struct Param<'db> {
var: Local,
- ty: hir::Type,
+ ty: hir::Type<'db>,
move_local: bool,
requires_mut: bool,
is_copy: bool,
@@ -340,10 +340,10 @@ enum ParamKind {
}
#[derive(Debug)]
-enum FunType {
+enum FunType<'db> {
Unit,
- Single(hir::Type),
- Tuple(Vec<hir::Type>),
+ Single(hir::Type<'db>),
+ Tuple(Vec<hir::Type<'db>>),
}
/// Where to put extracted function definition
@@ -358,19 +358,19 @@ enum Anchor {
// FIXME: ControlFlow and ContainerInfo both track some function modifiers, feels like these two should
// probably be merged somehow.
#[derive(Debug)]
-struct ControlFlow {
- kind: Option<FlowKind>,
+struct ControlFlow<'db> {
+ kind: Option<FlowKind<'db>>,
is_async: bool,
is_unsafe: bool,
}
/// The thing whose expression we are extracting from. Can be a function, const, static, const arg, ...
#[derive(Clone, Debug)]
-struct ContainerInfo {
+struct ContainerInfo<'db> {
is_const: bool,
parent_loop: Option<SyntaxNode>,
/// The function's return type, const's type etc.
- ret_type: Option<hir::Type>,
+ ret_type: Option<hir::Type<'db>>,
generic_param_lists: Vec<ast::GenericParamList>,
where_clauses: Vec<ast::WhereClause>,
edition: Edition,
@@ -389,11 +389,11 @@ struct ContainerInfo {
/// }
/// ```
#[derive(Debug, Clone)]
-enum FlowKind {
+enum FlowKind<'db> {
/// Return with value (`return $expr;`)
Return(Option<ast::Expr>),
Try {
- kind: TryKind,
+ kind: TryKind<'db>,
},
/// Break with label and value (`break 'label $expr;`)
Break(Option<ast::Lifetime>, Option<ast::Expr>),
@@ -402,18 +402,18 @@ enum FlowKind {
}
#[derive(Debug, Clone)]
-enum TryKind {
+enum TryKind<'db> {
Option,
- Result { ty: hir::Type },
+ Result { ty: hir::Type<'db> },
}
#[derive(Debug)]
-enum RetType {
- Expr(hir::Type),
+enum RetType<'db> {
+ Expr(hir::Type<'db>),
Stmt,
}
-impl RetType {
+impl RetType<'_> {
fn is_unit(&self) -> bool {
match self {
RetType::Expr(ty) => ty.is_unit(),
@@ -456,8 +456,8 @@ impl LocalUsages {
}
}
-impl Function {
- fn return_type(&self, ctx: &AssistContext<'_>) -> FunType {
+impl<'db> Function<'db> {
+ fn return_type(&self, ctx: &AssistContext<'db>) -> FunType<'db> {
match &self.ret_ty {
RetType::Expr(ty) if ty.is_unit() => FunType::Unit,
RetType::Expr(ty) => FunType::Single(ty.clone()),
@@ -487,7 +487,7 @@ impl ParamKind {
}
}
-impl Param {
+impl<'db> Param<'db> {
fn kind(&self) -> ParamKind {
match (self.move_local, self.requires_mut, self.is_copy) {
(false, true, _) => ParamKind::MutRef,
@@ -497,7 +497,7 @@ impl Param {
}
}
- fn to_arg(&self, ctx: &AssistContext<'_>, edition: Edition) -> ast::Expr {
+ fn to_arg(&self, ctx: &AssistContext<'db>, edition: Edition) -> ast::Expr {
let var = path_expr_from_local(ctx, self.var, edition);
match self.kind() {
ParamKind::Value | ParamKind::MutValue => var,
@@ -532,8 +532,12 @@ impl Param {
}
}
-impl TryKind {
- fn of_ty(ty: hir::Type, ctx: &AssistContext<'_>, edition: Edition) -> Option<TryKind> {
+impl<'db> TryKind<'db> {
+ fn of_ty(
+ ty: hir::Type<'db>,
+ ctx: &AssistContext<'db>,
+ edition: Edition,
+ ) -> Option<TryKind<'db>> {
if ty.is_unknown() {
// We favour Result for `expr?`
return Some(TryKind::Result { ty });
@@ -551,7 +555,7 @@ impl TryKind {
}
}
-impl FlowKind {
+impl<'db> FlowKind<'db> {
fn make_result_handler(&self, expr: Option<ast::Expr>) -> ast::Expr {
match self {
FlowKind::Return(_) => make::expr_return(expr),
@@ -567,7 +571,7 @@ impl FlowKind {
}
}
- fn expr_ty(&self, ctx: &AssistContext<'_>) -> Option<hir::Type> {
+ fn expr_ty(&self, ctx: &AssistContext<'db>) -> Option<hir::Type<'db>> {
match self {
FlowKind::Return(Some(expr)) | FlowKind::Break(_, Some(expr)) => {
ctx.sema.type_of_expr(expr).map(TypeInfo::adjusted)
@@ -876,11 +880,11 @@ impl FunctionBody {
(res, self_param)
}
- fn analyze_container(
+ fn analyze_container<'db>(
&self,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
edition: Edition,
- ) -> Option<(ContainerInfo, bool)> {
+ ) -> Option<(ContainerInfo<'db>, bool)> {
let mut ancestors = self.parent()?.ancestors();
let infer_expr_opt = |expr| sema.type_of_expr(&expr?).map(TypeInfo::adjusted);
let mut parent_loop = None;
@@ -985,7 +989,7 @@ impl FunctionBody {
))
}
- fn return_ty(&self, ctx: &AssistContext<'_>) -> Option<RetType> {
+ fn return_ty<'db>(&self, ctx: &AssistContext<'db>) -> Option<RetType<'db>> {
match self.tail_expr() {
Some(expr) => ctx.sema.type_of_expr(&expr).map(TypeInfo::original).map(RetType::Expr),
None => Some(RetType::Stmt),
@@ -1006,11 +1010,11 @@ impl FunctionBody {
}
/// Analyses the function body for external control flow.
- fn external_control_flow(
+ fn external_control_flow<'db>(
&self,
- ctx: &AssistContext<'_>,
- container_info: &ContainerInfo,
- ) -> Option<ControlFlow> {
+ ctx: &AssistContext<'db>,
+ container_info: &ContainerInfo<'db>,
+ ) -> Option<ControlFlow<'db>> {
let mut ret_expr = None;
let mut try_expr = None;
let mut break_expr = None;
@@ -1096,12 +1100,12 @@ impl FunctionBody {
/// find variables that should be extracted as params
///
/// Computes additional info that affects param type and mutability
- fn extracted_function_params(
+ fn extracted_function_params<'db>(
&self,
- ctx: &AssistContext<'_>,
- container_info: &ContainerInfo,
+ ctx: &AssistContext<'db>,
+ container_info: &ContainerInfo<'db>,
locals: FxIndexSet<Local>,
- ) -> Vec<Param> {
+ ) -> Vec<Param<'db>> {
locals
.into_iter()
.sorted()
@@ -1449,7 +1453,7 @@ fn fixup_call_site(builder: &mut SourceChangeBuilder, body: &FunctionBody) {
}
}
-fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> SyntaxNode {
+fn make_call(ctx: &AssistContext<'_>, fun: &Function<'_>, indent: IndentLevel) -> SyntaxNode {
let ret_ty = fun.return_type(ctx);
let args = make::arg_list(fun.params.iter().map(|param| param.to_arg(ctx, fun.mods.edition)));
@@ -1508,17 +1512,17 @@ fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> Sy
}
}
-enum FlowHandler {
+enum FlowHandler<'db> {
None,
- Try { kind: TryKind },
- If { action: FlowKind },
- IfOption { action: FlowKind },
- MatchOption { none: FlowKind },
- MatchResult { err: FlowKind },
+ Try { kind: TryKind<'db> },
+ If { action: FlowKind<'db> },
+ IfOption { action: FlowKind<'db> },
+ MatchOption { none: FlowKind<'db> },
+ MatchResult { err: FlowKind<'db> },
}
-impl FlowHandler {
- fn from_ret_ty(fun: &Function, ret_ty: &FunType) -> FlowHandler {
+impl<'db> FlowHandler<'db> {
+ fn from_ret_ty(fun: &Function<'db>, ret_ty: &FunType<'db>) -> FlowHandler<'db> {
if fun.contains_tail_expr {
return FlowHandler::None;
}
@@ -1628,7 +1632,7 @@ fn path_expr_from_local(ctx: &AssistContext<'_>, var: Local, edition: Edition) -
fn format_function(
ctx: &AssistContext<'_>,
module: hir::Module,
- fun: &Function,
+ fun: &Function<'_>,
old_indent: IndentLevel,
) -> ast::Fn {
let fun_name = make::name(&fun.name.text());
@@ -1654,7 +1658,7 @@ fn format_function(
fn make_generic_params_and_where_clause(
ctx: &AssistContext<'_>,
- fun: &Function,
+ fun: &Function<'_>,
) -> (Option<ast::GenericParamList>, Option<ast::WhereClause>) {
let used_type_params = fun.type_params(ctx);
@@ -1666,7 +1670,7 @@ fn make_generic_params_and_where_clause(
fn make_generic_param_list(
ctx: &AssistContext<'_>,
- fun: &Function,
+ fun: &Function<'_>,
used_type_params: &[TypeParam],
) -> Option<ast::GenericParamList> {
let mut generic_params = fun
@@ -1703,7 +1707,7 @@ fn param_is_required(
fn make_where_clause(
ctx: &AssistContext<'_>,
- fun: &Function,
+ fun: &Function<'_>,
used_type_params: &[TypeParam],
) -> Option<ast::WhereClause> {
let mut predicates = fun
@@ -1743,9 +1747,9 @@ fn resolved_type_param(ctx: &AssistContext<'_>, pred: &ast::WherePred) -> Option
}
}
-impl Function {
+impl<'db> Function<'db> {
/// Collect all the `TypeParam`s used in the `body` and `params`.
- fn type_params(&self, ctx: &AssistContext<'_>) -> Vec<TypeParam> {
+ fn type_params(&self, ctx: &AssistContext<'db>) -> Vec<TypeParam> {
let type_params_in_descendant_paths =
self.body.descendant_paths().filter_map(|it| match ctx.sema.resolve_path(&it) {
Some(PathResolution::TypeParam(type_param)) => Some(type_param),
@@ -1808,8 +1812,8 @@ impl Function {
}
}
-impl FunType {
- fn make_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
+impl<'db> FunType<'db> {
+ fn make_ty(&self, ctx: &AssistContext<'db>, module: hir::Module) -> ast::Type {
match self {
FunType::Unit => make::ty_unit(),
FunType::Single(ty) => make_ty(ty, ctx, module),
@@ -1831,7 +1835,11 @@ impl FunType {
}
}
-fn make_body(ctx: &AssistContext<'_>, old_indent: IndentLevel, fun: &Function) -> ast::BlockExpr {
+fn make_body(
+ ctx: &AssistContext<'_>,
+ old_indent: IndentLevel,
+ fun: &Function<'_>,
+) -> ast::BlockExpr {
let ret_ty = fun.return_type(ctx);
let handler = FlowHandler::from_ret_ty(fun, &ret_ty);
@@ -2009,19 +2017,19 @@ fn with_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::BlockExpr
make::hacky_block_expr(elements, Some(tail_expr))
}
-fn format_type(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> String {
+fn format_type(ty: &hir::Type<'_>, ctx: &AssistContext<'_>, module: hir::Module) -> String {
ty.display_source_code(ctx.db(), module.into(), true).ok().unwrap_or_else(|| "_".to_owned())
}
-fn make_ty(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
+fn make_ty(ty: &hir::Type<'_>, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
let ty_str = format_type(ty, ctx, module);
make::ty(&ty_str)
}
fn rewrite_body_segment(
ctx: &AssistContext<'_>,
- params: &[Param],
- handler: &FlowHandler,
+ params: &[Param<'_>],
+ handler: &FlowHandler<'_>,
syntax: &SyntaxNode,
) -> SyntaxNode {
let syntax = fix_param_usages(ctx, params, syntax);
@@ -2030,8 +2038,12 @@ fn rewrite_body_segment(
}
/// change all usages to account for added `&`/`&mut` for some params
-fn fix_param_usages(ctx: &AssistContext<'_>, params: &[Param], syntax: &SyntaxNode) -> SyntaxNode {
- let mut usages_for_param: Vec<(&Param, Vec<ast::Expr>)> = Vec::new();
+fn fix_param_usages(
+ ctx: &AssistContext<'_>,
+ params: &[Param<'_>],
+ syntax: &SyntaxNode,
+) -> SyntaxNode {
+ let mut usages_for_param: Vec<(&Param<'_>, Vec<ast::Expr>)> = Vec::new();
let tm = TreeMutator::new(syntax);
@@ -2085,7 +2097,7 @@ fn fix_param_usages(ctx: &AssistContext<'_>, params: &[Param], syntax: &SyntaxNo
res
}
-fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) {
+fn update_external_control_flow(handler: &FlowHandler<'_>, syntax: &SyntaxNode) {
let mut nested_loop = None;
let mut nested_scope = None;
for event in syntax.preorder() {
@@ -2146,7 +2158,10 @@ fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) {
}
}
-fn make_rewritten_flow(handler: &FlowHandler, arg_expr: Option<ast::Expr>) -> Option<ast::Expr> {
+fn make_rewritten_flow(
+ handler: &FlowHandler<'_>,
+ arg_expr: Option<ast::Expr>,
+) -> Option<ast::Expr> {
let value = match handler {
FlowHandler::None | FlowHandler::Try { .. } => return None,
FlowHandler::If { .. } => make::expr_call(
diff --git a/crates/ide-assists/src/handlers/generate_default_from_new.rs b/crates/ide-assists/src/handlers/generate_default_from_new.rs
index 79a78ab369..47233fb399 100644
--- a/crates/ide-assists/src/handlers/generate_default_from_new.rs
+++ b/crates/ide-assists/src/handlers/generate_default_from_new.rs
@@ -2,7 +2,7 @@ use ide_db::famous_defs::FamousDefs;
use stdx::format_to;
use syntax::{
AstNode,
- ast::{self, HasGenericParams, HasName, Impl, make},
+ ast::{self, HasGenericParams, HasName, HasTypeBounds, Impl, make},
};
use crate::{
@@ -88,20 +88,19 @@ fn generate_trait_impl_text_from_impl(
let generic_params = impl_.generic_param_list().map(|generic_params| {
let lifetime_params =
generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
- let ty_or_const_params = generic_params.type_or_const_params().map(|param| {
+ let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
// remove defaults since they can't be specified in impls
- match param {
+ let param = match param {
ast::TypeOrConstParam::Type(param) => {
- let param = param.clone_for_update();
- param.remove_default();
+ let param = make::type_param(param.name()?, param.type_bound_list());
ast::GenericParam::TypeParam(param)
}
ast::TypeOrConstParam::Const(param) => {
- let param = param.clone_for_update();
- param.remove_default();
+ let param = make::const_param(param.name()?, param.ty()?);
ast::GenericParam::ConstParam(param)
}
- }
+ };
+ Some(param)
});
make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))
diff --git a/crates/ide-assists/src/handlers/generate_enum_is_method.rs b/crates/ide-assists/src/handlers/generate_enum_is_method.rs
index 3e6d0bec68..517906b429 100644
--- a/crates/ide-assists/src/handlers/generate_enum_is_method.rs
+++ b/crates/ide-assists/src/handlers/generate_enum_is_method.rs
@@ -1,3 +1,5 @@
+use std::slice;
+
use ide_db::assists::GroupLabel;
use stdx::to_lower_snake_case;
use syntax::ast::HasVisibility;
@@ -52,7 +54,7 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext<'_>
let fn_name = format!("is_{}", &to_lower_snake_case(&variant_name.text()));
// Return early if we've found an existing new fn
- let impl_def = find_struct_impl(ctx, &parent_enum, &[fn_name.clone()])?;
+ let impl_def = find_struct_impl(ctx, &parent_enum, slice::from_ref(&fn_name))?;
let target = variant.syntax().text_range();
acc.add_group(
diff --git a/crates/ide-assists/src/handlers/generate_enum_projection_method.rs b/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
index 3974bcf618..e4b0f83049 100644
--- a/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
+++ b/crates/ide-assists/src/handlers/generate_enum_projection_method.rs
@@ -1,3 +1,5 @@
+use std::slice;
+
use ide_db::assists::GroupLabel;
use itertools::Itertools;
use stdx::to_lower_snake_case;
@@ -148,7 +150,7 @@ fn generate_enum_projection_method(
let fn_name = format!("{fn_name_prefix}_{}", &to_lower_snake_case(&variant_name.text()));
// Return early if we've found an existing new fn
- let impl_def = find_struct_impl(ctx, &parent_enum, &[fn_name.clone()])?;
+ let impl_def = find_struct_impl(ctx, &parent_enum, slice::from_ref(&fn_name))?;
let target = variant.syntax().text_range();
acc.add_group(
diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs
index 30084d23d1..78ae815dc8 100644
--- a/crates/ide-assists/src/handlers/generate_function.rs
+++ b/crates/ide-assists/src/handlers/generate_function.rs
@@ -307,7 +307,7 @@ impl FunctionBuilder {
ctx: &AssistContext<'_>,
call: &ast::MethodCallExpr,
name: &ast::NameRef,
- receiver_ty: Type,
+ receiver_ty: Type<'_>,
target_module: Module,
target: GeneratedFunctionTarget,
) -> Option<Self> {
diff --git a/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
index c7e5e41aac..20ee9253d3 100644
--- a/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
+++ b/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
@@ -294,7 +294,7 @@ fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldI
let self_expr = make::ext::expr_self();
let lhs = make::expr_field(self_expr, field_name);
let rhs = make::expr_path(make::ext::ident_path(field_name));
- let assign_stmt = make::expr_stmt(make::expr_assignment(lhs, rhs));
+ let assign_stmt = make::expr_stmt(make::expr_assignment(lhs, rhs).into());
let body = make::block_expr([assign_stmt.into()], None);
// Make the setter fn
diff --git a/crates/ide-assists/src/handlers/generate_impl.rs b/crates/ide-assists/src/handlers/generate_impl.rs
index 2862e6d5af..14601ca020 100644
--- a/crates/ide-assists/src/handlers/generate_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_impl.rs
@@ -1,14 +1,14 @@
use syntax::{
ast::{self, AstNode, HasName, edit_in_place::Indent, make},
- ted,
+ syntax_editor::{Position, SyntaxEditor},
};
use crate::{AssistContext, AssistId, Assists, utils};
-fn insert_impl(impl_: ast::Impl, nominal: &ast::Adt) {
+fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &ast::Adt) {
let indent = nominal.indent_level();
- ted::insert_all_raw(
- ted::Position::after(nominal.syntax()),
+ editor.insert_all(
+ Position::after(nominal.syntax()),
vec![
// Add a blank line after the ADT, and indentation for the impl to match the ADT
make::tokens::whitespace(&format!("\n\n{indent}")).into(),
@@ -51,14 +51,17 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
// Generate the impl
let impl_ = utils::generate_impl(&nominal);
+ let mut editor = edit.make_editor(nominal.syntax());
// Add a tabstop after the left curly brace
if let Some(cap) = ctx.config.snippet_cap {
if let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) {
- edit.add_tabstop_after_token(cap, l_curly);
+ let tabstop = edit.make_tabstop_after(cap);
+ editor.add_annotation(l_curly, tabstop);
}
}
- insert_impl(impl_, &edit.make_mut(nominal));
+ insert_impl(&mut editor, &impl_, &nominal);
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -97,18 +100,22 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
// Generate the impl
let impl_ = utils::generate_trait_impl_intransitive(&nominal, make::ty_placeholder());
+ let mut editor = edit.make_editor(nominal.syntax());
// Make the trait type a placeholder snippet
if let Some(cap) = ctx.config.snippet_cap {
if let Some(trait_) = impl_.trait_() {
- edit.add_placeholder_snippet(cap, trait_);
+ let placeholder = edit.make_placeholder_snippet(cap);
+ editor.add_annotation(trait_.syntax(), placeholder);
}
if let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) {
- edit.add_tabstop_after_token(cap, l_curly);
+ let tabstop = edit.make_tabstop_after(cap);
+ editor.add_annotation(l_curly, tabstop);
}
}
- insert_impl(impl_, &edit.make_mut(nominal));
+ insert_impl(&mut editor, &impl_, &nominal);
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
index bab2ccf3f3..4ddab2cfad 100644
--- a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
@@ -1,6 +1,6 @@
-use ide_db::famous_defs::FamousDefs;
+use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait};
use syntax::{
- AstNode,
+ AstNode, T,
ast::{self, edit_in_place::Indent, make},
ted,
};
@@ -32,7 +32,7 @@ use crate::{AssistContext, AssistId, Assists};
//
// $0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
// fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
-// &self[index as usize]
+// &mut self[index as usize]
// }
// }
//
@@ -48,36 +48,34 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
let indent = impl_def.indent_level();
- let trait_ = impl_def.trait_()?;
- if let ast::Type::PathType(trait_path) = trait_ {
- let trait_type = ctx.sema.resolve_trait(&trait_path.path()?)?;
- let scope = ctx.sema.scope(trait_path.syntax())?;
- if trait_type != FamousDefs(&ctx.sema, scope.krate()).core_convert_Index()? {
- return None;
- }
- }
+ let ast::Type::PathType(path) = impl_def.trait_()? else {
+ return None;
+ };
+ let trait_name = path.path()?.segment()?.name_ref()?;
+
+ let scope = ctx.sema.scope(impl_def.trait_()?.syntax())?;
+ let famous = FamousDefs(&ctx.sema, scope.krate());
+
+ let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?;
+ let trait_new = get_trait_mut(&trait_, famous)?;
// Index -> IndexMut
- let index_trait = impl_def
- .syntax()
- .descendants()
- .filter_map(ast::NameRef::cast)
- .find(|it| it.text() == "Index")?;
- ted::replace(
- index_trait.syntax(),
- make::path_segment(make::name_ref("IndexMut")).clone_for_update().syntax(),
- );
+ ted::replace(trait_name.syntax(), make::name_ref(trait_new).clone_for_update().syntax());
// index -> index_mut
- let trait_method_name = impl_def
+ let (trait_method_name, new_trait_method_name) = impl_def
.syntax()
.descendants()
.filter_map(ast::Name::cast)
- .find(|it| it.text() == "index")?;
- ted::replace(trait_method_name.syntax(), make::name("index_mut").clone_for_update().syntax());
+ .find_map(process_method_name)?;
+ ted::replace(
+ trait_method_name.syntax(),
+ make::name(new_trait_method_name).clone_for_update().syntax(),
+ );
- let type_alias = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast)?;
- ted::remove(type_alias.syntax());
+ if let Some(type_alias) = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast) {
+ ted::remove(type_alias.syntax());
+ }
// &self -> &mut self
let mut_self_param = make::mut_self_param();
@@ -87,15 +85,14 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
// &Self::Output -> &mut Self::Output
let ret_type = impl_def.syntax().descendants().find_map(ast::RetType::cast)?;
- ted::replace(
- ret_type.syntax(),
- make::ret_type(make::ty("&mut Self::Output")).clone_for_update().syntax(),
- );
+ let new_ret_type = process_ret_type(&ret_type)?;
+ ted::replace(ret_type.syntax(), make::ret_type(new_ret_type).clone_for_update().syntax());
let fn_ = impl_def.assoc_item_list()?.assoc_items().find_map(|it| match it {
ast::AssocItem::Fn(f) => Some(f),
_ => None,
})?;
+ let _ = process_ref_mut(&fn_);
let assoc_list = make::assoc_item_list().clone_for_update();
ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax());
@@ -104,7 +101,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
let target = impl_def.syntax().text_range();
acc.add(
AssistId::generate("generate_mut_trait_impl"),
- "Generate `IndexMut` impl from this `Index` trait",
+ format!("Generate `{trait_new}` impl from this `{trait_name}` trait"),
target,
|edit| {
edit.insert(target.start(), format!("$0{impl_def}\n\n{indent}"));
@@ -112,6 +109,52 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
)
}
+fn process_ref_mut(fn_: &ast::Fn) -> Option<()> {
+ let expr = fn_.body()?.tail_expr()?;
+ match &expr {
+ ast::Expr::RefExpr(ref_expr) if ref_expr.mut_token().is_none() => {
+ ted::insert_all_raw(
+ ted::Position::after(ref_expr.amp_token()?),
+ vec![make::token(T![mut]).into(), make::tokens::whitespace(" ").into()],
+ );
+ }
+ _ => {}
+ }
+ None
+}
+
+fn get_trait_mut(apply_trait: &hir::Trait, famous: FamousDefs<'_, '_>) -> Option<&'static str> {
+ let trait_ = Some(apply_trait);
+ if trait_ == famous.core_convert_Index().as_ref() {
+ return Some("IndexMut");
+ }
+ if trait_ == famous.core_convert_AsRef().as_ref() {
+ return Some("AsMut");
+ }
+ if trait_ == famous.core_borrow_Borrow().as_ref() {
+ return Some("BorrowMut");
+ }
+ None
+}
+
+fn process_method_name(name: ast::Name) -> Option<(ast::Name, &'static str)> {
+ let new_name = match &*name.text() {
+ "index" => "index_mut",
+ "as_ref" => "as_mut",
+ "borrow" => "borrow_mut",
+ _ => return None,
+ };
+ Some((name, new_name))
+}
+
+fn process_ret_type(ref_ty: &ast::RetType) -> Option<ast::Type> {
+ let ty = ref_ty.ty()?;
+ let ast::Type::RefType(ref_type) = ty else {
+ return None;
+ };
+ Some(make::ty_ref(ref_type.ty()?, true))
+}
+
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@@ -139,7 +182,7 @@ pub enum Axis { X = 0, Y = 1, Z = 2 }
$0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
- &self[index as usize]
+ &mut self[index as usize]
}
}
@@ -188,6 +231,35 @@ impl<T> core::ops::Index<Axis> for [T; 3] where T: Copy {
}
"#,
);
+
+ check_assist(
+ generate_mut_trait_impl,
+ r#"
+//- minicore: as_ref
+struct Foo(i32);
+
+impl core::convert::AsRef$0<i32> for Foo {
+ fn as_ref(&self) -> &i32 {
+ &self.0
+ }
+}
+"#,
+ r#"
+struct Foo(i32);
+
+$0impl core::convert::AsMut<i32> for Foo {
+ fn as_mut(&mut self) -> &mut i32 {
+ &mut self.0
+ }
+}
+
+impl core::convert::AsRef<i32> for Foo {
+ fn as_ref(&self) -> &i32 {
+ &self.0
+ }
+}
+"#,
+ );
}
#[test]
@@ -287,5 +359,13 @@ pub trait Index<Idx: ?Sized> {}
impl<T> Index$0<i32> for [T; 3] {}
"#,
);
+ check_assist_not_applicable(
+ generate_mut_trait_impl,
+ r#"
+pub trait AsRef<T: ?Sized> {}
+
+impl AsRef$0<i32> for [T; 3] {}
+"#,
+ );
}
}
diff --git a/crates/ide-assists/src/handlers/generate_new.rs b/crates/ide-assists/src/handlers/generate_new.rs
index 4837f92f93..51c2f65e02 100644
--- a/crates/ide-assists/src/handlers/generate_new.rs
+++ b/crates/ide-assists/src/handlers/generate_new.rs
@@ -1,5 +1,6 @@
use ide_db::{
- imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor,
+ imports::import_assets::item_for_path_search, syntax_helpers::suggest_name::NameGenerator,
+ use_trivial_constructor::use_trivial_constructor,
};
use syntax::{
ast::{self, AstNode, HasName, HasVisibility, StructKind, edit_in_place::Indent, make},
@@ -35,10 +36,30 @@ use crate::{
pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
- // We want to only apply this to non-union structs with named fields
let field_list = match strukt.kind() {
- StructKind::Record(named) => named,
- _ => return None,
+ StructKind::Record(named) => {
+ named.fields().filter_map(|f| Some((f.name()?, f.ty()?))).collect::<Vec<_>>()
+ }
+ StructKind::Tuple(tuple) => {
+ let mut name_generator = NameGenerator::default();
+ tuple
+ .fields()
+ .enumerate()
+ .filter_map(|(i, f)| {
+ let ty = f.ty()?;
+ let name = match name_generator.for_type(
+ &ctx.sema.resolve_type(&ty)?,
+ ctx.db(),
+ ctx.edition(),
+ ) {
+ Some(name) => name,
+ None => name_generator.suggest_name(&format!("_{i}")),
+ };
+ Some((make::name(name.as_str()), f.ty()?))
+ })
+ .collect::<Vec<_>>()
+ }
+ StructKind::Unit => return None,
};
// Return early if we've found an existing new fn
@@ -50,11 +71,9 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
let target = strukt.syntax().text_range();
acc.add(AssistId::generate("generate_new"), "Generate `new`", target, |builder| {
let trivial_constructors = field_list
- .fields()
- .map(|f| {
- let name = f.name()?;
-
- let ty = ctx.sema.resolve_type(&f.ty()?)?;
+ .iter()
+ .map(|(name, ty)| {
+ let ty = ctx.sema.resolve_type(ty)?;
let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?));
@@ -73,34 +92,44 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
edition,
)?;
- Some(make::record_expr_field(make::name_ref(&name.text()), Some(expr)))
+ Some((make::name_ref(&name.text()), Some(expr)))
})
.collect::<Vec<_>>();
- let params = field_list.fields().enumerate().filter_map(|(i, f)| {
+ let params = field_list.iter().enumerate().filter_map(|(i, (name, ty))| {
if trivial_constructors[i].is_none() {
- let name = f.name()?;
- let ty = f.ty()?;
-
- Some(make::param(make::ident_pat(false, false, name).into(), ty))
+ Some(make::param(make::ident_pat(false, false, name.clone()).into(), ty.clone()))
} else {
None
}
});
let params = make::param_list(None, params);
- let fields = field_list.fields().enumerate().filter_map(|(i, f)| {
- let constructor = trivial_constructors[i].clone();
- if constructor.is_some() {
+ let fields = field_list.iter().enumerate().map(|(i, (name, _))| {
+ if let Some(constructor) = trivial_constructors[i].clone() {
constructor
} else {
- Some(make::record_expr_field(make::name_ref(&f.name()?.text()), None))
+ (make::name_ref(&name.text()), None)
}
});
- let fields = make::record_expr_field_list(fields);
- let record_expr = make::record_expr(make::ext::ident_path("Self"), fields);
- let body = make::block_expr(None, Some(record_expr.into()));
+ let tail_expr: ast::Expr = match strukt.kind() {
+ StructKind::Record(_) => {
+ let fields = fields.map(|(name, expr)| make::record_expr_field(name, expr));
+ let fields = make::record_expr_field_list(fields);
+ make::record_expr(make::ext::ident_path("Self"), fields).into()
+ }
+ StructKind::Tuple(_) => {
+ let args = fields.map(|(arg, expr)| {
+ let arg = || make::expr_path(make::path_unqualified(make::path_segment(arg)));
+ expr.unwrap_or_else(arg)
+ });
+ let arg_list = make::arg_list(args);
+ make::expr_call(make::expr_path(make::ext::ident_path("Self")), arg_list).into()
+ }
+ StructKind::Unit => unreachable!(),
+ };
+ let body = make::block_expr(None, tail_expr.into());
let ret_type = make::ret_type(make::ty_path(make::ext::ident_path("Self")));
@@ -120,8 +149,35 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
.clone_for_update();
fn_.indent(1.into());
- // Add a tabstop before the name
if let Some(cap) = ctx.config.snippet_cap {
+ match strukt.kind() {
+ StructKind::Tuple(_) => {
+ let struct_args = fn_
+ .body()
+ .unwrap()
+ .syntax()
+ .descendants()
+ .filter(|it| syntax::ast::ArgList::can_cast(it.kind()))
+ .flat_map(|args| args.children())
+ .filter(|it| syntax::ast::PathExpr::can_cast(it.kind()))
+ .enumerate()
+ .filter_map(|(i, node)| {
+ if trivial_constructors[i].is_none() { Some(node) } else { None }
+ });
+ if let Some(fn_params) = fn_.param_list() {
+ for (struct_arg, fn_param) in struct_args.zip(fn_params.params()) {
+ if let Some(fn_pat) = fn_param.pat() {
+ let fn_pat = fn_pat.syntax().clone();
+ builder
+ .add_placeholder_snippet_group(cap, vec![struct_arg, fn_pat]);
+ }
+ }
+ }
+ }
+ _ => {}
+ }
+
+ // Add a tabstop before the name
if let Some(name) = fn_.name() {
builder.add_tabstop_before(cap, name);
}
@@ -157,7 +213,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
}
#[cfg(test)]
-mod tests {
+mod record_tests {
use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
use super::*;
@@ -695,3 +751,308 @@ impl<T> Source<T> {
);
}
}
+
+#[cfg(test)]
+mod tuple_tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_new_with_zst_fields() {
+ check_assist(
+ generate_new,
+ r#"
+struct Empty;
+
+struct Foo(Empty$0);
+"#,
+ r#"
+struct Empty;
+
+struct Foo(Empty);
+
+impl Foo {
+ fn $0new() -> Self {
+ Self(Empty)
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Empty;
+
+struct Foo(String, Empty$0);
+"#,
+ r#"
+struct Empty;
+
+struct Foo(String, Empty);
+
+impl Foo {
+ fn $0new(${1:_0}: String) -> Self {
+ Self(${1:_0}, Empty)
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+enum Empty { Bar }
+
+struct Foo(Empty$0);
+"#,
+ r#"
+enum Empty { Bar }
+
+struct Foo(Empty);
+
+impl Foo {
+ fn $0new() -> Self {
+ Self(Empty::Bar)
+ }
+}
+"#,
+ );
+
+ // make sure the assist only works on unit variants
+ check_assist(
+ generate_new,
+ r#"
+struct Empty {}
+
+struct Foo(Empty$0);
+"#,
+ r#"
+struct Empty {}
+
+struct Foo(Empty);
+
+impl Foo {
+ fn $0new(${1:empty}: Empty) -> Self {
+ Self(${1:empty})
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+enum Empty { Bar {} }
+
+struct Foo(Empty$0);
+"#,
+ r#"
+enum Empty { Bar {} }
+
+struct Foo(Empty);
+
+impl Foo {
+ fn $0new(${1:empty}: Empty) -> Self {
+ Self(${1:empty})
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_new() {
+ check_assist(
+ generate_new,
+ r#"
+struct Foo($0);
+"#,
+ r#"
+struct Foo();
+
+impl Foo {
+ fn $0new() -> Self {
+ Self()
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo<T: Clone>($0);
+"#,
+ r#"
+struct Foo<T: Clone>();
+
+impl<T: Clone> Foo<T> {
+ fn $0new() -> Self {
+ Self()
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo<'a, T: Foo<'a>>($0);
+"#,
+ r#"
+struct Foo<'a, T: Foo<'a>>();
+
+impl<'a, T: Foo<'a>> Foo<'a, T> {
+ fn $0new() -> Self {
+ Self()
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo(String$0);
+"#,
+ r#"
+struct Foo(String);
+
+impl Foo {
+ fn $0new(${1:_0}: String) -> Self {
+ Self(${1:_0})
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Vec<T> { };
+struct Foo(String, Vec<i32>$0);
+"#,
+ r#"
+struct Vec<T> { };
+struct Foo(String, Vec<i32>);
+
+impl Foo {
+ fn $0new(${1:_0}: String, ${2:items}: Vec<i32>) -> Self {
+ Self(${1:_0}, ${2:items})
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn check_that_visibility_modifiers_dont_get_brought_in() {
+ check_assist(
+ generate_new,
+ r#"
+struct Vec<T> { };
+struct Foo(pub String, pub Vec<i32>$0);
+"#,
+ r#"
+struct Vec<T> { };
+struct Foo(pub String, pub Vec<i32>);
+
+impl Foo {
+ fn $0new(${1:_0}: String, ${2:items}: Vec<i32>) -> Self {
+ Self(${1:_0}, ${2:items})
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generate_new_not_applicable_if_fn_exists() {
+ check_assist_not_applicable(
+ generate_new,
+ r#"
+struct Foo($0);
+
+impl Foo {
+ fn new() -> Self {
+ Self
+ }
+}
+"#,
+ );
+
+ check_assist_not_applicable(
+ generate_new,
+ r#"
+struct Foo($0);
+
+impl Foo {
+ fn New() -> Self {
+ Self
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generate_new_target() {
+ check_assist_target(
+ generate_new,
+ r#"
+struct SomeThingIrrelevant;
+/// Has a lifetime parameter
+struct Foo<'a, T: Foo<'a>>($0);
+struct EvenMoreIrrelevant;
+"#,
+ "/// Has a lifetime parameter
+struct Foo<'a, T: Foo<'a>>();",
+ );
+ }
+
+ #[test]
+ fn test_unrelated_new() {
+ check_assist(
+ generate_new,
+ r#"
+pub struct AstId<N: AstNode> {
+ file_id: HirFileId,
+ file_ast_id: FileAstId<N>,
+}
+
+impl<N: AstNode> AstId<N> {
+ pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
+ AstId { file_id, file_ast_id }
+ }
+}
+
+pub struct Source<T>(pub HirFileId,$0 pub T);
+
+impl<T> Source<T> {
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
+ Source(self.file_id, f(self.ast))
+ }
+}
+"#,
+ r#"
+pub struct AstId<N: AstNode> {
+ file_id: HirFileId,
+ file_ast_id: FileAstId<N>,
+}
+
+impl<N: AstNode> AstId<N> {
+ pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
+ AstId { file_id, file_ast_id }
+ }
+}
+
+pub struct Source<T>(pub HirFileId, pub T);
+
+impl<T> Source<T> {
+ pub fn $0new(${1:_0}: HirFileId, ${2:_1}: T) -> Self {
+ Self(${1:_0}, ${2:_1})
+ }
+
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
+ Source(self.file_id, f(self.ast))
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs b/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs
new file mode 100644
index 0000000000..4e95ceb2e8
--- /dev/null
+++ b/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs
@@ -0,0 +1,1000 @@
+use ast::make;
+use hir::{HasCrate, ModuleDef, Semantics};
+use ide_db::{
+ RootDatabase, famous_defs::FamousDefs, helpers::mod_path_to_ast,
+ imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor,
+};
+use syntax::{
+ TokenText,
+ ast::{self, AstNode, HasGenericParams, HasName, edit, edit_in_place::Indent},
+};
+
+use crate::{
+ AssistId,
+ assist_context::{AssistContext, Assists},
+ utils::add_cfg_attrs_to,
+};
+
+// Assist: generate_single_field_struct_from
+//
+// Implement From for a single field structure, ignore trivial types.
+//
+// ```
+// # //- minicore: from, phantom_data
+// use core::marker::PhantomData;
+// struct $0Foo<T> {
+// id: i32,
+// _phantom_data: PhantomData<T>,
+// }
+// ```
+// ->
+// ```
+// use core::marker::PhantomData;
+// struct Foo<T> {
+// id: i32,
+// _phantom_data: PhantomData<T>,
+// }
+//
+// impl<T> From<i32> for Foo<T> {
+// fn from(id: i32) -> Self {
+// Self { id, _phantom_data: PhantomData }
+// }
+// }
+// ```
+pub(crate) fn generate_single_field_struct_from(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let strukt_name = ctx.find_node_at_offset::<ast::Name>()?;
+ let adt = ast::Adt::cast(strukt_name.syntax().parent()?)?;
+ let ast::Adt::Struct(strukt) = adt else {
+ return None;
+ };
+
+ let sema = &ctx.sema;
+ let (names, types) = get_fields(&strukt)?;
+
+ let module = sema.scope(strukt.syntax())?.module();
+ let constructors = make_constructors(ctx, module, &types);
+
+ if constructors.iter().filter(|expr| expr.is_none()).count() != 1 {
+ return None;
+ }
+ let main_field_i = constructors.iter().position(Option::is_none)?;
+ if from_impl_exists(&strukt, main_field_i, &ctx.sema).is_some() {
+ return None;
+ }
+
+ let main_field_name =
+ names.as_ref().map_or(TokenText::borrowed("value"), |names| names[main_field_i].text());
+ let main_field_ty = types[main_field_i].clone();
+
+ acc.add(
+ AssistId::generate("generate_single_field_struct_from"),
+ "Generate single field `From`",
+ strukt.syntax().text_range(),
+ |builder| {
+ let indent = strukt.indent_level();
+ let ty_where_clause = strukt.where_clause();
+ let type_gen_params = strukt.generic_param_list();
+ let type_gen_args = type_gen_params.as_ref().map(|params| params.to_generic_args());
+ let trait_gen_args = Some(make::generic_arg_list([ast::GenericArg::TypeArg(
+ make::type_arg(main_field_ty.clone()),
+ )]));
+
+ let ty = make::ty(&strukt_name.text());
+
+ let constructor =
+ make_adt_constructor(names.as_deref(), constructors, &main_field_name);
+ let body = make::block_expr([], Some(constructor));
+
+ let fn_ = make::fn_(
+ None,
+ make::name("from"),
+ None,
+ None,
+ make::param_list(
+ None,
+ [make::param(
+ make::path_pat(make::path_from_text(&main_field_name)),
+ main_field_ty,
+ )],
+ ),
+ body,
+ Some(make::ret_type(make::ty("Self"))),
+ false,
+ false,
+ false,
+ false,
+ )
+ .clone_for_update();
+
+ fn_.indent(1.into());
+
+ let impl_ = make::impl_trait(
+ false,
+ None,
+ trait_gen_args,
+ type_gen_params,
+ type_gen_args,
+ false,
+ make::ty("From"),
+ ty.clone(),
+ None,
+ ty_where_clause.map(|wc| edit::AstNodeEdit::reset_indent(&wc)),
+ None,
+ )
+ .clone_for_update();
+
+ impl_.get_or_create_assoc_item_list().add_item(fn_.into());
+
+ add_cfg_attrs_to(&strukt, &impl_);
+
+ impl_.reindent_to(indent);
+
+ builder.insert(strukt.syntax().text_range().end(), format!("\n\n{indent}{impl_}"));
+ },
+ )
+}
+
+fn make_adt_constructor(
+ names: Option<&[ast::Name]>,
+ constructors: Vec<Option<ast::Expr>>,
+ main_field_name: &TokenText<'_>,
+) -> ast::Expr {
+ if let Some(names) = names {
+ let fields = make::record_expr_field_list(names.iter().zip(constructors).map(
+ |(name, initializer)| {
+ make::record_expr_field(make::name_ref(&name.text()), initializer)
+ },
+ ));
+ make::record_expr(make::path_from_text("Self"), fields).into()
+ } else {
+ let arg_list = make::arg_list(constructors.into_iter().map(|expr| {
+ expr.unwrap_or_else(|| make::expr_path(make::path_from_text(main_field_name)))
+ }));
+ make::expr_call(make::expr_path(make::path_from_text("Self")), arg_list).into()
+ }
+}
+
+fn make_constructors(
+ ctx: &AssistContext<'_>,
+ module: hir::Module,
+ types: &[ast::Type],
+) -> Vec<Option<ast::Expr>> {
+ let (db, sema) = (ctx.db(), &ctx.sema);
+ types
+ .iter()
+ .map(|ty| {
+ let ty = sema.resolve_type(ty)?;
+ if ty.is_unit() {
+ return Some(make::expr_tuple([]).into());
+ }
+ let item_in_ns = ModuleDef::Adt(ty.as_adt()?).into();
+ let edition = module.krate().edition(db);
+
+ let ty_path = module.find_path(
+ db,
+ item_for_path_search(db, item_in_ns)?,
+ ctx.config.import_path_config(),
+ )?;
+
+ use_trivial_constructor(db, mod_path_to_ast(&ty_path, edition), &ty, edition)
+ })
+ .collect()
+}
+
+fn get_fields(strukt: &ast::Struct) -> Option<(Option<Vec<ast::Name>>, Vec<ast::Type>)> {
+ Some(match strukt.kind() {
+ ast::StructKind::Unit => return None,
+ ast::StructKind::Record(fields) => {
+ let names = fields.fields().map(|field| field.name()).collect::<Option<_>>()?;
+ let types = fields.fields().map(|field| field.ty()).collect::<Option<_>>()?;
+ (Some(names), types)
+ }
+ ast::StructKind::Tuple(fields) => {
+ (None, fields.fields().map(|field| field.ty()).collect::<Option<_>>()?)
+ }
+ })
+}
+
+fn from_impl_exists(
+ strukt: &ast::Struct,
+ main_field_i: usize,
+ sema: &Semantics<'_, RootDatabase>,
+) -> Option<()> {
+ let db = sema.db;
+ let strukt = sema.to_def(strukt)?;
+ let krate = strukt.krate(db);
+ let from_trait = FamousDefs(sema, krate).core_convert_From()?;
+ let ty = strukt.fields(db).get(main_field_i)?.ty(db);
+
+ strukt.ty(db).impls_trait(db, from_trait, &[ty]).then_some(())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::generate_single_field_struct_from;
+
+ #[test]
+ fn works() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo {
+ foo: i32,
+ }
+ "#,
+ r#"
+ struct Foo {
+ foo: i32,
+ }
+
+ impl From<i32> for Foo {
+ fn from(foo: i32) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from, phantom_data
+ struct $0Foo {
+ b1: (),
+ b2: core::marker::PhantomData,
+ foo: i32,
+ a1: (),
+ a2: core::marker::PhantomData,
+ }
+ "#,
+ r#"
+ struct Foo {
+ b1: (),
+ b2: core::marker::PhantomData,
+ foo: i32,
+ a1: (),
+ a2: core::marker::PhantomData,
+ }
+
+ impl From<i32> for Foo {
+ fn from(foo: i32) -> Self {
+ Self { b1: (), b2: core::marker::PhantomData, foo, a1: (), a2: core::marker::PhantomData }
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn cfgs() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ #[cfg(feature = "foo")]
+ #[cfg(test)]
+ struct $0Foo {
+ foo: i32,
+ }
+ "#,
+ r#"
+ #[cfg(feature = "foo")]
+ #[cfg(test)]
+ struct Foo {
+ foo: i32,
+ }
+
+ #[cfg(feature = "foo")]
+ #[cfg(test)]
+ impl From<i32> for Foo {
+ fn from(foo: i32) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn indent() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ mod foo {
+ struct $0Foo {
+ foo: i32,
+ }
+ }
+ "#,
+ r#"
+ mod foo {
+ struct Foo {
+ foo: i32,
+ }
+
+ impl From<i32> for Foo {
+ fn from(foo: i32) -> Self {
+ Self { foo }
+ }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ mod foo {
+ mod bar {
+ struct $0Foo {
+ foo: i32,
+ }
+ }
+ }
+ "#,
+ r#"
+ mod foo {
+ mod bar {
+ struct Foo {
+ foo: i32,
+ }
+
+ impl From<i32> for Foo {
+ fn from(foo: i32) -> Self {
+ Self { foo }
+ }
+ }
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn where_clause_indent() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ mod foo {
+ mod bar {
+ trait Trait {}
+ struct $0Foo<T>
+ where
+ T: Trait,
+ {
+ foo: T,
+ }
+ }
+ }
+ "#,
+ r#"
+ mod foo {
+ mod bar {
+ trait Trait {}
+ struct Foo<T>
+ where
+ T: Trait,
+ {
+ foo: T,
+ }
+
+ impl<T> From<T> for Foo<T>
+ where
+ T: Trait,
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ mod foo {
+ mod bar {
+ trait Trait<const B: bool> {}
+ struct $0Foo<T>
+ where
+ T: Trait<{
+ true
+ }>
+ {
+ foo: T,
+ }
+ }
+ }
+ "#,
+ r#"
+ mod foo {
+ mod bar {
+ trait Trait<const B: bool> {}
+ struct Foo<T>
+ where
+ T: Trait<{
+ true
+ }>
+ {
+ foo: T,
+ }
+
+ impl<T> From<T> for Foo<T>
+ where
+ T: Trait<{
+ true
+ }>
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn generics() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T> {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T> {
+ foo: T,
+ }
+
+ impl<T> From<T> for Foo<T> {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send> {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send> {
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T> {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send> where T: Sync,{
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send> where T: Sync,{
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T>
+ where T: Sync,
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send> where T: Sync {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send> where T: Sync {
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T>
+ where T: Sync
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send> where T: Sync, Self: Send {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send> where T: Sync, Self: Send {
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T>
+ where T: Sync, Self: Send
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send>
+ where T: Sync, Self: Send
+ {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send>
+ where T: Sync, Self: Send
+ {
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T>
+ where T: Sync, Self: Send
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send>
+ where T: Sync, Self: Send,
+ {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send>
+ where T: Sync, Self: Send,
+ {
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T>
+ where T: Sync, Self: Send,
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send>
+ where T: Sync,
+ Self: Send,
+ {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send>
+ where T: Sync,
+ Self: Send,
+ {
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T>
+ where T: Sync,
+ Self: Send,
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send>
+ where
+ T: Sync,
+ Self: Send,
+ {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send>
+ where
+ T: Sync,
+ Self: Send,
+ {
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T>
+ where
+ T: Sync,
+ Self: Send,
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send + Sync>
+ where
+ T: Sync,
+ Self: Send,
+ {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send + Sync>
+ where
+ T: Sync,
+ Self: Send,
+ {
+ foo: T,
+ }
+
+ impl<T: Send + Sync> From<T> for Foo<T>
+ where
+ T: Sync,
+ Self: Send,
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn tuple() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo(i32);
+ "#,
+ r#"
+ struct Foo(i32);
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self(value)
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T>(T);
+ "#,
+ r#"
+ struct Foo<T>(T);
+
+ impl<T> From<T> for Foo<T> {
+ fn from(value: T) -> Self {
+ Self(value)
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn trivial() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from, phantom_data
+ use core::marker::PhantomData;
+ struct $0Foo(i32, PhantomData<i32>);
+ "#,
+ r#"
+ use core::marker::PhantomData;
+ struct Foo(i32, PhantomData<i32>);
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self(value, PhantomData)
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from, phantom_data
+ use core::marker::PhantomData;
+ struct $0Foo(i32, PhantomData<()>);
+ "#,
+ r#"
+ use core::marker::PhantomData;
+ struct Foo(i32, PhantomData<()>);
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self(value, PhantomData)
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from, phantom_data
+ use core::marker::PhantomData;
+ struct $0Foo(PhantomData<()>, i32, PhantomData<()>);
+ "#,
+ r#"
+ use core::marker::PhantomData;
+ struct Foo(PhantomData<()>, i32, PhantomData<()>);
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self(PhantomData, value, PhantomData)
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from, phantom_data
+ use core::marker::PhantomData;
+ struct $0Foo<T>(PhantomData<T>, i32, PhantomData<()>);
+ "#,
+ r#"
+ use core::marker::PhantomData;
+ struct Foo<T>(PhantomData<T>, i32, PhantomData<()>);
+
+ impl<T> From<i32> for Foo<T> {
+ fn from(value: i32) -> Self {
+ Self(PhantomData, value, PhantomData)
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn unit() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo(i32, ());
+ "#,
+ r#"
+ struct Foo(i32, ());
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self(value, ())
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo((), i32, ());
+ "#,
+ r#"
+ struct Foo((), i32, ());
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self((), value, ())
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo((), (), i32, ());
+ "#,
+ r#"
+ struct Foo((), (), i32, ());
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self((), (), value, ())
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn invalid_multiple_main_field() {
+ check_assist_not_applicable(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo(i32, i32);
+ "#,
+ );
+ check_assist_not_applicable(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T>(i32, T);
+ "#,
+ );
+ check_assist_not_applicable(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T>(T, T);
+ "#,
+ );
+ check_assist_not_applicable(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T> { foo: T, bar: i32 }
+ "#,
+ );
+ check_assist_not_applicable(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo { foo: i32, bar: i64 }
+ "#,
+ );
+ }
+
+ #[test]
+ fn exists_other_from() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo(i32);
+
+ impl From<&i32> for Foo {
+ fn from(value: &i32) -> Self {
+ todo!()
+ }
+ }
+ "#,
+ r#"
+ struct Foo(i32);
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self(value)
+ }
+ }
+
+ impl From<&i32> for Foo {
+ fn from(value: &i32) -> Self {
+ todo!()
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo(i32);
+
+ type X = i32;
+
+ impl From<&X> for Foo {
+ fn from(value: &X) -> Self {
+ todo!()
+ }
+ }
+ "#,
+ r#"
+ struct Foo(i32);
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self(value)
+ }
+ }
+
+ type X = i32;
+
+ impl From<&X> for Foo {
+ fn from(value: &X) -> Self {
+ todo!()
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn exists_from() {
+ check_assist_not_applicable(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo(i32);
+
+ impl From<i32> for Foo {
+ fn from(_: i32) -> Self {
+ todo!()
+ }
+ }
+ "#,
+ );
+ check_assist_not_applicable(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo(i32);
+
+ type X = i32;
+
+ impl From<X> for Foo {
+ fn from(_: X) -> Self {
+ todo!()
+ }
+ }
+ "#,
+ );
+ }
+}
diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs
index 6f028e58d0..b7b8bc604a 100644
--- a/crates/ide-assists/src/handlers/inline_call.rs
+++ b/crates/ide-assists/src/handlers/inline_call.rs
@@ -283,11 +283,11 @@ impl CallInfo {
}
}
-fn get_fn_params(
- db: &dyn HirDatabase,
+fn get_fn_params<'db>(
+ db: &'db dyn HirDatabase,
function: hir::Function,
param_list: &ast::ParamList,
-) -> Option<Vec<(ast::Pat, Option<ast::Type>, hir::Param)>> {
+) -> Option<Vec<(ast::Pat, Option<ast::Type>, hir::Param<'db>)>> {
let mut assoc_fn_params = function.assoc_fn_params(db).into_iter();
let mut params = Vec::new();
@@ -316,7 +316,7 @@ fn inline(
function_def_file_id: EditionedFileId,
function: hir::Function,
fn_body: &ast::BlockExpr,
- params: &[(ast::Pat, Option<ast::Type>, hir::Param)],
+ params: &[(ast::Pat, Option<ast::Type>, hir::Param<'_>)],
CallInfo { node, arguments, generic_arg_list, krate }: &CallInfo,
) -> ast::Expr {
let file_id = sema.hir_file_for(fn_body.syntax());
diff --git a/crates/ide-assists/src/handlers/inline_const_as_literal.rs b/crates/ide-assists/src/handlers/inline_const_as_literal.rs
index e5ed04fdc7..b11d3792bc 100644
--- a/crates/ide-assists/src/handlers/inline_const_as_literal.rs
+++ b/crates/ide-assists/src/handlers/inline_const_as_literal.rs
@@ -58,7 +58,7 @@ pub(crate) fn inline_const_as_literal(acc: &mut Assists, ctx: &AssistContext<'_>
fn validate_type_recursively(
ctx: &AssistContext<'_>,
- ty_hir: Option<&hir::Type>,
+ ty_hir: Option<&hir::Type<'_>>,
refed: bool,
fuel: i32,
) -> Option<()> {
diff --git a/crates/ide-assists/src/handlers/merge_match_arms.rs b/crates/ide-assists/src/handlers/merge_match_arms.rs
index 42f35210b4..08170f81b2 100644
--- a/crates/ide-assists/src/handlers/merge_match_arms.rs
+++ b/crates/ide-assists/src/handlers/merge_match_arms.rs
@@ -105,7 +105,7 @@ fn contains_placeholder(a: &ast::MatchArm) -> bool {
}
fn are_same_types(
- current_arm_types: &FxHashMap<String, Option<Type>>,
+ current_arm_types: &FxHashMap<String, Option<Type<'_>>>,
arm: &ast::MatchArm,
ctx: &AssistContext<'_>,
) -> bool {
@@ -121,15 +121,15 @@ fn are_same_types(
true
}
-fn get_arm_types(
- context: &AssistContext<'_>,
+fn get_arm_types<'db>(
+ context: &AssistContext<'db>,
arm: &ast::MatchArm,
-) -> FxHashMap<String, Option<Type>> {
- let mut mapping: FxHashMap<String, Option<Type>> = FxHashMap::default();
+) -> FxHashMap<String, Option<Type<'db>>> {
+ let mut mapping: FxHashMap<String, Option<Type<'db>>> = FxHashMap::default();
- fn recurse(
- map: &mut FxHashMap<String, Option<Type>>,
- ctx: &AssistContext<'_>,
+ fn recurse<'db>(
+ map: &mut FxHashMap<String, Option<Type<'db>>>,
+ ctx: &AssistContext<'db>,
pat: &Option<ast::Pat>,
) {
if let Some(local_pat) = pat {
diff --git a/crates/ide-assists/src/handlers/promote_local_to_const.rs b/crates/ide-assists/src/handlers/promote_local_to_const.rs
index 6316a8f0db..603be4d667 100644
--- a/crates/ide-assists/src/handlers/promote_local_to_const.rs
+++ b/crates/ide-assists/src/handlers/promote_local_to_const.rs
@@ -3,8 +3,7 @@ use ide_db::{assists::AssistId, defs::Definition};
use stdx::to_upper_snake_case;
use syntax::{
AstNode,
- ast::{self, HasName, make},
- ted,
+ ast::{self, HasName, syntax_factory::SyntaxFactory},
};
use crate::{
@@ -69,15 +68,18 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
"Promote local to constant",
let_stmt.syntax().text_range(),
|edit| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(let_stmt.syntax());
let name = to_upper_snake_case(&name.to_string());
let usages = Definition::Local(local).usages(&ctx.sema).all();
if let Some(usages) = usages.references.get(&ctx.file_id()) {
- let name_ref = make::name_ref(&name);
+ let name_ref = make.name_ref(&name);
for usage in usages {
let Some(usage_name) = usage.name.as_name_ref().cloned() else { continue };
if let Some(record_field) = ast::RecordExprField::for_name_ref(&usage_name) {
- let name_expr = make::expr_path(make::path_from_text(&name));
+ let path = make.ident_path(&name);
+ let name_expr = make.expr_path(path);
utils::replace_record_field_expr(ctx, edit, record_field, name_expr);
} else {
let usage_range = usage.range;
@@ -86,15 +88,17 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
}
}
- let item = make::item_const(None, make::name(&name), make::ty(&ty), initializer)
- .clone_for_update();
- let let_stmt = edit.make_mut(let_stmt);
+ let item = make.item_const(None, make.name(&name), make.ty(&ty), initializer);
if let Some((cap, name)) = ctx.config.snippet_cap.zip(item.name()) {
- edit.add_tabstop_before(cap, name);
+ let tabstop = edit.make_tabstop_before(cap);
+ editor.add_annotation(name.syntax().clone(), tabstop);
}
- ted::replace(let_stmt.syntax(), item.syntax());
+ editor.replace(let_stmt.syntax(), item.syntax());
+
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/pull_assignment_up.rs b/crates/ide-assists/src/handlers/pull_assignment_up.rs
index 5f626d2957..1b0c313935 100644
--- a/crates/ide-assists/src/handlers/pull_assignment_up.rs
+++ b/crates/ide-assists/src/handlers/pull_assignment_up.rs
@@ -1,7 +1,8 @@
use syntax::{
AstNode,
- ast::{self, make},
- ted,
+ algo::find_node_at_range,
+ ast::{self, syntax_factory::SyntaxFactory},
+ syntax_editor::SyntaxEditor,
};
use crate::{
@@ -66,33 +67,51 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) ->
return None;
}
}
-
+ let target = tgt.syntax().text_range();
+
+ let edit_tgt = tgt.syntax().clone_subtree();
+ let assignments: Vec<_> = collector
+ .assignments
+ .into_iter()
+ .filter_map(|(stmt, rhs)| {
+ Some((
+ find_node_at_range::<ast::BinExpr>(
+ &edit_tgt,
+ stmt.syntax().text_range() - target.start(),
+ )?,
+ find_node_at_range::<ast::Expr>(
+ &edit_tgt,
+ rhs.syntax().text_range() - target.start(),
+ )?,
+ ))
+ })
+ .collect();
+
+ let mut editor = SyntaxEditor::new(edit_tgt);
+ for (stmt, rhs) in assignments {
+ let mut stmt = stmt.syntax().clone();
+ if let Some(parent) = stmt.parent() {
+ if ast::ExprStmt::cast(parent.clone()).is_some() {
+ stmt = parent.clone();
+ }
+ }
+ editor.replace(stmt, rhs.syntax());
+ }
+ let new_tgt_root = editor.finish().new_root().clone();
+ let new_tgt = ast::Expr::cast(new_tgt_root)?;
acc.add(
AssistId::refactor_extract("pull_assignment_up"),
"Pull assignment up",
- tgt.syntax().text_range(),
+ target,
move |edit| {
- let assignments: Vec<_> = collector
- .assignments
- .into_iter()
- .map(|(stmt, rhs)| (edit.make_mut(stmt), rhs.clone_for_update()))
- .collect();
-
- let tgt = edit.make_mut(tgt);
-
- for (stmt, rhs) in assignments {
- let mut stmt = stmt.syntax().clone();
- if let Some(parent) = stmt.parent() {
- if ast::ExprStmt::cast(parent.clone()).is_some() {
- stmt = parent.clone();
- }
- }
- ted::replace(stmt, rhs.syntax());
- }
- let assign_expr = make::expr_assignment(collector.common_lhs, tgt.clone());
- let assign_stmt = make::expr_stmt(assign_expr);
-
- ted::replace(tgt.syntax(), assign_stmt.syntax().clone_for_update());
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(tgt.syntax());
+ let assign_expr = make.expr_assignment(collector.common_lhs, new_tgt.clone());
+ let assign_stmt = make.expr_stmt(assign_expr.into());
+
+ editor.replace(tgt.syntax(), assign_stmt.syntax());
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/qualify_path.rs b/crates/ide-assists/src/handlers/qualify_path.rs
index 07d2f52a34..8834ad9765 100644
--- a/crates/ide-assists/src/handlers/qualify_path.rs
+++ b/crates/ide-assists/src/handlers/qualify_path.rs
@@ -217,7 +217,7 @@ fn item_as_trait(db: &RootDatabase, item: hir::ItemInNs) -> Option<hir::Trait> {
}
}
-fn group_label(candidate: &ImportCandidate) -> GroupLabel {
+fn group_label(candidate: &ImportCandidate<'_>) -> GroupLabel {
let name = match candidate {
ImportCandidate::Path(it) => &it.name,
ImportCandidate::TraitAssocItem(it) | ImportCandidate::TraitMethod(it) => {
@@ -230,7 +230,7 @@ fn group_label(candidate: &ImportCandidate) -> GroupLabel {
fn label(
db: &RootDatabase,
- candidate: &ImportCandidate,
+ candidate: &ImportCandidate<'_>,
import: &LocatedImport,
edition: Edition,
) -> String {
diff --git a/crates/ide-assists/src/handlers/remove_dbg.rs b/crates/ide-assists/src/handlers/remove_dbg.rs
index 52ace03f3c..9356d02706 100644
--- a/crates/ide-assists/src/handlers/remove_dbg.rs
+++ b/crates/ide-assists/src/handlers/remove_dbg.rs
@@ -1,8 +1,9 @@
use itertools::Itertools;
use syntax::{
- Edition, NodeOrToken, SyntaxElement, T, TextRange, TextSize,
- ast::{self, AstNode, AstToken, make},
- match_ast, ted,
+ Edition, NodeOrToken, SyntaxNode, SyntaxToken, T,
+ ast::{self, AstNode, make},
+ match_ast,
+ syntax_editor::{Position, SyntaxEditor},
};
use crate::{AssistContext, AssistId, Assists};
@@ -40,21 +41,23 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
let replacements =
macro_calls.into_iter().filter_map(compute_dbg_replacement).collect::<Vec<_>>();
-
- acc.add(
- AssistId::quick_fix("remove_dbg"),
- "Remove dbg!()",
- replacements.iter().map(|&(range, _)| range).reduce(|acc, range| acc.cover(range))?,
- |builder| {
- for (range, expr) in replacements {
- if let Some(expr) = expr {
- builder.replace(range, expr.to_string());
- } else {
- builder.delete(range);
- }
+ let target = replacements
+ .iter()
+ .flat_map(|(node_or_token, _)| node_or_token.iter())
+ .map(|t| t.text_range())
+ .reduce(|acc, range| acc.cover(range))?;
+ acc.add(AssistId::quick_fix("remove_dbg"), "Remove dbg!()", target, |builder| {
+ let mut editor = builder.make_editor(ctx.source_file().syntax());
+ for (range, expr) in replacements {
+ if let Some(expr) = expr {
+ editor.insert(Position::before(range[0].clone()), expr.syntax().clone_for_update());
+ }
+ for node_or_token in range {
+ editor.delete(node_or_token);
}
- },
- )
+ }
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
+ })
}
/// Returns `None` when either
@@ -63,7 +66,9 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
/// - (`macro_expr` has no parent - is that possible?)
///
/// Returns `Some(_, None)` when the macro call should just be removed.
-fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Option<ast::Expr>)> {
+fn compute_dbg_replacement(
+ macro_expr: ast::MacroExpr,
+) -> Option<(Vec<NodeOrToken<SyntaxNode, SyntaxToken>>, Option<ast::Expr>)> {
let macro_call = macro_expr.macro_call()?;
let tt = macro_call.token_tree()?;
let r_delim = NodeOrToken::Token(tt.right_delimiter_token()?);
@@ -88,22 +93,22 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt
match_ast! {
match parent {
ast::StmtList(_) => {
- let range = macro_expr.syntax().text_range();
- let range = match whitespace_start(macro_expr.syntax().prev_sibling_or_token()) {
- Some(start) => range.cover_offset(start),
- None => range,
- };
- (range, None)
+ let mut replace = vec![macro_expr.syntax().clone().into()];
+ if let Some(prev_sibling) = macro_expr.syntax().prev_sibling_or_token()
+ && prev_sibling.kind() == syntax::SyntaxKind::WHITESPACE {
+ replace.push(prev_sibling);
+ }
+ (replace, None)
},
ast::ExprStmt(it) => {
- let range = it.syntax().text_range();
- let range = match whitespace_start(it.syntax().prev_sibling_or_token()) {
- Some(start) => range.cover_offset(start),
- None => range,
- };
- (range, None)
+ let mut replace = vec![it.syntax().clone().into()];
+ if let Some(prev_sibling) = it.syntax().prev_sibling_or_token()
+ && prev_sibling.kind() == syntax::SyntaxKind::WHITESPACE {
+ replace.push(prev_sibling);
+ }
+ (replace, None)
},
- _ => (macro_call.syntax().text_range(), Some(make::ext::expr_unit())),
+ _ => (vec![macro_call.syntax().clone().into()], Some(make::ext::expr_unit())),
}
}
}
@@ -147,13 +152,13 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt
};
let expr = replace_nested_dbgs(expr.clone());
let expr = if wrap { make::expr_paren(expr).into() } else { expr.clone_subtree() };
- (macro_call.syntax().text_range(), Some(expr))
+ (vec![macro_call.syntax().clone().into()], Some(expr))
}
// dbg!(expr0, expr1, ...)
exprs => {
let exprs = exprs.iter().cloned().map(replace_nested_dbgs);
let expr = make::expr_tuple(exprs);
- (macro_call.syntax().text_range(), Some(expr.into()))
+ (vec![macro_call.syntax().clone().into()], Some(expr.into()))
}
})
}
@@ -178,8 +183,8 @@ fn replace_nested_dbgs(expanded: ast::Expr) -> ast::Expr {
return replaced;
}
- let expanded = expanded.clone_for_update();
-
+ let expanded = expanded.clone_subtree();
+ let mut editor = SyntaxEditor::new(expanded.syntax().clone());
// We need to collect to avoid mutation during traversal.
let macro_exprs: Vec<_> =
expanded.syntax().descendants().filter_map(ast::MacroExpr::cast).collect();
@@ -191,17 +196,13 @@ fn replace_nested_dbgs(expanded: ast::Expr) -> ast::Expr {
};
if let Some(expr) = expr_opt {
- ted::replace(mac.syntax(), expr.syntax().clone_for_update());
+ editor.replace(mac.syntax(), expr.syntax().clone_for_update());
} else {
- ted::remove(mac.syntax());
+ editor.delete(mac.syntax());
}
}
-
- expanded
-}
-
-fn whitespace_start(it: Option<SyntaxElement>) -> Option<TextSize> {
- Some(it?.into_token().and_then(ast::Whitespace::cast)?.syntax().text_range().start())
+ let expanded_syntax = editor.finish().new_root().clone();
+ ast::Expr::cast(expanded_syntax).unwrap()
}
#[cfg(test)]
diff --git a/crates/ide-assists/src/handlers/remove_unused_imports.rs b/crates/ide-assists/src/handlers/remove_unused_imports.rs
index 16debc4d72..c38bdfdccf 100644
--- a/crates/ide-assists/src/handlers/remove_unused_imports.rs
+++ b/crates/ide-assists/src/handlers/remove_unused_imports.rs
@@ -117,7 +117,7 @@ pub(crate) fn remove_unused_imports(acc: &mut Assists, ctx: &AssistContext<'_>)
if unused.peek().is_some() {
acc.add(
AssistId::quick_fix("remove_unused_imports"),
- "Remove all the unused imports",
+ "Remove all unused imports",
selected_el.text_range(),
|builder| {
let unused: Vec<ast::UseTree> = unused.map(|x| builder.make_mut(x)).collect();
diff --git a/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs b/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
index e933bcc40d..5ef8ba46b9 100644
--- a/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
+++ b/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
@@ -1,8 +1,5 @@
use ide_db::syntax_helpers::suggest_name;
-use syntax::{
- ast::{self, AstNode, make},
- ted,
-};
+use syntax::ast::{self, AstNode, syntax_factory::SyntaxFactory};
use crate::{AssistContext, AssistId, Assists};
@@ -60,21 +57,24 @@ pub(crate) fn replace_is_method_with_if_let_method(
message,
call_expr.syntax().text_range(),
|edit| {
- let call_expr = edit.make_mut(call_expr);
-
- let var_pat = make::ident_pat(false, false, make::name(&var_name));
- let pat = make::tuple_struct_pat(make::ext::ident_path(text), [var_pat.into()]);
- let let_expr = make::expr_let(pat.into(), receiver).clone_for_update();
-
- if let Some(cap) = ctx.config.snippet_cap {
- if let Some(ast::Pat::TupleStructPat(pat)) = let_expr.pat() {
- if let Some(first_var) = pat.fields().next() {
- edit.add_placeholder_snippet(cap, first_var);
- }
- }
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(call_expr.syntax());
+
+ let var_pat = make.ident_pat(false, false, make.name(&var_name));
+ let pat = make.tuple_struct_pat(make.ident_path(text), [var_pat.into()]);
+ let let_expr = make.expr_let(pat.into(), receiver);
+
+ if let Some(cap) = ctx.config.snippet_cap
+ && let Some(ast::Pat::TupleStructPat(pat)) = let_expr.pat()
+ && let Some(first_var) = pat.fields().next()
+ {
+ let placeholder = edit.make_placeholder_snippet(cap);
+ editor.add_annotation(first_var.syntax(), placeholder);
}
- ted::replace(call_expr.syntax(), let_expr.syntax());
+ editor.replace(call_expr.syntax(), let_expr.syntax());
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/term_search.rs b/crates/ide-assists/src/handlers/term_search.rs
index 6af8e1482c..6527d3706e 100644
--- a/crates/ide-assists/src/handlers/term_search.rs
+++ b/crates/ide-assists/src/handlers/term_search.rs
@@ -46,7 +46,7 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
return None;
}
- let mut formatter = |_: &hir::Type| String::from("todo!()");
+ let mut formatter = |_: &hir::Type<'_>| String::from("todo!()");
let edition = scope.krate().edition(ctx.db());
let paths = paths
@@ -111,7 +111,7 @@ fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#,
check_assist(
term_search,
r#"//- minicore: todo, unimplemented
-fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#,
+fn f() { let a: u128 = 1; let b: u128 = unimplemented$0!("asd") }"#,
r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
)
}
@@ -121,7 +121,7 @@ fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#,
check_assist(
term_search,
r#"//- minicore: todo, unimplemented
-fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#,
+fn f() { let a: u128 = 1; let b: u128 = unimplemented$0!() }"#,
r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
)
}
diff --git a/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs b/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
index 109269bd6e..504e12f93d 100644
--- a/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
+++ b/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
@@ -1,8 +1,7 @@
use ide_db::assists::AssistId;
use syntax::{
AstNode, T,
- ast::{self, make},
- ted,
+ ast::{self, syntax_factory::SyntaxFactory},
};
use crate::{AssistContext, Assists};
@@ -37,8 +36,7 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>)
RCur,
}
- let makro = ctx.find_node_at_offset::<ast::MacroCall>()?.clone_for_update();
- let makro_text_range = makro.syntax().text_range();
+ let makro = ctx.find_node_at_offset::<ast::MacroCall>()?;
let cursor_offset = ctx.offset();
let semicolon = makro.semicolon_token();
@@ -71,24 +69,28 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>)
},
token_tree.syntax().text_range(),
|builder| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = builder.make_editor(makro.syntax());
+
match token {
MacroDelims::LPar | MacroDelims::RPar => {
- ted::replace(ltoken, make::token(T!['{']));
- ted::replace(rtoken, make::token(T!['}']));
+ editor.replace(ltoken, make.token(T!['{']));
+ editor.replace(rtoken, make.token(T!['}']));
if let Some(sc) = semicolon {
- ted::remove(sc);
+ editor.delete(sc);
}
}
MacroDelims::LBra | MacroDelims::RBra => {
- ted::replace(ltoken, make::token(T!['(']));
- ted::replace(rtoken, make::token(T![')']));
+ editor.replace(ltoken, make.token(T!['(']));
+ editor.replace(rtoken, make.token(T![')']));
}
MacroDelims::LCur | MacroDelims::RCur => {
- ted::replace(ltoken, make::token(T!['[']));
- ted::replace(rtoken, make::token(T![']']));
+ editor.replace(ltoken, make.token(T!['[']));
+ editor.replace(rtoken, make.token(T![']']));
}
}
- builder.replace(makro_text_range, makro.syntax().text());
+ editor.add_mappings(make.finish_with_mappings());
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/unmerge_match_arm.rs b/crates/ide-assists/src/handlers/unmerge_match_arm.rs
index 5aedff5cc7..7b0f2dc65a 100644
--- a/crates/ide-assists/src/handlers/unmerge_match_arm.rs
+++ b/crates/ide-assists/src/handlers/unmerge_match_arm.rs
@@ -1,8 +1,7 @@
use syntax::{
Direction, SyntaxKind, T,
- algo::neighbor,
- ast::{self, AstNode, edit::IndentLevel, make},
- ted::{self, Position},
+ ast::{self, AstNode, edit::IndentLevel, syntax_factory::SyntaxFactory},
+ syntax_editor::{Element, Position},
};
use crate::{AssistContext, AssistId, Assists};
@@ -33,7 +32,7 @@ use crate::{AssistContext, AssistId, Assists};
// ```
pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let pipe_token = ctx.find_token_syntax_at_offset(T![|])?;
- let or_pat = ast::OrPat::cast(pipe_token.parent()?)?.clone_for_update();
+ let or_pat = ast::OrPat::cast(pipe_token.parent()?)?;
if or_pat.leading_pipe().is_some_and(|it| it == pipe_token) {
return None;
}
@@ -44,13 +43,14 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
// without `OrPat`.
let new_parent = match_arm.syntax().parent()?;
- let old_parent_range = new_parent.text_range();
acc.add(
AssistId::refactor_rewrite("unmerge_match_arm"),
"Unmerge match arm",
pipe_token.text_range(),
|edit| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(&new_parent);
let pats_after = pipe_token
.siblings_with_tokens(Direction::Next)
.filter_map(|it| ast::Pat::cast(it.into_node()?))
@@ -59,11 +59,9 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
let new_pat = if pats_after.len() == 1 {
pats_after[0].clone()
} else {
- make::or_pat(pats_after, or_pat.leading_pipe().is_some()).into()
+ make.or_pat(pats_after, or_pat.leading_pipe().is_some()).into()
};
- let new_match_arm =
- make::match_arm(new_pat, match_arm.guard(), match_arm_body).clone_for_update();
-
+ let new_match_arm = make.match_arm(new_pat, match_arm.guard(), match_arm_body);
let mut pipe_index = pipe_token.index();
if pipe_token
.prev_sibling_or_token()
@@ -71,10 +69,13 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
{
pipe_index -= 1;
}
- or_pat.syntax().splice_children(
- pipe_index..or_pat.syntax().children_with_tokens().count(),
- Vec::new(),
- );
+ for child in or_pat
+ .syntax()
+ .children_with_tokens()
+ .skip_while(|child| child.index() < pipe_index)
+ {
+ editor.delete(child.syntax_element());
+ }
let mut insert_after_old_arm = Vec::new();
@@ -86,33 +87,19 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
// body is a block, but we don't bother to check that.
// - Missing after the arm with arms after, if the arm body is a block. In this case
// we don't want to insert a comma at all.
- let has_comma_after =
- std::iter::successors(match_arm.syntax().last_child_or_token(), |it| {
- it.prev_sibling_or_token()
- })
- .map(|it| it.kind())
- .find(|it| !it.is_trivia())
- == Some(T![,]);
- let has_arms_after = neighbor(&match_arm, Direction::Next).is_some();
- if !has_comma_after && !has_arms_after {
- insert_after_old_arm.push(make::token(T![,]).into());
+ let has_comma_after = match_arm.comma_token().is_some();
+ if !has_comma_after && !match_arm.expr().unwrap().is_block_like() {
+ insert_after_old_arm.push(make.token(T![,]).into());
}
let indent = IndentLevel::from_node(match_arm.syntax());
- insert_after_old_arm.push(make::tokens::whitespace(&format!("\n{indent}")).into());
+ insert_after_old_arm.push(make.whitespace(&format!("\n{indent}")).into());
insert_after_old_arm.push(new_match_arm.syntax().clone().into());
- ted::insert_all_raw(Position::after(match_arm.syntax()), insert_after_old_arm);
-
- if has_comma_after {
- ted::insert_raw(
- Position::last_child_of(new_match_arm.syntax()),
- make::token(T![,]),
- );
- }
-
- edit.replace(old_parent_range, new_parent.to_string());
+ editor.insert_all(Position::after(match_arm.syntax()), insert_after_old_arm);
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -258,7 +245,7 @@ fn main() {
let x = X::A;
let y = match x {
X::A => 1i32,
- X::B => 1i32
+ X::B => 1i32,
};
}
"#,
@@ -276,7 +263,7 @@ enum X { A, B }
fn main() {
let x = X::A;
match x {
- X::A $0| X::B => {},
+ X::A $0| X::B => {}
}
}
"#,
@@ -287,8 +274,8 @@ enum X { A, B }
fn main() {
let x = X::A;
match x {
- X::A => {},
- X::B => {},
+ X::A => {}
+ X::B => {}
}
}
"#,
diff --git a/crates/ide-assists/src/handlers/wrap_return_type.rs b/crates/ide-assists/src/handlers/wrap_return_type.rs
index 9ea78719b2..d7189aa5db 100644
--- a/crates/ide-assists/src/handlers/wrap_return_type.rs
+++ b/crates/ide-assists/src/handlers/wrap_return_type.rs
@@ -56,7 +56,8 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
};
let type_ref = &ret_type.ty()?;
- let ty = ctx.sema.resolve_type(type_ref)?.as_adt();
+ let ty = ctx.sema.resolve_type(type_ref)?;
+ let ty_adt = ty.as_adt();
let famous_defs = FamousDefs(&ctx.sema, ctx.sema.scope(type_ref.syntax())?.krate());
for kind in WrapperKind::ALL {
@@ -64,7 +65,7 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
continue;
};
- if matches!(ty, Some(hir::Adt::Enum(ret_type)) if ret_type == core_wrapper) {
+ if matches!(ty_adt, Some(hir::Adt::Enum(ret_type)) if ret_type == core_wrapper) {
// The return type is already wrapped
cov_mark::hit!(wrap_return_type_simple_return_type_already_wrapped);
continue;
@@ -78,10 +79,23 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
|builder| {
let mut editor = builder.make_editor(&parent);
let make = SyntaxFactory::with_mappings();
- let alias = wrapper_alias(ctx, &make, &core_wrapper, type_ref, kind.symbol());
- let new_return_ty = alias.unwrap_or_else(|| match kind {
- WrapperKind::Option => make.ty_option(type_ref.clone()),
- WrapperKind::Result => make.ty_result(type_ref.clone(), make.ty_infer().into()),
+ let alias = wrapper_alias(ctx, &make, core_wrapper, type_ref, &ty, kind.symbol());
+ let (ast_new_return_ty, semantic_new_return_ty) = alias.unwrap_or_else(|| {
+ let (ast_ty, ty_constructor) = match kind {
+ WrapperKind::Option => {
+ (make.ty_option(type_ref.clone()), famous_defs.core_option_Option())
+ }
+ WrapperKind::Result => (
+ make.ty_result(type_ref.clone(), make.ty_infer().into()),
+ famous_defs.core_result_Result(),
+ ),
+ };
+ let semantic_ty = ty_constructor
+ .map(|ty_constructor| {
+ hir::Adt::from(ty_constructor).ty_with_args(ctx.db(), [ty.clone()])
+ })
+ .unwrap_or_else(|| ty.clone());
+ (ast_ty, semantic_ty)
});
let mut exprs_to_wrap = Vec::new();
@@ -96,6 +110,17 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
for_each_tail_expr(&body_expr, tail_cb);
for ret_expr_arg in exprs_to_wrap {
+ if let Some(ty) = ctx.sema.type_of_expr(&ret_expr_arg) {
+ if ty.adjusted().could_unify_with(ctx.db(), &semantic_new_return_ty) {
+ // The type is already correct, don't wrap it.
+ // We deliberately don't use `could_unify_with_deeply()`, because as long as the outer
+ // enum matches it's okay for us, as we don't trigger the assist if the return type
+ // is already `Option`/`Result`, so mismatched exact type is more likely a mistake
+ // than something intended.
+ continue;
+ }
+ }
+
let happy_wrapped = make.expr_call(
make.expr_path(make.ident_path(kind.happy_ident())),
make.arg_list(iter::once(ret_expr_arg.clone())),
@@ -103,12 +128,12 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
editor.replace(ret_expr_arg.syntax(), happy_wrapped.syntax());
}
- editor.replace(type_ref.syntax(), new_return_ty.syntax());
+ editor.replace(type_ref.syntax(), ast_new_return_ty.syntax());
if let WrapperKind::Result = kind {
// Add a placeholder snippet at the first generic argument that doesn't equal the return type.
// This is normally the error type, but that may not be the case when we inserted a type alias.
- let args = new_return_ty
+ let args = ast_new_return_ty
.path()
.unwrap()
.segment()
@@ -188,27 +213,28 @@ impl WrapperKind {
}
// Try to find an wrapper type alias in the current scope (shadowing the default).
-fn wrapper_alias(
- ctx: &AssistContext<'_>,
+fn wrapper_alias<'db>(
+ ctx: &AssistContext<'db>,
make: &SyntaxFactory,
- core_wrapper: &hir::Enum,
- ret_type: &ast::Type,
+ core_wrapper: hir::Enum,
+ ast_ret_type: &ast::Type,
+ semantic_ret_type: &hir::Type<'db>,
wrapper: hir::Symbol,
-) -> Option<ast::PathType> {
+) -> Option<(ast::PathType, hir::Type<'db>)> {
let wrapper_path = hir::ModPath::from_segments(
hir::PathKind::Plain,
iter::once(hir::Name::new_symbol_root(wrapper)),
);
- ctx.sema.resolve_mod_path(ret_type.syntax(), &wrapper_path).and_then(|def| {
+ ctx.sema.resolve_mod_path(ast_ret_type.syntax(), &wrapper_path).and_then(|def| {
def.filter_map(|def| match def.into_module_def() {
hir::ModuleDef::TypeAlias(alias) => {
let enum_ty = alias.ty(ctx.db()).as_adt()?.as_enum()?;
- (&enum_ty == core_wrapper).then_some(alias)
+ (enum_ty == core_wrapper).then_some((alias, enum_ty))
}
_ => None,
})
- .find_map(|alias| {
+ .find_map(|(alias, enum_ty)| {
let mut inserted_ret_type = false;
let generic_args =
alias.source(ctx.db())?.value.generic_param_list()?.generic_params().map(|param| {
@@ -216,7 +242,7 @@ fn wrapper_alias(
// Replace the very first type parameter with the function's return type.
ast::GenericParam::TypeParam(_) if !inserted_ret_type => {
inserted_ret_type = true;
- make.type_arg(ret_type.clone()).into()
+ make.type_arg(ast_ret_type.clone()).into()
}
ast::GenericParam::LifetimeParam(_) => {
make.lifetime_arg(make.lifetime("'_")).into()
@@ -231,7 +257,10 @@ fn wrapper_alias(
make.path_segment_generics(make.name_ref(name.as_str()), generic_arg_list),
);
- Some(make.ty_path(path))
+ let new_ty =
+ hir::Adt::from(enum_ty).ty_with_args(ctx.db(), [semantic_ret_type.clone()]);
+
+ Some((make.ty_path(path), new_ty))
})
})
}
@@ -605,29 +634,39 @@ fn foo() -> Option<i32> {
check_assist_by_label(
wrap_return_type,
r#"
-//- minicore: option
+//- minicore: option, future
+struct F(i32);
+impl core::future::Future for F {
+ type Output = i32;
+ fn poll(self: core::pin::Pin<&mut Self>, cx: &mut core::task::Context<'_>) -> core::task::Poll<Self::Output> { 0 }
+}
async fn foo() -> i$032 {
if true {
if false {
- 1.await
+ F(1).await
} else {
- 2.await
+ F(2).await
}
} else {
- 24i32.await
+ F(24i32).await
}
}
"#,
r#"
+struct F(i32);
+impl core::future::Future for F {
+ type Output = i32;
+ fn poll(self: core::pin::Pin<&mut Self>, cx: &mut core::task::Context<'_>) -> core::task::Poll<Self::Output> { 0 }
+}
async fn foo() -> Option<i32> {
if true {
if false {
- Some(1.await)
+ Some(F(1).await)
} else {
- Some(2.await)
+ Some(F(2).await)
}
} else {
- Some(24i32.await)
+ Some(F(24i32).await)
}
}
"#,
@@ -1666,29 +1705,39 @@ fn foo() -> Result<i32, ${0:_}> {
check_assist_by_label(
wrap_return_type,
r#"
-//- minicore: result
+//- minicore: result, future
+struct F(i32);
+impl core::future::Future for F {
+ type Output = i32;
+ fn poll(self: core::pin::Pin<&mut Self>, cx: &mut core::task::Context<'_>) -> core::task::Poll<Self::Output> { 0 }
+}
async fn foo() -> i$032 {
if true {
if false {
- 1.await
+ F(1).await
} else {
- 2.await
+ F(2).await
}
} else {
- 24i32.await
+ F(24i32).await
}
}
"#,
r#"
+struct F(i32);
+impl core::future::Future for F {
+ type Output = i32;
+ fn poll(self: core::pin::Pin<&mut Self>, cx: &mut core::task::Context<'_>) -> core::task::Poll<Self::Output> { 0 }
+}
async fn foo() -> Result<i32, ${0:_}> {
if true {
if false {
- Ok(1.await)
+ Ok(F(1).await)
} else {
- Ok(2.await)
+ Ok(F(2).await)
}
} else {
- Ok(24i32.await)
+ Ok(F(24i32).await)
}
}
"#,
@@ -2460,4 +2509,54 @@ fn foo() -> Result<i32, ${0:_}> {
WrapperKind::Result.label(),
);
}
+
+ #[test]
+ fn already_wrapped() {
+ check_assist_by_label(
+ wrap_return_type,
+ r#"
+//- minicore: option
+fn foo() -> i32$0 {
+ if false {
+ 0
+ } else {
+ Some(1)
+ }
+}
+ "#,
+ r#"
+fn foo() -> Option<i32> {
+ if false {
+ Some(0)
+ } else {
+ Some(1)
+ }
+}
+ "#,
+ WrapperKind::Option.label(),
+ );
+ check_assist_by_label(
+ wrap_return_type,
+ r#"
+//- minicore: result
+fn foo() -> i32$0 {
+ if false {
+ 0
+ } else {
+ Ok(1)
+ }
+}
+ "#,
+ r#"
+fn foo() -> Result<i32, ${0:_}> {
+ if false {
+ Ok(0)
+ } else {
+ Ok(1)
+ }
+}
+ "#,
+ WrapperKind::Result.label(),
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs b/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
index e1b94673e7..5183566d13 100644
--- a/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
+++ b/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
@@ -2,8 +2,7 @@ use ide_db::source_change::SourceChangeBuilder;
use itertools::Itertools;
use syntax::{
NodeOrToken, SyntaxToken, T, TextRange, algo,
- ast::{self, AstNode, make},
- ted::{self, Position},
+ ast::{self, AstNode, make, syntax_factory::SyntaxFactory},
};
use crate::{AssistContext, AssistId, Assists};
@@ -173,40 +172,45 @@ fn wrap_derive(
}
}
let handle_source_change = |edit: &mut SourceChangeBuilder| {
- let new_derive = make::attr_outer(make::meta_token_tree(
- make::ext::ident_path("derive"),
- make::token_tree(T!['('], new_derive),
- ))
- .clone_for_update();
- let meta = make::meta_token_tree(
- make::ext::ident_path("cfg_attr"),
- make::token_tree(
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(attr.syntax());
+ let new_derive = make.attr_outer(
+ make.meta_token_tree(make.ident_path("derive"), make.token_tree(T!['('], new_derive)),
+ );
+ let meta = make.meta_token_tree(
+ make.ident_path("cfg_attr"),
+ make.token_tree(
T!['('],
vec![
- NodeOrToken::Token(make::token(T![,])),
- NodeOrToken::Token(make::tokens::whitespace(" ")),
- NodeOrToken::Token(make::tokens::ident("derive")),
- NodeOrToken::Node(make::token_tree(T!['('], cfg_derive_tokens)),
+ NodeOrToken::Token(make.token(T![,])),
+ NodeOrToken::Token(make.whitespace(" ")),
+ NodeOrToken::Token(make.ident("derive")),
+ NodeOrToken::Node(make.token_tree(T!['('], cfg_derive_tokens)),
],
),
);
- // Remove the derive attribute
- let edit_attr = edit.make_syntax_mut(attr.syntax().clone());
-
- ted::replace(edit_attr, new_derive.syntax().clone());
- let cfg_attr = make::attr_outer(meta).clone_for_update();
- ted::insert_all_raw(
- Position::after(new_derive.syntax().clone()),
- vec![make::tokens::whitespace("\n").into(), cfg_attr.syntax().clone().into()],
+ let cfg_attr = make.attr_outer(meta);
+ editor.replace_with_many(
+ attr.syntax(),
+ vec![
+ new_derive.syntax().clone().into(),
+ make.whitespace("\n").into(),
+ cfg_attr.syntax().clone().into(),
+ ],
);
+
if let Some(snippet_cap) = ctx.config.snippet_cap {
if let Some(first_meta) =
cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
{
- edit.add_tabstop_after_token(snippet_cap, first_meta)
+ let tabstop = edit.make_tabstop_after(snippet_cap);
+ editor.add_annotation(first_meta, tabstop);
}
}
+
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
};
acc.add(
@@ -221,10 +225,10 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) ->
let range = attr.syntax().text_range();
let path = attr.path()?;
let handle_source_change = |edit: &mut SourceChangeBuilder| {
- let mut raw_tokens = vec![
- NodeOrToken::Token(make::token(T![,])),
- NodeOrToken::Token(make::tokens::whitespace(" ")),
- ];
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(attr.syntax());
+ let mut raw_tokens =
+ vec![NodeOrToken::Token(make.token(T![,])), NodeOrToken::Token(make.whitespace(" "))];
path.syntax().descendants_with_tokens().for_each(|it| {
if let NodeOrToken::Token(token) = it {
raw_tokens.push(NodeOrToken::Token(token));
@@ -232,9 +236,9 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) ->
});
if let Some(meta) = attr.meta() {
if let (Some(eq), Some(expr)) = (meta.eq_token(), meta.expr()) {
- raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
+ raw_tokens.push(NodeOrToken::Token(make.whitespace(" ")));
raw_tokens.push(NodeOrToken::Token(eq));
- raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
+ raw_tokens.push(NodeOrToken::Token(make.whitespace(" ")));
expr.syntax().descendants_with_tokens().for_each(|it| {
if let NodeOrToken::Token(token) = it {
@@ -245,26 +249,24 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) ->
raw_tokens.extend(tt.token_trees_and_tokens());
}
}
- let meta = make::meta_token_tree(
- make::ext::ident_path("cfg_attr"),
- make::token_tree(T!['('], raw_tokens),
- );
- let cfg_attr = if attr.excl_token().is_some() {
- make::attr_inner(meta)
- } else {
- make::attr_outer(meta)
- }
- .clone_for_update();
- let attr_syntax = edit.make_syntax_mut(attr.syntax().clone());
- ted::replace(attr_syntax, cfg_attr.syntax());
+ let meta =
+ make.meta_token_tree(make.ident_path("cfg_attr"), make.token_tree(T!['('], raw_tokens));
+ let cfg_attr =
+ if attr.excl_token().is_some() { make.attr_inner(meta) } else { make.attr_outer(meta) };
+
+ editor.replace(attr.syntax(), cfg_attr.syntax());
if let Some(snippet_cap) = ctx.config.snippet_cap {
if let Some(first_meta) =
cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
{
- edit.add_tabstop_after_token(snippet_cap, first_meta)
+ let tabstop = edit.make_tabstop_after(snippet_cap);
+ editor.add_annotation(first_meta, tabstop);
}
}
+
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
};
acc.add(
AssistId::refactor("wrap_unwrap_cfg_attr"),
diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs
index c260443203..cde0d875e0 100644
--- a/crates/ide-assists/src/lib.rs
+++ b/crates/ide-assists/src/lib.rs
@@ -172,6 +172,7 @@ mod handlers {
mod generate_is_empty_from_len;
mod generate_mut_trait_impl;
mod generate_new;
+ mod generate_single_field_struct_from;
mod generate_trait_from_impl;
mod inline_call;
mod inline_const_as_literal;
@@ -305,6 +306,7 @@ mod handlers {
generate_mut_trait_impl::generate_mut_trait_impl,
generate_new::generate_new,
generate_trait_from_impl::generate_trait_from_impl,
+ generate_single_field_struct_from::generate_single_field_struct_from,
inline_call::inline_call,
inline_call::inline_into_callers,
inline_const_as_literal::inline_const_as_literal,
diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs
index 5e6889792d..cda2ad4327 100644
--- a/crates/ide-assists/src/tests.rs
+++ b/crates/ide-assists/src/tests.rs
@@ -37,6 +37,7 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
term_search_borrowck: true,
code_action_grouping: true,
expr_fill_default: ExprFillDefaultMode::Todo,
+ prefer_self_ty: false,
};
pub(crate) const TEST_CONFIG_NO_GROUPING: AssistConfig = AssistConfig {
@@ -57,6 +58,7 @@ pub(crate) const TEST_CONFIG_NO_GROUPING: AssistConfig = AssistConfig {
term_search_borrowck: true,
code_action_grouping: false,
expr_fill_default: ExprFillDefaultMode::Todo,
+ prefer_self_ty: false,
};
pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
@@ -77,6 +79,7 @@ pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
term_search_borrowck: true,
code_action_grouping: true,
expr_fill_default: ExprFillDefaultMode::Todo,
+ prefer_self_ty: false,
};
pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
@@ -97,6 +100,7 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
term_search_borrowck: true,
code_action_grouping: true,
expr_fill_default: ExprFillDefaultMode::Todo,
+ prefer_self_ty: false,
};
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, EditionedFileId) {
@@ -114,6 +118,23 @@ pub(crate) fn check_assist(
}
#[track_caller]
+pub(crate) fn check_assist_with_config(
+ assist: Handler,
+ config: AssistConfig,
+ #[rust_analyzer::rust_fixture] ra_fixture_before: &str,
+ #[rust_analyzer::rust_fixture] ra_fixture_after: &str,
+) {
+ let ra_fixture_after = trim_indent(ra_fixture_after);
+ check_with_config(
+ config,
+ assist,
+ ra_fixture_before,
+ ExpectedResult::After(&ra_fixture_after),
+ None,
+ );
+}
+
+#[track_caller]
pub(crate) fn check_assist_no_snippet_cap(
assist: Handler,
#[rust_analyzer::rust_fixture] ra_fixture_before: &str,
diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs
index 72f7195cbd..fc1c6928ff 100644
--- a/crates/ide-assists/src/tests/generated.rs
+++ b/crates/ide-assists/src/tests/generated.rs
@@ -1933,7 +1933,7 @@ pub enum Axis { X = 0, Y = 1, Z = 2 }
$0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
- &self[index as usize]
+ &mut self[index as usize]
}
}
@@ -1995,6 +1995,34 @@ impl Person {
}
#[test]
+fn doctest_generate_single_field_struct_from() {
+ check_doc_test(
+ "generate_single_field_struct_from",
+ r#####"
+//- minicore: from, phantom_data
+use core::marker::PhantomData;
+struct $0Foo<T> {
+ id: i32,
+ _phantom_data: PhantomData<T>,
+}
+"#####,
+ r#####"
+use core::marker::PhantomData;
+struct Foo<T> {
+ id: i32,
+ _phantom_data: PhantomData<T>,
+}
+
+impl<T> From<i32> for Foo<T> {
+ fn from(id: i32) -> Self {
+ Self { id, _phantom_data: PhantomData }
+ }
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_generate_trait_from_impl() {
check_doc_test(
"generate_trait_from_impl",
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index ef6914fda1..2c8cb6e4d9 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -1,5 +1,7 @@
//! Assorted functions shared by several assists.
+use std::slice;
+
pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
use hir::{
DisplayTarget, HasAttrs as HirHasAttrs, HirDisplay, InFile, ModuleDef, PathResolution,
@@ -405,7 +407,7 @@ pub(crate) fn does_pat_variant_nested_or_literal(ctx: &AssistContext<'_>, pat: &
}
fn check_pat_variant_from_enum(ctx: &AssistContext<'_>, pat: &ast::Pat) -> bool {
- ctx.sema.type_of_pat(pat).is_none_or(|ty: hir::TypeInfo| {
+ ctx.sema.type_of_pat(pat).is_none_or(|ty: hir::TypeInfo<'_>| {
ty.adjusted().as_adt().is_some_and(|adt| matches!(adt, hir::Adt::Enum(_)))
})
}
@@ -592,12 +594,10 @@ fn generate_impl_text_inner(
let generic_params = adt.generic_param_list().map(|generic_params| {
let lifetime_params =
generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
- let ty_or_const_params = generic_params.type_or_const_params().map(|param| {
- match param {
+ let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
+ let param = match param {
ast::TypeOrConstParam::Type(param) => {
- let param = param.clone_for_update();
// remove defaults since they can't be specified in impls
- param.remove_default();
let mut bounds =
param.type_bound_list().map_or_else(Vec::new, |it| it.bounds().collect());
if let Some(trait_) = trait_text {
@@ -608,17 +608,16 @@ fn generate_impl_text_inner(
}
};
// `{ty_param}: {bounds}`
- let param =
- make::type_param(param.name().unwrap(), make::type_bound_list(bounds));
+ let param = make::type_param(param.name()?, make::type_bound_list(bounds));
ast::GenericParam::TypeParam(param)
}
ast::TypeOrConstParam::Const(param) => {
- let param = param.clone_for_update();
// remove defaults since they can't be specified in impls
- param.remove_default();
+ let param = make::const_param(param.name()?, param.ty()?);
ast::GenericParam::ConstParam(param)
}
- }
+ };
+ Some(param)
});
make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))
@@ -693,12 +692,10 @@ fn generate_impl_inner(
let generic_params = adt.generic_param_list().map(|generic_params| {
let lifetime_params =
generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
- let ty_or_const_params = generic_params.type_or_const_params().map(|param| {
- match param {
+ let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
+ let param = match param {
ast::TypeOrConstParam::Type(param) => {
- let param = param.clone_for_update();
// remove defaults since they can't be specified in impls
- param.remove_default();
let mut bounds =
param.type_bound_list().map_or_else(Vec::new, |it| it.bounds().collect());
if let Some(trait_) = &trait_ {
@@ -709,17 +706,16 @@ fn generate_impl_inner(
}
};
// `{ty_param}: {bounds}`
- let param =
- make::type_param(param.name().unwrap(), make::type_bound_list(bounds));
+ let param = make::type_param(param.name()?, make::type_bound_list(bounds));
ast::GenericParam::TypeParam(param)
}
ast::TypeOrConstParam::Const(param) => {
- let param = param.clone_for_update();
// remove defaults since they can't be specified in impls
- param.remove_default();
+ let param = make::const_param(param.name()?, param.ty()?);
ast::GenericParam::ConstParam(param)
}
- }
+ };
+ Some(param)
});
make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))
@@ -747,16 +743,23 @@ fn generate_impl_inner(
.clone_for_update();
// Copy any cfg attrs from the original adt
- let cfg_attrs = adt
- .attrs()
- .filter(|attr| attr.as_simple_call().map(|(name, _arg)| name == "cfg").unwrap_or(false));
- for attr in cfg_attrs {
- impl_.add_attr(attr.clone_for_update());
- }
+ add_cfg_attrs_to(adt, &impl_);
impl_
}
+pub(crate) fn add_cfg_attrs_to<T, U>(from: &T, to: &U)
+where
+ T: HasAttrs,
+ U: AttrsOwnerEdit,
+{
+ let cfg_attrs =
+ from.attrs().filter(|attr| attr.as_simple_call().is_some_and(|(name, _arg)| name == "cfg"));
+ for attr in cfg_attrs {
+ to.add_attr(attr.clone_for_update());
+ }
+}
+
pub(crate) fn add_method_to_adt(
builder: &mut SourceChangeBuilder,
adt: &ast::Adt,
@@ -780,9 +783,9 @@ pub(crate) fn add_method_to_adt(
}
#[derive(Debug)]
-pub(crate) struct ReferenceConversion {
+pub(crate) struct ReferenceConversion<'db> {
conversion: ReferenceConversionType,
- ty: hir::Type,
+ ty: hir::Type<'db>,
impls_deref: bool,
}
@@ -802,10 +805,10 @@ enum ReferenceConversionType {
Result,
}
-impl ReferenceConversion {
+impl<'db> ReferenceConversion<'db> {
pub(crate) fn convert_type(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
display_target: DisplayTarget,
) -> ast::Type {
let ty = match self.conversion {
@@ -878,11 +881,11 @@ impl ReferenceConversion {
// FIXME: It should return a new hir::Type, but currently constructing new types is too cumbersome
// and all users of this function operate on string type names, so they can do the conversion
// itself themselves.
-pub(crate) fn convert_reference_type(
- ty: hir::Type,
- db: &RootDatabase,
- famous_defs: &FamousDefs<'_, '_>,
-) -> Option<ReferenceConversion> {
+pub(crate) fn convert_reference_type<'db>(
+ ty: hir::Type<'db>,
+ db: &'db RootDatabase,
+ famous_defs: &FamousDefs<'_, 'db>,
+) -> Option<ReferenceConversion<'db>> {
handle_copy(&ty, db)
.or_else(|| handle_as_ref_str(&ty, db, famous_defs))
.or_else(|| handle_as_ref_slice(&ty, db, famous_defs))
@@ -892,56 +895,60 @@ pub(crate) fn convert_reference_type(
.map(|(conversion, impls_deref)| ReferenceConversion { ty, conversion, impls_deref })
}
-fn could_deref_to_target(ty: &hir::Type, target: &hir::Type, db: &dyn HirDatabase) -> bool {
+fn could_deref_to_target(ty: &hir::Type<'_>, target: &hir::Type<'_>, db: &dyn HirDatabase) -> bool {
let ty_ref = ty.add_reference(hir::Mutability::Shared);
let target_ref = target.add_reference(hir::Mutability::Shared);
ty_ref.could_coerce_to(db, &target_ref)
}
-fn handle_copy(ty: &hir::Type, db: &dyn HirDatabase) -> Option<(ReferenceConversionType, bool)> {
+fn handle_copy(
+ ty: &hir::Type<'_>,
+ db: &dyn HirDatabase,
+) -> Option<(ReferenceConversionType, bool)> {
ty.is_copy(db).then_some((ReferenceConversionType::Copy, true))
}
fn handle_as_ref_str(
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
db: &dyn HirDatabase,
famous_defs: &FamousDefs<'_, '_>,
) -> Option<(ReferenceConversionType, bool)> {
let str_type = hir::BuiltinType::str().ty(db);
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type.clone()])
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&str_type))
.then_some((ReferenceConversionType::AsRefStr, could_deref_to_target(ty, &str_type, db)))
}
fn handle_as_ref_slice(
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
db: &dyn HirDatabase,
famous_defs: &FamousDefs<'_, '_>,
) -> Option<(ReferenceConversionType, bool)> {
let type_argument = ty.type_arguments().next()?;
let slice_type = hir::Type::new_slice(type_argument);
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type.clone()]).then_some((
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&slice_type)).then_some((
ReferenceConversionType::AsRefSlice,
could_deref_to_target(ty, &slice_type, db),
))
}
fn handle_dereferenced(
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
db: &dyn HirDatabase,
famous_defs: &FamousDefs<'_, '_>,
) -> Option<(ReferenceConversionType, bool)> {
let type_argument = ty.type_arguments().next()?;
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument.clone()]).then_some((
- ReferenceConversionType::Dereferenced,
- could_deref_to_target(ty, &type_argument, db),
- ))
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&type_argument))
+ .then_some((
+ ReferenceConversionType::Dereferenced,
+ could_deref_to_target(ty, &type_argument, db),
+ ))
}
fn handle_option_as_ref(
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
db: &dyn HirDatabase,
famous_defs: &FamousDefs<'_, '_>,
) -> Option<(ReferenceConversionType, bool)> {
@@ -953,7 +960,7 @@ fn handle_option_as_ref(
}
fn handle_result_as_ref(
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
db: &dyn HirDatabase,
famous_defs: &FamousDefs<'_, '_>,
) -> Option<(ReferenceConversionType, bool)> {
diff --git a/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/crates/ide-assists/src/utils/gen_trait_fn_body.rs
index 4ea56dc46a..c58bdd9e8e 100644
--- a/crates/ide-assists/src/utils/gen_trait_fn_body.rs
+++ b/crates/ide-assists/src/utils/gen_trait_fn_body.rs
@@ -17,7 +17,7 @@ pub(crate) fn gen_trait_fn_body(
func: &ast::Fn,
trait_path: &ast::Path,
adt: &ast::Adt,
- trait_ref: Option<TraitRef>,
+ trait_ref: Option<TraitRef<'_>>,
) -> Option<()> {
match trait_path.segment()?.name_ref()?.text().as_str() {
"Clone" => gen_clone_impl(adt, func),
@@ -405,7 +405,7 @@ fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
}
/// Generate a `PartialEq` impl based on the fields and members of the target type.
-fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>) -> Option<()> {
+fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>>) -> Option<()> {
stdx::always!(func.name().is_some_and(|name| name.text() == "eq"));
fn gen_eq_chain(expr: Option<ast::Expr>, cmp: ast::Expr) -> Option<ast::Expr> {
match expr {
@@ -599,7 +599,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>) -
Some(())
}
-fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>) -> Option<()> {
+fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>>) -> Option<()> {
stdx::always!(func.name().is_some_and(|name| name.text() == "partial_cmp"));
fn gen_partial_eq_match(match_target: ast::Expr) -> Option<ast::Stmt> {
let mut arms = vec![];
diff --git a/crates/ide-completion/Cargo.toml b/crates/ide-completion/Cargo.toml
index 94c01e333e..9bad21fc8e 100644
--- a/crates/ide-completion/Cargo.toml
+++ b/crates/ide-completion/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs
index 5d68aca9e6..65072d936f 100644
--- a/crates/ide-completion/src/completions.rs
+++ b/crates/ide-completion/src/completions.rs
@@ -161,7 +161,11 @@ impl Completions {
item.add_to(self, ctx.db);
}
- pub(crate) fn add_expr(&mut self, ctx: &CompletionContext<'_>, expr: &hir::term_search::Expr) {
+ pub(crate) fn add_expr(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ expr: &hir::term_search::Expr<'_>,
+ ) {
if let Some(item) = render_expr(ctx, expr) {
item.add_to(self, ctx.db)
}
@@ -170,7 +174,7 @@ impl Completions {
pub(crate) fn add_crate_roots(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
) {
ctx.process_all_names(&mut |name, res, doc_aliases| match res {
ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) if m.is_crate_root() => {
@@ -183,7 +187,7 @@ impl Completions {
pub(crate) fn add_path_resolution(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
local_name: hir::Name,
resolution: hir::ScopeDef,
doc_aliases: Vec<syntax::SmolStr>,
@@ -232,7 +236,7 @@ impl Completions {
pub(crate) fn add_enum_variants(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
e: hir::Enum,
) {
if !ctx.check_stability_and_hidden(e) {
@@ -246,7 +250,7 @@ impl Completions {
pub(crate) fn add_module(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
module: hir::Module,
local_name: hir::Name,
doc_aliases: Vec<syntax::SmolStr>,
@@ -263,7 +267,7 @@ impl Completions {
pub(crate) fn add_macro(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
mac: hir::Macro,
local_name: hir::Name,
) {
@@ -286,7 +290,7 @@ impl Completions {
pub(crate) fn add_function(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
func: hir::Function,
local_name: Option<hir::Name>,
) {
@@ -312,7 +316,7 @@ impl Completions {
pub(crate) fn add_method(
&mut self,
ctx: &CompletionContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
func: hir::Function,
receiver: Option<SmolStr>,
local_name: Option<hir::Name>,
@@ -340,7 +344,7 @@ impl Completions {
pub(crate) fn add_method_with_import(
&mut self,
ctx: &CompletionContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
func: hir::Function,
import: LocatedImport,
) {
@@ -407,7 +411,7 @@ impl Completions {
pub(crate) fn add_qualified_enum_variant(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
variant: hir::Variant,
path: hir::ModPath,
) {
@@ -424,7 +428,7 @@ impl Completions {
pub(crate) fn add_enum_variant(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
variant: hir::Variant,
local_name: Option<hir::Name>,
) {
@@ -447,10 +451,10 @@ impl Completions {
pub(crate) fn add_field(
&mut self,
ctx: &CompletionContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
receiver: Option<SmolStr>,
field: hir::Field,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
) {
let is_private_editable = match ctx.is_visible(&field) {
Visible::Yes => false,
@@ -471,7 +475,7 @@ impl Completions {
pub(crate) fn add_struct_literal(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
strukt: hir::Struct,
path: Option<hir::ModPath>,
local_name: Option<hir::Name>,
@@ -518,7 +522,7 @@ impl Completions {
ctx: &CompletionContext<'_>,
receiver: Option<SmolStr>,
field: usize,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
) {
// Only used for (unnamed) tuples, whose all fields *are* stable. No need to check
// stability here.
@@ -550,7 +554,7 @@ impl Completions {
&mut self,
ctx: &CompletionContext<'_>,
pattern_ctx: &PatternContext,
- path_ctx: Option<&PathCompletionCtx>,
+ path_ctx: Option<&PathCompletionCtx<'_>>,
variant: hir::Variant,
local_name: Option<hir::Name>,
) {
@@ -704,7 +708,7 @@ pub(super) fn complete_name(
pub(super) fn complete_name_ref(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- NameRefContext { nameref, kind }: &NameRefContext,
+ NameRefContext { nameref, kind }: &NameRefContext<'_>,
) {
match kind {
NameRefKind::Path(path_ctx) => {
diff --git a/crates/ide-completion/src/completions/attribute.rs b/crates/ide-completion/src/completions/attribute.rs
index 705402c785..c542e140df 100644
--- a/crates/ide-completion/src/completions/attribute.rs
+++ b/crates/ide-completion/src/completions/attribute.rs
@@ -86,7 +86,7 @@ pub(crate) fn complete_known_attribute_input(
pub(crate) fn complete_attribute_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
&AttrCtx { kind, annotated_item_kind, ref derive_helpers }: &AttrCtx,
) {
let is_inner = kind == AttrKind::Inner;
diff --git a/crates/ide-completion/src/completions/attribute/derive.rs b/crates/ide-completion/src/completions/attribute/derive.rs
index 2fc07e0138..267d92b6c0 100644
--- a/crates/ide-completion/src/completions/attribute/derive.rs
+++ b/crates/ide-completion/src/completions/attribute/derive.rs
@@ -13,7 +13,7 @@ use crate::{
pub(crate) fn complete_derive_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
existing_derives: &ExistingDerives,
) {
let core = ctx.famous_defs().core();
diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs
index 4f21136d21..5340d65a14 100644
--- a/crates/ide-completion/src/completions/dot.rs
+++ b/crates/ide-completion/src/completions/dot.rs
@@ -18,7 +18,7 @@ use crate::{
pub(crate) fn complete_dot(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
) {
let receiver_ty = match dot_access {
DotAccess { receiver_ty: Some(receiver_ty), .. } => &receiver_ty.original,
@@ -130,8 +130,8 @@ pub(crate) fn complete_dot(
pub(crate) fn complete_undotted_self(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
- expr_ctx: &PathExprCtx,
+ path_ctx: &PathCompletionCtx<'_>,
+ expr_ctx: &PathExprCtx<'_>,
) {
if !ctx.config.enable_self_on_the_fly {
return;
@@ -198,9 +198,9 @@ pub(crate) fn complete_undotted_self(
fn complete_fields(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- receiver: &hir::Type,
- mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type),
- mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type),
+ receiver: &hir::Type<'_>,
+ mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type<'_>),
+ mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type<'_>),
is_field_access: bool,
is_method_access_with_parens: bool,
) {
@@ -230,7 +230,7 @@ fn complete_fields(
fn complete_methods(
ctx: &CompletionContext<'_>,
- receiver: &hir::Type,
+ receiver: &hir::Type<'_>,
traits_in_scope: &FxHashSet<hir::TraitId>,
f: impl FnMut(hir::Function),
) {
diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs
index 7fbd1fbc1a..2133291b1d 100644
--- a/crates/ide-completion/src/completions/expr.rs
+++ b/crates/ide-completion/src/completions/expr.rs
@@ -47,8 +47,8 @@ where
pub(crate) fn complete_expr_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
- expr_ctx: &PathExprCtx,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
+ expr_ctx: &PathExprCtx<'_>,
) {
let _p = tracing::info_span!("complete_expr_path").entered();
if !ctx.qualifier_ctx.none() {
@@ -145,10 +145,16 @@ pub(crate) fn complete_expr_path(
});
match resolution {
hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
- // Set visible_from to None so private items are returned.
- // They will be possibly filtered out in add_path_resolution()
- // via def_is_visible().
- let module_scope = module.scope(ctx.db, None);
+ let visible_from = if ctx.config.enable_private_editable {
+ // Set visible_from to None so private items are returned.
+ // They will be possibly filtered out in add_path_resolution()
+ // via def_is_visible().
+ None
+ } else {
+ Some(ctx.module)
+ };
+
+ let module_scope = module.scope(ctx.db, visible_from);
for (name, def) in module_scope {
if scope_def_applicable(def) {
acc.add_path_resolution(
diff --git a/crates/ide-completion/src/completions/field.rs b/crates/ide-completion/src/completions/field.rs
index 1441b0e3a0..26afa9c8ad 100644
--- a/crates/ide-completion/src/completions/field.rs
+++ b/crates/ide-completion/src/completions/field.rs
@@ -8,7 +8,7 @@ use crate::{
pub(crate) fn complete_field_list_tuple_variant(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
) {
if ctx.qualifier_ctx.vis_node.is_some() {
} else if let PathCompletionCtx {
diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs
index a747561380..dad8a76de8 100644
--- a/crates/ide-completion/src/completions/flyimport.rs
+++ b/crates/ide-completion/src/completions/flyimport.rs
@@ -111,7 +111,7 @@ use crate::{
pub(crate) fn import_on_the_fly_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
) -> Option<()> {
if !ctx.config.enable_imports_on_the_fly {
return None;
@@ -175,7 +175,7 @@ pub(crate) fn import_on_the_fly_pat(
pub(crate) fn import_on_the_fly_dot(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
) -> Option<()> {
if !ctx.config.enable_imports_on_the_fly {
return None;
@@ -203,8 +203,8 @@ pub(crate) fn import_on_the_fly_dot(
fn import_on_the_fly(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { kind, .. }: &PathCompletionCtx,
- import_assets: ImportAssets,
+ path_ctx @ PathCompletionCtx { kind, .. }: &PathCompletionCtx<'_>,
+ import_assets: ImportAssets<'_>,
position: SyntaxNode,
potential_import_name: String,
) -> Option<()> {
@@ -290,7 +290,7 @@ fn import_on_the_fly_pat_(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
pattern_ctx: &PatternContext,
- import_assets: ImportAssets,
+ import_assets: ImportAssets<'_>,
position: SyntaxNode,
potential_import_name: String,
) -> Option<()> {
@@ -335,8 +335,8 @@ fn import_on_the_fly_pat_(
fn import_on_the_fly_method(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- dot_access: &DotAccess,
- import_assets: ImportAssets,
+ dot_access: &DotAccess<'_>,
+ import_assets: ImportAssets<'_>,
position: SyntaxNode,
potential_import_name: String,
) -> Option<()> {
@@ -400,11 +400,11 @@ fn import_name(ctx: &CompletionContext<'_>) -> String {
if token_kind.is_any_identifier() { ctx.token.to_string() } else { String::new() }
}
-fn import_assets_for_path(
- ctx: &CompletionContext<'_>,
+fn import_assets_for_path<'db>(
+ ctx: &CompletionContext<'db>,
potential_import_name: &str,
qualifier: Option<ast::Path>,
-) -> Option<ImportAssets> {
+) -> Option<ImportAssets<'db>> {
let _p =
tracing::info_span!("import_assets_for_path", ?potential_import_name, ?qualifier).entered();
diff --git a/crates/ide-completion/src/completions/fn_param.rs b/crates/ide-completion/src/completions/fn_param.rs
index 6d1e973dc4..809e71cc11 100644
--- a/crates/ide-completion/src/completions/fn_param.rs
+++ b/crates/ide-completion/src/completions/fn_param.rs
@@ -195,5 +195,5 @@ fn comma_wrapper(ctx: &CompletionContext<'_>) -> Option<(impl Fn(&str) -> String
matches!(prev_token_kind, SyntaxKind::COMMA | SyntaxKind::L_PAREN | SyntaxKind::PIPE);
let leading = if has_leading_comma { "" } else { ", " };
- Some((move |label: &_| (format!("{leading}{label}{trailing}")), param.text_range()))
+ Some((move |label: &_| format!("{leading}{label}{trailing}"), param.text_range()))
}
diff --git a/crates/ide-completion/src/completions/item_list.rs b/crates/ide-completion/src/completions/item_list.rs
index 893997cee4..6c001bd16b 100644
--- a/crates/ide-completion/src/completions/item_list.rs
+++ b/crates/ide-completion/src/completions/item_list.rs
@@ -10,8 +10,8 @@ pub(crate) mod trait_impl;
pub(crate) fn complete_item_list_in_expr(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
- expr_ctx: &PathExprCtx,
+ path_ctx: &PathCompletionCtx<'_>,
+ expr_ctx: &PathExprCtx<'_>,
) {
if !expr_ctx.in_block_expr {
return;
@@ -25,7 +25,7 @@ pub(crate) fn complete_item_list_in_expr(
pub(crate) fn complete_item_list(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
kind: &ItemListKind,
) {
let _p = tracing::info_span!("complete_item_list").entered();
diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs
index 58aead73fd..975c2f0225 100644
--- a/crates/ide-completion/src/completions/item_list/trait_impl.rs
+++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -37,6 +37,7 @@ use ide_db::{
SymbolKind, documentation::HasDocs, path_transform::PathTransform,
syntax_helpers::prettify_macro_expansion, traits::get_missing_assoc_items,
};
+use syntax::ast::HasGenericParams;
use syntax::{
AstNode, SmolStr, SyntaxElement, SyntaxKind, T, TextRange, ToSmolStr,
ast::{self, HasGenericArgs, HasTypeBounds, edit_in_place::AttrsOwnerEdit, make},
@@ -122,7 +123,7 @@ fn complete_trait_impl_name(
pub(crate) fn complete_trait_impl_item_by_name(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
name_ref: &Option<ast::NameRef>,
impl_: &Option<ast::Impl>,
) {
@@ -390,6 +391,12 @@ fn add_type_alias_impl(
} else if let Some(end) = transformed_ty.eq_token().map(|tok| tok.text_range().start())
{
end
+ } else if let Some(end) = transformed_ty
+ .where_clause()
+ .and_then(|wc| wc.where_token())
+ .map(|tok| tok.text_range().start())
+ {
+ end
} else if let Some(end) =
transformed_ty.semicolon_token().map(|tok| tok.text_range().start())
{
@@ -400,17 +407,29 @@ fn add_type_alias_impl(
let len = end - start;
let mut decl = transformed_ty.syntax().text().slice(..len).to_string();
- if !decl.ends_with(' ') {
- decl.push(' ');
- }
- decl.push_str("= ");
+ decl.truncate(decl.trim_end().len());
+ decl.push_str(" = ");
+
+ let wc = transformed_ty
+ .where_clause()
+ .map(|wc| {
+ let ws = wc
+ .where_token()
+ .and_then(|it| it.prev_token())
+ .filter(|token| token.kind() == SyntaxKind::WHITESPACE)
+ .map(|token| token.to_string())
+ .unwrap_or_else(|| " ".into());
+ format!("{ws}{wc}")
+ })
+ .unwrap_or_default();
match ctx.config.snippet_cap {
Some(cap) => {
- let snippet = format!("{decl}$0;");
+ let snippet = format!("{decl}$0{wc};");
item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet));
}
None => {
+ decl.push_str(&wc);
item.text_edit(TextEdit::replace(replacement_range, decl));
}
};
@@ -1440,6 +1459,30 @@ impl<'b> Tr<'b> for () {
"#,
);
}
+ #[test]
+ fn includes_where_clause() {
+ check_edit(
+ "type Ty",
+ r#"
+trait Tr {
+ type Ty where Self: Copy;
+}
+
+impl Tr for () {
+ $0
+}
+"#,
+ r#"
+trait Tr {
+ type Ty where Self: Copy;
+}
+
+impl Tr for () {
+ type Ty = $0 where Self: Copy;
+}
+"#,
+ );
+ }
#[test]
fn strips_comments() {
diff --git a/crates/ide-completion/src/completions/pattern.rs b/crates/ide-completion/src/completions/pattern.rs
index ea3511d31c..62fae1cb23 100644
--- a/crates/ide-completion/src/completions/pattern.rs
+++ b/crates/ide-completion/src/completions/pattern.rs
@@ -124,7 +124,7 @@ pub(crate) fn complete_pattern(
pub(crate) fn complete_pattern_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
) {
match qualified {
Qualified::With { resolution: Some(resolution), super_chain_len, .. } => {
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs
index 3cdf211283..d0023852ac 100644
--- a/crates/ide-completion/src/completions/postfix.rs
+++ b/crates/ide-completion/src/completions/postfix.rs
@@ -11,6 +11,7 @@ use ide_db::{
text_edit::TextEdit,
ty_filter::TryEnum,
};
+use itertools::Either;
use stdx::never;
use syntax::{
SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR},
@@ -28,7 +29,7 @@ use crate::{
pub(crate) fn complete_postfix(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
) {
if !ctx.config.enable_postfix_completions {
return;
@@ -86,98 +87,10 @@ pub(crate) fn complete_postfix(
}
}
- let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references());
- if let Some(try_enum) = &try_enum {
- match try_enum {
- TryEnum::Result => {
- postfix_snippet(
- "ifl",
- "if let Ok {}",
- &format!("if let Ok($1) = {receiver_text} {{\n $0\n}}"),
- )
- .add_to(acc, ctx.db);
-
- postfix_snippet(
- "lete",
- "let Ok else {}",
- &format!("let Ok($1) = {receiver_text} else {{\n $2\n}};\n$0"),
- )
- .add_to(acc, ctx.db);
-
- postfix_snippet(
- "while",
- "while let Ok {}",
- &format!("while let Ok($1) = {receiver_text} {{\n $0\n}}"),
- )
- .add_to(acc, ctx.db);
- }
- TryEnum::Option => {
- postfix_snippet(
- "ifl",
- "if let Some {}",
- &format!("if let Some($1) = {receiver_text} {{\n $0\n}}"),
- )
- .add_to(acc, ctx.db);
-
- postfix_snippet(
- "lete",
- "let Some else {}",
- &format!("let Some($1) = {receiver_text} else {{\n $2\n}};\n$0"),
- )
- .add_to(acc, ctx.db);
-
- postfix_snippet(
- "while",
- "while let Some {}",
- &format!("while let Some($1) = {receiver_text} {{\n $0\n}}"),
- )
- .add_to(acc, ctx.db);
- }
- }
- } else if receiver_ty.is_bool() || receiver_ty.is_unknown() {
- postfix_snippet("if", "if expr {}", &format!("if {receiver_text} {{\n $0\n}}"))
- .add_to(acc, ctx.db);
- postfix_snippet("while", "while expr {}", &format!("while {receiver_text} {{\n $0\n}}"))
- .add_to(acc, ctx.db);
- postfix_snippet("not", "!expr", &format!("!{receiver_text}")).add_to(acc, ctx.db);
- } else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator() {
- if receiver_ty.impls_trait(ctx.db, trait_, &[]) {
- postfix_snippet(
- "for",
- "for ele in expr {}",
- &format!("for ele in {receiver_text} {{\n $0\n}}"),
- )
- .add_to(acc, ctx.db);
- }
- }
-
postfix_snippet("ref", "&expr", &format!("&{receiver_text}")).add_to(acc, ctx.db);
postfix_snippet("refm", "&mut expr", &format!("&mut {receiver_text}")).add_to(acc, ctx.db);
postfix_snippet("deref", "*expr", &format!("*{receiver_text}")).add_to(acc, ctx.db);
- let mut block_should_be_wrapped = true;
- if dot_receiver.syntax().kind() == BLOCK_EXPR {
- block_should_be_wrapped = false;
- if let Some(parent) = dot_receiver.syntax().parent() {
- if matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR) {
- block_should_be_wrapped = true;
- }
- }
- };
- let unsafe_completion_string = if block_should_be_wrapped {
- format!("unsafe {{ {receiver_text} }}")
- } else {
- format!("unsafe {receiver_text}")
- };
- postfix_snippet("unsafe", "unsafe {}", &unsafe_completion_string).add_to(acc, ctx.db);
-
- let const_completion_string = if block_should_be_wrapped {
- format!("const {{ {receiver_text} }}")
- } else {
- format!("const {receiver_text}")
- };
- postfix_snippet("const", "const {}", &const_completion_string).add_to(acc, ctx.db);
-
// The rest of the postfix completions create an expression that moves an argument,
// so it's better to consider references now to avoid breaking the compilation
@@ -195,18 +108,81 @@ pub(crate) fn complete_postfix(
add_custom_postfix_completions(acc, ctx, &postfix_snippet, &receiver_text);
}
- match try_enum {
- Some(try_enum) => match try_enum {
- TryEnum::Result => {
- postfix_snippet(
+ postfix_snippet("box", "Box::new(expr)", &format!("Box::new({receiver_text})"))
+ .add_to(acc, ctx.db);
+ postfix_snippet("dbg", "dbg!(expr)", &format!("dbg!({receiver_text})")).add_to(acc, ctx.db); // fixme
+ postfix_snippet("dbgr", "dbg!(&expr)", &format!("dbg!(&{receiver_text})")).add_to(acc, ctx.db);
+ postfix_snippet("call", "function(expr)", &format!("${{1}}({receiver_text})"))
+ .add_to(acc, ctx.db);
+
+ let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references());
+ let mut is_in_cond = false;
+ if let Some(parent) = dot_receiver_including_refs.syntax().parent() {
+ if let Some(second_ancestor) = parent.parent() {
+ let sec_ancestor_kind = second_ancestor.kind();
+ if let Some(expr) = <Either<ast::IfExpr, ast::WhileExpr>>::cast(second_ancestor) {
+ is_in_cond = match expr {
+ Either::Left(it) => it.condition().is_some_and(|cond| *cond.syntax() == parent),
+ Either::Right(it) => {
+ it.condition().is_some_and(|cond| *cond.syntax() == parent)
+ }
+ }
+ }
+ match &try_enum {
+ Some(try_enum) if is_in_cond => match try_enum {
+ TryEnum::Result => {
+ postfix_snippet(
+ "let",
+ "let Ok(_)",
+ &format!("let Ok($0) = {receiver_text}"),
+ )
+ .add_to(acc, ctx.db);
+ postfix_snippet(
+ "letm",
+ "let Ok(mut _)",
+ &format!("let Ok(mut $0) = {receiver_text}"),
+ )
+ .add_to(acc, ctx.db);
+ }
+ TryEnum::Option => {
+ postfix_snippet(
+ "let",
+ "let Some(_)",
+ &format!("let Some($0) = {receiver_text}"),
+ )
+ .add_to(acc, ctx.db);
+ postfix_snippet(
+ "letm",
+ "let Some(mut _)",
+ &format!("let Some(mut $0) = {receiver_text}"),
+ )
+ .add_to(acc, ctx.db);
+ }
+ },
+ _ if matches!(sec_ancestor_kind, STMT_LIST | EXPR_STMT) => {
+ postfix_snippet("let", "let", &format!("let $0 = {receiver_text};"))
+ .add_to(acc, ctx.db);
+ postfix_snippet("letm", "let mut", &format!("let mut $0 = {receiver_text};"))
+ .add_to(acc, ctx.db);
+ }
+ _ => (),
+ }
+ }
+ }
+
+ if !is_in_cond {
+ match try_enum {
+ Some(try_enum) => match try_enum {
+ TryEnum::Result => {
+ postfix_snippet(
"match",
"match expr {}",
&format!("match {receiver_text} {{\n Ok(${{1:_}}) => {{$2}},\n Err(${{3:_}}) => {{$0}},\n}}"),
)
.add_to(acc, ctx.db);
- }
- TryEnum::Option => {
- postfix_snippet(
+ }
+ TryEnum::Option => {
+ postfix_snippet(
"match",
"match expr {}",
&format!(
@@ -214,32 +190,106 @@ pub(crate) fn complete_postfix(
),
)
.add_to(acc, ctx.db);
+ }
+ },
+ None => {
+ postfix_snippet(
+ "match",
+ "match expr {}",
+ &format!("match {receiver_text} {{\n ${{1:_}} => {{$0}},\n}}"),
+ )
+ .add_to(acc, ctx.db);
}
- },
- None => {
+ }
+ if let Some(try_enum) = &try_enum {
+ match try_enum {
+ TryEnum::Result => {
+ postfix_snippet(
+ "ifl",
+ "if let Ok {}",
+ &format!("if let Ok($1) = {receiver_text} {{\n $0\n}}"),
+ )
+ .add_to(acc, ctx.db);
+
+ postfix_snippet(
+ "lete",
+ "let Ok else {}",
+ &format!("let Ok($1) = {receiver_text} else {{\n $2\n}};\n$0"),
+ )
+ .add_to(acc, ctx.db);
+
+ postfix_snippet(
+ "while",
+ "while let Ok {}",
+ &format!("while let Ok($1) = {receiver_text} {{\n $0\n}}"),
+ )
+ .add_to(acc, ctx.db);
+ }
+ TryEnum::Option => {
+ postfix_snippet(
+ "ifl",
+ "if let Some {}",
+ &format!("if let Some($1) = {receiver_text} {{\n $0\n}}"),
+ )
+ .add_to(acc, ctx.db);
+
+ postfix_snippet(
+ "lete",
+ "let Some else {}",
+ &format!("let Some($1) = {receiver_text} else {{\n $2\n}};\n$0"),
+ )
+ .add_to(acc, ctx.db);
+
+ postfix_snippet(
+ "while",
+ "while let Some {}",
+ &format!("while let Some($1) = {receiver_text} {{\n $0\n}}"),
+ )
+ .add_to(acc, ctx.db);
+ }
+ }
+ } else if receiver_ty.is_bool() || receiver_ty.is_unknown() {
+ postfix_snippet("if", "if expr {}", &format!("if {receiver_text} {{\n $0\n}}"))
+ .add_to(acc, ctx.db);
postfix_snippet(
- "match",
- "match expr {}",
- &format!("match {receiver_text} {{\n ${{1:_}} => {{$0}},\n}}"),
+ "while",
+ "while expr {}",
+ &format!("while {receiver_text} {{\n $0\n}}"),
)
.add_to(acc, ctx.db);
+ postfix_snippet("not", "!expr", &format!("!{receiver_text}")).add_to(acc, ctx.db);
+ } else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator() {
+ if receiver_ty.impls_trait(ctx.db, trait_, &[]) {
+ postfix_snippet(
+ "for",
+ "for ele in expr {}",
+ &format!("for ele in {receiver_text} {{\n $0\n}}"),
+ )
+ .add_to(acc, ctx.db);
+ }
}
}
- postfix_snippet("box", "Box::new(expr)", &format!("Box::new({receiver_text})"))
- .add_to(acc, ctx.db);
- postfix_snippet("dbg", "dbg!(expr)", &format!("dbg!({receiver_text})")).add_to(acc, ctx.db); // fixme
- postfix_snippet("dbgr", "dbg!(&expr)", &format!("dbg!(&{receiver_text})")).add_to(acc, ctx.db);
- postfix_snippet("call", "function(expr)", &format!("${{1}}({receiver_text})"))
- .add_to(acc, ctx.db);
-
- if let Some(parent) = dot_receiver_including_refs.syntax().parent().and_then(|p| p.parent()) {
- if matches!(parent.kind(), STMT_LIST | EXPR_STMT) {
- postfix_snippet("let", "let", &format!("let $0 = {receiver_text};"))
- .add_to(acc, ctx.db);
- postfix_snippet("letm", "let mut", &format!("let mut $0 = {receiver_text};"))
- .add_to(acc, ctx.db);
+ let mut block_should_be_wrapped = true;
+ if dot_receiver.syntax().kind() == BLOCK_EXPR {
+ block_should_be_wrapped = false;
+ if let Some(parent) = dot_receiver.syntax().parent() {
+ if matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR) {
+ block_should_be_wrapped = true;
+ }
}
+ };
+ {
+ let (open_brace, close_brace) =
+ if block_should_be_wrapped { ("{ ", " }") } else { ("", "") };
+ let (open_paren, close_paren) = if is_in_cond { ("(", ")") } else { ("", "") };
+ let unsafe_completion_string =
+ format!("{open_paren}unsafe {open_brace}{receiver_text}{close_brace}{close_paren}");
+ postfix_snippet("unsafe", "unsafe {}", &unsafe_completion_string).add_to(acc, ctx.db);
+
+ let const_completion_string =
+ format!("{open_paren}const {open_brace}{receiver_text}{close_brace}{close_paren}");
+ postfix_snippet("const", "const {}", &const_completion_string).add_to(acc, ctx.db);
}
if let ast::Expr::Literal(literal) = dot_receiver_including_refs.clone() {
@@ -568,6 +618,54 @@ fn main() {
}
#[test]
+ fn option_iflet_cond() {
+ check(
+ r#"
+//- minicore: option
+fn main() {
+ let bar = Some(true);
+ if bar.$0
+}
+"#,
+ expect![[r#"
+ me and(…) fn(self, Option<U>) -> Option<U>
+ me as_ref() const fn(&self) -> Option<&T>
+ me ok_or(…) const fn(self, E) -> Result<T, E>
+ me unwrap() const fn(self) -> T
+ me unwrap_or(…) fn(self, T) -> T
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn const const {}
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn let let Some(_)
+ sn letm let Some(mut _)
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
+ "#]],
+ );
+ check_edit(
+ "let",
+ r#"
+//- minicore: option
+fn main() {
+ let bar = Some(true);
+ if bar.$0
+}
+"#,
+ r#"
+fn main() {
+ let bar = Some(true);
+ if let Some($0) = bar
+}
+"#,
+ );
+ }
+
+ #[test]
fn option_letelse() {
check_edit(
"lete",
diff --git a/crates/ide-completion/src/completions/record.rs b/crates/ide-completion/src/completions/record.rs
index c18aab007b..36f38a70db 100644
--- a/crates/ide-completion/src/completions/record.rs
+++ b/crates/ide-completion/src/completions/record.rs
@@ -88,7 +88,7 @@ pub(crate) fn complete_record_expr_fields(
pub(crate) fn add_default_update(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- ty: Option<hir::TypeInfo>,
+ ty: Option<hir::TypeInfo<'_>>,
) {
let default_trait = ctx.famous_defs().core_default_Default();
let impls_default_trait = default_trait
@@ -117,7 +117,7 @@ pub(crate) fn add_default_update(
fn complete_fields(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- missing_fields: Vec<(hir::Field, hir::Type)>,
+ missing_fields: Vec<(hir::Field, hir::Type<'_>)>,
) {
for (field, ty) in missing_fields {
// This should call something else, we shouldn't be synthesizing a DotAccess here
diff --git a/crates/ide-completion/src/completions/snippet.rs b/crates/ide-completion/src/completions/snippet.rs
index 31aae11676..ead9852eff 100644
--- a/crates/ide-completion/src/completions/snippet.rs
+++ b/crates/ide-completion/src/completions/snippet.rs
@@ -11,8 +11,8 @@ use crate::{
pub(crate) fn complete_expr_snippet(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
- &PathExprCtx { in_block_expr, .. }: &PathExprCtx,
+ path_ctx: &PathCompletionCtx<'_>,
+ &PathExprCtx { in_block_expr, .. }: &PathExprCtx<'_>,
) {
if !matches!(path_ctx.qualified, Qualified::No) {
return;
@@ -51,7 +51,7 @@ macro_rules! $1 {
pub(crate) fn complete_item_snippet(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
kind: &ItemListKind,
) {
if !matches!(path_ctx.qualified, Qualified::No) {
diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs
index 79db705af4..7c38c7d8ce 100644
--- a/crates/ide-completion/src/completions/type.rs
+++ b/crates/ide-completion/src/completions/type.rs
@@ -12,7 +12,7 @@ use crate::{
pub(crate) fn complete_type_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
location: &TypeLocation,
) {
let _p = tracing::info_span!("complete_type_path").entered();
@@ -220,7 +220,7 @@ pub(crate) fn complete_type_path(
pub(crate) fn complete_ascribed_type(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
ascription: &TypeAscriptionTarget,
) -> Option<()> {
if !path_ctx.is_trivial_path() {
diff --git a/crates/ide-completion/src/completions/use_.rs b/crates/ide-completion/src/completions/use_.rs
index 4d6d0b758a..d2ab193ec3 100644
--- a/crates/ide-completion/src/completions/use_.rs
+++ b/crates/ide-completion/src/completions/use_.rs
@@ -13,7 +13,7 @@ use crate::{
pub(crate) fn complete_use_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, use_tree_parent, .. }: &PathCompletionCtx,
+ path_ctx @ PathCompletionCtx { qualified, use_tree_parent, .. }: &PathCompletionCtx<'_>,
name_ref: &Option<ast::NameRef>,
) {
match qualified {
diff --git a/crates/ide-completion/src/completions/vis.rs b/crates/ide-completion/src/completions/vis.rs
index d15c35ac84..38761f77a2 100644
--- a/crates/ide-completion/src/completions/vis.rs
+++ b/crates/ide-completion/src/completions/vis.rs
@@ -8,7 +8,7 @@ use crate::{
pub(crate) fn complete_vis_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
&has_in_token: &bool,
) {
match qualified {
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 5287627790..cfd7f80d40 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -65,13 +65,13 @@ impl QualifierCtx {
/// The state of the path we are currently completing.
#[derive(Debug)]
-pub(crate) struct PathCompletionCtx {
+pub(crate) struct PathCompletionCtx<'db> {
/// If this is a call with () already there (or {} in case of record patterns)
pub(crate) has_call_parens: bool,
/// If this has a macro call bang !
pub(crate) has_macro_bang: bool,
/// The qualifier of the current path.
- pub(crate) qualified: Qualified,
+ pub(crate) qualified: Qualified<'db>,
/// The parent of the path we are completing.
pub(crate) parent: Option<ast::Path>,
#[allow(dead_code)]
@@ -79,14 +79,14 @@ pub(crate) struct PathCompletionCtx {
pub(crate) path: ast::Path,
/// The path of which we are completing the segment in the original file
pub(crate) original_path: Option<ast::Path>,
- pub(crate) kind: PathKind,
+ pub(crate) kind: PathKind<'db>,
/// Whether the path segment has type args or not.
pub(crate) has_type_args: bool,
/// Whether the qualifier comes from a use tree parent or not
pub(crate) use_tree_parent: bool,
}
-impl PathCompletionCtx {
+impl PathCompletionCtx<'_> {
pub(crate) fn is_trivial_path(&self) -> bool {
matches!(
self,
@@ -104,9 +104,9 @@ impl PathCompletionCtx {
/// The kind of path we are completing right now.
#[derive(Debug, PartialEq, Eq)]
-pub(crate) enum PathKind {
+pub(crate) enum PathKind<'db> {
Expr {
- expr_ctx: PathExprCtx,
+ expr_ctx: PathExprCtx<'db>,
},
Type {
location: TypeLocation,
@@ -140,7 +140,7 @@ pub(crate) struct AttrCtx {
}
#[derive(Debug, PartialEq, Eq)]
-pub(crate) struct PathExprCtx {
+pub(crate) struct PathExprCtx<'db> {
pub(crate) in_block_expr: bool,
pub(crate) in_breakable: BreakableKind,
pub(crate) after_if_expr: bool,
@@ -152,7 +152,7 @@ pub(crate) struct PathExprCtx {
/// The surrounding RecordExpression we are completing a functional update
pub(crate) is_func_update: Option<ast::RecordExpr>,
pub(crate) self_param: Option<hir::SelfParam>,
- pub(crate) innermost_ret_ty: Option<hir::Type>,
+ pub(crate) innermost_ret_ty: Option<hir::Type<'db>>,
pub(crate) impl_: Option<ast::Impl>,
/// Whether this expression occurs in match arm guard position: before the
/// fat arrow token
@@ -241,7 +241,7 @@ pub(crate) enum ItemListKind {
}
#[derive(Debug)]
-pub(crate) enum Qualified {
+pub(crate) enum Qualified<'db> {
No,
With {
path: ast::Path,
@@ -260,7 +260,7 @@ pub(crate) enum Qualified {
},
/// <_>::
TypeAnchor {
- ty: Option<hir::Type>,
+ ty: Option<hir::Type<'db>>,
trait_: Option<hir::Trait>,
},
/// Whether the path is an absolute path
@@ -341,17 +341,17 @@ pub(crate) enum NameKind {
/// The state of the NameRef we are completing.
#[derive(Debug)]
-pub(crate) struct NameRefContext {
+pub(crate) struct NameRefContext<'db> {
/// NameRef syntax in the original file
pub(crate) nameref: Option<ast::NameRef>,
- pub(crate) kind: NameRefKind,
+ pub(crate) kind: NameRefKind<'db>,
}
/// The kind of the NameRef we are completing.
#[derive(Debug)]
-pub(crate) enum NameRefKind {
- Path(PathCompletionCtx),
- DotAccess(DotAccess),
+pub(crate) enum NameRefKind<'db> {
+ Path(PathCompletionCtx<'db>),
+ DotAccess(DotAccess<'db>),
/// Position where we are only interested in keyword completions
Keyword(ast::Item),
/// The record expression this nameref is a field of and whether a dot precedes the completion identifier.
@@ -365,9 +365,9 @@ pub(crate) enum NameRefKind {
/// The identifier we are currently completing.
#[derive(Debug)]
-pub(crate) enum CompletionAnalysis {
+pub(crate) enum CompletionAnalysis<'db> {
Name(NameContext),
- NameRef(NameRefContext),
+ NameRef(NameRefContext<'db>),
Lifetime(LifetimeContext),
/// The string the cursor is currently inside
String {
@@ -386,9 +386,9 @@ pub(crate) enum CompletionAnalysis {
/// Information about the field or method access we are completing.
#[derive(Debug)]
-pub(crate) struct DotAccess {
+pub(crate) struct DotAccess<'db> {
pub(crate) receiver: Option<ast::Expr>,
- pub(crate) receiver_ty: Option<TypeInfo>,
+ pub(crate) receiver_ty: Option<TypeInfo<'db>>,
pub(crate) kind: DotAccessKind,
pub(crate) ctx: DotAccessExprCtx,
}
@@ -457,7 +457,7 @@ pub(crate) struct CompletionContext<'a> {
/// This is usually the parameter name of the function argument we are completing.
pub(crate) expected_name: Option<NameOrNameRef>,
/// The expected type of what we are completing.
- pub(crate) expected_type: Option<Type>,
+ pub(crate) expected_type: Option<Type<'a>>,
pub(crate) qualifier_ctx: QualifierCtx,
@@ -608,7 +608,7 @@ impl CompletionContext<'_> {
pub(crate) fn iterate_path_candidates(
&self,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
mut cb: impl FnMut(hir::AssocItem),
) {
let mut seen = FxHashSet::default();
@@ -695,12 +695,12 @@ impl CompletionContext<'_> {
}
// CompletionContext construction
-impl<'a> CompletionContext<'a> {
+impl<'db> CompletionContext<'db> {
pub(crate) fn new(
- db: &'a RootDatabase,
+ db: &'db RootDatabase,
position @ FilePosition { file_id, offset }: FilePosition,
- config: &'a CompletionConfig<'a>,
- ) -> Option<(CompletionContext<'a>, CompletionAnalysis)> {
+ config: &'db CompletionConfig<'db>,
+ ) -> Option<(CompletionContext<'db>, CompletionAnalysis<'db>)> {
let _p = tracing::info_span!("CompletionContext::new").entered();
let sema = Semantics::new(db);
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index 7a2230b3e3..ea5fb39338 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -4,6 +4,7 @@ use std::iter;
use hir::{ExpandResult, InFile, Semantics, Type, TypeInfo, Variant};
use ide_db::{RootDatabase, active_parameter::ActiveParameter};
use itertools::Either;
+use stdx::always;
use syntax::{
AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
T, TextRange, TextSize,
@@ -38,9 +39,9 @@ struct ExpansionResult {
derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
}
-pub(super) struct AnalysisResult {
- pub(super) analysis: CompletionAnalysis,
- pub(super) expected: (Option<Type>, Option<ast::NameOrNameRef>),
+pub(super) struct AnalysisResult<'db> {
+ pub(super) analysis: CompletionAnalysis<'db>,
+ pub(super) expected: (Option<Type<'db>>, Option<ast::NameOrNameRef>),
pub(super) qualifier_ctx: QualifierCtx,
/// the original token of the expanded file
pub(super) token: SyntaxToken,
@@ -48,13 +49,13 @@ pub(super) struct AnalysisResult {
pub(super) original_offset: TextSize,
}
-pub(super) fn expand_and_analyze(
- sema: &Semantics<'_, RootDatabase>,
+pub(super) fn expand_and_analyze<'db>(
+ sema: &Semantics<'db, RootDatabase>,
original_file: InFile<SyntaxNode>,
speculative_file: SyntaxNode,
offset: TextSize,
original_token: &SyntaxToken,
-) -> Option<AnalysisResult> {
+) -> Option<AnalysisResult<'db>> {
// as we insert after the offset, right biased will *always* pick the identifier no matter
// if there is an ident already typed or not
let fake_ident_token = speculative_file.token_at_offset(offset).right_biased()?;
@@ -432,12 +433,13 @@ fn expand(
/// Fill the completion context, this is what does semantic reasoning about the surrounding context
/// of the completion location.
-fn analyze(
- sema: &Semantics<'_, RootDatabase>,
+fn analyze<'db>(
+ sema: &Semantics<'db, RootDatabase>,
expansion_result: ExpansionResult,
original_token: &SyntaxToken,
self_token: &SyntaxToken,
-) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
+) -> Option<(CompletionAnalysis<'db>, (Option<Type<'db>>, Option<ast::NameOrNameRef>), QualifierCtx)>
+{
let _p = tracing::info_span!("CompletionContext::analyze").entered();
let ExpansionResult {
original_file,
@@ -555,17 +557,17 @@ fn analyze(
}
/// Calculate the expected type and name of the cursor position.
-fn expected_type_and_name(
- sema: &Semantics<'_, RootDatabase>,
+fn expected_type_and_name<'db>(
+ sema: &Semantics<'db, RootDatabase>,
token: &SyntaxToken,
name_like: &ast::NameLike,
-) -> (Option<Type>, Option<NameOrNameRef>) {
+) -> (Option<Type<'db>>, Option<NameOrNameRef>) {
let mut node = match token.parent() {
Some(it) => it,
None => return (None, None),
};
- let strip_refs = |mut ty: Type| match name_like {
+ let strip_refs = |mut ty: Type<'db>| match name_like {
ast::NameLike::NameRef(n) => {
let p = match n.syntax().parent() {
Some(it) => it,
@@ -805,13 +807,13 @@ fn classify_name(
Some(NameContext { name, kind })
}
-fn classify_name_ref(
- sema: &Semantics<'_, RootDatabase>,
+fn classify_name_ref<'db>(
+ sema: &Semantics<'db, RootDatabase>,
original_file: &SyntaxNode,
name_ref: ast::NameRef,
original_offset: TextSize,
parent: SyntaxNode,
-) -> Option<(NameRefContext, QualifierCtx)> {
+) -> Option<(NameRefContext<'db>, QualifierCtx)> {
let nameref = find_node_at_offset(original_file, original_offset);
let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
@@ -868,8 +870,15 @@ fn classify_name_ref(
return None;
}
+ let mut receiver_ty = receiver.as_ref().and_then(|it| sema.type_of_expr(it));
+ if receiver_is_ambiguous_float_literal {
+ // `123.|` is parsed as a float but should actually be an integer.
+ always!(receiver_ty.as_ref().is_none_or(|receiver_ty| receiver_ty.original.is_float()));
+ receiver_ty = Some(TypeInfo { original: hir::BuiltinType::i32().ty(sema.db), adjusted: None });
+ }
+
let kind = NameRefKind::DotAccess(DotAccess {
- receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
+ receiver_ty,
kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
receiver,
ctx: DotAccessExprCtx { in_block_expr: is_in_block(field.syntax()), in_breakable: is_in_breakable(field.syntax()) }
diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs
index 19cdef30bd..dcaac3997b 100644
--- a/crates/ide-completion/src/item.rs
+++ b/crates/ide-completion/src/item.rs
@@ -502,7 +502,7 @@ pub(crate) struct Builder {
impl Builder {
pub(crate) fn from_resolution(
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
local_name: hir::Name,
resolution: hir::ScopeDef,
) -> Self {
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index 00c0b470f9..c6b8af3c79 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -122,10 +122,10 @@ impl<'a> RenderContext<'a> {
pub(crate) fn render_field(
ctx: RenderContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
receiver: Option<SmolStr>,
field: hir::Field,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
) -> CompletionItem {
let db = ctx.db();
let is_deprecated = ctx.is_deprecated(field);
@@ -204,7 +204,7 @@ pub(crate) fn render_tuple_field(
ctx: RenderContext<'_>,
receiver: Option<SmolStr>,
field: usize,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
) -> CompletionItem {
let mut item = CompletionItem::new(
SymbolKind::Field,
@@ -241,7 +241,7 @@ pub(crate) fn render_type_inference(
pub(crate) fn render_path_resolution(
ctx: RenderContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
local_name: hir::Name,
resolution: ScopeDef,
) -> Builder {
@@ -259,7 +259,7 @@ pub(crate) fn render_pattern_resolution(
pub(crate) fn render_resolution_with_import(
ctx: RenderContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
import_edit: LocatedImport,
) -> Option<Builder> {
let resolution = ScopeDef::from(import_edit.original_item);
@@ -282,10 +282,10 @@ pub(crate) fn render_resolution_with_import_pat(
pub(crate) fn render_expr(
ctx: &CompletionContext<'_>,
- expr: &hir::term_search::Expr,
+ expr: &hir::term_search::Expr<'_>,
) -> Option<Builder> {
let mut i = 1;
- let mut snippet_formatter = |ty: &hir::Type| {
+ let mut snippet_formatter = |ty: &hir::Type<'_>| {
let arg_name = ty
.as_adt()
.map(|adt| stdx::to_lower_snake_case(adt.name(ctx.db).as_str()))
@@ -295,7 +295,7 @@ pub(crate) fn render_expr(
res
};
- let mut label_formatter = |ty: &hir::Type| {
+ let mut label_formatter = |ty: &hir::Type<'_>| {
ty.as_adt()
.map(|adt| stdx::to_lower_snake_case(adt.name(ctx.db).as_str()))
.unwrap_or_else(|| String::from("..."))
@@ -391,7 +391,7 @@ fn render_resolution_pat(
fn render_resolution_path(
ctx: RenderContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
local_name: hir::Name,
import_to_add: Option<LocatedImport>,
resolution: ScopeDef,
@@ -460,7 +460,7 @@ fn render_resolution_path(
}
}
- let mut set_item_relevance = |ty: Type| {
+ let mut set_item_relevance = |ty: Type<'_>| {
if !ty.is_unknown() {
item.detail(ty.display(db, krate).to_string());
}
@@ -593,8 +593,8 @@ fn scope_def_is_deprecated(ctx: &RenderContext<'_>, resolution: ScopeDef) -> boo
// FIXME: This checks types without possible coercions which some completions might want to do
fn match_types(
ctx: &CompletionContext<'_>,
- ty1: &hir::Type,
- ty2: &hir::Type,
+ ty1: &hir::Type<'_>,
+ ty2: &hir::Type<'_>,
) -> Option<CompletionRelevanceTypeMatch> {
if ty1 == ty2 {
Some(CompletionRelevanceTypeMatch::Exact)
@@ -607,7 +607,7 @@ fn match_types(
fn compute_type_match(
ctx: &CompletionContext<'_>,
- completion_ty: &hir::Type,
+ completion_ty: &hir::Type<'_>,
) -> Option<CompletionRelevanceTypeMatch> {
let expected_type = ctx.expected_type.as_ref()?;
@@ -626,7 +626,7 @@ fn compute_exact_name_match(ctx: &CompletionContext<'_>, completion_name: &str)
fn compute_ref_match(
ctx: &CompletionContext<'_>,
- completion_ty: &hir::Type,
+ completion_ty: &hir::Type<'_>,
) -> Option<CompletionItemRefMode> {
let expected_type = ctx.expected_type.as_ref()?;
let expected_without_ref = expected_type.remove_ref();
@@ -658,8 +658,8 @@ fn compute_ref_match(
fn path_ref_match(
completion: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
- ty: &hir::Type,
+ path_ctx: &PathCompletionCtx<'_>,
+ ty: &hir::Type<'_>,
item: &mut Builder,
) {
if let Some(original_path) = &path_ctx.original_path {
@@ -733,7 +733,7 @@ mod tests {
) {
let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None);
actual.retain(|it| kinds.contains(&it.kind));
- actual.sort_by_key(|it| cmp::Reverse(it.relevance.score()));
+ actual.sort_by_key(|it| (cmp::Reverse(it.relevance.score()), it.label.primary.clone()));
check_relevance_(actual, expect);
}
@@ -743,7 +743,7 @@ mod tests {
actual.retain(|it| it.kind != CompletionItemKind::Snippet);
actual.retain(|it| it.kind != CompletionItemKind::Keyword);
actual.retain(|it| it.kind != CompletionItemKind::BuiltinType);
- actual.sort_by_key(|it| cmp::Reverse(it.relevance.score()));
+ actual.sort_by_key(|it| (cmp::Reverse(it.relevance.score()), it.label.primary.clone()));
check_relevance_(actual, expect);
}
@@ -824,9 +824,9 @@ fn main() {
st dep::test_mod_b::Struct {…} dep::test_mod_b::Struct { } [type_could_unify]
ex dep::test_mod_b::Struct { } [type_could_unify]
st Struct Struct [type_could_unify+requires_import]
+ md dep []
fn main() fn() []
fn test(…) fn(Struct) []
- md dep []
st Struct Struct [requires_import]
"#]],
);
@@ -862,9 +862,9 @@ fn main() {
"#,
expect![[r#"
un Union Union [type_could_unify+requires_import]
+ md dep []
fn main() fn() []
fn test(…) fn(Union) []
- md dep []
en Union Union [requires_import]
"#]],
);
@@ -900,9 +900,9 @@ fn main() {
ev dep::test_mod_b::Enum::variant dep::test_mod_b::Enum::variant [type_could_unify]
ex dep::test_mod_b::Enum::variant [type_could_unify]
en Enum Enum [type_could_unify+requires_import]
+ md dep []
fn main() fn() []
fn test(…) fn(Enum) []
- md dep []
en Enum Enum [requires_import]
"#]],
);
@@ -937,9 +937,9 @@ fn main() {
expect![[r#"
ev dep::test_mod_b::Enum::Variant dep::test_mod_b::Enum::Variant [type_could_unify]
ex dep::test_mod_b::Enum::Variant [type_could_unify]
+ md dep []
fn main() fn() []
fn test(…) fn(Enum) []
- md dep []
"#]],
);
}
@@ -967,9 +967,9 @@ fn main() {
}
"#,
expect![[r#"
+ md dep []
fn main() fn() []
fn test(…) fn(fn(usize) -> i32) []
- md dep []
fn function fn(usize) -> i32 [requires_import]
fn function(…) fn(isize) -> i32 [requires_import]
"#]],
@@ -1000,9 +1000,9 @@ fn main() {
"#,
expect![[r#"
ct CONST i32 [type_could_unify+requires_import]
+ md dep []
fn main() fn() []
fn test(…) fn(i32) []
- md dep []
ct CONST i64 [requires_import]
"#]],
);
@@ -1032,9 +1032,9 @@ fn main() {
"#,
expect![[r#"
sc STATIC i32 [type_could_unify+requires_import]
+ md dep []
fn main() fn() []
fn test(…) fn(i32) []
- md dep []
sc STATIC i64 [requires_import]
"#]],
);
@@ -1090,8 +1090,8 @@ fn func(input: Struct) { }
"#,
expect![[r#"
- st Struct Struct [type]
st Self Self [type]
+ st Struct Struct [type]
sp Self Struct [type]
st Struct Struct [type]
ex Struct [type]
@@ -1119,9 +1119,9 @@ fn main() {
"#,
expect![[r#"
lc input bool [type+name+local]
+ ex false [type]
ex input [type]
ex true [type]
- ex false [type]
lc inputbad i32 [local]
fn main() fn() []
fn test(…) fn(bool) []
@@ -2088,9 +2088,9 @@ fn f() { A { bar: b$0 }; }
"#,
expect![[r#"
fn bar() fn() -> u8 [type+name]
+ ex bar() [type]
fn baz() fn() -> u8 [type]
ex baz() [type]
- ex bar() [type]
st A A []
fn f() fn() []
"#]],
@@ -2199,8 +2199,8 @@ fn main() {
lc s S [type+name+local]
st S S [type]
st S S [type]
- ex s [type]
ex S [type]
+ ex s [type]
fn foo(…) fn(&mut S) []
fn main() fn() []
"#]],
@@ -2218,8 +2218,8 @@ fn main() {
st S S [type]
lc ssss S [type+local]
st S S [type]
- ex ssss [type]
ex S [type]
+ ex ssss [type]
fn foo(…) fn(&mut S) []
fn main() fn() []
"#]],
@@ -2252,11 +2252,11 @@ fn main() {
ex Foo [type]
lc foo &Foo [local]
lc *foo [type+local]
- fn bar(…) fn(Foo) []
- fn main() fn() []
- md core []
tt Clone []
tt Copy []
+ fn bar(…) fn(Foo) []
+ md core []
+ fn main() fn() []
"#]],
);
}
@@ -2297,9 +2297,9 @@ fn main() {
st &S [type]
st T T []
st &T [type]
+ md core []
fn foo(…) fn(&S) []
fn main() fn() []
- md core []
"#]],
)
}
@@ -2346,9 +2346,9 @@ fn main() {
st &mut S [type]
st T T []
st &mut T [type]
+ md core []
fn foo(…) fn(&mut S) []
fn main() fn() []
- md core []
"#]],
)
}
@@ -2364,8 +2364,8 @@ fn foo(bar: u32) {
}
"#,
expect![[r#"
- lc baz i32 [local]
lc bar u32 [local]
+ lc baz i32 [local]
fn foo(…) fn(u32) []
"#]],
);
@@ -2449,9 +2449,9 @@ fn main() {
st &T [type]
fn bar() fn() -> T []
fn &bar() [type]
+ md core []
fn foo(…) fn(&S) []
fn main() fn() []
- md core []
"#]],
)
}
@@ -2702,8 +2702,8 @@ fn test() {
fn fn_builder() fn() -> FooBuilder [type_could_unify]
fn fn_ctr_wrapped() fn() -> Option<Foo<T>> [type_could_unify]
fn fn_ctr_wrapped_2() fn() -> Result<Foo<T>, u32> [type_could_unify]
- me fn_returns_unit(…) fn(&self) [type_could_unify]
fn fn_other() fn() -> Option<u32> [type_could_unify]
+ me fn_returns_unit(…) fn(&self) [type_could_unify]
"#]],
);
}
@@ -2965,12 +2965,12 @@ fn foo() {
ev Foo::B Foo::B [type_could_unify]
ev Foo::A(…) Foo::A(T) [type_could_unify]
lc foo Foo<u32> [type+local]
- ex foo [type]
ex Foo::B [type]
+ ex foo [type]
en Foo Foo<{unknown}> [type_could_unify]
- fn foo() fn() []
fn bar() fn() -> Foo<u8> []
fn baz() fn() -> Foo<T> []
+ fn foo() fn() []
"#]],
);
}
@@ -3000,19 +3000,19 @@ fn main() {
expect![[r#"
sn not !expr [snippet]
me not() fn(self) -> <Self as Not>::Output [type_could_unify+requires_import]
- sn if if expr {} []
- sn while while expr {} []
- sn ref &expr []
- sn refm &mut expr []
- sn deref *expr []
- sn unsafe unsafe {} []
- sn const const {} []
- sn match match expr {} []
sn box Box::new(expr) []
+ sn call function(expr) []
+ sn const const {} []
sn dbg dbg!(expr) []
sn dbgr dbg!(&expr) []
- sn call function(expr) []
+ sn deref *expr []
+ sn if if expr {} []
+ sn match match expr {} []
+ sn ref &expr []
+ sn refm &mut expr []
sn return return expr []
+ sn unsafe unsafe {} []
+ sn while while expr {} []
"#]],
);
}
@@ -3033,19 +3033,19 @@ fn main() {
&[CompletionItemKind::Snippet, CompletionItemKind::SymbolKind(SymbolKind::Method)],
expect![[r#"
me f() fn(&self) []
- sn ref &expr []
- sn refm &mut expr []
- sn deref *expr []
- sn unsafe unsafe {} []
- sn const const {} []
- sn match match expr {} []
sn box Box::new(expr) []
+ sn call function(expr) []
+ sn const const {} []
sn dbg dbg!(expr) []
sn dbgr dbg!(&expr) []
- sn call function(expr) []
+ sn deref *expr []
sn let let []
sn letm let mut []
+ sn match match expr {} []
+ sn ref &expr []
+ sn refm &mut expr []
sn return return expr []
+ sn unsafe unsafe {} []
"#]],
);
}
diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs
index 2fe517fa8c..7669aec8f5 100644
--- a/crates/ide-completion/src/render/function.rs
+++ b/crates/ide-completion/src/render/function.rs
@@ -22,13 +22,13 @@ use crate::{
#[derive(Debug)]
enum FuncKind<'ctx> {
- Function(&'ctx PathCompletionCtx),
- Method(&'ctx DotAccess, Option<SmolStr>),
+ Function(&'ctx PathCompletionCtx<'ctx>),
+ Method(&'ctx DotAccess<'ctx>, Option<SmolStr>),
}
pub(crate) fn render_fn(
ctx: RenderContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
local_name: Option<hir::Name>,
func: hir::Function,
) -> Builder {
@@ -38,7 +38,7 @@ pub(crate) fn render_fn(
pub(crate) fn render_method(
ctx: RenderContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
receiver: Option<SmolStr>,
local_name: Option<hir::Name>,
func: hir::Function,
@@ -186,8 +186,8 @@ fn render(
fn compute_return_type_match(
db: &dyn HirDatabase,
ctx: &RenderContext<'_>,
- self_type: hir::Type,
- ret_type: &hir::Type,
+ self_type: hir::Type<'_>,
+ ret_type: &hir::Type<'_>,
) -> CompletionRelevanceReturnType {
if match_types(ctx.completion, &self_type, ret_type).is_some() {
// fn([..]) -> Self
@@ -217,8 +217,8 @@ pub(super) fn add_call_parens<'b>(
name: SmolStr,
escaped_name: SmolStr,
self_param: Option<hir::SelfParam>,
- params: Vec<hir::Param>,
- ret_type: &hir::Type,
+ params: Vec<hir::Param<'_>>,
+ ret_type: &hir::Type<'_>,
) -> &'b mut Builder {
cov_mark::hit!(inserts_parens_for_function_calls);
@@ -288,7 +288,7 @@ pub(super) fn add_call_parens<'b>(
builder.label(SmolStr::from_iter([&name, label_suffix])).insert_snippet(cap, snippet)
}
-fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type) -> &'static str {
+fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type<'_>) -> &'static str {
if let Some(derefed_ty) = ty.remove_ref() {
for (name, local) in ctx.locals.iter().sorted_by_key(|&(k, _)| k.clone()) {
if name.as_str() == arg {
@@ -369,12 +369,12 @@ fn params_display(ctx: &CompletionContext<'_>, detail: &mut String, func: hir::F
}
}
-fn params(
- ctx: &CompletionContext<'_>,
+fn params<'db>(
+ ctx: &CompletionContext<'db>,
func: hir::Function,
func_kind: &FuncKind<'_>,
has_dot_receiver: bool,
-) -> Option<(Option<hir::SelfParam>, Vec<hir::Param>)> {
+) -> Option<(Option<hir::SelfParam>, Vec<hir::Param<'db>>)> {
ctx.config.callable.as_ref()?;
// Don't add parentheses if the expected type is a function reference with the same signature.
diff --git a/crates/ide-completion/src/render/literal.rs b/crates/ide-completion/src/render/literal.rs
index 5a9e35a729..6c89e49f94 100644
--- a/crates/ide-completion/src/render/literal.rs
+++ b/crates/ide-completion/src/render/literal.rs
@@ -21,7 +21,7 @@ use crate::{
pub(crate) fn render_variant_lit(
ctx: RenderContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
local_name: Option<hir::Name>,
variant: hir::Variant,
path: Option<hir::ModPath>,
@@ -35,7 +35,7 @@ pub(crate) fn render_variant_lit(
pub(crate) fn render_struct_literal(
ctx: RenderContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
strukt: hir::Struct,
path: Option<hir::ModPath>,
local_name: Option<hir::Name>,
@@ -49,7 +49,7 @@ pub(crate) fn render_struct_literal(
fn render(
ctx @ RenderContext { completion, .. }: RenderContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
thing: Variant,
name: hir::Name,
path: Option<hir::ModPath>,
@@ -194,7 +194,7 @@ impl Variant {
}
}
- fn ty(self, db: &dyn HirDatabase) -> hir::Type {
+ fn ty(self, db: &dyn HirDatabase) -> hir::Type<'_> {
match self {
Variant::Struct(it) => it.ty(db),
Variant::EnumVariant(it) => it.parent_enum(db).ty(db),
diff --git a/crates/ide-completion/src/render/macro_.rs b/crates/ide-completion/src/render/macro_.rs
index 4674dae031..35fe407b2e 100644
--- a/crates/ide-completion/src/render/macro_.rs
+++ b/crates/ide-completion/src/render/macro_.rs
@@ -12,7 +12,7 @@ use crate::{
pub(crate) fn render_macro(
ctx: RenderContext<'_>,
- PathCompletionCtx { kind, has_macro_bang, has_call_parens, .. }: &PathCompletionCtx,
+ PathCompletionCtx { kind, has_macro_bang, has_call_parens, .. }: &PathCompletionCtx<'_>,
name: hir::Name,
macro_: hir::Macro,
diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs
index dcc51a86a8..60ec112823 100644
--- a/crates/ide-completion/src/render/pattern.rs
+++ b/crates/ide-completion/src/render/pattern.rs
@@ -46,7 +46,7 @@ pub(crate) fn render_struct_pat(
pub(crate) fn render_variant_pat(
ctx: RenderContext<'_>,
pattern_ctx: &PatternContext,
- path_ctx: Option<&PathCompletionCtx>,
+ path_ctx: Option<&PathCompletionCtx<'_>>,
variant: hir::Variant,
local_name: Option<Name>,
path: Option<&hir::ModPath>,
@@ -109,7 +109,7 @@ fn build_completion(
lookup: SmolStr,
pat: String,
def: impl HasDocs + Copy,
- adt_ty: hir::Type,
+ adt_ty: hir::Type<'_>,
// Missing in context of match statement completions
is_variant_missing: bool,
) -> CompletionItem {
diff --git a/crates/ide-completion/src/tests/attribute.rs b/crates/ide-completion/src/tests/attribute.rs
index 411902f111..46a3630045 100644
--- a/crates/ide-completion/src/tests/attribute.rs
+++ b/crates/ide-completion/src/tests/attribute.rs
@@ -878,6 +878,7 @@ mod derive {
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de PartialEq macro PartialEq
de PartialEq, Eq
@@ -900,6 +901,7 @@ mod derive {
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de Eq
de Eq, PartialOrd, Ord
@@ -921,6 +923,7 @@ mod derive {
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de Eq
de Eq, PartialOrd, Ord
@@ -942,6 +945,7 @@ mod derive {
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de PartialOrd
de PartialOrd, Ord
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index b46e4c3206..33f729f016 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -1474,20 +1474,18 @@ fn main() {
}
"#,
expect![[r#"
+ me foo() fn(&self)
sn box Box::new(expr)
sn call function(expr)
sn const const {}
sn dbg dbg!(expr)
sn dbgr dbg!(&expr)
sn deref *expr
- sn if if expr {}
sn match match expr {}
- sn not !expr
sn ref &expr
sn refm &mut expr
sn return return expr
sn unsafe unsafe {}
- sn while while expr {}
"#]],
);
}
@@ -2243,3 +2241,37 @@ fn main() {
"#,
);
}
+
+#[test]
+fn ambiguous_float_literal() {
+ check(
+ r#"
+#![rustc_coherence_is_core]
+
+impl i32 {
+ pub fn int_method(self) {}
+}
+impl f64 {
+ pub fn float_method(self) {}
+}
+
+fn foo() {
+ 1.$0
+}
+ "#,
+ expect![[r#"
+ me int_method() fn(self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn const const {}
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
+ "#]],
+ );
+}
diff --git a/crates/ide-completion/src/tests/item_list.rs b/crates/ide-completion/src/tests/item_list.rs
index fcdf10c856..ac32649d4f 100644
--- a/crates/ide-completion/src/tests/item_list.rs
+++ b/crates/ide-completion/src/tests/item_list.rs
@@ -458,6 +458,33 @@ type O = $0;
r"
struct A;
trait B {
+type O<'a>
+where
+Self: 'a;
+}
+impl B for A {
+$0
+}
+",
+ r#"
+struct A;
+trait B {
+type O<'a>
+where
+Self: 'a;
+}
+impl B for A {
+type O<'a> = $0
+where
+Self: 'a;
+}
+"#,
+ );
+ check_edit(
+ "type O",
+ r"
+struct A;
+trait B {
type O: ?Sized = u32;
}
impl B for A {
@@ -550,3 +577,30 @@ fn inside_extern_blocks() {
"#]],
)
}
+
+#[test]
+fn tokens_from_macro() {
+ check_edit(
+ "fn as_ref",
+ r#"
+//- proc_macros: identity
+//- minicore: as_ref
+struct Foo;
+
+#[proc_macros::identity]
+impl<'a> AsRef<&'a i32> for Foo {
+ $0
+}
+ "#,
+ r#"
+struct Foo;
+
+#[proc_macros::identity]
+impl<'a> AsRef<&'a i32> for Foo {
+ fn as_ref(&self) -> &&'a i32 {
+ $0
+}
+}
+ "#,
+ );
+}
diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs
index 125e11e9e3..c7e2d05825 100644
--- a/crates/ide-completion/src/tests/type_pos.rs
+++ b/crates/ide-completion/src/tests/type_pos.rs
@@ -429,18 +429,18 @@ trait Tr<T> {
impl Tr<$0
"#,
expect![[r#"
- en Enum Enum
- ma makro!(…) macro_rules! makro
+ en Enum Enum
+ ma makro!(…) macro_rules! makro
md module
- sp Self dyn Tr<{unknown}> + 'static
- st Record Record
- st S S
- st Tuple Tuple
- st Unit Unit
+ sp Self dyn Tr<{unknown}>
+ st Record Record
+ st S S
+ st Tuple Tuple
+ st Unit Unit
tt Tr
tt Trait
- un Union Union
- bt u32 u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
diff --git a/crates/ide-completion/src/tests/visibility.rs b/crates/ide-completion/src/tests/visibility.rs
index 4b5a0ac1c2..b404011dfe 100644
--- a/crates/ide-completion/src/tests/visibility.rs
+++ b/crates/ide-completion/src/tests/visibility.rs
@@ -1,7 +1,7 @@
//! Completion tests for visibility modifiers.
use expect_test::expect;
-use crate::tests::{check, check_with_trigger_character};
+use crate::tests::{check, check_with_private_editable, check_with_trigger_character};
#[test]
fn empty_pub() {
@@ -78,3 +78,90 @@ mod bar {}
"#]],
);
}
+
+#[test]
+fn use_inner_public_function() {
+ check(
+ r#"
+//- /inner.rs crate:inner
+pub fn inner_public() {}
+fn inner_private() {}
+//- /foo.rs crate:foo deps:inner
+use inner::inner_public;
+pub fn outer_public() {}
+//- /lib.rs crate:lib deps:foo
+fn x() {
+ foo::$0
+}
+ "#,
+ expect![[r#"
+ fn outer_public() fn()
+ "#]],
+ );
+}
+
+#[test]
+fn pub_use_inner_public_function() {
+ check(
+ r#"
+//- /inner.rs crate:inner
+pub fn inner_public() {}
+fn inner_private() {}
+//- /foo.rs crate:foo deps:inner
+pub use inner::inner_public;
+pub fn outer_public() {}
+//- /lib.rs crate:lib deps:foo
+fn x() {
+ foo::$0
+}
+ "#,
+ expect![[r#"
+ fn inner_public() fn()
+ fn outer_public() fn()
+ "#]],
+ );
+}
+
+#[test]
+fn use_inner_public_function_private_editable() {
+ check_with_private_editable(
+ r#"
+//- /inner.rs crate:inner
+pub fn inner_public() {}
+fn inner_private() {}
+//- /foo.rs crate:foo deps:inner
+use inner::inner_public;
+pub fn outer_public() {}
+//- /lib.rs crate:lib deps:foo
+fn x() {
+ foo::$0
+}
+ "#,
+ expect![[r#"
+ fn inner_public() fn()
+ fn outer_public() fn()
+ "#]],
+ );
+}
+
+#[test]
+fn pub_use_inner_public_function_private_editable() {
+ check_with_private_editable(
+ r#"
+//- /inner.rs crate:inner
+pub fn inner_public() {}
+fn inner_private() {}
+//- /foo.rs crate:foo deps:inner
+pub use inner::inner_public;
+pub fn outer_public() {}
+//- /lib.rs crate:lib deps:foo
+fn x() {
+ foo::$0
+}
+ "#,
+ expect![[r#"
+ fn inner_public() fn()
+ fn outer_public() fn()
+ "#]],
+ );
+}
diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml
index 583318de26..e065adb0f0 100644
--- a/crates/ide-db/Cargo.toml
+++ b/crates/ide-db/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
@@ -22,7 +23,7 @@ either.workspace = true
itertools.workspace = true
arrayvec.workspace = true
indexmap.workspace = true
-memchr = "2.7.4"
+memchr = "2.7.5"
salsa.workspace = true
salsa-macros.workspace = true
query-group.workspace = true
diff --git a/crates/ide-db/src/active_parameter.rs b/crates/ide-db/src/active_parameter.rs
index 7b5723f37f..9edfc113f7 100644
--- a/crates/ide-db/src/active_parameter.rs
+++ b/crates/ide-db/src/active_parameter.rs
@@ -13,21 +13,21 @@ use syntax::{
use crate::RootDatabase;
#[derive(Debug)]
-pub struct ActiveParameter {
- pub ty: Type,
+pub struct ActiveParameter<'db> {
+ pub ty: Type<'db>,
pub src: Option<InFile<Either<ast::SelfParam, ast::Param>>>,
}
-impl ActiveParameter {
+impl<'db> ActiveParameter<'db> {
/// Returns information about the call argument this token is part of.
- pub fn at_token(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Self> {
+ pub fn at_token(sema: &Semantics<'db, RootDatabase>, token: SyntaxToken) -> Option<Self> {
let (signature, active_parameter) = callable_for_token(sema, token)?;
Self::from_signature_and_active_parameter(sema, signature, active_parameter)
}
/// Returns information about the call argument this token is part of.
pub fn at_arg(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &'db Semantics<'db, RootDatabase>,
list: ast::ArgList,
at: TextSize,
) -> Option<Self> {
@@ -36,8 +36,8 @@ impl ActiveParameter {
}
fn from_signature_and_active_parameter(
- sema: &Semantics<'_, RootDatabase>,
- signature: hir::Callable,
+ sema: &Semantics<'db, RootDatabase>,
+ signature: hir::Callable<'db>,
active_parameter: Option<usize>,
) -> Option<Self> {
let idx = active_parameter?;
@@ -63,10 +63,10 @@ impl ActiveParameter {
}
/// Returns a [`hir::Callable`] this token is a part of and its argument index of said callable.
-pub fn callable_for_token(
- sema: &Semantics<'_, RootDatabase>,
+pub fn callable_for_token<'db>(
+ sema: &Semantics<'db, RootDatabase>,
token: SyntaxToken,
-) -> Option<(hir::Callable, Option<usize>)> {
+) -> Option<(hir::Callable<'db>, Option<usize>)> {
let offset = token.text_range().start();
// Find the calling expression and its NameRef
let parent = token.parent()?;
@@ -79,21 +79,21 @@ pub fn callable_for_token(
}
/// Returns a [`hir::Callable`] this token is a part of and its argument index of said callable.
-pub fn callable_for_arg_list(
- sema: &Semantics<'_, RootDatabase>,
+pub fn callable_for_arg_list<'db>(
+ sema: &Semantics<'db, RootDatabase>,
arg_list: ast::ArgList,
at: TextSize,
-) -> Option<(hir::Callable, Option<usize>)> {
+) -> Option<(hir::Callable<'db>, Option<usize>)> {
debug_assert!(arg_list.syntax().text_range().contains(at));
let callable = arg_list.syntax().parent().and_then(ast::CallableExpr::cast)?;
callable_for_node(sema, &callable, at)
}
-pub fn callable_for_node(
- sema: &Semantics<'_, RootDatabase>,
+pub fn callable_for_node<'db>(
+ sema: &Semantics<'db, RootDatabase>,
calling_node: &ast::CallableExpr,
offset: TextSize,
-) -> Option<(hir::Callable, Option<usize>)> {
+) -> Option<(hir::Callable<'db>, Option<usize>)> {
let callable = match calling_node {
ast::CallableExpr::Call(call) => sema.resolve_expr_as_callable(&call.expr()?),
ast::CallableExpr::MethodCall(call) => sema.resolve_method_call_as_callable(call),
diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs
index d5db1c481b..a4a140ec57 100644
--- a/crates/ide-db/src/defs.rs
+++ b/crates/ide-db/src/defs.rs
@@ -385,17 +385,17 @@ fn find_std_module(
// FIXME: IdentClass as a name no longer fits
#[derive(Debug)]
-pub enum IdentClass {
- NameClass(NameClass),
- NameRefClass(NameRefClass),
+pub enum IdentClass<'db> {
+ NameClass(NameClass<'db>),
+ NameRefClass(NameRefClass<'db>),
Operator(OperatorClass),
}
-impl IdentClass {
+impl<'db> IdentClass<'db> {
pub fn classify_node(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
node: &SyntaxNode,
- ) -> Option<IdentClass> {
+ ) -> Option<IdentClass<'db>> {
match_ast! {
match node {
ast::Name(name) => NameClass::classify(sema, &name).map(IdentClass::NameClass),
@@ -418,23 +418,23 @@ impl IdentClass {
}
pub fn classify_token(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
token: &SyntaxToken,
- ) -> Option<IdentClass> {
+ ) -> Option<IdentClass<'db>> {
let parent = token.parent()?;
Self::classify_node(sema, &parent)
}
pub fn classify_lifetime(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
lifetime: &ast::Lifetime,
- ) -> Option<IdentClass> {
+ ) -> Option<IdentClass<'db>> {
NameRefClass::classify_lifetime(sema, lifetime)
.map(IdentClass::NameRefClass)
.or_else(|| NameClass::classify_lifetime(sema, lifetime).map(IdentClass::NameClass))
}
- pub fn definitions(self) -> ArrayVec<(Definition, Option<GenericSubstitution>), 2> {
+ pub fn definitions(self) -> ArrayVec<(Definition, Option<GenericSubstitution<'db>>), 2> {
let mut res = ArrayVec::new();
match self {
IdentClass::NameClass(NameClass::Definition(it) | NameClass::ConstReference(it)) => {
@@ -518,7 +518,7 @@ impl IdentClass {
///
/// A model special case is `None` constant in pattern.
#[derive(Debug)]
-pub enum NameClass {
+pub enum NameClass<'db> {
Definition(Definition),
/// `None` in `if let None = Some(82) {}`.
/// Syntactically, it is a name, but semantically it is a reference.
@@ -528,11 +528,11 @@ pub enum NameClass {
PatFieldShorthand {
local_def: Local,
field_ref: Field,
- adt_subst: GenericSubstitution,
+ adt_subst: GenericSubstitution<'db>,
},
}
-impl NameClass {
+impl<'db> NameClass<'db> {
/// `Definition` defined by this name.
pub fn defined(self) -> Option<Definition> {
let res = match self {
@@ -545,7 +545,10 @@ impl NameClass {
Some(res)
}
- pub fn classify(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<NameClass> {
+ pub fn classify(
+ sema: &Semantics<'db, RootDatabase>,
+ name: &ast::Name,
+ ) -> Option<NameClass<'db>> {
let _p = tracing::info_span!("NameClass::classify").entered();
let parent = name.syntax().parent()?;
@@ -597,10 +600,10 @@ impl NameClass {
Some(definition)
}
- fn classify_ident_pat(
- sema: &Semantics<'_, RootDatabase>,
+ fn classify_ident_pat<'db>(
+ sema: &Semantics<'db, RootDatabase>,
ident_pat: ast::IdentPat,
- ) -> Option<NameClass> {
+ ) -> Option<NameClass<'db>> {
if let Some(def) = sema.resolve_bind_pat_to_const(&ident_pat) {
return Some(NameClass::ConstReference(Definition::from(def)));
}
@@ -638,9 +641,9 @@ impl NameClass {
}
pub fn classify_lifetime(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
lifetime: &ast::Lifetime,
- ) -> Option<NameClass> {
+ ) -> Option<NameClass<'db>> {
let _p = tracing::info_span!("NameClass::classify_lifetime", ?lifetime).entered();
let parent = lifetime.syntax().parent()?;
@@ -723,12 +726,12 @@ impl OperatorClass {
/// A model special case is field shorthand syntax, which uses a single
/// reference to point to two different defs.
#[derive(Debug)]
-pub enum NameRefClass {
- Definition(Definition, Option<GenericSubstitution>),
+pub enum NameRefClass<'db> {
+ Definition(Definition, Option<GenericSubstitution<'db>>),
FieldShorthand {
local_ref: Local,
field_ref: Field,
- adt_subst: GenericSubstitution,
+ adt_subst: GenericSubstitution<'db>,
},
/// The specific situation where we have an extern crate decl without a rename
/// Here we have both a declaration and a reference.
@@ -741,13 +744,13 @@ pub enum NameRefClass {
},
}
-impl NameRefClass {
+impl<'db> NameRefClass<'db> {
// Note: we don't have unit-tests for this rather important function.
// It is primarily exercised via goto definition tests in `ide`.
pub fn classify(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
name_ref: &ast::NameRef,
- ) -> Option<NameRefClass> {
+ ) -> Option<NameRefClass<'db>> {
let _p = tracing::info_span!("NameRefClass::classify", ?name_ref).entered();
let parent = name_ref.syntax().parent()?;
@@ -866,9 +869,9 @@ impl NameRefClass {
}
pub fn classify_lifetime(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
lifetime: &ast::Lifetime,
- ) -> Option<NameRefClass> {
+ ) -> Option<NameRefClass<'db>> {
let _p = tracing::info_span!("NameRefClass::classify_lifetime", ?lifetime).entered();
if lifetime.text() == "'static" {
return Some(NameRefClass::Definition(
diff --git a/crates/ide-db/src/famous_defs.rs b/crates/ide-db/src/famous_defs.rs
index 994150b1ac..8e68738508 100644
--- a/crates/ide-db/src/famous_defs.rs
+++ b/crates/ide-db/src/famous_defs.rs
@@ -106,6 +106,18 @@ impl FamousDefs<'_, '_> {
self.find_trait("core:convert:AsRef")
}
+ pub fn core_convert_AsMut(&self) -> Option<Trait> {
+ self.find_trait("core:convert:AsMut")
+ }
+
+ pub fn core_borrow_Borrow(&self) -> Option<Trait> {
+ self.find_trait("core:borrow:Borrow")
+ }
+
+ pub fn core_borrow_BorrowMut(&self) -> Option<Trait> {
+ self.find_trait("core:borrow:BorrowMut")
+ }
+
pub fn core_ops_ControlFlow(&self) -> Option<Enum> {
self.find_enum("core:ops:ControlFlow")
}
diff --git a/crates/ide-db/src/generated/lints.rs b/crates/ide-db/src/generated/lints.rs
index de8a42979b..f9eb44d03a 100644
--- a/crates/ide-db/src/generated/lints.rs
+++ b/crates/ide-db/src/generated/lints.rs
@@ -4711,9 +4711,9 @@ The tracking issue for this feature is: [#133668]
label: "const_trait_impl",
description: r##"# `const_trait_impl`
-The tracking issue for this feature is: [#67792]
+The tracking issue for this feature is: [#143874]
-[#67792]: https://github.com/rust-lang/rust/issues/67792
+[#143874]: https://github.com/rust-lang/rust/issues/143874
------------------------
"##,
diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs
index ac592dfe93..9f35988924 100644
--- a/crates/ide-db/src/imports/import_assets.rs
+++ b/crates/ide-db/src/imports/import_assets.rs
@@ -25,26 +25,26 @@ use crate::{
/// * assists
/// * etc.
#[derive(Debug)]
-pub enum ImportCandidate {
+pub enum ImportCandidate<'db> {
/// A path, qualified (`std::collections::HashMap`) or not (`HashMap`).
Path(PathImportCandidate),
/// A trait associated function (with no self parameter) or an associated constant.
/// For 'test_mod::TestEnum::test_function', `ty` is the `test_mod::TestEnum` expression type
/// and `name` is the `test_function`
- TraitAssocItem(TraitImportCandidate),
+ TraitAssocItem(TraitImportCandidate<'db>),
/// A trait method with self parameter.
/// For 'test_enum.test_method()', `ty` is the `test_enum` expression type
/// and `name` is the `test_method`
- TraitMethod(TraitImportCandidate),
+ TraitMethod(TraitImportCandidate<'db>),
}
/// A trait import needed for a given associated item access.
/// For `some::path::SomeStruct::ASSOC_`, contains the
/// type of `some::path::SomeStruct` and `ASSOC_` as the item name.
#[derive(Debug)]
-pub struct TraitImportCandidate {
+pub struct TraitImportCandidate<'db> {
/// A type of the item that has the associated item accessed at.
- pub receiver_ty: Type,
+ pub receiver_ty: Type<'db>,
/// The associated item name that the trait to import should contain.
pub assoc_item_name: NameToImport,
}
@@ -100,16 +100,16 @@ impl NameToImport {
/// A struct to find imports in the project, given a certain name (or its part) and the context.
#[derive(Debug)]
-pub struct ImportAssets {
- import_candidate: ImportCandidate,
+pub struct ImportAssets<'db> {
+ import_candidate: ImportCandidate<'db>,
candidate_node: SyntaxNode,
module_with_candidate: Module,
}
-impl ImportAssets {
+impl<'db> ImportAssets<'db> {
pub fn for_method_call(
method_call: &ast::MethodCallExpr,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
) -> Option<Self> {
let candidate_node = method_call.syntax().clone();
Some(Self {
@@ -121,7 +121,7 @@ impl ImportAssets {
pub fn for_exact_path(
fully_qualified_path: &ast::Path,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
) -> Option<Self> {
let candidate_node = fully_qualified_path.syntax().clone();
if let Some(use_tree) = candidate_node.ancestors().find_map(ast::UseTree::cast) {
@@ -139,7 +139,7 @@ impl ImportAssets {
})
}
- pub fn for_ident_pat(sema: &Semantics<'_, RootDatabase>, pat: &ast::IdentPat) -> Option<Self> {
+ pub fn for_ident_pat(sema: &Semantics<'db, RootDatabase>, pat: &ast::IdentPat) -> Option<Self> {
if !pat.is_simple_ident() {
return None;
}
@@ -156,7 +156,7 @@ impl ImportAssets {
module_with_candidate: Module,
qualifier: Option<ast::Path>,
fuzzy_name: String,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
candidate_node: SyntaxNode,
) -> Option<Self> {
Some(Self {
@@ -168,7 +168,7 @@ impl ImportAssets {
pub fn for_fuzzy_method_call(
module_with_method_call: Module,
- receiver_ty: Type,
+ receiver_ty: Type<'db>,
fuzzy_method_name: String,
candidate_node: SyntaxNode,
) -> Option<Self> {
@@ -229,14 +229,14 @@ impl LocatedImport {
}
}
-impl ImportAssets {
- pub fn import_candidate(&self) -> &ImportCandidate {
+impl<'db> ImportAssets<'db> {
+ pub fn import_candidate(&self) -> &ImportCandidate<'db> {
&self.import_candidate
}
pub fn search_for_imports(
&self,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
cfg: ImportPathConfig,
prefix_kind: PrefixKind,
) -> impl Iterator<Item = LocatedImport> {
@@ -247,7 +247,7 @@ impl ImportAssets {
/// This may return non-absolute paths if a part of the returned path is already imported into scope.
pub fn search_for_relative_paths(
&self,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
cfg: ImportPathConfig,
) -> impl Iterator<Item = LocatedImport> {
let _p = tracing::info_span!("ImportAssets::search_for_relative_paths").entered();
@@ -286,7 +286,7 @@ impl ImportAssets {
fn search_for(
&self,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
prefixed: Option<PrefixKind>,
cfg: ImportPathConfig,
) -> impl Iterator<Item = LocatedImport> {
@@ -533,11 +533,11 @@ fn item_for_path_search_assoc(db: &RootDatabase, assoc_item: AssocItem) -> Optio
})
}
-fn trait_applicable_items(
- db: &RootDatabase,
+fn trait_applicable_items<'db>(
+ db: &'db RootDatabase,
current_crate: Crate,
- scope: &SemanticsScope<'_>,
- trait_candidate: &TraitImportCandidate,
+ scope: &SemanticsScope<'db>,
+ trait_candidate: &TraitImportCandidate<'db>,
trait_assoc_item: bool,
mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
scope_filter: impl Fn(hir::Trait) -> bool,
@@ -709,9 +709,9 @@ fn get_mod_path(
}
}
-impl ImportCandidate {
+impl<'db> ImportCandidate<'db> {
fn for_method_call(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
method_call: &ast::MethodCallExpr,
) -> Option<Self> {
match sema.resolve_method_call(method_call) {
@@ -725,7 +725,7 @@ impl ImportCandidate {
}
}
- fn for_regular_path(sema: &Semantics<'_, RootDatabase>, path: &ast::Path) -> Option<Self> {
+ fn for_regular_path(sema: &Semantics<'db, RootDatabase>, path: &ast::Path) -> Option<Self> {
if sema.resolve_path(path).is_some() {
return None;
}
@@ -736,7 +736,7 @@ impl ImportCandidate {
)
}
- fn for_name(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<Self> {
+ fn for_name(sema: &Semantics<'db, RootDatabase>, name: &ast::Name) -> Option<Self> {
if sema
.scope(name.syntax())?
.speculative_resolve(&make::ext::ident_path(&name.text()))
@@ -753,17 +753,17 @@ impl ImportCandidate {
fn for_fuzzy_path(
qualifier: Option<ast::Path>,
fuzzy_name: String,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
) -> Option<Self> {
path_import_candidate(sema, qualifier, NameToImport::fuzzy(fuzzy_name))
}
}
-fn path_import_candidate(
- sema: &Semantics<'_, RootDatabase>,
+fn path_import_candidate<'db>(
+ sema: &Semantics<'db, RootDatabase>,
qualifier: Option<ast::Path>,
name: NameToImport,
-) -> Option<ImportCandidate> {
+) -> Option<ImportCandidate<'db>> {
Some(match qualifier {
Some(qualifier) => match sema.resolve_path(&qualifier) {
Some(PathResolution::Def(ModuleDef::BuiltinType(_))) | None => {
diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs
index 232648af66..0ab880bcfe 100644
--- a/crates/ide-db/src/path_transform.rs
+++ b/crates/ide-db/src/path_transform.rs
@@ -2,7 +2,10 @@
use crate::helpers::mod_path_to_ast;
use either::Either;
-use hir::{AsAssocItem, HirDisplay, ImportPathConfig, ModuleDef, SemanticsScope};
+use hir::{
+ AsAssocItem, HirDisplay, HirFileId, ImportPathConfig, ModuleDef, SemanticsScope,
+ prettify_macro_expansion,
+};
use itertools::Itertools;
use rustc_hash::FxHashMap;
use span::Edition;
@@ -136,6 +139,25 @@ impl<'a> PathTransform<'a> {
}
}
+ fn prettify_target_node(&self, node: SyntaxNode) -> SyntaxNode {
+ match self.target_scope.file_id() {
+ HirFileId::FileId(_) => node,
+ HirFileId::MacroFile(file_id) => {
+ let db = self.target_scope.db;
+ prettify_macro_expansion(
+ db,
+ node,
+ &db.expansion_span_map(file_id),
+ self.target_scope.module().krate().into(),
+ )
+ }
+ }
+ }
+
+ fn prettify_target_ast<N: AstNode>(&self, node: N) -> N {
+ N::cast(self.prettify_target_node(node.syntax().clone())).unwrap()
+ }
+
fn build_ctx(&self) -> Ctx<'a> {
let db = self.source_scope.db;
let target_module = self.target_scope.module();
@@ -163,7 +185,7 @@ impl<'a> PathTransform<'a> {
.for_each(|(k, v)| match (k.split(db), v) {
(Either::Right(k), Some(TypeOrConst::Either(v))) => {
if let Some(ty) = v.ty() {
- type_substs.insert(k, ty);
+ type_substs.insert(k, self.prettify_target_ast(ty));
}
}
(Either::Right(k), None) => {
@@ -178,7 +200,7 @@ impl<'a> PathTransform<'a> {
}
(Either::Left(k), Some(TypeOrConst::Either(v))) => {
if let Some(ty) = v.ty() {
- const_substs.insert(k, ty.syntax().clone());
+ const_substs.insert(k, self.prettify_target_node(ty.syntax().clone()));
}
}
(Either::Left(k), Some(TypeOrConst::Const(v))) => {
@@ -189,7 +211,7 @@ impl<'a> PathTransform<'a> {
// and sometimes require slight modifications; see
// https://doc.rust-lang.org/reference/statements.html#expression-statements
// (default values in curly brackets can cause the same problem)
- const_substs.insert(k, expr.syntax().clone());
+ const_substs.insert(k, self.prettify_target_node(expr.syntax().clone()));
}
}
(Either::Left(k), None) => {
@@ -204,6 +226,7 @@ impl<'a> PathTransform<'a> {
}
_ => (), // ignore mismatching params
});
+ // No need to prettify lifetimes, there's nothing to prettify.
let lifetime_substs: FxHashMap<_, _> = self
.generic_def
.into_iter()
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs
index 5356614dce..e6618573e0 100644
--- a/crates/ide-db/src/prime_caches.rs
+++ b/crates/ide-db/src/prime_caches.rs
@@ -272,5 +272,5 @@ fn crate_name(db: &RootDatabase, krate: Crate) -> Symbol {
.display_name
.as_deref()
.cloned()
- .unwrap_or_else(|| Symbol::integer(salsa::plumbing::AsId::as_id(&krate).as_u32() as usize))
+ .unwrap_or_else(|| Symbol::integer(salsa::plumbing::AsId::as_id(&krate).index() as usize))
}
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index c5ad64ed59..4efb83ba32 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -317,7 +317,7 @@ impl Definition {
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -332,7 +332,7 @@ impl Definition {
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -341,7 +341,7 @@ impl Definition {
if let Definition::SelfType(impl_) = self {
return match impl_.source(db).map(|src| src.syntax().cloned()) {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -360,7 +360,7 @@ impl Definition {
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -429,7 +429,7 @@ pub struct FindUsages<'a> {
/// The container of our definition should it be an assoc item
assoc_item_container: Option<hir::AssocItemContainer>,
/// whether to search for the `Self` type of the definition
- include_self_kw_refs: Option<hir::Type>,
+ include_self_kw_refs: Option<hir::Type<'a>>,
/// whether to search for the `self` module
search_self_mod: bool,
}
@@ -1087,12 +1087,12 @@ impl<'a> FindUsages<'a> {
fn found_self_ty_name_ref(
&self,
- self_ty: &hir::Type,
+ self_ty: &hir::Type<'_>,
name_ref: &ast::NameRef,
sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool,
) -> bool {
// See https://github.com/rust-lang/rust-analyzer/pull/15864/files/e0276dc5ddc38c65240edb408522bb869f15afb4#r1389848845
- let ty_eq = |ty: hir::Type| match (ty.as_adt(), self_ty.as_adt()) {
+ let ty_eq = |ty: hir::Type<'_>| match (ty.as_adt(), self_ty.as_adt()) {
(Some(ty), Some(self_ty)) => ty == self_ty,
(None, None) => ty == *self_ty,
_ => false,
@@ -1315,7 +1315,7 @@ impl<'a> FindUsages<'a> {
}
}
-fn def_to_ty(sema: &Semantics<'_, RootDatabase>, def: &Definition) -> Option<hir::Type> {
+fn def_to_ty<'db>(sema: &Semantics<'db, RootDatabase>, def: &Definition) -> Option<hir::Type<'db>> {
match def {
Definition::Adt(adt) => Some(adt.ty(sema.db)),
Definition::TypeAlias(it) => Some(it.ty(sema.db)),
diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs
index d1ba79e8c7..c15cade84a 100644
--- a/crates/ide-db/src/symbol_index.rs
+++ b/crates/ide-db/src/symbol_index.rs
@@ -50,6 +50,7 @@ pub struct Query {
case_sensitive: bool,
only_types: bool,
libs: bool,
+ exclude_imports: bool,
}
impl Query {
@@ -63,6 +64,7 @@ impl Query {
mode: SearchMode::Fuzzy,
assoc_mode: AssocSearchMode::Include,
case_sensitive: false,
+ exclude_imports: false,
}
}
@@ -94,6 +96,10 @@ impl Query {
pub fn case_sensitive(&mut self) {
self.case_sensitive = true;
}
+
+ pub fn exclude_imports(&mut self) {
+ self.exclude_imports = true;
+ }
}
#[query_group::query_group]
@@ -362,6 +368,9 @@ impl Query {
if ignore_underscore_prefixed && symbol_name.starts_with("__") {
continue;
}
+ if self.exclude_imports && symbol.is_import {
+ continue;
+ }
if self.mode.check(&self.query, self.case_sensitive, symbol_name) {
if let Some(b) = cb(symbol).break_value() {
return Some(b);
@@ -385,7 +394,8 @@ impl Query {
mod tests {
use expect_test::expect_file;
- use test_fixture::WithFixture;
+ use salsa::Durability;
+ use test_fixture::{WORKSPACE, WithFixture};
use super::*;
@@ -506,4 +516,31 @@ struct Duplicate;
expect_file!["./test_data/test_doc_alias.txt"].assert_debug_eq(&symbols);
}
+
+ #[test]
+ fn test_exclude_imports() {
+ let (mut db, _) = RootDatabase::with_many_files(
+ r#"
+//- /lib.rs
+mod foo;
+pub use foo::Foo;
+
+//- /foo.rs
+pub struct Foo;
+"#,
+ );
+
+ let mut local_roots = FxHashSet::default();
+ local_roots.insert(WORKSPACE);
+ db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
+
+ let mut query = Query::new("Foo".to_owned());
+ let mut symbols = world_symbols(&db, query.clone());
+ symbols.sort_by_key(|x| x.is_import);
+ expect_file!["./test_data/test_symbols_with_imports.txt"].assert_debug_eq(&symbols);
+
+ query.exclude_imports();
+ let symbols = world_symbols(&db, query);
+ expect_file!["./test_data/test_symbols_exclude_imports.txt"].assert_debug_eq(&symbols);
+ }
}
diff --git a/crates/ide-db/src/syntax_helpers/suggest_name.rs b/crates/ide-db/src/syntax_helpers/suggest_name.rs
index 9b9f450bc7..995bf72dca 100644
--- a/crates/ide-db/src/syntax_helpers/suggest_name.rs
+++ b/crates/ide-db/src/syntax_helpers/suggest_name.rs
@@ -151,10 +151,10 @@ impl NameGenerator {
/// - If `ty` is an `impl Trait`, it will suggest the name of the first trait.
///
/// If the suggested name conflicts with reserved keywords, it will return `None`.
- pub fn for_type(
+ pub fn for_type<'db>(
&mut self,
- ty: &hir::Type,
- db: &RootDatabase,
+ ty: &hir::Type<'db>,
+ db: &'db RootDatabase,
edition: Edition,
) -> Option<SmolStr> {
let name = name_of_type(ty, db, edition)?;
@@ -373,7 +373,11 @@ fn from_type(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<Smo
name_of_type(&ty, sema.db, edition)
}
-fn name_of_type(ty: &hir::Type, db: &RootDatabase, edition: Edition) -> Option<SmolStr> {
+fn name_of_type<'db>(
+ ty: &hir::Type<'db>,
+ db: &'db RootDatabase,
+ edition: Edition,
+) -> Option<SmolStr> {
let name = if let Some(adt) = ty.as_adt() {
let name = adt.name(db).display(db, edition).to_string();
@@ -407,7 +411,11 @@ fn name_of_type(ty: &hir::Type, db: &RootDatabase, edition: Edition) -> Option<S
normalize(&name)
}
-fn sequence_name(inner_ty: Option<&hir::Type>, db: &RootDatabase, edition: Edition) -> SmolStr {
+fn sequence_name<'db>(
+ inner_ty: Option<&hir::Type<'db>>,
+ db: &'db RootDatabase,
+ edition: Edition,
+) -> SmolStr {
let items_str = SmolStr::new_static("items");
let Some(inner_ty) = inner_ty else {
return items_str;
diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt
index 455a680590..30d1df4f8e 100644
--- a/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -41,6 +41,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -74,6 +75,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -107,6 +109,7 @@
container_name: None,
is_alias: true,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -140,6 +143,7 @@
container_name: None,
is_alias: true,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -173,6 +177,7 @@
container_name: None,
is_alias: true,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -206,6 +211,7 @@
container_name: None,
is_alias: true,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -239,6 +245,7 @@
container_name: None,
is_alias: true,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
],
diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index 5e5ae1d168..973256c470 100644
--- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -11,6 +11,40 @@
},
[
FileSymbol {
+ name: "A",
+ def: Variant(
+ Variant {
+ id: EnumVariantId(
+ 7800,
+ ),
+ },
+ ),
+ loc: DeclarationLocation {
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: VARIANT,
+ range: 201..202,
+ },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 201..202,
+ },
+ ),
+ },
+ container_name: Some(
+ "Enum",
+ ),
+ is_alias: false,
+ is_assoc: true,
+ is_import: false,
+ do_not_complete: Yes,
+ },
+ FileSymbol {
name: "Alias",
def: TypeAlias(
TypeAlias {
@@ -39,6 +73,41 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
+ do_not_complete: Yes,
+ },
+ FileSymbol {
+ name: "B",
+ def: Variant(
+ Variant {
+ id: EnumVariantId(
+ 7801,
+ ),
+ },
+ ),
+ loc: DeclarationLocation {
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: VARIANT,
+ range: 204..205,
+ },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 204..205,
+ },
+ ),
+ },
+ container_name: Some(
+ "Enum",
+ ),
+ is_alias: false,
+ is_assoc: true,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -70,6 +139,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -101,6 +171,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -134,6 +205,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -167,6 +239,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: true,
do_not_complete: Yes,
},
FileSymbol {
@@ -200,6 +273,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -231,6 +305,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -264,6 +339,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -297,6 +373,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -332,6 +409,7 @@
),
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -367,6 +445,7 @@
),
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -400,6 +479,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -433,6 +513,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -464,6 +545,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -497,6 +579,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: true,
do_not_complete: Yes,
},
FileSymbol {
@@ -530,6 +613,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -565,6 +649,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -600,6 +685,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -633,6 +719,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -666,6 +753,7 @@
),
is_alias: false,
is_assoc: true,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -699,6 +787,7 @@
),
is_alias: false,
is_assoc: true,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -732,6 +821,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -763,6 +853,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -796,6 +887,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: true,
do_not_complete: Yes,
},
FileSymbol {
@@ -829,6 +921,7 @@
),
is_alias: false,
is_assoc: true,
+ is_import: false,
do_not_complete: Yes,
},
],
@@ -875,6 +968,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
],
@@ -919,6 +1013,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: true,
do_not_complete: Yes,
},
FileSymbol {
@@ -952,6 +1047,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: true,
do_not_complete: Yes,
},
FileSymbol {
@@ -985,6 +1081,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: false,
do_not_complete: Yes,
},
FileSymbol {
@@ -1018,6 +1115,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: true,
do_not_complete: Yes,
},
FileSymbol {
@@ -1051,6 +1149,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ is_import: true,
do_not_complete: Yes,
},
],
diff --git a/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt b/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
new file mode 100644
index 0000000000..22872b577f
--- /dev/null
+++ b/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
@@ -0,0 +1,36 @@
+[
+ FileSymbol {
+ name: "Foo",
+ def: Adt(
+ Struct(
+ Struct {
+ id: StructId(
+ 3800,
+ ),
+ },
+ ),
+ ),
+ loc: DeclarationLocation {
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2001),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 0..15,
+ },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 11..14,
+ },
+ ),
+ },
+ container_name: None,
+ is_alias: false,
+ is_assoc: false,
+ is_import: false,
+ do_not_complete: Yes,
+ },
+]
diff --git a/crates/ide-db/src/test_data/test_symbols_with_imports.txt b/crates/ide-db/src/test_data/test_symbols_with_imports.txt
new file mode 100644
index 0000000000..9f98bf87e2
--- /dev/null
+++ b/crates/ide-db/src/test_data/test_symbols_with_imports.txt
@@ -0,0 +1,70 @@
+[
+ FileSymbol {
+ name: "Foo",
+ def: Adt(
+ Struct(
+ Struct {
+ id: StructId(
+ 3800,
+ ),
+ },
+ ),
+ ),
+ loc: DeclarationLocation {
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2001),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 0..15,
+ },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 11..14,
+ },
+ ),
+ },
+ container_name: None,
+ is_alias: false,
+ is_assoc: false,
+ is_import: false,
+ do_not_complete: Yes,
+ },
+ FileSymbol {
+ name: "Foo",
+ def: Adt(
+ Struct(
+ Struct {
+ id: StructId(
+ 3800,
+ ),
+ },
+ ),
+ ),
+ loc: DeclarationLocation {
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: USE_TREE,
+ range: 17..25,
+ },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME_REF,
+ range: 22..25,
+ },
+ ),
+ },
+ container_name: None,
+ is_alias: false,
+ is_assoc: false,
+ is_import: true,
+ do_not_complete: Yes,
+ },
+]
diff --git a/crates/ide-db/src/ty_filter.rs b/crates/ide-db/src/ty_filter.rs
index 63ce0ddbb8..095256d829 100644
--- a/crates/ide-db/src/ty_filter.rs
+++ b/crates/ide-db/src/ty_filter.rs
@@ -10,7 +10,7 @@ use syntax::ast::{self, Pat, make};
use crate::RootDatabase;
/// Enum types that implement `std::ops::Try` trait.
-#[derive(Clone, Copy)]
+#[derive(Clone, Copy, Debug)]
pub enum TryEnum {
Result,
Option,
@@ -20,7 +20,7 @@ impl TryEnum {
const ALL: [TryEnum; 2] = [TryEnum::Option, TryEnum::Result];
/// Returns `Some(..)` if the provided type is an enum that implements `std::ops::Try`.
- pub fn from_ty(sema: &Semantics<'_, RootDatabase>, ty: &hir::Type) -> Option<TryEnum> {
+ pub fn from_ty(sema: &Semantics<'_, RootDatabase>, ty: &hir::Type<'_>) -> Option<TryEnum> {
let enum_ = match ty.as_adt() {
Some(hir::Adt::Enum(it)) => it,
_ => return None,
diff --git a/crates/ide-db/src/use_trivial_constructor.rs b/crates/ide-db/src/use_trivial_constructor.rs
index a4a93e36f0..f63cd92694 100644
--- a/crates/ide-db/src/use_trivial_constructor.rs
+++ b/crates/ide-db/src/use_trivial_constructor.rs
@@ -11,7 +11,7 @@ use syntax::{
pub fn use_trivial_constructor(
db: &crate::RootDatabase,
path: Path,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
edition: Edition,
) -> Option<Expr> {
match ty.as_adt() {
diff --git a/crates/ide-diagnostics/Cargo.toml b/crates/ide-diagnostics/Cargo.toml
index 96be51e1b2..6f1e66948f 100644
--- a/crates/ide-diagnostics/Cargo.toml
+++ b/crates/ide-diagnostics/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/ide-diagnostics/src/handlers/expected_function.rs b/crates/ide-diagnostics/src/handlers/expected_function.rs
index 7d2ac373dc..afd1687ae0 100644
--- a/crates/ide-diagnostics/src/handlers/expected_function.rs
+++ b/crates/ide-diagnostics/src/handlers/expected_function.rs
@@ -7,7 +7,7 @@ use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// This diagnostic is triggered if a call is made on something that is not callable.
pub(crate) fn expected_function(
ctx: &DiagnosticsContext<'_>,
- d: &hir::ExpectedFunction,
+ d: &hir::ExpectedFunction<'_>,
) -> Diagnostic {
Diagnostic::new_with_syntax_node_ptr(
ctx,
diff --git a/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs b/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs
index 06f3575942..7402133f74 100644
--- a/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs
+++ b/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs
@@ -183,4 +183,28 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn generic_assoc_type_infer_lifetime_in_expr_position() {
+ check_diagnostics(
+ r#"
+//- minicore: sized
+struct Player;
+
+struct Foo<'c, C> {
+ _v: &'c C,
+}
+trait WithSignals: Sized {
+ type SignalCollection<'c, C>;
+ fn __signals_from_external(&self) -> Self::SignalCollection<'_, Self>;
+}
+impl WithSignals for Player {
+ type SignalCollection<'c, C> = Foo<'c, C>;
+ fn __signals_from_external(&self) -> Self::SignalCollection<'_, Self> {
+ Self::SignalCollection { _v: self }
+ }
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/invalid_cast.rs b/crates/ide-diagnostics/src/handlers/invalid_cast.rs
index 7a6e98fe1b..a59077b757 100644
--- a/crates/ide-diagnostics/src/handlers/invalid_cast.rs
+++ b/crates/ide-diagnostics/src/handlers/invalid_cast.rs
@@ -18,7 +18,7 @@ macro_rules! format_ty {
// Diagnostic: invalid-cast
//
// This diagnostic is triggered if the code contains an illegal cast
-pub(crate) fn invalid_cast(ctx: &DiagnosticsContext<'_>, d: &hir::InvalidCast) -> Diagnostic {
+pub(crate) fn invalid_cast(ctx: &DiagnosticsContext<'_>, d: &hir::InvalidCast<'_>) -> Diagnostic {
let display_range = ctx.sema.diagnostics_display_range(d.expr.map(|it| it.into()));
let (code, message) = match d.error {
CastError::CastToBool => (
@@ -106,7 +106,10 @@ pub(crate) fn invalid_cast(ctx: &DiagnosticsContext<'_>, d: &hir::InvalidCast) -
// Diagnostic: cast-to-unsized
//
// This diagnostic is triggered when casting to an unsized type
-pub(crate) fn cast_to_unsized(ctx: &DiagnosticsContext<'_>, d: &hir::CastToUnsized) -> Diagnostic {
+pub(crate) fn cast_to_unsized(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::CastToUnsized<'_>,
+) -> Diagnostic {
let display_range = ctx.sema.diagnostics_display_range(d.expr.map(|it| it.into()));
Diagnostic::new(
DiagnosticCode::RustcHardError("E0620"),
diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs
index 546512a6cf..c39e00e178 100644
--- a/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -242,8 +242,8 @@ macro_rules! outer {
fn f() {
outer!();
-} //^^^^^^^^ error: leftover tokens
- //^^^^^^^^ error: Syntax Error in Expansion: expected expression
+} //^^^^^^ error: leftover tokens
+ //^^^^^^ error: Syntax Error in Expansion: expected expression
"#,
)
}
diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 2b76efb196..7da799e0d4 100644
--- a/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -66,7 +66,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
let current_module =
ctx.sema.scope(d.field_list_parent.to_node(&root).syntax()).map(|it| it.module());
let range = InFile::new(d.file, d.field_list_parent.text_range())
- .original_node_file_range_rooted(ctx.sema.db);
+ .original_node_file_range_rooted_opt(ctx.sema.db)?;
let build_text_edit = |new_syntax: &SyntaxNode, old_syntax| {
let edit = {
@@ -106,7 +106,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
}
});
- let generate_fill_expr = |ty: &Type| match ctx.config.expr_fill_default {
+ let generate_fill_expr = |ty: &Type<'_>| match ctx.config.expr_fill_default {
ExprFillDefaultMode::Todo => make::ext::expr_todo(),
ExprFillDefaultMode::Underscore => make::ext::expr_underscore(),
ExprFillDefaultMode::Default => {
@@ -180,7 +180,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
}
fn make_ty(
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
db: &dyn HirDatabase,
module: hir::Module,
edition: Edition,
@@ -198,7 +198,7 @@ fn make_ty(
fn get_default_constructor(
ctx: &DiagnosticsContext<'_>,
d: &hir::MissingFields,
- ty: &Type,
+ ty: &Type<'_>,
) -> Option<ast::Expr> {
if let Some(builtin_ty) = ty.as_builtin() {
if builtin_ty.is_int() || builtin_ty.is_uint() {
diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index 6bd5417b25..17caf63018 100644
--- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -630,6 +630,17 @@ fn main() {
// Checks that we don't place orphan arguments for formatting under an unsafe block.
check_diagnostics(
r#"
+//- minicore: fmt_before_1_89_0
+fn foo() {
+ let p = 0xDEADBEEF as *const i32;
+ format_args!("", *p);
+ // ^^ error: dereference of raw pointer is unsafe and requires an unsafe function or block
+}
+ "#,
+ );
+
+ check_diagnostics(
+ r#"
//- minicore: fmt
fn foo() {
let p = 0xDEADBEEF as *const i32;
@@ -958,4 +969,33 @@ impl FooTrait for S2 {
"#,
);
}
+
+ #[test]
+ fn no_false_positive_on_format_args_since_1_89_0() {
+ check_diagnostics(
+ r#"
+//- minicore: fmt
+fn test() {
+ let foo = 10;
+ let bar = true;
+ let _x = format_args!("{} {0} {} {last}", foo, bar, last = "!");
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn naked_asm_is_safe() {
+ check_diagnostics(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! naked_asm { () => {} }
+
+#[unsafe(naked)]
+extern "C" fn naked() {
+ naked_asm!("");
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs b/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
index 01cf5e8fa5..1e80d02926 100644
--- a/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
+++ b/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
@@ -4,7 +4,10 @@ use hir::HirDisplay;
// Diagnostic: moved-out-of-ref
//
// This diagnostic is triggered on moving non copy things out of references.
-pub(crate) fn moved_out_of_ref(ctx: &DiagnosticsContext<'_>, d: &hir::MovedOutOfRef) -> Diagnostic {
+pub(crate) fn moved_out_of_ref(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::MovedOutOfRef<'_>,
+) -> Diagnostic {
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::RustcHardError("E0507"),
@@ -217,4 +220,41 @@ fn test() {
"#,
)
}
+
+ #[test]
+ fn regression_18201() {
+ check_diagnostics(
+ r#"
+//- minicore: copy
+struct NotCopy;
+struct S(NotCopy);
+impl S {
+ fn f(&mut self) {
+ || {
+ if let ref mut _cb = self.0 {
+ }
+ };
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn regression_20155() {
+ check_diagnostics(
+ r#"
+//- minicore: copy, option
+struct Box(i32);
+fn test() {
+ let b = Some(Box(0));
+ || {
+ if let Some(b) = b {
+ let _move = b;
+ }
+ };
+}
+"#,
+ )
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/no_such_field.rs b/crates/ide-diagnostics/src/handlers/no_such_field.rs
index ef42f2dc74..0edab5e0b3 100644
--- a/crates/ide-diagnostics/src/handlers/no_such_field.rs
+++ b/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ -1,5 +1,4 @@
use either::Either;
-use hir::{Field, HasCrate};
use hir::{HasSource, HirDisplay, Semantics, VariantId, db::ExpandDatabase};
use ide_db::text_edit::TextEdit;
use ide_db::{EditionedFileId, RootDatabase, source_change::SourceChange};
@@ -8,7 +7,10 @@ use syntax::{
ast::{self, edit::IndentLevel, make},
};
-use crate::{Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, fix};
+use crate::{
+ Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, fix,
+ handlers::private_field::field_is_private_fixes,
+};
// Diagnostic: no-such-field
//
@@ -37,8 +39,8 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assis
field_is_private_fixes(
&ctx.sema,
d.field.file_id.original_file(ctx.sema.db),
- node,
private_field,
+ ctx.sema.original_range(node.syntax()).range,
)
} else {
missing_record_expr_field_fixes(
@@ -52,31 +54,6 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assis
}
}
-fn field_is_private_fixes(
- sema: &Semantics<'_, RootDatabase>,
- usage_file_id: EditionedFileId,
- record_expr_field: &ast::RecordExprField,
- private_field: Field,
-) -> Option<Vec<Assist>> {
- let def_crate = private_field.krate(sema.db);
- let usage_crate = sema.file_to_module_def(usage_file_id.file_id(sema.db))?.krate();
- let visibility = if usage_crate == def_crate { "pub(crate) " } else { "pub " };
-
- let source = private_field.source(sema.db)?;
- let (range, _) = source.syntax().original_file_range_opt(sema.db)?;
- let source_change = SourceChange::from_text_edit(
- range.file_id.file_id(sema.db),
- TextEdit::insert(range.range.start(), visibility.into()),
- );
-
- Some(vec![fix(
- "increase_field_visibility",
- "Increase field visibility",
- source_change,
- sema.original_range(record_expr_field.syntax()).range,
- )])
-}
-
fn missing_record_expr_field_fixes(
sema: &Semantics<'_, RootDatabase>,
usage_file_id: EditionedFileId,
diff --git a/crates/ide-diagnostics/src/handlers/private_field.rs b/crates/ide-diagnostics/src/handlers/private_field.rs
index 5b4273a5a6..69cd0d27cb 100644
--- a/crates/ide-diagnostics/src/handlers/private_field.rs
+++ b/crates/ide-diagnostics/src/handlers/private_field.rs
@@ -1,4 +1,8 @@
-use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+use hir::{EditionedFileId, FileRange, HasCrate, HasSource, Semantics};
+use ide_db::{RootDatabase, assists::Assist, source_change::SourceChange, text_edit::TextEdit};
+use syntax::{AstNode, TextRange, TextSize, ast::HasVisibility};
+
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix};
// Diagnostic: private-field
//
@@ -16,11 +20,59 @@ pub(crate) fn private_field(ctx: &DiagnosticsContext<'_>, d: &hir::PrivateField)
d.expr.map(|it| it.into()),
)
.stable()
+ .with_fixes(field_is_private_fixes(
+ &ctx.sema,
+ d.expr.file_id.original_file(ctx.sema.db),
+ d.field,
+ ctx.sema.original_range(d.expr.to_node(ctx.sema.db).syntax()).range,
+ ))
+}
+
+pub(crate) fn field_is_private_fixes(
+ sema: &Semantics<'_, RootDatabase>,
+ usage_file_id: EditionedFileId,
+ private_field: hir::Field,
+ fix_range: TextRange,
+) -> Option<Vec<Assist>> {
+ let def_crate = private_field.krate(sema.db);
+ let usage_crate = sema.file_to_module_def(usage_file_id.file_id(sema.db))?.krate();
+ let mut visibility_text = if usage_crate == def_crate { "pub(crate) " } else { "pub " };
+
+ let source = private_field.source(sema.db)?;
+ let existing_visibility = match &source.value {
+ hir::FieldSource::Named(it) => it.visibility(),
+ hir::FieldSource::Pos(it) => it.visibility(),
+ };
+ let range = match existing_visibility {
+ Some(visibility) => {
+ // If there is an existing visibility, don't insert whitespace after.
+ visibility_text = visibility_text.trim_end();
+ source.with_value(visibility.syntax()).original_file_range_opt(sema.db)?.0
+ }
+ None => {
+ let (range, _) = source.syntax().original_file_range_opt(sema.db)?;
+ FileRange {
+ file_id: range.file_id,
+ range: TextRange::at(range.range.start(), TextSize::new(0)),
+ }
+ }
+ };
+ let source_change = SourceChange::from_text_edit(
+ range.file_id.file_id(sema.db),
+ TextEdit::replace(range.range, visibility_text.into()),
+ );
+
+ Some(vec![fix(
+ "increase_field_visibility",
+ "Increase field visibility",
+ source_change,
+ fix_range,
+ )])
}
#[cfg(test)]
mod tests {
- use crate::tests::check_diagnostics;
+ use crate::tests::{check_diagnostics, check_fix};
#[test]
fn private_field() {
@@ -29,7 +81,7 @@ mod tests {
mod module { pub struct Struct { field: u32 } }
fn main(s: module::Struct) {
s.field;
- //^^^^^^^ error: field `field` of `Struct` is private
+ //^^^^^^^ 💡 error: field `field` of `Struct` is private
}
"#,
);
@@ -42,7 +94,7 @@ fn main(s: module::Struct) {
mod module { pub struct Struct(u32); }
fn main(s: module::Struct) {
s.0;
- //^^^ error: field `0` of `Struct` is private
+ //^^^ 💡 error: field `0` of `Struct` is private
}
"#,
);
@@ -113,4 +165,68 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn change_visibility_fix() {
+ check_fix(
+ r#"
+pub mod foo {
+ pub mod bar {
+ pub struct Struct {
+ field: i32,
+ }
+ }
+}
+
+fn foo(v: foo::bar::Struct) {
+ v.field$0;
+}
+ "#,
+ r#"
+pub mod foo {
+ pub mod bar {
+ pub struct Struct {
+ pub(crate) field: i32,
+ }
+ }
+}
+
+fn foo(v: foo::bar::Struct) {
+ v.field;
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn change_visibility_with_existing_visibility() {
+ check_fix(
+ r#"
+pub mod foo {
+ pub mod bar {
+ pub struct Struct {
+ pub(super) field: i32,
+ }
+ }
+}
+
+fn foo(v: foo::bar::Struct) {
+ v.field$0;
+}
+ "#,
+ r#"
+pub mod foo {
+ pub mod bar {
+ pub struct Struct {
+ pub(crate) field: i32,
+ }
+ }
+}
+
+fn foo(v: foo::bar::Struct) {
+ v.field;
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
index fa7ba90a75..0e18ce9674 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
@@ -127,4 +127,33 @@ impl !Trait for () {}
"#,
)
}
+
+ #[test]
+ fn impl_sized_for_unsized() {
+ check_diagnostics(
+ r#"
+//- minicore: sized
+trait Trait {
+ type Item
+ where
+ Self: Sized;
+
+ fn item()
+ where
+ Self: Sized;
+}
+
+trait OtherTrait {}
+
+impl Trait for () {
+ type Item = ();
+ fn item() {}
+}
+
+// Items with a `Self: Sized` bound are not required to be implemented for unsized types.
+impl Trait for str {}
+impl Trait for dyn OtherTrait {}
+ "#,
+ )
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
index 4327b12dce..fc2648efb4 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -77,6 +77,7 @@ fn quickfix_for_redundant_assoc_item(
redundant_item_def: String,
range: TextRange,
) -> Option<Vec<Assist>> {
+ let file_id = d.file_id.file_id()?;
let add_assoc_item_def = |builder: &mut SourceChangeBuilder| -> Option<()> {
let db = ctx.sema.db;
let root = db.parse_or_expand(d.file_id);
@@ -90,12 +91,14 @@ fn quickfix_for_redundant_assoc_item(
let trait_def = d.trait_.source(db)?.value;
let l_curly = trait_def.assoc_item_list()?.l_curly_token()?.text_range();
let where_to_insert =
- hir::InFile::new(d.file_id, l_curly).original_node_file_range_rooted(db).range;
+ hir::InFile::new(d.file_id, l_curly).original_node_file_range_rooted_opt(db)?;
+ if where_to_insert.file_id != file_id {
+ return None;
+ }
- builder.insert(where_to_insert.end(), redundant_item_def);
+ builder.insert(where_to_insert.range.end(), redundant_item_def);
Some(())
};
- let file_id = d.file_id.file_id()?;
let mut source_change_builder = SourceChangeBuilder::new(file_id.file_id(ctx.sema.db));
add_assoc_item_def(&mut source_change_builder)?;
diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index 076df1ab0f..ac54ac0950 100644
--- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -20,7 +20,7 @@ use crate::{Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_dis
//
// This diagnostic is triggered when the type of an expression or pattern does not match
// the expected type.
-pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Diagnostic {
+pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch<'_>) -> Diagnostic {
let display_range = adjusted_display_range(ctx, d.expr_or_pat, &|node| {
let Either::Left(expr) = node else { return None };
let salient_token_range = match expr {
@@ -39,7 +39,7 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch)
cov_mark::hit!(type_mismatch_range_adjustment);
Some(salient_token_range)
});
- let mut diag = Diagnostic::new(
+ Diagnostic::new(
DiagnosticCode::RustcHardError("E0308"),
format!(
"expected {}, found {}",
@@ -52,14 +52,10 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch)
),
display_range,
)
- .with_fixes(fixes(ctx, d));
- if diag.fixes.is_some() {
- diag.experimental = false;
- }
- diag
+ .with_fixes(fixes(ctx, d))
}
-fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Option<Vec<Assist>> {
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch<'_>) -> Option<Vec<Assist>> {
let mut fixes = Vec::new();
if let Some(expr_ptr) = d.expr_or_pat.value.cast::<ast::Expr>() {
@@ -76,7 +72,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Option<Vec<Assi
fn add_reference(
ctx: &DiagnosticsContext<'_>,
- d: &hir::TypeMismatch,
+ d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
@@ -98,7 +94,7 @@ fn add_reference(
fn add_missing_ok_or_some(
ctx: &DiagnosticsContext<'_>,
- d: &hir::TypeMismatch,
+ d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
@@ -188,7 +184,7 @@ fn add_missing_ok_or_some(
fn remove_unnecessary_wrapper(
ctx: &DiagnosticsContext<'_>,
- d: &hir::TypeMismatch,
+ d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
@@ -271,7 +267,7 @@ fn remove_unnecessary_wrapper(
fn remove_semicolon(
ctx: &DiagnosticsContext<'_>,
- d: &hir::TypeMismatch,
+ d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
@@ -301,7 +297,7 @@ fn remove_semicolon(
fn str_ref_to_owned(
ctx: &DiagnosticsContext<'_>,
- d: &hir::TypeMismatch,
+ d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
@@ -1175,7 +1171,7 @@ trait B {}
fn test(a: &dyn A) -> &dyn B {
a
- //^ error: expected &(dyn B + 'static), found &(dyn A + 'static)
+ //^ error: expected &dyn B, found &dyn A
}
"#,
);
diff --git a/crates/ide-diagnostics/src/handlers/typed_hole.rs b/crates/ide-diagnostics/src/handlers/typed_hole.rs
index 1915a88dd0..8d42770269 100644
--- a/crates/ide-diagnostics/src/handlers/typed_hole.rs
+++ b/crates/ide-diagnostics/src/handlers/typed_hole.rs
@@ -20,7 +20,7 @@ use syntax::AstNode;
// Diagnostic: typed-hole
//
// This diagnostic is triggered when an underscore expression is used in an invalid position.
-pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Diagnostic {
+pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole<'_>) -> Diagnostic {
let display_range = ctx.sema.diagnostics_display_range(d.expr.map(|it| it.into()));
let (message, fixes) = if d.expected.is_unknown() {
("`_` expressions may only appear on the left-hand side of an assignment".to_owned(), None)
@@ -41,7 +41,7 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di
.with_fixes(fixes)
}
-fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>> {
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole<'_>) -> Option<Vec<Assist>> {
let db = ctx.sema.db;
let root = db.parse_or_expand(d.expr.file_id);
let (original_range, _) =
@@ -61,7 +61,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>
};
let paths = term_search(&term_search_ctx);
- let mut formatter = |_: &hir::Type| String::from("_");
+ let mut formatter = |_: &hir::Type<'_>| String::from("_");
let assists: Vec<Assist> = d
.expected
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index 0649c97f82..6901589896 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -25,7 +25,7 @@ use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_ran
// This diagnostic is triggered if a field does not exist on a given type.
pub(crate) fn unresolved_field(
ctx: &DiagnosticsContext<'_>,
- d: &hir::UnresolvedField,
+ d: &hir::UnresolvedField<'_>,
) -> Diagnostic {
let method_suffix = if d.method_with_same_name_exists {
", but a method with a similar name exists"
@@ -54,7 +54,7 @@ pub(crate) fn unresolved_field(
.with_fixes(fixes(ctx, d))
}
-fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Vec<Assist>> {
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField<'_>) -> Option<Vec<Assist>> {
let mut fixes = Vec::new();
if d.method_with_same_name_exists {
fixes.extend(method_fix(ctx, &d.expr));
@@ -64,7 +64,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Vec<A
}
// FIXME: Add Snippet Support
-fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Assist> {
+fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField<'_>) -> Option<Assist> {
// Get the FileRange of the invalid field access
let root = ctx.sema.db.parse_or_expand(d.expr.file_id);
let expr = d.expr.value.to_node(&root).left()?;
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index 00c2a8c4c4..dcca85d4db 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -18,7 +18,7 @@ use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_ran
// This diagnostic is triggered if a method does not exist on a given type.
pub(crate) fn unresolved_method(
ctx: &DiagnosticsContext<'_>,
- d: &hir::UnresolvedMethodCall,
+ d: &hir::UnresolvedMethodCall<'_>,
) -> Diagnostic {
let suffix = if d.field_with_same_name.is_some() {
", but a field with a similar name exists"
@@ -49,7 +49,7 @@ pub(crate) fn unresolved_method(
.with_fixes(fixes(ctx, d))
}
-fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Vec<Assist>> {
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall<'_>) -> Option<Vec<Assist>> {
let field_fix = if let Some(ty) = &d.field_with_same_name {
field_fix(ctx, d, ty)
} else {
@@ -72,8 +72,8 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<
fn field_fix(
ctx: &DiagnosticsContext<'_>,
- d: &hir::UnresolvedMethodCall,
- ty: &hir::Type,
+ d: &hir::UnresolvedMethodCall<'_>,
+ ty: &hir::Type<'_>,
) -> Option<Assist> {
if !ty.impls_fnonce(ctx.sema.db) {
return None;
@@ -107,7 +107,10 @@ fn field_fix(
})
}
-fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Assist> {
+fn assoc_func_fix(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedMethodCall<'_>,
+) -> Option<Assist> {
if let Some(f) = d.assoc_func_with_same_name {
let db = ctx.sema.db;
@@ -117,8 +120,7 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
let call = ast::MethodCallExpr::cast(expr.syntax().clone())?;
let range = InFile::new(expr_ptr.file_id, call.syntax().text_range())
- .original_node_file_range_rooted(db)
- .range;
+ .original_node_file_range_rooted_opt(db)?;
let receiver = call.receiver()?;
let receiver_type = &ctx.sema.type_of_expr(&receiver)?.original;
@@ -171,18 +173,16 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
let assoc_func_call_expr_string = make::expr_call(assoc_func_path, args).to_string();
- let file_id = ctx.sema.original_range_opt(call.receiver()?.syntax())?.file_id;
-
Some(Assist {
id: AssistId::quick_fix("method_call_to_assoc_func_call_fix"),
label: Label::new(format!(
"Use associated func call instead: `{assoc_func_call_expr_string}`"
)),
group: None,
- target: range,
+ target: range.range,
source_change: Some(SourceChange::from_text_edit(
- file_id.file_id(ctx.sema.db),
- TextEdit::replace(range, assoc_func_call_expr_string),
+ range.file_id.file_id(ctx.sema.db),
+ TextEdit::replace(range.range, assoc_func_call_expr_string),
)),
command: None,
})
@@ -297,7 +297,7 @@ macro_rules! m {
}
fn main() {
m!(());
- // ^^^^^^ error: no method `foo` on type `()`
+ // ^^ error: no method `foo` on type `()`
}
"#,
);
diff --git a/crates/ide-ssr/Cargo.toml b/crates/ide-ssr/Cargo.toml
index 1212fa9f9c..0620bd26fe 100644
--- a/crates/ide-ssr/Cargo.toml
+++ b/crates/ide-ssr/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs
index 43c56ac8be..e4b20f3f1a 100644
--- a/crates/ide-ssr/src/lib.rs
+++ b/crates/ide-ssr/src/lib.rs
@@ -110,7 +110,7 @@ pub struct SsrMatches {
pub struct MatchFinder<'db> {
/// Our source of information about the user's code.
sema: Semantics<'db, ide_db::RootDatabase>,
- rules: Vec<ResolvedRule>,
+ rules: Vec<ResolvedRule<'db>>,
resolution_scope: resolving::ResolutionScope<'db>,
restrict_ranges: Vec<ide_db::FileRange>,
}
diff --git a/crates/ide-ssr/src/matching.rs b/crates/ide-ssr/src/matching.rs
index cff4eede04..b350315ba5 100644
--- a/crates/ide-ssr/src/matching.rs
+++ b/crates/ide-ssr/src/matching.rs
@@ -84,12 +84,12 @@ pub(crate) struct MatchFailed {
/// Checks if `code` matches the search pattern found in `search_scope`, returning information about
/// the match, if it does. Since we only do matching in this module and searching is done by the
/// parent module, we don't populate nested matches.
-pub(crate) fn get_match(
+pub(crate) fn get_match<'db>(
debug_active: bool,
- rule: &ResolvedRule,
+ rule: &ResolvedRule<'db>,
code: &SyntaxNode,
restrict_range: &Option<FileRange>,
- sema: &Semantics<'_, ide_db::RootDatabase>,
+ sema: &Semantics<'db, ide_db::RootDatabase>,
) -> Result<Match, MatchFailed> {
record_match_fails_reasons_scope(debug_active, || {
Matcher::try_match(rule, code, restrict_range, sema)
@@ -102,7 +102,7 @@ struct Matcher<'db, 'sema> {
/// If any placeholders come from anywhere outside of this range, then the match will be
/// rejected.
restrict_range: Option<FileRange>,
- rule: &'sema ResolvedRule,
+ rule: &'sema ResolvedRule<'db>,
}
/// Which phase of matching we're currently performing. We do two phases because most attempted
@@ -117,7 +117,7 @@ enum Phase<'a> {
impl<'db, 'sema> Matcher<'db, 'sema> {
fn try_match(
- rule: &ResolvedRule,
+ rule: &ResolvedRule<'db>,
code: &SyntaxNode,
restrict_range: &Option<FileRange>,
sema: &'sema Semantics<'db, ide_db::RootDatabase>,
@@ -535,7 +535,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
fn attempt_match_ufcs_to_method_call(
&self,
phase: &mut Phase<'_>,
- pattern_ufcs: &UfcsCallInfo,
+ pattern_ufcs: &UfcsCallInfo<'db>,
code: &ast::MethodCallExpr,
) -> Result<(), MatchFailed> {
use ast::HasArgList;
@@ -597,7 +597,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
fn attempt_match_ufcs_to_ufcs(
&self,
phase: &mut Phase<'_>,
- pattern_ufcs: &UfcsCallInfo,
+ pattern_ufcs: &UfcsCallInfo<'db>,
code: &ast::CallExpr,
) -> Result<(), MatchFailed> {
use ast::HasArgList;
@@ -615,7 +615,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
/// times. Returns the number of times it needed to be dereferenced.
fn check_expr_type(
&self,
- pattern_type: &hir::Type,
+ pattern_type: &hir::Type<'db>,
expr: &ast::Expr,
) -> Result<usize, MatchFailed> {
use hir::HirDisplay;
@@ -656,10 +656,10 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
}
impl Match {
- fn render_template_paths(
+ fn render_template_paths<'db>(
&mut self,
- template: &ResolvedPattern,
- sema: &Semantics<'_, ide_db::RootDatabase>,
+ template: &ResolvedPattern<'db>,
+ sema: &Semantics<'db, ide_db::RootDatabase>,
) -> Result<(), MatchFailed> {
let module = sema
.scope(&self.matched_node)
diff --git a/crates/ide-ssr/src/replacing.rs b/crates/ide-ssr/src/replacing.rs
index 3c92697926..752edd6535 100644
--- a/crates/ide-ssr/src/replacing.rs
+++ b/crates/ide-ssr/src/replacing.rs
@@ -14,21 +14,21 @@ use crate::{Match, SsrMatches, fragments, resolving::ResolvedRule};
/// Returns a text edit that will replace each match in `matches` with its corresponding replacement
/// template. Placeholders in the template will have been substituted with whatever they matched to
/// in the original code.
-pub(crate) fn matches_to_edit(
- db: &dyn hir::db::ExpandDatabase,
+pub(crate) fn matches_to_edit<'db>(
+ db: &'db dyn hir::db::ExpandDatabase,
matches: &SsrMatches,
file_src: &str,
- rules: &[ResolvedRule],
+ rules: &[ResolvedRule<'db>],
) -> TextEdit {
matches_to_edit_at_offset(db, matches, file_src, 0.into(), rules)
}
-fn matches_to_edit_at_offset(
- db: &dyn hir::db::ExpandDatabase,
+fn matches_to_edit_at_offset<'db>(
+ db: &'db dyn hir::db::ExpandDatabase,
matches: &SsrMatches,
file_src: &str,
relative_start: TextSize,
- rules: &[ResolvedRule],
+ rules: &[ResolvedRule<'db>],
) -> TextEdit {
let mut edit_builder = TextEdit::builder();
for m in &matches.matches {
@@ -40,12 +40,12 @@ fn matches_to_edit_at_offset(
edit_builder.finish()
}
-struct ReplacementRenderer<'a> {
- db: &'a dyn hir::db::ExpandDatabase,
+struct ReplacementRenderer<'a, 'db> {
+ db: &'db dyn hir::db::ExpandDatabase,
match_info: &'a Match,
file_src: &'a str,
- rules: &'a [ResolvedRule],
- rule: &'a ResolvedRule,
+ rules: &'a [ResolvedRule<'db>],
+ rule: &'a ResolvedRule<'db>,
out: String,
// Map from a range within `out` to a token in `template` that represents a placeholder. This is
// used to validate that the generated source code doesn't split any placeholder expansions (see
@@ -58,11 +58,11 @@ struct ReplacementRenderer<'a> {
edition: Edition,
}
-fn render_replace(
- db: &dyn hir::db::ExpandDatabase,
+fn render_replace<'db>(
+ db: &'db dyn hir::db::ExpandDatabase,
match_info: &Match,
file_src: &str,
- rules: &[ResolvedRule],
+ rules: &[ResolvedRule<'db>],
edition: Edition,
) -> String {
let rule = &rules[match_info.rule_index];
@@ -89,7 +89,7 @@ fn render_replace(
renderer.out
}
-impl ReplacementRenderer<'_> {
+impl<'db> ReplacementRenderer<'_, 'db> {
fn render_node_children(&mut self, node: &SyntaxNode) {
for node_or_token in node.children_with_tokens() {
self.render_node_or_token(&node_or_token);
diff --git a/crates/ide-ssr/src/resolving.rs b/crates/ide-ssr/src/resolving.rs
index a687db4bf5..8f28a1cd3a 100644
--- a/crates/ide-ssr/src/resolving.rs
+++ b/crates/ide-ssr/src/resolving.rs
@@ -15,18 +15,18 @@ pub(crate) struct ResolutionScope<'db> {
node: SyntaxNode,
}
-pub(crate) struct ResolvedRule {
- pub(crate) pattern: ResolvedPattern,
- pub(crate) template: Option<ResolvedPattern>,
+pub(crate) struct ResolvedRule<'db> {
+ pub(crate) pattern: ResolvedPattern<'db>,
+ pub(crate) template: Option<ResolvedPattern<'db>>,
pub(crate) index: usize,
}
-pub(crate) struct ResolvedPattern {
+pub(crate) struct ResolvedPattern<'db> {
pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
pub(crate) node: SyntaxNode,
// Paths in `node` that we've resolved.
pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>,
- pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, UfcsCallInfo>,
+ pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, UfcsCallInfo<'db>>,
pub(crate) contains_self: bool,
}
@@ -36,18 +36,18 @@ pub(crate) struct ResolvedPath {
pub(crate) depth: u32,
}
-pub(crate) struct UfcsCallInfo {
+pub(crate) struct UfcsCallInfo<'db> {
pub(crate) call_expr: ast::CallExpr,
pub(crate) function: hir::Function,
- pub(crate) qualifier_type: Option<hir::Type>,
+ pub(crate) qualifier_type: Option<hir::Type<'db>>,
}
-impl ResolvedRule {
+impl<'db> ResolvedRule<'db> {
pub(crate) fn new(
rule: parsing::ParsedRule,
- resolution_scope: &ResolutionScope<'_>,
+ resolution_scope: &ResolutionScope<'db>,
index: usize,
- ) -> Result<ResolvedRule, SsrError> {
+ ) -> Result<ResolvedRule<'db>, SsrError> {
let resolver =
Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in };
let resolved_template = match rule.template {
@@ -74,8 +74,8 @@ struct Resolver<'a, 'db> {
placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
}
-impl Resolver<'_, '_> {
- fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> {
+impl<'db> Resolver<'_, 'db> {
+ fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern<'db>, SsrError> {
use syntax::ast::AstNode;
use syntax::{SyntaxElement, T};
let mut resolved_paths = FxHashMap::default();
@@ -250,7 +250,7 @@ impl<'db> ResolutionScope<'db> {
}
}
- fn qualifier_type(&self, path: &SyntaxNode) -> Option<hir::Type> {
+ fn qualifier_type(&self, path: &SyntaxNode) -> Option<hir::Type<'db>> {
use syntax::ast::AstNode;
if let Some(path) = ast::Path::cast(path.clone()) {
if let Some(qualifier) = path.qualifier() {
diff --git a/crates/ide-ssr/src/search.rs b/crates/ide-ssr/src/search.rs
index 9afbedbb1a..99a98fb2a7 100644
--- a/crates/ide-ssr/src/search.rs
+++ b/crates/ide-ssr/src/search.rs
@@ -21,13 +21,13 @@ pub(crate) struct UsageCache {
usages: Vec<(Definition, UsageSearchResult)>,
}
-impl MatchFinder<'_> {
+impl<'db> MatchFinder<'db> {
/// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
/// replacement impossible, so further processing is required in order to properly nest matches
/// and remove overlapping matches. This is done in the `nesting` module.
pub(crate) fn find_matches_for_rule(
&self,
- rule: &ResolvedRule,
+ rule: &ResolvedRule<'db>,
usage_cache: &mut UsageCache,
matches_out: &mut Vec<Match>,
) {
@@ -49,8 +49,8 @@ impl MatchFinder<'_> {
fn find_matches_for_pattern_tree(
&self,
- rule: &ResolvedRule,
- pattern: &ResolvedPattern,
+ rule: &ResolvedRule<'db>,
+ pattern: &ResolvedPattern<'db>,
usage_cache: &mut UsageCache,
matches_out: &mut Vec<Match>,
) {
@@ -144,7 +144,7 @@ impl MatchFinder<'_> {
SearchScope::files(&files)
}
- fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
+ fn slow_scan(&self, rule: &ResolvedRule<'db>, matches_out: &mut Vec<Match>) {
self.search_files_do(|file_id| {
let file = self.sema.parse_guess_edition(file_id);
let code = file.syntax();
@@ -177,7 +177,7 @@ impl MatchFinder<'_> {
fn slow_scan_node(
&self,
code: &SyntaxNode,
- rule: &ResolvedRule,
+ rule: &ResolvedRule<'db>,
restrict_range: &Option<FileRange>,
matches_out: &mut Vec<Match>,
) {
@@ -206,7 +206,7 @@ impl MatchFinder<'_> {
fn try_add_match(
&self,
- rule: &ResolvedRule,
+ rule: &ResolvedRule<'db>,
code: &SyntaxNode,
restrict_range: &Option<FileRange>,
matches_out: &mut Vec<Match>,
@@ -274,7 +274,7 @@ impl UsageCache {
/// Returns a path that's suitable for path resolution. We exclude builtin types, since they aren't
/// something that we can find references to. We then somewhat arbitrarily pick the path that is the
/// longest as this is hopefully more likely to be less common, making it faster to find.
-fn pick_path_for_usages(pattern: &ResolvedPattern) -> Option<&ResolvedPath> {
+fn pick_path_for_usages<'a>(pattern: &'a ResolvedPattern<'_>) -> Option<&'a ResolvedPath> {
// FIXME: Take the scope of the resolved path into account. e.g. if there are any paths that are
// private to the current module, then we definitely would want to pick them over say a path
// from std. Possibly we should go further than this and intersect the search scopes for all
diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml
index 1d19daf2f5..06d2776ebe 100644
--- a/crates/ide/Cargo.toml
+++ b/crates/ide/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
@@ -25,7 +26,7 @@ dot.workspace = true
smallvec.workspace = true
triomphe.workspace = true
nohash-hasher.workspace = true
-rustc_apfloat = "0.2.2"
+rustc_apfloat = "0.2.3"
# local deps
cfg.workspace = true
diff --git a/crates/ide/src/annotations.rs b/crates/ide/src/annotations.rs
index 3d71da985b..05196ac98c 100644
--- a/crates/ide/src/annotations.rs
+++ b/crates/ide/src/annotations.rs
@@ -10,6 +10,7 @@ use crate::{
NavigationTarget, RunnableKind,
annotations::fn_references::find_all_methods,
goto_implementation::goto_implementation,
+ navigation_target,
references::find_all_refs,
runnables::{Runnable, runnables},
};
@@ -148,15 +149,32 @@ pub(crate) fn annotations(
node: InFile<T>,
source_file_id: FileId,
) -> Option<(TextRange, Option<TextRange>)> {
- if let Some(InRealFile { file_id, value }) = node.original_ast_node_rooted(db) {
- if file_id.file_id(db) == source_file_id {
- return Some((
- value.syntax().text_range(),
- value.name().map(|name| name.syntax().text_range()),
- ));
+ if let Some(name) = node.value.name().map(|name| name.syntax().text_range()) {
+ // If we have a name, try mapping it out of the macro expansion, since we can put the
+ // annotation on that name token.
+ // See `test_no_annotations_macro_struct_def` vs `test_annotations_macro_struct_def_call_site`
+ let res = navigation_target::orig_range_with_focus_r(
+ db,
+ node.file_id,
+ node.value.syntax().text_range(),
+ Some(name),
+ );
+ if res.call_site.0.file_id == source_file_id {
+ if let Some(name_range) = res.call_site.1 {
+ return Some((res.call_site.0.range, Some(name_range)));
+ }
}
+ };
+ // Otherwise, try upmapping the entire node out of attribute expansions.
+ let InRealFile { file_id, value } = node.original_ast_node_rooted(db)?;
+ if file_id.file_id(db) == source_file_id {
+ Some((
+ value.syntax().text_range(),
+ value.name().map(|name| name.syntax().text_range()),
+ ))
+ } else {
+ None
}
- None
}
});
@@ -914,6 +932,56 @@ m!();
}
#[test]
+ fn test_annotations_macro_struct_def_call_site() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! m {
+ ($name:ident) => {
+ struct $name {}
+ };
+}
+
+m! {
+ Name
+};
+"#,
+ expect![[r#"
+ [
+ Annotation {
+ range: 83..87,
+ kind: HasImpls {
+ pos: FilePositionWrapper {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 83,
+ },
+ data: Some(
+ [],
+ ),
+ },
+ },
+ Annotation {
+ range: 83..87,
+ kind: HasReferences {
+ pos: FilePositionWrapper {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 83,
+ },
+ data: Some(
+ [],
+ ),
+ },
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
fn test_annotations_appear_above_whole_item_when_configured_to_do_so() {
check_with_config(
r#"
diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs
index 4b8d07a253..7a0405939d 100644
--- a/crates/ide/src/call_hierarchy.rs
+++ b/crates/ide/src/call_hierarchy.rs
@@ -592,7 +592,7 @@ macro_rules! call {
"#,
expect!["callee Function FileId(0) 22..37 30..36"],
expect![[r#"
- caller Function FileId(0) 38..52 : FileId(0):44..50
+ caller Function FileId(0) 38..43 : FileId(0):44..50
caller Function FileId(1) 130..136 130..136 : FileId(0):44..50
callee Function FileId(0) 38..52 44..50 : FileId(0):44..50"#]],
expect![[]],
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index 2c983287d8..a5d9a10d2e 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -60,7 +60,7 @@ pub(crate) fn rewrite_links(
let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb))
.into_offset_iter();
- let doc = map_links(doc, |target, title, range| {
+ let doc = map_links(doc, |target, title, range, link_type| {
// This check is imperfect, there's some overlap between valid intra-doc links
// and valid URLs so we choose to be too eager to try to resolve what might be
// a URL.
@@ -78,7 +78,7 @@ pub(crate) fn rewrite_links(
.map(|(_, attr_id)| attr_id.is_inner_attr())
.unwrap_or(false);
if let Some((target, title)) =
- rewrite_intra_doc_link(db, definition, target, title, is_inner_doc)
+ rewrite_intra_doc_link(db, definition, target, title, is_inner_doc, link_type)
{
(None, target, title)
} else if let Some(target) = rewrite_url_link(db, definition, target) {
@@ -417,6 +417,7 @@ fn rewrite_intra_doc_link(
target: &str,
title: &str,
is_inner_doc: bool,
+ link_type: LinkType,
) -> Option<(String, String)> {
let (link, ns) = parse_intra_doc_link(target);
@@ -438,7 +439,21 @@ fn rewrite_intra_doc_link(
url = url.join(&file).ok()?;
url.set_fragment(frag);
- Some((url.into(), strip_prefixes_suffixes(title).to_owned()))
+ // We want to strip the keyword prefix from the title, but only if the target is implicitly the same
+ // as the title.
+ let title = match link_type {
+ LinkType::Email
+ | LinkType::Autolink
+ | LinkType::Shortcut
+ | LinkType::Collapsed
+ | LinkType::Reference
+ | LinkType::Inline => title.to_owned(),
+ LinkType::ShortcutUnknown | LinkType::CollapsedUnknown | LinkType::ReferenceUnknown => {
+ strip_prefixes_suffixes(title).to_owned()
+ }
+ };
+
+ Some((url.into(), title))
}
/// Try to resolve path to local documentation via path-based links (i.e. `../gateway/struct.Shard.html`).
@@ -470,7 +485,7 @@ fn mod_path_of_def(db: &RootDatabase, def: Definition) -> Option<String> {
/// Rewrites a markdown document, applying 'callback' to each link.
fn map_links<'e>(
events: impl Iterator<Item = (Event<'e>, Range<usize>)>,
- callback: impl Fn(&str, &str, Range<usize>) -> (Option<LinkType>, String, String),
+ callback: impl Fn(&str, &str, Range<usize>, LinkType) -> (Option<LinkType>, String, String),
) -> impl Iterator<Item = Event<'e>> {
let mut in_link = false;
// holds the origin link target on start event and the rewritten one on end event
@@ -490,25 +505,25 @@ fn map_links<'e>(
Event::End(Tag::Link(link_type, target, _)) => {
in_link = false;
Event::End(Tag::Link(
- end_link_type.unwrap_or(link_type),
+ end_link_type.take().unwrap_or(link_type),
end_link_target.take().unwrap_or(target),
CowStr::Borrowed(""),
))
}
Event::Text(s) if in_link => {
let (link_type, link_target_s, link_name) =
- callback(&end_link_target.take().unwrap(), &s, range);
+ callback(&end_link_target.take().unwrap(), &s, range, end_link_type.unwrap());
end_link_target = Some(CowStr::Boxed(link_target_s.into()));
- if !matches!(end_link_type, Some(LinkType::Autolink)) {
+ if !matches!(end_link_type, Some(LinkType::Autolink)) && link_type.is_some() {
end_link_type = link_type;
}
Event::Text(CowStr::Boxed(link_name.into()))
}
Event::Code(s) if in_link => {
let (link_type, link_target_s, link_name) =
- callback(&end_link_target.take().unwrap(), &s, range);
+ callback(&end_link_target.take().unwrap(), &s, range, end_link_type.unwrap());
end_link_target = Some(CowStr::Boxed(link_target_s.into()));
- if !matches!(end_link_type, Some(LinkType::Autolink)) {
+ if !matches!(end_link_type, Some(LinkType::Autolink)) && link_type.is_some() {
end_link_type = link_type;
}
Event::Code(CowStr::Boxed(link_name.into()))
diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs
index 347da4e85b..6820f99fac 100644
--- a/crates/ide/src/file_structure.rs
+++ b/crates/ide/src/file_structure.rs
@@ -329,7 +329,7 @@ macro_rules! mcexp {
#[deprecated]
fn obsolete() {}
-#[deprecated(note = "for awhile")]
+#[deprecated(note = "for a while")]
fn very_obsolete() {}
// region: Some region name
@@ -608,8 +608,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "very_obsolete",
- navigation_range: 511..524,
- node_range: 473..529,
+ navigation_range: 512..525,
+ node_range: 473..530,
kind: SymbolKind(
Function,
),
@@ -621,8 +621,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "Some region name",
- navigation_range: 531..558,
- node_range: 531..558,
+ navigation_range: 532..559,
+ node_range: 532..559,
kind: Region,
detail: None,
deprecated: false,
@@ -630,8 +630,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "m",
- navigation_range: 598..599,
- node_range: 573..636,
+ navigation_range: 599..600,
+ node_range: 574..637,
kind: SymbolKind(
Module,
),
@@ -643,8 +643,8 @@ fn let_statements() {
22,
),
label: "dontpanic",
- navigation_range: 573..593,
- node_range: 573..593,
+ navigation_range: 574..594,
+ node_range: 574..594,
kind: Region,
detail: None,
deprecated: false,
@@ -654,8 +654,8 @@ fn let_statements() {
22,
),
label: "f",
- navigation_range: 605..606,
- node_range: 602..611,
+ navigation_range: 606..607,
+ node_range: 603..612,
kind: SymbolKind(
Function,
),
@@ -669,8 +669,8 @@ fn let_statements() {
22,
),
label: "g",
- navigation_range: 628..629,
- node_range: 612..634,
+ navigation_range: 629..630,
+ node_range: 613..635,
kind: SymbolKind(
Function,
),
@@ -682,8 +682,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "extern \"C\"",
- navigation_range: 638..648,
- node_range: 638..651,
+ navigation_range: 639..649,
+ node_range: 639..652,
kind: ExternBlock,
detail: None,
deprecated: false,
@@ -691,8 +691,8 @@ fn let_statements() {
StructureNode {
parent: None,
label: "let_statements",
- navigation_range: 656..670,
- node_range: 653..813,
+ navigation_range: 657..671,
+ node_range: 654..814,
kind: SymbolKind(
Function,
),
@@ -706,8 +706,8 @@ fn let_statements() {
27,
),
label: "x",
- navigation_range: 683..684,
- node_range: 679..690,
+ navigation_range: 684..685,
+ node_range: 680..691,
kind: SymbolKind(
Local,
),
@@ -719,8 +719,8 @@ fn let_statements() {
27,
),
label: "mut y",
- navigation_range: 699..704,
- node_range: 695..709,
+ navigation_range: 700..705,
+ node_range: 696..710,
kind: SymbolKind(
Local,
),
@@ -732,8 +732,8 @@ fn let_statements() {
27,
),
label: "Foo { .. }",
- navigation_range: 718..740,
- node_range: 714..753,
+ navigation_range: 719..741,
+ node_range: 715..754,
kind: SymbolKind(
Local,
),
@@ -745,8 +745,8 @@ fn let_statements() {
27,
),
label: "_",
- navigation_range: 803..804,
- node_range: 799..811,
+ navigation_range: 804..805,
+ node_range: 800..812,
kind: SymbolKind(
Local,
),
diff --git a/crates/ide/src/folding_ranges.rs b/crates/ide/src/folding_ranges.rs
index 194e8c968f..c081796d07 100755
--- a/crates/ide/src/folding_ranges.rs
+++ b/crates/ide/src/folding_ranges.rs
@@ -2,7 +2,7 @@ use ide_db::{FxHashSet, syntax_helpers::node_ext::vis_eq};
use syntax::{
Direction, NodeOrToken, SourceFile,
SyntaxKind::{self, *},
- TextRange, TextSize,
+ SyntaxNode, TextRange, TextSize,
ast::{self, AstNode, AstToken},
match_ast,
};
@@ -16,16 +16,22 @@ const REGION_END: &str = "// endregion";
pub enum FoldKind {
Comment,
Imports,
- Mods,
+ Region,
Block,
ArgList,
- Region,
- Consts,
- Statics,
Array,
WhereClause,
ReturnType,
MatchArm,
+ Function,
+ // region: item runs
+ Modules,
+ Consts,
+ Statics,
+ TypeAliases,
+ TraitAliases,
+ ExternCrates,
+ // endregion: item runs
}
#[derive(Debug)]
@@ -41,10 +47,8 @@ pub struct Fold {
pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
let mut res = vec![];
let mut visited_comments = FxHashSet::default();
- let mut visited_imports = FxHashSet::default();
- let mut visited_mods = FxHashSet::default();
- let mut visited_consts = FxHashSet::default();
- let mut visited_statics = FxHashSet::default();
+ let mut visited_nodes = FxHashSet::default();
+ let mut merged_fn_bodies = FxHashSet::default();
// regions can be nested, here is a LIFO buffer
let mut region_starts: Vec<TextSize> = vec![];
@@ -57,6 +61,32 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
NodeOrToken::Token(token) => token.text().contains('\n'),
};
if is_multiline {
+ // Special case: a function whose parameter list spans multiple lines
+ if matches!(element.kind(), FN) {
+ if let NodeOrToken::Node(node) = &element {
+ if let Some(fn_node) = ast::Fn::cast(node.clone()) {
+ if !fn_node
+ .param_list()
+ .map(|param_list| param_list.syntax().text().contains_char('\n'))
+ .unwrap_or(false)
+ {
+ continue;
+ }
+
+ if let Some(body) = fn_node.body() {
+ res.push(Fold {
+ range: TextRange::new(
+ node.text_range().start(),
+ node.text_range().end(),
+ ),
+ kind: FoldKind::Function,
+ });
+ merged_fn_bodies.insert(body.syntax().text_range());
+ continue;
+ }
+ }
+ }
+ }
res.push(Fold { range: element.text_range(), kind });
continue;
}
@@ -93,30 +123,40 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
if module.item_list().is_none() {
if let Some(range) = contiguous_range_for_item_group(
module,
- &mut visited_mods,
+ &mut visited_nodes,
) {
- res.push(Fold { range, kind: FoldKind::Mods })
+ res.push(Fold { range, kind: FoldKind::Modules })
}
}
},
ast::Use(use_) => {
- if let Some(range) = contiguous_range_for_item_group(use_, &mut visited_imports) {
+ if let Some(range) = contiguous_range_for_item_group(use_, &mut visited_nodes) {
res.push(Fold { range, kind: FoldKind::Imports })
}
},
ast::Const(konst) => {
- if let Some(range) = contiguous_range_for_item_group(konst, &mut visited_consts) {
+ if let Some(range) = contiguous_range_for_item_group(konst, &mut visited_nodes) {
res.push(Fold { range, kind: FoldKind::Consts })
}
},
ast::Static(statik) => {
- if let Some(range) = contiguous_range_for_item_group(statik, &mut visited_statics) {
+ if let Some(range) = contiguous_range_for_item_group(statik, &mut visited_nodes) {
res.push(Fold { range, kind: FoldKind::Statics })
}
},
- ast::WhereClause(where_clause) => {
- if let Some(range) = fold_range_for_where_clause(where_clause) {
- res.push(Fold { range, kind: FoldKind::WhereClause })
+ ast::TypeAlias(alias) => {
+ if let Some(range) = contiguous_range_for_item_group(alias, &mut visited_nodes) {
+ res.push(Fold { range, kind: FoldKind::TypeAliases })
+ }
+ },
+ ast::TraitAlias(alias) => {
+ if let Some(range) = contiguous_range_for_item_group(alias, &mut visited_nodes) {
+ res.push(Fold { range, kind: FoldKind::TraitAliases })
+ }
+ },
+ ast::ExternCrate(extern_crate) => {
+ if let Some(range) = contiguous_range_for_item_group(extern_crate, &mut visited_nodes) {
+ res.push(Fold { range, kind: FoldKind::ExternCrates })
}
},
ast::MatchArm(match_arm) => {
@@ -137,9 +177,11 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
match kind {
COMMENT => Some(FoldKind::Comment),
- ARG_LIST | PARAM_LIST => Some(FoldKind::ArgList),
+ ARG_LIST | PARAM_LIST | GENERIC_ARG_LIST | GENERIC_PARAM_LIST => Some(FoldKind::ArgList),
ARRAY_EXPR => Some(FoldKind::Array),
RET_TYPE => Some(FoldKind::ReturnType),
+ FN => Some(FoldKind::Function),
+ WHERE_CLAUSE => Some(FoldKind::WhereClause),
ASSOC_ITEM_LIST
| RECORD_FIELD_LIST
| RECORD_PAT_FIELD_LIST
@@ -155,11 +197,14 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
}
}
-fn contiguous_range_for_item_group<N>(first: N, visited: &mut FxHashSet<N>) -> Option<TextRange>
+fn contiguous_range_for_item_group<N>(
+ first: N,
+ visited: &mut FxHashSet<SyntaxNode>,
+) -> Option<TextRange>
where
N: ast::HasVisibility + Clone + Hash + Eq,
{
- if !visited.insert(first.clone()) {
+ if !visited.insert(first.syntax().clone()) {
return None;
}
@@ -183,7 +228,7 @@ where
if let Some(next) = N::cast(node) {
let next_vis = next.visibility();
if eq_visibility(next_vis.clone(), last_vis) {
- visited.insert(next.clone());
+ visited.insert(next.syntax().clone());
last_vis = next_vis;
last = next;
continue;
@@ -259,18 +304,6 @@ fn contiguous_range_for_comment(
}
}
-fn fold_range_for_where_clause(where_clause: ast::WhereClause) -> Option<TextRange> {
- let first_where_pred = where_clause.predicates().next();
- let last_where_pred = where_clause.predicates().last();
-
- if first_where_pred != last_where_pred {
- let start = where_clause.where_token()?.text_range().end();
- let end = where_clause.syntax().text_range().end();
- return Some(TextRange::new(start, end));
- }
- None
-}
-
fn fold_range_for_multiline_match_arm(match_arm: ast::MatchArm) -> Option<TextRange> {
if fold_kind(match_arm.expr()?.syntax().kind()).is_some() {
None
@@ -287,6 +320,7 @@ mod tests {
use super::*;
+ #[track_caller]
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
let (ranges, text) = extract_tags(ra_fixture, "fold");
@@ -307,22 +341,43 @@ mod tests {
let kind = match fold.kind {
FoldKind::Comment => "comment",
FoldKind::Imports => "imports",
- FoldKind::Mods => "mods",
+ FoldKind::Modules => "mods",
FoldKind::Block => "block",
FoldKind::ArgList => "arglist",
FoldKind::Region => "region",
FoldKind::Consts => "consts",
FoldKind::Statics => "statics",
+ FoldKind::TypeAliases => "typealiases",
FoldKind::Array => "array",
FoldKind::WhereClause => "whereclause",
FoldKind::ReturnType => "returntype",
FoldKind::MatchArm => "matcharm",
+ FoldKind::Function => "function",
+ FoldKind::TraitAliases => "traitaliases",
+ FoldKind::ExternCrates => "externcrates",
};
assert_eq!(kind, &attr.unwrap());
}
}
#[test]
+ fn test_fold_func_with_multiline_param_list() {
+ check(
+ r#"
+<fold function>fn func<fold arglist>(
+ a: i32,
+ b: i32,
+ c: i32,
+)</fold> <fold block>{
+
+
+
+}</fold></fold>
+"#,
+ );
+ }
+
+ #[test]
fn test_fold_comments() {
check(
r#"
@@ -534,10 +589,10 @@ const _: S = S <fold block>{
fn fold_multiline_params() {
check(
r#"
-fn foo<fold arglist>(
+<fold function>fn foo<fold arglist>(
x: i32,
y: String,
-)</fold> {}
+)</fold> {}</fold>
"#,
)
}
@@ -594,19 +649,18 @@ static SECOND_STATIC: &str = "second";</fold>
#[test]
fn fold_where_clause() {
- // fold multi-line and don't fold single line.
check(
r#"
fn foo()
-where<fold whereclause>
+<fold whereclause>where
A: Foo,
B: Foo,
C: Foo,
D: Foo,</fold> {}
fn bar()
-where
- A: Bar, {}
+<fold whereclause>where
+ A: Bar,</fold> {}
"#,
)
}
@@ -624,4 +678,16 @@ fn bar() -> (bool, bool) { (true, true) }
"#,
)
}
+
+ #[test]
+ fn fold_generics() {
+ check(
+ r#"
+type Foo<T, U> = foo<fold arglist><
+ T,
+ U,
+></fold>;
+"#,
+ )
+ }
}
diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs
index 38c032d382..267e8ff712 100644
--- a/crates/ide/src/goto_declaration.rs
+++ b/crates/ide/src/goto_declaration.rs
@@ -29,7 +29,7 @@ pub(crate) fn goto_declaration(
.find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
let range = original_token.text_range();
let info: Vec<NavigationTarget> = sema
- .descend_into_macros_no_opaque(original_token)
+ .descend_into_macros_no_opaque(original_token, false)
.iter()
.filter_map(|token| {
let parent = token.value.parent()?;
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index 7917aab8bf..29fc68bb50 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -88,7 +88,7 @@ pub(crate) fn goto_definition(
}
let navs = sema
- .descend_into_macros_no_opaque(original_token.clone())
+ .descend_into_macros_no_opaque(original_token.clone(), false)
.into_iter()
.filter_map(|token| {
let parent = token.value.parent()?;
@@ -291,13 +291,14 @@ fn handle_control_flow_keywords(
token: &SyntaxToken,
) -> Option<Vec<NavigationTarget>> {
match token.kind() {
- // For `fn` / `loop` / `while` / `for` / `async`, return the keyword it self,
+ // For `fn` / `loop` / `while` / `for` / `async` / `match`, return the keyword itself,
// so that VSCode will find the references when using `ctrl + click`
T![fn] | T![async] | T![try] | T![return] => nav_for_exit_points(sema, token),
T![loop] | T![while] | T![break] | T![continue] => nav_for_break_points(sema, token),
T![for] if token.parent().and_then(ast::ForExpr::cast).is_some() => {
nav_for_break_points(sema, token)
}
+ T![match] | T![=>] | T![if] => nav_for_branch_exit_points(sema, token),
_ => None,
}
}
@@ -407,6 +408,91 @@ fn nav_for_exit_points(
Some(navs)
}
+pub(crate) fn find_branch_root(
+ sema: &Semantics<'_, RootDatabase>,
+ token: &SyntaxToken,
+) -> Vec<SyntaxNode> {
+ let find_nodes = |node_filter: fn(SyntaxNode) -> Option<SyntaxNode>| {
+ sema.descend_into_macros(token.clone())
+ .into_iter()
+ .filter_map(|token| node_filter(token.parent()?))
+ .collect_vec()
+ };
+
+ match token.kind() {
+ T![match] => find_nodes(|node| Some(ast::MatchExpr::cast(node)?.syntax().clone())),
+ T![=>] => find_nodes(|node| Some(ast::MatchArm::cast(node)?.syntax().clone())),
+ T![if] => find_nodes(|node| {
+ let if_expr = ast::IfExpr::cast(node)?;
+
+ let root_if = iter::successors(Some(if_expr.clone()), |if_expr| {
+ let parent_if = if_expr.syntax().parent().and_then(ast::IfExpr::cast)?;
+ let ast::ElseBranch::IfExpr(else_branch) = parent_if.else_branch()? else {
+ return None;
+ };
+
+ (else_branch.syntax() == if_expr.syntax()).then_some(parent_if)
+ })
+ .last()?;
+
+ Some(root_if.syntax().clone())
+ }),
+ _ => vec![],
+ }
+}
+
+fn nav_for_branch_exit_points(
+ sema: &Semantics<'_, RootDatabase>,
+ token: &SyntaxToken,
+) -> Option<Vec<NavigationTarget>> {
+ let db = sema.db;
+
+ let navs = match token.kind() {
+ T![match] => find_branch_root(sema, token)
+ .into_iter()
+ .filter_map(|node| {
+ let file_id = sema.hir_file_for(&node);
+ let match_expr = ast::MatchExpr::cast(node)?;
+ let focus_range = match_expr.match_token()?.text_range();
+ let match_expr_in_file = InFile::new(file_id, match_expr.into());
+ Some(expr_to_nav(db, match_expr_in_file, Some(focus_range)))
+ })
+ .flatten()
+ .collect_vec(),
+
+ T![=>] => find_branch_root(sema, token)
+ .into_iter()
+ .filter_map(|node| {
+ let match_arm = ast::MatchArm::cast(node)?;
+ let match_expr = sema
+ .ancestors_with_macros(match_arm.syntax().clone())
+ .find_map(ast::MatchExpr::cast)?;
+ let file_id = sema.hir_file_for(match_expr.syntax());
+ let focus_range = match_arm.fat_arrow_token()?.text_range();
+ let match_expr_in_file = InFile::new(file_id, match_expr.into());
+ Some(expr_to_nav(db, match_expr_in_file, Some(focus_range)))
+ })
+ .flatten()
+ .collect_vec(),
+
+ T![if] => find_branch_root(sema, token)
+ .into_iter()
+ .filter_map(|node| {
+ let file_id = sema.hir_file_for(&node);
+ let if_expr = ast::IfExpr::cast(node)?;
+ let focus_range = if_expr.if_token()?.text_range();
+ let if_expr_in_file = InFile::new(file_id, if_expr.into());
+ Some(expr_to_nav(db, if_expr_in_file, Some(focus_range)))
+ })
+ .flatten()
+ .collect_vec(),
+
+ _ => return Some(Vec::new()),
+ };
+
+ Some(navs)
+}
+
pub(crate) fn find_loops(
sema: &Semantics<'_, RootDatabase>,
token: &SyntaxToken,
@@ -996,7 +1082,7 @@ macro_rules! define_fn {
}
define_fn!();
-//^^^^^^^^^^^^^
+//^^^^^^^^^^
fn bar() {
$0foo();
}
@@ -3142,7 +3228,7 @@ mod bar {
use crate::m;
m!();
- // ^^^^^
+ // ^^
fn qux() {
Foo$0;
@@ -3614,4 +3700,227 @@ fn foo() {
"#,
);
}
+
+ #[test]
+ fn goto_def_for_match_keyword() {
+ check(
+ r#"
+fn main() {
+ match$0 0 {
+ // ^^^^^
+ 0 => {},
+ _ => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_match_arm_fat_arrow() {
+ check(
+ r#"
+fn main() {
+ match 0 {
+ 0 =>$0 {},
+ // ^^
+ _ => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_if_keyword() {
+ check(
+ r#"
+fn main() {
+ if$0 true {
+ // ^^
+ ()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_match_nested_in_if() {
+ check(
+ r#"
+fn main() {
+ if true {
+ match$0 0 {
+ // ^^^^^
+ 0 => {},
+ _ => {},
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_multiple_match_expressions() {
+ check(
+ r#"
+fn main() {
+ match 0 {
+ 0 => {},
+ _ => {},
+ };
+
+ match$0 1 {
+ // ^^^^^
+ 1 => {},
+ _ => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_nested_match_expressions() {
+ check(
+ r#"
+fn main() {
+ match 0 {
+ 0 => match$0 1 {
+ // ^^^^^
+ 1 => {},
+ _ => {},
+ },
+ _ => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_if_else_chains() {
+ check(
+ r#"
+fn main() {
+ if true {
+ // ^^
+ ()
+ } else if$0 false {
+ ()
+ } else {
+ ()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_match_with_guards() {
+ check(
+ r#"
+fn main() {
+ match 42 {
+ x if x > 0 =>$0 {},
+ // ^^
+ _ => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_match_with_macro_arm() {
+ check(
+ r#"
+macro_rules! arm {
+ () => { 0 => {} };
+}
+
+fn main() {
+ match$0 0 {
+ // ^^^^^
+ arm!(),
+ _ => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_const_from_match_pat_with_tuple_struct() {
+ check(
+ r#"
+struct Tag(u8);
+struct Path {}
+
+const Path: u8 = 0;
+ // ^^^^
+fn main() {
+ match Tag(Path) {
+ Tag(Path$0) => {}
+ _ => {}
+ }
+}
+
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_const_from_match_pat() {
+ check(
+ r#"
+type T1 = u8;
+const T1: u8 = 0;
+ // ^^
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ _ => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_struct_from_match_pat() {
+ check(
+ r#"
+struct T1;
+ // ^^
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ _ => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_goto_trait_from_match_pat() {
+ check(
+ r#"
+trait T1 {}
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ // ^^
+ _ => {}
+ }
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs
index 1bc28f28b6..02d96a6473 100644
--- a/crates/ide/src/goto_implementation.rs
+++ b/crates/ide/src/goto_implementation.rs
@@ -83,7 +83,7 @@ pub(crate) fn goto_implementation(
Some(RangeInfo { range, info: navs })
}
-fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type) -> Vec<NavigationTarget> {
+fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type<'_>) -> Vec<NavigationTarget> {
Impl::all_for_type(sema.db, ty)
.into_iter()
.filter_map(|imp| imp.try_to_nav(sema.db))
diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs
index a6c7ea29b0..b80e81d39c 100644
--- a/crates/ide/src/goto_type_definition.rs
+++ b/crates/ide/src/goto_type_definition.rs
@@ -38,7 +38,7 @@ pub(crate) fn goto_type_definition(
}
}
};
- let mut process_ty = |ty: hir::Type| {
+ let mut process_ty = |ty: hir::Type<'_>| {
// collect from each `ty` into the `res` result vec
let ty = ty.strip_references();
ty.walk(db, |t| {
@@ -70,11 +70,10 @@ pub(crate) fn goto_type_definition(
}
let range = token.text_range();
- sema.descend_into_macros_no_opaque(token)
+ sema.descend_into_macros_no_opaque(token, false)
.into_iter()
.filter_map(|token| {
- sema
- .token_ancestors_with_macros(token.value)
+ sema.token_ancestors_with_macros(token.value)
// When `token` is within a macro call, we can't determine its type. Don't continue
// this traversal because otherwise we'll end up returning the type of *that* macro
// call, which is not what we want in general.
@@ -103,7 +102,6 @@ pub(crate) fn goto_type_definition(
_ => return None,
}
};
-
Some(ty)
})
})
diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs
index aa947921a9..356bd69aa4 100644
--- a/crates/ide/src/highlight_related.rs
+++ b/crates/ide/src/highlight_related.rs
@@ -37,8 +37,11 @@ pub struct HighlightRelatedConfig {
pub break_points: bool,
pub closure_captures: bool,
pub yield_points: bool,
+ pub branch_exit_points: bool,
}
+type HighlightMap = FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>;
+
// Feature: Highlight Related
//
// Highlights constructs related to the thing under the cursor:
@@ -64,7 +67,7 @@ pub(crate) fn highlight_related(
let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?`
- T![->] => 4,
+ T![->] | T![=>] => 4,
kind if kind.is_keyword(file_id.edition(sema.db)) => 3,
IDENT | INT_NUMBER => 2,
T![|] => 1,
@@ -78,6 +81,9 @@ pub(crate) fn highlight_related(
T![fn] | T![return] | T![->] if config.exit_points => {
highlight_exit_points(sema, token).remove(&file_id)
}
+ T![match] | T![=>] | T![if] if config.branch_exit_points => {
+ highlight_branch_exit_points(sema, token).remove(&file_id)
+ }
T![await] | T![async] if config.yield_points => {
highlight_yield_points(sema, token).remove(&file_id)
}
@@ -300,11 +306,93 @@ fn highlight_references(
if res.is_empty() { None } else { Some(res.into_iter().collect()) }
}
+pub(crate) fn highlight_branch_exit_points(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> {
+ let mut highlights: HighlightMap = FxHashMap::default();
+
+ let push_to_highlights = |file_id, range, highlights: &mut HighlightMap| {
+ if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) {
+ let hrange = HighlightedRange { category: ReferenceCategory::empty(), range };
+ highlights.entry(file_id).or_default().insert(hrange);
+ }
+ };
+
+ let push_tail_expr = |tail: Option<ast::Expr>, highlights: &mut HighlightMap| {
+ let Some(tail) = tail else {
+ return;
+ };
+
+ for_each_tail_expr(&tail, &mut |tail| {
+ let file_id = sema.hir_file_for(tail.syntax());
+ let range = tail.syntax().text_range();
+ push_to_highlights(file_id, Some(range), highlights);
+ });
+ };
+
+ let nodes = goto_definition::find_branch_root(sema, &token).into_iter();
+ match token.kind() {
+ T![match] => {
+ for match_expr in nodes.filter_map(ast::MatchExpr::cast) {
+ let file_id = sema.hir_file_for(match_expr.syntax());
+ let range = match_expr.match_token().map(|token| token.text_range());
+ push_to_highlights(file_id, range, &mut highlights);
+
+ let Some(arm_list) = match_expr.match_arm_list() else {
+ continue;
+ };
+ for arm in arm_list.arms() {
+ push_tail_expr(arm.expr(), &mut highlights);
+ }
+ }
+ }
+ T![=>] => {
+ for arm in nodes.filter_map(ast::MatchArm::cast) {
+ let file_id = sema.hir_file_for(arm.syntax());
+ let range = arm.fat_arrow_token().map(|token| token.text_range());
+ push_to_highlights(file_id, range, &mut highlights);
+
+ push_tail_expr(arm.expr(), &mut highlights);
+ }
+ }
+ T![if] => {
+ for mut if_to_process in nodes.map(ast::IfExpr::cast) {
+ while let Some(cur_if) = if_to_process.take() {
+ let file_id = sema.hir_file_for(cur_if.syntax());
+
+ let if_kw_range = cur_if.if_token().map(|token| token.text_range());
+ push_to_highlights(file_id, if_kw_range, &mut highlights);
+
+ if let Some(then_block) = cur_if.then_branch() {
+ push_tail_expr(Some(then_block.into()), &mut highlights);
+ }
+
+ match cur_if.else_branch() {
+ Some(ast::ElseBranch::Block(else_block)) => {
+ push_tail_expr(Some(else_block.into()), &mut highlights);
+ if_to_process = None;
+ }
+ Some(ast::ElseBranch::IfExpr(nested_if)) => if_to_process = Some(nested_if),
+ None => if_to_process = None,
+ }
+ }
+ }
+ }
+ _ => {}
+ }
+
+ highlights
+ .into_iter()
+ .map(|(file_id, ranges)| (file_id, ranges.into_iter().collect()))
+ .collect()
+}
+
fn hl_exit_points(
sema: &Semantics<'_, RootDatabase>,
def_token: Option<SyntaxToken>,
body: ast::Expr,
-) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> {
+) -> Option<HighlightMap> {
let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default();
let mut push_to_highlights = |file_id, range| {
@@ -411,7 +499,7 @@ pub(crate) fn highlight_break_points(
loop_token: Option<SyntaxToken>,
label: Option<ast::Label>,
expr: ast::Expr,
- ) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> {
+ ) -> Option<HighlightMap> {
let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default();
let mut push_to_highlights = |file_id, range| {
@@ -504,7 +592,7 @@ pub(crate) fn highlight_yield_points(
sema: &Semantics<'_, RootDatabase>,
async_token: Option<SyntaxToken>,
body: Option<ast::Expr>,
- ) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> {
+ ) -> Option<HighlightMap> {
let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default();
let mut push_to_highlights = |file_id, range| {
@@ -597,10 +685,7 @@ fn original_frange(
InFile::new(file_id, text_range?).original_node_file_range_opt(db).map(|(frange, _)| frange)
}
-fn merge_map(
- res: &mut FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>,
- new: Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>>,
-) {
+fn merge_map(res: &mut HighlightMap, new: Option<HighlightMap>) {
let Some(new) = new else {
return;
};
@@ -750,6 +835,7 @@ mod tests {
references: true,
closure_captures: true,
yield_points: true,
+ branch_exit_points: true,
};
#[track_caller]
@@ -2135,6 +2221,62 @@ fn main() {
}
#[test]
+ fn nested_match() {
+ check(
+ r#"
+fn main() {
+ match$0 0 {
+ // ^^^^^
+ 0 => match 1 {
+ 1 => 2,
+ // ^
+ _ => 3,
+ // ^
+ },
+ _ => 4,
+ // ^
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn single_arm_highlight() {
+ check(
+ r#"
+fn main() {
+ match 0 {
+ 0 =>$0 {
+ // ^^
+ let x = 1;
+ x
+ // ^
+ }
+ _ => 2,
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn no_branches_when_disabled() {
+ let config = HighlightRelatedConfig { branch_exit_points: false, ..ENABLED_CONFIG };
+ check_with_config(
+ r#"
+fn main() {
+ match$0 0 {
+ 0 => 1,
+ _ => 2,
+ }
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
fn asm() {
check(
r#"
@@ -2165,6 +2307,200 @@ pub unsafe fn bootstrap() -> ! {
}
#[test]
+ fn complex_arms_highlight() {
+ check(
+ r#"
+fn calculate(n: i32) -> i32 { n * 2 }
+
+fn main() {
+ match$0 Some(1) {
+ // ^^^^^
+ Some(x) => match x {
+ 0 => { let y = x; y },
+ // ^
+ 1 => calculate(x),
+ //^^^^^^^^^^^^
+ _ => (|| 6)(),
+ // ^^^^^^^^
+ },
+ None => loop {
+ break 5;
+ // ^^^^^^^
+ },
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn match_in_macro_highlight() {
+ check(
+ r#"
+macro_rules! M {
+ ($e:expr) => { $e };
+}
+
+fn main() {
+ M!{
+ match$0 Some(1) {
+ // ^^^^^
+ Some(x) => x,
+ // ^
+ None => 0,
+ // ^
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn match_in_macro_highlight_2() {
+ check(
+ r#"
+macro_rules! match_ast {
+ (match $node:ident { $($tt:tt)* }) => { $crate::match_ast!(match ($node) { $($tt)* }) };
+
+ (match ($node:expr) {
+ $( $( $path:ident )::+ ($it:pat) => $res:expr, )*
+ _ => $catch_all:expr $(,)?
+ }) => {{
+ $( if let Some($it) = $($path::)+cast($node.clone()) { $res } else )*
+ { $catch_all }
+ }};
+}
+
+fn main() {
+ match_ast! {
+ match$0 Some(1) {
+ Some(x) => x,
+ }
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn nested_if_else() {
+ check(
+ r#"
+fn main() {
+ if$0 true {
+ // ^^
+ if false {
+ 1
+ // ^
+ } else {
+ 2
+ // ^
+ }
+ } else {
+ 3
+ // ^
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn if_else_if_highlight() {
+ check(
+ r#"
+fn main() {
+ if$0 true {
+ // ^^
+ 1
+ // ^
+ } else if false {
+ // ^^
+ 2
+ // ^
+ } else {
+ 3
+ // ^
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn complex_if_branches() {
+ check(
+ r#"
+fn calculate(n: i32) -> i32 { n * 2 }
+
+fn main() {
+ if$0 true {
+ // ^^
+ let x = 5;
+ calculate(x)
+ // ^^^^^^^^^^^^
+ } else if false {
+ // ^^
+ (|| 10)()
+ // ^^^^^^^^^
+ } else {
+ loop {
+ break 15;
+ // ^^^^^^^^
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn if_in_macro_highlight() {
+ check(
+ r#"
+macro_rules! M {
+ ($e:expr) => { $e };
+}
+
+fn main() {
+ M!{
+ if$0 true {
+ // ^^
+ 5
+ // ^
+ } else {
+ 10
+ // ^^
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn match_in_macro() {
+ // We should not highlight the outer `match` expression.
+ check(
+ r#"
+macro_rules! M {
+ (match) => { 1 };
+}
+
+fn main() {
+ match Some(1) {
+ Some(x) => x,
+ None => {
+ M!(match$0)
+ }
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
fn labeled_block_tail_expr() {
check(
r#"
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index 5404a9dc2c..e4d6279759 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -426,7 +426,7 @@ pub(crate) fn hover_for_definition(
sema: &Semantics<'_, RootDatabase>,
file_id: FileId,
def: Definition,
- subst: Option<GenericSubstitution>,
+ subst: Option<GenericSubstitution<'_>>,
scope_node: &SyntaxNode,
macro_arm: Option<u32>,
render_extras: bool,
@@ -483,10 +483,10 @@ pub(crate) fn hover_for_definition(
}
}
-fn notable_traits(
- db: &RootDatabase,
- ty: &hir::Type,
-) -> Vec<(hir::Trait, Vec<(Option<hir::Type>, hir::Name)>)> {
+fn notable_traits<'db>(
+ db: &'db RootDatabase,
+ ty: &hir::Type<'db>,
+) -> Vec<(hir::Trait, Vec<(Option<hir::Type<'db>>, hir::Name)>)> {
db.notable_traits_in_deps(ty.krate(db).into())
.iter()
.flat_map(|it| &**it)
@@ -567,8 +567,8 @@ fn runnable_action(
fn goto_type_action_for_def(
db: &RootDatabase,
def: Definition,
- notable_traits: &[(hir::Trait, Vec<(Option<hir::Type>, hir::Name)>)],
- subst_types: Option<Vec<(hir::Symbol, hir::Type)>>,
+ notable_traits: &[(hir::Trait, Vec<(Option<hir::Type<'_>>, hir::Name)>)],
+ subst_types: Option<Vec<(hir::Symbol, hir::Type<'_>)>>,
edition: Edition,
) -> Option<HoverAction> {
let mut targets: Vec<hir::ModuleDef> = Vec::new();
@@ -622,7 +622,7 @@ fn goto_type_action_for_def(
fn walk_and_push_ty(
db: &RootDatabase,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
push_new_def: &mut dyn FnMut(hir::ModuleDef),
) {
ty.walk(db, |t| {
diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs
index c24864a18b..670210d499 100644
--- a/crates/ide/src/hover/render.rs
+++ b/crates/ide/src/hover/render.rs
@@ -476,10 +476,10 @@ pub(super) fn definition(
db: &RootDatabase,
def: Definition,
famous_defs: Option<&FamousDefs<'_, '_>>,
- notable_traits: &[(Trait, Vec<(Option<Type>, Name)>)],
+ notable_traits: &[(Trait, Vec<(Option<Type<'_>>, Name)>)],
macro_arm: Option<u32>,
render_extras: bool,
- subst_types: Option<&Vec<(Symbol, Type)>>,
+ subst_types: Option<&Vec<(Symbol, Type<'_>)>>,
config: &HoverConfig,
edition: Edition,
display_target: DisplayTarget,
@@ -938,7 +938,7 @@ pub(super) fn literal(
fn render_notable_trait(
db: &RootDatabase,
- notable_traits: &[(Trait, Vec<(Option<Type>, Name)>)],
+ notable_traits: &[(Trait, Vec<(Option<Type<'_>>, Name)>)],
edition: Edition,
display_target: DisplayTarget,
) -> Option<String> {
@@ -979,7 +979,7 @@ fn render_notable_trait(
fn type_info(
sema: &Semantics<'_, RootDatabase>,
config: &HoverConfig,
- ty: TypeInfo,
+ ty: TypeInfo<'_>,
edition: Edition,
display_target: DisplayTarget,
) -> Option<HoverResult> {
@@ -1038,7 +1038,7 @@ fn type_info(
fn closure_ty(
sema: &Semantics<'_, RootDatabase>,
config: &HoverConfig,
- TypeInfo { original, adjusted }: &TypeInfo,
+ TypeInfo { original, adjusted }: &TypeInfo<'_>,
edition: Edition,
display_target: DisplayTarget,
) -> Option<HoverResult> {
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index a281a49152..c5480217a9 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -10927,3 +10927,99 @@ fn main() {
"#]],
);
}
+
+#[test]
+fn keyword_inside_link() {
+ check(
+ r#"
+enum Foo {
+ MacroExpansion,
+}
+
+/// I return a [macro expansion](Foo::MacroExpansion).
+fn bar$0() -> Foo {
+ Foo::MacroExpansion
+}
+ "#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ fn bar() -> Foo
+ ```
+
+ ---
+
+ I return a [macro expansion](https://docs.rs/ra_test_fixture/*/ra_test_fixture/enum.Foo.html#variant.MacroExpansion).
+ "#]],
+ );
+}
+
+#[test]
+fn regression_20190() {
+ check(
+ r#"
+struct Foo;
+
+/// [`foo` bar](Foo).
+fn has_docs$0() {}
+ "#,
+ expect![[r#"
+ *has_docs*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ fn has_docs()
+ ```
+
+ ---
+
+ [`foo` bar](https://docs.rs/ra_test_fixture/*/ra_test_fixture/struct.Foo.html).
+ "#]],
+ );
+}
+
+#[test]
+fn regression_20225() {
+ check(
+ r#"
+//- minicore: coerce_unsized
+trait Trait {
+ type Type<'a, T: ?Sized + 'a>;
+}
+
+enum Borrowed {}
+
+impl Trait for Borrowed {
+ type Type<'a, T: ?Sized + 'a> = &'a T;
+}
+
+enum Enum<'a, T: Trait + 'a> {
+ Variant1(T::Type<'a, [Enum<'a, T>]>),
+ Variant2,
+}
+
+impl Enum<'_, Borrowed> {
+ const CONSTANT$0: Self = Self::Variant1(&[Self::Variant2]);
+}
+ "#,
+ expect![[r#"
+ *CONSTANT*
+
+ ```rust
+ ra_test_fixture::Enum
+ ```
+
+ ```rust
+ const CONSTANT: Self = Variant1(&[Variant2])
+ ```
+ "#]],
+ );
+}
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index b094b09846..19e5509681 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -722,14 +722,14 @@ impl InlayHintLabelBuilder<'_> {
fn label_of_ty(
famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
display_target: DisplayTarget,
) -> Option<InlayHintLabel> {
fn rec(
sema: &Semantics<'_, RootDatabase>,
famous_defs: &FamousDefs<'_, '_>,
mut max_length: Option<usize>,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
label_builder: &mut InlayHintLabelBuilder<'_>,
config: &InlayHintsConfig,
display_target: DisplayTarget,
@@ -788,11 +788,11 @@ fn label_of_ty(
}
/// Checks if the type is an Iterator from std::iter and returns the iterator trait and the item type of the concrete iterator.
-fn hint_iterator(
- sema: &Semantics<'_, RootDatabase>,
- famous_defs: &FamousDefs<'_, '_>,
- ty: &hir::Type,
-) -> Option<(hir::Trait, hir::TypeAlias, hir::Type)> {
+fn hint_iterator<'db>(
+ sema: &Semantics<'db, RootDatabase>,
+ famous_defs: &FamousDefs<'_, 'db>,
+ ty: &hir::Type<'db>,
+) -> Option<(hir::Trait, hir::TypeAlias, hir::Type<'db>)> {
let db = sema.db;
let strukt = ty.strip_references().as_adt()?;
let krate = strukt.module(db).krate();
@@ -826,7 +826,7 @@ fn ty_to_text_edit(
sema: &Semantics<'_, RootDatabase>,
config: &InlayHintsConfig,
node_for_hint: &SyntaxNode,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
offset_to_insert_ty: TextSize,
additional_edits: &dyn Fn(&mut TextEditBuilder),
prefix: impl Into<String>,
diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs
index f2844a2eaa..49b43fc37f 100644
--- a/crates/ide/src/inlay_hints/adjustment.rs
+++ b/crates/ide/src/inlay_hints/adjustment.rs
@@ -109,50 +109,90 @@ pub(super) fn hints(
}
has_adjustments = true;
- // FIXME: Add some nicer tooltips to each of these
- let (text, coercion) = match kind {
+ let (text, coercion, detailed_tooltip) = match kind {
Adjust::NeverToAny if config.adjustment_hints == AdjustmentHints::Always => {
allow_edit = false;
- ("<never-to-any>", "never to any")
- }
- Adjust::Deref(None) => ("*", "dereference"),
- Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => {
- ("*", "`Deref` dereference")
- }
- Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => {
- ("*", "`DerefMut` dereference")
- }
- Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => ("&", "borrow"),
- Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => ("&mut ", "unique borrow"),
- Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => {
- ("&raw const ", "const pointer borrow")
- }
- Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => {
- ("&raw mut ", "mut pointer borrow")
+ (
+ "<never-to-any>",
+ "never to any",
+ "Coerces the never type `!` into any other type. This happens in code paths that never return, like after `panic!()` or `return`.",
+ )
}
+ Adjust::Deref(None) => (
+ "*",
+ "dereference",
+ "Built-in dereference of a reference to access the underlying value. The compiler inserts `*` to get the value from `&T`.",
+ ),
+ Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => (
+ "*",
+ "`Deref` dereference",
+ "Dereference via the `Deref` trait. Used for types like `Box<T>` or `Rc<T>` so they act like plain `T`.",
+ ),
+ Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => (
+ "*",
+ "`DerefMut` dereference",
+ "Mutable dereference using the `DerefMut` trait. Enables smart pointers to give mutable access to their inner values.",
+ ),
+ Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => (
+ "&",
+ "shared borrow",
+ "Inserts `&` to create a shared reference. Lets you use a value without moving or cloning it.",
+ ),
+ Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => (
+ "&mut ",
+ "mutable borrow",
+ "Inserts `&mut` to create a unique, mutable reference. Lets you modify a value without taking ownership.",
+ ),
+ Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => (
+ "&raw const ",
+ "const raw pointer",
+ "Converts a reference to a raw const pointer `*const T`. Often used when working with FFI or unsafe code.",
+ ),
+ Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => (
+ "&raw mut ",
+ "mut raw pointer",
+ "Converts a mutable reference to a raw mutable pointer `*mut T`. Allows mutation in unsafe contexts.",
+ ),
// some of these could be represented via `as` casts, but that's not too nice and
// handling everything as a prefix expr makes the `(` and `)` insertion easier
Adjust::Pointer(cast) if config.adjustment_hints == AdjustmentHints::Always => {
allow_edit = false;
match cast {
- PointerCast::ReifyFnPointer => {
- ("<fn-item-to-fn-pointer>", "fn item to fn pointer")
- }
+ PointerCast::ReifyFnPointer => (
+ "<fn-item-to-fn-pointer>",
+ "fn item to fn pointer",
+ "Converts a named function to a function pointer `fn()`. Useful when passing functions as values.",
+ ),
PointerCast::UnsafeFnPointer => (
"<safe-fn-pointer-to-unsafe-fn-pointer>",
"safe fn pointer to unsafe fn pointer",
+ "Coerces a safe function pointer to an unsafe one. Allows calling it in an unsafe context.",
+ ),
+ PointerCast::ClosureFnPointer(Safety::Unsafe) => (
+ "<closure-to-unsafe-fn-pointer>",
+ "closure to unsafe fn pointer",
+ "Converts a non-capturing closure to an unsafe function pointer. Required for use in `extern` or unsafe APIs.",
+ ),
+ PointerCast::ClosureFnPointer(Safety::Safe) => (
+ "<closure-to-fn-pointer>",
+ "closure to fn pointer",
+ "Converts a non-capturing closure to a function pointer. Lets closures behave like plain functions.",
+ ),
+ PointerCast::MutToConstPointer => (
+ "<mut-ptr-to-const-ptr>",
+ "mut ptr to const ptr",
+ "Coerces `*mut T` to `*const T`. Safe because const pointers restrict what you can do.",
+ ),
+ PointerCast::ArrayToPointer => (
+ "<array-ptr-to-element-ptr>",
+ "array to pointer",
+ "Converts an array to a pointer to its first element. Similar to how arrays decay to pointers in C.",
+ ),
+ PointerCast::Unsize => (
+ "<unsize>",
+ "unsize coercion",
+ "Converts a sized type to an unsized one. Used for things like turning arrays into slices or concrete types into trait objects.",
),
- PointerCast::ClosureFnPointer(Safety::Unsafe) => {
- ("<closure-to-unsafe-fn-pointer>", "closure to unsafe fn pointer")
- }
- PointerCast::ClosureFnPointer(Safety::Safe) => {
- ("<closure-to-fn-pointer>", "closure to fn pointer")
- }
- PointerCast::MutToConstPointer => {
- ("<mut-ptr-to-const-ptr>", "mut ptr to const ptr")
- }
- PointerCast::ArrayToPointer => ("<array-ptr-to-element-ptr>", ""),
- PointerCast::Unsize => ("<unsize>", "unsize"),
}
}
_ => continue,
@@ -162,9 +202,11 @@ pub(super) fn hints(
linked_location: None,
tooltip: Some(config.lazy_tooltip(|| {
InlayTooltip::Markdown(format!(
- "`{}` → `{}` ({coercion} coercion)",
+ "`{}` → `{}`\n\n**{}**\n\n{}",
source.display(sema.db, display_target),
target.display(sema.db, display_target),
+ coercion,
+ detailed_tooltip
))
})),
};
diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs
index 36fdd90e8a..729349365e 100644
--- a/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/crates/ide/src/inlay_hints/bind_pat.rs
@@ -380,9 +380,9 @@ fn main() {
let foo = foo3();
// ^^^ impl Fn(f64, f64) -> u32
let foo = foo4();
- // ^^^ &'static (dyn Fn(f64, f64) -> u32 + 'static)
+ // ^^^ &'static dyn Fn(f64, f64) -> u32
let foo = foo5();
- // ^^^ &'static (dyn Fn(&(dyn Fn(f64, f64) -> u32 + 'static), f64) -> u32 + 'static)
+ // ^^^ &'static dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32
let foo = foo6();
// ^^^ impl Fn(f64, f64) -> u32
let foo = foo7();
@@ -413,7 +413,7 @@ fn main() {
let foo = foo3();
// ^^^ impl Fn(f64, f64) -> u32
let foo = foo4();
- // ^^^ &'static (dyn Fn(f64, f64) -> u32 + 'static)
+ // ^^^ &'static dyn Fn(f64, f64) -> u32
let foo = foo5();
let foo = foo6();
let foo = foo7();
diff --git a/crates/ide/src/inlay_hints/bounds.rs b/crates/ide/src/inlay_hints/bounds.rs
index b9a98f88be..f0003dae3f 100644
--- a/crates/ide/src/inlay_hints/bounds.rs
+++ b/crates/ide/src/inlay_hints/bounds.rs
@@ -143,7 +143,7 @@ fn foo<T>() {}
file_id: FileId(
1,
),
- range: 135..140,
+ range: 446..451,
},
),
),
diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs
index ca3a982760..05253b6794 100644
--- a/crates/ide/src/inlay_hints/closing_brace.rs
+++ b/crates/ide/src/inlay_hints/closing_brace.rs
@@ -91,8 +91,6 @@ pub(super) fn hints(
match_ast! {
match parent {
ast::Fn(it) => {
- // FIXME: this could include parameters, but `HirDisplay` prints too much info
- // and doesn't respect the max length either, so the hints end up way too long
(format!("fn {}", it.name()?), it.name().map(name))
},
ast::Static(it) => (format!("static {}", it.name()?), it.name().map(name)),
@@ -193,7 +191,7 @@ impl Tr for () {
//^ impl Tr for ()
impl dyn Tr {
}
-//^ impl dyn Tr + 'static
+//^ impl dyn Tr
static S0: () = 0;
static S1: () = {};
diff --git a/crates/ide/src/inlay_hints/implicit_drop.rs b/crates/ide/src/inlay_hints/implicit_drop.rs
index bf4688e9d8..d0539abe28 100644
--- a/crates/ide/src/inlay_hints/implicit_drop.rs
+++ b/crates/ide/src/inlay_hints/implicit_drop.rs
@@ -92,7 +92,7 @@ pub(super) fn hints(
},
MirSpan::Unknown => continue,
};
- let binding = &hir.bindings[binding_idx];
+ let binding = &hir[binding_idx];
let name = binding.name.display_no_db(display_target.edition).to_smolstr();
if name.starts_with("<ra@") {
continue; // Ignore desugared variables
diff --git a/crates/ide/src/inlay_hints/implied_dyn_trait.rs b/crates/ide/src/inlay_hints/implied_dyn_trait.rs
index 32d130503a..0da1785234 100644
--- a/crates/ide/src/inlay_hints/implied_dyn_trait.rs
+++ b/crates/ide/src/inlay_hints/implied_dyn_trait.rs
@@ -17,19 +17,28 @@ pub(super) fn hints(
let parent = path.syntax().parent()?;
let range = match path {
Either::Left(path) => {
- let paren =
- parent.ancestors().take_while(|it| ast::ParenType::can_cast(it.kind())).last();
+ let paren = parent
+ .ancestors()
+ .take_while(|it| {
+ ast::ParenType::can_cast(it.kind()) || ast::ForType::can_cast(it.kind())
+ })
+ .last();
let parent = paren.as_ref().and_then(|it| it.parent()).unwrap_or(parent);
if ast::TypeBound::can_cast(parent.kind())
|| ast::TypeAnchor::can_cast(parent.kind())
- || ast::Impl::cast(parent)
- .and_then(|it| it.trait_())
- .is_some_and(|it| it.syntax() == path.syntax())
+ || ast::Impl::cast(parent).is_some_and(|it| {
+ it.trait_().map_or(
+ // only show it for impl type if the impl is not incomplete, otherwise we
+ // are likely typing a trait impl
+ it.assoc_item_list().is_none_or(|it| it.l_curly_token().is_none()),
+ |trait_| trait_.syntax() == path.syntax(),
+ )
+ })
{
return None;
}
sema.resolve_trait(&path.path()?)?;
- paren.map_or_else(|| path.syntax().text_range(), |it| it.text_range())
+ path.syntax().text_range()
}
Either::Right(dyn_) => {
if dyn_.dyn_token().is_some() {
@@ -84,7 +93,8 @@ fn foo(_: &T, _: for<'a> T) {}
impl T {}
// ^ dyn
impl T for (T) {}
- // ^^^ dyn
+ // ^ dyn
+impl T
"#,
);
}
@@ -106,7 +116,7 @@ fn foo(
_: &mut (T + T)
// ^^^^^ dyn
_: *mut (T),
- // ^^^ dyn
+ // ^ dyn
) {}
"#,
);
@@ -130,4 +140,26 @@ fn foo(
"#]],
);
}
+
+ #[test]
+ fn hrtb_bound_does_not_add_dyn() {
+ check(
+ r#"
+//- minicore: fn
+fn test<F>(f: F) where F: for<'a> FnOnce(&'a i32) {}
+ // ^: Sized
+ "#,
+ );
+ }
+
+ #[test]
+ fn with_parentheses() {
+ check(
+ r#"
+trait T {}
+fn foo(v: &(T)) {}
+ // ^ dyn
+ "#,
+ );
+ }
}
diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs
index 5ff9fee60a..5174228466 100644
--- a/crates/ide/src/inlay_hints/param_name.rs
+++ b/crates/ide/src/inlay_hints/param_name.rs
@@ -87,10 +87,10 @@ pub(super) fn hints(
Some(())
}
-fn get_callable(
- sema: &Semantics<'_, RootDatabase>,
+fn get_callable<'db>(
+ sema: &Semantics<'db, RootDatabase>,
expr: &ast::Expr,
-) -> Option<(hir::Callable, ast::ArgList)> {
+) -> Option<(hir::Callable<'db>, ast::ArgList)> {
match expr {
ast::Expr::CallExpr(expr) => {
let descended = sema.descend_node_into_attributes(expr.clone()).pop();
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 82dbcde4c0..b3b8deb61f 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -409,7 +409,7 @@ impl Analysis {
self.with_db(|db| typing::on_enter(db, position))
}
- pub const SUPPORTED_TRIGGER_CHARS: &'static str = typing::TRIGGER_CHARS;
+ pub const SUPPORTED_TRIGGER_CHARS: &[char] = typing::TRIGGER_CHARS;
/// Returns an edit which should be applied after a character was typed.
///
@@ -421,7 +421,7 @@ impl Analysis {
char_typed: char,
) -> Cancellable<Option<SourceChange>> {
// Fast path to not even parse the file.
- if !typing::TRIGGER_CHARS.contains(char_typed) {
+ if !typing::TRIGGER_CHARS.contains(&char_typed) {
return Ok(None);
}
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs
index 9334b73fc7..7dc18141bd 100644
--- a/crates/ide/src/navigation_target.rs
+++ b/crates/ide/src/navigation_target.rs
@@ -844,7 +844,7 @@ pub(crate) fn orig_range_with_focus_r(
// *should* contain the name
_ => {
let kind = call_kind();
- let range = kind.clone().original_call_range_with_body(db);
+ let range = kind.clone().original_call_range_with_input(db);
//If the focus range is in the attribute/derive body, we
// need to point the call site to the entire body, if not, fall back
// to the name range of the attribute/derive call
@@ -867,7 +867,7 @@ pub(crate) fn orig_range_with_focus_r(
}
// def site name
- // FIXME: This can be de improved
+ // FIXME: This can be improved
Some((focus_range, _ctxt)) => {
match value_range {
// but overall node is in macro input
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index c6a323d408..fe874bc99b 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -21,6 +21,7 @@ use hir::{PathResolution, Semantics};
use ide_db::{
FileId, RootDatabase,
defs::{Definition, NameClass, NameRefClass},
+ helpers::pick_best_token,
search::{ReferenceCategory, SearchScope, UsageSearchResult},
};
use itertools::Itertools;
@@ -397,7 +398,11 @@ fn handle_control_flow_keywords(
.attach_first_edition(file_id)
.map(|it| it.edition(sema.db))
.unwrap_or(Edition::CURRENT);
- let token = file.syntax().token_at_offset(offset).find(|t| t.kind().is_keyword(edition))?;
+ let token = pick_best_token(file.syntax().token_at_offset(offset), |kind| match kind {
+ _ if kind.is_keyword(edition) => 4,
+ T![=>] => 3,
+ _ => 1,
+ })?;
let references = match token.kind() {
T![fn] | T![return] | T![try] => highlight_related::highlight_exit_points(sema, token),
@@ -408,6 +413,7 @@ fn handle_control_flow_keywords(
T![for] if token.parent().and_then(ast::ForExpr::cast).is_some() => {
highlight_related::highlight_break_points(sema, token)
}
+ T![if] | T![=>] | T![match] => highlight_related::highlight_branch_exit_points(sema, token),
_ => return None,
}
.into_iter()
@@ -1344,6 +1350,159 @@ impl Foo {
);
}
+ #[test]
+ fn test_highlight_if_branches() {
+ check(
+ r#"
+fn main() {
+ let x = if$0 true {
+ 1
+ } else if false {
+ 2
+ } else {
+ 3
+ };
+
+ println!("x: {}", x);
+}
+"#,
+ expect![[r#"
+ FileId(0) 24..26
+ FileId(0) 42..43
+ FileId(0) 55..57
+ FileId(0) 74..75
+ FileId(0) 97..98
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_highlight_match_branches() {
+ check(
+ r#"
+fn main() {
+ $0match Some(42) {
+ Some(x) if x > 0 => println!("positive"),
+ Some(0) => println!("zero"),
+ Some(_) => println!("negative"),
+ None => println!("none"),
+ };
+}
+"#,
+ expect![[r#"
+ FileId(0) 16..21
+ FileId(0) 61..81
+ FileId(0) 102..118
+ FileId(0) 139..159
+ FileId(0) 177..193
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_highlight_match_arm_arrow() {
+ check(
+ r#"
+fn main() {
+ match Some(42) {
+ Some(x) if x > 0 $0=> println!("positive"),
+ Some(0) => println!("zero"),
+ Some(_) => println!("negative"),
+ None => println!("none"),
+ }
+}
+"#,
+ expect![[r#"
+ FileId(0) 58..60
+ FileId(0) 61..81
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_highlight_nested_branches() {
+ check(
+ r#"
+fn main() {
+ let x = $0if true {
+ if false {
+ 1
+ } else {
+ match Some(42) {
+ Some(_) => 2,
+ None => 3,
+ }
+ }
+ } else {
+ 4
+ };
+
+ println!("x: {}", x);
+}
+"#,
+ expect![[r#"
+ FileId(0) 24..26
+ FileId(0) 65..66
+ FileId(0) 140..141
+ FileId(0) 167..168
+ FileId(0) 215..216
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_highlight_match_with_complex_guards() {
+ check(
+ r#"
+fn main() {
+ let x = $0match (x, y) {
+ (a, b) if a > b && a % 2 == 0 => 1,
+ (a, b) if a < b || b % 2 == 1 => 2,
+ (a, _) if a > 40 => 3,
+ _ => 4,
+ };
+
+ println!("x: {}", x);
+}
+"#,
+ expect![[r#"
+ FileId(0) 24..29
+ FileId(0) 80..81
+ FileId(0) 124..125
+ FileId(0) 155..156
+ FileId(0) 171..172
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_highlight_mixed_if_match_expressions() {
+ check(
+ r#"
+fn main() {
+ let x = $0if let Some(x) = Some(42) {
+ 1
+ } else if let None = None {
+ 2
+ } else {
+ match 42 {
+ 0 => 3,
+ _ => 4,
+ }
+ };
+}
+"#,
+ expect![[r#"
+ FileId(0) 24..26
+ FileId(0) 60..61
+ FileId(0) 73..75
+ FileId(0) 102..103
+ FileId(0) 153..154
+ FileId(0) 173..174
+ "#]],
+ );
+ }
+
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
check_with_scope(ra_fixture, None, expect)
}
@@ -2867,4 +3026,66 @@ const FOO$0: i32 = 0;
"#]],
);
}
+
+ #[test]
+ fn test_highlight_if_let_match_combined() {
+ check(
+ r#"
+enum MyEnum { A(i32), B(String), C }
+
+fn main() {
+ let val = MyEnum::A(42);
+
+ let x = $0if let MyEnum::A(x) = val {
+ 1
+ } else if let MyEnum::B(s) = val {
+ 2
+ } else {
+ match val {
+ MyEnum::C => 3,
+ _ => 4,
+ }
+ };
+}
+"#,
+ expect![[r#"
+ FileId(0) 92..94
+ FileId(0) 128..129
+ FileId(0) 141..143
+ FileId(0) 177..178
+ FileId(0) 237..238
+ FileId(0) 257..258
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_highlight_nested_match_expressions() {
+ check(
+ r#"
+enum Outer { A(Inner), B }
+enum Inner { X, Y(i32) }
+
+fn main() {
+ let val = Outer::A(Inner::Y(42));
+
+ $0match val {
+ Outer::A(inner) => match inner {
+ Inner::X => println!("Inner::X"),
+ Inner::Y(n) if n > 0 => println!("Inner::Y positive: {}", n),
+ Inner::Y(_) => println!("Inner::Y non-positive"),
+ },
+ Outer::B => println!("Outer::B"),
+ }
+}
+"#,
+ expect![[r#"
+ FileId(0) 108..113
+ FileId(0) 185..205
+ FileId(0) 243..279
+ FileId(0) 308..341
+ FileId(0) 374..394
+ "#]],
+ );
+ }
}
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index ab13960240..9d1a5bae96 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -5,11 +5,11 @@ use ast::HasName;
use cfg::{CfgAtom, CfgExpr};
use hir::{
AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, ModPath, Name, PathKind, Semantics,
- Symbol, db::HirDatabase, sym, symbols::FxIndexSet,
+ Symbol, db::HirDatabase, sym,
};
use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
use ide_db::{
- FilePosition, FxHashMap, FxIndexMap, RootDatabase, SymbolKind,
+ FilePosition, FxHashMap, FxIndexMap, FxIndexSet, RootDatabase, SymbolKind,
base_db::RootQueryDb,
defs::Definition,
documentation::docs_from_attrs,
@@ -351,7 +351,7 @@ pub(crate) fn runnable_fn(
)
.call_site();
- let file_range = fn_source.syntax().original_file_range_with_macro_call_body(sema.db);
+ let file_range = fn_source.syntax().original_file_range_with_macro_call_input(sema.db);
let update_test =
UpdateTest::find_snapshot_macro(sema, &fn_source.file_syntax(sema.db), file_range);
@@ -425,7 +425,7 @@ pub(crate) fn runnable_impl(
let impl_source = sema.source(*def)?;
let impl_syntax = impl_source.syntax();
- let file_range = impl_syntax.original_file_range_with_macro_call_body(sema.db);
+ let file_range = impl_syntax.original_file_range_with_macro_call_input(sema.db);
let update_test =
UpdateTest::find_snapshot_macro(sema, &impl_syntax.file_syntax(sema.db), file_range);
@@ -1241,10 +1241,10 @@ generate_main!();
[
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..345, name: \"\", kind: Module })",
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 282..312, focus_range: 286..291, name: \"tests\", kind: Module, description: \"mod tests\" })",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..310, name: \"foo_test\", kind: Function })",
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..326, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 313..326, name: \"foo_test2\", kind: Function }, true)",
- "(Bin, NavigationTarget { file_id: FileId(0), full_range: 327..344, name: \"main\", kind: Function })",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..307, name: \"foo_test\", kind: Function })",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"foo_test2\", kind: Function }, true)",
+ "(Bin, NavigationTarget { file_id: FileId(0), full_range: 327..341, name: \"main\", kind: Function })",
]
"#]],
);
@@ -1272,10 +1272,10 @@ foo!();
"#,
expect![[r#"
[
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo0\", kind: Function }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo1\", kind: Function }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo2\", kind: Function }, true)",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo0\", kind: Function }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo1\", kind: Function }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo2\", kind: Function }, true)",
]
"#]],
);
diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs
index 0e17b35590..e30a3ebefb 100644
--- a/crates/ide/src/signature_help.rs
+++ b/crates/ide/src/signature_help.rs
@@ -278,7 +278,7 @@ fn signature_help_for_call(
}
res.signature.push(')');
- let mut render = |ret_type: hir::Type| {
+ let mut render = |ret_type: hir::Type<'_>| {
if !ret_type.is_unit() {
format_to!(res.signature, " -> {}", ret_type.display(db, display_target));
}
@@ -597,11 +597,11 @@ fn signature_help_for_tuple_expr(
Some(res)
}
-fn signature_help_for_record_(
- sema: &Semantics<'_, RootDatabase>,
+fn signature_help_for_record_<'db>(
+ sema: &Semantics<'db, RootDatabase>,
field_list_children: SyntaxElementChildren,
path: &ast::Path,
- fields2: impl Iterator<Item = (hir::Field, hir::Type)>,
+ fields2: impl Iterator<Item = (hir::Field, hir::Type<'db>)>,
token: SyntaxToken,
edition: Edition,
display_target: DisplayTarget,
@@ -689,13 +689,13 @@ fn signature_help_for_record_(
Some(res)
}
-fn signature_help_for_tuple_pat_ish(
- db: &RootDatabase,
+fn signature_help_for_tuple_pat_ish<'db>(
+ db: &'db RootDatabase,
mut res: SignatureHelp,
pat: &SyntaxNode,
token: SyntaxToken,
mut field_pats: AstChildren<ast::Pat>,
- fields: impl ExactSizeIterator<Item = hir::Type>,
+ fields: impl ExactSizeIterator<Item = hir::Type<'db>>,
display_target: DisplayTarget,
) -> SignatureHelp {
let rest_pat = field_pats.find(|it| matches!(it, ast::Pat::RestPat(_)));
diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs
index 4df7e25223..ed55ac5bf0 100644
--- a/crates/ide/src/typing.rs
+++ b/crates/ide/src/typing.rs
@@ -15,6 +15,7 @@
mod on_enter;
+use either::Either;
use hir::EditionedFileId;
use ide_db::{FilePosition, RootDatabase, base_db::RootQueryDb};
use span::Edition;
@@ -33,7 +34,7 @@ use crate::SourceChange;
pub(crate) use on_enter::on_enter;
// Don't forget to add new trigger characters to `server_capabilities` in `caps.rs`.
-pub(crate) const TRIGGER_CHARS: &str = ".=<>{(|";
+pub(crate) const TRIGGER_CHARS: &[char] = &['.', '=', '<', '>', '{', '(', '|', '+'];
struct ExtendedTextEdit {
edit: TextEdit,
@@ -66,7 +67,7 @@ pub(crate) fn on_char_typed(
position: FilePosition,
char_typed: char,
) -> Option<SourceChange> {
- if !stdx::always!(TRIGGER_CHARS.contains(char_typed)) {
+ if !TRIGGER_CHARS.contains(&char_typed) {
return None;
}
// FIXME: We need to figure out the edition of the file here, but that means hitting the
@@ -101,6 +102,7 @@ fn on_char_typed_(
'>' => on_right_angle_typed(&file.tree(), offset),
'{' | '(' | '<' => on_opening_delimiter_typed(file, offset, char_typed, edition),
'|' => on_pipe_typed(&file.tree(), offset),
+ '+' => on_plus_typed(&file.tree(), offset),
_ => None,
}
.map(conv)
@@ -402,6 +404,28 @@ fn on_pipe_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
Some(TextEdit::insert(after_lpipe, "|".to_owned()))
}
+fn on_plus_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
+ let plus_token = file.syntax().token_at_offset(offset).right_biased()?;
+ if plus_token.kind() != SyntaxKind::PLUS {
+ return None;
+ }
+ let mut ancestors = plus_token.parent_ancestors();
+ ancestors.next().and_then(ast::TypeBoundList::cast)?;
+ let trait_type =
+ ancestors.next().and_then(<Either<ast::DynTraitType, ast::ImplTraitType>>::cast)?;
+ let kind = ancestors.next()?.kind();
+
+ if ast::RefType::can_cast(kind) || ast::PtrType::can_cast(kind) || ast::RetType::can_cast(kind)
+ {
+ let mut builder = TextEdit::builder();
+ builder.insert(trait_type.syntax().text_range().start(), "(".to_owned());
+ builder.insert(trait_type.syntax().text_range().end(), ")".to_owned());
+ Some(builder.finish())
+ } else {
+ None
+ }
+}
+
/// Adds a space after an arrow when `fn foo() { ... }` is turned into `fn foo() -> { ... }`
fn on_right_angle_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
let file_text = file.syntax().text();
@@ -1597,4 +1621,64 @@ fn foo() {
"#,
);
}
+
+ #[test]
+ fn adds_parentheses_around_trait_object_in_ref_type() {
+ type_char(
+ '+',
+ r#"
+fn foo(x: &dyn A$0) {}
+"#,
+ r#"
+fn foo(x: &(dyn A+)) {}
+"#,
+ );
+ type_char(
+ '+',
+ r#"
+fn foo(x: &'static dyn A$0B) {}
+"#,
+ r#"
+fn foo(x: &'static (dyn A+B)) {}
+"#,
+ );
+ type_char_noop(
+ '+',
+ r#"
+fn foo(x: &(dyn A$0)) {}
+"#,
+ );
+ type_char_noop(
+ '+',
+ r#"
+fn foo(x: Box<dyn A$0>) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn adds_parentheses_around_trait_object_in_ptr_type() {
+ type_char(
+ '+',
+ r#"
+fn foo(x: *const dyn A$0) {}
+"#,
+ r#"
+fn foo(x: *const (dyn A+)) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn adds_parentheses_around_trait_object_in_return_type() {
+ type_char(
+ '+',
+ r#"
+fn foo(x: fn() -> dyn A$0) {}
+"#,
+ r#"
+fn foo(x: fn() -> (dyn A+)) {}
+"#,
+ );
+ }
}
diff --git a/crates/ide/src/view_crate_graph.rs b/crates/ide/src/view_crate_graph.rs
index 7985279679..25deffe10e 100644
--- a/crates/ide/src/view_crate_graph.rs
+++ b/crates/ide/src/view_crate_graph.rs
@@ -79,7 +79,7 @@ impl<'a> dot::Labeller<'a, Crate, Edge<'a>> for DotCrateGraph<'_> {
}
fn node_id(&'a self, n: &Crate) -> Id<'a> {
- let id = n.as_id().as_u32();
+ let id = n.as_id().index();
Id::new(format!("_{id:?}")).unwrap()
}
diff --git a/crates/ide/src/view_memory_layout.rs b/crates/ide/src/view_memory_layout.rs
index 140ae4265b..63701a4d15 100644
--- a/crates/ide/src/view_memory_layout.rs
+++ b/crates/ide/src/view_memory_layout.rs
@@ -107,7 +107,7 @@ pub(crate) fn view_memory_layout(
fn read_layout(
nodes: &mut Vec<MemoryLayoutNode>,
db: &RootDatabase,
- ty: &Type,
+ ty: &Type<'_>,
layout: &Layout,
parent_idx: usize,
display_target: DisplayTarget,
diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml
index 9ff656cb74..81b6703dee 100644
--- a/crates/intern/Cargo.toml
+++ b/crates/intern/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
diff --git a/crates/intern/src/symbol/symbols.rs b/crates/intern/src/symbol/symbols.rs
index fc922dd849..1ccd20c25e 100644
--- a/crates/intern/src/symbol/symbols.rs
+++ b/crates/intern/src/symbol/symbols.rs
@@ -222,6 +222,7 @@ define_symbols! {
fn_once_output,
fn_once,
async_fn_once,
+ async_fn_once_output,
async_fn_mut,
async_fn,
fn_ptr_addr,
@@ -437,6 +438,8 @@ define_symbols! {
shr,
simd,
sized,
+ meta_sized,
+ pointee_sized,
skip,
slice_len_fn,
Some,
@@ -495,6 +498,7 @@ define_symbols! {
vectorcall,
wasm,
win64,
+ args,
array,
boxed_slice,
completions,
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 30e2d5416c..26ee698af0 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -2,7 +2,7 @@
//! for incorporating changes.
// Note, don't remove any public api from this. This API is consumed by external tools
// to run rust-analyzer as a library.
-use std::{collections::hash_map::Entry, mem, path::Path, sync};
+use std::{any::Any, collections::hash_map::Entry, mem, path::Path, sync};
use crossbeam_channel::{Receiver, unbounded};
use hir_expand::proc_macro::{
@@ -11,7 +11,7 @@ use hir_expand::proc_macro::{
};
use ide_db::{
ChangeWithProcMacros, FxHashMap, RootDatabase,
- base_db::{CrateGraphBuilder, Env, SourceRoot, SourceRootId},
+ base_db::{CrateGraphBuilder, Env, ProcMacroLoadingError, SourceRoot, SourceRootId},
prime_caches,
};
use itertools::Itertools;
@@ -42,7 +42,7 @@ pub fn load_workspace_at(
root: &Path,
cargo_config: &CargoConfig,
load_config: &LoadCargoConfig,
- progress: &dyn Fn(String),
+ progress: &(dyn Fn(String) + Sync),
) -> anyhow::Result<(RootDatabase, vfs::Vfs, Option<ProcMacroClient>)> {
let root = AbsPathBuf::assert_utf8(std::env::current_dir()?.join(root));
let root = ProjectManifest::discover_single(&root)?;
@@ -69,6 +69,23 @@ pub fn load_workspace(
extra_env: &FxHashMap<String, Option<String>>,
load_config: &LoadCargoConfig,
) -> anyhow::Result<(RootDatabase, vfs::Vfs, Option<ProcMacroClient>)> {
+ let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<u16>().ok());
+ let mut db = RootDatabase::new(lru_cap);
+
+ let (vfs, proc_macro_server) = load_workspace_into_db(ws, extra_env, load_config, &mut db)?;
+
+ Ok((db, vfs, proc_macro_server))
+}
+
+// This variant of `load_workspace` allows deferring the loading of rust-analyzer
+// into an existing database, which is useful in certain third-party scenarios,
+// now that `salsa` supports extending foreign databases (e.g. `RootDatabase`).
+pub fn load_workspace_into_db(
+ ws: ProjectWorkspace,
+ extra_env: &FxHashMap<String, Option<String>>,
+ load_config: &LoadCargoConfig,
+ db: &mut RootDatabase,
+) -> anyhow::Result<(vfs::Vfs, Option<ProcMacroClient>)> {
let (sender, receiver) = unbounded();
let mut vfs = vfs::Vfs::default();
let mut loader = {
@@ -78,23 +95,27 @@ pub fn load_workspace(
tracing::debug!(?load_config, "LoadCargoConfig");
let proc_macro_server = match &load_config.with_proc_macro_server {
- ProcMacroServerChoice::Sysroot => ws
- .find_sysroot_proc_macro_srv()
- .and_then(|it| ProcMacroClient::spawn(&it, extra_env).map_err(Into::into))
- .map_err(|e| (e, true)),
+ ProcMacroServerChoice::Sysroot => ws.find_sysroot_proc_macro_srv().map(|it| {
+ it.and_then(|it| ProcMacroClient::spawn(&it, extra_env).map_err(Into::into)).map_err(
+ |e| ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str()),
+ )
+ }),
ProcMacroServerChoice::Explicit(path) => {
- ProcMacroClient::spawn(path, extra_env).map_err(Into::into).map_err(|e| (e, true))
- }
- ProcMacroServerChoice::None => {
- Err((anyhow::format_err!("proc macro server disabled"), false))
+ Some(ProcMacroClient::spawn(path, extra_env).map_err(|e| {
+ ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str())
+ }))
}
+ ProcMacroServerChoice::None => Some(Err(ProcMacroLoadingError::Disabled)),
};
match &proc_macro_server {
- Ok(server) => {
- tracing::info!(path=%server.server_path(), "Proc-macro server started")
+ Some(Ok(server)) => {
+ tracing::info!(manifest=%ws.manifest_or_root(), path=%server.server_path(), "Proc-macro server started")
+ }
+ Some(Err(e)) => {
+ tracing::info!(manifest=%ws.manifest_or_root(), %e, "Failed to start proc-macro server")
}
- Err((e, _)) => {
- tracing::info!(%e, "Failed to start proc-macro server")
+ None => {
+ tracing::info!(manifest=%ws.manifest_or_root(), "No proc-macro server started")
}
}
@@ -111,22 +132,24 @@ pub fn load_workspace(
);
let proc_macros = {
let proc_macro_server = match &proc_macro_server {
- Ok(it) => Ok(it),
- Err((e, hard_err)) => Err((e.to_string(), *hard_err)),
+ Some(Ok(it)) => Ok(it),
+ Some(Err(e)) => {
+ Err(ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str()))
+ }
+ None => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ "proc-macro-srv is not running, workspace is missing a sysroot".into(),
+ )),
};
proc_macros
.into_iter()
.map(|(crate_id, path)| {
(
crate_id,
- path.map_or_else(
- |e| Err((e, true)),
- |(_, path)| {
- proc_macro_server.as_ref().map_err(Clone::clone).and_then(
- |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
- )
- },
- ),
+ path.map_or_else(Err, |(_, path)| {
+ proc_macro_server.as_ref().map_err(Clone::clone).and_then(
+ |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
+ )
+ }),
)
})
.collect()
@@ -139,18 +162,20 @@ pub fn load_workspace(
version: 0,
});
- let db = load_crate_graph(
+ load_crate_graph_into_db(
crate_graph,
proc_macros,
project_folders.source_root_config,
&mut vfs,
&receiver,
+ db,
);
if load_config.prefill_caches {
- prime_caches::parallel_prime_caches(&db, 1, &|_| ());
+ prime_caches::parallel_prime_caches(db, 1, &|_| ());
}
- Ok((db, vfs, proc_macro_server.ok()))
+
+ Ok((vfs, proc_macro_server.and_then(Result::ok)))
}
#[derive(Default)]
@@ -391,11 +416,13 @@ pub fn load_proc_macro(
path: &AbsPath,
ignored_macros: &[Box<str>],
) -> ProcMacroLoadResult {
- let res: Result<Vec<_>, String> = (|| {
+ let res: Result<Vec<_>, _> = (|| {
let dylib = MacroDylib::new(path.to_path_buf());
- let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
+ let vec = server.load_dylib(dylib).map_err(|e| {
+ ProcMacroLoadingError::ProcMacroSrvError(format!("{e}").into_boxed_str())
+ })?;
if vec.is_empty() {
- return Err("proc macro library returned no proc macros".to_owned());
+ return Err(ProcMacroLoadingError::NoProcMacros);
}
Ok(vec
.into_iter()
@@ -412,20 +439,19 @@ pub fn load_proc_macro(
}
Err(e) => {
tracing::warn!("proc-macro loading for {path} failed: {e}");
- Err((e, true))
+ Err(e)
}
}
}
-fn load_crate_graph(
+fn load_crate_graph_into_db(
crate_graph: CrateGraphBuilder,
proc_macros: ProcMacrosBuilder,
source_root_config: SourceRootConfig,
vfs: &mut vfs::Vfs,
receiver: &Receiver<vfs::loader::Message>,
-) -> RootDatabase {
- let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<u16>().ok());
- let mut db = RootDatabase::new(lru_cap);
+ db: &mut RootDatabase,
+) {
let mut analysis_change = ChangeWithProcMacros::default();
db.enable_proc_attr_macros();
@@ -462,7 +488,6 @@ fn load_crate_graph(
analysis_change.set_proc_macros(proc_macros);
db.apply_change(analysis_change);
- db
}
fn expander_to_proc_macro(
@@ -512,6 +537,10 @@ impl ProcMacroExpander for Expander {
Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
}
}
+
+ fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+ (other as &dyn Any).downcast_ref::<Self>() == Some(self)
+ }
}
#[cfg(test)]
diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml
index f3ab093bae..eef718b706 100644
--- a/crates/mbe/Cargo.toml
+++ b/crates/mbe/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
cov-mark = "2.0.0"
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index db75dceae1..04ac85ad43 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -197,6 +197,10 @@ fn invocation_fixtures(
builder.push(tt::Leaf::Punct(*it))
}
}
+ Separator::Lifetime(punct, ident) => {
+ builder.push(tt::Leaf::Punct(*punct));
+ builder.push(tt::Leaf::Ident(ident.clone()));
+ }
};
}
}
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs
index 940aaacb02..a8d5965d48 100644
--- a/crates/mbe/src/expander/matcher.rs
+++ b/crates/mbe/src/expander/matcher.rs
@@ -823,7 +823,7 @@ fn match_meta_var<'t>(
"expected token tree",
)
}),
- MetaVarKind::Lifetime => expect_lifetime(input).map_err(|()| {
+ MetaVarKind::Lifetime => expect_lifetime(input).map(drop).map_err(|()| {
ExpandError::binding_error(
span.unwrap_or(delim_span.close),
"expected lifetime",
@@ -963,6 +963,10 @@ fn expect_separator<S: Copy>(iter: &mut TtIter<'_, S>, separator: &Separator) ->
}
Err(_) => false,
},
+ Separator::Lifetime(_punct, ident) => match expect_lifetime(&mut fork) {
+ Ok(lifetime) => lifetime.sym == ident.sym,
+ Err(_) => false,
+ },
};
if ok {
*iter = fork;
@@ -983,13 +987,12 @@ fn expect_tt<S: Copy>(iter: &mut TtIter<'_, S>) -> Result<(), ()> {
Ok(())
}
-fn expect_lifetime<S: Copy>(iter: &mut TtIter<'_, S>) -> Result<(), ()> {
+fn expect_lifetime<'a, S: Copy>(iter: &mut TtIter<'a, S>) -> Result<&'a tt::Ident<S>, ()> {
let punct = iter.expect_single_punct()?;
if punct.char != '\'' {
return Err(());
}
- iter.expect_ident_or_underscore()?;
- Ok(())
+ iter.expect_ident_or_underscore()
}
fn eat_char<S: Copy>(iter: &mut TtIter<'_, S>, c: char) {
diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs
index ec277ba72e..2c046df10f 100644
--- a/crates/mbe/src/expander/transcriber.rs
+++ b/crates/mbe/src/expander/transcriber.rs
@@ -497,6 +497,10 @@ fn expand_repeat(
builder.push(tt::Leaf::from(punct));
}
}
+ Separator::Lifetime(punct, ident) => {
+ builder.push(tt::Leaf::from(*punct));
+ builder.push(tt::Leaf::from(ident.clone()));
+ }
};
}
diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs
index fbc353d610..711101260a 100644
--- a/crates/mbe/src/parser.rs
+++ b/crates/mbe/src/parser.rs
@@ -155,6 +155,7 @@ pub(crate) enum Separator {
Literal(tt::Literal<Span>),
Ident(tt::Ident<Span>),
Puncts(ArrayVec<tt::Punct<Span>, MAX_GLUED_PUNCT_LEN>),
+ Lifetime(tt::Punct<Span>, tt::Ident<Span>),
}
// Note that when we compare a Separator, we just care about its textual value.
@@ -170,6 +171,7 @@ impl PartialEq for Separator {
let b_iter = b.iter().map(|b| b.char);
a_iter.eq(b_iter)
}
+ (Lifetime(_, a), Lifetime(_, b)) => a.sym == b.sym,
_ => false,
}
}
@@ -350,10 +352,19 @@ fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, Repeat
_ => true,
};
match tt {
- tt::Leaf::Ident(_) | tt::Leaf::Literal(_) if has_sep => {
- return Err(ParseError::InvalidRepeat);
- }
- tt::Leaf::Ident(ident) => separator = Separator::Ident(ident.clone()),
+ tt::Leaf::Ident(ident) => match separator {
+ Separator::Puncts(puncts) if puncts.is_empty() => {
+ separator = Separator::Ident(ident.clone());
+ }
+ Separator::Puncts(puncts) => match puncts.as_slice() {
+ [tt::Punct { char: '\'', .. }] => {
+ separator = Separator::Lifetime(puncts[0], ident.clone());
+ }
+ _ => return Err(ParseError::InvalidRepeat),
+ },
+ _ => return Err(ParseError::InvalidRepeat),
+ },
+ tt::Leaf::Literal(_) if has_sep => return Err(ParseError::InvalidRepeat),
tt::Leaf::Literal(lit) => separator = Separator::Literal(lit.clone()),
tt::Leaf::Punct(punct) => {
let repeat_kind = match punct.char {
diff --git a/crates/mbe/src/tests.rs b/crates/mbe/src/tests.rs
index 769455faac..56034516ef 100644
--- a/crates/mbe/src/tests.rs
+++ b/crates/mbe/src/tests.rs
@@ -3,7 +3,9 @@
// FIXME: Move more of the nameres independent tests from
// crates\hir-def\src\macro_expansion_tests\mod.rs to this
use expect_test::expect;
-use span::{Edition, EditionedFileId, ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContext};
+use span::{
+ Edition, EditionedFileId, FileId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext,
+};
use stdx::format_to;
use tt::{TextRange, TextSize};
@@ -24,7 +26,7 @@ fn check_(
def_edition,
SpanAnchor {
file_id: EditionedFileId::new(FileId::from_raw(0), def_edition),
- ast_id: ErasedFileAstId::from_raw(0),
+ ast_id: ROOT_ERASED_FILE_AST_ID,
},
SyntaxContext::root(Edition::CURRENT),
decl,
@@ -37,7 +39,7 @@ fn check_(
};
let call_anchor = SpanAnchor {
file_id: EditionedFileId::new(FileId::from_raw(1), call_edition),
- ast_id: ErasedFileAstId::from_raw(0),
+ ast_id: ROOT_ERASED_FILE_AST_ID,
};
let arg_tt = syntax_bridge::parse_to_token_tree(
call_edition,
@@ -110,8 +112,8 @@ fn unbalanced_brace() {
"#,
r#""#,
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- SUBTREE {} 0:[email protected]#ROOT2024 0:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..0#ROOT2024 1:Root[0000, 0]@0..0#ROOT2024
+ SUBTREE {} 0:Root[0000, 0]@9..10#ROOT2024 0:Root[0000, 0]@11..12#ROOT2024
{}"#]],
);
@@ -133,25 +135,25 @@ fn token_mapping_smoke_test() {
struct MyTraitMap2
"#,
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- IDENT struct 0:[email protected]#ROOT2024
- IDENT MyTraitMap2 1:[email protected]#ROOT2024
- SUBTREE {} 0:[email protected]#ROOT2024 0:[email protected]#ROOT2024
- IDENT map 0:[email protected]#ROOT2024
- PUNCH : [alone] 0:[email protected]#ROOT2024
- PUNCH : [joint] 0:[email protected]#ROOT2024
- PUNCH : [alone] 0:[email protected]#ROOT2024
- IDENT std 0:[email protected]#ROOT2024
- PUNCH : [joint] 0:[email protected]#ROOT2024
- PUNCH : [alone] 0:[email protected]#ROOT2024
- IDENT collections 0:[email protected]#ROOT2024
- PUNCH : [joint] 0:[email protected]#ROOT2024
- PUNCH : [alone] 0:[email protected]#ROOT2024
- IDENT HashSet 0:[email protected]#ROOT2024
- PUNCH < [alone] 0:[email protected]#ROOT2024
- SUBTREE () 0:[email protected]#ROOT2024 0:[email protected]#ROOT2024
- PUNCH > [joint] 0:[email protected]#ROOT2024
- PUNCH , [alone] 0:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..20#ROOT2024 1:Root[0000, 0]@0..20#ROOT2024
+ IDENT struct 0:Root[0000, 0]@34..40#ROOT2024
+ IDENT MyTraitMap2 1:Root[0000, 0]@8..19#ROOT2024
+ SUBTREE {} 0:Root[0000, 0]@48..49#ROOT2024 0:Root[0000, 0]@100..101#ROOT2024
+ IDENT map 0:Root[0000, 0]@58..61#ROOT2024
+ PUNCH : [alone] 0:Root[0000, 0]@61..62#ROOT2024
+ PUNCH : [joint] 0:Root[0000, 0]@63..64#ROOT2024
+ PUNCH : [alone] 0:Root[0000, 0]@64..65#ROOT2024
+ IDENT std 0:Root[0000, 0]@65..68#ROOT2024
+ PUNCH : [joint] 0:Root[0000, 0]@68..69#ROOT2024
+ PUNCH : [alone] 0:Root[0000, 0]@69..70#ROOT2024
+ IDENT collections 0:Root[0000, 0]@70..81#ROOT2024
+ PUNCH : [joint] 0:Root[0000, 0]@81..82#ROOT2024
+ PUNCH : [alone] 0:Root[0000, 0]@82..83#ROOT2024
+ IDENT HashSet 0:Root[0000, 0]@83..90#ROOT2024
+ PUNCH < [alone] 0:Root[0000, 0]@90..91#ROOT2024
+ SUBTREE () 0:Root[0000, 0]@91..92#ROOT2024 0:Root[0000, 0]@92..93#ROOT2024
+ PUNCH > [joint] 0:Root[0000, 0]@93..94#ROOT2024
+ PUNCH , [alone] 0:Root[0000, 0]@94..95#ROOT2024
struct MyTraitMap2 {
map: ::std::collections::HashSet<()>,
@@ -180,28 +182,28 @@ fn main() {
}
"#,
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- IDENT fn 1:[email protected]#ROOT2024
- IDENT main 1:[email protected]#ROOT2024
- SUBTREE () 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- SUBTREE {} 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- LITERAL Integer 1 1:[email protected]#ROOT2024
- PUNCH ; [alone] 1:[email protected]#ROOT2024
- LITERAL Float 1.0 1:[email protected]#ROOT2024
- PUNCH ; [alone] 1:[email protected]#ROOT2024
- SUBTREE () 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- SUBTREE () 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- LITERAL Integer 1 1:[email protected]#ROOT2024
- PUNCH , [alone] 1:[email protected]#ROOT2024
- PUNCH , [alone] 1:[email protected]#ROOT2024
- PUNCH . [alone] 1:[email protected]#ROOT2024
- LITERAL Float 0.0 1:[email protected]#ROOT2024
- PUNCH ; [alone] 1:[email protected]#ROOT2024
- IDENT let 1:[email protected]#ROOT2024
- IDENT x 1:[email protected]#ROOT2024
- PUNCH = [alone] 1:[email protected]#ROOT2024
- LITERAL Integer 1 1:[email protected]#ROOT2024
- PUNCH ; [alone] 1:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..63#ROOT2024 1:Root[0000, 0]@0..63#ROOT2024
+ IDENT fn 1:Root[0000, 0]@1..3#ROOT2024
+ IDENT main 1:Root[0000, 0]@4..8#ROOT2024
+ SUBTREE () 1:Root[0000, 0]@8..9#ROOT2024 1:Root[0000, 0]@9..10#ROOT2024
+ SUBTREE {} 1:Root[0000, 0]@11..12#ROOT2024 1:Root[0000, 0]@61..62#ROOT2024
+ LITERAL Integer 1 1:Root[0000, 0]@17..18#ROOT2024
+ PUNCH ; [alone] 1:Root[0000, 0]@18..19#ROOT2024
+ LITERAL Float 1.0 1:Root[0000, 0]@24..27#ROOT2024
+ PUNCH ; [alone] 1:Root[0000, 0]@27..28#ROOT2024
+ SUBTREE () 1:Root[0000, 0]@33..34#ROOT2024 1:Root[0000, 0]@39..40#ROOT2024
+ SUBTREE () 1:Root[0000, 0]@34..35#ROOT2024 1:Root[0000, 0]@37..38#ROOT2024
+ LITERAL Integer 1 1:Root[0000, 0]@35..36#ROOT2024
+ PUNCH , [alone] 1:Root[0000, 0]@36..37#ROOT2024
+ PUNCH , [alone] 1:Root[0000, 0]@38..39#ROOT2024
+ PUNCH . [alone] 1:Root[0000, 0]@40..41#ROOT2024
+ LITERAL Float 0.0 1:Root[0000, 0]@41..44#ROOT2024
+ PUNCH ; [alone] 1:Root[0000, 0]@44..45#ROOT2024
+ IDENT let 1:Root[0000, 0]@50..53#ROOT2024
+ IDENT x 1:Root[0000, 0]@54..55#ROOT2024
+ PUNCH = [alone] 1:Root[0000, 0]@56..57#ROOT2024
+ LITERAL Integer 1 1:Root[0000, 0]@58..59#ROOT2024
+ PUNCH ; [alone] 1:Root[0000, 0]@59..60#ROOT2024
fn main(){
1;
@@ -227,14 +229,14 @@ fn expr_2021() {
const { 1 },
"#,
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- IDENT _ 1:[email protected]#ROOT2024
- PUNCH ; [joint] 0:[email protected]#ROOT2024
- SUBTREE () 0:[email protected]#ROOT2024 0:[email protected]#ROOT2024
- IDENT const 1:[email protected]#ROOT2024
- SUBTREE {} 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- LITERAL Integer 1 1:[email protected]#ROOT2024
- PUNCH ; [alone] 0:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..25#ROOT2024 1:Root[0000, 0]@0..25#ROOT2024
+ IDENT _ 1:Root[0000, 0]@5..6#ROOT2024
+ PUNCH ; [joint] 0:Root[0000, 0]@36..37#ROOT2024
+ SUBTREE () 0:Root[0000, 0]@34..35#ROOT2024 0:Root[0000, 0]@34..35#ROOT2024
+ IDENT const 1:Root[0000, 0]@12..17#ROOT2024
+ SUBTREE {} 1:Root[0000, 0]@18..19#ROOT2024 1:Root[0000, 0]@22..23#ROOT2024
+ LITERAL Integer 1 1:Root[0000, 0]@20..21#ROOT2024
+ PUNCH ; [alone] 0:Root[0000, 0]@39..40#ROOT2024
_;
(const {
@@ -255,13 +257,13 @@ fn expr_2021() {
expect![[r#"
ExpandError {
inner: (
- 1:[email protected]#ROOT2024,
+ 1:Root[0000, 0]@5..6#ROOT2024,
NoMatchingRule,
),
}
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- PUNCH ; [alone] 0:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..8#ROOT2024 1:Root[0000, 0]@0..8#ROOT2024
+ PUNCH ; [alone] 0:Root[0000, 0]@39..40#ROOT2024
;"#]],
);
@@ -279,13 +281,13 @@ fn expr_2021() {
expect![[r#"
ExpandError {
inner: (
- 1:[email protected]#ROOT2024,
+ 1:Root[0000, 0]@5..10#ROOT2024,
NoMatchingRule,
),
}
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- PUNCH ; [alone] 0:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..18#ROOT2024 1:Root[0000, 0]@0..18#ROOT2024
+ PUNCH ; [alone] 0:Root[0000, 0]@39..40#ROOT2024
;"#]],
);
@@ -305,26 +307,26 @@ fn expr_2021() {
break 'foo bar,
"#,
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- LITERAL Integer 4 1:[email protected]#ROOT2024
- PUNCH ; [joint] 0:[email protected]#ROOT2024
- LITERAL Str literal 1:[email protected]#ROOT2024
- PUNCH ; [joint] 0:[email protected]#ROOT2024
- SUBTREE () 0:[email protected]#ROOT2024 0:[email protected]#ROOT2024
- IDENT funcall 1:[email protected]#ROOT2024
- SUBTREE () 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- PUNCH ; [joint] 0:[email protected]#ROOT2024
- SUBTREE () 0:[email protected]#ROOT2024 0:[email protected]#ROOT2024
- IDENT future 1:[email protected]#ROOT2024
- PUNCH . [alone] 1:[email protected]#ROOT2024
- IDENT await 1:[email protected]#ROOT2024
- PUNCH ; [joint] 0:[email protected]#ROOT2024
- SUBTREE () 0:[email protected]#ROOT2024 0:[email protected]#ROOT2024
- IDENT break 1:[email protected]#ROOT2024
- PUNCH ' [joint] 1:[email protected]#ROOT2024
- IDENT foo 1:[email protected]#ROOT2024
- IDENT bar 1:[email protected]#ROOT2024
- PUNCH ; [alone] 0:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..76#ROOT2024 1:Root[0000, 0]@0..76#ROOT2024
+ LITERAL Integer 4 1:Root[0000, 0]@5..6#ROOT2024
+ PUNCH ; [joint] 0:Root[0000, 0]@41..42#ROOT2024
+ LITERAL Str literal 1:Root[0000, 0]@12..21#ROOT2024
+ PUNCH ; [joint] 0:Root[0000, 0]@41..42#ROOT2024
+ SUBTREE () 0:Root[0000, 0]@39..40#ROOT2024 0:Root[0000, 0]@39..40#ROOT2024
+ IDENT funcall 1:Root[0000, 0]@27..34#ROOT2024
+ SUBTREE () 1:Root[0000, 0]@34..35#ROOT2024 1:Root[0000, 0]@35..36#ROOT2024
+ PUNCH ; [joint] 0:Root[0000, 0]@41..42#ROOT2024
+ SUBTREE () 0:Root[0000, 0]@39..40#ROOT2024 0:Root[0000, 0]@39..40#ROOT2024
+ IDENT future 1:Root[0000, 0]@42..48#ROOT2024
+ PUNCH . [alone] 1:Root[0000, 0]@48..49#ROOT2024
+ IDENT await 1:Root[0000, 0]@49..54#ROOT2024
+ PUNCH ; [joint] 0:Root[0000, 0]@41..42#ROOT2024
+ SUBTREE () 0:Root[0000, 0]@39..40#ROOT2024 0:Root[0000, 0]@39..40#ROOT2024
+ IDENT break 1:Root[0000, 0]@60..65#ROOT2024
+ PUNCH ' [joint] 1:Root[0000, 0]@66..67#ROOT2024
+ IDENT foo 1:Root[0000, 0]@67..70#ROOT2024
+ IDENT bar 1:Root[0000, 0]@71..74#ROOT2024
+ PUNCH ; [alone] 0:Root[0000, 0]@44..45#ROOT2024
4;
"literal";
@@ -346,13 +348,13 @@ fn expr_2021() {
expect![[r#"
ExpandError {
inner: (
- 1:[email protected]#ROOT2024,
+ 1:Root[0000, 0]@5..6#ROOT2024,
NoMatchingRule,
),
}
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- PUNCH ; [alone] 0:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..8#ROOT2024 1:Root[0000, 0]@0..8#ROOT2024
+ PUNCH ; [alone] 0:Root[0000, 0]@44..45#ROOT2024
;"#]],
);
@@ -370,88 +372,88 @@ fn minus_belongs_to_literal() {
check(
"-1",
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- PUNCH - [alone] 0:[email protected]#ROOT2024
- LITERAL Integer 1 0:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..2#ROOT2024 1:Root[0000, 0]@0..2#ROOT2024
+ PUNCH - [alone] 0:Root[0000, 0]@10..11#ROOT2024
+ LITERAL Integer 1 0:Root[0000, 0]@11..12#ROOT2024
-1"#]],
);
check(
"- 1",
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- PUNCH - [alone] 0:[email protected]#ROOT2024
- LITERAL Integer 1 0:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..3#ROOT2024 1:Root[0000, 0]@0..3#ROOT2024
+ PUNCH - [alone] 0:Root[0000, 0]@10..11#ROOT2024
+ LITERAL Integer 1 0:Root[0000, 0]@11..12#ROOT2024
-1"#]],
);
check(
"-2",
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- PUNCH - [alone] 0:[email protected]#ROOT2024
- LITERAL Integer 2 0:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..2#ROOT2024 1:Root[0000, 0]@0..2#ROOT2024
+ PUNCH - [alone] 0:Root[0000, 0]@25..26#ROOT2024
+ LITERAL Integer 2 0:Root[0000, 0]@27..28#ROOT2024
-2"#]],
);
check(
"- 2",
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- PUNCH - [alone] 0:[email protected]#ROOT2024
- LITERAL Integer 2 0:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..3#ROOT2024 1:Root[0000, 0]@0..3#ROOT2024
+ PUNCH - [alone] 0:Root[0000, 0]@25..26#ROOT2024
+ LITERAL Integer 2 0:Root[0000, 0]@27..28#ROOT2024
-2"#]],
);
check(
"-3.0",
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- PUNCH - [alone] 0:[email protected]#ROOT2024
- LITERAL Float 3.0 0:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..4#ROOT2024 1:Root[0000, 0]@0..4#ROOT2024
+ PUNCH - [alone] 0:Root[0000, 0]@43..44#ROOT2024
+ LITERAL Float 3.0 0:Root[0000, 0]@45..48#ROOT2024
-3.0"#]],
);
check(
"- 3.0",
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- PUNCH - [alone] 0:[email protected]#ROOT2024
- LITERAL Float 3.0 0:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..5#ROOT2024 1:Root[0000, 0]@0..5#ROOT2024
+ PUNCH - [alone] 0:Root[0000, 0]@43..44#ROOT2024
+ LITERAL Float 3.0 0:Root[0000, 0]@45..48#ROOT2024
-3.0"#]],
);
check(
"@1",
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- LITERAL Integer 1 1:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..2#ROOT2024 1:Root[0000, 0]@0..2#ROOT2024
+ LITERAL Integer 1 1:Root[0000, 0]@1..2#ROOT2024
1"#]],
);
check(
"@-1",
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- PUNCH - [alone] 1:[email protected]#ROOT2024
- LITERAL Integer 1 1:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..3#ROOT2024 1:Root[0000, 0]@0..3#ROOT2024
+ PUNCH - [alone] 1:Root[0000, 0]@1..2#ROOT2024
+ LITERAL Integer 1 1:Root[0000, 0]@2..3#ROOT2024
-1"#]],
);
check(
"@1.0",
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- LITERAL Float 1.0 1:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..4#ROOT2024 1:Root[0000, 0]@0..4#ROOT2024
+ LITERAL Float 1.0 1:Root[0000, 0]@1..4#ROOT2024
1.0"#]],
);
check(
"@-1.0",
expect![[r#"
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- PUNCH - [alone] 1:[email protected]#ROOT2024
- LITERAL Float 1.0 1:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..5#ROOT2024 1:Root[0000, 0]@0..5#ROOT2024
+ PUNCH - [alone] 1:Root[0000, 0]@1..2#ROOT2024
+ LITERAL Float 1.0 1:Root[0000, 0]@2..5#ROOT2024
-1.0"#]],
);
@@ -460,16 +462,16 @@ fn minus_belongs_to_literal() {
expect![[r#"
ExpandError {
inner: (
- 1:[email protected]#ROOT2024,
+ 1:Root[0000, 0]@1..2#ROOT2024,
BindingError(
"expected literal",
),
),
}
- SUBTREE $$ 1:[email protected]#ROOT2024 1:[email protected]#ROOT2024
- PUNCH - [joint] 1:[email protected]#ROOT2024
- PUNCH - [alone] 1:[email protected]#ROOT2024
+ SUBTREE $$ 1:Root[0000, 0]@0..6#ROOT2024 1:Root[0000, 0]@0..6#ROOT2024
+ PUNCH - [joint] 1:Root[0000, 0]@1..2#ROOT2024
+ PUNCH - [alone] 1:Root[0000, 0]@2..3#ROOT2024
--"#]],
);
diff --git a/crates/parser/Cargo.toml b/crates/parser/Cargo.toml
index c80510eedf..c7da654de6 100644
--- a/crates/parser/Cargo.toml
+++ b/crates/parser/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
drop_bomb = "0.1.5"
diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs
index 0ac25da329..2b4151e3b7 100644
--- a/crates/parser/src/grammar/expressions.rs
+++ b/crates/parser/src/grammar/expressions.rs
@@ -4,7 +4,7 @@ use crate::grammar::attributes::ATTRIBUTE_FIRST;
use super::*;
-pub(super) use atom::{EXPR_RECOVERY_SET, LITERAL_FIRST, literal};
+pub(super) use atom::{EXPR_RECOVERY_SET, LITERAL_FIRST, literal, parse_asm_expr};
pub(crate) use atom::{block_expr, match_arm_list};
#[derive(PartialEq, Eq)]
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs
index 8ed0fc6729..76656567e7 100644
--- a/crates/parser/src/grammar/expressions/atom.rs
+++ b/crates/parser/src/grammar/expressions/atom.rs
@@ -253,8 +253,7 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> {
let m = p.start();
p.bump_remap(T![builtin]);
p.bump(T![#]);
- if p.at_contextual_kw(T![offset_of]) {
- p.bump_remap(T![offset_of]);
+ if p.eat_contextual_kw(T![offset_of]) {
p.expect(T!['(']);
type_(p);
p.expect(T![,]);
@@ -278,8 +277,7 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> {
p.expect(T![')']);
}
Some(m.complete(p, OFFSET_OF_EXPR))
- } else if p.at_contextual_kw(T![format_args]) {
- p.bump_remap(T![format_args]);
+ } else if p.eat_contextual_kw(T![format_args]) {
p.expect(T!['(']);
expr(p);
if p.eat(T![,]) {
@@ -302,7 +300,16 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> {
}
p.expect(T![')']);
Some(m.complete(p, FORMAT_ARGS_EXPR))
- } else if p.at_contextual_kw(T![asm]) {
+ } else if p.eat_contextual_kw(T![asm])
+ || p.eat_contextual_kw(T![global_asm])
+ || p.eat_contextual_kw(T![naked_asm])
+ {
+ // test asm_kinds
+ // fn foo() {
+ // builtin#asm("");
+ // builtin#global_asm("");
+ // builtin#naked_asm("");
+ // }
parse_asm_expr(p, m)
} else {
m.abandon(p);
@@ -321,8 +328,7 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> {
// tmp = out(reg) _,
// );
// }
-fn parse_asm_expr(p: &mut Parser<'_>, m: Marker) -> Option<CompletedMarker> {
- p.bump_remap(T![asm]);
+pub(crate) fn parse_asm_expr(p: &mut Parser<'_>, m: Marker) -> Option<CompletedMarker> {
p.expect(T!['(']);
if expr(p).is_none() {
p.err_and_bump("expected asm template");
@@ -411,11 +417,10 @@ fn parse_asm_expr(p: &mut Parser<'_>, m: Marker) -> Option<CompletedMarker> {
dir_spec.abandon(p);
op.abandon(p);
op_n.abandon(p);
- p.err_and_bump("expected asm operand");
- // improves error recovery and handles err_and_bump recovering from `{` which gets
- // the parser stuck here
+ // improves error recovery
if p.at(T!['{']) {
+ p.error("expected asm operand");
// test_err bad_asm_expr
// fn foo() {
// builtin#asm(
@@ -423,6 +428,8 @@ fn parse_asm_expr(p: &mut Parser<'_>, m: Marker) -> Option<CompletedMarker> {
// );
// }
expr(p);
+ } else {
+ p.err_and_bump("expected asm operand");
}
if p.at(T!['}']) {
diff --git a/crates/parser/src/grammar/generic_params.rs b/crates/parser/src/grammar/generic_params.rs
index ea5a3bc859..55c5dc400b 100644
--- a/crates/parser/src/grammar/generic_params.rs
+++ b/crates/parser/src/grammar/generic_params.rs
@@ -122,7 +122,7 @@ fn lifetime_bounds(p: &mut Parser<'_>) {
}
// test type_param_bounds
-// struct S<T: 'a + ?Sized + (Copy) + ~const Drop>;
+// struct S<T: 'a + ?Sized + (Copy) + [const] Drop>;
pub(super) fn bounds(p: &mut Parser<'_>) {
p.expect(T![:]);
bounds_without_colon(p);
@@ -187,6 +187,11 @@ fn type_bound(p: &mut Parser<'_>) -> bool {
p.bump_any();
p.expect(T![const]);
}
+ T!['['] => {
+ p.bump_any();
+ p.expect(T![const]);
+ p.expect(T![']']);
+ }
// test const_trait_bound
// const fn foo(_: impl const Trait) {}
T![const] => {
diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs
index b9f4866574..8e551b0b96 100644
--- a/crates/parser/src/grammar/items.rs
+++ b/crates/parser/src/grammar/items.rs
@@ -261,6 +261,19 @@ fn opt_item_without_modifiers(p: &mut Parser<'_>, m: Marker) -> Result<(), Marke
T![const] if (la == IDENT || la == T![_] || la == T![mut]) => consts::konst(p, m),
T![static] if (la == IDENT || la == T![_] || la == T![mut]) => consts::static_(p, m),
+ IDENT
+ if p.at_contextual_kw(T![builtin])
+ && p.nth_at(1, T![#])
+ && p.nth_at_contextual_kw(2, T![global_asm]) =>
+ {
+ p.bump_remap(T![builtin]);
+ p.bump(T![#]);
+ p.bump_remap(T![global_asm]);
+ // test global_asm
+ // builtin#global_asm("")
+ expressions::parse_asm_expr(p, m);
+ }
+
_ => return Err(m),
};
Ok(())
diff --git a/crates/parser/src/lexed_str.rs b/crates/parser/src/lexed_str.rs
index 0fa9a26454..8fff1c3db7 100644
--- a/crates/parser/src/lexed_str.rs
+++ b/crates/parser/src/lexed_str.rs
@@ -11,7 +11,8 @@
use std::ops;
use rustc_literal_escaper::{
- EscapeError, Mode, unescape_byte, unescape_char, unescape_mixed, unescape_unicode,
+ EscapeError, Mode, unescape_byte, unescape_byte_str, unescape_c_str, unescape_char,
+ unescape_str,
};
use crate::{
@@ -43,7 +44,9 @@ impl<'a> LexedStr<'a> {
// Re-create the tokenizer from scratch every token because `GuardedStrPrefix` is one token in the lexer
// but we want to split it to two in edition <2024.
- while let Some(token) = rustc_lexer::tokenize(&text[conv.offset..]).next() {
+ while let Some(token) =
+ rustc_lexer::tokenize(&text[conv.offset..], rustc_lexer::FrontmatterAllowed::No).next()
+ {
let token_text = &text[conv.offset..][..token.len as usize];
conv.extend_token(&token.kind, token_text);
@@ -57,7 +60,7 @@ impl<'a> LexedStr<'a> {
return None;
}
- let token = rustc_lexer::tokenize(text).next()?;
+ let token = rustc_lexer::tokenize(text, rustc_lexer::FrontmatterAllowed::No).next()?;
if token.len as usize != text.len() {
return None;
}
@@ -151,14 +154,14 @@ impl<'a> Converter<'a> {
self.res
}
- fn push(&mut self, kind: SyntaxKind, len: usize, err: Option<&str>) {
+ fn push(&mut self, kind: SyntaxKind, len: usize, errors: Vec<String>) {
self.res.push(kind, self.offset);
self.offset += len;
- if let Some(err) = err {
- let token = self.res.len() as u32;
- let msg = err.to_owned();
- self.res.error.push(LexError { msg, token });
+ for msg in errors {
+ if !msg.is_empty() {
+ self.res.error.push(LexError { msg, token: self.res.len() as u32 });
+ }
}
}
@@ -167,14 +170,16 @@ impl<'a> Converter<'a> {
// We drop some useful information here (see patterns with double dots `..`)
// Storing that info in `SyntaxKind` is not possible due to its layout requirements of
// being `u16` that come from `rowan::SyntaxKind`.
- let mut err = "";
+ let mut errors: Vec<String> = vec![];
let syntax_kind = {
match kind {
rustc_lexer::TokenKind::LineComment { doc_style: _ } => COMMENT,
rustc_lexer::TokenKind::BlockComment { doc_style: _, terminated } => {
if !terminated {
- err = "Missing trailing `*/` symbols to terminate the block comment";
+ errors.push(
+ "Missing trailing `*/` symbols to terminate the block comment".into(),
+ );
}
COMMENT
}
@@ -184,9 +189,9 @@ impl<'a> Converter<'a> {
invalid_infostring,
} => {
if *has_invalid_preceding_whitespace {
- err = "invalid preceding whitespace for frontmatter opening"
+ errors.push("invalid preceding whitespace for frontmatter opening".into());
} else if *invalid_infostring {
- err = "invalid infostring for frontmatter"
+ errors.push("invalid infostring for frontmatter".into());
}
FRONTMATTER
}
@@ -198,7 +203,7 @@ impl<'a> Converter<'a> {
SyntaxKind::from_keyword(token_text, self.edition).unwrap_or(IDENT)
}
rustc_lexer::TokenKind::InvalidIdent => {
- err = "Ident contains invalid characters";
+ errors.push("Ident contains invalid characters".into());
IDENT
}
@@ -206,7 +211,7 @@ impl<'a> Converter<'a> {
rustc_lexer::TokenKind::GuardedStrPrefix if self.edition.at_least_2024() => {
// FIXME: rustc does something better for recovery.
- err = "Invalid string literal (reserved syntax)";
+ errors.push("Invalid string literal (reserved syntax)".into());
ERROR
}
rustc_lexer::TokenKind::GuardedStrPrefix => {
@@ -222,12 +227,12 @@ impl<'a> Converter<'a> {
rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
if *starts_with_number {
- err = "Lifetime name cannot start with a number";
+ errors.push("Lifetime name cannot start with a number".into());
}
LIFETIME_IDENT
}
rustc_lexer::TokenKind::UnknownPrefixLifetime => {
- err = "Unknown lifetime prefix";
+ errors.push("Unknown lifetime prefix".into());
LIFETIME_IDENT
}
rustc_lexer::TokenKind::RawLifetime => LIFETIME_IDENT,
@@ -262,119 +267,128 @@ impl<'a> Converter<'a> {
rustc_lexer::TokenKind::Unknown => ERROR,
rustc_lexer::TokenKind::UnknownPrefix if token_text == "builtin" => IDENT,
rustc_lexer::TokenKind::UnknownPrefix => {
- err = "unknown literal prefix";
+ errors.push("unknown literal prefix".into());
IDENT
}
rustc_lexer::TokenKind::Eof => EOF,
}
};
- let err = if err.is_empty() { None } else { Some(err) };
- self.push(syntax_kind, token_text.len(), err);
+ self.push(syntax_kind, token_text.len(), errors);
}
fn extend_literal(&mut self, len: usize, kind: &rustc_lexer::LiteralKind) {
- let mut err = "";
+ let invalid_raw_msg = String::from("Invalid raw string literal");
+
+ let mut errors = vec![];
+ let mut no_end_quote = |c: char, kind: &str| {
+ errors.push(format!("Missing trailing `{c}` symbol to terminate the {kind} literal"));
+ };
let syntax_kind = match *kind {
rustc_lexer::LiteralKind::Int { empty_int, base: _ } => {
if empty_int {
- err = "Missing digits after the integer base prefix";
+ errors.push("Missing digits after the integer base prefix".into());
}
INT_NUMBER
}
rustc_lexer::LiteralKind::Float { empty_exponent, base: _ } => {
if empty_exponent {
- err = "Missing digits after the exponent symbol";
+ errors.push("Missing digits after the exponent symbol".into());
}
FLOAT_NUMBER
}
rustc_lexer::LiteralKind::Char { terminated } => {
if !terminated {
- err = "Missing trailing `'` symbol to terminate the character literal";
+ no_end_quote('\'', "character");
} else {
let text = &self.res.text[self.offset + 1..][..len - 1];
- let i = text.rfind('\'').unwrap();
- let text = &text[..i];
+ let text = &text[..text.rfind('\'').unwrap()];
if let Err(e) = unescape_char(text) {
- err = error_to_diagnostic_message(e, Mode::Char);
+ errors.push(err_to_msg(e, Mode::Char));
}
}
CHAR
}
rustc_lexer::LiteralKind::Byte { terminated } => {
if !terminated {
- err = "Missing trailing `'` symbol to terminate the byte literal";
+ no_end_quote('\'', "byte");
} else {
let text = &self.res.text[self.offset + 2..][..len - 2];
- let i = text.rfind('\'').unwrap();
- let text = &text[..i];
+ let text = &text[..text.rfind('\'').unwrap()];
if let Err(e) = unescape_byte(text) {
- err = error_to_diagnostic_message(e, Mode::Byte);
+ errors.push(err_to_msg(e, Mode::Byte));
}
}
-
BYTE
}
rustc_lexer::LiteralKind::Str { terminated } => {
if !terminated {
- err = "Missing trailing `\"` symbol to terminate the string literal";
+ no_end_quote('"', "string");
} else {
let text = &self.res.text[self.offset + 1..][..len - 1];
- let i = text.rfind('"').unwrap();
- let text = &text[..i];
- err = unescape_string_error_message(text, Mode::Str);
+ let text = &text[..text.rfind('"').unwrap()];
+ unescape_str(text, |_, res| {
+ if let Err(e) = res {
+ errors.push(err_to_msg(e, Mode::Str));
+ }
+ });
}
STRING
}
rustc_lexer::LiteralKind::ByteStr { terminated } => {
if !terminated {
- err = "Missing trailing `\"` symbol to terminate the byte string literal";
+ no_end_quote('"', "byte string");
} else {
let text = &self.res.text[self.offset + 2..][..len - 2];
- let i = text.rfind('"').unwrap();
- let text = &text[..i];
- err = unescape_string_error_message(text, Mode::ByteStr);
+ let text = &text[..text.rfind('"').unwrap()];
+ unescape_byte_str(text, |_, res| {
+ if let Err(e) = res {
+ errors.push(err_to_msg(e, Mode::ByteStr));
+ }
+ });
}
BYTE_STRING
}
rustc_lexer::LiteralKind::CStr { terminated } => {
if !terminated {
- err = "Missing trailing `\"` symbol to terminate the string literal";
+ no_end_quote('"', "C string")
} else {
let text = &self.res.text[self.offset + 2..][..len - 2];
- let i = text.rfind('"').unwrap();
- let text = &text[..i];
- err = unescape_string_error_message(text, Mode::CStr);
+ let text = &text[..text.rfind('"').unwrap()];
+ unescape_c_str(text, |_, res| {
+ if let Err(e) = res {
+ errors.push(err_to_msg(e, Mode::CStr));
+ }
+ });
}
C_STRING
}
rustc_lexer::LiteralKind::RawStr { n_hashes } => {
if n_hashes.is_none() {
- err = "Invalid raw string literal";
+ errors.push(invalid_raw_msg);
}
STRING
}
rustc_lexer::LiteralKind::RawByteStr { n_hashes } => {
if n_hashes.is_none() {
- err = "Invalid raw string literal";
+ errors.push(invalid_raw_msg);
}
BYTE_STRING
}
rustc_lexer::LiteralKind::RawCStr { n_hashes } => {
if n_hashes.is_none() {
- err = "Invalid raw string literal";
+ errors.push(invalid_raw_msg);
}
C_STRING
}
};
- let err = if err.is_empty() { None } else { Some(err) };
- self.push(syntax_kind, len, err);
+ self.push(syntax_kind, len, errors);
}
}
-fn error_to_diagnostic_message(error: EscapeError, mode: Mode) -> &'static str {
+fn err_to_msg(error: EscapeError, mode: Mode) -> String {
match error {
EscapeError::ZeroChars => "empty character literal",
EscapeError::MoreThanOneChar => "character literal may only contain one codepoint",
@@ -410,28 +424,5 @@ fn error_to_diagnostic_message(error: EscapeError, mode: Mode) -> &'static str {
EscapeError::UnskippedWhitespaceWarning => "",
EscapeError::MultipleSkippedLinesWarning => "",
}
-}
-
-fn unescape_string_error_message(text: &str, mode: Mode) -> &'static str {
- let mut error_message = "";
- match mode {
- Mode::CStr => {
- unescape_mixed(text, mode, &mut |_, res| {
- if let Err(e) = res {
- error_message = error_to_diagnostic_message(e, mode);
- }
- });
- }
- Mode::ByteStr | Mode::Str => {
- unescape_unicode(text, mode, &mut |_, res| {
- if let Err(e) = res {
- error_message = error_to_diagnostic_message(e, mode);
- }
- });
- }
- _ => {
- // Other Modes are not supported yet or do not apply
- }
- }
- error_message
+ .into()
}
diff --git a/crates/parser/src/parser.rs b/crates/parser/src/parser.rs
index 36a363afe9..ca02d9fdfd 100644
--- a/crates/parser/src/parser.rs
+++ b/crates/parser/src/parser.rs
@@ -29,7 +29,7 @@ pub(crate) struct Parser<'t> {
edition: Edition,
}
-const PARSER_STEP_LIMIT: usize = 15_000_000;
+const PARSER_STEP_LIMIT: usize = if cfg!(debug_assertions) { 150_000 } else { 15_000_000 };
impl<'t> Parser<'t> {
pub(super) fn new(inp: &'t Input, edition: Edition) -> Parser<'t> {
@@ -254,7 +254,10 @@ impl<'t> Parser<'t> {
/// Create an error node and consume the next token.
pub(crate) fn err_and_bump(&mut self, message: &str) {
- self.err_recover(message, TokenSet::EMPTY);
+ let m = self.start();
+ self.error(message);
+ self.bump_any();
+ m.complete(self, ERROR);
}
/// Create an error node and consume the next token unless it is in the recovery set.
diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs
index f534546ea0..12a13caa4d 100644
--- a/crates/parser/src/syntax_kind/generated.rs
+++ b/crates/parser/src/syntax_kind/generated.rs
@@ -120,12 +120,14 @@ pub enum SyntaxKind {
DYN_KW,
FORMAT_ARGS_KW,
GEN_KW,
+ GLOBAL_ASM_KW,
INLATEOUT_KW,
INOUT_KW,
LABEL_KW,
LATEOUT_KW,
MACRO_RULES_KW,
MAY_UNWIND_KW,
+ NAKED_ASM_KW,
NOMEM_KW,
NORETURN_KW,
NOSTACK_KW,
@@ -599,12 +601,14 @@ impl SyntaxKind {
DEFAULT_KW => "default",
DYN_KW => "dyn",
FORMAT_ARGS_KW => "format_args",
+ GLOBAL_ASM_KW => "global_asm",
INLATEOUT_KW => "inlateout",
INOUT_KW => "inout",
LABEL_KW => "label",
LATEOUT_KW => "lateout",
MACRO_RULES_KW => "macro_rules",
MAY_UNWIND_KW => "may_unwind",
+ NAKED_ASM_KW => "naked_asm",
NOMEM_KW => "nomem",
NORETURN_KW => "noreturn",
NOSTACK_KW => "nostack",
@@ -699,12 +703,14 @@ impl SyntaxKind {
DEFAULT_KW => true,
DYN_KW if edition < Edition::Edition2018 => true,
FORMAT_ARGS_KW => true,
+ GLOBAL_ASM_KW => true,
INLATEOUT_KW => true,
INOUT_KW => true,
LABEL_KW => true,
LATEOUT_KW => true,
MACRO_RULES_KW => true,
MAY_UNWIND_KW => true,
+ NAKED_ASM_KW => true,
NOMEM_KW => true,
NORETURN_KW => true,
NOSTACK_KW => true,
@@ -787,12 +793,14 @@ impl SyntaxKind {
DEFAULT_KW => true,
DYN_KW if edition < Edition::Edition2018 => true,
FORMAT_ARGS_KW => true,
+ GLOBAL_ASM_KW => true,
INLATEOUT_KW => true,
INOUT_KW => true,
LABEL_KW => true,
LATEOUT_KW => true,
MACRO_RULES_KW => true,
MAY_UNWIND_KW => true,
+ NAKED_ASM_KW => true,
NOMEM_KW => true,
NORETURN_KW => true,
NOSTACK_KW => true,
@@ -938,12 +946,14 @@ impl SyntaxKind {
"default" => DEFAULT_KW,
"dyn" if edition < Edition::Edition2018 => DYN_KW,
"format_args" => FORMAT_ARGS_KW,
+ "global_asm" => GLOBAL_ASM_KW,
"inlateout" => INLATEOUT_KW,
"inout" => INOUT_KW,
"label" => LABEL_KW,
"lateout" => LATEOUT_KW,
"macro_rules" => MACRO_RULES_KW,
"may_unwind" => MAY_UNWIND_KW,
+ "naked_asm" => NAKED_ASM_KW,
"nomem" => NOMEM_KW,
"noreturn" => NORETURN_KW,
"nostack" => NOSTACK_KW,
@@ -998,7 +1008,7 @@ impl SyntaxKind {
}
}
#[macro_export]
-macro_rules ! T_ { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; 
[macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [inlateout] => { $ crate :: SyntaxKind :: INLATEOUT_KW } ; [inout] => { $ crate :: SyntaxKind :: INOUT_KW } ; [label] => { $ crate :: SyntaxKind :: LABEL_KW } ; [lateout] => { $ crate :: SyntaxKind :: LATEOUT_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; 
[may_unwind] => { $ crate :: SyntaxKind :: MAY_UNWIND_KW } ; [nomem] => { $ crate :: SyntaxKind :: NOMEM_KW } ; [noreturn] => { $ crate :: SyntaxKind :: NORETURN_KW } ; [nostack] => { $ crate :: SyntaxKind :: NOSTACK_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [options] => { $ crate :: SyntaxKind :: OPTIONS_KW } ; [out] => { $ crate :: SyntaxKind :: OUT_KW } ; [preserves_flags] => { $ crate :: SyntaxKind :: PRESERVES_FLAGS_KW } ; [pure] => { $ crate :: SyntaxKind :: PURE_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [readonly] => { $ crate :: SyntaxKind :: READONLY_KW } ; [safe] => { $ crate :: SyntaxKind :: SAFE_KW } ; [sym] => { $ crate :: SyntaxKind :: SYM_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; [frontmatter] => { $ crate :: SyntaxKind :: FRONTMATTER } ; }
+macro_rules ! T_ { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; 
[macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [global_asm] => { $ crate :: SyntaxKind :: GLOBAL_ASM_KW } ; [inlateout] => { $ crate :: SyntaxKind :: INLATEOUT_KW } ; [inout] => { $ crate :: SyntaxKind :: INOUT_KW } ; [label] => { $ crate :: SyntaxKind :: LABEL_KW } ; [lateout] => { $ crate :: SyntaxKind :: LATEOUT_KW } ; [macro_rules] 
=> { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [may_unwind] => { $ crate :: SyntaxKind :: MAY_UNWIND_KW } ; [naked_asm] => { $ crate :: SyntaxKind :: NAKED_ASM_KW } ; [nomem] => { $ crate :: SyntaxKind :: NOMEM_KW } ; [noreturn] => { $ crate :: SyntaxKind :: NORETURN_KW } ; [nostack] => { $ crate :: SyntaxKind :: NOSTACK_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [options] => { $ crate :: SyntaxKind :: OPTIONS_KW } ; [out] => { $ crate :: SyntaxKind :: OUT_KW } ; [preserves_flags] => { $ crate :: SyntaxKind :: PRESERVES_FLAGS_KW } ; [pure] => { $ crate :: SyntaxKind :: PURE_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [readonly] => { $ crate :: SyntaxKind :: READONLY_KW } ; [safe] => { $ crate :: SyntaxKind :: SAFE_KW } ; [sym] => { $ crate :: SyntaxKind :: SYM_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; [frontmatter] => { $ crate :: SyntaxKind :: FRONTMATTER } ; }
impl ::core::marker::Copy for SyntaxKind {}
impl ::core::clone::Clone for SyntaxKind {
#[inline]
diff --git a/crates/parser/test_data/generated/runner.rs b/crates/parser/test_data/generated/runner.rs
index 6ec4192830..cef7b0ee23 100644
--- a/crates/parser/test_data/generated/runner.rs
+++ b/crates/parser/test_data/generated/runner.rs
@@ -21,6 +21,8 @@ mod ok {
#[test]
fn asm_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/asm_expr.rs"); }
#[test]
+ fn asm_kinds() { run_and_expect_no_errors("test_data/parser/inline/ok/asm_kinds.rs"); }
+ #[test]
fn asm_label() { run_and_expect_no_errors("test_data/parser/inline/ok/asm_label.rs"); }
#[test]
fn assoc_const_eq() {
@@ -298,6 +300,8 @@ mod ok {
run_and_expect_no_errors("test_data/parser/inline/ok/generic_param_list.rs");
}
#[test]
+ fn global_asm() { run_and_expect_no_errors("test_data/parser/inline/ok/global_asm.rs"); }
+ #[test]
fn half_open_range_pat() {
run_and_expect_no_errors("test_data/parser/inline/ok/half_open_range_pat.rs");
}
diff --git a/crates/parser/test_data/parser/inline/ok/asm_kinds.rast b/crates/parser/test_data/parser/inline/ok/asm_kinds.rast
new file mode 100644
index 0000000000..c337d89aa5
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/asm_kinds.rast
@@ -0,0 +1,48 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ ASM_EXPR
+ BUILTIN_KW "builtin"
+ POUND "#"
+ ASM_KW "asm"
+ L_PAREN "("
+ LITERAL
+ STRING "\"\""
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ ASM_EXPR
+ BUILTIN_KW "builtin"
+ POUND "#"
+ GLOBAL_ASM_KW "global_asm"
+ L_PAREN "("
+ LITERAL
+ STRING "\"\""
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ ASM_EXPR
+ BUILTIN_KW "builtin"
+ POUND "#"
+ NAKED_ASM_KW "naked_asm"
+ L_PAREN "("
+ LITERAL
+ STRING "\"\""
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/crates/parser/test_data/parser/inline/ok/asm_kinds.rs b/crates/parser/test_data/parser/inline/ok/asm_kinds.rs
new file mode 100644
index 0000000000..9c03e9de68
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/asm_kinds.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ builtin#asm("");
+ builtin#global_asm("");
+ builtin#naked_asm("");
+}
diff --git a/crates/parser/test_data/parser/inline/ok/global_asm.rast b/crates/parser/test_data/parser/inline/ok/global_asm.rast
new file mode 100644
index 0000000000..5337c56be1
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/global_asm.rast
@@ -0,0 +1,10 @@
+SOURCE_FILE
+ ASM_EXPR
+ BUILTIN_KW "builtin"
+ POUND "#"
+ GLOBAL_ASM_KW "global_asm"
+ L_PAREN "("
+ LITERAL
+ STRING "\"\""
+ R_PAREN ")"
+ WHITESPACE "\n"
diff --git a/crates/parser/test_data/parser/inline/ok/global_asm.rs b/crates/parser/test_data/parser/inline/ok/global_asm.rs
new file mode 100644
index 0000000000..967ce1f5fd
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/global_asm.rs
@@ -0,0 +1 @@
+builtin#global_asm("")
diff --git a/crates/parser/test_data/parser/inline/ok/type_param_bounds.rast b/crates/parser/test_data/parser/inline/ok/type_param_bounds.rast
index dee860c241..259637c898 100644
--- a/crates/parser/test_data/parser/inline/ok/type_param_bounds.rast
+++ b/crates/parser/test_data/parser/inline/ok/type_param_bounds.rast
@@ -40,8 +40,9 @@ SOURCE_FILE
PLUS "+"
WHITESPACE " "
TYPE_BOUND
- TILDE "~"
+ L_BRACK "["
CONST_KW "const"
+ R_BRACK "]"
WHITESPACE " "
PATH_TYPE
PATH
diff --git a/crates/parser/test_data/parser/inline/ok/type_param_bounds.rs b/crates/parser/test_data/parser/inline/ok/type_param_bounds.rs
index 5da3083b9c..8f37af78e9 100644
--- a/crates/parser/test_data/parser/inline/ok/type_param_bounds.rs
+++ b/crates/parser/test_data/parser/inline/ok/type_param_bounds.rs
@@ -1 +1 @@
-struct S<T: 'a + ?Sized + (Copy) + ~const Drop>;
+struct S<T: 'a + ?Sized + (Copy) + [const] Drop>;
diff --git a/crates/paths/Cargo.toml b/crates/paths/Cargo.toml
index 4cc70726da..f0dafab70c 100644
--- a/crates/paths/Cargo.toml
+++ b/crates/paths/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
camino.workspace = true
diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml
index f5ba40a994..dac8e09435 100644
--- a/crates/proc-macro-api/Cargo.toml
+++ b/crates/proc-macro-api/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
serde.workspace = true
diff --git a/crates/proc-macro-api/src/legacy_protocol/msg.rs b/crates/proc-macro-api/src/legacy_protocol/msg.rs
index 55185aa492..165936269d 100644
--- a/crates/proc-macro-api/src/legacy_protocol/msg.rs
+++ b/crates/proc-macro-api/src/legacy_protocol/msg.rs
@@ -22,9 +22,10 @@ pub const HAS_GLOBAL_SPANS: u32 = 3;
pub const RUST_ANALYZER_SPAN_SUPPORT: u32 = 4;
/// Whether literals encode their kind as an additional u32 field and idents their rawness as a u32 field.
pub const EXTENDED_LEAF_DATA: u32 = 5;
+pub const HASHED_AST_ID: u32 = 6;
/// Current API version of the proc-macro protocol.
-pub const CURRENT_API_VERSION: u32 = EXTENDED_LEAF_DATA;
+pub const CURRENT_API_VERSION: u32 = HASHED_AST_ID;
/// Represents requests sent from the client to the proc-macro-srv.
#[derive(Debug, Serialize, Deserialize)]
@@ -201,7 +202,9 @@ type ProtocolWrite<W: Write> = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str)
#[cfg(test)]
mod tests {
use intern::{Symbol, sym};
- use span::{Edition, ErasedFileAstId, Span, SpanAnchor, SyntaxContext, TextRange, TextSize};
+ use span::{
+ Edition, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext, TextRange, TextSize,
+ };
use tt::{
Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, TopSubtree,
TopSubtreeBuilder,
@@ -215,7 +218,7 @@ mod tests {
span::FileId::from_raw(0xe4e4e),
span::Edition::CURRENT,
),
- ast_id: ErasedFileAstId::from_raw(0),
+ ast_id: ROOT_ERASED_FILE_AST_ID,
};
let mut builder = TopSubtreeBuilder::new(Delimiter {
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index 25c30b6db4..516c7418bd 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -12,13 +12,13 @@ pub mod legacy_protocol {
mod process;
use paths::{AbsPath, AbsPathBuf};
-use span::Span;
+use span::{ErasedFileAstId, FIXUP_ERASED_FILE_AST_ID_MARKER, Span};
use std::{fmt, io, sync::Arc, time::SystemTime};
use crate::{
legacy_protocol::msg::{
- ExpandMacro, ExpandMacroData, ExpnGlobals, FlatTree, HAS_GLOBAL_SPANS, PanicMessage,
- RUST_ANALYZER_SPAN_SUPPORT, Request, Response, SpanDataIndexMap,
+ ExpandMacro, ExpandMacroData, ExpnGlobals, FlatTree, HAS_GLOBAL_SPANS, HASHED_AST_ID,
+ PanicMessage, RUST_ANALYZER_SPAN_SUPPORT, Request, Response, SpanDataIndexMap,
deserialize_span_data_index_map, flat::serialize_span_data_index_map,
},
process::ProcMacroServerProcess,
@@ -161,6 +161,38 @@ impl ProcMacro {
self.kind
}
+ fn needs_fixup_change(&self) -> bool {
+ let version = self.process.version();
+ (RUST_ANALYZER_SPAN_SUPPORT..HASHED_AST_ID).contains(&version)
+ }
+
+ /// On some server versions, the fixup ast id is different than ours. So change it to match.
+ fn change_fixup_to_match_old_server(&self, tt: &mut tt::TopSubtree<Span>) {
+ const OLD_FIXUP_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(!0 - 1);
+ let change_ast_id = |ast_id: &mut ErasedFileAstId| {
+ if *ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER {
+ *ast_id = OLD_FIXUP_AST_ID;
+ } else if *ast_id == OLD_FIXUP_AST_ID {
+ // Swap between them, that means no collision plus the change can be reversed by doing itself.
+ *ast_id = FIXUP_ERASED_FILE_AST_ID_MARKER;
+ }
+ };
+
+ for tt in &mut tt.0 {
+ match tt {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { span, .. }))
+ | tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { span, .. }))
+ | tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { span, .. })) => {
+ change_ast_id(&mut span.anchor.ast_id);
+ }
+ tt::TokenTree::Subtree(subtree) => {
+ change_ast_id(&mut subtree.delimiter.open.anchor.ast_id);
+ change_ast_id(&mut subtree.delimiter.close.anchor.ast_id);
+ }
+ }
+ }
+ }
+
/// Expands the procedural macro by sending an expansion request to the server.
/// This includes span information and environmental context.
pub fn expand(
@@ -173,6 +205,20 @@ impl ProcMacro {
mixed_site: Span,
current_dir: String,
) -> Result<Result<tt::TopSubtree<Span>, PanicMessage>, ServerError> {
+ let (mut subtree, mut attr) = (subtree, attr);
+ let (mut subtree_changed, mut attr_changed);
+ if self.needs_fixup_change() {
+ subtree_changed = tt::TopSubtree::from_subtree(subtree);
+ self.change_fixup_to_match_old_server(&mut subtree_changed);
+ subtree = subtree_changed.view();
+
+ if let Some(attr) = &mut attr {
+ attr_changed = tt::TopSubtree::from_subtree(*attr);
+ self.change_fixup_to_match_old_server(&mut attr_changed);
+ *attr = attr_changed.view();
+ }
+ }
+
let version = self.process.version();
let mut span_data_table = SpanDataIndexMap::default();
@@ -205,15 +251,23 @@ impl ProcMacro {
let response = self.process.send_task(Request::ExpandMacro(Box::new(task)))?;
match response {
- Response::ExpandMacro(it) => {
- Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table)))
- }
+ Response::ExpandMacro(it) => Ok(it.map(|tree| {
+ let mut expanded = FlatTree::to_subtree_resolved(tree, version, &span_data_table);
+ if self.needs_fixup_change() {
+ self.change_fixup_to_match_old_server(&mut expanded);
+ }
+ expanded
+ })),
Response::ExpandMacroExtended(it) => Ok(it.map(|resp| {
- FlatTree::to_subtree_resolved(
+ let mut expanded = FlatTree::to_subtree_resolved(
resp.tree,
version,
&deserialize_span_data_index_map(&resp.span_data_table),
- )
+ );
+ if self.needs_fixup_change() {
+ self.change_fixup_to_match_old_server(&mut expanded);
+ }
+ expanded
})),
_ => Err(ServerError { message: "unexpected response".to_owned(), io: None }),
}
diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml
index 8fd675d0d3..4034f24439 100644
--- a/crates/proc-macro-srv/Cargo.toml
+++ b/crates/proc-macro-srv/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
object.workspace = true
diff --git a/crates/proc-macro-srv/proc-macro-test/Cargo.toml b/crates/proc-macro-srv/proc-macro-test/Cargo.toml
index eddefb33c0..bc04482273 100644
--- a/crates/proc-macro-srv/proc-macro-test/Cargo.toml
+++ b/crates/proc-macro-srv/proc-macro-test/Cargo.toml
@@ -7,6 +7,7 @@ edition = "2024"
license = "MIT OR Apache-2.0"
[lib]
+doctest = false
[build-dependencies]
-cargo_metadata = "0.19.2"
+cargo_metadata = "0.20.0"
diff --git a/crates/proc-macro-srv/proc-macro-test/build.rs b/crates/proc-macro-srv/proc-macro-test/build.rs
index b97569d4db..b9e84a474d 100644
--- a/crates/proc-macro-srv/proc-macro-test/build.rs
+++ b/crates/proc-macro-srv/proc-macro-test/build.rs
@@ -109,13 +109,11 @@ fn main() {
let mut artifact_path = None;
for message in Message::parse_stream(output.stdout.as_slice()) {
- if let Message::CompilerArtifact(artifact) = message.unwrap() {
- if artifact.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
- && (artifact.package_id.repr.starts_with(&repr)
- || artifact.package_id.repr == pkgid)
- {
- artifact_path = Some(PathBuf::from(&artifact.filenames[0]));
- }
+ if let Message::CompilerArtifact(artifact) = message.unwrap()
+ && artifact.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
+ && (artifact.package_id.repr.starts_with(&repr) || artifact.package_id.repr == pkgid)
+ {
+ artifact_path = Some(PathBuf::from(&artifact.filenames[0]));
}
}
diff --git a/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml b/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml
index 33b7c2bb0a..e1678bddff 100644
--- a/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml
+++ b/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml
@@ -6,6 +6,7 @@ edition = "2024"
publish = false
[lib]
+doctest = false
proc-macro = true
[dependencies]
diff --git a/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
index 6820e4b335..2a72e50f91 100644
--- a/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
+++ b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
@@ -31,6 +31,7 @@ pub fn fn_like_mk_literals(_args: TokenStream) -> TokenStream {
TokenTree::from(Literal::byte_string(b"byte_string")),
TokenTree::from(Literal::character('c')),
TokenTree::from(Literal::string("string")),
+ TokenTree::from(Literal::string("-string")),
TokenTree::from(Literal::c_string(c"cstring")),
// as of 2022-07-21, there's no method on `Literal` to build a raw
// string or a raw byte string
diff --git a/crates/proc-macro-srv/src/server_impl.rs b/crates/proc-macro-srv/src/server_impl.rs
index ad28599033..662f625764 100644
--- a/crates/proc-macro-srv/src/server_impl.rs
+++ b/crates/proc-macro-srv/src/server_impl.rs
@@ -121,7 +121,7 @@ pub(super) fn literal_from_str<Span: Copy>(
use proc_macro::bridge::LitKind;
use rustc_lexer::{LiteralKind, Token, TokenKind};
- let mut tokens = rustc_lexer::tokenize(s);
+ let mut tokens = rustc_lexer::tokenize(s, rustc_lexer::FrontmatterAllowed::No);
let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
let lit = if minus_or_lit.kind == TokenKind::Minus {
@@ -199,37 +199,29 @@ pub(super) fn from_token_tree<Span: Copy>(
}
bridge::TokenTree::Literal(literal) => {
- let token_trees =
- if let Some((_minus, symbol)) = literal.symbol.as_str().split_once('-') {
- let punct = tt::Punct {
- spacing: tt::Spacing::Alone,
- span: literal.span,
- char: '-' as char,
- };
- let leaf: tt::Leaf<Span> = tt::Leaf::from(punct);
- let minus_tree = tt::TokenTree::from(leaf);
-
- let literal = tt::Literal {
- symbol: Symbol::intern(symbol),
- suffix: literal.suffix,
- span: literal.span,
- kind: literal_kind_to_internal(literal.kind),
- };
- let leaf: tt::Leaf<Span> = tt::Leaf::from(literal);
- let tree = tt::TokenTree::from(leaf);
- vec![minus_tree, tree]
- } else {
- let literal = tt::Literal {
- symbol: literal.symbol,
- suffix: literal.suffix,
- span: literal.span,
- kind: literal_kind_to_internal(literal.kind),
- };
-
- let leaf: tt::Leaf<Span> = tt::Leaf::from(literal);
- let tree = tt::TokenTree::from(leaf);
- vec![tree]
- };
+ let mut token_trees = Vec::new();
+ let mut symbol = literal.symbol;
+ if matches!(
+ literal.kind,
+ proc_macro::bridge::LitKind::Integer | proc_macro::bridge::LitKind::Float
+ ) && symbol.as_str().starts_with('-')
+ {
+ token_trees.push(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+ spacing: tt::Spacing::Alone,
+ span: literal.span,
+ char: '-' as char,
+ })));
+ symbol = Symbol::intern(&symbol.as_str()[1..]);
+ }
+ let literal = tt::Literal {
+ symbol,
+ suffix: literal.suffix,
+ span: literal.span,
+ kind: literal_kind_to_internal(literal.kind),
+ };
+ let leaf: tt::Leaf<Span> = tt::Leaf::from(literal);
+ let tree = tt::TokenTree::from(leaf);
+ token_trees.push(tree);
TokenStream { token_trees }
}
diff --git a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
index 5d1271ba81..a1863efafb 100644
--- a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
+++ b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
@@ -318,7 +318,7 @@ mod tests {
range: TextRange::empty(TextSize::new(0)),
anchor: span::SpanAnchor {
file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
- ast_id: span::ErasedFileAstId::from_raw(0),
+ ast_id: span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContext::root(span::Edition::CURRENT),
};
@@ -360,7 +360,7 @@ mod tests {
range: TextRange::empty(TextSize::new(0)),
anchor: span::SpanAnchor {
file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
- ast_id: span::ErasedFileAstId::from_raw(0),
+ ast_id: span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContext::root(span::Edition::CURRENT),
};
diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs
index 3a6ce639d1..08495f50bf 100644
--- a/crates/proc-macro-srv/src/tests/mod.rs
+++ b/crates/proc-macro-srv/src/tests/mod.rs
@@ -21,14 +21,14 @@ fn test_derive_empty() {
SUBTREE $$ 1 1"#]],
expect![[r#"
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT struct 42:[email protected]#ROOT2024
- IDENT S 42:[email protected]#ROOT2024
- PUNCH ; [alone] 42:[email protected]#ROOT2024
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT struct 42:Root[0000, 0]@0..6#ROOT2024
+ IDENT S 42:Root[0000, 0]@7..8#ROOT2024
+ PUNCH ; [alone] 42:Root[0000, 0]@8..9#ROOT2024
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024"#]],
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024"#]],
);
}
@@ -52,19 +52,19 @@ fn test_derive_error() {
LITERAL Str #[derive(DeriveError)] struct S ; 1
PUNCH ; [alone] 1"#]],
expect![[r#"
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT struct 42:[email protected]#ROOT2024
- IDENT S 42:[email protected]#ROOT2024
- PUNCH ; [alone] 42:[email protected]#ROOT2024
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT struct 42:Root[0000, 0]@0..6#ROOT2024
+ IDENT S 42:Root[0000, 0]@7..8#ROOT2024
+ PUNCH ; [alone] 42:Root[0000, 0]@8..9#ROOT2024
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT compile_error 42:[email protected]#ROOT2024
- PUNCH ! [alone] 42:[email protected]#ROOT2024
- SUBTREE () 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- LITERAL Str #[derive(DeriveError)] struct S ; 42:[email protected]#ROOT2024
- PUNCH ; [alone] 42:[email protected]#ROOT2024"#]],
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT compile_error 42:Root[0000, 0]@0..100#ROOT2024
+ PUNCH ! [alone] 42:Root[0000, 0]@0..100#ROOT2024
+ SUBTREE () 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Str #[derive(DeriveError)] struct S ; 42:Root[0000, 0]@0..100#ROOT2024
+ PUNCH ; [alone] 42:Root[0000, 0]@0..100#ROOT2024"#]],
);
}
@@ -94,25 +94,25 @@ fn test_fn_like_macro_noop() {
PUNCH , [alone] 1
SUBTREE [] 1 1"#]],
expect![[r#"
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT ident 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Integer 0 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Integer 1 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- SUBTREE [] 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
-
-
-
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT ident 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Integer 0 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Integer 1 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- SUBTREE [] 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024"#]],
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
+ LITERAL Integer 0 42:Root[0000, 0]@7..8#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@8..9#ROOT2024
+ LITERAL Integer 1 42:Root[0000, 0]@10..11#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
+ SUBTREE [] 42:Root[0000, 0]@13..14#ROOT2024 42:Root[0000, 0]@14..15#ROOT2024
+
+
+
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
+ LITERAL Integer 0 42:Root[0000, 0]@7..8#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@8..9#ROOT2024
+ LITERAL Integer 1 42:Root[0000, 0]@10..11#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
+ SUBTREE [] 42:Root[0000, 0]@13..14#ROOT2024 42:Root[0000, 0]@14..15#ROOT2024"#]],
);
}
@@ -134,17 +134,17 @@ fn test_fn_like_macro_clone_ident_subtree() {
PUNCH , [alone] 1
SUBTREE [] 1 1"#]],
expect![[r#"
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT ident 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- SUBTREE [] 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
+ SUBTREE [] 42:Root[0000, 0]@7..8#ROOT2024 42:Root[0000, 0]@8..9#ROOT2024
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT ident 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- SUBTREE [] 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024"#]],
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
+ SUBTREE [] 42:Root[0000, 0]@7..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024"#]],
);
}
@@ -162,13 +162,13 @@ fn test_fn_like_macro_clone_raw_ident() {
SUBTREE $$ 1 1
IDENT r#async 1"#]],
expect![[r#"
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT r#async 42:[email protected]#ROOT2024
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT r#async 42:Root[0000, 0]@0..7#ROOT2024
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT r#async 42:[email protected]#ROOT2024"#]],
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT r#async 42:Root[0000, 0]@0..7#ROOT2024"#]],
);
}
@@ -187,14 +187,14 @@ fn test_fn_like_fn_like_span_join() {
SUBTREE $$ 1 1
IDENT r#joined 1"#]],
expect![[r#"
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT foo 42:[email protected]#ROOT2024
- IDENT bar 42:[email protected]#ROOT2024
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT foo 42:Root[0000, 0]@0..3#ROOT2024
+ IDENT bar 42:Root[0000, 0]@8..11#ROOT2024
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT r#joined 42:[email protected]#ROOT2024"#]],
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT r#joined 42:Root[0000, 0]@0..11#ROOT2024"#]],
);
}
@@ -216,17 +216,17 @@ fn test_fn_like_fn_like_span_ops() {
IDENT resolved_at_def_site 1
IDENT start_span 1"#]],
expect![[r#"
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT set_def_site 42:[email protected]#ROOT2024
- IDENT resolved_at_def_site 42:[email protected]#ROOT2024
- IDENT start_span 42:[email protected]#ROOT2024
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT set_def_site 42:Root[0000, 0]@0..12#ROOT2024
+ IDENT resolved_at_def_site 42:Root[0000, 0]@13..33#ROOT2024
+ IDENT start_span 42:Root[0000, 0]@34..44#ROOT2024
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT set_def_site 41:[email protected]#ROOT2024
- IDENT resolved_at_def_site 42:[email protected]#ROOT2024
- IDENT start_span 42:[email protected]#ROOT2024"#]],
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT set_def_site 41:Root[0000, 0]@0..150#ROOT2024
+ IDENT resolved_at_def_site 42:Root[0000, 0]@13..33#ROOT2024
+ IDENT start_span 42:Root[0000, 0]@34..34#ROOT2024"#]],
);
}
@@ -244,6 +244,7 @@ fn test_fn_like_mk_literals() {
LITERAL ByteStr byte_string 1
LITERAL Char c 1
LITERAL Str string 1
+ LITERAL Str -string 1
LITERAL CStr cstring 1
LITERAL Float 3.14f64 1
PUNCH - [alone] 1
@@ -258,27 +259,28 @@ fn test_fn_like_mk_literals() {
PUNCH - [alone] 1
LITERAL Integer 123 1"#]],
expect![[r#"
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
-
-
-
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- LITERAL ByteStr byte_string 42:[email protected]#ROOT2024
- LITERAL Char c 42:[email protected]#ROOT2024
- LITERAL Str string 42:[email protected]#ROOT2024
- LITERAL CStr cstring 42:[email protected]#ROOT2024
- LITERAL Float 3.14f64 42:[email protected]#ROOT2024
- PUNCH - [alone] 42:[email protected]#ROOT2024
- LITERAL Float 3.14f64 42:[email protected]#ROOT2024
- LITERAL Float 3.14 42:[email protected]#ROOT2024
- PUNCH - [alone] 42:[email protected]#ROOT2024
- LITERAL Float 3.14 42:[email protected]#ROOT2024
- LITERAL Integer 123i64 42:[email protected]#ROOT2024
- PUNCH - [alone] 42:[email protected]#ROOT2024
- LITERAL Integer 123i64 42:[email protected]#ROOT2024
- LITERAL Integer 123 42:[email protected]#ROOT2024
- PUNCH - [alone] 42:[email protected]#ROOT2024
- LITERAL Integer 123 42:[email protected]#ROOT2024"#]],
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+
+
+
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL ByteStr byte_string 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Char c 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Str string 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Str -string 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL CStr cstring 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Float 3.14f64 42:Root[0000, 0]@0..100#ROOT2024
+ PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Float 3.14f64 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Float 3.14 42:Root[0000, 0]@0..100#ROOT2024
+ PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Float 3.14 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Integer 123i64 42:Root[0000, 0]@0..100#ROOT2024
+ PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Integer 123i64 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Integer 123 42:Root[0000, 0]@0..100#ROOT2024
+ PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Integer 123 42:Root[0000, 0]@0..100#ROOT2024"#]],
);
}
@@ -296,13 +298,13 @@ fn test_fn_like_mk_idents() {
IDENT standard 1
IDENT r#raw 1"#]],
expect![[r#"
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT standard 42:[email protected]#ROOT2024
- IDENT r#raw 42:[email protected]#ROOT2024"#]],
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT standard 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT r#raw 42:Root[0000, 0]@0..100#ROOT2024"#]],
);
}
@@ -358,51 +360,51 @@ fn test_fn_like_macro_clone_literals() {
PUNCH , [alone] 1
LITERAL CStr null 1"#]],
expect![[r#"
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- LITERAL Integer 1u16 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Integer 2_u32 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- PUNCH - [alone] 42:[email protected]#ROOT2024
- LITERAL Integer 4i64 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Float 3.14f32 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Str hello bridge 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Str suffixedsuffix 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL StrRaw(2) raw 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Char a 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Byte b 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL CStr null 42:[email protected]#ROOT2024
-
-
-
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- LITERAL Integer 1u16 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Integer 2_u32 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- PUNCH - [alone] 42:[email protected]#ROOT2024
- LITERAL Integer 4i64 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Float 3.14f32 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Str hello bridge 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Str suffixedsuffix 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL StrRaw(2) raw 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Char a 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL Byte b 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- LITERAL CStr null 42:[email protected]#ROOT2024"#]],
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Integer 1u16 42:Root[0000, 0]@0..4#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@4..5#ROOT2024
+ LITERAL Integer 2_u32 42:Root[0000, 0]@6..11#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
+ PUNCH - [alone] 42:Root[0000, 0]@13..14#ROOT2024
+ LITERAL Integer 4i64 42:Root[0000, 0]@14..18#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@18..19#ROOT2024
+ LITERAL Float 3.14f32 42:Root[0000, 0]@20..27#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@27..28#ROOT2024
+ LITERAL Str hello bridge 42:Root[0000, 0]@29..43#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@43..44#ROOT2024
+ LITERAL Str suffixedsuffix 42:Root[0000, 0]@45..61#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@61..62#ROOT2024
+ LITERAL StrRaw(2) raw 42:Root[0000, 0]@63..73#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@73..74#ROOT2024
+ LITERAL Char a 42:Root[0000, 0]@75..78#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@78..79#ROOT2024
+ LITERAL Byte b 42:Root[0000, 0]@80..84#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@84..85#ROOT2024
+ LITERAL CStr null 42:Root[0000, 0]@86..93#ROOT2024
+
+
+
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Integer 1u16 42:Root[0000, 0]@0..4#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@4..5#ROOT2024
+ LITERAL Integer 2_u32 42:Root[0000, 0]@6..11#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
+ PUNCH - [alone] 42:Root[0000, 0]@13..14#ROOT2024
+ LITERAL Integer 4i64 42:Root[0000, 0]@14..18#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@18..19#ROOT2024
+ LITERAL Float 3.14f32 42:Root[0000, 0]@20..27#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@27..28#ROOT2024
+ LITERAL Str hello bridge 42:Root[0000, 0]@29..43#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@43..44#ROOT2024
+ LITERAL Str suffixedsuffix 42:Root[0000, 0]@45..61#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@61..62#ROOT2024
+ LITERAL StrRaw(2) raw 42:Root[0000, 0]@63..73#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@73..74#ROOT2024
+ LITERAL Char a 42:Root[0000, 0]@75..78#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@78..79#ROOT2024
+ LITERAL Byte b 42:Root[0000, 0]@80..84#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@84..85#ROOT2024
+ LITERAL CStr null 42:Root[0000, 0]@86..93#ROOT2024"#]],
);
}
@@ -440,33 +442,33 @@ fn test_fn_like_macro_negative_literals() {
PUNCH - [alone] 1
LITERAL Float 2.7 1"#]],
expect![[r#"
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- PUNCH - [alone] 42:[email protected]#ROOT2024
- LITERAL Integer 1u16 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- PUNCH - [alone] 42:[email protected]#ROOT2024
- LITERAL Integer 2_u32 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- PUNCH - [alone] 42:[email protected]#ROOT2024
- LITERAL Float 3.14f32 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- PUNCH - [alone] 42:[email protected]#ROOT2024
- LITERAL Float 2.7 42:[email protected]#ROOT2024
-
-
-
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- PUNCH - [alone] 42:[email protected]#ROOT2024
- LITERAL Integer 1u16 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- PUNCH - [alone] 42:[email protected]#ROOT2024
- LITERAL Integer 2_u32 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- PUNCH - [alone] 42:[email protected]#ROOT2024
- LITERAL Float 3.14f32 42:[email protected]#ROOT2024
- PUNCH , [alone] 42:[email protected]#ROOT2024
- PUNCH - [alone] 42:[email protected]#ROOT2024
- LITERAL Float 2.7 42:[email protected]#ROOT2024"#]],
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ PUNCH - [alone] 42:Root[0000, 0]@0..1#ROOT2024
+ LITERAL Integer 1u16 42:Root[0000, 0]@1..5#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
+ PUNCH - [alone] 42:Root[0000, 0]@7..8#ROOT2024
+ LITERAL Integer 2_u32 42:Root[0000, 0]@9..14#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@14..15#ROOT2024
+ PUNCH - [alone] 42:Root[0000, 0]@16..17#ROOT2024
+ LITERAL Float 3.14f32 42:Root[0000, 0]@17..24#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@24..25#ROOT2024
+ PUNCH - [alone] 42:Root[0000, 0]@26..27#ROOT2024
+ LITERAL Float 2.7 42:Root[0000, 0]@28..31#ROOT2024
+
+
+
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ PUNCH - [alone] 42:Root[0000, 0]@0..1#ROOT2024
+ LITERAL Integer 1u16 42:Root[0000, 0]@1..5#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
+ PUNCH - [alone] 42:Root[0000, 0]@7..8#ROOT2024
+ LITERAL Integer 2_u32 42:Root[0000, 0]@9..14#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@14..15#ROOT2024
+ PUNCH - [alone] 42:Root[0000, 0]@16..17#ROOT2024
+ LITERAL Float 3.14f32 42:Root[0000, 0]@17..24#ROOT2024
+ PUNCH , [alone] 42:Root[0000, 0]@24..25#ROOT2024
+ PUNCH - [alone] 42:Root[0000, 0]@26..27#ROOT2024
+ LITERAL Float 2.7 42:Root[0000, 0]@28..31#ROOT2024"#]],
);
}
@@ -496,21 +498,21 @@ fn test_attr_macro() {
LITERAL Str #[attr_error(some arguments)] mod m {} 1
PUNCH ; [alone] 1"#]],
expect![[r#"
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT mod 42:[email protected]#ROOT2024
- IDENT m 42:[email protected]#ROOT2024
- SUBTREE {} 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
-
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT some 42:[email protected]#ROOT2024
- IDENT arguments 42:[email protected]#ROOT2024
-
- SUBTREE $$ 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- IDENT compile_error 42:[email protected]#ROOT2024
- PUNCH ! [alone] 42:[email protected]#ROOT2024
- SUBTREE () 42:[email protected]#ROOT2024 42:[email protected]#ROOT2024
- LITERAL Str #[attr_error(some arguments)] mod m {} 42:[email protected]#ROOT2024
- PUNCH ; [alone] 42:[email protected]#ROOT2024"#]],
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT mod 42:Root[0000, 0]@0..3#ROOT2024
+ IDENT m 42:Root[0000, 0]@4..5#ROOT2024
+ SUBTREE {} 42:Root[0000, 0]@6..7#ROOT2024 42:Root[0000, 0]@7..8#ROOT2024
+
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT some 42:Root[0000, 0]@0..4#ROOT2024
+ IDENT arguments 42:Root[0000, 0]@5..14#ROOT2024
+
+ SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ IDENT compile_error 42:Root[0000, 0]@0..100#ROOT2024
+ PUNCH ! [alone] 42:Root[0000, 0]@0..100#ROOT2024
+ SUBTREE () 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
+ LITERAL Str #[attr_error(some arguments)] mod m {} 42:Root[0000, 0]@0..100#ROOT2024
+ PUNCH ; [alone] 42:Root[0000, 0]@0..100#ROOT2024"#]],
);
}
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index a0a45b269e..10af5662b5 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -1,7 +1,9 @@
//! utils used in proc-macro tests
use expect_test::Expect;
-use span::{EditionedFileId, ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContext, TokenId};
+use span::{
+ EditionedFileId, FileId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext, TokenId,
+};
use tt::TextRange;
use crate::{EnvSnapshot, ProcMacroSrv, dylib, proc_macro_test_dylib_path};
@@ -76,7 +78,7 @@ fn assert_expand_impl(
range: TextRange::new(0.into(), 150.into()),
anchor: SpanAnchor {
file_id: EditionedFileId::current_edition(FileId::from_raw(41)),
- ast_id: ErasedFileAstId::from_raw(1),
+ ast_id: ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContext::root(span::Edition::CURRENT),
};
@@ -84,7 +86,7 @@ fn assert_expand_impl(
range: TextRange::new(0.into(), 100.into()),
anchor: SpanAnchor {
file_id: EditionedFileId::current_edition(FileId::from_raw(42)),
- ast_id: ErasedFileAstId::from_raw(2),
+ ast_id: ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContext::root(span::Edition::CURRENT),
};
diff --git a/crates/profile/Cargo.toml b/crates/profile/Cargo.toml
index 1fb1383272..4828419003 100644
--- a/crates/profile/Cargo.toml
+++ b/crates/profile/Cargo.toml
@@ -10,9 +10,10 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
-cfg-if = "1.0.0"
+cfg-if = "1.0.1"
jemalloc-ctl = { version = "0.5.4", package = "tikv-jemalloc-ctl", optional = true }
[target.'cfg(all(target_os = "linux", not(target_env = "ohos")))'.dependencies]
@@ -22,7 +23,7 @@ perf-event = "=0.4.7"
libc.workspace = true
[target.'cfg(windows)'.dependencies]
-windows-sys = { version = "0.59", features = [
+windows-sys = { version = "0.60", features = [
"Win32_System_Threading",
"Win32_System_ProcessStatus",
] }
diff --git a/crates/project-model/Cargo.toml b/crates/project-model/Cargo.toml
index 64ea75922f..27fe9f79bb 100644
--- a/crates/project-model/Cargo.toml
+++ b/crates/project-model/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
anyhow.workspace = true
diff --git a/crates/project-model/src/build_dependencies.rs b/crates/project-model/src/build_dependencies.rs
index e0c38ccf33..499caa622c 100644
--- a/crates/project-model/src/build_dependencies.rs
+++ b/crates/project-model/src/build_dependencies.rs
@@ -62,6 +62,7 @@ impl WorkspaceBuildScripts {
workspace: &CargoWorkspace,
progress: &dyn Fn(String),
sysroot: &Sysroot,
+ toolchain: Option<&semver::Version>,
) -> io::Result<WorkspaceBuildScripts> {
let current_dir = workspace.workspace_root();
@@ -72,6 +73,7 @@ impl WorkspaceBuildScripts {
workspace.manifest_path(),
current_dir,
sysroot,
+ toolchain,
)?;
Self::run_per_ws(cmd, workspace, progress)
}
@@ -93,6 +95,7 @@ impl WorkspaceBuildScripts {
&ManifestPath::try_from(working_directory.clone()).unwrap(),
working_directory,
&Sysroot::empty(),
+ None,
)?;
// NB: Cargo.toml could have been modified between `cargo metadata` and
// `cargo check`. We shouldn't assume that package ids we see here are
@@ -309,7 +312,9 @@ impl WorkspaceBuildScripts {
match message {
Message::BuildScriptExecuted(mut message) => {
with_output_for(&message.package_id.repr, &mut |name, data| {
- progress(format!("running build-script: {name}"));
+ progress(format!(
+ "building compile-time-deps: build script {name} run"
+ ));
let cfgs = {
let mut acc = Vec::new();
for cfg in &message.cfgs {
@@ -340,7 +345,9 @@ impl WorkspaceBuildScripts {
}
Message::CompilerArtifact(message) => {
with_output_for(&message.package_id.repr, &mut |name, data| {
- progress(format!("building proc-macros: {name}"));
+ progress(format!(
+ "building compile-time-deps: proc-macro {name} built"
+ ));
if message.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
{
// Skip rmeta file
@@ -354,7 +361,7 @@ impl WorkspaceBuildScripts {
});
}
Message::CompilerMessage(message) => {
- progress(message.target.name);
+ progress(format!("received compiler message for: {}", message.target.name));
if let Some(diag) = message.message.rendered.as_deref() {
push_err(diag);
@@ -385,12 +392,13 @@ impl WorkspaceBuildScripts {
manifest_path: &ManifestPath,
current_dir: &AbsPath,
sysroot: &Sysroot,
+ toolchain: Option<&semver::Version>,
) -> io::Result<Command> {
- let mut cmd = match config.run_build_script_command.as_deref() {
+ match config.run_build_script_command.as_deref() {
Some([program, args @ ..]) => {
let mut cmd = toolchain::command(program, current_dir, &config.extra_env);
cmd.args(args);
- cmd
+ Ok(cmd)
}
_ => {
let mut cmd = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env);
@@ -405,13 +413,6 @@ impl WorkspaceBuildScripts {
cmd.arg("--target-dir").arg(target_dir);
}
- // --all-targets includes tests, benches and examples in addition to the
- // default lib and bins. This is an independent concept from the --target
- // flag below.
- if config.all_targets {
- cmd.arg("--all-targets");
- }
-
if let Some(target) = &config.target {
cmd.args(["--target", target]);
}
@@ -442,20 +443,47 @@ impl WorkspaceBuildScripts {
cmd.arg("--keep-going");
- cmd
- }
- };
+ // If the [`--compile-time-deps` flag](https://github.com/rust-lang/cargo/issues/14434) is
+ // available in the current toolchain's cargo, use it to build compile-time deps only.
+ const COMP_TIME_DEPS_MIN_TOOLCHAIN_VERSION: semver::Version = semver::Version {
+ major: 1,
+ minor: 89,
+ patch: 0,
+ pre: semver::Prerelease::EMPTY,
+ build: semver::BuildMetadata::EMPTY,
+ };
+
+ let cargo_comp_time_deps_available =
+ toolchain.is_some_and(|v| *v >= COMP_TIME_DEPS_MIN_TOOLCHAIN_VERSION);
+
+ if cargo_comp_time_deps_available {
+ cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
+ cmd.arg("-Zunstable-options");
+ cmd.arg("--compile-time-deps");
+ // we can pass this unconditionally, because we won't actually build the
+ // binaries, and as such, this will succeed even on targets without libtest
+ cmd.arg("--all-targets");
+ } else {
+ // --all-targets includes tests, benches and examples in addition to the
+ // default lib and bins. This is an independent concept from the --target
+ // flag below.
+ if config.all_targets {
+ cmd.arg("--all-targets");
+ }
- if config.wrap_rustc_in_build_scripts {
- // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
- // that to compile only proc macros and build scripts during the initial
- // `cargo check`.
- let myself = std::env::current_exe()?;
- cmd.env("RUSTC_WRAPPER", myself);
- cmd.env("RA_RUSTC_WRAPPER", "1");
+ if config.wrap_rustc_in_build_scripts {
+ // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
+ // that to compile only proc macros and build scripts during the initial
+ // `cargo check`.
+ // We don't need this if we are using the `--compile-time-deps` flag.
+ let myself = std::env::current_exe()?;
+ cmd.env("RUSTC_WRAPPER", myself);
+ cmd.env("RA_RUSTC_WRAPPER", "1");
+ }
+ }
+ Ok(cmd)
+ }
}
-
- Ok(cmd)
}
}
diff --git a/crates/project-model/src/cargo_config_file.rs b/crates/project-model/src/cargo_config_file.rs
new file mode 100644
index 0000000000..7966f74df3
--- /dev/null
+++ b/crates/project-model/src/cargo_config_file.rs
@@ -0,0 +1,34 @@
+//! Read `.cargo/config.toml` as a JSON object
+use rustc_hash::FxHashMap;
+use toolchain::Tool;
+
+use crate::{ManifestPath, Sysroot, utf8_stdout};
+
+pub(crate) type CargoConfigFile = serde_json::Map<String, serde_json::Value>;
+
+pub(crate) fn read(
+ manifest: &ManifestPath,
+ extra_env: &FxHashMap<String, Option<String>>,
+ sysroot: &Sysroot,
+) -> Option<CargoConfigFile> {
+ let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
+ cargo_config
+ .args(["-Z", "unstable-options", "config", "get", "--format", "json"])
+ .env("RUSTC_BOOTSTRAP", "1");
+ if manifest.is_rust_manifest() {
+ cargo_config.arg("-Zscript");
+ }
+
+ tracing::debug!("Discovering cargo config by {:?}", cargo_config);
+ let json: serde_json::Map<String, serde_json::Value> = utf8_stdout(&mut cargo_config)
+ .inspect(|json| {
+ tracing::debug!("Discovered cargo config: {:?}", json);
+ })
+ .inspect_err(|err| {
+ tracing::debug!("Failed to discover cargo config: {:?}", err);
+ })
+ .ok()
+ .and_then(|stdout| serde_json::from_str(&stdout).ok())?;
+
+ Some(json)
+}
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index bb02284a51..daadcd9d79 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -7,16 +7,25 @@ use anyhow::Context;
use base_db::Env;
use cargo_metadata::{CargoOpt, MetadataCommand};
use la_arena::{Arena, Idx};
-use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
+use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use serde_derive::Deserialize;
use serde_json::from_value;
use span::Edition;
+use stdx::process::spawn_with_streaming_output;
use toolchain::Tool;
use crate::{CfgOverrides, InvocationStrategy};
use crate::{ManifestPath, Sysroot};
+const MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH: semver::Version = semver::Version {
+ major: 1,
+ minor: 82,
+ patch: 0,
+ pre: semver::Prerelease::EMPTY,
+ build: semver::BuildMetadata::EMPTY,
+};
+
/// [`CargoWorkspace`] represents the logical structure of, well, a Cargo
/// workspace. It pretty closely mirrors `cargo metadata` output.
///
@@ -39,6 +48,7 @@ pub struct CargoWorkspace {
is_sysroot: bool,
/// Environment variables set in the `.cargo/config` file.
config_env: Env,
+ requires_rustc_private: bool,
}
impl ops::Index<Package> for CargoWorkspace {
@@ -290,6 +300,11 @@ pub struct CargoMetadataConfig {
pub extra_args: Vec<String>,
/// Extra env vars to set when invoking the cargo command
pub extra_env: FxHashMap<String, Option<String>>,
+ /// What kind of metadata are we fetching: workspace, rustc, or sysroot.
+ pub kind: &'static str,
+ /// The toolchain version, if known.
+ /// Used to conditionally enable unstable cargo features.
+ pub toolchain_version: Option<semver::Version>,
}
// Deserialize helper for the cargo metadata
@@ -300,140 +315,6 @@ struct PackageMetadata {
}
impl CargoWorkspace {
- /// Fetches the metadata for the given `cargo_toml` manifest.
- /// A successful result may contain another metadata error if the initial fetching failed but
- /// the `--no-deps` retry succeeded.
- ///
- /// The sysroot is used to set the `RUSTUP_TOOLCHAIN` env var when invoking cargo
- /// to ensure that the rustup proxy uses the correct toolchain.
- pub fn fetch_metadata(
- cargo_toml: &ManifestPath,
- current_dir: &AbsPath,
- config: &CargoMetadataConfig,
- sysroot: &Sysroot,
- no_deps: bool,
- locked: bool,
- progress: &dyn Fn(String),
- ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
- let res = Self::fetch_metadata_(
- cargo_toml,
- current_dir,
- config,
- sysroot,
- no_deps,
- locked,
- progress,
- );
- if let Ok((_, Some(ref e))) = res {
- tracing::warn!(
- %cargo_toml,
- ?e,
- "`cargo metadata` failed, but retry with `--no-deps` succeeded"
- );
- }
- res
- }
-
- fn fetch_metadata_(
- cargo_toml: &ManifestPath,
- current_dir: &AbsPath,
- config: &CargoMetadataConfig,
- sysroot: &Sysroot,
- no_deps: bool,
- locked: bool,
- progress: &dyn Fn(String),
- ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
- let cargo = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env);
- let mut meta = MetadataCommand::new();
- meta.cargo_path(cargo.get_program());
- cargo.get_envs().for_each(|(var, val)| _ = meta.env(var, val.unwrap_or_default()));
- meta.manifest_path(cargo_toml.to_path_buf());
- match &config.features {
- CargoFeatures::All => {
- meta.features(CargoOpt::AllFeatures);
- }
- CargoFeatures::Selected { features, no_default_features } => {
- if *no_default_features {
- meta.features(CargoOpt::NoDefaultFeatures);
- }
- if !features.is_empty() {
- meta.features(CargoOpt::SomeFeatures(features.clone()));
- }
- }
- }
- meta.current_dir(current_dir);
-
- let mut other_options = vec![];
- // cargo metadata only supports a subset of flags of what cargo usually accepts, and usually
- // the only relevant flags for metadata here are unstable ones, so we pass those along
- // but nothing else
- let mut extra_args = config.extra_args.iter();
- while let Some(arg) = extra_args.next() {
- if arg == "-Z" {
- if let Some(arg) = extra_args.next() {
- other_options.push("-Z".to_owned());
- other_options.push(arg.to_owned());
- }
- }
- }
-
- if !config.targets.is_empty() {
- other_options.extend(
- config.targets.iter().flat_map(|it| ["--filter-platform".to_owned(), it.clone()]),
- );
- }
- // The manifest is a rust file, so this means its a script manifest
- if cargo_toml.is_rust_manifest() {
- // Deliberately don't set up RUSTC_BOOTSTRAP or a nightly override here, the user should
- // opt into it themselves.
- other_options.push("-Zscript".to_owned());
- }
- if locked {
- other_options.push("--locked".to_owned());
- }
- if no_deps {
- other_options.push("--no-deps".to_owned());
- }
- meta.other_options(other_options);
-
- // FIXME: Fetching metadata is a slow process, as it might require
- // calling crates.io. We should be reporting progress here, but it's
- // unclear whether cargo itself supports it.
- progress("metadata".to_owned());
-
- (|| -> anyhow::Result<(_, _)> {
- let output = meta.cargo_command().output()?;
- if !output.status.success() {
- let error = cargo_metadata::Error::CargoMetadata {
- stderr: String::from_utf8(output.stderr)?,
- }
- .into();
- if !no_deps {
- // If we failed to fetch metadata with deps, try again without them.
- // This makes r-a still work partially when offline.
- if let Ok((metadata, _)) = Self::fetch_metadata_(
- cargo_toml,
- current_dir,
- config,
- sysroot,
- locked,
- true,
- progress,
- ) {
- return Ok((metadata, Some(error)));
- }
- }
- return Err(error);
- }
- let stdout = from_utf8(&output.stdout)?
- .lines()
- .find(|line| line.starts_with('{'))
- .ok_or(cargo_metadata::Error::NoJson)?;
- Ok((cargo_metadata::MetadataCommand::parse(stdout)?, None))
- })()
- .with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))
- }
-
pub fn new(
mut meta: cargo_metadata::Metadata,
ws_manifest_path: ManifestPath,
@@ -449,6 +330,7 @@ impl CargoWorkspace {
let workspace_root = AbsPathBuf::assert(meta.workspace_root);
let target_directory = AbsPathBuf::assert(meta.target_directory);
let mut is_virtual_workspace = true;
+ let mut requires_rustc_private = false;
meta.packages.sort_by(|a, b| a.id.cmp(&b.id));
for meta_pkg in meta.packages {
@@ -492,7 +374,7 @@ impl CargoWorkspace {
is_virtual_workspace &= manifest != ws_manifest_path;
let pkg = packages.alloc(PackageData {
id: id.repr.clone(),
- name,
+ name: name.to_string(),
version,
manifest: manifest.clone(),
targets: Vec::new(),
@@ -513,6 +395,7 @@ impl CargoWorkspace {
metadata: meta.rust_analyzer.unwrap_or_default(),
});
let pkg_data = &mut packages[pkg];
+ requires_rustc_private |= pkg_data.metadata.rustc_private;
pkg_by_id.insert(id, pkg);
for meta_tgt in meta_targets {
let cargo_metadata::Target { name, kind, required_features, src_path, .. } =
@@ -547,10 +430,12 @@ impl CargoWorkspace {
.flat_map(|dep| DepKind::iter(&dep.dep_kinds).map(move |kind| (dep, kind)));
for (dep_node, kind) in dependencies {
let &pkg = pkg_by_id.get(&dep_node.pkg).unwrap();
- let dep = PackageDependency { name: dep_node.name.clone(), pkg, kind };
+ let dep = PackageDependency { name: dep_node.name.to_string(), pkg, kind };
packages[source].dependencies.push(dep);
}
- packages[source].active_features.extend(node.features);
+ packages[source]
+ .active_features
+ .extend(node.features.into_iter().map(|it| it.to_string()));
}
CargoWorkspace {
@@ -560,6 +445,7 @@ impl CargoWorkspace {
target_directory,
manifest_path: ws_manifest_path,
is_virtual_workspace,
+ requires_rustc_private,
is_sysroot,
config_env: cargo_config_env,
}
@@ -658,4 +544,219 @@ impl CargoWorkspace {
pub fn is_sysroot(&self) -> bool {
self.is_sysroot
}
+
+ pub fn requires_rustc_private(&self) -> bool {
+ self.requires_rustc_private
+ }
+}
+
+pub(crate) struct FetchMetadata {
+ command: cargo_metadata::MetadataCommand,
+ lockfile_path: Option<Utf8PathBuf>,
+ kind: &'static str,
+ no_deps: bool,
+ no_deps_result: anyhow::Result<cargo_metadata::Metadata>,
+ other_options: Vec<String>,
+}
+
+impl FetchMetadata {
+ /// Builds a command to fetch metadata for the given `cargo_toml` manifest.
+ ///
+ /// Performs a lightweight pre-fetch using the `--no-deps` option,
+ /// available via [`FetchMetadata::no_deps_metadata`], to gather basic
+ /// information such as the `target-dir`.
+ ///
+ /// The provided sysroot is used to set the `RUSTUP_TOOLCHAIN`
+ /// environment variable when invoking Cargo, ensuring that the
+ /// rustup proxy selects the correct toolchain.
+ pub(crate) fn new(
+ cargo_toml: &ManifestPath,
+ current_dir: &AbsPath,
+ config: &CargoMetadataConfig,
+ sysroot: &Sysroot,
+ no_deps: bool,
+ ) -> Self {
+ let cargo = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env);
+ let mut command = MetadataCommand::new();
+ command.cargo_path(cargo.get_program());
+ cargo.get_envs().for_each(|(var, val)| _ = command.env(var, val.unwrap_or_default()));
+ command.manifest_path(cargo_toml.to_path_buf());
+ match &config.features {
+ CargoFeatures::All => {
+ command.features(CargoOpt::AllFeatures);
+ }
+ CargoFeatures::Selected { features, no_default_features } => {
+ if *no_default_features {
+ command.features(CargoOpt::NoDefaultFeatures);
+ }
+ if !features.is_empty() {
+ command.features(CargoOpt::SomeFeatures(features.clone()));
+ }
+ }
+ }
+ command.current_dir(current_dir);
+
+ let mut needs_nightly = false;
+ let mut other_options = vec![];
+ // cargo metadata only supports a subset of flags of what cargo usually accepts, and usually
+ // the only relevant flags for metadata here are unstable ones, so we pass those along
+ // but nothing else
+ let mut extra_args = config.extra_args.iter();
+ while let Some(arg) = extra_args.next() {
+ if arg == "-Z" {
+ if let Some(arg) = extra_args.next() {
+ needs_nightly = true;
+ other_options.push("-Z".to_owned());
+ other_options.push(arg.to_owned());
+ }
+ }
+ }
+
+ let mut lockfile_path = None;
+ if cargo_toml.is_rust_manifest() {
+ needs_nightly = true;
+ other_options.push("-Zscript".to_owned());
+ } else if config
+ .toolchain_version
+ .as_ref()
+ .is_some_and(|v| *v >= MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH)
+ {
+ lockfile_path = Some(<_ as AsRef<Utf8Path>>::as_ref(cargo_toml).with_extension("lock"));
+ }
+
+ if !config.targets.is_empty() {
+ other_options.extend(
+ config.targets.iter().flat_map(|it| ["--filter-platform".to_owned(), it.clone()]),
+ );
+ }
+
+ command.other_options(other_options.clone());
+
+ if needs_nightly {
+ command.env("RUSTC_BOOTSTRAP", "1");
+ }
+
+ // Pre-fetch basic metadata using `--no-deps`, which:
+ // - avoids fetching registries like crates.io,
+ // - skips dependency resolution and does not modify lockfiles,
+ // - and thus doesn't require progress reporting or copying lockfiles.
+ //
+ // Useful as a fast fallback to extract info like `target-dir`.
+ let cargo_command;
+ let no_deps_result = if no_deps {
+ command.no_deps();
+ cargo_command = command.cargo_command();
+ command.exec()
+ } else {
+ let mut no_deps_command = command.clone();
+ no_deps_command.no_deps();
+ cargo_command = no_deps_command.cargo_command();
+ no_deps_command.exec()
+ }
+ .with_context(|| format!("Failed to run `{cargo_command:?}`"));
+
+ Self { command, lockfile_path, kind: config.kind, no_deps, no_deps_result, other_options }
+ }
+
+ pub(crate) fn no_deps_metadata(&self) -> Option<&cargo_metadata::Metadata> {
+ self.no_deps_result.as_ref().ok()
+ }
+
+ /// Executes the metadata-fetching command.
+ ///
+ /// A successful result may still contain a metadata error if the full fetch failed,
+ /// but the fallback `--no-deps` pre-fetch succeeded during command construction.
+ pub(crate) fn exec(
+ self,
+ target_dir: &Utf8Path,
+ locked: bool,
+ progress: &dyn Fn(String),
+ ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
+ let Self { mut command, lockfile_path, kind, no_deps, no_deps_result, mut other_options } =
+ self;
+
+ if no_deps {
+ return no_deps_result.map(|m| (m, None));
+ }
+
+ let mut using_lockfile_copy = false;
+ // Work on a copy of the lockfile placed in the target directory, so we never modify the user's original lockfile.
+ if let Some(lockfile) = lockfile_path {
+ let target_lockfile =
+ target_dir.join("rust-analyzer").join("metadata").join(kind).join("Cargo.lock");
+ match std::fs::copy(&lockfile, &target_lockfile) {
+ Ok(_) => {
+ using_lockfile_copy = true;
+ other_options.push("--lockfile-path".to_owned());
+ other_options.push(target_lockfile.to_string());
+ }
+ Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
+ // There exists no lockfile yet
+ using_lockfile_copy = true;
+ other_options.push("--lockfile-path".to_owned());
+ other_options.push(target_lockfile.to_string());
+ }
+ Err(e) => {
+ tracing::warn!(
+ "Failed to copy lock file from `{lockfile}` to `{target_lockfile}`: {e}",
+ );
+ }
+ }
+ }
+ if using_lockfile_copy {
+ other_options.push("-Zunstable-options".to_owned());
+ command.env("RUSTC_BOOTSTRAP", "1");
+ }
+ // No need to lock it if we copied the lockfile; we won't modify the original after all.
+ // This way cargo cannot error out on us if the lockfile requires updating.
+ if !using_lockfile_copy && locked {
+ other_options.push("--locked".to_owned());
+ }
+ command.other_options(other_options);
+
+ // FIXME: Fetching metadata is a slow process, as it might require
+ // calling crates.io. We should be reporting progress here, but it's
+ // unclear whether cargo itself supports it.
+ progress("cargo metadata: started".to_owned());
+
+ let res = (|| -> anyhow::Result<(_, _)> {
+ let mut errored = false;
+ let output =
+ spawn_with_streaming_output(command.cargo_command(), &mut |_| (), &mut |line| {
+ errored = errored || line.starts_with("error") || line.starts_with("warning");
+ if errored {
+ progress("cargo metadata: ?".to_owned());
+ return;
+ }
+ progress(format!("cargo metadata: {line}"));
+ })?;
+ if !output.status.success() {
+ progress(format!("cargo metadata: failed {}", output.status));
+ let error = cargo_metadata::Error::CargoMetadata {
+ stderr: String::from_utf8(output.stderr)?,
+ }
+ .into();
+ if !no_deps {
+ // If we failed to fetch metadata with deps, return pre-fetched result without them.
+ // This makes r-a still work partially when offline.
+ if let Ok(metadata) = no_deps_result {
+ tracing::warn!(
+ ?error,
+ "`cargo metadata` failed and returning succeeded result with `--no-deps`"
+ );
+ return Ok((metadata, Some(error)));
+ }
+ }
+ return Err(error);
+ }
+ let stdout = from_utf8(&output.stdout)?
+ .lines()
+ .find(|line| line.starts_with('{'))
+ .ok_or(cargo_metadata::Error::NoJson)?;
+ Ok((cargo_metadata::MetadataCommand::parse(stdout)?, None))
+ })()
+ .with_context(|| format!("Failed to run `{:?}`", command.cargo_command()));
+ progress("cargo metadata: finished".to_owned());
+ res
+ }
}
diff --git a/crates/project-model/src/env.rs b/crates/project-model/src/env.rs
index 450def5461..d281492fc9 100644
--- a/crates/project-model/src/env.rs
+++ b/crates/project-model/src/env.rs
@@ -1,10 +1,9 @@
//! Cargo-like environment variables injection.
use base_db::Env;
use paths::Utf8Path;
-use rustc_hash::FxHashMap;
use toolchain::Tool;
-use crate::{ManifestPath, PackageData, Sysroot, TargetKind, utf8_stdout};
+use crate::{ManifestPath, PackageData, TargetKind, cargo_config_file::CargoConfigFile};
/// Recreates the compile-time environment variables that Cargo sets.
///
@@ -61,84 +60,68 @@ pub(crate) fn inject_rustc_tool_env(env: &mut Env, cargo_name: &str, kind: Targe
env.set("CARGO_CRATE_NAME", cargo_name.replace('-', "_"));
}
-pub(crate) fn cargo_config_env(
- manifest: &ManifestPath,
- extra_env: &FxHashMap<String, Option<String>>,
- sysroot: &Sysroot,
-) -> Env {
- let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
- cargo_config
- .args(["-Z", "unstable-options", "config", "get", "env"])
- .env("RUSTC_BOOTSTRAP", "1");
- if manifest.is_rust_manifest() {
- cargo_config.arg("-Zscript");
- }
- // if successful we receive `env.key.value = "value" per entry
- tracing::debug!("Discovering cargo config env by {:?}", cargo_config);
- utf8_stdout(&mut cargo_config)
- .map(|stdout| parse_output_cargo_config_env(manifest, &stdout))
- .inspect(|env| {
- tracing::debug!("Discovered cargo config env: {:?}", env);
- })
- .inspect_err(|err| {
- tracing::debug!("Failed to discover cargo config env: {:?}", err);
- })
- .unwrap_or_default()
-}
-
-fn parse_output_cargo_config_env(manifest: &ManifestPath, stdout: &str) -> Env {
+pub(crate) fn cargo_config_env(manifest: &ManifestPath, config: &Option<CargoConfigFile>) -> Env {
let mut env = Env::default();
- let mut relatives = vec![];
- for (key, val) in
- stdout.lines().filter_map(|l| l.strip_prefix("env.")).filter_map(|l| l.split_once(" = "))
- {
- let val = val.trim_matches('"').to_owned();
- if let Some((key, modifier)) = key.split_once('.') {
- match modifier {
- "relative" => relatives.push((key, val)),
- "value" => _ = env.insert(key, val),
- _ => {
- tracing::warn!(
- "Unknown modifier in cargo config env: {}, expected `relative` or `value`",
- modifier
- );
- continue;
- }
- }
- } else {
- env.insert(key, val);
- }
- }
+ let Some(serde_json::Value::Object(env_json)) = config.as_ref().and_then(|c| c.get("env"))
+ else {
+ return env;
+ };
+
// FIXME: The base here should be the parent of the `.cargo/config` file, not the manifest.
// But cargo does not provide this information.
let base = <_ as AsRef<Utf8Path>>::as_ref(manifest.parent());
- for (key, relative) in relatives {
- if relative != "true" {
+
+ for (key, entry) in env_json {
+ let serde_json::Value::Object(entry) = entry else {
continue;
- }
- if let Some(suffix) = env.get(key) {
- env.insert(key, base.join(suffix).to_string());
- }
+ };
+ let Some(value) = entry.get("value").and_then(|v| v.as_str()) else {
+ continue;
+ };
+
+ let value = if entry
+ .get("relative")
+ .and_then(|v| v.as_bool())
+ .is_some_and(std::convert::identity)
+ {
+ base.join(value).to_string()
+ } else {
+ value.to_owned()
+ };
+ env.insert(key, value);
}
+
env
}
#[test]
fn parse_output_cargo_config_env_works() {
- let stdout = r#"
-env.CARGO_WORKSPACE_DIR.relative = true
-env.CARGO_WORKSPACE_DIR.value = ""
-env.RELATIVE.relative = true
-env.RELATIVE.value = "../relative"
-env.INVALID.relative = invalidbool
-env.INVALID.value = "../relative"
-env.TEST.value = "test"
-"#
- .trim();
+ let raw = r#"
+{
+ "env": {
+ "CARGO_WORKSPACE_DIR": {
+ "relative": true,
+ "value": ""
+ },
+ "INVALID": {
+ "relative": "invalidbool",
+ "value": "../relative"
+ },
+ "RELATIVE": {
+ "relative": true,
+ "value": "../relative"
+ },
+ "TEST": {
+ "value": "test"
+ }
+ }
+}
+"#;
+ let config: CargoConfigFile = serde_json::from_str(raw).unwrap();
let cwd = paths::Utf8PathBuf::try_from(std::env::current_dir().unwrap()).unwrap();
let manifest = paths::AbsPathBuf::assert(cwd.join("Cargo.toml"));
let manifest = ManifestPath::try_from(manifest).unwrap();
- let env = parse_output_cargo_config_env(&manifest, stdout);
+ let env = cargo_config_env(&manifest, &Some(config));
assert_eq!(env.get("CARGO_WORKSPACE_DIR").as_deref(), Some(cwd.join("").as_str()));
assert_eq!(env.get("RELATIVE").as_deref(), Some(cwd.join("../relative").as_str()));
assert_eq!(env.get("INVALID").as_deref(), Some("../relative"));
diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs
index 436af64cf1..3bf3d06e6b 100644
--- a/crates/project-model/src/lib.rs
+++ b/crates/project-model/src/lib.rs
@@ -24,7 +24,7 @@ pub mod toolchain_info {
use std::path::Path;
- use crate::{ManifestPath, Sysroot};
+ use crate::{ManifestPath, Sysroot, cargo_config_file::CargoConfigFile};
#[derive(Copy, Clone)]
pub enum QueryConfig<'a> {
@@ -32,11 +32,12 @@ pub mod toolchain_info {
Rustc(&'a Sysroot, &'a Path),
/// Attempt to use cargo to query the desired information, honoring cargo configurations.
/// If this fails, falls back to invoking `rustc` directly.
- Cargo(&'a Sysroot, &'a ManifestPath),
+ Cargo(&'a Sysroot, &'a ManifestPath, &'a Option<CargoConfigFile>),
}
}
mod build_dependencies;
+mod cargo_config_file;
mod cargo_workspace;
mod env;
mod manifest_path;
diff --git a/crates/project-model/src/manifest_path.rs b/crates/project-model/src/manifest_path.rs
index 4f43be2f38..fba8cc9709 100644
--- a/crates/project-model/src/manifest_path.rs
+++ b/crates/project-model/src/manifest_path.rs
@@ -1,7 +1,7 @@
//! See [`ManifestPath`].
use std::{borrow::Borrow, fmt, ops};
-use paths::{AbsPath, AbsPathBuf};
+use paths::{AbsPath, AbsPathBuf, Utf8Path};
/// More or less [`AbsPathBuf`] with non-None parent.
///
@@ -78,6 +78,12 @@ impl AsRef<std::ffi::OsStr> for ManifestPath {
}
}
+impl AsRef<Utf8Path> for ManifestPath {
+ fn as_ref(&self) -> &Utf8Path {
+ self.file.as_ref()
+ }
+}
+
impl Borrow<AbsPath> for ManifestPath {
fn borrow(&self) -> &AbsPath {
self.file.borrow()
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index d4055d9a0a..9781c46737 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -4,18 +4,20 @@
//! but we can't process `.rlib` and need source code instead. The source code
//! is typically installed with `rustup component add rust-src` command.
+use core::fmt;
use std::{env, fs, ops::Not, path::Path, process::Command};
use anyhow::{Result, format_err};
use itertools::Itertools;
-use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
+use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf};
use rustc_hash::FxHashMap;
use stdx::format_to;
use toolchain::{Tool, probe_for_binary};
use crate::{
CargoWorkspace, ManifestPath, ProjectJson, RustSourceWorkspaceConfig,
- cargo_workspace::CargoMetadataConfig, utf8_stdout,
+ cargo_workspace::{CargoMetadataConfig, FetchMetadata},
+ utf8_stdout,
};
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -34,6 +36,19 @@ pub enum RustLibSrcWorkspace {
Empty,
}
+impl fmt::Display for RustLibSrcWorkspace {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ RustLibSrcWorkspace::Workspace(ws) => write!(f, "workspace {}", ws.workspace_root()),
+ RustLibSrcWorkspace::Json(json) => write!(f, "json {}", json.manifest_or_root()),
+ RustLibSrcWorkspace::Stitched(stitched) => {
+ write!(f, "stitched with {} crates", stitched.crates.len())
+ }
+ RustLibSrcWorkspace::Empty => write!(f, "empty"),
+ }
+ }
+}
+
impl Sysroot {
pub const fn empty() -> Sysroot {
Sysroot {
@@ -149,18 +164,18 @@ impl Sysroot {
}
}
- pub fn discover_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
- let Some(root) = self.root() else {
- return Err(anyhow::format_err!("no sysroot",));
- };
- ["libexec", "lib"]
- .into_iter()
- .map(|segment| root.join(segment).join("rust-analyzer-proc-macro-srv"))
- .find_map(|server_path| probe_for_binary(server_path.into()))
- .map(AbsPathBuf::assert)
- .ok_or_else(|| {
- anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", root)
- })
+ pub fn discover_proc_macro_srv(&self) -> Option<anyhow::Result<AbsPathBuf>> {
+ let root = self.root()?;
+ Some(
+ ["libexec", "lib"]
+ .into_iter()
+ .map(|segment| root.join(segment).join("rust-analyzer-proc-macro-srv"))
+ .find_map(|server_path| probe_for_binary(server_path.into()))
+ .map(AbsPathBuf::assert)
+ .ok_or_else(|| {
+ anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", root)
+ }),
+ )
}
fn assemble(
@@ -195,6 +210,10 @@ impl Sysroot {
pub fn load_workspace(
&self,
sysroot_source_config: &RustSourceWorkspaceConfig,
+ no_deps: bool,
+ current_dir: &AbsPath,
+ target_dir: &Utf8Path,
+ progress: &dyn Fn(String),
) -> Option<RustLibSrcWorkspace> {
assert!(matches!(self.workspace, RustLibSrcWorkspace::Empty), "workspace already loaded");
let Self { root: _, rust_lib_src_root: Some(src_root), workspace: _, error: _ } = self
@@ -204,10 +223,18 @@ impl Sysroot {
if let RustSourceWorkspaceConfig::CargoMetadata(cargo_config) = sysroot_source_config {
let library_manifest = ManifestPath::try_from(src_root.join("Cargo.toml")).unwrap();
if fs::metadata(&library_manifest).is_ok() {
- if let Some(loaded) =
- self.load_library_via_cargo(library_manifest, src_root, cargo_config)
- {
- return Some(loaded);
+ match self.load_library_via_cargo(
+ &library_manifest,
+ current_dir,
+ target_dir,
+ cargo_config,
+ no_deps,
+ progress,
+ ) {
+ Ok(loaded) => return Some(loaded),
+ Err(e) => {
+ tracing::error!("`cargo metadata` failed on `{library_manifest}` : {e}")
+ }
}
}
tracing::debug!("Stitching sysroot library: {src_root}");
@@ -293,10 +320,13 @@ impl Sysroot {
fn load_library_via_cargo(
&self,
- library_manifest: ManifestPath,
- rust_lib_src_dir: &AbsPathBuf,
+ library_manifest: &ManifestPath,
+ current_dir: &AbsPath,
+ target_dir: &Utf8Path,
cargo_config: &CargoMetadataConfig,
- ) -> Option<RustLibSrcWorkspace> {
+ no_deps: bool,
+ progress: &dyn Fn(String),
+ ) -> Result<RustLibSrcWorkspace> {
tracing::debug!("Loading library metadata: {library_manifest}");
let mut cargo_config = cargo_config.clone();
// the sysroot uses `public-dependency`, so we make cargo think it's a nightly
@@ -305,22 +335,11 @@ impl Sysroot {
Some("nightly".to_owned()),
);
- let (mut res, _) = match CargoWorkspace::fetch_metadata(
- &library_manifest,
- rust_lib_src_dir,
- &cargo_config,
- self,
- false,
- // Make sure we never attempt to write to the sysroot
- true,
- &|_| (),
- ) {
- Ok(it) => it,
- Err(e) => {
- tracing::error!("`cargo metadata` failed on `{library_manifest}` : {e}");
- return None;
- }
- };
+ // Make sure we never attempt to write to the sysroot
+ let locked = true;
+ let (mut res, _) =
+ FetchMetadata::new(library_manifest, current_dir, &cargo_config, self, no_deps)
+ .exec(target_dir, locked, progress)?;
// Patch out `rustc-std-workspace-*` crates to point to the real crates.
// This is done prior to `CrateGraph` construction to prevent de-duplication logic from failing.
@@ -339,7 +358,7 @@ impl Sysroot {
Some(_) => {
tracing::warn!("unknown rustc-std-workspace-* crate: {}", package.name)
}
- None => match &*package.name {
+ None => match &**package.name {
"core" => real_core = Some(package.id.clone()),
"alloc" => real_alloc = Some(package.id.clone()),
"std" => real_std = Some(package.id.clone()),
@@ -371,8 +390,9 @@ impl Sysroot {
res.packages.remove(idx);
});
- let cargo_workspace = CargoWorkspace::new(res, library_manifest, Default::default(), true);
- Some(RustLibSrcWorkspace::Workspace(cargo_workspace))
+ let cargo_workspace =
+ CargoWorkspace::new(res, library_manifest.clone(), Default::default(), true);
+ Ok(RustLibSrcWorkspace::Workspace(cargo_workspace))
}
}
diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs
index c69891b746..ed72520f40 100644
--- a/crates/project-model/src/tests.rs
+++ b/crates/project-model/src/tests.rs
@@ -1,3 +1,5 @@
+use std::env::temp_dir;
+
use base_db::{CrateGraphBuilder, ProcMacroPaths};
use cargo_metadata::Metadata;
use cfg::{CfgAtom, CfgDiff};
@@ -235,11 +237,23 @@ fn smoke_test_real_sysroot_cargo() {
AbsPath::assert(Utf8Path::new(env!("CARGO_MANIFEST_DIR"))),
&Default::default(),
);
- let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo());
+ let cwd = AbsPathBuf::assert_utf8(temp_dir().join("smoke_test_real_sysroot_cargo"));
+ std::fs::create_dir_all(&cwd).unwrap();
+ let loaded_sysroot = sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::default_cargo(),
+ false,
+ &cwd,
+ &Utf8PathBuf::default(),
+ &|_| (),
+ );
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}
- assert!(matches!(sysroot.workspace(), RustLibSrcWorkspace::Workspace(_)));
+ assert!(
+ matches!(sysroot.workspace(), RustLibSrcWorkspace::Workspace(_)),
+ "got {}",
+ sysroot.workspace()
+ );
let project_workspace = ProjectWorkspace {
kind: ProjectWorkspaceKind::Cargo {
cargo: cargo_workspace,
diff --git a/crates/project-model/src/toolchain_info/rustc_cfg.rs b/crates/project-model/src/toolchain_info/rustc_cfg.rs
index a77f76797f..6e06e88bf7 100644
--- a/crates/project-model/src/toolchain_info/rustc_cfg.rs
+++ b/crates/project-model/src/toolchain_info/rustc_cfg.rs
@@ -63,7 +63,7 @@ fn rustc_print_cfg(
) -> anyhow::Result<String> {
const RUSTC_ARGS: [&str; 2] = ["--print", "cfg"];
let (sysroot, current_dir) = match config {
- QueryConfig::Cargo(sysroot, cargo_toml) => {
+ QueryConfig::Cargo(sysroot, cargo_toml, _) => {
let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env);
cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS);
if let Some(target) = target {
@@ -109,7 +109,7 @@ mod tests {
let sysroot = Sysroot::empty();
let manifest_path =
ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap();
- let cfg = QueryConfig::Cargo(&sysroot, &manifest_path);
+ let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None);
assert_ne!(get(cfg, None, &FxHashMap::default()), vec![]);
}
diff --git a/crates/project-model/src/toolchain_info/target_data_layout.rs b/crates/project-model/src/toolchain_info/target_data_layout.rs
index a4d0ec6953..a28f468e69 100644
--- a/crates/project-model/src/toolchain_info/target_data_layout.rs
+++ b/crates/project-model/src/toolchain_info/target_data_layout.rs
@@ -20,7 +20,7 @@ pub fn get(
})
};
let (sysroot, current_dir) = match config {
- QueryConfig::Cargo(sysroot, cargo_toml) => {
+ QueryConfig::Cargo(sysroot, cargo_toml, _) => {
let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env);
cmd.env("RUSTC_BOOTSTRAP", "1");
cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS).args([
@@ -66,7 +66,7 @@ mod tests {
let sysroot = Sysroot::empty();
let manifest_path =
ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap();
- let cfg = QueryConfig::Cargo(&sysroot, &manifest_path);
+ let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None);
assert!(get(cfg, None, &FxHashMap::default()).is_ok());
}
diff --git a/crates/project-model/src/toolchain_info/target_tuple.rs b/crates/project-model/src/toolchain_info/target_tuple.rs
index f6ab853219..9f12ededb6 100644
--- a/crates/project-model/src/toolchain_info/target_tuple.rs
+++ b/crates/project-model/src/toolchain_info/target_tuple.rs
@@ -5,7 +5,9 @@ use anyhow::Context;
use rustc_hash::FxHashMap;
use toolchain::Tool;
-use crate::{ManifestPath, Sysroot, toolchain_info::QueryConfig, utf8_stdout};
+use crate::{
+ Sysroot, cargo_config_file::CargoConfigFile, toolchain_info::QueryConfig, utf8_stdout,
+};
/// For cargo, runs `cargo -Zunstable-options config get build.target` to get the configured project target(s).
/// For rustc, runs `rustc --print -vV` to get the host target.
@@ -20,8 +22,8 @@ pub fn get(
}
let (sysroot, current_dir) = match config {
- QueryConfig::Cargo(sysroot, cargo_toml) => {
- match cargo_config_build_target(cargo_toml, extra_env, sysroot) {
+ QueryConfig::Cargo(sysroot, cargo_toml, config_file) => {
+ match config_file.as_ref().and_then(cargo_config_build_target) {
Some(it) => return Ok(it),
None => (sysroot, cargo_toml.parent().as_ref()),
}
@@ -50,30 +52,30 @@ fn rustc_discover_host_tuple(
}
}
-fn cargo_config_build_target(
- cargo_toml: &ManifestPath,
- extra_env: &FxHashMap<String, Option<String>>,
- sysroot: &Sysroot,
-) -> Option<Vec<String>> {
- let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env);
- cmd.current_dir(cargo_toml.parent()).env("RUSTC_BOOTSTRAP", "1");
- cmd.args(["-Z", "unstable-options", "config", "get", "build.target"]);
- // if successful we receive `build.target = "target-tuple"`
- // or `build.target = ["<target 1>", ..]`
- // this might be `error: config value `build.target` is not set` in which case we
- // don't wanna log the error
- utf8_stdout(&mut cmd).and_then(parse_output_cargo_config_build_target).ok()
+fn cargo_config_build_target(config: &CargoConfigFile) -> Option<Vec<String>> {
+ match parse_json_cargo_config_build_target(config) {
+ Ok(v) => v,
+ Err(e) => {
+ tracing::debug!("Failed to discover cargo config build target {e:?}");
+ None
+ }
+ }
}
// Parses `"build.target = [target-tuple, target-tuple, ...]"` or `"build.target = "target-tuple"`
-fn parse_output_cargo_config_build_target(stdout: String) -> anyhow::Result<Vec<String>> {
- let trimmed = stdout.trim_start_matches("build.target = ").trim_matches('"');
-
- if !trimmed.starts_with('[') {
- return Ok([trimmed.to_owned()].to_vec());
+fn parse_json_cargo_config_build_target(
+ config: &CargoConfigFile,
+) -> anyhow::Result<Option<Vec<String>>> {
+ let target = config.get("build").and_then(|v| v.as_object()).and_then(|m| m.get("target"));
+ match target {
+ Some(serde_json::Value::String(s)) => Ok(Some(vec![s.to_owned()])),
+ Some(v) => serde_json::from_value(v.clone())
+ .map(Option::Some)
+ .context("Failed to parse `build.target` as an array of target"),
+ // this might be `error: config value `build.target` is not set`, in which case we
+ // don't wanna log the error
+ None => Ok(None),
}
-
- serde_json::from_str(trimmed).context("Failed to parse `build.target` as an array of target")
}
#[cfg(test)]
@@ -90,7 +92,7 @@ mod tests {
let sysroot = Sysroot::empty();
let manifest_path =
ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap();
- let cfg = QueryConfig::Cargo(&sysroot, &manifest_path);
+ let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None);
assert!(get(cfg, None, &FxHashMap::default()).is_ok());
}
diff --git a/crates/project-model/src/toolchain_info/version.rs b/crates/project-model/src/toolchain_info/version.rs
index 91ba859859..357053d8e8 100644
--- a/crates/project-model/src/toolchain_info/version.rs
+++ b/crates/project-model/src/toolchain_info/version.rs
@@ -12,7 +12,7 @@ pub(crate) fn get(
extra_env: &FxHashMap<String, Option<String>>,
) -> Result<Option<Version>, anyhow::Error> {
let (mut cmd, prefix) = match config {
- QueryConfig::Cargo(sysroot, cargo_toml) => {
+ QueryConfig::Cargo(sysroot, cargo_toml, _) => {
(sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env), "cargo ")
}
QueryConfig::Rustc(sysroot, current_dir) => {
@@ -44,7 +44,7 @@ mod tests {
let sysroot = Sysroot::empty();
let manifest_path =
ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap();
- let cfg = QueryConfig::Cargo(&sysroot, &manifest_path);
+ let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None);
assert!(get(cfg, &FxHashMap::default()).is_ok());
}
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index eec0077ea6..677f29e3c6 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -7,15 +7,16 @@ use std::{collections::VecDeque, fmt, fs, iter, ops::Deref, sync, thread};
use anyhow::Context;
use base_db::{
CrateBuilderId, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin,
- CrateWorkspaceData, DependencyBuilder, Env, LangCrateOrigin, ProcMacroPaths,
- TargetLayoutLoadResult,
+ CrateWorkspaceData, DependencyBuilder, Env, LangCrateOrigin, ProcMacroLoadingError,
+ ProcMacroPaths, TargetLayoutLoadResult,
};
use cfg::{CfgAtom, CfgDiff, CfgOptions};
use intern::{Symbol, sym};
-use paths::{AbsPath, AbsPathBuf};
+use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use semver::Version;
use span::{Edition, FileId};
+use toolchain::Tool;
use tracing::instrument;
use triomphe::Arc;
@@ -24,11 +25,13 @@ use crate::{
ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind,
WorkspaceBuildScripts,
build_dependencies::BuildScriptOutput,
- cargo_workspace::{CargoMetadataConfig, DepKind, PackageData, RustLibSource},
+ cargo_config_file,
+ cargo_workspace::{CargoMetadataConfig, DepKind, FetchMetadata, PackageData, RustLibSource},
env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env},
project_json::{Crate, CrateArrayIdx},
sysroot::RustLibSrcWorkspace,
toolchain_info::{QueryConfig, rustc_cfg, target_data_layout, target_tuple, version},
+ utf8_stdout,
};
use tracing::{debug, error, info};
@@ -170,7 +173,7 @@ impl ProjectWorkspace {
pub fn load(
manifest: ProjectManifest,
config: &CargoConfig,
- progress: &dyn Fn(String),
+ progress: &(dyn Fn(String) + Sync),
) -> anyhow::Result<ProjectWorkspace> {
ProjectWorkspace::load_inner(&manifest, config, progress)
.with_context(|| format!("Failed to load the project at {manifest}"))
@@ -179,7 +182,7 @@ impl ProjectWorkspace {
fn load_inner(
manifest: &ProjectManifest,
config: &CargoConfig,
- progress: &dyn Fn(String),
+ progress: &(dyn Fn(String) + Sync),
) -> anyhow::Result<ProjectWorkspace> {
let res = match manifest {
ProjectManifest::ProjectJson(project_json) => {
@@ -206,9 +209,9 @@ impl ProjectWorkspace {
fn load_cargo(
cargo_toml: &ManifestPath,
config: &CargoConfig,
- progress: &dyn Fn(String),
+ progress: &(dyn Fn(String) + Sync),
) -> Result<ProjectWorkspace, anyhow::Error> {
- progress("Discovering sysroot".to_owned());
+ progress("discovering sysroot".to_owned());
let CargoConfig {
features,
rustc_source,
@@ -223,16 +226,11 @@ impl ProjectWorkspace {
no_deps,
..
} = config;
+ let workspace_dir = cargo_toml.parent();
let mut sysroot = match (sysroot, sysroot_src) {
- (Some(RustLibSource::Discover), None) => {
- Sysroot::discover(cargo_toml.parent(), extra_env)
- }
+ (Some(RustLibSource::Discover), None) => Sysroot::discover(workspace_dir, extra_env),
(Some(RustLibSource::Discover), Some(sysroot_src)) => {
- Sysroot::discover_with_src_override(
- cargo_toml.parent(),
- extra_env,
- sysroot_src.clone(),
- )
+ Sysroot::discover_with_src_override(workspace_dir, extra_env, sysroot_src.clone())
}
(Some(RustLibSource::Path(path)), None) => {
Sysroot::discover_rust_lib_src_dir(path.clone())
@@ -243,29 +241,72 @@ impl ProjectWorkspace {
(None, _) => Sysroot::empty(),
};
+ // Resolve the Cargo.toml to the workspace root as we base the `target` dir off of it.
+ let mut cmd = sysroot.tool(Tool::Cargo, workspace_dir, extra_env);
+ cmd.args(["locate-project", "--workspace", "--manifest-path", cargo_toml.as_str()]);
+ let cargo_toml = &match utf8_stdout(&mut cmd) {
+ Ok(output) => {
+ #[derive(serde_derive::Deserialize)]
+ struct Root {
+ root: Utf8PathBuf,
+ }
+ match serde_json::from_str::<Root>(&output) {
+ Ok(object) => ManifestPath::try_from(AbsPathBuf::assert(object.root))
+ .expect("manifest path should be absolute"),
+ Err(e) => {
+ tracing::error!(%e, %cargo_toml, "failed fetching cargo workspace root");
+ cargo_toml.clone()
+ }
+ }
+ }
+ Err(e) => {
+ tracing::error!(%e, %cargo_toml, "failed fetching cargo workspace root");
+ cargo_toml.clone()
+ }
+ };
+ let workspace_dir = cargo_toml.parent();
+
tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot");
- progress("Querying project metadata".to_owned());
- let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml);
+ progress("querying project metadata".to_owned());
+ let config_file = cargo_config_file::read(cargo_toml, extra_env, &sysroot);
+ let config_file_ = config_file.clone();
+ let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml, &config_file_);
let targets =
target_tuple::get(toolchain_config, target.as_deref(), extra_env).unwrap_or_default();
+ let toolchain = version::get(toolchain_config, extra_env)
+ .inspect_err(|e| {
+ tracing::error!(%e,
+ "failed fetching toolchain version for {cargo_toml:?} workspace"
+ )
+ })
+ .ok()
+ .flatten();
+
+ let fetch_metadata = FetchMetadata::new(
+ cargo_toml,
+ workspace_dir,
+ &CargoMetadataConfig {
+ features: features.clone(),
+ targets: targets.clone(),
+ extra_args: extra_args.clone(),
+ extra_env: extra_env.clone(),
+ toolchain_version: toolchain.clone(),
+ kind: "workspace",
+ },
+ &sysroot,
+ *no_deps,
+ );
+ let target_dir = config
+ .target_dir
+ .clone()
+ .or_else(|| fetch_metadata.no_deps_metadata().map(|m| m.target_directory.clone()))
+ .unwrap_or_else(|| workspace_dir.join("target").into());
// We spawn a bunch of processes to query various information about the workspace's
// toolchain and sysroot
// We can speed up loading a bit by spawning all of these processes in parallel (especially
// on systems were process spawning is delayed)
let join = thread::scope(|s| {
- let workspace_dir = cargo_toml.parent();
- let toolchain = s.spawn(|| {
- version::get(toolchain_config, extra_env)
- .inspect_err(|e| {
- tracing::error!(%e,
- "failed fetching toolchain version for {cargo_toml:?} workspace"
- )
- })
- .ok()
- .flatten()
- });
-
let rustc_cfg = s.spawn(|| {
rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), extra_env)
});
@@ -292,7 +333,7 @@ impl ProjectWorkspace {
};
rustc_dir.and_then(|rustc_dir| {
info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source");
- match CargoWorkspace::fetch_metadata(
+ match FetchMetadata::new(
&rustc_dir,
workspace_dir,
&CargoMetadataConfig {
@@ -300,12 +341,12 @@ impl ProjectWorkspace {
targets: targets.clone(),
extra_args: extra_args.clone(),
extra_env: extra_env.clone(),
+ toolchain_version: toolchain.clone(),
+ kind: "rustc-dev"
},
&sysroot,
*no_deps,
- false,
- &|_| (),
- ) {
+ ).exec(&target_dir, true, progress) {
Ok((meta, _error)) => {
let workspace = CargoWorkspace::new(
meta,
@@ -334,31 +375,23 @@ impl ProjectWorkspace {
})
});
- let cargo_metadata = s.spawn(|| {
- CargoWorkspace::fetch_metadata(
- cargo_toml,
+ let cargo_metadata = s.spawn(|| fetch_metadata.exec(&target_dir, false, progress));
+ let loaded_sysroot = s.spawn(|| {
+ sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
+ config,
+ &targets,
+ toolchain.clone(),
+ )),
+ config.no_deps,
workspace_dir,
- &CargoMetadataConfig {
- features: features.clone(),
- targets: targets.clone(),
- extra_args: extra_args.clone(),
- extra_env: extra_env.clone(),
- },
- &sysroot,
- *no_deps,
- false,
- &|_| (),
+ &target_dir,
+ progress,
)
});
- let loaded_sysroot = s.spawn(|| {
- sysroot.load_workspace(&RustSourceWorkspaceConfig::CargoMetadata(
- sysroot_metadata_config(extra_env, &targets),
- ))
- });
let cargo_config_extra_env =
- s.spawn(|| cargo_config_env(cargo_toml, extra_env, &sysroot));
+ s.spawn(move || cargo_config_env(cargo_toml, &config_file));
thread::Result::Ok((
- toolchain.join()?,
rustc_cfg.join()?,
data_layout.join()?,
rustc_dir.join()?,
@@ -369,10 +402,9 @@ impl ProjectWorkspace {
});
let (
- toolchain,
rustc_cfg,
data_layout,
- rustc,
+ mut rustc,
loaded_sysroot,
cargo_metadata,
cargo_config_extra_env,
@@ -388,9 +420,18 @@ impl ProjectWorkspace {
})?;
let cargo = CargoWorkspace::new(meta, cargo_toml.clone(), cargo_config_extra_env, false);
if let Some(loaded_sysroot) = loaded_sysroot {
+ tracing::info!(src_root = ?sysroot.rust_lib_src_root(), root = %loaded_sysroot, "Loaded sysroot");
sysroot.set_workspace(loaded_sysroot);
}
+ if !cargo.requires_rustc_private() {
+ if let Err(e) = &mut rustc {
+ // We don't need the rustc sources here,
+ // so just discard the error.
+ _ = e.take();
+ }
+ }
+
Ok(ProjectWorkspace {
kind: ProjectWorkspaceKind::Cargo {
cargo,
@@ -411,26 +452,31 @@ impl ProjectWorkspace {
pub fn load_inline(
mut project_json: ProjectJson,
config: &CargoConfig,
- progress: &dyn Fn(String),
+ progress: &(dyn Fn(String) + Sync),
) -> ProjectWorkspace {
- progress("Discovering sysroot".to_owned());
+ progress("discovering sysroot".to_owned());
let mut sysroot =
Sysroot::new(project_json.sysroot.clone(), project_json.sysroot_src.clone());
tracing::info!(workspace = %project_json.manifest_or_root(), src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot");
- progress("Querying project metadata".to_owned());
+ progress("querying project metadata".to_owned());
let sysroot_project = project_json.sysroot_project.take();
let query_config = QueryConfig::Rustc(&sysroot, project_json.path().as_ref());
let targets = target_tuple::get(query_config, config.target.as_deref(), &config.extra_env)
.unwrap_or_default();
+ let toolchain = version::get(query_config, &config.extra_env).ok().flatten();
+ let project_root = project_json.project_root();
+ let target_dir = config
+ .target_dir
+ .clone()
+ .or_else(|| cargo_target_dir(project_json.manifest()?, &config.extra_env, &sysroot))
+ .unwrap_or_else(|| project_root.join("target").into());
// We spawn a bunch of processes to query various information about the workspace's
// toolchain and sysroot
// We can speed up loading a bit by spawning all of these processes in parallel (especially
// on systems were process spawning is delayed)
let join = thread::scope(|s| {
- let toolchain =
- s.spawn(|| version::get(query_config, &config.extra_env).ok().flatten());
let rustc_cfg = s.spawn(|| {
rustc_cfg::get(query_config, targets.first().map(Deref::deref), &config.extra_env)
});
@@ -443,23 +489,32 @@ impl ProjectWorkspace {
});
let loaded_sysroot = s.spawn(|| {
if let Some(sysroot_project) = sysroot_project {
- sysroot.load_workspace(&RustSourceWorkspaceConfig::Json(*sysroot_project))
+ sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::Json(*sysroot_project),
+ config.no_deps,
+ project_root,
+ &target_dir,
+ progress,
+ )
} else {
- sysroot.load_workspace(&RustSourceWorkspaceConfig::CargoMetadata(
- sysroot_metadata_config(&config.extra_env, &targets),
- ))
+ sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
+ config,
+ &targets,
+ toolchain.clone(),
+ )),
+ config.no_deps,
+ project_root,
+ &target_dir,
+ progress,
+ )
}
});
- thread::Result::Ok((
- toolchain.join()?,
- rustc_cfg.join()?,
- data_layout.join()?,
- loaded_sysroot.join()?,
- ))
+ thread::Result::Ok((rustc_cfg.join()?, data_layout.join()?, loaded_sysroot.join()?))
});
- let (toolchain, rustc_cfg, target_layout, loaded_sysroot) = match join {
+ let (rustc_cfg, target_layout, loaded_sysroot) = match join {
Ok(it) => it,
Err(e) => std::panic::resume_unwind(e),
};
@@ -491,20 +546,35 @@ impl ProjectWorkspace {
None => Sysroot::empty(),
};
- let query_config = QueryConfig::Cargo(&sysroot, detached_file);
+ let config_file = cargo_config_file::read(detached_file, &config.extra_env, &sysroot);
+ let query_config = QueryConfig::Cargo(&sysroot, detached_file, &config_file);
let toolchain = version::get(query_config, &config.extra_env).ok().flatten();
let targets = target_tuple::get(query_config, config.target.as_deref(), &config.extra_env)
.unwrap_or_default();
let rustc_cfg = rustc_cfg::get(query_config, None, &config.extra_env);
let data_layout = target_data_layout::get(query_config, None, &config.extra_env);
- let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::CargoMetadata(
- sysroot_metadata_config(&config.extra_env, &targets),
- ));
+ let target_dir = config
+ .target_dir
+ .clone()
+ .or_else(|| cargo_target_dir(detached_file, &config.extra_env, &sysroot))
+ .unwrap_or_else(|| dir.join("target").into());
+
+ let loaded_sysroot = sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
+ config,
+ &targets,
+ toolchain.clone(),
+ )),
+ config.no_deps,
+ dir,
+ &target_dir,
+ &|_| (),
+ );
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}
- let cargo_script = CargoWorkspace::fetch_metadata(
+ let fetch_metadata = FetchMetadata::new(
detached_file,
dir,
&CargoMetadataConfig {
@@ -512,22 +582,26 @@ impl ProjectWorkspace {
targets,
extra_args: config.extra_args.clone(),
extra_env: config.extra_env.clone(),
+ toolchain_version: toolchain.clone(),
+ kind: "detached-file",
},
&sysroot,
config.no_deps,
- false,
- &|_| (),
- )
- .ok()
- .map(|(ws, error)| {
- let cargo_config_extra_env =
- cargo_config_env(detached_file, &config.extra_env, &sysroot);
- (
- CargoWorkspace::new(ws, detached_file.clone(), cargo_config_extra_env, false),
- WorkspaceBuildScripts::default(),
- error.map(Arc::new),
- )
- });
+ );
+ let target_dir = config
+ .target_dir
+ .clone()
+ .or_else(|| fetch_metadata.no_deps_metadata().map(|m| m.target_directory.clone()))
+ .unwrap_or_else(|| dir.join("target").into());
+ let cargo_script =
+ fetch_metadata.exec(&target_dir, false, &|_| ()).ok().map(|(ws, error)| {
+ let cargo_config_extra_env = cargo_config_env(detached_file, &config_file);
+ (
+ CargoWorkspace::new(ws, detached_file.clone(), cargo_config_extra_env, false),
+ WorkspaceBuildScripts::default(),
+ error.map(Arc::new),
+ )
+ });
Ok(ProjectWorkspace {
kind: ProjectWorkspaceKind::DetachedFile {
@@ -560,10 +634,16 @@ impl ProjectWorkspace {
match &self.kind {
ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _, None)), .. }
| ProjectWorkspaceKind::Cargo { cargo, error: None, .. } => {
- WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, &self.sysroot)
- .with_context(|| {
- format!("Failed to run build scripts for {}", cargo.workspace_root())
- })
+ WorkspaceBuildScripts::run_for_workspace(
+ config,
+ cargo,
+ progress,
+ &self.sysroot,
+ self.toolchain.as_ref(),
+ )
+ .with_context(|| {
+ format!("Failed to run build scripts for {}", cargo.workspace_root())
+ })
}
_ => Ok(WorkspaceBuildScripts::default()),
}
@@ -662,7 +742,7 @@ impl ProjectWorkspace {
}
}
- pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
+ pub fn find_sysroot_proc_macro_srv(&self) -> Option<anyhow::Result<AbsPathBuf>> {
self.sysroot.discover_proc_macro_srv()
}
@@ -1145,14 +1225,10 @@ fn cargo_to_crate_graph(
// Mapping of a package to its library target
let mut pkg_to_lib_crate = FxHashMap::default();
let mut pkg_crates = FxHashMap::default();
- // Does any crate signal to rust-analyzer that they need the rustc_private crates?
- let mut has_private = false;
let workspace_proc_macro_cwd = Arc::new(cargo.workspace_root().to_path_buf());
// Next, create crates for each package, target pair
for pkg in cargo.packages() {
- has_private |= cargo[pkg].metadata.rustc_private;
-
let cfg_options = {
let mut cfg_options = cfg_options.clone();
@@ -1297,7 +1373,7 @@ fn cargo_to_crate_graph(
add_dep(crate_graph, from, name, to);
}
- if has_private {
+ if cargo.requires_rustc_private() {
// If the user provided a path to rustc sources, we add all the rustc_private crates
// and create dependencies on them for the crates which opt-in to that
if let Some((rustc_workspace, rustc_build_scripts)) = rustc {
@@ -1563,11 +1639,11 @@ fn add_target_crate_root(
Some((BuildScriptOutput { proc_macro_dylib_path, .. }, has_errors)) => {
match proc_macro_dylib_path {
Some(path) => Ok((cargo_name.to_owned(), path.clone())),
- None if has_errors => Err("failed to build proc-macro".to_owned()),
- None => Err("proc-macro crate build data is missing dylib path".to_owned()),
+ None if has_errors => Err(ProcMacroLoadingError::FailedToBuild),
+ None => Err(ProcMacroLoadingError::MissingDylibPath),
}
}
- None => Err("proc-macro crate is missing its build data".to_owned()),
+ None => Err(ProcMacroLoadingError::NotYetBuilt),
};
proc_macros.insert(crate_id, proc_macro);
}
@@ -1804,13 +1880,36 @@ fn add_dep_inner(graph: &mut CrateGraphBuilder, from: CrateBuilderId, dep: Depen
}
fn sysroot_metadata_config(
- extra_env: &FxHashMap<String, Option<String>>,
+ config: &CargoConfig,
targets: &[String],
+ toolchain_version: Option<Version>,
) -> CargoMetadataConfig {
CargoMetadataConfig {
features: Default::default(),
targets: targets.to_vec(),
extra_args: Default::default(),
- extra_env: extra_env.clone(),
+ extra_env: config.extra_env.clone(),
+ toolchain_version,
+ kind: "sysroot",
+ }
+}
+
+fn cargo_target_dir(
+ manifest: &ManifestPath,
+ extra_env: &FxHashMap<String, Option<String>>,
+ sysroot: &Sysroot,
+) -> Option<Utf8PathBuf> {
+ let cargo = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
+ let mut meta = cargo_metadata::MetadataCommand::new();
+ meta.cargo_path(cargo.get_program());
+ meta.manifest_path(manifest);
+ // `--no-deps` doesn't (over)write lockfiles as it doesn't do any package resolve.
+ // So we can use it to get `target_directory` before copying lockfiles
+ let mut other_options = vec!["--no-deps".to_owned()];
+ if manifest.is_rust_manifest() {
+ meta.env("RUSTC_BOOTSTRAP", "1");
+ other_options.push("-Zscript".to_owned());
}
+ meta.other_options(other_options);
+ meta.exec().map(|m| m.target_directory).ok()
}
diff --git a/crates/query-group-macro/Cargo.toml b/crates/query-group-macro/Cargo.toml
index 8b03d8f8cc..5991120a30 100644
--- a/crates/query-group-macro/Cargo.toml
+++ b/crates/query-group-macro/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
proc-macro = true
[dependencies]
diff --git a/crates/query-group-macro/src/queries.rs b/crates/query-group-macro/src/queries.rs
index baac3e8bbf..c151cca072 100644
--- a/crates/query-group-macro/src/queries.rs
+++ b/crates/query-group-macro/src/queries.rs
@@ -74,8 +74,8 @@ impl ToTokens for TrackedQuery {
quote! {
#sig {
#annotation
- fn #shim(
- db: &dyn #trait_name,
+ fn #shim<'db>(
+ db: &'db dyn #trait_name,
_input: #input_struct_name,
#(#pat_and_tys),*
) #ret
@@ -88,8 +88,8 @@ impl ToTokens for TrackedQuery {
quote! {
#sig {
#annotation
- fn #shim(
- db: &dyn #trait_name,
+ fn #shim<'db>(
+ db: &'db dyn #trait_name,
#(#pat_and_tys),*
) #ret
#invoke_block
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index b59d06838e..b301a7189b 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -13,6 +13,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[[bin]]
name = "rust-analyzer"
@@ -29,7 +30,7 @@ indexmap.workspace = true
itertools.workspace = true
scip = "0.5.2"
lsp-types = { version = "=0.95.0", features = ["proposed"] }
-parking_lot = "0.12.3"
+parking_lot = "0.12.4"
xflags = "0.3.2"
oorandom = "11.1.5"
rayon.workspace = true
@@ -37,19 +38,19 @@ rustc-hash.workspace = true
serde_json = { workspace = true, features = ["preserve_order"] }
serde.workspace = true
serde_derive.workspace = true
-tenthash = "1.0.0"
-num_cpus = "1.16.0"
-mimalloc = { version = "0.1.44", default-features = false, optional = true }
+tenthash = "1.1.0"
+num_cpus = "1.17.0"
+mimalloc = { version = "0.1.46", default-features = false, optional = true }
lsp-server.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
tracing-tree.workspace = true
triomphe.workspace = true
-toml = "0.8.20"
+toml = "0.8.23"
nohash-hasher.workspace = true
walkdir = "2.5.0"
semver.workspace = true
-memchr = "2.7.4"
+memchr = "2.7.5"
cargo_metadata.workspace = true
process-wrap.workspace = true
@@ -75,7 +76,7 @@ vfs.workspace = true
paths.workspace = true
[target.'cfg(windows)'.dependencies]
-windows-sys = { version = "0.59", features = [
+windows-sys = { version = "0.60", features = [
"Win32_System_Diagnostics_Debug",
"Win32_System_Threading",
] }
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 12b393b80c..fc89f486f8 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -532,7 +532,7 @@ impl flags::AnalysisStats {
}
let todo = syntax::ast::make::ext::expr_todo().to_string();
- let mut formatter = |_: &hir::Type| todo.clone();
+ let mut formatter = |_: &hir::Type<'_>| todo.clone();
let mut syntax_hit_found = false;
for term in found_terms {
let generated = term
@@ -796,7 +796,7 @@ impl flags::AnalysisStats {
// region:expressions
let (previous_exprs, previous_unknown, previous_partially_unknown) =
(num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
- for (expr_id, _) in body.exprs.iter() {
+ for (expr_id, _) in body.exprs() {
let ty = &inference_result[expr_id];
num_exprs += 1;
let unknown_or_partial = if ty.is_unknown() {
@@ -901,7 +901,7 @@ impl flags::AnalysisStats {
// region:patterns
let (previous_pats, previous_unknown, previous_partially_unknown) =
(num_pats, num_pats_unknown, num_pats_partially_unknown);
- for (pat_id, _) in body.pats.iter() {
+ for (pat_id, _) in body.pats() {
let ty = &inference_result[pat_id];
num_pats += 1;
let unknown_or_partial = if ty.is_unknown() {
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index e3b372c914..30ac93fb6f 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -64,9 +64,9 @@ fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> FxHashMap<DiagnosticCode,
impl Tester {
fn new() -> Result<Self> {
- let mut path = std::env::temp_dir();
- path.push("ra-rustc-test.rs");
- let tmp_file = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).unwrap()).unwrap();
+ let mut path = AbsPathBuf::assert_utf8(std::env::temp_dir());
+ path.push("ra-rustc-test");
+ let tmp_file = path.join("ra-rustc-test.rs");
std::fs::write(&tmp_file, "")?;
let cargo_config = CargoConfig {
sysroot: Some(RustLibSource::Discover),
@@ -76,7 +76,13 @@ impl Tester {
};
let mut sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env);
- let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo());
+ let loaded_sysroot = sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::default_cargo(),
+ false,
+ &path,
+ &Utf8PathBuf::default(),
+ &|_| (),
+ );
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index d258c5d819..37f83f6dee 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -25,7 +25,7 @@ impl flags::Scip {
eprintln!("Generating SCIP start...");
let now = Instant::now();
- let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}"));
+ let no_progress = &|s| eprintln!("rust-analyzer: Loading {s}");
let root =
vfs::AbsPathBuf::assert_utf8(std::env::current_dir()?.join(&self.path)).normalize();
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 5cbea9c2b3..51d4c29aa7 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -94,6 +94,8 @@ config_data! {
+ /// Enables highlighting of related return values while the cursor is on any `match`, `if`, or match arm arrow (`=>`).
+ highlightRelated_branchExitPoints_enable: bool = true,
/// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
highlightRelated_breakPoints_enable: bool = true,
/// Enables highlighting of all captures of a closure while the cursor is on the `|` or move keyword of a closure.
@@ -452,6 +454,8 @@ config_data! {
assist_emitMustUse: bool = false,
/// Placeholder expression to use for missing expressions in assists.
assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo,
+ /// When inserting a type (e.g. in "fill match arms" assist), prefer to use `Self` over the type name where possible.
+ assist_preferSelf: bool = false,
/// Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check.
assist_termSearch_borrowcheck: bool = true,
/// Term search fuel in "units of work" for assists (Defaults to 1800).
@@ -760,6 +764,12 @@ config_data! {
/// though Cargo might be the eventual consumer.
vfs_extraIncludes: Vec<String> = vec![],
+ /// Exclude all imports from workspace symbol search.
+ ///
+ /// In addition to regular imports (which are always excluded),
+ /// this option removes public imports (better known as re-exports)
+ /// and removes imports that rename the imported symbol.
+ workspace_symbol_search_excludeImports: bool = false,
/// Workspace symbol search kind.
workspace_symbol_search_kind: WorkspaceSymbolSearchKindDef = WorkspaceSymbolSearchKindDef::OnlyTypes,
/// Limits the number of items returned from a workspace symbol search (Defaults to 128).
@@ -1352,6 +1362,8 @@ pub struct RunnablesConfig {
/// Configuration for workspace symbol search requests.
#[derive(Debug, Clone)]
pub struct WorkspaceSymbolConfig {
+ /// Should imports be excluded.
+ pub search_exclude_imports: bool,
/// In what scope should the symbol be searched in.
pub search_scope: WorkspaceSymbolSearchScope,
/// What kind of symbol is being searched for.
@@ -1501,6 +1513,7 @@ impl Config {
ExprFillDefaultDef::Default => ExprFillDefaultMode::Default,
ExprFillDefaultDef::Underscore => ExprFillDefaultMode::Underscore,
},
+ prefer_self_ty: *self.assist_preferSelf(source_root),
}
}
@@ -1513,7 +1526,7 @@ impl Config {
CompletionConfig {
enable_postfix_completions: self.completion_postfix_enable(source_root).to_owned(),
enable_imports_on_the_fly: self.completion_autoimport_enable(source_root).to_owned()
- && self.caps.completion_item_edit_resolve(),
+ && self.caps.has_completion_item_resolve_additionalTextEdits(),
enable_self_on_the_fly: self.completion_autoself_enable(source_root).to_owned(),
enable_auto_iter: *self.completion_autoIter_enable(source_root),
enable_auto_await: *self.completion_autoAwait_enable(source_root),
@@ -1618,6 +1631,7 @@ impl Config {
exit_points: self.highlightRelated_exitPoints_enable().to_owned(),
yield_points: self.highlightRelated_yieldPoints_enable().to_owned(),
closure_captures: self.highlightRelated_closureCaptures_enable().to_owned(),
+ branch_exit_points: self.highlightRelated_branchExitPoints_enable().to_owned(),
}
}
@@ -2280,6 +2294,7 @@ impl Config {
pub fn workspace_symbol(&self, source_root: Option<SourceRootId>) -> WorkspaceSymbolConfig {
WorkspaceSymbolConfig {
+ search_exclude_imports: *self.workspace_symbol_search_excludeImports(source_root),
search_scope: match self.workspace_symbol_search_scope(source_root) {
WorkspaceSymbolSearchScopeDef::Workspace => WorkspaceSymbolSearchScope::Workspace,
WorkspaceSymbolSearchScopeDef::WorkspaceAndDependencies => {
@@ -2340,10 +2355,6 @@ impl Config {
.and_then(|it| it.version.as_ref())
}
- pub fn client_is_helix(&self) -> bool {
- self.client_info.as_ref().map(|it| it.name == "helix").unwrap_or_default()
- }
-
pub fn client_is_neovim(&self) -> bool {
self.client_info.as_ref().map(|it| it.name == "Neovim").unwrap_or_default()
}
diff --git a/crates/rust-analyzer/src/flycheck.rs b/crates/rust-analyzer/src/flycheck.rs
index 0e418240db..91d37bd7c9 100644
--- a/crates/rust-analyzer/src/flycheck.rs
+++ b/crates/rust-analyzer/src/flycheck.rs
@@ -6,6 +6,7 @@ use std::{fmt, io, process::Command, time::Duration};
use cargo_metadata::PackageId;
use crossbeam_channel::{Receiver, Sender, select_biased, unbounded};
use ide_db::FxHashSet;
+use itertools::Itertools;
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::FxHashMap;
use serde::Deserialize as _;
@@ -379,7 +380,11 @@ impl FlycheckActor {
package_id = msg.package_id.repr,
"artifact received"
);
- self.report_progress(Progress::DidCheckCrate(msg.target.name));
+ self.report_progress(Progress::DidCheckCrate(format!(
+ "{} ({})",
+ msg.target.name,
+ msg.target.kind.iter().format_with(", ", |kind, f| f(&kind)),
+ )));
let package_id = Arc::new(msg.package_id);
if self.diagnostics_cleared_for.insert(package_id.clone()) {
tracing::trace!(
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index a870232d4a..62a28a1a68 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -101,7 +101,7 @@ pub(crate) struct GlobalState {
pub(crate) last_reported_status: lsp_ext::ServerStatusParams,
// proc macros
- pub(crate) proc_macro_clients: Arc<[anyhow::Result<ProcMacroClient>]>,
+ pub(crate) proc_macro_clients: Arc<[Option<anyhow::Result<ProcMacroClient>>]>,
pub(crate) build_deps_changed: bool,
// Flycheck
diff --git a/crates/rust-analyzer/src/handlers/dispatch.rs b/crates/rust-analyzer/src/handlers/dispatch.rs
index 40d05567fc..aea116e647 100644
--- a/crates/rust-analyzer/src/handlers/dispatch.rs
+++ b/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -6,7 +6,7 @@ use std::{
use ide_db::base_db::{
DbPanicContext,
- salsa::{self, Cancelled, UnexpectedCycle},
+ salsa::{self, Cancelled},
};
use lsp_server::{ExtractError, Response, ResponseError};
use serde::{Serialize, de::DeserializeOwned};
@@ -350,9 +350,6 @@ where
if let Some(panic_message) = panic_message {
message.push_str(": ");
message.push_str(panic_message);
- } else if let Some(cycle) = panic.downcast_ref::<UnexpectedCycle>() {
- tracing::error!("{cycle}");
- message.push_str(": unexpected cycle");
} else if let Ok(cancelled) = panic.downcast::<Cancelled>() {
tracing::error!("Cancellation propagated out of salsa! This is a bug");
return Err(HandlerCancelledError::Inner(*cancelled));
diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs
index b7373f274f..200e972e42 100644
--- a/crates/rust-analyzer/src/handlers/notification.rs
+++ b/crates/rust-analyzer/src/handlers/notification.rs
@@ -239,7 +239,7 @@ pub(crate) fn handle_did_change_configuration(
let (config, e, _) = config.apply_change(change);
this.config_errors = e.is_empty().not().then_some(e);
- // Client config changes neccesitates .update_config method to be called.
+ // Client config changes necessitates .update_config method to be called.
this.update_configuration(config);
}
}
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index 6d46ce68ed..a76a65220d 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -658,6 +658,9 @@ pub(crate) fn handle_workspace_symbol(
if libs {
q.libs();
}
+ if config.search_exclude_imports {
+ q.exclude_imports();
+ }
q
};
let mut res = exec_query(&snap, query, config.search_limit)?;
@@ -2430,17 +2433,14 @@ fn run_rustfmt(
}
_ => {
// Something else happened - e.g. `rustfmt` is missing or caught a signal
- Err(LspError::new(
- -32900,
- format!(
- r#"rustfmt exited with:
- Status: {}
- stdout: {captured_stdout}
- stderr: {captured_stderr}"#,
- output.status,
- ),
- )
- .into())
+ tracing::error!(
+ ?command,
+ %output.status,
+ %captured_stdout,
+ %captured_stderr,
+ "rustfmt failed"
+ );
+ Ok(None)
}
};
}
diff --git a/crates/rust-analyzer/src/lsp/capabilities.rs b/crates/rust-analyzer/src/lsp/capabilities.rs
index 418fe95759..f94e7486ff 100644
--- a/crates/rust-analyzer/src/lsp/capabilities.rs
+++ b/crates/rust-analyzer/src/lsp/capabilities.rs
@@ -42,7 +42,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities {
hover_provider: Some(HoverProviderCapability::Simple(true)),
completion_provider: Some(CompletionOptions {
resolve_provider: if config.client_is_neovim() {
- config.completion_item_edit_resolve().then_some(true)
+ config.has_completion_item_resolve_additionalTextEdits().then_some(true)
} else {
Some(config.caps().completions_resolve_provider())
},
@@ -77,7 +77,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities {
_ => Some(OneOf::Left(false)),
},
document_on_type_formatting_provider: Some({
- let mut chars = ide::Analysis::SUPPORTED_TRIGGER_CHARS.chars();
+ let mut chars = ide::Analysis::SUPPORTED_TRIGGER_CHARS.iter();
DocumentOnTypeFormattingOptions {
first_trigger_character: chars.next().unwrap().to_string(),
more_trigger_character: Some(chars.map(|c| c.to_string()).collect()),
@@ -207,8 +207,8 @@ impl ClientCapabilities {
serde_json::from_value(self.0.experimental.as_ref()?.get(index)?.clone()).ok()
}
- /// Parses client capabilities and returns all completion resolve capabilities rust-analyzer supports.
- pub fn completion_item_edit_resolve(&self) -> bool {
+ #[allow(non_snake_case)]
+ pub fn has_completion_item_resolve_additionalTextEdits(&self) -> bool {
(|| {
Some(
self.0
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index 4efe330f16..292be1d531 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -900,15 +900,19 @@ pub(crate) fn folding_range(
FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
FoldKind::Region => Some(lsp_types::FoldingRangeKind::Region),
- FoldKind::Mods
+ FoldKind::Modules
| FoldKind::Block
| FoldKind::ArgList
| FoldKind::Consts
| FoldKind::Statics
+ | FoldKind::TypeAliases
| FoldKind::WhereClause
| FoldKind::ReturnType
| FoldKind::Array
- | FoldKind::MatchArm => None,
+ | FoldKind::TraitAliases
+ | FoldKind::ExternCrates
+ | FoldKind::MatchArm
+ | FoldKind::Function => None,
};
let range = range(line_index, fold.range);
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 0c0438c4b8..00cf890510 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -783,9 +783,14 @@ impl GlobalState {
DiscoverProjectParam::Path(it) => DiscoverArgument::Path(it),
};
- let handle =
- discover.spawn(arg, &std::env::current_dir().unwrap()).unwrap();
- self.discover_handle = Some(handle);
+ let handle = discover.spawn(
+ arg,
+ &std::env::current_dir()
+ .expect("Failed to get cwd during project discovery"),
+ );
+ self.discover_handle = Some(handle.unwrap_or_else(|e| {
+ panic!("Failed to spawn project discovery command: {e}")
+ }));
}
}
}
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 4677880daa..e798aa6a8a 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -18,7 +18,7 @@ use std::{iter, mem};
use hir::{ChangeWithProcMacros, ProcMacrosBuilder, db::DefDatabase};
use ide_db::{
FxHashMap,
- base_db::{CrateGraphBuilder, ProcMacroPaths, salsa::Durability},
+ base_db::{CrateGraphBuilder, ProcMacroLoadingError, ProcMacroPaths, salsa::Durability},
};
use itertools::Itertools;
use load_cargo::{ProjectFolders, load_proc_macro};
@@ -114,6 +114,16 @@ impl GlobalState {
Durability::HIGH,
);
}
+
+ if self.config.cargo(None) != old_config.cargo(None) {
+ let req = FetchWorkspaceRequest { path: None, force_crate_graph_reload: false };
+ self.fetch_workspaces_queue.request_op("cargo config changed".to_owned(), req)
+ }
+
+ if self.config.cfg_set_test(None) != old_config.cfg_set_test(None) {
+ let req = FetchWorkspaceRequest { path: None, force_crate_graph_reload: false };
+ self.fetch_workspaces_queue.request_op("cfg_set_test config changed".to_owned(), req)
+ }
}
pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
@@ -184,8 +194,7 @@ impl GlobalState {
format_to!(message, "{e}");
});
- let proc_macro_clients =
- self.proc_macro_clients.iter().map(Some).chain(iter::repeat_with(|| None));
+ let proc_macro_clients = self.proc_macro_clients.iter().chain(iter::repeat(&None));
for (ws, proc_macro_client) in self.workspaces.iter().zip(proc_macro_clients) {
if let ProjectWorkspaceKind::Cargo { error: Some(error), .. }
@@ -242,7 +251,8 @@ impl GlobalState {
message.push_str("\n\n");
}
}
- _ => (),
+ // sysroot was explicitly not set so we didn't discover a server
+ None => {}
}
}
}
@@ -409,16 +419,13 @@ impl GlobalState {
};
let mut builder = ProcMacrosBuilder::default();
- let proc_macro_clients = proc_macro_clients
- .iter()
- .map(|res| res.as_ref().map_err(|e| e.to_string()))
- .chain(iter::repeat_with(|| Err("proc-macro-srv is not running".into())));
+ let proc_macro_clients = proc_macro_clients.iter().chain(iter::repeat(&None));
for (client, paths) in proc_macro_clients.zip(paths) {
for (crate_id, res) in paths.iter() {
let expansion_res = match client {
- Ok(client) => match res {
+ Some(Ok(client)) => match res {
Ok((crate_name, path)) => {
- progress(path.to_string());
+ progress(format!("loading proc-macros: {path}"));
let ignored_proc_macros = ignored_proc_macros
.iter()
.find_map(|(name, macros)| {
@@ -428,9 +435,14 @@ impl GlobalState {
load_proc_macro(client, path, ignored_proc_macros)
}
- Err(e) => Err((e.clone(), true)),
+ Err(e) => Err(e.clone()),
},
- Err(ref e) => Err((e.clone(), true)),
+ Some(Err(e)) => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ e.to_string().into_boxed_str(),
+ )),
+ None => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ "proc-macro-srv is not running".into(),
+ )),
};
builder.insert(*crate_id, expansion_res)
}
@@ -645,7 +657,10 @@ impl GlobalState {
self.proc_macro_clients = Arc::from_iter(self.workspaces.iter().map(|ws| {
let path = match self.config.proc_macro_srv() {
Some(path) => path,
- None => ws.find_sysroot_proc_macro_srv()?,
+ None => match ws.find_sysroot_proc_macro_srv()? {
+ Ok(path) => path,
+ Err(e) => return Some(Err(e)),
+ },
};
let env: FxHashMap<_, _> = match &ws.kind {
@@ -672,14 +687,14 @@ impl GlobalState {
};
info!("Using proc-macro server at {path}");
- ProcMacroClient::spawn(&path, &env).map_err(|err| {
+ Some(ProcMacroClient::spawn(&path, &env).map_err(|err| {
tracing::error!(
"Failed to run proc-macro server from path {path}, error: {err:?}",
);
anyhow::format_err!(
"Failed to run proc-macro server from path {path}, error: {err:?}",
)
- })
+ }))
}))
}
@@ -743,14 +758,14 @@ impl GlobalState {
change.set_proc_macros(
crate_graph
.iter()
- .map(|id| (id, Err(("proc-macro has not been built yet".to_owned(), true))))
+ .map(|id| (id, Err(ProcMacroLoadingError::NotYetBuilt)))
.collect(),
);
} else {
change.set_proc_macros(
crate_graph
.iter()
- .map(|id| (id, Err(("proc-macro expansion is disabled".to_owned(), false))))
+ .map(|id| (id, Err(ProcMacroLoadingError::Disabled)))
.collect(),
);
}
diff --git a/crates/rust-analyzer/src/test_runner.rs b/crates/rust-analyzer/src/test_runner.rs
index 9c0bc33af6..e7528dbc93 100644
--- a/crates/rust-analyzer/src/test_runner.rs
+++ b/crates/rust-analyzer/src/test_runner.rs
@@ -103,6 +103,7 @@ impl CargoTestHandle {
) -> std::io::Result<Self> {
let mut cmd = toolchain::command(Tool::Cargo.path(), root, &options.extra_env);
cmd.env("RUSTC_BOOTSTRAP", "1");
+ cmd.arg("--color=always");
cmd.arg("test");
cmd.arg("--package");
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index 59073af983..1b940c70da 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -880,7 +880,8 @@ fn main() {{}}
#[test]
fn diagnostics_dont_block_typing() {
- if skip_slow_tests() {
+ if skip_slow_tests() || std::env::var("CI").is_ok() {
+ // FIXME: This test is failing too frequently (therefore we disable it on CI).
return;
}
diff --git a/crates/span/Cargo.toml b/crates/span/Cargo.toml
index b3b401c3db..966962bab3 100644
--- a/crates/span/Cargo.toml
+++ b/crates/span/Cargo.toml
@@ -22,6 +22,9 @@ vfs.workspace = true
syntax.workspace = true
stdx.workspace = true
+[dev-dependencies]
+syntax.workspace = true
+
[features]
default = ["salsa"]
diff --git a/crates/span/src/ast_id.rs b/crates/span/src/ast_id.rs
index 228fba1fa0..a9288ecd6f 100644
--- a/crates/span/src/ast_id.rs
+++ b/crates/span/src/ast_id.rs
@@ -4,137 +4,550 @@
//! Specifically, it enumerates all items in a file and uses position of a an
//! item as an ID. That way, id's don't change unless the set of items itself
//! changes.
+//!
+//! These IDs are tricky. If one of them invalidates, its interned ID invalidates,
+//! and this can cause *a lot* to be recomputed. For example, if you invalidate the ID
+//! of a struct, and that struct has an impl (any impl!) this will cause the `Self`
+//! type of the impl to invalidate, which will cause the all impls queries to be
+//! invalidated, which will cause every trait solve query in this crate *and* all
+//! transitive reverse dependencies to be invalidated, which is pretty much the worst
+//! thing that can happen incrementality wise.
+//!
+//! So we want these IDs to stay as stable as possible. For top-level items, we store
+//! their kind and name, which should be unique, but since they can still not be, we
+//! also store an index disambiguator. For nested items, we also store the ID of their
+//! parent. For macro calls, we store the macro name and an index. There aren't usually
+//! a lot of macro calls in item position, and invalidation in bodies is not much of
+//! a problem, so this should be enough.
use std::{
any::type_name,
fmt,
- hash::{BuildHasher, BuildHasherDefault, Hash, Hasher},
+ hash::{BuildHasher, Hash, Hasher},
marker::PhantomData,
};
use la_arena::{Arena, Idx, RawIdx};
-use rustc_hash::FxHasher;
-use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, ast};
+use rustc_hash::{FxBuildHasher, FxHashMap};
+use syntax::{
+ AstNode, AstPtr, SyntaxKind, SyntaxNode, SyntaxNodePtr,
+ ast::{self, HasName},
+ match_ast,
+};
+
+// The first index is always the root node's AstId
+/// The root ast id always points to the encompassing file, using this in spans is discouraged as
+/// any range relative to it will be effectively absolute, ruining the entire point of anchored
+/// relative text ranges.
+pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
+ ErasedFileAstId(pack_hash_index_and_kind(0, 0, ErasedFileAstIdKind::Root as u32));
+
+/// ErasedFileAstId used as the span for syntax node fixups. Any Span containing this file id is to be
+/// considered fake.
+pub const FIXUP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId =
+ ErasedFileAstId(pack_hash_index_and_kind(0, 0, ErasedFileAstIdKind::Fixup as u32));
-/// See crates\hir-expand\src\ast_id_map.rs
/// This is a type erased FileAstId.
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct ErasedFileAstId(u32);
+impl fmt::Debug for ErasedFileAstId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let kind = self.kind();
+ macro_rules! kind {
+ ($($kind:ident),* $(,)?) => {
+ if false {
+ // Ensure we covered all variants.
+ match ErasedFileAstIdKind::Root {
+ $( ErasedFileAstIdKind::$kind => {} )*
+ }
+ unreachable!()
+ }
+ $( else if kind == ErasedFileAstIdKind::$kind as u32 {
+ stringify!($kind)
+ } )*
+ else {
+ "Unknown"
+ }
+ };
+ }
+ let kind = kind!(
+ Root,
+ Enum,
+ Struct,
+ Union,
+ ExternCrate,
+ MacroDef,
+ MacroRules,
+ Module,
+ Static,
+ Trait,
+ TraitAlias,
+ Variant,
+ Const,
+ Fn,
+ MacroCall,
+ TypeAlias,
+ ExternBlock,
+ Use,
+ Impl,
+ BlockExpr,
+ AsmExpr,
+ Fixup,
+ );
+ if f.alternate() {
+ write!(f, "{kind}[{:04X}, {}]", self.hash_value(), self.index())
+ } else {
+ f.debug_struct("ErasedFileAstId")
+ .field("kind", &format_args!("{kind}"))
+ .field("index", &self.index())
+ .field("hash", &format_args!("{:04X}", self.hash_value()))
+ .finish()
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
+#[repr(u8)]
+enum ErasedFileAstIdKind {
+ /// This needs to not change because it's depended upon by the proc macro server.
+ Fixup = 0,
+ // The following are associated with `ErasedHasNameFileAstId`.
+ Enum,
+ Struct,
+ Union,
+ ExternCrate,
+ MacroDef,
+ MacroRules,
+ Module,
+ Static,
+ Trait,
+ TraitAlias,
+ // Until here associated with `ErasedHasNameFileAstId`.
+ // The following are associated with `ErasedAssocItemFileAstId`.
+ Variant,
+ Const,
+ Fn,
+ MacroCall,
+ TypeAlias,
+ // Until here associated with `ErasedAssocItemFileAstId`.
+ // Extern blocks don't really have any identifying property unfortunately.
+ ExternBlock,
+ // FIXME: If we store the final `UseTree` instead of the top-level `Use`, we can store its name,
+ // and be way more granular for incrementality, at the expense of increased memory usage.
+ // Use IDs aren't used a lot. The main thing that stores them is the def map. So everything that
+ // uses the def map will be invalidated. That includes infers, and so is pretty bad, but our
+ // def map incrementality story is pretty bad anyway and needs to be improved (see
+ // https://rust-lang.zulipchat.com/#narrow/channel/185405-t-compiler.2Frust-analyzer/topic/.60infer.60.20queries.20and.20splitting.20.60DefMap.60).
+ // So I left this as-is for now, as the def map improvement should also mitigate this.
+ Use,
+ /// Associated with [`ImplFileAstId`].
+ Impl,
+ /// Associated with [`BlockExprFileAstId`].
+ BlockExpr,
+ // `global_asm!()` is an item, so we need to give it an `AstId`. So we give to all inline asm
+ // because incrementality is not a problem, they will always be the only item in the macro file,
+ // and memory usage also not because they're rare.
+ AsmExpr,
+ /// Keep this last.
+ Root,
+}
+
+// First hash, then index, then kind.
+const HASH_BITS: u32 = 16;
+const INDEX_BITS: u32 = 11;
+const KIND_BITS: u32 = 5;
+const _: () = assert!(ErasedFileAstIdKind::Fixup as u32 <= ((1 << KIND_BITS) - 1));
+const _: () = assert!(HASH_BITS + INDEX_BITS + KIND_BITS == u32::BITS);
+
+#[inline]
+const fn u16_hash(hash: u64) -> u16 {
+ // We do basically the same as `FxHasher`. We don't use rustc-hash and truncate because the
+ // higher bits have more entropy, but unlike rustc-hash we don't rotate because it rotates
+ // for hashmaps that just use the low bits, but we compare all bits.
+ const K: u16 = 0xecc5;
+ let (part1, part2, part3, part4) =
+ (hash as u16, (hash >> 16) as u16, (hash >> 32) as u16, (hash >> 48) as u16);
+ part1
+ .wrapping_add(part2)
+ .wrapping_mul(K)
+ .wrapping_add(part3)
+ .wrapping_mul(K)
+ .wrapping_add(part4)
+ .wrapping_mul(K)
+}
+
+#[inline]
+const fn pack_hash_index_and_kind(hash: u16, index: u32, kind: u32) -> u32 {
+ (hash as u32) | (index << HASH_BITS) | (kind << (HASH_BITS + INDEX_BITS))
+}
+
impl ErasedFileAstId {
- pub const fn into_raw(self) -> u32 {
- self.0
+ #[inline]
+ fn hash_value(self) -> u16 {
+ self.0 as u16
}
- pub const fn from_raw(u32: u32) -> Self {
- Self(u32)
+
+ #[inline]
+ fn index(self) -> u32 {
+ (self.0 << KIND_BITS) >> (HASH_BITS + KIND_BITS)
}
-}
-impl fmt::Display for ErasedFileAstId {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.0.fmt(f)
+ #[inline]
+ fn kind(self) -> u32 {
+ self.0 >> (HASH_BITS + INDEX_BITS)
}
-}
-impl fmt::Debug for ErasedFileAstId {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.0.fmt(f)
+
+ fn ast_id_for(
+ node: &SyntaxNode,
+ index_map: &mut ErasedAstIdNextIndexMap,
+ parent: Option<&ErasedFileAstId>,
+ ) -> Option<ErasedFileAstId> {
+ // Blocks are deliberately not here - we only want to allocate a block if it contains items.
+ has_name_ast_id(node, index_map)
+ .or_else(|| assoc_item_ast_id(node, index_map, parent))
+ .or_else(|| extern_block_ast_id(node, index_map))
+ .or_else(|| use_ast_id(node, index_map))
+ .or_else(|| impl_ast_id(node, index_map))
+ .or_else(|| asm_expr_ast_id(node, index_map))
+ }
+
+ fn should_alloc(node: &SyntaxNode) -> bool {
+ let kind = node.kind();
+ should_alloc_has_name(kind)
+ || should_alloc_assoc_item(kind)
+ || ast::ExternBlock::can_cast(kind)
+ || ast::Use::can_cast(kind)
+ || ast::Impl::can_cast(kind)
+ || ast::AsmExpr::can_cast(kind)
+ }
+
+ #[inline]
+ pub fn into_raw(self) -> u32 {
+ self.0
+ }
+
+ #[inline]
+ pub const fn from_raw(v: u32) -> Self {
+ Self(v)
}
}
+pub trait AstIdNode: AstNode {}
+
/// `AstId` points to an AST node in a specific file.
-pub struct FileAstId<N: AstIdNode> {
+pub struct FileAstId<N> {
raw: ErasedFileAstId,
- covariant: PhantomData<fn() -> N>,
+ _marker: PhantomData<fn() -> N>,
}
-impl<N: AstIdNode> Clone for FileAstId<N> {
+/// Traits are manually implemented because `derive` adds redundant bounds.
+impl<N> Clone for FileAstId<N> {
+ #[inline]
fn clone(&self) -> FileAstId<N> {
*self
}
}
-impl<N: AstIdNode> Copy for FileAstId<N> {}
+impl<N> Copy for FileAstId<N> {}
-impl<N: AstIdNode> PartialEq for FileAstId<N> {
+impl<N> PartialEq for FileAstId<N> {
fn eq(&self, other: &Self) -> bool {
self.raw == other.raw
}
}
-impl<N: AstIdNode> Eq for FileAstId<N> {}
-impl<N: AstIdNode> Hash for FileAstId<N> {
+impl<N> Eq for FileAstId<N> {}
+impl<N> Hash for FileAstId<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.raw.hash(hasher);
}
}
-impl<N: AstIdNode> fmt::Debug for FileAstId<N> {
+impl<N> fmt::Debug for FileAstId<N> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw)
+ write!(f, "FileAstId::<{}>({:?})", type_name::<N>(), self.raw)
}
}
-impl<N: AstIdNode> FileAstId<N> {
+impl<N> FileAstId<N> {
// Can't make this a From implementation because of coherence
+ #[inline]
pub fn upcast<M: AstIdNode>(self) -> FileAstId<M>
where
N: Into<M>,
{
- FileAstId { raw: self.raw, covariant: PhantomData }
+ FileAstId { raw: self.raw, _marker: PhantomData }
}
+ #[inline]
pub fn erase(self) -> ErasedFileAstId {
self.raw
}
}
-pub trait AstIdNode: AstNode {}
-macro_rules! register_ast_id_node {
- (impl AstIdNode for $($ident:ident),+ ) => {
+#[derive(Hash)]
+struct ErasedHasNameFileAstId<'a> {
+ name: &'a str,
+}
+
+/// This holds the ast ID for variants too (they're a kind of assoc item).
+#[derive(Hash)]
+struct ErasedAssocItemFileAstId<'a> {
+ /// Subtle: items in `extern` blocks **do not** store the ID of the extern block here.
+ /// Instead this is left empty. The reason is that `ExternBlockFileAstId` is pretty unstable
+ /// (it contains only an index), and extern blocks don't introduce a new scope, so storing
+ /// the extern block ID will do more harm to incrementality than help.
+ parent: Option<ErasedFileAstId>,
+ properties: ErasedHasNameFileAstId<'a>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+struct ImplFileAstId<'a> {
+ /// This can be `None` if the `Self` type is not a named type, or if it is inside a macro call.
+ self_ty_name: Option<&'a str>,
+ /// This can be `None` if this is an inherent impl, or if the trait name is inside a macro call.
+ trait_name: Option<&'a str>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+struct BlockExprFileAstId {
+ parent: Option<ErasedFileAstId>,
+}
+
+impl AstIdNode for ast::ExternBlock {}
+
+fn extern_block_ast_id(
+ node: &SyntaxNode,
+ index_map: &mut ErasedAstIdNextIndexMap,
+) -> Option<ErasedFileAstId> {
+ if ast::ExternBlock::can_cast(node.kind()) {
+ Some(index_map.new_id(ErasedFileAstIdKind::ExternBlock, ()))
+ } else {
+ None
+ }
+}
+
+impl AstIdNode for ast::Use {}
+
+fn use_ast_id(
+ node: &SyntaxNode,
+ index_map: &mut ErasedAstIdNextIndexMap,
+) -> Option<ErasedFileAstId> {
+ if ast::Use::can_cast(node.kind()) {
+ Some(index_map.new_id(ErasedFileAstIdKind::Use, ()))
+ } else {
+ None
+ }
+}
+
+impl AstIdNode for ast::AsmExpr {}
+
+fn asm_expr_ast_id(
+ node: &SyntaxNode,
+ index_map: &mut ErasedAstIdNextIndexMap,
+) -> Option<ErasedFileAstId> {
+ if ast::AsmExpr::can_cast(node.kind()) {
+ Some(index_map.new_id(ErasedFileAstIdKind::AsmExpr, ()))
+ } else {
+ None
+ }
+}
+
+impl AstIdNode for ast::Impl {}
+
+fn impl_ast_id(
+ node: &SyntaxNode,
+ index_map: &mut ErasedAstIdNextIndexMap,
+) -> Option<ErasedFileAstId> {
+ if let Some(node) = ast::Impl::cast(node.clone()) {
+ let type_as_name = |ty: Option<ast::Type>| match ty? {
+ ast::Type::PathType(it) => Some(it.path()?.segment()?.name_ref()?),
+ _ => None,
+ };
+ let self_ty_name = type_as_name(node.self_ty());
+ let trait_name = type_as_name(node.trait_());
+ let data = ImplFileAstId {
+ self_ty_name: self_ty_name.as_ref().map(|it| it.text_non_mutable()),
+ trait_name: trait_name.as_ref().map(|it| it.text_non_mutable()),
+ };
+ Some(index_map.new_id(ErasedFileAstIdKind::Impl, data))
+ } else {
+ None
+ }
+}
+
+// Blocks aren't `AstIdNode`s deliberately, because unlike other nodes, not all blocks get their own
+// ast id, only if they have items. To account for that we have a different, fallible, API for blocks.
+// impl !AstIdNode for ast::BlockExpr {}
+
+fn block_expr_ast_id(
+ node: &SyntaxNode,
+ index_map: &mut ErasedAstIdNextIndexMap,
+ parent: Option<&ErasedFileAstId>,
+) -> Option<ErasedFileAstId> {
+ if ast::BlockExpr::can_cast(node.kind()) {
+ Some(
+ index_map.new_id(
+ ErasedFileAstIdKind::BlockExpr,
+ BlockExprFileAstId { parent: parent.copied() },
+ ),
+ )
+ } else {
+ None
+ }
+}
+
+#[derive(Default)]
+struct ErasedAstIdNextIndexMap(FxHashMap<(ErasedFileAstIdKind, u16), u32>);
+
+impl ErasedAstIdNextIndexMap {
+ #[inline]
+ fn new_id(&mut self, kind: ErasedFileAstIdKind, data: impl Hash) -> ErasedFileAstId {
+ let hash = FxBuildHasher.hash_one(&data);
+ let initial_hash = u16_hash(hash);
+ // Even though 2^INDEX_BITS=2048 items with the same hash seems like a lot,
+ // it could happen with macro calls or `use`s in macro-generated files. So we want
+ // to handle it gracefully. We just increment the hash.
+ let mut hash = initial_hash;
+ let index = loop {
+ match self.0.entry((kind, hash)) {
+ std::collections::hash_map::Entry::Occupied(mut entry) => {
+ let i = entry.get_mut();
+ if *i < ((1 << INDEX_BITS) - 1) {
+ *i += 1;
+ break *i;
+ }
+ }
+ std::collections::hash_map::Entry::Vacant(entry) => {
+ entry.insert(0);
+ break 0;
+ }
+ }
+ hash = hash.wrapping_add(1);
+ if hash == initial_hash {
+ // That's 2^27=134,217,728 items!
+ panic!("you have way too many items in the same file!");
+ }
+ };
+ let kind = kind as u32;
+ ErasedFileAstId(pack_hash_index_and_kind(hash, index, kind))
+ }
+}
+
+macro_rules! register_enum_ast_id {
+ (impl $AstIdNode:ident for $($ident:ident),+ ) => {
+ $(
+ impl $AstIdNode for ast::$ident {}
+ )+
+ };
+}
+register_enum_ast_id! {
+ impl AstIdNode for
+ Item, AnyHasGenericParams, Adt, Macro,
+ AssocItem
+}
+
+macro_rules! register_has_name_ast_id {
+ (impl $AstIdNode:ident for $($ident:ident = $name_method:ident),+ ) => {
$(
- impl AstIdNode for ast::$ident {}
+ impl $AstIdNode for ast::$ident {}
)+
- fn should_alloc_id(kind: syntax::SyntaxKind) -> bool {
- $(
- ast::$ident::can_cast(kind)
- )||+
+
+ fn has_name_ast_id(node: &SyntaxNode, index_map: &mut ErasedAstIdNextIndexMap) -> Option<ErasedFileAstId> {
+ match_ast! {
+ match node {
+ $(
+ ast::$ident(node) => {
+ let name = node.$name_method();
+ let name = name.as_ref().map_or("", |it| it.text_non_mutable());
+ let result = ErasedHasNameFileAstId {
+ name,
+ };
+ Some(index_map.new_id(ErasedFileAstIdKind::$ident, result))
+ },
+ )*
+ _ => None,
+ }
+ }
+ }
+
+ fn should_alloc_has_name(kind: SyntaxKind) -> bool {
+ false $( || ast::$ident::can_cast(kind) )*
}
};
}
-register_ast_id_node! {
+register_has_name_ast_id! {
impl AstIdNode for
- Item, AnyHasGenericParams,
- Adt,
- Enum,
- Variant,
- Struct,
- Union,
- AssocItem,
- Const,
- Fn,
- MacroCall,
- TypeAlias,
- ExternBlock,
- ExternCrate,
- Impl,
- Macro,
- MacroDef,
- MacroRules,
- Module,
- Static,
- Trait,
- TraitAlias,
- Use,
- BlockExpr, ConstArg
+ Enum = name,
+ Struct = name,
+ Union = name,
+ ExternCrate = name_ref,
+ MacroDef = name,
+ MacroRules = name,
+ Module = name,
+ Static = name,
+ Trait = name,
+ TraitAlias = name
+}
+
+macro_rules! register_assoc_item_ast_id {
+ (impl $AstIdNode:ident for $($ident:ident = $name_callback:expr),+ ) => {
+ $(
+ impl $AstIdNode for ast::$ident {}
+ )+
+
+ fn assoc_item_ast_id(
+ node: &SyntaxNode,
+ index_map: &mut ErasedAstIdNextIndexMap,
+ parent: Option<&ErasedFileAstId>,
+ ) -> Option<ErasedFileAstId> {
+ match_ast! {
+ match node {
+ $(
+ ast::$ident(node) => {
+ let name = $name_callback(node);
+ let name = name.as_ref().map_or("", |it| it.text_non_mutable());
+ let properties = ErasedHasNameFileAstId {
+ name,
+ };
+ let result = ErasedAssocItemFileAstId {
+ parent: parent.copied(),
+ properties,
+ };
+ Some(index_map.new_id(ErasedFileAstIdKind::$ident, result))
+ },
+ )*
+ _ => None,
+ }
+ }
+ }
+
+ fn should_alloc_assoc_item(kind: SyntaxKind) -> bool {
+ false $( || ast::$ident::can_cast(kind) )*
+ }
+ };
+}
+register_assoc_item_ast_id! {
+ impl AstIdNode for
+ Variant = |it: ast::Variant| it.name(),
+ Const = |it: ast::Const| it.name(),
+ Fn = |it: ast::Fn| it.name(),
+ MacroCall = |it: ast::MacroCall| it.path().and_then(|path| path.segment()?.name_ref()),
+ TypeAlias = |it: ast::TypeAlias| it.name()
}
/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
#[derive(Default)]
pub struct AstIdMap {
- /// Maps stable id to unstable ptr.
- arena: Arena<SyntaxNodePtr>,
- /// Reverse: map ptr to id.
- map: hashbrown::HashTable<Idx<SyntaxNodePtr>>,
+ /// An arena of the ptrs and their associated ID.
+ arena: Arena<(SyntaxNodePtr, ErasedFileAstId)>,
+ /// Map ptr to id.
+ ptr_map: hashbrown::HashTable<ArenaId>,
+ /// Map id to ptr.
+ id_map: hashbrown::HashTable<ArenaId>,
}
+type ArenaId = Idx<(SyntaxNodePtr, ErasedFileAstId)>;
+
impl fmt::Debug for AstIdMap {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("AstIdMap").field("arena", &self.arena).finish()
@@ -148,31 +561,116 @@ impl PartialEq for AstIdMap {
}
impl Eq for AstIdMap {}
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum ContainsItems {
+ Yes,
+ No,
+}
+
impl AstIdMap {
pub fn from_source(node: &SyntaxNode) -> AstIdMap {
assert!(node.parent().is_none());
let mut res = AstIdMap::default();
+ let mut index_map = ErasedAstIdNextIndexMap::default();
+
+ // Ensure we allocate the root.
+ res.arena.alloc((SyntaxNodePtr::new(node), ROOT_ERASED_FILE_AST_ID));
- // make sure to allocate the root node
- if !should_alloc_id(node.kind()) {
- res.alloc(node);
- }
// By walking the tree in breadth-first order we make sure that parents
// get lower ids then children. That is, adding a new child does not
// change parent's id. This means that, say, adding a new function to a
// trait does not change ids of top-level items, which helps caching.
- bdfs(node, |it| {
- if should_alloc_id(it.kind()) {
- res.alloc(&it);
- TreeOrder::BreadthFirst
- } else {
- TreeOrder::DepthFirst
+
+ // This contains the stack of the `BlockExpr`s we are under. We do this
+ // so we only allocate `BlockExpr`s if they contain items.
+ // The general idea is: when we enter a block we push `(block, false)` here.
+ // Items inside the block are attributed to the block's container, not the block.
+ // For the first item we find inside a block, we make this `(block, true)`
+ // and create an ast id for the block. When exiting the block we pop it,
+ // whether or not we created an ast id for it.
+ // It may seem that with this setup we will generate an ID for blocks that
+ // have no items directly but have items inside other items inside them.
+ // This is true, but it doesn't matter, because such blocks can't exist.
+ // After all, the block will then contain the *outer* item, so we allocate
+ // an ID for it anyway.
+ let mut blocks = Vec::new();
+ let mut curr_layer = vec![(node.clone(), None)];
+ let mut next_layer = vec![];
+ while !curr_layer.is_empty() {
+ curr_layer.drain(..).for_each(|(node, parent_idx)| {
+ let mut preorder = node.preorder();
+ while let Some(event) = preorder.next() {
+ match event {
+ syntax::WalkEvent::Enter(node) => {
+ if ast::BlockExpr::can_cast(node.kind()) {
+ blocks.push((node, ContainsItems::No));
+ } else if ErasedFileAstId::should_alloc(&node) {
+ // Allocate blocks on-demand, only if they have items.
+ // We don't associate items with blocks, only with items, since block IDs can be quite unstable.
+ // FIXME: Is this the correct thing to do? Macro calls might actually be more incremental if
+ // associated with blocks (not sure). Either way it's not a big deal.
+ if let Some((
+ last_block_node,
+ already_allocated @ ContainsItems::No,
+ )) = blocks.last_mut()
+ {
+ let block_ast_id = block_expr_ast_id(
+ last_block_node,
+ &mut index_map,
+ parent_of(parent_idx, &res),
+ )
+ .expect("not a BlockExpr");
+ res.arena
+ .alloc((SyntaxNodePtr::new(last_block_node), block_ast_id));
+ *already_allocated = ContainsItems::Yes;
+ }
+
+ let parent = parent_of(parent_idx, &res);
+ let ast_id =
+ ErasedFileAstId::ast_id_for(&node, &mut index_map, parent)
+ .expect("this node should have an ast id");
+ let idx = res.arena.alloc((SyntaxNodePtr::new(&node), ast_id));
+
+ next_layer.extend(node.children().map(|child| (child, Some(idx))));
+ preorder.skip_subtree();
+ }
+ }
+ syntax::WalkEvent::Leave(node) => {
+ if ast::BlockExpr::can_cast(node.kind()) {
+ assert_eq!(
+ blocks.pop().map(|it| it.0),
+ Some(node),
+ "left a BlockExpr we never entered"
+ );
+ }
+ }
+ }
+ }
+ });
+ std::mem::swap(&mut curr_layer, &mut next_layer);
+ assert!(blocks.is_empty(), "didn't leave all BlockExprs");
+ }
+
+ res.ptr_map = hashbrown::HashTable::with_capacity(res.arena.len());
+ res.id_map = hashbrown::HashTable::with_capacity(res.arena.len());
+ for (idx, (ptr, ast_id)) in res.arena.iter() {
+ let ptr_hash = hash_ptr(ptr);
+ let ast_id_hash = hash_ast_id(ast_id);
+ match res.ptr_map.entry(
+ ptr_hash,
+ |idx2| *idx2 == idx,
+ |&idx| hash_ptr(&res.arena[idx].0),
+ ) {
+ hashbrown::hash_table::Entry::Occupied(_) => unreachable!(),
+ hashbrown::hash_table::Entry::Vacant(entry) => {
+ entry.insert(idx);
+ }
}
- });
- res.map = hashbrown::HashTable::with_capacity(res.arena.len());
- for (idx, ptr) in res.arena.iter() {
- let hash = hash_ptr(ptr);
- match res.map.entry(hash, |&idx2| idx2 == idx, |&idx| hash_ptr(&res.arena[idx])) {
+ match res.id_map.entry(
+ ast_id_hash,
+ |idx2| *idx2 == idx,
+ |&idx| hash_ast_id(&res.arena[idx].1),
+ ) {
hashbrown::hash_table::Entry::Occupied(_) => unreachable!(),
hashbrown::hash_table::Entry::Vacant(entry) => {
entry.insert(idx);
@@ -180,98 +678,235 @@ impl AstIdMap {
}
}
res.arena.shrink_to_fit();
- res
+ return res;
+
+ fn parent_of(parent_idx: Option<ArenaId>, res: &AstIdMap) -> Option<&ErasedFileAstId> {
+ let mut parent = parent_idx.map(|parent_idx| &res.arena[parent_idx].1);
+ if parent.is_some_and(|parent| parent.kind() == ErasedFileAstIdKind::ExternBlock as u32)
+ {
+ // See the comment on `ErasedAssocItemFileAstId` for why this is.
+ // FIXME: Technically there could be an extern block inside another item, e.g.:
+ // ```
+ // fn foo() {
+ // extern "C" {
+ // fn bar();
+ // }
+ // }
+ // ```
+ // Here we want to make `foo()` the parent of `bar()`, but we make it `None`.
+ // Shouldn't be a big deal though.
+ parent = None;
+ }
+ parent
+ }
}
/// The [`AstId`] of the root node
pub fn root(&self) -> SyntaxNodePtr {
- self.arena[Idx::from_raw(RawIdx::from_u32(0))]
+ self.arena[Idx::from_raw(RawIdx::from_u32(0))].0
}
pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
- let raw = self.erased_ast_id(item.syntax());
- FileAstId { raw, covariant: PhantomData }
+ self.ast_id_for_ptr(AstPtr::new(item))
+ }
+
+ /// Blocks may not be allocated (if they have no items), so they have a different API.
+ pub fn ast_id_for_block(&self, block: &ast::BlockExpr) -> Option<FileAstId<ast::BlockExpr>> {
+ self.ast_id_for_ptr_for_block(AstPtr::new(block))
}
pub fn ast_id_for_ptr<N: AstIdNode>(&self, ptr: AstPtr<N>) -> FileAstId<N> {
let ptr = ptr.syntax_node_ptr();
- let hash = hash_ptr(&ptr);
- match self.map.find(hash, |&idx| self.arena[idx] == ptr) {
- Some(&raw) => FileAstId {
- raw: ErasedFileAstId(raw.into_raw().into_u32()),
- covariant: PhantomData,
- },
- None => panic!(
- "Can't find {:?} in AstIdMap:\n{:?}",
+ FileAstId { raw: self.erased_ast_id(ptr), _marker: PhantomData }
+ }
+
+ /// Blocks may not be allocated (if they have no items), so they have a different API.
+ pub fn ast_id_for_ptr_for_block(
+ &self,
+ ptr: AstPtr<ast::BlockExpr>,
+ ) -> Option<FileAstId<ast::BlockExpr>> {
+ let ptr = ptr.syntax_node_ptr();
+ self.try_erased_ast_id(ptr).map(|raw| FileAstId { raw, _marker: PhantomData })
+ }
+
+ fn erased_ast_id(&self, ptr: SyntaxNodePtr) -> ErasedFileAstId {
+ self.try_erased_ast_id(ptr).unwrap_or_else(|| {
+ panic!(
+ "Can't find SyntaxNodePtr {:?} in AstIdMap:\n{:?}",
ptr,
self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
- ),
- }
+ )
+ })
}
- pub fn get<N: AstIdNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
- AstPtr::try_from_raw(self.arena[Idx::from_raw(RawIdx::from_u32(id.raw.into_raw()))])
- .unwrap()
+ fn try_erased_ast_id(&self, ptr: SyntaxNodePtr) -> Option<ErasedFileAstId> {
+ let hash = hash_ptr(&ptr);
+ let idx = *self.ptr_map.find(hash, |&idx| self.arena[idx].0 == ptr)?;
+ Some(self.arena[idx].1)
}
- pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
- self.arena[Idx::from_raw(RawIdx::from_u32(id.into_raw()))]
+ // Don't bound on `AstIdNode` here, because `BlockExpr`s are also valid here (`ast::BlockExpr`
+ // doesn't always have a matching `FileAstId`, but a `FileAstId<ast::BlockExpr>` always has
+ // a matching node).
+ pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
+ let ptr = self.get_erased(id.raw);
+ AstPtr::try_from_raw(ptr)
+ .unwrap_or_else(|| panic!("AstIdMap node mismatch with node `{ptr:?}`"))
}
- fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
- let ptr = SyntaxNodePtr::new(item);
- let hash = hash_ptr(&ptr);
- match self.map.find(hash, |&idx| self.arena[idx] == ptr) {
- Some(&idx) => ErasedFileAstId(idx.into_raw().into_u32()),
+ pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
+ let hash = hash_ast_id(&id);
+ match self.id_map.find(hash, |&idx| self.arena[idx].1 == id) {
+ Some(&idx) => self.arena[idx].0,
None => panic!(
- "Can't find {:?} in AstIdMap:\n{:?}\n source text: {}",
- item,
+ "Can't find ast id {:?} in AstIdMap:\n{:?}",
+ id,
self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
- item
),
}
}
-
- fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
- ErasedFileAstId(self.arena.alloc(SyntaxNodePtr::new(item)).into_raw().into_u32())
- }
}
+#[inline]
fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 {
- BuildHasherDefault::<FxHasher>::default().hash_one(ptr)
-}
-
-#[derive(Copy, Clone, PartialEq, Eq)]
-enum TreeOrder {
- BreadthFirst,
- DepthFirst,
-}
-
-/// Walks the subtree in bdfs order, calling `f` for each node. What is bdfs
-/// order? It is a mix of breadth-first and depth first orders. Nodes for which
-/// `f` returns [`TreeOrder::BreadthFirst`] are visited breadth-first, all the other nodes are explored
-/// [`TreeOrder::DepthFirst`].
-///
-/// In other words, the size of the bfs queue is bound by the number of "true"
-/// nodes.
-fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> TreeOrder) {
- let mut curr_layer = vec![node.clone()];
- let mut next_layer = vec![];
- while !curr_layer.is_empty() {
- curr_layer.drain(..).for_each(|node| {
- let mut preorder = node.preorder();
- while let Some(event) = preorder.next() {
- match event {
- syntax::WalkEvent::Enter(node) => {
- if f(node.clone()) == TreeOrder::BreadthFirst {
- next_layer.extend(node.children());
- preorder.skip_subtree();
- }
- }
- syntax::WalkEvent::Leave(_) => {}
- }
+ FxBuildHasher.hash_one(ptr)
+}
+
+#[inline]
+fn hash_ast_id(ptr: &ErasedFileAstId) -> u64 {
+ FxBuildHasher.hash_one(ptr)
+}
+
+#[cfg(test)]
+mod tests {
+ use syntax::{AstNode, Edition, SourceFile, SyntaxKind, SyntaxNodePtr, WalkEvent, ast};
+
+ use crate::AstIdMap;
+
+ #[test]
+ fn check_all_nodes() {
+ let syntax = SourceFile::parse(
+ r#"
+extern crate foo;
+fn foo() {
+ union U {}
+}
+struct S;
+macro_rules! m {}
+macro m2() {}
+trait Trait {}
+impl Trait for S {}
+impl S {}
+impl m!() {}
+impl m2!() for m!() {}
+type T = i32;
+enum E {
+ V1(),
+ V2 {},
+ V3,
+}
+struct S; // duplicate
+extern "C" {
+ static S: i32;
+}
+static mut S: i32 = 0;
+const FOO: i32 = 0;
+ "#,
+ Edition::CURRENT,
+ )
+ .syntax_node();
+ let ast_id_map = AstIdMap::from_source(&syntax);
+ for node in syntax.preorder() {
+ let WalkEvent::Enter(node) = node else { continue };
+ if !matches!(
+ node.kind(),
+ SyntaxKind::EXTERN_CRATE
+ | SyntaxKind::FN
+ | SyntaxKind::UNION
+ | SyntaxKind::STRUCT
+ | SyntaxKind::MACRO_RULES
+ | SyntaxKind::MACRO_DEF
+ | SyntaxKind::MACRO_CALL
+ | SyntaxKind::TRAIT
+ | SyntaxKind::IMPL
+ | SyntaxKind::TYPE_ALIAS
+ | SyntaxKind::ENUM
+ | SyntaxKind::VARIANT
+ | SyntaxKind::EXTERN_BLOCK
+ | SyntaxKind::STATIC
+ | SyntaxKind::CONST
+ ) {
+ continue;
}
- });
- std::mem::swap(&mut curr_layer, &mut next_layer);
+ let ptr = SyntaxNodePtr::new(&node);
+ let ast_id = ast_id_map.erased_ast_id(ptr);
+ let turn_back = ast_id_map.get_erased(ast_id);
+ assert_eq!(ptr, turn_back);
+ }
+ }
+
+ #[test]
+ fn different_names_get_different_hashes() {
+ let syntax = SourceFile::parse(
+ r#"
+fn foo() {}
+fn bar() {}
+ "#,
+ Edition::CURRENT,
+ )
+ .syntax_node();
+ let ast_id_map = AstIdMap::from_source(&syntax);
+ let fns = syntax.descendants().filter_map(ast::Fn::cast).collect::<Vec<_>>();
+ let [foo_fn, bar_fn] = fns.as_slice() else {
+ panic!("not exactly 2 functions");
+ };
+ let foo_fn_id = ast_id_map.ast_id(foo_fn);
+ let bar_fn_id = ast_id_map.ast_id(bar_fn);
+ assert_ne!(foo_fn_id.raw.hash_value(), bar_fn_id.raw.hash_value(), "hashes are equal");
+ }
+
+ #[test]
+ fn different_parents_get_different_hashes() {
+ let syntax = SourceFile::parse(
+ r#"
+fn foo() {
+ m!();
+}
+fn bar() {
+ m!();
+}
+ "#,
+ Edition::CURRENT,
+ )
+ .syntax_node();
+ let ast_id_map = AstIdMap::from_source(&syntax);
+ let macro_calls = syntax.descendants().filter_map(ast::MacroCall::cast).collect::<Vec<_>>();
+ let [macro_call_foo, macro_call_bar] = macro_calls.as_slice() else {
+ panic!("not exactly 2 macro calls");
+ };
+ let macro_call_foo_id = ast_id_map.ast_id(macro_call_foo);
+ let macro_call_bar_id = ast_id_map.ast_id(macro_call_bar);
+ assert_ne!(
+ macro_call_foo_id.raw.hash_value(),
+ macro_call_bar_id.raw.hash_value(),
+ "hashes are equal"
+ );
+ }
+
+ #[test]
+ fn blocks_with_no_items_have_no_id() {
+ let syntax = SourceFile::parse(
+ r#"
+fn foo() {
+ let foo = 1;
+ bar(foo);
+}
+ "#,
+ Edition::CURRENT,
+ )
+ .syntax_node();
+ let ast_id_map = AstIdMap::from_source(&syntax);
+ let block = syntax.descendants().find_map(ast::BlockExpr::cast).expect("no block");
+ assert!(ast_id_map.ast_id_for_block(&block).is_none());
}
}
diff --git a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs
index 7bb88ac365..aef3fbf051 100644
--- a/crates/span/src/hygiene.rs
+++ b/crates/span/src/hygiene.rs
@@ -97,6 +97,7 @@ const _: () = {
const LOCATION: salsa::plumbing::Location =
salsa::plumbing::Location { file: file!(), line: line!() };
const DEBUG_NAME: &'static str = "SyntaxContextData";
+ const REVISIONS: std::num::NonZeroUsize = std::num::NonZeroUsize::MAX;
type Fields<'a> = SyntaxContextData;
type Struct<'a> = SyntaxContext;
}
@@ -108,7 +109,9 @@ const _: () = {
static CACHE: zalsa_::IngredientCache<zalsa_struct_::IngredientImpl<SyntaxContext>> =
zalsa_::IngredientCache::new();
CACHE.get_or_create(db.zalsa(), || {
- db.zalsa().add_or_lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ db.zalsa()
+ .lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ .get_or_create()
})
}
}
@@ -130,9 +133,12 @@ const _: () = {
type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex;
fn lookup_or_create_ingredient_index(
- aux: &salsa::plumbing::Zalsa,
+ zalsa: &salsa::plumbing::Zalsa,
) -> salsa::plumbing::IngredientIndices {
- aux.add_or_lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>().into()
+ zalsa
+ .lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ .get_or_create()
+ .into()
}
#[inline]
@@ -326,14 +332,14 @@ impl<'db> SyntaxContext {
None
} else {
// SAFETY: By our invariant, this is either a root (which we verified it's not) or a valid `salsa::Id`.
- unsafe { Some(salsa::Id::from_u32(self.0)) }
+ unsafe { Some(salsa::Id::from_index(self.0)) }
}
}
#[inline]
fn from_salsa_id(id: salsa::Id) -> Self {
// SAFETY: This comes from a Salsa ID.
- unsafe { Self::from_u32(id.as_u32()) }
+ unsafe { Self::from_u32(id.index()) }
}
#[inline]
diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs
index f81648ac42..b81d08eed6 100644
--- a/crates/span/src/lib.rs
+++ b/crates/span/src/lib.rs
@@ -6,7 +6,10 @@ mod hygiene;
mod map;
pub use self::{
- ast_id::{AstIdMap, AstIdNode, ErasedFileAstId, FileAstId},
+ ast_id::{
+ AstIdMap, AstIdNode, ErasedFileAstId, FIXUP_ERASED_FILE_AST_ID_MARKER, FileAstId,
+ ROOT_ERASED_FILE_AST_ID,
+ },
hygiene::{SyntaxContext, Transparency},
map::{RealSpanMap, SpanMap},
};
@@ -15,19 +18,6 @@ pub use syntax::Edition;
pub use text_size::{TextRange, TextSize};
pub use vfs::FileId;
-// The first index is always the root node's AstId
-/// The root ast id always points to the encompassing file, using this in spans is discouraged as
-/// any range relative to it will be effectively absolute, ruining the entire point of anchored
-/// relative text ranges.
-pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(0);
-
-/// FileId used as the span for syntax node fixups. Any Span containing this file id is to be
-/// considered fake.
-pub const FIXUP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId =
- // we pick the second to last for this in case we ever consider making this a NonMaxU32, this
- // is required to be stable for the proc-macro-server
- ErasedFileAstId::from_raw(!0 - 1);
-
pub type Span = SpanData<SyntaxContext>;
impl Span {
@@ -60,7 +50,7 @@ impl<Ctx: fmt::Debug> fmt::Debug for SpanData<Ctx> {
if f.alternate() {
fmt::Debug::fmt(&self.anchor.file_id.file_id().index(), f)?;
f.write_char(':')?;
- fmt::Debug::fmt(&self.anchor.ast_id.into_raw(), f)?;
+ write!(f, "{:#?}", self.anchor.ast_id)?;
f.write_char('@')?;
fmt::Debug::fmt(&self.range, f)?;
f.write_char('#')?;
@@ -85,7 +75,7 @@ impl fmt::Display for Span {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.anchor.file_id.file_id().index(), f)?;
f.write_char(':')?;
- fmt::Debug::fmt(&self.anchor.ast_id.into_raw(), f)?;
+ write!(f, "{:#?}", self.anchor.ast_id)?;
f.write_char('@')?;
fmt::Debug::fmt(&self.range, f)?;
f.write_char('#')?;
@@ -101,7 +91,7 @@ pub struct SpanAnchor {
impl fmt::Debug for SpanAnchor {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_tuple("SpanAnchor").field(&self.file_id).field(&self.ast_id.into_raw()).finish()
+ f.debug_tuple("SpanAnchor").field(&self.file_id).field(&self.ast_id).finish()
}
}
diff --git a/crates/span/src/map.rs b/crates/span/src/map.rs
index cc7a886643..f58201793d 100644
--- a/crates/span/src/map.rs
+++ b/crates/span/src/map.rs
@@ -169,7 +169,7 @@ impl fmt::Display for RealSpanMap {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "RealSpanMap({:?}):", self.file_id)?;
for span in self.pairs.iter() {
- writeln!(f, "{}: {}", u32::from(span.0), span.1.into_raw())?;
+ writeln!(f, "{}: {:#?}", u32::from(span.0), span.1)?;
}
Ok(())
}
diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml
index b37aded6f6..2c19f00f08 100644
--- a/crates/stdx/Cargo.toml
+++ b/crates/stdx/Cargo.toml
@@ -10,9 +10,10 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
-backtrace = { version = "0.3.74", optional = true }
+backtrace = { version = "0.3.75", optional = true }
jod-thread = "1.0.0"
crossbeam-channel.workspace = true
itertools.workspace = true
@@ -25,7 +26,7 @@ libc.workspace = true
[target.'cfg(windows)'.dependencies]
miow = "0.6.0"
-windows-sys = { version = "0.59", features = ["Win32_Foundation"] }
+windows-sys = { version = "0.60", features = ["Win32_Foundation"] }
[features]
# Uncomment to enable for the whole crate graph
diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs
index 9a292eacd7..978c50d807 100644
--- a/crates/stdx/src/lib.rs
+++ b/crates/stdx/src/lib.rs
@@ -13,6 +13,7 @@ pub mod panic_context;
pub mod process;
pub mod rand;
pub mod thread;
+pub mod variance;
pub use itertools;
diff --git a/crates/stdx/src/variance.rs b/crates/stdx/src/variance.rs
new file mode 100644
index 0000000000..8465d72bf3
--- /dev/null
+++ b/crates/stdx/src/variance.rs
@@ -0,0 +1,270 @@
+//! This is a copy of [`std::marker::variance`].
+
+use std::any::type_name;
+use std::cmp::Ordering;
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::marker::PhantomData;
+
+macro_rules! first_token {
+ ($first:tt $($rest:tt)*) => {
+ $first
+ };
+}
+macro_rules! phantom_type {
+ ($(
+ $(#[$attr:meta])*
+ pub struct $name:ident <$t:ident> ($($inner:tt)*);
+ )*) => {$(
+ $(#[$attr])*
+ pub struct $name<$t>($($inner)*) where T: ?Sized;
+
+ impl<T> $name<T>
+ where T: ?Sized
+ {
+ /// Constructs a new instance of the variance marker.
+ pub const fn new() -> Self {
+ Self(PhantomData)
+ }
+ }
+
+ impl<T> self::sealed::Sealed for $name<T> where T: ?Sized {
+ const VALUE: Self = Self::new();
+ }
+
+ impl<T> Variance for $name<T> where T: ?Sized {}
+
+ impl<T> Default for $name<T>
+ where T: ?Sized
+ {
+ fn default() -> Self {
+ Self(PhantomData)
+ }
+ }
+
+ impl<T> fmt::Debug for $name<T>
+ where T: ?Sized
+ {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}<{}>", stringify!($name), type_name::<T>())
+ }
+ }
+
+ impl<T> Clone for $name<T>
+ where T: ?Sized
+ {
+ fn clone(&self) -> Self {
+ *self
+ }
+ }
+
+ impl<T> Copy for $name<T> where T: ?Sized {}
+
+ impl<T> PartialEq for $name<T>
+ where T: ?Sized
+ {
+ fn eq(&self, _: &Self) -> bool {
+ true
+ }
+ }
+
+ impl<T> Eq for $name<T> where T: ?Sized {}
+
+ #[allow(clippy::non_canonical_partial_ord_impl)]
+ impl<T> PartialOrd for $name<T>
+ where T: ?Sized
+ {
+ fn partial_cmp(&self, _: &Self) -> Option<Ordering> {
+ Some(Ordering::Equal)
+ }
+ }
+
+ impl<T> Ord for $name<T>
+ where T: ?Sized
+ {
+ fn cmp(&self, _: &Self) -> Ordering {
+ Ordering::Equal
+ }
+ }
+
+ impl<T> Hash for $name<T>
+ where T: ?Sized
+ {
+ fn hash<H: Hasher>(&self, _: &mut H) {}
+ }
+ )*};
+}
+
+macro_rules! phantom_lifetime {
+ ($(
+ $(#[$attr:meta])*
+ pub struct $name:ident <$lt:lifetime> ($($inner:tt)*);
+ )*) => {$(
+ $(#[$attr])*
+
+ #[derive(Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+ pub struct $name<$lt>($($inner)*);
+
+ impl $name<'_> {
+ /// Constructs a new instance of the variance marker.
+ pub const fn new() -> Self {
+ Self(first_token!($($inner)*)(PhantomData))
+ }
+ }
+
+ impl self::sealed::Sealed for $name<'_> {
+ const VALUE: Self = Self::new();
+ }
+
+ impl Variance for $name<'_> {}
+
+ impl fmt::Debug for $name<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", stringify!($name))
+ }
+ }
+ )*};
+}
+
+phantom_lifetime! {
+ /// Zero-sized type used to mark a lifetime as covariant.
+ ///
+ /// Covariant lifetimes must live at least as long as declared. See [the reference][1] for more
+ /// information.
+ ///
+ /// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
+ ///
+ /// ## Layout
+ ///
+ /// For all `'a`, the following are guaranteed:
+ /// * `size_of::<PhantomCovariantLifetime<'a>>() == 0`
+ /// * `align_of::<PhantomCovariantLifetime<'a>>() == 1`
+
+ pub struct PhantomCovariantLifetime<'a>(PhantomCovariant<&'a ()>);
+ /// Zero-sized type used to mark a lifetime as contravariant.
+ ///
+ /// Contravariant lifetimes must live at most as long as declared. See [the reference][1] for
+ /// more information.
+ ///
+ /// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
+ ///
+ /// ## Layout
+ ///
+ /// For all `'a`, the following are guaranteed:
+ /// * `size_of::<PhantomContravariantLifetime<'a>>() == 0`
+ /// * `align_of::<PhantomContravariantLifetime<'a>>() == 1`
+
+ pub struct PhantomContravariantLifetime<'a>(PhantomContravariant<&'a ()>);
+ /// Zero-sized type used to mark a lifetime as invariant.
+ ///
+ /// Invariant lifetimes must be live for the exact length declared, neither shorter nor longer.
+ /// See [the reference][1] for more information.
+ ///
+ /// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
+ ///
+ /// ## Layout
+ ///
+ /// For all `'a`, the following are guaranteed:
+ /// * `size_of::<PhantomInvariantLifetime<'a>>() == 0`
+ /// * `align_of::<PhantomInvariantLifetime<'a>>() == 1`
+
+ pub struct PhantomInvariantLifetime<'a>(PhantomInvariant<&'a ()>);
+
+}
+
+phantom_type! {
+ /// Zero-sized type used to mark a type parameter as covariant.
+ ///
+ /// Types used as part of the return value from a function are covariant. If the type is _also_
+ /// passed as a parameter then it is [invariant][PhantomInvariant]. See [the reference][1] for
+ /// more information.
+ ///
+ /// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
+ ///
+ /// ## Layout
+ ///
+ /// For all `T`, the following are guaranteed:
+ /// * `size_of::<PhantomCovariant<T>>() == 0`
+ /// * `align_of::<PhantomCovariant<T>>() == 1`
+
+ pub struct PhantomCovariant<T>(PhantomData<fn() -> T>);
+ /// Zero-sized type used to mark a type parameter as contravariant.
+ ///
+ /// Types passed as arguments to a function are contravariant. If the type is _also_ part of the
+ /// return value from a function then it is [invariant][PhantomInvariant]. See [the
+ /// reference][1] for more information.
+ ///
+ /// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
+ ///
+ /// ## Layout
+ ///
+ /// For all `T`, the following are guaranteed:
+ /// * `size_of::<PhantomContravariant<T>>() == 0`
+ /// * `align_of::<PhantomContravariant<T>>() == 1`
+
+ pub struct PhantomContravariant<T>(PhantomData<fn(T)>);
+ /// Zero-sized type used to mark a type parameter as invariant.
+ ///
+ /// Types that are both passed as an argument _and_ used as part of the return value from a
+ /// function are invariant. See [the reference][1] for more information.
+ ///
+ /// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
+ ///
+ /// ## Layout
+ ///
+ /// For all `T`, the following are guaranteed:
+ /// * `size_of::<PhantomInvariant<T>>() == 0`
+ /// * `align_of::<PhantomInvariant<T>>() == 1`
+
+ pub struct PhantomInvariant<T>(PhantomData<fn(T) -> T>);
+
+}
+
+mod sealed {
+
+ pub trait Sealed {
+ const VALUE: Self;
+ }
+}
+/// A marker trait for phantom variance types.
+pub trait Variance: sealed::Sealed + Default {}
+/// Construct a variance marker; equivalent to [`Default::default`].
+///
+/// This type can be any of the following. You generally should not need to explicitly name the
+/// type, however.
+///
+/// - [`PhantomCovariant`]
+/// - [`PhantomContravariant`]
+/// - [`PhantomInvariant`]
+/// - [`PhantomCovariantLifetime`]
+/// - [`PhantomContravariantLifetime`]
+/// - [`PhantomInvariantLifetime`]
+///
+/// # Example
+///
+/// ```rust
+/// #![feature(phantom_variance_markers)]
+///
+/// use core::marker::{PhantomCovariant, variance};
+///
+/// struct BoundFn<F, P, R>
+/// where
+/// F: Fn(P) -> R,
+/// {
+/// function: F,
+/// parameter: P,
+/// return_value: PhantomCovariant<R>,
+/// }
+///
+/// let bound_fn = BoundFn {
+/// function: core::convert::identity,
+/// parameter: 5u8,
+/// return_value: variance(),
+/// };
+/// ```
+pub const fn variance<T>() -> T
+where
+ T: Variance,
+{
+ T::VALUE
+}
diff --git a/crates/syntax-bridge/Cargo.toml b/crates/syntax-bridge/Cargo.toml
index cccd41d542..b0fd40ff59 100644
--- a/crates/syntax-bridge/Cargo.toml
+++ b/crates/syntax-bridge/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
rustc-hash.workspace = true
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 4c7704803e..1ee93013e3 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
either.workspace = true
@@ -27,7 +28,7 @@ stdx.workspace = true
[dev-dependencies]
rayon.workspace = true
expect-test = "1.5.1"
-rustc_apfloat = "0.2.2"
+rustc_apfloat = "0.2.3"
test-utils.workspace = true
diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram
index c81da06682..4cbc88cfb5 100644
--- a/crates/syntax/rust.ungram
+++ b/crates/syntax/rust.ungram
@@ -158,6 +158,7 @@ Item =
| TypeAlias
| Union
| Use
+| AsmExpr
MacroRules =
Attr* Visibility?
@@ -409,7 +410,8 @@ OffsetOfExpr =
// global_asm := "global_asm!(" format_string *("," format_string) *("," operand) [","] ")"
// format_string := STRING_LITERAL / RAW_STRING_LITERAL
AsmExpr =
- Attr* 'builtin' '#' 'asm' '(' template:(Expr (',' Expr)*) (AsmPiece (',' AsmPiece)*)? ','? ')'
+ Attr* 'builtin' '#' ( 'asm' | 'global_asm' | 'naked_asm' )
+ '(' template:(Expr (',' Expr)*) (AsmPiece (',' AsmPiece)*)? ','? ')'
// operand_expr := expr / "_" / expr "=>" expr / expr "=>" "_"
AsmOperandExpr = in_expr:Expr ('=>' out_expr:Expr)?
@@ -669,7 +671,7 @@ TypeBoundList =
TypeBound =
Lifetime
-| ('~' 'const' | 'const')? 'async'? '?'? Type
+| ('~' 'const' | '[' 'const' ']' | 'const')? 'async'? '?'? Type
| 'use' UseBoundGenericArgs
UseBoundGenericArgs =
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs
index e60243f2c9..e902516471 100644
--- a/crates/syntax/src/ast/edit_in_place.rs
+++ b/crates/syntax/src/ast/edit_in_place.rs
@@ -406,42 +406,6 @@ impl ast::WhereClause {
}
}
-impl ast::TypeParam {
- pub fn remove_default(&self) {
- if let Some((eq, last)) = self
- .syntax()
- .children_with_tokens()
- .find(|it| it.kind() == T![=])
- .zip(self.syntax().last_child_or_token())
- {
- ted::remove_all(eq..=last);
-
- // remove any trailing ws
- if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE) {
- last.detach();
- }
- }
- }
-}
-
-impl ast::ConstParam {
- pub fn remove_default(&self) {
- if let Some((eq, last)) = self
- .syntax()
- .children_with_tokens()
- .find(|it| it.kind() == T![=])
- .zip(self.syntax().last_child_or_token())
- {
- ted::remove_all(eq..=last);
-
- // remove any trailing ws
- if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE) {
- last.detach();
- }
- }
- }
-}
-
pub trait Removable: AstNode {
fn remove(&self);
}
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs
index 04c7e8a578..2b86246542 100644
--- a/crates/syntax/src/ast/generated/nodes.rs
+++ b/crates/syntax/src/ast/generated/nodes.rs
@@ -118,6 +118,14 @@ impl AsmExpr {
pub fn asm_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![asm]) }
#[inline]
pub fn builtin_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![builtin]) }
+ #[inline]
+ pub fn global_asm_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![global_asm])
+ }
+ #[inline]
+ pub fn naked_asm_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![naked_asm])
+ }
}
pub struct AsmLabel {
pub(crate) syntax: SyntaxNode,
@@ -1766,6 +1774,10 @@ impl TypeBound {
support::child(&self.syntax)
}
#[inline]
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ #[inline]
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+ #[inline]
pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
#[inline]
pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
@@ -2083,6 +2095,7 @@ impl ast::HasAttrs for GenericParam {}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Item {
+ AsmExpr(AsmExpr),
Const(Const),
Enum(Enum),
ExternBlock(ExternBlock),
@@ -2102,7 +2115,6 @@ pub enum Item {
Use(Use),
}
impl ast::HasAttrs for Item {}
-impl ast::HasDocComments for Item {}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Pat {
@@ -8405,6 +8417,10 @@ impl AstNode for GenericParam {
}
}
}
+impl From<AsmExpr> for Item {
+ #[inline]
+ fn from(node: AsmExpr) -> Item { Item::AsmExpr(node) }
+}
impl From<Const> for Item {
#[inline]
fn from(node: Const) -> Item { Item::Const(node) }
@@ -8478,7 +8494,8 @@ impl AstNode for Item {
fn can_cast(kind: SyntaxKind) -> bool {
matches!(
kind,
- CONST
+ ASM_EXPR
+ | CONST
| ENUM
| EXTERN_BLOCK
| EXTERN_CRATE
@@ -8500,6 +8517,7 @@ impl AstNode for Item {
#[inline]
fn cast(syntax: SyntaxNode) -> Option<Self> {
let res = match syntax.kind() {
+ ASM_EXPR => Item::AsmExpr(AsmExpr { syntax }),
CONST => Item::Const(Const { syntax }),
ENUM => Item::Enum(Enum { syntax }),
EXTERN_BLOCK => Item::ExternBlock(ExternBlock { syntax }),
@@ -8524,6 +8542,7 @@ impl AstNode for Item {
#[inline]
fn syntax(&self) -> &SyntaxNode {
match self {
+ Item::AsmExpr(it) => &it.syntax,
Item::Const(it) => &it.syntax,
Item::Enum(it) => &it.syntax,
Item::ExternBlock(it) => &it.syntax,
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index fab4cb287c..d67f24fda9 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -134,6 +134,13 @@ pub fn name_ref(name_ref: &str) -> ast::NameRef {
}
}
}
+pub fn name_ref_self_ty() -> ast::NameRef {
+ quote! {
+ NameRef {
+ [Self]
+ }
+ }
+}
fn raw_ident_esc(ident: &str) -> &'static str {
if is_raw_identifier(ident, Edition::CURRENT) { "r#" } else { "" }
}
@@ -673,7 +680,7 @@ pub fn expr_tuple(elements: impl IntoIterator<Item = ast::Expr>) -> ast::TupleEx
let expr = elements.into_iter().format(", ");
expr_from_text(&format!("({expr})"))
}
-pub fn expr_assignment(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr {
+pub fn expr_assignment(lhs: ast::Expr, rhs: ast::Expr) -> ast::BinExpr {
expr_from_text(&format!("{lhs} = {rhs}"))
}
fn expr_from_text<E: Into<ast::Expr> + AstNode>(text: &str) -> E {
@@ -835,9 +842,10 @@ pub fn ref_pat(pat: ast::Pat) -> ast::RefPat {
}
pub fn match_arm(pat: ast::Pat, guard: Option<ast::MatchGuard>, expr: ast::Expr) -> ast::MatchArm {
+ let comma_str = if expr.is_block_like() { "" } else { "," };
return match guard {
- Some(guard) => from_text(&format!("{pat} {guard} => {expr}")),
- None => from_text(&format!("{pat} => {expr}")),
+ Some(guard) => from_text(&format!("{pat} {guard} => {expr}{comma_str}")),
+ None => from_text(&format!("{pat} => {expr}{comma_str}")),
};
fn from_text(text: &str) -> ast::MatchArm {
@@ -870,7 +878,7 @@ pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::Mat
let arms_str = arms.into_iter().fold(String::new(), |mut acc, arm| {
let needs_comma =
arm.comma_token().is_none() && arm.expr().is_none_or(|it| !it.is_block_like());
- let comma = if needs_comma { "," } else { "" };
+ let comma = if needs_comma && arm.comma_token().is_none() { "," } else { "" };
let arm = arm.syntax();
format_to_acc!(acc, " {arm}{comma}\n")
});
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index dcf853427e..f5530c5fff 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -30,6 +30,16 @@ impl ast::Name {
pub fn text(&self) -> TokenText<'_> {
text_of_first_token(self.syntax())
}
+ pub fn text_non_mutable(&self) -> &str {
+ fn first_token(green_ref: &GreenNodeData) -> &GreenTokenData {
+ green_ref.children().next().and_then(NodeOrToken::into_token).unwrap()
+ }
+
+ match self.syntax().green() {
+ Cow::Borrowed(green_ref) => first_token(green_ref).text(),
+ Cow::Owned(_) => unreachable!(),
+ }
+ }
}
impl ast::NameRef {
diff --git a/crates/syntax/src/ast/syntax_factory/constructors.rs b/crates/syntax/src/ast/syntax_factory/constructors.rs
index 429e51ba36..1ba6107315 100644
--- a/crates/syntax/src/ast/syntax_factory/constructors.rs
+++ b/crates/syntax/src/ast/syntax_factory/constructors.rs
@@ -440,6 +440,19 @@ impl SyntaxFactory {
ast
}
+ pub fn expr_assignment(&self, lhs: ast::Expr, rhs: ast::Expr) -> ast::BinExpr {
+ let ast = make::expr_assignment(lhs.clone(), rhs.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(lhs.syntax().clone(), ast.lhs().unwrap().syntax().clone());
+ builder.map_node(rhs.syntax().clone(), ast.rhs().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
pub fn expr_bin(&self, lhs: ast::Expr, op: ast::BinaryOp, rhs: ast::Expr) -> ast::BinExpr {
let ast::Expr::BinExpr(ast) =
make::expr_bin_op(lhs.clone(), op, rhs.clone()).clone_for_update()
@@ -1212,6 +1225,43 @@ impl SyntaxFactory {
ast
}
+ pub fn attr_outer(&self, meta: ast::Meta) -> ast::Attr {
+ let ast = make::attr_outer(meta.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(meta.syntax().clone(), ast.meta().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
+ pub fn attr_inner(&self, meta: ast::Meta) -> ast::Attr {
+ let ast = make::attr_inner(meta.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(meta.syntax().clone(), ast.meta().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
+ pub fn meta_token_tree(&self, path: ast::Path, tt: ast::TokenTree) -> ast::Meta {
+ let ast = make::meta_token_tree(path.clone(), tt.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(path.syntax().clone(), ast.path().unwrap().syntax().clone());
+ builder.map_node(tt.syntax().clone(), ast.token_tree().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
pub fn token_tree(
&self,
delimiter: SyntaxKind,
@@ -1242,6 +1292,10 @@ impl SyntaxFactory {
pub fn whitespace(&self, text: &str) -> SyntaxToken {
make::tokens::whitespace(text)
}
+
+ pub fn ident(&self, text: &str) -> SyntaxToken {
+ make::tokens::ident(text)
+ }
}
// `ext` constructors
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index ced3b713d8..4afdda78a0 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -1,9 +1,11 @@
//! There are many AstNodes, but only a few tokens, so we hand-write them here.
+use std::ops::Range;
use std::{borrow::Cow, num::ParseIntError};
use rustc_literal_escaper::{
- EscapeError, MixedUnit, Mode, unescape_byte, unescape_char, unescape_mixed, unescape_unicode,
+ EscapeError, MixedUnit, unescape_byte, unescape_byte_str, unescape_c_str, unescape_char,
+ unescape_str,
};
use stdx::always;
@@ -150,7 +152,7 @@ impl QuoteOffsets {
pub trait IsString: AstToken {
const RAW_PREFIX: &'static str;
- const MODE: Mode;
+ fn unescape(s: &str, callback: impl FnMut(Range<usize>, Result<char, EscapeError>));
fn is_raw(&self) -> bool {
self.text().starts_with(Self::RAW_PREFIX)
}
@@ -185,7 +187,7 @@ pub trait IsString: AstToken {
let text = &self.text()[text_range_no_quotes - start];
let offset = text_range_no_quotes.start() - start;
- unescape_unicode(text, Self::MODE, &mut |range, unescaped_char| {
+ Self::unescape(text, &mut |range: Range<usize>, unescaped_char| {
if let Some((s, e)) = range.start.try_into().ok().zip(range.end.try_into().ok()) {
cb(TextRange::new(s, e) + offset, unescaped_char);
}
@@ -203,7 +205,9 @@ pub trait IsString: AstToken {
impl IsString for ast::String {
const RAW_PREFIX: &'static str = "r";
- const MODE: Mode = Mode::Str;
+ fn unescape(s: &str, cb: impl FnMut(Range<usize>, Result<char, EscapeError>)) {
+ unescape_str(s, cb)
+ }
}
impl ast::String {
@@ -218,20 +222,19 @@ impl ast::String {
let mut buf = String::new();
let mut prev_end = 0;
let mut has_error = None;
- unescape_unicode(text, Self::MODE, &mut |char_range, unescaped_char| match (
- unescaped_char,
- buf.capacity() == 0,
- ) {
- (Ok(c), false) => buf.push(c),
- (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
- prev_end = char_range.end
- }
- (Ok(c), true) => {
- buf.reserve_exact(text.len());
- buf.push_str(&text[..prev_end]);
- buf.push(c);
+ unescape_str(text, |char_range, unescaped_char| {
+ match (unescaped_char, buf.capacity() == 0) {
+ (Ok(c), false) => buf.push(c),
+ (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
+ prev_end = char_range.end
+ }
+ (Ok(c), true) => {
+ buf.reserve_exact(text.len());
+ buf.push_str(&text[..prev_end]);
+ buf.push(c);
+ }
+ (Err(e), _) => has_error = Some(e),
}
- (Err(e), _) => has_error = Some(e),
});
match (has_error, buf.capacity() == 0) {
@@ -244,7 +247,9 @@ impl ast::String {
impl IsString for ast::ByteString {
const RAW_PREFIX: &'static str = "br";
- const MODE: Mode = Mode::ByteStr;
+ fn unescape(s: &str, mut callback: impl FnMut(Range<usize>, Result<char, EscapeError>)) {
+ unescape_byte_str(s, |range, res| callback(range, res.map(char::from)))
+ }
}
impl ast::ByteString {
@@ -259,20 +264,19 @@ impl ast::ByteString {
let mut buf: Vec<u8> = Vec::new();
let mut prev_end = 0;
let mut has_error = None;
- unescape_unicode(text, Self::MODE, &mut |char_range, unescaped_char| match (
- unescaped_char,
- buf.capacity() == 0,
- ) {
- (Ok(c), false) => buf.push(c as u8),
- (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
- prev_end = char_range.end
- }
- (Ok(c), true) => {
- buf.reserve_exact(text.len());
- buf.extend_from_slice(&text.as_bytes()[..prev_end]);
- buf.push(c as u8);
+ unescape_byte_str(text, |char_range, unescaped_byte| {
+ match (unescaped_byte, buf.capacity() == 0) {
+ (Ok(b), false) => buf.push(b),
+ (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
+ prev_end = char_range.end
+ }
+ (Ok(b), true) => {
+ buf.reserve_exact(text.len());
+ buf.extend_from_slice(&text.as_bytes()[..prev_end]);
+ buf.push(b);
+ }
+ (Err(e), _) => has_error = Some(e),
}
- (Err(e), _) => has_error = Some(e),
});
match (has_error, buf.capacity() == 0) {
@@ -285,25 +289,10 @@ impl ast::ByteString {
impl IsString for ast::CString {
const RAW_PREFIX: &'static str = "cr";
- const MODE: Mode = Mode::CStr;
-
- fn escaped_char_ranges(&self, cb: &mut dyn FnMut(TextRange, Result<char, EscapeError>)) {
- let text_range_no_quotes = match self.text_range_between_quotes() {
- Some(it) => it,
- None => return,
- };
-
- let start = self.syntax().text_range().start();
- let text = &self.text()[text_range_no_quotes - start];
- let offset = text_range_no_quotes.start() - start;
-
- unescape_mixed(text, Self::MODE, &mut |range, unescaped_char| {
- let text_range =
- TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
- // XXX: This method should only be used for highlighting ranges. The unescaped
- // char/byte is not used. For simplicity, we return an arbitrary placeholder char.
- cb(text_range + offset, unescaped_char.map(|_| ' '));
- });
+ // NOTE: This method should only be used for highlighting ranges. The unescaped
+ // char/byte is not used. For simplicity, we return an arbitrary placeholder char.
+ fn unescape(s: &str, mut callback: impl FnMut(Range<usize>, Result<char, EscapeError>)) {
+ unescape_c_str(s, |range, _res| callback(range, Ok('_')))
}
}
@@ -323,10 +312,7 @@ impl ast::CString {
MixedUnit::Char(c) => buf.extend(c.encode_utf8(&mut [0; 4]).as_bytes()),
MixedUnit::HighByte(b) => buf.push(b),
};
- unescape_mixed(text, Self::MODE, &mut |char_range, unescaped| match (
- unescaped,
- buf.capacity() == 0,
- ) {
+ unescape_c_str(text, |char_range, unescaped| match (unescaped, buf.capacity() == 0) {
(Ok(u), false) => extend_unit(&mut buf, u),
(Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
prev_end = char_range.end
diff --git a/crates/syntax/src/syntax_editor.rs b/crates/syntax/src/syntax_editor.rs
index 31caf618be..3fa584850f 100644
--- a/crates/syntax/src/syntax_editor.rs
+++ b/crates/syntax/src/syntax_editor.rs
@@ -435,7 +435,7 @@ mod tests {
_ => {
let var_name = 2 + 2;
(var_name, true)
- }"#]];
+ },"#]];
expect.assert_eq(&edit.new_root.to_string());
assert_eq!(edit.find_annotation(placeholder_snippet).len(), 2);
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 5bfeb3bff8..4180f9cd18 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -6,7 +6,9 @@ mod block;
use itertools::Itertools;
use rowan::Direction;
-use rustc_literal_escaper::{self, EscapeError, Mode, unescape_mixed, unescape_unicode};
+use rustc_literal_escaper::{
+ EscapeError, unescape_byte, unescape_byte_str, unescape_c_str, unescape_char, unescape_str,
+};
use crate::{
AstNode, SyntaxError,
@@ -47,7 +49,7 @@ pub(crate) fn validate(root: &SyntaxNode, errors: &mut Vec<SyntaxError>) {
}
fn rustc_unescape_error_to_string(err: EscapeError) -> (&'static str, bool) {
- use rustc_literal_escaper::EscapeError as EE;
+ use EscapeError as EE;
#[rustfmt::skip]
let err_message = match err {
@@ -142,7 +144,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
ast::LiteralKind::String(s) => {
if !s.is_raw() {
if let Some(without_quotes) = unquote(text, 1, '"') {
- unescape_unicode(without_quotes, Mode::Str, &mut |range, char| {
+ unescape_str(without_quotes, |range, char| {
if let Err(err) = char {
push_err(1, range.start, err);
}
@@ -153,7 +155,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
ast::LiteralKind::ByteString(s) => {
if !s.is_raw() {
if let Some(without_quotes) = unquote(text, 2, '"') {
- unescape_unicode(without_quotes, Mode::ByteStr, &mut |range, char| {
+ unescape_byte_str(without_quotes, |range, char| {
if let Err(err) = char {
push_err(1, range.start, err);
}
@@ -164,7 +166,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
ast::LiteralKind::CString(s) => {
if !s.is_raw() {
if let Some(without_quotes) = unquote(text, 2, '"') {
- unescape_mixed(without_quotes, Mode::CStr, &mut |range, char| {
+ unescape_c_str(without_quotes, |range, char| {
if let Err(err) = char {
push_err(1, range.start, err);
}
@@ -174,20 +176,16 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
}
ast::LiteralKind::Char(_) => {
if let Some(without_quotes) = unquote(text, 1, '\'') {
- unescape_unicode(without_quotes, Mode::Char, &mut |range, char| {
- if let Err(err) = char {
- push_err(1, range.start, err);
- }
- });
+ if let Err(err) = unescape_char(without_quotes) {
+ push_err(1, 0, err);
+ }
}
}
ast::LiteralKind::Byte(_) => {
if let Some(without_quotes) = unquote(text, 2, '\'') {
- unescape_unicode(without_quotes, Mode::Byte, &mut |range, char| {
- if let Err(err) = char {
- push_err(2, range.start, err);
- }
- });
+ if let Err(err) = unescape_byte(without_quotes) {
+ push_err(2, 0, err);
+ }
}
}
ast::LiteralKind::IntNumber(_)
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs
index 8eb48f8d93..8937e53175 100644
--- a/crates/test-fixture/src/lib.rs
+++ b/crates/test-fixture/src/lib.rs
@@ -1,5 +1,5 @@
//! A set of high-level utility fixture methods to use in tests.
-use std::{mem, str::FromStr, sync};
+use std::{any::TypeId, mem, str::FromStr, sync};
use base_db::{
Crate, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData,
@@ -677,6 +677,10 @@ impl ProcMacroExpander for IdentityProcMacroExpander {
) -> Result<TopSubtree, ProcMacroExpansionError> {
Ok(subtree.clone())
}
+
+ fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+ other.type_id() == TypeId::of::<Self>()
+ }
}
// Expands to a macro_rules! macro, for issue #18089.
@@ -708,6 +712,10 @@ impl ProcMacroExpander for Issue18089ProcMacroExpander {
#subtree
})
}
+
+ fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+ other.type_id() == TypeId::of::<Self>()
+ }
}
// Pastes the attribute input as its output
@@ -728,6 +736,10 @@ impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
.cloned()
.ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
}
+
+ fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+ other.type_id() == TypeId::of::<Self>()
+ }
}
#[derive(Debug)]
@@ -759,6 +771,10 @@ impl ProcMacroExpander for Issue18840ProcMacroExpander {
top_subtree_delimiter_mut.close = def_site;
Ok(result)
}
+
+ fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+ other.type_id() == TypeId::of::<Self>()
+ }
}
#[derive(Debug)]
@@ -790,6 +806,10 @@ impl ProcMacroExpander for MirrorProcMacroExpander {
traverse(&mut builder, input.iter());
Ok(builder.build())
}
+
+ fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+ other.type_id() == TypeId::of::<Self>()
+ }
}
// Replaces every literal with an empty string literal and every identifier with its first letter,
@@ -830,6 +850,10 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
}
}
}
+
+ fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+ other.type_id() == TypeId::of::<Self>()
+ }
}
// Reads ident type within string quotes, for issue #17479.
@@ -855,6 +879,10 @@ impl ProcMacroExpander for Issue17479ProcMacroExpander {
#symbol()
})
}
+
+ fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+ other.type_id() == TypeId::of::<Self>()
+ }
}
// Reads ident type within string quotes, for issue #17479.
@@ -906,6 +934,10 @@ impl ProcMacroExpander for Issue18898ProcMacroExpander {
}
})
}
+
+ fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+ other.type_id() == TypeId::of::<Self>()
+ }
}
// Reads ident type within string quotes, for issue #17479.
@@ -933,6 +965,10 @@ impl ProcMacroExpander for DisallowCfgProcMacroExpander {
}
Ok(subtree.clone())
}
+
+ fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+ other.type_id() == TypeId::of::<Self>()
+ }
}
// Generates a new type by adding a suffix to the original name
@@ -987,4 +1023,8 @@ impl ProcMacroExpander for GenerateSuffixedTypeProcMacroExpander {
Ok(ret)
}
+
+ fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+ other.type_id() == TypeId::of::<Self>()
+ }
}
diff --git a/crates/test-utils/Cargo.toml b/crates/test-utils/Cargo.toml
index c27e850ce7..6d1930aa26 100644
--- a/crates/test-utils/Cargo.toml
+++ b/crates/test-utils/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
# Avoid adding deps here, this crate is widely used in tests it should compile fast!
diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs
index 7240069753..e830c6a7cf 100644
--- a/crates/test-utils/src/fixture.rs
+++ b/crates/test-utils/src/fixture.rs
@@ -412,23 +412,39 @@ impl MiniCore {
}
let mut active_regions = Vec::new();
+ let mut inactive_regions = Vec::new();
let mut seen_regions = Vec::new();
for line in lines {
let trimmed = line.trim();
if let Some(region) = trimmed.strip_prefix("// region:") {
- active_regions.push(region);
- continue;
+ if let Some(region) = region.strip_prefix('!') {
+ inactive_regions.push(region);
+ continue;
+ } else {
+ active_regions.push(region);
+ continue;
+ }
}
if let Some(region) = trimmed.strip_prefix("// endregion:") {
- let prev = active_regions.pop().unwrap();
+ let (prev, region) = if let Some(region) = region.strip_prefix('!') {
+ (inactive_regions.pop().unwrap(), region)
+ } else {
+ (active_regions.pop().unwrap(), region)
+ };
assert_eq!(prev, region, "unbalanced region pairs");
continue;
}
- let mut line_region = false;
- if let Some(idx) = trimmed.find("// :") {
- line_region = true;
- active_regions.push(&trimmed[idx + "// :".len()..]);
+ let mut active_line_region = 0;
+ let mut inactive_line_region = 0;
+ if let Some(idx) = trimmed.find("// :!") {
+ let regions = trimmed[idx + "// :!".len()..].split(", ");
+ inactive_line_region += regions.clone().count();
+ inactive_regions.extend(regions);
+ } else if let Some(idx) = trimmed.find("// :") {
+ let regions = trimmed[idx + "// :".len()..].split(", ");
+ active_line_region += regions.clone().count();
+ active_regions.extend(regions);
}
let mut keep = true;
@@ -438,18 +454,30 @@ impl MiniCore {
seen_regions.push(region);
keep &= self.has_flag(region);
}
+ for &region in &inactive_regions {
+ assert!(!region.starts_with(' '), "region marker starts with a space: {region:?}");
+ self.assert_valid_flag(region);
+ seen_regions.push(region);
+ keep &= !self.has_flag(region);
+ }
if keep {
buf.push_str(line);
}
- if line_region {
- active_regions.pop().unwrap();
+ if active_line_region > 0 {
+ active_regions.drain(active_regions.len() - active_line_region..);
+ }
+ if inactive_line_region > 0 {
+ inactive_regions.drain(inactive_regions.len() - active_line_region..);
}
}
if !active_regions.is_empty() {
panic!("unclosed regions: {active_regions:?} Add an `endregion` comment");
}
+ if !inactive_regions.is_empty() {
+ panic!("unclosed regions: {inactive_regions:?} Add an `endregion` comment");
+ }
for flag in &self.valid_flags {
if !seen_regions.iter().any(|it| it == flag) {
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index 4bdd791eb1..7b719b5dec 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -11,10 +11,13 @@
//! add:
//! asm:
//! assert:
+//! as_mut: sized
//! as_ref: sized
//! async_fn: fn, tuple, future, copy
//! bool_impl: option, fn
//! builtin_impls:
+//! borrow: sized
+//! borrow_mut: borrow
//! cell: copy, drop
//! clone: sized
//! coerce_pointee: derive, sized, unsize, coerce_unsized, dispatch_from_dyn
@@ -26,17 +29,18 @@
//! deref: sized
//! derive:
//! discriminant:
-//! drop:
+//! drop: sized
//! env: option
//! eq: sized
//! error: fmt
//! fmt: option, result, transmute, coerce_unsized, copy, clone, derive
+//! fmt_before_1_89_0: fmt
//! fn: tuple
//! from: sized, result
//! future: pin
//! coroutine: pin
//! dispatch_from_dyn: unsize, pin
-//! hash:
+//! hash: sized
//! include:
//! index: sized
//! infallible:
@@ -76,33 +80,46 @@
pub mod marker {
// region:sized
+ #[lang = "pointee_sized"]
+ #[fundamental]
+ #[rustc_specialization_trait]
+ #[rustc_coinductive]
+ pub trait PointeeSized {}
+
+ #[lang = "meta_sized"]
+ #[fundamental]
+ #[rustc_specialization_trait]
+ #[rustc_coinductive]
+ pub trait MetaSized: PointeeSized {}
+
#[lang = "sized"]
#[fundamental]
#[rustc_specialization_trait]
- pub trait Sized {}
+ #[rustc_coinductive]
+ pub trait Sized: MetaSized {}
// endregion:sized
// region:send
pub unsafe auto trait Send {}
- impl<T: ?Sized> !Send for *const T {}
- impl<T: ?Sized> !Send for *mut T {}
+ impl<T: PointeeSized> !Send for *const T {}
+ impl<T: PointeeSized> !Send for *mut T {}
// region:sync
- unsafe impl<T: Sync + ?Sized> Send for &T {}
- unsafe impl<T: Send + ?Sized> Send for &mut T {}
+ unsafe impl<T: Sync + PointeeSized> Send for &T {}
+ unsafe impl<T: Send + PointeeSized> Send for &mut T {}
// endregion:sync
// endregion:send
// region:sync
pub unsafe auto trait Sync {}
- impl<T: ?Sized> !Sync for *const T {}
- impl<T: ?Sized> !Sync for *mut T {}
+ impl<T: PointeeSized> !Sync for *const T {}
+ impl<T: PointeeSized> !Sync for *mut T {}
// endregion:sync
// region:unsize
#[lang = "unsize"]
- pub trait Unsize<T: ?Sized> {}
+ pub trait Unsize<T: PointeeSized>: PointeeSized {}
// endregion:unsize
// region:unpin
@@ -119,7 +136,7 @@ pub mod marker {
// endregion:derive
mod copy_impls {
- use super::Copy;
+ use super::{Copy, PointeeSized};
macro_rules! impl_copy {
($($t:ty)*) => {
@@ -136,9 +153,9 @@ pub mod marker {
bool char
}
- impl<T: ?Sized> Copy for *const T {}
- impl<T: ?Sized> Copy for *mut T {}
- impl<T: ?Sized> Copy for &T {}
+ impl<T: PointeeSized> Copy for *const T {}
+ impl<T: PointeeSized> Copy for *mut T {}
+ impl<T: PointeeSized> Copy for &T {}
impl Copy for ! {}
}
// endregion:copy
@@ -150,7 +167,7 @@ pub mod marker {
// region:phantom_data
#[lang = "phantom_data"]
- pub struct PhantomData<T: ?Sized>;
+ pub struct PhantomData<T: PointeeSized>;
// endregion:phantom_data
// region:discriminant
@@ -205,25 +222,31 @@ pub mod default {
// region:hash
pub mod hash {
+ use crate::marker::PointeeSized;
+
pub trait Hasher {}
- pub trait Hash {
+ pub trait Hash: PointeeSized {
fn hash<H: Hasher>(&self, state: &mut H);
}
// region:derive
- #[rustc_builtin_macro]
- pub macro Hash($item:item) {}
+ pub(crate) mod derive {
+ #[rustc_builtin_macro]
+ pub macro Hash($item:item) {}
+ }
+ pub use derive::Hash;
// endregion:derive
}
// endregion:hash
// region:cell
pub mod cell {
+ use crate::marker::PointeeSized;
use crate::mem;
#[lang = "unsafe_cell"]
- pub struct UnsafeCell<T: ?Sized> {
+ pub struct UnsafeCell<T: PointeeSized> {
value: T,
}
@@ -237,7 +260,7 @@ pub mod cell {
}
}
- pub struct Cell<T: ?Sized> {
+ pub struct Cell<T: PointeeSized> {
value: UnsafeCell<T>,
}
@@ -356,20 +379,41 @@ pub mod convert {
// endregion:from
// region:as_ref
- pub trait AsRef<T: ?Sized> {
+ pub trait AsRef<T: crate::marker::PointeeSized>: crate::marker::PointeeSized {
fn as_ref(&self) -> &T;
}
// endregion:as_ref
+ // region:as_mut
+ pub trait AsMut<T: crate::marker::PointeeSized>: crate::marker::PointeeSized {
+ fn as_mut(&mut self) -> &mut T;
+ }
+ // endregion:as_mut
// region:infallible
pub enum Infallible {}
// endregion:infallible
}
+pub mod borrow {
+ // region:borrow
+ pub trait Borrow<Borrowed: ?Sized> {
+ fn borrow(&self) -> &Borrowed;
+ }
+ // endregion:borrow
+
+ // region:borrow_mut
+ pub trait BorrowMut<Borrowed: ?Sized>: Borrow<Borrowed> {
+ fn borrow_mut(&mut self) -> &mut Borrowed;
+ }
+ // endregion:borrow_mut
+}
+
pub mod mem {
// region:manually_drop
+ use crate::marker::PointeeSized;
+
#[lang = "manually_drop"]
#[repr(transparent)]
- pub struct ManuallyDrop<T: ?Sized> {
+ pub struct ManuallyDrop<T: PointeeSized> {
value: T,
}
@@ -380,7 +424,7 @@ pub mod mem {
}
// region:deref
- impl<T: ?Sized> crate::ops::Deref for ManuallyDrop<T> {
+ impl<T: PointeeSized> crate::ops::Deref for ManuallyDrop<T> {
type Target = T;
fn deref(&self) -> &T {
&self.value
@@ -427,7 +471,7 @@ pub mod mem {
pub mod ptr {
// region:drop
#[lang = "drop_in_place"]
- pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
+ pub unsafe fn drop_in_place<T: crate::marker::PointeeSized>(to_drop: *mut T) {
unsafe { drop_in_place(to_drop) }
}
pub const unsafe fn read<T>(src: *const T) -> T {
@@ -443,7 +487,7 @@ pub mod ptr {
// region:pointee
#[lang = "pointee_trait"]
#[rustc_deny_explicit_impl(implement_via_object = false)]
- pub trait Pointee {
+ pub trait Pointee: crate::marker::PointeeSized {
#[lang = "metadata_type"]
type Metadata: Copy + Send + Sync + Ord + Hash + Unpin;
}
@@ -451,12 +495,14 @@ pub mod ptr {
// region:non_null
#[rustc_layout_scalar_valid_range_start(1)]
#[rustc_nonnull_optimization_guaranteed]
- pub struct NonNull<T: ?Sized> {
+ pub struct NonNull<T: crate::marker::PointeeSized> {
pointer: *const T,
}
// region:coerce_unsized
- impl<T: ?Sized, U: ?Sized> crate::ops::CoerceUnsized<NonNull<U>> for NonNull<T> where
- T: crate::marker::Unsize<U>
+ impl<T: crate::marker::PointeeSized, U: crate::marker::PointeeSized>
+ crate::ops::CoerceUnsized<NonNull<U>> for NonNull<T>
+ where
+ T: crate::marker::Unsize<U>,
{
}
// endregion:coerce_unsized
@@ -477,42 +523,44 @@ pub mod ptr {
pub mod ops {
// region:coerce_unsized
mod unsize {
- use crate::marker::Unsize;
+ use crate::marker::{PointeeSized, Unsize};
#[lang = "coerce_unsized"]
- pub trait CoerceUnsized<T: ?Sized> {}
+ pub trait CoerceUnsized<T> {}
- impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a mut U> for &'a mut T {}
- impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b mut T {}
- impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for &'a mut T {}
- impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for &'a mut T {}
+ impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<&'a mut U> for &'a mut T {}
+ impl<'a, 'b: 'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<&'a U> for &'b mut T {}
+ impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*mut U> for &'a mut T {}
+ impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*const U> for &'a mut T {}
- impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
- impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for &'a T {}
+ impl<'a, 'b: 'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<&'a U> for &'b T {}
+ impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*const U> for &'a T {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *mut T {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {}
+ impl<T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*mut U> for *mut T {}
+ impl<T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*const U> for *mut T {}
+ impl<T: PointeeSized + Unsize<U>, U: PointeeSized> CoerceUnsized<*const U> for *const T {}
}
pub use self::unsize::CoerceUnsized;
// endregion:coerce_unsized
// region:deref
mod deref {
+ use crate::marker::PointeeSized;
+
#[lang = "deref"]
- pub trait Deref {
+ pub trait Deref: PointeeSized {
#[lang = "deref_target"]
type Target: ?Sized;
fn deref(&self) -> &Self::Target;
}
- impl<T: ?Sized> Deref for &T {
+ impl<T: PointeeSized> Deref for &T {
type Target = T;
fn deref(&self) -> &T {
loop {}
}
}
- impl<T: ?Sized> Deref for &mut T {
+ impl<T: PointeeSized> Deref for &mut T {
type Target = T;
fn deref(&self) -> &T {
loop {}
@@ -520,19 +568,19 @@ pub mod ops {
}
// region:deref_mut
#[lang = "deref_mut"]
- pub trait DerefMut: Deref {
+ pub trait DerefMut: Deref + PointeeSized {
fn deref_mut(&mut self) -> &mut Self::Target;
}
// endregion:deref_mut
// region:receiver
#[lang = "receiver"]
- pub trait Receiver {
+ pub trait Receiver: PointeeSized {
#[lang = "receiver_target"]
type Target: ?Sized;
}
- impl<P: ?Sized, T: ?Sized> Receiver for P
+ impl<P: PointeeSized, T: PointeeSized> Receiver for P
where
P: Deref<Target = T>,
{
@@ -685,7 +733,7 @@ pub mod ops {
#[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
impl<A: Tuple, F: ?Sized> const Fn<A> for &F
where
- F: ~const Fn<A>,
+ F: [const] Fn<A>,
{
extern "rust-call" fn call(&self, args: A) -> F::Output {
(**self).call(args)
@@ -696,7 +744,7 @@ pub mod ops {
#[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
impl<A: Tuple, F: ?Sized> const FnMut<A> for &F
where
- F: ~const Fn<A>,
+ F: [const] Fn<A>,
{
extern "rust-call" fn call_mut(&mut self, args: A) -> F::Output {
(**self).call(args)
@@ -707,7 +755,7 @@ pub mod ops {
#[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
impl<A: Tuple, F: ?Sized> const FnOnce<A> for &F
where
- F: ~const Fn<A>,
+ F: [const] Fn<A>,
{
type Output = F::Output;
@@ -720,7 +768,7 @@ pub mod ops {
#[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
impl<A: Tuple, F: ?Sized> const FnMut<A> for &mut F
where
- F: ~const FnMut<A>,
+ F: [const] FnMut<A>,
{
extern "rust-call" fn call_mut(&mut self, args: A) -> F::Output {
(*self).call_mut(args)
@@ -731,7 +779,7 @@ pub mod ops {
#[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
impl<A: Tuple, F: ?Sized> const FnOnce<A> for &mut F
where
- F: ~const FnMut<A>,
+ F: [const] FnMut<A>,
{
type Output = F::Output;
extern "rust-call" fn call_once(self, args: A) -> F::Output {
@@ -963,8 +1011,7 @@ pub mod ops {
}
#[lang = "add_assign"]
- #[const_trait]
- pub trait AddAssign<Rhs = Self> {
+ pub const trait AddAssign<Rhs = Self> {
fn add_assign(&mut self, rhs: Rhs);
}
@@ -1005,18 +1052,18 @@ pub mod ops {
// region:dispatch_from_dyn
mod dispatch_from_dyn {
- use crate::marker::Unsize;
+ use crate::marker::{PointeeSized, Unsize};
#[lang = "dispatch_from_dyn"]
pub trait DispatchFromDyn<T> {}
- impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<&'a U> for &'a T {}
+ impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<&'a U> for &'a T {}
- impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<&'a mut U> for &'a mut T {}
+ impl<'a, T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<&'a mut U> for &'a mut T {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<*const U> for *const T {}
+ impl<T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<*const U> for *const T {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<*mut U> for *mut T {}
+ impl<T: PointeeSized + Unsize<U>, U: PointeeSized> DispatchFromDyn<*mut U> for *mut T {}
}
pub use self::dispatch_from_dyn::DispatchFromDyn;
// endregion:dispatch_from_dyn
@@ -1024,15 +1071,17 @@ pub mod ops {
// region:eq
pub mod cmp {
+ use crate::marker::PointeeSized;
+
#[lang = "eq"]
- pub trait PartialEq<Rhs: ?Sized = Self> {
+ pub trait PartialEq<Rhs: PointeeSized = Self>: PointeeSized {
fn eq(&self, other: &Rhs) -> bool;
fn ne(&self, other: &Rhs) -> bool {
!self.eq(other)
}
}
- pub trait Eq: PartialEq<Self> {}
+ pub trait Eq: PartialEq<Self> + PointeeSized {}
// region:derive
#[rustc_builtin_macro]
@@ -1043,11 +1092,11 @@ pub mod cmp {
// region:ord
#[lang = "partial_ord"]
- pub trait PartialOrd<Rhs: ?Sized = Self>: PartialEq<Rhs> {
+ pub trait PartialOrd<Rhs: PointeeSized = Self>: PartialEq<Rhs> + PointeeSized {
fn partial_cmp(&self, other: &Rhs) -> Option<Ordering>;
}
- pub trait Ord: Eq + PartialOrd<Self> {
+ pub trait Ord: Eq + PartialOrd<Self> + PointeeSized {
fn cmp(&self, other: &Self) -> Ordering;
}
@@ -1070,6 +1119,8 @@ pub mod cmp {
// region:fmt
pub mod fmt {
+ use crate::marker::PointeeSized;
+
pub struct Error;
pub type Result = crate::result::Result<(), Error>;
pub struct Formatter<'a>;
@@ -1105,10 +1156,10 @@ pub mod fmt {
}
}
- pub trait Debug {
+ pub trait Debug: PointeeSized {
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
- pub trait Display {
+ pub trait Display: PointeeSized {
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
@@ -1175,6 +1226,7 @@ pub mod fmt {
}
}
+ // region:fmt_before_1_89_0
#[lang = "format_unsafe_arg"]
pub struct UnsafeArg {
_private: (),
@@ -1185,6 +1237,7 @@ pub mod fmt {
UnsafeArg { _private: () }
}
}
+ // endregion:fmt_before_1_89_0
}
#[derive(Copy, Clone)]
@@ -1204,6 +1257,7 @@ pub mod fmt {
Arguments { pieces, fmt: None, args: &[] }
}
+ // region:fmt_before_1_89_0
pub fn new_v1_formatted(
pieces: &'a [&'static str],
args: &'a [rt::Argument<'a>],
@@ -1212,6 +1266,17 @@ pub mod fmt {
) -> Arguments<'a> {
Arguments { pieces, fmt: Some(fmt), args }
}
+ // endregion:fmt_before_1_89_0
+
+ // region:!fmt_before_1_89_0
+ pub unsafe fn new_v1_formatted(
+ pieces: &'a [&'static str],
+ args: &'a [rt::Argument<'a>],
+ fmt: &'a [rt::Placeholder],
+ ) -> Arguments<'a> {
+ Arguments { pieces, fmt: Some(fmt), args }
+ }
+ // endregion:!fmt_before_1_89_0
pub const fn as_str(&self) -> Option<&'static str> {
match (self.pieces, self.args) {
@@ -1223,8 +1288,11 @@ pub mod fmt {
}
// region:derive
- #[rustc_builtin_macro]
- pub macro Debug($item:item) {}
+ pub(crate) mod derive {
+ #[rustc_builtin_macro]
+ pub macro Debug($item:item) {}
+ }
+ pub use derive::Debug;
// endregion:derive
// region:builtin_impls
@@ -1253,7 +1321,7 @@ pub mod fmt {
}
}
- impl<T: Debug + ?Sized> Debug for &T {
+ impl<T: Debug + PointeeSized> Debug for &T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
(&**self).fmt(f)
}
@@ -1497,6 +1565,8 @@ pub mod iter {
mod traits {
mod iterator {
+ use crate::marker::PointeeSized;
+
#[doc(notable_trait)]
#[lang = "iterator"]
pub trait Iterator {
@@ -1528,7 +1598,7 @@ pub mod iter {
}
// endregion:iterators
}
- impl<I: Iterator + ?Sized> Iterator for &mut I {
+ impl<I: Iterator + PointeeSized> Iterator for &mut I {
type Item = I::Item;
fn next(&mut self) -> Option<I::Item> {
(**self).next()
@@ -1870,6 +1940,7 @@ pub mod prelude {
clone::Clone, // :clone
cmp::{Eq, PartialEq}, // :eq
cmp::{Ord, PartialOrd}, // :ord
+ convert::AsMut, // :as_mut
convert::AsRef, // :as_ref
convert::{From, Into, TryFrom, TryInto}, // :from
default::Default, // :default
@@ -1888,6 +1959,8 @@ pub mod prelude {
panic, // :panic
result::Result::{self, Err, Ok}, // :result
str::FromStr, // :str
+ fmt::derive::Debug, // :fmt, derive
+ hash::derive::Hash, // :hash, derive
};
}
diff --git a/crates/toolchain/Cargo.toml b/crates/toolchain/Cargo.toml
index 315a3a2890..f561c1c0e2 100644
--- a/crates/toolchain/Cargo.toml
+++ b/crates/toolchain/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
home = "0.5.11"
diff --git a/crates/tt/Cargo.toml b/crates/tt/Cargo.toml
index 529fad3244..82e7c24668 100644
--- a/crates/tt/Cargo.toml
+++ b/crates/tt/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
arrayvec.workspace = true
diff --git a/crates/tt/src/iter.rs b/crates/tt/src/iter.rs
index 0418c00174..3246156f1c 100644
--- a/crates/tt/src/iter.rs
+++ b/crates/tt/src/iter.rs
@@ -211,6 +211,7 @@ impl<'a, S: Copy> TtIter<'a, S> {
}
}
+#[derive(Clone)]
pub enum TtElement<'a, S> {
Leaf(&'a Leaf<S>),
Subtree(&'a Subtree<S>, TtIter<'a, S>),
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index 14574a6456..44123385c8 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -579,7 +579,7 @@ where
{
use rustc_lexer::LiteralKind;
- let token = rustc_lexer::tokenize(text).next_tuple();
+ let token = rustc_lexer::tokenize(text, rustc_lexer::FrontmatterAllowed::No).next_tuple();
let Some((rustc_lexer::Token {
kind: rustc_lexer::TokenKind::Literal { kind, suffix_start },
..
diff --git a/crates/vfs-notify/Cargo.toml b/crates/vfs-notify/Cargo.toml
index 9b32ee17ab..bd6c8331e6 100644
--- a/crates/vfs-notify/Cargo.toml
+++ b/crates/vfs-notify/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
tracing.workspace = true
diff --git a/crates/vfs/Cargo.toml b/crates/vfs/Cargo.toml
index 546195481c..e8a6195036 100644
--- a/crates/vfs/Cargo.toml
+++ b/crates/vfs/Cargo.toml
@@ -10,6 +10,7 @@ license.workspace = true
rust-version.workspace = true
[lib]
+doctest = false
[dependencies]
rustc-hash.workspace = true
diff --git a/crates/vfs/src/file_set.rs b/crates/vfs/src/file_set.rs
index 1228e2e177..0c41ede5b5 100644
--- a/crates/vfs/src/file_set.rs
+++ b/crates/vfs/src/file_set.rs
@@ -5,8 +5,8 @@
use std::fmt;
use fst::{IntoStreamer, Streamer};
-use nohash_hasher::IntMap;
-use rustc_hash::FxHashMap;
+use indexmap::IndexMap;
+use rustc_hash::{FxBuildHasher, FxHashMap};
use crate::{AnchoredPath, FileId, Vfs, VfsPath};
@@ -14,7 +14,7 @@ use crate::{AnchoredPath, FileId, Vfs, VfsPath};
#[derive(Default, Clone, Eq, PartialEq)]
pub struct FileSet {
files: FxHashMap<VfsPath, FileId>,
- paths: IntMap<FileId, VfsPath>,
+ paths: IndexMap<FileId, VfsPath, FxBuildHasher>,
}
impl FileSet {
diff --git a/docs/book/README.md b/docs/book/README.md
index 464ea02512..0a3161f3af 100644
--- a/docs/book/README.md
+++ b/docs/book/README.md
@@ -19,7 +19,7 @@ mdbook will rebuild the documentation as changes are made.
## Making updates
-While not required, installing the mdbook binary can be helfpul in order to see the changes.
+While not required, installing the mdbook binary can be helpful in order to see the changes.
Start with the mdbook [User Guide](https://rust-lang.github.io/mdBook/guide/installation.html) to familiarize yourself with the tool.
## Generated documentation
diff --git a/docs/book/src/SUMMARY.md b/docs/book/src/SUMMARY.md
index 1f211a97d7..dffdae94a6 100644
--- a/docs/book/src/SUMMARY.md
+++ b/docs/book/src/SUMMARY.md
@@ -6,6 +6,7 @@
- [rust-analyzer Binary](rust_analyzer_binary.md)
- [Other Editors](other_editors.md)
- [Troubleshooting](troubleshooting.md)
+ - [FAQ](faq.md)
- [Configuration](configuration.md)
- [Non-Cargo Based Projects](non_cargo_based_projects.md)
- [Security](security.md)
diff --git a/docs/book/src/configuration_generated.md b/docs/book/src/configuration_generated.md
index 0e07dadfb7..ebac26e1d6 100644
--- a/docs/book/src/configuration_generated.md
+++ b/docs/book/src/configuration_generated.md
@@ -13,6 +13,13 @@ Default: `"todo"`
Placeholder expression to use for missing expressions in assists.
+## rust-analyzer.assist.preferSelf {#assist.preferSelf}
+
+Default: `false`
+
+When inserting a type (e.g. in "fill match arms" assist), prefer to use `Self` over the type name where possible.
+
+
## rust-analyzer.assist.termSearch.borrowcheck {#assist.termSearch.borrowcheck}
Default: `true`
@@ -605,6 +612,13 @@ Default: `"client"`
Controls file watching implementation.
+## rust-analyzer.highlightRelated.branchExitPoints.enable {#highlightRelated.branchExitPoints.enable}
+
+Default: `true`
+
+Enables highlighting of related return values while the cursor is on any `match`, `if`, or match arm arrow (`=>`).
+
+
## rust-analyzer.highlightRelated.breakPoints.enable {#highlightRelated.breakPoints.enable}
Default: `true`
@@ -1531,6 +1545,17 @@ buck2's `rust-project` will likely be useful:
https://github.com/facebook/buck2/tree/main/integrations/rust-project.
+## rust-analyzer.workspace.symbol.search.excludeImports {#workspace.symbol.search.excludeImports}
+
+Default: `false`
+
+Exclude all imports from workspace symbol search.
+
+In addition to regular imports (which are always excluded),
+this option removes public imports (better known as re-exports)
+and removes imports that rename the imported symbol.
+
+
## rust-analyzer.workspace.symbol.search.kind {#workspace.symbol.search.kind}
Default: `"only_types"`
diff --git a/docs/book/src/contributing/README.md b/docs/book/src/contributing/README.md
index 05286b5429..beb94cdfc4 100644
--- a/docs/book/src/contributing/README.md
+++ b/docs/book/src/contributing/README.md
@@ -13,7 +13,7 @@ To learn more about how rust-analyzer works, see [Architecture](architecture.md)
It also explains the high-level layout of the source code.
Do skim through that document.
-We also publish rustdoc docs to pages: https://rust-lang.github.io/rust-analyzer/ide/.
+We also publish rustdoc docs to <https://rust-lang.github.io/rust-analyzer/ide/>.
Note though, that the internal documentation is very incomplete.
Various organizational and process issues are discussed in this document.
@@ -30,7 +30,7 @@ Discussion happens in this Zulip stream:
# Issue Labels
-* [good-first-issue](https://github.com/rust-lang/rust-analyzer/labels/good%20first%20issue)
+* [good-first-issue](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aissue%20state%3Aopen%20label%3A%22good%20first%20issue%22)
are good issues to get into the project.
* [E-has-instructions](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-has-instructions)
issues have links to the code in question and tests.
diff --git a/docs/book/src/contributing/lsp-extensions.md b/docs/book/src/contributing/lsp-extensions.md
index 1ada1cb24c..8c06f33a9f 100644
--- a/docs/book/src/contributing/lsp-extensions.md
+++ b/docs/book/src/contributing/lsp-extensions.md
@@ -694,24 +694,6 @@ interface CancelFlycheckParams {}
Cancels all running flycheck processes.
-## Syntax Tree
-
-**Method:** `rust-analyzer/syntaxTree`
-
-**Request:**
-
-```typescript
-interface SyntaxTreeParams {
- textDocument: TextDocumentIdentifier,
- range?: Range,
-}
-```
-
-**Response:** `string`
-
-Returns textual representation of a parse tree for the file/selected region.
-Primarily for debugging, but very useful for all people working on rust-analyzer itself.
-
## View Syntax Tree
**Method:** `rust-analyzer/viewSyntaxTree`
diff --git a/docs/book/src/faq.md b/docs/book/src/faq.md
new file mode 100644
index 0000000000..c872033090
--- /dev/null
+++ b/docs/book/src/faq.md
@@ -0,0 +1,7 @@
+# Troubleshooting FAQ
+
+### I see a warning "Variable `None` should have snake_case name, e.g. `none`"
+
+rust-analyzer fails to resolve `None`, and thinks you are binding to a variable
+named `None`. That's usually a sign of a corrupted sysroot. Try removing and re-installing
+it: `rustup component remove rust-src` then `rustup component install rust-src`.
diff --git a/docs/book/src/non_cargo_based_projects.md b/docs/book/src/non_cargo_based_projects.md
index bbdb48bbbc..befb631ec0 100644
--- a/docs/book/src/non_cargo_based_projects.md
+++ b/docs/book/src/non_cargo_based_projects.md
@@ -40,6 +40,9 @@ interface ProjectJson {
/// several different "sysroots" in one graph of
/// crates.
sysroot_src?: string;
+ /// A ProjectJson describing the crates of the sysroot.
+ sysroot_project?: ProjectJson;
+
/// List of groups of common cfg values, to allow
/// sharing them between crates.
///
diff --git a/docs/book/src/troubleshooting.md b/docs/book/src/troubleshooting.md
index 4092b9de99..a357cbef41 100644
--- a/docs/book/src/troubleshooting.md
+++ b/docs/book/src/troubleshooting.md
@@ -1,5 +1,8 @@
# Troubleshooting
+First, search the [troubleshooting FAQ](faq.html). If your problem appears
+there (and the proposed solution works for you), great! Otherwise, read on.
+
Start with looking at the rust-analyzer version. Try **rust-analyzer:
Show RA Version** in VS Code (using **Command Palette** feature
typically activated by Ctrl+Shift+P) or `rust-analyzer --version` in the
@@ -46,5 +49,4 @@ It is especially useful when the `repo` doesn’t use external crates or
the standard library.
If you want to go as far as to modify the source code to debug the
-problem, be sure to take a look at the [dev
-docs](https://github.com/rust-lang/rust-analyzer/tree/master/docs/dev)!
+problem, be sure to take a look at the [contribution guide](contributing/index.html)!
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index ab431140c1..57d67a69b2 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -695,9 +695,9 @@
}
},
"node_modules/@eslint/config-array/node_modules/brace-expansion": {
- "version": "1.1.11",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
- "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "version": "1.1.12",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+ "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -756,9 +756,9 @@
}
},
"node_modules/@eslint/eslintrc/node_modules/brace-expansion": {
- "version": "1.1.11",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
- "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "version": "1.1.12",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+ "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1407,9 +1407,9 @@
]
},
"node_modules/@vscode/vsce/node_modules/brace-expansion": {
- "version": "1.1.11",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
- "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "version": "1.1.12",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+ "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1599,9 +1599,9 @@
"license": "ISC"
},
"node_modules/brace-expansion": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
- "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
+ "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0"
@@ -2981,9 +2981,9 @@
}
},
"node_modules/eslint/node_modules/brace-expansion": {
- "version": "1.1.11",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
- "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "version": "1.1.12",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+ "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
"dev": true,
"license": "MIT",
"dependencies": {
diff --git a/editors/code/package.json b/editors/code/package.json
index c8c36cd85c..3cb4c21ee1 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -683,6 +683,16 @@
{
"title": "assist",
"properties": {
+ "rust-analyzer.assist.preferSelf": {
+ "markdownDescription": "When inserting a type (e.g. in \"fill match arms\" assist), prefer to use `Self` over the type name where possible.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "assist",
+ "properties": {
"rust-analyzer.assist.termSearch.borrowcheck": {
"markdownDescription": "Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check.",
"default": true,
@@ -1522,6 +1532,16 @@
{
"title": "highlightRelated",
"properties": {
+ "rust-analyzer.highlightRelated.branchExitPoints.enable": {
+ "markdownDescription": "Enables highlighting of related return values while the cursor is on any `match`, `if`, or match arm arrow (`=>`).",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "highlightRelated",
+ "properties": {
"rust-analyzer.highlightRelated.breakPoints.enable": {
"markdownDescription": "Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.",
"default": true,
@@ -2894,6 +2914,16 @@
{
"title": "workspace",
"properties": {
+ "rust-analyzer.workspace.symbol.search.excludeImports": {
+ "markdownDescription": "Exclude all imports from workspace symbol search.\n\nIn addition to regular imports (which are always excluded),\nthis option removes public imports (better known as re-exports)\nand removes imports that rename the imported symbol.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "workspace",
+ "properties": {
"rust-analyzer.workspace.symbol.search.kind": {
"markdownDescription": "Workspace symbol search kind.",
"default": "only_types",
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index cdeea7333a..073ff2f470 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -3,7 +3,7 @@ import * as lc from "vscode-languageclient/node";
import * as vscode from "vscode";
import * as ra from "../src/lsp_ext";
import * as Is from "vscode-languageclient/lib/common/utils/is";
-import { assert, unwrapUndefinable } from "./util";
+import { assert } from "./util";
import * as diagnostics from "./diagnostics";
import { WorkspaceEdit } from "vscode";
import { type Config, prepareVSCodeConfig } from "./config";
@@ -188,11 +188,17 @@ export async function createClient(
context: await client.code2ProtocolConverter.asCodeActionContext(context, token),
};
const callback = async (
- values: (lc.Command | lc.CodeAction)[] | null,
+ values: (lc.Command | lc.CodeAction | object)[] | null,
): Promise<(vscode.Command | vscode.CodeAction)[] | undefined> => {
if (values === null) return undefined;
const result: (vscode.CodeAction | vscode.Command)[] = [];
- const groups = new Map<string, { index: number; items: vscode.CodeAction[] }>();
+ const groups = new Map<
+ string,
+ {
+ primary: vscode.CodeAction;
+ items: { label: string; arguments: lc.CodeAction }[];
+ }
+ >();
for (const item of values) {
// In our case we expect to get code edits only from diagnostics
if (lc.CodeAction.is(item)) {
@@ -204,62 +210,55 @@ export async function createClient(
result.push(action);
continue;
}
- assert(
- isCodeActionWithoutEditsAndCommands(item),
- "We don't expect edits or commands here",
- );
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const kind = client.protocol2CodeConverter.asCodeActionKind((item as any).kind);
- const action = new vscode.CodeAction(item.title, kind);
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const group = (item as any).group;
- action.command = {
- command: "rust-analyzer.resolveCodeAction",
- title: item.title,
- arguments: [item],
- };
+ assertIsCodeActionWithoutEditsAndCommands(item);
+ const kind = client.protocol2CodeConverter.asCodeActionKind(item.kind);
+ const group = item.group;
- // Set a dummy edit, so that VS Code doesn't try to resolve this.
- action.edit = new WorkspaceEdit();
+ const mkAction = () => {
+ const action = new vscode.CodeAction(item.title, kind);
+ action.command = {
+ command: "rust-analyzer.resolveCodeAction",
+ title: item.title,
+ arguments: [item],
+ };
+ // Set a dummy edit, so that VS Code doesn't try to resolve this.
+ action.edit = new WorkspaceEdit();
+ return action;
+ };
if (group) {
let entry = groups.get(group);
if (!entry) {
- entry = { index: result.length, items: [] };
+ entry = { primary: mkAction(), items: [] };
groups.set(group, entry);
- result.push(action);
+ } else {
+ entry.items.push({
+ label: item.title,
+ arguments: item,
+ });
}
- entry.items.push(action);
} else {
- result.push(action);
+ result.push(mkAction());
}
}
- for (const [group, { index, items }] of groups) {
- if (items.length === 1) {
- const item = unwrapUndefinable(items[0]);
- result[index] = item;
- } else {
- const action = new vscode.CodeAction(group);
- const item = unwrapUndefinable(items[0]);
- action.kind = item.kind;
- action.command = {
+ for (const [group, { items, primary }] of groups) {
+ // This group contains more than one item, so rewrite it to be a group action
+ if (items.length !== 0) {
+ const args = [
+ {
+ label: primary.title,
+ arguments: primary.command!.arguments![0],
+ },
+ ...items,
+ ];
+ primary.title = group;
+ primary.command = {
command: "rust-analyzer.applyActionGroup",
title: "",
- arguments: [
- items.map((item) => {
- return {
- label: item.title,
- arguments: item.command!.arguments![0],
- };
- }),
- ],
+ arguments: [args],
};
-
- // Set a dummy edit, so that VS Code doesn't try to resolve this.
- action.edit = new WorkspaceEdit();
-
- result[index] = action;
}
+ result.push(primary);
}
return result;
};
@@ -363,17 +362,22 @@ class OverrideFeatures implements lc.StaticFeature {
clear(): void {}
}
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function isCodeActionWithoutEditsAndCommands(value: any): boolean {
- const candidate: lc.CodeAction = value;
- return (
+function assertIsCodeActionWithoutEditsAndCommands(
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ candidate: any,
+): asserts candidate is lc.CodeAction & {
+ group?: string;
+} {
+ assert(
candidate &&
- Is.string(candidate.title) &&
- (candidate.diagnostics === void 0 ||
- Is.typedArray(candidate.diagnostics, lc.Diagnostic.is)) &&
- (candidate.kind === void 0 || Is.string(candidate.kind)) &&
- candidate.edit === void 0 &&
- candidate.command === void 0
+ Is.string(candidate.title) &&
+ (candidate.diagnostics === undefined ||
+ Is.typedArray(candidate.diagnostics, lc.Diagnostic.is)) &&
+ (candidate.group === undefined || Is.string(candidate.group)) &&
+ (candidate.kind === undefined || Is.string(candidate.kind)) &&
+ candidate.edit === undefined &&
+ candidate.command === undefined,
+ `Expected a CodeAction without edits or commands, got: ${JSON.stringify(candidate)}`,
);
}
diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts
index 3ac1a933d9..25b30013fa 100644
--- a/editors/code/src/commands.ts
+++ b/editors/code/src/commands.ts
@@ -1114,11 +1114,11 @@ export function applySnippetWorkspaceEditCommand(_ctx: CtxInit): Cmd {
};
}
-export function run(ctx: CtxInit): Cmd {
+export function run(ctx: CtxInit, mode?: "cursor"): Cmd {
let prevRunnable: RunnableQuickPick | undefined;
return async () => {
- const item = await selectRunnable(ctx, prevRunnable);
+ const item = await selectRunnable(ctx, prevRunnable, false, true, mode);
if (!item) return;
item.detail = "rerun";
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index f36e18a73d..d2dc740c09 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -20,15 +20,9 @@ export class Config {
configureLang: vscode.Disposable | undefined;
readonly rootSection = "rust-analyzer";
- private readonly requiresServerReloadOpts = [
- "cargo",
- "procMacro",
- "serverPath",
- "server",
- "files",
- "cfg",
- "showSyntaxTree",
- ].map((opt) => `${this.rootSection}.${opt}`);
+ private readonly requiresServerReloadOpts = ["server", "files", "showSyntaxTree"].map(
+ (opt) => `${this.rootSection}.${opt}`,
+ );
private readonly requiresWindowReloadOpts = ["testExplorer"].map(
(opt) => `${this.rootSection}.${opt}`,
@@ -208,7 +202,7 @@ export class Config {
}
get serverPath() {
- return this.get<null | string>("server.path") ?? this.get<null | string>("serverPath");
+ return this.get<null | string>("server.path");
}
get serverExtraEnv(): Env {
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts
index 5e50073069..996298524f 100644
--- a/editors/code/src/main.ts
+++ b/editors/code/src/main.ts
@@ -167,7 +167,7 @@ function createCommands(): Record<string, CommandFactory> {
viewCrateGraph: { enabled: commands.viewCrateGraph },
viewFullCrateGraph: { enabled: commands.viewFullCrateGraph },
expandMacro: { enabled: commands.expandMacro },
- run: { enabled: commands.run },
+ run: { enabled: (ctx) => (mode?: "cursor") => commands.run(ctx, mode)() },
copyRunCommandLine: { enabled: commands.copyRunCommandLine },
debug: { enabled: commands.debug },
newDebugConfig: { enabled: commands.newDebugConfig },
diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts
index 40027cc7c8..95166c427b 100644
--- a/editors/code/src/run.ts
+++ b/editors/code/src/run.ts
@@ -18,10 +18,15 @@ export async function selectRunnable(
prevRunnable?: RunnableQuickPick,
debuggeeOnly = false,
showButtons: boolean = true,
+ mode?: "cursor",
): Promise<RunnableQuickPick | undefined> {
const editor = ctx.activeRustEditor ?? ctx.activeCargoTomlEditor;
if (!editor) return;
+ if (mode === "cursor") {
+ return selectRunnableAtCursor(ctx, editor, prevRunnable);
+ }
+
// show a placeholder while we get the runnables from the server
const quickPick = vscode.window.createQuickPick();
quickPick.title = "Select Runnable";
@@ -54,6 +59,58 @@ export async function selectRunnable(
);
}
+async function selectRunnableAtCursor(
+ ctx: CtxInit,
+ editor: RustEditor,
+ prevRunnable?: RunnableQuickPick,
+): Promise<RunnableQuickPick | undefined> {
+ const runnableQuickPicks = await getRunnables(ctx.client, editor, prevRunnable, false);
+ let runnableQuickPickAtCursor = null;
+ const cursorPosition = ctx.client.code2ProtocolConverter.asPosition(editor.selection.active);
+ for (const runnableQuickPick of runnableQuickPicks) {
+ if (!runnableQuickPick.runnable.location?.targetRange) {
+ continue;
+ }
+ const runnableQuickPickRange = runnableQuickPick.runnable.location.targetRange;
+ if (
+ runnableQuickPickAtCursor?.runnable?.location?.targetRange != null &&
+ rangeContainsOtherRange(
+ runnableQuickPickRange,
+ runnableQuickPickAtCursor.runnable.location.targetRange,
+ )
+ ) {
+ continue;
+ }
+ if (rangeContainsPosition(runnableQuickPickRange, cursorPosition)) {
+ runnableQuickPickAtCursor = runnableQuickPick;
+ }
+ }
+ if (runnableQuickPickAtCursor == null) {
+ return;
+ }
+ return Promise.resolve(runnableQuickPickAtCursor);
+}
+
+function rangeContainsPosition(range: lc.Range, position: lc.Position): boolean {
+ return (
+ (position.line > range.start.line ||
+ (position.line === range.start.line && position.character >= range.start.character)) &&
+ (position.line < range.end.line ||
+ (position.line === range.end.line && position.character <= range.end.character))
+ );
+}
+
+function rangeContainsOtherRange(range: lc.Range, otherRange: lc.Range) {
+ return (
+ (range.start.line < otherRange.start.line ||
+ (range.start.line === otherRange.start.line &&
+ range.start.character <= otherRange.start.character)) &&
+ (range.end.line > otherRange.end.line ||
+ (range.end.line === otherRange.end.line &&
+ range.end.character >= otherRange.end.character))
+ );
+}
+
export class RunnableQuickPick implements vscode.QuickPickItem {
public label: string;
public description?: string | undefined;
diff --git a/lib/lsp-server/Cargo.toml b/lib/lsp-server/Cargo.toml
index 1dc6d3ce5d..1fc1da50a0 100644
--- a/lib/lsp-server/Cargo.toml
+++ b/lib/lsp-server/Cargo.toml
@@ -7,7 +7,7 @@ repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-ser
edition = "2024"
[dependencies]
-log = "0.4.26"
+log = "0.4.27"
serde_json = "1.0.140"
serde = { version = "1.0.219" }
serde_derive = { version = "1.0.219" }
@@ -15,7 +15,10 @@ crossbeam-channel.workspace = true
[dev-dependencies]
lsp-types = "=0.95"
-ctrlc = "3.4.5"
+ctrlc = "3.4.7"
+anyhow.workspace = true
+rustc-hash.workspace = true
+toolchain.workspace = true
[lints]
workspace = true
diff --git a/lib/lsp-server/examples/goto_def.rs b/lib/lsp-server/examples/goto_def.rs
deleted file mode 100644
index 6b3acda7bc..0000000000
--- a/lib/lsp-server/examples/goto_def.rs
+++ /dev/null
@@ -1,132 +0,0 @@
-//! A minimal example LSP server that can only respond to the `gotoDefinition` request. To use
-//! this example, execute it and then send an `initialize` request.
-//!
-//! ```no_run
-//! Content-Length: 85
-//!
-//! {"jsonrpc": "2.0", "method": "initialize", "id": 1, "params": {"capabilities": {}}}
-//! ```
-//!
-//! This will respond with a server response. Then send it a `initialized` notification which will
-//! have no response.
-//!
-//! ```no_run
-//! Content-Length: 59
-//!
-//! {"jsonrpc": "2.0", "method": "initialized", "params": {}}
-//! ```
-//!
-//! Once these two are sent, then we enter the main loop of the server. The only request this
-//! example can handle is `gotoDefinition`:
-//!
-//! ```no_run
-//! Content-Length: 159
-//!
-//! {"jsonrpc": "2.0", "method": "textDocument/definition", "id": 2, "params": {"textDocument": {"uri": "file://temp"}, "position": {"line": 1, "character": 1}}}
-//! ```
-//!
-//! To finish up without errors, send a shutdown request:
-//!
-//! ```no_run
-//! Content-Length: 67
-//!
-//! {"jsonrpc": "2.0", "method": "shutdown", "id": 3, "params": null}
-//! ```
-//!
-//! The server will exit the main loop and finally we send a `shutdown` notification to stop
-//! the server.
-//!
-//! ```
-//! Content-Length: 54
-//!
-//! {"jsonrpc": "2.0", "method": "exit", "params": null}
-//! ```
-
-#![allow(clippy::print_stderr)]
-
-use std::error::Error;
-
-use lsp_types::OneOf;
-use lsp_types::{
- GotoDefinitionResponse, InitializeParams, ServerCapabilities, request::GotoDefinition,
-};
-
-use lsp_server::{Connection, ExtractError, Message, Request, RequestId, Response};
-
-fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
- // Note that we must have our logging only write out to stderr.
- eprintln!("starting generic LSP server");
-
- // Create the transport. Includes the stdio (stdin and stdout) versions but this could
- // also be implemented to use sockets or HTTP.
- let (connection, io_threads) = Connection::stdio();
-
- // Run the server and wait for the two threads to end (typically by trigger LSP Exit event).
- let server_capabilities = serde_json::to_value(&ServerCapabilities {
- definition_provider: Some(OneOf::Left(true)),
- ..Default::default()
- })
- .unwrap();
- let initialization_params = match connection.initialize(server_capabilities) {
- Ok(it) => it,
- Err(e) => {
- if e.channel_is_disconnected() {
- io_threads.join()?;
- }
- return Err(e.into());
- }
- };
- main_loop(connection, initialization_params)?;
- io_threads.join()?;
-
- // Shut down gracefully.
- eprintln!("shutting down server");
- Ok(())
-}
-
-fn main_loop(
- connection: Connection,
- params: serde_json::Value,
-) -> Result<(), Box<dyn Error + Sync + Send>> {
- let _params: InitializeParams = serde_json::from_value(params).unwrap();
- eprintln!("starting example main loop");
- for msg in &connection.receiver {
- eprintln!("got msg: {msg:?}");
- match msg {
- Message::Request(req) => {
- if connection.handle_shutdown(&req)? {
- return Ok(());
- }
- eprintln!("got request: {req:?}");
- match cast::<GotoDefinition>(req) {
- Ok((id, params)) => {
- eprintln!("got gotoDefinition request #{id}: {params:?}");
- let result = Some(GotoDefinitionResponse::Array(Vec::new()));
- let result = serde_json::to_value(&result).unwrap();
- let resp = Response { id, result: Some(result), error: None };
- connection.sender.send(Message::Response(resp))?;
- continue;
- }
- Err(err @ ExtractError::JsonError { .. }) => panic!("{err:?}"),
- Err(ExtractError::MethodMismatch(req)) => req,
- };
- // ...
- }
- Message::Response(resp) => {
- eprintln!("got response: {resp:?}");
- }
- Message::Notification(not) => {
- eprintln!("got notification: {not:?}");
- }
- }
- }
- Ok(())
-}
-
-fn cast<R>(req: Request) -> Result<(RequestId, R::Params), ExtractError<Request>>
-where
- R: lsp_types::request::Request,
- R::Params: serde::de::DeserializeOwned,
-{
- req.extract(R::METHOD)
-}
diff --git a/lib/lsp-server/examples/manual_test.sh b/lib/lsp-server/examples/manual_test.sh
new file mode 100755
index 0000000000..d028ac4330
--- /dev/null
+++ b/lib/lsp-server/examples/manual_test.sh
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+# Simple nine-packet LSP test for examples/minimal_lsp.rs
+# Usage (two tabs):
+#
+# mkfifo /tmp/lsp_pipe # one-time setup
+# # tab 1 – run the server
+# cat /tmp/lsp_pipe | cargo run --example minimal_lsp
+#
+# # tab 2 – fire the packets (this script)
+# bash examples/manual_test.sh # blocks until server exits
+#
+# If you don’t use a second tab, run the script in the background:
+#
+# bash examples/manual_test.sh & # writer in background
+# cat /tmp/lsp_pipe | cargo run --example minimal_lsp
+#
+# The script opens /tmp/lsp_pipe for writing (exec 3>) and sends each JSON
+# packet with a correct Content-Length header.
+#
+# One-liner alternative (single terminal, no FIFO):
+#
+# cargo run --example minimal_lsp <<'EOF'
+# … nine packets …
+# EOF
+#
+# Both approaches feed identical bytes to minimal_lsp via stdin.
+
+set -eu
+PIPE=${1:-/tmp/lsp_pipe}
+
+mkfifo -m 600 "$PIPE" 2>/dev/null || true # create once, ignore if exists
+
+# open write end so the fifo stays open
+exec 3> "$PIPE"
+
+send() {
+ local body=$1
+ local len=$(printf '%s' "$body" | wc -c)
+ printf 'Content-Length: %d\r\n\r\n%s' "$len" "$body" >&3
+}
+
+send '{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"capabilities":{}}}'
+send '{"jsonrpc":"2.0","method":"initialized","params":{}}'
+send '{"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{"uri":"file:///tmp/foo.rs","languageId":"rust","version":1,"text":"fn main( ){println!(\"hi\") }"}}}'
+send '{"jsonrpc":"2.0","id":2,"method":"textDocument/completion","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}'
+send '{"jsonrpc":"2.0","id":3,"method":"textDocument/hover","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}'
+send '{"jsonrpc":"2.0","id":4,"method":"textDocument/definition","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}'
+send '{"jsonrpc":"2.0","id":5,"method":"textDocument/formatting","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"options":{"tabSize":4,"insertSpaces":true}}}'
+send '{"jsonrpc":"2.0","id":6,"method":"shutdown","params":null}'
+send '{"jsonrpc":"2.0","method":"exit","params":null}'
+
+exec 3>&-
+echo "Packets sent – watch the other terminal for responses."
diff --git a/lib/lsp-server/examples/minimal_lsp.rs b/lib/lsp-server/examples/minimal_lsp.rs
new file mode 100644
index 0000000000..5eef999e06
--- /dev/null
+++ b/lib/lsp-server/examples/minimal_lsp.rs
@@ -0,0 +1,335 @@
+//! Minimal Language‑Server‑Protocol example: **`minimal_lsp.rs`**
+//! =============================================================
+//!
+//! | ↔ / ← | LSP method | What the implementation does |
+//! |-------|------------|------------------------------|
+//! | ↔ | `initialize` / `initialized` | capability handshake |
+//! | ← | `textDocument/publishDiagnostics` | pushes a dummy info diagnostic whenever the buffer changes |
+//! | ← | `textDocument/definition` | echoes an empty location array so the jump works |
+//! | ← | `textDocument/completion` | offers one hard‑coded item `HelloFromLSP` |
+//! | ← | `textDocument/hover` | shows *Hello from minimal_lsp* markdown |
+//! | ← | `textDocument/formatting` | pipes the doc through **rustfmt** and returns a full‑file edit |
+//!
+//! ### Quick start
+//! ```bash
+//! cd rust-analyzer/lib/lsp-server
+//! cargo run --example minimal_lsp
+//! ```
+//!
+//! ### Minimal manual session (all nine packets)
+//! ```text
+//! # 1. initialize - server replies with capabilities
+//! Content-Length: 85
+
+//! {"jsonrpc":"2.0","id":1,"method":"initialize","params":{"capabilities":{}}}
+//!
+//! # 2. initialized - no response expected
+//! Content-Length: 59
+
+//! {"jsonrpc":"2.0","method":"initialized","params":{}}
+//!
+//! # 3. didOpen - provide initial buffer text
+//! Content-Length: 173
+
+//! {"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{"uri":"file:///tmp/foo.rs","languageId":"rust","version":1,"text":"fn main( ){println!(\"hi\") }"}}}
+//!
+//! # 4. completion - expect HelloFromLSP
+//! Content-Length: 139
+
+//! {"jsonrpc":"2.0","id":2,"method":"textDocument/completion","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}
+//!
+//! # 5. hover - expect markdown greeting
+//! Content-Length: 135
+
+//! {"jsonrpc":"2.0","id":3,"method":"textDocument/hover","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}
+//!
+//! # 6. goto-definition - dummy empty array
+//! Content-Length: 139
+
+//! {"jsonrpc":"2.0","id":4,"method":"textDocument/definition","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}
+//!
+//! # 7. formatting - rustfmt full document
+//! Content-Length: 157
+
+//! {"jsonrpc":"2.0","id":5,"method":"textDocument/formatting","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"options":{"tabSize":4,"insertSpaces":true}}}
+//!
+//! # 8. shutdown request - server acks and prepares to exit
+//! Content-Length: 67
+
+//! {"jsonrpc":"2.0","id":6,"method":"shutdown","params":null}
+//!
+//! # 9. exit notification - terminates the server
+//! Content-Length: 54
+
+//! {"jsonrpc":"2.0","method":"exit","params":null}
+//! ```
+//!
+
+use std::{error::Error, io::Write};
+
+use rustc_hash::FxHashMap; // fast hash map
+use std::process::Stdio;
+use toolchain::command; // clippy-approved wrapper
+
+#[allow(clippy::print_stderr, clippy::disallowed_types, clippy::disallowed_methods)]
+use anyhow::{Context, Result, anyhow, bail};
+use lsp_server::{Connection, Message, Request as ServerRequest, RequestId, Response};
+use lsp_types::notification::Notification as _; // for METHOD consts
+use lsp_types::request::Request as _;
+use lsp_types::{
+ CompletionItem,
+ CompletionItemKind,
+ // capability helpers
+ CompletionOptions,
+ CompletionResponse,
+ Diagnostic,
+ DiagnosticSeverity,
+ DidChangeTextDocumentParams,
+ DidOpenTextDocumentParams,
+ DocumentFormattingParams,
+ Hover,
+ HoverContents,
+ HoverProviderCapability,
+ // core
+ InitializeParams,
+ MarkedString,
+ OneOf,
+ Position,
+ PublishDiagnosticsParams,
+ Range,
+ ServerCapabilities,
+ TextDocumentSyncCapability,
+ TextDocumentSyncKind,
+ TextEdit,
+ Url,
+ // notifications
+ notification::{DidChangeTextDocument, DidOpenTextDocument, PublishDiagnostics},
+ // requests
+ request::{Completion, Formatting, GotoDefinition, HoverRequest},
+};
+
+// =====================================================================
+// main
+// =====================================================================
+
+#[allow(clippy::print_stderr)]
+fn main() -> std::result::Result<(), Box<dyn Error + Sync + Send>> {
+ log::error!("starting minimal_lsp");
+
+ // transport
+ let (connection, io_thread) = Connection::stdio();
+
+ // advertised capabilities
+ let caps = ServerCapabilities {
+ text_document_sync: Some(TextDocumentSyncCapability::Kind(TextDocumentSyncKind::FULL)),
+ completion_provider: Some(CompletionOptions::default()),
+ definition_provider: Some(OneOf::Left(true)),
+ hover_provider: Some(HoverProviderCapability::Simple(true)),
+ document_formatting_provider: Some(OneOf::Left(true)),
+ ..Default::default()
+ };
+ let init_value = serde_json::json!({
+ "capabilities": caps,
+ "offsetEncoding": ["utf-8"],
+ });
+
+ let init_params = connection.initialize(init_value)?;
+ main_loop(connection, init_params)?;
+ io_thread.join()?;
+ log::error!("shutting down server");
+ Ok(())
+}
+
+// =====================================================================
+// event loop
+// =====================================================================
+
+fn main_loop(
+ connection: Connection,
+ params: serde_json::Value,
+) -> std::result::Result<(), Box<dyn Error + Sync + Send>> {
+ let _init: InitializeParams = serde_json::from_value(params)?;
+ let mut docs: FxHashMap<Url, String> = FxHashMap::default();
+
+ for msg in &connection.receiver {
+ match msg {
+ Message::Request(req) => {
+ if connection.handle_shutdown(&req)? {
+ break;
+ }
+ if let Err(err) = handle_request(&connection, &req, &mut docs) {
+ log::error!("[lsp] request {} failed: {err}", &req.method);
+ }
+ }
+ Message::Notification(note) => {
+ if let Err(err) = handle_notification(&connection, &note, &mut docs) {
+ log::error!("[lsp] notification {} failed: {err}", note.method);
+ }
+ }
+ Message::Response(resp) => log::error!("[lsp] response: {resp:?}"),
+ }
+ }
+ Ok(())
+}
+
+// =====================================================================
+// notifications
+// =====================================================================
+
+fn handle_notification(
+ conn: &Connection,
+ note: &lsp_server::Notification,
+ docs: &mut FxHashMap<Url, String>,
+) -> Result<()> {
+ match note.method.as_str() {
+ DidOpenTextDocument::METHOD => {
+ let p: DidOpenTextDocumentParams = serde_json::from_value(note.params.clone())?;
+ let uri = p.text_document.uri;
+ docs.insert(uri.clone(), p.text_document.text);
+ publish_dummy_diag(conn, &uri)?;
+ }
+ DidChangeTextDocument::METHOD => {
+ let p: DidChangeTextDocumentParams = serde_json::from_value(note.params.clone())?;
+ if let Some(change) = p.content_changes.into_iter().next() {
+ let uri = p.text_document.uri;
+ docs.insert(uri.clone(), change.text);
+ publish_dummy_diag(conn, &uri)?;
+ }
+ }
+ _ => {}
+ }
+ Ok(())
+}
+
+// =====================================================================
+// requests
+// =====================================================================
+
+fn handle_request(
+ conn: &Connection,
+ req: &ServerRequest,
+ docs: &mut FxHashMap<Url, String>,
+) -> Result<()> {
+ match req.method.as_str() {
+ GotoDefinition::METHOD => {
+ send_ok(conn, req.id.clone(), &lsp_types::GotoDefinitionResponse::Array(Vec::new()))?;
+ }
+ Completion::METHOD => {
+ let item = CompletionItem {
+ label: "HelloFromLSP".into(),
+ kind: Some(CompletionItemKind::FUNCTION),
+ detail: Some("dummy completion".into()),
+ ..Default::default()
+ };
+ send_ok(conn, req.id.clone(), &CompletionResponse::Array(vec![item]))?;
+ }
+ HoverRequest::METHOD => {
+ let hover = Hover {
+ contents: HoverContents::Scalar(MarkedString::String(
+ "Hello from *minimal_lsp*".into(),
+ )),
+ range: None,
+ };
+ send_ok(conn, req.id.clone(), &hover)?;
+ }
+ Formatting::METHOD => {
+ let p: DocumentFormattingParams = serde_json::from_value(req.params.clone())?;
+ let uri = p.text_document.uri;
+ let text = docs
+ .get(&uri)
+ .ok_or_else(|| anyhow!("document not in cache – did you send DidOpen?"))?;
+ let formatted = run_rustfmt(text)?;
+ let edit = TextEdit { range: full_range(text), new_text: formatted };
+ send_ok(conn, req.id.clone(), &vec![edit])?;
+ }
+ _ => send_err(
+ conn,
+ req.id.clone(),
+ lsp_server::ErrorCode::MethodNotFound,
+ "unhandled method",
+ )?,
+ }
+ Ok(())
+}
+
+// =====================================================================
+// diagnostics
+// =====================================================================
+fn publish_dummy_diag(conn: &Connection, uri: &Url) -> Result<()> {
+ let diag = Diagnostic {
+ range: Range::new(Position::new(0, 0), Position::new(0, 1)),
+ severity: Some(DiagnosticSeverity::INFORMATION),
+ code: None,
+ code_description: None,
+ source: Some("minimal_lsp".into()),
+ message: "dummy diagnostic".into(),
+ related_information: None,
+ tags: None,
+ data: None,
+ };
+ let params =
+ PublishDiagnosticsParams { uri: uri.clone(), diagnostics: vec![diag], version: None };
+ conn.sender.send(Message::Notification(lsp_server::Notification::new(
+ PublishDiagnostics::METHOD.to_owned(),
+ params,
+ )))?;
+ Ok(())
+}
+
+// =====================================================================
+// helpers
+// =====================================================================
+
+fn run_rustfmt(input: &str) -> Result<String> {
+ let cwd = std::env::current_dir().expect("can't determine CWD");
+ let mut child = command("rustfmt", &cwd, &FxHashMap::default())
+ .arg("--emit")
+ .arg("stdout")
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .context("failed to spawn rustfmt – is it installed?")?;
+
+ let Some(stdin) = child.stdin.as_mut() else {
+ bail!("stdin unavailable");
+ };
+ stdin.write_all(input.as_bytes())?;
+ let output = child.wait_with_output()?;
+ if !output.status.success() {
+ let stderr = String::from_utf8_lossy(&output.stderr);
+ bail!("rustfmt failed: {stderr}");
+ }
+ Ok(String::from_utf8(output.stdout)?)
+}
+
+fn full_range(text: &str) -> Range {
+ let last_line_idx = text.lines().count().saturating_sub(1) as u32;
+ let last_col = text.lines().last().map_or(0, |l| l.chars().count()) as u32;
+ Range::new(Position::new(0, 0), Position::new(last_line_idx, last_col))
+}
+
+fn send_ok<T: serde::Serialize>(conn: &Connection, id: RequestId, result: &T) -> Result<()> {
+ let resp = Response { id, result: Some(serde_json::to_value(result)?), error: None };
+ conn.sender.send(Message::Response(resp))?;
+ Ok(())
+}
+
+fn send_err(
+ conn: &Connection,
+ id: RequestId,
+ code: lsp_server::ErrorCode,
+ msg: &str,
+) -> Result<()> {
+ let resp = Response {
+ id,
+ result: None,
+ error: Some(lsp_server::ResponseError {
+ code: code as i32,
+ message: msg.into(),
+ data: None,
+ }),
+ };
+ conn.sender.send(Message::Response(resp))?;
+ Ok(())
+}
diff --git a/lib/lsp-server/src/msg.rs b/lib/lsp-server/src/msg.rs
index 2749557b91..399d674e41 100644
--- a/lib/lsp-server/src/msg.rs
+++ b/lib/lsp-server/src/msg.rs
@@ -283,12 +283,12 @@ fn read_msg_text(inp: &mut dyn BufRead) -> io::Result<Option<String>> {
buf.resize(size, 0);
inp.read_exact(&mut buf)?;
let buf = String::from_utf8(buf).map_err(invalid_data)?;
- log::debug!("< {}", buf);
+ log::debug!("< {buf}");
Ok(Some(buf))
}
fn write_msg_text(out: &mut dyn Write, msg: &str) -> io::Result<()> {
- log::debug!("> {}", msg);
+ log::debug!("> {msg}");
write!(out, "Content-Length: {}\r\n\r\n", msg.len())?;
out.write_all(msg.as_bytes())?;
out.flush()?;
diff --git a/lib/lsp-server/src/stdio.rs b/lib/lsp-server/src/stdio.rs
index c558b6c6e7..eccc89fd59 100644
--- a/lib/lsp-server/src/stdio.rs
+++ b/lib/lsp-server/src/stdio.rs
@@ -38,7 +38,7 @@ pub(crate) fn stdio_transport() -> (Sender<Message>, Receiver<Message>, IoThread
while let Some(msg) = Message::read(&mut stdin)? {
let is_exit = matches!(&msg, Message::Notification(n) if n.is_exit());
- debug!("sending message {:#?}", msg);
+ debug!("sending message {msg:#?}");
if let Err(e) = reader_sender.send(msg) {
return Err(io::Error::other(e));
}
diff --git a/rust-version b/rust-version
index af0dd5c9ac..57ff326ce5 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-7c10378e1fee5ddc6573b916aeb884ab10e0de17
+a9fb6103b05c6ad6eee6bed4c0bb5a2e8e1024c6
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml
index bb7d83c4b7..8cd5811c0a 100644
--- a/xtask/Cargo.toml
+++ b/xtask/Cargo.toml
@@ -9,14 +9,14 @@ rust-version.workspace = true
[dependencies]
anyhow.workspace = true
directories = "6.0"
-flate2 = "1.1.0"
+flate2 = "1.1.2"
write-json = "0.1.4"
xshell.workspace = true
xflags = "0.3.2"
time = { version = "0.3", default-features = false }
-zip = { version = "3.0", default-features = false, features = ["deflate-flate2", "time"] }
+zip = { version = "4.0", default-features = false, features = ["deflate-flate2", "time"] }
stdx.workspace = true
-proc-macro2 = "1.0.94"
+proc-macro2 = "1.0.95"
quote = "1.0.40"
ungrammar = "1.16.1"
either.workspace = true
diff --git a/xtask/src/codegen/grammar/ast_src.rs b/xtask/src/codegen/grammar/ast_src.rs
index d8cbf89452..b9f570fe0e 100644
--- a/xtask/src/codegen/grammar/ast_src.rs
+++ b/xtask/src/codegen/grammar/ast_src.rs
@@ -116,6 +116,8 @@ const CONTEXTUAL_KEYWORDS: &[&str] =
// keywords we use for special macro expansions
const CONTEXTUAL_BUILTIN_KEYWORDS: &[&str] = &[
"asm",
+ "naked_asm",
+ "global_asm",
"att_syntax",
"builtin",
"clobber_abi",