Unnamed repository; edit this file 'description' to name the repository.
Merge pull request #21046 from Veykril/push-xluyprrqpzxz
perf: Improve start up time
Lukas Wirth 5 months ago
parent afcfe14 · parent 309de02 · commit d7aeed0
-rw-r--r--Cargo.toml3
-rw-r--r--crates/base-db/src/input.rs4
-rw-r--r--crates/hir/src/lib.rs2
-rw-r--r--crates/ide-db/src/prime_caches.rs7
-rw-r--r--crates/project-model/src/env.rs5
-rw-r--r--crates/project-model/src/sysroot.rs27
-rw-r--r--crates/project-model/src/workspace.rs126
-rw-r--r--crates/rust-analyzer/src/bin/main.rs5
-rw-r--r--crates/rust-analyzer/src/main_loop.rs6
-rw-r--r--crates/span/src/map.rs37
-rw-r--r--crates/syntax/src/lib.rs39
11 files changed, 198 insertions, 63 deletions
diff --git a/Cargo.toml b/Cargo.toml
index 6f5ea44401..df9442882c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -134,10 +134,11 @@ rayon = "1.10.0"
rowan = "=0.15.15"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
-salsa = { version = "0.24.0", default-features = true, features = [
+salsa = { version = "0.24.0", default-features = false, features = [
"rayon",
"salsa_unstable",
"macros",
+ "inventory",
] }
salsa-macros = "0.24.0"
semver = "1.0.26"
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index cac74778a2..ffd82d5043 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -867,6 +867,10 @@ impl Env {
pub fn insert(&mut self, k: impl Into<String>, v: impl Into<String>) -> Option<String> {
self.entries.insert(k.into(), v.into())
}
+
+ pub fn contains_key(&self, arg: &str) -> bool {
+ self.entries.contains_key(arg)
+ }
}
impl From<Env> for Vec<(String, String)> {
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index bb1741a083..9137e3f5df 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -136,7 +136,7 @@ pub use {
attr::{AttrSourceMap, Attrs, AttrsWithOwner},
find_path::PrefixKind,
import_map,
- lang_item::LangItem,
+ lang_item::{LangItem, crate_lang_items},
nameres::{DefMap, ModuleSource, crate_def_map},
per_ns::Namespace,
type_ref::{Mutability, TypeRef},
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs
index 1463fdb195..4f8dc4aea1 100644
--- a/crates/ide-db/src/prime_caches.rs
+++ b/crates/ide-db/src/prime_caches.rs
@@ -83,7 +83,12 @@ pub fn parallel_prime_caches(
crate_name,
})?;
- let cancelled = Cancelled::catch(|| _ = hir::crate_def_map(&db, crate_id));
+ let cancelled = Cancelled::catch(|| {
+ _ = hir::crate_def_map(&db, crate_id);
+ // we compute the lang items here as the work for them is also highly recursive and will be triggered by the module symbols query
+ // slowing down leaf crate analysis tremendously as we go back to being blocked on a single thread
+ _ = hir::crate_lang_items(&db, crate_id);
+ });
match cancelled {
Ok(()) => progress_sender
diff --git a/crates/project-model/src/env.rs b/crates/project-model/src/env.rs
index ae0458af7a..8089155adf 100644
--- a/crates/project-model/src/env.rs
+++ b/crates/project-model/src/env.rs
@@ -2,7 +2,6 @@
use base_db::Env;
use paths::Utf8Path;
use rustc_hash::FxHashMap;
-use toolchain::Tool;
use crate::{ManifestPath, PackageData, TargetKind, cargo_config_file::CargoConfigFile};
@@ -48,8 +47,8 @@ pub(crate) fn inject_cargo_package_env(env: &mut Env, package: &PackageData) {
);
}
-pub(crate) fn inject_cargo_env(env: &mut Env) {
- env.set("CARGO", Tool::Cargo.path().to_string());
+pub(crate) fn inject_cargo_env(env: &mut Env, cargo_path: &Utf8Path) {
+ env.set("CARGO", cargo_path.as_str());
}
pub(crate) fn inject_rustc_tool_env(env: &mut Env, cargo_name: &str, kind: TargetKind) {
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index 920afe65d7..1b31138bec 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -8,6 +8,7 @@ use core::fmt;
use std::{env, fs, ops::Not, path::Path, process::Command};
use anyhow::{Result, format_err};
+use base_db::Env;
use itertools::Itertools;
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::FxHashMap;
@@ -172,6 +173,32 @@ impl Sysroot {
}
}
+ pub fn tool_path(&self, tool: Tool, current_dir: impl AsRef<Path>, envs: &Env) -> Utf8PathBuf {
+ match self.root() {
+ Some(root) => {
+ let mut cmd = toolchain::command(
+ Tool::Rustup.path(),
+ current_dir,
+ &envs
+ .into_iter()
+ .map(|(k, v)| (k.clone(), Some(v.clone())))
+ .collect::<FxHashMap<_, _>>(),
+ );
+ if !envs.contains_key("RUSTUP_TOOLCHAIN")
+ && std::env::var_os("RUSTUP_TOOLCHAIN").is_none()
+ {
+ cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(root));
+ }
+
+ cmd.arg("which");
+ cmd.arg(tool.name());
+ (|| Some(Utf8PathBuf::from(String::from_utf8(cmd.output().ok()?.stdout).ok()?)))()
+ .unwrap_or_else(|| Utf8PathBuf::from(tool.name()))
+ }
+ _ => tool.path(),
+ }
+ }
+
pub fn discover_proc_macro_srv(&self) -> Option<anyhow::Result<AbsPathBuf>> {
let root = self.root()?;
Some(
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index aa2e15930c..f01daa82b6 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -2,6 +2,7 @@
//! metadata` or `rust-project.json`) into representation stored in the salsa
//! database -- `CrateGraph`.
+use std::thread::Builder;
use std::{collections::VecDeque, fmt, fs, iter, ops::Deref, sync, thread};
use anyhow::Context;
@@ -12,7 +13,7 @@ use base_db::{
};
use cfg::{CfgAtom, CfgDiff, CfgOptions};
use intern::{Symbol, sym};
-use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
+use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use semver::Version;
use span::{Edition, FileId};
@@ -301,31 +302,39 @@ impl ProjectWorkspace {
// We can speed up loading a bit by spawning all of these processes in parallel (especially
// on systems where process spawning is delayed)
let join = thread::scope(|s| {
- let rustc_cfg = s.spawn(|| {
- rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), extra_env)
- });
- let target_data = s.spawn(|| {
- target_data::get(
- toolchain_config,
- targets.first().map(Deref::deref),
- extra_env,
- ).inspect_err(|e| {
- tracing::error!(%e, "failed fetching data layout for {cargo_toml:?} workspace")
+ let rustc_cfg = Builder::new()
+ .name("ProjectWorkspace::rustc_cfg".to_owned())
+ .spawn_scoped(s, || {
+ rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), extra_env)
})
- });
-
- let rustc_dir = s.spawn(|| {
- let rustc_dir = match rustc_source {
- Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone())
- .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))),
- Some(RustLibSource::Discover) => {
- sysroot.discover_rustc_src().ok_or_else(|| {
- Some("Failed to discover rustc source for sysroot.".to_owned())
+ .expect("failed to spawn thread");
+ let target_data = Builder::new()
+ .name("ProjectWorkspace::target_data".to_owned())
+ .spawn_scoped(s, || {
+ target_data::get(toolchain_config, targets.first().map(Deref::deref), extra_env)
+ .inspect_err(|e| {
+ tracing::error!(%e,
+ "failed fetching data layout for \
+ {cargo_toml:?} workspace"
+ )
})
- }
- None => Err(None),
- };
- rustc_dir.and_then(|rustc_dir| {
+ })
+ .expect("failed to spawn thread");
+
+ let rustc_dir = Builder::new()
+ .name("ProjectWorkspace::rustc_dir".to_owned())
+ .spawn_scoped(s, || {
+ let rustc_dir = match rustc_source {
+ Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone())
+ .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))),
+ Some(RustLibSource::Discover) => {
+ sysroot.discover_rustc_src().ok_or_else(|| {
+ Some("Failed to discover rustc source for sysroot.".to_owned())
+ })
+ }
+ None => Err(None),
+ };
+ rustc_dir.and_then(|rustc_dir| {
info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source");
match FetchMetadata::new(
&rustc_dir,
@@ -359,31 +368,44 @@ impl ProjectWorkspace {
Err(e) => {
tracing::error!(
%e,
- "Failed to read Cargo metadata from rustc source at {rustc_dir}",
+ "Failed to read Cargo metadata from rustc source \
+ at {rustc_dir}",
);
Err(Some(format!(
- "Failed to read Cargo metadata from rustc source at {rustc_dir}: {e}"
+ "Failed to read Cargo metadata from rustc source \
+ at {rustc_dir}: {e}"
)))
}
}
})
- });
-
- let cargo_metadata = s.spawn(|| fetch_metadata.exec(false, progress));
- let loaded_sysroot = s.spawn(|| {
- sysroot.load_workspace(
- &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
- config,
- workspace_dir,
- &targets,
- toolchain.clone(),
- )),
- config.no_deps,
- progress,
- )
- });
- let cargo_env =
- s.spawn(move || cargo_config_env(cargo_toml, &config_file, &config.extra_env));
+ })
+ .expect("failed to spawn thread");
+
+ let cargo_metadata = Builder::new()
+ .name("ProjectWorkspace::cargo_metadata".to_owned())
+ .spawn_scoped(s, || fetch_metadata.exec(false, progress))
+ .expect("failed to spawn thread");
+ let loaded_sysroot = Builder::new()
+ .name("ProjectWorkspace::loaded_sysroot".to_owned())
+ .spawn_scoped(s, || {
+ sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
+ config,
+ workspace_dir,
+ &targets,
+ toolchain.clone(),
+ )),
+ config.no_deps,
+ progress,
+ )
+ })
+ .expect("failed to spawn thread");
+ let cargo_env = Builder::new()
+ .name("ProjectWorkspace::cargo_env".to_owned())
+ .spawn_scoped(s, move || {
+ cargo_config_env(cargo_toml, &config_file, &config.extra_env)
+ })
+ .expect("failed to spawn thread");
thread::Result::Ok((
rustc_cfg.join()?,
target_data.join()?,
@@ -1194,6 +1216,7 @@ fn cargo_to_crate_graph(
load,
crate_ws_data.clone(),
);
+ let cargo_path = sysroot.tool_path(Tool::Cargo, cargo.workspace_root(), cargo.env());
let cfg_options = CfgOptions::from_iter(rustc_cfg);
@@ -1268,6 +1291,7 @@ fn cargo_to_crate_graph(
} else {
Arc::new(pkg_data.manifest.parent().to_path_buf())
},
+ &cargo_path,
);
if let TargetKind::Lib { .. } = kind {
lib_tgt = Some((crate_id, name.clone()));
@@ -1375,6 +1399,7 @@ fn cargo_to_crate_graph(
},
// FIXME: This looks incorrect but I don't think this causes problems.
crate_ws_data,
+ &cargo_path,
);
}
}
@@ -1453,6 +1478,7 @@ fn handle_rustc_crates(
override_cfg: &CfgOverrides,
build_scripts: &WorkspaceBuildScripts,
crate_ws_data: Arc<CrateWorkspaceData>,
+ cargo_path: &Utf8Path,
) {
let mut rustc_pkg_crates = FxHashMap::default();
// The root package of the rustc-dev component is rustc_driver, so we match that
@@ -1503,6 +1529,7 @@ fn handle_rustc_crates(
} else {
Arc::new(pkg_data.manifest.parent().to_path_buf())
},
+ cargo_path,
);
pkg_to_lib_crate.insert(pkg, crate_id);
// Add dependencies on core / std / alloc for this crate
@@ -1560,11 +1587,12 @@ fn add_target_crate_root(
build_data: Option<(&BuildScriptOutput, bool)>,
cfg_options: CfgOptions,
file_id: FileId,
- cargo_name: &str,
+ cargo_crate_name: &str,
kind: TargetKind,
origin: CrateOrigin,
crate_ws_data: Arc<CrateWorkspaceData>,
proc_macro_cwd: Arc<AbsPathBuf>,
+ cargo_path: &Utf8Path,
) -> CrateBuilderId {
let edition = pkg.edition;
let potential_cfg_options = if pkg.features.is_empty() {
@@ -1591,8 +1619,8 @@ fn add_target_crate_root(
let mut env = cargo.env().clone();
inject_cargo_package_env(&mut env, pkg);
- inject_cargo_env(&mut env);
- inject_rustc_tool_env(&mut env, cargo_name, kind);
+ inject_cargo_env(&mut env, cargo_path);
+ inject_rustc_tool_env(&mut env, cargo_crate_name, kind);
if let Some(envs) = build_data.map(|(it, _)| &it.envs) {
env.extend_from_other(envs);
@@ -1600,7 +1628,7 @@ fn add_target_crate_root(
let crate_id = crate_graph.add_crate_root(
file_id,
edition,
- Some(CrateDisplayName::from_canonical_name(cargo_name)),
+ Some(CrateDisplayName::from_canonical_name(cargo_crate_name)),
Some(pkg.version.to_string()),
cfg_options,
potential_cfg_options,
@@ -1614,7 +1642,9 @@ fn add_target_crate_root(
let proc_macro = match build_data {
Some((BuildScriptOutput { proc_macro_dylib_path, .. }, has_errors)) => {
match proc_macro_dylib_path {
- ProcMacroDylibPath::Path(path) => Ok((cargo_name.to_owned(), path.clone())),
+ ProcMacroDylibPath::Path(path) => {
+ Ok((cargo_crate_name.to_owned(), path.clone()))
+ }
ProcMacroDylibPath::NotBuilt => Err(ProcMacroLoadingError::NotYetBuilt),
ProcMacroDylibPath::NotProcMacro | ProcMacroDylibPath::DylibNotFound
if has_errors =>
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index cc8db1b841..44c442ffd8 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -307,6 +307,11 @@ fn run_server() -> anyhow::Result<()> {
config.rediscover_workspaces();
}
+ rayon::ThreadPoolBuilder::new()
+ .thread_name(|ix| format!("RayonWorker{}", ix))
+ .build_global()
+ .unwrap();
+
// If the io_threads have an error, there's usually an error on the main
// loop too because the channels are closed. Ensure we report both errors.
match (rust_analyzer::main_loop(config, connection), io_threads.join()) {
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index c2b887c9b3..099eed92b2 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -452,7 +452,11 @@ impl GlobalState {
// Project has loaded properly, kick off initial flycheck
self.flycheck.iter().for_each(|flycheck| flycheck.restart_workspace(None));
}
- if self.config.prefill_caches() {
+ // delay initial cache priming until proc macros are loaded, or we will load up a bunch of garbage into salsa
+ let proc_macros_loaded = self.config.prefill_caches()
+ && !self.config.expand_proc_macros()
+ || self.fetch_proc_macros_queue.last_op_result().copied().unwrap_or(false);
+ if proc_macros_loaded {
self.prime_caches_queue.request_op("became quiescent".to_owned(), ());
}
}
diff --git a/crates/span/src/map.rs b/crates/span/src/map.rs
index bb09933536..83b2413676 100644
--- a/crates/span/src/map.rs
+++ b/crates/span/src/map.rs
@@ -156,6 +156,43 @@ where
}
}
+impl<S> Drop for SpanMap<S> {
+ fn drop(&mut self) {
+ struct SendPtr(*mut [()]);
+ unsafe impl Send for SendPtr {}
+ static SPAN_MAP_DROP_THREAD: std::sync::OnceLock<
+ std::sync::mpsc::Sender<(SendPtr, fn(SendPtr))>,
+ > = std::sync::OnceLock::new();
+ SPAN_MAP_DROP_THREAD
+ .get_or_init(|| {
+ let (sender, receiver) = std::sync::mpsc::channel::<(SendPtr, fn(SendPtr))>();
+ std::thread::Builder::new()
+ .name("SpanMapDropper".to_owned())
+ .spawn(move || receiver.iter().for_each(|(b, drop)| drop(b)))
+ .unwrap();
+ sender
+ })
+ .send((
+ unsafe {
+ SendPtr(std::mem::transmute::<*mut [(TextSize, SpanData<S>)], *mut [()]>(
+ Box::<[(TextSize, SpanData<S>)]>::into_raw(
+ std::mem::take(&mut self.spans).into_boxed_slice(),
+ ),
+ ))
+ },
+ |b: SendPtr| {
+ _ = unsafe {
+ Box::from_raw(std::mem::transmute::<
+ *mut [()],
+ *mut [(TextSize, SpanData<S>)],
+ >(b.0))
+ }
+ },
+ ))
+ .unwrap();
+ }
+}
+
#[derive(PartialEq, Eq, Hash, Debug)]
pub struct RealSpanMap {
file_id: EditionedFileId,
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index a3c19f71fb..27d288953b 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -67,7 +67,7 @@ pub use smol_str::{SmolStr, SmolStrBuilder, ToSmolStr, format_smolstr};
/// files.
#[derive(Debug, PartialEq, Eq)]
pub struct Parse<T> {
- green: GreenNode,
+ green: Option<GreenNode>,
errors: Option<Arc<[SyntaxError]>>,
_ty: PhantomData<fn() -> T>,
}
@@ -81,14 +81,14 @@ impl<T> Clone for Parse<T> {
impl<T> Parse<T> {
fn new(green: GreenNode, errors: Vec<SyntaxError>) -> Parse<T> {
Parse {
- green,
+ green: Some(green),
errors: if errors.is_empty() { None } else { Some(errors.into()) },
_ty: PhantomData,
}
}
pub fn syntax_node(&self) -> SyntaxNode {
- SyntaxNode::new_root(self.green.clone())
+ SyntaxNode::new_root(self.green.as_ref().unwrap().clone())
}
pub fn errors(&self) -> Vec<SyntaxError> {
@@ -100,8 +100,10 @@ impl<T> Parse<T> {
impl<T: AstNode> Parse<T> {
/// Converts this parse result into a parse result for an untyped syntax tree.
- pub fn to_syntax(self) -> Parse<SyntaxNode> {
- Parse { green: self.green, errors: self.errors, _ty: PhantomData }
+ pub fn to_syntax(mut self) -> Parse<SyntaxNode> {
+ let green = self.green.take();
+ let errors = self.errors.take();
+ Parse { green, errors, _ty: PhantomData }
}
/// Gets the parsed syntax tree as a typed ast node.
@@ -124,9 +126,9 @@ impl<T: AstNode> Parse<T> {
}
impl Parse<SyntaxNode> {
- pub fn cast<N: AstNode>(self) -> Option<Parse<N>> {
+ pub fn cast<N: AstNode>(mut self) -> Option<Parse<N>> {
if N::cast(self.syntax_node()).is_some() {
- Some(Parse { green: self.green, errors: self.errors, _ty: PhantomData })
+ Some(Parse { green: self.green.take(), errors: self.errors.take(), _ty: PhantomData })
} else {
None
}
@@ -162,7 +164,7 @@ impl Parse<SourceFile> {
edition,
)
.map(|(green_node, errors, _reparsed_range)| Parse {
- green: green_node,
+ green: Some(green_node),
errors: if errors.is_empty() { None } else { Some(errors.into()) },
_ty: PhantomData,
})
@@ -198,6 +200,27 @@ impl ast::Expr {
}
}
+impl<T> Drop for Parse<T> {
+ fn drop(&mut self) {
+ let Some(green) = self.green.take() else {
+ return;
+ };
+ static PARSE_DROP_THREAD: std::sync::OnceLock<std::sync::mpsc::Sender<GreenNode>> =
+ std::sync::OnceLock::new();
+ PARSE_DROP_THREAD
+ .get_or_init(|| {
+ let (sender, receiver) = std::sync::mpsc::channel::<GreenNode>();
+ std::thread::Builder::new()
+ .name("ParseNodeDropper".to_owned())
+ .spawn(move || receiver.iter().for_each(drop))
+ .unwrap();
+ sender
+ })
+ .send(green)
+ .unwrap();
+ }
+}
+
/// `SourceFile` represents a parse tree for a single Rust file.
pub use crate::ast::SourceFile;