1 //! Project loading & configuration updates.
3 //! This is quite tricky. The main problem is time and changes -- there's no
4 //! fixed "project" rust-analyzer is working with, "current project" is itself
5 //! mutable state. For example, when the user edits `Cargo.toml` by adding a new
6 //! dependency, project model changes. What's more, switching project model is
7 //! not instantaneous -- it takes time to run `cargo metadata` and (for proc
8 //! macros) `cargo check`.
10 //! The main guiding principle here is, as elsewhere in rust-analyzer,
11 //! robustness. We try not to assume that the project model exists or is
12 //! correct. Instead, we try to provide a best-effort service. Even if the
13 //! project is currently loading and we don't have a full project model, we
14 //! still want to respond to various requests.
15 use std
::{mem, sync::Arc}
;
17 use flycheck
::{FlycheckConfig, FlycheckHandle}
;
18 use hir
::db
::DefDatabase
;
20 use ide_db
::base_db
::{
21 CrateGraph
, Env
, ProcMacro
, ProcMacroExpander
, ProcMacroExpansionError
, ProcMacroKind
,
22 ProcMacroLoadResult
, SourceRoot
, VfsPath
,
24 use proc_macro_api
::{MacroDylib, ProcMacroServer}
;
25 use project_model
::{ProjectWorkspace, WorkspaceBuildScripts}
;
27 use vfs
::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind}
;
30 config
::{Config, FilesWatcher, LinkedProject}
,
31 global_state
::GlobalState
,
38 pub(crate) enum ProjectWorkspaceProgress
{
41 End(Vec
<anyhow
::Result
<ProjectWorkspace
>>),
45 pub(crate) enum BuildDataProgress
{
48 End((Arc
<Vec
<ProjectWorkspace
>>, Vec
<anyhow
::Result
<WorkspaceBuildScripts
>>)),
52 pub(crate) fn is_quiescent(&self) -> bool
{
53 !(self.fetch_workspaces_queue
.op_in_progress()
54 || self.fetch_build_data_queue
.op_in_progress()
55 || self.vfs_progress_config_version
< self.vfs_config_version
56 || self.vfs_progress_n_done
< self.vfs_progress_n_total
)
59 pub(crate) fn update_configuration(&mut self, config
: Config
) {
60 let _p
= profile
::span("GlobalState::update_configuration");
61 let old_config
= mem
::replace(&mut self.config
, Arc
::new(config
));
62 if self.config
.lru_capacity() != old_config
.lru_capacity() {
63 self.analysis_host
.update_lru_capacity(self.config
.lru_capacity());
65 if self.config
.linked_projects() != old_config
.linked_projects() {
66 self.fetch_workspaces_queue
.request_op("linked projects changed".to_string())
67 } else if self.config
.flycheck() != old_config
.flycheck() {
68 self.reload_flycheck();
71 if self.analysis_host
.raw_database().enable_proc_attr_macros()
72 != self.config
.expand_proc_attr_macros()
76 .set_enable_proc_attr_macros(self.config
.expand_proc_attr_macros());
80 pub(crate) fn current_status(&self) -> lsp_ext
::ServerStatusParams
{
81 let mut status
= lsp_ext
::ServerStatusParams
{
82 health
: lsp_ext
::Health
::Ok
,
83 quiescent
: self.is_quiescent(),
87 if self.proc_macro_changed
{
88 status
.health
= lsp_ext
::Health
::Warning
;
90 Some("Reload required due to source changes of a procedural macro.".into())
92 if let Err(_
) = self.fetch_build_data_error() {
93 status
.health
= lsp_ext
::Health
::Warning
;
95 Some("Failed to run build scripts of some packages, check the logs.".to_string());
97 if !self.config
.cargo_autoreload()
98 && self.is_quiescent()
99 && self.fetch_workspaces_queue
.op_requested()
101 status
.health
= lsp_ext
::Health
::Warning
;
102 status
.message
= Some("Workspace reload required".to_string())
105 if let Err(error
) = self.fetch_workspace_error() {
106 status
.health
= lsp_ext
::Health
::Error
;
107 status
.message
= Some(error
)
110 if self.config
.linked_projects().is_empty()
111 && self.config
.detached_files().is_empty()
112 && self.config
.notifications().cargo_toml_not_found
114 status
.health
= lsp_ext
::Health
::Warning
;
115 status
.message
= Some("Workspace reload required".to_string())
// Kicks off an asynchronous (re)load of the project model on the task
// pool, reporting ProjectWorkspaceProgress::{Begin, Report, End} back to
// the main loop via Task::FetchWorkspace messages.
//
// NOTE(review): this block was damaged in extraction — lines are split
// mid-statement and several original lines (the spawned closure's header,
// the progress-closure scaffolding, and the `ProjectWorkspace::load`
// argument list) are missing. Comments annotate the visible fragments only.
120 pub(crate) fn fetch_workspaces(&mut self, cause
: Cause
) {
121 tracing
::info
!(%cause
, "will fetch workspaces");
// Snapshot the pieces of config the worker needs before they move into
// the task-pool closure.
123 self.task_pool
.handle
.spawn_with_sender({
124 let linked_projects
= self.config
.linked_projects();
125 let detached_files
= self.config
.detached_files().to_vec();
126 let cargo_config
= self.config
.cargo();
// Progress reporting: a clone of the sender forwards Report messages.
130 let sender
= sender
.clone();
133 .send(Task
::FetchWorkspace(ProjectWorkspaceProgress
::Report(msg
)))
// Signal the start of the fetch.
138 sender
.send(Task
::FetchWorkspace(ProjectWorkspaceProgress
::Begin
)).unwrap();
// Load each linked project: manifests via ProjectWorkspace::load,
// inline rust-project.json definitions via load_inline.
140 let mut workspaces
= linked_projects
142 .map(|project
| match project
{
143 LinkedProject
::ProjectManifest(manifest
) => {
144 project_model
::ProjectWorkspace
::load(
150 LinkedProject
::InlineJsonProject(it
) => {
151 project_model
::ProjectWorkspace
::load_inline(
153 cargo_config
.target
.as_deref(),
154 &cargo_config
.extra_env
,
158 .collect
::<Vec
<_
>>();
// Detached files (opened outside any workspace) get their own synthetic
// workspace appended at the end.
160 if !detached_files
.is_empty() {
162 .push(project_model
::ProjectWorkspace
::load_detached_files(detached_files
));
165 tracing
::info
!("did fetch workspaces {:?}", workspaces
);
// Report completion together with the (possibly partially failed) results.
167 .send(Task
::FetchWorkspace(ProjectWorkspaceProgress
::End(workspaces
)))
173 pub(crate) fn fetch_build_data(&mut self, cause
: Cause
) {
174 tracing
::info
!(%cause
, "will fetch build data");
175 let workspaces
= Arc
::clone(&self.workspaces
);
176 let config
= self.config
.cargo();
177 self.task_pool
.handle
.spawn_with_sender(move |sender
| {
178 sender
.send(Task
::FetchBuildData(BuildDataProgress
::Begin
)).unwrap();
181 let sender
= sender
.clone();
183 sender
.send(Task
::FetchBuildData(BuildDataProgress
::Report(msg
))).unwrap()
186 let res
= ProjectWorkspace
::run_all_build_scripts(&workspaces
, &config
, &progress
);
188 sender
.send(Task
::FetchBuildData(BuildDataProgress
::End((workspaces
, res
)))).unwrap();
// Makes the last fetched workspaces (and build-script results) the active
// project model: updates `self.workspaces`, (re)registers file watchers,
// spawns proc-macro servers, reconfigures the VFS loader, rebuilds the
// crate graph, and applies the change to the analysis host.
//
// NOTE(review): this block was damaged in extraction — lines are split
// mid-statement and many original lines are missing. Comments annotate
// the visible fragments only; do not treat this as compilable.
192 pub(crate) fn switch_workspaces(&mut self, cause
: Cause
) {
193 let _p
= profile
::span("GlobalState::switch_workspaces");
194 tracing
::info
!(%cause
, "will switch workspaces");
// Surface workspace-loading errors; per the comment below, a partially
// broken workspace is only adopted when none is loaded yet.
196 if let Err(error_message
) = self.fetch_workspace_error() {
197 self.show_and_log_error(error_message
, None
);
198 if !self.workspaces
.is_empty() {
199 // It only makes sense to switch to a partially broken workspace
200 // if we don't have any workspace at all yet.
205 if let Err(error
) = self.fetch_build_data_error() {
206 self.show_and_log_error("failed to run build scripts".to_string(), Some(error
));
// Take the results of the last fetch and keep only the workspaces that
// loaded successfully.
209 let Some(workspaces
) = self.fetch_workspaces_queue
.last_op_result() else { return; }
;
211 workspaces
.iter().filter_map(|res
| res
.as_ref().ok().cloned()).collect
::<Vec
<_
>>();
// Local helper: compares two workspaces while ignoring build-script
// data, so freshly fetched build scripts can be attached to an
// otherwise-unchanged workspace set.
213 fn eq_ignore_build_data
<'a
>(
214 left
: &'a ProjectWorkspace
,
215 right
: &'a ProjectWorkspace
,
217 let key
= |p
: &'a ProjectWorkspace
| match p
{
218 ProjectWorkspace
::Cargo
{
227 } => Some((cargo
, sysroot
, rustc
, rustc_cfg
, cfg_overrides
)),
230 match (key(left
), key(right
)) {
231 (Some(lk
), Some(rk
)) => lk
== rk
,
236 let same_workspaces
= workspaces
.len() == self.workspaces
.len()
239 .zip(self.workspaces
.iter())
240 .all(|(l
, r
)| eq_ignore_build_data(l
, r
));
// If the workspace set is unchanged, attach the freshly fetched
// build-script results; otherwise the build data is stale and dropped.
243 let (workspaces
, build_scripts
) = self.fetch_build_data_queue
.last_op_result();
244 if Arc
::ptr_eq(workspaces
, &self.workspaces
) {
245 tracing
::debug
!("set build scripts to workspaces");
247 let workspaces
= workspaces
251 .map(|(mut ws
, bs
)| {
252 ws
.set_build_scripts(bs
.as_ref().ok().cloned().unwrap_or_default());
255 .collect
::<Vec
<_
>>();
257 // Workspaces are the same, but we've updated build data.
258 self.workspaces
= Arc
::new(workspaces
);
260 tracing
::info
!("build scripts do not match the version of the active workspace");
261 // Current build scripts do not match the version of the active
262 // workspace, so there's nothing for us to update.
266 tracing
::debug
!("abandon build scripts for workspaces");
268 // Here, we completely changed the workspace (Cargo.toml edit), so
269 // we don't care about build-script results, they are stale.
270 self.workspaces
= Arc
::new(workspaces
)
// When the client does the file watching, register glob patterns for
// *.rs, Cargo.toml and Cargo.lock under every local workspace root.
273 if let FilesWatcher
::Client
= self.config
.files().watcher
{
274 let registration_options
= lsp_types
::DidChangeWatchedFilesRegistrationOptions
{
278 .flat_map(|ws
| ws
.to_roots())
279 .filter(|it
| it
.is_local
)
281 root
.include
.into_iter().flat_map(|it
| {
283 format
!("{}/**/*.rs", it
.display()),
284 format
!("{}/**/Cargo.toml", it
.display()),
285 format
!("{}/**/Cargo.lock", it
.display()),
289 .map(|glob_pattern
| lsp_types
::FileSystemWatcher { glob_pattern, kind: None }
)
292 let registration
= lsp_types
::Registration
{
293 id
: "workspace/didChangeWatchedFiles".to_string(),
294 method
: "workspace/didChangeWatchedFiles".to_string(),
295 register_options
: Some(serde_json
::to_value(registration_options
).unwrap()),
297 self.send_request
::<lsp_types
::request
::RegisterCapability
>(
298 lsp_types
::RegistrationParams { registrations: vec![registration] }
,
303 let mut change
= Change
::new();
305 let files_config
= self.config
.files();
306 let project_folders
= ProjectFolders
::new(&self.workspaces
, &files_config
.exclude
);
// Spawn proc-macro servers once (only when none are running yet): an
// explicitly configured path wins, otherwise the sysroot server is used.
308 if self.proc_macro_clients
.is_empty() {
309 if let Some((path
, path_manually_set
)) = self.config
.proc_macro_srv() {
310 tracing
::info
!("Spawning proc-macro servers");
311 self.proc_macro_clients
= self
315 let (path
, args
): (_
, &[_
]) = if path_manually_set
{
// NOTE(review): "Pro-macro" in the log message below is a typo for
// "Proc-macro"; fixing it requires a code change, not a comment.
317 "Pro-macro server path explicitly set: {}",
322 match ws
.find_sysroot_proc_macro_srv() {
323 Some(server_path
) => (server_path
, &[]),
324 None
=> (path
.clone(), &["proc-macro"]),
328 tracing
::info
!(?args
, "Using proc-macro server at {}", path
.display(),);
329 ProcMacroServer
::spawn(path
.clone(), args
).map_err(|err
| {
331 "Failed to run proc-macro server from path {}, error: {:?}",
335 tracing
::error
!(error
);
// Reconfigure the VFS loader with the new folder set; with client-side
// watching the server watches nothing itself.
343 let watch
= match files_config
.watcher
{
344 FilesWatcher
::Client
=> vec
![],
345 FilesWatcher
::Server
=> project_folders
.watch
,
347 self.vfs_config_version
+= 1;
348 self.loader
.handle
.set_config(vfs
::loader
::Config
{
349 load
: project_folders
.load
,
351 version
: self.vfs_config_version
,
354 // Create crate graph from all the workspaces
356 let dummy_replacements
= self.config
.dummy_replacements();
358 let vfs
= &mut self.vfs
.write().0;
359 let loader
= &mut self.loader
;
360 let mem_docs
= &self.mem_docs
;
// Loader closure handed to `to_crate_graph`: resolves a path to a VFS
// file id, synchronously loading it unless it is an open (in-memory) doc.
361 let mut load
= move |path
: &AbsPath
| {
362 let _p
= profile
::span("GlobalState::load");
363 let vfs_path
= vfs
::VfsPath
::from(path
.to_path_buf());
364 if !mem_docs
.contains(&vfs_path
) {
365 let contents
= loader
.handle
.load_sync(path
);
366 vfs
.set_file_contents(vfs_path
.clone(), contents
);
368 let res
= vfs
.file_id(&vfs_path
);
370 tracing
::warn
!("failed to load {}", path
.display())
375 let mut crate_graph
= CrateGraph
::default();
376 for (idx
, ws
) in self.workspaces
.iter().enumerate() {
// Each workspace gets its matching proc-macro client (by index), or an
// error placeholder when proc macros are disabled.
377 let proc_macro_client
= match self.proc_macro_clients
.get(idx
) {
378 Some(res
) => res
.as_ref().map_err(|e
| &**e
),
379 None
=> Err("Proc macros are disabled"),
381 let mut load_proc_macro
= move |crate_name
: &str, path
: &AbsPath
| {
385 dummy_replacements
.get(crate_name
).map(|v
| &**v
).unwrap_or_default(),
388 crate_graph
.extend(ws
.to_crate_graph(
389 &mut load_proc_macro
,
391 &self.config
.cargo().extra_env
,
396 change
.set_crate_graph(crate_graph
);
// Commit: new source-root layout, apply the change, process follow-ups,
// and restart flycheck against the new workspaces.
398 self.source_root_config
= project_folders
.source_root_config
;
400 self.analysis_host
.apply_change(change
);
401 self.process_changes();
402 self.reload_flycheck();
403 tracing
::info
!("did switch workspaces");
406 fn fetch_workspace_error(&self) -> Result
<(), String
> {
407 let mut buf
= String
::new();
409 let Some(last_op_result
) = self.fetch_workspaces_queue
.last_op_result() else { return Ok(()) }
;
410 if last_op_result
.is_empty() {
411 stdx
::format_to
!(buf
, "rust-analyzer failed to discover workspace");
413 for ws
in last_op_result
{
414 if let Err(err
) = ws
{
415 stdx
::format_to
!(buf
, "rust-analyzer failed to load workspace: {:#}\n", err
);
427 fn fetch_build_data_error(&self) -> Result
<(), String
> {
428 let mut buf
= String
::new();
430 for ws
in &self.fetch_build_data_queue
.last_op_result().1 {
432 Ok(data
) => match data
.error() {
433 Some(stderr
) => stdx
::format_to
!(buf
, "{:#}\n", stderr
),
437 Err(err
) => stdx
::format_to
!(buf
, "{:#}\n", err
),
// Tears down and respawns flycheck (cargo check) handles according to
// the current config: none at all, a single shared handle, or one per
// workspace, depending on the invocation strategy.
//
// NOTE(review): this block was damaged in extraction — the `Some`/`None`
// arms of the config match, the spawn argument lists, and the closing
// scaffolding are missing. Comments annotate the visible fragments only.
448 fn reload_flycheck(&mut self) {
449 let _p
= profile
::span("GlobalState::reload_flycheck");
450 let config
= match self.config
.flycheck() {
// No flycheck configured: drop all handles and clear stale diagnostics.
453 self.flycheck
= Arc
::new([]);
454 self.diagnostics
.clear_check_all();
459 let sender
= self.flycheck_sender
.clone();
// Cargo-based checking is always per-workspace; custom commands carry
// their own strategy.
460 let invocation_strategy
= match config
{
461 FlycheckConfig
::CargoCommand { .. }
=> flycheck
::InvocationStrategy
::PerWorkspace
,
462 FlycheckConfig
::CustomCommand { invocation_strategy, .. }
=> invocation_strategy
,
// Spawn one shared handle (Once) or one handle per eligible workspace.
465 self.flycheck
= match invocation_strategy
{
466 flycheck
::InvocationStrategy
::Once
=> vec
![FlycheckHandle
::spawn(
468 Box
::new(move |msg
| sender
.send(msg
).unwrap()),
470 self.config
.root_path().clone(),
472 flycheck
::InvocationStrategy
::PerWorkspace
=> {
// Pick a check root per workspace kind; detached files get none.
476 .filter_map(|(id
, w
)| match w
{
477 ProjectWorkspace
::Cargo { cargo, .. }
=> Some((id
, cargo
.workspace_root())),
478 ProjectWorkspace
::Json { project, .. }
=> {
479 // Enable flychecks for json projects if a custom flycheck command was supplied
480 // in the workspace configuration.
482 FlycheckConfig
::CustomCommand { .. }
=> Some((id
, project
.path())),
486 ProjectWorkspace
::DetachedFiles { .. }
=> None
,
489 let sender
= sender
.clone();
490 FlycheckHandle
::spawn(
492 Box
::new(move |msg
| sender
.send(msg
).unwrap()),
505 pub(crate) struct ProjectFolders
{
506 pub(crate) load
: Vec
<vfs
::loader
::Entry
>,
507 pub(crate) watch
: Vec
<usize>,
508 pub(crate) source_root_config
: SourceRootConfig
,
// Builds a ProjectFolders from the workspace roots: one VFS directory
// entry per root (including *.rs files, applying per-root and global
// excludes), recording which entries to watch and which file sets are
// local to the workspace.
//
// NOTE(review): this block was damaged in extraction — the `fn new(`
// header line, the `let entry = {` binding, and the `if root.is_local`
// guards around the watch/fileset bookkeeping are missing. Comments
// annotate the visible fragments only.
511 impl ProjectFolders
{
513 workspaces
: &[ProjectWorkspace
],
514 global_excludes
: &[AbsPathBuf
],
515 ) -> ProjectFolders
{
516 let mut res
= ProjectFolders
::default();
517 let mut fsc
= FileSetConfig
::builder();
518 let mut local_filesets
= vec
![];
// One iteration per root across all workspaces.
520 for root
in workspaces
.iter().flat_map(|ws
| ws
.to_roots()) {
521 let file_set_roots
: Vec
<VfsPath
> =
522 root
.include
.iter().cloned().map(VfsPath
::from
).collect();
// Directory entry for the loader: *.rs under the include dirs, minus
// the exclude dirs.
525 let mut dirs
= vfs
::loader
::Directories
::default();
526 dirs
.extensions
.push("rs".into());
527 dirs
.include
.extend(root
.include
);
528 dirs
.exclude
.extend(root
.exclude
);
// Apply a global exclude unless it overlaps an include dir (in which
// case the more specific include wins).
529 for excl
in global_excludes
{
533 .any(|incl
| incl
.starts_with(excl
) || excl
.starts_with(incl
))
535 dirs
.exclude
.push(excl
.clone());
539 vfs
::loader
::Entry
::Directories(dirs
)
// Bookkeeping: watch index refers to the position the entry will get in
// `load`; local file sets are tracked by their index in `fsc`.
543 res
.watch
.push(res
.load
.len());
545 res
.load
.push(entry
);
548 local_filesets
.push(fsc
.len());
550 fsc
.add_file_set(file_set_roots
)
// Finalize the file-set config and attach it to the result.
553 let fsc
= fsc
.build();
554 res
.source_root_config
= SourceRootConfig { fsc, local_filesets }
;
560 #[derive(Default, Debug)]
561 pub(crate) struct SourceRootConfig
{
562 pub(crate) fsc
: FileSetConfig
,
563 pub(crate) local_filesets
: Vec
<usize>,
566 impl SourceRootConfig
{
567 pub(crate) fn partition(&self, vfs
: &vfs
::Vfs
) -> Vec
<SourceRoot
> {
568 let _p
= profile
::span("SourceRootConfig::partition");
573 .map(|(idx
, file_set
)| {
574 let is_local
= self.local_filesets
.contains(&idx
);
576 SourceRoot
::new_local(file_set
)
578 SourceRoot
::new_library(file_set
)
// NOTE(review): this block was damaged in extraction — the `path` (and
// possibly other) parameter lines, the empty-result guard body, and the
// final match on `res` are missing. Comments annotate the visible
// fragments only.
585 /// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
586 /// with an identity dummy expander.
587 pub(crate) fn load_proc_macro(
588 server
: Result
<&ProcMacroServer
, &str>,
590 dummy_replace
: &[Box
<str>],
591 ) -> ProcMacroLoadResult
{
// Fallible pipeline wrapped in a closure so `?` can be used: open the
// dylib, require a server, and ask it to load the macros.
592 let res
: Result
<Vec
<_
>, String
> = (|| {
593 let dylib
= MacroDylib
::new(path
.to_path_buf())
594 .map_err(|io
| format
!("Proc-macro dylib loading failed: {io}"))?
;
595 let server
= server
.map_err(ToOwned
::to_owned
)?
;
596 let vec
= server
.load_dylib(dylib
).map_err(|e
| format
!("{e}"))?
;
// A dylib with zero proc macros is treated as an error.
598 return Err("proc macro library returned no proc macros".to_string());
// Convert each server-side macro into an ide ProcMacro, applying the
// dummy replacements.
602 .map(|expander
| expander_to_proc_macro(expander
, dummy_replace
))
// Success/failure logging of the final result.
608 "Loaded proc-macros for {}: {:?}",
610 proc_macros
.iter().map(|it
| it
.name
.clone()).collect
::<Vec
<_
>>()
615 tracing
::warn
!("proc-macro loading for {} failed: {e}", path
.display());
620 fn expander_to_proc_macro(
621 expander
: proc_macro_api
::ProcMacro
,
622 dummy_replace
: &[Box
<str>],
624 let name
= SmolStr
::from(expander
.name());
625 let kind
= match expander
.kind() {
626 proc_macro_api
::ProcMacroKind
::CustomDerive
=> ProcMacroKind
::CustomDerive
,
627 proc_macro_api
::ProcMacroKind
::FuncLike
=> ProcMacroKind
::FuncLike
,
628 proc_macro_api
::ProcMacroKind
::Attr
=> ProcMacroKind
::Attr
,
630 let expander
: Arc
<dyn ProcMacroExpander
> =
631 if dummy_replace
.iter().any(|replace
| &**replace
== name
) {
633 ProcMacroKind
::Attr
=> Arc
::new(IdentityExpander
),
634 _
=> Arc
::new(EmptyExpander
),
637 Arc
::new(Expander(expander
))
639 ProcMacro { name, kind, expander }
643 struct Expander(proc_macro_api
::ProcMacro
);
645 impl ProcMacroExpander
for Expander
{
648 subtree
: &tt
::Subtree
,
649 attrs
: Option
<&tt
::Subtree
>,
651 ) -> Result
<tt
::Subtree
, ProcMacroExpansionError
> {
652 let env
= env
.iter().map(|(k
, v
)| (k
.to_string(), v
.to_string())).collect();
653 match self.0.expand(subtree
, attrs
, env
) {
654 Ok(Ok(subtree
)) => Ok(subtree
),
655 Ok(Err(err
)) => Err(ProcMacroExpansionError
::Panic(err
.0)),
656 Err(err
) => Err(ProcMacroExpansionError
::System(err
.to_string())),
661 /// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user.
663 struct IdentityExpander
;
665 impl ProcMacroExpander
for IdentityExpander
{
668 subtree
: &tt
::Subtree
,
669 _
: Option
<&tt
::Subtree
>,
671 ) -> Result
<tt
::Subtree
, ProcMacroExpansionError
> {
676 /// Empty expander, used for proc-macros that are deliberately ignored by the user.
678 struct EmptyExpander
;
680 impl ProcMacroExpander
for EmptyExpander
{
684 _
: Option
<&tt
::Subtree
>,
686 ) -> Result
<tt
::Subtree
, ProcMacroExpansionError
> {
687 Ok(tt
::Subtree
::default())
// Heuristic: does a change to `path` require refetching the project
// model? Cargo.toml/Cargo.lock, .cargo/config(.toml), and files in
// cargo's implicit-target locations all count.
//
// NOTE(review): this definition continues past the end of the visible
// extraction (the tail after the grand-parent check is missing), and
// several interior lines (match arms, early returns) were dropped.
// Comments annotate the visible fragments only.
692 pub(crate) fn should_refresh_for_change(path
: &AbsPath
, change_kind
: ChangeKind
) -> bool
{
// Files/dirs cargo treats as targets without a [[bin]]/[[test]] entry.
693 const IMPLICIT_TARGET_FILES
: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
694 const IMPLICIT_TARGET_DIRS
: &[&str] = &["src/bin", "examples", "tests", "benches"];
696 let file_name
= match path
.file_name().unwrap_or_default().to_str() {
698 None
=> return false,
// Manifest / lockfile changes always matter.
701 if let "Cargo.toml" | "Cargo.lock" = file_name
{
704 if change_kind
== ChangeKind
::Modify
{
708 // .cargo/config{.toml}
// Non-.rs files only matter if they are a cargo config file.
709 if path
.extension().unwrap_or_default() != "rs" {
710 let is_cargo_config
= matches
!(file_name
, "config.toml" | "config")
711 && path
.parent().map(|parent
| parent
.as_ref().ends_with(".cargo")).unwrap_or(false);
712 return is_cargo_config
;
// .rs files in implicit-target positions change the crate layout.
715 if IMPLICIT_TARGET_FILES
.iter().any(|it
| path
.as_ref().ends_with(it
)) {
718 let parent
= match path
.parent() {
720 None
=> return false,
722 if IMPLICIT_TARGET_DIRS
.iter().any(|it
| parent
.as_ref().ends_with(it
)) {
// src/bin/foo/main.rs style targets: check the grand-parent directory too.
725 if file_name
== "main.rs" {
726 let grand_parent
= match parent
.parent() {
728 None
=> return false,
730 if IMPLICIT_TARGET_DIRS
.iter().any(|it
| grand_parent
.as_ref().ends_with(it