From 38e3182bef48a9aec70fbba1b789b59e5098d4d2 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 23 Jul 2024 11:32:37 -0700 Subject: [PATCH] Handle buffer diff base updates and file renames properly for SSH projects (#14989) Release Notes: - N/A --------- Co-authored-by: Conrad --- Cargo.lock | 1 + crates/assistant/src/assistant_panel.rs | 2 +- .../src/slash_command/diagnostics_command.rs | 2 +- .../src/slash_command/docs_command.rs | 2 +- .../src/slash_command/file_command.rs | 2 +- .../src/slash_command/project_command.rs | 2 +- .../assistant_tooling/src/project_context.rs | 2 +- crates/call/src/room.rs | 2 +- .../collab/src/tests/channel_guest_tests.rs | 2 +- crates/collab/src/tests/editor_tests.rs | 8 +- crates/collab/src/tests/integration_tests.rs | 29 +- .../random_project_collaboration_tests.rs | 10 +- crates/editor/src/editor_tests.rs | 6 +- crates/editor/src/inlay_hint_cache.rs | 4 +- crates/file_finder/src/file_finder_tests.rs | 2 +- crates/file_finder/src/new_path_prompt.rs | 6 +- crates/git/src/repository.rs | 7 +- crates/go_to_line/src/go_to_line.rs | 2 +- crates/language_tools/src/lsp_log.rs | 2 +- crates/language_tools/src/lsp_log_tests.rs | 2 +- crates/outline/src/outline.rs | 2 +- crates/project/src/buffer_store.rs | 369 +++++++-- crates/project/src/connection_manager.rs | 2 +- crates/project/src/debounced_delay.rs | 19 +- crates/project/src/project.rs | 771 +++++------------- crates/project/src/project_tests.rs | 36 +- crates/project/src/worktree_store.rs | 311 +++++++ crates/project_panel/src/project_panel.rs | 8 +- crates/remote/src/ssh_session.rs | 9 +- crates/remote_server/Cargo.toml | 1 + crates/remote_server/src/headless_project.rs | 107 ++- crates/remote_server/src/main.rs | 1 - .../remote_server/src/remote_editing_tests.rs | 45 +- crates/search/src/project_search.rs | 6 +- crates/tab_switcher/src/tab_switcher_tests.rs | 2 +- crates/tasks_ui/src/lib.rs | 2 +- crates/workspace/src/pane.rs | 2 +- crates/workspace/src/workspace.rs | 14 +- crates/worktree/src/worktree.rs | 30 +- 39 files changed, 1021 insertions(+), 811 deletions(-) create mode 100644 crates/project/src/worktree_store.rs diff --git a/Cargo.lock b/Cargo.lock index 1e3eff9369..9476ab09ce 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8771,6 +8771,7 @@ dependencies = [ "settings", "smol", "toml 0.8.10", + "util", "worktree", ] diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 77bd9ac286..43e63953ac 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -2867,7 +2867,7 @@ fn make_lsp_adapter_delegate( project.update(cx, |project, cx| { // TODO: Find the right worktree. 
let worktree = project - .worktrees() + .worktrees(cx) .next() .ok_or_else(|| anyhow!("no worktrees when constructing ProjectLspAdapterDelegate"))?; Ok(ProjectLspAdapterDelegate::new(project, &worktree, cx) as Arc) diff --git a/crates/assistant/src/slash_command/diagnostics_command.rs b/crates/assistant/src/slash_command/diagnostics_command.rs index 20e712803b..723deb1f1a 100644 --- a/crates/assistant/src/slash_command/diagnostics_command.rs +++ b/crates/assistant/src/slash_command/diagnostics_command.rs @@ -284,7 +284,7 @@ fn collect_diagnostics( PathBuf::try_from(path) .ok() .and_then(|path| { - project.read(cx).worktrees().find_map(|worktree| { + project.read(cx).worktrees(cx).find_map(|worktree| { let worktree = worktree.read(cx); let worktree_root_path = Path::new(worktree.root_name()); let relative_path = path.strip_prefix(worktree_root_path).ok()?; diff --git a/crates/assistant/src/slash_command/docs_command.rs b/crates/assistant/src/slash_command/docs_command.rs index 6271bdc32e..90db4a5e19 100644 --- a/crates/assistant/src/slash_command/docs_command.rs +++ b/crates/assistant/src/slash_command/docs_command.rs @@ -24,7 +24,7 @@ impl DocsSlashCommand { pub const NAME: &'static str = "docs"; fn path_to_cargo_toml(project: Model, cx: &mut AppContext) -> Option> { - let worktree = project.read(cx).worktrees().next()?; + let worktree = project.read(cx).worktrees(cx).next()?; let worktree = worktree.read(cx); let entry = worktree.entry_for_path("Cargo.toml")?; let path = ProjectPath { diff --git a/crates/assistant/src/slash_command/file_command.rs b/crates/assistant/src/slash_command/file_command.rs index d5d5662914..849ece17d2 100644 --- a/crates/assistant/src/slash_command/file_command.rs +++ b/crates/assistant/src/slash_command/file_command.rs @@ -188,7 +188,7 @@ fn collect_files( let project_handle = project.downgrade(); let snapshots = project .read(cx) - .worktrees() + .worktrees(cx) .map(|worktree| worktree.read(cx).snapshot()) .collect::>(); cx.spawn(|mut cx| async move { diff --git a/crates/assistant/src/slash_command/project_command.rs b/crates/assistant/src/slash_command/project_command.rs index 476e60c5d4..c6c394f56b 100644 --- a/crates/assistant/src/slash_command/project_command.rs +++ b/crates/assistant/src/slash_command/project_command.rs @@ -75,7 +75,7 @@ impl ProjectSlashCommand { } fn path_to_cargo_toml(project: Model, cx: &mut AppContext) -> Option> { - let worktree = project.read(cx).worktrees().next()?; + let worktree = project.read(cx).worktrees(cx).next()?; let worktree = worktree.read(cx); let entry = worktree.entry_for_path("Cargo.toml")?; let path = ProjectPath { diff --git a/crates/assistant_tooling/src/project_context.rs b/crates/assistant_tooling/src/project_context.rs index aafe2728bf..2640ce1ed5 100644 --- a/crates/assistant_tooling/src/project_context.rs +++ b/crates/assistant_tooling/src/project_context.rs @@ -222,7 +222,7 @@ mod tests { let worktree_ids = project.read_with(cx, |project, cx| { project - .worktrees() + .worktrees(cx) .map(|worktree| worktree.read(cx).id()) .collect::>() }); diff --git a/crates/call/src/room.rs b/crates/call/src/room.rs index f51221908f..bafbc26447 100644 --- a/crates/call/src/room.rs +++ b/crates/call/src/room.rs @@ -526,7 +526,7 @@ impl Room { rejoined_projects.push(proto::RejoinProject { id: project_id, worktrees: project - .worktrees() + .worktrees(cx) .map(|worktree| { let worktree = worktree.read(cx); proto::RejoinWorktree { diff --git a/crates/collab/src/tests/channel_guest_tests.rs 
b/crates/collab/src/tests/channel_guest_tests.rs index 1d92691a8a..df7d6a8890 100644 --- a/crates/collab/src/tests/channel_guest_tests.rs +++ b/crates/collab/src/tests/channel_guest_tests.rs @@ -52,7 +52,7 @@ async fn test_channel_guests( assert!(project_b.read_with(cx_b, |project, _| project.is_read_only())); assert!(project_b .update(cx_b, |project, cx| { - let worktree_id = project.worktrees().next().unwrap().read(cx).id(); + let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id(); project.create_entry((worktree_id, "b.txt"), false, cx) }) .await diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 74cb699e08..cf6220d107 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -76,7 +76,7 @@ async fn test_host_disconnect( let active_call_a = cx_a.read(ActiveCall::global); let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; - let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap()); + let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap()); let project_id = active_call_a .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await @@ -1144,7 +1144,7 @@ async fn test_share_project( }); project_b.read_with(cx_b, |project, cx| { - let worktree = project.worktrees().next().unwrap().read(cx); + let worktree = project.worktrees(cx).next().unwrap().read(cx); assert_eq!( worktree.paths().map(AsRef::as_ref).collect::>(), [ @@ -1158,7 +1158,7 @@ async fn test_share_project( project_b .update(cx_b, |project, cx| { - let worktree = project.worktrees().next().unwrap(); + let worktree = project.worktrees(cx).next().unwrap(); let entry = worktree.read(cx).entry_for_path("ignored-dir").unwrap(); project.expand_entry(worktree_id, entry.id, cx).unwrap() }) @@ -1166,7 +1166,7 @@ async fn test_share_project( .unwrap(); project_b.read_with(cx_b, |project, cx| { - let worktree = project.worktrees().next().unwrap().read(cx); + let worktree = project.worktrees(cx).next().unwrap().read(cx); assert_eq!( worktree.paths().map(AsRef::as_ref).collect::>(), [ diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 98cbfdca63..c3bcf43c99 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -1377,7 +1377,7 @@ async fn test_unshare_project( .await .unwrap(); - let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap()); + let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap()); let project_b = client_b.build_dev_server_project(project_id, cx_b).await; executor.run_until_parked(); @@ -1505,7 +1505,8 @@ async fn test_project_reconnect( let (project_a1, _) = client_a.build_local_project("/root-1/dir1", cx_a).await; let (project_a2, _) = client_a.build_local_project("/root-2", cx_a).await; let (project_a3, _) = client_a.build_local_project("/root-3", cx_a).await; - let worktree_a1 = project_a1.read_with(cx_a, |project, _| project.worktrees().next().unwrap()); + let worktree_a1 = + project_a1.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap()); let project1_id = active_call_a .update(cx_a, |call, cx| call.share_project(project_a1.clone(), cx)) .await @@ -2308,7 +2309,7 @@ async fn test_propagate_saves_and_fs_changes( .await; let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; - 
let worktree_a = project_a.read_with(cx_a, |p, _| p.worktrees().next().unwrap()); + let worktree_a = project_a.read_with(cx_a, |p, cx| p.worktrees(cx).next().unwrap()); let project_id = active_call_a .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .await @@ -2318,9 +2319,9 @@ async fn test_propagate_saves_and_fs_changes( let project_b = client_b.build_dev_server_project(project_id, cx_b).await; let project_c = client_c.build_dev_server_project(project_id, cx_c).await; - let worktree_b = project_b.read_with(cx_b, |p, _| p.worktrees().next().unwrap()); + let worktree_b = project_b.read_with(cx_b, |p, cx| p.worktrees(cx).next().unwrap()); - let worktree_c = project_c.read_with(cx_c, |p, _| p.worktrees().next().unwrap()); + let worktree_c = project_c.read_with(cx_c, |p, cx| p.worktrees(cx).next().unwrap()); // Open and edit a buffer as both guests B and C. let buffer_b = project_b @@ -3022,8 +3023,8 @@ async fn test_fs_operations( .unwrap(); let project_b = client_b.build_dev_server_project(project_id, cx_b).await; - let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap()); - let worktree_b = project_b.read_with(cx_b, |project, _| project.worktrees().next().unwrap()); + let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap()); + let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap()); let entry = project_b .update(cx_b, |project, cx| { @@ -3323,7 +3324,7 @@ async fn test_local_settings( // As client B, join that project and observe the local settings. let project_b = client_b.build_dev_server_project(project_id, cx_b).await; - let worktree_b = project_b.read_with(cx_b, |project, _| project.worktrees().next().unwrap()); + let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap()); executor.run_until_parked(); cx_b.read(|cx| { let store = cx.global::(); @@ -3735,7 +3736,7 @@ async fn test_leaving_project( // Client B opens a buffer. 
let buffer_b1 = project_b1 .update(cx_b, |project, cx| { - let worktree_id = project.worktrees().next().unwrap().read(cx).id(); + let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id(); project.open_buffer((worktree_id, "a.txt"), cx) }) .await @@ -3773,7 +3774,7 @@ async fn test_leaving_project( let buffer_b2 = project_b2 .update(cx_b, |project, cx| { - let worktree_id = project.worktrees().next().unwrap().read(cx).id(); + let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id(); project.open_buffer((worktree_id, "a.txt"), cx) }) .await @@ -4627,7 +4628,7 @@ async fn test_definition( .unwrap(); cx_b.read(|cx| { assert_eq!(definitions_1.len(), 1); - assert_eq!(project_b.read(cx).worktrees().count(), 2); + assert_eq!(project_b.read(cx).worktrees(cx).count(), 2); let target_buffer = definitions_1[0].target.buffer.read(cx); assert_eq!( target_buffer.text(), @@ -4656,7 +4657,7 @@ async fn test_definition( .unwrap(); cx_b.read(|cx| { assert_eq!(definitions_2.len(), 1); - assert_eq!(project_b.read(cx).worktrees().count(), 2); + assert_eq!(project_b.read(cx).worktrees(cx).count(), 2); let target_buffer = definitions_2[0].target.buffer.read(cx); assert_eq!( target_buffer.text(), @@ -4814,7 +4815,7 @@ async fn test_references( assert!(status.pending_work.is_empty()); assert_eq!(references.len(), 3); - assert_eq!(project.worktrees().count(), 2); + assert_eq!(project.worktrees(cx).count(), 2); let two_buffer = references[0].buffer.read(cx); let three_buffer = references[2].buffer.read(cx); @@ -6199,7 +6200,7 @@ async fn test_preview_tabs(cx: &mut TestAppContext) { let project = workspace.update(cx, |workspace, _| workspace.project().clone()); let worktree_id = project.update(cx, |project, cx| { - project.worktrees().next().unwrap().read(cx).id() + project.worktrees(cx).next().unwrap().read(cx).id() }); let path_1 = ProjectPath { diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index 70721dea69..d5408b4090 100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -301,7 +301,7 @@ impl RandomizedTest for ProjectCollaborationTest { let is_local = project.read_with(cx, |project, _| project.is_local()); let worktree = project.read_with(cx, |project, cx| { project - .worktrees() + .worktrees(cx) .filter(|worktree| { let worktree = worktree.read(cx); worktree.is_visible() @@ -423,7 +423,7 @@ impl RandomizedTest for ProjectCollaborationTest { 81.. 
=> { let worktree = project.read_with(cx, |project, cx| { project - .worktrees() + .worktrees(cx) .filter(|worktree| { let worktree = worktree.read(cx); worktree.is_visible() @@ -1172,7 +1172,7 @@ impl RandomizedTest for ProjectCollaborationTest { let host_worktree_snapshots = host_project.read_with(host_cx, |host_project, cx| { host_project - .worktrees() + .worktrees(cx) .map(|worktree| { let worktree = worktree.read(cx); (worktree.id(), worktree.snapshot()) @@ -1180,7 +1180,7 @@ impl RandomizedTest for ProjectCollaborationTest { .collect::>() }); let guest_worktree_snapshots = guest_project - .worktrees() + .worktrees(cx) .map(|worktree| { let worktree = worktree.read(cx); (worktree.id(), worktree.snapshot()) @@ -1538,7 +1538,7 @@ fn project_path_for_full_path( let root_name = components.next().unwrap().as_os_str().to_str().unwrap(); let path = components.as_path().into(); let worktree_id = project.read_with(cx, |project, cx| { - project.worktrees().find_map(|worktree| { + project.worktrees(cx).find_map(|worktree| { let worktree = worktree.read(cx); if worktree.root_name() == root_name { Some(worktree.id()) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index aa1bc27ce8..407d3593ed 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -6253,8 +6253,8 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) { }, ); - let worktree = project.update(cx, |project, _| { - let mut worktrees = project.worktrees().collect::>(); + let worktree = project.update(cx, |project, cx| { + let mut worktrees = project.worktrees(cx).collect::>(); assert_eq!(worktrees.len(), 1); worktrees.pop().unwrap() }); @@ -9319,7 +9319,7 @@ async fn test_on_type_formatting_not_triggered(cx: &mut gpui::TestAppContext) { let worktree_id = workspace .update(cx, |workspace, cx| { workspace.project().update(cx, |project, cx| { - project.worktrees().next().unwrap().read(cx).id() + project.worktrees(cx).next().unwrap().read(cx).id() }) }) .unwrap(); diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlay_hint_cache.rs index ff7ad73910..f2a5c41cd3 100644 --- a/crates/editor/src/inlay_hint_cache.rs +++ b/crates/editor/src/inlay_hint_cache.rs @@ -2581,7 +2581,7 @@ pub mod tests { ); let worktree_id = project.update(cx, |project, cx| { - project.worktrees().next().unwrap().read(cx).id() + project.worktrees(cx).next().unwrap().read(cx).id() }); let buffer_1 = project @@ -2931,7 +2931,7 @@ pub mod tests { ); let worktree_id = project.update(cx, |project, cx| { - project.worktrees().next().unwrap().read(cx).id() + project.worktrees(cx).next().unwrap().read(cx).id() }); let buffer_1 = project diff --git a/crates/file_finder/src/file_finder_tests.rs b/crates/file_finder/src/file_finder_tests.rs index dffbdcc342..e2056f3831 100644 --- a/crates/file_finder/src/file_finder_tests.rs +++ b/crates/file_finder/src/file_finder_tests.rs @@ -1496,7 +1496,7 @@ async fn test_search_results_refreshed_on_adding_and_removing_worktrees( let project = Project::test(app_state.fs.clone(), ["/test/project_1".as_ref()], cx).await; let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx)); let worktree_1_id = project.update(cx, |project, cx| { - let worktree = project.worktrees().last().expect("worktree not found"); + let worktree = project.worktrees(cx).last().expect("worktree not found"); worktree.read(cx).id() }); diff --git a/crates/file_finder/src/new_path_prompt.rs b/crates/file_finder/src/new_path_prompt.rs 
index a6fa38b7b0..206545f5b4 100644 --- a/crates/file_finder/src/new_path_prompt.rs +++ b/crates/file_finder/src/new_path_prompt.rs @@ -32,7 +32,7 @@ impl Match { path_match.path.join(suffix), ) } else { - (project.worktrees().next(), PathBuf::from(suffix)) + (project.worktrees(cx).next(), PathBuf::from(suffix)) }; worktree.and_then(|worktree| worktree.read(cx).entry_for_path(path)) @@ -72,7 +72,7 @@ impl Match { let worktree_id = if let Some(path_match) = &self.path_match { WorktreeId::from_usize(path_match.worktree_id) } else { - project.worktrees().next()?.read(cx).id() + project.worktrees(cx).next()?.read(cx).id() }; let path = PathBuf::from(self.relative_path()); @@ -84,7 +84,7 @@ impl Match { } fn existing_prefix(&self, project: &Project, cx: &WindowContext) -> Option { - let worktree = project.worktrees().next()?.read(cx); + let worktree = project.worktrees(cx).next()?.read(cx); let mut prefix = PathBuf::new(); let parts = self.suffix.as_ref()?.split('/'); for part in parts { diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index ecd130176a..6a3e4dcb1e 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -75,6 +75,9 @@ impl RealGitRepository { } } +// https://git-scm.com/book/en/v2/Git-Internals-Git-Objects +const GIT_MODE_SYMLINK: u32 = 0o120000; + impl GitRepository for RealGitRepository { fn reload_index(&self) { if let Ok(mut index) = self.repository.lock().index() { @@ -91,8 +94,8 @@ impl GitRepository for RealGitRepository { check_path_to_repo_path_errors(relative_file_path)?; let oid = match index.get_path(relative_file_path, STAGE_NORMAL) { - Some(entry) => entry.id, - None => return Ok(None), + Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id, + _ => return Ok(None), }; let content = repo.find_blob(oid)?.content().to_owned(); diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 4efef28d0e..155f38d808 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -258,7 +258,7 @@ mod tests { let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx)); let worktree_id = workspace.update(cx, |workspace, cx| { workspace.project().update(cx, |project, cx| { - project.worktrees().next().unwrap().read(cx).id() + project.worktrees(cx).next().unwrap().read(cx).id() }) }); let _buffer = project diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index 274a9b7f51..0fabdab660 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -551,7 +551,7 @@ impl LspLogView { self.project .read(cx) .supplementary_language_servers() - .filter_map(|(&server_id, (name, _))| { + .filter_map(|(&server_id, name)| { let state = log_store.language_servers.get(&server_id)?; Some(LogMenuItem { server_id, diff --git a/crates/language_tools/src/lsp_log_tests.rs b/crates/language_tools/src/lsp_log_tests.rs index a6bc3112f6..395bbca530 100644 --- a/crates/language_tools/src/lsp_log_tests.rs +++ b/crates/language_tools/src/lsp_log_tests.rs @@ -85,7 +85,7 @@ async fn test_lsp_logs(cx: &mut TestAppContext) { server_name: LanguageServerName("the-rust-language-server".into()), worktree_root_name: project .read(cx) - .worktrees() + .worktrees(cx) .next() .unwrap() .read(cx) diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index 84c2089b11..c915f78a8f 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -321,7 +321,7 @@ mod tests { 
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx)); let worktree_id = workspace.update(cx, |workspace, cx| { workspace.project().update(cx, |project, cx| { - project.worktrees().next().unwrap().read(cx).id() + project.worktrees(cx).next().unwrap().read(cx).id() }) }); let _buffer = project diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index badc4569f3..4509b4e056 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -1,13 +1,16 @@ -use crate::ProjectPath; -use anyhow::{anyhow, Context as _, Result}; +use crate::{ + worktree_store::{WorktreeStore, WorktreeStoreEvent}, + ProjectPath, +}; +use anyhow::{anyhow, Result}; use collections::{hash_map, HashMap}; -use futures::{channel::oneshot, StreamExt as _}; +use futures::{channel::oneshot, stream::FuturesUnordered, StreamExt as _}; use gpui::{ AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, Task, WeakModel, }; use language::{ - proto::{deserialize_version, serialize_version, split_operations}, - Buffer, Capability, Language, Operation, + proto::{deserialize_line_ending, deserialize_version, serialize_version, split_operations}, + Buffer, Capability, Event as BufferEvent, Language, Operation, }; use rpc::{ proto::{self, AnyProtoClient, PeerId}, @@ -16,11 +19,15 @@ use rpc::{ use std::{io, path::Path, sync::Arc}; use text::BufferId; use util::{debug_panic, maybe, ResultExt as _}; -use worktree::{File, ProjectEntryId, RemoteWorktree, Worktree}; +use worktree::{ + File, PathChange, ProjectEntryId, RemoteWorktree, UpdatedGitRepositoriesSet, Worktree, +}; /// A set of open buffers. pub struct BufferStore { retain_buffers: bool, + #[allow(unused)] + worktree_store: Model, opened_buffers: HashMap, local_buffer_ids_by_path: HashMap, local_buffer_ids_by_entry_id: HashMap, @@ -51,6 +58,12 @@ pub enum BufferStoreEvent { has_changed_file: bool, saved_version: clock::Global, }, + LocalBufferUpdated { + buffer: Model, + }, + DiffBaseUpdated { + buffer: Model, + }, } impl EventEmitter for BufferStore {} @@ -62,9 +75,22 @@ impl BufferStore { /// and won't be released unless they are explicitly removed, or `retain_buffers` /// is set to `false` via `set_retain_buffers`. Otherwise, buffers are stored as /// weak handles. - pub fn new(retain_buffers: bool) -> Self { + pub fn new( + worktree_store: Model, + retain_buffers: bool, + cx: &mut ModelContext, + ) -> Self { + cx.subscribe(&worktree_store, |this, _, event, cx| match event { + WorktreeStoreEvent::WorktreeAdded(worktree) => { + this.subscribe_to_worktree(worktree, cx); + } + _ => {} + }) + .detach(); + Self { retain_buffers, + worktree_store, opened_buffers: Default::default(), remote_buffer_listeners: Default::default(), loading_remote_buffers_by_id: Default::default(), @@ -77,7 +103,6 @@ impl BufferStore { pub fn open_buffer( &mut self, project_path: ProjectPath, - worktree: Model, cx: &mut ModelContext, ) -> Task>> { let existing_buffer = self.get_by_path(&project_path, cx); @@ -85,6 +110,14 @@ impl BufferStore { return Task::ready(Ok(existing_buffer)); } + let Some(worktree) = self + .worktree_store + .read(cx) + .worktree_for_id(project_path.worktree_id, cx) + else { + return Task::ready(Err(anyhow!("no such worktree"))); + }; + let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) { // If the given path is already being loaded, then wait for that existing // task to complete and return the same buffer. 
@@ -127,6 +160,131 @@ impl BufferStore { }) } + fn subscribe_to_worktree(&mut self, worktree: &Model, cx: &mut ModelContext) { + cx.subscribe(worktree, |this, worktree, event, cx| { + if worktree.read(cx).is_local() { + match event { + worktree::Event::UpdatedEntries(changes) => { + this.local_worktree_entries_changed(&worktree, changes, cx); + } + worktree::Event::UpdatedGitRepositories(updated_repos) => { + this.local_worktree_git_repos_changed(worktree.clone(), updated_repos, cx) + } + _ => {} + } + } + }) + .detach(); + } + + fn local_worktree_entries_changed( + &mut self, + worktree_handle: &Model, + changes: &[(Arc, ProjectEntryId, PathChange)], + cx: &mut ModelContext, + ) { + let snapshot = worktree_handle.read(cx).snapshot(); + for (path, entry_id, _) in changes { + self.local_worktree_entry_changed(*entry_id, path, worktree_handle, &snapshot, cx); + } + } + + fn local_worktree_git_repos_changed( + &mut self, + worktree_handle: Model, + changed_repos: &UpdatedGitRepositoriesSet, + cx: &mut ModelContext, + ) { + debug_assert!(worktree_handle.read(cx).is_local()); + + // Identify the loading buffers whose containing repository that has changed. + let future_buffers = self + .loading_buffers() + .filter_map(|(project_path, receiver)| { + if project_path.worktree_id != worktree_handle.read(cx).id() { + return None; + } + let path = &project_path.path; + changed_repos + .iter() + .find(|(work_dir, _)| path.starts_with(work_dir))?; + let path = path.clone(); + Some(async move { + Self::wait_for_loading_buffer(receiver) + .await + .ok() + .map(|buffer| (buffer, path)) + }) + }) + .collect::>(); + + // Identify the current buffers whose containing repository has changed. + let current_buffers = self + .buffers() + .filter_map(|buffer| { + let file = File::from_dyn(buffer.read(cx).file())?; + if file.worktree != worktree_handle { + return None; + } + changed_repos + .iter() + .find(|(work_dir, _)| file.path.starts_with(work_dir))?; + Some((buffer, file.path.clone())) + }) + .collect::>(); + + if future_buffers.len() + current_buffers.len() == 0 { + return; + } + + cx.spawn(move |this, mut cx| async move { + // Wait for all of the buffers to load. + let future_buffers = future_buffers.collect::>().await; + + // Reload the diff base for every buffer whose containing git repository has changed. + let snapshot = + worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?; + let diff_bases_by_buffer = cx + .background_executor() + .spawn(async move { + let mut diff_base_tasks = future_buffers + .into_iter() + .flatten() + .chain(current_buffers) + .filter_map(|(buffer, path)| { + let (repo_entry, local_repo_entry) = snapshot.repo_for_path(&path)?; + let relative_path = repo_entry.relativize(&snapshot, &path).ok()?; + Some(async move { + let base_text = + local_repo_entry.repo().load_index_text(&relative_path); + Some((buffer, base_text)) + }) + }) + .collect::>(); + + let mut diff_bases = Vec::with_capacity(diff_base_tasks.len()); + while let Some(diff_base) = diff_base_tasks.next().await { + if let Some(diff_base) = diff_base { + diff_bases.push(diff_base); + } + } + diff_bases + }) + .await; + + this.update(&mut cx, |_, cx| { + // Assign the new diff bases on all of the buffers. 
+ for (buffer, diff_base) in diff_bases_by_buffer { + buffer.update(cx, |buffer, cx| { + buffer.set_diff_base(diff_base.clone(), cx); + }); + cx.emit(BufferStoreEvent::DiffBaseUpdated { buffer }) + } + }) + }) + .detach_and_log_err(cx); + } + fn open_local_buffer_internal( &mut self, path: Arc, @@ -265,9 +423,16 @@ impl BufferStore { &mut self, buffer: Model, path: ProjectPath, - worktree: Model, cx: &mut ModelContext, ) -> Task> { + let Some(worktree) = self + .worktree_store + .read(cx) + .worktree_for_id(path.worktree_id, cx) + else { + return Task::ready(Err(anyhow!("no such worktree"))); + }; + let old_file = File::from_dyn(buffer.read(cx).file()) .cloned() .map(Arc::new); @@ -411,6 +576,7 @@ impl BufferStore { } } + cx.subscribe(&buffer, Self::on_buffer_event).detach(); cx.emit(BufferStoreEvent::BufferAdded(buffer)); Ok(()) } @@ -461,31 +627,6 @@ impl BufferStore { .or_else(|| self.loading_remote_buffers_by_id.get(&buffer_id).cloned()) } - fn get_or_remove_by_path( - &mut self, - entry_id: ProjectEntryId, - project_path: &ProjectPath, - ) -> Option<(BufferId, Model)> { - let buffer_id = match self.local_buffer_ids_by_entry_id.get(&entry_id) { - Some(&buffer_id) => buffer_id, - None => match self.local_buffer_ids_by_path.get(project_path) { - Some(&buffer_id) => buffer_id, - None => { - return None; - } - }, - }; - let buffer = if let Some(buffer) = self.get(buffer_id) { - buffer - } else { - self.opened_buffers.remove(&buffer_id); - self.local_buffer_ids_by_path.remove(project_path); - self.local_buffer_ids_by_entry_id.remove(&entry_id); - return None; - }; - Some((buffer_id, buffer)) - } - pub fn wait_for_remote_buffer( &mut self, id: BufferId, @@ -561,25 +702,48 @@ impl BufferStore { .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_))); } - pub fn file_changed( + fn on_buffer_event( + &mut self, + buffer: Model, + event: &BufferEvent, + cx: &mut ModelContext, + ) { + match event { + BufferEvent::FileHandleChanged => { + self.buffer_changed_file(buffer, cx); + } + _ => {} + } + } + + fn local_worktree_entry_changed( &mut self, - path: Arc, entry_id: ProjectEntryId, - worktree_handle: &Model, + path: &Arc, + worktree: &Model, snapshot: &worktree::Snapshot, cx: &mut ModelContext, - ) -> Option<(Model, Arc, Arc)> { - let (buffer_id, buffer) = self.get_or_remove_by_path( - entry_id, - &ProjectPath { - worktree_id: snapshot.id(), - path, - }, - )?; + ) -> Option<()> { + let project_path = ProjectPath { + worktree_id: snapshot.id(), + path: path.clone(), + }; + let buffer_id = match self.local_buffer_ids_by_entry_id.get(&entry_id) { + Some(&buffer_id) => buffer_id, + None => self.local_buffer_ids_by_path.get(&project_path).copied()?, + }; + let buffer = if let Some(buffer) = self.get(buffer_id) { + buffer + } else { + self.opened_buffers.remove(&buffer_id); + self.local_buffer_ids_by_path.remove(&project_path); + self.local_buffer_ids_by_entry_id.remove(&entry_id); + return None; + }; - let result = buffer.update(cx, |buffer, cx| { + let (old_file, new_file) = buffer.update(cx, |buffer, cx| { let old_file = File::from_dyn(buffer.file())?; - if old_file.worktree != *worktree_handle { + if old_file.worktree != *worktree { return None; } @@ -592,7 +756,7 @@ impl BufferStore { entry_id: Some(entry.id), mtime: entry.mtime, path: entry.path.clone(), - worktree: worktree_handle.clone(), + worktree: worktree.clone(), is_deleted: false, is_private: entry.is_private, } @@ -602,7 +766,7 @@ impl BufferStore { entry_id: Some(entry.id), mtime: entry.mtime, path: entry.path.clone(), - 
worktree: worktree_handle.clone(), + worktree: worktree.clone(), is_deleted: false, is_private: entry.is_private, } @@ -612,7 +776,7 @@ impl BufferStore { entry_id: old_file.entry_id, path: old_file.path.clone(), mtime: old_file.mtime, - worktree: worktree_handle.clone(), + worktree: worktree.clone(), is_deleted: true, is_private: old_file.is_private, } @@ -625,47 +789,42 @@ impl BufferStore { let old_file = Arc::new(old_file.clone()); let new_file = Arc::new(new_file); buffer.file_updated(new_file.clone(), cx); - Some((cx.handle(), old_file, new_file)) - }); + Some((old_file, new_file)) + })?; - if let Some((buffer, old_file, new_file)) = &result { - if new_file.path != old_file.path { - self.local_buffer_ids_by_path.remove(&ProjectPath { - path: old_file.path.clone(), - worktree_id: old_file.worktree_id(cx), - }); - self.local_buffer_ids_by_path.insert( - ProjectPath { - worktree_id: new_file.worktree_id(cx), - path: new_file.path.clone(), - }, - buffer_id, - ); - cx.emit(BufferStoreEvent::BufferChangedFilePath { - buffer: buffer.clone(), - old_file: Some(old_file.clone()), - }); + if new_file.path != old_file.path { + self.local_buffer_ids_by_path.remove(&ProjectPath { + path: old_file.path.clone(), + worktree_id: old_file.worktree_id(cx), + }); + self.local_buffer_ids_by_path.insert( + ProjectPath { + worktree_id: new_file.worktree_id(cx), + path: new_file.path.clone(), + }, + buffer_id, + ); + cx.emit(BufferStoreEvent::BufferChangedFilePath { + buffer: buffer.clone(), + old_file: Some(old_file.clone()), + }); + } + + if new_file.entry_id != old_file.entry_id { + if let Some(entry_id) = old_file.entry_id { + self.local_buffer_ids_by_entry_id.remove(&entry_id); } - - if new_file.entry_id != old_file.entry_id { - if let Some(entry_id) = old_file.entry_id { - self.local_buffer_ids_by_entry_id.remove(&entry_id); - } - if let Some(entry_id) = new_file.entry_id { - self.local_buffer_ids_by_entry_id - .insert(entry_id, buffer_id); - } + if let Some(entry_id) = new_file.entry_id { + self.local_buffer_ids_by_entry_id + .insert(entry_id, buffer_id); } } - result + cx.emit(BufferStoreEvent::LocalBufferUpdated { buffer }); + None } - pub fn buffer_changed_file( - &mut self, - buffer: Model, - cx: &mut AppContext, - ) -> Option<()> { + fn buffer_changed_file(&mut self, buffer: Model, cx: &mut AppContext) -> Option<()> { let file = File::from_dyn(buffer.read(cx).file())?; let remote_id = buffer.read(cx).remote_id(); @@ -862,7 +1021,6 @@ impl BufferStore { pub async fn handle_save_buffer( this: Model, project_id: u64, - worktree: Option>, envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { @@ -876,10 +1034,9 @@ impl BufferStore { let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?; if let Some(new_path) = envelope.payload.new_path { - let worktree = worktree.context("no such worktree")?; let new_path = ProjectPath::from_proto(new_path); this.update(&mut cx, |this, cx| { - this.save_buffer_as(buffer.clone(), new_path, worktree, cx) + this.save_buffer_as(buffer.clone(), new_path, cx) })? 
.await?; } else { @@ -895,6 +1052,44 @@ impl BufferStore { }) } + pub async fn handle_buffer_saved( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result<()> { + let buffer_id = BufferId::new(envelope.payload.buffer_id)?; + let version = deserialize_version(&envelope.payload.version); + let mtime = envelope.payload.mtime.map(|time| time.into()); + this.update(&mut cx, |this, cx| { + if let Some(buffer) = this.get_possibly_incomplete(buffer_id) { + buffer.update(cx, |buffer, cx| { + buffer.did_save(version, mtime, cx); + }); + } + }) + } + + pub async fn handle_buffer_reloaded( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result<()> { + let buffer_id = BufferId::new(envelope.payload.buffer_id)?; + let version = deserialize_version(&envelope.payload.version); + let mtime = envelope.payload.mtime.map(|time| time.into()); + let line_ending = deserialize_line_ending( + proto::LineEnding::from_i32(envelope.payload.line_ending) + .ok_or_else(|| anyhow!("missing line ending"))?, + ); + this.update(&mut cx, |this, cx| { + if let Some(buffer) = this.get_possibly_incomplete(buffer_id) { + buffer.update(cx, |buffer, cx| { + buffer.did_reload(version, line_ending, mtime, cx); + }); + } + }) + } + pub async fn wait_for_loading_buffer( mut receiver: postage::watch::Receiver, Arc>>>, ) -> Result, Arc> { diff --git a/crates/project/src/connection_manager.rs b/crates/project/src/connection_manager.rs index f9b342ea98..620172ed30 100644 --- a/crates/project/src/connection_manager.rs +++ b/crates/project/src/connection_manager.rs @@ -85,7 +85,7 @@ impl Manager { Some(proto::RejoinProject { id: project_id, worktrees: project - .worktrees() + .worktrees(cx) .map(|worktree| { let worktree = worktree.read(cx); proto::RejoinWorktree { diff --git a/crates/project/src/debounced_delay.rs b/crates/project/src/debounced_delay.rs index 152df1ba0e..37e923d665 100644 --- a/crates/project/src/debounced_delay.rs +++ b/crates/project/src/debounced_delay.rs @@ -1,26 +1,25 @@ -use std::time::Duration; - use futures::{channel::oneshot, FutureExt}; use gpui::{ModelContext, Task}; +use std::{marker::PhantomData, time::Duration}; -use crate::Project; - -pub struct DebouncedDelay { +pub struct DebouncedDelay { task: Option>, cancel_channel: Option>, + _phantom_data: PhantomData, } -impl DebouncedDelay { - pub fn new() -> DebouncedDelay { - DebouncedDelay { +impl DebouncedDelay { + pub fn new() -> Self { + Self { task: None, cancel_channel: None, + _phantom_data: PhantomData, } } - pub fn fire_new(&mut self, delay: Duration, cx: &mut ModelContext, func: F) + pub fn fire_new(&mut self, delay: Duration, cx: &mut ModelContext, func: F) where - F: 'static + Send + FnOnce(&mut Project, &mut ModelContext) -> Task<()>, + F: 'static + Send + FnOnce(&mut E, &mut ModelContext) -> Task<()>, { if let Some(channel) = self.cancel_channel.take() { _ = channel.send(()); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index cf6e8b5ae7..93c4129623 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -8,6 +8,7 @@ pub mod project_settings; pub mod search; mod task_inventory; pub mod terminals; +pub mod worktree_store; #[cfg(test)] mod project_tests; @@ -47,8 +48,8 @@ use language::{ }, markdown, point_to_lsp, prepare_completion_documentation, proto::{ - deserialize_anchor, deserialize_line_ending, deserialize_version, serialize_anchor, - serialize_line_ending, serialize_version, split_operations, + deserialize_anchor, deserialize_version, 
serialize_anchor, serialize_line_ending, + serialize_version, split_operations, }, range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, Capability, CodeLabel, ContextProvider, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Documentation, @@ -119,6 +120,7 @@ use util::{ NumericPrefixWithSuffix, ResultExt, TryFutureExt as _, }; use worktree::{CreatedEntry, Snapshot, Traversal}; +use worktree_store::{WorktreeStore, WorktreeStoreEvent}; use yarn::YarnPathStore; pub use fs::*; @@ -166,8 +168,6 @@ pub enum OpenedBufferEvent { /// /// Can be either local (for the project opened on the same host) or remote.(for collab projects, browsed by multiple remote users). pub struct Project { - worktrees: Vec, - worktrees_reordered: bool, active_entry: Option, buffer_ordered_messages_tx: mpsc::UnboundedSender, languages: Arc, @@ -203,6 +203,7 @@ pub struct Project { client_state: ProjectClientState, collaborators: HashMap, client_subscriptions: Vec, + worktree_store: Model, buffer_store: Model, _subscriptions: Vec, shared_buffers: HashMap>, @@ -212,7 +213,7 @@ pub struct Project { buffer_snapshots: HashMap>>, // buffer_id -> server_id -> vec of snapshots buffers_being_formatted: HashSet, buffers_needing_diff: HashSet>, - git_diff_debouncer: DebouncedDelay, + git_diff_debouncer: DebouncedDelay, nonce: u128, _maintain_buffer_languages: Task<()>, _maintain_workspace_config: Task>, @@ -263,12 +264,6 @@ enum LocalProjectUpdate { }, } -#[derive(Clone)] -enum WorktreeHandle { - Strong(Model), - Weak(WeakModel), -} - #[derive(Debug)] enum ProjectClientState { Local, @@ -765,17 +760,21 @@ impl Project { let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); - let buffer_store = cx.new_model(|_| BufferStore::new(false)); + let worktree_store = cx.new_model(|_| WorktreeStore::new(false)); + cx.subscribe(&worktree_store, Self::on_worktree_store_event) + .detach(); + + let buffer_store = + cx.new_model(|cx| BufferStore::new(worktree_store.clone(), false, cx)); cx.subscribe(&buffer_store, Self::on_buffer_store_event) .detach(); let yarn = YarnPathStore::new(fs.clone(), cx); Self { - worktrees: Vec::new(), - worktrees_reordered: false, buffer_ordered_messages_tx: tx, collaborators: Default::default(), + worktree_store, buffer_store, shared_buffers: Default::default(), loading_worktrees: Default::default(), @@ -843,6 +842,8 @@ impl Project { this.update(cx, |this, cx| { ssh_session.add_message_handler(cx.weak_model(), Self::handle_update_worktree); ssh_session.add_message_handler(cx.weak_model(), Self::handle_create_buffer_for_peer); + ssh_session.add_message_handler(cx.weak_model(), Self::handle_update_buffer_file); + ssh_session.add_message_handler(cx.weak_model(), Self::handle_update_diff_base); this.ssh_session = Some(ssh_session); }); this @@ -926,15 +927,17 @@ impl Project { cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx)) .detach(); - let buffer_store = cx.new_model(|_| BufferStore::new(true)); + let worktree_store = cx.new_model(|_| WorktreeStore::new(true)); + + let buffer_store = + cx.new_model(|cx| BufferStore::new(worktree_store.clone(), true, cx)); cx.subscribe(&buffer_store, Self::on_buffer_store_event) .detach(); let mut this = Self { - worktrees: Vec::new(), - worktrees_reordered: false, buffer_ordered_messages_tx: tx, buffer_store, + worktree_store, shared_buffers: Default::default(), loading_worktrees: Default::default(), active_entry: None, @@ -1407,15 +1410,18 @@ impl Project { self.collaborators.values().find(|c| 
c.replica_id == 0) } - pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) { - self.worktrees_reordered = worktrees_reordered; + pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool, cx: &mut AppContext) { + self.worktree_store.update(cx, |store, _| { + store.set_worktrees_reordered(worktrees_reordered); + }); } /// Collect all worktrees, including ones that don't appear in the project panel - pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator> { - self.worktrees - .iter() - .filter_map(move |worktree| worktree.upgrade()) + pub fn worktrees<'a>( + &self, + cx: &'a AppContext, + ) -> impl 'a + DoubleEndedIterator> { + self.worktree_store.read(cx).worktrees() } /// Collect all user-visible worktrees, the ones that appear in the project panel. @@ -1423,8 +1429,7 @@ impl Project { &'a self, cx: &'a AppContext, ) -> impl 'a + DoubleEndedIterator> { - self.worktrees() - .filter(|worktree| worktree.read(cx).is_visible()) + self.worktree_store.read(cx).visible_worktrees(cx) } pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator { @@ -1433,8 +1438,7 @@ impl Project { } pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option> { - self.worktrees() - .find(|worktree| worktree.read(cx).id() == id) + self.worktree_store.read(cx).worktree_for_id(id, cx) } pub fn worktree_for_entry( @@ -1442,8 +1446,9 @@ impl Project { entry_id: ProjectEntryId, cx: &AppContext, ) -> Option> { - self.worktrees() - .find(|worktree| worktree.read(cx).contains_entry(entry_id)) + self.worktree_store + .read(cx) + .worktree_for_entry(entry_id, cx) } pub fn worktree_id_for_entry( @@ -1476,7 +1481,7 @@ impl Project { } pub fn visibility_for_path(&self, path: &Path, cx: &AppContext) -> Option { - self.worktrees() + self.worktrees(cx) .filter_map(|worktree| { let worktree = worktree.read(cx); worktree @@ -1539,7 +1544,6 @@ impl Project { cx: &mut ModelContext, ) -> Option>> { let worktree = self.worktree_for_entry(entry_id, cx)?; - cx.emit(Event::DeletedEntry(entry_id)); worktree.update(cx, |worktree, cx| { worktree.delete_entry(entry_id, trash, cx) }) @@ -1577,17 +1581,9 @@ impl Project { self.buffer_store.update(cx, |buffer_store, cx| { buffer_store.set_retain_buffers(true, cx) }); - - for worktree_handle in self.worktrees.iter_mut() { - match worktree_handle { - WorktreeHandle::Strong(_) => {} - WorktreeHandle::Weak(worktree) => { - if let Some(worktree) = worktree.upgrade() { - *worktree_handle = WorktreeHandle::Strong(worktree); - } - } - } - } + self.worktree_store.update(cx, |store, cx| { + store.set_shared(true, cx); + }); for (server_id, status) in &self.language_server_statuses { self.client @@ -1602,7 +1598,7 @@ impl Project { } let store = cx.global::(); - for worktree in self.worktrees() { + for worktree in self.worktrees(cx) { let worktree_id = worktree.read(cx).id().to_proto(); for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) { self.client @@ -1625,8 +1621,8 @@ impl Project { while let Some(update) = updates_rx.next().await { match update { LocalProjectUpdate::WorktreesChanged => { - let worktrees = this.update(&mut cx, |this, _cx| { - this.worktrees().collect::>() + let worktrees = this.update(&mut cx, |this, cx| { + this.worktrees(cx).collect::>() })?; let update_project = this @@ -1732,11 +1728,13 @@ impl Project { cx: &mut ModelContext, ) -> Result<()> { cx.update_global::(|store, cx| { - for worktree in &self.worktrees { - store - .clear_local_settings(worktree.handle_id(), cx) - .log_err(); - } + 
self.worktree_store.update(cx, |worktree_store, cx| { + for worktree in worktree_store.worktrees() { + store + .clear_local_settings(worktree.entity_id().as_u64() as usize, cx) + .log_err(); + } + }); }); self.join_project_response_message_id = message_id; @@ -1788,29 +1786,17 @@ impl Project { self.collaborators.clear(); self.shared_buffers.clear(); self.client_subscriptions.clear(); - - for worktree_handle in self.worktrees.iter_mut() { - if let WorktreeHandle::Strong(worktree) = worktree_handle { - let is_visible = worktree.update(cx, |worktree, _| { - worktree.stop_observing_updates(); - worktree.is_visible() - }); - if !is_visible { - *worktree_handle = WorktreeHandle::Weak(worktree.downgrade()); - } - } - } - + self.worktree_store.update(cx, |store, cx| { + store.set_shared(false, cx); + }); self.buffer_store.update(cx, |buffer_store, cx| { buffer_store.set_retain_buffers(false, cx) }); - self.client .send(proto::UnshareProject { project_id: remote_id, }) .ok(); - Ok(()) } else { Err(anyhow!("attempted to unshare an unshared project")) @@ -1852,19 +1838,10 @@ impl Project { } = &mut self.client_state { *sharing_has_stopped = true; - self.collaborators.clear(); - - for worktree in &self.worktrees { - if let Some(worktree) = worktree.upgrade() { - worktree.update(cx, |worktree, _| { - if let Some(worktree) = worktree.as_remote_mut() { - worktree.disconnected_from_host(); - } - }); - } - } - + self.worktree_store.update(cx, |store, cx| { + store.disconnected_from_host(cx); + }); self.buffer_store.update(cx, |buffer_store, cx| { buffer_store.disconnected_from_host(cx) }); @@ -1951,30 +1928,6 @@ impl Project { }) } - pub fn open_buffer_for_full_path( - &mut self, - path: &Path, - cx: &mut ModelContext, - ) -> Task>> { - if let Some(worktree_name) = path.components().next() { - let worktree = self.worktrees().find(|worktree| { - OsStr::new(worktree.read(cx).root_name()) == worktree_name.as_os_str() - }); - if let Some(worktree) = worktree { - let worktree = worktree.read(cx); - let worktree_root_path = Path::new(worktree.root_name()); - if let Ok(path) = path.strip_prefix(worktree_root_path) { - let project_path = ProjectPath { - worktree_id: worktree.id(), - path: path.into(), - }; - return self.open_buffer(project_path, cx); - } - } - } - Task::ready(Err(anyhow!("buffer not found for {:?}", path))) - } - pub fn open_local_buffer( &mut self, abs_path: impl AsRef, @@ -1992,23 +1945,15 @@ impl Project { path: impl Into, cx: &mut ModelContext, ) -> Task>> { - let project_path = path.into(); - let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) { - worktree - } else { - return Task::ready(Err(anyhow!("no such worktree"))); - }; - if self.is_remote() && self.is_disconnected() { return Task::ready(Err(anyhow!(ErrorCode::Disconnected))); } self.buffer_store.update(cx, |buffer_store, cx| { - buffer_store.open_buffer(project_path, worktree, cx) + buffer_store.open_buffer(path.into(), cx) }) } - /// LanguageServerName is owned, because it is inserted into a map pub fn open_local_buffer_via_lsp( &mut self, mut abs_path: lsp::Url, @@ -2138,11 +2083,8 @@ impl Project { path: ProjectPath, cx: &mut ModelContext, ) -> Task> { - let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) else { - return Task::ready(Err(anyhow!("worktree does not exist"))); - }; self.buffer_store.update(cx, |buffer_store, cx| { - buffer_store.save_buffer_as(buffer.clone(), path, worktree, cx) + buffer_store.save_buffer_as(buffer.clone(), path, cx) }) } @@ -2442,6 +2384,36 @@ impl 
Project { self.detect_language_for_buffer(&buffer, cx); self.register_buffer_with_language_servers(&buffer, cx); } + BufferStoreEvent::LocalBufferUpdated { buffer } => { + let buffer = buffer.read(cx); + let buffer_id = buffer.remote_id(); + let Some(new_file) = buffer.file() else { + return; + }; + if let Some(project_id) = self.remote_id() { + self.client + .send(proto::UpdateBufferFile { + project_id, + buffer_id: buffer_id.into(), + file: Some(new_file.to_proto(cx)), + }) + .log_err(); + } + } + BufferStoreEvent::DiffBaseUpdated { buffer } => { + let buffer = buffer.read(cx); + let buffer_id = buffer.remote_id(); + let diff_base = buffer.diff_base(); + if let Some(project_id) = self.remote_id() { + self.client + .send(proto::UpdateDiffBase { + project_id, + buffer_id: buffer_id.to_proto(), + diff_base: diff_base.map(|b| b.to_string()), + }) + .log_err(); + } + } BufferStoreEvent::BufferSaved { buffer: buffer_handle, has_changed_file, @@ -2475,6 +2447,19 @@ impl Project { } } + fn on_worktree_store_event( + &mut self, + _: Model, + event: &WorktreeStoreEvent, + cx: &mut ModelContext, + ) { + match event { + WorktreeStoreEvent::WorktreeAdded(_) => cx.emit(Event::WorktreeAdded), + WorktreeStoreEvent::WorktreeRemoved(_, id) => cx.emit(Event::WorktreeRemoved(*id)), + WorktreeStoreEvent::WorktreeOrderChanged => cx.emit(Event::WorktreeOrderChanged), + } + } + fn on_buffer_event( &mut self, buffer: Model, @@ -2652,11 +2637,6 @@ impl Project { } } - BufferEvent::FileHandleChanged => { - self.buffer_store.update(cx, |buffer_store, cx| { - buffer_store.buffer_changed_file(buffer, cx) - })?; - } _ => {} } @@ -3172,12 +3152,12 @@ impl Project { _ => None, }; - for worktree in &self.worktrees { - if let Some(worktree) = worktree.upgrade() { + self.worktree_store.update(cx, |store, cx| { + for worktree in store.worktrees() { let key = (worktree.read(cx).id(), adapter.name.clone()); self.language_server_ids.remove(&key); } - } + }); Some(cx.spawn(move |this, mut cx| async move { if let Some(task) = existing_server.and_then(|server| server.shutdown()) { @@ -3199,16 +3179,8 @@ impl Project { task.await; this.update(&mut cx, |this, cx| { - let worktrees = this.worktrees.clone(); - for worktree in worktrees { - if let Some(worktree) = worktree.upgrade() { - this.start_language_server( - &worktree, - adapter.clone(), - language.clone(), - cx, - ); - } + for worktree in this.worktree_store.read(cx).worktrees().collect::>() { + this.start_language_server(&worktree, adapter.clone(), language.clone(), cx); } }) .ok(); @@ -4424,47 +4396,45 @@ impl Project { let mut builders = HashMap::default(); for watcher in watchers.values().flatten() { - for worktree in &self.worktrees { - if let Some(worktree) = worktree.upgrade() { - let glob_is_inside_worktree = worktree.update(cx, |tree, _| { - if let Some(abs_path) = tree.abs_path().to_str() { - let relative_glob_pattern = match &watcher.glob_pattern { - lsp::GlobPattern::String(s) => Some( - s.strip_prefix(abs_path) - .unwrap_or(s) - .strip_prefix(std::path::MAIN_SEPARATOR) - .unwrap_or(s), - ), - lsp::GlobPattern::Relative(rp) => { - let base_uri = match &rp.base_uri { - lsp::OneOf::Left(workspace_folder) => &workspace_folder.uri, - lsp::OneOf::Right(base_uri) => base_uri, - }; - base_uri.to_file_path().ok().and_then(|file_path| { - (file_path.to_str() == Some(abs_path)) - .then_some(rp.pattern.as_str()) - }) - } - }; - if let Some(relative_glob_pattern) = relative_glob_pattern { - let literal_prefix = glob_literal_prefix(relative_glob_pattern); - tree.as_local_mut() 
- .unwrap() - .add_path_prefix_to_scan(Path::new(literal_prefix).into()); - if let Some(glob) = Glob::new(relative_glob_pattern).log_err() { - builders - .entry(tree.id()) - .or_insert_with(|| GlobSetBuilder::new()) - .add(glob); - } - return true; + for worktree in self.worktree_store.read(cx).worktrees().collect::>() { + let glob_is_inside_worktree = worktree.update(cx, |tree, _| { + if let Some(abs_path) = tree.abs_path().to_str() { + let relative_glob_pattern = match &watcher.glob_pattern { + lsp::GlobPattern::String(s) => Some( + s.strip_prefix(abs_path) + .unwrap_or(s) + .strip_prefix(std::path::MAIN_SEPARATOR) + .unwrap_or(s), + ), + lsp::GlobPattern::Relative(rp) => { + let base_uri = match &rp.base_uri { + lsp::OneOf::Left(workspace_folder) => &workspace_folder.uri, + lsp::OneOf::Right(base_uri) => base_uri, + }; + base_uri.to_file_path().ok().and_then(|file_path| { + (file_path.to_str() == Some(abs_path)) + .then_some(rp.pattern.as_str()) + }) } + }; + if let Some(relative_glob_pattern) = relative_glob_pattern { + let literal_prefix = glob_literal_prefix(relative_glob_pattern); + tree.as_local_mut() + .unwrap() + .add_path_prefix_to_scan(Path::new(literal_prefix).into()); + if let Some(glob) = Glob::new(relative_glob_pattern).log_err() { + builders + .entry(tree.id()) + .or_insert_with(|| GlobSetBuilder::new()) + .add(glob); + } + return true; } - false - }); - if glob_is_inside_worktree { - break; } + false + }); + if glob_is_inside_worktree { + break; } } } @@ -7712,44 +7682,9 @@ impl Project { destination: WorktreeId, cx: &mut ModelContext<'_, Self>, ) -> Result<()> { - if source == destination { - return Ok(()); - } - - let mut source_index = None; - let mut destination_index = None; - for (i, worktree) in self.worktrees.iter().enumerate() { - if let Some(worktree) = worktree.upgrade() { - let worktree_id = worktree.read(cx).id(); - if worktree_id == source { - source_index = Some(i); - if destination_index.is_some() { - break; - } - } else if worktree_id == destination { - destination_index = Some(i); - if source_index.is_some() { - break; - } - } - } - } - - let source_index = - source_index.with_context(|| format!("Missing worktree for id {source}"))?; - let destination_index = - destination_index.with_context(|| format!("Missing worktree for id {destination}"))?; - - if source_index == destination_index { - return Ok(()); - } - - let worktree_to_move = self.worktrees.remove(source_index); - self.worktrees.insert(destination_index, worktree_to_move); - self.worktrees_reordered = true; - cx.emit(Event::WorktreeOrderChanged); - cx.notify(); - Ok(()) + self.worktree_store.update(cx, |worktree_store, cx| { + worktree_store.move_worktree(source, destination, cx) + }) } pub fn find_or_create_worktree( @@ -7773,8 +7708,8 @@ impl Project { abs_path: &Path, cx: &AppContext, ) -> Option<(Model, PathBuf)> { - for tree in &self.worktrees { - if let Some(tree) = tree.upgrade() { + self.worktree_store.read_with(cx, |worktree_store, cx| { + for tree in worktree_store.worktrees() { if let Some(relative_path) = tree .read(cx) .as_local() @@ -7783,8 +7718,8 @@ impl Project { return Some((tree.clone(), relative_path.into())); } } - } - None + None + }) } pub fn is_shared(&self) -> bool { @@ -8042,18 +7977,8 @@ impl Project { inventory.remove_worktree_sources(id_to_remove); }); - self.worktrees.retain(|worktree| { - if let Some(worktree) = worktree.upgrade() { - let id = worktree.read(cx).id(); - if id == id_to_remove { - cx.emit(Event::WorktreeRemoved(id)); - false - } else { - true - } - } 
else { - false - } + self.worktree_store.update(cx, |worktree_store, cx| { + worktree_store.remove_worktree(id_to_remove, cx); }); self.metadata_changed(cx); @@ -8066,7 +7991,6 @@ impl Project { match event { worktree::Event::UpdatedEntries(changes) => { if is_local { - this.update_local_worktree_buffers(&worktree, changes, cx); this.update_local_worktree_language_servers(&worktree, changes, cx); this.update_local_worktree_settings(&worktree, changes, cx); this.update_prettier_settings(&worktree, changes, cx); @@ -8082,85 +8006,18 @@ impl Project { .telemetry() .report_discovered_project_events(worktree_id, changes); } - worktree::Event::UpdatedGitRepositories(updated_repos) => { - if is_local { - this.update_local_worktree_buffers_git_repos( - worktree.clone(), - updated_repos, - cx, - ) - } + worktree::Event::UpdatedGitRepositories(_) => { cx.emit(Event::WorktreeUpdatedGitRepositories); } + worktree::Event::DeletedEntry(id) => cx.emit(Event::DeletedEntry(*id)), } }) .detach(); - let push_strong_handle = { - let worktree = worktree.read(cx); - self.is_shared() || worktree.is_visible() || worktree.is_remote() - }; - let handle = if push_strong_handle { - WorktreeHandle::Strong(worktree.clone()) - } else { - WorktreeHandle::Weak(worktree.downgrade()) - }; - if self.worktrees_reordered { - self.worktrees.push(handle); - } else { - let i = match self - .worktrees - .binary_search_by_key(&Some(worktree.read(cx).abs_path()), |other| { - other.upgrade().map(|worktree| worktree.read(cx).abs_path()) - }) { - Ok(i) | Err(i) => i, - }; - self.worktrees.insert(i, handle); - } - - let handle_id = worktree.entity_id(); - cx.observe_release(worktree, move |this, worktree, cx| { - let _ = this.remove_worktree(worktree.id(), cx); - cx.update_global::(|store, cx| { - store - .clear_local_settings(handle_id.as_u64() as usize, cx) - .log_err() - }); - }) - .detach(); - - cx.emit(Event::WorktreeAdded); - self.metadata_changed(cx); - } - - fn update_local_worktree_buffers( - &mut self, - worktree_handle: &Model, - changes: &[(Arc, ProjectEntryId, PathChange)], - cx: &mut ModelContext, - ) { - let snapshot = worktree_handle.read(cx).snapshot(); - self.buffer_store.clone().update(cx, |buffer_store, cx| { - for (path, entry_id, _) in changes { - if let Some((buffer, _, new_file)) = buffer_store.file_changed( - path.clone(), - *entry_id, - worktree_handle, - &snapshot, - cx, - ) { - if let Some(project_id) = self.remote_id() { - self.client - .send(proto::UpdateBufferFile { - project_id, - buffer_id: buffer.read(cx).remote_id().into(), - file: Some(new_file.to_proto(cx)), - }) - .log_err(); - } - } - } + self.worktree_store.update(cx, |worktree_store, cx| { + worktree_store.add(worktree, cx); }); + self.metadata_changed(cx); } fn update_local_worktree_language_servers( @@ -8225,138 +8082,6 @@ impl Project { } } - fn update_local_worktree_buffers_git_repos( - &mut self, - worktree_handle: Model, - changed_repos: &UpdatedGitRepositoriesSet, - cx: &mut ModelContext, - ) { - debug_assert!(worktree_handle.read(cx).is_local()); - - // Identify the loading buffers whose containing repository that has changed. 
- let future_buffers = self - .buffer_store - .read(cx) - .loading_buffers() - .filter_map(|(project_path, receiver)| { - if project_path.worktree_id != worktree_handle.read(cx).id() { - return None; - } - let path = &project_path.path; - changed_repos - .iter() - .find(|(work_dir, _)| path.starts_with(work_dir))?; - let path = path.clone(); - let abs_path = worktree_handle.read(cx).absolutize(&path).ok()?; - Some(async move { - BufferStore::wait_for_loading_buffer(receiver) - .await - .ok() - .map(|buffer| (buffer, path, abs_path)) - }) - }) - .collect::>(); - - // Identify the current buffers whose containing repository has changed. - let current_buffers = self - .buffer_store - .read(cx) - .buffers() - .filter_map(|buffer| { - let file = File::from_dyn(buffer.read(cx).file())?; - if file.worktree != worktree_handle { - return None; - } - let path = file.path(); - changed_repos - .iter() - .find(|(work_dir, _)| path.starts_with(work_dir))?; - Some((buffer, path.clone(), file.abs_path(cx))) - }) - .collect::>(); - - if future_buffers.len() + current_buffers.len() == 0 { - return; - } - - let remote_id = self.remote_id(); - let client = self.client.clone(); - let fs = self.fs.clone(); - cx.spawn(move |_, mut cx| async move { - // Wait for all of the buffers to load. - let future_buffers = future_buffers.collect::>().await; - - // Reload the diff base for every buffer whose containing git repository has changed. - let snapshot = - worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?; - let diff_bases_by_buffer = cx - .background_executor() - .spawn(async move { - let mut diff_base_tasks = future_buffers - .into_iter() - .flatten() - .chain(current_buffers) - .filter_map(|(buffer, path, abs_path)| { - let (repo_entry, local_repo_entry) = snapshot.repo_for_path(&path)?; - Some((buffer, path, abs_path, repo_entry, local_repo_entry)) - }) - .map(|(buffer, path, abs_path, repo, local_repo_entry)| { - let fs = fs.clone(); - let snapshot = snapshot.clone(); - async move { - let abs_path_metadata = fs - .metadata(&abs_path) - .await - .with_context(|| { - format!("loading file and FS metadata for {path:?}") - }) - .log_err() - .flatten()?; - let base_text = if abs_path_metadata.is_dir - || abs_path_metadata.is_symlink - { - None - } else { - let relative_path = repo.relativize(&snapshot, &path).ok()?; - local_repo_entry.repo().load_index_text(&relative_path) - }; - Some((buffer, base_text)) - } - }) - .collect::>(); - - let mut diff_bases = Vec::with_capacity(diff_base_tasks.len()); - while let Some(diff_base) = diff_base_tasks.next().await { - if let Some(diff_base) = diff_base { - diff_bases.push(diff_base); - } - } - diff_bases - }) - .await; - - // Assign the new diff bases on all of the buffers. 
- for (buffer, diff_base) in diff_bases_by_buffer { - let buffer_id = buffer.update(&mut cx, |buffer, cx| { - buffer.set_diff_base(diff_base.clone(), cx); - buffer.remote_id().into() - })?; - if let Some(project_id) = remote_id { - client - .send(proto::UpdateDiffBase { - project_id, - buffer_id, - diff_base, - }) - .log_err(); - } - } - - anyhow::Ok(()) - }) - .detach(); - } - fn update_local_worktree_settings( &mut self, worktree: &Model, @@ -8655,13 +8380,14 @@ impl Project { full_path: &Path, cx: &AppContext, ) -> Option { - self.worktrees.iter().find_map(|worktree| { - let worktree = worktree.upgrade()?; - let worktree_root_name = worktree.read(cx).root_name(); - let relative_path = full_path.strip_prefix(worktree_root_name).ok()?; - Some(ProjectPath { - worktree_id: worktree.read(cx).id(), - path: relative_path.into(), + self.worktree_store.read_with(cx, |worktree_store, cx| { + worktree_store.worktrees().find_map(|worktree| { + let worktree_root_name = worktree.read(cx).root_name(); + let relative_path = full_path.strip_prefix(worktree_root_name).ok()?; + Some(ProjectPath { + worktree_id: worktree.read(cx).id(), + path: relative_path.into(), + }) }) }) } @@ -9106,12 +8832,8 @@ impl Project { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let worktree = this.update(&mut cx, |this, cx| { - let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - this.worktree_for_id(worktree_id, cx) - .ok_or_else(|| anyhow!("worktree not found")) - })??; - Worktree::handle_create_entry(worktree, envelope.payload, cx).await + let worktree_store = this.update(&mut cx, |this, _| this.worktree_store.clone())?; + WorktreeStore::handle_create_project_entry(worktree_store, envelope, cx).await } async fn handle_rename_project_entry( @@ -9119,12 +8841,8 @@ impl Project { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id); - let worktree = this.update(&mut cx, |this, cx| { - this.worktree_for_entry(entry_id, cx) - .ok_or_else(|| anyhow!("worktree not found")) - })??; - Worktree::handle_rename_entry(worktree, envelope.payload, cx).await + let worktree_store = this.update(&mut cx, |this, _| this.worktree_store.clone())?; + WorktreeStore::handle_rename_project_entry(worktree_store, envelope, cx).await } async fn handle_copy_project_entry( @@ -9132,12 +8850,8 @@ impl Project { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id); - let worktree = this.update(&mut cx, |this, cx| { - this.worktree_for_entry(entry_id, cx) - .ok_or_else(|| anyhow!("worktree not found")) - })??; - Worktree::handle_copy_entry(worktree, envelope.payload, cx).await + let worktree_store = this.update(&mut cx, |this, _| this.worktree_store.clone())?; + WorktreeStore::handle_copy_project_entry(worktree_store, envelope, cx).await } async fn handle_delete_project_entry( @@ -9145,13 +8859,8 @@ impl Project { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id); - let worktree = this.update(&mut cx, |this, cx| { - this.worktree_for_entry(entry_id, cx) - .ok_or_else(|| anyhow!("worktree not found")) - })??; - this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)))?; - Worktree::handle_delete_entry(worktree, envelope.payload, cx).await + let worktree_store = this.update(&mut cx, |this, _| this.worktree_store.clone())?; + 
WorktreeStore::handle_delete_project_entry(worktree_store, envelope, cx).await } async fn handle_expand_project_entry( @@ -9159,11 +8868,8 @@ impl Project { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id); - let worktree = this - .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))? - .ok_or_else(|| anyhow!("invalid request"))?; - Worktree::handle_expand_entry(worktree, envelope.payload, cx).await + let worktree_store = this.update(&mut cx, |this, _| this.worktree_store.clone())?; + WorktreeStore::handle_expand_project_entry(worktree_store, envelope, cx).await } async fn handle_update_diagnostic_summary( @@ -9327,7 +9033,7 @@ impl Project { this.buffer_store.update(cx, |buffer_store, cx| { buffer_store.handle_create_buffer_for_peer( envelope, - this.worktrees(), + this.worktrees(cx).collect::>().into_iter(), this.replica_id(), this.capability(), cx, @@ -9391,20 +9097,12 @@ impl Project { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { - let (buffer_store, worktree, project_id) = this.update(&mut cx, |this, cx| { + let (buffer_store, project_id) = this.update(&mut cx, |this, _| { let buffer_store = this.buffer_store.clone(); let project_id = this.remote_id().context("not connected")?; - let worktree = if let Some(path) = &envelope.payload.new_path { - Some( - this.worktree_for_id(WorktreeId::from_proto(path.worktree_id), cx) - .context("worktree does not exist")?, - ) - } else { - None - }; - anyhow::Ok((buffer_store, worktree, project_id)) + anyhow::Ok((buffer_store, project_id)) })??; - BufferStore::handle_save_buffer(buffer_store, project_id, worktree, envelope, cx).await + BufferStore::handle_save_buffer(buffer_store, project_id, envelope, cx).await } async fn handle_reload_buffers( @@ -10404,7 +10102,7 @@ impl Project { } pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec { - self.worktrees() + self.worktrees(cx) .map(|worktree| { let worktree = worktree.read(cx); proto::WorktreeMetadata { @@ -10422,43 +10120,16 @@ impl Project { worktrees: Vec, cx: &mut ModelContext, ) -> Result<()> { - let replica_id = self.replica_id(); - let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?; - - let mut old_worktrees_by_id = self - .worktrees - .drain(..) 
- .filter_map(|worktree| { - let worktree = worktree.upgrade()?; - Some((worktree.read(cx).id(), worktree)) - }) - .collect::>(); - - for worktree in worktrees { - if let Some(old_worktree) = - old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id)) - { - self.worktrees.push(WorktreeHandle::Strong(old_worktree)); - } else { - self.add_worktree( - &Worktree::remote( - remote_id, - replica_id, - worktree, - self.client.clone().into(), - cx, - ), - cx, - ); - } - } - self.metadata_changed(cx); - for id in old_worktrees_by_id.keys() { - cx.emit(Event::WorktreeRemoved(*id)); - } - - Ok(()) + self.worktree_store.update(cx, |worktree_store, cx| { + worktree_store.set_worktrees_from_proto( + worktrees, + self.replica_id(), + self.remote_id().ok_or_else(|| anyhow!("invalid project"))?, + self.client.clone().into(), + cx, + ) + }) } fn set_collaborators_from_proto( @@ -10570,22 +10241,8 @@ impl Project { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result<()> { - let version = deserialize_version(&envelope.payload.version); - let buffer_id = BufferId::new(envelope.payload.buffer_id)?; - let mtime = envelope.payload.mtime.map(|time| time.into()); - - this.update(&mut cx, |this, cx| { - let buffer = this - .buffer_store - .read(cx) - .get_possibly_incomplete(buffer_id); - if let Some(buffer) = buffer { - buffer.update(cx, |buffer, cx| { - buffer.did_save(version, mtime, cx); - }); - } - Ok(()) - })? + let buffer_store = this.update(&mut cx, |this, _| this.buffer_store.clone())?; + BufferStore::handle_buffer_saved(buffer_store, envelope, cx).await } async fn handle_buffer_reloaded( @@ -10593,26 +10250,8 @@ impl Project { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result<()> { - let payload = envelope.payload; - let version = deserialize_version(&payload.version); - let line_ending = deserialize_line_ending( - proto::LineEnding::from_i32(payload.line_ending) - .ok_or_else(|| anyhow!("missing line ending"))?, - ); - let mtime = payload.mtime.map(|time| time.into()); - let buffer_id = BufferId::new(payload.buffer_id)?; - this.update(&mut cx, |this, cx| { - if let Some(buffer) = this - .buffer_store - .read(cx) - .get_possibly_incomplete(buffer_id) - { - buffer.update(cx, |buffer, cx| { - buffer.did_reload(version, line_ending, mtime, cx); - }); - } - Ok(()) - })? 
+ let buffer_store = this.update(&mut cx, |this, _| this.buffer_store.clone())?; + BufferStore::handle_buffer_reloaded(buffer_store, envelope, cx).await } #[allow(clippy::type_complexity)] @@ -10765,14 +10404,10 @@ impl Project { pub fn supplementary_language_servers( &self, - ) -> impl '_ - + Iterator< - Item = ( - &LanguageServerId, - &(LanguageServerName, Arc), - ), - > { - self.supplementary_language_servers.iter() + ) -> impl '_ + Iterator { + self.supplementary_language_servers + .iter() + .map(|(id, (name, _))| (id, name)) } pub fn language_server_adapter_for_id( @@ -11017,7 +10652,7 @@ impl Project { fn task_cwd(&self, cx: &AppContext) -> anyhow::Result> { let available_worktrees = self - .worktrees() + .worktrees(cx) .filter(|worktree| { let worktree = worktree.read(cx); worktree.is_visible() @@ -11381,22 +11016,6 @@ fn glob_literal_prefix(glob: &str) -> &str { &glob[..literal_end] } -impl WorktreeHandle { - pub fn upgrade(&self) -> Option> { - match self { - WorktreeHandle::Strong(handle) => Some(handle.clone()), - WorktreeHandle::Weak(handle) => handle.upgrade(), - } - } - - pub fn handle_id(&self) -> usize { - match self { - WorktreeHandle::Strong(handle) => handle.entity_id().as_u64() as usize, - WorktreeHandle::Weak(handle) => handle.entity_id().as_u64() as usize, - } - } -} - pub struct PathMatchCandidateSet { pub snapshot: Snapshot, pub include_ignored: bool, diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 44ef1b95a2..427b156bbd 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -80,7 +80,7 @@ async fn test_symlinks(cx: &mut gpui::TestAppContext) { let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await; project.update(cx, |project, cx| { - let tree = project.worktrees().next().unwrap().read(cx); + let tree = project.worktrees(cx).next().unwrap().read(cx); assert_eq!(tree.file_count(), 5); assert_eq!( tree.inode_for_path("fennel/grape"), @@ -124,13 +124,13 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) .await; let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await; - let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap()); + let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap()); let task_context = TaskContext::default(); cx.executor().run_until_parked(); let worktree_id = cx.update(|cx| { project.update(cx, |project, cx| { - project.worktrees().next().unwrap().read(cx).id() + project.worktrees(cx).next().unwrap().read(cx).id() }) }); let global_task_source_kind = TaskSourceKind::Worktree { @@ -734,7 +734,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them. project.update(cx, |project, cx| { - let worktree = project.worktrees().next().unwrap(); + let worktree = project.worktrees(cx).next().unwrap(); assert_eq!( worktree .read(cx) @@ -808,7 +808,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon // Now the language server has asked us to watch an ignored directory path, // so we recursively load it. 
project.update(cx, |project, cx| { - let worktree = project.worktrees().next().unwrap(); + let worktree = project.worktrees(cx).next().unwrap(); assert_eq!( worktree .read(cx) @@ -1132,7 +1132,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { }, ); - let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id()); + let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id()); // Cause worktree to start the fake language server let _buffer = project @@ -2477,7 +2477,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { ) -> Vec<(&'a Path, bool)> { project .read(cx) - .worktrees() + .worktrees(cx) .map(|worktree| { let worktree = worktree.read(cx); ( @@ -2821,7 +2821,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) .await; let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; - let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap()); + let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap()); let buffer = project .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx)) .await @@ -2876,7 +2876,7 @@ async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) { .await; let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; - let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap()); + let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap()); let buffer = project .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx)) .await @@ -2978,7 +2978,7 @@ async fn test_save_as(cx: &mut gpui::TestAppContext) { }); project .update(cx, |project, cx| { - let worktree_id = project.worktrees().next().unwrap().read(cx).id(); + let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id(); let path = ProjectPath { worktree_id, path: Arc::from(Path::new("file1.rs")), @@ -3038,7 +3038,7 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) { }; let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| { project.update(cx, |project, cx| { - let tree = project.worktrees().next().unwrap(); + let tree = project.worktrees(cx).next().unwrap(); tree.read(cx) .entry_for_path(path) .unwrap_or_else(|| panic!("no entry for path {}", path)) @@ -3056,7 +3056,7 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) { let file4_id = id_for_path("b/c/file4", cx); // Create a remote copy of this worktree. 
- let tree = project.update(cx, |project, _| project.worktrees().next().unwrap()); + let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap()); let metadata = tree.update(cx, |tree, _| tree.metadata_proto()); let updates = Arc::new(Mutex::new(Vec::new())); @@ -3173,12 +3173,12 @@ async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) { .await; let project = Project::test(fs, [Path::new("/dir")], cx).await; - let tree = project.update(cx, |project, _| project.worktrees().next().unwrap()); + let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap()); let tree_id = tree.update(cx, |tree, _| tree.id()); let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| { project.update(cx, |project, cx| { - let tree = project.worktrees().next().unwrap(); + let tree = project.worktrees(cx).next().unwrap(); tree.read(cx) .entry_for_path(path) .unwrap_or_else(|| panic!("no entry for path {}", path)) @@ -4549,7 +4549,7 @@ async fn test_create_entry(cx: &mut gpui::TestAppContext) { let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await; project .update(cx, |project, cx| { - let id = project.worktrees().next().unwrap().read(cx).id(); + let id = project.worktrees(cx).next().unwrap().read(cx).id(); project.create_entry((id, "b.."), true, cx) }) .unwrap() @@ -4560,7 +4560,7 @@ async fn test_create_entry(cx: &mut gpui::TestAppContext) { // Can't create paths outside the project let result = project .update(cx, |project, cx| { - let id = project.worktrees().next().unwrap().read(cx).id(); + let id = project.worktrees(cx).next().unwrap().read(cx).id(); project.create_entry((id, "../../boop"), true, cx) }) .await; @@ -4569,7 +4569,7 @@ async fn test_create_entry(cx: &mut gpui::TestAppContext) { // Can't create paths with '..' let result = project .update(cx, |project, cx| { - let id = project.worktrees().next().unwrap().read(cx).id(); + let id = project.worktrees(cx).next().unwrap().read(cx).id(); project.create_entry((id, "four/../beep"), true, cx) }) .await; @@ -4592,7 +4592,7 @@ async fn test_create_entry(cx: &mut gpui::TestAppContext) { // And we cannot open buffers with '..' 
     let result = project
         .update(cx, |project, cx| {
-            let id = project.worktrees().next().unwrap().read(cx).id();
+            let id = project.worktrees(cx).next().unwrap().read(cx).id();
             project.open_buffer((id, "../c.rs"), cx)
         })
         .await;
diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs
new file mode 100644
index 0000000000..dc1a295194
--- /dev/null
+++ b/crates/project/src/worktree_store.rs
@@ -0,0 +1,311 @@
+use anyhow::{anyhow, Context as _, Result};
+use collections::HashMap;
+use gpui::{AppContext, AsyncAppContext, EntityId, EventEmitter, Model, ModelContext, WeakModel};
+use rpc::{
+    proto::{self, AnyProtoClient},
+    TypedEnvelope,
+};
+use text::ReplicaId;
+use worktree::{ProjectEntryId, Worktree, WorktreeId};
+
+pub struct WorktreeStore {
+    is_shared: bool,
+    worktrees: Vec<WorktreeHandle>,
+    worktrees_reordered: bool,
+}
+
+pub enum WorktreeStoreEvent {
+    WorktreeAdded(Model<Worktree>),
+    WorktreeRemoved(EntityId, WorktreeId),
+    WorktreeOrderChanged,
+}
+
+impl EventEmitter<WorktreeStoreEvent> for WorktreeStore {}
+
+impl WorktreeStore {
+    pub fn new(retain_worktrees: bool) -> Self {
+        Self {
+            is_shared: retain_worktrees,
+            worktrees: Vec::new(),
+            worktrees_reordered: false,
+        }
+    }
+
+    /// Iterates through all worktrees, including ones that don't appear in the project panel
+    pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator<Item = Model<Worktree>> {
+        self.worktrees
+            .iter()
+            .filter_map(move |worktree| worktree.upgrade())
+    }
+
+    /// Iterates through all user-visible worktrees, the ones that appear in the project panel.
+    pub fn visible_worktrees<'a>(
+        &'a self,
+        cx: &'a AppContext,
+    ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
+        self.worktrees()
+            .filter(|worktree| worktree.read(cx).is_visible())
+    }
+
+    pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
+        self.worktrees()
+            .find(|worktree| worktree.read(cx).id() == id)
+    }
+
+    pub fn worktree_for_entry(
+        &self,
+        entry_id: ProjectEntryId,
+        cx: &AppContext,
+    ) -> Option<Model<Worktree>> {
+        self.worktrees()
+            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
+    }
+
+    pub fn add(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
+        let push_strong_handle = self.is_shared || worktree.read(cx).is_visible();
+        let handle = if push_strong_handle {
+            WorktreeHandle::Strong(worktree.clone())
+        } else {
+            WorktreeHandle::Weak(worktree.downgrade())
+        };
+        if self.worktrees_reordered {
+            self.worktrees.push(handle);
+        } else {
+            let i = match self
+                .worktrees
+                .binary_search_by_key(&Some(worktree.read(cx).abs_path()), |other| {
+                    other.upgrade().map(|worktree| worktree.read(cx).abs_path())
+                }) {
+                Ok(i) | Err(i) => i,
+            };
+            self.worktrees.insert(i, handle);
+        }
+
+        cx.emit(WorktreeStoreEvent::WorktreeAdded(worktree.clone()));
+
+        let handle_id = worktree.entity_id();
+        cx.observe_release(worktree, move |_, worktree, cx| {
+            cx.emit(WorktreeStoreEvent::WorktreeRemoved(
+                handle_id,
+                worktree.id(),
+            ));
+        })
+        .detach();
+    }
+
+    pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
+        self.worktrees.retain(|worktree| {
+            if let Some(worktree) = worktree.upgrade() {
+                worktree.read(cx).id() != id_to_remove
+            } else {
+                false
+            }
+        });
+    }
+
+    pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) {
+        self.worktrees_reordered = worktrees_reordered;
+    }
+
+    pub fn set_worktrees_from_proto(
+        &mut self,
+        worktrees: Vec<proto::WorktreeMetadata>,
+        replica_id: ReplicaId,
+        remote_id: u64,
+        client: AnyProtoClient,
+        cx: &mut ModelContext<Self>,
+    ) -> Result<()> {
+        let mut old_worktrees_by_id = self
+            .worktrees
+            .drain(..)
+            .filter_map(|worktree| {
+                let worktree = worktree.upgrade()?;
+                Some((worktree.read(cx).id(), worktree))
+            })
+            .collect::<HashMap<_, _>>();
+
+        for worktree in worktrees {
+            if let Some(old_worktree) =
+                old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
+            {
+                self.worktrees.push(WorktreeHandle::Strong(old_worktree));
+            } else {
+                self.add(
+                    &Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx),
+                    cx,
+                );
+            }
+        }
+
+        Ok(())
+    }
+
+    pub fn move_worktree(
+        &mut self,
+        source: WorktreeId,
+        destination: WorktreeId,
+        cx: &mut ModelContext<Self>,
+    ) -> Result<()> {
+        if source == destination {
+            return Ok(());
+        }
+
+        let mut source_index = None;
+        let mut destination_index = None;
+        for (i, worktree) in self.worktrees.iter().enumerate() {
+            if let Some(worktree) = worktree.upgrade() {
+                let worktree_id = worktree.read(cx).id();
+                if worktree_id == source {
+                    source_index = Some(i);
+                    if destination_index.is_some() {
+                        break;
+                    }
+                } else if worktree_id == destination {
+                    destination_index = Some(i);
+                    if source_index.is_some() {
+                        break;
+                    }
+                }
+            }
+        }
+
+        let source_index =
+            source_index.with_context(|| format!("Missing worktree for id {source}"))?;
+        let destination_index =
+            destination_index.with_context(|| format!("Missing worktree for id {destination}"))?;
+
+        if source_index == destination_index {
+            return Ok(());
+        }
+
+        let worktree_to_move = self.worktrees.remove(source_index);
+        self.worktrees.insert(destination_index, worktree_to_move);
+        self.worktrees_reordered = true;
+        cx.emit(WorktreeStoreEvent::WorktreeOrderChanged);
+        cx.notify();
+        Ok(())
+    }
+
+    pub fn disconnected_from_host(&mut self, cx: &mut AppContext) {
+        for worktree in &self.worktrees {
+            if let Some(worktree) = worktree.upgrade() {
+                worktree.update(cx, |worktree, _| {
+                    if let Some(worktree) = worktree.as_remote_mut() {
+                        worktree.disconnected_from_host();
+                    }
+                });
+            }
+        }
+    }
+
+    pub fn set_shared(&mut self, is_shared: bool, cx: &mut ModelContext<Self>) {
+        self.is_shared = is_shared;
+
+        // When shared, retain all worktrees
+        if is_shared {
+            for worktree_handle in self.worktrees.iter_mut() {
+                match worktree_handle {
+                    WorktreeHandle::Strong(_) => {}
+                    WorktreeHandle::Weak(worktree) => {
+                        if let Some(worktree) = worktree.upgrade() {
+                            *worktree_handle = WorktreeHandle::Strong(worktree);
+                        }
+                    }
+                }
+            }
+        }
+        // When not shared, only retain the visible worktrees
+        else {
+            for worktree_handle in self.worktrees.iter_mut() {
+                if let WorktreeHandle::Strong(worktree) = worktree_handle {
+                    let is_visible = worktree.update(cx, |worktree, _| {
+                        worktree.stop_observing_updates();
+                        worktree.is_visible()
+                    });
+                    if !is_visible {
+                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
+                    }
+                }
+            }
+        }
+    }
+
+    pub async fn handle_create_project_entry(
+        this: Model<Self>,
+        envelope: TypedEnvelope<proto::CreateProjectEntry>,
+        mut cx: AsyncAppContext,
+    ) -> Result<proto::ProjectEntryResponse> {
+        let worktree = this.update(&mut cx, |this, cx| {
+            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+            this.worktree_for_id(worktree_id, cx)
+                .ok_or_else(|| anyhow!("worktree not found"))
+        })??;
+        Worktree::handle_create_entry(worktree, envelope.payload, cx).await
+    }
+
+    pub async fn handle_rename_project_entry(
+        this: Model<Self>,
+        envelope: TypedEnvelope<proto::RenameProjectEntry>,
+        mut cx: AsyncAppContext,
+    ) -> Result<proto::ProjectEntryResponse> {
+        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
+        let worktree = this.update(&mut cx, |this, cx| {
+            this.worktree_for_entry(entry_id, cx)
+                .ok_or_else(|| anyhow!("worktree not found"))
+        })??;
+        Worktree::handle_rename_entry(worktree, envelope.payload, cx).await
+    }
+
+    pub async fn handle_copy_project_entry(
+        this: Model<Self>,
+        envelope: TypedEnvelope<proto::CopyProjectEntry>,
+        mut cx: AsyncAppContext,
+    ) -> Result<proto::ProjectEntryResponse> {
+        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
+        let worktree = this.update(&mut cx, |this, cx| {
+            this.worktree_for_entry(entry_id, cx)
+                .ok_or_else(|| anyhow!("worktree not found"))
+        })??;
+        Worktree::handle_copy_entry(worktree, envelope.payload, cx).await
+    }
+
+    pub async fn handle_delete_project_entry(
+        this: Model<Self>,
+        envelope: TypedEnvelope<proto::DeleteProjectEntry>,
+        mut cx: AsyncAppContext,
+    ) -> Result<proto::ProjectEntryResponse> {
+        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
+        let worktree = this.update(&mut cx, |this, cx| {
+            this.worktree_for_entry(entry_id, cx)
+                .ok_or_else(|| anyhow!("worktree not found"))
+        })??;
+        Worktree::handle_delete_entry(worktree, envelope.payload, cx).await
+    }
+
+    pub async fn handle_expand_project_entry(
+        this: Model<Self>,
+        envelope: TypedEnvelope<proto::ExpandProjectEntry>,
+        mut cx: AsyncAppContext,
+    ) -> Result<proto::ExpandProjectEntryResponse> {
+        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
+        let worktree = this
+            .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
+            .ok_or_else(|| anyhow!("invalid request"))?;
+        Worktree::handle_expand_entry(worktree, envelope.payload, cx).await
+    }
+}
+
+#[derive(Clone)]
+enum WorktreeHandle {
+    Strong(Model<Worktree>),
+    Weak(WeakModel<Worktree>),
+}
+
+impl WorktreeHandle {
+    fn upgrade(&self) -> Option<Model<Worktree>> {
+        match self {
+            WorktreeHandle::Strong(handle) => Some(handle.clone()),
+            WorktreeHandle::Weak(handle) => handle.upgrade(),
+        }
+    }
+}
diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs
index f15ab4f6a9..c93b88ce05 100644
--- a/crates/project_panel/src/project_panel.rs
+++ b/crates/project_panel/src/project_panel.rs
@@ -4173,7 +4173,7 @@ mod tests {
         let project = Project::test(fs.clone(), ["/project_root".as_ref()], cx).await;
         let worktree_id =
-            cx.update(|cx| project.read(cx).worktrees().next().unwrap().read(cx).id());
+            cx.update(|cx| project.read(cx).worktrees(cx).next().unwrap().read(cx).id());
         let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
         let cx = &mut VisualTestContext::from_window(*workspace, cx);
         let panel = workspace
@@ -4969,7 +4969,7 @@ mod tests {
     ) {
         let path = path.as_ref();
         panel.update(cx, |panel, cx| {
-            for worktree in panel.project.read(cx).worktrees().collect::<Vec<_>>() {
+            for worktree in panel.project.read(cx).worktrees(cx).collect::<Vec<_>>() {
                 let worktree = worktree.read(cx);
                 if let Ok(relative_path) = path.strip_prefix(worktree.root_name()) {
                     let entry_id = worktree.entry_for_path(relative_path).unwrap().id;
@@ -4984,7 +4984,7 @@ mod tests {
     fn select_path(panel: &View<ProjectPanel>, path: impl AsRef<Path>, cx: &mut VisualTestContext) {
         let path = path.as_ref();
         panel.update(cx, |panel, cx| {
-            for worktree in panel.project.read(cx).worktrees().collect::<Vec<_>>() {
+            for worktree in panel.project.read(cx).worktrees(cx).collect::<Vec<_>>() {
                 let worktree = worktree.read(cx);
                 if let Ok(relative_path) = path.strip_prefix(worktree.root_name()) {
                     let entry_id = worktree.entry_for_path(relative_path).unwrap().id;
@@ -5006,7 +5006,7 @@ mod tests {
     ) -> Option<ProjectEntryId> {
         let path = path.as_ref();
         panel.update(cx, |panel, cx| {
-            for worktree in panel.project.read(cx).worktrees().collect::<Vec<_>>() {
+            for worktree in panel.project.read(cx).worktrees(cx).collect::<Vec<_>>() {
                 let worktree = worktree.read(cx);
                 if let Ok(relative_path) = path.strip_prefix(worktree.root_name()) {
                     return
worktree.entry_for_path(relative_path).map(|entry| entry.id); diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 03892573ea..4459a30bc9 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -534,9 +534,12 @@ impl SshClientState { } let mut server_binary_exists = false; - if let Ok(installed_version) = run_cmd(self.ssh_command(&dst_path).arg("version")).await { - if installed_version.trim() == version.to_string() { - server_binary_exists = true; + if cfg!(not(debug_assertions)) { + if let Ok(installed_version) = run_cmd(self.ssh_command(&dst_path).arg("version")).await + { + if installed_version.trim() == version.to_string() { + server_binary_exists = true; + } } } diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index ff7d1f8576..d0096bc3fd 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -32,6 +32,7 @@ remote.workspace = true rpc.workspace = true settings.workspace = true smol.workspace = true +util.workspace = true worktree.workspace = true [dev-dependencies] diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index e3c1f91492..5ab35144d7 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -1,7 +1,11 @@ -use anyhow::{Context as _, Result}; +use anyhow::Result; use fs::Fs; use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext}; -use project::{buffer_store::BufferStore, ProjectPath, WorktreeId, WorktreeSettings}; +use project::{ + buffer_store::{BufferStore, BufferStoreEvent}, + worktree_store::WorktreeStore, + ProjectPath, WorktreeId, WorktreeSettings, +}; use remote::SshSession; use rpc::{ proto::{self, AnyProtoClient, PeerId}, @@ -12,6 +16,7 @@ use std::{ path::{Path, PathBuf}, sync::{atomic::AtomicUsize, Arc}, }; +use util::ResultExt as _; use worktree::Worktree; const PEER_ID: PeerId = PeerId { owner_id: 0, id: 0 }; @@ -20,7 +25,7 @@ const PROJECT_ID: u64 = 0; pub struct HeadlessProject { pub fs: Arc, pub session: AnyProtoClient, - pub worktrees: Vec>, + pub worktree_store: Model, pub buffer_store: Model, pub next_entry_id: Arc, } @@ -34,27 +39,45 @@ impl HeadlessProject { pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { let this = cx.weak_model(); + let worktree_store = cx.new_model(|_| WorktreeStore::new(true)); + let buffer_store = cx.new_model(|cx| BufferStore::new(worktree_store.clone(), true, cx)); + cx.subscribe(&buffer_store, Self::on_buffer_store_event) + .detach(); + session.add_request_handler(this.clone(), Self::handle_add_worktree); session.add_request_handler(this.clone(), Self::handle_open_buffer_by_path); session.add_request_handler(this.clone(), Self::handle_update_buffer); session.add_request_handler(this.clone(), Self::handle_save_buffer); + session.add_request_handler( + worktree_store.downgrade(), + WorktreeStore::handle_create_project_entry, + ); + session.add_request_handler( + worktree_store.downgrade(), + WorktreeStore::handle_rename_project_entry, + ); + session.add_request_handler( + worktree_store.downgrade(), + WorktreeStore::handle_copy_project_entry, + ); + session.add_request_handler( + worktree_store.downgrade(), + WorktreeStore::handle_delete_project_entry, + ); + session.add_request_handler( + worktree_store.downgrade(), + WorktreeStore::handle_expand_project_entry, + ); HeadlessProject { session: session.into(), fs, - worktrees: Vec::new(), - buffer_store: 
cx.new_model(|_| BufferStore::new(true)),
+            worktree_store,
+            buffer_store,
             next_entry_id: Default::default(),
         }
     }
 
-    fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
-        self.worktrees
-            .iter()
-            .find(|worktree| worktree.read(cx).id() == id)
-            .cloned()
-    }
-
     pub async fn handle_add_worktree(
         this: Model<Self>,
         message: TypedEnvelope<proto::AddWorktree>,
@@ -74,7 +97,9 @@ impl HeadlessProject {
         this.update(&mut cx, |this, cx| {
             let session = this.session.clone();
-            this.worktrees.push(worktree.clone());
+            this.worktree_store.update(cx, |worktree_store, cx| {
+                worktree_store.add(&worktree, cx);
+            });
             worktree.update(cx, |worktree, cx| {
                 worktree.observe_updates(0, cx, move |update| {
                     session.send(update).ok();
@@ -104,19 +129,8 @@ impl HeadlessProject {
         envelope: TypedEnvelope<proto::SaveBuffer>,
         mut cx: AsyncAppContext,
     ) -> Result<proto::BufferSaved> {
-        let (buffer_store, worktree) = this.update(&mut cx, |this, cx| {
-            let buffer_store = this.buffer_store.clone();
-            let worktree = if let Some(path) = &envelope.payload.new_path {
-                Some(
-                    this.worktree_for_id(WorktreeId::from_proto(path.worktree_id), cx)
-                        .context("worktree does not exist")?,
-                )
-            } else {
-                None
-            };
-            anyhow::Ok((buffer_store, worktree))
-        })??;
-        BufferStore::handle_save_buffer(buffer_store, PROJECT_ID, worktree, envelope, cx).await
+        let buffer_store = this.update(&mut cx, |this, _| this.buffer_store.clone())?;
+        BufferStore::handle_save_buffer(buffer_store, PROJECT_ID, envelope, cx).await
     }
 
     pub async fn handle_open_buffer_by_path(
@@ -126,9 +140,6 @@ impl HeadlessProject {
     ) -> Result<proto::OpenBufferResponse> {
         let worktree_id = WorktreeId::from_proto(message.payload.worktree_id);
         let (buffer_store, buffer, session) = this.update(&mut cx, |this, cx| {
-            let worktree = this
-                .worktree_for_id(worktree_id, cx)
-                .context("no such worktree")?;
             let buffer_store = this.buffer_store.clone();
             let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                 buffer_store.open_buffer(
@@ -136,7 +147,6 @@ impl HeadlessProject {
                         worktree_id,
                         path: PathBuf::from(message.payload.path).into(),
                     },
-                    worktree,
                     cx,
                 )
             });
@@ -163,4 +173,41 @@ impl HeadlessProject {
             buffer_id: buffer_id.to_proto(),
         })
     }
+
+    pub fn on_buffer_store_event(
+        &mut self,
+        _: Model<BufferStore>,
+        event: &BufferStoreEvent,
+        cx: &mut ModelContext<Self>,
+    ) {
+        match event {
+            BufferStoreEvent::LocalBufferUpdated { buffer } => {
+                let buffer = buffer.read(cx);
+                let buffer_id = buffer.remote_id();
+                let Some(new_file) = buffer.file() else {
+                    return;
+                };
+                self.session
+                    .send(proto::UpdateBufferFile {
+                        project_id: 0,
+                        buffer_id: buffer_id.into(),
+                        file: Some(new_file.to_proto(cx)),
+                    })
+                    .log_err();
+            }
+            BufferStoreEvent::DiffBaseUpdated { buffer } => {
+                let buffer = buffer.read(cx);
+                let buffer_id = buffer.remote_id();
+                let diff_base = buffer.diff_base();
+                self.session
+                    .send(proto::UpdateDiffBase {
+                        project_id: 0,
+                        buffer_id: buffer_id.to_proto(),
+                        diff_base: diff_base.map(|b| b.to_string()),
+                    })
+                    .log_err();
+            }
+            _ => {}
+        }
+    }
 }
diff --git a/crates/remote_server/src/main.rs b/crates/remote_server/src/main.rs
index 529db89ad1..00fd3270c1 100644
--- a/crates/remote_server/src/main.rs
+++ b/crates/remote_server/src/main.rs
@@ -11,7 +11,6 @@ use std::{env, io, mem, process, sync::Arc};
 
 fn main() {
     env::set_var("RUST_BACKTRACE", "1");
-    env::set_var("RUST_LOG", "remote=trace");
     let subcommand = std::env::args().nth(1);
 
     match subcommand.as_deref() {
diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs
index 08d1e9cc21..15589261ec 100644
---
a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -12,15 +12,23 @@ use serde_json::json; use settings::SettingsStore; use std::{path::Path, sync::Arc}; +fn init_logger() { + if std::env::var("RUST_LOG").is_ok() { + env_logger::try_init().ok(); + } +} + #[gpui::test] async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let (client_ssh, server_ssh) = SshSession::fake(cx, server_cx); + init_logger(); let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( "/code", json!({ "project1": { + ".git": {}, "README.md": "# project 1", "src": { "lib.rs": "fn one() -> usize { 1 }" @@ -32,6 +40,10 @@ async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppCon }), ) .await; + fs.set_index_for_repo( + Path::new("/code/project1/.git"), + &[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())], + ); server_cx.update(HeadlessProject::init); let _headless_project = @@ -52,6 +64,7 @@ async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppCon assert_eq!( worktree.paths().map(Arc::as_ref).collect::>(), vec![ + Path::new(".git"), Path::new("README.md"), Path::new("src"), Path::new("src/lib.rs"), @@ -69,6 +82,10 @@ async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppCon .unwrap(); buffer.update(cx, |buffer, cx| { assert_eq!(buffer.text(), "fn one() -> usize { 1 }"); + assert_eq!( + buffer.diff_base().unwrap().to_string(), + "fn one() -> usize { 0 }" + ); let ix = buffer.text().find('1').unwrap(); buffer.edit([(ix..ix + 1, "100")], None, cx); }); @@ -76,7 +93,7 @@ async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppCon // The user saves the buffer. The new contents are written to the // remote filesystem. project - .update(cx, |project, cx| project.save_buffer(buffer, cx)) + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) .await .unwrap(); assert_eq!( @@ -98,6 +115,7 @@ async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppCon assert_eq!( worktree.paths().map(Arc::as_ref).collect::>(), vec![ + Path::new(".git"), Path::new("README.md"), Path::new("src"), Path::new("src/lib.rs"), @@ -105,6 +123,31 @@ async fn test_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppCon ] ); }); + + // A file that is currently open in a buffer is renamed. 
+ fs.rename( + "/code/project1/src/lib.rs".as_ref(), + "/code/project1/src/lib2.rs".as_ref(), + Default::default(), + ) + .await + .unwrap(); + cx.executor().run_until_parked(); + buffer.update(cx, |buffer, _| { + assert_eq!(&**buffer.file().unwrap().path(), Path::new("src/lib2.rs")); + }); + + fs.set_index_for_repo( + Path::new("/code/project1/.git"), + &[(Path::new("src/lib2.rs"), "fn one() -> usize { 100 }".into())], + ); + cx.executor().run_until_parked(); + buffer.update(cx, |buffer, _| { + assert_eq!( + buffer.diff_base().unwrap().to_string(), + "fn one() -> usize { 100 }" + ); + }); } fn build_project(ssh: Arc, cx: &mut TestAppContext) -> Model { diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 0014b20304..5f2aa2233c 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -2400,7 +2400,7 @@ pub mod tests { .await; let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; let worktree_id = project.read_with(cx, |project, cx| { - project.worktrees().next().unwrap().read(cx).id() + project.worktrees(cx).next().unwrap().read(cx).id() }); let window = cx.add_window(|cx| Workspace::test_new(project, cx)); let workspace = window.root(cx).unwrap(); @@ -2836,7 +2836,7 @@ pub mod tests { .await; let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; let worktree_id = project.update(cx, |this, cx| { - this.worktrees().next().unwrap().read(cx).id() + this.worktrees(cx).next().unwrap().read(cx).id() }); let window = cx.add_window(|cx| Workspace::test_new(project, cx)); @@ -3053,7 +3053,7 @@ pub mod tests { .await; let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; let worktree_id = project.update(cx, |this, cx| { - this.worktrees().next().unwrap().read(cx).id() + this.worktrees(cx).next().unwrap().read(cx).id() }); let window = cx.add_window(|cx| Workspace::test_new(project, cx)); let panes: Vec<_> = window diff --git a/crates/tab_switcher/src/tab_switcher_tests.rs b/crates/tab_switcher/src/tab_switcher_tests.rs index f5577878c9..c4cb9d8ecf 100644 --- a/crates/tab_switcher/src/tab_switcher_tests.rs +++ b/crates/tab_switcher/src/tab_switcher_tests.rs @@ -273,7 +273,7 @@ async fn open_buffer( ) -> Box { let project = workspace.update(cx, |workspace, _| workspace.project().clone()); let worktree_id = project.update(cx, |project, cx| { - let worktree = project.worktrees().last().expect("worktree not found"); + let worktree = project.worktrees(cx).last().expect("worktree not found"); worktree.read(cx).id() }); let project_path = ProjectPath { diff --git a/crates/tasks_ui/src/lib.rs b/crates/tasks_ui/src/lib.rs index e4bdc238f5..3d4e062cb2 100644 --- a/crates/tasks_ui/src/lib.rs +++ b/crates/tasks_ui/src/lib.rs @@ -256,7 +256,7 @@ mod tests { ); let worktree_id = project.update(cx, |project, cx| { - project.worktrees().next().unwrap().read(cx).id() + project.worktrees(cx).next().unwrap().read(cx).id() }); let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx)); diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 7d267ce0c9..39743c8ba8 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -2229,7 +2229,7 @@ impl Render for Pane { pane.child(self.render_tab_bar(cx)) }) .child({ - let has_worktrees = self.project.read(cx).worktrees().next().is_some(); + let has_worktrees = self.project.read(cx).worktrees(cx).next().is_some(); // main content div() .flex_1() diff --git 
a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index b93af61cd5..ae6f6ebf85 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1077,8 +1077,8 @@ impl Workspace { .collect::>(); if paths_order.iter().enumerate().any(|(i, &j)| i != j) { project_handle - .update(&mut cx, |project, _| { - project.set_worktrees_reordered(true); + .update(&mut cx, |project, cx| { + project.set_worktrees_reordered(true, cx); }) .log_err(); } @@ -1567,7 +1567,7 @@ impl Workspace { } pub fn worktrees<'a>(&self, cx: &'a AppContext) -> impl 'a + Iterator> { - self.project.read(cx).worktrees() + self.project.read(cx).worktrees(cx) } pub fn visible_worktrees<'a>( @@ -1861,7 +1861,7 @@ impl Workspace { ) -> Task> { let window = cx.window_handle().downcast::(); let is_remote = self.project.read(cx).is_remote(); - let has_worktree = self.project.read(cx).worktrees().next().is_some(); + let has_worktree = self.project.read(cx).worktrees(cx).next().is_some(); let has_dirty_items = self.items(cx).any(|item| item.is_dirty(cx)); let window_to_replace = if replace_current_window { @@ -5685,7 +5685,7 @@ mod tests { let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx)); let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone()); let worktree_id = project.update(cx, |project, cx| { - project.worktrees().next().unwrap().read(cx).id() + project.worktrees(cx).next().unwrap().read(cx).id() }); let item1 = cx.new_view(|cx| { @@ -6809,7 +6809,7 @@ mod tests { let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx)); let worktree_id = project.update(cx, |project, cx| { - project.worktrees().next().unwrap().read(cx).id() + project.worktrees(cx).next().unwrap().read(cx).id() }); let handle = workspace @@ -6872,7 +6872,7 @@ mod tests { let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx)); let worktree_id = project.update(cx, |project, cx| { - project.worktrees().next().unwrap().read(cx).id() + project.worktrees(cx).next().unwrap().read(cx).id() }); let handle = workspace diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index e168d9dfc8..6834e1017d 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -354,6 +354,7 @@ struct UpdateObservationState { pub enum Event { UpdatedEntries(UpdatedEntriesSet), UpdatedGitRepositories(UpdatedGitRepositoriesSet), + DeletedEntry(ProjectEntryId), } static EMPTY_PATH: &str = ""; @@ -738,10 +739,12 @@ impl Worktree { trash: bool, cx: &mut ModelContext, ) -> Option>> { - match self { + let task = match self { Worktree::Local(this) => this.delete_entry(entry_id, trash, cx), Worktree::Remote(this) => this.delete_entry(entry_id, trash, cx), - } + }?; + cx.emit(Event::DeletedEntry(entry_id)); + Some(task) } pub fn rename_entry( @@ -1208,25 +1211,10 @@ impl LocalWorktree { if let Some(repo_path) = repo.relativize(&snapshot, &path).log_err() { if let Some(git_repo) = snapshot.git_repositories.get(&*repo.work_directory) { let git_repo = git_repo.repo_ptr.clone(); - index_task = Some(cx.background_executor().spawn({ - let fs = fs.clone(); - let abs_path = abs_path.clone(); - async move { - let metadata = fs - .metadata(&abs_path) - .await - .with_context(|| { - format!("loading file and FS metadata for {abs_path:?}") - }) - .log_err() - .flatten()?; - if metadata.is_dir || metadata.is_symlink { - None - } else { - git_repo.load_index_text(&repo_path) - } - } - })); + 
index_task = Some( + cx.background_executor() + .spawn(async move { git_repo.load_index_text(&repo_path) }), + ); } } }