Remove dependencies from the Worktree crate and make it more focused (#12747)

The `worktree` crate mainly provides an in-memory model of a directory
and its git repositories. But because it was originally extracted from
the Project crate, it also contained lingering bits of code that were
outside of that area:
* it had a little bit of logic related to buffers (though most buffer
management lives in `project`)
* it had a *little* bit of logic for storing diagnostics (though the
vast majority of LSP and diagnostic logic lives in `project`)
* it had a little bit of logic for sending RPC messages (though the
*receiving* logic for those RPC messages lived in `project`)

In this PR, I've moved those concerns entirely to the project crate
(where they were already dealt with for the most part), so that the
worktree crate can be more focused on its main job, and have fewer
dependencies.
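
For orientation, here is a minimal, self-contained sketch of the shape that the per-worktree diagnostics state now takes on the project side. The map nesting mirrors the `diagnostic_summaries` and `diagnostics` fields added to `Project` in the diff below; the type aliases, the unit `DiagnosticEntry`, and the `ProjectDiagnostics` wrapper are simplified stand-ins for illustration, not the real types:

```rust
use std::{collections::HashMap, path::Path, sync::Arc};

// Simplified stand-ins for WorktreeId, LanguageServerId, and
// DiagnosticEntry<Unclipped<PointUtf16>>; only the nesting of the
// maps is meant to match the fields added to `Project`.
type WorktreeId = u64;
type LanguageServerId = u64;

#[derive(Clone, Copy, Default, Debug)]
struct DiagnosticSummary {
    error_count: usize,
    warning_count: usize,
}

#[derive(Clone, Debug)]
#[allow(dead_code)]
struct DiagnosticEntry; // diagnostic payload elided

#[derive(Default)]
#[allow(dead_code)]
struct ProjectDiagnostics {
    // Per worktree, per path: error/warning counts for each language server.
    summaries:
        HashMap<WorktreeId, HashMap<Arc<Path>, HashMap<LanguageServerId, DiagnosticSummary>>>,
    // Per worktree, per path: raw diagnostic entries, kept sorted by server id.
    diagnostics:
        HashMap<WorktreeId, HashMap<Arc<Path>, Vec<(LanguageServerId, Vec<DiagnosticEntry>)>>>,
}

fn main() {
    let mut state = ProjectDiagnostics::default();
    let path: Arc<Path> = Arc::from(Path::new("src/main.rs"));
    state
        .summaries
        .entry(1) // worktree id
        .or_default()
        .entry(path)
        .or_default()
        .insert(7, DiagnosticSummary { error_count: 1, warning_count: 0 });
    println!("{:#?}", state.summaries);
}
```

In the real code these maps are populated by the new `Project::update_worktree_diagnostics` and cleared in `Project::remove_worktree`, as shown in the diff below.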

Worktree no longer depends on `client` or `lsp`. It still depends on
`language`, but only because of `impl language::File for
worktree::File`.
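
As a rough illustration of that remaining edge, here is a hedged, self-contained sketch of the trait/impl relationship. The `File` fields are borrowed from the construction sites visible in the diff below, but the `language::File` stand-in trait here is heavily reduced and its method set is illustrative, not the real trait's API:

```rust
// Hypothetical two-module stand-in for the two crates, to show the
// direction of the dependency: the trait lives in `language`, the
// implementing type lives in `worktree`.
mod language {
    use std::path::Path;

    // Heavily reduced stand-in for `language::File`; the real trait
    // has a much larger surface.
    pub trait File {
        fn path(&self) -> &Path;
        fn is_private(&self) -> bool {
            false
        }
    }
}

mod worktree {
    use super::language;
    use std::{path::Path, sync::Arc};

    // Field names borrowed from the `worktree::File` construction sites
    // in the diff below (mtime, entry_id, etc. elided).
    pub struct File {
        pub path: Arc<Path>,
        pub is_private: bool,
    }

    // This impl is the reason `worktree` still depends on `language`.
    impl language::File for File {
        fn path(&self) -> &Path {
            &self.path
        }
        fn is_private(&self) -> bool {
            self.is_private
        }
    }
}

fn main() {
    use language::File as _;

    let file = worktree::File {
        path: std::path::Path::new("crates/worktree/src/worktree.rs").into(),
        is_private: false,
    };
    println!("{} (private: {})", file.path().display(), file.is_private());
}
```

The `client` and `lsp` edges, by contrast, are gone entirely, which is why those entries disappear from the crate's `Cargo.toml` and from `Cargo.lock` in the diff below.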

Release Notes:

- N/A
Max Brunsfeld 2024-06-06 11:16:58 -07:00, committed by GitHub
commit 48581167b7 (parent 00dfd217d8)
10 changed files with 554 additions and 689 deletions

Cargo.lock (generated)
View File

@ -12887,7 +12887,6 @@ name = "worktree"
version = "0.1.0"
dependencies = [
"anyhow",
"client",
"clock",
"collections",
"env_logger",
@ -12902,7 +12901,6 @@ dependencies = [
"itertools 0.11.0",
"language",
"log",
"lsp",
"parking_lot",
"postage",
"pretty_assertions",

View File

@ -83,7 +83,10 @@ async fn test_host_disconnect(
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
cx_a.background_executor.run_until_parked();
assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
assert!(worktree_a.read_with(cx_a, |tree, _| tree
.as_local()
.unwrap()
.has_update_observer()));
let workspace_b = cx_b
.add_window(|cx| Workspace::new(None, project_b.clone(), client_b.app_state.clone(), cx));
@ -120,7 +123,10 @@ async fn test_host_disconnect(
project_b.read_with(cx_b, |project, _| project.is_read_only());
assert!(worktree_a.read_with(cx_a, |tree, _| !tree.as_local().unwrap().is_shared()));
assert!(worktree_a.read_with(cx_a, |tree, _| !tree
.as_local()
.unwrap()
.has_update_observer()));
// Ensure client B's edited state is reset and that the whole window is blurred.

View File

@ -1378,7 +1378,10 @@ async fn test_unshare_project(
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
executor.run_until_parked();
assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
assert!(worktree_a.read_with(cx_a, |tree, _| tree
.as_local()
.unwrap()
.has_update_observer()));
project_b
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
@ -1403,7 +1406,10 @@ async fn test_unshare_project(
.unwrap();
executor.run_until_parked();
assert!(worktree_a.read_with(cx_a, |tree, _| !tree.as_local().unwrap().is_shared()));
assert!(worktree_a.read_with(cx_a, |tree, _| !tree
.as_local()
.unwrap()
.has_update_observer()));
assert!(project_c.read_with(cx_c, |project, _| project.is_disconnected()));
@ -1415,7 +1421,10 @@ async fn test_unshare_project(
let project_c2 = client_c.build_dev_server_project(project_id, cx_c).await;
executor.run_until_parked();
assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
assert!(worktree_a.read_with(cx_a, |tree, _| tree
.as_local()
.unwrap()
.has_update_observer()));
project_c2
.update(cx_c, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
@ -1522,7 +1531,7 @@ async fn test_project_reconnect(
executor.run_until_parked();
let worktree1_id = worktree_a1.read_with(cx_a, |worktree, _| {
assert!(worktree.as_local().unwrap().is_shared());
assert!(worktree.as_local().unwrap().has_update_observer());
worktree.id()
});
let (worktree_a2, _) = project_a1
@ -1534,7 +1543,7 @@ async fn test_project_reconnect(
executor.run_until_parked();
let worktree2_id = worktree_a2.read_with(cx_a, |tree, _| {
assert!(tree.as_local().unwrap().is_shared());
assert!(tree.as_local().unwrap().has_update_observer());
tree.id()
});
executor.run_until_parked();
@ -1568,7 +1577,7 @@ async fn test_project_reconnect(
});
worktree_a1.read_with(cx_a, |tree, _| {
assert!(tree.as_local().unwrap().is_shared())
assert!(tree.as_local().unwrap().has_update_observer())
});
// While client A is disconnected, add and remove files from client A's project.
@ -1611,7 +1620,7 @@ async fn test_project_reconnect(
.await;
let worktree3_id = worktree_a3.read_with(cx_a, |tree, _| {
assert!(!tree.as_local().unwrap().is_shared());
assert!(!tree.as_local().unwrap().has_update_observer());
tree.id()
});
executor.run_until_parked();
@ -1634,7 +1643,11 @@ async fn test_project_reconnect(
project_a1.read_with(cx_a, |project, cx| {
assert!(project.is_shared());
assert!(worktree_a1.read(cx).as_local().unwrap().is_shared());
assert!(worktree_a1
.read(cx)
.as_local()
.unwrap()
.has_update_observer());
assert_eq!(
worktree_a1
.read(cx)
@ -1652,7 +1665,11 @@ async fn test_project_reconnect(
"subdir2/i.txt"
]
);
assert!(worktree_a3.read(cx).as_local().unwrap().is_shared());
assert!(worktree_a3
.read(cx)
.as_local()
.unwrap()
.has_update_observer());
assert_eq!(
worktree_a3
.read(cx)
@ -1733,7 +1750,7 @@ async fn test_project_reconnect(
executor.run_until_parked();
let worktree4_id = worktree_a4.read_with(cx_a, |tree, _| {
assert!(tree.as_local().unwrap().is_shared());
assert!(tree.as_local().unwrap().has_update_observer());
tree.id()
});
project_a1.update(cx_a, |project, cx| {

View File

@ -1044,7 +1044,6 @@ async fn get_copilot_lsp(http: Arc<dyn HttpClient>) -> anyhow::Result<PathBuf> {
mod tests {
use super::*;
use gpui::TestAppContext;
use language::BufferId;
#[gpui::test(iterations = 10)]
async fn test_buffer_management(cx: &mut TestAppContext) {
@ -1258,16 +1257,5 @@ mod tests {
fn load(&self, _: &AppContext) -> Task<Result<String>> {
unimplemented!()
}
fn buffer_reloaded(
&self,
_: BufferId,
_: &clock::Global,
_: language::LineEnding,
_: Option<std::time::SystemTime>,
_: &mut AppContext,
) {
unimplemented!()
}
}
}

View File

@ -382,16 +382,6 @@ pub trait LocalFile: File {
/// Loads the file's contents from disk.
fn load(&self, cx: &AppContext) -> Task<Result<String>>;
/// Called when the buffer is reloaded from disk.
fn buffer_reloaded(
&self,
buffer_id: BufferId,
version: &clock::Global,
line_ending: LineEnding,
mtime: Option<SystemTime>,
cx: &mut AppContext,
);
/// Returns true if the file should not be shared with collaborators.
fn is_private(&self, _: &AppContext) -> bool {
false
@ -884,15 +874,6 @@ impl Buffer {
self.saved_version = version;
self.text.set_line_ending(line_ending);
self.saved_mtime = mtime;
if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
file.buffer_reloaded(
self.remote_id(),
&self.saved_version,
self.line_ending(),
self.saved_mtime,
cx,
);
}
cx.emit(Event::Reloaded);
cx.notify();
}

View File

@ -44,7 +44,7 @@ use language::{
markdown, point_to_lsp, prepare_completion_documentation,
proto::{
deserialize_anchor, deserialize_line_ending, deserialize_version, serialize_anchor,
serialize_version, split_operations,
serialize_line_ending, serialize_version, split_operations,
},
range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, Capability, CodeLabel,
ContextProvider, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Documentation,
@ -121,9 +121,9 @@ pub use task_inventory::{
BasicContextProvider, ContextProviderWithTasks, Inventory, TaskSourceKind,
};
pub use worktree::{
DiagnosticSummary, Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId,
RepositoryEntry, UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId,
WorktreeSettings, FS_WATCH_LATENCY,
Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, RepositoryEntry,
UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings,
FS_WATCH_LATENCY,
};
const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4;
@ -180,6 +180,18 @@ pub struct Project {
next_entry_id: Arc<AtomicUsize>,
join_project_response_message_id: u32,
next_diagnostic_group_id: usize,
diagnostic_summaries:
HashMap<WorktreeId, HashMap<Arc<Path>, HashMap<LanguageServerId, DiagnosticSummary>>>,
diagnostics: HashMap<
WorktreeId,
HashMap<
Arc<Path>,
Vec<(
LanguageServerId,
Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
)>,
>,
>,
user_store: Model<UserStore>,
fs: Arc<dyn Fs>,
client_state: ProjectClientState,
@ -749,6 +761,8 @@ impl Project {
fs,
next_entry_id: Default::default(),
next_diagnostic_group_id: Default::default(),
diagnostics: Default::default(),
diagnostic_summaries: Default::default(),
supplementary_language_servers: HashMap::default(),
language_servers: Default::default(),
language_server_ids: HashMap::default(),
@ -842,8 +856,7 @@ impl Project {
// That's because Worktree's identifier is entity id, which should probably be changed.
let mut worktrees = Vec::new();
for worktree in response.payload.worktrees {
let worktree =
Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
let worktree = Worktree::remote(replica_id, worktree, cx);
worktrees.push(worktree);
}
@ -873,6 +886,8 @@ impl Project {
fs,
next_entry_id: Default::default(),
next_diagnostic_group_id: Default::default(),
diagnostic_summaries: Default::default(),
diagnostics: Default::default(),
client_subscriptions: Default::default(),
_subscriptions: vec![
cx.on_release(Self::release),
@ -1733,6 +1748,7 @@ impl Project {
let worktrees = this.update(&mut cx, |this, _cx| {
this.worktrees().collect::<Vec<_>>()
})?;
let update_project = this
.update(&mut cx, |this, cx| {
this.client.request(proto::UpdateProject {
@ -1741,14 +1757,49 @@ impl Project {
})
})?
.await;
if update_project.log_err().is_some() {
if update_project.log_err().is_none() {
continue;
}
this.update(&mut cx, |this, cx| {
for worktree in worktrees {
worktree.update(&mut cx, |worktree, cx| {
let worktree = worktree.as_local_mut().unwrap();
worktree.share(project_id, cx).detach_and_log_err(cx)
worktree.update(cx, |worktree, cx| {
if let Some(summaries) =
this.diagnostic_summaries.get(&worktree.id())
{
for (path, summaries) in summaries {
for (&server_id, summary) in summaries {
this.client.send(
proto::UpdateDiagnosticSummary {
project_id,
worktree_id: cx.entity_id().as_u64(),
summary: Some(
summary.to_proto(server_id, path),
),
},
)?;
}
}
}
worktree.as_local_mut().unwrap().observe_updates(
project_id,
cx,
{
let client = client.clone();
move |update| {
client
.request(update)
.map(|result| result.is_ok())
}
},
);
anyhow::Ok(())
})?;
}
}
anyhow::Ok(())
})??;
}
LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
let buffer = this.update(&mut cx, |this, _| {
@ -1893,7 +1944,7 @@ impl Project {
for worktree_handle in self.worktrees.iter_mut() {
if let WorktreeHandle::Strong(worktree) = worktree_handle {
let is_visible = worktree.update(cx, |worktree, _| {
worktree.as_local_mut().unwrap().unshare();
worktree.as_local_mut().unwrap().stop_observing_updates();
worktree.is_visible()
});
if !is_visible {
@ -2177,23 +2228,47 @@ impl Project {
) -> Task<Result<Model<Buffer>>> {
let load_buffer = worktree.update(cx, |worktree, cx| {
let worktree = worktree.as_local_mut().unwrap();
worktree.load_buffer(&path, cx)
let file = worktree.load_file(path.as_ref(), cx);
let reservation = cx.reserve_model();
let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
cx.spawn(move |_, mut cx| async move {
let (file, contents, diff_base) = file.await?;
let text_buffer = cx
.background_executor()
.spawn(async move { text::Buffer::new(0, buffer_id, contents) })
.await;
cx.insert_model(reservation, |_| {
Buffer::build(
text_buffer,
diff_base,
Some(Arc::new(file)),
Capability::ReadWrite,
)
})
})
});
fn is_not_found_error(error: &anyhow::Error) -> bool {
error
.root_cause()
.downcast_ref::<io::Error>()
.is_some_and(|err| err.kind() == io::ErrorKind::NotFound)
}
cx.spawn(move |this, mut cx| async move {
let buffer = match load_buffer.await {
Ok(buffer) => Ok(buffer),
Err(error) if is_not_found_error(&error) => {
worktree.update(&mut cx, |worktree, cx| {
let worktree = worktree.as_local_mut().unwrap();
worktree.new_buffer(path, cx)
})
}
Err(error) if is_not_found_error(&error) => cx.new_model(|cx| {
let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64());
let text_buffer = text::Buffer::new(0, buffer_id, "".into());
Buffer::build(
text_buffer,
None,
Some(Arc::new(File {
worktree,
path,
mtime: None,
entry_id: None,
is_local: true,
is_deleted: false,
is_private: false,
})),
Capability::ReadWrite,
)
}),
Err(e) => Err(e),
}?;
this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))??;
@ -2321,8 +2396,8 @@ impl Project {
let worktree = file.worktree.clone();
let path = file.path.clone();
worktree.update(cx, |worktree, cx| match worktree {
Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
Worktree::Remote(worktree) => worktree.save_buffer(buffer, None, cx),
Worktree::Local(worktree) => self.save_local_buffer(&worktree, buffer, path, false, cx),
Worktree::Remote(_) => self.save_remote_buffer(buffer, None, cx),
})
}
@ -2340,21 +2415,20 @@ impl Project {
};
cx.spawn(move |this, mut cx| async move {
if let Some(old_file) = &old_file {
this.update(&mut cx, |this, cx| {
this.update(&mut cx, |this, cx| {
if let Some(old_file) = &old_file {
this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
})?;
}
worktree
.update(&mut cx, |worktree, cx| match worktree {
}
worktree.update(cx, |worktree, cx| match worktree {
Worktree::Local(worktree) => {
worktree.save_buffer(buffer.clone(), path.path, true, cx)
this.save_local_buffer(worktree, buffer.clone(), path.path, true, cx)
}
Worktree::Remote(worktree) => {
worktree.save_buffer(buffer.clone(), Some(path.to_proto()), cx)
Worktree::Remote(_) => {
this.save_remote_buffer(buffer.clone(), Some(path.to_proto()), cx)
}
})?
.await?;
})
})?
.await?;
this.update(&mut cx, |this, cx| {
this.detect_language_for_buffer(&buffer, cx);
@ -2364,6 +2438,129 @@ impl Project {
})
}
pub fn save_local_buffer(
&self,
worktree: &LocalWorktree,
buffer_handle: Model<Buffer>,
path: Arc<Path>,
mut has_changed_file: bool,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> {
let buffer = buffer_handle.read(cx);
let rpc = self.client.clone();
let buffer_id: u64 = buffer.remote_id().into();
let project_id = self.remote_id();
if buffer.file().is_some_and(|file| !file.is_created()) {
has_changed_file = true;
}
let text = buffer.as_rope().clone();
let version = buffer.version();
let save = worktree.write_file(path.as_ref(), text, buffer.line_ending(), cx);
let fs = Arc::clone(&self.fs);
let abs_path = worktree.absolutize(&path);
let is_private = worktree.is_path_private(&path);
cx.spawn(move |this, mut cx| async move {
let entry = save.await?;
let abs_path = abs_path?;
let this = this.upgrade().context("worktree dropped")?;
let (entry_id, mtime, path, is_private) = match entry {
Some(entry) => (Some(entry.id), entry.mtime, entry.path, entry.is_private),
None => {
let metadata = fs
.metadata(&abs_path)
.await
.with_context(|| {
format!(
"Fetching metadata after saving the excluded buffer {abs_path:?}"
)
})?
.with_context(|| {
format!("Excluded buffer {path:?} got removed during saving")
})?;
(None, Some(metadata.mtime), path, is_private)
}
};
if has_changed_file {
let new_file = Arc::new(File {
entry_id,
worktree: this,
path,
mtime,
is_local: true,
is_deleted: false,
is_private,
});
if let Some(project_id) = project_id {
rpc.send(proto::UpdateBufferFile {
project_id,
buffer_id,
file: Some(new_file.to_proto()),
})
.log_err();
}
buffer_handle.update(&mut cx, |buffer, cx| {
if has_changed_file {
buffer.file_updated(new_file, cx);
}
})?;
}
if let Some(project_id) = project_id {
rpc.send(proto::BufferSaved {
project_id,
buffer_id,
version: serialize_version(&version),
mtime: mtime.map(|time| time.into()),
})?;
}
buffer_handle.update(&mut cx, |buffer, cx| {
buffer.did_save(version.clone(), mtime, cx);
})?;
Ok(())
})
}
pub fn save_remote_buffer(
&self,
buffer_handle: Model<Buffer>,
new_path: Option<proto::ProjectPath>,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> {
let buffer = buffer_handle.read(cx);
let buffer_id = buffer.remote_id().into();
let version = buffer.version();
let rpc = self.client.clone();
let project_id = self.remote_id();
cx.spawn(move |_, mut cx| async move {
let response = rpc
.request(proto::SaveBuffer {
project_id: project_id.ok_or_else(|| anyhow!("project_id is not set"))?,
buffer_id,
new_path,
version: serialize_version(&version),
})
.await?;
let version = deserialize_version(&response.version);
let mtime = response.mtime.map(|mtime| mtime.into());
buffer_handle.update(&mut cx, |buffer, cx| {
buffer.did_save(version.clone(), mtime, cx);
})?;
Ok(())
})
}
pub fn get_open_buffer(
&mut self,
path: &ProjectPath,
@ -2489,8 +2686,10 @@ impl Project {
let language = buffer.language().cloned();
let worktree_id = file.worktree_id(cx);
if let Some(local_worktree) = file.worktree.read(cx).as_local() {
for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
if let Some(diagnostics) = self.diagnostics.get(&worktree_id) {
for (server_id, diagnostics) in
diagnostics.get(file.path()).cloned().unwrap_or_default()
{
self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
.log_err();
}
@ -2729,6 +2928,23 @@ impl Project {
.ok();
}
BufferEvent::Reloaded => {
if self.is_local() {
if let Some(project_id) = self.remote_id() {
let buffer = buffer.read(cx);
self.client
.send(proto::BufferReloaded {
project_id,
buffer_id: buffer.remote_id().to_proto(),
version: serialize_version(&buffer.version()),
mtime: buffer.saved_mtime().map(|t| t.into()),
line_ending: serialize_line_ending(buffer.line_ending()) as i32,
})
.log_err();
}
}
}
BufferEvent::Edited { .. } => {
let buffer = buffer.read(cx);
let file = File::from_dyn(buffer.file())?;
@ -3929,14 +4145,43 @@ impl Project {
});
}
}
for worktree in &self.worktrees {
if let Some(worktree) = worktree.upgrade() {
worktree.update(cx, |worktree, cx| {
if let Some(worktree) = worktree.as_local_mut() {
worktree.clear_diagnostics_for_language_server(server_id, cx);
let project_id = self.remote_id();
for (worktree_id, summaries) in self.diagnostic_summaries.iter_mut() {
summaries.retain(|path, summaries_by_server_id| {
if summaries_by_server_id.remove(&server_id).is_some() {
if let Some(project_id) = project_id {
self.client
.send(proto::UpdateDiagnosticSummary {
project_id,
worktree_id: worktree_id.to_proto(),
summary: Some(proto::DiagnosticSummary {
path: path.to_string_lossy().to_string(),
language_server_id: server_id.0 as u64,
error_count: 0,
warning_count: 0,
}),
})
.log_err();
}
});
}
!summaries_by_server_id.is_empty()
} else {
true
}
});
}
for diagnostics in self.diagnostics.values_mut() {
diagnostics.retain(|_, diagnostics_by_server_id| {
if let Ok(ix) =
diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0)
{
diagnostics_by_server_id.remove(ix);
!diagnostics_by_server_id.is_empty()
} else {
true
}
});
}
self.language_server_watched_paths.remove(&server_id);
@ -4673,10 +4918,13 @@ impl Project {
}
let updated = worktree.update(cx, |worktree, cx| {
worktree
.as_local_mut()
.ok_or_else(|| anyhow!("not a local worktree"))?
.update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
self.update_worktree_diagnostics(
worktree.id(),
server_id,
project_path.path.clone(),
diagnostics,
cx,
)
})?;
if updated {
cx.emit(Event::DiagnosticsUpdated {
@ -4687,6 +4935,67 @@ impl Project {
Ok(())
}
pub fn update_worktree_diagnostics(
&mut self,
worktree_id: WorktreeId,
server_id: LanguageServerId,
worktree_path: Arc<Path>,
diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
_: &mut ModelContext<Worktree>,
) -> Result<bool> {
let summaries_for_tree = self.diagnostic_summaries.entry(worktree_id).or_default();
let diagnostics_for_tree = self.diagnostics.entry(worktree_id).or_default();
let summaries_by_server_id = summaries_for_tree.entry(worktree_path.clone()).or_default();
let old_summary = summaries_by_server_id
.remove(&server_id)
.unwrap_or_default();
let new_summary = DiagnosticSummary::new(&diagnostics);
if new_summary.is_empty() {
if let Some(diagnostics_by_server_id) = diagnostics_for_tree.get_mut(&worktree_path) {
if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
diagnostics_by_server_id.remove(ix);
}
if diagnostics_by_server_id.is_empty() {
diagnostics_for_tree.remove(&worktree_path);
}
}
} else {
summaries_by_server_id.insert(server_id, new_summary);
let diagnostics_by_server_id = diagnostics_for_tree
.entry(worktree_path.clone())
.or_default();
match diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
Ok(ix) => {
diagnostics_by_server_id[ix] = (server_id, diagnostics);
}
Err(ix) => {
diagnostics_by_server_id.insert(ix, (server_id, diagnostics));
}
}
}
if !old_summary.is_empty() || !new_summary.is_empty() {
if let Some(project_id) = self.remote_id() {
self.client
.send(proto::UpdateDiagnosticSummary {
project_id,
worktree_id: worktree_id.to_proto(),
summary: Some(proto::DiagnosticSummary {
path: worktree_path.to_string_lossy().to_string(),
language_server_id: server_id.0 as u64,
error_count: new_summary.error_count as u32,
warning_count: new_summary.warning_count as u32,
}),
})
.log_err();
}
}
Ok(!old_summary.is_empty() || !new_summary.is_empty())
}
fn update_buffer_diagnostics(
&mut self,
buffer: &Model<Buffer>,
@ -7453,7 +7762,6 @@ impl Project {
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Worktree>>> {
let fs = self.fs.clone();
let client = self.client.clone();
let next_entry_id = self.next_entry_id.clone();
let path: Arc<Path> = abs_path.as_ref().into();
let task = self
@ -7462,15 +7770,9 @@ impl Project {
.or_insert_with(|| {
cx.spawn(move |project, mut cx| {
async move {
let worktree = Worktree::local(
client.clone(),
path.clone(),
visible,
fs,
next_entry_id,
&mut cx,
)
.await;
let worktree =
Worktree::local(path.clone(), visible, fs, next_entry_id, &mut cx)
.await;
project.update(&mut cx, |project, _| {
project.loading_local_worktrees.remove(&path);
@ -7503,6 +7805,9 @@ impl Project {
}
pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
self.diagnostics.remove(&id_to_remove);
self.diagnostic_summaries.remove(&id_to_remove);
let mut servers_to_remove = HashMap::default();
let mut servers_to_preserve = HashSet::default();
for ((worktree_id, server_name), &server_id) in &self.language_server_ids {
@ -8128,23 +8433,34 @@ impl Project {
include_ignored: bool,
cx: &'a AppContext,
) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
self.visible_worktrees(cx).flat_map(move |worktree| {
let worktree = worktree.read(cx);
let worktree_id = worktree.id();
worktree
.diagnostic_summaries()
.filter_map(move |(path, server_id, summary)| {
if include_ignored
|| worktree
.entry_for_path(path.as_ref())
.map_or(false, |entry| !entry.is_ignored)
{
Some((ProjectPath { worktree_id, path }, server_id, summary))
} else {
None
}
})
})
self.visible_worktrees(cx)
.filter_map(|worktree| {
let worktree = worktree.read(cx);
Some((worktree, self.diagnostic_summaries.get(&worktree.id())?))
})
.flat_map(move |(worktree, summaries)| {
let worktree_id = worktree.id();
summaries
.iter()
.filter(move |(path, _)| {
include_ignored
|| worktree
.entry_for_path(path.as_ref())
.map_or(false, |entry| !entry.is_ignored)
})
.flat_map(move |(path, summaries)| {
summaries.iter().map(move |(server_id, summary)| {
(
ProjectPath {
worktree_id,
path: path.clone(),
},
*server_id,
*summary,
)
})
})
})
}
pub fn disk_based_diagnostics_started(
@ -8805,23 +9121,41 @@ impl Project {
) -> Result<()> {
this.update(&mut cx, |this, cx| {
let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
if let Some(summary) = envelope.payload.summary {
let project_path = ProjectPath {
worktree_id,
path: Path::new(&summary.path).into(),
};
worktree.update(cx, |worktree, _| {
worktree
.as_remote_mut()
.unwrap()
.update_diagnostic_summary(project_path.path.clone(), &summary);
});
cx.emit(Event::DiagnosticsUpdated {
language_server_id: LanguageServerId(summary.language_server_id as usize),
path: project_path,
});
if let Some(message) = envelope.payload.summary {
let project_path = ProjectPath {
worktree_id,
path: Path::new(&message.path).into(),
};
let path = project_path.path.clone();
let server_id = LanguageServerId(message.language_server_id as usize);
let summary = DiagnosticSummary {
error_count: message.error_count as usize,
warning_count: message.warning_count as usize,
};
if summary.is_empty() {
if let Some(worktree_summaries) =
this.diagnostic_summaries.get_mut(&worktree_id)
{
if let Some(summaries) = worktree_summaries.get_mut(&path) {
summaries.remove(&server_id);
if summaries.is_empty() {
worktree_summaries.remove(&path);
}
}
}
} else {
this.diagnostic_summaries
.entry(worktree_id)
.or_default()
.entry(path)
.or_default()
.insert(server_id, summary);
}
cx.emit(Event::DiagnosticsUpdated {
language_server_id: LanguageServerId(message.language_server_id as usize),
path: project_path,
});
}
Ok(())
})?
@ -10229,7 +10563,6 @@ impl Project {
cx: &mut ModelContext<Project>,
) -> Result<()> {
let replica_id = self.replica_id();
let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
let mut old_worktrees_by_id = self
.worktrees
@ -10246,8 +10579,7 @@ impl Project {
{
self.worktrees.push(WorktreeHandle::Strong(old_worktree));
} else {
let worktree =
Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
let worktree = Worktree::remote(replica_id, worktree, cx);
let _ = self.add_worktree(&worktree, cx);
}
}
@ -11502,6 +11834,13 @@ async fn wait_for_loading_buffer(
}
}
fn is_not_found_error(error: &anyhow::Error) -> bool {
error
.root_cause()
.downcast_ref::<io::Error>()
.is_some_and(|err| err.kind() == io::ErrorKind::NotFound)
}
fn include_text(server: &lsp::LanguageServer) -> bool {
server
.capabilities()
@ -11740,3 +12079,47 @@ fn deserialize_location(
})
})
}
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
pub error_count: usize,
pub warning_count: usize,
}
impl DiagnosticSummary {
pub fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
let mut this = Self {
error_count: 0,
warning_count: 0,
};
for entry in diagnostics {
if entry.diagnostic.is_primary {
match entry.diagnostic.severity {
DiagnosticSeverity::ERROR => this.error_count += 1,
DiagnosticSeverity::WARNING => this.warning_count += 1,
_ => {}
}
}
}
this
}
pub fn is_empty(&self) -> bool {
self.error_count == 0 && self.warning_count == 0
}
pub fn to_proto(
&self,
language_server_id: LanguageServerId,
path: &Path,
) -> proto::DiagnosticSummary {
proto::DiagnosticSummary {
path: path.to_string_lossy().to_string(),
language_server_id: language_server_id.0 as u64,
error_count: self.error_count as u32,
warning_count: self.warning_count as u32,
}
}
}

View File

@ -2955,7 +2955,6 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
}));
let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;
let rpc = project.update(cx, |p, _| p.client.clone());
let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
@ -2987,7 +2986,7 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
let updates = Arc::new(Mutex::new(Vec::new()));
tree.update(cx, |tree, cx| {
let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
tree.as_local_mut().unwrap().observe_updates(0, cx, {
let updates = updates.clone();
move |update| {
updates.lock().push(update);
@ -2996,7 +2995,7 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
});
});
let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
let remote = cx.update(|cx| Worktree::remote(1, metadata, cx));
cx.executor().run_until_parked();

View File

@ -14,7 +14,6 @@ workspace = true
[features]
test-support = [
"client/test-support",
"language/test-support",
"settings/test-support",
"text/test-support",
@ -24,7 +23,6 @@ test-support = [
[dependencies]
anyhow.workspace = true
client.workspace = true
clock.workspace = true
collections.workspace = true
fs.workspace = true
@ -36,7 +34,6 @@ ignore.workspace = true
itertools.workspace = true
language.workspace = true
log.workspace = true
lsp.workspace = true
parking_lot.workspace = true
postage.workspace = true
rpc.workspace = true

View File

@ -5,11 +5,9 @@ mod worktree_tests;
use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{anyhow, Context as _, Result};
use client::{proto, Client};
use clock::ReplicaId;
use collections::{HashMap, HashSet, VecDeque};
use fs::{copy_recursive, RemoveOptions};
use fs::{Fs, Watcher};
use fs::{copy_recursive, Fs, RemoveOptions, Watcher};
use futures::{
channel::{
mpsc::{self, UnboundedSender},
@ -21,9 +19,9 @@ use futures::{
FutureExt as _, Stream, StreamExt,
};
use fuzzy::CharBag;
use git::status::GitStatus;
use git::{
repository::{GitFileStatus, GitRepository, RepoPath},
status::GitStatus,
DOT_GIT, GITIGNORE,
};
use gpui::{
@ -32,21 +30,15 @@ use gpui::{
};
use ignore::IgnoreStack;
use itertools::Itertools;
use language::{
proto::{deserialize_version, serialize_line_ending, serialize_version},
Buffer, Capability, DiagnosticEntry, File as _, LineEnding, PointUtf16, Rope, Unclipped,
};
use lsp::{DiagnosticSeverity, LanguageServerId};
use parking_lot::Mutex;
use postage::{
barrier,
prelude::{Sink as _, Stream as _},
watch,
};
use serde::Serialize;
use rpc::proto;
use settings::{Settings, SettingsLocation, SettingsStore};
use smol::channel::{self, Sender};
use std::time::Instant;
use std::{
any::Any,
cmp::{self, Ordering},
@ -62,10 +54,10 @@ use std::{
atomic::{AtomicUsize, Ordering::SeqCst},
Arc,
},
time::{Duration, SystemTime},
time::{Duration, Instant, SystemTime},
};
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
use text::BufferId;
use text::{LineEnding, Rope};
use util::{
paths::{PathMatcher, HOME},
ResultExt,
@ -106,36 +98,16 @@ pub enum CreatedEntry {
Excluded { abs_path: PathBuf },
}
#[cfg(any(test, feature = "test-support"))]
impl CreatedEntry {
pub fn to_included(self) -> Option<Entry> {
match self {
CreatedEntry::Included(entry) => Some(entry),
CreatedEntry::Excluded { .. } => None,
}
}
}
pub struct LocalWorktree {
snapshot: LocalSnapshot,
scan_requests_tx: channel::Sender<ScanRequest>,
path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
_background_scanner_tasks: Vec<Task<()>>,
share: Option<ShareState>,
diagnostics: HashMap<
Arc<Path>,
Vec<(
LanguageServerId,
Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
)>,
>,
diagnostic_summaries: HashMap<Arc<Path>, HashMap<LanguageServerId, DiagnosticSummary>>,
client: Arc<Client>,
update_observer: Option<ShareState>,
fs: Arc<dyn Fs>,
fs_case_sensitive: bool,
visible: bool,
next_entry_id: Arc<AtomicUsize>,
}
@ -147,12 +119,9 @@ struct ScanRequest {
pub struct RemoteWorktree {
snapshot: Snapshot,
background_snapshot: Arc<Mutex<Snapshot>>,
project_id: u64,
client: Arc<Client>,
updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
replica_id: ReplicaId,
diagnostic_summaries: HashMap<Arc<Path>, HashMap<LanguageServerId, DiagnosticSummary>>,
visible: bool,
disconnected: bool,
}
@ -365,7 +334,6 @@ enum ScanState {
}
struct ShareState {
project_id: u64,
snapshots_tx:
mpsc::UnboundedSender<(LocalSnapshot, UpdatedEntriesSet, UpdatedGitRepositoriesSet)>,
resume_updates: watch::Sender<()>,
@ -382,7 +350,6 @@ impl EventEmitter<Event> for Worktree {}
impl Worktree {
pub async fn local(
client: Arc<Client>,
path: impl Into<Arc<Path>>,
visible: bool,
fs: Arc<dyn Fs>,
@ -502,7 +469,7 @@ impl Worktree {
next_entry_id: Arc::clone(&next_entry_id),
snapshot,
is_scanning: watch::channel_with(true),
share: None,
update_observer: None,
scan_requests_tx,
path_prefixes_to_scan_tx,
_background_scanner_tasks: start_background_scan_tasks(
@ -514,9 +481,6 @@ impl Worktree {
Arc::clone(&fs),
cx,
),
diagnostics: Default::default(),
diagnostic_summaries: Default::default(),
client,
fs,
fs_case_sensitive,
visible,
@ -525,10 +489,8 @@ impl Worktree {
}
pub fn remote(
project_remote_id: u64,
replica_id: ReplicaId,
worktree: proto::WorktreeMetadata,
client: Arc<Client>,
cx: &mut AppContext,
) -> Model<Self> {
cx.new_model(|cx: &mut ModelContext<Self>| {
@ -590,14 +552,11 @@ impl Worktree {
.detach();
Worktree::Remote(RemoteWorktree {
project_id: project_remote_id,
replica_id,
snapshot: snapshot.clone(),
background_snapshot,
updates_tx: Some(updates_tx),
snapshot_subscriptions: Default::default(),
client: client.clone(),
diagnostic_summaries: Default::default(),
visible: worktree.visible,
disconnected: false,
})
@ -679,21 +638,6 @@ impl Worktree {
}
}
pub fn diagnostic_summaries(
&self,
) -> impl Iterator<Item = (Arc<Path>, LanguageServerId, DiagnosticSummary)> + '_ {
match self {
Worktree::Local(worktree) => &worktree.diagnostic_summaries,
Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
}
.iter()
.flat_map(|(path, summaries)| {
summaries
.iter()
.map(move |(&server_id, &summary)| (path.clone(), server_id, summary))
})
}
pub fn abs_path(&self) -> Arc<Path> {
match self {
Worktree::Local(worktree) => worktree.abs_path.clone(),
@ -807,168 +751,6 @@ impl LocalWorktree {
path.starts_with(&self.abs_path)
}
pub fn load_buffer(
&mut self,
path: &Path,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<Model<Buffer>>> {
let path = Arc::from(path);
let reservation = cx.reserve_model();
let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
cx.spawn(move |this, mut cx| async move {
let (file, contents, diff_base) = this
.update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))?
.await?;
let text_buffer = cx
.background_executor()
.spawn(async move { text::Buffer::new(0, buffer_id, contents) })
.await;
cx.insert_model(reservation, |_| {
Buffer::build(
text_buffer,
diff_base,
Some(Arc::new(file)),
Capability::ReadWrite,
)
})
})
}
pub fn new_buffer(
&mut self,
path: Arc<Path>,
cx: &mut ModelContext<Worktree>,
) -> Model<Buffer> {
let worktree = cx.handle();
cx.new_model(|cx| {
let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64());
let text_buffer = text::Buffer::new(0, buffer_id, "".into());
Buffer::build(
text_buffer,
None,
Some(Arc::new(File {
worktree,
path,
mtime: None,
entry_id: None,
is_local: true,
is_deleted: false,
is_private: false,
})),
Capability::ReadWrite,
)
})
}
pub fn diagnostics_for_path(
&self,
path: &Path,
) -> Vec<(
LanguageServerId,
Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
)> {
self.diagnostics.get(path).cloned().unwrap_or_default()
}
pub fn clear_diagnostics_for_language_server(
&mut self,
server_id: LanguageServerId,
_: &mut ModelContext<Worktree>,
) {
let worktree_id = self.id().to_proto();
self.diagnostic_summaries
.retain(|path, summaries_by_server_id| {
if summaries_by_server_id.remove(&server_id).is_some() {
if let Some(share) = self.share.as_ref() {
self.client
.send(proto::UpdateDiagnosticSummary {
project_id: share.project_id,
worktree_id,
summary: Some(proto::DiagnosticSummary {
path: path.to_string_lossy().to_string(),
language_server_id: server_id.0 as u64,
error_count: 0,
warning_count: 0,
}),
})
.log_err();
}
!summaries_by_server_id.is_empty()
} else {
true
}
});
self.diagnostics.retain(|_, diagnostics_by_server_id| {
if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
diagnostics_by_server_id.remove(ix);
!diagnostics_by_server_id.is_empty()
} else {
true
}
});
}
pub fn update_diagnostics(
&mut self,
server_id: LanguageServerId,
worktree_path: Arc<Path>,
diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
_: &mut ModelContext<Worktree>,
) -> Result<bool> {
let summaries_by_server_id = self
.diagnostic_summaries
.entry(worktree_path.clone())
.or_default();
let old_summary = summaries_by_server_id
.remove(&server_id)
.unwrap_or_default();
let new_summary = DiagnosticSummary::new(&diagnostics);
if new_summary.is_empty() {
if let Some(diagnostics_by_server_id) = self.diagnostics.get_mut(&worktree_path) {
if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
diagnostics_by_server_id.remove(ix);
}
if diagnostics_by_server_id.is_empty() {
self.diagnostics.remove(&worktree_path);
}
}
} else {
summaries_by_server_id.insert(server_id, new_summary);
let diagnostics_by_server_id =
self.diagnostics.entry(worktree_path.clone()).or_default();
match diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
Ok(ix) => {
diagnostics_by_server_id[ix] = (server_id, diagnostics);
}
Err(ix) => {
diagnostics_by_server_id.insert(ix, (server_id, diagnostics));
}
}
}
if !old_summary.is_empty() || !new_summary.is_empty() {
if let Some(share) = self.share.as_ref() {
self.client
.send(proto::UpdateDiagnosticSummary {
project_id: share.project_id,
worktree_id: self.id().to_proto(),
summary: Some(proto::DiagnosticSummary {
path: worktree_path.to_string_lossy().to_string(),
language_server_id: server_id.0 as u64,
error_count: new_summary.error_count as u32,
warning_count: new_summary.warning_count as u32,
}),
})
.log_err();
}
}
Ok(!old_summary.is_empty() || !new_summary.is_empty())
}
fn restart_background_scanners(&mut self, cx: &mut ModelContext<Worktree>) {
let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
@ -997,7 +779,7 @@ impl LocalWorktree {
new_snapshot.share_private_files = self.snapshot.share_private_files;
self.snapshot = new_snapshot;
if let Some(share) = self.share.as_mut() {
if let Some(share) = self.update_observer.as_mut() {
share
.snapshots_tx
.unbounded_send((
@ -1138,7 +920,7 @@ impl LocalWorktree {
}
}
fn load(
pub fn load_file(
&self,
path: &Path,
cx: &mut ModelContext<Worktree>,
@ -1232,97 +1014,6 @@ impl LocalWorktree {
})
}
pub fn save_buffer(
&self,
buffer_handle: Model<Buffer>,
path: Arc<Path>,
mut has_changed_file: bool,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> {
let buffer = buffer_handle.read(cx);
let rpc = self.client.clone();
let buffer_id: u64 = buffer.remote_id().into();
let project_id = self.share.as_ref().map(|share| share.project_id);
if buffer.file().is_some_and(|file| !file.is_created()) {
has_changed_file = true;
}
let text = buffer.as_rope().clone();
let version = buffer.version();
let save = self.write_file(path.as_ref(), text, buffer.line_ending(), cx);
let fs = Arc::clone(&self.fs);
let abs_path = self.absolutize(&path);
let is_private = self.snapshot.is_path_private(&path);
cx.spawn(move |this, mut cx| async move {
let entry = save.await?;
let abs_path = abs_path?;
let this = this.upgrade().context("worktree dropped")?;
let (entry_id, mtime, path, is_dotenv) = match entry {
Some(entry) => (Some(entry.id), entry.mtime, entry.path, entry.is_private),
None => {
let metadata = fs
.metadata(&abs_path)
.await
.with_context(|| {
format!(
"Fetching metadata after saving the excluded buffer {abs_path:?}"
)
})?
.with_context(|| {
format!("Excluded buffer {path:?} got removed during saving")
})?;
(None, Some(metadata.mtime), path, is_private)
}
};
if has_changed_file {
let new_file = Arc::new(File {
entry_id,
worktree: this,
path,
mtime,
is_local: true,
is_deleted: false,
is_private: is_dotenv,
});
if let Some(project_id) = project_id {
rpc.send(proto::UpdateBufferFile {
project_id,
buffer_id,
file: Some(new_file.to_proto()),
})
.log_err();
}
buffer_handle.update(&mut cx, |buffer, cx| {
if has_changed_file {
buffer.file_updated(new_file, cx);
}
})?;
}
if let Some(project_id) = project_id {
rpc.send(proto::BufferSaved {
project_id,
buffer_id,
version: serialize_version(&version),
mtime: mtime.map(|time| time.into()),
})?;
}
buffer_handle.update(&mut cx, |buffer, cx| {
buffer.did_save(version.clone(), mtime, cx);
})?;
Ok(())
})
}
/// Find the lowest path in the worktree's datastructures that is an ancestor
fn lowest_ancestor(&self, path: &Path) -> PathBuf {
let mut lowest_ancestor = None;
@ -1400,7 +1091,7 @@ impl LocalWorktree {
})
}
pub(crate) fn write_file(
pub fn write_file(
&self,
path: impl Into<Arc<Path>>,
text: Rope,
@ -1630,8 +1321,7 @@ impl LocalWorktree {
project_id: u64,
cx: &mut ModelContext<Worktree>,
callback: F,
) -> oneshot::Receiver<()>
where
) where
F: 'static + Send + Fn(proto::UpdateWorktree) -> Fut,
Fut: Send + Future<Output = bool>,
{
@ -1640,12 +1330,9 @@ impl LocalWorktree {
#[cfg(not(any(test, feature = "test-support")))]
const MAX_CHUNK_SIZE: usize = 256;
let (share_tx, share_rx) = oneshot::channel();
if let Some(share) = self.share.as_mut() {
share_tx.send(()).ok();
*share.resume_updates.borrow_mut() = ();
return share_rx;
if let Some(observer) = self.update_observer.as_mut() {
*observer.resume_updates.borrow_mut() = ();
return;
}
let (resume_updates_tx, mut resume_updates_rx) = watch::channel::<()>();
@ -1683,47 +1370,23 @@ impl LocalWorktree {
}
}
}
share_tx.send(()).ok();
Some(())
});
self.share = Some(ShareState {
project_id,
self.update_observer = Some(ShareState {
snapshots_tx,
resume_updates: resume_updates_tx,
_maintain_remote_snapshot,
});
share_rx
}
pub fn share(&mut self, project_id: u64, cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
let client = self.client.clone();
for (path, summaries) in &self.diagnostic_summaries {
for (&server_id, summary) in summaries {
if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
project_id,
worktree_id: cx.entity_id().as_u64(),
summary: Some(summary.to_proto(server_id, path)),
}) {
return Task::ready(Err(e));
}
}
}
let rx = self.observe_updates(project_id, cx, move |update| {
client.request(update).map(|result| result.is_ok())
});
cx.background_executor()
.spawn(async move { rx.await.map_err(|_| anyhow!("share ended")) })
pub fn stop_observing_updates(&mut self) {
self.update_observer.take();
}
pub fn unshare(&mut self) {
self.share.take();
}
pub fn is_shared(&self) -> bool {
self.share.is_some()
#[cfg(any(test, feature = "test-support"))]
pub fn has_update_observer(&self) -> bool {
self.update_observer.is_some()
}
pub fn share_private_files(&mut self, cx: &mut ModelContext<Worktree>) {
@ -1743,37 +1406,6 @@ impl RemoteWorktree {
self.disconnected = true;
}
pub fn save_buffer(
&self,
buffer_handle: Model<Buffer>,
new_path: Option<proto::ProjectPath>,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> {
let buffer = buffer_handle.read(cx);
let buffer_id = buffer.remote_id().into();
let version = buffer.version();
let rpc = self.client.clone();
let project_id = self.project_id;
cx.spawn(move |_, mut cx| async move {
let response = rpc
.request(proto::SaveBuffer {
project_id,
buffer_id,
new_path,
version: serialize_version(&version),
})
.await?;
let version = deserialize_version(&response.version);
let mtime = response.mtime.map(|mtime| mtime.into());
buffer_handle.update(&mut cx, |buffer, cx| {
buffer.did_save(version.clone(), mtime, cx);
})?;
Ok(())
})
}
pub fn update_from_remote(&mut self, update: proto::UpdateWorktree) {
if let Some(updates_tx) = &self.updates_tx {
updates_tx
@ -1807,32 +1439,6 @@ impl RemoteWorktree {
}
}
pub fn update_diagnostic_summary(
&mut self,
path: Arc<Path>,
summary: &proto::DiagnosticSummary,
) {
let server_id = LanguageServerId(summary.language_server_id as usize);
let summary = DiagnosticSummary {
error_count: summary.error_count as usize,
warning_count: summary.warning_count as usize,
};
if summary.is_empty() {
if let Some(summaries) = self.diagnostic_summaries.get_mut(&path) {
summaries.remove(&server_id);
if summaries.is_empty() {
self.diagnostic_summaries.remove(&path);
}
}
} else {
self.diagnostic_summaries
.entry(path)
.or_default()
.insert(server_id, summary);
}
}
pub fn insert_entry(
&mut self,
entry: proto::Entry,
@ -3023,29 +2629,6 @@ impl language::LocalFile for File {
cx.background_executor()
.spawn(async move { fs.load(&abs_path?).await })
}
fn buffer_reloaded(
&self,
buffer_id: BufferId,
version: &clock::Global,
line_ending: LineEnding,
mtime: Option<SystemTime>,
cx: &mut AppContext,
) {
let worktree = self.worktree.read(cx).as_local().unwrap();
if let Some(project_id) = worktree.share.as_ref().map(|share| share.project_id) {
worktree
.client
.send(proto::BufferReloaded {
project_id,
buffer_id: buffer_id.into(),
version: serialize_version(version),
mtime: mtime.map(|time| time.into()),
line_ending: serialize_line_ending(line_ending) as i32,
})
.log_err();
}
}
}
impl File {
@ -5109,46 +4692,12 @@ impl ProjectEntryId {
}
}
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
pub error_count: usize,
pub warning_count: usize,
}
impl DiagnosticSummary {
fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
let mut this = Self {
error_count: 0,
warning_count: 0,
};
for entry in diagnostics {
if entry.diagnostic.is_primary {
match entry.diagnostic.severity {
DiagnosticSeverity::ERROR => this.error_count += 1,
DiagnosticSeverity::WARNING => this.warning_count += 1,
_ => {}
}
}
}
this
}
pub fn is_empty(&self) -> bool {
self.error_count == 0 && self.warning_count == 0
}
pub fn to_proto(
&self,
language_server_id: LanguageServerId,
path: &Path,
) -> proto::DiagnosticSummary {
proto::DiagnosticSummary {
path: path.to_string_lossy().to_string(),
language_server_id: language_server_id.0 as u64,
error_count: self.error_count as u32,
warning_count: self.warning_count as u32,
#[cfg(any(test, feature = "test-support"))]
impl CreatedEntry {
pub fn to_included(self) -> Option<Entry> {
match self {
CreatedEntry::Included(entry) => Some(entry),
CreatedEntry::Excluded { .. } => None,
}
}
}

View File

@ -3,12 +3,9 @@ use crate::{
WorktreeModelHandle,
};
use anyhow::Result;
use client::Client;
use clock::FakeSystemClock;
use fs::{FakeFs, Fs, RealFs, RemoveOptions};
use git::{repository::GitFileStatus, GITIGNORE};
use gpui::{BorrowAppContext, ModelContext, Task, TestAppContext};
use http::FakeHttpClient;
use parking_lot::Mutex;
use postage::stream::Stream;
use pretty_assertions::assert_eq;
@ -35,7 +32,6 @@ async fn test_traversal(cx: &mut TestAppContext) {
.await;
let tree = Worktree::local(
build_client(cx),
Path::new("/root"),
true,
fs,
@ -100,7 +96,6 @@ async fn test_circular_symlinks(cx: &mut TestAppContext) {
.unwrap();
let tree = Worktree::local(
build_client(cx),
Path::new("/root"),
true,
fs.clone(),
@ -200,7 +195,6 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
.unwrap();
let tree = Worktree::local(
build_client(cx),
Path::new("/root/dir1"),
true,
fs.clone(),
@ -351,7 +345,6 @@ async fn test_renaming_case_only(cx: &mut TestAppContext) {
}));
let tree = Worktree::local(
build_client(cx),
temp_root.path(),
true,
fs.clone(),
@ -428,7 +421,6 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
.await;
let tree = Worktree::local(
build_client(cx),
Path::new("/root"),
true,
fs.clone(),
@ -461,16 +453,16 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
// Open a file that is nested inside of a gitignored directory that
// has not yet been expanded.
let prev_read_dir_count = fs.read_dir_call_count();
let buffer = tree
let (file, _, _) = tree
.update(cx, |tree, cx| {
tree.as_local_mut()
.unwrap()
.load_buffer("one/node_modules/b/b1.js".as_ref(), cx)
.load_file("one/node_modules/b/b1.js".as_ref(), cx)
})
.await
.unwrap();
tree.read_with(cx, |tree, cx| {
tree.read_with(cx, |tree, _| {
assert_eq!(
tree.entries(true)
.map(|entry| (entry.path.as_ref(), entry.is_ignored))
@ -491,10 +483,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
]
);
assert_eq!(
buffer.read(cx).file().unwrap().path().as_ref(),
Path::new("one/node_modules/b/b1.js")
);
assert_eq!(file.path.as_ref(), Path::new("one/node_modules/b/b1.js"));
// Only the newly-expanded directories are scanned.
assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
@ -503,16 +492,16 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
// Open another file in a different subdirectory of the same
// gitignored directory.
let prev_read_dir_count = fs.read_dir_call_count();
let buffer = tree
let (file, _, _) = tree
.update(cx, |tree, cx| {
tree.as_local_mut()
.unwrap()
.load_buffer("one/node_modules/a/a2.js".as_ref(), cx)
.load_file("one/node_modules/a/a2.js".as_ref(), cx)
})
.await
.unwrap();
tree.read_with(cx, |tree, cx| {
tree.read_with(cx, |tree, _| {
assert_eq!(
tree.entries(true)
.map(|entry| (entry.path.as_ref(), entry.is_ignored))
@ -535,10 +524,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
]
);
assert_eq!(
buffer.read(cx).file().unwrap().path().as_ref(),
Path::new("one/node_modules/a/a2.js")
);
assert_eq!(file.path.as_ref(), Path::new("one/node_modules/a/a2.js"));
// Only the newly-expanded directory is scanned.
assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
@ -591,7 +577,6 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
.await;
let tree = Worktree::local(
build_client(cx),
Path::new("/root"),
true,
fs.clone(),
@ -711,7 +696,6 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
.await;
let tree = Worktree::local(
build_client(cx),
"/root/tree".as_ref(),
true,
fs.clone(),
@ -793,7 +777,6 @@ async fn test_update_gitignore(cx: &mut TestAppContext) {
.await;
let tree = Worktree::local(
build_client(cx),
"/root".as_ref(),
true,
fs.clone(),
@ -848,7 +831,6 @@ async fn test_write_file(cx: &mut TestAppContext) {
}));
let tree = Worktree::local(
build_client(cx),
dir.path(),
true,
Arc::new(RealFs::default()),
@ -928,7 +910,6 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
});
let tree = Worktree::local(
build_client(cx),
dir.path(),
true,
Arc::new(RealFs::default()),
@ -1032,7 +1013,6 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
});
let tree = Worktree::local(
build_client(cx),
dir.path(),
true,
Arc::new(RealFs::default()),
@ -1142,7 +1122,6 @@ async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
let dot_git_worktree_dir = dir.path().join(".git");
let tree = Worktree::local(
build_client(cx),
dot_git_worktree_dir.clone(),
true,
Arc::new(RealFs::default()),
@ -1181,7 +1160,6 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
.await;
let tree = Worktree::local(
build_client(cx),
"/root".as_ref(),
true,
fs,
@ -1194,7 +1172,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
let snapshot1 = tree.update(cx, |tree, cx| {
let tree = tree.as_local_mut().unwrap();
let snapshot = Arc::new(Mutex::new(tree.snapshot()));
let _ = tree.observe_updates(0, cx, {
tree.observe_updates(0, cx, {
let snapshot = snapshot.clone();
move |update| {
snapshot.lock().apply_remote_update(update).unwrap();
@ -1232,13 +1210,6 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
let client_fake = cx.update(|cx| {
Client::new(
Arc::new(FakeSystemClock::default()),
FakeHttpClient::with_404_response(),
cx,
)
});
let fs_fake = FakeFs::new(cx.background_executor.clone());
fs_fake
@ -1251,7 +1222,6 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.await;
let tree_fake = Worktree::local(
client_fake,
"/root".as_ref(),
true,
fs_fake,
@ -1280,21 +1250,12 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
});
let client_real = cx.update(|cx| {
Client::new(
Arc::new(FakeSystemClock::default()),
FakeHttpClient::with_404_response(),
cx,
)
});
let fs_real = Arc::new(RealFs::default());
let temp_root = temp_tree(json!({
"a": {}
}));
let tree_real = Worktree::local(
client_real,
temp_root.path(),
true,
fs_real,
@ -1385,7 +1346,6 @@ async fn test_random_worktree_operations_during_initial_scan(
log::info!("generated initial tree");
let worktree = Worktree::local(
build_client(cx),
root_dir,
true,
fs.clone(),
@ -1400,7 +1360,7 @@ async fn test_random_worktree_operations_during_initial_scan(
worktree.update(cx, |tree, cx| {
check_worktree_change_events(tree, cx);
let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
tree.as_local_mut().unwrap().observe_updates(0, cx, {
let updates = updates.clone();
move |update| {
updates.lock().push(update);
@ -1475,7 +1435,6 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
log::info!("generated initial tree");
let worktree = Worktree::local(
build_client(cx),
root_dir,
true,
fs.clone(),
@ -1489,7 +1448,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
worktree.update(cx, |tree, cx| {
check_worktree_change_events(tree, cx);
let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
tree.as_local_mut().unwrap().observe_updates(0, cx, {
let updates = updates.clone();
move |update| {
updates.lock().push(update);
@ -1548,7 +1507,6 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
{
let new_worktree = Worktree::local(
build_client(cx),
root_dir,
true,
fs.clone(),
@ -1892,7 +1850,6 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
let root_path = root.path();
let tree = Worktree::local(
build_client(cx),
root_path,
true,
Arc::new(RealFs::default()),
@ -1971,7 +1928,6 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
}));
let tree = Worktree::local(
build_client(cx),
root.path(),
true,
Arc::new(RealFs::default()),
@ -2112,7 +2068,6 @@ async fn test_git_status(cx: &mut TestAppContext) {
git_commit("Initial commit", &repo);
let tree = Worktree::local(
build_client(cx),
root.path(),
true,
Arc::new(RealFs::default()),
@ -2294,7 +2249,6 @@ async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) {
// Open the worktree in subfolder
let project_root = Path::new("my-repo/sub-folder-1/sub-folder-2");
let tree = Worktree::local(
build_client(cx),
root.path().join(project_root),
true,
Arc::new(RealFs::default()),
@ -2392,7 +2346,6 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
);
let tree = Worktree::local(
build_client(cx),
Path::new("/root"),
true,
fs.clone(),
@ -2471,12 +2424,6 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
}
}
fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
let clock = Arc::new(FakeSystemClock::default());
let http_client = FakeHttpClient::with_404_response();
cx.update(|cx| Client::new(clock, http_client, cx))
}
#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
git2::Repository::init(path).expect("Failed to initialize git repository")