Mirror of https://github.com/zed-industries/zed.git (synced 2024-11-07 20:39:04 +03:00)
Only update changed local worktree buffers
Co-Authored-By: Antonio Scandurra <me@as-cii.com>
parent 2d7cfb8c7c
commit 5c859da457
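In short, this change teaches Project to remember which open buffer belongs to which path (the new local_buffer_ids_by_path: HashMap<ProjectPath, u64> field), so that when a local worktree reports UpdatedEntries(changes), update_local_worktree_buffers only revisits buffers whose paths actually changed instead of scanning every open buffer. The sketch below illustrates that lookup pattern with plain std types; the Project/Buffer stand-ins and the update_changed_buffers name are hypothetical, not Zed's real API.

use std::collections::HashMap;
use std::path::PathBuf;

// Illustrative stand-ins only, not Zed's actual Project/Buffer types.
#[derive(Debug)]
struct Buffer {
    mtime: u64,
}

struct Project {
    opened_buffers: HashMap<u64, Buffer>,
    // The idea added by this commit: map a path to the id of the buffer open at
    // that path, so a change notification can be resolved to a buffer directly.
    local_buffer_ids_by_path: HashMap<PathBuf, u64>,
}

impl Project {
    // Hypothetical sketch: visit only the buffers whose paths appear in `changes`,
    // rather than iterating over every entry in `opened_buffers`.
    fn update_changed_buffers(&mut self, changes: &[PathBuf], new_mtime: u64) {
        for path in changes {
            if let Some(&buffer_id) = self.local_buffer_ids_by_path.get(path) {
                if let Some(buffer) = self.opened_buffers.get_mut(&buffer_id) {
                    buffer.mtime = new_mtime; // stand-in for the File/mtime refresh in the diff
                }
            }
        }
    }
}

fn main() {
    let mut project = Project {
        opened_buffers: HashMap::from([(1, Buffer { mtime: 0 }), (2, Buffer { mtime: 0 })]),
        local_buffer_ids_by_path: HashMap::from([
            (PathBuf::from("a.rs"), 1),
            (PathBuf::from("b.rs"), 2),
        ]),
    };
    // Only "a.rs" changed, so only buffer 1 is touched; buffer 2 is left alone.
    project.update_changed_buffers(&[PathBuf::from("a.rs")], 42);
    println!("{:?}", project.opened_buffers);
}

Beyond this lookup, the diff below also keeps the new map up to date on renames and drops stale entries when a buffer has been released, which the sketch omits.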
@@ -122,6 +122,7 @@ pub struct Project {
     loading_local_worktrees:
         HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
     opened_buffers: HashMap<u64, OpenBuffer>,
+    local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
     /// A mapping from a buffer ID to None means that we've started waiting for an ID but haven't finished loading it.
     /// Used for re-issuing buffer requests when peers temporarily disconnect
     incomplete_remote_buffers: HashMap<u64, Option<ModelHandle<Buffer>>>,
@@ -449,6 +450,7 @@ impl Project {
             incomplete_remote_buffers: Default::default(),
             loading_buffers_by_path: Default::default(),
             loading_local_worktrees: Default::default(),
+            local_buffer_ids_by_path: Default::default(),
             buffer_snapshots: Default::default(),
             join_project_response_message_id: 0,
             client_state: None,
@@ -517,6 +519,7 @@ impl Project {
             shared_buffers: Default::default(),
             incomplete_remote_buffers: Default::default(),
             loading_local_worktrees: Default::default(),
+            local_buffer_ids_by_path: Default::default(),
             active_entry: None,
             collaborators: Default::default(),
             join_project_response_message_id: response.message_id,
@@ -1628,6 +1631,18 @@ impl Project {
         })
         .detach();
 
+        if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
+            if file.is_local {
+                self.local_buffer_ids_by_path.insert(
+                    ProjectPath {
+                        worktree_id: file.worktree_id(cx),
+                        path: file.path.clone(),
+                    },
+                    remote_id,
+                );
+            }
+        }
+
         self.detect_language_for_buffer(buffer, cx);
         self.register_buffer_with_language_servers(buffer, cx);
         self.register_buffer_with_copilot(buffer, cx);
@@ -4525,7 +4540,7 @@ impl Project {
         if worktree.read(cx).is_local() {
             cx.subscribe(worktree, |this, worktree, event, cx| match event {
                 worktree::Event::UpdatedEntries(changes) => {
-                    this.update_local_worktree_buffers(&worktree, cx);
+                    this.update_local_worktree_buffers(&worktree, &changes, cx);
                     this.update_local_worktree_language_servers(&worktree, changes, cx);
                 }
                 worktree::Event::UpdatedGitRepositories(updated_repos) => {
@@ -4559,82 +4574,98 @@ impl Project {
     fn update_local_worktree_buffers(
         &mut self,
         worktree_handle: &ModelHandle<Worktree>,
+        changes: &HashMap<Arc<Path>, PathChange>,
         cx: &mut ModelContext<Self>,
     ) {
         let snapshot = worktree_handle.read(cx).snapshot();
 
-        let mut buffers_to_delete = Vec::new();
         let mut renamed_buffers = Vec::new();
-        for (buffer_id, buffer) in &self.opened_buffers {
-            if let Some(buffer) = buffer.upgrade(cx) {
-                buffer.update(cx, |buffer, cx| {
-                    if let Some(old_file) = File::from_dyn(buffer.file()) {
-                        if old_file.worktree != *worktree_handle {
-                            return;
-                        }
-
-                        let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id)
-                        {
-                            File {
-                                is_local: true,
-                                entry_id: entry.id,
-                                mtime: entry.mtime,
-                                path: entry.path.clone(),
-                                worktree: worktree_handle.clone(),
-                                is_deleted: false,
-                            }
-                        } else if let Some(entry) =
-                            snapshot.entry_for_path(old_file.path().as_ref())
-                        {
-                            File {
-                                is_local: true,
-                                entry_id: entry.id,
-                                mtime: entry.mtime,
-                                path: entry.path.clone(),
-                                worktree: worktree_handle.clone(),
-                                is_deleted: false,
-                            }
-                        } else {
-                            File {
-                                is_local: true,
-                                entry_id: old_file.entry_id,
-                                path: old_file.path().clone(),
-                                mtime: old_file.mtime(),
-                                worktree: worktree_handle.clone(),
-                                is_deleted: true,
-                            }
-                        };
-
-                        let old_path = old_file.abs_path(cx);
-                        if new_file.abs_path(cx) != old_path {
-                            renamed_buffers.push((cx.handle(), old_file.clone()));
-                        }
-
-                        if new_file != *old_file {
-                            if let Some(project_id) = self.remote_id() {
-                                self.client
-                                    .send(proto::UpdateBufferFile {
-                                        project_id,
-                                        buffer_id: *buffer_id as u64,
-                                        file: Some(new_file.to_proto()),
-                                    })
-                                    .log_err();
-                            }
-
-                            buffer.file_updated(Arc::new(new_file), cx).detach();
-                        }
-                    }
-                });
-            } else {
-                buffers_to_delete.push(*buffer_id);
+        for path in changes.keys() {
+            let worktree_id = worktree_handle.read(cx).id();
+            let project_path = ProjectPath {
+                worktree_id,
+                path: path.clone(),
+            };
+
+            if let Some(&buffer_id) = self.local_buffer_ids_by_path.get(&project_path) {
+                if let Some(buffer) = self
+                    .opened_buffers
+                    .get(&buffer_id)
+                    .and_then(|buffer| buffer.upgrade(cx))
+                {
+                    buffer.update(cx, |buffer, cx| {
+                        if let Some(old_file) = File::from_dyn(buffer.file()) {
+                            if old_file.worktree != *worktree_handle {
+                                return;
+                            }
+
+                            let new_file =
+                                if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
+                                    File {
+                                        is_local: true,
+                                        entry_id: entry.id,
+                                        mtime: entry.mtime,
+                                        path: entry.path.clone(),
+                                        worktree: worktree_handle.clone(),
+                                        is_deleted: false,
+                                    }
+                                } else if let Some(entry) =
+                                    snapshot.entry_for_path(old_file.path().as_ref())
+                                {
+                                    File {
+                                        is_local: true,
+                                        entry_id: entry.id,
+                                        mtime: entry.mtime,
+                                        path: entry.path.clone(),
+                                        worktree: worktree_handle.clone(),
+                                        is_deleted: false,
+                                    }
+                                } else {
+                                    File {
+                                        is_local: true,
+                                        entry_id: old_file.entry_id,
+                                        path: old_file.path().clone(),
+                                        mtime: old_file.mtime(),
+                                        worktree: worktree_handle.clone(),
+                                        is_deleted: true,
+                                    }
+                                };
+
+                            let old_path = old_file.abs_path(cx);
+                            if new_file.abs_path(cx) != old_path {
+                                renamed_buffers.push((cx.handle(), old_file.clone()));
+                                self.local_buffer_ids_by_path.remove(&project_path);
+                                self.local_buffer_ids_by_path.insert(
+                                    ProjectPath {
+                                        worktree_id,
+                                        path: path.clone(),
+                                    },
+                                    buffer_id,
+                                );
+                            }
+
+                            if new_file != *old_file {
+                                if let Some(project_id) = self.remote_id() {
+                                    self.client
+                                        .send(proto::UpdateBufferFile {
+                                            project_id,
+                                            buffer_id: buffer_id as u64,
+                                            file: Some(new_file.to_proto()),
+                                        })
+                                        .log_err();
+                                }
+
+                                buffer.file_updated(Arc::new(new_file), cx).detach();
+                            }
+                        }
+                    });
+                } else {
+                    self.opened_buffers.remove(&buffer_id);
+                    self.local_buffer_ids_by_path.remove(&project_path);
+                }
             }
         }
 
-        for buffer_id in buffers_to_delete {
-            self.opened_buffers.remove(&buffer_id);
-        }
-
         for (buffer, old_file) in renamed_buffers {
             self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
             self.detect_language_for_buffer(&buffer, cx);