Merge pull request #2473 from zed-industries/fix-styling-feedback

Fix git status issues
This commit is contained in:
Mikayla Maki 2023-05-15 16:28:57 -07:00 committed by GitHub
commit 790223f23a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 319 additions and 211 deletions

View File

@ -1569,8 +1569,8 @@ impl Database {
worktree.updated_repositories.push(proto::RepositoryEntry {
work_directory_id: db_repository.work_directory_id as u64,
branch: db_repository.branch,
removed_worktree_repo_paths: Default::default(),
updated_worktree_statuses: Default::default(),
removed_repo_paths: Default::default(),
updated_statuses: Default::default(),
});
}
}
@ -1607,15 +1607,13 @@ impl Database {
let db_status_entry = db_status_entry?;
if db_status_entry.is_deleted {
repository
.removed_worktree_repo_paths
.removed_repo_paths
.push(db_status_entry.repo_path);
} else {
repository
.updated_worktree_statuses
.push(proto::StatusEntry {
repo_path: db_status_entry.repo_path,
status: db_status_entry.status as i32,
});
repository.updated_statuses.push(proto::StatusEntry {
repo_path: db_status_entry.repo_path,
status: db_status_entry.status as i32,
});
}
}
}
@ -2444,12 +2442,10 @@ impl Database {
.await?;
for repository in update.updated_repositories.iter() {
if !repository.updated_worktree_statuses.is_empty() {
if !repository.updated_statuses.is_empty() {
worktree_repository_statuses::Entity::insert_many(
repository
.updated_worktree_statuses
.iter()
.map(|status_entry| worktree_repository_statuses::ActiveModel {
repository.updated_statuses.iter().map(|status_entry| {
worktree_repository_statuses::ActiveModel {
project_id: ActiveValue::set(project_id),
worktree_id: ActiveValue::set(worktree_id),
work_directory_id: ActiveValue::set(
@ -2459,7 +2455,8 @@ impl Database {
status: ActiveValue::set(status_entry.status as i64),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
}),
}
}),
)
.on_conflict(
OnConflict::columns([
@ -2479,7 +2476,7 @@ impl Database {
.await?;
}
if !repository.removed_worktree_repo_paths.is_empty() {
if !repository.removed_repo_paths.is_empty() {
worktree_repository_statuses::Entity::update_many()
.filter(
worktree_repository_statuses::Column::ProjectId
@ -2492,14 +2489,9 @@ impl Database {
worktree_repository_statuses::Column::WorkDirectoryId
.eq(repository.work_directory_id as i64),
)
.and(
worktree_repository_statuses::Column::RepoPath.is_in(
repository
.removed_worktree_repo_paths
.iter()
.map(String::as_str),
),
),
.and(worktree_repository_statuses::Column::RepoPath.is_in(
repository.removed_repo_paths.iter().map(String::as_str),
)),
)
.set(worktree_repository_statuses::ActiveModel {
is_deleted: ActiveValue::Set(true),
@ -2765,8 +2757,8 @@ impl Database {
proto::RepositoryEntry {
work_directory_id: db_repository_entry.work_directory_id as u64,
branch: db_repository_entry.branch,
removed_worktree_repo_paths: Default::default(),
updated_worktree_statuses: Default::default(),
removed_repo_paths: Default::default(),
updated_statuses: Default::default(),
},
);
}
@ -2791,12 +2783,10 @@ impl Database {
.repository_entries
.get_mut(&(db_status_entry.work_directory_id as u64))
{
repository_entry
.updated_worktree_statuses
.push(proto::StatusEntry {
repo_path: db_status_entry.repo_path,
status: db_status_entry.status as i32,
});
repository_entry.updated_statuses.push(proto::StatusEntry {
repo_path: db_status_entry.repo_path,
status: db_status_entry.status as i32,
});
}
}
}

View File

@ -14,7 +14,7 @@ use language::{
proto::serialize_anchor as serialize_text_anchor, Bias, Buffer, OffsetRangeExt, Point,
SelectionGoal,
};
use project::{repository::GitFileStatus, FormatTrigger, Item as _, Project, ProjectPath};
use project::{FormatTrigger, Item as _, Project, ProjectPath};
use rpc::proto::{self, update_view};
use settings::Settings;
use smallvec::SmallVec;
@ -27,7 +27,6 @@ use std::{
path::{Path, PathBuf},
};
use text::Selection;
use theme::ui::FileName;
use util::{ResultExt, TryFutureExt};
use workspace::item::{BreadcrumbText, FollowableItemHandle};
use workspace::{
@ -566,25 +565,8 @@ impl Item for Editor {
style: &theme::Tab,
cx: &AppContext,
) -> AnyElement<T> {
fn git_file_status(this: &Editor, cx: &AppContext) -> Option<GitFileStatus> {
let project_entry_id = this
.buffer()
.read(cx)
.as_singleton()?
.read(cx)
.entry_id(cx)?;
let project = this.project.as_ref()?.read(cx);
let path = project.path_for_entry(project_entry_id, cx)?.path;
let worktree = project.worktree_for_entry(project_entry_id, cx)?.read(cx);
worktree.repo_for(&path)?.status_for_path(&worktree, &path)
}
Flex::row()
.with_child(ComponentHost::new(FileName::new(
self.title(cx).to_string(),
git_file_status(self, cx),
FileName::style(style.label.clone(), &cx.global::<Settings>().theme),
)))
.with_child(Label::new(self.title(cx).to_string(), style.label.clone()).into_any())
.with_children(detail.and_then(|detail| {
let path = path_for_buffer(&self.buffer, detail, false, cx)?;
let description = path.to_string_lossy();

View File

@ -22,9 +22,9 @@ pub trait GitRepository: Send {
fn branch_name(&self) -> Option<String>;
fn worktree_statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>>;
fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>>;
fn worktree_status(&self, path: &RepoPath) -> Option<GitFileStatus>;
fn status(&self, path: &RepoPath) -> Option<GitFileStatus>;
}
impl std::fmt::Debug for dyn GitRepository {
@ -71,7 +71,7 @@ impl GitRepository for LibGitRepository {
Some(branch.to_string())
}
fn worktree_statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>> {
fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>> {
let statuses = self.statuses(None).log_err()?;
let mut map = TreeMap::default();
@ -91,7 +91,7 @@ impl GitRepository for LibGitRepository {
Some(map)
}
fn worktree_status(&self, path: &RepoPath) -> Option<GitFileStatus> {
fn status(&self, path: &RepoPath) -> Option<GitFileStatus> {
let status = self.status_file(path).log_err()?;
read_status(status)
}
@ -100,9 +100,14 @@ impl GitRepository for LibGitRepository {
fn read_status(status: git2::Status) -> Option<GitFileStatus> {
if status.contains(git2::Status::CONFLICTED) {
Some(GitFileStatus::Conflict)
} else if status.intersects(git2::Status::WT_MODIFIED | git2::Status::WT_RENAMED) {
} else if status.intersects(
git2::Status::WT_MODIFIED
| git2::Status::WT_RENAMED
| git2::Status::INDEX_MODIFIED
| git2::Status::INDEX_RENAMED,
) {
Some(GitFileStatus::Modified)
} else if status.intersects(git2::Status::WT_NEW) {
} else if status.intersects(git2::Status::WT_NEW | git2::Status::INDEX_NEW) {
Some(GitFileStatus::Added)
} else {
None
@ -141,7 +146,7 @@ impl GitRepository for FakeGitRepository {
state.branch_name.clone()
}
fn worktree_statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>> {
fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>> {
let state = self.state.lock();
let mut map = TreeMap::default();
for (repo_path, status) in state.worktree_statuses.iter() {
@ -150,7 +155,7 @@ impl GitRepository for FakeGitRepository {
Some(map)
}
fn worktree_status(&self, path: &RepoPath) -> Option<GitFileStatus> {
fn status(&self, path: &RepoPath) -> Option<GitFileStatus> {
let state = self.state.lock();
state.worktree_statuses.get(path).cloned()
}

View File

@ -143,7 +143,7 @@ impl Snapshot {
pub struct RepositoryEntry {
pub(crate) work_directory: WorkDirectoryEntry,
pub(crate) branch: Option<Arc<str>>,
pub(crate) worktree_statuses: TreeMap<RepoPath, GitFileStatus>,
pub(crate) statuses: TreeMap<RepoPath, GitFileStatus>,
}
fn read_git_status(git_status: i32) -> Option<GitFileStatus> {
@ -176,7 +176,7 @@ impl RepositoryEntry {
pub fn status_for_file(&self, snapshot: &Snapshot, path: &Path) -> Option<GitFileStatus> {
self.work_directory
.relativize(snapshot, path)
.and_then(|repo_path| self.worktree_statuses.get(&repo_path))
.and_then(|repo_path| self.statuses.get(&repo_path))
.cloned()
}
@ -184,12 +184,12 @@ impl RepositoryEntry {
self.work_directory
.relativize(snapshot, path)
.and_then(|repo_path| {
self.worktree_statuses
self.statuses
.iter_from(&repo_path)
.take_while(|(key, _)| key.starts_with(&repo_path))
.map(|(_, status)| status)
// Short circuit once we've found the highest level
.take_until(|status| status == &&GitFileStatus::Conflict)
.take_until(|(_, status)| status == &&GitFileStatus::Conflict)
.map(|(_, status)| status)
.reduce(
|status_first, status_second| match (status_first, status_second) {
(GitFileStatus::Conflict, _) | (_, GitFileStatus::Conflict) => {
@ -209,8 +209,8 @@ impl RepositoryEntry {
let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
let mut removed_statuses: Vec<String> = Vec::new();
let mut self_statuses = self.worktree_statuses.iter().peekable();
let mut other_statuses = other.worktree_statuses.iter().peekable();
let mut self_statuses = self.statuses.iter().peekable();
let mut other_statuses = other.statuses.iter().peekable();
loop {
match (self_statuses.peek(), other_statuses.peek()) {
(Some((self_repo_path, self_status)), Some((other_repo_path, other_status))) => {
@ -249,8 +249,8 @@ impl RepositoryEntry {
proto::RepositoryEntry {
work_directory_id: self.work_directory_id().to_proto(),
branch: self.branch.as_ref().map(|str| str.to_string()),
removed_worktree_repo_paths: removed_statuses,
updated_worktree_statuses: updated_statuses,
removed_repo_paths: removed_statuses,
updated_statuses: updated_statuses,
}
}
}
@ -275,12 +275,12 @@ impl From<&RepositoryEntry> for proto::RepositoryEntry {
proto::RepositoryEntry {
work_directory_id: value.work_directory.to_proto(),
branch: value.branch.as_ref().map(|str| str.to_string()),
updated_worktree_statuses: value
.worktree_statuses
updated_statuses: value
.statuses
.iter()
.map(|(repo_path, status)| make_status_entry(repo_path, status))
.collect(),
removed_worktree_repo_paths: Default::default(),
removed_repo_paths: Default::default(),
}
}
}
@ -1546,7 +1546,7 @@ impl Snapshot {
if let Some(entry) = self.entry_for_id(*work_directory_entry) {
let mut statuses = TreeMap::default();
for status_entry in repository.updated_worktree_statuses {
for status_entry in repository.updated_statuses {
let Some(git_file_status) = read_git_status(status_entry.status) else {
continue;
};
@ -1559,11 +1559,11 @@ impl Snapshot {
if self.repository_entries.get(&work_directory).is_some() {
self.repository_entries.update(&work_directory, |repo| {
repo.branch = repository.branch.map(Into::into);
repo.worktree_statuses.insert_tree(statuses);
repo.statuses.insert_tree(statuses);
for repo_path in repository.removed_worktree_repo_paths {
for repo_path in repository.removed_repo_paths {
let repo_path = RepoPath::new(repo_path.into());
repo.worktree_statuses.remove(&repo_path);
repo.statuses.remove(&repo_path);
}
});
} else {
@ -1572,7 +1572,7 @@ impl Snapshot {
RepositoryEntry {
work_directory: work_directory_entry,
branch: repository.branch.map(Into::into),
worktree_statuses: statuses,
statuses,
},
)
}
@ -1669,6 +1669,30 @@ impl Snapshot {
}
}
fn descendent_entries<'a>(
&'a self,
include_dirs: bool,
include_ignored: bool,
parent_path: &'a Path,
) -> DescendentEntriesIter<'a> {
let mut cursor = self.entries_by_path.cursor();
cursor.seek(&TraversalTarget::Path(parent_path), Bias::Left, &());
let mut traversal = Traversal {
cursor,
include_dirs,
include_ignored,
};
if traversal.end_offset() == traversal.start_offset() {
traversal.advance();
}
DescendentEntriesIter {
traversal,
parent_path,
}
}
pub fn root_entry(&self) -> Option<&Entry> {
self.entry_for_path("")
}
@ -1988,7 +2012,7 @@ impl LocalSnapshot {
RepositoryEntry {
work_directory: work_dir_id.into(),
branch: repo_lock.branch_name().map(Into::into),
worktree_statuses: repo_lock.worktree_statuses().unwrap_or_default(),
statuses: repo_lock.statuses().unwrap_or_default(),
},
);
drop(repo_lock);
@ -2670,14 +2694,13 @@ impl BackgroundScanner {
async fn process_events(&mut self, paths: Vec<PathBuf>) {
let (scan_job_tx, scan_job_rx) = channel::unbounded();
if let Some(mut paths) = self
let paths = self
.reload_entries_for_paths(paths, Some(scan_job_tx.clone()))
.await
{
paths.sort_unstable();
.await;
if let Some(paths) = &paths {
util::extend_sorted(
&mut self.prev_state.lock().event_paths,
paths,
paths.iter().cloned(),
usize::MAX,
Ord::cmp,
);
@ -2689,6 +2712,12 @@ impl BackgroundScanner {
let mut snapshot = self.snapshot.lock();
if let Some(paths) = paths {
for path in paths {
self.reload_repo_for_file_path(&path, &mut *snapshot, self.fs.as_ref());
}
}
let mut git_repositories = mem::take(&mut snapshot.git_repositories);
git_repositories.retain(|work_directory_id, _| {
snapshot
@ -2999,8 +3028,6 @@ impl BackgroundScanner {
fs_entry.is_ignored = ignore_stack.is_all();
snapshot.insert_entry(fs_entry, self.fs.as_ref());
self.reload_repo_for_path(&path, &mut snapshot, self.fs.as_ref());
if let Some(scan_queue_tx) = &scan_queue_tx {
let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path);
if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) {
@ -3048,7 +3075,7 @@ impl BackgroundScanner {
snapshot.repository_entries.update(&work_dir, |entry| {
entry
.worktree_statuses
.statuses
.remove_range(&repo_path, &RepoPathDescendants(&repo_path))
});
}
@ -3056,7 +3083,7 @@ impl BackgroundScanner {
Some(())
}
fn reload_repo_for_path(
fn reload_repo_for_file_path(
&self,
path: &Path,
snapshot: &mut LocalSnapshot,
@ -3090,7 +3117,7 @@ impl BackgroundScanner {
let repo = repo_ptr.lock();
repo.reload_index();
let branch = repo.branch_name();
let statuses = repo.worktree_statuses().unwrap_or_default();
let statuses = repo.statuses().unwrap_or_default();
snapshot.git_repositories.update(&entry_id, |entry| {
entry.scan_id = scan_id;
@ -3099,7 +3126,7 @@ impl BackgroundScanner {
snapshot.repository_entries.update(&work_dir, |entry| {
entry.branch = branch.map(Into::into);
entry.worktree_statuses = statuses;
entry.statuses = statuses;
});
} else {
if snapshot
@ -3113,34 +3140,36 @@ impl BackgroundScanner {
let repo = snapshot.repo_for(&path)?;
let repo_path = repo.work_directory.relativize(&snapshot, &path)?;
let status = {
let local_repo = snapshot.get_local_repo(&repo)?;
// Short circuit if we've already scanned everything
if local_repo.full_scan_id == scan_id {
return None;
}
let git_ptr = local_repo.repo_ptr.lock();
git_ptr.worktree_status(&repo_path)
};
let work_dir = repo.work_directory(snapshot)?;
let work_dir_id = repo.work_directory;
let work_dir_id = repo.work_directory.clone();
snapshot
.git_repositories
.update(&work_dir_id, |entry| entry.scan_id = scan_id);
snapshot.repository_entries.update(&work_dir, |entry| {
let local_repo = snapshot.get_local_repo(&repo)?.to_owned();
// Short circuit if we've already scanned everything
if local_repo.full_scan_id == scan_id {
return None;
}
let mut repository = snapshot.repository_entries.remove(&work_dir)?;
for entry in snapshot.descendent_entries(false, false, path) {
let Some(repo_path) = repo.work_directory.relativize(snapshot, &entry.path) else {
continue;
};
let status = local_repo.repo_ptr.lock().status(&repo_path);
if let Some(status) = status {
entry.worktree_statuses.insert(repo_path, status);
repository.statuses.insert(repo_path.clone(), status);
} else {
entry.worktree_statuses.remove(&repo_path);
repository.statuses.remove(&repo_path);
}
});
}
snapshot.repository_entries.insert(work_dir, repository)
}
Some(())
@ -3475,17 +3504,13 @@ pub struct Traversal<'a> {
impl<'a> Traversal<'a> {
pub fn advance(&mut self) -> bool {
self.advance_to_offset(self.offset() + 1)
}
pub fn advance_to_offset(&mut self, offset: usize) -> bool {
self.cursor.seek_forward(
&TraversalTarget::Count {
count: offset,
count: self.end_offset() + 1,
include_dirs: self.include_dirs,
include_ignored: self.include_ignored,
},
Bias::Right,
Bias::Left,
&(),
)
}
@ -3512,11 +3537,17 @@ impl<'a> Traversal<'a> {
self.cursor.item()
}
pub fn offset(&self) -> usize {
pub fn start_offset(&self) -> usize {
self.cursor
.start()
.count(self.include_dirs, self.include_ignored)
}
pub fn end_offset(&self) -> usize {
self.cursor
.end(&())
.count(self.include_dirs, self.include_ignored)
}
}
impl<'a> Iterator for Traversal<'a> {
@ -3585,6 +3616,25 @@ impl<'a> Iterator for ChildEntriesIter<'a> {
}
}
struct DescendentEntriesIter<'a> {
parent_path: &'a Path,
traversal: Traversal<'a>,
}
impl<'a> Iterator for DescendentEntriesIter<'a> {
type Item = &'a Entry;
fn next(&mut self) -> Option<Self::Item> {
if let Some(item) = self.traversal.entry() {
if item.path.starts_with(&self.parent_path) {
self.traversal.advance();
return Some(item);
}
}
None
}
}
impl<'a> From<&'a Entry> for proto::Entry {
fn from(entry: &'a Entry) -> Self {
Self {
@ -3699,6 +3749,105 @@ mod tests {
})
}
#[gpui::test]
async fn test_descendent_entries(cx: &mut TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
json!({
"a": "",
"b": {
"c": {
"d": ""
},
"e": {}
},
"f": "",
"g": {
"h": {}
},
"i": {
"j": {
"k": ""
},
"l": {
}
},
".gitignore": "i/j\n",
}),
)
.await;
let http_client = FakeHttpClient::with_404_response();
let client = cx.read(|cx| Client::new(http_client, cx));
let tree = Worktree::local(
client,
Path::new("/root"),
true,
fs,
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
tree.read_with(cx, |tree, _| {
assert_eq!(
tree.descendent_entries(false, false, Path::new("b"))
.map(|entry| entry.path.as_ref())
.collect::<Vec<_>>(),
vec![Path::new("b/c/d"),]
);
assert_eq!(
tree.descendent_entries(true, false, Path::new("b"))
.map(|entry| entry.path.as_ref())
.collect::<Vec<_>>(),
vec![
Path::new("b"),
Path::new("b/c"),
Path::new("b/c/d"),
Path::new("b/e"),
]
);
assert_eq!(
tree.descendent_entries(false, false, Path::new("g"))
.map(|entry| entry.path.as_ref())
.collect::<Vec<_>>(),
Vec::<PathBuf>::new()
);
assert_eq!(
tree.descendent_entries(true, false, Path::new("g"))
.map(|entry| entry.path.as_ref())
.collect::<Vec<_>>(),
vec![Path::new("g"), Path::new("g/h"),]
);
assert_eq!(
tree.descendent_entries(false, false, Path::new("i"))
.map(|entry| entry.path.as_ref())
.collect::<Vec<_>>(),
Vec::<PathBuf>::new()
);
assert_eq!(
tree.descendent_entries(false, true, Path::new("i"))
.map(|entry| entry.path.as_ref())
.collect::<Vec<_>>(),
vec![Path::new("i/j/k")]
);
assert_eq!(
tree.descendent_entries(true, false, Path::new("i"))
.map(|entry| entry.path.as_ref())
.collect::<Vec<_>>(),
vec![Path::new("i"), Path::new("i/l"),]
);
})
}
#[gpui::test(iterations = 10)]
async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
let fs = FakeFs::new(cx.background());
@ -4095,17 +4244,17 @@ mod tests {
let (dir, repo) = snapshot.repository_entries.iter().next().unwrap();
assert_eq!(dir.0.as_ref(), Path::new("project"));
assert_eq!(repo.worktree_statuses.iter().count(), 3);
assert_eq!(repo.statuses.iter().count(), 3);
assert_eq!(
repo.worktree_statuses.get(&Path::new(A_TXT).into()),
repo.statuses.get(&Path::new(A_TXT).into()),
Some(&GitFileStatus::Modified)
);
assert_eq!(
repo.worktree_statuses.get(&Path::new(B_TXT).into()),
repo.statuses.get(&Path::new(B_TXT).into()),
Some(&GitFileStatus::Added)
);
assert_eq!(
repo.worktree_statuses.get(&Path::new(F_TXT).into()),
repo.statuses.get(&Path::new(F_TXT).into()),
Some(&GitFileStatus::Added)
);
});
@ -4120,11 +4269,9 @@ mod tests {
let snapshot = tree.snapshot();
let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
assert_eq!(repo.worktree_statuses.iter().count(), 1);
assert_eq!(repo.worktree_statuses.get(&Path::new(A_TXT).into()), None);
assert_eq!(repo.worktree_statuses.get(&Path::new(B_TXT).into()), None);
assert_eq!(repo.statuses.iter().count(), 1);
assert_eq!(
repo.worktree_statuses.get(&Path::new(F_TXT).into()),
repo.statuses.get(&Path::new(F_TXT).into()),
Some(&GitFileStatus::Added)
);
});
@ -4141,18 +4288,18 @@ mod tests {
let snapshot = tree.snapshot();
let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
assert_eq!(repo.worktree_statuses.iter().count(), 3);
assert_eq!(repo.worktree_statuses.get(&Path::new(A_TXT).into()), None);
assert_eq!(repo.statuses.iter().count(), 3);
assert_eq!(repo.statuses.get(&Path::new(A_TXT).into()), None);
assert_eq!(
repo.worktree_statuses.get(&Path::new(B_TXT).into()),
repo.statuses.get(&Path::new(B_TXT).into()),
Some(&GitFileStatus::Added)
);
assert_eq!(
repo.worktree_statuses.get(&Path::new(E_TXT).into()),
repo.statuses.get(&Path::new(E_TXT).into()),
Some(&GitFileStatus::Modified)
);
assert_eq!(
repo.worktree_statuses.get(&Path::new(F_TXT).into()),
repo.statuses.get(&Path::new(F_TXT).into()),
Some(&GitFileStatus::Added)
);
});
@ -4170,20 +4317,58 @@ mod tests {
tree.flush_fs_events(cx).await;
dbg!(git_status(&repo));
// Check that non-repo behavior is tracked
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
dbg!(&repo.worktree_statuses);
assert_eq!(repo.statuses.iter().count(), 0);
});
assert_eq!(repo.worktree_statuses.iter().count(), 0);
assert_eq!(repo.worktree_statuses.get(&Path::new(A_TXT).into()), None);
assert_eq!(repo.worktree_statuses.get(&Path::new(B_TXT).into()), None);
assert_eq!(repo.worktree_statuses.get(&Path::new(E_TXT).into()), None);
assert_eq!(repo.worktree_statuses.get(&Path::new(F_TXT).into()), None);
let mut renamed_dir_name = "first_directory/second_directory";
const RENAMED_FILE: &'static str = "rf.txt";
std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
std::fs::write(
work_dir.join(renamed_dir_name).join(RENAMED_FILE),
"new-contents",
)
.unwrap();
tree.flush_fs_events(cx).await;
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
assert_eq!(repo.statuses.iter().count(), 1);
assert_eq!(
repo.statuses
.get(&Path::new(renamed_dir_name).join(RENAMED_FILE).into()),
Some(&GitFileStatus::Added)
);
});
renamed_dir_name = "new_first_directory/second_directory";
std::fs::rename(
work_dir.join("first_directory"),
work_dir.join("new_first_directory"),
)
.unwrap();
tree.flush_fs_events(cx).await;
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
assert_eq!(repo.statuses.iter().count(), 1);
assert_eq!(
repo.statuses
.get(&Path::new(renamed_dir_name).join(RENAMED_FILE).into()),
Some(&GitFileStatus::Added)
);
});
}

View File

@ -986,8 +986,8 @@ message Entry {
message RepositoryEntry {
uint64 work_directory_id = 1;
optional string branch = 2;
repeated string removed_worktree_repo_paths = 3;
repeated StatusEntry updated_worktree_statuses = 4;
repeated string removed_repo_paths = 3;
repeated StatusEntry updated_statuses = 4;
}
message StatusEntry {

View File

@ -1,6 +1,7 @@
use super::{entity_messages, messages, request_messages, ConnectionId, TypedEnvelope};
use anyhow::{anyhow, Result};
use async_tungstenite::tungstenite::Message as WebSocketMessage;
use collections::HashMap;
use futures::{SinkExt as _, StreamExt as _};
use prost::Message as _;
use serde::Serialize;
@ -485,11 +486,15 @@ pub fn split_worktree_update(
max_chunk_size: usize,
) -> impl Iterator<Item = UpdateWorktree> {
let mut done_files = false;
let mut done_statuses = false;
let mut repository_index = 0;
let mut root_repo_found = false;
let mut repository_map = message
.updated_repositories
.into_iter()
.map(|repo| (repo.work_directory_id, repo))
.collect::<HashMap<_, _>>();
iter::from_fn(move || {
if done_files && done_statuses {
if done_files {
return None;
}
@ -499,25 +504,6 @@ pub fn split_worktree_update(
.drain(..updated_entries_chunk_size)
.collect();
let mut updated_repositories: Vec<_> = Default::default();
if !root_repo_found {
for entry in updated_entries.iter() {
if let Some(repo) = message.updated_repositories.get(0) {
if repo.work_directory_id == entry.id {
root_repo_found = true;
updated_repositories.push(RepositoryEntry {
work_directory_id: repo.work_directory_id,
branch: repo.branch.clone(),
removed_worktree_repo_paths: Default::default(),
updated_worktree_statuses: Default::default(),
});
break;
}
}
}
}
let removed_entries_chunk_size = cmp::min(message.removed_entries.len(), max_chunk_size);
let removed_entries = message
.removed_entries
@ -526,65 +512,25 @@ pub fn split_worktree_update(
done_files = message.updated_entries.is_empty() && message.removed_entries.is_empty();
// Wait to send repositories until after we've guaranteed that their associated entries
// will be read
if done_files {
let mut total_statuses = 0;
while total_statuses < max_chunk_size
&& repository_index < message.updated_repositories.len()
{
let updated_statuses_chunk_size = cmp::min(
message.updated_repositories[repository_index]
.updated_worktree_statuses
.len(),
max_chunk_size - total_statuses,
);
let mut updated_repositories = Vec::new();
let updated_statuses: Vec<_> = message.updated_repositories[repository_index]
.updated_worktree_statuses
.drain(..updated_statuses_chunk_size)
.collect();
total_statuses += updated_statuses.len();
let done_this_repo = message.updated_repositories[repository_index]
.updated_worktree_statuses
.is_empty();
let removed_repo_paths = if done_this_repo {
mem::take(
&mut message.updated_repositories[repository_index]
.removed_worktree_repo_paths,
)
} else {
Default::default()
};
updated_repositories.push(RepositoryEntry {
work_directory_id: message.updated_repositories[repository_index]
.work_directory_id,
branch: message.updated_repositories[repository_index]
.branch
.clone(),
updated_worktree_statuses: updated_statuses,
removed_worktree_repo_paths: removed_repo_paths,
});
if done_this_repo {
repository_index += 1;
if !repository_map.is_empty() {
for entry in &updated_entries {
if let Some(repo) = repository_map.remove(&entry.id) {
updated_repositories.push(repo)
}
}
} else {
Default::default()
};
}
let removed_repositories = if done_files && done_statuses {
let removed_repositories = if done_files {
mem::take(&mut message.removed_repositories)
} else {
Default::default()
};
done_statuses = repository_index >= message.updated_repositories.len();
if done_files {
updated_repositories.extend(mem::take(&mut repository_map).into_values());
}
Some(UpdateWorktree {
project_id: message.project_id,