Merge pull request #2455 from zed-industries/git-status-viewer

Add Git Status to the project panel
Mikayla Maki 2023-05-11 16:13:34 -07:00 committed by GitHub
commit defc9c8591
21 changed files with 1494 additions and 192 deletions
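This change threads Git file statuses (added, modified, conflicted) from libgit2 through the worktree snapshot, syncs them to collaborators over the wire protocol, persists them in the collab database, and uses them to color file names in the project panel.

For orientation, here is a minimal sketch (not part of the diff) of how the new snapshot API below can be consumed, assuming the `Snapshot`, `RepositoryEntry`, and `GitFileStatus` types introduced in this PR are in scope:

```rust
use std::path::Path;

// Resolve a project-relative path to its Git status, if any.
fn git_status_for(snapshot: &Snapshot, path: &Path) -> Option<GitFileStatus> {
    // `repo_for` picks the deepest work directory containing `path`.
    let repo = snapshot.repo_for(path)?;
    // `None` means the file is clean or has no tracked status.
    repo.status_for_file(snapshot, path)
}
```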

Cargo.lock generated

@@ -2350,6 +2350,7 @@ dependencies = [
"serde_derive",
"serde_json",
"smol",
"sum_tree",
"tempfile",
"util",
]
@@ -4716,6 +4717,7 @@ dependencies = [
"futures 0.3.25",
"fuzzy",
"git",
"git2",
"glob",
"gpui",
"ignore",
@@ -6535,6 +6537,12 @@ dependencies = [
"winx",
]
[[package]]
name = "take-until"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8bdb6fa0dfa67b38c1e66b7041ba9dcf23b99d8121907cd31c807a332f7a0bbb"
[[package]]
name = "target-lexicon"
version = "0.12.5"
@@ -7594,6 +7602,7 @@ dependencies = [
"serde",
"serde_json",
"smol",
"take-until",
"tempdir",
"url",
]


@@ -86,8 +86,8 @@ CREATE TABLE "worktree_repositories" (
"project_id" INTEGER NOT NULL,
"worktree_id" INTEGER NOT NULL,
"work_directory_id" INTEGER NOT NULL,
"scan_id" INTEGER NOT NULL,
"branch" VARCHAR,
"scan_id" INTEGER NOT NULL,
"is_deleted" BOOL NOT NULL,
PRIMARY KEY(project_id, worktree_id, work_directory_id),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
@@ -96,6 +96,23 @@ CREATE TABLE "worktree_repositories" (
CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id");
CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id");
CREATE TABLE "worktree_repository_statuses" (
"project_id" INTEGER NOT NULL,
"worktree_id" INTEGER NOT NULL,
"work_directory_id" INTEGER NOT NULL,
"repo_path" VARCHAR NOT NULL,
"status" INTEGER NOT NULL,
"scan_id" INTEGER NOT NULL,
"is_deleted" BOOL NOT NULL,
PRIMARY KEY(project_id, worktree_id, work_directory_id, repo_path),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
);
CREATE INDEX "index_worktree_repository_statuses_on_project_id" ON "worktree_repository_statuses" ("project_id");
CREATE INDEX "index_worktree_repository_statuses_on_project_id_and_worktree_id" ON "worktree_repository_statuses" ("project_id", "worktree_id");
CREATE INDEX "index_worktree_repository_statuses_on_project_id_and_worktree_id_and_work_directory_id" ON "worktree_repository_statuses" ("project_id", "worktree_id", "work_directory_id");
CREATE TABLE "worktree_diagnostic_summaries" (
"project_id" INTEGER NOT NULL,
"worktree_id" INTEGER NOT NULL,


@@ -0,0 +1,15 @@
CREATE TABLE "worktree_repository_statuses" (
"project_id" INTEGER NOT NULL,
"worktree_id" INT8 NOT NULL,
"work_directory_id" INT8 NOT NULL,
"repo_path" VARCHAR NOT NULL,
"status" INT8 NOT NULL,
"scan_id" INT8 NOT NULL,
"is_deleted" BOOL NOT NULL,
PRIMARY KEY(project_id, worktree_id, work_directory_id, repo_path),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
);
CREATE INDEX "index_wt_repos_statuses_on_project_id" ON "worktree_repository_statuses" ("project_id");
CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id" ON "worktree_repository_statuses" ("project_id", "worktree_id");
CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id_and_wd_id" ON "worktree_repository_statuses" ("project_id", "worktree_id", "work_directory_id");


@@ -15,6 +15,7 @@ mod worktree;
mod worktree_diagnostic_summary;
mod worktree_entry;
mod worktree_repository;
mod worktree_repository_statuses;
use crate::executor::Executor;
use crate::{Error, Result};
@@ -1568,11 +1569,57 @@ impl Database {
worktree.updated_repositories.push(proto::RepositoryEntry {
work_directory_id: db_repository.work_directory_id as u64,
branch: db_repository.branch,
removed_worktree_repo_paths: Default::default(),
updated_worktree_statuses: Default::default(),
});
}
}
}
// Repository Status Entries
for repository in worktree.updated_repositories.iter_mut() {
let repository_status_entry_filter =
if let Some(rejoined_worktree) = rejoined_worktree {
worktree_repository_statuses::Column::ScanId
.gt(rejoined_worktree.scan_id)
} else {
worktree_repository_statuses::Column::IsDeleted.eq(false)
};
let mut db_repository_statuses =
worktree_repository_statuses::Entity::find()
.filter(
Condition::all()
.add(
worktree_repository_statuses::Column::WorktreeId
.eq(worktree.id),
)
.add(
worktree_repository_statuses::Column::WorkDirectoryId
.eq(repository.work_directory_id),
)
.add(repository_status_entry_filter),
)
.stream(&*tx)
.await?;
while let Some(db_status_entry) = db_repository_statuses.next().await {
let db_status_entry = db_status_entry?;
if db_status_entry.is_deleted {
repository
.removed_worktree_repo_paths
.push(db_status_entry.repo_path);
} else {
repository
.updated_worktree_statuses
.push(proto::StatusEntry {
repo_path: db_status_entry.repo_path,
status: db_status_entry.status as i32,
});
}
}
}
worktrees.push(worktree);
}
@@ -2395,6 +2442,74 @@ impl Database {
)
.exec(&*tx)
.await?;
for repository in update.updated_repositories.iter() {
if !repository.updated_worktree_statuses.is_empty() {
worktree_repository_statuses::Entity::insert_many(
repository
.updated_worktree_statuses
.iter()
.map(|status_entry| worktree_repository_statuses::ActiveModel {
project_id: ActiveValue::set(project_id),
worktree_id: ActiveValue::set(worktree_id),
work_directory_id: ActiveValue::set(
repository.work_directory_id as i64,
),
repo_path: ActiveValue::set(status_entry.repo_path.clone()),
status: ActiveValue::set(status_entry.status as i64),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
}),
)
.on_conflict(
OnConflict::columns([
worktree_repository_statuses::Column::ProjectId,
worktree_repository_statuses::Column::WorktreeId,
worktree_repository_statuses::Column::WorkDirectoryId,
worktree_repository_statuses::Column::RepoPath,
])
.update_columns([
worktree_repository_statuses::Column::ScanId,
worktree_repository_statuses::Column::Status,
worktree_repository_statuses::Column::IsDeleted,
])
.to_owned(),
)
.exec(&*tx)
.await?;
}
if !repository.removed_worktree_repo_paths.is_empty() {
worktree_repository_statuses::Entity::update_many()
.filter(
worktree_repository_statuses::Column::ProjectId
.eq(project_id)
.and(
worktree_repository_statuses::Column::WorktreeId
.eq(worktree_id),
)
.and(
worktree_repository_statuses::Column::WorkDirectoryId
.eq(repository.work_directory_id as i64),
)
.and(
worktree_repository_statuses::Column::RepoPath.is_in(
repository
.removed_worktree_repo_paths
.iter()
.map(String::as_str),
),
),
)
.set(worktree_repository_statuses::ActiveModel {
is_deleted: ActiveValue::Set(true),
scan_id: ActiveValue::Set(update.scan_id as i64),
..Default::default()
})
.exec(&*tx)
.await?;
}
}
}
if !update.removed_repositories.is_empty() {
@@ -2645,10 +2760,44 @@ impl Database {
if let Some(worktree) =
worktrees.get_mut(&(db_repository_entry.worktree_id as u64))
{
worktree.repository_entries.push(proto::RepositoryEntry {
work_directory_id: db_repository_entry.work_directory_id as u64,
branch: db_repository_entry.branch,
});
worktree.repository_entries.insert(
db_repository_entry.work_directory_id as u64,
proto::RepositoryEntry {
work_directory_id: db_repository_entry.work_directory_id as u64,
branch: db_repository_entry.branch,
removed_worktree_repo_paths: Default::default(),
updated_worktree_statuses: Default::default(),
},
);
}
}
}
{
let mut db_status_entries = worktree_repository_statuses::Entity::find()
.filter(
Condition::all()
.add(worktree_repository_statuses::Column::ProjectId.eq(project_id))
.add(worktree_repository_statuses::Column::IsDeleted.eq(false)),
)
.stream(&*tx)
.await?;
while let Some(db_status_entry) = db_status_entries.next().await {
let db_status_entry = db_status_entry?;
if let Some(worktree) = worktrees.get_mut(&(db_status_entry.worktree_id as u64))
{
if let Some(repository_entry) = worktree
.repository_entries
.get_mut(&(db_status_entry.work_directory_id as u64))
{
repository_entry
.updated_worktree_statuses
.push(proto::StatusEntry {
repo_path: db_status_entry.repo_path,
status: db_status_entry.status as i32,
});
}
}
}
}
@@ -3390,7 +3539,7 @@ pub struct Worktree {
pub root_name: String,
pub visible: bool,
pub entries: Vec<proto::Entry>,
pub repository_entries: Vec<proto::RepositoryEntry>,
pub repository_entries: BTreeMap<u64, proto::RepositoryEntry>,
pub diagnostic_summaries: Vec<proto::DiagnosticSummary>,
pub scan_id: u64,
pub completed_scan_id: u64,

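A note on the replay logic above: a status row is sent to a rejoining client only if its scan_id is newer than the client's last-known scan, and deletions are soft (is_deleted plus a scan_id bump) so they can be replayed as removals rather than silently vanishing. A standalone model of that rule with simplified types (a sketch, not the collab code):

```rust
// Simplified stand-in for a worktree_repository_statuses row.
struct StatusRow {
    repo_path: String,
    scan_id: u64,
    is_deleted: bool,
}

// Decide which rows to replay: on rejoin, anything newer than the client's
// last scan; on a fresh join, every live (non-deleted) row.
fn replay(rows: &[StatusRow], last_seen_scan: Option<u64>) -> (Vec<&StatusRow>, Vec<&str>) {
    let mut updated = Vec::new();
    let mut removed = Vec::new();
    for row in rows {
        let relevant = match last_seen_scan {
            Some(scan) => row.scan_id > scan,
            None => !row.is_deleted,
        };
        if !relevant {
            continue;
        }
        if row.is_deleted {
            removed.push(row.repo_path.as_str());
        } else {
            updated.push(row);
        }
    }
    (updated, removed)
}
```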

@@ -0,0 +1,23 @@
use super::ProjectId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktree_repository_statuses")]
pub struct Model {
#[sea_orm(primary_key)]
pub project_id: ProjectId,
#[sea_orm(primary_key)]
pub worktree_id: i64,
#[sea_orm(primary_key)]
pub work_directory_id: i64,
#[sea_orm(primary_key)]
pub repo_path: String,
pub status: i64,
pub scan_id: i64,
pub is_deleted: bool,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}


@@ -1385,7 +1385,7 @@ async fn join_project(
removed_entries: Default::default(),
scan_id: worktree.scan_id,
is_last_update: worktree.scan_id == worktree.completed_scan_id,
updated_repositories: worktree.repository_entries,
updated_repositories: worktree.repository_entries.into_values().collect(),
removed_repositories: Default::default(),
};
for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) {


@@ -10,7 +10,7 @@ use editor::{
ConfirmRename, Editor, ExcerptRange, MultiBuffer, Redo, Rename, ToOffset, ToggleCodeActions,
Undo,
};
use fs::{FakeFs, Fs as _, LineEnding, RemoveOptions};
use fs::{repository::GitFileStatus, FakeFs, Fs as _, LineEnding, RemoveOptions};
use futures::StreamExt as _;
use gpui::{
executor::Deterministic, geometry::vector::vec2f, test::EmptyView, AppContext, ModelHandle,
@@ -2690,6 +2690,154 @@ async fn test_git_branch_name(
});
}
#[gpui::test]
async fn test_git_status_sync(
deterministic: Arc<Deterministic>,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_c: &mut TestAppContext,
) {
deterministic.forbid_parking();
let mut server = TestServer::start(&deterministic).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let client_c = server.create_client(cx_c, "user_c").await;
server
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
.await;
let active_call_a = cx_a.read(ActiveCall::global);
client_a
.fs
.insert_tree(
"/dir",
json!({
".git": {},
"a.txt": "a",
"b.txt": "b",
}),
)
.await;
const A_TXT: &'static str = "a.txt";
const B_TXT: &'static str = "b.txt";
client_a
.fs
.as_fake()
.set_status_for_repo(
Path::new("/dir/.git"),
&[
(&Path::new(A_TXT), GitFileStatus::Added),
(&Path::new(B_TXT), GitFileStatus::Added),
],
)
.await;
let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| {
call.share_project(project_local.clone(), cx)
})
.await
.unwrap();
let project_remote = client_b.build_remote_project(project_id, cx_b).await;
// Wait for it to catch up to the new status
deterministic.run_until_parked();
#[track_caller]
fn assert_status(
file: &impl AsRef<Path>,
status: Option<GitFileStatus>,
project: &Project,
cx: &AppContext,
) {
let file = file.as_ref();
let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
assert_eq!(worktrees.len(), 1);
let worktree = worktrees[0].clone();
let snapshot = worktree.read(cx).snapshot();
let root_entry = snapshot.root_git_entry().unwrap();
assert_eq!(root_entry.status_for_file(&snapshot, file), status);
}
// Smoke test status reading
project_local.read_with(cx_a, |project, cx| {
assert_status(&Path::new(A_TXT), Some(GitFileStatus::Added), project, cx);
assert_status(&Path::new(B_TXT), Some(GitFileStatus::Added), project, cx);
});
project_remote.read_with(cx_b, |project, cx| {
assert_status(&Path::new(A_TXT), Some(GitFileStatus::Added), project, cx);
assert_status(&Path::new(B_TXT), Some(GitFileStatus::Added), project, cx);
});
client_a
.fs
.as_fake()
.set_status_for_repo(
Path::new("/dir/.git"),
&[
(&Path::new(A_TXT), GitFileStatus::Modified),
(&Path::new(B_TXT), GitFileStatus::Modified),
],
)
.await;
// Wait for buffer_local_a to receive it
deterministic.run_until_parked();
// Smoke test status reading
project_local.read_with(cx_a, |project, cx| {
assert_status(
&Path::new(A_TXT),
Some(GitFileStatus::Modified),
project,
cx,
);
assert_status(
&Path::new(B_TXT),
Some(GitFileStatus::Modified),
project,
cx,
);
});
project_remote.read_with(cx_b, |project, cx| {
assert_status(
&Path::new(A_TXT),
Some(GitFileStatus::Modified),
project,
cx,
);
assert_status(
&Path::new(B_TXT),
Some(GitFileStatus::Modified),
project,
cx,
);
});
// And check that statuses are synchronized for a client that joins later
let project_remote_c = client_c.build_remote_project(project_id, cx_c).await;
deterministic.run_until_parked();
project_remote_c.read_with(cx_c, |project, cx| {
assert_status(
&Path::new(A_TXT),
Some(GitFileStatus::Modified),
project,
cx,
);
assert_status(
&Path::new(B_TXT),
Some(GitFileStatus::Modified),
project,
cx,
);
});
}
#[gpui::test(iterations = 10)]
async fn test_fs_operations(
deterministic: Arc<Deterministic>,


@@ -8,12 +8,13 @@ use call::ActiveCall;
use client::RECEIVE_TIMEOUT;
use collections::BTreeMap;
use editor::Bias;
use fs::{FakeFs, Fs as _};
use fs::{repository::GitFileStatus, FakeFs, Fs as _};
use futures::StreamExt as _;
use gpui::{executor::Deterministic, ModelHandle, Task, TestAppContext};
use language::{range_to_lsp, FakeLspAdapter, Language, LanguageConfig, PointUtf16};
use lsp::FakeLanguageServer;
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use project::{search::SearchQuery, Project, ProjectPath};
use rand::{
distributions::{Alphanumeric, DistString},
@@ -763,53 +764,85 @@ async fn apply_client_operation(
}
}
ClientOperation::WriteGitIndex {
repo_path,
contents,
} => {
if !client.fs.directories().contains(&repo_path) {
return Err(TestError::Inapplicable);
}
log::info!(
"{}: writing git index for repo {:?}: {:?}",
client.username,
ClientOperation::GitOperation { operation } => match operation {
GitOperation::WriteGitIndex {
repo_path,
contents
);
contents,
} => {
if !client.fs.directories().contains(&repo_path) {
return Err(TestError::Inapplicable);
}
let dot_git_dir = repo_path.join(".git");
let contents = contents
.iter()
.map(|(path, contents)| (path.as_path(), contents.clone()))
.collect::<Vec<_>>();
if client.fs.metadata(&dot_git_dir).await?.is_none() {
client.fs.create_dir(&dot_git_dir).await?;
log::info!(
"{}: writing git index for repo {:?}: {:?}",
client.username,
repo_path,
contents
);
let dot_git_dir = repo_path.join(".git");
let contents = contents
.iter()
.map(|(path, contents)| (path.as_path(), contents.clone()))
.collect::<Vec<_>>();
if client.fs.metadata(&dot_git_dir).await?.is_none() {
client.fs.create_dir(&dot_git_dir).await?;
}
client.fs.set_index_for_repo(&dot_git_dir, &contents).await;
}
client.fs.set_index_for_repo(&dot_git_dir, &contents).await;
}
ClientOperation::WriteGitBranch {
repo_path,
new_branch,
} => {
if !client.fs.directories().contains(&repo_path) {
return Err(TestError::Inapplicable);
}
log::info!(
"{}: writing git branch for repo {:?}: {:?}",
client.username,
GitOperation::WriteGitBranch {
repo_path,
new_branch
);
new_branch,
} => {
if !client.fs.directories().contains(&repo_path) {
return Err(TestError::Inapplicable);
}
let dot_git_dir = repo_path.join(".git");
if client.fs.metadata(&dot_git_dir).await?.is_none() {
client.fs.create_dir(&dot_git_dir).await?;
log::info!(
"{}: writing git branch for repo {:?}: {:?}",
client.username,
repo_path,
new_branch
);
let dot_git_dir = repo_path.join(".git");
if client.fs.metadata(&dot_git_dir).await?.is_none() {
client.fs.create_dir(&dot_git_dir).await?;
}
client.fs.set_branch_name(&dot_git_dir, new_branch).await;
}
client.fs.set_branch_name(&dot_git_dir, new_branch).await;
}
GitOperation::WriteGitStatuses {
repo_path,
statuses,
} => {
if !client.fs.directories().contains(&repo_path) {
return Err(TestError::Inapplicable);
}
log::info!(
"{}: writing git statuses for repo {:?}: {:?}",
client.username,
repo_path,
statuses
);
let dot_git_dir = repo_path.join(".git");
let statuses = statuses
.iter()
.map(|(path, val)| (path.as_path(), val.clone()))
.collect::<Vec<_>>();
if client.fs.metadata(&dot_git_dir).await?.is_none() {
client.fs.create_dir(&dot_git_dir).await?;
}
client
.fs
.set_status_for_repo(&dot_git_dir, statuses.as_slice())
.await;
}
},
}
Ok(())
}
@@ -1178,6 +1211,13 @@ enum ClientOperation {
is_dir: bool,
content: String,
},
GitOperation {
operation: GitOperation,
},
}
#[derive(Clone, Debug, Serialize, Deserialize)]
enum GitOperation {
WriteGitIndex {
repo_path: PathBuf,
contents: Vec<(PathBuf, String)>,
@@ -1186,6 +1226,10 @@ enum ClientOperation {
repo_path: PathBuf,
new_branch: Option<String>,
},
WriteGitStatuses {
repo_path: PathBuf,
statuses: Vec<(PathBuf, GitFileStatus)>,
},
}
#[derive(Clone, Debug, Serialize, Deserialize)]
@@ -1698,57 +1742,10 @@ impl TestPlan {
}
}
// Update a git index
91..=93 => {
let repo_path = client
.fs
.directories()
.into_iter()
.choose(&mut self.rng)
.unwrap()
.clone();
let mut file_paths = client
.fs
.files()
.into_iter()
.filter(|path| path.starts_with(&repo_path))
.collect::<Vec<_>>();
let count = self.rng.gen_range(0..=file_paths.len());
file_paths.shuffle(&mut self.rng);
file_paths.truncate(count);
let mut contents = Vec::new();
for abs_child_file_path in &file_paths {
let child_file_path = abs_child_file_path
.strip_prefix(&repo_path)
.unwrap()
.to_path_buf();
let new_base = Alphanumeric.sample_string(&mut self.rng, 16);
contents.push((child_file_path, new_base));
}
break ClientOperation::WriteGitIndex {
repo_path,
contents,
};
}
// Update a git branch
94..=95 => {
let repo_path = client
.fs
.directories()
.choose(&mut self.rng)
.unwrap()
.clone();
let new_branch = (self.rng.gen_range(0..10) > 3)
.then(|| Alphanumeric.sample_string(&mut self.rng, 8));
break ClientOperation::WriteGitBranch {
repo_path,
new_branch,
// Perform a git-related operation
91..=95 => {
break ClientOperation::GitOperation {
operation: self.generate_git_operation(client),
};
}
@@ -1786,6 +1783,86 @@ impl TestPlan {
})
}
fn generate_git_operation(&mut self, client: &TestClient) -> GitOperation {
fn generate_file_paths(
repo_path: &Path,
rng: &mut StdRng,
client: &TestClient,
) -> Vec<PathBuf> {
let mut paths = client
.fs
.files()
.into_iter()
.filter(|path| path.starts_with(repo_path))
.collect::<Vec<_>>();
let count = rng.gen_range(0..=paths.len());
paths.shuffle(rng);
paths.truncate(count);
paths
.iter()
.map(|path| path.strip_prefix(repo_path).unwrap().to_path_buf())
.collect::<Vec<_>>()
}
let repo_path = client
.fs
.directories()
.choose(&mut self.rng)
.unwrap()
.clone();
match self.rng.gen_range(0..100_u32) {
0..=25 => {
let file_paths = generate_file_paths(&repo_path, &mut self.rng, client);
let contents = file_paths
.into_iter()
.map(|path| (path, Alphanumeric.sample_string(&mut self.rng, 16)))
.collect();
GitOperation::WriteGitIndex {
repo_path,
contents,
}
}
26..=63 => {
let new_branch = (self.rng.gen_range(0..10) > 3)
.then(|| Alphanumeric.sample_string(&mut self.rng, 8));
GitOperation::WriteGitBranch {
repo_path,
new_branch,
}
}
64..=100 => {
let file_paths = generate_file_paths(&repo_path, &mut self.rng, client);
let statuses = file_paths
.into_iter()
.map(|path| {
(
path,
match self.rng.gen_range(0..3_u32) {
0 => GitFileStatus::Added,
1 => GitFileStatus::Modified,
2 => GitFileStatus::Conflict,
_ => unreachable!(),
},
)
})
.collect::<Vec<_>>();
GitOperation::WriteGitStatuses {
repo_path,
statuses,
}
}
_ => unreachable!(),
}
}
fn next_root_dir_name(&mut self, user_id: UserId) -> String {
let user_ix = self
.users


@@ -13,6 +13,7 @@ gpui = { path = "../gpui" }
lsp = { path = "../lsp" }
rope = { path = "../rope" }
util = { path = "../util" }
sum_tree = { path = "../sum_tree" }
anyhow.workspace = true
async-trait.workspace = true
futures.workspace = true


@@ -27,7 +27,7 @@ use util::ResultExt;
#[cfg(any(test, feature = "test-support"))]
use collections::{btree_map, BTreeMap};
#[cfg(any(test, feature = "test-support"))]
use repository::FakeGitRepositoryState;
use repository::{FakeGitRepositoryState, GitFileStatus};
#[cfg(any(test, feature = "test-support"))]
use std::sync::Weak;
@@ -654,6 +654,17 @@ impl FakeFs {
});
}
pub async fn set_status_for_repo(&self, dot_git: &Path, statuses: &[(&Path, GitFileStatus)]) {
self.with_git_state(dot_git, |state| {
state.worktree_statuses.clear();
state.worktree_statuses.extend(
statuses
.iter()
.map(|(path, status)| ((**path).into(), status.clone())),
);
});
}
pub fn paths(&self) -> Vec<PathBuf> {
let mut result = Vec::new();
let mut queue = collections::VecDeque::new();


@@ -1,10 +1,14 @@
use anyhow::Result;
use collections::HashMap;
use parking_lot::Mutex;
use serde_derive::{Deserialize, Serialize};
use std::{
ffi::OsStr,
os::unix::prelude::OsStrExt,
path::{Component, Path, PathBuf},
sync::Arc,
};
use sum_tree::TreeMap;
use util::ResultExt;
pub use git2::Repository as LibGitRepository;
@@ -16,6 +20,10 @@ pub trait GitRepository: Send {
fn load_index_text(&self, relative_file_path: &Path) -> Option<String>;
fn branch_name(&self) -> Option<String>;
fn worktree_statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>>;
fn worktree_status(&self, path: &RepoPath) -> Option<GitFileStatus>;
}
impl std::fmt::Debug for dyn GitRepository {
@@ -61,6 +69,43 @@ impl GitRepository for LibGitRepository {
let branch = String::from_utf8_lossy(head.shorthand_bytes());
Some(branch.to_string())
}
fn worktree_statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>> {
let statuses = self.statuses(None).log_err()?;
let mut map = TreeMap::default();
for status in statuses
.iter()
.filter(|status| !status.status().contains(git2::Status::IGNORED))
{
let path = RepoPath(PathBuf::from(OsStr::from_bytes(status.path_bytes())));
let Some(status) = read_status(status.status()) else {
continue
};
map.insert(path, status)
}
Some(map)
}
fn worktree_status(&self, path: &RepoPath) -> Option<GitFileStatus> {
let status = self.status_file(path).log_err()?;
read_status(status)
}
}
fn read_status(status: git2::Status) -> Option<GitFileStatus> {
if status.contains(git2::Status::CONFLICTED) {
Some(GitFileStatus::Conflict)
} else if status.intersects(git2::Status::WT_MODIFIED | git2::Status::WT_RENAMED) {
Some(GitFileStatus::Modified)
} else if status.intersects(git2::Status::WT_NEW) {
Some(GitFileStatus::Added)
} else {
None
}
}
#[derive(Debug, Clone, Default)]
@ -71,6 +116,7 @@ pub struct FakeGitRepository {
#[derive(Debug, Clone, Default)]
pub struct FakeGitRepositoryState {
pub index_contents: HashMap<PathBuf, String>,
pub worktree_statuses: HashMap<RepoPath, GitFileStatus>,
pub branch_name: Option<String>,
}
@@ -93,6 +139,20 @@ impl GitRepository for FakeGitRepository {
let state = self.state.lock();
state.branch_name.clone()
}
fn worktree_statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>> {
let state = self.state.lock();
let mut map = TreeMap::default();
for (repo_path, status) in state.worktree_statuses.iter() {
map.insert(repo_path.to_owned(), status.to_owned());
}
Some(map)
}
fn worktree_status(&self, path: &RepoPath) -> Option<GitFileStatus> {
let state = self.state.lock();
state.worktree_statuses.get(path).cloned()
}
}
fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> {
@@ -123,3 +183,53 @@ fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> {
_ => Ok(()),
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum GitFileStatus {
Added,
Modified,
Conflict,
}
#[derive(Clone, Debug, Ord, Hash, PartialOrd, Eq, PartialEq)]
pub struct RepoPath(PathBuf);
impl RepoPath {
pub fn new(path: PathBuf) -> Self {
debug_assert!(path.is_relative(), "Repo paths must be relative");
RepoPath(path)
}
}
impl From<&Path> for RepoPath {
fn from(value: &Path) -> Self {
RepoPath::new(value.to_path_buf())
}
}
impl From<PathBuf> for RepoPath {
fn from(value: PathBuf) -> Self {
RepoPath::new(value)
}
}
impl Default for RepoPath {
fn default() -> Self {
RepoPath(PathBuf::new())
}
}
impl AsRef<Path> for RepoPath {
fn as_ref(&self) -> &Path {
self.0.as_ref()
}
}
impl std::ops::Deref for RepoPath {
type Target = PathBuf;
fn deref(&self) -> &Self::Target {
&self.0
}
}

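The status mapping above is ordered: git2's CONFLICTED bit wins over the worktree-modified bits, which win over WT_NEW, and anything else (e.g. index-only changes) yields no status. A quick illustrative unit test for the same module (placement is an assumption; this is not part of the diff):

```rust
#[test]
fn read_status_prefers_conflicts() {
    // A path that is both renamed and conflicted reports Conflict,
    // because the CONFLICTED check runs first.
    let status = git2::Status::CONFLICTED | git2::Status::WT_RENAMED;
    assert_eq!(read_status(status), Some(GitFileStatus::Conflict));
    // Plain worktree edits map to Modified.
    assert_eq!(
        read_status(git2::Status::WT_MODIFIED),
        Some(GitFileStatus::Modified)
    );
    // Index-only changes are not surfaced as a worktree status.
    assert_eq!(read_status(git2::Status::INDEX_MODIFIED), None);
}
```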

@@ -42,7 +42,7 @@ impl Color {
}
pub fn yellow() -> Self {
Self(ColorU::from_u32(0x00ffffff))
Self(ColorU::from_u32(0xffff00ff))
}
pub fn new(r: u8, g: u8, b: u8, a: u8) -> Self {


@@ -74,5 +74,6 @@ lsp = { path = "../lsp", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
git2 = { version = "0.15", default-features = false }
tempdir.workspace = true
unindent.workspace = true


@@ -6,7 +6,10 @@ use anyhow::{anyhow, Context, Result};
use client::{proto, Client};
use clock::ReplicaId;
use collections::{HashMap, VecDeque};
use fs::{repository::GitRepository, Fs, LineEnding};
use fs::{
repository::{GitFileStatus, GitRepository, RepoPath},
Fs, LineEnding,
};
use futures::{
channel::{
mpsc::{self, UnboundedSender},
@@ -52,7 +55,7 @@ use std::{
time::{Duration, SystemTime},
};
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
use util::{paths::HOME, ResultExt, TryFutureExt};
use util::{paths::HOME, ResultExt, TakeUntilExt, TryFutureExt};
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
pub struct WorktreeId(usize);
@@ -117,10 +120,38 @@ pub struct Snapshot {
completed_scan_id: usize,
}
#[derive(Clone, Debug, Eq, PartialEq)]
impl Snapshot {
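/// Returns the repository whose work directory most specifically (i.e. the
/// deepest work directory) contains the given project-relative path.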
pub fn repo_for(&self, path: &Path) -> Option<RepositoryEntry> {
let mut max_len = 0;
let mut current_candidate = None;
for (work_directory, repo) in (&self.repository_entries).iter() {
if repo.contains(self, path) {
if work_directory.0.as_os_str().len() >= max_len {
current_candidate = Some(repo);
max_len = work_directory.0.as_os_str().len();
} else {
break;
}
}
}
current_candidate.map(|entry| entry.to_owned())
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositoryEntry {
pub(crate) work_directory: WorkDirectoryEntry,
pub(crate) branch: Option<Arc<str>>,
pub(crate) worktree_statuses: TreeMap<RepoPath, GitFileStatus>,
}
fn read_git_status(git_status: i32) -> Option<GitFileStatus> {
proto::GitStatus::from_i32(git_status).map(|status| match status {
proto::GitStatus::Added => GitFileStatus::Added,
proto::GitStatus::Modified => GitFileStatus::Modified,
proto::GitStatus::Conflict => GitFileStatus::Conflict,
})
}
impl RepositoryEntry {
@@ -141,6 +172,102 @@ impl RepositoryEntry {
pub(crate) fn contains(&self, snapshot: &Snapshot, path: &Path) -> bool {
self.work_directory.contains(snapshot, path)
}
pub fn status_for_file(&self, snapshot: &Snapshot, path: &Path) -> Option<GitFileStatus> {
self.work_directory
.relativize(snapshot, path)
.and_then(|repo_path| self.worktree_statuses.get(&repo_path))
.cloned()
}
pub fn status_for_path(&self, snapshot: &Snapshot, path: &Path) -> Option<GitFileStatus> {
self.work_directory
.relativize(snapshot, path)
.and_then(|repo_path| {
self.worktree_statuses
.iter_from(&repo_path)
.take_while(|(key, _)| key.starts_with(&repo_path))
.map(|(_, status)| status)
// Short-circuit once we've found the highest-priority status
.take_until(|status| status == &&GitFileStatus::Conflict)
.reduce(
|status_first, status_second| match (status_first, status_second) {
(GitFileStatus::Conflict, _) | (_, GitFileStatus::Conflict) => {
&GitFileStatus::Conflict
}
(GitFileStatus::Added, _) | (_, GitFileStatus::Added) => {
&GitFileStatus::Added
}
_ => &GitFileStatus::Modified,
},
)
.copied()
})
}
pub fn build_update(&self, other: &Self) -> proto::RepositoryEntry {
let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
let mut removed_statuses: Vec<String> = Vec::new();
let mut self_statuses = self.worktree_statuses.iter().peekable();
let mut other_statuses = other.worktree_statuses.iter().peekable();
loop {
match (self_statuses.peek(), other_statuses.peek()) {
(Some((self_repo_path, self_status)), Some((other_repo_path, other_status))) => {
match Ord::cmp(self_repo_path, other_repo_path) {
Ordering::Less => {
updated_statuses.push(make_status_entry(self_repo_path, self_status));
self_statuses.next();
}
Ordering::Equal => {
if self_status != other_status {
updated_statuses
.push(make_status_entry(self_repo_path, self_status));
}
self_statuses.next();
other_statuses.next();
}
Ordering::Greater => {
removed_statuses.push(make_repo_path(other_repo_path));
other_statuses.next();
}
}
}
(Some((self_repo_path, self_status)), None) => {
updated_statuses.push(make_status_entry(self_repo_path, self_status));
self_statuses.next();
}
(None, Some((other_repo_path, _))) => {
removed_statuses.push(make_repo_path(other_repo_path));
other_statuses.next();
}
(None, None) => break,
}
}
proto::RepositoryEntry {
work_directory_id: self.work_directory_id().to_proto(),
branch: self.branch.as_ref().map(|str| str.to_string()),
removed_worktree_repo_paths: removed_statuses,
updated_worktree_statuses: updated_statuses,
}
}
}
fn make_repo_path(path: &RepoPath) -> String {
path.as_os_str().to_string_lossy().to_string()
}
fn make_status_entry(path: &RepoPath, status: &GitFileStatus) -> proto::StatusEntry {
proto::StatusEntry {
repo_path: make_repo_path(path),
status: match status {
GitFileStatus::Added => proto::GitStatus::Added.into(),
GitFileStatus::Modified => proto::GitStatus::Modified.into(),
GitFileStatus::Conflict => proto::GitStatus::Conflict.into(),
},
}
}
impl From<&RepositoryEntry> for proto::RepositoryEntry {
@@ -148,6 +275,12 @@ impl From<&RepositoryEntry> for proto::RepositoryEntry {
proto::RepositoryEntry {
work_directory_id: value.work_directory.to_proto(),
branch: value.branch.as_ref().map(|str| str.to_string()),
updated_worktree_statuses: value
.worktree_statuses
.iter()
.map(|(repo_path, status)| make_status_entry(repo_path, status))
.collect(),
removed_worktree_repo_paths: Default::default(),
}
}
}
@@ -162,6 +295,12 @@ impl Default for RepositoryWorkDirectory {
}
}
impl AsRef<Path> for RepositoryWorkDirectory {
fn as_ref(&self) -> &Path {
self.0.as_ref()
}
}
#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
pub struct WorkDirectoryEntry(ProjectEntryId);
@@ -178,7 +317,7 @@ impl WorkDirectoryEntry {
worktree.entry_for_id(self.0).and_then(|entry| {
path.strip_prefix(&entry.path)
.ok()
.map(move |path| RepoPath(path.to_owned()))
.map(move |path| path.into())
})
}
}
@@ -197,29 +336,6 @@ impl<'a> From<ProjectEntryId> for WorkDirectoryEntry {
}
}
#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
pub struct RepoPath(PathBuf);
impl AsRef<Path> for RepoPath {
fn as_ref(&self) -> &Path {
self.0.as_ref()
}
}
impl Deref for RepoPath {
type Target = PathBuf;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl AsRef<Path> for RepositoryWorkDirectory {
fn as_ref(&self) -> &Path {
self.0.as_ref()
}
}
#[derive(Debug, Clone)]
pub struct LocalSnapshot {
ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
@@ -234,6 +350,7 @@ pub struct LocalSnapshot {
#[derive(Debug, Clone)]
pub struct LocalRepositoryEntry {
pub(crate) scan_id: usize,
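/// Bumped only when the entire repository is rescanned (on .git events);
/// lets per-file status reloads short-circuit when a full scan has already
/// run during the current pass.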
pub(crate) full_scan_id: usize,
pub(crate) repo_ptr: Arc<Mutex<dyn GitRepository>>,
/// Path to the actual .git folder.
/// Note: if .git is a file, this points to the folder indicated by the .git file
@@ -1424,13 +1541,41 @@ impl Snapshot {
});
for repository in update.updated_repositories {
let repository = RepositoryEntry {
work_directory: ProjectEntryId::from_proto(repository.work_directory_id).into(),
branch: repository.branch.map(Into::into),
};
if let Some(entry) = self.entry_for_id(repository.work_directory_id()) {
self.repository_entries
.insert(RepositoryWorkDirectory(entry.path.clone()), repository)
let work_directory_entry: WorkDirectoryEntry =
ProjectEntryId::from_proto(repository.work_directory_id).into();
if let Some(entry) = self.entry_for_id(*work_directory_entry) {
let mut statuses = TreeMap::default();
for status_entry in repository.updated_worktree_statuses {
let Some(git_file_status) = read_git_status(status_entry.status) else {
continue;
};
let repo_path = RepoPath::new(status_entry.repo_path.into());
statuses.insert(repo_path, git_file_status);
}
let work_directory = RepositoryWorkDirectory(entry.path.clone());
if self.repository_entries.get(&work_directory).is_some() {
self.repository_entries.update(&work_directory, |repo| {
repo.branch = repository.branch.map(Into::into);
repo.worktree_statuses.insert_tree(statuses);
for repo_path in repository.removed_worktree_repo_paths {
let repo_path = RepoPath::new(repo_path.into());
repo.worktree_statuses.remove(&repo_path);
}
});
} else {
self.repository_entries.insert(
work_directory,
RepositoryEntry {
work_directory: work_directory_entry,
branch: repository.branch.map(Into::into),
worktree_statuses: statuses,
},
)
}
} else {
log::error!("no work directory entry for repository {:?}", repository)
}
@@ -1570,32 +1715,17 @@ impl Snapshot {
}
impl LocalSnapshot {
pub(crate) fn repo_for(&self, path: &Path) -> Option<RepositoryEntry> {
let mut max_len = 0;
let mut current_candidate = None;
for (work_directory, repo) in (&self.repository_entries).iter() {
if repo.contains(self, path) {
if work_directory.0.as_os_str().len() >= max_len {
current_candidate = Some(repo);
max_len = work_directory.0.as_os_str().len();
} else {
break;
}
}
}
current_candidate.map(|entry| entry.to_owned())
pub(crate) fn get_local_repo(&self, repo: &RepositoryEntry) -> Option<&LocalRepositoryEntry> {
self.git_repositories.get(&repo.work_directory.0)
}
pub(crate) fn repo_for_metadata(
&self,
path: &Path,
) -> Option<(ProjectEntryId, Arc<Mutex<dyn GitRepository>>)> {
let (entry_id, local_repo) = self
.git_repositories
) -> Option<(&ProjectEntryId, &LocalRepositoryEntry)> {
self.git_repositories
.iter()
.find(|(_, repo)| repo.in_dot_git(path))?;
Some((*entry_id, local_repo.repo_ptr.to_owned()))
.find(|(_, repo)| repo.in_dot_git(path))
}
#[cfg(test)]
@@ -1685,7 +1815,7 @@ impl LocalSnapshot {
}
Ordering::Equal => {
if self_repo != other_repo {
updated_repositories.push((*self_repo).into());
updated_repositories.push(self_repo.build_update(other_repo));
}
self_repos.next();
@@ -1852,11 +1982,13 @@ impl LocalSnapshot {
let scan_id = self.scan_id;
let repo_lock = repo.lock();
self.repository_entries.insert(
work_directory,
RepositoryEntry {
work_directory: work_dir_id.into(),
branch: repo_lock.branch_name().map(Into::into),
worktree_statuses: repo_lock.worktree_statuses().unwrap_or_default(),
},
);
drop(repo_lock);
@@ -1865,6 +1997,7 @@ impl LocalSnapshot {
work_dir_id,
LocalRepositoryEntry {
scan_id,
full_scan_id: scan_id,
repo_ptr: repo,
git_dir_path: parent_path.clone(),
},
@@ -2840,26 +2973,7 @@ impl BackgroundScanner {
fs_entry.is_ignored = ignore_stack.is_all();
snapshot.insert_entry(fs_entry, self.fs.as_ref());
let scan_id = snapshot.scan_id;
let repo_with_path_in_dotgit = snapshot.repo_for_metadata(&path);
if let Some((entry_id, repo)) = repo_with_path_in_dotgit {
let work_dir = snapshot
.entry_for_id(entry_id)
.map(|entry| RepositoryWorkDirectory(entry.path.clone()))?;
let repo = repo.lock();
repo.reload_index();
let branch = repo.branch_name();
snapshot.git_repositories.update(&entry_id, |entry| {
entry.scan_id = scan_id;
});
snapshot
.repository_entries
.update(&work_dir, |entry| entry.branch = branch.map(Into::into));
}
self.reload_repo_for_path(&path, &mut snapshot);
if let Some(scan_queue_tx) = &scan_queue_tx {
let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path);
@@ -2876,7 +2990,9 @@ impl BackgroundScanner {
}
}
}
Ok(None) => {}
Ok(None) => {
self.remove_repo_path(&path, &mut snapshot);
}
Err(err) => {
// TODO - create a special 'error' entry in the entries tree to mark this
log::error!("error reading file on event {:?}", err);
@@ -2887,6 +3003,109 @@ impl BackgroundScanner {
Some(event_paths)
}
fn remove_repo_path(&self, path: &Path, snapshot: &mut LocalSnapshot) -> Option<()> {
if !path
.components()
.any(|component| component.as_os_str() == *DOT_GIT)
{
let scan_id = snapshot.scan_id;
let repo = snapshot.repo_for(&path)?;
let repo_path = repo.work_directory.relativize(&snapshot, &path)?;
let work_dir = repo.work_directory(snapshot)?;
let work_dir_id = repo.work_directory;
snapshot
.git_repositories
.update(&work_dir_id, |entry| entry.scan_id = scan_id);
snapshot.repository_entries.update(&work_dir, |entry| {
entry
.worktree_statuses
.remove_by(&repo_path, |stored_path| {
stored_path.starts_with(&repo_path)
})
});
}
Some(())
}
fn reload_repo_for_path(&self, path: &Path, snapshot: &mut LocalSnapshot) -> Option<()> {
let scan_id = snapshot.scan_id;
if path
.components()
.any(|component| component.as_os_str() == *DOT_GIT)
{
let (entry_id, repo_ptr) = {
let (entry_id, repo) = snapshot.repo_for_metadata(&path)?;
if repo.full_scan_id == scan_id {
return None;
}
(*entry_id, repo.repo_ptr.to_owned())
};
let work_dir = snapshot
.entry_for_id(entry_id)
.map(|entry| RepositoryWorkDirectory(entry.path.clone()))?;
let repo = repo_ptr.lock();
repo.reload_index();
let branch = repo.branch_name();
let statuses = repo.worktree_statuses().unwrap_or_default();
snapshot.git_repositories.update(&entry_id, |entry| {
entry.scan_id = scan_id;
entry.full_scan_id = scan_id;
});
snapshot.repository_entries.update(&work_dir, |entry| {
entry.branch = branch.map(Into::into);
entry.worktree_statuses = statuses;
});
} else {
if snapshot
.entry_for_path(&path)
.map(|entry| entry.is_ignored)
.unwrap_or(false)
{
self.remove_repo_path(&path, snapshot);
return None;
}
let repo = snapshot.repo_for(&path)?;
let repo_path = repo.work_directory.relativize(&snapshot, &path)?;
let status = {
let local_repo = snapshot.get_local_repo(&repo)?;
// Short circuit if we've already scanned everything
if local_repo.full_scan_id == scan_id {
return None;
}
let git_ptr = local_repo.repo_ptr.lock();
git_ptr.worktree_status(&repo_path)?
};
let work_dir = repo.work_directory(snapshot)?;
let work_dir_id = repo.work_directory;
snapshot
.git_repositories
.update(&work_dir_id, |entry| entry.scan_id = scan_id);
snapshot.repository_entries.update(&work_dir, |entry| {
entry.worktree_statuses.insert(repo_path, status)
});
}
Some(())
}
async fn update_ignore_statuses(&self) {
use futures::FutureExt as _;
@@ -3686,6 +3905,244 @@ mod tests {
});
}
#[gpui::test]
async fn test_git_status(cx: &mut TestAppContext) {
#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
git2::Repository::init(path).expect("Failed to initialize git repository")
}
#[track_caller]
fn git_add(path: &Path, repo: &git2::Repository) {
let mut index = repo.index().expect("Failed to get index");
index.add_path(path).expect("Failed to add file to index");
index.write().expect("Failed to write index");
}
#[track_caller]
fn git_remove_index(path: &Path, repo: &git2::Repository) {
let mut index = repo.index().expect("Failed to get index");
index.remove_path(path).expect("Failed to remove file from index");
index.write().expect("Failed to write index");
}
#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
use git2::Signature;
let signature = Signature::now("test", "test@zed.dev").unwrap();
let oid = repo.index().unwrap().write_tree().unwrap();
let tree = repo.find_tree(oid).unwrap();
if let Some(head) = repo.head().ok() {
let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
let parent_commit = parent_obj.as_commit().unwrap();
repo.commit(
Some("HEAD"),
&signature,
&signature,
msg,
&tree,
&[parent_commit],
)
.expect("Failed to commit with parent");
} else {
repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
.expect("Failed to commit");
}
}
#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
use git2::Signature;
let signature = Signature::now("test", "test@zed.dev").unwrap();
repo.stash_save(&signature, "N/A", None)
.expect("Failed to stash");
}
#[track_caller]
fn git_reset(offset: usize, repo: &git2::Repository) {
let head = repo.head().expect("Couldn't get repo head");
let object = head.peel(git2::ObjectType::Commit).unwrap();
let commit = object.as_commit().unwrap();
let new_head = commit
.parents()
.nth(offset)
.expect("Not enough history");
repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
.expect("Could not reset");
}
#[allow(dead_code)]
#[track_caller]
fn git_status(repo: &git2::Repository) -> HashMap<String, git2::Status> {
repo.statuses(None)
.unwrap()
.iter()
.map(|status| (status.path().unwrap().to_string(), status.status()))
.collect()
}
const IGNORE_RULE: &'static str = "**/target";
let root = temp_tree(json!({
"project": {
"a.txt": "a",
"b.txt": "bb",
"c": {
"d": {
"e.txt": "eee"
}
},
"f.txt": "ffff",
"target": {
"build_file": "???"
},
".gitignore": IGNORE_RULE
},
}));
let http_client = FakeHttpClient::with_404_response();
let client = cx.read(|cx| Client::new(http_client, cx));
let tree = Worktree::local(
client,
root.path(),
true,
Arc::new(RealFs),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
const A_TXT: &'static str = "a.txt";
const B_TXT: &'static str = "b.txt";
const E_TXT: &'static str = "c/d/e.txt";
const F_TXT: &'static str = "f.txt";
const DOTGITIGNORE: &'static str = ".gitignore";
const BUILD_FILE: &'static str = "target/build_file";
let work_dir = root.path().join("project");
let mut repo = git_init(work_dir.as_path());
repo.add_ignore_rule(IGNORE_RULE).unwrap();
git_add(Path::new(A_TXT), &repo);
git_add(Path::new(E_TXT), &repo);
git_add(Path::new(DOTGITIGNORE), &repo);
git_commit("Initial commit", &repo);
std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
tree.flush_fs_events(cx).await;
// Check that the right git state is observed on startup
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
assert_eq!(snapshot.repository_entries.iter().count(), 1);
let (dir, repo) = snapshot.repository_entries.iter().next().unwrap();
assert_eq!(dir.0.as_ref(), Path::new("project"));
assert_eq!(repo.worktree_statuses.iter().count(), 3);
assert_eq!(
repo.worktree_statuses.get(&Path::new(A_TXT).into()),
Some(&GitFileStatus::Modified)
);
assert_eq!(
repo.worktree_statuses.get(&Path::new(B_TXT).into()),
Some(&GitFileStatus::Added)
);
assert_eq!(
repo.worktree_statuses.get(&Path::new(F_TXT).into()),
Some(&GitFileStatus::Added)
);
});
git_add(Path::new(A_TXT), &repo);
git_add(Path::new(B_TXT), &repo);
git_commit("Committing modified and added", &repo);
tree.flush_fs_events(cx).await;
// Check that repo-only changes are tracked
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
assert_eq!(repo.worktree_statuses.iter().count(), 1);
assert_eq!(repo.worktree_statuses.get(&Path::new(A_TXT).into()), None);
assert_eq!(repo.worktree_statuses.get(&Path::new(B_TXT).into()), None);
assert_eq!(
repo.worktree_statuses.get(&Path::new(F_TXT).into()),
Some(&GitFileStatus::Added)
);
});
git_reset(0, &repo);
git_remove_index(Path::new(B_TXT), &repo);
git_stash(&mut repo);
std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
tree.flush_fs_events(cx).await;
// Check that more complex repo changes are tracked
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
assert_eq!(repo.worktree_statuses.iter().count(), 3);
assert_eq!(repo.worktree_statuses.get(&Path::new(A_TXT).into()), None);
assert_eq!(
repo.worktree_statuses.get(&Path::new(B_TXT).into()),
Some(&GitFileStatus::Added)
);
assert_eq!(
repo.worktree_statuses.get(&Path::new(E_TXT).into()),
Some(&GitFileStatus::Modified)
);
assert_eq!(
repo.worktree_statuses.get(&Path::new(F_TXT).into()),
Some(&GitFileStatus::Added)
);
});
std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
std::fs::remove_dir_all(work_dir.join("c")).unwrap();
std::fs::write(
work_dir.join(DOTGITIGNORE),
[IGNORE_RULE, "f.txt"].join("\n"),
)
.unwrap();
git_add(Path::new(DOTGITIGNORE), &repo);
git_commit("Committing modified git ignore", &repo);
tree.flush_fs_events(cx).await;
// Check that removed and newly ignored files no longer report statuses
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
assert_eq!(repo.worktree_statuses.iter().count(), 0);
assert_eq!(repo.worktree_statuses.get(&Path::new(A_TXT).into()), None);
assert_eq!(repo.worktree_statuses.get(&Path::new(B_TXT).into()), None);
assert_eq!(repo.worktree_statuses.get(&Path::new(E_TXT).into()), None);
assert_eq!(repo.worktree_statuses.get(&Path::new(F_TXT).into()), None);
});
}
#[gpui::test]
async fn test_write_file(cx: &mut TestAppContext) {
let dir = temp_tree(json!({

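One detail worth calling out from `RepositoryEntry::status_for_path` above: a directory's status is the reduction of every status beneath it, with Conflict outranking Added and Added outranking Modified (the take_until call just short-circuits the scan once a Conflict is found). A std-only model of that roll-up (a sketch with simplified types, not the TreeMap-based implementation):

```rust
use std::collections::BTreeMap;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Status {
    Added,
    Modified,
    Conflict,
}

// Reduce all statuses under `dir_prefix` to a single representative status,
// mirroring the iter_from + take_while + reduce in status_for_path.
fn rollup(statuses: &BTreeMap<String, Status>, dir_prefix: &str) -> Option<Status> {
    statuses
        .range(dir_prefix.to_string()..)
        .take_while(|(path, _)| path.starts_with(dir_prefix))
        .map(|(_, status)| *status)
        .fold(None, |acc, status| {
            Some(match (acc, status) {
                (Some(Status::Conflict), _) | (_, Status::Conflict) => Status::Conflict,
                (Some(Status::Added), _) | (_, Status::Added) => Status::Added,
                _ => Status::Modified,
            })
        })
}
```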

@@ -16,7 +16,10 @@ use gpui::{
ViewHandle, WeakViewHandle,
};
use menu::{Confirm, SelectNext, SelectPrev};
use project::{Entry, EntryKind, Project, ProjectEntryId, ProjectPath, Worktree, WorktreeId};
use project::{
repository::GitFileStatus, Entry, EntryKind, Project, ProjectEntryId, ProjectPath, Worktree,
WorktreeId,
};
use settings::Settings;
use std::{
cmp::Ordering,
@@ -86,6 +89,7 @@ pub struct EntryDetails {
is_editing: bool,
is_processing: bool,
is_cut: bool,
git_status: Option<GitFileStatus>,
}
actions!(
@@ -1008,6 +1012,15 @@ impl ProjectPanel {
let entry_range = range.start.saturating_sub(ix)..end_ix - ix;
for entry in &visible_worktree_entries[entry_range] {
let path = &entry.path;
let status = (entry.path.parent().is_some() && !entry.is_ignored)
.then(|| {
snapshot
.repo_for(path)
.and_then(|entry| entry.status_for_path(&snapshot, path))
})
.flatten();
let mut details = EntryDetails {
filename: entry
.path
@@ -1028,6 +1041,7 @@ impl ProjectPanel {
is_cut: self
.clipboard_entry
.map_or(false, |e| e.is_cut() && e.entry_id() == entry.id),
git_status: status,
};
if let Some(edit_state) = &self.edit_state {
@@ -1069,6 +1083,19 @@ impl ProjectPanel {
let kind = details.kind;
let show_editor = details.is_editing && !details.is_processing;
// Prepare colors for git statuses
let editor_theme = &cx.global::<Settings>().theme.editor;
let mut filename_text_style = style.text.clone();
filename_text_style.color = details
.git_status
.as_ref()
.map(|status| match status {
GitFileStatus::Added => editor_theme.diff.inserted,
GitFileStatus::Modified => editor_theme.diff.modified,
GitFileStatus::Conflict => editor_theme.diff.deleted,
})
.unwrap_or(style.text.color);
Flex::row()
.with_child(
if kind == EntryKind::Dir {
@@ -1096,7 +1123,7 @@ impl ProjectPanel {
.flex(1.0, true)
.into_any()
} else {
Label::new(details.filename.clone(), style.text.clone())
Label::new(details.filename.clone(), filename_text_style)
.contained()
.with_margin_left(style.icon_spacing)
.aligned()


@@ -986,8 +986,22 @@ message Entry {
message RepositoryEntry {
uint64 work_directory_id = 1;
optional string branch = 2;
repeated string removed_worktree_repo_paths = 3;
repeated StatusEntry updated_worktree_statuses = 4;
}
message StatusEntry {
string repo_path = 1;
GitStatus status = 2;
}
enum GitStatus {
Added = 0;
Modified = 1;
Conflict = 2;
}
message BufferState {
uint64 id = 1;
optional File file = 2;

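The proto change above turns RepositoryEntry into a delta: updated_worktree_statuses carries new or changed statuses, removed_worktree_repo_paths lists entries that became clean, and GitStatus enum values travel as i32. A hypothetical update built with the prost-generated types (illustrative values, not from the diff):

```rust
fn example_repository_delta() -> proto::RepositoryEntry {
    proto::RepositoryEntry {
        work_directory_id: 42, // illustrative id
        branch: Some("main".to_string()),
        updated_worktree_statuses: vec![proto::StatusEntry {
            repo_path: "src/lib.rs".to_string(),
            status: proto::GitStatus::Modified.into(),
        }],
        removed_worktree_repo_paths: vec!["README.md".to_string()],
    }
}
```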

@@ -484,9 +484,11 @@ pub fn split_worktree_update(
mut message: UpdateWorktree,
max_chunk_size: usize,
) -> impl Iterator<Item = UpdateWorktree> {
let mut done = false;
let mut done_files = false;
let mut done_statuses = false;
let mut repository_index = 0;
iter::from_fn(move || {
if done {
if done_files && done_statuses {
return None;
}
@@ -502,22 +504,71 @@ pub fn split_worktree_update(
.drain(..removed_entries_chunk_size)
.collect();
done = message.updated_entries.is_empty() && message.removed_entries.is_empty();
done_files = message.updated_entries.is_empty() && message.removed_entries.is_empty();
// Wait to send repositories until after we've guaranteed that their associated entries
// will be read
let updated_repositories = if done {
mem::take(&mut message.updated_repositories)
let updated_repositories = if done_files {
let mut total_statuses = 0;
let mut updated_repositories = Vec::new();
while total_statuses < max_chunk_size
&& repository_index < message.updated_repositories.len()
{
let updated_statuses_chunk_size = cmp::min(
message.updated_repositories[repository_index]
.updated_worktree_statuses
.len(),
max_chunk_size - total_statuses,
);
let updated_statuses: Vec<_> = message.updated_repositories[repository_index]
.updated_worktree_statuses
.drain(..updated_statuses_chunk_size)
.collect();
total_statuses += updated_statuses.len();
let done_this_repo = message.updated_repositories[repository_index]
.updated_worktree_statuses
.is_empty();
let removed_repo_paths = if done_this_repo {
mem::take(
&mut message.updated_repositories[repository_index]
.removed_worktree_repo_paths,
)
} else {
Default::default()
};
updated_repositories.push(RepositoryEntry {
work_directory_id: message.updated_repositories[repository_index]
.work_directory_id,
branch: message.updated_repositories[repository_index]
.branch
.clone(),
updated_worktree_statuses: updated_statuses,
removed_worktree_repo_paths: removed_repo_paths,
});
if done_this_repo {
repository_index += 1;
}
}
updated_repositories
} else {
Default::default()
};
let removed_repositories = if done {
let removed_repositories = if done_files && done_statuses {
mem::take(&mut message.removed_repositories)
} else {
Default::default()
};
done_statuses = repository_index >= message.updated_repositories.len();
Some(UpdateWorktree {
project_id: message.project_id,
worktree_id: message.worktree_id,
@@ -526,7 +577,7 @@ pub fn split_worktree_update(
updated_entries,
removed_entries,
scan_id: message.scan_id,
is_last_update: done && message.is_last_update,
is_last_update: done_files && message.is_last_update,
updated_repositories,
removed_repositories,
})

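The net effect of the splitting above: entry chunks drain first; once they are done, each repository's statuses stream out at most max_chunk_size per message, and a repository's removed paths are attached only after its updated statuses are exhausted, so receivers never prune a path they might still get an update for. Consumption is unchanged (sketch; `send` stands in for whatever transport is used):

```rust
// Every yielded UpdateWorktree respects the chunk budget for both
// file entries and repository status entries.
for chunk in split_worktree_update(message, MAX_CHUNK_SIZE) {
    send(chunk); // hypothetical transport call
}
```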

@@ -6,4 +6,4 @@ pub use conn::Connection;
pub use peer::*;
mod macros;
pub const PROTOCOL_VERSION: u32 = 54;
pub const PROTOCOL_VERSION: u32 = 55;


@@ -1,14 +1,14 @@
use std::{cmp::Ordering, fmt::Debug};
use crate::{Bias, Dimension, Item, KeyedItem, SeekTarget, SumTree, Summary};
use crate::{Bias, Dimension, Edit, Item, KeyedItem, SeekTarget, SumTree, Summary};
#[derive(Clone, Debug)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TreeMap<K, V>(SumTree<MapEntry<K, V>>)
where
K: Clone + Debug + Default + Ord,
V: Clone + Debug;
#[derive(Clone, Debug)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MapEntry<K, V> {
key: K,
value: V,
@@ -82,6 +82,27 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
cursor.item().map(|item| (&item.key, &item.value))
}
pub fn remove_between(&mut self, from: &K, until: &K) {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
let from_key = MapKeyRef(Some(from));
let mut new_tree = cursor.slice(&from_key, Bias::Left, &());
let until_key = MapKeyRef(Some(until));
cursor.seek_forward(&until_key, Bias::Left, &());
new_tree.push_tree(cursor.suffix(&()), &());
drop(cursor);
self.0 = new_tree;
}
pub fn iter_from<'a>(&'a self, from: &'a K) -> impl Iterator<Item = (&K, &V)> + '_ {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
let from_key = MapKeyRef(Some(from));
cursor.seek(&from_key, Bias::Left, &());
cursor
.into_iter()
.map(|map_entry| (&map_entry.key, &map_entry.value))
}
pub fn update<F, T>(&mut self, key: &K, f: F) -> Option<T>
where
F: FnOnce(&mut V) -> T,
@@ -125,6 +146,65 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
pub fn values(&self) -> impl Iterator<Item = &V> + '_ {
self.0.iter().map(|entry| &entry.value)
}
pub fn insert_tree(&mut self, other: TreeMap<K, V>) {
let edits = other
.iter()
.map(|(key, value)| {
Edit::Insert(MapEntry {
key: key.to_owned(),
value: value.to_owned(),
})
})
.collect();
self.0.edit(edits, &());
}
pub fn remove_by<F>(&mut self, key: &K, f: F)
where
F: Fn(&K) -> bool,
{
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
let key = MapKeyRef(Some(key));
let mut new_tree = cursor.slice(&key, Bias::Left, &());
let until = RemoveByTarget(key, &f);
cursor.seek_forward(&until, Bias::Right, &());
new_tree.push_tree(cursor.suffix(&()), &());
drop(cursor);
self.0 = new_tree;
}
}
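/// Seek target used by `remove_by`: it compares equal to any key matching the
/// predicate, so a single forward seek skips the entire contiguous run of
/// matching keys (in a sorted map, keys sharing a prefix are adjacent).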
struct RemoveByTarget<'a, K>(MapKeyRef<'a, K>, &'a dyn Fn(&K) -> bool);
impl<'a, K: Debug> Debug for RemoveByTarget<'a, K> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("RemoveByTarget")
.field("key", &self.0)
.field("F", &"<...>")
.finish()
}
}
impl<'a, K: Debug + Clone + Default + Ord> SeekTarget<'a, MapKey<K>, MapKeyRef<'a, K>>
for RemoveByTarget<'_, K>
{
fn cmp(
&self,
cursor_location: &MapKeyRef<'a, K>,
_cx: &<MapKey<K> as Summary>::Context,
) -> Ordering {
if let Some(cursor_location) = cursor_location.0 {
if (self.1)(cursor_location) {
Ordering::Equal
} else {
self.0 .0.unwrap().cmp(cursor_location)
}
} else {
Ordering::Greater
}
}
}
impl<K, V> Default for TreeMap<K, V>
@@ -272,4 +352,113 @@ mod tests {
map.retain(|key, _| *key % 2 == 0);
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&4, &"d"), (&6, &"f")]);
}
#[test]
fn test_remove_between() {
let mut map = TreeMap::default();
map.insert("a", 1);
map.insert("b", 2);
map.insert("baa", 3);
map.insert("baaab", 4);
map.insert("c", 5);
map.remove_between(&"ba", &"bb");
assert_eq!(map.get(&"a"), Some(&1));
assert_eq!(map.get(&"b"), Some(&2));
assert_eq!(map.get(&"baa"), None);
assert_eq!(map.get(&"baaab"), None);
assert_eq!(map.get(&"c"), Some(&5));
}
#[test]
fn test_remove_by() {
let mut map = TreeMap::default();
map.insert("a", 1);
map.insert("aa", 1);
map.insert("b", 2);
map.insert("baa", 3);
map.insert("baaab", 4);
map.insert("c", 5);
map.insert("ca", 6);
map.remove_by(&"ba", |key| key.starts_with("ba"));
assert_eq!(map.get(&"a"), Some(&1));
assert_eq!(map.get(&"aa"), Some(&1));
assert_eq!(map.get(&"b"), Some(&2));
assert_eq!(map.get(&"baa"), None);
assert_eq!(map.get(&"baaab"), None);
assert_eq!(map.get(&"c"), Some(&5));
assert_eq!(map.get(&"ca"), Some(&6));
map.remove_by(&"c", |key| key.starts_with("c"));
assert_eq!(map.get(&"a"), Some(&1));
assert_eq!(map.get(&"aa"), Some(&1));
assert_eq!(map.get(&"b"), Some(&2));
assert_eq!(map.get(&"c"), None);
assert_eq!(map.get(&"ca"), None);
map.remove_by(&"a", |key| key.starts_with("a"));
assert_eq!(map.get(&"a"), None);
assert_eq!(map.get(&"aa"), None);
assert_eq!(map.get(&"b"), Some(&2));
map.remove_by(&"b", |key| key.starts_with("b"));
assert_eq!(map.get(&"b"), None);
}
#[test]
fn test_iter_from() {
let mut map = TreeMap::default();
map.insert("a", 1);
map.insert("b", 2);
map.insert("baa", 3);
map.insert("baaab", 4);
map.insert("c", 5);
let result = map
.iter_from(&"ba")
.take_while(|(key, _)| key.starts_with(&"ba"))
.collect::<Vec<_>>();
assert_eq!(result.len(), 2);
assert!(result.iter().find(|(k, _)| k == &&"baa").is_some());
assert!(result.iter().find(|(k, _)| k == &&"baaab").is_some());
let result = map
.iter_from(&"c")
.take_while(|(key, _)| key.starts_with(&"c"))
.collect::<Vec<_>>();
assert_eq!(result.len(), 1);
assert!(result.iter().find(|(k, _)| k == &&"c").is_some());
}
#[test]
fn test_insert_tree() {
let mut map = TreeMap::default();
map.insert("a", 1);
map.insert("b", 2);
map.insert("c", 3);
let mut other = TreeMap::default();
other.insert("a", 2);
other.insert("b", 2);
other.insert("d", 4);
map.insert_tree(other);
assert_eq!(map.iter().count(), 4);
assert_eq!(map.get(&"a"), Some(&2));
assert_eq!(map.get(&"b"), Some(&2));
assert_eq!(map.get(&"c"), Some(&3));
assert_eq!(map.get(&"d"), Some(&4));
}
}


@@ -26,6 +26,7 @@ serde.workspace = true
serde_json.workspace = true
git2 = { version = "0.15", default-features = false, optional = true }
dirs = "3.0"
take-until = "0.2.0"
[dev-dependencies]
tempdir.workspace = true


@@ -17,6 +17,8 @@ pub use backtrace::Backtrace;
use futures::Future;
use rand::{seq::SliceRandom, Rng};
pub use take_until::*;
#[macro_export]
macro_rules! debug_panic {
( $($fmt_arg:tt)* ) => {