diff --git a/gitbutler-app/src/database.rs b/gitbutler-app/src/database.rs index 7bcd0dda8..467b56c84 100644 --- a/gitbutler-app/src/database.rs +++ b/gitbutler-app/src/database.rs @@ -46,28 +46,3 @@ impl Database { Ok(result) } } - -#[cfg(test)] -mod tests { - use crate::tests; - - use super::*; - - #[test] - fn smoke() { - let data_dir = tests::temp_dir(); - let db = Database::open_in_directory(data_dir).unwrap(); - db.transaction(|tx| { - tx.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)", []) - .unwrap(); - tx.execute("INSERT INTO test (id) VALUES (1)", []).unwrap(); - let mut stmt = tx.prepare("SELECT id FROM test").unwrap(); - let mut rows = stmt.query([]).unwrap(); - let row = rows.next().unwrap().unwrap(); - let id: i32 = row.get(0).unwrap(); - assert_eq!(id, 1_i32); - Ok(()) - }) - .unwrap(); - } -} diff --git a/gitbutler-app/src/deltas/database.rs b/gitbutler-app/src/deltas/database.rs index 616068212..65492e5d0 100644 --- a/gitbutler-app/src/deltas/database.rs +++ b/gitbutler-app/src/deltas/database.rs @@ -120,107 +120,3 @@ fn insert_stmt<'conn>( ", )?) } - -#[cfg(test)] -mod tests { - use crate::tests; - - use super::*; - - #[test] - fn insert_query() -> Result<()> { - let db = tests::test_database(); - let database = Database::new(db); - - let project_id = ProjectId::generate(); - let session_id = SessionId::generate(); - let file_path = path::PathBuf::from("file_path"); - let delta1 = delta::Delta { - timestamp_ms: 0, - operations: vec![operations::Operation::Insert((0, "text".to_string()))], - }; - let deltas = vec![delta1.clone()]; - - database.insert(&project_id, &session_id, &file_path, &deltas)?; - - assert_eq!( - database.list_by_project_id_session_id(&project_id, &session_id, &None)?, - vec![(file_path.display().to_string(), vec![delta1])] - .into_iter() - .collect() - ); - - Ok(()) - } - - #[test] - fn insert_update() -> Result<()> { - let db = tests::test_database(); - let database = Database::new(db); - - let project_id = ProjectId::generate(); - let session_id = SessionId::generate(); - let file_path = path::PathBuf::from("file_path"); - let delta1 = delta::Delta { - timestamp_ms: 0, - operations: vec![operations::Operation::Insert((0, "text".to_string()))], - }; - let delta2 = delta::Delta { - timestamp_ms: 0, - operations: vec![operations::Operation::Insert(( - 0, - "updated_text".to_string(), - ))], - }; - - database.insert(&project_id, &session_id, &file_path, &vec![delta1])?; - database.insert(&project_id, &session_id, &file_path, &vec![delta2.clone()])?; - - assert_eq!( - database.list_by_project_id_session_id(&project_id, &session_id, &None)?, - vec![(file_path.display().to_string(), vec![delta2])] - .into_iter() - .collect() - ); - - Ok(()) - } - - #[test] - fn aggregate_deltas_by_file() -> Result<()> { - let db = tests::test_database(); - let database = Database::new(db); - - let project_id = ProjectId::generate(); - let session_id = SessionId::generate(); - let file_path1 = path::PathBuf::from("file_path1"); - let file_path2 = path::PathBuf::from("file_path2"); - let delta1 = delta::Delta { - timestamp_ms: 1, - operations: vec![operations::Operation::Insert((0, "text".to_string()))], - }; - let delta2 = delta::Delta { - timestamp_ms: 2, - operations: vec![operations::Operation::Insert(( - 0, - "updated_text".to_string(), - ))], - }; - - database.insert(&project_id, &session_id, &file_path1, &vec![delta1.clone()])?; - database.insert(&project_id, &session_id, &file_path2, &vec![delta1.clone()])?; - database.insert(&project_id, &session_id, 
&file_path2, &vec![delta2.clone()])?; - - assert_eq!( - database.list_by_project_id_session_id(&project_id, &session_id, &None)?, - vec![ - (file_path1.display().to_string(), vec![delta1.clone()]), - (file_path2.display().to_string(), vec![delta1, delta2]) - ] - .into_iter() - .collect() - ); - - Ok(()) - } -} diff --git a/gitbutler-app/src/deltas/writer.rs b/gitbutler-app/src/deltas/writer.rs index e833feceb..98c738581 100644 --- a/gitbutler-app/src/deltas/writer.rs +++ b/gitbutler-app/src/deltas/writer.rs @@ -71,46 +71,3 @@ impl<'writer> DeltasWriter<'writer> { Ok(()) } } - -#[cfg(test)] -mod tests { - use std::vec; - - use crate::{ - deltas, sessions, - tests::{Case, Suite}, - }; - - use super::*; - use deltas::operations::Operation; - - #[test] - fn write_no_vbranches() -> Result<()> { - let Case { gb_repository, .. } = Suite::default().new_case(); - - let deltas_writer = DeltasWriter::new(&gb_repository)?; - - let session = gb_repository.get_or_create_current_session()?; - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - let deltas_reader = deltas::Reader::new(&session_reader); - - let path = "test.txt"; - let deltas = vec![ - deltas::Delta { - operations: vec![Operation::Insert((0, "hello".to_string()))], - timestamp_ms: 0, - }, - deltas::Delta { - operations: vec![Operation::Insert((5, " world".to_string()))], - timestamp_ms: 0, - }, - ]; - - deltas_writer.write(path, &deltas).unwrap(); - - assert_eq!(deltas_reader.read_file(path).unwrap(), Some(deltas)); - assert_eq!(deltas_reader.read_file("not found").unwrap(), None); - - Ok(()) - } -} diff --git a/gitbutler-app/src/gb_repository.rs b/gitbutler-app/src/gb_repository.rs index a16f07ca1..5e4879e0e 100644 --- a/gitbutler-app/src/gb_repository.rs +++ b/gitbutler-app/src/gb_repository.rs @@ -1,6 +1,3 @@ mod repository; -#[cfg(test)] -mod repository_tests; - pub use repository::{RemoteError, Repository}; diff --git a/gitbutler-app/src/gb_repository/repository.rs b/gitbutler-app/src/gb_repository/repository.rs index 561f14018..d07923a9d 100644 --- a/gitbutler-app/src/gb_repository/repository.rs +++ b/gitbutler-app/src/gb_repository/repository.rs @@ -417,7 +417,6 @@ impl Repository { } } - #[cfg(test)] pub fn flush( &self, project_repository: &project_repository::Repository, @@ -513,15 +512,15 @@ impl Repository { } } - pub(crate) fn root(&self) -> std::path::PathBuf { + pub fn root(&self) -> std::path::PathBuf { self.git_repository.path().join("gitbutler") } - pub(crate) fn session_path(&self) -> std::path::PathBuf { + pub fn session_path(&self) -> std::path::PathBuf { self.root().join("session") } - pub(crate) fn session_wd_path(&self) -> std::path::PathBuf { + pub fn session_wd_path(&self) -> std::path::PathBuf { self.session_path().join("wd") } @@ -963,35 +962,37 @@ pub enum RemoteError { Other(#[from] anyhow::Error), } -#[cfg(test)] -mod test { - use std::path::PathBuf; - - use anyhow::Result; - use pretty_assertions::assert_eq; - - use crate::tests::{Case, Suite}; - - #[test] - fn test_alternates_file_being_set() -> Result<()> { - let Case { - gb_repository, - project_repository, - .. 
- } = Suite::default().new_case(); - - let file_content = std::fs::read_to_string( - gb_repository - .git_repository - .path() - .join("objects/info/alternates"), - )?; - - let file_content = PathBuf::from(file_content.trim()); - let project_path = project_repository.path().to_path_buf().join(".git/objects"); - - assert_eq!(file_content, project_path); - - Ok(()) - } -} +// TODO: this is a unit-test - could use code from `tests::common` via custom module path +// to make it work. +// #[cfg(test)] +// mod test { +// use std::path::PathBuf; +// +// use anyhow::Result; +// use pretty_assertions::assert_eq; +// +// use crate::tests::{Case, Suite}; +// +// #[test] +// fn test_alternates_file_being_set() -> Result<()> { +// let Case { +// gb_repository, +// project_repository, +// .. +// } = Suite::default().new_case(); +// +// let file_content = std::fs::read_to_string( +// gb_repository +// .git_repository +// .path() +// .join("objects/info/alternates"), +// )?; +// +// let file_content = PathBuf::from(file_content.trim()); +// let project_path = project_repository.path().to_path_buf().join(".git/objects"); +// +// assert_eq!(file_content, project_path); +// +// Ok(()) +// } +// } diff --git a/gitbutler-app/src/git/config.rs b/gitbutler-app/src/git/config.rs index f351c62bb..5afe4ffb9 100644 --- a/gitbutler-app/src/git/config.rs +++ b/gitbutler-app/src/git/config.rs @@ -66,41 +66,3 @@ impl Config { } } } - -#[cfg(test)] -mod tests { - use crate::tests; - - #[test] - pub fn test_set_str() { - let repo = tests::test_repository(); - let mut config = repo.config().unwrap(); - config.set_str("test.key", "test.value").unwrap(); - assert_eq!( - config.get_string("test.key").unwrap().unwrap(), - "test.value" - ); - } - - #[test] - pub fn test_set_bool() { - let repo = tests::test_repository(); - let mut config = repo.config().unwrap(); - config.set_bool("test.key", true).unwrap(); - assert!(config.get_bool("test.key").unwrap().unwrap()); - } - - #[test] - pub fn test_get_string_none() { - let repo = tests::test_repository(); - let config = repo.config().unwrap(); - assert_eq!(config.get_string("test.key").unwrap(), None); - } - - #[test] - pub fn test_get_bool_none() { - let repo = tests::test_repository(); - let config = repo.config().unwrap(); - assert_eq!(config.get_bool("test.key").unwrap(), None); - } -} diff --git a/gitbutler-app/src/git/credentials.rs b/gitbutler-app/src/git/credentials.rs index 864cec449..c7e0a452b 100644 --- a/gitbutler-app/src/git/credentials.rs +++ b/gitbutler-app/src/git/credentials.rs @@ -122,7 +122,6 @@ impl Helper { } } - #[cfg(test)] pub fn from_path>(path: P) -> Self { let keys = keys::Controller::from_path(&path); let users = users::Controller::from_path(path); @@ -391,317 +390,3 @@ impl Helper { Ok(flow) } } - -#[cfg(test)] -mod tests { - use super::*; - - use crate::tests::{self, test_repository}; - - #[derive(Default)] - struct TestCase<'a> { - remote_url: &'a str, - github_access_token: Option<&'a str>, - preferred_key: projects::AuthKey, - home_dir: Option, - } - - impl TestCase<'_> { - fn run(&self) -> Vec<(String, Vec)> { - let local_app_data = tests::temp_dir(); - - let users = users::Controller::from_path(&local_app_data); - let user = users::User { - github_access_token: self.github_access_token.map(ToString::to_string), - ..Default::default() - }; - users.set_user(&user).unwrap(); - - let keys = keys::Controller::from_path(&local_app_data); - let helper = Helper::new(keys, users, self.home_dir.clone()); - - let repo = test_repository(); - repo.remote( - 
"origin", - &self.remote_url.parse().expect("failed to parse remote url"), - ) - .unwrap(); - let project = projects::Project { - path: repo.workdir().unwrap().to_path_buf(), - preferred_key: self.preferred_key.clone(), - ..Default::default() - }; - let project_repository = project_repository::Repository::open(&project).unwrap(); - - let flow = helper.help(&project_repository, "origin").unwrap(); - flow.into_iter() - .map(|(remote, credentials)| { - ( - remote.url().unwrap().as_ref().unwrap().to_string(), - credentials, - ) - }) - .collect::>() - } - } - - mod not_github { - use super::*; - - mod with_preferred_key { - use super::*; - - #[test] - fn https() { - let test_case = TestCase { - remote_url: "https://gitlab.com/test-gitbutler/test.git", - github_access_token: Some("token"), - preferred_key: projects::AuthKey::Local { - private_key_path: PathBuf::from("/tmp/id_rsa"), - }, - ..Default::default() - }; - let flow = test_case.run(); - assert_eq!(flow.len(), 1); - assert_eq!( - flow[0].0, - "git@gitlab.com:test-gitbutler/test.git".to_string(), - ); - assert_eq!( - flow[0].1, - vec![Credential::Ssh(SshCredential::Keyfile { - key_path: PathBuf::from("/tmp/id_rsa"), - passphrase: None, - })] - ); - } - - #[test] - fn ssh() { - let test_case = TestCase { - remote_url: "git@gitlab.com:test-gitbutler/test.git", - github_access_token: Some("token"), - preferred_key: projects::AuthKey::Local { - private_key_path: PathBuf::from("/tmp/id_rsa"), - }, - ..Default::default() - }; - let flow = test_case.run(); - assert_eq!(flow.len(), 1); - assert_eq!( - flow[0].0, - "git@gitlab.com:test-gitbutler/test.git".to_string(), - ); - assert_eq!( - flow[0].1, - vec![Credential::Ssh(SshCredential::Keyfile { - key_path: PathBuf::from("/tmp/id_rsa"), - passphrase: None, - })] - ); - } - } - - mod with_github_token { - use super::*; - - #[test] - fn https() { - let test_case = TestCase { - remote_url: "https://gitlab.com/test-gitbutler/test.git", - github_access_token: Some("token"), - ..Default::default() - }; - let flow = test_case.run(); - - assert_eq!(flow.len(), 1); - - assert_eq!( - flow[0].0, - "git@gitlab.com:test-gitbutler/test.git".to_string(), - ); - assert_eq!(flow[0].1.len(), 1); - assert!(matches!( - flow[0].1[0], - Credential::Ssh(SshCredential::GitButlerKey(_)) - )); - } - - #[test] - fn ssh() { - let test_case = TestCase { - remote_url: "git@gitlab.com:test-gitbutler/test.git", - github_access_token: Some("token"), - ..Default::default() - }; - let flow = test_case.run(); - - assert_eq!(flow.len(), 1); - - assert_eq!( - flow[0].0, - "git@gitlab.com:test-gitbutler/test.git".to_string(), - ); - assert_eq!(flow[0].1.len(), 1); - assert!(matches!( - flow[0].1[0], - Credential::Ssh(SshCredential::GitButlerKey(_)) - )); - } - } - } - - mod github { - use super::*; - - mod with_github_token { - use super::*; - - #[test] - fn https() { - let test_case = TestCase { - remote_url: "https://github.com/gitbutlerapp/gitbutler.git", - github_access_token: Some("token"), - ..Default::default() - }; - let flow = test_case.run(); - assert_eq!(flow.len(), 1); - assert_eq!( - flow[0].0, - "https://github.com/gitbutlerapp/gitbutler.git".to_string(), - ); - assert_eq!( - flow[0].1, - vec![Credential::Https(HttpsCredential::GitHubToken( - "token".to_string() - ))] - ); - } - - #[test] - fn ssh() { - let test_case = TestCase { - remote_url: "git@github.com:gitbutlerapp/gitbutler.git", - github_access_token: Some("token"), - ..Default::default() - }; - let flow = test_case.run(); - assert_eq!(flow.len(), 1); - 
assert_eq!( - flow[0].0, - "https://github.com/gitbutlerapp/gitbutler.git".to_string(), - ); - assert_eq!( - flow[0].1, - vec![Credential::Https(HttpsCredential::GitHubToken( - "token".to_string() - ))] - ); - } - } - - mod without_github_token { - use super::*; - - mod without_preferred_key { - use super::*; - - #[test] - fn https() { - let test_case = TestCase { - remote_url: "https://github.com/gitbutlerapp/gitbutler.git", - ..Default::default() - }; - let flow = test_case.run(); - - assert_eq!(flow.len(), 1); - - assert_eq!( - flow[0].0, - "git@github.com:gitbutlerapp/gitbutler.git".to_string(), - ); - assert_eq!(flow[0].1.len(), 1); - assert!(matches!( - flow[0].1[0], - Credential::Ssh(SshCredential::GitButlerKey(_)) - )); - } - - #[test] - fn ssh() { - let test_case = TestCase { - remote_url: "git@github.com:gitbutlerapp/gitbutler.git", - ..Default::default() - }; - let flow = test_case.run(); - - assert_eq!(flow.len(), 1); - - assert_eq!( - flow[0].0, - "git@github.com:gitbutlerapp/gitbutler.git".to_string(), - ); - assert_eq!(flow[0].1.len(), 1); - assert!(matches!( - flow[0].1[0], - Credential::Ssh(SshCredential::GitButlerKey(_)) - )); - } - } - - mod with_preferred_key { - use super::*; - - #[test] - fn https() { - let test_case = TestCase { - remote_url: "https://github.com/gitbutlerapp/gitbutler.git", - github_access_token: Some("token"), - preferred_key: projects::AuthKey::Local { - private_key_path: PathBuf::from("/tmp/id_rsa"), - }, - ..Default::default() - }; - let flow = test_case.run(); - assert_eq!(flow.len(), 1); - assert_eq!( - flow[0].0, - "git@github.com:gitbutlerapp/gitbutler.git".to_string(), - ); - assert_eq!( - flow[0].1, - vec![Credential::Ssh(SshCredential::Keyfile { - key_path: PathBuf::from("/tmp/id_rsa"), - passphrase: None, - })] - ); - } - - #[test] - fn ssh() { - let test_case = TestCase { - remote_url: "git@github.com:gitbutlerapp/gitbutler.git", - github_access_token: Some("token"), - preferred_key: projects::AuthKey::Local { - private_key_path: PathBuf::from("/tmp/id_rsa"), - }, - ..Default::default() - }; - let flow = test_case.run(); - assert_eq!(flow.len(), 1); - assert_eq!( - flow[0].0, - "git@github.com:gitbutlerapp/gitbutler.git".to_string(), - ); - assert_eq!( - flow[0].1, - vec![Credential::Ssh(SshCredential::Keyfile { - key_path: PathBuf::from("/tmp/id_rsa"), - passphrase: None, - })] - ); - } - } - } - } -} diff --git a/gitbutler-app/src/git/diff.rs b/gitbutler-app/src/git/diff.rs index 8c1b7c2f4..807d295d0 100644 --- a/gitbutler-app/src/git/diff.rs +++ b/gitbutler-app/src/git/diff.rs @@ -419,166 +419,3 @@ pub fn diff_files_to_hunks( } file_hunks } - -#[cfg(test)] -mod tests { - use crate::tests; - - use super::*; - - #[test] - fn diff_simple_text() { - let repository = tests::test_repository(); - std::fs::write(repository.workdir().unwrap().join("file"), "hello").unwrap(); - - let head_commit_id = repository.head().unwrap().peel_to_commit().unwrap().id(); - - let diff = workdir(&repository, &head_commit_id, 0).unwrap(); - assert_eq!(diff.len(), 1); - assert_eq!( - diff[&path::PathBuf::from("file")].clone().hunks.unwrap(), - vec![GitHunk { - old_start: 0, - old_lines: 0, - new_start: 1, - new_lines: 1, - diff: "@@ -0,0 +1 @@\n+hello\n\\ No newline at end of file\n".to_string(), - binary: false, - change_type: ChangeType::Added, - }] - ); - } - - #[test] - fn diff_empty_file() { - let repository = tests::test_repository(); - std::fs::write(repository.workdir().unwrap().join("first"), "").unwrap(); - - let head_commit_id = 
repository.head().unwrap().peel_to_commit().unwrap().id(); - - let diff = workdir(&repository, &head_commit_id, 0).unwrap(); - assert_eq!(diff.len(), 1); - assert_eq!( - diff[&path::PathBuf::from("first")].clone().hunks.unwrap(), - vec![GitHunk { - old_start: 0, - old_lines: 0, - new_start: 0, - new_lines: 0, - diff: String::new(), - binary: false, - change_type: ChangeType::Modified, - }] - ); - } - - #[test] - fn diff_multiple_empty_files() { - let repository = tests::test_repository(); - std::fs::write(repository.workdir().unwrap().join("first"), "").unwrap(); - std::fs::write(repository.workdir().unwrap().join("second"), "").unwrap(); - - let head_commit_id = repository.head().unwrap().peel_to_commit().unwrap().id(); - - let diff = workdir(&repository, &head_commit_id, 0).unwrap(); - assert_eq!(diff.len(), 2); - assert_eq!( - diff[&path::PathBuf::from("first")].clone().hunks.unwrap(), - vec![GitHunk { - old_start: 0, - old_lines: 0, - new_start: 0, - new_lines: 0, - diff: String::new(), - binary: false, - change_type: ChangeType::Modified, - }] - ); - assert_eq!( - diff[&path::PathBuf::from("second")].clone().hunks.unwrap(), - vec![GitHunk { - old_start: 0, - old_lines: 0, - new_start: 0, - new_lines: 0, - diff: String::new(), - binary: false, - change_type: ChangeType::Modified, - }] - ); - } - - #[test] - fn diff_binary() { - let repository = tests::test_repository(); - std::fs::write( - repository.workdir().unwrap().join("image"), - [ - 255, 0, 0, // Red pixel - 0, 0, 255, // Blue pixel - 255, 255, 0, // Yellow pixel - 0, 255, 0, // Green pixel - ], - ) - .unwrap(); - - let head_commit_id = repository.head().unwrap().peel_to_commit().unwrap().id(); - - let diff = workdir(&repository, &head_commit_id, 0).unwrap(); - assert_eq!( - diff[&path::PathBuf::from("image")].clone().hunks.unwrap(), - vec![GitHunk { - old_start: 0, - old_lines: 0, - new_start: 0, - new_lines: 0, - diff: "71ae6e216f38164b6633e25d35abb043c3785af6".to_string(), - binary: true, - change_type: ChangeType::Added, - }] - ); - } - - #[test] - fn diff_some_lines_are_binary() { - let repository = tests::test_repository(); - std::fs::write( - repository.workdir().unwrap().join("file"), - [ - // butler/test/fixtures/git/1/8e/18ec9df5-65c5-4828-97ba-d91ec4903a74/objects/1f/9d7d5dd0d3d3ced66cee36bf1dd42bd33d0aa8 - 120, 1, 101, 144, 79, 75, 195, 64, 16, 197, 61, 239, 167, 120, 160, 224, 165, 77, 3, - 5, 17, 111, 42, 42, 245, 162, 135, 22, 60, 118, 155, 76, 179, 75, 55, 59, 97, 103, - 182, 177, 223, 222, 77, 244, 38, 204, 97, 254, 188, 247, 155, 97, 14, 129, 15, 88, - 223, 213, 87, 215, 120, 243, 250, 148, 53, 80, 194, 110, 131, 103, 142, 13, 13, 42, - 198, 60, 10, 54, 183, 61, 34, 163, 99, 110, 97, 21, 175, 190, 235, 237, 98, 238, - 102, 241, 177, 195, 214, 250, 48, 250, 216, 66, 25, 71, 223, 229, 68, 224, 172, 24, - 93, 17, 111, 48, 218, 168, 80, 71, 5, 187, 218, 125, 77, 154, 192, 124, 66, 240, - 39, 170, 176, 117, 94, 80, 98, 154, 147, 21, 79, 82, 124, 246, 50, 169, 90, 134, - 215, 9, 36, 190, 45, 192, 35, 62, 131, 189, 116, 137, 115, 108, 23, 56, 20, 190, - 78, 94, 103, 5, 103, 74, 226, 57, 162, 225, 168, 137, 67, 101, 204, 123, 46, 156, - 148, 227, 172, 121, 48, 102, 191, 223, 155, 27, 196, 225, 27, 250, 119, 107, 35, - 130, 165, 71, 181, 242, 113, 200, 90, 205, 37, 151, 82, 199, 223, 124, 57, 90, 109, - 92, 49, 13, 23, 117, 28, 215, 88, 246, 112, 170, 67, 37, 148, 202, 62, 220, 215, - 117, 61, 99, 205, 71, 90, 64, 184, 167, 114, 78, 249, 5, 5, 161, 202, 188, 156, 41, - 162, 79, 76, 255, 38, 63, 226, 30, 123, 106, - ], - 
) - .unwrap(); - - let head_commit_id = repository.head().unwrap().peel_to_commit().unwrap().id(); - - let diff = workdir(&repository, &head_commit_id, 0).unwrap(); - assert_eq!( - diff[&path::PathBuf::from("file")].clone().hunks.unwrap(), - vec![GitHunk { - old_start: 0, - old_lines: 0, - new_start: 0, - new_lines: 0, - diff: "3fc41b9ae6836a94f41c78b4ce69d78b6e7080f1".to_string(), - binary: true, - change_type: ChangeType::Added, - }] - ); - } -} diff --git a/gitbutler-app/src/git/repository.rs b/gitbutler-app/src/git/repository.rs index 820daf2d5..3091657d9 100644 --- a/gitbutler-app/src/git/repository.rs +++ b/gitbutler-app/src/git/repository.rs @@ -26,24 +26,18 @@ impl From for Repository { } impl Repository { - #[cfg(test)] - pub fn init_bare>(path: P) -> Result { - let inner = git2::Repository::init_opts(path, &crate::tests::init_opts_bare())?; - Ok(Repository(inner)) - } - pub fn init>(path: P) -> Result { let inner = git2::Repository::init(path)?; Ok(Repository(inner)) } - pub fn open>(path: P) -> Result { - let inner = git2::Repository::open(path)?; + pub fn init_opts>(path: P, opts: &git2::RepositoryInitOptions) -> Result { + let inner = git2::Repository::init_opts(path, opts)?; Ok(Repository(inner)) } - pub fn init_opts>(path: P, opts: &git2::RepositoryInitOptions) -> Result { - let inner = git2::Repository::init_opts(path, opts)?; + pub fn open>(path: P) -> Result { + let inner = git2::Repository::open(path)?; Ok(Repository(inner)) } diff --git a/gitbutler-app/src/keys/controller.rs b/gitbutler-app/src/keys/controller.rs index 7a67a6b72..de9096521 100644 --- a/gitbutler-app/src/keys/controller.rs +++ b/gitbutler-app/src/keys/controller.rs @@ -12,7 +12,6 @@ impl Controller { Self { storage } } - #[cfg(test)] pub fn from_path>(path: P) -> Self { Self::new(Storage::from_path(path)) } @@ -33,32 +32,3 @@ pub enum GetOrCreateError { #[error(transparent)] Other(#[from] anyhow::Error), } - -#[cfg(not(target_os = "windows"))] -#[cfg(test)] -mod tests { - use std::fs; - #[cfg(target_family = "unix")] - use std::os::unix::prelude::*; - - use crate::tests::Suite; - - use super::*; - - #[test] - fn test_get_or_create() { - let suite = Suite::default(); - let controller = Controller::new(Storage::from_path(&suite.local_app_data)); - - let once = controller.get_or_create().unwrap(); - let twice = controller.get_or_create().unwrap(); - assert_eq!(once, twice); - - // check permissions of the private key - let permissions = fs::metadata(suite.local_app_data.join("keys/ed25519")) - .unwrap() - .permissions(); - let perms = format!("{:o}", permissions.mode()); - assert_eq!(perms, "100600"); - } -} diff --git a/gitbutler-app/src/keys/storage.rs b/gitbutler-app/src/keys/storage.rs index 3e73d753c..e6dac6506 100644 --- a/gitbutler-app/src/keys/storage.rs +++ b/gitbutler-app/src/keys/storage.rs @@ -20,7 +20,6 @@ impl Storage { Storage { storage } } - #[cfg(test)] pub fn from_path>(path: P) -> Storage { Storage::new(storage::Storage::new(path)) } diff --git a/gitbutler-app/src/lib.rs b/gitbutler-app/src/lib.rs new file mode 100644 index 000000000..cb5bf5ba3 --- /dev/null +++ b/gitbutler-app/src/lib.rs @@ -0,0 +1,50 @@ +#![feature(error_generic_member_access)] +#![cfg_attr(windows, feature(windows_by_handle))] +#![cfg_attr( + all(windows, not(test), not(debug_assertions)), + windows_subsystem = "windows" +)] +// FIXME(qix-): Stuff we want to fix but don't have a lot of time for. +// FIXME(qix-): PRs welcome! 
+#![allow( + clippy::used_underscore_binding, + clippy::module_name_repetitions, + clippy::struct_field_names, + clippy::too_many_lines +)] + +pub mod analytics; +pub mod app; +pub mod askpass; +pub mod assets; +pub mod commands; +pub mod database; +pub mod dedup; +pub mod deltas; +pub mod error; +pub mod events; +pub mod fs; +pub mod gb_repository; +pub mod git; +pub mod github; +pub mod id; +pub mod keys; +pub mod lock; +pub mod logs; +pub mod menu; +pub mod path; +pub mod project_repository; +pub mod projects; +pub mod reader; +pub mod sentry; +pub mod sessions; +pub mod ssh; +pub mod storage; +pub mod types; +pub mod users; +pub mod virtual_branches; +pub mod watcher; +#[cfg(target_os = "windows")] +pub mod windows; +pub mod writer; +pub mod zip; diff --git a/gitbutler-app/src/lock.rs b/gitbutler-app/src/lock.rs index 2ad188370..2783c77a3 100644 --- a/gitbutler-app/src/lock.rs +++ b/gitbutler-app/src/lock.rs @@ -49,98 +49,3 @@ impl Inner { Ok(result) } } - -#[cfg(test)] -mod tests { - use super::*; - - use crate::tests::temp_dir; - - #[tokio::test] - async fn test_lock_same_instance() { - let dir_path = temp_dir(); - std::fs::write(dir_path.join("file.txt"), "").unwrap(); - let dir = Dir::new(&dir_path).unwrap(); - - let (tx, rx) = std::sync::mpsc::sync_channel(1); - - // spawn a task that will signal right after aquireing the lock - let _ = tokio::spawn({ - let dir = dir.clone(); - async move { - dir.batch(|root| { - tx.send(()).unwrap(); - assert_eq!( - std::fs::read_to_string(root.join("file.txt")).unwrap(), - String::new() - ); - std::fs::write(root.join("file.txt"), "1") - }) - } - }) - .await - .unwrap(); - - // then we wait until the lock is aquired - rx.recv().unwrap(); - - // and immidiately try to lock again - dir.batch(|root| { - assert_eq!(std::fs::read_to_string(root.join("file.txt")).unwrap(), "1"); - std::fs::write(root.join("file.txt"), "2") - }) - .unwrap() - .unwrap(); - - assert_eq!( - std::fs::read_to_string(dir_path.join("file.txt")).unwrap(), - "2" - ); - } - - #[tokio::test] - async fn test_lock_different_instances() { - let dir_path = temp_dir(); - std::fs::write(dir_path.join("file.txt"), "").unwrap(); - - let (tx, rx) = std::sync::mpsc::sync_channel(1); - - // spawn a task that will signal right after aquireing the lock - let _ = tokio::spawn({ - let dir_path = dir_path.clone(); - async move { - // one dir instance is created on a separate thread - let dir = Dir::new(&dir_path).unwrap(); - dir.batch(|root| { - tx.send(()).unwrap(); - assert_eq!( - std::fs::read_to_string(root.join("file.txt")).unwrap(), - String::new() - ); - std::fs::write(root.join("file.txt"), "1") - }) - } - }) - .await - .unwrap(); - - // another dir instance is created on the main thread - let dir = Dir::new(&dir_path).unwrap(); - - // then we wait until the lock is aquired - rx.recv().unwrap(); - - // and immidiately try to lock again - dir.batch(|root| { - assert_eq!(std::fs::read_to_string(root.join("file.txt")).unwrap(), "1"); - std::fs::write(root.join("file.txt"), "2") - }) - .unwrap() - .unwrap(); - - assert_eq!( - std::fs::read_to_string(dir_path.join("file.txt")).unwrap(), - "2" - ); - } -} diff --git a/gitbutler-app/src/main.rs b/gitbutler-app/src/main.rs index b9c859f79..8432c49c1 100644 --- a/gitbutler-app/src/main.rs +++ b/gitbutler-app/src/main.rs @@ -13,44 +13,28 @@ clippy::too_many_lines )] -pub(crate) mod analytics; -pub(crate) mod app; -pub(crate) mod askpass; -pub(crate) mod assets; -pub(crate) mod commands; -pub(crate) mod database; -pub(crate) mod dedup; -pub(crate) 
mod deltas; -pub(crate) mod error; -pub(crate) mod events; -pub(crate) mod fs; -pub(crate) mod gb_repository; -pub(crate) mod git; -pub(crate) mod github; -pub(crate) mod id; -pub(crate) mod keys; -pub(crate) mod lock; -pub(crate) mod logs; -pub(crate) mod menu; -pub(crate) mod path; -pub(crate) mod project_repository; -pub(crate) mod projects; -pub(crate) mod reader; -pub(crate) mod sentry; -pub(crate) mod sessions; -pub(crate) mod ssh; -pub(crate) mod storage; -pub(crate) mod types; -pub(crate) mod users; -pub(crate) mod virtual_branches; -pub(crate) mod watcher; +use gitbutler_app::analytics; +use gitbutler_app::app; +use gitbutler_app::askpass; +use gitbutler_app::assets; +use gitbutler_app::commands; +use gitbutler_app::database; +use gitbutler_app::deltas; +use gitbutler_app::git; +use gitbutler_app::github; +use gitbutler_app::keys; +use gitbutler_app::logs; +use gitbutler_app::menu; +use gitbutler_app::projects; +use gitbutler_app::sentry; +use gitbutler_app::sessions; +use gitbutler_app::storage; +use gitbutler_app::users; +use gitbutler_app::virtual_branches; +use gitbutler_app::watcher; #[cfg(target_os = "windows")] -pub(crate) mod windows; -pub(crate) mod writer; -pub(crate) mod zip; - -#[cfg(test)] -pub(crate) mod tests; +use gitbutler_app::windows; +use gitbutler_app::zip; use std::path::PathBuf; diff --git a/gitbutler-app/src/project_repository/repository.rs b/gitbutler-app/src/project_repository/repository.rs index 95899d920..d51f097ac 100644 --- a/gitbutler-app/src/project_repository/repository.rs +++ b/gitbutler-app/src/project_repository/repository.rs @@ -312,7 +312,6 @@ impl Repository { } Ok(oids) } - #[cfg(test)] LogUntil::End => { let mut revwalk = self .git_repository @@ -654,7 +653,6 @@ pub enum LogUntil { Commit(git::Oid), Take(usize), When(Box), - #[cfg(test)] End, } diff --git a/gitbutler-app/src/projects.rs b/gitbutler-app/src/projects.rs index 0cb89971b..8189caa70 100644 --- a/gitbutler-app/src/projects.rs +++ b/gitbutler-app/src/projects.rs @@ -7,5 +7,4 @@ pub use controller::*; pub use project::{AuthKey, CodePushState, FetchResult, Project, ProjectId}; pub use storage::UpdateRequest; -#[cfg(test)] pub use project::ApiProject; diff --git a/gitbutler-app/src/projects/controller.rs b/gitbutler-app/src/projects/controller.rs index 327508822..6f2c30baf 100644 --- a/gitbutler-app/src/projects/controller.rs +++ b/gitbutler-app/src/projects/controller.rs @@ -26,7 +26,6 @@ impl Controller { } } - #[cfg(test)] pub fn from_path>(path: P) -> Self { let pathbuf = path.as_ref().to_path_buf(); Self { diff --git a/gitbutler-app/src/projects/storage.rs b/gitbutler-app/src/projects/storage.rs index 652439743..dab7adbb0 100644 --- a/gitbutler-app/src/projects/storage.rs +++ b/gitbutler-app/src/projects/storage.rs @@ -42,7 +42,6 @@ impl Storage { Storage { storage } } - #[cfg(test)] pub fn from_path>(path: P) -> Storage { Storage::new(storage::Storage::new(path)) } diff --git a/gitbutler-app/src/reader.rs b/gitbutler-app/src/reader.rs index d21c84076..5f64a5072 100644 --- a/gitbutler-app/src/reader.rs +++ b/gitbutler-app/src/reader.rs @@ -142,7 +142,7 @@ pub struct CommitReader<'reader> { } impl<'reader> CommitReader<'reader> { - fn new( + pub fn new( repository: &'reader git::Repository, commit: &git::Commit<'reader>, ) -> Result> { @@ -177,7 +177,7 @@ impl<'reader> CommitReader<'reader> { Ok(Content::from(&blob)) } - fn list_files>(&self, dir_path: P) -> Result> { + pub fn list_files>(&self, dir_path: P) -> Result> { let dir_path = dir_path.as_ref(); let mut files = vec![]; 
self.tree @@ -204,7 +204,7 @@ impl<'reader> CommitReader<'reader> { Ok(files) } - fn exists>(&self, file_path: P) -> bool { + pub fn exists>(&self, file_path: P) -> bool { self.tree.get_path(file_path.normalize()).is_ok() } } @@ -441,189 +441,3 @@ impl TryFrom<&Content> for bool { text.parse().map_err(FromError::ParseBool) } } - -#[cfg(test)] -mod tests { - use super::*; - - use anyhow::Result; - - use crate::tests; - - #[test] - fn test_directory_reader_read_file() -> Result<()> { - let dir = tests::temp_dir(); - - let file_path = Path::new("test.txt"); - fs::write(dir.join(file_path), "test")?; - - let reader = Reader::open(dir.clone())?; - assert_eq!(reader.read(file_path)?, Content::UTF8("test".to_string())); - - Ok(()) - } - - #[test] - fn test_commit_reader_read_file() -> Result<()> { - let repository = tests::test_repository(); - - let file_path = Path::new("test.txt"); - fs::write(repository.path().parent().unwrap().join(file_path), "test")?; - - let oid = tests::commit_all(&repository); - - fs::write(repository.path().parent().unwrap().join(file_path), "test2")?; - - let reader = Reader::from_commit(&repository, &repository.find_commit(oid)?)?; - assert_eq!(reader.read(file_path)?, Content::UTF8("test".to_string())); - - Ok(()) - } - - #[test] - fn test_reader_list_files_should_return_relative() -> Result<()> { - let dir = tests::temp_dir(); - - fs::write(dir.join("test1.txt"), "test")?; - fs::create_dir_all(dir.join("dir"))?; - fs::write(dir.join("dir").join("test.txt"), "test")?; - - let reader = Reader::open(dir.clone())?; - let files = reader.list_files(Path::new("dir"))?; - assert_eq!(files.len(), 1); - assert!(files.contains(&Path::new("test.txt").to_path_buf())); - - Ok(()) - } - - #[test] - fn test_reader_list_files() -> Result<()> { - let dir = tests::temp_dir(); - - fs::write(dir.join("test.txt"), "test")?; - fs::create_dir_all(dir.join("dir"))?; - fs::write(dir.join("dir").join("test.txt"), "test")?; - - let reader = Reader::open(dir.clone())?; - let files = reader.list_files(Path::new(""))?; - assert_eq!(files.len(), 2); - assert!(files.contains(&Path::new("test.txt").to_path_buf())); - assert!(files.contains(&Path::new("dir/test.txt").to_path_buf())); - - Ok(()) - } - - #[test] - fn test_commit_reader_list_files_should_return_relative() -> Result<()> { - let repository = tests::test_repository(); - - fs::write( - repository.path().parent().unwrap().join("test1.txt"), - "test", - )?; - fs::create_dir_all(repository.path().parent().unwrap().join("dir"))?; - fs::write( - repository - .path() - .parent() - .unwrap() - .join("dir") - .join("test.txt"), - "test", - )?; - - let oid = tests::commit_all(&repository); - - fs::remove_dir_all(repository.path().parent().unwrap().join("dir"))?; - - let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?; - let files = reader.list_files(Path::new("dir"))?; - assert_eq!(files.len(), 1); - assert!(files.contains(&Path::new("test.txt").to_path_buf())); - - Ok(()) - } - - #[test] - fn test_commit_reader_list_files() -> Result<()> { - let repository = tests::test_repository(); - - fs::write(repository.path().parent().unwrap().join("test.txt"), "test")?; - fs::create_dir_all(repository.path().parent().unwrap().join("dir"))?; - fs::write( - repository - .path() - .parent() - .unwrap() - .join("dir") - .join("test.txt"), - "test", - )?; - - let oid = tests::commit_all(&repository); - - fs::remove_dir_all(repository.path().parent().unwrap().join("dir"))?; - - let reader = CommitReader::new(&repository, 
&repository.find_commit(oid)?)?; - let files = reader.list_files(Path::new(""))?; - assert_eq!(files.len(), 2); - assert!(files.contains(&Path::new("test.txt").to_path_buf())); - assert!(files.contains(&Path::new("dir/test.txt").to_path_buf())); - - Ok(()) - } - - #[test] - fn test_directory_reader_exists() -> Result<()> { - let dir = tests::temp_dir(); - - fs::write(dir.join("test.txt"), "test")?; - - let reader = Reader::open(dir.clone())?; - assert!(reader.exists(Path::new("test.txt"))?); - assert!(!reader.exists(Path::new("test2.txt"))?); - - Ok(()) - } - - #[test] - fn test_commit_reader_exists() -> Result<()> { - let repository = tests::test_repository(); - - fs::write(repository.path().parent().unwrap().join("test.txt"), "test")?; - - let oid = tests::commit_all(&repository); - - fs::remove_file(repository.path().parent().unwrap().join("test.txt"))?; - - let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?; - assert!(reader.exists(Path::new("test.txt"))); - assert!(!reader.exists(Path::new("test2.txt"))); - - Ok(()) - } - - #[test] - fn test_from_bytes() { - for (bytes, expected) in [ - ("test".as_bytes(), Content::UTF8("test".to_string())), - (&[0, 159, 146, 150, 159, 146, 150], Content::Binary), - ] { - assert_eq!(Content::from(bytes), expected); - } - } - - #[test] - fn test_serialize_content() { - for (content, expected) in [ - ( - Content::UTF8("test".to_string()), - r#"{"type":"utf8","value":"test"}"#, - ), - (Content::Binary, r#"{"type":"binary"}"#), - (Content::Large, r#"{"type":"large"}"#), - ] { - assert_eq!(serde_json::to_string(&content).unwrap(), expected); - } - } -} diff --git a/gitbutler-app/src/sessions.rs b/gitbutler-app/src/sessions.rs index b09718956..c904b9115 100644 --- a/gitbutler-app/src/sessions.rs +++ b/gitbutler-app/src/sessions.rs @@ -1,15 +1,12 @@ mod controller; mod iterator; mod reader; -mod session; +pub mod session; mod writer; pub mod commands; pub mod database; -#[cfg(test)] -mod tests; - pub use controller::Controller; pub use database::Database; pub use iterator::SessionsIterator; diff --git a/gitbutler-app/src/sessions/database.rs b/gitbutler-app/src/sessions/database.rs index 850968506..3c60790f9 100644 --- a/gitbutler-app/src/sessions/database.rs +++ b/gitbutler-app/src/sessions/database.rs @@ -180,91 +180,3 @@ fn insert_stmt<'conn>( ", )?) 
} - -#[cfg(test)] -mod tests { - use crate::tests; - - use super::*; - - #[test] - fn test_insert_query() -> Result<()> { - let db = tests::test_database(); - println!("0"); - let database = Database::new(db); - println!("1"); - - let project_id = ProjectId::generate(); - let session1 = session::Session { - id: SessionId::generate(), - hash: None, - meta: session::Meta { - branch: None, - commit: None, - start_timestamp_ms: 1, - last_timestamp_ms: 2, - }, - }; - let session2 = session::Session { - id: SessionId::generate(), - hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()), - meta: session::Meta { - branch: Some("branch2".to_string()), - commit: Some("commit2".to_string()), - start_timestamp_ms: 3, - last_timestamp_ms: 4, - }, - }; - let sessions = vec![&session1, &session2]; - - database.insert(&project_id, &sessions)?; - - assert_eq!( - database.list_by_project_id(&project_id, None)?, - vec![session2.clone(), session1.clone()] - ); - assert_eq!(database.get_by_id(&session1.id)?.unwrap(), session1); - assert_eq!(database.get_by_id(&session2.id)?.unwrap(), session2); - assert_eq!(database.get_by_id(&SessionId::generate())?, None); - - Ok(()) - } - - #[test] - fn test_update() -> Result<()> { - let db = tests::test_database(); - let database = Database::new(db); - - let project_id = ProjectId::generate(); - let session = session::Session { - id: SessionId::generate(), - hash: None, - meta: session::Meta { - branch: None, - commit: None, - start_timestamp_ms: 1, - last_timestamp_ms: 2, - }, - }; - let session_updated = session::Session { - id: session.id, - hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()), - meta: session::Meta { - branch: Some("branch2".to_string()), - commit: Some("commit2".to_string()), - start_timestamp_ms: 3, - last_timestamp_ms: 4, - }, - }; - database.insert(&project_id, &[&session])?; - database.insert(&project_id, &[&session_updated])?; - - assert_eq!( - database.list_by_project_id(&project_id, None)?, - vec![session_updated.clone()] - ); - assert_eq!(database.get_by_id(&session.id)?.unwrap(), session_updated); - - Ok(()) - } -} diff --git a/gitbutler-app/src/tests/suite/virtual_branches.rs b/gitbutler-app/src/tests/suite/virtual_branches.rs deleted file mode 100644 index 456db35b5..000000000 --- a/gitbutler-app/src/tests/suite/virtual_branches.rs +++ /dev/null @@ -1,6754 +0,0 @@ -//TODO: -#![allow( - clippy::redundant_closure_for_method_calls, - clippy::rest_pat_in_fully_bound_structs, - clippy::dbg_macro -)] - -use std::{fs, path, str::FromStr}; - -use crate::{ - git, keys, - projects::{self, ProjectId}, - tests::common::{paths, TestProject}, - users, - virtual_branches::{branch, controller::ControllerError, errors, Controller}, -}; - -struct Test { - repository: TestProject, - project_id: ProjectId, - projects: projects::Controller, - controller: Controller, -} - -impl Default for Test { - fn default() -> Self { - let data_dir = paths::data_dir(); - let keys = keys::Controller::from_path(&data_dir); - let projects = projects::Controller::from_path(&data_dir); - let users = users::Controller::from_path(&data_dir); - let helper = git::credentials::Helper::from_path(&data_dir); - - let test_project = TestProject::default(); - let project = projects - .add(test_project.path()) - .expect("failed to add project"); - - Self { - repository: test_project, - project_id: project.id, - controller: Controller::new(data_dir, projects.clone(), users, keys, helper), - projects, - } - } -} - -mod unapply_ownership { - use 
crate::virtual_branches::branch::BranchOwnershipClaims; - - use super::*; - - #[tokio::test] - async fn should_unapply_with_commits() { - let Test { - project_id, - controller, - repository, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write( - repository.path().join("file.txt"), - "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n", - ) - .unwrap(); - controller - .create_commit(&project_id, &branch_id, "test", None, false) - .await - .unwrap(); - - // change in the committed hunks leads to hunk locking - fs::write( - repository.path().join("file.txt"), - "_\n2\n3\n4\n5\n6\n7\n8\n9\n_\n", - ) - .unwrap(); - - controller - .unapply_ownership( - &project_id, - &"file.txt:1-5,7-11" - .parse::() - .unwrap(), - ) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert!(branch.files.is_empty()); - } -} - -mod create_commit { - - use super::*; - - #[tokio::test] - async fn should_lock_updated_hunks() { - let Test { - project_id, - controller, - repository, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // by default, hunks are not locked - - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); - assert_eq!(branch.files[0].hunks.len(), 1); - assert!(!branch.files[0].hunks[0].locked); - } - - controller - .create_commit(&project_id, &branch_id, "test", None, false) - .await - .unwrap(); - - { - // change in the committed hunks leads to hunk locking - fs::write(repository.path().join("file.txt"), "updated content").unwrap(); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); - assert_eq!(branch.files[0].hunks.len(), 1); - assert!(branch.files[0].hunks[0].locked); - } - } - - #[tokio::test] - async fn should_not_lock_disjointed_hunks() { - let Test { - project_id, - controller, - repository, - .. 
- } = Test::default(); - - let mut lines: Vec<_> = (0_i32..24_i32).map(|i| format!("line {}", i)).collect(); - fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); - repository.commit_all("my commit"); - repository.push(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // new hunk in the middle of the file - lines[12] = "commited stuff".to_string(); - fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); - assert_eq!(branch.files[0].hunks.len(), 1); - assert!(!branch.files[0].hunks[0].locked); - } - - controller - .create_commit(&project_id, &branch_id, "test commit", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - { - // hunk before the commited part is not locked - let mut changed_lines = lines.clone(); - changed_lines[0] = "updated line".to_string(); - fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap(); - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); - assert_eq!(branch.files[0].hunks.len(), 1); - assert!(!branch.files[0].hunks[0].locked); - // cleanup - fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); - } - { - // hunk after the commited part is not locked - let mut changed_lines = lines.clone(); - changed_lines[23] = "updated line".to_string(); - fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap(); - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); - assert_eq!(branch.files[0].hunks.len(), 1); - assert!(!branch.files[0].hunks[0].locked); - // cleanup - fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); - } - { - // hunk before the commited part but with overlapping context - let mut changed_lines = lines.clone(); - changed_lines[10] = "updated line".to_string(); - fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap(); - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); - assert_eq!(branch.files[0].hunks.len(), 1); - // TODO: We lock this hunk, but can we afford not lock it? 
- assert!(branch.files[0].hunks[0].locked); - // cleanup - fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); - } - { - // hunk after the commited part but with overlapping context - let mut changed_lines = lines.clone(); - changed_lines[14] = "updated line".to_string(); - fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap(); - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); - assert_eq!(branch.files[0].hunks.len(), 1); - // TODO: We lock this hunk, but can we afford not lock it? - assert!(branch.files[0].hunks[0].locked); - // cleanup - fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); - } - } -} - -mod references { - use super::*; - - mod create_virtual_branch { - use super::*; - - #[tokio::test] - async fn simple() { - let Test { - project_id, - controller, - repository, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert_eq!(branches[0].name, "Virtual branch"); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - assert!(refnames.contains(&"refs/gitbutler/Virtual-branch".to_string())); - } - - #[tokio::test] - async fn duplicate_name() { - let Test { - project_id, - controller, - repository, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch( - &project_id, - &crate::virtual_branches::branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - let branch2_id = controller - .create_virtual_branch( - &project_id, - &crate::virtual_branches::branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 2); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].name, "name"); - assert_eq!(branches[1].id, branch2_id); - assert_eq!(branches[1].name, "name 1"); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - assert!(refnames.contains(&"refs/gitbutler/name".to_string())); - assert!(refnames.contains(&"refs/gitbutler/name-1".to_string())); - } - } - - mod update_virtual_branch { - use super::*; - - #[tokio::test] - async fn simple() { - let Test { - project_id, - controller, - repository, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch( - &project_id, - &branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - controller - .update_virtual_branch( - &project_id, - branch::BranchUpdateRequest { - id: branch_id, - name: Some("new name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert_eq!(branches[0].name, "new name"); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - assert!(!refnames.contains(&"refs/gitbutler/name".to_string())); - assert!(refnames.contains(&"refs/gitbutler/new-name".to_string())); - } - - #[tokio::test] - async fn duplicate_name() { - let Test { - project_id, - controller, - repository, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch( - &project_id, - &branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - let branch2_id = controller - .create_virtual_branch( - &project_id, - &branch::BranchCreateRequest { - ..Default::default() - }, - ) - .await - .unwrap(); - - controller - .update_virtual_branch( - &project_id, - branch::BranchUpdateRequest { - id: branch2_id, - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 2); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].name, "name"); - assert_eq!(branches[1].id, branch2_id); - assert_eq!(branches[1].name, "name 1"); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - assert!(refnames.contains(&"refs/gitbutler/name".to_string())); - assert!(refnames.contains(&"refs/gitbutler/name-1".to_string())); - } - } - - mod push_virtual_branch { - - use super::*; - - #[tokio::test] - async fn simple() { - let Test { - project_id, - controller, - repository, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch( - &project_id, - &branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - controller - .create_commit(&project_id, &branch1_id, "test", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(&project_id, &branch1_id, false, None) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].name, "name"); - assert_eq!( - branches[0].upstream.as_ref().unwrap().name.to_string(), - "refs/remotes/origin/name" - ); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - assert!(refnames.contains(&branches[0].upstream.clone().unwrap().name.to_string())); - } - - #[tokio::test] - async fn duplicate_names() { - let Test { - project_id, - controller, - repository, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = { - // create and push branch with some work - let branch1_id = controller - .create_virtual_branch( - &project_id, - &branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(&project_id, &branch1_id, "test", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(&project_id, &branch1_id, false, None) - .await - .unwrap(); - branch1_id - }; - - // rename first branch - controller - .update_virtual_branch( - &project_id, - branch::BranchUpdateRequest { - id: branch1_id, - name: Some("updated name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - let branch2_id = { - // create another branch with first branch's old name and push it - let branch2_id = controller - .create_virtual_branch( - &project_id, - &branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - fs::write(repository.path().join("file.txt"), "updated content").unwrap(); - controller - .create_commit(&project_id, &branch2_id, "test", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(&project_id, &branch2_id, false, None) - .await - .unwrap(); - branch2_id - }; - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 2); - // first branch is pushing to old ref remotely - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].name, "updated name"); - assert_eq!( - branches[0].upstream.as_ref().unwrap().name, - "refs/remotes/origin/name".parse().unwrap() - ); - // new branch is pushing to new ref remotely - assert_eq!(branches[1].id, branch2_id); - assert_eq!(branches[1].name, "name"); - assert_eq!( - branches[1].upstream.as_ref().unwrap().name, - "refs/remotes/origin/name-1".parse().unwrap() - ); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - 
assert!(refnames.contains(&branches[0].upstream.clone().unwrap().name.to_string())); - assert!(refnames.contains(&branches[1].upstream.clone().unwrap().name.to_string())); - } - } -} - -mod delete_virtual_branch { - use super::*; - - #[tokio::test] - async fn should_unapply_diff() { - let Test { - project_id, - controller, - repository, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // write some - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - - controller - .delete_virtual_branch(&project_id, &branches[0].id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - assert!(!repository.path().join("file.txt").exists()); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - assert!(!refnames.contains(&"refs/gitbutler/name".to_string())); - } - - #[tokio::test] - async fn should_remove_reference() { - let Test { - project_id, - controller, - repository, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let id = controller - .create_virtual_branch( - &project_id, - &branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - controller - .delete_virtual_branch(&project_id, &id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - assert!(!refnames.contains(&"refs/gitbutler/name".to_string())); - } -} - -mod set_base_branch { - use super::*; - - #[tokio::test] - async fn success() { - let Test { - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - } - - mod error { - use super::*; - - #[tokio::test] - async fn missing() { - let Test { - project_id, - controller, - .. - } = Test::default(); - - assert!(matches!( - controller - .set_base_branch( - &project_id, - &git::RemoteRefname::from_str("refs/remotes/origin/missing").unwrap(), - ) - .await - .unwrap_err(), - ControllerError::Action(errors::SetBaseBranchError::BranchNotFound(_)) - )); - } - } - - mod go_back_to_integration { - use pretty_assertions::assert_eq; - - use super::*; - - #[tokio::test] - async fn should_preserve_applied_vbranches() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - std::fs::write(repository.path().join("file.txt"), "one").unwrap(); - let oid_one = repository.commit_all("one"); - std::fs::write(repository.path().join("file.txt"), "two").unwrap(); - repository.commit_all("two"); - repository.push(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let vbranch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - std::fs::write(repository.path().join("another file.txt"), "content").unwrap(); - controller - .create_commit(&project_id, &vbranch_id, "one", None, false) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - repository.checkout_commit(oid_one); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, vbranch_id); - assert!(branches[0].active); - } - - #[tokio::test] - async fn from_target_branch_index_conflicts() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - std::fs::write(repository.path().join("file.txt"), "one").unwrap(); - let oid_one = repository.commit_all("one"); - std::fs::write(repository.path().join("file.txt"), "two").unwrap(); - repository.commit_all("two"); - repository.push(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert!(branches.is_empty()); - - repository.checkout_commit(oid_one); - std::fs::write(repository.path().join("file.txt"), "tree").unwrap(); - - assert!(matches!( - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap_err(), - ControllerError::Action(errors::SetBaseBranchError::DirtyWorkingDirectory) - )); - } - - #[tokio::test] - async fn from_target_branch_with_uncommited() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - std::fs::write(repository.path().join("file.txt"), "one").unwrap(); - let oid_one = repository.commit_all("one"); - std::fs::write(repository.path().join("file.txt"), "two").unwrap(); - repository.commit_all("two"); - repository.push(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert!(branches.is_empty()); - - repository.checkout_commit(oid_one); - std::fs::write(repository.path().join("another file.txt"), "tree").unwrap(); - - assert!(matches!( - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .map_err(|error| dbg!(error)) - .unwrap_err(), - ControllerError::Action(errors::SetBaseBranchError::DirtyWorkingDirectory) - )); - } - - #[tokio::test] - async fn from_target_branch_with_commit() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - std::fs::write(repository.path().join("file.txt"), "one").unwrap(); - let oid_one = repository.commit_all("one"); - std::fs::write(repository.path().join("file.txt"), "two").unwrap(); - repository.commit_all("two"); - repository.push(); - - let base = controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert!(branches.is_empty()); - - repository.checkout_commit(oid_one); - std::fs::write(repository.path().join("another file.txt"), "tree").unwrap(); - repository.commit_all("three"); - - let base_two = controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - assert_eq!(base_two, base); - } - - #[tokio::test] - async fn from_target_branch_without_any_changes() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - std::fs::write(repository.path().join("file.txt"), "one").unwrap(); - let oid_one = repository.commit_all("one"); - std::fs::write(repository.path().join("file.txt"), "two").unwrap(); - repository.commit_all("two"); - repository.push(); - - let base = controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert!(branches.is_empty()); - - repository.checkout_commit(oid_one); - - let base_two = controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - assert_eq!(base_two, base); - } - } -} - -mod unapply { - use super::*; - - #[tokio::test] - async fn unapply_with_data() { - let Test { - project_id, - controller, - repository, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - controller - .unapply_virtual_branch(&project_id, &branches[0].id) - .await - .unwrap(); - - assert!(!repository.path().join("file.txt").exists()); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(!branches[0].active); - } - - #[tokio::test] - async fn conflicting() { - let Test { - project_id, - controller, - repository, - .. 
- } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a conflicting branch, and stash it - - std::fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].base_current); - assert!(branches[0].active); - assert_eq!(branches[0].files[0].hunks[0].diff, "@@ -1 +1 @@\n-first\n\\ No newline at end of file\n+conflict\n\\ No newline at end of file\n"); - - controller - .unapply_virtual_branch(&project_id, &branches[0].id) - .await - .unwrap(); - - branches[0].id - }; - - { - // update base branch, causing conflict - controller.update_base_branch(&project_id).await.unwrap(); - - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "second" - ); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|branch| branch.id == branch_id) - .unwrap(); - assert!(!branch.base_current); - assert!(!branch.active); - } - - { - // apply branch, it should conflict - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert!(branch.base_current); - assert!(branch.conflicted); - assert_eq!(branch.files[0].hunks[0].diff, "@@ -1 +1,5 @@\n-first\n\\ No newline at end of file\n+<<<<<<< ours\n+conflict\n+=======\n+second\n+>>>>>>> theirs\n"); - } - - { - controller - .unapply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "second" - ); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert!(!branch.active); - assert!(!branch.base_current); - assert!(!branch.conflicted); - assert_eq!(branch.files[0].hunks[0].diff, "@@ -1 +1 @@\n-first\n\\ No newline at end of file\n+conflict\n\\ No newline at end of file\n"); - } - } - - #[tokio::test] - async fn delete_if_empty() { - let Test { - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - controller - .unapply_virtual_branch(&project_id, &branches[0].id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - } -} - -mod apply_virtual_branch { - use super::*; - - #[tokio::test] - async fn deltect_conflict() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = { - let branch1_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - fs::write(repository.path().join("file.txt"), "branch one").unwrap(); - - branch1_id - }; - - // unapply first vbranch - controller - .unapply_virtual_branch(&project_id, &branch1_id) - .await - .unwrap(); - - { - // create another vbranch that conflicts with the first one - controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - fs::write(repository.path().join("file.txt"), "branch two").unwrap(); - } - - { - // it should not be possible to apply the first branch - assert!(!controller - .can_apply_virtual_branch(&project_id, &branch1_id) - .await - .unwrap()); - - assert!(matches!( - controller - .apply_virtual_branch(&project_id, &branch1_id) - .await, - Err(ControllerError::Action( - errors::ApplyBranchError::BranchConflicts(_) - )) - )); - } - } - - #[tokio::test] - async fn rebase_commit() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "one").unwrap(); - fs::write(repository.path().join("another_file.txt"), "").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "two").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = { - // create a branch with some commited work - let branch1_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - fs::write(repository.path().join("another_file.txt"), "virtual").unwrap(); - - controller - .create_commit(&project_id, &branch1_id, "virtual commit", None, false) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert!(branches[0].active); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - - branch1_id - }; - - { - // unapply first vbranch - controller - .unapply_virtual_branch(&project_id, &branch1_id) - .await - .unwrap(); - - assert_eq!( - fs::read_to_string(repository.path().join("another_file.txt")).unwrap(), - "" - ); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "one" - ); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - assert!(!branches[0].active); - } - - { - // fetch remote - controller.update_base_branch(&project_id).await.unwrap(); - - // branch is stil unapplied - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - assert!(!branches[0].active); - assert!(!branches[0].conflicted); - - assert_eq!( - fs::read_to_string(repository.path().join("another_file.txt")).unwrap(), - "" - ); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "two" - ); - } - - { - // apply first vbranch again - controller - .apply_virtual_branch(&project_id, &branch1_id) - .await - .unwrap(); - - // it should be rebased - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - - assert_eq!( - fs::read_to_string(repository.path().join("another_file.txt")).unwrap(), - "virtual" - ); - - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "two" - ); - } - } - - #[tokio::test] - async fn rebase_work() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = { - // make a branch with some work - let branch1_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - fs::write(repository.path().join("another_file.txt"), "").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert!(branches[0].active); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - - branch1_id - }; - - { - // unapply first vbranch - controller - .unapply_virtual_branch(&project_id, &branch1_id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert!(!branches[0].active); - - assert!(!repository.path().join("another_file.txt").exists()); - assert!(!repository.path().join("file.txt").exists()); - } - - { - // fetch remote - controller.update_base_branch(&project_id).await.unwrap(); - - // first branch is stil unapplied - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert!(!branches[0].active); - assert!(!branches[0].conflicted); - - assert!(!repository.path().join("another_file.txt").exists()); - assert!(repository.path().join("file.txt").exists()); - } - - { - // apply first vbranch again - controller - .apply_virtual_branch(&project_id, &branch1_id) - .await - .unwrap(); - - // workdir should be rebased, and work should be restored - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - - assert!(repository.path().join("another_file.txt").exists()); - assert!(repository.path().join("file.txt").exists()); - } - } -} - -#[tokio::test] -async fn resolve_conflict_flow() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch1_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert!(branches[0].active); - - branch1_id - }; - - { - // fetch remote - controller.update_base_branch(&project_id).await.unwrap(); - - // there is a conflict now, so the branch should be inactive - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert!(!branches[0].active); - } - - { - // when we apply conflicted branch, it has conflict - controller - .apply_virtual_branch(&project_id, &branch1_id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - - // and the conflict markers are in the file - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - - { - // can't commit conflicts - assert!(matches!( - controller - .create_commit(&project_id, &branch1_id, "commit conflicts", None, false) - .await, - Err(ControllerError::Action(errors::CommitError::Conflicted(_))) - )); - } - - { - // fixing the conflict removes conflicted mark - fs::write(repository.path().join("file.txt"), "resolved").unwrap(); - let commit_oid = controller - .create_commit(&project_id, &branch1_id, "resolution", None, false) - .await - .unwrap(); - - let commit = repository.find_commit(commit_oid).unwrap(); - assert_eq!(commit.parent_count(), 2); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - } -} - -mod fetch_from_target { - use super::*; - - #[tokio::test] - async fn should_update_last_fetched() { - let Test { - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let before_fetch = controller.get_base_branch_data(&project_id).await.unwrap(); - assert!(before_fetch.unwrap().last_fetched_ms.is_none()); - - let fetch = controller - .fetch_from_target(&project_id, None) - .await - .unwrap(); - assert!(fetch.last_fetched_ms.is_some()); - - let after_fetch = controller.get_base_branch_data(&project_id).await.unwrap(); - assert!(after_fetch.as_ref().unwrap().last_fetched_ms.is_some()); - assert_eq!(fetch.last_fetched_ms, after_fetch.unwrap().last_fetched_ms); - - let second_fetch = controller - .fetch_from_target(&project_id, None) - .await - .unwrap(); - assert!(second_fetch.last_fetched_ms.is_some()); - assert_ne!(fetch.last_fetched_ms, second_fetch.last_fetched_ms); - - let after_second_fetch = controller.get_base_branch_data(&project_id).await.unwrap(); - assert!(after_second_fetch - .as_ref() - .unwrap() - .last_fetched_ms - .is_some()); - assert_eq!( - second_fetch.last_fetched_ms, - after_second_fetch.unwrap().last_fetched_ms - ); - } -} - -mod update_base_branch { - use super::*; - - mod unapplied_branch { - - use super::*; - - #[tokio::test] - async fn conflicts_with_uncommitted_work() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch that is unapplied and contains not commited conflict - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .unapply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(&project_id).await.unwrap(); - - // branch should not be changed. 
- - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert!(!controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_not_pushed() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(&project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - controller - .unapply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(&project_id).await.unwrap(); - - // should not change the branch. 
- - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - assert!(!controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_pushed() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(&project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - controller - .unapply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(&project_id).await.unwrap(); - - // should not change the branch. 
- - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - assert!(!controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_not_pushed_fixed_with_more_work() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(&project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); - - controller - .unapply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(&project_id).await.unwrap(); - - // should rebase upstream, and leave uncommited file as is - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); // TODO: should be true - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert!(!controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); // TODO: should be true - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - 
std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_pushed_fixed_with_more_work() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(&project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); - - controller - .unapply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(&project_id).await.unwrap(); - - // should not touch the branch - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].files.len(), 1); - assert!(!controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn no_conflicts() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); - - controller - .create_commit( - &project_id, - &branch_id, - "non conflicting commit", - None, - false, - ) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "still no conflicts").unwrap(); - - controller - .unapply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - branch_id - }; - - { - // fetching remote - controller.update_base_branch(&project_id).await.unwrap(); - - // should update branch base - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert!(branches[0].upstream.is_none()); - assert!(controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "second" - ); - } - } - - #[tokio::test] - async fn integrated_commit_plus_work() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - repository.commit_all("first"); - repository.push(); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "second").unwrap(); - controller - .create_commit(&project_id, &branch_id, "second", None, false) - .await - .unwrap(); - - // more local work in the same branch - fs::write(repository.path().join("file2.txt"), "other").unwrap(); - - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - { - // merge branch upstream - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - - repository.merge(&branch.upstream.as_ref().unwrap().name); - repository.fetch(); - } - - controller - .unapply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - branch_id - }; - - { - // fetch remote - controller.update_base_branch(&project_id).await.unwrap(); - - // should remove integrated commit, but leave work - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert!(branches[0].upstream.is_none()); - assert!(controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "second" - ); - assert_eq!( - std::fs::read_to_string(repository.path().join("file2.txt")).unwrap(), - "other" - ); - } - } - - #[tokio::test] - async fn all_integrated() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "second").unwrap(); - - controller - .create_commit(&project_id, &branch_id, "second", None, false) - .await - .unwrap(); - - controller - .unapply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - }; - - { - // fetch remote - controller.update_base_branch(&project_id).await.unwrap(); - - // should remove identical branch - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - } - } - - #[tokio::test] - async fn integrate_work_while_being_behind() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // open pr - fs::write(repository.path().join("file2.txt"), "new file").unwrap(); - controller - .create_commit(&project_id, &branch_id, "second", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - } - - controller - .unapply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - { - // merge pr - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0[0] - .clone(); - repository.merge(&branch.upstream.as_ref().unwrap().name); - repository.fetch(); - } - - { - // fetch remote - controller.update_base_branch(&project_id).await.unwrap(); - - // just removes integrated branch - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - } - } - } - - mod applied_branch { - - use super::*; - - #[tokio::test] - async fn conflicts_with_uncommitted_work() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - - branch_id - }; - - { - // fetch remote - controller.update_base_branch(&project_id).await.unwrap(); - - // should stash conflicing branch - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert!(!controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_not_pushed() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(&project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(&project_id).await.unwrap(); - - // should stash the branch. 
- - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - assert!(!controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_pushed() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(&project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(&project_id).await.unwrap(); - - // should stash the branch. 
- - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - assert!(!controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_not_pushed_fixed_with_more_work() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(&project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(&project_id).await.unwrap(); - - // should rebase upstream, and leave uncommited file as is - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); // TODO: should be true - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert!(!controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); // TODO: should be true - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - 
} - - #[tokio::test] - async fn commited_conflict_pushed_fixed_with_more_work() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(&project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(&project_id).await.unwrap(); - - // should merge upstream, and leave uncommited file as is. - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); // TODO: should be true - assert_eq!(branches[0].commits.len(), 1); // TODO: should be 2 - assert_eq!(branches[0].files.len(), 1); - assert!(!controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); // TODO: should be true - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - mod no_conflicts_pushed { - use super::*; - - #[tokio::test] - async fn force_push_ok() { - let Test { - repository, - project_id, - controller, - projects, - .. 
- } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - projects - .update(&projects::UpdateRequest { - id: project_id, - ok_with_force_push: Some(true), - ..Default::default() - }) - .await - .unwrap(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); - - controller - .create_commit(&project_id, &branch_id, "no conflicts", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "still no conflict").unwrap(); - - branch_id - }; - - { - // fetch remote - controller.update_base_branch(&project_id).await.unwrap(); - - // rebases branch, since the branch is pushed and force pushing is - // allowed - - let (branches, _, _) = - controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].requires_force); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert!(!branches[0].commits[0].is_remote); - assert!(!branches[0].commits[0].is_integrated); - assert!(controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); - } - } - - #[tokio::test] - async fn force_push_not_ok() { - let Test { - repository, - project_id, - controller, - projects, - .. 
- } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); - - controller - .create_commit(&project_id, &branch_id, "no conflicts", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "still no conflict").unwrap(); - - branch_id - }; - - projects - .update(&projects::UpdateRequest { - id: project_id, - ok_with_force_push: Some(false), - ..Default::default() - }) - .await - .unwrap(); - - { - // fetch remote - controller.update_base_branch(&project_id).await.unwrap(); - - // creates a merge commit, since the branch is pushed - - let (branches, _, _) = - controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(!branches[0].requires_force); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 2); - assert!(!branches[0].commits[0].is_remote); - assert!(!branches[0].commits[0].is_integrated); - assert!(branches[0].commits[1].is_remote); - assert!(!branches[0].commits[1].is_integrated); - assert!(controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); - } - } - } - - #[tokio::test] - async fn no_conflicts() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); - - controller - .create_commit(&project_id, &branch_id, "no conflicts", None, false) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "still no conflict").unwrap(); - - branch_id - }; - - { - // fetch remote - controller.update_base_branch(&project_id).await.unwrap(); - - // just rebases branch - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert!(controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); - } - - { - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "second" - ); - assert_eq!( - std::fs::read_to_string(repository.path().join("file2.txt")).unwrap(), - "still no conflict" - ); - } - } - - #[tokio::test] - async fn integrated_commit_plus_work() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - repository.commit_all("first"); - repository.push(); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "second").unwrap(); - - controller - .create_commit(&project_id, &branch_id, "second", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - { - // merge branch upstream - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - repository.merge(&branch.upstream.as_ref().unwrap().name); - repository.fetch(); - } - - // more local work in the same branch - fs::write(repository.path().join("file2.txt"), "other").unwrap(); - - branch_id - }; - - { - // fetch remote - controller.update_base_branch(&project_id).await.unwrap(); - - // should remove integrated commit, but leave non integrated work as is - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert!(controller - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "second" - ); - assert_eq!( - std::fs::read_to_string(repository.path().join("file2.txt")).unwrap(), - "other" - ); - } - } - - #[tokio::test] - async fn integrated_with_locked_conflicting_hunks() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write( - repository.path().join("file.txt"), - "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n", - ) - .unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write( - repository.path().join("file.txt"), - "1\n2\n3\n4\n5\n6\n17\n8\n9\n10\n11\n12\n", - ) - .unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // branch has no conflict - let branch_id = { - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write( - repository.path().join("file.txt"), - "1\n2\n3\n4\n5\n6\n7\n8\n19\n10\n11\n12\n", - ) - .unwrap(); - - controller - .create_commit(&project_id, &branch_id, "first", None, false) - .await - .unwrap(); - - branch_id - }; - - // push the branch - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - // another locked conflicing hunk - fs::write( - repository.path().join("file.txt"), - "1\n2\n3\n4\n5\n6\n77\n8\n19\n10\n11\n12\n", - ) - .unwrap(); - - { - // merge branch remotely - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0[0] - .clone(); - repository.merge(&branch.upstream.as_ref().unwrap().name); - } - - repository.fetch(); - - { - controller.update_base_branch(&project_id).await.unwrap(); - - // removes integrated commit, leaves non commited work as is - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(branches[0].commits.is_empty()); - assert!(!branches[0].files.is_empty()); - } - - { - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].files[0].hunks.len(), 1); - assert_eq!(branches[0].files[0].hunks[0].diff, "@@ -4,7 +4,11 @@\n 4\n 5\n 6\n-7\n+<<<<<<< ours\n+77\n+=======\n+17\n+>>>>>>> theirs\n 8\n 19\n 10\n"); - assert_eq!(branches[0].commits.len(), 0); - } - } - - #[tokio::test] - async fn integrated_with_locked_hunks() { - let Test { - repository, - project_id, - controller, - projects, - .. 
- } = Test::default(); - - projects - .update(&projects::UpdateRequest { - id: project_id, - ok_with_force_push: Some(false), - ..Default::default() - }) - .await - .unwrap(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "first").unwrap(); - - controller - .create_commit(&project_id, &branch_id, "first", None, false) - .await - .unwrap(); - - branch_id - }; - - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - // another non-locked hunk - fs::write(repository.path().join("file.txt"), "first\nsecond").unwrap(); - - { - // push and merge branch remotely - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0[0] - .clone(); - repository.merge(&branch.upstream.as_ref().unwrap().name); - } - - repository.fetch(); - - { - controller.update_base_branch(&project_id).await.unwrap(); - - // removes integrated commit, leaves non commited work as is - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].commits.is_empty()); - assert!(branches[0].upstream.is_none()); - assert_eq!(branches[0].files.len(), 1); - } - - { - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); // no merge commit - } - } - - #[tokio::test] - async fn integrated_with_non_locked_hunks() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "first").unwrap(); - - controller - .create_commit(&project_id, &branch_id, "first", None, false) - .await - .unwrap(); - - branch_id - }; - - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - // another non-locked hunk - fs::write(repository.path().join("another_file.txt"), "first").unwrap(); - - { - // push and merge branch remotely - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0[0] - .clone(); - repository.merge(&branch.upstream.as_ref().unwrap().name); - } - - repository.fetch(); - - { - controller.update_base_branch(&project_id).await.unwrap(); - - // removes integrated commit, leaves non commited work as is - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].commits.is_empty()); - assert!(branches[0].upstream.is_none()); - assert!(!branches[0].files.is_empty()); - } - - { - controller - .apply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - } - } - - #[tokio::test] - async fn all_integrated() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "second").unwrap(); - - controller - .create_commit(&project_id, &branch_id, "second", None, false) - .await - .unwrap(); - }; - - { - // fetch remote - controller.update_base_branch(&project_id).await.unwrap(); - - // just removes integrated branch - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - } - } - - #[tokio::test] - async fn integrate_work_while_being_behind() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // open pr - fs::write(repository.path().join("file2.txt"), "new file").unwrap(); - controller - .create_commit(&project_id, &branch_id, "second", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - } - - { - // merge pr - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0[0] - .clone(); - repository.merge(&branch.upstream.as_ref().unwrap().name); - repository.fetch(); - } - - { - // fetch remote - controller.update_base_branch(&project_id).await.unwrap(); - - // just removes integrated branch - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - } - } - } -} - -mod reset_virtual_branch { - use crate::virtual_branches::{controller::ControllerError, errors::ResetBranchError}; - - use super::*; - - #[tokio::test] - async fn to_head() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let oid = { - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - // commit changes - let oid = controller - .create_commit(&project_id, &branch1_id, "commit", None, false) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].id, oid); - assert_eq!(branches[0].files.len(), 0); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - - oid - }; - - { - // reset changes to head - controller - .reset_virtual_branch(&project_id, &branch1_id, oid) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].id, oid); - assert_eq!(branches[0].files.len(), 0); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - } - } - - #[tokio::test] - async fn to_target() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - let base_branch = controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - // commit changes - let oid = controller - .create_commit(&project_id, &branch1_id, "commit", None, false) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].id, oid); - assert_eq!(branches[0].files.len(), 0); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - } - - { - // reset changes to head - controller - .reset_virtual_branch(&project_id, &branch1_id, base_branch.base_sha) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 0); - assert_eq!(branches[0].files.len(), 1); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - } - } - - #[tokio::test] - async fn to_commit() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let first_commit_oid = { - // commit some changes - - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let oid = controller - .create_commit(&project_id, &branch1_id, "commit", None, false) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].id, oid); - assert_eq!(branches[0].files.len(), 0); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - - oid - }; - - { - // commit some more - fs::write(repository.path().join("file.txt"), "more content").unwrap(); - - let second_commit_oid = controller - .create_commit(&project_id, &branch1_id, "commit", None, false) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 2); - assert_eq!(branches[0].commits[0].id, second_commit_oid); - assert_eq!(branches[0].commits[1].id, first_commit_oid); - assert_eq!(branches[0].files.len(), 0); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "more content" - ); - } - - { - // reset changes to the first commit - controller - .reset_virtual_branch(&project_id, &branch1_id, first_commit_oid) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].id, first_commit_oid); - 
assert_eq!(branches[0].files.len(), 1); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "more content" - ); - } - } - - #[tokio::test] - async fn to_non_existing() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - // commit changes - let oid = controller - .create_commit(&project_id, &branch1_id, "commit", None, false) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].id, oid); - assert_eq!(branches[0].files.len(), 0); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - - oid - }; - - assert!(matches!( - controller - .reset_virtual_branch( - &project_id, - &branch1_id, - "fe14df8c66b73c6276f7bb26102ad91da680afcb".parse().unwrap() - ) - .await, - Err(ControllerError::Action( - ResetBranchError::CommitNotFoundInBranch(_) - )) - )); - } -} - -mod upstream { - - use super::*; - - #[tokio::test] - async fn detect_upstream_commits() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let oid1 = { - // create first commit - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(&project_id, &branch1_id, "commit", None, false) - .await - .unwrap() - }; - - let oid2 = { - // create second commit - fs::write(repository.path().join("file.txt"), "content2").unwrap(); - controller - .create_commit(&project_id, &branch1_id, "commit", None, false) - .await - .unwrap() - }; - - // push - controller - .push_virtual_branch(&project_id, &branch1_id, false, None) - .await - .unwrap(); - - let oid3 = { - // create third commit - fs::write(repository.path().join("file.txt"), "content3").unwrap(); - controller - .create_commit(&project_id, &branch1_id, "commit", None, false) - .await - .unwrap() - }; - - { - // should correctly detect pushed commits - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 3); - assert_eq!(branches[0].commits[0].id, oid3); - assert!(!branches[0].commits[0].is_remote); - assert_eq!(branches[0].commits[1].id, oid2); - assert!(branches[0].commits[1].is_remote); - assert_eq!(branches[0].commits[2].id, oid1); - assert!(branches[0].commits[2].is_remote); - } - } - - #[tokio::test] - async fn detect_integrated_commits() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let oid1 = { - // create first commit - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(&project_id, &branch1_id, "commit", None, false) - .await - .unwrap() - }; - - let oid2 = { - // create second commit - fs::write(repository.path().join("file.txt"), "content2").unwrap(); - controller - .create_commit(&project_id, &branch1_id, "commit", None, false) - .await - .unwrap() - }; - - // push - controller - .push_virtual_branch(&project_id, &branch1_id, false, None) - .await - .unwrap(); - - { - // merge branch upstream - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch1_id) - .unwrap(); - repository.merge(&branch.upstream.as_ref().unwrap().name); - repository.fetch(); - } - - let oid3 = { - // create third commit - fs::write(repository.path().join("file.txt"), "content3").unwrap(); - controller - .create_commit(&project_id, &branch1_id, "commit", None, false) - .await - .unwrap() - }; - - { - // should correctly detect pushed commits - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 3); - assert_eq!(branches[0].commits[0].id, oid3); - assert!(!branches[0].commits[0].is_integrated); - assert_eq!(branches[0].commits[1].id, oid2); - assert!(branches[0].commits[1].is_integrated); - assert_eq!(branches[0].commits[2].id, oid1); - assert!(branches[0].commits[2].is_integrated); - } - } -} - -mod cherry_pick { - use super::*; - - mod cleanly { - - use super::*; - - #[tokio::test] - async fn applied() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one = { - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit", None, false) - .await - .unwrap() - }; - - let commit_two = { - fs::write(repository.path().join("file.txt"), "content two").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit", None, false) - .await - .unwrap() - }; - - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - controller - .reset_virtual_branch(&project_id, &branch_id, commit_one) - .await - .unwrap(); - - repository.reset_hard(None); - - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - - let cherry_picked_commit_oid = controller - .cherry_pick(&project_id, &branch_id, commit_two) - .await - .unwrap(); - assert!(cherry_picked_commit_oid.is_some()); - assert!(repository.path().join("file.txt").exists()); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content two" - ); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert_eq!(branches[0].commits.len(), 2); - assert_eq!(branches[0].commits[0].id, cherry_picked_commit_oid.unwrap()); - assert_eq!(branches[0].commits[1].id, commit_one); - } - - #[tokio::test] - async fn to_different_branch() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one = { - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit", None, false) - .await - .unwrap() - }; - - let commit_two = { - fs::write(repository.path().join("file_two.txt"), "content two").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit", None, false) - .await - .unwrap() - }; - - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - controller - .reset_virtual_branch(&project_id, &branch_id, commit_one) - .await - .unwrap(); - - repository.reset_hard(None); - - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - assert!(!repository.path().join("file_two.txt").exists()); - - let branch_two_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let cherry_picked_commit_oid = controller - .cherry_pick(&project_id, &branch_two_id, commit_two) - .await - .unwrap(); - assert!(cherry_picked_commit_oid.is_some()); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert!(repository.path().join("file_two.txt").exists()); - assert_eq!( - fs::read_to_string(repository.path().join("file_two.txt")).unwrap(), - "content two" - ); - - assert_eq!(branches.len(), 2); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].id, commit_one); - - assert_eq!(branches[1].id, branch_two_id); - assert!(branches[1].active); - assert_eq!(branches[1].commits.len(), 1); - assert_eq!(branches[1].commits[0].id, cherry_picked_commit_oid.unwrap()); - } - - #[tokio::test] - async fn non_applied() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one_oid = { - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file_two.txt"), "content two").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit", None, false) - .await - .unwrap() - }; - - let commit_three_oid = { - fs::write(repository.path().join("file_three.txt"), "content three").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit", None, false) - .await - .unwrap() - }; - - controller - .reset_virtual_branch(&project_id, &branch_id, commit_one_oid) - .await - .unwrap(); - - controller - .unapply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - assert!(matches!( - controller - .cherry_pick(&project_id, &branch_id, commit_three_oid) - .await, - Err(ControllerError::Action(errors::CherryPickError::NotApplied)) - )); - } - } - - mod with_conflicts { - - use super::*; - - #[tokio::test] - async fn applied() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one = { - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file_two.txt"), "content two").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit two", None, false) - .await - .unwrap() - }; - - let commit_three = { - fs::write(repository.path().join("file_three.txt"), "content three").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit three", None, false) - .await - .unwrap() - }; - - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - controller - .reset_virtual_branch(&project_id, &branch_id, commit_one) - .await - .unwrap(); - - repository.reset_hard(None); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - assert!(!repository.path().join("file_two.txt").exists()); - assert!(!repository.path().join("file_three.txt").exists()); - - // introduce conflict with the remote commit - fs::write(repository.path().join("file_three.txt"), "conflict").unwrap(); - - { - // cherry picking leads to conflict - let cherry_picked_commit_oid = controller - .cherry_pick(&project_id, &branch_id, commit_three) - .await - .unwrap(); - assert!(cherry_picked_commit_oid.is_none()); - - assert_eq!( - fs::read_to_string(repository.path().join("file_three.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\ncontent three\n>>>>>>> theirs\n" - ); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert_eq!(branches[0].files.len(), 1); - assert!(branches[0].files[0].conflicted); - assert_eq!(branches[0].commits.len(), 1); - } - - { - // conflict can be resolved - fs::write(repository.path().join("file_three.txt"), "resolved").unwrap(); - let commited_oid = controller - .create_commit(&project_id, &branch_id, "resolution", None, false) - .await - .unwrap(); - - let commit = repository.find_commit(commited_oid).unwrap(); - assert_eq!(commit.parent_count(), 2); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].requires_force); - assert!(!branches[0].conflicted); - assert_eq!(branches[0].commits.len(), 2); - // resolution commit is there - assert_eq!(branches[0].commits[0].id, commited_oid); - assert_eq!(branches[0].commits[1].id, commit_one); - } - } - - #[tokio::test] - async fn non_applied() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - let commit_oid = { - let first = repository.commit_all("commit"); - fs::write(repository.path().join("file.txt"), "content").unwrap(); - let second = repository.commit_all("commit"); - repository.push(); - repository.reset_hard(Some(first)); - second - }; - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - // introduce conflict with the remote commit - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - - controller - .unapply_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - assert!(matches!( - controller - .cherry_pick(&project_id, &branch_id, commit_oid) - .await, - Err(ControllerError::Action(errors::CherryPickError::NotApplied)) - )); - } - } -} - -mod amend { - - use super::*; - - #[tokio::test] - async fn to_default_target() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - // amend without head commit - fs::write(repository.path().join("file2.txt"), "content").unwrap(); - let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); - assert!(matches!( - controller - .amend(&project_id, &branch_id, &to_amend) - .await - .unwrap_err(), - ControllerError::Action(errors::AmendError::BranchHasNoCommits) - )); - } - - #[tokio::test] - async fn forcepush_allowed() { - let Test { - repository, - project_id, - controller, - projects, - .. - } = Test::default(); - - projects - .update(&projects::UpdateRequest { - id: project_id, - ok_with_force_push: Some(false), - ..Default::default() - }) - .await - .unwrap(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - projects - .update(&projects::UpdateRequest { - id: project_id, - ok_with_force_push: Some(true), - ..Default::default() - }) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // create commit - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit one", None, false) - .await - .unwrap(); - }; - - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - { - // amend another hunk - fs::write(repository.path().join("file2.txt"), "content2").unwrap(); - let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); - controller - .amend(&project_id, &branch_id, &to_amend) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert!(branch.requires_force); - assert_eq!(branch.commits.len(), 1); - assert_eq!(branch.files.len(), 0); - assert_eq!(branch.commits[0].files.len(), 2); - } - } - - #[tokio::test] - async fn forcepush_forbidden() { - let Test { - repository, - project_id, - controller, - projects, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - projects - .update(&projects::UpdateRequest { - id: project_id, - ok_with_force_push: Some(false), - ..Default::default() - }) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // create commit - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit one", None, false) - .await - .unwrap(); - }; - - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - { - fs::write(repository.path().join("file2.txt"), "content2").unwrap(); - let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); - assert!(matches!( - controller - .amend(&project_id, &branch_id, &to_amend) - .await - .unwrap_err(), - ControllerError::Action(errors::AmendError::ForcePushNotAllowed(_)) - )); - } - } - - #[tokio::test] - async fn non_locked_hunk() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // create commit - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit one", None, false) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.commits.len(), 1); - assert_eq!(branch.files.len(), 0); - assert_eq!(branch.commits[0].files.len(), 1); - }; - - { - // amend another hunk - fs::write(repository.path().join("file2.txt"), "content2").unwrap(); - let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); - controller - .amend(&project_id, &branch_id, &to_amend) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.commits.len(), 1); - assert_eq!(branch.files.len(), 0); - assert_eq!(branch.commits[0].files.len(), 2); - } - } - - #[tokio::test] - async fn locked_hunk() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // create commit - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit one", None, false) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.commits.len(), 1); - assert_eq!(branch.files.len(), 0); - assert_eq!(branch.commits[0].files.len(), 1); - assert_eq!( - branch.commits[0].files[0].hunks[0].diff, - "@@ -0,0 +1 @@\n+content\n\\ No newline at end of file\n" - ); - }; - - { - // amend another hunk - fs::write(repository.path().join("file.txt"), "more content").unwrap(); - let to_amend: branch::BranchOwnershipClaims = "file.txt:1-2".parse().unwrap(); - controller - .amend(&project_id, &branch_id, &to_amend) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - - assert_eq!(branch.commits.len(), 1); - assert_eq!(branch.files.len(), 0); - assert_eq!(branch.commits[0].files.len(), 1); - assert_eq!( - branch.commits[0].files[0].hunks[0].diff, - "@@ -0,0 +1 @@\n+more content\n\\ No newline at end of file\n" - ); - } - } - - #[tokio::test] - async fn non_existing_ownership() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // create commit - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit one", None, false) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.commits.len(), 1); - assert_eq!(branch.files.len(), 0); - assert_eq!(branch.commits[0].files.len(), 1); - }; - - { - // amend non existing hunk - let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); - assert!(matches!( - controller - .amend(&project_id, &branch_id, &to_amend) - .await - .unwrap_err(), - ControllerError::Action(errors::AmendError::TargetOwnerhshipNotFound(_)) - )); - } - } -} - -mod init { - use super::*; - - #[tokio::test] - async fn twice() { - let data_dir = paths::data_dir(); - let keys = keys::Controller::from_path(&data_dir); - let projects = projects::Controller::from_path(&data_dir); - let users = users::Controller::from_path(&data_dir); - let helper = git::credentials::Helper::from_path(&data_dir); - - let test_project = TestProject::default(); - - let controller = Controller::new(data_dir, projects.clone(), users, keys, helper); - - { - let project = projects - .add(test_project.path()) - .expect("failed to add project"); - controller - .set_base_branch(&project.id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - assert!(controller - .list_virtual_branches(&project.id) - .await - .unwrap() - .0 - .is_empty()); - 
projects.delete(&project.id).await.unwrap(); - controller - .list_virtual_branches(&project.id) - .await - .unwrap_err(); - } - - { - let project = projects.add(test_project.path()).unwrap(); - controller - .set_base_branch(&project.id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // even though project is on gitbutler/integration, we should not import it - assert!(controller - .list_virtual_branches(&project.id) - .await - .unwrap() - .0 - .is_empty()); - } - } - - #[tokio::test] - async fn dirty_non_target() { - // a situation when you initialize project while being on the local verison of the master - // that has uncommited changes. - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - repository.checkout(&"refs/heads/some-feature".parse().unwrap()); - - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].files[0].hunks.len(), 1); - assert!(branches[0].upstream.is_none()); - assert_eq!(branches[0].name, "some-feature"); - } - - #[tokio::test] - async fn dirty_target() { - // a situation when you initialize project while being on the local verison of the master - // that has uncommited changes. - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].files[0].hunks.len(), 1); - assert!(branches[0].upstream.is_none()); - assert_eq!(branches[0].name, "master"); - } - - #[tokio::test] - async fn commit_on_non_target_local() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - repository.checkout(&"refs/heads/some-feature".parse().unwrap()); - fs::write(repository.path().join("file.txt"), "content").unwrap(); - repository.commit_all("commit on target"); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].files.is_empty()); - assert_eq!(branches[0].commits.len(), 1); - assert!(branches[0].upstream.is_none()); - assert_eq!(branches[0].name, "some-feature"); - } - - #[tokio::test] - async fn commit_on_non_target_remote() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - repository.checkout(&"refs/heads/some-feature".parse().unwrap()); - fs::write(repository.path().join("file.txt"), "content").unwrap(); - repository.commit_all("commit on target"); - repository.push_branch(&"refs/heads/some-feature".parse().unwrap()); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].files.is_empty()); - assert_eq!(branches[0].commits.len(), 1); - assert!(branches[0].upstream.is_some()); - assert_eq!(branches[0].name, "some-feature"); - } - - #[tokio::test] - async fn commit_on_target() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - fs::write(repository.path().join("file.txt"), "content").unwrap(); - repository.commit_all("commit on target"); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].files.is_empty()); - assert_eq!(branches[0].commits.len(), 1); - assert!(branches[0].upstream.is_none()); - assert_eq!(branches[0].name, "master"); - } - - #[tokio::test] - async fn submodule() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - let submodule_url: git::Url = TestProject::default() - .path() - .display() - .to_string() - .parse() - .unwrap(); - repository.add_submodule(&submodule_url, path::Path::new("submodule")); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].files[0].hunks.len(), 1); - } -} - -mod squash { - - use super::*; - - #[tokio::test] - async fn head() { - let Test { - repository, - project_id, - controller, - .. 
-        } = Test::default();
-
-        controller
-            .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
-            .await
-            .unwrap();
-
-        let branch_id = controller
-            .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
-            .await
-            .unwrap();
-
-        {
-            fs::write(repository.path().join("file one.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit one", None, false)
-                .await
-                .unwrap()
-        };
-
-        {
-            fs::write(repository.path().join("file two.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit two", None, false)
-                .await
-                .unwrap()
-        };
-
-        {
-            fs::write(repository.path().join("file three.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit three", None, false)
-                .await
-                .unwrap()
-        };
-
-        let commit_four_oid = {
-            fs::write(repository.path().join("file four.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit four", None, false)
-                .await
-                .unwrap()
-        };
-
-        controller
-            .squash(&project_id, &branch_id, commit_four_oid)
-            .await
-            .unwrap();
-
-        let branch = controller
-            .list_virtual_branches(&project_id)
-            .await
-            .unwrap()
-            .0
-            .into_iter()
-            .find(|b| b.id == branch_id)
-            .unwrap();
-
-        let descriptions = branch
-            .commits
-            .iter()
-            .map(|c| c.description.clone())
-            .collect::<Vec<_>>();
-        assert_eq!(
-            descriptions,
-            vec!["commit three\ncommit four", "commit two", "commit one"]
-        );
-    }
-
-    #[tokio::test]
-    async fn middle() {
-        let Test {
-            repository,
-            project_id,
-            controller,
-            ..
-        } = Test::default();
-
-        controller
-            .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
-            .await
-            .unwrap();
-
-        let branch_id = controller
-            .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
-            .await
-            .unwrap();
-
-        {
-            fs::write(repository.path().join("file one.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit one", None, false)
-                .await
-                .unwrap()
-        };
-
-        let commit_two_oid = {
-            fs::write(repository.path().join("file two.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit two", None, false)
-                .await
-                .unwrap()
-        };
-
-        {
-            fs::write(repository.path().join("file three.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit three", None, false)
-                .await
-                .unwrap()
-        };
-
-        {
-            fs::write(repository.path().join("file four.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit four", None, false)
-                .await
-                .unwrap()
-        };
-
-        controller
-            .squash(&project_id, &branch_id, commit_two_oid)
-            .await
-            .unwrap();
-
-        let branch = controller
-            .list_virtual_branches(&project_id)
-            .await
-            .unwrap()
-            .0
-            .into_iter()
-            .find(|b| b.id == branch_id)
-            .unwrap();
-
-        let descriptions = branch
-            .commits
-            .iter()
-            .map(|c| c.description.clone())
-            .collect::<Vec<_>>();
-        assert_eq!(
-            descriptions,
-            vec!["commit four", "commit three", "commit one\ncommit two"]
-        );
-    }
-
-    #[tokio::test]
-    async fn forcepush_allowed() {
-        let Test {
-            repository,
-            project_id,
-            controller,
-            projects,
-            ..
-        } = Test::default();
-
-        projects
-            .update(&projects::UpdateRequest {
-                id: project_id,
-                ok_with_force_push: Some(true),
-                ..Default::default()
-            })
-            .await
-            .unwrap();
-
-        controller
-            .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
-            .await
-            .unwrap();
-
-        let branch_id = controller
-            .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
-            .await
-            .unwrap();
-
-        {
-            fs::write(repository.path().join("file one.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit one", None, false)
-                .await
-                .unwrap()
-        };
-
-        controller
-            .push_virtual_branch(&project_id, &branch_id, false, None)
-            .await
-            .unwrap();
-
-        let commit_two_oid = {
-            fs::write(repository.path().join("file two.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit two", None, false)
-                .await
-                .unwrap()
-        };
-
-        {
-            fs::write(repository.path().join("file three.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit three", None, false)
-                .await
-                .unwrap()
-        };
-
-        {
-            fs::write(repository.path().join("file four.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit four", None, false)
-                .await
-                .unwrap()
-        };
-
-        controller
-            .squash(&project_id, &branch_id, commit_two_oid)
-            .await
-            .unwrap();
-
-        let branch = controller
-            .list_virtual_branches(&project_id)
-            .await
-            .unwrap()
-            .0
-            .into_iter()
-            .find(|b| b.id == branch_id)
-            .unwrap();
-
-        let descriptions = branch
-            .commits
-            .iter()
-            .map(|c| c.description.clone())
-            .collect::<Vec<_>>();
-        assert_eq!(
-            descriptions,
-            vec!["commit four", "commit three", "commit one\ncommit two"]
-        );
-        assert!(branch.requires_force);
-    }
-
-    #[tokio::test]
-    async fn forcepush_forbidden() {
-        let Test {
-            repository,
-            project_id,
-            controller,
-            projects,
-            ..
-        } = Test::default();
-
-        controller
-            .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
-            .await
-            .unwrap();
-
-        let branch_id = controller
-            .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
-            .await
-            .unwrap();
-
-        {
-            fs::write(repository.path().join("file one.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit one", None, false)
-                .await
-                .unwrap()
-        };
-
-        controller
-            .push_virtual_branch(&project_id, &branch_id, false, None)
-            .await
-            .unwrap();
-
-        projects
-            .update(&projects::UpdateRequest {
-                id: project_id,
-                ok_with_force_push: Some(false),
-                ..Default::default()
-            })
-            .await
-            .unwrap();
-
-        let commit_two_oid = {
-            fs::write(repository.path().join("file two.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit two", None, false)
-                .await
-                .unwrap()
-        };
-
-        {
-            fs::write(repository.path().join("file three.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit three", None, false)
-                .await
-                .unwrap()
-        };
-
-        {
-            fs::write(repository.path().join("file four.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit four", None, false)
-                .await
-                .unwrap()
-        };
-
-        assert!(matches!(
-            controller
-                .squash(&project_id, &branch_id, commit_two_oid)
-                .await
-                .unwrap_err(),
-            ControllerError::Action(errors::SquashError::ForcePushNotAllowed(_))
-        ));
-    }
-
-    #[tokio::test]
-    async fn root() {
-        let Test {
-            repository,
-            project_id,
-            controller,
-            ..
-        } = Test::default();
-
-        controller
-            .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
-            .await
-            .unwrap();
-
-        let branch_id = controller
-            .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
-            .await
-            .unwrap();
-
-        let commit_one_oid = {
-            fs::write(repository.path().join("file one.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit one", None, false)
-                .await
-                .unwrap()
-        };
-
-        assert!(matches!(
-            controller
-                .squash(&project_id, &branch_id, commit_one_oid)
-                .await
-                .unwrap_err(),
-            ControllerError::Action(errors::SquashError::CantSquashRootCommit)
-        ));
-    }
-}
-
-mod update_commit_message {
-
-    use super::*;
-
-    #[tokio::test]
-    async fn head() {
-        let Test {
-            repository,
-            project_id,
-            controller,
-            ..
-        } = Test::default();
-
-        controller
-            .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
-            .await
-            .unwrap();
-
-        let branch_id = controller
-            .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
-            .await
-            .unwrap();
-
-        {
-            fs::write(repository.path().join("file one.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit one", None, false)
-                .await
-                .unwrap()
-        };
-
-        {
-            fs::write(repository.path().join("file two.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit two", None, false)
-                .await
-                .unwrap()
-        };
-
-        let commit_three_oid = {
-            fs::write(repository.path().join("file three.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit three", None, false)
-                .await
-                .unwrap()
-        };
-
-        controller
-            .update_commit_message(
-                &project_id,
-                &branch_id,
-                commit_three_oid,
-                "commit three updated",
-            )
-            .await
-            .unwrap();
-
-        let branch = controller
-            .list_virtual_branches(&project_id)
-            .await
-            .unwrap()
-            .0
-            .into_iter()
-            .find(|b| b.id == branch_id)
-            .unwrap();
-
-        let descriptions = branch
-            .commits
-            .iter()
-            .map(|c| c.description.clone())
-            .collect::<Vec<_>>();
-
-        assert_eq!(
-            descriptions,
-            vec!["commit three updated", "commit two", "commit one"]
-        );
-    }
-
-    #[tokio::test]
-    async fn middle() {
-        let Test {
-            repository,
-            project_id,
-            controller,
-            ..
-        } = Test::default();
-
-        controller
-            .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
-            .await
-            .unwrap();
-
-        let branch_id = controller
-            .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
-            .await
-            .unwrap();
-
-        {
-            fs::write(repository.path().join("file one.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit one", None, false)
-                .await
-                .unwrap()
-        };
-
-        let commit_two_oid = {
-            fs::write(repository.path().join("file two.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit two", None, false)
-                .await
-                .unwrap()
-        };
-
-        {
-            fs::write(repository.path().join("file three.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit three", None, false)
-                .await
-                .unwrap()
-        };
-
-        controller
-            .update_commit_message(
-                &project_id,
-                &branch_id,
-                commit_two_oid,
-                "commit two updated",
-            )
-            .await
-            .unwrap();
-
-        let branch = controller
-            .list_virtual_branches(&project_id)
-            .await
-            .unwrap()
-            .0
-            .into_iter()
-            .find(|b| b.id == branch_id)
-            .unwrap();
-
-        let descriptions = branch
-            .commits
-            .iter()
-            .map(|c| c.description.clone())
-            .collect::<Vec<_>>();
-        assert_eq!(
-            descriptions,
-            vec!["commit three", "commit two updated", "commit one"]
-        );
-    }
-
-    #[tokio::test]
-    async fn forcepush_allowed() {
-        let Test {
-            repository,
-            project_id,
-            controller,
-            projects,
-            ..
-        } = Test::default();
-
-        controller
-            .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
-            .await
-            .unwrap();
-
-        projects
-            .update(&projects::UpdateRequest {
-                id: project_id,
-                ok_with_force_push: Some(true),
-                ..Default::default()
-            })
-            .await
-            .unwrap();
-
-        let branch_id = controller
-            .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
-            .await
-            .unwrap();
-
-        let commit_one_oid = {
-            fs::write(repository.path().join("file one.txt"), "").unwrap();
-            controller
-                .create_commit(&project_id, &branch_id, "commit one", None, false)
-                .await
-                .unwrap()
-        };
-
-        controller
-            .push_virtual_branch(&project_id, &branch_id, false, None)
-            .await
-            .unwrap();
-
-        controller
-            .update_commit_message(
-                &project_id,
-                &branch_id,
-                commit_one_oid,
-                "commit one updated",
-            )
-            .await
-            .unwrap();
-
-        let branch = controller
-            .list_virtual_branches(&project_id)
-            .await
-            .unwrap()
-            .0
-            .into_iter()
-            .find(|b| b.id == branch_id)
-            .unwrap();
-
-        let descriptions = branch
-            .commits
-            .iter()
-            .map(|c| c.description.clone())
-            .collect::<Vec<_>>();
-        assert_eq!(descriptions, vec!["commit one updated"]);
-        assert!(branch.requires_force);
-    }
-
-    #[tokio::test]
-    async fn forcepush_forbidden() {
-        let Test {
-            repository,
-            project_id,
-            controller,
-            projects,
-            ..
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - projects - .update(&projects::UpdateRequest { - id: project_id, - ok_with_force_push: Some(false), - ..Default::default() - }) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one_oid = { - fs::write(repository.path().join("file one.txt"), "").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - assert!(matches!( - controller - .update_commit_message( - &project_id, - &branch_id, - commit_one_oid, - "commit one updated", - ) - .await - .unwrap_err(), - ControllerError::Action(errors::UpdateCommitMessageError::ForcePushNotAllowed(_)) - )); - } - - #[tokio::test] - async fn root() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one_oid = { - fs::write(repository.path().join("file one.txt"), "").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file two.txt"), "").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit two", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file three.txt"), "").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit three", None, false) - .await - .unwrap() - }; - - controller - .update_commit_message( - &project_id, - &branch_id, - commit_one_oid, - "commit one updated", - ) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - - let descriptions = branch - .commits - .iter() - .map(|c| c.description.clone()) - .collect::>(); - assert_eq!( - descriptions, - vec!["commit three", "commit two", "commit one updated"] - ); - } - - #[tokio::test] - async fn empty() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one_oid = { - fs::write(repository.path().join("file one.txt"), "").unwrap(); - controller - .create_commit(&project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - assert!(matches!( - controller - .update_commit_message(&project_id, &branch_id, commit_one_oid, "",) - .await, - Err(ControllerError::Action( - errors::UpdateCommitMessageError::EmptyMessage - )) - )); - } -} - -mod create_virtual_branch_from_branch { - use super::*; - - #[tokio::test] - async fn integration() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_name = { - // make a remote branch - - let branch_id = controller - .create_virtual_branch(&project_id, &super::branch::BranchCreateRequest::default()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "first\n").unwrap(); - controller - .create_commit(&project_id, &branch_id, "first", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|branch| branch.id == branch_id) - .unwrap(); - - let name = branch.upstream.unwrap().name; - - controller - .delete_virtual_branch(&project_id, &branch_id) - .await - .unwrap(); - - name - }; - - // checkout a existing remote branch - let branch_id = controller - .create_virtual_branch_from_branch(&project_id, &branch_name) - .await - .unwrap(); - - { - // add a commit - std::fs::write(repository.path().join("file.txt"), "first\nsecond").unwrap(); - - controller - .create_commit(&project_id, &branch_id, "second", None, false) - .await - .unwrap(); - } - - { - // meanwhile, there is a new commit on master - repository.checkout(&"refs/heads/master".parse().unwrap()); - std::fs::write(repository.path().join("another.txt"), "").unwrap(); - repository.commit_all("another"); - repository.push_branch(&"refs/heads/master".parse().unwrap()); - repository.checkout(&"refs/heads/gitbutler/integration".parse().unwrap()); - } - - { - // merge branch into master - controller - .push_virtual_branch(&project_id, &branch_id, false, None) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|branch| branch.id == branch_id) - .unwrap(); - - assert!(branch.commits[0].is_remote); - assert!(!branch.commits[0].is_integrated); - assert!(branch.commits[1].is_remote); - assert!(!branch.commits[1].is_integrated); - - repository.rebase_and_merge(&branch_name); - } - - { - // should mark commits as integrated - controller - .fetch_from_target(&project_id, None) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|branch| branch.id == branch_id) - .unwrap(); - - assert!(branch.commits[0].is_remote); - assert!(branch.commits[0].is_integrated); - assert!(branch.commits[1].is_remote); - assert!(branch.commits[1].is_integrated); - } - } - - #[tokio::test] - async fn no_conflicts() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - { - // create a remote branch - let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); - repository.checkout(&branch_name); - fs::write(repository.path().join("file.txt"), "first").unwrap(); - repository.commit_all("first"); - repository.push_branch(&branch_name); - repository.checkout(&"refs/heads/master".parse().unwrap()); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert!(branches.is_empty()); - - let branch_id = controller - .create_virtual_branch_from_branch( - &project_id, - &"refs/remotes/origin/branch".parse().unwrap(), - ) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].description, "first"); - } - - #[tokio::test] - async fn conflicts_with_uncommited() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - { - // create a remote branch - let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); - repository.checkout(&branch_name); - fs::write(repository.path().join("file.txt"), "first").unwrap(); - repository.commit_all("first"); - repository.push_branch(&branch_name); - repository.checkout(&"refs/heads/master".parse().unwrap()); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // create a local branch that conflicts with remote - { - std::fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - }; - - // branch should be created unapplied, because of the conflict - - let new_branch_id = controller - .create_virtual_branch_from_branch( - &project_id, - &"refs/remotes/origin/branch".parse().unwrap(), - ) - .await - .unwrap(); - let new_branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|branch| branch.id == new_branch_id) - .unwrap(); - assert!(!new_branch.active); - assert_eq!(new_branch.commits.len(), 1); - assert!(new_branch.upstream.is_some()); - } - - #[tokio::test] - async fn conflicts_with_commited() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - { - // create a remote branch - let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); - repository.checkout(&branch_name); - fs::write(repository.path().join("file.txt"), "first").unwrap(); - repository.commit_all("first"); - repository.push_branch(&branch_name); - repository.checkout(&"refs/heads/master".parse().unwrap()); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // create a local branch that conflicts with remote - { - std::fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - controller - .create_commit(&project_id, &branches[0].id, "hej", None, false) - .await - .unwrap(); - }; - - // branch should be created unapplied, because of the conflict - - let new_branch_id = controller - .create_virtual_branch_from_branch( - &project_id, - &"refs/remotes/origin/branch".parse().unwrap(), - ) - .await - .unwrap(); - let new_branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|branch| branch.id == new_branch_id) - .unwrap(); - assert!(!new_branch.active); - assert_eq!(new_branch.commits.len(), 1); - assert!(new_branch.upstream.is_some()); - } - - #[tokio::test] - async fn from_default_target() { - let Test { - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // branch should be created unapplied, because of the conflict - - assert!(matches!( - controller - .create_virtual_branch_from_branch( - &project_id, - &"refs/remotes/origin/master".parse().unwrap(), - ) - .await - .unwrap_err(), - ControllerError::Action( - errors::CreateVirtualBranchFromBranchError::CantMakeBranchFromDefaultTarget - ) - )); - } - - #[tokio::test] - async fn from_non_existent_branch() { - let Test { - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // branch should be created unapplied, because of the conflict - - assert!(matches!( - controller - .create_virtual_branch_from_branch( - &project_id, - &"refs/remotes/origin/branch".parse().unwrap(), - ) - .await - .unwrap_err(), - ControllerError::Action(errors::CreateVirtualBranchFromBranchError::BranchNotFound( - _ - )) - )); - } - - #[tokio::test] - async fn from_state_remote_branch() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - { - // create a remote branch - let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); - repository.checkout(&branch_name); - fs::write(repository.path().join("file.txt"), "branch commit").unwrap(); - repository.commit_all("branch commit"); - repository.push_branch(&branch_name); - repository.checkout(&"refs/heads/master".parse().unwrap()); - - // make remote branch stale - std::fs::write(repository.path().join("antoher_file.txt"), "master commit").unwrap(); - repository.commit_all("master commit"); - repository.push(); - } - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch_from_branch( - &project_id, - &"refs/remotes/origin/branch".parse().unwrap(), - ) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert_eq!(branches[0].commits.len(), 1); - assert!(branches[0].files.is_empty()); - assert_eq!(branches[0].commits[0].description, "branch commit"); - } -} - -mod selected_for_changes { - use super::*; - - #[tokio::test] - async fn unapplying_selected_branch_selects_anther() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file one.txt"), "").unwrap(); - - // first branch should be created as default - let b_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - // if default branch exists, new branch should not be created as default - let b2_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - - let b = branches.iter().find(|b| b.id == b_id).unwrap(); - - let b2 = branches.iter().find(|b| b.id == b2_id).unwrap(); - - assert!(b.selected_for_changes); - assert!(!b2.selected_for_changes); - - controller - .unapply_virtual_branch(&project_id, &b_id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - - assert_eq!(branches.len(), 2); - assert_eq!(branches[0].id, b.id); - assert!(!branches[0].selected_for_changes); - assert!(!branches[0].active); - assert_eq!(branches[1].id, b2.id); - assert!(branches[1].selected_for_changes); - assert!(branches[1].active); - } - - #[tokio::test] - async fn deleting_selected_branch_selects_anther() { - let Test { - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // first branch should be created as default - let b_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - // if default branch exists, new branch should not be created as default - let b2_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - - let b = branches.iter().find(|b| b.id == b_id).unwrap(); - - let b2 = branches.iter().find(|b| b.id == b2_id).unwrap(); - - assert!(b.selected_for_changes); - assert!(!b2.selected_for_changes); - - controller - .delete_virtual_branch(&project_id, &b_id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, b2.id); - assert!(branches[0].selected_for_changes); - } - - #[tokio::test] - async fn create_virtual_branch_should_set_selected_for_changes() { - let Test { - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // first branch should be created as default - let b_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b_id) - .unwrap(); - assert!(branch.selected_for_changes); - - // if default branch exists, new branch should not be created as default - let b_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b_id) - .unwrap(); - assert!(!branch.selected_for_changes); - - // explicitly don't make this one default - let b_id = controller - .create_virtual_branch( - &project_id, - &branch::BranchCreateRequest { - selected_for_changes: Some(false), - ..Default::default() - }, - ) - .await - .unwrap(); - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b_id) - .unwrap(); - assert!(!branch.selected_for_changes); - - // explicitly make this one default - let b_id = controller - .create_virtual_branch( - &project_id, - &branch::BranchCreateRequest { - selected_for_changes: Some(true), - ..Default::default() - }, - ) - .await - .unwrap(); - let branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b_id) - .unwrap(); - assert!(branch.selected_for_changes); - } - - #[tokio::test] - async fn update_virtual_branch_should_reset_selected_for_changes() { - let Test { - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let b1_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - let b1 = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b1_id) - .unwrap(); - assert!(b1.selected_for_changes); - - let b2_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - let b2 = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b2_id) - .unwrap(); - assert!(!b2.selected_for_changes); - - controller - .update_virtual_branch( - &project_id, - branch::BranchUpdateRequest { - id: b2_id, - selected_for_changes: Some(true), - ..Default::default() - }, - ) - .await - .unwrap(); - - let b1 = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b1_id) - .unwrap(); - assert!(!b1.selected_for_changes); - - let b2 = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b2_id) - .unwrap(); - assert!(b2.selected_for_changes); - } - - #[tokio::test] - async fn unapply_virtual_branch_should_reset_selected_for_changes() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let b1_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let b1 = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b1_id) - .unwrap(); - assert!(b1.selected_for_changes); - - controller - .unapply_virtual_branch(&project_id, &b1_id) - .await - .unwrap(); - - let b1 = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b1_id) - .unwrap(); - assert!(!b1.selected_for_changes); - } - - #[tokio::test] - async fn hunks_distribution() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches[0].files.len(), 1); - - controller - .create_virtual_branch( - &project_id, - &branch::BranchCreateRequest { - selected_for_changes: Some(true), - ..Default::default() - }, - ) - .await - .unwrap(); - std::fs::write(repository.path().join("another_file.txt"), "content").unwrap(); - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[1].files.len(), 1); - } - - #[tokio::test] - async fn applying_first_branch() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - controller - .unapply_virtual_branch(&project_id, &branches[0].id) - .await - .unwrap(); - controller - .apply_virtual_branch(&project_id, &branches[0].id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].active); - assert!(branches[0].selected_for_changes); - } -} - -mod move_commit_to_vbranch { - use crate::virtual_branches::BranchId; - - use super::*; - - #[tokio::test] - async fn no_diffs() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - let source_branch_id = branches[0].id; - - let commit_oid = controller - .create_commit(&project_id, &source_branch_id, "commit", None, false) - .await - .unwrap(); - - let target_branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - controller - .move_commit(&project_id, &target_branch_id, commit_oid) - .await - .unwrap(); - - let destination_branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == target_branch_id) - .unwrap(); - - let source_branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == source_branch_id) - .unwrap(); - - assert_eq!(destination_branch.commits.len(), 1); - assert_eq!(destination_branch.files.len(), 0); - assert_eq!(source_branch.commits.len(), 0); - assert_eq!(source_branch.files.len(), 0); - } - - #[tokio::test] - async fn diffs_on_source_branch() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - let source_branch_id = branches[0].id; - - let commit_oid = controller - .create_commit(&project_id, &source_branch_id, "commit", None, false) - .await - .unwrap(); - - std::fs::write( - repository.path().join("another file.txt"), - "another content", - ) - .unwrap(); - - let target_branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - controller - .move_commit(&project_id, &target_branch_id, commit_oid) - .await - .unwrap(); - - let destination_branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == target_branch_id) - .unwrap(); - - let source_branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == source_branch_id) - .unwrap(); - - assert_eq!(destination_branch.commits.len(), 1); - assert_eq!(destination_branch.files.len(), 0); - assert_eq!(source_branch.commits.len(), 0); - assert_eq!(source_branch.files.len(), 1); - } - - #[tokio::test] - async fn diffs_on_target_branch() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - let source_branch_id = branches[0].id; - - let commit_oid = controller - .create_commit(&project_id, &source_branch_id, "commit", None, false) - .await - .unwrap(); - - let target_branch_id = controller - .create_virtual_branch( - &project_id, - &branch::BranchCreateRequest { - selected_for_changes: Some(true), - ..Default::default() - }, - ) - .await - .unwrap(); - - std::fs::write( - repository.path().join("another file.txt"), - "another content", - ) - .unwrap(); - - controller - .move_commit(&project_id, &target_branch_id, commit_oid) - .await - .unwrap(); - - let destination_branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == target_branch_id) - .unwrap(); - - let source_branch = controller - .list_virtual_branches(&project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == source_branch_id) - .unwrap(); - - assert_eq!(destination_branch.commits.len(), 1); - assert_eq!(destination_branch.files.len(), 1); - assert_eq!(source_branch.commits.len(), 0); - assert_eq!(source_branch.files.len(), 0); - } - - #[tokio::test] - async fn locked_hunks_on_source_branch() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - let source_branch_id = branches[0].id; - - let commit_oid = controller - .create_commit(&project_id, &source_branch_id, "commit", None, false) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "locked content").unwrap(); - - let target_branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - assert!(matches!( - controller - .move_commit(&project_id, &target_branch_id, commit_oid) - .await - .unwrap_err(), - ControllerError::Action(errors::MoveCommitError::SourceLocked) - )); - } - - #[tokio::test] - async fn no_commit() { - let Test { - repository, - project_id, - controller, - .. - } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - let source_branch_id = branches[0].id; - - controller - .create_commit(&project_id, &source_branch_id, "commit", None, false) - .await - .unwrap(); - - let target_branch_id = controller - .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - assert!(matches!( - controller - .move_commit( - &project_id, - &target_branch_id, - git::Oid::from_str("a99c95cca7a60f1a2180c2f86fb18af97333c192").unwrap() - ) - .await - .unwrap_err(), - ControllerError::Action(errors::MoveCommitError::CommitNotFound(_)) - )); - } - - #[tokio::test] - async fn no_branch() { - let Test { - repository, - project_id, - controller, - .. 
- } = Test::default(); - - controller - .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - let source_branch_id = branches[0].id; - - let commit_oid = controller - .create_commit(&project_id, &source_branch_id, "commit", None, false) - .await - .unwrap(); - - assert!(matches!( - controller - .move_commit(&project_id, &BranchId::generate(), commit_oid) - .await - .unwrap_err(), - ControllerError::Action(errors::MoveCommitError::BranchNotFound(_)) - )); - } -} diff --git a/gitbutler-app/src/users/controller.rs b/gitbutler-app/src/users/controller.rs index be6f98f5d..b6c4d4d66 100644 --- a/gitbutler-app/src/users/controller.rs +++ b/gitbutler-app/src/users/controller.rs @@ -12,7 +12,6 @@ impl Controller { Controller { storage } } - #[cfg(test)] pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Controller { Controller::new(Storage::from_path(path)) } diff --git a/gitbutler-app/src/users/storage.rs b/gitbutler-app/src/users/storage.rs index 7a00f63cf..8c77323c3 100644 --- a/gitbutler-app/src/users/storage.rs +++ b/gitbutler-app/src/users/storage.rs @@ -22,7 +22,6 @@ impl Storage { Storage { storage } } - #[cfg(test)] pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Storage { Storage::new(storage::Storage::new(path)) } diff --git a/gitbutler-app/src/virtual_branches.rs b/gitbutler-app/src/virtual_branches.rs index 04193342f..ae76e2ce8 100644 --- a/gitbutler-app/src/virtual_branches.rs +++ b/gitbutler-app/src/virtual_branches.rs @@ -8,7 +8,7 @@ pub mod errors; mod files; pub use files::*; -mod integration; +pub mod integration; pub use integration::GITBUTLER_INTEGRATION_REFERENCE; mod base; @@ -22,11 +22,6 @@ pub mod commands; mod iterator; pub use iterator::BranchIterator as Iterator; -#[cfg(test)] -mod tests; -#[cfg(test)] -pub use tests::set_test_target; - mod r#virtual; pub use r#virtual::*; diff --git a/gitbutler-app/src/virtual_branches/branch/reader.rs b/gitbutler-app/src/virtual_branches/branch/reader.rs index 09918cf94..cebc0c009 100644 --- a/gitbutler-app/src/virtual_branches/branch/reader.rs +++ b/gitbutler-app/src/virtual_branches/branch/reader.rs @@ -17,107 +17,3 @@ impl<'r> BranchReader<'r> { Branch::from_reader(&self.reader.sub(format!("branches/{}", id))) } } - -#[cfg(test)] -mod tests { - use std::sync::atomic::{AtomicUsize, Ordering}; - - use anyhow::Result; - use once_cell::sync::Lazy; - - use crate::{ - sessions, - tests::{Case, Suite}, - virtual_branches::branch::BranchOwnershipClaims, - }; - - use super::{super::Writer, *}; - - static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0)); - - fn test_branch() -> Branch { - TEST_INDEX.fetch_add(1, Ordering::Relaxed); - - Branch { - id: BranchId::generate(), - name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), - notes: String::new(), - applied: true, - order: TEST_INDEX.load(Ordering::Relaxed), - upstream: Some( - format!( - "refs/remotes/origin/upstream_{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - ), - upstream_head: Some( - format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - ), - created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, - updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, - head: format!( - "0123456789abcdef0123456789abcdef0123456{}", -
TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - tree: format!( - "0123456789abcdef0123456789abcdef012345{}", - TEST_INDEX.load(Ordering::Relaxed) + 10 - ) - .parse() - .unwrap(), - ownership: BranchOwnershipClaims { - claims: vec![format!("file/{}:1-2", TEST_INDEX.load(Ordering::Relaxed)) - .parse() - .unwrap()], - }, - selected_for_changes: Some(1), - } - } - - #[test] - fn test_read_not_found() -> Result<()> { - let Case { gb_repository, .. } = Suite::default().new_case(); - - let session = gb_repository.get_or_create_current_session()?; - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - - let reader = BranchReader::new(&session_reader); - let result = reader.read(&BranchId::generate()); - assert!(result.is_err()); - assert_eq!(result.unwrap_err().to_string(), "file not found"); - - Ok(()) - } - - #[test] - fn test_read_override() -> Result<()> { - let Case { - gb_repository, - project, - .. - } = Suite::default().new_case(); - - let mut branch = test_branch(); - - let writer = Writer::new(&gb_repository, project.gb_dir())?; - writer.write(&mut branch)?; - - let session = gb_repository.get_current_session()?.unwrap(); - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - - let reader = BranchReader::new(&session_reader); - - assert_eq!(branch, reader.read(&branch.id).unwrap()); - - Ok(()) - } -} diff --git a/gitbutler-app/src/virtual_branches/branch/writer.rs b/gitbutler-app/src/virtual_branches/branch/writer.rs index 3561c0577..821bdc8fe 100644 --- a/gitbutler-app/src/virtual_branches/branch/writer.rs +++ b/gitbutler-app/src/virtual_branches/branch/writer.rs @@ -158,224 +158,3 @@ impl<'writer> BranchWriter<'writer> { Ok(()) } } - -#[cfg(test)] -mod tests { - use std::{ - fs, - sync::atomic::{AtomicUsize, Ordering}, - }; - - use anyhow::Context; - use once_cell::sync::Lazy; - - use crate::{ - tests::{Case, Suite}, - virtual_branches::branch, - }; - - use self::branch::BranchId; - - use super::*; - - static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); - - fn test_branch() -> Branch { - TEST_INDEX.fetch_add(1, Ordering::Relaxed); - - Branch { - id: BranchId::generate(), - name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), - notes: String::new(), - applied: true, - upstream: Some( - format!( - "refs/remotes/origin/upstream_{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - ), - upstream_head: None, - created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, - updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, - head: format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - tree: format!( - "0123456789abcdef0123456789abcdef012345{}", - TEST_INDEX.load(Ordering::Relaxed) + 10 - ) - .parse() - .unwrap(), - ownership: branch::BranchOwnershipClaims { - claims: vec![branch::OwnershipClaim { - file_path: format!("file/{}:1-2", TEST_INDEX.load(Ordering::Relaxed)).into(), - hunks: vec![], - }], - }, - order: TEST_INDEX.load(Ordering::Relaxed), - selected_for_changes: Some(1), - } - } - - #[test] - fn test_write_branch() -> Result<()> { - let Case { - gb_repository, - project, - .. 
- } = Suite::default().new_case(); - - let mut branch = test_branch(); - - let writer = BranchWriter::new(&gb_repository, project.gb_dir())?; - writer.write(&mut branch)?; - - let root = gb_repository - .root() - .join("branches") - .join(branch.id.to_string()); - - assert_eq!( - fs::read_to_string(root.join("meta").join("name").to_str().unwrap()) - .context("Failed to read branch name")?, - branch.name - ); - assert_eq!( - fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())? - .parse::<bool>() - .context("Failed to read branch applied")?, - branch.applied - ); - assert_eq!( - fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap()) - .context("Failed to read branch upstream")?, - branch.upstream.clone().unwrap().to_string() - ); - assert_eq!( - fs::read_to_string( - root.join("meta") - .join("created_timestamp_ms") - .to_str() - .unwrap() - ) - .context("Failed to read branch created timestamp")? - .parse::<u128>() - .context("Failed to parse branch created timestamp")?, - branch.created_timestamp_ms - ); - assert_eq!( - fs::read_to_string( - root.join("meta") - .join("updated_timestamp_ms") - .to_str() - .unwrap() - ) - .context("Failed to read branch updated timestamp")? - .parse::<u128>() - .context("Failed to parse branch updated timestamp")?, - branch.updated_timestamp_ms - ); - - writer.delete(&branch)?; - fs::read_dir(root).unwrap_err(); - - Ok(()) - } - - #[test] - fn test_should_create_session() -> Result<()> { - let Case { - gb_repository, - project, - .. - } = Suite::default().new_case(); - - let mut branch = test_branch(); - - let writer = BranchWriter::new(&gb_repository, project.gb_dir())?; - writer.write(&mut branch)?; - - assert!(gb_repository.get_current_session()?.is_some()); - - Ok(()) - } - - #[test] - fn test_should_update() -> Result<()> { - let Case { - gb_repository, - project, - .. - } = Suite::default().new_case(); - - let mut branch = test_branch(); - - let writer = BranchWriter::new(&gb_repository, project.gb_dir())?; - writer.write(&mut branch)?; - - let mut updated_branch = Branch { - name: "updated_name".to_string(), - applied: false, - upstream: Some("refs/remotes/origin/upstream_updated".parse().unwrap()), - created_timestamp_ms: 2, - updated_timestamp_ms: 3, - ownership: branch::BranchOwnershipClaims { claims: vec![] }, - ..branch.clone() - }; - - writer.write(&mut updated_branch)?; - - let root = gb_repository - .root() - .join("branches") - .join(branch.id.to_string()); - - assert_eq!( - fs::read_to_string(root.join("meta").join("name").to_str().unwrap()) - .context("Failed to read branch name")?, - updated_branch.name - ); - assert_eq!( - fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())? - .parse::<bool>() - .context("Failed to read branch applied")?, - updated_branch.applied - ); - assert_eq!( - fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap()) - .context("Failed to read branch upstream")?, - updated_branch.upstream.unwrap().to_string() - ); - assert_eq!( - fs::read_to_string( - root.join("meta") - .join("created_timestamp_ms") - .to_str() - .unwrap() - ) - .context("Failed to read branch created timestamp")? - .parse::<u128>() - .context("Failed to parse branch created timestamp")?, - updated_branch.created_timestamp_ms - ); - assert_eq!( - fs::read_to_string( - root.join("meta") - .join("updated_timestamp_ms") - .to_str() - .unwrap() - ) - .context("Failed to read branch updated timestamp")?
- .parse::<u128>() - .context("Failed to parse branch updated timestamp")?, - updated_branch.updated_timestamp_ms - ); - - Ok(()) - } -} diff --git a/gitbutler-app/src/virtual_branches/iterator.rs b/gitbutler-app/src/virtual_branches/iterator.rs index ceff3d7cc..c169bf62d 100644 --- a/gitbutler-app/src/virtual_branches/iterator.rs +++ b/gitbutler-app/src/virtual_branches/iterator.rs @@ -54,125 +54,3 @@ impl Iterator for BranchIterator<'_> { Some(branch) } } - -#[cfg(test)] -mod tests { - use std::sync::atomic::{AtomicUsize, Ordering}; - - use anyhow::Result; - use once_cell::sync::Lazy; - - use crate::{ - reader, sessions, - tests::{Case, Suite}, - virtual_branches::target, - }; - - use super::*; - - static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0)); - - fn test_branch() -> branch::Branch { - TEST_INDEX.fetch_add(1, Ordering::Relaxed); - - branch::Branch { - id: BranchId::generate(), - name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), - notes: String::new(), - applied: true, - upstream: Some( - format!( - "refs/remotes/origin/upstream_{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - ), - upstream_head: None, - created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, - updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, - head: format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - tree: format!( - "0123456789abcdef0123456789abcdef012345{}", - TEST_INDEX.load(Ordering::Relaxed) + 10 - ) - .parse() - .unwrap(), - ownership: branch::BranchOwnershipClaims::default(), - order: TEST_INDEX.load(Ordering::Relaxed), - selected_for_changes: Some(1), - } - } - - static TEST_TARGET_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0)); - - fn test_target() -> target::Target { - target::Target { - branch: format!( - "refs/remotes/branch name{}/remote name {}", - TEST_TARGET_INDEX.load(Ordering::Relaxed), - TEST_TARGET_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - remote_url: format!("remote url {}", TEST_TARGET_INDEX.load(Ordering::Relaxed)), - sha: format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_TARGET_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - } - } - - #[test] - fn test_empty_iterator() -> Result<()> { - let Case { gb_repository, .. } = Suite::default().new_case(); - - let session = gb_repository.get_or_create_current_session()?; - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - - let iter = BranchIterator::new(&session_reader)?; - - assert_eq!(iter.count(), 0); - - Ok(()) - } - - #[test] - fn test_iterate_all() -> Result<()> { - let Case { - gb_repository, - project, - ..
- } = Suite::default().new_case(); - - let target_writer = target::Writer::new(&gb_repository, project.gb_dir())?; - target_writer.write_default(&test_target())?; - - let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?; - let mut branch_1 = test_branch(); - branch_writer.write(&mut branch_1)?; - let mut branch_2 = test_branch(); - branch_writer.write(&mut branch_2)?; - let mut branch_3 = test_branch(); - branch_writer.write(&mut branch_3)?; - - let session = gb_repository.get_current_session()?.unwrap(); - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - - let iter = - BranchIterator::new(&session_reader)?.collect::, reader::Error>>()?; - assert_eq!(iter.len(), 3); - assert!(iter.contains(&branch_1)); - assert!(iter.contains(&branch_2)); - assert!(iter.contains(&branch_3)); - - Ok(()) - } -} diff --git a/gitbutler-app/src/virtual_branches/target/reader.rs b/gitbutler-app/src/virtual_branches/target/reader.rs index 0bc3ef445..d5fabd7d0 100644 --- a/gitbutler-app/src/virtual_branches/target/reader.rs +++ b/gitbutler-app/src/virtual_branches/target/reader.rs @@ -29,157 +29,3 @@ impl<'r> TargetReader<'r> { Target::try_from(&self.reader.sub(format!("branches/{}/target", id))) } } - -#[cfg(test)] -mod tests { - use std::sync::atomic::{AtomicUsize, Ordering}; - - use anyhow::Result; - use once_cell::sync::Lazy; - - use crate::{ - sessions, - tests::{Case, Suite}, - virtual_branches::{branch, target::writer::TargetWriter}, - }; - - use super::*; - - static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); - - fn test_branch() -> branch::Branch { - TEST_INDEX.fetch_add(1, Ordering::Relaxed); - - branch::Branch { - id: BranchId::generate(), - name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), - notes: String::new(), - applied: true, - upstream: Some( - format!( - "refs/remotes/origin/upstream_{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - ), - upstream_head: None, - created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, - updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, - head: format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - tree: format!( - "0123456789abcdef0123456789abcdef012345{}", - (TEST_INDEX.load(Ordering::Relaxed) + 10) - ) - .parse() - .unwrap(), - ownership: branch::BranchOwnershipClaims { - claims: vec![branch::OwnershipClaim { - file_path: format!("file/{}", TEST_INDEX.load(Ordering::Relaxed)).into(), - hunks: vec![], - }], - }, - order: TEST_INDEX.load(Ordering::Relaxed), - selected_for_changes: None, - } - } - - #[test] - fn test_read_not_found() -> Result<()> { - let Case { gb_repository, .. } = Suite::default().new_case(); - - let session = gb_repository.get_or_create_current_session()?; - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - - let reader = TargetReader::new(&session_reader); - let result = reader.read(&BranchId::generate()); - assert!(result.is_err()); - assert_eq!(result.unwrap_err().to_string(), "file not found"); - - Ok(()) - } - - #[test] - fn test_read_deprecated_format() -> Result<()> { - let Case { gb_repository, .. 
} = Suite::default().new_case(); - - let writer = crate::writer::DirWriter::open(gb_repository.root())?; - writer - .write_string("branches/target/name", "origin/master") - .unwrap(); - writer - .write_string( - "branches/target/remote", - "git@github.com:gitbutlerapp/gitbutler.git", - ) - .unwrap(); - writer - .write_string( - "branches/target/sha", - "dd945831869e9593448aa622fa4342bbfb84813d", - ) - .unwrap(); - - let session = gb_repository.get_or_create_current_session()?; - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - let reader = TargetReader::new(&session_reader); - - let read = reader.read_default().unwrap(); - assert_eq!(read.branch.branch(), "master"); - assert_eq!(read.branch.remote(), "origin"); - assert_eq!(read.remote_url, "git@github.com:gitbutlerapp/gitbutler.git"); - assert_eq!( - read.sha.to_string(), - "dd945831869e9593448aa622fa4342bbfb84813d" - ); - - Ok(()) - } - - #[test] - fn test_read_override_target() -> Result<()> { - let Case { - gb_repository, - project, - .. - } = Suite::default().new_case(); - - let mut branch = test_branch(); - - let target = Target { - branch: "refs/remotes/remote/branch".parse().unwrap(), - remote_url: "remote url".to_string(), - sha: "fedcba9876543210fedcba9876543210fedcba98".parse().unwrap(), - }; - - let default_target = Target { - branch: "refs/remotes/default remote/default branch" - .parse() - .unwrap(), - remote_url: "default remote url".to_string(), - sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(), - }; - - let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?; - branch_writer.write(&mut branch)?; - - let session = gb_repository.get_current_session()?.unwrap(); - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - - let target_writer = TargetWriter::new(&gb_repository, project.gb_dir())?; - let reader = TargetReader::new(&session_reader); - - target_writer.write_default(&default_target)?; - assert_eq!(default_target, reader.read(&branch.id)?); - - target_writer.write(&branch.id, &target)?; - assert_eq!(target, reader.read(&branch.id)?); - - Ok(()) - } -} diff --git a/gitbutler-app/src/virtual_branches/target/writer.rs b/gitbutler-app/src/virtual_branches/target/writer.rs index a48276bab..6aabd4008 100644 --- a/gitbutler-app/src/virtual_branches/target/writer.rs +++ b/gitbutler-app/src/virtual_branches/target/writer.rs @@ -107,219 +107,3 @@ impl<'writer> TargetWriter<'writer> { Ok(()) } } - -#[cfg(test)] -mod tests { - use std::{ - fs, - sync::atomic::{AtomicUsize, Ordering}, - }; - - use once_cell::sync::Lazy; - - use crate::{ - tests::{Case, Suite}, - virtual_branches::branch, - }; - - use super::{super::Target, *}; - - static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); - - fn test_branch() -> branch::Branch { - TEST_INDEX.fetch_add(1, Ordering::Relaxed); - - branch::Branch { - id: BranchId::generate(), - name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), - notes: format!("branch_notes_{}", TEST_INDEX.load(Ordering::Relaxed)), - applied: true, - created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, - upstream: Some( - format!( - "refs/remotes/origin/upstream_{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - ), - upstream_head: None, - updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, - head: format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - tree: format!( - 
"0123456789abcdef0123456789abcdef012345{}", - TEST_INDEX.load(Ordering::Relaxed) + 10 - ) - .parse() - .unwrap(), - ownership: branch::BranchOwnershipClaims { - claims: vec![branch::OwnershipClaim { - file_path: format!("file/{}", TEST_INDEX.load(Ordering::Relaxed)).into(), - hunks: vec![], - }], - }, - order: TEST_INDEX.load(Ordering::Relaxed), - selected_for_changes: None, - } - } - - #[test] - fn test_write() -> Result<()> { - let Case { - gb_repository, - project, - .. - } = Suite::default().new_case(); - - let mut branch = test_branch(); - let target = Target { - branch: "refs/remotes/remote name/branch name".parse().unwrap(), - remote_url: "remote url".to_string(), - sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(), - }; - - let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?; - branch_writer.write(&mut branch)?; - - let target_writer = TargetWriter::new(&gb_repository, project.gb_dir())?; - target_writer.write(&branch.id, &target)?; - - let root = gb_repository - .root() - .join("branches") - .join(branch.id.to_string()); - - assert_eq!( - fs::read_to_string(root.join("meta").join("name").to_str().unwrap()) - .context("Failed to read branch name")?, - branch.name - ); - assert_eq!( - fs::read_to_string(root.join("target").join("branch_name").to_str().unwrap()) - .context("Failed to read branch target name")?, - format!("{}/{}", target.branch.remote(), target.branch.branch()) - ); - assert_eq!( - fs::read_to_string(root.join("target").join("remote_name").to_str().unwrap()) - .context("Failed to read branch target name name")?, - target.branch.remote() - ); - assert_eq!( - fs::read_to_string(root.join("target").join("remote_url").to_str().unwrap()) - .context("Failed to read branch target remote url")?, - target.remote_url - ); - assert_eq!( - fs::read_to_string(root.join("target").join("sha").to_str().unwrap()) - .context("Failed to read branch target sha")?, - target.sha.to_string() - ); - - assert_eq!( - fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())? - .parse::() - .context("Failed to read branch applied")?, - branch.applied - ); - assert_eq!( - fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap()) - .context("Failed to read branch upstream")?, - branch.upstream.unwrap().to_string() - ); - assert_eq!( - fs::read_to_string( - root.join("meta") - .join("created_timestamp_ms") - .to_str() - .unwrap() - ) - .context("Failed to read branch created timestamp")? - .parse::() - .context("Failed to parse branch created timestamp")?, - branch.created_timestamp_ms - ); - assert_eq!( - fs::read_to_string( - root.join("meta") - .join("updated_timestamp_ms") - .to_str() - .unwrap() - ) - .context("Failed to read branch updated timestamp")? - .parse::() - .context("Failed to parse branch updated timestamp")?, - branch.updated_timestamp_ms - ); - - Ok(()) - } - - #[test] - fn test_should_update() -> Result<()> { - let Case { - gb_repository, - project, - .. 
- } = Suite::default().new_case(); - - let mut branch = test_branch(); - let target = Target { - branch: "refs/remotes/remote name/branch name".parse().unwrap(), - remote_url: "remote url".to_string(), - sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(), - }; - - let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?; - branch_writer.write(&mut branch)?; - let target_writer = TargetWriter::new(&gb_repository, project.gb_dir())?; - target_writer.write(&branch.id, &target)?; - - let updated_target = Target { - branch: "refs/remotes/updated remote name/updated branch name" - .parse() - .unwrap(), - remote_url: "updated remote url".to_string(), - sha: "fedcba9876543210fedcba9876543210fedcba98".parse().unwrap(), - }; - - target_writer.write(&branch.id, &updated_target)?; - - let root = gb_repository - .root() - .join("branches") - .join(branch.id.to_string()); - - assert_eq!( - fs::read_to_string(root.join("target").join("branch_name").to_str().unwrap()) - .context("Failed to read branch target branch name")?, - format!( - "{}/{}", - updated_target.branch.remote(), - updated_target.branch.branch() - ) - ); - - assert_eq!( - fs::read_to_string(root.join("target").join("remote_name").to_str().unwrap()) - .context("Failed to read branch target remote name")?, - updated_target.branch.remote() - ); - assert_eq!( - fs::read_to_string(root.join("target").join("remote_url").to_str().unwrap()) - .context("Failed to read branch target remote url")?, - updated_target.remote_url - ); - assert_eq!( - fs::read_to_string(root.join("target").join("sha").to_str().unwrap()) - .context("Failed to read branch target sha")?, - updated_target.sha.to_string() - ); - - Ok(()) - } -} diff --git a/gitbutler-app/src/watcher.rs b/gitbutler-app/src/watcher.rs index c7b394d30..ac7c41b35 100644 --- a/gitbutler-app/src/watcher.rs +++ b/gitbutler-app/src/watcher.rs @@ -1,6 +1,6 @@ mod dispatchers; mod events; -mod handlers; +pub mod handlers; use std::{collections::HashMap, path, sync::Arc, time}; diff --git a/gitbutler-app/src/watcher/handlers.rs b/gitbutler-app/src/watcher/handlers.rs index 5f68d1351..ac30b6a02 100644 --- a/gitbutler-app/src/watcher/handlers.rs +++ b/gitbutler-app/src/watcher/handlers.rs @@ -1,13 +1,13 @@ mod analytics_handler; -mod calculate_deltas_handler; +pub mod calculate_deltas_handler; mod caltulate_virtual_branches_handler; -mod fetch_gitbutler_data; +pub mod fetch_gitbutler_data; mod filter_ignored_files; mod flush_session; -mod git_file_change; +pub mod git_file_change; mod index_handler; mod push_gitbutler_data; -mod push_project_to_gitbutler; +pub mod push_project_to_gitbutler; use std::time; @@ -195,11 +195,3 @@ impl Handler { } } } - -#[cfg(test)] -fn test_remote_repository() -> Result<git2::Repository> { - let path = tempfile::tempdir()?.path().to_str().unwrap().to_string(); - let repo_a = git2::Repository::init_opts(path, &crate::tests::init_opts_bare())?; - - Ok(repo_a) -} diff --git a/gitbutler-app/src/watcher/handlers/calculate_deltas_handler.rs b/gitbutler-app/src/watcher/handlers/calculate_deltas_handler.rs index d20f46c86..907fe0357 100644 --- a/gitbutler-app/src/watcher/handlers/calculate_deltas_handler.rs +++ b/gitbutler-app/src/watcher/handlers/calculate_deltas_handler.rs @@ -51,7 +51,6 @@ impl Handler { } } - #[cfg(test)] pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Self { Self::new( path.as_ref().to_path_buf(), @@ -182,1000 +181,3 @@ impl Handler { } } } - -#[cfg(test)] -mod test { - use std::{ - collections::HashMap, - path, - sync::atomic::{AtomicUsize, Ordering}, - }; - - use
once_cell::sync::Lazy; - - use crate::{ - deltas::{self, operations::Operation}, - sessions, - tests::{self, Case, Suite}, - virtual_branches::{self, branch}, - }; - - use self::branch::BranchId; - - use super::*; - - static TEST_TARGET_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); - - fn test_target() -> virtual_branches::target::Target { - virtual_branches::target::Target { - branch: format!( - "refs/remotes/remote name {}/branch name {}", - TEST_TARGET_INDEX.load(Ordering::Relaxed), - TEST_TARGET_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - remote_url: format!("remote url {}", TEST_TARGET_INDEX.load(Ordering::Relaxed)), - sha: format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_TARGET_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - } - } - - static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); - - fn test_branch() -> virtual_branches::branch::Branch { - TEST_INDEX.fetch_add(1, Ordering::Relaxed); - - virtual_branches::branch::Branch { - id: BranchId::generate(), - name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), - notes: format!("branch_notes_{}", TEST_INDEX.load(Ordering::Relaxed)), - applied: true, - upstream: Some( - format!( - "refs/remotes/origin/upstream_{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - ), - upstream_head: None, - created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, - updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, - head: format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - tree: format!( - "0123456789abcdef0123456789abcdef012345{}", - TEST_INDEX.load(Ordering::Relaxed) + 10 - ) - .parse() - .unwrap(), - ownership: branch::BranchOwnershipClaims::default(), - order: TEST_INDEX.load(Ordering::Relaxed), - selected_for_changes: None, - } - } - - #[test] - fn test_register_existing_commited_file() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. - } = suite.new_case_with_files(HashMap::from([(path::PathBuf::from("test.txt"), "test")])); - let listener = Handler::from_path(&suite.local_app_data); - - std::fs::write(project.path.join("test.txt"), "test2")?; - listener.handle("test.txt", &project.id)?; - - let session = gb_repository.get_current_session()?.unwrap(); - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - let deltas_reader = deltas::Reader::new(&session_reader); - let deltas = deltas_reader.read_file("test.txt")?.unwrap(); - assert_eq!(deltas.len(), 1); - assert_eq!(deltas[0].operations.len(), 1); - assert_eq!( - deltas[0].operations[0], - Operation::Insert((4, "2".to_string())), - ); - assert_eq!( - std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?, - "test2" - ); - - Ok(()) - } - - #[test] - fn test_register_must_init_current_session() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. - } = suite.new_case(); - let listener = Handler::from_path(&suite.local_app_data); - - std::fs::write(project.path.join("test.txt"), "test")?; - listener.handle("test.txt", &project.id)?; - - assert!(gb_repository.get_current_session()?.is_some()); - - Ok(()) - } - - #[test] - fn test_register_must_not_override_current_session() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. 
- } = suite.new_case(); - let listener = Handler::from_path(&suite.local_app_data); - - std::fs::write(project.path.join("test.txt"), "test")?; - listener.handle("test.txt", &project.id)?; - let session1 = gb_repository.get_current_session()?.unwrap(); - - std::fs::write(project.path.join("test.txt"), "test2")?; - listener.handle("test.txt", &project.id)?; - let session2 = gb_repository.get_current_session()?.unwrap(); - - assert_eq!(session1.id, session2.id); - - Ok(()) - } - - #[test] - fn test_register_binfile() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. - } = suite.new_case(); - let listener = Handler::from_path(&suite.local_app_data); - - std::fs::write( - project.path.join("test.bin"), - [0, 159, 146, 150, 159, 146, 150], - )?; - - listener.handle("test.bin", &project.id)?; - - let session = gb_repository.get_current_session()?.unwrap(); - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - let deltas_reader = deltas::Reader::new(&session_reader); - let deltas = deltas_reader.read_file("test.bin")?.unwrap(); - - assert_eq!(deltas.len(), 1); - assert_eq!(deltas[0].operations.len(), 0); - assert_eq!( - std::fs::read_to_string(gb_repository.session_wd_path().join("test.bin"))?, - "" - ); - - Ok(()) - } - - #[test] - fn test_register_empty_new_file() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. - } = suite.new_case(); - let listener = Handler::from_path(&suite.local_app_data); - - std::fs::write(project.path.join("test.txt"), "")?; - - listener.handle("test.txt", &project.id)?; - - let session = gb_repository.get_current_session()?.unwrap(); - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - let deltas_reader = deltas::Reader::new(&session_reader); - let deltas = deltas_reader.read_file("test.txt")?.unwrap(); - assert_eq!(deltas.len(), 1); - assert_eq!(deltas[0].operations.len(), 0); - assert_eq!( - std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?, - "" - ); - - Ok(()) - } - - #[test] - fn test_register_new_file() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. - } = suite.new_case(); - let listener = Handler::from_path(&suite.local_app_data); - - std::fs::write(project.path.join("test.txt"), "test")?; - - listener.handle("test.txt", &project.id)?; - - let session = gb_repository.get_current_session()?.unwrap(); - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - let deltas_reader = deltas::Reader::new(&session_reader); - let deltas = deltas_reader.read_file("test.txt")?.unwrap(); - assert_eq!(deltas.len(), 1); - assert_eq!(deltas[0].operations.len(), 1); - assert_eq!( - deltas[0].operations[0], - Operation::Insert((0, "test".to_string())), - ); - assert_eq!( - std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?, - "test" - ); - - Ok(()) - } - - #[test] - fn test_register_no_changes_saved_thgoughout_flushes() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - project, - .. - } = suite.new_case(); - let listener = Handler::from_path(&suite.local_app_data); - - // file change, wd and deltas are written - std::fs::write(project.path.join("test.txt"), "test")?; - listener.handle("test.txt", &project.id)?; - - // make two more sessions. 
- gb_repository.flush(&project_repository, None)?; - gb_repository.get_or_create_current_session()?; - gb_repository.flush(&project_repository, None)?; - - // after some sessions, files from the first change are still there. - let session = gb_repository.get_or_create_current_session()?; - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - let files = session_reader.files(None)?; - assert_eq!(files.len(), 1); - - Ok(()) - } - - #[test] - fn test_register_new_file_twice() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. - } = suite.new_case(); - let listener = Handler::from_path(&suite.local_app_data); - - std::fs::write(project.path.join("test.txt"), "test")?; - listener.handle("test.txt", &project.id)?; - - let session = gb_repository.get_current_session()?.unwrap(); - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - let deltas_reader = deltas::Reader::new(&session_reader); - let deltas = deltas_reader.read_file("test.txt")?.unwrap(); - assert_eq!(deltas.len(), 1); - assert_eq!(deltas[0].operations.len(), 1); - assert_eq!( - deltas[0].operations[0], - Operation::Insert((0, "test".to_string())), - ); - assert_eq!( - std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?, - "test" - ); - - std::fs::write(project.path.join("test.txt"), "test2")?; - listener.handle("test.txt", &project.id)?; - - let deltas = deltas_reader.read_file("test.txt")?.unwrap(); - assert_eq!(deltas.len(), 2); - assert_eq!(deltas[0].operations.len(), 1); - assert_eq!( - deltas[0].operations[0], - Operation::Insert((0, "test".to_string())), - ); - assert_eq!(deltas[1].operations.len(), 1); - assert_eq!( - deltas[1].operations[0], - Operation::Insert((4, "2".to_string())), - ); - assert_eq!( - std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?, - "test2" - ); - - Ok(()) - } - - #[test] - fn test_register_file_deleted() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - project, - .. - } = suite.new_case(); - let listener = Handler::from_path(&suite.local_app_data); - - { - // write file - std::fs::write(project.path.join("test.txt"), "test")?; - listener.handle("test.txt", &project.id)?; - } - - { - // current session must have the deltas, but not the file (it didn't exist) - let session = gb_repository.get_current_session()?.unwrap(); - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - let deltas_reader = deltas::Reader::new(&session_reader); - let deltas = deltas_reader.read_file("test.txt")?.unwrap(); - assert_eq!(deltas.len(), 1); - assert_eq!(deltas[0].operations.len(), 1); - assert_eq!( - deltas[0].operations[0], - Operation::Insert((0, "test".to_string())), - ); - assert_eq!( - std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?, - "test" - ); - - let files = session_reader.files(None).unwrap(); - assert!(files.is_empty()); - } - - gb_repository.flush(&project_repository, None)?; - - { - // file should be available in the next session, but not deltas just yet. 
- let session = gb_repository.get_or_create_current_session()?; - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - let files = session_reader.files(None).unwrap(); - assert_eq!(files.len(), 1); - assert_eq!( - files[std::path::Path::new("test.txt")], - reader::Content::UTF8("test".to_string()) - ); - - let deltas_reader = deltas::Reader::new(&session_reader); - let deltas = deltas_reader.read(None)?; - assert!(deltas.is_empty()); - - // removing the file - std::fs::remove_file(project.path.join("test.txt"))?; - listener.handle("test.txt", &project.id)?; - - // deltas are recorded - let deltas = deltas_reader.read_file("test.txt")?.unwrap(); - assert_eq!(deltas.len(), 1); - assert_eq!(deltas[0].operations.len(), 1); - assert_eq!(deltas[0].operations[0], Operation::Delete((0, 4)),); - } - - gb_repository.flush(&project_repository, None)?; - - { - // since file was deleted in the previous session, it should not exist in the new one. - let session = gb_repository.get_or_create_current_session()?; - let session_reader = sessions::Reader::open(&gb_repository, &session)?; - let files = session_reader.files(None).unwrap(); - assert!(files.is_empty()); - } - - Ok(()) - } - - #[test] - fn test_flow_with_commits() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - project_repository, - .. - } = suite.new_case(); - let listener = Handler::from_path(&suite.local_app_data); - - let size = 10; - let relative_file_path = std::path::Path::new("one/two/test.txt"); - for i in 1..=size { - std::fs::create_dir_all(std::path::Path::new(&project.path).join("one/two"))?; - // create a session with a single file change and flush it - std::fs::write( - std::path::Path::new(&project.path).join(relative_file_path), - i.to_string(), - )?; - - tests::commit_all(&project_repository.git_repository); - listener.handle(relative_file_path, &project.id)?; - assert!(gb_repository.flush(&project_repository, None)?.is_some()); - } - - // get all the created sessions - let mut sessions: Vec = gb_repository - .get_sessions_iterator()? 
-            .map(Result::unwrap)
-            .collect();
-        assert_eq!(sessions.len(), size);
-        // verify sessions order is correct
-        let mut last_start = sessions[0].meta.start_timestamp_ms;
-        let mut last_end = sessions[0].meta.start_timestamp_ms;
-        sessions[1..].iter().for_each(|session| {
-            assert!(session.meta.start_timestamp_ms < last_start);
-            assert!(session.meta.last_timestamp_ms < last_end);
-            last_start = session.meta.start_timestamp_ms;
-            last_end = session.meta.last_timestamp_ms;
-        });
-
-        sessions.reverse();
-        // try to reconstruct file state from operations for every session slice
-        for i in 0..sessions.len() {
-            let sessions_slice = &mut sessions[i..];
-
-            // collect all operations from sessions in the reverse order
-            let mut operations: Vec<Operation> = vec![];
-            for session in &mut *sessions_slice {
-                let session_reader = sessions::Reader::open(&gb_repository, session).unwrap();
-                let deltas_reader = deltas::Reader::new(&session_reader);
-                let deltas_by_filepath = deltas_reader.read(None).unwrap();
-                for deltas in deltas_by_filepath.values() {
-                    for delta in deltas {
-                        delta.operations.iter().for_each(|operation| {
-                            operations.push(operation.clone());
-                        });
-                    }
-                }
-            }
-
-            let reader =
-                sessions::Reader::open(&gb_repository, sessions_slice.first().unwrap()).unwrap();
-            let files = reader.files(None).unwrap();
-
-            if i == 0 {
-                assert_eq!(files.len(), 0);
-            } else {
-                assert_eq!(files.len(), 1);
-            }
-
-            let base_file = files.get(&relative_file_path.to_path_buf());
-            let mut text: Vec<char> = match base_file {
-                Some(reader::Content::UTF8(file)) => file.chars().collect(),
-                _ => vec![],
-            };
-
-            for operation in operations {
-                operation.apply(&mut text).unwrap();
-            }
-
-            assert_eq!(text.iter().collect::<String>(), size.to_string());
-        }
-        Ok(())
-    }
-
-    #[test]
-    fn test_flow_no_commits() -> Result<()> {
-        let suite = Suite::default();
-        let Case {
-            gb_repository,
-            project,
-            project_repository,
-            ..
-        } = suite.new_case();
-        let listener = Handler::from_path(&suite.local_app_data);
-
-        let size = 10;
-        let relative_file_path = std::path::Path::new("one/two/test.txt");
-        for i in 1..=size {
-            std::fs::create_dir_all(std::path::Path::new(&project.path).join("one/two"))?;
-            // create a session with a single file change and flush it
-            std::fs::write(
-                std::path::Path::new(&project.path).join(relative_file_path),
-                i.to_string(),
-            )?;
-
-            listener.handle(relative_file_path, &project.id)?;
-            assert!(gb_repository.flush(&project_repository, None)?.is_some());
-        }
-
-        // get all the created sessions
-        let mut sessions: Vec<sessions::Session> = gb_repository
-            .get_sessions_iterator()?
-            .map(Result::unwrap)
-            .collect();
-        assert_eq!(sessions.len(), size);
-        // verify sessions order is correct
-        let mut last_start = sessions[0].meta.start_timestamp_ms;
-        let mut last_end = sessions[0].meta.start_timestamp_ms;
-        sessions[1..].iter().for_each(|session| {
-            assert!(session.meta.start_timestamp_ms < last_start);
-            assert!(session.meta.last_timestamp_ms < last_end);
-            last_start = session.meta.start_timestamp_ms;
-            last_end = session.meta.last_timestamp_ms;
-        });
-
-        sessions.reverse();
-        // try to reconstruct file state from operations for every session slice
-        for i in 0..sessions.len() {
-            let sessions_slice = &mut sessions[i..];
-
-            // collect all operations from sessions in the reverse order
-            let mut operations: Vec<Operation> = vec![];
-            for session in &mut *sessions_slice {
-                let session_reader = sessions::Reader::open(&gb_repository, session).unwrap();
-                let deltas_reader = deltas::Reader::new(&session_reader);
-                let deltas_by_filepath = deltas_reader.read(None).unwrap();
-                for deltas in deltas_by_filepath.values() {
-                    for delta in deltas {
-                        delta.operations.iter().for_each(|operation| {
-                            operations.push(operation.clone());
-                        });
-                    }
-                }
-            }
-
-            let reader =
-                sessions::Reader::open(&gb_repository, sessions_slice.first().unwrap()).unwrap();
-            let files = reader.files(None).unwrap();
-
-            if i == 0 {
-                assert_eq!(files.len(), 0);
-            } else {
-                assert_eq!(files.len(), 1);
-            }
-
-            let base_file = files.get(&relative_file_path.to_path_buf());
-            let mut text: Vec<char> = match base_file {
-                Some(reader::Content::UTF8(file)) => file.chars().collect(),
-                _ => vec![],
-            };
-
-            for operation in operations {
-                operation.apply(&mut text).unwrap();
-            }
-
-            assert_eq!(text.iter().collect::<String>(), size.to_string());
-        }
-        Ok(())
-    }
-
-    #[test]
-    fn test_flow_signle_session() -> Result<()> {
-        let suite = Suite::default();
-        let Case {
-            gb_repository,
-            project,
-            ..
-        } = suite.new_case();
-        let listener = Handler::from_path(&suite.local_app_data);
-
-        let size = 10_i32;
-        let relative_file_path = std::path::Path::new("one/two/test.txt");
-        for i in 1_i32..=size {
-            std::fs::create_dir_all(std::path::Path::new(&project.path).join("one/two"))?;
-            // create a session with a single file change and flush it
-            std::fs::write(
-                std::path::Path::new(&project.path).join(relative_file_path),
-                i.to_string(),
-            )?;
-
-            listener.handle(relative_file_path, &project.id)?;
-        }
-
-        // collect all operations from sessions in the reverse order
-        let mut operations: Vec<Operation> = vec![];
-        let session = gb_repository.get_current_session()?.unwrap();
-        let session_reader = sessions::Reader::open(&gb_repository, &session).unwrap();
-        let deltas_reader = deltas::Reader::new(&session_reader);
-        let deltas_by_filepath = deltas_reader.read(None).unwrap();
-        for deltas in deltas_by_filepath.values() {
-            for delta in deltas {
-                delta.operations.iter().for_each(|operation| {
-                    operations.push(operation.clone());
-                });
-            }
-        }
-
-        let reader = sessions::Reader::open(&gb_repository, &session).unwrap();
-        let files = reader.files(None).unwrap();
-
-        let base_file = files.get(&relative_file_path.to_path_buf());
-        let mut text: Vec<char> = match base_file {
-            Some(reader::Content::UTF8(file)) => file.chars().collect(),
-            _ => vec![],
-        };
-
-        for operation in operations {
-            operation.apply(&mut text).unwrap();
-        }
-
-        assert_eq!(text.iter().collect::<String>(), size.to_string());
-        Ok(())
-    }
-
-    #[test]
-    fn should_persist_branches_targets_state_between_sessions() -> Result<()> {
-        let suite = Suite::default();
-        let Case {
-            gb_repository,
-            project,
-            project_repository,
-            ..
-        } = suite.new_case_with_files(HashMap::from([(
-            path::PathBuf::from("test.txt"),
-            "hello world",
-        )]));
-        let listener = Handler::from_path(&suite.local_app_data);
-
-        let branch_writer =
-            virtual_branches::branch::Writer::new(&gb_repository, project.gb_dir())?;
-        let target_writer =
-            virtual_branches::target::Writer::new(&gb_repository, project.gb_dir())?;
-        let default_target = test_target();
-        target_writer.write_default(&default_target)?;
-        let mut vbranch0 = test_branch();
-        branch_writer.write(&mut vbranch0)?;
-        let mut vbranch1 = test_branch();
-        let vbranch1_target = test_target();
-        branch_writer.write(&mut vbranch1)?;
-        target_writer.write(&vbranch1.id, &vbranch1_target)?;
-
-        std::fs::write(project.path.join("test.txt"), "hello world!").unwrap();
-        listener.handle("test.txt", &project.id)?;
-
-        let flushed_session = gb_repository.flush(&project_repository, None).unwrap();
-
-        // create a new session
-        let session = gb_repository.get_or_create_current_session().unwrap();
-        assert_ne!(session.id, flushed_session.unwrap().id);
-
-        // ensure that the virtual branch is still there and selected
-        let session_reader = sessions::Reader::open(&gb_repository, &session).unwrap();
-
-        let branches = virtual_branches::Iterator::new(&session_reader)
-            .unwrap()
-            .collect::<Result<Vec<branch::Branch>, crate::reader::Error>>()
-            .unwrap()
-            .into_iter()
-            .collect::<Vec<_>>();
-        assert_eq!(branches.len(), 2);
-        let branch_ids = branches.iter().map(|b| b.id).collect::<Vec<_>>();
-        assert!(branch_ids.contains(&vbranch0.id));
-        assert!(branch_ids.contains(&vbranch1.id));
-
-        let target_reader = virtual_branches::target::Reader::new(&session_reader);
-        assert_eq!(target_reader.read_default().unwrap(), default_target);
-        assert_eq!(target_reader.read(&vbranch0.id).unwrap(), default_target);
-        assert_eq!(target_reader.read(&vbranch1.id).unwrap(), vbranch1_target);
-
-        Ok(())
-    }
-
-    #[test]
-    fn should_restore_branches_targets_state_from_head_session() -> Result<()> {
-        let suite = Suite::default();
-        let Case {
-            gb_repository,
-            project,
-            project_repository,
-            ..
-        } = suite.new_case_with_files(HashMap::from([(
-            path::PathBuf::from("test.txt"),
-            "hello world",
-        )]));
-        let listener = Handler::from_path(&suite.local_app_data);
-
-        let branch_writer =
-            virtual_branches::branch::Writer::new(&gb_repository, project.gb_dir())?;
-        let target_writer =
-            virtual_branches::target::Writer::new(&gb_repository, project.gb_dir())?;
-        let default_target = test_target();
-        target_writer.write_default(&default_target)?;
-        let mut vbranch0 = test_branch();
-        branch_writer.write(&mut vbranch0)?;
-        let mut vbranch1 = test_branch();
-        let vbranch1_target = test_target();
-        branch_writer.write(&mut vbranch1)?;
-        target_writer.write(&vbranch1.id, &vbranch1_target)?;
-
-        std::fs::write(project.path.join("test.txt"), "hello world!").unwrap();
-        listener.handle("test.txt", &project.id).unwrap();
-
-        let flushed_session = gb_repository.flush(&project_repository, None).unwrap();
-
-        // hard delete branches state from disk
-        std::fs::remove_dir_all(gb_repository.root()).unwrap();
-
-        // create a new session
-        let session = gb_repository.get_or_create_current_session().unwrap();
-        assert_ne!(session.id, flushed_session.unwrap().id);
-
-        // ensure that the virtual branch is still there and selected
-        let session_reader = sessions::Reader::open(&gb_repository, &session).unwrap();
-
-        let branches = virtual_branches::Iterator::new(&session_reader)
-            .unwrap()
-            .collect::<Result<Vec<branch::Branch>, crate::reader::Error>>()
-            .unwrap()
-            .into_iter()
-            .collect::<Vec<_>>();
-        assert_eq!(branches.len(), 2);
-        let branch_ids = branches.iter().map(|b| b.id).collect::<Vec<_>>();
-        assert!(branch_ids.contains(&vbranch0.id));
-        assert!(branch_ids.contains(&vbranch1.id));
-
-        let target_reader = virtual_branches::target::Reader::new(&session_reader);
-        assert_eq!(target_reader.read_default().unwrap(), default_target);
-        assert_eq!(target_reader.read(&vbranch0.id).unwrap(), default_target);
-        assert_eq!(target_reader.read(&vbranch1.id).unwrap(), vbranch1_target);
-
-        Ok(())
-    }
-
-    mod flush_wd {
-        use super::*;
-
-        #[test]
-        fn should_add_new_files_to_session_wd() {
-            let suite = Suite::default();
-            let Case {
-                gb_repository,
-                project,
-                project_repository,
-                ..
- } = suite.new_case(); - let listener = Handler::from_path(&suite.local_app_data); - - // write a file into session - std::fs::write(project.path.join("test.txt"), "hello world!").unwrap(); - listener.handle("test.txt", &project.id).unwrap(); - - let flushed_session = gb_repository - .flush(&project_repository, None) - .unwrap() - .unwrap(); - { - // after flush it should be flushed into the commit - let session_commit = gb_repository - .git_repository() - .find_commit(flushed_session.hash.unwrap()) - .unwrap(); - let commit_reader = - reader::Reader::from_commit(gb_repository.git_repository(), &session_commit) - .unwrap(); - assert_eq!( - commit_reader.list_files(path::Path::new("wd")).unwrap(), - vec![path::Path::new("test.txt")] - ); - assert_eq!( - commit_reader.read(path::Path::new("wd/test.txt")).unwrap(), - reader::Content::UTF8("hello world!".to_string()) - ); - } - - // write another file into session - std::fs::create_dir_all(project.path.join("one/two")).unwrap(); - std::fs::write(project.path.join("one/two/test2.txt"), "hello world!").unwrap(); - listener.handle("one/two/test2.txt", &project.id).unwrap(); - - let flushed_session = gb_repository - .flush(&project_repository, None) - .unwrap() - .unwrap(); - { - // after flush it should be flushed into the commit next to the previous one - let session_commit = gb_repository - .git_repository() - .find_commit(flushed_session.hash.unwrap()) - .unwrap(); - let commit_reader = - reader::Reader::from_commit(gb_repository.git_repository(), &session_commit) - .unwrap(); - assert_eq!( - commit_reader.list_files(path::Path::new("wd")).unwrap(), - vec![ - path::Path::new("one/two/test2.txt"), - path::Path::new("test.txt"), - ] - ); - assert_eq!( - commit_reader.read(path::Path::new("wd/test.txt")).unwrap(), - reader::Content::UTF8("hello world!".to_string()) - ); - assert_eq!( - commit_reader - .read(path::Path::new("wd/one/two/test2.txt")) - .unwrap(), - reader::Content::UTF8("hello world!".to_string()) - ); - } - } - - #[test] - fn should_remove_deleted_files_from_session_wd() { - let suite = Suite::default(); - let Case { - gb_repository, - project, - project_repository, - .. 
- } = suite.new_case(); - let listener = Handler::from_path(&suite.local_app_data); - - // write a file into session - std::fs::write(project.path.join("test.txt"), "hello world!").unwrap(); - listener.handle("test.txt", &project.id).unwrap(); - std::fs::create_dir_all(project.path.join("one/two")).unwrap(); - std::fs::write(project.path.join("one/two/test2.txt"), "hello world!").unwrap(); - listener.handle("one/two/test2.txt", &project.id).unwrap(); - - let flushed_session = gb_repository - .flush(&project_repository, None) - .unwrap() - .unwrap(); - { - // after flush it should be flushed into the commit - let session_commit = gb_repository - .git_repository() - .find_commit(flushed_session.hash.unwrap()) - .unwrap(); - let commit_reader = - reader::Reader::from_commit(gb_repository.git_repository(), &session_commit) - .unwrap(); - assert_eq!( - commit_reader.list_files(path::Path::new("wd")).unwrap(), - vec![ - path::Path::new("one/two/test2.txt"), - path::Path::new("test.txt"), - ] - ); - assert_eq!( - commit_reader.read(path::Path::new("wd/test.txt")).unwrap(), - reader::Content::UTF8("hello world!".to_string()) - ); - assert_eq!( - commit_reader - .read(path::Path::new("wd/one/two/test2.txt")) - .unwrap(), - reader::Content::UTF8("hello world!".to_string()) - ); - } - - // rm the files - std::fs::remove_file(project.path.join("test.txt")).unwrap(); - listener.handle("test.txt", &project.id).unwrap(); - std::fs::remove_file(project.path.join("one/two/test2.txt")).unwrap(); - listener.handle("one/two/test2.txt", &project.id).unwrap(); - - let flushed_session = gb_repository - .flush(&project_repository, None) - .unwrap() - .unwrap(); - { - // after flush it should be removed from the commit - let session_commit = gb_repository - .git_repository() - .find_commit(flushed_session.hash.unwrap()) - .unwrap(); - let commit_reader = - reader::Reader::from_commit(gb_repository.git_repository(), &session_commit) - .unwrap(); - assert!(commit_reader - .list_files(path::Path::new("wd")) - .unwrap() - .is_empty()); - } - } - - #[test] - fn should_update_updated_files_in_session_wd() { - let suite = Suite::default(); - let Case { - gb_repository, - project, - project_repository, - .. 
- } = suite.new_case(); - let listener = Handler::from_path(&suite.local_app_data); - - // write a file into session - std::fs::write(project.path.join("test.txt"), "hello world!").unwrap(); - listener.handle("test.txt", &project.id).unwrap(); - std::fs::create_dir_all(project.path.join("one/two")).unwrap(); - std::fs::write(project.path.join("one/two/test2.txt"), "hello world!").unwrap(); - listener.handle("one/two/test2.txt", &project.id).unwrap(); - - let flushed_session = gb_repository - .flush(&project_repository, None) - .unwrap() - .unwrap(); - { - // after flush it should be flushed into the commit - let session_commit = gb_repository - .git_repository() - .find_commit(flushed_session.hash.unwrap()) - .unwrap(); - let commit_reader = - reader::Reader::from_commit(gb_repository.git_repository(), &session_commit) - .unwrap(); - assert_eq!( - commit_reader.list_files(path::Path::new("wd")).unwrap(), - vec![ - path::Path::new("one/two/test2.txt"), - path::Path::new("test.txt"), - ] - ); - assert_eq!( - commit_reader.read(path::Path::new("wd/test.txt")).unwrap(), - reader::Content::UTF8("hello world!".to_string()) - ); - assert_eq!( - commit_reader - .read(path::Path::new("wd/one/two/test2.txt")) - .unwrap(), - reader::Content::UTF8("hello world!".to_string()) - ); - } - - // update the file - std::fs::write(project.path.join("test.txt"), "hello world!2").unwrap(); - listener.handle("test.txt", &project.id).unwrap(); - - std::fs::write(project.path.join("one/two/test2.txt"), "hello world!2").unwrap(); - listener.handle("one/two/test2.txt", &project.id).unwrap(); - - let flushed_session = gb_repository - .flush(&project_repository, None) - .unwrap() - .unwrap(); - { - // after flush it should be updated in the commit - let session_commit = gb_repository - .git_repository() - .find_commit(flushed_session.hash.unwrap()) - .unwrap(); - let commit_reader = - reader::Reader::from_commit(gb_repository.git_repository(), &session_commit) - .unwrap(); - assert_eq!( - commit_reader.list_files(path::Path::new("wd")).unwrap(), - vec![ - path::Path::new("one/two/test2.txt"), - path::Path::new("test.txt"), - ] - ); - assert_eq!( - commit_reader.read(path::Path::new("wd/test.txt")).unwrap(), - reader::Content::UTF8("hello world!2".to_string()) - ); - assert_eq!( - commit_reader - .read(path::Path::new("wd/one/two/test2.txt")) - .unwrap(), - reader::Content::UTF8("hello world!2".to_string()) - ); - } - } - } -} diff --git a/gitbutler-app/src/watcher/handlers/fetch_gitbutler_data.rs b/gitbutler-app/src/watcher/handlers/fetch_gitbutler_data.rs index 45f981eca..3ec2199e8 100644 --- a/gitbutler-app/src/watcher/handlers/fetch_gitbutler_data.rs +++ b/gitbutler-app/src/watcher/handlers/fetch_gitbutler_data.rs @@ -52,10 +52,11 @@ impl Handler { } } -struct InnerHandler { - local_data_dir: path::PathBuf, - projects: projects::Controller, - users: users::Controller, +// TODO(ST): rename this to `State`, move logic into `Handler`. +pub struct InnerHandler { + pub local_data_dir: path::PathBuf, + pub projects: projects::Controller, + pub users: users::Controller, } impl InnerHandler { @@ -160,72 +161,3 @@ impl InnerHandler { Ok(events) } } - -#[cfg(test)] -mod test { - use std::time::SystemTime; - - use pretty_assertions::assert_eq; - - use crate::tests::{Case, Suite}; - - use super::super::test_remote_repository; - use super::*; - - #[tokio::test] - async fn test_fetch_success() -> Result<()> { - let suite = Suite::default(); - let Case { project, .. 
} = suite.new_case(); - - let cloud = test_remote_repository()?; - - let api_project = projects::ApiProject { - name: "test-sync".to_string(), - description: None, - repository_id: "123".to_string(), - git_url: cloud.path().to_str().unwrap().to_string(), - code_git_url: None, - created_at: 0_i32.to_string(), - updated_at: 0_i32.to_string(), - sync: true, - }; - - suite - .projects - .update(&projects::UpdateRequest { - id: project.id, - api: Some(api_project.clone()), - ..Default::default() - }) - .await?; - - let listener = InnerHandler { - local_data_dir: suite.local_app_data, - projects: suite.projects, - users: suite.users, - }; - - listener - .handle(&project.id, &SystemTime::now()) - .await - .unwrap(); - - Ok(()) - } - - #[tokio::test] - async fn test_fetch_fail_no_sync() { - let suite = Suite::default(); - let Case { project, .. } = suite.new_case(); - - let listener = InnerHandler { - local_data_dir: suite.local_app_data, - projects: suite.projects, - users: suite.users, - }; - - let res = listener.handle(&project.id, &SystemTime::now()).await; - - assert_eq!(&res.unwrap_err().to_string(), "sync disabled"); - } -} diff --git a/gitbutler-app/src/watcher/handlers/git_file_change.rs b/gitbutler-app/src/watcher/handlers/git_file_change.rs index 6615f69b1..6e38ed208 100644 --- a/gitbutler-app/src/watcher/handlers/git_file_change.rs +++ b/gitbutler-app/src/watcher/handlers/git_file_change.rs @@ -37,7 +37,7 @@ impl TryFrom<&AppHandle> for Handler { } impl Handler { - fn new( + pub fn new( local_data_dir: path::PathBuf, projects: projects::Controller, users: users::Controller, @@ -132,109 +132,3 @@ impl Handler { } } } - -#[cfg(test)] -mod test { - use std::fs; - - use events::Event; - use pretty_assertions::assert_eq; - - use crate::{ - tests::{Case, Suite}, - watcher::handlers, - }; - - use super::*; - - #[test] - fn test_flush_session() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - gb_repository, - .. - } = suite.new_case(); - - assert!(gb_repository.get_current_session()?.is_none()); - create_new_session_via_new_file(&project, &suite); - assert!(gb_repository.get_current_session()?.is_some()); - - let listener = Handler { - local_data_dir: suite.local_app_data, - projects: suite.projects, - users: suite.users, - }; - - let flush_file_path = project.path.join(".git/GB_FLUSH"); - fs::write(flush_file_path.as_path(), "")?; - - let result = listener.handle("GB_FLUSH", &project.id)?; - - assert_eq!(result.len(), 1); - assert!(matches!(result[0], Event::Flush(_, _))); - - assert!(!flush_file_path.exists(), "flush file deleted"); - - Ok(()) - } - - #[test] - fn test_do_not_flush_session_if_file_is_missing() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - gb_repository, - .. 
-        } = suite.new_case();
-
-        assert!(gb_repository.get_current_session()?.is_none());
-        create_new_session_via_new_file(&project, &suite);
-        assert!(gb_repository.get_current_session()?.is_some());
-
-        let listener = Handler {
-            local_data_dir: suite.local_app_data,
-            projects: suite.projects,
-            users: suite.users,
-        };
-
-        let result = listener.handle("GB_FLUSH", &project.id)?;
-
-        assert_eq!(result.len(), 0);
-
-        Ok(())
-    }
-
-    fn create_new_session_via_new_file(project: &projects::Project, suite: &Suite) {
-        fs::write(project.path.join("test.txt"), "test").unwrap();
-
-        let file_change_listener =
-            handlers::calculate_deltas_handler::Handler::from_path(&suite.local_app_data);
-        file_change_listener
-            .handle("test.txt", &project.id)
-            .unwrap();
-    }
-
-    #[test]
-    fn test_flush_deletes_flush_file_without_session_to_flush() -> Result<()> {
-        let suite = Suite::default();
-        let Case { project, .. } = suite.new_case();
-
-        let listener = Handler {
-            local_data_dir: suite.local_app_data,
-            projects: suite.projects,
-            users: suite.users,
-        };
-
-        let flush_file_path = project.path.join(".git/GB_FLUSH");
-        fs::write(flush_file_path.as_path(), "")?;
-
-        let result = listener.handle("GB_FLUSH", &project.id)?;
-
-        assert_eq!(result.len(), 0);
-
-        assert!(!flush_file_path.exists(), "flush file deleted");
-
-        Ok(())
-    }
-}
diff --git a/gitbutler-app/src/watcher/handlers/push_project_to_gitbutler.rs b/gitbutler-app/src/watcher/handlers/push_project_to_gitbutler.rs
index 8842f76c2..6f7a40f88 100644
--- a/gitbutler-app/src/watcher/handlers/push_project_to_gitbutler.rs
+++ b/gitbutler-app/src/watcher/handlers/push_project_to_gitbutler.rs
@@ -55,11 +55,12 @@ impl Handler {
     }
 }
 
+// TODO(ST): rename to state, move logic into handler itself.
 pub struct HandlerInner {
-    local_data_dir: path::PathBuf,
-    project_store: projects::Controller,
-    users: users::Controller,
-    batch_size: usize,
+    pub local_data_dir: path::PathBuf,
+    pub project_store: projects::Controller,
+    pub users: users::Controller,
+    pub batch_size: usize,
 }
 
 impl HandlerInner {
@@ -285,421 +286,3 @@ fn batch_rev_walk(
     }
     Ok(oids)
 }
-
-#[cfg(test)]
-mod test {
-    use std::collections::HashMap;
-    use std::path::PathBuf;
-
-    use crate::project_repository::LogUntil;
-    use crate::tests::{Case, Suite};
-    use crate::virtual_branches::set_test_target;
-
-    use super::super::test_remote_repository;
-    use super::*;
-
-    fn log_walk(repo: &git2::Repository, head: git::Oid) -> Vec<git::Oid> {
-        let mut walker = repo.revwalk().unwrap();
-        walker.push(head.into()).unwrap();
-        walker.map(|oid| oid.unwrap().into()).collect::<Vec<_>>()
-    }
-
-    #[tokio::test]
-    async fn test_push_error() -> Result<()> {
-        let suite = Suite::default();
-        let Case { project, ..
} = suite.new_case(); - - let api_project = projects::ApiProject { - name: "test-sync".to_string(), - description: None, - repository_id: "123".to_string(), - git_url: String::new(), - code_git_url: Some(String::new()), - created_at: 0_i32.to_string(), - updated_at: 0_i32.to_string(), - sync: true, - }; - - suite - .projects - .update(&projects::UpdateRequest { - id: project.id, - api: Some(api_project.clone()), - ..Default::default() - }) - .await?; - - let listener = HandlerInner { - local_data_dir: suite.local_app_data, - project_store: suite.projects, - users: suite.users, - batch_size: 100, - }; - - let res = listener.handle(&project.id).await; - - res.unwrap_err(); - - Ok(()) - } - - #[tokio::test] - async fn test_push_simple() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - gb_repository, - project_repository, - .. - } = suite.new_case_with_files(HashMap::from([(PathBuf::from("test.txt"), "test")])); - - suite.sign_in(); - - set_test_target(&gb_repository, &project_repository).unwrap(); - - let target_id = gb_repository.default_target().unwrap().unwrap().sha; - - let reference = project_repository.l(target_id, LogUntil::End).unwrap(); - - let cloud_code = test_remote_repository()?; - - let api_project = projects::ApiProject { - name: "test-sync".to_string(), - description: None, - repository_id: "123".to_string(), - git_url: String::new(), - code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()), - created_at: 0_i32.to_string(), - updated_at: 0_i32.to_string(), - sync: true, - }; - - suite - .projects - .update(&projects::UpdateRequest { - id: project.id, - api: Some(api_project.clone()), - ..Default::default() - }) - .await?; - - cloud_code.find_commit(target_id.into()).unwrap_err(); - - { - let listener = HandlerInner { - local_data_dir: suite.local_app_data, - project_store: suite.projects.clone(), - users: suite.users, - batch_size: 10, - }; - - let res = listener.handle(&project.id).await.unwrap(); - assert!(res.is_empty()); - } - - cloud_code.find_commit(target_id.into()).unwrap(); - - let pushed = log_walk(&cloud_code, target_id); - assert_eq!(reference.len(), pushed.len()); - assert_eq!(reference, pushed); - - assert_eq!( - suite - .projects - .get(&project.id) - .unwrap() - .gitbutler_code_push_state - .unwrap() - .id, - target_id - ); - - Ok(()) - } - - #[tokio::test] - async fn test_push_remote_ref() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - gb_repository, - project_repository, - .. 
- } = suite.new_case(); - - suite.sign_in(); - - set_test_target(&gb_repository, &project_repository).unwrap(); - - let cloud_code: git::Repository = test_remote_repository()?.into(); - - let remote_repo: git::Repository = test_remote_repository()?.into(); - - let last_commit = create_initial_commit(&remote_repo); - - remote_repo - .reference( - &git::Refname::Local(git::LocalRefname::new("refs/heads/testbranch", None)), - last_commit, - false, - "", - ) - .unwrap(); - - let mut remote = project_repository - .git_repository - .remote("tr", &remote_repo.path().to_str().unwrap().parse().unwrap()) - .unwrap(); - - remote - .fetch(&["+refs/heads/*:refs/remotes/tr/*"], None) - .unwrap(); - - project_repository - .git_repository - .find_commit(last_commit) - .unwrap(); - - let api_project = projects::ApiProject { - name: "test-sync".to_string(), - description: None, - repository_id: "123".to_string(), - git_url: String::new(), - code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()), - created_at: 0_i32.to_string(), - updated_at: 0_i32.to_string(), - sync: true, - }; - - suite - .projects - .update(&projects::UpdateRequest { - id: project.id, - api: Some(api_project.clone()), - ..Default::default() - }) - .await?; - - { - let listener = HandlerInner { - local_data_dir: suite.local_app_data, - project_store: suite.projects.clone(), - users: suite.users, - batch_size: 10, - }; - - listener.handle(&project.id).await.unwrap(); - } - - cloud_code.find_commit(last_commit).unwrap(); - - Ok(()) - } - - fn create_initial_commit(repo: &git::Repository) -> git::Oid { - let signature = git::Signature::now("test", "test@email.com").unwrap(); - - let mut index = repo.index().unwrap(); - let oid = index.write_tree().unwrap(); - - repo.commit( - None, - &signature, - &signature, - "initial commit", - &repo.find_tree(oid).unwrap(), - &[], - ) - .unwrap() - } - - fn create_test_commits(repo: &git::Repository, commits: usize) -> git::Oid { - let signature = git::Signature::now("test", "test@email.com").unwrap(); - - let mut last = None; - - for i in 0..commits { - let mut index = repo.index().unwrap(); - let oid = index.write_tree().unwrap(); - let head = repo.head().unwrap(); - - last = Some( - repo.commit( - Some(&head.name().unwrap()), - &signature, - &signature, - format!("commit {i}").as_str(), - &repo.find_tree(oid).unwrap(), - &[&repo - .find_commit(repo.refname_to_id("HEAD").unwrap()) - .unwrap()], - ) - .unwrap(), - ); - } - - last.unwrap() - } - - #[tokio::test] - async fn test_push_batches() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - gb_repository, - project_repository, - .. 
- } = suite.new_case(); - - suite.sign_in(); - - { - let head: git::Oid = project_repository - .get_head() - .unwrap() - .peel_to_commit() - .unwrap() - .id(); - - let reference = project_repository.l(head, LogUntil::End).unwrap(); - assert_eq!(reference.len(), 2); - - let head = create_test_commits(&project_repository.git_repository, 10); - - let reference = project_repository.l(head, LogUntil::End).unwrap(); - assert_eq!(reference.len(), 12); - } - - set_test_target(&gb_repository, &project_repository).unwrap(); - - let target_id = gb_repository.default_target().unwrap().unwrap().sha; - - let reference = project_repository.l(target_id, LogUntil::End).unwrap(); - - let cloud_code = test_remote_repository()?; - - let api_project = projects::ApiProject { - name: "test-sync".to_string(), - description: None, - repository_id: "123".to_string(), - git_url: String::new(), - code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()), - created_at: 0_i32.to_string(), - updated_at: 0_i32.to_string(), - sync: true, - }; - - suite - .projects - .update(&projects::UpdateRequest { - id: project.id, - api: Some(api_project.clone()), - ..Default::default() - }) - .await?; - - { - let listener = HandlerInner { - local_data_dir: suite.local_app_data.clone(), - project_store: suite.projects.clone(), - users: suite.users.clone(), - batch_size: 2, - }; - - listener.handle(&project.id).await.unwrap(); - } - - cloud_code.find_commit(target_id.into()).unwrap(); - - let pushed = log_walk(&cloud_code, target_id); - assert_eq!(reference.len(), pushed.len()); - assert_eq!(reference, pushed); - - assert_eq!( - suite - .projects - .get(&project.id) - .unwrap() - .gitbutler_code_push_state - .unwrap() - .id, - target_id - ); - - Ok(()) - } - - #[tokio::test] - async fn test_push_again_no_change() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - gb_repository, - project_repository, - .. 
- } = suite.new_case_with_files(HashMap::from([(PathBuf::from("test.txt"), "test")])); - - suite.sign_in(); - - set_test_target(&gb_repository, &project_repository).unwrap(); - - let target_id = gb_repository.default_target().unwrap().unwrap().sha; - - let reference = project_repository.l(target_id, LogUntil::End).unwrap(); - - let cloud_code = test_remote_repository()?; - - let api_project = projects::ApiProject { - name: "test-sync".to_string(), - description: None, - repository_id: "123".to_string(), - git_url: String::new(), - code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()), - created_at: 0_i32.to_string(), - updated_at: 0_i32.to_string(), - sync: true, - }; - - suite - .projects - .update(&projects::UpdateRequest { - id: project.id, - api: Some(api_project.clone()), - ..Default::default() - }) - .await?; - - cloud_code.find_commit(target_id.into()).unwrap_err(); - - { - let listener = HandlerInner { - local_data_dir: suite.local_app_data, - project_store: suite.projects.clone(), - users: suite.users, - batch_size: 10, - }; - - let res = listener.handle(&project.id).await.unwrap(); - assert!(res.is_empty()); - } - - cloud_code.find_commit(target_id.into()).unwrap(); - - let pushed = log_walk(&cloud_code, target_id); - assert_eq!(reference.len(), pushed.len()); - assert_eq!(reference, pushed); - - assert_eq!( - suite - .projects - .get(&project.id) - .unwrap() - .gitbutler_code_push_state - .unwrap() - .id, - target_id - ); - - Ok(()) - } -} diff --git a/gitbutler-app/src/tests.rs b/gitbutler-app/tests/app.rs similarity index 62% rename from gitbutler-app/src/tests.rs rename to gitbutler-app/tests/app.rs index 7bcfe5f0c..b0ddf846d 100644 --- a/gitbutler-app/src/tests.rs +++ b/gitbutler-app/tests/app.rs @@ -5,27 +5,36 @@ mod suite { mod virtual_branches; } +mod database; +mod deltas; +mod gb_repository; +mod git; +mod keys; +mod lock; +mod reader; +mod sessions; +pub mod virtual_branches; +mod watcher; + use std::{collections::HashMap, fs, path}; use tempfile::tempdir; -use crate::{database, gb_repository, git, keys, project_repository, projects, storage, users}; - pub struct Suite { pub local_app_data: path::PathBuf, - pub storage: storage::Storage, - pub users: users::Controller, - pub projects: projects::Controller, - pub keys: keys::Controller, + pub storage: gitbutler_app::storage::Storage, + pub users: gitbutler_app::users::Controller, + pub projects: gitbutler_app::projects::Controller, + pub keys: gitbutler_app::keys::Controller, } impl Default for Suite { fn default() -> Self { let local_app_data = temp_dir(); - let storage = storage::Storage::new(&local_app_data); - let users = users::Controller::from_path(&local_app_data); - let projects = projects::Controller::from_path(&local_app_data); - let keys = keys::Controller::from_path(&local_app_data); + let storage = gitbutler_app::storage::Storage::new(&local_app_data); + let users = gitbutler_app::users::Controller::from_path(&local_app_data); + let projects = gitbutler_app::projects::Controller::from_path(&local_app_data); + let keys = gitbutler_app::keys::Controller::from_path(&local_app_data); Self { storage, local_app_data, @@ -37,8 +46,8 @@ impl Default for Suite { } impl Suite { - pub fn sign_in(&self) -> users::User { - let user = users::User { + pub fn sign_in(&self) -> gitbutler_app::users::User { + let user = gitbutler_app::users::User { name: Some("test".to_string()), email: "test@email.com".to_string(), access_token: "token".to_string(), @@ -48,7 +57,7 @@ impl Suite { user } - fn project(&self, fs: 
HashMap) -> projects::Project { + fn project(&self, fs: HashMap) -> gitbutler_app::projects::Project { let repository = test_repository(); for (path, contents) in fs { if let Some(parent) = path.parent() { @@ -80,20 +89,23 @@ impl Suite { pub struct Case<'a> { suite: &'a Suite, - pub project: projects::Project, - pub project_repository: project_repository::Repository, - pub gb_repository: gb_repository::Repository, - pub credentials: git::credentials::Helper, + pub project: gitbutler_app::projects::Project, + pub project_repository: gitbutler_app::project_repository::Repository, + pub gb_repository: gitbutler_app::gb_repository::Repository, + pub credentials: gitbutler_app::git::credentials::Helper, } impl<'a> Case<'a> { - fn new(suite: &'a Suite, project: projects::Project) -> Case<'a> { - let project_repository = project_repository::Repository::open(&project) + fn new(suite: &'a Suite, project: gitbutler_app::projects::Project) -> Case<'a> { + let project_repository = gitbutler_app::project_repository::Repository::open(&project) .expect("failed to create project repository"); - let gb_repository = - gb_repository::Repository::open(&suite.local_app_data, &project_repository, None) - .expect("failed to open gb repository"); - let credentials = git::credentials::Helper::from_path(&suite.local_app_data); + let gb_repository = gitbutler_app::gb_repository::Repository::open( + &suite.local_app_data, + &project_repository, + None, + ) + .expect("failed to open gb repository"); + let credentials = gitbutler_app::git::credentials::Helper::from_path(&suite.local_app_data); Case { suite, project, @@ -109,13 +121,14 @@ impl<'a> Case<'a> { .projects .get(&self.project.id) .expect("failed to get project"); - let project_repository = project_repository::Repository::open(&project) + let project_repository = gitbutler_app::project_repository::Repository::open(&project) .expect("failed to create project repository"); let user = self.suite.users.get_user().expect("failed to get user"); - let credentials = git::credentials::Helper::from_path(&self.suite.local_app_data); + let credentials = + gitbutler_app::git::credentials::Helper::from_path(&self.suite.local_app_data); Self { suite: self.suite, - gb_repository: gb_repository::Repository::open( + gb_repository: gitbutler_app::gb_repository::Repository::open( &self.suite.local_app_data, &project_repository, user.as_ref(), @@ -128,8 +141,8 @@ impl<'a> Case<'a> { } } -pub fn test_database() -> database::Database { - database::Database::open_in_directory(temp_dir()).unwrap() +pub fn test_database() -> gitbutler_app::database::Database { + gitbutler_app::database::Database::open_in_directory(temp_dir()).unwrap() } pub fn temp_dir() -> path::PathBuf { @@ -138,18 +151,19 @@ pub fn temp_dir() -> path::PathBuf { path } -pub fn empty_bare_repository() -> git::Repository { +pub fn empty_bare_repository() -> gitbutler_app::git::Repository { let path = temp_dir(); - git::Repository::init_opts(path, &init_opts_bare()).expect("failed to init repository") + gitbutler_app::git::Repository::init_opts(path, &init_opts_bare()) + .expect("failed to init repository") } -pub fn test_repository() -> git::Repository { +pub fn test_repository() -> gitbutler_app::git::Repository { let path = temp_dir(); - let repository = - git::Repository::init_opts(path, &init_opts()).expect("failed to init repository"); + let repository = gitbutler_app::git::Repository::init_opts(path, &init_opts()) + .expect("failed to init repository"); let mut index = repository.index().expect("failed to get 
index"); let oid = index.write_tree().expect("failed to write tree"); - let signature = git::Signature::now("test", "test@email.com").unwrap(); + let signature = gitbutler_app::git::Signature::now("test", "test@email.com").unwrap(); repository .commit( Some(&"refs/heads/master".parse().unwrap()), @@ -163,14 +177,14 @@ pub fn test_repository() -> git::Repository { repository } -pub fn commit_all(repository: &git::Repository) -> git::Oid { +pub fn commit_all(repository: &gitbutler_app::git::Repository) -> gitbutler_app::git::Oid { let mut index = repository.index().expect("failed to get index"); index .add_all(["."], git2::IndexAddOption::DEFAULT, None) .expect("failed to add all"); index.write().expect("failed to write index"); let oid = index.write_tree().expect("failed to write tree"); - let signature = git::Signature::now("test", "test@email.com").unwrap(); + let signature = gitbutler_app::git::Signature::now("test", "test@email.com").unwrap(); let head = repository.head().expect("failed to get head"); let commit_oid = repository .commit( diff --git a/gitbutler-app/src/tests/common.rs b/gitbutler-app/tests/common/mod.rs similarity index 99% rename from gitbutler-app/src/tests/common.rs rename to gitbutler-app/tests/common/mod.rs index f2b714df8..bd6d1933f 100644 --- a/gitbutler-app/src/tests/common.rs +++ b/gitbutler-app/tests/common/mod.rs @@ -1,6 +1,6 @@ #![allow(unused)] -use crate::git; -use crate::tests::init_opts; +use crate::init_opts; +use gitbutler_app::git; use std::{path, str::from_utf8}; pub fn temp_dir() -> std::path::PathBuf { diff --git a/gitbutler-app/tests/database/mod.rs b/gitbutler-app/tests/database/mod.rs new file mode 100644 index 000000000..df6774493 --- /dev/null +++ b/gitbutler-app/tests/database/mod.rs @@ -0,0 +1,20 @@ +use crate::temp_dir; +use gitbutler_app::database::Database; + +#[test] +fn smoke() { + let data_dir = temp_dir(); + let db = Database::open_in_directory(data_dir).unwrap(); + db.transaction(|tx| { + tx.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)", []) + .unwrap(); + tx.execute("INSERT INTO test (id) VALUES (1)", []).unwrap(); + let mut stmt = tx.prepare("SELECT id FROM test").unwrap(); + let mut rows = stmt.query([]).unwrap(); + let row = rows.next().unwrap().unwrap(); + let id: i32 = row.get(0).unwrap(); + assert_eq!(id, 1_i32); + Ok(()) + }) + .unwrap(); +} diff --git a/gitbutler-app/tests/deltas/mod.rs b/gitbutler-app/tests/deltas/mod.rs new file mode 100644 index 000000000..8ea17b034 --- /dev/null +++ b/gitbutler-app/tests/deltas/mod.rs @@ -0,0 +1,142 @@ +mod database { + use crate::test_database; + use gitbutler_app::deltas::{operations, Database, Delta}; + use gitbutler_app::projects::ProjectId; + use gitbutler_app::sessions::SessionId; + use std::path; + + #[test] + fn insert_query() -> anyhow::Result<()> { + let db = test_database(); + let database = Database::new(db); + + let project_id = ProjectId::generate(); + let session_id = SessionId::generate(); + let file_path = path::PathBuf::from("file_path"); + let delta1 = Delta { + timestamp_ms: 0, + operations: vec![operations::Operation::Insert((0, "text".to_string()))], + }; + let deltas = vec![delta1.clone()]; + + database.insert(&project_id, &session_id, &file_path, &deltas)?; + + assert_eq!( + database.list_by_project_id_session_id(&project_id, &session_id, &None)?, + vec![(file_path.display().to_string(), vec![delta1])] + .into_iter() + .collect() + ); + + Ok(()) + } + + #[test] + fn insert_update() -> anyhow::Result<()> { + let db = test_database(); + let database = 
Database::new(db); + + let project_id = ProjectId::generate(); + let session_id = SessionId::generate(); + let file_path = path::PathBuf::from("file_path"); + let delta1 = Delta { + timestamp_ms: 0, + operations: vec![operations::Operation::Insert((0, "text".to_string()))], + }; + let delta2 = Delta { + timestamp_ms: 0, + operations: vec![operations::Operation::Insert(( + 0, + "updated_text".to_string(), + ))], + }; + + database.insert(&project_id, &session_id, &file_path, &vec![delta1])?; + database.insert(&project_id, &session_id, &file_path, &vec![delta2.clone()])?; + + assert_eq!( + database.list_by_project_id_session_id(&project_id, &session_id, &None)?, + vec![(file_path.display().to_string(), vec![delta2])] + .into_iter() + .collect() + ); + + Ok(()) + } + + #[test] + fn aggregate_deltas_by_file() -> anyhow::Result<()> { + let db = test_database(); + let database = Database::new(db); + + let project_id = ProjectId::generate(); + let session_id = SessionId::generate(); + let file_path1 = path::PathBuf::from("file_path1"); + let file_path2 = path::PathBuf::from("file_path2"); + let delta1 = Delta { + timestamp_ms: 1, + operations: vec![operations::Operation::Insert((0, "text".to_string()))], + }; + let delta2 = Delta { + timestamp_ms: 2, + operations: vec![operations::Operation::Insert(( + 0, + "updated_text".to_string(), + ))], + }; + + database.insert(&project_id, &session_id, &file_path1, &vec![delta1.clone()])?; + database.insert(&project_id, &session_id, &file_path2, &vec![delta1.clone()])?; + database.insert(&project_id, &session_id, &file_path2, &vec![delta2.clone()])?; + + assert_eq!( + database.list_by_project_id_session_id(&project_id, &session_id, &None)?, + vec![ + (file_path1.display().to_string(), vec![delta1.clone()]), + (file_path2.display().to_string(), vec![delta1, delta2]) + ] + .into_iter() + .collect() + ); + + Ok(()) + } +} + +mod writer { + use gitbutler_app::deltas::operations::Operation; + use gitbutler_app::{deltas, sessions}; + use std::vec; + + use crate::{Case, Suite}; + + #[test] + fn write_no_vbranches() -> anyhow::Result<()> { + let Case { gb_repository, .. 
} = Suite::default().new_case(); + + let deltas_writer = deltas::Writer::new(&gb_repository)?; + + let session = gb_repository.get_or_create_current_session()?; + let session_reader = sessions::Reader::open(&gb_repository, &session)?; + let deltas_reader = gitbutler_app::deltas::Reader::new(&session_reader); + + let path = "test.txt"; + let deltas = vec![ + gitbutler_app::deltas::Delta { + operations: vec![Operation::Insert((0, "hello".to_string()))], + timestamp_ms: 0, + }, + gitbutler_app::deltas::Delta { + operations: vec![Operation::Insert((5, " world".to_string()))], + timestamp_ms: 0, + }, + ]; + + deltas_writer.write(path, &deltas).unwrap(); + + assert_eq!(deltas_reader.read_file(path).unwrap(), Some(deltas)); + assert_eq!(deltas_reader.read_file("not found").unwrap(), None); + + Ok(()) + } +} diff --git a/gitbutler-app/tests/gb_repository/mod.rs b/gitbutler-app/tests/gb_repository/mod.rs new file mode 100644 index 000000000..d37d3f14e --- /dev/null +++ b/gitbutler-app/tests/gb_repository/mod.rs @@ -0,0 +1,448 @@ +use std::{collections::HashMap, path, thread, time}; + +use anyhow::Result; +use pretty_assertions::assert_eq; + +use crate::init_opts_bare; +use crate::{Case, Suite}; + +use gitbutler_app::{ + deltas::{self, operations::Operation}, + projects::{self, ApiProject, ProjectId}, + reader, + sessions::{self, SessionId}, +}; + +fn test_remote_repository() -> Result { + let path = tempfile::tempdir()?.path().to_str().unwrap().to_string(); + let repo_a = git2::Repository::init_opts(path, &init_opts_bare())?; + Ok(repo_a) +} + +#[test] +fn test_get_current_session_writer_should_use_existing_session() -> Result<()> { + let Case { gb_repository, .. } = Suite::default().new_case(); + + let current_session_1 = gb_repository.get_or_create_current_session()?; + let current_session_2 = gb_repository.get_or_create_current_session()?; + assert_eq!(current_session_1.id, current_session_2.id); + + Ok(()) +} + +#[test] +fn test_must_not_return_init_session() -> Result<()> { + let Case { gb_repository, .. } = Suite::default().new_case(); + + assert!(gb_repository.get_current_session()?.is_none()); + + let iter = gb_repository.get_sessions_iterator()?; + assert_eq!(iter.count(), 0); + + Ok(()) +} + +#[test] +fn test_must_not_flush_without_current_session() -> Result<()> { + let Case { + gb_repository, + project_repository, + .. + } = Suite::default().new_case(); + + let session = gb_repository.flush(&project_repository, None)?; + assert!(session.is_none()); + + let iter = gb_repository.get_sessions_iterator()?; + assert_eq!(iter.count(), 0); + + Ok(()) +} + +#[test] +fn test_non_empty_repository() -> Result<()> { + let Case { + gb_repository, + project_repository, + .. + } = Suite::default() + .new_case_with_files(HashMap::from([(path::PathBuf::from("test.txt"), "test")])); + + gb_repository.get_or_create_current_session()?; + gb_repository.flush(&project_repository, None)?; + + Ok(()) +} + +#[test] +fn test_must_flush_current_session() -> Result<()> { + let Case { + gb_repository, + project_repository, + .. + } = Suite::default().new_case(); + + gb_repository.get_or_create_current_session()?; + + let session = gb_repository.flush(&project_repository, None)?; + assert!(session.is_some()); + + let iter = gb_repository.get_sessions_iterator()?; + assert_eq!(iter.count(), 1); + + Ok(()) +} + +#[test] +fn test_list_deltas_from_current_session() -> Result<()> { + let Case { gb_repository, .. 
} = Suite::default().new_case(); + + let current_session = gb_repository.get_or_create_current_session()?; + let writer = deltas::Writer::new(&gb_repository)?; + writer.write( + "test.txt", + &vec![deltas::Delta { + operations: vec![Operation::Insert((0, "Hello World".to_string()))], + timestamp_ms: 0, + }], + )?; + + let session_reader = sessions::Reader::open(&gb_repository, ¤t_session)?; + let deltas_reader = deltas::Reader::new(&session_reader); + let deltas = deltas_reader.read(None)?; + + assert_eq!(deltas.len(), 1); + assert_eq!( + deltas[&path::PathBuf::from("test.txt")][0].operations.len(), + 1 + ); + assert_eq!( + deltas[&path::PathBuf::from("test.txt")][0].operations[0], + Operation::Insert((0, "Hello World".to_string())) + ); + + Ok(()) +} + +#[test] +fn test_list_deltas_from_flushed_session() { + let Case { + gb_repository, + project_repository, + .. + } = Suite::default().new_case(); + + let writer = deltas::Writer::new(&gb_repository).unwrap(); + writer + .write( + "test.txt", + &vec![deltas::Delta { + operations: vec![Operation::Insert((0, "Hello World".to_string()))], + timestamp_ms: 0, + }], + ) + .unwrap(); + let session = gb_repository.flush(&project_repository, None).unwrap(); + + let session_reader = sessions::Reader::open(&gb_repository, &session.unwrap()).unwrap(); + let deltas_reader = deltas::Reader::new(&session_reader); + let deltas = deltas_reader.read(None).unwrap(); + + assert_eq!(deltas.len(), 1); + assert_eq!( + deltas[&path::PathBuf::from("test.txt")][0].operations.len(), + 1 + ); + assert_eq!( + deltas[&path::PathBuf::from("test.txt")][0].operations[0], + Operation::Insert((0, "Hello World".to_string())) + ); +} + +#[test] +fn test_list_files_from_current_session() { + let Case { gb_repository, .. } = Suite::default().new_case_with_files(HashMap::from([( + path::PathBuf::from("test.txt"), + "Hello World", + )])); + + let current = gb_repository.get_or_create_current_session().unwrap(); + let reader = sessions::Reader::open(&gb_repository, ¤t).unwrap(); + let files = reader.files(None).unwrap(); + + assert_eq!(files.len(), 1); + assert_eq!( + files[&path::PathBuf::from("test.txt")], + reader::Content::UTF8("Hello World".to_string()) + ); +} + +#[test] +fn test_list_files_from_flushed_session() { + let Case { + gb_repository, + project_repository, + .. 
+ } = Suite::default().new_case_with_files(HashMap::from([( + path::PathBuf::from("test.txt"), + "Hello World", + )])); + + gb_repository.get_or_create_current_session().unwrap(); + let session = gb_repository + .flush(&project_repository, None) + .unwrap() + .unwrap(); + let reader = sessions::Reader::open(&gb_repository, &session).unwrap(); + let files = reader.files(None).unwrap(); + + assert_eq!(files.len(), 1); + assert_eq!( + files[&path::PathBuf::from("test.txt")], + reader::Content::UTF8("Hello World".to_string()) + ); +} + +#[tokio::test] +async fn test_remote_syncronization() { + // first, crate a remote, pretending it's a cloud + let cloud = test_remote_repository().unwrap(); + let api_project = ApiProject { + name: "test-sync".to_string(), + description: None, + repository_id: "123".to_string(), + git_url: cloud.path().to_str().unwrap().to_string(), + code_git_url: None, + created_at: 0_i32.to_string(), + updated_at: 0_i32.to_string(), + sync: true, + }; + + let suite = Suite::default(); + let user = suite.sign_in(); + + // create first local project, add files, deltas and flush a session + let case_one = suite.new_case_with_files(HashMap::from([( + path::PathBuf::from("test.txt"), + "Hello World", + )])); + suite + .projects + .update(&projects::UpdateRequest { + id: case_one.project.id, + api: Some(api_project.clone()), + ..Default::default() + }) + .await + .unwrap(); + let case_one = case_one.refresh(); + + let writer = deltas::Writer::new(&case_one.gb_repository).unwrap(); + writer + .write( + "test.txt", + &vec![deltas::Delta { + operations: vec![Operation::Insert((0, "Hello World".to_string()))], + timestamp_ms: 0, + }], + ) + .unwrap(); + let session_one = case_one + .gb_repository + .flush(&case_one.project_repository, Some(&user)) + .unwrap() + .unwrap(); + case_one.gb_repository.push(Some(&user)).unwrap(); + + // create second local project, fetch it and make sure session is there + let case_two = suite.new_case(); + suite + .projects + .update(&projects::UpdateRequest { + id: case_two.project.id, + api: Some(api_project.clone()), + ..Default::default() + }) + .await + .unwrap(); + let case_two = case_two.refresh(); + + case_two.gb_repository.fetch(Some(&user)).unwrap(); + + // now it should have the session from the first local project synced + let sessions_two = case_two + .gb_repository + .get_sessions_iterator() + .unwrap() + .map(Result::unwrap) + .collect::<Vec<_>>(); + assert_eq!(sessions_two.len(), 1); + assert_eq!(sessions_two[0].id, session_one.id); + + let session_reader = sessions::Reader::open(&case_two.gb_repository, &sessions_two[0]).unwrap(); + let deltas_reader = deltas::Reader::new(&session_reader); + let deltas = deltas_reader.read(None).unwrap(); + let files = session_reader.files(None).unwrap(); + assert_eq!(deltas.len(), 1); + assert_eq!(files.len(), 1); + assert_eq!( + files[&path::PathBuf::from("test.txt")], + reader::Content::UTF8("Hello World".to_string()) + ); + assert_eq!( + deltas[&path::PathBuf::from("test.txt")], + vec![deltas::Delta { + operations: vec![Operation::Insert((0, "Hello World".to_string()))], + timestamp_ms: 0, + }] + ); +} + +#[tokio::test] +async fn test_remote_sync_order() { + // first, crate a remote, pretending it's a cloud + let cloud = test_remote_repository().unwrap(); + let api_project = projects::ApiProject { + name: "test-sync".to_string(), + description: None, + repository_id: "123".to_string(), + git_url: cloud.path().to_str().unwrap().to_string(), + code_git_url: None, + created_at: 0_i32.to_string(), + updated_at:
0_i32.to_string(), + sync: true, + }; + + let suite = Suite::default(); + + let case_one = suite.new_case(); + suite + .projects + .update(&projects::UpdateRequest { + id: case_one.project.id, + api: Some(api_project.clone()), + ..Default::default() + }) + .await + .unwrap(); + let case_one = case_one.refresh(); + + let case_two = suite.new_case(); + suite + .projects + .update(&projects::UpdateRequest { + id: case_two.project.id, + api: Some(api_project.clone()), + ..Default::default() + }) + .await + .unwrap(); + let case_two = case_two.refresh(); + + let user = suite.sign_in(); + + // create session in the first project + case_one + .gb_repository + .get_or_create_current_session() + .unwrap(); + let session_one_first = case_one + .gb_repository + .flush(&case_one.project_repository, Some(&user)) + .unwrap() + .unwrap(); + case_one.gb_repository.push(Some(&user)).unwrap(); + + thread::sleep(time::Duration::from_secs(1)); + + // create session in the second project + case_two + .gb_repository + .get_or_create_current_session() + .unwrap(); + let session_two_first = case_two + .gb_repository + .flush(&case_two.project_repository, Some(&user)) + .unwrap() + .unwrap(); + case_two.gb_repository.push(Some(&user)).unwrap(); + + thread::sleep(time::Duration::from_secs(1)); + + // create second session in the first project + case_one + .gb_repository + .get_or_create_current_session() + .unwrap(); + let session_one_second = case_one + .gb_repository + .flush(&case_one.project_repository, Some(&user)) + .unwrap() + .unwrap(); + case_one.gb_repository.push(Some(&user)).unwrap(); + + thread::sleep(time::Duration::from_secs(1)); + + // create second session in the second project + case_two + .gb_repository + .get_or_create_current_session() + .unwrap(); + let session_two_second = case_two + .gb_repository + .flush(&case_two.project_repository, Some(&user)) + .unwrap() + .unwrap(); + case_two.gb_repository.push(Some(&user)).unwrap(); + + case_one.gb_repository.fetch(Some(&user)).unwrap(); + let sessions_one = case_one + .gb_repository + .get_sessions_iterator() + .unwrap() + .map(Result::unwrap) + .collect::<Vec<_>>(); + + case_two.gb_repository.fetch(Some(&user)).unwrap(); + let sessions_two = case_two + .gb_repository + .get_sessions_iterator() + .unwrap() + .map(Result::unwrap) + .collect::<Vec<_>>(); + + // make sure the sessions are the same on both repos + assert_eq!(sessions_one.len(), 4); + assert_eq!(sessions_two, sessions_one); + + assert_eq!(sessions_one[0].id, session_two_second.id); + assert_eq!(sessions_one[1].id, session_one_second.id); + assert_eq!(sessions_one[2].id, session_two_first.id); + assert_eq!(sessions_one[3].id, session_one_first.id); +} + +#[test] +fn test_gitbutler_file() { + let Case { + gb_repository, + project_repository, + ..
+ } = Suite::default().new_case(); + + let session = gb_repository.get_or_create_current_session().unwrap(); + + let gitbutler_file_path = project_repository.path().join(".git/gitbutler.json"); + assert!(gitbutler_file_path.exists()); + + let file_content: serde_json::Value = + serde_json::from_str(&std::fs::read_to_string(&gitbutler_file_path).unwrap()).unwrap(); + let sid: SessionId = file_content["sessionId"].as_str().unwrap().parse().unwrap(); + assert_eq!(sid, session.id); + + let pid: ProjectId = file_content["repositoryId"] + .as_str() + .unwrap() + .parse() + .unwrap(); + assert_eq!(pid, project_repository.project().id); +} diff --git a/gitbutler-app/tests/git/config.rs b/gitbutler-app/tests/git/config.rs new file mode 100644 index 000000000..b5ed2accd --- /dev/null +++ b/gitbutler-app/tests/git/config.rs @@ -0,0 +1,34 @@ +use crate::test_repository; + +#[test] +pub fn test_set_str() { + let repo = test_repository(); + let mut config = repo.config().unwrap(); + config.set_str("test.key", "test.value").unwrap(); + assert_eq!( + config.get_string("test.key").unwrap().unwrap(), + "test.value" + ); +} + +#[test] +pub fn test_set_bool() { + let repo = test_repository(); + let mut config = repo.config().unwrap(); + config.set_bool("test.key", true).unwrap(); + assert!(config.get_bool("test.key").unwrap().unwrap()); +} + +#[test] +pub fn test_get_string_none() { + let repo = test_repository(); + let config = repo.config().unwrap(); + assert_eq!(config.get_string("test.key").unwrap(), None); +} + +#[test] +pub fn test_get_bool_none() { + let repo = test_repository(); + let config = repo.config().unwrap(); + assert_eq!(config.get_bool("test.key").unwrap(), None); +} diff --git a/gitbutler-app/tests/git/credentials.rs b/gitbutler-app/tests/git/credentials.rs new file mode 100644 index 000000000..860c7d9c3 --- /dev/null +++ b/gitbutler-app/tests/git/credentials.rs @@ -0,0 +1,312 @@ +use gitbutler_app::git::credentials::{Credential, Helper, HttpsCredential, SshCredential}; +use gitbutler_app::{keys, project_repository, projects, users}; +use std::path::PathBuf; + +use crate::{temp_dir, test_repository}; + +#[derive(Default)] +struct TestCase<'a> { + remote_url: &'a str, + github_access_token: Option<&'a str>, + preferred_key: projects::AuthKey, + home_dir: Option<PathBuf>, +} + +impl TestCase<'_> { + fn run(&self) -> Vec<(String, Vec<Credential>)> { + let local_app_data = temp_dir(); + + let users = users::Controller::from_path(&local_app_data); + let user = users::User { + github_access_token: self.github_access_token.map(ToString::to_string), + ..Default::default() + }; + users.set_user(&user).unwrap(); + + let keys = keys::Controller::from_path(&local_app_data); + let helper = Helper::new(keys, users, self.home_dir.clone()); + + let repo = test_repository(); + repo.remote( + "origin", + &self.remote_url.parse().expect("failed to parse remote url"), + ) + .unwrap(); + let project = projects::Project { + path: repo.workdir().unwrap().to_path_buf(), + preferred_key: self.preferred_key.clone(), + ..Default::default() + }; + let project_repository = project_repository::Repository::open(&project).unwrap(); + + let flow = helper.help(&project_repository, "origin").unwrap(); + flow.into_iter() + .map(|(remote, credentials)| { + ( + remote.url().unwrap().as_ref().unwrap().to_string(), + credentials, + ) + }) + .collect::<Vec<_>>() + } +} + +mod not_github { + use super::*; + + mod with_preferred_key { + use super::*; + + #[test] + fn https() { + let test_case = TestCase { + remote_url:
"https://gitlab.com/test-gitbutler/test.git", + github_access_token: Some("token"), + preferred_key: projects::AuthKey::Local { + private_key_path: PathBuf::from("/tmp/id_rsa"), + }, + ..Default::default() + }; + let flow = test_case.run(); + assert_eq!(flow.len(), 1); + assert_eq!( + flow[0].0, + "git@gitlab.com:test-gitbutler/test.git".to_string(), + ); + assert_eq!( + flow[0].1, + vec![Credential::Ssh(SshCredential::Keyfile { + key_path: PathBuf::from("/tmp/id_rsa"), + passphrase: None, + })] + ); + } + + #[test] + fn ssh() { + let test_case = TestCase { + remote_url: "git@gitlab.com:test-gitbutler/test.git", + github_access_token: Some("token"), + preferred_key: projects::AuthKey::Local { + private_key_path: PathBuf::from("/tmp/id_rsa"), + }, + ..Default::default() + }; + let flow = test_case.run(); + assert_eq!(flow.len(), 1); + assert_eq!( + flow[0].0, + "git@gitlab.com:test-gitbutler/test.git".to_string(), + ); + assert_eq!( + flow[0].1, + vec![Credential::Ssh(SshCredential::Keyfile { + key_path: PathBuf::from("/tmp/id_rsa"), + passphrase: None, + })] + ); + } + } + + mod with_github_token { + use super::*; + + #[test] + fn https() { + let test_case = TestCase { + remote_url: "https://gitlab.com/test-gitbutler/test.git", + github_access_token: Some("token"), + ..Default::default() + }; + let flow = test_case.run(); + + assert_eq!(flow.len(), 1); + + assert_eq!( + flow[0].0, + "git@gitlab.com:test-gitbutler/test.git".to_string(), + ); + assert_eq!(flow[0].1.len(), 1); + assert!(matches!( + flow[0].1[0], + Credential::Ssh(SshCredential::GitButlerKey(_)) + )); + } + + #[test] + fn ssh() { + let test_case = TestCase { + remote_url: "git@gitlab.com:test-gitbutler/test.git", + github_access_token: Some("token"), + ..Default::default() + }; + let flow = test_case.run(); + + assert_eq!(flow.len(), 1); + + assert_eq!( + flow[0].0, + "git@gitlab.com:test-gitbutler/test.git".to_string(), + ); + assert_eq!(flow[0].1.len(), 1); + assert!(matches!( + flow[0].1[0], + Credential::Ssh(SshCredential::GitButlerKey(_)) + )); + } + } +} + +mod github { + use super::*; + + mod with_github_token { + use super::*; + + #[test] + fn https() { + let test_case = TestCase { + remote_url: "https://github.com/gitbutlerapp/gitbutler.git", + github_access_token: Some("token"), + ..Default::default() + }; + let flow = test_case.run(); + assert_eq!(flow.len(), 1); + assert_eq!( + flow[0].0, + "https://github.com/gitbutlerapp/gitbutler.git".to_string(), + ); + assert_eq!( + flow[0].1, + vec![Credential::Https(HttpsCredential::GitHubToken( + "token".to_string() + ))] + ); + } + + #[test] + fn ssh() { + let test_case = TestCase { + remote_url: "git@github.com:gitbutlerapp/gitbutler.git", + github_access_token: Some("token"), + ..Default::default() + }; + let flow = test_case.run(); + assert_eq!(flow.len(), 1); + assert_eq!( + flow[0].0, + "https://github.com/gitbutlerapp/gitbutler.git".to_string(), + ); + assert_eq!( + flow[0].1, + vec![Credential::Https(HttpsCredential::GitHubToken( + "token".to_string() + ))] + ); + } + } + + mod without_github_token { + use super::*; + + mod without_preferred_key { + use super::*; + + #[test] + fn https() { + let test_case = TestCase { + remote_url: "https://github.com/gitbutlerapp/gitbutler.git", + ..Default::default() + }; + let flow = test_case.run(); + + assert_eq!(flow.len(), 1); + + assert_eq!( + flow[0].0, + "git@github.com:gitbutlerapp/gitbutler.git".to_string(), + ); + assert_eq!(flow[0].1.len(), 1); + assert!(matches!( + flow[0].1[0], + 
Credential::Ssh(SshCredential::GitButlerKey(_)) + )); + } + + #[test] + fn ssh() { + let test_case = TestCase { + remote_url: "git@github.com:gitbutlerapp/gitbutler.git", + ..Default::default() + }; + let flow = test_case.run(); + + assert_eq!(flow.len(), 1); + + assert_eq!( + flow[0].0, + "git@github.com:gitbutlerapp/gitbutler.git".to_string(), + ); + assert_eq!(flow[0].1.len(), 1); + assert!(matches!( + flow[0].1[0], + Credential::Ssh(SshCredential::GitButlerKey(_)) + )); + } + } + + mod with_preferred_key { + use super::*; + + #[test] + fn https() { + let test_case = TestCase { + remote_url: "https://github.com/gitbutlerapp/gitbutler.git", + github_access_token: Some("token"), + preferred_key: projects::AuthKey::Local { + private_key_path: PathBuf::from("/tmp/id_rsa"), + }, + ..Default::default() + }; + let flow = test_case.run(); + assert_eq!(flow.len(), 1); + assert_eq!( + flow[0].0, + "git@github.com:gitbutlerapp/gitbutler.git".to_string(), + ); + assert_eq!( + flow[0].1, + vec![Credential::Ssh(SshCredential::Keyfile { + key_path: PathBuf::from("/tmp/id_rsa"), + passphrase: None, + })] + ); + } + + #[test] + fn ssh() { + let test_case = TestCase { + remote_url: "git@github.com:gitbutlerapp/gitbutler.git", + github_access_token: Some("token"), + preferred_key: projects::AuthKey::Local { + private_key_path: PathBuf::from("/tmp/id_rsa"), + }, + ..Default::default() + }; + let flow = test_case.run(); + assert_eq!(flow.len(), 1); + assert_eq!( + flow[0].0, + "git@github.com:gitbutlerapp/gitbutler.git".to_string(), + ); + assert_eq!( + flow[0].1, + vec![Credential::Ssh(SshCredential::Keyfile { + key_path: PathBuf::from("/tmp/id_rsa"), + passphrase: None, + })] + ); + } + } + } +} diff --git a/gitbutler-app/src/gb_repository/repository_tests.rs b/gitbutler-app/tests/git/diff.rs similarity index 99% rename from gitbutler-app/src/gb_repository/repository_tests.rs rename to gitbutler-app/tests/git/diff.rs index e9df5ac1b..35e81bc53 100644 --- a/gitbutler-app/src/gb_repository/repository_tests.rs +++ b/gitbutler-app/tests/git/diff.rs @@ -3,13 +3,13 @@ use std::{collections::HashMap, path, thread, time}; use anyhow::Result; use pretty_assertions::assert_eq; -use crate::tests::init_opts_bare; -use crate::{ +use crate::init_opts_bare; +use crate::{Case, Suite}; +use gitbutler_app::{ deltas::{self, operations::Operation}, projects::{self, ApiProject, ProjectId}, reader, sessions::{self, SessionId}, - tests::{Case, Suite}, }; fn test_remote_repository() -> Result<git2::Repository> { diff --git a/gitbutler-app/tests/git/mod.rs b/gitbutler-app/tests/git/mod.rs new file mode 100644 index 000000000..23bc6d0b7 --- /dev/null +++ b/gitbutler-app/tests/git/mod.rs @@ -0,0 +1,3 @@ +mod config; +mod credentials; +mod diff; diff --git a/gitbutler-app/tests/keys/controller.rs b/gitbutler-app/tests/keys/controller.rs new file mode 100644 index 000000000..00345cbc9 --- /dev/null +++ b/gitbutler-app/tests/keys/controller.rs @@ -0,0 +1,27 @@ +#[cfg(not(target_os = "windows"))] +mod not_windows { + use gitbutler_app::keys::storage::Storage; + use gitbutler_app::keys::Controller; + use std::fs; + #[cfg(target_family = "unix")] + use std::os::unix::prelude::*; + + use crate::Suite; + + #[test] + fn test_get_or_create() { + let suite = Suite::default(); + let controller = Controller::new(Storage::from_path(&suite.local_app_data)); + + let once = controller.get_or_create().unwrap(); + let twice = controller.get_or_create().unwrap(); + assert_eq!(once, twice); + + // check permissions of the private key + let permissions =
fs::metadata(suite.local_app_data.join("keys/ed25519")) + .unwrap() + .permissions(); + let perms = format!("{:o}", permissions.mode()); + assert_eq!(perms, "100600"); + } +} diff --git a/gitbutler-app/tests/keys/mod.rs b/gitbutler-app/tests/keys/mod.rs new file mode 100644 index 000000000..9afdb4e75 --- /dev/null +++ b/gitbutler-app/tests/keys/mod.rs @@ -0,0 +1 @@ +mod controller; diff --git a/gitbutler-app/tests/lock/mod.rs b/gitbutler-app/tests/lock/mod.rs new file mode 100644 index 000000000..a0049db82 --- /dev/null +++ b/gitbutler-app/tests/lock/mod.rs @@ -0,0 +1,91 @@ +use gitbutler_app::lock::Dir; + +use crate::temp_dir; + +#[tokio::test] +async fn test_lock_same_instance() { + let dir_path = temp_dir(); + std::fs::write(dir_path.join("file.txt"), "").unwrap(); + let dir = Dir::new(&dir_path).unwrap(); + + let (tx, rx) = std::sync::mpsc::sync_channel(1); + + // spawn a task that will signal right after aquireing the lock + let _ = tokio::spawn({ + let dir = dir.clone(); + async move { + dir.batch(|root| { + tx.send(()).unwrap(); + assert_eq!( + std::fs::read_to_string(root.join("file.txt")).unwrap(), + String::new() + ); + std::fs::write(root.join("file.txt"), "1") + }) + } + }) + .await + .unwrap(); + + // then we wait until the lock is aquired + rx.recv().unwrap(); + + // and immidiately try to lock again + dir.batch(|root| { + assert_eq!(std::fs::read_to_string(root.join("file.txt")).unwrap(), "1"); + std::fs::write(root.join("file.txt"), "2") + }) + .unwrap() + .unwrap(); + + assert_eq!( + std::fs::read_to_string(dir_path.join("file.txt")).unwrap(), + "2" + ); +} + +#[tokio::test] +async fn test_lock_different_instances() { + let dir_path = temp_dir(); + std::fs::write(dir_path.join("file.txt"), "").unwrap(); + + let (tx, rx) = std::sync::mpsc::sync_channel(1); + + // spawn a task that will signal right after aquireing the lock + let _ = tokio::spawn({ + let dir_path = dir_path.clone(); + async move { + // one dir instance is created on a separate thread + let dir = Dir::new(&dir_path).unwrap(); + dir.batch(|root| { + tx.send(()).unwrap(); + assert_eq!( + std::fs::read_to_string(root.join("file.txt")).unwrap(), + String::new() + ); + std::fs::write(root.join("file.txt"), "1") + }) + } + }) + .await + .unwrap(); + + // another dir instance is created on the main thread + let dir = Dir::new(&dir_path).unwrap(); + + // then we wait until the lock is aquired + rx.recv().unwrap(); + + // and immidiately try to lock again + dir.batch(|root| { + assert_eq!(std::fs::read_to_string(root.join("file.txt")).unwrap(), "1"); + std::fs::write(root.join("file.txt"), "2") + }) + .unwrap() + .unwrap(); + + assert_eq!( + std::fs::read_to_string(dir_path.join("file.txt")).unwrap(), + "2" + ); +} diff --git a/gitbutler-app/tests/reader/mod.rs b/gitbutler-app/tests/reader/mod.rs new file mode 100644 index 000000000..f7accf5e9 --- /dev/null +++ b/gitbutler-app/tests/reader/mod.rs @@ -0,0 +1,183 @@ +use gitbutler_app::reader::{CommitReader, Content, Reader}; +use std::fs; +use std::path::Path; + +use crate::{commit_all, temp_dir, test_repository}; +use anyhow::Result; + +#[test] +fn test_directory_reader_read_file() -> Result<()> { + let dir = temp_dir(); + + let file_path = Path::new("test.txt"); + fs::write(dir.join(file_path), "test")?; + + let reader = Reader::open(dir.clone())?; + assert_eq!(reader.read(file_path)?, Content::UTF8("test".to_string())); + + Ok(()) +} + +#[test] +fn test_commit_reader_read_file() -> Result<()> { + let repository = test_repository(); + + let file_path = 
Path::new("test.txt"); + fs::write(repository.path().parent().unwrap().join(file_path), "test")?; + + let oid = commit_all(&repository); + + fs::write(repository.path().parent().unwrap().join(file_path), "test2")?; + + let reader = Reader::from_commit(&repository, &repository.find_commit(oid)?)?; + assert_eq!(reader.read(file_path)?, Content::UTF8("test".to_string())); + + Ok(()) +} + +#[test] +fn test_reader_list_files_should_return_relative() -> Result<()> { + let dir = temp_dir(); + + fs::write(dir.join("test1.txt"), "test")?; + fs::create_dir_all(dir.join("dir"))?; + fs::write(dir.join("dir").join("test.txt"), "test")?; + + let reader = Reader::open(dir.clone())?; + let files = reader.list_files(Path::new("dir"))?; + assert_eq!(files.len(), 1); + assert!(files.contains(&Path::new("test.txt").to_path_buf())); + + Ok(()) +} + +#[test] +fn test_reader_list_files() -> Result<()> { + let dir = temp_dir(); + + fs::write(dir.join("test.txt"), "test")?; + fs::create_dir_all(dir.join("dir"))?; + fs::write(dir.join("dir").join("test.txt"), "test")?; + + let reader = Reader::open(dir.clone())?; + let files = reader.list_files(Path::new(""))?; + assert_eq!(files.len(), 2); + assert!(files.contains(&Path::new("test.txt").to_path_buf())); + assert!(files.contains(&Path::new("dir/test.txt").to_path_buf())); + + Ok(()) +} + +#[test] +fn test_commit_reader_list_files_should_return_relative() -> Result<()> { + let repository = test_repository(); + + fs::write( + repository.path().parent().unwrap().join("test1.txt"), + "test", + )?; + fs::create_dir_all(repository.path().parent().unwrap().join("dir"))?; + fs::write( + repository + .path() + .parent() + .unwrap() + .join("dir") + .join("test.txt"), + "test", + )?; + + let oid = commit_all(&repository); + + fs::remove_dir_all(repository.path().parent().unwrap().join("dir"))?; + + let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?; + let files = reader.list_files(Path::new("dir"))?; + assert_eq!(files.len(), 1); + assert!(files.contains(&Path::new("test.txt").to_path_buf())); + + Ok(()) +} + +#[test] +fn test_commit_reader_list_files() -> Result<()> { + let repository = test_repository(); + + fs::write(repository.path().parent().unwrap().join("test.txt"), "test")?; + fs::create_dir_all(repository.path().parent().unwrap().join("dir"))?; + fs::write( + repository + .path() + .parent() + .unwrap() + .join("dir") + .join("test.txt"), + "test", + )?; + + let oid = commit_all(&repository); + + fs::remove_dir_all(repository.path().parent().unwrap().join("dir"))?; + + let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?; + let files = reader.list_files(Path::new(""))?; + assert_eq!(files.len(), 2); + assert!(files.contains(&Path::new("test.txt").to_path_buf())); + assert!(files.contains(&Path::new("dir/test.txt").to_path_buf())); + + Ok(()) +} + +#[test] +fn test_directory_reader_exists() -> Result<()> { + let dir = temp_dir(); + + fs::write(dir.join("test.txt"), "test")?; + + let reader = Reader::open(dir.clone())?; + assert!(reader.exists(Path::new("test.txt"))?); + assert!(!reader.exists(Path::new("test2.txt"))?); + + Ok(()) +} + +#[test] +fn test_commit_reader_exists() -> Result<()> { + let repository = test_repository(); + + fs::write(repository.path().parent().unwrap().join("test.txt"), "test")?; + + let oid = commit_all(&repository); + + fs::remove_file(repository.path().parent().unwrap().join("test.txt"))?; + + let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?; + 
assert!(reader.exists(Path::new("test.txt"))); + assert!(!reader.exists(Path::new("test2.txt"))); + + Ok(()) +} + +#[test] +fn test_from_bytes() { + for (bytes, expected) in [ + ("test".as_bytes(), Content::UTF8("test".to_string())), + (&[0, 159, 146, 150, 159, 146, 150], Content::Binary), + ] { + assert_eq!(Content::from(bytes), expected); + } +} + +#[test] +fn test_serialize_content() { + for (content, expected) in [ + ( + Content::UTF8("test".to_string()), + r#"{"type":"utf8","value":"test"}"#, + ), + (Content::Binary, r#"{"type":"binary"}"#), + (Content::Large, r#"{"type":"large"}"#), + ] { + assert_eq!(serde_json::to_string(&content).unwrap(), expected); + } +} diff --git a/gitbutler-app/tests/sessions/database.rs b/gitbutler-app/tests/sessions/database.rs new file mode 100644 index 000000000..3be25dfb5 --- /dev/null +++ b/gitbutler-app/tests/sessions/database.rs @@ -0,0 +1,84 @@ +use crate::test_database; +use gitbutler_app::projects::ProjectId; +use gitbutler_app::sessions::{session, Database, Session, SessionId}; + +#[test] +fn test_insert_query() -> anyhow::Result<()> { + let db = test_database(); + println!("0"); + let database = Database::new(db); + println!("1"); + + let project_id = ProjectId::generate(); + let session1 = Session { + id: SessionId::generate(), + hash: None, + meta: session::Meta { + branch: None, + commit: None, + start_timestamp_ms: 1, + last_timestamp_ms: 2, + }, + }; + let session2 = session::Session { + id: SessionId::generate(), + hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()), + meta: session::Meta { + branch: Some("branch2".to_string()), + commit: Some("commit2".to_string()), + start_timestamp_ms: 3, + last_timestamp_ms: 4, + }, + }; + let sessions = vec![&session1, &session2]; + + database.insert(&project_id, &sessions)?; + + assert_eq!( + database.list_by_project_id(&project_id, None)?, + vec![session2.clone(), session1.clone()] + ); + assert_eq!(database.get_by_id(&session1.id)?.unwrap(), session1); + assert_eq!(database.get_by_id(&session2.id)?.unwrap(), session2); + assert_eq!(database.get_by_id(&SessionId::generate())?, None); + + Ok(()) +} + +#[test] +fn test_update() -> anyhow::Result<()> { + let db = test_database(); + let database = Database::new(db); + + let project_id = ProjectId::generate(); + let session = session::Session { + id: SessionId::generate(), + hash: None, + meta: session::Meta { + branch: None, + commit: None, + start_timestamp_ms: 1, + last_timestamp_ms: 2, + }, + }; + let session_updated = session::Session { + id: session.id, + hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()), + meta: session::Meta { + branch: Some("branch2".to_string()), + commit: Some("commit2".to_string()), + start_timestamp_ms: 3, + last_timestamp_ms: 4, + }, + }; + database.insert(&project_id, &[&session])?; + database.insert(&project_id, &[&session_updated])?; + + assert_eq!( + database.list_by_project_id(&project_id, None)?, + vec![session_updated.clone()] + ); + assert_eq!(database.get_by_id(&session.id)?.unwrap(), session_updated); + + Ok(()) +} diff --git a/gitbutler-app/src/sessions/tests.rs b/gitbutler-app/tests/sessions/mod.rs similarity index 90% rename from gitbutler-app/src/sessions/tests.rs rename to gitbutler-app/tests/sessions/mod.rs index e9f877b92..d9f24ae97 100644 --- a/gitbutler-app/src/sessions/tests.rs +++ b/gitbutler-app/tests/sessions/mod.rs @@ -1,11 +1,9 @@ +mod database; + use anyhow::Result; -use crate::{ - sessions::{self, session::SessionId}, - tests::{Case, Suite}, -}; - -use 
super::Writer; +use crate::{Case, Suite}; +use gitbutler_app::sessions::{self, session::SessionId}; #[test] fn test_should_not_write_session_with_hash() { @@ -22,7 +20,7 @@ fn test_should_not_write_session_with_hash() { }, }; - assert!(Writer::new(&gb_repository) + assert!(sessions::Writer::new(&gb_repository) .unwrap() .write(&session) .is_err()); @@ -43,7 +41,7 @@ fn test_should_write_full_session() -> Result<()> { }, }; - Writer::new(&gb_repository)?.write(&session)?; + sessions::Writer::new(&gb_repository)?.write(&session)?; assert_eq!( std::fs::read_to_string(gb_repository.session_path().join("meta/id"))?, @@ -84,7 +82,7 @@ fn test_should_write_partial_session() -> Result<()> { }, }; - Writer::new(&gb_repository)?.write(&session)?; + sessions::Writer::new(&gb_repository)?.write(&session)?; assert_eq!( std::fs::read_to_string(gb_repository.session_path().join("meta/id"))?, diff --git a/gitbutler-app/src/tests/suite/gb_repository.rs b/gitbutler-app/tests/suite/gb_repository.rs similarity index 97% rename from gitbutler-app/src/tests/suite/gb_repository.rs rename to gitbutler-app/tests/suite/gb_repository.rs index 9863745cf..c278fb1ca 100644 --- a/gitbutler-app/src/tests/suite/gb_repository.rs +++ b/gitbutler-app/tests/suite/gb_repository.rs @@ -1,7 +1,5 @@ -use crate::{ - gb_repository, git, project_repository, projects, - tests::common::{paths, TestProject}, -}; +use crate::common::{paths, TestProject}; +use gitbutler_app::{gb_repository, git, project_repository, projects}; use std::path; mod init { diff --git a/gitbutler-app/src/tests/suite/projects.rs b/gitbutler-app/tests/suite/projects.rs similarity index 93% rename from gitbutler-app/src/tests/suite/projects.rs rename to gitbutler-app/tests/suite/projects.rs index 5f95fbc86..66ad2ebde 100644 --- a/gitbutler-app/src/tests/suite/projects.rs +++ b/gitbutler-app/tests/suite/projects.rs @@ -1,7 +1,6 @@ -use crate::{ - projects::Controller, - tests::common::{self, paths}, -}; +use gitbutler_app::projects::Controller; + +use crate::common::{self, paths}; pub fn new() -> Controller { let data_dir = paths::data_dir(); @@ -22,7 +21,7 @@ mod add { } mod error { - use crate::projects::AddError; + use gitbutler_app::projects::AddError; use super::*; diff --git a/gitbutler-app/tests/suite/virtual_branches/amend.rs b/gitbutler-app/tests/suite/virtual_branches/amend.rs new file mode 100644 index 000000000..8d778a0ea --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/amend.rs @@ -0,0 +1,352 @@ +use super::*; + +#[tokio::test] +async fn to_default_target() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + // amend without head commit + fs::write(repository.path().join("file2.txt"), "content").unwrap(); + let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); + assert!(matches!( + controller + .amend(&project_id, &branch_id, &to_amend) + .await + .unwrap_err(), + ControllerError::Action(errors::AmendError::BranchHasNoCommits) + )); +} + +#[tokio::test] +async fn forcepush_allowed() { + let Test { + repository, + project_id, + controller, + projects, + .. 
+ } = Test::default(); + + projects + .update(&projects::UpdateRequest { + id: project_id, + ok_with_force_push: Some(false), + ..Default::default() + }) + .await + .unwrap(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + projects + .update(&projects::UpdateRequest { + id: project_id, + ok_with_force_push: Some(true), + ..Default::default() + }) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // create commit + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit one", None, false) + .await + .unwrap(); + }; + + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + { + // amend another hunk + fs::write(repository.path().join("file2.txt"), "content2").unwrap(); + let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); + controller + .amend(&project_id, &branch_id, &to_amend) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert!(branch.requires_force); + assert_eq!(branch.commits.len(), 1); + assert_eq!(branch.files.len(), 0); + assert_eq!(branch.commits[0].files.len(), 2); + } +} + +#[tokio::test] +async fn forcepush_forbidden() { + let Test { + repository, + project_id, + controller, + projects, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + projects + .update(&projects::UpdateRequest { + id: project_id, + ok_with_force_push: Some(false), + ..Default::default() + }) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // create commit + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit one", None, false) + .await + .unwrap(); + }; + + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + { + fs::write(repository.path().join("file2.txt"), "content2").unwrap(); + let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); + assert!(matches!( + controller + .amend(&project_id, &branch_id, &to_amend) + .await + .unwrap_err(), + ControllerError::Action(errors::AmendError::ForcePushNotAllowed(_)) + )); + } +} + +#[tokio::test] +async fn non_locked_hunk() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // create commit + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit one", None, false) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.commits.len(), 1); + assert_eq!(branch.files.len(), 0); + assert_eq!(branch.commits[0].files.len(), 1); + }; + + { + // amend another hunk + fs::write(repository.path().join("file2.txt"), "content2").unwrap(); + let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); + controller + .amend(&project_id, &branch_id, &to_amend) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.commits.len(), 1); + assert_eq!(branch.files.len(), 0); + assert_eq!(branch.commits[0].files.len(), 2); + } +} + +#[tokio::test] +async fn locked_hunk() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // create commit + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit one", None, false) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.commits.len(), 1); + assert_eq!(branch.files.len(), 0); + assert_eq!(branch.commits[0].files.len(), 1); + assert_eq!( + branch.commits[0].files[0].hunks[0].diff, + "@@ -0,0 +1 @@\n+content\n\\ No newline at end of file\n" + ); + }; + + { + // amend another hunk + fs::write(repository.path().join("file.txt"), "more content").unwrap(); + let to_amend: branch::BranchOwnershipClaims = "file.txt:1-2".parse().unwrap(); + controller + .amend(&project_id, &branch_id, &to_amend) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + assert_eq!(branch.commits.len(), 1); + assert_eq!(branch.files.len(), 0); + assert_eq!(branch.commits[0].files.len(), 1); + assert_eq!( + branch.commits[0].files[0].hunks[0].diff, + "@@ -0,0 +1 @@\n+more content\n\\ No newline at end of file\n" + ); + } +} + +#[tokio::test] +async fn non_existing_ownership() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // create commit + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit one", None, false) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.commits.len(), 1); + assert_eq!(branch.files.len(), 0); + assert_eq!(branch.commits[0].files.len(), 1); + }; + + { + // amend non existing hunk + let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); + assert!(matches!( + controller + .amend(&project_id, &branch_id, &to_amend) + .await + .unwrap_err(), + ControllerError::Action(errors::AmendError::TargetOwnerhshipNotFound(_)) + )); + } +} diff --git a/gitbutler-app/tests/suite/virtual_branches/apply_virtual_branch.rs b/gitbutler-app/tests/suite/virtual_branches/apply_virtual_branch.rs new file mode 100644 index 000000000..3b65e57ea --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/apply_virtual_branch.rs @@ -0,0 +1,278 @@ +use super::*; + +#[tokio::test] +async fn deltect_conflict() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = { + let branch1_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + fs::write(repository.path().join("file.txt"), "branch one").unwrap(); + + branch1_id + }; + + // unapply first vbranch + controller + .unapply_virtual_branch(&project_id, &branch1_id) + .await + .unwrap(); + + { + // create another vbranch that conflicts with the first one + controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + fs::write(repository.path().join("file.txt"), "branch two").unwrap(); + } + + { + // it should not be possible to apply the first branch + assert!(!controller + .can_apply_virtual_branch(&project_id, &branch1_id) + .await + .unwrap()); + + assert!(matches!( + controller + .apply_virtual_branch(&project_id, &branch1_id) + .await, + Err(ControllerError::Action( + errors::ApplyBranchError::BranchConflicts(_) + )) + )); + } +} + +#[tokio::test] +async fn rebase_commit() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "one").unwrap(); + fs::write(repository.path().join("another_file.txt"), "").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "two").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = { + // create a branch with some commited work + let branch1_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + fs::write(repository.path().join("another_file.txt"), "virtual").unwrap(); + + controller + .create_commit(&project_id, &branch1_id, "virtual commit", None, false) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert!(branches[0].active); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + + branch1_id + }; + + { + // unapply first vbranch + controller + .unapply_virtual_branch(&project_id, &branch1_id) + .await + .unwrap(); + + assert_eq!( + fs::read_to_string(repository.path().join("another_file.txt")).unwrap(), + "" + ); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "one" + ); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + assert!(!branches[0].active); + } + + { + // fetch remote + controller.update_base_branch(&project_id).await.unwrap(); + + // branch is stil unapplied + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + assert!(!branches[0].active); + assert!(!branches[0].conflicted); + + assert_eq!( + fs::read_to_string(repository.path().join("another_file.txt")).unwrap(), + "" + ); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "two" + ); + } + + { + // apply first vbranch again + controller + .apply_virtual_branch(&project_id, &branch1_id) + .await + .unwrap(); + + // it should be rebased + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + + assert_eq!( + fs::read_to_string(repository.path().join("another_file.txt")).unwrap(), + "virtual" + ); + + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "two" + ); + } +} + +#[tokio::test] +async fn rebase_work() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = { + // make a branch with some work + let branch1_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + fs::write(repository.path().join("another_file.txt"), "").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert!(branches[0].active); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + + branch1_id + }; + + { + // unapply first vbranch + controller + .unapply_virtual_branch(&project_id, &branch1_id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert!(!branches[0].active); + + assert!(!repository.path().join("another_file.txt").exists()); + assert!(!repository.path().join("file.txt").exists()); + } + + { + // fetch remote + controller.update_base_branch(&project_id).await.unwrap(); + + // first branch is stil unapplied + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert!(!branches[0].active); + assert!(!branches[0].conflicted); + + assert!(!repository.path().join("another_file.txt").exists()); + assert!(repository.path().join("file.txt").exists()); + } + + { + // apply first vbranch again + controller + .apply_virtual_branch(&project_id, &branch1_id) + .await + .unwrap(); + + // workdir should be rebased, and work should be restored + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + + assert!(repository.path().join("another_file.txt").exists()); + assert!(repository.path().join("file.txt").exists()); + } +} diff --git a/gitbutler-app/tests/suite/virtual_branches/cherry_pick.rs b/gitbutler-app/tests/suite/virtual_branches/cherry_pick.rs new file mode 100644 index 000000000..e0ecdd015 --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/cherry_pick.rs @@ -0,0 +1,382 @@ +use super::*; + +mod cleanly { + + use super::*; + + #[tokio::test] + async fn applied() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one = { + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit", None, false) + .await + .unwrap() + }; + + let commit_two = { + fs::write(repository.path().join("file.txt"), "content two").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit", None, false) + .await + .unwrap() + }; + + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + controller + .reset_virtual_branch(&project_id, &branch_id, commit_one) + .await + .unwrap(); + + repository.reset_hard(None); + + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + + let cherry_picked_commit_oid = controller + .cherry_pick(&project_id, &branch_id, commit_two) + .await + .unwrap(); + assert!(cherry_picked_commit_oid.is_some()); + assert!(repository.path().join("file.txt").exists()); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content two" + ); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert_eq!(branches[0].commits.len(), 2); + assert_eq!(branches[0].commits[0].id, cherry_picked_commit_oid.unwrap()); + assert_eq!(branches[0].commits[1].id, commit_one); + } + + #[tokio::test] + async fn to_different_branch() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one = { + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit", None, false) + .await + .unwrap() + }; + + let commit_two = { + fs::write(repository.path().join("file_two.txt"), "content two").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit", None, false) + .await + .unwrap() + }; + + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + controller + .reset_virtual_branch(&project_id, &branch_id, commit_one) + .await + .unwrap(); + + repository.reset_hard(None); + + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + assert!(!repository.path().join("file_two.txt").exists()); + + let branch_two_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let cherry_picked_commit_oid = controller + .cherry_pick(&project_id, &branch_two_id, commit_two) + .await + .unwrap(); + assert!(cherry_picked_commit_oid.is_some()); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert!(repository.path().join("file_two.txt").exists()); + assert_eq!( + fs::read_to_string(repository.path().join("file_two.txt")).unwrap(), + "content two" + ); + + assert_eq!(branches.len(), 2); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].id, commit_one); + + assert_eq!(branches[1].id, branch_two_id); + assert!(branches[1].active); + assert_eq!(branches[1].commits.len(), 1); + assert_eq!(branches[1].commits[0].id, cherry_picked_commit_oid.unwrap()); + } + + #[tokio::test] + async fn non_applied() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one_oid = { + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file_two.txt"), "content two").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit", None, false) + .await + .unwrap() + }; + + let commit_three_oid = { + fs::write(repository.path().join("file_three.txt"), "content three").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit", None, false) + .await + .unwrap() + }; + + controller + .reset_virtual_branch(&project_id, &branch_id, commit_one_oid) + .await + .unwrap(); + + controller + .unapply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + assert!(matches!( + controller + .cherry_pick(&project_id, &branch_id, commit_three_oid) + .await, + Err(ControllerError::Action(errors::CherryPickError::NotApplied)) + )); + } +} + +mod with_conflicts { + + use super::*; + + #[tokio::test] + async fn applied() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one = { + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file_two.txt"), "content two").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit two", None, false) + .await + .unwrap() + }; + + let commit_three = { + fs::write(repository.path().join("file_three.txt"), "content three").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit three", None, false) + .await + .unwrap() + }; + + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + controller + .reset_virtual_branch(&project_id, &branch_id, commit_one) + .await + .unwrap(); + + repository.reset_hard(None); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + assert!(!repository.path().join("file_two.txt").exists()); + assert!(!repository.path().join("file_three.txt").exists()); + + // introduce conflict with the remote commit + fs::write(repository.path().join("file_three.txt"), "conflict").unwrap(); + + { + // cherry picking leads to conflict + let cherry_picked_commit_oid = controller + .cherry_pick(&project_id, &branch_id, commit_three) + .await + .unwrap(); + assert!(cherry_picked_commit_oid.is_none()); + + assert_eq!( + fs::read_to_string(repository.path().join("file_three.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\ncontent three\n>>>>>>> theirs\n" + ); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert_eq!(branches[0].files.len(), 1); + assert!(branches[0].files[0].conflicted); + assert_eq!(branches[0].commits.len(), 1); + } + + { + // conflict can be resolved + fs::write(repository.path().join("file_three.txt"), "resolved").unwrap(); + let commited_oid = controller + .create_commit(&project_id, &branch_id, "resolution", None, false) + .await + .unwrap(); + + let commit = repository.find_commit(commited_oid).unwrap(); + assert_eq!(commit.parent_count(), 2); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].requires_force); + assert!(!branches[0].conflicted); + assert_eq!(branches[0].commits.len(), 2); + // resolution commit is there + assert_eq!(branches[0].commits[0].id, commited_oid); + assert_eq!(branches[0].commits[1].id, commit_one); + } + } + + #[tokio::test] + async fn non_applied() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + let commit_oid = { + let first = repository.commit_all("commit"); + fs::write(repository.path().join("file.txt"), "content").unwrap(); + let second = repository.commit_all("commit"); + repository.push(); + repository.reset_hard(Some(first)); + second + }; + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + // introduce conflict with the remote commit + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + + controller + .unapply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + assert!(matches!( + controller + .cherry_pick(&project_id, &branch_id, commit_oid) + .await, + Err(ControllerError::Action(errors::CherryPickError::NotApplied)) + )); + } +} diff --git a/gitbutler-app/tests/suite/virtual_branches/create_commit.rs b/gitbutler-app/tests/suite/virtual_branches/create_commit.rs new file mode 100644 index 000000000..d47dfd511 --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/create_commit.rs @@ -0,0 +1,198 @@ +use super::*; + +#[tokio::test] +async fn should_lock_updated_hunks() { + let Test { + project_id, + controller, + repository, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // by default, hunks are not locked + + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); + assert_eq!(branch.files[0].hunks.len(), 1); + assert!(!branch.files[0].hunks[0].locked); + } + + controller + .create_commit(&project_id, &branch_id, "test", None, false) + .await + .unwrap(); + + { + // change in the committed hunks leads to hunk locking + fs::write(repository.path().join("file.txt"), "updated content").unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); + assert_eq!(branch.files[0].hunks.len(), 1); + assert!(branch.files[0].hunks[0].locked); + } +} + +#[tokio::test] +async fn should_not_lock_disjointed_hunks() { + let Test { + project_id, + controller, + repository, + .. 
+ } = Test::default(); + + let mut lines: Vec<_> = (0_i32..24_i32).map(|i| format!("line {}", i)).collect(); + fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); + repository.commit_all("my commit"); + repository.push(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // new hunk in the middle of the file + lines[12] = "commited stuff".to_string(); + fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); + assert_eq!(branch.files[0].hunks.len(), 1); + assert!(!branch.files[0].hunks[0].locked); + } + + controller + .create_commit(&project_id, &branch_id, "test commit", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + { + // hunk before the commited part is not locked + let mut changed_lines = lines.clone(); + changed_lines[0] = "updated line".to_string(); + fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap(); + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); + assert_eq!(branch.files[0].hunks.len(), 1); + assert!(!branch.files[0].hunks[0].locked); + // cleanup + fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); + } + { + // hunk after the commited part is not locked + let mut changed_lines = lines.clone(); + changed_lines[23] = "updated line".to_string(); + fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap(); + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); + assert_eq!(branch.files[0].hunks.len(), 1); + assert!(!branch.files[0].hunks[0].locked); + // cleanup + fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); + } + { + // hunk before the commited part but with overlapping context + let mut changed_lines = lines.clone(); + changed_lines[10] = "updated line".to_string(); + fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap(); + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); + assert_eq!(branch.files[0].hunks.len(), 1); + // TODO: We lock this hunk, but can we afford not lock it? 
+ assert!(branch.files[0].hunks[0].locked); + // cleanup + fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); + } + { + // hunk after the commited part but with overlapping context + let mut changed_lines = lines.clone(); + changed_lines[14] = "updated line".to_string(); + fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap(); + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); + assert_eq!(branch.files[0].hunks.len(), 1); + // TODO: We lock this hunk, but can we afford not lock it? + assert!(branch.files[0].hunks[0].locked); + // cleanup + fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); + } +} diff --git a/gitbutler-app/tests/suite/virtual_branches/create_virtual_branch_from_branch.rs b/gitbutler-app/tests/suite/virtual_branches/create_virtual_branch_from_branch.rs new file mode 100644 index 000000000..29d5fb1a1 --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/create_virtual_branch_from_branch.rs @@ -0,0 +1,382 @@ +use super::*; + +#[tokio::test] +async fn integration() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_name = { + // make a remote branch + + let branch_id = controller + .create_virtual_branch(&project_id, &super::branch::BranchCreateRequest::default()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "first\n").unwrap(); + controller + .create_commit(&project_id, &branch_id, "first", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|branch| branch.id == branch_id) + .unwrap(); + + let name = branch.upstream.unwrap().name; + + controller + .delete_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + name + }; + + // checkout a existing remote branch + let branch_id = controller + .create_virtual_branch_from_branch(&project_id, &branch_name) + .await + .unwrap(); + + { + // add a commit + std::fs::write(repository.path().join("file.txt"), "first\nsecond").unwrap(); + + controller + .create_commit(&project_id, &branch_id, "second", None, false) + .await + .unwrap(); + } + + { + // meanwhile, there is a new commit on master + repository.checkout(&"refs/heads/master".parse().unwrap()); + std::fs::write(repository.path().join("another.txt"), "").unwrap(); + repository.commit_all("another"); + repository.push_branch(&"refs/heads/master".parse().unwrap()); + repository.checkout(&"refs/heads/gitbutler/integration".parse().unwrap()); + } + + { + // merge branch into master + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|branch| branch.id == branch_id) + .unwrap(); + + assert!(branch.commits[0].is_remote); + assert!(!branch.commits[0].is_integrated); + assert!(branch.commits[1].is_remote); + assert!(!branch.commits[1].is_integrated); + + repository.rebase_and_merge(&branch_name); + } + + { + // should mark commits as 
integrated + controller + .fetch_from_target(&project_id, None) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|branch| branch.id == branch_id) + .unwrap(); + + assert!(branch.commits[0].is_remote); + assert!(branch.commits[0].is_integrated); + assert!(branch.commits[1].is_remote); + assert!(branch.commits[1].is_integrated); + } +} + +#[tokio::test] +async fn no_conflicts() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + { + // create a remote branch + let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); + repository.checkout(&branch_name); + fs::write(repository.path().join("file.txt"), "first").unwrap(); + repository.commit_all("first"); + repository.push_branch(&branch_name); + repository.checkout(&"refs/heads/master".parse().unwrap()); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert!(branches.is_empty()); + + let branch_id = controller + .create_virtual_branch_from_branch( + &project_id, + &"refs/remotes/origin/branch".parse().unwrap(), + ) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].description, "first"); +} + +#[tokio::test] +async fn conflicts_with_uncommited() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + { + // create a remote branch + let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); + repository.checkout(&branch_name); + fs::write(repository.path().join("file.txt"), "first").unwrap(); + repository.commit_all("first"); + repository.push_branch(&branch_name); + repository.checkout(&"refs/heads/master".parse().unwrap()); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // create a local branch that conflicts with remote + { + std::fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + }; + + // branch should be created unapplied, because of the conflict + + let new_branch_id = controller + .create_virtual_branch_from_branch( + &project_id, + &"refs/remotes/origin/branch".parse().unwrap(), + ) + .await + .unwrap(); + let new_branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|branch| branch.id == new_branch_id) + .unwrap(); + assert!(!new_branch.active); + assert_eq!(new_branch.commits.len(), 1); + assert!(new_branch.upstream.is_some()); +} + +#[tokio::test] +async fn conflicts_with_commited() { + let Test { + repository, + project_id, + controller, + .. 
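Review note: the "create a remote branch" setup block above is repeated verbatim in several tests of this file and again further down. A hedged sketch of how it could be factored out, using only the TestProject calls already exercised here (`checkout`, `commit_all`, `push_branch`); the helper name and the fixed `file.txt` path are illustrative, not existing utilities.

fn push_remote_branch(repository: &TestProject, content: &str) {
    // Create refs/heads/branch with one commit on `file.txt`, publish it, and
    // return the working copy to master, exactly as the inline blocks do.
    let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap();
    repository.checkout(&branch_name);
    fs::write(repository.path().join("file.txt"), content).unwrap();
    repository.commit_all(content);
    repository.push_branch(&branch_name);
    repository.checkout(&"refs/heads/master".parse().unwrap());
}

A test body would then open with `push_remote_branch(&repository, "first");` before calling `set_base_branch`.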
+ } = Test::default(); + + { + // create a remote branch + let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); + repository.checkout(&branch_name); + fs::write(repository.path().join("file.txt"), "first").unwrap(); + repository.commit_all("first"); + repository.push_branch(&branch_name); + repository.checkout(&"refs/heads/master".parse().unwrap()); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // create a local branch that conflicts with remote + { + std::fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + controller + .create_commit(&project_id, &branches[0].id, "hej", None, false) + .await + .unwrap(); + }; + + // branch should be created unapplied, because of the conflict + + let new_branch_id = controller + .create_virtual_branch_from_branch( + &project_id, + &"refs/remotes/origin/branch".parse().unwrap(), + ) + .await + .unwrap(); + let new_branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|branch| branch.id == new_branch_id) + .unwrap(); + assert!(!new_branch.active); + assert_eq!(new_branch.commits.len(), 1); + assert!(new_branch.upstream.is_some()); +} + +#[tokio::test] +async fn from_default_target() { + let Test { + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // branch should be created unapplied, because of the conflict + + assert!(matches!( + controller + .create_virtual_branch_from_branch( + &project_id, + &"refs/remotes/origin/master".parse().unwrap(), + ) + .await + .unwrap_err(), + ControllerError::Action( + errors::CreateVirtualBranchFromBranchError::CantMakeBranchFromDefaultTarget + ) + )); +} + +#[tokio::test] +async fn from_non_existent_branch() { + let Test { + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // branch should be created unapplied, because of the conflict + + assert!(matches!( + controller + .create_virtual_branch_from_branch( + &project_id, + &"refs/remotes/origin/branch".parse().unwrap(), + ) + .await + .unwrap_err(), + ControllerError::Action(errors::CreateVirtualBranchFromBranchError::BranchNotFound( + _ + )) + )); +} + +#[tokio::test] +async fn from_state_remote_branch() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + { + // create a remote branch + let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); + repository.checkout(&branch_name); + fs::write(repository.path().join("file.txt"), "branch commit").unwrap(); + repository.commit_all("branch commit"); + repository.push_branch(&branch_name); + repository.checkout(&"refs/heads/master".parse().unwrap()); + + // make remote branch stale + std::fs::write(repository.path().join("antoher_file.txt"), "master commit").unwrap(); + repository.commit_all("master commit"); + repository.push(); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch_from_branch( + &project_id, + &"refs/remotes/origin/branch".parse().unwrap(), + ) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert_eq!(branches[0].commits.len(), 1); + assert!(branches[0].files.is_empty()); + assert_eq!(branches[0].commits[0].description, "branch commit"); +} diff --git a/gitbutler-app/tests/suite/virtual_branches/delete_virtual_branch.rs b/gitbutler-app/tests/suite/virtual_branches/delete_virtual_branch.rs new file mode 100644 index 000000000..9b91f9a4a --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/delete_virtual_branch.rs @@ -0,0 +1,78 @@ +use super::*; + +#[tokio::test] +async fn should_unapply_diff() { + let Test { + project_id, + controller, + repository, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // write some + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + + controller + .delete_virtual_branch(&project_id, &branches[0].id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 0); + assert!(!repository.path().join("file.txt").exists()); + + let refnames = repository + .references() + .into_iter() + .filter_map(|reference| reference.name().map(|name| name.to_string())) + .collect::>(); + assert!(!refnames.contains(&"refs/gitbutler/name".to_string())); +} + +#[tokio::test] +async fn should_remove_reference() { + let Test { + project_id, + controller, + repository, + .. 
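Review note: the `repository.references()` / `filter_map` chain used above to collect reference names recurs throughout the suite, and in this patch its `collect` turbofish appears to have been stripped in transit (`.collect::>()`), presumably from something like `::<Vec<_>>`. A small helper would remove the repetition and sidestep the annotation entirely; this sketch reuses only the calls already shown and carries a hypothetical name.

fn refnames(repository: &TestProject) -> Vec<String> {
    // Collect every named reference in the test repository as a plain string.
    repository
        .references()
        .into_iter()
        .filter_map(|reference| reference.name().map(|name| name.to_string()))
        .collect()
}

The assertions then read as `assert!(!refnames(&repository).contains(&"refs/gitbutler/name".to_string()));`.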
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let id = controller + .create_virtual_branch( + &project_id, + &branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + controller + .delete_virtual_branch(&project_id, &id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 0); + + let refnames = repository + .references() + .into_iter() + .filter_map(|reference| reference.name().map(|name| name.to_string())) + .collect::>(); + assert!(!refnames.contains(&"refs/gitbutler/name".to_string())); +} diff --git a/gitbutler-app/tests/suite/virtual_branches/fetch_from_target.rs b/gitbutler-app/tests/suite/virtual_branches/fetch_from_target.rs new file mode 100644 index 000000000..b46b2a3b9 --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/fetch_from_target.rs @@ -0,0 +1,46 @@ +use super::*; + +#[tokio::test] +async fn should_update_last_fetched() { + let Test { + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let before_fetch = controller.get_base_branch_data(&project_id).await.unwrap(); + assert!(before_fetch.unwrap().last_fetched_ms.is_none()); + + let fetch = controller + .fetch_from_target(&project_id, None) + .await + .unwrap(); + assert!(fetch.last_fetched_ms.is_some()); + + let after_fetch = controller.get_base_branch_data(&project_id).await.unwrap(); + assert!(after_fetch.as_ref().unwrap().last_fetched_ms.is_some()); + assert_eq!(fetch.last_fetched_ms, after_fetch.unwrap().last_fetched_ms); + + let second_fetch = controller + .fetch_from_target(&project_id, None) + .await + .unwrap(); + assert!(second_fetch.last_fetched_ms.is_some()); + assert_ne!(fetch.last_fetched_ms, second_fetch.last_fetched_ms); + + let after_second_fetch = controller.get_base_branch_data(&project_id).await.unwrap(); + assert!(after_second_fetch + .as_ref() + .unwrap() + .last_fetched_ms + .is_some()); + assert_eq!( + second_fetch.last_fetched_ms, + after_second_fetch.unwrap().last_fetched_ms + ); +} diff --git a/gitbutler-app/tests/suite/virtual_branches/init.rs b/gitbutler-app/tests/suite/virtual_branches/init.rs new file mode 100644 index 000000000..40adbfb98 --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/init.rs @@ -0,0 +1,211 @@ +use super::*; + +#[tokio::test] +async fn twice() { + let data_dir = paths::data_dir(); + let keys = keys::Controller::from_path(&data_dir); + let projects = projects::Controller::from_path(&data_dir); + let users = users::Controller::from_path(&data_dir); + let helper = git::credentials::Helper::from_path(&data_dir); + + let test_project = TestProject::default(); + + let controller = Controller::new(data_dir, projects.clone(), users, keys, helper); + + { + let project = projects + .add(test_project.path()) + .expect("failed to add project"); + controller + .set_base_branch(&project.id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + assert!(controller + .list_virtual_branches(&project.id) + .await + .unwrap() + .0 + .is_empty()); + projects.delete(&project.id).await.unwrap(); + controller + .list_virtual_branches(&project.id) + .await + .unwrap_err(); + } + + { + let project = projects.add(test_project.path()).unwrap(); + controller + .set_base_branch(&project.id, 
&"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // even though project is on gitbutler/integration, we should not import it + assert!(controller + .list_virtual_branches(&project.id) + .await + .unwrap() + .0 + .is_empty()); + } +} + +#[tokio::test] +async fn dirty_non_target() { + // a situation when you initialize project while being on the local verison of the master + // that has uncommited changes. + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + repository.checkout(&"refs/heads/some-feature".parse().unwrap()); + + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].files[0].hunks.len(), 1); + assert!(branches[0].upstream.is_none()); + assert_eq!(branches[0].name, "some-feature"); +} + +#[tokio::test] +async fn dirty_target() { + // a situation when you initialize project while being on the local verison of the master + // that has uncommited changes. + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].files[0].hunks.len(), 1); + assert!(branches[0].upstream.is_none()); + assert_eq!(branches[0].name, "master"); +} + +#[tokio::test] +async fn commit_on_non_target_local() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + repository.checkout(&"refs/heads/some-feature".parse().unwrap()); + fs::write(repository.path().join("file.txt"), "content").unwrap(); + repository.commit_all("commit on target"); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].files.is_empty()); + assert_eq!(branches[0].commits.len(), 1); + assert!(branches[0].upstream.is_none()); + assert_eq!(branches[0].name, "some-feature"); +} + +#[tokio::test] +async fn commit_on_non_target_remote() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + repository.checkout(&"refs/heads/some-feature".parse().unwrap()); + fs::write(repository.path().join("file.txt"), "content").unwrap(); + repository.commit_all("commit on target"); + repository.push_branch(&"refs/heads/some-feature".parse().unwrap()); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].files.is_empty()); + assert_eq!(branches[0].commits.len(), 1); + assert!(branches[0].upstream.is_some()); + assert_eq!(branches[0].name, "some-feature"); +} + +#[tokio::test] +async fn commit_on_target() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + fs::write(repository.path().join("file.txt"), "content").unwrap(); + repository.commit_all("commit on target"); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].files.is_empty()); + assert_eq!(branches[0].commits.len(), 1); + assert!(branches[0].upstream.is_none()); + assert_eq!(branches[0].name, "master"); +} + +#[tokio::test] +async fn submodule() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + let submodule_url: git::Url = TestProject::default() + .path() + .display() + .to_string() + .parse() + .unwrap(); + repository.add_submodule(&submodule_url, path::Path::new("submodule")); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].files[0].hunks.len(), 1); +} diff --git a/gitbutler-app/tests/suite/virtual_branches/mod.rs b/gitbutler-app/tests/suite/virtual_branches/mod.rs new file mode 100644 index 000000000..9b0225fea --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/mod.rs @@ -0,0 +1,165 @@ +//TODO(ST): see if this can be fixed +#![allow( + clippy::redundant_closure_for_method_calls, + clippy::rest_pat_in_fully_bound_structs, + clippy::dbg_macro +)] + +use std::{fs, path, str::FromStr}; + +use crate::common::{paths, TestProject}; +use gitbutler_app::{ + git, keys, + projects::{self, ProjectId}, + users, + virtual_branches::{branch, controller::ControllerError, errors, Controller}, +}; + +struct Test { + repository: TestProject, + project_id: ProjectId, + projects: projects::Controller, + controller: Controller, +} + +impl Default for Test { + fn default() -> Self { + let data_dir = paths::data_dir(); + let keys = keys::Controller::from_path(&data_dir); + let projects = projects::Controller::from_path(&data_dir); + let users = users::Controller::from_path(&data_dir); + let helper = git::credentials::Helper::from_path(&data_dir); + + let test_project = TestProject::default(); + let project = projects + .add(test_project.path()) + .expect("failed to add project"); + + Self { + repository: test_project, + project_id: project.id, + controller: Controller::new(data_dir, projects.clone(), users, keys, helper), + projects, + } + } +} + +mod amend; +mod apply_virtual_branch; +mod cherry_pick; +mod create_commit; +mod create_virtual_branch_from_branch; +mod delete_virtual_branch; +mod fetch_from_target; +mod init; +mod move_commit_to_vbranch; +mod references; +mod reset_virtual_branch; +mod selected_for_changes; +mod set_base_branch; +mod squash; +mod unapply; +mod unapply_ownership; +mod update_base_branch; +mod update_commit_message; +mod upstream; + +#[tokio::test] +async fn resolve_conflict_flow() { + let Test { + repository, + project_id, + controller, + .. 
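Review note on the shared `Test` fixture above: almost every test re-derives a single branch via `list_virtual_branches(..).0.into_iter().find(|b| b.id == id)`. Since the fixture already owns the controller and project id, it could grow a lookup helper along these lines. This is a sketch only, assuming the per-branch type is `virtual_branches::VirtualBranch` and that the tuple layout of `list_virtual_branches` stays as used throughout this file.

impl Test {
    // Look up one virtual branch by id, panicking if it is not listed.
    async fn branch(
        &self,
        id: gitbutler_app::virtual_branches::BranchId,
    ) -> gitbutler_app::virtual_branches::VirtualBranch {
        self.controller
            .list_virtual_branches(&self.project_id)
            .await
            .unwrap()
            .0
            .into_iter()
            .find(|branch| branch.id == id)
            .expect("branch is listed")
    }
}

Call sites would shrink to `let branch = test.branch(branch_id).await;`; note, though, that the tests currently destructure `Test` into its fields, so adopting this would also mean keeping the fixture value whole.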
+ } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch1_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert!(branches[0].active); + + branch1_id + }; + + { + // fetch remote + controller.update_base_branch(&project_id).await.unwrap(); + + // there is a conflict now, so the branch should be inactive + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert!(!branches[0].active); + } + + { + // when we apply conflicted branch, it has conflict + controller + .apply_virtual_branch(&project_id, &branch1_id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + + // and the conflict markers are in the file + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + + { + // can't commit conflicts + assert!(matches!( + controller + .create_commit(&project_id, &branch1_id, "commit conflicts", None, false) + .await, + Err(ControllerError::Action(errors::CommitError::Conflicted(_))) + )); + } + + { + // fixing the conflict removes conflicted mark + fs::write(repository.path().join("file.txt"), "resolved").unwrap(); + let commit_oid = controller + .create_commit(&project_id, &branch1_id, "resolution", None, false) + .await + .unwrap(); + + let commit = repository.find_commit(commit_oid).unwrap(); + assert_eq!(commit.parent_count(), 2); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + } +} diff --git a/gitbutler-app/tests/suite/virtual_branches/move_commit_to_vbranch.rs b/gitbutler-app/tests/suite/virtual_branches/move_commit_to_vbranch.rs new file mode 100644 index 000000000..baa6b5fe2 --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/move_commit_to_vbranch.rs @@ -0,0 +1,324 @@ +use crate::suite::virtual_branches::Test; +use gitbutler_app::git; +use gitbutler_app::virtual_branches::controller::ControllerError; +use gitbutler_app::virtual_branches::{branch, errors, BranchId}; +use std::str::FromStr; + +#[tokio::test] +async fn no_diffs() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + let source_branch_id = branches[0].id; + + let commit_oid = controller + .create_commit(&project_id, &source_branch_id, "commit", None, false) + .await + .unwrap(); + + let target_branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + controller + .move_commit(&project_id, &target_branch_id, commit_oid) + .await + .unwrap(); + + let destination_branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == target_branch_id) + .unwrap(); + + let source_branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == source_branch_id) + .unwrap(); + + assert_eq!(destination_branch.commits.len(), 1); + assert_eq!(destination_branch.files.len(), 0); + assert_eq!(source_branch.commits.len(), 0); + assert_eq!(source_branch.files.len(), 0); +} + +#[tokio::test] +async fn diffs_on_source_branch() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + let source_branch_id = branches[0].id; + + let commit_oid = controller + .create_commit(&project_id, &source_branch_id, "commit", None, false) + .await + .unwrap(); + + std::fs::write( + repository.path().join("another file.txt"), + "another content", + ) + .unwrap(); + + let target_branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + controller + .move_commit(&project_id, &target_branch_id, commit_oid) + .await + .unwrap(); + + let destination_branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == target_branch_id) + .unwrap(); + + let source_branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == source_branch_id) + .unwrap(); + + assert_eq!(destination_branch.commits.len(), 1); + assert_eq!(destination_branch.files.len(), 0); + assert_eq!(source_branch.commits.len(), 0); + assert_eq!(source_branch.files.len(), 1); +} + +#[tokio::test] +async fn diffs_on_target_branch() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + let source_branch_id = branches[0].id; + + let commit_oid = controller + .create_commit(&project_id, &source_branch_id, "commit", None, false) + .await + .unwrap(); + + let target_branch_id = controller + .create_virtual_branch( + &project_id, + &branch::BranchCreateRequest { + selected_for_changes: Some(true), + ..Default::default() + }, + ) + .await + .unwrap(); + + std::fs::write( + repository.path().join("another file.txt"), + "another content", + ) + .unwrap(); + + controller + .move_commit(&project_id, &target_branch_id, commit_oid) + .await + .unwrap(); + + let destination_branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == target_branch_id) + .unwrap(); + + let source_branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == source_branch_id) + .unwrap(); + + assert_eq!(destination_branch.commits.len(), 1); + assert_eq!(destination_branch.files.len(), 1); + assert_eq!(source_branch.commits.len(), 0); + assert_eq!(source_branch.files.len(), 0); +} + +#[tokio::test] +async fn locked_hunks_on_source_branch() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + let source_branch_id = branches[0].id; + + let commit_oid = controller + .create_commit(&project_id, &source_branch_id, "commit", None, false) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "locked content").unwrap(); + + let target_branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + assert!(matches!( + controller + .move_commit(&project_id, &target_branch_id, commit_oid) + .await + .unwrap_err(), + ControllerError::Action(errors::MoveCommitError::SourceLocked) + )); +} + +#[tokio::test] +async fn no_commit() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + let source_branch_id = branches[0].id; + + controller + .create_commit(&project_id, &source_branch_id, "commit", None, false) + .await + .unwrap(); + + let target_branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + assert!(matches!( + controller + .move_commit( + &project_id, + &target_branch_id, + git::Oid::from_str("a99c95cca7a60f1a2180c2f86fb18af97333c192").unwrap() + ) + .await + .unwrap_err(), + ControllerError::Action(errors::MoveCommitError::CommitNotFound(_)) + )); +} + +#[tokio::test] +async fn no_branch() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + let source_branch_id = branches[0].id; + + let commit_oid = controller + .create_commit(&project_id, &source_branch_id, "commit", None, false) + .await + .unwrap(); + + assert!(matches!( + controller + .move_commit(&project_id, &BranchId::generate(), commit_oid) + .await + .unwrap_err(), + ControllerError::Action(errors::MoveCommitError::BranchNotFound(_)) + )); +} diff --git a/gitbutler-app/tests/suite/virtual_branches/references.rs b/gitbutler-app/tests/suite/virtual_branches/references.rs new file mode 100644 index 000000000..5a3ce6e3f --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/references.rs @@ -0,0 +1,366 @@ +use super::*; + +mod create_virtual_branch { + use super::*; + + #[tokio::test] + async fn simple() { + let Test { + project_id, + controller, + repository, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert_eq!(branches[0].name, "Virtual branch"); + + let refnames = repository + .references() + .into_iter() + .filter_map(|reference| reference.name().map(|name| name.to_string())) + .collect::>(); + assert!(refnames.contains(&"refs/gitbutler/Virtual-branch".to_string())); + } + + #[tokio::test] + async fn duplicate_name() { + let Test { + project_id, + controller, + repository, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch( + &project_id, + &gitbutler_app::virtual_branches::branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + let branch2_id = controller + .create_virtual_branch( + &project_id, + &gitbutler_app::virtual_branches::branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 2); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].name, "name"); + assert_eq!(branches[1].id, branch2_id); + assert_eq!(branches[1].name, "name 1"); + + let refnames = repository + .references() + .into_iter() + .filter_map(|reference| reference.name().map(|name| name.to_string())) + .collect::>(); + assert!(refnames.contains(&"refs/gitbutler/name".to_string())); + assert!(refnames.contains(&"refs/gitbutler/name-1".to_string())); + } +} + +mod update_virtual_branch { + use super::*; + + #[tokio::test] + async fn simple() { + let Test { + project_id, + controller, + repository, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch( + &project_id, + &branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + controller + .update_virtual_branch( + &project_id, + branch::BranchUpdateRequest { + id: branch_id, + name: Some("new name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert_eq!(branches[0].name, "new name"); + + let refnames = repository + .references() + .into_iter() + .filter_map(|reference| reference.name().map(|name| name.to_string())) + .collect::>(); + assert!(!refnames.contains(&"refs/gitbutler/name".to_string())); + assert!(refnames.contains(&"refs/gitbutler/new-name".to_string())); + } + + #[tokio::test] + async fn duplicate_name() { + let Test { + project_id, + controller, + repository, + .. 
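Review note: the reference assertions in this module encode two conventions worth calling out. A virtual branch named `name` is backed by `refs/gitbutler/name` with spaces turned into dashes (so the default "Virtual branch" becomes `refs/gitbutler/Virtual-branch`), and a duplicate name gets a ` 1` suffix, hence `refs/gitbutler/name-1`. Below is a rough std-only model of the name-to-ref mapping exactly as these tests exercise it; the real normalization may handle more than spaces.

fn gitbutler_refname(branch_name: &str) -> String {
    // Slugify the display name into the per-branch gitbutler reference.
    format!("refs/gitbutler/{}", branch_name.replace(' ', "-"))
}

#[test]
fn refname_mapping_matches_the_assertions_here() {
    assert_eq!(gitbutler_refname("Virtual branch"), "refs/gitbutler/Virtual-branch");
    assert_eq!(gitbutler_refname("name"), "refs/gitbutler/name");
    assert_eq!(gitbutler_refname("name 1"), "refs/gitbutler/name-1");
    assert_eq!(gitbutler_refname("new name"), "refs/gitbutler/new-name");
}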
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch( + &project_id, + &branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + let branch2_id = controller + .create_virtual_branch( + &project_id, + &branch::BranchCreateRequest { + ..Default::default() + }, + ) + .await + .unwrap(); + + controller + .update_virtual_branch( + &project_id, + branch::BranchUpdateRequest { + id: branch2_id, + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 2); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].name, "name"); + assert_eq!(branches[1].id, branch2_id); + assert_eq!(branches[1].name, "name 1"); + + let refnames = repository + .references() + .into_iter() + .filter_map(|reference| reference.name().map(|name| name.to_string())) + .collect::>(); + assert!(refnames.contains(&"refs/gitbutler/name".to_string())); + assert!(refnames.contains(&"refs/gitbutler/name-1".to_string())); + } +} + +mod push_virtual_branch { + + use super::*; + + #[tokio::test] + async fn simple() { + let Test { + project_id, + controller, + repository, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch( + &project_id, + &branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + controller + .create_commit(&project_id, &branch1_id, "test", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(&project_id, &branch1_id, false, None) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].name, "name"); + assert_eq!( + branches[0].upstream.as_ref().unwrap().name.to_string(), + "refs/remotes/origin/name" + ); + + let refnames = repository + .references() + .into_iter() + .filter_map(|reference| reference.name().map(|name| name.to_string())) + .collect::>(); + assert!(refnames.contains(&branches[0].upstream.clone().unwrap().name.to_string())); + } + + #[tokio::test] + async fn duplicate_names() { + let Test { + project_id, + controller, + repository, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = { + // create and push branch with some work + let branch1_id = controller + .create_virtual_branch( + &project_id, + &branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(&project_id, &branch1_id, "test", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(&project_id, &branch1_id, false, None) + .await + .unwrap(); + branch1_id + }; + + // rename first branch + controller + .update_virtual_branch( + &project_id, + branch::BranchUpdateRequest { + id: branch1_id, + name: Some("updated name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + let branch2_id = { + // create another branch with first branch's old name and push it + let branch2_id = controller + .create_virtual_branch( + &project_id, + &branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + fs::write(repository.path().join("file.txt"), "updated content").unwrap(); + controller + .create_commit(&project_id, &branch2_id, "test", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(&project_id, &branch2_id, false, None) + .await + .unwrap(); + branch2_id + }; + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 2); + // first branch is pushing to old ref remotely + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].name, "updated name"); + assert_eq!( + branches[0].upstream.as_ref().unwrap().name, + "refs/remotes/origin/name".parse().unwrap() + ); + // new branch is pushing to new ref remotely + assert_eq!(branches[1].id, branch2_id); + assert_eq!(branches[1].name, "name"); + assert_eq!( + branches[1].upstream.as_ref().unwrap().name, + "refs/remotes/origin/name-1".parse().unwrap() + ); + + let refnames = repository + .references() + .into_iter() + .filter_map(|reference| reference.name().map(|name| name.to_string())) + .collect::>(); + assert!(refnames.contains(&branches[0].upstream.clone().unwrap().name.to_string())); + assert!(refnames.contains(&branches[1].upstream.clone().unwrap().name.to_string())); + } +} diff --git a/gitbutler-app/tests/suite/virtual_branches/reset_virtual_branch.rs b/gitbutler-app/tests/suite/virtual_branches/reset_virtual_branch.rs new file mode 100644 index 000000000..823f001cb --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/reset_virtual_branch.rs @@ -0,0 +1,267 @@ +use crate::suite::virtual_branches::Test; +use gitbutler_app::virtual_branches::{ + branch, controller::ControllerError, errors::ResetBranchError, +}; +use std::fs; + +#[tokio::test] +async fn to_head() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let oid = { + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + // commit changes + let oid = controller + .create_commit(&project_id, &branch1_id, "commit", None, false) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].id, oid); + assert_eq!(branches[0].files.len(), 0); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + + oid + }; + + { + // reset changes to head + controller + .reset_virtual_branch(&project_id, &branch1_id, oid) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].id, oid); + assert_eq!(branches[0].files.len(), 0); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + } +} + +#[tokio::test] +async fn to_target() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + let base_branch = controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + // commit changes + let oid = controller + .create_commit(&project_id, &branch1_id, "commit", None, false) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].id, oid); + assert_eq!(branches[0].files.len(), 0); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + } + + { + // reset changes to head + controller + .reset_virtual_branch(&project_id, &branch1_id, base_branch.base_sha) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 0); + assert_eq!(branches[0].files.len(), 1); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + } +} + +#[tokio::test] +async fn to_commit() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let first_commit_oid = { + // commit some changes + + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let oid = controller + .create_commit(&project_id, &branch1_id, "commit", None, false) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].id, oid); + assert_eq!(branches[0].files.len(), 0); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + + oid + }; + + { + // commit some more + fs::write(repository.path().join("file.txt"), "more content").unwrap(); + + let second_commit_oid = controller + .create_commit(&project_id, &branch1_id, "commit", None, false) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 2); + assert_eq!(branches[0].commits[0].id, second_commit_oid); + assert_eq!(branches[0].commits[1].id, first_commit_oid); + assert_eq!(branches[0].files.len(), 0); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "more content" + ); + } + + { + // reset changes to the first commit + controller + .reset_virtual_branch(&project_id, &branch1_id, first_commit_oid) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].id, first_commit_oid); + assert_eq!(branches[0].files.len(), 1); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "more content" + ); + } +} + +#[tokio::test] +async fn to_non_existing() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + // commit changes + let oid = controller + .create_commit(&project_id, &branch1_id, "commit", None, false) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].id, oid); + assert_eq!(branches[0].files.len(), 0); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + + oid + }; + + assert!(matches!( + controller + .reset_virtual_branch( + &project_id, + &branch1_id, + "fe14df8c66b73c6276f7bb26102ad91da680afcb".parse().unwrap() + ) + .await, + Err(ControllerError::Action( + ResetBranchError::CommitNotFoundInBranch(_) + )) + )); +} diff --git a/gitbutler-app/tests/suite/virtual_branches/selected_for_changes.rs b/gitbutler-app/tests/suite/virtual_branches/selected_for_changes.rs new file mode 100644 index 000000000..9bba1e7ab --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/selected_for_changes.rs @@ -0,0 +1,375 @@ +use super::*; + +#[tokio::test] +async fn unapplying_selected_branch_selects_anther() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file one.txt"), "").unwrap(); + + // first branch should be created as default + let b_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + // if default branch exists, new branch should not be created as default + let b2_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + + let b = branches.iter().find(|b| b.id == b_id).unwrap(); + + let b2 = branches.iter().find(|b| b.id == b2_id).unwrap(); + + assert!(b.selected_for_changes); + assert!(!b2.selected_for_changes); + + controller + .unapply_virtual_branch(&project_id, &b_id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + + assert_eq!(branches.len(), 2); + assert_eq!(branches[0].id, b.id); + assert!(!branches[0].selected_for_changes); + assert!(!branches[0].active); + assert_eq!(branches[1].id, b2.id); + assert!(branches[1].selected_for_changes); + assert!(branches[1].active); +} + +#[tokio::test] +async fn deleting_selected_branch_selects_anther() { + let Test { + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // first branch should be created as default + let b_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + // if default branch exists, new branch should not be created as default + let b2_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + + let b = branches.iter().find(|b| b.id == b_id).unwrap(); + + let b2 = branches.iter().find(|b| b.id == b2_id).unwrap(); + + assert!(b.selected_for_changes); + assert!(!b2.selected_for_changes); + + controller + .delete_virtual_branch(&project_id, &b_id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, b2.id); + assert!(branches[0].selected_for_changes); +} + +#[tokio::test] +async fn create_virtual_branch_should_set_selected_for_changes() { + let Test { + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // first branch should be created as default + let b_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b_id) + .unwrap(); + assert!(branch.selected_for_changes); + + // if default branch exists, new branch should not be created as default + let b_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b_id) + .unwrap(); + assert!(!branch.selected_for_changes); + + // explicitly don't make this one default + let b_id = controller + .create_virtual_branch( + &project_id, + &branch::BranchCreateRequest { + selected_for_changes: Some(false), + ..Default::default() + }, + ) + .await + .unwrap(); + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b_id) + .unwrap(); + assert!(!branch.selected_for_changes); + + // explicitly make this one default + let b_id = controller + .create_virtual_branch( + &project_id, + &branch::BranchCreateRequest { + selected_for_changes: Some(true), + ..Default::default() + }, + ) + .await + .unwrap(); + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b_id) + .unwrap(); + assert!(branch.selected_for_changes); +} + +#[tokio::test] +async fn update_virtual_branch_should_reset_selected_for_changes() { + let Test { + project_id, + controller, + .. 
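Review note: the selected_for_changes tests repeatedly list the branches and then check the flag on one of them. A tiny helper that answers "which branch currently receives new changes?" would make the expected hand-offs (create, update, unapply, delete) read more directly. This sketch uses only calls and fields already present in this suite; the function name is hypothetical.

async fn selected_branch_id(
    controller: &Controller,
    project_id: &ProjectId,
) -> Option<gitbutler_app::virtual_branches::BranchId> {
    controller
        .list_virtual_branches(project_id)
        .await
        .unwrap()
        .0
        .into_iter()
        .find(|branch| branch.selected_for_changes)
        .map(|branch| branch.id)
}

For example, after deleting the selected branch the expectation above becomes
`assert_eq!(selected_branch_id(&controller, &project_id).await, Some(b2_id));`.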
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let b1_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + let b1 = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b1_id) + .unwrap(); + assert!(b1.selected_for_changes); + + let b2_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + let b2 = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b2_id) + .unwrap(); + assert!(!b2.selected_for_changes); + + controller + .update_virtual_branch( + &project_id, + branch::BranchUpdateRequest { + id: b2_id, + selected_for_changes: Some(true), + ..Default::default() + }, + ) + .await + .unwrap(); + + let b1 = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b1_id) + .unwrap(); + assert!(!b1.selected_for_changes); + + let b2 = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b2_id) + .unwrap(); + assert!(b2.selected_for_changes); +} + +#[tokio::test] +async fn unapply_virtual_branch_should_reset_selected_for_changes() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let b1_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let b1 = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b1_id) + .unwrap(); + assert!(b1.selected_for_changes); + + controller + .unapply_virtual_branch(&project_id, &b1_id) + .await + .unwrap(); + + let b1 = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b1_id) + .unwrap(); + assert!(!b1.selected_for_changes); +} + +#[tokio::test] +async fn hunks_distribution() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches[0].files.len(), 1); + + controller + .create_virtual_branch( + &project_id, + &branch::BranchCreateRequest { + selected_for_changes: Some(true), + ..Default::default() + }, + ) + .await + .unwrap(); + std::fs::write(repository.path().join("another_file.txt"), "content").unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[1].files.len(), 1); +} + +#[tokio::test] +async fn applying_first_branch() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + controller + .unapply_virtual_branch(&project_id, &branches[0].id) + .await + .unwrap(); + controller + .apply_virtual_branch(&project_id, &branches[0].id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].active); + assert!(branches[0].selected_for_changes); +} diff --git a/gitbutler-app/tests/suite/virtual_branches/set_base_branch.rs b/gitbutler-app/tests/suite/virtual_branches/set_base_branch.rs new file mode 100644 index 000000000..ca9e57d95 --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/set_base_branch.rs @@ -0,0 +1,235 @@ +use super::*; + +#[tokio::test] +async fn success() { + let Test { + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); +} + +mod error { + use super::*; + + #[tokio::test] + async fn missing() { + let Test { + project_id, + controller, + .. + } = Test::default(); + + assert!(matches!( + controller + .set_base_branch( + &project_id, + &git::RemoteRefname::from_str("refs/remotes/origin/missing").unwrap(), + ) + .await + .unwrap_err(), + ControllerError::Action(errors::SetBaseBranchError::BranchNotFound(_)) + )); + } +} + +mod go_back_to_integration { + use pretty_assertions::assert_eq; + + use super::*; + + #[tokio::test] + async fn should_preserve_applied_vbranches() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + std::fs::write(repository.path().join("file.txt"), "one").unwrap(); + let oid_one = repository.commit_all("one"); + std::fs::write(repository.path().join("file.txt"), "two").unwrap(); + repository.commit_all("two"); + repository.push(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let vbranch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + std::fs::write(repository.path().join("another file.txt"), "content").unwrap(); + controller + .create_commit(&project_id, &vbranch_id, "one", None, false) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + repository.checkout_commit(oid_one); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, vbranch_id); + assert!(branches[0].active); + } + + #[tokio::test] + async fn from_target_branch_index_conflicts() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + std::fs::write(repository.path().join("file.txt"), "one").unwrap(); + let oid_one = repository.commit_all("one"); + std::fs::write(repository.path().join("file.txt"), "two").unwrap(); + repository.commit_all("two"); + repository.push(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert!(branches.is_empty()); + + repository.checkout_commit(oid_one); + std::fs::write(repository.path().join("file.txt"), "tree").unwrap(); + + assert!(matches!( + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap_err(), + ControllerError::Action(errors::SetBaseBranchError::DirtyWorkingDirectory) + )); + } + + #[tokio::test] + async fn from_target_branch_with_uncommited() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + std::fs::write(repository.path().join("file.txt"), "one").unwrap(); + let oid_one = repository.commit_all("one"); + std::fs::write(repository.path().join("file.txt"), "two").unwrap(); + repository.commit_all("two"); + repository.push(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert!(branches.is_empty()); + + repository.checkout_commit(oid_one); + std::fs::write(repository.path().join("another file.txt"), "tree").unwrap(); + + assert!(matches!( + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .map_err(|error| dbg!(error)) + .unwrap_err(), + ControllerError::Action(errors::SetBaseBranchError::DirtyWorkingDirectory) + )); + } + + #[tokio::test] + async fn from_target_branch_with_commit() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + std::fs::write(repository.path().join("file.txt"), "one").unwrap(); + let oid_one = repository.commit_all("one"); + std::fs::write(repository.path().join("file.txt"), "two").unwrap(); + repository.commit_all("two"); + repository.push(); + + let base = controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert!(branches.is_empty()); + + repository.checkout_commit(oid_one); + std::fs::write(repository.path().join("another file.txt"), "tree").unwrap(); + repository.commit_all("three"); + + let base_two = controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 0); + assert_eq!(base_two, base); + } + + #[tokio::test] + async fn from_target_branch_without_any_changes() { + let Test { + repository, + project_id, + controller, + .. 
+        } = Test::default();
+
+        std::fs::write(repository.path().join("file.txt"), "one").unwrap();
+        let oid_one = repository.commit_all("one");
+        std::fs::write(repository.path().join("file.txt"), "two").unwrap();
+        repository.commit_all("two");
+        repository.push();
+
+        let base = controller
+            .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
+            .await
+            .unwrap();
+
+        let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
+        assert!(branches.is_empty());
+
+        repository.checkout_commit(oid_one);
+
+        let base_two = controller
+            .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
+            .await
+            .unwrap();
+
+        let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
+        assert_eq!(branches.len(), 0);
+        assert_eq!(base_two, base);
+    }
+}
diff --git a/gitbutler-app/tests/suite/virtual_branches/squash.rs b/gitbutler-app/tests/suite/virtual_branches/squash.rs
new file mode 100644
index 000000000..b0f2d9dc4
--- /dev/null
+++ b/gitbutler-app/tests/suite/virtual_branches/squash.rs
@@ -0,0 +1,356 @@
+use super::*;
+
+#[tokio::test]
+async fn head() {
+    let Test {
+        repository,
+        project_id,
+        controller,
+        ..
+    } = Test::default();
+
+    controller
+        .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
+        .await
+        .unwrap();
+
+    let branch_id = controller
+        .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
+        .await
+        .unwrap();
+
+    {
+        fs::write(repository.path().join("file one.txt"), "").unwrap();
+        controller
+            .create_commit(&project_id, &branch_id, "commit one", None, false)
+            .await
+            .unwrap()
+    };
+
+    {
+        fs::write(repository.path().join("file two.txt"), "").unwrap();
+        controller
+            .create_commit(&project_id, &branch_id, "commit two", None, false)
+            .await
+            .unwrap()
+    };
+
+    {
+        fs::write(repository.path().join("file three.txt"), "").unwrap();
+        controller
+            .create_commit(&project_id, &branch_id, "commit three", None, false)
+            .await
+            .unwrap()
+    };
+
+    let commit_four_oid = {
+        fs::write(repository.path().join("file four.txt"), "").unwrap();
+        controller
+            .create_commit(&project_id, &branch_id, "commit four", None, false)
+            .await
+            .unwrap()
+    };
+
+    controller
+        .squash(&project_id, &branch_id, commit_four_oid)
+        .await
+        .unwrap();
+
+    let branch = controller
+        .list_virtual_branches(&project_id)
+        .await
+        .unwrap()
+        .0
+        .into_iter()
+        .find(|b| b.id == branch_id)
+        .unwrap();
+
+    let descriptions = branch
+        .commits
+        .iter()
+        .map(|c| c.description.clone())
+        .collect::<Vec<_>>();
+    assert_eq!(
+        descriptions,
+        vec!["commit three\ncommit four", "commit two", "commit one"]
+    );
+}
+
+#[tokio::test]
+async fn middle() {
+    let Test {
+        repository,
+        project_id,
+        controller,
+        ..
+    } = Test::default();
+
+    controller
+        .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
+        .await
+        .unwrap();
+
+    let branch_id = controller
+        .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
+        .await
+        .unwrap();
+
+    {
+        fs::write(repository.path().join("file one.txt"), "").unwrap();
+        controller
+            .create_commit(&project_id, &branch_id, "commit one", None, false)
+            .await
+            .unwrap()
+    };
+
+    let commit_two_oid = {
+        fs::write(repository.path().join("file two.txt"), "").unwrap();
+        controller
+            .create_commit(&project_id, &branch_id, "commit two", None, false)
+            .await
+            .unwrap()
+    };
+
+    {
+        fs::write(repository.path().join("file three.txt"), "").unwrap();
+        controller
+            .create_commit(&project_id, &branch_id, "commit three", None, false)
+            .await
+            .unwrap()
+    };
+
+    {
+        fs::write(repository.path().join("file four.txt"), "").unwrap();
+        controller
+            .create_commit(&project_id, &branch_id, "commit four", None, false)
+            .await
+            .unwrap()
+    };
+
+    controller
+        .squash(&project_id, &branch_id, commit_two_oid)
+        .await
+        .unwrap();
+
+    let branch = controller
+        .list_virtual_branches(&project_id)
+        .await
+        .unwrap()
+        .0
+        .into_iter()
+        .find(|b| b.id == branch_id)
+        .unwrap();
+
+    let descriptions = branch
+        .commits
+        .iter()
+        .map(|c| c.description.clone())
+        .collect::<Vec<_>>();
+    assert_eq!(
+        descriptions,
+        vec!["commit four", "commit three", "commit one\ncommit two"]
+    );
+}
+
+#[tokio::test]
+async fn forcepush_allowed() {
+    let Test {
+        repository,
+        project_id,
+        controller,
+        projects,
+        ..
+    } = Test::default();
+
+    projects
+        .update(&projects::UpdateRequest {
+            id: project_id,
+            ok_with_force_push: Some(true),
+            ..Default::default()
+        })
+        .await
+        .unwrap();
+
+    controller
+        .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
+        .await
+        .unwrap();
+
+    let branch_id = controller
+        .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
+        .await
+        .unwrap();
+
+    {
+        fs::write(repository.path().join("file one.txt"), "").unwrap();
+        controller
+            .create_commit(&project_id, &branch_id, "commit one", None, false)
+            .await
+            .unwrap()
+    };
+
+    controller
+        .push_virtual_branch(&project_id, &branch_id, false, None)
+        .await
+        .unwrap();
+
+    let commit_two_oid = {
+        fs::write(repository.path().join("file two.txt"), "").unwrap();
+        controller
+            .create_commit(&project_id, &branch_id, "commit two", None, false)
+            .await
+            .unwrap()
+    };
+
+    {
+        fs::write(repository.path().join("file three.txt"), "").unwrap();
+        controller
+            .create_commit(&project_id, &branch_id, "commit three", None, false)
+            .await
+            .unwrap()
+    };
+
+    {
+        fs::write(repository.path().join("file four.txt"), "").unwrap();
+        controller
+            .create_commit(&project_id, &branch_id, "commit four", None, false)
+            .await
+            .unwrap()
+    };
+
+    controller
+        .squash(&project_id, &branch_id, commit_two_oid)
+        .await
+        .unwrap();
+
+    let branch = controller
+        .list_virtual_branches(&project_id)
+        .await
+        .unwrap()
+        .0
+        .into_iter()
+        .find(|b| b.id == branch_id)
+        .unwrap();
+
+    let descriptions = branch
+        .commits
+        .iter()
+        .map(|c| c.description.clone())
+        .collect::<Vec<_>>();
+    assert_eq!(
+        descriptions,
+        vec!["commit four", "commit three", "commit one\ncommit two"]
+    );
+    assert!(branch.requires_force);
+}
+
+#[tokio::test]
+async fn forcepush_forbidden() {
+    let Test {
+        repository,
+        project_id,
+        controller,
+        projects,
+        ..
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + projects + .update(&projects::UpdateRequest { + id: project_id, + ok_with_force_push: Some(false), + ..Default::default() + }) + .await + .unwrap(); + + let commit_two_oid = { + fs::write(repository.path().join("file two.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit two", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file three.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit three", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file four.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit four", None, false) + .await + .unwrap() + }; + + assert!(matches!( + controller + .squash(&project_id, &branch_id, commit_two_oid) + .await + .unwrap_err(), + ControllerError::Action(errors::SquashError::ForcePushNotAllowed(_)) + )); +} + +#[tokio::test] +async fn root() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one_oid = { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + assert!(matches!( + controller + .squash(&project_id, &branch_id, commit_one_oid) + .await + .unwrap_err(), + ControllerError::Action(errors::SquashError::CantSquashRootCommit) + )); +} diff --git a/gitbutler-app/tests/suite/virtual_branches/unapply.rs b/gitbutler-app/tests/suite/virtual_branches/unapply.rs new file mode 100644 index 000000000..f05e90768 --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/unapply.rs @@ -0,0 +1,177 @@ +use super::*; + +#[tokio::test] +async fn unapply_with_data() { + let Test { + project_id, + controller, + repository, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + controller + .unapply_virtual_branch(&project_id, &branches[0].id) + .await + .unwrap(); + + assert!(!repository.path().join("file.txt").exists()); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(!branches[0].active); +} + +#[tokio::test] +async fn conflicting() { + let Test { + project_id, + controller, + repository, + .. 
+ } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a conflicting branch, and stash it + + std::fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].base_current); + assert!(branches[0].active); + assert_eq!(branches[0].files[0].hunks[0].diff, "@@ -1 +1 @@\n-first\n\\ No newline at end of file\n+conflict\n\\ No newline at end of file\n"); + + controller + .unapply_virtual_branch(&project_id, &branches[0].id) + .await + .unwrap(); + + branches[0].id + }; + + { + // update base branch, causing conflict + controller.update_base_branch(&project_id).await.unwrap(); + + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "second" + ); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|branch| branch.id == branch_id) + .unwrap(); + assert!(!branch.base_current); + assert!(!branch.active); + } + + { + // apply branch, it should conflict + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert!(branch.base_current); + assert!(branch.conflicted); + assert_eq!(branch.files[0].hunks[0].diff, "@@ -1 +1,5 @@\n-first\n\\ No newline at end of file\n+<<<<<<< ours\n+conflict\n+=======\n+second\n+>>>>>>> theirs\n"); + } + + { + controller + .unapply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "second" + ); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert!(!branch.active); + assert!(!branch.base_current); + assert!(!branch.conflicted); + assert_eq!(branch.files[0].hunks[0].diff, "@@ -1 +1 @@\n-first\n\\ No newline at end of file\n+conflict\n\\ No newline at end of file\n"); + } +} + +#[tokio::test] +async fn delete_if_empty() { + let Test { + project_id, + controller, + .. 
+    } = Test::default();
+
+    controller
+        .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
+        .await
+        .unwrap();
+
+    controller
+        .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
+        .await
+        .unwrap();
+
+    let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
+    assert_eq!(branches.len(), 1);
+
+    controller
+        .unapply_virtual_branch(&project_id, &branches[0].id)
+        .await
+        .unwrap();
+
+    let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
+    assert_eq!(branches.len(), 0);
+}
diff --git a/gitbutler-app/tests/suite/virtual_branches/unapply_ownership.rs b/gitbutler-app/tests/suite/virtual_branches/unapply_ownership.rs
new file mode 100644
index 000000000..71d12b565
--- /dev/null
+++ b/gitbutler-app/tests/suite/virtual_branches/unapply_ownership.rs
@@ -0,0 +1,61 @@
+use crate::suite::virtual_branches::Test;
+use gitbutler_app::virtual_branches::branch;
+use gitbutler_app::virtual_branches::branch::BranchOwnershipClaims;
+use std::fs;
+
+#[tokio::test]
+async fn should_unapply_with_commits() {
+    let Test {
+        project_id,
+        controller,
+        repository,
+        ..
+    } = Test::default();
+
+    controller
+        .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
+        .await
+        .unwrap();
+
+    let branch_id = controller
+        .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
+        .await
+        .unwrap();
+
+    fs::write(
+        repository.path().join("file.txt"),
+        "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n",
+    )
+    .unwrap();
+    controller
+        .create_commit(&project_id, &branch_id, "test", None, false)
+        .await
+        .unwrap();
+
+    // change in the committed hunks leads to hunk locking
+    fs::write(
+        repository.path().join("file.txt"),
+        "_\n2\n3\n4\n5\n6\n7\n8\n9\n_\n",
+    )
+    .unwrap();
+
+    controller
+        .unapply_ownership(
+            &project_id,
+            &"file.txt:1-5,7-11"
+                .parse::<BranchOwnershipClaims>()
+                .unwrap(),
+        )
+        .await
+        .unwrap();
+
+    let branch = controller
+        .list_virtual_branches(&project_id)
+        .await
+        .unwrap()
+        .0
+        .into_iter()
+        .find(|b| b.id == branch_id)
+        .unwrap();
+    assert!(branch.files.is_empty());
+}
diff --git a/gitbutler-app/tests/suite/virtual_branches/update_base_branch.rs b/gitbutler-app/tests/suite/virtual_branches/update_base_branch.rs
new file mode 100644
index 000000000..d6a9eae94
--- /dev/null
+++ b/gitbutler-app/tests/suite/virtual_branches/update_base_branch.rs
@@ -0,0 +1,1929 @@
+use super::*;
+
+mod unapplied_branch {
+
+    use super::*;
+
+    #[tokio::test]
+    async fn conflicts_with_uncommitted_work() {
+        let Test {
+            repository,
+            project_id,
+            controller,
+            ..
+ } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch that is unapplied and contains not commited conflict + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .unapply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(&project_id).await.unwrap(); + + // branch should not be changed. + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert!(!controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_not_pushed() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(&project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + controller + .unapply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(&project_id).await.unwrap(); + + // should not change the branch. 
+ + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + assert!(!controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_pushed() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(&project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + controller + .unapply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(&project_id).await.unwrap(); + + // should not change the branch. 
+ + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + assert!(!controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_not_pushed_fixed_with_more_work() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(&project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); + + controller + .unapply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(&project_id).await.unwrap(); + + // should rebase upstream, and leave uncommited file as is + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); // TODO: should be true + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert!(!controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); // TODO: should be true + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + 
std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_pushed_fixed_with_more_work() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(&project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); + + controller + .unapply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(&project_id).await.unwrap(); + + // should not touch the branch + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].files.len(), 1); + assert!(!controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn no_conflicts() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); + + controller + .create_commit( + &project_id, + &branch_id, + "non conflicting commit", + None, + false, + ) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "still no conflicts").unwrap(); + + controller + .unapply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + branch_id + }; + + { + // fetching remote + controller.update_base_branch(&project_id).await.unwrap(); + + // should update branch base + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert!(branches[0].upstream.is_none()); + assert!(controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "second" + ); + } + } + + #[tokio::test] + async fn integrated_commit_plus_work() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + repository.commit_all("first"); + repository.push(); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "second").unwrap(); + controller + .create_commit(&project_id, &branch_id, "second", None, false) + .await + .unwrap(); + + // more local work in the same branch + fs::write(repository.path().join("file2.txt"), "other").unwrap(); + + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + { + // merge branch upstream + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + repository.merge(&branch.upstream.as_ref().unwrap().name); + repository.fetch(); + } + + controller + .unapply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + branch_id + }; + + { + // fetch remote + controller.update_base_branch(&project_id).await.unwrap(); + + // should remove integrated commit, but leave work + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert!(branches[0].upstream.is_none()); + assert!(controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "second" + ); + assert_eq!( + std::fs::read_to_string(repository.path().join("file2.txt")).unwrap(), + "other" + ); + } + } + + #[tokio::test] + async fn all_integrated() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "second").unwrap(); + + controller + .create_commit(&project_id, &branch_id, "second", None, false) + .await + .unwrap(); + + controller + .unapply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + }; + + { + // fetch remote + controller.update_base_branch(&project_id).await.unwrap(); + + // should remove identical branch + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 0); + } + } + + #[tokio::test] + async fn integrate_work_while_being_behind() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // open pr + fs::write(repository.path().join("file2.txt"), "new file").unwrap(); + controller + .create_commit(&project_id, &branch_id, "second", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + } + + controller + .unapply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + { + // merge pr + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0[0] + .clone(); + repository.merge(&branch.upstream.as_ref().unwrap().name); + repository.fetch(); + } + + { + // fetch remote + controller.update_base_branch(&project_id).await.unwrap(); + + // just removes integrated branch + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 0); + } + } +} + +mod applied_branch { + + use super::*; + + #[tokio::test] + async fn conflicts_with_uncommitted_work() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + + branch_id + }; + + { + // fetch remote + controller.update_base_branch(&project_id).await.unwrap(); + + // should stash conflicing branch + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert!(!controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_not_pushed() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(&project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(&project_id).await.unwrap(); + + // should stash the branch. 
+ + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + assert!(!controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_pushed() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(&project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(&project_id).await.unwrap(); + + // should stash the branch. 
+ + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + assert!(!controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_not_pushed_fixed_with_more_work() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(&project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(&project_id).await.unwrap(); + + // should rebase upstream, and leave uncommited file as is + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); // TODO: should be true + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert!(!controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); // TODO: should be true + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + 
} + + #[tokio::test] + async fn commited_conflict_pushed_fixed_with_more_work() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(&project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(&project_id).await.unwrap(); + + // should merge upstream, and leave uncommited file as is. + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); // TODO: should be true + assert_eq!(branches[0].commits.len(), 1); // TODO: should be 2 + assert_eq!(branches[0].files.len(), 1); + assert!(!controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); // TODO: should be true + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + mod no_conflicts_pushed { + use super::*; + + #[tokio::test] + async fn force_push_ok() { + let Test { + repository, + project_id, + controller, + projects, + .. 
+ } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + projects + .update(&projects::UpdateRequest { + id: project_id, + ok_with_force_push: Some(true), + ..Default::default() + }) + .await + .unwrap(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); + + controller + .create_commit(&project_id, &branch_id, "no conflicts", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "still no conflict").unwrap(); + + branch_id + }; + + { + // fetch remote + controller.update_base_branch(&project_id).await.unwrap(); + + // rebases branch, since the branch is pushed and force pushing is + // allowed + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].requires_force); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert!(!branches[0].commits[0].is_remote); + assert!(!branches[0].commits[0].is_integrated); + assert!(controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); + } + } + + #[tokio::test] + async fn force_push_not_ok() { + let Test { + repository, + project_id, + controller, + projects, + .. 
+ } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); + + controller + .create_commit(&project_id, &branch_id, "no conflicts", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "still no conflict").unwrap(); + + branch_id + }; + + projects + .update(&projects::UpdateRequest { + id: project_id, + ok_with_force_push: Some(false), + ..Default::default() + }) + .await + .unwrap(); + + { + // fetch remote + controller.update_base_branch(&project_id).await.unwrap(); + + // creates a merge commit, since the branch is pushed + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(!branches[0].requires_force); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 2); + assert!(!branches[0].commits[0].is_remote); + assert!(!branches[0].commits[0].is_integrated); + assert!(branches[0].commits[1].is_remote); + assert!(!branches[0].commits[1].is_integrated); + assert!(controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); + } + } + } + + #[tokio::test] + async fn no_conflicts() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); + + controller + .create_commit(&project_id, &branch_id, "no conflicts", None, false) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "still no conflict").unwrap(); + + branch_id + }; + + { + // fetch remote + controller.update_base_branch(&project_id).await.unwrap(); + + // just rebases branch + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert!(controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); + } + + { + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "second" + ); + assert_eq!( + std::fs::read_to_string(repository.path().join("file2.txt")).unwrap(), + "still no conflict" + ); + } + } + + #[tokio::test] + async fn integrated_commit_plus_work() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + repository.commit_all("first"); + repository.push(); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "second").unwrap(); + + controller + .create_commit(&project_id, &branch_id, "second", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + { + // merge branch upstream + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + repository.merge(&branch.upstream.as_ref().unwrap().name); + repository.fetch(); + } + + // more local work in the same branch + fs::write(repository.path().join("file2.txt"), "other").unwrap(); + + branch_id + }; + + { + // fetch remote + controller.update_base_branch(&project_id).await.unwrap(); + + // should remove integrated commit, but leave non integrated work as is + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert!(controller + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "second" + ); + assert_eq!( + std::fs::read_to_string(repository.path().join("file2.txt")).unwrap(), + "other" + ); + } + } + + #[tokio::test] + async fn integrated_with_locked_conflicting_hunks() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write( + repository.path().join("file.txt"), + "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n", + ) + .unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write( + repository.path().join("file.txt"), + "1\n2\n3\n4\n5\n6\n17\n8\n9\n10\n11\n12\n", + ) + .unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // branch has no conflict + let branch_id = { + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write( + repository.path().join("file.txt"), + "1\n2\n3\n4\n5\n6\n7\n8\n19\n10\n11\n12\n", + ) + .unwrap(); + + controller + .create_commit(&project_id, &branch_id, "first", None, false) + .await + .unwrap(); + + branch_id + }; + + // push the branch + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + // another locked conflicing hunk + fs::write( + repository.path().join("file.txt"), + "1\n2\n3\n4\n5\n6\n77\n8\n19\n10\n11\n12\n", + ) + .unwrap(); + + { + // merge branch remotely + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0[0] + .clone(); + repository.merge(&branch.upstream.as_ref().unwrap().name); + } + + repository.fetch(); + + { + controller.update_base_branch(&project_id).await.unwrap(); + + // removes integrated commit, leaves non commited work as is + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(branches[0].commits.is_empty()); + assert!(!branches[0].files.is_empty()); + } + + { + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].files[0].hunks.len(), 1); + assert_eq!(branches[0].files[0].hunks[0].diff, "@@ -4,7 +4,11 @@\n 4\n 5\n 6\n-7\n+<<<<<<< ours\n+77\n+=======\n+17\n+>>>>>>> theirs\n 8\n 19\n 10\n"); + assert_eq!(branches[0].commits.len(), 0); + } + } + + #[tokio::test] + async fn integrated_with_locked_hunks() { + let Test { + repository, + project_id, + controller, + projects, + .. 
+ } = Test::default(); + + projects + .update(&projects::UpdateRequest { + id: project_id, + ok_with_force_push: Some(false), + ..Default::default() + }) + .await + .unwrap(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "first").unwrap(); + + controller + .create_commit(&project_id, &branch_id, "first", None, false) + .await + .unwrap(); + + branch_id + }; + + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + // another non-locked hunk + fs::write(repository.path().join("file.txt"), "first\nsecond").unwrap(); + + { + // push and merge branch remotely + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0[0] + .clone(); + repository.merge(&branch.upstream.as_ref().unwrap().name); + } + + repository.fetch(); + + { + controller.update_base_branch(&project_id).await.unwrap(); + + // removes integrated commit, leaves non commited work as is + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].commits.is_empty()); + assert!(branches[0].upstream.is_none()); + assert_eq!(branches[0].files.len(), 1); + } + + { + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); // no merge commit + } + } + + #[tokio::test] + async fn integrated_with_non_locked_hunks() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "first").unwrap(); + + controller + .create_commit(&project_id, &branch_id, "first", None, false) + .await + .unwrap(); + + branch_id + }; + + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + // another non-locked hunk + fs::write(repository.path().join("another_file.txt"), "first").unwrap(); + + { + // push and merge branch remotely + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0[0] + .clone(); + repository.merge(&branch.upstream.as_ref().unwrap().name); + } + + repository.fetch(); + + { + controller.update_base_branch(&project_id).await.unwrap(); + + // removes integrated commit, leaves non commited work as is + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].commits.is_empty()); + assert!(branches[0].upstream.is_none()); + assert!(!branches[0].files.is_empty()); + } + + { + controller + .apply_virtual_branch(&project_id, &branch_id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + } + } + + #[tokio::test] + async fn all_integrated() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "second").unwrap(); + + controller + .create_commit(&project_id, &branch_id, "second", None, false) + .await + .unwrap(); + }; + + { + // fetch remote + controller.update_base_branch(&project_id).await.unwrap(); + + // just removes integrated branch + + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 0); + } + } + + #[tokio::test] + async fn integrate_work_while_being_behind() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // open pr + fs::write(repository.path().join("file2.txt"), "new file").unwrap(); + controller + .create_commit(&project_id, &branch_id, "second", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + } + + { + // merge pr + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0[0] + .clone(); + repository.merge(&branch.upstream.as_ref().unwrap().name); + repository.fetch(); + } + + { + // fetch remote + controller.update_base_branch(&project_id).await.unwrap(); + + // just removes integrated branch + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 0); + } + } +} diff --git a/gitbutler-app/tests/suite/virtual_branches/update_commit_message.rs b/gitbutler-app/tests/suite/virtual_branches/update_commit_message.rs new file mode 100644 index 000000000..34a4490e0 --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/update_commit_message.rs @@ -0,0 +1,384 @@ +use super::*; + +#[tokio::test] +async fn head() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file two.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit two", None, false) + .await + .unwrap() + }; + + let commit_three_oid = { + fs::write(repository.path().join("file three.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit three", None, false) + .await + .unwrap() + }; + + controller + .update_commit_message( + &project_id, + &branch_id, + commit_three_oid, + "commit three updated", + ) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + let descriptions = branch + .commits + .iter() + .map(|c| c.description.clone()) + .collect::>(); + + assert_eq!( + descriptions, + vec!["commit three updated", "commit two", "commit one"] + ); +} + +#[tokio::test] +async fn middle() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + let commit_two_oid = { + fs::write(repository.path().join("file two.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit two", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file three.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit three", None, false) + .await + .unwrap() + }; + + controller + .update_commit_message( + &project_id, + &branch_id, + commit_two_oid, + "commit two updated", + ) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + let descriptions = branch + .commits + .iter() + .map(|c| c.description.clone()) + .collect::>(); + assert_eq!( + descriptions, + vec!["commit three", "commit two updated", "commit one"] + ); +} + +#[tokio::test] +async fn forcepush_allowed() { + let Test { + repository, + project_id, + controller, + projects, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + projects + .update(&projects::UpdateRequest { + id: project_id, + ok_with_force_push: Some(true), + ..Default::default() + }) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one_oid = { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + controller + .update_commit_message( + &project_id, + &branch_id, + commit_one_oid, + "commit one updated", + ) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + let descriptions = branch + .commits + .iter() + .map(|c| c.description.clone()) + .collect::>(); + assert_eq!(descriptions, vec!["commit one updated"]); + assert!(branch.requires_force); +} + +#[tokio::test] +async fn forcepush_forbidden() { + let Test { + repository, + project_id, + controller, + projects, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + projects + .update(&projects::UpdateRequest { + id: project_id, + ok_with_force_push: Some(false), + ..Default::default() + }) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one_oid = { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + controller + .push_virtual_branch(&project_id, &branch_id, false, None) + .await + .unwrap(); + + assert!(matches!( + controller + .update_commit_message( + &project_id, + &branch_id, + commit_one_oid, + "commit one updated", + ) + .await + .unwrap_err(), + ControllerError::Action(errors::UpdateCommitMessageError::ForcePushNotAllowed(_)) + )); +} + +#[tokio::test] +async fn root() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one_oid = { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file two.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit two", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file three.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit three", None, false) + .await + .unwrap() + }; + + controller + .update_commit_message( + &project_id, + &branch_id, + commit_one_oid, + "commit one updated", + ) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + let descriptions = branch + .commits + .iter() + .map(|c| c.description.clone()) + .collect::>(); + assert_eq!( + descriptions, + vec!["commit three", "commit two", "commit one updated"] + ); +} + +#[tokio::test] +async fn empty() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one_oid = { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(&project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + assert!(matches!( + controller + .update_commit_message(&project_id, &branch_id, commit_one_oid, "",) + .await, + Err(ControllerError::Action( + errors::UpdateCommitMessageError::EmptyMessage + )) + )); +} diff --git a/gitbutler-app/tests/suite/virtual_branches/upstream.rs b/gitbutler-app/tests/suite/virtual_branches/upstream.rs new file mode 100644 index 000000000..28efb18cf --- /dev/null +++ b/gitbutler-app/tests/suite/virtual_branches/upstream.rs @@ -0,0 +1,149 @@ +use super::*; + +#[tokio::test] +async fn detect_upstream_commits() { + let Test { + repository, + project_id, + controller, + .. + } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let oid1 = { + // create first commit + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(&project_id, &branch1_id, "commit", None, false) + .await + .unwrap() + }; + + let oid2 = { + // create second commit + fs::write(repository.path().join("file.txt"), "content2").unwrap(); + controller + .create_commit(&project_id, &branch1_id, "commit", None, false) + .await + .unwrap() + }; + + // push + controller + .push_virtual_branch(&project_id, &branch1_id, false, None) + .await + .unwrap(); + + let oid3 = { + // create third commit + fs::write(repository.path().join("file.txt"), "content3").unwrap(); + controller + .create_commit(&project_id, &branch1_id, "commit", None, false) + .await + .unwrap() + }; + + { + // should correctly detect pushed commits + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 3); + assert_eq!(branches[0].commits[0].id, oid3); + assert!(!branches[0].commits[0].is_remote); + assert_eq!(branches[0].commits[1].id, oid2); + assert!(branches[0].commits[1].is_remote); + assert_eq!(branches[0].commits[2].id, oid1); + assert!(branches[0].commits[2].is_remote); + } +} + +#[tokio::test] +async fn detect_integrated_commits() { + let Test { + repository, + project_id, + controller, + .. 
+ } = Test::default(); + + controller + .set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch(&project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let oid1 = { + // create first commit + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(&project_id, &branch1_id, "commit", None, false) + .await + .unwrap() + }; + + let oid2 = { + // create second commit + fs::write(repository.path().join("file.txt"), "content2").unwrap(); + controller + .create_commit(&project_id, &branch1_id, "commit", None, false) + .await + .unwrap() + }; + + // push + controller + .push_virtual_branch(&project_id, &branch1_id, false, None) + .await + .unwrap(); + + { + // merge branch upstream + let branch = controller + .list_virtual_branches(&project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch1_id) + .unwrap(); + repository.merge(&branch.upstream.as_ref().unwrap().name); + repository.fetch(); + } + + let oid3 = { + // create third commit + fs::write(repository.path().join("file.txt"), "content3").unwrap(); + controller + .create_commit(&project_id, &branch1_id, "commit", None, false) + .await + .unwrap() + }; + + { + // should correctly detect pushed commits + let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 3); + assert_eq!(branches[0].commits[0].id, oid3); + assert!(!branches[0].commits[0].is_integrated); + assert_eq!(branches[0].commits[1].id, oid2); + assert!(branches[0].commits[1].is_integrated); + assert_eq!(branches[0].commits[2].id, oid1); + assert!(branches[0].commits[2].is_integrated); + } +} diff --git a/gitbutler-app/tests/virtual_branches/branch/mod.rs b/gitbutler-app/tests/virtual_branches/branch/mod.rs new file mode 100644 index 000000000..7a3e97daa --- /dev/null +++ b/gitbutler-app/tests/virtual_branches/branch/mod.rs @@ -0,0 +1,5 @@ +use gitbutler_app::virtual_branches::Branch; + +mod reader; + +mod writer; diff --git a/gitbutler-app/tests/virtual_branches/branch/reader.rs b/gitbutler-app/tests/virtual_branches/branch/reader.rs new file mode 100644 index 000000000..10415bdd1 --- /dev/null +++ b/gitbutler-app/tests/virtual_branches/branch/reader.rs @@ -0,0 +1,96 @@ +use std::sync::atomic::{AtomicUsize, Ordering}; + +use anyhow::Result; +use once_cell::sync::Lazy; + +use crate::{Case, Suite}; +use gitbutler_app::virtual_branches::branch::BranchOwnershipClaims; +use gitbutler_app::virtual_branches::{branch, Branch, BranchId}; + +static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); + +fn test_branch() -> Branch { + TEST_INDEX.fetch_add(1, Ordering::Relaxed); + + Branch { + id: BranchId::generate(), + name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), + notes: String::new(), + applied: true, + order: TEST_INDEX.load(Ordering::Relaxed), + upstream: Some( + format!( + "refs/remotes/origin/upstream_{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + ), + upstream_head: Some( + format!( + "0123456789abcdef0123456789abcdef0123456{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + ), + created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, + updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, + head: format!( + "0123456789abcdef0123456789abcdef0123456{}", + 
TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + tree: format!( + "0123456789abcdef0123456789abcdef012345{}", + TEST_INDEX.load(Ordering::Relaxed) + 10 + ) + .parse() + .unwrap(), + ownership: BranchOwnershipClaims { + claims: vec![format!("file/{}:1-2", TEST_INDEX.load(Ordering::Relaxed)) + .parse() + .unwrap()], + }, + selected_for_changes: Some(1), + } +} + +#[test] +fn test_read_not_found() -> Result<()> { + let Case { gb_repository, .. } = Suite::default().new_case(); + + let session = gb_repository.get_or_create_current_session()?; + let session_reader = gitbutler_app::sessions::Reader::open(&gb_repository, &session)?; + + let reader = branch::Reader::new(&session_reader); + let result = reader.read(&BranchId::generate()); + assert!(result.is_err()); + assert_eq!(result.unwrap_err().to_string(), "file not found"); + + Ok(()) +} + +#[test] +fn test_read_override() -> Result<()> { + let Case { + gb_repository, + project, + .. + } = Suite::default().new_case(); + + let mut branch = test_branch(); + + let writer = branch::Writer::new(&gb_repository, project.gb_dir())?; + writer.write(&mut branch)?; + + let session = gb_repository.get_current_session()?.unwrap(); + let session_reader = gitbutler_app::sessions::Reader::open(&gb_repository, &session)?; + + let reader = branch::Reader::new(&session_reader); + + assert_eq!(branch, reader.read(&branch.id).unwrap()); + + Ok(()) +} diff --git a/gitbutler-app/tests/virtual_branches/branch/writer.rs b/gitbutler-app/tests/virtual_branches/branch/writer.rs new file mode 100644 index 000000000..472b8ad51 --- /dev/null +++ b/gitbutler-app/tests/virtual_branches/branch/writer.rs @@ -0,0 +1,217 @@ +use std::{ + fs, + sync::atomic::{AtomicUsize, Ordering}, +}; + +use anyhow::Context; +use gitbutler_app::virtual_branches::branch; +use once_cell::sync::Lazy; + +use crate::{Case, Suite}; + +use self::branch::BranchId; + +use super::*; + +static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); + +fn test_branch() -> Branch { + TEST_INDEX.fetch_add(1, Ordering::Relaxed); + + Branch { + id: BranchId::generate(), + name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), + notes: String::new(), + applied: true, + upstream: Some( + format!( + "refs/remotes/origin/upstream_{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + ), + upstream_head: None, + created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, + updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, + head: format!( + "0123456789abcdef0123456789abcdef0123456{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + tree: format!( + "0123456789abcdef0123456789abcdef012345{}", + TEST_INDEX.load(Ordering::Relaxed) + 10 + ) + .parse() + .unwrap(), + ownership: gitbutler_app::virtual_branches::branch::BranchOwnershipClaims { + claims: vec![gitbutler_app::virtual_branches::branch::OwnershipClaim { + file_path: format!("file/{}:1-2", TEST_INDEX.load(Ordering::Relaxed)).into(), + hunks: vec![], + }], + }, + order: TEST_INDEX.load(Ordering::Relaxed), + selected_for_changes: Some(1), + } +} + +#[test] +fn test_write_branch() -> anyhow::Result<()> { + let Case { + gb_repository, + project, + .. 
+ } = Suite::default().new_case(); + + let mut branch = test_branch(); + + let writer = branch::Writer::new(&gb_repository, project.gb_dir())?; + writer.write(&mut branch)?; + + let root = gb_repository + .root() + .join("branches") + .join(branch.id.to_string()); + + assert_eq!( + fs::read_to_string(root.join("meta").join("name").to_str().unwrap()) + .context("Failed to read branch name")?, + branch.name + ); + assert_eq!( + fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())? + .parse::() + .context("Failed to read branch applied")?, + branch.applied + ); + assert_eq!( + fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap()) + .context("Failed to read branch upstream")?, + branch.upstream.clone().unwrap().to_string() + ); + assert_eq!( + fs::read_to_string( + root.join("meta") + .join("created_timestamp_ms") + .to_str() + .unwrap() + ) + .context("Failed to read branch created timestamp")? + .parse::() + .context("Failed to parse branch created timestamp")?, + branch.created_timestamp_ms + ); + assert_eq!( + fs::read_to_string( + root.join("meta") + .join("updated_timestamp_ms") + .to_str() + .unwrap() + ) + .context("Failed to read branch updated timestamp")? + .parse::() + .context("Failed to parse branch updated timestamp")?, + branch.updated_timestamp_ms + ); + + writer.delete(&branch)?; + fs::read_dir(root).unwrap_err(); + + Ok(()) +} + +#[test] +fn test_should_create_session() -> anyhow::Result<()> { + let Case { + gb_repository, + project, + .. + } = Suite::default().new_case(); + + let mut branch = test_branch(); + + let writer = branch::Writer::new(&gb_repository, project.gb_dir())?; + writer.write(&mut branch)?; + + assert!(gb_repository.get_current_session()?.is_some()); + + Ok(()) +} + +#[test] +fn test_should_update() -> anyhow::Result<()> { + let Case { + gb_repository, + project, + .. + } = Suite::default().new_case(); + + let mut branch = test_branch(); + + let writer = branch::Writer::new(&gb_repository, project.gb_dir())?; + writer.write(&mut branch)?; + + let mut updated_branch = Branch { + name: "updated_name".to_string(), + applied: false, + upstream: Some("refs/remotes/origin/upstream_updated".parse().unwrap()), + created_timestamp_ms: 2, + updated_timestamp_ms: 3, + ownership: gitbutler_app::virtual_branches::branch::BranchOwnershipClaims { + claims: vec![], + }, + ..branch.clone() + }; + + writer.write(&mut updated_branch)?; + + let root = gb_repository + .root() + .join("branches") + .join(branch.id.to_string()); + + assert_eq!( + fs::read_to_string(root.join("meta").join("name").to_str().unwrap()) + .context("Failed to read branch name")?, + updated_branch.name + ); + assert_eq!( + fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())? + .parse::() + .context("Failed to read branch applied")?, + updated_branch.applied + ); + assert_eq!( + fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap()) + .context("Failed to read branch upstream")?, + updated_branch.upstream.unwrap().to_string() + ); + assert_eq!( + fs::read_to_string( + root.join("meta") + .join("created_timestamp_ms") + .to_str() + .unwrap() + ) + .context("Failed to read branch created timestamp")? + .parse::() + .context("Failed to parse branch created timestamp")?, + updated_branch.created_timestamp_ms + ); + assert_eq!( + fs::read_to_string( + root.join("meta") + .join("updated_timestamp_ms") + .to_str() + .unwrap() + ) + .context("Failed to read branch updated timestamp")? 
+ .parse::() + .context("Failed to parse branch updated timestamp")?, + updated_branch.updated_timestamp_ms + ); + + Ok(()) +} diff --git a/gitbutler-app/tests/virtual_branches/iterator.rs b/gitbutler-app/tests/virtual_branches/iterator.rs new file mode 100644 index 000000000..b0726e3a2 --- /dev/null +++ b/gitbutler-app/tests/virtual_branches/iterator.rs @@ -0,0 +1,115 @@ +use std::sync::atomic::{AtomicUsize, Ordering}; + +use anyhow::Result; +use gitbutler_app::virtual_branches; +use once_cell::sync::Lazy; + +use crate::{Case, Suite}; + +static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); + +fn test_branch() -> virtual_branches::branch::Branch { + TEST_INDEX.fetch_add(1, Ordering::Relaxed); + + virtual_branches::branch::Branch { + id: virtual_branches::BranchId::generate(), + name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), + notes: String::new(), + applied: true, + upstream: Some( + format!( + "refs/remotes/origin/upstream_{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + ), + upstream_head: None, + created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, + updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, + head: format!( + "0123456789abcdef0123456789abcdef0123456{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + tree: format!( + "0123456789abcdef0123456789abcdef012345{}", + TEST_INDEX.load(Ordering::Relaxed) + 10 + ) + .parse() + .unwrap(), + ownership: virtual_branches::branch::BranchOwnershipClaims::default(), + order: TEST_INDEX.load(Ordering::Relaxed), + selected_for_changes: Some(1), + } +} + +static TEST_TARGET_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); + +fn test_target() -> virtual_branches::target::Target { + virtual_branches::target::Target { + branch: format!( + "refs/remotes/branch name{}/remote name {}", + TEST_TARGET_INDEX.load(Ordering::Relaxed), + TEST_TARGET_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + remote_url: format!("remote url {}", TEST_TARGET_INDEX.load(Ordering::Relaxed)), + sha: format!( + "0123456789abcdef0123456789abcdef0123456{}", + TEST_TARGET_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + } +} + +#[test] +fn test_empty_iterator() -> Result<()> { + let Case { gb_repository, .. } = Suite::default().new_case(); + + let session = gb_repository.get_or_create_current_session()?; + let session_reader = gitbutler_app::sessions::Reader::open(&gb_repository, &session)?; + + let iter = virtual_branches::Iterator::new(&session_reader)?; + + assert_eq!(iter.count(), 0); + + Ok(()) +} + +#[test] +fn test_iterate_all() -> Result<()> { + let Case { + gb_repository, + project, + .. + } = Suite::default().new_case(); + + let target_writer = + gitbutler_app::virtual_branches::target::Writer::new(&gb_repository, project.gb_dir())?; + target_writer.write_default(&test_target())?; + + let branch_writer = + gitbutler_app::virtual_branches::branch::Writer::new(&gb_repository, project.gb_dir())?; + let mut branch_1 = test_branch(); + branch_writer.write(&mut branch_1)?; + let mut branch_2 = test_branch(); + branch_writer.write(&mut branch_2)?; + let mut branch_3 = test_branch(); + branch_writer.write(&mut branch_3)?; + + let session = gb_repository.get_current_session()?.unwrap(); + let session_reader = gitbutler_app::sessions::Reader::open(&gb_repository, &session)?; + + let iter = virtual_branches::Iterator::new(&session_reader)? 
+ .collect::, gitbutler_app::reader::Error>>()?; + assert_eq!(iter.len(), 3); + assert!(iter.contains(&branch_1)); + assert!(iter.contains(&branch_2)); + assert!(iter.contains(&branch_3)); + + Ok(()) +} diff --git a/gitbutler-app/src/virtual_branches/tests.rs b/gitbutler-app/tests/virtual_branches/mod.rs similarity index 73% rename from gitbutler-app/src/virtual_branches/tests.rs rename to gitbutler-app/tests/virtual_branches/mod.rs index 75a31ab9b..81344c316 100644 --- a/gitbutler-app/src/virtual_branches/tests.rs +++ b/gitbutler-app/tests/virtual_branches/mod.rs @@ -1,21 +1,31 @@ -use std::{collections::HashMap, fs, io::Write, path}; +mod branch; +mod iterator; +mod target; + +use std::{collections::HashMap, io::Write}; use anyhow::{Context, Result}; use pretty_assertions::assert_eq; +use std::path::{Path, PathBuf}; #[cfg(target_family = "unix")] use std::{ fs::Permissions, os::unix::{fs::symlink, prelude::*}, }; -use crate::{ - gb_repository, git, project_repository, reader, sessions, - tests::{self, empty_bare_repository, Case, Suite}, +use crate::{commit_all, empty_bare_repository, Case, Suite}; +use gitbutler_app::{ + gb_repository, git, project_repository, reader, sessions, virtual_branches, virtual_branches::errors::CommitError, }; -use super::*; -use branch::{BranchCreateRequest, BranchOwnershipClaims}; +use gitbutler_app::virtual_branches::branch::{BranchCreateRequest, BranchOwnershipClaims}; +use gitbutler_app::virtual_branches::integration::verify_branch; +use gitbutler_app::virtual_branches::{ + apply_branch, commit, create_virtual_branch, is_remote_branch_mergeable, + is_virtual_branch_mergeable, list_remote_branches, merge_virtual_branch_upstream, + unapply_ownership, update_branch, +}; pub fn set_test_target( gb_repo: &gb_repository::Repository, @@ -31,15 +41,15 @@ pub fn set_test_target( .expect("failed to add remote"); remote.push(&["refs/heads/master:refs/heads/master"], None)?; - target::Writer::new(gb_repo, project_repository.project().gb_dir())? - .write_default(&target::Target { + virtual_branches::target::Writer::new(gb_repo, project_repository.project().gb_dir())? + .write_default(&virtual_branches::target::Target { branch: "refs/remotes/origin/master".parse().unwrap(), remote_url: remote_repo.path().to_str().unwrap().parse().unwrap(), sha: remote_repo.head().unwrap().target().unwrap(), }) .expect("failed to write target"); - super::integration::update_gitbutler_integration(gb_repo, project_repository) + virtual_branches::integration::update_gitbutler_integration(gb_repo, project_repository) .expect("failed to update integration"); Ok(()) @@ -53,14 +63,8 @@ fn test_commit_on_branch_then_change_file_then_get_status() -> Result<()> { gb_repository, .. 
} = Suite::default().new_case_with_files(HashMap::from([ - ( - path::PathBuf::from("test.txt"), - "line1\nline2\nline3\nline4\n", - ), - ( - path::PathBuf::from("test2.txt"), - "line5\nline6\nline7\nline8\n", - ), + (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"), + (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"), ])); set_test_target(&gb_repository, &project_repository)?; @@ -74,11 +78,12 @@ fn test_commit_on_branch_then_change_file_then_get_status() -> Result<()> { .id; std::fs::write( - std::path::Path::new(&project.path).join("test.txt"), + Path::new(&project.path).join("test.txt"), "line0\nline1\nline2\nline3\nline4\n", )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches[0]; assert_eq!(branch.files.len(), 1); assert_eq!(branch.commits.len(), 0); @@ -96,18 +101,20 @@ fn test_commit_on_branch_then_change_file_then_get_status() -> Result<()> { )?; // status (no files) - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches[0]; assert_eq!(branch.files.len(), 0); assert_eq!(branch.commits.len(), 1); std::fs::write( - std::path::Path::new(&project.path).join("test2.txt"), + Path::new(&project.path).join("test2.txt"), "line5\nline6\nlineBLAH\nline7\nline8\n", )?; // should have just the last change now, the other line is committed - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches[0]; assert_eq!(branch.files.len(), 1); assert_eq!(branch.commits.len(), 1); @@ -124,14 +131,8 @@ fn test_signed_commit() -> Result<()> { project_repository, .. } = suite.new_case_with_files(HashMap::from([ - ( - path::PathBuf::from("test.txt"), - "line1\nline2\nline3\nline4\n", - ), - ( - path::PathBuf::from("test2.txt"), - "line5\nline6\nline7\nline8\n", - ), + (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"), + (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"), ])); set_test_target(&gb_repository, &project_repository)?; @@ -145,7 +146,7 @@ fn test_signed_commit() -> Result<()> { .id; std::fs::write( - std::path::Path::new(&project.path).join("test.txt"), + Path::new(&project.path).join("test.txt"), "line0\nline1\nline2\nline3\nline4\n", )?; @@ -167,7 +168,8 @@ fn test_signed_commit() -> Result<()> { false, )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository).unwrap(); + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository).unwrap(); let commit_id = &branches[0].commits[0].id; let commit_obj = project_repository.git_repository.find_commit(*commit_id)?; // check the raw_header contains the string "SSH SIGNATURE" @@ -185,14 +187,14 @@ fn test_track_binary_files() -> Result<()> { .. 
} = Suite::default().new_case(); - let file_path = std::path::Path::new("test.txt"); + let file_path = Path::new("test.txt"); std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\n", )?; - let file_path2 = std::path::Path::new("test2.txt"); + let file_path2 = Path::new("test2.txt"); std::fs::write( - std::path::Path::new(&project.path).join(file_path2), + Path::new(&project.path).join(file_path2), "line5\nline6\nline7\nline8\n", )?; // add a binary file @@ -202,9 +204,9 @@ fn test_track_binary_files() -> Result<()> { 255, 255, 0, // Yellow pixel 0, 255, 0, // Green pixel ]; - let mut file = fs::File::create(std::path::Path::new(&project.path).join("image.bin"))?; + let mut file = std::fs::File::create(Path::new(&project.path).join("image.bin"))?; file.write_all(&image_data)?; - tests::commit_all(&project_repository.git_repository); + commit_all(&project_repository.git_repository); set_test_target(&gb_repository, &project_repository)?; @@ -218,7 +220,7 @@ fn test_track_binary_files() -> Result<()> { // test file change std::fs::write( - std::path::Path::new(&project.path).join(file_path2), + Path::new(&project.path).join(file_path2), "line5\nline6\nline7\nline8\nline9\n", )?; @@ -229,10 +231,11 @@ fn test_track_binary_files() -> Result<()> { 0, 0, 255, // Blue pixel 255, 255, 0, // Yellow pixel ]; - let mut file = fs::File::create(std::path::Path::new(&project.path).join("image.bin"))?; + let mut file = std::fs::File::create(Path::new(&project.path).join("image.bin"))?; file.write_all(&image_data)?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches[0]; assert_eq!(branch.files.len(), 2); let img_file = &branch @@ -259,7 +262,8 @@ fn test_track_binary_files() -> Result<()> { )?; // status (no files) - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository).unwrap(); + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository).unwrap(); let commit_id = &branches[0].commits[0].id; let commit_obj = project_repository.git_repository.find_commit(*commit_id)?; let tree = commit_obj.tree()?; @@ -273,7 +277,7 @@ fn test_track_binary_files() -> Result<()> { 255, 255, 0, // Yellow pixel 0, 0, 255, // Blue pixel ]; - let mut file = fs::File::create(std::path::Path::new(&project.path).join("image.bin"))?; + let mut file = std::fs::File::create(Path::new(&project.path).join("image.bin"))?; file.write_all(&image_data)?; // commit @@ -288,7 +292,8 @@ fn test_track_binary_files() -> Result<()> { false, )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository).unwrap(); + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository).unwrap(); let commit_id = &branches[0].commits[0].id; // get tree from commit_id let commit_obj = project_repository.git_repository.find_commit(*commit_id)?; @@ -312,12 +317,8 @@ fn test_create_branch_with_ownership() -> Result<()> { set_test_target(&gb_repository, &project_repository)?; - let file_path = std::path::Path::new("test.txt"); - std::fs::write( - std::path::Path::new(&project.path).join(file_path), - "line1\nline2\n", - ) - .unwrap(); + let file_path = Path::new("test.txt"); + std::fs::write(Path::new(&project.path).join(file_path), "line1\nline2\n").unwrap(); let branch0 = 
create_virtual_branch( &gb_repository, @@ -326,11 +327,12 @@ fn test_create_branch_with_ownership() -> Result<()> { ) .expect("failed to create virtual branch"); - get_status_by_branch(&gb_repository, &project_repository).expect("failed to get status"); + virtual_branches::get_status_by_branch(&gb_repository, &project_repository) + .expect("failed to get status"); let current_session = gb_repository.get_or_create_current_session().unwrap(); let current_session_reader = sessions::Reader::open(&gb_repository, ¤t_session).unwrap(); - let branch_reader = branch::Reader::new(¤t_session_reader); + let branch_reader = virtual_branches::branch::Reader::new(¤t_session_reader); let branch0 = branch_reader.read(&branch0.id).unwrap(); let branch1 = create_virtual_branch( @@ -343,7 +345,7 @@ fn test_create_branch_with_ownership() -> Result<()> { ) .expect("failed to create virtual branch"); - let statuses = get_status_by_branch(&gb_repository, &project_repository) + let statuses = virtual_branches::get_status_by_branch(&gb_repository, &project_repository) .expect("failed to get status") .0; @@ -394,8 +396,8 @@ fn test_create_branch_in_the_middle() -> Result<()> { let current_session = gb_repository.get_or_create_current_session()?; let current_session_reader = sessions::Reader::open(&gb_repository, ¤t_session)?; - let mut branches = iterator::BranchIterator::new(¤t_session_reader)? - .collect::, reader::Error>>() + let mut branches = virtual_branches::Iterator::new(¤t_session_reader)? + .collect::, reader::Error>>() .expect("failed to read branches"); branches.sort_by_key(|b| b.order); assert_eq!(branches.len(), 3); @@ -426,8 +428,8 @@ fn test_create_branch_no_arguments() -> Result<()> { let current_session = gb_repository.get_or_create_current_session()?; let current_session_reader = sessions::Reader::open(&gb_repository, ¤t_session)?; - let branches = iterator::BranchIterator::new(¤t_session_reader)? - .collect::, reader::Error>>() + let branches = virtual_branches::Iterator::new(¤t_session_reader)? 
+ .collect::, reader::Error>>() .expect("failed to read branches"); assert_eq!(branches.len(), 1); assert_eq!(branches[0].name, "Virtual branch"); @@ -449,11 +451,8 @@ fn test_hunk_expantion() -> Result<()> { set_test_target(&gb_repository, &project_repository)?; - let file_path = std::path::Path::new("test.txt"); - std::fs::write( - std::path::Path::new(&project.path).join(file_path), - "line1\nline2\n", - )?; + let file_path = Path::new("test.txt"); + std::fs::write(Path::new(&project.path).join(file_path), "line1\nline2\n")?; let branch1_id = create_virtual_branch( &gb_repository, @@ -470,7 +469,7 @@ fn test_hunk_expantion() -> Result<()> { .expect("failed to create virtual branch") .id; - let statuses = get_status_by_branch(&gb_repository, &project_repository) + let statuses = virtual_branches::get_status_by_branch(&gb_repository, &project_repository) .expect("failed to get status") .0; @@ -487,7 +486,7 @@ fn test_hunk_expantion() -> Result<()> { update_branch( &gb_repository, &project_repository, - branch::BranchUpdateRequest { + virtual_branches::branch::BranchUpdateRequest { id: branch1_id, order: Some(1), ..Default::default() @@ -496,7 +495,7 @@ fn test_hunk_expantion() -> Result<()> { update_branch( &gb_repository, &project_repository, - branch::BranchUpdateRequest { + virtual_branches::branch::BranchUpdateRequest { id: branch2_id, order: Some(0), ..Default::default() @@ -505,11 +504,11 @@ fn test_hunk_expantion() -> Result<()> { // a slightly different hunk should still go to the same branch std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\n", )?; - let statuses = get_status_by_branch(&gb_repository, &project_repository) + let statuses = virtual_branches::get_status_by_branch(&gb_repository, &project_repository) .expect("failed to get status") .0; let files_by_branch_id = statuses @@ -534,7 +533,7 @@ fn test_get_status_files_by_branch_no_hunks_no_branches() -> Result<()> { set_test_target(&gb_repository, &project_repository)?; - let statuses = get_status_by_branch(&gb_repository, &project_repository) + let statuses = virtual_branches::get_status_by_branch(&gb_repository, &project_repository) .expect("failed to get status") .0; @@ -554,11 +553,8 @@ fn test_get_status_files_by_branch() -> Result<()> { set_test_target(&gb_repository, &project_repository)?; - let file_path = std::path::Path::new("test.txt"); - std::fs::write( - std::path::Path::new(&project.path).join(file_path), - "line1\nline2\n", - )?; + let file_path = Path::new("test.txt"); + std::fs::write(Path::new(&project.path).join(file_path), "line1\nline2\n")?; let branch1_id = create_virtual_branch( &gb_repository, @@ -575,7 +571,7 @@ fn test_get_status_files_by_branch() -> Result<()> { .expect("failed to create virtual branch") .id; - let statuses = get_status_by_branch(&gb_repository, &project_repository) + let statuses = virtual_branches::get_status_by_branch(&gb_repository, &project_repository) .expect("failed to get status") .0; let files_by_branch_id = statuses @@ -598,7 +594,7 @@ fn test_move_hunks_multiple_sources() -> Result<()> { gb_repository, .. 
} = Suite::default().new_case_with_files(HashMap::from([( - path::PathBuf::from("test.txt"), + PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\n", )])); @@ -627,14 +623,14 @@ fn test_move_hunks_multiple_sources() -> Result<()> { .id; std::fs::write( - std::path::Path::new(&project.path).join("test.txt"), - "line0\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\n", - )?; + Path::new(&project.path).join("test.txt"), + "line0\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\n", + )?; let current_session = gb_repository.get_or_create_current_session()?; let current_session_reader = sessions::Reader::open(&gb_repository, ¤t_session)?; - let branch_reader = branch::Reader::new(¤t_session_reader); - let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?; + let branch_reader = virtual_branches::branch::Reader::new(¤t_session_reader); + let branch_writer = virtual_branches::branch::Writer::new(&gb_repository, project.gb_dir())?; let mut branch2 = branch_reader.read(&branch2_id)?; branch2.ownership = BranchOwnershipClaims { claims: vec!["test.txt:1-5".parse()?], @@ -646,7 +642,7 @@ fn test_move_hunks_multiple_sources() -> Result<()> { }; branch_writer.write(&mut branch1)?; - let statuses = get_status_by_branch(&gb_repository, &project_repository) + let statuses = virtual_branches::get_status_by_branch(&gb_repository, &project_repository) .expect("failed to get status") .0; @@ -665,14 +661,14 @@ fn test_move_hunks_multiple_sources() -> Result<()> { update_branch( &gb_repository, &project_repository, - branch::BranchUpdateRequest { + virtual_branches::branch::BranchUpdateRequest { id: branch3_id, ownership: Some("test.txt:1-5,11-15".parse()?), ..Default::default() }, )?; - let statuses = get_status_by_branch(&gb_repository, &project_repository) + let statuses = virtual_branches::get_status_by_branch(&gb_repository, &project_repository) .expect("failed to get status") .0; @@ -686,15 +682,15 @@ fn test_move_hunks_multiple_sources() -> Result<()> { assert_eq!(files_by_branch_id[&branch2_id].len(), 0); assert_eq!(files_by_branch_id[&branch3_id].len(), 1); assert_eq!( - files_by_branch_id[&branch3_id][std::path::Path::new("test.txt")].len(), + files_by_branch_id[&branch3_id][Path::new("test.txt")].len(), 2 ); assert_eq!( - files_by_branch_id[&branch3_id][std::path::Path::new("test.txt")][0].diff, + files_by_branch_id[&branch3_id][Path::new("test.txt")][0].diff, "@@ -1,3 +1,4 @@\n+line0\n line1\n line2\n line3\n" ); assert_eq!( - files_by_branch_id[&branch3_id][std::path::Path::new("test.txt")][1].diff, + files_by_branch_id[&branch3_id][Path::new("test.txt")][1].diff, "@@ -10,3 +11,4 @@ line9\n line10\n line11\n line12\n+line13\n" ); Ok(()) @@ -705,19 +701,19 @@ fn test_move_hunks_partial_explicitly() -> Result<()> { let Case { project_repository, project, - gb_repository, + gb_repository, .. 
} = Suite::default().new_case_with_files(HashMap::from([( - path::PathBuf::from("test.txt"), - "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\n", + PathBuf::from("test.txt"), + "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\n", )])); set_test_target(&gb_repository, &project_repository)?; std::fs::write( - std::path::Path::new(&project.path).join("test.txt"), - "line0\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline14\n", - )?; + Path::new(&project.path).join("test.txt"), + "line0\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline14\n", + )?; let branch1_id = create_virtual_branch( &gb_repository, @@ -734,7 +730,7 @@ fn test_move_hunks_partial_explicitly() -> Result<()> { .expect("failed to create virtual branch") .id; - let statuses = get_status_by_branch(&gb_repository, &project_repository) + let statuses = virtual_branches::get_status_by_branch(&gb_repository, &project_repository) .expect("failed to get status") .0; let files_by_branch_id = statuses @@ -750,14 +746,14 @@ fn test_move_hunks_partial_explicitly() -> Result<()> { update_branch( &gb_repository, &project_repository, - branch::BranchUpdateRequest { + virtual_branches::branch::BranchUpdateRequest { id: branch2_id, ownership: Some("test.txt:1-5".parse()?), ..Default::default() }, )?; - let statuses = get_status_by_branch(&gb_repository, &project_repository) + let statuses = virtual_branches::get_status_by_branch(&gb_repository, &project_repository) .expect("failed to get status") .0; @@ -769,21 +765,21 @@ fn test_move_hunks_partial_explicitly() -> Result<()> { assert_eq!(files_by_branch_id.len(), 2); assert_eq!(files_by_branch_id[&branch1_id].len(), 1); assert_eq!( - files_by_branch_id[&branch1_id][std::path::Path::new("test.txt")].len(), + files_by_branch_id[&branch1_id][Path::new("test.txt")].len(), 1 ); assert_eq!( - files_by_branch_id[&branch1_id][std::path::Path::new("test.txt")][0].diff, + files_by_branch_id[&branch1_id][Path::new("test.txt")][0].diff, "@@ -11,3 +12,4 @@ line10\n line11\n line12\n line13\n+line14\n" ); assert_eq!(files_by_branch_id[&branch2_id].len(), 1); assert_eq!( - files_by_branch_id[&branch2_id][std::path::Path::new("test.txt")].len(), + files_by_branch_id[&branch2_id][Path::new("test.txt")].len(), 1 ); assert_eq!( - files_by_branch_id[&branch2_id][std::path::Path::new("test.txt")][0].diff, + files_by_branch_id[&branch2_id][Path::new("test.txt")][0].diff, "@@ -1,3 +1,4 @@\n+line0\n line1\n line2\n line3\n" ); @@ -798,16 +794,16 @@ fn test_add_new_hunk_to_the_end() -> Result<()> { gb_repository, .. 
} = Suite::default().new_case_with_files(HashMap::from([( - path::PathBuf::from("test.txt"), + PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline13\nline14\n", )])); set_test_target(&gb_repository, &project_repository)?; std::fs::write( - std::path::Path::new(&project.path).join("test.txt"), - "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline14\nline15\n", - )?; + Path::new(&project.path).join("test.txt"), + "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline14\nline15\n", + )?; create_virtual_branch( &gb_repository, @@ -816,29 +812,29 @@ fn test_add_new_hunk_to_the_end() -> Result<()> { ) .expect("failed to create virtual branch"); - let statuses = get_status_by_branch(&gb_repository, &project_repository) + let statuses = virtual_branches::get_status_by_branch(&gb_repository, &project_repository) .expect("failed to get status") .0; assert_eq!( - statuses[0].1[std::path::Path::new("test.txt")][0].diff, + statuses[0].1[Path::new("test.txt")][0].diff, "@@ -11,5 +11,5 @@ line10\n line11\n line12\n line13\n-line13\n line14\n+line15\n" ); std::fs::write( - std::path::Path::new(&project.path).join("test.txt"), - "line0\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline14\nline15\n", - )?; + Path::new(&project.path).join("test.txt"), + "line0\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline14\nline15\n", + )?; - let statuses = get_status_by_branch(&gb_repository, &project_repository) + let statuses = virtual_branches::get_status_by_branch(&gb_repository, &project_repository) .expect("failed to get status") .0; assert_eq!( - statuses[0].1[std::path::Path::new("test.txt")][0].diff, + statuses[0].1[Path::new("test.txt")][0].diff, "@@ -11,5 +12,5 @@ line10\n line11\n line12\n line13\n-line13\n line14\n+line15\n" ); assert_eq!( - statuses[0].1[std::path::Path::new("test.txt")][1].diff, + statuses[0].1[Path::new("test.txt")][1].diff, "@@ -1,3 +1,4 @@\n+line0\n line1\n line2\n line3\n" ); @@ -856,12 +852,12 @@ fn test_merge_vbranch_upstream_clean_rebase() -> Result<()> { } = suite.new_case(); // create a commit and set the target - let file_path = std::path::Path::new("test.txt"); + let file_path = Path::new("test.txt"); std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\n", )?; - tests::commit_all(&project_repository.git_repository); + commit_all(&project_repository.git_repository); let target_oid = project_repository .git_repository .head() @@ -870,11 +866,11 @@ fn test_merge_vbranch_upstream_clean_rebase() -> Result<()> { .unwrap(); std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\nupstream\n", )?; // add a commit to the target branch it's pointing to so there is something "upstream" - tests::commit_all(&project_repository.git_repository); + commit_all(&project_repository.git_repository); let last_push = project_repository .git_repository .head() @@ -884,11 +880,11 @@ fn test_merge_vbranch_upstream_clean_rebase() -> Result<()> { // coworker adds some work std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\nupstream\ncoworker work\n", )?; - 
tests::commit_all(&project_repository.git_repository); + commit_all(&project_repository.git_repository); let coworker_work = project_repository .git_repository .head() @@ -906,28 +902,24 @@ fn test_merge_vbranch_upstream_clean_rebase() -> Result<()> { // revert to our file std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\nupstream\n", )?; set_test_target(&gb_repository, &project_repository)?; - target::Writer::new(&gb_repository, project_repository.project().gb_dir())?.write_default( - &target::Target { - branch: "refs/remotes/origin/master".parse().unwrap(), - remote_url: "origin".to_string(), - sha: target_oid, - }, - )?; + virtual_branches::target::Writer::new(&gb_repository, project_repository.project().gb_dir())? + .write_default(&virtual_branches::target::Target { + branch: "refs/remotes/origin/master".parse().unwrap(), + remote_url: "origin".to_string(), + sha: target_oid, + })?; // add some uncommitted work - let file_path2 = std::path::Path::new("test2.txt"); - std::fs::write( - std::path::Path::new(&project.path).join(file_path2), - "file2\n", - )?; + let file_path2 = Path::new("test2.txt"); + std::fs::write(Path::new(&project.path).join(file_path2), "file2\n")?; let remote_branch: git::RemoteRefname = "refs/remotes/origin/master".parse().unwrap(); - let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?; + let branch_writer = virtual_branches::branch::Writer::new(&gb_repository, project.gb_dir())?; let mut branch = create_virtual_branch( &gb_repository, &project_repository, @@ -941,7 +933,8 @@ fn test_merge_vbranch_upstream_clean_rebase() -> Result<()> { .context("failed to write target branch after push")?; // create the branch - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch1 = &branches[0]; assert_eq!(branch1.files.len(), 1); assert_eq!(branch1.commits.len(), 1); @@ -955,15 +948,16 @@ fn test_merge_vbranch_upstream_clean_rebase() -> Result<()> { None, )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch1 = &branches[0]; - let contents = std::fs::read(std::path::Path::new(&project.path).join(file_path))?; + let contents = std::fs::read(Path::new(&project.path).join(file_path))?; assert_eq!( "line1\nline2\nline3\nline4\nupstream\ncoworker work\n", String::from_utf8(contents)? 
); - let contents = std::fs::read(std::path::Path::new(&project.path).join(file_path2))?; + let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; assert_eq!("file2\n", String::from_utf8(contents)?); assert_eq!(branch1.files.len(), 1); assert_eq!(branch1.commits.len(), 2); @@ -982,12 +976,12 @@ fn test_merge_vbranch_upstream_conflict() -> Result<()> { } = Suite::default().new_case(); // create a commit and set the target - let file_path = std::path::Path::new("test.txt"); + let file_path = Path::new("test.txt"); std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\n", )?; - tests::commit_all(&project_repository.git_repository); + commit_all(&project_repository.git_repository); let target_oid = project_repository .git_repository .head() @@ -996,11 +990,11 @@ fn test_merge_vbranch_upstream_conflict() -> Result<()> { .unwrap(); std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\nupstream\n", )?; // add a commit to the target branch it's pointing to so there is something "upstream" - tests::commit_all(&project_repository.git_repository); + commit_all(&project_repository.git_repository); let last_push = project_repository .git_repository .head() @@ -1010,11 +1004,11 @@ fn test_merge_vbranch_upstream_conflict() -> Result<()> { // coworker adds some work std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\nupstream\ncoworker work\n", )?; - tests::commit_all(&project_repository.git_repository); + commit_all(&project_repository.git_repository); let coworker_work = project_repository .git_repository .head() @@ -1032,25 +1026,27 @@ fn test_merge_vbranch_upstream_conflict() -> Result<()> { // revert to our file std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\nupstream\n", )?; set_test_target(&gb_repository, &project_repository)?; - target::Writer::new(&gb_repository, project.gb_dir())?.write_default(&target::Target { - branch: "refs/remotes/origin/master".parse().unwrap(), - remote_url: "origin".to_string(), - sha: target_oid, - })?; + virtual_branches::target::Writer::new(&gb_repository, project.gb_dir())?.write_default( + &virtual_branches::target::Target { + branch: "refs/remotes/origin/master".parse().unwrap(), + remote_url: "origin".to_string(), + sha: target_oid, + }, + )?; // add some uncommitted work std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\nupstream\nother side\n", )?; let remote_branch: git::RemoteRefname = "refs/remotes/origin/master".parse().unwrap(); - let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?; + let branch_writer = virtual_branches::branch::Writer::new(&gb_repository, project.gb_dir())?; let mut branch = create_virtual_branch( &gb_repository, &project_repository, @@ -1064,7 +1060,8 @@ fn test_merge_vbranch_upstream_conflict() -> Result<()> { .context("failed to write target branch after push")?; // create the branch - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch1 = &branches[0]; assert_eq!(branch1.files.len(), 1); @@ 
-1073,9 +1070,10 @@ fn test_merge_vbranch_upstream_conflict() -> Result<()> { merge_virtual_branch_upstream(&gb_repository, &project_repository, &branch1.id, None, None)?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch1 = &branches[0]; - let contents = std::fs::read(std::path::Path::new(&project.path).join(file_path))?; + let contents = std::fs::read(Path::new(&project.path).join(file_path))?; assert_eq!( "line1\nline2\nline3\nline4\nupstream\n<<<<<<< ours\nother side\n=======\ncoworker work\n>>>>>>> theirs\n", @@ -1088,12 +1086,13 @@ fn test_merge_vbranch_upstream_conflict() -> Result<()> { // fix the conflict std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\nupstream\nother side\ncoworker work\n", )?; // make gb see the conflict resolution - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; assert!(branches[0].conflicted); // commit the merge resolution @@ -1108,7 +1107,8 @@ fn test_merge_vbranch_upstream_conflict() -> Result<()> { false, )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch1 = &branches[0]; assert!(!branch1.conflicted); assert_eq!(branch1.files.len(), 0); @@ -1130,14 +1130,14 @@ fn test_unapply_ownership_partial() -> Result<()> { gb_repository, .. } = Suite::default().new_case_with_files(HashMap::from([( - path::PathBuf::from("test.txt"), + PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n", )])); set_test_target(&gb_repository, &project_repository)?; std::fs::write( - std::path::Path::new(&project.path).join("test.txt"), + Path::new(&project.path).join("test.txt"), "line1\nline2\nline3\nline4\nbranch1\n", )?; @@ -1148,14 +1148,15 @@ fn test_unapply_ownership_partial() -> Result<()> { ) .expect("failed to create virtual branch"); - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; assert_eq!(branches.len(), 1); assert_eq!(branches[0].files.len(), 1); assert_eq!(branches[0].ownership.claims.len(), 1); assert_eq!(branches[0].files[0].hunks.len(), 1); assert_eq!(branches[0].ownership.claims[0].hunks.len(), 1); assert_eq!( - fs::read_to_string(std::path::Path::new(&project.path).join("test.txt"))?, + std::fs::read_to_string(Path::new(&project.path).join("test.txt"))?, "line1\nline2\nline3\nline4\nbranch1\n" ); @@ -1166,12 +1167,13 @@ fn test_unapply_ownership_partial() -> Result<()> { ) .unwrap(); - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; assert_eq!(branches.len(), 1); assert_eq!(branches[0].files.len(), 0); assert_eq!(branches[0].ownership.claims.len(), 0); assert_eq!( - fs::read_to_string(std::path::Path::new(&project.path).join("test.txt"))?, + std::fs::read_to_string(Path::new(&project.path).join("test.txt"))?, "line1\nline2\nline3\nline4\n" ); @@ -1179,7 +1181,7 @@ fn test_unapply_ownership_partial() -> Result<()> { } #[test] -fn 
test_apply_unapply_branch() -> Result<()> { +fn unapply_branch() -> Result<()> { let Case { project, project_repository, @@ -1188,24 +1190,21 @@ fn test_apply_unapply_branch() -> Result<()> { } = Suite::default().new_case(); // create a commit and set the target - let file_path = std::path::Path::new("test.txt"); + let file_path = Path::new("test.txt"); std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\n", )?; - tests::commit_all(&project_repository.git_repository); + commit_all(&project_repository.git_repository); set_test_target(&gb_repository, &project_repository)?; std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\nbranch1\n", )?; - let file_path2 = std::path::Path::new("test2.txt"); - std::fs::write( - std::path::Path::new(&project.path).join(file_path2), - "line5\nline6\n", - )?; + let file_path2 = Path::new("test2.txt"); + std::fs::write(Path::new(&project.path).join(file_path2), "line5\nline6\n")?; let branch1_id = create_virtual_branch( &gb_repository, @@ -1225,48 +1224,51 @@ fn test_apply_unapply_branch() -> Result<()> { update_branch( &gb_repository, &project_repository, - branch::BranchUpdateRequest { + virtual_branches::branch::BranchUpdateRequest { id: branch2_id, ownership: Some("test2.txt:1-3".parse()?), ..Default::default() }, )?; - let contents = std::fs::read(std::path::Path::new(&project.path).join(file_path))?; + let contents = std::fs::read(Path::new(&project.path).join(file_path))?; assert_eq!( "line1\nline2\nline3\nline4\nbranch1\n", String::from_utf8(contents)? ); - let contents = std::fs::read(std::path::Path::new(&project.path).join(file_path2))?; + let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; assert_eq!("line5\nline6\n", String::from_utf8(contents)?); - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert_eq!(branch.files.len(), 1); assert!(branch.active); - unapply_branch(&gb_repository, &project_repository, &branch1_id)?; + virtual_branches::unapply_branch(&gb_repository, &project_repository, &branch1_id)?; - let contents = std::fs::read(std::path::Path::new(&project.path).join(file_path))?; + let contents = std::fs::read(Path::new(&project.path).join(file_path))?; assert_eq!("line1\nline2\nline3\nline4\n", String::from_utf8(contents)?); - let contents = std::fs::read(std::path::Path::new(&project.path).join(file_path2))?; + let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; assert_eq!("line5\nline6\n", String::from_utf8(contents)?); - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert_eq!(branch.files.len(), 1); assert!(!branch.active); apply_branch(&gb_repository, &project_repository, &branch1_id, None, None)?; - let contents = std::fs::read(std::path::Path::new(&project.path).join(file_path))?; + let contents = std::fs::read(Path::new(&project.path).join(file_path))?; assert_eq!( "line1\nline2\nline3\nline4\nbranch1\n", String::from_utf8(contents)? 
); - let contents = std::fs::read(std::path::Path::new(&project.path).join(file_path2))?; + let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; assert_eq!("line5\nline6\n", String::from_utf8(contents)?); - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert_eq!(branch.files.len(), 1); assert!(branch.active); @@ -1284,27 +1286,18 @@ fn test_apply_unapply_added_deleted_files() -> Result<()> { } = Suite::default().new_case(); // create a commit and set the target - let file_path = std::path::Path::new("test.txt"); - std::fs::write( - std::path::Path::new(&project.path).join(file_path), - "file1\n", - )?; - let file_path2 = std::path::Path::new("test2.txt"); - std::fs::write( - std::path::Path::new(&project.path).join(file_path2), - "file2\n", - )?; - tests::commit_all(&project_repository.git_repository); + let file_path = Path::new("test.txt"); + std::fs::write(Path::new(&project.path).join(file_path), "file1\n")?; + let file_path2 = Path::new("test2.txt"); + std::fs::write(Path::new(&project.path).join(file_path2), "file2\n")?; + commit_all(&project_repository.git_repository); set_test_target(&gb_repository, &project_repository)?; // rm file_path2, add file3 - std::fs::remove_file(std::path::Path::new(&project.path).join(file_path2))?; - let file_path3 = std::path::Path::new("test3.txt"); - std::fs::write( - std::path::Path::new(&project.path).join(file_path3), - "file3\n", - )?; + std::fs::remove_file(Path::new(&project.path).join(file_path2))?; + let file_path3 = Path::new("test3.txt"); + std::fs::write(Path::new(&project.path).join(file_path3), "file3\n")?; let branch2_id = create_virtual_branch( &gb_repository, @@ -1324,7 +1317,7 @@ fn test_apply_unapply_added_deleted_files() -> Result<()> { update_branch( &gb_repository, &project_repository, - branch::BranchUpdateRequest { + virtual_branches::branch::BranchUpdateRequest { id: branch2_id, ownership: Some("test2.txt:0-0".parse()?), ..Default::default() @@ -1333,33 +1326,29 @@ fn test_apply_unapply_added_deleted_files() -> Result<()> { update_branch( &gb_repository, &project_repository, - branch::BranchUpdateRequest { + virtual_branches::branch::BranchUpdateRequest { id: branch3_id, ownership: Some("test3.txt:1-2".parse()?), ..Default::default() }, )?; - unapply_branch(&gb_repository, &project_repository, &branch2_id)?; + virtual_branches::unapply_branch(&gb_repository, &project_repository, &branch2_id)?; // check that file2 is back - let contents = std::fs::read(std::path::Path::new(&project.path).join(file_path2))?; + let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; assert_eq!("file2\n", String::from_utf8(contents)?); - unapply_branch(&gb_repository, &project_repository, &branch3_id)?; + virtual_branches::unapply_branch(&gb_repository, &project_repository, &branch3_id)?; // check that file3 is gone - assert!(!std::path::Path::new(&project.path) - .join(file_path3) - .exists()); + assert!(!Path::new(&project.path).join(file_path3).exists()); apply_branch(&gb_repository, &project_repository, &branch2_id, None, None)?; // check that file2 is gone - assert!(!std::path::Path::new(&project.path) - .join(file_path2) - .exists()); + assert!(!Path::new(&project.path).join(file_path2).exists()); apply_branch(&gb_repository, &project_repository, &branch3_id, None, None)?; // check that file3 
is back - let contents = std::fs::read(std::path::Path::new(&project.path).join(file_path3))?; + let contents = std::fs::read(Path::new(&project.path).join(file_path3))?; assert_eq!("file3\n", String::from_utf8(contents)?); Ok(()) @@ -1375,24 +1364,21 @@ fn test_detect_mergeable_branch() -> Result<()> { } = Suite::default().new_case(); // create a commit and set the target - let file_path = std::path::Path::new("test.txt"); + let file_path = Path::new("test.txt"); std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\n", )?; - tests::commit_all(&project_repository.git_repository); + commit_all(&project_repository.git_repository); set_test_target(&gb_repository, &project_repository)?; std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\nbranch1\n", )?; - let file_path4 = std::path::Path::new("test4.txt"); - std::fs::write( - std::path::Path::new(&project.path).join(file_path4), - "line5\nline6\n", - )?; + let file_path4 = Path::new("test4.txt"); + std::fs::write(Path::new(&project.path).join(file_path4), "line5\nline6\n")?; let branch1_id = create_virtual_branch( &gb_repository, @@ -1411,13 +1397,13 @@ fn test_detect_mergeable_branch() -> Result<()> { let current_session = gb_repository.get_or_create_current_session()?; let current_session_reader = sessions::Reader::open(&gb_repository, ¤t_session)?; - let branch_reader = branch::Reader::new(¤t_session_reader); - let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?; + let branch_reader = virtual_branches::branch::Reader::new(¤t_session_reader); + let branch_writer = virtual_branches::branch::Writer::new(&gb_repository, project.gb_dir())?; update_branch( &gb_repository, &project_repository, - branch::BranchUpdateRequest { + virtual_branches::branch::BranchUpdateRequest { id: branch2_id, ownership: Some("test4.txt:1-3".parse()?), ..Default::default() @@ -1426,8 +1412,8 @@ fn test_detect_mergeable_branch() -> Result<()> { .expect("failed to update branch"); // unapply both branches and create some conflicting ones - unapply_branch(&gb_repository, &project_repository, &branch1_id)?; - unapply_branch(&gb_repository, &project_repository, &branch2_id)?; + virtual_branches::unapply_branch(&gb_repository, &project_repository, &branch1_id)?; + virtual_branches::unapply_branch(&gb_repository, &project_repository, &branch2_id)?; project_repository .git_repository @@ -1438,10 +1424,10 @@ fn test_detect_mergeable_branch() -> Result<()> { // create an upstream remote conflicting commit std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\nupstream\n", )?; - tests::commit_all(&project_repository.git_repository); + commit_all(&project_repository.git_repository); let up_target = project_repository .git_repository .head() @@ -1457,15 +1443,12 @@ fn test_detect_mergeable_branch() -> Result<()> { // revert content and write a mergeable branch std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\n", )?; - let file_path3 = std::path::Path::new("test3.txt"); - std::fs::write( - std::path::Path::new(&project.path).join(file_path3), - "file3\n", - )?; - tests::commit_all(&project_repository.git_repository); + let file_path3 = Path::new("test3.txt"); + 
std::fs::write(Path::new(&project.path).join(file_path3), "file3\n")?; + commit_all(&project_repository.git_repository); let up_target = project_repository .git_repository .head() @@ -1479,7 +1462,7 @@ fn test_detect_mergeable_branch() -> Result<()> { "update target", )?; // remove file_path3 - std::fs::remove_file(std::path::Path::new(&project.path).join(file_path3))?; + std::fs::remove_file(Path::new(&project.path).join(file_path3))?; project_repository .git_repository @@ -1505,14 +1488,14 @@ fn test_detect_mergeable_branch() -> Result<()> { // branch3 conflicts with branch1 and remote_branch std::fs::write( - std::path::Path::new(&project.path).join(file_path), + Path::new(&project.path).join(file_path), "line1\nline2\nline3\nline4\nbranch3\n", )?; // branch4 conflicts with branch2 - let file_path2 = std::path::Path::new("test2.txt"); + let file_path2 = Path::new("test2.txt"); std::fs::write( - std::path::Path::new(&project.path).join(file_path2), + Path::new(&project.path).join(file_path2), "line1\nline2\nline3\nline4\nbranch4\n", )?; @@ -1522,7 +1505,8 @@ fn test_detect_mergeable_branch() -> Result<()> { }; branch_writer.write(&mut branch4)?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; assert_eq!(branches.len(), 4); let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); @@ -1575,9 +1559,9 @@ fn test_upstream_integrated_vbranch() -> Result<()> { gb_repository, .. } = Suite::default().new_case_with_files(HashMap::from([ - (path::PathBuf::from("test.txt"), "file1\n"), - (path::PathBuf::from("test2.txt"), "file2\n"), - (path::PathBuf::from("test3.txt"), "file3\n"), + (PathBuf::from("test.txt"), "file1\n"), + (PathBuf::from("test2.txt"), "file2\n"), + (PathBuf::from("test3.txt"), "file3\n"), ])); let base_commit = project_repository @@ -1588,10 +1572,10 @@ fn test_upstream_integrated_vbranch() -> Result<()> { .unwrap(); std::fs::write( - std::path::Path::new(&project.path).join("test.txt"), + Path::new(&project.path).join("test.txt"), "file1\nversion2\n", )?; - tests::commit_all(&project_repository.git_repository); + commit_all(&project_repository.git_repository); let upstream_commit = project_repository .git_repository @@ -1606,17 +1590,19 @@ fn test_upstream_integrated_vbranch() -> Result<()> { "update target", )?; - target::Writer::new(&gb_repository, project_repository.project().gb_dir())?.write_default( - &target::Target { - branch: "refs/remotes/origin/master".parse().unwrap(), - remote_url: "http://origin.com/project".to_string(), - sha: base_commit, - }, - )?; + virtual_branches::target::Writer::new(&gb_repository, project_repository.project().gb_dir())? 
+ .write_default(&virtual_branches::target::Target { + branch: "refs/remotes/origin/master".parse().unwrap(), + remote_url: "http://origin.com/project".to_string(), + sha: base_commit, + })?; project_repository .git_repository .remote("origin", &"http://origin.com/project".parse().unwrap())?; - super::integration::update_gitbutler_integration(&gb_repository, &project_repository)?; + virtual_branches::integration::update_gitbutler_integration( + &gb_repository, + &project_repository, + )?; // create vbranches, one integrated, one not let branch1_id = create_virtual_branch( @@ -1642,19 +1628,19 @@ fn test_upstream_integrated_vbranch() -> Result<()> { .id; std::fs::write( - std::path::Path::new(&project.path).join("test2.txt"), + Path::new(&project.path).join("test2.txt"), "file2\nversion2\n", )?; std::fs::write( - std::path::Path::new(&project.path).join("test3.txt"), + Path::new(&project.path).join("test3.txt"), "file3\nversion2\n", )?; update_branch( &gb_repository, &project_repository, - branch::BranchUpdateRequest { + virtual_branches::branch::BranchUpdateRequest { id: branch1_id, name: Some("integrated".to_string()), ownership: Some("test.txt:1-2".parse()?), @@ -1665,7 +1651,7 @@ fn test_upstream_integrated_vbranch() -> Result<()> { update_branch( &gb_repository, &project_repository, - branch::BranchUpdateRequest { + virtual_branches::branch::BranchUpdateRequest { id: branch2_id, name: Some("not integrated".to_string()), ownership: Some("test2.txt:1-2".parse()?), @@ -1676,7 +1662,7 @@ fn test_upstream_integrated_vbranch() -> Result<()> { update_branch( &gb_repository, &project_repository, - branch::BranchUpdateRequest { + virtual_branches::branch::BranchUpdateRequest { id: branch3_id, name: Some("not committed".to_string()), ownership: Some("test3.txt:1-2".parse()?), @@ -1706,7 +1692,8 @@ fn test_upstream_integrated_vbranch() -> Result<()> { false, )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert!(branch1.commits.iter().any(|c| c.is_integrated)); @@ -1734,8 +1721,8 @@ fn test_commit_same_hunk_twice() -> Result<()> { gb_repository, .. 
} = Suite::default().new_case_with_files(HashMap::from([( - path::PathBuf::from("test.txt"), - "line1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", + PathBuf::from("test.txt"), + "line1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", )])); set_test_target(&gb_repository, &project_repository)?; @@ -1749,11 +1736,12 @@ fn test_commit_same_hunk_twice() -> Result<()> { .id; std::fs::write( - std::path::Path::new(&project.path).join("test.txt"), - "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", - )?; + Path::new(&project.path).join("test.txt"), + "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", + )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert_eq!(branch.files.len(), 1); @@ -1772,7 +1760,8 @@ fn test_commit_same_hunk_twice() -> Result<()> { false, )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert_eq!(branch.files.len(), 0, "no files expected"); @@ -1788,11 +1777,12 @@ fn test_commit_same_hunk_twice() -> Result<()> { // update same lines std::fs::write( - std::path::Path::new(&project.path).join("test.txt"), - "line1\nPATCH1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", - )?; + Path::new(&project.path).join("test.txt"), + "line1\nPATCH1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", + )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert_eq!(branch.files.len(), 1, "one file should be changed"); @@ -1809,7 +1799,8 @@ fn test_commit_same_hunk_twice() -> Result<()> { false, )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert_eq!( @@ -1835,8 +1826,8 @@ fn test_commit_same_file_twice() -> Result<()> { gb_repository, .. 
} = Suite::default().new_case_with_files(HashMap::from([( - path::PathBuf::from("test.txt"), - "line1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", + PathBuf::from("test.txt"), + "line1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", )])); set_test_target(&gb_repository, &project_repository)?; @@ -1850,11 +1841,12 @@ fn test_commit_same_file_twice() -> Result<()> { .id; std::fs::write( - std::path::Path::new(&project.path).join("test.txt"), - "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", - )?; + Path::new(&project.path).join("test.txt"), + "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", + )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert_eq!(branch.files.len(), 1); @@ -1873,7 +1865,8 @@ fn test_commit_same_file_twice() -> Result<()> { false, )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert_eq!(branch.files.len(), 0, "no files expected"); @@ -1889,11 +1882,12 @@ fn test_commit_same_file_twice() -> Result<()> { // add second patch std::fs::write( - std::path::Path::new(&project.path).join("file.txt"), - "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\npatch2\nline11\nline12\n", - )?; + Path::new(&project.path).join("file.txt"), + "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\npatch2\nline11\nline12\n", + )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert_eq!(branch.files.len(), 1, "one file should be changed"); @@ -1910,7 +1904,8 @@ fn test_commit_same_file_twice() -> Result<()> { false, )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert_eq!( @@ -1936,8 +1931,8 @@ fn test_commit_partial_by_hunk() -> Result<()> { gb_repository, .. 
} = Suite::default().new_case_with_files(HashMap::from([( - path::PathBuf::from("test.txt"), - "line1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", + PathBuf::from("test.txt"), + "line1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", )])); set_test_target(&gb_repository, &project_repository)?; @@ -1951,11 +1946,12 @@ fn test_commit_partial_by_hunk() -> Result<()> { .id; std::fs::write( - std::path::Path::new(&project.path).join("test.txt"), - "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\npatch2\nline11\nline12\n", - )?; + Path::new(&project.path).join("test.txt"), + "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\npatch2\nline11\nline12\n", + )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert_eq!(branch.files.len(), 1); @@ -1974,7 +1970,8 @@ fn test_commit_partial_by_hunk() -> Result<()> { false, )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert_eq!(branch.files.len(), 1); @@ -1994,7 +1991,8 @@ fn test_commit_partial_by_hunk() -> Result<()> { false, )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); assert_eq!(branch.files.len(), 0); @@ -2015,8 +2013,8 @@ fn test_commit_partial_by_file() -> Result<()> { gb_repository, .. 
} = Suite::default().new_case_with_files(HashMap::from([ - (path::PathBuf::from("test.txt"), "file1\n"), - (path::PathBuf::from("test2.txt"), "file2\n"), + (PathBuf::from("test.txt"), "file1\n"), + (PathBuf::from("test2.txt"), "file2\n"), ])); let commit1_oid = project_repository @@ -2033,13 +2031,10 @@ fn test_commit_partial_by_file() -> Result<()> { set_test_target(&gb_repository, &project_repository)?; // remove file - std::fs::remove_file(std::path::Path::new(&project.path).join("test2.txt"))?; + std::fs::remove_file(Path::new(&project.path).join("test2.txt"))?; // add new file - let file_path3 = std::path::Path::new("test3.txt"); - std::fs::write( - std::path::Path::new(&project.path).join(file_path3), - "file3\n", - )?; + let file_path3 = Path::new("test3.txt"); + std::fs::write(Path::new(&project.path).join(file_path3), "file3\n")?; let branch1_id = create_virtual_branch( &gb_repository, @@ -2061,7 +2056,8 @@ fn test_commit_partial_by_file() -> Result<()> { false, )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); // branch one test.txt has just the 1st and 3rd hunks applied @@ -2091,8 +2087,8 @@ fn test_commit_add_and_delete_files() -> Result<()> { gb_repository, .. } = Suite::default().new_case_with_files(HashMap::from([ - (path::PathBuf::from("test.txt"), "file1\n"), - (path::PathBuf::from("test2.txt"), "file2\n"), + (PathBuf::from("test.txt"), "file1\n"), + (PathBuf::from("test2.txt"), "file2\n"), ])); let commit1_oid = project_repository @@ -2109,13 +2105,10 @@ fn test_commit_add_and_delete_files() -> Result<()> { set_test_target(&gb_repository, &project_repository)?; // remove file - std::fs::remove_file(std::path::Path::new(&project.path).join("test2.txt"))?; + std::fs::remove_file(Path::new(&project.path).join("test2.txt"))?; // add new file - let file_path3 = std::path::Path::new("test3.txt"); - std::fs::write( - std::path::Path::new(&project.path).join(file_path3), - "file3\n", - )?; + let file_path3 = Path::new("test3.txt"); + std::fs::write(Path::new(&project.path).join(file_path3), "file3\n")?; let branch1_id = create_virtual_branch( &gb_repository, @@ -2137,7 +2130,8 @@ fn test_commit_add_and_delete_files() -> Result<()> { false, )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); // branch one test.txt has just the 1st and 3rd hunks applied @@ -2168,25 +2162,25 @@ fn test_commit_executable_and_symlinks() -> Result<()> { gb_repository, .. 
} = Suite::default().new_case_with_files(HashMap::from([ - (path::PathBuf::from("test.txt"), "file1\n"), - (path::PathBuf::from("test2.txt"), "file2\n"), + (PathBuf::from("test.txt"), "file1\n"), + (PathBuf::from("test2.txt"), "file2\n"), ])); set_test_target(&gb_repository, &project_repository)?; // add symlinked file - let file_path3 = std::path::Path::new("test3.txt"); - let src = std::path::Path::new(&project.path).join("test2.txt"); - let dst = std::path::Path::new(&project.path).join(file_path3); + let file_path3 = Path::new("test3.txt"); + let src = Path::new(&project.path).join("test2.txt"); + let dst = Path::new(&project.path).join(file_path3); symlink(src, dst)?; // add executable - let file_path4 = std::path::Path::new("test4.bin"); - let exec = std::path::Path::new(&project.path).join(file_path4); + let file_path4 = Path::new("test4.bin"); + let exec = Path::new(&project.path).join(file_path4); std::fs::write(&exec, "exec\n")?; - let permissions = fs::metadata(&exec)?.permissions(); + let permissions = std::fs::metadata(&exec)?.permissions(); let new_permissions = Permissions::from_mode(permissions.mode() | 0o111); // Add execute permission - fs::set_permissions(&exec, new_permissions)?; + std::fs::set_permissions(&exec, new_permissions)?; let branch1_id = create_virtual_branch( &gb_repository, @@ -2208,7 +2202,8 @@ fn test_commit_executable_and_symlinks() -> Result<()> { false, )?; - let (branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); let commit = &branch1.commits[0].id; @@ -2237,7 +2232,7 @@ fn tree_to_file_list(repository: &git::Repository, tree: &git::Tree) -> Vec Result<()> { set_test_target(&gb_repository, &project_repository)?; - integration::verify_branch(&gb_repository, &project_repository).unwrap(); + verify_branch(&gb_repository, &project_repository).unwrap(); // write two commits - let file_path2 = std::path::Path::new("test2.txt"); - std::fs::write(std::path::Path::new(&project.path).join(file_path2), "file")?; - tests::commit_all(&project_repository.git_repository); - std::fs::write( - std::path::Path::new(&project.path).join(file_path2), - "update", - )?; - tests::commit_all(&project_repository.git_repository); + let file_path2 = Path::new("test2.txt"); + std::fs::write(Path::new(&project.path).join(file_path2), "file")?; + commit_all(&project_repository.git_repository); + std::fs::write(Path::new(&project.path).join(file_path2), "update")?; + commit_all(&project_repository.git_repository); // verify puts commits onto the virtual branch - integration::verify_branch(&gb_repository, &project_repository).unwrap(); + verify_branch(&gb_repository, &project_repository).unwrap(); // one virtual branch with two commits was created - let (virtual_branches, _, _) = list_virtual_branches(&gb_repository, &project_repository)?; + let (virtual_branches, _, _) = + virtual_branches::list_virtual_branches(&gb_repository, &project_repository)?; assert_eq!(virtual_branches.len(), 1); let branch = &virtual_branches.first().unwrap(); @@ -2330,13 +2323,13 @@ fn test_verify_branch_not_integration() -> Result<()> { set_test_target(&gb_repository, &project_repository)?; - integration::verify_branch(&gb_repository, &project_repository).unwrap(); + verify_branch(&gb_repository, &project_repository).unwrap(); project_repository .git_repository 
         .set_head(&"refs/heads/master".parse().unwrap())?;
 
-    let verify_result = integration::verify_branch(&gb_repository, &project_repository);
+    let verify_result = verify_branch(&gb_repository, &project_repository);
     assert!(verify_result.is_err());
     assert_eq!(
         verify_result.unwrap_err().to_string(),
@@ -2355,14 +2348,8 @@ fn test_pre_commit_hook_rejection() -> Result<()> {
         project_repository,
         ..
     } = suite.new_case_with_files(HashMap::from([
-        (
-            path::PathBuf::from("test.txt"),
-            "line1\nline2\nline3\nline4\n",
-        ),
-        (
-            path::PathBuf::from("test2.txt"),
-            "line5\nline6\nline7\nline8\n",
-        ),
+        (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"),
+        (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"),
     ]));
 
     set_test_target(&gb_repository, &project_repository)?;
@@ -2376,7 +2363,7 @@ fn test_pre_commit_hook_rejection() -> Result<()> {
     .id;
 
     std::fs::write(
-        std::path::Path::new(&project.path).join("test.txt"),
+        Path::new(&project.path).join("test.txt"),
         "line0\nline1\nline2\nline3\nline4\n",
     )?;
 
@@ -2424,14 +2411,8 @@ fn test_post_commit_hook() -> Result<()> {
         project_repository,
         ..
     } = suite.new_case_with_files(HashMap::from([
-        (
-            path::PathBuf::from("test.txt"),
-            "line1\nline2\nline3\nline4\n",
-        ),
-        (
-            path::PathBuf::from("test2.txt"),
-            "line5\nline6\nline7\nline8\n",
-        ),
+        (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"),
+        (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"),
     ]));
 
     set_test_target(&gb_repository, &project_repository)?;
@@ -2445,7 +2426,7 @@ fn test_post_commit_hook() -> Result<()> {
     .id;
 
     std::fs::write(
-        std::path::Path::new(&project.path).join("test.txt"),
+        Path::new(&project.path).join("test.txt"),
         "line0\nline1\nline2\nline3\nline4\n",
     )?;
 
@@ -2493,14 +2474,8 @@ fn test_commit_msg_hook_rejection() -> Result<()> {
         project_repository,
         ..
     } = suite.new_case_with_files(HashMap::from([
-        (
-            path::PathBuf::from("test.txt"),
-            "line1\nline2\nline3\nline4\n",
-        ),
-        (
-            path::PathBuf::from("test2.txt"),
-            "line5\nline6\nline7\nline8\n",
-        ),
+        (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"),
+        (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"),
     ]));
 
     set_test_target(&gb_repository, &project_repository)?;
@@ -2514,7 +2489,7 @@ fn test_commit_msg_hook_rejection() -> Result<()> {
     .id;
 
     std::fs::write(
-        std::path::Path::new(&project.path).join("test.txt"),
+        Path::new(&project.path).join("test.txt"),
         "line0\nline1\nline2\nline3\nline4\n",
     )?;
 
diff --git a/gitbutler-app/tests/virtual_branches/target/mod.rs b/gitbutler-app/tests/virtual_branches/target/mod.rs
new file mode 100644
index 000000000..582c1894a
--- /dev/null
+++ b/gitbutler-app/tests/virtual_branches/target/mod.rs
@@ -0,0 +1,2 @@
+mod reader;
+mod writer;
diff --git a/gitbutler-app/tests/virtual_branches/target/reader.rs b/gitbutler-app/tests/virtual_branches/target/reader.rs
new file mode 100644
index 000000000..c09bcfe83
--- /dev/null
+++ b/gitbutler-app/tests/virtual_branches/target/reader.rs
@@ -0,0 +1,147 @@
+use gitbutler_app::virtual_branches::target::Target;
+use gitbutler_app::virtual_branches::{target, BranchId};
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+use anyhow::Result;
+use once_cell::sync::Lazy;
+
+use crate::{Case, Suite};
+
+static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
+
+fn test_branch() -> gitbutler_app::virtual_branches::branch::Branch {
+    TEST_INDEX.fetch_add(1, Ordering::Relaxed);
+
+    gitbutler_app::virtual_branches::branch::Branch {
+        id: BranchId::generate(),
+        name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)),
+        notes: String::new(),
+        applied: true,
+        upstream: Some(
+            format!(
+                "refs/remotes/origin/upstream_{}",
+                TEST_INDEX.load(Ordering::Relaxed)
+            )
+            .parse()
+            .unwrap(),
+        ),
+        upstream_head: None,
+        created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128,
+        updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128,
+        head: format!(
+            "0123456789abcdef0123456789abcdef0123456{}",
+            TEST_INDEX.load(Ordering::Relaxed)
+        )
+        .parse()
+        .unwrap(),
+        tree: format!(
+            "0123456789abcdef0123456789abcdef012345{}",
+            (TEST_INDEX.load(Ordering::Relaxed) + 10)
+        )
+        .parse()
+        .unwrap(),
+        ownership: gitbutler_app::virtual_branches::branch::BranchOwnershipClaims {
+            claims: vec![gitbutler_app::virtual_branches::branch::OwnershipClaim {
+                file_path: format!("file/{}", TEST_INDEX.load(Ordering::Relaxed)).into(),
+                hunks: vec![],
+            }],
+        },
+        order: TEST_INDEX.load(Ordering::Relaxed),
+        selected_for_changes: None,
+    }
+}
+
+#[test]
+fn test_read_not_found() -> Result<()> {
+    let Case { gb_repository, .. } = Suite::default().new_case();
+
+    let session = gb_repository.get_or_create_current_session()?;
+    let session_reader = gitbutler_app::sessions::Reader::open(&gb_repository, &session)?;
+
+    let reader = target::Reader::new(&session_reader);
+    let result = reader.read(&BranchId::generate());
+    assert!(result.is_err());
+    assert_eq!(result.unwrap_err().to_string(), "file not found");
+
+    Ok(())
+}
+
+#[test]
+fn test_read_deprecated_format() -> Result<()> {
+    let Case { gb_repository, .. } = Suite::default().new_case();
+
+    let writer = gitbutler_app::writer::DirWriter::open(gb_repository.root())?;
+    writer
+        .write_string("branches/target/name", "origin/master")
+        .unwrap();
+    writer
+        .write_string(
+            "branches/target/remote",
+            "git@github.com:gitbutlerapp/gitbutler.git",
+        )
+        .unwrap();
+    writer
+        .write_string(
+            "branches/target/sha",
+            "dd945831869e9593448aa622fa4342bbfb84813d",
+        )
+        .unwrap();
+
+    let session = gb_repository.get_or_create_current_session()?;
+    let session_reader = gitbutler_app::sessions::Reader::open(&gb_repository, &session)?;
+    let reader = target::Reader::new(&session_reader);
+
+    let read = reader.read_default().unwrap();
+    assert_eq!(read.branch.branch(), "master");
+    assert_eq!(read.branch.remote(), "origin");
+    assert_eq!(read.remote_url, "git@github.com:gitbutlerapp/gitbutler.git");
+    assert_eq!(
+        read.sha.to_string(),
+        "dd945831869e9593448aa622fa4342bbfb84813d"
+    );
+
+    Ok(())
+}
+
+#[test]
+fn test_read_override_target() -> Result<()> {
+    let Case {
+        gb_repository,
+        project,
+        ..
+    } = Suite::default().new_case();
+
+    let mut branch = test_branch();
+
+    let target = Target {
+        branch: "refs/remotes/remote/branch".parse().unwrap(),
+        remote_url: "remote url".to_string(),
+        sha: "fedcba9876543210fedcba9876543210fedcba98".parse().unwrap(),
+    };
+
+    let default_target = Target {
+        branch: "refs/remotes/default remote/default branch"
+            .parse()
+            .unwrap(),
+        remote_url: "default remote url".to_string(),
+        sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(),
+    };
+
+    let branch_writer =
+        gitbutler_app::virtual_branches::branch::Writer::new(&gb_repository, project.gb_dir())?;
+    branch_writer.write(&mut branch)?;
+
+    let session = gb_repository.get_current_session()?.unwrap();
+    let session_reader = gitbutler_app::sessions::Reader::open(&gb_repository, &session)?;
+
+    let target_writer = target::Writer::new(&gb_repository, project.gb_dir())?;
+    let reader = target::Reader::new(&session_reader);
+
+    target_writer.write_default(&default_target)?;
+    assert_eq!(default_target, reader.read(&branch.id)?);
+
+    target_writer.write(&branch.id, &target)?;
+    assert_eq!(target, reader.read(&branch.id)?);
+
+    Ok(())
+}
diff --git a/gitbutler-app/tests/virtual_branches/target/writer.rs b/gitbutler-app/tests/virtual_branches/target/writer.rs
new file mode 100644
index 000000000..a37185faf
--- /dev/null
+++ b/gitbutler-app/tests/virtual_branches/target/writer.rs
@@ -0,0 +1,210 @@
+use anyhow::Context;
+use std::{
+    fs,
+    sync::atomic::{AtomicUsize, Ordering},
+};
+
+use once_cell::sync::Lazy;
+
+use crate::{Case, Suite};
+use gitbutler_app::virtual_branches::target::Target;
+use gitbutler_app::virtual_branches::{branch, target, BranchId};
+
+static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
+
+fn test_branch() -> branch::Branch {
+    TEST_INDEX.fetch_add(1, Ordering::Relaxed);
+
+    branch::Branch {
+        id: BranchId::generate(),
+        name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)),
+        notes: format!("branch_notes_{}", TEST_INDEX.load(Ordering::Relaxed)),
+        applied: true,
+        created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128,
+        upstream: Some(
+            format!(
+                "refs/remotes/origin/upstream_{}",
+                TEST_INDEX.load(Ordering::Relaxed)
+            )
+            .parse()
+            .unwrap(),
+        ),
+        upstream_head: None,
+        updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128,
+        head: format!(
+            "0123456789abcdef0123456789abcdef0123456{}",
+            TEST_INDEX.load(Ordering::Relaxed)
+        )
+        .parse()
+        .unwrap(),
+        tree: format!(
"0123456789abcdef0123456789abcdef012345{}", + TEST_INDEX.load(Ordering::Relaxed) + 10 + ) + .parse() + .unwrap(), + ownership: branch::BranchOwnershipClaims { + claims: vec![branch::OwnershipClaim { + file_path: format!("file/{}", TEST_INDEX.load(Ordering::Relaxed)).into(), + hunks: vec![], + }], + }, + order: TEST_INDEX.load(Ordering::Relaxed), + selected_for_changes: None, + } +} + +#[test] +fn test_write() -> anyhow::Result<()> { + let Case { + gb_repository, + project, + .. + } = Suite::default().new_case(); + + let mut branch = test_branch(); + let target = Target { + branch: "refs/remotes/remote name/branch name".parse().unwrap(), + remote_url: "remote url".to_string(), + sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(), + }; + + let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?; + branch_writer.write(&mut branch)?; + + let target_writer = target::Writer::new(&gb_repository, project.gb_dir())?; + target_writer.write(&branch.id, &target)?; + + let root = gb_repository + .root() + .join("branches") + .join(branch.id.to_string()); + + assert_eq!( + fs::read_to_string(root.join("meta").join("name").to_str().unwrap()) + .context("Failed to read branch name")?, + branch.name + ); + assert_eq!( + fs::read_to_string(root.join("target").join("branch_name").to_str().unwrap()) + .context("Failed to read branch target name")?, + format!("{}/{}", target.branch.remote(), target.branch.branch()) + ); + assert_eq!( + fs::read_to_string(root.join("target").join("remote_name").to_str().unwrap()) + .context("Failed to read branch target name name")?, + target.branch.remote() + ); + assert_eq!( + fs::read_to_string(root.join("target").join("remote_url").to_str().unwrap()) + .context("Failed to read branch target remote url")?, + target.remote_url + ); + assert_eq!( + fs::read_to_string(root.join("target").join("sha").to_str().unwrap()) + .context("Failed to read branch target sha")?, + target.sha.to_string() + ); + + assert_eq!( + fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())? + .parse::() + .context("Failed to read branch applied")?, + branch.applied + ); + assert_eq!( + fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap()) + .context("Failed to read branch upstream")?, + branch.upstream.unwrap().to_string() + ); + assert_eq!( + fs::read_to_string( + root.join("meta") + .join("created_timestamp_ms") + .to_str() + .unwrap() + ) + .context("Failed to read branch created timestamp")? + .parse::() + .context("Failed to parse branch created timestamp")?, + branch.created_timestamp_ms + ); + assert_eq!( + fs::read_to_string( + root.join("meta") + .join("updated_timestamp_ms") + .to_str() + .unwrap() + ) + .context("Failed to read branch updated timestamp")? + .parse::() + .context("Failed to parse branch updated timestamp")?, + branch.updated_timestamp_ms + ); + + Ok(()) +} + +#[test] +fn test_should_update() -> anyhow::Result<()> { + let Case { + gb_repository, + project, + .. 
+ } = Suite::default().new_case(); + + let mut branch = test_branch(); + let target = Target { + branch: "refs/remotes/remote name/branch name".parse().unwrap(), + remote_url: "remote url".to_string(), + sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(), + }; + + let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?; + branch_writer.write(&mut branch)?; + let target_writer = target::Writer::new(&gb_repository, project.gb_dir())?; + target_writer.write(&branch.id, &target)?; + + let updated_target = Target { + branch: "refs/remotes/updated remote name/updated branch name" + .parse() + .unwrap(), + remote_url: "updated remote url".to_string(), + sha: "fedcba9876543210fedcba9876543210fedcba98".parse().unwrap(), + }; + + target_writer.write(&branch.id, &updated_target)?; + + let root = gb_repository + .root() + .join("branches") + .join(branch.id.to_string()); + + assert_eq!( + fs::read_to_string(root.join("target").join("branch_name").to_str().unwrap()) + .context("Failed to read branch target branch name")?, + format!( + "{}/{}", + updated_target.branch.remote(), + updated_target.branch.branch() + ) + ); + + assert_eq!( + fs::read_to_string(root.join("target").join("remote_name").to_str().unwrap()) + .context("Failed to read branch target remote name")?, + updated_target.branch.remote() + ); + assert_eq!( + fs::read_to_string(root.join("target").join("remote_url").to_str().unwrap()) + .context("Failed to read branch target remote url")?, + updated_target.remote_url + ); + assert_eq!( + fs::read_to_string(root.join("target").join("sha").to_str().unwrap()) + .context("Failed to read branch target sha")?, + updated_target.sha.to_string() + ); + + Ok(()) +} diff --git a/gitbutler-app/tests/watcher/mod.rs b/gitbutler-app/tests/watcher/mod.rs new file mode 100644 index 000000000..43b3cf786 --- /dev/null +++ b/gitbutler-app/tests/watcher/mod.rs @@ -0,0 +1,1579 @@ +mod handler { + use crate::init_opts_bare; + + fn test_remote_repository() -> anyhow::Result { + let path = tempfile::tempdir()?.path().to_str().unwrap().to_string(); + let repo_a = git2::Repository::init_opts(path, &init_opts_bare())?; + + Ok(repo_a) + } + + mod calculate_delta_handler { + use anyhow::Result; + use std::path::{Path, PathBuf}; + use std::{ + collections::HashMap, + sync::atomic::{AtomicUsize, Ordering}, + }; + + use once_cell::sync::Lazy; + + use crate::{commit_all, Case, Suite}; + use gitbutler_app::watcher::handlers::calculate_deltas_handler::Handler; + use gitbutler_app::{ + deltas::{self, operations::Operation}, + reader, sessions, + virtual_branches::{self, branch}, + }; + + use self::branch::BranchId; + + static TEST_TARGET_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); + + fn test_target() -> virtual_branches::target::Target { + virtual_branches::target::Target { + branch: format!( + "refs/remotes/remote name {}/branch name {}", + TEST_TARGET_INDEX.load(Ordering::Relaxed), + TEST_TARGET_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + remote_url: format!("remote url {}", TEST_TARGET_INDEX.load(Ordering::Relaxed)), + sha: format!( + "0123456789abcdef0123456789abcdef0123456{}", + TEST_TARGET_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + } + } + + static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); + + fn test_branch() -> branch::Branch { + TEST_INDEX.fetch_add(1, Ordering::Relaxed); + + branch::Branch { + id: BranchId::generate(), + name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), + notes: format!("branch_notes_{}", 
TEST_INDEX.load(Ordering::Relaxed)), + applied: true, + upstream: Some( + format!( + "refs/remotes/origin/upstream_{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + ), + upstream_head: None, + created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, + updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, + head: format!( + "0123456789abcdef0123456789abcdef0123456{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + tree: format!( + "0123456789abcdef0123456789abcdef012345{}", + TEST_INDEX.load(Ordering::Relaxed) + 10 + ) + .parse() + .unwrap(), + ownership: branch::BranchOwnershipClaims::default(), + order: TEST_INDEX.load(Ordering::Relaxed), + selected_for_changes: None, + } + } + + #[test] + fn test_register_existing_commited_file() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project, + .. + } = suite.new_case_with_files(HashMap::from([(PathBuf::from("test.txt"), "test")])); + let listener = Handler::from_path(&suite.local_app_data); + + std::fs::write(project.path.join("test.txt"), "test2")?; + listener.handle("test.txt", &project.id)?; + + let session = gb_repository.get_current_session()?.unwrap(); + let session_reader = sessions::Reader::open(&gb_repository, &session)?; + let deltas_reader = deltas::Reader::new(&session_reader); + let deltas = deltas_reader.read_file("test.txt")?.unwrap(); + assert_eq!(deltas.len(), 1); + assert_eq!(deltas[0].operations.len(), 1); + assert_eq!( + deltas[0].operations[0], + Operation::Insert((4, "2".to_string())), + ); + assert_eq!( + std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?, + "test2" + ); + + Ok(()) + } + + #[test] + fn test_register_must_init_current_session() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project, + .. + } = suite.new_case(); + let listener = Handler::from_path(&suite.local_app_data); + + std::fs::write(project.path.join("test.txt"), "test")?; + listener.handle("test.txt", &project.id)?; + + assert!(gb_repository.get_current_session()?.is_some()); + + Ok(()) + } + + #[test] + fn test_register_must_not_override_current_session() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project, + .. + } = suite.new_case(); + let listener = Handler::from_path(&suite.local_app_data); + + std::fs::write(project.path.join("test.txt"), "test")?; + listener.handle("test.txt", &project.id)?; + let session1 = gb_repository.get_current_session()?.unwrap(); + + std::fs::write(project.path.join("test.txt"), "test2")?; + listener.handle("test.txt", &project.id)?; + let session2 = gb_repository.get_current_session()?.unwrap(); + + assert_eq!(session1.id, session2.id); + + Ok(()) + } + + #[test] + fn test_register_binfile() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project, + .. 
+ } = suite.new_case(); + let listener = Handler::from_path(&suite.local_app_data); + + std::fs::write( + project.path.join("test.bin"), + [0, 159, 146, 150, 159, 146, 150], + )?; + + listener.handle("test.bin", &project.id)?; + + let session = gb_repository.get_current_session()?.unwrap(); + let session_reader = sessions::Reader::open(&gb_repository, &session)?; + let deltas_reader = deltas::Reader::new(&session_reader); + let deltas = deltas_reader.read_file("test.bin")?.unwrap(); + + assert_eq!(deltas.len(), 1); + assert_eq!(deltas[0].operations.len(), 0); + assert_eq!( + std::fs::read_to_string(gb_repository.session_wd_path().join("test.bin"))?, + "" + ); + + Ok(()) + } + + #[test] + fn test_register_empty_new_file() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project, + .. + } = suite.new_case(); + let listener = Handler::from_path(&suite.local_app_data); + + std::fs::write(project.path.join("test.txt"), "")?; + + listener.handle("test.txt", &project.id)?; + + let session = gb_repository.get_current_session()?.unwrap(); + let session_reader = sessions::Reader::open(&gb_repository, &session)?; + let deltas_reader = deltas::Reader::new(&session_reader); + let deltas = deltas_reader.read_file("test.txt")?.unwrap(); + assert_eq!(deltas.len(), 1); + assert_eq!(deltas[0].operations.len(), 0); + assert_eq!( + std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?, + "" + ); + + Ok(()) + } + + #[test] + fn test_register_new_file() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project, + .. + } = suite.new_case(); + let listener = Handler::from_path(&suite.local_app_data); + + std::fs::write(project.path.join("test.txt"), "test")?; + + listener.handle("test.txt", &project.id)?; + + let session = gb_repository.get_current_session()?.unwrap(); + let session_reader = sessions::Reader::open(&gb_repository, &session)?; + let deltas_reader = deltas::Reader::new(&session_reader); + let deltas = deltas_reader.read_file("test.txt")?.unwrap(); + assert_eq!(deltas.len(), 1); + assert_eq!(deltas[0].operations.len(), 1); + assert_eq!( + deltas[0].operations[0], + Operation::Insert((0, "test".to_string())), + ); + assert_eq!( + std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?, + "test" + ); + + Ok(()) + } + + #[test] + fn test_register_no_changes_saved_thgoughout_flushes() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project_repository, + project, + .. + } = suite.new_case(); + let listener = Handler::from_path(&suite.local_app_data); + + // file change, wd and deltas are written + std::fs::write(project.path.join("test.txt"), "test")?; + listener.handle("test.txt", &project.id)?; + + // make two more sessions. + gb_repository.flush(&project_repository, None)?; + gb_repository.get_or_create_current_session()?; + gb_repository.flush(&project_repository, None)?; + + // after some sessions, files from the first change are still there. + let session = gb_repository.get_or_create_current_session()?; + let session_reader = sessions::Reader::open(&gb_repository, &session)?; + let files = session_reader.files(None)?; + assert_eq!(files.len(), 1); + + Ok(()) + } + + #[test] + fn test_register_new_file_twice() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project, + .. 
+ } = suite.new_case(); + let listener = Handler::from_path(&suite.local_app_data); + + std::fs::write(project.path.join("test.txt"), "test")?; + listener.handle("test.txt", &project.id)?; + + let session = gb_repository.get_current_session()?.unwrap(); + let session_reader = sessions::Reader::open(&gb_repository, &session)?; + let deltas_reader = deltas::Reader::new(&session_reader); + let deltas = deltas_reader.read_file("test.txt")?.unwrap(); + assert_eq!(deltas.len(), 1); + assert_eq!(deltas[0].operations.len(), 1); + assert_eq!( + deltas[0].operations[0], + Operation::Insert((0, "test".to_string())), + ); + assert_eq!( + std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?, + "test" + ); + + std::fs::write(project.path.join("test.txt"), "test2")?; + listener.handle("test.txt", &project.id)?; + + let deltas = deltas_reader.read_file("test.txt")?.unwrap(); + assert_eq!(deltas.len(), 2); + assert_eq!(deltas[0].operations.len(), 1); + assert_eq!( + deltas[0].operations[0], + Operation::Insert((0, "test".to_string())), + ); + assert_eq!(deltas[1].operations.len(), 1); + assert_eq!( + deltas[1].operations[0], + Operation::Insert((4, "2".to_string())), + ); + assert_eq!( + std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?, + "test2" + ); + + Ok(()) + } + + #[test] + fn test_register_file_deleted() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project_repository, + project, + .. + } = suite.new_case(); + let listener = Handler::from_path(&suite.local_app_data); + + { + // write file + std::fs::write(project.path.join("test.txt"), "test")?; + listener.handle("test.txt", &project.id)?; + } + + { + // current session must have the deltas, but not the file (it didn't exist) + let session = gb_repository.get_current_session()?.unwrap(); + let session_reader = sessions::Reader::open(&gb_repository, &session)?; + let deltas_reader = deltas::Reader::new(&session_reader); + let deltas = deltas_reader.read_file("test.txt")?.unwrap(); + assert_eq!(deltas.len(), 1); + assert_eq!(deltas[0].operations.len(), 1); + assert_eq!( + deltas[0].operations[0], + Operation::Insert((0, "test".to_string())), + ); + assert_eq!( + std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?, + "test" + ); + + let files = session_reader.files(None).unwrap(); + assert!(files.is_empty()); + } + + gb_repository.flush(&project_repository, None)?; + + { + // file should be available in the next session, but not deltas just yet. + let session = gb_repository.get_or_create_current_session()?; + let session_reader = sessions::Reader::open(&gb_repository, &session)?; + let files = session_reader.files(None).unwrap(); + assert_eq!(files.len(), 1); + assert_eq!( + files[Path::new("test.txt")], + reader::Content::UTF8("test".to_string()) + ); + + let deltas_reader = deltas::Reader::new(&session_reader); + let deltas = deltas_reader.read(None)?; + assert!(deltas.is_empty()); + + // removing the file + std::fs::remove_file(project.path.join("test.txt"))?; + listener.handle("test.txt", &project.id)?; + + // deltas are recorded + let deltas = deltas_reader.read_file("test.txt")?.unwrap(); + assert_eq!(deltas.len(), 1); + assert_eq!(deltas[0].operations.len(), 1); + assert_eq!(deltas[0].operations[0], Operation::Delete((0, 4)),); + } + + gb_repository.flush(&project_repository, None)?; + + { + // since file was deleted in the previous session, it should not exist in the new one. 
+                let session = gb_repository.get_or_create_current_session()?;
+                let session_reader = sessions::Reader::open(&gb_repository, &session)?;
+                let files = session_reader.files(None).unwrap();
+                assert!(files.is_empty());
+            }
+
+            Ok(())
+        }
+
+        #[test]
+        fn test_flow_with_commits() -> Result<()> {
+            let suite = Suite::default();
+            let Case {
+                gb_repository,
+                project,
+                project_repository,
+                ..
+            } = suite.new_case();
+            let listener = Handler::from_path(&suite.local_app_data);
+
+            let size = 10;
+            let relative_file_path = Path::new("one/two/test.txt");
+            for i in 1..=size {
+                std::fs::create_dir_all(Path::new(&project.path).join("one/two"))?;
+                // create a session with a single file change and flush it
+                std::fs::write(
+                    Path::new(&project.path).join(relative_file_path),
+                    i.to_string(),
+                )?;
+
+                commit_all(&project_repository.git_repository);
+                listener.handle(relative_file_path, &project.id)?;
+                assert!(gb_repository.flush(&project_repository, None)?.is_some());
+            }
+
+            // get all the created sessions
+            let mut sessions: Vec<sessions::Session> = gb_repository
+                .get_sessions_iterator()?
+                .map(Result::unwrap)
+                .collect();
+            assert_eq!(sessions.len(), size);
+            // verify sessions order is correct
+            let mut last_start = sessions[0].meta.start_timestamp_ms;
+            let mut last_end = sessions[0].meta.start_timestamp_ms;
+            sessions[1..].iter().for_each(|session| {
+                assert!(session.meta.start_timestamp_ms < last_start);
+                assert!(session.meta.last_timestamp_ms < last_end);
+                last_start = session.meta.start_timestamp_ms;
+                last_end = session.meta.last_timestamp_ms;
+            });
+
+            sessions.reverse();
+            // try to reconstruct file state from operations for every session slice
+            for i in 0..sessions.len() {
+                let sessions_slice = &mut sessions[i..];
+
+                // collect all operations from sessions in the reverse order
+                let mut operations: Vec<Operation> = vec![];
+                for session in &mut *sessions_slice {
+                    let session_reader = sessions::Reader::open(&gb_repository, session).unwrap();
+                    let deltas_reader = deltas::Reader::new(&session_reader);
+                    let deltas_by_filepath = deltas_reader.read(None).unwrap();
+                    for deltas in deltas_by_filepath.values() {
+                        for delta in deltas {
+                            delta.operations.iter().for_each(|operation| {
+                                operations.push(operation.clone());
+                            });
+                        }
+                    }
+                }
+
+                let reader =
+                    sessions::Reader::open(&gb_repository, sessions_slice.first().unwrap())
+                        .unwrap();
+                let files = reader.files(None).unwrap();
+
+                if i == 0 {
+                    assert_eq!(files.len(), 0);
+                } else {
+                    assert_eq!(files.len(), 1);
+                }
+
+                let base_file = files.get(&relative_file_path.to_path_buf());
+                let mut text: Vec<char> = match base_file {
+                    Some(reader::Content::UTF8(file)) => file.chars().collect(),
+                    _ => vec![],
+                };
+
+                for operation in operations {
+                    operation.apply(&mut text).unwrap();
+                }
+
+                assert_eq!(text.iter().collect::<String>(), size.to_string());
+            }
+            Ok(())
+        }
+
+        #[test]
+        fn test_flow_no_commits() -> Result<()> {
+            let suite = Suite::default();
+            let Case {
+                gb_repository,
+                project,
+                project_repository,
+                ..
+            } = suite.new_case();
+            let listener = Handler::from_path(&suite.local_app_data);
+
+            let size = 10;
+            let relative_file_path = Path::new("one/two/test.txt");
+            for i in 1..=size {
+                std::fs::create_dir_all(Path::new(&project.path).join("one/two"))?;
+                // create a session with a single file change and flush it
+                std::fs::write(
+                    Path::new(&project.path).join(relative_file_path),
+                    i.to_string(),
+                )?;
+
+                listener.handle(relative_file_path, &project.id)?;
+                assert!(gb_repository.flush(&project_repository, None)?.is_some());
+            }
+
+            // get all the created sessions
+            let mut sessions: Vec<sessions::Session> = gb_repository
+                .get_sessions_iterator()?
+                .map(Result::unwrap)
+                .collect();
+            assert_eq!(sessions.len(), size);
+            // verify sessions order is correct
+            let mut last_start = sessions[0].meta.start_timestamp_ms;
+            let mut last_end = sessions[0].meta.start_timestamp_ms;
+            sessions[1..].iter().for_each(|session| {
+                assert!(session.meta.start_timestamp_ms < last_start);
+                assert!(session.meta.last_timestamp_ms < last_end);
+                last_start = session.meta.start_timestamp_ms;
+                last_end = session.meta.last_timestamp_ms;
+            });
+
+            sessions.reverse();
+            // try to reconstruct file state from operations for every session slice
+            for i in 0..sessions.len() {
+                let sessions_slice = &mut sessions[i..];
+
+                // collect all operations from sessions in the reverse order
+                let mut operations: Vec<Operation> = vec![];
+                for session in &mut *sessions_slice {
+                    let session_reader = sessions::Reader::open(&gb_repository, session).unwrap();
+                    let deltas_reader = deltas::Reader::new(&session_reader);
+                    let deltas_by_filepath = deltas_reader.read(None).unwrap();
+                    for deltas in deltas_by_filepath.values() {
+                        for delta in deltas {
+                            delta.operations.iter().for_each(|operation| {
+                                operations.push(operation.clone());
+                            });
+                        }
+                    }
+                }
+
+                let reader =
+                    sessions::Reader::open(&gb_repository, sessions_slice.first().unwrap())
+                        .unwrap();
+                let files = reader.files(None).unwrap();
+
+                if i == 0 {
+                    assert_eq!(files.len(), 0);
+                } else {
+                    assert_eq!(files.len(), 1);
+                }
+
+                let base_file = files.get(&relative_file_path.to_path_buf());
+                let mut text: Vec<char> = match base_file {
+                    Some(reader::Content::UTF8(file)) => file.chars().collect(),
+                    _ => vec![],
+                };
+
+                for operation in operations {
+                    operation.apply(&mut text).unwrap();
+                }
+
+                assert_eq!(text.iter().collect::<String>(), size.to_string());
+            }
+            Ok(())
+        }
+
+        #[test]
+        fn test_flow_signle_session() -> Result<()> {
+            let suite = Suite::default();
+            let Case {
+                gb_repository,
+                project,
+                ..
+            } = suite.new_case();
+            let listener = Handler::from_path(&suite.local_app_data);
+
+            let size = 10_i32;
+            let relative_file_path = Path::new("one/two/test.txt");
+            for i in 1_i32..=size {
+                std::fs::create_dir_all(Path::new(&project.path).join("one/two"))?;
+                // create a session with a single file change and flush it
+                std::fs::write(
+                    Path::new(&project.path).join(relative_file_path),
+                    i.to_string(),
+                )?;
+
+                listener.handle(relative_file_path, &project.id)?;
+            }
+
+            // collect all operations from sessions in the reverse order
+            let mut operations: Vec<Operation> = vec![];
+            let session = gb_repository.get_current_session()?.unwrap();
+            let session_reader = sessions::Reader::open(&gb_repository, &session).unwrap();
+            let deltas_reader = deltas::Reader::new(&session_reader);
+            let deltas_by_filepath = deltas_reader.read(None).unwrap();
+            for deltas in deltas_by_filepath.values() {
+                for delta in deltas {
+                    delta.operations.iter().for_each(|operation| {
+                        operations.push(operation.clone());
+                    });
+                }
+            }
+
+            let reader = sessions::Reader::open(&gb_repository, &session).unwrap();
+            let files = reader.files(None).unwrap();
+
+            let base_file = files.get(&relative_file_path.to_path_buf());
+            let mut text: Vec<char> = match base_file {
+                Some(reader::Content::UTF8(file)) => file.chars().collect(),
+                _ => vec![],
+            };
+
+            for operation in operations {
+                operation.apply(&mut text).unwrap();
+            }
+
+            assert_eq!(text.iter().collect::<String>(), size.to_string());
+            Ok(())
+        }
+
+        #[test]
+        fn should_persist_branches_targets_state_between_sessions() -> Result<()> {
+            let suite = Suite::default();
+            let Case {
+                gb_repository,
+                project,
+                project_repository,
+                ..
+            } = suite
+                .new_case_with_files(HashMap::from([(PathBuf::from("test.txt"), "hello world")]));
+            let listener = Handler::from_path(&suite.local_app_data);
+
+            let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
+            let target_writer =
+                virtual_branches::target::Writer::new(&gb_repository, project.gb_dir())?;
+            let default_target = test_target();
+            target_writer.write_default(&default_target)?;
+            let mut vbranch0 = test_branch();
+            branch_writer.write(&mut vbranch0)?;
+            let mut vbranch1 = test_branch();
+            let vbranch1_target = test_target();
+            branch_writer.write(&mut vbranch1)?;
+            target_writer.write(&vbranch1.id, &vbranch1_target)?;
+
+            std::fs::write(project.path.join("test.txt"), "hello world!").unwrap();
+            listener.handle("test.txt", &project.id)?;
+
+            let flushed_session = gb_repository.flush(&project_repository, None).unwrap();
+
+            // create a new session
+            let session = gb_repository.get_or_create_current_session().unwrap();
+            assert_ne!(session.id, flushed_session.unwrap().id);
+
+            // ensure that the virtual branch is still there and selected
+            let session_reader = sessions::Reader::open(&gb_repository, &session).unwrap();
+
+            let branches = virtual_branches::Iterator::new(&session_reader)
+                .unwrap()
+                .collect::<Result<Vec<branch::Branch>, gitbutler_app::reader::Error>>()
+                .unwrap()
+                .into_iter()
+                .collect::<Vec<_>>();
+            assert_eq!(branches.len(), 2);
+            let branch_ids = branches.iter().map(|b| b.id).collect::<Vec<_>>();
+            assert!(branch_ids.contains(&vbranch0.id));
+            assert!(branch_ids.contains(&vbranch1.id));
+
+            let target_reader = virtual_branches::target::Reader::new(&session_reader);
+            assert_eq!(target_reader.read_default().unwrap(), default_target);
+            assert_eq!(target_reader.read(&vbranch0.id).unwrap(), default_target);
+            assert_eq!(target_reader.read(&vbranch1.id).unwrap(), vbranch1_target);
+
+            Ok(())
+        }
+
+        #[test]
+        fn should_restore_branches_targets_state_from_head_session() -> Result<()> {
+            let suite = Suite::default();
+            let Case {
+                gb_repository,
+                project,
+                project_repository,
+                ..
+            } = suite
+                .new_case_with_files(HashMap::from([(PathBuf::from("test.txt"), "hello world")]));
+            let listener = Handler::from_path(&suite.local_app_data);
+
+            let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
+            let target_writer =
+                virtual_branches::target::Writer::new(&gb_repository, project.gb_dir())?;
+            let default_target = test_target();
+            target_writer.write_default(&default_target)?;
+            let mut vbranch0 = test_branch();
+            branch_writer.write(&mut vbranch0)?;
+            let mut vbranch1 = test_branch();
+            let vbranch1_target = test_target();
+            branch_writer.write(&mut vbranch1)?;
+            target_writer.write(&vbranch1.id, &vbranch1_target)?;
+
+            std::fs::write(project.path.join("test.txt"), "hello world!").unwrap();
+            listener.handle("test.txt", &project.id).unwrap();
+
+            let flushed_session = gb_repository.flush(&project_repository, None).unwrap();
+
+            // hard delete branches state from disk
+            std::fs::remove_dir_all(gb_repository.root()).unwrap();
+
+            // create a new session
+            let session = gb_repository.get_or_create_current_session().unwrap();
+            assert_ne!(session.id, flushed_session.unwrap().id);
+
+            // ensure that the virtual branch is still there and selected
+            let session_reader = sessions::Reader::open(&gb_repository, &session).unwrap();
+
+            let branches = virtual_branches::Iterator::new(&session_reader)
+                .unwrap()
+                .collect::<Result<Vec<branch::Branch>, gitbutler_app::reader::Error>>()
+                .unwrap()
+                .into_iter()
+                .collect::<Vec<_>>();
+            assert_eq!(branches.len(), 2);
+            let branch_ids = branches.iter().map(|b| b.id).collect::<Vec<_>>();
+            assert!(branch_ids.contains(&vbranch0.id));
+            assert!(branch_ids.contains(&vbranch1.id));
+
+            let target_reader = virtual_branches::target::Reader::new(&session_reader);
+            assert_eq!(target_reader.read_default().unwrap(), default_target);
+            assert_eq!(target_reader.read(&vbranch0.id).unwrap(), default_target);
+            assert_eq!(target_reader.read(&vbranch1.id).unwrap(), vbranch1_target);
+
+            Ok(())
+        }
+
+        mod flush_wd {
+            use super::*;
+
+            #[test]
+            fn should_add_new_files_to_session_wd() {
+                let suite = Suite::default();
+                let Case {
+                    gb_repository,
+                    project,
+                    project_repository,
+                    ..
+ } = suite.new_case(); + let listener = Handler::from_path(&suite.local_app_data); + + // write a file into session + std::fs::write(project.path.join("test.txt"), "hello world!").unwrap(); + listener.handle("test.txt", &project.id).unwrap(); + + let flushed_session = gb_repository + .flush(&project_repository, None) + .unwrap() + .unwrap(); + { + // after flush it should be flushed into the commit + let session_commit = gb_repository + .git_repository() + .find_commit(flushed_session.hash.unwrap()) + .unwrap(); + let commit_reader = reader::Reader::from_commit( + gb_repository.git_repository(), + &session_commit, + ) + .unwrap(); + assert_eq!( + commit_reader.list_files(Path::new("wd")).unwrap(), + vec![Path::new("test.txt")] + ); + assert_eq!( + commit_reader.read(Path::new("wd/test.txt")).unwrap(), + reader::Content::UTF8("hello world!".to_string()) + ); + } + + // write another file into session + std::fs::create_dir_all(project.path.join("one/two")).unwrap(); + std::fs::write(project.path.join("one/two/test2.txt"), "hello world!").unwrap(); + listener.handle("one/two/test2.txt", &project.id).unwrap(); + + let flushed_session = gb_repository + .flush(&project_repository, None) + .unwrap() + .unwrap(); + { + // after flush, it should be flushed into the commit next to the previous one + let session_commit = gb_repository + .git_repository() + .find_commit(flushed_session.hash.unwrap()) + .unwrap(); + let commit_reader = reader::Reader::from_commit( + gb_repository.git_repository(), + &session_commit, + ) + .unwrap(); + assert_eq!( + commit_reader.list_files(Path::new("wd")).unwrap(), + vec![Path::new("one/two/test2.txt"), Path::new("test.txt"),] + ); + assert_eq!( + commit_reader.read(Path::new("wd/test.txt")).unwrap(), + reader::Content::UTF8("hello world!".to_string()) + ); + assert_eq!( + commit_reader + .read(Path::new("wd/one/two/test2.txt")) + .unwrap(), + reader::Content::UTF8("hello world!".to_string()) + ); + } + } + + #[test] + fn should_remove_deleted_files_from_session_wd() { + let suite = Suite::default(); + let Case { + gb_repository, + project, + project_repository, + .. 
+ } = suite.new_case(); + let listener = Handler::from_path(&suite.local_app_data); + + // write a file into session + std::fs::write(project.path.join("test.txt"), "hello world!").unwrap(); + listener.handle("test.txt", &project.id).unwrap(); + std::fs::create_dir_all(project.path.join("one/two")).unwrap(); + std::fs::write(project.path.join("one/two/test2.txt"), "hello world!").unwrap(); + listener.handle("one/two/test2.txt", &project.id).unwrap(); + + let flushed_session = gb_repository + .flush(&project_repository, None) + .unwrap() + .unwrap(); + { + // after flush it should be flushed into the commit + let session_commit = gb_repository + .git_repository() + .find_commit(flushed_session.hash.unwrap()) + .unwrap(); + let commit_reader = reader::Reader::from_commit( + gb_repository.git_repository(), + &session_commit, + ) + .unwrap(); + assert_eq!( + commit_reader.list_files(Path::new("wd")).unwrap(), + vec![Path::new("one/two/test2.txt"), Path::new("test.txt"),] + ); + assert_eq!( + commit_reader.read(Path::new("wd/test.txt")).unwrap(), + reader::Content::UTF8("hello world!".to_string()) + ); + assert_eq!( + commit_reader + .read(Path::new("wd/one/two/test2.txt")) + .unwrap(), + reader::Content::UTF8("hello world!".to_string()) + ); + } + + // rm the files + std::fs::remove_file(project.path.join("test.txt")).unwrap(); + listener.handle("test.txt", &project.id).unwrap(); + std::fs::remove_file(project.path.join("one/two/test2.txt")).unwrap(); + listener.handle("one/two/test2.txt", &project.id).unwrap(); + + let flushed_session = gb_repository + .flush(&project_repository, None) + .unwrap() + .unwrap(); + { + // after flush it should be removed from the commit + let session_commit = gb_repository + .git_repository() + .find_commit(flushed_session.hash.unwrap()) + .unwrap(); + let commit_reader = reader::Reader::from_commit( + gb_repository.git_repository(), + &session_commit, + ) + .unwrap(); + assert!(commit_reader + .list_files(Path::new("wd")) + .unwrap() + .is_empty()); + } + } + + #[test] + fn should_update_updated_files_in_session_wd() { + let suite = Suite::default(); + let Case { + gb_repository, + project, + project_repository, + .. 
+ } = suite.new_case(); + let listener = Handler::from_path(&suite.local_app_data); + + // write a file into session + std::fs::write(project.path.join("test.txt"), "hello world!").unwrap(); + listener.handle("test.txt", &project.id).unwrap(); + std::fs::create_dir_all(project.path.join("one/two")).unwrap(); + std::fs::write(project.path.join("one/two/test2.txt"), "hello world!").unwrap(); + listener.handle("one/two/test2.txt", &project.id).unwrap(); + + let flushed_session = gb_repository + .flush(&project_repository, None) + .unwrap() + .unwrap(); + { + // after flush it should be flushed into the commit + let session_commit = gb_repository + .git_repository() + .find_commit(flushed_session.hash.unwrap()) + .unwrap(); + let commit_reader = reader::Reader::from_commit( + gb_repository.git_repository(), + &session_commit, + ) + .unwrap(); + assert_eq!( + commit_reader.list_files(Path::new("wd")).unwrap(), + vec![Path::new("one/two/test2.txt"), Path::new("test.txt"),] + ); + assert_eq!( + commit_reader.read(Path::new("wd/test.txt")).unwrap(), + reader::Content::UTF8("hello world!".to_string()) + ); + assert_eq!( + commit_reader + .read(Path::new("wd/one/two/test2.txt")) + .unwrap(), + reader::Content::UTF8("hello world!".to_string()) + ); + } + + // update the file + std::fs::write(project.path.join("test.txt"), "hello world!2").unwrap(); + listener.handle("test.txt", &project.id).unwrap(); + + std::fs::write(project.path.join("one/two/test2.txt"), "hello world!2").unwrap(); + listener.handle("one/two/test2.txt", &project.id).unwrap(); + + let flushed_session = gb_repository + .flush(&project_repository, None) + .unwrap() + .unwrap(); + { + // after flush it should be updated in the commit + let session_commit = gb_repository + .git_repository() + .find_commit(flushed_session.hash.unwrap()) + .unwrap(); + let commit_reader = reader::Reader::from_commit( + gb_repository.git_repository(), + &session_commit, + ) + .unwrap(); + assert_eq!( + commit_reader.list_files(Path::new("wd")).unwrap(), + vec![Path::new("one/two/test2.txt"), Path::new("test.txt"),] + ); + assert_eq!( + commit_reader.read(Path::new("wd/test.txt")).unwrap(), + reader::Content::UTF8("hello world!2".to_string()) + ); + assert_eq!( + commit_reader + .read(Path::new("wd/one/two/test2.txt")) + .unwrap(), + reader::Content::UTF8("hello world!2".to_string()) + ); + } + } + } + } + + mod fetch_gitbutler_data { + use std::time::SystemTime; + + use gitbutler_app::projects; + use pretty_assertions::assert_eq; + + use crate::watcher::handler::test_remote_repository; + use crate::{Case, Suite}; + use gitbutler_app::watcher::handlers::fetch_gitbutler_data::InnerHandler; + + #[tokio::test] + async fn test_fetch_success() -> anyhow::Result<()> { + let suite = Suite::default(); + let Case { project, .. 
} = suite.new_case(); + + let cloud = test_remote_repository()?; + + let api_project = projects::ApiProject { + name: "test-sync".to_string(), + description: None, + repository_id: "123".to_string(), + git_url: cloud.path().to_str().unwrap().to_string(), + code_git_url: None, + created_at: 0_i32.to_string(), + updated_at: 0_i32.to_string(), + sync: true, + }; + + suite + .projects + .update(&projects::UpdateRequest { + id: project.id, + api: Some(api_project.clone()), + ..Default::default() + }) + .await?; + + let listener = InnerHandler { + local_data_dir: suite.local_app_data, + projects: suite.projects, + users: suite.users, + }; + + listener + .handle(&project.id, &SystemTime::now()) + .await + .unwrap(); + + Ok(()) + } + + #[tokio::test] + async fn test_fetch_fail_no_sync() { + let suite = Suite::default(); + let Case { project, .. } = suite.new_case(); + + let listener = InnerHandler { + local_data_dir: suite.local_app_data, + projects: suite.projects, + users: suite.users, + }; + + let res = listener.handle(&project.id, &SystemTime::now()).await; + + assert_eq!(&res.unwrap_err().to_string(), "sync disabled"); + } + } + + mod git_file_change { + use anyhow::Result; + use std::fs; + + use gitbutler_app::projects; + use pretty_assertions::assert_eq; + + use crate::{Case, Suite}; + use gitbutler_app::watcher::handlers::git_file_change::Handler; + use gitbutler_app::watcher::{handlers, Event}; + + #[test] + fn test_flush_session() -> Result<()> { + let suite = Suite::default(); + let Case { + project, + gb_repository, + .. + } = suite.new_case(); + + assert!(gb_repository.get_current_session()?.is_none()); + create_new_session_via_new_file(&project, &suite); + assert!(gb_repository.get_current_session()?.is_some()); + + let listener = Handler::new(suite.local_app_data, suite.projects, suite.users); + + let flush_file_path = project.path.join(".git/GB_FLUSH"); + fs::write(flush_file_path.as_path(), "")?; + + let result = listener.handle("GB_FLUSH", &project.id)?; + + assert_eq!(result.len(), 1); + assert!(matches!(result[0], Event::Flush(_, _))); + + assert!(!flush_file_path.exists(), "flush file deleted"); + + Ok(()) + } + + #[test] + fn test_do_not_flush_session_if_file_is_missing() -> Result<()> { + let suite = Suite::default(); + let Case { + project, + gb_repository, + .. + } = suite.new_case(); + + assert!(gb_repository.get_current_session()?.is_none()); + create_new_session_via_new_file(&project, &suite); + assert!(gb_repository.get_current_session()?.is_some()); + + let listener = Handler::new(suite.local_app_data, suite.projects, suite.users); + + let result = listener.handle("GB_FLUSH", &project.id)?; + + assert_eq!(result.len(), 0); + + Ok(()) + } + + fn create_new_session_via_new_file(project: &projects::Project, suite: &Suite) { + fs::write(project.path.join("test.txt"), "test").unwrap(); + + let file_change_listener = + handlers::calculate_deltas_handler::Handler::from_path(&suite.local_app_data); + file_change_listener + .handle("test.txt", &project.id) + .unwrap(); + } + + #[test] + fn test_flush_deletes_flush_file_without_session_to_flush() -> Result<()> { + let suite = Suite::default(); + let Case { project, .. 
            } = suite.new_case();
+
+            let listener = Handler::new(suite.local_app_data, suite.projects, suite.users);
+
+            let flush_file_path = project.path.join(".git/GB_FLUSH");
+            fs::write(flush_file_path.as_path(), "")?;
+
+            let result = listener.handle("GB_FLUSH", &project.id)?;
+
+            assert_eq!(result.len(), 0);
+
+            assert!(!flush_file_path.exists(), "flush file deleted");
+
+            Ok(())
+        }
+    }
+
+    mod push_project_to_gitbutler {
+        use anyhow::Result;
+        use gitbutler_app::{git, projects};
+        use std::collections::HashMap;
+        use std::path::PathBuf;
+
+        use crate::virtual_branches::set_test_target;
+        use crate::watcher::handler::test_remote_repository;
+        use crate::{Case, Suite};
+        use gitbutler_app::project_repository::LogUntil;
+        use gitbutler_app::watcher::handlers::push_project_to_gitbutler::HandlerInner;
+
+        fn log_walk(repo: &git2::Repository, head: git::Oid) -> Vec<git::Oid> {
+            let mut walker = repo.revwalk().unwrap();
+            walker.push(head.into()).unwrap();
+            walker.map(|oid| oid.unwrap().into()).collect::<Vec<_>>()
+        }
+
+        #[tokio::test]
+        async fn test_push_error() -> Result<()> {
+            let suite = Suite::default();
+            let Case { project, .. } = suite.new_case();
+
+            let api_project = projects::ApiProject {
+                name: "test-sync".to_string(),
+                description: None,
+                repository_id: "123".to_string(),
+                git_url: String::new(),
+                code_git_url: Some(String::new()),
+                created_at: 0_i32.to_string(),
+                updated_at: 0_i32.to_string(),
+                sync: true,
+            };
+
+            suite
+                .projects
+                .update(&projects::UpdateRequest {
+                    id: project.id,
+                    api: Some(api_project.clone()),
+                    ..Default::default()
+                })
+                .await?;
+
+            let listener = HandlerInner {
+                local_data_dir: suite.local_app_data,
+                project_store: suite.projects,
+                users: suite.users,
+                batch_size: 100,
+            };
+
+            let res = listener.handle(&project.id).await;
+
+            res.unwrap_err();
+
+            Ok(())
+        }
+
+        #[tokio::test]
+        async fn test_push_simple() -> Result<()> {
+            let suite = Suite::default();
+            let Case {
+                project,
+                gb_repository,
+                project_repository,
+                ..
+ } = suite.new_case_with_files(HashMap::from([(PathBuf::from("test.txt"), "test")])); + + suite.sign_in(); + + set_test_target(&gb_repository, &project_repository).unwrap(); + + let target_id = gb_repository.default_target().unwrap().unwrap().sha; + + let reference = project_repository.l(target_id, LogUntil::End).unwrap(); + + let cloud_code = test_remote_repository()?; + + let api_project = projects::ApiProject { + name: "test-sync".to_string(), + description: None, + repository_id: "123".to_string(), + git_url: String::new(), + code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()), + created_at: 0_i32.to_string(), + updated_at: 0_i32.to_string(), + sync: true, + }; + + suite + .projects + .update(&projects::UpdateRequest { + id: project.id, + api: Some(api_project.clone()), + ..Default::default() + }) + .await?; + + cloud_code.find_commit(target_id.into()).unwrap_err(); + + { + let listener = HandlerInner { + local_data_dir: suite.local_app_data, + project_store: suite.projects.clone(), + users: suite.users, + batch_size: 10, + }; + + let res = listener.handle(&project.id).await.unwrap(); + assert!(res.is_empty()); + } + + cloud_code.find_commit(target_id.into()).unwrap(); + + let pushed = log_walk(&cloud_code, target_id); + assert_eq!(reference.len(), pushed.len()); + assert_eq!(reference, pushed); + + assert_eq!( + suite + .projects + .get(&project.id) + .unwrap() + .gitbutler_code_push_state + .unwrap() + .id, + target_id + ); + + Ok(()) + } + + #[tokio::test] + async fn test_push_remote_ref() -> Result<()> { + let suite = Suite::default(); + let Case { + project, + gb_repository, + project_repository, + .. + } = suite.new_case(); + + suite.sign_in(); + + set_test_target(&gb_repository, &project_repository).unwrap(); + + let cloud_code: git::Repository = test_remote_repository()?.into(); + + let remote_repo: git::Repository = test_remote_repository()?.into(); + + let last_commit = create_initial_commit(&remote_repo); + + remote_repo + .reference( + &git::Refname::Local(git::LocalRefname::new("refs/heads/testbranch", None)), + last_commit, + false, + "", + ) + .unwrap(); + + let mut remote = project_repository + .git_repository + .remote("tr", &remote_repo.path().to_str().unwrap().parse().unwrap()) + .unwrap(); + + remote + .fetch(&["+refs/heads/*:refs/remotes/tr/*"], None) + .unwrap(); + + project_repository + .git_repository + .find_commit(last_commit) + .unwrap(); + + let api_project = projects::ApiProject { + name: "test-sync".to_string(), + description: None, + repository_id: "123".to_string(), + git_url: String::new(), + code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()), + created_at: 0_i32.to_string(), + updated_at: 0_i32.to_string(), + sync: true, + }; + + suite + .projects + .update(&projects::UpdateRequest { + id: project.id, + api: Some(api_project.clone()), + ..Default::default() + }) + .await?; + + { + let listener = HandlerInner { + local_data_dir: suite.local_app_data, + project_store: suite.projects.clone(), + users: suite.users, + batch_size: 10, + }; + + listener.handle(&project.id).await.unwrap(); + } + + cloud_code.find_commit(last_commit).unwrap(); + + Ok(()) + } + + fn create_initial_commit(repo: &git::Repository) -> git::Oid { + let signature = git::Signature::now("test", "test@email.com").unwrap(); + + let mut index = repo.index().unwrap(); + let oid = index.write_tree().unwrap(); + + repo.commit( + None, + &signature, + &signature, + "initial commit", + &repo.find_tree(oid).unwrap(), + &[], + ) + .unwrap() + } + + fn 
create_test_commits(repo: &git::Repository, commits: usize) -> git::Oid { + let signature = git::Signature::now("test", "test@email.com").unwrap(); + + let mut last = None; + + for i in 0..commits { + let mut index = repo.index().unwrap(); + let oid = index.write_tree().unwrap(); + let head = repo.head().unwrap(); + + last = Some( + repo.commit( + Some(&head.name().unwrap()), + &signature, + &signature, + format!("commit {i}").as_str(), + &repo.find_tree(oid).unwrap(), + &[&repo + .find_commit(repo.refname_to_id("HEAD").unwrap()) + .unwrap()], + ) + .unwrap(), + ); + } + + last.unwrap() + } + + #[tokio::test] + async fn test_push_batches() -> Result<()> { + let suite = Suite::default(); + let Case { + project, + gb_repository, + project_repository, + .. + } = suite.new_case(); + + suite.sign_in(); + + { + let head: git::Oid = project_repository + .get_head() + .unwrap() + .peel_to_commit() + .unwrap() + .id(); + + let reference = project_repository.l(head, LogUntil::End).unwrap(); + assert_eq!(reference.len(), 2); + + let head = create_test_commits(&project_repository.git_repository, 10); + + let reference = project_repository.l(head, LogUntil::End).unwrap(); + assert_eq!(reference.len(), 12); + } + + set_test_target(&gb_repository, &project_repository).unwrap(); + + let target_id = gb_repository.default_target().unwrap().unwrap().sha; + + let reference = project_repository.l(target_id, LogUntil::End).unwrap(); + + let cloud_code = test_remote_repository()?; + + let api_project = projects::ApiProject { + name: "test-sync".to_string(), + description: None, + repository_id: "123".to_string(), + git_url: String::new(), + code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()), + created_at: 0_i32.to_string(), + updated_at: 0_i32.to_string(), + sync: true, + }; + + suite + .projects + .update(&projects::UpdateRequest { + id: project.id, + api: Some(api_project.clone()), + ..Default::default() + }) + .await?; + + { + let listener = HandlerInner { + local_data_dir: suite.local_app_data.clone(), + project_store: suite.projects.clone(), + users: suite.users.clone(), + batch_size: 2, + }; + + listener.handle(&project.id).await.unwrap(); + } + + cloud_code.find_commit(target_id.into()).unwrap(); + + let pushed = log_walk(&cloud_code, target_id); + assert_eq!(reference.len(), pushed.len()); + assert_eq!(reference, pushed); + + assert_eq!( + suite + .projects + .get(&project.id) + .unwrap() + .gitbutler_code_push_state + .unwrap() + .id, + target_id + ); + + Ok(()) + } + + #[tokio::test] + async fn test_push_again_no_change() -> Result<()> { + let suite = Suite::default(); + let Case { + project, + gb_repository, + project_repository, + .. 
+ } = suite.new_case_with_files(HashMap::from([(PathBuf::from("test.txt"), "test")])); + + suite.sign_in(); + + set_test_target(&gb_repository, &project_repository).unwrap(); + + let target_id = gb_repository.default_target().unwrap().unwrap().sha; + + let reference = project_repository.l(target_id, LogUntil::End).unwrap(); + + let cloud_code = test_remote_repository()?; + + let api_project = projects::ApiProject { + name: "test-sync".to_string(), + description: None, + repository_id: "123".to_string(), + git_url: String::new(), + code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()), + created_at: 0_i32.to_string(), + updated_at: 0_i32.to_string(), + sync: true, + }; + + suite + .projects + .update(&projects::UpdateRequest { + id: project.id, + api: Some(api_project.clone()), + ..Default::default() + }) + .await?; + + cloud_code.find_commit(target_id.into()).unwrap_err(); + + { + let listener = HandlerInner { + local_data_dir: suite.local_app_data, + project_store: suite.projects.clone(), + users: suite.users, + batch_size: 10, + }; + + let res = listener.handle(&project.id).await.unwrap(); + assert!(res.is_empty()); + } + + cloud_code.find_commit(target_id.into()).unwrap(); + + let pushed = log_walk(&cloud_code, target_id); + assert_eq!(reference.len(), pushed.len()); + assert_eq!(reference, pushed); + + assert_eq!( + suite + .projects + .get(&project.id) + .unwrap() + .gitbutler_code_push_state + .unwrap() + .id, + target_id + ); + + Ok(()) + } + } +}