Merge pull request #3360 from gitbutlerapp/separate-integration-tests-app

Kiril Videlov 2024-03-28 15:05:20 +01:00 committed by GitHub
commit 8bb011dd11
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
83 changed files with 11013 additions and 10874 deletions


@@ -4,6 +4,15 @@ version = "0.0.0"
edition = "2021"
rust-version = "1.57"
authors = ["GitButler <gitbutler@gitbutler.com>"]
publish = false
[lib]
doctest = false
[[bin]]
name = "gitbutler-app"
path = "src/main.rs"
test = false
[build-dependencies]
tauri-build = { version = "1.5", features = [] }
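
For orientation, a minimal sketch of what this target split enables, assuming the library keeps the default crate name `gitbutler_app`; the file name `tests/smoke.rs` and the test body are illustrative and not part of this diff. With a `[lib]` target next to the `test = false` binary, out-of-crate integration tests under `tests/` link the library like any external crate:

// tests/smoke.rs, illustrative only, not part of this commit.
// The point is that the crate now resolves as an external library target.
use gitbutler_app::projects::ProjectId;

#[test]
fn library_target_links() {
    // Referencing a public type re-exported from src/lib.rs is enough to show
    // the library target links; no behavior is exercised here.
    let _ = std::any::type_name::<ProjectId>();
}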


@@ -46,28 +46,3 @@ impl Database {
Ok(result)
}
}
#[cfg(test)]
mod tests {
use crate::tests;
use super::*;
#[test]
fn smoke() {
let data_dir = tests::temp_dir();
let db = Database::open_in_directory(data_dir).unwrap();
db.transaction(|tx| {
tx.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)", [])
.unwrap();
tx.execute("INSERT INTO test (id) VALUES (1)", []).unwrap();
let mut stmt = tx.prepare("SELECT id FROM test").unwrap();
let mut rows = stmt.query([]).unwrap();
let row = rows.next().unwrap().unwrap();
let id: i32 = row.get(0).unwrap();
assert_eq!(id, 1_i32);
Ok(())
})
.unwrap();
}
}


@@ -120,107 +120,3 @@ fn insert_stmt<'conn>(
",
)?)
}
#[cfg(test)]
mod tests {
use crate::tests;
use super::*;
#[test]
fn insert_query() -> Result<()> {
let db = tests::test_database();
let database = Database::new(db);
let project_id = ProjectId::generate();
let session_id = SessionId::generate();
let file_path = path::PathBuf::from("file_path");
let delta1 = delta::Delta {
timestamp_ms: 0,
operations: vec![operations::Operation::Insert((0, "text".to_string()))],
};
let deltas = vec![delta1.clone()];
database.insert(&project_id, &session_id, &file_path, &deltas)?;
assert_eq!(
database.list_by_project_id_session_id(&project_id, &session_id, &None)?,
vec![(file_path.display().to_string(), vec![delta1])]
.into_iter()
.collect()
);
Ok(())
}
#[test]
fn insert_update() -> Result<()> {
let db = tests::test_database();
let database = Database::new(db);
let project_id = ProjectId::generate();
let session_id = SessionId::generate();
let file_path = path::PathBuf::from("file_path");
let delta1 = delta::Delta {
timestamp_ms: 0,
operations: vec![operations::Operation::Insert((0, "text".to_string()))],
};
let delta2 = delta::Delta {
timestamp_ms: 0,
operations: vec![operations::Operation::Insert((
0,
"updated_text".to_string(),
))],
};
database.insert(&project_id, &session_id, &file_path, &vec![delta1])?;
database.insert(&project_id, &session_id, &file_path, &vec![delta2.clone()])?;
assert_eq!(
database.list_by_project_id_session_id(&project_id, &session_id, &None)?,
vec![(file_path.display().to_string(), vec![delta2])]
.into_iter()
.collect()
);
Ok(())
}
#[test]
fn aggregate_deltas_by_file() -> Result<()> {
let db = tests::test_database();
let database = Database::new(db);
let project_id = ProjectId::generate();
let session_id = SessionId::generate();
let file_path1 = path::PathBuf::from("file_path1");
let file_path2 = path::PathBuf::from("file_path2");
let delta1 = delta::Delta {
timestamp_ms: 1,
operations: vec![operations::Operation::Insert((0, "text".to_string()))],
};
let delta2 = delta::Delta {
timestamp_ms: 2,
operations: vec![operations::Operation::Insert((
0,
"updated_text".to_string(),
))],
};
database.insert(&project_id, &session_id, &file_path1, &vec![delta1.clone()])?;
database.insert(&project_id, &session_id, &file_path2, &vec![delta1.clone()])?;
database.insert(&project_id, &session_id, &file_path2, &vec![delta2.clone()])?;
assert_eq!(
database.list_by_project_id_session_id(&project_id, &session_id, &None)?,
vec![
(file_path1.display().to_string(), vec![delta1.clone()]),
(file_path2.display().to_string(), vec![delta1, delta2])
]
.into_iter()
.collect()
);
Ok(())
}
}


@@ -71,46 +71,3 @@ impl<'writer> DeltasWriter<'writer> {
Ok(())
}
}
#[cfg(test)]
mod tests {
use std::vec;
use crate::{
deltas, sessions,
tests::{Case, Suite},
};
use super::*;
use deltas::operations::Operation;
#[test]
fn write_no_vbranches() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let deltas_writer = DeltasWriter::new(&gb_repository)?;
let session = gb_repository.get_or_create_current_session()?;
let session_reader = sessions::Reader::open(&gb_repository, &session)?;
let deltas_reader = deltas::Reader::new(&session_reader);
let path = "test.txt";
let deltas = vec![
deltas::Delta {
operations: vec![Operation::Insert((0, "hello".to_string()))],
timestamp_ms: 0,
},
deltas::Delta {
operations: vec![Operation::Insert((5, " world".to_string()))],
timestamp_ms: 0,
},
];
deltas_writer.write(path, &deltas).unwrap();
assert_eq!(deltas_reader.read_file(path).unwrap(), Some(deltas));
assert_eq!(deltas_reader.read_file("not found").unwrap(), None);
Ok(())
}
}


@@ -1,6 +1,3 @@
mod repository;
#[cfg(test)]
mod repository_tests;
pub use repository::{RemoteError, Repository};


@@ -417,7 +417,6 @@ impl Repository {
}
}
#[cfg(test)]
pub fn flush(
&self,
project_repository: &project_repository::Repository,
@@ -513,15 +512,15 @@ impl Repository {
}
}
pub(crate) fn root(&self) -> std::path::PathBuf {
pub fn root(&self) -> std::path::PathBuf {
self.git_repository.path().join("gitbutler")
}
pub(crate) fn session_path(&self) -> std::path::PathBuf {
pub fn session_path(&self) -> std::path::PathBuf {
self.root().join("session")
}
pub(crate) fn session_wd_path(&self) -> std::path::PathBuf {
pub fn session_wd_path(&self) -> std::path::PathBuf {
self.session_path().join("wd")
}
@@ -963,35 +962,37 @@ pub enum RemoteError {
Other(#[from] anyhow::Error),
}
#[cfg(test)]
mod test {
use std::path::PathBuf;
use anyhow::Result;
use pretty_assertions::assert_eq;
use crate::tests::{Case, Suite};
#[test]
fn test_alternates_file_being_set() -> Result<()> {
let Case {
gb_repository,
project_repository,
..
} = Suite::default().new_case();
let file_content = std::fs::read_to_string(
gb_repository
.git_repository
.path()
.join("objects/info/alternates"),
)?;
let file_content = PathBuf::from(file_content.trim());
let project_path = project_repository.path().to_path_buf().join(".git/objects");
assert_eq!(file_content, project_path);
Ok(())
}
}
// TODO: this is a unit-test - could use code from `tests::common` via custom module path
// to make it work.
// #[cfg(test)]
// mod test {
// use std::path::PathBuf;
//
// use anyhow::Result;
// use pretty_assertions::assert_eq;
//
// use crate::tests::{Case, Suite};
//
// #[test]
// fn test_alternates_file_being_set() -> Result<()> {
// let Case {
// gb_repository,
// project_repository,
// ..
// } = Suite::default().new_case();
//
// let file_content = std::fs::read_to_string(
// gb_repository
// .git_repository
// .path()
// .join("objects/info/alternates"),
// )?;
//
// let file_content = PathBuf::from(file_content.trim());
// let project_path = project_repository.path().to_path_buf().join(".git/objects");
//
// assert_eq!(file_content, project_path);
//
// Ok(())
// }
// }
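
A minimal sketch of the custom-module-path approach the TODO above mentions, assuming the shared helpers would live at `gitbutler-app/tests/shared/mod.rs` and export `Suite` and `Case`; the path and names are assumptions, not part of this commit:

// Hypothetical wiring, not in this commit: point a test-only module at the
// shared helpers' source file so the commented-out unit test could reuse them
// without the old crate::tests module.
#[cfg(test)]
#[path = "../../tests/shared/mod.rs"] // relative to this file's directory; assumed layout
mod test_support;
// A #[cfg(test)] mod could then `use super::test_support::{Case, Suite};` in
// place of the `crate::tests::{Case, Suite}` import shown above.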


@@ -66,41 +66,3 @@ impl Config {
}
}
}
#[cfg(test)]
mod tests {
use crate::tests;
#[test]
pub fn test_set_str() {
let repo = tests::test_repository();
let mut config = repo.config().unwrap();
config.set_str("test.key", "test.value").unwrap();
assert_eq!(
config.get_string("test.key").unwrap().unwrap(),
"test.value"
);
}
#[test]
pub fn test_set_bool() {
let repo = tests::test_repository();
let mut config = repo.config().unwrap();
config.set_bool("test.key", true).unwrap();
assert!(config.get_bool("test.key").unwrap().unwrap());
}
#[test]
pub fn test_get_string_none() {
let repo = tests::test_repository();
let config = repo.config().unwrap();
assert_eq!(config.get_string("test.key").unwrap(), None);
}
#[test]
pub fn test_get_bool_none() {
let repo = tests::test_repository();
let config = repo.config().unwrap();
assert_eq!(config.get_bool("test.key").unwrap(), None);
}
}


@@ -122,7 +122,6 @@ impl Helper {
}
}
#[cfg(test)]
pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Self {
let keys = keys::Controller::from_path(&path);
let users = users::Controller::from_path(path);
@@ -391,317 +390,3 @@ impl Helper {
Ok(flow)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::{self, test_repository};
#[derive(Default)]
struct TestCase<'a> {
remote_url: &'a str,
github_access_token: Option<&'a str>,
preferred_key: projects::AuthKey,
home_dir: Option<PathBuf>,
}
impl TestCase<'_> {
fn run(&self) -> Vec<(String, Vec<Credential>)> {
let local_app_data = tests::temp_dir();
let users = users::Controller::from_path(&local_app_data);
let user = users::User {
github_access_token: self.github_access_token.map(ToString::to_string),
..Default::default()
};
users.set_user(&user).unwrap();
let keys = keys::Controller::from_path(&local_app_data);
let helper = Helper::new(keys, users, self.home_dir.clone());
let repo = test_repository();
repo.remote(
"origin",
&self.remote_url.parse().expect("failed to parse remote url"),
)
.unwrap();
let project = projects::Project {
path: repo.workdir().unwrap().to_path_buf(),
preferred_key: self.preferred_key.clone(),
..Default::default()
};
let project_repository = project_repository::Repository::open(&project).unwrap();
let flow = helper.help(&project_repository, "origin").unwrap();
flow.into_iter()
.map(|(remote, credentials)| {
(
remote.url().unwrap().as_ref().unwrap().to_string(),
credentials,
)
})
.collect::<Vec<_>>()
}
}
mod not_github {
use super::*;
mod with_preferred_key {
use super::*;
#[test]
fn https() {
let test_case = TestCase {
remote_url: "https://gitlab.com/test-gitbutler/test.git",
github_access_token: Some("token"),
preferred_key: projects::AuthKey::Local {
private_key_path: PathBuf::from("/tmp/id_rsa"),
},
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@gitlab.com:test-gitbutler/test.git".to_string(),
);
assert_eq!(
flow[0].1,
vec![Credential::Ssh(SshCredential::Keyfile {
key_path: PathBuf::from("/tmp/id_rsa"),
passphrase: None,
})]
);
}
#[test]
fn ssh() {
let test_case = TestCase {
remote_url: "git@gitlab.com:test-gitbutler/test.git",
github_access_token: Some("token"),
preferred_key: projects::AuthKey::Local {
private_key_path: PathBuf::from("/tmp/id_rsa"),
},
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@gitlab.com:test-gitbutler/test.git".to_string(),
);
assert_eq!(
flow[0].1,
vec![Credential::Ssh(SshCredential::Keyfile {
key_path: PathBuf::from("/tmp/id_rsa"),
passphrase: None,
})]
);
}
}
mod with_github_token {
use super::*;
#[test]
fn https() {
let test_case = TestCase {
remote_url: "https://gitlab.com/test-gitbutler/test.git",
github_access_token: Some("token"),
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@gitlab.com:test-gitbutler/test.git".to_string(),
);
assert_eq!(flow[0].1.len(), 1);
assert!(matches!(
flow[0].1[0],
Credential::Ssh(SshCredential::GitButlerKey(_))
));
}
#[test]
fn ssh() {
let test_case = TestCase {
remote_url: "git@gitlab.com:test-gitbutler/test.git",
github_access_token: Some("token"),
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@gitlab.com:test-gitbutler/test.git".to_string(),
);
assert_eq!(flow[0].1.len(), 1);
assert!(matches!(
flow[0].1[0],
Credential::Ssh(SshCredential::GitButlerKey(_))
));
}
}
}
mod github {
use super::*;
mod with_github_token {
use super::*;
#[test]
fn https() {
let test_case = TestCase {
remote_url: "https://github.com/gitbutlerapp/gitbutler.git",
github_access_token: Some("token"),
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"https://github.com/gitbutlerapp/gitbutler.git".to_string(),
);
assert_eq!(
flow[0].1,
vec![Credential::Https(HttpsCredential::GitHubToken(
"token".to_string()
))]
);
}
#[test]
fn ssh() {
let test_case = TestCase {
remote_url: "git@github.com:gitbutlerapp/gitbutler.git",
github_access_token: Some("token"),
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"https://github.com/gitbutlerapp/gitbutler.git".to_string(),
);
assert_eq!(
flow[0].1,
vec![Credential::Https(HttpsCredential::GitHubToken(
"token".to_string()
))]
);
}
}
mod without_github_token {
use super::*;
mod without_preferred_key {
use super::*;
#[test]
fn https() {
let test_case = TestCase {
remote_url: "https://github.com/gitbutlerapp/gitbutler.git",
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@github.com:gitbutlerapp/gitbutler.git".to_string(),
);
assert_eq!(flow[0].1.len(), 1);
assert!(matches!(
flow[0].1[0],
Credential::Ssh(SshCredential::GitButlerKey(_))
));
}
#[test]
fn ssh() {
let test_case = TestCase {
remote_url: "git@github.com:gitbutlerapp/gitbutler.git",
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@github.com:gitbutlerapp/gitbutler.git".to_string(),
);
assert_eq!(flow[0].1.len(), 1);
assert!(matches!(
flow[0].1[0],
Credential::Ssh(SshCredential::GitButlerKey(_))
));
}
}
mod with_preferred_key {
use super::*;
#[test]
fn https() {
let test_case = TestCase {
remote_url: "https://github.com/gitbutlerapp/gitbutler.git",
github_access_token: Some("token"),
preferred_key: projects::AuthKey::Local {
private_key_path: PathBuf::from("/tmp/id_rsa"),
},
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@github.com:gitbutlerapp/gitbutler.git".to_string(),
);
assert_eq!(
flow[0].1,
vec![Credential::Ssh(SshCredential::Keyfile {
key_path: PathBuf::from("/tmp/id_rsa"),
passphrase: None,
})]
);
}
#[test]
fn ssh() {
let test_case = TestCase {
remote_url: "git@github.com:gitbutlerapp/gitbutler.git",
github_access_token: Some("token"),
preferred_key: projects::AuthKey::Local {
private_key_path: PathBuf::from("/tmp/id_rsa"),
},
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@github.com:gitbutlerapp/gitbutler.git".to_string(),
);
assert_eq!(
flow[0].1,
vec![Credential::Ssh(SshCredential::Keyfile {
key_path: PathBuf::from("/tmp/id_rsa"),
passphrase: None,
})]
);
}
}
}
}
}


@@ -419,166 +419,3 @@ pub fn diff_files_to_hunks(
}
file_hunks
}
#[cfg(test)]
mod tests {
use crate::tests;
use super::*;
#[test]
fn diff_simple_text() {
let repository = tests::test_repository();
std::fs::write(repository.workdir().unwrap().join("file"), "hello").unwrap();
let head_commit_id = repository.head().unwrap().peel_to_commit().unwrap().id();
let diff = workdir(&repository, &head_commit_id, 0).unwrap();
assert_eq!(diff.len(), 1);
assert_eq!(
diff[&path::PathBuf::from("file")].clone().hunks.unwrap(),
vec![GitHunk {
old_start: 0,
old_lines: 0,
new_start: 1,
new_lines: 1,
diff: "@@ -0,0 +1 @@\n+hello\n\\ No newline at end of file\n".to_string(),
binary: false,
change_type: ChangeType::Added,
}]
);
}
#[test]
fn diff_empty_file() {
let repository = tests::test_repository();
std::fs::write(repository.workdir().unwrap().join("first"), "").unwrap();
let head_commit_id = repository.head().unwrap().peel_to_commit().unwrap().id();
let diff = workdir(&repository, &head_commit_id, 0).unwrap();
assert_eq!(diff.len(), 1);
assert_eq!(
diff[&path::PathBuf::from("first")].clone().hunks.unwrap(),
vec![GitHunk {
old_start: 0,
old_lines: 0,
new_start: 0,
new_lines: 0,
diff: String::new(),
binary: false,
change_type: ChangeType::Modified,
}]
);
}
#[test]
fn diff_multiple_empty_files() {
let repository = tests::test_repository();
std::fs::write(repository.workdir().unwrap().join("first"), "").unwrap();
std::fs::write(repository.workdir().unwrap().join("second"), "").unwrap();
let head_commit_id = repository.head().unwrap().peel_to_commit().unwrap().id();
let diff = workdir(&repository, &head_commit_id, 0).unwrap();
assert_eq!(diff.len(), 2);
assert_eq!(
diff[&path::PathBuf::from("first")].clone().hunks.unwrap(),
vec![GitHunk {
old_start: 0,
old_lines: 0,
new_start: 0,
new_lines: 0,
diff: String::new(),
binary: false,
change_type: ChangeType::Modified,
}]
);
assert_eq!(
diff[&path::PathBuf::from("second")].clone().hunks.unwrap(),
vec![GitHunk {
old_start: 0,
old_lines: 0,
new_start: 0,
new_lines: 0,
diff: String::new(),
binary: false,
change_type: ChangeType::Modified,
}]
);
}
#[test]
fn diff_binary() {
let repository = tests::test_repository();
std::fs::write(
repository.workdir().unwrap().join("image"),
[
255, 0, 0, // Red pixel
0, 0, 255, // Blue pixel
255, 255, 0, // Yellow pixel
0, 255, 0, // Green pixel
],
)
.unwrap();
let head_commit_id = repository.head().unwrap().peel_to_commit().unwrap().id();
let diff = workdir(&repository, &head_commit_id, 0).unwrap();
assert_eq!(
diff[&path::PathBuf::from("image")].clone().hunks.unwrap(),
vec![GitHunk {
old_start: 0,
old_lines: 0,
new_start: 0,
new_lines: 0,
diff: "71ae6e216f38164b6633e25d35abb043c3785af6".to_string(),
binary: true,
change_type: ChangeType::Added,
}]
);
}
#[test]
fn diff_some_lines_are_binary() {
let repository = tests::test_repository();
std::fs::write(
repository.workdir().unwrap().join("file"),
[
// butler/test/fixtures/git/1/8e/18ec9df5-65c5-4828-97ba-d91ec4903a74/objects/1f/9d7d5dd0d3d3ced66cee36bf1dd42bd33d0aa8
120, 1, 101, 144, 79, 75, 195, 64, 16, 197, 61, 239, 167, 120, 160, 224, 165, 77, 3,
5, 17, 111, 42, 42, 245, 162, 135, 22, 60, 118, 155, 76, 179, 75, 55, 59, 97, 103,
182, 177, 223, 222, 77, 244, 38, 204, 97, 254, 188, 247, 155, 97, 14, 129, 15, 88,
223, 213, 87, 215, 120, 243, 250, 148, 53, 80, 194, 110, 131, 103, 142, 13, 13, 42,
198, 60, 10, 54, 183, 61, 34, 163, 99, 110, 97, 21, 175, 190, 235, 237, 98, 238,
102, 241, 177, 195, 214, 250, 48, 250, 216, 66, 25, 71, 223, 229, 68, 224, 172, 24,
93, 17, 111, 48, 218, 168, 80, 71, 5, 187, 218, 125, 77, 154, 192, 124, 66, 240,
39, 170, 176, 117, 94, 80, 98, 154, 147, 21, 79, 82, 124, 246, 50, 169, 90, 134,
215, 9, 36, 190, 45, 192, 35, 62, 131, 189, 116, 137, 115, 108, 23, 56, 20, 190,
78, 94, 103, 5, 103, 74, 226, 57, 162, 225, 168, 137, 67, 101, 204, 123, 46, 156,
148, 227, 172, 121, 48, 102, 191, 223, 155, 27, 196, 225, 27, 250, 119, 107, 35,
130, 165, 71, 181, 242, 113, 200, 90, 205, 37, 151, 82, 199, 223, 124, 57, 90, 109,
92, 49, 13, 23, 117, 28, 215, 88, 246, 112, 170, 67, 37, 148, 202, 62, 220, 215,
117, 61, 99, 205, 71, 90, 64, 184, 167, 114, 78, 249, 5, 5, 161, 202, 188, 156, 41,
162, 79, 76, 255, 38, 63, 226, 30, 123, 106,
],
)
.unwrap();
let head_commit_id = repository.head().unwrap().peel_to_commit().unwrap().id();
let diff = workdir(&repository, &head_commit_id, 0).unwrap();
assert_eq!(
diff[&path::PathBuf::from("file")].clone().hunks.unwrap(),
vec![GitHunk {
old_start: 0,
old_lines: 0,
new_start: 0,
new_lines: 0,
diff: "3fc41b9ae6836a94f41c78b4ce69d78b6e7080f1".to_string(),
binary: true,
change_type: ChangeType::Added,
}]
);
}
}


@@ -26,24 +26,18 @@ impl From<git2::Repository> for Repository {
}
impl Repository {
#[cfg(test)]
pub fn init_bare<P: AsRef<Path>>(path: P) -> Result<Self> {
let inner = git2::Repository::init_opts(path, &crate::tests::init_opts_bare())?;
Ok(Repository(inner))
}
pub fn init<P: AsRef<Path>>(path: P) -> Result<Self> {
let inner = git2::Repository::init(path)?;
Ok(Repository(inner))
}
pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
let inner = git2::Repository::open(path)?;
pub fn init_opts<P: AsRef<Path>>(path: P, opts: &git2::RepositoryInitOptions) -> Result<Self> {
let inner = git2::Repository::init_opts(path, opts)?;
Ok(Repository(inner))
}
pub fn init_opts<P: AsRef<Path>>(path: P, opts: &git2::RepositoryInitOptions) -> Result<Self> {
let inner = git2::Repository::init_opts(path, opts)?;
pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
let inner = git2::Repository::open(path)?;
Ok(Repository(inner))
}


@@ -12,7 +12,6 @@ impl Controller {
Self { storage }
}
#[cfg(test)]
pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Self {
Self::new(Storage::from_path(path))
}
@@ -33,32 +32,3 @@ pub enum GetOrCreateError {
#[error(transparent)]
Other(#[from] anyhow::Error),
}
#[cfg(not(target_os = "windows"))]
#[cfg(test)]
mod tests {
use std::fs;
#[cfg(target_family = "unix")]
use std::os::unix::prelude::*;
use crate::tests::Suite;
use super::*;
#[test]
fn test_get_or_create() {
let suite = Suite::default();
let controller = Controller::new(Storage::from_path(&suite.local_app_data));
let once = controller.get_or_create().unwrap();
let twice = controller.get_or_create().unwrap();
assert_eq!(once, twice);
// check permissions of the private key
let permissions = fs::metadata(suite.local_app_data.join("keys/ed25519"))
.unwrap()
.permissions();
let perms = format!("{:o}", permissions.mode());
assert_eq!(perms, "100600");
}
}


@@ -20,7 +20,6 @@ impl Storage {
Storage { storage }
}
#[cfg(test)]
pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Storage {
Storage::new(storage::Storage::new(path))
}

gitbutler-app/src/lib.rs (new file, 50 lines)

@@ -0,0 +1,50 @@
#![feature(error_generic_member_access)]
#![cfg_attr(windows, feature(windows_by_handle))]
#![cfg_attr(
all(windows, not(test), not(debug_assertions)),
windows_subsystem = "windows"
)]
// FIXME(qix-): Stuff we want to fix but don't have a lot of time for.
// FIXME(qix-): PRs welcome!
#![allow(
clippy::used_underscore_binding,
clippy::module_name_repetitions,
clippy::struct_field_names,
clippy::too_many_lines
)]
pub mod analytics;
pub mod app;
pub mod askpass;
pub mod assets;
pub mod commands;
pub mod database;
pub mod dedup;
pub mod deltas;
pub mod error;
pub mod events;
pub mod fs;
pub mod gb_repository;
pub mod git;
pub mod github;
pub mod id;
pub mod keys;
pub mod lock;
pub mod logs;
pub mod menu;
pub mod path;
pub mod project_repository;
pub mod projects;
pub mod reader;
pub mod sentry;
pub mod sessions;
pub mod ssh;
pub mod storage;
pub mod types;
pub mod users;
pub mod virtual_branches;
pub mod watcher;
#[cfg(target_os = "windows")]
pub mod windows;
pub mod writer;
pub mod zip;


@@ -49,98 +49,3 @@ impl Inner {
Ok(result)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::temp_dir;
#[tokio::test]
async fn test_lock_same_instance() {
let dir_path = temp_dir();
std::fs::write(dir_path.join("file.txt"), "").unwrap();
let dir = Dir::new(&dir_path).unwrap();
let (tx, rx) = std::sync::mpsc::sync_channel(1);
// spawn a task that will signal right after acquiring the lock
let _ = tokio::spawn({
let dir = dir.clone();
async move {
dir.batch(|root| {
tx.send(()).unwrap();
assert_eq!(
std::fs::read_to_string(root.join("file.txt")).unwrap(),
String::new()
);
std::fs::write(root.join("file.txt"), "1")
})
}
})
.await
.unwrap();
// then we wait until the lock is acquired
rx.recv().unwrap();
// and immediately try to lock again
dir.batch(|root| {
assert_eq!(std::fs::read_to_string(root.join("file.txt")).unwrap(), "1");
std::fs::write(root.join("file.txt"), "2")
})
.unwrap()
.unwrap();
assert_eq!(
std::fs::read_to_string(dir_path.join("file.txt")).unwrap(),
"2"
);
}
#[tokio::test]
async fn test_lock_different_instances() {
let dir_path = temp_dir();
std::fs::write(dir_path.join("file.txt"), "").unwrap();
let (tx, rx) = std::sync::mpsc::sync_channel(1);
// spawn a task that will signal right after acquiring the lock
let _ = tokio::spawn({
let dir_path = dir_path.clone();
async move {
// one dir instance is created on a separate thread
let dir = Dir::new(&dir_path).unwrap();
dir.batch(|root| {
tx.send(()).unwrap();
assert_eq!(
std::fs::read_to_string(root.join("file.txt")).unwrap(),
String::new()
);
std::fs::write(root.join("file.txt"), "1")
})
}
})
.await
.unwrap();
// another dir instance is created on the main thread
let dir = Dir::new(&dir_path).unwrap();
// then we wait until the lock is acquired
rx.recv().unwrap();
// and immediately try to lock again
dir.batch(|root| {
assert_eq!(std::fs::read_to_string(root.join("file.txt")).unwrap(), "1");
std::fs::write(root.join("file.txt"), "2")
})
.unwrap()
.unwrap();
assert_eq!(
std::fs::read_to_string(dir_path.join("file.txt")).unwrap(),
"2"
);
}
}


@@ -13,44 +13,28 @@
clippy::too_many_lines
)]
pub(crate) mod analytics;
pub(crate) mod app;
pub(crate) mod askpass;
pub(crate) mod assets;
pub(crate) mod commands;
pub(crate) mod database;
pub(crate) mod dedup;
pub(crate) mod deltas;
pub(crate) mod error;
pub(crate) mod events;
pub(crate) mod fs;
pub(crate) mod gb_repository;
pub(crate) mod git;
pub(crate) mod github;
pub(crate) mod id;
pub(crate) mod keys;
pub(crate) mod lock;
pub(crate) mod logs;
pub(crate) mod menu;
pub(crate) mod path;
pub(crate) mod project_repository;
pub(crate) mod projects;
pub(crate) mod reader;
pub(crate) mod sentry;
pub(crate) mod sessions;
pub(crate) mod ssh;
pub(crate) mod storage;
pub(crate) mod types;
pub(crate) mod users;
pub(crate) mod virtual_branches;
pub(crate) mod watcher;
use gitbutler_app::analytics;
use gitbutler_app::app;
use gitbutler_app::askpass;
use gitbutler_app::assets;
use gitbutler_app::commands;
use gitbutler_app::database;
use gitbutler_app::deltas;
use gitbutler_app::git;
use gitbutler_app::github;
use gitbutler_app::keys;
use gitbutler_app::logs;
use gitbutler_app::menu;
use gitbutler_app::projects;
use gitbutler_app::sentry;
use gitbutler_app::sessions;
use gitbutler_app::storage;
use gitbutler_app::users;
use gitbutler_app::virtual_branches;
use gitbutler_app::watcher;
#[cfg(target_os = "windows")]
pub(crate) mod windows;
pub(crate) mod writer;
pub(crate) mod zip;
#[cfg(test)]
pub(crate) mod tests;
use gitbutler_app::windows;
use gitbutler_app::zip;
use std::path::PathBuf;


@@ -312,7 +312,6 @@ impl Repository {
}
Ok(oids)
}
#[cfg(test)]
LogUntil::End => {
let mut revwalk = self
.git_repository
@@ -654,7 +653,6 @@ pub enum LogUntil {
Commit(git::Oid),
Take(usize),
When(Box<OidFilter>),
#[cfg(test)]
End,
}


@@ -7,5 +7,4 @@ pub use controller::*;
pub use project::{AuthKey, CodePushState, FetchResult, Project, ProjectId};
pub use storage::UpdateRequest;
#[cfg(test)]
pub use project::ApiProject;


@@ -26,7 +26,6 @@ impl Controller {
}
}
#[cfg(test)]
pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Self {
let pathbuf = path.as_ref().to_path_buf();
Self {


@@ -42,7 +42,6 @@ impl Storage {
Storage { storage }
}
#[cfg(test)]
pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Storage {
Storage::new(storage::Storage::new(path))
}


@@ -142,7 +142,7 @@ pub struct CommitReader<'reader> {
}
impl<'reader> CommitReader<'reader> {
fn new(
pub fn new(
repository: &'reader git::Repository,
commit: &git::Commit<'reader>,
) -> Result<CommitReader<'reader>> {
@@ -177,7 +177,7 @@ impl<'reader> CommitReader<'reader> {
Ok(Content::from(&blob))
}
fn list_files<P: AsRef<Path>>(&self, dir_path: P) -> Result<Vec<PathBuf>> {
pub fn list_files<P: AsRef<Path>>(&self, dir_path: P) -> Result<Vec<PathBuf>> {
let dir_path = dir_path.as_ref();
let mut files = vec![];
self.tree
@@ -204,7 +204,7 @@ impl<'reader> CommitReader<'reader> {
Ok(files)
}
fn exists<P: AsRef<Path>>(&self, file_path: P) -> bool {
pub fn exists<P: AsRef<Path>>(&self, file_path: P) -> bool {
self.tree.get_path(file_path.normalize()).is_ok()
}
}
@@ -441,189 +441,3 @@ impl TryFrom<&Content> for bool {
text.parse().map_err(FromError::ParseBool)
}
}
#[cfg(test)]
mod tests {
use super::*;
use anyhow::Result;
use crate::tests;
#[test]
fn test_directory_reader_read_file() -> Result<()> {
let dir = tests::temp_dir();
let file_path = Path::new("test.txt");
fs::write(dir.join(file_path), "test")?;
let reader = Reader::open(dir.clone())?;
assert_eq!(reader.read(file_path)?, Content::UTF8("test".to_string()));
Ok(())
}
#[test]
fn test_commit_reader_read_file() -> Result<()> {
let repository = tests::test_repository();
let file_path = Path::new("test.txt");
fs::write(repository.path().parent().unwrap().join(file_path), "test")?;
let oid = tests::commit_all(&repository);
fs::write(repository.path().parent().unwrap().join(file_path), "test2")?;
let reader = Reader::from_commit(&repository, &repository.find_commit(oid)?)?;
assert_eq!(reader.read(file_path)?, Content::UTF8("test".to_string()));
Ok(())
}
#[test]
fn test_reader_list_files_should_return_relative() -> Result<()> {
let dir = tests::temp_dir();
fs::write(dir.join("test1.txt"), "test")?;
fs::create_dir_all(dir.join("dir"))?;
fs::write(dir.join("dir").join("test.txt"), "test")?;
let reader = Reader::open(dir.clone())?;
let files = reader.list_files(Path::new("dir"))?;
assert_eq!(files.len(), 1);
assert!(files.contains(&Path::new("test.txt").to_path_buf()));
Ok(())
}
#[test]
fn test_reader_list_files() -> Result<()> {
let dir = tests::temp_dir();
fs::write(dir.join("test.txt"), "test")?;
fs::create_dir_all(dir.join("dir"))?;
fs::write(dir.join("dir").join("test.txt"), "test")?;
let reader = Reader::open(dir.clone())?;
let files = reader.list_files(Path::new(""))?;
assert_eq!(files.len(), 2);
assert!(files.contains(&Path::new("test.txt").to_path_buf()));
assert!(files.contains(&Path::new("dir/test.txt").to_path_buf()));
Ok(())
}
#[test]
fn test_commit_reader_list_files_should_return_relative() -> Result<()> {
let repository = tests::test_repository();
fs::write(
repository.path().parent().unwrap().join("test1.txt"),
"test",
)?;
fs::create_dir_all(repository.path().parent().unwrap().join("dir"))?;
fs::write(
repository
.path()
.parent()
.unwrap()
.join("dir")
.join("test.txt"),
"test",
)?;
let oid = tests::commit_all(&repository);
fs::remove_dir_all(repository.path().parent().unwrap().join("dir"))?;
let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?;
let files = reader.list_files(Path::new("dir"))?;
assert_eq!(files.len(), 1);
assert!(files.contains(&Path::new("test.txt").to_path_buf()));
Ok(())
}
#[test]
fn test_commit_reader_list_files() -> Result<()> {
let repository = tests::test_repository();
fs::write(repository.path().parent().unwrap().join("test.txt"), "test")?;
fs::create_dir_all(repository.path().parent().unwrap().join("dir"))?;
fs::write(
repository
.path()
.parent()
.unwrap()
.join("dir")
.join("test.txt"),
"test",
)?;
let oid = tests::commit_all(&repository);
fs::remove_dir_all(repository.path().parent().unwrap().join("dir"))?;
let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?;
let files = reader.list_files(Path::new(""))?;
assert_eq!(files.len(), 2);
assert!(files.contains(&Path::new("test.txt").to_path_buf()));
assert!(files.contains(&Path::new("dir/test.txt").to_path_buf()));
Ok(())
}
#[test]
fn test_directory_reader_exists() -> Result<()> {
let dir = tests::temp_dir();
fs::write(dir.join("test.txt"), "test")?;
let reader = Reader::open(dir.clone())?;
assert!(reader.exists(Path::new("test.txt"))?);
assert!(!reader.exists(Path::new("test2.txt"))?);
Ok(())
}
#[test]
fn test_commit_reader_exists() -> Result<()> {
let repository = tests::test_repository();
fs::write(repository.path().parent().unwrap().join("test.txt"), "test")?;
let oid = tests::commit_all(&repository);
fs::remove_file(repository.path().parent().unwrap().join("test.txt"))?;
let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?;
assert!(reader.exists(Path::new("test.txt")));
assert!(!reader.exists(Path::new("test2.txt")));
Ok(())
}
#[test]
fn test_from_bytes() {
for (bytes, expected) in [
("test".as_bytes(), Content::UTF8("test".to_string())),
(&[0, 159, 146, 150, 159, 146, 150], Content::Binary),
] {
assert_eq!(Content::from(bytes), expected);
}
}
#[test]
fn test_serialize_content() {
for (content, expected) in [
(
Content::UTF8("test".to_string()),
r#"{"type":"utf8","value":"test"}"#,
),
(Content::Binary, r#"{"type":"binary"}"#),
(Content::Large, r#"{"type":"large"}"#),
] {
assert_eq!(serde_json::to_string(&content).unwrap(), expected);
}
}
}


@@ -1,15 +1,12 @@
mod controller;
mod iterator;
mod reader;
mod session;
pub mod session;
mod writer;
pub mod commands;
pub mod database;
#[cfg(test)]
mod tests;
pub use controller::Controller;
pub use database::Database;
pub use iterator::SessionsIterator;


@@ -180,91 +180,3 @@ fn insert_stmt<'conn>(
",
)?)
}
#[cfg(test)]
mod tests {
use crate::tests;
use super::*;
#[test]
fn test_insert_query() -> Result<()> {
let db = tests::test_database();
println!("0");
let database = Database::new(db);
println!("1");
let project_id = ProjectId::generate();
let session1 = session::Session {
id: SessionId::generate(),
hash: None,
meta: session::Meta {
branch: None,
commit: None,
start_timestamp_ms: 1,
last_timestamp_ms: 2,
},
};
let session2 = session::Session {
id: SessionId::generate(),
hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()),
meta: session::Meta {
branch: Some("branch2".to_string()),
commit: Some("commit2".to_string()),
start_timestamp_ms: 3,
last_timestamp_ms: 4,
},
};
let sessions = vec![&session1, &session2];
database.insert(&project_id, &sessions)?;
assert_eq!(
database.list_by_project_id(&project_id, None)?,
vec![session2.clone(), session1.clone()]
);
assert_eq!(database.get_by_id(&session1.id)?.unwrap(), session1);
assert_eq!(database.get_by_id(&session2.id)?.unwrap(), session2);
assert_eq!(database.get_by_id(&SessionId::generate())?, None);
Ok(())
}
#[test]
fn test_update() -> Result<()> {
let db = tests::test_database();
let database = Database::new(db);
let project_id = ProjectId::generate();
let session = session::Session {
id: SessionId::generate(),
hash: None,
meta: session::Meta {
branch: None,
commit: None,
start_timestamp_ms: 1,
last_timestamp_ms: 2,
},
};
let session_updated = session::Session {
id: session.id,
hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()),
meta: session::Meta {
branch: Some("branch2".to_string()),
commit: Some("commit2".to_string()),
start_timestamp_ms: 3,
last_timestamp_ms: 4,
},
};
database.insert(&project_id, &[&session])?;
database.insert(&project_id, &[&session_updated])?;
assert_eq!(
database.list_by_project_id(&project_id, None)?,
vec![session_updated.clone()]
);
assert_eq!(database.get_by_id(&session.id)?.unwrap(), session_updated);
Ok(())
}
}

File diff suppressed because it is too large.


@@ -12,7 +12,6 @@ impl Controller {
Controller { storage }
}
#[cfg(test)]
pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Controller {
Controller::new(Storage::from_path(path))
}


@@ -22,7 +22,6 @@ impl Storage {
Storage { storage }
}
#[cfg(test)]
pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Storage {
Storage::new(storage::Storage::new(path))
}


@@ -8,7 +8,7 @@ pub mod errors;
mod files;
pub use files::*;
mod integration;
pub mod integration;
pub use integration::GITBUTLER_INTEGRATION_REFERENCE;
mod base;
@@ -22,11 +22,6 @@ pub mod commands;
mod iterator;
pub use iterator::BranchIterator as Iterator;
#[cfg(test)]
mod tests;
#[cfg(test)]
pub use tests::set_test_target;
mod r#virtual;
pub use r#virtual::*;


@@ -17,107 +17,3 @@ impl<'r> BranchReader<'r> {
Branch::from_reader(&self.reader.sub(format!("branches/{}", id)))
}
}
#[cfg(test)]
mod tests {
use std::sync::atomic::{AtomicUsize, Ordering};
use anyhow::Result;
use once_cell::sync::Lazy;
use crate::{
sessions,
tests::{Case, Suite},
virtual_branches::branch::BranchOwnershipClaims,
};
use super::{super::Writer, *};
static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_branch() -> Branch {
TEST_INDEX.fetch_add(1, Ordering::Relaxed);
Branch {
id: BranchId::generate(),
name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)),
notes: String::new(),
applied: true,
order: TEST_INDEX.load(Ordering::Relaxed),
upstream: Some(
format!(
"refs/remotes/origin/upstream_{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
),
upstream_head: Some(
format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
),
created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128,
updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128,
head: format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
tree: format!(
"0123456789abcdef0123456789abcdef012345{}",
TEST_INDEX.load(Ordering::Relaxed) + 10
)
.parse()
.unwrap(),
ownership: BranchOwnershipClaims {
claims: vec![format!("file/{}:1-2", TEST_INDEX.load(Ordering::Relaxed))
.parse()
.unwrap()],
},
selected_for_changes: Some(1),
}
}
#[test]
fn test_read_not_found() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let session = gb_repository.get_or_create_current_session()?;
let session_reader = sessions::Reader::open(&gb_repository, &session)?;
let reader = BranchReader::new(&session_reader);
let result = reader.read(&BranchId::generate());
assert!(result.is_err());
assert_eq!(result.unwrap_err().to_string(), "file not found");
Ok(())
}
#[test]
fn test_read_override() -> Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let writer = Writer::new(&gb_repository, project.gb_dir())?;
writer.write(&mut branch)?;
let session = gb_repository.get_current_session()?.unwrap();
let session_reader = sessions::Reader::open(&gb_repository, &session)?;
let reader = BranchReader::new(&session_reader);
assert_eq!(branch, reader.read(&branch.id).unwrap());
Ok(())
}
}


@@ -158,224 +158,3 @@ impl<'writer> BranchWriter<'writer> {
Ok(())
}
}
#[cfg(test)]
mod tests {
use std::{
fs,
sync::atomic::{AtomicUsize, Ordering},
};
use anyhow::Context;
use once_cell::sync::Lazy;
use crate::{
tests::{Case, Suite},
virtual_branches::branch,
};
use self::branch::BranchId;
use super::*;
static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_branch() -> Branch {
TEST_INDEX.fetch_add(1, Ordering::Relaxed);
Branch {
id: BranchId::generate(),
name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)),
notes: String::new(),
applied: true,
upstream: Some(
format!(
"refs/remotes/origin/upstream_{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
),
upstream_head: None,
created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128,
updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128,
head: format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
tree: format!(
"0123456789abcdef0123456789abcdef012345{}",
TEST_INDEX.load(Ordering::Relaxed) + 10
)
.parse()
.unwrap(),
ownership: branch::BranchOwnershipClaims {
claims: vec![branch::OwnershipClaim {
file_path: format!("file/{}:1-2", TEST_INDEX.load(Ordering::Relaxed)).into(),
hunks: vec![],
}],
},
order: TEST_INDEX.load(Ordering::Relaxed),
selected_for_changes: Some(1),
}
}
#[test]
fn test_write_branch() -> Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let writer = BranchWriter::new(&gb_repository, project.gb_dir())?;
writer.write(&mut branch)?;
let root = gb_repository
.root()
.join("branches")
.join(branch.id.to_string());
assert_eq!(
fs::read_to_string(root.join("meta").join("name").to_str().unwrap())
.context("Failed to read branch name")?,
branch.name
);
assert_eq!(
fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())?
.parse::<bool>()
.context("Failed to read branch applied")?,
branch.applied
);
assert_eq!(
fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap())
.context("Failed to read branch upstream")?,
branch.upstream.clone().unwrap().to_string()
);
assert_eq!(
fs::read_to_string(
root.join("meta")
.join("created_timestamp_ms")
.to_str()
.unwrap()
)
.context("Failed to read branch created timestamp")?
.parse::<u128>()
.context("Failed to parse branch created timestamp")?,
branch.created_timestamp_ms
);
assert_eq!(
fs::read_to_string(
root.join("meta")
.join("updated_timestamp_ms")
.to_str()
.unwrap()
)
.context("Failed to read branch updated timestamp")?
.parse::<u128>()
.context("Failed to parse branch updated timestamp")?,
branch.updated_timestamp_ms
);
writer.delete(&branch)?;
fs::read_dir(root).unwrap_err();
Ok(())
}
#[test]
fn test_should_create_session() -> Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let writer = BranchWriter::new(&gb_repository, project.gb_dir())?;
writer.write(&mut branch)?;
assert!(gb_repository.get_current_session()?.is_some());
Ok(())
}
#[test]
fn test_should_update() -> Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let writer = BranchWriter::new(&gb_repository, project.gb_dir())?;
writer.write(&mut branch)?;
let mut updated_branch = Branch {
name: "updated_name".to_string(),
applied: false,
upstream: Some("refs/remotes/origin/upstream_updated".parse().unwrap()),
created_timestamp_ms: 2,
updated_timestamp_ms: 3,
ownership: branch::BranchOwnershipClaims { claims: vec![] },
..branch.clone()
};
writer.write(&mut updated_branch)?;
let root = gb_repository
.root()
.join("branches")
.join(branch.id.to_string());
assert_eq!(
fs::read_to_string(root.join("meta").join("name").to_str().unwrap())
.context("Failed to read branch name")?,
updated_branch.name
);
assert_eq!(
fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())?
.parse::<bool>()
.context("Failed to read branch applied")?,
updated_branch.applied
);
assert_eq!(
fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap())
.context("Failed to read branch upstream")?,
updated_branch.upstream.unwrap().to_string()
);
assert_eq!(
fs::read_to_string(
root.join("meta")
.join("created_timestamp_ms")
.to_str()
.unwrap()
)
.context("Failed to read branch created timestamp")?
.parse::<u128>()
.context("Failed to parse branch created timestamp")?,
updated_branch.created_timestamp_ms
);
assert_eq!(
fs::read_to_string(
root.join("meta")
.join("updated_timestamp_ms")
.to_str()
.unwrap()
)
.context("Failed to read branch updated timestamp")?
.parse::<u128>()
.context("Failed to parse branch updated timestamp")?,
updated_branch.updated_timestamp_ms
);
Ok(())
}
}


@@ -54,125 +54,3 @@ impl Iterator for BranchIterator<'_> {
Some(branch)
}
}
#[cfg(test)]
mod tests {
use std::sync::atomic::{AtomicUsize, Ordering};
use anyhow::Result;
use once_cell::sync::Lazy;
use crate::{
reader, sessions,
tests::{Case, Suite},
virtual_branches::target,
};
use super::*;
static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_branch() -> branch::Branch {
TEST_INDEX.fetch_add(1, Ordering::Relaxed);
branch::Branch {
id: BranchId::generate(),
name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)),
notes: String::new(),
applied: true,
upstream: Some(
format!(
"refs/remotes/origin/upstream_{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
),
upstream_head: None,
created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128,
updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128,
head: format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
tree: format!(
"0123456789abcdef0123456789abcdef012345{}",
TEST_INDEX.load(Ordering::Relaxed) + 10
)
.parse()
.unwrap(),
ownership: branch::BranchOwnershipClaims::default(),
order: TEST_INDEX.load(Ordering::Relaxed),
selected_for_changes: Some(1),
}
}
static TEST_TARGET_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_target() -> target::Target {
target::Target {
branch: format!(
"refs/remotes/branch name{}/remote name {}",
TEST_TARGET_INDEX.load(Ordering::Relaxed),
TEST_TARGET_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
remote_url: format!("remote url {}", TEST_TARGET_INDEX.load(Ordering::Relaxed)),
sha: format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_TARGET_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
}
}
#[test]
fn test_empty_iterator() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let session = gb_repository.get_or_create_current_session()?;
let session_reader = sessions::Reader::open(&gb_repository, &session)?;
let iter = BranchIterator::new(&session_reader)?;
assert_eq!(iter.count(), 0);
Ok(())
}
#[test]
fn test_iterate_all() -> Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let target_writer = target::Writer::new(&gb_repository, project.gb_dir())?;
target_writer.write_default(&test_target())?;
let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
let mut branch_1 = test_branch();
branch_writer.write(&mut branch_1)?;
let mut branch_2 = test_branch();
branch_writer.write(&mut branch_2)?;
let mut branch_3 = test_branch();
branch_writer.write(&mut branch_3)?;
let session = gb_repository.get_current_session()?.unwrap();
let session_reader = sessions::Reader::open(&gb_repository, &session)?;
let iter =
BranchIterator::new(&session_reader)?.collect::<Result<Vec<_>, reader::Error>>()?;
assert_eq!(iter.len(), 3);
assert!(iter.contains(&branch_1));
assert!(iter.contains(&branch_2));
assert!(iter.contains(&branch_3));
Ok(())
}
}


@@ -29,157 +29,3 @@ impl<'r> TargetReader<'r> {
Target::try_from(&self.reader.sub(format!("branches/{}/target", id)))
}
}
#[cfg(test)]
mod tests {
use std::sync::atomic::{AtomicUsize, Ordering};
use anyhow::Result;
use once_cell::sync::Lazy;
use crate::{
sessions,
tests::{Case, Suite},
virtual_branches::{branch, target::writer::TargetWriter},
};
use super::*;
static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_branch() -> branch::Branch {
TEST_INDEX.fetch_add(1, Ordering::Relaxed);
branch::Branch {
id: BranchId::generate(),
name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)),
notes: String::new(),
applied: true,
upstream: Some(
format!(
"refs/remotes/origin/upstream_{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
),
upstream_head: None,
created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128,
updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128,
head: format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
tree: format!(
"0123456789abcdef0123456789abcdef012345{}",
(TEST_INDEX.load(Ordering::Relaxed) + 10)
)
.parse()
.unwrap(),
ownership: branch::BranchOwnershipClaims {
claims: vec![branch::OwnershipClaim {
file_path: format!("file/{}", TEST_INDEX.load(Ordering::Relaxed)).into(),
hunks: vec![],
}],
},
order: TEST_INDEX.load(Ordering::Relaxed),
selected_for_changes: None,
}
}
#[test]
fn test_read_not_found() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let session = gb_repository.get_or_create_current_session()?;
let session_reader = sessions::Reader::open(&gb_repository, &session)?;
let reader = TargetReader::new(&session_reader);
let result = reader.read(&BranchId::generate());
assert!(result.is_err());
assert_eq!(result.unwrap_err().to_string(), "file not found");
Ok(())
}
#[test]
fn test_read_deprecated_format() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let writer = crate::writer::DirWriter::open(gb_repository.root())?;
writer
.write_string("branches/target/name", "origin/master")
.unwrap();
writer
.write_string(
"branches/target/remote",
"git@github.com:gitbutlerapp/gitbutler.git",
)
.unwrap();
writer
.write_string(
"branches/target/sha",
"dd945831869e9593448aa622fa4342bbfb84813d",
)
.unwrap();
let session = gb_repository.get_or_create_current_session()?;
let session_reader = sessions::Reader::open(&gb_repository, &session)?;
let reader = TargetReader::new(&session_reader);
let read = reader.read_default().unwrap();
assert_eq!(read.branch.branch(), "master");
assert_eq!(read.branch.remote(), "origin");
assert_eq!(read.remote_url, "git@github.com:gitbutlerapp/gitbutler.git");
assert_eq!(
read.sha.to_string(),
"dd945831869e9593448aa622fa4342bbfb84813d"
);
Ok(())
}
#[test]
fn test_read_override_target() -> Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let target = Target {
branch: "refs/remotes/remote/branch".parse().unwrap(),
remote_url: "remote url".to_string(),
sha: "fedcba9876543210fedcba9876543210fedcba98".parse().unwrap(),
};
let default_target = Target {
branch: "refs/remotes/default remote/default branch"
.parse()
.unwrap(),
remote_url: "default remote url".to_string(),
sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(),
};
let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
branch_writer.write(&mut branch)?;
let session = gb_repository.get_current_session()?.unwrap();
let session_reader = sessions::Reader::open(&gb_repository, &session)?;
let target_writer = TargetWriter::new(&gb_repository, project.gb_dir())?;
let reader = TargetReader::new(&session_reader);
target_writer.write_default(&default_target)?;
assert_eq!(default_target, reader.read(&branch.id)?);
target_writer.write(&branch.id, &target)?;
assert_eq!(target, reader.read(&branch.id)?);
Ok(())
}
}


@@ -107,219 +107,3 @@ impl<'writer> TargetWriter<'writer> {
Ok(())
}
}
#[cfg(test)]
mod tests {
use std::{
fs,
sync::atomic::{AtomicUsize, Ordering},
};
use once_cell::sync::Lazy;
use crate::{
tests::{Case, Suite},
virtual_branches::branch,
};
use super::{super::Target, *};
static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_branch() -> branch::Branch {
TEST_INDEX.fetch_add(1, Ordering::Relaxed);
branch::Branch {
id: BranchId::generate(),
name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)),
notes: format!("branch_notes_{}", TEST_INDEX.load(Ordering::Relaxed)),
applied: true,
created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128,
upstream: Some(
format!(
"refs/remotes/origin/upstream_{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
),
upstream_head: None,
updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128,
head: format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
tree: format!(
"0123456789abcdef0123456789abcdef012345{}",
TEST_INDEX.load(Ordering::Relaxed) + 10
)
.parse()
.unwrap(),
ownership: branch::BranchOwnershipClaims {
claims: vec![branch::OwnershipClaim {
file_path: format!("file/{}", TEST_INDEX.load(Ordering::Relaxed)).into(),
hunks: vec![],
}],
},
order: TEST_INDEX.load(Ordering::Relaxed),
selected_for_changes: None,
}
}
#[test]
fn test_write() -> Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let target = Target {
branch: "refs/remotes/remote name/branch name".parse().unwrap(),
remote_url: "remote url".to_string(),
sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(),
};
let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
branch_writer.write(&mut branch)?;
let target_writer = TargetWriter::new(&gb_repository, project.gb_dir())?;
target_writer.write(&branch.id, &target)?;
let root = gb_repository
.root()
.join("branches")
.join(branch.id.to_string());
assert_eq!(
fs::read_to_string(root.join("meta").join("name").to_str().unwrap())
.context("Failed to read branch name")?,
branch.name
);
assert_eq!(
fs::read_to_string(root.join("target").join("branch_name").to_str().unwrap())
.context("Failed to read branch target name")?,
format!("{}/{}", target.branch.remote(), target.branch.branch())
);
assert_eq!(
fs::read_to_string(root.join("target").join("remote_name").to_str().unwrap())
.context("Failed to read branch target name name")?,
target.branch.remote()
);
assert_eq!(
fs::read_to_string(root.join("target").join("remote_url").to_str().unwrap())
.context("Failed to read branch target remote url")?,
target.remote_url
);
assert_eq!(
fs::read_to_string(root.join("target").join("sha").to_str().unwrap())
.context("Failed to read branch target sha")?,
target.sha.to_string()
);
assert_eq!(
fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())?
.parse::<bool>()
.context("Failed to read branch applied")?,
branch.applied
);
assert_eq!(
fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap())
.context("Failed to read branch upstream")?,
branch.upstream.unwrap().to_string()
);
assert_eq!(
fs::read_to_string(
root.join("meta")
.join("created_timestamp_ms")
.to_str()
.unwrap()
)
.context("Failed to read branch created timestamp")?
.parse::<u128>()
.context("Failed to parse branch created timestamp")?,
branch.created_timestamp_ms
);
assert_eq!(
fs::read_to_string(
root.join("meta")
.join("updated_timestamp_ms")
.to_str()
.unwrap()
)
.context("Failed to read branch updated timestamp")?
.parse::<u128>()
.context("Failed to parse branch updated timestamp")?,
branch.updated_timestamp_ms
);
Ok(())
}
#[test]
fn test_should_update() -> Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let target = Target {
branch: "refs/remotes/remote name/branch name".parse().unwrap(),
remote_url: "remote url".to_string(),
sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(),
};
let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
branch_writer.write(&mut branch)?;
let target_writer = TargetWriter::new(&gb_repository, project.gb_dir())?;
target_writer.write(&branch.id, &target)?;
let updated_target = Target {
branch: "refs/remotes/updated remote name/updated branch name"
.parse()
.unwrap(),
remote_url: "updated remote url".to_string(),
sha: "fedcba9876543210fedcba9876543210fedcba98".parse().unwrap(),
};
target_writer.write(&branch.id, &updated_target)?;
let root = gb_repository
.root()
.join("branches")
.join(branch.id.to_string());
assert_eq!(
fs::read_to_string(root.join("target").join("branch_name").to_str().unwrap())
.context("Failed to read branch target branch name")?,
format!(
"{}/{}",
updated_target.branch.remote(),
updated_target.branch.branch()
)
);
assert_eq!(
fs::read_to_string(root.join("target").join("remote_name").to_str().unwrap())
.context("Failed to read branch target remote name")?,
updated_target.branch.remote()
);
assert_eq!(
fs::read_to_string(root.join("target").join("remote_url").to_str().unwrap())
.context("Failed to read branch target remote url")?,
updated_target.remote_url
);
assert_eq!(
fs::read_to_string(root.join("target").join("sha").to_str().unwrap())
.context("Failed to read branch target sha")?,
updated_target.sha.to_string()
);
Ok(())
}
}


@@ -1,6 +1,6 @@
mod dispatchers;
mod events;
mod handlers;
pub mod handlers;
use std::{collections::HashMap, path, sync::Arc, time};


@@ -1,13 +1,13 @@
mod analytics_handler;
mod calculate_deltas_handler;
pub mod calculate_deltas_handler;
mod caltulate_virtual_branches_handler;
mod fetch_gitbutler_data;
pub mod fetch_gitbutler_data;
mod filter_ignored_files;
mod flush_session;
mod git_file_change;
pub mod git_file_change;
mod index_handler;
mod push_gitbutler_data;
mod push_project_to_gitbutler;
pub mod push_project_to_gitbutler;
use std::time;
@@ -195,11 +195,3 @@ impl Handler {
}
}
}
#[cfg(test)]
fn test_remote_repository() -> Result<git2::Repository> {
let path = tempfile::tempdir()?.path().to_str().unwrap().to_string();
let repo_a = git2::Repository::init_opts(path, &crate::tests::init_opts_bare())?;
Ok(repo_a)
}


@@ -10,7 +10,7 @@ use super::events;
#[derive(Clone)]
pub struct Handler {
inner: Arc<Mutex<InnerHandler>>,
state: Arc<Mutex<State>>,
}
impl TryFrom<&AppHandle> for Handler {
@@ -22,8 +22,7 @@ impl TryFrom<&AppHandle> for Handler {
} else if let Some(app_data_dir) = value.path_resolver().app_data_dir() {
let projects = value.state::<projects::Controller>().inner().clone();
let users = value.state::<users::Controller>().inner().clone();
let inner = InnerHandler::new(app_data_dir, projects, users);
let handler = Handler::new(inner);
let handler = Handler::new(app_data_dir, projects, users);
value.manage(handler.clone());
Ok(handler)
} else {
@@ -33,41 +32,17 @@ impl TryFrom<&AppHandle> for Handler {
}
impl Handler {
fn new(inner: InnerHandler) -> Self {
Self {
inner: Arc::new(Mutex::new(inner)),
}
}
pub async fn handle(
&self,
project_id: &ProjectId,
now: &time::SystemTime,
) -> Result<Vec<events::Event>> {
if let Ok(inner) = self.inner.try_lock() {
inner.handle(project_id, now).await
} else {
Ok(vec![])
}
}
}
struct InnerHandler {
local_data_dir: path::PathBuf,
projects: projects::Controller,
users: users::Controller,
}
impl InnerHandler {
fn new(
pub fn new(
local_data_dir: path::PathBuf,
projects: projects::Controller,
users: users::Controller,
) -> Self {
Self {
local_data_dir,
projects,
users,
state: Arc::new(Mutex::new(State {
local_data_dir,
projects,
users,
})),
}
}
@@ -76,9 +51,21 @@ impl InnerHandler {
project_id: &ProjectId,
now: &time::SystemTime,
) -> Result<Vec<events::Event>> {
let user = self.users.get_user()?;
if let Ok(state) = self.state.try_lock() {
Self::handle_inner(&state, project_id, now).await
} else {
Ok(vec![])
}
}
let project = self
async fn handle_inner(
state: &State,
project_id: &ProjectId,
now: &time::SystemTime,
) -> Result<Vec<events::Event>> {
let user = state.users.get_user()?;
let project = state
.projects
.get(project_id)
.context("failed to get project")?;
@@ -90,7 +77,7 @@ impl InnerHandler {
let project_repository =
project_repository::Repository::open(&project).context("failed to open repository")?;
let gb_repo = gb_repository::Repository::open(
&self.local_data_dir,
&state.local_data_dir,
&project_repository,
user.as_ref(),
)
@@ -132,7 +119,8 @@ impl InnerHandler {
}
};
self.projects
state
.projects
.update(&projects::UpdateRequest {
id: *project_id,
gitbutler_data_last_fetched: Some(fetch_result),
@@ -161,71 +149,8 @@ impl InnerHandler {
}
}
#[cfg(test)]
mod test {
use std::time::SystemTime;
use pretty_assertions::assert_eq;
use crate::tests::{Case, Suite};
use super::super::test_remote_repository;
use super::*;
#[tokio::test]
async fn test_fetch_success() -> Result<()> {
let suite = Suite::default();
let Case { project, .. } = suite.new_case();
let cloud = test_remote_repository()?;
let api_project = projects::ApiProject {
name: "test-sync".to_string(),
description: None,
repository_id: "123".to_string(),
git_url: cloud.path().to_str().unwrap().to_string(),
code_git_url: None,
created_at: 0_i32.to_string(),
updated_at: 0_i32.to_string(),
sync: true,
};
suite
.projects
.update(&projects::UpdateRequest {
id: project.id,
api: Some(api_project.clone()),
..Default::default()
})
.await?;
let listener = InnerHandler {
local_data_dir: suite.local_app_data,
projects: suite.projects,
users: suite.users,
};
listener
.handle(&project.id, &SystemTime::now())
.await
.unwrap();
Ok(())
}
#[tokio::test]
async fn test_fetch_fail_no_sync() {
let suite = Suite::default();
let Case { project, .. } = suite.new_case();
let listener = InnerHandler {
local_data_dir: suite.local_app_data,
projects: suite.projects,
users: suite.users,
};
let res = listener.handle(&project.id, &SystemTime::now()).await;
assert_eq!(&res.unwrap_err().to_string(), "sync disabled");
}
struct State {
local_data_dir: path::PathBuf,
projects: projects::Controller,
users: users::Controller,
}

View File

@@ -37,7 +37,7 @@ impl TryFrom<&AppHandle> for Handler {
}
impl Handler {
fn new(
pub fn new(
local_data_dir: path::PathBuf,
projects: projects::Controller,
users: users::Controller,
@@ -132,109 +132,3 @@ impl Handler {
}
}
}
#[cfg(test)]
mod test {
use std::fs;
use events::Event;
use pretty_assertions::assert_eq;
use crate::{
tests::{Case, Suite},
watcher::handlers,
};
use super::*;
#[test]
fn test_flush_session() -> Result<()> {
let suite = Suite::default();
let Case {
project,
gb_repository,
..
} = suite.new_case();
assert!(gb_repository.get_current_session()?.is_none());
create_new_session_via_new_file(&project, &suite);
assert!(gb_repository.get_current_session()?.is_some());
let listener = Handler {
local_data_dir: suite.local_app_data,
projects: suite.projects,
users: suite.users,
};
let flush_file_path = project.path.join(".git/GB_FLUSH");
fs::write(flush_file_path.as_path(), "")?;
let result = listener.handle("GB_FLUSH", &project.id)?;
assert_eq!(result.len(), 1);
assert!(matches!(result[0], Event::Flush(_, _)));
assert!(!flush_file_path.exists(), "flush file deleted");
Ok(())
}
#[test]
fn test_do_not_flush_session_if_file_is_missing() -> Result<()> {
let suite = Suite::default();
let Case {
project,
gb_repository,
..
} = suite.new_case();
assert!(gb_repository.get_current_session()?.is_none());
create_new_session_via_new_file(&project, &suite);
assert!(gb_repository.get_current_session()?.is_some());
let listener = Handler {
local_data_dir: suite.local_app_data,
projects: suite.projects,
users: suite.users,
};
let result = listener.handle("GB_FLUSH", &project.id)?;
assert_eq!(result.len(), 0);
Ok(())
}
fn create_new_session_via_new_file(project: &projects::Project, suite: &Suite) {
fs::write(project.path.join("test.txt"), "test").unwrap();
let file_change_listener =
handlers::calculate_deltas_handler::Handler::from_path(&suite.local_app_data);
file_change_listener
.handle("test.txt", &project.id)
.unwrap();
}
#[test]
fn test_flush_deletes_flush_file_without_session_to_flush() -> Result<()> {
let suite = Suite::default();
let Case { project, .. } = suite.new_case();
let listener = Handler {
local_data_dir: suite.local_app_data,
projects: suite.projects,
users: suite.users,
};
let flush_file_path = project.path.join(".git/GB_FLUSH");
fs::write(flush_file_path.as_path(), "")?;
let result = listener.handle("GB_FLUSH", &project.id)?;
assert_eq!(result.len(), 0);
assert!(!flush_file_path.exists(), "flush file deleted");
Ok(())
}
}

View File

@@ -17,7 +17,7 @@ use super::events;
#[derive(Clone)]
pub struct Handler {
inner: Arc<Mutex<HandlerInner>>,
inner: Arc<Mutex<State>>,
}
impl TryFrom<&AppHandle> for Handler {
@@ -29,8 +29,7 @@ impl TryFrom<&AppHandle> for Handler {
} else if let Some(app_data_dir) = value.path_resolver().app_data_dir() {
let projects = value.state::<projects::Controller>().inner().clone();
let users = value.state::<users::Controller>().inner().clone();
let inner = HandlerInner::new(app_data_dir, projects, users);
let handler = Handler::new(inner);
let handler = Handler::new(app_data_dir, projects, users, 1000);
value.manage(handler.clone());
Ok(handler)
} else {
@@ -40,44 +39,32 @@ impl TryFrom<&AppHandle> for Handler {
}
impl Handler {
fn new(inner: HandlerInner) -> Self {
pub fn new(
local_data_dir: path::PathBuf,
project_store: projects::Controller,
users: users::Controller,
batch_size: usize,
) -> Self {
Self {
inner: Arc::new(Mutex::new(inner)),
inner: Arc::new(Mutex::new(State {
local_data_dir,
project_store,
users,
batch_size,
})),
}
}
pub async fn handle(&self, project_id: &ProjectId) -> Result<Vec<events::Event>> {
if let Ok(inner) = self.inner.try_lock() {
inner.handle(project_id).await
if let Ok(state) = self.inner.try_lock() {
Self::handle_inner(&state, project_id).await
} else {
Ok(vec![])
}
}
}
pub struct HandlerInner {
local_data_dir: path::PathBuf,
project_store: projects::Controller,
users: users::Controller,
batch_size: usize,
}
impl HandlerInner {
fn new(
local_data_dir: path::PathBuf,
project_store: projects::Controller,
users: users::Controller,
) -> Self {
Self {
local_data_dir,
project_store,
users,
batch_size: 1000,
}
}
pub async fn handle(&self, project_id: &ProjectId) -> Result<Vec<events::Event>> {
let project = self
async fn handle_inner(state: &State, project_id: &ProjectId) -> Result<Vec<events::Event>> {
let project = state
.project_store
.get(project_id)
.context("failed to get project")?;
@@ -86,7 +73,7 @@ impl HandlerInner {
return Ok(vec![]);
}
let user = self.users.get_user()?;
let user = state.users.get_user()?;
let project_repository =
project_repository::Repository::open(&project).context("failed to open repository")?;
@@ -97,7 +84,7 @@ impl HandlerInner {
.copied();
let gb_repository = gb_repository::Repository::open(
&self.local_data_dir,
&state.local_data_dir,
&project_repository,
user.as_ref(),
)?;
@@ -111,15 +98,15 @@ impl HandlerInner {
.unwrap_or_default();
if target_changed {
match self
.push_target(
&project_repository,
&default_target,
gb_code_last_commit,
project_id,
&user,
)
.await
match Self::push_target(
state,
&project_repository,
&default_target,
gb_code_last_commit,
project_id,
&user,
)
.await
{
Ok(()) => {}
Err(project_repository::RemoteError::Network) => return Ok(vec![]),
@@ -134,13 +121,13 @@ impl HandlerInner {
};
// make sure last push time is updated
self.update_project(project_id, &default_target.sha).await?;
Self::update_project(state, project_id, &default_target.sha).await?;
Ok(vec![])
}
async fn push_target(
&self,
state: &State,
project_repository: &project_repository::Repository,
default_target: &crate::virtual_branches::target::Target,
gb_code_last_commit: Option<Oid>,
@@ -149,7 +136,7 @@ impl HandlerInner {
) -> Result<(), project_repository::RemoteError> {
let ids = batch_rev_walk(
&project_repository.git_repository,
self.batch_size,
state.batch_size,
default_target.sha,
gb_code_last_commit,
)?;
@@ -167,7 +154,7 @@ impl HandlerInner {
project_repository.push_to_gitbutler_server(user.as_ref(), &[&refspec])?;
self.update_project(project_id, id).await?;
Self::update_project(state, project_id, id).await?;
tracing::info!(
%project_id,
@@ -193,11 +180,12 @@ impl HandlerInner {
}
async fn update_project(
&self,
state: &State,
project_id: &crate::id::Id<projects::Project>,
id: &Oid,
) -> Result<(), project_repository::RemoteError> {
self.project_store
state
.project_store
.update(&projects::UpdateRequest {
id: *project_id,
gitbutler_code_push_state: Some(CodePushState {
@@ -213,6 +201,13 @@ impl HandlerInner {
}
}
struct State {
local_data_dir: path::PathBuf,
project_store: projects::Controller,
users: users::Controller,
batch_size: usize,
}
fn push_all_refs(
project_repository: &project_repository::Repository,
user: &Option<users::User>,
@@ -285,421 +280,3 @@ fn batch_rev_walk(
}
Ok(oids)
}
#[cfg(test)]
mod test {
use std::collections::HashMap;
use std::path::PathBuf;
use crate::project_repository::LogUntil;
use crate::tests::{Case, Suite};
use crate::virtual_branches::set_test_target;
use super::super::test_remote_repository;
use super::*;
fn log_walk(repo: &git2::Repository, head: git::Oid) -> Vec<git::Oid> {
let mut walker = repo.revwalk().unwrap();
walker.push(head.into()).unwrap();
walker.map(|oid| oid.unwrap().into()).collect::<Vec<_>>()
}
#[tokio::test]
async fn test_push_error() -> Result<()> {
let suite = Suite::default();
let Case { project, .. } = suite.new_case();
let api_project = projects::ApiProject {
name: "test-sync".to_string(),
description: None,
repository_id: "123".to_string(),
git_url: String::new(),
code_git_url: Some(String::new()),
created_at: 0_i32.to_string(),
updated_at: 0_i32.to_string(),
sync: true,
};
suite
.projects
.update(&projects::UpdateRequest {
id: project.id,
api: Some(api_project.clone()),
..Default::default()
})
.await?;
let listener = HandlerInner {
local_data_dir: suite.local_app_data,
project_store: suite.projects,
users: suite.users,
batch_size: 100,
};
let res = listener.handle(&project.id).await;
res.unwrap_err();
Ok(())
}
#[tokio::test]
async fn test_push_simple() -> Result<()> {
let suite = Suite::default();
let Case {
project,
gb_repository,
project_repository,
..
} = suite.new_case_with_files(HashMap::from([(PathBuf::from("test.txt"), "test")]));
suite.sign_in();
set_test_target(&gb_repository, &project_repository).unwrap();
let target_id = gb_repository.default_target().unwrap().unwrap().sha;
let reference = project_repository.l(target_id, LogUntil::End).unwrap();
let cloud_code = test_remote_repository()?;
let api_project = projects::ApiProject {
name: "test-sync".to_string(),
description: None,
repository_id: "123".to_string(),
git_url: String::new(),
code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()),
created_at: 0_i32.to_string(),
updated_at: 0_i32.to_string(),
sync: true,
};
suite
.projects
.update(&projects::UpdateRequest {
id: project.id,
api: Some(api_project.clone()),
..Default::default()
})
.await?;
cloud_code.find_commit(target_id.into()).unwrap_err();
{
let listener = HandlerInner {
local_data_dir: suite.local_app_data,
project_store: suite.projects.clone(),
users: suite.users,
batch_size: 10,
};
let res = listener.handle(&project.id).await.unwrap();
assert!(res.is_empty());
}
cloud_code.find_commit(target_id.into()).unwrap();
let pushed = log_walk(&cloud_code, target_id);
assert_eq!(reference.len(), pushed.len());
assert_eq!(reference, pushed);
assert_eq!(
suite
.projects
.get(&project.id)
.unwrap()
.gitbutler_code_push_state
.unwrap()
.id,
target_id
);
Ok(())
}
#[tokio::test]
async fn test_push_remote_ref() -> Result<()> {
let suite = Suite::default();
let Case {
project,
gb_repository,
project_repository,
..
} = suite.new_case();
suite.sign_in();
set_test_target(&gb_repository, &project_repository).unwrap();
let cloud_code: git::Repository = test_remote_repository()?.into();
let remote_repo: git::Repository = test_remote_repository()?.into();
let last_commit = create_initial_commit(&remote_repo);
remote_repo
.reference(
&git::Refname::Local(git::LocalRefname::new("refs/heads/testbranch", None)),
last_commit,
false,
"",
)
.unwrap();
let mut remote = project_repository
.git_repository
.remote("tr", &remote_repo.path().to_str().unwrap().parse().unwrap())
.unwrap();
remote
.fetch(&["+refs/heads/*:refs/remotes/tr/*"], None)
.unwrap();
project_repository
.git_repository
.find_commit(last_commit)
.unwrap();
let api_project = projects::ApiProject {
name: "test-sync".to_string(),
description: None,
repository_id: "123".to_string(),
git_url: String::new(),
code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()),
created_at: 0_i32.to_string(),
updated_at: 0_i32.to_string(),
sync: true,
};
suite
.projects
.update(&projects::UpdateRequest {
id: project.id,
api: Some(api_project.clone()),
..Default::default()
})
.await?;
{
let listener = HandlerInner {
local_data_dir: suite.local_app_data,
project_store: suite.projects.clone(),
users: suite.users,
batch_size: 10,
};
listener.handle(&project.id).await.unwrap();
}
cloud_code.find_commit(last_commit).unwrap();
Ok(())
}
fn create_initial_commit(repo: &git::Repository) -> git::Oid {
let signature = git::Signature::now("test", "test@email.com").unwrap();
let mut index = repo.index().unwrap();
let oid = index.write_tree().unwrap();
repo.commit(
None,
&signature,
&signature,
"initial commit",
&repo.find_tree(oid).unwrap(),
&[],
)
.unwrap()
}
fn create_test_commits(repo: &git::Repository, commits: usize) -> git::Oid {
let signature = git::Signature::now("test", "test@email.com").unwrap();
let mut last = None;
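// build a linear chain of commits on top of HEAD and return the id of the last one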
for i in 0..commits {
let mut index = repo.index().unwrap();
let oid = index.write_tree().unwrap();
let head = repo.head().unwrap();
last = Some(
repo.commit(
Some(&head.name().unwrap()),
&signature,
&signature,
format!("commit {i}").as_str(),
&repo.find_tree(oid).unwrap(),
&[&repo
.find_commit(repo.refname_to_id("HEAD").unwrap())
.unwrap()],
)
.unwrap(),
);
}
last.unwrap()
}
#[tokio::test]
async fn test_push_batches() -> Result<()> {
let suite = Suite::default();
let Case {
project,
gb_repository,
project_repository,
..
} = suite.new_case();
suite.sign_in();
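// grow the history to 12 commits so that the batch size of 2 used below forces multiple push batches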
{
let head: git::Oid = project_repository
.get_head()
.unwrap()
.peel_to_commit()
.unwrap()
.id();
let reference = project_repository.l(head, LogUntil::End).unwrap();
assert_eq!(reference.len(), 2);
let head = create_test_commits(&project_repository.git_repository, 10);
let reference = project_repository.l(head, LogUntil::End).unwrap();
assert_eq!(reference.len(), 12);
}
set_test_target(&gb_repository, &project_repository).unwrap();
let target_id = gb_repository.default_target().unwrap().unwrap().sha;
let reference = project_repository.l(target_id, LogUntil::End).unwrap();
let cloud_code = test_remote_repository()?;
let api_project = projects::ApiProject {
name: "test-sync".to_string(),
description: None,
repository_id: "123".to_string(),
git_url: String::new(),
code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()),
created_at: 0_i32.to_string(),
updated_at: 0_i32.to_string(),
sync: true,
};
suite
.projects
.update(&projects::UpdateRequest {
id: project.id,
api: Some(api_project.clone()),
..Default::default()
})
.await?;
{
let listener = HandlerInner {
local_data_dir: suite.local_app_data.clone(),
project_store: suite.projects.clone(),
users: suite.users.clone(),
batch_size: 2,
};
listener.handle(&project.id).await.unwrap();
}
cloud_code.find_commit(target_id.into()).unwrap();
let pushed = log_walk(&cloud_code, target_id);
assert_eq!(reference.len(), pushed.len());
assert_eq!(reference, pushed);
assert_eq!(
suite
.projects
.get(&project.id)
.unwrap()
.gitbutler_code_push_state
.unwrap()
.id,
target_id
);
Ok(())
}
#[tokio::test]
async fn test_push_again_no_change() -> Result<()> {
let suite = Suite::default();
let Case {
project,
gb_repository,
project_repository,
..
} = suite.new_case_with_files(HashMap::from([(PathBuf::from("test.txt"), "test")]));
suite.sign_in();
set_test_target(&gb_repository, &project_repository).unwrap();
let target_id = gb_repository.default_target().unwrap().unwrap().sha;
let reference = project_repository.l(target_id, LogUntil::End).unwrap();
let cloud_code = test_remote_repository()?;
let api_project = projects::ApiProject {
name: "test-sync".to_string(),
description: None,
repository_id: "123".to_string(),
git_url: String::new(),
code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()),
created_at: 0_i32.to_string(),
updated_at: 0_i32.to_string(),
sync: true,
};
suite
.projects
.update(&projects::UpdateRequest {
id: project.id,
api: Some(api_project.clone()),
..Default::default()
})
.await?;
cloud_code.find_commit(target_id.into()).unwrap_err();
{
let listener = HandlerInner {
local_data_dir: suite.local_app_data,
project_store: suite.projects.clone(),
users: suite.users,
batch_size: 10,
};
let res = listener.handle(&project.id).await.unwrap();
assert!(res.is_empty());
}
cloud_code.find_commit(target_id.into()).unwrap();
let pushed = log_walk(&cloud_code, target_id);
assert_eq!(reference.len(), pushed.len());
assert_eq!(reference, pushed);
assert_eq!(
suite
.projects
.get(&project.id)
.unwrap()
.gitbutler_code_push_state
.unwrap()
.id,
target_id
);
Ok(())
}
}

View File

@@ -5,27 +5,36 @@ mod suite {
mod virtual_branches;
}
mod database;
mod deltas;
mod gb_repository;
mod git;
mod keys;
mod lock;
mod reader;
mod sessions;
pub mod virtual_branches;
mod watcher;
use std::{collections::HashMap, fs, path};
use tempfile::tempdir;
use crate::{database, gb_repository, git, keys, project_repository, projects, storage, users};
pub struct Suite {
pub local_app_data: path::PathBuf,
pub storage: storage::Storage,
pub users: users::Controller,
pub projects: projects::Controller,
pub keys: keys::Controller,
pub storage: gitbutler_app::storage::Storage,
pub users: gitbutler_app::users::Controller,
pub projects: gitbutler_app::projects::Controller,
pub keys: gitbutler_app::keys::Controller,
}
impl Default for Suite {
fn default() -> Self {
let local_app_data = temp_dir();
let storage = storage::Storage::new(&local_app_data);
let users = users::Controller::from_path(&local_app_data);
let projects = projects::Controller::from_path(&local_app_data);
let keys = keys::Controller::from_path(&local_app_data);
let storage = gitbutler_app::storage::Storage::new(&local_app_data);
let users = gitbutler_app::users::Controller::from_path(&local_app_data);
let projects = gitbutler_app::projects::Controller::from_path(&local_app_data);
let keys = gitbutler_app::keys::Controller::from_path(&local_app_data);
Self {
storage,
local_app_data,
@@ -37,8 +46,8 @@ impl Default for Suite {
}
impl Suite {
pub fn sign_in(&self) -> users::User {
let user = users::User {
pub fn sign_in(&self) -> gitbutler_app::users::User {
let user = gitbutler_app::users::User {
name: Some("test".to_string()),
email: "test@email.com".to_string(),
access_token: "token".to_string(),
@@ -48,7 +57,7 @@ impl Suite {
user
}
fn project(&self, fs: HashMap<path::PathBuf, &str>) -> projects::Project {
fn project(&self, fs: HashMap<path::PathBuf, &str>) -> gitbutler_app::projects::Project {
let repository = test_repository();
for (path, contents) in fs {
if let Some(parent) = path.parent() {
@@ -80,20 +89,23 @@ impl Suite {
pub struct Case<'a> {
suite: &'a Suite,
pub project: projects::Project,
pub project_repository: project_repository::Repository,
pub gb_repository: gb_repository::Repository,
pub credentials: git::credentials::Helper,
pub project: gitbutler_app::projects::Project,
pub project_repository: gitbutler_app::project_repository::Repository,
pub gb_repository: gitbutler_app::gb_repository::Repository,
pub credentials: gitbutler_app::git::credentials::Helper,
}
impl<'a> Case<'a> {
fn new(suite: &'a Suite, project: projects::Project) -> Case<'a> {
let project_repository = project_repository::Repository::open(&project)
fn new(suite: &'a Suite, project: gitbutler_app::projects::Project) -> Case<'a> {
let project_repository = gitbutler_app::project_repository::Repository::open(&project)
.expect("failed to create project repository");
let gb_repository =
gb_repository::Repository::open(&suite.local_app_data, &project_repository, None)
.expect("failed to open gb repository");
let credentials = git::credentials::Helper::from_path(&suite.local_app_data);
let gb_repository = gitbutler_app::gb_repository::Repository::open(
&suite.local_app_data,
&project_repository,
None,
)
.expect("failed to open gb repository");
let credentials = gitbutler_app::git::credentials::Helper::from_path(&suite.local_app_data);
Case {
suite,
project,
@@ -109,13 +121,14 @@ impl<'a> Case<'a> {
.projects
.get(&self.project.id)
.expect("failed to get project");
let project_repository = project_repository::Repository::open(&project)
let project_repository = gitbutler_app::project_repository::Repository::open(&project)
.expect("failed to create project repository");
let user = self.suite.users.get_user().expect("failed to get user");
let credentials = git::credentials::Helper::from_path(&self.suite.local_app_data);
let credentials =
gitbutler_app::git::credentials::Helper::from_path(&self.suite.local_app_data);
Self {
suite: self.suite,
gb_repository: gb_repository::Repository::open(
gb_repository: gitbutler_app::gb_repository::Repository::open(
&self.suite.local_app_data,
&project_repository,
user.as_ref(),
@@ -128,8 +141,8 @@ impl<'a> Case<'a> {
}
}
pub fn test_database() -> database::Database {
database::Database::open_in_directory(temp_dir()).unwrap()
pub fn test_database() -> gitbutler_app::database::Database {
gitbutler_app::database::Database::open_in_directory(temp_dir()).unwrap()
}
pub fn temp_dir() -> path::PathBuf {
@@ -138,18 +151,19 @@ pub fn temp_dir() -> path::PathBuf {
path
}
pub fn empty_bare_repository() -> git::Repository {
pub fn empty_bare_repository() -> gitbutler_app::git::Repository {
let path = temp_dir();
git::Repository::init_opts(path, &init_opts_bare()).expect("failed to init repository")
gitbutler_app::git::Repository::init_opts(path, &init_opts_bare())
.expect("failed to init repository")
}
pub fn test_repository() -> git::Repository {
pub fn test_repository() -> gitbutler_app::git::Repository {
let path = temp_dir();
let repository =
git::Repository::init_opts(path, &init_opts()).expect("failed to init repository");
let repository = gitbutler_app::git::Repository::init_opts(path, &init_opts())
.expect("failed to init repository");
let mut index = repository.index().expect("failed to get index");
let oid = index.write_tree().expect("failed to write tree");
let signature = git::Signature::now("test", "test@email.com").unwrap();
let signature = gitbutler_app::git::Signature::now("test", "test@email.com").unwrap();
repository
.commit(
Some(&"refs/heads/master".parse().unwrap()),
@@ -163,14 +177,14 @@ pub fn test_repository() -> git::Repository {
repository
}
pub fn commit_all(repository: &git::Repository) -> git::Oid {
pub fn commit_all(repository: &gitbutler_app::git::Repository) -> gitbutler_app::git::Oid {
let mut index = repository.index().expect("failed to get index");
index
.add_all(["."], git2::IndexAddOption::DEFAULT, None)
.expect("failed to add all");
index.write().expect("failed to write index");
let oid = index.write_tree().expect("failed to write tree");
let signature = git::Signature::now("test", "test@email.com").unwrap();
let signature = gitbutler_app::git::Signature::now("test", "test@email.com").unwrap();
let head = repository.head().expect("failed to get head");
let commit_oid = repository
.commit(

View File

@@ -1,6 +1,6 @@
#![allow(unused)]
use crate::git;
use crate::tests::init_opts;
use crate::init_opts;
use gitbutler_app::git;
use std::{path, str::from_utf8};
pub fn temp_dir() -> std::path::PathBuf {

View File

@@ -0,0 +1,20 @@
use crate::temp_dir;
use gitbutler_app::database::Database;
#[test]
fn smoke() {
let data_dir = temp_dir();
let db = Database::open_in_directory(data_dir).unwrap();
db.transaction(|tx| {
tx.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)", [])
.unwrap();
tx.execute("INSERT INTO test (id) VALUES (1)", []).unwrap();
let mut stmt = tx.prepare("SELECT id FROM test").unwrap();
let mut rows = stmt.query([]).unwrap();
let row = rows.next().unwrap().unwrap();
let id: i32 = row.get(0).unwrap();
assert_eq!(id, 1_i32);
Ok(())
})
.unwrap();
}

View File

@@ -0,0 +1,142 @@
mod database {
use crate::test_database;
use gitbutler_app::deltas::{operations, Database, Delta};
use gitbutler_app::projects::ProjectId;
use gitbutler_app::sessions::SessionId;
use std::path;
#[test]
fn insert_query() -> anyhow::Result<()> {
let db = test_database();
let database = Database::new(db);
let project_id = ProjectId::generate();
let session_id = SessionId::generate();
let file_path = path::PathBuf::from("file_path");
let delta1 = Delta {
timestamp_ms: 0,
operations: vec![operations::Operation::Insert((0, "text".to_string()))],
};
let deltas = vec![delta1.clone()];
database.insert(&project_id, &session_id, &file_path, &deltas)?;
assert_eq!(
database.list_by_project_id_session_id(&project_id, &session_id, &None)?,
vec![(file_path.display().to_string(), vec![delta1])]
.into_iter()
.collect()
);
Ok(())
}
#[test]
fn insert_update() -> anyhow::Result<()> {
let db = test_database();
let database = Database::new(db);
let project_id = ProjectId::generate();
let session_id = SessionId::generate();
let file_path = path::PathBuf::from("file_path");
let delta1 = Delta {
timestamp_ms: 0,
operations: vec![operations::Operation::Insert((0, "text".to_string()))],
};
let delta2 = Delta {
timestamp_ms: 0,
operations: vec![operations::Operation::Insert((
0,
"updated_text".to_string(),
))],
};
database.insert(&project_id, &session_id, &file_path, &vec![delta1])?;
database.insert(&project_id, &session_id, &file_path, &vec![delta2.clone()])?;
assert_eq!(
database.list_by_project_id_session_id(&project_id, &session_id, &None)?,
vec![(file_path.display().to_string(), vec![delta2])]
.into_iter()
.collect()
);
Ok(())
}
#[test]
fn aggregate_deltas_by_file() -> anyhow::Result<()> {
let db = test_database();
let database = Database::new(db);
let project_id = ProjectId::generate();
let session_id = SessionId::generate();
let file_path1 = path::PathBuf::from("file_path1");
let file_path2 = path::PathBuf::from("file_path2");
let delta1 = Delta {
timestamp_ms: 1,
operations: vec![operations::Operation::Insert((0, "text".to_string()))],
};
let delta2 = Delta {
timestamp_ms: 2,
operations: vec![operations::Operation::Insert((
0,
"updated_text".to_string(),
))],
};
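// delta1 is written to both files, delta2 only to the second, so the listing below groups deltas per file path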
database.insert(&project_id, &session_id, &file_path1, &vec![delta1.clone()])?;
database.insert(&project_id, &session_id, &file_path2, &vec![delta1.clone()])?;
database.insert(&project_id, &session_id, &file_path2, &vec![delta2.clone()])?;
assert_eq!(
database.list_by_project_id_session_id(&project_id, &session_id, &None)?,
vec![
(file_path1.display().to_string(), vec![delta1.clone()]),
(file_path2.display().to_string(), vec![delta1, delta2])
]
.into_iter()
.collect()
);
Ok(())
}
}
mod writer {
use gitbutler_app::deltas::operations::Operation;
use gitbutler_app::{deltas, sessions};
use std::vec;
use crate::{Case, Suite};
#[test]
fn write_no_vbranches() -> anyhow::Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let deltas_writer = deltas::Writer::new(&gb_repository)?;
let session = gb_repository.get_or_create_current_session()?;
let session_reader = sessions::Reader::open(&gb_repository, &session)?;
let deltas_reader = gitbutler_app::deltas::Reader::new(&session_reader);
let path = "test.txt";
let deltas = vec![
gitbutler_app::deltas::Delta {
operations: vec![Operation::Insert((0, "hello".to_string()))],
timestamp_ms: 0,
},
gitbutler_app::deltas::Delta {
operations: vec![Operation::Insert((5, " world".to_string()))],
timestamp_ms: 0,
},
];
deltas_writer.write(path, &deltas).unwrap();
assert_eq!(deltas_reader.read_file(path).unwrap(), Some(deltas));
assert_eq!(deltas_reader.read_file("not found").unwrap(), None);
Ok(())
}
}

View File

@@ -0,0 +1,448 @@
use std::{collections::HashMap, path, thread, time};
use anyhow::Result;
use pretty_assertions::assert_eq;
use crate::init_opts_bare;
use crate::{Case, Suite};
use gitbutler_app::{
deltas::{self, operations::Operation},
projects::{self, ApiProject, ProjectId},
reader,
sessions::{self, SessionId},
};
fn test_remote_repository() -> Result<git2::Repository> {
let path = tempfile::tempdir()?.path().to_str().unwrap().to_string();
let repo_a = git2::Repository::init_opts(path, &init_opts_bare())?;
Ok(repo_a)
}
#[test]
fn test_get_current_session_writer_should_use_existing_session() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let current_session_1 = gb_repository.get_or_create_current_session()?;
let current_session_2 = gb_repository.get_or_create_current_session()?;
assert_eq!(current_session_1.id, current_session_2.id);
Ok(())
}
#[test]
fn test_must_not_return_init_session() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
assert!(gb_repository.get_current_session()?.is_none());
let iter = gb_repository.get_sessions_iterator()?;
assert_eq!(iter.count(), 0);
Ok(())
}
#[test]
fn test_must_not_flush_without_current_session() -> Result<()> {
let Case {
gb_repository,
project_repository,
..
} = Suite::default().new_case();
let session = gb_repository.flush(&project_repository, None)?;
assert!(session.is_none());
let iter = gb_repository.get_sessions_iterator()?;
assert_eq!(iter.count(), 0);
Ok(())
}
#[test]
fn test_non_empty_repository() -> Result<()> {
let Case {
gb_repository,
project_repository,
..
} = Suite::default()
.new_case_with_files(HashMap::from([(path::PathBuf::from("test.txt"), "test")]));
gb_repository.get_or_create_current_session()?;
gb_repository.flush(&project_repository, None)?;
Ok(())
}
#[test]
fn test_must_flush_current_session() -> Result<()> {
let Case {
gb_repository,
project_repository,
..
} = Suite::default().new_case();
gb_repository.get_or_create_current_session()?;
let session = gb_repository.flush(&project_repository, None)?;
assert!(session.is_some());
let iter = gb_repository.get_sessions_iterator()?;
assert_eq!(iter.count(), 1);
Ok(())
}
#[test]
fn test_list_deltas_from_current_session() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let current_session = gb_repository.get_or_create_current_session()?;
let writer = deltas::Writer::new(&gb_repository)?;
writer.write(
"test.txt",
&vec![deltas::Delta {
operations: vec![Operation::Insert((0, "Hello World".to_string()))],
timestamp_ms: 0,
}],
)?;
let session_reader = sessions::Reader::open(&gb_repository, &current_session)?;
let deltas_reader = deltas::Reader::new(&session_reader);
let deltas = deltas_reader.read(None)?;
assert_eq!(deltas.len(), 1);
assert_eq!(
deltas[&path::PathBuf::from("test.txt")][0].operations.len(),
1
);
assert_eq!(
deltas[&path::PathBuf::from("test.txt")][0].operations[0],
Operation::Insert((0, "Hello World".to_string()))
);
Ok(())
}
#[test]
fn test_list_deltas_from_flushed_session() {
let Case {
gb_repository,
project_repository,
..
} = Suite::default().new_case();
let writer = deltas::Writer::new(&gb_repository).unwrap();
writer
.write(
"test.txt",
&vec![deltas::Delta {
operations: vec![Operation::Insert((0, "Hello World".to_string()))],
timestamp_ms: 0,
}],
)
.unwrap();
let session = gb_repository.flush(&project_repository, None).unwrap();
let session_reader = sessions::Reader::open(&gb_repository, &session.unwrap()).unwrap();
let deltas_reader = deltas::Reader::new(&session_reader);
let deltas = deltas_reader.read(None).unwrap();
assert_eq!(deltas.len(), 1);
assert_eq!(
deltas[&path::PathBuf::from("test.txt")][0].operations.len(),
1
);
assert_eq!(
deltas[&path::PathBuf::from("test.txt")][0].operations[0],
Operation::Insert((0, "Hello World".to_string()))
);
}
#[test]
fn test_list_files_from_current_session() {
let Case { gb_repository, .. } = Suite::default().new_case_with_files(HashMap::from([(
path::PathBuf::from("test.txt"),
"Hello World",
)]));
let current = gb_repository.get_or_create_current_session().unwrap();
let reader = sessions::Reader::open(&gb_repository, &current).unwrap();
let files = reader.files(None).unwrap();
assert_eq!(files.len(), 1);
assert_eq!(
files[&path::PathBuf::from("test.txt")],
reader::Content::UTF8("Hello World".to_string())
);
}
#[test]
fn test_list_files_from_flushed_session() {
let Case {
gb_repository,
project_repository,
..
} = Suite::default().new_case_with_files(HashMap::from([(
path::PathBuf::from("test.txt"),
"Hello World",
)]));
gb_repository.get_or_create_current_session().unwrap();
let session = gb_repository
.flush(&project_repository, None)
.unwrap()
.unwrap();
let reader = sessions::Reader::open(&gb_repository, &session).unwrap();
let files = reader.files(None).unwrap();
assert_eq!(files.len(), 1);
assert_eq!(
files[&path::PathBuf::from("test.txt")],
reader::Content::UTF8("Hello World".to_string())
);
}
#[tokio::test]
async fn test_remote_syncronization() {
// first, create a remote, pretending it's a cloud
let cloud = test_remote_repository().unwrap();
let api_project = ApiProject {
name: "test-sync".to_string(),
description: None,
repository_id: "123".to_string(),
git_url: cloud.path().to_str().unwrap().to_string(),
code_git_url: None,
created_at: 0_i32.to_string(),
updated_at: 0_i32.to_string(),
sync: true,
};
let suite = Suite::default();
let user = suite.sign_in();
// create first local project, add files, deltas and flush a session
let case_one = suite.new_case_with_files(HashMap::from([(
path::PathBuf::from("test.txt"),
"Hello World",
)]));
suite
.projects
.update(&projects::UpdateRequest {
id: case_one.project.id,
api: Some(api_project.clone()),
..Default::default()
})
.await
.unwrap();
let case_one = case_one.refresh();
let writer = deltas::Writer::new(&case_one.gb_repository).unwrap();
writer
.write(
"test.txt",
&vec![deltas::Delta {
operations: vec![Operation::Insert((0, "Hello World".to_string()))],
timestamp_ms: 0,
}],
)
.unwrap();
let session_one = case_one
.gb_repository
.flush(&case_one.project_repository, Some(&user))
.unwrap()
.unwrap();
case_one.gb_repository.push(Some(&user)).unwrap();
// create second local project, fetch it and make sure session is there
let case_two = suite.new_case();
suite
.projects
.update(&projects::UpdateRequest {
id: case_two.project.id,
api: Some(api_project.clone()),
..Default::default()
})
.await
.unwrap();
let case_two = case_two.refresh();
case_two.gb_repository.fetch(Some(&user)).unwrap();
// now it should have the session from the first local project synced
let sessions_two = case_two
.gb_repository
.get_sessions_iterator()
.unwrap()
.map(Result::unwrap)
.collect::<Vec<_>>();
assert_eq!(sessions_two.len(), 1);
assert_eq!(sessions_two[0].id, session_one.id);
let session_reader = sessions::Reader::open(&case_two.gb_repository, &sessions_two[0]).unwrap();
let deltas_reader = deltas::Reader::new(&session_reader);
let deltas = deltas_reader.read(None).unwrap();
let files = session_reader.files(None).unwrap();
assert_eq!(deltas.len(), 1);
assert_eq!(files.len(), 1);
assert_eq!(
files[&path::PathBuf::from("test.txt")],
reader::Content::UTF8("Hello World".to_string())
);
assert_eq!(
deltas[&path::PathBuf::from("test.txt")],
vec![deltas::Delta {
operations: vec![Operation::Insert((0, "Hello World".to_string()))],
timestamp_ms: 0,
}]
);
}
#[tokio::test]
async fn test_remote_sync_order() {
// first, create a remote, pretending it's a cloud
let cloud = test_remote_repository().unwrap();
let api_project = projects::ApiProject {
name: "test-sync".to_string(),
description: None,
repository_id: "123".to_string(),
git_url: cloud.path().to_str().unwrap().to_string(),
code_git_url: None,
created_at: 0_i32.to_string(),
updated_at: 0_i32.to_string(),
sync: true,
};
let suite = Suite::default();
let case_one = suite.new_case();
suite
.projects
.update(&projects::UpdateRequest {
id: case_one.project.id,
api: Some(api_project.clone()),
..Default::default()
})
.await
.unwrap();
let case_one = case_one.refresh();
let case_two = suite.new_case();
suite
.projects
.update(&projects::UpdateRequest {
id: case_two.project.id,
api: Some(api_project.clone()),
..Default::default()
})
.await
.unwrap();
let case_two = case_two.refresh();
let user = suite.sign_in();
// create session in the first project
case_one
.gb_repository
.get_or_create_current_session()
.unwrap();
let session_one_first = case_one
.gb_repository
.flush(&case_one.project_repository, Some(&user))
.unwrap()
.unwrap();
case_one.gb_repository.push(Some(&user)).unwrap();
thread::sleep(time::Duration::from_secs(1));
// create session in the second project
case_two
.gb_repository
.get_or_create_current_session()
.unwrap();
let session_two_first = case_two
.gb_repository
.flush(&case_two.project_repository, Some(&user))
.unwrap()
.unwrap();
case_two.gb_repository.push(Some(&user)).unwrap();
thread::sleep(time::Duration::from_secs(1));
// create second session in the first project
case_one
.gb_repository
.get_or_create_current_session()
.unwrap();
let session_one_second = case_one
.gb_repository
.flush(&case_one.project_repository, Some(&user))
.unwrap()
.unwrap();
case_one.gb_repository.push(Some(&user)).unwrap();
thread::sleep(time::Duration::from_secs(1));
// create second session in the second project
case_two
.gb_repository
.get_or_create_current_session()
.unwrap();
let session_two_second = case_two
.gb_repository
.flush(&case_two.project_repository, Some(&user))
.unwrap()
.unwrap();
case_two.gb_repository.push(Some(&user)).unwrap();
case_one.gb_repository.fetch(Some(&user)).unwrap();
let sessions_one = case_one
.gb_repository
.get_sessions_iterator()
.unwrap()
.map(Result::unwrap)
.collect::<Vec<_>>();
case_two.gb_repository.fetch(Some(&user)).unwrap();
let sessions_two = case_two
.gb_repository
.get_sessions_iterator()
.unwrap()
.map(Result::unwrap)
.collect::<Vec<_>>();
// make sure the sessions are the same on both repos
assert_eq!(sessions_one.len(), 4);
assert_eq!(sessions_two, sessions_one);
assert_eq!(sessions_one[0].id, session_two_second.id);
assert_eq!(sessions_one[1].id, session_one_second.id);
assert_eq!(sessions_one[2].id, session_two_first.id);
assert_eq!(sessions_one[3].id, session_one_first.id);
}
#[test]
fn test_gitbutler_file() {
let Case {
gb_repository,
project_repository,
..
} = Suite::default().new_case();
let session = gb_repository.get_or_create_current_session().unwrap();
let gitbutler_file_path = project_repository.path().join(".git/gitbutler.json");
assert!(gitbutler_file_path.exists());
let file_content: serde_json::Value =
serde_json::from_str(&std::fs::read_to_string(&gitbutler_file_path).unwrap()).unwrap();
let sid: SessionId = file_content["sessionId"].as_str().unwrap().parse().unwrap();
assert_eq!(sid, session.id);
let pid: ProjectId = file_content["repositoryId"]
.as_str()
.unwrap()
.parse()
.unwrap();
assert_eq!(pid, project_repository.project().id);
}

View File

@@ -0,0 +1,34 @@
use crate::test_repository;
#[test]
pub fn test_set_str() {
let repo = test_repository();
let mut config = repo.config().unwrap();
config.set_str("test.key", "test.value").unwrap();
assert_eq!(
config.get_string("test.key").unwrap().unwrap(),
"test.value"
);
}
#[test]
pub fn test_set_bool() {
let repo = test_repository();
let mut config = repo.config().unwrap();
config.set_bool("test.key", true).unwrap();
assert!(config.get_bool("test.key").unwrap().unwrap());
}
#[test]
pub fn test_get_string_none() {
let repo = test_repository();
let config = repo.config().unwrap();
assert_eq!(config.get_string("test.key").unwrap(), None);
}
#[test]
pub fn test_get_bool_none() {
let repo = test_repository();
let config = repo.config().unwrap();
assert_eq!(config.get_bool("test.key").unwrap(), None);
}

View File

@@ -0,0 +1,312 @@
use gitbutler_app::git::credentials::{Credential, Helper, HttpsCredential, SshCredential};
use gitbutler_app::{keys, project_repository, projects, users};
use std::path::PathBuf;
use crate::{temp_dir, test_repository};
#[derive(Default)]
struct TestCase<'a> {
remote_url: &'a str,
github_access_token: Option<&'a str>,
preferred_key: projects::AuthKey,
home_dir: Option<PathBuf>,
}
impl TestCase<'_> {
fn run(&self) -> Vec<(String, Vec<Credential>)> {
let local_app_data = temp_dir();
let users = users::Controller::from_path(&local_app_data);
let user = users::User {
github_access_token: self.github_access_token.map(ToString::to_string),
..Default::default()
};
users.set_user(&user).unwrap();
let keys = keys::Controller::from_path(&local_app_data);
let helper = Helper::new(keys, users, self.home_dir.clone());
let repo = test_repository();
repo.remote(
"origin",
&self.remote_url.parse().expect("failed to parse remote url"),
)
.unwrap();
let project = projects::Project {
path: repo.workdir().unwrap().to_path_buf(),
preferred_key: self.preferred_key.clone(),
..Default::default()
};
let project_repository = project_repository::Repository::open(&project).unwrap();
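// resolve the credential flow for the "origin" remote and flatten it into (remote url, credentials) pairs for the assertions below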
let flow = helper.help(&project_repository, "origin").unwrap();
flow.into_iter()
.map(|(remote, credentials)| {
(
remote.url().unwrap().as_ref().unwrap().to_string(),
credentials,
)
})
.collect::<Vec<_>>()
}
}
mod not_github {
use super::*;
mod with_preferred_key {
use super::*;
#[test]
fn https() {
let test_case = TestCase {
remote_url: "https://gitlab.com/test-gitbutler/test.git",
github_access_token: Some("token"),
preferred_key: projects::AuthKey::Local {
private_key_path: PathBuf::from("/tmp/id_rsa"),
},
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@gitlab.com:test-gitbutler/test.git".to_string(),
);
assert_eq!(
flow[0].1,
vec![Credential::Ssh(SshCredential::Keyfile {
key_path: PathBuf::from("/tmp/id_rsa"),
passphrase: None,
})]
);
}
#[test]
fn ssh() {
let test_case = TestCase {
remote_url: "git@gitlab.com:test-gitbutler/test.git",
github_access_token: Some("token"),
preferred_key: projects::AuthKey::Local {
private_key_path: PathBuf::from("/tmp/id_rsa"),
},
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@gitlab.com:test-gitbutler/test.git".to_string(),
);
assert_eq!(
flow[0].1,
vec![Credential::Ssh(SshCredential::Keyfile {
key_path: PathBuf::from("/tmp/id_rsa"),
passphrase: None,
})]
);
}
}
mod with_github_token {
use super::*;
#[test]
fn https() {
let test_case = TestCase {
remote_url: "https://gitlab.com/test-gitbutler/test.git",
github_access_token: Some("token"),
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@gitlab.com:test-gitbutler/test.git".to_string(),
);
assert_eq!(flow[0].1.len(), 1);
assert!(matches!(
flow[0].1[0],
Credential::Ssh(SshCredential::GitButlerKey(_))
));
}
#[test]
fn ssh() {
let test_case = TestCase {
remote_url: "git@gitlab.com:test-gitbutler/test.git",
github_access_token: Some("token"),
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@gitlab.com:test-gitbutler/test.git".to_string(),
);
assert_eq!(flow[0].1.len(), 1);
assert!(matches!(
flow[0].1[0],
Credential::Ssh(SshCredential::GitButlerKey(_))
));
}
}
}
mod github {
use super::*;
mod with_github_token {
use super::*;
#[test]
fn https() {
let test_case = TestCase {
remote_url: "https://github.com/gitbutlerapp/gitbutler.git",
github_access_token: Some("token"),
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"https://github.com/gitbutlerapp/gitbutler.git".to_string(),
);
assert_eq!(
flow[0].1,
vec![Credential::Https(HttpsCredential::GitHubToken(
"token".to_string()
))]
);
}
#[test]
fn ssh() {
let test_case = TestCase {
remote_url: "git@github.com:gitbutlerapp/gitbutler.git",
github_access_token: Some("token"),
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"https://github.com/gitbutlerapp/gitbutler.git".to_string(),
);
assert_eq!(
flow[0].1,
vec![Credential::Https(HttpsCredential::GitHubToken(
"token".to_string()
))]
);
}
}
mod without_github_token {
use super::*;
mod without_preferred_key {
use super::*;
#[test]
fn https() {
let test_case = TestCase {
remote_url: "https://github.com/gitbutlerapp/gitbutler.git",
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@github.com:gitbutlerapp/gitbutler.git".to_string(),
);
assert_eq!(flow[0].1.len(), 1);
assert!(matches!(
flow[0].1[0],
Credential::Ssh(SshCredential::GitButlerKey(_))
));
}
#[test]
fn ssh() {
let test_case = TestCase {
remote_url: "git@github.com:gitbutlerapp/gitbutler.git",
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@github.com:gitbutlerapp/gitbutler.git".to_string(),
);
assert_eq!(flow[0].1.len(), 1);
assert!(matches!(
flow[0].1[0],
Credential::Ssh(SshCredential::GitButlerKey(_))
));
}
}
mod with_preferred_key {
use super::*;
#[test]
fn https() {
let test_case = TestCase {
remote_url: "https://github.com/gitbutlerapp/gitbutler.git",
github_access_token: Some("token"),
preferred_key: projects::AuthKey::Local {
private_key_path: PathBuf::from("/tmp/id_rsa"),
},
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@github.com:gitbutlerapp/gitbutler.git".to_string(),
);
assert_eq!(
flow[0].1,
vec![Credential::Ssh(SshCredential::Keyfile {
key_path: PathBuf::from("/tmp/id_rsa"),
passphrase: None,
})]
);
}
#[test]
fn ssh() {
let test_case = TestCase {
remote_url: "git@github.com:gitbutlerapp/gitbutler.git",
github_access_token: Some("token"),
preferred_key: projects::AuthKey::Local {
private_key_path: PathBuf::from("/tmp/id_rsa"),
},
..Default::default()
};
let flow = test_case.run();
assert_eq!(flow.len(), 1);
assert_eq!(
flow[0].0,
"git@github.com:gitbutlerapp/gitbutler.git".to_string(),
);
assert_eq!(
flow[0].1,
vec![Credential::Ssh(SshCredential::Keyfile {
key_path: PathBuf::from("/tmp/id_rsa"),
passphrase: None,
})]
);
}
}
}
}

View File

@@ -3,13 +3,13 @@ use std::{collections::HashMap, path, thread, time};
use anyhow::Result;
use pretty_assertions::assert_eq;
use crate::tests::init_opts_bare;
use crate::{
use crate::init_opts_bare;
use crate::{Case, Suite};
use gitbutler_app::{
deltas::{self, operations::Operation},
projects::{self, ApiProject, ProjectId},
reader,
sessions::{self, SessionId},
tests::{Case, Suite},
};
fn test_remote_repository() -> Result<git2::Repository> {

View File

@@ -0,0 +1,3 @@
mod config;
mod credentials;
mod diff;

View File

@@ -0,0 +1,27 @@
#[cfg(not(target_os = "windows"))]
mod not_windows {
use gitbutler_app::keys::storage::Storage;
use gitbutler_app::keys::Controller;
use std::fs;
#[cfg(target_family = "unix")]
use std::os::unix::prelude::*;
use crate::Suite;
#[test]
fn test_get_or_create() {
let suite = Suite::default();
let controller = Controller::new(Storage::from_path(&suite.local_app_data));
let once = controller.get_or_create().unwrap();
let twice = controller.get_or_create().unwrap();
assert_eq!(once, twice);
// check permissions of the private key
let permissions = fs::metadata(suite.local_app_data.join("keys/ed25519"))
.unwrap()
.permissions();
let perms = format!("{:o}", permissions.mode());
assert_eq!(perms, "100600");
}
}

View File

@@ -0,0 +1 @@
mod controller;

View File

@@ -0,0 +1,91 @@
use gitbutler_app::lock::Dir;
use crate::temp_dir;
#[tokio::test]
async fn test_lock_same_instance() {
let dir_path = temp_dir();
std::fs::write(dir_path.join("file.txt"), "").unwrap();
let dir = Dir::new(&dir_path).unwrap();
let (tx, rx) = std::sync::mpsc::sync_channel(1);
// spawn a task that will signal right after acquiring the lock
let _ = tokio::spawn({
let dir = dir.clone();
async move {
dir.batch(|root| {
tx.send(()).unwrap();
assert_eq!(
std::fs::read_to_string(root.join("file.txt")).unwrap(),
String::new()
);
std::fs::write(root.join("file.txt"), "1")
})
}
})
.await
.unwrap();
// then we wait until the lock is acquired
rx.recv().unwrap();
// and immediately try to lock again
dir.batch(|root| {
assert_eq!(std::fs::read_to_string(root.join("file.txt")).unwrap(), "1");
std::fs::write(root.join("file.txt"), "2")
})
.unwrap()
.unwrap();
assert_eq!(
std::fs::read_to_string(dir_path.join("file.txt")).unwrap(),
"2"
);
}
#[tokio::test]
async fn test_lock_different_instances() {
let dir_path = temp_dir();
std::fs::write(dir_path.join("file.txt"), "").unwrap();
let (tx, rx) = std::sync::mpsc::sync_channel(1);
// spawn a task that will signal right after acquiring the lock
let _ = tokio::spawn({
let dir_path = dir_path.clone();
async move {
// one dir instance is created on a separate thread
let dir = Dir::new(&dir_path).unwrap();
dir.batch(|root| {
tx.send(()).unwrap();
assert_eq!(
std::fs::read_to_string(root.join("file.txt")).unwrap(),
String::new()
);
std::fs::write(root.join("file.txt"), "1")
})
}
})
.await
.unwrap();
// another dir instance is created on the main thread
let dir = Dir::new(&dir_path).unwrap();
// then we wait until the lock is acquired
rx.recv().unwrap();
// and immediately try to lock again
dir.batch(|root| {
assert_eq!(std::fs::read_to_string(root.join("file.txt")).unwrap(), "1");
std::fs::write(root.join("file.txt"), "2")
})
.unwrap()
.unwrap();
assert_eq!(
std::fs::read_to_string(dir_path.join("file.txt")).unwrap(),
"2"
);
}

View File

@@ -0,0 +1,183 @@
use gitbutler_app::reader::{CommitReader, Content, Reader};
use std::fs;
use std::path::Path;
use crate::{commit_all, temp_dir, test_repository};
use anyhow::Result;
#[test]
fn test_directory_reader_read_file() -> Result<()> {
let dir = temp_dir();
let file_path = Path::new("test.txt");
fs::write(dir.join(file_path), "test")?;
let reader = Reader::open(dir.clone())?;
assert_eq!(reader.read(file_path)?, Content::UTF8("test".to_string()));
Ok(())
}
#[test]
fn test_commit_reader_read_file() -> Result<()> {
let repository = test_repository();
let file_path = Path::new("test.txt");
fs::write(repository.path().parent().unwrap().join(file_path), "test")?;
let oid = commit_all(&repository);
fs::write(repository.path().parent().unwrap().join(file_path), "test2")?;
let reader = Reader::from_commit(&repository, &repository.find_commit(oid)?)?;
assert_eq!(reader.read(file_path)?, Content::UTF8("test".to_string()));
Ok(())
}
#[test]
fn test_reader_list_files_should_return_relative() -> Result<()> {
let dir = temp_dir();
fs::write(dir.join("test1.txt"), "test")?;
fs::create_dir_all(dir.join("dir"))?;
fs::write(dir.join("dir").join("test.txt"), "test")?;
let reader = Reader::open(dir.clone())?;
let files = reader.list_files(Path::new("dir"))?;
assert_eq!(files.len(), 1);
assert!(files.contains(&Path::new("test.txt").to_path_buf()));
Ok(())
}
#[test]
fn test_reader_list_files() -> Result<()> {
let dir = temp_dir();
fs::write(dir.join("test.txt"), "test")?;
fs::create_dir_all(dir.join("dir"))?;
fs::write(dir.join("dir").join("test.txt"), "test")?;
let reader = Reader::open(dir.clone())?;
let files = reader.list_files(Path::new(""))?;
assert_eq!(files.len(), 2);
assert!(files.contains(&Path::new("test.txt").to_path_buf()));
assert!(files.contains(&Path::new("dir/test.txt").to_path_buf()));
Ok(())
}
#[test]
fn test_commit_reader_list_files_should_return_relative() -> Result<()> {
let repository = test_repository();
fs::write(
repository.path().parent().unwrap().join("test1.txt"),
"test",
)?;
fs::create_dir_all(repository.path().parent().unwrap().join("dir"))?;
fs::write(
repository
.path()
.parent()
.unwrap()
.join("dir")
.join("test.txt"),
"test",
)?;
let oid = commit_all(&repository);
fs::remove_dir_all(repository.path().parent().unwrap().join("dir"))?;
let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?;
let files = reader.list_files(Path::new("dir"))?;
assert_eq!(files.len(), 1);
assert!(files.contains(&Path::new("test.txt").to_path_buf()));
Ok(())
}
#[test]
fn test_commit_reader_list_files() -> Result<()> {
let repository = test_repository();
fs::write(repository.path().parent().unwrap().join("test.txt"), "test")?;
fs::create_dir_all(repository.path().parent().unwrap().join("dir"))?;
fs::write(
repository
.path()
.parent()
.unwrap()
.join("dir")
.join("test.txt"),
"test",
)?;
let oid = commit_all(&repository);
fs::remove_dir_all(repository.path().parent().unwrap().join("dir"))?;
let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?;
let files = reader.list_files(Path::new(""))?;
assert_eq!(files.len(), 2);
assert!(files.contains(&Path::new("test.txt").to_path_buf()));
assert!(files.contains(&Path::new("dir/test.txt").to_path_buf()));
Ok(())
}
#[test]
fn test_directory_reader_exists() -> Result<()> {
let dir = temp_dir();
fs::write(dir.join("test.txt"), "test")?;
let reader = Reader::open(dir.clone())?;
assert!(reader.exists(Path::new("test.txt"))?);
assert!(!reader.exists(Path::new("test2.txt"))?);
Ok(())
}
#[test]
fn test_commit_reader_exists() -> Result<()> {
let repository = test_repository();
fs::write(repository.path().parent().unwrap().join("test.txt"), "test")?;
let oid = commit_all(&repository);
fs::remove_file(repository.path().parent().unwrap().join("test.txt"))?;
let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?;
assert!(reader.exists(Path::new("test.txt")));
assert!(!reader.exists(Path::new("test2.txt")));
Ok(())
}
#[test]
fn test_from_bytes() {
for (bytes, expected) in [
("test".as_bytes(), Content::UTF8("test".to_string())),
(&[0, 159, 146, 150, 159, 146, 150], Content::Binary),
] {
assert_eq!(Content::from(bytes), expected);
}
}
#[test]
fn test_serialize_content() {
for (content, expected) in [
(
Content::UTF8("test".to_string()),
r#"{"type":"utf8","value":"test"}"#,
),
(Content::Binary, r#"{"type":"binary"}"#),
(Content::Large, r#"{"type":"large"}"#),
] {
assert_eq!(serde_json::to_string(&content).unwrap(), expected);
}
}

View File

@@ -0,0 +1,84 @@
use crate::test_database;
use gitbutler_app::projects::ProjectId;
use gitbutler_app::sessions::{session, Database, Session, SessionId};
#[test]
fn test_insert_query() -> anyhow::Result<()> {
let db = test_database();
println!("0");
let database = Database::new(db);
println!("1");
let project_id = ProjectId::generate();
let session1 = Session {
id: SessionId::generate(),
hash: None,
meta: session::Meta {
branch: None,
commit: None,
start_timestamp_ms: 1,
last_timestamp_ms: 2,
},
};
let session2 = session::Session {
id: SessionId::generate(),
hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()),
meta: session::Meta {
branch: Some("branch2".to_string()),
commit: Some("commit2".to_string()),
start_timestamp_ms: 3,
last_timestamp_ms: 4,
},
};
let sessions = vec![&session1, &session2];
database.insert(&project_id, &sessions)?;
assert_eq!(
database.list_by_project_id(&project_id, None)?,
vec![session2.clone(), session1.clone()]
);
assert_eq!(database.get_by_id(&session1.id)?.unwrap(), session1);
assert_eq!(database.get_by_id(&session2.id)?.unwrap(), session2);
assert_eq!(database.get_by_id(&SessionId::generate())?, None);
Ok(())
}
#[test]
fn test_update() -> anyhow::Result<()> {
let db = test_database();
let database = Database::new(db);
let project_id = ProjectId::generate();
let session = session::Session {
id: SessionId::generate(),
hash: None,
meta: session::Meta {
branch: None,
commit: None,
start_timestamp_ms: 1,
last_timestamp_ms: 2,
},
};
let session_updated = session::Session {
id: session.id,
hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()),
meta: session::Meta {
branch: Some("branch2".to_string()),
commit: Some("commit2".to_string()),
start_timestamp_ms: 3,
last_timestamp_ms: 4,
},
};
database.insert(&project_id, &[&session])?;
database.insert(&project_id, &[&session_updated])?;
assert_eq!(
database.list_by_project_id(&project_id, None)?,
vec![session_updated.clone()]
);
assert_eq!(database.get_by_id(&session.id)?.unwrap(), session_updated);
Ok(())
}

View File

@@ -1,11 +1,9 @@
mod database;
use anyhow::Result;
use crate::{
sessions::{self, session::SessionId},
tests::{Case, Suite},
};
use super::Writer;
use crate::{Case, Suite};
use gitbutler_app::sessions::{self, session::SessionId};
#[test]
fn test_should_not_write_session_with_hash() {
@@ -22,7 +20,7 @@ fn test_should_not_write_session_with_hash() {
},
};
assert!(Writer::new(&gb_repository)
assert!(sessions::Writer::new(&gb_repository)
.unwrap()
.write(&session)
.is_err());
@@ -43,7 +41,7 @@ fn test_should_write_full_session() -> Result<()> {
},
};
Writer::new(&gb_repository)?.write(&session)?;
sessions::Writer::new(&gb_repository)?.write(&session)?;
assert_eq!(
std::fs::read_to_string(gb_repository.session_path().join("meta/id"))?,
@ -84,7 +82,7 @@ fn test_should_write_partial_session() -> Result<()> {
},
};
Writer::new(&gb_repository)?.write(&session)?;
sessions::Writer::new(&gb_repository)?.write(&session)?;
assert_eq!(
std::fs::read_to_string(gb_repository.session_path().join("meta/id"))?,

View File

@ -1,7 +1,5 @@
use crate::{
gb_repository, git, project_repository, projects,
tests::common::{paths, TestProject},
};
use crate::common::{paths, TestProject};
use gitbutler_app::{gb_repository, git, project_repository, projects};
use std::path;
mod init {

View File

@ -1,7 +1,6 @@
use crate::{
projects::Controller,
tests::common::{self, paths},
};
use gitbutler_app::projects::Controller;
use crate::common::{self, paths};
pub fn new() -> Controller {
let data_dir = paths::data_dir();
@ -22,7 +21,7 @@ mod add {
}
mod error {
use crate::projects::AddError;
use gitbutler_app::projects::AddError;
use super::*;

View File

@ -0,0 +1,352 @@
use super::*;
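// Tests for amending commits on a virtual branch: amending with no head
// commit, the force-push policy, locked vs. non-locked hunks, and missing
// ownership claims.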
#[tokio::test]
async fn to_default_target() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
// amend without head commit
fs::write(repository.path().join("file2.txt"), "content").unwrap();
let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap();
assert!(matches!(
controller
.amend(&project_id, &branch_id, &to_amend)
.await
.unwrap_err(),
ControllerError::Action(errors::AmendError::BranchHasNoCommits)
));
}
#[tokio::test]
async fn forcepush_allowed() {
let Test {
repository,
project_id,
controller,
projects,
..
} = Test::default();
projects
.update(&projects::UpdateRequest {
id: project_id,
ok_with_force_push: Some(false),
..Default::default()
})
.await
.unwrap();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
projects
.update(&projects::UpdateRequest {
id: project_id,
ok_with_force_push: Some(true),
..Default::default()
})
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
// create commit
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap();
};
controller
.push_virtual_branch(&project_id, &branch_id, false, None)
.await
.unwrap();
{
// amend another hunk
fs::write(repository.path().join("file2.txt"), "content2").unwrap();
let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap();
controller
.amend(&project_id, &branch_id, &to_amend)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert!(branch.requires_force);
assert_eq!(branch.commits.len(), 1);
assert_eq!(branch.files.len(), 0);
assert_eq!(branch.commits[0].files.len(), 2);
}
}
#[tokio::test]
async fn forcepush_forbidden() {
let Test {
repository,
project_id,
controller,
projects,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
projects
.update(&projects::UpdateRequest {
id: project_id,
ok_with_force_push: Some(false),
..Default::default()
})
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
// create commit
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap();
};
controller
.push_virtual_branch(&project_id, &branch_id, false, None)
.await
.unwrap();
{
fs::write(repository.path().join("file2.txt"), "content2").unwrap();
let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap();
assert!(matches!(
controller
.amend(&project_id, &branch_id, &to_amend)
.await
.unwrap_err(),
ControllerError::Action(errors::AmendError::ForcePushNotAllowed(_))
));
}
}
#[tokio::test]
async fn non_locked_hunk() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
// create commit
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert_eq!(branch.commits.len(), 1);
assert_eq!(branch.files.len(), 0);
assert_eq!(branch.commits[0].files.len(), 1);
};
{
// amend another hunk
fs::write(repository.path().join("file2.txt"), "content2").unwrap();
let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap();
controller
.amend(&project_id, &branch_id, &to_amend)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert_eq!(branch.commits.len(), 1);
assert_eq!(branch.files.len(), 0);
assert_eq!(branch.commits[0].files.len(), 2);
}
}
#[tokio::test]
async fn locked_hunk() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
// create commit
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert_eq!(branch.commits.len(), 1);
assert_eq!(branch.files.len(), 0);
assert_eq!(branch.commits[0].files.len(), 1);
assert_eq!(
branch.commits[0].files[0].hunks[0].diff,
"@@ -0,0 +1 @@\n+content\n\\ No newline at end of file\n"
);
};
{
// amend another hunk
fs::write(repository.path().join("file.txt"), "more content").unwrap();
let to_amend: branch::BranchOwnershipClaims = "file.txt:1-2".parse().unwrap();
controller
.amend(&project_id, &branch_id, &to_amend)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert_eq!(branch.commits.len(), 1);
assert_eq!(branch.files.len(), 0);
assert_eq!(branch.commits[0].files.len(), 1);
assert_eq!(
branch.commits[0].files[0].hunks[0].diff,
"@@ -0,0 +1 @@\n+more content\n\\ No newline at end of file\n"
);
}
}
#[tokio::test]
async fn non_existing_ownership() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
// create commit
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert_eq!(branch.commits.len(), 1);
assert_eq!(branch.files.len(), 0);
assert_eq!(branch.commits[0].files.len(), 1);
};
{
// amend a non-existing hunk
let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap();
assert!(matches!(
controller
.amend(&project_id, &branch_id, &to_amend)
.await
.unwrap_err(),
ControllerError::Action(errors::AmendError::TargetOwnerhshipNotFound(_))
));
}
}

View File

@ -0,0 +1,278 @@
use super::*;
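// Tests for applying and unapplying virtual branches: conflict detection
// between branches and rebasing of commits and work when the base branch moves.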
#[tokio::test]
async fn detect_conflict() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch1_id = {
let branch1_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
fs::write(repository.path().join("file.txt"), "branch one").unwrap();
branch1_id
};
// unapply first vbranch
controller
.unapply_virtual_branch(&project_id, &branch1_id)
.await
.unwrap();
{
// create another vbranch that conflicts with the first one
controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
fs::write(repository.path().join("file.txt"), "branch two").unwrap();
}
{
// it should not be possible to apply the first branch
assert!(!controller
.can_apply_virtual_branch(&project_id, &branch1_id)
.await
.unwrap());
assert!(matches!(
controller
.apply_virtual_branch(&project_id, &branch1_id)
.await,
Err(ControllerError::Action(
errors::ApplyBranchError::BranchConflicts(_)
))
));
}
}
#[tokio::test]
async fn rebase_commit() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
// make sure we have an undiscovered commit in the remote branch
{
fs::write(repository.path().join("file.txt"), "one").unwrap();
fs::write(repository.path().join("another_file.txt"), "").unwrap();
let first_commit_oid = repository.commit_all("first");
fs::write(repository.path().join("file.txt"), "two").unwrap();
repository.commit_all("second");
repository.push();
repository.reset_hard(Some(first_commit_oid));
}
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch1_id = {
// create a branch with some committed work
let branch1_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
fs::write(repository.path().join("another_file.txt"), "virtual").unwrap();
controller
.create_commit(&project_id, &branch1_id, "virtual commit", None, false)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert!(branches[0].active);
assert_eq!(branches[0].files.len(), 0);
assert_eq!(branches[0].commits.len(), 1);
branch1_id
};
{
// unapply first vbranch
controller
.unapply_virtual_branch(&project_id, &branch1_id)
.await
.unwrap();
assert_eq!(
fs::read_to_string(repository.path().join("another_file.txt")).unwrap(),
""
);
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"one"
);
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].files.len(), 0);
assert_eq!(branches[0].commits.len(), 1);
assert!(!branches[0].active);
}
{
// fetch remote
controller.update_base_branch(&project_id).await.unwrap();
// branch is still unapplied
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].files.len(), 0);
assert_eq!(branches[0].commits.len(), 1);
assert!(!branches[0].active);
assert!(!branches[0].conflicted);
assert_eq!(
fs::read_to_string(repository.path().join("another_file.txt")).unwrap(),
""
);
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"two"
);
}
{
// apply first vbranch again
controller
.apply_virtual_branch(&project_id, &branch1_id)
.await
.unwrap();
// it should be rebased
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].files.len(), 0);
assert_eq!(branches[0].commits.len(), 1);
assert!(branches[0].active);
assert!(!branches[0].conflicted);
assert_eq!(
fs::read_to_string(repository.path().join("another_file.txt")).unwrap(),
"virtual"
);
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"two"
);
}
}
#[tokio::test]
async fn rebase_work() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
// make sure we have an undiscovered commit in the remote branch
{
let first_commit_oid = repository.commit_all("first");
fs::write(repository.path().join("file.txt"), "").unwrap();
repository.commit_all("second");
repository.push();
repository.reset_hard(Some(first_commit_oid));
}
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch1_id = {
// make a branch with some work
let branch1_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
fs::write(repository.path().join("another_file.txt"), "").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert!(branches[0].active);
assert_eq!(branches[0].files.len(), 1);
assert_eq!(branches[0].commits.len(), 0);
branch1_id
};
{
// unapply first vbranch
controller
.unapply_virtual_branch(&project_id, &branch1_id)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].files.len(), 1);
assert_eq!(branches[0].commits.len(), 0);
assert!(!branches[0].active);
assert!(!repository.path().join("another_file.txt").exists());
assert!(!repository.path().join("file.txt").exists());
}
{
// fetch remote
controller.update_base_branch(&project_id).await.unwrap();
// first branch is still unapplied
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].files.len(), 1);
assert_eq!(branches[0].commits.len(), 0);
assert!(!branches[0].active);
assert!(!branches[0].conflicted);
assert!(!repository.path().join("another_file.txt").exists());
assert!(repository.path().join("file.txt").exists());
}
{
// apply first vbranch again
controller
.apply_virtual_branch(&project_id, &branch1_id)
.await
.unwrap();
// workdir should be rebased, and work should be restored
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].files.len(), 1);
assert_eq!(branches[0].commits.len(), 0);
assert!(branches[0].active);
assert!(!branches[0].conflicted);
assert!(repository.path().join("another_file.txt").exists());
assert!(repository.path().join("file.txt").exists());
}
}

View File

@ -0,0 +1,382 @@
use super::*;
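// Cherry-pick tests, split into commits that apply cleanly and commits that
// produce conflicts which must be resolved before committing.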
mod cleanly {
use super::*;
#[tokio::test]
async fn applied() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let commit_one = {
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit", None, false)
.await
.unwrap()
};
let commit_two = {
fs::write(repository.path().join("file.txt"), "content two").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit", None, false)
.await
.unwrap()
};
controller
.push_virtual_branch(&project_id, &branch_id, false, None)
.await
.unwrap();
controller
.reset_virtual_branch(&project_id, &branch_id, commit_one)
.await
.unwrap();
repository.reset_hard(None);
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"content"
);
let cherry_picked_commit_oid = controller
.cherry_pick(&project_id, &branch_id, commit_two)
.await
.unwrap();
assert!(cherry_picked_commit_oid.is_some());
assert!(repository.path().join("file.txt").exists());
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"content two"
);
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch_id);
assert!(branches[0].active);
assert_eq!(branches[0].commits.len(), 2);
assert_eq!(branches[0].commits[0].id, cherry_picked_commit_oid.unwrap());
assert_eq!(branches[0].commits[1].id, commit_one);
}
#[tokio::test]
async fn to_different_branch() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let commit_one = {
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit", None, false)
.await
.unwrap()
};
let commit_two = {
fs::write(repository.path().join("file_two.txt"), "content two").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit", None, false)
.await
.unwrap()
};
controller
.push_virtual_branch(&project_id, &branch_id, false, None)
.await
.unwrap();
controller
.reset_virtual_branch(&project_id, &branch_id, commit_one)
.await
.unwrap();
repository.reset_hard(None);
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"content"
);
assert!(!repository.path().join("file_two.txt").exists());
let branch_two_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let cherry_picked_commit_oid = controller
.cherry_pick(&project_id, &branch_two_id, commit_two)
.await
.unwrap();
assert!(cherry_picked_commit_oid.is_some());
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert!(repository.path().join("file_two.txt").exists());
assert_eq!(
fs::read_to_string(repository.path().join("file_two.txt")).unwrap(),
"content two"
);
assert_eq!(branches.len(), 2);
assert_eq!(branches[0].id, branch_id);
assert!(!branches[0].active);
assert_eq!(branches[0].commits.len(), 1);
assert_eq!(branches[0].commits[0].id, commit_one);
assert_eq!(branches[1].id, branch_two_id);
assert!(branches[1].active);
assert_eq!(branches[1].commits.len(), 1);
assert_eq!(branches[1].commits[0].id, cherry_picked_commit_oid.unwrap());
}
#[tokio::test]
async fn non_applied() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let commit_one_oid = {
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit", None, false)
.await
.unwrap()
};
{
fs::write(repository.path().join("file_two.txt"), "content two").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit", None, false)
.await
.unwrap()
};
let commit_three_oid = {
fs::write(repository.path().join("file_three.txt"), "content three").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit", None, false)
.await
.unwrap()
};
controller
.reset_virtual_branch(&project_id, &branch_id, commit_one_oid)
.await
.unwrap();
controller
.unapply_virtual_branch(&project_id, &branch_id)
.await
.unwrap();
assert!(matches!(
controller
.cherry_pick(&project_id, &branch_id, commit_three_oid)
.await,
Err(ControllerError::Action(errors::CherryPickError::NotApplied))
));
}
}
mod with_conflicts {
use super::*;
#[tokio::test]
async fn applied() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let commit_one = {
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap()
};
{
fs::write(repository.path().join("file_two.txt"), "content two").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit two", None, false)
.await
.unwrap()
};
let commit_three = {
fs::write(repository.path().join("file_three.txt"), "content three").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit three", None, false)
.await
.unwrap()
};
controller
.push_virtual_branch(&project_id, &branch_id, false, None)
.await
.unwrap();
controller
.reset_virtual_branch(&project_id, &branch_id, commit_one)
.await
.unwrap();
repository.reset_hard(None);
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"content"
);
assert!(!repository.path().join("file_two.txt").exists());
assert!(!repository.path().join("file_three.txt").exists());
// introduce conflict with the remote commit
fs::write(repository.path().join("file_three.txt"), "conflict").unwrap();
{
// cherry picking leads to conflict
let cherry_picked_commit_oid = controller
.cherry_pick(&project_id, &branch_id, commit_three)
.await
.unwrap();
assert!(cherry_picked_commit_oid.is_none());
assert_eq!(
fs::read_to_string(repository.path().join("file_three.txt")).unwrap(),
"<<<<<<< ours\nconflict\n=======\ncontent three\n>>>>>>> theirs\n"
);
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch_id);
assert!(branches[0].active);
assert!(branches[0].conflicted);
assert_eq!(branches[0].files.len(), 1);
assert!(branches[0].files[0].conflicted);
assert_eq!(branches[0].commits.len(), 1);
}
{
// conflict can be resolved
fs::write(repository.path().join("file_three.txt"), "resolved").unwrap();
let committed_oid = controller
.create_commit(&project_id, &branch_id, "resolution", None, false)
.await
.unwrap();
let commit = repository.find_commit(committed_oid).unwrap();
assert_eq!(commit.parent_count(), 2);
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch_id);
assert!(branches[0].active);
assert!(branches[0].requires_force);
assert!(!branches[0].conflicted);
assert_eq!(branches[0].commits.len(), 2);
// resolution commit is there
assert_eq!(branches[0].commits[0].id, committed_oid);
assert_eq!(branches[0].commits[1].id, commit_one);
}
}
#[tokio::test]
async fn non_applied() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
let commit_oid = {
let first = repository.commit_all("commit");
fs::write(repository.path().join("file.txt"), "content").unwrap();
let second = repository.commit_all("commit");
repository.push();
repository.reset_hard(Some(first));
second
};
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
// introduce conflict with the remote commit
fs::write(repository.path().join("file.txt"), "conflict").unwrap();
controller
.unapply_virtual_branch(&project_id, &branch_id)
.await
.unwrap();
assert!(matches!(
controller
.cherry_pick(&project_id, &branch_id, commit_oid)
.await,
Err(ControllerError::Action(errors::CherryPickError::NotApplied))
));
}
}

View File

@ -0,0 +1,198 @@
use super::*;
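// Hunk-locking tests: hunks that overlap previously committed changes are
// locked to their branch, while disjoint hunks stay movable.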
#[tokio::test]
async fn should_lock_updated_hunks() {
let Test {
project_id,
controller,
repository,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
// by default, hunks are not locked
fs::write(repository.path().join("file.txt"), "content").unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert_eq!(branch.files.len(), 1);
assert_eq!(branch.files[0].path.display().to_string(), "file.txt");
assert_eq!(branch.files[0].hunks.len(), 1);
assert!(!branch.files[0].hunks[0].locked);
}
controller
.create_commit(&project_id, &branch_id, "test", None, false)
.await
.unwrap();
{
// change in the committed hunks leads to hunk locking
fs::write(repository.path().join("file.txt"), "updated content").unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert_eq!(branch.files.len(), 1);
assert_eq!(branch.files[0].path.display().to_string(), "file.txt");
assert_eq!(branch.files[0].hunks.len(), 1);
assert!(branch.files[0].hunks[0].locked);
}
}
#[tokio::test]
async fn should_not_lock_disjointed_hunks() {
let Test {
project_id,
controller,
repository,
..
} = Test::default();
let mut lines: Vec<_> = (0_i32..24_i32).map(|i| format!("line {}", i)).collect();
fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap();
repository.commit_all("my commit");
repository.push();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
// new hunk in the middle of the file
lines[12] = "commited stuff".to_string();
fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert_eq!(branch.files.len(), 1);
assert_eq!(branch.files[0].path.display().to_string(), "file.txt");
assert_eq!(branch.files[0].hunks.len(), 1);
assert!(!branch.files[0].hunks[0].locked);
}
controller
.create_commit(&project_id, &branch_id, "test commit", None, false)
.await
.unwrap();
controller
.push_virtual_branch(&project_id, &branch_id, false, None)
.await
.unwrap();
{
// hunk before the committed part is not locked
let mut changed_lines = lines.clone();
changed_lines[0] = "updated line".to_string();
fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert_eq!(branch.files.len(), 1);
assert_eq!(branch.files[0].path.display().to_string(), "file.txt");
assert_eq!(branch.files[0].hunks.len(), 1);
assert!(!branch.files[0].hunks[0].locked);
// cleanup
fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap();
}
{
// hunk after the committed part is not locked
let mut changed_lines = lines.clone();
changed_lines[23] = "updated line".to_string();
fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert_eq!(branch.files.len(), 1);
assert_eq!(branch.files[0].path.display().to_string(), "file.txt");
assert_eq!(branch.files[0].hunks.len(), 1);
assert!(!branch.files[0].hunks[0].locked);
// cleanup
fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap();
}
{
// hunk before the committed part but with overlapping context
let mut changed_lines = lines.clone();
changed_lines[10] = "updated line".to_string();
fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert_eq!(branch.files.len(), 1);
assert_eq!(branch.files[0].path.display().to_string(), "file.txt");
assert_eq!(branch.files[0].hunks.len(), 1);
// TODO: We lock this hunk, but can we afford not to lock it?
assert!(branch.files[0].hunks[0].locked);
// cleanup
fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap();
}
{
// hunk after the committed part but with overlapping context
let mut changed_lines = lines.clone();
changed_lines[14] = "updated line".to_string();
fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert_eq!(branch.files.len(), 1);
assert_eq!(branch.files[0].path.display().to_string(), "file.txt");
assert_eq!(branch.files[0].hunks.len(), 1);
// TODO: We lock this hunk, but can we afford not to lock it?
assert!(branch.files[0].hunks[0].locked);
// cleanup
fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap();
}
}

View File

@ -0,0 +1,382 @@
use super::*;
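// Tests for creating a virtual branch from an existing branch: the full
// integration round-trip plus conflict, default-target and missing-branch
// error cases.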
#[tokio::test]
async fn integration() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_name = {
// make a remote branch
let branch_id = controller
.create_virtual_branch(&project_id, &super::branch::BranchCreateRequest::default())
.await
.unwrap();
std::fs::write(repository.path().join("file.txt"), "first\n").unwrap();
controller
.create_commit(&project_id, &branch_id, "first", None, false)
.await
.unwrap();
controller
.push_virtual_branch(&project_id, &branch_id, false, None)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|branch| branch.id == branch_id)
.unwrap();
let name = branch.upstream.unwrap().name;
controller
.delete_virtual_branch(&project_id, &branch_id)
.await
.unwrap();
name
};
// check out an existing remote branch
let branch_id = controller
.create_virtual_branch_from_branch(&project_id, &branch_name)
.await
.unwrap();
{
// add a commit
std::fs::write(repository.path().join("file.txt"), "first\nsecond").unwrap();
controller
.create_commit(&project_id, &branch_id, "second", None, false)
.await
.unwrap();
}
{
// meanwhile, there is a new commit on master
repository.checkout(&"refs/heads/master".parse().unwrap());
std::fs::write(repository.path().join("another.txt"), "").unwrap();
repository.commit_all("another");
repository.push_branch(&"refs/heads/master".parse().unwrap());
repository.checkout(&"refs/heads/gitbutler/integration".parse().unwrap());
}
{
// merge branch into master
controller
.push_virtual_branch(&project_id, &branch_id, false, None)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|branch| branch.id == branch_id)
.unwrap();
assert!(branch.commits[0].is_remote);
assert!(!branch.commits[0].is_integrated);
assert!(branch.commits[1].is_remote);
assert!(!branch.commits[1].is_integrated);
repository.rebase_and_merge(&branch_name);
}
{
// should mark commits as integrated
controller
.fetch_from_target(&project_id, None)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|branch| branch.id == branch_id)
.unwrap();
assert!(branch.commits[0].is_remote);
assert!(branch.commits[0].is_integrated);
assert!(branch.commits[1].is_remote);
assert!(branch.commits[1].is_integrated);
}
}
#[tokio::test]
async fn no_conflicts() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
{
// create a remote branch
let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap();
repository.checkout(&branch_name);
fs::write(repository.path().join("file.txt"), "first").unwrap();
repository.commit_all("first");
repository.push_branch(&branch_name);
repository.checkout(&"refs/heads/master".parse().unwrap());
}
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert!(branches.is_empty());
let branch_id = controller
.create_virtual_branch_from_branch(
&project_id,
&"refs/remotes/origin/branch".parse().unwrap(),
)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch_id);
assert_eq!(branches[0].commits.len(), 1);
assert_eq!(branches[0].commits[0].description, "first");
}
#[tokio::test]
async fn conflicts_with_uncommitted() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
{
// create a remote branch
let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap();
repository.checkout(&branch_name);
fs::write(repository.path().join("file.txt"), "first").unwrap();
repository.commit_all("first");
repository.push_branch(&branch_name);
repository.checkout(&"refs/heads/master".parse().unwrap());
}
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
// create a local branch that conflicts with remote
{
std::fs::write(repository.path().join("file.txt"), "conflict").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
};
// branch should be created unapplied, because of the conflict
let new_branch_id = controller
.create_virtual_branch_from_branch(
&project_id,
&"refs/remotes/origin/branch".parse().unwrap(),
)
.await
.unwrap();
let new_branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|branch| branch.id == new_branch_id)
.unwrap();
assert!(!new_branch.active);
assert_eq!(new_branch.commits.len(), 1);
assert!(new_branch.upstream.is_some());
}
#[tokio::test]
async fn conflicts_with_committed() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
{
// create a remote branch
let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap();
repository.checkout(&branch_name);
fs::write(repository.path().join("file.txt"), "first").unwrap();
repository.commit_all("first");
repository.push_branch(&branch_name);
repository.checkout(&"refs/heads/master".parse().unwrap());
}
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
// create a local branch that conflicts with remote
{
std::fs::write(repository.path().join("file.txt"), "conflict").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
controller
.create_commit(&project_id, &branches[0].id, "hej", None, false)
.await
.unwrap();
};
// branch should be created unapplied, because of the conflict
let new_branch_id = controller
.create_virtual_branch_from_branch(
&project_id,
&"refs/remotes/origin/branch".parse().unwrap(),
)
.await
.unwrap();
let new_branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|branch| branch.id == new_branch_id)
.unwrap();
assert!(!new_branch.active);
assert_eq!(new_branch.commits.len(), 1);
assert!(new_branch.upstream.is_some());
}
#[tokio::test]
async fn from_default_target() {
let Test {
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
// creating a virtual branch from the default target is not allowed
assert!(matches!(
controller
.create_virtual_branch_from_branch(
&project_id,
&"refs/remotes/origin/master".parse().unwrap(),
)
.await
.unwrap_err(),
ControllerError::Action(
errors::CreateVirtualBranchFromBranchError::CantMakeBranchFromDefaultTarget
)
));
}
#[tokio::test]
async fn from_non_existent_branch() {
let Test {
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
// creating a virtual branch from a branch that does not exist should fail
assert!(matches!(
controller
.create_virtual_branch_from_branch(
&project_id,
&"refs/remotes/origin/branch".parse().unwrap(),
)
.await
.unwrap_err(),
ControllerError::Action(errors::CreateVirtualBranchFromBranchError::BranchNotFound(
_
))
));
}
#[tokio::test]
async fn from_state_remote_branch() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
{
// create a remote branch
let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap();
repository.checkout(&branch_name);
fs::write(repository.path().join("file.txt"), "branch commit").unwrap();
repository.commit_all("branch commit");
repository.push_branch(&branch_name);
repository.checkout(&"refs/heads/master".parse().unwrap());
// make remote branch stale
std::fs::write(repository.path().join("antoher_file.txt"), "master commit").unwrap();
repository.commit_all("master commit");
repository.push();
}
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch_from_branch(
&project_id,
&"refs/remotes/origin/branch".parse().unwrap(),
)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch_id);
assert_eq!(branches[0].commits.len(), 1);
assert!(branches[0].files.is_empty());
assert_eq!(branches[0].commits[0].description, "branch commit");
}

View File

@ -0,0 +1,78 @@
use super::*;
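// Tests for deleting a virtual branch: uncommitted changes are unapplied and
// the branch's refs/gitbutler reference is removed.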
#[tokio::test]
async fn should_unapply_diff() {
let Test {
project_id,
controller,
repository,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
// write some uncommitted changes
std::fs::write(repository.path().join("file.txt"), "content").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
controller
.delete_virtual_branch(&project_id, &branches[0].id)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 0);
assert!(!repository.path().join("file.txt").exists());
let refnames = repository
.references()
.into_iter()
.filter_map(|reference| reference.name().map(|name| name.to_string()))
.collect::<Vec<_>>();
assert!(!refnames.contains(&"refs/gitbutler/name".to_string()));
}
#[tokio::test]
async fn should_remove_reference() {
let Test {
project_id,
controller,
repository,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let id = controller
.create_virtual_branch(
&project_id,
&branch::BranchCreateRequest {
name: Some("name".to_string()),
..Default::default()
},
)
.await
.unwrap();
controller
.delete_virtual_branch(&project_id, &id)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 0);
let refnames = repository
.references()
.into_iter()
.filter_map(|reference| reference.name().map(|name| name.to_string()))
.collect::<Vec<_>>();
assert!(!refnames.contains(&"refs/gitbutler/name".to_string()));
}

View File

@ -0,0 +1,46 @@
use super::*;
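// Test that fetching from the target updates the base branch's
// last_fetched_ms timestamp.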
#[tokio::test]
async fn should_update_last_fetched() {
let Test {
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let before_fetch = controller.get_base_branch_data(&project_id).await.unwrap();
assert!(before_fetch.unwrap().last_fetched_ms.is_none());
let fetch = controller
.fetch_from_target(&project_id, None)
.await
.unwrap();
assert!(fetch.last_fetched_ms.is_some());
let after_fetch = controller.get_base_branch_data(&project_id).await.unwrap();
assert!(after_fetch.as_ref().unwrap().last_fetched_ms.is_some());
assert_eq!(fetch.last_fetched_ms, after_fetch.unwrap().last_fetched_ms);
let second_fetch = controller
.fetch_from_target(&project_id, None)
.await
.unwrap();
assert!(second_fetch.last_fetched_ms.is_some());
assert_ne!(fetch.last_fetched_ms, second_fetch.last_fetched_ms);
let after_second_fetch = controller.get_base_branch_data(&project_id).await.unwrap();
assert!(after_second_fetch
.as_ref()
.unwrap()
.last_fetched_ms
.is_some());
assert_eq!(
second_fetch.last_fetched_ms,
after_second_fetch.unwrap().last_fetched_ms
);
}

View File

@ -0,0 +1,211 @@
use super::*;
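// Tests around setting the base branch: setting it twice across project
// re-adds, starting from dirty or already-committed local state, and
// repositories with submodules.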
#[tokio::test]
async fn twice() {
let data_dir = paths::data_dir();
let keys = keys::Controller::from_path(&data_dir);
let projects = projects::Controller::from_path(&data_dir);
let users = users::Controller::from_path(&data_dir);
let helper = git::credentials::Helper::from_path(&data_dir);
let test_project = TestProject::default();
let controller = Controller::new(data_dir, projects.clone(), users, keys, helper);
{
let project = projects
.add(test_project.path())
.expect("failed to add project");
controller
.set_base_branch(&project.id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
assert!(controller
.list_virtual_branches(&project.id)
.await
.unwrap()
.0
.is_empty());
projects.delete(&project.id).await.unwrap();
controller
.list_virtual_branches(&project.id)
.await
.unwrap_err();
}
{
let project = projects.add(test_project.path()).unwrap();
controller
.set_base_branch(&project.id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
// even though the project is on gitbutler/integration, we should not import it
assert!(controller
.list_virtual_branches(&project.id)
.await
.unwrap()
.0
.is_empty());
}
}
#[tokio::test]
async fn dirty_non_target() {
// a situation where you initialize the project while on a local version of master
// that has uncommitted changes.
let Test {
repository,
project_id,
controller,
..
} = Test::default();
repository.checkout(&"refs/heads/some-feature".parse().unwrap());
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].files.len(), 1);
assert_eq!(branches[0].files[0].hunks.len(), 1);
assert!(branches[0].upstream.is_none());
assert_eq!(branches[0].name, "some-feature");
}
#[tokio::test]
async fn dirty_target() {
// a situation where you initialize the project while on a local version of master
// that has uncommitted changes.
let Test {
repository,
project_id,
controller,
..
} = Test::default();
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].files.len(), 1);
assert_eq!(branches[0].files[0].hunks.len(), 1);
assert!(branches[0].upstream.is_none());
assert_eq!(branches[0].name, "master");
}
#[tokio::test]
async fn commit_on_non_target_local() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
repository.checkout(&"refs/heads/some-feature".parse().unwrap());
fs::write(repository.path().join("file.txt"), "content").unwrap();
repository.commit_all("commit on target");
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert!(branches[0].files.is_empty());
assert_eq!(branches[0].commits.len(), 1);
assert!(branches[0].upstream.is_none());
assert_eq!(branches[0].name, "some-feature");
}
#[tokio::test]
async fn commit_on_non_target_remote() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
repository.checkout(&"refs/heads/some-feature".parse().unwrap());
fs::write(repository.path().join("file.txt"), "content").unwrap();
repository.commit_all("commit on target");
repository.push_branch(&"refs/heads/some-feature".parse().unwrap());
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert!(branches[0].files.is_empty());
assert_eq!(branches[0].commits.len(), 1);
assert!(branches[0].upstream.is_some());
assert_eq!(branches[0].name, "some-feature");
}
#[tokio::test]
async fn commit_on_target() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
fs::write(repository.path().join("file.txt"), "content").unwrap();
repository.commit_all("commit on target");
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert!(branches[0].files.is_empty());
assert_eq!(branches[0].commits.len(), 1);
assert!(branches[0].upstream.is_none());
assert_eq!(branches[0].name, "master");
}
#[tokio::test]
async fn submodule() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
let submodule_url: git::Url = TestProject::default()
.path()
.display()
.to_string()
.parse()
.unwrap();
repository.add_submodule(&submodule_url, path::Path::new("submodule"));
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].files.len(), 1);
assert_eq!(branches[0].files[0].hunks.len(), 1);
}

View File

@ -0,0 +1,158 @@
use std::{fs, path, str::FromStr};
use crate::common::{paths, TestProject};
use gitbutler_app::{
git, keys,
projects::{self, ProjectId},
users,
virtual_branches::{branch, controller::ControllerError, errors, Controller},
};
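// Shared harness for these tests: a fresh data directory, a test repository
// on disk, and a virtual-branches Controller (plus the projects Controller)
// wired to them.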
struct Test {
repository: TestProject,
project_id: ProjectId,
projects: projects::Controller,
controller: Controller,
}
impl Default for Test {
fn default() -> Self {
let data_dir = paths::data_dir();
let keys = keys::Controller::from_path(&data_dir);
let projects = projects::Controller::from_path(&data_dir);
let users = users::Controller::from_path(&data_dir);
let helper = git::credentials::Helper::from_path(&data_dir);
let test_project = TestProject::default();
let project = projects
.add(test_project.path())
.expect("failed to add project");
Self {
repository: test_project,
project_id: project.id,
controller: Controller::new(data_dir, projects.clone(), users, keys, helper),
projects,
}
}
}
mod amend;
mod apply_virtual_branch;
mod cherry_pick;
mod create_commit;
mod create_virtual_branch_from_branch;
mod delete_virtual_branch;
mod fetch_from_target;
mod init;
mod move_commit_to_vbranch;
mod references;
mod reset_virtual_branch;
mod selected_for_changes;
mod set_base_branch;
mod squash;
mod unapply;
mod unapply_ownership;
mod update_base_branch;
mod update_commit_message;
mod upstream;
#[tokio::test]
async fn resolve_conflict_flow() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
// make sure we have an undiscovered commit in the remote branch
{
fs::write(repository.path().join("file.txt"), "first").unwrap();
let first_commit_oid = repository.commit_all("first");
fs::write(repository.path().join("file.txt"), "second").unwrap();
repository.commit_all("second");
repository.push();
repository.reset_hard(Some(first_commit_oid));
}
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch1_id = {
// make a branch that conflicts with the remote branch, but doesn't know about it yet
let branch1_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
fs::write(repository.path().join("file.txt"), "conflict").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert!(branches[0].active);
branch1_id
};
{
// fetch remote
controller.update_base_branch(&project_id).await.unwrap();
// there is a conflict now, so the branch should be inactive
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert!(!branches[0].active);
}
{
// when we apply conflicted branch, it has conflict
controller
.apply_virtual_branch(&project_id, &branch1_id)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert!(branches[0].active);
assert!(branches[0].conflicted);
// and the conflict markers are in the file
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n"
);
}
{
// can't commit conflicts
assert!(matches!(
controller
.create_commit(&project_id, &branch1_id, "commit conflicts", None, false)
.await,
Err(ControllerError::Action(errors::CommitError::Conflicted(_)))
));
}
{
// fixing the conflict removes conflicted mark
fs::write(repository.path().join("file.txt"), "resolved").unwrap();
let commit_oid = controller
.create_commit(&project_id, &branch1_id, "resolution", None, false)
.await
.unwrap();
let commit = repository.find_commit(commit_oid).unwrap();
assert_eq!(commit.parent_count(), 2);
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert!(branches[0].active);
assert!(!branches[0].conflicted);
}
}

View File

@ -0,0 +1,324 @@
use crate::suite::virtual_branches::Test;
use gitbutler_app::git;
use gitbutler_app::virtual_branches::controller::ControllerError;
use gitbutler_app::virtual_branches::{branch, errors, BranchId};
use std::str::FromStr;
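// Tests for moving a commit to another virtual branch, including the locked
// source-hunk, unknown-commit and unknown-branch error cases.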
#[tokio::test]
async fn no_diffs() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
std::fs::write(repository.path().join("file.txt"), "content").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
let source_branch_id = branches[0].id;
let commit_oid = controller
.create_commit(&project_id, &source_branch_id, "commit", None, false)
.await
.unwrap();
let target_branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
controller
.move_commit(&project_id, &target_branch_id, commit_oid)
.await
.unwrap();
let destination_branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == target_branch_id)
.unwrap();
let source_branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == source_branch_id)
.unwrap();
assert_eq!(destination_branch.commits.len(), 1);
assert_eq!(destination_branch.files.len(), 0);
assert_eq!(source_branch.commits.len(), 0);
assert_eq!(source_branch.files.len(), 0);
}
#[tokio::test]
async fn diffs_on_source_branch() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
std::fs::write(repository.path().join("file.txt"), "content").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
let source_branch_id = branches[0].id;
let commit_oid = controller
.create_commit(&project_id, &source_branch_id, "commit", None, false)
.await
.unwrap();
std::fs::write(
repository.path().join("another file.txt"),
"another content",
)
.unwrap();
let target_branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
controller
.move_commit(&project_id, &target_branch_id, commit_oid)
.await
.unwrap();
let destination_branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == target_branch_id)
.unwrap();
let source_branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == source_branch_id)
.unwrap();
assert_eq!(destination_branch.commits.len(), 1);
assert_eq!(destination_branch.files.len(), 0);
assert_eq!(source_branch.commits.len(), 0);
assert_eq!(source_branch.files.len(), 1);
}
#[tokio::test]
async fn diffs_on_target_branch() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
std::fs::write(repository.path().join("file.txt"), "content").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
let source_branch_id = branches[0].id;
let commit_oid = controller
.create_commit(&project_id, &source_branch_id, "commit", None, false)
.await
.unwrap();
let target_branch_id = controller
.create_virtual_branch(
&project_id,
&branch::BranchCreateRequest {
selected_for_changes: Some(true),
..Default::default()
},
)
.await
.unwrap();
std::fs::write(
repository.path().join("another file.txt"),
"another content",
)
.unwrap();
controller
.move_commit(&project_id, &target_branch_id, commit_oid)
.await
.unwrap();
let destination_branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == target_branch_id)
.unwrap();
let source_branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == source_branch_id)
.unwrap();
assert_eq!(destination_branch.commits.len(), 1);
assert_eq!(destination_branch.files.len(), 1);
assert_eq!(source_branch.commits.len(), 0);
assert_eq!(source_branch.files.len(), 0);
}
#[tokio::test]
async fn locked_hunks_on_source_branch() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
std::fs::write(repository.path().join("file.txt"), "content").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
let source_branch_id = branches[0].id;
let commit_oid = controller
.create_commit(&project_id, &source_branch_id, "commit", None, false)
.await
.unwrap();
std::fs::write(repository.path().join("file.txt"), "locked content").unwrap();
let target_branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
assert!(matches!(
controller
.move_commit(&project_id, &target_branch_id, commit_oid)
.await
.unwrap_err(),
ControllerError::Action(errors::MoveCommitError::SourceLocked)
));
}
#[tokio::test]
async fn no_commit() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
std::fs::write(repository.path().join("file.txt"), "content").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
let source_branch_id = branches[0].id;
controller
.create_commit(&project_id, &source_branch_id, "commit", None, false)
.await
.unwrap();
let target_branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
assert!(matches!(
controller
.move_commit(
&project_id,
&target_branch_id,
git::Oid::from_str("a99c95cca7a60f1a2180c2f86fb18af97333c192").unwrap()
)
.await
.unwrap_err(),
ControllerError::Action(errors::MoveCommitError::CommitNotFound(_))
));
}
#[tokio::test]
async fn no_branch() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
std::fs::write(repository.path().join("file.txt"), "content").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
let source_branch_id = branches[0].id;
let commit_oid = controller
.create_commit(&project_id, &source_branch_id, "commit", None, false)
.await
.unwrap();
assert!(matches!(
controller
.move_commit(&project_id, &BranchId::generate(), commit_oid)
.await
.unwrap_err(),
ControllerError::Action(errors::MoveCommitError::BranchNotFound(_))
));
}

View File

@ -0,0 +1,366 @@
use super::*;
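// Tests that virtual branch names are reflected as refs/gitbutler/* references
// and deduplicated on create and update.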
mod create_virtual_branch {
use super::*;
#[tokio::test]
async fn simple() {
let Test {
project_id,
controller,
repository,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch_id);
assert_eq!(branches[0].name, "Virtual branch");
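// a matching refs/gitbutler reference should be created for the new branch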
let refnames = repository
.references()
.into_iter()
.filter_map(|reference| reference.name().map(|name| name.to_string()))
.collect::<Vec<_>>();
assert!(refnames.contains(&"refs/gitbutler/Virtual-branch".to_string()));
}
#[tokio::test]
async fn duplicate_name() {
let Test {
project_id,
controller,
repository,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch1_id = controller
.create_virtual_branch(
&project_id,
&gitbutler_app::virtual_branches::branch::BranchCreateRequest {
name: Some("name".to_string()),
..Default::default()
},
)
.await
.unwrap();
let branch2_id = controller
.create_virtual_branch(
&project_id,
&gitbutler_app::virtual_branches::branch::BranchCreateRequest {
name: Some("name".to_string()),
..Default::default()
},
)
.await
.unwrap();
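// the second branch requested with the same name should be de-duplicated to "name 1"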
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 2);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].name, "name");
assert_eq!(branches[1].id, branch2_id);
assert_eq!(branches[1].name, "name 1");
let refnames = repository
.references()
.into_iter()
.filter_map(|reference| reference.name().map(|name| name.to_string()))
.collect::<Vec<_>>();
assert!(refnames.contains(&"refs/gitbutler/name".to_string()));
assert!(refnames.contains(&"refs/gitbutler/name-1".to_string()));
}
}
mod update_virtual_branch {
use super::*;
#[tokio::test]
async fn simple() {
let Test {
project_id,
controller,
repository,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(
&project_id,
&branch::BranchCreateRequest {
name: Some("name".to_string()),
..Default::default()
},
)
.await
.unwrap();
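// renaming the branch should also move its refs/gitbutler reference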
controller
.update_virtual_branch(
&project_id,
branch::BranchUpdateRequest {
id: branch_id,
name: Some("new name".to_string()),
..Default::default()
},
)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch_id);
assert_eq!(branches[0].name, "new name");
let refnames = repository
.references()
.into_iter()
.filter_map(|reference| reference.name().map(|name| name.to_string()))
.collect::<Vec<_>>();
assert!(!refnames.contains(&"refs/gitbutler/name".to_string()));
assert!(refnames.contains(&"refs/gitbutler/new-name".to_string()));
}
#[tokio::test]
async fn duplicate_name() {
let Test {
project_id,
controller,
repository,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch1_id = controller
.create_virtual_branch(
&project_id,
&branch::BranchCreateRequest {
name: Some("name".to_string()),
..Default::default()
},
)
.await
.unwrap();
let branch2_id = controller
.create_virtual_branch(
&project_id,
&branch::BranchCreateRequest {
..Default::default()
},
)
.await
.unwrap();
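// renaming the second branch to an already-used name should be de-duplicated to "name 1"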
controller
.update_virtual_branch(
&project_id,
branch::BranchUpdateRequest {
id: branch2_id,
name: Some("name".to_string()),
..Default::default()
},
)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 2);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].name, "name");
assert_eq!(branches[1].id, branch2_id);
assert_eq!(branches[1].name, "name 1");
let refnames = repository
.references()
.into_iter()
.filter_map(|reference| reference.name().map(|name| name.to_string()))
.collect::<Vec<_>>();
assert!(refnames.contains(&"refs/gitbutler/name".to_string()));
assert!(refnames.contains(&"refs/gitbutler/name-1".to_string()));
}
}
mod push_virtual_branch {
use super::*;
#[tokio::test]
async fn simple() {
let Test {
project_id,
controller,
repository,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch1_id = controller
.create_virtual_branch(
&project_id,
&branch::BranchCreateRequest {
name: Some("name".to_string()),
..Default::default()
},
)
.await
.unwrap();
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.create_commit(&project_id, &branch1_id, "test", None, false)
.await
.unwrap();
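// pushing should set the branch upstream to refs/remotes/origin/name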
controller
.push_virtual_branch(&project_id, &branch1_id, false, None)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].name, "name");
assert_eq!(
branches[0].upstream.as_ref().unwrap().name.to_string(),
"refs/remotes/origin/name"
);
let refnames = repository
.references()
.into_iter()
.filter_map(|reference| reference.name().map(|name| name.to_string()))
.collect::<Vec<_>>();
assert!(refnames.contains(&branches[0].upstream.clone().unwrap().name.to_string()));
}
#[tokio::test]
async fn duplicate_names() {
let Test {
project_id,
controller,
repository,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch1_id = {
// create and push branch with some work
let branch1_id = controller
.create_virtual_branch(
&project_id,
&branch::BranchCreateRequest {
name: Some("name".to_string()),
..Default::default()
},
)
.await
.unwrap();
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.create_commit(&project_id, &branch1_id, "test", None, false)
.await
.unwrap();
controller
.push_virtual_branch(&project_id, &branch1_id, false, None)
.await
.unwrap();
branch1_id
};
// rename first branch
controller
.update_virtual_branch(
&project_id,
branch::BranchUpdateRequest {
id: branch1_id,
name: Some("updated name".to_string()),
..Default::default()
},
)
.await
.unwrap();
let branch2_id = {
// create another branch with first branch's old name and push it
let branch2_id = controller
.create_virtual_branch(
&project_id,
&branch::BranchCreateRequest {
name: Some("name".to_string()),
..Default::default()
},
)
.await
.unwrap();
fs::write(repository.path().join("file.txt"), "updated content").unwrap();
controller
.create_commit(&project_id, &branch2_id, "test", None, false)
.await
.unwrap();
controller
.push_virtual_branch(&project_id, &branch2_id, false, None)
.await
.unwrap();
branch2_id
};
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 2);
// first branch is pushing to old ref remotely
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].name, "updated name");
assert_eq!(
branches[0].upstream.as_ref().unwrap().name,
"refs/remotes/origin/name".parse().unwrap()
);
// new branch is pushing to new ref remotely
assert_eq!(branches[1].id, branch2_id);
assert_eq!(branches[1].name, "name");
assert_eq!(
branches[1].upstream.as_ref().unwrap().name,
"refs/remotes/origin/name-1".parse().unwrap()
);
let refnames = repository
.references()
.into_iter()
.filter_map(|reference| reference.name().map(|name| name.to_string()))
.collect::<Vec<_>>();
assert!(refnames.contains(&branches[0].upstream.clone().unwrap().name.to_string()));
assert!(refnames.contains(&branches[1].upstream.clone().unwrap().name.to_string()));
}
}

View File

@ -0,0 +1,267 @@
use crate::suite::virtual_branches::Test;
use gitbutler_app::virtual_branches::{
branch, controller::ControllerError, errors::ResetBranchError,
};
use std::fs;
#[tokio::test]
async fn to_head() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch1_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let oid = {
fs::write(repository.path().join("file.txt"), "content").unwrap();
// commit changes
let oid = controller
.create_commit(&project_id, &branch1_id, "commit", None, false)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].commits.len(), 1);
assert_eq!(branches[0].commits[0].id, oid);
assert_eq!(branches[0].files.len(), 0);
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"content"
);
oid
};
{
// reset changes to head
controller
.reset_virtual_branch(&project_id, &branch1_id, oid)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].commits.len(), 1);
assert_eq!(branches[0].commits[0].id, oid);
assert_eq!(branches[0].files.len(), 0);
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"content"
);
}
}
#[tokio::test]
async fn to_target() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
let base_branch = controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch1_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
fs::write(repository.path().join("file.txt"), "content").unwrap();
// commit changes
let oid = controller
.create_commit(&project_id, &branch1_id, "commit", None, false)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].commits.len(), 1);
assert_eq!(branches[0].commits[0].id, oid);
assert_eq!(branches[0].files.len(), 0);
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"content"
);
}
{
// reset changes to the base branch target
controller
.reset_virtual_branch(&project_id, &branch1_id, base_branch.base_sha)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].commits.len(), 0);
assert_eq!(branches[0].files.len(), 1);
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"content"
);
}
}
#[tokio::test]
async fn to_commit() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch1_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let first_commit_oid = {
// commit some changes
fs::write(repository.path().join("file.txt"), "content").unwrap();
let oid = controller
.create_commit(&project_id, &branch1_id, "commit", None, false)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].commits.len(), 1);
assert_eq!(branches[0].commits[0].id, oid);
assert_eq!(branches[0].files.len(), 0);
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"content"
);
oid
};
{
// commit some more
fs::write(repository.path().join("file.txt"), "more content").unwrap();
let second_commit_oid = controller
.create_commit(&project_id, &branch1_id, "commit", None, false)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].commits.len(), 2);
assert_eq!(branches[0].commits[0].id, second_commit_oid);
assert_eq!(branches[0].commits[1].id, first_commit_oid);
assert_eq!(branches[0].files.len(), 0);
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"more content"
);
}
{
// reset changes to the first commit
controller
.reset_virtual_branch(&project_id, &branch1_id, first_commit_oid)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].commits.len(), 1);
assert_eq!(branches[0].commits[0].id, first_commit_oid);
assert_eq!(branches[0].files.len(), 1);
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"more content"
);
}
}
#[tokio::test]
async fn to_non_existing() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch1_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
fs::write(repository.path().join("file.txt"), "content").unwrap();
// commit changes
let oid = controller
.create_commit(&project_id, &branch1_id, "commit", None, false)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].commits.len(), 1);
assert_eq!(branches[0].commits[0].id, oid);
assert_eq!(branches[0].files.len(), 0);
assert_eq!(
fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"content"
);
}
assert!(matches!(
controller
.reset_virtual_branch(
&project_id,
&branch1_id,
"fe14df8c66b73c6276f7bb26102ad91da680afcb".parse().unwrap()
)
.await,
Err(ControllerError::Action(
ResetBranchError::CommitNotFoundInBranch(_)
))
));
}

View File

@ -0,0 +1,375 @@
use super::*;
#[tokio::test]
async fn unapplying_selected_branch_selects_another() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
std::fs::write(repository.path().join("file one.txt"), "").unwrap();
// first branch should be created as default
let b_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
// if default branch exists, new branch should not be created as default
let b2_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
let b = branches.iter().find(|b| b.id == b_id).unwrap();
let b2 = branches.iter().find(|b| b.id == b2_id).unwrap();
assert!(b.selected_for_changes);
assert!(!b2.selected_for_changes);
controller
.unapply_virtual_branch(&project_id, &b_id)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 2);
assert_eq!(branches[0].id, b.id);
assert!(!branches[0].selected_for_changes);
assert!(!branches[0].active);
assert_eq!(branches[1].id, b2.id);
assert!(branches[1].selected_for_changes);
assert!(branches[1].active);
}
#[tokio::test]
async fn deleting_selected_branch_selects_another() {
let Test {
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
// first branch should be created as default
let b_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
// if default branch exists, new branch should not be created as default
let b2_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
let b = branches.iter().find(|b| b.id == b_id).unwrap();
let b2 = branches.iter().find(|b| b.id == b2_id).unwrap();
assert!(b.selected_for_changes);
assert!(!b2.selected_for_changes);
controller
.delete_virtual_branch(&project_id, &b_id)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, b2.id);
assert!(branches[0].selected_for_changes);
}
#[tokio::test]
async fn create_virtual_branch_should_set_selected_for_changes() {
let Test {
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
// first branch should be created as default
let b_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == b_id)
.unwrap();
assert!(branch.selected_for_changes);
// if default branch exists, new branch should not be created as default
let b_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == b_id)
.unwrap();
assert!(!branch.selected_for_changes);
// explicitly don't make this one default
let b_id = controller
.create_virtual_branch(
&project_id,
&branch::BranchCreateRequest {
selected_for_changes: Some(false),
..Default::default()
},
)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == b_id)
.unwrap();
assert!(!branch.selected_for_changes);
// explicitly make this one default
let b_id = controller
.create_virtual_branch(
&project_id,
&branch::BranchCreateRequest {
selected_for_changes: Some(true),
..Default::default()
},
)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == b_id)
.unwrap();
assert!(branch.selected_for_changes);
}
#[tokio::test]
async fn update_virtual_branch_should_reset_selected_for_changes() {
let Test {
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let b1_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let b1 = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == b1_id)
.unwrap();
assert!(b1.selected_for_changes);
let b2_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let b2 = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == b2_id)
.unwrap();
assert!(!b2.selected_for_changes);
controller
.update_virtual_branch(
&project_id,
branch::BranchUpdateRequest {
id: b2_id,
selected_for_changes: Some(true),
..Default::default()
},
)
.await
.unwrap();
let b1 = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == b1_id)
.unwrap();
assert!(!b1.selected_for_changes);
let b2 = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == b2_id)
.unwrap();
assert!(b2.selected_for_changes);
}
#[tokio::test]
async fn unapply_virtual_branch_should_reset_selected_for_changes() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let b1_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
std::fs::write(repository.path().join("file.txt"), "content").unwrap();
let b1 = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == b1_id)
.unwrap();
assert!(b1.selected_for_changes);
controller
.unapply_virtual_branch(&project_id, &b1_id)
.await
.unwrap();
let b1 = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == b1_id)
.unwrap();
assert!(!b1.selected_for_changes);
}
#[tokio::test]
async fn hunks_distribution() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
std::fs::write(repository.path().join("file.txt"), "content").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches[0].files.len(), 1);
controller
.create_virtual_branch(
&project_id,
&branch::BranchCreateRequest {
selected_for_changes: Some(true),
..Default::default()
},
)
.await
.unwrap();
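// new changes should land on the branch that is selected for changes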
std::fs::write(repository.path().join("another_file.txt"), "content").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches[0].files.len(), 1);
assert_eq!(branches[1].files.len(), 1);
}
#[tokio::test]
async fn applying_first_branch() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
std::fs::write(repository.path().join("file.txt"), "content").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
controller
.unapply_virtual_branch(&project_id, &branches[0].id)
.await
.unwrap();
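// re-applying the branch should make it active and selected for changes again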
controller
.apply_virtual_branch(&project_id, &branches[0].id)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert!(branches[0].active);
assert!(branches[0].selected_for_changes);
}

View File

@ -0,0 +1,235 @@
use super::*;
#[tokio::test]
async fn success() {
let Test {
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
}
mod error {
use super::*;
#[tokio::test]
async fn missing() {
let Test {
project_id,
controller,
..
} = Test::default();
assert!(matches!(
controller
.set_base_branch(
&project_id,
&git::RemoteRefname::from_str("refs/remotes/origin/missing").unwrap(),
)
.await
.unwrap_err(),
ControllerError::Action(errors::SetBaseBranchError::BranchNotFound(_))
));
}
}
mod go_back_to_integration {
use pretty_assertions::assert_eq;
use super::*;
#[tokio::test]
async fn should_preserve_applied_vbranches() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
std::fs::write(repository.path().join("file.txt"), "one").unwrap();
let oid_one = repository.commit_all("one");
std::fs::write(repository.path().join("file.txt"), "two").unwrap();
repository.commit_all("two");
repository.push();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let vbranch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
std::fs::write(repository.path().join("another file.txt"), "content").unwrap();
controller
.create_commit(&project_id, &vbranch_id, "one", None, false)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
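// go back to an earlier commit of the target branch and set the base again; the applied virtual branch should be preserved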
repository.checkout_commit(oid_one);
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, vbranch_id);
assert!(branches[0].active);
}
#[tokio::test]
async fn from_target_branch_index_conflicts() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
std::fs::write(repository.path().join("file.txt"), "one").unwrap();
let oid_one = repository.commit_all("one");
std::fs::write(repository.path().join("file.txt"), "two").unwrap();
repository.commit_all("two");
repository.push();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert!(branches.is_empty());
repository.checkout_commit(oid_one);
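// dirty the work tree so switching back to the target branch would conflict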
std::fs::write(repository.path().join("file.txt"), "tree").unwrap();
assert!(matches!(
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap_err(),
ControllerError::Action(errors::SetBaseBranchError::DirtyWorkingDirectory)
));
}
#[tokio::test]
async fn from_target_branch_with_uncommitted() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
std::fs::write(repository.path().join("file.txt"), "one").unwrap();
let oid_one = repository.commit_all("one");
std::fs::write(repository.path().join("file.txt"), "two").unwrap();
repository.commit_all("two");
repository.push();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert!(branches.is_empty());
repository.checkout_commit(oid_one);
std::fs::write(repository.path().join("another file.txt"), "tree").unwrap();
assert!(matches!(
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.map_err(|error| dbg!(error))
.unwrap_err(),
ControllerError::Action(errors::SetBaseBranchError::DirtyWorkingDirectory)
));
}
#[tokio::test]
async fn from_target_branch_with_commit() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
std::fs::write(repository.path().join("file.txt"), "one").unwrap();
let oid_one = repository.commit_all("one");
std::fs::write(repository.path().join("file.txt"), "two").unwrap();
repository.commit_all("two");
repository.push();
let base = controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert!(branches.is_empty());
repository.checkout_commit(oid_one);
std::fs::write(repository.path().join("another file.txt"), "tree").unwrap();
repository.commit_all("three");
let base_two = controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 0);
assert_eq!(base_two, base);
}
#[tokio::test]
async fn from_target_branch_without_any_changes() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
std::fs::write(repository.path().join("file.txt"), "one").unwrap();
let oid_one = repository.commit_all("one");
std::fs::write(repository.path().join("file.txt"), "two").unwrap();
repository.commit_all("two");
repository.push();
let base = controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert!(branches.is_empty());
repository.checkout_commit(oid_one);
let base_two = controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 0);
assert_eq!(base_two, base);
}
}

View File

@ -0,0 +1,356 @@
use super::*;
#[tokio::test]
async fn head() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
fs::write(repository.path().join("file one.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap()
};
{
fs::write(repository.path().join("file two.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit two", None, false)
.await
.unwrap()
};
{
fs::write(repository.path().join("file three.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit three", None, false)
.await
.unwrap()
};
let commit_four_oid = {
fs::write(repository.path().join("file four.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit four", None, false)
.await
.unwrap()
};
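// squash the head commit (commit four) into its parent, commit three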
controller
.squash(&project_id, &branch_id, commit_four_oid)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
let descriptions = branch
.commits
.iter()
.map(|c| c.description.clone())
.collect::<Vec<_>>();
assert_eq!(
descriptions,
vec!["commit three\ncommit four", "commit two", "commit one"]
);
}
#[tokio::test]
async fn middle() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
fs::write(repository.path().join("file one.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap()
};
let commit_two_oid = {
fs::write(repository.path().join("file two.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit two", None, false)
.await
.unwrap()
};
{
fs::write(repository.path().join("file three.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit three", None, false)
.await
.unwrap()
};
{
fs::write(repository.path().join("file four.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit four", None, false)
.await
.unwrap()
};
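// squash commit two into its parent, commit one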
controller
.squash(&project_id, &branch_id, commit_two_oid)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
let descriptions = branch
.commits
.iter()
.map(|c| c.description.clone())
.collect::<Vec<_>>();
assert_eq!(
descriptions,
vec!["commit four", "commit three", "commit one\ncommit two"]
);
}
#[tokio::test]
async fn forcepush_allowed() {
let Test {
repository,
project_id,
controller,
projects,
..
} = Test::default();
projects
.update(&projects::UpdateRequest {
id: project_id,
ok_with_force_push: Some(true),
..Default::default()
})
.await
.unwrap();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
fs::write(repository.path().join("file one.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap()
};
controller
.push_virtual_branch(&project_id, &branch_id, false, None)
.await
.unwrap();
let commit_two_oid = {
fs::write(repository.path().join("file two.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit two", None, false)
.await
.unwrap()
};
{
fs::write(repository.path().join("file three.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit three", None, false)
.await
.unwrap()
};
{
fs::write(repository.path().join("file four.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit four", None, false)
.await
.unwrap()
};
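// squashing into the already-pushed commit rewrites published history, so force push is required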
controller
.squash(&project_id, &branch_id, commit_two_oid)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
let descriptions = branch
.commits
.iter()
.map(|c| c.description.clone())
.collect::<Vec<_>>();
assert_eq!(
descriptions,
vec!["commit four", "commit three", "commit one\ncommit two"]
);
assert!(branch.requires_force);
}
#[tokio::test]
async fn forcepush_forbidden() {
let Test {
repository,
project_id,
controller,
projects,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
fs::write(repository.path().join("file one.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap()
};
controller
.push_virtual_branch(&project_id, &branch_id, false, None)
.await
.unwrap();
projects
.update(&projects::UpdateRequest {
id: project_id,
ok_with_force_push: Some(false),
..Default::default()
})
.await
.unwrap();
let commit_two_oid = {
fs::write(repository.path().join("file two.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit two", None, false)
.await
.unwrap()
};
{
fs::write(repository.path().join("file three.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit three", None, false)
.await
.unwrap()
};
{
fs::write(repository.path().join("file four.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit four", None, false)
.await
.unwrap()
};
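// squashing into the pushed commit must fail because force push is disabled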
assert!(matches!(
controller
.squash(&project_id, &branch_id, commit_two_oid)
.await
.unwrap_err(),
ControllerError::Action(errors::SquashError::ForcePushNotAllowed(_))
));
}
#[tokio::test]
async fn root() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let commit_one_oid = {
fs::write(repository.path().join("file one.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap()
};
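// the only commit has no parent to squash into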
assert!(matches!(
controller
.squash(&project_id, &branch_id, commit_one_oid)
.await
.unwrap_err(),
ControllerError::Action(errors::SquashError::CantSquashRootCommit)
));
}

View File

@ -0,0 +1,177 @@
use super::*;
#[tokio::test]
async fn unapply_with_data() {
let Test {
project_id,
controller,
repository,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
std::fs::write(repository.path().join("file.txt"), "content").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
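// unapplying the branch removes its changes from the working directory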
controller
.unapply_virtual_branch(&project_id, &branches[0].id)
.await
.unwrap();
assert!(!repository.path().join("file.txt").exists());
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert!(!branches[0].active);
}
#[tokio::test]
async fn conflicting() {
let Test {
project_id,
controller,
repository,
..
} = Test::default();
// make sure we have an undiscovered commit in the remote branch
{
fs::write(repository.path().join("file.txt"), "first").unwrap();
let first_commit_oid = repository.commit_all("first");
fs::write(repository.path().join("file.txt"), "second").unwrap();
repository.commit_all("second");
repository.push();
repository.reset_hard(Some(first_commit_oid));
}
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = {
// make a conflicting branch, and stash it
std::fs::write(repository.path().join("file.txt"), "conflict").unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert!(branches[0].base_current);
assert!(branches[0].active);
assert_eq!(branches[0].files[0].hunks[0].diff, "@@ -1 +1 @@\n-first\n\\ No newline at end of file\n+conflict\n\\ No newline at end of file\n");
controller
.unapply_virtual_branch(&project_id, &branches[0].id)
.await
.unwrap();
branches[0].id
};
{
// update base branch, causing conflict
controller.update_base_branch(&project_id).await.unwrap();
assert_eq!(
std::fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"second"
);
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|branch| branch.id == branch_id)
.unwrap();
assert!(!branch.base_current);
assert!(!branch.active);
}
{
// apply branch, it should conflict
controller
.apply_virtual_branch(&project_id, &branch_id)
.await
.unwrap();
assert_eq!(
std::fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n"
);
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert!(branch.base_current);
assert!(branch.conflicted);
assert_eq!(branch.files[0].hunks[0].diff, "@@ -1 +1,5 @@\n-first\n\\ No newline at end of file\n+<<<<<<< ours\n+conflict\n+=======\n+second\n+>>>>>>> theirs\n");
}
{
controller
.unapply_virtual_branch(&project_id, &branch_id)
.await
.unwrap();
assert_eq!(
std::fs::read_to_string(repository.path().join("file.txt")).unwrap(),
"second"
);
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert!(!branch.active);
assert!(!branch.base_current);
assert!(!branch.conflicted);
assert_eq!(branch.files[0].hunks[0].diff, "@@ -1 +1 @@\n-first\n\\ No newline at end of file\n+conflict\n\\ No newline at end of file\n");
}
}
#[tokio::test]
async fn delete_if_empty() {
let Test {
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
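// unapplying a branch with no changes deletes it entirely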
controller
.unapply_virtual_branch(&project_id, &branches[0].id)
.await
.unwrap();
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 0);
}

View File

@ -0,0 +1,61 @@
use crate::suite::virtual_branches::Test;
use gitbutler_app::virtual_branches::branch;
use gitbutler_app::virtual_branches::branch::BranchOwnershipClaims;
use std::fs;
#[tokio::test]
async fn should_unapply_with_commits() {
let Test {
project_id,
controller,
repository,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
fs::write(
repository.path().join("file.txt"),
"1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n",
)
.unwrap();
controller
.create_commit(&project_id, &branch_id, "test", None, false)
.await
.unwrap();
// changes within the committed hunks lead to hunk locking
fs::write(
repository.path().join("file.txt"),
"_\n2\n3\n4\n5\n6\n7\n8\n9\n_\n",
)
.unwrap();
controller
.unapply_ownership(
&project_id,
&"file.txt:1-5,7-11"
.parse::<BranchOwnershipClaims>()
.unwrap(),
)
.await
.unwrap();
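// after unapplying the ownership, no uncommitted files should remain on the branch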
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
assert!(branch.files.is_empty());
}

File diff suppressed because it is too large

View File

@ -0,0 +1,384 @@
use super::*;
#[tokio::test]
async fn head() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
fs::write(repository.path().join("file one.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap()
};
{
fs::write(repository.path().join("file two.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit two", None, false)
.await
.unwrap()
};
let commit_three_oid = {
fs::write(repository.path().join("file three.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit three", None, false)
.await
.unwrap()
};
controller
.update_commit_message(
&project_id,
&branch_id,
commit_three_oid,
"commit three updated",
)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
let descriptions = branch
.commits
.iter()
.map(|c| c.description.clone())
.collect::<Vec<_>>();
assert_eq!(
descriptions,
vec!["commit three updated", "commit two", "commit one"]
);
}
#[tokio::test]
async fn middle() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
{
fs::write(repository.path().join("file one.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap()
};
let commit_two_oid = {
fs::write(repository.path().join("file two.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit two", None, false)
.await
.unwrap()
};
{
fs::write(repository.path().join("file three.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit three", None, false)
.await
.unwrap()
};
controller
.update_commit_message(
&project_id,
&branch_id,
commit_two_oid,
"commit two updated",
)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
let descriptions = branch
.commits
.iter()
.map(|c| c.description.clone())
.collect::<Vec<_>>();
assert_eq!(
descriptions,
vec!["commit three", "commit two updated", "commit one"]
);
}
#[tokio::test]
async fn forcepush_allowed() {
let Test {
repository,
project_id,
controller,
projects,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
projects
.update(&projects::UpdateRequest {
id: project_id,
ok_with_force_push: Some(true),
..Default::default()
})
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let commit_one_oid = {
fs::write(repository.path().join("file one.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap()
};
controller
.push_virtual_branch(&project_id, &branch_id, false, None)
.await
.unwrap();
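// rewording an already-pushed commit rewrites history, so the branch will require a force push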
controller
.update_commit_message(
&project_id,
&branch_id,
commit_one_oid,
"commit one updated",
)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
let descriptions = branch
.commits
.iter()
.map(|c| c.description.clone())
.collect::<Vec<_>>();
assert_eq!(descriptions, vec!["commit one updated"]);
assert!(branch.requires_force);
}
#[tokio::test]
async fn forcepush_forbidden() {
let Test {
repository,
project_id,
controller,
projects,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
projects
.update(&projects::UpdateRequest {
id: project_id,
ok_with_force_push: Some(false),
..Default::default()
})
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let commit_one_oid = {
fs::write(repository.path().join("file one.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap()
};
controller
.push_virtual_branch(&project_id, &branch_id, false, None)
.await
.unwrap();
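// rewording the pushed commit must fail because force push is disabled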
assert!(matches!(
controller
.update_commit_message(
&project_id,
&branch_id,
commit_one_oid,
"commit one updated",
)
.await
.unwrap_err(),
ControllerError::Action(errors::UpdateCommitMessageError::ForcePushNotAllowed(_))
));
}
#[tokio::test]
async fn root() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let commit_one_oid = {
fs::write(repository.path().join("file one.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap()
};
{
fs::write(repository.path().join("file two.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit two", None, false)
.await
.unwrap()
};
{
fs::write(repository.path().join("file three.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit three", None, false)
.await
.unwrap()
};
controller
.update_commit_message(
&project_id,
&branch_id,
commit_one_oid,
"commit one updated",
)
.await
.unwrap();
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch_id)
.unwrap();
let descriptions = branch
.commits
.iter()
.map(|c| c.description.clone())
.collect::<Vec<_>>();
assert_eq!(
descriptions,
vec!["commit three", "commit two", "commit one updated"]
);
}
#[tokio::test]
async fn empty() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let commit_one_oid = {
fs::write(repository.path().join("file one.txt"), "").unwrap();
controller
.create_commit(&project_id, &branch_id, "commit one", None, false)
.await
.unwrap()
};
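// an empty commit message should be rejected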
assert!(matches!(
controller
.update_commit_message(&project_id, &branch_id, commit_one_oid, "",)
.await,
Err(ControllerError::Action(
errors::UpdateCommitMessageError::EmptyMessage
))
));
}

View File

@ -0,0 +1,149 @@
use super::*;
#[tokio::test]
async fn detect_upstream_commits() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch1_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let oid1 = {
// create first commit
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.create_commit(&project_id, &branch1_id, "commit", None, false)
.await
.unwrap()
};
let oid2 = {
// create second commit
fs::write(repository.path().join("file.txt"), "content2").unwrap();
controller
.create_commit(&project_id, &branch1_id, "commit", None, false)
.await
.unwrap()
};
// push
controller
.push_virtual_branch(&project_id, &branch1_id, false, None)
.await
.unwrap();
let oid3 = {
// create third commit
fs::write(repository.path().join("file.txt"), "content3").unwrap();
controller
.create_commit(&project_id, &branch1_id, "commit", None, false)
.await
.unwrap()
};
{
// should correctly detect pushed commits
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].commits.len(), 3);
assert_eq!(branches[0].commits[0].id, oid3);
assert!(!branches[0].commits[0].is_remote);
assert_eq!(branches[0].commits[1].id, oid2);
assert!(branches[0].commits[1].is_remote);
assert_eq!(branches[0].commits[2].id, oid1);
assert!(branches[0].commits[2].is_remote);
}
}
#[tokio::test]
async fn detect_integrated_commits() {
let Test {
repository,
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
let branch1_id = controller
.create_virtual_branch(&project_id, &branch::BranchCreateRequest::default())
.await
.unwrap();
let oid1 = {
// create first commit
fs::write(repository.path().join("file.txt"), "content").unwrap();
controller
.create_commit(&project_id, &branch1_id, "commit", None, false)
.await
.unwrap()
};
let oid2 = {
// create second commit
fs::write(repository.path().join("file.txt"), "content2").unwrap();
controller
.create_commit(&project_id, &branch1_id, "commit", None, false)
.await
.unwrap()
};
// push
controller
.push_virtual_branch(&project_id, &branch1_id, false, None)
.await
.unwrap();
{
// merge branch upstream
let branch = controller
.list_virtual_branches(&project_id)
.await
.unwrap()
.0
.into_iter()
.find(|b| b.id == branch1_id)
.unwrap();
repository.merge(&branch.upstream.as_ref().unwrap().name);
repository.fetch();
}
let oid3 = {
// create third commit
fs::write(repository.path().join("file.txt"), "content3").unwrap();
controller
.create_commit(&project_id, &branch1_id, "commit", None, false)
.await
.unwrap()
};
{
// should correctly detect integrated commits
let (branches, _, _) = controller.list_virtual_branches(&project_id).await.unwrap();
assert_eq!(branches.len(), 1);
assert_eq!(branches[0].id, branch1_id);
assert_eq!(branches[0].commits.len(), 3);
assert_eq!(branches[0].commits[0].id, oid3);
assert!(!branches[0].commits[0].is_integrated);
assert_eq!(branches[0].commits[1].id, oid2);
assert!(branches[0].commits[1].is_integrated);
assert_eq!(branches[0].commits[2].id, oid1);
assert!(branches[0].commits[2].is_integrated);
}
}

View File

@ -0,0 +1,5 @@
use gitbutler_app::virtual_branches::Branch;
mod reader;
mod writer;

View File

@ -0,0 +1,96 @@
use std::sync::atomic::{AtomicUsize, Ordering};
use anyhow::Result;
use once_cell::sync::Lazy;
use crate::{Case, Suite};
use gitbutler_app::virtual_branches::branch::BranchOwnershipClaims;
use gitbutler_app::virtual_branches::{branch, Branch, BranchId};
static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_branch() -> Branch {
TEST_INDEX.fetch_add(1, Ordering::Relaxed);
Branch {
id: BranchId::generate(),
name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)),
notes: String::new(),
applied: true,
order: TEST_INDEX.load(Ordering::Relaxed),
upstream: Some(
format!(
"refs/remotes/origin/upstream_{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
),
upstream_head: Some(
format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
),
created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128,
updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128,
head: format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
tree: format!(
"0123456789abcdef0123456789abcdef012345{}",
TEST_INDEX.load(Ordering::Relaxed) + 10
)
.parse()
.unwrap(),
ownership: BranchOwnershipClaims {
claims: vec![format!("file/{}:1-2", TEST_INDEX.load(Ordering::Relaxed))
.parse()
.unwrap()],
},
selected_for_changes: Some(1),
}
}
#[test]
fn test_read_not_found() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let session = gb_repository.get_or_create_current_session()?;
let session_reader = gitbutler_app::sessions::Reader::open(&gb_repository, &session)?;
let reader = branch::Reader::new(&session_reader);
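// reading a branch id that was never written should fail with "file not found"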
let result = reader.read(&BranchId::generate());
assert!(result.is_err());
assert_eq!(result.unwrap_err().to_string(), "file not found");
Ok(())
}
#[test]
fn test_read_override() -> Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
writer.write(&mut branch)?;
let session = gb_repository.get_current_session()?.unwrap();
let session_reader = gitbutler_app::sessions::Reader::open(&gb_repository, &session)?;
let reader = branch::Reader::new(&session_reader);
assert_eq!(branch, reader.read(&branch.id).unwrap());
Ok(())
}

View File

@ -0,0 +1,217 @@
use std::{
fs,
sync::atomic::{AtomicUsize, Ordering},
};
use anyhow::Context;
use gitbutler_app::virtual_branches::branch;
use once_cell::sync::Lazy;
use crate::{Case, Suite};
use self::branch::BranchId;
use super::*;
static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_branch() -> Branch {
TEST_INDEX.fetch_add(1, Ordering::Relaxed);
Branch {
id: BranchId::generate(),
name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)),
notes: String::new(),
applied: true,
upstream: Some(
format!(
"refs/remotes/origin/upstream_{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
),
upstream_head: None,
created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128,
updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128,
head: format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
tree: format!(
"0123456789abcdef0123456789abcdef012345{}",
TEST_INDEX.load(Ordering::Relaxed) + 10
)
.parse()
.unwrap(),
ownership: gitbutler_app::virtual_branches::branch::BranchOwnershipClaims {
claims: vec![gitbutler_app::virtual_branches::branch::OwnershipClaim {
file_path: format!("file/{}:1-2", TEST_INDEX.load(Ordering::Relaxed)).into(),
hunks: vec![],
}],
},
order: TEST_INDEX.load(Ordering::Relaxed),
selected_for_changes: Some(1),
}
}
#[test]
fn test_write_branch() -> anyhow::Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
writer.write(&mut branch)?;
let root = gb_repository
.root()
.join("branches")
.join(branch.id.to_string());
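// branch fields are persisted as individual files under branches/<id>/meta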
assert_eq!(
fs::read_to_string(root.join("meta").join("name").to_str().unwrap())
.context("Failed to read branch name")?,
branch.name
);
assert_eq!(
fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())?
.parse::<bool>()
.context("Failed to read branch applied")?,
branch.applied
);
assert_eq!(
fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap())
.context("Failed to read branch upstream")?,
branch.upstream.clone().unwrap().to_string()
);
assert_eq!(
fs::read_to_string(
root.join("meta")
.join("created_timestamp_ms")
.to_str()
.unwrap()
)
.context("Failed to read branch created timestamp")?
.parse::<u128>()
.context("Failed to parse branch created timestamp")?,
branch.created_timestamp_ms
);
assert_eq!(
fs::read_to_string(
root.join("meta")
.join("updated_timestamp_ms")
.to_str()
.unwrap()
)
.context("Failed to read branch updated timestamp")?
.parse::<u128>()
.context("Failed to parse branch updated timestamp")?,
branch.updated_timestamp_ms
);
writer.delete(&branch)?;
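// deleting the branch should remove its directory entirely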
fs::read_dir(root).unwrap_err();
Ok(())
}
#[test]
fn test_should_create_session() -> anyhow::Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
writer.write(&mut branch)?;
assert!(gb_repository.get_current_session()?.is_some());
Ok(())
}
#[test]
fn test_should_update() -> anyhow::Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
writer.write(&mut branch)?;
let mut updated_branch = Branch {
name: "updated_name".to_string(),
applied: false,
upstream: Some("refs/remotes/origin/upstream_updated".parse().unwrap()),
created_timestamp_ms: 2,
updated_timestamp_ms: 3,
ownership: gitbutler_app::virtual_branches::branch::BranchOwnershipClaims {
claims: vec![],
},
..branch.clone()
};
writer.write(&mut updated_branch)?;
let root = gb_repository
.root()
.join("branches")
.join(branch.id.to_string());
assert_eq!(
fs::read_to_string(root.join("meta").join("name").to_str().unwrap())
.context("Failed to read branch name")?,
updated_branch.name
);
assert_eq!(
fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())?
.parse::<bool>()
.context("Failed to read branch applied")?,
updated_branch.applied
);
assert_eq!(
fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap())
.context("Failed to read branch upstream")?,
updated_branch.upstream.unwrap().to_string()
);
assert_eq!(
fs::read_to_string(
root.join("meta")
.join("created_timestamp_ms")
.to_str()
.unwrap()
)
.context("Failed to read branch created timestamp")?
.parse::<u128>()
.context("Failed to parse branch created timestamp")?,
updated_branch.created_timestamp_ms
);
assert_eq!(
fs::read_to_string(
root.join("meta")
.join("updated_timestamp_ms")
.to_str()
.unwrap()
)
.context("Failed to read branch updated timestamp")?
.parse::<u128>()
.context("Failed to parse branch updated timestamp")?,
updated_branch.updated_timestamp_ms
);
Ok(())
}

View File

@ -0,0 +1,115 @@
use std::sync::atomic::{AtomicUsize, Ordering};
use anyhow::Result;
use gitbutler_app::virtual_branches;
use once_cell::sync::Lazy;
use crate::{Case, Suite};
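// Shared counter so each generated branch fixture gets unique names, refs, hashes, and timestamps.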
static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_branch() -> virtual_branches::branch::Branch {
TEST_INDEX.fetch_add(1, Ordering::Relaxed);
virtual_branches::branch::Branch {
id: virtual_branches::BranchId::generate(),
name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)),
notes: String::new(),
applied: true,
upstream: Some(
format!(
"refs/remotes/origin/upstream_{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
),
upstream_head: None,
created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128,
updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128,
head: format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
tree: format!(
"0123456789abcdef0123456789abcdef012345{}",
TEST_INDEX.load(Ordering::Relaxed) + 10
)
.parse()
.unwrap(),
ownership: virtual_branches::branch::BranchOwnershipClaims::default(),
order: TEST_INDEX.load(Ordering::Relaxed),
selected_for_changes: Some(1),
}
}
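// Separate counter for target fixtures, independent of the branch counter above.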
static TEST_TARGET_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_target() -> virtual_branches::target::Target {
virtual_branches::target::Target {
branch: format!(
"refs/remotes/branch name{}/remote name {}",
TEST_TARGET_INDEX.load(Ordering::Relaxed),
TEST_TARGET_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
remote_url: format!("remote url {}", TEST_TARGET_INDEX.load(Ordering::Relaxed)),
sha: format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_TARGET_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
}
}
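// A fresh session with no branches written should yield an empty iterator.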
#[test]
fn test_empty_iterator() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let session = gb_repository.get_or_create_current_session()?;
let session_reader = gitbutler_app::sessions::Reader::open(&gb_repository, &session)?;
let iter = virtual_branches::Iterator::new(&session_reader)?;
assert_eq!(iter.count(), 0);
Ok(())
}
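// After writing a default target and three branches, the iterator should yield all three.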
#[test]
fn test_iterate_all() -> Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let target_writer =
gitbutler_app::virtual_branches::target::Writer::new(&gb_repository, project.gb_dir())?;
target_writer.write_default(&test_target())?;
let branch_writer =
gitbutler_app::virtual_branches::branch::Writer::new(&gb_repository, project.gb_dir())?;
let mut branch_1 = test_branch();
branch_writer.write(&mut branch_1)?;
let mut branch_2 = test_branch();
branch_writer.write(&mut branch_2)?;
let mut branch_3 = test_branch();
branch_writer.write(&mut branch_3)?;
let session = gb_repository.get_current_session()?.unwrap();
let session_reader = gitbutler_app::sessions::Reader::open(&gb_repository, &session)?;
let iter = virtual_branches::Iterator::new(&session_reader)?
.collect::<Result<Vec<_>, gitbutler_app::reader::Error>>()?;
assert_eq!(iter.len(), 3);
assert!(iter.contains(&branch_1));
assert!(iter.contains(&branch_2));
assert!(iter.contains(&branch_3));
Ok(())
}

View File

@ -0,0 +1,2 @@
mod reader;
mod writer;

View File

@ -0,0 +1,147 @@
use gitbutler_app::virtual_branches::target::Target;
use gitbutler_app::virtual_branches::{target, BranchId};
use std::sync::atomic::{AtomicUsize, Ordering};
use anyhow::Result;
use once_cell::sync::Lazy;
use crate::{Case, Suite};
static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_branch() -> gitbutler_app::virtual_branches::branch::Branch {
TEST_INDEX.fetch_add(1, Ordering::Relaxed);
gitbutler_app::virtual_branches::branch::Branch {
id: BranchId::generate(),
name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)),
notes: String::new(),
applied: true,
upstream: Some(
format!(
"refs/remotes/origin/upstream_{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
),
upstream_head: None,
created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128,
updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128,
head: format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
tree: format!(
"0123456789abcdef0123456789abcdef012345{}",
(TEST_INDEX.load(Ordering::Relaxed) + 10)
)
.parse()
.unwrap(),
ownership: gitbutler_app::virtual_branches::branch::BranchOwnershipClaims {
claims: vec![gitbutler_app::virtual_branches::branch::OwnershipClaim {
file_path: format!("file/{}", TEST_INDEX.load(Ordering::Relaxed)).into(),
hunks: vec![],
}],
},
order: TEST_INDEX.load(Ordering::Relaxed),
selected_for_changes: None,
}
}
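// Reading a target for an unknown branch id should fail with a "file not found" error.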
#[test]
fn test_read_not_found() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let session = gb_repository.get_or_create_current_session()?;
let session_reader = gitbutler_app::sessions::Reader::open(&gb_repository, &session)?;
let reader = target::Reader::new(&session_reader);
let result = reader.read(&BranchId::generate());
assert!(result.is_err());
assert_eq!(result.unwrap_err().to_string(), "file not found");
Ok(())
}
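// The reader should still understand the older on-disk layout that stored the
// default target as separate name/remote/sha files.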
#[test]
fn test_read_deprecated_format() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let writer = gitbutler_app::writer::DirWriter::open(gb_repository.root())?;
writer
.write_string("branches/target/name", "origin/master")
.unwrap();
writer
.write_string(
"branches/target/remote",
"git@github.com:gitbutlerapp/gitbutler.git",
)
.unwrap();
writer
.write_string(
"branches/target/sha",
"dd945831869e9593448aa622fa4342bbfb84813d",
)
.unwrap();
let session = gb_repository.get_or_create_current_session()?;
let session_reader = gitbutler_app::sessions::Reader::open(&gb_repository, &session)?;
let reader = target::Reader::new(&session_reader);
let read = reader.read_default().unwrap();
assert_eq!(read.branch.branch(), "master");
assert_eq!(read.branch.remote(), "origin");
assert_eq!(read.remote_url, "git@github.com:gitbutlerapp/gitbutler.git");
assert_eq!(
read.sha.to_string(),
"dd945831869e9593448aa622fa4342bbfb84813d"
);
Ok(())
}
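// A branch-specific target, once written, should take precedence over the default target.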
#[test]
fn test_read_override_target() -> Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let target = Target {
branch: "refs/remotes/remote/branch".parse().unwrap(),
remote_url: "remote url".to_string(),
sha: "fedcba9876543210fedcba9876543210fedcba98".parse().unwrap(),
};
let default_target = Target {
branch: "refs/remotes/default remote/default branch"
.parse()
.unwrap(),
remote_url: "default remote url".to_string(),
sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(),
};
let branch_writer =
gitbutler_app::virtual_branches::branch::Writer::new(&gb_repository, project.gb_dir())?;
branch_writer.write(&mut branch)?;
let session = gb_repository.get_current_session()?.unwrap();
let session_reader = gitbutler_app::sessions::Reader::open(&gb_repository, &session)?;
let target_writer = target::Writer::new(&gb_repository, project.gb_dir())?;
let reader = target::Reader::new(&session_reader);
target_writer.write_default(&default_target)?;
assert_eq!(default_target, reader.read(&branch.id)?);
target_writer.write(&branch.id, &target)?;
assert_eq!(target, reader.read(&branch.id)?);
Ok(())
}

View File

@ -0,0 +1,210 @@
use anyhow::Context;
use std::{
fs,
sync::atomic::{AtomicUsize, Ordering},
};
use once_cell::sync::Lazy;
use crate::{Case, Suite};
use gitbutler_app::virtual_branches::target::Target;
use gitbutler_app::virtual_branches::{branch, target, BranchId};
static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_branch() -> branch::Branch {
TEST_INDEX.fetch_add(1, Ordering::Relaxed);
branch::Branch {
id: BranchId::generate(),
name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)),
notes: format!("branch_notes_{}", TEST_INDEX.load(Ordering::Relaxed)),
applied: true,
created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128,
upstream: Some(
format!(
"refs/remotes/origin/upstream_{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
),
upstream_head: None,
updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128,
head: format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
tree: format!(
"0123456789abcdef0123456789abcdef012345{}",
TEST_INDEX.load(Ordering::Relaxed) + 10
)
.parse()
.unwrap(),
ownership: branch::BranchOwnershipClaims {
claims: vec![branch::OwnershipClaim {
file_path: format!("file/{}", TEST_INDEX.load(Ordering::Relaxed)).into(),
hunks: vec![],
}],
},
order: TEST_INDEX.load(Ordering::Relaxed),
selected_for_changes: None,
}
}
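// Writing a branch target persists its fields under <branch dir>/target alongside the branch metadata.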
#[test]
fn test_write() -> anyhow::Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let target = Target {
branch: "refs/remotes/remote name/branch name".parse().unwrap(),
remote_url: "remote url".to_string(),
sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(),
};
let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
branch_writer.write(&mut branch)?;
let target_writer = target::Writer::new(&gb_repository, project.gb_dir())?;
target_writer.write(&branch.id, &target)?;
let root = gb_repository
.root()
.join("branches")
.join(branch.id.to_string());
assert_eq!(
fs::read_to_string(root.join("meta").join("name").to_str().unwrap())
.context("Failed to read branch name")?,
branch.name
);
assert_eq!(
fs::read_to_string(root.join("target").join("branch_name").to_str().unwrap())
.context("Failed to read branch target name")?,
format!("{}/{}", target.branch.remote(), target.branch.branch())
);
assert_eq!(
fs::read_to_string(root.join("target").join("remote_name").to_str().unwrap())
.context("Failed to read branch target name name")?,
target.branch.remote()
);
assert_eq!(
fs::read_to_string(root.join("target").join("remote_url").to_str().unwrap())
.context("Failed to read branch target remote url")?,
target.remote_url
);
assert_eq!(
fs::read_to_string(root.join("target").join("sha").to_str().unwrap())
.context("Failed to read branch target sha")?,
target.sha.to_string()
);
assert_eq!(
fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())?
.parse::<bool>()
.context("Failed to read branch applied")?,
branch.applied
);
assert_eq!(
fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap())
.context("Failed to read branch upstream")?,
branch.upstream.unwrap().to_string()
);
assert_eq!(
fs::read_to_string(
root.join("meta")
.join("created_timestamp_ms")
.to_str()
.unwrap()
)
.context("Failed to read branch created timestamp")?
.parse::<u128>()
.context("Failed to parse branch created timestamp")?,
branch.created_timestamp_ms
);
assert_eq!(
fs::read_to_string(
root.join("meta")
.join("updated_timestamp_ms")
.to_str()
.unwrap()
)
.context("Failed to read branch updated timestamp")?
.parse::<u128>()
.context("Failed to parse branch updated timestamp")?,
branch.updated_timestamp_ms
);
Ok(())
}
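// Re-writing the target for the same branch should replace the previously stored values.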
#[test]
fn test_should_update() -> anyhow::Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let target = Target {
branch: "refs/remotes/remote name/branch name".parse().unwrap(),
remote_url: "remote url".to_string(),
sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(),
};
let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
branch_writer.write(&mut branch)?;
let target_writer = target::Writer::new(&gb_repository, project.gb_dir())?;
target_writer.write(&branch.id, &target)?;
let updated_target = Target {
branch: "refs/remotes/updated remote name/updated branch name"
.parse()
.unwrap(),
remote_url: "updated remote url".to_string(),
sha: "fedcba9876543210fedcba9876543210fedcba98".parse().unwrap(),
};
target_writer.write(&branch.id, &updated_target)?;
let root = gb_repository
.root()
.join("branches")
.join(branch.id.to_string());
assert_eq!(
fs::read_to_string(root.join("target").join("branch_name").to_str().unwrap())
.context("Failed to read branch target branch name")?,
format!(
"{}/{}",
updated_target.branch.remote(),
updated_target.branch.branch()
)
);
assert_eq!(
fs::read_to_string(root.join("target").join("remote_name").to_str().unwrap())
.context("Failed to read branch target remote name")?,
updated_target.branch.remote()
);
assert_eq!(
fs::read_to_string(root.join("target").join("remote_url").to_str().unwrap())
.context("Failed to read branch target remote url")?,
updated_target.remote_url
);
assert_eq!(
fs::read_to_string(root.join("target").join("sha").to_str().unwrap())
.context("Failed to read branch target sha")?,
updated_target.sha.to_string()
);
Ok(())
}

File diff suppressed because it is too large