chore(app): additional minor fixes

Created on the fly while perusing the test suite.

* avoid the unnecessary `test_` prefix on test function names
* move more (and previously missed) test modules to integration level
* make `dedup` crate-private so there is a reason to keep its tests where they are; do the same for similar functions
This commit is contained in:
Sebastian Thiel 2024-03-28 13:45:37 +01:00
parent 2e1bcfd7c4
commit bbf4d7c817
No known key found for this signature in database
GPG Key ID: 9CB5EE7895E8268B
47 changed files with 3155 additions and 3201 deletions

View File

@ -85,7 +85,7 @@ mod tests {
}
#[tokio::test]
async fn test_retry() {
async fn retry() {
let inner_client = MockClient::new();
let retry_client = super::Client::new(inner_client.clone());

View File

@ -1,10 +1,10 @@
pub fn dedup(existing: &[&str], new: &str) -> String {
pub(crate) fn dedup(existing: &[&str], new: &str) -> String {
dedup_fmt(existing, new, " ")
}
/// Makes sure that _new_ is not in _existing_ by adding a number to it.
/// the number is increased until the name is unique.
pub fn dedup_fmt(existing: &[&str], new: &str, separator: &str) -> String {
pub(crate) fn dedup_fmt(existing: &[&str], new: &str, separator: &str) -> String {
existing
.iter()
.filter_map(|x| {
@ -26,7 +26,7 @@ pub fn dedup_fmt(existing: &[&str], new: &str, separator: &str) -> String {
}
#[test]
fn test_dedup() {
fn tests() {
for (existing, new, expected) in [
(vec!["bar", "baz"], "foo", "foo"),
(vec!["foo", "bar", "baz"], "foo", "foo 1"),

View File

@ -83,270 +83,3 @@ impl Display for Document {
write!(f, "{}", self.doc.iter().collect::<String>())
}
}
#[cfg(test)]
mod tests {
use self::{delta::Delta, operations::Operation};
use super::*;
#[test]
fn test_new() {
let document = Document::new(
Some(&reader::Content::UTF8("hello world".to_string())),
vec![],
);
assert!(document.is_ok());
let document = document.unwrap();
assert_eq!(document.to_string(), "hello world");
assert_eq!(document.get_deltas().len(), 0);
}
#[test]
fn test_update() {
let document = Document::new(
Some(&reader::Content::UTF8("hello world".to_string())),
vec![],
);
assert!(document.is_ok());
let mut document = document.unwrap();
document
.update(Some(&reader::Content::UTF8("hello world!".to_string())))
.unwrap();
assert_eq!(document.to_string(), "hello world!");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
assert_eq!(
document.get_deltas()[0].operations[0],
Operation::Insert((11, "!".to_string()))
);
}
#[test]
fn test_empty() {
let document = Document::new(None, vec![]);
assert!(document.is_ok());
let mut document = document.unwrap();
document
.update(Some(&reader::Content::UTF8("hello world!".to_string())))
.unwrap();
assert_eq!(document.to_string(), "hello world!");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
assert_eq!(
document.get_deltas()[0].operations[0],
Operation::Insert((0, "hello world!".to_string()))
);
}
#[test]
fn test_from_deltas() {
let document = Document::new(
None,
vec![
Delta {
timestamp_ms: 0,
operations: vec![Operation::Insert((0, "hello".to_string()))],
},
Delta {
timestamp_ms: 1,
operations: vec![Operation::Insert((5, " world".to_string()))],
},
Delta {
timestamp_ms: 2,
operations: vec![
Operation::Delete((3, 7)),
Operation::Insert((4, "!".to_string())),
],
},
],
);
assert!(document.is_ok());
let document = document.unwrap();
assert_eq!(document.to_string(), "held!");
}
#[test]
fn test_complex_line() {
let document = Document::new(None, vec![]);
assert!(document.is_ok());
let mut document = document.unwrap();
document
.update(Some(&reader::Content::UTF8("hello".to_string())))
.unwrap();
assert_eq!(document.to_string(), "hello");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
assert_eq!(
document.get_deltas()[0].operations[0],
Operation::Insert((0, "hello".to_string()))
);
document
.update(Some(&reader::Content::UTF8("hello world".to_string())))
.unwrap();
assert_eq!(document.to_string(), "hello world");
assert_eq!(document.get_deltas().len(), 2);
assert_eq!(document.get_deltas()[1].operations.len(), 1);
assert_eq!(
document.get_deltas()[1].operations[0],
Operation::Insert((5, " world".to_string()))
);
document
.update(Some(&reader::Content::UTF8("held!".to_string())))
.unwrap();
assert_eq!(document.to_string(), "held!");
assert_eq!(document.get_deltas().len(), 3);
assert_eq!(document.get_deltas()[2].operations.len(), 2);
assert_eq!(
document.get_deltas()[2].operations[0],
Operation::Delete((3, 7))
);
assert_eq!(
document.get_deltas()[2].operations[1],
Operation::Insert((4, "!".to_string())),
);
}
#[test]
fn test_multiline_add() {
let document = Document::new(None, vec![]);
assert!(document.is_ok());
let mut document = document.unwrap();
document
.update(Some(&reader::Content::UTF8("first".to_string())))
.unwrap();
assert_eq!(document.to_string(), "first");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
assert_eq!(
document.get_deltas()[0].operations[0],
Operation::Insert((0, "first".to_string()))
);
document
.update(Some(&reader::Content::UTF8("first\ntwo".to_string())))
.unwrap();
assert_eq!(document.to_string(), "first\ntwo");
assert_eq!(document.get_deltas().len(), 2);
assert_eq!(document.get_deltas()[1].operations.len(), 1);
assert_eq!(
document.get_deltas()[1].operations[0],
Operation::Insert((5, "\ntwo".to_string()))
);
document
.update(Some(&reader::Content::UTF8(
"first line\nline two".to_string(),
)))
.unwrap();
assert_eq!(document.to_string(), "first line\nline two");
assert_eq!(document.get_deltas().len(), 3);
assert_eq!(document.get_deltas()[2].operations.len(), 2);
assert_eq!(
document.get_deltas()[2].operations[0],
Operation::Insert((5, " line".to_string()))
);
assert_eq!(
document.get_deltas()[2].operations[1],
Operation::Insert((11, "line ".to_string()))
);
}
#[test]
fn test_multiline_remove() {
let document = Document::new(None, vec![]);
assert!(document.is_ok());
let mut document = document.unwrap();
document
.update(Some(&reader::Content::UTF8(
"first line\nline two".to_string(),
)))
.unwrap();
assert_eq!(document.to_string(), "first line\nline two");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
assert_eq!(
document.get_deltas()[0].operations[0],
Operation::Insert((0, "first line\nline two".to_string()))
);
document
.update(Some(&reader::Content::UTF8("first\ntwo".to_string())))
.unwrap();
assert_eq!(document.to_string(), "first\ntwo");
assert_eq!(document.get_deltas().len(), 2);
assert_eq!(document.get_deltas()[1].operations.len(), 2);
assert_eq!(
document.get_deltas()[1].operations[0],
Operation::Delete((5, 5))
);
assert_eq!(
document.get_deltas()[1].operations[1],
Operation::Delete((6, 5))
);
document
.update(Some(&reader::Content::UTF8("first".to_string())))
.unwrap();
assert_eq!(document.to_string(), "first");
assert_eq!(document.get_deltas().len(), 3);
assert_eq!(document.get_deltas()[2].operations.len(), 1);
assert_eq!(
document.get_deltas()[2].operations[0],
Operation::Delete((5, 4))
);
document.update(None).unwrap();
assert_eq!(document.to_string(), "");
assert_eq!(document.get_deltas().len(), 4);
assert_eq!(document.get_deltas()[3].operations.len(), 1);
assert_eq!(
document.get_deltas()[3].operations[0],
Operation::Delete((0, 5))
);
}
#[test]
fn test_binary_to_text() {
let latest = reader::Content::Binary;
let current = reader::Content::UTF8("test".to_string());
let mut document = Document::new(Some(&latest), vec![]).unwrap();
let new_deltas = document.update(Some(&current)).unwrap();
assert!(new_deltas.is_some());
assert_eq!(document.to_string(), "test");
}
#[test]
fn test_binary_to_binary() {
let latest = reader::Content::Binary;
let current = reader::Content::Binary;
let mut document = Document::new(Some(&latest), vec![]).unwrap();
let new_deltas = document.update(Some(&current)).unwrap();
assert!(new_deltas.is_some());
assert_eq!(document.to_string(), "");
}
#[test]
fn test_text_to_binary() {
let latest = reader::Content::UTF8("text".to_string());
let current = reader::Content::Binary;
let mut document = Document::new(Some(&latest), vec![]).unwrap();
let new_deltas = document.update(Some(&current)).unwrap();
assert!(new_deltas.is_some());
assert_eq!(document.to_string(), "");
}
#[test]
fn test_unicode() {
let latest = reader::Content::UTF8("\u{1f31a}".to_string());
let current = reader::Content::UTF8("\u{1f31d}".to_string());
let mut document = Document::new(Some(&latest), vec![]).unwrap();
document.update(Some(&current)).unwrap();
assert_eq!(document.to_string(), "\u{1f31d}");
}
}

View File

@ -114,62 +114,3 @@ pub fn get_delta_operations(initial_text: &str, final_text: &str) -> Vec<Operati
merge_touching(&deltas)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_get_delta_operations_insert_end() {
let initial_text = "hello";
let final_text = "hello world!";
let operations = get_delta_operations(initial_text, final_text);
assert_eq!(operations.len(), 1);
assert_eq!(operations[0], Operation::Insert((5, " world!".to_string())));
}
#[test]
fn test_get_delta_operations_insert_middle() {
let initial_text = "helloworld";
let final_text = "hello, world";
let operations = get_delta_operations(initial_text, final_text);
assert_eq!(operations.len(), 1);
assert_eq!(operations[0], Operation::Insert((5, ", ".to_string())));
}
#[test]
fn test_get_delta_operations_insert_begin() {
let initial_text = "world";
let final_text = "hello world";
let operations = get_delta_operations(initial_text, final_text);
assert_eq!(operations.len(), 1);
assert_eq!(operations[0], Operation::Insert((0, "hello ".to_string())));
}
#[test]
fn test_get_delta_operations_delete_end() {
let initial_text = "hello world!";
let final_text = "hello";
let operations = get_delta_operations(initial_text, final_text);
assert_eq!(operations.len(), 1);
assert_eq!(operations[0], Operation::Delete((5, 7)));
}
#[test]
fn test_get_delta_operations_delete_middle() {
let initial_text = "hello, world";
let final_text = "helloworld";
let operations = get_delta_operations(initial_text, final_text);
assert_eq!(operations.len(), 1);
assert_eq!(operations[0], Operation::Delete((5, 2)));
}
#[test]
fn test_get_delta_operations_delete_begin() {
let initial_text = "hello world";
let final_text = "world";
let operations = get_delta_operations(initial_text, final_text);
assert_eq!(operations.len(), 1);
assert_eq!(operations[0], Operation::Delete((0, 6)));
}
}

View File

@ -157,7 +157,7 @@ pub mod gb {
use super::*;
#[test]
fn test_error_context() {
fn error_context() {
fn low_level_io() -> std::result::Result<(), std::io::Error> {
Err(std::io::Error::new(std::io::ErrorKind::Other, "oh no!"))
}

View File

@ -8,7 +8,7 @@ pub enum ConvertError {
UnsupportedPair { from: Scheme, to: Scheme },
}
pub fn to_https_url(url: &Url) -> Result<Url, ConvertError> {
pub(crate) fn to_https_url(url: &Url) -> Result<Url, ConvertError> {
match url.scheme {
Scheme::Https => Ok(url.clone()),
Scheme::Http => Ok(Url {
@ -33,7 +33,7 @@ pub fn to_https_url(url: &Url) -> Result<Url, ConvertError> {
}
}
pub fn to_ssh_url(url: &Url) -> Result<Url, ConvertError> {
pub(crate) fn to_ssh_url(url: &Url) -> Result<Url, ConvertError> {
match url.scheme {
Scheme::Ssh => Ok(url.clone()),
Scheme::Http | Scheme::Https => Ok(Url {

View File

@ -125,42 +125,3 @@ impl<'de> Deserialize<'de> for PublicKey {
Self::from_str(s.as_str()).map_err(serde::de::Error::custom)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_to_from_string_private() {
let private_key = PrivateKey::generate();
let serialized = private_key.to_string();
let deserialized: PrivateKey = serialized.parse().unwrap();
assert_eq!(private_key, deserialized);
}
#[test]
fn test_to_from_string_public() {
let private_key = PrivateKey::generate();
let public_key = private_key.public_key();
let serialized = public_key.to_string();
let deserialized: PublicKey = serialized.parse().unwrap();
assert_eq!(public_key, deserialized);
}
#[test]
fn test_serde_private() {
let private_key = PrivateKey::generate();
let serialized = serde_json::to_string(&private_key).unwrap();
let deserialized: PrivateKey = serde_json::from_str(&serialized).unwrap();
assert_eq!(private_key, deserialized);
}
#[test]
fn test_serde_public() {
let private_key = PrivateKey::generate();
let public_key = private_key.public_key();
let serialized = serde_json::to_string(&public_key).unwrap();
let deserialized: PublicKey = serde_json::from_str(&serialized).unwrap();
assert_eq!(public_key, deserialized);
}
}

View File

@ -88,26 +88,3 @@ impl core::ops::Not for DefaultTrue {
!self.0
}
}
#[cfg(test)]
mod tests {
use super::DefaultTrue;
#[test]
#[allow(clippy::bool_assert_comparison)]
fn test_default_true() {
let default_true = DefaultTrue::default();
assert!(default_true);
assert_eq!(default_true, true);
assert_eq!(!default_true, false);
assert!(!!default_true);
if !(*default_true) {
unreachable!("default_true is false")
}
let mut default_true = DefaultTrue::default();
*default_true = false;
assert!(!default_true);
}
}

View File

@ -176,158 +176,3 @@ impl fmt::Display for OwnershipClaim {
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_ownership() {
let ownership: OwnershipClaim = "foo/bar.rs:1-2,4-5".parse().unwrap();
assert_eq!(
ownership,
OwnershipClaim {
file_path: "foo/bar.rs".into(),
hunks: vec![(1..=2).into(), (4..=5).into()]
}
);
}
#[test]
fn parse_ownership_tricky_file_name() {
assert_eq!("file:name:1-2,4-5".parse::<OwnershipClaim>().unwrap(), {
OwnershipClaim {
file_path: "file:name".into(),
hunks: vec![(1..=2).into(), (4..=5).into()],
}
});
}
#[test]
fn parse_ownership_no_ranges() {
"foo/bar.rs".parse::<OwnershipClaim>().unwrap_err();
}
#[test]
fn ownership_to_from_string() {
let ownership = OwnershipClaim {
file_path: "foo/bar.rs".into(),
hunks: vec![(1..=2).into(), (4..=5).into()],
};
assert_eq!(ownership.to_string(), "foo/bar.rs:1-2,4-5".to_string());
assert_eq!(
ownership.to_string().parse::<OwnershipClaim>().unwrap(),
ownership
);
}
#[test]
fn test_plus() {
vec![
("file.txt:1-10", "another.txt:1-5", "file.txt:1-10"),
("file.txt:1-10,3-14", "file.txt:3-14", "file.txt:3-14,1-10"),
("file.txt:5-10", "file.txt:1-5", "file.txt:1-5,5-10"),
("file.txt:1-10", "file.txt:1-5", "file.txt:1-5,1-10"),
("file.txt:1-5,2-2", "file.txt:1-10", "file.txt:1-10,1-5,2-2"),
(
"file.txt:1-10",
"file.txt:8-15,20-25",
"file.txt:20-25,8-15,1-10",
),
("file.txt:1-10", "file.txt:1-10", "file.txt:1-10"),
("file.txt:1-10,3-15", "file.txt:1-10", "file.txt:1-10,3-15"),
]
.into_iter()
.map(|(a, b, expected)| {
(
a.parse::<OwnershipClaim>().unwrap(),
b.parse::<OwnershipClaim>().unwrap(),
expected.parse::<OwnershipClaim>().unwrap(),
)
})
.for_each(|(a, b, expected)| {
let got = a.plus(&b);
assert_eq!(
got, expected,
"{} plus {}, expected {}, got {}",
a, b, expected, got
);
});
}
#[test]
fn test_minus() {
vec![
(
"file.txt:1-10",
"another.txt:1-5",
(None, Some("file.txt:1-10")),
),
(
"file.txt:1-10",
"file.txt:1-5",
(None, Some("file.txt:1-10")),
),
(
"file.txt:1-10",
"file.txt:11-15",
(None, Some("file.txt:1-10")),
),
(
"file.txt:1-10",
"file.txt:1-10",
(Some("file.txt:1-10"), None),
),
(
"file.txt:1-10,11-15",
"file.txt:11-15",
(Some("file.txt:11-15"), Some("file.txt:1-10")),
),
(
"file.txt:1-10,11-15,15-17",
"file.txt:1-10,15-17",
(Some("file.txt:1-10,15-17"), Some("file.txt:11-15")),
),
]
.into_iter()
.map(|(a, b, expected)| {
(
a.parse::<OwnershipClaim>().unwrap(),
b.parse::<OwnershipClaim>().unwrap(),
(
expected.0.map(|s| s.parse::<OwnershipClaim>().unwrap()),
expected.1.map(|s| s.parse::<OwnershipClaim>().unwrap()),
),
)
})
.for_each(|(a, b, expected)| {
let got = a.minus(&b);
assert_eq!(
got, expected,
"{} minus {}, expected {:?}, got {:?}",
a, b, expected, got
);
});
}
#[test]
fn test_equal() {
vec![
("file.txt:1-10", "file.txt:1-10", true),
("file.txt:1-10", "file.txt:1-11", false),
("file.txt:1-10,11-15", "file.txt:11-15,1-10", false),
("file.txt:1-10,11-15", "file.txt:1-10,11-15", true),
]
.into_iter()
.map(|(a, b, expected)| {
(
a.parse::<OwnershipClaim>().unwrap(),
b.parse::<OwnershipClaim>().unwrap(),
expected,
)
})
.for_each(|(a, b, expected)| {
assert_eq!(a == b, expected, "{} == {}, expected {}", a, b, expected);
});
}
}

View File

@ -167,96 +167,3 @@ impl Hunk {
format!("{:x}", md5::compute(addition))
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn to_from_string() {
let hunk = "1-2".parse::<Hunk>().unwrap();
assert_eq!("1-2", hunk.to_string());
}
#[test]
fn parse_invalid() {
"3-2".parse::<Hunk>().unwrap_err();
}
#[test]
fn parse_with_hash() {
assert_eq!(
"2-3-hash".parse::<Hunk>().unwrap(),
Hunk::new(2, 3, Some("hash".to_string()), None).unwrap()
);
}
#[test]
fn parse_with_timestamp() {
assert_eq!(
"2-3--123".parse::<Hunk>().unwrap(),
Hunk::new(2, 3, None, Some(123)).unwrap()
);
}
#[test]
fn parse_invalid_2() {
"3-2".parse::<Hunk>().unwrap_err();
}
#[test]
fn to_string_no_hash() {
assert_eq!(
"1-2--123",
Hunk::new(1, 2, None, Some(123)).unwrap().to_string()
);
}
#[test]
fn test_eq() {
for (a, b, expected) in vec![
(
"1-2".parse::<Hunk>().unwrap(),
"1-2".parse::<Hunk>().unwrap(),
true,
),
(
"1-2".parse::<Hunk>().unwrap(),
"2-3".parse::<Hunk>().unwrap(),
false,
),
(
"1-2-abc".parse::<Hunk>().unwrap(),
"1-2-abc".parse::<Hunk>().unwrap(),
true,
),
(
"1-2-abc".parse::<Hunk>().unwrap(),
"2-3-abc".parse::<Hunk>().unwrap(),
false,
),
(
"1-2".parse::<Hunk>().unwrap(),
"1-2-abc".parse::<Hunk>().unwrap(),
true,
),
(
"1-2-abc".parse::<Hunk>().unwrap(),
"1-2".parse::<Hunk>().unwrap(),
true,
),
(
"1-2-abc".parse::<Hunk>().unwrap(),
"1-2-bcd".parse::<Hunk>().unwrap(),
false,
),
(
"1-2-abc".parse::<Hunk>().unwrap(),
"2-3-bcd".parse::<Hunk>().unwrap(),
false,
),
] {
assert_eq!(a == b, expected, "comapring {} and {}", a, b);
}
}
}

View File

@ -181,290 +181,3 @@ pub fn reconcile_claims(
Ok(claim_outcomes)
}
#[cfg(test)]
mod tests {
use std::{path::PathBuf, vec};
use crate::virtual_branches::branch::Hunk;
use super::*;
#[test]
fn test_reconcile_ownership_simple() {
let branch_a = Branch {
name: "a".to_string(),
ownership: BranchOwnershipClaims {
claims: vec![OwnershipClaim {
file_path: PathBuf::from("foo"),
hunks: vec![
Hunk {
start: 1,
end: 3,
hash: Some("1,3".to_string()),
timestamp_ms: None,
},
Hunk {
start: 4,
end: 6,
hash: Some("4,6".to_string()),
timestamp_ms: None,
},
],
}],
},
applied: true,
..Default::default()
};
let branch_b = Branch {
name: "b".to_string(),
ownership: BranchOwnershipClaims {
claims: vec![OwnershipClaim {
file_path: PathBuf::from("foo"),
hunks: vec![Hunk {
start: 7,
end: 9,
hash: Some("7,9".to_string()),
timestamp_ms: None,
}],
}],
},
applied: true,
..Default::default()
};
let all_branches: Vec<Branch> = vec![branch_a.clone(), branch_b.clone()];
let claim: Vec<OwnershipClaim> = vec![OwnershipClaim {
file_path: PathBuf::from("foo"),
hunks: vec![
Hunk {
start: 4,
end: 6,
hash: Some("4,6".to_string()),
timestamp_ms: None,
},
Hunk {
start: 7,
end: 9,
hash: Some("9,7".to_string()),
timestamp_ms: None,
},
],
}];
let claim_outcomes = reconcile_claims(all_branches.clone(), &branch_b, &claim).unwrap();
assert_eq!(claim_outcomes.len(), all_branches.len());
assert_eq!(claim_outcomes[0].updated_branch.id, branch_a.id);
assert_eq!(claim_outcomes[1].updated_branch.id, branch_b.id);
assert_eq!(
claim_outcomes[0].updated_branch.ownership,
BranchOwnershipClaims {
claims: vec![OwnershipClaim {
file_path: PathBuf::from("foo"),
hunks: vec![Hunk {
start: 1,
end: 3,
hash: Some("1,3".to_string()),
timestamp_ms: None,
},],
}],
}
);
assert_eq!(
claim_outcomes[1].updated_branch.ownership,
BranchOwnershipClaims {
claims: vec![OwnershipClaim {
file_path: PathBuf::from("foo"),
hunks: vec![
Hunk {
start: 4,
end: 6,
hash: Some("4,6".to_string()),
timestamp_ms: None,
},
Hunk {
start: 7,
end: 9,
hash: Some("9,7".to_string()),
timestamp_ms: None,
},
],
}],
}
);
}
#[test]
fn test_ownership() {
let ownership = "src/main.rs:0-100\nsrc/main2.rs:200-300".parse::<BranchOwnershipClaims>();
assert!(ownership.is_ok());
let ownership = ownership.unwrap();
assert_eq!(ownership.claims.len(), 2);
assert_eq!(
ownership.claims[0],
"src/main.rs:0-100".parse::<OwnershipClaim>().unwrap()
);
assert_eq!(
ownership.claims[1],
"src/main2.rs:200-300".parse::<OwnershipClaim>().unwrap()
);
}
#[test]
fn test_ownership_2() {
let ownership = "src/main.rs:0-100\nsrc/main2.rs:200-300".parse::<BranchOwnershipClaims>();
assert!(ownership.is_ok());
let ownership = ownership.unwrap();
assert_eq!(ownership.claims.len(), 2);
assert_eq!(
ownership.claims[0],
"src/main.rs:0-100".parse::<OwnershipClaim>().unwrap()
);
assert_eq!(
ownership.claims[1],
"src/main2.rs:200-300".parse::<OwnershipClaim>().unwrap()
);
}
#[test]
fn test_put() {
let mut ownership = "src/main.rs:0-100"
.parse::<BranchOwnershipClaims>()
.unwrap();
ownership.put(&"src/main.rs:200-300".parse::<OwnershipClaim>().unwrap());
assert_eq!(ownership.claims.len(), 1);
assert_eq!(
ownership.claims[0],
"src/main.rs:200-300,0-100"
.parse::<OwnershipClaim>()
.unwrap()
);
}
#[test]
fn test_put_2() {
let mut ownership = "src/main.rs:0-100"
.parse::<BranchOwnershipClaims>()
.unwrap();
ownership.put(&"src/main.rs2:200-300".parse::<OwnershipClaim>().unwrap());
assert_eq!(ownership.claims.len(), 2);
assert_eq!(
ownership.claims[0],
"src/main.rs2:200-300".parse::<OwnershipClaim>().unwrap()
);
assert_eq!(
ownership.claims[1],
"src/main.rs:0-100".parse::<OwnershipClaim>().unwrap()
);
}
#[test]
fn test_put_3() {
let mut ownership = "src/main.rs:0-100\nsrc/main2.rs:100-200"
.parse::<BranchOwnershipClaims>()
.unwrap();
ownership.put(&"src/main2.rs:200-300".parse::<OwnershipClaim>().unwrap());
assert_eq!(ownership.claims.len(), 2);
assert_eq!(
ownership.claims[0],
"src/main2.rs:200-300,100-200"
.parse::<OwnershipClaim>()
.unwrap()
);
assert_eq!(
ownership.claims[1],
"src/main.rs:0-100".parse::<OwnershipClaim>().unwrap()
);
}
#[test]
fn test_put_4() {
let mut ownership = "src/main.rs:0-100\nsrc/main2.rs:100-200"
.parse::<BranchOwnershipClaims>()
.unwrap();
ownership.put(&"src/main2.rs:100-200".parse::<OwnershipClaim>().unwrap());
assert_eq!(ownership.claims.len(), 2);
assert_eq!(
ownership.claims[0],
"src/main2.rs:100-200".parse::<OwnershipClaim>().unwrap()
);
assert_eq!(
ownership.claims[1],
"src/main.rs:0-100".parse::<OwnershipClaim>().unwrap()
);
}
#[test]
fn test_put_7() {
let mut ownership = "src/main.rs:100-200"
.parse::<BranchOwnershipClaims>()
.unwrap();
ownership.put(&"src/main.rs:100-200".parse::<OwnershipClaim>().unwrap());
assert_eq!(ownership.claims.len(), 1);
assert_eq!(
ownership.claims[0],
"src/main.rs:100-200".parse::<OwnershipClaim>().unwrap()
);
}
#[test]
fn test_take_1() {
let mut ownership = "src/main.rs:100-200,200-300"
.parse::<BranchOwnershipClaims>()
.unwrap();
let taken = ownership.take(&"src/main.rs:100-200".parse::<OwnershipClaim>().unwrap());
assert_eq!(ownership.claims.len(), 1);
assert_eq!(
ownership.claims[0],
"src/main.rs:200-300".parse::<OwnershipClaim>().unwrap()
);
assert_eq!(
taken,
vec!["src/main.rs:100-200".parse::<OwnershipClaim>().unwrap()]
);
}
#[test]
fn test_equal() {
for (a, b, expected) in vec![
(
"src/main.rs:100-200"
.parse::<BranchOwnershipClaims>()
.unwrap(),
"src/main.rs:100-200"
.parse::<BranchOwnershipClaims>()
.unwrap(),
true,
),
(
"src/main.rs:100-200\nsrc/main1.rs:300-400\n"
.parse::<BranchOwnershipClaims>()
.unwrap(),
"src/main.rs:100-200"
.parse::<BranchOwnershipClaims>()
.unwrap(),
false,
),
(
"src/main.rs:100-200\nsrc/main1.rs:300-400\n"
.parse::<BranchOwnershipClaims>()
.unwrap(),
"src/main.rs:100-200\nsrc/main1.rs:300-400\n"
.parse::<BranchOwnershipClaims>()
.unwrap(),
true,
),
(
"src/main.rs:300-400\nsrc/main1.rs:100-200\n"
.parse::<BranchOwnershipClaims>()
.unwrap(),
"src/main1.rs:100-200\nsrc/main.rs:300-400\n"
.parse::<BranchOwnershipClaims>()
.unwrap(),
false,
),
] {
assert_eq!(a == b, expected, "{:#?} == {:#?}", a, b);
}
}
}

View File

@ -122,527 +122,3 @@ pub fn hunk_with_context(
hunk
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn replace_line_mid_file() {
let hunk_diff = r#"@@ -8 +8 @@ default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
+SERDE = ["dep:serde", "uuid/serde"]
"#;
let with_ctx = hunk_with_context(
hunk_diff,
8,
8,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
let expected = r#"@@ -5,7 +5,7 @@
[features]
default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
+SERDE = ["dep:serde", "uuid/serde"]
rusqlite = ["dep:rusqlite"]
[dependencies]
"#;
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected);
assert_eq!(with_ctx.old_start, 5);
assert_eq!(with_ctx.old_lines, 7);
assert_eq!(with_ctx.new_start, 5);
assert_eq!(with_ctx.new_lines, 7);
}
#[test]
fn replace_line_top_file() {
let hunk_diff = r#"@@ -2 +2 @@
-name = "gitbutler-core"
+NAME = "gitbutler-core"
"#;
let with_ctx = hunk_with_context(
hunk_diff,
2,
2,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(
with_ctx.diff.replace("\n \n", "\n\n"),
r#"@@ -1,5 +1,5 @@
[package]
-name = "gitbutler-core"
+NAME = "gitbutler-core"
version = "0.0.0"
edition = "2021"
"#
);
assert_eq!(with_ctx.old_start, 1);
assert_eq!(with_ctx.old_lines, 5);
assert_eq!(with_ctx.new_start, 1);
assert_eq!(with_ctx.new_lines, 5);
}
#[test]
fn replace_line_start_file() {
let hunk_diff = "@@ -1 +1 @@
-[package]
+[PACKAGE]
";
let with_ctx = hunk_with_context(
hunk_diff,
1,
1,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(
with_ctx.diff.replace("\n \n", "\n\n"),
r#"@@ -1,4 +1,4 @@
-[package]
+[PACKAGE]
name = "gitbutler-core"
version = "0.0.0"
edition = "2021"
"#
);
assert_eq!(with_ctx.old_start, 1);
assert_eq!(with_ctx.old_lines, 4);
assert_eq!(with_ctx.new_start, 1);
assert_eq!(with_ctx.new_lines, 4);
}
#[test]
fn replace_line_bottom_file() {
let hunk_diff = "@@ -13 +13 @@
-serde = { workspace = true, optional = true }
+SERDE = { workspace = true, optional = true }
";
let with_ctx = hunk_with_context(
hunk_diff,
13,
13,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(
with_ctx.diff.replace("\n \n", "\n\n"),
r#"@@ -10,5 +10,5 @@
[dependencies]
rusqlite = { workspace = true, optional = true }
-serde = { workspace = true, optional = true }
+SERDE = { workspace = true, optional = true }
uuid = { workspace = true, features = ["v4", "fast-rng"] }
"#
);
assert_eq!(with_ctx.old_start, 10);
assert_eq!(with_ctx.old_lines, 5);
assert_eq!(with_ctx.new_start, 10);
assert_eq!(with_ctx.new_lines, 5);
}
#[test]
fn replace_with_more_lines() {
let hunk_diff = r#"@@ -8 +8,4 @@
-serde = ["dep:serde", "uuid/serde"]
+one
+two
+three
+four
"#;
let with_ctx = hunk_with_context(
hunk_diff,
8,
8,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(
with_ctx.diff.replace("\n \n", "\n\n"),
r#"@@ -5,7 +5,10 @@
[features]
default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
+one
+two
+three
+four
rusqlite = ["dep:rusqlite"]
[dependencies]
"#
);
assert_eq!(with_ctx.old_start, 5);
assert_eq!(with_ctx.old_lines, 7);
assert_eq!(with_ctx.new_start, 5);
assert_eq!(with_ctx.new_lines, 10);
}
#[test]
fn replace_with_less_lines() {
let hunk_diff = r#"@@ -7,3 +7 @@
-default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
-rusqlite = ["dep:rusqlite"]
+foo = ["foo"]
"#;
let with_ctx = hunk_with_context(
hunk_diff,
7,
7,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(
with_ctx.diff.replace("\n \n", "\n\n"),
r#"@@ -4,9 +4,7 @@
edition = "2021"
[features]
-default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
-rusqlite = ["dep:rusqlite"]
+foo = ["foo"]
[dependencies]
rusqlite = { workspace = true, optional = true }
"#
);
assert_eq!(with_ctx.old_start, 4);
assert_eq!(with_ctx.old_lines, 9);
assert_eq!(with_ctx.new_start, 4);
assert_eq!(with_ctx.new_lines, 7);
}
#[test]
fn empty_string_doesnt_panic() {
let hunk_diff = "";
let with_ctx = hunk_with_context(
hunk_diff,
1,
1,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(with_ctx.diff, "");
}
#[test]
fn removed_file() {
let hunk_diff = r#"@@ -1,14 +0,0 @@
-[package]
-name = "gitbutler-core"
-version = "0.0.0"
-edition = "2021"
-
-[features]
-default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
-rusqlite = ["dep:rusqlite"]
-
-[dependencies]
-rusqlite = { workspace = true, optional = true }
-serde = { workspace = true, optional = true }
-uuid = { workspace = true, features = ["v4", "fast-rng"] }
"#;
let with_ctx = hunk_with_context(
hunk_diff,
1,
0,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), hunk_diff);
assert_eq!(with_ctx.old_start, 1);
assert_eq!(with_ctx.old_lines, 14);
assert_eq!(with_ctx.new_start, 0);
assert_eq!(with_ctx.new_lines, 0);
}
#[test]
fn new_file() {
let hunk_diff = "@@ -0,0 +1,5 @@
+line 1
+line 2
+line 3
+line 4
+line 5
";
let with_ctx = hunk_with_context(
hunk_diff,
0,
1,
false,
3,
&Vec::new(),
diff::ChangeType::Added,
);
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), hunk_diff);
assert_eq!(with_ctx.old_start, 0);
assert_eq!(with_ctx.old_lines, 0);
assert_eq!(with_ctx.new_start, 1);
assert_eq!(with_ctx.new_lines, 5);
}
#[test]
fn only_add_lines() {
let hunk_diff = "@@ -8,0 +9,3 @@
+one
+two
+three
";
let with_ctx = hunk_with_context(
hunk_diff,
8,
9,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
let expected = r#"@@ -6,6 +6,9 @@
[features]
default = ["serde", "rusqlite"]
serde = ["dep:serde", "uuid/serde"]
+one
+two
+three
rusqlite = ["dep:rusqlite"]
[dependencies]
"#;
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected);
assert_eq!(with_ctx.old_start, 6);
assert_eq!(with_ctx.old_lines, 6);
assert_eq!(with_ctx.new_start, 6);
assert_eq!(with_ctx.new_lines, 9);
}
#[test]
fn only_add_lines_with_additions_below() {
let hunk_diff = "@@ -8,0 +13,3 @@
+one
+two
+three
";
let with_ctx = hunk_with_context(
hunk_diff,
8,
13,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
let expected = r#"@@ -6,6 +10,9 @@
[features]
default = ["serde", "rusqlite"]
serde = ["dep:serde", "uuid/serde"]
+one
+two
+three
rusqlite = ["dep:rusqlite"]
[dependencies]
"#;
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected);
assert_eq!(with_ctx.old_start, 6);
assert_eq!(with_ctx.old_lines, 6);
assert_eq!(with_ctx.new_start, 10);
assert_eq!(with_ctx.new_lines, 9);
}
#[test]
fn only_remove_lines() {
let hunk_diff = r#"@@ -7,3 +6,0 @@
-default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
-rusqlite = ["dep:rusqlite"]
"#;
let expected = r#"@@ -4,9 +4,6 @@
edition = "2021"
[features]
-default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
-rusqlite = ["dep:rusqlite"]
[dependencies]
rusqlite = { workspace = true, optional = true }
"#;
let with_ctx = hunk_with_context(
hunk_diff,
7,
6,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected);
assert_eq!(with_ctx.old_start, 4);
assert_eq!(with_ctx.old_lines, 9);
assert_eq!(with_ctx.new_start, 4);
assert_eq!(with_ctx.new_lines, 6);
}
#[test]
fn only_remove_lines_with_additions_below() {
let hunk_diff = r#"@@ -7,3 +10,0 @@
-default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
-rusqlite = ["dep:rusqlite"]
"#;
let expected = r#"@@ -4,9 +8,6 @@
edition = "2021"
[features]
-default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
-rusqlite = ["dep:rusqlite"]
[dependencies]
rusqlite = { workspace = true, optional = true }
"#;
let with_ctx = hunk_with_context(
hunk_diff,
7,
10,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected);
assert_eq!(with_ctx.old_start, 4);
assert_eq!(with_ctx.old_lines, 9);
assert_eq!(with_ctx.new_start, 8);
assert_eq!(with_ctx.new_lines, 6);
}
// Regression case: removal-only hunk near the middle of a Ruby controller
// (includes a blank removed line); context expansion must stay consistent.
#[test]
fn weird_testcase() {
let hunk_diff = "@@ -11,2 +10,0 @@
-
- @waiting_users = User.where(approved: false).count
";
let with_ctx = hunk_with_context(
hunk_diff,
11,
10,
false,
3,
&file_lines_2(),
diff::ChangeType::Added,
);
let expected = "@@ -8,8 +8,6 @@
.order(:created_at)
.page params[:page]
@total = @registrations.total_count
-
- @waiting_users = User.where(approved: false).count
end
def invite
";
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected);
assert_eq!(with_ctx.old_start, 8);
assert_eq!(with_ctx.old_lines, 8);
assert_eq!(with_ctx.new_start, 8);
assert_eq!(with_ctx.new_lines, 6);
}
// Addition-only hunk (`-2,0 +3`): the single added line must be embedded in
// the surrounding file context. Only the diff text is asserted here.
#[test]
fn new_line_added() {
let hunk_diff = "@@ -2,0 +3 @@ alias(
+ newstuff
";
let with_ctx = hunk_with_context(
hunk_diff,
2,
3,
false,
3,
&file_lines_3(),
diff::ChangeType::Added,
);
let expected = r#"@@ -1,4 +1,5 @@
alias(
name = "rdeps",
+ newstuff
actual = "//java/com/videlov/rdeps:rdeps",
)
"#;
assert_eq!(with_ctx.diff, expected);
}
/// Fixture: the Cargo-manifest-style file the hunk-context tests operate on,
/// returned as one `&str` per line.
fn file_lines() -> Vec<&'static str> {
    r#"[package]
name = "gitbutler-core"
version = "0.0.0"
edition = "2021"
[features]
default = ["serde", "rusqlite"]
serde = ["dep:serde", "uuid/serde"]
rusqlite = ["dep:rusqlite"]
[dependencies]
rusqlite = { workspace = true, optional = true }
serde = { workspace = true, optional = true }
uuid = { workspace = true, features = ["v4", "fast-rng"] }
"#
    .lines()
    .collect()
}
/// Fixture: a Ruby controller snippet (used by `weird_testcase`), returned as
/// one `&str` per line.
fn file_lines_2() -> Vec<&'static str> {
    r#"class Admin::WaitingController < Admin::AdminController
def index
@registrations = Registration.where(invited_at: nil)
if params[:q]
@registrations = @registrations.where("email LIKE ?", "%#{params[:q]}%")
end
@registrations = @registrations.includes(:invite_code)
.order(:created_at)
.page params[:page]
@total = @registrations.total_count
@waiting_users = User.where(approved: false).count
end
def invite
if params[:id]
@registrations = Registration.where(id: params[:id])
"#
    .lines()
    .collect()
}
/// Fixture: a tiny Bazel `alias(...)` block (used by `new_line_added`),
/// returned as one `&str` per line.
fn file_lines_3() -> Vec<&'static str> {
    r#"alias(
name = "rdeps",
actual = "//java/com/videlov/rdeps:rdeps",
)
"#
    .lines()
    .collect()
}
}

View File

@ -4050,7 +4050,7 @@ mod tests {
}
#[test]
fn test_normalize_branch_name() {
fn normalize_branch_name_test() {
assert_eq!(normalize_branch_name("feature/branch"), "feature/branch");
assert_eq!(normalize_branch_name("foo#branch"), "foo#branch");
assert_eq!(normalize_branch_name("foo!branch"), "foo-branch");

View File

@ -88,7 +88,7 @@ mod tests {
use super::*;
#[test]
fn test_write() {
fn write() {
let root = tempfile::tempdir().unwrap();
let writer = DirWriter::open(root.path()).unwrap();
writer.write("foo/bar", b"baz").unwrap();
@ -99,7 +99,7 @@ mod tests {
}
#[test]
fn test_remove() {
fn remove() {
let root = tempfile::tempdir().unwrap();
let writer = DirWriter::open(root.path()).unwrap();
writer.remove("foo/bar").unwrap();

View File

@ -163,52 +163,3 @@ fn file_hash<P: AsRef<path::Path>>(digest: &mut Sha256, path: P) -> Result<()> {
);
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs::File;
use std::io::Write;
use tempfile::tempdir;
#[test]
fn test_zip_dir() {
let tmp_dir = tempdir().unwrap();
let tmp_dir_path = tmp_dir.path();
let file_path = tmp_dir_path.join("test.txt");
let mut file = File::create(file_path).unwrap();
file.write_all(b"test").unwrap();
let zipper_cache = tempdir().unwrap();
let zipper = Zipper::new(zipper_cache.path());
let zip_file_path = zipper.zip(tmp_dir).unwrap();
assert!(zip_file_path.exists());
}
#[test]
fn test_zip_file() {
let tmp_dir = tempdir().unwrap();
let tmp_dir_path = tmp_dir.path();
let file_path = tmp_dir_path.join("test.txt");
let mut file = File::create(&file_path).unwrap();
file.write_all(b"test").unwrap();
let zipper_cache = tempdir().unwrap();
let zipper = Zipper::new(zipper_cache.path());
zipper.zip(file_path).unwrap_err();
}
#[test]
fn test_zip_once() {
let tmp_dir = tempdir().unwrap();
let tmp_dir_path = tmp_dir.path();
let file_path = tmp_dir_path.join("test.txt");
let mut file = File::create(file_path).unwrap();
file.write_all(b"test").unwrap();
let zipper_cache = tempdir().unwrap();
let zipper = Zipper::new(zipper_cache.path());
assert_eq!(zipper.zip(&tmp_dir).unwrap(), zipper.zip(&tmp_dir).unwrap());
assert_eq!(WalkDir::new(tmp_dir).into_iter().count(), 1);
}
}

View File

@ -13,15 +13,18 @@ mod keys;
mod lock;
mod reader;
mod sessions;
mod types;
pub mod virtual_branches;
mod watcher;
mod zip;
use std::{collections::HashMap, fs, path};
use std::path::PathBuf;
use std::{collections::HashMap, fs};
use tempfile::tempdir;
pub struct Suite {
pub local_app_data: path::PathBuf,
pub local_app_data: PathBuf,
pub storage: gitbutler_app::storage::Storage,
pub users: gitbutler_app::users::Controller,
pub projects: gitbutler_app::projects::Controller,
@ -57,7 +60,7 @@ impl Suite {
user
}
fn project(&self, fs: HashMap<path::PathBuf, &str>) -> gitbutler_app::projects::Project {
fn project(&self, fs: HashMap<PathBuf, &str>) -> gitbutler_app::projects::Project {
let repository = test_repository();
for (path, contents) in fs {
if let Some(parent) = path.parent() {
@ -77,7 +80,7 @@ impl Suite {
.expect("failed to add project")
}
pub fn new_case_with_files(&self, fs: HashMap<path::PathBuf, &str>) -> Case {
pub fn new_case_with_files(&self, fs: HashMap<PathBuf, &str>) -> Case {
let project = self.project(fs);
Case::new(self, project)
}
@ -145,7 +148,7 @@ pub fn test_database() -> gitbutler_app::database::Database {
gitbutler_app::database::Database::open_in_directory(temp_dir()).unwrap()
}
pub fn temp_dir() -> path::PathBuf {
pub fn temp_dir() -> PathBuf {
let path = tempdir().unwrap().path().to_path_buf();
fs::create_dir_all(&path).unwrap();
path

View File

@ -0,0 +1,263 @@
use gitbutler_app::deltas::operations::Operation;
use gitbutler_app::deltas::{Delta, Document};
use gitbutler_app::reader;
// A Document built from existing UTF-8 content and no deltas renders the
// content unchanged and records zero deltas.
#[test]
fn new() {
let document = Document::new(
Some(&reader::Content::UTF8("hello world".to_string())),
vec![],
);
assert!(document.is_ok());
let document = document.unwrap();
assert_eq!(document.to_string(), "hello world");
assert_eq!(document.get_deltas().len(), 0);
}
// Appending one character via update() produces exactly one delta containing a
// single Insert at the end of the previous content.
#[test]
fn update() {
let document = Document::new(
Some(&reader::Content::UTF8("hello world".to_string())),
vec![],
);
assert!(document.is_ok());
let mut document = document.unwrap();
document
.update(Some(&reader::Content::UTF8("hello world!".to_string())))
.unwrap();
assert_eq!(document.to_string(), "hello world!");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
assert_eq!(
document.get_deltas()[0].operations[0],
// Insert at offset 11 — right after "hello world".
Operation::Insert((11, "!".to_string()))
);
}
// Updating a Document that started with no content yields a single Insert
// covering the whole new text, starting at offset 0.
#[test]
fn empty() {
let document = Document::new(None, vec![]);
assert!(document.is_ok());
let mut document = document.unwrap();
document
.update(Some(&reader::Content::UTF8("hello world!".to_string())))
.unwrap();
assert_eq!(document.to_string(), "hello world!");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
assert_eq!(
document.get_deltas()[0].operations[0],
Operation::Insert((0, "hello world!".to_string()))
);
}
// Replaying a sequence of recorded deltas (two inserts, then a delete+insert)
// reconstructs the final text from an initially empty document.
#[test]
fn from_deltas() {
let document = Document::new(
None,
vec![
Delta {
timestamp_ms: 0,
operations: vec![Operation::Insert((0, "hello".to_string()))],
},
Delta {
timestamp_ms: 1,
operations: vec![Operation::Insert((5, " world".to_string()))],
},
Delta {
timestamp_ms: 2,
operations: vec![
Operation::Delete((3, 7)),
Operation::Insert((4, "!".to_string())),
],
},
],
);
assert!(document.is_ok());
let document = document.unwrap();
// "hello" + " world" - 7 chars at offset 3 + "!" at offset 4 => "held!".
assert_eq!(document.to_string(), "held!");
}
// Three successive updates on one line: each update appends a new delta, and
// the third (a shrink-and-replace) is recorded as a Delete followed by an
// Insert within the same delta.
#[test]
fn complex_line() {
let document = Document::new(None, vec![]);
assert!(document.is_ok());
let mut document = document.unwrap();
document
.update(Some(&reader::Content::UTF8("hello".to_string())))
.unwrap();
assert_eq!(document.to_string(), "hello");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
assert_eq!(
document.get_deltas()[0].operations[0],
Operation::Insert((0, "hello".to_string()))
);
document
.update(Some(&reader::Content::UTF8("hello world".to_string())))
.unwrap();
assert_eq!(document.to_string(), "hello world");
assert_eq!(document.get_deltas().len(), 2);
assert_eq!(document.get_deltas()[1].operations.len(), 1);
assert_eq!(
document.get_deltas()[1].operations[0],
Operation::Insert((5, " world".to_string()))
);
document
.update(Some(&reader::Content::UTF8("held!".to_string())))
.unwrap();
assert_eq!(document.to_string(), "held!");
assert_eq!(document.get_deltas().len(), 3);
assert_eq!(document.get_deltas()[2].operations.len(), 2);
assert_eq!(
document.get_deltas()[2].operations[0],
Operation::Delete((3, 7))
);
assert_eq!(
document.get_deltas()[2].operations[1],
Operation::Insert((4, "!".to_string())),
);
}
// Adding content across multiple lines: the final update touches two lines at
// once and is recorded as two Insert operations in a single delta.
#[test]
fn multiline_add() {
let document = Document::new(None, vec![]);
assert!(document.is_ok());
let mut document = document.unwrap();
document
.update(Some(&reader::Content::UTF8("first".to_string())))
.unwrap();
assert_eq!(document.to_string(), "first");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
assert_eq!(
document.get_deltas()[0].operations[0],
Operation::Insert((0, "first".to_string()))
);
document
.update(Some(&reader::Content::UTF8("first\ntwo".to_string())))
.unwrap();
assert_eq!(document.to_string(), "first\ntwo");
assert_eq!(document.get_deltas().len(), 2);
assert_eq!(document.get_deltas()[1].operations.len(), 1);
assert_eq!(
document.get_deltas()[1].operations[0],
Operation::Insert((5, "\ntwo".to_string()))
);
document
.update(Some(&reader::Content::UTF8(
"first line\nline two".to_string(),
)))
.unwrap();
assert_eq!(document.to_string(), "first line\nline two");
assert_eq!(document.get_deltas().len(), 3);
assert_eq!(document.get_deltas()[2].operations.len(), 2);
assert_eq!(
document.get_deltas()[2].operations[0],
Operation::Insert((5, " line".to_string()))
);
assert_eq!(
document.get_deltas()[2].operations[1],
Operation::Insert((11, "line ".to_string()))
);
}
// Removing content across multiple lines, then a full wipe via update(None):
// multi-line shrink is two Deletes in one delta; clearing the document is one
// Delete covering the remaining text.
#[test]
fn multiline_remove() {
let document = Document::new(None, vec![]);
assert!(document.is_ok());
let mut document = document.unwrap();
document
.update(Some(&reader::Content::UTF8(
"first line\nline two".to_string(),
)))
.unwrap();
assert_eq!(document.to_string(), "first line\nline two");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
assert_eq!(
document.get_deltas()[0].operations[0],
Operation::Insert((0, "first line\nline two".to_string()))
);
document
.update(Some(&reader::Content::UTF8("first\ntwo".to_string())))
.unwrap();
assert_eq!(document.to_string(), "first\ntwo");
assert_eq!(document.get_deltas().len(), 2);
assert_eq!(document.get_deltas()[1].operations.len(), 2);
assert_eq!(
document.get_deltas()[1].operations[0],
Operation::Delete((5, 5))
);
assert_eq!(
document.get_deltas()[1].operations[1],
Operation::Delete((6, 5))
);
document
.update(Some(&reader::Content::UTF8("first".to_string())))
.unwrap();
assert_eq!(document.to_string(), "first");
assert_eq!(document.get_deltas().len(), 3);
assert_eq!(document.get_deltas()[2].operations.len(), 1);
assert_eq!(
document.get_deltas()[2].operations[0],
Operation::Delete((5, 4))
);
// update(None) clears the document entirely.
document.update(None).unwrap();
assert_eq!(document.to_string(), "");
assert_eq!(document.get_deltas().len(), 4);
assert_eq!(document.get_deltas()[3].operations.len(), 1);
assert_eq!(
document.get_deltas()[3].operations[0],
Operation::Delete((0, 5))
);
}
// Transition from binary to UTF-8 content: update() yields deltas and the
// document renders the new text.
#[test]
fn binary_to_text() {
let latest = reader::Content::Binary;
let current = reader::Content::UTF8("test".to_string());
let mut document = Document::new(Some(&latest), vec![]).unwrap();
let new_deltas = document.update(Some(&current)).unwrap();
assert!(new_deltas.is_some());
assert_eq!(document.to_string(), "test");
}
// Binary-to-binary update: deltas are still produced, but the document has no
// textual rendering.
#[test]
fn binary_to_binary() {
let latest = reader::Content::Binary;
let current = reader::Content::Binary;
let mut document = Document::new(Some(&latest), vec![]).unwrap();
let new_deltas = document.update(Some(&current)).unwrap();
assert!(new_deltas.is_some());
assert_eq!(document.to_string(), "");
}
// Transition from UTF-8 to binary content: deltas are produced and the
// textual rendering becomes empty.
#[test]
fn text_to_binary() {
let latest = reader::Content::UTF8("text".to_string());
let current = reader::Content::Binary;
let mut document = Document::new(Some(&latest), vec![]).unwrap();
let new_deltas = document.update(Some(&current)).unwrap();
assert!(new_deltas.is_some());
assert_eq!(document.to_string(), "");
}
// Replacing one multi-byte (astral-plane emoji) character with another must
// not corrupt the document — operations work on characters, not bytes.
#[test]
fn unicode() {
let latest = reader::Content::UTF8("\u{1f31a}".to_string());
let current = reader::Content::UTF8("\u{1f31d}".to_string());
let mut document = Document::new(Some(&latest), vec![]).unwrap();
document.update(Some(&current)).unwrap();
assert_eq!(document.to_string(), "\u{1f31d}");
}

View File

@ -103,6 +103,9 @@ mod database {
}
}
mod document;
mod operations;
mod writer {
use gitbutler_app::deltas::operations::Operation;
use gitbutler_app::{deltas, sessions};

View File

@ -0,0 +1,55 @@
use gitbutler_app::deltas::operations::{get_delta_operations, Operation};
// Appending text at the end diffs to a single Insert at the old length.
#[test]
fn get_delta_operations_insert_end() {
let initial_text = "hello";
let final_text = "hello world!";
let operations = get_delta_operations(initial_text, final_text);
assert_eq!(operations.len(), 1);
assert_eq!(operations[0], Operation::Insert((5, " world!".to_string())));
}
// Inserting text in the middle diffs to a single Insert at the split point.
#[test]
fn get_delta_operations_insert_middle() {
let initial_text = "helloworld";
let final_text = "hello, world";
let operations = get_delta_operations(initial_text, final_text);
assert_eq!(operations.len(), 1);
assert_eq!(operations[0], Operation::Insert((5, ", ".to_string())));
}
// Prepending text diffs to a single Insert at offset 0.
#[test]
fn get_delta_operations_insert_begin() {
let initial_text = "world";
let final_text = "hello world";
let operations = get_delta_operations(initial_text, final_text);
assert_eq!(operations.len(), 1);
assert_eq!(operations[0], Operation::Insert((0, "hello ".to_string())));
}
// Truncating the tail diffs to a single Delete (offset, length) at the cut.
#[test]
fn get_delta_operations_delete_end() {
let initial_text = "hello world!";
let final_text = "hello";
let operations = get_delta_operations(initial_text, final_text);
assert_eq!(operations.len(), 1);
assert_eq!(operations[0], Operation::Delete((5, 7)));
}
// Removing characters from the middle diffs to a single Delete there.
#[test]
fn get_delta_operations_delete_middle() {
let initial_text = "hello, world";
let final_text = "helloworld";
let operations = get_delta_operations(initial_text, final_text);
assert_eq!(operations.len(), 1);
assert_eq!(operations[0], Operation::Delete((5, 2)));
}
#[test]
fn get_delta_operations_delete_begin() {
let initial_text = "hello world";
let final_text = "world";
let operations = get_delta_operations(initial_text, final_text);
assert_eq!(operations.len(), 1);
assert_eq!(operations[0], Operation::Delete((0, 6)));
}

View File

@ -13,14 +13,14 @@ use gitbutler_app::{
sessions::{self, SessionId},
};
fn test_remote_repository() -> Result<git2::Repository> {
fn new_test_remote_repository() -> Result<git2::Repository> {
let path = tempfile::tempdir()?.path().to_str().unwrap().to_string();
let repo_a = git2::Repository::init_opts(path, &init_opts_bare())?;
Ok(repo_a)
}
#[test]
fn test_get_current_session_writer_should_use_existing_session() -> Result<()> {
fn get_current_session_writer_should_use_existing_session() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let current_session_1 = gb_repository.get_or_create_current_session()?;
@ -31,7 +31,7 @@ fn test_get_current_session_writer_should_use_existing_session() -> Result<()> {
}
#[test]
fn test_must_not_return_init_session() -> Result<()> {
fn must_not_return_init_session() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
assert!(gb_repository.get_current_session()?.is_none());
@ -43,7 +43,7 @@ fn test_must_not_return_init_session() -> Result<()> {
}
#[test]
fn test_must_not_flush_without_current_session() -> Result<()> {
fn must_not_flush_without_current_session() -> Result<()> {
let Case {
gb_repository,
project_repository,
@ -60,7 +60,7 @@ fn test_must_not_flush_without_current_session() -> Result<()> {
}
#[test]
fn test_non_empty_repository() -> Result<()> {
fn non_empty_repository() -> Result<()> {
let Case {
gb_repository,
project_repository,
@ -75,7 +75,7 @@ fn test_non_empty_repository() -> Result<()> {
}
#[test]
fn test_must_flush_current_session() -> Result<()> {
fn must_flush_current_session() -> Result<()> {
let Case {
gb_repository,
project_repository,
@ -94,7 +94,7 @@ fn test_must_flush_current_session() -> Result<()> {
}
#[test]
fn test_list_deltas_from_current_session() -> Result<()> {
fn list_deltas_from_current_session() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let current_session = gb_repository.get_or_create_current_session()?;
@ -125,7 +125,7 @@ fn test_list_deltas_from_current_session() -> Result<()> {
}
#[test]
fn test_list_deltas_from_flushed_session() {
fn list_deltas_from_flushed_session() {
let Case {
gb_repository,
project_repository,
@ -160,7 +160,7 @@ fn test_list_deltas_from_flushed_session() {
}
#[test]
fn test_list_files_from_current_session() {
fn list_files_from_current_session() {
let Case { gb_repository, .. } = Suite::default().new_case_with_files(HashMap::from([(
path::PathBuf::from("test.txt"),
"Hello World",
@ -178,7 +178,7 @@ fn test_list_files_from_current_session() {
}
#[test]
fn test_list_files_from_flushed_session() {
fn list_files_from_flushed_session() {
let Case {
gb_repository,
project_repository,
@ -204,9 +204,9 @@ fn test_list_files_from_flushed_session() {
}
#[tokio::test]
async fn test_remote_syncronization() {
async fn remote_syncronization() {
// first, crate a remote, pretending it's a cloud
let cloud = test_remote_repository().unwrap();
let cloud = new_test_remote_repository().unwrap();
let api_project = ApiProject {
name: "test-sync".to_string(),
description: None,
@ -299,9 +299,9 @@ async fn test_remote_syncronization() {
}
#[tokio::test]
async fn test_remote_sync_order() {
async fn remote_sync_order() {
// first, crate a remote, pretending it's a cloud
let cloud = test_remote_repository().unwrap();
let cloud = new_test_remote_repository().unwrap();
let api_project = projects::ApiProject {
name: "test-sync".to_string(),
description: None,
@ -422,7 +422,7 @@ async fn test_remote_sync_order() {
}
#[test]
fn test_gitbutler_file() {
fn gitbutler_file() {
let Case {
gb_repository,
project_repository,

View File

@ -1,7 +1,7 @@
use crate::test_repository;
#[test]
pub fn test_set_str() {
pub fn set_str() {
let repo = test_repository();
let mut config = repo.config().unwrap();
config.set_str("test.key", "test.value").unwrap();
@ -12,7 +12,7 @@ pub fn test_set_str() {
}
#[test]
pub fn test_set_bool() {
pub fn set_bool() {
let repo = test_repository();
let mut config = repo.config().unwrap();
config.set_bool("test.key", true).unwrap();
@ -20,14 +20,14 @@ pub fn test_set_bool() {
}
#[test]
pub fn test_get_string_none() {
pub fn get_string_none() {
let repo = test_repository();
let config = repo.config().unwrap();
assert_eq!(config.get_string("test.key").unwrap(), None);
}
#[test]
pub fn test_get_bool_none() {
pub fn get_bool_none() {
let repo = test_repository();
let config = repo.config().unwrap();
assert_eq!(config.get_bool("test.key").unwrap(), None);

View File

@ -12,14 +12,14 @@ use gitbutler_app::{
sessions::{self, SessionId},
};
fn test_remote_repository() -> Result<git2::Repository> {
fn new_test_remote_repository() -> Result<git2::Repository> {
let path = tempfile::tempdir()?.path().to_str().unwrap().to_string();
let repo_a = git2::Repository::init_opts(path, &init_opts_bare())?;
Ok(repo_a)
}
#[test]
fn test_get_current_session_writer_should_use_existing_session() -> Result<()> {
fn get_current_session_writer_should_use_existing_session() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let current_session_1 = gb_repository.get_or_create_current_session()?;
@ -30,7 +30,7 @@ fn test_get_current_session_writer_should_use_existing_session() -> Result<()> {
}
#[test]
fn test_must_not_return_init_session() -> Result<()> {
fn must_not_return_init_session() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
assert!(gb_repository.get_current_session()?.is_none());
@ -42,7 +42,7 @@ fn test_must_not_return_init_session() -> Result<()> {
}
#[test]
fn test_must_not_flush_without_current_session() -> Result<()> {
fn must_not_flush_without_current_session() -> Result<()> {
let Case {
gb_repository,
project_repository,
@ -59,7 +59,7 @@ fn test_must_not_flush_without_current_session() -> Result<()> {
}
#[test]
fn test_non_empty_repository() -> Result<()> {
fn non_empty_repository() -> Result<()> {
let Case {
gb_repository,
project_repository,
@ -74,7 +74,7 @@ fn test_non_empty_repository() -> Result<()> {
}
#[test]
fn test_must_flush_current_session() -> Result<()> {
fn must_flush_current_session() -> Result<()> {
let Case {
gb_repository,
project_repository,
@ -93,7 +93,7 @@ fn test_must_flush_current_session() -> Result<()> {
}
#[test]
fn test_list_deltas_from_current_session() -> Result<()> {
fn list_deltas_from_current_session() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let current_session = gb_repository.get_or_create_current_session()?;
@ -124,7 +124,7 @@ fn test_list_deltas_from_current_session() -> Result<()> {
}
#[test]
fn test_list_deltas_from_flushed_session() {
fn list_deltas_from_flushed_session() {
let Case {
gb_repository,
project_repository,
@ -159,7 +159,7 @@ fn test_list_deltas_from_flushed_session() {
}
#[test]
fn test_list_files_from_current_session() {
fn list_files_from_current_session() {
let Case { gb_repository, .. } = Suite::default().new_case_with_files(HashMap::from([(
path::PathBuf::from("test.txt"),
"Hello World",
@ -177,7 +177,7 @@ fn test_list_files_from_current_session() {
}
#[test]
fn test_list_files_from_flushed_session() {
fn list_files_from_flushed_session() {
let Case {
gb_repository,
project_repository,
@ -203,9 +203,9 @@ fn test_list_files_from_flushed_session() {
}
#[tokio::test]
async fn test_remote_syncronization() {
async fn remote_syncronization() {
// first, crate a remote, pretending it's a cloud
let cloud = test_remote_repository().unwrap();
let cloud = new_test_remote_repository().unwrap();
let api_project = ApiProject {
name: "test-sync".to_string(),
description: None,
@ -298,9 +298,9 @@ async fn test_remote_syncronization() {
}
#[tokio::test]
async fn test_remote_sync_order() {
async fn remote_sync_order() {
// first, crate a remote, pretending it's a cloud
let cloud = test_remote_repository().unwrap();
let cloud = new_test_remote_repository().unwrap();
let api_project = projects::ApiProject {
name: "test-sync".to_string(),
description: None,
@ -421,7 +421,7 @@ async fn test_remote_sync_order() {
}
#[test]
fn test_gitbutler_file() {
fn gitbutler_file() {
let Case {
gb_repository,
project_repository,

View File

@ -1,27 +0,0 @@
#[cfg(not(target_os = "windows"))]
mod not_windows {
use gitbutler_app::keys::storage::Storage;
use gitbutler_app::keys::Controller;
use std::fs;
#[cfg(target_family = "unix")]
use std::os::unix::prelude::*;
use crate::Suite;
#[test]
fn test_get_or_create() {
let suite = Suite::default();
let controller = Controller::new(Storage::from_path(&suite.local_app_data));
let once = controller.get_or_create().unwrap();
let twice = controller.get_or_create().unwrap();
assert_eq!(once, twice);
// check permissions of the private key
let permissions = fs::metadata(suite.local_app_data.join("keys/ed25519"))
.unwrap()
.permissions();
let perms = format!("{:o}", permissions.mode());
assert_eq!(perms, "100600");
}
}

View File

@ -1 +1,65 @@
mod controller;
use gitbutler_app::keys::{PrivateKey, PublicKey};
mod controller {
#[cfg(not(target_os = "windows"))]
mod not_windows {
use gitbutler_app::keys::storage::Storage;
use gitbutler_app::keys::Controller;
use std::fs;
#[cfg(target_family = "unix")]
use std::os::unix::prelude::*;
use crate::Suite;
#[test]
fn get_or_create() {
let suite = Suite::default();
let controller = Controller::new(Storage::from_path(&suite.local_app_data));
let once = controller.get_or_create().unwrap();
let twice = controller.get_or_create().unwrap();
assert_eq!(once, twice);
// check permissions of the private key
let permissions = fs::metadata(suite.local_app_data.join("keys/ed25519"))
.unwrap()
.permissions();
let perms = format!("{:o}", permissions.mode());
assert_eq!(perms, "100600");
}
}
}
#[test]
fn to_from_string_private() {
let private_key = PrivateKey::generate();
let serialized = private_key.to_string();
let deserialized: PrivateKey = serialized.parse().unwrap();
assert_eq!(private_key, deserialized);
}
#[test]
fn to_from_string_public() {
let private_key = PrivateKey::generate();
let public_key = private_key.public_key();
let serialized = public_key.to_string();
let deserialized: PublicKey = serialized.parse().unwrap();
assert_eq!(public_key, deserialized);
}
#[test]
fn serde_private() {
let private_key = PrivateKey::generate();
let serialized = serde_json::to_string(&private_key).unwrap();
let deserialized: PrivateKey = serde_json::from_str(&serialized).unwrap();
assert_eq!(private_key, deserialized);
}
#[test]
fn serde_public() {
let private_key = PrivateKey::generate();
let public_key = private_key.public_key();
let serialized = serde_json::to_string(&public_key).unwrap();
let deserialized: PublicKey = serde_json::from_str(&serialized).unwrap();
assert_eq!(public_key, deserialized);
}

View File

@ -3,7 +3,7 @@ use gitbutler_app::lock::Dir;
use crate::temp_dir;
#[tokio::test]
async fn test_lock_same_instance() {
async fn lock_same_instance() {
let dir_path = temp_dir();
std::fs::write(dir_path.join("file.txt"), "").unwrap();
let dir = Dir::new(&dir_path).unwrap();
@ -45,7 +45,7 @@ async fn test_lock_same_instance() {
}
#[tokio::test]
async fn test_lock_different_instances() {
async fn lock_different_instances() {
let dir_path = temp_dir();
std::fs::write(dir_path.join("file.txt"), "").unwrap();

View File

@ -6,7 +6,7 @@ use crate::{commit_all, temp_dir, test_repository};
use anyhow::Result;
#[test]
fn test_directory_reader_read_file() -> Result<()> {
fn directory_reader_read_file() -> Result<()> {
let dir = temp_dir();
let file_path = Path::new("test.txt");
@ -19,7 +19,7 @@ fn test_directory_reader_read_file() -> Result<()> {
}
#[test]
fn test_commit_reader_read_file() -> Result<()> {
fn commit_reader_read_file() -> Result<()> {
let repository = test_repository();
let file_path = Path::new("test.txt");
@ -36,7 +36,7 @@ fn test_commit_reader_read_file() -> Result<()> {
}
#[test]
fn test_reader_list_files_should_return_relative() -> Result<()> {
fn reader_list_files_should_return_relative() -> Result<()> {
let dir = temp_dir();
fs::write(dir.join("test1.txt"), "test")?;
@ -52,7 +52,7 @@ fn test_reader_list_files_should_return_relative() -> Result<()> {
}
#[test]
fn test_reader_list_files() -> Result<()> {
fn reader_list_files() -> Result<()> {
let dir = temp_dir();
fs::write(dir.join("test.txt"), "test")?;
@ -69,7 +69,7 @@ fn test_reader_list_files() -> Result<()> {
}
#[test]
fn test_commit_reader_list_files_should_return_relative() -> Result<()> {
fn commit_reader_list_files_should_return_relative() -> Result<()> {
let repository = test_repository();
fs::write(
@ -100,7 +100,7 @@ fn test_commit_reader_list_files_should_return_relative() -> Result<()> {
}
#[test]
fn test_commit_reader_list_files() -> Result<()> {
fn commit_reader_list_files() -> Result<()> {
let repository = test_repository();
fs::write(repository.path().parent().unwrap().join("test.txt"), "test")?;
@ -129,7 +129,7 @@ fn test_commit_reader_list_files() -> Result<()> {
}
#[test]
fn test_directory_reader_exists() -> Result<()> {
fn directory_reader_exists() -> Result<()> {
let dir = temp_dir();
fs::write(dir.join("test.txt"), "test")?;
@ -142,7 +142,7 @@ fn test_directory_reader_exists() -> Result<()> {
}
#[test]
fn test_commit_reader_exists() -> Result<()> {
fn commit_reader_exists() -> Result<()> {
let repository = test_repository();
fs::write(repository.path().parent().unwrap().join("test.txt"), "test")?;
@ -159,7 +159,7 @@ fn test_commit_reader_exists() -> Result<()> {
}
#[test]
fn test_from_bytes() {
fn from_bytes() {
for (bytes, expected) in [
("test".as_bytes(), Content::UTF8("test".to_string())),
(&[0, 159, 146, 150, 159, 146, 150], Content::Binary),
@ -169,7 +169,7 @@ fn test_from_bytes() {
}
#[test]
fn test_serialize_content() {
fn serialize_content() {
for (content, expected) in [
(
Content::UTF8("test".to_string()),

View File

@ -3,7 +3,7 @@ use gitbutler_app::projects::ProjectId;
use gitbutler_app::sessions::{session, Database, Session, SessionId};
#[test]
fn test_insert_query() -> anyhow::Result<()> {
fn insert_query() -> anyhow::Result<()> {
let db = test_database();
println!("0");
let database = Database::new(db);
@ -46,7 +46,7 @@ fn test_insert_query() -> anyhow::Result<()> {
}
#[test]
fn test_update() -> anyhow::Result<()> {
fn update() -> anyhow::Result<()> {
let db = test_database();
let database = Database::new(db);

View File

@ -6,7 +6,7 @@ use crate::{Case, Suite};
use gitbutler_app::sessions::{self, session::SessionId};
#[test]
fn test_should_not_write_session_with_hash() {
fn should_not_write_session_with_hash() {
let Case { gb_repository, .. } = Suite::default().new_case();
let session = sessions::Session {
@ -27,7 +27,7 @@ fn test_should_not_write_session_with_hash() {
}
#[test]
fn test_should_write_full_session() -> Result<()> {
fn should_write_full_session() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let session = sessions::Session {
@ -68,7 +68,7 @@ fn test_should_write_full_session() -> Result<()> {
}
#[test]
fn test_should_write_partial_session() -> Result<()> {
fn should_write_partial_session() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let session = sessions::Session {

View File

@ -0,0 +1,19 @@
use gitbutler_app::types::default_true::DefaultTrue;
// DefaultTrue wraps a bool that defaults to `true`; the asserts below show it
// compares/negates like a bool and is readable and writable through Deref.
#[test]
#[allow(clippy::bool_assert_comparison)]
fn default_true() {
let default_true = DefaultTrue::default();
assert!(default_true);
assert_eq!(default_true, true);
assert_eq!(!default_true, false);
assert!(!!default_true);
// Deref gives access to the inner bool.
if !(*default_true) {
unreachable!("default_true is false")
}
// DerefMut allows overriding the default.
let mut default_true = DefaultTrue::default();
*default_true = false;
assert!(!default_true);
}

View File

@ -0,0 +1,522 @@
use gitbutler_app::git::diff;
use gitbutler_app::virtual_branches::context::hunk_with_context;
// Single-line replacement in the middle of the file: three lines of context
// are added on each side and the hunk header is recomputed accordingly.
#[test]
fn replace_line_mid_file() {
let hunk_diff = r#"@@ -8 +8 @@ default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
+SERDE = ["dep:serde", "uuid/serde"]
"#;
// NOTE(review): positional args appear to be (diff, old_start, new_start,
// is_binary?, context_lines, file lines, change type) — confirm against
// hunk_with_context's signature.
let with_ctx = hunk_with_context(
hunk_diff,
8,
8,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
let expected = r#"@@ -5,7 +5,7 @@
[features]
default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
+SERDE = ["dep:serde", "uuid/serde"]
rusqlite = ["dep:rusqlite"]
[dependencies]
"#;
// `replace("\n \n", "\n\n")` normalizes blank context lines before comparing.
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected);
assert_eq!(with_ctx.old_start, 5);
assert_eq!(with_ctx.old_lines, 7);
assert_eq!(with_ctx.new_start, 5);
assert_eq!(with_ctx.new_lines, 7);
}
// Replacement near the top of the file: context is clamped at line 1, so the
// hunk gets fewer leading context lines than requested.
#[test]
fn replace_line_top_file() {
let hunk_diff = r#"@@ -2 +2 @@
-name = "gitbutler-core"
+NAME = "gitbutler-core"
"#;
let with_ctx = hunk_with_context(
hunk_diff,
2,
2,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(
with_ctx.diff.replace("\n \n", "\n\n"),
r#"@@ -1,5 +1,5 @@
[package]
-name = "gitbutler-core"
+NAME = "gitbutler-core"
version = "0.0.0"
edition = "2021"
"#
);
assert_eq!(with_ctx.old_start, 1);
assert_eq!(with_ctx.old_lines, 5);
assert_eq!(with_ctx.new_start, 1);
assert_eq!(with_ctx.new_lines, 5);
}
// Replacement of the very first line: no context can be added above, only the
// three lines below.
#[test]
fn replace_line_start_file() {
let hunk_diff = "@@ -1 +1 @@
-[package]
+[PACKAGE]
";
let with_ctx = hunk_with_context(
hunk_diff,
1,
1,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(
with_ctx.diff.replace("\n \n", "\n\n"),
r#"@@ -1,4 +1,4 @@
-[package]
+[PACKAGE]
name = "gitbutler-core"
version = "0.0.0"
edition = "2021"
"#
);
assert_eq!(with_ctx.old_start, 1);
assert_eq!(with_ctx.old_lines, 4);
assert_eq!(with_ctx.new_start, 1);
assert_eq!(with_ctx.new_lines, 4);
}
// Replacement near the end of the file: trailing context is clamped at the
// last file line, so fewer trailing context lines appear than requested.
#[test]
fn replace_line_bottom_file() {
let hunk_diff = "@@ -13 +13 @@
-serde = { workspace = true, optional = true }
+SERDE = { workspace = true, optional = true }
";
let with_ctx = hunk_with_context(
hunk_diff,
13,
13,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(
with_ctx.diff.replace("\n \n", "\n\n"),
r#"@@ -10,5 +10,5 @@
[dependencies]
rusqlite = { workspace = true, optional = true }
-serde = { workspace = true, optional = true }
+SERDE = { workspace = true, optional = true }
uuid = { workspace = true, features = ["v4", "fast-rng"] }
"#
);
assert_eq!(with_ctx.old_start, 10);
assert_eq!(with_ctx.old_lines, 5);
assert_eq!(with_ctx.new_start, 10);
assert_eq!(with_ctx.new_lines, 5);
}
#[test]
fn replace_with_more_lines() {
let hunk_diff = r#"@@ -8 +8,4 @@
-serde = ["dep:serde", "uuid/serde"]
+one
+two
+three
+four
"#;
let with_ctx = hunk_with_context(
hunk_diff,
8,
8,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(
with_ctx.diff.replace("\n \n", "\n\n"),
r#"@@ -5,7 +5,10 @@
[features]
default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
+one
+two
+three
+four
rusqlite = ["dep:rusqlite"]
[dependencies]
"#
);
assert_eq!(with_ctx.old_start, 5);
assert_eq!(with_ctx.old_lines, 7);
assert_eq!(with_ctx.new_start, 5);
assert_eq!(with_ctx.new_lines, 10);
}
#[test]
fn replace_with_less_lines() {
let hunk_diff = r#"@@ -7,3 +7 @@
-default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
-rusqlite = ["dep:rusqlite"]
+foo = ["foo"]
"#;
let with_ctx = hunk_with_context(
hunk_diff,
7,
7,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(
with_ctx.diff.replace("\n \n", "\n\n"),
r#"@@ -4,9 +4,7 @@
edition = "2021"
[features]
-default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
-rusqlite = ["dep:rusqlite"]
+foo = ["foo"]
[dependencies]
rusqlite = { workspace = true, optional = true }
"#
);
assert_eq!(with_ctx.old_start, 4);
assert_eq!(with_ctx.old_lines, 9);
assert_eq!(with_ctx.new_start, 4);
assert_eq!(with_ctx.new_lines, 7);
}
#[test]
fn empty_string_doesnt_panic() {
let hunk_diff = "";
let with_ctx = hunk_with_context(
hunk_diff,
1,
1,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(with_ctx.diff, "");
}
#[test]
fn removed_file() {
let hunk_diff = r#"@@ -1,14 +0,0 @@
-[package]
-name = "gitbutler-core"
-version = "0.0.0"
-edition = "2021"
-
-[features]
-default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
-rusqlite = ["dep:rusqlite"]
-
-[dependencies]
-rusqlite = { workspace = true, optional = true }
-serde = { workspace = true, optional = true }
-uuid = { workspace = true, features = ["v4", "fast-rng"] }
"#;
let with_ctx = hunk_with_context(
hunk_diff,
1,
0,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), hunk_diff);
assert_eq!(with_ctx.old_start, 1);
assert_eq!(with_ctx.old_lines, 14);
assert_eq!(with_ctx.new_start, 0);
assert_eq!(with_ctx.new_lines, 0);
}
#[test]
fn new_file() {
let hunk_diff = "@@ -0,0 +1,5 @@
+line 1
+line 2
+line 3
+line 4
+line 5
";
let with_ctx = hunk_with_context(
hunk_diff,
0,
1,
false,
3,
&Vec::new(),
diff::ChangeType::Added,
);
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), hunk_diff);
assert_eq!(with_ctx.old_start, 0);
assert_eq!(with_ctx.old_lines, 0);
assert_eq!(with_ctx.new_start, 1);
assert_eq!(with_ctx.new_lines, 5);
}
#[test]
fn only_add_lines() {
let hunk_diff = "@@ -8,0 +9,3 @@
+one
+two
+three
";
let with_ctx = hunk_with_context(
hunk_diff,
8,
9,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
let expected = r#"@@ -6,6 +6,9 @@
[features]
default = ["serde", "rusqlite"]
serde = ["dep:serde", "uuid/serde"]
+one
+two
+three
rusqlite = ["dep:rusqlite"]
[dependencies]
"#;
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected);
assert_eq!(with_ctx.old_start, 6);
assert_eq!(with_ctx.old_lines, 6);
assert_eq!(with_ctx.new_start, 6);
assert_eq!(with_ctx.new_lines, 9);
}
#[test]
fn only_add_lines_with_additions_below() {
let hunk_diff = "@@ -8,0 +13,3 @@
+one
+two
+three
";
let with_ctx = hunk_with_context(
hunk_diff,
8,
13,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
let expected = r#"@@ -6,6 +10,9 @@
[features]
default = ["serde", "rusqlite"]
serde = ["dep:serde", "uuid/serde"]
+one
+two
+three
rusqlite = ["dep:rusqlite"]
[dependencies]
"#;
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected);
assert_eq!(with_ctx.old_start, 6);
assert_eq!(with_ctx.old_lines, 6);
assert_eq!(with_ctx.new_start, 10);
assert_eq!(with_ctx.new_lines, 9);
}
#[test]
fn only_remove_lines() {
let hunk_diff = r#"@@ -7,3 +6,0 @@
-default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
-rusqlite = ["dep:rusqlite"]
"#;
let expected = r#"@@ -4,9 +4,6 @@
edition = "2021"
[features]
-default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
-rusqlite = ["dep:rusqlite"]
[dependencies]
rusqlite = { workspace = true, optional = true }
"#;
let with_ctx = hunk_with_context(
hunk_diff,
7,
6,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected);
assert_eq!(with_ctx.old_start, 4);
assert_eq!(with_ctx.old_lines, 9);
assert_eq!(with_ctx.new_start, 4);
assert_eq!(with_ctx.new_lines, 6);
}
#[test]
fn only_remove_lines_with_additions_below() {
let hunk_diff = r#"@@ -7,3 +10,0 @@
-default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
-rusqlite = ["dep:rusqlite"]
"#;
let expected = r#"@@ -4,9 +8,6 @@
edition = "2021"
[features]
-default = ["serde", "rusqlite"]
-serde = ["dep:serde", "uuid/serde"]
-rusqlite = ["dep:rusqlite"]
[dependencies]
rusqlite = { workspace = true, optional = true }
"#;
let with_ctx = hunk_with_context(
hunk_diff,
7,
10,
false,
3,
&file_lines(),
diff::ChangeType::Added,
);
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected);
assert_eq!(with_ctx.old_start, 4);
assert_eq!(with_ctx.old_lines, 9);
assert_eq!(with_ctx.new_start, 8);
assert_eq!(with_ctx.new_lines, 6);
}
#[test]
fn weird_testcase() {
let hunk_diff = "@@ -11,2 +10,0 @@
-
- @waiting_users = User.where(approved: false).count
";
let with_ctx = hunk_with_context(
hunk_diff,
11,
10,
false,
3,
&file_lines_2(),
diff::ChangeType::Added,
);
let expected = "@@ -8,8 +8,6 @@
.order(:created_at)
.page params[:page]
@total = @registrations.total_count
-
- @waiting_users = User.where(approved: false).count
end
def invite
";
assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected);
assert_eq!(with_ctx.old_start, 8);
assert_eq!(with_ctx.old_lines, 8);
assert_eq!(with_ctx.new_start, 8);
assert_eq!(with_ctx.new_lines, 6);
}
#[test]
fn new_line_added() {
let hunk_diff = "@@ -2,0 +3 @@ alias(
+ newstuff
";
let with_ctx = hunk_with_context(
hunk_diff,
2,
3,
false,
3,
&file_lines_3(),
diff::ChangeType::Added,
);
let expected = r#"@@ -1,4 +1,5 @@
alias(
name = "rdeps",
+ newstuff
actual = "//java/com/videlov/rdeps:rdeps",
)
"#;
assert_eq!(with_ctx.diff, expected);
}
/// Fixture: the lines of the "old" Cargo manifest the hunk tests diff against.
fn file_lines() -> Vec<&'static str> {
    vec![
        "[package]",
        r#"name = "gitbutler-core""#,
        r#"version = "0.0.0""#,
        r#"edition = "2021""#,
        "[features]",
        r#"default = ["serde", "rusqlite"]"#,
        r#"serde = ["dep:serde", "uuid/serde"]"#,
        r#"rusqlite = ["dep:rusqlite"]"#,
        "[dependencies]",
        "rusqlite = { workspace = true, optional = true }",
        "serde = { workspace = true, optional = true }",
        r#"uuid = { workspace = true, features = ["v4", "fast-rng"] }"#,
    ]
}
/// Fixture: lines of a Rails controller, used by `weird_testcase`.
fn file_lines_2() -> Vec<&'static str> {
    vec![
        "class Admin::WaitingController < Admin::AdminController",
        "def index",
        "@registrations = Registration.where(invited_at: nil)",
        "if params[:q]",
        "@registrations = @registrations.where(\"email LIKE ?\", \"%#{params[:q]}%\")",
        "end",
        "@registrations = @registrations.includes(:invite_code)",
        ".order(:created_at)",
        ".page params[:page]",
        "@total = @registrations.total_count",
        "@waiting_users = User.where(approved: false).count",
        "end",
        "def invite",
        "if params[:id]",
        "@registrations = Registration.where(id: params[:id])",
    ]
}
/// Fixture: a tiny Bazel-style file, used by `new_line_added`.
fn file_lines_3() -> Vec<&'static str> {
    vec![
        "alias(",
        r#"name = "rdeps","#,
        r#"actual = "//java/com/videlov/rdeps:rdeps","#,
        ")",
    ]
}

View File

@ -0,0 +1,151 @@
use gitbutler_app::virtual_branches::branch::OwnershipClaim;
// Tests for `OwnershipClaim`, whose textual form is
// "path:start1-end1,start2-end2,...".
/// Happy-path parse: path plus two hunk ranges.
#[test]
fn parse_ownership() {
let ownership: OwnershipClaim = "foo/bar.rs:1-2,4-5".parse().unwrap();
assert_eq!(
ownership,
OwnershipClaim {
file_path: "foo/bar.rs".into(),
hunks: vec![(1..=2).into(), (4..=5).into()]
}
);
}
/// The range list is split off at the LAST ':' — judging by this case — so
/// file names that themselves contain ':' still parse.
#[test]
fn parse_ownership_tricky_file_name() {
assert_eq!("file:name:1-2,4-5".parse::<OwnershipClaim>().unwrap(), {
OwnershipClaim {
file_path: "file:name".into(),
hunks: vec![(1..=2).into(), (4..=5).into()],
}
});
}
/// A bare path with no ranges at all is rejected.
#[test]
fn parse_ownership_no_ranges() {
"foo/bar.rs".parse::<OwnershipClaim>().unwrap_err();
}
/// Display and FromStr round-trip each other.
#[test]
fn ownership_to_from_string() {
let ownership = OwnershipClaim {
file_path: "foo/bar.rs".into(),
hunks: vec![(1..=2).into(), (4..=5).into()],
};
assert_eq!(ownership.to_string(), "foo/bar.rs:1-2,4-5".to_string());
assert_eq!(
ownership.to_string().parse::<OwnershipClaim>().unwrap(),
ownership
);
}
/// `plus` merges two claims: per the cases below, claims for a different
/// file leave `a` untouched, `b`'s hunks end up in front, and hunks already
/// present are not duplicated.
#[test]
fn plus() {
vec![
("file.txt:1-10", "another.txt:1-5", "file.txt:1-10"),
("file.txt:1-10,3-14", "file.txt:3-14", "file.txt:3-14,1-10"),
("file.txt:5-10", "file.txt:1-5", "file.txt:1-5,5-10"),
("file.txt:1-10", "file.txt:1-5", "file.txt:1-5,1-10"),
("file.txt:1-5,2-2", "file.txt:1-10", "file.txt:1-10,1-5,2-2"),
(
"file.txt:1-10",
"file.txt:8-15,20-25",
"file.txt:20-25,8-15,1-10",
),
("file.txt:1-10", "file.txt:1-10", "file.txt:1-10"),
("file.txt:1-10,3-15", "file.txt:1-10", "file.txt:1-10,3-15"),
]
.into_iter()
.map(|(a, b, expected)| {
(
a.parse::<OwnershipClaim>().unwrap(),
b.parse::<OwnershipClaim>().unwrap(),
expected.parse::<OwnershipClaim>().unwrap(),
)
})
.for_each(|(a, b, expected)| {
let got = a.plus(&b);
assert_eq!(
got, expected,
"{} plus {}, expected {}, got {}",
a, b, expected, got
);
});
}
/// `minus` returns a (taken, remaining) pair of Options; per the cases,
/// only exact hunk matches are taken — overlap alone is not enough.
#[test]
fn minus() {
vec![
(
"file.txt:1-10",
"another.txt:1-5",
(None, Some("file.txt:1-10")),
),
(
"file.txt:1-10",
"file.txt:1-5",
(None, Some("file.txt:1-10")),
),
(
"file.txt:1-10",
"file.txt:11-15",
(None, Some("file.txt:1-10")),
),
(
"file.txt:1-10",
"file.txt:1-10",
(Some("file.txt:1-10"), None),
),
(
"file.txt:1-10,11-15",
"file.txt:11-15",
(Some("file.txt:11-15"), Some("file.txt:1-10")),
),
(
"file.txt:1-10,11-15,15-17",
"file.txt:1-10,15-17",
(Some("file.txt:1-10,15-17"), Some("file.txt:11-15")),
),
]
.into_iter()
.map(|(a, b, expected)| {
(
a.parse::<OwnershipClaim>().unwrap(),
b.parse::<OwnershipClaim>().unwrap(),
(
expected.0.map(|s| s.parse::<OwnershipClaim>().unwrap()),
expected.1.map(|s| s.parse::<OwnershipClaim>().unwrap()),
),
)
})
.for_each(|(a, b, expected)| {
let got = a.minus(&b);
assert_eq!(
got, expected,
"{} minus {}, expected {:?}, got {:?}",
a, b, expected, got
);
});
}
/// Claim equality is order-sensitive: the same hunks in a different order
/// compare unequal.
#[test]
fn equal() {
vec![
("file.txt:1-10", "file.txt:1-10", true),
("file.txt:1-10", "file.txt:1-11", false),
("file.txt:1-10,11-15", "file.txt:11-15,1-10", false),
("file.txt:1-10,11-15", "file.txt:1-10,11-15", true),
]
.into_iter()
.map(|(a, b, expected)| {
(
a.parse::<OwnershipClaim>().unwrap(),
b.parse::<OwnershipClaim>().unwrap(),
expected,
)
})
.for_each(|(a, b, expected)| {
assert_eq!(a == b, expected, "{} == {}, expected {}", a, b, expected);
});
}

View File

@ -0,0 +1,89 @@
use gitbutler_app::virtual_branches::branch::Hunk;
// Tests for `Hunk` parsing/printing. The textual form appears to be
// "start-end[-hash][-timestamp]" judging by the cases below.
/// "start-end" round-trips through Display.
#[test]
fn to_from_string() {
let hunk = "1-2".parse::<Hunk>().unwrap();
assert_eq!("1-2", hunk.to_string());
}
/// start > end is rejected.
#[test]
fn parse_invalid() {
"3-2".parse::<Hunk>().unwrap_err();
}
/// A third dash-separated field is parsed as the hash.
#[test]
fn parse_with_hash() {
assert_eq!(
"2-3-hash".parse::<Hunk>().unwrap(),
Hunk::new(2, 3, Some("hash".to_string()), None).unwrap()
);
}
/// An empty hash slot followed by digits ("2-3--123") yields a timestamp.
#[test]
fn parse_with_timestamp() {
assert_eq!(
"2-3--123".parse::<Hunk>().unwrap(),
Hunk::new(2, 3, None, Some(123)).unwrap()
);
}
/// NOTE(review): exact duplicate of `parse_invalid` above — candidate for
/// removal or for covering a distinct invalid input instead.
#[test]
fn parse_invalid_2() {
"3-2".parse::<Hunk>().unwrap_err();
}
/// The timestamp is printed even when no hash is present (empty hash slot).
#[test]
fn to_string_no_hash() {
assert_eq!(
"1-2--123",
Hunk::new(1, 2, None, Some(123)).unwrap().to_string()
);
}
/// `Hunk` equality, per the cases below: the start-end range must match;
/// hashes only matter when BOTH sides carry one (then they must be equal);
/// a hash on just one side is ignored.
#[test]
fn eq() {
    // (left, right, expected-equality). A plain array suffices here —
    // `vec!` would heap-allocate for nothing (clippy::useless_vec).
    for (a, b, expected) in [
        (
            "1-2".parse::<Hunk>().unwrap(),
            "1-2".parse::<Hunk>().unwrap(),
            true,
        ),
        (
            "1-2".parse::<Hunk>().unwrap(),
            "2-3".parse::<Hunk>().unwrap(),
            false,
        ),
        (
            "1-2-abc".parse::<Hunk>().unwrap(),
            "1-2-abc".parse::<Hunk>().unwrap(),
            true,
        ),
        (
            "1-2-abc".parse::<Hunk>().unwrap(),
            "2-3-abc".parse::<Hunk>().unwrap(),
            false,
        ),
        (
            "1-2".parse::<Hunk>().unwrap(),
            "1-2-abc".parse::<Hunk>().unwrap(),
            true,
        ),
        (
            "1-2-abc".parse::<Hunk>().unwrap(),
            "1-2".parse::<Hunk>().unwrap(),
            true,
        ),
        (
            "1-2-abc".parse::<Hunk>().unwrap(),
            "1-2-bcd".parse::<Hunk>().unwrap(),
            false,
        ),
        (
            "1-2-abc".parse::<Hunk>().unwrap(),
            "2-3-bcd".parse::<Hunk>().unwrap(),
            false,
        ),
    ] {
        // Typo fix in the failure message: "comapring" -> "comparing".
        assert_eq!(a == b, expected, "comparing {} and {}", a, b);
    }
}

View File

@ -1,5 +1,8 @@
use gitbutler_app::virtual_branches::Branch;
mod context;
mod file_ownership;
mod hunk;
mod ownership;
mod reader;
mod writer;

View File

@ -0,0 +1,284 @@
use gitbutler_app::virtual_branches::branch::{
reconcile_claims, BranchOwnershipClaims, Hunk, OwnershipClaim,
};
use gitbutler_app::virtual_branches::Branch;
use std::{path::PathBuf, vec};
/// `reconcile_claims` resolves a new claim against all branches: branch "a"
/// starts owning foo:1-3 and foo:4-6, branch "b" owns foo:7-9. After "b"
/// claims 4-6 and 7-9, "a" is left with only 1-3 and "b" holds 4-6 plus 7-9;
/// one `ClaimOutcome` is produced per input branch, in the same order.
/// (The "9,7"/"4,6" hash strings are just fixture labels.)
#[test]
fn reconcile_ownership_simple() {
let branch_a = Branch {
name: "a".to_string(),
ownership: BranchOwnershipClaims {
claims: vec![OwnershipClaim {
file_path: PathBuf::from("foo"),
hunks: vec![
Hunk {
start: 1,
end: 3,
hash: Some("1,3".to_string()),
timestamp_ms: None,
},
Hunk {
start: 4,
end: 6,
hash: Some("4,6".to_string()),
timestamp_ms: None,
},
],
}],
},
applied: true,
..Default::default()
};
let branch_b = Branch {
name: "b".to_string(),
ownership: BranchOwnershipClaims {
claims: vec![OwnershipClaim {
file_path: PathBuf::from("foo"),
hunks: vec![Hunk {
start: 7,
end: 9,
hash: Some("7,9".to_string()),
timestamp_ms: None,
}],
}],
},
applied: true,
..Default::default()
};
let all_branches: Vec<Branch> = vec![branch_a.clone(), branch_b.clone()];
// Branch "b" claims 4-6 (currently owned by "a") and 7-9 (already its own).
let claim: Vec<OwnershipClaim> = vec![OwnershipClaim {
file_path: PathBuf::from("foo"),
hunks: vec![
Hunk {
start: 4,
end: 6,
hash: Some("4,6".to_string()),
timestamp_ms: None,
},
Hunk {
start: 7,
end: 9,
hash: Some("9,7".to_string()),
timestamp_ms: None,
},
],
}];
let claim_outcomes = reconcile_claims(all_branches.clone(), &branch_b, &claim).unwrap();
assert_eq!(claim_outcomes.len(), all_branches.len());
assert_eq!(claim_outcomes[0].updated_branch.id, branch_a.id);
assert_eq!(claim_outcomes[1].updated_branch.id, branch_b.id);
// "a" loses 4-6 and keeps only 1-3.
assert_eq!(
claim_outcomes[0].updated_branch.ownership,
BranchOwnershipClaims {
claims: vec![OwnershipClaim {
file_path: PathBuf::from("foo"),
hunks: vec![Hunk {
start: 1,
end: 3,
hash: Some("1,3".to_string()),
timestamp_ms: None,
},],
}],
}
);
// "b" ends up with exactly the claimed hunks.
assert_eq!(
claim_outcomes[1].updated_branch.ownership,
BranchOwnershipClaims {
claims: vec![OwnershipClaim {
file_path: PathBuf::from("foo"),
hunks: vec![
Hunk {
start: 4,
end: 6,
hash: Some("4,6".to_string()),
timestamp_ms: None,
},
Hunk {
start: 7,
end: 9,
hash: Some("9,7".to_string()),
timestamp_ms: None,
},
],
}],
}
);
}
// Tests for `BranchOwnershipClaims`: a newline-separated list of
// `OwnershipClaim`s ("path:ranges"), one claim per line.
/// Multi-line parse yields one claim per line, in order.
#[test]
fn ownership() {
let ownership = "src/main.rs:0-100\nsrc/main2.rs:200-300".parse::<BranchOwnershipClaims>();
assert!(ownership.is_ok());
let ownership = ownership.unwrap();
assert_eq!(ownership.claims.len(), 2);
assert_eq!(
ownership.claims[0],
"src/main.rs:0-100".parse::<OwnershipClaim>().unwrap()
);
assert_eq!(
ownership.claims[1],
"src/main2.rs:200-300".parse::<OwnershipClaim>().unwrap()
);
}
/// NOTE(review): exact duplicate of `ownership` above — candidate for
/// removal or for covering a distinct input.
#[test]
fn ownership_2() {
let ownership = "src/main.rs:0-100\nsrc/main2.rs:200-300".parse::<BranchOwnershipClaims>();
assert!(ownership.is_ok());
let ownership = ownership.unwrap();
assert_eq!(ownership.claims.len(), 2);
assert_eq!(
ownership.claims[0],
"src/main.rs:0-100".parse::<OwnershipClaim>().unwrap()
);
assert_eq!(
ownership.claims[1],
"src/main2.rs:200-300".parse::<OwnershipClaim>().unwrap()
);
}
/// `put` on an already-claimed file merges into that claim, with the new
/// hunks first.
#[test]
fn put() {
let mut ownership = "src/main.rs:0-100"
.parse::<BranchOwnershipClaims>()
.unwrap();
ownership.put(&"src/main.rs:200-300".parse::<OwnershipClaim>().unwrap());
assert_eq!(ownership.claims.len(), 1);
assert_eq!(
ownership.claims[0],
"src/main.rs:200-300,0-100"
.parse::<OwnershipClaim>()
.unwrap()
);
}
/// `put` for a previously unclaimed file prepends a fresh claim.
#[test]
fn put_2() {
let mut ownership = "src/main.rs:0-100"
.parse::<BranchOwnershipClaims>()
.unwrap();
ownership.put(&"src/main.rs2:200-300".parse::<OwnershipClaim>().unwrap());
assert_eq!(ownership.claims.len(), 2);
assert_eq!(
ownership.claims[0],
"src/main.rs2:200-300".parse::<OwnershipClaim>().unwrap()
);
assert_eq!(
ownership.claims[1],
"src/main.rs:0-100".parse::<OwnershipClaim>().unwrap()
);
}
/// `put` moves the touched file's claim to the front of the list.
#[test]
fn put_3() {
let mut ownership = "src/main.rs:0-100\nsrc/main2.rs:100-200"
.parse::<BranchOwnershipClaims>()
.unwrap();
ownership.put(&"src/main2.rs:200-300".parse::<OwnershipClaim>().unwrap());
assert_eq!(ownership.claims.len(), 2);
assert_eq!(
ownership.claims[0],
"src/main2.rs:200-300,100-200"
.parse::<OwnershipClaim>()
.unwrap()
);
assert_eq!(
ownership.claims[1],
"src/main.rs:0-100".parse::<OwnershipClaim>().unwrap()
);
}
/// Re-putting an identical claim does not duplicate its hunks, but still
/// moves the file's claim to the front.
#[test]
fn put_4() {
let mut ownership = "src/main.rs:0-100\nsrc/main2.rs:100-200"
.parse::<BranchOwnershipClaims>()
.unwrap();
ownership.put(&"src/main2.rs:100-200".parse::<OwnershipClaim>().unwrap());
assert_eq!(ownership.claims.len(), 2);
assert_eq!(
ownership.claims[0],
"src/main2.rs:100-200".parse::<OwnershipClaim>().unwrap()
);
assert_eq!(
ownership.claims[1],
"src/main.rs:0-100".parse::<OwnershipClaim>().unwrap()
);
}
/// Idempotent `put` on a single-claim list.
/// NOTE(review): numbering jumps from `put_4` to `put_7` — tests 5/6 appear
/// to have been removed or live elsewhere; consider renumbering.
#[test]
fn put_7() {
let mut ownership = "src/main.rs:100-200"
.parse::<BranchOwnershipClaims>()
.unwrap();
ownership.put(&"src/main.rs:100-200".parse::<OwnershipClaim>().unwrap());
assert_eq!(ownership.claims.len(), 1);
assert_eq!(
ownership.claims[0],
"src/main.rs:100-200".parse::<OwnershipClaim>().unwrap()
);
}
/// `take` removes the matching hunks and returns them as claims.
#[test]
fn take_1() {
let mut ownership = "src/main.rs:100-200,200-300"
.parse::<BranchOwnershipClaims>()
.unwrap();
let taken = ownership.take(&"src/main.rs:100-200".parse::<OwnershipClaim>().unwrap());
assert_eq!(ownership.claims.len(), 1);
assert_eq!(
ownership.claims[0],
"src/main.rs:200-300".parse::<OwnershipClaim>().unwrap()
);
assert_eq!(
taken,
vec!["src/main.rs:100-200".parse::<OwnershipClaim>().unwrap()]
);
}
/// Equality is order-sensitive across files: the same claims in a
/// different order compare unequal.
#[test]
fn equal() {
for (a, b, expected) in vec![
(
"src/main.rs:100-200"
.parse::<BranchOwnershipClaims>()
.unwrap(),
"src/main.rs:100-200"
.parse::<BranchOwnershipClaims>()
.unwrap(),
true,
),
(
"src/main.rs:100-200\nsrc/main1.rs:300-400\n"
.parse::<BranchOwnershipClaims>()
.unwrap(),
"src/main.rs:100-200"
.parse::<BranchOwnershipClaims>()
.unwrap(),
false,
),
(
"src/main.rs:100-200\nsrc/main1.rs:300-400\n"
.parse::<BranchOwnershipClaims>()
.unwrap(),
"src/main.rs:100-200\nsrc/main1.rs:300-400\n"
.parse::<BranchOwnershipClaims>()
.unwrap(),
true,
),
(
"src/main.rs:300-400\nsrc/main1.rs:100-200\n"
.parse::<BranchOwnershipClaims>()
.unwrap(),
"src/main1.rs:100-200\nsrc/main.rs:300-400\n"
.parse::<BranchOwnershipClaims>()
.unwrap(),
false,
),
] {
assert_eq!(a == b, expected, "{:#?} == {:#?}", a, b);
}
}

View File

@ -58,7 +58,7 @@ fn test_branch() -> Branch {
}
#[test]
fn test_read_not_found() -> Result<()> {
fn read_not_found() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let session = gb_repository.get_or_create_current_session()?;
@ -73,7 +73,7 @@ fn test_read_not_found() -> Result<()> {
}
#[test]
fn test_read_override() -> Result<()> {
fn read_override() -> Result<()> {
let Case {
gb_repository,
project,

View File

@ -15,7 +15,7 @@ use super::*;
static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_branch() -> Branch {
fn new_test_branch() -> Branch {
TEST_INDEX.fetch_add(1, Ordering::Relaxed);
Branch {
@ -58,14 +58,14 @@ fn test_branch() -> Branch {
}
#[test]
fn test_write_branch() -> anyhow::Result<()> {
fn write_branch() -> anyhow::Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let mut branch = new_test_branch();
let writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
writer.write(&mut branch)?;
@ -123,14 +123,14 @@ fn test_write_branch() -> anyhow::Result<()> {
}
#[test]
fn test_should_create_session() -> anyhow::Result<()> {
fn should_create_session() -> anyhow::Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let mut branch = new_test_branch();
let writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
writer.write(&mut branch)?;
@ -141,14 +141,14 @@ fn test_should_create_session() -> anyhow::Result<()> {
}
#[test]
fn test_should_update() -> anyhow::Result<()> {
fn should_update() -> anyhow::Result<()> {
let Case {
gb_repository,
project,
..
} = Suite::default().new_case();
let mut branch = test_branch();
let mut branch = new_test_branch();
let writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
writer.write(&mut branch)?;

View File

@ -8,7 +8,7 @@ use crate::{Case, Suite};
static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_branch() -> virtual_branches::branch::Branch {
fn new_test_branch() -> virtual_branches::branch::Branch {
TEST_INDEX.fetch_add(1, Ordering::Relaxed);
virtual_branches::branch::Branch {
@ -47,7 +47,7 @@ fn test_branch() -> virtual_branches::branch::Branch {
static TEST_TARGET_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
fn test_target() -> virtual_branches::target::Target {
fn new_test_target() -> virtual_branches::target::Target {
virtual_branches::target::Target {
branch: format!(
"refs/remotes/branch name{}/remote name {}",
@ -67,7 +67,7 @@ fn test_target() -> virtual_branches::target::Target {
}
#[test]
fn test_empty_iterator() -> Result<()> {
fn empty_iterator() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let session = gb_repository.get_or_create_current_session()?;
@ -81,7 +81,7 @@ fn test_empty_iterator() -> Result<()> {
}
#[test]
fn test_iterate_all() -> Result<()> {
fn iterate_all() -> Result<()> {
let Case {
gb_repository,
project,
@ -90,15 +90,15 @@ fn test_iterate_all() -> Result<()> {
let target_writer =
gitbutler_app::virtual_branches::target::Writer::new(&gb_repository, project.gb_dir())?;
target_writer.write_default(&test_target())?;
target_writer.write_default(&new_test_target())?;
let branch_writer =
gitbutler_app::virtual_branches::branch::Writer::new(&gb_repository, project.gb_dir())?;
let mut branch_1 = test_branch();
let mut branch_1 = new_test_branch();
branch_writer.write(&mut branch_1)?;
let mut branch_2 = test_branch();
let mut branch_2 = new_test_branch();
branch_writer.write(&mut branch_2)?;
let mut branch_3 = test_branch();
let mut branch_3 = new_test_branch();
branch_writer.write(&mut branch_3)?;
let session = gb_repository.get_current_session()?.unwrap();

View File

@ -56,7 +56,7 @@ pub fn set_test_target(
}
#[test]
fn test_commit_on_branch_then_change_file_then_get_status() -> Result<()> {
fn commit_on_branch_then_change_file_then_get_status() -> Result<()> {
let Case {
project,
project_repository,
@ -123,7 +123,7 @@ fn test_commit_on_branch_then_change_file_then_get_status() -> Result<()> {
}
#[test]
fn test_signed_commit() -> Result<()> {
fn signed_commit() -> Result<()> {
let suite = Suite::default();
let Case {
project,
@ -179,7 +179,7 @@ fn test_signed_commit() -> Result<()> {
}
#[test]
fn test_track_binary_files() -> Result<()> {
fn track_binary_files() -> Result<()> {
let Case {
project_repository,
project,
@ -307,7 +307,7 @@ fn test_track_binary_files() -> Result<()> {
}
#[test]
fn test_create_branch_with_ownership() -> Result<()> {
fn create_branch_with_ownership() -> Result<()> {
let Case {
project,
project_repository,
@ -362,7 +362,7 @@ fn test_create_branch_with_ownership() -> Result<()> {
}
#[test]
fn test_create_branch_in_the_middle() -> Result<()> {
fn create_branch_in_the_middle() -> Result<()> {
let Case {
project_repository,
gb_repository,
@ -409,7 +409,7 @@ fn test_create_branch_in_the_middle() -> Result<()> {
}
#[test]
fn test_create_branch_no_arguments() -> Result<()> {
fn create_branch_no_arguments() -> Result<()> {
let Case {
project_repository,
gb_repository,
@ -441,7 +441,7 @@ fn test_create_branch_no_arguments() -> Result<()> {
}
#[test]
fn test_hunk_expantion() -> Result<()> {
fn hunk_expantion() -> Result<()> {
let Case {
project_repository,
project,
@ -524,7 +524,7 @@ fn test_hunk_expantion() -> Result<()> {
}
#[test]
fn test_get_status_files_by_branch_no_hunks_no_branches() -> Result<()> {
fn get_status_files_by_branch_no_hunks_no_branches() -> Result<()> {
let Case {
project_repository,
gb_repository,
@ -543,7 +543,7 @@ fn test_get_status_files_by_branch_no_hunks_no_branches() -> Result<()> {
}
#[test]
fn test_get_status_files_by_branch() -> Result<()> {
fn get_status_files_by_branch() -> Result<()> {
let Case {
project_repository,
project,
@ -587,7 +587,7 @@ fn test_get_status_files_by_branch() -> Result<()> {
}
#[test]
fn test_move_hunks_multiple_sources() -> Result<()> {
fn move_hunks_multiple_sources() -> Result<()> {
let Case {
project_repository,
project,
@ -697,7 +697,7 @@ fn test_move_hunks_multiple_sources() -> Result<()> {
}
#[test]
fn test_move_hunks_partial_explicitly() -> Result<()> {
fn move_hunks_partial_explicitly() -> Result<()> {
let Case {
project_repository,
project,
@ -787,7 +787,7 @@ fn test_move_hunks_partial_explicitly() -> Result<()> {
}
#[test]
fn test_add_new_hunk_to_the_end() -> Result<()> {
fn add_new_hunk_to_the_end() -> Result<()> {
let Case {
project_repository,
project,
@ -842,7 +842,7 @@ fn test_add_new_hunk_to_the_end() -> Result<()> {
}
#[test]
fn test_merge_vbranch_upstream_clean_rebase() -> Result<()> {
fn merge_vbranch_upstream_clean_rebase() -> Result<()> {
let suite = Suite::default();
let Case {
project_repository,
@ -967,7 +967,7 @@ fn test_merge_vbranch_upstream_clean_rebase() -> Result<()> {
}
#[test]
fn test_merge_vbranch_upstream_conflict() -> Result<()> {
fn merge_vbranch_upstream_conflict() -> Result<()> {
let Case {
project_repository,
project,
@ -1123,7 +1123,7 @@ fn test_merge_vbranch_upstream_conflict() -> Result<()> {
}
#[test]
fn test_unapply_ownership_partial() -> Result<()> {
fn unapply_ownership_partial() -> Result<()> {
let Case {
project_repository,
project,
@ -1277,7 +1277,7 @@ fn unapply_branch() -> Result<()> {
}
#[test]
fn test_apply_unapply_added_deleted_files() -> Result<()> {
fn apply_unapply_added_deleted_files() -> Result<()> {
let Case {
project,
project_repository,
@ -1355,7 +1355,7 @@ fn test_apply_unapply_added_deleted_files() -> Result<()> {
}
#[test]
fn test_detect_mergeable_branch() -> Result<()> {
fn detect_mergeable_branch() -> Result<()> {
let Case {
project,
project_repository,
@ -1549,7 +1549,7 @@ fn test_detect_mergeable_branch() -> Result<()> {
}
#[test]
fn test_upstream_integrated_vbranch() -> Result<()> {
fn upstream_integrated_vbranch() -> Result<()> {
// ok, we need a vbranch with some work and an upstream target that also includes that work, but the base is behind
// plus a branch with work not in upstream so we can see that it is not included in the vbranch
@ -1714,7 +1714,7 @@ fn test_upstream_integrated_vbranch() -> Result<()> {
}
#[test]
fn test_commit_same_hunk_twice() -> Result<()> {
fn commit_same_hunk_twice() -> Result<()> {
let Case {
project_repository,
project,
@ -1819,7 +1819,7 @@ fn test_commit_same_hunk_twice() -> Result<()> {
}
#[test]
fn test_commit_same_file_twice() -> Result<()> {
fn commit_same_file_twice() -> Result<()> {
let Case {
project_repository,
project,
@ -1924,7 +1924,7 @@ fn test_commit_same_file_twice() -> Result<()> {
}
#[test]
fn test_commit_partial_by_hunk() -> Result<()> {
fn commit_partial_by_hunk() -> Result<()> {
let Case {
project_repository,
project,
@ -2006,7 +2006,7 @@ fn test_commit_partial_by_hunk() -> Result<()> {
}
#[test]
fn test_commit_partial_by_file() -> Result<()> {
fn commit_partial_by_file() -> Result<()> {
let Case {
project_repository,
project,
@ -2080,7 +2080,7 @@ fn test_commit_partial_by_file() -> Result<()> {
}
#[test]
fn test_commit_add_and_delete_files() -> Result<()> {
fn commit_add_and_delete_files() -> Result<()> {
let Case {
project_repository,
project,
@ -2155,7 +2155,7 @@ fn test_commit_add_and_delete_files() -> Result<()> {
#[test]
#[cfg(target_family = "unix")]
fn test_commit_executable_and_symlinks() -> Result<()> {
fn commit_executable_and_symlinks() -> Result<()> {
let Case {
project_repository,
project,
@ -2279,7 +2279,7 @@ fn tree_to_entry_list(
}
#[test]
fn test_verify_branch_commits_to_integration() -> Result<()> {
fn verify_branch_commits_to_integration() -> Result<()> {
let Case {
project_repository,
project,
@ -2314,7 +2314,7 @@ fn test_verify_branch_commits_to_integration() -> Result<()> {
}
#[test]
fn test_verify_branch_not_integration() -> Result<()> {
fn verify_branch_not_integration() -> Result<()> {
let Case {
project_repository,
gb_repository,
@ -2340,7 +2340,7 @@ fn test_verify_branch_not_integration() -> Result<()> {
}
#[test]
fn test_pre_commit_hook_rejection() -> Result<()> {
fn pre_commit_hook_rejection() -> Result<()> {
let suite = Suite::default();
let Case {
project,
@ -2403,7 +2403,7 @@ fn test_pre_commit_hook_rejection() -> Result<()> {
}
#[test]
fn test_post_commit_hook() -> Result<()> {
fn post_commit_hook() -> Result<()> {
let suite = Suite::default();
let Case {
project,
@ -2466,7 +2466,7 @@ fn test_post_commit_hook() -> Result<()> {
}
#[test]
fn test_commit_msg_hook_rejection() -> Result<()> {
fn commit_msg_hook_rejection() -> Result<()> {
let suite = Suite::default();
let Case {
project,

View File

@ -52,7 +52,7 @@ fn test_branch() -> gitbutler_app::virtual_branches::branch::Branch {
}
#[test]
fn test_read_not_found() -> Result<()> {
fn read_not_found() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let session = gb_repository.get_or_create_current_session()?;
@ -67,7 +67,7 @@ fn test_read_not_found() -> Result<()> {
}
#[test]
fn test_read_deprecated_format() -> Result<()> {
fn read_deprecated_format() -> Result<()> {
let Case { gb_repository, .. } = Suite::default().new_case();
let writer = gitbutler_app::writer::DirWriter::open(gb_repository.root())?;
@ -104,7 +104,7 @@ fn test_read_deprecated_format() -> Result<()> {
}
#[test]
fn test_read_override_target() -> Result<()> {
fn read_override_target() -> Result<()> {
let Case {
gb_repository,
project,

View File

@ -55,7 +55,7 @@ fn test_branch() -> branch::Branch {
}
#[test]
fn test_write() -> anyhow::Result<()> {
fn write() -> anyhow::Result<()> {
let Case {
gb_repository,
project,
@ -146,7 +146,7 @@ fn test_write() -> anyhow::Result<()> {
}
#[test]
fn test_should_update() -> anyhow::Result<()> {
fn should_update() -> anyhow::Result<()> {
let Case {
gb_repository,
project,

View File

@ -0,0 +1,971 @@
use anyhow::Result;
use std::path::{Path, PathBuf};
use std::{
collections::HashMap,
sync::atomic::{AtomicUsize, Ordering},
};
use once_cell::sync::Lazy;
use crate::{commit_all, Case, Suite};
use gitbutler_app::watcher::handlers::calculate_deltas_handler::Handler;
use gitbutler_app::{
deltas::{self, operations::Operation},
reader, sessions,
virtual_branches::{self, branch},
};
use self::branch::BranchId;
// Counter used to derive fixture target fields.
static TEST_TARGET_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
/// Builds a fixture `Target` whose branch name, remote URL, and sha embed
/// the current counter value.
/// NOTE(review): unlike `new_test_branch` below, this only LOADS the counter
/// and never increments it, so every call in a process returns the same
/// target — confirm this is intended.
fn new_test_target() -> virtual_branches::target::Target {
virtual_branches::target::Target {
branch: format!(
"refs/remotes/remote name {}/branch name {}",
TEST_TARGET_INDEX.load(Ordering::Relaxed),
TEST_TARGET_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
remote_url: format!("remote url {}", TEST_TARGET_INDEX.load(Ordering::Relaxed)),
// 39 fixed hex chars + the counter digit(s) form the fixture sha.
sha: format!(
"0123456789abcdef0123456789abcdef0123456{}",
TEST_TARGET_INDEX.load(Ordering::Relaxed)
)
.parse()
.unwrap(),
}
}
static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
/// Builds a virtual `Branch` for tests, made unique by incrementing the shared
/// `TEST_INDEX` counter and embedding it into name, notes, refs and oids.
fn new_test_branch() -> branch::Branch {
    TEST_INDEX.fetch_add(1, Ordering::Relaxed);

    branch::Branch {
        id: BranchId::generate(),
        name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)),
        notes: format!("branch_notes_{}", TEST_INDEX.load(Ordering::Relaxed)),
        applied: true,
        upstream: Some(
            format!(
                "refs/remotes/origin/upstream_{}",
                TEST_INDEX.load(Ordering::Relaxed)
            )
            .parse()
            .unwrap(),
        ),
        upstream_head: None,
        // Timestamps derived from the counter keep ordering deterministic.
        created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128,
        updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128,
        // 39 fixed hex chars + counter — valid 40-char oid for single-digit
        // counter values.
        head: format!(
            "0123456789abcdef0123456789abcdef0123456{}",
            TEST_INDEX.load(Ordering::Relaxed)
        )
        .parse()
        .unwrap(),
        tree: format!(
            "0123456789abcdef0123456789abcdef012345{}",
            TEST_INDEX.load(Ordering::Relaxed) + 10
        )
        .parse()
        .unwrap(),
        ownership: branch::BranchOwnershipClaims::default(),
        order: TEST_INDEX.load(Ordering::Relaxed),
        selected_for_changes: None,
    }
}
/// Changing a file that already exists in the history produces a delta
/// relative to the old content and mirrors the new content into the session
/// working directory.
/// NOTE(review): "commited" is a typo for "committed" — name kept as-is.
#[test]
fn register_existing_commited_file() -> Result<()> {
    let suite = Suite::default();
    let Case {
        gb_repository,
        project,
        ..
    } = suite.new_case_with_files(HashMap::from([(PathBuf::from("test.txt"), "test")]));
    let listener = Handler::from_path(&suite.local_app_data);

    // Overwrite the pre-existing file and notify the handler.
    std::fs::write(project.path.join("test.txt"), "test2")?;
    listener.handle("test.txt", &project.id)?;

    let session = gb_repository.get_current_session()?.unwrap();
    let session_reader = sessions::Reader::open(&gb_repository, &session)?;
    let deltas_reader = deltas::Reader::new(&session_reader);
    let deltas = deltas_reader.read_file("test.txt")?.unwrap();
    assert_eq!(deltas.len(), 1);
    assert_eq!(deltas[0].operations.len(), 1);
    // Only the appended character is recorded, not the whole file.
    assert_eq!(
        deltas[0].operations[0],
        Operation::Insert((4, "2".to_string())),
    );
    assert_eq!(
        std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?,
        "test2"
    );

    Ok(())
}
/// Handling a file change when no session exists must implicitly create a
/// current session.
#[test]
fn register_must_init_current_session() -> Result<()> {
    let suite = Suite::default();
    let Case {
        gb_repository,
        project,
        ..
    } = suite.new_case();
    let listener = Handler::from_path(&suite.local_app_data);

    std::fs::write(project.path.join("test.txt"), "test")?;
    listener.handle("test.txt", &project.id)?;

    assert!(gb_repository.get_current_session()?.is_some());

    Ok(())
}
/// Two consecutive file changes must land in the same (already open) session
/// rather than starting a new one.
#[test]
fn register_must_not_override_current_session() -> Result<()> {
    let suite = Suite::default();
    let Case {
        gb_repository,
        project,
        ..
    } = suite.new_case();
    let listener = Handler::from_path(&suite.local_app_data);

    // First change opens a session...
    std::fs::write(project.path.join("test.txt"), "test")?;
    listener.handle("test.txt", &project.id)?;
    let session1 = gb_repository.get_current_session()?.unwrap();

    // ...the second change must reuse it.
    std::fs::write(project.path.join("test.txt"), "test2")?;
    listener.handle("test.txt", &project.id)?;
    let session2 = gb_repository.get_current_session()?.unwrap();

    assert_eq!(session1.id, session2.id);

    Ok(())
}
/// A file with non-UTF-8 content is registered but produces an empty delta
/// (no text operations) and an empty session-wd copy.
#[test]
fn register_binfile() -> Result<()> {
    let suite = Suite::default();
    let Case {
        gb_repository,
        project,
        ..
    } = suite.new_case();
    let listener = Handler::from_path(&suite.local_app_data);

    // Bytes chosen to be invalid UTF-8 so the file is treated as binary.
    std::fs::write(
        project.path.join("test.bin"),
        [0, 159, 146, 150, 159, 146, 150],
    )?;
    listener.handle("test.bin", &project.id)?;

    let session = gb_repository.get_current_session()?.unwrap();
    let session_reader = sessions::Reader::open(&gb_repository, &session)?;
    let deltas_reader = deltas::Reader::new(&session_reader);
    let deltas = deltas_reader.read_file("test.bin")?.unwrap();

    assert_eq!(deltas.len(), 1);
    // Binary content yields a delta with no text operations.
    assert_eq!(deltas[0].operations.len(), 0);
    assert_eq!(
        std::fs::read_to_string(gb_repository.session_wd_path().join("test.bin"))?,
        ""
    );

    Ok(())
}
/// A newly created empty file is registered with a delta containing no
/// operations and an empty session-wd copy.
#[test]
fn register_empty_new_file() -> Result<()> {
    let suite = Suite::default();
    let Case {
        gb_repository,
        project,
        ..
    } = suite.new_case();
    let listener = Handler::from_path(&suite.local_app_data);

    std::fs::write(project.path.join("test.txt"), "")?;
    listener.handle("test.txt", &project.id)?;

    let session = gb_repository.get_current_session()?.unwrap();
    let session_reader = sessions::Reader::open(&gb_repository, &session)?;
    let deltas_reader = deltas::Reader::new(&session_reader);
    let deltas = deltas_reader.read_file("test.txt")?.unwrap();
    assert_eq!(deltas.len(), 1);
    // Empty content: a delta is recorded, but it carries no operations.
    assert_eq!(deltas[0].operations.len(), 0);
    assert_eq!(
        std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?,
        ""
    );

    Ok(())
}
/// A newly created file with content is registered as a single Insert delta
/// covering the whole content, mirrored into the session wd.
#[test]
fn register_new_file() -> Result<()> {
    let suite = Suite::default();
    let Case {
        gb_repository,
        project,
        ..
    } = suite.new_case();
    let listener = Handler::from_path(&suite.local_app_data);

    std::fs::write(project.path.join("test.txt"), "test")?;
    listener.handle("test.txt", &project.id)?;

    let session = gb_repository.get_current_session()?.unwrap();
    let session_reader = sessions::Reader::open(&gb_repository, &session)?;
    let deltas_reader = deltas::Reader::new(&session_reader);
    let deltas = deltas_reader.read_file("test.txt")?.unwrap();
    assert_eq!(deltas.len(), 1);
    assert_eq!(deltas[0].operations.len(), 1);
    // The whole new content is one insert at offset 0.
    assert_eq!(
        deltas[0].operations[0],
        Operation::Insert((0, "test".to_string())),
    );
    assert_eq!(
        std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?,
        "test"
    );

    Ok(())
}
/// A file registered once must survive subsequent session flushes: after two
/// flushes with no further edits, the new session still lists the file.
/// NOTE(review): "thgoughout" is a typo for "throughout" — name kept as-is.
#[test]
fn register_no_changes_saved_thgoughout_flushes() -> Result<()> {
    let suite = Suite::default();
    let Case {
        gb_repository,
        project_repository,
        project,
        ..
    } = suite.new_case();
    let listener = Handler::from_path(&suite.local_app_data);

    // file change, wd and deltas are written
    std::fs::write(project.path.join("test.txt"), "test")?;
    listener.handle("test.txt", &project.id)?;

    // make two more sessions.
    gb_repository.flush(&project_repository, None)?;
    gb_repository.get_or_create_current_session()?;
    gb_repository.flush(&project_repository, None)?;

    // after some sessions, files from the first change are still there.
    let session = gb_repository.get_or_create_current_session()?;
    let session_reader = sessions::Reader::open(&gb_repository, &session)?;
    let files = session_reader.files(None)?;
    assert_eq!(files.len(), 1);

    Ok(())
}
/// Two edits to the same file within one session accumulate as two deltas:
/// the initial insert of the content, then the incremental insert.
#[test]
fn register_new_file_twice() -> Result<()> {
    let suite = Suite::default();
    let Case {
        gb_repository,
        project,
        ..
    } = suite.new_case();
    let listener = Handler::from_path(&suite.local_app_data);

    // First write: whole content as one insert.
    std::fs::write(project.path.join("test.txt"), "test")?;
    listener.handle("test.txt", &project.id)?;

    let session = gb_repository.get_current_session()?.unwrap();
    let session_reader = sessions::Reader::open(&gb_repository, &session)?;
    let deltas_reader = deltas::Reader::new(&session_reader);
    let deltas = deltas_reader.read_file("test.txt")?.unwrap();
    assert_eq!(deltas.len(), 1);
    assert_eq!(deltas[0].operations.len(), 1);
    assert_eq!(
        deltas[0].operations[0],
        Operation::Insert((0, "test".to_string())),
    );
    assert_eq!(
        std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?,
        "test"
    );

    // Second write: only the appended character becomes a new delta.
    std::fs::write(project.path.join("test.txt"), "test2")?;
    listener.handle("test.txt", &project.id)?;

    let deltas = deltas_reader.read_file("test.txt")?.unwrap();
    assert_eq!(deltas.len(), 2);
    assert_eq!(deltas[0].operations.len(), 1);
    assert_eq!(
        deltas[0].operations[0],
        Operation::Insert((0, "test".to_string())),
    );
    assert_eq!(deltas[1].operations.len(), 1);
    assert_eq!(
        deltas[1].operations[0],
        Operation::Insert((4, "2".to_string())),
    );
    assert_eq!(
        std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?,
        "test2"
    );

    Ok(())
}
/// Full lifecycle of a deleted file across three sessions: created (deltas
/// only), flushed (file materializes in next session), deleted (Delete delta),
/// flushed again (file gone from the following session).
#[test]
fn register_file_deleted() -> Result<()> {
    let suite = Suite::default();
    let Case {
        gb_repository,
        project_repository,
        project,
        ..
    } = suite.new_case();
    let listener = Handler::from_path(&suite.local_app_data);

    {
        // write file
        std::fs::write(project.path.join("test.txt"), "test")?;
        listener.handle("test.txt", &project.id)?;
    }

    {
        // current session must have the deltas, but not the file (it didn't exist)
        let session = gb_repository.get_current_session()?.unwrap();
        let session_reader = sessions::Reader::open(&gb_repository, &session)?;
        let deltas_reader = deltas::Reader::new(&session_reader);
        let deltas = deltas_reader.read_file("test.txt")?.unwrap();
        assert_eq!(deltas.len(), 1);
        assert_eq!(deltas[0].operations.len(), 1);
        assert_eq!(
            deltas[0].operations[0],
            Operation::Insert((0, "test".to_string())),
        );
        assert_eq!(
            std::fs::read_to_string(gb_repository.session_wd_path().join("test.txt"))?,
            "test"
        );

        let files = session_reader.files(None).unwrap();
        assert!(files.is_empty());
    }

    gb_repository.flush(&project_repository, None)?;

    {
        // file should be available in the next session, but not deltas just yet.
        let session = gb_repository.get_or_create_current_session()?;
        let session_reader = sessions::Reader::open(&gb_repository, &session)?;
        let files = session_reader.files(None).unwrap();
        assert_eq!(files.len(), 1);
        assert_eq!(
            files[Path::new("test.txt")],
            reader::Content::UTF8("test".to_string())
        );

        let deltas_reader = deltas::Reader::new(&session_reader);
        let deltas = deltas_reader.read(None)?;
        assert!(deltas.is_empty());

        // removing the file
        std::fs::remove_file(project.path.join("test.txt"))?;
        listener.handle("test.txt", &project.id)?;

        // deltas are recorded
        let deltas = deltas_reader.read_file("test.txt")?.unwrap();
        assert_eq!(deltas.len(), 1);
        assert_eq!(deltas[0].operations.len(), 1);
        assert_eq!(deltas[0].operations[0], Operation::Delete((0, 4)),);
    }

    gb_repository.flush(&project_repository, None)?;

    {
        // since file was deleted in the previous session, it should not exist in the new one.
        let session = gb_repository.get_or_create_current_session()?;
        let session_reader = sessions::Reader::open(&gb_repository, &session)?;
        let files = session_reader.files(None).unwrap();
        assert!(files.is_empty());
    }

    Ok(())
}
/// End-to-end flow: `size` sessions, each writing (and committing) one change
/// to the same file, then flushing. Verifies session ordering and that, for
/// every session slice, replaying the recorded operations on top of the
/// slice's base file reproduces the final file content.
#[test]
fn flow_with_commits() -> Result<()> {
    let suite = Suite::default();
    let Case {
        gb_repository,
        project,
        project_repository,
        ..
    } = suite.new_case();
    let listener = Handler::from_path(&suite.local_app_data);

    let size = 10;
    let relative_file_path = Path::new("one/two/test.txt");
    for i in 1..=size {
        std::fs::create_dir_all(Path::new(&project.path).join("one/two"))?;
        // create a session with a single file change and flush it
        std::fs::write(
            Path::new(&project.path).join(relative_file_path),
            i.to_string(),
        )?;

        commit_all(&project_repository.git_repository);
        listener.handle(relative_file_path, &project.id)?;
        assert!(gb_repository.flush(&project_repository, None)?.is_some());
    }

    // get all the created sessions
    let mut sessions: Vec<sessions::Session> = gb_repository
        .get_sessions_iterator()?
        .map(Result::unwrap)
        .collect();
    assert_eq!(sessions.len(), size);
    // verify sessions order is correct (iterator yields newest first)
    let mut last_start = sessions[0].meta.start_timestamp_ms;
    // seeded from the newest session's start: every earlier session must have
    // ended before the newest one began.
    let mut last_end = sessions[0].meta.start_timestamp_ms;
    sessions[1..].iter().for_each(|session| {
        assert!(session.meta.start_timestamp_ms < last_start);
        assert!(session.meta.last_timestamp_ms < last_end);
        last_start = session.meta.start_timestamp_ms;
        last_end = session.meta.last_timestamp_ms;
    });

    sessions.reverse();
    // try to reconstruct file state from operations for every session slice
    for i in 0..sessions.len() {
        let sessions_slice = &mut sessions[i..];

        // collect all operations from sessions in the reverse order
        let mut operations: Vec<Operation> = vec![];
        for session in &mut *sessions_slice {
            let session_reader = sessions::Reader::open(&gb_repository, session).unwrap();
            let deltas_reader = deltas::Reader::new(&session_reader);
            let deltas_by_filepath = deltas_reader.read(None).unwrap();
            for deltas in deltas_by_filepath.values() {
                for delta in deltas {
                    delta.operations.iter().for_each(|operation| {
                        operations.push(operation.clone());
                    });
                }
            }
        }

        let reader =
            sessions::Reader::open(&gb_repository, sessions_slice.first().unwrap()).unwrap();
        let files = reader.files(None).unwrap();

        // the very first session starts with no base file.
        if i == 0 {
            assert_eq!(files.len(), 0);
        } else {
            assert_eq!(files.len(), 1);
        }

        let base_file = files.get(&relative_file_path.to_path_buf());
        let mut text: Vec<char> = match base_file {
            Some(reader::Content::UTF8(file)) => file.chars().collect(),
            _ => vec![],
        };

        // replaying all operations from the base must yield the final content.
        for operation in operations {
            operation.apply(&mut text).unwrap();
        }

        assert_eq!(text.iter().collect::<String>(), size.to_string());
    }
    Ok(())
}
/// Same end-to-end flow as `flow_with_commits`, but the project changes are
/// never committed to git — session bookkeeping and delta replay must still
/// reconstruct the final file content.
#[test]
fn flow_no_commits() -> Result<()> {
    let suite = Suite::default();
    let Case {
        gb_repository,
        project,
        project_repository,
        ..
    } = suite.new_case();
    let listener = Handler::from_path(&suite.local_app_data);

    let size = 10;
    let relative_file_path = Path::new("one/two/test.txt");
    for i in 1..=size {
        std::fs::create_dir_all(Path::new(&project.path).join("one/two"))?;
        // create a session with a single file change and flush it
        std::fs::write(
            Path::new(&project.path).join(relative_file_path),
            i.to_string(),
        )?;

        listener.handle(relative_file_path, &project.id)?;
        assert!(gb_repository.flush(&project_repository, None)?.is_some());
    }

    // get all the created sessions
    let mut sessions: Vec<sessions::Session> = gb_repository
        .get_sessions_iterator()?
        .map(Result::unwrap)
        .collect();
    assert_eq!(sessions.len(), size);
    // verify sessions order is correct (iterator yields newest first)
    let mut last_start = sessions[0].meta.start_timestamp_ms;
    let mut last_end = sessions[0].meta.start_timestamp_ms;
    sessions[1..].iter().for_each(|session| {
        assert!(session.meta.start_timestamp_ms < last_start);
        assert!(session.meta.last_timestamp_ms < last_end);
        last_start = session.meta.start_timestamp_ms;
        last_end = session.meta.last_timestamp_ms;
    });

    sessions.reverse();
    // try to reconstruct file state from operations for every session slice
    for i in 0..sessions.len() {
        let sessions_slice = &mut sessions[i..];

        // collect all operations from sessions in the reverse order
        let mut operations: Vec<Operation> = vec![];
        for session in &mut *sessions_slice {
            let session_reader = sessions::Reader::open(&gb_repository, session).unwrap();
            let deltas_reader = deltas::Reader::new(&session_reader);
            let deltas_by_filepath = deltas_reader.read(None).unwrap();
            for deltas in deltas_by_filepath.values() {
                for delta in deltas {
                    delta.operations.iter().for_each(|operation| {
                        operations.push(operation.clone());
                    });
                }
            }
        }

        let reader =
            sessions::Reader::open(&gb_repository, sessions_slice.first().unwrap()).unwrap();
        let files = reader.files(None).unwrap();

        // the very first session starts with no base file.
        if i == 0 {
            assert_eq!(files.len(), 0);
        } else {
            assert_eq!(files.len(), 1);
        }

        let base_file = files.get(&relative_file_path.to_path_buf());
        let mut text: Vec<char> = match base_file {
            Some(reader::Content::UTF8(file)) => file.chars().collect(),
            _ => vec![],
        };

        // replaying all operations from the base must yield the final content.
        for operation in operations {
            operation.apply(&mut text).unwrap();
        }

        assert_eq!(text.iter().collect::<String>(), size.to_string());
    }
    Ok(())
}
/// All `size` edits happen within a single (never flushed) session; replaying
/// the session's operations from an empty base must yield the final content.
/// NOTE(review): "signle" is a typo for "single" — name kept as-is.
#[test]
fn flow_signle_session() -> Result<()> {
    let suite = Suite::default();
    let Case {
        gb_repository,
        project,
        ..
    } = suite.new_case();
    let listener = Handler::from_path(&suite.local_app_data);

    let size = 10_i32;
    let relative_file_path = Path::new("one/two/test.txt");
    for i in 1_i32..=size {
        std::fs::create_dir_all(Path::new(&project.path).join("one/two"))?;
        // create a session with a single file change and flush it
        std::fs::write(
            Path::new(&project.path).join(relative_file_path),
            i.to_string(),
        )?;

        listener.handle(relative_file_path, &project.id)?;
    }

    // collect all operations from sessions in the reverse order
    let mut operations: Vec<Operation> = vec![];
    let session = gb_repository.get_current_session()?.unwrap();
    let session_reader = sessions::Reader::open(&gb_repository, &session).unwrap();
    let deltas_reader = deltas::Reader::new(&session_reader);
    let deltas_by_filepath = deltas_reader.read(None).unwrap();
    for deltas in deltas_by_filepath.values() {
        for delta in deltas {
            delta.operations.iter().for_each(|operation| {
                operations.push(operation.clone());
            });
        }
    }

    let reader = sessions::Reader::open(&gb_repository, &session).unwrap();
    let files = reader.files(None).unwrap();

    let base_file = files.get(&relative_file_path.to_path_buf());
    let mut text: Vec<char> = match base_file {
        Some(reader::Content::UTF8(file)) => file.chars().collect(),
        _ => vec![],
    };

    // replaying all operations from the base must yield the final content.
    for operation in operations {
        operation.apply(&mut text).unwrap();
    }

    assert_eq!(text.iter().collect::<String>(), size.to_string());
    Ok(())
}
/// Branch and target state written in one session must carry over into the
/// next session after a flush.
#[test]
fn should_persist_branches_targets_state_between_sessions() -> Result<()> {
    let suite = Suite::default();
    let Case {
        gb_repository,
        project,
        project_repository,
        ..
    } = suite.new_case_with_files(HashMap::from([(PathBuf::from("test.txt"), "hello world")]));
    let listener = Handler::from_path(&suite.local_app_data);

    // Seed state: a default target, two branches, and a per-branch target
    // override for the second branch.
    let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
    let target_writer = virtual_branches::target::Writer::new(&gb_repository, project.gb_dir())?;
    let default_target = new_test_target();
    target_writer.write_default(&default_target)?;
    let mut vbranch0 = new_test_branch();
    branch_writer.write(&mut vbranch0)?;
    let mut vbranch1 = new_test_branch();
    let vbranch1_target = new_test_target();
    branch_writer.write(&mut vbranch1)?;
    target_writer.write(&vbranch1.id, &vbranch1_target)?;

    std::fs::write(project.path.join("test.txt"), "hello world!").unwrap();
    listener.handle("test.txt", &project.id)?;

    let flushed_session = gb_repository.flush(&project_repository, None).unwrap();

    // create a new session
    let session = gb_repository.get_or_create_current_session().unwrap();
    assert_ne!(session.id, flushed_session.unwrap().id);

    // ensure that the virtual branch is still there and selected
    let session_reader = sessions::Reader::open(&gb_repository, &session).unwrap();

    let branches = virtual_branches::Iterator::new(&session_reader)
        .unwrap()
        .collect::<Result<Vec<virtual_branches::Branch>, gitbutler_app::reader::Error>>()
        .unwrap()
        .into_iter()
        .collect::<Vec<virtual_branches::Branch>>();
    assert_eq!(branches.len(), 2);
    let branch_ids = branches.iter().map(|b| b.id).collect::<Vec<_>>();
    assert!(branch_ids.contains(&vbranch0.id));
    assert!(branch_ids.contains(&vbranch1.id));

    // vbranch0 falls back to the default target; vbranch1 keeps its override.
    let target_reader = virtual_branches::target::Reader::new(&session_reader);
    assert_eq!(target_reader.read_default().unwrap(), default_target);
    assert_eq!(target_reader.read(&vbranch0.id).unwrap(), default_target);
    assert_eq!(target_reader.read(&vbranch1.id).unwrap(), vbranch1_target);

    Ok(())
}
/// Even after the on-disk branch state is hard-deleted, a new session must
/// restore branches and targets from the flushed head session.
#[test]
fn should_restore_branches_targets_state_from_head_session() -> Result<()> {
    let suite = Suite::default();
    let Case {
        gb_repository,
        project,
        project_repository,
        ..
    } = suite.new_case_with_files(HashMap::from([(PathBuf::from("test.txt"), "hello world")]));
    let listener = Handler::from_path(&suite.local_app_data);

    // Seed state: a default target, two branches, and a per-branch target
    // override for the second branch.
    let branch_writer = branch::Writer::new(&gb_repository, project.gb_dir())?;
    let target_writer = virtual_branches::target::Writer::new(&gb_repository, project.gb_dir())?;
    let default_target = new_test_target();
    target_writer.write_default(&default_target)?;
    let mut vbranch0 = new_test_branch();
    branch_writer.write(&mut vbranch0)?;
    let mut vbranch1 = new_test_branch();
    let vbranch1_target = new_test_target();
    branch_writer.write(&mut vbranch1)?;
    target_writer.write(&vbranch1.id, &vbranch1_target)?;

    std::fs::write(project.path.join("test.txt"), "hello world!").unwrap();
    listener.handle("test.txt", &project.id).unwrap();

    let flushed_session = gb_repository.flush(&project_repository, None).unwrap();

    // hard delete branches state from disk
    std::fs::remove_dir_all(gb_repository.root()).unwrap();

    // create a new session
    let session = gb_repository.get_or_create_current_session().unwrap();
    assert_ne!(session.id, flushed_session.unwrap().id);

    // ensure that the virtual branch is still there and selected
    let session_reader = sessions::Reader::open(&gb_repository, &session).unwrap();

    let branches = virtual_branches::Iterator::new(&session_reader)
        .unwrap()
        .collect::<Result<Vec<virtual_branches::Branch>, gitbutler_app::reader::Error>>()
        .unwrap()
        .into_iter()
        .collect::<Vec<virtual_branches::Branch>>();
    assert_eq!(branches.len(), 2);
    let branch_ids = branches.iter().map(|b| b.id).collect::<Vec<_>>();
    assert!(branch_ids.contains(&vbranch0.id));
    assert!(branch_ids.contains(&vbranch1.id));

    // vbranch0 falls back to the default target; vbranch1 keeps its override.
    let target_reader = virtual_branches::target::Reader::new(&session_reader);
    assert_eq!(target_reader.read_default().unwrap(), default_target);
    assert_eq!(target_reader.read(&vbranch0.id).unwrap(), default_target);
    assert_eq!(target_reader.read(&vbranch1.id).unwrap(), vbranch1_target);

    Ok(())
}
/// Tests covering how the session working directory (`wd/` in the session
/// commit) is maintained across flushes: additions, deletions and updates.
mod flush_wd {
    use super::*;

    /// Files touched in a session must appear under `wd/` in the flushed
    /// session commit; a later session's flush accumulates newly touched
    /// files next to the previously flushed ones.
    #[test]
    fn should_add_new_files_to_session_wd() {
        let suite = Suite::default();
        let Case {
            gb_repository,
            project,
            project_repository,
            ..
        } = suite.new_case();
        let listener = Handler::from_path(&suite.local_app_data);

        // write a file into session
        std::fs::write(project.path.join("test.txt"), "hello world!").unwrap();
        listener.handle("test.txt", &project.id).unwrap();

        let flushed_session = gb_repository
            .flush(&project_repository, None)
            .unwrap()
            .unwrap();
        {
            // after flush it should be flushed into the commit
            let session_commit = gb_repository
                .git_repository()
                .find_commit(flushed_session.hash.unwrap())
                .unwrap();
            let commit_reader =
                reader::Reader::from_commit(gb_repository.git_repository(), &session_commit)
                    .unwrap();
            assert_eq!(
                commit_reader.list_files(Path::new("wd")).unwrap(),
                vec![Path::new("test.txt")]
            );
            assert_eq!(
                commit_reader.read(Path::new("wd/test.txt")).unwrap(),
                reader::Content::UTF8("hello world!".to_string())
            );
        }

        // write another file into session
        std::fs::create_dir_all(project.path.join("one/two")).unwrap();
        std::fs::write(project.path.join("one/two/test2.txt"), "hello world!").unwrap();
        listener.handle("one/two/test2.txt", &project.id).unwrap();

        let flushed_session = gb_repository
            .flush(&project_repository, None)
            .unwrap()
            .unwrap();
        {
            // after flush, it should be flushed into the commit next to the previous one
            let session_commit = gb_repository
                .git_repository()
                .find_commit(flushed_session.hash.unwrap())
                .unwrap();
            let commit_reader =
                reader::Reader::from_commit(gb_repository.git_repository(), &session_commit)
                    .unwrap();
            assert_eq!(
                commit_reader.list_files(Path::new("wd")).unwrap(),
                vec![Path::new("one/two/test2.txt"), Path::new("test.txt"),]
            );
            assert_eq!(
                commit_reader.read(Path::new("wd/test.txt")).unwrap(),
                reader::Content::UTF8("hello world!".to_string())
            );
            assert_eq!(
                commit_reader
                    .read(Path::new("wd/one/two/test2.txt"))
                    .unwrap(),
                reader::Content::UTF8("hello world!".to_string())
            );
        }
    }

    /// Files removed from the project (and routed through the handler) must
    /// disappear from `wd/` in the next flushed session commit.
    #[test]
    fn should_remove_deleted_files_from_session_wd() {
        let suite = Suite::default();
        let Case {
            gb_repository,
            project,
            project_repository,
            ..
        } = suite.new_case();
        let listener = Handler::from_path(&suite.local_app_data);

        // write a file into session
        std::fs::write(project.path.join("test.txt"), "hello world!").unwrap();
        listener.handle("test.txt", &project.id).unwrap();
        std::fs::create_dir_all(project.path.join("one/two")).unwrap();
        std::fs::write(project.path.join("one/two/test2.txt"), "hello world!").unwrap();
        listener.handle("one/two/test2.txt", &project.id).unwrap();

        let flushed_session = gb_repository
            .flush(&project_repository, None)
            .unwrap()
            .unwrap();
        {
            // after flush it should be flushed into the commit
            let session_commit = gb_repository
                .git_repository()
                .find_commit(flushed_session.hash.unwrap())
                .unwrap();
            let commit_reader =
                reader::Reader::from_commit(gb_repository.git_repository(), &session_commit)
                    .unwrap();
            assert_eq!(
                commit_reader.list_files(Path::new("wd")).unwrap(),
                vec![Path::new("one/two/test2.txt"), Path::new("test.txt"),]
            );
            assert_eq!(
                commit_reader.read(Path::new("wd/test.txt")).unwrap(),
                reader::Content::UTF8("hello world!".to_string())
            );
            assert_eq!(
                commit_reader
                    .read(Path::new("wd/one/two/test2.txt"))
                    .unwrap(),
                reader::Content::UTF8("hello world!".to_string())
            );
        }

        // rm the files
        std::fs::remove_file(project.path.join("test.txt")).unwrap();
        listener.handle("test.txt", &project.id).unwrap();
        std::fs::remove_file(project.path.join("one/two/test2.txt")).unwrap();
        listener.handle("one/two/test2.txt", &project.id).unwrap();

        let flushed_session = gb_repository
            .flush(&project_repository, None)
            .unwrap()
            .unwrap();
        {
            // after flush it should be removed from the commit
            let session_commit = gb_repository
                .git_repository()
                .find_commit(flushed_session.hash.unwrap())
                .unwrap();
            let commit_reader =
                reader::Reader::from_commit(gb_repository.git_repository(), &session_commit)
                    .unwrap();
            assert!(commit_reader
                .list_files(Path::new("wd"))
                .unwrap()
                .is_empty());
        }
    }

    /// Updated file contents must replace the previous contents under `wd/`
    /// in the next flushed session commit.
    #[test]
    fn should_update_updated_files_in_session_wd() {
        let suite = Suite::default();
        let Case {
            gb_repository,
            project,
            project_repository,
            ..
        } = suite.new_case();
        let listener = Handler::from_path(&suite.local_app_data);

        // write a file into session
        std::fs::write(project.path.join("test.txt"), "hello world!").unwrap();
        listener.handle("test.txt", &project.id).unwrap();
        std::fs::create_dir_all(project.path.join("one/two")).unwrap();
        std::fs::write(project.path.join("one/two/test2.txt"), "hello world!").unwrap();
        listener.handle("one/two/test2.txt", &project.id).unwrap();

        let flushed_session = gb_repository
            .flush(&project_repository, None)
            .unwrap()
            .unwrap();
        {
            // after flush it should be flushed into the commit
            let session_commit = gb_repository
                .git_repository()
                .find_commit(flushed_session.hash.unwrap())
                .unwrap();
            let commit_reader =
                reader::Reader::from_commit(gb_repository.git_repository(), &session_commit)
                    .unwrap();
            assert_eq!(
                commit_reader.list_files(Path::new("wd")).unwrap(),
                vec![Path::new("one/two/test2.txt"), Path::new("test.txt"),]
            );
            assert_eq!(
                commit_reader.read(Path::new("wd/test.txt")).unwrap(),
                reader::Content::UTF8("hello world!".to_string())
            );
            assert_eq!(
                commit_reader
                    .read(Path::new("wd/one/two/test2.txt"))
                    .unwrap(),
                reader::Content::UTF8("hello world!".to_string())
            );
        }

        // update the file
        std::fs::write(project.path.join("test.txt"), "hello world!2").unwrap();
        listener.handle("test.txt", &project.id).unwrap();

        std::fs::write(project.path.join("one/two/test2.txt"), "hello world!2").unwrap();
        listener.handle("one/two/test2.txt", &project.id).unwrap();

        let flushed_session = gb_repository
            .flush(&project_repository, None)
            .unwrap()
            .unwrap();
        {
            // after flush it should be updated in the commit
            let session_commit = gb_repository
                .git_repository()
                .find_commit(flushed_session.hash.unwrap())
                .unwrap();
            let commit_reader =
                reader::Reader::from_commit(gb_repository.git_repository(), &session_commit)
                    .unwrap();
            assert_eq!(
                commit_reader.list_files(Path::new("wd")).unwrap(),
                vec![Path::new("one/two/test2.txt"), Path::new("test.txt"),]
            );
            assert_eq!(
                commit_reader.read(Path::new("wd/test.txt")).unwrap(),
                reader::Content::UTF8("hello world!2".to_string())
            );
            assert_eq!(
                commit_reader
                    .read(Path::new("wd/one/two/test2.txt"))
                    .unwrap(),
                reader::Content::UTF8("hello world!2".to_string())
            );
        }
    }
}

View File

@ -0,0 +1,55 @@
use std::time::SystemTime;
use gitbutler_app::projects;
use pretty_assertions::assert_eq;
use crate::watcher::handler::test_remote_repository;
use crate::{Case, Suite};
use gitbutler_app::watcher::handlers::fetch_gitbutler_data::Handler;
/// Fetching from a sync-enabled project pointed at a local bare "cloud"
/// repository must succeed.
#[tokio::test]
async fn fetch_success() -> anyhow::Result<()> {
    let suite = Suite::default();
    let Case { project, .. } = suite.new_case();

    // Local bare repository standing in for the GitButler cloud remote.
    let cloud = test_remote_repository()?;

    let api_project = projects::ApiProject {
        name: "test-sync".to_string(),
        description: None,
        repository_id: "123".to_string(),
        git_url: cloud.path().to_str().unwrap().to_string(),
        code_git_url: None,
        created_at: 0_i32.to_string(),
        updated_at: 0_i32.to_string(),
        // sync must be enabled for the fetch handler to act.
        sync: true,
    };

    suite
        .projects
        .update(&projects::UpdateRequest {
            id: project.id,
            api: Some(api_project.clone()),
            ..Default::default()
        })
        .await?;

    let listener = Handler::new(suite.local_app_data, suite.projects, suite.users);

    listener
        .handle(&project.id, &SystemTime::now())
        .await
        .unwrap();

    Ok(())
}
/// A project that was never configured for cloud sync must be rejected with
/// the "sync disabled" error.
#[tokio::test]
async fn fetch_fail_no_sync() {
    let suite = Suite::default();
    let Case { project, .. } = suite.new_case();

    let handler = Handler::new(suite.local_app_data, suite.projects, suite.users);
    let err = handler
        .handle(&project.id, &SystemTime::now())
        .await
        .unwrap_err();
    assert_eq!(&err.to_string(), "sync disabled");
}

View File

@ -0,0 +1,88 @@
use anyhow::Result;
use std::fs;
use gitbutler_app::projects;
use pretty_assertions::assert_eq;
use crate::{Case, Suite};
use gitbutler_app::watcher::handlers::git_file_change::Handler;
use gitbutler_app::watcher::{handlers, Event};
/// When a current session exists and `.git/GB_FLUSH` is present, handling the
/// marker emits a Flush event and removes the marker file.
#[test]
fn flush_session() -> Result<()> {
    let suite = Suite::default();
    let Case {
        project,
        gb_repository,
        ..
    } = suite.new_case();

    assert!(gb_repository.get_current_session()?.is_none());
    create_new_session_via_new_file(&project, &suite);
    assert!(gb_repository.get_current_session()?.is_some());

    let listener = Handler::new(suite.local_app_data, suite.projects, suite.users);

    // Drop the flush marker and route it through the git-file-change handler.
    let flush_file_path = project.path.join(".git/GB_FLUSH");
    fs::write(flush_file_path.as_path(), "")?;

    let result = listener.handle("GB_FLUSH", &project.id)?;

    assert_eq!(result.len(), 1);
    assert!(matches!(result[0], Event::Flush(_, _)));

    assert!(!flush_file_path.exists(), "flush file deleted");

    Ok(())
}
/// Handling "GB_FLUSH" when the marker file does not exist on disk must emit
/// no events, even though a current session is open.
#[test]
fn do_not_flush_session_if_file_is_missing() -> Result<()> {
    let suite = Suite::default();
    let Case {
        project,
        gb_repository,
        ..
    } = suite.new_case();

    assert!(gb_repository.get_current_session()?.is_none());
    create_new_session_via_new_file(&project, &suite);
    assert!(gb_repository.get_current_session()?.is_some());

    let listener = Handler::new(suite.local_app_data, suite.projects, suite.users);

    // Note: no .git/GB_FLUSH marker is written here.
    let result = listener.handle("GB_FLUSH", &project.id)?;

    assert_eq!(result.len(), 0);

    Ok(())
}
/// Forces a current session into existence by writing a file into the project
/// and routing the change through the delta-calculation handler.
fn create_new_session_via_new_file(project: &projects::Project, suite: &Suite) {
    fs::write(project.path.join("test.txt"), "test").unwrap();

    handlers::calculate_deltas_handler::Handler::from_path(&suite.local_app_data)
        .handle("test.txt", &project.id)
        .unwrap();
}
/// With no current session to flush, handling the marker emits no events but
/// still deletes the `.git/GB_FLUSH` file.
#[test]
fn flush_deletes_flush_file_without_session_to_flush() -> Result<()> {
    let suite = Suite::default();
    let Case { project, .. } = suite.new_case();

    let listener = Handler::new(suite.local_app_data, suite.projects, suite.users);

    let flush_file_path = project.path.join(".git/GB_FLUSH");
    fs::write(flush_file_path.as_path(), "")?;

    let result = listener.handle("GB_FLUSH", &project.id)?;

    assert_eq!(result.len(), 0);

    assert!(!flush_file_path.exists(), "flush file deleted");

    Ok(())
}

View File

@ -0,0 +1,13 @@
use crate::init_opts_bare;
/// Creates a bare git repository in a fresh temporary directory, standing in
/// for the GitButler cloud remote in tests. The directory intentionally
/// outlives this function (the returned `Repository` does not own it).
fn test_remote_repository() -> anyhow::Result<git2::Repository> {
    // NOTE(fix): the previous version called `.path()` on a temporary
    // `TempDir`, whose guard was dropped — and its directory deleted — at the
    // end of that statement; git2 then silently recreated the path.
    // `into_path()` detaches the directory from the guard explicitly,
    // avoiding the delete/recreate race while keeping the same observable
    // behavior (the directory persists for the duration of the test run).
    let path = tempfile::tempdir()?.into_path();
    let repo_a = git2::Repository::init_opts(&path, &init_opts_bare())?;
    Ok(repo_a)
}
mod calculate_delta_handler;
mod fetch_gitbutler_data;
mod git_file_change;
mod push_project_to_gitbutler;

View File

@ -0,0 +1,405 @@
use anyhow::Result;
use gitbutler_app::{git, projects};
use std::collections::HashMap;
use std::path::PathBuf;
use crate::virtual_branches::set_test_target;
use crate::watcher::handler::test_remote_repository;
use crate::{Case, Suite};
use gitbutler_app::project_repository::LogUntil;
use gitbutler_app::watcher::handlers::push_project_to_gitbutler::Handler;
/// Walks the commit graph of `repo` starting at `head` and returns the
/// visited commit ids in the revwalk's default order.
fn log_walk(repo: &git2::Repository, head: git::Oid) -> Vec<git::Oid> {
    let mut walker = repo.revwalk().unwrap();
    walker.push(head.into()).unwrap();

    walker.map(|oid| oid.unwrap().into()).collect::<Vec<_>>()
}
/// Pushing to a project whose `code_git_url` is an empty string must fail.
#[tokio::test]
async fn push_error() -> Result<()> {
    let suite = Suite::default();
    let Case { project, .. } = suite.new_case();

    let api_project = projects::ApiProject {
        name: "test-sync".to_string(),
        description: None,
        repository_id: "123".to_string(),
        git_url: String::new(),
        // Empty code url — the push has no valid destination.
        code_git_url: Some(String::new()),
        created_at: 0_i32.to_string(),
        updated_at: 0_i32.to_string(),
        sync: true,
    };

    suite
        .projects
        .update(&projects::UpdateRequest {
            id: project.id,
            api: Some(api_project.clone()),
            ..Default::default()
        })
        .await?;

    let listener = Handler::new(suite.local_app_data, suite.projects, suite.users, 100);

    let res = listener.handle(&project.id).await;

    res.unwrap_err();

    Ok(())
}
/// Pushing a project to a local "cloud" bare repository transfers the whole
/// history up to the default target and records the push state.
#[tokio::test]
async fn push_simple() -> Result<()> {
    let suite = Suite::default();
    let Case {
        project,
        gb_repository,
        project_repository,
        ..
    } = suite.new_case_with_files(HashMap::from([(PathBuf::from("test.txt"), "test")]));
    suite.sign_in();

    set_test_target(&gb_repository, &project_repository).unwrap();

    let target_id = gb_repository.default_target().unwrap().unwrap().sha;

    // Expected commit list: everything reachable from the target.
    let reference = project_repository.l(target_id, LogUntil::End).unwrap();

    let cloud_code = test_remote_repository()?;

    let api_project = projects::ApiProject {
        name: "test-sync".to_string(),
        description: None,
        repository_id: "123".to_string(),
        git_url: String::new(),
        code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()),
        created_at: 0_i32.to_string(),
        updated_at: 0_i32.to_string(),
        sync: true,
    };

    suite
        .projects
        .update(&projects::UpdateRequest {
            id: project.id,
            api: Some(api_project.clone()),
            ..Default::default()
        })
        .await?;

    // Sanity check: the cloud repo does not know the commit before the push.
    cloud_code.find_commit(target_id.into()).unwrap_err();

    {
        // Batch size of 10 forces the push to happen in several chunks.
        let listener = Handler::new(
            suite.local_app_data,
            suite.projects.clone(),
            suite.users,
            10,
        );
        let res = listener.handle(&project.id).await.unwrap();
        assert!(res.is_empty());
    }

    // After the push, the full history is present in the cloud repo...
    cloud_code.find_commit(target_id.into()).unwrap();

    let pushed = log_walk(&cloud_code, target_id);
    assert_eq!(reference.len(), pushed.len());
    assert_eq!(reference, pushed);

    // ...and the push state is recorded on the project.
    assert_eq!(
        suite
            .projects
            .get(&project.id)
            .unwrap()
            .gitbutler_code_push_state
            .unwrap()
            .id,
        target_id
    );

    Ok(())
}
/// A commit reachable only through a remote-tracking ref (not the target
/// branch) must also be pushed to the cloud repository by the handler.
#[tokio::test]
async fn push_remote_ref() -> Result<()> {
    let suite = Suite::default();
    let Case {
        project,
        gb_repository,
        project_repository,
        ..
    } = suite.new_case();
    suite.sign_in();
    set_test_target(&gb_repository, &project_repository).unwrap();
    let cloud_code: git::Repository = test_remote_repository()?.into();
    let remote_repo: git::Repository = test_remote_repository()?.into();
    // Create a commit that initially exists only in `remote_repo`, and expose
    // it via a branch so it can be fetched.
    let last_commit = create_initial_commit(&remote_repo);
    remote_repo
        .reference(
            &git::Refname::Local(git::LocalRefname::new("refs/heads/testbranch", None)),
            last_commit,
            false,
            "",
        )
        .unwrap();
    // Fetch that branch into the project repository as remote-tracking refs
    // under `refs/remotes/tr/*`.
    let mut remote = project_repository
        .git_repository
        .remote("tr", &remote_repo.path().to_str().unwrap().parse().unwrap())
        .unwrap();
    remote
        .fetch(&["+refs/heads/*:refs/remotes/tr/*"], None)
        .unwrap();
    // Sanity check: the commit is now known locally.
    project_repository
        .git_repository
        .find_commit(last_commit)
        .unwrap();
    let api_project = projects::ApiProject {
        name: "test-sync".to_string(),
        description: None,
        repository_id: "123".to_string(),
        git_url: String::new(),
        // Code sync targets the local bare "cloud" repository.
        code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()),
        created_at: 0_i32.to_string(),
        updated_at: 0_i32.to_string(),
        sync: true,
    };
    suite
        .projects
        .update(&projects::UpdateRequest {
            id: project.id,
            api: Some(api_project.clone()),
            ..Default::default()
        })
        .await?;
    {
        let listener = Handler::new(
            suite.local_app_data,
            suite.projects.clone(),
            suite.users,
            10,
        );
        listener.handle(&project.id).await.unwrap();
    }
    // The remote-only commit made it to the cloud repository.
    cloud_code.find_commit(last_commit).unwrap();
    Ok(())
}
/// Writes a parent-less commit of the repository's current index and returns
/// its id; no reference is updated (`None` ref name).
fn create_initial_commit(repo: &git::Repository) -> git::Oid {
    let author = git::Signature::now("test", "test@email.com").unwrap();

    let mut index = repo.index().unwrap();
    let tree_id = index.write_tree().unwrap();
    let tree = repo.find_tree(tree_id).unwrap();

    // Empty parent slice: this is a dangling root commit.
    repo.commit(None, &author, &author, "initial commit", &tree, &[])
        .unwrap()
}
/// Stacks `commits` empty commits on top of the current `HEAD`, advancing the
/// head reference each time, and returns the id of the last one created.
/// Panics if `commits` is zero (there is no "last" commit to return).
fn create_test_commits(repo: &git::Repository, commits: usize) -> git::Oid {
    let signature = git::Signature::now("test", "test@email.com").unwrap();
    let mut newest = None;
    for i in 0..commits {
        let mut index = repo.index().unwrap();
        let tree_id = index.write_tree().unwrap();
        let tree = repo.find_tree(tree_id).unwrap();
        let head = repo.head().unwrap();
        // Re-resolve HEAD every round so the parent is the commit written in
        // the previous iteration.
        let parent = repo
            .find_commit(repo.refname_to_id("HEAD").unwrap())
            .unwrap();
        let id = repo
            .commit(
                Some(&head.name().unwrap()),
                &signature,
                &signature,
                format!("commit {i}").as_str(),
                &tree,
                &[&parent],
            )
            .unwrap();
        newest = Some(id);
    }
    newest.unwrap()
}
/// Pushing a history larger than the handler's batch size (here: 12 commits,
/// batch size 2) must still deliver the complete history to the remote.
#[tokio::test]
async fn push_batches() -> Result<()> {
    let suite = Suite::default();
    let Case {
        project,
        gb_repository,
        project_repository,
        ..
    } = suite.new_case();
    suite.sign_in();
    {
        let head: git::Oid = project_repository
            .get_head()
            .unwrap()
            .peel_to_commit()
            .unwrap()
            .id();
        let reference = project_repository.l(head, LogUntil::End).unwrap();
        // The fresh case starts with 2 commits; add 10 more so the push has to
        // be split into multiple batches.
        assert_eq!(reference.len(), 2);
        let head = create_test_commits(&project_repository.git_repository, 10);
        let reference = project_repository.l(head, LogUntil::End).unwrap();
        assert_eq!(reference.len(), 12);
    }
    set_test_target(&gb_repository, &project_repository).unwrap();
    let target_id = gb_repository.default_target().unwrap().unwrap().sha;
    // Expected commit list: everything reachable from the default target.
    let reference = project_repository.l(target_id, LogUntil::End).unwrap();
    let cloud_code = test_remote_repository()?;
    let api_project = projects::ApiProject {
        name: "test-sync".to_string(),
        description: None,
        repository_id: "123".to_string(),
        git_url: String::new(),
        // Point code sync at the local bare "cloud" repository.
        code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()),
        created_at: 0_i32.to_string(),
        updated_at: 0_i32.to_string(),
        sync: true,
    };
    suite
        .projects
        .update(&projects::UpdateRequest {
            id: project.id,
            api: Some(api_project.clone()),
            ..Default::default()
        })
        .await?;
    {
        // Batch size 2 forces several round trips for the 12-commit history.
        let listener = Handler::new(
            suite.local_app_data.clone(),
            suite.projects.clone(),
            suite.users.clone(),
            2,
        );
        listener.handle(&project.id).await.unwrap();
    }
    // Despite batching, the full history must arrive on the remote.
    cloud_code.find_commit(target_id.into()).unwrap();
    let pushed = log_walk(&cloud_code, target_id);
    assert_eq!(reference.len(), pushed.len());
    assert_eq!(reference, pushed);
    // The project records the last successfully pushed commit.
    assert_eq!(
        suite
            .projects
            .get(&project.id)
            .unwrap()
            .gitbutler_code_push_state
            .unwrap()
            .id,
        target_id
    );
    Ok(())
}
/// Running the handler when there is nothing new to push must be a no-op that
/// leaves remote contents and recorded push state consistent.
#[tokio::test]
async fn push_again_no_change() -> Result<()> {
    let suite = Suite::default();
    let Case {
        project,
        gb_repository,
        project_repository,
        ..
    } = suite.new_case_with_files(HashMap::from([(PathBuf::from("test.txt"), "test")]));
    suite.sign_in();
    set_test_target(&gb_repository, &project_repository).unwrap();
    let target_id = gb_repository.default_target().unwrap().unwrap().sha;
    // Expected commit list: everything reachable from the default target.
    let reference = project_repository.l(target_id, LogUntil::End).unwrap();
    let cloud_code = test_remote_repository()?;
    let api_project = projects::ApiProject {
        name: "test-sync".to_string(),
        description: None,
        repository_id: "123".to_string(),
        git_url: String::new(),
        // Point code sync at the local bare "cloud" repository.
        code_git_url: Some(cloud_code.path().to_str().unwrap().to_string()),
        created_at: 0_i32.to_string(),
        updated_at: 0_i32.to_string(),
        sync: true,
    };
    suite
        .projects
        .update(&projects::UpdateRequest {
            id: project.id,
            api: Some(api_project.clone()),
            ..Default::default()
        })
        .await?;
    // Precondition: the target commit is not on the cloud remote yet.
    cloud_code.find_commit(target_id.into()).unwrap_err();
    {
        let listener = Handler::new(
            suite.local_app_data,
            suite.projects.clone(),
            suite.users,
            10,
        );
        let res = listener.handle(&project.id).await.unwrap();
        assert!(res.is_empty());
    }
    // After the push the target history is on the remote and matches exactly.
    cloud_code.find_commit(target_id.into()).unwrap();
    let pushed = log_walk(&cloud_code, target_id);
    assert_eq!(reference.len(), pushed.len());
    assert_eq!(reference, pushed);
    // The project records the last successfully pushed commit.
    assert_eq!(
        suite
            .projects
            .get(&project.id)
            .unwrap()
            .gitbutler_code_push_state
            .unwrap()
            .id,
        target_id
    );
    Ok(())
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,47 @@
use gitbutler_app::zip::Zipper;
use walkdir::WalkDir;
use std::fs::File;
use std::io::Write;
use tempfile::tempdir;
/// Zipping a directory produces an archive file on disk.
#[test]
fn zip_dir() {
    let src = tempdir().unwrap();
    let mut file = File::create(src.path().join("test.txt")).unwrap();
    file.write_all(b"test").unwrap();

    let cache = tempdir().unwrap();
    let archive = Zipper::new(cache.path()).zip(src).unwrap();
    assert!(archive.exists());
}
/// Zipping a single file (rather than a directory) is rejected with an error.
#[test]
fn zip_file() {
    let dir = tempdir().unwrap();
    let file_path = dir.path().join("test.txt");
    let mut file = File::create(&file_path).unwrap();
    file.write_all(b"test").unwrap();

    let cache = tempdir().unwrap();
    Zipper::new(cache.path()).zip(file_path).unwrap_err();
}
#[test]
fn zip_once() {
let tmp_dir = tempdir().unwrap();
let tmp_dir_path = tmp_dir.path();
let file_path = tmp_dir_path.join("test.txt");
let mut file = File::create(file_path).unwrap();
file.write_all(b"test").unwrap();
let zipper_cache = tempdir().unwrap();
let zipper = Zipper::new(zipper_cache.path());
assert_eq!(zipper.zip(&tmp_dir).unwrap(), zipper.zip(&tmp_dir).unwrap());
assert_eq!(WalkDir::new(tmp_dir).into_iter().count(), 1);
}