From bc2fff968cf292ad5f07e6200ac1f980e2420f8e Mon Sep 17 00:00:00 2001 From: Sebastian Thiel Date: Fri, 29 Mar 2024 10:04:26 +0100 Subject: [PATCH 1/5] chore(app): move library-portion to top-level `src/` folder. This allows the tauri-specific parts to remain in the `app` crate, which will help to eventually release a `cli` crate as well. --- Cargo.lock | 61 + Cargo.toml | 84 +- gitbutler-app/src/lib.rs | 11 +- src/askpass.rs | 63 + src/assets.rs | 204 + src/database.rs | 48 + src/database/migrations/V0__deltas.sql | 12 + src/database/migrations/V1__sessions.sql | 11 + src/database/migrations/V2__files.sql | 14 + src/database/migrations/V3__bookmarks.sql | 8 + .../migrations/V4__bookmarks_update.sql | 16 + .../migrations/V5__bookmarks_update.sql | 28 + .../V6__sessions_project_id_id_idx.sql | 1 + src/database/migrations/V7__drop_files.sql | 2 + .../migrations/V8__drop_bookmarks.sql | 1 + src/dedup.rs | 45 + src/deltas.rs | 15 + src/deltas/controller.rs | 33 + src/deltas/database.rs | 122 + src/deltas/delta.rs | 9 + src/deltas/document.rs | 85 + src/deltas/operations.rs | 116 + src/deltas/reader.rs | 89 + src/deltas/writer.rs | 73 + src/error.rs | 414 ++ src/error/sentry.rs | 89 + src/fs.rs | 30 + src/gb_repository.rs | 3 + src/gb_repository/repository.rs | 967 ++++ src/git.rs | 42 + src/git/blob.rs | 17 + src/git/branch.rs | 53 + src/git/commit.rs | 75 + src/git/config.rs | 68 + src/git/credentials.rs | 392 ++ src/git/diff.rs | 421 ++ src/git/error.rs | 62 + src/git/index.rs | 164 + src/git/oid.rs | 61 + src/git/reference.rs | 64 + src/git/reference/refname.rs | 137 + src/git/reference/refname/error.rs | 17 + src/git/reference/refname/local.rs | 94 + src/git/reference/refname/remote.rs | 93 + src/git/reference/refname/virtual.rs | 65 + src/git/remote.rs | 43 + src/git/repository.rs | 535 +++ src/git/show.rs | 22 + src/git/signature.rs | 67 + src/git/tree.rs | 147 + src/git/url.rs | 91 + src/git/url/convert.rs | 128 + src/git/url/parse.rs | 147 + 
src/git/url/scheme.rs | 54 + src/id.rs | 118 + src/keys.rs | 6 + src/keys/controller.rs | 34 + src/keys/key.rs | 127 + src/keys/storage.rs | 43 + src/lib.rs | 41 + src/lock.rs | 51 + src/path.rs | 48 + src/project_repository.rs | 8 + src/project_repository/config.rs | 51 + src/project_repository/conflicts.rs | 144 + src/project_repository/repository.rs | 698 +++ src/project_repository/signatures.rs | 22 + src/projects.rs | 9 + src/projects/controller.rs | 344 ++ src/projects/project.rs | 112 + src/projects/storage.rs | 162 + src/reader.rs | 443 ++ src/sessions.rs | 14 + src/sessions/controller.rs | 91 + src/sessions/database.rs | 182 + src/sessions/iterator.rs | 68 + src/sessions/reader.rs | 105 + src/sessions/session.rs | 126 + src/sessions/writer.rs | 108 + src/ssh.rs | 67 + src/storage.rs | 73 + src/types.rs | 1 + src/types/default_true.rs | 90 + src/users.rs | 6 + src/users/controller.rs | 57 + src/users/storage.rs | 46 + src/users/user.rs | 35 + src/virtual_branches.rs | 29 + src/virtual_branches/base.rs | 657 +++ src/virtual_branches/branch.rs | 237 + src/virtual_branches/branch/file_ownership.rs | 178 + src/virtual_branches/branch/hunk.rs | 169 + src/virtual_branches/branch/ownership.rs | 183 + src/virtual_branches/branch/reader.rs | 19 + src/virtual_branches/branch/writer.rs | 160 + src/virtual_branches/context.rs | 124 + src/virtual_branches/controller.rs | 1112 +++++ src/virtual_branches/errors.rs | 837 ++++ src/virtual_branches/files.rs | 96 + src/virtual_branches/integration.rs | 351 ++ src/virtual_branches/iterator.rs | 56 + src/virtual_branches/remote.rs | 185 + src/virtual_branches/state.rs | 136 + src/virtual_branches/target.rs | 105 + src/virtual_branches/target/reader.rs | 31 + src/virtual_branches/target/writer.rs | 109 + src/virtual_branches/virtual.rs | 4058 +++++++++++++++++ src/windows.rs | 24 + src/writer.rs | 114 + src/zip.rs | 164 + src/zip/controller.rs | 72 + tests/app.rs | 259 ++ tests/common/mod.rs | 355 ++ tests/database/mod.rs | 20 + 
tests/deltas/document.rs | 263 ++ tests/deltas/mod.rs | 146 + tests/deltas/operations.rs | 55 + tests/gb_repository/mod.rs | 490 ++ tests/git/config.rs | 34 + tests/git/credentials.rs | 312 ++ tests/git/diff.rs | 457 ++ tests/git/mod.rs | 3 + tests/keys/mod.rs | 65 + tests/lock/mod.rs | 91 + tests/reader/mod.rs | 183 + tests/sessions/database.rs | 84 + tests/sessions/mod.rs | 106 + tests/suite/gb_repository.rs | 149 + tests/suite/projects.rs | 71 + tests/suite/virtual_branches/amend.rs | 352 ++ .../virtual_branches/apply_virtual_branch.rs | 278 ++ tests/suite/virtual_branches/cherry_pick.rs | 382 ++ tests/suite/virtual_branches/create_commit.rs | 198 + .../create_virtual_branch_from_branch.rs | 382 ++ .../virtual_branches/delete_virtual_branch.rs | 78 + .../virtual_branches/fetch_from_target.rs | 46 + tests/suite/virtual_branches/init.rs | 213 + tests/suite/virtual_branches/mod.rs | 176 + .../move_commit_to_vbranch.rs | 324 ++ tests/suite/virtual_branches/references.rs | 366 ++ .../virtual_branches/reset_virtual_branch.rs | 265 ++ .../virtual_branches/selected_for_changes.rs | 375 ++ .../suite/virtual_branches/set_base_branch.rs | 235 + tests/suite/virtual_branches/squash.rs | 356 ++ tests/suite/virtual_branches/unapply.rs | 177 + .../virtual_branches/unapply_ownership.rs | 61 + .../virtual_branches/update_base_branch.rs | 1929 ++++++++ .../virtual_branches/update_commit_message.rs | 364 ++ tests/suite/virtual_branches/upstream.rs | 149 + tests/types/mod.rs | 19 + tests/virtual_branches/branch/context.rs | 522 +++ .../virtual_branches/branch/file_ownership.rs | 151 + tests/virtual_branches/branch/hunk.rs | 89 + tests/virtual_branches/branch/mod.rs | 8 + tests/virtual_branches/branch/ownership.rs | 284 ++ tests/virtual_branches/branch/reader.rs | 98 + tests/virtual_branches/branch/writer.rs | 218 + tests/virtual_branches/iterator.rs | 117 + tests/virtual_branches/mod.rs | 2549 +++++++++++ tests/virtual_branches/target/mod.rs | 2 + 
tests/virtual_branches/target/reader.rs | 150 + tests/virtual_branches/target/writer.rs | 212 + tests/zip/mod.rs | 47 + 163 files changed, 32758 insertions(+), 6 deletions(-) create mode 100644 src/askpass.rs create mode 100644 src/assets.rs create mode 100644 src/database.rs create mode 100644 src/database/migrations/V0__deltas.sql create mode 100644 src/database/migrations/V1__sessions.sql create mode 100644 src/database/migrations/V2__files.sql create mode 100644 src/database/migrations/V3__bookmarks.sql create mode 100644 src/database/migrations/V4__bookmarks_update.sql create mode 100644 src/database/migrations/V5__bookmarks_update.sql create mode 100644 src/database/migrations/V6__sessions_project_id_id_idx.sql create mode 100644 src/database/migrations/V7__drop_files.sql create mode 100644 src/database/migrations/V8__drop_bookmarks.sql create mode 100644 src/dedup.rs create mode 100644 src/deltas.rs create mode 100644 src/deltas/controller.rs create mode 100644 src/deltas/database.rs create mode 100644 src/deltas/delta.rs create mode 100644 src/deltas/document.rs create mode 100644 src/deltas/operations.rs create mode 100644 src/deltas/reader.rs create mode 100644 src/deltas/writer.rs create mode 100644 src/error.rs create mode 100644 src/error/sentry.rs create mode 100644 src/fs.rs create mode 100644 src/gb_repository.rs create mode 100644 src/gb_repository/repository.rs create mode 100644 src/git.rs create mode 100644 src/git/blob.rs create mode 100644 src/git/branch.rs create mode 100644 src/git/commit.rs create mode 100644 src/git/config.rs create mode 100644 src/git/credentials.rs create mode 100644 src/git/diff.rs create mode 100644 src/git/error.rs create mode 100644 src/git/index.rs create mode 100644 src/git/oid.rs create mode 100644 src/git/reference.rs create mode 100644 src/git/reference/refname.rs create mode 100644 src/git/reference/refname/error.rs create mode 100644 src/git/reference/refname/local.rs create mode 100644 
src/git/reference/refname/remote.rs create mode 100644 src/git/reference/refname/virtual.rs create mode 100644 src/git/remote.rs create mode 100644 src/git/repository.rs create mode 100644 src/git/show.rs create mode 100644 src/git/signature.rs create mode 100644 src/git/tree.rs create mode 100644 src/git/url.rs create mode 100644 src/git/url/convert.rs create mode 100644 src/git/url/parse.rs create mode 100644 src/git/url/scheme.rs create mode 100644 src/id.rs create mode 100644 src/keys.rs create mode 100644 src/keys/controller.rs create mode 100644 src/keys/key.rs create mode 100644 src/keys/storage.rs create mode 100644 src/lib.rs create mode 100644 src/lock.rs create mode 100644 src/path.rs create mode 100644 src/project_repository.rs create mode 100644 src/project_repository/config.rs create mode 100644 src/project_repository/conflicts.rs create mode 100644 src/project_repository/repository.rs create mode 100644 src/project_repository/signatures.rs create mode 100644 src/projects.rs create mode 100644 src/projects/controller.rs create mode 100644 src/projects/project.rs create mode 100644 src/projects/storage.rs create mode 100644 src/reader.rs create mode 100644 src/sessions.rs create mode 100644 src/sessions/controller.rs create mode 100644 src/sessions/database.rs create mode 100644 src/sessions/iterator.rs create mode 100644 src/sessions/reader.rs create mode 100644 src/sessions/session.rs create mode 100644 src/sessions/writer.rs create mode 100644 src/ssh.rs create mode 100644 src/storage.rs create mode 100644 src/types.rs create mode 100644 src/types/default_true.rs create mode 100644 src/users.rs create mode 100644 src/users/controller.rs create mode 100644 src/users/storage.rs create mode 100644 src/users/user.rs create mode 100644 src/virtual_branches.rs create mode 100644 src/virtual_branches/base.rs create mode 100644 src/virtual_branches/branch.rs create mode 100644 src/virtual_branches/branch/file_ownership.rs create mode 100644 
src/virtual_branches/branch/hunk.rs create mode 100644 src/virtual_branches/branch/ownership.rs create mode 100644 src/virtual_branches/branch/reader.rs create mode 100644 src/virtual_branches/branch/writer.rs create mode 100644 src/virtual_branches/context.rs create mode 100644 src/virtual_branches/controller.rs create mode 100644 src/virtual_branches/errors.rs create mode 100644 src/virtual_branches/files.rs create mode 100644 src/virtual_branches/integration.rs create mode 100644 src/virtual_branches/iterator.rs create mode 100644 src/virtual_branches/remote.rs create mode 100644 src/virtual_branches/state.rs create mode 100644 src/virtual_branches/target.rs create mode 100644 src/virtual_branches/target/reader.rs create mode 100644 src/virtual_branches/target/writer.rs create mode 100644 src/virtual_branches/virtual.rs create mode 100644 src/windows.rs create mode 100644 src/writer.rs create mode 100644 src/zip.rs create mode 100644 src/zip/controller.rs create mode 100644 tests/app.rs create mode 100644 tests/common/mod.rs create mode 100644 tests/database/mod.rs create mode 100644 tests/deltas/document.rs create mode 100644 tests/deltas/mod.rs create mode 100644 tests/deltas/operations.rs create mode 100644 tests/gb_repository/mod.rs create mode 100644 tests/git/config.rs create mode 100644 tests/git/credentials.rs create mode 100644 tests/git/diff.rs create mode 100644 tests/git/mod.rs create mode 100644 tests/keys/mod.rs create mode 100644 tests/lock/mod.rs create mode 100644 tests/reader/mod.rs create mode 100644 tests/sessions/database.rs create mode 100644 tests/sessions/mod.rs create mode 100644 tests/suite/gb_repository.rs create mode 100644 tests/suite/projects.rs create mode 100644 tests/suite/virtual_branches/amend.rs create mode 100644 tests/suite/virtual_branches/apply_virtual_branch.rs create mode 100644 tests/suite/virtual_branches/cherry_pick.rs create mode 100644 tests/suite/virtual_branches/create_commit.rs create mode 100644 
tests/suite/virtual_branches/create_virtual_branch_from_branch.rs create mode 100644 tests/suite/virtual_branches/delete_virtual_branch.rs create mode 100644 tests/suite/virtual_branches/fetch_from_target.rs create mode 100644 tests/suite/virtual_branches/init.rs create mode 100644 tests/suite/virtual_branches/mod.rs create mode 100644 tests/suite/virtual_branches/move_commit_to_vbranch.rs create mode 100644 tests/suite/virtual_branches/references.rs create mode 100644 tests/suite/virtual_branches/reset_virtual_branch.rs create mode 100644 tests/suite/virtual_branches/selected_for_changes.rs create mode 100644 tests/suite/virtual_branches/set_base_branch.rs create mode 100644 tests/suite/virtual_branches/squash.rs create mode 100644 tests/suite/virtual_branches/unapply.rs create mode 100644 tests/suite/virtual_branches/unapply_ownership.rs create mode 100644 tests/suite/virtual_branches/update_base_branch.rs create mode 100644 tests/suite/virtual_branches/update_commit_message.rs create mode 100644 tests/suite/virtual_branches/upstream.rs create mode 100644 tests/types/mod.rs create mode 100644 tests/virtual_branches/branch/context.rs create mode 100644 tests/virtual_branches/branch/file_ownership.rs create mode 100644 tests/virtual_branches/branch/hunk.rs create mode 100644 tests/virtual_branches/branch/mod.rs create mode 100644 tests/virtual_branches/branch/ownership.rs create mode 100644 tests/virtual_branches/branch/reader.rs create mode 100644 tests/virtual_branches/branch/writer.rs create mode 100644 tests/virtual_branches/iterator.rs create mode 100644 tests/virtual_branches/mod.rs create mode 100644 tests/virtual_branches/target/mod.rs create mode 100644 tests/virtual_branches/target/reader.rs create mode 100644 tests/virtual_branches/target/writer.rs create mode 100644 tests/zip/mod.rs diff --git a/Cargo.lock b/Cargo.lock index 14a1bd556..6a0e54deb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1906,6 +1906,67 @@ dependencies = [ "thiserror", ] +[[package]] 
+name = "gitbutler" +version = "0.0.0" +dependencies = [ + "anyhow", + "async-trait", + "backoff", + "backtrace", + "bstr 1.9.1", + "byteorder", + "chrono", + "console-subscriber", + "diffy", + "filetime", + "fslock", + "futures", + "git2", + "git2-hooks", + "gitbutler-git", + "governor", + "itertools 0.12.1", + "lazy_static", + "log", + "md5", + "nonzero_ext", + "notify", + "notify-debouncer-full", + "num_cpus", + "once_cell", + "pretty_assertions", + "r2d2", + "r2d2_sqlite", + "rand 0.8.5", + "refinery", + "regex", + "reqwest 0.12.2", + "resolve-path", + "rusqlite", + "serde", + "serde_json", + "sha1", + "sha2", + "similar", + "slug", + "ssh-key", + "ssh2", + "tempfile", + "thiserror", + "tokio", + "tokio-util", + "toml 0.8.12", + "tracing", + "tracing-appender", + "tracing-subscriber", + "url", + "urlencoding", + "uuid", + "walkdir", + "zip", +] + [[package]] name = "gitbutler-app" version = "0.0.0" diff --git a/Cargo.toml b/Cargo.toml index 66f33920a..a8bcb0ed2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,3 +1,85 @@ +[package] +name = "gitbutler" +version = "0.0.0" +edition = "2021" +rust-version = "1.57" +authors = ["GitButler "] +publish = false + +[lib] +doctest = false + +[dev-dependencies] +once_cell = "1.19" +pretty_assertions = "1.4" + +[dependencies] +toml = "0.8.12" +anyhow = "1.0.81" +async-trait = "0.1.79" +backoff = "0.4.0" +backtrace = { version = "0.3.71", optional = true } +bstr = "1.9.1" +byteorder = "1.5.0" +chrono = { version = "0.4.37", features = ["serde"] } +console-subscriber = "0.2.0" +diffy = "0.3.0" +filetime = "0.2.23" +fslock = "0.2.1" +futures = "0.3" +git2.workspace = true +git2-hooks = "0.3" +governor = "0.6.3" +itertools = "0.12" +lazy_static = "1.4.0" +md5 = "0.7.0" +nonzero_ext = "0.3.0" +notify = { version = "6.0.1" } +notify-debouncer-full = "0.3.1" +num_cpus = "1.16.0" +once_cell = "1.19" +r2d2 = "0.8.10" +r2d2_sqlite = "0.22.0" +rand = "0.8.5" +refinery = { version = "0.8", features = [ "rusqlite" ] } +regex = "1.10" 
+reqwest = { version = "0.12.2", features = ["json"] } +resolve-path = "0.1.0" +rusqlite.workspace = true +serde.workspace = true +serde_json = { version = "1.0", features = [ "std", "arbitrary_precision" ] } +sha1 = "0.10.6" +sha2 = "0.10.8" +similar = { version = "2.4.0", features = ["unicode"] } +slug = "0.1.5" +ssh-key = { version = "0.6.5", features = [ "alloc", "ed25519" ] } +ssh2 = { version = "0.9.4", features = ["vendored-openssl"] } +log = "^0.4" +thiserror.workspace = true +tokio = { workspace = true, features = [ "full", "sync" ] } +tokio-util = "0.7.10" +tracing = "0.1.40" +tracing-appender = "0.2.3" +tracing-subscriber = "0.3.17" +url = { version = "2.5", features = ["serde"] } +urlencoding = "2.1.3" +uuid.workspace = true +walkdir = "2.5.0" +zip = "0.6.5" +tempfile = "3.10" +gitbutler-git = { path = "gitbutler-git" } + +[features] +# by default Tauri runs in production mode +# when `tauri dev` runs it is executed with `cargo run --no-default-features` if `devPath` is a URL +default = ["error-context"] +error-context = ["dep:backtrace"] + +[lints.clippy] +all = "deny" +perf = "deny" +correctness = "deny" + [workspace] members = [ "gitbutler-app", @@ -8,7 +90,7 @@ resolver = "2" [workspace.dependencies] git2 = { version = "0.18.3", features = ["vendored-openssl", "vendored-libgit2"] } -uuid = "1.8.0" +uuid = { version = "1.8.0", features = ["serde"] } serde = { version = "1.0", features = ["derive"] } thiserror = "1.0.58" rusqlite = { version = "0.29.0", features = [ "bundled", "blob" ] } diff --git a/gitbutler-app/src/lib.rs b/gitbutler-app/src/lib.rs index cb5bf5ba3..ce9dd0acd 100644 --- a/gitbutler-app/src/lib.rs +++ b/gitbutler-app/src/lib.rs @@ -16,13 +16,17 @@ pub mod analytics; pub mod app; pub mod askpass; -pub mod assets; pub mod commands; +pub mod events; +pub mod logs; +pub mod menu; +pub mod watcher; + +pub mod assets; pub mod database; pub mod dedup; pub mod deltas; pub mod error; -pub mod events; pub mod fs; pub mod gb_repository; pub mod 
git; @@ -30,8 +34,6 @@ pub mod github; pub mod id; pub mod keys; pub mod lock; -pub mod logs; -pub mod menu; pub mod path; pub mod project_repository; pub mod projects; @@ -43,7 +45,6 @@ pub mod storage; pub mod types; pub mod users; pub mod virtual_branches; -pub mod watcher; #[cfg(target_os = "windows")] pub mod windows; pub mod writer; diff --git a/src/askpass.rs b/src/askpass.rs new file mode 100644 index 000000000..33625a6bd --- /dev/null +++ b/src/askpass.rs @@ -0,0 +1,63 @@ +use std::{collections::HashMap, sync::Arc}; + +use serde::Serialize; +use tokio::sync::{oneshot, Mutex}; + +use crate::id::Id; +use crate::virtual_branches::BranchId; + +pub struct AskpassRequest { + sender: oneshot::Sender>, +} + +#[derive(Debug, Clone, serde::Serialize)] +// This is needed to end up with a struct with either `branch_id` or `action` +#[serde(untagged)] +pub enum Context { + Push { branch_id: Option }, + Fetch { action: String }, +} + +#[derive(Clone)] +pub struct AskpassBroker { + pending_requests: Arc, AskpassRequest>>>, + submit_prompt_event: Arc) + Send + Sync>, +} + +#[derive(Debug, Clone, serde::Serialize)] +pub struct PromptEvent { + id: Id, + prompt: String, + context: C, +} + +impl AskpassBroker { + pub fn init(submit_prompt: impl Fn(PromptEvent) + Send + Sync + 'static) -> Self { + Self { + pending_requests: Arc::new(Mutex::new(HashMap::new())), + submit_prompt_event: Arc::new(submit_prompt), + } + } + + pub async fn submit_prompt(&self, prompt: String, context: Context) -> Option { + let (sender, receiver) = oneshot::channel(); + let id = Id::generate(); + let request = AskpassRequest { sender }; + self.pending_requests.lock().await.insert(id, request); + (self.submit_prompt_event)(PromptEvent { + id, + prompt, + context, + }); + receiver.await.unwrap() + } + + pub async fn handle_response(&self, id: Id, response: Option) { + let mut pending_requests = self.pending_requests.lock().await; + if let Some(request) = pending_requests.remove(&id) { + let _ = 
request.sender.send(response); + } else { + log::warn!("received response for unknown askpass request: {}", id); + } + } +} diff --git a/src/assets.rs b/src/assets.rs new file mode 100644 index 000000000..510d330e8 --- /dev/null +++ b/src/assets.rs @@ -0,0 +1,204 @@ +use std::{collections::HashMap, path, sync}; + +use anyhow::Result; +use futures::future::join_all; +use tokio::sync::Semaphore; +use url::Url; + +use crate::{ + users, + virtual_branches::{ + Author, BaseBranch, RemoteBranchData, RemoteCommit, VirtualBranch, VirtualBranchCommit, + }, +}; + +#[derive(Clone)] +pub struct Proxy { + cache_dir: path::PathBuf, + + semaphores: sync::Arc>>, +} + +impl Proxy { + pub fn new(cache_dir: path::PathBuf) -> Self { + Proxy { + cache_dir, + semaphores: sync::Arc::new(tokio::sync::Mutex::new(HashMap::new())), + } + } + + pub async fn proxy_user(&self, user: users::User) -> users::User { + match Url::parse(&user.picture) { + Ok(picture) => users::User { + picture: self.proxy(&picture).await.map_or_else( + |error| { + tracing::error!(?error, "failed to proxy user picture"); + user.picture.clone() + }, + |url| url.to_string(), + ), + ..user + }, + Err(_) => user, + } + } + + async fn proxy_virtual_branch_commit( + &self, + commit: VirtualBranchCommit, + ) -> VirtualBranchCommit { + VirtualBranchCommit { + author: self.proxy_author(commit.author).await, + ..commit + } + } + + pub async fn proxy_virtual_branch(&self, branch: VirtualBranch) -> VirtualBranch { + VirtualBranch { + commits: join_all( + branch + .commits + .iter() + .map(|commit| self.proxy_virtual_branch_commit(commit.clone())) + .collect::>(), + ) + .await, + ..branch + } + } + + pub async fn proxy_virtual_branches(&self, branches: Vec) -> Vec { + join_all( + branches + .into_iter() + .map(|branch| self.proxy_virtual_branch(branch)) + .collect::>(), + ) + .await + } + + pub async fn proxy_remote_branch_data(&self, branch: RemoteBranchData) -> RemoteBranchData { + RemoteBranchData { + commits: join_all( + 
branch + .commits + .into_iter() + .map(|commit| self.proxy_remote_commit(commit)) + .collect::>(), + ) + .await, + ..branch + } + } + + async fn proxy_author(&self, author: Author) -> Author { + Author { + gravatar_url: self + .proxy(&author.gravatar_url) + .await + .unwrap_or_else(|error| { + tracing::error!(gravatar_url = %author.gravatar_url, ?error, "failed to proxy gravatar url"); + author.gravatar_url + }), + ..author + } + } + + async fn proxy_remote_commit(&self, commit: RemoteCommit) -> RemoteCommit { + RemoteCommit { + author: self.proxy_author(commit.author).await, + ..commit + } + } + + pub async fn proxy_base_branch(&self, base_branch: BaseBranch) -> BaseBranch { + BaseBranch { + recent_commits: join_all( + base_branch + .clone() + .recent_commits + .into_iter() + .map(|commit| self.proxy_remote_commit(commit)) + .collect::>(), + ) + .await, + upstream_commits: join_all( + base_branch + .clone() + .upstream_commits + .into_iter() + .map(|commit| self.proxy_remote_commit(commit)) + .collect::>(), + ) + .await, + ..base_branch.clone() + } + } + + // takes a url of a remote assets, downloads it into cache and returns a url that points to the cached file + pub async fn proxy(&self, src: &Url) -> Result { + #[cfg(unix)] + if src.scheme() == "asset" { + return Ok(src.clone()); + } + + if src.scheme() == "https" && src.host_str() == Some("asset.localhost") { + return Ok(src.clone()); + } + + let hash = md5::compute(src.to_string()); + let path = path::Path::new(src.path()); + let ext = path + .extension() + .map_or("jpg", |ext| ext.to_str().unwrap_or("jpg")); + let save_to = self.cache_dir.join(format!("{:X}.{}", hash, ext)); + + if save_to.exists() { + return Ok(build_asset_url(&save_to.display().to_string())); + } + + // only one download per url at a time + let mut semaphores = self.semaphores.lock().await; + let r = semaphores + .entry(src.clone()) + .or_insert_with(|| Semaphore::new(1)); + let _permit = r.acquire().await?; + + if save_to.exists() { + // 
check again, maybe url was downloaded + return Ok(build_asset_url(&save_to.display().to_string())); + } + + tracing::debug!(url = %src, "downloading image"); + + let resp = reqwest::get(src.clone()).await?; + if !resp.status().is_success() { + tracing::error!(url = %src, status = %resp.status(), "failed to download image"); + return Err(anyhow::anyhow!( + "Failed to download image {}: {}", + src, + resp.status() + )); + } + + let bytes = resp.bytes().await?; + std::fs::create_dir_all(&self.cache_dir)?; + std::fs::write(&save_to, bytes)?; + + Ok(build_asset_url(&save_to.display().to_string())) + } +} + +#[cfg(unix)] +fn build_asset_url(path: &str) -> Url { + Url::parse(&format!("asset://localhost/{}", urlencoding::encode(path))).unwrap() +} + +#[cfg(windows)] +fn build_asset_url(path: &str) -> Url { + Url::parse(&format!( + "https://asset.localhost/{}", + urlencoding::encode(path) + )) + .unwrap() +} diff --git a/src/database.rs b/src/database.rs new file mode 100644 index 000000000..467b56c84 --- /dev/null +++ b/src/database.rs @@ -0,0 +1,48 @@ +use std::{path, sync::Arc}; + +use anyhow::{Context, Result}; + +use r2d2::Pool; +use r2d2_sqlite::SqliteConnectionManager; +use refinery::config::Config; +use rusqlite::Transaction; + +mod embedded { + use refinery::embed_migrations; + embed_migrations!("src/database/migrations"); +} + +#[derive(Clone)] +pub struct Database { + pool: Arc>, +} + +impl Database { + pub fn open_in_directory>(path: P) -> Result { + let path = path.as_ref().to_path_buf().join("database.sqlite3"); + let manager = SqliteConnectionManager::file(&path); + let pool = r2d2::Pool::new(manager)?; + let mut cfg = Config::new(refinery::config::ConfigDbType::Sqlite) + .set_db_path(path.as_path().to_str().unwrap()); + embedded::migrations::runner() + .run(&mut cfg) + .map(|report| { + report + .applied_migrations() + .iter() + .for_each(|migration| tracing::info!(%migration, "migration applied")); + }) + .context("Failed to run migrations")?; + Ok(Self { + 
pool: Arc::new(pool), + }) + } + + pub fn transaction(&self, f: impl FnOnce(&Transaction) -> Result) -> Result { + let mut conn = self.pool.get()?; + let tx = conn.transaction().context("Failed to start transaction")?; + let result = f(&tx)?; + tx.commit().context("Failed to commit transaction")?; + Ok(result) + } +} diff --git a/src/database/migrations/V0__deltas.sql b/src/database/migrations/V0__deltas.sql new file mode 100644 index 000000000..395aabe67 --- /dev/null +++ b/src/database/migrations/V0__deltas.sql @@ -0,0 +1,12 @@ +CREATE TABLE `deltas` ( + `session_id` text NOT NULL, + `project_id` text NOT NULL, + `timestamp_ms` text NOT NULL, + `operations` blob NOT NULL, + `file_path` text NOT NULL, + PRIMARY KEY (`project_id`, `session_id`, `timestamp_ms`, `file_path`) +); + +CREATE INDEX `deltas_project_id_session_id_index` ON `deltas` (`project_id`, `session_id`); + +CREATE INDEX `deltas_project_id_session_id_file_path_index` ON `deltas` (`project_id`, `session_id`, `file_path`); diff --git a/src/database/migrations/V1__sessions.sql b/src/database/migrations/V1__sessions.sql new file mode 100644 index 000000000..fe92e1cb3 --- /dev/null +++ b/src/database/migrations/V1__sessions.sql @@ -0,0 +1,11 @@ +CREATE TABLE `sessions` ( + `id` text NOT NULL PRIMARY KEY, + `project_id` text NOT NULL, + `hash` text, + `branch` text, + `commit` text, + `start_timestamp_ms` text NOT NULL, + `last_timestamp_ms` text NOT NULL +); + +CREATE INDEX `sessions_project_id_index` ON `sessions` (`project_id`); diff --git a/src/database/migrations/V2__files.sql b/src/database/migrations/V2__files.sql new file mode 100644 index 000000000..2bd47f5fd --- /dev/null +++ b/src/database/migrations/V2__files.sql @@ -0,0 +1,14 @@ +CREATE TABLE `files` ( + `project_id` text NOT NULL, + `session_id` text NOT NULL, + `file_path` text NOT NULL, + `sha1` blob NOT NULL, + PRIMARY KEY (`project_id`, `session_id`, `file_path`) +); + +CREATE INDEX `files_project_id_session_id_index` ON `files` 
(`project_id`, `session_id`); + +CREATE TABLE `contents` ( + `sha1` blob NOT NULL PRIMARY KEY, + `content` blob NOT NULL +); diff --git a/src/database/migrations/V3__bookmarks.sql b/src/database/migrations/V3__bookmarks.sql new file mode 100644 index 000000000..e29e93202 --- /dev/null +++ b/src/database/migrations/V3__bookmarks.sql @@ -0,0 +1,8 @@ +CREATE TABLE `bookmarks` ( + `id` text NOT NULL PRIMARY KEY, + `project_id` text NOT NULL, + `timestamp_ms` text NOT NULL, + `note` text NOT NULL +); + +CREATE INDEX bookmarks_project_id_idx ON `bookmarks` (`project_id`); diff --git a/src/database/migrations/V4__bookmarks_update.sql b/src/database/migrations/V4__bookmarks_update.sql new file mode 100644 index 000000000..e068b765f --- /dev/null +++ b/src/database/migrations/V4__bookmarks_update.sql @@ -0,0 +1,16 @@ +ALTER TABLE `bookmarks` + ADD `created_timestamp_ms` text NOT NULL DEFAULT 0; + +UPDATE + `bookmarks` +SET + `created_timestamp_ms` = `timestamp_ms`; + +ALTER TABLE `bookmarks` + DROP COLUMN `timestamp_ms`; + +ALTER TABLE `bookmarks` + ADD `updated_timestamp_ms` text; + +ALTER TABLE `bookmarks` + ADD `deleted` boolean NOT NULL DEFAULT FALSE; diff --git a/src/database/migrations/V5__bookmarks_update.sql b/src/database/migrations/V5__bookmarks_update.sql new file mode 100644 index 000000000..e395a6421 --- /dev/null +++ b/src/database/migrations/V5__bookmarks_update.sql @@ -0,0 +1,28 @@ +ALTER TABLE bookmarks RENAME TO bookmarks_old; + +DROP INDEX `bookmarks_project_id_idx`; + +CREATE TABLE bookmarks ( + `project_id` text NOT NULL, + `timestamp_ms` text NOT NULL, + `note` text NOT NULL, + `deleted` boolean NOT NULL, + `created_timestamp_ms` text NOT NULL, + `updated_timestamp_ms` text NOT NULL, + PRIMARY KEY (`project_id`, `timestamp_ms`) +); + +CREATE INDEX `bookmarks_project_id_idx` ON `bookmarks` (`project_id`); + +INSERT INTO bookmarks (`project_id`, `timestamp_ms`, `note`, `deleted`, `created_timestamp_ms`, `updated_timestamp_ms`) +SELECT + `project_id`, + 
`created_timestamp_ms`, + `note`, + `deleted`, + `created_timestamp_ms`, + `updated_timestamp_ms` +FROM + bookmarks_old; + +DROP TABLE bookmarks_old; diff --git a/src/database/migrations/V6__sessions_project_id_id_idx.sql b/src/database/migrations/V6__sessions_project_id_id_idx.sql new file mode 100644 index 000000000..47c732087 --- /dev/null +++ b/src/database/migrations/V6__sessions_project_id_id_idx.sql @@ -0,0 +1 @@ +CREATE INDEX `sessions_project_id_id_index` ON `sessions` (`project_id`, `id`); diff --git a/src/database/migrations/V7__drop_files.sql b/src/database/migrations/V7__drop_files.sql new file mode 100644 index 000000000..d5700869a --- /dev/null +++ b/src/database/migrations/V7__drop_files.sql @@ -0,0 +1,2 @@ +DROP TABLE files; +DROP TABLE contents; diff --git a/src/database/migrations/V8__drop_bookmarks.sql b/src/database/migrations/V8__drop_bookmarks.sql new file mode 100644 index 000000000..f8b391275 --- /dev/null +++ b/src/database/migrations/V8__drop_bookmarks.sql @@ -0,0 +1 @@ +DROP TABLE bookmarks; diff --git a/src/dedup.rs b/src/dedup.rs new file mode 100644 index 000000000..7c59249b7 --- /dev/null +++ b/src/dedup.rs @@ -0,0 +1,45 @@ +pub(crate) fn dedup(existing: &[&str], new: &str) -> String { + dedup_fmt(existing, new, " ") +} + +/// Makes sure that _new_ is not in _existing_ by adding a number to it. +/// the number is increased until the name is unique. 
+pub(crate) fn dedup_fmt(existing: &[&str], new: &str, separator: &str) -> String { + existing + .iter() + .filter_map(|x| { + x.strip_prefix(new) + .and_then(|x| x.strip_prefix(separator).or(Some(""))) + .and_then(|x| { + if x.is_empty() { + Some(0_i32) + } else { + x.parse::().ok() + } + }) + }) + .max() + .map_or_else( + || new.to_string(), + |x| format!("{new}{separator}{}", x + 1_i32), + ) +} + +#[test] +fn tests() { + for (existing, new, expected) in [ + (vec!["bar", "baz"], "foo", "foo"), + (vec!["foo", "bar", "baz"], "foo", "foo 1"), + (vec!["foo", "foo 2"], "foo", "foo 3"), + (vec!["foo", "foo 1", "foo 2"], "foo", "foo 3"), + (vec!["foo", "foo 1", "foo 2"], "foo 1", "foo 1 1"), + (vec!["foo", "foo 1", "foo 2"], "foo 2", "foo 2 1"), + (vec!["foo", "foo 1", "foo 2"], "foo 3", "foo 3"), + (vec!["foo 2"], "foo", "foo 3"), + (vec!["foo", "foo 1", "foo 2", "foo 4"], "foo", "foo 5"), + (vec!["foo", "foo 0"], "foo", "foo 1"), + (vec!["foo 0"], "foo", "foo 1"), + ] { + assert_eq!(dedup(&existing, new), expected.to_string()); + } +} diff --git a/src/deltas.rs b/src/deltas.rs new file mode 100644 index 000000000..63d7c3db2 --- /dev/null +++ b/src/deltas.rs @@ -0,0 +1,15 @@ +mod controller; +mod delta; +mod document; +mod reader; +mod writer; + +pub mod database; +pub mod operations; + +pub use controller::Controller; +pub use database::Database; +pub use delta::Delta; +pub use document::Document; +pub use reader::DeltasReader as Reader; +pub use writer::DeltasWriter as Writer; diff --git a/src/deltas/controller.rs b/src/deltas/controller.rs new file mode 100644 index 000000000..ebb479e77 --- /dev/null +++ b/src/deltas/controller.rs @@ -0,0 +1,33 @@ +use std::collections::HashMap; + +use crate::{projects::ProjectId, sessions::SessionId}; + +use super::{database, Delta}; + +#[derive(Clone)] +pub struct Controller { + database: database::Database, +} + +#[derive(Debug, thiserror::Error)] +pub enum ListError { + #[error(transparent)] + Other(#[from] anyhow::Error), +} + 
+impl Controller { + pub fn new(database: database::Database) -> Controller { + Controller { database } + } + + pub fn list_by_session_id( + &self, + project_id: &ProjectId, + session_id: &SessionId, + paths: &Option>, + ) -> Result>, ListError> { + self.database + .list_by_project_id_session_id(project_id, session_id, paths) + .map_err(Into::into) + } +} diff --git a/src/deltas/database.rs b/src/deltas/database.rs new file mode 100644 index 000000000..65492e5d0 --- /dev/null +++ b/src/deltas/database.rs @@ -0,0 +1,122 @@ +use std::{collections::HashMap, path}; + +use anyhow::{Context, Result}; + +use crate::{database, projects::ProjectId, sessions::SessionId}; + +use super::{delta, operations}; + +#[derive(Clone)] +pub struct Database { + database: database::Database, +} + +impl Database { + pub fn new(database: database::Database) -> Database { + Database { database } + } + + pub fn insert( + &self, + project_id: &ProjectId, + session_id: &SessionId, + file_path: &path::Path, + deltas: &Vec, + ) -> Result<()> { + self.database.transaction(|tx| -> Result<()> { + let mut stmt = insert_stmt(tx).context("Failed to prepare insert statement")?; + for delta in deltas { + let operations = serde_json::to_vec(&delta.operations) + .context("Failed to serialize operations")?; + let timestamp_ms = delta.timestamp_ms.to_string(); + stmt.execute(rusqlite::named_params! 
{ + ":project_id": project_id, + ":session_id": session_id, + ":file_path": file_path.display().to_string(), + ":timestamp_ms": timestamp_ms, + ":operations": operations, + }) + .context("Failed to execute insert statement")?; + } + Ok(()) + })?; + + Ok(()) + } + + pub fn list_by_project_id_session_id( + &self, + project_id: &ProjectId, + session_id: &SessionId, + file_path_filter: &Option>, + ) -> Result>> { + self.database + .transaction(|tx| -> Result>> { + let mut stmt = list_by_project_id_session_id_stmt(tx) + .context("Failed to prepare query statement")?; + let mut rows = stmt + .query(rusqlite::named_params! { + ":project_id": project_id, + ":session_id": session_id, + }) + .context("Failed to execute query statement")?; + let mut deltas: HashMap> = HashMap::new(); + while let Some(row) = rows + .next() + .context("Failed to iterate over query results")? + { + let file_path: String = row.get(0).context("Failed to get file_path")?; + if let Some(file_path_filter) = &file_path_filter { + if !file_path_filter.contains(&file_path.as_str()) { + continue; + } + } + let timestamp_ms: String = row.get(1).context("Failed to get timestamp_ms")?; + let operations: Vec = row.get(2).context("Failed to get operations")?; + let operations: Vec = + serde_json::from_slice(&operations) + .context("Failed to deserialize operations")?; + let timestamp_ms: u128 = timestamp_ms + .parse() + .context("Failed to parse timestamp_ms as u64")?; + let delta = delta::Delta { + operations, + timestamp_ms, + }; + if let Some(deltas_for_file_path) = deltas.get_mut(&file_path) { + deltas_for_file_path.push(delta); + } else { + deltas.insert(file_path, vec![delta]); + } + } + Ok(deltas) + }) + } +} + +fn list_by_project_id_session_id_stmt<'conn>( + tx: &'conn rusqlite::Transaction, +) -> Result> { + Ok(tx.prepare_cached( + " + SELECT `file_path`, `timestamp_ms`, `operations` + FROM `deltas` + WHERE `session_id` = :session_id AND `project_id` = :project_id + ORDER BY `timestamp_ms` ASC", + 
)?) +} + +fn insert_stmt<'conn>( + tx: &'conn rusqlite::Transaction, +) -> Result> { + Ok(tx.prepare_cached( + "INSERT INTO `deltas` ( + `project_id`, `session_id`, `timestamp_ms`, `operations`, `file_path` + ) VALUES ( + :project_id, :session_id, :timestamp_ms, :operations, :file_path + ) + ON CONFLICT(`project_id`, `session_id`, `file_path`, `timestamp_ms`) DO UPDATE SET + `operations` = :operations + ", + )?) +} diff --git a/src/deltas/delta.rs b/src/deltas/delta.rs new file mode 100644 index 000000000..99879282d --- /dev/null +++ b/src/deltas/delta.rs @@ -0,0 +1,9 @@ +use super::operations; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct Delta { + pub operations: Vec, + pub timestamp_ms: u128, +} diff --git a/src/deltas/document.rs b/src/deltas/document.rs new file mode 100644 index 000000000..5bf09c4d5 --- /dev/null +++ b/src/deltas/document.rs @@ -0,0 +1,85 @@ +use crate::reader; + +use super::{delta, operations}; +use anyhow::Result; +use std::{ + fmt::{Display, Formatter}, + time::SystemTime, +}; + +#[derive(Debug, Clone, Default)] +pub struct Document { + doc: Vec, + deltas: Vec, +} + +fn apply_deltas(doc: &mut Vec, deltas: &Vec) -> Result<()> { + for delta in deltas { + for operation in &delta.operations { + operation.apply(doc)?; + } + } + Ok(()) +} + +impl Document { + pub fn get_deltas(&self) -> Vec { + self.deltas.clone() + } + + // returns a text document where internal state is seeded with value, and deltas are applied. 
+ pub fn new(value: Option<&reader::Content>, deltas: Vec) -> Result { + let mut all_deltas = vec![]; + if let Some(reader::Content::UTF8(value)) = value { + all_deltas.push(delta::Delta { + operations: operations::get_delta_operations("", value), + timestamp_ms: 0, + }); + } + all_deltas.append(&mut deltas.clone()); + let mut doc = vec![]; + apply_deltas(&mut doc, &all_deltas)?; + Ok(Document { doc, deltas }) + } + + pub fn update(&mut self, value: Option<&reader::Content>) -> Result> { + let new_text = match value { + Some(reader::Content::UTF8(value)) => value, + Some(_) | None => "", + }; + + let operations = operations::get_delta_operations(&self.to_string(), new_text); + let delta = if operations.is_empty() { + if let Some(reader::Content::UTF8(value)) = value { + if !value.is_empty() { + return Ok(None); + } + } + + delta::Delta { + operations, + timestamp_ms: SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap() + .as_millis(), + } + } else { + delta::Delta { + operations, + timestamp_ms: SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap() + .as_millis(), + } + }; + apply_deltas(&mut self.doc, &vec![delta.clone()])?; + self.deltas.push(delta.clone()); + Ok(Some(delta)) + } +} + +impl Display for Document { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.doc.iter().collect::()) + } +} diff --git a/src/deltas/operations.rs b/src/deltas/operations.rs new file mode 100644 index 000000000..6374834e0 --- /dev/null +++ b/src/deltas/operations.rs @@ -0,0 +1,116 @@ +use std::cmp::Ordering; + +use anyhow::Result; +use serde::{Deserialize, Serialize}; +use similar::{ChangeTag, TextDiff}; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub enum Operation { + // corresponds to YText.insert(index, chunk) + Insert((usize, String)), + // corresponds to YText.remove_range(index, len) + Delete((usize, usize)), +} + +impl Operation { + pub fn 
apply(&self, text: &mut Vec) -> Result<()> { + match self { + Operation::Insert((index, chunk)) => match index.cmp(&text.len()) { + Ordering::Greater => Err(anyhow::anyhow!( + "Index out of bounds, {} > {}", + index, + text.len() + )), + Ordering::Equal => { + text.extend(chunk.chars()); + Ok(()) + } + Ordering::Less => { + text.splice(*index..*index, chunk.chars()); + Ok(()) + } + }, + Operation::Delete((index, len)) => { + if *index > text.len() { + Err(anyhow::anyhow!( + "Index out of bounds, {} > {}", + index, + text.len() + )) + } else if *index + *len > text.len() { + Err(anyhow::anyhow!( + "Index + length out of bounds, {} > {}", + index + len, + text.len() + )) + } else { + text.splice(*index..(*index + *len), "".chars()); + Ok(()) + } + } + } + } +} + +// merges touching operations of the same type in to one operation +// e.g. [Insert((0, "hello")), Insert((5, " world"))] -> [Insert((0, "hello world"))] +// e.g. [Delete((0, 5)), Delete((5, 5))] -> [Delete((0, 10))] +// e.g. [Insert((0, "hello")), Delete((0, 5))] -> [Insert((0, "hello")), Delete((0, 5))] +fn merge_touching(ops: &Vec) -> Vec { + let mut merged = vec![]; + + for op in ops { + match (merged.last_mut(), op) { + (Some(Operation::Insert((index, chunk))), Operation::Insert((index2, chunk2))) => { + if *index + chunk.len() == *index2 { + chunk.push_str(chunk2); + } else { + merged.push(op.clone()); + } + } + (Some(Operation::Delete((index, len))), Operation::Delete((index2, len2))) => { + if *index == *index2 { + *len += len2; + } else { + merged.push(op.clone()); + } + } + _ => merged.push(op.clone()), + } + } + + merged +} + +pub fn get_delta_operations(initial_text: &str, final_text: &str) -> Vec { + if initial_text == final_text { + return vec![]; + } + + let changeset = TextDiff::configure().diff_graphemes(initial_text, final_text); + let mut deltas = vec![]; + + let mut offset = 0; + for change in changeset.iter_all_changes() { + match change.tag() { + ChangeTag::Delete => { + 
deltas.push(Operation::Delete(( + offset, + change.as_str().unwrap_or("").chars().count(), + ))); + } + ChangeTag::Insert => { + let text = change.as_str().unwrap(); + deltas.push(Operation::Insert((offset, text.to_string()))); + offset = change.new_index().unwrap() + text.chars().count(); + } + ChangeTag::Equal => { + let text = change.as_str().unwrap(); + offset = change.new_index().unwrap() + text.chars().count(); + } + } + } + + merge_touching(&deltas) +} diff --git a/src/deltas/reader.rs b/src/deltas/reader.rs new file mode 100644 index 000000000..9470268d1 --- /dev/null +++ b/src/deltas/reader.rs @@ -0,0 +1,89 @@ +use std::{collections::HashMap, path}; + +use anyhow::{Context, Result}; + +use crate::{reader, sessions}; + +use super::Delta; + +pub struct DeltasReader<'reader> { + reader: &'reader reader::Reader<'reader>, +} + +impl<'reader> From<&'reader reader::Reader<'reader>> for DeltasReader<'reader> { + fn from(reader: &'reader reader::Reader<'reader>) -> Self { + DeltasReader { reader } + } +} + +#[derive(thiserror::Error, Debug)] +pub enum ReadError { + #[error("not found")] + NotFound, + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +impl<'reader> DeltasReader<'reader> { + pub fn new(reader: &'reader sessions::Reader<'reader>) -> Self { + DeltasReader { + reader: reader.reader(), + } + } + + pub fn read_file>(&self, path: P) -> Result>> { + match self.read(Some(&[path.as_ref()])) { + Ok(deltas) => Ok(deltas.into_iter().next().map(|(_, deltas)| deltas)), + Err(ReadError::NotFound) => Ok(None), + Err(err) => Err(err.into()), + } + } + + pub fn read( + &self, + filter: Option<&[&path::Path]>, + ) -> Result>, ReadError> { + let deltas_dir = path::Path::new("session/deltas"); + let mut paths = self.reader.list_files(deltas_dir)?; + if let Some(filter) = filter { + paths = paths + .into_iter() + .filter(|file_path| filter.iter().any(|path| file_path.eq(path))) + .collect::>(); + } + paths = paths.iter().map(|path| 
deltas_dir.join(path)).collect(); + let files = self.reader.batch(&paths).context("failed to batch read")?; + + let files = files + .into_iter() + .map(|file| { + file.map_err(|error| match error { + reader::Error::NotFound => ReadError::NotFound, + error => ReadError::Other(error.into()), + }) + }) + .collect::, _>>()?; + + Ok(paths + .into_iter() + .zip(files) + .filter_map(|(path, file)| { + path.strip_prefix(deltas_dir) + .ok() + .map(|path| (path.to_path_buf(), file)) + }) + .filter_map(|(path, file)| { + if let reader::Content::UTF8(content) = file { + if content.is_empty() { + // this is a leftover from some bug, shouldn't happen anymore + return None; + } + let deltas = serde_json::from_str(&content).ok()?; + Some(Ok((path, deltas))) + } else { + Some(Err(anyhow::anyhow!("unexpected content type"))) + } + }) + .collect::>>()?) + } +} diff --git a/src/deltas/writer.rs b/src/deltas/writer.rs new file mode 100644 index 000000000..98c738581 --- /dev/null +++ b/src/deltas/writer.rs @@ -0,0 +1,73 @@ +use std::path::PathBuf; + +use anyhow::Result; + +use crate::{gb_repository, writer}; + +use super::Delta; + +pub struct DeltasWriter<'writer> { + repository: &'writer gb_repository::Repository, + writer: writer::DirWriter, +} + +impl<'writer> DeltasWriter<'writer> { + pub fn new(repository: &'writer gb_repository::Repository) -> Result { + writer::DirWriter::open(repository.root()).map(|writer| Self { repository, writer }) + } + + pub fn write>(&self, path: P, deltas: &Vec) -> Result<()> { + self.repository.mark_active_session()?; + + let _lock = self.repository.lock(); + + let path = path.as_ref(); + let raw_deltas = serde_json::to_string(&deltas)?; + + self.writer + .write_string(PathBuf::from("session/deltas").join(path), &raw_deltas)?; + + tracing::debug!( + project_id = %self.repository.get_project_id(), + path = %path.display(), + "wrote deltas" + ); + + Ok(()) + } + + pub fn remove_wd_file>(&self, path: P) -> Result<()> { + 
self.repository.mark_active_session()?; + + let _lock = self.repository.lock(); + + let path = path.as_ref(); + self.writer.remove(PathBuf::from("session/wd").join(path))?; + + tracing::debug!( + project_id = %self.repository.get_project_id(), + path = %path.display(), + "deleted session wd file" + ); + + Ok(()) + } + + pub fn write_wd_file>(&self, path: P, contents: &str) -> Result<()> { + self.repository.mark_active_session()?; + + let _lock = self.repository.lock(); + + let path = path.as_ref(); + self.writer + .write_string(PathBuf::from("session/wd").join(path), contents)?; + + tracing::debug!( + project_id = %self.repository.get_project_id(), + path = %path.display(), + "wrote session wd file" + ); + + Ok(()) + } +} diff --git a/src/error.rs b/src/error.rs new file mode 100644 index 000000000..7fd068dd7 --- /dev/null +++ b/src/error.rs @@ -0,0 +1,414 @@ +#[cfg(feature = "sentry")] +mod sentry; + +pub use legacy::*; + +pub mod gb { + #[cfg(feature = "error-context")] + pub use error_context::*; + + #[cfg(feature = "error-context")] + mod error_context { + use super::{ErrorKind, Result, WithContext}; + use backtrace::Backtrace; + use std::collections::BTreeMap; + + #[derive(Debug)] + pub struct Context { + pub backtrace: Backtrace, + pub caused_by: Option>, + pub vars: BTreeMap, + } + + impl Default for Context { + fn default() -> Self { + Self { + backtrace: Backtrace::new_unresolved(), + caused_by: None, + vars: BTreeMap::default(), + } + } + } + + #[derive(Debug)] + pub struct ErrorContext { + error: ErrorKind, + context: Context, + } + + impl core::fmt::Display for ErrorContext { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + self.error.fmt(f) + } + } + + impl std::error::Error for ErrorContext { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + self.context + .caused_by + .as_ref() + .map(|e| e as &dyn std::error::Error) + } + + fn provide<'a>(&'a self, request: &mut std::error::Request<'a>) { + if 
request.would_be_satisfied_by_ref_of::() { + request.provide_ref(&self.context.backtrace); + } + } + } + + impl ErrorContext { + #[inline] + pub fn error(&self) -> &ErrorKind { + &self.error + } + + #[inline] + pub fn context(&self) -> &Context { + &self.context + } + + pub fn into_owned(self) -> (ErrorKind, Context) { + (self.error, self.context) + } + } + + impl> WithContext for E { + fn add_err_context, V: Into>( + self, + name: K, + value: V, + ) -> ErrorContext { + let mut e = self.into(); + e.context.vars.insert(name.into(), value.into()); + e + } + + fn wrap_err>(self, error: K) -> ErrorContext { + let mut new_err = ErrorContext { + error: error.into(), + context: Context::default(), + }; + + new_err.context.caused_by = Some(Box::new(self.into())); + new_err + } + } + + impl WithContext> for std::result::Result + where + E: Into, + { + #[inline] + fn add_err_context, V: Into>( + self, + name: K, + value: V, + ) -> Result { + self.map_err(|e| { + ErrorContext { + error: e.into(), + context: Context::default(), + } + .add_err_context(name, value) + }) + } + + #[inline] + fn wrap_err>(self, error: K) -> Result { + self.map_err(|e| { + ErrorContext { + error: e.into(), + context: Context::default(), + } + .wrap_err(error) + }) + } + } + + #[cfg(feature = "error-context")] + impl serde::Serialize for ErrorContext { + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeSeq; + let mut seq = serializer.serialize_seq(None)?; + let mut current = Some(self); + while let Some(err) = current { + seq.serialize_element(&err.error)?; + current = err.context.caused_by.as_deref(); + } + seq.end() + } + } + + impl From for ErrorContext { + fn from(error: ErrorKind) -> Self { + Self { + error, + context: Context::default(), + } + } + } + + #[cfg(test)] + mod tests { + use super::*; + + #[test] + fn error_context() { + fn low_level_io() -> std::result::Result<(), std::io::Error> { + 
Err(std::io::Error::new(std::io::ErrorKind::Other, "oh no!")) + } + + fn app_level_io() -> Result<()> { + low_level_io().add_err_context("foo", "bar")?; + unreachable!(); + } + + use std::error::Error; + + let r = app_level_io(); + assert!(r.is_err()); + let e = r.unwrap_err(); + assert_eq!(e.context().vars.get("foo"), Some(&"bar".to_string())); + assert!(e.source().is_none()); + assert!(e.to_string().starts_with("io.other-error:")); + } + } + } + + pub trait WithContext { + fn add_err_context, V: Into>(self, name: K, value: V) -> R; + fn wrap_err>(self, error: E) -> R; + } + + #[cfg(not(feature = "error-context"))] + pub struct Context; + + pub trait ErrorCode { + fn code(&self) -> String; + fn message(&self) -> String; + } + + #[derive(Debug, thiserror::Error)] + pub enum ErrorKind { + Io(#[from] ::std::io::Error), + Git(#[from] ::git2::Error), + CommonDirNotAvailable(String), + } + + impl ErrorCode for std::io::Error { + fn code(&self) -> String { + slug::slugify(self.kind().to_string()) + } + + fn message(&self) -> String { + self.to_string() + } + } + + impl ErrorCode for git2::Error { + fn code(&self) -> String { + slug::slugify(format!("{:?}", self.class())) + } + + fn message(&self) -> String { + self.to_string() + } + } + + impl ErrorCode for ErrorKind { + fn code(&self) -> String { + match self { + ErrorKind::Io(e) => format!("io.{}", ::code(e)), + ErrorKind::Git(e) => format!("git.{}", ::code(e)), + ErrorKind::CommonDirNotAvailable(_) => "no-common-dir".to_string(), + } + } + + fn message(&self) -> String { + match self { + ErrorKind::Io(e) => ::message(e), + ErrorKind::Git(e) => ::message(e), + ErrorKind::CommonDirNotAvailable(s) => format!("{s} is not available"), + } + } + } + + impl core::fmt::Display for ErrorKind { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + format!( + "{}: {}", + ::code(self), + ::message(self) + ) + .fmt(f) + } + } + + #[cfg(not(feature = "error-context"))] + pub type Error = ErrorKind; + 
#[cfg(feature = "error-context")] + pub type Error = ErrorContext; + + pub type Result = ::std::result::Result; + + #[cfg(not(feature = "error-context"))] + impl ErrorKind { + #[inline] + pub fn error(&self) -> &Error { + self + } + + #[inline] + pub fn context(&self) -> Option<&Context> { + None + } + } + + #[cfg(not(feature = "error-context"))] + impl WithContext for ErrorKind { + #[inline] + fn add_err_context, V: Into>(self, _name: K, _value: V) -> Error { + self + } + + #[inline] + fn wrap_err(self, _error: Error) -> Error { + self + } + } + + #[cfg(not(feature = "error-context"))] + impl WithContext> for std::result::Result { + #[inline] + fn add_err_context, V: Into>( + self, + _name: K, + _value: V, + ) -> std::result::Result { + self + } + + #[inline] + fn wrap_err(self, _error: Error) -> std::result::Result { + self + } + } + + #[cfg(feature = "error-context")] + impl serde::Serialize for ErrorKind { + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeTuple; + let mut seq = serializer.serialize_tuple(2)?; + seq.serialize_element(&self.code())?; + seq.serialize_element(&self.message())?; + seq.end() + } + } +} + +//#[deprecated( +// note = "the types in the error::legacy::* module are deprecated; use error::gb::Error and error::gb::Result instead" +//)] +mod legacy { + use core::fmt; + + use crate::{keys, projects, users}; + use serde::{ser::SerializeMap, Serialize}; + + #[derive(Debug)] + pub enum Code { + Unknown, + Validation, + Projects, + Branches, + ProjectGitAuth, + ProjectGitRemote, + ProjectConflict, + ProjectHead, + Menu, + PreCommitHook, + CommitMsgHook, + } + + impl fmt::Display for Code { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Code::Menu => write!(f, "errors.menu"), + Code::Unknown => write!(f, "errors.unknown"), + Code::Validation => write!(f, "errors.validation"), + Code::Projects => write!(f, "errors.projects"), + Code::Branches => 
write!(f, "errors.branches"), + Code::ProjectGitAuth => write!(f, "errors.projects.git.auth"), + Code::ProjectGitRemote => write!(f, "errors.projects.git.remote"), + Code::ProjectHead => write!(f, "errors.projects.head"), + Code::ProjectConflict => write!(f, "errors.projects.conflict"), + //TODO: rename js side to be more precise what kind of hook error this is + Code::PreCommitHook => write!(f, "errors.hook"), + Code::CommitMsgHook => write!(f, "errors.hooks.commit.msg"), + } + } + } + + #[derive(Debug, thiserror::Error)] + pub enum Error { + #[error("[{code}]: {message}")] + UserError { code: Code, message: String }, + #[error("[errors.unknown]: Something went wrong")] + Unknown, + } + + impl Serialize for Error { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let (code, message) = match self { + Error::UserError { code, message } => (code.to_string(), message.to_string()), + Error::Unknown => ( + Code::Unknown.to_string(), + "Something went wrong".to_string(), + ), + }; + + let mut map = serializer.serialize_map(Some(2))?; + map.serialize_entry("code", &code)?; + map.serialize_entry("message", &message)?; + map.end() + } + } + + impl From for Error { + fn from(error: anyhow::Error) -> Self { + tracing::error!(?error); + Error::Unknown + } + } + + impl From for Error { + fn from(error: keys::GetOrCreateError) -> Self { + tracing::error!(?error); + Error::Unknown + } + } + + impl From for Error { + fn from(error: users::GetError) -> Self { + tracing::error!(?error); + Error::Unknown + } + } + + impl From for Error { + fn from(error: projects::controller::GetError) -> Self { + tracing::error!(?error); + Error::Unknown + } + } +} diff --git a/src/error/sentry.rs b/src/error/sentry.rs new file mode 100644 index 000000000..b321454be --- /dev/null +++ b/src/error/sentry.rs @@ -0,0 +1,89 @@ +use crate::error::gb::{ErrorCode, ErrorContext}; +use sentry::{ + protocol::{value::Map, Event, Exception, Value}, + types::Uuid, +}; +use 
std::collections::BTreeMap; + +pub trait SentrySender { + fn send_to_sentry(self) -> Uuid; +} + +impl> SentrySender for E { + fn send_to_sentry(self) -> Uuid { + let sentry_event = self.into().into(); + sentry::capture_event(sentry_event) + } +} + +trait PopulateException { + fn populate_exception( + self, + exceptions: &mut Vec, + vars: &mut BTreeMap, + ); +} + +impl PopulateException for ErrorContext { + fn populate_exception( + self, + exceptions: &mut Vec, + vars: &mut BTreeMap, + ) { + let (error, mut context) = self.into_owned(); + + let mut exc = Exception { + ty: error.code(), + value: Some(error.message()), + ..Exception::default() + }; + + if let Some(cause) = context.caused_by { + cause.populate_exception(exceptions, vars); + } + + // We don't resolve at capture time because it can DRASTICALLY + // slow down the application (can take up to 0.5s to resolve + // a *single* frame). We do it here, only when a Sentry event + // is being created. + context.backtrace.resolve(); + exc.stacktrace = + sentry::integrations::backtrace::backtrace_to_stacktrace(&context.backtrace); + + ::backtrace::clear_symbol_cache(); + + vars.insert( + error.code(), + Value::Object( + context + .vars + .into_iter() + .map(|(k, v)| (k, v.into())) + .collect(), + ), + ); + exceptions.push(exc); + } +} + +impl From for Event<'_> { + fn from(error_context: ErrorContext) -> Self { + let mut sentry_event = Event { + message: Some(format!( + "{}: {}", + error_context.error().code(), + error_context.error().message() + )), + ..Event::default() + }; + + let mut vars = BTreeMap::new(); + error_context.populate_exception(&mut sentry_event.exception.values, &mut vars); + + sentry_event + .extra + .insert("context_vars".into(), Value::Object(Map::from_iter(vars))); + + sentry_event + } +} diff --git a/src/fs.rs b/src/fs.rs new file mode 100644 index 000000000..ad203f885 --- /dev/null +++ b/src/fs.rs @@ -0,0 +1,30 @@ +use std::path::{Path, PathBuf}; + +use anyhow::Result; +use walkdir::WalkDir; 
+ +// Returns an ordered list of relative paths for files inside a directory recursively. +pub fn list_files>(dir_path: P, ignore_prefixes: &[P]) -> Result> { + let mut files = vec![]; + let dir_path = dir_path.as_ref(); + if !dir_path.exists() { + return Ok(files); + } + for entry in WalkDir::new(dir_path) { + let entry = entry?; + if !entry.file_type().is_dir() { + let path = entry.path(); + let path = path.strip_prefix(dir_path)?; + let path = path.to_path_buf(); + if ignore_prefixes + .iter() + .any(|prefix| path.starts_with(prefix.as_ref())) + { + continue; + } + files.push(path); + } + } + files.sort(); + Ok(files) +} diff --git a/src/gb_repository.rs b/src/gb_repository.rs new file mode 100644 index 000000000..5e4879e0e --- /dev/null +++ b/src/gb_repository.rs @@ -0,0 +1,3 @@ +mod repository; + +pub use repository::{RemoteError, Repository}; diff --git a/src/gb_repository/repository.rs b/src/gb_repository/repository.rs new file mode 100644 index 000000000..682a38b0c --- /dev/null +++ b/src/gb_repository/repository.rs @@ -0,0 +1,967 @@ +use std::{ + collections::{HashMap, HashSet}, + fs::File, + io::{BufReader, Read}, + path, time, +}; + +#[cfg(target_os = "windows")] +use crate::windows::MetadataShim; +#[cfg(target_family = "unix")] +use std::os::unix::prelude::*; + +use anyhow::{anyhow, Context, Result}; +use filetime::FileTime; +use fslock::LockFile; +use sha2::{Digest, Sha256}; + +use crate::{ + deltas, fs, git, project_repository, + projects::{self, ProjectId}, + reader, sessions, + sessions::SessionId, + users, + virtual_branches::{self, target}, +}; + +pub struct Repository { + git_repository: git::Repository, + project: projects::Project, + lock_path: path::PathBuf, +} + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("path not found: {0}")] + ProjectPathNotFound(path::PathBuf), + #[error(transparent)] + Git(#[from] git::Error), + #[error(transparent)] + Other(#[from] anyhow::Error), + #[error("path has invalid utf-8 bytes: {0}")] + 
InvalidUnicodePath(path::PathBuf), +} + +impl Repository { + pub fn open( + root: &path::Path, + project_repository: &project_repository::Repository, + user: Option<&users::User>, + ) -> Result { + let project = project_repository.project(); + let project_objects_path = project.path.join(".git/objects"); + if !project_objects_path.exists() { + return Err(Error::ProjectPathNotFound(project_objects_path)); + } + + let projects_dir = root.join("projects"); + let path = projects_dir.join(project.id.to_string()); + + let lock_path = projects_dir.join(format!("{}.lock", project.id)); + + if path.exists() { + let git_repository = git::Repository::open(path.clone()) + .with_context(|| format!("{}: failed to open git repository", path.display()))?; + + git_repository + .add_disk_alternate(project_objects_path.to_str().unwrap()) + .context("failed to add disk alternate")?; + + Result::Ok(Self { + git_repository, + project: project.clone(), + lock_path, + }) + } else { + std::fs::create_dir_all(&path).context("failed to create project directory")?; + + let git_repository = git::Repository::init_opts( + &path, + git2::RepositoryInitOptions::new() + .bare(true) + .initial_head("refs/heads/current") + .external_template(false), + ) + .with_context(|| format!("{}: failed to initialize git repository", path.display()))?; + + git_repository + .add_disk_alternate(project_objects_path.to_str().unwrap()) + .context("failed to add disk alternate")?; + + let gb_repository = Self { + git_repository, + project: project.clone(), + lock_path, + }; + + let _lock = gb_repository.lock(); + let session = gb_repository.create_current_session(project_repository)?; + drop(_lock); + + gb_repository + .flush_session(project_repository, &session, user) + .context("failed to run initial flush")?; + + Result::Ok(gb_repository) + } + } + + pub fn get_project_id(&self) -> &ProjectId { + &self.project.id + } + + fn remote(&self, user: Option<&users::User>) -> Result> { + // only push if logged in + let 
access_token = match user { + Some(user) => user.access_token.clone(), + None => return Ok(None), + }; + + // only push if project is connected + let remote_url = match &self.project.api { + Some(api) => api.git_url.clone(), + None => return Ok(None), + }; + + let remote = self + .git_repository + .remote_anonymous(&remote_url.parse().unwrap()) + .with_context(|| { + format!( + "failed to create anonymous remote for {}", + remote_url.as_str() + ) + })?; + + Ok(Some((remote, access_token))) + } + + pub fn fetch(&self, user: Option<&users::User>) -> Result<(), RemoteError> { + let (mut remote, access_token) = match self.remote(user)? { + Some((remote, access_token)) => (remote, access_token), + None => return Result::Ok(()), + }; + + let mut callbacks = git2::RemoteCallbacks::new(); + if self.project.omit_certificate_check.unwrap_or(false) { + callbacks.certificate_check(|_, _| Ok(git2::CertificateCheckStatus::CertificateOk)); + } + callbacks.push_update_reference(move |refname, message| { + tracing::debug!( + project_id = %self.project.id, + refname, + message, + "pulling reference" + ); + Result::Ok(()) + }); + callbacks.push_transfer_progress(move |one, two, three| { + tracing::debug!( + project_id = %self.project.id, + "transferred {}/{}/{} objects", + one, + two, + three + ); + }); + + let mut fetch_opts = git2::FetchOptions::new(); + fetch_opts.remote_callbacks(callbacks); + let auth_header = format!("Authorization: {}", access_token); + let headers = &[auth_header.as_str()]; + fetch_opts.custom_headers(headers); + + remote + .fetch(&["refs/heads/*:refs/remotes/*"], Some(&mut fetch_opts)) + .map_err(|error| match error { + git::Error::Network(error) => { + tracing::warn!(project_id = %self.project.id, error = %error, "failed to fetch gb repo"); + RemoteError::Network + } + error => RemoteError::Other(error.into()), + })?; + + tracing::info!( + project_id = %self.project.id, + "gb repo fetched", + ); + + Result::Ok(()) + } + + pub fn push(&self, user: 
Option<&users::User>) -> Result<(), RemoteError> { + let (mut remote, access_token) = match self.remote(user)? { + Some((remote, access_token)) => (remote, access_token), + None => return Ok(()), + }; + + // Set the remote's callbacks + let mut callbacks = git2::RemoteCallbacks::new(); + if self.project.omit_certificate_check.unwrap_or(false) { + callbacks.certificate_check(|_, _| Ok(git2::CertificateCheckStatus::CertificateOk)); + } + callbacks.push_update_reference(move |refname, message| { + tracing::debug!( + project_id = %self.project.id, + refname, + message, + "pushing reference" + ); + Result::Ok(()) + }); + callbacks.push_transfer_progress(move |current, total, bytes| { + tracing::debug!( + project_id = %self.project.id, + "transferred {}/{}/{} objects", + current, + total, + bytes + ); + }); + + let mut push_options = git2::PushOptions::new(); + push_options.remote_callbacks(callbacks); + let auth_header = format!("Authorization: {}", access_token); + let headers = &[auth_header.as_str()]; + push_options.custom_headers(headers); + + let remote_refspec = format!("refs/heads/current:refs/heads/{}", self.project.id); + + // Push to the remote + remote + .push(&[&remote_refspec], Some(&mut push_options)).map_err(|error| match error { + git::Error::Network(error) => { + tracing::warn!(project_id = %self.project.id, error = %error, "failed to push gb repo"); + RemoteError::Network + } + error => RemoteError::Other(error.into()), + })?; + + tracing::info!(project_id = %self.project.id, "gb repository pushed"); + + Ok(()) + } + + // take branches from the last session and put them into the current session + fn copy_branches(&self) -> Result<()> { + let last_session = self + .get_sessions_iterator() + .context("failed to get sessions iterator")? 
+ .next(); + if last_session.is_none() { + return Ok(()); + } + let last_session = last_session + .unwrap() + .context("failed to read last session")?; + let last_session_reader = sessions::Reader::open(self, &last_session) + .context("failed to open last session reader")?; + + let branches = virtual_branches::Iterator::new(&last_session_reader) + .context("failed to read virtual branches")? + .collect::, reader::Error>>() + .context("failed to read virtual branches")? + .into_iter() + .collect::>(); + + let src_target_reader = virtual_branches::target::Reader::new(&last_session_reader); + let dst_target_writer = virtual_branches::target::Writer::new(self, self.project.gb_dir()) + .context("failed to open target writer for current session")?; + + // copy default target + let default_target = match src_target_reader.read_default() { + Result::Ok(target) => Ok(Some(target)), + Err(reader::Error::NotFound) => Ok(None), + Err(err) => Err(err).context("failed to read default target"), + }?; + if let Some(default_target) = default_target.as_ref() { + dst_target_writer + .write_default(default_target) + .context("failed to write default target")?; + } + + // copy branch targets + for branch in &branches { + let target = src_target_reader + .read(&branch.id) + .with_context(|| format!("{}: failed to read target", branch.id))?; + if let Some(default_target) = default_target.as_ref() { + if *default_target == target { + continue; + } + } + dst_target_writer + .write(&branch.id, &target) + .with_context(|| format!("{}: failed to write target", branch.id))?; + } + + let dst_branch_writer = virtual_branches::branch::Writer::new(self, self.project.gb_dir()) + .context("failed to open branch writer for current session")?; + + // copy branches that we don't already have + for branch in &branches { + dst_branch_writer + .write(&mut branch.clone()) + .with_context(|| format!("{}: failed to write branch", branch.id))?; + } + + Ok(()) + } + + fn create_current_session( + &self, + 
project_repository: &project_repository::Repository, + ) -> Result { + let now_ms = time::SystemTime::now() + .duration_since(time::UNIX_EPOCH) + .unwrap() + .as_millis(); + + let meta = match project_repository.get_head() { + Result::Ok(head) => sessions::Meta { + start_timestamp_ms: now_ms, + last_timestamp_ms: now_ms, + branch: head.name().map(|name| name.to_string()), + commit: Some(head.peel_to_commit()?.id().to_string()), + }, + Err(_) => sessions::Meta { + start_timestamp_ms: now_ms, + last_timestamp_ms: now_ms, + branch: None, + commit: None, + }, + }; + + let session = sessions::Session { + id: SessionId::generate(), + hash: None, + meta, + }; + + // write session to disk + sessions::Writer::new(self) + .context("failed to create session writer")? + .write(&session) + .context("failed to write session")?; + + tracing::info!( + project_id = %self.project.id, + session_id = %session.id, + "created new session" + ); + + self.flush_gitbutler_file(&session.id)?; + + Ok(session) + } + + pub fn lock(&self) -> LockFile { + let mut lockfile = LockFile::open(&self.lock_path).expect("failed to open lock file"); + lockfile.lock().expect("failed to obtain lock on lock file"); + lockfile + } + + pub fn mark_active_session(&self) -> Result<()> { + let current_session = self + .get_or_create_current_session() + .context("failed to get current session")?; + + let updated_session = sessions::Session { + meta: sessions::Meta { + last_timestamp_ms: time::SystemTime::now() + .duration_since(time::UNIX_EPOCH) + .unwrap() + .as_millis(), + ..current_session.meta + }, + ..current_session + }; + + sessions::Writer::new(self) + .context("failed to create session writer")? + .write(&updated_session) + .context("failed to write session")?; + + Ok(()) + } + + pub fn get_latest_session(&self) -> Result> { + if let Some(current_session) = self.get_current_session()? 
{ + Ok(Some(current_session)) + } else { + let mut sessions_iterator = self.get_sessions_iterator()?; + sessions_iterator + .next() + .transpose() + .context("failed to get latest session") + } + } + + pub fn get_or_create_current_session(&self) -> Result { + let _lock = self.lock(); + + let reader = reader::Reader::open(&self.root())?; + match sessions::Session::try_from(&reader) { + Result::Ok(session) => Ok(session), + Err(sessions::SessionError::NoSession) => { + let project_repository = project_repository::Repository::open(&self.project) + .context("failed to open project repository")?; + let session = self + .create_current_session(&project_repository) + .context("failed to create current session")?; + drop(_lock); + self.copy_branches().context("failed to unpack branches")?; + Ok(session) + } + Err(err) => Err(err).context("failed to read current session"), + } + } + + pub fn flush( + &self, + project_repository: &project_repository::Repository, + user: Option<&users::User>, + ) -> Result> { + let current_session = self + .get_current_session() + .context("failed to get current session")?; + if current_session.is_none() { + return Ok(None); + } + + let current_session = current_session.unwrap(); + let current_session = self + .flush_session(project_repository, ¤t_session, user) + .context(format!("failed to flush session {}", current_session.id))?; + Ok(Some(current_session)) + } + + pub fn flush_session( + &self, + project_repository: &project_repository::Repository, + session: &sessions::Session, + user: Option<&users::User>, + ) -> Result { + if session.hash.is_some() { + return Ok(session.clone()); + } + + if !self.root().exists() { + return Err(anyhow!("nothing to flush")); + } + + let _lock = self.lock(); + + // update last timestamp + let session_writer = + sessions::Writer::new(self).context("failed to create session writer")?; + session_writer.write(session)?; + + let mut tree_builder = self.git_repository.treebuilder(None); + + tree_builder.upsert( 
+ "session", + build_session_tree(self).context("failed to build session tree")?, + git::FileMode::Tree, + ); + tree_builder.upsert( + "wd", + build_wd_tree(self, project_repository) + .context("failed to build working directory tree")?, + git::FileMode::Tree, + ); + tree_builder.upsert( + "branches", + build_branches_tree(self).context("failed to build branches tree")?, + git::FileMode::Tree, + ); + + let tree_id = tree_builder.write().context("failed to write tree")?; + + let commit_oid = + write_gb_commit(tree_id, self, user).context("failed to write gb commit")?; + + tracing::info!( + project_id = %self.project.id, + session_id = %session.id, + %commit_oid, + "flushed session" + ); + + session_writer.remove()?; + + let session = sessions::Session { + hash: Some(commit_oid), + ..session.clone() + }; + + Ok(session) + } + + pub fn get_sessions_iterator(&self) -> Result> { + sessions::SessionsIterator::new(&self.git_repository) + } + + pub fn get_current_session(&self) -> Result> { + let _lock = self.lock(); + let reader = reader::Reader::open(&self.root())?; + match sessions::Session::try_from(&reader) { + Ok(session) => Ok(Some(session)), + Err(sessions::SessionError::NoSession) => Ok(None), + Err(sessions::SessionError::Other(err)) => Err(err), + } + } + + pub fn root(&self) -> std::path::PathBuf { + self.git_repository.path().join("gitbutler") + } + + pub fn session_path(&self) -> std::path::PathBuf { + self.root().join("session") + } + + pub fn git_repository_path(&self) -> &std::path::Path { + self.git_repository.path() + } + + pub fn session_wd_path(&self) -> std::path::PathBuf { + self.session_path().join("wd") + } + + pub fn default_target(&self) -> Result> { + if let Some(latest_session) = self.get_latest_session()? 
{ + let latest_session_reader = sessions::Reader::open(self, &latest_session) + .context("failed to open current session")?; + let target_reader = target::Reader::new(&latest_session_reader); + match target_reader.read_default() { + Result::Ok(target) => Ok(Some(target)), + Err(reader::Error::NotFound) => Ok(None), + Err(err) => Err(err.into()), + } + } else { + Ok(None) + } + } + + fn flush_gitbutler_file(&self, session_id: &SessionId) -> Result<()> { + let gb_path = self.git_repository.path(); + let project_id = self.project.id.to_string(); + let gb_file_content = serde_json::json!({ + "sessionId": session_id, + "repositoryId": project_id, + "gbPath": gb_path, + "api": self.project.api, + }); + + let gb_file_path = self.project.path.join(".git/gitbutler.json"); + std::fs::write(&gb_file_path, gb_file_content.to_string())?; + + tracing::debug!("gitbutler file updated: {:?}", gb_file_path); + + Ok(()) + } + + pub fn git_repository(&self) -> &git::Repository { + &self.git_repository + } +} + +fn build_wd_tree( + gb_repository: &Repository, + project_repository: &project_repository::Repository, +) -> Result { + match gb_repository + .git_repository + .find_reference(&"refs/heads/current".parse().unwrap()) + { + Result::Ok(reference) => build_wd_tree_from_reference(gb_repository, &reference) + .context("failed to build wd index"), + Err(git::Error::NotFound(_)) => build_wd_tree_from_repo(gb_repository, project_repository) + .context("failed to build wd index"), + Err(e) => Err(e.into()), + } +} + +fn build_wd_tree_from_reference( + gb_repository: &Repository, + reference: &git::Reference, +) -> Result { + // start off with the last tree as a base + let tree = reference.peel_to_tree()?; + let wd_tree_entry = tree.get_name("wd").unwrap(); + let wd_tree = gb_repository.git_repository.find_tree(wd_tree_entry.id())?; + let mut index = git::Index::try_from(&wd_tree)?; + + // write updated files on top of the last tree + for file_path in 
fs::list_files(gb_repository.session_wd_path(), &[]).with_context(|| { + format!( + "failed to session working directory files list files in {}", + gb_repository.session_wd_path().display() + ) + })? { + add_wd_path( + &mut index, + &gb_repository.session_wd_path(), + &file_path, + gb_repository, + ) + .with_context(|| { + format!( + "failed to add session working directory path {}", + file_path.display() + ) + })?; + } + + let session_reader = reader::Reader::open(&gb_repository.root())?; + let deltas = deltas::Reader::from(&session_reader) + .read(None) + .context("failed to read deltas")?; + let wd_files = session_reader.list_files(path::Path::new("session/wd"))?; + let wd_files = wd_files.iter().collect::>(); + + // if a file has delta, but doesn't exist in wd, it was deleted + let deleted_files = deltas + .keys() + .filter(|key| !wd_files.contains(key)) + .collect::>(); + + for deleted_file in deleted_files { + index + .remove_path(deleted_file) + .context("failed to remove path")?; + } + + let wd_tree_oid = index + .write_tree_to(&gb_repository.git_repository) + .context("failed to write wd tree")?; + Ok(wd_tree_oid) +} + +// build wd index from the working directory files new session wd files +// this is important because we want to make sure session files are in sync with session deltas +fn build_wd_tree_from_repo( + gb_repository: &Repository, + project_repository: &project_repository::Repository, +) -> Result { + let mut index = git::Index::new()?; + + let mut added: HashMap = HashMap::new(); + + // first, add session/wd files. session/wd are written at the same time as deltas, so it's important to add them first + // to make sure they are in sync with the deltas + for file_path in fs::list_files(gb_repository.session_wd_path(), &[]).with_context(|| { + format!( + "failed to session working directory files list files in {}", + gb_repository.session_wd_path().display() + ) + })? 
{ + if project_repository + .git_repository + .is_path_ignored(&file_path) + .unwrap_or(true) + { + continue; + } + + add_wd_path( + &mut index, + &gb_repository.session_wd_path(), + &file_path, + gb_repository, + ) + .with_context(|| { + format!( + "failed to add session working directory path {}", + file_path.display() + ) + })?; + added.insert(file_path.to_string_lossy().to_string(), true); + } + + // finally, add files from the working directory if they aren't already in the index + for file_path in fs::list_files(project_repository.root(), &[path::Path::new(".git")]) + .with_context(|| { + format!( + "failed to working directory list files in {}", + project_repository.root().display() + ) + })? + { + if added.contains_key(&file_path.to_string_lossy().to_string()) { + continue; + } + + if project_repository + .git_repository + .is_path_ignored(&file_path) + .unwrap_or(true) + { + continue; + } + + add_wd_path( + &mut index, + project_repository.root(), + &file_path, + gb_repository, + ) + .with_context(|| { + format!( + "failed to add working directory path {}", + file_path.display() + ) + })?; + } + + let tree_oid = index + .write_tree_to(&gb_repository.git_repository) + .context("failed to write tree to repo")?; + Ok(tree_oid) +} + +// take a file path we see and add it to our in-memory index +// we call this from build_initial_wd_tree, which is smart about using the existing index to avoid rehashing files that haven't changed +// and also looks for large files and puts in a placeholder hash in the LFS format +// TODO: actually upload the file to LFS +fn add_wd_path( + index: &mut git::Index, + dir: &std::path::Path, + rel_file_path: &std::path::Path, + gb_repository: &Repository, +) -> Result<()> { + let file_path = dir.join(rel_file_path); + + let metadata = std::fs::symlink_metadata(&file_path).context("failed to get metadata for")?; + let modify_time = FileTime::from_last_modification_time(&metadata); + let create_time = 
FileTime::from_creation_time(&metadata).unwrap_or(modify_time); + + // look for files that are bigger than 4GB, which are not supported by git + // insert a pointer as the blob content instead + // TODO: size limit should be configurable + let blob = if metadata.is_symlink() { + // it's a symlink, make the content the path of the link + let link_target = std::fs::read_link(&file_path)?; + // if the link target is inside the project repository, make it relative + let link_target = link_target.strip_prefix(dir).unwrap_or(&link_target); + gb_repository.git_repository.blob( + link_target + .to_str() + .ok_or_else(|| Error::InvalidUnicodePath(link_target.into()))? + .as_bytes(), + )? + } else if metadata.len() > 100_000_000 { + tracing::warn!( + project_id = %gb_repository.project.id, + path = %file_path.display(), + "file too big" + ); + + // get a sha256 hash of the file first + let sha = sha256_digest(&file_path)?; + + // put togther a git lfs pointer file: https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md + let mut lfs_pointer = String::from("version https://git-lfs.github.com/spec/v1\n"); + lfs_pointer.push_str("oid sha256:"); + lfs_pointer.push_str(&sha); + lfs_pointer.push('\n'); + lfs_pointer.push_str("size "); + lfs_pointer.push_str(&metadata.len().to_string()); + lfs_pointer.push('\n'); + + // write the file to the .git/lfs/objects directory + // create the directory recursively if it doesn't exist + let lfs_objects_dir = gb_repository.git_repository.path().join("lfs/objects"); + std::fs::create_dir_all(lfs_objects_dir.clone())?; + let lfs_path = lfs_objects_dir.join(sha); + std::fs::copy(file_path, lfs_path)?; + + gb_repository.git_repository.blob(lfs_pointer.as_bytes())? + } else { + // read the file into a blob, get the object id + gb_repository.git_repository.blob_path(&file_path)? 
+ }; + + // create a new IndexEntry from the file metadata + // truncation is ok https://libgit2.org/libgit2/#HEAD/type/git_index_entry + #[allow(clippy::cast_possible_truncation)] + index + .add(&git::IndexEntry { + ctime: create_time, + mtime: modify_time, + dev: metadata.dev() as u32, + ino: metadata.ino() as u32, + mode: 33188, + uid: metadata.uid(), + gid: metadata.gid(), + file_size: metadata.len() as u32, + flags: 10, // normal flags for normal file (for the curious: https://git-scm.com/docs/index-format) + flags_extended: 0, // no extended flags + path: rel_file_path.to_str().unwrap().to_string().into(), + id: blob, + }) + .with_context(|| format!("failed to add index entry for {}", rel_file_path.display()))?; + + Ok(()) +} + +/// calculates sha256 digest of a large file as lowercase hex string via streaming buffer +/// used to calculate the hash of large files that are not supported by git +fn sha256_digest(path: &std::path::Path) -> Result { + let input = File::open(path)?; + let mut reader = BufReader::new(input); + + let digest = { + let mut hasher = Sha256::new(); + let mut buffer = [0; 1024]; + loop { + let count = reader.read(&mut buffer)?; + if count == 0 { + break; + } + hasher.update(&buffer[..count]); + } + hasher.finalize() + }; + Ok(format!("{:X}", digest)) +} + +fn build_branches_tree(gb_repository: &Repository) -> Result { + let mut index = git::Index::new()?; + + let branches_dir = gb_repository.root().join("branches"); + for file_path in + fs::list_files(&branches_dir, &[]).context("failed to find branches directory")? 
+ { + let file_path = std::path::Path::new(&file_path); + add_file_to_index( + gb_repository, + &mut index, + file_path, + &branches_dir.join(file_path), + ) + .context("failed to add branch file to index")?; + } + + let tree_oid = index + .write_tree_to(&gb_repository.git_repository) + .context("failed to write index to tree")?; + + Ok(tree_oid) +} + +fn build_session_tree(gb_repository: &Repository) -> Result { + let mut index = git::Index::new()?; + + // add all files in the working directory to the in-memory index, skipping for matching entries in the repo index + for file_path in fs::list_files( + gb_repository.session_path(), + &[path::Path::new("wd").to_path_buf()], + ) + .context("failed to list session files")? + { + add_file_to_index( + gb_repository, + &mut index, + &file_path, + &gb_repository.session_path().join(&file_path), + ) + .with_context(|| format!("failed to add session file: {}", file_path.display()))?; + } + + let tree_oid = index + .write_tree_to(&gb_repository.git_repository) + .context("failed to write index to tree")?; + + Ok(tree_oid) +} + +// this is a helper function for build_gb_tree that takes paths under .git/gb/session and adds them to the in-memory index +fn add_file_to_index( + gb_repository: &Repository, + index: &mut git::Index, + rel_file_path: &std::path::Path, + abs_file_path: &std::path::Path, +) -> Result<()> { + let blob = gb_repository.git_repository.blob_path(abs_file_path)?; + let metadata = abs_file_path.metadata()?; + let modified_time = FileTime::from_last_modification_time(&metadata); + let create_time = FileTime::from_creation_time(&metadata).unwrap_or(modified_time); + + // create a new IndexEntry from the file metadata + // truncation is ok https://libgit2.org/libgit2/#HEAD/type/git_index_entry + #[allow(clippy::cast_possible_truncation)] + index + .add(&git::IndexEntry { + ctime: create_time, + mtime: modified_time, + dev: metadata.dev() as u32, + ino: metadata.ino() as u32, + mode: 33188, + uid: 
metadata.uid(), + gid: metadata.gid(), + file_size: metadata.len() as u32, + flags: 10, // normal flags for normal file (for the curious: https://git-scm.com/docs/index-format) + flags_extended: 0, // no extended flags + path: rel_file_path.to_str().unwrap().into(), + id: blob, + }) + .with_context(|| format!("Failed to add file to index: {}", abs_file_path.display()))?; + + Ok(()) +} + +// write a new commit object to the repo +// this is called once we have a tree of deltas, metadata and current wd snapshot +// and either creates or updates the refs/heads/current ref +fn write_gb_commit( + tree_id: git::Oid, + gb_repository: &Repository, + user: Option<&users::User>, +) -> Result { + let comitter = git::Signature::now("gitbutler", "gitbutler@localhost")?; + let author = match user { + None => comitter.clone(), + Some(user) => git::Signature::try_from(user)?, + }; + + let current_refname: git::Refname = "refs/heads/current".parse().unwrap(); + + match gb_repository + .git_repository + .find_reference(¤t_refname) + { + Result::Ok(reference) => { + let last_commit = reference.peel_to_commit()?; + let new_commit = gb_repository.git_repository.commit( + Some(¤t_refname), + &author, // author + &comitter, // committer + "gitbutler check", // commit message + &gb_repository.git_repository.find_tree(tree_id).unwrap(), // tree + &[&last_commit], // parents + )?; + Ok(new_commit) + } + Err(git::Error::NotFound(_)) => { + let new_commit = gb_repository.git_repository.commit( + Some(¤t_refname), + &author, // author + &comitter, // committer + "gitbutler check", // commit message + &gb_repository.git_repository.find_tree(tree_id).unwrap(), // tree + &[], // parents + )?; + Ok(new_commit) + } + Err(e) => Err(e.into()), + } +} + +#[derive(Debug, thiserror::Error)] +pub enum RemoteError { + #[error("network error")] + Network, + #[error(transparent)] + Other(#[from] anyhow::Error), +} diff --git a/src/git.rs b/src/git.rs new file mode 100644 index 000000000..1aab9ff90 --- 
/dev/null +++ b/src/git.rs @@ -0,0 +1,42 @@ +pub mod credentials; +pub mod diff; +pub mod show; + +mod blob; +pub use blob::*; + +mod error; +pub use error::*; + +mod reference; +pub use reference::*; +mod repository; + +pub use repository::*; + +mod commit; +pub use commit::*; + +mod branch; +pub use branch::*; + +mod tree; +pub use tree::*; + +mod remote; +pub use remote::*; + +mod index; +pub use index::*; + +mod oid; +pub use oid::*; + +mod signature; +pub use signature::*; + +mod config; +pub use config::*; + +mod url; +pub use self::url::*; diff --git a/src/git/blob.rs b/src/git/blob.rs new file mode 100644 index 000000000..10e902c04 --- /dev/null +++ b/src/git/blob.rs @@ -0,0 +1,17 @@ +pub struct Blob<'a>(git2::Blob<'a>); + +impl<'a> From> for Blob<'a> { + fn from(value: git2::Blob<'a>) -> Self { + Self(value) + } +} + +impl Blob<'_> { + pub fn content(&self) -> &[u8] { + self.0.content() + } + + pub fn size(&self) -> usize { + self.0.size() + } +} diff --git a/src/git/branch.rs b/src/git/branch.rs new file mode 100644 index 000000000..701d805be --- /dev/null +++ b/src/git/branch.rs @@ -0,0 +1,53 @@ +use super::{Commit, Oid, Result, Tree}; + +pub struct Branch<'repo> { + branch: git2::Branch<'repo>, +} + +impl<'repo> From> for Branch<'repo> { + fn from(branch: git2::Branch<'repo>) -> Self { + Self { branch } + } +} + +impl<'repo> Branch<'repo> { + pub fn name(&self) -> Option<&str> { + self.branch.get().name() + } + + pub fn refname(&self) -> Option<&str> { + self.branch.get().name() + } + + pub fn target(&self) -> Option { + self.branch.get().target().map(Into::into) + } + + pub fn upstream(&self) -> Result> { + self.branch.upstream().map(Into::into).map_err(Into::into) + } + + pub fn refname_bytes(&self) -> &[u8] { + self.branch.get().name_bytes() + } + + pub fn peel_to_tree(&self) -> Result> { + self.branch + .get() + .peel_to_tree() + .map_err(Into::into) + .map(Into::into) + } + + pub fn peel_to_commit(&self) -> Result> { + self.branch + .get() + 
.peel_to_commit() + .map(Into::into) + .map_err(Into::into) + } + + pub fn is_remote(&self) -> bool { + self.branch.get().is_remote() + } +} diff --git a/src/git/commit.rs b/src/git/commit.rs new file mode 100644 index 000000000..43f03ab00 --- /dev/null +++ b/src/git/commit.rs @@ -0,0 +1,75 @@ +use super::{Oid, Result, Signature, Tree}; + +pub struct Commit<'repo> { + commit: git2::Commit<'repo>, +} + +impl<'repo> From> for Commit<'repo> { + fn from(commit: git2::Commit<'repo>) -> Self { + Self { commit } + } +} + +impl<'repo> From<&'repo git2::Commit<'repo>> for Commit<'repo> { + fn from(commit: &'repo git2::Commit<'repo>) -> Self { + Self { + commit: commit.clone(), + } + } +} + +impl<'repo> From<&'repo Commit<'repo>> for &'repo git2::Commit<'repo> { + fn from(val: &'repo Commit<'repo>) -> Self { + &val.commit + } +} + +impl<'repo> Commit<'repo> { + pub fn id(&self) -> Oid { + self.commit.id().into() + } + + pub fn parent_count(&self) -> usize { + self.commit.parent_count() + } + + pub fn tree(&self) -> Result> { + self.commit.tree().map(Into::into).map_err(Into::into) + } + + pub fn tree_id(&self) -> Oid { + self.commit.tree_id().into() + } + + pub fn parents(&self) -> Result>> { + let mut parents = vec![]; + for i in 0..self.parent_count() { + parents.push(self.parent(i)?); + } + Ok(parents) + } + + pub fn parent(&self, n: usize) -> Result> { + self.commit.parent(n).map(Into::into).map_err(Into::into) + } + + pub fn time(&self) -> git2::Time { + self.commit.time() + } + + pub fn author(&self) -> Signature<'_> { + self.commit.author().into() + } + + pub fn message(&self) -> Option<&str> { + self.commit.message() + } + + pub fn committer(&self) -> Signature<'_> { + self.commit.committer().into() + } + + pub fn raw_header(&self) -> Option<&str> { + self.commit.raw_header() + } +} diff --git a/src/git/config.rs b/src/git/config.rs new file mode 100644 index 000000000..5afe4ffb9 --- /dev/null +++ b/src/git/config.rs @@ -0,0 +1,68 @@ +use super::{Error, Result}; + 
+pub struct Config { + config: git2::Config, +} + +impl From for Config { + fn from(config: git2::Config) -> Self { + Self { config } + } +} + +impl From for git2::Config { + fn from(v: Config) -> Self { + v.config + } +} + +impl Config { + pub fn set_str(&mut self, key: &str, value: &str) -> Result<()> { + self.config.set_str(key, value).map_err(Into::into) + } + + pub fn set_bool(&mut self, key: &str, value: bool) -> Result<()> { + self.config.set_bool(key, value).map_err(Into::into) + } + + pub fn set_multivar(&mut self, key: &str, regexp: &str, value: &str) -> Result<()> { + self.config + .set_multivar(key, regexp, value) + .map_err(Into::into) + } + + pub fn get_string(&self, key: &str) -> Result> { + match self.config.get_string(key).map_err(Into::into) { + Ok(value) => Ok(Some(value)), + Err(Error::NotFound(_)) => Ok(None), + Err(e) => Err(e), + } + } + + pub fn get_bool(&self, key: &str) -> Result> { + match self.config.get_bool(key).map_err(Into::into) { + Ok(value) => Ok(Some(value)), + Err(Error::NotFound(_)) => Ok(None), + Err(e) => Err(e), + } + } + + pub fn set_local(&self, key: &str, val: &str) -> Result<()> { + match self.config.open_level(git2::ConfigLevel::Local) { + Ok(mut local) => local.set_str(key, val).map_err(Into::into), + Err(e) => Err(e.into()), + } + } + + pub fn get_local(&self, key: &str) -> Result> { + match self + .config + .open_level(git2::ConfigLevel::Local) + .and_then(|local| local.get_string(key)) + { + Ok(value) => Ok(Some(value)), + Err(e) if e.code() == git2::ErrorCode::NotFound => Ok(None), + Err(e) => Err(e.into()), + } + } +} diff --git a/src/git/credentials.rs b/src/git/credentials.rs new file mode 100644 index 000000000..c7e0a452b --- /dev/null +++ b/src/git/credentials.rs @@ -0,0 +1,392 @@ +use std::path::PathBuf; + +use crate::{keys, project_repository, projects, users}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum SshCredential { + Keyfile { + key_path: PathBuf, + passphrase: Option, + }, + GitButlerKey(Box), 
+} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum HttpsCredential { + CredentialHelper { username: String, password: String }, + GitHubToken(String), +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum Credential { + Noop, + Ssh(SshCredential), + Https(HttpsCredential), +} + +impl From for git2::RemoteCallbacks<'_> { + fn from(value: Credential) -> Self { + let mut remote_callbacks = git2::RemoteCallbacks::new(); + match value { + Credential::Noop => {} + Credential::Ssh(SshCredential::Keyfile { + key_path, + passphrase, + }) => { + remote_callbacks.credentials(move |url, _username_from_url, _allowed_types| { + use resolve_path::PathResolveExt; + let key_path = key_path.resolve(); + tracing::info!( + "authenticating with {} using key {}", + url, + key_path.display() + ); + git2::Cred::ssh_key("git", None, &key_path, passphrase.as_deref()) + }); + } + Credential::Ssh(SshCredential::GitButlerKey(key)) => { + remote_callbacks.credentials(move |url, _username_from_url, _allowed_types| { + tracing::info!("authenticating with {} using gitbutler's key", url); + git2::Cred::ssh_key_from_memory("git", None, &key.to_string(), None) + }); + } + Credential::Https(HttpsCredential::CredentialHelper { username, password }) => { + remote_callbacks.credentials(move |url, _username_from_url, _allowed_types| { + tracing::info!("authenticating with {url} as '{username}' with password using credential helper"); + git2::Cred::userpass_plaintext(&username, &password) + }); + } + Credential::Https(HttpsCredential::GitHubToken(token)) => { + remote_callbacks.credentials(move |url, _username_from_url, _allowed_types| { + tracing::info!("authenticating with {url} using github token"); + git2::Cred::userpass_plaintext("git", &token) + }); + } + }; + remote_callbacks + } +} + +#[derive(Clone)] +pub struct Helper { + keys: keys::Controller, + users: users::Controller, + home_dir: Option, +} + +#[derive(Debug, thiserror::Error)] +pub enum HelpError { + #[error("no url set for remote")] + 
NoUrlSet, + #[error("failed to convert url: {0}")] + UrlConvertError(#[from] super::ConvertError), + #[error(transparent)] + Users(#[from] users::GetError), + #[error(transparent)] + Key(#[from] keys::GetOrCreateError), + #[error(transparent)] + Git(#[from] super::Error), +} + +impl From for crate::error::Error { + fn from(value: HelpError) -> Self { + match value { + HelpError::NoUrlSet => Self::UserError { + code: crate::error::Code::ProjectGitRemote, + message: "no url set for remote".to_string(), + }, + HelpError::UrlConvertError(error) => Self::UserError { + code: crate::error::Code::ProjectGitRemote, + message: error.to_string(), + }, + HelpError::Users(error) => error.into(), + HelpError::Key(error) => error.into(), + HelpError::Git(error) => { + tracing::error!(?error, "failed to create auth credentials"); + Self::Unknown + } + } + } +} + +impl Helper { + pub fn new( + keys: keys::Controller, + users: users::Controller, + home_dir: Option, + ) -> Self { + Self { + keys, + users, + home_dir, + } + } + + pub fn from_path>(path: P) -> Self { + let keys = keys::Controller::from_path(&path); + let users = users::Controller::from_path(path); + let home_dir = std::env::var_os("HOME").map(PathBuf::from); + Self::new(keys, users, home_dir) + } + + pub fn help<'a>( + &'a self, + project_repository: &'a project_repository::Repository, + remote_name: &str, + ) -> Result)>, HelpError> { + let remote = project_repository.git_repository.find_remote(remote_name)?; + let remote_url = remote.url()?.ok_or(HelpError::NoUrlSet)?; + + // if file, no auth needed. 
+ if remote_url.scheme == super::Scheme::File { + return Ok(vec![(remote, vec![Credential::Noop])]); + } + + match &project_repository.project().preferred_key { + projects::AuthKey::Local { private_key_path } => { + let ssh_remote = if remote_url.scheme == super::Scheme::Ssh { + Ok(remote) + } else { + let ssh_url = remote_url.as_ssh()?; + project_repository.git_repository.remote_anonymous(&ssh_url) + }?; + + Ok(vec![( + ssh_remote, + vec![Credential::Ssh(SshCredential::Keyfile { + key_path: private_key_path.clone(), + passphrase: None, + })], + )]) + } + projects::AuthKey::GitCredentialsHelper => { + let https_remote = if remote_url.scheme == super::Scheme::Https { + Ok(remote) + } else { + let url = remote_url.as_https()?; + project_repository.git_repository.remote_anonymous(&url) + }?; + let flow = Self::https_flow(project_repository, &remote_url)? + .into_iter() + .map(Credential::Https) + .collect::>(); + Ok(vec![(https_remote, flow)]) + } + projects::AuthKey::Generated => { + let generated_flow = self.generated_flow(remote, project_repository)?; + + let remote = project_repository.git_repository.find_remote(remote_name)?; + let default_flow = self.default_flow(remote, project_repository)?; + + Ok(vec![generated_flow, default_flow] + .into_iter() + .flatten() + .collect()) + } + projects::AuthKey::Default => self.default_flow(remote, project_repository), + projects::AuthKey::SystemExecutable => { + tracing::error!("WARNING: FIXME: this codepath should NEVER be hit. 
Something is seriously wrong."); + self.default_flow(remote, project_repository) + } + } + } + + fn generated_flow<'a>( + &'a self, + remote: super::Remote<'a>, + project_repository: &'a project_repository::Repository, + ) -> Result)>, HelpError> { + let remote_url = remote.url()?.ok_or(HelpError::NoUrlSet)?; + + let ssh_remote = if remote_url.scheme == super::Scheme::Ssh { + Ok(remote) + } else { + let ssh_url = remote_url.as_ssh()?; + project_repository.git_repository.remote_anonymous(&ssh_url) + }?; + + let key = self.keys.get_or_create()?; + Ok(vec![( + ssh_remote, + vec![Credential::Ssh(SshCredential::GitButlerKey(Box::new(key)))], + )]) + } + + fn default_flow<'a>( + &'a self, + remote: super::Remote<'a>, + project_repository: &'a project_repository::Repository, + ) -> Result)>, HelpError> { + let remote_url = remote.url()?.ok_or(HelpError::NoUrlSet)?; + + // is github is authenticated, only try github. + if remote_url.is_github() { + if let Some(github_access_token) = self + .users + .get_user()? + .and_then(|user| user.github_access_token) + { + let https_remote = if remote_url.scheme == super::Scheme::Https { + Ok(remote) + } else { + let url = remote_url.as_https()?; + project_repository.git_repository.remote_anonymous(&url) + }?; + return Ok(vec![( + https_remote, + vec![Credential::Https(HttpsCredential::GitHubToken( + github_access_token, + ))], + )]); + } + } + + match remote_url.scheme { + super::Scheme::Https => { + let mut flow = vec![]; + + let https_flow = Self::https_flow(project_repository, &remote_url)? + .into_iter() + .map(Credential::Https) + .collect::>(); + + if !https_flow.is_empty() { + flow.push((remote, https_flow)); + } + + if let Ok(ssh_url) = remote_url.as_ssh() { + let ssh_flow = self + .ssh_flow()? 
+ .into_iter() + .map(Credential::Ssh) + .collect::>(); + if !ssh_flow.is_empty() { + flow.push(( + project_repository + .git_repository + .remote_anonymous(&ssh_url)?, + ssh_flow, + )); + } + } + + Ok(flow) + } + super::Scheme::Ssh => { + let mut flow = vec![]; + + let ssh_flow = self + .ssh_flow()? + .into_iter() + .map(Credential::Ssh) + .collect::>(); + if !ssh_flow.is_empty() { + flow.push((remote, ssh_flow)); + } + + if let Ok(https_url) = remote_url.as_https() { + let https_flow = Self::https_flow(project_repository, &https_url)? + .into_iter() + .map(Credential::Https) + .collect::>(); + if !https_flow.is_empty() { + flow.push(( + project_repository + .git_repository + .remote_anonymous(&https_url)?, + https_flow, + )); + } + } + + Ok(flow) + } + _ => { + let mut flow = vec![]; + + if let Ok(https_url) = remote_url.as_https() { + let https_flow = Self::https_flow(project_repository, &https_url)? + .into_iter() + .map(Credential::Https) + .collect::>(); + + if !https_flow.is_empty() { + flow.push(( + project_repository + .git_repository + .remote_anonymous(&https_url)?, + https_flow, + )); + } + } + + if let Ok(ssh_url) = remote_url.as_ssh() { + let ssh_flow = self + .ssh_flow()? 
+ .into_iter() + .map(Credential::Ssh) + .collect::>(); + if !ssh_flow.is_empty() { + flow.push(( + project_repository + .git_repository + .remote_anonymous(&ssh_url)?, + ssh_flow, + )); + } + } + + Ok(flow) + } + } + } + + fn https_flow( + project_repository: &project_repository::Repository, + remote_url: &super::Url, + ) -> Result, HelpError> { + let mut flow = vec![]; + + let mut helper = git2::CredentialHelper::new(&remote_url.to_string()); + let config = project_repository.git_repository.config()?; + helper.config(&git2::Config::from(config)); + if let Some((username, password)) = helper.execute() { + flow.push(HttpsCredential::CredentialHelper { username, password }); + } + + Ok(flow) + } + + fn ssh_flow(&self) -> Result, HelpError> { + let mut flow = vec![]; + if let Some(home_path) = self.home_dir.as_ref() { + let id_rsa_path = home_path.join(".ssh").join("id_rsa"); + if id_rsa_path.exists() { + flow.push(SshCredential::Keyfile { + key_path: id_rsa_path.clone(), + passphrase: None, + }); + } + + let id_ed25519_path = home_path.join(".ssh").join("id_ed25519"); + if id_ed25519_path.exists() { + flow.push(SshCredential::Keyfile { + key_path: id_ed25519_path.clone(), + passphrase: None, + }); + } + + let id_ecdsa_path = home_path.join(".ssh").join("id_ecdsa"); + if id_ecdsa_path.exists() { + flow.push(SshCredential::Keyfile { + key_path: id_ecdsa_path.clone(), + passphrase: None, + }); + } + } + + let key = self.keys.get_or_create()?; + flow.push(SshCredential::GitButlerKey(Box::new(key))); + Ok(flow) + } +} diff --git a/src/git/diff.rs b/src/git/diff.rs new file mode 100644 index 000000000..807d295d0 --- /dev/null +++ b/src/git/diff.rs @@ -0,0 +1,421 @@ +use std::{collections::HashMap, path, str}; + +use anyhow::{Context, Result}; +use serde::{Deserialize, Serialize}; + +use crate::git; + +use super::Repository; + +/// The type of change +#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum 
ChangeType { + /// Entry does not exist in old version + Added, + /// Entry does not exist in new version + Deleted, + /// Entry content changed between old and new + Modified, +} +impl From for ChangeType { + fn from(v: git2::Delta) -> Self { + use git2::Delta as D; + use ChangeType as C; + match v { + D::Untracked | D::Added => C::Added, + D::Modified + | D::Unmodified + | D::Renamed + | D::Copied + | D::Typechange + | D::Conflicted => C::Modified, + D::Ignored | D::Unreadable | D::Deleted => C::Deleted, + } + } +} + +#[derive(Debug, PartialEq, Clone, Serialize)] +pub struct GitHunk { + pub old_start: u32, + pub old_lines: u32, + pub new_start: u32, + pub new_lines: u32, + pub diff: String, + pub binary: bool, + pub change_type: ChangeType, +} + +impl GitHunk { + pub fn contains(&self, line: u32) -> bool { + self.new_start <= line && self.new_start + self.new_lines >= line + } +} + +pub struct Options { + pub context_lines: u32, +} + +impl Default for Options { + fn default() -> Self { + Self { context_lines: 3 } + } +} + +#[derive(Debug, PartialEq, Clone, Serialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct FileDiff { + pub old_path: Option, + pub new_path: Option, + pub hunks: Option>, + pub skipped: bool, + pub binary: bool, + pub old_size_bytes: u64, + pub new_size_bytes: u64, +} + +pub fn workdir( + repository: &Repository, + commit_oid: &git::Oid, + context_lines: u32, +) -> Result> { + let commit = repository + .find_commit(*commit_oid) + .context("failed to find commit")?; + let tree = commit.tree().context("failed to find tree")?; + + let mut diff_opts = git2::DiffOptions::new(); + diff_opts + .recurse_untracked_dirs(true) + .include_untracked(true) + .show_binary(true) + .show_untracked_content(true) + .ignore_submodules(true) + .context_lines(context_lines); + + let mut diff = repository.diff_tree_to_workdir(Some(&tree), Some(&mut diff_opts))?; + let (mut diff_opts, skipped_files) = without_large_files(50_000_000, &diff, diff_opts); + 
if !skipped_files.is_empty() { + diff = repository.diff_tree_to_workdir(Some(&tree), Some(&mut diff_opts))?; + } + let diff_files = hunks_by_filepath(repository, &diff); + diff_files.map(|mut df| { + for (key, value) in skipped_files { + df.insert(key, value); + } + df + }) +} + +pub fn trees( + repository: &Repository, + old_tree: &git::Tree, + new_tree: &git::Tree, + context_lines: u32, +) -> Result> { + let mut diff_opts = git2::DiffOptions::new(); + diff_opts + .recurse_untracked_dirs(true) + .include_untracked(true) + .show_binary(true) + .ignore_submodules(true) + .context_lines(context_lines) + .show_untracked_content(true); + + let diff = + repository.diff_tree_to_tree(Some(old_tree), Some(new_tree), Some(&mut diff_opts))?; + + hunks_by_filepath(repository, &diff) +} + +pub fn without_large_files( + size_limit_bytes: u64, + diff: &git2::Diff, + mut diff_opts: git2::DiffOptions, +) -> (git2::DiffOptions, HashMap) { + let mut skipped_files: HashMap = HashMap::new(); + for delta in diff.deltas() { + if delta.new_file().size() > size_limit_bytes { + if let Some(path) = delta.new_file().path() { + skipped_files.insert( + path.to_path_buf(), + FileDiff { + old_path: delta.old_file().path().map(std::path::Path::to_path_buf), + new_path: delta.new_file().path().map(std::path::Path::to_path_buf), + hunks: None, + skipped: true, + binary: true, + old_size_bytes: delta.old_file().size(), + new_size_bytes: delta.new_file().size(), + }, + ); + } + } else if let Some(path) = delta.new_file().path() { + if let Some(path) = path.to_str() { + diff_opts.pathspec(path); + } + } + } + (diff_opts, skipped_files) +} + +fn hunks_by_filepath( + repository: &Repository, + diff: &git2::Diff, +) -> Result> { + // find all the hunks + let mut hunks_by_filepath: HashMap> = HashMap::new(); + let mut diff_files: HashMap = HashMap::new(); + + diff.print( + git2::DiffFormat::Patch, + |delta, hunk, line: git2::DiffLine<'_>| { + let change_type: ChangeType = delta.status().into(); + let 
file_path = delta.new_file().path().unwrap_or_else(|| { + delta + .old_file() + .path() + .expect("failed to get file name from diff") + }); + + hunks_by_filepath + .entry(file_path.to_path_buf()) + .or_default(); + + let new_start = hunk.as_ref().map_or(0, git2::DiffHunk::new_start); + let new_lines = hunk.as_ref().map_or(0, git2::DiffHunk::new_lines); + let old_start = hunk.as_ref().map_or(0, git2::DiffHunk::old_start); + let old_lines = hunk.as_ref().map_or(0, git2::DiffHunk::old_lines); + + if let Some((line, is_binary)) = match line.origin() { + '+' | '-' | ' ' => { + if let Ok(content) = str::from_utf8(line.content()) { + Some((format!("{}{}", line.origin(), content), false)) + } else { + let full_path = repository.workdir().unwrap().join(file_path); + // save the file_path to the odb + if !delta.new_file().id().is_zero() && full_path.exists() { + // the binary file wasnt deleted + repository.blob_path(full_path.as_path()).unwrap(); + } + Some((delta.new_file().id().to_string(), true)) + } + } + 'B' => { + let full_path = repository.workdir().unwrap().join(file_path); + // save the file_path to the odb + if !delta.new_file().id().is_zero() && full_path.exists() { + // the binary file wasnt deleted + repository.blob_path(full_path.as_path()).unwrap(); + } + Some((delta.new_file().id().to_string(), true)) + } + 'F' => None, + _ => { + if let Ok(content) = str::from_utf8(line.content()) { + Some((content.to_string(), false)) + } else { + let full_path = repository.workdir().unwrap().join(file_path); + // save the file_path to the odb + if !delta.new_file().id().is_zero() && full_path.exists() { + // the binary file wasnt deleted + repository.blob_path(full_path.as_path()).unwrap(); + } + Some((delta.new_file().id().to_string(), true)) + } + } + } { + let hunks = hunks_by_filepath + .entry(file_path.to_path_buf()) + .or_default(); + + if let Some(previous_hunk) = hunks.last_mut() { + let hunk_did_not_change = previous_hunk.old_start == old_start + && 
previous_hunk.old_lines == old_lines + && previous_hunk.new_start == new_start + && previous_hunk.new_lines == new_lines; + + if hunk_did_not_change { + if is_binary { + // binary overrides the diff + previous_hunk.binary = true; + previous_hunk.old_start = 0; + previous_hunk.old_lines = 0; + previous_hunk.new_start = 0; + previous_hunk.new_lines = 0; + previous_hunk.diff = line; + } else if !previous_hunk.binary { + // append non binary hunks + previous_hunk.diff.push_str(&line); + } + } else { + hunks.push(GitHunk { + old_start, + old_lines, + new_start, + new_lines, + diff: line, + binary: is_binary, + change_type, + }); + } + } else { + hunks.push(GitHunk { + old_start, + old_lines, + new_start, + new_lines, + diff: line, + binary: is_binary, + change_type, + }); + } + } + diff_files.insert( + file_path.to_path_buf(), + FileDiff { + old_path: delta.old_file().path().map(std::path::Path::to_path_buf), + new_path: delta.new_file().path().map(std::path::Path::to_path_buf), + hunks: None, + skipped: false, + binary: delta.new_file().is_binary(), + old_size_bytes: delta.old_file().size(), + new_size_bytes: delta.new_file().size(), + }, + ); + + true + }, + ) + .context("failed to print diff")?; + + let hunks_by_filepath: HashMap> = hunks_by_filepath + .into_iter() + .map(|(k, v)| { + if let Some(binary_hunk) = v.iter().find(|hunk| hunk.binary) { + if v.len() > 1 { + // if there are multiple hunks with binary among them, then the binary hunk + // takes precedence + ( + k, + vec![GitHunk { + old_start: 0, + old_lines: 0, + new_start: 0, + new_lines: 0, + diff: binary_hunk.diff.clone(), + binary: true, + change_type: binary_hunk.change_type, + }], + ) + } else { + (k, v) + } + } else if v.is_empty() { + // this is a new file + ( + k, + vec![GitHunk { + old_start: 0, + old_lines: 0, + new_start: 0, + new_lines: 0, + diff: String::new(), + binary: false, + change_type: ChangeType::Modified, + }], + ) + } else { + (k, v) + } + }) + .collect(); + + for (file_path, 
diff_file) in &mut diff_files { + diff_file.hunks = hunks_by_filepath.get(file_path).cloned(); + } + Ok(diff_files) +} + +// returns None if cannot reverse the patch header +fn reverse_patch_header(header: &str) -> Option { + use itertools::Itertools; + + let mut parts = header.split_whitespace(); + + match parts.next() { + Some("@@") => {} + _ => return None, + }; + + let old_range = parts.next()?; + let new_range = parts.next()?; + + match parts.next() { + Some("@@") => {} + _ => return None, + }; + + Some(format!( + "@@ {} {} @@ {}", + new_range.replace('+', "-"), + old_range.replace('-', "+"), + parts.join(" ") + )) +} + +fn reverse_patch(patch: &str) -> Option { + let mut reversed = String::new(); + for line in patch.lines() { + if line.starts_with("@@") { + if let Some(header) = reverse_patch_header(line) { + reversed.push_str(&header); + reversed.push('\n'); + } else { + return None; + } + } else if line.starts_with('+') { + reversed.push_str(&line.replacen('+', "-", 1)); + reversed.push('\n'); + } else if line.starts_with('-') { + reversed.push_str(&line.replacen('-', "+", 1)); + reversed.push('\n'); + } else { + reversed.push_str(line); + reversed.push('\n'); + } + } + Some(reversed) +} + +// returns None if cannot reverse the hunk +pub fn reverse_hunk(hunk: &GitHunk) -> Option { + if hunk.binary { + None + } else { + reverse_patch(&hunk.diff).map(|diff| GitHunk { + old_start: hunk.new_start, + old_lines: hunk.new_lines, + new_start: hunk.old_start, + new_lines: hunk.old_lines, + diff, + binary: hunk.binary, + change_type: hunk.change_type, + }) + } +} + +pub fn diff_files_to_hunks( + files: &HashMap, +) -> HashMap> { + let mut file_hunks: HashMap> = HashMap::new(); + for (file_path, diff_file) in files { + if !diff_file.skipped { + file_hunks.insert( + file_path.clone(), + diff_file.hunks.clone().unwrap_or_default(), + ); + } + } + file_hunks +} diff --git a/src/git/error.rs b/src/git/error.rs new file mode 100644 index 000000000..298e39ea5 --- /dev/null 
+++ b/src/git/error.rs @@ -0,0 +1,62 @@ +use std::str::Utf8Error; + +use crate::keys; + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("not found: {0}")] + NotFound(git2::Error), + #[error("authentication failed")] + Auth(git2::Error), + #[error("sign error: {0}")] + Signing(keys::SignError), + #[error("remote url error: {0}")] + Url(super::url::ParseError), + #[error("io error: {0}")] + Io(#[from] std::io::Error), + #[error("network error: {0}")] + Network(git2::Error), + #[error("hook error: {0}")] + Hooks(#[from] git2_hooks::HooksError), + #[error("http error: {0}")] + Http(git2::Error), + #[error("checkout error: {0}")] + Checkout(git2::Error), + #[error(transparent)] + Other(git2::Error), + #[error(transparent)] + Utf8(#[from] Utf8Error), +} + +impl From for Error { + fn from(err: git2::Error) -> Self { + match err.class() { + git2::ErrorClass::Ssh => match err.code() { + git2::ErrorCode::GenericError | git2::ErrorCode::Auth => Error::Auth(err), + _ => Error::Other(err), + }, + git2::ErrorClass::Checkout => Error::Checkout(err), + git2::ErrorClass::Http => Error::Http(err), + git2::ErrorClass::Net => Error::Network(err), + _ => match err.code() { + git2::ErrorCode::NotFound => Error::NotFound(err), + git2::ErrorCode::Auth => Error::Auth(err), + _ => Error::Other(err), + }, + } + } +} + +impl From for Error { + fn from(err: keys::SignError) -> Self { + Error::Signing(err) + } +} + +impl From for Error { + fn from(err: super::url::ParseError) -> Self { + Error::Url(err) + } +} + +pub type Result = std::result::Result; diff --git a/src/git/index.rs b/src/git/index.rs new file mode 100644 index 000000000..8b0293bef --- /dev/null +++ b/src/git/index.rs @@ -0,0 +1,164 @@ +use std::path; + +use filetime::FileTime; + +use super::{Error, Oid, Repository, Result, Tree}; + +pub struct Index { + index: git2::Index, +} + +impl TryFrom> for Index { + type Error = Error; + + fn try_from(value: Tree<'_>) -> std::result::Result { + Self::try_from(&value) + } 
+} + +impl TryFrom<&Tree<'_>> for Index { + type Error = Error; + + fn try_from(value: &Tree) -> Result { + let mut empty_index = Self::new()?; + empty_index.read_tree(value)?; + Ok(empty_index) + } +} + +impl<'a> From<&'a mut Index> for &'a mut git2::Index { + fn from(index: &'a mut Index) -> Self { + &mut index.index + } +} + +impl From for Index { + fn from(index: git2::Index) -> Self { + Self { index } + } +} + +impl Index { + pub fn new() -> Result { + Ok(Index { + index: git2::Index::new()?, + }) + } + + pub fn add_all( + &mut self, + pathspecs: I, + flag: git2::IndexAddOption, + cb: Option<&mut git2::IndexMatchedPath<'_>>, + ) -> Result<()> + where + T: git2::IntoCString, + I: IntoIterator, + { + self.index.add_all(pathspecs, flag, cb).map_err(Into::into) + } + + pub fn conflicts(&self) -> Result { + self.index.conflicts().map_err(Into::into) + } + + pub fn read_tree(&mut self, tree: &Tree) -> Result<()> { + self.index.read_tree(tree.into()).map_err(Into::into) + } + + pub fn write_tree_to(&mut self, repo: &Repository) -> Result { + self.index + .write_tree_to(repo.into()) + .map(Into::into) + .map_err(Into::into) + } + + pub fn has_conflicts(&self) -> bool { + self.index.has_conflicts() + } + + pub fn write_tree(&mut self) -> Result { + self.index.write_tree().map(Into::into).map_err(Into::into) + } + + pub fn add(&mut self, entry: &IndexEntry) -> Result<()> { + self.index.add(&entry.clone().into()).map_err(Into::into) + } + + pub fn write(&mut self) -> Result<()> { + self.index.write().map_err(Into::into) + } + + pub fn add_path(&mut self, path: &path::Path) -> Result<()> { + self.index.add_path(path).map_err(Into::into) + } + + pub fn remove_path(&mut self, path: &path::Path) -> Result<()> { + self.index.remove_path(path).map_err(Into::into) + } + + pub fn get_path(&self, path: &path::Path, stage: i32) -> Option { + self.index.get_path(path, stage).map(Into::into) + } +} + +#[derive(Debug, Clone)] +pub struct IndexEntry { + pub ctime: FileTime, + pub 
mtime: FileTime, + pub dev: u32, + pub ino: u32, + pub mode: u32, + pub uid: u32, + pub gid: u32, + pub file_size: u32, + pub id: Oid, + pub flags: u16, + pub flags_extended: u16, + pub path: Vec, +} + +impl From for IndexEntry { + fn from(value: git2::IndexEntry) -> Self { + Self { + ctime: FileTime::from_unix_time( + i64::from(value.ctime.seconds()), + value.ctime.nanoseconds(), + ), + mtime: FileTime::from_unix_time( + i64::from(value.mtime.seconds()), + value.mtime.nanoseconds(), + ), + dev: value.dev, + ino: value.ino, + mode: value.mode, + uid: value.uid, + gid: value.gid, + file_size: value.file_size, + id: value.id.into(), + flags: value.flags, + flags_extended: value.flags_extended, + path: value.path, + } + } +} + +impl From for git2::IndexEntry { + #[allow(clippy::cast_possible_truncation)] + fn from(entry: IndexEntry) -> Self { + Self { + ctime: git2::IndexTime::new(entry.ctime.seconds() as i32, entry.ctime.nanoseconds()), + mtime: git2::IndexTime::new(entry.mtime.seconds() as i32, entry.mtime.nanoseconds()), + dev: entry.dev, + ino: entry.ino, + mode: entry.mode, + uid: entry.uid, + gid: entry.gid, + file_size: entry.file_size, + id: entry.id.into(), + flags: entry.flags, + flags_extended: entry.flags_extended, + path: entry.path, + } + } +} diff --git a/src/git/oid.rs b/src/git/oid.rs new file mode 100644 index 000000000..3e0718db4 --- /dev/null +++ b/src/git/oid.rs @@ -0,0 +1,61 @@ +use std::{fmt, str::FromStr}; + +use serde::{Deserialize, Serialize}; + +#[derive(Debug, PartialEq, Copy, Clone, Hash, Eq)] +pub struct Oid { + oid: git2::Oid, +} + +impl Default for Oid { + fn default() -> Self { + git2::Oid::zero().into() + } +} + +impl Serialize for Oid { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + self.oid.to_string().serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for Oid { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let s = 
String::deserialize(deserializer)?; + git2::Oid::from_str(&s) + .map_err(|e| serde::de::Error::custom(format!("invalid oid: {}", e))) + .map(Into::into) + } +} + +impl fmt::Display for Oid { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.oid.fmt(f) + } +} + +impl FromStr for Oid { + type Err = git2::Error; + + fn from_str(s: &str) -> Result { + git2::Oid::from_str(s).map(Into::into) + } +} + +impl From for Oid { + fn from(oid: git2::Oid) -> Self { + Self { oid } + } +} + +impl From for git2::Oid { + fn from(oid: Oid) -> Self { + oid.oid + } +} diff --git a/src/git/reference.rs b/src/git/reference.rs new file mode 100644 index 000000000..27c360014 --- /dev/null +++ b/src/git/reference.rs @@ -0,0 +1,64 @@ +mod refname; +pub use refname::{LocalRefname, Refname, RemoteRefname, VirtualRefname}; + +use super::{Commit, Oid, Result, Tree}; + +pub struct Reference<'repo> { + reference: git2::Reference<'repo>, +} + +impl<'repo> From> for Reference<'repo> { + fn from(reference: git2::Reference<'repo>) -> Self { + Reference { reference } + } +} + +impl<'repo> Reference<'repo> { + pub fn name(&self) -> Option { + self.reference + .name() + .map(|name| name.parse().expect("libgit2 provides valid refnames")) + } + + pub fn name_bytes(&self) -> &[u8] { + self.reference.name_bytes() + } + + pub fn target(&self) -> Option { + self.reference.target().map(Into::into) + } + + pub fn peel_to_commit(&self) -> Result> { + self.reference + .peel_to_commit() + .map(Into::into) + .map_err(Into::into) + } + + pub fn peel_to_tree(&self) -> Result> { + self.reference + .peel_to_tree() + .map(Into::into) + .map_err(Into::into) + } + + pub fn rename( + &mut self, + new_name: &Refname, + force: bool, + log_message: &str, + ) -> Result> { + self.reference + .rename(&new_name.to_string(), force, log_message) + .map(Into::into) + .map_err(Into::into) + } + + pub fn delete(&mut self) -> Result<()> { + self.reference.delete().map_err(Into::into) + } + + pub fn 
is_remote(&self) -> bool { + self.reference.is_remote() + } +} diff --git a/src/git/reference/refname.rs b/src/git/reference/refname.rs new file mode 100644 index 000000000..bdedb8b92 --- /dev/null +++ b/src/git/reference/refname.rs @@ -0,0 +1,137 @@ +mod error; +mod local; +mod remote; +mod r#virtual; + +use std::{fmt, str::FromStr}; + +use serde::{Deserialize, Serialize}; + +pub use error::Error; +pub use local::Refname as LocalRefname; +pub use r#virtual::Refname as VirtualRefname; +pub use remote::Refname as RemoteRefname; + +use crate::git; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum Refname { + Other(String), + Remote(RemoteRefname), + Local(LocalRefname), + Virtual(VirtualRefname), +} + +impl From<&RemoteRefname> for Refname { + fn from(value: &RemoteRefname) -> Self { + Self::Remote(value.clone()) + } +} + +impl From for Refname { + fn from(value: RemoteRefname) -> Self { + Self::Remote(value) + } +} + +impl From for Refname { + fn from(value: VirtualRefname) -> Self { + Self::Virtual(value) + } +} + +impl From<&VirtualRefname> for Refname { + fn from(value: &VirtualRefname) -> Self { + Self::Virtual(value.clone()) + } +} + +impl From for Refname { + fn from(value: LocalRefname) -> Self { + Self::Local(value) + } +} + +impl From<&LocalRefname> for Refname { + fn from(value: &LocalRefname) -> Self { + Self::Local(value.clone()) + } +} + +impl Refname { + pub fn branch(&self) -> Option<&str> { + match self { + Self::Other(_) => None, + Self::Remote(remote) => Some(remote.branch()), + Self::Local(local) => Some(local.branch()), + Self::Virtual(r#virtual) => Some(r#virtual.branch()), + } + } + + pub fn simple_name(&self) -> String { + match self { + Refname::Virtual(virtual_refname) => virtual_refname.branch().to_string(), + Refname::Local(local) => local.branch().to_string(), + Refname::Remote(remote) => { + format!("{}/{}", remote.remote(), remote.branch()) + } + Refname::Other(raw) => raw.to_string(), + } + } +} + +impl FromStr for Refname { + 
type Err = Error; + + fn from_str(value: &str) -> Result { + match value { + value if value.starts_with("refs/remotes/") => Ok(Self::Remote(value.parse()?)), + value if value.starts_with("refs/heads/") => Ok(Self::Local(value.parse()?)), + value if value.starts_with("refs/gitbutler/") => Ok(Self::Virtual(value.parse()?)), + "HEAD" => Ok(Self::Other(value.to_string())), + value if value.starts_with("refs/") => Ok(Self::Other(value.to_string())), + _ => Err(Error::InvalidName(value.to_string())), + } + } +} + +impl TryFrom<&git::Branch<'_>> for Refname { + type Error = Error; + + fn try_from(value: &git::Branch<'_>) -> std::result::Result { + if value.is_remote() { + Ok(Self::Remote(RemoteRefname::try_from(value)?)) + } else { + Ok(Self::Local(LocalRefname::try_from(value)?)) + } + } +} + +impl fmt::Display for Refname { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Other(raw) => raw.fmt(f), + Self::Remote(remote) => remote.fmt(f), + Self::Local(local) => local.fmt(f), + Self::Virtual(r#virtual) => r#virtual.fmt(f), + } + } +} + +impl Serialize for Refname { + fn serialize(&self, serializer: S) -> Result { + match self { + Self::Other(raw) => raw.serialize(serializer), + Self::Remote(remote) => remote.serialize(serializer), + Self::Local(local) => local.serialize(serializer), + Self::Virtual(r#virtual) => r#virtual.serialize(serializer), + } + } +} + +impl<'d> Deserialize<'d> for Refname { + fn deserialize>(deserializer: D) -> Result { + let name = String::deserialize(deserializer)?; + name.parse().map_err(serde::de::Error::custom) + } +} diff --git a/src/git/reference/refname/error.rs b/src/git/reference/refname/error.rs new file mode 100644 index 000000000..a964fe399 --- /dev/null +++ b/src/git/reference/refname/error.rs @@ -0,0 +1,17 @@ +use crate::git; + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("branch name is invalid: {0}")] + InvalidName(String), + #[error("reference is not a tag: {0}")] + 
NotTag(String), + #[error("branch is not local: {0}")] + NotLocal(String), + #[error("branch is not remote: {0}")] + NotRemote(String), + #[error(transparent)] + Git(#[from] git::Error), + #[error(transparent)] + Utf8(#[from] std::string::FromUtf8Error), +} diff --git a/src/git/reference/refname/local.rs b/src/git/reference/refname/local.rs new file mode 100644 index 000000000..022bfc961 --- /dev/null +++ b/src/git/reference/refname/local.rs @@ -0,0 +1,94 @@ +use std::{fmt, str::FromStr}; + +use serde::{Deserialize, Serialize}; + +use crate::git; + +use super::{error::Error, remote}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Refname { + // contains name of the branch, e.x. "master" or "main" + branch: String, + // contains name of the remote branch, if the local branch is tracking a remote branch + remote: Option, +} + +impl Refname { + pub fn new(branch: &str, remote: Option) -> Self { + Self { + branch: branch.to_string(), + remote, + } + } + + pub fn branch(&self) -> &str { + &self.branch + } + + pub fn remote(&self) -> Option<&remote::Refname> { + self.remote.as_ref() + } +} + +impl Serialize for Refname { + fn serialize(&self, serializer: S) -> Result { + serializer.serialize_str(&self.to_string()) + } +} + +impl<'d> Deserialize<'d> for Refname { + fn deserialize>(deserializer: D) -> Result { + let name = String::deserialize(deserializer)?; + name.as_str().parse().map_err(serde::de::Error::custom) + } +} + +impl fmt::Display for Refname { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "refs/heads/{}", self.branch) + } +} + +impl FromStr for Refname { + type Err = Error; + + fn from_str(value: &str) -> Result { + if !value.starts_with("refs/heads/") { + return Err(Error::NotLocal(value.to_string())); + } + + if let Some(branch) = value.strip_prefix("refs/heads/") { + Ok(Self { + branch: branch.to_string(), + remote: None, + }) + } else { + Err(Error::InvalidName(value.to_string())) + } + } +} + +impl 
TryFrom<&git::Branch<'_>> for Refname { + type Error = Error; + + fn try_from(value: &git::Branch<'_>) -> std::result::Result { + let branch_name = String::from_utf8(value.refname_bytes().to_vec()).map_err(Error::Utf8)?; + if value.is_remote() { + Err(Error::NotLocal(branch_name)) + } else { + let branch: Self = branch_name.parse()?; + match value.upstream() { + Ok(upstream) => Ok(Self { + remote: Some(remote::Refname::try_from(&upstream)?), + ..branch + }), + Err(git::Error::NotFound(_)) => Ok(Self { + remote: None, + ..branch + }), + Err(error) => Err(error.into()), + } + } + } +} diff --git a/src/git/reference/refname/remote.rs b/src/git/reference/refname/remote.rs new file mode 100644 index 000000000..d14ab3b5f --- /dev/null +++ b/src/git/reference/refname/remote.rs @@ -0,0 +1,93 @@ +use std::{fmt, str::FromStr}; + +use serde::{Deserialize, Serialize}; + +use crate::git; + +use super::error::Error; + +#[derive(Debug, Clone, Hash, PartialEq, Eq)] +pub struct Refname { + // contains name of the remote, e.x. "origin" or "upstream" + remote: String, + // contains name of the branch, e.x. 
"master" or "main" + branch: String, +} + +impl Refname { + pub fn new(remote: &str, branch: &str) -> Self { + Self { + remote: remote.to_string(), + branch: branch.to_string(), + } + } + + pub fn with_branch(&self, branch: &str) -> Self { + Self { + branch: branch.to_string(), + remote: self.remote.clone(), + } + } + + pub fn branch(&self) -> &str { + &self.branch + } + + pub fn remote(&self) -> &str { + &self.remote + } +} + +impl fmt::Display for Refname { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "refs/remotes/{}/{}", self.remote, self.branch) + } +} + +impl Serialize for Refname { + fn serialize(&self, serializer: S) -> Result { + serializer.serialize_str(&self.to_string()) + } +} + +impl<'d> Deserialize<'d> for Refname { + fn deserialize>(deserializer: D) -> Result { + let name = String::deserialize(deserializer)?; + name.as_str().parse().map_err(serde::de::Error::custom) + } +} + +impl FromStr for Refname { + type Err = Error; + + fn from_str(value: &str) -> Result { + if !value.starts_with("refs/remotes/") { + return Err(Error::NotRemote(value.to_string())); + }; + + let value = value.strip_prefix("refs/remotes/").unwrap(); + + if let Some((remote, branch)) = value.split_once('/') { + Ok(Self { + remote: remote.to_string(), + branch: branch.to_string(), + }) + } else { + Err(Error::InvalidName(value.to_string())) + } + } +} + +impl TryFrom<&git::Branch<'_>> for Refname { + type Error = Error; + + fn try_from(value: &git::Branch<'_>) -> std::result::Result { + let refname = String::from_utf8(value.refname_bytes().to_vec()).map_err(Error::Utf8)?; + + if !value.is_remote() { + return Err(Error::NotRemote(refname)); + } + + refname.parse() + } +} diff --git a/src/git/reference/refname/virtual.rs b/src/git/reference/refname/virtual.rs new file mode 100644 index 000000000..5d05a6a15 --- /dev/null +++ b/src/git/reference/refname/virtual.rs @@ -0,0 +1,65 @@ +use std::{fmt, str::FromStr}; + +use serde::{Deserialize, Serialize}; + +use 
crate::virtual_branches::normalize_branch_name; +use crate::virtual_branches::Branch; + +use super::error::Error; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Refname { + // contains slug of the virtual branch name + branch: String, +} + +impl Refname { + pub fn branch(&self) -> &str { + &self.branch + } +} + +impl From<&Branch> for Refname { + fn from(value: &Branch) -> Self { + Self { + branch: normalize_branch_name(&value.name), + } + } +} + +impl Serialize for Refname { + fn serialize(&self, serializer: S) -> Result { + serializer.serialize_str(&self.to_string()) + } +} + +impl<'d> Deserialize<'d> for Refname { + fn deserialize>(deserializer: D) -> Result { + let name = String::deserialize(deserializer)?; + name.as_str().parse().map_err(serde::de::Error::custom) + } +} + +impl fmt::Display for Refname { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "refs/gitbutler/{}", self.branch) + } +} + +impl FromStr for Refname { + type Err = Error; + + fn from_str(value: &str) -> Result { + if !value.starts_with("refs/gitbutler/") { + return Err(Error::NotLocal(value.to_string())); + } + + if let Some(branch) = value.strip_prefix("refs/gitbutler/") { + Ok(Self { + branch: branch.to_string(), + }) + } else { + Err(Error::InvalidName(value.to_string())) + } + } +} diff --git a/src/git/remote.rs b/src/git/remote.rs new file mode 100644 index 000000000..15f28034c --- /dev/null +++ b/src/git/remote.rs @@ -0,0 +1,43 @@ +use std::str::FromStr; + +use super::{Result, Url}; + +pub struct Remote<'repo> { + inner: git2::Remote<'repo>, +} + +impl<'repo> From> for Remote<'repo> { + fn from(inner: git2::Remote<'repo>) -> Self { + Self { inner } + } +} + +impl<'repo> Remote<'repo> { + pub fn name(&self) -> Option<&str> { + self.inner.name() + } + + pub fn url(&self) -> Result> { + self.inner + .url() + .map(FromStr::from_str) + .transpose() + .map_err(Into::into) + } + + pub fn push( + &mut self, + refspec: &[&str], + opts: Option<&mut 
git2::PushOptions<'_>>, + ) -> Result<()> { + self.inner.push(refspec, opts).map_err(Into::into) + } + + pub fn fetch( + &mut self, + refspec: &[&str], + opts: Option<&mut git2::FetchOptions<'_>>, + ) -> Result<()> { + self.inner.fetch(refspec, opts, None).map_err(Into::into) + } +} diff --git a/src/git/repository.rs b/src/git/repository.rs new file mode 100644 index 000000000..3091657d9 --- /dev/null +++ b/src/git/repository.rs @@ -0,0 +1,535 @@ +use std::{io::Write, path::Path, str}; + +use git2::Submodule; +use git2_hooks::HookResult; + +use crate::{keys, path::Normalize}; + +use super::{ + Blob, Branch, Commit, Config, Index, Oid, Reference, Refname, Remote, Result, Signature, Tree, + TreeBuilder, Url, +}; + +// wrapper around git2::Repository to get control over how it's used. +pub struct Repository(git2::Repository); + +impl<'a> From<&'a Repository> for &'a git2::Repository { + fn from(repo: &'a Repository) -> Self { + &repo.0 + } +} + +impl From for Repository { + fn from(repo: git2::Repository) -> Self { + Self(repo) + } +} + +impl Repository { + pub fn init>(path: P) -> Result { + let inner = git2::Repository::init(path)?; + Ok(Repository(inner)) + } + + pub fn init_opts>(path: P, opts: &git2::RepositoryInitOptions) -> Result { + let inner = git2::Repository::init_opts(path, opts)?; + Ok(Repository(inner)) + } + + pub fn open>(path: P) -> Result { + let inner = git2::Repository::open(path)?; + Ok(Repository(inner)) + } + + pub fn add_disk_alternate>(&self, path: P) -> Result<()> { + let alternates_path = self.0.path().join("objects/info/alternates"); + if !alternates_path.exists() { + let path = path.as_ref().normalize(); + let mut alternates_file = std::fs::File::create(&alternates_path)?; + alternates_file.write_all(path.as_path().as_os_str().as_encoded_bytes())?; + alternates_file.write_all(b"\n")?; + self.0.odb().and_then(|odb| odb.refresh())?; + } + + Ok(()) + } + + pub fn add_submodule>(&self, url: &Url, path: P) -> Result> { + self.0 + 
.submodule(&url.to_string(), path.as_ref(), false) + .map_err(Into::into) + } + + pub fn rebase( + &self, + branch_oid: Option, + upstream_oid: Option, + onto_oid: Option, + opts: Option<&mut git2::RebaseOptions<'_>>, + ) -> Result> { + let annotated_branch = if let Some(branch) = branch_oid { + Some(self.0.find_annotated_commit(branch.into())?) + } else { + None + }; + + let annotated_upstream = if let Some(upstream) = upstream_oid { + Some(self.0.find_annotated_commit(upstream.into())?) + } else { + None + }; + + let annotated_onto = if let Some(onto) = onto_oid { + Some(self.0.find_annotated_commit(onto.into())?) + } else { + None + }; + + self.0 + .rebase( + annotated_branch.as_ref(), + annotated_upstream.as_ref(), + annotated_onto.as_ref(), + opts, + ) + .map_err(Into::into) + } + + pub fn is_descendant_of(&self, a: Oid, b: Oid) -> Result { + self.0 + .graph_descendant_of(a.into(), b.into()) + .map_err(Into::into) + } + + pub fn merge_base(&self, one: Oid, two: Oid) -> Result { + self.0 + .merge_base(one.into(), two.into()) + .map(Oid::from) + .map_err(Into::into) + } + + pub fn merge_trees( + &self, + ancestor_tree: &Tree<'_>, + our_tree: &Tree<'_>, + their_tree: &Tree<'_>, + ) -> Result { + self.0 + .merge_trees( + ancestor_tree.into(), + our_tree.into(), + their_tree.into(), + None, + ) + .map(Index::from) + .map_err(Into::into) + } + + pub fn diff_tree_to_tree( + &self, + old_tree: Option<&Tree<'_>>, + new_tree: Option<&Tree<'_>>, + opts: Option<&mut git2::DiffOptions>, + ) -> Result> { + self.0 + .diff_tree_to_tree(old_tree.map(Into::into), new_tree.map(Into::into), opts) + .map_err(Into::into) + } + + pub fn diff_tree_to_workdir( + &self, + old_tree: Option<&Tree<'_>>, + opts: Option<&mut git2::DiffOptions>, + ) -> Result> { + if let Ok(mut index) = self.0.index() { + index.update_all(vec!["*"], None)?; + } + self.0 + .diff_tree_to_workdir_with_index(old_tree.map(Into::into), opts) + .map_err(Into::into) + } + + pub fn reset( + &self, + commit: 
&Commit<'_>, + kind: git2::ResetType, + checkout: Option<&mut git2::build::CheckoutBuilder<'_>>, + ) -> Result<()> { + let commit: &git2::Commit = commit.into(); + self.0 + .reset(commit.as_object(), kind, checkout) + .map_err(Into::into) + } + + pub fn find_reference(&self, name: &Refname) -> Result { + self.0 + .find_reference(&name.to_string()) + .map(Reference::from) + .map_err(Into::into) + } + + pub fn head(&self) -> Result { + self.0.head().map(Reference::from).map_err(Into::into) + } + + pub fn find_tree(&self, id: Oid) -> Result { + self.0 + .find_tree(id.into()) + .map(Tree::from) + .map_err(Into::into) + } + + pub fn find_commit(&self, id: Oid) -> Result { + self.0 + .find_commit(id.into()) + .map(Commit::from) + .map_err(Into::into) + } + + pub fn find_blob(&self, id: Oid) -> Result { + self.0 + .find_blob(id.into()) + .map(Into::into) + .map_err(Into::into) + } + + pub fn revwalk(&self) -> Result { + self.0.revwalk().map_err(Into::into) + } + + pub fn is_path_ignored>(&self, path: P) -> Result { + self.0.is_path_ignored(path).map_err(Into::into) + } + + pub fn branches( + &self, + filter: Option, + ) -> Result>> { + self.0 + .branches(filter) + .map(|branches| { + branches.map(|branch| { + branch + .map(|(branch, branch_type)| (Branch::from(branch), branch_type)) + .map_err(Into::into) + }) + }) + .map_err(Into::into) + } + + pub fn index(&self) -> Result { + self.0.index().map(Into::into).map_err(Into::into) + } + + pub fn index_size(&self) -> Result { + Ok(self.0.index()?.len()) + } + + pub fn blob_path>(&self, path: P) -> Result { + self.0 + .blob_path(path.as_ref()) + .map(Into::into) + .map_err(Into::into) + } + + pub fn cherry_pick(&self, base: &Commit, target: &Commit) -> Result { + self.0 + .cherrypick_commit(target.into(), base.into(), 0, None) + .map(Into::into) + .map_err(Into::into) + } + + pub fn blob(&self, data: &[u8]) -> Result { + self.0.blob(data).map(Into::into).map_err(Into::into) + } + + pub fn commit( + &self, + update_ref: 
Option<&Refname>, + author: &Signature<'_>, + committer: &Signature<'_>, + message: &str, + tree: &Tree<'_>, + parents: &[&Commit<'_>], + ) -> Result { + let parents: Vec<&git2::Commit> = parents + .iter() + .map(|c| c.to_owned().into()) + .collect::>(); + self.0 + .commit( + update_ref.map(ToString::to_string).as_deref(), + author.into(), + committer.into(), + message, + tree.into(), + &parents, + ) + .map(Into::into) + .map_err(Into::into) + } + + pub fn commit_signed( + &self, + author: &Signature<'_>, + message: &str, + tree: &Tree<'_>, + parents: &[&Commit<'_>], + key: &keys::PrivateKey, + ) -> Result { + let parents: Vec<&git2::Commit> = parents + .iter() + .map(|c| c.to_owned().into()) + .collect::>(); + let commit_buffer = self.0.commit_create_buffer( + author.into(), + // author and committer must be the same + // for signed commits + author.into(), + message, + tree.into(), + &parents, + )?; + let commit_buffer = str::from_utf8(&commit_buffer).unwrap(); + let signature = key.sign(commit_buffer.as_bytes())?; + self.0 + .commit_signed(commit_buffer, &signature, None) + .map(Into::into) + .map_err(Into::into) + } + + pub fn config(&self) -> Result { + self.0.config().map(Into::into).map_err(Into::into) + } + + pub fn treebuilder<'repo>(&'repo self, tree: Option<&'repo Tree>) -> TreeBuilder<'repo> { + TreeBuilder::new(self, tree) + } + + pub fn path(&self) -> &Path { + self.0.path() + } + + pub fn workdir(&self) -> Option<&Path> { + self.0.workdir() + } + + pub fn branch_upstream_name(&self, branch_name: &str) -> Result { + self.0 + .branch_upstream_name(branch_name) + .map(|s| s.as_str().unwrap().to_string()) + .map_err(Into::into) + } + + pub fn branch_remote_name(&self, refname: &str) -> Result { + self.0 + .branch_remote_name(refname) + .map(|s| s.as_str().unwrap().to_string()) + .map_err(Into::into) + } + + pub fn branch_upstream_remote(&self, branch_name: &str) -> Result { + self.0 + .branch_upstream_remote(branch_name) + .map(|s| 
s.as_str().unwrap().to_string()) + .map_err(Into::into) + } + + pub fn statuses( + &self, + options: Option<&mut git2::StatusOptions>, + ) -> Result> { + self.0.statuses(options).map_err(Into::into) + } + + pub fn remote_anonymous(&self, url: &super::Url) -> Result { + self.0 + .remote_anonymous(&url.to_string()) + .map(Into::into) + .map_err(Into::into) + } + + pub fn find_remote(&self, name: &str) -> Result { + self.0.find_remote(name).map(Into::into).map_err(Into::into) + } + + pub fn find_branch(&self, name: &Refname) -> Result { + self.0 + .find_branch( + &name.simple_name(), + match name { + Refname::Virtual(_) | Refname::Local(_) | Refname::Other(_) => { + git2::BranchType::Local + } + Refname::Remote(_) => git2::BranchType::Remote, + }, + ) + .map(Into::into) + .map_err(Into::into) + } + + pub fn refname_to_id(&self, name: &str) -> Result { + self.0 + .refname_to_id(name) + .map(Into::into) + .map_err(Into::into) + } + + pub fn checkout_head(&self, opts: Option<&mut git2::build::CheckoutBuilder>) -> Result<()> { + self.0.checkout_head(opts).map_err(Into::into) + } + + pub fn checkout_index<'a>(&'a self, index: &'a mut Index) -> CheckoutIndexBuilder { + CheckoutIndexBuilder { + index: index.into(), + repo: &self.0, + checkout_builder: git2::build::CheckoutBuilder::new(), + } + } + + pub fn checkout_index_path>(&self, path: P) -> Result<()> { + let mut builder = git2::build::CheckoutBuilder::new(); + builder.path(path.as_ref()); + builder.force(); + + let mut index = self.0.index()?; + self.0 + .checkout_index(Some(&mut index), Some(&mut builder))?; + + Ok(()) + } + + pub fn checkout_tree<'a>(&'a self, tree: &'a Tree<'a>) -> CheckoutTreeBuidler { + CheckoutTreeBuidler { + tree: tree.into(), + repo: &self.0, + checkout_builder: git2::build::CheckoutBuilder::new(), + } + } + + pub fn set_head(&self, refname: &Refname) -> Result<()> { + self.0.set_head(&refname.to_string()).map_err(Into::into) + } + + pub fn set_head_detached(&self, commitish: Oid) -> Result<()> 
{ + self.0 + .set_head_detached(commitish.into()) + .map_err(Into::into) + } + + pub fn branch(&self, name: &Refname, target: &Commit, force: bool) -> Result { + self.0 + .branch(&name.to_string(), target.into(), force) + .map(Into::into) + .map_err(Into::into) + } + + pub fn reference( + &self, + name: &Refname, + id: Oid, + force: bool, + log_message: &str, + ) -> Result { + self.0 + .reference(&name.to_string(), id.into(), force, log_message) + .map(Into::into) + .map_err(Into::into) + } + + pub fn get_wd_tree(&self) -> Result { + let mut index = self.0.index()?; + index.add_all(["*"], git2::IndexAddOption::DEFAULT, None)?; + let oid = index.write_tree()?; + self.0.find_tree(oid).map(Into::into).map_err(Into::into) + } + + pub fn remote(&self, name: &str, url: &Url) -> Result { + self.0 + .remote(name, &url.to_string()) + .map(Into::into) + .map_err(Into::into) + } + + pub fn references(&self) -> Result>> { + self.0 + .references() + .map(|iter| iter.map(|reference| reference.map(Into::into).map_err(Into::into))) + .map_err(Into::into) + } + + pub fn references_glob(&self, glob: &str) -> Result>> { + self.0 + .references_glob(glob) + .map(|iter| iter.map(|reference| reference.map(Into::into).map_err(Into::into))) + .map_err(Into::into) + } + + pub fn run_hook_pre_commit(&self) -> Result { + let res = git2_hooks::hooks_pre_commit(&self.0, Some(&["../.husky"]))?; + Ok(res) + } + + pub fn run_hook_commit_msg(&self, msg: &mut String) -> Result { + let res = git2_hooks::hooks_commit_msg(&self.0, Some(&["../.husky"]), msg)?; + Ok(res) + } + + pub fn run_hook_post_commit(&self) -> Result<()> { + git2_hooks::hooks_post_commit(&self.0, Some(&["../.husky"]))?; + Ok(()) + } +} + +pub struct CheckoutTreeBuidler<'a> { + repo: &'a git2::Repository, + tree: &'a git2::Tree<'a>, + checkout_builder: git2::build::CheckoutBuilder<'a>, +} + +impl CheckoutTreeBuidler<'_> { + pub fn force(&mut self) -> &mut Self { + self.checkout_builder.force(); + self + } + + pub fn 
remove_untracked(&mut self) -> &mut Self { + self.checkout_builder.remove_untracked(true); + self + } + + pub fn checkout(&mut self) -> Result<()> { + self.repo + .checkout_tree(self.tree.as_object(), Some(&mut self.checkout_builder)) + .map_err(Into::into) + } +} + +pub struct CheckoutIndexBuilder<'a> { + repo: &'a git2::Repository, + index: &'a mut git2::Index, + checkout_builder: git2::build::CheckoutBuilder<'a>, +} + +impl CheckoutIndexBuilder<'_> { + pub fn force(&mut self) -> &mut Self { + self.checkout_builder.force(); + self + } + + pub fn allow_conflicts(&mut self) -> &mut Self { + self.checkout_builder.allow_conflicts(true); + self + } + + pub fn conflict_style_merge(&mut self) -> &mut Self { + self.checkout_builder.conflict_style_merge(true); + self + } + + pub fn checkout(&mut self) -> Result<()> { + self.repo + .checkout_index(Some(&mut self.index), Some(&mut self.checkout_builder)) + .map_err(Into::into) + } +} diff --git a/src/git/show.rs b/src/git/show.rs new file mode 100644 index 000000000..2062abde8 --- /dev/null +++ b/src/git/show.rs @@ -0,0 +1,22 @@ +use super::Repository; +use crate::git; +use std::{path, str}; + +use super::Result; + +pub fn show_file_at_tree>( + repository: &Repository, + file_path: P, + tree: &git::Tree, +) -> Result { + let file_path = file_path.as_ref(); + match tree.get_path(file_path) { + Ok(tree_entry) => { + let blob = repository.find_blob(tree_entry.id())?; + let content = str::from_utf8(blob.content())?; + Ok(content.to_string()) + } + // If a file was introduced in this commit, the content in the parent tree is the empty string + Err(_) => Ok(String::new()), + } +} diff --git a/src/git/signature.rs b/src/git/signature.rs new file mode 100644 index 000000000..46851dbfa --- /dev/null +++ b/src/git/signature.rs @@ -0,0 +1,67 @@ +use crate::users; + +pub struct Signature<'a> { + signature: git2::Signature<'a>, +} + +impl Clone for Signature<'static> { + fn clone(&self) -> Self { + Self { + signature: 
self.signature.clone(), + } + } +} + +impl<'a> From> for git2::Signature<'a> { + fn from(value: Signature<'a>) -> Self { + value.signature + } +} + +impl<'a> From<&'a Signature<'a>> for &'a git2::Signature<'a> { + fn from(value: &'a Signature<'a>) -> Self { + &value.signature + } +} + +impl<'a> From> for Signature<'a> { + fn from(value: git2::Signature<'a>) -> Self { + Self { signature: value } + } +} + +impl TryFrom<&users::User> for Signature<'_> { + type Error = super::Error; + + fn try_from(value: &users::User) -> Result { + if let Some(name) = &value.name { + git2::Signature::now(name, &value.email) + .map(Into::into) + .map_err(Into::into) + } else if let Some(name) = &value.given_name { + git2::Signature::now(name, &value.email) + .map(Into::into) + .map_err(Into::into) + } else { + git2::Signature::now(&value.email, &value.email) + .map(Into::into) + .map_err(Into::into) + } + } +} + +impl Signature<'_> { + pub fn now(name: &str, email: &str) -> Result { + git2::Signature::now(name, email) + .map(Into::into) + .map_err(Into::into) + } + + pub fn name(&self) -> Option<&str> { + self.signature.name() + } + + pub fn email(&self) -> Option<&str> { + self.signature.email() + } +} diff --git a/src/git/tree.rs b/src/git/tree.rs new file mode 100644 index 000000000..1d7e84486 --- /dev/null +++ b/src/git/tree.rs @@ -0,0 +1,147 @@ +use std::path::Path; + +use super::{Oid, Repository, Result}; +use crate::path::Normalize; + +pub struct Tree<'repo> { + tree: git2::Tree<'repo>, +} + +impl<'repo> From> for Tree<'repo> { + fn from(tree: git2::Tree<'repo>) -> Self { + Tree { tree } + } +} + +impl<'repo> From<&'repo Tree<'repo>> for &'repo git2::Tree<'repo> { + fn from(tree: &'repo Tree<'repo>) -> Self { + &tree.tree + } +} + +impl<'repo> Tree<'repo> { + pub fn id(&self) -> Oid { + self.tree.id().into() + } + + pub fn get_path>(&self, path: P) -> Result> { + self.tree + .get_path(path.normalize().as_path()) + .map(Into::into) + .map_err(Into::into) + } + + pub fn 
walk(&self, mut callback: C) -> Result<()> + where + C: FnMut(&str, &TreeEntry) -> TreeWalkResult, + { + self.tree + .walk(git2::TreeWalkMode::PreOrder, |root, entry| { + match callback(root, &entry.clone().into()) { + TreeWalkResult::Continue => git2::TreeWalkResult::Ok, + TreeWalkResult::Skip => git2::TreeWalkResult::Skip, + TreeWalkResult::Stop => git2::TreeWalkResult::Abort, + } + }) + .map_err(Into::into) + } + + pub fn get_name(&self, filename: &str) -> Option { + self.tree.get_name(filename).map(Into::into) + } +} + +pub enum TreeWalkResult { + Continue, + Skip, + Stop, +} + +pub struct TreeEntry<'repo> { + entry: git2::TreeEntry<'repo>, +} + +impl<'repo> From> for TreeEntry<'repo> { + fn from(entry: git2::TreeEntry<'repo>) -> Self { + TreeEntry { entry } + } +} + +impl<'repo> TreeEntry<'repo> { + pub fn filemode(&self) -> i32 { + self.entry.filemode() + } + + pub fn to_object(&self, repo: &'repo Repository) -> Result { + self.entry.to_object(repo.into()).map_err(Into::into) + } + + pub fn kind(&self) -> Option { + self.entry.kind() + } + + pub fn id(&self) -> Oid { + self.entry.id().into() + } + + pub fn name(&self) -> Option<&str> { + self.entry.name() + } +} + +#[derive(PartialEq)] +pub enum FileMode { + Blob, + BlobExecutable, + Link, + Tree, +} + +impl From for git2::FileMode { + fn from(filemod: FileMode) -> Self { + match filemod { + FileMode::Blob => git2::FileMode::Blob, + FileMode::BlobExecutable => git2::FileMode::BlobExecutable, + FileMode::Link => git2::FileMode::Link, + FileMode::Tree => git2::FileMode::Tree, + } + } +} + +pub struct TreeBuilder<'repo> { + repo: &'repo git2::Repository, + builder: git2::build::TreeUpdateBuilder, + base: Option<&'repo git2::Tree<'repo>>, +} + +impl<'repo> TreeBuilder<'repo> { + pub fn new(repo: &'repo Repository, base: Option<&'repo Tree>) -> Self { + TreeBuilder { + repo: repo.into(), + builder: git2::build::TreeUpdateBuilder::new(), + base: base.map(Into::into), + } + } + + pub fn upsert>(&mut self, filename: 
P, oid: Oid, filemode: FileMode) { + self.builder + .upsert(filename.as_ref(), oid.into(), filemode.into()); + } + + pub fn remove>(&mut self, filename: P) { + self.builder.remove(filename.as_ref()); + } + + pub fn write(&mut self) -> Result { + let repo: &git2::Repository = self.repo; + if let Some(base) = self.base { + let tree_id = self.builder.create_updated(repo, base)?; + Ok(tree_id.into()) + } else { + let empty_tree_id = repo.treebuilder(None)?.write()?; + let empty_tree = repo.find_tree(empty_tree_id)?; + let tree_id = self.builder.create_updated(repo, &empty_tree)?; + Ok(tree_id.into()) + } + } +} diff --git a/src/git/url.rs b/src/git/url.rs new file mode 100644 index 000000000..e11b7f81a --- /dev/null +++ b/src/git/url.rs @@ -0,0 +1,91 @@ +mod convert; +mod parse; +mod scheme; + +use std::str::FromStr; + +use bstr::ByteSlice; +pub use convert::ConvertError; +pub use parse::Error as ParseError; +pub use scheme::Scheme; + +#[derive(Default, Clone, Hash, PartialEq, Eq, Debug, thiserror::Error)] +pub struct Url { + /// The URL scheme. + pub scheme: Scheme, + /// The user to impersonate on the remote. + user: Option, + /// The password associated with a user. + password: Option, + /// The host to which to connect. Localhost is implied if `None`. + pub host: Option, + /// When serializing, use the alternative forms as it was parsed as such. + serialize_alternative_form: bool, + /// The port to use when connecting to a host. If `None`, standard ports depending on `scheme` will be used. + pub port: Option, + /// The path portion of the URL, usually the location of the git repository. 
+ pub path: bstr::BString, +} + +impl Url { + pub fn is_github(&self) -> bool { + self.host + .as_ref() + .map_or(false, |host| host.contains("github.com")) + } +} + +impl std::fmt::Display for Url { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if !(self.serialize_alternative_form + && (self.scheme == Scheme::File || self.scheme == Scheme::Ssh)) + { + f.write_str(self.scheme.as_str())?; + f.write_str("://")?; + } + match (&self.user, &self.host) { + (Some(user), Some(host)) => { + f.write_str(user)?; + if let Some(password) = &self.password { + f.write_str(":")?; + f.write_str(password)?; + } + f.write_str("@")?; + f.write_str(host)?; + } + (None, Some(host)) => { + f.write_str(host)?; + } + (None, None) => {} + (Some(_user), None) => { + unreachable!("BUG: should not be possible to have a user but no host") + } + }; + if let Some(port) = &self.port { + f.write_str(&format!(":{}", port))?; + } + if self.serialize_alternative_form && self.scheme == Scheme::Ssh { + f.write_str(":")?; + } + f.write_str(self.path.to_str().unwrap())?; + Ok(()) + } +} + +impl Url { + pub fn as_ssh(&self) -> Result { + convert::to_ssh_url(self) + } + + pub fn as_https(&self) -> Result { + convert::to_https_url(self) + } +} + +impl FromStr for Url { + type Err = parse::Error; + + fn from_str(s: &str) -> Result { + parse::parse(s.as_bytes().into()) + } +} diff --git a/src/git/url/convert.rs b/src/git/url/convert.rs new file mode 100644 index 000000000..19c31ffd7 --- /dev/null +++ b/src/git/url/convert.rs @@ -0,0 +1,128 @@ +use bstr::ByteSlice; + +use super::{Scheme, Url}; + +#[derive(Debug, PartialEq, thiserror::Error)] +pub enum ConvertError { + #[error("Could not convert {from} to {to}")] + UnsupportedPair { from: Scheme, to: Scheme }, +} + +pub(crate) fn to_https_url(url: &Url) -> Result { + match url.scheme { + Scheme::Https => Ok(url.clone()), + Scheme::Http => Ok(Url { + scheme: Scheme::Https, + ..url.clone() + }), + Scheme::Ssh => Ok(Url { + scheme: 
Scheme::Https, + user: None, + serialize_alternative_form: true, + path: if url.path.starts_with(&[b'/']) { + url.path.clone() + } else { + format!("/{}", url.path.to_str().unwrap()).into() + }, + ..url.clone() + }), + _ => Err(ConvertError::UnsupportedPair { + from: url.scheme.clone(), + to: Scheme::Ssh, + }), + } +} + +pub(crate) fn to_ssh_url(url: &Url) -> Result { + match url.scheme { + Scheme::Ssh => Ok(url.clone()), + Scheme::Http | Scheme::Https => Ok(Url { + scheme: Scheme::Ssh, + user: Some("git".to_string()), + serialize_alternative_form: true, + path: if url.path.starts_with(&[b'/']) { + url.path.trim_start_with(|c| c == '/').into() + } else { + url.path.clone() + }, + ..url.clone() + }), + _ => Err(ConvertError::UnsupportedPair { + from: url.scheme.clone(), + to: Scheme::Ssh, + }), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn to_https_url_test() { + for (input, expected) in [ + ( + "https://github.com/gitbutlerapp/gitbutler.git", + "https://github.com/gitbutlerapp/gitbutler.git", + ), + ( + "http://github.com/gitbutlerapp/gitbutler.git", + "https://github.com/gitbutlerapp/gitbutler.git", + ), + ( + "git@github.com:gitbutlerapp/gitbutler.git", + "https://github.com/gitbutlerapp/gitbutler.git", + ), + ( + "ssh://git@github.com/gitbutlerapp/gitbutler.git", + "https://github.com/gitbutlerapp/gitbutler.git", + ), + ( + "git@bitbucket.org:gitbutler-nikita/test.git", + "https://bitbucket.org/gitbutler-nikita/test.git", + ), + ( + "https://bitbucket.org/gitbutler-nikita/test.git", + "https://bitbucket.org/gitbutler-nikita/test.git", + ), + ] { + let url = input.parse().unwrap(); + let https_url = to_https_url(&url).unwrap(); + assert_eq!(https_url.to_string(), expected, "test case {}", url); + } + } + + #[test] + fn to_ssh_url_test() { + for (input, expected) in [ + ( + "git@github.com:gitbutlerapp/gitbutler.git", + "git@github.com:gitbutlerapp/gitbutler.git", + ), + ( + "https://github.com/gitbutlerapp/gitbutler.git", + 
"git@github.com:gitbutlerapp/gitbutler.git", + ), + ( + "https://github.com/gitbutlerapp/gitbutler.git", + "git@github.com:gitbutlerapp/gitbutler.git", + ), + ( + "ssh://git@github.com/gitbutlerapp/gitbutler.git", + "ssh://git@github.com/gitbutlerapp/gitbutler.git", + ), + ( + "https://bitbucket.org/gitbutler-nikita/test.git", + "git@bitbucket.org:gitbutler-nikita/test.git", + ), + ( + "git@bitbucket.org:gitbutler-nikita/test.git", + "git@bitbucket.org:gitbutler-nikita/test.git", + ), + ] { + let url = input.parse().unwrap(); + let ssh_url = to_ssh_url(&url).unwrap(); + assert_eq!(ssh_url.to_string(), expected, "test case {}", url); + } + } +} diff --git a/src/git/url/parse.rs b/src/git/url/parse.rs new file mode 100644 index 000000000..66a204d9c --- /dev/null +++ b/src/git/url/parse.rs @@ -0,0 +1,147 @@ +use std::borrow::Cow; + +pub use bstr; +use bstr::{BStr, BString, ByteSlice}; + +use super::{Scheme, Url}; + +/// The Error returned by [`parse()`] +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("Could not decode URL as UTF8")] + Utf8(#[from] std::str::Utf8Error), + #[error(transparent)] + Url(#[from] url::ParseError), + #[error("URLs need to specify the path to the repository")] + MissingResourceLocation, + #[error("file URLs require an absolute or relative path to the repository")] + MissingRepositoryPath, + #[error("\"{url}\" is not a valid local path")] + NotALocalFile { url: BString }, + #[error("Relative URLs are not permitted: {url:?}")] + RelativeUrl { url: String }, +} + +fn str_to_protocol(s: &str) -> Scheme { + Scheme::from(s) +} + +fn guess_protocol(url: &[u8]) -> Option<&str> { + match url.find_byte(b':') { + Some(colon_pos) => { + if url[..colon_pos].find_byteset(b"@.").is_some() { + "ssh" + } else { + url.get(colon_pos + 1..).and_then(|from_colon| { + (from_colon.contains(&b'/') || from_colon.contains(&b'\\')).then_some("file") + })? 
+ } + } + None => "file", + } + .into() +} + +/// Extract the path part from an SCP-like URL `[user@]host.xz:path/to/repo.git/` +fn extract_scp_path(url: &str) -> Option<&str> { + url.splitn(2, ':').last() +} + +fn sanitize_for_protocol<'a>(protocol: &str, url: &'a str) -> Cow<'a, str> { + match protocol { + "ssh" => url.replacen(':', "/", 1).into(), + _ => url.into(), + } +} + +fn has_no_explicit_protocol(url: &[u8]) -> bool { + url.find(b"://").is_none() +} + +fn to_owned_url(url: &url::Url) -> Url { + let password = url.password(); + Url { + serialize_alternative_form: false, + scheme: str_to_protocol(url.scheme()), + password: password.map(ToOwned::to_owned), + user: if url.username().is_empty() && password.is_none() { + None + } else { + Some(url.username().into()) + }, + host: url.host_str().map(Into::into), + port: url.port(), + path: url.path().into(), + } +} + +/// Parse the given `bytes` as git url. +/// +/// # Note +/// +/// We cannot and should never have to deal with UTF-16 encoded windows strings, so bytes input is acceptable. +/// For file-paths, we don't expect UTF8 encoding either. 
+pub fn parse(input: &BStr) -> Result { + let guessed_protocol = + guess_protocol(input).ok_or_else(|| Error::NotALocalFile { url: input.into() })?; + let path_without_file_protocol = input.strip_prefix(b"file://"); + if path_without_file_protocol.is_some() + || (has_no_explicit_protocol(input) && guessed_protocol == "file") + { + let path = + path_without_file_protocol.map_or_else(|| input.into(), |stripped_path| stripped_path); + if path.is_empty() { + return Err(Error::MissingRepositoryPath); + } + let input_starts_with_file_protocol = input.starts_with(b"file://"); + if input_starts_with_file_protocol { + let wanted = &[b'/']; + if !wanted.iter().any(|w| path.contains(w)) { + return Err(Error::MissingRepositoryPath); + } + } + return Ok(Url { + scheme: Scheme::File, + path: path.into(), + serialize_alternative_form: !input_starts_with_file_protocol, + ..Default::default() + }); + } + + let url_str = std::str::from_utf8(input)?; + let (mut url, mut scp_path) = match url::Url::parse(url_str) { + Ok(url) => (url, None), + Err(url::ParseError::RelativeUrlWithoutBase) => { + // happens with bare paths as well as scp like paths. The latter contain a ':' past the host portion, + // which we are trying to detect. + ( + url::Url::parse(&format!( + "{}://{}", + guessed_protocol, + sanitize_for_protocol(guessed_protocol, url_str) + ))?, + extract_scp_path(url_str), + ) + } + Err(err) => return Err(err.into()), + }; + // SCP like URLs without user parse as 'something' with the scheme being the 'host'. Hosts always have dots. 
+ if url.scheme().find('.').is_some() { + // try again with prefixed protocol + url = url::Url::parse(&format!("ssh://{}", sanitize_for_protocol("ssh", url_str)))?; + scp_path = extract_scp_path(url_str); + } + if url.path().is_empty() && ["ssh", "git"].contains(&url.scheme()) { + return Err(Error::MissingResourceLocation); + } + if url.cannot_be_a_base() { + return Err(Error::RelativeUrl { url: url.into() }); + } + + let mut url = to_owned_url(&url); + if let Some(path) = scp_path { + url.path = path.into(); + url.serialize_alternative_form = true; + } + Ok(url) +} diff --git a/src/git/url/scheme.rs b/src/git/url/scheme.rs new file mode 100644 index 000000000..31239b5e8 --- /dev/null +++ b/src/git/url/scheme.rs @@ -0,0 +1,54 @@ +/// A scheme or protocol for use in a [`Url`][super::Url]. +/// +/// It defines how to talk to a given repository. +#[derive(Default, PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)] +pub enum Scheme { + /// A local resource that is accessible on the current host. + File, + /// A git daemon, like `File` over TCP/IP. + Git, + /// Launch `git-upload-pack` through an `ssh` tunnel. + #[default] + Ssh, + /// Use the HTTP protocol to talk to git servers. + Http, + /// Use the HTTPS protocol to talk to git servers. + Https, + /// Any other protocol or transport that isn't known at compile time. + /// + /// It's used to support plug-in transports. + Ext(String), +} + +impl<'a> From<&'a str> for Scheme { + fn from(value: &'a str) -> Self { + match value { + "ssh" => Scheme::Ssh, + "file" => Scheme::File, + "git" => Scheme::Git, + "http" => Scheme::Http, + "https" => Scheme::Https, + unknown => Scheme::Ext(unknown.into()), + } + } +} + +impl Scheme { + /// Return ourselves parseable name. 
+ pub fn as_str(&self) -> &str { + match self { + Self::File => "file", + Self::Git => "git", + Self::Ssh => "ssh", + Self::Http => "http", + Self::Https => "https", + Self::Ext(name) => name.as_str(), + } + } +} + +impl std::fmt::Display for Scheme { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(self.as_str()) + } +} diff --git a/src/id.rs b/src/id.rs new file mode 100644 index 000000000..9e33f515a --- /dev/null +++ b/src/id.rs @@ -0,0 +1,118 @@ +//! A generic UUID-based wrapper, via a newtype pattern +//! with a few key integrations used throughout the library. + +use std::{fmt, hash::Hash, marker::PhantomData, str}; + +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use uuid::Uuid; + +/// A generic UUID-based newtype. +/// +/// `Default` is implemented to generate a new UUID +/// via [`Uuid::new_v4`]. +pub struct Id(Uuid, PhantomData); + +impl Hash for Id { + fn hash(&self, state: &mut H) { + self.0.hash(state); + } +} + +impl PartialOrd for Id { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for Id { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.0.cmp(&other.0) + } +} + +impl Id { + #[must_use] + pub fn generate() -> Self { + Id(Uuid::new_v4(), PhantomData) + } +} + +impl Default for Id { + fn default() -> Self { + Self::generate() + } +} + +impl rusqlite::types::FromSql for Id { + fn column_result(value: rusqlite::types::ValueRef<'_>) -> rusqlite::types::FromSqlResult { + Uuid::parse_str(value.as_str()?) 
+ .map(Into::into) + .map_err(|error| rusqlite::types::FromSqlError::Other(Box::new(error))) + } +} + +impl rusqlite::ToSql for Id { + fn to_sql(&self) -> rusqlite::Result> { + Ok(rusqlite::types::ToSqlOutput::from(self.0.to_string())) + } +} + +impl PartialEq for Id { + fn eq(&self, other: &Self) -> bool { + self.0.eq(&other.0) + } +} + +impl Eq for Id {} + +impl From for Id { + fn from(value: Uuid) -> Self { + Self(value, PhantomData) + } +} + +impl<'de, T> Deserialize<'de> for Id { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + Uuid::deserialize(deserializer).map(Into::into) + } +} + +impl Serialize for Id { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + self.0.serialize(serializer) + } +} + +impl Clone for Id { + fn clone(&self) -> Self { + *self + } +} + +impl fmt::Display for Id { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl fmt::Debug for Id { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl Copy for Id {} + +impl str::FromStr for Id { + type Err = uuid::Error; + + fn from_str(s: &str) -> Result { + Uuid::parse_str(s).map(Into::into) + } +} diff --git a/src/keys.rs b/src/keys.rs new file mode 100644 index 000000000..69b03a6b2 --- /dev/null +++ b/src/keys.rs @@ -0,0 +1,6 @@ +mod controller; +mod key; +pub mod storage; + +pub use controller::*; +pub use key::{PrivateKey, PublicKey, SignError}; diff --git a/src/keys/controller.rs b/src/keys/controller.rs new file mode 100644 index 000000000..de9096521 --- /dev/null +++ b/src/keys/controller.rs @@ -0,0 +1,34 @@ +use anyhow::Context; + +use super::{storage::Storage, PrivateKey}; + +#[derive(Clone)] +pub struct Controller { + storage: Storage, +} + +impl Controller { + pub fn new(storage: Storage) -> Self { + Self { storage } + } + + pub fn from_path>(path: P) -> Self { + Self::new(Storage::from_path(path)) + } + + pub fn get_or_create(&self) -> Result { + if 
let Some(key) = self.storage.get().context("failed to get key")? { + Ok(key) + } else { + let key = PrivateKey::generate(); + self.storage.create(&key).context("failed to save key")?; + Ok(key) + } + } +} + +#[derive(Debug, thiserror::Error)] +pub enum GetOrCreateError { + #[error(transparent)] + Other(#[from] anyhow::Error), +} diff --git a/src/keys/key.rs b/src/keys/key.rs new file mode 100644 index 000000000..dacde6a7e --- /dev/null +++ b/src/keys/key.rs @@ -0,0 +1,127 @@ +use std::{fmt, str::FromStr}; + +use ssh_key::{HashAlg, LineEnding, SshSig}; + +use rand::rngs::OsRng; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Eq)] +pub struct PrivateKey(ssh_key::PrivateKey); + +#[derive(Debug, thiserror::Error)] +pub enum SignError { + #[error(transparent)] + Ssh(#[from] ssh_key::Error), +} + +impl PrivateKey { + pub fn generate() -> Self { + Self::default() + } + + pub fn public_key(&self) -> PublicKey { + PublicKey::from(self) + } + + pub fn sign(&self, bytes: &[u8]) -> Result { + let sig = SshSig::sign(&self.0, "git", HashAlg::Sha512, bytes)?; + sig.to_pem(LineEnding::default()).map_err(Into::into) + } +} + +impl Default for PrivateKey { + fn default() -> Self { + let ed25519_keypair = ssh_key::private::Ed25519Keypair::random(&mut OsRng); + let ed25519_key = ssh_key::PrivateKey::from(ed25519_keypair); + Self(ed25519_key) + } +} + +impl PartialEq for PrivateKey { + fn eq(&self, other: &Self) -> bool { + self.0.to_bytes().eq(&other.0.to_bytes()) + } +} + +impl Serialize for PrivateKey { + fn serialize(&self, serializer: S) -> Result { + self.to_string().serialize(serializer) + } +} + +impl FromStr for PrivateKey { + type Err = ssh_key::Error; + + fn from_str(s: &str) -> Result { + let key = ssh_key::PrivateKey::from_openssh(s.as_bytes())?; + Ok(Self(key)) + } +} + +impl fmt::Display for PrivateKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0 + .to_openssh(ssh_key::LineEnding::default()) + .map_err(|_| fmt::Error)? 
+ .fmt(f) + } +} + +impl<'de> Deserialize<'de> for PrivateKey { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + Self::from_str(&s).map_err(serde::de::Error::custom) + } +} + +#[derive(Debug)] +pub struct PublicKey(ssh_key::PublicKey); + +impl From<&PrivateKey> for PublicKey { + fn from(value: &PrivateKey) -> Self { + Self(value.0.public_key().clone()) + } +} + +impl PartialEq for PublicKey { + fn eq(&self, other: &Self) -> bool { + self.0.to_bytes().eq(&other.0.to_bytes()) + } +} + +impl fmt::Display for PublicKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.to_openssh().map_err(|_| fmt::Error)?.fmt(f) + } +} + +impl FromStr for PublicKey { + type Err = ssh_key::Error; + + fn from_str(s: &str) -> Result { + let key = ssh_key::PublicKey::from_openssh(s)?; + Ok(Self(key)) + } +} + +impl Serialize for PublicKey { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + self.to_string().serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for PublicKey { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + Self::from_str(s.as_str()).map_err(serde::de::Error::custom) + } +} diff --git a/src/keys/storage.rs b/src/keys/storage.rs new file mode 100644 index 000000000..e6dac6506 --- /dev/null +++ b/src/keys/storage.rs @@ -0,0 +1,43 @@ +use crate::storage; + +use super::PrivateKey; + +#[derive(Clone)] +pub struct Storage { + storage: storage::Storage, +} + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("IO error: {0}")] + Storage(#[from] storage::Error), + #[error("SSH key error: {0}")] + SSHKey(#[from] ssh_key::Error), +} + +impl Storage { + pub fn new(storage: storage::Storage) -> Storage { + Storage { storage } + } + + pub fn from_path>(path: P) -> Storage { + Storage::new(storage::Storage::new(path)) + } + + pub fn get(&self) -> 
Result, Error> { + self.storage + .read("keys/ed25519") + .map_err(Error::Storage) + .and_then(|s| s.map(|s| s.parse().map_err(Error::SSHKey)).transpose()) + } + + pub fn create(&self, key: &PrivateKey) -> Result<(), Error> { + self.storage + .write("keys/ed25519", &key.to_string()) + .map_err(Error::Storage)?; + self.storage + .write("keys/ed25519.pub", &key.public_key().to_string()) + .map_err(Error::Storage)?; + Ok(()) + } +} diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 000000000..675c84f92 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,41 @@ +#![feature(error_generic_member_access)] +#![cfg_attr(windows, feature(windows_by_handle))] +#![cfg_attr( + all(windows, not(test), not(debug_assertions)), + windows_subsystem = "windows" +)] +// FIXME(qix-): Stuff we want to fix but don't have a lot of time for. +// FIXME(qix-): PRs welcome! +#![allow( + clippy::used_underscore_binding, + clippy::module_name_repetitions, + clippy::struct_field_names, + clippy::too_many_lines +)] + +pub mod askpass; +pub mod assets; +pub mod database; +pub mod dedup; +pub mod deltas; +pub mod error; +pub mod fs; +pub mod gb_repository; +pub mod git; +pub mod id; +pub mod keys; +pub mod lock; +pub mod path; +pub mod project_repository; +pub mod projects; +pub mod reader; +pub mod sessions; +pub mod ssh; +pub mod storage; +pub mod types; +pub mod users; +pub mod virtual_branches; +#[cfg(target_os = "windows")] +pub mod windows; +pub mod writer; +pub mod zip; diff --git a/src/lock.rs b/src/lock.rs new file mode 100644 index 000000000..2783c77a3 --- /dev/null +++ b/src/lock.rs @@ -0,0 +1,51 @@ +use std::sync::{Arc, Mutex}; + +#[derive(Debug, Clone)] +pub struct Dir { + inner: Arc, +} + +impl Dir { + pub fn new>(path: P) -> Result { + Inner::new(path).map(Arc::new).map(|inner| Self { inner }) + } + + pub fn batch( + &self, + action: impl FnOnce(&std::path::Path) -> R, + ) -> Result { + self.inner.batch(action) + } +} + +#[derive(Debug)] +struct Inner { + path: std::path::PathBuf, 
+ flock: Mutex, +} + +impl Inner { + fn new>(path: P) -> Result { + let path = path.as_ref().to_path_buf(); + if !path.exists() { + std::fs::create_dir_all(&path)?; + } else if !path.is_dir() { + return Err(std::io::Error::new( + std::io::ErrorKind::NotFound, + format!("{} is not a directory", path.display()), + )); + } + let flock = fslock::LockFile::open(&path.with_extension("lock")).map(Mutex::new)?; + Ok(Self { path, flock }) + } + + fn batch(&self, action: impl FnOnce(&std::path::Path) -> R) -> Result { + let mut flock = self.flock.lock().unwrap(); + + flock.lock()?; + let result = action(&self.path); + flock.unlock()?; + + Ok(result) + } +} diff --git a/src/path.rs b/src/path.rs new file mode 100644 index 000000000..8f0145387 --- /dev/null +++ b/src/path.rs @@ -0,0 +1,48 @@ +use std::path::{Component, Path, PathBuf}; + +/// Normalize a path to remove any `.` and `..` components +/// and standardize the path separator to the system's default. +/// +/// This trait is automatically implemented for anything convertible +/// to a `&Path` (via `AsRef`). +pub trait Normalize { + /// Normalize a path to remove any `.` and `..` components + /// and standardize the path separator to the system's default. + fn normalize(&self) -> PathBuf; +} + +impl> Normalize for P { + fn normalize(&self) -> PathBuf { + // Note: Copied from Cargo's codebase: + // https://github.com/rust-lang/cargo/blob/2e4cfc2b7d43328b207879228a2ca7d427d188bb/src/cargo/util/paths.rs#L65-L90 + // License: MIT OR Apache-2.0 (this function only) + // + // Small modifications made by GitButler. + + let path = self.as_ref(); + let mut components = path.components().peekable(); + let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().copied() { + components.next(); + PathBuf::from(c.as_os_str()) + } else { + PathBuf::new() + }; + + for component in components { + match component { + Component::Prefix(..) 
=> unreachable!(), + Component::RootDir => { + ret.push(component.as_os_str()); + } + Component::CurDir => {} + Component::ParentDir => { + ret.pop(); + } + Component::Normal(c) => { + ret.push(c); + } + } + } + ret + } +} diff --git a/src/project_repository.rs b/src/project_repository.rs new file mode 100644 index 000000000..79ba8b1d0 --- /dev/null +++ b/src/project_repository.rs @@ -0,0 +1,8 @@ +mod config; +pub mod conflicts; +mod repository; + +pub use config::Config; +pub use repository::{LogUntil, OpenError, RemoteError, Repository}; + +pub mod signatures; diff --git a/src/project_repository/config.rs b/src/project_repository/config.rs new file mode 100644 index 000000000..51943b5cb --- /dev/null +++ b/src/project_repository/config.rs @@ -0,0 +1,51 @@ +use crate::git; + +pub struct Config<'a> { + git_repository: &'a git::Repository, +} + +impl<'a> From<&'a git::Repository> for Config<'a> { + fn from(value: &'a git::Repository) -> Self { + Self { + git_repository: value, + } + } +} + +impl Config<'_> { + pub fn sign_commits(&self) -> Result { + let sign_commits = self + .git_repository + .config()? + .get_bool("gitbutler.signCommits") + .unwrap_or(Some(false)) + .unwrap_or(false); + Ok(sign_commits) + } + + pub fn user_real_comitter(&self) -> Result { + let gb_comitter = self + .git_repository + .config()? 
+ .get_string("gitbutler.gitbutlerCommitter") + .unwrap_or(Some("0".to_string())) + .unwrap_or("0".to_string()); + Ok(gb_comitter == "0") + } + + pub fn user_name(&self) -> Result, git::Error> { + self.git_repository.config()?.get_string("user.name") + } + + pub fn user_email(&self) -> Result, git::Error> { + self.git_repository.config()?.get_string("user.email") + } + + pub fn set_local(&self, key: &str, val: &str) -> Result<(), git::Error> { + self.git_repository.config()?.set_local(key, val) + } + + pub fn get_local(&self, key: &str) -> Result, git::Error> { + self.git_repository.config()?.get_local(key) + } +} diff --git a/src/project_repository/conflicts.rs b/src/project_repository/conflicts.rs new file mode 100644 index 000000000..88fe96b9e --- /dev/null +++ b/src/project_repository/conflicts.rs @@ -0,0 +1,144 @@ +// stuff to manage merge conflict state +// this is the dumbest possible way to do this, but it is a placeholder +// conflicts are stored one path per line in .git/conflicts +// merge parent is stored in .git/base_merge_parent +// conflicts are removed as they are resolved, the conflicts file is removed when there are no more conflicts +// the merge parent file is removed when the merge is complete + +use std::{ + io::{BufRead, Write}, + path::{Path, PathBuf}, +}; + +use anyhow::Result; +use itertools::Itertools; + +use crate::git; + +use super::Repository; + +pub fn mark, A: AsRef<[P]>>( + repository: &Repository, + paths: A, + parent: Option, +) -> Result<()> { + let paths = paths.as_ref(); + if paths.is_empty() { + return Ok(()); + } + let conflicts_path = repository.git_repository.path().join("conflicts"); + // write all the file paths to a file on disk + let mut file = std::fs::File::create(conflicts_path)?; + for path in paths { + file.write_all(path.as_ref().as_os_str().as_encoded_bytes())?; + file.write_all(b"\n")?; + } + + if let Some(parent) = parent { + let merge_path = repository.git_repository.path().join("base_merge_parent"); + // 
write all the file paths to a file on disk + let mut file = std::fs::File::create(merge_path)?; + file.write_all(parent.to_string().as_bytes())?; + } + + Ok(()) +} + +pub fn merge_parent(repository: &Repository) -> Result> { + let merge_path = repository.git_repository.path().join("base_merge_parent"); + if !merge_path.exists() { + return Ok(None); + } + + let file = std::fs::File::open(merge_path)?; + let reader = std::io::BufReader::new(file); + let mut lines = reader.lines(); + if let Some(parent) = lines.next() { + let parent = parent?; + let parent: git::Oid = parent.parse()?; + Ok(Some(parent)) + } else { + Ok(None) + } +} + +pub fn resolve>(repository: &Repository, path: P) -> Result<()> { + let path = path.as_ref(); + let conflicts_path = repository.git_repository.path().join("conflicts"); + let file = std::fs::File::open(conflicts_path.clone())?; + let reader = std::io::BufReader::new(file); + let mut remaining = Vec::new(); + for line in reader.lines().map_ok(PathBuf::from) { + let line = line?; + if line != path { + remaining.push(line); + } + } + + // remove file + std::fs::remove_file(conflicts_path)?; + + // re-write file if needed + if !remaining.is_empty() { + mark(repository, &remaining, None)?; + } + Ok(()) +} + +pub fn conflicting_files(repository: &Repository) -> Result> { + let conflicts_path = repository.git_repository.path().join("conflicts"); + if !conflicts_path.exists() { + return Ok(vec![]); + } + + let file = std::fs::File::open(conflicts_path)?; + let reader = std::io::BufReader::new(file); + Ok(reader.lines().map_while(Result::ok).collect()) +} + +pub fn is_conflicting>(repository: &Repository, path: Option

) -> Result { + let conflicts_path = repository.git_repository.path().join("conflicts"); + if !conflicts_path.exists() { + return Ok(false); + } + + let file = std::fs::File::open(conflicts_path)?; + let reader = std::io::BufReader::new(file); + let mut files = reader.lines().map_ok(PathBuf::from); + if let Some(pathname) = path { + let pathname = pathname.as_ref(); + + // check if pathname is one of the lines in conflicts_path file + for line in files { + let line = line?; + + if line == pathname { + return Ok(true); + } + } + Ok(false) + } else { + Ok(files.next().transpose().map(|x| x.is_some())?) + } +} + +// is this project still in a resolving conflict state? +// - could be that there are no more conflicts, but the state is not committed +pub fn is_resolving(repository: &Repository) -> bool { + repository + .git_repository + .path() + .join("base_merge_parent") + .exists() +} + +pub fn clear(repository: &Repository) -> Result<()> { + let merge_path = repository.git_repository.path().join("base_merge_parent"); + std::fs::remove_file(merge_path)?; + + for file in conflicting_files(repository)? 
{ + resolve(repository, &file)?; + } + + Ok(()) +} diff --git a/src/project_repository/repository.rs b/src/project_repository/repository.rs new file mode 100644 index 000000000..064572483 --- /dev/null +++ b/src/project_repository/repository.rs @@ -0,0 +1,698 @@ +use std::{ + path, + str::FromStr, + sync::{atomic::AtomicUsize, Arc}, +}; + +use anyhow::{Context, Result}; + +use crate::{ + askpass, + askpass::AskpassBroker, + git::{self, credentials::HelpError, Url}, + keys, + projects::{self, AuthKey}, + ssh, users, + virtual_branches::{Branch, BranchId}, +}; + +use super::conflicts; + +pub struct Repository { + pub git_repository: git::Repository, + project: projects::Project, +} + +#[derive(Debug, thiserror::Error)] +pub enum OpenError { + #[error("repository not found at {0}")] + NotFound(path::PathBuf), + #[error(transparent)] + Other(anyhow::Error), +} + +impl From for crate::error::Error { + fn from(value: OpenError) -> Self { + match value { + OpenError::NotFound(path) => crate::error::Error::UserError { + code: crate::error::Code::Projects, + message: format!("{} not found", path.display()), + }, + OpenError::Other(error) => { + tracing::error!(?error); + crate::error::Error::Unknown + } + } + } +} + +impl Repository { + pub fn open(project: &projects::Project) -> Result { + git::Repository::open(&project.path) + .map_err(|error| match error { + git::Error::NotFound(_) => OpenError::NotFound(project.path.clone()), + other => OpenError::Other(other.into()), + }) + .map(|git_repository| { + // XXX(qix-): This is a temporary measure to disable GC on the project repository. + // XXX(qix-): We do this because the internal repository we use to store the "virtual" + // XXX(qix-): refs and information use Git's alternative-objects mechanism to refer + // XXX(qix-): to the project repository's objects. 
However, the project repository + // XXX(qix-): has no knowledge of these refs, and will GC them away (usually after + // XXX(qix-): about 2 weeks) which will corrupt the internal repository. + // XXX(qix-): + // XXX(qix-): We will ultimately move away from an internal repository for a variety + // XXX(qix-): of reasons, but for now, this is a simple, short-term solution that we + // XXX(qix-): can clean up later on. We're aware this isn't ideal. + if let Ok(config) = git_repository.config().as_mut(){ + let should_set = match config.get_bool("gitbutler.didSetPrune") { + Ok(None | Some(false)) => true, + Ok(Some(true)) => false, + Err(error) => { + tracing::warn!( + "failed to get gitbutler.didSetPrune for repository at {}; cannot disable gc: {}", + project.path.display(), + error + ); + false + } + }; + + if should_set { + if let Err(error) = config.set_str("gc.pruneExpire", "never").and_then(|()| config.set_bool("gitbutler.didSetPrune", true)) { + tracing::warn!( + "failed to set gc.auto to false for repository at {}; cannot disable gc: {}", + project.path.display(), + error + ); + } + } + } else { + tracing::warn!( + "failed to get config for repository at {}; cannot disable gc", + project.path.display() + ); + } + + git_repository + }) + .map(|git_repository| Self { + git_repository, + project: project.clone(), + }) + } + + pub fn is_resolving(&self) -> bool { + conflicts::is_resolving(self) + } + + pub fn path(&self) -> &path::Path { + path::Path::new(&self.project.path) + } + + pub fn config(&self) -> super::Config { + super::Config::from(&self.git_repository) + } + + pub fn git_signatures<'a>( + &self, + user: Option<&users::User>, + ) -> Result<(git::Signature<'a>, git::Signature<'a>)> { + super::signatures::signatures(self, user).context("failed to get signatures") + } + + pub fn project(&self) -> &projects::Project { + &self.project + } + + pub fn set_project(&mut self, project: &projects::Project) { + self.project = project.clone(); + } + + pub fn 
git_index_size(&self) -> Result { + let head = self.git_repository.index_size()?; + Ok(head) + } + + pub fn get_head(&self) -> Result { + let head = self.git_repository.head()?; + Ok(head) + } + + pub fn get_wd_tree(&self) -> Result { + let tree = self.git_repository.get_wd_tree()?; + Ok(tree) + } + + pub fn is_path_ignored>(&self, path: P) -> Result { + let path = path.as_ref(); + let ignored = self.git_repository.is_path_ignored(path)?; + Ok(ignored) + } + + pub fn root(&self) -> &std::path::Path { + self.git_repository.path().parent().unwrap() + } + + pub fn git_remote_branches(&self) -> Result> { + self.git_repository + .branches(Some(git2::BranchType::Remote))? + .flatten() + .map(|(branch, _)| branch) + .map(|branch| { + git::RemoteRefname::try_from(&branch) + .context("failed to convert branch to remote name") + }) + .collect::>>() + } + + pub fn git_test_push( + &self, + credentials: &git::credentials::Helper, + remote_name: &str, + branch_name: &str, + askpass: Option<(AskpassBroker, Option)>, + ) -> Result<()> { + let target_branch_refname = + git::Refname::from_str(&format!("refs/remotes/{}/{}", remote_name, branch_name))?; + let branch = self.git_repository.find_branch(&target_branch_refname)?; + let commit_id = branch.peel_to_commit()?.id(); + + let now = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or(std::time::Duration::from_secs(0)) + .as_millis() + .to_string(); + let branch_name = format!("test-push-{}", now); + + let refname = git::RemoteRefname::from_str(&format!( + "refs/remotes/{}/{}", + remote_name, branch_name, + ))?; + + match self.push( + &commit_id, + &refname, + false, + credentials, + None, + askpass.clone(), + ) { + Ok(()) => Ok(()), + Err(e) => Err(anyhow::anyhow!(e.to_string())), + }?; + + let empty_refspec = Some(format!(":refs/heads/{}", branch_name)); + match self.push( + &commit_id, + &refname, + false, + credentials, + empty_refspec, + askpass, + ) { + Ok(()) => Ok(()), + Err(e) => 
Err(anyhow::anyhow!(e.to_string())), + }?; + + Ok(()) + } + + pub fn add_branch_reference(&self, branch: &Branch) -> Result<()> { + let (should_write, with_force) = + match self.git_repository.find_reference(&branch.refname().into()) { + Ok(reference) => match reference.target() { + Some(head_oid) => Ok((head_oid != branch.head, true)), + None => Ok((true, true)), + }, + Err(git::Error::NotFound(_)) => Ok((true, false)), + Err(error) => Err(error), + } + .context("failed to lookup reference")?; + + if should_write { + self.git_repository + .reference( + &branch.refname().into(), + branch.head, + with_force, + "new vbranch", + ) + .context("failed to create branch reference")?; + } + + Ok(()) + } + + pub fn delete_branch_reference(&self, branch: &Branch) -> Result<()> { + match self.git_repository.find_reference(&branch.refname().into()) { + Ok(mut reference) => { + reference + .delete() + .context("failed to delete branch reference")?; + Ok(()) + } + Err(git::Error::NotFound(_)) => Ok(()), + Err(error) => Err(error), + } + .context("failed to lookup reference") + } + + // returns a list of commit oids from the first oid to the second oid + pub fn l(&self, from: git::Oid, to: LogUntil) -> Result> { + match to { + LogUntil::Commit(oid) => { + let mut revwalk = self + .git_repository + .revwalk() + .context("failed to create revwalk")?; + revwalk + .push(from.into()) + .context(format!("failed to push {}", from))?; + revwalk + .hide(oid.into()) + .context(format!("failed to hide {}", oid))?; + revwalk + .map(|oid| oid.map(Into::into)) + .collect::, _>>() + } + LogUntil::Take(n) => { + let mut revwalk = self + .git_repository + .revwalk() + .context("failed to create revwalk")?; + revwalk + .push(from.into()) + .context(format!("failed to push {}", from))?; + revwalk + .take(n) + .map(|oid| oid.map(Into::into)) + .collect::, _>>() + } + LogUntil::When(cond) => { + let mut revwalk = self + .git_repository + .revwalk() + .context("failed to create revwalk")?; + revwalk + 
.push(from.into()) + .context(format!("failed to push {}", from))?; + let mut oids: Vec = vec![]; + for oid in revwalk { + let oid = oid.context("failed to get oid")?; + oids.push(oid.into()); + + let commit = self + .git_repository + .find_commit(oid.into()) + .context("failed to find commit")?; + + if cond(&commit).context("failed to check condition")? { + break; + } + } + Ok(oids) + } + LogUntil::End => { + let mut revwalk = self + .git_repository + .revwalk() + .context("failed to create revwalk")?; + revwalk + .push(from.into()) + .context(format!("failed to push {}", from))?; + revwalk + .map(|oid| oid.map(Into::into)) + .collect::, _>>() + } + } + .context("failed to collect oids") + } + + // returns a list of commits from the first oid to the second oid + pub fn log(&self, from: git::Oid, to: LogUntil) -> Result> { + self.l(from, to)? + .into_iter() + .map(|oid| self.git_repository.find_commit(oid)) + .collect::, _>>() + .context("failed to collect commits") + } + + // returns the number of commits between the first oid to the second oid + pub fn distance(&self, from: git::Oid, to: git::Oid) -> Result { + let oids = self.l(from, LogUntil::Commit(to))?; + Ok(oids.len().try_into()?) + } + + pub fn commit( + &self, + user: Option<&users::User>, + message: &str, + tree: &git::Tree, + parents: &[&git::Commit], + signing_key: Option<&keys::PrivateKey>, + ) -> Result { + let (author, committer) = self.git_signatures(user)?; + if let Some(key) = signing_key { + self.git_repository + .commit_signed(&author, message, tree, parents, key) + .context("failed to commit signed") + } else { + self.git_repository + .commit(None, &author, &committer, message, tree, parents) + .context("failed to commit") + } + } + + pub fn push_to_gitbutler_server( + &self, + user: Option<&users::User>, + ref_specs: &[&str], + ) -> Result { + let url = self + .project + .api + .as_ref() + .ok_or(RemoteError::Other(anyhow::anyhow!("api not set")))? 
+ .code_git_url + .as_ref() + .ok_or(RemoteError::Other(anyhow::anyhow!("code_git_url not set")))? + .as_str() + .parse::() + .map_err(|e| RemoteError::Other(e.into()))?; + + tracing::debug!( + project_id = %self.project.id, + %url, + "pushing code to gb repo", + ); + + let access_token = user + .map(|user| user.access_token.clone()) + .ok_or(RemoteError::Auth)?; + + let mut callbacks = git2::RemoteCallbacks::new(); + if self.project.omit_certificate_check.unwrap_or(false) { + callbacks.certificate_check(|_, _| Ok(git2::CertificateCheckStatus::CertificateOk)); + } + let bytes_pushed = Arc::new(AtomicUsize::new(0)); + let total_objects = Arc::new(AtomicUsize::new(0)); + { + let byte_counter = Arc::::clone(&bytes_pushed); + let total_counter = Arc::::clone(&total_objects); + callbacks.push_transfer_progress(move |_current, total, bytes| { + byte_counter.store(bytes, std::sync::atomic::Ordering::Relaxed); + total_counter.store(total, std::sync::atomic::Ordering::Relaxed); + }); + } + + let mut push_options = git2::PushOptions::new(); + push_options.remote_callbacks(callbacks); + let auth_header = format!("Authorization: {}", access_token); + let headers = &[auth_header.as_str()]; + push_options.custom_headers(headers); + + let mut remote = self + .git_repository + .remote_anonymous(&url) + .map_err(|e| RemoteError::Other(e.into()))?; + + remote + .push(ref_specs, Some(&mut push_options)) + .map_err(|error| match error { + git::Error::Network(error) => { + tracing::warn!(project_id = %self.project.id, ?error, "git push failed",); + RemoteError::Network + } + git::Error::Auth(error) => { + tracing::warn!(project_id = %self.project.id, ?error, "git push failed",); + RemoteError::Auth + } + error => RemoteError::Other(error.into()), + })?; + + let bytes_pushed = bytes_pushed.load(std::sync::atomic::Ordering::Relaxed); + let total_objects_pushed = total_objects.load(std::sync::atomic::Ordering::Relaxed); + + tracing::debug!( + project_id = %self.project.id, + ref_spec = 
ref_specs.join(" "), + bytes = bytes_pushed, + objects = total_objects_pushed, + "pushed to gb repo tmp ref", + ); + + Ok(total_objects_pushed > 0) + } + + pub fn push( + &self, + head: &git::Oid, + branch: &git::RemoteRefname, + with_force: bool, + credentials: &git::credentials::Helper, + refspec: Option, + askpass_broker: Option<(AskpassBroker, Option)>, + ) -> Result<(), RemoteError> { + let refspec = refspec.unwrap_or_else(|| { + if with_force { + format!("+{}:refs/heads/{}", head, branch.branch()) + } else { + format!("{}:refs/heads/{}", head, branch.branch()) + } + }); + + // NOTE(qix-): This is a nasty hack, however the codebase isn't structured + // NOTE(qix-): in a way that allows us to really incorporate new backends + // NOTE(qix-): without a lot of work. This is a temporary measure to + // NOTE(qix-): work around a time-sensitive change that was necessary + // NOTE(qix-): without having to refactor a large portion of the codebase. + if self.project.preferred_key == AuthKey::SystemExecutable { + let path = self.path().to_path_buf(); + let remote = branch.remote().to_string(); + return std::thread::spawn(move || { + tokio::runtime::Runtime::new() + .unwrap() + .block_on(gitbutler_git::push( + path, + gitbutler_git::tokio::TokioExecutor, + &remote, + gitbutler_git::RefSpec::parse(refspec).unwrap(), + with_force, + handle_git_prompt_push, + askpass_broker, + )) + }) + .join() + .unwrap() + .map_err(|e| RemoteError::Other(e.into())); + } + + let auth_flows = credentials.help(self, branch.remote())?; + for (mut remote, callbacks) in auth_flows { + if let Some(url) = remote.url().context("failed to get remote url")? 
{ + if !self.project.omit_certificate_check.unwrap_or(false) { + ssh::check_known_host(&url).context("failed to check known host")?; + } + } + let mut update_refs_error: Option = None; + for callback in callbacks { + let mut cbs: git2::RemoteCallbacks = callback.into(); + if self.project.omit_certificate_check.unwrap_or(false) { + cbs.certificate_check(|_, _| Ok(git2::CertificateCheckStatus::CertificateOk)); + } + cbs.push_update_reference(|_reference: &str, status: Option<&str>| { + if let Some(status) = status { + update_refs_error = Some(git2::Error::from_str(status)); + return Err(git2::Error::from_str(status)); + }; + Ok(()) + }); + + let push_result = remote.push( + &[refspec.as_str()], + Some(&mut git2::PushOptions::new().remote_callbacks(cbs)), + ); + match push_result { + Ok(()) => { + tracing::info!( + project_id = %self.project.id, + remote = %branch.remote(), + %head, + branch = branch.branch(), + "pushed git branch" + ); + return Ok(()); + } + Err(git::Error::Auth(error) | git::Error::Http(error)) => { + tracing::warn!(project_id = %self.project.id, ?error, "git push failed"); + continue; + } + Err(git::Error::Network(error)) => { + tracing::warn!(project_id = %self.project.id, ?error, "git push failed"); + return Err(RemoteError::Network); + } + Err(error) => { + if let Some(e) = update_refs_error.as_ref() { + return Err(RemoteError::Other(anyhow::anyhow!(e.to_string()))); + } + return Err(RemoteError::Other(error.into())); + } + } + } + } + + Err(RemoteError::Auth) + } + + pub fn fetch( + &self, + remote_name: &str, + credentials: &git::credentials::Helper, + askpass: Option<(AskpassBroker, String)>, + ) -> Result<(), RemoteError> { + let refspec = format!("+refs/heads/*:refs/remotes/{}/*", remote_name); + + // NOTE(qix-): This is a nasty hack, however the codebase isn't structured + // NOTE(qix-): in a way that allows us to really incorporate new backends + // NOTE(qix-): without a lot of work. 
This is a temporary measure to + // NOTE(qix-): work around a time-sensitive change that was necessary + // NOTE(qix-): without having to refactor a large portion of the codebase. + if self.project.preferred_key == AuthKey::SystemExecutable { + let path = self.path().to_path_buf(); + let remote = remote_name.to_string(); + return std::thread::spawn(move || { + tokio::runtime::Runtime::new() + .unwrap() + .block_on(gitbutler_git::fetch( + path, + gitbutler_git::tokio::TokioExecutor, + &remote, + gitbutler_git::RefSpec::parse(refspec).unwrap(), + handle_git_prompt_fetch, + askpass, + )) + }) + .join() + .unwrap() + .map_err(|e| RemoteError::Other(e.into())); + } + + let auth_flows = credentials.help(self, remote_name)?; + for (mut remote, callbacks) in auth_flows { + if let Some(url) = remote.url().context("failed to get remote url")? { + if !self.project.omit_certificate_check.unwrap_or(false) { + ssh::check_known_host(&url).context("failed to check known host")?; + } + } + for callback in callbacks { + let mut fetch_opts = git2::FetchOptions::new(); + let mut cbs: git2::RemoteCallbacks = callback.into(); + if self.project.omit_certificate_check.unwrap_or(false) { + cbs.certificate_check(|_, _| Ok(git2::CertificateCheckStatus::CertificateOk)); + } + fetch_opts.remote_callbacks(cbs); + fetch_opts.prune(git2::FetchPrune::On); + + match remote.fetch(&[&refspec], Some(&mut fetch_opts)) { + Ok(()) => { + tracing::info!(project_id = %self.project.id, %refspec, "git fetched"); + return Ok(()); + } + Err(git::Error::Auth(error) | git::Error::Http(error)) => { + tracing::warn!(project_id = %self.project.id, ?error, "fetch failed"); + continue; + } + Err(git::Error::Network(error)) => { + tracing::warn!(project_id = %self.project.id, ?error, "fetch failed"); + return Err(RemoteError::Network); + } + Err(error) => return Err(RemoteError::Other(error.into())), + } + } + } + + Err(RemoteError::Auth) + } +} + +#[derive(Debug, thiserror::Error)] +pub enum RemoteError { + 
#[error(transparent)] + Help(#[from] HelpError), + #[error("network failed")] + Network, + #[error("authentication failed")] + Auth, + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +impl From for crate::error::Error { + fn from(value: RemoteError) -> Self { + match value { + RemoteError::Help(error) => error.into(), + RemoteError::Network => crate::error::Error::UserError { + code: crate::error::Code::ProjectGitRemote, + message: "Network erorr occured".to_string(), + }, + RemoteError::Auth => crate::error::Error::UserError { + code: crate::error::Code::ProjectGitAuth, + message: "Project remote authentication error".to_string(), + }, + RemoteError::Other(error) => { + tracing::error!(?error); + crate::error::Error::Unknown + } + } + } +} + +type OidFilter = dyn Fn(&git::Commit) -> Result; + +pub enum LogUntil { + Commit(git::Oid), + Take(usize), + When(Box), + End, +} + +#[derive(Debug, Clone, serde::Serialize)] +struct AskpassPromptPushContext { + branch_id: Option, +} + +#[derive(Debug, Clone, serde::Serialize)] +struct AskpassPromptFetchContext { + action: String, +} + +async fn handle_git_prompt_push( + prompt: String, + askpass: Option<(AskpassBroker, Option)>, +) -> Option { + if let Some((askpass_broker, branch_id)) = askpass { + tracing::info!("received prompt for branch push {branch_id:?}: {prompt:?}"); + askpass_broker + .submit_prompt(prompt, askpass::Context::Push { branch_id }) + .await + } else { + tracing::warn!("received askpass push prompt but no broker was supplied; returning None"); + None + } +} + +async fn handle_git_prompt_fetch( + prompt: String, + askpass: Option<(AskpassBroker, String)>, +) -> Option { + if let Some((askpass_broker, action)) = askpass { + tracing::info!("received prompt for fetch with action {action:?}: {prompt:?}"); + askpass_broker + .submit_prompt(prompt, askpass::Context::Fetch { action }) + .await + } else { + tracing::warn!("received askpass fetch prompt but no broker was supplied; returning None"); + 
None + } +} diff --git a/src/project_repository/signatures.rs b/src/project_repository/signatures.rs new file mode 100644 index 000000000..392e3360f --- /dev/null +++ b/src/project_repository/signatures.rs @@ -0,0 +1,22 @@ +use crate::{git, users}; + +pub fn signatures<'a>( + project_repository: &super::Repository, + user: Option<&users::User>, +) -> Result<(git::Signature<'a>, git::Signature<'a>), git::Error> { + let config = project_repository.config(); + + let author = match (user, config.user_name()?, config.user_email()?) { + (_, Some(name), Some(email)) => git::Signature::now(&name, &email)?, + (Some(user), _, _) => git::Signature::try_from(user)?, + _ => git::Signature::now("GitButler", "gitbutler@gitbutler.com")?, + }; + + let comitter = if config.user_real_comitter()? { + author.clone() + } else { + git::Signature::now("GitButler", "gitbutler@gitbutler.com")? + }; + + Ok((author, comitter)) +} diff --git a/src/projects.rs b/src/projects.rs new file mode 100644 index 000000000..8a139a6b7 --- /dev/null +++ b/src/projects.rs @@ -0,0 +1,9 @@ +pub mod controller; +mod project; +pub mod storage; + +pub use controller::*; +pub use project::{AuthKey, CodePushState, FetchResult, Project, ProjectId}; +pub use storage::UpdateRequest; + +pub use project::ApiProject; diff --git a/src/projects/controller.rs b/src/projects/controller.rs new file mode 100644 index 000000000..a3ccb02a1 --- /dev/null +++ b/src/projects/controller.rs @@ -0,0 +1,344 @@ +use super::{storage, storage::UpdateRequest, Project, ProjectId}; +use crate::{gb_repository, project_repository, users}; +use anyhow::Context; +use async_trait::async_trait; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +#[async_trait] +pub trait Watchers { + fn watch(&self, project: &Project) -> anyhow::Result<()>; + async fn stop(&self, id: ProjectId) -> anyhow::Result<()>; + async fn fetch(&self, id: ProjectId) -> anyhow::Result<()>; + async fn push(&self, id: ProjectId) -> anyhow::Result<()>; +} + 
+#[derive(Clone)] +pub struct Controller { + local_data_dir: PathBuf, + projects_storage: storage::Storage, + users: users::Controller, + watchers: Option>, +} + +impl Controller { + pub fn new( + local_data_dir: PathBuf, + projects_storage: storage::Storage, + users: users::Controller, + watchers: Option, + ) -> Self { + Self { + local_data_dir, + projects_storage, + users, + watchers: watchers.map(|w| Arc::new(w) as Arc<_>), + } + } + + pub fn from_path>(path: P) -> Self { + let pathbuf = path.as_ref().to_path_buf(); + Self { + local_data_dir: pathbuf.clone(), + projects_storage: storage::Storage::from_path(&pathbuf), + users: users::Controller::from_path(&pathbuf), + watchers: None, + } + } + + pub fn add>(&self, path: P) -> Result { + let path = path.as_ref(); + let all_projects = self + .projects_storage + .list() + .context("failed to list projects from storage")?; + if all_projects.iter().any(|project| project.path == path) { + return Err(AddError::AlreadyExists); + } + if !path.exists() { + return Err(AddError::PathNotFound); + } + if !path.is_dir() { + return Err(AddError::NotADirectory); + } + if !path.join(".git").exists() { + return Err(AddError::NotAGitRepository); + }; + + if path.join(".gitmodules").exists() { + return Err(AddError::SubmodulesNotSupported); + } + + let id = uuid::Uuid::new_v4().to_string(); + + // title is the base name of the file + let title = path + .iter() + .last() + .map_or_else(|| id.clone(), |p| p.to_str().unwrap().to_string()); + + let project = Project { + id: ProjectId::generate(), + title, + path: path.to_path_buf(), + api: None, + use_diff_context: Some(true), + ..Default::default() + }; + + // create all required directories to avoid racing later + let user = self.users.get_user()?; + let project_repository = project_repository::Repository::open(&project)?; + gb_repository::Repository::open(&self.local_data_dir, &project_repository, user.as_ref()) + .context("failed to open repository")?; + + self.projects_storage + 
.add(&project) + .context("failed to add project to storage")?; + + // Create a .git/gitbutler directory for app data + if let Err(error) = std::fs::create_dir_all(project.gb_dir()) { + tracing::error!(project_id = %project.id, ?error, "failed to create {:?} on project add", project.gb_dir()); + } + + if let Some(watchers) = &self.watchers { + watchers.watch(&project)?; + } + + Ok(project) + } + + pub async fn update(&self, project: &UpdateRequest) -> Result { + if let Some(super::AuthKey::Local { + private_key_path, .. + }) = &project.preferred_key + { + use resolve_path::PathResolveExt; + let private_key_path = private_key_path.resolve(); + + if !private_key_path.exists() { + return Err(UpdateError::Validation(UpdateValidationError::KeyNotFound( + private_key_path.to_path_buf(), + ))); + } + + if !private_key_path.is_file() { + return Err(UpdateError::Validation(UpdateValidationError::KeyNotFile( + private_key_path.to_path_buf(), + ))); + } + } + + let updated = self + .projects_storage + .update(project) + .map_err(|error| match error { + super::storage::Error::NotFound => UpdateError::NotFound, + error => UpdateError::Other(error.into()), + })?; + + if let Some(watchers) = &self.watchers { + if let Some(api) = &project.api { + if api.sync { + if let Err(error) = watchers.fetch(project.id).await { + tracing::error!( + project_id = %project.id, + ?error, + "failed to post fetch project event" + ); + } + } + + if let Err(error) = watchers.push(project.id).await { + tracing::error!( + project_id = %project.id, + ?error, + "failed to post push project event" + ); + } + } + } + + Ok(updated) + } + + pub fn get(&self, id: &ProjectId) -> Result { + let project = self.projects_storage.get(id).map_err(|error| match error { + super::storage::Error::NotFound => GetError::NotFound, + error => GetError::Other(error.into()), + }); + if let Ok(project) = &project { + if !project.gb_dir().exists() { + if let Err(error) = std::fs::create_dir_all(project.gb_dir()) { + 
tracing::error!(project_id = %project.id, ?error, "failed to create {:?} on project get", project.gb_dir()); + } + } + // Clean up old virtual_branches.toml that was never used + if project + .path + .join(".git") + .join("virtual_branches.toml") + .exists() + { + if let Err(error) = + std::fs::remove_file(project.path.join(".git").join("virtual_branches.toml")) + { + tracing::error!(project_id = %project.id, ?error, "failed to remove old virtual_branches.toml"); + } + } + } + project + } + + pub fn list(&self) -> Result, ListError> { + self.projects_storage + .list() + .map_err(|error| ListError::Other(error.into())) + } + + pub async fn delete(&self, id: &ProjectId) -> Result<(), DeleteError> { + let project = match self.projects_storage.get(id) { + Ok(project) => Ok(project), + Err(super::storage::Error::NotFound) => return Ok(()), + Err(error) => Err(DeleteError::Other(error.into())), + }?; + + if let Some(watchers) = &self.watchers { + if let Err(error) = watchers.stop(*id).await { + tracing::error!( + project_id = %id, + ?error, + "failed to stop watcher for project", + ); + } + } + + self.projects_storage + .purge(&project.id) + .map_err(|error| DeleteError::Other(error.into()))?; + + if let Err(error) = std::fs::remove_dir_all( + self.local_data_dir + .join("projects") + .join(project.id.to_string()), + ) { + tracing::error!(project_id = %id, ?error, "failed to remove project data",); + } + + if let Err(error) = std::fs::remove_file(project.path.join(".git/gitbutler.json")) { + tracing::error!(project_id = %project.id, ?error, "failed to remove .git/gitbutler.json data",); + } + + let virtual_branches_path = project.path.join(".git/virtual_branches.toml"); + if virtual_branches_path.exists() { + if let Err(error) = std::fs::remove_file(virtual_branches_path) { + tracing::error!(project_id = %project.id, ?error, "failed to remove .git/virtual_branches.toml data",); + } + } + + Ok(()) + } + + pub fn get_local_config( + &self, + id: &ProjectId, + key: &str, + 
) -> Result, ConfigError> { + let project = self.projects_storage.get(id).map_err(|error| match error { + super::storage::Error::NotFound => ConfigError::NotFound, + error => ConfigError::Other(error.into()), + })?; + + let repo = project_repository::Repository::open(&project) + .map_err(|e| ConfigError::Other(e.into()))?; + repo.config() + .get_local(key) + .map_err(|e| ConfigError::Other(e.into())) + } + + pub fn set_local_config( + &self, + id: &ProjectId, + key: &str, + value: &str, + ) -> Result<(), ConfigError> { + let project = self.projects_storage.get(id).map_err(|error| match error { + super::storage::Error::NotFound => ConfigError::NotFound, + error => ConfigError::Other(error.into()), + })?; + + let repo = project_repository::Repository::open(&project) + .map_err(|e| ConfigError::Other(e.into()))?; + repo.config() + .set_local(key, value) + .map_err(|e| ConfigError::Other(e.into()))?; + + Ok(()) + } +} + +#[derive(Debug, thiserror::Error)] +pub enum ConfigError { + #[error("project not found")] + NotFound, + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum DeleteError { + #[error(transparent)] + Other(anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum ListError { + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum GetError { + #[error("project not found")] + NotFound, + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum UpdateError { + #[error("project not found")] + NotFound, + #[error(transparent)] + Validation(UpdateValidationError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum UpdateValidationError { + #[error("{0} not found")] + KeyNotFound(PathBuf), + #[error("{0} is not a file")] + KeyNotFile(PathBuf), +} + +#[derive(Debug, thiserror::Error)] +pub enum AddError { + #[error("not a directory")] + 
NotADirectory, + #[error("not a git repository")] + NotAGitRepository, + #[error("path not found")] + PathNotFound, + #[error("project already exists")] + AlreadyExists, + #[error("submodules not supported")] + SubmodulesNotSupported, + #[error(transparent)] + User(#[from] users::GetError), + #[error(transparent)] + OpenProjectRepository(#[from] project_repository::OpenError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} diff --git a/src/projects/project.rs b/src/projects/project.rs new file mode 100644 index 000000000..ded7c8edb --- /dev/null +++ b/src/projects/project.rs @@ -0,0 +1,112 @@ +use std::{ + path::{self, PathBuf}, + time, +}; + +use serde::{Deserialize, Serialize}; + +use crate::{git, id::Id, types::default_true::DefaultTrue}; + +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub enum AuthKey { + #[default] + Default, + Generated, + SystemExecutable, + GitCredentialsHelper, + Local { + private_key_path: path::PathBuf, + }, +} + +#[derive(Debug, Deserialize, Serialize, Clone)] +pub struct ApiProject { + pub name: String, + pub description: Option, + pub repository_id: String, + pub git_url: String, + pub code_git_url: Option, + pub created_at: String, + pub updated_at: String, + pub sync: bool, +} + +#[derive(Debug, Deserialize, Serialize, Clone)] +#[serde(rename_all = "camelCase")] +pub enum FetchResult { + Fetched { + timestamp: time::SystemTime, + }, + Error { + timestamp: time::SystemTime, + error: String, + }, +} + +impl FetchResult { + pub fn timestamp(&self) -> &time::SystemTime { + match self { + FetchResult::Fetched { timestamp } | FetchResult::Error { timestamp, .. 
} => timestamp,
        }
    }
}

/// How far pushing of project code to the API has progressed.
#[derive(Debug, Deserialize, Serialize, Copy, Clone)]
pub struct CodePushState {
    pub id: git::Oid,
    pub timestamp: time::SystemTime,
}

pub type ProjectId = Id<Project>;

#[derive(Debug, Deserialize, Serialize, Clone, Default)]
pub struct Project {
    pub id: ProjectId,
    pub title: String,
    pub description: Option<String>,
    pub path: path::PathBuf,
    #[serde(default)]
    pub preferred_key: AuthKey,
    /// if ok_with_force_push is true, we'll not try to avoid force pushing
    /// for example, when updating base branch
    #[serde(default)]
    pub ok_with_force_push: DefaultTrue,
    pub api: Option<ApiProject>,
    #[serde(default)]
    pub gitbutler_data_last_fetch: Option<FetchResult>,
    #[serde(default)]
    pub gitbutler_code_push_state: Option<CodePushState>,
    #[serde(default)]
    pub project_data_last_fetch: Option<FetchResult>,
    #[serde(default)]
    pub omit_certificate_check: Option<bool>,
    #[serde(default)]
    pub use_diff_context: Option<bool>,
}

impl AsRef<Project> for Project {
    fn as_ref(&self) -> &Project {
        self
    }
}

impl Project {
    /// Whether the project is configured to sync with the API.
    pub fn is_sync_enabled(&self) -> bool {
        self.api.as_ref().map_or(false, |api| api.sync)
    }

    /// Whether the API knows a dedicated code remote for this project.
    pub fn has_code_url(&self) -> bool {
        self.api
            .as_ref()
            .map_or(false, |api| api.code_git_url.is_some())
    }

    /// Returns the path to the directory containing the `GitButler` state for this project.
    ///
    /// Normally this is `.git/gitbutler` in the project's repository.
    pub fn gb_dir(&self) -> PathBuf {
        self.path.join(".git").join("gitbutler")
    }
}

// ----- file: src/projects/storage.rs -----

use serde::{Deserialize, Serialize};

use crate::{
    projects::{project, ProjectId},
    storage,
};

/// Name of the JSON file holding the list of known projects.
const PROJECTS_FILE: &str = "projects.json";

/// JSON-file-backed persistence for projects.
#[derive(Debug, Clone)]
pub struct Storage {
    storage: storage::Storage,
}

/// Partial update; only the `Some` fields are applied.
#[derive(Debug, Serialize, Deserialize, Default)]
pub struct UpdateRequest {
    pub id: ProjectId,
    pub title: Option<String>,
    pub description: Option<String>,
    pub api: Option<project::ApiProject>,
    pub gitbutler_data_last_fetched: Option<project::FetchResult>,
    pub preferred_key: Option<project::AuthKey>,
    pub ok_with_force_push: Option<bool>,
    pub gitbutler_code_push_state: Option<project::CodePushState>,
    pub project_data_last_fetched: Option<project::FetchResult>,
    pub omit_certificate_check: Option<bool>,
    pub use_diff_context: Option<bool>,
}

#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error(transparent)]
    Storage(#[from] storage::Error),
    #[error(transparent)]
    Json(#[from] serde_json::Error),
    #[error("project not found")]
    NotFound,
}

impl Storage {
    pub fn new(storage: storage::Storage) -> Storage {
        Storage { storage }
    }

    pub fn from_path<P: AsRef<std::path::Path>>(path: P) -> Storage {
        Storage::new(storage::Storage::new(path))
    }

    /// Load every stored project, migrating the legacy description field.
    pub fn list(&self) -> Result<Vec<project::Project>, Error> {
        match self.storage.read(PROJECTS_FILE)?
{ + Some(projects) => { + let all_projects: Vec = serde_json::from_str(&projects)?; + let all_projects: Vec = all_projects + .into_iter() + .map(|mut p| { + // backwards compatibility for description field + if let Some(api_description) = + p.api.as_ref().and_then(|api| api.description.as_ref()) + { + p.description = Some(api_description.to_string()); + } + p + }) + .collect(); + Ok(all_projects) + } + None => Ok(vec![]), + } + } + + pub fn get(&self, id: &ProjectId) -> Result { + let projects = self.list()?; + for project in &projects { + self.update(&UpdateRequest { + id: project.id, + preferred_key: Some(project.preferred_key.clone()), + ..Default::default() + })?; + } + match projects.into_iter().find(|p| p.id == *id) { + Some(project) => Ok(project), + None => Err(Error::NotFound), + } + } + + pub fn update(&self, update_request: &UpdateRequest) -> Result { + let mut projects = self.list()?; + let project = projects + .iter_mut() + .find(|p| p.id == update_request.id) + .ok_or(Error::NotFound)?; + + if let Some(title) = &update_request.title { + project.title = title.clone(); + } + + if let Some(description) = &update_request.description { + project.description = Some(description.clone()); + } + + if let Some(api) = &update_request.api { + project.api = Some(api.clone()); + } + + if let Some(preferred_key) = &update_request.preferred_key { + project.preferred_key = preferred_key.clone(); + } + + if let Some(gitbutler_data_last_fetched) = + update_request.gitbutler_data_last_fetched.as_ref() + { + project.gitbutler_data_last_fetch = Some(gitbutler_data_last_fetched.clone()); + } + + if let Some(project_data_last_fetched) = update_request.project_data_last_fetched.as_ref() { + project.project_data_last_fetch = Some(project_data_last_fetched.clone()); + } + + if let Some(state) = update_request.gitbutler_code_push_state { + project.gitbutler_code_push_state = Some(state); + } + + if let Some(ok_with_force_push) = update_request.ok_with_force_push { + 
*project.ok_with_force_push = ok_with_force_push; + } + + if let Some(omit_certificate_check) = update_request.omit_certificate_check { + project.omit_certificate_check = Some(omit_certificate_check); + } + + if let Some(use_diff_context) = update_request.use_diff_context { + project.use_diff_context = Some(use_diff_context); + } + + self.storage + .write(PROJECTS_FILE, &serde_json::to_string_pretty(&projects)?)?; + + Ok(projects + .iter() + .find(|p| p.id == update_request.id) + .unwrap() + .clone()) + } + + pub fn purge(&self, id: &ProjectId) -> Result<(), Error> { + let mut projects = self.list()?; + if let Some(index) = projects.iter().position(|p| p.id == *id) { + projects.remove(index); + self.storage + .write(PROJECTS_FILE, &serde_json::to_string_pretty(&projects)?)?; + } + Ok(()) + } + + pub fn add(&self, project: &project::Project) -> Result<(), Error> { + let mut projects = self.list()?; + projects.push(project.clone()); + let projects = serde_json::to_string_pretty(&projects)?; + self.storage.write(PROJECTS_FILE, &projects)?; + Ok(()) + } +} diff --git a/src/reader.rs b/src/reader.rs new file mode 100644 index 000000000..5f64a5072 --- /dev/null +++ b/src/reader.rs @@ -0,0 +1,443 @@ +use std::{ + fs, io, num, + path::{Path, PathBuf}, + str, + sync::Arc, +}; + +use anyhow::{Context, Result}; +use serde::{ser::SerializeStruct, Serialize}; + +use crate::{git, lock, path::Normalize}; + +#[derive(Debug, Clone, thiserror::Error)] +pub enum Error { + #[error("file not found")] + NotFound, + #[error("io error: {0}")] + Io(Arc), + #[error(transparent)] + From(FromError), +} + +impl From for Error { + fn from(error: io::Error) -> Self { + Error::Io(Arc::new(error)) + } +} + +impl From for Error { + fn from(error: FromError) -> Self { + Error::From(error) + } +} + +pub enum Reader<'reader> { + Filesystem(FilesystemReader), + Commit(CommitReader<'reader>), + Prefixed(PrefixedReader<'reader>), +} + +impl<'reader> Reader<'reader> { + pub fn open>(root: P) -> Result { + 
FilesystemReader::open(root).map(Reader::Filesystem) + } + + pub fn sub>(&'reader self, prefix: P) -> Self { + Reader::Prefixed(PrefixedReader::new(self, prefix)) + } + + pub fn commit_id(&self) -> Option { + match self { + Reader::Filesystem(_) => None, + Reader::Commit(reader) => Some(reader.get_commit_oid()), + Reader::Prefixed(reader) => reader.reader.commit_id(), + } + } + + pub fn from_commit( + repository: &'reader git::Repository, + commit: &git::Commit<'reader>, + ) -> Result { + Ok(Reader::Commit(CommitReader::new(repository, commit)?)) + } + + pub fn exists>(&self, file_path: P) -> Result { + match self { + Reader::Filesystem(reader) => reader.exists(file_path), + Reader::Commit(reader) => Ok(reader.exists(file_path)), + Reader::Prefixed(reader) => reader.exists(file_path), + } + } + + pub fn read>(&self, path: P) -> Result { + let mut contents = self.batch(&[path])?; + contents + .pop() + .expect("batch should return at least one result") + } + + pub fn batch>( + &self, + paths: &[P], + ) -> Result>, io::Error> { + match self { + Reader::Filesystem(reader) => reader.batch(|root| { + paths + .iter() + .map(|path| { + let path = root.join(path); + if !path.exists() { + return Err(Error::NotFound); + } + let content = Content::read_from_file(&path)?; + Ok(content) + }) + .collect() + }), + Reader::Commit(reader) => Ok(paths + .iter() + .map(|path| reader.read(path.normalize())) + .collect()), + Reader::Prefixed(reader) => reader.batch(paths), + } + } + + pub fn list_files>(&self, dir_path: P) -> Result> { + match self { + Reader::Filesystem(reader) => reader.list_files(dir_path.as_ref()), + Reader::Commit(reader) => reader.list_files(dir_path.as_ref()), + Reader::Prefixed(reader) => reader.list_files(dir_path.as_ref()), + } + } +} + +pub struct FilesystemReader(lock::Dir); + +impl FilesystemReader { + fn open>(root: P) -> Result { + lock::Dir::new(root).map(Self) + } + + fn exists>(&self, path: P) -> Result { + let exists = self.0.batch(|root| 
root.join(path.as_ref()).exists())?; + Ok(exists) + } + + fn batch(&self, action: impl FnOnce(&Path) -> R) -> Result { + self.0.batch(action) + } + + fn list_files>(&self, path: P) -> Result> { + let path = path.as_ref(); + self.0 + .batch(|root| crate::fs::list_files(root.join(path).as_path(), &[Path::new(".git")]))? + } +} + +pub struct CommitReader<'reader> { + repository: &'reader git::Repository, + commit_oid: git::Oid, + tree: git::Tree<'reader>, +} + +impl<'reader> CommitReader<'reader> { + pub fn new( + repository: &'reader git::Repository, + commit: &git::Commit<'reader>, + ) -> Result> { + let tree = commit + .tree() + .with_context(|| format!("{}: tree not found", commit.id()))?; + Ok(CommitReader { + repository, + tree, + commit_oid: commit.id(), + }) + } + + pub fn get_commit_oid(&self) -> git::Oid { + self.commit_oid + } + + fn read>(&self, path: P) -> Result { + let path = path.as_ref(); + let entry = match self + .tree + .get_path(Path::new(path)) + .context(format!("{}: tree entry not found", path.display())) + { + Ok(entry) => entry, + Err(_) => return Err(Error::NotFound), + }; + let blob = match self.repository.find_blob(entry.id()) { + Ok(blob) => blob, + Err(_) => return Err(Error::NotFound), + }; + Ok(Content::from(&blob)) + } + + pub fn list_files>(&self, dir_path: P) -> Result> { + let dir_path = dir_path.as_ref(); + let mut files = vec![]; + self.tree + .walk(|root, entry| { + if entry.kind() == Some(git2::ObjectType::Tree) { + return git::TreeWalkResult::Continue; + } + + if entry.name().is_none() { + return git::TreeWalkResult::Continue; + } + let entry_path = Path::new(root).join(entry.name().unwrap()); + + if !entry_path.starts_with(dir_path) { + return git::TreeWalkResult::Continue; + } + + files.push(entry_path.strip_prefix(dir_path).unwrap().to_path_buf()); + + git::TreeWalkResult::Continue + }) + .with_context(|| format!("{}: tree walk failed", dir_path.display()))?; + + Ok(files) + } + + pub fn exists>(&self, file_path: P) -> bool 
{ + self.tree.get_path(file_path.normalize()).is_ok() + } +} + +pub struct PrefixedReader<'r> { + reader: &'r Reader<'r>, + prefix: PathBuf, +} + +impl<'r> PrefixedReader<'r> { + fn new>(reader: &'r Reader, prefix: P) -> Self { + PrefixedReader { + reader, + prefix: prefix.as_ref().to_path_buf(), + } + } + + pub fn batch>( + &self, + paths: &[P], + ) -> Result>, io::Error> { + let paths = paths + .iter() + .map(|path| self.prefix.join(path)) + .collect::>(); + self.reader.batch(paths.as_slice()) + } + + fn list_files>(&self, dir_path: P) -> Result> { + self.reader.list_files(self.prefix.join(dir_path.as_ref())) + } + + fn exists>(&self, file_path: P) -> Result { + self.reader.exists(self.prefix.join(file_path.as_ref())) + } +} + +#[derive(Debug, Clone, thiserror::Error)] +pub enum FromError { + #[error(transparent)] + ParseInt(#[from] num::ParseIntError), + #[error(transparent)] + ParseBool(#[from] str::ParseBoolError), + #[error("file is binary")] + Binary, + #[error("file too large")] + Large, +} + +#[derive(Debug, Clone, PartialEq)] +pub enum Content { + UTF8(String), + Binary, + Large, +} + +impl Serialize for Content { + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + match self { + Content::UTF8(text) => { + let mut state = serializer.serialize_struct("Content", 2)?; + state.serialize_field("type", "utf8")?; + state.serialize_field("value", text)?; + state.end() + } + Content::Binary => { + let mut state = serializer.serialize_struct("Content", 1)?; + state.serialize_field("type", "binary")?; + state.end() + } + Content::Large => { + let mut state = serializer.serialize_struct("Content", 1)?; + state.serialize_field("type", "large")?; + state.end() + } + } + } +} + +impl Content { + const MAX_SIZE: usize = 1024 * 1024 * 10; // 10 MB + + pub fn read_from_file>(path: P) -> Result { + let path = path.as_ref(); + let metadata = fs::metadata(path)?; + if metadata.len() > Content::MAX_SIZE as u64 { + return 
Ok(Content::Large); + } + let content = fs::read(path)?; + Ok(content.as_slice().into()) + } +} + +impl From<&str> for Content { + fn from(text: &str) -> Self { + if text.len() > Self::MAX_SIZE { + Content::Large + } else { + Content::UTF8(text.to_string()) + } + } +} + +impl From<&git::Blob<'_>> for Content { + fn from(value: &git::Blob) -> Self { + if value.size() > Content::MAX_SIZE { + Content::Large + } else { + value.content().into() + } + } +} + +impl From<&[u8]> for Content { + fn from(bytes: &[u8]) -> Self { + if bytes.len() > Self::MAX_SIZE { + Content::Large + } else { + match String::from_utf8(bytes.to_vec()) { + Err(_) => Content::Binary, + Ok(text) => Content::UTF8(text), + } + } + } +} + +impl TryFrom<&Content> for usize { + type Error = FromError; + + fn try_from(content: &Content) -> Result { + match content { + Content::UTF8(text) => text.parse().map_err(FromError::ParseInt), + Content::Binary => Err(FromError::Binary), + Content::Large => Err(FromError::Large), + } + } +} + +impl TryFrom for usize { + type Error = FromError; + + fn try_from(content: Content) -> Result { + Self::try_from(&content) + } +} + +impl TryFrom<&Content> for String { + type Error = FromError; + + fn try_from(content: &Content) -> Result { + match content { + Content::UTF8(text) => Ok(text.clone()), + Content::Binary => Err(FromError::Binary), + Content::Large => Err(FromError::Large), + } + } +} + +impl TryFrom for String { + type Error = FromError; + + fn try_from(content: Content) -> Result { + Self::try_from(&content) + } +} + +impl TryFrom for i64 { + type Error = FromError; + + fn try_from(content: Content) -> Result { + Self::try_from(&content) + } +} + +impl TryFrom<&Content> for i64 { + type Error = FromError; + + fn try_from(content: &Content) -> Result { + let text: String = content.try_into()?; + text.parse().map_err(FromError::ParseInt) + } +} + +impl TryFrom for u64 { + type Error = FromError; + + fn try_from(content: Content) -> Result { + 
Self::try_from(&content) + } +} + +impl TryFrom<&Content> for u64 { + type Error = FromError; + + fn try_from(content: &Content) -> Result { + let text: String = content.try_into()?; + text.parse().map_err(FromError::ParseInt) + } +} + +impl TryFrom for u128 { + type Error = FromError; + + fn try_from(content: Content) -> Result { + Self::try_from(&content) + } +} + +impl TryFrom<&Content> for u128 { + type Error = FromError; + + fn try_from(content: &Content) -> Result { + let text: String = content.try_into()?; + text.parse().map_err(FromError::ParseInt) + } +} + +impl TryFrom for bool { + type Error = FromError; + + fn try_from(content: Content) -> Result { + Self::try_from(&content) + } +} + +impl TryFrom<&Content> for bool { + type Error = FromError; + + fn try_from(content: &Content) -> Result { + let text: String = content.try_into()?; + text.parse().map_err(FromError::ParseBool) + } +} diff --git a/src/sessions.rs b/src/sessions.rs new file mode 100644 index 000000000..1ac34de9c --- /dev/null +++ b/src/sessions.rs @@ -0,0 +1,14 @@ +mod controller; +mod iterator; +mod reader; +pub mod session; +mod writer; + +pub mod database; + +pub use controller::Controller; +pub use database::Database; +pub use iterator::SessionsIterator; +pub use reader::SessionReader as Reader; +pub use session::{Meta, Session, SessionError, SessionId}; +pub use writer::SessionWriter as Writer; diff --git a/src/sessions/controller.rs b/src/sessions/controller.rs new file mode 100644 index 000000000..9ea409200 --- /dev/null +++ b/src/sessions/controller.rs @@ -0,0 +1,91 @@ +use std::path; + +use anyhow::Context; + +use crate::{ + gb_repository, project_repository, + projects::{self, ProjectId}, + users, +}; + +use super::{Database, Session}; + +#[derive(Clone)] +pub struct Controller { + local_data_dir: path::PathBuf, + sessions_database: Database, + + projects: projects::Controller, + users: users::Controller, +} + +#[derive(Debug, thiserror::Error)] +pub enum ListError { + 
#[error(transparent)] + ProjectsError(#[from] projects::GetError), + #[error(transparent)] + ProjectRepositoryError(#[from] project_repository::OpenError), + #[error(transparent)] + UsersError(#[from] users::GetError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +impl Controller { + pub fn new( + local_data_dir: path::PathBuf, + sessions_database: Database, + projects: projects::Controller, + users: users::Controller, + ) -> Self { + Self { + local_data_dir, + sessions_database, + projects, + users, + } + } + + pub fn list( + &self, + project_id: &ProjectId, + earliest_timestamp_ms: Option, + ) -> Result, ListError> { + let sessions = self + .sessions_database + .list_by_project_id(project_id, earliest_timestamp_ms)?; + + let project = self.projects.get(project_id)?; + let project_repository = project_repository::Repository::open(&project)?; + let user = self.users.get_user()?; + let gb_repository = gb_repository::Repository::open( + &self.local_data_dir, + &project_repository, + user.as_ref(), + ) + .context("failed to open gb repository")?; + + // this is a hack to account for a case when we have a session created, but fs was never + // touched, so the wathcer never picked up the session + let current_session = gb_repository + .get_current_session() + .context("failed to get current session")?; + let have_to_index = matches!( + (current_session.as_ref(), sessions.first()), + (Some(_), None) + ); + if !have_to_index { + return Ok(sessions); + } + + let sessions_iter = gb_repository.get_sessions_iterator()?; + let mut sessions = sessions_iter.collect::, _>>()?; + self.sessions_database + .insert(project_id, &sessions.iter().collect::>())?; + if let Some(session) = current_session { + self.sessions_database.insert(project_id, &[&session])?; + sessions.insert(0, session); + } + Ok(sessions) + } +} diff --git a/src/sessions/database.rs b/src/sessions/database.rs new file mode 100644 index 000000000..3c60790f9 --- /dev/null +++ 
b/src/sessions/database.rs @@ -0,0 +1,182 @@ +use anyhow::{Context, Result}; + +use crate::{database, projects::ProjectId}; + +use super::session::{self, SessionId}; + +#[derive(Clone)] +pub struct Database { + database: database::Database, +} + +impl Database { + pub fn new(database: database::Database) -> Database { + Database { database } + } + + pub fn insert(&self, project_id: &ProjectId, sessions: &[&session::Session]) -> Result<()> { + self.database.transaction(|tx| -> Result<()> { + let mut stmt = insert_stmt(tx).context("Failed to prepare insert statement")?; + for session in sessions { + stmt.execute(rusqlite::named_params! { + ":id": session.id, + ":project_id": project_id, + ":hash": session.hash.map(|hash| hash.to_string()), + ":branch": session.meta.branch, + ":commit": session.meta.commit, + ":start_timestamp_ms": session.meta.start_timestamp_ms.to_string(), + ":last_timestamp_ms": session.meta.last_timestamp_ms.to_string(), + }) + .context("Failed to execute insert statement")?; + } + Ok(()) + })?; + + Ok(()) + } + + pub fn list_by_project_id( + &self, + project_id: &ProjectId, + earliest_timestamp_ms: Option, + ) -> Result> { + self.database.transaction(|tx| { + let mut stmt = list_by_project_id_stmt(tx) + .context("Failed to prepare list_by_project_id statement")?; + let mut rows = stmt + .query(rusqlite::named_params! { + ":project_id": project_id, + }) + .context("Failed to execute list_by_project_id statement")?; + + let mut sessions = Vec::new(); + while let Some(row) = rows + .next() + .context("Failed to iterate over list_by_project_id results")? 
+ { + let session = parse_row(row)?; + + if let Some(earliest_timestamp_ms) = earliest_timestamp_ms { + if session.meta.last_timestamp_ms < earliest_timestamp_ms { + continue; + } + } + + sessions.push(session); + } + Ok(sessions) + }) + } + + pub fn get_by_project_id_id( + &self, + project_id: &ProjectId, + id: &SessionId, + ) -> Result> { + self.database.transaction(|tx| { + let mut stmt = get_by_project_id_id_stmt(tx) + .context("Failed to prepare get_by_project_id_id statement")?; + let mut rows = stmt + .query(rusqlite::named_params! { + ":project_id": project_id, + ":id": id, + }) + .context("Failed to execute get_by_project_id_id statement")?; + if let Some(row) = rows + .next() + .context("Failed to iterate over get_by_project_id_id results")? + { + Ok(Some(parse_row(row)?)) + } else { + Ok(None) + } + }) + } + + pub fn get_by_id(&self, id: &SessionId) -> Result> { + self.database.transaction(|tx| { + let mut stmt = get_by_id_stmt(tx).context("Failed to prepare get_by_id statement")?; + let mut rows = stmt + .query(rusqlite::named_params! { + ":id": id, + }) + .context("Failed to execute get_by_id statement")?; + if let Some(row) = rows + .next() + .context("Failed to iterate over get_by_id results")? + { + Ok(Some(parse_row(row)?)) + } else { + Ok(None) + } + }) + } +} + +fn parse_row(row: &rusqlite::Row) -> Result { + Ok(session::Session { + id: row.get(0).context("Failed to get id")?, + hash: row + .get::>(2) + .context("Failed to get hash")? + .map(|hash| hash.parse().context("Failed to parse hash")) + .transpose()?, + meta: session::Meta { + branch: row.get(3).context("Failed to get branch")?, + commit: row.get(4).context("Failed to get commit")?, + start_timestamp_ms: row + .get::(5) + .context("Failed to get start_timestamp_ms")? + .parse() + .context("Failed to parse start_timestamp_ms")?, + last_timestamp_ms: row + .get::(6) + .context("Failed to get last_timestamp_ms")? 
+ .parse() + .context("Failed to parse last_timestamp_ms")?, + }, + }) +} + +fn list_by_project_id_stmt<'conn>( + tx: &'conn rusqlite::Transaction, +) -> Result> { + Ok(tx.prepare_cached( + "SELECT `id`, `project_id`, `hash`, `branch`, `commit`, `start_timestamp_ms`, `last_timestamp_ms` FROM `sessions` WHERE `project_id` = :project_id ORDER BY `start_timestamp_ms` DESC", + )?) +} + +fn get_by_project_id_id_stmt<'conn>( + tx: &'conn rusqlite::Transaction, +) -> Result> { + Ok(tx.prepare_cached( + "SELECT `id`, `project_id`, `hash`, `branch`, `commit`, `start_timestamp_ms`, `last_timestamp_ms` FROM `sessions` WHERE `project_id` = :project_id AND `id` = :id", + )?) +} + +fn get_by_id_stmt<'conn>( + tx: &'conn rusqlite::Transaction, +) -> Result> { + Ok(tx.prepare_cached( + "SELECT `id`, `project_id`, `hash`, `branch`, `commit`, `start_timestamp_ms`, `last_timestamp_ms` FROM `sessions` WHERE `id` = :id", + )?) +} + +fn insert_stmt<'conn>( + tx: &'conn rusqlite::Transaction, +) -> Result> { + Ok(tx.prepare_cached( + "INSERT INTO 'sessions' ( + `id`, `project_id`, `hash`, `branch`, `commit`, `start_timestamp_ms`, `last_timestamp_ms` + ) VALUES ( + :id, :project_id, :hash, :branch, :commit, :start_timestamp_ms, :last_timestamp_ms + ) ON CONFLICT(`id`) DO UPDATE SET + `project_id` = :project_id, + `hash` = :hash, + `branch` = :branch, + `commit` = :commit, + `start_timestamp_ms` = :start_timestamp_ms, + `last_timestamp_ms` = :last_timestamp_ms + ", + )?) 
+} diff --git a/src/sessions/iterator.rs b/src/sessions/iterator.rs new file mode 100644 index 000000000..249db0eed --- /dev/null +++ b/src/sessions/iterator.rs @@ -0,0 +1,68 @@ +use anyhow::{Context, Result}; + +use crate::{git, reader}; + +use super::{Session, SessionError}; + +pub struct SessionsIterator<'iterator> { + git_repository: &'iterator git::Repository, + iter: git2::Revwalk<'iterator>, +} + +impl<'iterator> SessionsIterator<'iterator> { + pub(crate) fn new(git_repository: &'iterator git::Repository) -> Result { + let mut iter = git_repository + .revwalk() + .context("failed to create revwalk")?; + + iter.set_sorting(git2::Sort::TOPOLOGICAL | git2::Sort::TIME) + .context("failed to set sorting")?; + + let branches = git_repository.branches(None)?; + for branch in branches { + let (branch, _) = branch.context("failed to get branch")?; + iter.push(branch.peel_to_commit()?.id().into()) + .with_context(|| format!("failed to push branch {:?}", branch.name()))?; + } + + Ok(Self { + git_repository, + iter, + }) + } +} + +impl<'iterator> Iterator for SessionsIterator<'iterator> { + type Item = Result; + + fn next(&mut self) -> Option { + match self.iter.next() { + Some(Result::Ok(oid)) => { + let commit = match self.git_repository.find_commit(oid.into()) { + Result::Ok(commit) => commit, + Err(err) => return Some(Err(err.into())), + }; + + if commit.parent_count() == 0 { + // skip initial commit, as it's impossible to get a list of files from it + // it's only used to bootstrap the history + return self.next(); + } + + let commit_reader = match reader::Reader::from_commit(self.git_repository, &commit) + { + Result::Ok(commit_reader) => commit_reader, + Err(err) => return Some(Err(err)), + }; + let session = match Session::try_from(&commit_reader) { + Result::Ok(session) => session, + Err(SessionError::NoSession) => return None, + Err(err) => return Some(Err(err.into())), + }; + Some(Ok(session)) + } + Some(Err(err)) => Some(Err(err.into())), + None => None, + } 
+ } +} diff --git a/src/sessions/reader.rs b/src/sessions/reader.rs new file mode 100644 index 000000000..8ba8405a0 --- /dev/null +++ b/src/sessions/reader.rs @@ -0,0 +1,105 @@ +use std::{collections::HashMap, path}; + +use anyhow::{anyhow, Context, Result}; + +use crate::{gb_repository, reader}; + +use super::Session; + +pub struct SessionReader<'reader> { + // reader for the current session. commit or wd + reader: reader::Reader<'reader>, + // reader for the previous session's commit + previous_reader: reader::Reader<'reader>, +} + +#[derive(thiserror::Error, Debug)] +pub enum FileError { + #[error(transparent)] + Reader(#[from] reader::Error), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +impl<'reader> SessionReader<'reader> { + pub fn reader(&self) -> &reader::Reader<'reader> { + &self.reader + } + + pub fn open(repository: &'reader gb_repository::Repository, session: &Session) -> Result { + let wd_reader = reader::Reader::open(&repository.root())?; + + if let Ok(reader::Content::UTF8(current_session_id)) = wd_reader.read("session/meta/id") { + if current_session_id == session.id.to_string() { + let head_commit = repository.git_repository().head()?.peel_to_commit()?; + return Ok(SessionReader { + reader: wd_reader, + previous_reader: reader::Reader::from_commit( + repository.git_repository(), + &head_commit, + )?, + }); + } + } + + let session_hash = if let Some(hash) = &session.hash { + hash + } else { + return Err(anyhow!( + "can not open reader for {} because it has no commit hash nor it is a current session", + session.id + )); + }; + + let commit = repository + .git_repository() + .find_commit(*session_hash) + .context("failed to get commit")?; + let commit_reader = reader::Reader::from_commit(repository.git_repository(), &commit)?; + + Ok(SessionReader { + reader: commit_reader, + previous_reader: reader::Reader::from_commit( + repository.git_repository(), + &commit.parent(0)?, + )?, + }) + } + + pub fn files( + &self, + filter: 
Option<&[&path::Path]>, + ) -> Result, FileError> { + let wd_dir = path::Path::new("wd"); + let mut paths = self.previous_reader.list_files(wd_dir)?; + if let Some(filter) = filter { + paths = paths + .into_iter() + .filter(|file_path| filter.iter().any(|path| file_path.eq(path))) + .collect::>(); + } + paths = paths.iter().map(|path| wd_dir.join(path)).collect(); + let files = self + .previous_reader + .batch(&paths) + .context("failed to batch read")?; + + let files = files.into_iter().collect::, _>>()?; + + Ok(paths + .into_iter() + .zip(files) + .filter_map(|(path, file)| { + path.strip_prefix(wd_dir) + .ok() + .map(|path| (path.to_path_buf(), file)) + }) + .collect::>()) + } + + pub fn file>(&self, path: P) -> Result { + let path = path.as_ref(); + self.previous_reader + .read(std::path::Path::new("wd").join(path)) + } +} diff --git a/src/sessions/session.rs b/src/sessions/session.rs new file mode 100644 index 000000000..c0feecc4e --- /dev/null +++ b/src/sessions/session.rs @@ -0,0 +1,126 @@ +use std::path; + +use anyhow::{Context, Result}; +use serde::Serialize; +use thiserror::Error; + +use crate::{git, id::Id, reader}; + +#[derive(Debug, Clone, Serialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct Meta { + // timestamp of when the session was created + pub start_timestamp_ms: u128, + // timestamp of when the session was last active + pub last_timestamp_ms: u128, + // session branch name + pub branch: Option, + // session commit hash + pub commit: Option, +} + +pub type SessionId = Id; + +#[derive(Debug, Clone, Serialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct Session { + pub id: SessionId, + // if hash is not set, the session is not saved aka current + pub hash: Option, + pub meta: Meta, +} + +#[derive(Error, Debug)] +pub enum SessionError { + #[error("session does not exist")] + NoSession, + #[error("{0}")] + Other(#[from] anyhow::Error), +} + +impl TryFrom<&reader::Reader<'_>> for Session { + type Error = 
SessionError; + + fn try_from(reader: &reader::Reader) -> Result { + let results = reader + .batch(&[ + path::Path::new("session/meta/id"), + path::Path::new("session/meta/start"), + path::Path::new("session/meta/last"), + path::Path::new("session/meta/branch"), + path::Path::new("session/meta/commit"), + ]) + .context("failed to batch read")?; + + let id = &results[0]; + let start_timestamp_ms = &results[1]; + let last_timestamp_ms = &results[2]; + let branch = &results[3]; + let commit = &results[4]; + + let id = id.clone().map_err(|error| match error { + reader::Error::NotFound => SessionError::NoSession, + error => SessionError::Other(error.into()), + })?; + let id: String = id + .try_into() + .context("failed to parse session id as string") + .map_err(SessionError::Other)?; + let id: SessionId = id.parse().context("failed to parse session id as uuid")?; + + let start_timestamp_ms = start_timestamp_ms.clone().map_err(|error| match error { + reader::Error::NotFound => SessionError::NoSession, + error => SessionError::Other(error.into()), + })?; + + let start_timestamp_ms: u128 = start_timestamp_ms + .try_into() + .context("failed to parse session start timestamp as number") + .map_err(SessionError::Other)?; + + let last_timestamp_ms = last_timestamp_ms.clone().map_err(|error| match error { + reader::Error::NotFound => SessionError::NoSession, + error => SessionError::Other(error.into()), + })?; + + let last_timestamp_ms: u128 = last_timestamp_ms + .try_into() + .context("failed to parse session last timestamp as number") + .map_err(SessionError::Other)?; + + let branch = match branch.clone() { + Ok(branch) => { + let branch = branch + .try_into() + .context("failed to parse session branch as string")?; + Ok(Some(branch)) + } + Err(reader::Error::NotFound) => Ok(None), + Err(e) => Err(e), + } + .context("failed to parse session branch as string")?; + + let commit = match commit.clone() { + Ok(commit) => { + let commit = commit + .try_into() + .context("failed to 
parse session commit as string")?; + Ok(Some(commit)) + } + Err(reader::Error::NotFound) => Ok(None), + Err(e) => Err(e), + } + .context("failed to parse session commit as string")?; + + Ok(Self { + id, + hash: reader.commit_id(), + meta: Meta { + start_timestamp_ms, + last_timestamp_ms, + branch, + commit, + }, + }) + } +} diff --git a/src/sessions/writer.rs b/src/sessions/writer.rs new file mode 100644 index 000000000..9d8d1a162 --- /dev/null +++ b/src/sessions/writer.rs @@ -0,0 +1,108 @@ +use std::time; + +use anyhow::{anyhow, Context, Result}; + +use crate::{gb_repository, reader, writer}; + +use super::Session; + +pub struct SessionWriter<'writer> { + repository: &'writer gb_repository::Repository, + writer: writer::DirWriter, +} + +impl<'writer> SessionWriter<'writer> { + pub fn new(repository: &'writer gb_repository::Repository) -> Result { + writer::DirWriter::open(repository.root()) + .map(|writer| SessionWriter { repository, writer }) + } + + pub fn remove(&self) -> Result<()> { + self.writer.remove("session")?; + + tracing::debug!( + project_id = %self.repository.get_project_id(), + "deleted session" + ); + + Ok(()) + } + + pub fn write(&self, session: &Session) -> Result<()> { + if session.hash.is_some() { + return Err(anyhow!("can not open writer for a session with a hash")); + } + + let reader = reader::Reader::open(&self.repository.root()) + .context("failed to open current session reader")?; + + let current_session_id = + if let Ok(reader::Content::UTF8(current_session_id)) = reader.read("session/meta/id") { + Some(current_session_id) + } else { + None + }; + + if current_session_id.is_some() + && current_session_id.as_ref() != Some(&session.id.to_string()) + { + return Err(anyhow!( + "{}: can not open writer for {} because a writer for {} is still open", + self.repository.get_project_id(), + session.id, + current_session_id.unwrap() + )); + } + + let mut batch = vec![writer::BatchTask::Write( + "session/meta/last", + time::SystemTime::now() + 
.duration_since(time::SystemTime::UNIX_EPOCH) + .unwrap() + .as_millis() + .to_string(), + )]; + + if current_session_id.is_some() + && current_session_id.as_ref() == Some(&session.id.to_string()) + { + self.writer + .batch(&batch) + .context("failed to write last timestamp")?; + return Ok(()); + } + + batch.push(writer::BatchTask::Write( + "session/meta/id", + session.id.to_string(), + )); + batch.push(writer::BatchTask::Write( + "session/meta/start", + session.meta.start_timestamp_ms.to_string(), + )); + + if let Some(branch) = session.meta.branch.as_ref() { + batch.push(writer::BatchTask::Write( + "session/meta/branch", + branch.to_string(), + )); + } else { + batch.push(writer::BatchTask::Remove("session/meta/branch")); + } + + if let Some(commit) = session.meta.commit.as_ref() { + batch.push(writer::BatchTask::Write( + "session/meta/commit", + commit.to_string(), + )); + } else { + batch.push(writer::BatchTask::Remove("session/meta/commit")); + } + + self.writer + .batch(&batch) + .context("failed to write session meta")?; + + Ok(()) + } +} diff --git a/src/ssh.rs b/src/ssh.rs new file mode 100644 index 000000000..fe4f62a84 --- /dev/null +++ b/src/ssh.rs @@ -0,0 +1,67 @@ +use std::{env, fs, path::Path}; + +use ssh2::{self, CheckResult, KnownHostFileKind}; + +use crate::git; + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error(transparent)] + Ssh(ssh2::Error), + #[error(transparent)] + Io(std::io::Error), + #[error("mismatched host key")] + MismatchedHostKey, + #[error("failed to check the known hosts")] + Failure, +} + +pub fn check_known_host(remote_url: &git::Url) -> Result<(), Error> { + if remote_url.scheme != git::Scheme::Ssh { + return Ok(()); + } + + let host = if let Some(host) = remote_url.host.as_ref() { + host + } else { + return Ok(()); + }; + + let mut session = ssh2::Session::new().map_err(Error::Ssh)?; + session + .set_tcp_stream(std::net::TcpStream::connect(format!("{}:22", host)).map_err(Error::Io)?); + 
session.handshake().map_err(Error::Ssh)?; + + let mut known_hosts = session.known_hosts().map_err(Error::Ssh)?; + + // Initialize the known hosts with a global known hosts file + let dotssh = Path::new(&env::var("HOME").unwrap()).join(".ssh"); + let file = dotssh.join("known_hosts"); + if !file.exists() { + fs::create_dir_all(&dotssh).map_err(Error::Io)?; + fs::File::create(&file).map_err(Error::Io)?; + } + + known_hosts + .read_file(&file, KnownHostFileKind::OpenSSH) + .map_err(Error::Ssh)?; + + // Now check to see if the seesion's host key is anywhere in the known + // hosts file + let (key, key_type) = session.host_key().unwrap(); + match known_hosts.check(host, key) { + CheckResult::Match => Ok(()), + CheckResult::Mismatch => Err(Error::MismatchedHostKey), + CheckResult::Failure => Err(Error::Failure), + CheckResult::NotFound => { + tracing::info!("adding host key for {}", host); + known_hosts + .add(host, key, "added by gitbutler client", key_type.into()) + .map_err(Error::Ssh)?; + known_hosts + .write_file(&file, KnownHostFileKind::OpenSSH) + .map_err(Error::Ssh)?; + Ok(()) + } + } +} diff --git a/src/storage.rs b/src/storage.rs new file mode 100644 index 000000000..1533016cf --- /dev/null +++ b/src/storage.rs @@ -0,0 +1,73 @@ +use std::{ + fs, + path::{Path, PathBuf}, + sync::{Arc, RwLock}, +}; + +#[cfg(target_family = "unix")] +use std::os::unix::prelude::*; + +#[derive(Debug, Default, Clone)] +pub struct Storage { + local_data_dir: Arc>, +} + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error(transparent)] + IO(#[from] std::io::Error), +} + +impl Storage { + pub fn new>(local_data_dir: P) -> Storage { + Storage { + local_data_dir: Arc::new(RwLock::new(local_data_dir.as_ref().to_path_buf())), + } + } + + pub fn read>(&self, path: P) -> Result, Error> { + let local_data_dir = self.local_data_dir.read().unwrap(); + let file_path = local_data_dir.join(path); + if !file_path.exists() { + return Ok(None); + } + let contents = 
fs::read_to_string(&file_path).map_err(Error::IO)?; + Ok(Some(contents)) + } + + pub fn write>(&self, path: P, content: &str) -> Result<(), Error> { + let local_data_dir = self.local_data_dir.write().unwrap(); + let file_path = local_data_dir.join(path); + let dir = file_path.parent().unwrap(); + if !dir.exists() { + fs::create_dir_all(dir).map_err(Error::IO)?; + } + fs::write(&file_path, content).map_err(Error::IO)?; + + // Set the permissions to be user-only. We can't actually + // do this on Windows, so we ignore that platform. + #[cfg(target_family = "unix")] + { + let metadata = fs::metadata(file_path.clone())?; + let mut permissions = metadata.permissions(); + permissions.set_mode(0o600); // User read/write + fs::set_permissions(file_path.clone(), permissions)?; + } + + Ok(()) + } + + pub fn delete>(&self, path: P) -> Result<(), Error> { + let local_data_dir = self.local_data_dir.write().unwrap(); + let file_path = local_data_dir.join(path); + if !file_path.exists() { + return Ok(()); + } + if file_path.is_dir() { + fs::remove_dir_all(file_path.clone()).map_err(Error::IO)?; + } else { + fs::remove_file(file_path.clone()).map_err(Error::IO)?; + } + Ok(()) + } +} diff --git a/src/types.rs b/src/types.rs new file mode 100644 index 000000000..3fa5f859e --- /dev/null +++ b/src/types.rs @@ -0,0 +1 @@ +pub mod default_true; diff --git a/src/types/default_true.rs b/src/types/default_true.rs new file mode 100644 index 000000000..7ab2c1d69 --- /dev/null +++ b/src/types/default_true.rs @@ -0,0 +1,90 @@ +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub struct DefaultTrue(bool); + +impl core::fmt::Debug for DefaultTrue { + #[inline] + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + ::fmt(&self.0, f) + } +} + +impl core::fmt::Display for DefaultTrue { + #[inline] + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + ::fmt(&self.0, f) + } +} + +impl Default for DefaultTrue { + #[inline] + fn default() -> Self { + 
DefaultTrue(true) + } +} + +impl From for bool { + #[inline] + fn from(default_true: DefaultTrue) -> Self { + default_true.0 + } +} + +impl From for DefaultTrue { + #[inline] + fn from(boolean: bool) -> Self { + DefaultTrue(boolean) + } +} + +impl serde::Serialize for DefaultTrue { + #[inline] + fn serialize(&self, serializer: S) -> Result { + serializer.serialize_bool(self.0) + } +} + +impl<'de> serde::Deserialize<'de> for DefaultTrue { + #[inline] + fn deserialize>(deserializer: D) -> Result { + Ok(DefaultTrue(bool::deserialize(deserializer)?)) + } +} + +impl core::ops::Deref for DefaultTrue { + type Target = bool; + + #[inline] + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl core::ops::DerefMut for DefaultTrue { + #[inline] + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl PartialEq for DefaultTrue { + #[inline] + fn eq(&self, other: &bool) -> bool { + self.0 == *other + } +} + +impl PartialEq for bool { + #[inline] + fn eq(&self, other: &DefaultTrue) -> bool { + *self == other.0 + } +} + +impl core::ops::Not for DefaultTrue { + type Output = bool; + + #[inline] + fn not(self) -> Self::Output { + !self.0 + } +} diff --git a/src/users.rs b/src/users.rs new file mode 100644 index 000000000..7c26c7204 --- /dev/null +++ b/src/users.rs @@ -0,0 +1,6 @@ +pub mod controller; +pub mod storage; +mod user; + +pub use controller::*; +pub use user::User; diff --git a/src/users/controller.rs b/src/users/controller.rs new file mode 100644 index 000000000..b6c4d4d66 --- /dev/null +++ b/src/users/controller.rs @@ -0,0 +1,57 @@ +use anyhow::Context; + +use super::{storage::Storage, User}; + +#[derive(Clone)] +pub struct Controller { + storage: Storage, +} + +impl Controller { + pub fn new(storage: Storage) -> Controller { + Controller { storage } + } + + pub fn from_path>(path: P) -> Controller { + Controller::new(Storage::from_path(path)) + } + + pub fn get_user(&self) -> Result, GetError> { + self.storage + .get() + .context("failed to 
get user") + .map_err(Into::into) + } + + pub fn set_user(&self, user: &User) -> Result<(), SetError> { + self.storage + .set(user) + .context("failed to set user") + .map_err(Into::into) + } + + pub fn delete_user(&self) -> Result<(), DeleteError> { + self.storage + .delete() + .context("failed to delete user") + .map_err(Into::into) + } +} + +#[derive(Debug, thiserror::Error)] +pub enum GetError { + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum SetError { + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum DeleteError { + #[error(transparent)] + Other(#[from] anyhow::Error), +} diff --git a/src/users/storage.rs b/src/users/storage.rs new file mode 100644 index 000000000..8c77323c3 --- /dev/null +++ b/src/users/storage.rs @@ -0,0 +1,46 @@ +use anyhow::Result; + +use crate::{storage, users::user}; + +const USER_FILE: &str = "user.json"; + +#[derive(Debug, Clone)] +pub struct Storage { + storage: storage::Storage, +} + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error(transparent)] + Storage(#[from] storage::Error), + #[error(transparent)] + Json(#[from] serde_json::Error), +} + +impl Storage { + pub fn new(storage: storage::Storage) -> Storage { + Storage { storage } + } + + pub fn from_path>(path: P) -> Storage { + Storage::new(storage::Storage::new(path)) + } + + pub fn get(&self) -> Result, Error> { + match self.storage.read(USER_FILE)? 
{ + Some(data) => Ok(Some(serde_json::from_str(&data)?)), + None => Ok(None), + } + } + + pub fn set(&self, user: &user::User) -> Result<(), Error> { + let data = serde_json::to_string(user)?; + self.storage.write(USER_FILE, &data)?; + Ok(()) + } + + pub fn delete(&self) -> Result<(), Error> { + self.storage.delete(USER_FILE)?; + Ok(()) + } +} diff --git a/src/users/user.rs b/src/users/user.rs new file mode 100644 index 000000000..655c30a87 --- /dev/null +++ b/src/users/user.rs @@ -0,0 +1,35 @@ +use serde::{Deserialize, Serialize}; + +use crate::git; + +#[derive(Debug, Deserialize, Serialize, Clone, Default)] +pub struct User { + pub id: u64, + pub name: Option, + pub given_name: Option, + pub family_name: Option, + pub email: String, + pub picture: String, + pub locale: Option, + pub created_at: String, + pub updated_at: String, + pub access_token: String, + pub role: Option, + pub github_access_token: Option, + #[serde(default)] + pub github_username: Option, +} + +impl TryFrom for git::Signature<'_> { + type Error = git::Error; + + fn try_from(value: User) -> Result { + if let Some(name) = value.name { + git::Signature::now(&name, &value.email) + } else if let Some(name) = value.given_name { + git::Signature::now(&name, &value.email) + } else { + git::Signature::now(&value.email, &value.email) + } + } +} diff --git a/src/virtual_branches.rs b/src/virtual_branches.rs new file mode 100644 index 000000000..cb85550a8 --- /dev/null +++ b/src/virtual_branches.rs @@ -0,0 +1,29 @@ +pub mod branch; +pub use branch::{Branch, BranchId}; +pub mod context; +pub mod target; + +pub mod errors; + +mod files; +pub use files::*; + +pub mod integration; +pub use integration::GITBUTLER_INTEGRATION_REFERENCE; + +mod base; +pub use base::*; + +pub mod controller; +pub use controller::Controller; + +mod iterator; +pub use iterator::BranchIterator as Iterator; + +mod r#virtual; +pub use r#virtual::*; + +mod remote; +pub use remote::*; + +mod state; diff --git 
a/src/virtual_branches/base.rs b/src/virtual_branches/base.rs new file mode 100644 index 000000000..26f0bdfa5 --- /dev/null +++ b/src/virtual_branches/base.rs @@ -0,0 +1,657 @@ +use std::time; + +use anyhow::{Context, Result}; +use serde::Serialize; + +use crate::{ + gb_repository, + git::{self, diff}, + keys, + project_repository::{self, LogUntil}, + projects::FetchResult, + reader, sessions, users, + virtual_branches::branch::BranchOwnershipClaims, +}; + +use super::{ + branch, errors, + integration::{update_gitbutler_integration, GITBUTLER_INTEGRATION_REFERENCE}, + target, BranchId, RemoteCommit, +}; + +#[derive(Debug, Serialize, PartialEq, Clone)] +#[serde(rename_all = "camelCase")] +pub struct BaseBranch { + pub branch_name: String, + pub remote_name: String, + pub remote_url: String, + pub base_sha: git::Oid, + pub current_sha: git::Oid, + pub behind: usize, + pub upstream_commits: Vec, + pub recent_commits: Vec, + pub last_fetched_ms: Option, +} + +pub fn get_base_branch_data( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, +) -> Result, errors::GetBaseBranchDataError> { + match gb_repository + .default_target() + .context("failed to get default target")? + { + None => Ok(None), + Some(target) => { + let base = target_to_base_branch(project_repository, &target) + .context("failed to convert default target to base branch")?; + Ok(Some(base)) + } + } +} + +fn go_back_to_integration( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + default_target: &target::Target, +) -> Result { + let statuses = project_repository + .git_repository + .statuses(Some( + git2::StatusOptions::new() + .show(git2::StatusShow::IndexAndWorkdir) + .include_untracked(true), + )) + .context("failed to get status")?; + if !statuses.is_empty() { + return Err(errors::SetBaseBranchError::DirtyWorkingDirectory); + } + + let latest_session = gb_repository + .get_latest_session()? 
+ .context("no session found")?; + let session_reader = sessions::Reader::open(gb_repository, &latest_session)?; + + let all_virtual_branches = super::iterator::BranchIterator::new(&session_reader) + .context("failed to create branch iterator")? + .collect::, reader::Error>>() + .context("failed to read virtual branches")?; + + let applied_virtual_branches = all_virtual_branches + .iter() + .filter(|branch| branch.applied) + .collect::>(); + + let target_commit = project_repository + .git_repository + .find_commit(default_target.sha) + .context("failed to find target commit")?; + + let base_tree = target_commit + .tree() + .context("failed to get base tree from commit")?; + let mut final_tree = target_commit + .tree() + .context("failed to get base tree from commit")?; + for branch in &applied_virtual_branches { + // merge this branches tree with our tree + let branch_head = project_repository + .git_repository + .find_commit(branch.head) + .context("failed to find branch head")?; + let branch_tree = branch_head + .tree() + .context("failed to get branch head tree")?; + let mut result = project_repository + .git_repository + .merge_trees(&base_tree, &final_tree, &branch_tree) + .context("failed to merge")?; + let final_tree_oid = result + .write_tree_to(&project_repository.git_repository) + .context("failed to write tree")?; + final_tree = project_repository + .git_repository + .find_tree(final_tree_oid) + .context("failed to find written tree")?; + } + + project_repository + .git_repository + .checkout_tree(&final_tree) + .force() + .checkout() + .context("failed to checkout tree")?; + + let base = target_to_base_branch(project_repository, default_target)?; + update_gitbutler_integration(gb_repository, project_repository)?; + Ok(base) +} + +pub fn set_base_branch( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + target_branch_ref: &git::RemoteRefname, +) -> Result { + let repo = 
&project_repository.git_repository; + + // if target exists, and it is the same as the requested branch, we should go back + if let Some(target) = gb_repository.default_target()? { + if target.branch.eq(target_branch_ref) { + return go_back_to_integration(gb_repository, project_repository, &target); + } + } + + // lookup a branch by name + let target_branch = match repo.find_branch(&target_branch_ref.clone().into()) { + Ok(branch) => Ok(branch), + Err(git::Error::NotFound(_)) => Err(errors::SetBaseBranchError::BranchNotFound( + target_branch_ref.clone(), + )), + Err(error) => Err(errors::SetBaseBranchError::Other(error.into())), + }?; + + let remote = repo + .find_remote(target_branch_ref.remote()) + .context(format!( + "failed to find remote for branch {}", + target_branch.name().unwrap() + ))?; + let remote_url = remote + .url() + .context(format!( + "failed to get remote url for {}", + target_branch_ref.remote() + ))? + .unwrap(); + + let target_branch_head = target_branch.peel_to_commit().context(format!( + "failed to peel branch {} to commit", + target_branch.name().unwrap() + ))?; + + let current_head = repo.head().context("Failed to get HEAD reference")?; + let current_head_commit = current_head + .peel_to_commit() + .context("Failed to peel HEAD reference to commit")?; + + // calculate the commit as the merge-base between HEAD in project_repository and this target commit + let target_commit_oid = repo + .merge_base(current_head_commit.id(), target_branch_head.id()) + .context(format!( + "Failed to calculate merge base between {} and {}", + current_head_commit.id(), + target_branch_head.id() + ))?; + + let target = target::Target { + branch: target_branch_ref.clone(), + remote_url: remote_url.to_string(), + sha: target_commit_oid, + }; + + let target_writer = target::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create target writer")?; + target_writer.write_default(&target)?; + + let head_name: git::Refname = 
current_head + .name() + .context("Failed to get HEAD reference name")?; + if !head_name + .to_string() + .eq(&GITBUTLER_INTEGRATION_REFERENCE.to_string()) + { + // if there are any commits on the head branch or uncommitted changes in the working directory, we need to + // put them into a virtual branch + + let use_context = project_repository + .project() + .use_diff_context + .unwrap_or(false); + let context_lines = if use_context { 3_u32 } else { 0_u32 }; + let wd_diff = diff::workdir(repo, ¤t_head_commit.id(), context_lines)?; + let wd_diff = diff::diff_files_to_hunks(&wd_diff); + if !wd_diff.is_empty() || current_head_commit.id() != target.sha { + let hunks_by_filepath = + super::virtual_hunks_by_filepath(&project_repository.project().path, &wd_diff); + + // assign ownership to the branch + let ownership = hunks_by_filepath.values().flatten().fold( + BranchOwnershipClaims::default(), + |mut ownership, hunk| { + ownership.put( + &format!("{}:{}", hunk.file_path.display(), hunk.id) + .parse() + .unwrap(), + ); + ownership + }, + ); + + let now_ms = time::UNIX_EPOCH + .elapsed() + .context("failed to get elapsed time")? + .as_millis(); + + let (upstream, upstream_head) = if let git::Refname::Local(head_name) = &head_name { + let upstream_name = target_branch_ref.with_branch(head_name.branch()); + if upstream_name.eq(target_branch_ref) { + (None, None) + } else { + match repo.find_reference(&git::Refname::from(&upstream_name)) { + Ok(upstream) => { + let head = upstream + .peel_to_commit() + .map(|commit| commit.id()) + .context(format!( + "failed to peel upstream {} to commit", + upstream.name().unwrap() + ))?; + Ok((Some(upstream_name), Some(head))) + } + Err(git::Error::NotFound(_)) => Ok((None, None)), + Err(error) => Err(error), + } + .context(format!("failed to find upstream for {}", head_name))? 
+ } + } else { + (None, None) + }; + + let mut branch = branch::Branch { + id: BranchId::generate(), + name: head_name.to_string().replace("refs/heads/", ""), + notes: String::new(), + applied: true, + upstream, + upstream_head, + created_timestamp_ms: now_ms, + updated_timestamp_ms: now_ms, + head: current_head_commit.id(), + tree: super::write_tree_onto_commit( + project_repository, + current_head_commit.id(), + &wd_diff, + )?, + ownership, + order: 0, + selected_for_changes: None, + }; + + let branch_writer = + branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create branch writer")?; + branch_writer.write(&mut branch)?; + } + } + + set_exclude_decoration(project_repository)?; + + super::integration::update_gitbutler_integration(gb_repository, project_repository)?; + + let base = target_to_base_branch(project_repository, &target)?; + Ok(base) +} + +fn set_exclude_decoration(project_repository: &project_repository::Repository) -> Result<()> { + let repo = &project_repository.git_repository; + let mut config = repo.config()?; + config + .set_multivar("log.excludeDecoration", "refs/gitbutler", "refs/gitbutler") + .context("failed to set log.excludeDecoration")?; + Ok(()) +} + +fn _print_tree(repo: &git2::Repository, tree: &git2::Tree) -> Result<()> { + println!("tree id: {}", tree.id()); + for entry in tree { + println!( + " entry: {} {}", + entry.name().unwrap_or_default(), + entry.id() + ); + // get entry contents + let object = entry.to_object(repo).context("failed to get object")?; + let blob = object.as_blob().context("failed to get blob")?; + // convert content to string + if let Ok(content) = std::str::from_utf8(blob.content()) { + println!(" blob: {}", content); + } else { + println!(" blob: BINARY"); + } + } + Ok(()) +} + +// try to update the target branch +// this means that we need to: +// determine if what the target branch is now pointing to is mergeable with our current working directory +// merge the 
target branch into our current working directory +// update the target sha +pub fn update_base_branch( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + user: Option<&users::User>, + signing_key: Option<&keys::PrivateKey>, +) -> Result<(), errors::UpdateBaseBranchError> { + if project_repository.is_resolving() { + return Err(errors::UpdateBaseBranchError::Conflict( + errors::ProjectConflictError { + project_id: project_repository.project().id, + }, + )); + } + + // look up the target and see if there is a new oid + let target = gb_repository + .default_target() + .context("failed to get default target")? + .ok_or_else(|| { + errors::UpdateBaseBranchError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + + let repo = &project_repository.git_repository; + let target_branch = repo + .find_branch(&target.branch.clone().into()) + .context(format!("failed to find branch {}", target.branch))?; + + let new_target_commit = target_branch + .peel_to_commit() + .context(format!("failed to peel branch {} to commit", target.branch))?; + + // if the target has not changed, do nothing + if new_target_commit.id() == target.sha { + return Ok(()); + } + + // ok, target has changed, so now we need to merge it into our current work and update our branches + + // get tree from new target + let new_target_tree = new_target_commit + .tree() + .context("failed to get new target commit tree")?; + + let old_target_tree = repo + .find_commit(target.sha) + .and_then(|commit| commit.tree()) + .context(format!( + "failed to get old target commit tree {}", + target.sha + ))?; + + let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create branch writer")?; + + let use_context = project_repository + .project() + .use_diff_context + .unwrap_or(false); + let context_lines = if use_context { 3_u32 } else { 0_u32 }; + + // 
try to update every branch + let updated_vbranches = super::get_status_by_branch(gb_repository, project_repository)? + .0 + .into_iter() + .map(|(branch, _)| branch) + .map( + |mut branch: branch::Branch| -> Result> { + let branch_tree = repo.find_tree(branch.tree)?; + + let branch_head_commit = repo.find_commit(branch.head).context(format!( + "failed to find commit {} for branch {}", + branch.head, branch.id + ))?; + let branch_head_tree = branch_head_commit.tree().context(format!( + "failed to find tree for commit {} for branch {}", + branch.head, branch.id + ))?; + + let result_integrated_detected = + |mut branch: branch::Branch| -> Result> { + // branch head tree is the same as the new target tree. + // meaning we can safely use the new target commit as the branch head. + + branch.head = new_target_commit.id(); + + // it also means that the branch is fully integrated into the target. + // disconnect it from the upstream + branch.upstream = None; + branch.upstream_head = None; + + let non_commited_files = diff::trees( + &project_repository.git_repository, + &branch_head_tree, + &branch_tree, + context_lines, + )?; + if non_commited_files.is_empty() { + // if there are no commited files, then the branch is fully merged + // and we can delete it. + branch_writer.delete(&branch)?; + project_repository.delete_branch_reference(&branch)?; + Ok(None) + } else { + branch_writer.write(&mut branch)?; + Ok(Some(branch)) + } + }; + + if branch_head_tree.id() == new_target_tree.id() { + return result_integrated_detected(branch); + } + + // try to merge branch head with new target + let mut branch_tree_merge_index = repo + .merge_trees(&old_target_tree, &branch_tree, &new_target_tree) + .context(format!("failed to merge trees for branch {}", branch.id))?; + + if branch_tree_merge_index.has_conflicts() { + // branch tree conflicts with new target, unapply branch for now. we'll handle it later, when user applies it back. 
+ branch.applied = false; + branch_writer.write(&mut branch)?; + return Ok(Some(branch)); + } + + let branch_merge_index_tree_oid = branch_tree_merge_index.write_tree_to(repo)?; + + if branch_merge_index_tree_oid == new_target_tree.id() { + return result_integrated_detected(branch); + } + + if branch.head == target.sha { + // there are no commits on the branch, so we can just update the head to the new target and calculate the new tree + branch.head = new_target_commit.id(); + branch.tree = branch_merge_index_tree_oid; + branch_writer.write(&mut branch)?; + return Ok(Some(branch)); + } + + let mut branch_head_merge_index = repo + .merge_trees(&old_target_tree, &branch_head_tree, &new_target_tree) + .context(format!( + "failed to merge head tree for branch {}", + branch.id + ))?; + + if branch_head_merge_index.has_conflicts() { + // branch commits conflict with new target, make sure the branch is + // unapplied. conflicts witll be dealt with when applying it back. + branch.applied = false; + branch_writer.write(&mut branch)?; + return Ok(Some(branch)); + } + + // branch commits do not conflict with new target, so lets merge them + let branch_head_merge_tree_oid = branch_head_merge_index + .write_tree_to(repo) + .context(format!( + "failed to write head merge index for {}", + branch.id + ))?; + + let ok_with_force_push = project_repository.project().ok_with_force_push; + + let result_merge = |mut branch: branch::Branch| -> Result> { + // branch was pushed to upstream, and user doesn't like force pushing. + // create a merge commit to avoid the need of force pushing then. 
+ let branch_head_merge_tree = repo + .find_tree(branch_head_merge_tree_oid) + .context("failed to find tree")?; + + let new_target_head = project_repository + .commit( + user, + format!( + "Merged {}/{} into {}", + target.branch.remote(), + target.branch.branch(), + branch.name + ) + .as_str(), + &branch_head_merge_tree, + &[&branch_head_commit, &new_target_commit], + signing_key, + ) + .context("failed to commit merge")?; + + branch.head = new_target_head; + branch.tree = branch_merge_index_tree_oid; + branch_writer.write(&mut branch)?; + Ok(Some(branch)) + }; + + if branch.upstream.is_some() && !ok_with_force_push { + return result_merge(branch); + } + + // branch was not pushed to upstream yet. attempt a rebase, + let (_, committer) = project_repository.git_signatures(user)?; + let mut rebase_options = git2::RebaseOptions::new(); + rebase_options.quiet(true); + rebase_options.inmemory(true); + let mut rebase = repo + .rebase( + Some(branch.head), + Some(new_target_commit.id()), + None, + Some(&mut rebase_options), + ) + .context("failed to rebase")?; + + let mut rebase_success = true; + // check to see if these commits have already been pushed + let mut last_rebase_head = branch.head; + while rebase.next().is_some() { + let index = rebase + .inmemory_index() + .context("failed to get inmemory index")?; + if index.has_conflicts() { + rebase_success = false; + break; + } + + if let Ok(commit_id) = rebase.commit(None, &committer.clone().into(), None) { + last_rebase_head = commit_id.into(); + } else { + rebase_success = false; + break; + } + } + + if rebase_success { + // rebase worked out, rewrite the branch head + rebase.finish(None).context("failed to finish rebase")?; + branch.head = last_rebase_head; + branch.tree = branch_merge_index_tree_oid; + branch_writer.write(&mut branch)?; + return Ok(Some(branch)); + } + + // rebase failed, do a merge commit + rebase.abort().context("failed to abort rebase")?; + + result_merge(branch) + }, + ) + .collect::>>()? 
+ .into_iter() + .flatten() + .collect::>(); + + // ok, now all the problematic branches have been unapplied + // now we calculate and checkout new tree for the working directory + + let final_tree = updated_vbranches + .iter() + .filter(|branch| branch.applied) + .fold(new_target_commit.tree(), |final_tree, branch| { + let final_tree = final_tree?; + let branch_tree = repo.find_tree(branch.tree)?; + let mut merge_result = repo.merge_trees(&new_target_tree, &final_tree, &branch_tree)?; + let final_tree_oid = merge_result.write_tree_to(repo)?; + repo.find_tree(final_tree_oid) + }) + .context("failed to calculate final tree")?; + + repo.checkout_tree(&final_tree).force().checkout().context( + "failed to checkout index, this should not have happened, we should have already detected this", + )?; + + // write new target oid + let target_writer = target::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create target writer")?; + target_writer.write_default(&target::Target { + sha: new_target_commit.id(), + ..target + })?; + + super::integration::update_gitbutler_integration(gb_repository, project_repository)?; + + Ok(()) +} + +pub fn target_to_base_branch( + project_repository: &project_repository::Repository, + target: &target::Target, +) -> Result { + let repo = &project_repository.git_repository; + let branch = repo.find_branch(&target.branch.clone().into())?; + let commit = branch.peel_to_commit()?; + let oid = commit.id(); + + // gather a list of commits between oid and target.sha + let upstream_commits = project_repository + .log(oid, project_repository::LogUntil::Commit(target.sha)) + .context("failed to get upstream commits")? + .iter() + .map(super::commit_to_remote_commit) + .collect::>(); + + // get some recent commits + let recent_commits = project_repository + .log(target.sha, LogUntil::Take(20)) + .context("failed to get recent commits")? 
+ .iter() + .map(super::commit_to_remote_commit) + .collect::>(); + + let base = super::BaseBranch { + branch_name: format!("{}/{}", target.branch.remote(), target.branch.branch()), + remote_name: target.branch.remote().to_string(), + remote_url: target.remote_url.clone(), + base_sha: target.sha, + current_sha: oid, + behind: upstream_commits.len(), + upstream_commits, + recent_commits, + last_fetched_ms: project_repository + .project() + .project_data_last_fetch + .as_ref() + .map(FetchResult::timestamp) + .copied() + .map(|t| t.duration_since(time::UNIX_EPOCH).unwrap().as_millis()), + }; + Ok(base) +} diff --git a/src/virtual_branches/branch.rs b/src/virtual_branches/branch.rs new file mode 100644 index 000000000..ab6bf4012 --- /dev/null +++ b/src/virtual_branches/branch.rs @@ -0,0 +1,237 @@ +mod file_ownership; +mod hunk; +mod ownership; +mod reader; +mod writer; + +pub use file_ownership::OwnershipClaim; +pub use hunk::Hunk; +pub use ownership::reconcile_claims; +pub use ownership::BranchOwnershipClaims; +pub use reader::BranchReader as Reader; +pub use writer::BranchWriter as Writer; + +use serde::{Deserialize, Serialize}; + +use anyhow::Result; + +use crate::{git, id::Id}; + +pub type BranchId = Id; + +// this is the struct for the virtual branch data that is stored in our data +// store. it is more or less equivalent to a git branch reference, but it is not +// stored or accessible from the git repository itself. it is stored in our +// session storage under the branches/ directory. 
+#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Default)] +pub struct Branch { + pub id: BranchId, + pub name: String, + pub notes: String, + pub applied: bool, + pub upstream: Option, + // upstream_head is the last commit on we've pushed to the upstream branch + pub upstream_head: Option, + #[serde( + serialize_with = "serialize_u128", + deserialize_with = "deserialize_u128" + )] + pub created_timestamp_ms: u128, + #[serde( + serialize_with = "serialize_u128", + deserialize_with = "deserialize_u128" + )] + pub updated_timestamp_ms: u128, + /// tree is the last git tree written to a session, or merge base tree if this is new. use this for delta calculation from the session data + pub tree: git::Oid, + /// head is id of the last "virtual" commit in this branch + pub head: git::Oid, + pub ownership: BranchOwnershipClaims, + // order is the number by which UI should sort branches + pub order: usize, + // is Some(timestamp), the branch is considered a default destination for new changes. + // if more than one branch is selected, the branch with the highest timestamp wins. 
+ pub selected_for_changes: Option, +} + +fn serialize_u128(x: &u128, s: S) -> Result +where + S: serde::Serializer, +{ + s.serialize_str(&x.to_string()) +} + +fn deserialize_u128<'de, D>(d: D) -> Result +where + D: serde::Deserializer<'de>, +{ + let s = String::deserialize(d)?; + let x: u128 = s.parse().map_err(serde::de::Error::custom)?; + Ok(x) +} + +impl Branch { + pub fn refname(&self) -> git::VirtualRefname { + self.into() + } +} + +#[derive(Debug, Serialize, Deserialize, Default)] +pub struct BranchUpdateRequest { + pub id: BranchId, + pub name: Option, + pub notes: Option, + pub ownership: Option, + pub order: Option, + pub upstream: Option, // just the branch name, so not refs/remotes/origin/branchA, just branchA + pub selected_for_changes: Option, +} + +#[derive(Debug, Serialize, Deserialize, Default)] +pub struct BranchCreateRequest { + pub name: Option, + pub ownership: Option, + pub order: Option, + pub selected_for_changes: Option, +} + +impl Branch { + pub fn from_reader(reader: &crate::reader::Reader<'_>) -> Result { + let results = reader.batch(&[ + "id", + "meta/name", + "meta/notes", + "meta/applied", + "meta/order", + "meta/upstream", + "meta/upstream_head", + "meta/tree", + "meta/head", + "meta/created_timestamp_ms", + "meta/updated_timestamp_ms", + "meta/ownership", + "meta/selected_for_changes", + ])?; + + let id: String = results[0].clone()?.try_into()?; + let id: BranchId = id.parse().map_err(|e| { + crate::reader::Error::Io( + std::io::Error::new(std::io::ErrorKind::Other, format!("id: {}", e)).into(), + ) + })?; + let name: String = results[1].clone()?.try_into()?; + + let notes: String = match results[2].clone() { + Ok(notes) => Ok(notes.try_into()?), + Err(crate::reader::Error::NotFound) => Ok(String::new()), + Err(e) => Err(e), + }?; + + let applied = match results[3].clone() { + Ok(applied) => applied.try_into(), + _ => Ok(false), + } + .unwrap_or(false); + + let order: usize = match results[4].clone() { + Ok(order) => 
Ok(order.try_into()?), + Err(crate::reader::Error::NotFound) => Ok(0), + Err(e) => Err(e), + }?; + + let upstream = match results[5].clone() { + Ok(crate::reader::Content::UTF8(upstream)) => { + if upstream.is_empty() { + Ok(None) + } else { + upstream + .parse::() + .map(Some) + .map_err(|e| { + crate::reader::Error::Io( + std::io::Error::new( + std::io::ErrorKind::Other, + format!("meta/upstream: {}", e), + ) + .into(), + ) + }) + } + } + Ok(_) | Err(crate::reader::Error::NotFound) => Ok(None), + Err(e) => Err(e), + }?; + + let upstream_head = match results[6].clone() { + Ok(crate::reader::Content::UTF8(upstream_head)) => { + upstream_head.parse().map(Some).map_err(|e| { + crate::reader::Error::Io( + std::io::Error::new( + std::io::ErrorKind::Other, + format!("meta/upstream_head: {}", e), + ) + .into(), + ) + }) + } + Ok(_) | Err(crate::reader::Error::NotFound) => Ok(None), + Err(e) => Err(e), + }?; + + let tree: String = results[7].clone()?.try_into()?; + let head: String = results[8].clone()?.try_into()?; + let created_timestamp_ms = results[9].clone()?.try_into()?; + let updated_timestamp_ms = results[10].clone()?.try_into()?; + + let ownership_string: String = results[11].clone()?.try_into()?; + let ownership = ownership_string.parse().map_err(|e| { + crate::reader::Error::Io( + std::io::Error::new(std::io::ErrorKind::Other, format!("meta/ownership: {}", e)) + .into(), + ) + })?; + + let selected_for_changes = match results[12].clone() { + Ok(raw_ts) => { + let ts = raw_ts.try_into().map_err(|e| { + crate::reader::Error::Io( + std::io::Error::new( + std::io::ErrorKind::Other, + format!("meta/selected_for_changes: {}", e), + ) + .into(), + ) + })?; + Ok(Some(ts)) + } + Err(crate::reader::Error::NotFound) => Ok(None), + Err(e) => Err(e), + }?; + + Ok(Self { + id, + name, + notes, + applied, + upstream, + upstream_head, + tree: tree.parse().map_err(|e| { + crate::reader::Error::Io( + std::io::Error::new(std::io::ErrorKind::Other, format!("meta/tree: {}", e)) + 
.into(), + ) + })?, + head: head.parse().map_err(|e| { + crate::reader::Error::Io( + std::io::Error::new(std::io::ErrorKind::Other, format!("meta/head: {}", e)) + .into(), + ) + })?, + created_timestamp_ms, + updated_timestamp_ms, + ownership, + order, + selected_for_changes, + }) + } +} diff --git a/src/virtual_branches/branch/file_ownership.rs b/src/virtual_branches/branch/file_ownership.rs new file mode 100644 index 000000000..e040d2a47 --- /dev/null +++ b/src/virtual_branches/branch/file_ownership.rs @@ -0,0 +1,178 @@ +use std::{fmt, path, str::FromStr, vec}; + +use anyhow::{Context, Result}; + +use super::hunk::Hunk; + +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct OwnershipClaim { + pub file_path: path::PathBuf, + pub hunks: Vec, +} + +impl FromStr for OwnershipClaim { + type Err = anyhow::Error; + + fn from_str(value: &str) -> std::result::Result { + let mut file_path_parts = vec![]; + let mut ranges = vec![]; + for part in value.split(':').rev() { + match part + .split(',') + .map(str::parse) + .collect::>>() + { + Ok(rr) => ranges.extend(rr), + Err(_) => { + file_path_parts.insert(0, part); + } + } + } + + if ranges.is_empty() { + Err(anyhow::anyhow!("ownership ranges cannot be empty")) + } else { + Ok(Self { + file_path: file_path_parts + .join(":") + .parse() + .context(format!("failed to parse file path from {}", value))?, + hunks: ranges.clone(), + }) + } + } +} + +impl OwnershipClaim { + pub fn is_full(&self) -> bool { + self.hunks.is_empty() + } + + pub fn contains(&self, another: &OwnershipClaim) -> bool { + if !self.file_path.eq(&another.file_path) { + return false; + } + + if self.hunks.is_empty() { + // full ownership contains any partial ownership + return true; + } + + if another.hunks.is_empty() { + // partial ownership contains no full ownership + return false; + } + + another.hunks.iter().all(|hunk| self.hunks.contains(hunk)) + } + + // return a copy of self, with another ranges added + pub fn plus(&self, another: &OwnershipClaim) -> 
OwnershipClaim { + if !self.file_path.eq(&another.file_path) { + return self.clone(); + } + + if self.hunks.is_empty() { + // full ownership + partial ownership = full ownership + return self.clone(); + } + + if another.hunks.is_empty() { + // partial ownership + full ownership = full ownership + return another.clone(); + } + + let mut hunks = self + .hunks + .iter() + .filter(|hunk| !another.hunks.contains(hunk)) + .cloned() + .collect::>(); + + another.hunks.iter().for_each(|hunk| { + hunks.insert(0, hunk.clone()); + }); + + OwnershipClaim { + file_path: self.file_path.clone(), + hunks, + } + } + + // returns (taken, remaining) + // if all of the ranges are removed, return None + pub fn minus( + &self, + another: &OwnershipClaim, + ) -> (Option, Option) { + if !self.file_path.eq(&another.file_path) { + // no changes + return (None, Some(self.clone())); + } + + if another.hunks.is_empty() { + // any ownership - full ownership = empty ownership + return (Some(self.clone()), None); + } + + if self.hunks.is_empty() { + // full ownership - partial ownership = full ownership, since we don't know all the + // hunks. 
+ return (None, Some(self.clone())); + } + + let mut left = self.hunks.clone(); + let mut taken = vec![]; + for range in &another.hunks { + left = left + .iter() + .flat_map(|r: &Hunk| -> Vec { + if r.eq(range) { + taken.push(r.clone()); + vec![] + } else { + vec![r.clone()] + } + }) + .collect(); + } + + ( + if taken.is_empty() { + None + } else { + Some(OwnershipClaim { + file_path: self.file_path.clone(), + hunks: taken, + }) + }, + if left.is_empty() { + None + } else { + Some(OwnershipClaim { + file_path: self.file_path.clone(), + hunks: left, + }) + }, + ) + } +} + +impl fmt::Display for OwnershipClaim { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result { + if self.hunks.is_empty() { + write!(f, "{}", self.file_path.display()) + } else { + write!( + f, + "{}:{}", + self.file_path.display(), + self.hunks + .iter() + .map(ToString::to_string) + .collect::>() + .join(",") + ) + } + } +} diff --git a/src/virtual_branches/branch/hunk.rs b/src/virtual_branches/branch/hunk.rs new file mode 100644 index 000000000..a2271c5d5 --- /dev/null +++ b/src/virtual_branches/branch/hunk.rs @@ -0,0 +1,169 @@ +use std::{fmt::Display, ops::RangeInclusive, str::FromStr}; + +use anyhow::{anyhow, Context, Result}; + +use crate::git::diff; + +#[derive(Debug, Eq, Clone)] +pub struct Hunk { + pub hash: Option, + pub timestamp_ms: Option, + pub start: u32, + pub end: u32, +} + +impl From<&diff::GitHunk> for Hunk { + fn from(hunk: &diff::GitHunk) -> Self { + Hunk { + start: hunk.new_start, + end: hunk.new_start + hunk.new_lines, + hash: Some(Hunk::hash(&hunk.diff)), + timestamp_ms: None, + } + } +} + +impl PartialEq for Hunk { + fn eq(&self, other: &Self) -> bool { + if self.hash.is_some() && other.hash.is_some() { + self.hash == other.hash && self.start == other.start && self.end == other.end + } else { + self.start == other.start && self.end == other.end + } + } +} + +impl From> for Hunk { + fn from(range: RangeInclusive) -> Self { + Hunk { + start: *range.start(), + end: 
*range.end(), + hash: None, + timestamp_ms: None, + } + } +} + +impl FromStr for Hunk { + type Err = anyhow::Error; + + fn from_str(s: &str) -> std::result::Result { + let mut range = s.split('-'); + let start = if let Some(raw_start) = range.next() { + raw_start + .parse::() + .context(format!("failed to parse start of range: {}", s)) + } else { + Err(anyhow!("invalid range: {}", s)) + }?; + + let end = if let Some(raw_end) = range.next() { + raw_end + .parse::() + .context(format!("failed to parse end of range: {}", s)) + } else { + Err(anyhow!("invalid range: {}", s)) + }?; + + let hash = if let Some(raw_hash) = range.next() { + if raw_hash.is_empty() { + None + } else { + Some(raw_hash.to_string()) + } + } else { + None + }; + + let timestamp_ms = if let Some(raw_timestamp_ms) = range.next() { + Some( + raw_timestamp_ms + .parse::() + .context(format!("failed to parse timestamp_ms of range: {}", s))?, + ) + } else { + None + }; + + Hunk::new(start, end, hash, timestamp_ms) + } +} + +impl Display for Hunk { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}-{}", self.start, self.end)?; + match (self.hash.as_ref(), self.timestamp_ms.as_ref()) { + (Some(hash), Some(timestamp_ms)) => write!(f, "-{}-{}", hash, timestamp_ms), + (Some(hash), None) => write!(f, "-{}", hash), + (None, Some(timestamp_ms)) => write!(f, "--{}", timestamp_ms), + (None, None) => Ok(()), + } + } +} + +impl Hunk { + pub fn new( + start: u32, + end: u32, + hash: Option, + timestamp_ms: Option, + ) -> Result { + if start > end { + Err(anyhow!("invalid range: {}-{}", start, end)) + } else { + Ok(Hunk { + hash, + timestamp_ms, + start, + end, + }) + } + } + + pub fn with_hash(&self, hash: &str) -> Self { + Hunk { + start: self.start, + end: self.end, + hash: Some(hash.to_string()), + timestamp_ms: self.timestamp_ms, + } + } + + pub fn with_timestamp(&self, timestamp_ms: u128) -> Self { + Hunk { + start: self.start, + end: self.end, + hash: self.hash.clone(), + 
timestamp_ms: Some(timestamp_ms), + } + } + + pub fn timestam_ms(&self) -> Option { + self.timestamp_ms + } + + pub fn contains(&self, line: u32) -> bool { + self.start <= line && self.end >= line + } + + pub fn intersects(&self, another: &diff::GitHunk) -> bool { + self.contains(another.new_start) + || self.contains(another.new_start + another.new_lines) + || another.contains(self.start) + || another.contains(self.end) + } + + pub fn shallow_eq(&self, other: &diff::GitHunk) -> bool { + self.start == other.new_start && self.end == other.new_start + other.new_lines + } + + pub fn hash(diff: &str) -> String { + let addition = diff + .lines() + .skip(1) // skip the first line which is the diff header + .filter(|line| line.starts_with('+') || line.starts_with('-')) // exclude context lines + .collect::>() + .join("\n"); + format!("{:x}", md5::compute(addition)) + } +} diff --git a/src/virtual_branches/branch/ownership.rs b/src/virtual_branches/branch/ownership.rs new file mode 100644 index 000000000..dda2e78ff --- /dev/null +++ b/src/virtual_branches/branch/ownership.rs @@ -0,0 +1,183 @@ +use std::{collections::HashSet, fmt, str::FromStr}; + +use itertools::Itertools; +use serde::{Deserialize, Serialize, Serializer}; + +use super::{Branch, OwnershipClaim}; +use anyhow::Result; + +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct BranchOwnershipClaims { + pub claims: Vec, +} + +impl Serialize for BranchOwnershipClaims { + fn serialize(&self, serializer: S) -> Result { + serializer.serialize_str(self.to_string().as_str()) + } +} + +impl<'de> Deserialize<'de> for BranchOwnershipClaims { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + s.parse().map_err(serde::de::Error::custom) + } +} + +impl fmt::Display for BranchOwnershipClaims { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + for file in &self.claims { + writeln!(f, "{}", file)?; + } + Ok(()) + } +} + +impl 
FromStr for BranchOwnershipClaims { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + let mut ownership = BranchOwnershipClaims::default(); + for line in s.lines() { + ownership.claims.push(line.parse()?); + } + Ok(ownership) + } +} + +impl BranchOwnershipClaims { + pub fn is_empty(&self) -> bool { + self.claims.is_empty() + } + + pub fn contains(&self, another: &BranchOwnershipClaims) -> bool { + if another.is_empty() { + return true; + } + + if self.is_empty() { + return false; + } + + for file_ownership in &another.claims { + let mut found = false; + for self_file_ownership in &self.claims { + if self_file_ownership.file_path == file_ownership.file_path + && self_file_ownership.contains(file_ownership) + { + found = true; + break; + } + } + if !found { + return false; + } + } + + true + } + + pub fn put(&mut self, ownership: &OwnershipClaim) { + let target = self + .claims + .iter() + .filter(|o| !o.is_full()) // only consider explicit ownership + .find(|o| o.file_path == ownership.file_path) + .cloned(); + + self.claims + .retain(|o| o.is_full() || o.file_path != ownership.file_path); + + if let Some(target) = target { + self.claims.insert(0, target.plus(ownership)); + } else { + self.claims.insert(0, ownership.clone()); + } + } + + // modifies the ownership in-place and returns the file ownership that was taken, if any. 
+ pub fn take(&mut self, ownership: &OwnershipClaim) -> Vec { + let mut taken = Vec::new(); + let mut remaining = Vec::new(); + for file_ownership in &self.claims { + if file_ownership.file_path == ownership.file_path { + let (taken_ownership, remaining_ownership) = file_ownership.minus(ownership); + if let Some(taken_ownership) = taken_ownership { + taken.push(taken_ownership); + } + if let Some(remaining_ownership) = remaining_ownership { + remaining.push(remaining_ownership); + } + } else { + remaining.push(file_ownership.clone()); + } + } + + self.claims = remaining; + + taken + } +} + +#[derive(Debug, Clone)] +pub struct ClaimOutcome { + pub updated_branch: Branch, + pub removed_claims: Vec, +} +pub fn reconcile_claims( + all_branches: Vec, + claiming_branch: &Branch, + new_claims: &[OwnershipClaim], +) -> Result> { + let mut other_branches = all_branches + .into_iter() + .filter(|branch| branch.applied) + .filter(|branch| branch.id != claiming_branch.id) + .collect::>(); + + let mut claim_outcomes: Vec = Vec::new(); + + for branch in &mut other_branches { + let taken = new_claims + .iter() + .flat_map(|c| branch.ownership.take(c)) + .collect_vec(); + claim_outcomes.push(ClaimOutcome { + updated_branch: branch.clone(), + removed_claims: taken, + }); + } + + // Add the claiming branch to the list of outcomes + claim_outcomes.push(ClaimOutcome { + updated_branch: Branch { + ownership: BranchOwnershipClaims { + claims: new_claims.to_owned(), + }, + ..claiming_branch.clone() + }, + removed_claims: Vec::new(), + }); + + // Check the outcomes consistency and error out if they would result in a hunk being claimed by multiple branches + let mut seen = HashSet::new(); + for outcome in claim_outcomes.clone() { + for claim in outcome.updated_branch.ownership.claims { + for hunk in claim.hunks { + if !seen.insert(format!( + "{}-{}-{}", + claim.file_path.to_str().unwrap_or_default(), + hunk.start, + hunk.end + )) { + return Err(anyhow::anyhow!("inconsistent ownership 
claims")); + } + } + } + } + + Ok(claim_outcomes) +} diff --git a/src/virtual_branches/branch/reader.rs b/src/virtual_branches/branch/reader.rs new file mode 100644 index 000000000..cebc0c009 --- /dev/null +++ b/src/virtual_branches/branch/reader.rs @@ -0,0 +1,19 @@ +use crate::{reader, sessions}; + +use super::{Branch, BranchId}; + +pub struct BranchReader<'r> { + reader: &'r reader::Reader<'r>, +} + +impl<'r> BranchReader<'r> { + pub fn new(reader: &'r sessions::Reader<'r>) -> Self { + Self { + reader: reader.reader(), + } + } + + pub fn read(&self, id: &BranchId) -> Result { + Branch::from_reader(&self.reader.sub(format!("branches/{}", id))) + } +} diff --git a/src/virtual_branches/branch/writer.rs b/src/virtual_branches/branch/writer.rs new file mode 100644 index 000000000..821bdc8fe --- /dev/null +++ b/src/virtual_branches/branch/writer.rs @@ -0,0 +1,160 @@ +use std::path; + +use anyhow::Result; + +use crate::{gb_repository, reader, virtual_branches::state::VirtualBranchesHandle, writer}; + +use super::Branch; + +pub struct BranchWriter<'writer> { + repository: &'writer gb_repository::Repository, + writer: writer::DirWriter, + reader: reader::Reader<'writer>, + state_handle: VirtualBranchesHandle, +} + +impl<'writer> BranchWriter<'writer> { + pub fn new>( + repository: &'writer gb_repository::Repository, + path: P, + ) -> Result { + let reader = reader::Reader::open(repository.root())?; + let writer = writer::DirWriter::open(repository.root())?; + let state_handle = VirtualBranchesHandle::new(path.as_ref()); + Ok(Self { + repository, + writer, + reader, + state_handle, + }) + } + + pub fn delete(&self, branch: &Branch) -> Result<()> { + match self + .reader + .sub(format!("branches/{}", branch.id)) + .read("id") + { + Ok(_) => { + self.repository.mark_active_session()?; + let _lock = self.repository.lock(); + self.writer.remove(format!("branches/{}", branch.id))?; + // Write in the state file as well + let _ = self.state_handle.remove_branch(branch.id); + 
Ok(()) + } + Err(reader::Error::NotFound) => Ok(()), + Err(err) => Err(err.into()), + } + } + + pub fn write(&self, branch: &mut Branch) -> Result<()> { + let reader = self.reader.sub(format!("branches/{}", branch.id)); + match Branch::from_reader(&reader) { + Ok(existing) if existing.eq(branch) => return Ok(()), + Ok(_) | Err(reader::Error::NotFound) => {} + Err(err) => return Err(err.into()), + } + + self.repository.mark_active_session()?; + + branch.updated_timestamp_ms = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH)? + .as_millis(); + + let mut batch = vec![]; + + batch.push(writer::BatchTask::Write( + format!("branches/{}/id", branch.id), + branch.id.to_string(), + )); + + batch.push(writer::BatchTask::Write( + format!("branches/{}/meta/name", branch.id), + branch.name.clone(), + )); + + batch.push(writer::BatchTask::Write( + format!("branches/{}/meta/notes", branch.id), + branch.notes.clone(), + )); + + batch.push(writer::BatchTask::Write( + format!("branches/{}/meta/order", branch.id), + branch.order.to_string(), + )); + + batch.push(writer::BatchTask::Write( + format!("branches/{}/meta/applied", branch.id), + branch.applied.to_string(), + )); + + if let Some(upstream) = &branch.upstream { + batch.push(writer::BatchTask::Write( + format!("branches/{}/meta/upstream", branch.id), + upstream.to_string(), + )); + } else { + batch.push(writer::BatchTask::Remove(format!( + "branches/{}/meta/upstream", + branch.id + ))); + } + + if let Some(upstream_head) = &branch.upstream_head { + batch.push(writer::BatchTask::Write( + format!("branches/{}/meta/upstream_head", branch.id), + upstream_head.to_string(), + )); + } else { + batch.push(writer::BatchTask::Remove(format!( + "branches/{}/meta/upstream_head", + branch.id + ))); + } + + batch.push(writer::BatchTask::Write( + format!("branches/{}/meta/tree", branch.id), + branch.tree.to_string(), + )); + + batch.push(writer::BatchTask::Write( + format!("branches/{}/meta/head", branch.id), + 
branch.head.to_string(), + )); + + batch.push(writer::BatchTask::Write( + format!("branches/{}/meta/created_timestamp_ms", branch.id), + branch.created_timestamp_ms.to_string(), + )); + + batch.push(writer::BatchTask::Write( + format!("branches/{}/meta/updated_timestamp_ms", branch.id), + branch.updated_timestamp_ms.to_string(), + )); + + batch.push(writer::BatchTask::Write( + format!("branches/{}/meta/ownership", branch.id), + branch.ownership.to_string(), + )); + + if let Some(selected_for_changes) = branch.selected_for_changes { + batch.push(writer::BatchTask::Write( + format!("branches/{}/meta/selected_for_changes", branch.id), + selected_for_changes.to_string(), + )); + } else { + batch.push(writer::BatchTask::Remove(format!( + "branches/{}/meta/selected_for_changes", + branch.id + ))); + } + + self.writer.batch(&batch)?; + + // Write in the state file as well + self.state_handle.set_branch(branch.clone())?; + + Ok(()) + } +} diff --git a/src/virtual_branches/context.rs b/src/virtual_branches/context.rs new file mode 100644 index 000000000..3733fc587 --- /dev/null +++ b/src/virtual_branches/context.rs @@ -0,0 +1,124 @@ +use crate::git::diff; + +pub fn hunk_with_context( + hunk_diff: &str, + hunk_old_start_line: usize, + hunk_new_start_line: usize, + is_binary: bool, + context_lines: usize, + file_lines_before: &[&str], + change_type: diff::ChangeType, +) -> diff::GitHunk { + let diff_lines = hunk_diff + .lines() + .map(std::string::ToString::to_string) + .collect::>(); + if diff_lines.is_empty() { + #[allow(clippy::cast_possible_truncation)] + return diff::GitHunk { + diff: hunk_diff.to_owned(), + old_start: hunk_old_start_line as u32, + old_lines: 0, + new_start: hunk_new_start_line as u32, + new_lines: 0, + binary: is_binary, + change_type, + }; + } + + let new_file = hunk_old_start_line == 0; + let deleted_file = hunk_new_start_line == 0; + + let removed_count = diff_lines + .iter() + .filter(|line| line.starts_with('-')) + .count(); + let added_count = 
diff_lines + .iter() + .filter(|line| line.starts_with('+')) + .count(); + + // Get context lines before the diff + let mut context_before = Vec::new(); + let before_context_ending_index = if removed_count == 0 { + // Compensate for when the removed_count is 0 + hunk_old_start_line + } else { + hunk_old_start_line.saturating_sub(1) + }; + let before_context_starting_index = before_context_ending_index.saturating_sub(context_lines); + + for index in before_context_starting_index..before_context_ending_index { + if let Some(l) = file_lines_before.get(index) { + let mut s = (*l).to_string(); + s.insert(0, ' '); + context_before.push(s); + } + } + + // Get context lines after the diff + let mut context_after = Vec::new(); + let after_context_starting_index = before_context_ending_index + removed_count; + let after_context_ending_index = after_context_starting_index + context_lines; + + for index in after_context_starting_index..after_context_ending_index { + if let Some(l) = file_lines_before.get(index) { + let mut s = (*l).to_string(); + s.insert(0, ' '); + context_after.push(s); + } + } + + let start_line_before = if new_file { + // If we've created a new file, start_line_before should be 0 + 0 + } else { + before_context_starting_index + 1 + }; + + let start_line_after = if deleted_file { + // If we've deleted a new file, start_line_after should be 0 + 0 + } else if added_count == 0 { + // Compensate for when the added_count is 0 + hunk_new_start_line.saturating_sub(context_before.len()) + 1 + } else { + hunk_new_start_line.saturating_sub(context_before.len()) + }; + + let line_count_before = removed_count + context_before.len() + context_after.len(); + let line_count_after = added_count + context_before.len() + context_after.len(); + let header = format!( + "@@ -{},{} +{},{} @@", + start_line_before, line_count_before, start_line_after, line_count_after + ); + + let body = &diff_lines[1..]; + // Update unidiff body with context lines + let mut b = Vec::new(); + 
b.extend(context_before.clone()); + b.extend_from_slice(body); + b.extend(context_after.clone()); + let body = b; + + // Construct a new diff with updated header and body + let mut diff_lines = Vec::new(); + diff_lines.push(header); + diff_lines.extend(body); + let mut diff = diff_lines.join("\n"); + // Add trailing newline + diff.push('\n'); + + #[allow(clippy::cast_possible_truncation)] + let hunk = diff::GitHunk { + diff, + old_start: start_line_before as u32, + old_lines: line_count_before as u32, + new_start: start_line_after as u32, + new_lines: line_count_after as u32, + binary: is_binary, + change_type, + }; + + hunk +} diff --git a/src/virtual_branches/controller.rs b/src/virtual_branches/controller.rs new file mode 100644 index 000000000..0c09dc3ef --- /dev/null +++ b/src/virtual_branches/controller.rs @@ -0,0 +1,1112 @@ +use std::{collections::HashMap, path, sync::Arc}; + +use anyhow::Context; +use tokio::{sync::Semaphore, task::JoinHandle}; + +use crate::{ + askpass::AskpassBroker, + error::Error, + gb_repository, git, keys, project_repository, + projects::{self, ProjectId}, + users, + virtual_branches::state::{VirtualBranches, VirtualBranchesHandle}, +}; + +use super::{ + branch::{BranchId, BranchOwnershipClaims}, + errors::{ + self, FetchFromTargetError, GetBaseBranchDataError, GetRemoteBranchDataError, + IsRemoteBranchMergableError, ListRemoteBranchesError, + }, + target_to_base_branch, BaseBranch, RemoteBranchFile, +}; + +#[derive(Clone)] +pub struct Controller { + local_data_dir: path::PathBuf, + projects: projects::Controller, + users: users::Controller, + keys: keys::Controller, + helper: git::credentials::Helper, + + by_project_id: Arc>>, +} + +impl Controller { + pub fn new( + local_data_dir: path::PathBuf, + projects: projects::Controller, + users: users::Controller, + keys: keys::Controller, + helper: git::credentials::Helper, + ) -> Self { + Self { + by_project_id: Arc::new(tokio::sync::Mutex::new(HashMap::new())), + + local_data_dir, + 
projects, + users, + keys, + helper, + } + } + + async fn inner(&self, project_id: &ProjectId) -> ControllerInner { + self.by_project_id + .lock() + .await + .entry(*project_id) + .or_insert_with(|| { + ControllerInner::new( + &self.local_data_dir, + &self.projects, + &self.users, + &self.keys, + &self.helper, + ) + }) + .clone() + } + + pub async fn create_commit( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + message: &str, + ownership: Option<&BranchOwnershipClaims>, + run_hooks: bool, + ) -> Result> { + self.inner(project_id) + .await + .create_commit(project_id, branch_id, message, ownership, run_hooks) + .await + } + + pub async fn can_apply_remote_branch( + &self, + project_id: &ProjectId, + branch_name: &git::RemoteRefname, + ) -> Result> { + self.inner(project_id) + .await + .can_apply_remote_branch(project_id, branch_name) + } + + pub async fn can_apply_virtual_branch( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + ) -> Result { + self.inner(project_id) + .await + .can_apply_virtual_branch(project_id, branch_id) + } + + /// Retrieves the virtual branches state from the gitbutler repository (legacy state) and persists it into a flat TOML file + pub async fn save_vbranches_state( + &self, + project_id: &ProjectId, + branch_ids: Vec, + ) -> Result<(), Error> { + let vbranches_state = self + .inner(project_id) + .await + .get_vbranches_state(project_id, branch_ids)?; + let project = self.projects.get(project_id).map_err(Error::from)?; + // TODO: this should be constructed somewhere else + let state_handle = VirtualBranchesHandle::new(project.path.join(".git").as_path()); + if let Some(default_target) = vbranches_state.default_target { + state_handle.set_default_target(default_target)?; + } + for (id, target) in vbranches_state.branch_targets { + state_handle.set_branch_target(id, target)?; + } + for (_, branch) in vbranches_state.branches { + state_handle.set_branch(branch)?; + } + Ok(()) + } + + pub async fn list_virtual_branches( 
+ &self, + project_id: &ProjectId, + ) -> Result< + (Vec, bool, Vec), + ControllerError, + > { + self.inner(project_id) + .await + .list_virtual_branches(project_id) + .await + } + + pub async fn create_virtual_branch( + &self, + project_id: &ProjectId, + create: &super::branch::BranchCreateRequest, + ) -> Result> { + self.inner(project_id) + .await + .create_virtual_branch(project_id, create) + .await + } + + pub async fn create_virtual_branch_from_branch( + &self, + project_id: &ProjectId, + branch: &git::Refname, + ) -> Result> { + self.inner(project_id) + .await + .create_virtual_branch_from_branch(project_id, branch) + .await + } + + pub async fn get_base_branch_data( + &self, + project_id: &ProjectId, + ) -> Result, ControllerError> { + self.inner(project_id) + .await + .get_base_branch_data(project_id) + } + + pub async fn list_remote_commit_files( + &self, + project_id: &ProjectId, + commit_oid: git::Oid, + ) -> Result, Error> { + self.inner(project_id) + .await + .list_remote_commit_files(project_id, commit_oid) + } + + pub async fn set_base_branch( + &self, + project_id: &ProjectId, + target_branch: &git::RemoteRefname, + ) -> Result> { + self.inner(project_id) + .await + .set_base_branch(project_id, target_branch) + } + + pub async fn merge_virtual_branch_upstream( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + ) -> Result<(), ControllerError> { + self.inner(project_id) + .await + .merge_virtual_branch_upstream(project_id, branch_id) + .await + } + + pub async fn update_base_branch( + &self, + project_id: &ProjectId, + ) -> Result<(), ControllerError> { + self.inner(project_id) + .await + .update_base_branch(project_id) + .await + } + + pub async fn update_virtual_branch( + &self, + project_id: &ProjectId, + branch_update: super::branch::BranchUpdateRequest, + ) -> Result<(), ControllerError> { + self.inner(project_id) + .await + .update_virtual_branch(project_id, branch_update) + .await + } + pub async fn delete_virtual_branch( + &self, + 
project_id: &ProjectId, + branch_id: &BranchId, + ) -> Result<(), ControllerError> { + self.inner(project_id) + .await + .delete_virtual_branch(project_id, branch_id) + .await + } + + pub async fn apply_virtual_branch( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + ) -> Result<(), ControllerError> { + self.inner(project_id) + .await + .apply_virtual_branch(project_id, branch_id) + .await + } + + pub async fn unapply_ownership( + &self, + project_id: &ProjectId, + ownership: &BranchOwnershipClaims, + ) -> Result<(), ControllerError> { + self.inner(project_id) + .await + .unapply_ownership(project_id, ownership) + .await + } + + pub async fn reset_files( + &self, + project_id: &ProjectId, + files: &Vec, + ) -> Result<(), ControllerError> { + self.inner(project_id) + .await + .reset_files(project_id, files) + .await + } + + pub async fn amend( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + ownership: &BranchOwnershipClaims, + ) -> Result> { + self.inner(project_id) + .await + .amend(project_id, branch_id, ownership) + .await + } + + pub async fn reset_virtual_branch( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + target_commit_oid: git::Oid, + ) -> Result<(), ControllerError> { + self.inner(project_id) + .await + .reset_virtual_branch(project_id, branch_id, target_commit_oid) + .await + } + + pub async fn unapply_virtual_branch( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + ) -> Result<(), ControllerError> { + self.inner(project_id) + .await + .unapply_virtual_branch(project_id, branch_id) + .await + } + + pub async fn push_virtual_branch( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + with_force: bool, + askpass: Option<(AskpassBroker, Option)>, + ) -> Result<(), ControllerError> { + self.inner(project_id) + .await + .push_virtual_branch(project_id, branch_id, with_force, askpass) + .await + } + + pub async fn cherry_pick( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + commit_oid: 
git::Oid, + ) -> Result, ControllerError> { + self.inner(project_id) + .await + .cherry_pick(project_id, branch_id, commit_oid) + .await + } + + pub async fn list_remote_branches( + &self, + project_id: &ProjectId, + ) -> Result, ControllerError> { + self.inner(project_id) + .await + .list_remote_branches(project_id) + } + + pub async fn get_remote_branch_data( + &self, + project_id: &ProjectId, + refname: &git::Refname, + ) -> Result> { + self.inner(project_id) + .await + .get_remote_branch_data(project_id, refname) + } + + pub async fn squash( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + commit_oid: git::Oid, + ) -> Result<(), ControllerError> { + self.inner(project_id) + .await + .squash(project_id, branch_id, commit_oid) + .await + } + + pub async fn update_commit_message( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + commit_oid: git::Oid, + message: &str, + ) -> Result<(), ControllerError> { + self.inner(project_id) + .await + .update_commit_message(project_id, branch_id, commit_oid, message) + .await + } + + pub async fn fetch_from_target( + &self, + project_id: &ProjectId, + askpass: Option<(AskpassBroker, String)>, + ) -> Result> { + self.inner(project_id) + .await + .fetch_from_target(project_id, askpass) + .await + } + + pub async fn move_commit( + &self, + project_id: &ProjectId, + target_branch_id: &BranchId, + commit_oid: git::Oid, + ) -> Result<(), ControllerError> { + self.inner(project_id) + .await + .move_commit(project_id, target_branch_id, commit_oid) + .await + } +} + +#[derive(Clone)] +struct ControllerInner { + local_data_dir: path::PathBuf, + semaphore: Arc, + + projects: projects::Controller, + users: users::Controller, + keys: keys::Controller, + helper: git::credentials::Helper, +} + +#[derive(Debug, thiserror::Error)] +pub enum ControllerError +where + E: Into, +{ + #[error(transparent)] + VerifyError(#[from] errors::VerifyError), + #[error(transparent)] + Action(E), + #[error(transparent)] + User(#[from] 
Error), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +impl ControllerInner { + pub fn new( + data_dir: &path::Path, + projects: &projects::Controller, + users: &users::Controller, + keys: &keys::Controller, + helper: &git::credentials::Helper, + ) -> Self { + Self { + local_data_dir: data_dir.to_path_buf(), + semaphore: Arc::new(Semaphore::new(1)), + projects: projects.clone(), + users: users.clone(), + keys: keys.clone(), + helper: helper.clone(), + } + } + + pub async fn create_commit( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + message: &str, + ownership: Option<&BranchOwnershipClaims>, + run_hooks: bool, + ) -> Result> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, user| { + let signing_key = project_repository + .config() + .sign_commits() + .context("failed to get sign commits option")? + .then(|| { + self.keys + .get_or_create() + .context("failed to get private key") + }) + .transpose()?; + + super::commit( + gb_repository, + project_repository, + branch_id, + message, + ownership, + signing_key.as_ref(), + user, + run_hooks, + ) + .map_err(Into::into) + }) + } + + pub fn can_apply_remote_branch( + &self, + project_id: &ProjectId, + branch_name: &git::RemoteRefname, + ) -> Result> { + let project = self.projects.get(project_id).map_err(Error::from)?; + let project_repository = + project_repository::Repository::open(&project).map_err(Error::from)?; + let user = self.users.get_user().map_err(Error::from)?; + let gb_repository = gb_repository::Repository::open( + &self.local_data_dir, + &project_repository, + user.as_ref(), + ) + .context("failed to open gitbutler repository")?; + super::is_remote_branch_mergeable(&gb_repository, &project_repository, branch_name) + .map_err(ControllerError::Action) + } + + pub fn can_apply_virtual_branch( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + ) -> Result { + let project = 
self.projects.get(project_id)?; + let project_repository = project_repository::Repository::open(&project)?; + let user = self.users.get_user().context("failed to get user")?; + let gb_repository = gb_repository::Repository::open( + &self.local_data_dir, + &project_repository, + user.as_ref(), + ) + .context("failed to open gitbutler repository")?; + super::is_virtual_branch_mergeable(&gb_repository, &project_repository, branch_id) + .map_err(Into::into) + } + + /// Retrieves the virtual branches state from the gitbutler repository (legacy state) + pub fn get_vbranches_state( + &self, + project_id: &ProjectId, + branch_ids: Vec, + ) -> Result { + let project = self.projects.get(project_id)?; + let project_repository = project_repository::Repository::open(&project)?; + let user = self.users.get_user().context("failed to get user")?; + let gb_repository = gb_repository::Repository::open( + &self.local_data_dir, + &project_repository, + user.as_ref(), + ) + .context("failed to open gitbutler repository")?; + let current_session = gb_repository + .get_or_create_current_session() + .context("failed to get or create current session")?; + let session_reader = crate::sessions::Reader::open(&gb_repository, ¤t_session) + .context("failed to open current session")?; + let target_reader = super::target::Reader::new(&session_reader); + let branch_reader = super::branch::Reader::new(&session_reader); + + let default_target = target_reader + .read_default() + .context("failed to read target")?; + + let mut branches: HashMap = HashMap::new(); + let mut branch_targets: HashMap = HashMap::new(); + + for branch_id in branch_ids { + let branch = branch_reader + .read(&branch_id) + .context("failed to read branch")?; + branches.insert(branch_id, branch); + let target = target_reader + .read(&branch_id) + .context("failed to read target")?; + branch_targets.insert(branch_id, target); + } + + Ok(VirtualBranches { + default_target: Some(default_target), + branch_targets, + branches, + }) + 
} + + pub async fn list_virtual_branches( + &self, + project_id: &ProjectId, + ) -> Result< + (Vec, bool, Vec), + ControllerError, + > { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, _| { + super::list_virtual_branches(gb_repository, project_repository).map_err(Into::into) + }) + } + + pub async fn create_virtual_branch( + &self, + project_id: &ProjectId, + create: &super::branch::BranchCreateRequest, + ) -> Result> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, _| { + let branch_id = + super::create_virtual_branch(gb_repository, project_repository, create)?.id; + Ok(branch_id) + }) + } + + pub async fn create_virtual_branch_from_branch( + &self, + project_id: &ProjectId, + branch: &git::Refname, + ) -> Result> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, user| { + let signing_key = project_repository + .config() + .sign_commits() + .context("failed to get sign commits option")? 
+ .then(|| { + self.keys + .get_or_create() + .context("failed to get private key") + }) + .transpose()?; + + super::create_virtual_branch_from_branch( + gb_repository, + project_repository, + branch, + signing_key.as_ref(), + user, + ) + }) + } + + pub fn get_base_branch_data( + &self, + project_id: &ProjectId, + ) -> Result, ControllerError> { + let project = self.projects.get(project_id).map_err(Error::from)?; + let project_repository = + project_repository::Repository::open(&project).map_err(Error::from)?; + let user = self.users.get_user().map_err(Error::from)?; + let gb_repository = gb_repository::Repository::open( + &self.local_data_dir, + &project_repository, + user.as_ref(), + ) + .context("failed to open gitbutler repository")?; + let base_branch = super::get_base_branch_data(&gb_repository, &project_repository) + .map_err(ControllerError::Action)?; + Ok(base_branch) + } + + pub fn list_remote_commit_files( + &self, + project_id: &ProjectId, + commit_oid: git::Oid, + ) -> Result, Error> { + let project = self.projects.get(project_id)?; + let project_repository = project_repository::Repository::open(&project)?; + let use_context = project_repository + .project() + .use_diff_context + .unwrap_or(false); + let context_lines = if use_context { 3_u32 } else { 0_u32 }; + super::list_remote_commit_files( + &project_repository.git_repository, + commit_oid, + context_lines, + ) + .map_err(Into::into) + } + + pub fn set_base_branch( + &self, + project_id: &ProjectId, + target_branch: &git::RemoteRefname, + ) -> Result> { + let project = self.projects.get(project_id).map_err(Error::from)?; + let user = self.users.get_user().map_err(Error::from)?; + let project_repository = + project_repository::Repository::open(&project).map_err(Error::from)?; + let gb_repository = gb_repository::Repository::open( + &self.local_data_dir, + &project_repository, + user.as_ref(), + ) + .context("failed to open gitbutler repository")?; + + super::set_base_branch(&gb_repository, 
&project_repository, target_branch) + .map_err(ControllerError::Action) + } + + pub async fn merge_virtual_branch_upstream( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + ) -> Result<(), ControllerError> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, user| { + let signing_key = project_repository + .config() + .sign_commits() + .context("failed to get sign commits option")? + .then(|| { + self.keys + .get_or_create() + .context("failed to get private key") + }) + .transpose()?; + + super::merge_virtual_branch_upstream( + gb_repository, + project_repository, + branch_id, + signing_key.as_ref(), + user, + ) + .map_err(Into::into) + }) + } + + pub async fn update_base_branch( + &self, + project_id: &ProjectId, + ) -> Result<(), ControllerError> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, user| { + let signing_key = project_repository + .config() + .sign_commits() + .context("failed to get sign commits option")? 
+ .then(|| { + self.keys + .get_or_create() + .context("failed to get private key") + }) + .transpose()?; + + super::update_base_branch( + gb_repository, + project_repository, + user, + signing_key.as_ref(), + ) + .map_err(Into::into) + }) + } + + pub async fn update_virtual_branch( + &self, + project_id: &ProjectId, + branch_update: super::branch::BranchUpdateRequest, + ) -> Result<(), ControllerError> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, _| { + super::update_branch(gb_repository, project_repository, branch_update)?; + Ok(()) + }) + } + + pub async fn delete_virtual_branch( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + ) -> Result<(), ControllerError> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, _| { + super::delete_branch(gb_repository, project_repository, branch_id)?; + Ok(()) + }) + } + + pub async fn apply_virtual_branch( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + ) -> Result<(), ControllerError> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, user| { + let signing_key = project_repository + .config() + .sign_commits() + .context("failed to get sign commits option")? 
+ .then(|| { + self.keys + .get_or_create() + .context("failed to get private key") + }) + .transpose()?; + + super::apply_branch( + gb_repository, + project_repository, + branch_id, + signing_key.as_ref(), + user, + ) + .map_err(Into::into) + }) + } + + pub async fn unapply_ownership( + &self, + project_id: &ProjectId, + ownership: &BranchOwnershipClaims, + ) -> Result<(), ControllerError> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, _| { + super::unapply_ownership(gb_repository, project_repository, ownership) + .map_err(Into::into) + }) + } + + pub async fn reset_files( + &self, + project_id: &ProjectId, + ownership: &Vec, + ) -> Result<(), ControllerError> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |_, project_repository, _| { + super::reset_files(project_repository, ownership).map_err(Into::into) + }) + } + + pub async fn amend( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + ownership: &BranchOwnershipClaims, + ) -> Result> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, _| { + super::amend(gb_repository, project_repository, branch_id, ownership) + .map_err(Into::into) + }) + } + + pub async fn reset_virtual_branch( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + target_commit_oid: git::Oid, + ) -> Result<(), ControllerError> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, _| { + super::reset_branch( + gb_repository, + project_repository, + branch_id, + target_commit_oid, + ) + .map_err(Into::into) + }) + } + + pub async fn unapply_virtual_branch( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + ) -> Result<(), ControllerError> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, 
project_repository, _| { + super::unapply_branch(gb_repository, project_repository, branch_id) + .map(|_| ()) + .map_err(Into::into) + }) + } + + pub async fn push_virtual_branch( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + with_force: bool, + askpass: Option<(AskpassBroker, Option)>, + ) -> Result<(), ControllerError> { + let _permit = self.semaphore.acquire().await; + let helper = self.helper.clone(); + let project_id = *project_id; + let branch_id = *branch_id; + self.with_verify_branch_async(&project_id, move |gb_repository, project_repository, _| { + super::push( + project_repository, + gb_repository, + &branch_id, + with_force, + &helper, + askpass, + ) + })? + .await + .map_err(|e| ControllerError::Other(e.into()))? + .map_err(ControllerError::Action) + } + + pub async fn cherry_pick( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + commit_oid: git::Oid, + ) -> Result, ControllerError> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, _| { + super::cherry_pick(gb_repository, project_repository, branch_id, commit_oid) + .map_err(Into::into) + }) + } + + pub fn list_remote_branches( + &self, + project_id: &ProjectId, + ) -> Result, ControllerError> { + let project = self.projects.get(project_id).map_err(Error::from)?; + let project_repository = + project_repository::Repository::open(&project).map_err(Error::from)?; + let user = self.users.get_user().map_err(Error::from)?; + let gb_repository = gb_repository::Repository::open( + &self.local_data_dir, + &project_repository, + user.as_ref(), + ) + .context("failed to open gitbutler repository")?; + super::list_remote_branches(&gb_repository, &project_repository) + .map_err(ControllerError::Action) + } + + pub fn get_remote_branch_data( + &self, + project_id: &ProjectId, + refname: &git::Refname, + ) -> Result> { + let project = self.projects.get(project_id).map_err(Error::from)?; + let project_repository = + 
project_repository::Repository::open(&project).map_err(Error::from)?; + let user = self.users.get_user().map_err(Error::from)?; + let gb_repository = gb_repository::Repository::open( + &self.local_data_dir, + &project_repository, + user.as_ref(), + ) + .context("failed to open gitbutler repository")?; + super::get_branch_data(&gb_repository, &project_repository, refname) + .map_err(ControllerError::Action) + } + + pub async fn squash( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + commit_oid: git::Oid, + ) -> Result<(), ControllerError> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, _| { + super::squash(gb_repository, project_repository, branch_id, commit_oid) + .map_err(Into::into) + }) + } + + pub async fn update_commit_message( + &self, + project_id: &ProjectId, + branch_id: &BranchId, + commit_oid: git::Oid, + message: &str, + ) -> Result<(), ControllerError> { + let _permit = self.semaphore.acquire().await; + self.with_verify_branch(project_id, |gb_repository, project_repository, _| { + super::update_commit_message( + gb_repository, + project_repository, + branch_id, + commit_oid, + message, + ) + .map_err(Into::into) + }) + } + + pub async fn fetch_from_target( + &self, + project_id: &ProjectId, + askpass: Option<(AskpassBroker, String)>, + ) -> Result> { + let project = self.projects.get(project_id).map_err(Error::from)?; + let mut project_repository = + project_repository::Repository::open(&project).map_err(Error::from)?; + let user = self.users.get_user().map_err(Error::from)?; + let gb_repository = gb_repository::Repository::open( + &self.local_data_dir, + &project_repository, + user.as_ref(), + ) + .context("failed to open gitbutler repository")?; + + let default_target = gb_repository + .default_target() + .context("failed to get default target")? 
+ .ok_or(FetchFromTargetError::DefaultTargetNotSet( + errors::DefaultTargetNotSetError { + project_id: *project_id, + }, + )) + .map_err(ControllerError::Action)?; + + let project_data_last_fetched = match project_repository + .fetch(default_target.branch.remote(), &self.helper, askpass) + .map_err(errors::FetchFromTargetError::Remote) + { + Ok(()) => projects::FetchResult::Fetched { + timestamp: std::time::SystemTime::now(), + }, + Err(error) => projects::FetchResult::Error { + timestamp: std::time::SystemTime::now(), + error: error.to_string(), + }, + }; + + let updated_project = self + .projects + .update(&projects::UpdateRequest { + id: *project_id, + project_data_last_fetched: Some(project_data_last_fetched), + ..Default::default() + }) + .await + .context("failed to update project")?; + + project_repository.set_project(&updated_project); + + let base_branch = target_to_base_branch(&project_repository, &default_target) + .context("failed to convert target to base branch")?; + + Ok(base_branch) + } + + pub async fn move_commit( + &self, + project_id: &ProjectId, + target_branch_id: &BranchId, + commit_oid: git::Oid, + ) -> Result<(), ControllerError> { + let _permit = self.semaphore.acquire().await; + + self.with_verify_branch(project_id, |gb_repository, project_repository, user| { + let signing_key = project_repository + .config() + .sign_commits() + .context("failed to get sign commits option")? 
+ .then(|| { + self.keys + .get_or_create() + .context("failed to get private key") + }) + .transpose()?; + super::move_commit( + gb_repository, + project_repository, + target_branch_id, + commit_oid, + user, + signing_key.as_ref(), + ) + .map_err(Into::into) + }) + } +} + +impl ControllerInner { + fn with_verify_branch>( + &self, + project_id: &ProjectId, + action: impl FnOnce( + &gb_repository::Repository, + &project_repository::Repository, + Option<&users::User>, + ) -> Result, + ) -> Result> { + let project = self.projects.get(project_id).map_err(Error::from)?; + let project_repository = + project_repository::Repository::open(&project).map_err(Error::from)?; + let user = self.users.get_user().map_err(Error::from)?; + let gb_repository = gb_repository::Repository::open( + &self.local_data_dir, + &project_repository, + user.as_ref(), + ) + .context("failed to open gitbutler repository")?; + super::integration::verify_branch(&gb_repository, &project_repository)?; + action(&gb_repository, &project_repository, user.as_ref()).map_err(ControllerError::Action) + } + + fn with_verify_branch_async + Send + 'static>( + &self, + project_id: &ProjectId, + action: impl FnOnce( + &gb_repository::Repository, + &project_repository::Repository, + Option<&users::User>, + ) -> Result + + Send + + 'static, + ) -> Result>, ControllerError> { + let local_data_dir = self.local_data_dir.clone(); + let project = self.projects.get(project_id).map_err(Error::from)?; + let project_repository = + project_repository::Repository::open(&project).map_err(Error::from)?; + let user = self.users.get_user().map_err(Error::from)?; + let gb_repository = + gb_repository::Repository::open(&local_data_dir, &project_repository, user.as_ref()) + .context("failed to open gitbutler repository")?; + super::integration::verify_branch(&gb_repository, &project_repository)?; + Ok(tokio::task::spawn_blocking(move || { + action(&gb_repository, &project_repository, user.as_ref()) + })) + } +} diff --git 
a/src/virtual_branches/errors.rs b/src/virtual_branches/errors.rs new file mode 100644 index 000000000..f772623fa --- /dev/null +++ b/src/virtual_branches/errors.rs @@ -0,0 +1,837 @@ +use crate::{ + error::Error, + git, + project_repository::{self, RemoteError}, + projects::ProjectId, +}; + +use super::{branch::BranchOwnershipClaims, BranchId, GITBUTLER_INTEGRATION_REFERENCE}; + +#[derive(Debug, thiserror::Error)] +pub enum VerifyError { + #[error("head is detached")] + DetachedHead, + #[error("head is {0}")] + InvalidHead(String), + #[error("integration commit not found")] + NoIntegrationCommit, + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +impl From<VerifyError> for crate::error::Error { + fn from(value: VerifyError) -> Self { + match value { + VerifyError::DetachedHead => crate::error::Error::UserError { + code: crate::error::Code::ProjectHead, + message: format!( + "Project in detached head state. Please checkout {0} to continue.", + GITBUTLER_INTEGRATION_REFERENCE.branch() + ), + }, + VerifyError::InvalidHead(head) => crate::error::Error::UserError { + code: crate::error::Code::ProjectHead, + message: format!( + "Project is on {}. 
Please checkout {} to continue.", + head, + GITBUTLER_INTEGRATION_REFERENCE.branch() + ), + }, + VerifyError::NoIntegrationCommit => crate::error::Error::UserError { + code: crate::error::Code::ProjectHead, + message: "GibButler's integration commit not found on head.".to_string(), + }, + VerifyError::Other(error) => { + tracing::error!(?error); + crate::error::Error::Unknown + } + } + } +} + +#[derive(Debug, thiserror::Error)] +pub enum DeleteBranchError { + #[error(transparent)] + UnapplyBranch(#[from] UnapplyBranchError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum ResetBranchError { + #[error("commit {0} not in the branch")] + CommitNotFoundInBranch(git::Oid), + #[error("branch not found")] + BranchNotFound(BranchNotFoundError), + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum ApplyBranchError { + #[error("project")] + Conflict(ProjectConflictError), + #[error("branch not found")] + BranchNotFound(BranchNotFoundError), + #[error("branch conflicts with other branches - sorry bro.")] + BranchConflicts(BranchId), + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum UnapplyOwnershipError { + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error("project is in conflict state")] + Conflict(ProjectConflictError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum UnapplyBranchError { + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error("branch not found")] + BranchNotFound(BranchNotFoundError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] 
+pub enum FlushAppliedVbranchesError { + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum ListVirtualBranchesError { + #[error("project")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum CreateVirtualBranchError { + #[error("project")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum MergeVirtualBranchUpstreamError { + #[error("project")] + Conflict(ProjectConflictError), + #[error("branch not found")] + BranchNotFound(BranchNotFoundError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum CommitError { + #[error("branch not found")] + BranchNotFound(BranchNotFoundError), + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error("will not commit conflicted files")] + Conflicted(ProjectConflictError), + #[error("commit hook rejected")] + CommitHookRejected(String), + #[error("commit msg hook rejected")] + CommitMsgHookRejected(String), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum PushError { + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error("branch not found")] + BranchNotFound(BranchNotFoundError), + #[error(transparent)] + Remote(#[from] project_repository::RemoteError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum IsRemoteBranchMergableError { + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error("branch not found")] + BranchNotFound(git::RemoteRefname), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum IsVirtualBranchMergeable { + 
#[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error("branch not found")] + BranchNotFound(BranchNotFoundError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug)] +pub struct ForcePushNotAllowedError { + pub project_id: ProjectId, +} + +impl From for Error { + fn from(_value: ForcePushNotAllowedError) -> Self { + Error::UserError { + code: crate::error::Code::Branches, + message: "Action will lead to force pushing, which is not allowed for this".to_string(), + } + } +} + +#[derive(Debug, thiserror::Error)] +pub enum AmendError { + #[error("force push not allowed")] + ForcePushNotAllowed(ForcePushNotAllowedError), + #[error("target ownership not found")] + TargetOwnerhshipNotFound(BranchOwnershipClaims), + #[error("branch has no commits")] + BranchHasNoCommits, + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error("branch not found")] + BranchNotFound(BranchNotFoundError), + #[error("project is in conflict state")] + Conflict(ProjectConflictError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} +#[derive(Debug, thiserror::Error)] +pub enum CherryPickError { + #[error("target commit {0} not found ")] + CommitNotFound(git::Oid), + #[error("can not cherry pick not applied branch")] + NotApplied, + #[error("project is in conflict state")] + Conflict(ProjectConflictError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum SquashError { + #[error("force push not allowed")] + ForcePushNotAllowed(ForcePushNotAllowedError), + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error("commit {0} not in the branch")] + CommitNotFound(git::Oid), + #[error("branch not found")] + BranchNotFound(BranchNotFoundError), + #[error("project is in conflict state")] + Conflict(ProjectConflictError), + #[error("can not squash root commit")] + CantSquashRootCommit, + 
#[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum FetchFromTargetError { + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error("failed to fetch")] + Remote(RemoteError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +impl From for Error { + fn from(value: FetchFromTargetError) -> Self { + match value { + FetchFromTargetError::DefaultTargetNotSet(error) => error.into(), + FetchFromTargetError::Remote(error) => error.into(), + FetchFromTargetError::Other(error) => { + tracing::error!(?error, "fetch from target error"); + Error::Unknown + } + } + } +} + +#[derive(Debug, thiserror::Error)] +pub enum UpdateCommitMessageError { + #[error("force push not allowed")] + ForcePushNotAllowed(ForcePushNotAllowedError), + #[error("empty message")] + EmptyMessage, + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error("commit {0} not in the branch")] + CommitNotFound(git::Oid), + #[error("branch not found")] + BranchNotFound(BranchNotFoundError), + #[error("project is in conflict state")] + Conflict(ProjectConflictError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +impl From for Error { + fn from(value: UpdateCommitMessageError) -> Self { + match value { + UpdateCommitMessageError::ForcePushNotAllowed(error) => error.into(), + UpdateCommitMessageError::EmptyMessage => Error::UserError { + message: "Commit message can not be empty".to_string(), + code: crate::error::Code::Branches, + }, + UpdateCommitMessageError::DefaultTargetNotSet(error) => error.into(), + UpdateCommitMessageError::CommitNotFound(oid) => Error::UserError { + message: format!("Commit {} not found", oid), + code: crate::error::Code::Branches, + }, + UpdateCommitMessageError::BranchNotFound(error) => error.into(), + UpdateCommitMessageError::Conflict(error) => error.into(), + UpdateCommitMessageError::Other(error) => { + tracing::error!(?error, 
"update commit message error"); + Error::Unknown + } + } + } +} + +#[derive(Debug, thiserror::Error)] +pub enum GetBaseBranchDataError { + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum SetBaseBranchError { + #[error("wd is dirty")] + DirtyWorkingDirectory, + #[error("branch {0} not found")] + BranchNotFound(git::RemoteRefname), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum UpdateBaseBranchError { + #[error("project is in conflicting state")] + Conflict(ProjectConflictError), + #[error("no default target set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum MoveCommitError { + #[error("source branch contains hunks locked to the target commit")] + SourceLocked, + #[error("project is in conflicted state")] + Conflicted(ProjectConflictError), + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error("branch not found")] + BranchNotFound(BranchNotFoundError), + #[error("commit not found")] + CommitNotFound(git::Oid), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +impl From for crate::error::Error { + fn from(value: MoveCommitError) -> Self { + match value { + MoveCommitError::SourceLocked => Error::UserError { + message: "Source branch contains hunks locked to the target commit".to_string(), + code: crate::error::Code::Branches, + }, + MoveCommitError::Conflicted(error) => error.into(), + MoveCommitError::DefaultTargetNotSet(error) => error.into(), + MoveCommitError::BranchNotFound(error) => error.into(), + MoveCommitError::CommitNotFound(oid) => Error::UserError { + message: format!("Commit {} not found", oid), + code: crate::error::Code::Branches, + }, + MoveCommitError::Other(error) => { + tracing::error!(?error, "move commit to vbranch error"); + Error::Unknown + } + } + } +} + 
+#[derive(Debug, thiserror::Error)] +pub enum CreateVirtualBranchFromBranchError { + #[error("failed to apply")] + ApplyBranch(ApplyBranchError), + #[error("can't make branch from default target")] + CantMakeBranchFromDefaultTarget, + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error("{0} not found")] + BranchNotFound(git::Refname), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug)] +pub struct ProjectConflictError { + pub project_id: ProjectId, +} + +impl From<ProjectConflictError> for Error { + fn from(value: ProjectConflictError) -> Self { + Error::UserError { + code: crate::error::Code::ProjectConflict, + message: format!("project {} is in a conflicted state", value.project_id), + } + } +} + +#[derive(Debug)] +pub struct DefaultTargetNotSetError { + pub project_id: ProjectId, +} + +impl From<DefaultTargetNotSetError> for Error { + fn from(value: DefaultTargetNotSetError) -> Self { + Error::UserError { + code: crate::error::Code::ProjectConflict, + message: format!( + "project {} does not have a default target set", + value.project_id + ), + } + } +} + +#[derive(Debug)] +pub struct BranchNotFoundError { + pub project_id: ProjectId, + pub branch_id: BranchId, +} + +impl From<BranchNotFoundError> for Error { + fn from(value: BranchNotFoundError) -> Self { + Error::UserError { + code: crate::error::Code::Branches, + message: format!("branch {} not found", value.branch_id), + } + } +} + +#[derive(Debug, thiserror::Error)] +pub enum UpdateBranchError { + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error("branch not found")] + BranchNotFound(BranchNotFoundError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +impl From<UpdateBranchError> for Error { + fn from(value: UpdateBranchError) -> Self { + match value { + UpdateBranchError::DefaultTargetNotSet(error) => error.into(), + UpdateBranchError::BranchNotFound(error) => error.into(), + UpdateBranchError::Other(error) => { + tracing::error!(?error, "update branch error"); + 
Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: CreateVirtualBranchFromBranchError) -> Self { + match value { + CreateVirtualBranchFromBranchError::ApplyBranch(error) => error.into(), + CreateVirtualBranchFromBranchError::CantMakeBranchFromDefaultTarget => { + Error::UserError { + message: "Can not create a branch from default target".to_string(), + code: crate::error::Code::Branches, + } + } + CreateVirtualBranchFromBranchError::DefaultTargetNotSet(error) => error.into(), + CreateVirtualBranchFromBranchError::BranchNotFound(name) => Error::UserError { + message: format!("Branch {} not found", name), + code: crate::error::Code::Branches, + }, + CreateVirtualBranchFromBranchError::Other(error) => { + tracing::error!(?error, "create virtual branch from branch error"); + Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: CommitError) -> Self { + match value { + CommitError::BranchNotFound(error) => error.into(), + CommitError::DefaultTargetNotSet(error) => error.into(), + CommitError::Conflicted(error) => error.into(), + CommitError::CommitHookRejected(error) => Error::UserError { + code: crate::error::Code::PreCommitHook, + message: error, + }, + CommitError::CommitMsgHookRejected(error) => Error::UserError { + code: crate::error::Code::CommitMsgHook, + message: error, + }, + CommitError::Other(error) => { + tracing::error!(?error, "commit error"); + Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: IsRemoteBranchMergableError) -> Self { + match value { + IsRemoteBranchMergableError::BranchNotFound(name) => Error::UserError { + message: format!("Remote branch {} not found", name), + code: crate::error::Code::Branches, + }, + IsRemoteBranchMergableError::DefaultTargetNotSet(error) => error.into(), + IsRemoteBranchMergableError::Other(error) => { + tracing::error!(?error, "is remote branch mergable error"); + Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: DeleteBranchError) -> 
Self { + match value { + DeleteBranchError::UnapplyBranch(error) => error.into(), + DeleteBranchError::Other(error) => { + tracing::error!(?error, "delete branch error"); + Error::Unknown + } + } + } +} + +impl From<ApplyBranchError> for Error { + fn from(value: ApplyBranchError) -> Self { + match value { + ApplyBranchError::DefaultTargetNotSet(error) => error.into(), + ApplyBranchError::Conflict(error) => error.into(), + ApplyBranchError::BranchNotFound(error) => error.into(), + ApplyBranchError::BranchConflicts(id) => Error::UserError { + message: format!("Branch {} is in a conflicing state", id), + code: crate::error::Code::Branches, + }, + ApplyBranchError::Other(error) => { + tracing::error!(?error, "apply branch error"); + Error::Unknown + } + } + } +} + +impl From<IsVirtualBranchMergeable> for Error { + fn from(value: IsVirtualBranchMergeable) -> Self { + match value { + IsVirtualBranchMergeable::BranchNotFound(error) => error.into(), + IsVirtualBranchMergeable::DefaultTargetNotSet(error) => error.into(), + IsVirtualBranchMergeable::Other(error) => { + tracing::error!(?error, "is remote branch mergable error"); + Error::Unknown + } + } + } +} + +impl From<ListVirtualBranchesError> for Error { + fn from(value: ListVirtualBranchesError) -> Self { + match value { + ListVirtualBranchesError::DefaultTargetNotSet(error) => error.into(), + ListVirtualBranchesError::Other(error) => { + tracing::error!(?error, "list virtual branches error"); + Error::Unknown + } + } + } +} + +impl From<CreateVirtualBranchError> for Error { + fn from(value: CreateVirtualBranchError) -> Self { + match value { + CreateVirtualBranchError::DefaultTargetNotSet(error) => error.into(), + CreateVirtualBranchError::Other(error) => { + tracing::error!(?error, "create virtual branch error"); + Error::Unknown + } + } + } +} + +impl From<GetBaseBranchDataError> for Error { + fn from(value: GetBaseBranchDataError) -> Self { + match value { + GetBaseBranchDataError::Other(error) => { + tracing::error!(?error, "get base branch data error"); + Error::Unknown + } + } + } +} + +impl From<ListRemoteCommitFilesError> for Error { + fn from(value: 
ListRemoteCommitFilesError) -> Self { + match value { + ListRemoteCommitFilesError::CommitNotFound(oid) => Error::UserError { + message: format!("Commit {} not found", oid), + code: crate::error::Code::Branches, + }, + ListRemoteCommitFilesError::Other(error) => { + tracing::error!(?error, "list remote commit files error"); + Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: SetBaseBranchError) -> Self { + match value { + SetBaseBranchError::DirtyWorkingDirectory => Error::UserError { + message: "Current HEAD is dirty.".to_string(), + code: crate::error::Code::ProjectConflict, + }, + SetBaseBranchError::BranchNotFound(name) => Error::UserError { + message: format!("remote branch '{}' not found", name), + code: crate::error::Code::Branches, + }, + SetBaseBranchError::Other(error) => { + tracing::error!(?error, "set base branch error"); + Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: MergeVirtualBranchUpstreamError) -> Self { + match value { + MergeVirtualBranchUpstreamError::BranchNotFound(error) => error.into(), + MergeVirtualBranchUpstreamError::Conflict(error) => error.into(), + MergeVirtualBranchUpstreamError::Other(error) => { + tracing::error!(?error, "merge virtual branch upstream error"); + Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: UpdateBaseBranchError) -> Self { + match value { + UpdateBaseBranchError::Conflict(error) => error.into(), + UpdateBaseBranchError::DefaultTargetNotSet(error) => error.into(), + UpdateBaseBranchError::Other(error) => { + tracing::error!(?error, "update base branch error"); + Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: UnapplyOwnershipError) -> Self { + match value { + UnapplyOwnershipError::DefaultTargetNotSet(error) => error.into(), + UnapplyOwnershipError::Conflict(error) => error.into(), + UnapplyOwnershipError::Other(error) => { + tracing::error!(?error, "unapply ownership error"); + Error::Unknown + } + } + } +} + +impl 
From for Error { + fn from(value: AmendError) -> Self { + match value { + AmendError::ForcePushNotAllowed(error) => error.into(), + AmendError::Conflict(error) => error.into(), + AmendError::BranchNotFound(error) => error.into(), + AmendError::BranchHasNoCommits => Error::UserError { + message: "Branch has no commits - there is nothing to amend to".to_string(), + code: crate::error::Code::Branches, + }, + AmendError::DefaultTargetNotSet(error) => error.into(), + AmendError::TargetOwnerhshipNotFound(_) => Error::UserError { + message: "target ownership not found".to_string(), + code: crate::error::Code::Branches, + }, + AmendError::Other(error) => { + tracing::error!(?error, "amend error"); + Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: ResetBranchError) -> Self { + match value { + ResetBranchError::BranchNotFound(error) => error.into(), + ResetBranchError::DefaultTargetNotSet(error) => error.into(), + ResetBranchError::CommitNotFoundInBranch(oid) => Error::UserError { + code: crate::error::Code::Branches, + message: format!("commit {} not found", oid), + }, + ResetBranchError::Other(error) => { + tracing::error!(?error, "reset branch error"); + Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: UnapplyBranchError) -> Self { + match value { + UnapplyBranchError::DefaultTargetNotSet(error) => error.into(), + UnapplyBranchError::BranchNotFound(error) => error.into(), + UnapplyBranchError::Other(error) => { + tracing::error!(?error, "unapply branch error"); + Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: PushError) -> Self { + match value { + PushError::Remote(error) => error.into(), + PushError::BranchNotFound(error) => error.into(), + PushError::DefaultTargetNotSet(error) => error.into(), + PushError::Other(error) => { + tracing::error!(?error, "push error"); + Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: FlushAppliedVbranchesError) -> Self { + match value { + 
FlushAppliedVbranchesError::Other(error) => { + tracing::error!(?error, "flush workspace error"); + Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: CherryPickError) -> Self { + match value { + CherryPickError::NotApplied => Error::UserError { + message: "can not cherry pick non applied branch".to_string(), + code: crate::error::Code::Branches, + }, + CherryPickError::Conflict(error) => error.into(), + CherryPickError::CommitNotFound(oid) => Error::UserError { + message: format!("commit {oid} not found"), + code: crate::error::Code::Branches, + }, + CherryPickError::Other(error) => { + tracing::error!(?error, "cherry pick error"); + Error::Unknown + } + } + } +} + +#[derive(Debug, thiserror::Error)] +pub enum ListRemoteCommitFilesError { + #[error("failed to find commit {0}")] + CommitNotFound(git::Oid), + #[error("failed to find commit")] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum ListRemoteBranchesError { + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug, thiserror::Error)] +pub enum GetRemoteBranchDataError { + #[error("default target not set")] + DefaultTargetNotSet(DefaultTargetNotSetError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +impl From for Error { + fn from(value: GetRemoteBranchDataError) -> Self { + match value { + GetRemoteBranchDataError::DefaultTargetNotSet(error) => error.into(), + GetRemoteBranchDataError::Other(error) => { + tracing::error!(?error, "get remote branch data error"); + Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: ListRemoteBranchesError) -> Self { + match value { + ListRemoteBranchesError::DefaultTargetNotSet(error) => error.into(), + ListRemoteBranchesError::Other(error) => { + tracing::error!(?error, "list remote branches error"); + Error::Unknown + } + } + } +} + +impl From for Error { + fn from(value: 
SquashError) -> Self { + match value { + SquashError::ForcePushNotAllowed(error) => error.into(), + SquashError::DefaultTargetNotSet(error) => error.into(), + SquashError::BranchNotFound(error) => error.into(), + SquashError::Conflict(error) => error.into(), + SquashError::CantSquashRootCommit => Error::UserError { + message: "can not squash root branch commit".to_string(), + code: crate::error::Code::Branches, + }, + SquashError::CommitNotFound(oid) => Error::UserError { + message: format!("commit {oid} not found"), + code: crate::error::Code::Branches, + }, + SquashError::Other(error) => { + tracing::error!(?error, "squash error"); + Error::Unknown + } + } + } +} diff --git a/src/virtual_branches/files.rs b/src/virtual_branches/files.rs new file mode 100644 index 000000000..508500aa8 --- /dev/null +++ b/src/virtual_branches/files.rs @@ -0,0 +1,96 @@ +use std::path; + +use anyhow::{Context, Result}; +use serde::Serialize; + +use crate::git::{self, diff, show}; + +use super::errors; +use crate::virtual_branches::context; + +#[derive(Debug, PartialEq, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct RemoteBranchFile { + pub path: path::PathBuf, + pub hunks: Vec, + pub binary: bool, +} + +pub fn list_remote_commit_files( + repository: &git::Repository, + commit_oid: git::Oid, + context_lines: u32, +) -> Result, errors::ListRemoteCommitFilesError> { + let commit = match repository.find_commit(commit_oid) { + Ok(commit) => Ok(commit), + Err(git::Error::NotFound(_)) => Err(errors::ListRemoteCommitFilesError::CommitNotFound( + commit_oid, + )), + Err(error) => Err(errors::ListRemoteCommitFilesError::Other(error.into())), + }?; + + if commit.parent_count() == 0 { + return Ok(vec![]); + } + + let parent = commit.parent(0).context("failed to get parent commit")?; + let commit_tree = commit.tree().context("failed to get commit tree")?; + let parent_tree = parent.tree().context("failed to get parent tree")?; + let diff = diff::trees(repository, &parent_tree, 
&commit_tree, context_lines)?; + let diff = diff::diff_files_to_hunks(&diff); + + let mut files = diff + .into_iter() + .map(|(file_path, hunks)| RemoteBranchFile { + path: file_path.clone(), + hunks: hunks.clone(), + binary: hunks.iter().any(|h| h.binary), + }) + .collect::>(); + + if context_lines == 0 { + files = files_with_hunk_context(repository, &parent_tree, files, 3) + .context("failed to add context to hunk")?; + } + Ok(files) +} + +fn files_with_hunk_context( + repository: &git::Repository, + parent_tree: &git::Tree, + mut files: Vec, + context_lines: usize, +) -> Result> { + for file in &mut files { + if file.binary { + continue; + } + // Get file content as it looked before the diffs + let file_content_before = + show::show_file_at_tree(repository, file.path.clone(), parent_tree) + .context("failed to get file contents at HEAD")?; + let file_lines_before = file_content_before.split('\n').collect::>(); + + file.hunks = file + .hunks + .iter() + .map(|hunk| { + if hunk.diff.is_empty() { + // noop on empty diff + hunk.clone() + } else { + context::hunk_with_context( + &hunk.diff, + hunk.old_start as usize, + hunk.new_start as usize, + hunk.binary, + context_lines, + &file_lines_before, + hunk.change_type, + ) + } + }) + .collect::>(); + } + Ok(files) +} diff --git a/src/virtual_branches/integration.rs b/src/virtual_branches/integration.rs new file mode 100644 index 000000000..833718083 --- /dev/null +++ b/src/virtual_branches/integration.rs @@ -0,0 +1,351 @@ +use std::io::{Read, Write}; + +use anyhow::{Context, Result}; +use lazy_static::lazy_static; + +use crate::{ + gb_repository, + git::{self}, + project_repository::{self, LogUntil}, + reader, sessions, + virtual_branches::branch::BranchCreateRequest, +}; + +use super::errors; + +lazy_static! 
{ + pub static ref GITBUTLER_INTEGRATION_REFERENCE: git::LocalRefname = + git::LocalRefname::new("gitbutler/integration", None); +} + +const GITBUTLER_INTEGRATION_COMMIT_AUTHOR_NAME: &str = "GitButler"; +const GITBUTLER_INTEGRATION_COMMIT_AUTHOR_EMAIL: &str = "gitbutler@gitbutler.com"; + +pub fn update_gitbutler_integration( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, +) -> Result<()> { + let target = gb_repository + .default_target() + .context("failed to get target")? + .context("no target set")?; + + let repo = &project_repository.git_repository; + + // write the currrent target sha to a temp branch as a parent + repo.reference( + &GITBUTLER_INTEGRATION_REFERENCE.clone().into(), + target.sha, + true, + "update target", + )?; + + // get commit object from target.sha + let target_commit = repo.find_commit(target.sha)?; + + // get current repo head for reference + let head = repo.head()?; + let mut prev_head = head.name().unwrap().to_string(); + let mut prev_sha = head.target().unwrap().to_string(); + let integration_file = repo.path().join("integration"); + if prev_head == GITBUTLER_INTEGRATION_REFERENCE.to_string() { + // read the .git/integration file + if let Ok(mut integration_file) = std::fs::File::open(integration_file) { + let mut prev_data = String::new(); + integration_file.read_to_string(&mut prev_data)?; + let parts: Vec<&str> = prev_data.split(':').collect(); + + prev_head = parts[0].to_string(); + prev_sha = parts[1].to_string(); + } + } else { + // we are moving from a regular branch to our gitbutler integration branch, save the original + // write a file to .git/integration with the previous head and name + let mut file = std::fs::File::create(integration_file)?; + prev_head.push(':'); + prev_head.push_str(&prev_sha); + file.write_all(prev_head.as_bytes())?; + } + + // commit index to temp head for the merge + repo.set_head(&GITBUTLER_INTEGRATION_REFERENCE.clone().into()) + .context("failed to 
set head")?; + + let latest_session = gb_repository + .get_latest_session() + .context("failed to get latest session")? + .context("latest session not found")?; + let session_reader = sessions::Reader::open(gb_repository, &latest_session) + .context("failed to open current session")?; + + // get all virtual branches, we need to try to update them all + let all_virtual_branches = super::iterator::BranchIterator::new(&session_reader) + .context("failed to create branch iterator")? + .collect::, reader::Error>>() + .context("failed to read virtual branches")?; + + let applied_virtual_branches = all_virtual_branches + .iter() + .filter(|branch| branch.applied) + .collect::>(); + + let base_tree = target_commit.tree()?; + let mut final_tree = target_commit.tree()?; + for branch in &applied_virtual_branches { + // merge this branches tree with our tree + let branch_head = repo.find_commit(branch.head)?; + let branch_tree = branch_head.tree()?; + if let Ok(mut result) = repo.merge_trees(&base_tree, &final_tree, &branch_tree) { + if !result.has_conflicts() { + let final_tree_oid = result.write_tree_to(repo)?; + final_tree = repo.find_tree(final_tree_oid)?; + } + } + } + + // message that says how to get back to where they were + let mut message = "GitButler Integration Commit".to_string(); + message.push_str("\n\n"); + message.push_str( + "This is an integration commit for the virtual branches that GitButler is tracking.\n\n", + ); + message.push_str( + "Due to GitButler managing multiple virtual branches, you cannot switch back and\n", + ); + message.push_str("forth between git branches and virtual branches easily. 
\n\n"); + + message.push_str("If you switch to another branch, GitButler will need to be reinitialized.\n"); + message.push_str("If you commit on this branch, GitButler will throw it away.\n\n"); + message.push_str("Here are the branches that are currently applied:\n"); + for branch in &applied_virtual_branches { + message.push_str(" - "); + message.push_str(branch.name.as_str()); + message.push_str(format!(" ({})", &branch.refname()).as_str()); + message.push('\n'); + + if branch.head != target.sha { + message.push_str(" branch head: "); + message.push_str(&branch.head.to_string()); + message.push('\n'); + } + for file in &branch.ownership.claims { + message.push_str(" - "); + message.push_str(&file.file_path.display().to_string()); + message.push('\n'); + } + } + message.push_str("\nYour previous branch was: "); + message.push_str(&prev_head); + message.push_str("\n\n"); + message.push_str("The sha for that commit was: "); + message.push_str(&prev_sha); + message.push_str("\n\n"); + message.push_str("For more information about what we're doing here, check out our docs:\n"); + message.push_str("https://docs.gitbutler.com/features/virtual-branches/integration-branch\n"); + + let committer = git::Signature::now( + GITBUTLER_INTEGRATION_COMMIT_AUTHOR_NAME, + GITBUTLER_INTEGRATION_COMMIT_AUTHOR_EMAIL, + )?; + + repo.commit( + Some(&"refs/heads/gitbutler/integration".parse().unwrap()), + &committer, + &committer, + &message, + &final_tree, + &[&target_commit], + )?; + + // write final_tree as the current index + let mut index = repo.index()?; + index.read_tree(&final_tree)?; + index.write()?; + + // finally, update the refs/gitbutler/ heads to the states of the current virtual branches + for branch in &all_virtual_branches { + let wip_tree = repo.find_tree(branch.tree)?; + let mut branch_head = repo.find_commit(branch.head)?; + let head_tree = branch_head.tree()?; + + // create a wip commit if there is wip + if head_tree.id() != wip_tree.id() { + let mut message = 
"GitButler WIP Commit".to_string(); + message.push_str("\n\n"); + message.push_str("This is a WIP commit for the virtual branch '"); + message.push_str(branch.name.as_str()); + message.push_str("'\n\n"); + message.push_str("This commit is used to store the state of the virtual branch\n"); + message.push_str("while you are working on it. It is not meant to be used for\n"); + message.push_str("anything else.\n\n"); + let branch_head_oid = repo.commit( + None, + &committer, + &committer, + &message, + &wip_tree, + &[&branch_head], + )?; + branch_head = repo.find_commit(branch_head_oid)?; + } + + repo.reference( + &branch.refname().into(), + branch_head.id(), + true, + "update virtual branch", + )?; + } + + Ok(()) +} + +pub fn verify_branch( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, +) -> Result<(), errors::VerifyError> { + verify_head_is_set(project_repository)?; + verify_head_is_clean(gb_repository, project_repository)?; + Ok(()) +} + +fn verify_head_is_clean( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, +) -> Result<(), errors::VerifyError> { + let head_commit = project_repository + .git_repository + .head() + .context("failed to get head")? 
+ .peel_to_commit() + .context("failed to peel to commit")?; + + let mut extra_commits = project_repository + .log( + head_commit.id(), + LogUntil::When(Box::new(|commit| Ok(is_integration_commit(commit)))), + ) + .context("failed to get log")?; + + let integration_commit = extra_commits.pop(); + + if integration_commit.is_none() { + // no integration commit found + return Err(errors::VerifyError::NoIntegrationCommit); + } + + if extra_commits.is_empty() { + // no extra commits found, so we're good + return Ok(()); + } + + project_repository + .git_repository + .reset( + integration_commit.as_ref().unwrap(), + git2::ResetType::Soft, + None, + ) + .context("failed to reset to integration commit")?; + + let mut new_branch = super::create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest { + name: extra_commits + .last() + .unwrap() + .message() + .map(ToString::to_string), + ..Default::default() + }, + ) + .context("failed to create virtual branch")?; + + // rebasing the extra commits onto the new branch + let writer = super::branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create writer")?; + extra_commits.reverse(); + let mut head = new_branch.head; + for commit in extra_commits { + let new_branch_head = project_repository + .git_repository + .find_commit(head) + .context("failed to find new branch head")?; + + let rebased_commit_oid = project_repository + .git_repository + .commit( + None, + &commit.author(), + &commit.committer(), + commit.message().unwrap(), + &commit.tree().unwrap(), + &[&new_branch_head], + ) + .context(format!( + "failed to rebase commit {} onto new branch", + commit.id() + ))?; + + let rebased_commit = project_repository + .git_repository + .find_commit(rebased_commit_oid) + .context(format!( + "failed to find rebased commit {}", + rebased_commit_oid + ))?; + + new_branch.head = rebased_commit.id(); + new_branch.tree = rebased_commit.tree_id(); + writer + .write(&mut 
new_branch) + .context("failed to write branch")?; + + head = rebased_commit.id(); + } + Ok(()) +} + +fn verify_head_is_set( + project_repository: &project_repository::Repository, +) -> Result<(), errors::VerifyError> { + match project_repository + .get_head() + .context("failed to get head") + .map_err(errors::VerifyError::Other)? + .name() + { + Some(refname) if refname.to_string() == GITBUTLER_INTEGRATION_REFERENCE.to_string() => { + Ok(()) + } + None => Err(errors::VerifyError::DetachedHead), + Some(head_name) => Err(errors::VerifyError::InvalidHead(head_name.to_string())), + } +} + +fn is_integration_commit(commit: &git::Commit) -> bool { + is_integration_commit_author(commit) && is_integration_commit_message(commit) +} + +fn is_integration_commit_author(commit: &git::Commit) -> bool { + is_integration_commit_author_email(commit) && is_integration_commit_author_name(commit) +} + +fn is_integration_commit_author_email(commit: &git::Commit) -> bool { + commit.author().email().map_or(false, |email| { + email == GITBUTLER_INTEGRATION_COMMIT_AUTHOR_EMAIL + }) +} + +fn is_integration_commit_author_name(commit: &git::Commit) -> bool { + commit.author().name().map_or(false, |name| { + name == GITBUTLER_INTEGRATION_COMMIT_AUTHOR_NAME + }) +} + +fn is_integration_commit_message(commit: &git::Commit) -> bool { + commit.message().map_or(false, |message| { + message.starts_with("GitButler Integration Commit") + }) +} diff --git a/src/virtual_branches/iterator.rs b/src/virtual_branches/iterator.rs new file mode 100644 index 000000000..c169bf62d --- /dev/null +++ b/src/virtual_branches/iterator.rs @@ -0,0 +1,56 @@ +use std::collections::HashSet; + +use anyhow::Result; + +use crate::sessions; + +use super::branch::{self, BranchId}; + +pub struct BranchIterator<'i> { + branch_reader: branch::Reader<'i>, + ids: Vec, +} + +impl<'i> BranchIterator<'i> { + pub fn new(session_reader: &'i sessions::Reader<'i>) -> Result { + let reader = session_reader.reader(); + let ids_itarator = 
reader + .list_files("branches")? + .into_iter() + .map(|file_path| { + file_path + .iter() + .next() + .unwrap() + .to_string_lossy() + .to_string() + }) + .filter(|file_path| file_path != "selected") + .filter(|file_path| file_path != "target"); + let unique_ids: HashSet = ids_itarator.collect(); + let mut ids: Vec = unique_ids + .into_iter() + .map(|id| id.parse()) + .filter_map(Result::ok) + .collect(); + ids.sort(); + Ok(Self { + branch_reader: branch::Reader::new(session_reader), + ids, + }) + } +} + +impl Iterator for BranchIterator<'_> { + type Item = Result; + + fn next(&mut self) -> Option { + if self.ids.is_empty() { + return None; + } + + let id = self.ids.remove(0); + let branch = self.branch_reader.read(&id); + Some(branch) + } +} diff --git a/src/virtual_branches/remote.rs b/src/virtual_branches/remote.rs new file mode 100644 index 000000000..0cb0d13dc --- /dev/null +++ b/src/virtual_branches/remote.rs @@ -0,0 +1,185 @@ +use anyhow::{Context, Result}; +use serde::Serialize; + +use crate::{ + gb_repository, git, + project_repository::{self, LogUntil}, +}; + +use super::{errors, Author}; + +// this struct is a mapping to the view `RemoteBranch` type in Typescript +// found in src-tauri/src/routes/repo/[project_id]/types.ts +// +// it holds data calculated for presentation purposes of one Git branch +// with comparison data to the Target commit, determining if it is mergeable, +// and how far ahead or behind the Target it is. +// an array of them can be requested from the frontend to show in the sidebar +// Tray and should only contain branches that have not been converted into +// virtual branches yet (ie, we have no `Branch` struct persisted in our data. 
+#[derive(Debug, Clone, Serialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct RemoteBranch { + pub sha: git::Oid, + pub name: git::Refname, + pub upstream: Option, + pub last_commit_timestamp_ms: Option, + pub last_commit_author: Option, +} + +#[derive(Debug, Clone, Serialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct RemoteBranchData { + pub sha: git::Oid, + pub name: git::Refname, + pub upstream: Option, + pub behind: u32, + pub commits: Vec, +} + +#[derive(Debug, Clone, PartialEq, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct RemoteCommit { + pub id: String, + pub description: String, + pub created_at: u128, + pub author: Author, +} + +pub fn list_remote_branches( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, +) -> Result, errors::ListRemoteBranchesError> { + let default_target = gb_repository + .default_target() + .context("failed to get default target")? + .ok_or_else(|| { + errors::ListRemoteBranchesError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + + let remote_branches = project_repository + .git_repository + .branches(Some(git2::BranchType::Remote)) + .context("failed to list remote branches")? + .flatten() + .map(|(branch, _)| branch) + .map(|branch| branch_to_remote_branch(&branch)) + .collect::>>() + .context("failed to convert branches")? + .into_iter() + .flatten() + .filter(|branch| branch.name.branch() != Some(default_target.branch.branch())) + .collect::>(); + + Ok(remote_branches) +} + +pub fn get_branch_data( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + refname: &git::Refname, +) -> Result { + let default_target = gb_repository + .default_target() + .context("failed to get default target")? 
+ .ok_or_else(|| { + errors::GetRemoteBranchDataError::DefaultTargetNotSet( + errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }, + ) + })?; + + let branch = project_repository + .git_repository + .find_branch(refname) + .context(format!("failed to find branch with refname {refname}"))?; + + let branch_data = branch_to_remote_branch_data(project_repository, &branch, default_target.sha) + .context("failed to get branch data")?; + + branch_data + .ok_or_else(|| { + errors::GetRemoteBranchDataError::Other(anyhow::anyhow!("no data found for branch")) + }) + .map(|branch_data| RemoteBranchData { + sha: branch_data.sha, + name: branch_data.name, + upstream: branch_data.upstream, + behind: branch_data.behind, + commits: branch_data.commits, + }) +} + +pub fn branch_to_remote_branch(branch: &git::Branch) -> Result> { + let commit = branch.peel_to_commit()?; + branch + .target() + .map(|sha| { + let name = git::Refname::try_from(branch).context("could not get branch name")?; + Ok(RemoteBranch { + sha, + upstream: if let git::Refname::Local(local_name) = &name { + local_name.remote().cloned() + } else { + None + }, + name, + last_commit_timestamp_ms: commit + .time() + .seconds() + .try_into() + .map(|t: u128| t * 1000) + .ok(), + last_commit_author: commit.author().name().map(std::string::ToString::to_string), + }) + }) + .transpose() +} + +pub fn branch_to_remote_branch_data( + project_repository: &project_repository::Repository, + branch: &git::Branch, + base: git::Oid, +) -> Result> { + branch + .target() + .map(|sha| { + let ahead = project_repository + .log(sha, LogUntil::Commit(base)) + .context("failed to get ahead commits")?; + + let name = git::Refname::try_from(branch).context("could not get branch name")?; + + let count_behind = project_repository + .distance(base, sha) + .context("failed to get behind count")?; + + Ok(RemoteBranchData { + sha, + upstream: if let git::Refname::Local(local_name) = &name { + 
local_name.remote().cloned() + } else { + None + }, + name, + behind: count_behind, + commits: ahead + .into_iter() + .map(|commit| commit_to_remote_commit(&commit)) + .collect::>(), + }) + }) + .transpose() +} + +pub fn commit_to_remote_commit(commit: &git::Commit) -> RemoteCommit { + RemoteCommit { + id: commit.id().to_string(), + description: commit.message().unwrap_or_default().to_string(), + created_at: commit.time().seconds().try_into().unwrap(), + author: commit.author().into(), + } +} diff --git a/src/virtual_branches/state.rs b/src/virtual_branches/state.rs new file mode 100644 index 000000000..479a251f6 --- /dev/null +++ b/src/virtual_branches/state.rs @@ -0,0 +1,136 @@ +use std::{ + collections::HashMap, + fs::File, + io::{Read, Write}, + path::{Path, PathBuf}, +}; + +use anyhow::Result; +use serde::{Deserialize, Serialize}; + +use crate::virtual_branches::BranchId; + +use super::{target::Target, Branch}; + +/// The state of virtual branches data, as persisted in a TOML file. +#[derive(Serialize, Deserialize, Debug, Default)] +pub struct VirtualBranches { + /// This is the target/base that is set when a repo is added to gb + pub default_target: Option, + /// The targets for each virtual branch + pub branch_targets: HashMap, + /// The current state of the virtual branches + pub branches: HashMap, +} +/// A handle to the state of virtual branches. +/// +/// For all operations, if the state file does not exist, it will be created. +pub struct VirtualBranchesHandle { + /// The path to the file containing the virtual branches state. + file_path: PathBuf, +} + +impl VirtualBranchesHandle { + /// Creates a new concurrency-safe handle to the state of virtual branches. + pub fn new(base_path: &Path) -> Self { + let file_path = base_path.join("virtual_branches.toml"); + Self { file_path } + } + + /// Persists the default target for the given repository. + /// + /// Errors if the file cannot be read or written. 
+ pub fn set_default_target(&self, target: Target) -> Result<()> { + let mut virtual_branches = self.read_file()?; + virtual_branches.default_target = Some(target); + self.write_file(&virtual_branches)?; + Ok(()) + } + + /// Gets the default target for the given repository. + /// + /// Errors if the file cannot be read or written. + #[allow(dead_code)] + pub fn get_default_target(&self) -> Result> { + let virtual_branches = self.read_file()?; + Ok(virtual_branches.default_target) + } + + /// Sets the target for the given virtual branch. + /// + /// Errors if the file cannot be read or written. + pub fn set_branch_target(&self, id: BranchId, target: Target) -> Result<()> { + let mut virtual_branches = self.read_file()?; + virtual_branches.branch_targets.insert(id, target); + self.write_file(&virtual_branches)?; + Ok(()) + } + + /// Gets the target for the given virtual branch. + /// + /// Errors if the file cannot be read or written. + #[allow(dead_code)] + pub fn get_branch_target(&self, id: BranchId) -> Result> { + let virtual_branches = self.read_file()?; + Ok(virtual_branches.branch_targets.get(&id).cloned()) + } + + /// Sets the state of the given virtual branch. + /// + /// Errors if the file cannot be read or written. + pub fn set_branch(&self, branch: Branch) -> Result<()> { + let mut virtual_branches = self.read_file()?; + virtual_branches.branches.insert(branch.id, branch); + self.write_file(&virtual_branches)?; + Ok(()) + } + + /// Removes the given virtual branch. + /// + /// Errors if the file cannot be read or written. + #[allow(dead_code)] + pub fn remove_branch(&self, id: BranchId) -> Result<()> { + let mut virtual_branches = self.read_file()?; + virtual_branches.branches.remove(&id); + self.write_file(&virtual_branches)?; + Ok(()) + } + + /// Gets the state of the given virtual branch. + /// + /// Errors if the file cannot be read or written. 
+ #[allow(dead_code)] + pub fn get_branch(&self, id: BranchId) -> Result> { + let virtual_branches = self.read_file()?; + Ok(virtual_branches.branches.get(&id).cloned()) + } + + /// Reads and parses the state file. + /// + /// If the file does not exist, it will be created. + fn read_file(&self) -> Result { + // let file_path = &self.file_path.lock().await; + if !self.file_path.exists() { + return Ok(VirtualBranches::default()); + } + let mut file: File = File::open(self.file_path.as_path())?; + let mut contents = String::new(); + file.read_to_string(&mut contents)?; + let virtual_branches: VirtualBranches = toml::from_str(&contents)?; + Ok(virtual_branches) + } + + fn write_file(&self, virtual_branches: &VirtualBranches) -> Result<()> { + write(self.file_path.as_path(), virtual_branches) + } +} + +fn write>(file_path: P, virtual_branches: &VirtualBranches) -> Result<()> { + let contents = toml::to_string(&virtual_branches)?; + let temp_file = tempfile::NamedTempFile::new_in(file_path.as_ref().parent().unwrap())?; + let (mut file, temp_path) = temp_file.keep()?; + file.write_all(contents.as_bytes())?; + drop(file); + std::fs::rename(temp_path, file_path.as_ref())?; + Ok(()) +} diff --git a/src/virtual_branches/target.rs b/src/virtual_branches/target.rs new file mode 100644 index 000000000..ff97e4d31 --- /dev/null +++ b/src/virtual_branches/target.rs @@ -0,0 +1,105 @@ +mod reader; +mod writer; + +use std::str::FromStr; + +use serde::{ser::SerializeStruct, Deserializer, Serializer}; +use serde::{Deserialize, Serialize}; + +pub use reader::TargetReader as Reader; +pub use writer::TargetWriter as Writer; + +use crate::git; + +#[derive(Debug, PartialEq, Clone)] +pub struct Target { + pub branch: git::RemoteRefname, + pub remote_url: String, + pub sha: git::Oid, +} + +impl Serialize for Target { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let mut state = serializer.serialize_struct("Target", 5)?; + state.serialize_field("branchName", 
&self.branch.branch())?; + state.serialize_field("remoteName", &self.branch.remote())?; + state.serialize_field("remoteUrl", &self.remote_url)?; + state.serialize_field("sha", &self.sha.to_string())?; + state.end() + } +} + +impl<'de> serde::Deserialize<'de> for Target { + fn deserialize>(d: D) -> Result { + #[derive(Debug, Deserialize)] + #[serde(rename_all = "camelCase")] + struct TargetData { + branch_name: String, + remote_name: String, + remote_url: String, + sha: String, + } + let target_data: TargetData = serde::Deserialize::deserialize(d)?; + let sha = git::Oid::from_str(&target_data.sha) + .map_err(|x| serde::de::Error::custom(x.message()))?; + + let target = Target { + branch: git::RemoteRefname::new(&target_data.remote_name, &target_data.branch_name), + remote_url: target_data.remote_url, + sha, + }; + Ok(target) + } +} + +impl Target { + fn try_from(reader: &crate::reader::Reader) -> Result { + let results = reader.batch(&["name", "branch_name", "remote", "remote_url", "sha"])?; + + let name = results[0].clone(); + let branch_name = results[1].clone(); + let remote = results[2].clone(); + let remote_url = results[3].clone(); + let sha = results[4].clone(); + + let branch_name = match name { + Ok(branch) => { + let branch: String = branch.try_into()?; + Ok(branch.clone()) + } + Err(crate::reader::Error::NotFound) => { + // fallback to the old format + let branch_name: String = branch_name?.try_into()?; + Ok(branch_name) + } + Err(e) => Err(crate::reader::Error::Io( + std::io::Error::new(std::io::ErrorKind::Other, format!("branch: {}", e)).into(), + )), + }?; + + let remote_url: String = match remote_url { + Ok(url) => Ok(url.try_into()?), + // fallback to the old format + Err(crate::reader::Error::NotFound) => Ok(remote?.try_into()?), + Err(error) => Err(crate::reader::Error::Io( + std::io::Error::new(std::io::ErrorKind::Other, format!("remote: {}", error)).into(), + )), + }?; + + let sha: String = sha?.try_into()?; + let sha = sha.parse().map_err(|e| { 
+ crate::reader::Error::Io( + std::io::Error::new(std::io::ErrorKind::InvalidData, format!("sha: {}", e)).into(), + ) + })?; + + Ok(Self { + branch: format!("refs/remotes/{}", branch_name).parse().unwrap(), + remote_url, + sha, + }) + } +} diff --git a/src/virtual_branches/target/reader.rs b/src/virtual_branches/target/reader.rs new file mode 100644 index 000000000..d5fabd7d0 --- /dev/null +++ b/src/virtual_branches/target/reader.rs @@ -0,0 +1,31 @@ +use crate::{reader, sessions, virtual_branches::BranchId}; + +use super::Target; + +pub struct TargetReader<'r> { + reader: &'r reader::Reader<'r>, +} + +impl<'r> TargetReader<'r> { + pub fn new(reader: &'r sessions::Reader<'r>) -> Self { + Self { + reader: reader.reader(), + } + } + + pub fn read_default(&self) -> Result { + Target::try_from(&self.reader.sub("branches/target")) + } + + pub fn read(&self, id: &BranchId) -> Result { + if !self + .reader + .exists(format!("branches/{}/target", id)) + .map_err(reader::Error::from)? + { + return self.read_default(); + } + + Target::try_from(&self.reader.sub(format!("branches/{}/target", id))) + } +} diff --git a/src/virtual_branches/target/writer.rs b/src/virtual_branches/target/writer.rs new file mode 100644 index 000000000..6aabd4008 --- /dev/null +++ b/src/virtual_branches/target/writer.rs @@ -0,0 +1,109 @@ +use std::path; + +use anyhow::{Context, Result}; + +use crate::{ + gb_repository, reader, + virtual_branches::{state::VirtualBranchesHandle, BranchId}, + writer, +}; + +use super::Target; + +pub struct TargetWriter<'writer> { + repository: &'writer gb_repository::Repository, + writer: writer::DirWriter, + reader: reader::Reader<'writer>, + state_handle: VirtualBranchesHandle, +} + +impl<'writer> TargetWriter<'writer> { + pub fn new>( + repository: &'writer gb_repository::Repository, + path: P, + ) -> Result { + let reader = reader::Reader::open(&repository.root())?; + let writer = writer::DirWriter::open(repository.root())?; + let state_handle = 
VirtualBranchesHandle::new(path.as_ref()); + Ok(Self { + repository, + writer, + reader, + state_handle, + }) + } + + pub fn write_default(&self, target: &Target) -> Result<()> { + let reader = self.reader.sub("branches/target"); + match Target::try_from(&reader) { + Ok(existing) if existing.eq(target) => return Ok(()), + Ok(_) | Err(reader::Error::NotFound) => {} + Err(e) => return Err(e.into()), + }; + + self.repository.mark_active_session()?; + + let batch = vec![ + writer::BatchTask::Write( + "branches/target/branch_name", + format!("{}/{}", target.branch.remote(), target.branch.branch()), + ), + writer::BatchTask::Write( + "branches/target/remote_name", + target.branch.remote().to_string(), + ), + writer::BatchTask::Write("branches/target/remote_url", target.remote_url.clone()), + writer::BatchTask::Write("branches/target/sha", target.sha.to_string()), + ]; + + self.writer + .batch(&batch) + .context("Failed to write default target")?; + + // Write in the state file as well + let _ = self.state_handle.set_default_target(target.clone()); + + Ok(()) + } + + pub fn write(&self, id: &BranchId, target: &Target) -> Result<()> { + let reader = self.reader.sub(format!("branches/{}/target", id)); + match Target::try_from(&reader) { + Ok(existing) if existing.eq(target) => return Ok(()), + Ok(_) | Err(reader::Error::NotFound) => {} + Err(e) => return Err(e.into()), + }; + + self.repository + .mark_active_session() + .context("Failed to get or create current session")?; + + let batch = vec![ + writer::BatchTask::Write( + format!("branches/{}/target/branch_name", id), + format!("{}/{}", target.branch.remote(), target.branch.branch()), + ), + writer::BatchTask::Write( + format!("branches/{}/target/remote_name", id), + target.branch.remote().to_string(), + ), + writer::BatchTask::Write( + format!("branches/{}/target/remote_url", id), + target.remote_url.clone(), + ), + writer::BatchTask::Write( + format!("branches/{}/target/sha", id), + target.sha.to_string(), + ), + ]; + + 
self.writer + .batch(&batch) + .context("Failed to write target")?; + + // Write in the state file as well + let _ = self.state_handle.set_branch_target(*id, target.clone()); + + Ok(()) + } +} diff --git a/src/virtual_branches/virtual.rs b/src/virtual_branches/virtual.rs new file mode 100644 index 000000000..132d2be23 --- /dev/null +++ b/src/virtual_branches/virtual.rs @@ -0,0 +1,4058 @@ +use std::{ + collections::HashMap, + hash::Hash, + path::{Path, PathBuf}, + time, vec, +}; + +#[cfg(target_family = "unix")] +use std::os::unix::prelude::*; + +use anyhow::{bail, Context, Result}; +use bstr::ByteSlice; +use diffy::{apply, Patch}; +use git2_hooks::HookResult; +use regex::Regex; +use serde::Serialize; + +use crate::{ + askpass::AskpassBroker, + dedup::{dedup, dedup_fmt}, + gb_repository, + git::{ + self, + diff::{self, diff_files_to_hunks, GitHunk}, + show, Commit, Refname, RemoteRefname, + }, + keys, + project_repository::{self, conflicts, LogUntil}, + reader, sessions, users, +}; + +use super::{ + branch::{ + self, Branch, BranchCreateRequest, BranchId, BranchOwnershipClaims, Hunk, OwnershipClaim, + }, + branch_to_remote_branch, context, errors, target, Iterator, RemoteBranch, +}; + +type AppliedStatuses = Vec<(branch::Branch, HashMap>)>; + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("path contains invalid utf-8 characters: {0}")] + InvalidUnicodePath(PathBuf), +} + +// this struct is a mapping to the view `Branch` type in Typescript +// found in src-tauri/src/routes/repo/[project_id]/types.ts +// it holds a materialized view for presentation purposes of the Branch struct in Rust +// which is our persisted data structure for virtual branches +// +// it is not persisted, it is only used for presentation purposes through the ipc +// +#[derive(Debug, PartialEq, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +#[allow(clippy::struct_excessive_bools)] +pub struct VirtualBranch { + pub id: BranchId, + pub name: String, + pub notes: String, + 
pub active: bool, + pub files: Vec, + pub commits: Vec, + pub requires_force: bool, // does this branch require a force push to the upstream? + pub conflicted: bool, // is this branch currently in a conflicted state (only for the workspace) + pub order: usize, // the order in which this branch should be displayed in the UI + pub upstream: Option, // the upstream branch where this branch pushes to, if any + pub upstream_name: Option, // the upstream branch where this branch will push to on next push + pub base_current: bool, // is this vbranch based on the current base branch? if false, this needs to be manually merged with conflicts + pub ownership: BranchOwnershipClaims, + pub updated_at: u128, + pub selected_for_changes: bool, + pub head: git::Oid, +} + +#[derive(Debug, PartialEq, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct VirtualBranches { + pub branches: Vec, + pub skipped_files: Vec, +} + +// this is the struct that maps to the view `Commit` type in Typescript +// it is derived from walking the git commits between the `Branch.head` commit +// and the `Target.sha` commit, or, everything that is uniquely committed to +// the virtual branch we assign it to. an array of them are returned as part of +// the `VirtualBranch` struct +// +// it is not persisted, it is only used for presentation purposes through the ipc +// +#[derive(Debug, PartialEq, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct VirtualBranchCommit { + pub id: git::Oid, + pub description: String, + pub created_at: u128, + pub author: Author, + pub is_remote: bool, + pub files: Vec, + pub is_integrated: bool, + pub parent_ids: Vec, + pub branch_id: BranchId, +} + +// this struct is a mapping to the view `File` type in Typescript +// found in src-tauri/src/routes/repo/[project_id]/types.ts +// it holds a materialized view for presentation purposes of one entry of the +// `Branch.ownership` vector in Rust. 
an array of them are returned as part of +// the `VirtualBranch` struct, which map to each entry of the `Branch.ownership` vector +// +// it is not persisted, it is only used for presentation purposes through the ipc +// +#[derive(Debug, PartialEq, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct VirtualBranchFile { + pub id: String, + pub path: PathBuf, + pub hunks: Vec, + pub modified_at: u128, + pub conflicted: bool, + pub binary: bool, + pub large: bool, +} + +// this struct is a mapping to the view `Hunk` type in Typescript +// found in src-tauri/src/routes/repo/[project_id]/types.ts +// it holds a materialized view for presentation purposes of one entry of the +// each hunk in one `Branch.ownership` vector entry in Rust. +// an array of them are returned as part of the `VirtualBranchFile` struct +// +// it is not persisted, it is only used for presentation purposes through the ipc +// +#[derive(Debug, PartialEq, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct VirtualBranchHunk { + pub id: String, + pub diff: String, + pub modified_at: u128, + pub file_path: PathBuf, + pub hash: String, + pub old_start: u32, + pub start: u32, + pub end: u32, + pub binary: bool, + pub locked: bool, + pub locked_to: Option, + pub change_type: diff::ChangeType, +} + +#[derive(Debug, Serialize, Hash, Clone, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct Author { + pub name: String, + pub email: String, + pub gravatar_url: url::Url, +} + +impl From> for Author { + fn from(value: git::Signature) -> Self { + let name = value.name().unwrap_or_default().to_string(); + let email = value.email().unwrap_or_default().to_string(); + + let gravatar_url = url::Url::parse(&format!( + "https://www.gravatar.com/avatar/{:x}?s=100&r=g&d=retro", + md5::compute(email.to_lowercase()) + )) + .unwrap(); + + Author { + name, + email, + gravatar_url, + } + } +} + +pub fn normalize_branch_name(name: &str) -> String { + let pattern = 
Regex::new("[^A-Za-z0-9_/.#]+").unwrap(); + pattern.replace_all(name, "-").to_string() +} + +pub fn get_default_target( + session_reader: &sessions::Reader, +) -> Result, reader::Error> { + let target_reader = target::Reader::new(session_reader); + match target_reader.read_default() { + Ok(target) => Ok(Some(target)), + Err(reader::Error::NotFound) => Ok(None), + Err(error) => Err(error), + } +} + +pub fn apply_branch( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + branch_id: &BranchId, + signing_key: Option<&keys::PrivateKey>, + user: Option<&users::User>, +) -> Result<(), errors::ApplyBranchError> { + if project_repository.is_resolving() { + return Err(errors::ApplyBranchError::Conflict( + errors::ProjectConflictError { + project_id: project_repository.project().id, + }, + )); + } + let current_session = gb_repository + .get_or_create_current_session() + .context("failed to get or create current session")?; + let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) + .context("failed to open current session")?; + + let repo = &project_repository.git_repository; + + let default_target = get_default_target(¤t_session_reader) + .context("failed to get default target")? 
+ .ok_or_else(|| { + errors::ApplyBranchError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + + let writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create branch writer")?; + + let mut branch = match branch::Reader::new(¤t_session_reader).read(branch_id) { + Ok(branch) => Ok(branch), + Err(reader::Error::NotFound) => Err(errors::ApplyBranchError::BranchNotFound( + errors::BranchNotFoundError { + project_id: project_repository.project().id, + branch_id: *branch_id, + }, + )), + Err(error) => Err(errors::ApplyBranchError::Other(error.into())), + }?; + + if branch.applied { + return Ok(()); + } + + let target_commit = repo + .find_commit(default_target.sha) + .context("failed to find target commit")?; + let target_tree = target_commit.tree().context("failed to get target tree")?; + + // calculate the merge base and make sure it's the same as the target commit + // if not, we need to merge or rebase the branch to get it up to date + + let merge_base = repo + .merge_base(default_target.sha, branch.head) + .context(format!( + "failed to find merge base between {} and {}", + default_target.sha, branch.head + ))?; + if merge_base != default_target.sha { + // Branch is out of date, merge or rebase it + let merge_base_tree = repo + .find_commit(merge_base) + .context(format!("failed to find merge base commit {}", merge_base))? + .tree() + .context("failed to find merge base tree")?; + + let branch_tree = repo + .find_tree(branch.tree) + .context("failed to find branch tree")?; + + let mut merge_index = repo + .merge_trees(&merge_base_tree, &branch_tree, &target_tree) + .context("failed to merge trees")?; + + if merge_index.has_conflicts() { + // currently we can only deal with the merge problem branch + for mut branch in super::get_status_by_branch(gb_repository, project_repository)? 
+ .0 + .into_iter() + .map(|(branch, _)| branch) + .filter(|branch| branch.applied) + { + branch.applied = false; + writer.write(&mut branch)?; + } + + // apply the branch + branch.applied = true; + writer.write(&mut branch)?; + + // checkout the conflicts + repo.checkout_index(&mut merge_index) + .allow_conflicts() + .conflict_style_merge() + .force() + .checkout() + .context("failed to checkout index")?; + + // mark conflicts + let conflicts = merge_index + .conflicts() + .context("failed to get merge index conflicts")?; + let mut merge_conflicts = Vec::new(); + for path in conflicts.flatten() { + if let Some(ours) = path.our { + let path = std::str::from_utf8(&ours.path) + .context("failed to convert path to utf8")? + .to_string(); + merge_conflicts.push(path); + } + } + conflicts::mark( + project_repository, + &merge_conflicts, + Some(default_target.sha), + )?; + + return Ok(()); + } + + let head_commit = repo + .find_commit(branch.head) + .context("failed to find head commit")?; + + let merged_branch_tree_oid = merge_index + .write_tree_to(repo) + .context("failed to write tree")?; + + let merged_branch_tree = repo + .find_tree(merged_branch_tree_oid) + .context("failed to find tree")?; + + let ok_with_force_push = project_repository.project().ok_with_force_push; + if branch.upstream.is_some() && !ok_with_force_push { + // branch was pushed to upstream, and user doesn't like force pushing. + // create a merge commit to avoid the need of force pushing then. + + let new_branch_head = project_repository.commit( + user, + format!( + "Merged {}/{} into {}", + default_target.branch.remote(), + default_target.branch.branch(), + branch.name + ) + .as_str(), + &merged_branch_tree, + &[&head_commit, &target_commit], + signing_key, + )?; + + // ok, update the virtual branch + branch.head = new_branch_head; + branch.tree = merged_branch_tree_oid; + writer.write(&mut branch)?; + } else { + // branch was not pushed to upstream yet. 
attempt a rebase, + let (_, committer) = project_repository.git_signatures(user)?; + let mut rebase_options = git2::RebaseOptions::new(); + rebase_options.quiet(true); + rebase_options.inmemory(true); + let mut rebase = repo + .rebase( + Some(branch.head), + Some(target_commit.id()), + None, + Some(&mut rebase_options), + ) + .context("failed to rebase")?; + + let mut rebase_success = true; + // check to see if these commits have already been pushed + let mut last_rebase_head = branch.head; + while rebase.next().is_some() { + let index = rebase + .inmemory_index() + .context("failed to get inmemory index")?; + if index.has_conflicts() { + rebase_success = false; + break; + } + + if let Ok(commit_id) = rebase.commit(None, &committer.clone().into(), None) { + last_rebase_head = commit_id.into(); + } else { + rebase_success = false; + break; + } + } + + if rebase_success { + // rebase worked out, rewrite the branch head + rebase.finish(None).context("failed to finish rebase")?; + branch.head = last_rebase_head; + branch.tree = merged_branch_tree_oid; + } else { + // rebase failed, do a merge commit + rebase.abort().context("failed to abort rebase")?; + + // get tree from merge_tree_oid + let merge_tree = repo + .find_tree(merged_branch_tree_oid) + .context("failed to find tree")?; + + // commit the merge tree oid + let new_branch_head = project_repository + .commit( + user, + format!( + "Merged {}/{} into {}", + default_target.branch.remote(), + default_target.branch.branch(), + branch.name + ) + .as_str(), + &merge_tree, + &[&head_commit, &target_commit], + signing_key, + ) + .context("failed to commit merge")?; + + branch.head = new_branch_head; + branch.tree = merged_branch_tree_oid; + } + } + } + + let wd_tree = project_repository.get_wd_tree()?; + + let branch_tree = repo + .find_tree(branch.tree) + .context("failed to find branch tree")?; + + // check index for conflicts + let mut merge_index = repo + .merge_trees(&target_tree, &wd_tree, &branch_tree) + 
.context("failed to merge trees")?; + + if merge_index.has_conflicts() { + return Err(errors::ApplyBranchError::BranchConflicts(*branch_id)); + } + + // apply the branch + branch.applied = true; + writer.write(&mut branch)?; + + ensure_selected_for_changes(¤t_session_reader, &writer) + .context("failed to ensure selected for changes")?; + + // checkout the merge index + repo.checkout_index(&mut merge_index) + .force() + .checkout() + .context("failed to checkout index")?; + + super::integration::update_gitbutler_integration(gb_repository, project_repository)?; + + Ok(()) +} + +pub fn unapply_ownership( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + ownership: &BranchOwnershipClaims, +) -> Result<(), errors::UnapplyOwnershipError> { + if conflicts::is_resolving(project_repository) { + return Err(errors::UnapplyOwnershipError::Conflict( + errors::ProjectConflictError { + project_id: project_repository.project().id, + }, + )); + } + + let latest_session = gb_repository + .get_latest_session() + .context("failed to get or create current session")? + .ok_or_else(|| { + errors::UnapplyOwnershipError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + + let latest_session_reader = sessions::Reader::open(gb_repository, &latest_session) + .context("failed to open current session")?; + + let default_target = get_default_target(&latest_session_reader) + .context("failed to get default target")? + .ok_or_else(|| { + errors::UnapplyOwnershipError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + + let applied_branches = Iterator::new(&latest_session_reader) + .context("failed to create branch iterator")? + .collect::, reader::Error>>() + .context("failed to read virtual branches")? 
+ .into_iter() + .filter(|b| b.applied) + .collect::>(); + + let (applied_statuses, _) = get_applied_status( + gb_repository, + project_repository, + &default_target, + applied_branches, + ) + .context("failed to get status by branch")?; + + let hunks_to_unapply = applied_statuses + .iter() + .map( + |(branch, branch_files)| -> Result> { + let branch_files = calculate_non_commited_diffs( + project_repository, + branch, + &default_target, + branch_files, + )?; + + let mut hunks_to_unapply = Vec::new(); + for (path, hunks) in branch_files { + let ownership_hunks: Vec<&Hunk> = ownership + .claims + .iter() + .filter(|o| o.file_path == path) + .flat_map(|f| &f.hunks) + .collect(); + for hunk in hunks { + if ownership_hunks.contains(&&Hunk::from(&hunk)) { + hunks_to_unapply.push((path.clone(), hunk)); + } + } + } + + hunks_to_unapply.sort_by(|a, b| a.1.old_start.cmp(&b.1.old_start)); + + Ok(hunks_to_unapply) + }, + ) + .collect::>>()? + .into_iter() + .flatten() + .collect::>(); + + let mut diff = HashMap::new(); + for h in hunks_to_unapply { + if let Some(reversed_hunk) = diff::reverse_hunk(&h.1) { + diff.entry(h.0).or_insert_with(Vec::new).push(reversed_hunk); + } else { + return Err(errors::UnapplyOwnershipError::Other(anyhow::anyhow!( + "failed to reverse hunk" + ))); + } + } + + let repo = &project_repository.git_repository; + + let target_commit = repo + .find_commit(default_target.sha) + .context("failed to find target commit")?; + + let base_tree = target_commit.tree().context("failed to get target tree")?; + let final_tree = applied_statuses.into_iter().fold( + target_commit.tree().context("failed to get target tree"), + |final_tree, status| { + let final_tree = final_tree?; + let tree_oid = write_tree(project_repository, &default_target, &status.1)?; + let branch_tree = repo.find_tree(tree_oid)?; + let mut result = repo.merge_trees(&base_tree, &final_tree, &branch_tree)?; + let final_tree_oid = result.write_tree_to(repo)?; + repo.find_tree(final_tree_oid) + 
.context("failed to find tree")
+        },
+    )?;
+
+    let final_tree_oid = write_tree_onto_tree(project_repository, &final_tree, &diff)?;
+    let final_tree = repo
+        .find_tree(final_tree_oid)
+        .context("failed to find tree")?;
+
+    repo.checkout_tree(&final_tree)
+        .force()
+        .remove_untracked()
+        .checkout()
+        .context("failed to checkout tree")?;
+
+    super::integration::update_gitbutler_integration(gb_repository, project_repository)?;
+
+    Ok(())
+}
+
+// reset a file in the project to the index state
+pub fn reset_files(
+    project_repository: &project_repository::Repository,
+    files: &Vec<String>,
+) -> Result<(), errors::UnapplyOwnershipError> {
+    if conflicts::is_resolving(project_repository) {
+        return Err(errors::UnapplyOwnershipError::Conflict(
+            errors::ProjectConflictError {
+                project_id: project_repository.project().id,
+            },
+        ));
+    }
+
+    // for each tree, we need to checkout the entry from the index at that path
+    // or if it doesn't exist, remove the file from the working directory
+    let repo = &project_repository.git_repository;
+    let index = repo.index().context("failed to get index")?;
+    for file in files {
+        let entry = index.get_path(Path::new(file), 0);
+        if entry.is_some() {
+            repo.checkout_index_path(Path::new(file))
+                .context("failed to checkout index")?;
+        } else {
+            // find the project root
+            let project_root = &project_repository.project().path;
+            let path = Path::new(file);
+            //combine the project root with the file path
+            let path = &project_root.join(path);
+            std::fs::remove_file(path).context("failed to remove file")?;
+        }
+    }
+
+    Ok(())
+}
+
+// to unapply a branch, we need to write the current tree out, then remove those file changes from the wd
+pub fn unapply_branch(
+    gb_repository: &gb_repository::Repository,
+    project_repository: &project_repository::Repository,
+    branch_id: &BranchId,
+) -> Result<Option<branch::Branch>, errors::UnapplyBranchError> {
+    let session = &gb_repository
+        .get_or_create_current_session()
+        .context("failed to get or create currnt
session")?;
+
+    let current_session_reader =
+        sessions::Reader::open(gb_repository, session).context("failed to open current session")?;
+
+    let branch_reader = branch::Reader::new(&current_session_reader);
+
+    let mut target_branch = branch_reader.read(branch_id).map_err(|error| match error {
+        reader::Error::NotFound => {
+            errors::UnapplyBranchError::BranchNotFound(errors::BranchNotFoundError {
+                project_id: project_repository.project().id,
+                branch_id: *branch_id,
+            })
+        }
+        error => errors::UnapplyBranchError::Other(error.into()),
+    })?;
+
+    if !target_branch.applied {
+        return Ok(Some(target_branch));
+    }
+
+    let default_target = get_default_target(&current_session_reader)
+        .context("failed to get default target")?
+        .ok_or_else(|| {
+            errors::UnapplyBranchError::DefaultTargetNotSet(errors::DefaultTargetNotSetError {
+                project_id: project_repository.project().id,
+            })
+        })?;
+
+    let repo = &project_repository.git_repository;
+    let target_commit = repo
+        .find_commit(default_target.sha)
+        .context("failed to find target commit")?;
+
+    let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir())
+        .context("failed to create writer")?;
+
+    let final_tree = if conflicts::is_resolving(project_repository) {
+        // when applying branch leads to a conflict, all other branches are unapplied.
+        // this means we can just reset to the default target tree.
+        {
+            target_branch.applied = false;
+            target_branch.selected_for_changes = None;
+            branch_writer.write(&mut target_branch)?;
+        }
+
+        conflicts::clear(project_repository).context("failed to clear conflicts")?;
+
+        target_commit.tree().context("failed to get target tree")?
+    } else {
+        // if we are not resolving, we need to merge the rest of the applied branches
+        let applied_branches = Iterator::new(&current_session_reader)
+            .context("failed to create branch iterator")?
+            .collect::<Result<Vec<branch::Branch>, reader::Error>>()
+            .context("failed to read virtual branches")?
+ .into_iter() + .filter(|b| b.applied) + .collect::>(); + + let (applied_statuses, _) = get_applied_status( + gb_repository, + project_repository, + &default_target, + applied_branches, + ) + .context("failed to get status by branch")?; + + let status = applied_statuses + .iter() + .find(|(s, _)| s.id == target_branch.id) + .context("failed to find status for branch"); + + if let Ok((_, files)) = status { + if files.is_empty() { + // if there is nothing to unapply, remove the branch straight away + branch_writer + .delete(&target_branch) + .context("Failed to remove branch")?; + + ensure_selected_for_changes(¤t_session_reader, &branch_writer) + .context("failed to ensure selected for changes")?; + + project_repository.delete_branch_reference(&target_branch)?; + return Ok(None); + } + + target_branch.tree = write_tree(project_repository, &default_target, files)?; + target_branch.applied = false; + target_branch.selected_for_changes = None; + branch_writer.write(&mut target_branch)?; + } + + let target_commit = repo + .find_commit(default_target.sha) + .context("failed to find target commit")?; + + // ok, update the wd with the union of the rest of the branches + let base_tree = target_commit.tree().context("failed to get target tree")?; + + // go through the other applied branches and merge them into the final tree + // then check that out into the working directory + let final_tree = applied_statuses + .into_iter() + .filter(|(branch, _)| &branch.id != branch_id) + .fold( + target_commit.tree().context("failed to get target tree"), + |final_tree, status| { + let final_tree = final_tree?; + let tree_oid = write_tree(project_repository, &default_target, &status.1)?; + let branch_tree = repo.find_tree(tree_oid)?; + let mut result = repo.merge_trees(&base_tree, &final_tree, &branch_tree)?; + let final_tree_oid = result.write_tree_to(repo)?; + repo.find_tree(final_tree_oid) + .context("failed to find tree") + }, + )?; + + ensure_selected_for_changes(¤t_session_reader, 
&branch_writer)
+            .context("failed to ensure selected for changes")?;
+
+        final_tree
+    };
+
+    // checkout final_tree into the working directory
+    repo.checkout_tree(&final_tree)
+        .force()
+        .remove_untracked()
+        .checkout()
+        .context("failed to checkout tree")?;
+
+    super::integration::update_gitbutler_integration(gb_repository, project_repository)?;
+
+    Ok(Some(target_branch))
+}
+
+fn find_base_tree<'a>(
+    repo: &'a git::Repository,
+    branch_commit: &'a git::Commit<'a>,
+    target_commit: &'a git::Commit<'a>,
+) -> Result<git::Tree<'a>> {
+    // find merge base between target_commit and branch_commit
+    let merge_base = repo
+        .merge_base(target_commit.id(), branch_commit.id())
+        .context("failed to find merge base")?;
+    // turn oid into a commit
+    let merge_base_commit = repo
+        .find_commit(merge_base)
+        .context("failed to find merge base commit")?;
+    let base_tree = merge_base_commit
+        .tree()
+        .context("failed to get base tree object")?;
+    Ok(base_tree)
+}
+
+pub fn list_virtual_branches(
+    gb_repository: &gb_repository::Repository,
+    project_repository: &project_repository::Repository,
+) -> Result<(Vec<VirtualBranch>, bool, Vec<diff::FileDiff>), errors::ListVirtualBranchesError> {
+    let mut branches: Vec<VirtualBranch> = Vec::new();
+
+    let default_target = gb_repository
+        .default_target()
+        .context("failed to get default target")?
+ .ok_or_else(|| { + errors::ListVirtualBranchesError::DefaultTargetNotSet( + errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }, + ) + })?; + + let (statuses, skipped_files) = get_status_by_branch(gb_repository, project_repository)?; + let max_selected_for_changes = statuses + .iter() + .filter_map(|(branch, _)| branch.selected_for_changes) + .max() + .unwrap_or(-1); + + for (branch, files) in &statuses { + // check if head tree does not match target tree + // if so, we diff the head tree and the new write_tree output to see what is new and filter the hunks to just those + let files = + calculate_non_commited_diffs(project_repository, branch, &default_target, files)?; + + let repo = &project_repository.git_repository; + + let upstream_branch = match branch + .upstream + .as_ref() + .map(|name| repo.find_branch(&git::Refname::from(name))) + .transpose() + { + Err(git::Error::NotFound(_)) => Ok(None), + Err(error) => Err(error), + Ok(branch) => Ok(branch), + } + .context(format!( + "failed to find upstream branch for {}", + branch.name + ))?; + + let upstram_branch_commit = upstream_branch + .as_ref() + .map(git::Branch::peel_to_commit) + .transpose() + .context(format!( + "failed to find upstream branch commit for {}", + branch.name + ))?; + + // find upstream commits if we found an upstream reference + let mut pushed_commits = HashMap::new(); + if let Some(upstream) = &upstram_branch_commit { + let merge_base = + repo.merge_base(upstream.id(), default_target.sha) + .context(format!( + "failed to find merge base between {} and {}", + upstream.id(), + default_target.sha + ))?; + for oid in project_repository.l(upstream.id(), LogUntil::Commit(merge_base))? 
{ + pushed_commits.insert(oid, true); + } + } + + let mut is_integrated = false; + let mut is_remote = false; + + // find all commits on head that are not on target.sha + let commits = project_repository + .log(branch.head, LogUntil::Commit(default_target.sha)) + .context(format!("failed to get log for branch {}", branch.name))? + .iter() + .map(|commit| { + is_remote = if is_remote { + is_remote + } else { + pushed_commits.contains_key(&commit.id()) + }; + + // only check for integration if we haven't already found an integration + is_integrated = if is_integrated { + is_integrated + } else { + is_commit_integrated(project_repository, &default_target, commit)? + }; + + commit_to_vbranch_commit( + project_repository, + branch, + commit, + is_integrated, + is_remote, + ) + }) + .collect::>>()?; + + // if the branch is not applied, check to see if it's mergeable and up to date + let mut base_current = true; + if !branch.applied { + // determine if this branch is up to date with the target/base + let merge_base = repo + .merge_base(default_target.sha, branch.head) + .context("failed to find merge base")?; + if merge_base != default_target.sha { + base_current = false; + } + } + + let upstream = upstream_branch + .map(|upstream_branch| branch_to_remote_branch(&upstream_branch)) + .transpose()? 
+ .flatten(); + + let mut files = diffs_to_virtual_files(project_repository, &files); + files.sort_by(|a, b| { + branch + .ownership + .claims + .iter() + .position(|o| o.file_path.eq(&a.path)) + .unwrap_or(999) + .cmp( + &branch + .ownership + .claims + .iter() + .position(|id| id.file_path.eq(&b.path)) + .unwrap_or(999), + ) + }); + + let requires_force = is_requires_force(project_repository, branch)?; + let branch = VirtualBranch { + id: branch.id, + name: branch.name.clone(), + notes: branch.notes.clone(), + active: branch.applied, + files, + order: branch.order, + commits, + requires_force, + upstream, + upstream_name: branch + .upstream + .clone() + .and_then(|r| Refname::from(r).branch().map(Into::into)), + conflicted: conflicts::is_resolving(project_repository), + base_current, + ownership: branch.ownership.clone(), + updated_at: branch.updated_timestamp_ms, + selected_for_changes: branch.selected_for_changes == Some(max_selected_for_changes), + head: branch.head, + }; + branches.push(branch); + } + + let branches = branches_with_large_files_abridged(branches); + let mut branches = branches_with_hunk_locks(branches, project_repository)?; + + // If there no context lines are used internally, add them here, before returning to the UI + if context_lines(project_repository) == 0 { + for branch in &mut branches { + branch.files = files_with_hunk_context( + &project_repository.git_repository, + branch.files.clone(), + 3, + branch.head, + ) + .context("failed to add hunk context")?; + } + } + + branches.sort_by(|a, b| a.order.cmp(&b.order)); + + super::integration::update_gitbutler_integration(gb_repository, project_repository)?; + + let uses_diff_context = project_repository + .project() + .use_diff_context + .unwrap_or(false); + Ok((branches, uses_diff_context, skipped_files)) +} + +fn branches_with_large_files_abridged(mut branches: Vec) -> Vec { + for branch in &mut branches { + for file in &mut branch.files { + // Diffs larger than 500kb are considered large 
+ if file.hunks.iter().any(|hunk| hunk.diff.len() > 500_000) { + file.large = true; + file.hunks + .iter_mut() + .for_each(|hunk| hunk.diff = String::new()); + } + } + } + branches +} + +fn branches_with_hunk_locks( + mut branches: Vec, + project_repository: &project_repository::Repository, +) -> Result> { + let all_commits: Vec = branches + .clone() + .iter() + .filter(|branch| branch.active) + .flat_map(|vbranch| vbranch.commits.clone()) + .collect(); + + for commit in all_commits { + let commit = project_repository.git_repository.find_commit(commit.id)?; + let parent = commit.parent(0).context("failed to get parent commit")?; + let commit_tree = commit.tree().context("failed to get commit tree")?; + let parent_tree = parent.tree().context("failed to get parent tree")?; + let commited_file_diffs = diff::trees( + &project_repository.git_repository, + &parent_tree, + &commit_tree, + context_lines(project_repository), + )?; + let commited_file_diffs = diff::diff_files_to_hunks(&commited_file_diffs); + for branch in &mut branches { + for file in &mut branch.files { + for hunk in &mut file.hunks { + let locked = + commited_file_diffs + .get(&file.path) + .map_or(false, |committed_hunks| { + committed_hunks.iter().any(|committed_hunk| { + joined( + committed_hunk.old_start, + committed_hunk.old_start + committed_hunk.new_lines, + hunk.start, + hunk.end, + ) + }) + }); + if locked { + hunk.locked = true; + hunk.locked_to = Some(commit.id()); + } + } + } + } + } + Ok(branches) +} + +fn joined(start_a: u32, end_a: u32, start_b: u32, end_b: u32) -> bool { + (start_a <= start_b && end_a >= start_b) || (start_a <= end_b && end_a >= end_b) +} + +fn files_with_hunk_context( + repository: &git::Repository, + mut files: Vec, + context_lines: usize, + branch_head: git::Oid, +) -> Result> { + for file in &mut files { + if file.binary { + continue; + } + // Get file content as it looked before the diffs + let branch_head_commit = repository.find_commit(branch_head)?; + let 
head_tree = branch_head_commit.tree()?; + let file_content_before = + show::show_file_at_tree(repository, file.path.clone(), &head_tree) + .context("failed to get file contents at base")?; + let file_lines_before = file_content_before.split('\n').collect::>(); + + // Update each hunk with contex lines before & after + file.hunks = file + .hunks + .iter() + .map(|hunk| { + if hunk.diff.is_empty() { + // noop on empty diff + hunk.clone() + } else { + let hunk_with_ctx = context::hunk_with_context( + &hunk.diff, + hunk.old_start as usize, + hunk.start as usize, + hunk.binary, + context_lines, + &file_lines_before, + hunk.change_type, + ); + to_virtual_branch_hunk(hunk.clone(), hunk_with_ctx) + } + }) + .collect::>(); + } + Ok(files) +} + +fn to_virtual_branch_hunk( + mut hunk: VirtualBranchHunk, + diff_with_context: diff::GitHunk, +) -> VirtualBranchHunk { + hunk.diff = diff_with_context.diff; + hunk.start = diff_with_context.new_start; + hunk.end = diff_with_context.new_start + diff_with_context.new_lines; + hunk +} + +fn is_requires_force( + project_repository: &project_repository::Repository, + branch: &branch::Branch, +) -> Result { + let upstream = if let Some(upstream) = &branch.upstream { + upstream + } else { + return Ok(false); + }; + + let reference = match project_repository + .git_repository + .refname_to_id(&upstream.to_string()) + { + Ok(reference) => reference, + Err(git::Error::NotFound(_)) => return Ok(false), + Err(other) => return Err(other).context("failed to find upstream reference"), + }; + + let upstream_commit = project_repository + .git_repository + .find_commit(reference) + .context("failed to find upstream commit")?; + + let merge_base = project_repository + .git_repository + .merge_base(upstream_commit.id(), branch.head)?; + + Ok(merge_base != upstream_commit.id()) +} + +// given a virtual branch and it's files that are calculated off of a default target, +// return files adjusted to the branch's head commit +pub fn 
calculate_non_commited_diffs( + project_repository: &project_repository::Repository, + branch: &branch::Branch, + default_target: &target::Target, + files: &HashMap>, +) -> Result>> { + if default_target.sha == branch.head && !branch.applied { + return Ok(files.clone()); + }; + + let branch_tree = if branch.applied { + let target_plus_wd_oid = write_tree(project_repository, default_target, files)?; + project_repository + .git_repository + .find_tree(target_plus_wd_oid) + } else { + project_repository.git_repository.find_tree(branch.tree) + }?; + + let branch_head = project_repository + .git_repository + .find_commit(branch.head)? + .tree()?; + + // do a diff between branch.head and the tree we _would_ commit + let non_commited_diff = diff::trees( + &project_repository.git_repository, + &branch_head, + &branch_tree, + context_lines(project_repository), + ) + .context("failed to diff trees")?; + let mut non_commited_diff = diff::diff_files_to_hunks(&non_commited_diff); + + let workspace_diff = diff::workdir( + &project_repository.git_repository, + &branch.head, + context_lines(project_repository), + )?; + let workspace_diff = diff::diff_files_to_hunks(&workspace_diff); + + // record conflicts resolution + // TODO: this feels out of place. move it somewhere else? 
+ let conflicting_files = conflicts::conflicting_files(project_repository)?; + for (file_path, non_commited_hunks) in &non_commited_diff { + let mut conflicted = false; + if conflicting_files.contains(&file_path.display().to_string()) { + // check file for conflict markers, resolve the file if there are none in any hunk + for hunk in non_commited_hunks { + if hunk.diff.contains("<<<<<<< ours") { + conflicted = true; + } + if hunk.diff.contains(">>>>>>> theirs") { + conflicted = true; + } + } + if !conflicted { + conflicts::resolve(project_repository, &file_path.display().to_string()).unwrap(); + } + } + } + + // Revert back to the original line numbers from all hunks in the workspace + // This is done because the hunks in non_commited_diff have line numbers relative to the vbranch, which would be incorrect for the workspace + // Applies only to branches that are applied (in the workspace) + if branch.applied { + non_commited_diff = non_commited_diff + .into_iter() + .map(|(path, uncommitted_hunks)| { + let all_hunks = workspace_diff.get(&path); + if let Some(all_hunks) = all_hunks { + let hunks = line_agnostic_hunk_intersection(uncommitted_hunks, all_hunks); + (path, hunks) + } else { + (path, uncommitted_hunks) + } + }) + .collect(); + } + + Ok(non_commited_diff) +} + +/// Given two lists of hunks, returns the intersection based on the diff content and disregarding line numbers +/// +/// Since the hunks are not identical, the retuned hunks are the ones from the second argument +/// # Arguments +/// * `left` - A list of hunks +/// * `right` - A list of hunks to return from +/// # Returns +/// * A list of hunks that are present in both `left` and `right`, copied from `right` +fn line_agnostic_hunk_intersection(left: Vec, right: &Vec) -> Vec { + let mut result = Vec::new(); + for l in left { + // Skip the header containing line numbers + let l_diff = l.diff.split("@@").collect::>().pop(); + for r in right { + let r_diff = r.diff.split("@@").collect::>().pop(); + if 
l_diff == r_diff { + result.push(r.clone()); + break; + } + } + } + result +} + +fn list_virtual_commit_files( + project_repository: &project_repository::Repository, + commit: &git::Commit, +) -> Result> { + if commit.parent_count() == 0 { + return Ok(vec![]); + } + let parent = commit.parent(0).context("failed to get parent commit")?; + let commit_tree = commit.tree().context("failed to get commit tree")?; + let parent_tree = parent.tree().context("failed to get parent tree")?; + let diff = diff::trees( + &project_repository.git_repository, + &parent_tree, + &commit_tree, + context_lines(project_repository), + )?; + let diff = diff::diff_files_to_hunks(&diff); + let hunks_by_filepath = virtual_hunks_by_filepath(&project_repository.project().path, &diff); + Ok(virtual_hunks_to_virtual_files( + project_repository, + &hunks_by_filepath + .values() + .flatten() + .cloned() + .collect::>(), + )) +} + +fn commit_to_vbranch_commit( + repository: &project_repository::Repository, + branch: &branch::Branch, + commit: &git::Commit, + is_integrated: bool, + is_remote: bool, +) -> Result { + let timestamp = u128::try_from(commit.time().seconds())?; + let signature = commit.author(); + let message = commit.message().unwrap().to_string(); + + let files = + list_virtual_commit_files(repository, commit).context("failed to list commit files")?; + + let parent_ids = commit.parents()?.iter().map(Commit::id).collect::>(); + + let commit = VirtualBranchCommit { + id: commit.id(), + created_at: timestamp * 1000, + author: Author::from(signature), + description: message, + is_remote, + files, + is_integrated, + parent_ids, + branch_id: branch.id, + }; + + Ok(commit) +} + +pub fn create_virtual_branch( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + create: &BranchCreateRequest, +) -> Result { + let current_session = gb_repository + .get_or_create_current_session() + .context("failed to get or create currnt session")?; + let 
current_session_reader = sessions::Reader::open(gb_repository, &current_session)
+        .context("failed to open current session")?;
+
+    let default_target = get_default_target(&current_session_reader)
+        .context("failed to get default target")?
+        .ok_or_else(|| {
+            errors::CreateVirtualBranchError::DefaultTargetNotSet(
+                errors::DefaultTargetNotSetError {
+                    project_id: project_repository.project().id,
+                },
+            )
+        })?;
+
+    let commit = project_repository
+        .git_repository
+        .find_commit(default_target.sha)
+        .context("failed to find default target commit")?;
+
+    let tree = commit
+        .tree()
+        .context("failed to find defaut target commit tree")?;
+
+    let mut all_virtual_branches = Iterator::new(&current_session_reader)
+        .context("failed to create branch iterator")?
+        .collect::<Result<Vec<branch::Branch>, reader::Error>>()
+        .context("failed to read virtual branches")?;
+    all_virtual_branches.sort_by_key(|branch| branch.order);
+
+    let order = create
+        .order
+        .unwrap_or(all_virtual_branches.len())
+        .clamp(0, all_virtual_branches.len());
+
+    let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir())
+        .context("failed to create writer")?;
+
+    let selected_for_changes = if let Some(selected_for_changes) = create.selected_for_changes {
+        if selected_for_changes {
+            for mut other_branch in Iterator::new(&current_session_reader)
+                .context("failed to create branch iterator")?
+                .collect::<Result<Vec<branch::Branch>, reader::Error>>()
+                .context("failed to read virtual branches")?
+ { + other_branch.selected_for_changes = None; + branch_writer.write(&mut other_branch)?; + } + Some(chrono::Utc::now().timestamp_millis()) + } else { + None + } + } else { + (!all_virtual_branches + .iter() + .any(|b| b.selected_for_changes.is_some())) + .then_some(chrono::Utc::now().timestamp_millis()) + }; + + // make space for the new branch + for (i, branch) in all_virtual_branches.iter().enumerate() { + let mut branch = branch.clone(); + let new_order = if i < order { i } else { i + 1 }; + if branch.order != new_order { + branch.order = new_order; + branch_writer + .write(&mut branch) + .context("failed to write branch")?; + } + } + + let now = time::UNIX_EPOCH + .elapsed() + .context("failed to get elapsed time")? + .as_millis(); + + let name = dedup( + &all_virtual_branches + .iter() + .map(|b| b.name.as_str()) + .collect::>(), + create + .name + .as_ref() + .unwrap_or(&"Virtual branch".to_string()), + ); + + let mut branch = Branch { + id: BranchId::generate(), + name, + notes: String::new(), + applied: true, + upstream: None, + upstream_head: None, + tree: tree.id(), + head: default_target.sha, + created_timestamp_ms: now, + updated_timestamp_ms: now, + ownership: BranchOwnershipClaims::default(), + order, + selected_for_changes, + }; + + if let Some(ownership) = &create.ownership { + set_ownership( + ¤t_session_reader, + &branch_writer, + &mut branch, + ownership, + ) + .context("failed to set ownership")?; + } + + branch_writer + .write(&mut branch) + .context("failed to write branch")?; + + project_repository.add_branch_reference(&branch)?; + + Ok(branch) +} + +pub fn merge_virtual_branch_upstream( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + branch_id: &BranchId, + signing_key: Option<&keys::PrivateKey>, + user: Option<&users::User>, +) -> Result<(), errors::MergeVirtualBranchUpstreamError> { + if conflicts::is_conflicting::<&Path>(project_repository, None)? 
{ + return Err(errors::MergeVirtualBranchUpstreamError::Conflict( + errors::ProjectConflictError { + project_id: project_repository.project().id, + }, + )); + } + + let current_session = gb_repository + .get_or_create_current_session() + .context("failed to get current session")?; + let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) + .context("failed to open current session")?; + + // get the branch + let branch_reader = branch::Reader::new(¤t_session_reader); + let mut branch = match branch_reader.read(branch_id) { + Ok(branch) => Ok(branch), + Err(reader::Error::NotFound) => Err( + errors::MergeVirtualBranchUpstreamError::BranchNotFound(errors::BranchNotFoundError { + project_id: project_repository.project().id, + branch_id: *branch_id, + }), + ), + Err(error) => Err(errors::MergeVirtualBranchUpstreamError::Other(error.into())), + }?; + + // check if the branch upstream can be merged into the wd cleanly + let repo = &project_repository.git_repository; + + // get upstream from the branch and find the remote branch + let mut upstream_commit = None; + let upstream_branch = branch + .upstream + .as_ref() + .context("no upstream branch found")?; + if let Ok(upstream_oid) = repo.refname_to_id(&upstream_branch.to_string()) { + if let Ok(upstream_commit_obj) = repo.find_commit(upstream_oid) { + upstream_commit = Some(upstream_commit_obj); + } + } + + // if there is no upstream commit, then there is nothing to do + if upstream_commit.is_none() { + // no upstream commit, no merge to be done + return Ok(()); + } + + // there is an upstream commit, so lets check it out + let upstream_commit = upstream_commit.unwrap(); + let remote_tree = upstream_commit.tree().context("failed to get tree")?; + + if upstream_commit.id() == branch.head { + // upstream is already merged, nothing to do + return Ok(()); + } + + // if any other branches are applied, unapply them + let applied_branches = Iterator::new(¤t_session_reader) + .context("failed to create 
branch iterator")? + .collect::, reader::Error>>() + .context("failed to read virtual branches")? + .into_iter() + .filter(|b| b.applied) + .filter(|b| b.id != *branch_id) + .collect::>(); + + // unapply all other branches + for other_branch in applied_branches { + unapply_branch(gb_repository, project_repository, &other_branch.id) + .context("failed to unapply branch")?; + } + + // get merge base from remote branch commit and target commit + let merge_base = repo + .merge_base(upstream_commit.id(), branch.head) + .context("failed to find merge base")?; + let merge_tree = repo + .find_commit(merge_base) + .and_then(|c| c.tree()) + .context(format!( + "failed to find merge base commit {} tree", + merge_base + ))?; + + // get wd tree + let wd_tree = project_repository.get_wd_tree()?; + + // try to merge our wd tree with the upstream tree + let mut merge_index = repo + .merge_trees(&merge_tree, &wd_tree, &remote_tree) + .context("failed to merge trees")?; + + if merge_index.has_conflicts() { + // checkout the conflicts + repo.checkout_index(&mut merge_index) + .allow_conflicts() + .conflict_style_merge() + .force() + .checkout() + .context("failed to checkout index")?; + + // mark conflicts + let conflicts = merge_index.conflicts().context("failed to get conflicts")?; + let mut merge_conflicts = Vec::new(); + for path in conflicts.flatten() { + if let Some(ours) = path.our { + let path = std::str::from_utf8(&ours.path) + .context("failed to convert path to utf8")? 
+ .to_string(); + merge_conflicts.push(path); + } + } + conflicts::mark( + project_repository, + &merge_conflicts, + Some(upstream_commit.id()), + )?; + } else { + let merge_tree_oid = merge_index + .write_tree_to(repo) + .context("failed to write tree")?; + let merge_tree = repo + .find_tree(merge_tree_oid) + .context("failed to find merge tree")?; + let branch_writer = + branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create writer")?; + + if *project_repository.project().ok_with_force_push { + // attempt a rebase + let (_, committer) = project_repository.git_signatures(user)?; + let mut rebase_options = git2::RebaseOptions::new(); + rebase_options.quiet(true); + rebase_options.inmemory(true); + let mut rebase = repo + .rebase( + Some(branch.head), + Some(upstream_commit.id()), + None, + Some(&mut rebase_options), + ) + .context("failed to rebase")?; + + let mut rebase_success = true; + // check to see if these commits have already been pushed + let mut last_rebase_head = upstream_commit.id(); + while rebase.next().is_some() { + let index = rebase + .inmemory_index() + .context("failed to get inmemory index")?; + if index.has_conflicts() { + rebase_success = false; + break; + } + + if let Ok(commit_id) = rebase.commit(None, &committer.clone().into(), None) { + last_rebase_head = commit_id.into(); + } else { + rebase_success = false; + break; + } + } + + if rebase_success { + // rebase worked out, rewrite the branch head + rebase.finish(None).context("failed to finish rebase")?; + + project_repository + .git_repository + .checkout_tree(&merge_tree) + .force() + .checkout() + .context("failed to checkout tree")?; + + branch.head = last_rebase_head; + branch.tree = merge_tree_oid; + branch_writer.write(&mut branch)?; + super::integration::update_gitbutler_integration( + gb_repository, + project_repository, + )?; + + return Ok(()); + } + + rebase.abort().context("failed to abort rebase")?; + } + + let head_commit = repo 
+ .find_commit(branch.head) + .context("failed to find head commit")?; + + let new_branch_head = project_repository.commit( + user, + format!( + "Merged {}/{} into {}", + upstream_branch.remote(), + upstream_branch.branch(), + branch.name + ) + .as_str(), + &merge_tree, + &[&head_commit, &upstream_commit], + signing_key, + )?; + + // checkout the merge tree + repo.checkout_tree(&merge_tree) + .force() + .checkout() + .context("failed to checkout tree")?; + + // write the branch data + branch.head = new_branch_head; + branch.tree = merge_tree_oid; + branch_writer.write(&mut branch)?; + } + + super::integration::update_gitbutler_integration(gb_repository, project_repository)?; + + Ok(()) +} + +pub fn update_branch( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + branch_update: branch::BranchUpdateRequest, +) -> Result { + let current_session = gb_repository + .get_or_create_current_session() + .context("failed to get or create currnt session")?; + let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) + .context("failed to open current session")?; + let branch_reader = branch::Reader::new(¤t_session_reader); + let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create writer")?; + + let mut branch = branch_reader + .read(&branch_update.id) + .map_err(|error| match error { + reader::Error::NotFound => { + errors::UpdateBranchError::BranchNotFound(errors::BranchNotFoundError { + project_id: project_repository.project().id, + branch_id: branch_update.id, + }) + } + _ => errors::UpdateBranchError::Other(error.into()), + })?; + + if let Some(ownership) = branch_update.ownership { + set_ownership( + ¤t_session_reader, + &branch_writer, + &mut branch, + &ownership, + ) + .context("failed to set ownership")?; + } + + if let Some(name) = branch_update.name { + let all_virtual_branches = Iterator::new(¤t_session_reader) + 
.context("failed to create branch iterator")? + .collect::, reader::Error>>() + .context("failed to read virtual branches")?; + + project_repository.delete_branch_reference(&branch)?; + + branch.name = dedup( + &all_virtual_branches + .iter() + .map(|b| b.name.as_str()) + .collect::>(), + &name, + ); + + project_repository.add_branch_reference(&branch)?; + }; + + if let Some(updated_upstream) = branch_update.upstream { + let default_target = get_default_target(¤t_session_reader) + .context("failed to get default target")? + .ok_or_else(|| { + errors::UpdateBranchError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + let remote_branch = format!( + "refs/remotes/{}/{}", + default_target.branch.remote(), + normalize_branch_name(&updated_upstream) + ) + .parse::() + .unwrap(); + branch.upstream = Some(remote_branch); + }; + + if let Some(notes) = branch_update.notes { + branch.notes = notes; + }; + + if let Some(order) = branch_update.order { + branch.order = order; + }; + + if let Some(selected_for_changes) = branch_update.selected_for_changes { + branch.selected_for_changes = if selected_for_changes { + for mut other_branch in Iterator::new(¤t_session_reader) + .context("failed to create branch iterator")? + .collect::, reader::Error>>() + .context("failed to read virtual branches")? 
+ .into_iter() + .filter(|b| b.id != branch.id) + { + other_branch.selected_for_changes = None; + branch_writer.write(&mut other_branch)?; + } + Some(chrono::Utc::now().timestamp_millis()) + } else { + None + }; + }; + + branch_writer + .write(&mut branch) + .context("failed to write target branch")?; + + Ok(branch) +} + +pub fn delete_branch( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + branch_id: &BranchId, +) -> Result<(), errors::DeleteBranchError> { + let current_session = gb_repository + .get_or_create_current_session() + .context("failed to get or create currnt session")?; + let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) + .context("failed to open current session")?; + let branch_reader = branch::Reader::new(¤t_session_reader); + let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create writer")?; + + let branch = match branch_reader.read(branch_id) { + Ok(branch) => Ok(branch), + Err(reader::Error::NotFound) => return Ok(()), + Err(error) => Err(error), + } + .context("failed to read branch")?; + + if branch.applied && unapply_branch(gb_repository, project_repository, branch_id)?.is_none() { + return Ok(()); + } + + branch_writer + .delete(&branch) + .context("Failed to remove branch")?; + + project_repository.delete_branch_reference(&branch)?; + + ensure_selected_for_changes(¤t_session_reader, &branch_writer) + .context("failed to ensure selected for changes")?; + + Ok(()) +} + +fn ensure_selected_for_changes( + current_session_reader: &sessions::Reader, + branch_writer: &branch::Writer, +) -> Result<()> { + let mut applied_branches = Iterator::new(current_session_reader) + .context("failed to create branch iterator")? + .collect::, reader::Error>>() + .context("failed to read virtual branches")? 
        .into_iter()
        .filter(|b| b.applied)
        .collect::<Vec<_>>();

    if applied_branches.is_empty() {
        // No applied branches -> nothing to select.
        // NOTE(review): leftover debug print — consider tracing or removal.
        println!("no applied branches");
        return Ok(());
    }

    if applied_branches
        .iter()
        .any(|b| b.selected_for_changes.is_some())
    {
        // A selection already exists; keep it.
        // NOTE(review): leftover debug print — consider tracing or removal.
        println!("some branches already selected for changes");
        return Ok(());
    }

    // Promote the top-most branch (lowest `order`) and persist the choice.
    applied_branches.sort_by_key(|branch| branch.order);

    applied_branches[0].selected_for_changes = Some(chrono::Utc::now().timestamp_millis());
    branch_writer.write(&mut applied_branches[0])?;
    Ok(())
}

/// Replace `target_branch`'s ownership claims with `ownership`, reconciling
/// against all other virtual branches so each hunk is claimed at most once.
///
/// Branches that lost claims are persisted immediately; `target_branch` is
/// only mutated in memory — the caller is responsible for writing it out.
fn set_ownership(
    session_reader: &sessions::Reader,
    branch_writer: &branch::Writer,
    target_branch: &mut branch::Branch,
    ownership: &branch::BranchOwnershipClaims,
) -> Result<()> {
    if target_branch.ownership.eq(ownership) {
        // nothing to update
        return Ok(());
    }

    let virtual_branches = Iterator::new(session_reader)
        .context("failed to create branch iterator")?
        .collect::<Result<Vec<branch::Branch>, reader::Error>>()
        .context("failed to read virtual branches")?;

    let mut claim_outcomes =
        branch::reconcile_claims(virtual_branches, target_branch, &ownership.claims)?;
    for claim_outcome in &mut claim_outcomes {
        // Only branches that actually lost a claim need re-writing.
        if !claim_outcome.removed_claims.is_empty() {
            branch_writer
                .write(&mut claim_outcome.updated_branch)
                .context("failed to write ownership for branch".to_string())?;
        }
    }

    // Updates the claiming branch that was passed as mutable state with the new ownership claims
    // TODO: remove mutable reference to target_branch
    target_branch.ownership = ownership.clone();

    Ok(())
}

/// Modification time of `file_path` in milliseconds since the Unix epoch,
/// memoized in `cache`. Falls back to the creation time, and finally to
/// "now", when metadata or mtime is unavailable (e.g. deleted files).
fn get_mtime(cache: &mut HashMap<PathBuf, u128>, file_path: &PathBuf) -> u128 {
    if let Some(mtime) = cache.get(file_path) {
        *mtime
    } else {
        let mtime = file_path
            .metadata()
            .map_or_else(
                |_| time::SystemTime::now(),
                |metadata| {
                    metadata
                        .modified()
                        .or(metadata.created())
                        .unwrap_or_else(|_| time::SystemTime::now())
                },
            )
            .duration_since(time::UNIX_EPOCH)
            .unwrap()
            .as_millis();
        cache.insert(file_path.clone(), mtime);
        mtime
    }
}

/// Convert raw git diff hunks, grouped by file, into UI-facing
/// [`VirtualBranchHunk`]s.
///
/// Hunk ids are `"{start}-{end}"` in new-file line numbers; `modified_at`
/// comes from the file's mtime, cached per call via `get_mtime`.
/// `locked`/`locked_to` are always initialized to unlocked here.
pub fn virtual_hunks_by_filepath(
    project_path: &Path,
    diff: &HashMap<PathBuf, Vec<diff::GitHunk>>,
) -> HashMap<PathBuf, Vec<VirtualBranchHunk>> {
    // mtime cache shared across all hunks of this call.
    let mut mtimes: HashMap<PathBuf, u128> = HashMap::new();
    diff.iter()
        .map(|(file_path, hunks)| {
            let hunks = hunks
                .iter()
                .map(|hunk| VirtualBranchHunk {
                    id: format!("{}-{}", hunk.new_start, hunk.new_start + hunk.new_lines),
                    modified_at: get_mtime(&mut mtimes, &project_path.join(file_path)),
                    file_path: file_path.clone(),
                    diff: hunk.diff.clone(),
                    old_start: hunk.old_start,
                    start: hunk.new_start,
                    end: hunk.new_start + hunk.new_lines,
                    binary: hunk.binary,
                    hash: Hunk::hash(&hunk.diff),
                    locked: false,
                    locked_to: None,
                    change_type: hunk.change_type,
                })
                .collect::<Vec<_>>();
            (file_path.clone(), hunks)
        })
        .collect::<HashMap<_, _>>()
}

/// Per-file diff hunks making up one branch's status.
pub type BranchStatus = HashMap<PathBuf, Vec<diff::GitHunk>>;

// list the virtual branches and their file statuses (statusi?)
#[allow(clippy::type_complexity)]
pub fn get_status_by_branch(
    gb_repository: &gb_repository::Repository,
    project_repository: &project_repository::Repository,
) -> Result<(Vec<(branch::Branch, BranchStatus)>, Vec<diff::FileDiff>)> {
    let latest_session = gb_repository
        .get_latest_session()
        .context("failed to get latest session")?
        .context("latest session not found")?;
    let session_reader = sessions::Reader::open(gb_repository, &latest_session)
        .context("failed to open current session")?;

    // Without a default target there is nothing to diff against.
    let default_target =
        match get_default_target(&session_reader).context("failed to read default target")? {
            Some(target) => target,
            None => {
                return Ok((vec![], vec![]));
            }
        };

    let virtual_branches = Iterator::new(&session_reader)
        .context("failed to create branch iterator")?
        .collect::<Result<Vec<branch::Branch>, reader::Error>>()
        .context("failed to read virtual branches")?;

    // Applied branches are diffed against the working directory …
    let applied_virtual_branches = virtual_branches
        .iter()
        .filter(|branch| branch.applied)
        .cloned()
        .collect::<Vec<_>>();

    let (applied_status, skipped_files) = get_applied_status(
        gb_repository,
        project_repository,
        &default_target,
        applied_virtual_branches,
    )?;

    // … while non-applied branches are diffed against their stored trees.
    let non_applied_virtual_branches = virtual_branches
        .into_iter()
        .filter(|branch| !branch.applied)
        .collect::<Vec<_>>();

    let non_applied_status = get_non_applied_status(
        project_repository,
        &default_target,
        non_applied_virtual_branches,
    )?;

    Ok((
        applied_status
            .into_iter()
            .chain(non_applied_status)
            .collect(),
        skipped_files,
    ))
}

// given a list of non applied virtual branches, return the status of each file, comparing the default target with
// virtual branch latest tree
//
// ownerships are not taken into account here, as they are not relevant for non applied branches
fn get_non_applied_status(
    project_repository: &project_repository::Repository,
    default_target: &target::Target,
    virtual_branches: Vec<branch::Branch>,
) -> Result<Vec<(branch::Branch, BranchStatus)>> {
    virtual_branches
        .into_iter()
        .map(
            |branch| -> Result<(branch::Branch, HashMap<PathBuf, Vec<diff::GitHunk>>)> {
                // Applied branches must go through `get_applied_status` instead.
                if branch.applied {
                    bail!("branch {} is applied", branch.name);
                }
                let branch_tree = project_repository
                    .git_repository
                    .find_tree(branch.tree)
                    .context(format!("failed to find tree {}", branch.tree))?;

                let target_tree = project_repository
                    .git_repository
                    .find_commit(default_target.sha)
                    .context("failed to find target commit")?
+ .tree() + .context("failed to find target tree")?; + + let diff = diff::trees( + &project_repository.git_repository, + &target_tree, + &branch_tree, + context_lines(project_repository), + )?; + + Ok((branch, diff::diff_files_to_hunks(&diff))) + }, + ) + .collect::>>() +} + +// given a list of applied virtual branches, return the status of each file, comparing the default target with +// the working directory +// +// ownerships are updated if nessessary +fn get_applied_status( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + default_target: &target::Target, + mut virtual_branches: Vec, +) -> Result<(AppliedStatuses, Vec)> { + let base_file_diffs = diff::workdir( + &project_repository.git_repository, + &default_target.sha, + context_lines(project_repository), + ) + .context("failed to diff workdir")?; + + let mut base_diffs: HashMap> = + diff_files_to_hunks(&base_file_diffs); + let mut skipped_files: Vec = Vec::new(); + for (_, file_diff) in base_file_diffs { + if file_diff.skipped { + skipped_files.push(file_diff); + } + } + + // sort by order, so that the default branch is first (left in the ui) + virtual_branches.sort_by(|a, b| a.order.cmp(&b.order)); + + if virtual_branches.is_empty() && !base_diffs.is_empty() { + // no virtual branches, but hunks: create default branch + virtual_branches = vec![create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .context("failed to create default branch")?]; + } + + // align branch ownership to the real hunks: + // - update shifted hunks + // - remove non existent hunks + + let mut diffs_by_branch: HashMap>> = + virtual_branches + .iter() + .map(|branch| (branch.id, HashMap::new())) + .collect(); + + let mut mtimes = HashMap::new(); + + for branch in &mut virtual_branches { + if !branch.applied { + bail!("branch {} is not applied", branch.name); + } + + let old_claims = branch.ownership.claims.clone(); + let new_claims = 
old_claims + .iter() + .filter_map(|claim| { + let git_diff_hunks = match base_diffs.get_mut(&claim.file_path) { + None => return None, + Some(hunks) => hunks, + }; + + let mtime = get_mtime(&mut mtimes, &claim.file_path); + + let claimed_hunks: Vec = claim + .hunks + .iter() + .filter_map(|claimed_hunk| { + // if any of the current hunks intersects with the owned hunk, we want to keep it + for (i, git_diff_hunk) in git_diff_hunks.iter().enumerate() { + let hash = Hunk::hash(&git_diff_hunk.diff); + // Eq compares hashes first, and if one of the hunks lacks a hash, it compares line numbers + if claimed_hunk.eq(&Hunk::from(git_diff_hunk)) { + // try to re-use old timestamp + let timestamp = claimed_hunk.timestam_ms().unwrap_or(mtime); + // push hunk to the end of the list, preserving the order + diffs_by_branch + .entry(branch.id) + .or_default() + .entry(claim.file_path.clone()) + .or_default() + .push(git_diff_hunk.clone()); + + git_diff_hunks.remove(i); + return Some( + claimed_hunk + .with_timestamp(timestamp) + .with_hash(hash.as_str()), + ); + } else if claimed_hunk.intersects(git_diff_hunk) { + // if it's an intersection, push the hunk to the beginning, + // indicating the the hunk has been updated + diffs_by_branch + .entry(branch.id) + .or_default() + .entry(claim.file_path.clone()) + .or_default() + .insert(0, git_diff_hunk.clone()); + + let updated_hunk = Hunk { + start: git_diff_hunk.new_start, + end: git_diff_hunk.new_start + git_diff_hunk.new_lines, + timestamp_ms: Some(mtime), + hash: Some(hash.clone()), + }; + + // remove the hunk from the current hunks because each hunk can + // only be owned once + git_diff_hunks.remove(i); + + // return updated version, with new hash and/or timestamp + return Some(updated_hunk); + } + } + None + }) + .collect(); + + if claimed_hunks.is_empty() { + // No need for an empty claim + None + } else { + Some(OwnershipClaim { + file_path: claim.file_path.clone(), + hunks: claimed_hunks, + }) + } + }) + .collect(); + + 
branch.ownership = BranchOwnershipClaims { claims: new_claims }; + } + + let max_selected_for_changes = virtual_branches + .iter() + .filter_map(|b| b.selected_for_changes) + .max() + .unwrap_or(-1); + let default_vbranch_pos = virtual_branches + .iter() + .position(|b| b.selected_for_changes == Some(max_selected_for_changes)) + .unwrap_or(0); + + // put the remaining hunks into the default (first) branch + for (filepath, hunks) in base_diffs { + for hunk in hunks { + virtual_branches[default_vbranch_pos] + .ownership + .put(&OwnershipClaim { + file_path: filepath.clone(), + hunks: vec![Hunk::from(&hunk) + .with_timestamp(get_mtime(&mut mtimes, &filepath)) + .with_hash(Hunk::hash(hunk.diff.as_str()).as_str())], + }); + diffs_by_branch + .entry(virtual_branches[default_vbranch_pos].id) + .or_default() + .entry(filepath.clone()) + .or_default() + .push(hunk.clone()); + } + } + + let mut hunks_by_branch = diffs_by_branch + .into_iter() + .map(|(branch_id, hunks)| { + ( + virtual_branches + .iter() + .find(|b| b.id.eq(&branch_id)) + .unwrap() + .clone(), + hunks, + ) + }) + .collect::>(); + + // write updated state if not resolving + if !project_repository.is_resolving() { + let branch_writer = + branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create writer")?; + for (vbranch, files) in &mut hunks_by_branch { + vbranch.tree = write_tree(project_repository, default_target, files)?; + branch_writer + .write(vbranch) + .context(format!("failed to write virtual branch {}", vbranch.name))?; + } + } + + Ok((hunks_by_branch, skipped_files)) +} + +fn virtual_hunks_to_virtual_files( + project_repository: &project_repository::Repository, + hunks: &[VirtualBranchHunk], +) -> Vec { + hunks + .iter() + .fold(HashMap::>::new(), |mut acc, hunk| { + acc.entry(hunk.file_path.clone()) + .or_default() + .push(hunk.clone()); + acc + }) + .into_iter() + .map(|(file_path, hunks)| VirtualBranchFile { + id: file_path.display().to_string(), + 
path: file_path.clone(), + hunks: hunks.clone(), + binary: hunks.iter().any(|h| h.binary), + large: false, + modified_at: hunks.iter().map(|h| h.modified_at).max().unwrap_or(0), + conflicted: conflicts::is_conflicting( + project_repository, + Some(&file_path.display().to_string()), + ) + .unwrap_or(false), + }) + .collect::>() +} + +// reset virtual branch to a specific commit +pub fn reset_branch( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + branch_id: &BranchId, + target_commit_oid: git::Oid, +) -> Result<(), errors::ResetBranchError> { + let current_session = gb_repository.get_or_create_current_session()?; + let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session)?; + + let default_target = get_default_target(¤t_session_reader) + .context("failed to read default target")? + .ok_or_else(|| { + errors::ResetBranchError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + + let branch_reader = branch::Reader::new(¤t_session_reader); + let mut branch = match branch_reader.read(branch_id) { + Ok(branch) => Ok(branch), + Err(reader::Error::NotFound) => Err(errors::ResetBranchError::BranchNotFound( + errors::BranchNotFoundError { + branch_id: *branch_id, + project_id: project_repository.project().id, + }, + )), + Err(error) => Err(errors::ResetBranchError::Other(error.into())), + }?; + + if branch.head == target_commit_oid { + // nothing to do + return Ok(()); + } + + if default_target.sha != target_commit_oid + && !project_repository + .l(branch.head, LogUntil::Commit(default_target.sha))? 
            .contains(&target_commit_oid)
    {
        // The requested commit is neither the target base nor part of this
        // branch's history — refuse to reset to it.
        return Err(errors::ResetBranchError::CommitNotFoundInBranch(
            target_commit_oid,
        ));
    }

    let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir())
        .context("failed to create writer")?;
    branch.head = target_commit_oid;
    branch_writer
        .write(&mut branch)
        .context("failed to write branch")?;

    super::integration::update_gitbutler_integration(gb_repository, project_repository)
        .context("failed to update gitbutler integration")?;

    Ok(())
}

/// Flatten per-file diffs into UI-facing [`VirtualBranchFile`]s for this
/// project (hunk conversion + per-file aggregation).
fn diffs_to_virtual_files(
    project_repository: &project_repository::Repository,
    diffs: &HashMap<PathBuf, Vec<diff::GitHunk>>,
) -> Vec<VirtualBranchFile> {
    let hunks_by_filepath = virtual_hunks_by_filepath(&project_repository.project().path, diffs);
    virtual_hunks_to_virtual_files(
        project_repository,
        &hunks_by_filepath
            .values()
            .flatten()
            .cloned()
            .collect::<Vec<_>>(),
    )
}

// this function takes a list of file ownership,
// constructs a tree from those changes on top of the target
// and writes it as a new tree for storage
pub fn write_tree(
    project_repository: &project_repository::Repository,
    target: &target::Target,
    files: &HashMap<PathBuf, Vec<diff::GitHunk>>,
) -> Result<git::Oid> {
    write_tree_onto_commit(project_repository, target.sha, files)
}

/// Like [`write_tree`], but applies `files` on top of an arbitrary commit's
/// tree instead of the default target.
pub fn write_tree_onto_commit(
    project_repository: &project_repository::Repository,
    commit_oid: git::Oid,
    files: &HashMap<PathBuf, Vec<diff::GitHunk>>,
) -> Result<git::Oid> {
    // read the base sha into an index
    let git_repository = &project_repository.git_repository;

    let head_commit = git_repository.find_commit(commit_oid)?;
    let base_tree = head_commit.tree()?;

    write_tree_onto_tree(project_repository, &base_tree, files)
}

/// Apply per-file hunks on top of `base_tree` and write the result as a new
/// tree object, returning its oid. Handles regular files, executables,
/// symlinks, binary blobs (hunk diff is the blob oid), submodules, and
/// deletions.
pub fn write_tree_onto_tree(
    project_repository: &project_repository::Repository,
    base_tree: &git::Tree,
    files: &HashMap<PathBuf, Vec<diff::GitHunk>>,
) -> Result<git::Oid> {
    let git_repository = &project_repository.git_repository;
    let mut builder = git_repository.treebuilder(Some(base_tree));
    // now update the index with content in the working directory for each file
    for (filepath, hunks) in files {
        // convert this string to a Path
        let rel_path = Path::new(&filepath);
        let full_path = project_repository.path().join(rel_path);

        // Heuristic: a directory whose single "hunk" mentions a gitlink is a
        // submodule pointer, not file content.
        let is_submodule =
            full_path.is_dir() && hunks.len() == 1 && hunks[0].diff.contains("Subproject commit");

        // if file exists
        if full_path.exists() {
            // if file is executable, use 755, otherwise 644
            let mut filemode = git::FileMode::Blob;
            // check if full_path file is executable
            if let Ok(metadata) = std::fs::symlink_metadata(&full_path) {
                #[cfg(target_family = "unix")]
                {
                    if metadata.permissions().mode() & 0o111 != 0 {
                        filemode = git::FileMode::BlobExecutable;
                    }
                }
                #[cfg(target_os = "windows")]
                {
                    // TODO(qix-): Pull from `core.filemode` config option to determine
                    // TODO(qix-): the behavior on windows. For now, we set this to true.
                    // TODO(qix-): It's not ideal, but it gets us to a windows build faster.
                    filemode = git::FileMode::BlobExecutable;
                }

                if metadata.file_type().is_symlink() {
                    filemode = git::FileMode::Link;
                }
            }

            // get the blob
            if filemode == git::FileMode::Link {
                // it's a symlink, make the content the path of the link
                let link_target = std::fs::read_link(&full_path)?;

                // if the link target is inside the project repository, make it relative
                let link_target = link_target
                    .strip_prefix(project_repository.path())
                    .unwrap_or(&link_target);

                let blob_oid = git_repository.blob(
                    link_target
                        .to_str()
                        .ok_or_else(|| Error::InvalidUnicodePath(link_target.into()))?
+ .as_bytes(), + )?; + builder.upsert(rel_path, blob_oid, filemode); + } else if let Ok(tree_entry) = base_tree.get_path(rel_path) { + if hunks.len() == 1 && hunks[0].binary { + let new_blob_oid = &hunks[0].diff; + // convert string to Oid + let new_blob_oid = new_blob_oid.parse().context("failed to diff as oid")?; + builder.upsert(rel_path, new_blob_oid, filemode); + } else { + // blob from tree_entry + let blob = tree_entry + .to_object(git_repository) + .unwrap() + .peel_to_blob() + .context("failed to get blob")?; + + let mut blob_contents = blob.content().to_str()?.to_string(); + + let mut hunks = hunks.clone(); + hunks.sort_by_key(|hunk| hunk.new_start); + let mut all_diffs = String::new(); + for hunk in hunks { + all_diffs.push_str(&hunk.diff); + } + + let patch = Patch::from_str(&all_diffs)?; + blob_contents = apply(&blob_contents, &patch) + .context(format!("failed to apply {}", &all_diffs))?; + + // create a blob + let new_blob_oid = git_repository.blob(blob_contents.as_bytes())?; + // upsert into the builder + builder.upsert(rel_path, new_blob_oid, filemode); + } + } else if is_submodule { + let mut blob_contents = String::new(); + + let mut hunks = hunks.clone(); + hunks.sort_by_key(|hunk| hunk.new_start); + for hunk in hunks { + let patch = Patch::from_str(&hunk.diff)?; + blob_contents = apply(&blob_contents, &patch) + .context(format!("failed to apply {}", &hunk.diff))?; + } + + // create a blob + let new_blob_oid = git_repository.blob(blob_contents.as_bytes())?; + // upsert into the builder + builder.upsert(rel_path, new_blob_oid, filemode); + } else { + // create a git blob from a file on disk + let blob_oid = git_repository + .blob_path(&full_path) + .context(format!("failed to create blob from path {:?}", &full_path))?; + builder.upsert(rel_path, blob_oid, filemode); + } + } else if base_tree.get_path(rel_path).is_ok() { + // remove file from index if it exists in the base tree + builder.remove(rel_path); + } else { + // file not in index or base 
tree, do nothing + // this is the + } + } + + // now write out the tree + let tree_oid = builder.write().context("failed to write updated tree")?; + + Ok(tree_oid) +} + +fn _print_tree(repo: &git2::Repository, tree: &git2::Tree) -> Result<()> { + println!("tree id: {}", tree.id()); + for entry in tree { + println!( + " entry: {} {}", + entry.name().unwrap_or_default(), + entry.id() + ); + // get entry contents + let object = entry.to_object(repo).context("failed to get object")?; + let blob = object.as_blob().context("failed to get blob")?; + // convert content to string + if let Ok(content) = std::str::from_utf8(blob.content()) { + println!(" blob: {}", content); + } else { + println!(" blob: BINARY"); + } + } + Ok(()) +} + +#[allow(clippy::too_many_arguments)] +pub fn commit( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + branch_id: &BranchId, + message: &str, + ownership: Option<&branch::BranchOwnershipClaims>, + signing_key: Option<&keys::PrivateKey>, + user: Option<&users::User>, + run_hooks: bool, +) -> Result { + let mut message_buffer = message.to_owned(); + + if run_hooks { + let hook_result = project_repository + .git_repository + .run_hook_commit_msg(&mut message_buffer) + .context("failed to run hook")?; + + if let HookResult::RunNotSuccessful { stdout, .. } = hook_result { + return Err(errors::CommitError::CommitMsgHookRejected(stdout)); + } + + let hook_result = project_repository + .git_repository + .run_hook_pre_commit() + .context("failed to run hook")?; + + if let HookResult::RunNotSuccessful { stdout, .. } = hook_result { + return Err(errors::CommitError::CommitHookRejected(stdout)); + } + } + + let message = &message_buffer; + + let default_target = gb_repository + .default_target() + .context("failed to get default target")? 
+ .ok_or_else(|| { + errors::CommitError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + + // get the files to commit + let (mut statuses, _) = get_status_by_branch(gb_repository, project_repository) + .context("failed to get status by branch")?; + + let (ref mut branch, files) = statuses + .iter_mut() + .find(|(branch, _)| branch.id == *branch_id) + .ok_or_else(|| { + errors::CommitError::BranchNotFound(errors::BranchNotFoundError { + project_id: project_repository.project().id, + branch_id: *branch_id, + }) + })?; + + let files = calculate_non_commited_diffs(project_repository, branch, &default_target, files)?; + if conflicts::is_conflicting::<&Path>(project_repository, None)? { + return Err(errors::CommitError::Conflicted( + errors::ProjectConflictError { + project_id: project_repository.project().id, + }, + )); + } + + let tree_oid = if let Some(ownership) = ownership { + let files = files + .iter() + .filter_map(|(filepath, hunks)| { + let hunks = hunks + .iter() + .filter(|hunk| { + ownership + .claims + .iter() + .find(|f| f.file_path.eq(filepath)) + .map_or(false, |f| { + f.hunks.iter().any(|h| { + h.start == hunk.new_start + && h.end == hunk.new_start + hunk.new_lines + }) + }) + }) + .cloned() + .collect::>(); + if hunks.is_empty() { + None + } else { + Some((filepath.clone(), hunks)) + } + }) + .collect::>(); + write_tree_onto_commit(project_repository, branch.head, &files)? + } else { + write_tree_onto_commit(project_repository, branch.head, &files)? 
+ }; + + let git_repository = &project_repository.git_repository; + let parent_commit = git_repository + .find_commit(branch.head) + .context(format!("failed to find commit {:?}", branch.head))?; + let tree = git_repository + .find_tree(tree_oid) + .context(format!("failed to find tree {:?}", tree_oid))?; + + // now write a commit, using a merge parent if it exists + let extra_merge_parent = + conflicts::merge_parent(project_repository).context("failed to get merge parent")?; + + let commit_oid = match extra_merge_parent { + Some(merge_parent) => { + let merge_parent = git_repository + .find_commit(merge_parent) + .context(format!("failed to find merge parent {:?}", merge_parent))?; + let commit_oid = project_repository.commit( + user, + message, + &tree, + &[&parent_commit, &merge_parent], + signing_key, + )?; + conflicts::clear(project_repository).context("failed to clear conflicts")?; + commit_oid + } + None => project_repository.commit(user, message, &tree, &[&parent_commit], signing_key)?, + }; + + if run_hooks { + project_repository + .git_repository + .run_hook_post_commit() + .context("failed to run hook")?; + } + + // update the virtual branch head + let writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create writer")?; + branch.tree = tree_oid; + branch.head = commit_oid; + writer.write(branch).context("failed to write branch")?; + + super::integration::update_gitbutler_integration(gb_repository, project_repository) + .context("failed to update gitbutler integration")?; + + Ok(commit_oid) +} + +pub fn push( + project_repository: &project_repository::Repository, + gb_repository: &gb_repository::Repository, + branch_id: &BranchId, + with_force: bool, + credentials: &git::credentials::Helper, + askpass: Option<(AskpassBroker, Option)>, +) -> Result<(), errors::PushError> { + let current_session = gb_repository + .get_or_create_current_session() + .context("failed to get or create currnt session") + 
.map_err(errors::PushError::Other)?; + let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) + .context("failed to open current session") + .map_err(errors::PushError::Other)?; + + let branch_reader = branch::Reader::new(¤t_session_reader); + let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create writer")?; + + let mut vbranch = branch_reader.read(branch_id).map_err(|error| match error { + reader::Error::NotFound => errors::PushError::BranchNotFound(errors::BranchNotFoundError { + project_id: project_repository.project().id, + branch_id: *branch_id, + }), + error => errors::PushError::Other(error.into()), + })?; + + let remote_branch = if let Some(upstream_branch) = vbranch.upstream.as_ref() { + upstream_branch.clone() + } else { + let default_target = get_default_target(¤t_session_reader) + .context("failed to get default target")? + .ok_or_else(|| { + errors::PushError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + + let remote_branch = format!( + "refs/remotes/{}/{}", + default_target.branch.remote(), + normalize_branch_name(&vbranch.name) + ) + .parse::() + .context("failed to parse remote branch name")?; + + let remote_branches = project_repository.git_remote_branches()?; + let existing_branches = remote_branches + .iter() + .map(RemoteRefname::branch) + .map(str::to_lowercase) // git is weird about case sensitivity here, assume not case sensitive + .collect::>(); + + remote_branch.with_branch(&dedup_fmt( + &existing_branches + .iter() + .map(String::as_str) + .collect::>(), + remote_branch.branch(), + "-", + )) + }; + + project_repository.push( + &vbranch.head, + &remote_branch, + with_force, + credentials, + None, + askpass.clone(), + )?; + + vbranch.upstream = Some(remote_branch.clone()); + vbranch.upstream_head = Some(vbranch.head); + branch_writer + .write(&mut vbranch) + .context("failed to 
write target branch after push")?; + project_repository.fetch( + remote_branch.remote(), + credentials, + askpass.map(|(broker, _)| (broker, "modal".to_string())), + )?; + + Ok(()) +} + +fn is_commit_integrated( + project_repository: &project_repository::Repository, + target: &target::Target, + commit: &git::Commit, +) -> Result { + let remote_branch = project_repository + .git_repository + .find_branch(&target.branch.clone().into())?; + let remote_head = remote_branch.peel_to_commit()?; + let upstream_commits = project_repository.l( + remote_head.id(), + project_repository::LogUntil::Commit(target.sha), + )?; + + if target.sha.eq(&commit.id()) { + // could not be integrated if heads are the same. + return Ok(false); + } + + if upstream_commits.is_empty() { + // could not be integrated - there is nothing new upstream. + return Ok(false); + } + + if upstream_commits.contains(&commit.id()) { + return Ok(true); + } + + let merge_base_id = project_repository + .git_repository + .merge_base(target.sha, commit.id())?; + if merge_base_id.eq(&commit.id()) { + // if merge branch is the same as branch head and there are upstream commits + // then it's integrated + return Ok(true); + } + + let merge_base = project_repository + .git_repository + .find_commit(merge_base_id)?; + let merge_base_tree = merge_base.tree()?; + let upstream = project_repository + .git_repository + .find_commit(remote_head.id())?; + let upstream_tree = upstream.tree()?; + + if merge_base_tree.id() == upstream_tree.id() { + // if merge base is the same as upstream tree, then it's integrated + return Ok(true); + } + + // try to merge our tree into the upstream tree + let mut merge_index = project_repository + .git_repository + .merge_trees(&merge_base_tree, &commit.tree()?, &upstream_tree) + .context("failed to merge trees")?; + + if merge_index.has_conflicts() { + return Ok(false); + } + + let merge_tree_oid = merge_index + .write_tree_to(&project_repository.git_repository) + .context("failed to write 
tree")?; + + // if the merge_tree is the same as the new_target_tree and there are no files (uncommitted changes) + // then the vbranch is fully merged + Ok(merge_tree_oid == upstream_tree.id()) +} + +pub fn is_remote_branch_mergeable( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + branch_name: &git::RemoteRefname, +) -> Result { + // get the current target + let latest_session = gb_repository.get_latest_session()?.ok_or_else(|| { + errors::IsRemoteBranchMergableError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + let session_reader = sessions::Reader::open(gb_repository, &latest_session) + .context("failed to open current session")?; + + let default_target = get_default_target(&session_reader) + .context("failed to get default target")? + .ok_or_else(|| { + errors::IsRemoteBranchMergableError::DefaultTargetNotSet( + errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }, + ) + })?; + + let target_commit = project_repository + .git_repository + .find_commit(default_target.sha) + .context("failed to find target commit")?; + + let branch = match project_repository + .git_repository + .find_branch(&branch_name.into()) + { + Ok(branch) => Ok(branch), + Err(git::Error::NotFound(_)) => Err(errors::IsRemoteBranchMergableError::BranchNotFound( + branch_name.clone(), + )), + Err(error) => Err(errors::IsRemoteBranchMergableError::Other(error.into())), + }?; + let branch_oid = branch.target().context("detatched head")?; + let branch_commit = project_repository + .git_repository + .find_commit(branch_oid) + .context("failed to find branch commit")?; + + let base_tree = find_base_tree( + &project_repository.git_repository, + &branch_commit, + &target_commit, + )?; + + let wd_tree = project_repository.get_wd_tree()?; + + let branch_tree = branch_commit.tree().context("failed to find branch tree")?; + let mergeable = 
!project_repository + .git_repository + .merge_trees(&base_tree, &branch_tree, &wd_tree) + .context("failed to merge trees")? + .has_conflicts(); + + Ok(mergeable) +} + +pub fn is_virtual_branch_mergeable( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + branch_id: &BranchId, +) -> Result { + let latest_session = gb_repository.get_latest_session()?.ok_or_else(|| { + errors::IsVirtualBranchMergeable::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + let session_reader = sessions::Reader::open(gb_repository, &latest_session) + .context("failed to open current session reader")?; + let branch_reader = branch::Reader::new(&session_reader); + let branch = match branch_reader.read(branch_id) { + Ok(branch) => Ok(branch), + Err(reader::Error::NotFound) => Err(errors::IsVirtualBranchMergeable::BranchNotFound( + errors::BranchNotFoundError { + project_id: project_repository.project().id, + branch_id: *branch_id, + }, + )), + Err(error) => Err(errors::IsVirtualBranchMergeable::Other(error.into())), + }?; + + if branch.applied { + return Ok(true); + } + + let default_target = get_default_target(&session_reader) + .context("failed to read default target")? 
+ .ok_or_else(|| { + errors::IsVirtualBranchMergeable::DefaultTargetNotSet( + errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }, + ) + })?; + + // determine if this branch is up to date with the target/base + let merge_base = project_repository + .git_repository + .merge_base(default_target.sha, branch.head) + .context("failed to find merge base")?; + + if merge_base != default_target.sha { + return Ok(false); + } + + let branch_commit = project_repository + .git_repository + .find_commit(branch.head) + .context("failed to find branch commit")?; + + let target_commit = project_repository + .git_repository + .find_commit(default_target.sha) + .context("failed to find target commit")?; + + let base_tree = find_base_tree( + &project_repository.git_repository, + &branch_commit, + &target_commit, + )?; + + let wd_tree = project_repository.get_wd_tree()?; + + // determine if this tree is mergeable + let branch_tree = project_repository + .git_repository + .find_tree(branch.tree) + .context("failed to find branch tree")?; + + let is_mergeable = !project_repository + .git_repository + .merge_trees(&base_tree, &branch_tree, &wd_tree) + .context("failed to merge trees")? + .has_conflicts(); + + Ok(is_mergeable) +} + +pub fn amend( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + branch_id: &BranchId, + target_ownership: &BranchOwnershipClaims, +) -> Result { + if conflicts::is_conflicting::<&Path>(project_repository, None)? 
{ + return Err(errors::AmendError::Conflict(errors::ProjectConflictError { + project_id: project_repository.project().id, + })); + } + + let current_session = gb_repository + .get_or_create_current_session() + .context("failed to get or create current session")?; + let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) + .context("failed to open current session")?; + + let all_branches = Iterator::new(¤t_session_reader) + .context("failed to create branch iterator")? + .collect::, reader::Error>>() + .context("failed to read virtual branches")? + .into_iter() + .collect::>(); + + if !all_branches.iter().any(|b| b.id == *branch_id) { + return Err(errors::AmendError::BranchNotFound( + errors::BranchNotFoundError { + project_id: project_repository.project().id, + branch_id: *branch_id, + }, + )); + } + + let applied_branches = all_branches + .into_iter() + .filter(|b| b.applied) + .collect::>(); + + if !applied_branches.iter().any(|b| b.id == *branch_id) { + return Err(errors::AmendError::BranchNotFound( + errors::BranchNotFoundError { + project_id: project_repository.project().id, + branch_id: *branch_id, + }, + )); + } + + let default_target = get_default_target(¤t_session_reader) + .context("failed to read default target")? 
+ .ok_or_else(|| { + errors::AmendError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + + let (mut applied_statuses, _) = get_applied_status( + gb_repository, + project_repository, + &default_target, + applied_branches, + )?; + + let (ref mut target_branch, target_status) = applied_statuses + .iter_mut() + .find(|(b, _)| b.id == *branch_id) + .ok_or_else(|| { + errors::AmendError::BranchNotFound(errors::BranchNotFoundError { + project_id: project_repository.project().id, + branch_id: *branch_id, + }) + })?; + + if target_branch.upstream.is_some() && !project_repository.project().ok_with_force_push { + // amending to a pushed head commit will cause a force push that is not allowed + return Err(errors::AmendError::ForcePushNotAllowed( + errors::ForcePushNotAllowedError { + project_id: project_repository.project().id, + }, + )); + } + + if project_repository + .l( + target_branch.head, + project_repository::LogUntil::Commit(default_target.sha), + )? 
+ .is_empty() + { + return Err(errors::AmendError::BranchHasNoCommits); + } + + let diffs_to_consider = calculate_non_commited_diffs( + project_repository, + target_branch, + &default_target, + target_status, + )?; + + let head_commit = project_repository + .git_repository + .find_commit(target_branch.head) + .context("failed to find head commit")?; + + let diffs_to_amend = target_ownership + .claims + .iter() + .filter_map(|file_ownership| { + let hunks = diffs_to_consider + .get(&file_ownership.file_path) + .map(|hunks| { + hunks + .iter() + .filter(|hunk| { + file_ownership.hunks.iter().any(|owned_hunk| { + owned_hunk.start == hunk.new_start + && owned_hunk.end == hunk.new_start + hunk.new_lines + }) + }) + .cloned() + .collect::>() + }) + .unwrap_or_default(); + if hunks.is_empty() { + None + } else { + Some((file_ownership.file_path.clone(), hunks)) + } + }) + .collect::>(); + + if diffs_to_amend.is_empty() { + return Err(errors::AmendError::TargetOwnerhshipNotFound( + target_ownership.clone(), + )); + } + + let new_tree_oid = + write_tree_onto_commit(project_repository, target_branch.head, &diffs_to_amend)?; + let new_tree = project_repository + .git_repository + .find_tree(new_tree_oid) + .context("failed to find new tree")?; + + let parents = head_commit + .parents() + .context("failed to find head commit parents")?; + + let commit_oid = project_repository + .git_repository + .commit( + None, + &head_commit.author(), + &head_commit.committer(), + head_commit.message().unwrap_or_default(), + &new_tree, + &parents.iter().collect::>(), + ) + .context("failed to create commit")?; + + let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create writer")?; + target_branch.head = commit_oid; + branch_writer.write(target_branch)?; + + super::integration::update_gitbutler_integration(gb_repository, project_repository)?; + + Ok(commit_oid) +} + +pub fn cherry_pick( + gb_repository: 
&gb_repository::Repository, + project_repository: &project_repository::Repository, + branch_id: &BranchId, + target_commit_oid: git::Oid, +) -> Result, errors::CherryPickError> { + if conflicts::is_conflicting::<&Path>(project_repository, None)? { + return Err(errors::CherryPickError::Conflict( + errors::ProjectConflictError { + project_id: project_repository.project().id, + }, + )); + } + + let current_session = gb_repository + .get_or_create_current_session() + .context("failed to get or create current session")?; + let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) + .context("failed to open current session")?; + let branch_reader = branch::Reader::new(¤t_session_reader); + let mut branch = branch_reader + .read(branch_id) + .context("failed to read branch")?; + + if !branch.applied { + // todo? + return Err(errors::CherryPickError::NotApplied); + } + + let target_commit = project_repository + .git_repository + .find_commit(target_commit_oid) + .map_err(|error| match error { + git::Error::NotFound(_) => errors::CherryPickError::CommitNotFound(target_commit_oid), + error => errors::CherryPickError::Other(error.into()), + })?; + + let branch_head_commit = project_repository + .git_repository + .find_commit(branch.head) + .context("failed to find branch tree")?; + + let default_target = get_default_target(¤t_session_reader) + .context("failed to read default target")? + .context("no default target set")?; + + // if any other branches are applied, unapply them + let applied_branches = Iterator::new(¤t_session_reader) + .context("failed to create branch iterator")? + .collect::, reader::Error>>() + .context("failed to read virtual branches")? 
+ .into_iter() + .filter(|b| b.applied) + .collect::>(); + + let (applied_statuses, _) = get_applied_status( + gb_repository, + project_repository, + &default_target, + applied_branches, + )?; + + let branch_files = applied_statuses + .iter() + .find(|(b, _)| b.id == *branch_id) + .map(|(_, f)| f) + .context("branch status not found")?; + + // create a wip commit. we'll use it to offload cherrypick conflicts calculation to libgit. + let wip_commit = { + let wip_tree_oid = write_tree(project_repository, &default_target, branch_files)?; + let wip_tree = project_repository + .git_repository + .find_tree(wip_tree_oid) + .context("failed to find tree")?; + + let signature = git::Signature::now("GitButler", "gitbutler@gitbutler.com") + .context("failed to make gb signature")?; + let oid = project_repository + .git_repository + .commit( + None, + &signature, + &signature, + "wip cherry picking commit", + &wip_tree, + &[&branch_head_commit], + ) + .context("failed to commit wip work")?; + project_repository + .git_repository + .find_commit(oid) + .context("failed to find wip commit")? 
+ }; + + let mut cherrypick_index = project_repository + .git_repository + .cherry_pick(&wip_commit, &target_commit) + .context("failed to cherry pick")?; + + // unapply other branches + for other_branch in applied_statuses + .iter() + .filter(|(b, _)| b.id != branch.id) + .map(|(b, _)| b) + { + unapply_branch(gb_repository, project_repository, &other_branch.id) + .context("failed to unapply branch")?; + } + + let commit_oid = if cherrypick_index.has_conflicts() { + // checkout the conflicts + project_repository + .git_repository + .checkout_index(&mut cherrypick_index) + .allow_conflicts() + .conflict_style_merge() + .force() + .checkout() + .context("failed to checkout conflicts")?; + + // mark conflicts + let conflicts = cherrypick_index + .conflicts() + .context("failed to get conflicts")?; + let mut merge_conflicts = Vec::new(); + for path in conflicts.flatten() { + if let Some(ours) = path.our { + let path = std::str::from_utf8(&ours.path) + .context("failed to convert path")? + .to_string(); + merge_conflicts.push(path); + } + } + conflicts::mark(project_repository, &merge_conflicts, Some(branch.head))?; + + None + } else { + let merge_tree_oid = cherrypick_index + .write_tree_to(&project_repository.git_repository) + .context("failed to write merge tree")?; + let merge_tree = project_repository + .git_repository + .find_tree(merge_tree_oid) + .context("failed to find merge tree")?; + + let branch_head_commit = project_repository + .git_repository + .find_commit(branch.head) + .context("failed to find branch head commit")?; + + let commit_oid = project_repository + .git_repository + .commit( + None, + &target_commit.author(), + &target_commit.committer(), + target_commit.message().unwrap_or_default(), + &merge_tree, + &[&branch_head_commit], + ) + .context("failed to create commit")?; + + // checkout final_tree into the working directory + project_repository + .git_repository + .checkout_tree(&merge_tree) + .force() + .remove_untracked() + .checkout() + 
.context("failed to checkout final tree")?; + + // update branch status + let writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create writer")?; + branch.head = commit_oid; + writer + .write(&mut branch) + .context("failed to write branch")?; + + Some(commit_oid) + }; + + super::integration::update_gitbutler_integration(gb_repository, project_repository) + .context("failed to update gitbutler integration")?; + + Ok(commit_oid) +} + +/// squashes a commit from a virtual branch into it's parent. +pub fn squash( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + branch_id: &BranchId, + commit_oid: git::Oid, +) -> Result<(), errors::SquashError> { + if conflicts::is_conflicting::<&Path>(project_repository, None)? { + return Err(errors::SquashError::Conflict( + errors::ProjectConflictError { + project_id: project_repository.project().id, + }, + )); + } + + let current_session = gb_repository + .get_or_create_current_session() + .context("failed to get or create current session")?; + let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) + .context("failed to open current session")?; + let branch_reader = branch::Reader::new(¤t_session_reader); + + let default_target = get_default_target(¤t_session_reader) + .context("failed to read default target")? 
+ .ok_or_else(|| { + errors::SquashError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + + let mut branch = branch_reader.read(branch_id).map_err(|error| match error { + reader::Error::NotFound => { + errors::SquashError::BranchNotFound(errors::BranchNotFoundError { + project_id: project_repository.project().id, + branch_id: *branch_id, + }) + } + error => errors::SquashError::Other(error.into()), + })?; + + let branch_commit_oids = project_repository.l( + branch.head, + project_repository::LogUntil::Commit(default_target.sha), + )?; + + if !branch_commit_oids.contains(&commit_oid) { + return Err(errors::SquashError::CommitNotFound(commit_oid)); + } + + let commit_to_squash = project_repository + .git_repository + .find_commit(commit_oid) + .context("failed to find commit")?; + + let parent_commit = commit_to_squash + .parent(0) + .context("failed to find parent commit")?; + + let pushed_commit_oids = branch.upstream_head.map_or_else( + || Ok(vec![]), + |upstream_head| { + project_repository.l( + upstream_head, + project_repository::LogUntil::Commit(default_target.sha), + ) + }, + )?; + + if pushed_commit_oids.contains(&parent_commit.id()) + && !project_repository.project().ok_with_force_push + { + // squashing into a pushed commit will cause a force push that is not allowed + return Err(errors::SquashError::ForcePushNotAllowed( + errors::ForcePushNotAllowedError { + project_id: project_repository.project().id, + }, + )); + } + + if !branch_commit_oids.contains(&parent_commit.id()) { + return Err(errors::SquashError::CantSquashRootCommit); + } + + let ids_to_rebase = { + let ids = branch_commit_oids + .split(|oid| oid.eq(&commit_oid)) + .collect::>(); + ids.first().copied() + }; + + // create a commit that: + // * has the tree of the target commit + // * has the message combined of the target commit and parent commit + // * has parents of the parents commit. 
+ let parents = parent_commit + .parents() + .context("failed to find head commit parents")?; + + let new_commit_oid = project_repository + .git_repository + .commit( + None, + &commit_to_squash.author(), + &commit_to_squash.committer(), + &format!( + "{}\n{}", + parent_commit.message().unwrap_or_default(), + commit_to_squash.message().unwrap_or_default(), + ), + &commit_to_squash.tree().context("failed to find tree")?, + &parents.iter().collect::>(), + ) + .context("failed to commit")?; + + let new_head_id = if let Some(ids_to_rebase) = ids_to_rebase { + let mut ids_to_rebase = ids_to_rebase.to_vec(); + ids_to_rebase.reverse(); + + // now, rebase unchanged commits onto the new commit + let commits_to_rebase = ids_to_rebase + .iter() + .map(|oid| project_repository.git_repository.find_commit(*oid)) + .collect::, _>>() + .context("failed to read commits to rebase")?; + + commits_to_rebase + .into_iter() + .fold( + project_repository + .git_repository + .find_commit(new_commit_oid) + .context("failed to find new commit"), + |head, to_rebase| { + let head = head?; + + let mut cherrypick_index = project_repository + .git_repository + .cherry_pick(&head, &to_rebase) + .context("failed to cherry pick")?; + + if cherrypick_index.has_conflicts() { + bail!("failed to rebase"); + } + + let merge_tree_oid = cherrypick_index + .write_tree_to(&project_repository.git_repository) + .context("failed to write merge tree")?; + + let merge_tree = project_repository + .git_repository + .find_tree(merge_tree_oid) + .context("failed to find merge tree")?; + + let commit_oid = project_repository + .git_repository + .commit( + None, + &to_rebase.author(), + &to_rebase.committer(), + to_rebase.message().unwrap_or_default(), + &merge_tree, + &[&head], + ) + .context("failed to create commit")?; + + project_repository + .git_repository + .find_commit(commit_oid) + .context("failed to find commit") + }, + )? 
+ .id() + } else { + new_commit_oid + }; + + // save new branch head + let writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create writer")?; + branch.head = new_head_id; + writer + .write(&mut branch) + .context("failed to write branch")?; + + super::integration::update_gitbutler_integration(gb_repository, project_repository)?; + + Ok(()) +} + +pub fn update_commit_message( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + branch_id: &BranchId, + commit_oid: git::Oid, + message: &str, +) -> Result<(), errors::UpdateCommitMessageError> { + if message.is_empty() { + return Err(errors::UpdateCommitMessageError::EmptyMessage); + } + + if conflicts::is_conflicting::<&Path>(project_repository, None)? { + return Err(errors::UpdateCommitMessageError::Conflict( + errors::ProjectConflictError { + project_id: project_repository.project().id, + }, + )); + } + + let current_session = gb_repository + .get_or_create_current_session() + .context("failed to get or create current session")?; + let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) + .context("failed to open current session")?; + let branch_reader = branch::Reader::new(¤t_session_reader); + + let default_target = get_default_target(¤t_session_reader) + .context("failed to read default target")? 
+ .ok_or_else(|| { + errors::UpdateCommitMessageError::DefaultTargetNotSet( + errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }, + ) + })?; + + let mut branch = branch_reader.read(branch_id).map_err(|error| match error { + reader::Error::NotFound => { + errors::UpdateCommitMessageError::BranchNotFound(errors::BranchNotFoundError { + project_id: project_repository.project().id, + branch_id: *branch_id, + }) + } + error => errors::UpdateCommitMessageError::Other(error.into()), + })?; + + let branch_commit_oids = project_repository.l( + branch.head, + project_repository::LogUntil::Commit(default_target.sha), + )?; + + if !branch_commit_oids.contains(&commit_oid) { + return Err(errors::UpdateCommitMessageError::CommitNotFound(commit_oid)); + } + + let pushed_commit_oids = branch.upstream_head.map_or_else( + || Ok(vec![]), + |upstream_head| { + project_repository.l( + upstream_head, + project_repository::LogUntil::Commit(default_target.sha), + ) + }, + )?; + + if pushed_commit_oids.contains(&commit_oid) && !project_repository.project().ok_with_force_push + { + // updating the message of a pushed commit will cause a force push that is not allowed + return Err(errors::UpdateCommitMessageError::ForcePushNotAllowed( + errors::ForcePushNotAllowedError { + project_id: project_repository.project().id, + }, + )); + } + + let target_commit = project_repository + .git_repository + .find_commit(commit_oid) + .context("failed to find commit")?; + + let ids_to_rebase = { + let ids = branch_commit_oids + .split(|oid| oid.eq(&commit_oid)) + .collect::>(); + ids.first().copied() + }; + + let parents = target_commit + .parents() + .context("failed to find head commit parents")?; + + let new_commit_oid = project_repository + .git_repository + .commit( + None, + &target_commit.author(), + &target_commit.committer(), + message, + &target_commit.tree().context("failed to find tree")?, + &parents.iter().collect::>(), + ) + .context("failed to commit")?; + 
+ let new_head_id = if let Some(ids_to_rebase) = ids_to_rebase { + let mut ids_to_rebase = ids_to_rebase.to_vec(); + ids_to_rebase.reverse(); + // now, rebase unchanged commits onto the new commit + let commits_to_rebase = ids_to_rebase + .iter() + .map(|oid| project_repository.git_repository.find_commit(*oid)) + .collect::, _>>() + .context("failed to read commits to rebase")?; + + commits_to_rebase + .into_iter() + .fold( + project_repository + .git_repository + .find_commit(new_commit_oid) + .context("failed to find new commit"), + |head, to_rebase| { + let head = head?; + + let mut cherrypick_index = project_repository + .git_repository + .cherry_pick(&head, &to_rebase) + .context("failed to cherry pick")?; + + if cherrypick_index.has_conflicts() { + bail!("failed to rebase"); + } + + let merge_tree_oid = cherrypick_index + .write_tree_to(&project_repository.git_repository) + .context("failed to write merge tree")?; + + let merge_tree = project_repository + .git_repository + .find_tree(merge_tree_oid) + .context("failed to find merge tree")?; + + let commit_oid = project_repository + .git_repository + .commit( + None, + &to_rebase.author(), + &to_rebase.committer(), + to_rebase.message().unwrap_or_default(), + &merge_tree, + &[&head], + ) + .context("failed to create commit")?; + + project_repository + .git_repository + .find_commit(commit_oid) + .context("failed to find commit") + }, + )? 
+ .id() + } else { + new_commit_oid + }; + + // save new branch head + let writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create writer")?; + branch.head = new_head_id; + writer + .write(&mut branch) + .context("failed to write branch")?; + + super::integration::update_gitbutler_integration(gb_repository, project_repository)?; + + Ok(()) +} + +/// moves commit on top of the to target branch +pub fn move_commit( + gb_repository: &gb_repository::Repository, + project_repository: &project_repository::Repository, + target_branch_id: &BranchId, + commit_oid: git::Oid, + user: Option<&users::User>, + signing_key: Option<&keys::PrivateKey>, +) -> Result<(), errors::MoveCommitError> { + if project_repository.is_resolving() { + return Err(errors::MoveCommitError::Conflicted( + errors::ProjectConflictError { + project_id: project_repository.project().id, + }, + )); + } + + let latest_session = gb_repository + .get_latest_session() + .context("failed to get or create current session")? + .ok_or_else(|| { + errors::MoveCommitError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + let latest_session_reader = sessions::Reader::open(gb_repository, &latest_session) + .context("failed to open current session")?; + + let applied_branches = Iterator::new(&latest_session_reader) + .context("failed to create branch iterator")? + .collect::, reader::Error>>() + .context("failed to read virtual branches")? + .into_iter() + .filter(|b| b.applied) + .collect::>(); + + if !applied_branches.iter().any(|b| b.id == *target_branch_id) { + return Err(errors::MoveCommitError::BranchNotFound( + errors::BranchNotFoundError { + project_id: project_repository.project().id, + branch_id: *target_branch_id, + }, + )); + } + + let default_target = super::get_default_target(&latest_session_reader) + .context("failed to get default target")? 
+ .ok_or_else(|| { + errors::MoveCommitError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { + project_id: project_repository.project().id, + }) + })?; + + let (mut applied_statuses, _) = get_applied_status( + gb_repository, + project_repository, + &default_target, + applied_branches, + )?; + + let (ref mut source_branch, source_status) = applied_statuses + .iter_mut() + .find(|(b, _)| b.head == commit_oid) + .ok_or_else(|| errors::MoveCommitError::CommitNotFound(commit_oid))?; + + let source_branch_non_comitted_files = calculate_non_commited_diffs( + project_repository, + source_branch, + &default_target, + source_status, + )?; + + let source_branch_head = project_repository + .git_repository + .find_commit(commit_oid) + .context("failed to find commit")?; + let source_branch_head_parent = source_branch_head + .parent(0) + .context("failed to get parent commit")?; + let source_branch_head_tree = source_branch_head + .tree() + .context("failed to get commit tree")?; + let source_branch_head_parent_tree = source_branch_head_parent + .tree() + .context("failed to get parent tree")?; + let branch_head_diff = diff::trees( + &project_repository.git_repository, + &source_branch_head_parent_tree, + &source_branch_head_tree, + context_lines(project_repository), + )?; + let branch_head_diff = diff::diff_files_to_hunks(&branch_head_diff); + + let is_source_locked = source_branch_non_comitted_files + .iter() + .any(|(path, hunks)| { + branch_head_diff.get(path).map_or(false, |head_diff_hunks| { + hunks.iter().any(|hunk| { + head_diff_hunks.iter().any(|head_hunk| { + joined( + head_hunk.new_start, + head_hunk.new_start + head_hunk.new_lines, + hunk.new_start, + hunk.new_start + hunk.new_lines, + ) + }) + }) + }) + }); + + if is_source_locked { + return Err(errors::MoveCommitError::SourceLocked); + } + + let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) + .context("failed to create writer")?; + let branch_reader = 
branch::Reader::new(&latest_session_reader); + + // move files ownerships from source branch to the destination branch + + let ownerships_to_transfer = branch_head_diff + .iter() + .map(|(file_path, hunks)| { + ( + file_path.clone(), + hunks.iter().map(Into::into).collect::>(), + ) + }) + .map(|(file_path, hunks)| OwnershipClaim { file_path, hunks }) + .flat_map(|file_ownership| source_branch.ownership.take(&file_ownership)) + .collect::>(); + + // reset the source branch to the parent commit + { + source_branch.head = source_branch_head_parent.id(); + branch_writer.write(source_branch)?; + } + + // move the commit to destination branch target branch + { + let mut destination_branch = + branch_reader + .read(target_branch_id) + .map_err(|error| match error { + reader::Error::NotFound => { + errors::MoveCommitError::BranchNotFound(errors::BranchNotFoundError { + project_id: project_repository.project().id, + branch_id: *target_branch_id, + }) + } + error => errors::MoveCommitError::Other(error.into()), + })?; + + for ownership in ownerships_to_transfer { + destination_branch.ownership.put(&ownership); + } + + let new_destination_tree_oid = write_tree_onto_commit( + project_repository, + destination_branch.head, + &branch_head_diff, + ) + .context("failed to write tree onto commit")?; + let new_destination_tree = project_repository + .git_repository + .find_tree(new_destination_tree_oid) + .context("failed to find tree")?; + + let new_destination_head_oid = project_repository + .commit( + user, + source_branch_head.message().unwrap_or_default(), + &new_destination_tree, + &[&project_repository + .git_repository + .find_commit(destination_branch.head) + .context("failed to get dst branch head commit")?], + signing_key, + ) + .context("failed to commit")?; + + destination_branch.head = new_destination_head_oid; + branch_writer.write(&mut destination_branch)?; + } + + super::integration::update_gitbutler_integration(gb_repository, project_repository) + .context("failed 
to update gitbutler integration")?;
+
+    Ok(())
+}
+
+pub fn create_virtual_branch_from_branch(
+    gb_repository: &gb_repository::Repository,
+    project_repository: &project_repository::Repository,
+    upstream: &git::Refname,
+    signing_key: Option<&keys::PrivateKey>,
+    user: Option<&users::User>,
+) -> Result<BranchId, errors::CreateVirtualBranchFromBranchError> {
+    if !matches!(upstream, git::Refname::Local(_) | git::Refname::Remote(_)) {
+        return Err(errors::CreateVirtualBranchFromBranchError::BranchNotFound(
+            upstream.clone(),
+        ));
+    }
+
+    let current_session = gb_repository
+        .get_or_create_current_session()
+        .context("failed to get or create current session")?;
+    let current_session_reader = sessions::Reader::open(gb_repository, &current_session)
+        .context("failed to open current session")?;
+
+    let default_target = super::get_default_target(&current_session_reader)
+        .context("failed to get default target")?
+        .ok_or_else(|| {
+            errors::CreateVirtualBranchFromBranchError::DefaultTargetNotSet(
+                errors::DefaultTargetNotSetError {
+                    project_id: project_repository.project().id,
+                },
+            )
+        })?;
+
+    if let git::Refname::Remote(remote_upstream) = upstream {
+        if default_target.branch.eq(remote_upstream) {
+            return Err(
+                errors::CreateVirtualBranchFromBranchError::CantMakeBranchFromDefaultTarget,
+            );
+        }
+    }
+
+    let repo = &project_repository.git_repository;
+    let head_reference = match repo.find_reference(upstream) {
+        Ok(head) => Ok(head),
+        Err(git::Error::NotFound(_)) => Err(
+            errors::CreateVirtualBranchFromBranchError::BranchNotFound(upstream.clone()),
+        ),
+        Err(error) => Err(errors::CreateVirtualBranchFromBranchError::Other(
+            error.into(),
+        )),
+    }?;
+    let head_commit = head_reference
+        .peel_to_commit()
+        .context("failed to peel to commit")?;
+    let head_commit_tree = head_commit.tree().context("failed to find tree")?;
+
+    let all_virtual_branches = Iterator::new(&current_session_reader)
+        .context("failed to create branch iterator")?
+        .collect::<Result<Vec<branch::Branch>, reader::Error>>()
+        .context("failed to read virtual branches")?
+        .into_iter()
+        .collect::<Vec<_>>();
+
+    let order = all_virtual_branches.len();
+
+    let selected_for_changes = (!all_virtual_branches
+        .iter()
+        .any(|b| b.selected_for_changes.is_some()))
+    .then_some(chrono::Utc::now().timestamp_millis());
+
+    let now = time::UNIX_EPOCH
+        .elapsed()
+        .context("failed to get elapsed time")?
+        .as_millis();
+
+    // only set upstream if it's not the default target
+    let upstream_branch = match upstream {
+        git::Refname::Other(_) | git::Refname::Virtual(_) => {
+            // we only support local or remote branches
+            return Err(errors::CreateVirtualBranchFromBranchError::BranchNotFound(
+                upstream.clone(),
+            ));
+        }
+        git::Refname::Remote(remote) => Some(remote.clone()),
+        git::Refname::Local(local) => local.remote().cloned(),
+    };
+
+    // add file ownership based off the diff
+    let target_commit = repo
+        .find_commit(default_target.sha)
+        .map_err(|error| errors::CreateVirtualBranchFromBranchError::Other(error.into()))?;
+    let merge_base_oid = repo
+        .merge_base(target_commit.id(), head_commit.id())
+        .map_err(|error| errors::CreateVirtualBranchFromBranchError::Other(error.into()))?;
+    let merge_base_tree = repo
+        .find_commit(merge_base_oid)
+        .map_err(|error| errors::CreateVirtualBranchFromBranchError::Other(error.into()))?
+        .tree()
+        .map_err(|error| errors::CreateVirtualBranchFromBranchError::Other(error.into()))?;
+
+    // do a diff between the head of this branch and the target base
+    let diff = diff::trees(
+        &project_repository.git_repository,
+        &merge_base_tree,
+        &head_commit_tree,
+        context_lines(project_repository),
+    )
+    .context("failed to diff trees")?;
+    let diff = diff::diff_files_to_hunks(&diff);
+
+    let hunks_by_filepath =
+        super::virtual_hunks_by_filepath(&project_repository.project().path, &diff);
+
+    // assign ownership to the branch
+    let ownership = hunks_by_filepath.values().flatten().fold(
+        branch::BranchOwnershipClaims::default(),
+        |mut ownership, hunk| {
+            ownership.put(
+                &format!("{}:{}", hunk.file_path.display(), hunk.id)
+                    .parse()
+                    .unwrap(),
+            );
+            ownership
+        },
+    );
+
+    let mut branch = branch::Branch {
+        id: BranchId::generate(),
+        name: upstream
+            .branch()
+            .expect("always a branch reference")
+            .to_string(),
+        notes: String::new(),
+        applied: false,
+        upstream_head: upstream_branch.is_some().then_some(head_commit.id()),
+        upstream: upstream_branch,
+        tree: head_commit_tree.id(),
+        head: head_commit.id(),
+        created_timestamp_ms: now,
+        updated_timestamp_ms: now,
+        ownership,
+        order,
+        selected_for_changes,
+    };
+
+    let writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir())
+        .context("failed to create writer")?;
+    writer
+        .write(&mut branch)
+        .context("failed to write branch")?;
+
+    project_repository.add_branch_reference(&branch)?;
+
+    match apply_branch(
+        gb_repository,
+        project_repository,
+        &branch.id,
+        signing_key,
+        user,
+    ) {
+        Ok(()) => Ok(branch.id),
+        Err(errors::ApplyBranchError::BranchConflicts(_)) => {
+            // if branch conflicts with the workspace, it's ok. keep it unapplied
+            Ok(branch.id)
+        }
+        Err(error) => Err(errors::CreateVirtualBranchFromBranchError::ApplyBranch(
+            error,
+        )),
+    }
+}
+
+pub fn context_lines(project_repository: &project_repository::Repository) -> u32 {
+    let use_context = project_repository
+        .project()
+        .use_diff_context
+        .unwrap_or(false);
+
+    if use_context {
+        3_u32
+    } else {
+        0_u32
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    #[test]
+    fn joined_test() {
+        assert!(!joined(10, 13, 6, 9));
+        assert!(joined(10, 13, 7, 10));
+        assert!(joined(10, 13, 8, 11));
+        assert!(joined(10, 13, 9, 12));
+        assert!(joined(10, 13, 10, 13));
+        assert!(joined(10, 13, 11, 14));
+        assert!(joined(10, 13, 12, 15));
+        assert!(joined(10, 13, 13, 16));
+        assert!(!joined(10, 13, 14, 17));
+    }
+
+    #[test]
+    fn normalize_branch_name_test() {
+        assert_eq!(normalize_branch_name("feature/branch"), "feature/branch");
+        assert_eq!(normalize_branch_name("foo#branch"), "foo#branch");
+        assert_eq!(normalize_branch_name("foo!branch"), "foo-branch");
+    }
+}
diff --git a/src/windows.rs b/src/windows.rs
new file mode 100644
index 000000000..bf0e3ac95
--- /dev/null
+++ b/src/windows.rs
@@ -0,0 +1,24 @@
+use std::os::windows::fs::MetadataExt;
+
+pub trait MetadataShim {
+    fn ino(&self) -> u64;
+    fn dev(&self) -> u64;
+    fn uid(&self) -> u32;
+    fn gid(&self) -> u32;
+}
+
+impl MetadataShim for std::fs::Metadata {
+    fn ino(&self) -> u64 {
+        self.file_index().expect("file metadata constructed based on directory listing instead of a file (see https://doc.rust-lang.org/std/os/windows/fs/trait.MetadataExt.html#tymethod.file_index)")
+    }
+    #[allow(clippy::cast_lossless)]
+    fn dev(&self) -> u64 {
+        self.volume_serial_number().expect("file metadata constructed based on directory listing instead of a file (see https://doc.rust-lang.org/std/os/windows/fs/trait.MetadataExt.html#tymethod.volume_serial_number)") as u64
+    }
+    fn uid(&self) -> u32 {
+        0
+    }
+    fn gid(&self) -> u32 {
+        0
+    }
+}
diff --git a/src/writer.rs b/src/writer.rs
new file mode 100644
index 000000000..dff82d562
--- /dev/null
+++ b/src/writer.rs
@@ -0,0 +1,114 @@
+use std::path::Path;
+
+use anyhow::Result;
+
+use crate::lock;
+
+pub struct DirWriter(lock::Dir);
+
+impl DirWriter {
+    pub fn open<P: AsRef<Path>>(root: P) -> Result<Self> {
+        lock::Dir::new(root).map(Self)
+    }
+}
+
+impl DirWriter {
+    fn write<P, C>(&self, path: P, contents: C) -> Result<(), std::io::Error>
+    where
+        P: AsRef<Path>,
+        C: AsRef<[u8]>,
+    {
+        self.batch(&[BatchTask::Write(path, contents)])
+    }
+
+    pub fn remove<P: AsRef<Path>>(&self, path: P) -> Result<(), std::io::Error> {
+        self.0.batch(|root| {
+            let path = root.join(path);
+            if path.exists() {
+                if path.is_dir() {
+                    std::fs::remove_dir_all(path)
+                } else {
+                    std::fs::remove_file(path)
+                }
+            } else {
+                Ok(())
+            }
+        })?
+    }
+
+    pub fn batch<P, C>(&self, values: &[BatchTask<P, C>]) -> Result<(), std::io::Error>
+    where
+        P: AsRef<Path>,
+        C: AsRef<[u8]>,
+    {
+        self.0.batch(|root| {
+            for value in values {
+                match value {
+                    BatchTask::Write(path, contents) => {
+                        let path = root.join(path);
+                        if let Some(dir_path) = path.parent() {
+                            if !dir_path.exists() {
+                                std::fs::create_dir_all(dir_path)?;
+                            }
+                        };
+                        std::fs::write(path, contents)?;
+                    }
+                    BatchTask::Remove(path) => {
+                        let path = root.join(path);
+                        if path.exists() {
+                            if path.is_dir() {
+                                std::fs::remove_dir_all(path)?;
+                            } else {
+                                std::fs::remove_file(path)?;
+                            }
+                        }
+                    }
+                }
+            }
+            Ok(())
+        })?
+    }
+
+    pub fn write_string<P: AsRef<Path>>(
+        &self,
+        path: P,
+        contents: &str,
+    ) -> Result<(), std::io::Error> {
+        self.write(path, contents)
+    }
+}
+
+pub enum BatchTask<P: AsRef<Path>, C: AsRef<[u8]>> {
+    Write(P, C),
+    Remove(P),
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn write() {
+        let root = tempfile::tempdir().unwrap();
+        let writer = DirWriter::open(root.path()).unwrap();
+        writer.write("foo/bar", b"baz").unwrap();
+        assert_eq!(
+            std::fs::read_to_string(root.path().join("foo/bar")).unwrap(),
+            "baz"
+        );
+    }
+
+    #[test]
+    fn remove() {
+        let root = tempfile::tempdir().unwrap();
+        let writer = DirWriter::open(root.path()).unwrap();
+        writer.remove("foo/bar").unwrap();
+        assert!(!root.path().join("foo/bar").exists());
+        writer.write("foo/bar", b"baz").unwrap();
+        writer.remove("foo/bar").unwrap();
+        assert!(!root.path().join("foo/bar").exists());
+        writer.write("parent/child", b"baz").unwrap();
+        writer.remove("parent").unwrap();
+        assert!(!root.path().join("parent").exists());
+    }
+}
diff --git a/src/zip.rs b/src/zip.rs
new file mode 100644
index 000000000..577e9fc02
--- /dev/null
+++ b/src/zip.rs
@@ -0,0 +1,164 @@
+mod controller;
+pub use controller::Controller;
+
+use std::{
+    fs,
+    io::{self, Read, Write},
+    path, time,
+};
+
+use anyhow::{Context, Result};
+use sha2::{Digest, Sha256};
+use walkdir::{DirEntry, WalkDir};
+use zip::{result::ZipError, write, CompressionMethod, ZipWriter};
+
+#[derive(Clone)]
+pub struct Zipper {
+    cache: path::PathBuf,
+}
+
+impl Zipper {
+    pub fn new<P: AsRef<path::Path>>(cache_dir: P) -> Self {
+        let cache = cache_dir.as_ref().to_path_buf().join("archives");
+        Self { cache }
+    }
+
+    // takes a path to create zip of, returns path of a created archive.
+    pub fn zip<P: AsRef<path::Path>>(&self, path: P) -> Result<path::PathBuf> {
+        let path = path.as_ref();
+        if !path.exists() {
+            return Err(anyhow::anyhow!("{} does not exist", path.display()));
+        }
+        if !path.is_dir() {
+            return Err(anyhow::anyhow!("{} is not a directory", path.display()));
+        }
+        let path_hash = calculate_path_hash(path)?;
+        fs::create_dir_all(&self.cache).context("failed to create cache dir")?;
+        let archive_path = self.cache.join(format!("{}.zip", path_hash));
+        if !archive_path.exists() {
+            doit(path, &archive_path, CompressionMethod::Bzip2)?;
+        }
+        Ok(archive_path)
+    }
+}
+
+fn doit<P: AsRef<path::Path>>(
+    src_dir: P,
+    dst_file: P,
+    method: zip::CompressionMethod,
+) -> zip::result::ZipResult<()> {
+    let src = src_dir.as_ref();
+    let dst = dst_file.as_ref();
+    if !src.is_dir() {
+        return Err(ZipError::FileNotFound);
+    }
+
+    let file = fs::File::create(dst).unwrap();
+
+    let walkdir = WalkDir::new(src);
+    let it = walkdir.into_iter();
+
+    zip_dir(&mut it.filter_map(Result::ok), src, file, method)?;
+
+    Ok(())
+}
+
+fn zip_dir<T>(
+    it: &mut dyn Iterator<Item = DirEntry>,
+    prefix: &path::Path,
+    writer: T,
+    method: zip::CompressionMethod,
+) -> zip::result::ZipResult<()>
+where
+    T: io::Write + io::Seek,
+{
+    let mut zip = ZipWriter::new(writer);
+    let options = write::FileOptions::default()
+        .compression_method(method)
+        .unix_permissions(0o755);
+
+    let mut buffer = Vec::new();
+    for entry in it {
+        let path = entry.path();
+        let name = path.strip_prefix(prefix).unwrap();
+
+        // Write file or directory explicitly
+        // Some unzip tools unzip files with directory paths correctly, some do not!
+        if path.is_file() {
+            #[allow(deprecated)]
+            zip.start_file_from_path(name, options)?;
+            let mut f = fs::File::open(path)?;
+
+            f.read_to_end(&mut buffer)?;
+            zip.write_all(&buffer)?;
+            buffer.clear();
+        } else if !name.as_os_str().is_empty() {
+            // Only if not root! Avoids path spec / warning
+            // and mapname conversion failed error on unzip
+            #[allow(deprecated)]
+            zip.add_directory_from_path(name, options)?;
+        }
+    }
+    zip.finish()?;
+    Result::Ok(())
+}
+
+// returns hash of a path by calculating metadata hash of all files in it.
+fn calculate_path_hash<P: AsRef<path::Path>>(path: P) -> Result<String> {
+    let path = path.as_ref();
+    let mut hasher = Sha256::new();
+
+    if path.is_dir() {
+        let entries = fs::read_dir(path)?;
+        let mut entry_paths: Vec<_> = entries
+            .filter_map(|entry| entry.ok().map(|e| e.path()))
+            .collect();
+        entry_paths.sort();
+
+        for entry_path in entry_paths {
+            file_hash(&mut hasher, &entry_path).with_context(|| {
+                format!(
+                    "failed to calculate hash of file {}",
+                    entry_path.to_str().unwrap()
+                )
+            })?;
+        }
+    } else if path.is_file() {
+        file_hash(&mut hasher, path).with_context(|| {
+            format!(
+                "failed to calculate hash of file {}",
+                path.to_str().unwrap()
+            )
+        })?;
+    }
+
+    Ok(format!("{:X}", hasher.finalize()))
+}
+
+fn file_hash<P: AsRef<path::Path>>(digest: &mut Sha256, path: P) -> Result<()> {
+    let path = path.as_ref();
+    let metadata = fs::metadata(path).context("failed to get metadata")?;
+    digest.update(path.to_str().unwrap().as_bytes());
+    digest.update(metadata.len().to_string().as_bytes());
+    digest.update(
+        metadata
+            .modified()
+            .unwrap_or(time::UNIX_EPOCH)
+            .duration_since(time::UNIX_EPOCH)
+            .unwrap()
+            .as_secs()
+            .to_string()
+            .as_bytes(),
+    );
+    digest.update(
+        metadata
+            .created()
+            .unwrap_or(time::UNIX_EPOCH)
+            .duration_since(time::UNIX_EPOCH)
+            .unwrap()
+            .as_secs()
+            .to_string()
+            .as_bytes(),
+    );
+    Ok(())
+}
diff --git a/src/zip/controller.rs b/src/zip/controller.rs
new file mode 100644
index 000000000..972a27f7b
--- /dev/null
+++ b/src/zip/controller.rs
@@ -0,0 +1,72 @@
+use std::path;
+
+use crate::projects::{self, ProjectId};
+
+use super::Zipper;
+
+#[derive(Clone)]
+pub struct Controller {
+    local_data_dir: path::PathBuf,
+    logs_dir: path::PathBuf,
+    zipper: Zipper,
+    #[allow(clippy::struct_field_names)]
+    projects_controller: projects::Controller,
+}
+
+impl Controller {
+    pub fn new(
+        local_data_dir: path::PathBuf,
+        logs_dir: path::PathBuf,
+        zipper: Zipper,
+        projects_controller: projects::Controller,
+    ) -> Self {
+        Self {
+            local_data_dir,
+            logs_dir,
+            zipper,
+            projects_controller,
+        }
+    }
+
+    pub fn archive(&self, project_id: &ProjectId) -> Result<path::PathBuf, ArchiveError> {
+        let project = self.projects_controller.get(project_id)?;
+        self.zipper.zip(project.path).map_err(Into::into)
+    }
+
+    pub fn data_archive(&self, project_id: &ProjectId) -> Result<path::PathBuf, DataArchiveError> {
+        let project = self.projects_controller.get(project_id)?;
+        self.zipper
+            .zip(
+                self.local_data_dir
+                    .join("projects")
+                    .join(project.id.to_string()),
+            )
+            .map_err(Into::into)
+    }
+
+    pub fn logs_archive(&self) -> Result<path::PathBuf, LogsArchiveError> {
+        self.zipper.zip(&self.logs_dir).map_err(Into::into)
+    }
+}
+
+#[derive(thiserror::Error, Debug)]
+pub enum ArchiveError {
+    #[error(transparent)]
+    GetProject(#[from] projects::GetError),
+    #[error(transparent)]
+    Other(#[from] anyhow::Error),
+}
+
+#[derive(thiserror::Error, Debug)]
+pub enum DataArchiveError {
+    #[error(transparent)]
+    GetProject(#[from] projects::GetError),
+    #[error(transparent)]
+    Other(#[from] anyhow::Error),
+}
+
+#[derive(thiserror::Error, Debug)]
+pub enum LogsArchiveError {
+    #[error(transparent)]
+    Other(#[from] anyhow::Error),
+}
diff --git a/tests/app.rs b/tests/app.rs
new file mode 100644
index 000000000..8781ad1e5
--- /dev/null
+++ b/tests/app.rs
@@ -0,0 +1,259 @@
+const VAR_NO_CLEANUP: &str = "GITBUTLER_TESTS_NO_CLEANUP";
+
+pub(crate) mod common;
+mod suite {
+    mod gb_repository;
+    mod projects;
+    mod virtual_branches;
+}
+
+mod database;
+mod deltas;
+mod gb_repository;
+mod git;
+mod keys;
+mod lock;
+mod reader;
+mod sessions;
+mod types;
+pub mod virtual_branches;
+mod zip;
+
+use std::path::{Path, PathBuf};
+use std::{collections::HashMap, fs};
+
+use tempfile::{tempdir, TempDir};
+
+pub struct Suite {
+    pub local_app_data:
Option<TempDir>,
+    pub storage: gitbutler::storage::Storage,
+    pub users: gitbutler::users::Controller,
+    pub projects: gitbutler::projects::Controller,
+    pub keys: gitbutler::keys::Controller,
+}
+
+impl Drop for Suite {
+    fn drop(&mut self) {
+        if std::env::var_os(VAR_NO_CLEANUP).is_some() {
+            let _ = self.local_app_data.take().unwrap().into_path();
+        }
+    }
+}
+
+impl Default for Suite {
+    fn default() -> Self {
+        let local_app_data = temp_dir();
+        let storage = gitbutler::storage::Storage::new(&local_app_data);
+        let users = gitbutler::users::Controller::from_path(&local_app_data);
+        let projects = gitbutler::projects::Controller::from_path(&local_app_data);
+        let keys = gitbutler::keys::Controller::from_path(&local_app_data);
+        Self {
+            storage,
+            local_app_data: Some(local_app_data),
+            users,
+            projects,
+            keys,
+        }
+    }
+}
+
+impl Suite {
+    pub fn local_app_data(&self) -> &Path {
+        self.local_app_data.as_ref().unwrap().path()
+    }
+    pub fn sign_in(&self) -> gitbutler::users::User {
+        let user = gitbutler::users::User {
+            name: Some("test".to_string()),
+            email: "test@email.com".to_string(),
+            access_token: "token".to_string(),
+            ..Default::default()
+        };
+        self.users.set_user(&user).expect("failed to add user");
+        user
+    }
+
+    fn project(&self, fs: HashMap<PathBuf, &str>) -> (gitbutler::projects::Project, TempDir) {
+        let (repository, tmp) = test_repository();
+        for (path, contents) in fs {
+            if let Some(parent) = path.parent() {
+                fs::create_dir_all(repository.path().parent().unwrap().join(parent))
+                    .expect("failed to create dir");
+            }
+            fs::write(
+                repository.path().parent().unwrap().join(&path),
+                contents.as_bytes(),
+            )
+            .expect("failed to write file");
+        }
+        commit_all(&repository);
+
+        (
+            self.projects
+                .add(repository.path().parent().unwrap())
+                .expect("failed to add project"),
+            tmp,
+        )
+    }
+
+    pub fn new_case_with_files(&self, fs: HashMap<PathBuf, &str>) -> Case {
+        let (project, project_tmp) = self.project(fs);
+        Case::new(self, project, project_tmp)
+    }
+
+    pub fn new_case(&self) -> Case {
+        self.new_case_with_files(HashMap::new())
+    }
+}
+
+pub struct Case<'a> {
+    suite: &'a Suite,
+    pub project: gitbutler::projects::Project,
+    pub project_repository: gitbutler::project_repository::Repository,
+    pub gb_repository: gitbutler::gb_repository::Repository,
+    pub credentials: gitbutler::git::credentials::Helper,
+    /// The directory containing the `project_repository`
+    project_tmp: Option<TempDir>,
+}
+
+impl Drop for Case<'_> {
+    fn drop(&mut self) {
+        if let Some(tmp) = self
+            .project_tmp
+            .take()
+            .filter(|_| std::env::var_os(VAR_NO_CLEANUP).is_some())
+        {
+            let _ = tmp.into_path();
+        }
+    }
+}
+
+impl<'a> Case<'a> {
+    fn new(
+        suite: &'a Suite,
+        project: gitbutler::projects::Project,
+        project_tmp: TempDir,
+    ) -> Case<'a> {
+        let project_repository = gitbutler::project_repository::Repository::open(&project)
+            .expect("failed to create project repository");
+        let gb_repository = gitbutler::gb_repository::Repository::open(
+            suite.local_app_data(),
+            &project_repository,
+            None,
+        )
+        .expect("failed to open gb repository");
+        let credentials = gitbutler::git::credentials::Helper::from_path(suite.local_app_data());
+        Case {
+            suite,
+            project,
+            gb_repository,
+            project_repository,
+            project_tmp: Some(project_tmp),
+            credentials,
+        }
+    }
+
+    pub fn refresh(mut self) -> Self {
+        let project = self
+            .suite
+            .projects
+            .get(&self.project.id)
+            .expect("failed to get project");
+        let project_repository = gitbutler::project_repository::Repository::open(&project)
+            .expect("failed to create project repository");
+        let user = self.suite.users.get_user().expect("failed to get user");
+        let credentials =
+            gitbutler::git::credentials::Helper::from_path(self.suite.local_app_data());
+        Self {
+            suite: self.suite,
+            gb_repository: gitbutler::gb_repository::Repository::open(
+                self.suite.local_app_data(),
+                &project_repository,
+                user.as_ref(),
+            )
+            .expect("failed to open gb repository"),
+            credentials,
+            project_repository,
+            project,
+            project_tmp: self.project_tmp.take(),
+        }
+    }
+}
+
+pub fn test_database() -> (gitbutler::database::Database, TempDir) {
+    let tmp = temp_dir();
+    let db = gitbutler::database::Database::open_in_directory(&tmp).unwrap();
+    (db, tmp)
+}
+
+pub fn temp_dir() -> TempDir {
+    tempdir().unwrap()
+}
+
+pub fn empty_bare_repository() -> (gitbutler::git::Repository, TempDir) {
+    let tmp = temp_dir();
+    (
+        gitbutler::git::Repository::init_opts(&tmp, &init_opts_bare())
+            .expect("failed to init repository"),
+        tmp,
+    )
+}
+
+pub fn test_repository() -> (gitbutler::git::Repository, TempDir) {
+    let tmp = temp_dir();
+    let repository = gitbutler::git::Repository::init_opts(&tmp, &init_opts())
+        .expect("failed to init repository");
+    let mut index = repository.index().expect("failed to get index");
+    let oid = index.write_tree().expect("failed to write tree");
+    let signature = gitbutler::git::Signature::now("test", "test@email.com").unwrap();
+    repository
+        .commit(
+            Some(&"refs/heads/master".parse().unwrap()),
+            &signature,
+            &signature,
+            "Initial commit",
+            &repository.find_tree(oid).expect("failed to find tree"),
+            &[],
+        )
+        .expect("failed to commit");
+    (repository, tmp)
+}
+
+pub fn commit_all(repository: &gitbutler::git::Repository) -> gitbutler::git::Oid {
+    let mut index = repository.index().expect("failed to get index");
+    index
+        .add_all(["."], git2::IndexAddOption::DEFAULT, None)
+        .expect("failed to add all");
+    index.write().expect("failed to write index");
+    let oid = index.write_tree().expect("failed to write tree");
+    let signature = gitbutler::git::Signature::now("test", "test@email.com").unwrap();
+    let head = repository.head().expect("failed to get head");
+    let commit_oid = repository
+        .commit(
+            Some(&head.name().unwrap()),
+            &signature,
+            &signature,
+            "some commit",
+            &repository.find_tree(oid).expect("failed to find tree"),
+            &[&repository
+                .find_commit(
+                    repository
+                        .refname_to_id("HEAD")
+                        .expect("failed to get head"),
+                )
+                .expect("failed to find commit")],
+        )
+        .expect("failed to commit");
+    commit_oid
+}
+
+fn init_opts() -> git2::RepositoryInitOptions {
+    let mut opts = git2::RepositoryInitOptions::new();
+    opts.initial_head("master");
+    opts
+}
+
+pub fn init_opts_bare() -> git2::RepositoryInitOptions {
+    let mut opts = init_opts();
+    opts.bare(true);
+    opts
+}
diff --git a/tests/common/mod.rs b/tests/common/mod.rs
new file mode 100644
index 000000000..6a337dd80
--- /dev/null
+++ b/tests/common/mod.rs
@@ -0,0 +1,355 @@
+#![allow(unused)]
+use crate::{init_opts, VAR_NO_CLEANUP};
+use gitbutler::git;
+use std::{path, str::from_utf8};
+use tempfile::TempDir;
+
+pub fn temp_dir() -> TempDir {
+    tempfile::tempdir().unwrap()
+}
+
+pub struct TestProject {
+    local_repository: git::Repository,
+    local_tmp: Option<TempDir>,
+    remote_repository: git::Repository,
+    remote_tmp: Option<TempDir>,
+}
+
+impl Drop for TestProject {
+    fn drop(&mut self) {
+        if std::env::var_os(VAR_NO_CLEANUP).is_some() {
+            let _ = self.local_tmp.take().unwrap().into_path();
+            let _ = self.remote_tmp.take().unwrap().into_path();
+        }
+    }
+}
+
+impl Default for TestProject {
+    fn default() -> Self {
+        let local_tmp = temp_dir();
+        let local_repository = git::Repository::init_opts(local_tmp.path(), &init_opts())
+            .expect("failed to init repository");
+        let mut index = local_repository.index().expect("failed to get index");
+        let oid = index.write_tree().expect("failed to write tree");
+        let signature = git::Signature::now("test", "test@email.com").unwrap();
+        local_repository
+            .commit(
+                Some(&"refs/heads/master".parse().unwrap()),
+                &signature,
+                &signature,
+                "Initial commit",
+                &local_repository
+                    .find_tree(oid)
+                    .expect("failed to find tree"),
+                &[],
+            )
+            .expect("failed to commit");
+
+        let remote_tmp = temp_dir();
+        let remote_repository = git::Repository::init_opts(
+            remote_tmp.path(),
+            git2::RepositoryInitOptions::new()
+                .bare(true)
+                .external_template(false),
+        )
+        .expect("failed to init repository");
+
+        {
+            let mut remote =
local_repository
+                .remote(
+                    "origin",
+                    &remote_repository
+                        .path()
+                        .to_str()
+                        .expect("failed to convert path to str")
+                        .parse()
+                        .unwrap(),
+                )
+                .expect("failed to add remote");
+            remote
+                .push(&["refs/heads/master:refs/heads/master"], None)
+                .expect("failed to push");
+        }
+
+        Self {
+            local_repository,
+            local_tmp: Some(local_tmp),
+            remote_repository,
+            remote_tmp: Some(remote_tmp),
+        }
+    }
+}
+
+impl TestProject {
+    pub fn path(&self) -> &std::path::Path {
+        self.local_repository.workdir().unwrap()
+    }
+
+    pub fn push_branch(&self, branch: &git::LocalRefname) {
+        let mut origin = self.local_repository.find_remote("origin").unwrap();
+        origin.push(&[&format!("{branch}:{branch}")], None).unwrap();
+    }
+
+    pub fn push(&self) {
+        let mut origin = self.local_repository.find_remote("origin").unwrap();
+        origin
+            .push(&["refs/heads/master:refs/heads/master"], None)
+            .unwrap();
+    }
+
+    /// git add -A
+    /// git reset --hard
+    pub fn reset_hard(&self, oid: Option<git::Oid>) {
+        let mut index = self.local_repository.index().expect("failed to get index");
+        index
+            .add_all(["."], git2::IndexAddOption::DEFAULT, None)
+            .expect("failed to add all");
+        index.write().expect("failed to write index");
+
+        let head = self.local_repository.head().unwrap();
+        let commit = oid.map_or(head.peel_to_commit().unwrap(), |oid| {
+            self.local_repository.find_commit(oid).unwrap()
+        });
+
+        let head_ref = head.name().unwrap();
+        let head_ref = self.local_repository.find_reference(&head_ref).unwrap();
+
+        self.local_repository
+            .reset(&commit, git2::ResetType::Hard, None)
+            .unwrap();
+    }
+
+    /// fetch remote into local
+    pub fn fetch(&self) {
+        let mut remote = self.local_repository.find_remote("origin").unwrap();
+        remote
+            .fetch(&["+refs/heads/*:refs/remotes/origin/*"], None)
+            .unwrap();
+    }
+
+    pub fn rebase_and_merge(&self, branch_name: &git::Refname) {
+        let branch_name: git::Refname = match branch_name {
+            git::Refname::Local(local) => format!("refs/heads/{}", local.branch()).parse().unwrap(),
+            git::Refname::Remote(remote) => {
+                format!("refs/heads/{}", remote.branch()).parse().unwrap()
+            }
+            _ => "INVALID".parse().unwrap(), // todo
+        };
+        let branch = self.remote_repository.find_branch(&branch_name).unwrap();
+        let branch_commit = branch.peel_to_commit().unwrap();
+
+        let master_branch = {
+            let name: git::Refname = "refs/heads/master".parse().unwrap();
+            self.remote_repository.find_branch(&name).unwrap()
+        };
+        let master_branch_commit = master_branch.peel_to_commit().unwrap();
+
+        let mut rebase_options = git2::RebaseOptions::new();
+        rebase_options.quiet(true);
+        rebase_options.inmemory(true);
+
+        let mut rebase = self
+            .remote_repository
+            .rebase(
+                Some(branch_commit.id()),
+                Some(master_branch_commit.id()),
+                None,
+                Some(&mut rebase_options),
+            )
+            .unwrap();
+
+        let mut rebase_success = true;
+        let mut last_rebase_head = branch_commit.id();
+        while let Some(Ok(op)) = rebase.next() {
+            let commit = self.remote_repository.find_commit(op.id().into()).unwrap();
+            let index = rebase.inmemory_index().unwrap();
+            if index.has_conflicts() {
+                rebase_success = false;
+                break;
+            }
+
+            if let Ok(commit_id) = rebase.commit(None, &commit.committer().into(), None) {
+                last_rebase_head = commit_id.into();
+            } else {
+                rebase_success = false;
+                break;
+            };
+        }
+
+        if rebase_success {
+            self.remote_repository
+                .reference(
+                    &"refs/heads/master".parse().unwrap(),
+                    last_rebase_head,
+                    true,
+                    &format!("rebase: {}", branch_name),
+                )
+                .unwrap();
+        } else {
+            rebase.abort().unwrap();
+        }
+    }
+
+    /// works like if we'd open and merge a PR on github. does not update local.
+ pub fn merge(&self, branch_name: &git::Refname) { + let branch_name: git::Refname = match branch_name { + git::Refname::Local(local) => format!("refs/heads/{}", local.branch()).parse().unwrap(), + git::Refname::Remote(remote) => { + format!("refs/heads/{}", remote.branch()).parse().unwrap() + } + _ => "INVALID".parse().unwrap(), // todo + }; + let branch = self.remote_repository.find_branch(&branch_name).unwrap(); + let branch_commit = branch.peel_to_commit().unwrap(); + + let master_branch = { + let name: git::Refname = "refs/heads/master".parse().unwrap(); + self.remote_repository.find_branch(&name).unwrap() + }; + let master_branch_commit = master_branch.peel_to_commit().unwrap(); + + let merge_base = { + let oid = self + .remote_repository + .merge_base(branch_commit.id(), master_branch_commit.id()) + .unwrap(); + self.remote_repository.find_commit(oid).unwrap() + }; + let merge_tree = { + let mut merge_index = self + .remote_repository + .merge_trees( + &merge_base.tree().unwrap(), + &master_branch.peel_to_tree().unwrap(), + &branch.peel_to_tree().unwrap(), + ) + .unwrap(); + let oid = merge_index.write_tree_to(&self.remote_repository).unwrap(); + self.remote_repository.find_tree(oid).unwrap() + }; + + self.remote_repository + .commit( + Some(&"refs/heads/master".parse().unwrap()), + &branch_commit.author(), + &branch_commit.committer(), + &format!("Merge pull request from {}", branch_name), + &merge_tree, + &[&master_branch_commit, &branch_commit], + ) + .unwrap(); + } + + pub fn find_commit(&self, oid: git::Oid) -> Result { + self.local_repository.find_commit(oid) + } + + pub fn checkout_commit(&self, commit_oid: git::Oid) { + let commit = self.local_repository.find_commit(commit_oid).unwrap(); + let commit_tree = commit.tree().unwrap(); + + self.local_repository.set_head_detached(commit_oid).unwrap(); + self.local_repository + .checkout_tree(&commit_tree) + .force() + .checkout() + .unwrap(); + } + + pub fn checkout(&self, branch: &git::LocalRefname) { + 
let branch: git::Refname = branch.into();
+        let tree = match self.local_repository.find_branch(&branch) {
+            Ok(branch) => branch.peel_to_tree(),
+            Err(git::Error::NotFound(_)) => {
+                let head_commit = self
+                    .local_repository
+                    .head()
+                    .unwrap()
+                    .peel_to_commit()
+                    .unwrap();
+                self.local_repository
+                    .reference(&branch, head_commit.id(), false, "new branch")
+                    .unwrap();
+                head_commit.tree()
+            }
+            Err(error) => Err(error),
+        }
+        .unwrap();
+        self.local_repository.set_head(&branch).unwrap();
+        self.local_repository
+            .checkout_tree(&tree)
+            .force()
+            .checkout()
+            .unwrap();
+    }
+
+    /// takes all changes in the working directory and commits them into local
+    pub fn commit_all(&self, message: &str) -> git::Oid {
+        let head = self.local_repository.head().unwrap();
+        let mut index = self.local_repository.index().expect("failed to get index");
+        index
+            .add_all(["."], git2::IndexAddOption::DEFAULT, None)
+            .expect("failed to add all");
+        index.write().expect("failed to write index");
+        let oid = index.write_tree().expect("failed to write tree");
+        let signature = git::Signature::now("test", "test@email.com").unwrap();
+        self.local_repository
+            .commit(
+                head.name().as_ref(),
+                &signature,
+                &signature,
+                message,
+                &self
+                    .local_repository
+                    .find_tree(oid)
+                    .expect("failed to find tree"),
+                &[&self
+                    .local_repository
+                    .find_commit(
+                        self.local_repository
+                            .refname_to_id("HEAD")
+                            .expect("failed to get head"),
+                    )
+                    .expect("failed to find commit")],
+            )
+            .expect("failed to commit")
+    }
+
+    pub fn references(&self) -> Vec<git::Reference> {
+        self.local_repository
+            .references()
+            .expect("failed to get references")
+            .collect::<Result<Vec<_>, _>>()
+            .expect("failed to read references")
+    }
+
+    pub fn add_submodule(&self, url: &git::Url, path: &path::Path) {
+        let mut submodule = self.local_repository.add_submodule(url, path).unwrap();
+        let repo = submodule.open().unwrap();
+
+        // checkout submodule's master head
+        repo.find_remote("origin")
+            .unwrap()
+
.fetch(&["+refs/heads/*:refs/heads/*"], None, None)
+            .unwrap();
+        let reference = repo.find_reference("refs/heads/master").unwrap();
+        let reference_head = repo.find_commit(reference.target().unwrap()).unwrap();
+        repo.checkout_tree(reference_head.tree().unwrap().as_object(), None)
+            .unwrap();
+
+        // be sure that `HEAD` points to the actual head - `git2` seems to initialize it
+        // with `init.defaultBranch`, causing failure otherwise.
+        repo.set_head("refs/heads/master").unwrap();
+        submodule.add_finalize().unwrap();
+    }
+}
+
+pub mod paths {
+    use super::temp_dir;
+    use std::path;
+    use tempfile::TempDir;
+
+    pub fn data_dir() -> TempDir {
+        temp_dir()
+    }
+}
diff --git a/tests/database/mod.rs b/tests/database/mod.rs
new file mode 100644
index 000000000..97a95c517
--- /dev/null
+++ b/tests/database/mod.rs
@@ -0,0 +1,20 @@
+use crate::temp_dir;
+use gitbutler::database::Database;
+
+#[test]
+fn smoke() {
+    let data_dir = temp_dir();
+    let db = Database::open_in_directory(data_dir.path()).unwrap();
+    db.transaction(|tx| {
+        tx.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)", [])
+            .unwrap();
+        tx.execute("INSERT INTO test (id) VALUES (1)", []).unwrap();
+        let mut stmt = tx.prepare("SELECT id FROM test").unwrap();
+        let mut rows = stmt.query([]).unwrap();
+        let row = rows.next().unwrap().unwrap();
+        let id: i32 = row.get(0).unwrap();
+        assert_eq!(id, 1_i32);
+        Ok(())
+    })
+    .unwrap();
+}
diff --git a/tests/deltas/document.rs b/tests/deltas/document.rs
new file mode 100644
index 000000000..6dae00aba
--- /dev/null
+++ b/tests/deltas/document.rs
@@ -0,0 +1,263 @@
+use gitbutler::deltas::operations::Operation;
+use gitbutler::deltas::{Delta, Document};
+use gitbutler::reader;
+
+#[test]
+fn new() {
+    let document = Document::new(
+        Some(&reader::Content::UTF8("hello world".to_string())),
+        vec![],
+    );
+    assert!(document.is_ok());
+    let document = document.unwrap();
+    assert_eq!(document.to_string(), "hello world");
+    assert_eq!(document.get_deltas().len(), 0);
+}
+
+#[test] +fn update() { + let document = Document::new( + Some(&reader::Content::UTF8("hello world".to_string())), + vec![], + ); + assert!(document.is_ok()); + let mut document = document.unwrap(); + document + .update(Some(&reader::Content::UTF8("hello world!".to_string()))) + .unwrap(); + assert_eq!(document.to_string(), "hello world!"); + assert_eq!(document.get_deltas().len(), 1); + assert_eq!(document.get_deltas()[0].operations.len(), 1); + assert_eq!( + document.get_deltas()[0].operations[0], + Operation::Insert((11, "!".to_string())) + ); +} + +#[test] +fn empty() { + let document = Document::new(None, vec![]); + assert!(document.is_ok()); + let mut document = document.unwrap(); + document + .update(Some(&reader::Content::UTF8("hello world!".to_string()))) + .unwrap(); + assert_eq!(document.to_string(), "hello world!"); + assert_eq!(document.get_deltas().len(), 1); + assert_eq!(document.get_deltas()[0].operations.len(), 1); + assert_eq!( + document.get_deltas()[0].operations[0], + Operation::Insert((0, "hello world!".to_string())) + ); +} + +#[test] +fn from_deltas() { + let document = Document::new( + None, + vec![ + Delta { + timestamp_ms: 0, + operations: vec![Operation::Insert((0, "hello".to_string()))], + }, + Delta { + timestamp_ms: 1, + operations: vec![Operation::Insert((5, " world".to_string()))], + }, + Delta { + timestamp_ms: 2, + operations: vec![ + Operation::Delete((3, 7)), + Operation::Insert((4, "!".to_string())), + ], + }, + ], + ); + assert!(document.is_ok()); + let document = document.unwrap(); + assert_eq!(document.to_string(), "held!"); +} + +#[test] +fn complex_line() { + let document = Document::new(None, vec![]); + assert!(document.is_ok()); + let mut document = document.unwrap(); + + document + .update(Some(&reader::Content::UTF8("hello".to_string()))) + .unwrap(); + assert_eq!(document.to_string(), "hello"); + assert_eq!(document.get_deltas().len(), 1); + assert_eq!(document.get_deltas()[0].operations.len(), 1); + assert_eq!( + 
document.get_deltas()[0].operations[0], + Operation::Insert((0, "hello".to_string())) + ); + + document + .update(Some(&reader::Content::UTF8("hello world".to_string()))) + .unwrap(); + assert_eq!(document.to_string(), "hello world"); + assert_eq!(document.get_deltas().len(), 2); + assert_eq!(document.get_deltas()[1].operations.len(), 1); + assert_eq!( + document.get_deltas()[1].operations[0], + Operation::Insert((5, " world".to_string())) + ); + + document + .update(Some(&reader::Content::UTF8("held!".to_string()))) + .unwrap(); + assert_eq!(document.to_string(), "held!"); + assert_eq!(document.get_deltas().len(), 3); + assert_eq!(document.get_deltas()[2].operations.len(), 2); + assert_eq!( + document.get_deltas()[2].operations[0], + Operation::Delete((3, 7)) + ); + assert_eq!( + document.get_deltas()[2].operations[1], + Operation::Insert((4, "!".to_string())), + ); +} + +#[test] +fn multiline_add() { + let document = Document::new(None, vec![]); + assert!(document.is_ok()); + let mut document = document.unwrap(); + + document + .update(Some(&reader::Content::UTF8("first".to_string()))) + .unwrap(); + assert_eq!(document.to_string(), "first"); + assert_eq!(document.get_deltas().len(), 1); + assert_eq!(document.get_deltas()[0].operations.len(), 1); + assert_eq!( + document.get_deltas()[0].operations[0], + Operation::Insert((0, "first".to_string())) + ); + + document + .update(Some(&reader::Content::UTF8("first\ntwo".to_string()))) + .unwrap(); + assert_eq!(document.to_string(), "first\ntwo"); + assert_eq!(document.get_deltas().len(), 2); + assert_eq!(document.get_deltas()[1].operations.len(), 1); + assert_eq!( + document.get_deltas()[1].operations[0], + Operation::Insert((5, "\ntwo".to_string())) + ); + + document + .update(Some(&reader::Content::UTF8( + "first line\nline two".to_string(), + ))) + .unwrap(); + assert_eq!(document.to_string(), "first line\nline two"); + assert_eq!(document.get_deltas().len(), 3); + 
assert_eq!(document.get_deltas()[2].operations.len(), 2); + assert_eq!( + document.get_deltas()[2].operations[0], + Operation::Insert((5, " line".to_string())) + ); + assert_eq!( + document.get_deltas()[2].operations[1], + Operation::Insert((11, "line ".to_string())) + ); +} + +#[test] +fn multiline_remove() { + let document = Document::new(None, vec![]); + assert!(document.is_ok()); + let mut document = document.unwrap(); + + document + .update(Some(&reader::Content::UTF8( + "first line\nline two".to_string(), + ))) + .unwrap(); + assert_eq!(document.to_string(), "first line\nline two"); + assert_eq!(document.get_deltas().len(), 1); + assert_eq!(document.get_deltas()[0].operations.len(), 1); + assert_eq!( + document.get_deltas()[0].operations[0], + Operation::Insert((0, "first line\nline two".to_string())) + ); + + document + .update(Some(&reader::Content::UTF8("first\ntwo".to_string()))) + .unwrap(); + assert_eq!(document.to_string(), "first\ntwo"); + assert_eq!(document.get_deltas().len(), 2); + assert_eq!(document.get_deltas()[1].operations.len(), 2); + assert_eq!( + document.get_deltas()[1].operations[0], + Operation::Delete((5, 5)) + ); + assert_eq!( + document.get_deltas()[1].operations[1], + Operation::Delete((6, 5)) + ); + + document + .update(Some(&reader::Content::UTF8("first".to_string()))) + .unwrap(); + assert_eq!(document.to_string(), "first"); + assert_eq!(document.get_deltas().len(), 3); + assert_eq!(document.get_deltas()[2].operations.len(), 1); + assert_eq!( + document.get_deltas()[2].operations[0], + Operation::Delete((5, 4)) + ); + + document.update(None).unwrap(); + assert_eq!(document.to_string(), ""); + assert_eq!(document.get_deltas().len(), 4); + assert_eq!(document.get_deltas()[3].operations.len(), 1); + assert_eq!( + document.get_deltas()[3].operations[0], + Operation::Delete((0, 5)) + ); +} + +#[test] +fn binary_to_text() { + let latest = reader::Content::Binary; + let current = reader::Content::UTF8("test".to_string()); + let mut 
document = Document::new(Some(&latest), vec![]).unwrap();
+    let new_deltas = document.update(Some(&current)).unwrap();
+    assert!(new_deltas.is_some());
+    assert_eq!(document.to_string(), "test");
+}
+
+#[test]
+fn binary_to_binary() {
+    let latest = reader::Content::Binary;
+    let current = reader::Content::Binary;
+    let mut document = Document::new(Some(&latest), vec![]).unwrap();
+    let new_deltas = document.update(Some(&current)).unwrap();
+    assert!(new_deltas.is_some());
+    assert_eq!(document.to_string(), "");
+}
+
+#[test]
+fn text_to_binary() {
+    let latest = reader::Content::UTF8("text".to_string());
+    let current = reader::Content::Binary;
+    let mut document = Document::new(Some(&latest), vec![]).unwrap();
+    let new_deltas = document.update(Some(&current)).unwrap();
+    assert!(new_deltas.is_some());
+    assert_eq!(document.to_string(), "");
+}
+
+#[test]
+fn unicode() {
+    let latest = reader::Content::UTF8("\u{1f31a}".to_string());
+    let current = reader::Content::UTF8("\u{1f31d}".to_string());
+    let mut document = Document::new(Some(&latest), vec![]).unwrap();
+    document.update(Some(&current)).unwrap();
+    assert_eq!(document.to_string(), "\u{1f31d}");
+}
diff --git a/tests/deltas/mod.rs b/tests/deltas/mod.rs
new file mode 100644
index 000000000..8cc2dfb69
--- /dev/null
+++ b/tests/deltas/mod.rs
@@ -0,0 +1,146 @@
+mod database {
+    use crate::test_database;
+    use gitbutler::deltas::{operations, Database, Delta};
+    use gitbutler::projects::ProjectId;
+    use gitbutler::sessions::SessionId;
+    use std::path;
+
+    #[test]
+    fn insert_query() -> anyhow::Result<()> {
+        let (db, _tmp) = test_database();
+        let database = Database::new(db);
+
+        let project_id = ProjectId::generate();
+        let session_id = SessionId::generate();
+        let file_path = path::PathBuf::from("file_path");
+        let delta1 = Delta {
+            timestamp_ms: 0,
+            operations: vec![operations::Operation::Insert((0, "text".to_string()))],
+        };
+        let deltas = vec![delta1.clone()];
+
+        database.insert(&project_id, &session_id, &file_path,
&deltas)?; + + assert_eq!( + database.list_by_project_id_session_id(&project_id, &session_id, &None)?, + vec![(file_path.display().to_string(), vec![delta1])] + .into_iter() + .collect() + ); + + Ok(()) + } + + #[test] + fn insert_update() -> anyhow::Result<()> { + let (db, _tmp) = test_database(); + let database = Database::new(db); + + let project_id = ProjectId::generate(); + let session_id = SessionId::generate(); + let file_path = path::PathBuf::from("file_path"); + let delta1 = Delta { + timestamp_ms: 0, + operations: vec![operations::Operation::Insert((0, "text".to_string()))], + }; + let delta2 = Delta { + timestamp_ms: 0, + operations: vec![operations::Operation::Insert(( + 0, + "updated_text".to_string(), + ))], + }; + + database.insert(&project_id, &session_id, &file_path, &vec![delta1])?; + database.insert(&project_id, &session_id, &file_path, &vec![delta2.clone()])?; + + assert_eq!( + database.list_by_project_id_session_id(&project_id, &session_id, &None)?, + vec![(file_path.display().to_string(), vec![delta2])] + .into_iter() + .collect() + ); + + Ok(()) + } + + #[test] + fn aggregate_deltas_by_file() -> anyhow::Result<()> { + let (db, _tmp) = test_database(); + let database = Database::new(db); + + let project_id = ProjectId::generate(); + let session_id = SessionId::generate(); + let file_path1 = path::PathBuf::from("file_path1"); + let file_path2 = path::PathBuf::from("file_path2"); + let delta1 = Delta { + timestamp_ms: 1, + operations: vec![operations::Operation::Insert((0, "text".to_string()))], + }; + let delta2 = Delta { + timestamp_ms: 2, + operations: vec![operations::Operation::Insert(( + 0, + "updated_text".to_string(), + ))], + }; + + database.insert(&project_id, &session_id, &file_path1, &vec![delta1.clone()])?; + database.insert(&project_id, &session_id, &file_path2, &vec![delta1.clone()])?; + database.insert(&project_id, &session_id, &file_path2, &vec![delta2.clone()])?; + + assert_eq!( + 
database.list_by_project_id_session_id(&project_id, &session_id, &None)?, + vec![ + (file_path1.display().to_string(), vec![delta1.clone()]), + (file_path2.display().to_string(), vec![delta1, delta2]) + ] + .into_iter() + .collect() + ); + + Ok(()) + } +} + +mod document; +mod operations; + +mod writer { + use gitbutler::deltas::operations::Operation; + use gitbutler::{deltas, sessions}; + use std::vec; + + use crate::{Case, Suite}; + + #[test] + fn write_no_vbranches() -> anyhow::Result<()> { + let suite = Suite::default(); + let Case { gb_repository, .. } = &suite.new_case(); + + let deltas_writer = deltas::Writer::new(gb_repository)?; + + let session = gb_repository.get_or_create_current_session()?; + let session_reader = sessions::Reader::open(gb_repository, &session)?; + let deltas_reader = gitbutler::deltas::Reader::new(&session_reader); + + let path = "test.txt"; + let deltas = vec![ + gitbutler::deltas::Delta { + operations: vec![Operation::Insert((0, "hello".to_string()))], + timestamp_ms: 0, + }, + gitbutler::deltas::Delta { + operations: vec![Operation::Insert((5, " world".to_string()))], + timestamp_ms: 0, + }, + ]; + + deltas_writer.write(path, &deltas).unwrap(); + + assert_eq!(deltas_reader.read_file(path).unwrap(), Some(deltas)); + assert_eq!(deltas_reader.read_file("not found").unwrap(), None); + + Ok(()) + } +} diff --git a/tests/deltas/operations.rs b/tests/deltas/operations.rs new file mode 100644 index 000000000..b273d4096 --- /dev/null +++ b/tests/deltas/operations.rs @@ -0,0 +1,55 @@ +use gitbutler::deltas::operations::{get_delta_operations, Operation}; + +#[test] +fn get_delta_operations_insert_end() { + let initial_text = "hello"; + let final_text = "hello world!"; + let operations = get_delta_operations(initial_text, final_text); + assert_eq!(operations.len(), 1); + assert_eq!(operations[0], Operation::Insert((5, " world!".to_string()))); +} + +#[test] +fn get_delta_operations_insert_middle() { + let initial_text = "helloworld"; + let 
final_text = "hello, world"; + let operations = get_delta_operations(initial_text, final_text); + assert_eq!(operations.len(), 1); + assert_eq!(operations[0], Operation::Insert((5, ", ".to_string()))); +} + +#[test] +fn get_delta_operations_insert_begin() { + let initial_text = "world"; + let final_text = "hello world"; + let operations = get_delta_operations(initial_text, final_text); + assert_eq!(operations.len(), 1); + assert_eq!(operations[0], Operation::Insert((0, "hello ".to_string()))); +} + +#[test] +fn get_delta_operations_delete_end() { + let initial_text = "hello world!"; + let final_text = "hello"; + let operations = get_delta_operations(initial_text, final_text); + assert_eq!(operations.len(), 1); + assert_eq!(operations[0], Operation::Delete((5, 7))); +} + +#[test] +fn get_delta_operations_delete_middle() { + let initial_text = "hello, world"; + let final_text = "helloworld"; + let operations = get_delta_operations(initial_text, final_text); + assert_eq!(operations.len(), 1); + assert_eq!(operations[0], Operation::Delete((5, 2))); +} + +#[test] +fn get_delta_operations_delete_begin() { + let initial_text = "hello world"; + let final_text = "world"; + let operations = get_delta_operations(initial_text, final_text); + assert_eq!(operations.len(), 1); + assert_eq!(operations[0], Operation::Delete((0, 6))); +} diff --git a/tests/gb_repository/mod.rs b/tests/gb_repository/mod.rs new file mode 100644 index 000000000..efe55788c --- /dev/null +++ b/tests/gb_repository/mod.rs @@ -0,0 +1,490 @@ +use std::{collections::HashMap, path, thread, time}; + +use anyhow::Result; +use pretty_assertions::assert_eq; +use tempfile::TempDir; + +use crate::init_opts_bare; +use crate::{Case, Suite}; + +use gitbutler::{ + deltas::{self, operations::Operation}, + projects::{self, ApiProject, ProjectId}, + reader, + sessions::{self, SessionId}, +}; + +mod repository { + use std::path::PathBuf; + + use crate::{Case, Suite}; + use anyhow::Result; + use pretty_assertions::assert_eq; 
+ + #[test] + fn alternates_file_being_set() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project_repository, + .. + } = &suite.new_case(); + + let file_content = std::fs::read_to_string( + gb_repository + .git_repository_path() + .join("objects/info/alternates"), + )?; + + let file_content = PathBuf::from(file_content.trim()); + let project_path = project_repository.path().to_path_buf().join(".git/objects"); + + assert_eq!(file_content, project_path); + + Ok(()) + } +} + +fn new_test_remote_repository() -> Result<(git2::Repository, TempDir)> { + let tmp = tempfile::tempdir()?; + let path = tmp.path().to_str().unwrap().to_string(); + let repo_a = git2::Repository::init_opts(path, &init_opts_bare())?; + Ok((repo_a, tmp)) +} + +#[test] +fn get_current_session_writer_should_use_existing_session() -> Result<()> { + let suite = Suite::default(); + let Case { gb_repository, .. } = &suite.new_case(); + + let current_session_1 = gb_repository.get_or_create_current_session()?; + let current_session_2 = gb_repository.get_or_create_current_session()?; + assert_eq!(current_session_1.id, current_session_2.id); + + Ok(()) +} + +#[test] +fn must_not_return_init_session() -> Result<()> { + let suite = Suite::default(); + let Case { gb_repository, .. } = &suite.new_case(); + + assert!(gb_repository.get_current_session()?.is_none()); + + let iter = gb_repository.get_sessions_iterator()?; + assert_eq!(iter.count(), 0); + + Ok(()) +} + +#[test] +fn must_not_flush_without_current_session() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project_repository, + .. + } = &suite.new_case(); + + let session = gb_repository.flush(project_repository, None)?; + assert!(session.is_none()); + + let iter = gb_repository.get_sessions_iterator()?; + assert_eq!(iter.count(), 0); + + Ok(()) +} + +#[test] +fn non_empty_repository() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project_repository, + .. 
+    } = &suite.new_case_with_files(HashMap::from([(path::PathBuf::from("test.txt"), "test")]));
+
+    gb_repository.get_or_create_current_session()?;
+    gb_repository.flush(project_repository, None)?;
+
+    Ok(())
+}
+
+#[test]
+fn must_flush_current_session() -> Result<()> {
+    let suite = Suite::default();
+    let Case {
+        gb_repository,
+        project_repository,
+        ..
+    } = &suite.new_case();
+
+    gb_repository.get_or_create_current_session()?;
+
+    let session = gb_repository.flush(project_repository, None)?;
+    assert!(session.is_some());
+
+    let iter = gb_repository.get_sessions_iterator()?;
+    assert_eq!(iter.count(), 1);
+
+    Ok(())
+}
+
+#[test]
+fn list_deltas_from_current_session() -> Result<()> {
+    let suite = Suite::default();
+    let Case { gb_repository, .. } = &suite.new_case();
+
+    let current_session = gb_repository.get_or_create_current_session()?;
+    let writer = deltas::Writer::new(gb_repository)?;
+    writer.write(
+        "test.txt",
+        &vec![deltas::Delta {
+            operations: vec![Operation::Insert((0, "Hello World".to_string()))],
+            timestamp_ms: 0,
+        }],
+    )?;
+
+    let session_reader = sessions::Reader::open(gb_repository, &current_session)?;
+    let deltas_reader = deltas::Reader::new(&session_reader);
+    let deltas = deltas_reader.read(None)?;
+
+    assert_eq!(deltas.len(), 1);
+    assert_eq!(
+        deltas[&path::PathBuf::from("test.txt")][0].operations.len(),
+        1
+    );
+    assert_eq!(
+        deltas[&path::PathBuf::from("test.txt")][0].operations[0],
+        Operation::Insert((0, "Hello World".to_string()))
+    );
+
+    Ok(())
+}
+
+#[test]
+fn list_deltas_from_flushed_session() {
+    let suite = Suite::default();
+    let Case {
+        gb_repository,
+        project_repository,
+        ..
+    } = &suite.new_case();
+
+    let writer = deltas::Writer::new(gb_repository).unwrap();
+    writer
+        .write(
+            "test.txt",
+            &vec![deltas::Delta {
+                operations: vec![Operation::Insert((0, "Hello World".to_string()))],
+                timestamp_ms: 0,
+            }],
+        )
+        .unwrap();
+    let session = gb_repository.flush(project_repository, None).unwrap();
+
+    let session_reader = sessions::Reader::open(gb_repository, &session.unwrap()).unwrap();
+    let deltas_reader = deltas::Reader::new(&session_reader);
+    let deltas = deltas_reader.read(None).unwrap();
+
+    assert_eq!(deltas.len(), 1);
+    assert_eq!(
+        deltas[&path::PathBuf::from("test.txt")][0].operations.len(),
+        1
+    );
+    assert_eq!(
+        deltas[&path::PathBuf::from("test.txt")][0].operations[0],
+        Operation::Insert((0, "Hello World".to_string()))
+    );
+}
+
+#[test]
+fn list_files_from_current_session() {
+    let suite = Suite::default();
+    let Case { gb_repository, .. } = &suite.new_case_with_files(HashMap::from([(
+        path::PathBuf::from("test.txt"),
+        "Hello World",
+    )]));
+
+    let current = gb_repository.get_or_create_current_session().unwrap();
+    let reader = sessions::Reader::open(gb_repository, &current).unwrap();
+    let files = reader.files(None).unwrap();
+
+    assert_eq!(files.len(), 1);
+    assert_eq!(
+        files[&path::PathBuf::from("test.txt")],
+        reader::Content::UTF8("Hello World".to_string())
+    );
+}
+
+#[test]
+fn list_files_from_flushed_session() {
+    let suite = Suite::default();
+    let Case {
+        gb_repository,
+        project_repository,
+        ..
+    } = &suite.new_case_with_files(HashMap::from([(
+        path::PathBuf::from("test.txt"),
+        "Hello World",
+    )]));
+
+    gb_repository.get_or_create_current_session().unwrap();
+    let session = gb_repository
+        .flush(project_repository, None)
+        .unwrap()
+        .unwrap();
+    let reader = sessions::Reader::open(gb_repository, &session).unwrap();
+    let files = reader.files(None).unwrap();
+
+    assert_eq!(files.len(), 1);
+    assert_eq!(
+        files[&path::PathBuf::from("test.txt")],
+        reader::Content::UTF8("Hello World".to_string())
+    );
+}
+
+#[tokio::test]
+async fn remote_synchronization() {
+    // first, create a remote, pretending it's a cloud
+    let (cloud, _tmp) = new_test_remote_repository().unwrap();
+    let api_project = ApiProject {
+        name: "test-sync".to_string(),
+        description: None,
+        repository_id: "123".to_string(),
+        git_url: cloud.path().to_str().unwrap().to_string(),
+        code_git_url: None,
+        created_at: 0_i32.to_string(),
+        updated_at: 0_i32.to_string(),
+        sync: true,
+    };
+
+    let suite = Suite::default();
+    let user = suite.sign_in();
+
+    // create first local project, add files, deltas and flush a session
+    let case_one = suite.new_case_with_files(HashMap::from([(
+        path::PathBuf::from("test.txt"),
+        "Hello World",
+    )]));
+    suite
+        .projects
+        .update(&projects::UpdateRequest {
+            id: case_one.project.id,
+            api: Some(api_project.clone()),
+            ..Default::default()
+        })
+        .await
+        .unwrap();
+    let case_one = case_one.refresh();
+
+    let writer = deltas::Writer::new(&case_one.gb_repository).unwrap();
+    writer
+        .write(
+            "test.txt",
+            &vec![deltas::Delta {
+                operations: vec![Operation::Insert((0, "Hello World".to_string()))],
+                timestamp_ms: 0,
+            }],
+        )
+        .unwrap();
+    let session_one = case_one
+        .gb_repository
+        .flush(&case_one.project_repository, Some(&user))
+        .unwrap()
+        .unwrap();
+    case_one.gb_repository.push(Some(&user)).unwrap();
+
+    // create second local project, fetch it and make sure session is there
+    let case_two = suite.new_case();
+    suite
+        .projects
+
.update(&projects::UpdateRequest {
+            id: case_two.project.id,
+            api: Some(api_project.clone()),
+            ..Default::default()
+        })
+        .await
+        .unwrap();
+    let case_two = case_two.refresh();
+
+    case_two.gb_repository.fetch(Some(&user)).unwrap();
+
+    // now it should have the session from the first local project synced
+    let sessions_two = case_two
+        .gb_repository
+        .get_sessions_iterator()
+        .unwrap()
+        .map(Result::unwrap)
+        .collect::<Vec<_>>();
+    assert_eq!(sessions_two.len(), 1);
+    assert_eq!(sessions_two[0].id, session_one.id);
+
+    let session_reader = sessions::Reader::open(&case_two.gb_repository, &sessions_two[0]).unwrap();
+    let deltas_reader = deltas::Reader::new(&session_reader);
+    let deltas = deltas_reader.read(None).unwrap();
+    let files = session_reader.files(None).unwrap();
+    assert_eq!(deltas.len(), 1);
+    assert_eq!(files.len(), 1);
+    assert_eq!(
+        files[&path::PathBuf::from("test.txt")],
+        reader::Content::UTF8("Hello World".to_string())
+    );
+    assert_eq!(
+        deltas[&path::PathBuf::from("test.txt")],
+        vec![deltas::Delta {
+            operations: vec![Operation::Insert((0, "Hello World".to_string()))],
+            timestamp_ms: 0,
+        }]
+    );
+}
+
+#[tokio::test]
+async fn remote_sync_order() {
+    // first, create a remote, pretending it's a cloud
+    let (cloud, _tmp) = new_test_remote_repository().unwrap();
+    let api_project = projects::ApiProject {
+        name: "test-sync".to_string(),
+        description: None,
+        repository_id: "123".to_string(),
+        git_url: cloud.path().to_str().unwrap().to_string(),
+        code_git_url: None,
+        created_at: 0_i32.to_string(),
+        updated_at: 0_i32.to_string(),
+        sync: true,
+    };
+
+    let suite = Suite::default();
+
+    let case_one = suite.new_case();
+    suite
+        .projects
+        .update(&projects::UpdateRequest {
+            id: case_one.project.id,
+            api: Some(api_project.clone()),
+            ..Default::default()
+        })
+        .await
+        .unwrap();
+    let case_one = case_one.refresh();
+
+    let case_two = suite.new_case();
+    suite
+        .projects
+        .update(&projects::UpdateRequest {
+            id:
case_two.project.id,
+            api: Some(api_project.clone()),
+            ..Default::default()
+        })
+        .await
+        .unwrap();
+    let case_two = case_two.refresh();
+
+    let user = suite.sign_in();
+
+    // create session in the first project
+    case_one
+        .gb_repository
+        .get_or_create_current_session()
+        .unwrap();
+    let session_one_first = case_one
+        .gb_repository
+        .flush(&case_one.project_repository, Some(&user))
+        .unwrap()
+        .unwrap();
+    case_one.gb_repository.push(Some(&user)).unwrap();
+
+    thread::sleep(time::Duration::from_secs(1));
+
+    // create session in the second project
+    case_two
+        .gb_repository
+        .get_or_create_current_session()
+        .unwrap();
+    let session_two_first = case_two
+        .gb_repository
+        .flush(&case_two.project_repository, Some(&user))
+        .unwrap()
+        .unwrap();
+    case_two.gb_repository.push(Some(&user)).unwrap();
+
+    thread::sleep(time::Duration::from_secs(1));
+
+    // create second session in the first project
+    case_one
+        .gb_repository
+        .get_or_create_current_session()
+        .unwrap();
+    let session_one_second = case_one
+        .gb_repository
+        .flush(&case_one.project_repository, Some(&user))
+        .unwrap()
+        .unwrap();
+    case_one.gb_repository.push(Some(&user)).unwrap();
+
+    thread::sleep(time::Duration::from_secs(1));
+
+    // create second session in the second project
+    case_two
+        .gb_repository
+        .get_or_create_current_session()
+        .unwrap();
+    let session_two_second = case_two
+        .gb_repository
+        .flush(&case_two.project_repository, Some(&user))
+        .unwrap()
+        .unwrap();
+    case_two.gb_repository.push(Some(&user)).unwrap();
+
+    case_one.gb_repository.fetch(Some(&user)).unwrap();
+    let sessions_one = case_one
+        .gb_repository
+        .get_sessions_iterator()
+        .unwrap()
+        .map(Result::unwrap)
+        .collect::<Vec<_>>();
+
+    case_two.gb_repository.fetch(Some(&user)).unwrap();
+    let sessions_two = case_two
+        .gb_repository
+        .get_sessions_iterator()
+        .unwrap()
+        .map(Result::unwrap)
+        .collect::<Vec<_>>();
+
+    // make sure the sessions are the same on both repos
+    assert_eq!(sessions_one.len(),
4); + assert_eq!(sessions_two, sessions_one); + + assert_eq!(sessions_one[0].id, session_two_second.id); + assert_eq!(sessions_one[1].id, session_one_second.id); + assert_eq!(sessions_one[2].id, session_two_first.id); + assert_eq!(sessions_one[3].id, session_one_first.id); +} + +#[test] +fn gitbutler_file() { + let suite = Suite::default(); + let Case { + gb_repository, + project_repository, + .. + } = &suite.new_case(); + + let session = gb_repository.get_or_create_current_session().unwrap(); + + let gitbutler_file_path = project_repository.path().join(".git/gitbutler.json"); + assert!(gitbutler_file_path.exists()); + + let file_content: serde_json::Value = + serde_json::from_str(&std::fs::read_to_string(&gitbutler_file_path).unwrap()).unwrap(); + let sid: SessionId = file_content["sessionId"].as_str().unwrap().parse().unwrap(); + assert_eq!(sid, session.id); + + let pid: ProjectId = file_content["repositoryId"] + .as_str() + .unwrap() + .parse() + .unwrap(); + assert_eq!(pid, project_repository.project().id); +} diff --git a/tests/git/config.rs b/tests/git/config.rs new file mode 100644 index 000000000..730401d70 --- /dev/null +++ b/tests/git/config.rs @@ -0,0 +1,34 @@ +use crate::test_repository; + +#[test] +pub fn set_str() { + let (repo, _tmp) = test_repository(); + let mut config = repo.config().unwrap(); + config.set_str("test.key", "test.value").unwrap(); + assert_eq!( + config.get_string("test.key").unwrap().unwrap(), + "test.value" + ); +} + +#[test] +pub fn set_bool() { + let (repo, _tmp) = test_repository(); + let mut config = repo.config().unwrap(); + config.set_bool("test.key", true).unwrap(); + assert!(config.get_bool("test.key").unwrap().unwrap()); +} + +#[test] +pub fn get_string_none() { + let (repo, _tmp) = test_repository(); + let config = repo.config().unwrap(); + assert_eq!(config.get_string("test.key").unwrap(), None); +} + +#[test] +pub fn get_bool_none() { + let (repo, _tmp) = test_repository(); + let config = repo.config().unwrap(); + 
assert_eq!(config.get_bool("test.key").unwrap(), None);
+}
diff --git a/tests/git/credentials.rs b/tests/git/credentials.rs
new file mode 100644
index 000000000..92e5c9e27
--- /dev/null
+++ b/tests/git/credentials.rs
@@ -0,0 +1,312 @@
+use gitbutler::git::credentials::{Credential, Helper, HttpsCredential, SshCredential};
+use gitbutler::{keys, project_repository, projects, users};
+use std::path::PathBuf;
+
+use crate::{temp_dir, test_repository};
+
+#[derive(Default)]
+struct TestCase<'a> {
+    remote_url: &'a str,
+    github_access_token: Option<&'a str>,
+    preferred_key: projects::AuthKey,
+    home_dir: Option<PathBuf>,
+}
+
+impl TestCase<'_> {
+    fn run(&self) -> Vec<(String, Vec<Credential>)> {
+        let local_app_data = temp_dir();
+
+        let users = users::Controller::from_path(&local_app_data);
+        let user = users::User {
+            github_access_token: self.github_access_token.map(ToString::to_string),
+            ..Default::default()
+        };
+        users.set_user(&user).unwrap();
+
+        let keys = keys::Controller::from_path(&local_app_data);
+        let helper = Helper::new(keys, users, self.home_dir.clone());
+
+        let (repo, _tmp) = test_repository();
+        repo.remote(
+            "origin",
+            &self.remote_url.parse().expect("failed to parse remote url"),
+        )
+        .unwrap();
+        let project = projects::Project {
+            path: repo.workdir().unwrap().to_path_buf(),
+            preferred_key: self.preferred_key.clone(),
+            ..Default::default()
+        };
+        let project_repository = project_repository::Repository::open(&project).unwrap();
+
+        let flow = helper.help(&project_repository, "origin").unwrap();
+        flow.into_iter()
+            .map(|(remote, credentials)| {
+                (
+                    remote.url().unwrap().as_ref().unwrap().to_string(),
+                    credentials,
+                )
+            })
+            .collect::<Vec<_>>()
+    }
+}
+
+mod not_github {
+    use super::*;
+
+    mod with_preferred_key {
+        use super::*;
+
+        #[test]
+        fn https() {
+            let test_case = TestCase {
+                remote_url: "https://gitlab.com/test-gitbutler/test.git",
+                github_access_token: Some("token"),
+                preferred_key: projects::AuthKey::Local {
+                    private_key_path:
PathBuf::from("/tmp/id_rsa"), + }, + ..Default::default() + }; + let flow = test_case.run(); + assert_eq!(flow.len(), 1); + assert_eq!( + flow[0].0, + "git@gitlab.com:test-gitbutler/test.git".to_string(), + ); + assert_eq!( + flow[0].1, + vec![Credential::Ssh(SshCredential::Keyfile { + key_path: PathBuf::from("/tmp/id_rsa"), + passphrase: None, + })] + ); + } + + #[test] + fn ssh() { + let test_case = TestCase { + remote_url: "git@gitlab.com:test-gitbutler/test.git", + github_access_token: Some("token"), + preferred_key: projects::AuthKey::Local { + private_key_path: PathBuf::from("/tmp/id_rsa"), + }, + ..Default::default() + }; + let flow = test_case.run(); + assert_eq!(flow.len(), 1); + assert_eq!( + flow[0].0, + "git@gitlab.com:test-gitbutler/test.git".to_string(), + ); + assert_eq!( + flow[0].1, + vec![Credential::Ssh(SshCredential::Keyfile { + key_path: PathBuf::from("/tmp/id_rsa"), + passphrase: None, + })] + ); + } + } + + mod with_github_token { + use super::*; + + #[test] + fn https() { + let test_case = TestCase { + remote_url: "https://gitlab.com/test-gitbutler/test.git", + github_access_token: Some("token"), + ..Default::default() + }; + let flow = test_case.run(); + + assert_eq!(flow.len(), 1); + + assert_eq!( + flow[0].0, + "git@gitlab.com:test-gitbutler/test.git".to_string(), + ); + assert_eq!(flow[0].1.len(), 1); + assert!(matches!( + flow[0].1[0], + Credential::Ssh(SshCredential::GitButlerKey(_)) + )); + } + + #[test] + fn ssh() { + let test_case = TestCase { + remote_url: "git@gitlab.com:test-gitbutler/test.git", + github_access_token: Some("token"), + ..Default::default() + }; + let flow = test_case.run(); + + assert_eq!(flow.len(), 1); + + assert_eq!( + flow[0].0, + "git@gitlab.com:test-gitbutler/test.git".to_string(), + ); + assert_eq!(flow[0].1.len(), 1); + assert!(matches!( + flow[0].1[0], + Credential::Ssh(SshCredential::GitButlerKey(_)) + )); + } + } +} + +mod github { + use super::*; + + mod with_github_token { + use super::*; + + #[test] 
+ fn https() { + let test_case = TestCase { + remote_url: "https://github.com/gitbutlerapp/gitbutler.git", + github_access_token: Some("token"), + ..Default::default() + }; + let flow = test_case.run(); + assert_eq!(flow.len(), 1); + assert_eq!( + flow[0].0, + "https://github.com/gitbutlerapp/gitbutler.git".to_string(), + ); + assert_eq!( + flow[0].1, + vec![Credential::Https(HttpsCredential::GitHubToken( + "token".to_string() + ))] + ); + } + + #[test] + fn ssh() { + let test_case = TestCase { + remote_url: "git@github.com:gitbutlerapp/gitbutler.git", + github_access_token: Some("token"), + ..Default::default() + }; + let flow = test_case.run(); + assert_eq!(flow.len(), 1); + assert_eq!( + flow[0].0, + "https://github.com/gitbutlerapp/gitbutler.git".to_string(), + ); + assert_eq!( + flow[0].1, + vec![Credential::Https(HttpsCredential::GitHubToken( + "token".to_string() + ))] + ); + } + } + + mod without_github_token { + use super::*; + + mod without_preferred_key { + use super::*; + + #[test] + fn https() { + let test_case = TestCase { + remote_url: "https://github.com/gitbutlerapp/gitbutler.git", + ..Default::default() + }; + let flow = test_case.run(); + + assert_eq!(flow.len(), 1); + + assert_eq!( + flow[0].0, + "git@github.com:gitbutlerapp/gitbutler.git".to_string(), + ); + assert_eq!(flow[0].1.len(), 1); + assert!(matches!( + flow[0].1[0], + Credential::Ssh(SshCredential::GitButlerKey(_)) + )); + } + + #[test] + fn ssh() { + let test_case = TestCase { + remote_url: "git@github.com:gitbutlerapp/gitbutler.git", + ..Default::default() + }; + let flow = test_case.run(); + + assert_eq!(flow.len(), 1); + + assert_eq!( + flow[0].0, + "git@github.com:gitbutlerapp/gitbutler.git".to_string(), + ); + assert_eq!(flow[0].1.len(), 1); + assert!(matches!( + flow[0].1[0], + Credential::Ssh(SshCredential::GitButlerKey(_)) + )); + } + } + + mod with_preferred_key { + use super::*; + + #[test] + fn https() { + let test_case = TestCase { + remote_url: 
"https://github.com/gitbutlerapp/gitbutler.git", + github_access_token: Some("token"), + preferred_key: projects::AuthKey::Local { + private_key_path: PathBuf::from("/tmp/id_rsa"), + }, + ..Default::default() + }; + let flow = test_case.run(); + assert_eq!(flow.len(), 1); + assert_eq!( + flow[0].0, + "git@github.com:gitbutlerapp/gitbutler.git".to_string(), + ); + assert_eq!( + flow[0].1, + vec![Credential::Ssh(SshCredential::Keyfile { + key_path: PathBuf::from("/tmp/id_rsa"), + passphrase: None, + })] + ); + } + + #[test] + fn ssh() { + let test_case = TestCase { + remote_url: "git@github.com:gitbutlerapp/gitbutler.git", + github_access_token: Some("token"), + preferred_key: projects::AuthKey::Local { + private_key_path: PathBuf::from("/tmp/id_rsa"), + }, + ..Default::default() + }; + let flow = test_case.run(); + assert_eq!(flow.len(), 1); + assert_eq!( + flow[0].0, + "git@github.com:gitbutlerapp/gitbutler.git".to_string(), + ); + assert_eq!( + flow[0].1, + vec![Credential::Ssh(SshCredential::Keyfile { + key_path: PathBuf::from("/tmp/id_rsa"), + passphrase: None, + })] + ); + } + } + } +} diff --git a/tests/git/diff.rs b/tests/git/diff.rs new file mode 100644 index 000000000..557e9dfd2 --- /dev/null +++ b/tests/git/diff.rs @@ -0,0 +1,457 @@ +use std::{collections::HashMap, path, thread, time}; + +use anyhow::Result; +use pretty_assertions::assert_eq; +use tempfile::TempDir; + +use crate::init_opts_bare; +use crate::{Case, Suite}; +use gitbutler::{ + deltas::{self, operations::Operation}, + projects::{self, ApiProject, ProjectId}, + reader, + sessions::{self, SessionId}, +}; + +fn new_test_remote_repository() -> Result<(git2::Repository, TempDir)> { + let tmp = tempfile::tempdir()?; + let repo_a = git2::Repository::init_opts(&tmp, &init_opts_bare())?; + Ok((repo_a, tmp)) +} + +#[test] +fn get_current_session_writer_should_use_existing_session() -> Result<()> { + let suite = Suite::default(); + let Case { gb_repository, .. 
} = &suite.new_case(); + + let current_session_1 = gb_repository.get_or_create_current_session()?; + let current_session_2 = gb_repository.get_or_create_current_session()?; + assert_eq!(current_session_1.id, current_session_2.id); + + Ok(()) +} + +#[test] +fn must_not_return_init_session() -> Result<()> { + let suite = Suite::default(); + let Case { gb_repository, .. } = &suite.new_case(); + + assert!(gb_repository.get_current_session()?.is_none()); + + let iter = gb_repository.get_sessions_iterator()?; + assert_eq!(iter.count(), 0); + + Ok(()) +} + +#[test] +fn must_not_flush_without_current_session() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project_repository, + .. + } = &suite.new_case(); + + let session = gb_repository.flush(project_repository, None)?; + assert!(session.is_none()); + + let iter = gb_repository.get_sessions_iterator()?; + assert_eq!(iter.count(), 0); + + Ok(()) +} + +#[test] +fn non_empty_repository() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project_repository, + .. + } = &suite.new_case_with_files(HashMap::from([(path::PathBuf::from("test.txt"), "test")])); + + gb_repository.get_or_create_current_session()?; + gb_repository.flush(project_repository, None)?; + + Ok(()) +} + +#[test] +fn must_flush_current_session() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project_repository, + .. + } = &suite.new_case(); + + gb_repository.get_or_create_current_session()?; + + let session = gb_repository.flush(project_repository, None)?; + assert!(session.is_some()); + + let iter = gb_repository.get_sessions_iterator()?; + assert_eq!(iter.count(), 1); + + Ok(()) +} + +#[test] +fn list_deltas_from_current_session() -> Result<()> { + let suite = Suite::default(); + let Case { gb_repository, .. 
} = &suite.new_case(); + + let current_session = gb_repository.get_or_create_current_session()?; + let writer = deltas::Writer::new(gb_repository)?; + writer.write( + "test.txt", + &vec![deltas::Delta { + operations: vec![Operation::Insert((0, "Hello World".to_string()))], + timestamp_ms: 0, + }], + )?; + + let session_reader = sessions::Reader::open(gb_repository, ¤t_session)?; + let deltas_reader = deltas::Reader::new(&session_reader); + let deltas = deltas_reader.read(None)?; + + assert_eq!(deltas.len(), 1); + assert_eq!( + deltas[&path::PathBuf::from("test.txt")][0].operations.len(), + 1 + ); + assert_eq!( + deltas[&path::PathBuf::from("test.txt")][0].operations[0], + Operation::Insert((0, "Hello World".to_string())) + ); + + Ok(()) +} + +#[test] +fn list_deltas_from_flushed_session() { + let suite = Suite::default(); + let Case { + gb_repository, + project_repository, + .. + } = &suite.new_case(); + + let writer = deltas::Writer::new(gb_repository).unwrap(); + writer + .write( + "test.txt", + &vec![deltas::Delta { + operations: vec![Operation::Insert((0, "Hello World".to_string()))], + timestamp_ms: 0, + }], + ) + .unwrap(); + let session = gb_repository.flush(project_repository, None).unwrap(); + + let session_reader = sessions::Reader::open(gb_repository, &session.unwrap()).unwrap(); + let deltas_reader = deltas::Reader::new(&session_reader); + let deltas = deltas_reader.read(None).unwrap(); + + assert_eq!(deltas.len(), 1); + assert_eq!( + deltas[&path::PathBuf::from("test.txt")][0].operations.len(), + 1 + ); + assert_eq!( + deltas[&path::PathBuf::from("test.txt")][0].operations[0], + Operation::Insert((0, "Hello World".to_string())) + ); +} + +#[test] +fn list_files_from_current_session() { + let suite = Suite::default(); + let Case { gb_repository, .. 
} = &suite.new_case_with_files(HashMap::from([( + path::PathBuf::from("test.txt"), + "Hello World", + )])); + + let current = gb_repository.get_or_create_current_session().unwrap(); + let reader = sessions::Reader::open(gb_repository, ¤t).unwrap(); + let files = reader.files(None).unwrap(); + + assert_eq!(files.len(), 1); + assert_eq!( + files[&path::PathBuf::from("test.txt")], + reader::Content::UTF8("Hello World".to_string()) + ); +} + +#[test] +fn list_files_from_flushed_session() { + let suite = Suite::default(); + let Case { + gb_repository, + project_repository, + .. + } = &suite.new_case_with_files(HashMap::from([( + path::PathBuf::from("test.txt"), + "Hello World", + )])); + + gb_repository.get_or_create_current_session().unwrap(); + let session = gb_repository + .flush(project_repository, None) + .unwrap() + .unwrap(); + let reader = sessions::Reader::open(gb_repository, &session).unwrap(); + let files = reader.files(None).unwrap(); + + assert_eq!(files.len(), 1); + assert_eq!( + files[&path::PathBuf::from("test.txt")], + reader::Content::UTF8("Hello World".to_string()) + ); +} + +#[tokio::test] +async fn remote_syncronization() { + // first, crate a remote, pretending it's a cloud + let (cloud, _tmp) = new_test_remote_repository().unwrap(); + let api_project = ApiProject { + name: "test-sync".to_string(), + description: None, + repository_id: "123".to_string(), + git_url: cloud.path().to_str().unwrap().to_string(), + code_git_url: None, + created_at: 0_i32.to_string(), + updated_at: 0_i32.to_string(), + sync: true, + }; + + let suite = Suite::default(); + let user = suite.sign_in(); + + // create first local project, add files, deltas and flush a session + let case_one = suite.new_case_with_files(HashMap::from([( + path::PathBuf::from("test.txt"), + "Hello World", + )])); + suite + .projects + .update(&projects::UpdateRequest { + id: case_one.project.id, + api: Some(api_project.clone()), + ..Default::default() + }) + .await + .unwrap(); + let case_one = 
case_one.refresh(); + + let writer = deltas::Writer::new(&case_one.gb_repository).unwrap(); + writer + .write( + "test.txt", + &vec![deltas::Delta { + operations: vec![Operation::Insert((0, "Hello World".to_string()))], + timestamp_ms: 0, + }], + ) + .unwrap(); + let session_one = case_one + .gb_repository + .flush(&case_one.project_repository, Some(&user)) + .unwrap() + .unwrap(); + case_one.gb_repository.push(Some(&user)).unwrap(); + + // create second local project, fetch it and make sure session is there + let case_two = suite.new_case(); + suite + .projects + .update(&projects::UpdateRequest { + id: case_two.project.id, + api: Some(api_project.clone()), + ..Default::default() + }) + .await + .unwrap(); + let case_two = case_two.refresh(); + + case_two.gb_repository.fetch(Some(&user)).unwrap(); + + // now it should have the session from the first local project synced + let sessions_two = case_two + .gb_repository + .get_sessions_iterator() + .unwrap() + .map(Result::unwrap) + .collect::>(); + assert_eq!(sessions_two.len(), 1); + assert_eq!(sessions_two[0].id, session_one.id); + + let session_reader = sessions::Reader::open(&case_two.gb_repository, &sessions_two[0]).unwrap(); + let deltas_reader = deltas::Reader::new(&session_reader); + let deltas = deltas_reader.read(None).unwrap(); + let files = session_reader.files(None).unwrap(); + assert_eq!(deltas.len(), 1); + assert_eq!(files.len(), 1); + assert_eq!( + files[&path::PathBuf::from("test.txt")], + reader::Content::UTF8("Hello World".to_string()) + ); + assert_eq!( + deltas[&path::PathBuf::from("test.txt")], + vec![deltas::Delta { + operations: vec![Operation::Insert((0, "Hello World".to_string()))], + timestamp_ms: 0, + }] + ); +} + +#[tokio::test] +async fn remote_sync_order() { + // first, crate a remote, pretending it's a cloud + let (cloud, _tmp) = new_test_remote_repository().unwrap(); + let api_project = projects::ApiProject { + name: "test-sync".to_string(), + description: None, + repository_id: 
"123".to_string(), + git_url: cloud.path().to_str().unwrap().to_string(), + code_git_url: None, + created_at: 0_i32.to_string(), + updated_at: 0_i32.to_string(), + sync: true, + }; + + let suite = Suite::default(); + + let case_one = suite.new_case(); + suite + .projects + .update(&projects::UpdateRequest { + id: case_one.project.id, + api: Some(api_project.clone()), + ..Default::default() + }) + .await + .unwrap(); + let case_one = case_one.refresh(); + + let case_two = suite.new_case(); + suite + .projects + .update(&projects::UpdateRequest { + id: case_two.project.id, + api: Some(api_project.clone()), + ..Default::default() + }) + .await + .unwrap(); + let case_two = case_two.refresh(); + + let user = suite.sign_in(); + + // create session in the first project + case_one + .gb_repository + .get_or_create_current_session() + .unwrap(); + let session_one_first = case_one + .gb_repository + .flush(&case_one.project_repository, Some(&user)) + .unwrap() + .unwrap(); + case_one.gb_repository.push(Some(&user)).unwrap(); + + thread::sleep(time::Duration::from_secs(1)); + + // create session in the second project + case_two + .gb_repository + .get_or_create_current_session() + .unwrap(); + let session_two_first = case_two + .gb_repository + .flush(&case_two.project_repository, Some(&user)) + .unwrap() + .unwrap(); + case_two.gb_repository.push(Some(&user)).unwrap(); + + thread::sleep(time::Duration::from_secs(1)); + + // create second session in the first project + case_one + .gb_repository + .get_or_create_current_session() + .unwrap(); + let session_one_second = case_one + .gb_repository + .flush(&case_one.project_repository, Some(&user)) + .unwrap() + .unwrap(); + case_one.gb_repository.push(Some(&user)).unwrap(); + + thread::sleep(time::Duration::from_secs(1)); + + // create second session in the second project + case_two + .gb_repository + .get_or_create_current_session() + .unwrap(); + let session_two_second = case_two + .gb_repository + 
.flush(&case_two.project_repository, Some(&user)) + .unwrap() + .unwrap(); + case_two.gb_repository.push(Some(&user)).unwrap(); + + case_one.gb_repository.fetch(Some(&user)).unwrap(); + let sessions_one = case_one + .gb_repository + .get_sessions_iterator() + .unwrap() + .map(Result::unwrap) + .collect::>(); + + case_two.gb_repository.fetch(Some(&user)).unwrap(); + let sessions_two = case_two + .gb_repository + .get_sessions_iterator() + .unwrap() + .map(Result::unwrap) + .collect::>(); + + // make sure the sessions are the same on both repos + assert_eq!(sessions_one.len(), 4); + assert_eq!(sessions_two, sessions_one); + + assert_eq!(sessions_one[0].id, session_two_second.id); + assert_eq!(sessions_one[1].id, session_one_second.id); + assert_eq!(sessions_one[2].id, session_two_first.id); + assert_eq!(sessions_one[3].id, session_one_first.id); +} + +#[test] +fn gitbutler_file() { + let suite = Suite::default(); + let Case { + gb_repository, + project_repository, + .. + } = &suite.new_case(); + + let session = gb_repository.get_or_create_current_session().unwrap(); + + let gitbutler_file_path = project_repository.path().join(".git/gitbutler.json"); + assert!(gitbutler_file_path.exists()); + + let file_content: serde_json::Value = + serde_json::from_str(&std::fs::read_to_string(&gitbutler_file_path).unwrap()).unwrap(); + let sid: SessionId = file_content["sessionId"].as_str().unwrap().parse().unwrap(); + assert_eq!(sid, session.id); + + let pid: ProjectId = file_content["repositoryId"] + .as_str() + .unwrap() + .parse() + .unwrap(); + assert_eq!(pid, project_repository.project().id); +} diff --git a/tests/git/mod.rs b/tests/git/mod.rs new file mode 100644 index 000000000..23bc6d0b7 --- /dev/null +++ b/tests/git/mod.rs @@ -0,0 +1,3 @@ +mod config; +mod credentials; +mod diff; diff --git a/tests/keys/mod.rs b/tests/keys/mod.rs new file mode 100644 index 000000000..4068cee55 --- /dev/null +++ b/tests/keys/mod.rs @@ -0,0 +1,65 @@ +use gitbutler::keys::{PrivateKey, 
PublicKey}; + +mod controller { + #[cfg(not(target_os = "windows"))] + mod not_windows { + use gitbutler::keys::storage::Storage; + use gitbutler::keys::Controller; + use std::fs; + #[cfg(target_family = "unix")] + use std::os::unix::prelude::*; + + use crate::Suite; + + #[test] + fn get_or_create() { + let suite = Suite::default(); + let controller = Controller::new(Storage::from_path(suite.local_app_data())); + + let once = controller.get_or_create().unwrap(); + let twice = controller.get_or_create().unwrap(); + assert_eq!(once, twice); + + // check permissions of the private key + let permissions = fs::metadata(suite.local_app_data().join("keys/ed25519")) + .unwrap() + .permissions(); + let perms = format!("{:o}", permissions.mode()); + assert_eq!(perms, "100600"); + } + } +} + +#[test] +fn to_from_string_private() { + let private_key = PrivateKey::generate(); + let serialized = private_key.to_string(); + let deserialized: PrivateKey = serialized.parse().unwrap(); + assert_eq!(private_key, deserialized); +} + +#[test] +fn to_from_string_public() { + let private_key = PrivateKey::generate(); + let public_key = private_key.public_key(); + let serialized = public_key.to_string(); + let deserialized: PublicKey = serialized.parse().unwrap(); + assert_eq!(public_key, deserialized); +} + +#[test] +fn serde_private() { + let private_key = PrivateKey::generate(); + let serialized = serde_json::to_string(&private_key).unwrap(); + let deserialized: PrivateKey = serde_json::from_str(&serialized).unwrap(); + assert_eq!(private_key, deserialized); +} + +#[test] +fn serde_public() { + let private_key = PrivateKey::generate(); + let public_key = private_key.public_key(); + let serialized = serde_json::to_string(&public_key).unwrap(); + let deserialized: PublicKey = serde_json::from_str(&serialized).unwrap(); + assert_eq!(public_key, deserialized); +} diff --git a/tests/lock/mod.rs b/tests/lock/mod.rs new file mode 100644 index 000000000..071f42992 --- /dev/null +++ 
b/tests/lock/mod.rs @@ -0,0 +1,91 @@ +use gitbutler::lock::Dir; + +use crate::temp_dir; + +#[tokio::test] +async fn lock_same_instance() { + let dir_path = temp_dir(); + std::fs::write(dir_path.path().join("file.txt"), "").unwrap(); + let dir = Dir::new(dir_path.path()).unwrap(); + + let (tx, rx) = std::sync::mpsc::sync_channel(1); + + // spawn a task that will signal right after aquireing the lock + let _ = tokio::spawn({ + let dir = dir.clone(); + async move { + dir.batch(|root| { + tx.send(()).unwrap(); + assert_eq!( + std::fs::read_to_string(root.join("file.txt")).unwrap(), + String::new() + ); + std::fs::write(root.join("file.txt"), "1") + }) + } + }) + .await + .unwrap(); + + // then we wait until the lock is aquired + rx.recv().unwrap(); + + // and immidiately try to lock again + dir.batch(|root| { + assert_eq!(std::fs::read_to_string(root.join("file.txt")).unwrap(), "1"); + std::fs::write(root.join("file.txt"), "2") + }) + .unwrap() + .unwrap(); + + assert_eq!( + std::fs::read_to_string(dir_path.path().join("file.txt")).unwrap(), + "2" + ); +} + +#[tokio::test] +async fn lock_different_instances() { + let dir_path = temp_dir(); + std::fs::write(dir_path.path().join("file.txt"), "").unwrap(); + + let (tx, rx) = std::sync::mpsc::sync_channel(1); + + // spawn a task that will signal right after aquireing the lock + let _ = tokio::spawn({ + let dir_path = dir_path.path().to_owned(); + async move { + // one dir instance is created on a separate thread + let dir = Dir::new(&dir_path).unwrap(); + dir.batch(|root| { + tx.send(()).unwrap(); + assert_eq!( + std::fs::read_to_string(root.join("file.txt")).unwrap(), + String::new() + ); + std::fs::write(root.join("file.txt"), "1") + }) + } + }) + .await + .unwrap(); + + // another dir instance is created on the main thread + let dir = Dir::new(&dir_path).unwrap(); + + // then we wait until the lock is aquired + rx.recv().unwrap(); + + // and immidiately try to lock again + dir.batch(|root| { + 
assert_eq!(std::fs::read_to_string(root.join("file.txt")).unwrap(), "1"); + std::fs::write(root.join("file.txt"), "2") + }) + .unwrap() + .unwrap(); + + assert_eq!( + std::fs::read_to_string(dir_path.path().join("file.txt")).unwrap(), + "2" + ); +} diff --git a/tests/reader/mod.rs b/tests/reader/mod.rs new file mode 100644 index 000000000..418d5fbf8 --- /dev/null +++ b/tests/reader/mod.rs @@ -0,0 +1,183 @@ +use gitbutler::reader::{CommitReader, Content, Reader}; +use std::fs; +use std::path::Path; + +use crate::{commit_all, temp_dir, test_repository}; +use anyhow::Result; + +#[test] +fn directory_reader_read_file() -> Result<()> { + let dir = temp_dir(); + + let file_path = Path::new("test.txt"); + fs::write(dir.path().join(file_path), "test")?; + + let reader = Reader::open(dir.path())?; + assert_eq!(reader.read(file_path)?, Content::UTF8("test".to_string())); + + Ok(()) +} + +#[test] +fn commit_reader_read_file() -> Result<()> { + let (repository, _tmp) = test_repository(); + + let file_path = Path::new("test.txt"); + fs::write(repository.path().parent().unwrap().join(file_path), "test")?; + + let oid = commit_all(&repository); + + fs::write(repository.path().parent().unwrap().join(file_path), "test2")?; + + let reader = Reader::from_commit(&repository, &repository.find_commit(oid)?)?; + assert_eq!(reader.read(file_path)?, Content::UTF8("test".to_string())); + + Ok(()) +} + +#[test] +fn reader_list_files_should_return_relative() -> Result<()> { + let dir = temp_dir(); + + fs::write(dir.path().join("test1.txt"), "test")?; + fs::create_dir_all(dir.path().join("dir"))?; + fs::write(dir.path().join("dir").join("test.txt"), "test")?; + + let reader = Reader::open(dir.path())?; + let files = reader.list_files(Path::new("dir"))?; + assert_eq!(files.len(), 1); + assert!(files.contains(&Path::new("test.txt").to_path_buf())); + + Ok(()) +} + +#[test] +fn reader_list_files() -> Result<()> { + let dir = temp_dir(); + + fs::write(dir.path().join("test.txt"), "test")?; + 
fs::create_dir_all(dir.path().join("dir"))?; + fs::write(dir.path().join("dir").join("test.txt"), "test")?; + + let reader = Reader::open(dir.path())?; + let files = reader.list_files(Path::new(""))?; + assert_eq!(files.len(), 2); + assert!(files.contains(&Path::new("test.txt").to_path_buf())); + assert!(files.contains(&Path::new("dir/test.txt").to_path_buf())); + + Ok(()) +} + +#[test] +fn commit_reader_list_files_should_return_relative() -> Result<()> { + let (repository, _tmp) = test_repository(); + + fs::write( + repository.path().parent().unwrap().join("test1.txt"), + "test", + )?; + fs::create_dir_all(repository.path().parent().unwrap().join("dir"))?; + fs::write( + repository + .path() + .parent() + .unwrap() + .join("dir") + .join("test.txt"), + "test", + )?; + + let oid = commit_all(&repository); + + fs::remove_dir_all(repository.path().parent().unwrap().join("dir"))?; + + let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?; + let files = reader.list_files(Path::new("dir"))?; + assert_eq!(files.len(), 1); + assert!(files.contains(&Path::new("test.txt").to_path_buf())); + + Ok(()) +} + +#[test] +fn commit_reader_list_files() -> Result<()> { + let (repository, _tmp) = test_repository(); + + fs::write(repository.path().parent().unwrap().join("test.txt"), "test")?; + fs::create_dir_all(repository.path().parent().unwrap().join("dir"))?; + fs::write( + repository + .path() + .parent() + .unwrap() + .join("dir") + .join("test.txt"), + "test", + )?; + + let oid = commit_all(&repository); + + fs::remove_dir_all(repository.path().parent().unwrap().join("dir"))?; + + let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?; + let files = reader.list_files(Path::new(""))?; + assert_eq!(files.len(), 2); + assert!(files.contains(&Path::new("test.txt").to_path_buf())); + assert!(files.contains(&Path::new("dir/test.txt").to_path_buf())); + + Ok(()) +} + +#[test] +fn directory_reader_exists() -> Result<()> { + let dir = 
temp_dir(); + + fs::write(dir.path().join("test.txt"), "test")?; + + let reader = Reader::open(dir.path())?; + assert!(reader.exists(Path::new("test.txt"))?); + assert!(!reader.exists(Path::new("test2.txt"))?); + + Ok(()) +} + +#[test] +fn commit_reader_exists() -> Result<()> { + let (repository, _tmp) = test_repository(); + + fs::write(repository.path().parent().unwrap().join("test.txt"), "test")?; + + let oid = commit_all(&repository); + + fs::remove_file(repository.path().parent().unwrap().join("test.txt"))?; + + let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?; + assert!(reader.exists(Path::new("test.txt"))); + assert!(!reader.exists(Path::new("test2.txt"))); + + Ok(()) +} + +#[test] +fn from_bytes() { + for (bytes, expected) in [ + ("test".as_bytes(), Content::UTF8("test".to_string())), + (&[0, 159, 146, 150, 159, 146, 150], Content::Binary), + ] { + assert_eq!(Content::from(bytes), expected); + } +} + +#[test] +fn serialize_content() { + for (content, expected) in [ + ( + Content::UTF8("test".to_string()), + r#"{"type":"utf8","value":"test"}"#, + ), + (Content::Binary, r#"{"type":"binary"}"#), + (Content::Large, r#"{"type":"large"}"#), + ] { + assert_eq!(serde_json::to_string(&content).unwrap(), expected); + } +} diff --git a/tests/sessions/database.rs b/tests/sessions/database.rs new file mode 100644 index 000000000..70cb8310e --- /dev/null +++ b/tests/sessions/database.rs @@ -0,0 +1,84 @@ +use crate::test_database; +use gitbutler::projects::ProjectId; +use gitbutler::sessions::{session, Database, Session, SessionId}; + +#[test] +fn insert_query() -> anyhow::Result<()> { + let (db, _tmp) = test_database(); + println!("0"); + let database = Database::new(db); + println!("1"); + + let project_id = ProjectId::generate(); + let session1 = Session { + id: SessionId::generate(), + hash: None, + meta: session::Meta { + branch: None, + commit: None, + start_timestamp_ms: 1, + last_timestamp_ms: 2, + }, + }; + let session2 = 
session::Session { + id: SessionId::generate(), + hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()), + meta: session::Meta { + branch: Some("branch2".to_string()), + commit: Some("commit2".to_string()), + start_timestamp_ms: 3, + last_timestamp_ms: 4, + }, + }; + let sessions = vec![&session1, &session2]; + + database.insert(&project_id, &sessions)?; + + assert_eq!( + database.list_by_project_id(&project_id, None)?, + vec![session2.clone(), session1.clone()] + ); + assert_eq!(database.get_by_id(&session1.id)?.unwrap(), session1); + assert_eq!(database.get_by_id(&session2.id)?.unwrap(), session2); + assert_eq!(database.get_by_id(&SessionId::generate())?, None); + + Ok(()) +} + +#[test] +fn update() -> anyhow::Result<()> { + let (db, _tmp) = test_database(); + let database = Database::new(db); + + let project_id = ProjectId::generate(); + let session = session::Session { + id: SessionId::generate(), + hash: None, + meta: session::Meta { + branch: None, + commit: None, + start_timestamp_ms: 1, + last_timestamp_ms: 2, + }, + }; + let session_updated = session::Session { + id: session.id, + hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()), + meta: session::Meta { + branch: Some("branch2".to_string()), + commit: Some("commit2".to_string()), + start_timestamp_ms: 3, + last_timestamp_ms: 4, + }, + }; + database.insert(&project_id, &[&session])?; + database.insert(&project_id, &[&session_updated])?; + + assert_eq!( + database.list_by_project_id(&project_id, None)?, + vec![session_updated.clone()] + ); + assert_eq!(database.get_by_id(&session.id)?.unwrap(), session_updated); + + Ok(()) +} diff --git a/tests/sessions/mod.rs b/tests/sessions/mod.rs new file mode 100644 index 000000000..6671f7a02 --- /dev/null +++ b/tests/sessions/mod.rs @@ -0,0 +1,106 @@ +mod database; + +use anyhow::Result; + +use crate::{Case, Suite}; +use gitbutler::sessions::{self, session::SessionId}; + +#[test] +fn should_not_write_session_with_hash() { + let 
suite = Suite::default(); + let Case { gb_repository, .. } = &suite.new_case(); + + let session = sessions::Session { + id: SessionId::generate(), + hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()), + meta: sessions::Meta { + start_timestamp_ms: 0, + last_timestamp_ms: 1, + branch: Some("branch".to_string()), + commit: Some("commit".to_string()), + }, + }; + + assert!(sessions::Writer::new(gb_repository) + .unwrap() + .write(&session) + .is_err()); +} + +#[test] +fn should_write_full_session() -> Result<()> { + let suite = Suite::default(); + let Case { gb_repository, .. } = &suite.new_case(); + + let session = sessions::Session { + id: SessionId::generate(), + hash: None, + meta: sessions::Meta { + start_timestamp_ms: 0, + last_timestamp_ms: 1, + branch: Some("branch".to_string()), + commit: Some("commit".to_string()), + }, + }; + + sessions::Writer::new(gb_repository)?.write(&session)?; + + assert_eq!( + std::fs::read_to_string(gb_repository.session_path().join("meta/id"))?, + session.id.to_string() + ); + assert_eq!( + std::fs::read_to_string(gb_repository.session_path().join("meta/commit"))?, + "commit" + ); + assert_eq!( + std::fs::read_to_string(gb_repository.session_path().join("meta/branch"))?, + "branch" + ); + assert_eq!( + std::fs::read_to_string(gb_repository.session_path().join("meta/start"))?, + "0" + ); + assert_ne!( + std::fs::read_to_string(gb_repository.session_path().join("meta/last"))?, + "1" + ); + + Ok(()) +} + +#[test] +fn should_write_partial_session() -> Result<()> { + let suite = Suite::default(); + let Case { gb_repository, .. 
} = &suite.new_case(); + + let session = sessions::Session { + id: SessionId::generate(), + hash: None, + meta: sessions::Meta { + start_timestamp_ms: 0, + last_timestamp_ms: 1, + branch: None, + commit: None, + }, + }; + + sessions::Writer::new(gb_repository)?.write(&session)?; + + assert_eq!( + std::fs::read_to_string(gb_repository.session_path().join("meta/id"))?, + session.id.to_string() + ); + assert!(!gb_repository.session_path().join("meta/commit").exists()); + assert!(!gb_repository.session_path().join("meta/branch").exists()); + assert_eq!( + std::fs::read_to_string(gb_repository.session_path().join("meta/start"))?, + "0" + ); + assert_ne!( + std::fs::read_to_string(gb_repository.session_path().join("meta/last"))?, + "1" + ); + + Ok(()) +} diff --git a/tests/suite/gb_repository.rs b/tests/suite/gb_repository.rs new file mode 100644 index 000000000..d02414c83 --- /dev/null +++ b/tests/suite/gb_repository.rs @@ -0,0 +1,149 @@ +use crate::common::{paths, TestProject}; +use gitbutler::{gb_repository, git, project_repository, projects}; +use std::path; + +mod init { + use super::*; + + #[test] + fn handle_file_symlink() { + let test_project = TestProject::default(); + + let data_dir = paths::data_dir(); + let projects = projects::Controller::from_path(&data_dir); + + let project = projects + .add(test_project.path()) + .expect("failed to add project"); + + std::fs::write(project.path.join("file"), "content").unwrap(); + std::fs::hard_link(project.path.join("file"), project.path.join("link")).unwrap(); + + let project_repository = project_repository::Repository::open(&project).unwrap(); + + gb_repository::Repository::open(data_dir.path(), &project_repository, None).unwrap(); + } + + #[test] + #[cfg(target_family = "unix")] + fn handle_dir_symlink() { + let test_project = TestProject::default(); + + let data_dir = paths::data_dir(); + let projects = projects::Controller::from_path(&data_dir); + + let project = projects + .add(test_project.path()) + 
.expect("failed to add project"); + + std::fs::create_dir_all(project.path.join("dir")).unwrap(); + std::fs::write(project.path.join("dir/file"), "content").unwrap(); + std::os::unix::fs::symlink(project.path.join("dir"), project.path.join("dir_link")) + .unwrap(); + + let project_repository = project_repository::Repository::open(&project).unwrap(); + + gb_repository::Repository::open(data_dir.path(), &project_repository, None).unwrap(); + } + + #[test] + #[cfg(target_family = "unix")] + fn handle_dir_symlink_symlink() { + let test_project = TestProject::default(); + + let data_dir = paths::data_dir(); + let projects = projects::Controller::from_path(&data_dir); + + let project = projects + .add(test_project.path()) + .expect("failed to add project"); + + std::fs::create_dir_all(project.path.join("dir")).unwrap(); + std::fs::write(project.path.join("dir/file"), "content").unwrap(); + std::os::unix::fs::symlink(project.path.join("dir"), project.path.join("dir_link")) + .unwrap(); + std::os::unix::fs::symlink( + project.path.join("dir_link"), + project.path.join("link_link"), + ) + .unwrap(); + + let project_repository = project_repository::Repository::open(&project).unwrap(); + + gb_repository::Repository::open(data_dir.path(), &project_repository, None).unwrap(); + } +} + +mod flush { + use super::*; + + #[test] + fn handle_file_symlink() { + let test_project = TestProject::default(); + + let data_dir = paths::data_dir(); + let projects = projects::Controller::from_path(&data_dir); + + let project = projects + .add(test_project.path()) + .expect("failed to add project"); + + let project_repository = project_repository::Repository::open(&project).unwrap(); + + let gb_repo = + gb_repository::Repository::open(data_dir.path(), &project_repository, None).unwrap(); + + std::fs::write(project.path.join("file"), "content").unwrap(); + std::fs::hard_link(project.path.join("file"), project.path.join("link")).unwrap(); + + gb_repo.flush(&project_repository, None).unwrap(); + 
} + + #[test] + #[cfg(target_family = "unix")] + fn handle_dir_symlink() { + let test_project = TestProject::default(); + + let data_dir = paths::data_dir(); + let projects = projects::Controller::from_path(&data_dir); + + let project = projects + .add(test_project.path()) + .expect("failed to add project"); + + let project_repository = project_repository::Repository::open(&project).unwrap(); + + let gb_repo = + gb_repository::Repository::open(data_dir.path(), &project_repository, None).unwrap(); + + std::fs::create_dir_all(project.path.join("dir")).unwrap(); + std::fs::write(project.path.join("dir/file"), "content").unwrap(); + std::os::unix::fs::symlink(project.path.join("dir"), project.path.join("dir_link")) + .unwrap(); + + gb_repo.flush(&project_repository, None).unwrap(); + } + + #[test] + fn handle_submodules() { + let test_project = TestProject::default(); + + let data_dir = paths::data_dir(); + let projects = projects::Controller::from_path(&data_dir); + + let project = projects + .add(test_project.path()) + .expect("failed to add project"); + + let project_repository = project_repository::Repository::open(&project).unwrap(); + + let gb_repo = + gb_repository::Repository::open(data_dir.path(), &project_repository, None).unwrap(); + + let project = TestProject::default(); + let submodule_url: git::Url = project.path().display().to_string().parse().unwrap(); + test_project.add_submodule(&submodule_url, path::Path::new("submodule")); + + gb_repo.flush(&project_repository, None).unwrap(); + } +} diff --git a/tests/suite/projects.rs b/tests/suite/projects.rs new file mode 100644 index 000000000..5d30240c2 --- /dev/null +++ b/tests/suite/projects.rs @@ -0,0 +1,71 @@ +use gitbutler::projects::Controller; +use tempfile::TempDir; + +use crate::common::{self, paths}; + +pub fn new() -> (Controller, TempDir) { + let data_dir = paths::data_dir(); + let controller = Controller::from_path(&data_dir); + (controller, data_dir) +} + +mod add { + use super::*; + + #[test] + 
fn success() { + let (controller, _tmp) = new(); + let repository = common::TestProject::default(); + let path = repository.path(); + let project = controller.add(path).unwrap(); + assert_eq!(project.path, path); + assert_eq!(project.title, path.iter().last().unwrap().to_str().unwrap()); + } + + mod error { + use gitbutler::projects::AddError; + + use super::*; + + #[test] + fn missing() { + let (controller, _tmp) = new(); + let tmp = tempfile::tempdir().unwrap(); + assert!(matches!( + controller.add(tmp.path().join("missing")), + Err(AddError::PathNotFound) + )); + } + + #[test] + fn not_git() { + let (controller, _tmp) = new(); + let tmp = tempfile::tempdir().unwrap(); + let path = tmp.path(); + std::fs::write(path.join("file.txt"), "hello world").unwrap(); + assert!(matches!( + controller.add(path), + Err(AddError::NotAGitRepository) + )); + } + + #[test] + fn empty() { + let (controller, _tmp) = new(); + let tmp = tempfile::tempdir().unwrap(); + assert!(matches!( + controller.add(tmp.path()), + Err(AddError::NotAGitRepository) + )); + } + + #[test] + fn twice() { + let (controller, _tmp) = new(); + let repository = common::TestProject::default(); + let path = repository.path(); + controller.add(path).unwrap(); + assert!(matches!(controller.add(path), Err(AddError::AlreadyExists))); + } + } +} diff --git a/tests/suite/virtual_branches/amend.rs b/tests/suite/virtual_branches/amend.rs new file mode 100644 index 000000000..3465e904f --- /dev/null +++ b/tests/suite/virtual_branches/amend.rs @@ -0,0 +1,352 @@ +use super::*; + +#[tokio::test] +async fn to_default_target() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + // amend without head commit + fs::write(repository.path().join("file2.txt"), "content").unwrap(); + let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); + assert!(matches!( + controller + .amend(project_id, &branch_id, &to_amend) + .await + .unwrap_err(), + ControllerError::Action(errors::AmendError::BranchHasNoCommits) + )); +} + +#[tokio::test] +async fn forcepush_allowed() { + let Test { + repository, + project_id, + controller, + projects, + .. + } = &Test::default(); + + projects + .update(&projects::UpdateRequest { + id: *project_id, + ok_with_force_push: Some(false), + ..Default::default() + }) + .await + .unwrap(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + projects + .update(&projects::UpdateRequest { + id: *project_id, + ok_with_force_push: Some(true), + ..Default::default() + }) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // create commit + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap(); + }; + + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + { + // amend another hunk + fs::write(repository.path().join("file2.txt"), "content2").unwrap(); + let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); + controller + .amend(project_id, &branch_id, &to_amend) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + 
.into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert!(branch.requires_force); + assert_eq!(branch.commits.len(), 1); + assert_eq!(branch.files.len(), 0); + assert_eq!(branch.commits[0].files.len(), 2); + } +} + +#[tokio::test] +async fn forcepush_forbidden() { + let Test { + repository, + project_id, + controller, + projects, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + projects + .update(&projects::UpdateRequest { + id: *project_id, + ok_with_force_push: Some(false), + ..Default::default() + }) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // create commit + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap(); + }; + + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + { + fs::write(repository.path().join("file2.txt"), "content2").unwrap(); + let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); + assert!(matches!( + controller + .amend(project_id, &branch_id, &to_amend) + .await + .unwrap_err(), + ControllerError::Action(errors::AmendError::ForcePushNotAllowed(_)) + )); + } +} + +#[tokio::test] +async fn non_locked_hunk() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // create commit + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.commits.len(), 1); + assert_eq!(branch.files.len(), 0); + assert_eq!(branch.commits[0].files.len(), 1); + }; + + { + // amend another hunk + fs::write(repository.path().join("file2.txt"), "content2").unwrap(); + let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); + controller + .amend(project_id, &branch_id, &to_amend) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.commits.len(), 1); + assert_eq!(branch.files.len(), 0); + assert_eq!(branch.commits[0].files.len(), 2); + } +} + +#[tokio::test] +async fn locked_hunk() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // create commit + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.commits.len(), 1); + assert_eq!(branch.files.len(), 0); + assert_eq!(branch.commits[0].files.len(), 1); + assert_eq!( + branch.commits[0].files[0].hunks[0].diff, + "@@ -0,0 +1 @@\n+content\n\\ No newline at end of file\n" + ); + }; + + { + // amend another hunk + fs::write(repository.path().join("file.txt"), "more content").unwrap(); + let to_amend: branch::BranchOwnershipClaims = "file.txt:1-2".parse().unwrap(); + controller + .amend(project_id, &branch_id, &to_amend) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + assert_eq!(branch.commits.len(), 1); + assert_eq!(branch.files.len(), 0); + assert_eq!(branch.commits[0].files.len(), 1); + assert_eq!( + branch.commits[0].files[0].hunks[0].diff, + "@@ -0,0 +1 @@\n+more content\n\\ No newline at end of file\n" + ); + } +} + +#[tokio::test] +async fn non_existing_ownership() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // create commit + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.commits.len(), 1); + assert_eq!(branch.files.len(), 0); + assert_eq!(branch.commits[0].files.len(), 1); + }; + + { + // amend non existing hunk + let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); + assert!(matches!( + controller + .amend(project_id, &branch_id, &to_amend) + .await + .unwrap_err(), + ControllerError::Action(errors::AmendError::TargetOwnerhshipNotFound(_)) + )); + } +} diff --git a/tests/suite/virtual_branches/apply_virtual_branch.rs b/tests/suite/virtual_branches/apply_virtual_branch.rs new file mode 100644 index 000000000..b422439fa --- /dev/null +++ b/tests/suite/virtual_branches/apply_virtual_branch.rs @@ -0,0 +1,278 @@ +use super::*; + +#[tokio::test] +async fn deltect_conflict() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = { + let branch1_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + fs::write(repository.path().join("file.txt"), "branch one").unwrap(); + + branch1_id + }; + + // unapply first vbranch + controller + .unapply_virtual_branch(project_id, &branch1_id) + .await + .unwrap(); + + { + // create another vbranch that conflicts with the first one + controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + fs::write(repository.path().join("file.txt"), "branch two").unwrap(); + } + + { + // it should not be possible to apply the first branch + assert!(!controller + .can_apply_virtual_branch(project_id, &branch1_id) + .await + .unwrap()); + + assert!(matches!( + controller + .apply_virtual_branch(project_id, &branch1_id) + .await, + Err(ControllerError::Action( + errors::ApplyBranchError::BranchConflicts(_) + )) + )); + } +} + +#[tokio::test] +async fn rebase_commit() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "one").unwrap(); + fs::write(repository.path().join("another_file.txt"), "").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "two").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = { + // create a branch with some commited work + let branch1_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + fs::write(repository.path().join("another_file.txt"), "virtual").unwrap(); + + controller + .create_commit(project_id, &branch1_id, "virtual commit", None, false) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert!(branches[0].active); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + + branch1_id + }; + + { + // unapply first vbranch + controller + .unapply_virtual_branch(project_id, &branch1_id) + .await + .unwrap(); + + assert_eq!( + fs::read_to_string(repository.path().join("another_file.txt")).unwrap(), + "" + ); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "one" + ); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + assert!(!branches[0].active); + } + + { + // fetch remote + controller.update_base_branch(project_id).await.unwrap(); + + // branch is stil unapplied + let (branches, _, 
_) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + assert!(!branches[0].active); + assert!(!branches[0].conflicted); + + assert_eq!( + fs::read_to_string(repository.path().join("another_file.txt")).unwrap(), + "" + ); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "two" + ); + } + + { + // apply first vbranch again + controller + .apply_virtual_branch(project_id, &branch1_id) + .await + .unwrap(); + + // it should be rebased + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + + assert_eq!( + fs::read_to_string(repository.path().join("another_file.txt")).unwrap(), + "virtual" + ); + + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "two" + ); + } +} + +#[tokio::test] +async fn rebase_work() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = { + // make a branch with some work + let branch1_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + fs::write(repository.path().join("another_file.txt"), "").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert!(branches[0].active); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + + branch1_id + }; + + { + // unapply first vbranch + controller + .unapply_virtual_branch(project_id, &branch1_id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert!(!branches[0].active); + + assert!(!repository.path().join("another_file.txt").exists()); + assert!(!repository.path().join("file.txt").exists()); + } + + { + // fetch remote + controller.update_base_branch(project_id).await.unwrap(); + + // first branch is stil unapplied + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert!(!branches[0].active); + assert!(!branches[0].conflicted); + + 
assert!(!repository.path().join("another_file.txt").exists()); + assert!(repository.path().join("file.txt").exists()); + } + + { + // apply first vbranch again + controller + .apply_virtual_branch(project_id, &branch1_id) + .await + .unwrap(); + + // workdir should be rebased, and work should be restored + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + + assert!(repository.path().join("another_file.txt").exists()); + assert!(repository.path().join("file.txt").exists()); + } +} diff --git a/tests/suite/virtual_branches/cherry_pick.rs b/tests/suite/virtual_branches/cherry_pick.rs new file mode 100644 index 000000000..212219fcd --- /dev/null +++ b/tests/suite/virtual_branches/cherry_pick.rs @@ -0,0 +1,382 @@ +use super::*; + +mod cleanly { + + use super::*; + + #[tokio::test] + async fn applied() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one = { + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit", None, false) + .await + .unwrap() + }; + + let commit_two = { + fs::write(repository.path().join("file.txt"), "content two").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit", None, false) + .await + .unwrap() + }; + + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + controller + .reset_virtual_branch(project_id, &branch_id, commit_one) + .await + .unwrap(); + + repository.reset_hard(None); + + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + + let cherry_picked_commit_oid = controller + .cherry_pick(project_id, &branch_id, commit_two) + .await + .unwrap(); + assert!(cherry_picked_commit_oid.is_some()); + assert!(repository.path().join("file.txt").exists()); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content two" + ); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert_eq!(branches[0].commits.len(), 2); + assert_eq!(branches[0].commits[0].id, cherry_picked_commit_oid.unwrap()); + assert_eq!(branches[0].commits[1].id, commit_one); + } + + #[tokio::test] + async fn to_different_branch() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one = { + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit", None, false) + .await + .unwrap() + }; + + let commit_two = { + fs::write(repository.path().join("file_two.txt"), "content two").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit", None, false) + .await + .unwrap() + }; + + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + controller + .reset_virtual_branch(project_id, &branch_id, commit_one) + .await + .unwrap(); + + repository.reset_hard(None); + + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + assert!(!repository.path().join("file_two.txt").exists()); + + let branch_two_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let cherry_picked_commit_oid = controller + .cherry_pick(project_id, &branch_two_id, commit_two) + .await + .unwrap(); + assert!(cherry_picked_commit_oid.is_some()); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert!(repository.path().join("file_two.txt").exists()); + assert_eq!( + fs::read_to_string(repository.path().join("file_two.txt")).unwrap(), + "content two" + ); + + assert_eq!(branches.len(), 2); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].id, commit_one); + + assert_eq!(branches[1].id, branch_two_id); + assert!(branches[1].active); + assert_eq!(branches[1].commits.len(), 1); + assert_eq!(branches[1].commits[0].id, 
cherry_picked_commit_oid.unwrap()); + } + + #[tokio::test] + async fn non_applied() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one_oid = { + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file_two.txt"), "content two").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit", None, false) + .await + .unwrap() + }; + + let commit_three_oid = { + fs::write(repository.path().join("file_three.txt"), "content three").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit", None, false) + .await + .unwrap() + }; + + controller + .reset_virtual_branch(project_id, &branch_id, commit_one_oid) + .await + .unwrap(); + + controller + .unapply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + + assert!(matches!( + controller + .cherry_pick(project_id, &branch_id, commit_three_oid) + .await, + Err(ControllerError::Action(errors::CherryPickError::NotApplied)) + )); + } +} + +mod with_conflicts { + + use super::*; + + #[tokio::test] + async fn applied() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one = { + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file_two.txt"), "content two").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit two", None, false) + .await + .unwrap() + }; + + let commit_three = { + fs::write(repository.path().join("file_three.txt"), "content three").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit three", None, false) + .await + .unwrap() + }; + + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + controller + .reset_virtual_branch(project_id, &branch_id, commit_one) + .await + .unwrap(); + + repository.reset_hard(None); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + assert!(!repository.path().join("file_two.txt").exists()); + assert!(!repository.path().join("file_three.txt").exists()); + + // introduce conflict with the remote commit + fs::write(repository.path().join("file_three.txt"), "conflict").unwrap(); + + { + // cherry picking leads to conflict + let cherry_picked_commit_oid = controller + .cherry_pick(project_id, &branch_id, commit_three) + .await + .unwrap(); + assert!(cherry_picked_commit_oid.is_none()); + + assert_eq!( + fs::read_to_string(repository.path().join("file_three.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\ncontent three\n>>>>>>> theirs\n" + ); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + 
assert!(branches[0].active); + assert!(branches[0].conflicted); + assert_eq!(branches[0].files.len(), 1); + assert!(branches[0].files[0].conflicted); + assert_eq!(branches[0].commits.len(), 1); + } + + { + // conflict can be resolved + fs::write(repository.path().join("file_three.txt"), "resolved").unwrap(); + let commited_oid = controller + .create_commit(project_id, &branch_id, "resolution", None, false) + .await + .unwrap(); + + let commit = repository.find_commit(commited_oid).unwrap(); + assert_eq!(commit.parent_count(), 2); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].requires_force); + assert!(!branches[0].conflicted); + assert_eq!(branches[0].commits.len(), 2); + // resolution commit is there + assert_eq!(branches[0].commits[0].id, commited_oid); + assert_eq!(branches[0].commits[1].id, commit_one); + } + } + + #[tokio::test] + async fn non_applied() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + let commit_oid = { + let first = repository.commit_all("commit"); + fs::write(repository.path().join("file.txt"), "content").unwrap(); + let second = repository.commit_all("commit"); + repository.push(); + repository.reset_hard(Some(first)); + second + }; + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + // introduce conflict with the remote commit + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + + controller + .unapply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + + assert!(matches!( + controller + .cherry_pick(project_id, &branch_id, commit_oid) + .await, + Err(ControllerError::Action(errors::CherryPickError::NotApplied)) + )); + } +} diff --git a/tests/suite/virtual_branches/create_commit.rs b/tests/suite/virtual_branches/create_commit.rs new file mode 100644 index 000000000..95a2ebfbe --- /dev/null +++ b/tests/suite/virtual_branches/create_commit.rs @@ -0,0 +1,198 @@ +use super::*; + +#[tokio::test] +async fn should_lock_updated_hunks() { + let Test { + project_id, + controller, + repository, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // by default, hunks are not locked + + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); + assert_eq!(branch.files[0].hunks.len(), 1); + assert!(!branch.files[0].hunks[0].locked); + } + + controller + .create_commit(project_id, &branch_id, "test", None, false) + .await + .unwrap(); + + { + // change in the committed hunks leads to hunk locking + fs::write(repository.path().join("file.txt"), "updated content").unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); + assert_eq!(branch.files[0].hunks.len(), 1); + assert!(branch.files[0].hunks[0].locked); + } +} + +#[tokio::test] +async fn should_not_lock_disjointed_hunks() { + let Test { + project_id, + controller, + repository, + .. 
+ } = &Test::default(); + + let mut lines: Vec<_> = (0_i32..24_i32).map(|i| format!("line {}", i)).collect(); + fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); + repository.commit_all("my commit"); + repository.push(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // new hunk in the middle of the file + lines[12] = "commited stuff".to_string(); + fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); + assert_eq!(branch.files[0].hunks.len(), 1); + assert!(!branch.files[0].hunks[0].locked); + } + + controller + .create_commit(project_id, &branch_id, "test commit", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + { + // hunk before the commited part is not locked + let mut changed_lines = lines.clone(); + changed_lines[0] = "updated line".to_string(); + fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap(); + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); + assert_eq!(branch.files[0].hunks.len(), 1); + assert!(!branch.files[0].hunks[0].locked); + // cleanup + fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); + } + { + // hunk after the commited part is not locked + let mut changed_lines = lines.clone(); + 
changed_lines[23] = "updated line".to_string();
+ fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap();
+ let branch = controller
+ .list_virtual_branches(project_id)
+ .await
+ .unwrap()
+ .0
+ .into_iter()
+ .find(|b| b.id == branch_id)
+ .unwrap();
+ assert_eq!(branch.files.len(), 1);
+ assert_eq!(branch.files[0].path.display().to_string(), "file.txt");
+ assert_eq!(branch.files[0].hunks.len(), 1);
+ assert!(!branch.files[0].hunks[0].locked);
+ // cleanup
+ fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap();
+ }
+ {
+ // hunk before the committed part but with overlapping context
+ let mut changed_lines = lines.clone();
+ changed_lines[10] = "updated line".to_string();
+ fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap();
+ let branch = controller
+ .list_virtual_branches(project_id)
+ .await
+ .unwrap()
+ .0
+ .into_iter()
+ .find(|b| b.id == branch_id)
+ .unwrap();
+ assert_eq!(branch.files.len(), 1);
+ assert_eq!(branch.files[0].path.display().to_string(), "file.txt");
+ assert_eq!(branch.files[0].hunks.len(), 1);
+ // TODO: We lock this hunk, but can we afford not to lock it?
+ assert!(branch.files[0].hunks[0].locked);
+ // cleanup
+ fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap();
+ }
+ {
+ // hunk after the committed part but with overlapping context
+ let mut changed_lines = lines.clone();
+ changed_lines[14] = "updated line".to_string();
+ fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap();
+ let branch = controller
+ .list_virtual_branches(project_id)
+ .await
+ .unwrap()
+ .0
+ .into_iter()
+ .find(|b| b.id == branch_id)
+ .unwrap();
+ assert_eq!(branch.files.len(), 1);
+ assert_eq!(branch.files[0].path.display().to_string(), "file.txt");
+ assert_eq!(branch.files[0].hunks.len(), 1);
+ // TODO: We lock this hunk, but can we afford not to lock it?
+ assert!(branch.files[0].hunks[0].locked); + // cleanup + fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); + } +} diff --git a/tests/suite/virtual_branches/create_virtual_branch_from_branch.rs b/tests/suite/virtual_branches/create_virtual_branch_from_branch.rs new file mode 100644 index 000000000..f3a62b6f2 --- /dev/null +++ b/tests/suite/virtual_branches/create_virtual_branch_from_branch.rs @@ -0,0 +1,382 @@ +use super::*; + +#[tokio::test] +async fn integration() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_name = { + // make a remote branch + + let branch_id = controller + .create_virtual_branch(project_id, &super::branch::BranchCreateRequest::default()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "first\n").unwrap(); + controller + .create_commit(project_id, &branch_id, "first", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|branch| branch.id == branch_id) + .unwrap(); + + let name = branch.upstream.unwrap().name; + + controller + .delete_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + + name + }; + + // checkout a existing remote branch + let branch_id = controller + .create_virtual_branch_from_branch(project_id, &branch_name) + .await + .unwrap(); + + { + // add a commit + std::fs::write(repository.path().join("file.txt"), "first\nsecond").unwrap(); + + controller + .create_commit(project_id, &branch_id, "second", None, false) + .await + .unwrap(); + } + + { + // meanwhile, there is a new commit on master + repository.checkout(&"refs/heads/master".parse().unwrap()); + 
std::fs::write(repository.path().join("another.txt"), "").unwrap(); + repository.commit_all("another"); + repository.push_branch(&"refs/heads/master".parse().unwrap()); + repository.checkout(&"refs/heads/gitbutler/integration".parse().unwrap()); + } + + { + // merge branch into master + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|branch| branch.id == branch_id) + .unwrap(); + + assert!(branch.commits[0].is_remote); + assert!(!branch.commits[0].is_integrated); + assert!(branch.commits[1].is_remote); + assert!(!branch.commits[1].is_integrated); + + repository.rebase_and_merge(&branch_name); + } + + { + // should mark commits as integrated + controller + .fetch_from_target(project_id, None) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|branch| branch.id == branch_id) + .unwrap(); + + assert!(branch.commits[0].is_remote); + assert!(branch.commits[0].is_integrated); + assert!(branch.commits[1].is_remote); + assert!(branch.commits[1].is_integrated); + } +} + +#[tokio::test] +async fn no_conflicts() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + { + // create a remote branch + let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); + repository.checkout(&branch_name); + fs::write(repository.path().join("file.txt"), "first").unwrap(); + repository.commit_all("first"); + repository.push_branch(&branch_name); + repository.checkout(&"refs/heads/master".parse().unwrap()); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert!(branches.is_empty()); + + let branch_id = controller + .create_virtual_branch_from_branch( + project_id, + &"refs/remotes/origin/branch".parse().unwrap(), + ) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].description, "first"); +} + +#[tokio::test] +async fn conflicts_with_uncommited() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + { + // create a remote branch + let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); + repository.checkout(&branch_name); + fs::write(repository.path().join("file.txt"), "first").unwrap(); + repository.commit_all("first"); + repository.push_branch(&branch_name); + repository.checkout(&"refs/heads/master".parse().unwrap()); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // create a local branch that conflicts with remote + { + std::fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + }; + + // branch should be created unapplied, because of the conflict + + let new_branch_id = controller + .create_virtual_branch_from_branch( + project_id, + &"refs/remotes/origin/branch".parse().unwrap(), + ) + .await + .unwrap(); + let new_branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|branch| branch.id == new_branch_id) + .unwrap(); + assert!(!new_branch.active); + assert_eq!(new_branch.commits.len(), 1); + assert!(new_branch.upstream.is_some()); +} + +#[tokio::test] +async fn conflicts_with_commited() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + { + // create a remote branch + let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); + repository.checkout(&branch_name); + fs::write(repository.path().join("file.txt"), "first").unwrap(); + repository.commit_all("first"); + repository.push_branch(&branch_name); + repository.checkout(&"refs/heads/master".parse().unwrap()); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // create a local branch that conflicts with remote + { + std::fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + controller + .create_commit(project_id, &branches[0].id, "hej", None, false) + .await + .unwrap(); + }; + + // branch should be created unapplied, because of the conflict + + let new_branch_id = controller + .create_virtual_branch_from_branch( + project_id, + &"refs/remotes/origin/branch".parse().unwrap(), + ) + .await + .unwrap(); + let new_branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|branch| branch.id == new_branch_id) + .unwrap(); + assert!(!new_branch.active); + assert_eq!(new_branch.commits.len(), 1); + assert!(new_branch.upstream.is_some()); +} + +#[tokio::test] +async fn from_default_target() { + let Test { + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // branch should be created unapplied, because of the conflict + + assert!(matches!( + controller + .create_virtual_branch_from_branch( + project_id, + &"refs/remotes/origin/master".parse().unwrap(), + ) + .await + .unwrap_err(), + ControllerError::Action( + errors::CreateVirtualBranchFromBranchError::CantMakeBranchFromDefaultTarget + ) + )); +} + +#[tokio::test] +async fn from_non_existent_branch() { + let Test { + project_id, + controller, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // branch should be created unapplied, because of the conflict + + assert!(matches!( + controller + .create_virtual_branch_from_branch( + project_id, + &"refs/remotes/origin/branch".parse().unwrap(), + ) + .await + .unwrap_err(), + ControllerError::Action(errors::CreateVirtualBranchFromBranchError::BranchNotFound( + _ + )) + )); +} + +#[tokio::test] +async fn from_state_remote_branch() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + { + // create a remote branch + let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); + repository.checkout(&branch_name); + fs::write(repository.path().join("file.txt"), "branch commit").unwrap(); + repository.commit_all("branch commit"); + repository.push_branch(&branch_name); + repository.checkout(&"refs/heads/master".parse().unwrap()); + + // make remote branch stale + std::fs::write(repository.path().join("antoher_file.txt"), "master commit").unwrap(); + repository.commit_all("master commit"); + repository.push(); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch_from_branch( + project_id, + &"refs/remotes/origin/branch".parse().unwrap(), + ) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert_eq!(branches[0].commits.len(), 1); + assert!(branches[0].files.is_empty()); + assert_eq!(branches[0].commits[0].description, "branch commit"); +} diff --git a/tests/suite/virtual_branches/delete_virtual_branch.rs b/tests/suite/virtual_branches/delete_virtual_branch.rs new file mode 100644 index 000000000..b930a0763 --- /dev/null +++ b/tests/suite/virtual_branches/delete_virtual_branch.rs @@ -0,0 +1,78 @@ +use super::*; + +#[tokio::test] +async fn should_unapply_diff() { + let Test { + project_id, + controller, + repository, + .. 
+ } = &Test::default();
+
+ controller
+ .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap())
+ .await
+ .unwrap();
+
+ // write some changes
+ std::fs::write(repository.path().join("file.txt"), "content").unwrap();
+
+ let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap();
+
+ controller
+ .delete_virtual_branch(project_id, &branches[0].id)
+ .await
+ .unwrap();
+
+ let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap();
+ assert_eq!(branches.len(), 0);
+ assert!(!repository.path().join("file.txt").exists());
+
+ let refnames = repository
+ .references()
+ .into_iter()
+ .filter_map(|reference| reference.name().map(|name| name.to_string()))
+ .collect::<Vec<_>>();
+ assert!(!refnames.contains(&"refs/gitbutler/name".to_string()));
+}
+
+#[tokio::test]
+async fn should_remove_reference() {
+ let Test {
+ project_id,
+ controller,
+ repository,
+ ..
+ } = &Test::default();
+
+ controller
+ .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap())
+ .await
+ .unwrap();
+
+ let id = controller
+ .create_virtual_branch(
+ project_id,
+ &branch::BranchCreateRequest {
+ name: Some("name".to_string()),
+ ..Default::default()
+ },
+ )
+ .await
+ .unwrap();
+
+ controller
+ .delete_virtual_branch(project_id, &id)
+ .await
+ .unwrap();
+
+ let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap();
+ assert_eq!(branches.len(), 0);
+
+ let refnames = repository
+ .references()
+ .into_iter()
+ .filter_map(|reference| reference.name().map(|name| name.to_string()))
+ .collect::<Vec<_>>();
+ assert!(!refnames.contains(&"refs/gitbutler/name".to_string()));
+}
diff --git a/tests/suite/virtual_branches/fetch_from_target.rs b/tests/suite/virtual_branches/fetch_from_target.rs
new file mode 100644
index 000000000..7b3f1c72f
--- /dev/null
+++ b/tests/suite/virtual_branches/fetch_from_target.rs
@@ -0,0 +1,46 @@
+use super::*;
+
+#[tokio::test]
+async fn should_update_last_fetched() {
+
let Test { + project_id, + controller, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let before_fetch = controller.get_base_branch_data(project_id).await.unwrap(); + assert!(before_fetch.unwrap().last_fetched_ms.is_none()); + + let fetch = controller + .fetch_from_target(project_id, None) + .await + .unwrap(); + assert!(fetch.last_fetched_ms.is_some()); + + let after_fetch = controller.get_base_branch_data(project_id).await.unwrap(); + assert!(after_fetch.as_ref().unwrap().last_fetched_ms.is_some()); + assert_eq!(fetch.last_fetched_ms, after_fetch.unwrap().last_fetched_ms); + + let second_fetch = controller + .fetch_from_target(project_id, None) + .await + .unwrap(); + assert!(second_fetch.last_fetched_ms.is_some()); + assert_ne!(fetch.last_fetched_ms, second_fetch.last_fetched_ms); + + let after_second_fetch = controller.get_base_branch_data(project_id).await.unwrap(); + assert!(after_second_fetch + .as_ref() + .unwrap() + .last_fetched_ms + .is_some()); + assert_eq!( + second_fetch.last_fetched_ms, + after_second_fetch.unwrap().last_fetched_ms + ); +} diff --git a/tests/suite/virtual_branches/init.rs b/tests/suite/virtual_branches/init.rs new file mode 100644 index 000000000..9cf4c478e --- /dev/null +++ b/tests/suite/virtual_branches/init.rs @@ -0,0 +1,213 @@ +use super::*; + +#[tokio::test] +async fn twice() { + let data_dir = paths::data_dir(); + let keys = keys::Controller::from_path(&data_dir); + let projects = projects::Controller::from_path(&data_dir); + let users = users::Controller::from_path(&data_dir); + let helper = git::credentials::Helper::from_path(&data_dir); + + let test_project = TestProject::default(); + + let controller = Controller::new( + data_dir.path().into(), + projects.clone(), + users, + keys, + helper, + ); + + { + let project = projects + .add(test_project.path()) + .expect("failed to add project"); + controller + 
.set_base_branch(&project.id, &"refs/remotes/origin/master".parse().unwrap())
+ .await
+ .unwrap();
+ assert!(controller
+ .list_virtual_branches(&project.id)
+ .await
+ .unwrap()
+ .0
+ .is_empty());
+ projects.delete(&project.id).await.unwrap();
+ controller
+ .list_virtual_branches(&project.id)
+ .await
+ .unwrap_err();
+ }
+
+ {
+ let project = projects.add(test_project.path()).unwrap();
+ controller
+ .set_base_branch(&project.id, &"refs/remotes/origin/master".parse().unwrap())
+ .await
+ .unwrap();
+
+ // even though project is on gitbutler/integration, we should not import it
+ assert!(controller
+ .list_virtual_branches(&project.id)
+ .await
+ .unwrap()
+ .0
+ .is_empty());
+ }
+}
+
+#[tokio::test]
+async fn dirty_non_target() {
+ // a situation when you initialize a project while being on the local version of the master
+ // that has uncommitted changes.
+ let Test {
+ repository,
+ project_id,
+ controller,
+ ..
+ } = &Test::default();
+
+ repository.checkout(&"refs/heads/some-feature".parse().unwrap());
+
+ fs::write(repository.path().join("file.txt"), "content").unwrap();
+
+ controller
+ .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap())
+ .await
+ .unwrap();
+
+ let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap();
+ assert_eq!(branches.len(), 1);
+ assert_eq!(branches[0].files.len(), 1);
+ assert_eq!(branches[0].files[0].hunks.len(), 1);
+ assert!(branches[0].upstream.is_none());
+ assert_eq!(branches[0].name, "some-feature");
+}
+
+#[tokio::test]
+async fn dirty_target() {
+ // a situation when you initialize a project while being on the local version of the master
+ // that has uncommitted changes.
+ let Test {
+ repository,
+ project_id,
+ controller,
+ ..
+ } = &Test::default(); + + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].files[0].hunks.len(), 1); + assert!(branches[0].upstream.is_none()); + assert_eq!(branches[0].name, "master"); +} + +#[tokio::test] +async fn commit_on_non_target_local() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + repository.checkout(&"refs/heads/some-feature".parse().unwrap()); + fs::write(repository.path().join("file.txt"), "content").unwrap(); + repository.commit_all("commit on target"); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].files.is_empty()); + assert_eq!(branches[0].commits.len(), 1); + assert!(branches[0].upstream.is_none()); + assert_eq!(branches[0].name, "some-feature"); +} + +#[tokio::test] +async fn commit_on_non_target_remote() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + repository.checkout(&"refs/heads/some-feature".parse().unwrap()); + fs::write(repository.path().join("file.txt"), "content").unwrap(); + repository.commit_all("commit on target"); + repository.push_branch(&"refs/heads/some-feature".parse().unwrap()); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].files.is_empty()); + assert_eq!(branches[0].commits.len(), 1); + assert!(branches[0].upstream.is_some()); + assert_eq!(branches[0].name, "some-feature"); +} + +#[tokio::test] +async fn commit_on_target() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + fs::write(repository.path().join("file.txt"), "content").unwrap(); + repository.commit_all("commit on target"); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].files.is_empty()); + assert_eq!(branches[0].commits.len(), 1); + assert!(branches[0].upstream.is_none()); + assert_eq!(branches[0].name, "master"); +} + +#[tokio::test] +async fn submodule() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + let project = TestProject::default(); + let submodule_url: git::Url = project.path().display().to_string().parse().unwrap(); + repository.add_submodule(&submodule_url, path::Path::new("submodule")); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].files[0].hunks.len(), 1); +} diff --git a/tests/suite/virtual_branches/mod.rs b/tests/suite/virtual_branches/mod.rs new file mode 100644 index 000000000..c71ebd6a5 --- /dev/null +++ b/tests/suite/virtual_branches/mod.rs @@ -0,0 +1,176 @@ +use std::{fs, path, str::FromStr}; +use tempfile::TempDir; + +use crate::common::{paths, TestProject}; +use crate::VAR_NO_CLEANUP; +use gitbutler::{ + git, keys, + projects::{self, ProjectId}, + users, + virtual_branches::{branch, controller::ControllerError, errors, Controller}, +}; + +struct Test { + repository: TestProject, + project_id: ProjectId, + projects: projects::Controller, + controller: Controller, + data_dir: Option, +} + +impl Drop for Test { + fn drop(&mut self) { + if std::env::var_os(VAR_NO_CLEANUP).is_some() { + let _ = self.data_dir.take().unwrap().into_path(); + } + } +} + +impl Default for Test { + fn default() -> Self { + let data_dir = paths::data_dir(); + let keys = keys::Controller::from_path(&data_dir); + let projects = projects::Controller::from_path(&data_dir); + let users = users::Controller::from_path(&data_dir); + let helper = git::credentials::Helper::from_path(&data_dir); + + let test_project = TestProject::default(); + let project = projects + .add(test_project.path()) + .expect("failed to add project"); + + Self { + repository: test_project, + project_id: project.id, + controller: Controller::new( + data_dir.path().into(), + projects.clone(), + users, + keys, + helper, + ), + 
projects, + data_dir: Some(data_dir), + } + } +} + +mod amend; +mod apply_virtual_branch; +mod cherry_pick; +mod create_commit; +mod create_virtual_branch_from_branch; +mod delete_virtual_branch; +mod fetch_from_target; +mod init; +mod move_commit_to_vbranch; +mod references; +mod reset_virtual_branch; +mod selected_for_changes; +mod set_base_branch; +mod squash; +mod unapply; +mod unapply_ownership; +mod update_base_branch; +mod update_commit_message; +mod upstream; + +#[tokio::test] +async fn resolve_conflict_flow() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch1_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert!(branches[0].active); + + branch1_id + }; + + { + // fetch remote + controller.update_base_branch(project_id).await.unwrap(); + + // there is a conflict now, so the branch should be inactive + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert!(!branches[0].active); + } + + { + // when we apply 
conflicted branch, it has conflict + controller + .apply_virtual_branch(project_id, &branch1_id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + + // and the conflict markers are in the file + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + + { + // can't commit conflicts + assert!(matches!( + controller + .create_commit(project_id, &branch1_id, "commit conflicts", None, false) + .await, + Err(ControllerError::Action(errors::CommitError::Conflicted(_))) + )); + } + + { + // fixing the conflict removes conflicted mark + fs::write(repository.path().join("file.txt"), "resolved").unwrap(); + let commit_oid = controller + .create_commit(project_id, &branch1_id, "resolution", None, false) + .await + .unwrap(); + + let commit = repository.find_commit(commit_oid).unwrap(); + assert_eq!(commit.parent_count(), 2); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + } +} diff --git a/tests/suite/virtual_branches/move_commit_to_vbranch.rs b/tests/suite/virtual_branches/move_commit_to_vbranch.rs new file mode 100644 index 000000000..ad4a0400e --- /dev/null +++ b/tests/suite/virtual_branches/move_commit_to_vbranch.rs @@ -0,0 +1,324 @@ +use crate::suite::virtual_branches::Test; +use gitbutler::git; +use gitbutler::virtual_branches::controller::ControllerError; +use gitbutler::virtual_branches::{branch, errors, BranchId}; +use std::str::FromStr; + +#[tokio::test] +async fn no_diffs() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + let source_branch_id = branches[0].id; + + let commit_oid = controller + .create_commit(project_id, &source_branch_id, "commit", None, false) + .await + .unwrap(); + + let target_branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + controller + .move_commit(project_id, &target_branch_id, commit_oid) + .await + .unwrap(); + + let destination_branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == target_branch_id) + .unwrap(); + + let source_branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == source_branch_id) + .unwrap(); + + assert_eq!(destination_branch.commits.len(), 1); + assert_eq!(destination_branch.files.len(), 0); + assert_eq!(source_branch.commits.len(), 0); + assert_eq!(source_branch.files.len(), 0); +} + +#[tokio::test] +async fn diffs_on_source_branch() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + let source_branch_id = branches[0].id; + + let commit_oid = controller + .create_commit(project_id, &source_branch_id, "commit", None, false) + .await + .unwrap(); + + std::fs::write( + repository.path().join("another file.txt"), + "another content", + ) + .unwrap(); + + let target_branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + controller + .move_commit(project_id, &target_branch_id, commit_oid) + .await + .unwrap(); + + let destination_branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == target_branch_id) + .unwrap(); + + let source_branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == source_branch_id) + .unwrap(); + + assert_eq!(destination_branch.commits.len(), 1); + assert_eq!(destination_branch.files.len(), 0); + assert_eq!(source_branch.commits.len(), 0); + assert_eq!(source_branch.files.len(), 1); +} + +#[tokio::test] +async fn diffs_on_target_branch() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + let source_branch_id = branches[0].id; + + let commit_oid = controller + .create_commit(project_id, &source_branch_id, "commit", None, false) + .await + .unwrap(); + + let target_branch_id = controller + .create_virtual_branch( + project_id, + &branch::BranchCreateRequest { + selected_for_changes: Some(true), + ..Default::default() + }, + ) + .await + .unwrap(); + + std::fs::write( + repository.path().join("another file.txt"), + "another content", + ) + .unwrap(); + + controller + .move_commit(project_id, &target_branch_id, commit_oid) + .await + .unwrap(); + + let destination_branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == target_branch_id) + .unwrap(); + + let source_branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == source_branch_id) + .unwrap(); + + assert_eq!(destination_branch.commits.len(), 1); + assert_eq!(destination_branch.files.len(), 1); + assert_eq!(source_branch.commits.len(), 0); + assert_eq!(source_branch.files.len(), 0); +} + +#[tokio::test] +async fn locked_hunks_on_source_branch() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + let source_branch_id = branches[0].id; + + let commit_oid = controller + .create_commit(project_id, &source_branch_id, "commit", None, false) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "locked content").unwrap(); + + let target_branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + assert!(matches!( + controller + .move_commit(project_id, &target_branch_id, commit_oid) + .await + .unwrap_err(), + ControllerError::Action(errors::MoveCommitError::SourceLocked) + )); +} + +#[tokio::test] +async fn no_commit() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + let source_branch_id = branches[0].id; + + controller + .create_commit(project_id, &source_branch_id, "commit", None, false) + .await + .unwrap(); + + let target_branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + assert!(matches!( + controller + .move_commit( + project_id, + &target_branch_id, + git::Oid::from_str("a99c95cca7a60f1a2180c2f86fb18af97333c192").unwrap() + ) + .await + .unwrap_err(), + ControllerError::Action(errors::MoveCommitError::CommitNotFound(_)) + )); +} + +#[tokio::test] +async fn no_branch() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + let source_branch_id = branches[0].id; + + let commit_oid = controller + .create_commit(project_id, &source_branch_id, "commit", None, false) + .await + .unwrap(); + + assert!(matches!( + controller + .move_commit(project_id, &BranchId::generate(), commit_oid) + .await + .unwrap_err(), + ControllerError::Action(errors::MoveCommitError::BranchNotFound(_)) + )); +} diff --git a/tests/suite/virtual_branches/references.rs b/tests/suite/virtual_branches/references.rs new file mode 100644 index 000000000..3f4d4ff40 --- /dev/null +++ b/tests/suite/virtual_branches/references.rs @@ -0,0 +1,366 @@ +use super::*; + +mod create_virtual_branch { + use super::*; + + 
#[tokio::test] + async fn simple() { + let Test { + project_id, + controller, + repository, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert_eq!(branches[0].name, "Virtual branch"); + + let refnames = repository + .references() + .into_iter() + .filter_map(|reference| reference.name().map(|name| name.to_string())) + .collect::<Vec<_>>(); + assert!(refnames.contains(&"refs/gitbutler/Virtual-branch".to_string())); + } + + #[tokio::test] + async fn duplicate_name() { + let Test { + project_id, + controller, + repository, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch( + project_id, + &gitbutler::virtual_branches::branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + let branch2_id = controller + .create_virtual_branch( + project_id, + &gitbutler::virtual_branches::branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 2); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].name, "name"); + assert_eq!(branches[1].id, branch2_id); + assert_eq!(branches[1].name, "name 1"); + + let refnames = repository + .references() + .into_iter() + .filter_map(|reference| reference.name().map(|name| name.to_string())) + .collect::<Vec<_>>(); + 
assert!(refnames.contains(&"refs/gitbutler/name".to_string())); + assert!(refnames.contains(&"refs/gitbutler/name-1".to_string())); + } +} + +mod update_virtual_branch { + use super::*; + + #[tokio::test] + async fn simple() { + let Test { + project_id, + controller, + repository, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch( + project_id, + &branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + controller + .update_virtual_branch( + project_id, + branch::BranchUpdateRequest { + id: branch_id, + name: Some("new name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert_eq!(branches[0].name, "new name"); + + let refnames = repository + .references() + .into_iter() + .filter_map(|reference| reference.name().map(|name| name.to_string())) + .collect::<Vec<_>>(); + assert!(!refnames.contains(&"refs/gitbutler/name".to_string())); + assert!(refnames.contains(&"refs/gitbutler/new-name".to_string())); + } + + #[tokio::test] + async fn duplicate_name() { + let Test { + project_id, + controller, + repository, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch( + project_id, + &branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + let branch2_id = controller + .create_virtual_branch( + project_id, + &branch::BranchCreateRequest { + ..Default::default() + }, + ) + .await + .unwrap(); + + controller + .update_virtual_branch( + project_id, + branch::BranchUpdateRequest { + id: branch2_id, + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 2); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].name, "name"); + assert_eq!(branches[1].id, branch2_id); + assert_eq!(branches[1].name, "name 1"); + + let refnames = repository + .references() + .into_iter() + .filter_map(|reference| reference.name().map(|name| name.to_string())) + .collect::<Vec<_>>(); + assert!(refnames.contains(&"refs/gitbutler/name".to_string())); + assert!(refnames.contains(&"refs/gitbutler/name-1".to_string())); + } +} + +mod push_virtual_branch { + + use super::*; + + #[tokio::test] + async fn simple() { + let Test { + project_id, + controller, + repository, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch( + project_id, + &branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + controller + .create_commit(project_id, &branch1_id, "test", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(project_id, &branch1_id, false, None) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].name, "name"); + assert_eq!( + branches[0].upstream.as_ref().unwrap().name.to_string(), + "refs/remotes/origin/name" + ); + + let refnames = repository + .references() + .into_iter() + .filter_map(|reference| reference.name().map(|name| name.to_string())) + .collect::<Vec<_>>(); + assert!(refnames.contains(&branches[0].upstream.clone().unwrap().name.to_string())); + } + + #[tokio::test] + async fn duplicate_names() { + let Test { + project_id, + controller, + repository, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = { + // create and push branch with some work + let branch1_id = controller + .create_virtual_branch( + project_id, + &branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(project_id, &branch1_id, "test", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(project_id, &branch1_id, false, None) + .await + .unwrap(); + branch1_id + }; + + // rename first branch + controller + .update_virtual_branch( + project_id, + branch::BranchUpdateRequest { + id: branch1_id, + name: Some("updated name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + + let branch2_id = { + // create another branch with first branch's old name and push it + let branch2_id = controller + .create_virtual_branch( + project_id, + &branch::BranchCreateRequest { + name: Some("name".to_string()), + ..Default::default() + }, + ) + .await + .unwrap(); + fs::write(repository.path().join("file.txt"), "updated content").unwrap(); + controller + .create_commit(project_id, &branch2_id, "test", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(project_id, &branch2_id, false, None) + .await + .unwrap(); + branch2_id + }; + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 2); + // first branch is pushing to old ref remotely + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].name, "updated name"); + assert_eq!( + branches[0].upstream.as_ref().unwrap().name, + "refs/remotes/origin/name".parse().unwrap() + ); + // new branch is pushing to new ref remotely + assert_eq!(branches[1].id, branch2_id); + assert_eq!(branches[1].name, "name"); + assert_eq!( + 
branches[1].upstream.as_ref().unwrap().name, + "refs/remotes/origin/name-1".parse().unwrap() + ); + + let refnames = repository + .references() + .into_iter() + .filter_map(|reference| reference.name().map(|name| name.to_string())) + .collect::<Vec<_>>(); + assert!(refnames.contains(&branches[0].upstream.clone().unwrap().name.to_string())); + assert!(refnames.contains(&branches[1].upstream.clone().unwrap().name.to_string())); + } +} diff --git a/tests/suite/virtual_branches/reset_virtual_branch.rs b/tests/suite/virtual_branches/reset_virtual_branch.rs new file mode 100644 index 000000000..bbae5a12f --- /dev/null +++ b/tests/suite/virtual_branches/reset_virtual_branch.rs @@ -0,0 +1,265 @@ +use crate::suite::virtual_branches::Test; +use gitbutler::virtual_branches::{branch, controller::ControllerError, errors::ResetBranchError}; +use std::fs; + +#[tokio::test] +async fn to_head() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let oid = { + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + // commit changes + let oid = controller + .create_commit(project_id, &branch1_id, "commit", None, false) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].id, oid); + assert_eq!(branches[0].files.len(), 0); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + + oid + }; + + { + // reset changes to head + controller + .reset_virtual_branch(project_id, &branch1_id, oid) + .await + .unwrap(); + + let (branches, _, _) = 
controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].id, oid); + assert_eq!(branches[0].files.len(), 0); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + } +} + +#[tokio::test] +async fn to_target() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + let base_branch = controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + // commit changes + let oid = controller + .create_commit(project_id, &branch1_id, "commit", None, false) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].id, oid); + assert_eq!(branches[0].files.len(), 0); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + } + + { + // reset changes to the target (base) commit + controller + .reset_virtual_branch(project_id, &branch1_id, base_branch.base_sha) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 0); + assert_eq!(branches[0].files.len(), 1); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + } +} + +#[tokio::test] +async fn to_commit() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let first_commit_oid = { + // commit some changes + + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let oid = controller + .create_commit(project_id, &branch1_id, "commit", None, false) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].id, oid); + assert_eq!(branches[0].files.len(), 0); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + + oid + }; + + { + // commit some more + fs::write(repository.path().join("file.txt"), "more content").unwrap(); + + let second_commit_oid = controller + .create_commit(project_id, &branch1_id, "commit", None, false) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 2); + assert_eq!(branches[0].commits[0].id, second_commit_oid); + assert_eq!(branches[0].commits[1].id, first_commit_oid); + assert_eq!(branches[0].files.len(), 0); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "more content" + ); + } + + { + // reset changes to the first commit + controller + .reset_virtual_branch(project_id, &branch1_id, first_commit_oid) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 1); + 
assert_eq!(branches[0].commits[0].id, first_commit_oid); + assert_eq!(branches[0].files.len(), 1); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "more content" + ); + } +} + +#[tokio::test] +async fn to_non_existing() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + fs::write(repository.path().join("file.txt"), "content").unwrap(); + + // commit changes + let oid = controller + .create_commit(project_id, &branch1_id, "commit", None, false) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].commits[0].id, oid); + assert_eq!(branches[0].files.len(), 0); + assert_eq!( + fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "content" + ); + + oid + }; + + assert!(matches!( + controller + .reset_virtual_branch( + project_id, + &branch1_id, + "fe14df8c66b73c6276f7bb26102ad91da680afcb".parse().unwrap() + ) + .await, + Err(ControllerError::Action( + ResetBranchError::CommitNotFoundInBranch(_) + )) + )); +} diff --git a/tests/suite/virtual_branches/selected_for_changes.rs b/tests/suite/virtual_branches/selected_for_changes.rs new file mode 100644 index 000000000..cfeb16a20 --- /dev/null +++ b/tests/suite/virtual_branches/selected_for_changes.rs @@ -0,0 +1,375 @@ +use super::*; + +#[tokio::test] +async fn unapplying_selected_branch_selects_anther() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file one.txt"), "").unwrap(); + + // first branch should be created as default + let b_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + // if default branch exists, new branch should not be created as default + let b2_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + + let b = branches.iter().find(|b| b.id == b_id).unwrap(); + + let b2 = branches.iter().find(|b| b.id == b2_id).unwrap(); + + assert!(b.selected_for_changes); + assert!(!b2.selected_for_changes); + + controller + .unapply_virtual_branch(project_id, &b_id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + + assert_eq!(branches.len(), 2); + assert_eq!(branches[0].id, b.id); + assert!(!branches[0].selected_for_changes); + assert!(!branches[0].active); + assert_eq!(branches[1].id, b2.id); + assert!(branches[1].selected_for_changes); + assert!(branches[1].active); +} + +#[tokio::test] +async fn deleting_selected_branch_selects_anther() { + let Test { + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // first branch should be created as default + let b_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + // if default branch exists, new branch should not be created as default + let b2_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + + let b = branches.iter().find(|b| b.id == b_id).unwrap(); + + let b2 = branches.iter().find(|b| b.id == b2_id).unwrap(); + + assert!(b.selected_for_changes); + assert!(!b2.selected_for_changes); + + controller + .delete_virtual_branch(project_id, &b_id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, b2.id); + assert!(branches[0].selected_for_changes); +} + +#[tokio::test] +async fn create_virtual_branch_should_set_selected_for_changes() { + let Test { + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // first branch should be created as default + let b_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b_id) + .unwrap(); + assert!(branch.selected_for_changes); + + // if default branch exists, new branch should not be created as default + let b_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b_id) + .unwrap(); + assert!(!branch.selected_for_changes); + + // explicitly don't make this one default + let b_id = controller + .create_virtual_branch( + project_id, + &branch::BranchCreateRequest { + selected_for_changes: Some(false), + ..Default::default() + }, + ) + .await + .unwrap(); + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b_id) + .unwrap(); + assert!(!branch.selected_for_changes); + + // explicitly make this one default + let b_id = controller + .create_virtual_branch( + project_id, + &branch::BranchCreateRequest { + selected_for_changes: Some(true), + ..Default::default() + }, + ) + .await + .unwrap(); + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b_id) + .unwrap(); + assert!(branch.selected_for_changes); +} + +#[tokio::test] +async fn update_virtual_branch_should_reset_selected_for_changes() { + let Test { + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let b1_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + let b1 = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b1_id) + .unwrap(); + assert!(b1.selected_for_changes); + + let b2_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + let b2 = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b2_id) + .unwrap(); + assert!(!b2.selected_for_changes); + + controller + .update_virtual_branch( + project_id, + branch::BranchUpdateRequest { + id: b2_id, + selected_for_changes: Some(true), + ..Default::default() + }, + ) + .await + .unwrap(); + + let b1 = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b1_id) + .unwrap(); + assert!(!b1.selected_for_changes); + + let b2 = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b2_id) + .unwrap(); + assert!(b2.selected_for_changes); +} + +#[tokio::test] +async fn unapply_virtual_branch_should_reset_selected_for_changes() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let b1_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let b1 = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b1_id) + .unwrap(); + assert!(b1.selected_for_changes); + + controller + .unapply_virtual_branch(project_id, &b1_id) + .await + .unwrap(); + + let b1 = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == b1_id) + .unwrap(); + assert!(!b1.selected_for_changes); +} + +#[tokio::test] +async fn hunks_distribution() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches[0].files.len(), 1); + + controller + .create_virtual_branch( + project_id, + &branch::BranchCreateRequest { + selected_for_changes: Some(true), + ..Default::default() + }, + ) + .await + .unwrap(); + std::fs::write(repository.path().join("another_file.txt"), "content").unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[1].files.len(), 1); +} + +#[tokio::test] +async fn applying_first_branch() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + controller + .unapply_virtual_branch(project_id, &branches[0].id) + .await + .unwrap(); + controller + .apply_virtual_branch(project_id, &branches[0].id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].active); + assert!(branches[0].selected_for_changes); +} diff --git a/tests/suite/virtual_branches/set_base_branch.rs b/tests/suite/virtual_branches/set_base_branch.rs new file mode 100644 index 000000000..23dd2da50 --- /dev/null +++ b/tests/suite/virtual_branches/set_base_branch.rs @@ -0,0 +1,235 @@ +use super::*; + +#[tokio::test] +async fn success() { + let Test { + project_id, + controller, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); +} + +mod error { + use super::*; + + #[tokio::test] + async fn missing() { + let Test { + project_id, + controller, + .. + } = &Test::default(); + + assert!(matches!( + controller + .set_base_branch( + project_id, + &git::RemoteRefname::from_str("refs/remotes/origin/missing").unwrap(), + ) + .await + .unwrap_err(), + ControllerError::Action(errors::SetBaseBranchError::BranchNotFound(_)) + )); + } +} + +mod go_back_to_integration { + use pretty_assertions::assert_eq; + + use super::*; + + #[tokio::test] + async fn should_preserve_applied_vbranches() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + std::fs::write(repository.path().join("file.txt"), "one").unwrap(); + let oid_one = repository.commit_all("one"); + std::fs::write(repository.path().join("file.txt"), "two").unwrap(); + repository.commit_all("two"); + repository.push(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let vbranch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + std::fs::write(repository.path().join("another file.txt"), "content").unwrap(); + controller + .create_commit(project_id, &vbranch_id, "one", None, false) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + repository.checkout_commit(oid_one); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, vbranch_id); + assert!(branches[0].active); + } + + #[tokio::test] + async fn from_target_branch_index_conflicts() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + std::fs::write(repository.path().join("file.txt"), "one").unwrap(); + let oid_one = repository.commit_all("one"); + std::fs::write(repository.path().join("file.txt"), "two").unwrap(); + repository.commit_all("two"); + repository.push(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert!(branches.is_empty()); + + repository.checkout_commit(oid_one); + std::fs::write(repository.path().join("file.txt"), "tree").unwrap(); + + assert!(matches!( + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap_err(), + ControllerError::Action(errors::SetBaseBranchError::DirtyWorkingDirectory) + )); + } + + #[tokio::test] + async fn from_target_branch_with_uncommited() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + std::fs::write(repository.path().join("file.txt"), "one").unwrap(); + let oid_one = repository.commit_all("one"); + std::fs::write(repository.path().join("file.txt"), "two").unwrap(); + repository.commit_all("two"); + repository.push(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert!(branches.is_empty()); + + repository.checkout_commit(oid_one); + std::fs::write(repository.path().join("another file.txt"), "tree").unwrap(); + + assert!(matches!( + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .map_err(|error| dbg!(error)) + .unwrap_err(), + ControllerError::Action(errors::SetBaseBranchError::DirtyWorkingDirectory) + )); + } + + #[tokio::test] + async fn from_target_branch_with_commit() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + std::fs::write(repository.path().join("file.txt"), "one").unwrap(); + let oid_one = repository.commit_all("one"); + std::fs::write(repository.path().join("file.txt"), "two").unwrap(); + repository.commit_all("two"); + repository.push(); + + let base = controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert!(branches.is_empty()); + + repository.checkout_commit(oid_one); + std::fs::write(repository.path().join("another file.txt"), "tree").unwrap(); + repository.commit_all("three"); + + let base_two = controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 0); + assert_eq!(base_two, base); + } + + #[tokio::test] + async fn from_target_branch_without_any_changes() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + std::fs::write(repository.path().join("file.txt"), "one").unwrap(); + let oid_one = repository.commit_all("one"); + std::fs::write(repository.path().join("file.txt"), "two").unwrap(); + repository.commit_all("two"); + repository.push(); + + let base = controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert!(branches.is_empty()); + + repository.checkout_commit(oid_one); + + let base_two = controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 0); + assert_eq!(base_two, base); + } +} diff --git a/tests/suite/virtual_branches/squash.rs b/tests/suite/virtual_branches/squash.rs new file mode 100644 index 000000000..52d390fa9 --- /dev/null +++ b/tests/suite/virtual_branches/squash.rs @@ -0,0 +1,356 @@ +use super::*; + +#[tokio::test] +async fn head() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file two.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit two", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file three.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit three", None, false) + .await + .unwrap() + }; + + let commit_four_oid = { + fs::write(repository.path().join("file four.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit four", None, false) + .await + .unwrap() + }; + + controller + .squash(project_id, &branch_id, commit_four_oid) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + let descriptions = branch + .commits + .iter() + .map(|c| c.description.clone()) + .collect::<Vec<_>>(); + assert_eq!( + descriptions, + vec!["commit three\ncommit four", "commit two", "commit one"] + ); +} + +#[tokio::test] +async fn middle() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + let commit_two_oid = { + fs::write(repository.path().join("file two.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit two", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file three.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit three", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file four.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit four", None, false) + .await + .unwrap() + }; + + controller + .squash(project_id, &branch_id, commit_two_oid) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + let descriptions = branch + .commits + .iter() + .map(|c| c.description.clone()) + .collect::<Vec<_>>(); + assert_eq!( + descriptions, + vec!["commit four", "commit three", "commit one\ncommit two"] + ); +} + +#[tokio::test] +async fn forcepush_allowed() { + let Test { + repository, + project_id, + controller, + projects, + .. 
+ } = &Test::default(); + + projects + .update(&projects::UpdateRequest { + id: *project_id, + ok_with_force_push: Some(true), + ..Default::default() + }) + .await + .unwrap(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + let commit_two_oid = { + fs::write(repository.path().join("file two.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit two", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file three.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit three", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file four.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit four", None, false) + .await + .unwrap() + }; + + controller + .squash(project_id, &branch_id, commit_two_oid) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + let descriptions = branch + .commits + .iter() + .map(|c| c.description.clone()) + .collect::<Vec<_>>(); + assert_eq!( + descriptions, + vec!["commit four", "commit three", "commit one\ncommit two"] + ); + assert!(branch.requires_force); +} + +#[tokio::test] +async fn forcepush_forbidden() { + let Test { + repository, + project_id, + controller, + projects, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + projects + .update(&projects::UpdateRequest { + id: *project_id, + ok_with_force_push: Some(false), + ..Default::default() + }) + .await + .unwrap(); + + let commit_two_oid = { + fs::write(repository.path().join("file two.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit two", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file three.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit three", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file four.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit four", None, false) + .await + .unwrap() + }; + + assert!(matches!( + controller + .squash(project_id, &branch_id, commit_two_oid) + .await + .unwrap_err(), + ControllerError::Action(errors::SquashError::ForcePushNotAllowed(_)) + )); +} + +#[tokio::test] +async fn root() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one_oid = { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + assert!(matches!( + controller + .squash(project_id, &branch_id, commit_one_oid) + .await + .unwrap_err(), + ControllerError::Action(errors::SquashError::CantSquashRootCommit) + )); +} diff --git a/tests/suite/virtual_branches/unapply.rs b/tests/suite/virtual_branches/unapply.rs new file mode 100644 index 000000000..7bfd69aaf --- /dev/null +++ b/tests/suite/virtual_branches/unapply.rs @@ -0,0 +1,177 @@ +use super::*; + +#[tokio::test] +async fn unapply_with_data() { + let Test { + project_id, + controller, + repository, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + std::fs::write(repository.path().join("file.txt"), "content").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + controller + .unapply_virtual_branch(project_id, &branches[0].id) + .await + .unwrap(); + + assert!(!repository.path().join("file.txt").exists()); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(!branches[0].active); +} + +#[tokio::test] +async fn conflicting() { + let Test { + project_id, + controller, + repository, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a conflicting branch, and stash it + + std::fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].base_current); + assert!(branches[0].active); + assert_eq!(branches[0].files[0].hunks[0].diff, "@@ -1 +1 @@\n-first\n\\ No newline at end of file\n+conflict\n\\ No newline at end of file\n"); + + controller + .unapply_virtual_branch(project_id, &branches[0].id) + .await + .unwrap(); + + branches[0].id + }; + + { + // update base branch, causing conflict + controller.update_base_branch(project_id).await.unwrap(); + + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "second" + ); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|branch| branch.id == branch_id) + .unwrap(); + assert!(!branch.base_current); + assert!(!branch.active); + } + + { + // apply branch, it should conflict + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + 
assert!(branch.base_current); + assert!(branch.conflicted); + assert_eq!(branch.files[0].hunks[0].diff, "@@ -1 +1,5 @@\n-first\n\\ No newline at end of file\n+<<<<<<< ours\n+conflict\n+=======\n+second\n+>>>>>>> theirs\n"); + } + + { + controller + .unapply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "second" + ); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert!(!branch.active); + assert!(!branch.base_current); + assert!(!branch.conflicted); + assert_eq!(branch.files[0].hunks[0].diff, "@@ -1 +1 @@\n-first\n\\ No newline at end of file\n+conflict\n\\ No newline at end of file\n"); + } +} + +#[tokio::test] +async fn delete_if_empty() { + let Test { + project_id, + controller, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + + controller + .unapply_virtual_branch(project_id, &branches[0].id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 0); +} diff --git a/tests/suite/virtual_branches/unapply_ownership.rs b/tests/suite/virtual_branches/unapply_ownership.rs new file mode 100644 index 000000000..d283bcac2 --- /dev/null +++ b/tests/suite/virtual_branches/unapply_ownership.rs @@ -0,0 +1,61 @@ +use crate::suite::virtual_branches::Test; +use gitbutler::virtual_branches::branch; +use gitbutler::virtual_branches::branch::BranchOwnershipClaims; +use std::fs; + +#[tokio::test] +async fn should_unapply_with_commits() { + let Test { + project_id, 
+ controller, + repository, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write( + repository.path().join("file.txt"), + "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n", + ) + .unwrap(); + controller + .create_commit(project_id, &branch_id, "test", None, false) + .await + .unwrap(); + + // change in the committed hunks leads to hunk locking + fs::write( + repository.path().join("file.txt"), + "_\n2\n3\n4\n5\n6\n7\n8\n9\n_\n", + ) + .unwrap(); + + controller + .unapply_ownership( + project_id, + &"file.txt:1-5,7-11" + .parse::<BranchOwnershipClaims>() + .unwrap(), + ) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + assert!(branch.files.is_empty()); +} diff --git a/tests/suite/virtual_branches/update_base_branch.rs b/tests/suite/virtual_branches/update_base_branch.rs new file mode 100644 index 000000000..30735255d --- /dev/null +++ b/tests/suite/virtual_branches/update_base_branch.rs @@ -0,0 +1,1929 @@ +use super::*; + +mod unapplied_branch { + + use super::*; + + #[tokio::test] + async fn conflicts_with_uncommitted_work() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch that is unapplied and contains not commited conflict + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .unapply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(project_id).await.unwrap(); + + // branch should not be changed. 
+ + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert!(!controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_not_pushed() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + controller + .unapply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(project_id).await.unwrap(); + + // should not change the branch. 
+ + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + assert!(!controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_pushed() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + controller + .unapply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(project_id).await.unwrap(); + + // should not change the branch. 
+ + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + assert!(!controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_not_pushed_fixed_with_more_work() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); + + controller + .unapply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(project_id).await.unwrap(); + + // should rebase upstream, and leave uncommited file as is + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); // TODO: should be true + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert!(!controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); // TODO: should be true + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + 
assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_pushed_fixed_with_more_work() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); + + controller + .unapply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(project_id).await.unwrap(); + + // should not touch the branch + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + 
assert!(!branches[0].active); + assert!(!branches[0].base_current); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!(branches[0].files.len(), 1); + assert!(!controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn no_conflicts() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); + + controller + .create_commit( + project_id, + &branch_id, + "non conflicting commit", + None, + false, + ) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "still no conflicts").unwrap(); + + controller + .unapply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + + branch_id + }; + + { + // fetching remote + controller.update_base_branch(project_id).await.unwrap(); + + // should update branch base + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert!(branches[0].upstream.is_none()); + assert!(controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + 
assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "second" + ); + } + } + + #[tokio::test] + async fn integrated_commit_plus_work() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + repository.commit_all("first"); + repository.push(); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "second").unwrap(); + controller + .create_commit(project_id, &branch_id, "second", None, false) + .await + .unwrap(); + + // more local work in the same branch + fs::write(repository.path().join("file2.txt"), "other").unwrap(); + + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + { + // merge branch upstream + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + repository.merge(&branch.upstream.as_ref().unwrap().name); + repository.fetch(); + } + + controller + .unapply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + branch_id + }; + + { + // fetch remote + controller.update_base_branch(project_id).await.unwrap(); + + // should remove integrated commit, but leave work + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + 
assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert!(branches[0].upstream.is_none()); + assert!(controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "second" + ); + assert_eq!( + std::fs::read_to_string(repository.path().join("file2.txt")).unwrap(), + "other" + ); + } + } + + #[tokio::test] + async fn all_integrated() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "second").unwrap(); + + controller + .create_commit(project_id, &branch_id, "second", None, false) + .await + .unwrap(); + + controller + .unapply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + }; + + { + // fetch remote + controller.update_base_branch(project_id).await.unwrap(); + + // should remove identical branch + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 0); + } + } + + #[tokio::test] + async fn integrate_work_while_being_behind() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // open pr + fs::write(repository.path().join("file2.txt"), "new file").unwrap(); + controller + .create_commit(project_id, &branch_id, "second", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + } + + controller + .unapply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + + { + // merge pr + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0[0] + .clone(); + repository.merge(&branch.upstream.as_ref().unwrap().name); + repository.fetch(); + } + + { + // fetch remote + controller.update_base_branch(project_id).await.unwrap(); + + // just removes integrated branch + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 0); + } + } +} + +mod applied_branch { + + use super::*; + + #[tokio::test] + async fn conflicts_with_uncommitted_work() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + + branch_id + }; + + { + // fetch remote + controller.update_base_branch(project_id).await.unwrap(); + + // should stash conflicing branch + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert!(!controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< 
ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_not_pushed() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(project_id).await.unwrap(); + + // should stash the branch. 
+ + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + assert!(!controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_pushed() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(project_id).await.unwrap(); + + // should stash the branch. 
+ + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].commits.len(), 1); + assert!(!controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_not_pushed_fixed_with_more_work() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(project_id).await.unwrap(); + + // should rebase upstream, and leave uncommited file as is + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); // TODO: should be true + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert!(!controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); // TODO: should be true + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + 
assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + #[tokio::test] + async fn commited_conflict_pushed_fixed_with_more_work() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch with a commit that conflicts with upstream, and work that fixes + // that conflict + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "conflict").unwrap(); + controller + .create_commit(project_id, &branch_id, "conflicting commit", None, false) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); + + branch_id + }; + + { + // when fetching remote + controller.update_base_branch(project_id).await.unwrap(); + + // should merge upstream, and leave uncommited file as is. 
+ + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(!branches[0].base_current); // TODO: should be true + assert_eq!(branches[0].commits.len(), 1); // TODO: should be 2 + assert_eq!(branches[0].files.len(), 1); + assert!(!controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); // TODO: should be true + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" + ); + } + } + + mod no_conflicts_pushed { + use super::*; + + #[tokio::test] + async fn force_push_ok() { + let Test { + repository, + project_id, + controller, + projects, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + projects + .update(&projects::UpdateRequest { + id: *project_id, + ok_with_force_push: Some(true), + ..Default::default() + }) + .await + .unwrap(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); + + controller + .create_commit(project_id, &branch_id, "no conflicts", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "still no conflict").unwrap(); + + branch_id + }; + + { + // fetch remote + controller.update_base_branch(project_id).await.unwrap(); + + // rebases branch, since the branch is pushed and force pushing is + // allowed + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].requires_force); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert!(!branches[0].commits[0].is_remote); + assert!(!branches[0].commits[0].is_integrated); + assert!(controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); + } + } + + #[tokio::test] + async fn force_push_not_ok() { + let Test { + repository, 
+ project_id, + controller, + projects, + .. + } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); + + controller + .create_commit(project_id, &branch_id, "no conflicts", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "still no conflict").unwrap(); + + branch_id + }; + + projects + .update(&projects::UpdateRequest { + id: *project_id, + ok_with_force_push: Some(false), + ..Default::default() + }) + .await + .unwrap(); + + { + // fetch remote + controller.update_base_branch(project_id).await.unwrap(); + + // creates a merge commit, since the branch is pushed + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(!branches[0].requires_force); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 2); + assert!(!branches[0].commits[0].is_remote); + assert!(!branches[0].commits[0].is_integrated); + assert!(branches[0].commits[1].is_remote); + assert!(!branches[0].commits[1].is_integrated); + assert!(controller + .can_apply_virtual_branch(project_id, 
&branch_id) + .await + .unwrap()); + } + } + } + + #[tokio::test] + async fn no_conflicts() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); + + controller + .create_commit(project_id, &branch_id, "no conflicts", None, false) + .await + .unwrap(); + + fs::write(repository.path().join("file2.txt"), "still no conflict").unwrap(); + + branch_id + }; + + { + // fetch remote + controller.update_base_branch(project_id).await.unwrap(); + + // just rebases branch + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert!(controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); + } + + { + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + assert!(branches[0].base_current); + 
assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 1); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "second" + ); + assert_eq!( + std::fs::read_to_string(repository.path().join("file2.txt")).unwrap(), + "still no conflict" + ); + } + } + + #[tokio::test] + async fn integrated_commit_plus_work() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + repository.commit_all("first"); + repository.push(); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "second").unwrap(); + + controller + .create_commit(project_id, &branch_id, "second", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + { + // merge branch upstream + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + repository.merge(&branch.upstream.as_ref().unwrap().name); + repository.fetch(); + } + + // more local work in the same branch + fs::write(repository.path().join("file2.txt"), "other").unwrap(); + + branch_id + }; + + { + // fetch remote + controller.update_base_branch(project_id).await.unwrap(); + + // should remove integrated commit, but leave non integrated work as is + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].base_current); + 
assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert!(controller + .can_apply_virtual_branch(project_id, &branch_id) + .await + .unwrap()); + } + + { + // applying the branch should produce conflict markers + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + assert_eq!( + std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), + "second" + ); + assert_eq!( + std::fs::read_to_string(repository.path().join("file2.txt")).unwrap(), + "other" + ); + } + } + + #[tokio::test] + async fn integrated_with_locked_conflicting_hunks() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write( + repository.path().join("file.txt"), + "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n", + ) + .unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write( + repository.path().join("file.txt"), + "1\n2\n3\n4\n5\n6\n17\n8\n9\n10\n11\n12\n", + ) + .unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + // branch has no conflict + let branch_id = { + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write( + repository.path().join("file.txt"), + "1\n2\n3\n4\n5\n6\n7\n8\n19\n10\n11\n12\n", + ) + .unwrap(); + + controller + .create_commit(project_id, &branch_id, "first", None, false) + .await + .unwrap(); + + branch_id + }; + + // push the branch + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + // another locked conflicing hunk + fs::write( + repository.path().join("file.txt"), + "1\n2\n3\n4\n5\n6\n77\n8\n19\n10\n11\n12\n", + ) + .unwrap(); + + { + // merge branch remotely + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0[0] + .clone(); + repository.merge(&branch.upstream.as_ref().unwrap().name); + } + + repository.fetch(); + + { + controller.update_base_branch(project_id).await.unwrap(); + + // removes integrated commit, leaves non commited work as is + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(!branches[0].active); + assert!(branches[0].commits.is_empty()); + assert!(!branches[0].files.is_empty()); + } + + { + controller + .apply_virtual_branch(project_id, &branch_id) + 
.await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].active); + assert!(branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].files[0].hunks.len(), 1); + assert_eq!(branches[0].files[0].hunks[0].diff, "@@ -4,7 +4,11 @@\n 4\n 5\n 6\n-7\n+<<<<<<< ours\n+77\n+=======\n+17\n+>>>>>>> theirs\n 8\n 19\n 10\n"); + assert_eq!(branches[0].commits.len(), 0); + } + } + + #[tokio::test] + async fn integrated_with_locked_hunks() { + let Test { + repository, + project_id, + controller, + projects, + .. + } = &Test::default(); + + projects + .update(&projects::UpdateRequest { + id: *project_id, + ok_with_force_push: Some(false), + ..Default::default() + }) + .await + .unwrap(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "first").unwrap(); + + controller + .create_commit(project_id, &branch_id, "first", None, false) + .await + .unwrap(); + + branch_id + }; + + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + // another non-locked hunk + fs::write(repository.path().join("file.txt"), "first\nsecond").unwrap(); + + { + // push and merge branch remotely + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0[0] + .clone(); + repository.merge(&branch.upstream.as_ref().unwrap().name); + } + + repository.fetch(); + + { + controller.update_base_branch(project_id).await.unwrap(); + + // removes integrated commit, leaves non commited work as is + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 
1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].commits.is_empty()); + assert!(branches[0].upstream.is_none()); + assert_eq!(branches[0].files.len(), 1); + } + + { + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); // no merge commit + } + } + + #[tokio::test] + async fn integrated_with_non_locked_hunks() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "first").unwrap(); + + controller + .create_commit(project_id, &branch_id, "first", None, false) + .await + .unwrap(); + + branch_id + }; + + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + // another non-locked hunk + fs::write(repository.path().join("another_file.txt"), "first").unwrap(); + + { + // push and merge branch remotely + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0[0] + .clone(); + repository.merge(&branch.upstream.as_ref().unwrap().name); + } + + repository.fetch(); + + { + controller.update_base_branch(project_id).await.unwrap(); + + // removes integrated commit, leaves non commited work as is + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + 
assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch_id); + assert!(branches[0].active); + assert!(branches[0].commits.is_empty()); + assert!(branches[0].upstream.is_none()); + assert!(!branches[0].files.is_empty()); + } + + { + controller + .apply_virtual_branch(project_id, &branch_id) + .await + .unwrap(); + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert!(branches[0].active); + assert!(!branches[0].conflicted); + assert!(branches[0].base_current); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].commits.len(), 0); + } + } + + #[tokio::test] + async fn all_integrated() { + let Test { + repository, + project_id, + controller, + .. + } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + { + // make a branch that conflicts with the remote branch, but doesn't know about it yet + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + fs::write(repository.path().join("file.txt"), "second").unwrap(); + + controller + .create_commit(project_id, &branch_id, "second", None, false) + .await + .unwrap(); + }; + + { + // fetch remote + controller.update_base_branch(project_id).await.unwrap(); + + // just removes integrated branch + + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 0); + } + } + + #[tokio::test] + async fn integrate_work_while_being_behind() { + let Test { + 
repository, + project_id, + controller, + .. + } = &Test::default(); + + // make sure we have an undiscovered commit in the remote branch + { + fs::write(repository.path().join("file.txt"), "first").unwrap(); + let first_commit_oid = repository.commit_all("first"); + fs::write(repository.path().join("file.txt"), "second").unwrap(); + repository.commit_all("second"); + repository.push(); + repository.reset_hard(Some(first_commit_oid)); + } + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + // open pr + fs::write(repository.path().join("file2.txt"), "new file").unwrap(); + controller + .create_commit(project_id, &branch_id, "second", None, false) + .await + .unwrap(); + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + } + + { + // merge pr + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0[0] + .clone(); + repository.merge(&branch.upstream.as_ref().unwrap().name); + repository.fetch(); + } + + { + // fetch remote + controller.update_base_branch(project_id).await.unwrap(); + + // just removes integrated branch + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 0); + } + } +} diff --git a/tests/suite/virtual_branches/update_commit_message.rs b/tests/suite/virtual_branches/update_commit_message.rs new file mode 100644 index 000000000..a5ca0f5d6 --- /dev/null +++ b/tests/suite/virtual_branches/update_commit_message.rs @@ -0,0 +1,364 @@ +use super::*; + +#[tokio::test] +async fn head() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file two.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit two", None, false) + .await + .unwrap() + }; + + let commit_three_oid = { + fs::write(repository.path().join("file three.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit three", None, false) + .await + .unwrap() + }; + + controller + .update_commit_message( + project_id, + &branch_id, + commit_three_oid, + "commit three updated", + ) + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + let descriptions = branch + .commits + .iter() + .map(|c| c.description.clone()) + .collect::>(); + + assert_eq!( + descriptions, + vec!["commit three updated", "commit two", "commit one"] + ); +} + +#[tokio::test] +async fn middle() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + let commit_two_oid = { + fs::write(repository.path().join("file two.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit two", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file three.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit three", None, false) + .await + .unwrap() + }; + + controller + .update_commit_message(project_id, &branch_id, commit_two_oid, "commit two updated") + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + let descriptions = branch + .commits + .iter() + .map(|c| c.description.clone()) + .collect::>(); + assert_eq!( + descriptions, + vec!["commit three", "commit two updated", "commit one"] + ); +} + +#[tokio::test] +async fn forcepush_allowed() { + let Test { + repository, + project_id, + controller, + projects, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + projects + .update(&projects::UpdateRequest { + id: *project_id, + ok_with_force_push: Some(true), + ..Default::default() + }) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one_oid = { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + controller + .update_commit_message(project_id, &branch_id, commit_one_oid, "commit one updated") + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + let descriptions = branch + .commits + .iter() + .map(|c| c.description.clone()) + .collect::>(); + assert_eq!(descriptions, vec!["commit one updated"]); + assert!(branch.requires_force); +} + +#[tokio::test] +async fn forcepush_forbidden() { + let Test { + repository, + project_id, + controller, + projects, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + projects + .update(&projects::UpdateRequest { + id: *project_id, + ok_with_force_push: Some(false), + ..Default::default() + }) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one_oid = { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + controller + .push_virtual_branch(project_id, &branch_id, false, None) + .await + .unwrap(); + + assert!(matches!( + controller + .update_commit_message(project_id, &branch_id, commit_one_oid, "commit one updated",) + .await + .unwrap_err(), + ControllerError::Action(errors::UpdateCommitMessageError::ForcePushNotAllowed(_)) + )); +} + +#[tokio::test] +async fn root() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one_oid = { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file two.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit two", None, false) + .await + .unwrap() + }; + + { + fs::write(repository.path().join("file three.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit three", None, false) + .await + .unwrap() + }; + + controller + .update_commit_message(project_id, &branch_id, commit_one_oid, "commit one updated") + .await + .unwrap(); + + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch_id) + .unwrap(); + + let descriptions = branch + .commits + .iter() + .map(|c| c.description.clone()) + .collect::>(); + assert_eq!( + descriptions, + vec!["commit three", "commit two", "commit one updated"] + ); +} + +#[tokio::test] +async fn empty() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let commit_one_oid = { + fs::write(repository.path().join("file one.txt"), "").unwrap(); + controller + .create_commit(project_id, &branch_id, "commit one", None, false) + .await + .unwrap() + }; + + assert!(matches!( + controller + .update_commit_message(project_id, &branch_id, commit_one_oid, "",) + .await, + Err(ControllerError::Action( + errors::UpdateCommitMessageError::EmptyMessage + )) + )); +} diff --git a/tests/suite/virtual_branches/upstream.rs b/tests/suite/virtual_branches/upstream.rs new file mode 100644 index 000000000..aca22ac38 --- /dev/null +++ b/tests/suite/virtual_branches/upstream.rs @@ -0,0 +1,149 @@ +use super::*; + +#[tokio::test] +async fn detect_upstream_commits() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let oid1 = { + // create first commit + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(project_id, &branch1_id, "commit", None, false) + .await + .unwrap() + }; + + let oid2 = { + // create second commit + fs::write(repository.path().join("file.txt"), "content2").unwrap(); + controller + .create_commit(project_id, &branch1_id, "commit", None, false) + .await + .unwrap() + }; + + // push + controller + .push_virtual_branch(project_id, &branch1_id, false, None) + .await + .unwrap(); + + let oid3 = { + // create third commit + fs::write(repository.path().join("file.txt"), "content3").unwrap(); + controller + .create_commit(project_id, &branch1_id, "commit", None, false) + .await + .unwrap() + }; + + { + // should correctly detect pushed commits + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 3); + assert_eq!(branches[0].commits[0].id, oid3); + assert!(!branches[0].commits[0].is_remote); + assert_eq!(branches[0].commits[1].id, oid2); + assert!(branches[0].commits[1].is_remote); + assert_eq!(branches[0].commits[2].id, oid1); + assert!(branches[0].commits[2].is_remote); + } +} + +#[tokio::test] +async fn detect_integrated_commits() { + let Test { + repository, + project_id, + controller, + .. 
+ } = &Test::default(); + + controller + .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) + .await + .unwrap(); + + let branch1_id = controller + .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) + .await + .unwrap(); + + let oid1 = { + // create first commit + fs::write(repository.path().join("file.txt"), "content").unwrap(); + controller + .create_commit(project_id, &branch1_id, "commit", None, false) + .await + .unwrap() + }; + + let oid2 = { + // create second commit + fs::write(repository.path().join("file.txt"), "content2").unwrap(); + controller + .create_commit(project_id, &branch1_id, "commit", None, false) + .await + .unwrap() + }; + + // push + controller + .push_virtual_branch(project_id, &branch1_id, false, None) + .await + .unwrap(); + + { + // merge branch upstream + let branch = controller + .list_virtual_branches(project_id) + .await + .unwrap() + .0 + .into_iter() + .find(|b| b.id == branch1_id) + .unwrap(); + repository.merge(&branch.upstream.as_ref().unwrap().name); + repository.fetch(); + } + + let oid3 = { + // create third commit + fs::write(repository.path().join("file.txt"), "content3").unwrap(); + controller + .create_commit(project_id, &branch1_id, "commit", None, false) + .await + .unwrap() + }; + + { + // should correctly detect pushed commits + let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].id, branch1_id); + assert_eq!(branches[0].commits.len(), 3); + assert_eq!(branches[0].commits[0].id, oid3); + assert!(!branches[0].commits[0].is_integrated); + assert_eq!(branches[0].commits[1].id, oid2); + assert!(branches[0].commits[1].is_integrated); + assert_eq!(branches[0].commits[2].id, oid1); + assert!(branches[0].commits[2].is_integrated); + } +} diff --git a/tests/types/mod.rs b/tests/types/mod.rs new file mode 100644 index 000000000..2eb45eede --- /dev/null +++ b/tests/types/mod.rs @@ -0,0 
+1,19 @@ +use gitbutler::types::default_true::DefaultTrue; + +#[test] +#[allow(clippy::bool_assert_comparison)] +fn default_true() { + let default_true = DefaultTrue::default(); + assert!(default_true); + assert_eq!(default_true, true); + assert_eq!(!default_true, false); + assert!(!!default_true); + + if !(*default_true) { + unreachable!("default_true is false") + } + + let mut default_true = DefaultTrue::default(); + *default_true = false; + assert!(!default_true); +} diff --git a/tests/virtual_branches/branch/context.rs b/tests/virtual_branches/branch/context.rs new file mode 100644 index 000000000..05601f81e --- /dev/null +++ b/tests/virtual_branches/branch/context.rs @@ -0,0 +1,522 @@ +use gitbutler::git::diff; +use gitbutler::virtual_branches::context::hunk_with_context; + +#[test] +fn replace_line_mid_file() { + let hunk_diff = r#"@@ -8 +8 @@ default = ["serde", "rusqlite"] +-serde = ["dep:serde", "uuid/serde"] ++SERDE = ["dep:serde", "uuid/serde"] +"#; + let with_ctx = hunk_with_context( + hunk_diff, + 8, + 8, + false, + 3, + &file_lines(), + diff::ChangeType::Added, + ); + let expected = r#"@@ -5,7 +5,7 @@ + + [features] + default = ["serde", "rusqlite"] +-serde = ["dep:serde", "uuid/serde"] ++SERDE = ["dep:serde", "uuid/serde"] + rusqlite = ["dep:rusqlite"] + + [dependencies] +"#; + assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected); + assert_eq!(with_ctx.old_start, 5); + assert_eq!(with_ctx.old_lines, 7); + assert_eq!(with_ctx.new_start, 5); + assert_eq!(with_ctx.new_lines, 7); +} + +#[test] +fn replace_line_top_file() { + let hunk_diff = r#"@@ -2 +2 @@ +-name = "gitbutler-core" ++NAME = "gitbutler-core" +"#; + let with_ctx = hunk_with_context( + hunk_diff, + 2, + 2, + false, + 3, + &file_lines(), + diff::ChangeType::Added, + ); + assert_eq!( + with_ctx.diff.replace("\n \n", "\n\n"), + r#"@@ -1,5 +1,5 @@ + [package] +-name = "gitbutler-core" ++NAME = "gitbutler-core" + version = "0.0.0" + edition = "2021" + +"# + ); + 
assert_eq!(with_ctx.old_start, 1); + assert_eq!(with_ctx.old_lines, 5); + assert_eq!(with_ctx.new_start, 1); + assert_eq!(with_ctx.new_lines, 5); +} + +#[test] +fn replace_line_start_file() { + let hunk_diff = "@@ -1 +1 @@ +-[package] ++[PACKAGE] +"; + let with_ctx = hunk_with_context( + hunk_diff, + 1, + 1, + false, + 3, + &file_lines(), + diff::ChangeType::Added, + ); + assert_eq!( + with_ctx.diff.replace("\n \n", "\n\n"), + r#"@@ -1,4 +1,4 @@ +-[package] ++[PACKAGE] + name = "gitbutler-core" + version = "0.0.0" + edition = "2021" +"# + ); + assert_eq!(with_ctx.old_start, 1); + assert_eq!(with_ctx.old_lines, 4); + assert_eq!(with_ctx.new_start, 1); + assert_eq!(with_ctx.new_lines, 4); +} + +#[test] +fn replace_line_bottom_file() { + let hunk_diff = "@@ -13 +13 @@ +-serde = { workspace = true, optional = true } ++SERDE = { workspace = true, optional = true } +"; + let with_ctx = hunk_with_context( + hunk_diff, + 13, + 13, + false, + 3, + &file_lines(), + diff::ChangeType::Added, + ); + assert_eq!( + with_ctx.diff.replace("\n \n", "\n\n"), + r#"@@ -10,5 +10,5 @@ + + [dependencies] + rusqlite = { workspace = true, optional = true } +-serde = { workspace = true, optional = true } ++SERDE = { workspace = true, optional = true } + uuid = { workspace = true, features = ["v4", "fast-rng"] } +"# + ); + assert_eq!(with_ctx.old_start, 10); + assert_eq!(with_ctx.old_lines, 5); + assert_eq!(with_ctx.new_start, 10); + assert_eq!(with_ctx.new_lines, 5); +} + +#[test] +fn replace_with_more_lines() { + let hunk_diff = r#"@@ -8 +8,4 @@ +-serde = ["dep:serde", "uuid/serde"] ++one ++two ++three ++four +"#; + let with_ctx = hunk_with_context( + hunk_diff, + 8, + 8, + false, + 3, + &file_lines(), + diff::ChangeType::Added, + ); + assert_eq!( + with_ctx.diff.replace("\n \n", "\n\n"), + r#"@@ -5,7 +5,10 @@ + + [features] + default = ["serde", "rusqlite"] +-serde = ["dep:serde", "uuid/serde"] ++one ++two ++three ++four + rusqlite = ["dep:rusqlite"] + + [dependencies] +"# + ); + 
assert_eq!(with_ctx.old_start, 5); + assert_eq!(with_ctx.old_lines, 7); + assert_eq!(with_ctx.new_start, 5); + assert_eq!(with_ctx.new_lines, 10); +} + +#[test] +fn replace_with_less_lines() { + let hunk_diff = r#"@@ -7,3 +7 @@ +-default = ["serde", "rusqlite"] +-serde = ["dep:serde", "uuid/serde"] +-rusqlite = ["dep:rusqlite"] ++foo = ["foo"] +"#; + let with_ctx = hunk_with_context( + hunk_diff, + 7, + 7, + false, + 3, + &file_lines(), + diff::ChangeType::Added, + ); + assert_eq!( + with_ctx.diff.replace("\n \n", "\n\n"), + r#"@@ -4,9 +4,7 @@ + edition = "2021" + + [features] +-default = ["serde", "rusqlite"] +-serde = ["dep:serde", "uuid/serde"] +-rusqlite = ["dep:rusqlite"] ++foo = ["foo"] + + [dependencies] + rusqlite = { workspace = true, optional = true } +"# + ); + assert_eq!(with_ctx.old_start, 4); + assert_eq!(with_ctx.old_lines, 9); + assert_eq!(with_ctx.new_start, 4); + assert_eq!(with_ctx.new_lines, 7); +} + +#[test] +fn empty_string_doesnt_panic() { + let hunk_diff = ""; + let with_ctx = hunk_with_context( + hunk_diff, + 1, + 1, + false, + 3, + &file_lines(), + diff::ChangeType::Added, + ); + assert_eq!(with_ctx.diff, ""); +} + +#[test] +fn removed_file() { + let hunk_diff = r#"@@ -1,14 +0,0 @@ +-[package] +-name = "gitbutler-core" +-version = "0.0.0" +-edition = "2021" +- +-[features] +-default = ["serde", "rusqlite"] +-serde = ["dep:serde", "uuid/serde"] +-rusqlite = ["dep:rusqlite"] +- +-[dependencies] +-rusqlite = { workspace = true, optional = true } +-serde = { workspace = true, optional = true } +-uuid = { workspace = true, features = ["v4", "fast-rng"] } +"#; + let with_ctx = hunk_with_context( + hunk_diff, + 1, + 0, + false, + 3, + &file_lines(), + diff::ChangeType::Added, + ); + assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), hunk_diff); + assert_eq!(with_ctx.old_start, 1); + assert_eq!(with_ctx.old_lines, 14); + assert_eq!(with_ctx.new_start, 0); + assert_eq!(with_ctx.new_lines, 0); +} +#[test] +fn new_file() { + let hunk_diff = "@@ -0,0 
+1,5 @@ ++line 1 ++line 2 ++line 3 ++line 4 ++line 5 +"; + let with_ctx = hunk_with_context( + hunk_diff, + 0, + 1, + false, + 3, + &Vec::new(), + diff::ChangeType::Added, + ); + assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), hunk_diff); + assert_eq!(with_ctx.old_start, 0); + assert_eq!(with_ctx.old_lines, 0); + assert_eq!(with_ctx.new_start, 1); + assert_eq!(with_ctx.new_lines, 5); +} + +#[test] +fn only_add_lines() { + let hunk_diff = "@@ -8,0 +9,3 @@ ++one ++two ++three +"; + let with_ctx = hunk_with_context( + hunk_diff, + 8, + 9, + false, + 3, + &file_lines(), + diff::ChangeType::Added, + ); + let expected = r#"@@ -6,6 +6,9 @@ + [features] + default = ["serde", "rusqlite"] + serde = ["dep:serde", "uuid/serde"] ++one ++two ++three + rusqlite = ["dep:rusqlite"] + + [dependencies] +"#; + assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected); + assert_eq!(with_ctx.old_start, 6); + assert_eq!(with_ctx.old_lines, 6); + assert_eq!(with_ctx.new_start, 6); + assert_eq!(with_ctx.new_lines, 9); +} + +#[test] +fn only_add_lines_with_additions_below() { + let hunk_diff = "@@ -8,0 +13,3 @@ ++one ++two ++three +"; + let with_ctx = hunk_with_context( + hunk_diff, + 8, + 13, + false, + 3, + &file_lines(), + diff::ChangeType::Added, + ); + let expected = r#"@@ -6,6 +10,9 @@ + [features] + default = ["serde", "rusqlite"] + serde = ["dep:serde", "uuid/serde"] ++one ++two ++three + rusqlite = ["dep:rusqlite"] + + [dependencies] +"#; + assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected); + assert_eq!(with_ctx.old_start, 6); + assert_eq!(with_ctx.old_lines, 6); + assert_eq!(with_ctx.new_start, 10); + assert_eq!(with_ctx.new_lines, 9); +} + +#[test] +fn only_remove_lines() { + let hunk_diff = r#"@@ -7,3 +6,0 @@ +-default = ["serde", "rusqlite"] +-serde = ["dep:serde", "uuid/serde"] +-rusqlite = ["dep:rusqlite"] +"#; + let expected = r#"@@ -4,9 +4,6 @@ + edition = "2021" + + [features] +-default = ["serde", "rusqlite"] +-serde = ["dep:serde", "uuid/serde"] 
+-rusqlite = ["dep:rusqlite"] + + [dependencies] + rusqlite = { workspace = true, optional = true } +"#; + let with_ctx = hunk_with_context( + hunk_diff, + 7, + 6, + false, + 3, + &file_lines(), + diff::ChangeType::Added, + ); + assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected); + assert_eq!(with_ctx.old_start, 4); + assert_eq!(with_ctx.old_lines, 9); + assert_eq!(with_ctx.new_start, 4); + assert_eq!(with_ctx.new_lines, 6); +} + +#[test] +fn only_remove_lines_with_additions_below() { + let hunk_diff = r#"@@ -7,3 +10,0 @@ +-default = ["serde", "rusqlite"] +-serde = ["dep:serde", "uuid/serde"] +-rusqlite = ["dep:rusqlite"] +"#; + let expected = r#"@@ -4,9 +8,6 @@ + edition = "2021" + + [features] +-default = ["serde", "rusqlite"] +-serde = ["dep:serde", "uuid/serde"] +-rusqlite = ["dep:rusqlite"] + + [dependencies] + rusqlite = { workspace = true, optional = true } +"#; + let with_ctx = hunk_with_context( + hunk_diff, + 7, + 10, + false, + 3, + &file_lines(), + diff::ChangeType::Added, + ); + assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected); + assert_eq!(with_ctx.old_start, 4); + assert_eq!(with_ctx.old_lines, 9); + assert_eq!(with_ctx.new_start, 8); + assert_eq!(with_ctx.new_lines, 6); +} + +#[test] +fn weird_testcase() { + let hunk_diff = "@@ -11,2 +10,0 @@ +- +- @waiting_users = User.where(approved: false).count +"; + let with_ctx = hunk_with_context( + hunk_diff, + 11, + 10, + false, + 3, + &file_lines_2(), + diff::ChangeType::Added, + ); + let expected = "@@ -8,8 +8,6 @@ + .order(:created_at) + .page params[:page] + @total = @registrations.total_count +- +- @waiting_users = User.where(approved: false).count + end + + def invite +"; + assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected); + assert_eq!(with_ctx.old_start, 8); + assert_eq!(with_ctx.old_lines, 8); + assert_eq!(with_ctx.new_start, 8); + assert_eq!(with_ctx.new_lines, 6); +} + +#[test] +fn new_line_added() { + let hunk_diff = "@@ -2,0 +3 @@ alias( ++ newstuff +"; + let 
with_ctx = hunk_with_context( + hunk_diff, + 2, + 3, + false, + 3, + &file_lines_3(), + diff::ChangeType::Added, + ); + let expected = r#"@@ -1,4 +1,5 @@ + alias( + name = "rdeps", ++ newstuff + actual = "//java/com/videlov/rdeps:rdeps", + ) +"#; + assert_eq!(with_ctx.diff, expected); +} + +fn file_lines() -> Vec<&'static str> { + let file_lines_before = r#"[package] +name = "gitbutler-core" +version = "0.0.0" +edition = "2021" + +[features] +default = ["serde", "rusqlite"] +serde = ["dep:serde", "uuid/serde"] +rusqlite = ["dep:rusqlite"] + +[dependencies] +rusqlite = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +uuid = { workspace = true, features = ["v4", "fast-rng"] } +"#; + file_lines_before.lines().collect::>() +} + +fn file_lines_2() -> Vec<&'static str> { + let file_lines_before = r#"class Admin::WaitingController < Admin::AdminController + def index + @registrations = Registration.where(invited_at: nil) + if params[:q] + @registrations = @registrations.where("email LIKE ?", "%#{params[:q]}%") + end + @registrations = @registrations.includes(:invite_code) + .order(:created_at) + .page params[:page] + @total = @registrations.total_count + + @waiting_users = User.where(approved: false).count + end + + def invite + if params[:id] + @registrations = Registration.where(id: params[:id]) +"#; + file_lines_before.lines().collect::>() +} + +fn file_lines_3() -> Vec<&'static str> { + let file_lines_before = r#"alias( + name = "rdeps", + actual = "//java/com/videlov/rdeps:rdeps", +) +"#; + file_lines_before.lines().collect::>() +} diff --git a/tests/virtual_branches/branch/file_ownership.rs b/tests/virtual_branches/branch/file_ownership.rs new file mode 100644 index 000000000..ab30481be --- /dev/null +++ b/tests/virtual_branches/branch/file_ownership.rs @@ -0,0 +1,151 @@ +use gitbutler::virtual_branches::branch::OwnershipClaim; + +#[test] +fn parse_ownership() { + let ownership: OwnershipClaim = 
"foo/bar.rs:1-2,4-5".parse().unwrap(); + assert_eq!( + ownership, + OwnershipClaim { + file_path: "foo/bar.rs".into(), + hunks: vec![(1..=2).into(), (4..=5).into()] + } + ); +} + +#[test] +fn parse_ownership_tricky_file_name() { + assert_eq!("file:name:1-2,4-5".parse::().unwrap(), { + OwnershipClaim { + file_path: "file:name".into(), + hunks: vec![(1..=2).into(), (4..=5).into()], + } + }); +} + +#[test] +fn parse_ownership_no_ranges() { + "foo/bar.rs".parse::().unwrap_err(); +} + +#[test] +fn ownership_to_from_string() { + let ownership = OwnershipClaim { + file_path: "foo/bar.rs".into(), + hunks: vec![(1..=2).into(), (4..=5).into()], + }; + assert_eq!(ownership.to_string(), "foo/bar.rs:1-2,4-5".to_string()); + assert_eq!( + ownership.to_string().parse::().unwrap(), + ownership + ); +} + +#[test] +fn plus() { + vec![ + ("file.txt:1-10", "another.txt:1-5", "file.txt:1-10"), + ("file.txt:1-10,3-14", "file.txt:3-14", "file.txt:3-14,1-10"), + ("file.txt:5-10", "file.txt:1-5", "file.txt:1-5,5-10"), + ("file.txt:1-10", "file.txt:1-5", "file.txt:1-5,1-10"), + ("file.txt:1-5,2-2", "file.txt:1-10", "file.txt:1-10,1-5,2-2"), + ( + "file.txt:1-10", + "file.txt:8-15,20-25", + "file.txt:20-25,8-15,1-10", + ), + ("file.txt:1-10", "file.txt:1-10", "file.txt:1-10"), + ("file.txt:1-10,3-15", "file.txt:1-10", "file.txt:1-10,3-15"), + ] + .into_iter() + .map(|(a, b, expected)| { + ( + a.parse::().unwrap(), + b.parse::().unwrap(), + expected.parse::().unwrap(), + ) + }) + .for_each(|(a, b, expected)| { + let got = a.plus(&b); + assert_eq!( + got, expected, + "{} plus {}, expected {}, got {}", + a, b, expected, got + ); + }); +} + +#[test] +fn minus() { + vec![ + ( + "file.txt:1-10", + "another.txt:1-5", + (None, Some("file.txt:1-10")), + ), + ( + "file.txt:1-10", + "file.txt:1-5", + (None, Some("file.txt:1-10")), + ), + ( + "file.txt:1-10", + "file.txt:11-15", + (None, Some("file.txt:1-10")), + ), + ( + "file.txt:1-10", + "file.txt:1-10", + (Some("file.txt:1-10"), None), + ), + ( + 
"file.txt:1-10,11-15", + "file.txt:11-15", + (Some("file.txt:11-15"), Some("file.txt:1-10")), + ), + ( + "file.txt:1-10,11-15,15-17", + "file.txt:1-10,15-17", + (Some("file.txt:1-10,15-17"), Some("file.txt:11-15")), + ), + ] + .into_iter() + .map(|(a, b, expected)| { + ( + a.parse::().unwrap(), + b.parse::().unwrap(), + ( + expected.0.map(|s| s.parse::().unwrap()), + expected.1.map(|s| s.parse::().unwrap()), + ), + ) + }) + .for_each(|(a, b, expected)| { + let got = a.minus(&b); + assert_eq!( + got, expected, + "{} minus {}, expected {:?}, got {:?}", + a, b, expected, got + ); + }); +} + +#[test] +fn equal() { + vec![ + ("file.txt:1-10", "file.txt:1-10", true), + ("file.txt:1-10", "file.txt:1-11", false), + ("file.txt:1-10,11-15", "file.txt:11-15,1-10", false), + ("file.txt:1-10,11-15", "file.txt:1-10,11-15", true), + ] + .into_iter() + .map(|(a, b, expected)| { + ( + a.parse::().unwrap(), + b.parse::().unwrap(), + expected, + ) + }) + .for_each(|(a, b, expected)| { + assert_eq!(a == b, expected, "{} == {}, expected {}", a, b, expected); + }); +} diff --git a/tests/virtual_branches/branch/hunk.rs b/tests/virtual_branches/branch/hunk.rs new file mode 100644 index 000000000..5703a9407 --- /dev/null +++ b/tests/virtual_branches/branch/hunk.rs @@ -0,0 +1,89 @@ +use gitbutler::virtual_branches::branch::Hunk; + +#[test] +fn to_from_string() { + let hunk = "1-2".parse::().unwrap(); + assert_eq!("1-2", hunk.to_string()); +} + +#[test] +fn parse_invalid() { + "3-2".parse::().unwrap_err(); +} + +#[test] +fn parse_with_hash() { + assert_eq!( + "2-3-hash".parse::().unwrap(), + Hunk::new(2, 3, Some("hash".to_string()), None).unwrap() + ); +} + +#[test] +fn parse_with_timestamp() { + assert_eq!( + "2-3--123".parse::().unwrap(), + Hunk::new(2, 3, None, Some(123)).unwrap() + ); +} + +#[test] +fn parse_invalid_2() { + "3-2".parse::().unwrap_err(); +} + +#[test] +fn to_string_no_hash() { + assert_eq!( + "1-2--123", + Hunk::new(1, 2, None, Some(123)).unwrap().to_string() + ); +} + 
+#[test] +fn eq() { + for (a, b, expected) in vec![ + ( + "1-2".parse::().unwrap(), + "1-2".parse::().unwrap(), + true, + ), + ( + "1-2".parse::().unwrap(), + "2-3".parse::().unwrap(), + false, + ), + ( + "1-2-abc".parse::().unwrap(), + "1-2-abc".parse::().unwrap(), + true, + ), + ( + "1-2-abc".parse::().unwrap(), + "2-3-abc".parse::().unwrap(), + false, + ), + ( + "1-2".parse::().unwrap(), + "1-2-abc".parse::().unwrap(), + true, + ), + ( + "1-2-abc".parse::().unwrap(), + "1-2".parse::().unwrap(), + true, + ), + ( + "1-2-abc".parse::().unwrap(), + "1-2-bcd".parse::().unwrap(), + false, + ), + ( + "1-2-abc".parse::().unwrap(), + "2-3-bcd".parse::().unwrap(), + false, + ), + ] { + assert_eq!(a == b, expected, "comapring {} and {}", a, b); + } +} diff --git a/tests/virtual_branches/branch/mod.rs b/tests/virtual_branches/branch/mod.rs new file mode 100644 index 000000000..5264e4f0e --- /dev/null +++ b/tests/virtual_branches/branch/mod.rs @@ -0,0 +1,8 @@ +use gitbutler::virtual_branches::Branch; + +mod context; +mod file_ownership; +mod hunk; +mod ownership; +mod reader; +mod writer; diff --git a/tests/virtual_branches/branch/ownership.rs b/tests/virtual_branches/branch/ownership.rs new file mode 100644 index 000000000..54d68efd3 --- /dev/null +++ b/tests/virtual_branches/branch/ownership.rs @@ -0,0 +1,284 @@ +use gitbutler::virtual_branches::branch::{ + reconcile_claims, BranchOwnershipClaims, Hunk, OwnershipClaim, +}; +use gitbutler::virtual_branches::Branch; + +use std::{path::PathBuf, vec}; + +#[test] +fn reconcile_ownership_simple() { + let branch_a = Branch { + name: "a".to_string(), + ownership: BranchOwnershipClaims { + claims: vec![OwnershipClaim { + file_path: PathBuf::from("foo"), + hunks: vec![ + Hunk { + start: 1, + end: 3, + hash: Some("1,3".to_string()), + timestamp_ms: None, + }, + Hunk { + start: 4, + end: 6, + hash: Some("4,6".to_string()), + timestamp_ms: None, + }, + ], + }], + }, + applied: true, + ..Default::default() + }; + let branch_b = Branch { 
+ name: "b".to_string(), + ownership: BranchOwnershipClaims { + claims: vec![OwnershipClaim { + file_path: PathBuf::from("foo"), + hunks: vec![Hunk { + start: 7, + end: 9, + hash: Some("7,9".to_string()), + timestamp_ms: None, + }], + }], + }, + applied: true, + ..Default::default() + }; + let all_branches: Vec = vec![branch_a.clone(), branch_b.clone()]; + let claim: Vec = vec![OwnershipClaim { + file_path: PathBuf::from("foo"), + hunks: vec![ + Hunk { + start: 4, + end: 6, + hash: Some("4,6".to_string()), + timestamp_ms: None, + }, + Hunk { + start: 7, + end: 9, + hash: Some("9,7".to_string()), + timestamp_ms: None, + }, + ], + }]; + let claim_outcomes = reconcile_claims(all_branches.clone(), &branch_b, &claim).unwrap(); + assert_eq!(claim_outcomes.len(), all_branches.len()); + assert_eq!(claim_outcomes[0].updated_branch.id, branch_a.id); + assert_eq!(claim_outcomes[1].updated_branch.id, branch_b.id); + + assert_eq!( + claim_outcomes[0].updated_branch.ownership, + BranchOwnershipClaims { + claims: vec![OwnershipClaim { + file_path: PathBuf::from("foo"), + hunks: vec![Hunk { + start: 1, + end: 3, + hash: Some("1,3".to_string()), + timestamp_ms: None, + },], + }], + } + ); + + assert_eq!( + claim_outcomes[1].updated_branch.ownership, + BranchOwnershipClaims { + claims: vec![OwnershipClaim { + file_path: PathBuf::from("foo"), + hunks: vec![ + Hunk { + start: 4, + end: 6, + hash: Some("4,6".to_string()), + timestamp_ms: None, + }, + Hunk { + start: 7, + end: 9, + hash: Some("9,7".to_string()), + timestamp_ms: None, + }, + ], + }], + } + ); +} + +#[test] +fn ownership() { + let ownership = "src/main.rs:0-100\nsrc/main2.rs:200-300".parse::(); + assert!(ownership.is_ok()); + let ownership = ownership.unwrap(); + assert_eq!(ownership.claims.len(), 2); + assert_eq!( + ownership.claims[0], + "src/main.rs:0-100".parse::().unwrap() + ); + assert_eq!( + ownership.claims[1], + "src/main2.rs:200-300".parse::().unwrap() + ); +} + +#[test] +fn ownership_2() { + let ownership = 
"src/main.rs:0-100\nsrc/main2.rs:200-300".parse::(); + assert!(ownership.is_ok()); + let ownership = ownership.unwrap(); + assert_eq!(ownership.claims.len(), 2); + assert_eq!( + ownership.claims[0], + "src/main.rs:0-100".parse::().unwrap() + ); + assert_eq!( + ownership.claims[1], + "src/main2.rs:200-300".parse::().unwrap() + ); +} + +#[test] +fn put() { + let mut ownership = "src/main.rs:0-100" + .parse::() + .unwrap(); + ownership.put(&"src/main.rs:200-300".parse::().unwrap()); + assert_eq!(ownership.claims.len(), 1); + assert_eq!( + ownership.claims[0], + "src/main.rs:200-300,0-100" + .parse::() + .unwrap() + ); +} + +#[test] +fn put_2() { + let mut ownership = "src/main.rs:0-100" + .parse::() + .unwrap(); + ownership.put(&"src/main.rs2:200-300".parse::().unwrap()); + assert_eq!(ownership.claims.len(), 2); + assert_eq!( + ownership.claims[0], + "src/main.rs2:200-300".parse::().unwrap() + ); + assert_eq!( + ownership.claims[1], + "src/main.rs:0-100".parse::().unwrap() + ); +} + +#[test] +fn put_3() { + let mut ownership = "src/main.rs:0-100\nsrc/main2.rs:100-200" + .parse::() + .unwrap(); + ownership.put(&"src/main2.rs:200-300".parse::().unwrap()); + assert_eq!(ownership.claims.len(), 2); + assert_eq!( + ownership.claims[0], + "src/main2.rs:200-300,100-200" + .parse::() + .unwrap() + ); + assert_eq!( + ownership.claims[1], + "src/main.rs:0-100".parse::().unwrap() + ); +} + +#[test] +fn put_4() { + let mut ownership = "src/main.rs:0-100\nsrc/main2.rs:100-200" + .parse::() + .unwrap(); + ownership.put(&"src/main2.rs:100-200".parse::().unwrap()); + assert_eq!(ownership.claims.len(), 2); + assert_eq!( + ownership.claims[0], + "src/main2.rs:100-200".parse::().unwrap() + ); + assert_eq!( + ownership.claims[1], + "src/main.rs:0-100".parse::().unwrap() + ); +} + +#[test] +fn put_7() { + let mut ownership = "src/main.rs:100-200" + .parse::() + .unwrap(); + ownership.put(&"src/main.rs:100-200".parse::().unwrap()); + assert_eq!(ownership.claims.len(), 1); + assert_eq!( + 
ownership.claims[0], + "src/main.rs:100-200".parse::().unwrap() + ); +} + +#[test] +fn take_1() { + let mut ownership = "src/main.rs:100-200,200-300" + .parse::() + .unwrap(); + let taken = ownership.take(&"src/main.rs:100-200".parse::().unwrap()); + assert_eq!(ownership.claims.len(), 1); + assert_eq!( + ownership.claims[0], + "src/main.rs:200-300".parse::().unwrap() + ); + assert_eq!( + taken, + vec!["src/main.rs:100-200".parse::().unwrap()] + ); +} + +#[test] +fn equal() { + for (a, b, expected) in vec![ + ( + "src/main.rs:100-200" + .parse::() + .unwrap(), + "src/main.rs:100-200" + .parse::() + .unwrap(), + true, + ), + ( + "src/main.rs:100-200\nsrc/main1.rs:300-400\n" + .parse::() + .unwrap(), + "src/main.rs:100-200" + .parse::() + .unwrap(), + false, + ), + ( + "src/main.rs:100-200\nsrc/main1.rs:300-400\n" + .parse::() + .unwrap(), + "src/main.rs:100-200\nsrc/main1.rs:300-400\n" + .parse::() + .unwrap(), + true, + ), + ( + "src/main.rs:300-400\nsrc/main1.rs:100-200\n" + .parse::() + .unwrap(), + "src/main1.rs:100-200\nsrc/main.rs:300-400\n" + .parse::() + .unwrap(), + false, + ), + ] { + assert_eq!(a == b, expected, "{:#?} == {:#?}", a, b); + } +} diff --git a/tests/virtual_branches/branch/reader.rs b/tests/virtual_branches/branch/reader.rs new file mode 100644 index 000000000..f99c5816d --- /dev/null +++ b/tests/virtual_branches/branch/reader.rs @@ -0,0 +1,98 @@ +use std::sync::atomic::{AtomicUsize, Ordering}; + +use anyhow::Result; +use once_cell::sync::Lazy; + +use crate::{Case, Suite}; +use gitbutler::virtual_branches::branch::BranchOwnershipClaims; +use gitbutler::virtual_branches::{branch, Branch, BranchId}; + +static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); + +fn test_branch() -> Branch { + TEST_INDEX.fetch_add(1, Ordering::Relaxed); + + Branch { + id: BranchId::generate(), + name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), + notes: String::new(), + applied: true, + order: TEST_INDEX.load(Ordering::Relaxed), + upstream: 
Some( + format!( + "refs/remotes/origin/upstream_{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + ), + upstream_head: Some( + format!( + "0123456789abcdef0123456789abcdef0123456{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + ), + created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, + updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, + head: format!( + "0123456789abcdef0123456789abcdef0123456{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + tree: format!( + "0123456789abcdef0123456789abcdef012345{}", + TEST_INDEX.load(Ordering::Relaxed) + 10 + ) + .parse() + .unwrap(), + ownership: BranchOwnershipClaims { + claims: vec![format!("file/{}:1-2", TEST_INDEX.load(Ordering::Relaxed)) + .parse() + .unwrap()], + }, + selected_for_changes: Some(1), + } +} + +#[test] +fn read_not_found() -> Result<()> { + let suite = Suite::default(); + let Case { gb_repository, .. } = &suite.new_case(); + + let session = gb_repository.get_or_create_current_session()?; + let session_reader = gitbutler::sessions::Reader::open(gb_repository, &session)?; + + let reader = branch::Reader::new(&session_reader); + let result = reader.read(&BranchId::generate()); + assert!(result.is_err()); + assert_eq!(result.unwrap_err().to_string(), "file not found"); + + Ok(()) +} + +#[test] +fn read_override() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project, + .. 
+ } = &suite.new_case(); + + let mut branch = test_branch(); + + let writer = branch::Writer::new(gb_repository, project.gb_dir())?; + writer.write(&mut branch)?; + + let session = gb_repository.get_current_session()?.unwrap(); + let session_reader = gitbutler::sessions::Reader::open(gb_repository, &session)?; + + let reader = branch::Reader::new(&session_reader); + + assert_eq!(branch, reader.read(&branch.id).unwrap()); + + Ok(()) +} diff --git a/tests/virtual_branches/branch/writer.rs b/tests/virtual_branches/branch/writer.rs new file mode 100644 index 000000000..9fcc8598a --- /dev/null +++ b/tests/virtual_branches/branch/writer.rs @@ -0,0 +1,218 @@ +use std::{ + fs, + sync::atomic::{AtomicUsize, Ordering}, +}; + +use anyhow::Context; +use gitbutler::virtual_branches::branch; +use once_cell::sync::Lazy; + +use crate::{Case, Suite}; + +use self::branch::BranchId; + +use super::*; + +static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); + +fn new_test_branch() -> Branch { + TEST_INDEX.fetch_add(1, Ordering::Relaxed); + + Branch { + id: BranchId::generate(), + name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), + notes: String::new(), + applied: true, + upstream: Some( + format!( + "refs/remotes/origin/upstream_{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + ), + upstream_head: None, + created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, + updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, + head: format!( + "0123456789abcdef0123456789abcdef0123456{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + tree: format!( + "0123456789abcdef0123456789abcdef012345{}", + TEST_INDEX.load(Ordering::Relaxed) + 10 + ) + .parse() + .unwrap(), + ownership: gitbutler::virtual_branches::branch::BranchOwnershipClaims { + claims: vec![gitbutler::virtual_branches::branch::OwnershipClaim { + file_path: format!("file/{}:1-2", TEST_INDEX.load(Ordering::Relaxed)).into(), + hunks: vec![], + 
}], + }, + order: TEST_INDEX.load(Ordering::Relaxed), + selected_for_changes: Some(1), + } +} + +#[test] +fn write_branch() -> anyhow::Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project, + .. + } = &suite.new_case(); + + let mut branch = new_test_branch(); + + let writer = branch::Writer::new(gb_repository, project.gb_dir())?; + writer.write(&mut branch)?; + + let root = gb_repository + .root() + .join("branches") + .join(branch.id.to_string()); + + assert_eq!( + fs::read_to_string(root.join("meta").join("name").to_str().unwrap()) + .context("Failed to read branch name")?, + branch.name + ); + assert_eq!( + fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())? + .parse::() + .context("Failed to read branch applied")?, + branch.applied + ); + assert_eq!( + fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap()) + .context("Failed to read branch upstream")?, + branch.upstream.clone().unwrap().to_string() + ); + assert_eq!( + fs::read_to_string( + root.join("meta") + .join("created_timestamp_ms") + .to_str() + .unwrap() + ) + .context("Failed to read branch created timestamp")? + .parse::() + .context("Failed to parse branch created timestamp")?, + branch.created_timestamp_ms + ); + assert_eq!( + fs::read_to_string( + root.join("meta") + .join("updated_timestamp_ms") + .to_str() + .unwrap() + ) + .context("Failed to read branch updated timestamp")? + .parse::() + .context("Failed to parse branch updated timestamp")?, + branch.updated_timestamp_ms + ); + + writer.delete(&branch)?; + fs::read_dir(root).unwrap_err(); + + Ok(()) +} + +#[test] +fn should_create_session() -> anyhow::Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project, + .. 
+ } = &suite.new_case(); + + let mut branch = new_test_branch(); + + let writer = branch::Writer::new(gb_repository, project.gb_dir())?; + writer.write(&mut branch)?; + + assert!(gb_repository.get_current_session()?.is_some()); + + Ok(()) +} + +#[test] +fn should_update() -> anyhow::Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project, + .. + } = &suite.new_case(); + + let mut branch = new_test_branch(); + + let writer = branch::Writer::new(gb_repository, project.gb_dir())?; + writer.write(&mut branch)?; + + let mut updated_branch = Branch { + name: "updated_name".to_string(), + applied: false, + upstream: Some("refs/remotes/origin/upstream_updated".parse().unwrap()), + created_timestamp_ms: 2, + updated_timestamp_ms: 3, + ownership: gitbutler::virtual_branches::branch::BranchOwnershipClaims { claims: vec![] }, + ..branch.clone() + }; + + writer.write(&mut updated_branch)?; + + let root = gb_repository + .root() + .join("branches") + .join(branch.id.to_string()); + + assert_eq!( + fs::read_to_string(root.join("meta").join("name").to_str().unwrap()) + .context("Failed to read branch name")?, + updated_branch.name + ); + assert_eq!( + fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())? + .parse::() + .context("Failed to read branch applied")?, + updated_branch.applied + ); + assert_eq!( + fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap()) + .context("Failed to read branch upstream")?, + updated_branch.upstream.unwrap().to_string() + ); + assert_eq!( + fs::read_to_string( + root.join("meta") + .join("created_timestamp_ms") + .to_str() + .unwrap() + ) + .context("Failed to read branch created timestamp")? + .parse::() + .context("Failed to parse branch created timestamp")?, + updated_branch.created_timestamp_ms + ); + assert_eq!( + fs::read_to_string( + root.join("meta") + .join("updated_timestamp_ms") + .to_str() + .unwrap() + ) + .context("Failed to read branch updated timestamp")? 
+ .parse::() + .context("Failed to parse branch updated timestamp")?, + updated_branch.updated_timestamp_ms + ); + + Ok(()) +} diff --git a/tests/virtual_branches/iterator.rs b/tests/virtual_branches/iterator.rs new file mode 100644 index 000000000..df2521773 --- /dev/null +++ b/tests/virtual_branches/iterator.rs @@ -0,0 +1,117 @@ +use std::sync::atomic::{AtomicUsize, Ordering}; + +use anyhow::Result; +use gitbutler::virtual_branches; +use once_cell::sync::Lazy; + +use crate::{Case, Suite}; + +static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); + +fn new_test_branch() -> virtual_branches::branch::Branch { + TEST_INDEX.fetch_add(1, Ordering::Relaxed); + + virtual_branches::branch::Branch { + id: virtual_branches::BranchId::generate(), + name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), + notes: String::new(), + applied: true, + upstream: Some( + format!( + "refs/remotes/origin/upstream_{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + ), + upstream_head: None, + created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, + updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, + head: format!( + "0123456789abcdef0123456789abcdef0123456{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + tree: format!( + "0123456789abcdef0123456789abcdef012345{}", + TEST_INDEX.load(Ordering::Relaxed) + 10 + ) + .parse() + .unwrap(), + ownership: virtual_branches::branch::BranchOwnershipClaims::default(), + order: TEST_INDEX.load(Ordering::Relaxed), + selected_for_changes: Some(1), + } +} + +static TEST_TARGET_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); + +fn new_test_target() -> virtual_branches::target::Target { + virtual_branches::target::Target { + branch: format!( + "refs/remotes/branch name{}/remote name {}", + TEST_TARGET_INDEX.load(Ordering::Relaxed), + TEST_TARGET_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + remote_url: format!("remote url {}", 
TEST_TARGET_INDEX.load(Ordering::Relaxed)), + sha: format!( + "0123456789abcdef0123456789abcdef0123456{}", + TEST_TARGET_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + } +} + +#[test] +fn empty_iterator() -> Result<()> { + let suite = Suite::default(); + let Case { gb_repository, .. } = &suite.new_case(); + + let session = gb_repository.get_or_create_current_session()?; + let session_reader = gitbutler::sessions::Reader::open(gb_repository, &session)?; + + let iter = virtual_branches::Iterator::new(&session_reader)?; + + assert_eq!(iter.count(), 0); + + Ok(()) +} + +#[test] +fn iterate_all() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project, + .. + } = &suite.new_case(); + + let target_writer = + gitbutler::virtual_branches::target::Writer::new(gb_repository, project.gb_dir())?; + target_writer.write_default(&new_test_target())?; + + let branch_writer = + gitbutler::virtual_branches::branch::Writer::new(gb_repository, project.gb_dir())?; + let mut branch_1 = new_test_branch(); + branch_writer.write(&mut branch_1)?; + let mut branch_2 = new_test_branch(); + branch_writer.write(&mut branch_2)?; + let mut branch_3 = new_test_branch(); + branch_writer.write(&mut branch_3)?; + + let session = gb_repository.get_current_session()?.unwrap(); + let session_reader = gitbutler::sessions::Reader::open(gb_repository, &session)?; + + let iter = virtual_branches::Iterator::new(&session_reader)? 
+ .collect::, gitbutler::reader::Error>>()?; + assert_eq!(iter.len(), 3); + assert!(iter.contains(&branch_1)); + assert!(iter.contains(&branch_2)); + assert!(iter.contains(&branch_3)); + + Ok(()) +} diff --git a/tests/virtual_branches/mod.rs b/tests/virtual_branches/mod.rs new file mode 100644 index 000000000..3d255c155 --- /dev/null +++ b/tests/virtual_branches/mod.rs @@ -0,0 +1,2549 @@ +mod branch; +mod iterator; +mod target; + +use std::{collections::HashMap, io::Write}; + +use anyhow::{Context, Result}; +use pretty_assertions::assert_eq; +use std::path::{Path, PathBuf}; +#[cfg(target_family = "unix")] +use std::{ + fs::Permissions, + os::unix::{fs::symlink, prelude::*}, +}; + +use crate::{commit_all, empty_bare_repository, Case, Suite}; +use gitbutler::{ + gb_repository, git, project_repository, reader, sessions, virtual_branches, + virtual_branches::errors::CommitError, +}; + +use gitbutler::virtual_branches::branch::{BranchCreateRequest, BranchOwnershipClaims}; +use gitbutler::virtual_branches::integration::verify_branch; +use gitbutler::virtual_branches::{ + apply_branch, commit, create_virtual_branch, is_remote_branch_mergeable, + is_virtual_branch_mergeable, list_remote_branches, merge_virtual_branch_upstream, + unapply_ownership, update_branch, +}; + +pub fn set_test_target( + gb_repo: &gb_repository::Repository, + project_repository: &project_repository::Repository, +) -> Result<()> { + let (remote_repo, _tmp) = empty_bare_repository(); + let mut remote = project_repository + .git_repository + .remote( + "origin", + &remote_repo.path().to_str().unwrap().parse().unwrap(), + ) + .expect("failed to add remote"); + remote.push(&["refs/heads/master:refs/heads/master"], None)?; + + virtual_branches::target::Writer::new(gb_repo, project_repository.project().gb_dir())? 
+ .write_default(&virtual_branches::target::Target { + branch: "refs/remotes/origin/master".parse().unwrap(), + remote_url: remote_repo.path().to_str().unwrap().parse().unwrap(), + sha: remote_repo.head().unwrap().target().unwrap(), + }) + .expect("failed to write target"); + + virtual_branches::integration::update_gitbutler_integration(gb_repo, project_repository) + .expect("failed to update integration"); + + Ok(()) +} + +#[test] +fn commit_on_branch_then_change_file_then_get_status() -> Result<()> { + let suite = Suite::default(); + let Case { + project, + project_repository, + gb_repository, + .. + } = &suite.new_case_with_files(HashMap::from([ + (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"), + (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"), + ])); + + set_test_target(gb_repository, project_repository)?; + + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + std::fs::write( + Path::new(&project.path).join("test.txt"), + "line0\nline1\nline2\nline3\nline4\n", + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch = &branches[0]; + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.commits.len(), 0); + + // commit + commit( + gb_repository, + project_repository, + &branch1_id, + "test commit", + None, + None, + None, + false, + )?; + + // status (no files) + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch = &branches[0]; + assert_eq!(branch.files.len(), 0); + assert_eq!(branch.commits.len(), 1); + + std::fs::write( + Path::new(&project.path).join("test2.txt"), + "line5\nline6\nlineBLAH\nline7\nline8\n", + )?; + + // should have just the last change now, the other line is committed + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, 
project_repository)?; + let branch = &branches[0]; + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.commits.len(), 1); + + Ok(()) +} + +#[test] +fn signed_commit() -> Result<()> { + let suite = Suite::default(); + let Case { + project, + gb_repository, + project_repository, + .. + } = &suite.new_case_with_files(HashMap::from([ + (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"), + (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"), + ])); + + set_test_target(gb_repository, project_repository)?; + + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + std::fs::write( + Path::new(&project.path).join("test.txt"), + "line0\nline1\nline2\nline3\nline4\n", + )?; + + let mut config = project_repository + .git_repository + .config() + .with_context(|| "failed to get config")?; + config.set_str("gitbutler.signCommits", "true")?; + + // commit + commit( + gb_repository, + project_repository, + &branch1_id, + "test commit", + None, + Some(suite.keys.get_or_create()?).as_ref(), + None, + false, + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository).unwrap(); + let commit_id = &branches[0].commits[0].id; + let commit_obj = project_repository.git_repository.find_commit(*commit_id)?; + // check the raw_header contains the string "SSH SIGNATURE" + assert!(commit_obj.raw_header().unwrap().contains("SSH SIGNATURE")); + + Ok(()) +} + +#[test] +fn track_binary_files() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. 
+ } = &suite.new_case(); + + let file_path = Path::new("test.txt"); + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\n", + )?; + let file_path2 = Path::new("test2.txt"); + std::fs::write( + Path::new(&project.path).join(file_path2), + "line5\nline6\nline7\nline8\n", + )?; + // add a binary file + let image_data: [u8; 12] = [ + 255, 0, 0, // Red pixel + 0, 0, 255, // Blue pixel + 255, 255, 0, // Yellow pixel + 0, 255, 0, // Green pixel + ]; + let mut file = std::fs::File::create(Path::new(&project.path).join("image.bin"))?; + file.write_all(&image_data)?; + commit_all(&project_repository.git_repository); + + set_test_target(gb_repository, project_repository)?; + + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + // test file change + std::fs::write( + Path::new(&project.path).join(file_path2), + "line5\nline6\nline7\nline8\nline9\n", + )?; + + // add a binary file + let image_data: [u8; 12] = [ + 255, 0, 0, // Red pixel + 0, 255, 0, // Green pixel + 0, 0, 255, // Blue pixel + 255, 255, 0, // Yellow pixel + ]; + let mut file = std::fs::File::create(Path::new(&project.path).join("image.bin"))?; + file.write_all(&image_data)?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch = &branches[0]; + assert_eq!(branch.files.len(), 2); + let img_file = &branch + .files + .iter() + .find(|b| b.path.as_os_str() == "image.bin") + .unwrap(); + assert!(img_file.binary); + assert_eq!( + img_file.hunks[0].diff, + "944996dd82015a616247c72b251e41661e528ae1" + ); + + // commit + commit( + gb_repository, + project_repository, + &branch1_id, + "test commit", + None, + None, + None, + false, + )?; + + // status (no files) + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository).unwrap(); + let commit_id = 
&branches[0].commits[0].id; + let commit_obj = project_repository.git_repository.find_commit(*commit_id)?; + let tree = commit_obj.tree()?; + let files = tree_to_entry_list(&project_repository.git_repository, &tree); + assert_eq!(files[0].0, "image.bin"); + assert_eq!(files[0].3, "944996dd82015a616247c72b251e41661e528ae1"); + + let image_data: [u8; 12] = [ + 0, 255, 0, // Green pixel + 255, 0, 0, // Red pixel + 255, 255, 0, // Yellow pixel + 0, 0, 255, // Blue pixel + ]; + let mut file = std::fs::File::create(Path::new(&project.path).join("image.bin"))?; + file.write_all(&image_data)?; + + // commit + commit( + gb_repository, + project_repository, + &branch1_id, + "test commit", + None, + None, + None, + false, + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository).unwrap(); + let commit_id = &branches[0].commits[0].id; + // get tree from commit_id + let commit_obj = project_repository.git_repository.find_commit(*commit_id)?; + let tree = commit_obj.tree()?; + let files = tree_to_entry_list(&project_repository.git_repository, &tree); + + assert_eq!(files[0].0, "image.bin"); + assert_eq!(files[0].3, "ea6901a04d1eed6ebf6822f4360bda9f008fa317"); + + Ok(()) +} + +#[test] +fn create_branch_with_ownership() -> Result<()> { + let suite = Suite::default(); + let Case { + project, + project_repository, + gb_repository, + .. 
+ } = &suite.new_case(); + + set_test_target(gb_repository, project_repository)?; + + let file_path = Path::new("test.txt"); + std::fs::write(Path::new(&project.path).join(file_path), "line1\nline2\n").unwrap(); + + let branch0 = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch"); + + virtual_branches::get_status_by_branch(gb_repository, project_repository) + .expect("failed to get status"); + + let current_session = gb_repository.get_or_create_current_session().unwrap(); + let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session).unwrap(); + let branch_reader = virtual_branches::branch::Reader::new(¤t_session_reader); + let branch0 = branch_reader.read(&branch0.id).unwrap(); + + let branch1 = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest { + ownership: Some(branch0.ownership), + ..Default::default() + }, + ) + .expect("failed to create virtual branch"); + + let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) + .expect("failed to get status") + .0; + + let files_by_branch_id = statuses + .iter() + .map(|(branch, files)| (branch.id, files)) + .collect::>(); + + assert_eq!(files_by_branch_id.len(), 2); + assert_eq!(files_by_branch_id[&branch0.id].len(), 0); + assert_eq!(files_by_branch_id[&branch1.id].len(), 1); + + Ok(()) +} + +#[test] +fn create_branch_in_the_middle() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + gb_repository, + .. 
+ } = &suite.new_case(); + + set_test_target(gb_repository, project_repository)?; + + create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch"); + create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch"); + create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest { + order: Some(1), + ..Default::default() + }, + ) + .expect("failed to create virtual branch"); + + let current_session = gb_repository.get_or_create_current_session()?; + let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session)?; + + let mut branches = virtual_branches::Iterator::new(¤t_session_reader)? + .collect::, reader::Error>>() + .expect("failed to read branches"); + branches.sort_by_key(|b| b.order); + assert_eq!(branches.len(), 3); + assert_eq!(branches[0].name, "Virtual branch"); + assert_eq!(branches[1].name, "Virtual branch 2"); + assert_eq!(branches[2].name, "Virtual branch 1"); + + Ok(()) +} + +#[test] +fn create_branch_no_arguments() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + gb_repository, + .. + } = &suite.new_case(); + + set_test_target(gb_repository, project_repository)?; + + create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch"); + + let current_session = gb_repository.get_or_create_current_session()?; + let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session)?; + + let branches = virtual_branches::Iterator::new(¤t_session_reader)? 
+ .collect::, reader::Error>>() + .expect("failed to read branches"); + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].name, "Virtual branch"); + assert!(branches[0].applied); + assert_eq!(branches[0].ownership, BranchOwnershipClaims::default()); + assert_eq!(branches[0].order, 0); + + Ok(()) +} + +#[test] +fn hunk_expantion() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. + } = &suite.new_case(); + + set_test_target(gb_repository, project_repository)?; + + let file_path = Path::new("test.txt"); + std::fs::write(Path::new(&project.path).join(file_path), "line1\nline2\n")?; + + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + let branch2_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) + .expect("failed to get status") + .0; + + let files_by_branch_id = statuses + .iter() + .map(|(branch, files)| (branch.id, files)) + .collect::>(); + + assert_eq!(files_by_branch_id.len(), 2); + assert_eq!(files_by_branch_id[&branch1_id].len(), 1); + assert_eq!(files_by_branch_id[&branch2_id].len(), 0); + + // even though selected branch has changed + update_branch( + gb_repository, + project_repository, + virtual_branches::branch::BranchUpdateRequest { + id: branch1_id, + order: Some(1), + ..Default::default() + }, + )?; + update_branch( + gb_repository, + project_repository, + virtual_branches::branch::BranchUpdateRequest { + id: branch2_id, + order: Some(0), + ..Default::default() + }, + )?; + + // a slightly different hunk should still go to the same branch + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\n", + )?; + + let statuses = 
virtual_branches::get_status_by_branch(gb_repository, project_repository) + .expect("failed to get status") + .0; + let files_by_branch_id = statuses + .iter() + .map(|(branch, files)| (branch.id, files)) + .collect::>(); + + assert_eq!(files_by_branch_id.len(), 2); + assert_eq!(files_by_branch_id[&branch1_id].len(), 1); + assert_eq!(files_by_branch_id[&branch2_id].len(), 0); + + Ok(()) +} + +#[test] +fn get_status_files_by_branch_no_hunks_no_branches() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + gb_repository, + .. + } = &suite.new_case(); + + set_test_target(gb_repository, project_repository)?; + + let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) + .expect("failed to get status") + .0; + + assert_eq!(statuses.len(), 0); + + Ok(()) +} + +#[test] +fn get_status_files_by_branch() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. + } = &suite.new_case(); + + set_test_target(gb_repository, project_repository)?; + + let file_path = Path::new("test.txt"); + std::fs::write(Path::new(&project.path).join(file_path), "line1\nline2\n")?; + + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + let branch2_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) + .expect("failed to get status") + .0; + let files_by_branch_id = statuses + .iter() + .map(|(branch, files)| (branch.id, files)) + .collect::>(); + + assert_eq!(files_by_branch_id.len(), 2); + assert_eq!(files_by_branch_id[&branch1_id].len(), 1); + assert_eq!(files_by_branch_id[&branch2_id].len(), 0); + + Ok(()) +} + +#[test] +fn move_hunks_multiple_sources() -> 
Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. + } = &suite.new_case_with_files(HashMap::from([( + PathBuf::from("test.txt"), + "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\n", + )])); + + set_test_target(gb_repository, project_repository)?; + + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + let branch2_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + let branch3_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + std::fs::write( + Path::new(&project.path).join("test.txt"), + "line0\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\n", + )?; + + let current_session = gb_repository.get_or_create_current_session()?; + let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session)?; + let branch_reader = virtual_branches::branch::Reader::new(¤t_session_reader); + let branch_writer = virtual_branches::branch::Writer::new(gb_repository, project.gb_dir())?; + let mut branch2 = branch_reader.read(&branch2_id)?; + branch2.ownership = BranchOwnershipClaims { + claims: vec!["test.txt:1-5".parse()?], + }; + branch_writer.write(&mut branch2)?; + let mut branch1 = branch_reader.read(&branch1_id)?; + branch1.ownership = BranchOwnershipClaims { + claims: vec!["test.txt:11-15".parse()?], + }; + branch_writer.write(&mut branch1)?; + + let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) + .expect("failed to get status") + .0; + + let files_by_branch_id = statuses + .iter() + .map(|(branch, files)| (branch.id, files)) + .collect::>(); + + 
assert_eq!(files_by_branch_id.len(), 3); + assert_eq!(files_by_branch_id[&branch1_id].len(), 1); + // assert_eq!(files_by_branch_id[&branch1_id][0].hunks.len(), 1); + assert_eq!(files_by_branch_id[&branch2_id].len(), 1); + // assert_eq!(files_by_branch_id[&branch2_id][0].hunks.len(), 1); + assert_eq!(files_by_branch_id[&branch3_id].len(), 0); + + update_branch( + gb_repository, + project_repository, + virtual_branches::branch::BranchUpdateRequest { + id: branch3_id, + ownership: Some("test.txt:1-5,11-15".parse()?), + ..Default::default() + }, + )?; + + let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) + .expect("failed to get status") + .0; + + let files_by_branch_id = statuses + .iter() + .map(|(branch, files)| (branch.id, files)) + .collect::>(); + + assert_eq!(files_by_branch_id.len(), 3); + assert_eq!(files_by_branch_id[&branch1_id].len(), 0); + assert_eq!(files_by_branch_id[&branch2_id].len(), 0); + assert_eq!(files_by_branch_id[&branch3_id].len(), 1); + assert_eq!( + files_by_branch_id[&branch3_id][Path::new("test.txt")].len(), + 2 + ); + assert_eq!( + files_by_branch_id[&branch3_id][Path::new("test.txt")][0].diff, + "@@ -1,3 +1,4 @@\n+line0\n line1\n line2\n line3\n" + ); + assert_eq!( + files_by_branch_id[&branch3_id][Path::new("test.txt")][1].diff, + "@@ -10,3 +11,4 @@ line9\n line10\n line11\n line12\n+line13\n" + ); + Ok(()) +} + +#[test] +fn move_hunks_partial_explicitly() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. 
+ } = &suite.new_case_with_files(HashMap::from([(
+ PathBuf::from("test.txt"),
+ "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\n",
+ )]));
+
+ set_test_target(gb_repository, project_repository)?;
+
+ std::fs::write(
+ Path::new(&project.path).join("test.txt"),
+ "line0\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline14\n",
+ )?;
+
+ let branch1_id = create_virtual_branch(
+ gb_repository,
+ project_repository,
+ &BranchCreateRequest::default(),
+ )
+ .expect("failed to create virtual branch")
+ .id;
+ let branch2_id = create_virtual_branch(
+ gb_repository,
+ project_repository,
+ &BranchCreateRequest::default(),
+ )
+ .expect("failed to create virtual branch")
+ .id;
+
+ let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository)
+ .expect("failed to get status")
+ .0;
+ let files_by_branch_id = statuses
+ .iter()
+ .map(|(branch, files)| (branch.id, files))
+ .collect::<HashMap<_, _>>();
+
+ assert_eq!(files_by_branch_id.len(), 2);
+ assert_eq!(files_by_branch_id[&branch1_id].len(), 1);
+ // assert_eq!(files_by_branch_id[&branch1_id][0].hunks.len(), 2);
+ assert_eq!(files_by_branch_id[&branch2_id].len(), 0);
+
+ update_branch(
+ gb_repository,
+ project_repository,
+ virtual_branches::branch::BranchUpdateRequest {
+ id: branch2_id,
+ ownership: Some("test.txt:1-5".parse()?),
+ ..Default::default()
+ },
+ )?;
+
+ let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository)
+ .expect("failed to get status")
+ .0;
+
+ let files_by_branch_id = statuses
+ .iter()
+ .map(|(branch, files)| (branch.id, files))
+ .collect::<HashMap<_, _>>();
+
+ assert_eq!(files_by_branch_id.len(), 2);
+ assert_eq!(files_by_branch_id[&branch1_id].len(), 1);
+ assert_eq!(
+ files_by_branch_id[&branch1_id][Path::new("test.txt")].len(),
+ 1
+ );
+ assert_eq!(
+ files_by_branch_id[&branch1_id][Path::new("test.txt")][0].diff,
+ "@@ -11,3 +12,4 @@ line10\n line11\n
line12\n line13\n+line14\n" + ); + + assert_eq!(files_by_branch_id[&branch2_id].len(), 1); + assert_eq!( + files_by_branch_id[&branch2_id][Path::new("test.txt")].len(), + 1 + ); + assert_eq!( + files_by_branch_id[&branch2_id][Path::new("test.txt")][0].diff, + "@@ -1,3 +1,4 @@\n+line0\n line1\n line2\n line3\n" + ); + + Ok(()) +} + +#[test] +fn add_new_hunk_to_the_end() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. + } = &suite.new_case_with_files(HashMap::from([( + PathBuf::from("test.txt"), + "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline13\nline14\n", + )])); + + set_test_target(gb_repository, project_repository)?; + + std::fs::write( + Path::new(&project.path).join("test.txt"), + "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline14\nline15\n", + )?; + + create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch"); + + let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) + .expect("failed to get status") + .0; + assert_eq!( + statuses[0].1[Path::new("test.txt")][0].diff, + "@@ -11,5 +11,5 @@ line10\n line11\n line12\n line13\n-line13\n line14\n+line15\n" + ); + + std::fs::write( + Path::new(&project.path).join("test.txt"), + "line0\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline14\nline15\n", + )?; + + let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) + .expect("failed to get status") + .0; + + assert_eq!( + statuses[0].1[Path::new("test.txt")][0].diff, + "@@ -11,5 +12,5 @@ line10\n line11\n line12\n line13\n-line13\n line14\n+line15\n" + ); + assert_eq!( + statuses[0].1[Path::new("test.txt")][1].diff, + "@@ -1,3 +1,4 @@\n+line0\n line1\n line2\n line3\n" + ); + + Ok(()) +} + 
+#[test] +fn merge_vbranch_upstream_clean_rebase() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. + } = &suite.new_case(); + + // create a commit and set the target + let file_path = Path::new("test.txt"); + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\n", + )?; + commit_all(&project_repository.git_repository); + let target_oid = project_repository + .git_repository + .head() + .unwrap() + .target() + .unwrap(); + + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\nupstream\n", + )?; + // add a commit to the target branch it's pointing to so there is something "upstream" + commit_all(&project_repository.git_repository); + let last_push = project_repository + .git_repository + .head() + .unwrap() + .target() + .unwrap(); + + // coworker adds some work + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\nupstream\ncoworker work\n", + )?; + + commit_all(&project_repository.git_repository); + let coworker_work = project_repository + .git_repository + .head() + .unwrap() + .target() + .unwrap(); + + //update repo ref refs/remotes/origin/master to up_target oid + project_repository.git_repository.reference( + &"refs/remotes/origin/master".parse().unwrap(), + coworker_work, + true, + "update target", + )?; + + // revert to our file + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\nupstream\n", + )?; + + set_test_target(gb_repository, project_repository)?; + virtual_branches::target::Writer::new(gb_repository, project_repository.project().gb_dir())? 
+ .write_default(&virtual_branches::target::Target { + branch: "refs/remotes/origin/master".parse().unwrap(), + remote_url: "origin".to_string(), + sha: target_oid, + })?; + + // add some uncommitted work + let file_path2 = Path::new("test2.txt"); + std::fs::write(Path::new(&project.path).join(file_path2), "file2\n")?; + + let remote_branch: git::RemoteRefname = "refs/remotes/origin/master".parse().unwrap(); + let branch_writer = virtual_branches::branch::Writer::new(gb_repository, project.gb_dir())?; + let mut branch = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch"); + branch.upstream = Some(remote_branch.clone()); + branch.head = last_push; + branch_writer + .write(&mut branch) + .context("failed to write target branch after push")?; + + // create the branch + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch1 = &branches[0]; + assert_eq!(branch1.files.len(), 1); + assert_eq!(branch1.commits.len(), 1); + // assert_eq!(branch1.upstream.as_ref().unwrap().commits.len(), 1); + + merge_virtual_branch_upstream( + gb_repository, + project_repository, + &branch1.id, + Some(suite.keys.get_or_create()?).as_ref(), + None, + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch1 = &branches[0]; + + let contents = std::fs::read(Path::new(&project.path).join(file_path))?; + assert_eq!( + "line1\nline2\nline3\nline4\nupstream\ncoworker work\n", + String::from_utf8(contents)? 
+ ); + let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; + assert_eq!("file2\n", String::from_utf8(contents)?); + assert_eq!(branch1.files.len(), 1); + assert_eq!(branch1.commits.len(), 2); + // assert_eq!(branch1.upstream.as_ref().unwrap().commits.len(), 0); + + Ok(()) +} + +#[test] +fn merge_vbranch_upstream_conflict() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. + } = &suite.new_case(); + + // create a commit and set the target + let file_path = Path::new("test.txt"); + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\n", + )?; + commit_all(&project_repository.git_repository); + let target_oid = project_repository + .git_repository + .head() + .unwrap() + .target() + .unwrap(); + + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\nupstream\n", + )?; + // add a commit to the target branch it's pointing to so there is something "upstream" + commit_all(&project_repository.git_repository); + let last_push = project_repository + .git_repository + .head() + .unwrap() + .target() + .unwrap(); + + // coworker adds some work + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\nupstream\ncoworker work\n", + )?; + + commit_all(&project_repository.git_repository); + let coworker_work = project_repository + .git_repository + .head() + .unwrap() + .target() + .unwrap(); + + //update repo ref refs/remotes/origin/master to up_target oid + project_repository.git_repository.reference( + &"refs/remotes/origin/master".parse().unwrap(), + coworker_work, + true, + "update target", + )?; + + // revert to our file + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\nupstream\n", + )?; + + set_test_target(gb_repository, project_repository)?; + virtual_branches::target::Writer::new(gb_repository, project.gb_dir())?.write_default( + 
&virtual_branches::target::Target { + branch: "refs/remotes/origin/master".parse().unwrap(), + remote_url: "origin".to_string(), + sha: target_oid, + }, + )?; + + // add some uncommitted work + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\nupstream\nother side\n", + )?; + + let remote_branch: git::RemoteRefname = "refs/remotes/origin/master".parse().unwrap(); + let branch_writer = virtual_branches::branch::Writer::new(gb_repository, project.gb_dir())?; + let mut branch = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch"); + branch.upstream = Some(remote_branch.clone()); + branch.head = last_push; + branch_writer + .write(&mut branch) + .context("failed to write target branch after push")?; + + // create the branch + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch1 = &branches[0]; + + assert_eq!(branch1.files.len(), 1); + assert_eq!(branch1.commits.len(), 1); + // assert_eq!(branch1.upstream.as_ref().unwrap().commits.len(), 1); + + merge_virtual_branch_upstream(gb_repository, project_repository, &branch1.id, None, None)?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch1 = &branches[0]; + let contents = std::fs::read(Path::new(&project.path).join(file_path))?; + + assert_eq!( + "line1\nline2\nline3\nline4\nupstream\n<<<<<<< ours\nother side\n=======\ncoworker work\n>>>>>>> theirs\n", + String::from_utf8(contents)? 
+ ); + + assert_eq!(branch1.files.len(), 1); + assert_eq!(branch1.commits.len(), 1); + assert!(branch1.conflicted); + + // fix the conflict + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\nupstream\nother side\ncoworker work\n", + )?; + + // make gb see the conflict resolution + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + assert!(branches[0].conflicted); + + // commit the merge resolution + commit( + gb_repository, + project_repository, + &branch1.id, + "fix merge conflict", + None, + None, + None, + false, + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch1 = &branches[0]; + assert!(!branch1.conflicted); + assert_eq!(branch1.files.len(), 0); + assert_eq!(branch1.commits.len(), 3); + + // make sure the last commit was a merge commit (2 parents) + let last_id = &branch1.commits[0].id; + let last_commit = project_repository.git_repository.find_commit(*last_id)?; + assert_eq!(last_commit.parent_count(), 2); + + Ok(()) +} + +#[test] +fn unapply_ownership_partial() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. 
+ } = &suite.new_case_with_files(HashMap::from([( + PathBuf::from("test.txt"), + "line1\nline2\nline3\nline4\n", + )])); + + set_test_target(gb_repository, project_repository)?; + + std::fs::write( + Path::new(&project.path).join("test.txt"), + "line1\nline2\nline3\nline4\nbranch1\n", + )?; + + create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch"); + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].files.len(), 1); + assert_eq!(branches[0].ownership.claims.len(), 1); + assert_eq!(branches[0].files[0].hunks.len(), 1); + assert_eq!(branches[0].ownership.claims[0].hunks.len(), 1); + assert_eq!( + std::fs::read_to_string(Path::new(&project.path).join("test.txt"))?, + "line1\nline2\nline3\nline4\nbranch1\n" + ); + + unapply_ownership( + gb_repository, + project_repository, + &"test.txt:2-6".parse().unwrap(), + ) + .unwrap(); + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + assert_eq!(branches.len(), 1); + assert_eq!(branches[0].files.len(), 0); + assert_eq!(branches[0].ownership.claims.len(), 0); + assert_eq!( + std::fs::read_to_string(Path::new(&project.path).join("test.txt"))?, + "line1\nline2\nline3\nline4\n" + ); + + Ok(()) +} + +#[test] +fn unapply_branch() -> Result<()> { + let suite = Suite::default(); + let Case { + project, + project_repository, + gb_repository, + .. 
+ } = &suite.new_case(); + + // create a commit and set the target + let file_path = Path::new("test.txt"); + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\n", + )?; + commit_all(&project_repository.git_repository); + + set_test_target(gb_repository, project_repository)?; + + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\nbranch1\n", + )?; + let file_path2 = Path::new("test2.txt"); + std::fs::write(Path::new(&project.path).join(file_path2), "line5\nline6\n")?; + + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + let branch2_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + update_branch( + gb_repository, + project_repository, + virtual_branches::branch::BranchUpdateRequest { + id: branch2_id, + ownership: Some("test2.txt:1-3".parse()?), + ..Default::default() + }, + )?; + + let contents = std::fs::read(Path::new(&project.path).join(file_path))?; + assert_eq!( + "line1\nline2\nline3\nline4\nbranch1\n", + String::from_utf8(contents)? 
+ ); + let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; + assert_eq!("line5\nline6\n", String::from_utf8(contents)?); + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + assert_eq!(branch.files.len(), 1); + assert!(branch.active); + + virtual_branches::unapply_branch(gb_repository, project_repository, &branch1_id)?; + + let contents = std::fs::read(Path::new(&project.path).join(file_path))?; + assert_eq!("line1\nline2\nline3\nline4\n", String::from_utf8(contents)?); + let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; + assert_eq!("line5\nline6\n", String::from_utf8(contents)?); + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + assert_eq!(branch.files.len(), 1); + assert!(!branch.active); + + apply_branch(gb_repository, project_repository, &branch1_id, None, None)?; + let contents = std::fs::read(Path::new(&project.path).join(file_path))?; + assert_eq!( + "line1\nline2\nline3\nline4\nbranch1\n", + String::from_utf8(contents)? + ); + let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; + assert_eq!("line5\nline6\n", String::from_utf8(contents)?); + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + assert_eq!(branch.files.len(), 1); + assert!(branch.active); + + Ok(()) +} + +#[test] +fn apply_unapply_added_deleted_files() -> Result<()> { + let suite = Suite::default(); + let Case { + project, + project_repository, + gb_repository, + .. 
+ } = &suite.new_case(); + + // create a commit and set the target + let file_path = Path::new("test.txt"); + std::fs::write(Path::new(&project.path).join(file_path), "file1\n")?; + let file_path2 = Path::new("test2.txt"); + std::fs::write(Path::new(&project.path).join(file_path2), "file2\n")?; + commit_all(&project_repository.git_repository); + + set_test_target(gb_repository, project_repository)?; + + // rm file_path2, add file3 + std::fs::remove_file(Path::new(&project.path).join(file_path2))?; + let file_path3 = Path::new("test3.txt"); + std::fs::write(Path::new(&project.path).join(file_path3), "file3\n")?; + + let branch2_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + let branch3_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + update_branch( + gb_repository, + project_repository, + virtual_branches::branch::BranchUpdateRequest { + id: branch2_id, + ownership: Some("test2.txt:0-0".parse()?), + ..Default::default() + }, + )?; + update_branch( + gb_repository, + project_repository, + virtual_branches::branch::BranchUpdateRequest { + id: branch3_id, + ownership: Some("test3.txt:1-2".parse()?), + ..Default::default() + }, + )?; + + virtual_branches::unapply_branch(gb_repository, project_repository, &branch2_id)?; + // check that file2 is back + let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; + assert_eq!("file2\n", String::from_utf8(contents)?); + + virtual_branches::unapply_branch(gb_repository, project_repository, &branch3_id)?; + // check that file3 is gone + assert!(!Path::new(&project.path).join(file_path3).exists()); + + apply_branch(gb_repository, project_repository, &branch2_id, None, None)?; + // check that file2 is gone + assert!(!Path::new(&project.path).join(file_path2).exists()); + + 
apply_branch(gb_repository, project_repository, &branch3_id, None, None)?;
+ // check that file3 is back
+ let contents = std::fs::read(Path::new(&project.path).join(file_path3))?;
+ assert_eq!("file3\n", String::from_utf8(contents)?);
+
+ Ok(())
+}
+
+#[test]
+fn detect_mergeable_branch() -> Result<()> {
+ let suite = Suite::default();
+ let Case {
+ project,
+ project_repository,
+ gb_repository,
+ ..
+ } = &suite.new_case();
+
+ // create a commit and set the target
+ let file_path = Path::new("test.txt");
+ std::fs::write(
+ Path::new(&project.path).join(file_path),
+ "line1\nline2\nline3\nline4\n",
+ )?;
+ commit_all(&project_repository.git_repository);
+
+ set_test_target(gb_repository, project_repository)?;
+
+ std::fs::write(
+ Path::new(&project.path).join(file_path),
+ "line1\nline2\nline3\nline4\nbranch1\n",
+ )?;
+ let file_path4 = Path::new("test4.txt");
+ std::fs::write(Path::new(&project.path).join(file_path4), "line5\nline6\n")?;
+
+ let branch1_id = create_virtual_branch(
+ gb_repository,
+ project_repository,
+ &BranchCreateRequest::default(),
+ )
+ .expect("failed to create virtual branch")
+ .id;
+ let branch2_id = create_virtual_branch(
+ gb_repository,
+ project_repository,
+ &BranchCreateRequest::default(),
+ )
+ .expect("failed to create virtual branch")
+ .id;
+
+ let current_session = gb_repository.get_or_create_current_session()?;
+ let current_session_reader = sessions::Reader::open(gb_repository, &current_session)?;
+ let branch_reader = virtual_branches::branch::Reader::new(&current_session_reader);
+ let branch_writer = virtual_branches::branch::Writer::new(gb_repository, project.gb_dir())?;
+
+ update_branch(
+ gb_repository,
+ project_repository,
+ virtual_branches::branch::BranchUpdateRequest {
+ id: branch2_id,
+ ownership: Some("test4.txt:1-3".parse()?),
+ ..Default::default()
+ },
+ )
+ .expect("failed to update branch");
+
+ // unapply both branches and create some conflicting ones
+ virtual_branches::unapply_branch(gb_repository,
project_repository, &branch1_id)?; + virtual_branches::unapply_branch(gb_repository, project_repository, &branch2_id)?; + + project_repository + .git_repository + .set_head(&"refs/heads/master".parse().unwrap())?; + project_repository + .git_repository + .checkout_head(Some(&mut git2::build::CheckoutBuilder::default().force()))?; + + // create an upstream remote conflicting commit + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\nupstream\n", + )?; + commit_all(&project_repository.git_repository); + let up_target = project_repository + .git_repository + .head() + .unwrap() + .target() + .unwrap(); + project_repository.git_repository.reference( + &"refs/remotes/origin/remote_branch".parse().unwrap(), + up_target, + true, + "update target", + )?; + + // revert content and write a mergeable branch + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\n", + )?; + let file_path3 = Path::new("test3.txt"); + std::fs::write(Path::new(&project.path).join(file_path3), "file3\n")?; + commit_all(&project_repository.git_repository); + let up_target = project_repository + .git_repository + .head() + .unwrap() + .target() + .unwrap(); + project_repository.git_repository.reference( + &"refs/remotes/origin/remote_branch2".parse().unwrap(), + up_target, + true, + "update target", + )?; + // remove file_path3 + std::fs::remove_file(Path::new(&project.path).join(file_path3))?; + + project_repository + .git_repository + .set_head(&"refs/heads/gitbutler/integration".parse().unwrap())?; + project_repository + .git_repository + .checkout_head(Some(&mut git2::build::CheckoutBuilder::default().force()))?; + + // create branches that conflict with our earlier branches + create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch"); + let branch4_id = create_virtual_branch( + gb_repository, + project_repository, + 
&BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + // branch3 conflicts with branch1 and remote_branch + std::fs::write( + Path::new(&project.path).join(file_path), + "line1\nline2\nline3\nline4\nbranch3\n", + )?; + + // branch4 conflicts with branch2 + let file_path2 = Path::new("test2.txt"); + std::fs::write( + Path::new(&project.path).join(file_path2), + "line1\nline2\nline3\nline4\nbranch4\n", + )?; + + let mut branch4 = branch_reader.read(&branch4_id)?; + branch4.ownership = BranchOwnershipClaims { + claims: vec!["test2.txt:1-6".parse()?], + }; + branch_writer.write(&mut branch4)?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + assert_eq!(branches.len(), 4); + + let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + assert!(!branch1.active); + assert!(!is_virtual_branch_mergeable(gb_repository, project_repository, &branch1.id).unwrap()); + + let branch2 = &branches.iter().find(|b| b.id == branch2_id).unwrap(); + assert!(!branch2.active); + assert!(is_virtual_branch_mergeable(gb_repository, project_repository, &branch2.id).unwrap()); + + let remotes = + list_remote_branches(gb_repository, project_repository).expect("failed to list remotes"); + let _remote1 = &remotes + .iter() + .find(|b| b.name.to_string() == "refs/remotes/origin/remote_branch") + .unwrap(); + assert!(!is_remote_branch_mergeable( + gb_repository, + project_repository, + &"refs/remotes/origin/remote_branch".parse().unwrap() + ) + .unwrap()); + // assert_eq!(remote1.commits.len(), 1); + + let _remote2 = &remotes + .iter() + .find(|b| b.name.to_string() == "refs/remotes/origin/remote_branch2") + .unwrap(); + assert!(is_remote_branch_mergeable( + gb_repository, + project_repository, + &"refs/remotes/origin/remote_branch2".parse().unwrap() + ) + .unwrap()); + // assert_eq!(remote2.commits.len(), 2); + + Ok(()) +} + +#[test] +fn upstream_integrated_vbranch() -> Result<()> { + // 
ok, we need a vbranch with some work and an upstream target that also includes that work, but the base is behind + // plus a branch with work not in upstream so we can see that it is not included in the vbranch + + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. + } = &suite.new_case_with_files(HashMap::from([ + (PathBuf::from("test.txt"), "file1\n"), + (PathBuf::from("test2.txt"), "file2\n"), + (PathBuf::from("test3.txt"), "file3\n"), + ])); + + let base_commit = project_repository + .git_repository + .head() + .unwrap() + .target() + .unwrap(); + + std::fs::write( + Path::new(&project.path).join("test.txt"), + "file1\nversion2\n", + )?; + commit_all(&project_repository.git_repository); + + let upstream_commit = project_repository + .git_repository + .head() + .unwrap() + .target() + .unwrap(); + project_repository.git_repository.reference( + &"refs/remotes/origin/master".parse().unwrap(), + upstream_commit, + true, + "update target", + )?; + + virtual_branches::target::Writer::new(gb_repository, project_repository.project().gb_dir())? 
+ .write_default(&virtual_branches::target::Target { + branch: "refs/remotes/origin/master".parse().unwrap(), + remote_url: "http://origin.com/project".to_string(), + sha: base_commit, + })?; + project_repository + .git_repository + .remote("origin", &"http://origin.com/project".parse().unwrap())?; + virtual_branches::integration::update_gitbutler_integration(gb_repository, project_repository)?; + + // create vbranches, one integrated, one not + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + let branch2_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + let branch3_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + std::fs::write( + Path::new(&project.path).join("test2.txt"), + "file2\nversion2\n", + )?; + + std::fs::write( + Path::new(&project.path).join("test3.txt"), + "file3\nversion2\n", + )?; + + update_branch( + gb_repository, + project_repository, + virtual_branches::branch::BranchUpdateRequest { + id: branch1_id, + name: Some("integrated".to_string()), + ownership: Some("test.txt:1-2".parse()?), + ..Default::default() + }, + )?; + + update_branch( + gb_repository, + project_repository, + virtual_branches::branch::BranchUpdateRequest { + id: branch2_id, + name: Some("not integrated".to_string()), + ownership: Some("test2.txt:1-2".parse()?), + ..Default::default() + }, + )?; + + update_branch( + gb_repository, + project_repository, + virtual_branches::branch::BranchUpdateRequest { + id: branch3_id, + name: Some("not committed".to_string()), + ownership: Some("test3.txt:1-2".parse()?), + ..Default::default() + }, + )?; + + // create a new virtual branch from the remote branch + commit( + gb_repository, + 
project_repository, + &branch1_id, + "integrated commit", + None, + None, + None, + false, + )?; + commit( + gb_repository, + project_repository, + &branch2_id, + "non-integrated commit", + None, + None, + None, + false, + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + + let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + assert!(branch1.commits.iter().any(|c| c.is_integrated)); + assert_eq!(branch1.files.len(), 0); + assert_eq!(branch1.commits.len(), 1); + + let branch2 = &branches.iter().find(|b| b.id == branch2_id).unwrap(); + assert!(!branch2.commits.iter().any(|c| c.is_integrated)); + assert_eq!(branch2.files.len(), 0); + assert_eq!(branch2.commits.len(), 1); + + let branch3 = &branches.iter().find(|b| b.id == branch3_id).unwrap(); + assert!(!branch3.commits.iter().any(|c| c.is_integrated)); + assert_eq!(branch3.files.len(), 1); + assert_eq!(branch3.commits.len(), 0); + + Ok(()) +} + +#[test] +fn commit_same_hunk_twice() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. 
+ } = &suite.new_case_with_files(HashMap::from([( + PathBuf::from("test.txt"), + "line1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", + )])); + + set_test_target(gb_repository, project_repository)?; + + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + std::fs::write( + Path::new(&project.path).join("test.txt"), + "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.files[0].hunks.len(), 1); + assert_eq!(branch.commits.len(), 0); + + // commit + commit( + gb_repository, + project_repository, + &branch1_id, + "first commit to test.txt", + None, + None, + None, + false, + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + + assert_eq!(branch.files.len(), 0, "no files expected"); + + assert_eq!(branch.commits.len(), 1, "file should have been commited"); + assert_eq!(branch.commits[0].files.len(), 1, "hunks expected"); + assert_eq!( + branch.commits[0].files[0].hunks.len(), + 1, + "one hunk should have been commited" + ); + + // update same lines + + std::fs::write( + Path::new(&project.path).join("test.txt"), + "line1\nPATCH1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch = 
&branches.iter().find(|b| b.id == branch1_id).unwrap(); + + assert_eq!(branch.files.len(), 1, "one file should be changed"); + assert_eq!(branch.commits.len(), 1, "commit is still there"); + + commit( + gb_repository, + project_repository, + &branch1_id, + "second commit to test.txt", + None, + None, + None, + false, + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + + assert_eq!( + branch.files.len(), + 0, + "all changes should have been commited" + ); + + assert_eq!(branch.commits.len(), 2, "two commits expected"); + assert_eq!(branch.commits[0].files.len(), 1); + assert_eq!(branch.commits[0].files[0].hunks.len(), 1); + assert_eq!(branch.commits[1].files.len(), 1); + assert_eq!(branch.commits[1].files[0].hunks.len(), 1); + + Ok(()) +} + +#[test] +fn commit_same_file_twice() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. 
+ } = &suite.new_case_with_files(HashMap::from([( + PathBuf::from("test.txt"), + "line1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", + )])); + + set_test_target(gb_repository, project_repository)?; + + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + std::fs::write( + Path::new(&project.path).join("test.txt"), + "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + + assert_eq!(branch.files.len(), 1); + assert_eq!(branch.files[0].hunks.len(), 1); + assert_eq!(branch.commits.len(), 0); + + // commit + commit( + gb_repository, + project_repository, + &branch1_id, + "first commit to test.txt", + None, + None, + None, + false, + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + + assert_eq!(branch.files.len(), 0, "no files expected"); + + assert_eq!(branch.commits.len(), 1, "file should have been commited"); + assert_eq!(branch.commits[0].files.len(), 1, "hunks expected"); + assert_eq!( + branch.commits[0].files[0].hunks.len(), + 1, + "one hunk should have been commited" + ); + + // add second patch + + std::fs::write( + Path::new(&project.path).join("file.txt"), + "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\npatch2\nline11\nline12\n", + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let 
branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + + assert_eq!(branch.files.len(), 1, "one file should be changed"); + assert_eq!(branch.commits.len(), 1, "commit is still there"); + + commit( + gb_repository, + project_repository, + &branch1_id, + "second commit to test.txt", + None, + None, + None, + false, + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + + assert_eq!( + branch.files.len(), + 0, + "all changes should have been commited" + ); + + assert_eq!(branch.commits.len(), 2, "two commits expected"); + assert_eq!(branch.commits[0].files.len(), 1); + assert_eq!(branch.commits[0].files[0].hunks.len(), 1); + assert_eq!(branch.commits[1].files.len(), 1); + assert_eq!(branch.commits[1].files[0].hunks.len(), 1); + + Ok(()) +} + +#[test] +fn commit_partial_by_hunk() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. 
+    } = &suite.new_case_with_files(HashMap::from([(
+        PathBuf::from("test.txt"),
+        "line1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n",
+    )]));
+
+    set_test_target(gb_repository, project_repository)?;
+
+    let branch1_id = create_virtual_branch(
+        gb_repository,
+        project_repository,
+        &BranchCreateRequest::default(),
+    )
+    .expect("failed to create virtual branch")
+    .id;
+
+    std::fs::write(
+        Path::new(&project.path).join("test.txt"),
+        "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\npatch2\nline11\nline12\n",
+    )?;
+
+    let (branches, _, _) =
+        virtual_branches::list_virtual_branches(gb_repository, project_repository)?;
+    let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap();
+
+    assert_eq!(branch.files.len(), 1);
+    assert_eq!(branch.files[0].hunks.len(), 2);
+    assert_eq!(branch.commits.len(), 0);
+
+    // commit
+    commit(
+        gb_repository,
+        project_repository,
+        &branch1_id,
+        "first commit to test.txt",
+        Some(&"test.txt:1-6".parse::<BranchOwnershipClaims>().unwrap()),
+        None,
+        None,
+        false,
+    )?;
+
+    let (branches, _, _) =
+        virtual_branches::list_virtual_branches(gb_repository, project_repository)?;
+    let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap();
+
+    assert_eq!(branch.files.len(), 1);
+    assert_eq!(branch.files[0].hunks.len(), 1);
+    assert_eq!(branch.commits.len(), 1);
+    assert_eq!(branch.commits[0].files.len(), 1);
+    assert_eq!(branch.commits[0].files[0].hunks.len(), 1);
+
+    commit(
+        gb_repository,
+        project_repository,
+        &branch1_id,
+        "second commit to test.txt",
+        Some(&"test.txt:16-22".parse::<BranchOwnershipClaims>().unwrap()),
+        None,
+        None,
+        false,
+    )?;
+
+    let (branches, _, _) =
+        virtual_branches::list_virtual_branches(gb_repository, project_repository)?;
+    let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap();
+
+    assert_eq!(branch.files.len(), 0);
+
assert_eq!(branch.commits.len(), 2); + assert_eq!(branch.commits[0].files.len(), 1); + assert_eq!(branch.commits[0].files[0].hunks.len(), 1); + assert_eq!(branch.commits[1].files.len(), 1); + assert_eq!(branch.commits[1].files[0].hunks.len(), 1); + + Ok(()) +} + +#[test] +fn commit_partial_by_file() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. + } = &suite.new_case_with_files(HashMap::from([ + (PathBuf::from("test.txt"), "file1\n"), + (PathBuf::from("test2.txt"), "file2\n"), + ])); + + let commit1_oid = project_repository + .git_repository + .head() + .unwrap() + .target() + .unwrap(); + let commit1 = project_repository + .git_repository + .find_commit(commit1_oid) + .unwrap(); + + set_test_target(gb_repository, project_repository)?; + + // remove file + std::fs::remove_file(Path::new(&project.path).join("test2.txt"))?; + // add new file + let file_path3 = Path::new("test3.txt"); + std::fs::write(Path::new(&project.path).join(file_path3), "file3\n")?; + + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + // commit + commit( + gb_repository, + project_repository, + &branch1_id, + "branch1 commit", + None, + None, + None, + false, + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + + // branch one test.txt has just the 1st and 3rd hunks applied + let commit2 = &branch1.commits[0].id; + let commit2 = project_repository + .git_repository + .find_commit(*commit2) + .expect("failed to get commit object"); + + let tree = commit1.tree().expect("failed to get tree"); + let file_list = tree_to_file_list(&project_repository.git_repository, &tree); + assert_eq!(file_list, vec!["test.txt", "test2.txt"]); + + // get the tree + let tree = 
commit2.tree().expect("failed to get tree"); + let file_list = tree_to_file_list(&project_repository.git_repository, &tree); + assert_eq!(file_list, vec!["test.txt", "test3.txt"]); + + Ok(()) +} + +#[test] +fn commit_add_and_delete_files() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. + } = &suite.new_case_with_files(HashMap::from([ + (PathBuf::from("test.txt"), "file1\n"), + (PathBuf::from("test2.txt"), "file2\n"), + ])); + + let commit1_oid = project_repository + .git_repository + .head() + .unwrap() + .target() + .unwrap(); + let commit1 = project_repository + .git_repository + .find_commit(commit1_oid) + .unwrap(); + + set_test_target(gb_repository, project_repository)?; + + // remove file + std::fs::remove_file(Path::new(&project.path).join("test2.txt"))?; + // add new file + let file_path3 = Path::new("test3.txt"); + std::fs::write(Path::new(&project.path).join(file_path3), "file3\n")?; + + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + // commit + commit( + gb_repository, + project_repository, + &branch1_id, + "branch1 commit", + None, + None, + None, + false, + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + + // branch one test.txt has just the 1st and 3rd hunks applied + let commit2 = &branch1.commits[0].id; + let commit2 = project_repository + .git_repository + .find_commit(*commit2) + .expect("failed to get commit object"); + + let tree = commit1.tree().expect("failed to get tree"); + let file_list = tree_to_file_list(&project_repository.git_repository, &tree); + assert_eq!(file_list, vec!["test.txt", "test2.txt"]); + + // get the tree + let tree = commit2.tree().expect("failed to get tree"); + let file_list = 
tree_to_file_list(&project_repository.git_repository, &tree); + assert_eq!(file_list, vec!["test.txt", "test3.txt"]); + + Ok(()) +} + +#[test] +#[cfg(target_family = "unix")] +fn commit_executable_and_symlinks() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + project, + gb_repository, + .. + } = &suite.new_case_with_files(HashMap::from([ + (PathBuf::from("test.txt"), "file1\n"), + (PathBuf::from("test2.txt"), "file2\n"), + ])); + + set_test_target(gb_repository, project_repository)?; + + // add symlinked file + let file_path3 = Path::new("test3.txt"); + let src = Path::new(&project.path).join("test2.txt"); + let dst = Path::new(&project.path).join(file_path3); + symlink(src, dst)?; + + // add executable + let file_path4 = Path::new("test4.bin"); + let exec = Path::new(&project.path).join(file_path4); + std::fs::write(&exec, "exec\n")?; + let permissions = std::fs::metadata(&exec)?.permissions(); + let new_permissions = Permissions::from_mode(permissions.mode() | 0o111); // Add execute permission + std::fs::set_permissions(&exec, new_permissions)?; + + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + // commit + commit( + gb_repository, + project_repository, + &branch1_id, + "branch1 commit", + None, + None, + None, + false, + )?; + + let (branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); + + let commit = &branch1.commits[0].id; + let commit = project_repository + .git_repository + .find_commit(*commit) + .expect("failed to get commit object"); + + let tree = commit.tree().expect("failed to get tree"); + + let list = tree_to_entry_list(&project_repository.git_repository, &tree); + assert_eq!(list[0].0, "test.txt"); + assert_eq!(list[0].1, "100644"); + assert_eq!(list[1].0, "test2.txt"); + 
assert_eq!(list[1].1, "100644");
+    assert_eq!(list[2].0, "test3.txt");
+    assert_eq!(list[2].1, "120000");
+    assert_eq!(list[2].2, "test2.txt");
+    assert_eq!(list[3].0, "test4.bin");
+    assert_eq!(list[3].1, "100755");
+
+    Ok(())
+}
+
+fn tree_to_file_list(repository: &git::Repository, tree: &git::Tree) -> Vec<String> {
+    let mut file_list = Vec::new();
+    tree.walk(|_, entry| {
+        let path = entry.name().unwrap();
+        let entry = tree.get_path(Path::new(path)).unwrap();
+        let object = entry.to_object(repository).unwrap();
+        if object.kind() == Some(git2::ObjectType::Blob) {
+            file_list.push(path.to_string());
+        }
+        git::TreeWalkResult::Continue
+    })
+    .expect("failed to walk tree");
+    file_list
+}
+
+fn tree_to_entry_list(
+    repository: &git::Repository,
+    tree: &git::Tree,
+) -> Vec<(String, String, String, String)> {
+    let mut file_list = Vec::new();
+    tree.walk(|_root, entry| {
+        let path = entry.name().unwrap();
+        let entry = tree.get_path(Path::new(path)).unwrap();
+        let object = entry.to_object(repository).unwrap();
+        let blob = object.as_blob().expect("failed to get blob");
+        // convert content to string
+        let octal_mode = format!("{:o}", entry.filemode());
+        if let Ok(content) =
+            std::str::from_utf8(blob.content()).context("failed to convert content to string")
+        {
+            file_list.push((
+                path.to_string(),
+                octal_mode,
+                content.to_string(),
+                blob.id().to_string(),
+            ));
+        } else {
+            file_list.push((
+                path.to_string(),
+                octal_mode,
+                "BINARY".to_string(),
+                blob.id().to_string(),
+            ));
+        }
+        git::TreeWalkResult::Continue
+    })
+    .expect("failed to walk tree");
+    file_list
+}
+
+#[test]
+fn verify_branch_commits_to_integration() -> Result<()> {
+    let suite = Suite::default();
+    let Case {
+        project_repository,
+        project,
+        gb_repository,
+        ..
+ } = &suite.new_case(); + + set_test_target(gb_repository, project_repository)?; + + verify_branch(gb_repository, project_repository).unwrap(); + + // write two commits + let file_path2 = Path::new("test2.txt"); + std::fs::write(Path::new(&project.path).join(file_path2), "file")?; + commit_all(&project_repository.git_repository); + std::fs::write(Path::new(&project.path).join(file_path2), "update")?; + commit_all(&project_repository.git_repository); + + // verify puts commits onto the virtual branch + verify_branch(gb_repository, project_repository).unwrap(); + + // one virtual branch with two commits was created + let (virtual_branches, _, _) = + virtual_branches::list_virtual_branches(gb_repository, project_repository)?; + assert_eq!(virtual_branches.len(), 1); + + let branch = &virtual_branches.first().unwrap(); + assert_eq!(branch.commits.len(), 2); + assert_eq!(branch.commits.len(), 2); + + Ok(()) +} + +#[test] +fn verify_branch_not_integration() -> Result<()> { + let suite = Suite::default(); + let Case { + project_repository, + gb_repository, + .. + } = &suite.new_case(); + + set_test_target(gb_repository, project_repository)?; + + verify_branch(gb_repository, project_repository).unwrap(); + + project_repository + .git_repository + .set_head(&"refs/heads/master".parse().unwrap())?; + + let verify_result = verify_branch(gb_repository, project_repository); + assert!(verify_result.is_err()); + assert_eq!( + verify_result.unwrap_err().to_string(), + "head is refs/heads/master" + ); + + Ok(()) +} + +#[test] +fn pre_commit_hook_rejection() -> Result<()> { + let suite = Suite::default(); + let Case { + project, + gb_repository, + project_repository, + .. 
+ } = &suite.new_case_with_files(HashMap::from([ + (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"), + (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"), + ])); + + set_test_target(gb_repository, project_repository)?; + + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + std::fs::write( + Path::new(&project.path).join("test.txt"), + "line0\nline1\nline2\nline3\nline4\n", + )?; + + let hook = b"#!/bin/sh + echo 'rejected' + exit 1 + "; + + git2_hooks::create_hook( + (&project_repository.git_repository).into(), + git2_hooks::HOOK_PRE_COMMIT, + hook, + ); + + let res = commit( + gb_repository, + project_repository, + &branch1_id, + "test commit", + None, + Some(suite.keys.get_or_create()?).as_ref(), + None, + true, + ); + + let error = res.unwrap_err(); + + assert!(matches!(error, CommitError::CommitHookRejected(_))); + + let CommitError::CommitHookRejected(output) = error else { + unreachable!() + }; + + assert_eq!(&output, "rejected\n"); + + Ok(()) +} + +#[test] +fn post_commit_hook() -> Result<()> { + let suite = Suite::default(); + let Case { + project, + gb_repository, + project_repository, + .. 
+ } = &suite.new_case_with_files(HashMap::from([ + (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"), + (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"), + ])); + + set_test_target(gb_repository, project_repository)?; + + let branch1_id = create_virtual_branch( + gb_repository, + project_repository, + &BranchCreateRequest::default(), + ) + .expect("failed to create virtual branch") + .id; + + std::fs::write( + Path::new(&project.path).join("test.txt"), + "line0\nline1\nline2\nline3\nline4\n", + )?; + + let hook = b"#!/bin/sh + touch hook_ran + "; + + git2_hooks::create_hook( + (&project_repository.git_repository).into(), + git2_hooks::HOOK_POST_COMMIT, + hook, + ); + + let hook_ran_proof = project_repository + .git_repository + .path() + .parent() + .unwrap() + .join("hook_ran"); + + assert!(!hook_ran_proof.exists()); + + commit( + gb_repository, + project_repository, + &branch1_id, + "test commit", + None, + Some(suite.keys.get_or_create()?).as_ref(), + None, + true, + )?; + + assert!(hook_ran_proof.exists()); + + Ok(()) +} + +#[test] +fn commit_msg_hook_rejection() -> Result<()> { + let suite = Suite::default(); + let Case { + project, + gb_repository, + project_repository, + .. 
+    } = &suite.new_case_with_files(HashMap::from([
+        (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"),
+        (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"),
+    ]));
+
+    set_test_target(gb_repository, project_repository)?;
+
+    let branch1_id = create_virtual_branch(
+        gb_repository,
+        project_repository,
+        &BranchCreateRequest::default(),
+    )
+    .expect("failed to create virtual branch")
+    .id;
+
+    std::fs::write(
+        Path::new(&project.path).join("test.txt"),
+        "line0\nline1\nline2\nline3\nline4\n",
+    )?;
+
+    let hook = b"#!/bin/sh
+    echo 'rejected'
+    exit 1
+    ";
+
+    git2_hooks::create_hook(
+        (&project_repository.git_repository).into(),
+        git2_hooks::HOOK_COMMIT_MSG,
+        hook,
+    );
+
+    let res = commit(
+        gb_repository,
+        project_repository,
+        &branch1_id,
+        "test commit",
+        None,
+        Some(suite.keys.get_or_create()?).as_ref(),
+        None,
+        true,
+    );
+
+    let error = res.unwrap_err();
+
+    assert!(matches!(error, CommitError::CommitMsgHookRejected(_)));
+
+    let CommitError::CommitMsgHookRejected(output) = error else {
+        unreachable!()
+    };
+
+    assert_eq!(&output, "rejected\n");
+
+    Ok(())
+}
diff --git a/tests/virtual_branches/target/mod.rs b/tests/virtual_branches/target/mod.rs
new file mode 100644
index 000000000..582c1894a
--- /dev/null
+++ b/tests/virtual_branches/target/mod.rs
@@ -0,0 +1,2 @@
+mod reader;
+mod writer;
diff --git a/tests/virtual_branches/target/reader.rs b/tests/virtual_branches/target/reader.rs
new file mode 100644
index 000000000..f6aa4349d
--- /dev/null
+++ b/tests/virtual_branches/target/reader.rs
@@ -0,0 +1,150 @@
+use gitbutler::virtual_branches::target::Target;
+use gitbutler::virtual_branches::{target, BranchId};
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+use anyhow::Result;
+use once_cell::sync::Lazy;
+
+use crate::{Case, Suite};
+
+static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
+
+fn test_branch() -> gitbutler::virtual_branches::branch::Branch {
+    TEST_INDEX.fetch_add(1, Ordering::Relaxed);
+
+
gitbutler::virtual_branches::branch::Branch { + id: BranchId::generate(), + name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), + notes: String::new(), + applied: true, + upstream: Some( + format!( + "refs/remotes/origin/upstream_{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + ), + upstream_head: None, + created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, + updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, + head: format!( + "0123456789abcdef0123456789abcdef0123456{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + tree: format!( + "0123456789abcdef0123456789abcdef012345{}", + (TEST_INDEX.load(Ordering::Relaxed) + 10) + ) + .parse() + .unwrap(), + ownership: gitbutler::virtual_branches::branch::BranchOwnershipClaims { + claims: vec![gitbutler::virtual_branches::branch::OwnershipClaim { + file_path: format!("file/{}", TEST_INDEX.load(Ordering::Relaxed)).into(), + hunks: vec![], + }], + }, + order: TEST_INDEX.load(Ordering::Relaxed), + selected_for_changes: None, + } +} + +#[test] +fn read_not_found() -> Result<()> { + let suite = Suite::default(); + let Case { gb_repository, .. } = &suite.new_case(); + + let session = gb_repository.get_or_create_current_session()?; + let session_reader = gitbutler::sessions::Reader::open(gb_repository, &session)?; + + let reader = target::Reader::new(&session_reader); + let result = reader.read(&BranchId::generate()); + assert!(result.is_err()); + assert_eq!(result.unwrap_err().to_string(), "file not found"); + + Ok(()) +} + +#[test] +fn read_deprecated_format() -> Result<()> { + let suite = Suite::default(); + let Case { gb_repository, .. 
} = &suite.new_case(); + + let writer = gitbutler::writer::DirWriter::open(gb_repository.root())?; + writer + .write_string("branches/target/name", "origin/master") + .unwrap(); + writer + .write_string( + "branches/target/remote", + "git@github.com:gitbutlerapp/gitbutler.git", + ) + .unwrap(); + writer + .write_string( + "branches/target/sha", + "dd945831869e9593448aa622fa4342bbfb84813d", + ) + .unwrap(); + + let session = gb_repository.get_or_create_current_session()?; + let session_reader = gitbutler::sessions::Reader::open(gb_repository, &session)?; + let reader = target::Reader::new(&session_reader); + + let read = reader.read_default().unwrap(); + assert_eq!(read.branch.branch(), "master"); + assert_eq!(read.branch.remote(), "origin"); + assert_eq!(read.remote_url, "git@github.com:gitbutlerapp/gitbutler.git"); + assert_eq!( + read.sha.to_string(), + "dd945831869e9593448aa622fa4342bbfb84813d" + ); + + Ok(()) +} + +#[test] +fn read_override_target() -> Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project, + .. 
+    } = &suite.new_case();
+
+    let mut branch = test_branch();
+
+    let target = Target {
+        branch: "refs/remotes/remote/branch".parse().unwrap(),
+        remote_url: "remote url".to_string(),
+        sha: "fedcba9876543210fedcba9876543210fedcba98".parse().unwrap(),
+    };
+
+    let default_target = Target {
+        branch: "refs/remotes/default remote/default branch"
+            .parse()
+            .unwrap(),
+        remote_url: "default remote url".to_string(),
+        sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(),
+    };
+
+    let branch_writer =
+        gitbutler::virtual_branches::branch::Writer::new(gb_repository, project.gb_dir())?;
+    branch_writer.write(&mut branch)?;
+
+    let session = gb_repository.get_current_session()?.unwrap();
+    let session_reader = gitbutler::sessions::Reader::open(gb_repository, &session)?;
+
+    let target_writer = target::Writer::new(gb_repository, project.gb_dir())?;
+    let reader = target::Reader::new(&session_reader);
+
+    target_writer.write_default(&default_target)?;
+    assert_eq!(default_target, reader.read(&branch.id)?);
+
+    target_writer.write(&branch.id, &target)?;
+    assert_eq!(target, reader.read(&branch.id)?);
+
+    Ok(())
+}
diff --git a/tests/virtual_branches/target/writer.rs b/tests/virtual_branches/target/writer.rs
new file mode 100644
index 000000000..5fbe22031
--- /dev/null
+++ b/tests/virtual_branches/target/writer.rs
@@ -0,0 +1,212 @@
+use anyhow::Context;
+use std::{
+    fs,
+    sync::atomic::{AtomicUsize, Ordering},
+};
+
+use once_cell::sync::Lazy;
+
+use crate::{Case, Suite};
+use gitbutler::virtual_branches::target::Target;
+use gitbutler::virtual_branches::{branch, target, BranchId};
+
+static TEST_INDEX: Lazy<AtomicUsize> = Lazy::new(|| AtomicUsize::new(0));
+
+fn test_branch() -> branch::Branch {
+    TEST_INDEX.fetch_add(1, Ordering::Relaxed);
+
+    branch::Branch {
+        id: BranchId::generate(),
+        name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)),
+        notes: format!("branch_notes_{}", TEST_INDEX.load(Ordering::Relaxed)),
+        applied: true,
+        created_timestamp_ms:
TEST_INDEX.load(Ordering::Relaxed) as u128, + upstream: Some( + format!( + "refs/remotes/origin/upstream_{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + ), + upstream_head: None, + updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, + head: format!( + "0123456789abcdef0123456789abcdef0123456{}", + TEST_INDEX.load(Ordering::Relaxed) + ) + .parse() + .unwrap(), + tree: format!( + "0123456789abcdef0123456789abcdef012345{}", + TEST_INDEX.load(Ordering::Relaxed) + 10 + ) + .parse() + .unwrap(), + ownership: branch::BranchOwnershipClaims { + claims: vec![branch::OwnershipClaim { + file_path: format!("file/{}", TEST_INDEX.load(Ordering::Relaxed)).into(), + hunks: vec![], + }], + }, + order: TEST_INDEX.load(Ordering::Relaxed), + selected_for_changes: None, + } +} + +#[test] +fn write() -> anyhow::Result<()> { + let suite = Suite::default(); + let Case { + gb_repository, + project, + .. + } = &suite.new_case(); + + let mut branch = test_branch(); + let target = Target { + branch: "refs/remotes/remote name/branch name".parse().unwrap(), + remote_url: "remote url".to_string(), + sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(), + }; + + let branch_writer = branch::Writer::new(gb_repository, project.gb_dir())?; + branch_writer.write(&mut branch)?; + + let target_writer = target::Writer::new(gb_repository, project.gb_dir())?; + target_writer.write(&branch.id, &target)?; + + let root = gb_repository + .root() + .join("branches") + .join(branch.id.to_string()); + + assert_eq!( + fs::read_to_string(root.join("meta").join("name").to_str().unwrap()) + .context("Failed to read branch name")?, + branch.name + ); + assert_eq!( + fs::read_to_string(root.join("target").join("branch_name").to_str().unwrap()) + .context("Failed to read branch target name")?, + format!("{}/{}", target.branch.remote(), target.branch.branch()) + ); + assert_eq!( + fs::read_to_string(root.join("target").join("remote_name").to_str().unwrap()) + 
.context("Failed to read branch target name name")?,
+        target.branch.remote()
+    );
+    assert_eq!(
+        fs::read_to_string(root.join("target").join("remote_url").to_str().unwrap())
+            .context("Failed to read branch target remote url")?,
+        target.remote_url
+    );
+    assert_eq!(
+        fs::read_to_string(root.join("target").join("sha").to_str().unwrap())
+            .context("Failed to read branch target sha")?,
+        target.sha.to_string()
+    );
+
+    assert_eq!(
+        fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())?
+            .parse::<bool>()
+            .context("Failed to read branch applied")?,
+        branch.applied
+    );
+    assert_eq!(
+        fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap())
+            .context("Failed to read branch upstream")?,
+        branch.upstream.unwrap().to_string()
+    );
+    assert_eq!(
+        fs::read_to_string(
+            root.join("meta")
+                .join("created_timestamp_ms")
+                .to_str()
+                .unwrap()
+        )
+        .context("Failed to read branch created timestamp")?
+        .parse::<u128>()
+        .context("Failed to parse branch created timestamp")?,
+        branch.created_timestamp_ms
+    );
+    assert_eq!(
+        fs::read_to_string(
+            root.join("meta")
+                .join("updated_timestamp_ms")
+                .to_str()
+                .unwrap()
+        )
+        .context("Failed to read branch updated timestamp")?
+        .parse::<u128>()
+        .context("Failed to parse branch updated timestamp")?,
+        branch.updated_timestamp_ms
+    );
+
+    Ok(())
+}
+
+#[test]
+fn should_update() -> anyhow::Result<()> {
+    let suite = Suite::default();
+    let Case {
+        gb_repository,
+        project,
+        ..
+ } = &suite.new_case(); + + let mut branch = test_branch(); + let target = Target { + branch: "refs/remotes/remote name/branch name".parse().unwrap(), + remote_url: "remote url".to_string(), + sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(), + }; + + let branch_writer = branch::Writer::new(gb_repository, project.gb_dir())?; + branch_writer.write(&mut branch)?; + let target_writer = target::Writer::new(gb_repository, project.gb_dir())?; + target_writer.write(&branch.id, &target)?; + + let updated_target = Target { + branch: "refs/remotes/updated remote name/updated branch name" + .parse() + .unwrap(), + remote_url: "updated remote url".to_string(), + sha: "fedcba9876543210fedcba9876543210fedcba98".parse().unwrap(), + }; + + target_writer.write(&branch.id, &updated_target)?; + + let root = gb_repository + .root() + .join("branches") + .join(branch.id.to_string()); + + assert_eq!( + fs::read_to_string(root.join("target").join("branch_name").to_str().unwrap()) + .context("Failed to read branch target branch name")?, + format!( + "{}/{}", + updated_target.branch.remote(), + updated_target.branch.branch() + ) + ); + + assert_eq!( + fs::read_to_string(root.join("target").join("remote_name").to_str().unwrap()) + .context("Failed to read branch target remote name")?, + updated_target.branch.remote() + ); + assert_eq!( + fs::read_to_string(root.join("target").join("remote_url").to_str().unwrap()) + .context("Failed to read branch target remote url")?, + updated_target.remote_url + ); + assert_eq!( + fs::read_to_string(root.join("target").join("sha").to_str().unwrap()) + .context("Failed to read branch target sha")?, + updated_target.sha.to_string() + ); + + Ok(()) +} diff --git a/tests/zip/mod.rs b/tests/zip/mod.rs new file mode 100644 index 000000000..52778c7a3 --- /dev/null +++ b/tests/zip/mod.rs @@ -0,0 +1,47 @@ +use gitbutler::zip::Zipper; +use walkdir::WalkDir; + +use std::fs::File; +use std::io::Write; +use tempfile::tempdir; + +#[test] +fn zip_dir() 
{ + let tmp_dir = tempdir().unwrap(); + let tmp_dir_path = tmp_dir.path(); + let file_path = tmp_dir_path.join("test.txt"); + let mut file = File::create(file_path).unwrap(); + file.write_all(b"test").unwrap(); + + let zipper_cache = tempdir().unwrap(); + let zipper = Zipper::new(zipper_cache.path()); + let zip_file_path = zipper.zip(tmp_dir).unwrap(); + assert!(zip_file_path.exists()); +} + +#[test] +fn zip_file() { + let tmp_dir = tempdir().unwrap(); + let tmp_dir_path = tmp_dir.path(); + let file_path = tmp_dir_path.join("test.txt"); + let mut file = File::create(&file_path).unwrap(); + file.write_all(b"test").unwrap(); + + let zipper_cache = tempdir().unwrap(); + let zipper = Zipper::new(zipper_cache.path()); + zipper.zip(file_path).unwrap_err(); +} + +#[test] +fn zip_once() { + let tmp_dir = tempdir().unwrap(); + let tmp_dir_path = tmp_dir.path(); + let file_path = tmp_dir_path.join("test.txt"); + let mut file = File::create(file_path).unwrap(); + file.write_all(b"test").unwrap(); + + let zipper_cache = tempdir().unwrap(); + let zipper = Zipper::new(zipper_cache.path()); + assert_eq!(zipper.zip(&tmp_dir).unwrap(), zipper.zip(&tmp_dir).unwrap()); + assert_eq!(WalkDir::new(tmp_dir).into_iter().count(), 1); +} From 3b89ed50f9fb6d7ab4c800a0c3add4dbb5ccaff5 Mon Sep 17 00:00:00 2001 From: Sebastian Thiel Date: Fri, 29 Mar 2024 17:48:54 +0100 Subject: [PATCH 2/5] minimize library dependencies --- Cargo.lock | 12 ------------ Cargo.toml | 13 ------------- 2 files changed, 25 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6a0e54deb..e9e895945 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1912,12 +1912,9 @@ version = "0.0.0" dependencies = [ "anyhow", "async-trait", - "backoff", "backtrace", "bstr 1.9.1", - "byteorder", "chrono", - "console-subscriber", "diffy", "filetime", "fslock", @@ -1925,15 +1922,10 @@ dependencies = [ "git2", "git2-hooks", "gitbutler-git", - "governor", "itertools 0.12.1", "lazy_static", "log", "md5", - "nonzero_ext", - "notify", - 
"notify-debouncer-full", - "num_cpus", "once_cell", "pretty_assertions", "r2d2", @@ -1946,7 +1938,6 @@ dependencies = [ "rusqlite", "serde", "serde_json", - "sha1", "sha2", "similar", "slug", @@ -1955,11 +1946,8 @@ dependencies = [ "tempfile", "thiserror", "tokio", - "tokio-util", "toml 0.8.12", "tracing", - "tracing-appender", - "tracing-subscriber", "url", "urlencoding", "uuid", diff --git a/Cargo.toml b/Cargo.toml index a8bcb0ed2..cdbb099e4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,27 +17,18 @@ pretty_assertions = "1.4" toml = "0.8.12" anyhow = "1.0.81" async-trait = "0.1.79" -backoff = "0.4.0" backtrace = { version = "0.3.71", optional = true } bstr = "1.9.1" -byteorder = "1.5.0" chrono = { version = "0.4.37", features = ["serde"] } -console-subscriber = "0.2.0" diffy = "0.3.0" filetime = "0.2.23" fslock = "0.2.1" futures = "0.3" git2.workspace = true git2-hooks = "0.3" -governor = "0.6.3" itertools = "0.12" lazy_static = "1.4.0" md5 = "0.7.0" -nonzero_ext = "0.3.0" -notify = { version = "6.0.1" } -notify-debouncer-full = "0.3.1" -num_cpus = "1.16.0" -once_cell = "1.19" r2d2 = "0.8.10" r2d2_sqlite = "0.22.0" rand = "0.8.5" @@ -48,7 +39,6 @@ resolve-path = "0.1.0" rusqlite.workspace = true serde.workspace = true serde_json = { version = "1.0", features = [ "std", "arbitrary_precision" ] } -sha1 = "0.10.6" sha2 = "0.10.8" similar = { version = "2.4.0", features = ["unicode"] } slug = "0.1.5" @@ -57,10 +47,7 @@ ssh2 = { version = "0.9.4", features = ["vendored-openssl"] } log = "^0.4" thiserror.workspace = true tokio = { workspace = true, features = [ "full", "sync" ] } -tokio-util = "0.7.10" tracing = "0.1.40" -tracing-appender = "0.2.3" -tracing-subscriber = "0.3.17" url = { version = "2.5", features = ["serde"] } urlencoding = "2.1.3" uuid.workspace = true From 2dbdc6ea99b3e3128924f3687dcdabef4a7fbfbb Mon Sep 17 00:00:00 2001 From: Sebastian Thiel Date: Fri, 29 Mar 2024 19:23:20 +0100 Subject: [PATCH 3/5] integrate library into `app` Note that small 
`commands` modules are now inlined for ease of use. --- Cargo.lock | 1 + gitbutler-app/Cargo.toml | 1 + gitbutler-app/src/analytics.rs | 2 +- gitbutler-app/src/app.rs | 4 +- gitbutler-app/src/askpass.rs | 72 +- gitbutler-app/src/assets.rs | 204 - gitbutler-app/src/commands.rs | 14 +- gitbutler-app/src/database.rs | 48 - .../src/database/migrations/V0__deltas.sql | 12 - .../src/database/migrations/V1__sessions.sql | 11 - .../src/database/migrations/V2__files.sql | 14 - .../src/database/migrations/V3__bookmarks.sql | 8 - .../migrations/V4__bookmarks_update.sql | 16 - .../migrations/V5__bookmarks_update.sql | 28 - .../V6__sessions_project_id_id_idx.sql | 1 - .../database/migrations/V7__drop_files.sql | 2 - .../migrations/V8__drop_bookmarks.sql | 1 - gitbutler-app/src/dedup.rs | 45 - gitbutler-app/src/deltas.rs | 55 +- gitbutler-app/src/deltas/commands.rs | 41 - gitbutler-app/src/deltas/controller.rs | 33 - gitbutler-app/src/deltas/database.rs | 122 - gitbutler-app/src/deltas/delta.rs | 9 - gitbutler-app/src/deltas/document.rs | 85 - gitbutler-app/src/deltas/operations.rs | 116 - gitbutler-app/src/deltas/reader.rs | 89 - gitbutler-app/src/deltas/writer.rs | 73 - gitbutler-app/src/error.rs | 20 +- gitbutler-app/src/events.rs | 2 +- gitbutler-app/src/fs.rs | 30 - gitbutler-app/src/gb_repository.rs | 3 - gitbutler-app/src/gb_repository/repository.rs | 967 ---- gitbutler-app/src/git.rs | 42 - gitbutler-app/src/git/blob.rs | 17 - gitbutler-app/src/git/branch.rs | 53 - gitbutler-app/src/git/commit.rs | 75 - gitbutler-app/src/git/config.rs | 68 - gitbutler-app/src/git/credentials.rs | 392 -- gitbutler-app/src/git/diff.rs | 421 -- gitbutler-app/src/git/error.rs | 62 - gitbutler-app/src/git/index.rs | 164 - gitbutler-app/src/git/oid.rs | 61 - gitbutler-app/src/git/reference.rs | 64 - gitbutler-app/src/git/reference/refname.rs | 137 - .../src/git/reference/refname/error.rs | 17 - .../src/git/reference/refname/local.rs | 94 - .../src/git/reference/refname/remote.rs | 93 - 
.../src/git/reference/refname/virtual.rs | 65 - gitbutler-app/src/git/remote.rs | 43 - gitbutler-app/src/git/repository.rs | 535 --- gitbutler-app/src/git/show.rs | 22 - gitbutler-app/src/git/signature.rs | 67 - gitbutler-app/src/git/tree.rs | 147 - gitbutler-app/src/git/url.rs | 91 - gitbutler-app/src/git/url/convert.rs | 128 - gitbutler-app/src/git/url/parse.rs | 147 - gitbutler-app/src/git/url/scheme.rs | 54 - gitbutler-app/src/github.rs | 83 +- gitbutler-app/src/github/commands.rs | 80 - gitbutler-app/src/id.rs | 118 - gitbutler-app/src/keys.rs | 34 +- gitbutler-app/src/keys/commands.rs | 27 - gitbutler-app/src/keys/controller.rs | 34 - gitbutler-app/src/keys/key.rs | 127 - gitbutler-app/src/keys/storage.rs | 43 - gitbutler-app/src/lib.rs | 19 +- gitbutler-app/src/lock.rs | 51 - gitbutler-app/src/main.rs | 45 +- gitbutler-app/src/path.rs | 48 - gitbutler-app/src/project_repository.rs | 8 - .../src/project_repository/config.rs | 51 - .../src/project_repository/conflicts.rs | 144 - .../src/project_repository/repository.rs | 697 --- .../src/project_repository/signatures.rs | 22 - gitbutler-app/src/projects.rs | 212 +- gitbutler-app/src/projects/commands.rs | 201 - gitbutler-app/src/projects/controller.rs | 340 -- gitbutler-app/src/projects/project.rs | 112 - gitbutler-app/src/projects/storage.rs | 162 - gitbutler-app/src/reader.rs | 443 -- gitbutler-app/src/sentry.rs | 2 +- gitbutler-app/src/sessions.rs | 53 +- gitbutler-app/src/sessions/commands.rs | 39 - gitbutler-app/src/sessions/controller.rs | 91 - gitbutler-app/src/sessions/database.rs | 182 - gitbutler-app/src/sessions/iterator.rs | 68 - gitbutler-app/src/sessions/reader.rs | 105 - gitbutler-app/src/sessions/session.rs | 126 - gitbutler-app/src/sessions/writer.rs | 108 - gitbutler-app/src/ssh.rs | 67 - gitbutler-app/src/storage.rs | 73 - gitbutler-app/src/types.rs | 1 - gitbutler-app/src/types/default_true.rs | 90 - gitbutler-app/src/users.rs | 87 +- gitbutler-app/src/users/commands.rs | 79 - 
gitbutler-app/src/users/controller.rs | 57 - gitbutler-app/src/users/storage.rs | 46 - gitbutler-app/src/users/user.rs | 35 - gitbutler-app/src/virtual_branches.rs | 551 ++- gitbutler-app/src/virtual_branches/base.rs | 657 --- gitbutler-app/src/virtual_branches/branch.rs | 237 - .../virtual_branches/branch/file_ownership.rs | 178 - .../src/virtual_branches/branch/hunk.rs | 169 - .../src/virtual_branches/branch/ownership.rs | 183 - .../src/virtual_branches/branch/reader.rs | 19 - .../src/virtual_branches/branch/writer.rs | 160 - .../src/virtual_branches/commands.rs | 518 --- gitbutler-app/src/virtual_branches/context.rs | 124 - .../src/virtual_branches/controller.rs | 1112 ----- gitbutler-app/src/virtual_branches/errors.rs | 837 ---- gitbutler-app/src/virtual_branches/files.rs | 96 - .../src/virtual_branches/integration.rs | 351 -- .../src/virtual_branches/iterator.rs | 56 - gitbutler-app/src/virtual_branches/remote.rs | 185 - gitbutler-app/src/virtual_branches/state.rs | 136 - gitbutler-app/src/virtual_branches/target.rs | 105 - .../src/virtual_branches/target/reader.rs | 31 - .../src/virtual_branches/target/writer.rs | 109 - gitbutler-app/src/virtual_branches/virtual.rs | 4058 ----------------- gitbutler-app/src/watcher.rs | 21 +- gitbutler-app/src/watcher/dispatchers.rs | 2 +- .../src/watcher/dispatchers/file_change.rs | 3 +- gitbutler-app/src/watcher/events.rs | 6 +- .../src/watcher/handlers/analytics_handler.rs | 3 +- .../handlers/calculate_deltas_handler.rs | 2 +- .../caltulate_virtual_branches_handler.rs | 5 +- .../watcher/handlers/fetch_gitbutler_data.rs | 2 +- .../watcher/handlers/filter_ignored_files.rs | 2 +- .../src/watcher/handlers/flush_session.rs | 4 +- .../src/watcher/handlers/git_file_change.rs | 5 +- .../src/watcher/handlers/index_handler.rs | 5 +- .../watcher/handlers/push_gitbutler_data.rs | 6 +- .../handlers/push_project_to_gitbutler.rs | 10 +- gitbutler-app/src/windows.rs | 24 - gitbutler-app/src/writer.rs | 114 - gitbutler-app/src/zip.rs | 228 
+- gitbutler-app/src/zip/commands.rs | 85 - gitbutler-app/src/zip/controller.rs | 72 - gitbutler-app/tests/app.rs | 263 +- gitbutler-app/tests/common/mod.rs | 355 -- gitbutler-app/tests/database/mod.rs | 20 - gitbutler-app/tests/deltas/document.rs | 263 -- gitbutler-app/tests/deltas/mod.rs | 146 - gitbutler-app/tests/deltas/operations.rs | 55 - gitbutler-app/tests/gb_repository/mod.rs | 490 -- gitbutler-app/tests/git/config.rs | 34 - gitbutler-app/tests/git/credentials.rs | 312 -- gitbutler-app/tests/git/diff.rs | 457 -- gitbutler-app/tests/git/mod.rs | 3 - gitbutler-app/tests/keys/mod.rs | 65 - gitbutler-app/tests/lock/mod.rs | 91 - gitbutler-app/tests/reader/mod.rs | 183 - gitbutler-app/tests/sessions/database.rs | 84 - gitbutler-app/tests/sessions/mod.rs | 106 - gitbutler-app/tests/suite/gb_repository.rs | 149 - gitbutler-app/tests/suite/projects.rs | 71 - .../tests/suite/virtual_branches/amend.rs | 352 -- .../virtual_branches/apply_virtual_branch.rs | 278 -- .../suite/virtual_branches/cherry_pick.rs | 382 -- .../suite/virtual_branches/create_commit.rs | 198 - .../create_virtual_branch_from_branch.rs | 382 -- .../virtual_branches/delete_virtual_branch.rs | 78 - .../virtual_branches/fetch_from_target.rs | 46 - .../tests/suite/virtual_branches/init.rs | 213 - .../tests/suite/virtual_branches/mod.rs | 176 - .../move_commit_to_vbranch.rs | 324 -- .../suite/virtual_branches/references.rs | 366 -- .../virtual_branches/reset_virtual_branch.rs | 267 -- .../virtual_branches/selected_for_changes.rs | 375 -- .../suite/virtual_branches/set_base_branch.rs | 235 - .../tests/suite/virtual_branches/squash.rs | 356 -- .../tests/suite/virtual_branches/unapply.rs | 177 - .../virtual_branches/unapply_ownership.rs | 61 - .../virtual_branches/update_base_branch.rs | 1929 -------- .../virtual_branches/update_commit_message.rs | 364 -- .../tests/suite/virtual_branches/upstream.rs | 149 - gitbutler-app/tests/types/mod.rs | 19 - .../tests/virtual_branches/branch/context.rs | 522 --- 
.../virtual_branches/branch/file_ownership.rs | 151 - .../tests/virtual_branches/branch/hunk.rs | 89 - .../tests/virtual_branches/branch/mod.rs | 8 - .../virtual_branches/branch/ownership.rs | 284 -- .../tests/virtual_branches/branch/reader.rs | 98 - .../tests/virtual_branches/branch/writer.rs | 220 - .../tests/virtual_branches/iterator.rs | 117 - gitbutler-app/tests/virtual_branches/mod.rs | 2549 ----------- .../tests/virtual_branches/target/mod.rs | 2 - .../tests/virtual_branches/target/reader.rs | 150 - .../tests/virtual_branches/target/writer.rs | 212 - .../handler/calculate_delta_handler.rs | 10 +- .../watcher/handler/fetch_gitbutler_data.rs | 4 +- .../tests/watcher/handler/git_file_change.rs | 4 +- gitbutler-app/tests/watcher/handler/mod.rs | 2 +- .../handler/push_project_to_gitbutler.rs | 8 +- gitbutler-app/tests/zip/mod.rs | 47 - src/deltas.rs | 2 +- src/keys.rs | 2 +- src/sessions.rs | 2 +- src/zip.rs | 2 +- tests/app.rs | 242 +- tests/database/mod.rs | 2 +- tests/deltas/mod.rs | 4 +- tests/gb_repository/mod.rs | 6 +- tests/git/config.rs | 2 +- tests/git/credentials.rs | 2 +- tests/git/diff.rs | 4 +- tests/keys/mod.rs | 2 +- tests/lock/mod.rs | 2 +- tests/reader/mod.rs | 2 +- tests/sessions/database.rs | 2 +- tests/sessions/mod.rs | 2 +- tests/shared/mod.rs | 61 + tests/shared/suite.rs | 227 + .../{common/mod.rs => shared/test_project.rs} | 13 +- tests/suite/gb_repository.rs | 2 +- tests/suite/projects.rs | 6 +- tests/suite/virtual_branches/mod.rs | 4 +- tests/virtual_branches/branch/reader.rs | 2 +- tests/virtual_branches/branch/writer.rs | 2 +- tests/virtual_branches/iterator.rs | 2 +- tests/virtual_branches/mod.rs | 36 +- tests/virtual_branches/target/reader.rs | 2 +- tests/virtual_branches/target/writer.rs | 2 +- 223 files changed, 1538 insertions(+), 33521 deletions(-) delete mode 100644 gitbutler-app/src/assets.rs delete mode 100644 gitbutler-app/src/database.rs delete mode 100644 gitbutler-app/src/database/migrations/V0__deltas.sql delete mode 
100644 gitbutler-app/src/database/migrations/V1__sessions.sql delete mode 100644 gitbutler-app/src/database/migrations/V2__files.sql delete mode 100644 gitbutler-app/src/database/migrations/V3__bookmarks.sql delete mode 100644 gitbutler-app/src/database/migrations/V4__bookmarks_update.sql delete mode 100644 gitbutler-app/src/database/migrations/V5__bookmarks_update.sql delete mode 100644 gitbutler-app/src/database/migrations/V6__sessions_project_id_id_idx.sql delete mode 100644 gitbutler-app/src/database/migrations/V7__drop_files.sql delete mode 100644 gitbutler-app/src/database/migrations/V8__drop_bookmarks.sql delete mode 100644 gitbutler-app/src/dedup.rs delete mode 100644 gitbutler-app/src/deltas/commands.rs delete mode 100644 gitbutler-app/src/deltas/controller.rs delete mode 100644 gitbutler-app/src/deltas/database.rs delete mode 100644 gitbutler-app/src/deltas/delta.rs delete mode 100644 gitbutler-app/src/deltas/document.rs delete mode 100644 gitbutler-app/src/deltas/operations.rs delete mode 100644 gitbutler-app/src/deltas/reader.rs delete mode 100644 gitbutler-app/src/deltas/writer.rs delete mode 100644 gitbutler-app/src/fs.rs delete mode 100644 gitbutler-app/src/gb_repository.rs delete mode 100644 gitbutler-app/src/gb_repository/repository.rs delete mode 100644 gitbutler-app/src/git.rs delete mode 100644 gitbutler-app/src/git/blob.rs delete mode 100644 gitbutler-app/src/git/branch.rs delete mode 100644 gitbutler-app/src/git/commit.rs delete mode 100644 gitbutler-app/src/git/config.rs delete mode 100644 gitbutler-app/src/git/credentials.rs delete mode 100644 gitbutler-app/src/git/diff.rs delete mode 100644 gitbutler-app/src/git/error.rs delete mode 100644 gitbutler-app/src/git/index.rs delete mode 100644 gitbutler-app/src/git/oid.rs delete mode 100644 gitbutler-app/src/git/reference.rs delete mode 100644 gitbutler-app/src/git/reference/refname.rs delete mode 100644 gitbutler-app/src/git/reference/refname/error.rs delete mode 100644 
gitbutler-app/src/git/reference/refname/local.rs delete mode 100644 gitbutler-app/src/git/reference/refname/remote.rs delete mode 100644 gitbutler-app/src/git/reference/refname/virtual.rs delete mode 100644 gitbutler-app/src/git/remote.rs delete mode 100644 gitbutler-app/src/git/repository.rs delete mode 100644 gitbutler-app/src/git/show.rs delete mode 100644 gitbutler-app/src/git/signature.rs delete mode 100644 gitbutler-app/src/git/tree.rs delete mode 100644 gitbutler-app/src/git/url.rs delete mode 100644 gitbutler-app/src/git/url/convert.rs delete mode 100644 gitbutler-app/src/git/url/parse.rs delete mode 100644 gitbutler-app/src/git/url/scheme.rs delete mode 100644 gitbutler-app/src/github/commands.rs delete mode 100644 gitbutler-app/src/id.rs delete mode 100644 gitbutler-app/src/keys/commands.rs delete mode 100644 gitbutler-app/src/keys/controller.rs delete mode 100644 gitbutler-app/src/keys/key.rs delete mode 100644 gitbutler-app/src/keys/storage.rs delete mode 100644 gitbutler-app/src/lock.rs delete mode 100644 gitbutler-app/src/path.rs delete mode 100644 gitbutler-app/src/project_repository.rs delete mode 100644 gitbutler-app/src/project_repository/config.rs delete mode 100644 gitbutler-app/src/project_repository/conflicts.rs delete mode 100644 gitbutler-app/src/project_repository/repository.rs delete mode 100644 gitbutler-app/src/project_repository/signatures.rs delete mode 100644 gitbutler-app/src/projects/commands.rs delete mode 100644 gitbutler-app/src/projects/controller.rs delete mode 100644 gitbutler-app/src/projects/project.rs delete mode 100644 gitbutler-app/src/projects/storage.rs delete mode 100644 gitbutler-app/src/reader.rs delete mode 100644 gitbutler-app/src/sessions/controller.rs delete mode 100644 gitbutler-app/src/sessions/database.rs delete mode 100644 gitbutler-app/src/sessions/iterator.rs delete mode 100644 gitbutler-app/src/sessions/reader.rs delete mode 100644 gitbutler-app/src/sessions/session.rs delete mode 100644 
gitbutler-app/src/sessions/writer.rs delete mode 100644 gitbutler-app/src/ssh.rs delete mode 100644 gitbutler-app/src/storage.rs delete mode 100644 gitbutler-app/src/types.rs delete mode 100644 gitbutler-app/src/types/default_true.rs delete mode 100644 gitbutler-app/src/users/commands.rs delete mode 100644 gitbutler-app/src/users/controller.rs delete mode 100644 gitbutler-app/src/users/storage.rs delete mode 100644 gitbutler-app/src/users/user.rs delete mode 100644 gitbutler-app/src/virtual_branches/base.rs delete mode 100644 gitbutler-app/src/virtual_branches/branch.rs delete mode 100644 gitbutler-app/src/virtual_branches/branch/file_ownership.rs delete mode 100644 gitbutler-app/src/virtual_branches/branch/hunk.rs delete mode 100644 gitbutler-app/src/virtual_branches/branch/ownership.rs delete mode 100644 gitbutler-app/src/virtual_branches/branch/reader.rs delete mode 100644 gitbutler-app/src/virtual_branches/branch/writer.rs delete mode 100644 gitbutler-app/src/virtual_branches/commands.rs delete mode 100644 gitbutler-app/src/virtual_branches/context.rs delete mode 100644 gitbutler-app/src/virtual_branches/controller.rs delete mode 100644 gitbutler-app/src/virtual_branches/errors.rs delete mode 100644 gitbutler-app/src/virtual_branches/files.rs delete mode 100644 gitbutler-app/src/virtual_branches/integration.rs delete mode 100644 gitbutler-app/src/virtual_branches/iterator.rs delete mode 100644 gitbutler-app/src/virtual_branches/remote.rs delete mode 100644 gitbutler-app/src/virtual_branches/state.rs delete mode 100644 gitbutler-app/src/virtual_branches/target.rs delete mode 100644 gitbutler-app/src/virtual_branches/target/reader.rs delete mode 100644 gitbutler-app/src/virtual_branches/target/writer.rs delete mode 100644 gitbutler-app/src/virtual_branches/virtual.rs delete mode 100644 gitbutler-app/src/windows.rs delete mode 100644 gitbutler-app/src/writer.rs delete mode 100644 gitbutler-app/src/zip/commands.rs delete mode 100644 
gitbutler-app/src/zip/controller.rs delete mode 100644 gitbutler-app/tests/common/mod.rs delete mode 100644 gitbutler-app/tests/database/mod.rs delete mode 100644 gitbutler-app/tests/deltas/document.rs delete mode 100644 gitbutler-app/tests/deltas/mod.rs delete mode 100644 gitbutler-app/tests/deltas/operations.rs delete mode 100644 gitbutler-app/tests/gb_repository/mod.rs delete mode 100644 gitbutler-app/tests/git/config.rs delete mode 100644 gitbutler-app/tests/git/credentials.rs delete mode 100644 gitbutler-app/tests/git/diff.rs delete mode 100644 gitbutler-app/tests/git/mod.rs delete mode 100644 gitbutler-app/tests/keys/mod.rs delete mode 100644 gitbutler-app/tests/lock/mod.rs delete mode 100644 gitbutler-app/tests/reader/mod.rs delete mode 100644 gitbutler-app/tests/sessions/database.rs delete mode 100644 gitbutler-app/tests/sessions/mod.rs delete mode 100644 gitbutler-app/tests/suite/gb_repository.rs delete mode 100644 gitbutler-app/tests/suite/projects.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/amend.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/apply_virtual_branch.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/cherry_pick.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/create_commit.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/create_virtual_branch_from_branch.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/delete_virtual_branch.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/fetch_from_target.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/init.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/mod.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/move_commit_to_vbranch.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/references.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/reset_virtual_branch.rs delete mode 100644 
gitbutler-app/tests/suite/virtual_branches/selected_for_changes.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/set_base_branch.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/squash.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/unapply.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/unapply_ownership.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/update_base_branch.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/update_commit_message.rs delete mode 100644 gitbutler-app/tests/suite/virtual_branches/upstream.rs delete mode 100644 gitbutler-app/tests/types/mod.rs delete mode 100644 gitbutler-app/tests/virtual_branches/branch/context.rs delete mode 100644 gitbutler-app/tests/virtual_branches/branch/file_ownership.rs delete mode 100644 gitbutler-app/tests/virtual_branches/branch/hunk.rs delete mode 100644 gitbutler-app/tests/virtual_branches/branch/mod.rs delete mode 100644 gitbutler-app/tests/virtual_branches/branch/ownership.rs delete mode 100644 gitbutler-app/tests/virtual_branches/branch/reader.rs delete mode 100644 gitbutler-app/tests/virtual_branches/branch/writer.rs delete mode 100644 gitbutler-app/tests/virtual_branches/iterator.rs delete mode 100644 gitbutler-app/tests/virtual_branches/mod.rs delete mode 100644 gitbutler-app/tests/virtual_branches/target/mod.rs delete mode 100644 gitbutler-app/tests/virtual_branches/target/reader.rs delete mode 100644 gitbutler-app/tests/virtual_branches/target/writer.rs delete mode 100644 gitbutler-app/tests/zip/mod.rs create mode 100644 tests/shared/mod.rs create mode 100644 tests/shared/suite.rs rename tests/{common/mod.rs => shared/test_project.rs} (98%) diff --git a/Cargo.lock b/Cargo.lock index e9e895945..e8ddbaed3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1973,6 +1973,7 @@ dependencies = [ "futures", "git2", "git2-hooks", + "gitbutler", "gitbutler-git", "governor", "itertools 0.12.1", diff --git 
a/gitbutler-app/Cargo.toml b/gitbutler-app/Cargo.toml index 0269f44e7..81f9248a2 100644 --- a/gitbutler-app/Cargo.toml +++ b/gitbutler-app/Cargo.toml @@ -84,6 +84,7 @@ walkdir = "2.5.0" zip = "0.6.5" tempfile = "3.10" gitbutler-git = { path = "../gitbutler-git" } +gitbutler = { path = "../" } [lints.clippy] all = "deny" diff --git a/gitbutler-app/src/analytics.rs b/gitbutler-app/src/analytics.rs index 4d515280b..8a248e997 100644 --- a/gitbutler-app/src/analytics.rs +++ b/gitbutler-app/src/analytics.rs @@ -2,7 +2,7 @@ use std::{fmt, str, sync::Arc}; use tauri::AppHandle; -use crate::{projects::ProjectId, users::User}; +use gitbutler::{projects::ProjectId, users::User}; mod posthog; diff --git a/gitbutler-app/src/app.rs b/gitbutler-app/src/app.rs index 40a67896a..cc18c97cb 100644 --- a/gitbutler-app/src/app.rs +++ b/gitbutler-app/src/app.rs @@ -2,7 +2,8 @@ use std::{collections::HashMap, path}; use anyhow::{Context, Result}; -use crate::{ +use crate::watcher; +use gitbutler::{ askpass::AskpassBroker, gb_repository, git, project_repository::{self, conflicts}, @@ -11,7 +12,6 @@ use crate::{ sessions::{self, SessionId}, users, virtual_branches::BranchId, - watcher, }; #[derive(Clone)] diff --git a/gitbutler-app/src/askpass.rs b/gitbutler-app/src/askpass.rs index 4efcd5ad8..5a54913fa 100644 --- a/gitbutler-app/src/askpass.rs +++ b/gitbutler-app/src/askpass.rs @@ -1,70 +1,10 @@ -use std::{collections::HashMap, sync::Arc}; - -use serde::Serialize; -use tauri::{AppHandle, Manager}; -use tokio::sync::{oneshot, Mutex}; - -use crate::id::Id; - -pub struct AskpassRequest { - sender: oneshot::Sender>, -} - -#[derive(Clone)] -pub struct AskpassBroker { - pending_requests: Arc, AskpassRequest>>>, - handle: AppHandle, -} - -#[derive(Debug, Clone, serde::Serialize)] -struct PromptEvent { - id: Id, - prompt: String, - context: C, -} - -impl AskpassBroker { - pub fn init(handle: AppHandle) -> Self { - Self { - pending_requests: Arc::new(Mutex::new(HashMap::new())), - handle, - } - } - 
- pub async fn submit_prompt( - &self, - prompt: String, - context: C, - ) -> Option { - let (sender, receiver) = oneshot::channel(); - let id = Id::generate(); - let request = AskpassRequest { sender }; - self.pending_requests.lock().await.insert(id, request); - self.handle - .emit_all( - "git_prompt", - PromptEvent { - id, - prompt, - context, - }, - ) - .expect("failed to emit askpass event"); - receiver.await.unwrap() - } - - pub async fn handle_response(&self, id: Id, response: Option) { - let mut pending_requests = self.pending_requests.lock().await; - if let Some(request) = pending_requests.remove(&id) { - let _ = request.sender.send(response); - } else { - log::warn!("received response for unknown askpass request: {}", id); - } - } -} - pub mod commands { - use super::{AppHandle, AskpassBroker, AskpassRequest, Id, Manager}; + use gitbutler::{ + askpass::{AskpassBroker, AskpassRequest}, + id::Id, + }; + use tauri::{AppHandle, Manager}; + #[tauri::command(async)] #[tracing::instrument(skip(handle, response))] pub async fn submit_prompt_response( diff --git a/gitbutler-app/src/assets.rs b/gitbutler-app/src/assets.rs deleted file mode 100644 index 510d330e8..000000000 --- a/gitbutler-app/src/assets.rs +++ /dev/null @@ -1,204 +0,0 @@ -use std::{collections::HashMap, path, sync}; - -use anyhow::Result; -use futures::future::join_all; -use tokio::sync::Semaphore; -use url::Url; - -use crate::{ - users, - virtual_branches::{ - Author, BaseBranch, RemoteBranchData, RemoteCommit, VirtualBranch, VirtualBranchCommit, - }, -}; - -#[derive(Clone)] -pub struct Proxy { - cache_dir: path::PathBuf, - - semaphores: sync::Arc>>, -} - -impl Proxy { - pub fn new(cache_dir: path::PathBuf) -> Self { - Proxy { - cache_dir, - semaphores: sync::Arc::new(tokio::sync::Mutex::new(HashMap::new())), - } - } - - pub async fn proxy_user(&self, user: users::User) -> users::User { - match Url::parse(&user.picture) { - Ok(picture) => users::User { - picture: 
self.proxy(&picture).await.map_or_else( - |error| { - tracing::error!(?error, "failed to proxy user picture"); - user.picture.clone() - }, - |url| url.to_string(), - ), - ..user - }, - Err(_) => user, - } - } - - async fn proxy_virtual_branch_commit( - &self, - commit: VirtualBranchCommit, - ) -> VirtualBranchCommit { - VirtualBranchCommit { - author: self.proxy_author(commit.author).await, - ..commit - } - } - - pub async fn proxy_virtual_branch(&self, branch: VirtualBranch) -> VirtualBranch { - VirtualBranch { - commits: join_all( - branch - .commits - .iter() - .map(|commit| self.proxy_virtual_branch_commit(commit.clone())) - .collect::>(), - ) - .await, - ..branch - } - } - - pub async fn proxy_virtual_branches(&self, branches: Vec) -> Vec { - join_all( - branches - .into_iter() - .map(|branch| self.proxy_virtual_branch(branch)) - .collect::>(), - ) - .await - } - - pub async fn proxy_remote_branch_data(&self, branch: RemoteBranchData) -> RemoteBranchData { - RemoteBranchData { - commits: join_all( - branch - .commits - .into_iter() - .map(|commit| self.proxy_remote_commit(commit)) - .collect::>(), - ) - .await, - ..branch - } - } - - async fn proxy_author(&self, author: Author) -> Author { - Author { - gravatar_url: self - .proxy(&author.gravatar_url) - .await - .unwrap_or_else(|error| { - tracing::error!(gravatar_url = %author.gravatar_url, ?error, "failed to proxy gravatar url"); - author.gravatar_url - }), - ..author - } - } - - async fn proxy_remote_commit(&self, commit: RemoteCommit) -> RemoteCommit { - RemoteCommit { - author: self.proxy_author(commit.author).await, - ..commit - } - } - - pub async fn proxy_base_branch(&self, base_branch: BaseBranch) -> BaseBranch { - BaseBranch { - recent_commits: join_all( - base_branch - .clone() - .recent_commits - .into_iter() - .map(|commit| self.proxy_remote_commit(commit)) - .collect::>(), - ) - .await, - upstream_commits: join_all( - base_branch - .clone() - .upstream_commits - .into_iter() - .map(|commit| 
self.proxy_remote_commit(commit)) - .collect::>(), - ) - .await, - ..base_branch.clone() - } - } - - // takes a url of a remote assets, downloads it into cache and returns a url that points to the cached file - pub async fn proxy(&self, src: &Url) -> Result { - #[cfg(unix)] - if src.scheme() == "asset" { - return Ok(src.clone()); - } - - if src.scheme() == "https" && src.host_str() == Some("asset.localhost") { - return Ok(src.clone()); - } - - let hash = md5::compute(src.to_string()); - let path = path::Path::new(src.path()); - let ext = path - .extension() - .map_or("jpg", |ext| ext.to_str().unwrap_or("jpg")); - let save_to = self.cache_dir.join(format!("{:X}.{}", hash, ext)); - - if save_to.exists() { - return Ok(build_asset_url(&save_to.display().to_string())); - } - - // only one download per url at a time - let mut semaphores = self.semaphores.lock().await; - let r = semaphores - .entry(src.clone()) - .or_insert_with(|| Semaphore::new(1)); - let _permit = r.acquire().await?; - - if save_to.exists() { - // check again, maybe url was downloaded - return Ok(build_asset_url(&save_to.display().to_string())); - } - - tracing::debug!(url = %src, "downloading image"); - - let resp = reqwest::get(src.clone()).await?; - if !resp.status().is_success() { - tracing::error!(url = %src, status = %resp.status(), "failed to download image"); - return Err(anyhow::anyhow!( - "Failed to download image {}: {}", - src, - resp.status() - )); - } - - let bytes = resp.bytes().await?; - std::fs::create_dir_all(&self.cache_dir)?; - std::fs::write(&save_to, bytes)?; - - Ok(build_asset_url(&save_to.display().to_string())) - } -} - -#[cfg(unix)] -fn build_asset_url(path: &str) -> Url { - Url::parse(&format!("asset://localhost/{}", urlencoding::encode(path))).unwrap() -} - -#[cfg(windows)] -fn build_asset_url(path: &str) -> Url { - Url::parse(&format!( - "https://asset.localhost/{}", - urlencoding::encode(path) - )) - .unwrap() -} diff --git a/gitbutler-app/src/commands.rs 
b/gitbutler-app/src/commands.rs index 11aa9777f..a7124e111 100644 --- a/gitbutler-app/src/commands.rs +++ b/gitbutler-app/src/commands.rs @@ -4,12 +4,12 @@ use anyhow::Context; use tauri::Manager; use tracing::instrument; -use crate::{ - app, +use crate::{app, watcher}; +use gitbutler::{ error::{Code, Error}, gb_repository, git, project_repository, projects, reader, sessions::SessionId, - users, watcher, + users, }; impl From for Error { @@ -71,13 +71,13 @@ pub async fn git_test_push( branch_name: &str, ) -> Result<(), Error> { let app = handle.state::(); - let helper = handle.state::(); + let helper = handle.state::(); let project_id = project_id.parse().map_err(|_| Error::UserError { code: Code::Validation, message: "Malformed project id".to_string(), })?; let askpass_broker = handle - .state::() + .state::() .inner() .clone(); app.git_test_push( @@ -102,13 +102,13 @@ pub async fn git_test_fetch( action: Option, ) -> Result<(), Error> { let app = handle.state::(); - let helper = handle.state::(); + let helper = handle.state::(); let project_id = project_id.parse().map_err(|_| Error::UserError { code: Code::Validation, message: "Malformed project id".to_string(), })?; let askpass_broker = handle - .state::() + .state::() .inner() .clone(); app.git_test_fetch( diff --git a/gitbutler-app/src/database.rs b/gitbutler-app/src/database.rs deleted file mode 100644 index 467b56c84..000000000 --- a/gitbutler-app/src/database.rs +++ /dev/null @@ -1,48 +0,0 @@ -use std::{path, sync::Arc}; - -use anyhow::{Context, Result}; - -use r2d2::Pool; -use r2d2_sqlite::SqliteConnectionManager; -use refinery::config::Config; -use rusqlite::Transaction; - -mod embedded { - use refinery::embed_migrations; - embed_migrations!("src/database/migrations"); -} - -#[derive(Clone)] -pub struct Database { - pool: Arc>, -} - -impl Database { - pub fn open_in_directory>(path: P) -> Result { - let path = path.as_ref().to_path_buf().join("database.sqlite3"); - let manager = 
SqliteConnectionManager::file(&path); - let pool = r2d2::Pool::new(manager)?; - let mut cfg = Config::new(refinery::config::ConfigDbType::Sqlite) - .set_db_path(path.as_path().to_str().unwrap()); - embedded::migrations::runner() - .run(&mut cfg) - .map(|report| { - report - .applied_migrations() - .iter() - .for_each(|migration| tracing::info!(%migration, "migration applied")); - }) - .context("Failed to run migrations")?; - Ok(Self { - pool: Arc::new(pool), - }) - } - - pub fn transaction(&self, f: impl FnOnce(&Transaction) -> Result) -> Result { - let mut conn = self.pool.get()?; - let tx = conn.transaction().context("Failed to start transaction")?; - let result = f(&tx)?; - tx.commit().context("Failed to commit transaction")?; - Ok(result) - } -} diff --git a/gitbutler-app/src/database/migrations/V0__deltas.sql b/gitbutler-app/src/database/migrations/V0__deltas.sql deleted file mode 100644 index 395aabe67..000000000 --- a/gitbutler-app/src/database/migrations/V0__deltas.sql +++ /dev/null @@ -1,12 +0,0 @@ -CREATE TABLE `deltas` ( - `session_id` text NOT NULL, - `project_id` text NOT NULL, - `timestamp_ms` text NOT NULL, - `operations` blob NOT NULL, - `file_path` text NOT NULL, - PRIMARY KEY (`project_id`, `session_id`, `timestamp_ms`, `file_path`) -); - -CREATE INDEX `deltas_project_id_session_id_index` ON `deltas` (`project_id`, `session_id`); - -CREATE INDEX `deltas_project_id_session_id_file_path_index` ON `deltas` (`project_id`, `session_id`, `file_path`); diff --git a/gitbutler-app/src/database/migrations/V1__sessions.sql b/gitbutler-app/src/database/migrations/V1__sessions.sql deleted file mode 100644 index fe92e1cb3..000000000 --- a/gitbutler-app/src/database/migrations/V1__sessions.sql +++ /dev/null @@ -1,11 +0,0 @@ -CREATE TABLE `sessions` ( - `id` text NOT NULL PRIMARY KEY, - `project_id` text NOT NULL, - `hash` text, - `branch` text, - `commit` text, - `start_timestamp_ms` text NOT NULL, - `last_timestamp_ms` text NOT NULL -); - -CREATE INDEX 
`sessions_project_id_index` ON `sessions` (`project_id`); diff --git a/gitbutler-app/src/database/migrations/V2__files.sql b/gitbutler-app/src/database/migrations/V2__files.sql deleted file mode 100644 index 2bd47f5fd..000000000 --- a/gitbutler-app/src/database/migrations/V2__files.sql +++ /dev/null @@ -1,14 +0,0 @@ -CREATE TABLE `files` ( - `project_id` text NOT NULL, - `session_id` text NOT NULL, - `file_path` text NOT NULL, - `sha1` blob NOT NULL, - PRIMARY KEY (`project_id`, `session_id`, `file_path`) -); - -CREATE INDEX `files_project_id_session_id_index` ON `files` (`project_id`, `session_id`); - -CREATE TABLE `contents` ( - `sha1` blob NOT NULL PRIMARY KEY, - `content` blob NOT NULL -); diff --git a/gitbutler-app/src/database/migrations/V3__bookmarks.sql b/gitbutler-app/src/database/migrations/V3__bookmarks.sql deleted file mode 100644 index e29e93202..000000000 --- a/gitbutler-app/src/database/migrations/V3__bookmarks.sql +++ /dev/null @@ -1,8 +0,0 @@ -CREATE TABLE `bookmarks` ( - `id` text NOT NULL PRIMARY KEY, - `project_id` text NOT NULL, - `timestamp_ms` text NOT NULL, - `note` text NOT NULL -); - -CREATE INDEX bookmarks_project_id_idx ON `bookmarks` (`project_id`); diff --git a/gitbutler-app/src/database/migrations/V4__bookmarks_update.sql b/gitbutler-app/src/database/migrations/V4__bookmarks_update.sql deleted file mode 100644 index e068b765f..000000000 --- a/gitbutler-app/src/database/migrations/V4__bookmarks_update.sql +++ /dev/null @@ -1,16 +0,0 @@ -ALTER TABLE `bookmarks` - ADD `created_timestamp_ms` text NOT NULL DEFAULT 0; - -UPDATE - `bookmarks` -SET - `created_timestamp_ms` = `timestamp_ms`; - -ALTER TABLE `bookmarks` - DROP COLUMN `timestamp_ms`; - -ALTER TABLE `bookmarks` - ADD `updated_timestamp_ms` text; - -ALTER TABLE `bookmarks` - ADD `deleted` boolean NOT NULL DEFAULT FALSE; diff --git a/gitbutler-app/src/database/migrations/V5__bookmarks_update.sql b/gitbutler-app/src/database/migrations/V5__bookmarks_update.sql deleted file mode 
100644 index e395a6421..000000000 --- a/gitbutler-app/src/database/migrations/V5__bookmarks_update.sql +++ /dev/null @@ -1,28 +0,0 @@ -ALTER TABLE bookmarks RENAME TO bookmarks_old; - -DROP INDEX `bookmarks_project_id_idx`; - -CREATE TABLE bookmarks ( - `project_id` text NOT NULL, - `timestamp_ms` text NOT NULL, - `note` text NOT NULL, - `deleted` boolean NOT NULL, - `created_timestamp_ms` text NOT NULL, - `updated_timestamp_ms` text NOT NULL, - PRIMARY KEY (`project_id`, `timestamp_ms`) -); - -CREATE INDEX `bookmarks_project_id_idx` ON `bookmarks` (`project_id`); - -INSERT INTO bookmarks (`project_id`, `timestamp_ms`, `note`, `deleted`, `created_timestamp_ms`, `updated_timestamp_ms`) -SELECT - `project_id`, - `created_timestamp_ms`, - `note`, - `deleted`, - `created_timestamp_ms`, - `updated_timestamp_ms` -FROM - bookmarks_old; - -DROP TABLE bookmarks_old; diff --git a/gitbutler-app/src/database/migrations/V6__sessions_project_id_id_idx.sql b/gitbutler-app/src/database/migrations/V6__sessions_project_id_id_idx.sql deleted file mode 100644 index 47c732087..000000000 --- a/gitbutler-app/src/database/migrations/V6__sessions_project_id_id_idx.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE INDEX `sessions_project_id_id_index` ON `sessions` (`project_id`, `id`); diff --git a/gitbutler-app/src/database/migrations/V7__drop_files.sql b/gitbutler-app/src/database/migrations/V7__drop_files.sql deleted file mode 100644 index d5700869a..000000000 --- a/gitbutler-app/src/database/migrations/V7__drop_files.sql +++ /dev/null @@ -1,2 +0,0 @@ -DROP TABLE files; -DROP TABLE contents; diff --git a/gitbutler-app/src/database/migrations/V8__drop_bookmarks.sql b/gitbutler-app/src/database/migrations/V8__drop_bookmarks.sql deleted file mode 100644 index f8b391275..000000000 --- a/gitbutler-app/src/database/migrations/V8__drop_bookmarks.sql +++ /dev/null @@ -1 +0,0 @@ -DROP TABLE bookmarks; diff --git a/gitbutler-app/src/dedup.rs b/gitbutler-app/src/dedup.rs deleted file mode 100644 index 
7c59249b7..000000000 --- a/gitbutler-app/src/dedup.rs +++ /dev/null @@ -1,45 +0,0 @@ -pub(crate) fn dedup(existing: &[&str], new: &str) -> String { - dedup_fmt(existing, new, " ") -} - -/// Makes sure that _new_ is not in _existing_ by adding a number to it. -/// the number is increased until the name is unique. -pub(crate) fn dedup_fmt(existing: &[&str], new: &str, separator: &str) -> String { - existing - .iter() - .filter_map(|x| { - x.strip_prefix(new) - .and_then(|x| x.strip_prefix(separator).or(Some(""))) - .and_then(|x| { - if x.is_empty() { - Some(0_i32) - } else { - x.parse::().ok() - } - }) - }) - .max() - .map_or_else( - || new.to_string(), - |x| format!("{new}{separator}{}", x + 1_i32), - ) -} - -#[test] -fn tests() { - for (existing, new, expected) in [ - (vec!["bar", "baz"], "foo", "foo"), - (vec!["foo", "bar", "baz"], "foo", "foo 1"), - (vec!["foo", "foo 2"], "foo", "foo 3"), - (vec!["foo", "foo 1", "foo 2"], "foo", "foo 3"), - (vec!["foo", "foo 1", "foo 2"], "foo 1", "foo 1 1"), - (vec!["foo", "foo 1", "foo 2"], "foo 2", "foo 2 1"), - (vec!["foo", "foo 1", "foo 2"], "foo 3", "foo 3"), - (vec!["foo 2"], "foo", "foo 3"), - (vec!["foo", "foo 1", "foo 2", "foo 4"], "foo", "foo 5"), - (vec!["foo", "foo 0"], "foo", "foo 1"), - (vec!["foo 0"], "foo", "foo 1"), - ] { - assert_eq!(dedup(&existing, new), expected.to_string()); - } -} diff --git a/gitbutler-app/src/deltas.rs b/gitbutler-app/src/deltas.rs index f4359556a..6c5bef0e5 100644 --- a/gitbutler-app/src/deltas.rs +++ b/gitbutler-app/src/deltas.rs @@ -1,16 +1,43 @@ -mod controller; -mod delta; -mod document; -mod reader; -mod writer; +pub mod commands { + use std::collections::HashMap; -pub mod commands; -pub mod database; -pub mod operations; + use tauri::{AppHandle, Manager}; + use tracing::instrument; -pub use controller::Controller; -pub use database::Database; -pub use delta::Delta; -pub use document::Document; -pub use reader::DeltasReader as Reader; -pub use writer::DeltasWriter as Writer; + use 
crate::error::{Code, Error}; + + use gitbutler::deltas::{controller::ListError, Controller, Delta}; + + impl From for Error { + fn from(value: ListError) -> Self { + match value { + ListError::Other(error) => { + tracing::error!(?error); + Error::Unknown + } + } + } + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn list_deltas( + handle: AppHandle, + project_id: &str, + session_id: &str, + paths: Option>, + ) -> Result>, Error> { + let session_id = session_id.parse().map_err(|_| Error::UserError { + message: "Malformed session id".to_string(), + code: Code::Validation, + })?; + let project_id = project_id.parse().map_err(|_| Error::UserError { + code: Code::Validation, + message: "Malformed project id".to_string(), + })?; + handle + .state::() + .list_by_session_id(&project_id, &session_id, &paths) + .map_err(Into::into) + } +} diff --git a/gitbutler-app/src/deltas/commands.rs b/gitbutler-app/src/deltas/commands.rs deleted file mode 100644 index 479369aae..000000000 --- a/gitbutler-app/src/deltas/commands.rs +++ /dev/null @@ -1,41 +0,0 @@ -use std::collections::HashMap; - -use tauri::{AppHandle, Manager}; -use tracing::instrument; - -use crate::error::{Code, Error}; - -use super::{controller::ListError, Controller, Delta}; - -impl From for Error { - fn from(value: ListError) -> Self { - match value { - ListError::Other(error) => { - tracing::error!(?error); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn list_deltas( - handle: AppHandle, - project_id: &str, - session_id: &str, - paths: Option>, -) -> Result>, Error> { - let session_id = session_id.parse().map_err(|_| Error::UserError { - message: "Malformed session id".to_string(), - code: Code::Validation, - })?; - let project_id = project_id.parse().map_err(|_| Error::UserError { - code: Code::Validation, - message: "Malformed project id".to_string(), - })?; - handle - .state::() - .list_by_session_id(&project_id, &session_id, 
&paths) - .map_err(Into::into) -} diff --git a/gitbutler-app/src/deltas/controller.rs b/gitbutler-app/src/deltas/controller.rs deleted file mode 100644 index ebb479e77..000000000 --- a/gitbutler-app/src/deltas/controller.rs +++ /dev/null @@ -1,33 +0,0 @@ -use std::collections::HashMap; - -use crate::{projects::ProjectId, sessions::SessionId}; - -use super::{database, Delta}; - -#[derive(Clone)] -pub struct Controller { - database: database::Database, -} - -#[derive(Debug, thiserror::Error)] -pub enum ListError { - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -impl Controller { - pub fn new(database: database::Database) -> Controller { - Controller { database } - } - - pub fn list_by_session_id( - &self, - project_id: &ProjectId, - session_id: &SessionId, - paths: &Option>, - ) -> Result>, ListError> { - self.database - .list_by_project_id_session_id(project_id, session_id, paths) - .map_err(Into::into) - } -} diff --git a/gitbutler-app/src/deltas/database.rs b/gitbutler-app/src/deltas/database.rs deleted file mode 100644 index 65492e5d0..000000000 --- a/gitbutler-app/src/deltas/database.rs +++ /dev/null @@ -1,122 +0,0 @@ -use std::{collections::HashMap, path}; - -use anyhow::{Context, Result}; - -use crate::{database, projects::ProjectId, sessions::SessionId}; - -use super::{delta, operations}; - -#[derive(Clone)] -pub struct Database { - database: database::Database, -} - -impl Database { - pub fn new(database: database::Database) -> Database { - Database { database } - } - - pub fn insert( - &self, - project_id: &ProjectId, - session_id: &SessionId, - file_path: &path::Path, - deltas: &Vec, - ) -> Result<()> { - self.database.transaction(|tx| -> Result<()> { - let mut stmt = insert_stmt(tx).context("Failed to prepare insert statement")?; - for delta in deltas { - let operations = serde_json::to_vec(&delta.operations) - .context("Failed to serialize operations")?; - let timestamp_ms = delta.timestamp_ms.to_string(); - 
stmt.execute(rusqlite::named_params! { - ":project_id": project_id, - ":session_id": session_id, - ":file_path": file_path.display().to_string(), - ":timestamp_ms": timestamp_ms, - ":operations": operations, - }) - .context("Failed to execute insert statement")?; - } - Ok(()) - })?; - - Ok(()) - } - - pub fn list_by_project_id_session_id( - &self, - project_id: &ProjectId, - session_id: &SessionId, - file_path_filter: &Option>, - ) -> Result>> { - self.database - .transaction(|tx| -> Result>> { - let mut stmt = list_by_project_id_session_id_stmt(tx) - .context("Failed to prepare query statement")?; - let mut rows = stmt - .query(rusqlite::named_params! { - ":project_id": project_id, - ":session_id": session_id, - }) - .context("Failed to execute query statement")?; - let mut deltas: HashMap> = HashMap::new(); - while let Some(row) = rows - .next() - .context("Failed to iterate over query results")? - { - let file_path: String = row.get(0).context("Failed to get file_path")?; - if let Some(file_path_filter) = &file_path_filter { - if !file_path_filter.contains(&file_path.as_str()) { - continue; - } - } - let timestamp_ms: String = row.get(1).context("Failed to get timestamp_ms")?; - let operations: Vec = row.get(2).context("Failed to get operations")?; - let operations: Vec = - serde_json::from_slice(&operations) - .context("Failed to deserialize operations")?; - let timestamp_ms: u128 = timestamp_ms - .parse() - .context("Failed to parse timestamp_ms as u64")?; - let delta = delta::Delta { - operations, - timestamp_ms, - }; - if let Some(deltas_for_file_path) = deltas.get_mut(&file_path) { - deltas_for_file_path.push(delta); - } else { - deltas.insert(file_path, vec![delta]); - } - } - Ok(deltas) - }) - } -} - -fn list_by_project_id_session_id_stmt<'conn>( - tx: &'conn rusqlite::Transaction, -) -> Result> { - Ok(tx.prepare_cached( - " - SELECT `file_path`, `timestamp_ms`, `operations` - FROM `deltas` - WHERE `session_id` = :session_id AND `project_id` = :project_id 
- ORDER BY `timestamp_ms` ASC", - )?) -} - -fn insert_stmt<'conn>( - tx: &'conn rusqlite::Transaction, -) -> Result> { - Ok(tx.prepare_cached( - "INSERT INTO `deltas` ( - `project_id`, `session_id`, `timestamp_ms`, `operations`, `file_path` - ) VALUES ( - :project_id, :session_id, :timestamp_ms, :operations, :file_path - ) - ON CONFLICT(`project_id`, `session_id`, `file_path`, `timestamp_ms`) DO UPDATE SET - `operations` = :operations - ", - )?) -} diff --git a/gitbutler-app/src/deltas/delta.rs b/gitbutler-app/src/deltas/delta.rs deleted file mode 100644 index 99879282d..000000000 --- a/gitbutler-app/src/deltas/delta.rs +++ /dev/null @@ -1,9 +0,0 @@ -use super::operations; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct Delta { - pub operations: Vec, - pub timestamp_ms: u128, -} diff --git a/gitbutler-app/src/deltas/document.rs b/gitbutler-app/src/deltas/document.rs deleted file mode 100644 index 5bf09c4d5..000000000 --- a/gitbutler-app/src/deltas/document.rs +++ /dev/null @@ -1,85 +0,0 @@ -use crate::reader; - -use super::{delta, operations}; -use anyhow::Result; -use std::{ - fmt::{Display, Formatter}, - time::SystemTime, -}; - -#[derive(Debug, Clone, Default)] -pub struct Document { - doc: Vec, - deltas: Vec, -} - -fn apply_deltas(doc: &mut Vec, deltas: &Vec) -> Result<()> { - for delta in deltas { - for operation in &delta.operations { - operation.apply(doc)?; - } - } - Ok(()) -} - -impl Document { - pub fn get_deltas(&self) -> Vec { - self.deltas.clone() - } - - // returns a text document where internal state is seeded with value, and deltas are applied. 
- pub fn new(value: Option<&reader::Content>, deltas: Vec) -> Result { - let mut all_deltas = vec![]; - if let Some(reader::Content::UTF8(value)) = value { - all_deltas.push(delta::Delta { - operations: operations::get_delta_operations("", value), - timestamp_ms: 0, - }); - } - all_deltas.append(&mut deltas.clone()); - let mut doc = vec![]; - apply_deltas(&mut doc, &all_deltas)?; - Ok(Document { doc, deltas }) - } - - pub fn update(&mut self, value: Option<&reader::Content>) -> Result> { - let new_text = match value { - Some(reader::Content::UTF8(value)) => value, - Some(_) | None => "", - }; - - let operations = operations::get_delta_operations(&self.to_string(), new_text); - let delta = if operations.is_empty() { - if let Some(reader::Content::UTF8(value)) = value { - if !value.is_empty() { - return Ok(None); - } - } - - delta::Delta { - operations, - timestamp_ms: SystemTime::now() - .duration_since(SystemTime::UNIX_EPOCH) - .unwrap() - .as_millis(), - } - } else { - delta::Delta { - operations, - timestamp_ms: SystemTime::now() - .duration_since(SystemTime::UNIX_EPOCH) - .unwrap() - .as_millis(), - } - }; - apply_deltas(&mut self.doc, &vec![delta.clone()])?; - self.deltas.push(delta.clone()); - Ok(Some(delta)) - } -} - -impl Display for Document { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.doc.iter().collect::()) - } -} diff --git a/gitbutler-app/src/deltas/operations.rs b/gitbutler-app/src/deltas/operations.rs deleted file mode 100644 index 6374834e0..000000000 --- a/gitbutler-app/src/deltas/operations.rs +++ /dev/null @@ -1,116 +0,0 @@ -use std::cmp::Ordering; - -use anyhow::Result; -use serde::{Deserialize, Serialize}; -use similar::{ChangeTag, TextDiff}; - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -#[serde(rename_all = "camelCase")] -pub enum Operation { - // corresponds to YText.insert(index, chunk) - Insert((usize, String)), - // corresponds to YText.remove_range(index, len) - Delete((usize, 
usize)), -} - -impl Operation { - pub fn apply(&self, text: &mut Vec) -> Result<()> { - match self { - Operation::Insert((index, chunk)) => match index.cmp(&text.len()) { - Ordering::Greater => Err(anyhow::anyhow!( - "Index out of bounds, {} > {}", - index, - text.len() - )), - Ordering::Equal => { - text.extend(chunk.chars()); - Ok(()) - } - Ordering::Less => { - text.splice(*index..*index, chunk.chars()); - Ok(()) - } - }, - Operation::Delete((index, len)) => { - if *index > text.len() { - Err(anyhow::anyhow!( - "Index out of bounds, {} > {}", - index, - text.len() - )) - } else if *index + *len > text.len() { - Err(anyhow::anyhow!( - "Index + length out of bounds, {} > {}", - index + len, - text.len() - )) - } else { - text.splice(*index..(*index + *len), "".chars()); - Ok(()) - } - } - } - } -} - -// merges touching operations of the same type in to one operation -// e.g. [Insert((0, "hello")), Insert((5, " world"))] -> [Insert((0, "hello world"))] -// e.g. [Delete((0, 5)), Delete((5, 5))] -> [Delete((0, 10))] -// e.g. 
[Insert((0, "hello")), Delete((0, 5))] -> [Insert((0, "hello")), Delete((0, 5))] -fn merge_touching(ops: &Vec) -> Vec { - let mut merged = vec![]; - - for op in ops { - match (merged.last_mut(), op) { - (Some(Operation::Insert((index, chunk))), Operation::Insert((index2, chunk2))) => { - if *index + chunk.len() == *index2 { - chunk.push_str(chunk2); - } else { - merged.push(op.clone()); - } - } - (Some(Operation::Delete((index, len))), Operation::Delete((index2, len2))) => { - if *index == *index2 { - *len += len2; - } else { - merged.push(op.clone()); - } - } - _ => merged.push(op.clone()), - } - } - - merged -} - -pub fn get_delta_operations(initial_text: &str, final_text: &str) -> Vec { - if initial_text == final_text { - return vec![]; - } - - let changeset = TextDiff::configure().diff_graphemes(initial_text, final_text); - let mut deltas = vec![]; - - let mut offset = 0; - for change in changeset.iter_all_changes() { - match change.tag() { - ChangeTag::Delete => { - deltas.push(Operation::Delete(( - offset, - change.as_str().unwrap_or("").chars().count(), - ))); - } - ChangeTag::Insert => { - let text = change.as_str().unwrap(); - deltas.push(Operation::Insert((offset, text.to_string()))); - offset = change.new_index().unwrap() + text.chars().count(); - } - ChangeTag::Equal => { - let text = change.as_str().unwrap(); - offset = change.new_index().unwrap() + text.chars().count(); - } - } - } - - merge_touching(&deltas) -} diff --git a/gitbutler-app/src/deltas/reader.rs b/gitbutler-app/src/deltas/reader.rs deleted file mode 100644 index 9470268d1..000000000 --- a/gitbutler-app/src/deltas/reader.rs +++ /dev/null @@ -1,89 +0,0 @@ -use std::{collections::HashMap, path}; - -use anyhow::{Context, Result}; - -use crate::{reader, sessions}; - -use super::Delta; - -pub struct DeltasReader<'reader> { - reader: &'reader reader::Reader<'reader>, -} - -impl<'reader> From<&'reader reader::Reader<'reader>> for DeltasReader<'reader> { - fn from(reader: &'reader 
reader::Reader<'reader>) -> Self { - DeltasReader { reader } - } -} - -#[derive(thiserror::Error, Debug)] -pub enum ReadError { - #[error("not found")] - NotFound, - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -impl<'reader> DeltasReader<'reader> { - pub fn new(reader: &'reader sessions::Reader<'reader>) -> Self { - DeltasReader { - reader: reader.reader(), - } - } - - pub fn read_file>(&self, path: P) -> Result>> { - match self.read(Some(&[path.as_ref()])) { - Ok(deltas) => Ok(deltas.into_iter().next().map(|(_, deltas)| deltas)), - Err(ReadError::NotFound) => Ok(None), - Err(err) => Err(err.into()), - } - } - - pub fn read( - &self, - filter: Option<&[&path::Path]>, - ) -> Result>, ReadError> { - let deltas_dir = path::Path::new("session/deltas"); - let mut paths = self.reader.list_files(deltas_dir)?; - if let Some(filter) = filter { - paths = paths - .into_iter() - .filter(|file_path| filter.iter().any(|path| file_path.eq(path))) - .collect::>(); - } - paths = paths.iter().map(|path| deltas_dir.join(path)).collect(); - let files = self.reader.batch(&paths).context("failed to batch read")?; - - let files = files - .into_iter() - .map(|file| { - file.map_err(|error| match error { - reader::Error::NotFound => ReadError::NotFound, - error => ReadError::Other(error.into()), - }) - }) - .collect::, _>>()?; - - Ok(paths - .into_iter() - .zip(files) - .filter_map(|(path, file)| { - path.strip_prefix(deltas_dir) - .ok() - .map(|path| (path.to_path_buf(), file)) - }) - .filter_map(|(path, file)| { - if let reader::Content::UTF8(content) = file { - if content.is_empty() { - // this is a leftover from some bug, shouldn't happen anymore - return None; - } - let deltas = serde_json::from_str(&content).ok()?; - Some(Ok((path, deltas))) - } else { - Some(Err(anyhow::anyhow!("unexpected content type"))) - } - }) - .collect::>>()?) 
- } -} diff --git a/gitbutler-app/src/deltas/writer.rs b/gitbutler-app/src/deltas/writer.rs deleted file mode 100644 index 98c738581..000000000 --- a/gitbutler-app/src/deltas/writer.rs +++ /dev/null @@ -1,73 +0,0 @@ -use std::path::PathBuf; - -use anyhow::Result; - -use crate::{gb_repository, writer}; - -use super::Delta; - -pub struct DeltasWriter<'writer> { - repository: &'writer gb_repository::Repository, - writer: writer::DirWriter, -} - -impl<'writer> DeltasWriter<'writer> { - pub fn new(repository: &'writer gb_repository::Repository) -> Result { - writer::DirWriter::open(repository.root()).map(|writer| Self { repository, writer }) - } - - pub fn write>(&self, path: P, deltas: &Vec) -> Result<()> { - self.repository.mark_active_session()?; - - let _lock = self.repository.lock(); - - let path = path.as_ref(); - let raw_deltas = serde_json::to_string(&deltas)?; - - self.writer - .write_string(PathBuf::from("session/deltas").join(path), &raw_deltas)?; - - tracing::debug!( - project_id = %self.repository.get_project_id(), - path = %path.display(), - "wrote deltas" - ); - - Ok(()) - } - - pub fn remove_wd_file>(&self, path: P) -> Result<()> { - self.repository.mark_active_session()?; - - let _lock = self.repository.lock(); - - let path = path.as_ref(); - self.writer.remove(PathBuf::from("session/wd").join(path))?; - - tracing::debug!( - project_id = %self.repository.get_project_id(), - path = %path.display(), - "deleted session wd file" - ); - - Ok(()) - } - - pub fn write_wd_file>(&self, path: P, contents: &str) -> Result<()> { - self.repository.mark_active_session()?; - - let _lock = self.repository.lock(); - - let path = path.as_ref(); - self.writer - .write_string(PathBuf::from("session/wd").join(path), contents)?; - - tracing::debug!( - project_id = %self.repository.get_project_id(), - path = %path.display(), - "wrote session wd file" - ); - - Ok(()) - } -} diff --git a/gitbutler-app/src/error.rs b/gitbutler-app/src/error.rs index 19546ac09..ea7baa8bf 100644 
--- a/gitbutler-app/src/error.rs +++ b/gitbutler-app/src/error.rs @@ -1,9 +1,9 @@ #[cfg(feature = "sentry")] mod sentry; -pub use legacy::*; +pub(crate) use legacy::*; -pub mod gb { +pub(crate) mod gb { #[cfg(feature = "error-context")] pub use error_context::*; @@ -319,6 +319,7 @@ pub mod gb { mod legacy { use core::fmt; + use gitbutler::project_repository; use serde::{ser::SerializeMap, Serialize}; #[derive(Debug)] @@ -389,4 +390,19 @@ mod legacy { Error::Unknown } } + + impl From for Error { + fn from(value: project_repository::OpenError) -> Self { + match value { + project_repository::OpenError::NotFound(path) => Error::UserError { + code: Code::Projects, + message: format!("{} not found", path.display()), + }, + project_repository::OpenError::Other(error) => { + tracing::error!(?error); + Error::Unknown + } + } + } + } } diff --git a/gitbutler-app/src/events.rs b/gitbutler-app/src/events.rs index 67ae38621..1f2d06175 100644 --- a/gitbutler-app/src/events.rs +++ b/gitbutler-app/src/events.rs @@ -1,7 +1,7 @@ use anyhow::{Context, Result}; use tauri::{AppHandle, Manager}; -use crate::{ +use gitbutler::{ deltas, projects::ProjectId, reader, diff --git a/gitbutler-app/src/fs.rs b/gitbutler-app/src/fs.rs deleted file mode 100644 index ad203f885..000000000 --- a/gitbutler-app/src/fs.rs +++ /dev/null @@ -1,30 +0,0 @@ -use std::path::{Path, PathBuf}; - -use anyhow::Result; -use walkdir::WalkDir; - -// Returns an ordered list of relative paths for files inside a directory recursively. 
-pub fn list_files>(dir_path: P, ignore_prefixes: &[P]) -> Result> { - let mut files = vec![]; - let dir_path = dir_path.as_ref(); - if !dir_path.exists() { - return Ok(files); - } - for entry in WalkDir::new(dir_path) { - let entry = entry?; - if !entry.file_type().is_dir() { - let path = entry.path(); - let path = path.strip_prefix(dir_path)?; - let path = path.to_path_buf(); - if ignore_prefixes - .iter() - .any(|prefix| path.starts_with(prefix.as_ref())) - { - continue; - } - files.push(path); - } - } - files.sort(); - Ok(files) -} diff --git a/gitbutler-app/src/gb_repository.rs b/gitbutler-app/src/gb_repository.rs deleted file mode 100644 index 5e4879e0e..000000000 --- a/gitbutler-app/src/gb_repository.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod repository; - -pub use repository::{RemoteError, Repository}; diff --git a/gitbutler-app/src/gb_repository/repository.rs b/gitbutler-app/src/gb_repository/repository.rs deleted file mode 100644 index 682a38b0c..000000000 --- a/gitbutler-app/src/gb_repository/repository.rs +++ /dev/null @@ -1,967 +0,0 @@ -use std::{ - collections::{HashMap, HashSet}, - fs::File, - io::{BufReader, Read}, - path, time, -}; - -#[cfg(target_os = "windows")] -use crate::windows::MetadataShim; -#[cfg(target_family = "unix")] -use std::os::unix::prelude::*; - -use anyhow::{anyhow, Context, Result}; -use filetime::FileTime; -use fslock::LockFile; -use sha2::{Digest, Sha256}; - -use crate::{ - deltas, fs, git, project_repository, - projects::{self, ProjectId}, - reader, sessions, - sessions::SessionId, - users, - virtual_branches::{self, target}, -}; - -pub struct Repository { - git_repository: git::Repository, - project: projects::Project, - lock_path: path::PathBuf, -} - -#[derive(Debug, thiserror::Error)] -pub enum Error { - #[error("path not found: {0}")] - ProjectPathNotFound(path::PathBuf), - #[error(transparent)] - Git(#[from] git::Error), - #[error(transparent)] - Other(#[from] anyhow::Error), - #[error("path has invalid utf-8 bytes: {0}")] - 
InvalidUnicodePath(path::PathBuf), -} - -impl Repository { - pub fn open( - root: &path::Path, - project_repository: &project_repository::Repository, - user: Option<&users::User>, - ) -> Result { - let project = project_repository.project(); - let project_objects_path = project.path.join(".git/objects"); - if !project_objects_path.exists() { - return Err(Error::ProjectPathNotFound(project_objects_path)); - } - - let projects_dir = root.join("projects"); - let path = projects_dir.join(project.id.to_string()); - - let lock_path = projects_dir.join(format!("{}.lock", project.id)); - - if path.exists() { - let git_repository = git::Repository::open(path.clone()) - .with_context(|| format!("{}: failed to open git repository", path.display()))?; - - git_repository - .add_disk_alternate(project_objects_path.to_str().unwrap()) - .context("failed to add disk alternate")?; - - Result::Ok(Self { - git_repository, - project: project.clone(), - lock_path, - }) - } else { - std::fs::create_dir_all(&path).context("failed to create project directory")?; - - let git_repository = git::Repository::init_opts( - &path, - git2::RepositoryInitOptions::new() - .bare(true) - .initial_head("refs/heads/current") - .external_template(false), - ) - .with_context(|| format!("{}: failed to initialize git repository", path.display()))?; - - git_repository - .add_disk_alternate(project_objects_path.to_str().unwrap()) - .context("failed to add disk alternate")?; - - let gb_repository = Self { - git_repository, - project: project.clone(), - lock_path, - }; - - let _lock = gb_repository.lock(); - let session = gb_repository.create_current_session(project_repository)?; - drop(_lock); - - gb_repository - .flush_session(project_repository, &session, user) - .context("failed to run initial flush")?; - - Result::Ok(gb_repository) - } - } - - pub fn get_project_id(&self) -> &ProjectId { - &self.project.id - } - - fn remote(&self, user: Option<&users::User>) -> Result> { - // only push if logged in - let 
access_token = match user { - Some(user) => user.access_token.clone(), - None => return Ok(None), - }; - - // only push if project is connected - let remote_url = match &self.project.api { - Some(api) => api.git_url.clone(), - None => return Ok(None), - }; - - let remote = self - .git_repository - .remote_anonymous(&remote_url.parse().unwrap()) - .with_context(|| { - format!( - "failed to create anonymous remote for {}", - remote_url.as_str() - ) - })?; - - Ok(Some((remote, access_token))) - } - - pub fn fetch(&self, user: Option<&users::User>) -> Result<(), RemoteError> { - let (mut remote, access_token) = match self.remote(user)? { - Some((remote, access_token)) => (remote, access_token), - None => return Result::Ok(()), - }; - - let mut callbacks = git2::RemoteCallbacks::new(); - if self.project.omit_certificate_check.unwrap_or(false) { - callbacks.certificate_check(|_, _| Ok(git2::CertificateCheckStatus::CertificateOk)); - } - callbacks.push_update_reference(move |refname, message| { - tracing::debug!( - project_id = %self.project.id, - refname, - message, - "pulling reference" - ); - Result::Ok(()) - }); - callbacks.push_transfer_progress(move |one, two, three| { - tracing::debug!( - project_id = %self.project.id, - "transferred {}/{}/{} objects", - one, - two, - three - ); - }); - - let mut fetch_opts = git2::FetchOptions::new(); - fetch_opts.remote_callbacks(callbacks); - let auth_header = format!("Authorization: {}", access_token); - let headers = &[auth_header.as_str()]; - fetch_opts.custom_headers(headers); - - remote - .fetch(&["refs/heads/*:refs/remotes/*"], Some(&mut fetch_opts)) - .map_err(|error| match error { - git::Error::Network(error) => { - tracing::warn!(project_id = %self.project.id, error = %error, "failed to fetch gb repo"); - RemoteError::Network - } - error => RemoteError::Other(error.into()), - })?; - - tracing::info!( - project_id = %self.project.id, - "gb repo fetched", - ); - - Result::Ok(()) - } - - pub fn push(&self, user: 
Option<&users::User>) -> Result<(), RemoteError> { - let (mut remote, access_token) = match self.remote(user)? { - Some((remote, access_token)) => (remote, access_token), - None => return Ok(()), - }; - - // Set the remote's callbacks - let mut callbacks = git2::RemoteCallbacks::new(); - if self.project.omit_certificate_check.unwrap_or(false) { - callbacks.certificate_check(|_, _| Ok(git2::CertificateCheckStatus::CertificateOk)); - } - callbacks.push_update_reference(move |refname, message| { - tracing::debug!( - project_id = %self.project.id, - refname, - message, - "pushing reference" - ); - Result::Ok(()) - }); - callbacks.push_transfer_progress(move |current, total, bytes| { - tracing::debug!( - project_id = %self.project.id, - "transferred {}/{}/{} objects", - current, - total, - bytes - ); - }); - - let mut push_options = git2::PushOptions::new(); - push_options.remote_callbacks(callbacks); - let auth_header = format!("Authorization: {}", access_token); - let headers = &[auth_header.as_str()]; - push_options.custom_headers(headers); - - let remote_refspec = format!("refs/heads/current:refs/heads/{}", self.project.id); - - // Push to the remote - remote - .push(&[&remote_refspec], Some(&mut push_options)).map_err(|error| match error { - git::Error::Network(error) => { - tracing::warn!(project_id = %self.project.id, error = %error, "failed to push gb repo"); - RemoteError::Network - } - error => RemoteError::Other(error.into()), - })?; - - tracing::info!(project_id = %self.project.id, "gb repository pushed"); - - Ok(()) - } - - // take branches from the last session and put them into the current session - fn copy_branches(&self) -> Result<()> { - let last_session = self - .get_sessions_iterator() - .context("failed to get sessions iterator")? 
- .next(); - if last_session.is_none() { - return Ok(()); - } - let last_session = last_session - .unwrap() - .context("failed to read last session")?; - let last_session_reader = sessions::Reader::open(self, &last_session) - .context("failed to open last session reader")?; - - let branches = virtual_branches::Iterator::new(&last_session_reader) - .context("failed to read virtual branches")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")? - .into_iter() - .collect::>(); - - let src_target_reader = virtual_branches::target::Reader::new(&last_session_reader); - let dst_target_writer = virtual_branches::target::Writer::new(self, self.project.gb_dir()) - .context("failed to open target writer for current session")?; - - // copy default target - let default_target = match src_target_reader.read_default() { - Result::Ok(target) => Ok(Some(target)), - Err(reader::Error::NotFound) => Ok(None), - Err(err) => Err(err).context("failed to read default target"), - }?; - if let Some(default_target) = default_target.as_ref() { - dst_target_writer - .write_default(default_target) - .context("failed to write default target")?; - } - - // copy branch targets - for branch in &branches { - let target = src_target_reader - .read(&branch.id) - .with_context(|| format!("{}: failed to read target", branch.id))?; - if let Some(default_target) = default_target.as_ref() { - if *default_target == target { - continue; - } - } - dst_target_writer - .write(&branch.id, &target) - .with_context(|| format!("{}: failed to write target", branch.id))?; - } - - let dst_branch_writer = virtual_branches::branch::Writer::new(self, self.project.gb_dir()) - .context("failed to open branch writer for current session")?; - - // copy branches that we don't already have - for branch in &branches { - dst_branch_writer - .write(&mut branch.clone()) - .with_context(|| format!("{}: failed to write branch", branch.id))?; - } - - Ok(()) - } - - fn create_current_session( - &self, - 
project_repository: &project_repository::Repository, - ) -> Result { - let now_ms = time::SystemTime::now() - .duration_since(time::UNIX_EPOCH) - .unwrap() - .as_millis(); - - let meta = match project_repository.get_head() { - Result::Ok(head) => sessions::Meta { - start_timestamp_ms: now_ms, - last_timestamp_ms: now_ms, - branch: head.name().map(|name| name.to_string()), - commit: Some(head.peel_to_commit()?.id().to_string()), - }, - Err(_) => sessions::Meta { - start_timestamp_ms: now_ms, - last_timestamp_ms: now_ms, - branch: None, - commit: None, - }, - }; - - let session = sessions::Session { - id: SessionId::generate(), - hash: None, - meta, - }; - - // write session to disk - sessions::Writer::new(self) - .context("failed to create session writer")? - .write(&session) - .context("failed to write session")?; - - tracing::info!( - project_id = %self.project.id, - session_id = %session.id, - "created new session" - ); - - self.flush_gitbutler_file(&session.id)?; - - Ok(session) - } - - pub fn lock(&self) -> LockFile { - let mut lockfile = LockFile::open(&self.lock_path).expect("failed to open lock file"); - lockfile.lock().expect("failed to obtain lock on lock file"); - lockfile - } - - pub fn mark_active_session(&self) -> Result<()> { - let current_session = self - .get_or_create_current_session() - .context("failed to get current session")?; - - let updated_session = sessions::Session { - meta: sessions::Meta { - last_timestamp_ms: time::SystemTime::now() - .duration_since(time::UNIX_EPOCH) - .unwrap() - .as_millis(), - ..current_session.meta - }, - ..current_session - }; - - sessions::Writer::new(self) - .context("failed to create session writer")? - .write(&updated_session) - .context("failed to write session")?; - - Ok(()) - } - - pub fn get_latest_session(&self) -> Result> { - if let Some(current_session) = self.get_current_session()? 
{ - Ok(Some(current_session)) - } else { - let mut sessions_iterator = self.get_sessions_iterator()?; - sessions_iterator - .next() - .transpose() - .context("failed to get latest session") - } - } - - pub fn get_or_create_current_session(&self) -> Result { - let _lock = self.lock(); - - let reader = reader::Reader::open(&self.root())?; - match sessions::Session::try_from(&reader) { - Result::Ok(session) => Ok(session), - Err(sessions::SessionError::NoSession) => { - let project_repository = project_repository::Repository::open(&self.project) - .context("failed to open project repository")?; - let session = self - .create_current_session(&project_repository) - .context("failed to create current session")?; - drop(_lock); - self.copy_branches().context("failed to unpack branches")?; - Ok(session) - } - Err(err) => Err(err).context("failed to read current session"), - } - } - - pub fn flush( - &self, - project_repository: &project_repository::Repository, - user: Option<&users::User>, - ) -> Result> { - let current_session = self - .get_current_session() - .context("failed to get current session")?; - if current_session.is_none() { - return Ok(None); - } - - let current_session = current_session.unwrap(); - let current_session = self - .flush_session(project_repository, ¤t_session, user) - .context(format!("failed to flush session {}", current_session.id))?; - Ok(Some(current_session)) - } - - pub fn flush_session( - &self, - project_repository: &project_repository::Repository, - session: &sessions::Session, - user: Option<&users::User>, - ) -> Result { - if session.hash.is_some() { - return Ok(session.clone()); - } - - if !self.root().exists() { - return Err(anyhow!("nothing to flush")); - } - - let _lock = self.lock(); - - // update last timestamp - let session_writer = - sessions::Writer::new(self).context("failed to create session writer")?; - session_writer.write(session)?; - - let mut tree_builder = self.git_repository.treebuilder(None); - - tree_builder.upsert( 
- "session", - build_session_tree(self).context("failed to build session tree")?, - git::FileMode::Tree, - ); - tree_builder.upsert( - "wd", - build_wd_tree(self, project_repository) - .context("failed to build working directory tree")?, - git::FileMode::Tree, - ); - tree_builder.upsert( - "branches", - build_branches_tree(self).context("failed to build branches tree")?, - git::FileMode::Tree, - ); - - let tree_id = tree_builder.write().context("failed to write tree")?; - - let commit_oid = - write_gb_commit(tree_id, self, user).context("failed to write gb commit")?; - - tracing::info!( - project_id = %self.project.id, - session_id = %session.id, - %commit_oid, - "flushed session" - ); - - session_writer.remove()?; - - let session = sessions::Session { - hash: Some(commit_oid), - ..session.clone() - }; - - Ok(session) - } - - pub fn get_sessions_iterator(&self) -> Result> { - sessions::SessionsIterator::new(&self.git_repository) - } - - pub fn get_current_session(&self) -> Result> { - let _lock = self.lock(); - let reader = reader::Reader::open(&self.root())?; - match sessions::Session::try_from(&reader) { - Ok(session) => Ok(Some(session)), - Err(sessions::SessionError::NoSession) => Ok(None), - Err(sessions::SessionError::Other(err)) => Err(err), - } - } - - pub fn root(&self) -> std::path::PathBuf { - self.git_repository.path().join("gitbutler") - } - - pub fn session_path(&self) -> std::path::PathBuf { - self.root().join("session") - } - - pub fn git_repository_path(&self) -> &std::path::Path { - self.git_repository.path() - } - - pub fn session_wd_path(&self) -> std::path::PathBuf { - self.session_path().join("wd") - } - - pub fn default_target(&self) -> Result> { - if let Some(latest_session) = self.get_latest_session()? 
{ - let latest_session_reader = sessions::Reader::open(self, &latest_session) - .context("failed to open current session")?; - let target_reader = target::Reader::new(&latest_session_reader); - match target_reader.read_default() { - Result::Ok(target) => Ok(Some(target)), - Err(reader::Error::NotFound) => Ok(None), - Err(err) => Err(err.into()), - } - } else { - Ok(None) - } - } - - fn flush_gitbutler_file(&self, session_id: &SessionId) -> Result<()> { - let gb_path = self.git_repository.path(); - let project_id = self.project.id.to_string(); - let gb_file_content = serde_json::json!({ - "sessionId": session_id, - "repositoryId": project_id, - "gbPath": gb_path, - "api": self.project.api, - }); - - let gb_file_path = self.project.path.join(".git/gitbutler.json"); - std::fs::write(&gb_file_path, gb_file_content.to_string())?; - - tracing::debug!("gitbutler file updated: {:?}", gb_file_path); - - Ok(()) - } - - pub fn git_repository(&self) -> &git::Repository { - &self.git_repository - } -} - -fn build_wd_tree( - gb_repository: &Repository, - project_repository: &project_repository::Repository, -) -> Result { - match gb_repository - .git_repository - .find_reference(&"refs/heads/current".parse().unwrap()) - { - Result::Ok(reference) => build_wd_tree_from_reference(gb_repository, &reference) - .context("failed to build wd index"), - Err(git::Error::NotFound(_)) => build_wd_tree_from_repo(gb_repository, project_repository) - .context("failed to build wd index"), - Err(e) => Err(e.into()), - } -} - -fn build_wd_tree_from_reference( - gb_repository: &Repository, - reference: &git::Reference, -) -> Result { - // start off with the last tree as a base - let tree = reference.peel_to_tree()?; - let wd_tree_entry = tree.get_name("wd").unwrap(); - let wd_tree = gb_repository.git_repository.find_tree(wd_tree_entry.id())?; - let mut index = git::Index::try_from(&wd_tree)?; - - // write updated files on top of the last tree - for file_path in 
fs::list_files(gb_repository.session_wd_path(), &[]).with_context(|| { - format!( - "failed to session working directory files list files in {}", - gb_repository.session_wd_path().display() - ) - })? { - add_wd_path( - &mut index, - &gb_repository.session_wd_path(), - &file_path, - gb_repository, - ) - .with_context(|| { - format!( - "failed to add session working directory path {}", - file_path.display() - ) - })?; - } - - let session_reader = reader::Reader::open(&gb_repository.root())?; - let deltas = deltas::Reader::from(&session_reader) - .read(None) - .context("failed to read deltas")?; - let wd_files = session_reader.list_files(path::Path::new("session/wd"))?; - let wd_files = wd_files.iter().collect::>(); - - // if a file has delta, but doesn't exist in wd, it was deleted - let deleted_files = deltas - .keys() - .filter(|key| !wd_files.contains(key)) - .collect::>(); - - for deleted_file in deleted_files { - index - .remove_path(deleted_file) - .context("failed to remove path")?; - } - - let wd_tree_oid = index - .write_tree_to(&gb_repository.git_repository) - .context("failed to write wd tree")?; - Ok(wd_tree_oid) -} - -// build wd index from the working directory files new session wd files -// this is important because we want to make sure session files are in sync with session deltas -fn build_wd_tree_from_repo( - gb_repository: &Repository, - project_repository: &project_repository::Repository, -) -> Result { - let mut index = git::Index::new()?; - - let mut added: HashMap = HashMap::new(); - - // first, add session/wd files. session/wd are written at the same time as deltas, so it's important to add them first - // to make sure they are in sync with the deltas - for file_path in fs::list_files(gb_repository.session_wd_path(), &[]).with_context(|| { - format!( - "failed to session working directory files list files in {}", - gb_repository.session_wd_path().display() - ) - })? 
{ - if project_repository - .git_repository - .is_path_ignored(&file_path) - .unwrap_or(true) - { - continue; - } - - add_wd_path( - &mut index, - &gb_repository.session_wd_path(), - &file_path, - gb_repository, - ) - .with_context(|| { - format!( - "failed to add session working directory path {}", - file_path.display() - ) - })?; - added.insert(file_path.to_string_lossy().to_string(), true); - } - - // finally, add files from the working directory if they aren't already in the index - for file_path in fs::list_files(project_repository.root(), &[path::Path::new(".git")]) - .with_context(|| { - format!( - "failed to working directory list files in {}", - project_repository.root().display() - ) - })? - { - if added.contains_key(&file_path.to_string_lossy().to_string()) { - continue; - } - - if project_repository - .git_repository - .is_path_ignored(&file_path) - .unwrap_or(true) - { - continue; - } - - add_wd_path( - &mut index, - project_repository.root(), - &file_path, - gb_repository, - ) - .with_context(|| { - format!( - "failed to add working directory path {}", - file_path.display() - ) - })?; - } - - let tree_oid = index - .write_tree_to(&gb_repository.git_repository) - .context("failed to write tree to repo")?; - Ok(tree_oid) -} - -// take a file path we see and add it to our in-memory index -// we call this from build_initial_wd_tree, which is smart about using the existing index to avoid rehashing files that haven't changed -// and also looks for large files and puts in a placeholder hash in the LFS format -// TODO: actually upload the file to LFS -fn add_wd_path( - index: &mut git::Index, - dir: &std::path::Path, - rel_file_path: &std::path::Path, - gb_repository: &Repository, -) -> Result<()> { - let file_path = dir.join(rel_file_path); - - let metadata = std::fs::symlink_metadata(&file_path).context("failed to get metadata for")?; - let modify_time = FileTime::from_last_modification_time(&metadata); - let create_time = 
FileTime::from_creation_time(&metadata).unwrap_or(modify_time); - - // look for files that are bigger than 4GB, which are not supported by git - // insert a pointer as the blob content instead - // TODO: size limit should be configurable - let blob = if metadata.is_symlink() { - // it's a symlink, make the content the path of the link - let link_target = std::fs::read_link(&file_path)?; - // if the link target is inside the project repository, make it relative - let link_target = link_target.strip_prefix(dir).unwrap_or(&link_target); - gb_repository.git_repository.blob( - link_target - .to_str() - .ok_or_else(|| Error::InvalidUnicodePath(link_target.into()))? - .as_bytes(), - )? - } else if metadata.len() > 100_000_000 { - tracing::warn!( - project_id = %gb_repository.project.id, - path = %file_path.display(), - "file too big" - ); - - // get a sha256 hash of the file first - let sha = sha256_digest(&file_path)?; - - // put togther a git lfs pointer file: https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md - let mut lfs_pointer = String::from("version https://git-lfs.github.com/spec/v1\n"); - lfs_pointer.push_str("oid sha256:"); - lfs_pointer.push_str(&sha); - lfs_pointer.push('\n'); - lfs_pointer.push_str("size "); - lfs_pointer.push_str(&metadata.len().to_string()); - lfs_pointer.push('\n'); - - // write the file to the .git/lfs/objects directory - // create the directory recursively if it doesn't exist - let lfs_objects_dir = gb_repository.git_repository.path().join("lfs/objects"); - std::fs::create_dir_all(lfs_objects_dir.clone())?; - let lfs_path = lfs_objects_dir.join(sha); - std::fs::copy(file_path, lfs_path)?; - - gb_repository.git_repository.blob(lfs_pointer.as_bytes())? - } else { - // read the file into a blob, get the object id - gb_repository.git_repository.blob_path(&file_path)? 
- }; - - // create a new IndexEntry from the file metadata - // truncation is ok https://libgit2.org/libgit2/#HEAD/type/git_index_entry - #[allow(clippy::cast_possible_truncation)] - index - .add(&git::IndexEntry { - ctime: create_time, - mtime: modify_time, - dev: metadata.dev() as u32, - ino: metadata.ino() as u32, - mode: 33188, - uid: metadata.uid(), - gid: metadata.gid(), - file_size: metadata.len() as u32, - flags: 10, // normal flags for normal file (for the curious: https://git-scm.com/docs/index-format) - flags_extended: 0, // no extended flags - path: rel_file_path.to_str().unwrap().to_string().into(), - id: blob, - }) - .with_context(|| format!("failed to add index entry for {}", rel_file_path.display()))?; - - Ok(()) -} - -/// calculates sha256 digest of a large file as lowercase hex string via streaming buffer -/// used to calculate the hash of large files that are not supported by git -fn sha256_digest(path: &std::path::Path) -> Result { - let input = File::open(path)?; - let mut reader = BufReader::new(input); - - let digest = { - let mut hasher = Sha256::new(); - let mut buffer = [0; 1024]; - loop { - let count = reader.read(&mut buffer)?; - if count == 0 { - break; - } - hasher.update(&buffer[..count]); - } - hasher.finalize() - }; - Ok(format!("{:X}", digest)) -} - -fn build_branches_tree(gb_repository: &Repository) -> Result { - let mut index = git::Index::new()?; - - let branches_dir = gb_repository.root().join("branches"); - for file_path in - fs::list_files(&branches_dir, &[]).context("failed to find branches directory")? 
- { - let file_path = std::path::Path::new(&file_path); - add_file_to_index( - gb_repository, - &mut index, - file_path, - &branches_dir.join(file_path), - ) - .context("failed to add branch file to index")?; - } - - let tree_oid = index - .write_tree_to(&gb_repository.git_repository) - .context("failed to write index to tree")?; - - Ok(tree_oid) -} - -fn build_session_tree(gb_repository: &Repository) -> Result { - let mut index = git::Index::new()?; - - // add all files in the working directory to the in-memory index, skipping for matching entries in the repo index - for file_path in fs::list_files( - gb_repository.session_path(), - &[path::Path::new("wd").to_path_buf()], - ) - .context("failed to list session files")? - { - add_file_to_index( - gb_repository, - &mut index, - &file_path, - &gb_repository.session_path().join(&file_path), - ) - .with_context(|| format!("failed to add session file: {}", file_path.display()))?; - } - - let tree_oid = index - .write_tree_to(&gb_repository.git_repository) - .context("failed to write index to tree")?; - - Ok(tree_oid) -} - -// this is a helper function for build_gb_tree that takes paths under .git/gb/session and adds them to the in-memory index -fn add_file_to_index( - gb_repository: &Repository, - index: &mut git::Index, - rel_file_path: &std::path::Path, - abs_file_path: &std::path::Path, -) -> Result<()> { - let blob = gb_repository.git_repository.blob_path(abs_file_path)?; - let metadata = abs_file_path.metadata()?; - let modified_time = FileTime::from_last_modification_time(&metadata); - let create_time = FileTime::from_creation_time(&metadata).unwrap_or(modified_time); - - // create a new IndexEntry from the file metadata - // truncation is ok https://libgit2.org/libgit2/#HEAD/type/git_index_entry - #[allow(clippy::cast_possible_truncation)] - index - .add(&git::IndexEntry { - ctime: create_time, - mtime: modified_time, - dev: metadata.dev() as u32, - ino: metadata.ino() as u32, - mode: 33188, - uid: 
metadata.uid(), - gid: metadata.gid(), - file_size: metadata.len() as u32, - flags: 10, // normal flags for normal file (for the curious: https://git-scm.com/docs/index-format) - flags_extended: 0, // no extended flags - path: rel_file_path.to_str().unwrap().into(), - id: blob, - }) - .with_context(|| format!("Failed to add file to index: {}", abs_file_path.display()))?; - - Ok(()) -} - -// write a new commit object to the repo -// this is called once we have a tree of deltas, metadata and current wd snapshot -// and either creates or updates the refs/heads/current ref -fn write_gb_commit( - tree_id: git::Oid, - gb_repository: &Repository, - user: Option<&users::User>, -) -> Result { - let comitter = git::Signature::now("gitbutler", "gitbutler@localhost")?; - let author = match user { - None => comitter.clone(), - Some(user) => git::Signature::try_from(user)?, - }; - - let current_refname: git::Refname = "refs/heads/current".parse().unwrap(); - - match gb_repository - .git_repository - .find_reference(¤t_refname) - { - Result::Ok(reference) => { - let last_commit = reference.peel_to_commit()?; - let new_commit = gb_repository.git_repository.commit( - Some(¤t_refname), - &author, // author - &comitter, // committer - "gitbutler check", // commit message - &gb_repository.git_repository.find_tree(tree_id).unwrap(), // tree - &[&last_commit], // parents - )?; - Ok(new_commit) - } - Err(git::Error::NotFound(_)) => { - let new_commit = gb_repository.git_repository.commit( - Some(¤t_refname), - &author, // author - &comitter, // committer - "gitbutler check", // commit message - &gb_repository.git_repository.find_tree(tree_id).unwrap(), // tree - &[], // parents - )?; - Ok(new_commit) - } - Err(e) => Err(e.into()), - } -} - -#[derive(Debug, thiserror::Error)] -pub enum RemoteError { - #[error("network error")] - Network, - #[error(transparent)] - Other(#[from] anyhow::Error), -} diff --git a/gitbutler-app/src/git.rs b/gitbutler-app/src/git.rs deleted file mode 100644 
index 1aab9ff90..000000000 --- a/gitbutler-app/src/git.rs +++ /dev/null @@ -1,42 +0,0 @@ -pub mod credentials; -pub mod diff; -pub mod show; - -mod blob; -pub use blob::*; - -mod error; -pub use error::*; - -mod reference; -pub use reference::*; -mod repository; - -pub use repository::*; - -mod commit; -pub use commit::*; - -mod branch; -pub use branch::*; - -mod tree; -pub use tree::*; - -mod remote; -pub use remote::*; - -mod index; -pub use index::*; - -mod oid; -pub use oid::*; - -mod signature; -pub use signature::*; - -mod config; -pub use config::*; - -mod url; -pub use self::url::*; diff --git a/gitbutler-app/src/git/blob.rs b/gitbutler-app/src/git/blob.rs deleted file mode 100644 index 10e902c04..000000000 --- a/gitbutler-app/src/git/blob.rs +++ /dev/null @@ -1,17 +0,0 @@ -pub struct Blob<'a>(git2::Blob<'a>); - -impl<'a> From> for Blob<'a> { - fn from(value: git2::Blob<'a>) -> Self { - Self(value) - } -} - -impl Blob<'_> { - pub fn content(&self) -> &[u8] { - self.0.content() - } - - pub fn size(&self) -> usize { - self.0.size() - } -} diff --git a/gitbutler-app/src/git/branch.rs b/gitbutler-app/src/git/branch.rs deleted file mode 100644 index 701d805be..000000000 --- a/gitbutler-app/src/git/branch.rs +++ /dev/null @@ -1,53 +0,0 @@ -use super::{Commit, Oid, Result, Tree}; - -pub struct Branch<'repo> { - branch: git2::Branch<'repo>, -} - -impl<'repo> From> for Branch<'repo> { - fn from(branch: git2::Branch<'repo>) -> Self { - Self { branch } - } -} - -impl<'repo> Branch<'repo> { - pub fn name(&self) -> Option<&str> { - self.branch.get().name() - } - - pub fn refname(&self) -> Option<&str> { - self.branch.get().name() - } - - pub fn target(&self) -> Option { - self.branch.get().target().map(Into::into) - } - - pub fn upstream(&self) -> Result> { - self.branch.upstream().map(Into::into).map_err(Into::into) - } - - pub fn refname_bytes(&self) -> &[u8] { - self.branch.get().name_bytes() - } - - pub fn peel_to_tree(&self) -> Result> { - self.branch - .get() - 
.peel_to_tree() - .map_err(Into::into) - .map(Into::into) - } - - pub fn peel_to_commit(&self) -> Result> { - self.branch - .get() - .peel_to_commit() - .map(Into::into) - .map_err(Into::into) - } - - pub fn is_remote(&self) -> bool { - self.branch.get().is_remote() - } -} diff --git a/gitbutler-app/src/git/commit.rs b/gitbutler-app/src/git/commit.rs deleted file mode 100644 index 43f03ab00..000000000 --- a/gitbutler-app/src/git/commit.rs +++ /dev/null @@ -1,75 +0,0 @@ -use super::{Oid, Result, Signature, Tree}; - -pub struct Commit<'repo> { - commit: git2::Commit<'repo>, -} - -impl<'repo> From> for Commit<'repo> { - fn from(commit: git2::Commit<'repo>) -> Self { - Self { commit } - } -} - -impl<'repo> From<&'repo git2::Commit<'repo>> for Commit<'repo> { - fn from(commit: &'repo git2::Commit<'repo>) -> Self { - Self { - commit: commit.clone(), - } - } -} - -impl<'repo> From<&'repo Commit<'repo>> for &'repo git2::Commit<'repo> { - fn from(val: &'repo Commit<'repo>) -> Self { - &val.commit - } -} - -impl<'repo> Commit<'repo> { - pub fn id(&self) -> Oid { - self.commit.id().into() - } - - pub fn parent_count(&self) -> usize { - self.commit.parent_count() - } - - pub fn tree(&self) -> Result> { - self.commit.tree().map(Into::into).map_err(Into::into) - } - - pub fn tree_id(&self) -> Oid { - self.commit.tree_id().into() - } - - pub fn parents(&self) -> Result>> { - let mut parents = vec![]; - for i in 0..self.parent_count() { - parents.push(self.parent(i)?); - } - Ok(parents) - } - - pub fn parent(&self, n: usize) -> Result> { - self.commit.parent(n).map(Into::into).map_err(Into::into) - } - - pub fn time(&self) -> git2::Time { - self.commit.time() - } - - pub fn author(&self) -> Signature<'_> { - self.commit.author().into() - } - - pub fn message(&self) -> Option<&str> { - self.commit.message() - } - - pub fn committer(&self) -> Signature<'_> { - self.commit.committer().into() - } - - pub fn raw_header(&self) -> Option<&str> { - self.commit.raw_header() - } -} diff 
--git a/gitbutler-app/src/git/config.rs b/gitbutler-app/src/git/config.rs deleted file mode 100644 index 5afe4ffb9..000000000 --- a/gitbutler-app/src/git/config.rs +++ /dev/null @@ -1,68 +0,0 @@ -use super::{Error, Result}; - -pub struct Config { - config: git2::Config, -} - -impl From for Config { - fn from(config: git2::Config) -> Self { - Self { config } - } -} - -impl From for git2::Config { - fn from(v: Config) -> Self { - v.config - } -} - -impl Config { - pub fn set_str(&mut self, key: &str, value: &str) -> Result<()> { - self.config.set_str(key, value).map_err(Into::into) - } - - pub fn set_bool(&mut self, key: &str, value: bool) -> Result<()> { - self.config.set_bool(key, value).map_err(Into::into) - } - - pub fn set_multivar(&mut self, key: &str, regexp: &str, value: &str) -> Result<()> { - self.config - .set_multivar(key, regexp, value) - .map_err(Into::into) - } - - pub fn get_string(&self, key: &str) -> Result> { - match self.config.get_string(key).map_err(Into::into) { - Ok(value) => Ok(Some(value)), - Err(Error::NotFound(_)) => Ok(None), - Err(e) => Err(e), - } - } - - pub fn get_bool(&self, key: &str) -> Result> { - match self.config.get_bool(key).map_err(Into::into) { - Ok(value) => Ok(Some(value)), - Err(Error::NotFound(_)) => Ok(None), - Err(e) => Err(e), - } - } - - pub fn set_local(&self, key: &str, val: &str) -> Result<()> { - match self.config.open_level(git2::ConfigLevel::Local) { - Ok(mut local) => local.set_str(key, val).map_err(Into::into), - Err(e) => Err(e.into()), - } - } - - pub fn get_local(&self, key: &str) -> Result> { - match self - .config - .open_level(git2::ConfigLevel::Local) - .and_then(|local| local.get_string(key)) - { - Ok(value) => Ok(Some(value)), - Err(e) if e.code() == git2::ErrorCode::NotFound => Ok(None), - Err(e) => Err(e.into()), - } - } -} diff --git a/gitbutler-app/src/git/credentials.rs b/gitbutler-app/src/git/credentials.rs deleted file mode 100644 index c7e0a452b..000000000 --- 
a/gitbutler-app/src/git/credentials.rs +++ /dev/null @@ -1,392 +0,0 @@ -use std::path::PathBuf; - -use crate::{keys, project_repository, projects, users}; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum SshCredential { - Keyfile { - key_path: PathBuf, - passphrase: Option, - }, - GitButlerKey(Box), -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum HttpsCredential { - CredentialHelper { username: String, password: String }, - GitHubToken(String), -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum Credential { - Noop, - Ssh(SshCredential), - Https(HttpsCredential), -} - -impl From for git2::RemoteCallbacks<'_> { - fn from(value: Credential) -> Self { - let mut remote_callbacks = git2::RemoteCallbacks::new(); - match value { - Credential::Noop => {} - Credential::Ssh(SshCredential::Keyfile { - key_path, - passphrase, - }) => { - remote_callbacks.credentials(move |url, _username_from_url, _allowed_types| { - use resolve_path::PathResolveExt; - let key_path = key_path.resolve(); - tracing::info!( - "authenticating with {} using key {}", - url, - key_path.display() - ); - git2::Cred::ssh_key("git", None, &key_path, passphrase.as_deref()) - }); - } - Credential::Ssh(SshCredential::GitButlerKey(key)) => { - remote_callbacks.credentials(move |url, _username_from_url, _allowed_types| { - tracing::info!("authenticating with {} using gitbutler's key", url); - git2::Cred::ssh_key_from_memory("git", None, &key.to_string(), None) - }); - } - Credential::Https(HttpsCredential::CredentialHelper { username, password }) => { - remote_callbacks.credentials(move |url, _username_from_url, _allowed_types| { - tracing::info!("authenticating with {url} as '{username}' with password using credential helper"); - git2::Cred::userpass_plaintext(&username, &password) - }); - } - Credential::Https(HttpsCredential::GitHubToken(token)) => { - remote_callbacks.credentials(move |url, _username_from_url, _allowed_types| { - tracing::info!("authenticating with {url} using github token"); 
- git2::Cred::userpass_plaintext("git", &token) - }); - } - }; - remote_callbacks - } -} - -#[derive(Clone)] -pub struct Helper { - keys: keys::Controller, - users: users::Controller, - home_dir: Option, -} - -#[derive(Debug, thiserror::Error)] -pub enum HelpError { - #[error("no url set for remote")] - NoUrlSet, - #[error("failed to convert url: {0}")] - UrlConvertError(#[from] super::ConvertError), - #[error(transparent)] - Users(#[from] users::GetError), - #[error(transparent)] - Key(#[from] keys::GetOrCreateError), - #[error(transparent)] - Git(#[from] super::Error), -} - -impl From for crate::error::Error { - fn from(value: HelpError) -> Self { - match value { - HelpError::NoUrlSet => Self::UserError { - code: crate::error::Code::ProjectGitRemote, - message: "no url set for remote".to_string(), - }, - HelpError::UrlConvertError(error) => Self::UserError { - code: crate::error::Code::ProjectGitRemote, - message: error.to_string(), - }, - HelpError::Users(error) => error.into(), - HelpError::Key(error) => error.into(), - HelpError::Git(error) => { - tracing::error!(?error, "failed to create auth credentials"); - Self::Unknown - } - } - } -} - -impl Helper { - pub fn new( - keys: keys::Controller, - users: users::Controller, - home_dir: Option, - ) -> Self { - Self { - keys, - users, - home_dir, - } - } - - pub fn from_path>(path: P) -> Self { - let keys = keys::Controller::from_path(&path); - let users = users::Controller::from_path(path); - let home_dir = std::env::var_os("HOME").map(PathBuf::from); - Self::new(keys, users, home_dir) - } - - pub fn help<'a>( - &'a self, - project_repository: &'a project_repository::Repository, - remote_name: &str, - ) -> Result)>, HelpError> { - let remote = project_repository.git_repository.find_remote(remote_name)?; - let remote_url = remote.url()?.ok_or(HelpError::NoUrlSet)?; - - // if file, no auth needed. 
- if remote_url.scheme == super::Scheme::File { - return Ok(vec![(remote, vec![Credential::Noop])]); - } - - match &project_repository.project().preferred_key { - projects::AuthKey::Local { private_key_path } => { - let ssh_remote = if remote_url.scheme == super::Scheme::Ssh { - Ok(remote) - } else { - let ssh_url = remote_url.as_ssh()?; - project_repository.git_repository.remote_anonymous(&ssh_url) - }?; - - Ok(vec![( - ssh_remote, - vec![Credential::Ssh(SshCredential::Keyfile { - key_path: private_key_path.clone(), - passphrase: None, - })], - )]) - } - projects::AuthKey::GitCredentialsHelper => { - let https_remote = if remote_url.scheme == super::Scheme::Https { - Ok(remote) - } else { - let url = remote_url.as_https()?; - project_repository.git_repository.remote_anonymous(&url) - }?; - let flow = Self::https_flow(project_repository, &remote_url)? - .into_iter() - .map(Credential::Https) - .collect::>(); - Ok(vec![(https_remote, flow)]) - } - projects::AuthKey::Generated => { - let generated_flow = self.generated_flow(remote, project_repository)?; - - let remote = project_repository.git_repository.find_remote(remote_name)?; - let default_flow = self.default_flow(remote, project_repository)?; - - Ok(vec![generated_flow, default_flow] - .into_iter() - .flatten() - .collect()) - } - projects::AuthKey::Default => self.default_flow(remote, project_repository), - projects::AuthKey::SystemExecutable => { - tracing::error!("WARNING: FIXME: this codepath should NEVER be hit. 
Something is seriously wrong."); - self.default_flow(remote, project_repository) - } - } - } - - fn generated_flow<'a>( - &'a self, - remote: super::Remote<'a>, - project_repository: &'a project_repository::Repository, - ) -> Result)>, HelpError> { - let remote_url = remote.url()?.ok_or(HelpError::NoUrlSet)?; - - let ssh_remote = if remote_url.scheme == super::Scheme::Ssh { - Ok(remote) - } else { - let ssh_url = remote_url.as_ssh()?; - project_repository.git_repository.remote_anonymous(&ssh_url) - }?; - - let key = self.keys.get_or_create()?; - Ok(vec![( - ssh_remote, - vec![Credential::Ssh(SshCredential::GitButlerKey(Box::new(key)))], - )]) - } - - fn default_flow<'a>( - &'a self, - remote: super::Remote<'a>, - project_repository: &'a project_repository::Repository, - ) -> Result)>, HelpError> { - let remote_url = remote.url()?.ok_or(HelpError::NoUrlSet)?; - - // is github is authenticated, only try github. - if remote_url.is_github() { - if let Some(github_access_token) = self - .users - .get_user()? - .and_then(|user| user.github_access_token) - { - let https_remote = if remote_url.scheme == super::Scheme::Https { - Ok(remote) - } else { - let url = remote_url.as_https()?; - project_repository.git_repository.remote_anonymous(&url) - }?; - return Ok(vec![( - https_remote, - vec![Credential::Https(HttpsCredential::GitHubToken( - github_access_token, - ))], - )]); - } - } - - match remote_url.scheme { - super::Scheme::Https => { - let mut flow = vec![]; - - let https_flow = Self::https_flow(project_repository, &remote_url)? - .into_iter() - .map(Credential::Https) - .collect::>(); - - if !https_flow.is_empty() { - flow.push((remote, https_flow)); - } - - if let Ok(ssh_url) = remote_url.as_ssh() { - let ssh_flow = self - .ssh_flow()? 
- .into_iter() - .map(Credential::Ssh) - .collect::>(); - if !ssh_flow.is_empty() { - flow.push(( - project_repository - .git_repository - .remote_anonymous(&ssh_url)?, - ssh_flow, - )); - } - } - - Ok(flow) - } - super::Scheme::Ssh => { - let mut flow = vec![]; - - let ssh_flow = self - .ssh_flow()? - .into_iter() - .map(Credential::Ssh) - .collect::>(); - if !ssh_flow.is_empty() { - flow.push((remote, ssh_flow)); - } - - if let Ok(https_url) = remote_url.as_https() { - let https_flow = Self::https_flow(project_repository, &https_url)? - .into_iter() - .map(Credential::Https) - .collect::>(); - if !https_flow.is_empty() { - flow.push(( - project_repository - .git_repository - .remote_anonymous(&https_url)?, - https_flow, - )); - } - } - - Ok(flow) - } - _ => { - let mut flow = vec![]; - - if let Ok(https_url) = remote_url.as_https() { - let https_flow = Self::https_flow(project_repository, &https_url)? - .into_iter() - .map(Credential::Https) - .collect::>(); - - if !https_flow.is_empty() { - flow.push(( - project_repository - .git_repository - .remote_anonymous(&https_url)?, - https_flow, - )); - } - } - - if let Ok(ssh_url) = remote_url.as_ssh() { - let ssh_flow = self - .ssh_flow()? 
- .into_iter() - .map(Credential::Ssh) - .collect::>(); - if !ssh_flow.is_empty() { - flow.push(( - project_repository - .git_repository - .remote_anonymous(&ssh_url)?, - ssh_flow, - )); - } - } - - Ok(flow) - } - } - } - - fn https_flow( - project_repository: &project_repository::Repository, - remote_url: &super::Url, - ) -> Result, HelpError> { - let mut flow = vec![]; - - let mut helper = git2::CredentialHelper::new(&remote_url.to_string()); - let config = project_repository.git_repository.config()?; - helper.config(&git2::Config::from(config)); - if let Some((username, password)) = helper.execute() { - flow.push(HttpsCredential::CredentialHelper { username, password }); - } - - Ok(flow) - } - - fn ssh_flow(&self) -> Result, HelpError> { - let mut flow = vec![]; - if let Some(home_path) = self.home_dir.as_ref() { - let id_rsa_path = home_path.join(".ssh").join("id_rsa"); - if id_rsa_path.exists() { - flow.push(SshCredential::Keyfile { - key_path: id_rsa_path.clone(), - passphrase: None, - }); - } - - let id_ed25519_path = home_path.join(".ssh").join("id_ed25519"); - if id_ed25519_path.exists() { - flow.push(SshCredential::Keyfile { - key_path: id_ed25519_path.clone(), - passphrase: None, - }); - } - - let id_ecdsa_path = home_path.join(".ssh").join("id_ecdsa"); - if id_ecdsa_path.exists() { - flow.push(SshCredential::Keyfile { - key_path: id_ecdsa_path.clone(), - passphrase: None, - }); - } - } - - let key = self.keys.get_or_create()?; - flow.push(SshCredential::GitButlerKey(Box::new(key))); - Ok(flow) - } -} diff --git a/gitbutler-app/src/git/diff.rs b/gitbutler-app/src/git/diff.rs deleted file mode 100644 index 807d295d0..000000000 --- a/gitbutler-app/src/git/diff.rs +++ /dev/null @@ -1,421 +0,0 @@ -use std::{collections::HashMap, path, str}; - -use anyhow::{Context, Result}; -use serde::{Deserialize, Serialize}; - -use crate::git; - -use super::Repository; - -/// The type of change -#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] 
-#[serde(rename_all = "lowercase")] -pub enum ChangeType { - /// Entry does not exist in old version - Added, - /// Entry does not exist in new version - Deleted, - /// Entry content changed between old and new - Modified, -} -impl From for ChangeType { - fn from(v: git2::Delta) -> Self { - use git2::Delta as D; - use ChangeType as C; - match v { - D::Untracked | D::Added => C::Added, - D::Modified - | D::Unmodified - | D::Renamed - | D::Copied - | D::Typechange - | D::Conflicted => C::Modified, - D::Ignored | D::Unreadable | D::Deleted => C::Deleted, - } - } -} - -#[derive(Debug, PartialEq, Clone, Serialize)] -pub struct GitHunk { - pub old_start: u32, - pub old_lines: u32, - pub new_start: u32, - pub new_lines: u32, - pub diff: String, - pub binary: bool, - pub change_type: ChangeType, -} - -impl GitHunk { - pub fn contains(&self, line: u32) -> bool { - self.new_start <= line && self.new_start + self.new_lines >= line - } -} - -pub struct Options { - pub context_lines: u32, -} - -impl Default for Options { - fn default() -> Self { - Self { context_lines: 3 } - } -} - -#[derive(Debug, PartialEq, Clone, Serialize, Default)] -#[serde(rename_all = "camelCase")] -pub struct FileDiff { - pub old_path: Option, - pub new_path: Option, - pub hunks: Option>, - pub skipped: bool, - pub binary: bool, - pub old_size_bytes: u64, - pub new_size_bytes: u64, -} - -pub fn workdir( - repository: &Repository, - commit_oid: &git::Oid, - context_lines: u32, -) -> Result> { - let commit = repository - .find_commit(*commit_oid) - .context("failed to find commit")?; - let tree = commit.tree().context("failed to find tree")?; - - let mut diff_opts = git2::DiffOptions::new(); - diff_opts - .recurse_untracked_dirs(true) - .include_untracked(true) - .show_binary(true) - .show_untracked_content(true) - .ignore_submodules(true) - .context_lines(context_lines); - - let mut diff = repository.diff_tree_to_workdir(Some(&tree), Some(&mut diff_opts))?; - let (mut diff_opts, skipped_files) = 
without_large_files(50_000_000, &diff, diff_opts); - if !skipped_files.is_empty() { - diff = repository.diff_tree_to_workdir(Some(&tree), Some(&mut diff_opts))?; - } - let diff_files = hunks_by_filepath(repository, &diff); - diff_files.map(|mut df| { - for (key, value) in skipped_files { - df.insert(key, value); - } - df - }) -} - -pub fn trees( - repository: &Repository, - old_tree: &git::Tree, - new_tree: &git::Tree, - context_lines: u32, -) -> Result> { - let mut diff_opts = git2::DiffOptions::new(); - diff_opts - .recurse_untracked_dirs(true) - .include_untracked(true) - .show_binary(true) - .ignore_submodules(true) - .context_lines(context_lines) - .show_untracked_content(true); - - let diff = - repository.diff_tree_to_tree(Some(old_tree), Some(new_tree), Some(&mut diff_opts))?; - - hunks_by_filepath(repository, &diff) -} - -pub fn without_large_files( - size_limit_bytes: u64, - diff: &git2::Diff, - mut diff_opts: git2::DiffOptions, -) -> (git2::DiffOptions, HashMap) { - let mut skipped_files: HashMap = HashMap::new(); - for delta in diff.deltas() { - if delta.new_file().size() > size_limit_bytes { - if let Some(path) = delta.new_file().path() { - skipped_files.insert( - path.to_path_buf(), - FileDiff { - old_path: delta.old_file().path().map(std::path::Path::to_path_buf), - new_path: delta.new_file().path().map(std::path::Path::to_path_buf), - hunks: None, - skipped: true, - binary: true, - old_size_bytes: delta.old_file().size(), - new_size_bytes: delta.new_file().size(), - }, - ); - } - } else if let Some(path) = delta.new_file().path() { - if let Some(path) = path.to_str() { - diff_opts.pathspec(path); - } - } - } - (diff_opts, skipped_files) -} - -fn hunks_by_filepath( - repository: &Repository, - diff: &git2::Diff, -) -> Result> { - // find all the hunks - let mut hunks_by_filepath: HashMap> = HashMap::new(); - let mut diff_files: HashMap = HashMap::new(); - - diff.print( - git2::DiffFormat::Patch, - |delta, hunk, line: git2::DiffLine<'_>| { - let 
change_type: ChangeType = delta.status().into(); - let file_path = delta.new_file().path().unwrap_or_else(|| { - delta - .old_file() - .path() - .expect("failed to get file name from diff") - }); - - hunks_by_filepath - .entry(file_path.to_path_buf()) - .or_default(); - - let new_start = hunk.as_ref().map_or(0, git2::DiffHunk::new_start); - let new_lines = hunk.as_ref().map_or(0, git2::DiffHunk::new_lines); - let old_start = hunk.as_ref().map_or(0, git2::DiffHunk::old_start); - let old_lines = hunk.as_ref().map_or(0, git2::DiffHunk::old_lines); - - if let Some((line, is_binary)) = match line.origin() { - '+' | '-' | ' ' => { - if let Ok(content) = str::from_utf8(line.content()) { - Some((format!("{}{}", line.origin(), content), false)) - } else { - let full_path = repository.workdir().unwrap().join(file_path); - // save the file_path to the odb - if !delta.new_file().id().is_zero() && full_path.exists() { - // the binary file wasnt deleted - repository.blob_path(full_path.as_path()).unwrap(); - } - Some((delta.new_file().id().to_string(), true)) - } - } - 'B' => { - let full_path = repository.workdir().unwrap().join(file_path); - // save the file_path to the odb - if !delta.new_file().id().is_zero() && full_path.exists() { - // the binary file wasnt deleted - repository.blob_path(full_path.as_path()).unwrap(); - } - Some((delta.new_file().id().to_string(), true)) - } - 'F' => None, - _ => { - if let Ok(content) = str::from_utf8(line.content()) { - Some((content.to_string(), false)) - } else { - let full_path = repository.workdir().unwrap().join(file_path); - // save the file_path to the odb - if !delta.new_file().id().is_zero() && full_path.exists() { - // the binary file wasnt deleted - repository.blob_path(full_path.as_path()).unwrap(); - } - Some((delta.new_file().id().to_string(), true)) - } - } - } { - let hunks = hunks_by_filepath - .entry(file_path.to_path_buf()) - .or_default(); - - if let Some(previous_hunk) = hunks.last_mut() { - let hunk_did_not_change = 
previous_hunk.old_start == old_start - && previous_hunk.old_lines == old_lines - && previous_hunk.new_start == new_start - && previous_hunk.new_lines == new_lines; - - if hunk_did_not_change { - if is_binary { - // binary overrides the diff - previous_hunk.binary = true; - previous_hunk.old_start = 0; - previous_hunk.old_lines = 0; - previous_hunk.new_start = 0; - previous_hunk.new_lines = 0; - previous_hunk.diff = line; - } else if !previous_hunk.binary { - // append non binary hunks - previous_hunk.diff.push_str(&line); - } - } else { - hunks.push(GitHunk { - old_start, - old_lines, - new_start, - new_lines, - diff: line, - binary: is_binary, - change_type, - }); - } - } else { - hunks.push(GitHunk { - old_start, - old_lines, - new_start, - new_lines, - diff: line, - binary: is_binary, - change_type, - }); - } - } - diff_files.insert( - file_path.to_path_buf(), - FileDiff { - old_path: delta.old_file().path().map(std::path::Path::to_path_buf), - new_path: delta.new_file().path().map(std::path::Path::to_path_buf), - hunks: None, - skipped: false, - binary: delta.new_file().is_binary(), - old_size_bytes: delta.old_file().size(), - new_size_bytes: delta.new_file().size(), - }, - ); - - true - }, - ) - .context("failed to print diff")?; - - let hunks_by_filepath: HashMap> = hunks_by_filepath - .into_iter() - .map(|(k, v)| { - if let Some(binary_hunk) = v.iter().find(|hunk| hunk.binary) { - if v.len() > 1 { - // if there are multiple hunks with binary among them, then the binary hunk - // takes precedence - ( - k, - vec![GitHunk { - old_start: 0, - old_lines: 0, - new_start: 0, - new_lines: 0, - diff: binary_hunk.diff.clone(), - binary: true, - change_type: binary_hunk.change_type, - }], - ) - } else { - (k, v) - } - } else if v.is_empty() { - // this is a new file - ( - k, - vec![GitHunk { - old_start: 0, - old_lines: 0, - new_start: 0, - new_lines: 0, - diff: String::new(), - binary: false, - change_type: ChangeType::Modified, - }], - ) - } else { - (k, v) - } - }) 
- .collect(); - - for (file_path, diff_file) in &mut diff_files { - diff_file.hunks = hunks_by_filepath.get(file_path).cloned(); - } - Ok(diff_files) -} - -// returns None if cannot reverse the patch header -fn reverse_patch_header(header: &str) -> Option { - use itertools::Itertools; - - let mut parts = header.split_whitespace(); - - match parts.next() { - Some("@@") => {} - _ => return None, - }; - - let old_range = parts.next()?; - let new_range = parts.next()?; - - match parts.next() { - Some("@@") => {} - _ => return None, - }; - - Some(format!( - "@@ {} {} @@ {}", - new_range.replace('+', "-"), - old_range.replace('-', "+"), - parts.join(" ") - )) -} - -fn reverse_patch(patch: &str) -> Option { - let mut reversed = String::new(); - for line in patch.lines() { - if line.starts_with("@@") { - if let Some(header) = reverse_patch_header(line) { - reversed.push_str(&header); - reversed.push('\n'); - } else { - return None; - } - } else if line.starts_with('+') { - reversed.push_str(&line.replacen('+', "-", 1)); - reversed.push('\n'); - } else if line.starts_with('-') { - reversed.push_str(&line.replacen('-', "+", 1)); - reversed.push('\n'); - } else { - reversed.push_str(line); - reversed.push('\n'); - } - } - Some(reversed) -} - -// returns None if cannot reverse the hunk -pub fn reverse_hunk(hunk: &GitHunk) -> Option { - if hunk.binary { - None - } else { - reverse_patch(&hunk.diff).map(|diff| GitHunk { - old_start: hunk.new_start, - old_lines: hunk.new_lines, - new_start: hunk.old_start, - new_lines: hunk.old_lines, - diff, - binary: hunk.binary, - change_type: hunk.change_type, - }) - } -} - -pub fn diff_files_to_hunks( - files: &HashMap, -) -> HashMap> { - let mut file_hunks: HashMap> = HashMap::new(); - for (file_path, diff_file) in files { - if !diff_file.skipped { - file_hunks.insert( - file_path.clone(), - diff_file.hunks.clone().unwrap_or_default(), - ); - } - } - file_hunks -} diff --git a/gitbutler-app/src/git/error.rs b/gitbutler-app/src/git/error.rs 
deleted file mode 100644 index 298e39ea5..000000000 --- a/gitbutler-app/src/git/error.rs +++ /dev/null @@ -1,62 +0,0 @@ -use std::str::Utf8Error; - -use crate::keys; - -#[derive(Debug, thiserror::Error)] -pub enum Error { - #[error("not found: {0}")] - NotFound(git2::Error), - #[error("authentication failed")] - Auth(git2::Error), - #[error("sign error: {0}")] - Signing(keys::SignError), - #[error("remote url error: {0}")] - Url(super::url::ParseError), - #[error("io error: {0}")] - Io(#[from] std::io::Error), - #[error("network error: {0}")] - Network(git2::Error), - #[error("hook error: {0}")] - Hooks(#[from] git2_hooks::HooksError), - #[error("http error: {0}")] - Http(git2::Error), - #[error("checkout error: {0}")] - Checkout(git2::Error), - #[error(transparent)] - Other(git2::Error), - #[error(transparent)] - Utf8(#[from] Utf8Error), -} - -impl From for Error { - fn from(err: git2::Error) -> Self { - match err.class() { - git2::ErrorClass::Ssh => match err.code() { - git2::ErrorCode::GenericError | git2::ErrorCode::Auth => Error::Auth(err), - _ => Error::Other(err), - }, - git2::ErrorClass::Checkout => Error::Checkout(err), - git2::ErrorClass::Http => Error::Http(err), - git2::ErrorClass::Net => Error::Network(err), - _ => match err.code() { - git2::ErrorCode::NotFound => Error::NotFound(err), - git2::ErrorCode::Auth => Error::Auth(err), - _ => Error::Other(err), - }, - } - } -} - -impl From for Error { - fn from(err: keys::SignError) -> Self { - Error::Signing(err) - } -} - -impl From for Error { - fn from(err: super::url::ParseError) -> Self { - Error::Url(err) - } -} - -pub type Result = std::result::Result; diff --git a/gitbutler-app/src/git/index.rs b/gitbutler-app/src/git/index.rs deleted file mode 100644 index 8b0293bef..000000000 --- a/gitbutler-app/src/git/index.rs +++ /dev/null @@ -1,164 +0,0 @@ -use std::path; - -use filetime::FileTime; - -use super::{Error, Oid, Repository, Result, Tree}; - -pub struct Index { - index: git2::Index, -} - -impl 
TryFrom> for Index { - type Error = Error; - - fn try_from(value: Tree<'_>) -> std::result::Result { - Self::try_from(&value) - } -} - -impl TryFrom<&Tree<'_>> for Index { - type Error = Error; - - fn try_from(value: &Tree) -> Result { - let mut empty_index = Self::new()?; - empty_index.read_tree(value)?; - Ok(empty_index) - } -} - -impl<'a> From<&'a mut Index> for &'a mut git2::Index { - fn from(index: &'a mut Index) -> Self { - &mut index.index - } -} - -impl From for Index { - fn from(index: git2::Index) -> Self { - Self { index } - } -} - -impl Index { - pub fn new() -> Result { - Ok(Index { - index: git2::Index::new()?, - }) - } - - pub fn add_all( - &mut self, - pathspecs: I, - flag: git2::IndexAddOption, - cb: Option<&mut git2::IndexMatchedPath<'_>>, - ) -> Result<()> - where - T: git2::IntoCString, - I: IntoIterator, - { - self.index.add_all(pathspecs, flag, cb).map_err(Into::into) - } - - pub fn conflicts(&self) -> Result { - self.index.conflicts().map_err(Into::into) - } - - pub fn read_tree(&mut self, tree: &Tree) -> Result<()> { - self.index.read_tree(tree.into()).map_err(Into::into) - } - - pub fn write_tree_to(&mut self, repo: &Repository) -> Result { - self.index - .write_tree_to(repo.into()) - .map(Into::into) - .map_err(Into::into) - } - - pub fn has_conflicts(&self) -> bool { - self.index.has_conflicts() - } - - pub fn write_tree(&mut self) -> Result { - self.index.write_tree().map(Into::into).map_err(Into::into) - } - - pub fn add(&mut self, entry: &IndexEntry) -> Result<()> { - self.index.add(&entry.clone().into()).map_err(Into::into) - } - - pub fn write(&mut self) -> Result<()> { - self.index.write().map_err(Into::into) - } - - pub fn add_path(&mut self, path: &path::Path) -> Result<()> { - self.index.add_path(path).map_err(Into::into) - } - - pub fn remove_path(&mut self, path: &path::Path) -> Result<()> { - self.index.remove_path(path).map_err(Into::into) - } - - pub fn get_path(&self, path: &path::Path, stage: i32) -> Option { - 
self.index.get_path(path, stage).map(Into::into) - } -} - -#[derive(Debug, Clone)] -pub struct IndexEntry { - pub ctime: FileTime, - pub mtime: FileTime, - pub dev: u32, - pub ino: u32, - pub mode: u32, - pub uid: u32, - pub gid: u32, - pub file_size: u32, - pub id: Oid, - pub flags: u16, - pub flags_extended: u16, - pub path: Vec, -} - -impl From for IndexEntry { - fn from(value: git2::IndexEntry) -> Self { - Self { - ctime: FileTime::from_unix_time( - i64::from(value.ctime.seconds()), - value.ctime.nanoseconds(), - ), - mtime: FileTime::from_unix_time( - i64::from(value.mtime.seconds()), - value.mtime.nanoseconds(), - ), - dev: value.dev, - ino: value.ino, - mode: value.mode, - uid: value.uid, - gid: value.gid, - file_size: value.file_size, - id: value.id.into(), - flags: value.flags, - flags_extended: value.flags_extended, - path: value.path, - } - } -} - -impl From for git2::IndexEntry { - #[allow(clippy::cast_possible_truncation)] - fn from(entry: IndexEntry) -> Self { - Self { - ctime: git2::IndexTime::new(entry.ctime.seconds() as i32, entry.ctime.nanoseconds()), - mtime: git2::IndexTime::new(entry.mtime.seconds() as i32, entry.mtime.nanoseconds()), - dev: entry.dev, - ino: entry.ino, - mode: entry.mode, - uid: entry.uid, - gid: entry.gid, - file_size: entry.file_size, - id: entry.id.into(), - flags: entry.flags, - flags_extended: entry.flags_extended, - path: entry.path, - } - } -} diff --git a/gitbutler-app/src/git/oid.rs b/gitbutler-app/src/git/oid.rs deleted file mode 100644 index 3e0718db4..000000000 --- a/gitbutler-app/src/git/oid.rs +++ /dev/null @@ -1,61 +0,0 @@ -use std::{fmt, str::FromStr}; - -use serde::{Deserialize, Serialize}; - -#[derive(Debug, PartialEq, Copy, Clone, Hash, Eq)] -pub struct Oid { - oid: git2::Oid, -} - -impl Default for Oid { - fn default() -> Self { - git2::Oid::zero().into() - } -} - -impl Serialize for Oid { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - 
self.oid.to_string().serialize(serializer) - } -} - -impl<'de> Deserialize<'de> for Oid { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - git2::Oid::from_str(&s) - .map_err(|e| serde::de::Error::custom(format!("invalid oid: {}", e))) - .map(Into::into) - } -} - -impl fmt::Display for Oid { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.oid.fmt(f) - } -} - -impl FromStr for Oid { - type Err = git2::Error; - - fn from_str(s: &str) -> Result { - git2::Oid::from_str(s).map(Into::into) - } -} - -impl From for Oid { - fn from(oid: git2::Oid) -> Self { - Self { oid } - } -} - -impl From for git2::Oid { - fn from(oid: Oid) -> Self { - oid.oid - } -} diff --git a/gitbutler-app/src/git/reference.rs b/gitbutler-app/src/git/reference.rs deleted file mode 100644 index 27c360014..000000000 --- a/gitbutler-app/src/git/reference.rs +++ /dev/null @@ -1,64 +0,0 @@ -mod refname; -pub use refname::{LocalRefname, Refname, RemoteRefname, VirtualRefname}; - -use super::{Commit, Oid, Result, Tree}; - -pub struct Reference<'repo> { - reference: git2::Reference<'repo>, -} - -impl<'repo> From> for Reference<'repo> { - fn from(reference: git2::Reference<'repo>) -> Self { - Reference { reference } - } -} - -impl<'repo> Reference<'repo> { - pub fn name(&self) -> Option { - self.reference - .name() - .map(|name| name.parse().expect("libgit2 provides valid refnames")) - } - - pub fn name_bytes(&self) -> &[u8] { - self.reference.name_bytes() - } - - pub fn target(&self) -> Option { - self.reference.target().map(Into::into) - } - - pub fn peel_to_commit(&self) -> Result> { - self.reference - .peel_to_commit() - .map(Into::into) - .map_err(Into::into) - } - - pub fn peel_to_tree(&self) -> Result> { - self.reference - .peel_to_tree() - .map(Into::into) - .map_err(Into::into) - } - - pub fn rename( - &mut self, - new_name: &Refname, - force: bool, - log_message: &str, - ) -> Result> { 
- self.reference - .rename(&new_name.to_string(), force, log_message) - .map(Into::into) - .map_err(Into::into) - } - - pub fn delete(&mut self) -> Result<()> { - self.reference.delete().map_err(Into::into) - } - - pub fn is_remote(&self) -> bool { - self.reference.is_remote() - } -} diff --git a/gitbutler-app/src/git/reference/refname.rs b/gitbutler-app/src/git/reference/refname.rs deleted file mode 100644 index bdedb8b92..000000000 --- a/gitbutler-app/src/git/reference/refname.rs +++ /dev/null @@ -1,137 +0,0 @@ -mod error; -mod local; -mod remote; -mod r#virtual; - -use std::{fmt, str::FromStr}; - -use serde::{Deserialize, Serialize}; - -pub use error::Error; -pub use local::Refname as LocalRefname; -pub use r#virtual::Refname as VirtualRefname; -pub use remote::Refname as RemoteRefname; - -use crate::git; - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum Refname { - Other(String), - Remote(RemoteRefname), - Local(LocalRefname), - Virtual(VirtualRefname), -} - -impl From<&RemoteRefname> for Refname { - fn from(value: &RemoteRefname) -> Self { - Self::Remote(value.clone()) - } -} - -impl From for Refname { - fn from(value: RemoteRefname) -> Self { - Self::Remote(value) - } -} - -impl From for Refname { - fn from(value: VirtualRefname) -> Self { - Self::Virtual(value) - } -} - -impl From<&VirtualRefname> for Refname { - fn from(value: &VirtualRefname) -> Self { - Self::Virtual(value.clone()) - } -} - -impl From for Refname { - fn from(value: LocalRefname) -> Self { - Self::Local(value) - } -} - -impl From<&LocalRefname> for Refname { - fn from(value: &LocalRefname) -> Self { - Self::Local(value.clone()) - } -} - -impl Refname { - pub fn branch(&self) -> Option<&str> { - match self { - Self::Other(_) => None, - Self::Remote(remote) => Some(remote.branch()), - Self::Local(local) => Some(local.branch()), - Self::Virtual(r#virtual) => Some(r#virtual.branch()), - } - } - - pub fn simple_name(&self) -> String { - match self { - Refname::Virtual(virtual_refname) 
=> virtual_refname.branch().to_string(), - Refname::Local(local) => local.branch().to_string(), - Refname::Remote(remote) => { - format!("{}/{}", remote.remote(), remote.branch()) - } - Refname::Other(raw) => raw.to_string(), - } - } -} - -impl FromStr for Refname { - type Err = Error; - - fn from_str(value: &str) -> Result { - match value { - value if value.starts_with("refs/remotes/") => Ok(Self::Remote(value.parse()?)), - value if value.starts_with("refs/heads/") => Ok(Self::Local(value.parse()?)), - value if value.starts_with("refs/gitbutler/") => Ok(Self::Virtual(value.parse()?)), - "HEAD" => Ok(Self::Other(value.to_string())), - value if value.starts_with("refs/") => Ok(Self::Other(value.to_string())), - _ => Err(Error::InvalidName(value.to_string())), - } - } -} - -impl TryFrom<&git::Branch<'_>> for Refname { - type Error = Error; - - fn try_from(value: &git::Branch<'_>) -> std::result::Result { - if value.is_remote() { - Ok(Self::Remote(RemoteRefname::try_from(value)?)) - } else { - Ok(Self::Local(LocalRefname::try_from(value)?)) - } - } -} - -impl fmt::Display for Refname { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::Other(raw) => raw.fmt(f), - Self::Remote(remote) => remote.fmt(f), - Self::Local(local) => local.fmt(f), - Self::Virtual(r#virtual) => r#virtual.fmt(f), - } - } -} - -impl Serialize for Refname { - fn serialize(&self, serializer: S) -> Result { - match self { - Self::Other(raw) => raw.serialize(serializer), - Self::Remote(remote) => remote.serialize(serializer), - Self::Local(local) => local.serialize(serializer), - Self::Virtual(r#virtual) => r#virtual.serialize(serializer), - } - } -} - -impl<'d> Deserialize<'d> for Refname { - fn deserialize>(deserializer: D) -> Result { - let name = String::deserialize(deserializer)?; - name.parse().map_err(serde::de::Error::custom) - } -} diff --git a/gitbutler-app/src/git/reference/refname/error.rs b/gitbutler-app/src/git/reference/refname/error.rs deleted file 
mode 100644 index a964fe399..000000000 --- a/gitbutler-app/src/git/reference/refname/error.rs +++ /dev/null @@ -1,17 +0,0 @@ -use crate::git; - -#[derive(Debug, thiserror::Error)] -pub enum Error { - #[error("branch name is invalid: {0}")] - InvalidName(String), - #[error("reference is not a tag: {0}")] - NotTag(String), - #[error("branch is not local: {0}")] - NotLocal(String), - #[error("branch is not remote: {0}")] - NotRemote(String), - #[error(transparent)] - Git(#[from] git::Error), - #[error(transparent)] - Utf8(#[from] std::string::FromUtf8Error), -} diff --git a/gitbutler-app/src/git/reference/refname/local.rs b/gitbutler-app/src/git/reference/refname/local.rs deleted file mode 100644 index 022bfc961..000000000 --- a/gitbutler-app/src/git/reference/refname/local.rs +++ /dev/null @@ -1,94 +0,0 @@ -use std::{fmt, str::FromStr}; - -use serde::{Deserialize, Serialize}; - -use crate::git; - -use super::{error::Error, remote}; - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Refname { - // contains name of the branch, e.x. 
"master" or "main" - branch: String, - // contains name of the remote branch, if the local branch is tracking a remote branch - remote: Option, -} - -impl Refname { - pub fn new(branch: &str, remote: Option) -> Self { - Self { - branch: branch.to_string(), - remote, - } - } - - pub fn branch(&self) -> &str { - &self.branch - } - - pub fn remote(&self) -> Option<&remote::Refname> { - self.remote.as_ref() - } -} - -impl Serialize for Refname { - fn serialize(&self, serializer: S) -> Result { - serializer.serialize_str(&self.to_string()) - } -} - -impl<'d> Deserialize<'d> for Refname { - fn deserialize>(deserializer: D) -> Result { - let name = String::deserialize(deserializer)?; - name.as_str().parse().map_err(serde::de::Error::custom) - } -} - -impl fmt::Display for Refname { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "refs/heads/{}", self.branch) - } -} - -impl FromStr for Refname { - type Err = Error; - - fn from_str(value: &str) -> Result { - if !value.starts_with("refs/heads/") { - return Err(Error::NotLocal(value.to_string())); - } - - if let Some(branch) = value.strip_prefix("refs/heads/") { - Ok(Self { - branch: branch.to_string(), - remote: None, - }) - } else { - Err(Error::InvalidName(value.to_string())) - } - } -} - -impl TryFrom<&git::Branch<'_>> for Refname { - type Error = Error; - - fn try_from(value: &git::Branch<'_>) -> std::result::Result { - let branch_name = String::from_utf8(value.refname_bytes().to_vec()).map_err(Error::Utf8)?; - if value.is_remote() { - Err(Error::NotLocal(branch_name)) - } else { - let branch: Self = branch_name.parse()?; - match value.upstream() { - Ok(upstream) => Ok(Self { - remote: Some(remote::Refname::try_from(&upstream)?), - ..branch - }), - Err(git::Error::NotFound(_)) => Ok(Self { - remote: None, - ..branch - }), - Err(error) => Err(error.into()), - } - } - } -} diff --git a/gitbutler-app/src/git/reference/refname/remote.rs b/gitbutler-app/src/git/reference/refname/remote.rs deleted file 
mode 100644 index d14ab3b5f..000000000 --- a/gitbutler-app/src/git/reference/refname/remote.rs +++ /dev/null @@ -1,93 +0,0 @@ -use std::{fmt, str::FromStr}; - -use serde::{Deserialize, Serialize}; - -use crate::git; - -use super::error::Error; - -#[derive(Debug, Clone, Hash, PartialEq, Eq)] -pub struct Refname { - // contains name of the remote, e.x. "origin" or "upstream" - remote: String, - // contains name of the branch, e.x. "master" or "main" - branch: String, -} - -impl Refname { - pub fn new(remote: &str, branch: &str) -> Self { - Self { - remote: remote.to_string(), - branch: branch.to_string(), - } - } - - pub fn with_branch(&self, branch: &str) -> Self { - Self { - branch: branch.to_string(), - remote: self.remote.clone(), - } - } - - pub fn branch(&self) -> &str { - &self.branch - } - - pub fn remote(&self) -> &str { - &self.remote - } -} - -impl fmt::Display for Refname { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "refs/remotes/{}/{}", self.remote, self.branch) - } -} - -impl Serialize for Refname { - fn serialize(&self, serializer: S) -> Result { - serializer.serialize_str(&self.to_string()) - } -} - -impl<'d> Deserialize<'d> for Refname { - fn deserialize>(deserializer: D) -> Result { - let name = String::deserialize(deserializer)?; - name.as_str().parse().map_err(serde::de::Error::custom) - } -} - -impl FromStr for Refname { - type Err = Error; - - fn from_str(value: &str) -> Result { - if !value.starts_with("refs/remotes/") { - return Err(Error::NotRemote(value.to_string())); - }; - - let value = value.strip_prefix("refs/remotes/").unwrap(); - - if let Some((remote, branch)) = value.split_once('/') { - Ok(Self { - remote: remote.to_string(), - branch: branch.to_string(), - }) - } else { - Err(Error::InvalidName(value.to_string())) - } - } -} - -impl TryFrom<&git::Branch<'_>> for Refname { - type Error = Error; - - fn try_from(value: &git::Branch<'_>) -> std::result::Result { - let refname = 
String::from_utf8(value.refname_bytes().to_vec()).map_err(Error::Utf8)?; - - if !value.is_remote() { - return Err(Error::NotRemote(refname)); - } - - refname.parse() - } -} diff --git a/gitbutler-app/src/git/reference/refname/virtual.rs b/gitbutler-app/src/git/reference/refname/virtual.rs deleted file mode 100644 index 5d05a6a15..000000000 --- a/gitbutler-app/src/git/reference/refname/virtual.rs +++ /dev/null @@ -1,65 +0,0 @@ -use std::{fmt, str::FromStr}; - -use serde::{Deserialize, Serialize}; - -use crate::virtual_branches::normalize_branch_name; -use crate::virtual_branches::Branch; - -use super::error::Error; - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Refname { - // contains slug of the virtual branch name - branch: String, -} - -impl Refname { - pub fn branch(&self) -> &str { - &self.branch - } -} - -impl From<&Branch> for Refname { - fn from(value: &Branch) -> Self { - Self { - branch: normalize_branch_name(&value.name), - } - } -} - -impl Serialize for Refname { - fn serialize(&self, serializer: S) -> Result { - serializer.serialize_str(&self.to_string()) - } -} - -impl<'d> Deserialize<'d> for Refname { - fn deserialize>(deserializer: D) -> Result { - let name = String::deserialize(deserializer)?; - name.as_str().parse().map_err(serde::de::Error::custom) - } -} - -impl fmt::Display for Refname { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "refs/gitbutler/{}", self.branch) - } -} - -impl FromStr for Refname { - type Err = Error; - - fn from_str(value: &str) -> Result { - if !value.starts_with("refs/gitbutler/") { - return Err(Error::NotLocal(value.to_string())); - } - - if let Some(branch) = value.strip_prefix("refs/gitbutler/") { - Ok(Self { - branch: branch.to_string(), - }) - } else { - Err(Error::InvalidName(value.to_string())) - } - } -} diff --git a/gitbutler-app/src/git/remote.rs b/gitbutler-app/src/git/remote.rs deleted file mode 100644 index 15f28034c..000000000 --- a/gitbutler-app/src/git/remote.rs +++ 
/dev/null @@ -1,43 +0,0 @@ -use std::str::FromStr; - -use super::{Result, Url}; - -pub struct Remote<'repo> { - inner: git2::Remote<'repo>, -} - -impl<'repo> From> for Remote<'repo> { - fn from(inner: git2::Remote<'repo>) -> Self { - Self { inner } - } -} - -impl<'repo> Remote<'repo> { - pub fn name(&self) -> Option<&str> { - self.inner.name() - } - - pub fn url(&self) -> Result> { - self.inner - .url() - .map(FromStr::from_str) - .transpose() - .map_err(Into::into) - } - - pub fn push( - &mut self, - refspec: &[&str], - opts: Option<&mut git2::PushOptions<'_>>, - ) -> Result<()> { - self.inner.push(refspec, opts).map_err(Into::into) - } - - pub fn fetch( - &mut self, - refspec: &[&str], - opts: Option<&mut git2::FetchOptions<'_>>, - ) -> Result<()> { - self.inner.fetch(refspec, opts, None).map_err(Into::into) - } -} diff --git a/gitbutler-app/src/git/repository.rs b/gitbutler-app/src/git/repository.rs deleted file mode 100644 index 3091657d9..000000000 --- a/gitbutler-app/src/git/repository.rs +++ /dev/null @@ -1,535 +0,0 @@ -use std::{io::Write, path::Path, str}; - -use git2::Submodule; -use git2_hooks::HookResult; - -use crate::{keys, path::Normalize}; - -use super::{ - Blob, Branch, Commit, Config, Index, Oid, Reference, Refname, Remote, Result, Signature, Tree, - TreeBuilder, Url, -}; - -// wrapper around git2::Repository to get control over how it's used. 
-pub struct Repository(git2::Repository); - -impl<'a> From<&'a Repository> for &'a git2::Repository { - fn from(repo: &'a Repository) -> Self { - &repo.0 - } -} - -impl From for Repository { - fn from(repo: git2::Repository) -> Self { - Self(repo) - } -} - -impl Repository { - pub fn init>(path: P) -> Result { - let inner = git2::Repository::init(path)?; - Ok(Repository(inner)) - } - - pub fn init_opts>(path: P, opts: &git2::RepositoryInitOptions) -> Result { - let inner = git2::Repository::init_opts(path, opts)?; - Ok(Repository(inner)) - } - - pub fn open>(path: P) -> Result { - let inner = git2::Repository::open(path)?; - Ok(Repository(inner)) - } - - pub fn add_disk_alternate>(&self, path: P) -> Result<()> { - let alternates_path = self.0.path().join("objects/info/alternates"); - if !alternates_path.exists() { - let path = path.as_ref().normalize(); - let mut alternates_file = std::fs::File::create(&alternates_path)?; - alternates_file.write_all(path.as_path().as_os_str().as_encoded_bytes())?; - alternates_file.write_all(b"\n")?; - self.0.odb().and_then(|odb| odb.refresh())?; - } - - Ok(()) - } - - pub fn add_submodule>(&self, url: &Url, path: P) -> Result> { - self.0 - .submodule(&url.to_string(), path.as_ref(), false) - .map_err(Into::into) - } - - pub fn rebase( - &self, - branch_oid: Option, - upstream_oid: Option, - onto_oid: Option, - opts: Option<&mut git2::RebaseOptions<'_>>, - ) -> Result> { - let annotated_branch = if let Some(branch) = branch_oid { - Some(self.0.find_annotated_commit(branch.into())?) - } else { - None - }; - - let annotated_upstream = if let Some(upstream) = upstream_oid { - Some(self.0.find_annotated_commit(upstream.into())?) - } else { - None - }; - - let annotated_onto = if let Some(onto) = onto_oid { - Some(self.0.find_annotated_commit(onto.into())?) 
- } else { - None - }; - - self.0 - .rebase( - annotated_branch.as_ref(), - annotated_upstream.as_ref(), - annotated_onto.as_ref(), - opts, - ) - .map_err(Into::into) - } - - pub fn is_descendant_of(&self, a: Oid, b: Oid) -> Result { - self.0 - .graph_descendant_of(a.into(), b.into()) - .map_err(Into::into) - } - - pub fn merge_base(&self, one: Oid, two: Oid) -> Result { - self.0 - .merge_base(one.into(), two.into()) - .map(Oid::from) - .map_err(Into::into) - } - - pub fn merge_trees( - &self, - ancestor_tree: &Tree<'_>, - our_tree: &Tree<'_>, - their_tree: &Tree<'_>, - ) -> Result { - self.0 - .merge_trees( - ancestor_tree.into(), - our_tree.into(), - their_tree.into(), - None, - ) - .map(Index::from) - .map_err(Into::into) - } - - pub fn diff_tree_to_tree( - &self, - old_tree: Option<&Tree<'_>>, - new_tree: Option<&Tree<'_>>, - opts: Option<&mut git2::DiffOptions>, - ) -> Result> { - self.0 - .diff_tree_to_tree(old_tree.map(Into::into), new_tree.map(Into::into), opts) - .map_err(Into::into) - } - - pub fn diff_tree_to_workdir( - &self, - old_tree: Option<&Tree<'_>>, - opts: Option<&mut git2::DiffOptions>, - ) -> Result> { - if let Ok(mut index) = self.0.index() { - index.update_all(vec!["*"], None)?; - } - self.0 - .diff_tree_to_workdir_with_index(old_tree.map(Into::into), opts) - .map_err(Into::into) - } - - pub fn reset( - &self, - commit: &Commit<'_>, - kind: git2::ResetType, - checkout: Option<&mut git2::build::CheckoutBuilder<'_>>, - ) -> Result<()> { - let commit: &git2::Commit = commit.into(); - self.0 - .reset(commit.as_object(), kind, checkout) - .map_err(Into::into) - } - - pub fn find_reference(&self, name: &Refname) -> Result { - self.0 - .find_reference(&name.to_string()) - .map(Reference::from) - .map_err(Into::into) - } - - pub fn head(&self) -> Result { - self.0.head().map(Reference::from).map_err(Into::into) - } - - pub fn find_tree(&self, id: Oid) -> Result { - self.0 - .find_tree(id.into()) - .map(Tree::from) - .map_err(Into::into) - } - - pub 
fn find_commit(&self, id: Oid) -> Result { - self.0 - .find_commit(id.into()) - .map(Commit::from) - .map_err(Into::into) - } - - pub fn find_blob(&self, id: Oid) -> Result { - self.0 - .find_blob(id.into()) - .map(Into::into) - .map_err(Into::into) - } - - pub fn revwalk(&self) -> Result { - self.0.revwalk().map_err(Into::into) - } - - pub fn is_path_ignored>(&self, path: P) -> Result { - self.0.is_path_ignored(path).map_err(Into::into) - } - - pub fn branches( - &self, - filter: Option, - ) -> Result>> { - self.0 - .branches(filter) - .map(|branches| { - branches.map(|branch| { - branch - .map(|(branch, branch_type)| (Branch::from(branch), branch_type)) - .map_err(Into::into) - }) - }) - .map_err(Into::into) - } - - pub fn index(&self) -> Result { - self.0.index().map(Into::into).map_err(Into::into) - } - - pub fn index_size(&self) -> Result { - Ok(self.0.index()?.len()) - } - - pub fn blob_path>(&self, path: P) -> Result { - self.0 - .blob_path(path.as_ref()) - .map(Into::into) - .map_err(Into::into) - } - - pub fn cherry_pick(&self, base: &Commit, target: &Commit) -> Result { - self.0 - .cherrypick_commit(target.into(), base.into(), 0, None) - .map(Into::into) - .map_err(Into::into) - } - - pub fn blob(&self, data: &[u8]) -> Result { - self.0.blob(data).map(Into::into).map_err(Into::into) - } - - pub fn commit( - &self, - update_ref: Option<&Refname>, - author: &Signature<'_>, - committer: &Signature<'_>, - message: &str, - tree: &Tree<'_>, - parents: &[&Commit<'_>], - ) -> Result { - let parents: Vec<&git2::Commit> = parents - .iter() - .map(|c| c.to_owned().into()) - .collect::>(); - self.0 - .commit( - update_ref.map(ToString::to_string).as_deref(), - author.into(), - committer.into(), - message, - tree.into(), - &parents, - ) - .map(Into::into) - .map_err(Into::into) - } - - pub fn commit_signed( - &self, - author: &Signature<'_>, - message: &str, - tree: &Tree<'_>, - parents: &[&Commit<'_>], - key: &keys::PrivateKey, - ) -> Result { - let parents: 
Vec<&git2::Commit> = parents - .iter() - .map(|c| c.to_owned().into()) - .collect::>(); - let commit_buffer = self.0.commit_create_buffer( - author.into(), - // author and committer must be the same - // for signed commits - author.into(), - message, - tree.into(), - &parents, - )?; - let commit_buffer = str::from_utf8(&commit_buffer).unwrap(); - let signature = key.sign(commit_buffer.as_bytes())?; - self.0 - .commit_signed(commit_buffer, &signature, None) - .map(Into::into) - .map_err(Into::into) - } - - pub fn config(&self) -> Result { - self.0.config().map(Into::into).map_err(Into::into) - } - - pub fn treebuilder<'repo>(&'repo self, tree: Option<&'repo Tree>) -> TreeBuilder<'repo> { - TreeBuilder::new(self, tree) - } - - pub fn path(&self) -> &Path { - self.0.path() - } - - pub fn workdir(&self) -> Option<&Path> { - self.0.workdir() - } - - pub fn branch_upstream_name(&self, branch_name: &str) -> Result { - self.0 - .branch_upstream_name(branch_name) - .map(|s| s.as_str().unwrap().to_string()) - .map_err(Into::into) - } - - pub fn branch_remote_name(&self, refname: &str) -> Result { - self.0 - .branch_remote_name(refname) - .map(|s| s.as_str().unwrap().to_string()) - .map_err(Into::into) - } - - pub fn branch_upstream_remote(&self, branch_name: &str) -> Result { - self.0 - .branch_upstream_remote(branch_name) - .map(|s| s.as_str().unwrap().to_string()) - .map_err(Into::into) - } - - pub fn statuses( - &self, - options: Option<&mut git2::StatusOptions>, - ) -> Result> { - self.0.statuses(options).map_err(Into::into) - } - - pub fn remote_anonymous(&self, url: &super::Url) -> Result { - self.0 - .remote_anonymous(&url.to_string()) - .map(Into::into) - .map_err(Into::into) - } - - pub fn find_remote(&self, name: &str) -> Result { - self.0.find_remote(name).map(Into::into).map_err(Into::into) - } - - pub fn find_branch(&self, name: &Refname) -> Result { - self.0 - .find_branch( - &name.simple_name(), - match name { - Refname::Virtual(_) | Refname::Local(_) | 
Refname::Other(_) => { - git2::BranchType::Local - } - Refname::Remote(_) => git2::BranchType::Remote, - }, - ) - .map(Into::into) - .map_err(Into::into) - } - - pub fn refname_to_id(&self, name: &str) -> Result { - self.0 - .refname_to_id(name) - .map(Into::into) - .map_err(Into::into) - } - - pub fn checkout_head(&self, opts: Option<&mut git2::build::CheckoutBuilder>) -> Result<()> { - self.0.checkout_head(opts).map_err(Into::into) - } - - pub fn checkout_index<'a>(&'a self, index: &'a mut Index) -> CheckoutIndexBuilder { - CheckoutIndexBuilder { - index: index.into(), - repo: &self.0, - checkout_builder: git2::build::CheckoutBuilder::new(), - } - } - - pub fn checkout_index_path>(&self, path: P) -> Result<()> { - let mut builder = git2::build::CheckoutBuilder::new(); - builder.path(path.as_ref()); - builder.force(); - - let mut index = self.0.index()?; - self.0 - .checkout_index(Some(&mut index), Some(&mut builder))?; - - Ok(()) - } - - pub fn checkout_tree<'a>(&'a self, tree: &'a Tree<'a>) -> CheckoutTreeBuidler { - CheckoutTreeBuidler { - tree: tree.into(), - repo: &self.0, - checkout_builder: git2::build::CheckoutBuilder::new(), - } - } - - pub fn set_head(&self, refname: &Refname) -> Result<()> { - self.0.set_head(&refname.to_string()).map_err(Into::into) - } - - pub fn set_head_detached(&self, commitish: Oid) -> Result<()> { - self.0 - .set_head_detached(commitish.into()) - .map_err(Into::into) - } - - pub fn branch(&self, name: &Refname, target: &Commit, force: bool) -> Result { - self.0 - .branch(&name.to_string(), target.into(), force) - .map(Into::into) - .map_err(Into::into) - } - - pub fn reference( - &self, - name: &Refname, - id: Oid, - force: bool, - log_message: &str, - ) -> Result { - self.0 - .reference(&name.to_string(), id.into(), force, log_message) - .map(Into::into) - .map_err(Into::into) - } - - pub fn get_wd_tree(&self) -> Result { - let mut index = self.0.index()?; - index.add_all(["*"], git2::IndexAddOption::DEFAULT, None)?; - let oid = 
index.write_tree()?; - self.0.find_tree(oid).map(Into::into).map_err(Into::into) - } - - pub fn remote(&self, name: &str, url: &Url) -> Result { - self.0 - .remote(name, &url.to_string()) - .map(Into::into) - .map_err(Into::into) - } - - pub fn references(&self) -> Result>> { - self.0 - .references() - .map(|iter| iter.map(|reference| reference.map(Into::into).map_err(Into::into))) - .map_err(Into::into) - } - - pub fn references_glob(&self, glob: &str) -> Result>> { - self.0 - .references_glob(glob) - .map(|iter| iter.map(|reference| reference.map(Into::into).map_err(Into::into))) - .map_err(Into::into) - } - - pub fn run_hook_pre_commit(&self) -> Result { - let res = git2_hooks::hooks_pre_commit(&self.0, Some(&["../.husky"]))?; - Ok(res) - } - - pub fn run_hook_commit_msg(&self, msg: &mut String) -> Result { - let res = git2_hooks::hooks_commit_msg(&self.0, Some(&["../.husky"]), msg)?; - Ok(res) - } - - pub fn run_hook_post_commit(&self) -> Result<()> { - git2_hooks::hooks_post_commit(&self.0, Some(&["../.husky"]))?; - Ok(()) - } -} - -pub struct CheckoutTreeBuidler<'a> { - repo: &'a git2::Repository, - tree: &'a git2::Tree<'a>, - checkout_builder: git2::build::CheckoutBuilder<'a>, -} - -impl CheckoutTreeBuidler<'_> { - pub fn force(&mut self) -> &mut Self { - self.checkout_builder.force(); - self - } - - pub fn remove_untracked(&mut self) -> &mut Self { - self.checkout_builder.remove_untracked(true); - self - } - - pub fn checkout(&mut self) -> Result<()> { - self.repo - .checkout_tree(self.tree.as_object(), Some(&mut self.checkout_builder)) - .map_err(Into::into) - } -} - -pub struct CheckoutIndexBuilder<'a> { - repo: &'a git2::Repository, - index: &'a mut git2::Index, - checkout_builder: git2::build::CheckoutBuilder<'a>, -} - -impl CheckoutIndexBuilder<'_> { - pub fn force(&mut self) -> &mut Self { - self.checkout_builder.force(); - self - } - - pub fn allow_conflicts(&mut self) -> &mut Self { - self.checkout_builder.allow_conflicts(true); - self - } - - pub 
fn conflict_style_merge(&mut self) -> &mut Self { - self.checkout_builder.conflict_style_merge(true); - self - } - - pub fn checkout(&mut self) -> Result<()> { - self.repo - .checkout_index(Some(&mut self.index), Some(&mut self.checkout_builder)) - .map_err(Into::into) - } -} diff --git a/gitbutler-app/src/git/show.rs b/gitbutler-app/src/git/show.rs deleted file mode 100644 index 2062abde8..000000000 --- a/gitbutler-app/src/git/show.rs +++ /dev/null @@ -1,22 +0,0 @@ -use super::Repository; -use crate::git; -use std::{path, str}; - -use super::Result; - -pub fn show_file_at_tree>( - repository: &Repository, - file_path: P, - tree: &git::Tree, -) -> Result { - let file_path = file_path.as_ref(); - match tree.get_path(file_path) { - Ok(tree_entry) => { - let blob = repository.find_blob(tree_entry.id())?; - let content = str::from_utf8(blob.content())?; - Ok(content.to_string()) - } - // If a file was introduced in this commit, the content in the parent tree is the empty string - Err(_) => Ok(String::new()), - } -} diff --git a/gitbutler-app/src/git/signature.rs b/gitbutler-app/src/git/signature.rs deleted file mode 100644 index 46851dbfa..000000000 --- a/gitbutler-app/src/git/signature.rs +++ /dev/null @@ -1,67 +0,0 @@ -use crate::users; - -pub struct Signature<'a> { - signature: git2::Signature<'a>, -} - -impl Clone for Signature<'static> { - fn clone(&self) -> Self { - Self { - signature: self.signature.clone(), - } - } -} - -impl<'a> From> for git2::Signature<'a> { - fn from(value: Signature<'a>) -> Self { - value.signature - } -} - -impl<'a> From<&'a Signature<'a>> for &'a git2::Signature<'a> { - fn from(value: &'a Signature<'a>) -> Self { - &value.signature - } -} - -impl<'a> From> for Signature<'a> { - fn from(value: git2::Signature<'a>) -> Self { - Self { signature: value } - } -} - -impl TryFrom<&users::User> for Signature<'_> { - type Error = super::Error; - - fn try_from(value: &users::User) -> Result { - if let Some(name) = &value.name { - 
git2::Signature::now(name, &value.email) - .map(Into::into) - .map_err(Into::into) - } else if let Some(name) = &value.given_name { - git2::Signature::now(name, &value.email) - .map(Into::into) - .map_err(Into::into) - } else { - git2::Signature::now(&value.email, &value.email) - .map(Into::into) - .map_err(Into::into) - } - } -} - -impl Signature<'_> { - pub fn now(name: &str, email: &str) -> Result { - git2::Signature::now(name, email) - .map(Into::into) - .map_err(Into::into) - } - - pub fn name(&self) -> Option<&str> { - self.signature.name() - } - - pub fn email(&self) -> Option<&str> { - self.signature.email() - } -} diff --git a/gitbutler-app/src/git/tree.rs b/gitbutler-app/src/git/tree.rs deleted file mode 100644 index 1d7e84486..000000000 --- a/gitbutler-app/src/git/tree.rs +++ /dev/null @@ -1,147 +0,0 @@ -use std::path::Path; - -use super::{Oid, Repository, Result}; -use crate::path::Normalize; - -pub struct Tree<'repo> { - tree: git2::Tree<'repo>, -} - -impl<'repo> From> for Tree<'repo> { - fn from(tree: git2::Tree<'repo>) -> Self { - Tree { tree } - } -} - -impl<'repo> From<&'repo Tree<'repo>> for &'repo git2::Tree<'repo> { - fn from(tree: &'repo Tree<'repo>) -> Self { - &tree.tree - } -} - -impl<'repo> Tree<'repo> { - pub fn id(&self) -> Oid { - self.tree.id().into() - } - - pub fn get_path>(&self, path: P) -> Result> { - self.tree - .get_path(path.normalize().as_path()) - .map(Into::into) - .map_err(Into::into) - } - - pub fn walk(&self, mut callback: C) -> Result<()> - where - C: FnMut(&str, &TreeEntry) -> TreeWalkResult, - { - self.tree - .walk(git2::TreeWalkMode::PreOrder, |root, entry| { - match callback(root, &entry.clone().into()) { - TreeWalkResult::Continue => git2::TreeWalkResult::Ok, - TreeWalkResult::Skip => git2::TreeWalkResult::Skip, - TreeWalkResult::Stop => git2::TreeWalkResult::Abort, - } - }) - .map_err(Into::into) - } - - pub fn get_name(&self, filename: &str) -> Option { - self.tree.get_name(filename).map(Into::into) - } -} - -pub 
enum TreeWalkResult { - Continue, - Skip, - Stop, -} - -pub struct TreeEntry<'repo> { - entry: git2::TreeEntry<'repo>, -} - -impl<'repo> From> for TreeEntry<'repo> { - fn from(entry: git2::TreeEntry<'repo>) -> Self { - TreeEntry { entry } - } -} - -impl<'repo> TreeEntry<'repo> { - pub fn filemode(&self) -> i32 { - self.entry.filemode() - } - - pub fn to_object(&self, repo: &'repo Repository) -> Result { - self.entry.to_object(repo.into()).map_err(Into::into) - } - - pub fn kind(&self) -> Option { - self.entry.kind() - } - - pub fn id(&self) -> Oid { - self.entry.id().into() - } - - pub fn name(&self) -> Option<&str> { - self.entry.name() - } -} - -#[derive(PartialEq)] -pub enum FileMode { - Blob, - BlobExecutable, - Link, - Tree, -} - -impl From for git2::FileMode { - fn from(filemod: FileMode) -> Self { - match filemod { - FileMode::Blob => git2::FileMode::Blob, - FileMode::BlobExecutable => git2::FileMode::BlobExecutable, - FileMode::Link => git2::FileMode::Link, - FileMode::Tree => git2::FileMode::Tree, - } - } -} - -pub struct TreeBuilder<'repo> { - repo: &'repo git2::Repository, - builder: git2::build::TreeUpdateBuilder, - base: Option<&'repo git2::Tree<'repo>>, -} - -impl<'repo> TreeBuilder<'repo> { - pub fn new(repo: &'repo Repository, base: Option<&'repo Tree>) -> Self { - TreeBuilder { - repo: repo.into(), - builder: git2::build::TreeUpdateBuilder::new(), - base: base.map(Into::into), - } - } - - pub fn upsert>(&mut self, filename: P, oid: Oid, filemode: FileMode) { - self.builder - .upsert(filename.as_ref(), oid.into(), filemode.into()); - } - - pub fn remove>(&mut self, filename: P) { - self.builder.remove(filename.as_ref()); - } - - pub fn write(&mut self) -> Result { - let repo: &git2::Repository = self.repo; - if let Some(base) = self.base { - let tree_id = self.builder.create_updated(repo, base)?; - Ok(tree_id.into()) - } else { - let empty_tree_id = repo.treebuilder(None)?.write()?; - let empty_tree = repo.find_tree(empty_tree_id)?; - let tree_id = 
self.builder.create_updated(repo, &empty_tree)?; - Ok(tree_id.into()) - } - } -} diff --git a/gitbutler-app/src/git/url.rs b/gitbutler-app/src/git/url.rs deleted file mode 100644 index e11b7f81a..000000000 --- a/gitbutler-app/src/git/url.rs +++ /dev/null @@ -1,91 +0,0 @@ -mod convert; -mod parse; -mod scheme; - -use std::str::FromStr; - -use bstr::ByteSlice; -pub use convert::ConvertError; -pub use parse::Error as ParseError; -pub use scheme::Scheme; - -#[derive(Default, Clone, Hash, PartialEq, Eq, Debug, thiserror::Error)] -pub struct Url { - /// The URL scheme. - pub scheme: Scheme, - /// The user to impersonate on the remote. - user: Option, - /// The password associated with a user. - password: Option, - /// The host to which to connect. Localhost is implied if `None`. - pub host: Option, - /// When serializing, use the alternative forms as it was parsed as such. - serialize_alternative_form: bool, - /// The port to use when connecting to a host. If `None`, standard ports depending on `scheme` will be used. - pub port: Option, - /// The path portion of the URL, usually the location of the git repository. 
- pub path: bstr::BString, -} - -impl Url { - pub fn is_github(&self) -> bool { - self.host - .as_ref() - .map_or(false, |host| host.contains("github.com")) - } -} - -impl std::fmt::Display for Url { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - if !(self.serialize_alternative_form - && (self.scheme == Scheme::File || self.scheme == Scheme::Ssh)) - { - f.write_str(self.scheme.as_str())?; - f.write_str("://")?; - } - match (&self.user, &self.host) { - (Some(user), Some(host)) => { - f.write_str(user)?; - if let Some(password) = &self.password { - f.write_str(":")?; - f.write_str(password)?; - } - f.write_str("@")?; - f.write_str(host)?; - } - (None, Some(host)) => { - f.write_str(host)?; - } - (None, None) => {} - (Some(_user), None) => { - unreachable!("BUG: should not be possible to have a user but no host") - } - }; - if let Some(port) = &self.port { - f.write_str(&format!(":{}", port))?; - } - if self.serialize_alternative_form && self.scheme == Scheme::Ssh { - f.write_str(":")?; - } - f.write_str(self.path.to_str().unwrap())?; - Ok(()) - } -} - -impl Url { - pub fn as_ssh(&self) -> Result { - convert::to_ssh_url(self) - } - - pub fn as_https(&self) -> Result { - convert::to_https_url(self) - } -} - -impl FromStr for Url { - type Err = parse::Error; - - fn from_str(s: &str) -> Result { - parse::parse(s.as_bytes().into()) - } -} diff --git a/gitbutler-app/src/git/url/convert.rs b/gitbutler-app/src/git/url/convert.rs deleted file mode 100644 index 19c31ffd7..000000000 --- a/gitbutler-app/src/git/url/convert.rs +++ /dev/null @@ -1,128 +0,0 @@ -use bstr::ByteSlice; - -use super::{Scheme, Url}; - -#[derive(Debug, PartialEq, thiserror::Error)] -pub enum ConvertError { - #[error("Could not convert {from} to {to}")] - UnsupportedPair { from: Scheme, to: Scheme }, -} - -pub(crate) fn to_https_url(url: &Url) -> Result { - match url.scheme { - Scheme::Https => Ok(url.clone()), - Scheme::Http => Ok(Url { - scheme: Scheme::Https, - ..url.clone() - 
}), - Scheme::Ssh => Ok(Url { - scheme: Scheme::Https, - user: None, - serialize_alternative_form: true, - path: if url.path.starts_with(&[b'/']) { - url.path.clone() - } else { - format!("/{}", url.path.to_str().unwrap()).into() - }, - ..url.clone() - }), - _ => Err(ConvertError::UnsupportedPair { - from: url.scheme.clone(), - to: Scheme::Ssh, - }), - } -} - -pub(crate) fn to_ssh_url(url: &Url) -> Result { - match url.scheme { - Scheme::Ssh => Ok(url.clone()), - Scheme::Http | Scheme::Https => Ok(Url { - scheme: Scheme::Ssh, - user: Some("git".to_string()), - serialize_alternative_form: true, - path: if url.path.starts_with(&[b'/']) { - url.path.trim_start_with(|c| c == '/').into() - } else { - url.path.clone() - }, - ..url.clone() - }), - _ => Err(ConvertError::UnsupportedPair { - from: url.scheme.clone(), - to: Scheme::Ssh, - }), - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn to_https_url_test() { - for (input, expected) in [ - ( - "https://github.com/gitbutlerapp/gitbutler.git", - "https://github.com/gitbutlerapp/gitbutler.git", - ), - ( - "http://github.com/gitbutlerapp/gitbutler.git", - "https://github.com/gitbutlerapp/gitbutler.git", - ), - ( - "git@github.com:gitbutlerapp/gitbutler.git", - "https://github.com/gitbutlerapp/gitbutler.git", - ), - ( - "ssh://git@github.com/gitbutlerapp/gitbutler.git", - "https://github.com/gitbutlerapp/gitbutler.git", - ), - ( - "git@bitbucket.org:gitbutler-nikita/test.git", - "https://bitbucket.org/gitbutler-nikita/test.git", - ), - ( - "https://bitbucket.org/gitbutler-nikita/test.git", - "https://bitbucket.org/gitbutler-nikita/test.git", - ), - ] { - let url = input.parse().unwrap(); - let https_url = to_https_url(&url).unwrap(); - assert_eq!(https_url.to_string(), expected, "test case {}", url); - } - } - - #[test] - fn to_ssh_url_test() { - for (input, expected) in [ - ( - "git@github.com:gitbutlerapp/gitbutler.git", - "git@github.com:gitbutlerapp/gitbutler.git", - ), - ( - 
"https://github.com/gitbutlerapp/gitbutler.git", - "git@github.com:gitbutlerapp/gitbutler.git", - ), - ( - "https://github.com/gitbutlerapp/gitbutler.git", - "git@github.com:gitbutlerapp/gitbutler.git", - ), - ( - "ssh://git@github.com/gitbutlerapp/gitbutler.git", - "ssh://git@github.com/gitbutlerapp/gitbutler.git", - ), - ( - "https://bitbucket.org/gitbutler-nikita/test.git", - "git@bitbucket.org:gitbutler-nikita/test.git", - ), - ( - "git@bitbucket.org:gitbutler-nikita/test.git", - "git@bitbucket.org:gitbutler-nikita/test.git", - ), - ] { - let url = input.parse().unwrap(); - let ssh_url = to_ssh_url(&url).unwrap(); - assert_eq!(ssh_url.to_string(), expected, "test case {}", url); - } - } -} diff --git a/gitbutler-app/src/git/url/parse.rs b/gitbutler-app/src/git/url/parse.rs deleted file mode 100644 index 66a204d9c..000000000 --- a/gitbutler-app/src/git/url/parse.rs +++ /dev/null @@ -1,147 +0,0 @@ -use std::borrow::Cow; - -pub use bstr; -use bstr::{BStr, BString, ByteSlice}; - -use super::{Scheme, Url}; - -/// The Error returned by [`parse()`] -#[derive(Debug, thiserror::Error)] -pub enum Error { - #[error("Could not decode URL as UTF8")] - Utf8(#[from] std::str::Utf8Error), - #[error(transparent)] - Url(#[from] url::ParseError), - #[error("URLs need to specify the path to the repository")] - MissingResourceLocation, - #[error("file URLs require an absolute or relative path to the repository")] - MissingRepositoryPath, - #[error("\"{url}\" is not a valid local path")] - NotALocalFile { url: BString }, - #[error("Relative URLs are not permitted: {url:?}")] - RelativeUrl { url: String }, -} - -fn str_to_protocol(s: &str) -> Scheme { - Scheme::from(s) -} - -fn guess_protocol(url: &[u8]) -> Option<&str> { - match url.find_byte(b':') { - Some(colon_pos) => { - if url[..colon_pos].find_byteset(b"@.").is_some() { - "ssh" - } else { - url.get(colon_pos + 1..).and_then(|from_colon| { - (from_colon.contains(&b'/') || from_colon.contains(&b'\\')).then_some("file") - })? 
- } - } - None => "file", - } - .into() -} - -/// Extract the path part from an SCP-like URL `[user@]host.xz:path/to/repo.git/` -fn extract_scp_path(url: &str) -> Option<&str> { - url.splitn(2, ':').last() -} - -fn sanitize_for_protocol<'a>(protocol: &str, url: &'a str) -> Cow<'a, str> { - match protocol { - "ssh" => url.replacen(':', "/", 1).into(), - _ => url.into(), - } -} - -fn has_no_explicit_protocol(url: &[u8]) -> bool { - url.find(b"://").is_none() -} - -fn to_owned_url(url: &url::Url) -> Url { - let password = url.password(); - Url { - serialize_alternative_form: false, - scheme: str_to_protocol(url.scheme()), - password: password.map(ToOwned::to_owned), - user: if url.username().is_empty() && password.is_none() { - None - } else { - Some(url.username().into()) - }, - host: url.host_str().map(Into::into), - port: url.port(), - path: url.path().into(), - } -} - -/// Parse the given `bytes` as git url. -/// -/// # Note -/// -/// We cannot and should never have to deal with UTF-16 encoded windows strings, so bytes input is acceptable. -/// For file-paths, we don't expect UTF8 encoding either. 
-pub fn parse(input: &BStr) -> Result { - let guessed_protocol = - guess_protocol(input).ok_or_else(|| Error::NotALocalFile { url: input.into() })?; - let path_without_file_protocol = input.strip_prefix(b"file://"); - if path_without_file_protocol.is_some() - || (has_no_explicit_protocol(input) && guessed_protocol == "file") - { - let path = - path_without_file_protocol.map_or_else(|| input.into(), |stripped_path| stripped_path); - if path.is_empty() { - return Err(Error::MissingRepositoryPath); - } - let input_starts_with_file_protocol = input.starts_with(b"file://"); - if input_starts_with_file_protocol { - let wanted = &[b'/']; - if !wanted.iter().any(|w| path.contains(w)) { - return Err(Error::MissingRepositoryPath); - } - } - return Ok(Url { - scheme: Scheme::File, - path: path.into(), - serialize_alternative_form: !input_starts_with_file_protocol, - ..Default::default() - }); - } - - let url_str = std::str::from_utf8(input)?; - let (mut url, mut scp_path) = match url::Url::parse(url_str) { - Ok(url) => (url, None), - Err(url::ParseError::RelativeUrlWithoutBase) => { - // happens with bare paths as well as scp like paths. The latter contain a ':' past the host portion, - // which we are trying to detect. - ( - url::Url::parse(&format!( - "{}://{}", - guessed_protocol, - sanitize_for_protocol(guessed_protocol, url_str) - ))?, - extract_scp_path(url_str), - ) - } - Err(err) => return Err(err.into()), - }; - // SCP like URLs without user parse as 'something' with the scheme being the 'host'. Hosts always have dots. 
- if url.scheme().find('.').is_some() { - // try again with prefixed protocol - url = url::Url::parse(&format!("ssh://{}", sanitize_for_protocol("ssh", url_str)))?; - scp_path = extract_scp_path(url_str); - } - if url.path().is_empty() && ["ssh", "git"].contains(&url.scheme()) { - return Err(Error::MissingResourceLocation); - } - if url.cannot_be_a_base() { - return Err(Error::RelativeUrl { url: url.into() }); - } - - let mut url = to_owned_url(&url); - if let Some(path) = scp_path { - url.path = path.into(); - url.serialize_alternative_form = true; - } - Ok(url) -} diff --git a/gitbutler-app/src/git/url/scheme.rs b/gitbutler-app/src/git/url/scheme.rs deleted file mode 100644 index 31239b5e8..000000000 --- a/gitbutler-app/src/git/url/scheme.rs +++ /dev/null @@ -1,54 +0,0 @@ -/// A scheme or protocol for use in a [`Url`][super::Url]. -/// -/// It defines how to talk to a given repository. -#[derive(Default, PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)] -pub enum Scheme { - /// A local resource that is accessible on the current host. - File, - /// A git daemon, like `File` over TCP/IP. - Git, - /// Launch `git-upload-pack` through an `ssh` tunnel. - #[default] - Ssh, - /// Use the HTTP protocol to talk to git servers. - Http, - /// Use the HTTPS protocol to talk to git servers. - Https, - /// Any other protocol or transport that isn't known at compile time. - /// - /// It's used to support plug-in transports. - Ext(String), -} - -impl<'a> From<&'a str> for Scheme { - fn from(value: &'a str) -> Self { - match value { - "ssh" => Scheme::Ssh, - "file" => Scheme::File, - "git" => Scheme::Git, - "http" => Scheme::Http, - "https" => Scheme::Https, - unknown => Scheme::Ext(unknown.into()), - } - } -} - -impl Scheme { - /// Return ourselves parseable name. 
- pub fn as_str(&self) -> &str { - match self { - Self::File => "file", - Self::Git => "git", - Self::Ssh => "ssh", - Self::Http => "http", - Self::Https => "https", - Self::Ext(name) => name.as_str(), - } - } -} - -impl std::fmt::Display for Scheme { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str(self.as_str()) - } -} diff --git a/gitbutler-app/src/github.rs b/gitbutler-app/src/github.rs index 82b6da3c0..1a1041780 100644 --- a/gitbutler-app/src/github.rs +++ b/gitbutler-app/src/github.rs @@ -1 +1,82 @@ -pub mod commands; +pub mod commands { + use std::collections::HashMap; + + use anyhow::{Context, Result}; + use serde::{Deserialize, Serialize}; + use tracing::instrument; + + use crate::error::Error; + + const GITHUB_CLIENT_ID: &str = "cd51880daa675d9e6452"; + + #[derive(Debug, Deserialize, Serialize, Clone, Default)] + pub struct Verification { + pub user_code: String, + pub device_code: String, + } + + #[tauri::command(async)] + #[instrument] + pub async fn init_device_oauth() -> Result { + let mut req_body = HashMap::new(); + req_body.insert("client_id", GITHUB_CLIENT_ID); + req_body.insert("scope", "repo"); + + let mut headers = reqwest::header::HeaderMap::new(); + headers.insert( + reqwest::header::ACCEPT, + reqwest::header::HeaderValue::from_static("application/json"), + ); + + let client = reqwest::Client::new(); + let res = client + .post("https://github.com/login/device/code") + .headers(headers) + .json(&req_body) + .send() + .await + .context("Failed to send request")?; + + let rsp_body = res.text().await.context("Failed to get response body")?; + + serde_json::from_str(&rsp_body) + .context("Failed to parse response body") + .map_err(Into::into) + } + + #[tauri::command(async)] + #[instrument] + pub async fn check_auth_status(device_code: &str) -> Result { + #[derive(Debug, Deserialize, Serialize, Clone, Default)] + struct AccessTokenContainer { + access_token: String, + } + + let mut req_body = HashMap::new(); + 
req_body.insert("client_id", GITHUB_CLIENT_ID); + req_body.insert("device_code", device_code); + req_body.insert("grant_type", "urn:ietf:params:oauth:grant-type:device_code"); + + let mut headers = reqwest::header::HeaderMap::new(); + headers.insert( + reqwest::header::ACCEPT, + reqwest::header::HeaderValue::from_static("application/json"), + ); + + let client = reqwest::Client::new(); + let res = client + .post("https://github.com/login/oauth/access_token") + .headers(headers) + .json(&req_body) + .send() + .await + .context("Failed to send request")?; + + let rsp_body = res.text().await.context("Failed to get response body")?; + + serde_json::from_str::(&rsp_body) + .map(|rsp_body| rsp_body.access_token) + .context("Failed to parse response body") + .map_err(Into::into) + } +} diff --git a/gitbutler-app/src/github/commands.rs b/gitbutler-app/src/github/commands.rs deleted file mode 100644 index 18406bcaa..000000000 --- a/gitbutler-app/src/github/commands.rs +++ /dev/null @@ -1,80 +0,0 @@ -use std::collections::HashMap; - -use anyhow::{Context, Result}; -use serde::{Deserialize, Serialize}; -use tracing::instrument; - -use crate::error::Error; - -const GITHUB_CLIENT_ID: &str = "cd51880daa675d9e6452"; - -#[derive(Debug, Deserialize, Serialize, Clone, Default)] -pub struct Verification { - pub user_code: String, - pub device_code: String, -} - -#[tauri::command(async)] -#[instrument] -pub async fn init_device_oauth() -> Result { - let mut req_body = HashMap::new(); - req_body.insert("client_id", GITHUB_CLIENT_ID); - req_body.insert("scope", "repo"); - - let mut headers = reqwest::header::HeaderMap::new(); - headers.insert( - reqwest::header::ACCEPT, - reqwest::header::HeaderValue::from_static("application/json"), - ); - - let client = reqwest::Client::new(); - let res = client - .post("https://github.com/login/device/code") - .headers(headers) - .json(&req_body) - .send() - .await - .context("Failed to send request")?; - - let rsp_body = 
res.text().await.context("Failed to get response body")?; - - serde_json::from_str(&rsp_body) - .context("Failed to parse response body") - .map_err(Into::into) -} - -#[tauri::command(async)] -#[instrument] -pub async fn check_auth_status(device_code: &str) -> Result { - #[derive(Debug, Deserialize, Serialize, Clone, Default)] - struct AccessTokenContainer { - access_token: String, - } - - let mut req_body = HashMap::new(); - req_body.insert("client_id", GITHUB_CLIENT_ID); - req_body.insert("device_code", device_code); - req_body.insert("grant_type", "urn:ietf:params:oauth:grant-type:device_code"); - - let mut headers = reqwest::header::HeaderMap::new(); - headers.insert( - reqwest::header::ACCEPT, - reqwest::header::HeaderValue::from_static("application/json"), - ); - - let client = reqwest::Client::new(); - let res = client - .post("https://github.com/login/oauth/access_token") - .headers(headers) - .json(&req_body) - .send() - .await - .context("Failed to send request")?; - - let rsp_body = res.text().await.context("Failed to get response body")?; - - serde_json::from_str::(&rsp_body) - .map(|rsp_body| rsp_body.access_token) - .context("Failed to parse response body") - .map_err(Into::into) -} diff --git a/gitbutler-app/src/id.rs b/gitbutler-app/src/id.rs deleted file mode 100644 index 9e33f515a..000000000 --- a/gitbutler-app/src/id.rs +++ /dev/null @@ -1,118 +0,0 @@ -//! A generic UUID-based wrapper, via a newtype pattern -//! with a few key integrations used throughout the library. - -use std::{fmt, hash::Hash, marker::PhantomData, str}; - -use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use uuid::Uuid; - -/// A generic UUID-based newtype. -/// -/// `Default` is implemented to generate a new UUID -/// via [`Uuid::new_v4`]. 
-pub struct Id(Uuid, PhantomData); - -impl Hash for Id { - fn hash(&self, state: &mut H) { - self.0.hash(state); - } -} - -impl PartialOrd for Id { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for Id { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.0.cmp(&other.0) - } -} - -impl Id { - #[must_use] - pub fn generate() -> Self { - Id(Uuid::new_v4(), PhantomData) - } -} - -impl Default for Id { - fn default() -> Self { - Self::generate() - } -} - -impl rusqlite::types::FromSql for Id { - fn column_result(value: rusqlite::types::ValueRef<'_>) -> rusqlite::types::FromSqlResult { - Uuid::parse_str(value.as_str()?) - .map(Into::into) - .map_err(|error| rusqlite::types::FromSqlError::Other(Box::new(error))) - } -} - -impl rusqlite::ToSql for Id { - fn to_sql(&self) -> rusqlite::Result> { - Ok(rusqlite::types::ToSqlOutput::from(self.0.to_string())) - } -} - -impl PartialEq for Id { - fn eq(&self, other: &Self) -> bool { - self.0.eq(&other.0) - } -} - -impl Eq for Id {} - -impl From for Id { - fn from(value: Uuid) -> Self { - Self(value, PhantomData) - } -} - -impl<'de, T> Deserialize<'de> for Id { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - Uuid::deserialize(deserializer).map(Into::into) - } -} - -impl Serialize for Id { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - self.0.serialize(serializer) - } -} - -impl Clone for Id { - fn clone(&self) -> Self { - *self - } -} - -impl fmt::Display for Id { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -impl fmt::Debug for Id { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -impl Copy for Id {} - -impl str::FromStr for Id { - type Err = uuid::Error; - - fn from_str(s: &str) -> Result { - Uuid::parse_str(s).map(Into::into) - } -} diff --git a/gitbutler-app/src/keys.rs b/gitbutler-app/src/keys.rs index d0d35e52b..c30ab5cb4 100644 
--- a/gitbutler-app/src/keys.rs +++ b/gitbutler-app/src/keys.rs @@ -1,7 +1,29 @@ -pub mod commands; -mod controller; -mod key; -pub mod storage; +pub mod commands { + use tauri::Manager; + use tracing::instrument; -pub use controller::*; -pub use key::{PrivateKey, PublicKey, SignError}; + use crate::error::Error; + + use gitbutler::keys::{controller, PublicKey}; + + impl From for Error { + fn from(value: controller::GetOrCreateError) -> Self { + match value { + controller::GetOrCreateError::Other(error) => { + tracing::error!(?error, "failed to get or create key"); + Error::Unknown + } + } + } + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn get_public_key(handle: tauri::AppHandle) -> Result { + handle + .state::() + .get_or_create() + .map(|key| key.public_key()) + .map_err(Into::into) + } +} diff --git a/gitbutler-app/src/keys/commands.rs b/gitbutler-app/src/keys/commands.rs deleted file mode 100644 index 4fe620aa3..000000000 --- a/gitbutler-app/src/keys/commands.rs +++ /dev/null @@ -1,27 +0,0 @@ -use tauri::Manager; -use tracing::instrument; - -use crate::error::Error; - -use super::{controller, PublicKey}; - -impl From for Error { - fn from(value: controller::GetOrCreateError) -> Self { - match value { - controller::GetOrCreateError::Other(error) => { - tracing::error!(?error, "failed to get or create key"); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn get_public_key(handle: tauri::AppHandle) -> Result { - handle - .state::() - .get_or_create() - .map(|key| key.public_key()) - .map_err(Into::into) -} diff --git a/gitbutler-app/src/keys/controller.rs b/gitbutler-app/src/keys/controller.rs deleted file mode 100644 index de9096521..000000000 --- a/gitbutler-app/src/keys/controller.rs +++ /dev/null @@ -1,34 +0,0 @@ -use anyhow::Context; - -use super::{storage::Storage, PrivateKey}; - -#[derive(Clone)] -pub struct Controller { - storage: Storage, -} - -impl Controller { - pub fn 
new(storage: Storage) -> Self { - Self { storage } - } - - pub fn from_path>(path: P) -> Self { - Self::new(Storage::from_path(path)) - } - - pub fn get_or_create(&self) -> Result { - if let Some(key) = self.storage.get().context("failed to get key")? { - Ok(key) - } else { - let key = PrivateKey::generate(); - self.storage.create(&key).context("failed to save key")?; - Ok(key) - } - } -} - -#[derive(Debug, thiserror::Error)] -pub enum GetOrCreateError { - #[error(transparent)] - Other(#[from] anyhow::Error), -} diff --git a/gitbutler-app/src/keys/key.rs b/gitbutler-app/src/keys/key.rs deleted file mode 100644 index dacde6a7e..000000000 --- a/gitbutler-app/src/keys/key.rs +++ /dev/null @@ -1,127 +0,0 @@ -use std::{fmt, str::FromStr}; - -use ssh_key::{HashAlg, LineEnding, SshSig}; - -use rand::rngs::OsRng; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Clone, Eq)] -pub struct PrivateKey(ssh_key::PrivateKey); - -#[derive(Debug, thiserror::Error)] -pub enum SignError { - #[error(transparent)] - Ssh(#[from] ssh_key::Error), -} - -impl PrivateKey { - pub fn generate() -> Self { - Self::default() - } - - pub fn public_key(&self) -> PublicKey { - PublicKey::from(self) - } - - pub fn sign(&self, bytes: &[u8]) -> Result { - let sig = SshSig::sign(&self.0, "git", HashAlg::Sha512, bytes)?; - sig.to_pem(LineEnding::default()).map_err(Into::into) - } -} - -impl Default for PrivateKey { - fn default() -> Self { - let ed25519_keypair = ssh_key::private::Ed25519Keypair::random(&mut OsRng); - let ed25519_key = ssh_key::PrivateKey::from(ed25519_keypair); - Self(ed25519_key) - } -} - -impl PartialEq for PrivateKey { - fn eq(&self, other: &Self) -> bool { - self.0.to_bytes().eq(&other.0.to_bytes()) - } -} - -impl Serialize for PrivateKey { - fn serialize(&self, serializer: S) -> Result { - self.to_string().serialize(serializer) - } -} - -impl FromStr for PrivateKey { - type Err = ssh_key::Error; - - fn from_str(s: &str) -> Result { - let key = 
ssh_key::PrivateKey::from_openssh(s.as_bytes())?; - Ok(Self(key)) - } -} - -impl fmt::Display for PrivateKey { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0 - .to_openssh(ssh_key::LineEnding::default()) - .map_err(|_| fmt::Error)? - .fmt(f) - } -} - -impl<'de> Deserialize<'de> for PrivateKey { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - Self::from_str(&s).map_err(serde::de::Error::custom) - } -} - -#[derive(Debug)] -pub struct PublicKey(ssh_key::PublicKey); - -impl From<&PrivateKey> for PublicKey { - fn from(value: &PrivateKey) -> Self { - Self(value.0.public_key().clone()) - } -} - -impl PartialEq for PublicKey { - fn eq(&self, other: &Self) -> bool { - self.0.to_bytes().eq(&other.0.to_bytes()) - } -} - -impl fmt::Display for PublicKey { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.to_openssh().map_err(|_| fmt::Error)?.fmt(f) - } -} - -impl FromStr for PublicKey { - type Err = ssh_key::Error; - - fn from_str(s: &str) -> Result { - let key = ssh_key::PublicKey::from_openssh(s)?; - Ok(Self(key)) - } -} - -impl Serialize for PublicKey { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - self.to_string().serialize(serializer) - } -} - -impl<'de> Deserialize<'de> for PublicKey { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - Self::from_str(s.as_str()).map_err(serde::de::Error::custom) - } -} diff --git a/gitbutler-app/src/keys/storage.rs b/gitbutler-app/src/keys/storage.rs deleted file mode 100644 index e6dac6506..000000000 --- a/gitbutler-app/src/keys/storage.rs +++ /dev/null @@ -1,43 +0,0 @@ -use crate::storage; - -use super::PrivateKey; - -#[derive(Clone)] -pub struct Storage { - storage: storage::Storage, -} - -#[derive(Debug, thiserror::Error)] -pub enum Error { - #[error("IO error: {0}")] - Storage(#[from] 
storage::Error), - #[error("SSH key error: {0}")] - SSHKey(#[from] ssh_key::Error), -} - -impl Storage { - pub fn new(storage: storage::Storage) -> Storage { - Storage { storage } - } - - pub fn from_path>(path: P) -> Storage { - Storage::new(storage::Storage::new(path)) - } - - pub fn get(&self) -> Result, Error> { - self.storage - .read("keys/ed25519") - .map_err(Error::Storage) - .and_then(|s| s.map(|s| s.parse().map_err(Error::SSHKey)).transpose()) - } - - pub fn create(&self, key: &PrivateKey) -> Result<(), Error> { - self.storage - .write("keys/ed25519", &key.to_string()) - .map_err(Error::Storage)?; - self.storage - .write("keys/ed25519.pub", &key.public_key().to_string()) - .map_err(Error::Storage)?; - Ok(()) - } -} diff --git a/gitbutler-app/src/lib.rs b/gitbutler-app/src/lib.rs index ce9dd0acd..77c35a4ca 100644 --- a/gitbutler-app/src/lib.rs +++ b/gitbutler-app/src/lib.rs @@ -15,37 +15,20 @@ pub mod analytics; pub mod app; -pub mod askpass; pub mod commands; pub mod events; pub mod logs; pub mod menu; pub mod watcher; -pub mod assets; -pub mod database; -pub mod dedup; +pub mod askpass; pub mod deltas; pub mod error; -pub mod fs; -pub mod gb_repository; -pub mod git; pub mod github; -pub mod id; pub mod keys; -pub mod lock; -pub mod path; -pub mod project_repository; pub mod projects; -pub mod reader; pub mod sentry; pub mod sessions; -pub mod ssh; -pub mod storage; -pub mod types; pub mod users; pub mod virtual_branches; -#[cfg(target_os = "windows")] -pub mod windows; -pub mod writer; pub mod zip; diff --git a/gitbutler-app/src/lock.rs b/gitbutler-app/src/lock.rs deleted file mode 100644 index 2783c77a3..000000000 --- a/gitbutler-app/src/lock.rs +++ /dev/null @@ -1,51 +0,0 @@ -use std::sync::{Arc, Mutex}; - -#[derive(Debug, Clone)] -pub struct Dir { - inner: Arc, -} - -impl Dir { - pub fn new>(path: P) -> Result { - Inner::new(path).map(Arc::new).map(|inner| Self { inner }) - } - - pub fn batch( - &self, - action: impl FnOnce(&std::path::Path) -> R, - ) 
-> Result { - self.inner.batch(action) - } -} - -#[derive(Debug)] -struct Inner { - path: std::path::PathBuf, - flock: Mutex, -} - -impl Inner { - fn new>(path: P) -> Result { - let path = path.as_ref().to_path_buf(); - if !path.exists() { - std::fs::create_dir_all(&path)?; - } else if !path.is_dir() { - return Err(std::io::Error::new( - std::io::ErrorKind::NotFound, - format!("{} is not a directory", path.display()), - )); - } - let flock = fslock::LockFile::open(&path.with_extension("lock")).map(Mutex::new)?; - Ok(Self { path, flock }) - } - - fn batch(&self, action: impl FnOnce(&std::path::Path) -> R) -> Result { - let mut flock = self.flock.lock().unwrap(); - - flock.lock()?; - let result = action(&self.path); - flock.unlock()?; - - Ok(result) - } -} diff --git a/gitbutler-app/src/main.rs b/gitbutler-app/src/main.rs index 8432c49c1..7f86ff9ef 100644 --- a/gitbutler-app/src/main.rs +++ b/gitbutler-app/src/main.rs @@ -13,14 +13,17 @@ clippy::too_many_lines )] +use gitbutler::assets; +use gitbutler::database; +use gitbutler::git; +use gitbutler::storage; +#[cfg(target_os = "windows")] +use gitbutler::windows; use gitbutler_app::analytics; use gitbutler_app::app; use gitbutler_app::askpass; -use gitbutler_app::assets; use gitbutler_app::commands; -use gitbutler_app::database; use gitbutler_app::deltas; -use gitbutler_app::git; use gitbutler_app::github; use gitbutler_app::keys; use gitbutler_app::logs; @@ -28,12 +31,9 @@ use gitbutler_app::menu; use gitbutler_app::projects; use gitbutler_app::sentry; use gitbutler_app::sessions; -use gitbutler_app::storage; use gitbutler_app::users; use gitbutler_app::virtual_branches; use gitbutler_app::watcher; -#[cfg(target_os = "windows")] -use gitbutler_app::windows; use gitbutler_app::zip; use std::path::PathBuf; @@ -101,7 +101,12 @@ fn main() { tracing::info!(version = %app_handle.package_info().version, name = %app_handle.package_info().name, "starting app"); - let askpass_broker = 
askpass::AskpassBroker::init(app_handle.clone()); + let askpass_broker = gitbutler::askpass::AskpassBroker::init({ + let handle = app_handle.clone(); + move |event| { + handle.emit_all("git_prompt", event).expect("tauri event emission doesn't fail in practice") + } + }); app_handle.manage(askpass_broker); let storage_controller = storage::Storage::new(&app_data_dir); @@ -110,16 +115,16 @@ fn main() { let watcher_controller = watcher::Watchers::new(app_handle.clone()); app_handle.manage(watcher_controller.clone()); - let projects_storage_controller = projects::storage::Storage::new(storage_controller.clone()); + let projects_storage_controller = gitbutler::projects::storage::Storage::new(storage_controller.clone()); app_handle.manage(projects_storage_controller.clone()); - let users_storage_controller = users::storage::Storage::new(storage_controller.clone()); + let users_storage_controller = gitbutler::users::storage::Storage::new(storage_controller.clone()); app_handle.manage(users_storage_controller.clone()); - let users_controller = users::Controller::new(users_storage_controller.clone()); + let users_controller = gitbutler::users::Controller::new(users_storage_controller.clone()); app_handle.manage(users_controller.clone()); - let projects_controller = projects::Controller::new( + let projects_controller = gitbutler::projects::Controller::new( app_data_dir.clone(), projects_storage_controller.clone(), users_controller.clone(), @@ -132,21 +137,21 @@ fn main() { let database_controller = database::Database::open_in_directory(&app_data_dir).expect("failed to open database"); app_handle.manage(database_controller.clone()); - let zipper = zip::Zipper::new(&app_cache_dir); + let zipper = gitbutler::zip::Zipper::new(&app_cache_dir); app_handle.manage(zipper.clone()); - app_handle.manage(zip::Controller::new(app_data_dir.clone(), app_log_dir.clone(), zipper.clone(), projects_controller.clone())); + app_handle.manage(gitbutler::zip::Controller::new(app_data_dir.clone(), 
app_log_dir.clone(), zipper.clone(), projects_controller.clone())); - let deltas_database_controller = deltas::database::Database::new(database_controller.clone()); + let deltas_database_controller = gitbutler::deltas::database::Database::new(database_controller.clone()); app_handle.manage(deltas_database_controller.clone()); - let deltas_controller = deltas::Controller::new(deltas_database_controller.clone()); + let deltas_controller = gitbutler::deltas::Controller::new(deltas_database_controller.clone()); app_handle.manage(deltas_controller); - let keys_storage_controller = keys::storage::Storage::new(storage_controller.clone()); + let keys_storage_controller = gitbutler::keys::storage::Storage::new(storage_controller.clone()); app_handle.manage(keys_storage_controller.clone()); - let keys_controller = keys::Controller::new(keys_storage_controller.clone()); + let keys_controller = gitbutler::keys::Controller::new(keys_storage_controller.clone()); app_handle.manage(keys_controller.clone()); let git_credentials_controller = git::credentials::Helper::new( @@ -156,7 +161,7 @@ fn main() { ); app_handle.manage(git_credentials_controller.clone()); - app_handle.manage(virtual_branches::controller::Controller::new( + app_handle.manage(gitbutler::virtual_branches::controller::Controller::new( app_data_dir.clone(), projects_controller.clone(), users_controller.clone(), @@ -196,10 +201,10 @@ fn main() { }; } - let sessions_database_controller = sessions::database::Database::new(database_controller.clone()); + let sessions_database_controller = gitbutler::sessions::database::Database::new(database_controller.clone()); app_handle.manage(sessions_database_controller.clone()); - app_handle.manage(sessions::Controller::new( + app_handle.manage(gitbutler::sessions::Controller::new( app_data_dir.clone(), sessions_database_controller.clone(), projects_controller.clone(), diff --git a/gitbutler-app/src/path.rs b/gitbutler-app/src/path.rs deleted file mode 100644 index 
8f0145387..000000000 --- a/gitbutler-app/src/path.rs +++ /dev/null @@ -1,48 +0,0 @@ -use std::path::{Component, Path, PathBuf}; - -/// Normalize a path to remove any `.` and `..` components -/// and standardize the path separator to the system's default. -/// -/// This trait is automatically implemented for anything convertible -/// to a `&Path` (via `AsRef`). -pub trait Normalize { - /// Normalize a path to remove any `.` and `..` components - /// and standardize the path separator to the system's default. - fn normalize(&self) -> PathBuf; -} - -impl> Normalize for P { - fn normalize(&self) -> PathBuf { - // Note: Copied from Cargo's codebase: - // https://github.com/rust-lang/cargo/blob/2e4cfc2b7d43328b207879228a2ca7d427d188bb/src/cargo/util/paths.rs#L65-L90 - // License: MIT OR Apache-2.0 (this function only) - // - // Small modifications made by GitButler. - - let path = self.as_ref(); - let mut components = path.components().peekable(); - let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().copied() { - components.next(); - PathBuf::from(c.as_os_str()) - } else { - PathBuf::new() - }; - - for component in components { - match component { - Component::Prefix(..) 
=> unreachable!(), - Component::RootDir => { - ret.push(component.as_os_str()); - } - Component::CurDir => {} - Component::ParentDir => { - ret.pop(); - } - Component::Normal(c) => { - ret.push(c); - } - } - } - ret - } -} diff --git a/gitbutler-app/src/project_repository.rs b/gitbutler-app/src/project_repository.rs deleted file mode 100644 index 79ba8b1d0..000000000 --- a/gitbutler-app/src/project_repository.rs +++ /dev/null @@ -1,8 +0,0 @@ -mod config; -pub mod conflicts; -mod repository; - -pub use config::Config; -pub use repository::{LogUntil, OpenError, RemoteError, Repository}; - -pub mod signatures; diff --git a/gitbutler-app/src/project_repository/config.rs b/gitbutler-app/src/project_repository/config.rs deleted file mode 100644 index 51943b5cb..000000000 --- a/gitbutler-app/src/project_repository/config.rs +++ /dev/null @@ -1,51 +0,0 @@ -use crate::git; - -pub struct Config<'a> { - git_repository: &'a git::Repository, -} - -impl<'a> From<&'a git::Repository> for Config<'a> { - fn from(value: &'a git::Repository) -> Self { - Self { - git_repository: value, - } - } -} - -impl Config<'_> { - pub fn sign_commits(&self) -> Result { - let sign_commits = self - .git_repository - .config()? - .get_bool("gitbutler.signCommits") - .unwrap_or(Some(false)) - .unwrap_or(false); - Ok(sign_commits) - } - - pub fn user_real_comitter(&self) -> Result { - let gb_comitter = self - .git_repository - .config()? 
- .get_string("gitbutler.gitbutlerCommitter") - .unwrap_or(Some("0".to_string())) - .unwrap_or("0".to_string()); - Ok(gb_comitter == "0") - } - - pub fn user_name(&self) -> Result, git::Error> { - self.git_repository.config()?.get_string("user.name") - } - - pub fn user_email(&self) -> Result, git::Error> { - self.git_repository.config()?.get_string("user.email") - } - - pub fn set_local(&self, key: &str, val: &str) -> Result<(), git::Error> { - self.git_repository.config()?.set_local(key, val) - } - - pub fn get_local(&self, key: &str) -> Result, git::Error> { - self.git_repository.config()?.get_local(key) - } -} diff --git a/gitbutler-app/src/project_repository/conflicts.rs b/gitbutler-app/src/project_repository/conflicts.rs deleted file mode 100644 index 88fe96b9e..000000000 --- a/gitbutler-app/src/project_repository/conflicts.rs +++ /dev/null @@ -1,144 +0,0 @@ -// stuff to manage merge conflict state -// this is the dumbest possible way to do this, but it is a placeholder -// conflicts are stored one path per line in .git/conflicts -// merge parent is stored in .git/base_merge_parent -// conflicts are removed as they are resolved, the conflicts file is removed when there are no more conflicts -// the merge parent file is removed when the merge is complete - -use std::{ - io::{BufRead, Write}, - path::{Path, PathBuf}, -}; - -use anyhow::Result; -use itertools::Itertools; - -use crate::git; - -use super::Repository; - -pub fn mark, A: AsRef<[P]>>( - repository: &Repository, - paths: A, - parent: Option, -) -> Result<()> { - let paths = paths.as_ref(); - if paths.is_empty() { - return Ok(()); - } - let conflicts_path = repository.git_repository.path().join("conflicts"); - // write all the file paths to a file on disk - let mut file = std::fs::File::create(conflicts_path)?; - for path in paths { - file.write_all(path.as_ref().as_os_str().as_encoded_bytes())?; - file.write_all(b"\n")?; - } - - if let Some(parent) = parent { - let merge_path = 
repository.git_repository.path().join("base_merge_parent"); - // write all the file paths to a file on disk - let mut file = std::fs::File::create(merge_path)?; - file.write_all(parent.to_string().as_bytes())?; - } - - Ok(()) -} - -pub fn merge_parent(repository: &Repository) -> Result> { - let merge_path = repository.git_repository.path().join("base_merge_parent"); - if !merge_path.exists() { - return Ok(None); - } - - let file = std::fs::File::open(merge_path)?; - let reader = std::io::BufReader::new(file); - let mut lines = reader.lines(); - if let Some(parent) = lines.next() { - let parent = parent?; - let parent: git::Oid = parent.parse()?; - Ok(Some(parent)) - } else { - Ok(None) - } -} - -pub fn resolve>(repository: &Repository, path: P) -> Result<()> { - let path = path.as_ref(); - let conflicts_path = repository.git_repository.path().join("conflicts"); - let file = std::fs::File::open(conflicts_path.clone())?; - let reader = std::io::BufReader::new(file); - let mut remaining = Vec::new(); - for line in reader.lines().map_ok(PathBuf::from) { - let line = line?; - if line != path { - remaining.push(line); - } - } - - // remove file - std::fs::remove_file(conflicts_path)?; - - // re-write file if needed - if !remaining.is_empty() { - mark(repository, &remaining, None)?; - } - Ok(()) -} - -pub fn conflicting_files(repository: &Repository) -> Result> { - let conflicts_path = repository.git_repository.path().join("conflicts"); - if !conflicts_path.exists() { - return Ok(vec![]); - } - - let file = std::fs::File::open(conflicts_path)?; - let reader = std::io::BufReader::new(file); - Ok(reader.lines().map_while(Result::ok).collect()) -} - -pub fn is_conflicting>(repository: &Repository, path: Option

) -> Result { - let conflicts_path = repository.git_repository.path().join("conflicts"); - if !conflicts_path.exists() { - return Ok(false); - } - - let file = std::fs::File::open(conflicts_path)?; - let reader = std::io::BufReader::new(file); - let mut files = reader.lines().map_ok(PathBuf::from); - if let Some(pathname) = path { - let pathname = pathname.as_ref(); - - // check if pathname is one of the lines in conflicts_path file - for line in files { - let line = line?; - - if line == pathname { - return Ok(true); - } - } - Ok(false) - } else { - Ok(files.next().transpose().map(|x| x.is_some())?) - } -} - -// is this project still in a resolving conflict state? -// - could be that there are no more conflicts, but the state is not committed -pub fn is_resolving(repository: &Repository) -> bool { - repository - .git_repository - .path() - .join("base_merge_parent") - .exists() -} - -pub fn clear(repository: &Repository) -> Result<()> { - let merge_path = repository.git_repository.path().join("base_merge_parent"); - std::fs::remove_file(merge_path)?; - - for file in conflicting_files(repository)? 
{ - resolve(repository, &file)?; - } - - Ok(()) -} diff --git a/gitbutler-app/src/project_repository/repository.rs b/gitbutler-app/src/project_repository/repository.rs deleted file mode 100644 index d51f097ac..000000000 --- a/gitbutler-app/src/project_repository/repository.rs +++ /dev/null @@ -1,697 +0,0 @@ -use std::{ - path, - str::FromStr, - sync::{atomic::AtomicUsize, Arc}, -}; - -use anyhow::{Context, Result}; - -use crate::{ - askpass::AskpassBroker, - git::{self, credentials::HelpError, Url}, - keys, - projects::{self, AuthKey}, - ssh, users, - virtual_branches::{Branch, BranchId}, -}; - -use super::conflicts; - -pub struct Repository { - pub git_repository: git::Repository, - project: projects::Project, -} - -#[derive(Debug, thiserror::Error)] -pub enum OpenError { - #[error("repository not found at {0}")] - NotFound(path::PathBuf), - #[error(transparent)] - Other(anyhow::Error), -} - -impl From for crate::error::Error { - fn from(value: OpenError) -> Self { - match value { - OpenError::NotFound(path) => crate::error::Error::UserError { - code: crate::error::Code::Projects, - message: format!("{} not found", path.display()), - }, - OpenError::Other(error) => { - tracing::error!(?error); - crate::error::Error::Unknown - } - } - } -} - -impl Repository { - pub fn open(project: &projects::Project) -> Result { - git::Repository::open(&project.path) - .map_err(|error| match error { - git::Error::NotFound(_) => OpenError::NotFound(project.path.clone()), - other => OpenError::Other(other.into()), - }) - .map(|git_repository| { - // XXX(qix-): This is a temporary measure to disable GC on the project repository. - // XXX(qix-): We do this because the internal repository we use to store the "virtual" - // XXX(qix-): refs and information use Git's alternative-objects mechanism to refer - // XXX(qix-): to the project repository's objects. 
However, the project repository - // XXX(qix-): has no knowledge of these refs, and will GC them away (usually after - // XXX(qix-): about 2 weeks) which will corrupt the internal repository. - // XXX(qix-): - // XXX(qix-): We will ultimately move away from an internal repository for a variety - // XXX(qix-): of reasons, but for now, this is a simple, short-term solution that we - // XXX(qix-): can clean up later on. We're aware this isn't ideal. - if let Ok(config) = git_repository.config().as_mut(){ - let should_set = match config.get_bool("gitbutler.didSetPrune") { - Ok(None | Some(false)) => true, - Ok(Some(true)) => false, - Err(error) => { - tracing::warn!( - "failed to get gitbutler.didSetPrune for repository at {}; cannot disable gc: {}", - project.path.display(), - error - ); - false - } - }; - - if should_set { - if let Err(error) = config.set_str("gc.pruneExpire", "never").and_then(|()| config.set_bool("gitbutler.didSetPrune", true)) { - tracing::warn!( - "failed to set gc.auto to false for repository at {}; cannot disable gc: {}", - project.path.display(), - error - ); - } - } - } else { - tracing::warn!( - "failed to get config for repository at {}; cannot disable gc", - project.path.display() - ); - } - - git_repository - }) - .map(|git_repository| Self { - git_repository, - project: project.clone(), - }) - } - - pub fn is_resolving(&self) -> bool { - conflicts::is_resolving(self) - } - - pub fn path(&self) -> &path::Path { - path::Path::new(&self.project.path) - } - - pub fn config(&self) -> super::Config { - super::Config::from(&self.git_repository) - } - - pub fn git_signatures<'a>( - &self, - user: Option<&users::User>, - ) -> Result<(git::Signature<'a>, git::Signature<'a>)> { - super::signatures::signatures(self, user).context("failed to get signatures") - } - - pub fn project(&self) -> &projects::Project { - &self.project - } - - pub fn set_project(&mut self, project: &projects::Project) { - self.project = project.clone(); - } - - pub fn 
git_index_size(&self) -> Result { - let head = self.git_repository.index_size()?; - Ok(head) - } - - pub fn get_head(&self) -> Result { - let head = self.git_repository.head()?; - Ok(head) - } - - pub fn get_wd_tree(&self) -> Result { - let tree = self.git_repository.get_wd_tree()?; - Ok(tree) - } - - pub fn is_path_ignored>(&self, path: P) -> Result { - let path = path.as_ref(); - let ignored = self.git_repository.is_path_ignored(path)?; - Ok(ignored) - } - - pub fn root(&self) -> &std::path::Path { - self.git_repository.path().parent().unwrap() - } - - pub fn git_remote_branches(&self) -> Result> { - self.git_repository - .branches(Some(git2::BranchType::Remote))? - .flatten() - .map(|(branch, _)| branch) - .map(|branch| { - git::RemoteRefname::try_from(&branch) - .context("failed to convert branch to remote name") - }) - .collect::>>() - } - - pub fn git_test_push( - &self, - credentials: &git::credentials::Helper, - remote_name: &str, - branch_name: &str, - askpass: Option<(AskpassBroker, Option)>, - ) -> Result<()> { - let target_branch_refname = - git::Refname::from_str(&format!("refs/remotes/{}/{}", remote_name, branch_name))?; - let branch = self.git_repository.find_branch(&target_branch_refname)?; - let commit_id = branch.peel_to_commit()?.id(); - - let now = std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .unwrap_or(std::time::Duration::from_secs(0)) - .as_millis() - .to_string(); - let branch_name = format!("test-push-{}", now); - - let refname = git::RemoteRefname::from_str(&format!( - "refs/remotes/{}/{}", - remote_name, branch_name, - ))?; - - match self.push( - &commit_id, - &refname, - false, - credentials, - None, - askpass.clone(), - ) { - Ok(()) => Ok(()), - Err(e) => Err(anyhow::anyhow!(e.to_string())), - }?; - - let empty_refspec = Some(format!(":refs/heads/{}", branch_name)); - match self.push( - &commit_id, - &refname, - false, - credentials, - empty_refspec, - askpass, - ) { - Ok(()) => Ok(()), - Err(e) => 
Err(anyhow::anyhow!(e.to_string())), - }?; - - Ok(()) - } - - pub fn add_branch_reference(&self, branch: &Branch) -> Result<()> { - let (should_write, with_force) = - match self.git_repository.find_reference(&branch.refname().into()) { - Ok(reference) => match reference.target() { - Some(head_oid) => Ok((head_oid != branch.head, true)), - None => Ok((true, true)), - }, - Err(git::Error::NotFound(_)) => Ok((true, false)), - Err(error) => Err(error), - } - .context("failed to lookup reference")?; - - if should_write { - self.git_repository - .reference( - &branch.refname().into(), - branch.head, - with_force, - "new vbranch", - ) - .context("failed to create branch reference")?; - } - - Ok(()) - } - - pub fn delete_branch_reference(&self, branch: &Branch) -> Result<()> { - match self.git_repository.find_reference(&branch.refname().into()) { - Ok(mut reference) => { - reference - .delete() - .context("failed to delete branch reference")?; - Ok(()) - } - Err(git::Error::NotFound(_)) => Ok(()), - Err(error) => Err(error), - } - .context("failed to lookup reference") - } - - // returns a list of commit oids from the first oid to the second oid - pub fn l(&self, from: git::Oid, to: LogUntil) -> Result> { - match to { - LogUntil::Commit(oid) => { - let mut revwalk = self - .git_repository - .revwalk() - .context("failed to create revwalk")?; - revwalk - .push(from.into()) - .context(format!("failed to push {}", from))?; - revwalk - .hide(oid.into()) - .context(format!("failed to hide {}", oid))?; - revwalk - .map(|oid| oid.map(Into::into)) - .collect::, _>>() - } - LogUntil::Take(n) => { - let mut revwalk = self - .git_repository - .revwalk() - .context("failed to create revwalk")?; - revwalk - .push(from.into()) - .context(format!("failed to push {}", from))?; - revwalk - .take(n) - .map(|oid| oid.map(Into::into)) - .collect::, _>>() - } - LogUntil::When(cond) => { - let mut revwalk = self - .git_repository - .revwalk() - .context("failed to create revwalk")?; - revwalk - 
.push(from.into()) - .context(format!("failed to push {}", from))?; - let mut oids: Vec = vec![]; - for oid in revwalk { - let oid = oid.context("failed to get oid")?; - oids.push(oid.into()); - - let commit = self - .git_repository - .find_commit(oid.into()) - .context("failed to find commit")?; - - if cond(&commit).context("failed to check condition")? { - break; - } - } - Ok(oids) - } - LogUntil::End => { - let mut revwalk = self - .git_repository - .revwalk() - .context("failed to create revwalk")?; - revwalk - .push(from.into()) - .context(format!("failed to push {}", from))?; - revwalk - .map(|oid| oid.map(Into::into)) - .collect::, _>>() - } - } - .context("failed to collect oids") - } - - // returns a list of commits from the first oid to the second oid - pub fn log(&self, from: git::Oid, to: LogUntil) -> Result> { - self.l(from, to)? - .into_iter() - .map(|oid| self.git_repository.find_commit(oid)) - .collect::, _>>() - .context("failed to collect commits") - } - - // returns the number of commits between the first oid to the second oid - pub fn distance(&self, from: git::Oid, to: git::Oid) -> Result { - let oids = self.l(from, LogUntil::Commit(to))?; - Ok(oids.len().try_into()?) - } - - pub fn commit( - &self, - user: Option<&users::User>, - message: &str, - tree: &git::Tree, - parents: &[&git::Commit], - signing_key: Option<&keys::PrivateKey>, - ) -> Result { - let (author, committer) = self.git_signatures(user)?; - if let Some(key) = signing_key { - self.git_repository - .commit_signed(&author, message, tree, parents, key) - .context("failed to commit signed") - } else { - self.git_repository - .commit(None, &author, &committer, message, tree, parents) - .context("failed to commit") - } - } - - pub fn push_to_gitbutler_server( - &self, - user: Option<&users::User>, - ref_specs: &[&str], - ) -> Result { - let url = self - .project - .api - .as_ref() - .ok_or(RemoteError::Other(anyhow::anyhow!("api not set")))? 
- .code_git_url - .as_ref() - .ok_or(RemoteError::Other(anyhow::anyhow!("code_git_url not set")))? - .as_str() - .parse::() - .map_err(|e| RemoteError::Other(e.into()))?; - - tracing::debug!( - project_id = %self.project.id, - %url, - "pushing code to gb repo", - ); - - let access_token = user - .map(|user| user.access_token.clone()) - .ok_or(RemoteError::Auth)?; - - let mut callbacks = git2::RemoteCallbacks::new(); - if self.project.omit_certificate_check.unwrap_or(false) { - callbacks.certificate_check(|_, _| Ok(git2::CertificateCheckStatus::CertificateOk)); - } - let bytes_pushed = Arc::new(AtomicUsize::new(0)); - let total_objects = Arc::new(AtomicUsize::new(0)); - { - let byte_counter = Arc::::clone(&bytes_pushed); - let total_counter = Arc::::clone(&total_objects); - callbacks.push_transfer_progress(move |_current, total, bytes| { - byte_counter.store(bytes, std::sync::atomic::Ordering::Relaxed); - total_counter.store(total, std::sync::atomic::Ordering::Relaxed); - }); - } - - let mut push_options = git2::PushOptions::new(); - push_options.remote_callbacks(callbacks); - let auth_header = format!("Authorization: {}", access_token); - let headers = &[auth_header.as_str()]; - push_options.custom_headers(headers); - - let mut remote = self - .git_repository - .remote_anonymous(&url) - .map_err(|e| RemoteError::Other(e.into()))?; - - remote - .push(ref_specs, Some(&mut push_options)) - .map_err(|error| match error { - git::Error::Network(error) => { - tracing::warn!(project_id = %self.project.id, ?error, "git push failed",); - RemoteError::Network - } - git::Error::Auth(error) => { - tracing::warn!(project_id = %self.project.id, ?error, "git push failed",); - RemoteError::Auth - } - error => RemoteError::Other(error.into()), - })?; - - let bytes_pushed = bytes_pushed.load(std::sync::atomic::Ordering::Relaxed); - let total_objects_pushed = total_objects.load(std::sync::atomic::Ordering::Relaxed); - - tracing::debug!( - project_id = %self.project.id, - ref_spec = 
ref_specs.join(" "), - bytes = bytes_pushed, - objects = total_objects_pushed, - "pushed to gb repo tmp ref", - ); - - Ok(total_objects_pushed > 0) - } - - pub fn push( - &self, - head: &git::Oid, - branch: &git::RemoteRefname, - with_force: bool, - credentials: &git::credentials::Helper, - refspec: Option, - askpass_broker: Option<(AskpassBroker, Option)>, - ) -> Result<(), RemoteError> { - let refspec = refspec.unwrap_or_else(|| { - if with_force { - format!("+{}:refs/heads/{}", head, branch.branch()) - } else { - format!("{}:refs/heads/{}", head, branch.branch()) - } - }); - - // NOTE(qix-): This is a nasty hack, however the codebase isn't structured - // NOTE(qix-): in a way that allows us to really incorporate new backends - // NOTE(qix-): without a lot of work. This is a temporary measure to - // NOTE(qix-): work around a time-sensitive change that was necessary - // NOTE(qix-): without having to refactor a large portion of the codebase. - if self.project.preferred_key == AuthKey::SystemExecutable { - let path = self.path().to_path_buf(); - let remote = branch.remote().to_string(); - return std::thread::spawn(move || { - tokio::runtime::Runtime::new() - .unwrap() - .block_on(gitbutler_git::push( - path, - gitbutler_git::tokio::TokioExecutor, - &remote, - gitbutler_git::RefSpec::parse(refspec).unwrap(), - with_force, - handle_git_prompt_push, - askpass_broker, - )) - }) - .join() - .unwrap() - .map_err(|e| RemoteError::Other(e.into())); - } - - let auth_flows = credentials.help(self, branch.remote())?; - for (mut remote, callbacks) in auth_flows { - if let Some(url) = remote.url().context("failed to get remote url")? 
{ - if !self.project.omit_certificate_check.unwrap_or(false) { - ssh::check_known_host(&url).context("failed to check known host")?; - } - } - let mut update_refs_error: Option = None; - for callback in callbacks { - let mut cbs: git2::RemoteCallbacks = callback.into(); - if self.project.omit_certificate_check.unwrap_or(false) { - cbs.certificate_check(|_, _| Ok(git2::CertificateCheckStatus::CertificateOk)); - } - cbs.push_update_reference(|_reference: &str, status: Option<&str>| { - if let Some(status) = status { - update_refs_error = Some(git2::Error::from_str(status)); - return Err(git2::Error::from_str(status)); - }; - Ok(()) - }); - - let push_result = remote.push( - &[refspec.as_str()], - Some(&mut git2::PushOptions::new().remote_callbacks(cbs)), - ); - match push_result { - Ok(()) => { - tracing::info!( - project_id = %self.project.id, - remote = %branch.remote(), - %head, - branch = branch.branch(), - "pushed git branch" - ); - return Ok(()); - } - Err(git::Error::Auth(error) | git::Error::Http(error)) => { - tracing::warn!(project_id = %self.project.id, ?error, "git push failed"); - continue; - } - Err(git::Error::Network(error)) => { - tracing::warn!(project_id = %self.project.id, ?error, "git push failed"); - return Err(RemoteError::Network); - } - Err(error) => { - if let Some(e) = update_refs_error.as_ref() { - return Err(RemoteError::Other(anyhow::anyhow!(e.to_string()))); - } - return Err(RemoteError::Other(error.into())); - } - } - } - } - - Err(RemoteError::Auth) - } - - pub fn fetch( - &self, - remote_name: &str, - credentials: &git::credentials::Helper, - askpass: Option<(AskpassBroker, String)>, - ) -> Result<(), RemoteError> { - let refspec = format!("+refs/heads/*:refs/remotes/{}/*", remote_name); - - // NOTE(qix-): This is a nasty hack, however the codebase isn't structured - // NOTE(qix-): in a way that allows us to really incorporate new backends - // NOTE(qix-): without a lot of work. 
This is a temporary measure to - // NOTE(qix-): work around a time-sensitive change that was necessary - // NOTE(qix-): without having to refactor a large portion of the codebase. - if self.project.preferred_key == AuthKey::SystemExecutable { - let path = self.path().to_path_buf(); - let remote = remote_name.to_string(); - return std::thread::spawn(move || { - tokio::runtime::Runtime::new() - .unwrap() - .block_on(gitbutler_git::fetch( - path, - gitbutler_git::tokio::TokioExecutor, - &remote, - gitbutler_git::RefSpec::parse(refspec).unwrap(), - handle_git_prompt_fetch, - askpass, - )) - }) - .join() - .unwrap() - .map_err(|e| RemoteError::Other(e.into())); - } - - let auth_flows = credentials.help(self, remote_name)?; - for (mut remote, callbacks) in auth_flows { - if let Some(url) = remote.url().context("failed to get remote url")? { - if !self.project.omit_certificate_check.unwrap_or(false) { - ssh::check_known_host(&url).context("failed to check known host")?; - } - } - for callback in callbacks { - let mut fetch_opts = git2::FetchOptions::new(); - let mut cbs: git2::RemoteCallbacks = callback.into(); - if self.project.omit_certificate_check.unwrap_or(false) { - cbs.certificate_check(|_, _| Ok(git2::CertificateCheckStatus::CertificateOk)); - } - fetch_opts.remote_callbacks(cbs); - fetch_opts.prune(git2::FetchPrune::On); - - match remote.fetch(&[&refspec], Some(&mut fetch_opts)) { - Ok(()) => { - tracing::info!(project_id = %self.project.id, %refspec, "git fetched"); - return Ok(()); - } - Err(git::Error::Auth(error) | git::Error::Http(error)) => { - tracing::warn!(project_id = %self.project.id, ?error, "fetch failed"); - continue; - } - Err(git::Error::Network(error)) => { - tracing::warn!(project_id = %self.project.id, ?error, "fetch failed"); - return Err(RemoteError::Network); - } - Err(error) => return Err(RemoteError::Other(error.into())), - } - } - } - - Err(RemoteError::Auth) - } -} - -#[derive(Debug, thiserror::Error)] -pub enum RemoteError { - 
#[error(transparent)] - Help(#[from] HelpError), - #[error("network failed")] - Network, - #[error("authentication failed")] - Auth, - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -impl From for crate::error::Error { - fn from(value: RemoteError) -> Self { - match value { - RemoteError::Help(error) => error.into(), - RemoteError::Network => crate::error::Error::UserError { - code: crate::error::Code::ProjectGitRemote, - message: "Network erorr occured".to_string(), - }, - RemoteError::Auth => crate::error::Error::UserError { - code: crate::error::Code::ProjectGitAuth, - message: "Project remote authentication error".to_string(), - }, - RemoteError::Other(error) => { - tracing::error!(?error); - crate::error::Error::Unknown - } - } - } -} - -type OidFilter = dyn Fn(&git::Commit) -> Result; - -pub enum LogUntil { - Commit(git::Oid), - Take(usize), - When(Box), - End, -} - -#[derive(Debug, Clone, serde::Serialize)] -struct AskpassPromptPushContext { - branch_id: Option, -} - -#[derive(Debug, Clone, serde::Serialize)] -struct AskpassPromptFetchContext { - action: String, -} - -async fn handle_git_prompt_push( - prompt: String, - askpass: Option<(AskpassBroker, Option)>, -) -> Option { - if let Some((askpass_broker, branch_id)) = askpass { - tracing::info!("received prompt for branch push {branch_id:?}: {prompt:?}"); - askpass_broker - .submit_prompt(prompt, AskpassPromptPushContext { branch_id }) - .await - } else { - tracing::warn!("received askpass push prompt but no broker was supplied; returning None"); - None - } -} - -async fn handle_git_prompt_fetch( - prompt: String, - askpass: Option<(AskpassBroker, String)>, -) -> Option { - if let Some((askpass_broker, action)) = askpass { - tracing::info!("received prompt for fetch with action {action:?}: {prompt:?}"); - askpass_broker - .submit_prompt(prompt, AskpassPromptFetchContext { action }) - .await - } else { - tracing::warn!("received askpass fetch prompt but no broker was supplied; returning None"); 
- None - } -} diff --git a/gitbutler-app/src/project_repository/signatures.rs b/gitbutler-app/src/project_repository/signatures.rs deleted file mode 100644 index 392e3360f..000000000 --- a/gitbutler-app/src/project_repository/signatures.rs +++ /dev/null @@ -1,22 +0,0 @@ -use crate::{git, users}; - -pub fn signatures<'a>( - project_repository: &super::Repository, - user: Option<&users::User>, -) -> Result<(git::Signature<'a>, git::Signature<'a>), git::Error> { - let config = project_repository.config(); - - let author = match (user, config.user_name()?, config.user_email()?) { - (_, Some(name), Some(email)) => git::Signature::now(&name, &email)?, - (Some(user), _, _) => git::Signature::try_from(user)?, - _ => git::Signature::now("GitButler", "gitbutler@gitbutler.com")?, - }; - - let comitter = if config.user_real_comitter()? { - author.clone() - } else { - git::Signature::now("GitButler", "gitbutler@gitbutler.com")? - }; - - Ok((author, comitter)) -} diff --git a/gitbutler-app/src/projects.rs b/gitbutler-app/src/projects.rs index 8189caa70..fd741244d 100644 --- a/gitbutler-app/src/projects.rs +++ b/gitbutler-app/src/projects.rs @@ -1,10 +1,206 @@ -pub mod commands; -mod controller; -mod project; -pub mod storage; +pub mod commands { + use std::path; -pub use controller::*; -pub use project::{AuthKey, CodePushState, FetchResult, Project, ProjectId}; -pub use storage::UpdateRequest; + use tauri::Manager; + use tracing::instrument; -pub use project::ApiProject; + use crate::error::{Code, Error}; + + use gitbutler::projects::{ + self, + controller::{self, Controller}, + }; + + impl From for Error { + fn from(value: controller::UpdateError) -> Self { + match value { + controller::UpdateError::Validation( + controller::UpdateValidationError::KeyNotFound(path), + ) => Error::UserError { + code: Code::Projects, + message: format!("'{}' not found", path.display()), + }, + controller::UpdateError::Validation( + controller::UpdateValidationError::KeyNotFile(path), + ) => 
Error::UserError { + code: Code::Projects, + message: format!("'{}' is not a file", path.display()), + }, + controller::UpdateError::NotFound => Error::UserError { + code: Code::Projects, + message: "Project not found".into(), + }, + controller::UpdateError::Other(error) => { + tracing::error!(?error, "failed to update project"); + Error::Unknown + } + } + } + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn update_project( + handle: tauri::AppHandle, + project: projects::UpdateRequest, + ) -> Result { + handle + .state::() + .update(&project) + .await + .map_err(Into::into) + } + + impl From for Error { + fn from(value: controller::AddError) -> Self { + match value { + controller::AddError::NotAGitRepository => Error::UserError { + code: Code::Projects, + message: "Must be a git directory".to_string(), + }, + controller::AddError::AlreadyExists => Error::UserError { + code: Code::Projects, + message: "Project already exists".to_string(), + }, + controller::AddError::OpenProjectRepository(error) => error.into(), + controller::AddError::NotADirectory => Error::UserError { + code: Code::Projects, + message: "Not a directory".to_string(), + }, + controller::AddError::PathNotFound => Error::UserError { + code: Code::Projects, + message: "Path not found".to_string(), + }, + controller::AddError::SubmodulesNotSupported => Error::UserError { + code: Code::Projects, + message: "Repositories with git submodules are not supported".to_string(), + }, + controller::AddError::User(error) => error.into(), + controller::AddError::Other(error) => { + tracing::error!(?error, "failed to add project"); + Error::Unknown + } + } + } + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn add_project( + handle: tauri::AppHandle, + path: &path::Path, + ) -> Result { + handle.state::().add(path).map_err(Into::into) + } + + impl From for Error { + fn from(value: controller::GetError) -> Self { + match value { + controller::GetError::NotFound 
=> Error::UserError { + code: Code::Projects, + message: "Project not found".into(), + }, + controller::GetError::Other(error) => { + tracing::error!(?error, "failed to get project"); + Error::Unknown + } + } + } + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn get_project( + handle: tauri::AppHandle, + id: &str, + ) -> Result { + let id = id.parse().map_err(|_| Error::UserError { + code: Code::Validation, + message: "Malformed project id".into(), + })?; + handle.state::().get(&id).map_err(Into::into) + } + + impl From for Error { + fn from(value: controller::ListError) -> Self { + match value { + controller::ListError::Other(error) => { + tracing::error!(?error, "failed to list projects"); + Error::Unknown + } + } + } + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn list_projects(handle: tauri::AppHandle) -> Result, Error> { + handle.state::().list().map_err(Into::into) + } + + impl From for Error { + fn from(value: controller::DeleteError) -> Self { + match value { + controller::DeleteError::Other(error) => { + tracing::error!(?error, "failed to delete project"); + Error::Unknown + } + } + } + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn delete_project(handle: tauri::AppHandle, id: &str) -> Result<(), Error> { + let id = id.parse().map_err(|_| Error::UserError { + code: Code::Validation, + message: "Malformed project id".into(), + })?; + handle + .state::() + .delete(&id) + .await + .map_err(Into::into) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn git_get_local_config( + handle: tauri::AppHandle, + id: &str, + key: &str, + ) -> Result, Error> { + let id = id.parse().map_err(|_| Error::UserError { + code: Code::Validation, + message: "Malformed project id".into(), + })?; + handle + .state::() + .get_local_config(&id, key) + .map_err(|e| Error::UserError { + code: Code::Projects, + message: e.to_string(), + }) + } + + #[tauri::command(async)] + 
#[instrument(skip(handle))] + pub async fn git_set_local_config( + handle: tauri::AppHandle, + id: &str, + key: &str, + value: &str, + ) -> Result<(), Error> { + let id = id.parse().map_err(|_| Error::UserError { + code: Code::Validation, + message: "Malformed project id".into(), + })?; + handle + .state::() + .set_local_config(&id, key, value) + .map_err(|e| Error::UserError { + code: Code::Projects, + message: e.to_string(), + }) + } +} diff --git a/gitbutler-app/src/projects/commands.rs b/gitbutler-app/src/projects/commands.rs deleted file mode 100644 index c4f9a7629..000000000 --- a/gitbutler-app/src/projects/commands.rs +++ /dev/null @@ -1,201 +0,0 @@ -use std::path; - -use tauri::Manager; -use tracing::instrument; - -use crate::{ - error::{Code, Error}, - projects, -}; - -use super::controller::{self, Controller}; - -impl From for Error { - fn from(value: controller::UpdateError) -> Self { - match value { - controller::UpdateError::Validation( - controller::UpdateValidationError::KeyNotFound(path), - ) => Error::UserError { - code: Code::Projects, - message: format!("'{}' not found", path.display()), - }, - controller::UpdateError::Validation(controller::UpdateValidationError::KeyNotFile( - path, - )) => Error::UserError { - code: Code::Projects, - message: format!("'{}' is not a file", path.display()), - }, - controller::UpdateError::NotFound => Error::UserError { - code: Code::Projects, - message: "Project not found".into(), - }, - controller::UpdateError::Other(error) => { - tracing::error!(?error, "failed to update project"); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn update_project( - handle: tauri::AppHandle, - project: projects::UpdateRequest, -) -> Result { - handle - .state::() - .update(&project) - .await - .map_err(Into::into) -} - -impl From for Error { - fn from(value: controller::AddError) -> Self { - match value { - controller::AddError::NotAGitRepository => Error::UserError { - code: 
Code::Projects, - message: "Must be a git directory".to_string(), - }, - controller::AddError::AlreadyExists => Error::UserError { - code: Code::Projects, - message: "Project already exists".to_string(), - }, - controller::AddError::OpenProjectRepository(error) => error.into(), - controller::AddError::NotADirectory => Error::UserError { - code: Code::Projects, - message: "Not a directory".to_string(), - }, - controller::AddError::PathNotFound => Error::UserError { - code: Code::Projects, - message: "Path not found".to_string(), - }, - controller::AddError::SubmodulesNotSupported => Error::UserError { - code: Code::Projects, - message: "Repositories with git submodules are not supported".to_string(), - }, - controller::AddError::User(error) => error.into(), - controller::AddError::Other(error) => { - tracing::error!(?error, "failed to add project"); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn add_project( - handle: tauri::AppHandle, - path: &path::Path, -) -> Result { - handle.state::().add(path).map_err(Into::into) -} - -impl From for Error { - fn from(value: controller::GetError) -> Self { - match value { - controller::GetError::NotFound => Error::UserError { - code: Code::Projects, - message: "Project not found".into(), - }, - controller::GetError::Other(error) => { - tracing::error!(?error, "failed to get project"); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn get_project(handle: tauri::AppHandle, id: &str) -> Result { - let id = id.parse().map_err(|_| Error::UserError { - code: Code::Validation, - message: "Malformed project id".into(), - })?; - handle.state::().get(&id).map_err(Into::into) -} - -impl From for Error { - fn from(value: controller::ListError) -> Self { - match value { - controller::ListError::Other(error) => { - tracing::error!(?error, "failed to list projects"); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] 
-#[instrument(skip(handle))] -pub async fn list_projects(handle: tauri::AppHandle) -> Result, Error> { - handle.state::().list().map_err(Into::into) -} - -impl From for Error { - fn from(value: controller::DeleteError) -> Self { - match value { - controller::DeleteError::Other(error) => { - tracing::error!(?error, "failed to delete project"); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn delete_project(handle: tauri::AppHandle, id: &str) -> Result<(), Error> { - let id = id.parse().map_err(|_| Error::UserError { - code: Code::Validation, - message: "Malformed project id".into(), - })?; - handle - .state::() - .delete(&id) - .await - .map_err(Into::into) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn git_get_local_config( - handle: tauri::AppHandle, - id: &str, - key: &str, -) -> Result, Error> { - let id = id.parse().map_err(|_| Error::UserError { - code: Code::Validation, - message: "Malformed project id".into(), - })?; - handle - .state::() - .get_local_config(&id, key) - .map_err(|e| Error::UserError { - code: Code::Projects, - message: e.to_string(), - }) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn git_set_local_config( - handle: tauri::AppHandle, - id: &str, - key: &str, - value: &str, -) -> Result<(), Error> { - let id = id.parse().map_err(|_| Error::UserError { - code: Code::Validation, - message: "Malformed project id".into(), - })?; - handle - .state::() - .set_local_config(&id, key, value) - .map_err(|e| Error::UserError { - code: Code::Projects, - message: e.to_string(), - }) -} diff --git a/gitbutler-app/src/projects/controller.rs b/gitbutler-app/src/projects/controller.rs deleted file mode 100644 index 6f2c30baf..000000000 --- a/gitbutler-app/src/projects/controller.rs +++ /dev/null @@ -1,340 +0,0 @@ -use super::{storage, storage::UpdateRequest, Project, ProjectId}; -use crate::{gb_repository, project_repository, users, watcher}; -use 
anyhow::Context; -use std::path::{Path, PathBuf}; - -#[derive(Clone)] -pub struct Controller { - local_data_dir: PathBuf, - projects_storage: storage::Storage, - users: users::Controller, - watchers: Option, -} - -impl Controller { - pub fn new( - local_data_dir: PathBuf, - projects_storage: storage::Storage, - users: users::Controller, - watchers: Option, - ) -> Self { - Self { - local_data_dir, - projects_storage, - users, - watchers, - } - } - - pub fn from_path>(path: P) -> Self { - let pathbuf = path.as_ref().to_path_buf(); - Self { - local_data_dir: pathbuf.clone(), - projects_storage: storage::Storage::from_path(&pathbuf), - users: users::Controller::from_path(&pathbuf), - watchers: None, - } - } - - pub fn add>(&self, path: P) -> Result { - let path = path.as_ref(); - let all_projects = self - .projects_storage - .list() - .context("failed to list projects from storage")?; - if all_projects.iter().any(|project| project.path == path) { - return Err(AddError::AlreadyExists); - } - if !path.exists() { - return Err(AddError::PathNotFound); - } - if !path.is_dir() { - return Err(AddError::NotADirectory); - } - if !path.join(".git").exists() { - return Err(AddError::NotAGitRepository); - }; - - if path.join(".gitmodules").exists() { - return Err(AddError::SubmodulesNotSupported); - } - - let id = uuid::Uuid::new_v4().to_string(); - - // title is the base name of the file - let title = path - .iter() - .last() - .map_or_else(|| id.clone(), |p| p.to_str().unwrap().to_string()); - - let project = Project { - id: ProjectId::generate(), - title, - path: path.to_path_buf(), - api: None, - use_diff_context: Some(true), - ..Default::default() - }; - - // create all required directories to avoid racing later - let user = self.users.get_user()?; - let project_repository = project_repository::Repository::open(&project)?; - gb_repository::Repository::open(&self.local_data_dir, &project_repository, user.as_ref()) - .context("failed to open repository")?; - - 
self.projects_storage - .add(&project) - .context("failed to add project to storage")?; - - // Create a .git/gitbutler directory for app data - if let Err(error) = std::fs::create_dir_all(project.gb_dir()) { - tracing::error!(project_id = %project.id, ?error, "failed to create {:?} on project add", project.gb_dir()); - } - - if let Some(watchers) = &self.watchers { - watchers.watch(&project)?; - } - - Ok(project) - } - - pub async fn update(&self, project: &UpdateRequest) -> Result { - if let Some(super::AuthKey::Local { - private_key_path, .. - }) = &project.preferred_key - { - use resolve_path::PathResolveExt; - let private_key_path = private_key_path.resolve(); - - if !private_key_path.exists() { - return Err(UpdateError::Validation(UpdateValidationError::KeyNotFound( - private_key_path.to_path_buf(), - ))); - } - - if !private_key_path.is_file() { - return Err(UpdateError::Validation(UpdateValidationError::KeyNotFile( - private_key_path.to_path_buf(), - ))); - } - } - - let updated = self - .projects_storage - .update(project) - .map_err(|error| match error { - super::storage::Error::NotFound => UpdateError::NotFound, - error => UpdateError::Other(error.into()), - })?; - - if let Some(watchers) = &self.watchers { - if let Some(api) = &project.api { - if api.sync { - if let Err(error) = watchers - .post(watcher::Event::FetchGitbutlerData(project.id)) - .await - { - tracing::error!( - project_id = %project.id, - ?error, - "failed to post fetch project event" - ); - } - } - - if let Err(error) = watchers - .post(watcher::Event::PushGitbutlerData(project.id)) - .await - { - tracing::error!( - project_id = %project.id, - ?error, - "failed to post push project event" - ); - } - } - } - - Ok(updated) - } - - pub fn get(&self, id: &ProjectId) -> Result { - let project = self.projects_storage.get(id).map_err(|error| match error { - super::storage::Error::NotFound => GetError::NotFound, - error => GetError::Other(error.into()), - }); - if let Ok(project) = &project { - 
if !project.gb_dir().exists() { - if let Err(error) = std::fs::create_dir_all(project.gb_dir()) { - tracing::error!(project_id = %project.id, ?error, "failed to create {:?} on project get", project.gb_dir()); - } - } - // Clean up old virtual_branches.toml that was never used - if project - .path - .join(".git") - .join("virtual_branches.toml") - .exists() - { - if let Err(error) = - std::fs::remove_file(project.path.join(".git").join("virtual_branches.toml")) - { - tracing::error!(project_id = %project.id, ?error, "failed to remove old virtual_branches.toml"); - } - } - } - project - } - - pub fn list(&self) -> Result, ListError> { - self.projects_storage - .list() - .map_err(|error| ListError::Other(error.into())) - } - - pub async fn delete(&self, id: &ProjectId) -> Result<(), DeleteError> { - let project = match self.projects_storage.get(id) { - Ok(project) => Ok(project), - Err(super::storage::Error::NotFound) => return Ok(()), - Err(error) => Err(DeleteError::Other(error.into())), - }?; - - if let Some(watchers) = &self.watchers { - if let Err(error) = watchers.stop(id).await { - tracing::error!( - project_id = %id, - ?error, - "failed to stop watcher for project", - ); - } - } - - self.projects_storage - .purge(&project.id) - .map_err(|error| DeleteError::Other(error.into()))?; - - if let Err(error) = std::fs::remove_dir_all( - self.local_data_dir - .join("projects") - .join(project.id.to_string()), - ) { - tracing::error!(project_id = %id, ?error, "failed to remove project data",); - } - - if let Err(error) = std::fs::remove_file(project.path.join(".git/gitbutler.json")) { - tracing::error!(project_id = %project.id, ?error, "failed to remove .git/gitbutler.json data",); - } - - let virtual_branches_path = project.path.join(".git/virtual_branches.toml"); - if virtual_branches_path.exists() { - if let Err(error) = std::fs::remove_file(virtual_branches_path) { - tracing::error!(project_id = %project.id, ?error, "failed to remove .git/virtual_branches.toml 
data",); - } - } - - Ok(()) - } - - pub fn get_local_config( - &self, - id: &ProjectId, - key: &str, - ) -> Result, ConfigError> { - let project = self.projects_storage.get(id).map_err(|error| match error { - super::storage::Error::NotFound => ConfigError::NotFound, - error => ConfigError::Other(error.into()), - })?; - - let repo = project_repository::Repository::open(&project) - .map_err(|e| ConfigError::Other(e.into()))?; - repo.config() - .get_local(key) - .map_err(|e| ConfigError::Other(e.into())) - } - - pub fn set_local_config( - &self, - id: &ProjectId, - key: &str, - value: &str, - ) -> Result<(), ConfigError> { - let project = self.projects_storage.get(id).map_err(|error| match error { - super::storage::Error::NotFound => ConfigError::NotFound, - error => ConfigError::Other(error.into()), - })?; - - let repo = project_repository::Repository::open(&project) - .map_err(|e| ConfigError::Other(e.into()))?; - repo.config() - .set_local(key, value) - .map_err(|e| ConfigError::Other(e.into()))?; - - Ok(()) - } -} - -#[derive(Debug, thiserror::Error)] -pub enum ConfigError { - #[error("project not found")] - NotFound, - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum DeleteError { - #[error(transparent)] - Other(anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum ListError { - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum GetError { - #[error("project not found")] - NotFound, - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum UpdateError { - #[error("project not found")] - NotFound, - #[error(transparent)] - Validation(UpdateValidationError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum UpdateValidationError { - #[error("{0} not found")] - KeyNotFound(PathBuf), - #[error("{0} is not a file")] - KeyNotFile(PathBuf), -} - 
-#[derive(Debug, thiserror::Error)] -pub enum AddError { - #[error("not a directory")] - NotADirectory, - #[error("not a git repository")] - NotAGitRepository, - #[error("path not found")] - PathNotFound, - #[error("project already exists")] - AlreadyExists, - #[error("submodules not supported")] - SubmodulesNotSupported, - #[error(transparent)] - User(#[from] users::GetError), - #[error(transparent)] - OpenProjectRepository(#[from] project_repository::OpenError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} diff --git a/gitbutler-app/src/projects/project.rs b/gitbutler-app/src/projects/project.rs deleted file mode 100644 index ded7c8edb..000000000 --- a/gitbutler-app/src/projects/project.rs +++ /dev/null @@ -1,112 +0,0 @@ -use std::{ - path::{self, PathBuf}, - time, -}; - -use serde::{Deserialize, Serialize}; - -use crate::{git, id::Id, types::default_true::DefaultTrue}; - -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] -#[serde(rename_all = "camelCase")] -pub enum AuthKey { - #[default] - Default, - Generated, - SystemExecutable, - GitCredentialsHelper, - Local { - private_key_path: path::PathBuf, - }, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct ApiProject { - pub name: String, - pub description: Option, - pub repository_id: String, - pub git_url: String, - pub code_git_url: Option, - pub created_at: String, - pub updated_at: String, - pub sync: bool, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -#[serde(rename_all = "camelCase")] -pub enum FetchResult { - Fetched { - timestamp: time::SystemTime, - }, - Error { - timestamp: time::SystemTime, - error: String, - }, -} - -impl FetchResult { - pub fn timestamp(&self) -> &time::SystemTime { - match self { - FetchResult::Fetched { timestamp } | FetchResult::Error { timestamp, .. 
} => timestamp, - } - } -} - -#[derive(Debug, Deserialize, Serialize, Copy, Clone)] -pub struct CodePushState { - pub id: git::Oid, - pub timestamp: time::SystemTime, -} - -pub type ProjectId = Id; - -#[derive(Debug, Deserialize, Serialize, Clone, Default)] -pub struct Project { - pub id: ProjectId, - pub title: String, - pub description: Option, - pub path: path::PathBuf, - #[serde(default)] - pub preferred_key: AuthKey, - /// if ok_with_force_push is true, we'll not try to avoid force pushing - /// for example, when updating base branch - #[serde(default)] - pub ok_with_force_push: DefaultTrue, - pub api: Option, - #[serde(default)] - pub gitbutler_data_last_fetch: Option, - #[serde(default)] - pub gitbutler_code_push_state: Option, - #[serde(default)] - pub project_data_last_fetch: Option, - #[serde(default)] - pub omit_certificate_check: Option, - #[serde(default)] - pub use_diff_context: Option, -} - -impl AsRef for Project { - fn as_ref(&self) -> &Project { - self - } -} - -impl Project { - pub fn is_sync_enabled(&self) -> bool { - self.api.as_ref().map(|api| api.sync).unwrap_or_default() - } - - pub fn has_code_url(&self) -> bool { - self.api - .as_ref() - .map(|api| api.code_git_url.is_some()) - .unwrap_or_default() - } - - /// Returns the path to the directory containing the `GitButler` state for this project. - /// - /// Normally this is `.git/gitbutler` in the project's repository. 
- pub fn gb_dir(&self) -> PathBuf { - self.path.join(".git").join("gitbutler") - } -} diff --git a/gitbutler-app/src/projects/storage.rs b/gitbutler-app/src/projects/storage.rs deleted file mode 100644 index dab7adbb0..000000000 --- a/gitbutler-app/src/projects/storage.rs +++ /dev/null @@ -1,162 +0,0 @@ -use serde::{Deserialize, Serialize}; - -use crate::{ - projects::{project, ProjectId}, - storage, -}; - -const PROJECTS_FILE: &str = "projects.json"; - -#[derive(Debug, Clone)] -pub struct Storage { - storage: storage::Storage, -} - -#[derive(Debug, Serialize, Deserialize, Default)] -pub struct UpdateRequest { - pub id: ProjectId, - pub title: Option, - pub description: Option, - pub api: Option, - pub gitbutler_data_last_fetched: Option, - pub preferred_key: Option, - pub ok_with_force_push: Option, - pub gitbutler_code_push_state: Option, - pub project_data_last_fetched: Option, - pub omit_certificate_check: Option, - pub use_diff_context: Option, -} - -#[derive(Debug, thiserror::Error)] -pub enum Error { - #[error(transparent)] - Storage(#[from] storage::Error), - #[error(transparent)] - Json(#[from] serde_json::Error), - #[error("project not found")] - NotFound, -} - -impl Storage { - pub fn new(storage: storage::Storage) -> Storage { - Storage { storage } - } - - pub fn from_path>(path: P) -> Storage { - Storage::new(storage::Storage::new(path)) - } - - pub fn list(&self) -> Result, Error> { - match self.storage.read(PROJECTS_FILE)? 
{ - Some(projects) => { - let all_projects: Vec = serde_json::from_str(&projects)?; - let all_projects: Vec = all_projects - .into_iter() - .map(|mut p| { - // backwards compatibility for description field - if let Some(api_description) = - p.api.as_ref().and_then(|api| api.description.as_ref()) - { - p.description = Some(api_description.to_string()); - } - p - }) - .collect(); - Ok(all_projects) - } - None => Ok(vec![]), - } - } - - pub fn get(&self, id: &ProjectId) -> Result { - let projects = self.list()?; - for project in &projects { - self.update(&UpdateRequest { - id: project.id, - preferred_key: Some(project.preferred_key.clone()), - ..Default::default() - })?; - } - match projects.into_iter().find(|p| p.id == *id) { - Some(project) => Ok(project), - None => Err(Error::NotFound), - } - } - - pub fn update(&self, update_request: &UpdateRequest) -> Result { - let mut projects = self.list()?; - let project = projects - .iter_mut() - .find(|p| p.id == update_request.id) - .ok_or(Error::NotFound)?; - - if let Some(title) = &update_request.title { - project.title = title.clone(); - } - - if let Some(description) = &update_request.description { - project.description = Some(description.clone()); - } - - if let Some(api) = &update_request.api { - project.api = Some(api.clone()); - } - - if let Some(preferred_key) = &update_request.preferred_key { - project.preferred_key = preferred_key.clone(); - } - - if let Some(gitbutler_data_last_fetched) = - update_request.gitbutler_data_last_fetched.as_ref() - { - project.gitbutler_data_last_fetch = Some(gitbutler_data_last_fetched.clone()); - } - - if let Some(project_data_last_fetched) = update_request.project_data_last_fetched.as_ref() { - project.project_data_last_fetch = Some(project_data_last_fetched.clone()); - } - - if let Some(state) = update_request.gitbutler_code_push_state { - project.gitbutler_code_push_state = Some(state); - } - - if let Some(ok_with_force_push) = update_request.ok_with_force_push { - 
*project.ok_with_force_push = ok_with_force_push; - } - - if let Some(omit_certificate_check) = update_request.omit_certificate_check { - project.omit_certificate_check = Some(omit_certificate_check); - } - - if let Some(use_diff_context) = update_request.use_diff_context { - project.use_diff_context = Some(use_diff_context); - } - - self.storage - .write(PROJECTS_FILE, &serde_json::to_string_pretty(&projects)?)?; - - Ok(projects - .iter() - .find(|p| p.id == update_request.id) - .unwrap() - .clone()) - } - - pub fn purge(&self, id: &ProjectId) -> Result<(), Error> { - let mut projects = self.list()?; - if let Some(index) = projects.iter().position(|p| p.id == *id) { - projects.remove(index); - self.storage - .write(PROJECTS_FILE, &serde_json::to_string_pretty(&projects)?)?; - } - Ok(()) - } - - pub fn add(&self, project: &project::Project) -> Result<(), Error> { - let mut projects = self.list()?; - projects.push(project.clone()); - let projects = serde_json::to_string_pretty(&projects)?; - self.storage.write(PROJECTS_FILE, &projects)?; - Ok(()) - } -} diff --git a/gitbutler-app/src/reader.rs b/gitbutler-app/src/reader.rs deleted file mode 100644 index 5f64a5072..000000000 --- a/gitbutler-app/src/reader.rs +++ /dev/null @@ -1,443 +0,0 @@ -use std::{ - fs, io, num, - path::{Path, PathBuf}, - str, - sync::Arc, -}; - -use anyhow::{Context, Result}; -use serde::{ser::SerializeStruct, Serialize}; - -use crate::{git, lock, path::Normalize}; - -#[derive(Debug, Clone, thiserror::Error)] -pub enum Error { - #[error("file not found")] - NotFound, - #[error("io error: {0}")] - Io(Arc), - #[error(transparent)] - From(FromError), -} - -impl From for Error { - fn from(error: io::Error) -> Self { - Error::Io(Arc::new(error)) - } -} - -impl From for Error { - fn from(error: FromError) -> Self { - Error::From(error) - } -} - -pub enum Reader<'reader> { - Filesystem(FilesystemReader), - Commit(CommitReader<'reader>), - Prefixed(PrefixedReader<'reader>), -} - -impl<'reader> 
Reader<'reader> { - pub fn open>(root: P) -> Result { - FilesystemReader::open(root).map(Reader::Filesystem) - } - - pub fn sub>(&'reader self, prefix: P) -> Self { - Reader::Prefixed(PrefixedReader::new(self, prefix)) - } - - pub fn commit_id(&self) -> Option { - match self { - Reader::Filesystem(_) => None, - Reader::Commit(reader) => Some(reader.get_commit_oid()), - Reader::Prefixed(reader) => reader.reader.commit_id(), - } - } - - pub fn from_commit( - repository: &'reader git::Repository, - commit: &git::Commit<'reader>, - ) -> Result { - Ok(Reader::Commit(CommitReader::new(repository, commit)?)) - } - - pub fn exists>(&self, file_path: P) -> Result { - match self { - Reader::Filesystem(reader) => reader.exists(file_path), - Reader::Commit(reader) => Ok(reader.exists(file_path)), - Reader::Prefixed(reader) => reader.exists(file_path), - } - } - - pub fn read>(&self, path: P) -> Result { - let mut contents = self.batch(&[path])?; - contents - .pop() - .expect("batch should return at least one result") - } - - pub fn batch>( - &self, - paths: &[P], - ) -> Result>, io::Error> { - match self { - Reader::Filesystem(reader) => reader.batch(|root| { - paths - .iter() - .map(|path| { - let path = root.join(path); - if !path.exists() { - return Err(Error::NotFound); - } - let content = Content::read_from_file(&path)?; - Ok(content) - }) - .collect() - }), - Reader::Commit(reader) => Ok(paths - .iter() - .map(|path| reader.read(path.normalize())) - .collect()), - Reader::Prefixed(reader) => reader.batch(paths), - } - } - - pub fn list_files>(&self, dir_path: P) -> Result> { - match self { - Reader::Filesystem(reader) => reader.list_files(dir_path.as_ref()), - Reader::Commit(reader) => reader.list_files(dir_path.as_ref()), - Reader::Prefixed(reader) => reader.list_files(dir_path.as_ref()), - } - } -} - -pub struct FilesystemReader(lock::Dir); - -impl FilesystemReader { - fn open>(root: P) -> Result { - lock::Dir::new(root).map(Self) - } - - fn exists>(&self, path: P) -> 
Result { - let exists = self.0.batch(|root| root.join(path.as_ref()).exists())?; - Ok(exists) - } - - fn batch(&self, action: impl FnOnce(&Path) -> R) -> Result { - self.0.batch(action) - } - - fn list_files>(&self, path: P) -> Result> { - let path = path.as_ref(); - self.0 - .batch(|root| crate::fs::list_files(root.join(path).as_path(), &[Path::new(".git")]))? - } -} - -pub struct CommitReader<'reader> { - repository: &'reader git::Repository, - commit_oid: git::Oid, - tree: git::Tree<'reader>, -} - -impl<'reader> CommitReader<'reader> { - pub fn new( - repository: &'reader git::Repository, - commit: &git::Commit<'reader>, - ) -> Result> { - let tree = commit - .tree() - .with_context(|| format!("{}: tree not found", commit.id()))?; - Ok(CommitReader { - repository, - tree, - commit_oid: commit.id(), - }) - } - - pub fn get_commit_oid(&self) -> git::Oid { - self.commit_oid - } - - fn read>(&self, path: P) -> Result { - let path = path.as_ref(); - let entry = match self - .tree - .get_path(Path::new(path)) - .context(format!("{}: tree entry not found", path.display())) - { - Ok(entry) => entry, - Err(_) => return Err(Error::NotFound), - }; - let blob = match self.repository.find_blob(entry.id()) { - Ok(blob) => blob, - Err(_) => return Err(Error::NotFound), - }; - Ok(Content::from(&blob)) - } - - pub fn list_files>(&self, dir_path: P) -> Result> { - let dir_path = dir_path.as_ref(); - let mut files = vec![]; - self.tree - .walk(|root, entry| { - if entry.kind() == Some(git2::ObjectType::Tree) { - return git::TreeWalkResult::Continue; - } - - if entry.name().is_none() { - return git::TreeWalkResult::Continue; - } - let entry_path = Path::new(root).join(entry.name().unwrap()); - - if !entry_path.starts_with(dir_path) { - return git::TreeWalkResult::Continue; - } - - files.push(entry_path.strip_prefix(dir_path).unwrap().to_path_buf()); - - git::TreeWalkResult::Continue - }) - .with_context(|| format!("{}: tree walk failed", dir_path.display()))?; - - Ok(files) - } - - 
pub fn exists>(&self, file_path: P) -> bool { - self.tree.get_path(file_path.normalize()).is_ok() - } -} - -pub struct PrefixedReader<'r> { - reader: &'r Reader<'r>, - prefix: PathBuf, -} - -impl<'r> PrefixedReader<'r> { - fn new>(reader: &'r Reader, prefix: P) -> Self { - PrefixedReader { - reader, - prefix: prefix.as_ref().to_path_buf(), - } - } - - pub fn batch>( - &self, - paths: &[P], - ) -> Result>, io::Error> { - let paths = paths - .iter() - .map(|path| self.prefix.join(path)) - .collect::>(); - self.reader.batch(paths.as_slice()) - } - - fn list_files>(&self, dir_path: P) -> Result> { - self.reader.list_files(self.prefix.join(dir_path.as_ref())) - } - - fn exists>(&self, file_path: P) -> Result { - self.reader.exists(self.prefix.join(file_path.as_ref())) - } -} - -#[derive(Debug, Clone, thiserror::Error)] -pub enum FromError { - #[error(transparent)] - ParseInt(#[from] num::ParseIntError), - #[error(transparent)] - ParseBool(#[from] str::ParseBoolError), - #[error("file is binary")] - Binary, - #[error("file too large")] - Large, -} - -#[derive(Debug, Clone, PartialEq)] -pub enum Content { - UTF8(String), - Binary, - Large, -} - -impl Serialize for Content { - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - match self { - Content::UTF8(text) => { - let mut state = serializer.serialize_struct("Content", 2)?; - state.serialize_field("type", "utf8")?; - state.serialize_field("value", text)?; - state.end() - } - Content::Binary => { - let mut state = serializer.serialize_struct("Content", 1)?; - state.serialize_field("type", "binary")?; - state.end() - } - Content::Large => { - let mut state = serializer.serialize_struct("Content", 1)?; - state.serialize_field("type", "large")?; - state.end() - } - } - } -} - -impl Content { - const MAX_SIZE: usize = 1024 * 1024 * 10; // 10 MB - - pub fn read_from_file>(path: P) -> Result { - let path = path.as_ref(); - let metadata = fs::metadata(path)?; - if metadata.len() > 
Content::MAX_SIZE as u64 { - return Ok(Content::Large); - } - let content = fs::read(path)?; - Ok(content.as_slice().into()) - } -} - -impl From<&str> for Content { - fn from(text: &str) -> Self { - if text.len() > Self::MAX_SIZE { - Content::Large - } else { - Content::UTF8(text.to_string()) - } - } -} - -impl From<&git::Blob<'_>> for Content { - fn from(value: &git::Blob) -> Self { - if value.size() > Content::MAX_SIZE { - Content::Large - } else { - value.content().into() - } - } -} - -impl From<&[u8]> for Content { - fn from(bytes: &[u8]) -> Self { - if bytes.len() > Self::MAX_SIZE { - Content::Large - } else { - match String::from_utf8(bytes.to_vec()) { - Err(_) => Content::Binary, - Ok(text) => Content::UTF8(text), - } - } - } -} - -impl TryFrom<&Content> for usize { - type Error = FromError; - - fn try_from(content: &Content) -> Result { - match content { - Content::UTF8(text) => text.parse().map_err(FromError::ParseInt), - Content::Binary => Err(FromError::Binary), - Content::Large => Err(FromError::Large), - } - } -} - -impl TryFrom for usize { - type Error = FromError; - - fn try_from(content: Content) -> Result { - Self::try_from(&content) - } -} - -impl TryFrom<&Content> for String { - type Error = FromError; - - fn try_from(content: &Content) -> Result { - match content { - Content::UTF8(text) => Ok(text.clone()), - Content::Binary => Err(FromError::Binary), - Content::Large => Err(FromError::Large), - } - } -} - -impl TryFrom for String { - type Error = FromError; - - fn try_from(content: Content) -> Result { - Self::try_from(&content) - } -} - -impl TryFrom for i64 { - type Error = FromError; - - fn try_from(content: Content) -> Result { - Self::try_from(&content) - } -} - -impl TryFrom<&Content> for i64 { - type Error = FromError; - - fn try_from(content: &Content) -> Result { - let text: String = content.try_into()?; - text.parse().map_err(FromError::ParseInt) - } -} - -impl TryFrom for u64 { - type Error = FromError; - - fn try_from(content: 
Content) -> Result { - Self::try_from(&content) - } -} - -impl TryFrom<&Content> for u64 { - type Error = FromError; - - fn try_from(content: &Content) -> Result { - let text: String = content.try_into()?; - text.parse().map_err(FromError::ParseInt) - } -} - -impl TryFrom for u128 { - type Error = FromError; - - fn try_from(content: Content) -> Result { - Self::try_from(&content) - } -} - -impl TryFrom<&Content> for u128 { - type Error = FromError; - - fn try_from(content: &Content) -> Result { - let text: String = content.try_into()?; - text.parse().map_err(FromError::ParseInt) - } -} - -impl TryFrom for bool { - type Error = FromError; - - fn try_from(content: Content) -> Result { - Self::try_from(&content) - } -} - -impl TryFrom<&Content> for bool { - type Error = FromError; - - fn try_from(content: &Content) -> Result { - let text: String = content.try_into()?; - text.parse().map_err(FromError::ParseBool) - } -} diff --git a/gitbutler-app/src/sentry.rs b/gitbutler-app/src/sentry.rs index 2b2f9d9ad..9ca06ed08 100644 --- a/gitbutler-app/src/sentry.rs +++ b/gitbutler-app/src/sentry.rs @@ -12,7 +12,7 @@ use sentry_tracing::SentryLayer; use tracing::Subscriber; use tracing_subscriber::registry::LookupSpan; -use crate::users; +use gitbutler::users; static SENTRY_QUOTA: Quota = Quota::per_second(nonzero!(1_u32)); // 1 per second at most. 
static SENTRY_LIMIT: OnceCell> = OnceCell::new(); diff --git a/gitbutler-app/src/sessions.rs b/gitbutler-app/src/sessions.rs index c904b9115..09a450f79 100644 --- a/gitbutler-app/src/sessions.rs +++ b/gitbutler-app/src/sessions.rs @@ -1,15 +1,42 @@ -mod controller; -mod iterator; -mod reader; -pub mod session; -mod writer; +pub mod commands { + use tauri::{AppHandle, Manager}; + use tracing::instrument; -pub mod commands; -pub mod database; + use crate::error::{Code, Error}; -pub use controller::Controller; -pub use database::Database; -pub use iterator::SessionsIterator; -pub use reader::SessionReader as Reader; -pub use session::{Meta, Session, SessionError, SessionId}; -pub use writer::SessionWriter as Writer; + use gitbutler::sessions::{ + Session, + {controller::ListError, Controller}, + }; + + impl From for Error { + fn from(value: ListError) -> Self { + match value { + ListError::UsersError(error) => Error::from(error), + ListError::ProjectsError(error) => Error::from(error), + ListError::ProjectRepositoryError(error) => Error::from(error), + ListError::Other(error) => { + tracing::error!(?error); + Error::Unknown + } + } + } + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn list_sessions( + handle: AppHandle, + project_id: &str, + earliest_timestamp_ms: Option, + ) -> Result, Error> { + let project_id = project_id.parse().map_err(|_| Error::UserError { + code: Code::Validation, + message: "Malformed project id".to_string(), + })?; + handle + .state::() + .list(&project_id, earliest_timestamp_ms) + .map_err(Into::into) + } +} diff --git a/gitbutler-app/src/sessions/commands.rs b/gitbutler-app/src/sessions/commands.rs index 57e3a6a95..8b1378917 100644 --- a/gitbutler-app/src/sessions/commands.rs +++ b/gitbutler-app/src/sessions/commands.rs @@ -1,40 +1 @@ -use tauri::{AppHandle, Manager}; -use tracing::instrument; -use crate::error::{Code, Error}; - -use super::{ - controller::{Controller, ListError}, - Session, -}; - -impl From for 
Error { - fn from(value: ListError) -> Self { - match value { - ListError::UsersError(error) => Error::from(error), - ListError::ProjectsError(error) => Error::from(error), - ListError::ProjectRepositoryError(error) => Error::from(error), - ListError::Other(error) => { - tracing::error!(?error); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn list_sessions( - handle: AppHandle, - project_id: &str, - earliest_timestamp_ms: Option, -) -> Result, Error> { - let project_id = project_id.parse().map_err(|_| Error::UserError { - code: Code::Validation, - message: "Malformed project id".to_string(), - })?; - handle - .state::() - .list(&project_id, earliest_timestamp_ms) - .map_err(Into::into) -} diff --git a/gitbutler-app/src/sessions/controller.rs b/gitbutler-app/src/sessions/controller.rs deleted file mode 100644 index 9ea409200..000000000 --- a/gitbutler-app/src/sessions/controller.rs +++ /dev/null @@ -1,91 +0,0 @@ -use std::path; - -use anyhow::Context; - -use crate::{ - gb_repository, project_repository, - projects::{self, ProjectId}, - users, -}; - -use super::{Database, Session}; - -#[derive(Clone)] -pub struct Controller { - local_data_dir: path::PathBuf, - sessions_database: Database, - - projects: projects::Controller, - users: users::Controller, -} - -#[derive(Debug, thiserror::Error)] -pub enum ListError { - #[error(transparent)] - ProjectsError(#[from] projects::GetError), - #[error(transparent)] - ProjectRepositoryError(#[from] project_repository::OpenError), - #[error(transparent)] - UsersError(#[from] users::GetError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -impl Controller { - pub fn new( - local_data_dir: path::PathBuf, - sessions_database: Database, - projects: projects::Controller, - users: users::Controller, - ) -> Self { - Self { - local_data_dir, - sessions_database, - projects, - users, - } - } - - pub fn list( - &self, - project_id: &ProjectId, - earliest_timestamp_ms: 
Option, - ) -> Result, ListError> { - let sessions = self - .sessions_database - .list_by_project_id(project_id, earliest_timestamp_ms)?; - - let project = self.projects.get(project_id)?; - let project_repository = project_repository::Repository::open(&project)?; - let user = self.users.get_user()?; - let gb_repository = gb_repository::Repository::open( - &self.local_data_dir, - &project_repository, - user.as_ref(), - ) - .context("failed to open gb repository")?; - - // this is a hack to account for a case when we have a session created, but fs was never - // touched, so the wathcer never picked up the session - let current_session = gb_repository - .get_current_session() - .context("failed to get current session")?; - let have_to_index = matches!( - (current_session.as_ref(), sessions.first()), - (Some(_), None) - ); - if !have_to_index { - return Ok(sessions); - } - - let sessions_iter = gb_repository.get_sessions_iterator()?; - let mut sessions = sessions_iter.collect::, _>>()?; - self.sessions_database - .insert(project_id, &sessions.iter().collect::>())?; - if let Some(session) = current_session { - self.sessions_database.insert(project_id, &[&session])?; - sessions.insert(0, session); - } - Ok(sessions) - } -} diff --git a/gitbutler-app/src/sessions/database.rs b/gitbutler-app/src/sessions/database.rs deleted file mode 100644 index 3c60790f9..000000000 --- a/gitbutler-app/src/sessions/database.rs +++ /dev/null @@ -1,182 +0,0 @@ -use anyhow::{Context, Result}; - -use crate::{database, projects::ProjectId}; - -use super::session::{self, SessionId}; - -#[derive(Clone)] -pub struct Database { - database: database::Database, -} - -impl Database { - pub fn new(database: database::Database) -> Database { - Database { database } - } - - pub fn insert(&self, project_id: &ProjectId, sessions: &[&session::Session]) -> Result<()> { - self.database.transaction(|tx| -> Result<()> { - let mut stmt = insert_stmt(tx).context("Failed to prepare insert statement")?; - for 
session in sessions { - stmt.execute(rusqlite::named_params! { - ":id": session.id, - ":project_id": project_id, - ":hash": session.hash.map(|hash| hash.to_string()), - ":branch": session.meta.branch, - ":commit": session.meta.commit, - ":start_timestamp_ms": session.meta.start_timestamp_ms.to_string(), - ":last_timestamp_ms": session.meta.last_timestamp_ms.to_string(), - }) - .context("Failed to execute insert statement")?; - } - Ok(()) - })?; - - Ok(()) - } - - pub fn list_by_project_id( - &self, - project_id: &ProjectId, - earliest_timestamp_ms: Option, - ) -> Result> { - self.database.transaction(|tx| { - let mut stmt = list_by_project_id_stmt(tx) - .context("Failed to prepare list_by_project_id statement")?; - let mut rows = stmt - .query(rusqlite::named_params! { - ":project_id": project_id, - }) - .context("Failed to execute list_by_project_id statement")?; - - let mut sessions = Vec::new(); - while let Some(row) = rows - .next() - .context("Failed to iterate over list_by_project_id results")? - { - let session = parse_row(row)?; - - if let Some(earliest_timestamp_ms) = earliest_timestamp_ms { - if session.meta.last_timestamp_ms < earliest_timestamp_ms { - continue; - } - } - - sessions.push(session); - } - Ok(sessions) - }) - } - - pub fn get_by_project_id_id( - &self, - project_id: &ProjectId, - id: &SessionId, - ) -> Result> { - self.database.transaction(|tx| { - let mut stmt = get_by_project_id_id_stmt(tx) - .context("Failed to prepare get_by_project_id_id statement")?; - let mut rows = stmt - .query(rusqlite::named_params! { - ":project_id": project_id, - ":id": id, - }) - .context("Failed to execute get_by_project_id_id statement")?; - if let Some(row) = rows - .next() - .context("Failed to iterate over get_by_project_id_id results")? 
- { - Ok(Some(parse_row(row)?)) - } else { - Ok(None) - } - }) - } - - pub fn get_by_id(&self, id: &SessionId) -> Result> { - self.database.transaction(|tx| { - let mut stmt = get_by_id_stmt(tx).context("Failed to prepare get_by_id statement")?; - let mut rows = stmt - .query(rusqlite::named_params! { - ":id": id, - }) - .context("Failed to execute get_by_id statement")?; - if let Some(row) = rows - .next() - .context("Failed to iterate over get_by_id results")? - { - Ok(Some(parse_row(row)?)) - } else { - Ok(None) - } - }) - } -} - -fn parse_row(row: &rusqlite::Row) -> Result { - Ok(session::Session { - id: row.get(0).context("Failed to get id")?, - hash: row - .get::>(2) - .context("Failed to get hash")? - .map(|hash| hash.parse().context("Failed to parse hash")) - .transpose()?, - meta: session::Meta { - branch: row.get(3).context("Failed to get branch")?, - commit: row.get(4).context("Failed to get commit")?, - start_timestamp_ms: row - .get::(5) - .context("Failed to get start_timestamp_ms")? - .parse() - .context("Failed to parse start_timestamp_ms")?, - last_timestamp_ms: row - .get::(6) - .context("Failed to get last_timestamp_ms")? - .parse() - .context("Failed to parse last_timestamp_ms")?, - }, - }) -} - -fn list_by_project_id_stmt<'conn>( - tx: &'conn rusqlite::Transaction, -) -> Result> { - Ok(tx.prepare_cached( - "SELECT `id`, `project_id`, `hash`, `branch`, `commit`, `start_timestamp_ms`, `last_timestamp_ms` FROM `sessions` WHERE `project_id` = :project_id ORDER BY `start_timestamp_ms` DESC", - )?) -} - -fn get_by_project_id_id_stmt<'conn>( - tx: &'conn rusqlite::Transaction, -) -> Result> { - Ok(tx.prepare_cached( - "SELECT `id`, `project_id`, `hash`, `branch`, `commit`, `start_timestamp_ms`, `last_timestamp_ms` FROM `sessions` WHERE `project_id` = :project_id AND `id` = :id", - )?) 
-} - -fn get_by_id_stmt<'conn>( - tx: &'conn rusqlite::Transaction, -) -> Result> { - Ok(tx.prepare_cached( - "SELECT `id`, `project_id`, `hash`, `branch`, `commit`, `start_timestamp_ms`, `last_timestamp_ms` FROM `sessions` WHERE `id` = :id", - )?) -} - -fn insert_stmt<'conn>( - tx: &'conn rusqlite::Transaction, -) -> Result> { - Ok(tx.prepare_cached( - "INSERT INTO 'sessions' ( - `id`, `project_id`, `hash`, `branch`, `commit`, `start_timestamp_ms`, `last_timestamp_ms` - ) VALUES ( - :id, :project_id, :hash, :branch, :commit, :start_timestamp_ms, :last_timestamp_ms - ) ON CONFLICT(`id`) DO UPDATE SET - `project_id` = :project_id, - `hash` = :hash, - `branch` = :branch, - `commit` = :commit, - `start_timestamp_ms` = :start_timestamp_ms, - `last_timestamp_ms` = :last_timestamp_ms - ", - )?) -} diff --git a/gitbutler-app/src/sessions/iterator.rs b/gitbutler-app/src/sessions/iterator.rs deleted file mode 100644 index 249db0eed..000000000 --- a/gitbutler-app/src/sessions/iterator.rs +++ /dev/null @@ -1,68 +0,0 @@ -use anyhow::{Context, Result}; - -use crate::{git, reader}; - -use super::{Session, SessionError}; - -pub struct SessionsIterator<'iterator> { - git_repository: &'iterator git::Repository, - iter: git2::Revwalk<'iterator>, -} - -impl<'iterator> SessionsIterator<'iterator> { - pub(crate) fn new(git_repository: &'iterator git::Repository) -> Result { - let mut iter = git_repository - .revwalk() - .context("failed to create revwalk")?; - - iter.set_sorting(git2::Sort::TOPOLOGICAL | git2::Sort::TIME) - .context("failed to set sorting")?; - - let branches = git_repository.branches(None)?; - for branch in branches { - let (branch, _) = branch.context("failed to get branch")?; - iter.push(branch.peel_to_commit()?.id().into()) - .with_context(|| format!("failed to push branch {:?}", branch.name()))?; - } - - Ok(Self { - git_repository, - iter, - }) - } -} - -impl<'iterator> Iterator for SessionsIterator<'iterator> { - type Item = Result; - - fn next(&mut self) -> 
Option { - match self.iter.next() { - Some(Result::Ok(oid)) => { - let commit = match self.git_repository.find_commit(oid.into()) { - Result::Ok(commit) => commit, - Err(err) => return Some(Err(err.into())), - }; - - if commit.parent_count() == 0 { - // skip initial commit, as it's impossible to get a list of files from it - // it's only used to bootstrap the history - return self.next(); - } - - let commit_reader = match reader::Reader::from_commit(self.git_repository, &commit) - { - Result::Ok(commit_reader) => commit_reader, - Err(err) => return Some(Err(err)), - }; - let session = match Session::try_from(&commit_reader) { - Result::Ok(session) => session, - Err(SessionError::NoSession) => return None, - Err(err) => return Some(Err(err.into())), - }; - Some(Ok(session)) - } - Some(Err(err)) => Some(Err(err.into())), - None => None, - } - } -} diff --git a/gitbutler-app/src/sessions/reader.rs b/gitbutler-app/src/sessions/reader.rs deleted file mode 100644 index 8ba8405a0..000000000 --- a/gitbutler-app/src/sessions/reader.rs +++ /dev/null @@ -1,105 +0,0 @@ -use std::{collections::HashMap, path}; - -use anyhow::{anyhow, Context, Result}; - -use crate::{gb_repository, reader}; - -use super::Session; - -pub struct SessionReader<'reader> { - // reader for the current session. 
commit or wd - reader: reader::Reader<'reader>, - // reader for the previous session's commit - previous_reader: reader::Reader<'reader>, -} - -#[derive(thiserror::Error, Debug)] -pub enum FileError { - #[error(transparent)] - Reader(#[from] reader::Error), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -impl<'reader> SessionReader<'reader> { - pub fn reader(&self) -> &reader::Reader<'reader> { - &self.reader - } - - pub fn open(repository: &'reader gb_repository::Repository, session: &Session) -> Result { - let wd_reader = reader::Reader::open(&repository.root())?; - - if let Ok(reader::Content::UTF8(current_session_id)) = wd_reader.read("session/meta/id") { - if current_session_id == session.id.to_string() { - let head_commit = repository.git_repository().head()?.peel_to_commit()?; - return Ok(SessionReader { - reader: wd_reader, - previous_reader: reader::Reader::from_commit( - repository.git_repository(), - &head_commit, - )?, - }); - } - } - - let session_hash = if let Some(hash) = &session.hash { - hash - } else { - return Err(anyhow!( - "can not open reader for {} because it has no commit hash nor it is a current session", - session.id - )); - }; - - let commit = repository - .git_repository() - .find_commit(*session_hash) - .context("failed to get commit")?; - let commit_reader = reader::Reader::from_commit(repository.git_repository(), &commit)?; - - Ok(SessionReader { - reader: commit_reader, - previous_reader: reader::Reader::from_commit( - repository.git_repository(), - &commit.parent(0)?, - )?, - }) - } - - pub fn files( - &self, - filter: Option<&[&path::Path]>, - ) -> Result, FileError> { - let wd_dir = path::Path::new("wd"); - let mut paths = self.previous_reader.list_files(wd_dir)?; - if let Some(filter) = filter { - paths = paths - .into_iter() - .filter(|file_path| filter.iter().any(|path| file_path.eq(path))) - .collect::>(); - } - paths = paths.iter().map(|path| wd_dir.join(path)).collect(); - let files = self - .previous_reader - 
.batch(&paths) - .context("failed to batch read")?; - - let files = files.into_iter().collect::, _>>()?; - - Ok(paths - .into_iter() - .zip(files) - .filter_map(|(path, file)| { - path.strip_prefix(wd_dir) - .ok() - .map(|path| (path.to_path_buf(), file)) - }) - .collect::>()) - } - - pub fn file>(&self, path: P) -> Result { - let path = path.as_ref(); - self.previous_reader - .read(std::path::Path::new("wd").join(path)) - } -} diff --git a/gitbutler-app/src/sessions/session.rs b/gitbutler-app/src/sessions/session.rs deleted file mode 100644 index c0feecc4e..000000000 --- a/gitbutler-app/src/sessions/session.rs +++ /dev/null @@ -1,126 +0,0 @@ -use std::path; - -use anyhow::{Context, Result}; -use serde::Serialize; -use thiserror::Error; - -use crate::{git, id::Id, reader}; - -#[derive(Debug, Clone, Serialize, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct Meta { - // timestamp of when the session was created - pub start_timestamp_ms: u128, - // timestamp of when the session was last active - pub last_timestamp_ms: u128, - // session branch name - pub branch: Option, - // session commit hash - pub commit: Option, -} - -pub type SessionId = Id; - -#[derive(Debug, Clone, Serialize, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct Session { - pub id: SessionId, - // if hash is not set, the session is not saved aka current - pub hash: Option, - pub meta: Meta, -} - -#[derive(Error, Debug)] -pub enum SessionError { - #[error("session does not exist")] - NoSession, - #[error("{0}")] - Other(#[from] anyhow::Error), -} - -impl TryFrom<&reader::Reader<'_>> for Session { - type Error = SessionError; - - fn try_from(reader: &reader::Reader) -> Result { - let results = reader - .batch(&[ - path::Path::new("session/meta/id"), - path::Path::new("session/meta/start"), - path::Path::new("session/meta/last"), - path::Path::new("session/meta/branch"), - path::Path::new("session/meta/commit"), - ]) - .context("failed to batch read")?; - - let id = &results[0]; 
- let start_timestamp_ms = &results[1]; - let last_timestamp_ms = &results[2]; - let branch = &results[3]; - let commit = &results[4]; - - let id = id.clone().map_err(|error| match error { - reader::Error::NotFound => SessionError::NoSession, - error => SessionError::Other(error.into()), - })?; - let id: String = id - .try_into() - .context("failed to parse session id as string") - .map_err(SessionError::Other)?; - let id: SessionId = id.parse().context("failed to parse session id as uuid")?; - - let start_timestamp_ms = start_timestamp_ms.clone().map_err(|error| match error { - reader::Error::NotFound => SessionError::NoSession, - error => SessionError::Other(error.into()), - })?; - - let start_timestamp_ms: u128 = start_timestamp_ms - .try_into() - .context("failed to parse session start timestamp as number") - .map_err(SessionError::Other)?; - - let last_timestamp_ms = last_timestamp_ms.clone().map_err(|error| match error { - reader::Error::NotFound => SessionError::NoSession, - error => SessionError::Other(error.into()), - })?; - - let last_timestamp_ms: u128 = last_timestamp_ms - .try_into() - .context("failed to parse session last timestamp as number") - .map_err(SessionError::Other)?; - - let branch = match branch.clone() { - Ok(branch) => { - let branch = branch - .try_into() - .context("failed to parse session branch as string")?; - Ok(Some(branch)) - } - Err(reader::Error::NotFound) => Ok(None), - Err(e) => Err(e), - } - .context("failed to parse session branch as string")?; - - let commit = match commit.clone() { - Ok(commit) => { - let commit = commit - .try_into() - .context("failed to parse session commit as string")?; - Ok(Some(commit)) - } - Err(reader::Error::NotFound) => Ok(None), - Err(e) => Err(e), - } - .context("failed to parse session commit as string")?; - - Ok(Self { - id, - hash: reader.commit_id(), - meta: Meta { - start_timestamp_ms, - last_timestamp_ms, - branch, - commit, - }, - }) - } -} diff --git 
a/gitbutler-app/src/sessions/writer.rs b/gitbutler-app/src/sessions/writer.rs deleted file mode 100644 index 9d8d1a162..000000000 --- a/gitbutler-app/src/sessions/writer.rs +++ /dev/null @@ -1,108 +0,0 @@ -use std::time; - -use anyhow::{anyhow, Context, Result}; - -use crate::{gb_repository, reader, writer}; - -use super::Session; - -pub struct SessionWriter<'writer> { - repository: &'writer gb_repository::Repository, - writer: writer::DirWriter, -} - -impl<'writer> SessionWriter<'writer> { - pub fn new(repository: &'writer gb_repository::Repository) -> Result { - writer::DirWriter::open(repository.root()) - .map(|writer| SessionWriter { repository, writer }) - } - - pub fn remove(&self) -> Result<()> { - self.writer.remove("session")?; - - tracing::debug!( - project_id = %self.repository.get_project_id(), - "deleted session" - ); - - Ok(()) - } - - pub fn write(&self, session: &Session) -> Result<()> { - if session.hash.is_some() { - return Err(anyhow!("can not open writer for a session with a hash")); - } - - let reader = reader::Reader::open(&self.repository.root()) - .context("failed to open current session reader")?; - - let current_session_id = - if let Ok(reader::Content::UTF8(current_session_id)) = reader.read("session/meta/id") { - Some(current_session_id) - } else { - None - }; - - if current_session_id.is_some() - && current_session_id.as_ref() != Some(&session.id.to_string()) - { - return Err(anyhow!( - "{}: can not open writer for {} because a writer for {} is still open", - self.repository.get_project_id(), - session.id, - current_session_id.unwrap() - )); - } - - let mut batch = vec![writer::BatchTask::Write( - "session/meta/last", - time::SystemTime::now() - .duration_since(time::SystemTime::UNIX_EPOCH) - .unwrap() - .as_millis() - .to_string(), - )]; - - if current_session_id.is_some() - && current_session_id.as_ref() == Some(&session.id.to_string()) - { - self.writer - .batch(&batch) - .context("failed to write last timestamp")?; - return Ok(()); 
- } - - batch.push(writer::BatchTask::Write( - "session/meta/id", - session.id.to_string(), - )); - batch.push(writer::BatchTask::Write( - "session/meta/start", - session.meta.start_timestamp_ms.to_string(), - )); - - if let Some(branch) = session.meta.branch.as_ref() { - batch.push(writer::BatchTask::Write( - "session/meta/branch", - branch.to_string(), - )); - } else { - batch.push(writer::BatchTask::Remove("session/meta/branch")); - } - - if let Some(commit) = session.meta.commit.as_ref() { - batch.push(writer::BatchTask::Write( - "session/meta/commit", - commit.to_string(), - )); - } else { - batch.push(writer::BatchTask::Remove("session/meta/commit")); - } - - self.writer - .batch(&batch) - .context("failed to write session meta")?; - - Ok(()) - } -} diff --git a/gitbutler-app/src/ssh.rs b/gitbutler-app/src/ssh.rs deleted file mode 100644 index fe4f62a84..000000000 --- a/gitbutler-app/src/ssh.rs +++ /dev/null @@ -1,67 +0,0 @@ -use std::{env, fs, path::Path}; - -use ssh2::{self, CheckResult, KnownHostFileKind}; - -use crate::git; - -#[derive(Debug, thiserror::Error)] -pub enum Error { - #[error(transparent)] - Ssh(ssh2::Error), - #[error(transparent)] - Io(std::io::Error), - #[error("mismatched host key")] - MismatchedHostKey, - #[error("failed to check the known hosts")] - Failure, -} - -pub fn check_known_host(remote_url: &git::Url) -> Result<(), Error> { - if remote_url.scheme != git::Scheme::Ssh { - return Ok(()); - } - - let host = if let Some(host) = remote_url.host.as_ref() { - host - } else { - return Ok(()); - }; - - let mut session = ssh2::Session::new().map_err(Error::Ssh)?; - session - .set_tcp_stream(std::net::TcpStream::connect(format!("{}:22", host)).map_err(Error::Io)?); - session.handshake().map_err(Error::Ssh)?; - - let mut known_hosts = session.known_hosts().map_err(Error::Ssh)?; - - // Initialize the known hosts with a global known hosts file - let dotssh = Path::new(&env::var("HOME").unwrap()).join(".ssh"); - let file = 
dotssh.join("known_hosts"); - if !file.exists() { - fs::create_dir_all(&dotssh).map_err(Error::Io)?; - fs::File::create(&file).map_err(Error::Io)?; - } - - known_hosts - .read_file(&file, KnownHostFileKind::OpenSSH) - .map_err(Error::Ssh)?; - - // Now check to see if the seesion's host key is anywhere in the known - // hosts file - let (key, key_type) = session.host_key().unwrap(); - match known_hosts.check(host, key) { - CheckResult::Match => Ok(()), - CheckResult::Mismatch => Err(Error::MismatchedHostKey), - CheckResult::Failure => Err(Error::Failure), - CheckResult::NotFound => { - tracing::info!("adding host key for {}", host); - known_hosts - .add(host, key, "added by gitbutler client", key_type.into()) - .map_err(Error::Ssh)?; - known_hosts - .write_file(&file, KnownHostFileKind::OpenSSH) - .map_err(Error::Ssh)?; - Ok(()) - } - } -} diff --git a/gitbutler-app/src/storage.rs b/gitbutler-app/src/storage.rs deleted file mode 100644 index 1533016cf..000000000 --- a/gitbutler-app/src/storage.rs +++ /dev/null @@ -1,73 +0,0 @@ -use std::{ - fs, - path::{Path, PathBuf}, - sync::{Arc, RwLock}, -}; - -#[cfg(target_family = "unix")] -use std::os::unix::prelude::*; - -#[derive(Debug, Default, Clone)] -pub struct Storage { - local_data_dir: Arc>, -} - -#[derive(Debug, thiserror::Error)] -pub enum Error { - #[error(transparent)] - IO(#[from] std::io::Error), -} - -impl Storage { - pub fn new>(local_data_dir: P) -> Storage { - Storage { - local_data_dir: Arc::new(RwLock::new(local_data_dir.as_ref().to_path_buf())), - } - } - - pub fn read>(&self, path: P) -> Result, Error> { - let local_data_dir = self.local_data_dir.read().unwrap(); - let file_path = local_data_dir.join(path); - if !file_path.exists() { - return Ok(None); - } - let contents = fs::read_to_string(&file_path).map_err(Error::IO)?; - Ok(Some(contents)) - } - - pub fn write>(&self, path: P, content: &str) -> Result<(), Error> { - let local_data_dir = self.local_data_dir.write().unwrap(); - let file_path = 
local_data_dir.join(path); - let dir = file_path.parent().unwrap(); - if !dir.exists() { - fs::create_dir_all(dir).map_err(Error::IO)?; - } - fs::write(&file_path, content).map_err(Error::IO)?; - - // Set the permissions to be user-only. We can't actually - // do this on Windows, so we ignore that platform. - #[cfg(target_family = "unix")] - { - let metadata = fs::metadata(file_path.clone())?; - let mut permissions = metadata.permissions(); - permissions.set_mode(0o600); // User read/write - fs::set_permissions(file_path.clone(), permissions)?; - } - - Ok(()) - } - - pub fn delete>(&self, path: P) -> Result<(), Error> { - let local_data_dir = self.local_data_dir.write().unwrap(); - let file_path = local_data_dir.join(path); - if !file_path.exists() { - return Ok(()); - } - if file_path.is_dir() { - fs::remove_dir_all(file_path.clone()).map_err(Error::IO)?; - } else { - fs::remove_file(file_path.clone()).map_err(Error::IO)?; - } - Ok(()) - } -} diff --git a/gitbutler-app/src/types.rs b/gitbutler-app/src/types.rs deleted file mode 100644 index 3fa5f859e..000000000 --- a/gitbutler-app/src/types.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod default_true; diff --git a/gitbutler-app/src/types/default_true.rs b/gitbutler-app/src/types/default_true.rs deleted file mode 100644 index 7ab2c1d69..000000000 --- a/gitbutler-app/src/types/default_true.rs +++ /dev/null @@ -1,90 +0,0 @@ -#[derive(Clone, Copy, PartialEq, Eq, Hash)] -pub struct DefaultTrue(bool); - -impl core::fmt::Debug for DefaultTrue { - #[inline] - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - ::fmt(&self.0, f) - } -} - -impl core::fmt::Display for DefaultTrue { - #[inline] - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - ::fmt(&self.0, f) - } -} - -impl Default for DefaultTrue { - #[inline] - fn default() -> Self { - DefaultTrue(true) - } -} - -impl From for bool { - #[inline] - fn from(default_true: DefaultTrue) -> Self { - default_true.0 - } -} - -impl From for 
DefaultTrue { - #[inline] - fn from(boolean: bool) -> Self { - DefaultTrue(boolean) - } -} - -impl serde::Serialize for DefaultTrue { - #[inline] - fn serialize(&self, serializer: S) -> Result { - serializer.serialize_bool(self.0) - } -} - -impl<'de> serde::Deserialize<'de> for DefaultTrue { - #[inline] - fn deserialize>(deserializer: D) -> Result { - Ok(DefaultTrue(bool::deserialize(deserializer)?)) - } -} - -impl core::ops::Deref for DefaultTrue { - type Target = bool; - - #[inline] - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl core::ops::DerefMut for DefaultTrue { - #[inline] - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -impl PartialEq for DefaultTrue { - #[inline] - fn eq(&self, other: &bool) -> bool { - self.0 == *other - } -} - -impl PartialEq for bool { - #[inline] - fn eq(&self, other: &DefaultTrue) -> bool { - *self == other.0 - } -} - -impl core::ops::Not for DefaultTrue { - type Output = bool; - - #[inline] - fn not(self) -> Self::Output { - !self.0 - } -} diff --git a/gitbutler-app/src/users.rs b/gitbutler-app/src/users.rs index 67c452d2f..27dd350fb 100644 --- a/gitbutler-app/src/users.rs +++ b/gitbutler-app/src/users.rs @@ -1,7 +1,82 @@ -pub mod commands; -pub mod controller; -pub mod storage; -mod user; +pub mod commands { + use tauri::{AppHandle, Manager}; + use tracing::instrument; -pub use controller::*; -pub use user::User; + use crate::{error::Error, sentry}; + + use gitbutler::{ + assets, + users::controller::{self, Controller, GetError}, + users::User, + }; + + impl From for Error { + fn from(value: GetError) -> Self { + match value { + GetError::Other(error) => { + tracing::error!(?error, "failed to get user"); + Error::Unknown + } + } + } + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn get_user(handle: AppHandle) -> Result, Error> { + let app = handle.state::(); + let proxy = handle.state::(); + + match app.get_user()? 
{ + Some(user) => Ok(Some(proxy.proxy_user(user).await)), + None => Ok(None), + } + } + + impl From for Error { + fn from(value: controller::SetError) -> Self { + match value { + controller::SetError::Other(error) => { + tracing::error!(?error, "failed to set user"); + Error::Unknown + } + } + } + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn set_user(handle: AppHandle, user: User) -> Result { + let app = handle.state::(); + let proxy = handle.state::(); + + app.set_user(&user)?; + + sentry::configure_scope(Some(&user)); + + Ok(proxy.proxy_user(user).await) + } + + impl From for Error { + fn from(value: controller::DeleteError) -> Self { + match value { + controller::DeleteError::Other(error) => { + tracing::error!(?error, "failed to delete user"); + Error::Unknown + } + } + } + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn delete_user(handle: AppHandle) -> Result<(), Error> { + let app = handle.state::(); + + app.delete_user()?; + + sentry::configure_scope(None); + + Ok(()) + } +} diff --git a/gitbutler-app/src/users/commands.rs b/gitbutler-app/src/users/commands.rs deleted file mode 100644 index 70cef0153..000000000 --- a/gitbutler-app/src/users/commands.rs +++ /dev/null @@ -1,79 +0,0 @@ -use tauri::{AppHandle, Manager}; -use tracing::instrument; - -use crate::{assets, error::Error, sentry}; - -use super::{ - controller::{self, Controller, GetError}, - User, -}; - -impl From for Error { - fn from(value: GetError) -> Self { - match value { - GetError::Other(error) => { - tracing::error!(?error, "failed to get user"); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn get_user(handle: AppHandle) -> Result, Error> { - let app = handle.state::(); - let proxy = handle.state::(); - - match app.get_user()? 
{ - Some(user) => Ok(Some(proxy.proxy_user(user).await)), - None => Ok(None), - } -} - -impl From for Error { - fn from(value: controller::SetError) -> Self { - match value { - controller::SetError::Other(error) => { - tracing::error!(?error, "failed to set user"); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn set_user(handle: AppHandle, user: User) -> Result { - let app = handle.state::(); - let proxy = handle.state::(); - - app.set_user(&user)?; - - sentry::configure_scope(Some(&user)); - - Ok(proxy.proxy_user(user).await) -} - -impl From for Error { - fn from(value: controller::DeleteError) -> Self { - match value { - controller::DeleteError::Other(error) => { - tracing::error!(?error, "failed to delete user"); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn delete_user(handle: AppHandle) -> Result<(), Error> { - let app = handle.state::(); - - app.delete_user()?; - - sentry::configure_scope(None); - - Ok(()) -} diff --git a/gitbutler-app/src/users/controller.rs b/gitbutler-app/src/users/controller.rs deleted file mode 100644 index b6c4d4d66..000000000 --- a/gitbutler-app/src/users/controller.rs +++ /dev/null @@ -1,57 +0,0 @@ -use anyhow::Context; - -use super::{storage::Storage, User}; - -#[derive(Clone)] -pub struct Controller { - storage: Storage, -} - -impl Controller { - pub fn new(storage: Storage) -> Controller { - Controller { storage } - } - - pub fn from_path>(path: P) -> Controller { - Controller::new(Storage::from_path(path)) - } - - pub fn get_user(&self) -> Result, GetError> { - self.storage - .get() - .context("failed to get user") - .map_err(Into::into) - } - - pub fn set_user(&self, user: &User) -> Result<(), SetError> { - self.storage - .set(user) - .context("failed to set user") - .map_err(Into::into) - } - - pub fn delete_user(&self) -> Result<(), DeleteError> { - self.storage - .delete() - .context("failed to delete user") - 
.map_err(Into::into) - } -} - -#[derive(Debug, thiserror::Error)] -pub enum GetError { - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum SetError { - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum DeleteError { - #[error(transparent)] - Other(#[from] anyhow::Error), -} diff --git a/gitbutler-app/src/users/storage.rs b/gitbutler-app/src/users/storage.rs deleted file mode 100644 index 8c77323c3..000000000 --- a/gitbutler-app/src/users/storage.rs +++ /dev/null @@ -1,46 +0,0 @@ -use anyhow::Result; - -use crate::{storage, users::user}; - -const USER_FILE: &str = "user.json"; - -#[derive(Debug, Clone)] -pub struct Storage { - storage: storage::Storage, -} - -#[derive(Debug, thiserror::Error)] -pub enum Error { - #[error(transparent)] - Storage(#[from] storage::Error), - #[error(transparent)] - Json(#[from] serde_json::Error), -} - -impl Storage { - pub fn new(storage: storage::Storage) -> Storage { - Storage { storage } - } - - pub fn from_path>(path: P) -> Storage { - Storage::new(storage::Storage::new(path)) - } - - pub fn get(&self) -> Result, Error> { - match self.storage.read(USER_FILE)? 
{ - Some(data) => Ok(Some(serde_json::from_str(&data)?)), - None => Ok(None), - } - } - - pub fn set(&self, user: &user::User) -> Result<(), Error> { - let data = serde_json::to_string(user)?; - self.storage.write(USER_FILE, &data)?; - Ok(()) - } - - pub fn delete(&self) -> Result<(), Error> { - self.storage.delete(USER_FILE)?; - Ok(()) - } -} diff --git a/gitbutler-app/src/users/user.rs b/gitbutler-app/src/users/user.rs deleted file mode 100644 index 655c30a87..000000000 --- a/gitbutler-app/src/users/user.rs +++ /dev/null @@ -1,35 +0,0 @@ -use serde::{Deserialize, Serialize}; - -use crate::git; - -#[derive(Debug, Deserialize, Serialize, Clone, Default)] -pub struct User { - pub id: u64, - pub name: Option, - pub given_name: Option, - pub family_name: Option, - pub email: String, - pub picture: String, - pub locale: Option, - pub created_at: String, - pub updated_at: String, - pub access_token: String, - pub role: Option, - pub github_access_token: Option, - #[serde(default)] - pub github_username: Option, -} - -impl TryFrom for git::Signature<'_> { - type Error = git::Error; - - fn try_from(value: User) -> Result { - if let Some(name) = value.name { - git::Signature::now(&name, &value.email) - } else if let Some(name) = value.given_name { - git::Signature::now(&name, &value.email) - } else { - git::Signature::now(&value.email, &value.email) - } - } -} diff --git a/gitbutler-app/src/virtual_branches.rs b/gitbutler-app/src/virtual_branches.rs index ae76e2ce8..3eca0e14c 100644 --- a/gitbutler-app/src/virtual_branches.rs +++ b/gitbutler-app/src/virtual_branches.rs @@ -1,31 +1,540 @@ -pub mod branch; -pub use branch::{Branch, BranchId}; -pub mod context; -pub mod target; +pub mod commands { + use anyhow::Context; + use tauri::{AppHandle, Manager}; + use tracing::instrument; -pub mod errors; + use gitbutler::error::{Code, Error}; -mod files; -pub use files::*; + use crate::watcher; + use gitbutler::askpass::AskpassBroker; + use 
gitbutler::virtual_branches::{RemoteBranch, RemoteBranchData}; + use gitbutler::{ + assets, git, projects, + projects::ProjectId, + virtual_branches::branch::{self, BranchId, BranchOwnershipClaims}, + virtual_branches::controller::{Controller, ControllerError}, + virtual_branches::BaseBranch, + virtual_branches::{RemoteBranchFile, VirtualBranches}, + }; -pub mod integration; -pub use integration::GITBUTLER_INTEGRATION_REFERENCE; + fn into_error>(value: ControllerError) -> Error { + match value { + ControllerError::User(error) => error, + ControllerError::Action(error) => error.into(), + ControllerError::VerifyError(error) => error.into(), + ControllerError::Other(error) => { + tracing::error!(?error, "failed to verify branch"); + Error::Unknown + } + } + } -mod base; -pub use base::*; + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn commit_virtual_branch( + handle: AppHandle, + project_id: ProjectId, + branch: BranchId, + message: &str, + ownership: Option, + run_hooks: bool, + ) -> Result { + let oid = handle + .state::() + .create_commit(&project_id, &branch, message, ownership.as_ref(), run_hooks) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(oid) + } -pub mod controller; -pub use controller::Controller; + /// This is a test command. 
It retrieves the virtual branches state from the gitbutler repository (legacy state) and persists it into a flat TOML file + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn save_vbranches_state( + handle: AppHandle, + project_id: ProjectId, + branch_ids: Vec, + ) -> Result<(), Error> { + handle + .state::() + .save_vbranches_state(&project_id, branch_ids) + .await?; + return Ok(()); + } -pub mod commands; + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn list_virtual_branches( + handle: AppHandle, + project_id: ProjectId, + ) -> Result { + let (branches, uses_diff_context, skipped_files) = handle + .state::() + .list_virtual_branches(&project_id) + .await + .map_err(into_error)?; -mod iterator; -pub use iterator::BranchIterator as Iterator; + // Migration: If use_diff_context is not already set and if there are no vbranches, set use_diff_context to true + let has_active_branches = branches.iter().any(|branch| branch.active); + if !uses_diff_context && !has_active_branches { + let _ = handle + .state::() + .update(&projects::UpdateRequest { + id: project_id, + use_diff_context: Some(true), + ..Default::default() + }) + .await; + } -mod r#virtual; -pub use r#virtual::*; + let proxy = handle.state::(); + let branches = proxy.proxy_virtual_branches(branches).await; + Ok(VirtualBranches { + branches, + skipped_files, + }) + } -mod remote; -pub use remote::*; + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn create_virtual_branch( + handle: AppHandle, + project_id: ProjectId, + branch: branch::BranchCreateRequest, + ) -> Result { + let branch_id = handle + .state::() + .create_virtual_branch(&project_id, &branch) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(branch_id) + } -mod state; + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn create_virtual_branch_from_branch( + handle: AppHandle, + project_id: ProjectId, + branch: git::Refname, + ) -> 
Result { + let branch_id = handle + .state::() + .create_virtual_branch_from_branch(&project_id, &branch) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(branch_id) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn merge_virtual_branch_upstream( + handle: AppHandle, + project_id: ProjectId, + branch: BranchId, + ) -> Result<(), Error> { + handle + .state::() + .merge_virtual_branch_upstream(&project_id, &branch) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(()) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn get_base_branch_data( + handle: AppHandle, + project_id: ProjectId, + ) -> Result, Error> { + if let Some(base_branch) = handle + .state::() + .get_base_branch_data(&project_id) + .await + .map_err(into_error)? + { + let proxy = handle.state::(); + let base_branch = proxy.proxy_base_branch(base_branch).await; + Ok(Some(base_branch)) + } else { + Ok(None) + } + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn set_base_branch( + handle: AppHandle, + project_id: ProjectId, + branch: &str, + ) -> Result { + let branch_name = format!("refs/remotes/{}", branch) + .parse() + .context("Invalid branch name")?; + let base_branch = handle + .state::() + .set_base_branch(&project_id, &branch_name) + .await + .map_err(into_error)?; + let base_branch = handle + .state::() + .proxy_base_branch(base_branch) + .await; + emit_vbranches(&handle, &project_id).await; + Ok(base_branch) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn update_base_branch(handle: AppHandle, project_id: ProjectId) -> Result<(), Error> { + handle + .state::() + .update_base_branch(&project_id) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(()) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn update_virtual_branch( + handle: AppHandle, + project_id: 
ProjectId, + branch: branch::BranchUpdateRequest, + ) -> Result<(), Error> { + handle + .state::() + .update_virtual_branch(&project_id, branch) + .await + .map_err(into_error)?; + + emit_vbranches(&handle, &project_id).await; + Ok(()) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn delete_virtual_branch( + handle: AppHandle, + project_id: ProjectId, + branch_id: BranchId, + ) -> Result<(), Error> { + handle + .state::() + .delete_virtual_branch(&project_id, &branch_id) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(()) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn apply_branch( + handle: AppHandle, + project_id: ProjectId, + branch: BranchId, + ) -> Result<(), Error> { + handle + .state::() + .apply_virtual_branch(&project_id, &branch) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(()) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn unapply_branch( + handle: AppHandle, + project_id: ProjectId, + branch: BranchId, + ) -> Result<(), Error> { + handle + .state::() + .unapply_virtual_branch(&project_id, &branch) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(()) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn unapply_ownership( + handle: AppHandle, + project_id: ProjectId, + ownership: BranchOwnershipClaims, + ) -> Result<(), Error> { + handle + .state::() + .unapply_ownership(&project_id, &ownership) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(()) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn reset_files( + handle: AppHandle, + project_id: ProjectId, + files: &str, + ) -> Result<(), Error> { + // convert files to Vec + let files = files + .split('\n') + .map(std::string::ToString::to_string) + .collect::>(); + handle + .state::() + .reset_files(&project_id, 
&files) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(()) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn push_virtual_branch( + handle: AppHandle, + project_id: ProjectId, + branch_id: BranchId, + with_force: bool, + ) -> Result<(), Error> { + let askpass_broker = handle.state::(); + handle + .state::() + .push_virtual_branch( + &project_id, + &branch_id, + with_force, + Some((askpass_broker.inner().clone(), Some(branch_id))), + ) + .await + .map_err(|e| Error::UserError { + code: Code::Unknown, + message: e.to_string(), + })?; + emit_vbranches(&handle, &project_id).await; + Ok(()) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn can_apply_virtual_branch( + handle: AppHandle, + project_id: ProjectId, + branch_id: BranchId, + ) -> Result { + handle + .state::() + .can_apply_virtual_branch(&project_id, &branch_id) + .await + .map_err(Into::into) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn can_apply_remote_branch( + handle: AppHandle, + project_id: ProjectId, + branch: git::RemoteRefname, + ) -> Result { + handle + .state::() + .can_apply_remote_branch(&project_id, &branch) + .await + .map_err(into_error) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn list_remote_commit_files( + handle: AppHandle, + project_id: ProjectId, + commit_oid: git::Oid, + ) -> Result, Error> { + handle + .state::() + .list_remote_commit_files(&project_id, commit_oid) + .await + .map_err(Into::into) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn reset_virtual_branch( + handle: AppHandle, + project_id: ProjectId, + branch_id: BranchId, + target_commit_oid: git::Oid, + ) -> Result<(), Error> { + handle + .state::() + .reset_virtual_branch(&project_id, &branch_id, target_commit_oid) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(()) + } + + #[tauri::command(async)] + 
#[instrument(skip(handle))] + pub async fn cherry_pick_onto_virtual_branch( + handle: AppHandle, + project_id: ProjectId, + branch_id: BranchId, + target_commit_oid: git::Oid, + ) -> Result, Error> { + let oid = handle + .state::() + .cherry_pick(&project_id, &branch_id, target_commit_oid) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(oid) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn amend_virtual_branch( + handle: AppHandle, + project_id: ProjectId, + branch_id: BranchId, + ownership: BranchOwnershipClaims, + ) -> Result { + let oid = handle + .state::() + .amend(&project_id, &branch_id, &ownership) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(oid) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn list_remote_branches( + handle: tauri::AppHandle, + project_id: ProjectId, + ) -> Result, Error> { + let branches = handle + .state::() + .list_remote_branches(&project_id) + .await + .map_err(into_error)?; + Ok(branches) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn get_remote_branch_data( + handle: tauri::AppHandle, + project_id: ProjectId, + refname: git::Refname, + ) -> Result { + let branch_data = handle + .state::() + .get_remote_branch_data(&project_id, &refname) + .await + .map_err(into_error)?; + let branch_data = handle + .state::() + .proxy_remote_branch_data(branch_data) + .await; + Ok(branch_data) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn squash_branch_commit( + handle: tauri::AppHandle, + project_id: ProjectId, + branch_id: BranchId, + target_commit_oid: git::Oid, + ) -> Result<(), Error> { + handle + .state::() + .squash(&project_id, &branch_id, target_commit_oid) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(()) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn fetch_from_target( + 
handle: tauri::AppHandle, + project_id: ProjectId, + action: Option, + ) -> Result { + let askpass_broker = handle.state::().inner().clone(); + let base_branch = handle + .state::() + .fetch_from_target( + &project_id, + Some(( + askpass_broker, + action.unwrap_or_else(|| "unknown".to_string()), + )), + ) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(base_branch) + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn move_commit( + handle: tauri::AppHandle, + project_id: ProjectId, + commit_oid: git::Oid, + target_branch_id: BranchId, + ) -> Result<(), Error> { + handle + .state::() + .move_commit(&project_id, &target_branch_id, commit_oid) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(()) + } + + // XXX(qix-): Is this command used? + #[allow(dead_code)] + pub async fn update_commit_message( + handle: tauri::AppHandle, + project_id: ProjectId, + branch_id: BranchId, + commit_oid: git::Oid, + message: &str, + ) -> Result<(), Error> { + handle + .state::() + .update_commit_message(&project_id, &branch_id, commit_oid, message) + .await + .map_err(into_error)?; + emit_vbranches(&handle, &project_id).await; + Ok(()) + } + + async fn emit_vbranches(handle: &AppHandle, project_id: &projects::ProjectId) { + if let Err(error) = handle + .state::() + .post(watcher::Event::CalculateVirtualBranches(*project_id)) + .await + { + tracing::error!(?error); + } + } +} diff --git a/gitbutler-app/src/virtual_branches/base.rs b/gitbutler-app/src/virtual_branches/base.rs deleted file mode 100644 index 26f0bdfa5..000000000 --- a/gitbutler-app/src/virtual_branches/base.rs +++ /dev/null @@ -1,657 +0,0 @@ -use std::time; - -use anyhow::{Context, Result}; -use serde::Serialize; - -use crate::{ - gb_repository, - git::{self, diff}, - keys, - project_repository::{self, LogUntil}, - projects::FetchResult, - reader, sessions, users, - virtual_branches::branch::BranchOwnershipClaims, -}; - -use 
super::{ - branch, errors, - integration::{update_gitbutler_integration, GITBUTLER_INTEGRATION_REFERENCE}, - target, BranchId, RemoteCommit, -}; - -#[derive(Debug, Serialize, PartialEq, Clone)] -#[serde(rename_all = "camelCase")] -pub struct BaseBranch { - pub branch_name: String, - pub remote_name: String, - pub remote_url: String, - pub base_sha: git::Oid, - pub current_sha: git::Oid, - pub behind: usize, - pub upstream_commits: Vec, - pub recent_commits: Vec, - pub last_fetched_ms: Option, -} - -pub fn get_base_branch_data( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, -) -> Result, errors::GetBaseBranchDataError> { - match gb_repository - .default_target() - .context("failed to get default target")? - { - None => Ok(None), - Some(target) => { - let base = target_to_base_branch(project_repository, &target) - .context("failed to convert default target to base branch")?; - Ok(Some(base)) - } - } -} - -fn go_back_to_integration( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - default_target: &target::Target, -) -> Result { - let statuses = project_repository - .git_repository - .statuses(Some( - git2::StatusOptions::new() - .show(git2::StatusShow::IndexAndWorkdir) - .include_untracked(true), - )) - .context("failed to get status")?; - if !statuses.is_empty() { - return Err(errors::SetBaseBranchError::DirtyWorkingDirectory); - } - - let latest_session = gb_repository - .get_latest_session()? - .context("no session found")?; - let session_reader = sessions::Reader::open(gb_repository, &latest_session)?; - - let all_virtual_branches = super::iterator::BranchIterator::new(&session_reader) - .context("failed to create branch iterator")? 
- .collect::, reader::Error>>() - .context("failed to read virtual branches")?; - - let applied_virtual_branches = all_virtual_branches - .iter() - .filter(|branch| branch.applied) - .collect::>(); - - let target_commit = project_repository - .git_repository - .find_commit(default_target.sha) - .context("failed to find target commit")?; - - let base_tree = target_commit - .tree() - .context("failed to get base tree from commit")?; - let mut final_tree = target_commit - .tree() - .context("failed to get base tree from commit")?; - for branch in &applied_virtual_branches { - // merge this branches tree with our tree - let branch_head = project_repository - .git_repository - .find_commit(branch.head) - .context("failed to find branch head")?; - let branch_tree = branch_head - .tree() - .context("failed to get branch head tree")?; - let mut result = project_repository - .git_repository - .merge_trees(&base_tree, &final_tree, &branch_tree) - .context("failed to merge")?; - let final_tree_oid = result - .write_tree_to(&project_repository.git_repository) - .context("failed to write tree")?; - final_tree = project_repository - .git_repository - .find_tree(final_tree_oid) - .context("failed to find written tree")?; - } - - project_repository - .git_repository - .checkout_tree(&final_tree) - .force() - .checkout() - .context("failed to checkout tree")?; - - let base = target_to_base_branch(project_repository, default_target)?; - update_gitbutler_integration(gb_repository, project_repository)?; - Ok(base) -} - -pub fn set_base_branch( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - target_branch_ref: &git::RemoteRefname, -) -> Result { - let repo = &project_repository.git_repository; - - // if target exists, and it is the same as the requested branch, we should go back - if let Some(target) = gb_repository.default_target()? 
{ - if target.branch.eq(target_branch_ref) { - return go_back_to_integration(gb_repository, project_repository, &target); - } - } - - // lookup a branch by name - let target_branch = match repo.find_branch(&target_branch_ref.clone().into()) { - Ok(branch) => Ok(branch), - Err(git::Error::NotFound(_)) => Err(errors::SetBaseBranchError::BranchNotFound( - target_branch_ref.clone(), - )), - Err(error) => Err(errors::SetBaseBranchError::Other(error.into())), - }?; - - let remote = repo - .find_remote(target_branch_ref.remote()) - .context(format!( - "failed to find remote for branch {}", - target_branch.name().unwrap() - ))?; - let remote_url = remote - .url() - .context(format!( - "failed to get remote url for {}", - target_branch_ref.remote() - ))? - .unwrap(); - - let target_branch_head = target_branch.peel_to_commit().context(format!( - "failed to peel branch {} to commit", - target_branch.name().unwrap() - ))?; - - let current_head = repo.head().context("Failed to get HEAD reference")?; - let current_head_commit = current_head - .peel_to_commit() - .context("Failed to peel HEAD reference to commit")?; - - // calculate the commit as the merge-base between HEAD in project_repository and this target commit - let target_commit_oid = repo - .merge_base(current_head_commit.id(), target_branch_head.id()) - .context(format!( - "Failed to calculate merge base between {} and {}", - current_head_commit.id(), - target_branch_head.id() - ))?; - - let target = target::Target { - branch: target_branch_ref.clone(), - remote_url: remote_url.to_string(), - sha: target_commit_oid, - }; - - let target_writer = target::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create target writer")?; - target_writer.write_default(&target)?; - - let head_name: git::Refname = current_head - .name() - .context("Failed to get HEAD reference name")?; - if !head_name - .to_string() - .eq(&GITBUTLER_INTEGRATION_REFERENCE.to_string()) - { - // if there are any 
commits on the head branch or uncommitted changes in the working directory, we need to - // put them into a virtual branch - - let use_context = project_repository - .project() - .use_diff_context - .unwrap_or(false); - let context_lines = if use_context { 3_u32 } else { 0_u32 }; - let wd_diff = diff::workdir(repo, ¤t_head_commit.id(), context_lines)?; - let wd_diff = diff::diff_files_to_hunks(&wd_diff); - if !wd_diff.is_empty() || current_head_commit.id() != target.sha { - let hunks_by_filepath = - super::virtual_hunks_by_filepath(&project_repository.project().path, &wd_diff); - - // assign ownership to the branch - let ownership = hunks_by_filepath.values().flatten().fold( - BranchOwnershipClaims::default(), - |mut ownership, hunk| { - ownership.put( - &format!("{}:{}", hunk.file_path.display(), hunk.id) - .parse() - .unwrap(), - ); - ownership - }, - ); - - let now_ms = time::UNIX_EPOCH - .elapsed() - .context("failed to get elapsed time")? - .as_millis(); - - let (upstream, upstream_head) = if let git::Refname::Local(head_name) = &head_name { - let upstream_name = target_branch_ref.with_branch(head_name.branch()); - if upstream_name.eq(target_branch_ref) { - (None, None) - } else { - match repo.find_reference(&git::Refname::from(&upstream_name)) { - Ok(upstream) => { - let head = upstream - .peel_to_commit() - .map(|commit| commit.id()) - .context(format!( - "failed to peel upstream {} to commit", - upstream.name().unwrap() - ))?; - Ok((Some(upstream_name), Some(head))) - } - Err(git::Error::NotFound(_)) => Ok((None, None)), - Err(error) => Err(error), - } - .context(format!("failed to find upstream for {}", head_name))? 
- } - } else { - (None, None) - }; - - let mut branch = branch::Branch { - id: BranchId::generate(), - name: head_name.to_string().replace("refs/heads/", ""), - notes: String::new(), - applied: true, - upstream, - upstream_head, - created_timestamp_ms: now_ms, - updated_timestamp_ms: now_ms, - head: current_head_commit.id(), - tree: super::write_tree_onto_commit( - project_repository, - current_head_commit.id(), - &wd_diff, - )?, - ownership, - order: 0, - selected_for_changes: None, - }; - - let branch_writer = - branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create branch writer")?; - branch_writer.write(&mut branch)?; - } - } - - set_exclude_decoration(project_repository)?; - - super::integration::update_gitbutler_integration(gb_repository, project_repository)?; - - let base = target_to_base_branch(project_repository, &target)?; - Ok(base) -} - -fn set_exclude_decoration(project_repository: &project_repository::Repository) -> Result<()> { - let repo = &project_repository.git_repository; - let mut config = repo.config()?; - config - .set_multivar("log.excludeDecoration", "refs/gitbutler", "refs/gitbutler") - .context("failed to set log.excludeDecoration")?; - Ok(()) -} - -fn _print_tree(repo: &git2::Repository, tree: &git2::Tree) -> Result<()> { - println!("tree id: {}", tree.id()); - for entry in tree { - println!( - " entry: {} {}", - entry.name().unwrap_or_default(), - entry.id() - ); - // get entry contents - let object = entry.to_object(repo).context("failed to get object")?; - let blob = object.as_blob().context("failed to get blob")?; - // convert content to string - if let Ok(content) = std::str::from_utf8(blob.content()) { - println!(" blob: {}", content); - } else { - println!(" blob: BINARY"); - } - } - Ok(()) -} - -// try to update the target branch -// this means that we need to: -// determine if what the target branch is now pointing to is mergeable with our current working directory -// merge the 
target branch into our current working directory -// update the target sha -pub fn update_base_branch( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - user: Option<&users::User>, - signing_key: Option<&keys::PrivateKey>, -) -> Result<(), errors::UpdateBaseBranchError> { - if project_repository.is_resolving() { - return Err(errors::UpdateBaseBranchError::Conflict( - errors::ProjectConflictError { - project_id: project_repository.project().id, - }, - )); - } - - // look up the target and see if there is a new oid - let target = gb_repository - .default_target() - .context("failed to get default target")? - .ok_or_else(|| { - errors::UpdateBaseBranchError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - - let repo = &project_repository.git_repository; - let target_branch = repo - .find_branch(&target.branch.clone().into()) - .context(format!("failed to find branch {}", target.branch))?; - - let new_target_commit = target_branch - .peel_to_commit() - .context(format!("failed to peel branch {} to commit", target.branch))?; - - // if the target has not changed, do nothing - if new_target_commit.id() == target.sha { - return Ok(()); - } - - // ok, target has changed, so now we need to merge it into our current work and update our branches - - // get tree from new target - let new_target_tree = new_target_commit - .tree() - .context("failed to get new target commit tree")?; - - let old_target_tree = repo - .find_commit(target.sha) - .and_then(|commit| commit.tree()) - .context(format!( - "failed to get old target commit tree {}", - target.sha - ))?; - - let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create branch writer")?; - - let use_context = project_repository - .project() - .use_diff_context - .unwrap_or(false); - let context_lines = if use_context { 3_u32 } else { 0_u32 }; - - // 
try to update every branch - let updated_vbranches = super::get_status_by_branch(gb_repository, project_repository)? - .0 - .into_iter() - .map(|(branch, _)| branch) - .map( - |mut branch: branch::Branch| -> Result> { - let branch_tree = repo.find_tree(branch.tree)?; - - let branch_head_commit = repo.find_commit(branch.head).context(format!( - "failed to find commit {} for branch {}", - branch.head, branch.id - ))?; - let branch_head_tree = branch_head_commit.tree().context(format!( - "failed to find tree for commit {} for branch {}", - branch.head, branch.id - ))?; - - let result_integrated_detected = - |mut branch: branch::Branch| -> Result> { - // branch head tree is the same as the new target tree. - // meaning we can safely use the new target commit as the branch head. - - branch.head = new_target_commit.id(); - - // it also means that the branch is fully integrated into the target. - // disconnect it from the upstream - branch.upstream = None; - branch.upstream_head = None; - - let non_commited_files = diff::trees( - &project_repository.git_repository, - &branch_head_tree, - &branch_tree, - context_lines, - )?; - if non_commited_files.is_empty() { - // if there are no commited files, then the branch is fully merged - // and we can delete it. - branch_writer.delete(&branch)?; - project_repository.delete_branch_reference(&branch)?; - Ok(None) - } else { - branch_writer.write(&mut branch)?; - Ok(Some(branch)) - } - }; - - if branch_head_tree.id() == new_target_tree.id() { - return result_integrated_detected(branch); - } - - // try to merge branch head with new target - let mut branch_tree_merge_index = repo - .merge_trees(&old_target_tree, &branch_tree, &new_target_tree) - .context(format!("failed to merge trees for branch {}", branch.id))?; - - if branch_tree_merge_index.has_conflicts() { - // branch tree conflicts with new target, unapply branch for now. we'll handle it later, when user applies it back. 
- branch.applied = false; - branch_writer.write(&mut branch)?; - return Ok(Some(branch)); - } - - let branch_merge_index_tree_oid = branch_tree_merge_index.write_tree_to(repo)?; - - if branch_merge_index_tree_oid == new_target_tree.id() { - return result_integrated_detected(branch); - } - - if branch.head == target.sha { - // there are no commits on the branch, so we can just update the head to the new target and calculate the new tree - branch.head = new_target_commit.id(); - branch.tree = branch_merge_index_tree_oid; - branch_writer.write(&mut branch)?; - return Ok(Some(branch)); - } - - let mut branch_head_merge_index = repo - .merge_trees(&old_target_tree, &branch_head_tree, &new_target_tree) - .context(format!( - "failed to merge head tree for branch {}", - branch.id - ))?; - - if branch_head_merge_index.has_conflicts() { - // branch commits conflict with new target, make sure the branch is - // unapplied. conflicts witll be dealt with when applying it back. - branch.applied = false; - branch_writer.write(&mut branch)?; - return Ok(Some(branch)); - } - - // branch commits do not conflict with new target, so lets merge them - let branch_head_merge_tree_oid = branch_head_merge_index - .write_tree_to(repo) - .context(format!( - "failed to write head merge index for {}", - branch.id - ))?; - - let ok_with_force_push = project_repository.project().ok_with_force_push; - - let result_merge = |mut branch: branch::Branch| -> Result> { - // branch was pushed to upstream, and user doesn't like force pushing. - // create a merge commit to avoid the need of force pushing then. 
- let branch_head_merge_tree = repo - .find_tree(branch_head_merge_tree_oid) - .context("failed to find tree")?; - - let new_target_head = project_repository - .commit( - user, - format!( - "Merged {}/{} into {}", - target.branch.remote(), - target.branch.branch(), - branch.name - ) - .as_str(), - &branch_head_merge_tree, - &[&branch_head_commit, &new_target_commit], - signing_key, - ) - .context("failed to commit merge")?; - - branch.head = new_target_head; - branch.tree = branch_merge_index_tree_oid; - branch_writer.write(&mut branch)?; - Ok(Some(branch)) - }; - - if branch.upstream.is_some() && !ok_with_force_push { - return result_merge(branch); - } - - // branch was not pushed to upstream yet. attempt a rebase, - let (_, committer) = project_repository.git_signatures(user)?; - let mut rebase_options = git2::RebaseOptions::new(); - rebase_options.quiet(true); - rebase_options.inmemory(true); - let mut rebase = repo - .rebase( - Some(branch.head), - Some(new_target_commit.id()), - None, - Some(&mut rebase_options), - ) - .context("failed to rebase")?; - - let mut rebase_success = true; - // check to see if these commits have already been pushed - let mut last_rebase_head = branch.head; - while rebase.next().is_some() { - let index = rebase - .inmemory_index() - .context("failed to get inmemory index")?; - if index.has_conflicts() { - rebase_success = false; - break; - } - - if let Ok(commit_id) = rebase.commit(None, &committer.clone().into(), None) { - last_rebase_head = commit_id.into(); - } else { - rebase_success = false; - break; - } - } - - if rebase_success { - // rebase worked out, rewrite the branch head - rebase.finish(None).context("failed to finish rebase")?; - branch.head = last_rebase_head; - branch.tree = branch_merge_index_tree_oid; - branch_writer.write(&mut branch)?; - return Ok(Some(branch)); - } - - // rebase failed, do a merge commit - rebase.abort().context("failed to abort rebase")?; - - result_merge(branch) - }, - ) - .collect::>>()? 
- .into_iter() - .flatten() - .collect::>(); - - // ok, now all the problematic branches have been unapplied - // now we calculate and checkout new tree for the working directory - - let final_tree = updated_vbranches - .iter() - .filter(|branch| branch.applied) - .fold(new_target_commit.tree(), |final_tree, branch| { - let final_tree = final_tree?; - let branch_tree = repo.find_tree(branch.tree)?; - let mut merge_result = repo.merge_trees(&new_target_tree, &final_tree, &branch_tree)?; - let final_tree_oid = merge_result.write_tree_to(repo)?; - repo.find_tree(final_tree_oid) - }) - .context("failed to calculate final tree")?; - - repo.checkout_tree(&final_tree).force().checkout().context( - "failed to checkout index, this should not have happened, we should have already detected this", - )?; - - // write new target oid - let target_writer = target::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create target writer")?; - target_writer.write_default(&target::Target { - sha: new_target_commit.id(), - ..target - })?; - - super::integration::update_gitbutler_integration(gb_repository, project_repository)?; - - Ok(()) -} - -pub fn target_to_base_branch( - project_repository: &project_repository::Repository, - target: &target::Target, -) -> Result { - let repo = &project_repository.git_repository; - let branch = repo.find_branch(&target.branch.clone().into())?; - let commit = branch.peel_to_commit()?; - let oid = commit.id(); - - // gather a list of commits between oid and target.sha - let upstream_commits = project_repository - .log(oid, project_repository::LogUntil::Commit(target.sha)) - .context("failed to get upstream commits")? - .iter() - .map(super::commit_to_remote_commit) - .collect::>(); - - // get some recent commits - let recent_commits = project_repository - .log(target.sha, LogUntil::Take(20)) - .context("failed to get recent commits")? 
- .iter() - .map(super::commit_to_remote_commit) - .collect::>(); - - let base = super::BaseBranch { - branch_name: format!("{}/{}", target.branch.remote(), target.branch.branch()), - remote_name: target.branch.remote().to_string(), - remote_url: target.remote_url.clone(), - base_sha: target.sha, - current_sha: oid, - behind: upstream_commits.len(), - upstream_commits, - recent_commits, - last_fetched_ms: project_repository - .project() - .project_data_last_fetch - .as_ref() - .map(FetchResult::timestamp) - .copied() - .map(|t| t.duration_since(time::UNIX_EPOCH).unwrap().as_millis()), - }; - Ok(base) -} diff --git a/gitbutler-app/src/virtual_branches/branch.rs b/gitbutler-app/src/virtual_branches/branch.rs deleted file mode 100644 index ab6bf4012..000000000 --- a/gitbutler-app/src/virtual_branches/branch.rs +++ /dev/null @@ -1,237 +0,0 @@ -mod file_ownership; -mod hunk; -mod ownership; -mod reader; -mod writer; - -pub use file_ownership::OwnershipClaim; -pub use hunk::Hunk; -pub use ownership::reconcile_claims; -pub use ownership::BranchOwnershipClaims; -pub use reader::BranchReader as Reader; -pub use writer::BranchWriter as Writer; - -use serde::{Deserialize, Serialize}; - -use anyhow::Result; - -use crate::{git, id::Id}; - -pub type BranchId = Id; - -// this is the struct for the virtual branch data that is stored in our data -// store. it is more or less equivalent to a git branch reference, but it is not -// stored or accessible from the git repository itself. it is stored in our -// session storage under the branches/ directory. 
-#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Default)] -pub struct Branch { - pub id: BranchId, - pub name: String, - pub notes: String, - pub applied: bool, - pub upstream: Option, - // upstream_head is the last commit on we've pushed to the upstream branch - pub upstream_head: Option, - #[serde( - serialize_with = "serialize_u128", - deserialize_with = "deserialize_u128" - )] - pub created_timestamp_ms: u128, - #[serde( - serialize_with = "serialize_u128", - deserialize_with = "deserialize_u128" - )] - pub updated_timestamp_ms: u128, - /// tree is the last git tree written to a session, or merge base tree if this is new. use this for delta calculation from the session data - pub tree: git::Oid, - /// head is id of the last "virtual" commit in this branch - pub head: git::Oid, - pub ownership: BranchOwnershipClaims, - // order is the number by which UI should sort branches - pub order: usize, - // is Some(timestamp), the branch is considered a default destination for new changes. - // if more than one branch is selected, the branch with the highest timestamp wins. 
- pub selected_for_changes: Option, -} - -fn serialize_u128(x: &u128, s: S) -> Result -where - S: serde::Serializer, -{ - s.serialize_str(&x.to_string()) -} - -fn deserialize_u128<'de, D>(d: D) -> Result -where - D: serde::Deserializer<'de>, -{ - let s = String::deserialize(d)?; - let x: u128 = s.parse().map_err(serde::de::Error::custom)?; - Ok(x) -} - -impl Branch { - pub fn refname(&self) -> git::VirtualRefname { - self.into() - } -} - -#[derive(Debug, Serialize, Deserialize, Default)] -pub struct BranchUpdateRequest { - pub id: BranchId, - pub name: Option, - pub notes: Option, - pub ownership: Option, - pub order: Option, - pub upstream: Option, // just the branch name, so not refs/remotes/origin/branchA, just branchA - pub selected_for_changes: Option, -} - -#[derive(Debug, Serialize, Deserialize, Default)] -pub struct BranchCreateRequest { - pub name: Option, - pub ownership: Option, - pub order: Option, - pub selected_for_changes: Option, -} - -impl Branch { - pub fn from_reader(reader: &crate::reader::Reader<'_>) -> Result { - let results = reader.batch(&[ - "id", - "meta/name", - "meta/notes", - "meta/applied", - "meta/order", - "meta/upstream", - "meta/upstream_head", - "meta/tree", - "meta/head", - "meta/created_timestamp_ms", - "meta/updated_timestamp_ms", - "meta/ownership", - "meta/selected_for_changes", - ])?; - - let id: String = results[0].clone()?.try_into()?; - let id: BranchId = id.parse().map_err(|e| { - crate::reader::Error::Io( - std::io::Error::new(std::io::ErrorKind::Other, format!("id: {}", e)).into(), - ) - })?; - let name: String = results[1].clone()?.try_into()?; - - let notes: String = match results[2].clone() { - Ok(notes) => Ok(notes.try_into()?), - Err(crate::reader::Error::NotFound) => Ok(String::new()), - Err(e) => Err(e), - }?; - - let applied = match results[3].clone() { - Ok(applied) => applied.try_into(), - _ => Ok(false), - } - .unwrap_or(false); - - let order: usize = match results[4].clone() { - Ok(order) => 
Ok(order.try_into()?), - Err(crate::reader::Error::NotFound) => Ok(0), - Err(e) => Err(e), - }?; - - let upstream = match results[5].clone() { - Ok(crate::reader::Content::UTF8(upstream)) => { - if upstream.is_empty() { - Ok(None) - } else { - upstream - .parse::() - .map(Some) - .map_err(|e| { - crate::reader::Error::Io( - std::io::Error::new( - std::io::ErrorKind::Other, - format!("meta/upstream: {}", e), - ) - .into(), - ) - }) - } - } - Ok(_) | Err(crate::reader::Error::NotFound) => Ok(None), - Err(e) => Err(e), - }?; - - let upstream_head = match results[6].clone() { - Ok(crate::reader::Content::UTF8(upstream_head)) => { - upstream_head.parse().map(Some).map_err(|e| { - crate::reader::Error::Io( - std::io::Error::new( - std::io::ErrorKind::Other, - format!("meta/upstream_head: {}", e), - ) - .into(), - ) - }) - } - Ok(_) | Err(crate::reader::Error::NotFound) => Ok(None), - Err(e) => Err(e), - }?; - - let tree: String = results[7].clone()?.try_into()?; - let head: String = results[8].clone()?.try_into()?; - let created_timestamp_ms = results[9].clone()?.try_into()?; - let updated_timestamp_ms = results[10].clone()?.try_into()?; - - let ownership_string: String = results[11].clone()?.try_into()?; - let ownership = ownership_string.parse().map_err(|e| { - crate::reader::Error::Io( - std::io::Error::new(std::io::ErrorKind::Other, format!("meta/ownership: {}", e)) - .into(), - ) - })?; - - let selected_for_changes = match results[12].clone() { - Ok(raw_ts) => { - let ts = raw_ts.try_into().map_err(|e| { - crate::reader::Error::Io( - std::io::Error::new( - std::io::ErrorKind::Other, - format!("meta/selected_for_changes: {}", e), - ) - .into(), - ) - })?; - Ok(Some(ts)) - } - Err(crate::reader::Error::NotFound) => Ok(None), - Err(e) => Err(e), - }?; - - Ok(Self { - id, - name, - notes, - applied, - upstream, - upstream_head, - tree: tree.parse().map_err(|e| { - crate::reader::Error::Io( - std::io::Error::new(std::io::ErrorKind::Other, format!("meta/tree: {}", e)) - 
.into(), - ) - })?, - head: head.parse().map_err(|e| { - crate::reader::Error::Io( - std::io::Error::new(std::io::ErrorKind::Other, format!("meta/head: {}", e)) - .into(), - ) - })?, - created_timestamp_ms, - updated_timestamp_ms, - ownership, - order, - selected_for_changes, - }) - } -} diff --git a/gitbutler-app/src/virtual_branches/branch/file_ownership.rs b/gitbutler-app/src/virtual_branches/branch/file_ownership.rs deleted file mode 100644 index e040d2a47..000000000 --- a/gitbutler-app/src/virtual_branches/branch/file_ownership.rs +++ /dev/null @@ -1,178 +0,0 @@ -use std::{fmt, path, str::FromStr, vec}; - -use anyhow::{Context, Result}; - -use super::hunk::Hunk; - -#[derive(Debug, PartialEq, Eq, Clone)] -pub struct OwnershipClaim { - pub file_path: path::PathBuf, - pub hunks: Vec, -} - -impl FromStr for OwnershipClaim { - type Err = anyhow::Error; - - fn from_str(value: &str) -> std::result::Result { - let mut file_path_parts = vec![]; - let mut ranges = vec![]; - for part in value.split(':').rev() { - match part - .split(',') - .map(str::parse) - .collect::>>() - { - Ok(rr) => ranges.extend(rr), - Err(_) => { - file_path_parts.insert(0, part); - } - } - } - - if ranges.is_empty() { - Err(anyhow::anyhow!("ownership ranges cannot be empty")) - } else { - Ok(Self { - file_path: file_path_parts - .join(":") - .parse() - .context(format!("failed to parse file path from {}", value))?, - hunks: ranges.clone(), - }) - } - } -} - -impl OwnershipClaim { - pub fn is_full(&self) -> bool { - self.hunks.is_empty() - } - - pub fn contains(&self, another: &OwnershipClaim) -> bool { - if !self.file_path.eq(&another.file_path) { - return false; - } - - if self.hunks.is_empty() { - // full ownership contains any partial ownership - return true; - } - - if another.hunks.is_empty() { - // partial ownership contains no full ownership - return false; - } - - another.hunks.iter().all(|hunk| self.hunks.contains(hunk)) - } - - // return a copy of self, with another ranges added - pub 
fn plus(&self, another: &OwnershipClaim) -> OwnershipClaim { - if !self.file_path.eq(&another.file_path) { - return self.clone(); - } - - if self.hunks.is_empty() { - // full ownership + partial ownership = full ownership - return self.clone(); - } - - if another.hunks.is_empty() { - // partial ownership + full ownership = full ownership - return another.clone(); - } - - let mut hunks = self - .hunks - .iter() - .filter(|hunk| !another.hunks.contains(hunk)) - .cloned() - .collect::>(); - - another.hunks.iter().for_each(|hunk| { - hunks.insert(0, hunk.clone()); - }); - - OwnershipClaim { - file_path: self.file_path.clone(), - hunks, - } - } - - // returns (taken, remaining) - // if all of the ranges are removed, return None - pub fn minus( - &self, - another: &OwnershipClaim, - ) -> (Option, Option) { - if !self.file_path.eq(&another.file_path) { - // no changes - return (None, Some(self.clone())); - } - - if another.hunks.is_empty() { - // any ownership - full ownership = empty ownership - return (Some(self.clone()), None); - } - - if self.hunks.is_empty() { - // full ownership - partial ownership = full ownership, since we don't know all the - // hunks. 
- return (None, Some(self.clone())); - } - - let mut left = self.hunks.clone(); - let mut taken = vec![]; - for range in &another.hunks { - left = left - .iter() - .flat_map(|r: &Hunk| -> Vec { - if r.eq(range) { - taken.push(r.clone()); - vec![] - } else { - vec![r.clone()] - } - }) - .collect(); - } - - ( - if taken.is_empty() { - None - } else { - Some(OwnershipClaim { - file_path: self.file_path.clone(), - hunks: taken, - }) - }, - if left.is_empty() { - None - } else { - Some(OwnershipClaim { - file_path: self.file_path.clone(), - hunks: left, - }) - }, - ) - } -} - -impl fmt::Display for OwnershipClaim { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result { - if self.hunks.is_empty() { - write!(f, "{}", self.file_path.display()) - } else { - write!( - f, - "{}:{}", - self.file_path.display(), - self.hunks - .iter() - .map(ToString::to_string) - .collect::>() - .join(",") - ) - } - } -} diff --git a/gitbutler-app/src/virtual_branches/branch/hunk.rs b/gitbutler-app/src/virtual_branches/branch/hunk.rs deleted file mode 100644 index a2271c5d5..000000000 --- a/gitbutler-app/src/virtual_branches/branch/hunk.rs +++ /dev/null @@ -1,169 +0,0 @@ -use std::{fmt::Display, ops::RangeInclusive, str::FromStr}; - -use anyhow::{anyhow, Context, Result}; - -use crate::git::diff; - -#[derive(Debug, Eq, Clone)] -pub struct Hunk { - pub hash: Option, - pub timestamp_ms: Option, - pub start: u32, - pub end: u32, -} - -impl From<&diff::GitHunk> for Hunk { - fn from(hunk: &diff::GitHunk) -> Self { - Hunk { - start: hunk.new_start, - end: hunk.new_start + hunk.new_lines, - hash: Some(Hunk::hash(&hunk.diff)), - timestamp_ms: None, - } - } -} - -impl PartialEq for Hunk { - fn eq(&self, other: &Self) -> bool { - if self.hash.is_some() && other.hash.is_some() { - self.hash == other.hash && self.start == other.start && self.end == other.end - } else { - self.start == other.start && self.end == other.end - } - } -} - -impl From> for Hunk { - fn from(range: RangeInclusive) -> 
Self { - Hunk { - start: *range.start(), - end: *range.end(), - hash: None, - timestamp_ms: None, - } - } -} - -impl FromStr for Hunk { - type Err = anyhow::Error; - - fn from_str(s: &str) -> std::result::Result { - let mut range = s.split('-'); - let start = if let Some(raw_start) = range.next() { - raw_start - .parse::() - .context(format!("failed to parse start of range: {}", s)) - } else { - Err(anyhow!("invalid range: {}", s)) - }?; - - let end = if let Some(raw_end) = range.next() { - raw_end - .parse::() - .context(format!("failed to parse end of range: {}", s)) - } else { - Err(anyhow!("invalid range: {}", s)) - }?; - - let hash = if let Some(raw_hash) = range.next() { - if raw_hash.is_empty() { - None - } else { - Some(raw_hash.to_string()) - } - } else { - None - }; - - let timestamp_ms = if let Some(raw_timestamp_ms) = range.next() { - Some( - raw_timestamp_ms - .parse::() - .context(format!("failed to parse timestamp_ms of range: {}", s))?, - ) - } else { - None - }; - - Hunk::new(start, end, hash, timestamp_ms) - } -} - -impl Display for Hunk { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}-{}", self.start, self.end)?; - match (self.hash.as_ref(), self.timestamp_ms.as_ref()) { - (Some(hash), Some(timestamp_ms)) => write!(f, "-{}-{}", hash, timestamp_ms), - (Some(hash), None) => write!(f, "-{}", hash), - (None, Some(timestamp_ms)) => write!(f, "--{}", timestamp_ms), - (None, None) => Ok(()), - } - } -} - -impl Hunk { - pub fn new( - start: u32, - end: u32, - hash: Option, - timestamp_ms: Option, - ) -> Result { - if start > end { - Err(anyhow!("invalid range: {}-{}", start, end)) - } else { - Ok(Hunk { - hash, - timestamp_ms, - start, - end, - }) - } - } - - pub fn with_hash(&self, hash: &str) -> Self { - Hunk { - start: self.start, - end: self.end, - hash: Some(hash.to_string()), - timestamp_ms: self.timestamp_ms, - } - } - - pub fn with_timestamp(&self, timestamp_ms: u128) -> Self { - Hunk { - start: self.start, 
- end: self.end, - hash: self.hash.clone(), - timestamp_ms: Some(timestamp_ms), - } - } - - pub fn timestam_ms(&self) -> Option { - self.timestamp_ms - } - - pub fn contains(&self, line: u32) -> bool { - self.start <= line && self.end >= line - } - - pub fn intersects(&self, another: &diff::GitHunk) -> bool { - self.contains(another.new_start) - || self.contains(another.new_start + another.new_lines) - || another.contains(self.start) - || another.contains(self.end) - } - - pub fn shallow_eq(&self, other: &diff::GitHunk) -> bool { - self.start == other.new_start && self.end == other.new_start + other.new_lines - } - - pub fn hash(diff: &str) -> String { - let addition = diff - .lines() - .skip(1) // skip the first line which is the diff header - .filter(|line| line.starts_with('+') || line.starts_with('-')) // exclude context lines - .collect::>() - .join("\n"); - format!("{:x}", md5::compute(addition)) - } -} diff --git a/gitbutler-app/src/virtual_branches/branch/ownership.rs b/gitbutler-app/src/virtual_branches/branch/ownership.rs deleted file mode 100644 index dda2e78ff..000000000 --- a/gitbutler-app/src/virtual_branches/branch/ownership.rs +++ /dev/null @@ -1,183 +0,0 @@ -use std::{collections::HashSet, fmt, str::FromStr}; - -use itertools::Itertools; -use serde::{Deserialize, Serialize, Serializer}; - -use super::{Branch, OwnershipClaim}; -use anyhow::Result; - -#[derive(Debug, Clone, Default, PartialEq, Eq)] -pub struct BranchOwnershipClaims { - pub claims: Vec, -} - -impl Serialize for BranchOwnershipClaims { - fn serialize(&self, serializer: S) -> Result { - serializer.serialize_str(self.to_string().as_str()) - } -} - -impl<'de> Deserialize<'de> for BranchOwnershipClaims { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - s.parse().map_err(serde::de::Error::custom) - } -} - -impl fmt::Display for BranchOwnershipClaims { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> 
fmt::Result { - for file in &self.claims { - writeln!(f, "{}", file)?; - } - Ok(()) - } -} - -impl FromStr for BranchOwnershipClaims { - type Err = anyhow::Error; - - fn from_str(s: &str) -> Result { - let mut ownership = BranchOwnershipClaims::default(); - for line in s.lines() { - ownership.claims.push(line.parse()?); - } - Ok(ownership) - } -} - -impl BranchOwnershipClaims { - pub fn is_empty(&self) -> bool { - self.claims.is_empty() - } - - pub fn contains(&self, another: &BranchOwnershipClaims) -> bool { - if another.is_empty() { - return true; - } - - if self.is_empty() { - return false; - } - - for file_ownership in &another.claims { - let mut found = false; - for self_file_ownership in &self.claims { - if self_file_ownership.file_path == file_ownership.file_path - && self_file_ownership.contains(file_ownership) - { - found = true; - break; - } - } - if !found { - return false; - } - } - - true - } - - pub fn put(&mut self, ownership: &OwnershipClaim) { - let target = self - .claims - .iter() - .filter(|o| !o.is_full()) // only consider explicit ownership - .find(|o| o.file_path == ownership.file_path) - .cloned(); - - self.claims - .retain(|o| o.is_full() || o.file_path != ownership.file_path); - - if let Some(target) = target { - self.claims.insert(0, target.plus(ownership)); - } else { - self.claims.insert(0, ownership.clone()); - } - } - - // modifies the ownership in-place and returns the file ownership that was taken, if any. 
- pub fn take(&mut self, ownership: &OwnershipClaim) -> Vec { - let mut taken = Vec::new(); - let mut remaining = Vec::new(); - for file_ownership in &self.claims { - if file_ownership.file_path == ownership.file_path { - let (taken_ownership, remaining_ownership) = file_ownership.minus(ownership); - if let Some(taken_ownership) = taken_ownership { - taken.push(taken_ownership); - } - if let Some(remaining_ownership) = remaining_ownership { - remaining.push(remaining_ownership); - } - } else { - remaining.push(file_ownership.clone()); - } - } - - self.claims = remaining; - - taken - } -} - -#[derive(Debug, Clone)] -pub struct ClaimOutcome { - pub updated_branch: Branch, - pub removed_claims: Vec, -} -pub fn reconcile_claims( - all_branches: Vec, - claiming_branch: &Branch, - new_claims: &[OwnershipClaim], -) -> Result> { - let mut other_branches = all_branches - .into_iter() - .filter(|branch| branch.applied) - .filter(|branch| branch.id != claiming_branch.id) - .collect::>(); - - let mut claim_outcomes: Vec = Vec::new(); - - for branch in &mut other_branches { - let taken = new_claims - .iter() - .flat_map(|c| branch.ownership.take(c)) - .collect_vec(); - claim_outcomes.push(ClaimOutcome { - updated_branch: branch.clone(), - removed_claims: taken, - }); - } - - // Add the claiming branch to the list of outcomes - claim_outcomes.push(ClaimOutcome { - updated_branch: Branch { - ownership: BranchOwnershipClaims { - claims: new_claims.to_owned(), - }, - ..claiming_branch.clone() - }, - removed_claims: Vec::new(), - }); - - // Check the outcomes consistency and error out if they would result in a hunk being claimed by multiple branches - let mut seen = HashSet::new(); - for outcome in claim_outcomes.clone() { - for claim in outcome.updated_branch.ownership.claims { - for hunk in claim.hunks { - if !seen.insert(format!( - "{}-{}-{}", - claim.file_path.to_str().unwrap_or_default(), - hunk.start, - hunk.end - )) { - return Err(anyhow::anyhow!("inconsistent ownership 
claims")); - } - } - } - } - - Ok(claim_outcomes) -} diff --git a/gitbutler-app/src/virtual_branches/branch/reader.rs b/gitbutler-app/src/virtual_branches/branch/reader.rs deleted file mode 100644 index cebc0c009..000000000 --- a/gitbutler-app/src/virtual_branches/branch/reader.rs +++ /dev/null @@ -1,19 +0,0 @@ -use crate::{reader, sessions}; - -use super::{Branch, BranchId}; - -pub struct BranchReader<'r> { - reader: &'r reader::Reader<'r>, -} - -impl<'r> BranchReader<'r> { - pub fn new(reader: &'r sessions::Reader<'r>) -> Self { - Self { - reader: reader.reader(), - } - } - - pub fn read(&self, id: &BranchId) -> Result { - Branch::from_reader(&self.reader.sub(format!("branches/{}", id))) - } -} diff --git a/gitbutler-app/src/virtual_branches/branch/writer.rs b/gitbutler-app/src/virtual_branches/branch/writer.rs deleted file mode 100644 index 821bdc8fe..000000000 --- a/gitbutler-app/src/virtual_branches/branch/writer.rs +++ /dev/null @@ -1,160 +0,0 @@ -use std::path; - -use anyhow::Result; - -use crate::{gb_repository, reader, virtual_branches::state::VirtualBranchesHandle, writer}; - -use super::Branch; - -pub struct BranchWriter<'writer> { - repository: &'writer gb_repository::Repository, - writer: writer::DirWriter, - reader: reader::Reader<'writer>, - state_handle: VirtualBranchesHandle, -} - -impl<'writer> BranchWriter<'writer> { - pub fn new>( - repository: &'writer gb_repository::Repository, - path: P, - ) -> Result { - let reader = reader::Reader::open(repository.root())?; - let writer = writer::DirWriter::open(repository.root())?; - let state_handle = VirtualBranchesHandle::new(path.as_ref()); - Ok(Self { - repository, - writer, - reader, - state_handle, - }) - } - - pub fn delete(&self, branch: &Branch) -> Result<()> { - match self - .reader - .sub(format!("branches/{}", branch.id)) - .read("id") - { - Ok(_) => { - self.repository.mark_active_session()?; - let _lock = self.repository.lock(); - self.writer.remove(format!("branches/{}", branch.id))?; - // 
Write in the state file as well - let _ = self.state_handle.remove_branch(branch.id); - Ok(()) - } - Err(reader::Error::NotFound) => Ok(()), - Err(err) => Err(err.into()), - } - } - - pub fn write(&self, branch: &mut Branch) -> Result<()> { - let reader = self.reader.sub(format!("branches/{}", branch.id)); - match Branch::from_reader(&reader) { - Ok(existing) if existing.eq(branch) => return Ok(()), - Ok(_) | Err(reader::Error::NotFound) => {} - Err(err) => return Err(err.into()), - } - - self.repository.mark_active_session()?; - - branch.updated_timestamp_ms = std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH)? - .as_millis(); - - let mut batch = vec![]; - - batch.push(writer::BatchTask::Write( - format!("branches/{}/id", branch.id), - branch.id.to_string(), - )); - - batch.push(writer::BatchTask::Write( - format!("branches/{}/meta/name", branch.id), - branch.name.clone(), - )); - - batch.push(writer::BatchTask::Write( - format!("branches/{}/meta/notes", branch.id), - branch.notes.clone(), - )); - - batch.push(writer::BatchTask::Write( - format!("branches/{}/meta/order", branch.id), - branch.order.to_string(), - )); - - batch.push(writer::BatchTask::Write( - format!("branches/{}/meta/applied", branch.id), - branch.applied.to_string(), - )); - - if let Some(upstream) = &branch.upstream { - batch.push(writer::BatchTask::Write( - format!("branches/{}/meta/upstream", branch.id), - upstream.to_string(), - )); - } else { - batch.push(writer::BatchTask::Remove(format!( - "branches/{}/meta/upstream", - branch.id - ))); - } - - if let Some(upstream_head) = &branch.upstream_head { - batch.push(writer::BatchTask::Write( - format!("branches/{}/meta/upstream_head", branch.id), - upstream_head.to_string(), - )); - } else { - batch.push(writer::BatchTask::Remove(format!( - "branches/{}/meta/upstream_head", - branch.id - ))); - } - - batch.push(writer::BatchTask::Write( - format!("branches/{}/meta/tree", branch.id), - branch.tree.to_string(), - )); - - 
batch.push(writer::BatchTask::Write( - format!("branches/{}/meta/head", branch.id), - branch.head.to_string(), - )); - - batch.push(writer::BatchTask::Write( - format!("branches/{}/meta/created_timestamp_ms", branch.id), - branch.created_timestamp_ms.to_string(), - )); - - batch.push(writer::BatchTask::Write( - format!("branches/{}/meta/updated_timestamp_ms", branch.id), - branch.updated_timestamp_ms.to_string(), - )); - - batch.push(writer::BatchTask::Write( - format!("branches/{}/meta/ownership", branch.id), - branch.ownership.to_string(), - )); - - if let Some(selected_for_changes) = branch.selected_for_changes { - batch.push(writer::BatchTask::Write( - format!("branches/{}/meta/selected_for_changes", branch.id), - selected_for_changes.to_string(), - )); - } else { - batch.push(writer::BatchTask::Remove(format!( - "branches/{}/meta/selected_for_changes", - branch.id - ))); - } - - self.writer.batch(&batch)?; - - // Write in the state file as well - self.state_handle.set_branch(branch.clone())?; - - Ok(()) - } -} diff --git a/gitbutler-app/src/virtual_branches/commands.rs b/gitbutler-app/src/virtual_branches/commands.rs deleted file mode 100644 index 203ad5f99..000000000 --- a/gitbutler-app/src/virtual_branches/commands.rs +++ /dev/null @@ -1,518 +0,0 @@ -use crate::{projects::ProjectId, watcher}; -use anyhow::Context; -use tauri::{AppHandle, Manager}; -use tracing::instrument; - -use crate::{ - assets, - error::{Code, Error}, - git, projects, -}; - -use super::{ - branch::{BranchId, BranchOwnershipClaims}, - controller::{Controller, ControllerError}, - BaseBranch, RemoteBranchFile, -}; - -impl> From> for Error { - fn from(value: ControllerError) -> Self { - match value { - ControllerError::User(error) => error, - ControllerError::Action(error) => error.into(), - ControllerError::VerifyError(error) => error.into(), - ControllerError::Other(error) => { - tracing::error!(?error, "failed to verify branch"); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] 
-#[instrument(skip(handle))] -pub async fn commit_virtual_branch( - handle: AppHandle, - project_id: ProjectId, - branch: BranchId, - message: &str, - ownership: Option, - run_hooks: bool, -) -> Result { - let oid = handle - .state::() - .create_commit(&project_id, &branch, message, ownership.as_ref(), run_hooks) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(oid) -} - -/// This is a test command. It retrieves the virtual branches state from the gitbutler repository (legacy state) and persists it into a flat TOML file -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn save_vbranches_state( - handle: AppHandle, - project_id: ProjectId, - branch_ids: Vec, -) -> Result<(), Error> { - handle - .state::() - .save_vbranches_state(&project_id, branch_ids) - .await?; - return Ok(()); -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn list_virtual_branches( - handle: AppHandle, - project_id: ProjectId, -) -> Result { - let (branches, uses_diff_context, skipped_files) = handle - .state::() - .list_virtual_branches(&project_id) - .await?; - - // Migration: If use_diff_context is not already set and if there are no vbranches, set use_diff_context to true - let has_active_branches = branches.iter().any(|branch| branch.active); - if !uses_diff_context && !has_active_branches { - let _ = handle - .state::() - .update(&projects::UpdateRequest { - id: project_id, - use_diff_context: Some(true), - ..Default::default() - }) - .await; - } - - let proxy = handle.state::(); - let branches = proxy.proxy_virtual_branches(branches).await; - Ok(super::VirtualBranches { - branches, - skipped_files, - }) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn create_virtual_branch( - handle: AppHandle, - project_id: ProjectId, - branch: super::branch::BranchCreateRequest, -) -> Result { - let branch_id = handle - .state::() - .create_virtual_branch(&project_id, &branch) - .await?; - emit_vbranches(&handle, 
&project_id).await; - Ok(branch_id) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn create_virtual_branch_from_branch( - handle: AppHandle, - project_id: ProjectId, - branch: git::Refname, -) -> Result { - let branch_id = handle - .state::() - .create_virtual_branch_from_branch(&project_id, &branch) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(branch_id) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn merge_virtual_branch_upstream( - handle: AppHandle, - project_id: ProjectId, - branch: BranchId, -) -> Result<(), Error> { - handle - .state::() - .merge_virtual_branch_upstream(&project_id, &branch) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(()) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn get_base_branch_data( - handle: AppHandle, - project_id: ProjectId, -) -> Result, Error> { - if let Some(base_branch) = handle - .state::() - .get_base_branch_data(&project_id) - .await? - { - let proxy = handle.state::(); - let base_branch = proxy.proxy_base_branch(base_branch).await; - Ok(Some(base_branch)) - } else { - Ok(None) - } -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn set_base_branch( - handle: AppHandle, - project_id: ProjectId, - branch: &str, -) -> Result { - let branch_name = format!("refs/remotes/{}", branch) - .parse() - .context("Invalid branch name")?; - let base_branch = handle - .state::() - .set_base_branch(&project_id, &branch_name) - .await?; - let base_branch = handle - .state::() - .proxy_base_branch(base_branch) - .await; - emit_vbranches(&handle, &project_id).await; - Ok(base_branch) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn update_base_branch(handle: AppHandle, project_id: ProjectId) -> Result<(), Error> { - handle - .state::() - .update_base_branch(&project_id) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(()) -} - -#[tauri::command(async)] 
-#[instrument(skip(handle))] -pub async fn update_virtual_branch( - handle: AppHandle, - project_id: ProjectId, - branch: super::branch::BranchUpdateRequest, -) -> Result<(), Error> { - handle - .state::() - .update_virtual_branch(&project_id, branch) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(()) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn delete_virtual_branch( - handle: AppHandle, - project_id: ProjectId, - branch_id: BranchId, -) -> Result<(), Error> { - handle - .state::() - .delete_virtual_branch(&project_id, &branch_id) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(()) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn apply_branch( - handle: AppHandle, - project_id: ProjectId, - branch: BranchId, -) -> Result<(), Error> { - handle - .state::() - .apply_virtual_branch(&project_id, &branch) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(()) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn unapply_branch( - handle: AppHandle, - project_id: ProjectId, - branch: BranchId, -) -> Result<(), Error> { - handle - .state::() - .unapply_virtual_branch(&project_id, &branch) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(()) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn unapply_ownership( - handle: AppHandle, - project_id: ProjectId, - ownership: BranchOwnershipClaims, -) -> Result<(), Error> { - handle - .state::() - .unapply_ownership(&project_id, &ownership) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(()) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn reset_files( - handle: AppHandle, - project_id: ProjectId, - files: &str, -) -> Result<(), Error> { - // convert files to Vec - let files = files - .split('\n') - .map(std::string::ToString::to_string) - .collect::>(); - handle - .state::() - .reset_files(&project_id, &files) - .await?; - 
emit_vbranches(&handle, &project_id).await; - Ok(()) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn push_virtual_branch( - handle: AppHandle, - project_id: ProjectId, - branch_id: BranchId, - with_force: bool, -) -> Result<(), Error> { - let askpass_broker = handle.state::(); - handle - .state::() - .push_virtual_branch( - &project_id, - &branch_id, - with_force, - Some((askpass_broker.inner().clone(), Some(branch_id))), - ) - .await - .map_err(|e| Error::UserError { - code: Code::Unknown, - message: e.to_string(), - })?; - emit_vbranches(&handle, &project_id).await; - Ok(()) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn can_apply_virtual_branch( - handle: AppHandle, - project_id: ProjectId, - branch_id: BranchId, -) -> Result { - handle - .state::() - .can_apply_virtual_branch(&project_id, &branch_id) - .await - .map_err(Into::into) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn can_apply_remote_branch( - handle: AppHandle, - project_id: ProjectId, - branch: git::RemoteRefname, -) -> Result { - handle - .state::() - .can_apply_remote_branch(&project_id, &branch) - .await - .map_err(Into::into) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn list_remote_commit_files( - handle: AppHandle, - project_id: ProjectId, - commit_oid: git::Oid, -) -> Result, Error> { - handle - .state::() - .list_remote_commit_files(&project_id, commit_oid) - .await - .map_err(Into::into) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn reset_virtual_branch( - handle: AppHandle, - project_id: ProjectId, - branch_id: BranchId, - target_commit_oid: git::Oid, -) -> Result<(), Error> { - handle - .state::() - .reset_virtual_branch(&project_id, &branch_id, target_commit_oid) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(()) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn cherry_pick_onto_virtual_branch( - handle: AppHandle, 
- project_id: ProjectId, - branch_id: BranchId, - target_commit_oid: git::Oid, -) -> Result, Error> { - let oid = handle - .state::() - .cherry_pick(&project_id, &branch_id, target_commit_oid) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(oid) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn amend_virtual_branch( - handle: AppHandle, - project_id: ProjectId, - branch_id: BranchId, - ownership: BranchOwnershipClaims, -) -> Result { - let oid = handle - .state::() - .amend(&project_id, &branch_id, &ownership) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(oid) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn list_remote_branches( - handle: tauri::AppHandle, - project_id: ProjectId, -) -> Result, Error> { - let branches = handle - .state::() - .list_remote_branches(&project_id) - .await?; - Ok(branches) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn get_remote_branch_data( - handle: tauri::AppHandle, - project_id: ProjectId, - refname: git::Refname, -) -> Result { - let branch_data = handle - .state::() - .get_remote_branch_data(&project_id, &refname) - .await?; - let branch_data = handle - .state::() - .proxy_remote_branch_data(branch_data) - .await; - Ok(branch_data) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn squash_branch_commit( - handle: tauri::AppHandle, - project_id: ProjectId, - branch_id: BranchId, - target_commit_oid: git::Oid, -) -> Result<(), Error> { - handle - .state::() - .squash(&project_id, &branch_id, target_commit_oid) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(()) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn fetch_from_target( - handle: tauri::AppHandle, - project_id: ProjectId, - action: Option, -) -> Result { - let askpass_broker = handle - .state::() - .inner() - .clone(); - let base_branch = handle - .state::() - .fetch_from_target( - &project_id, - Some(( - 
askpass_broker, - action.unwrap_or_else(|| "unknown".to_string()), - )), - ) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(base_branch) -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn move_commit( - handle: tauri::AppHandle, - project_id: ProjectId, - commit_oid: git::Oid, - target_branch_id: BranchId, -) -> Result<(), Error> { - handle - .state::() - .move_commit(&project_id, &target_branch_id, commit_oid) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(()) -} - -// XXX(qix-): Is this command used? -#[allow(dead_code)] -pub async fn update_commit_message( - handle: tauri::AppHandle, - project_id: ProjectId, - branch_id: BranchId, - commit_oid: git::Oid, - message: &str, -) -> Result<(), Error> { - handle - .state::() - .update_commit_message(&project_id, &branch_id, commit_oid, message) - .await?; - emit_vbranches(&handle, &project_id).await; - Ok(()) -} - -async fn emit_vbranches(handle: &AppHandle, project_id: &projects::ProjectId) { - if let Err(error) = handle - .state::() - .post(watcher::Event::CalculateVirtualBranches(*project_id)) - .await - { - tracing::error!(?error); - } -} diff --git a/gitbutler-app/src/virtual_branches/context.rs b/gitbutler-app/src/virtual_branches/context.rs deleted file mode 100644 index 3733fc587..000000000 --- a/gitbutler-app/src/virtual_branches/context.rs +++ /dev/null @@ -1,124 +0,0 @@ -use crate::git::diff; - -pub fn hunk_with_context( - hunk_diff: &str, - hunk_old_start_line: usize, - hunk_new_start_line: usize, - is_binary: bool, - context_lines: usize, - file_lines_before: &[&str], - change_type: diff::ChangeType, -) -> diff::GitHunk { - let diff_lines = hunk_diff - .lines() - .map(std::string::ToString::to_string) - .collect::>(); - if diff_lines.is_empty() { - #[allow(clippy::cast_possible_truncation)] - return diff::GitHunk { - diff: hunk_diff.to_owned(), - old_start: hunk_old_start_line as u32, - old_lines: 0, - new_start: hunk_new_start_line as u32, - new_lines: 
0, - binary: is_binary, - change_type, - }; - } - - let new_file = hunk_old_start_line == 0; - let deleted_file = hunk_new_start_line == 0; - - let removed_count = diff_lines - .iter() - .filter(|line| line.starts_with('-')) - .count(); - let added_count = diff_lines - .iter() - .filter(|line| line.starts_with('+')) - .count(); - - // Get context lines before the diff - let mut context_before = Vec::new(); - let before_context_ending_index = if removed_count == 0 { - // Compensate for when the removed_count is 0 - hunk_old_start_line - } else { - hunk_old_start_line.saturating_sub(1) - }; - let before_context_starting_index = before_context_ending_index.saturating_sub(context_lines); - - for index in before_context_starting_index..before_context_ending_index { - if let Some(l) = file_lines_before.get(index) { - let mut s = (*l).to_string(); - s.insert(0, ' '); - context_before.push(s); - } - } - - // Get context lines after the diff - let mut context_after = Vec::new(); - let after_context_starting_index = before_context_ending_index + removed_count; - let after_context_ending_index = after_context_starting_index + context_lines; - - for index in after_context_starting_index..after_context_ending_index { - if let Some(l) = file_lines_before.get(index) { - let mut s = (*l).to_string(); - s.insert(0, ' '); - context_after.push(s); - } - } - - let start_line_before = if new_file { - // If we've created a new file, start_line_before should be 0 - 0 - } else { - before_context_starting_index + 1 - }; - - let start_line_after = if deleted_file { - // If we've deleted a new file, start_line_after should be 0 - 0 - } else if added_count == 0 { - // Compensate for when the added_count is 0 - hunk_new_start_line.saturating_sub(context_before.len()) + 1 - } else { - hunk_new_start_line.saturating_sub(context_before.len()) - }; - - let line_count_before = removed_count + context_before.len() + context_after.len(); - let line_count_after = added_count + context_before.len() + 
context_after.len(); - let header = format!( - "@@ -{},{} +{},{} @@", - start_line_before, line_count_before, start_line_after, line_count_after - ); - - let body = &diff_lines[1..]; - // Update unidiff body with context lines - let mut b = Vec::new(); - b.extend(context_before.clone()); - b.extend_from_slice(body); - b.extend(context_after.clone()); - let body = b; - - // Construct a new diff with updated header and body - let mut diff_lines = Vec::new(); - diff_lines.push(header); - diff_lines.extend(body); - let mut diff = diff_lines.join("\n"); - // Add trailing newline - diff.push('\n'); - - #[allow(clippy::cast_possible_truncation)] - let hunk = diff::GitHunk { - diff, - old_start: start_line_before as u32, - old_lines: line_count_before as u32, - new_start: start_line_after as u32, - new_lines: line_count_after as u32, - binary: is_binary, - change_type, - }; - - hunk -} diff --git a/gitbutler-app/src/virtual_branches/controller.rs b/gitbutler-app/src/virtual_branches/controller.rs deleted file mode 100644 index 0c09dc3ef..000000000 --- a/gitbutler-app/src/virtual_branches/controller.rs +++ /dev/null @@ -1,1112 +0,0 @@ -use std::{collections::HashMap, path, sync::Arc}; - -use anyhow::Context; -use tokio::{sync::Semaphore, task::JoinHandle}; - -use crate::{ - askpass::AskpassBroker, - error::Error, - gb_repository, git, keys, project_repository, - projects::{self, ProjectId}, - users, - virtual_branches::state::{VirtualBranches, VirtualBranchesHandle}, -}; - -use super::{ - branch::{BranchId, BranchOwnershipClaims}, - errors::{ - self, FetchFromTargetError, GetBaseBranchDataError, GetRemoteBranchDataError, - IsRemoteBranchMergableError, ListRemoteBranchesError, - }, - target_to_base_branch, BaseBranch, RemoteBranchFile, -}; - -#[derive(Clone)] -pub struct Controller { - local_data_dir: path::PathBuf, - projects: projects::Controller, - users: users::Controller, - keys: keys::Controller, - helper: git::credentials::Helper, - - by_project_id: Arc>>, -} - -impl 
Controller { - pub fn new( - local_data_dir: path::PathBuf, - projects: projects::Controller, - users: users::Controller, - keys: keys::Controller, - helper: git::credentials::Helper, - ) -> Self { - Self { - by_project_id: Arc::new(tokio::sync::Mutex::new(HashMap::new())), - - local_data_dir, - projects, - users, - keys, - helper, - } - } - - async fn inner(&self, project_id: &ProjectId) -> ControllerInner { - self.by_project_id - .lock() - .await - .entry(*project_id) - .or_insert_with(|| { - ControllerInner::new( - &self.local_data_dir, - &self.projects, - &self.users, - &self.keys, - &self.helper, - ) - }) - .clone() - } - - pub async fn create_commit( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - message: &str, - ownership: Option<&BranchOwnershipClaims>, - run_hooks: bool, - ) -> Result> { - self.inner(project_id) - .await - .create_commit(project_id, branch_id, message, ownership, run_hooks) - .await - } - - pub async fn can_apply_remote_branch( - &self, - project_id: &ProjectId, - branch_name: &git::RemoteRefname, - ) -> Result> { - self.inner(project_id) - .await - .can_apply_remote_branch(project_id, branch_name) - } - - pub async fn can_apply_virtual_branch( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - ) -> Result { - self.inner(project_id) - .await - .can_apply_virtual_branch(project_id, branch_id) - } - - /// Retrieves the virtual branches state from the gitbutler repository (legacy state) and persists it into a flat TOML file - pub async fn save_vbranches_state( - &self, - project_id: &ProjectId, - branch_ids: Vec, - ) -> Result<(), Error> { - let vbranches_state = self - .inner(project_id) - .await - .get_vbranches_state(project_id, branch_ids)?; - let project = self.projects.get(project_id).map_err(Error::from)?; - // TODO: this should be constructed somewhere else - let state_handle = VirtualBranchesHandle::new(project.path.join(".git").as_path()); - if let Some(default_target) = vbranches_state.default_target { - 
state_handle.set_default_target(default_target)?; - } - for (id, target) in vbranches_state.branch_targets { - state_handle.set_branch_target(id, target)?; - } - for (_, branch) in vbranches_state.branches { - state_handle.set_branch(branch)?; - } - Ok(()) - } - - pub async fn list_virtual_branches( - &self, - project_id: &ProjectId, - ) -> Result< - (Vec, bool, Vec), - ControllerError, - > { - self.inner(project_id) - .await - .list_virtual_branches(project_id) - .await - } - - pub async fn create_virtual_branch( - &self, - project_id: &ProjectId, - create: &super::branch::BranchCreateRequest, - ) -> Result> { - self.inner(project_id) - .await - .create_virtual_branch(project_id, create) - .await - } - - pub async fn create_virtual_branch_from_branch( - &self, - project_id: &ProjectId, - branch: &git::Refname, - ) -> Result> { - self.inner(project_id) - .await - .create_virtual_branch_from_branch(project_id, branch) - .await - } - - pub async fn get_base_branch_data( - &self, - project_id: &ProjectId, - ) -> Result, ControllerError> { - self.inner(project_id) - .await - .get_base_branch_data(project_id) - } - - pub async fn list_remote_commit_files( - &self, - project_id: &ProjectId, - commit_oid: git::Oid, - ) -> Result, Error> { - self.inner(project_id) - .await - .list_remote_commit_files(project_id, commit_oid) - } - - pub async fn set_base_branch( - &self, - project_id: &ProjectId, - target_branch: &git::RemoteRefname, - ) -> Result> { - self.inner(project_id) - .await - .set_base_branch(project_id, target_branch) - } - - pub async fn merge_virtual_branch_upstream( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - ) -> Result<(), ControllerError> { - self.inner(project_id) - .await - .merge_virtual_branch_upstream(project_id, branch_id) - .await - } - - pub async fn update_base_branch( - &self, - project_id: &ProjectId, - ) -> Result<(), ControllerError> { - self.inner(project_id) - .await - .update_base_branch(project_id) - .await - } - - pub 
async fn update_virtual_branch( - &self, - project_id: &ProjectId, - branch_update: super::branch::BranchUpdateRequest, - ) -> Result<(), ControllerError> { - self.inner(project_id) - .await - .update_virtual_branch(project_id, branch_update) - .await - } - pub async fn delete_virtual_branch( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - ) -> Result<(), ControllerError> { - self.inner(project_id) - .await - .delete_virtual_branch(project_id, branch_id) - .await - } - - pub async fn apply_virtual_branch( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - ) -> Result<(), ControllerError> { - self.inner(project_id) - .await - .apply_virtual_branch(project_id, branch_id) - .await - } - - pub async fn unapply_ownership( - &self, - project_id: &ProjectId, - ownership: &BranchOwnershipClaims, - ) -> Result<(), ControllerError> { - self.inner(project_id) - .await - .unapply_ownership(project_id, ownership) - .await - } - - pub async fn reset_files( - &self, - project_id: &ProjectId, - files: &Vec, - ) -> Result<(), ControllerError> { - self.inner(project_id) - .await - .reset_files(project_id, files) - .await - } - - pub async fn amend( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - ownership: &BranchOwnershipClaims, - ) -> Result> { - self.inner(project_id) - .await - .amend(project_id, branch_id, ownership) - .await - } - - pub async fn reset_virtual_branch( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - target_commit_oid: git::Oid, - ) -> Result<(), ControllerError> { - self.inner(project_id) - .await - .reset_virtual_branch(project_id, branch_id, target_commit_oid) - .await - } - - pub async fn unapply_virtual_branch( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - ) -> Result<(), ControllerError> { - self.inner(project_id) - .await - .unapply_virtual_branch(project_id, branch_id) - .await - } - - pub async fn push_virtual_branch( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - with_force: 
bool, - askpass: Option<(AskpassBroker, Option)>, - ) -> Result<(), ControllerError> { - self.inner(project_id) - .await - .push_virtual_branch(project_id, branch_id, with_force, askpass) - .await - } - - pub async fn cherry_pick( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - commit_oid: git::Oid, - ) -> Result, ControllerError> { - self.inner(project_id) - .await - .cherry_pick(project_id, branch_id, commit_oid) - .await - } - - pub async fn list_remote_branches( - &self, - project_id: &ProjectId, - ) -> Result, ControllerError> { - self.inner(project_id) - .await - .list_remote_branches(project_id) - } - - pub async fn get_remote_branch_data( - &self, - project_id: &ProjectId, - refname: &git::Refname, - ) -> Result> { - self.inner(project_id) - .await - .get_remote_branch_data(project_id, refname) - } - - pub async fn squash( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - commit_oid: git::Oid, - ) -> Result<(), ControllerError> { - self.inner(project_id) - .await - .squash(project_id, branch_id, commit_oid) - .await - } - - pub async fn update_commit_message( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - commit_oid: git::Oid, - message: &str, - ) -> Result<(), ControllerError> { - self.inner(project_id) - .await - .update_commit_message(project_id, branch_id, commit_oid, message) - .await - } - - pub async fn fetch_from_target( - &self, - project_id: &ProjectId, - askpass: Option<(AskpassBroker, String)>, - ) -> Result> { - self.inner(project_id) - .await - .fetch_from_target(project_id, askpass) - .await - } - - pub async fn move_commit( - &self, - project_id: &ProjectId, - target_branch_id: &BranchId, - commit_oid: git::Oid, - ) -> Result<(), ControllerError> { - self.inner(project_id) - .await - .move_commit(project_id, target_branch_id, commit_oid) - .await - } -} - -#[derive(Clone)] -struct ControllerInner { - local_data_dir: path::PathBuf, - semaphore: Arc, - - projects: projects::Controller, - users: 
users::Controller, - keys: keys::Controller, - helper: git::credentials::Helper, -} - -#[derive(Debug, thiserror::Error)] -pub enum ControllerError -where - E: Into, -{ - #[error(transparent)] - VerifyError(#[from] errors::VerifyError), - #[error(transparent)] - Action(E), - #[error(transparent)] - User(#[from] Error), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -impl ControllerInner { - pub fn new( - data_dir: &path::Path, - projects: &projects::Controller, - users: &users::Controller, - keys: &keys::Controller, - helper: &git::credentials::Helper, - ) -> Self { - Self { - local_data_dir: data_dir.to_path_buf(), - semaphore: Arc::new(Semaphore::new(1)), - projects: projects.clone(), - users: users.clone(), - keys: keys.clone(), - helper: helper.clone(), - } - } - - pub async fn create_commit( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - message: &str, - ownership: Option<&BranchOwnershipClaims>, - run_hooks: bool, - ) -> Result> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, user| { - let signing_key = project_repository - .config() - .sign_commits() - .context("failed to get sign commits option")? 
- .then(|| { - self.keys - .get_or_create() - .context("failed to get private key") - }) - .transpose()?; - - super::commit( - gb_repository, - project_repository, - branch_id, - message, - ownership, - signing_key.as_ref(), - user, - run_hooks, - ) - .map_err(Into::into) - }) - } - - pub fn can_apply_remote_branch( - &self, - project_id: &ProjectId, - branch_name: &git::RemoteRefname, - ) -> Result> { - let project = self.projects.get(project_id).map_err(Error::from)?; - let project_repository = - project_repository::Repository::open(&project).map_err(Error::from)?; - let user = self.users.get_user().map_err(Error::from)?; - let gb_repository = gb_repository::Repository::open( - &self.local_data_dir, - &project_repository, - user.as_ref(), - ) - .context("failed to open gitbutler repository")?; - super::is_remote_branch_mergeable(&gb_repository, &project_repository, branch_name) - .map_err(ControllerError::Action) - } - - pub fn can_apply_virtual_branch( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - ) -> Result { - let project = self.projects.get(project_id)?; - let project_repository = project_repository::Repository::open(&project)?; - let user = self.users.get_user().context("failed to get user")?; - let gb_repository = gb_repository::Repository::open( - &self.local_data_dir, - &project_repository, - user.as_ref(), - ) - .context("failed to open gitbutler repository")?; - super::is_virtual_branch_mergeable(&gb_repository, &project_repository, branch_id) - .map_err(Into::into) - } - - /// Retrieves the virtual branches state from the gitbutler repository (legacy state) - pub fn get_vbranches_state( - &self, - project_id: &ProjectId, - branch_ids: Vec, - ) -> Result { - let project = self.projects.get(project_id)?; - let project_repository = project_repository::Repository::open(&project)?; - let user = self.users.get_user().context("failed to get user")?; - let gb_repository = gb_repository::Repository::open( - &self.local_data_dir, - 
&project_repository, - user.as_ref(), - ) - .context("failed to open gitbutler repository")?; - let current_session = gb_repository - .get_or_create_current_session() - .context("failed to get or create current session")?; - let session_reader = crate::sessions::Reader::open(&gb_repository, ¤t_session) - .context("failed to open current session")?; - let target_reader = super::target::Reader::new(&session_reader); - let branch_reader = super::branch::Reader::new(&session_reader); - - let default_target = target_reader - .read_default() - .context("failed to read target")?; - - let mut branches: HashMap = HashMap::new(); - let mut branch_targets: HashMap = HashMap::new(); - - for branch_id in branch_ids { - let branch = branch_reader - .read(&branch_id) - .context("failed to read branch")?; - branches.insert(branch_id, branch); - let target = target_reader - .read(&branch_id) - .context("failed to read target")?; - branch_targets.insert(branch_id, target); - } - - Ok(VirtualBranches { - default_target: Some(default_target), - branch_targets, - branches, - }) - } - - pub async fn list_virtual_branches( - &self, - project_id: &ProjectId, - ) -> Result< - (Vec, bool, Vec), - ControllerError, - > { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, _| { - super::list_virtual_branches(gb_repository, project_repository).map_err(Into::into) - }) - } - - pub async fn create_virtual_branch( - &self, - project_id: &ProjectId, - create: &super::branch::BranchCreateRequest, - ) -> Result> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, _| { - let branch_id = - super::create_virtual_branch(gb_repository, project_repository, create)?.id; - Ok(branch_id) - }) - } - - pub async fn create_virtual_branch_from_branch( - &self, - project_id: &ProjectId, - branch: &git::Refname, - ) -> Result> { - let _permit = 
self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, user| { - let signing_key = project_repository - .config() - .sign_commits() - .context("failed to get sign commits option")? - .then(|| { - self.keys - .get_or_create() - .context("failed to get private key") - }) - .transpose()?; - - super::create_virtual_branch_from_branch( - gb_repository, - project_repository, - branch, - signing_key.as_ref(), - user, - ) - }) - } - - pub fn get_base_branch_data( - &self, - project_id: &ProjectId, - ) -> Result, ControllerError> { - let project = self.projects.get(project_id).map_err(Error::from)?; - let project_repository = - project_repository::Repository::open(&project).map_err(Error::from)?; - let user = self.users.get_user().map_err(Error::from)?; - let gb_repository = gb_repository::Repository::open( - &self.local_data_dir, - &project_repository, - user.as_ref(), - ) - .context("failed to open gitbutler repository")?; - let base_branch = super::get_base_branch_data(&gb_repository, &project_repository) - .map_err(ControllerError::Action)?; - Ok(base_branch) - } - - pub fn list_remote_commit_files( - &self, - project_id: &ProjectId, - commit_oid: git::Oid, - ) -> Result, Error> { - let project = self.projects.get(project_id)?; - let project_repository = project_repository::Repository::open(&project)?; - let use_context = project_repository - .project() - .use_diff_context - .unwrap_or(false); - let context_lines = if use_context { 3_u32 } else { 0_u32 }; - super::list_remote_commit_files( - &project_repository.git_repository, - commit_oid, - context_lines, - ) - .map_err(Into::into) - } - - pub fn set_base_branch( - &self, - project_id: &ProjectId, - target_branch: &git::RemoteRefname, - ) -> Result> { - let project = self.projects.get(project_id).map_err(Error::from)?; - let user = self.users.get_user().map_err(Error::from)?; - let project_repository = - 
project_repository::Repository::open(&project).map_err(Error::from)?; - let gb_repository = gb_repository::Repository::open( - &self.local_data_dir, - &project_repository, - user.as_ref(), - ) - .context("failed to open gitbutler repository")?; - - super::set_base_branch(&gb_repository, &project_repository, target_branch) - .map_err(ControllerError::Action) - } - - pub async fn merge_virtual_branch_upstream( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - ) -> Result<(), ControllerError> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, user| { - let signing_key = project_repository - .config() - .sign_commits() - .context("failed to get sign commits option")? - .then(|| { - self.keys - .get_or_create() - .context("failed to get private key") - }) - .transpose()?; - - super::merge_virtual_branch_upstream( - gb_repository, - project_repository, - branch_id, - signing_key.as_ref(), - user, - ) - .map_err(Into::into) - }) - } - - pub async fn update_base_branch( - &self, - project_id: &ProjectId, - ) -> Result<(), ControllerError> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, user| { - let signing_key = project_repository - .config() - .sign_commits() - .context("failed to get sign commits option")? 
- .then(|| { - self.keys - .get_or_create() - .context("failed to get private key") - }) - .transpose()?; - - super::update_base_branch( - gb_repository, - project_repository, - user, - signing_key.as_ref(), - ) - .map_err(Into::into) - }) - } - - pub async fn update_virtual_branch( - &self, - project_id: &ProjectId, - branch_update: super::branch::BranchUpdateRequest, - ) -> Result<(), ControllerError> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, _| { - super::update_branch(gb_repository, project_repository, branch_update)?; - Ok(()) - }) - } - - pub async fn delete_virtual_branch( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - ) -> Result<(), ControllerError> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, _| { - super::delete_branch(gb_repository, project_repository, branch_id)?; - Ok(()) - }) - } - - pub async fn apply_virtual_branch( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - ) -> Result<(), ControllerError> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, user| { - let signing_key = project_repository - .config() - .sign_commits() - .context("failed to get sign commits option")? 
- .then(|| { - self.keys - .get_or_create() - .context("failed to get private key") - }) - .transpose()?; - - super::apply_branch( - gb_repository, - project_repository, - branch_id, - signing_key.as_ref(), - user, - ) - .map_err(Into::into) - }) - } - - pub async fn unapply_ownership( - &self, - project_id: &ProjectId, - ownership: &BranchOwnershipClaims, - ) -> Result<(), ControllerError> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, _| { - super::unapply_ownership(gb_repository, project_repository, ownership) - .map_err(Into::into) - }) - } - - pub async fn reset_files( - &self, - project_id: &ProjectId, - ownership: &Vec, - ) -> Result<(), ControllerError> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |_, project_repository, _| { - super::reset_files(project_repository, ownership).map_err(Into::into) - }) - } - - pub async fn amend( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - ownership: &BranchOwnershipClaims, - ) -> Result> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, _| { - super::amend(gb_repository, project_repository, branch_id, ownership) - .map_err(Into::into) - }) - } - - pub async fn reset_virtual_branch( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - target_commit_oid: git::Oid, - ) -> Result<(), ControllerError> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, _| { - super::reset_branch( - gb_repository, - project_repository, - branch_id, - target_commit_oid, - ) - .map_err(Into::into) - }) - } - - pub async fn unapply_virtual_branch( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - ) -> Result<(), ControllerError> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, 
project_repository, _| { - super::unapply_branch(gb_repository, project_repository, branch_id) - .map(|_| ()) - .map_err(Into::into) - }) - } - - pub async fn push_virtual_branch( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - with_force: bool, - askpass: Option<(AskpassBroker, Option)>, - ) -> Result<(), ControllerError> { - let _permit = self.semaphore.acquire().await; - let helper = self.helper.clone(); - let project_id = *project_id; - let branch_id = *branch_id; - self.with_verify_branch_async(&project_id, move |gb_repository, project_repository, _| { - super::push( - project_repository, - gb_repository, - &branch_id, - with_force, - &helper, - askpass, - ) - })? - .await - .map_err(|e| ControllerError::Other(e.into()))? - .map_err(ControllerError::Action) - } - - pub async fn cherry_pick( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - commit_oid: git::Oid, - ) -> Result, ControllerError> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, _| { - super::cherry_pick(gb_repository, project_repository, branch_id, commit_oid) - .map_err(Into::into) - }) - } - - pub fn list_remote_branches( - &self, - project_id: &ProjectId, - ) -> Result, ControllerError> { - let project = self.projects.get(project_id).map_err(Error::from)?; - let project_repository = - project_repository::Repository::open(&project).map_err(Error::from)?; - let user = self.users.get_user().map_err(Error::from)?; - let gb_repository = gb_repository::Repository::open( - &self.local_data_dir, - &project_repository, - user.as_ref(), - ) - .context("failed to open gitbutler repository")?; - super::list_remote_branches(&gb_repository, &project_repository) - .map_err(ControllerError::Action) - } - - pub fn get_remote_branch_data( - &self, - project_id: &ProjectId, - refname: &git::Refname, - ) -> Result> { - let project = self.projects.get(project_id).map_err(Error::from)?; - let project_repository = - 
project_repository::Repository::open(&project).map_err(Error::from)?; - let user = self.users.get_user().map_err(Error::from)?; - let gb_repository = gb_repository::Repository::open( - &self.local_data_dir, - &project_repository, - user.as_ref(), - ) - .context("failed to open gitbutler repository")?; - super::get_branch_data(&gb_repository, &project_repository, refname) - .map_err(ControllerError::Action) - } - - pub async fn squash( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - commit_oid: git::Oid, - ) -> Result<(), ControllerError> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, _| { - super::squash(gb_repository, project_repository, branch_id, commit_oid) - .map_err(Into::into) - }) - } - - pub async fn update_commit_message( - &self, - project_id: &ProjectId, - branch_id: &BranchId, - commit_oid: git::Oid, - message: &str, - ) -> Result<(), ControllerError> { - let _permit = self.semaphore.acquire().await; - self.with_verify_branch(project_id, |gb_repository, project_repository, _| { - super::update_commit_message( - gb_repository, - project_repository, - branch_id, - commit_oid, - message, - ) - .map_err(Into::into) - }) - } - - pub async fn fetch_from_target( - &self, - project_id: &ProjectId, - askpass: Option<(AskpassBroker, String)>, - ) -> Result> { - let project = self.projects.get(project_id).map_err(Error::from)?; - let mut project_repository = - project_repository::Repository::open(&project).map_err(Error::from)?; - let user = self.users.get_user().map_err(Error::from)?; - let gb_repository = gb_repository::Repository::open( - &self.local_data_dir, - &project_repository, - user.as_ref(), - ) - .context("failed to open gitbutler repository")?; - - let default_target = gb_repository - .default_target() - .context("failed to get default target")? 
- .ok_or(FetchFromTargetError::DefaultTargetNotSet( - errors::DefaultTargetNotSetError { - project_id: *project_id, - }, - )) - .map_err(ControllerError::Action)?; - - let project_data_last_fetched = match project_repository - .fetch(default_target.branch.remote(), &self.helper, askpass) - .map_err(errors::FetchFromTargetError::Remote) - { - Ok(()) => projects::FetchResult::Fetched { - timestamp: std::time::SystemTime::now(), - }, - Err(error) => projects::FetchResult::Error { - timestamp: std::time::SystemTime::now(), - error: error.to_string(), - }, - }; - - let updated_project = self - .projects - .update(&projects::UpdateRequest { - id: *project_id, - project_data_last_fetched: Some(project_data_last_fetched), - ..Default::default() - }) - .await - .context("failed to update project")?; - - project_repository.set_project(&updated_project); - - let base_branch = target_to_base_branch(&project_repository, &default_target) - .context("failed to convert target to base branch")?; - - Ok(base_branch) - } - - pub async fn move_commit( - &self, - project_id: &ProjectId, - target_branch_id: &BranchId, - commit_oid: git::Oid, - ) -> Result<(), ControllerError> { - let _permit = self.semaphore.acquire().await; - - self.with_verify_branch(project_id, |gb_repository, project_repository, user| { - let signing_key = project_repository - .config() - .sign_commits() - .context("failed to get sign commits option")? 
- .then(|| { - self.keys - .get_or_create() - .context("failed to get private key") - }) - .transpose()?; - super::move_commit( - gb_repository, - project_repository, - target_branch_id, - commit_oid, - user, - signing_key.as_ref(), - ) - .map_err(Into::into) - }) - } -} - -impl ControllerInner { - fn with_verify_branch>( - &self, - project_id: &ProjectId, - action: impl FnOnce( - &gb_repository::Repository, - &project_repository::Repository, - Option<&users::User>, - ) -> Result, - ) -> Result> { - let project = self.projects.get(project_id).map_err(Error::from)?; - let project_repository = - project_repository::Repository::open(&project).map_err(Error::from)?; - let user = self.users.get_user().map_err(Error::from)?; - let gb_repository = gb_repository::Repository::open( - &self.local_data_dir, - &project_repository, - user.as_ref(), - ) - .context("failed to open gitbutler repository")?; - super::integration::verify_branch(&gb_repository, &project_repository)?; - action(&gb_repository, &project_repository, user.as_ref()).map_err(ControllerError::Action) - } - - fn with_verify_branch_async + Send + 'static>( - &self, - project_id: &ProjectId, - action: impl FnOnce( - &gb_repository::Repository, - &project_repository::Repository, - Option<&users::User>, - ) -> Result - + Send - + 'static, - ) -> Result>, ControllerError> { - let local_data_dir = self.local_data_dir.clone(); - let project = self.projects.get(project_id).map_err(Error::from)?; - let project_repository = - project_repository::Repository::open(&project).map_err(Error::from)?; - let user = self.users.get_user().map_err(Error::from)?; - let gb_repository = - gb_repository::Repository::open(&local_data_dir, &project_repository, user.as_ref()) - .context("failed to open gitbutler repository")?; - super::integration::verify_branch(&gb_repository, &project_repository)?; - Ok(tokio::task::spawn_blocking(move || { - action(&gb_repository, &project_repository, user.as_ref()) - })) - } -} diff --git 
a/gitbutler-app/src/virtual_branches/errors.rs b/gitbutler-app/src/virtual_branches/errors.rs deleted file mode 100644 index f772623fa..000000000 --- a/gitbutler-app/src/virtual_branches/errors.rs +++ /dev/null @@ -1,837 +0,0 @@ -use crate::{ - error::Error, - git, - project_repository::{self, RemoteError}, - projects::ProjectId, -}; - -use super::{branch::BranchOwnershipClaims, BranchId, GITBUTLER_INTEGRATION_REFERENCE}; - -#[derive(Debug, thiserror::Error)] -pub enum VerifyError { - #[error("head is detached")] - DetachedHead, - #[error("head is {0}")] - InvalidHead(String), - #[error("integration commit not found")] - NoIntegrationCommit, - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -impl From for crate::error::Error { - fn from(value: VerifyError) -> Self { - match value { - VerifyError::DetachedHead => crate::error::Error::UserError { - code: crate::error::Code::ProjectHead, - message: format!( - "Project in detached head state. Please checkout {0} to continue.", - GITBUTLER_INTEGRATION_REFERENCE.branch() - ), - }, - VerifyError::InvalidHead(head) => crate::error::Error::UserError { - code: crate::error::Code::ProjectHead, - message: format!( - "Project is on {}. 
Please checkout {} to continue.", - head, - GITBUTLER_INTEGRATION_REFERENCE.branch() - ), - }, - VerifyError::NoIntegrationCommit => crate::error::Error::UserError { - code: crate::error::Code::ProjectHead, - message: "GibButler's integration commit not found on head.".to_string(), - }, - VerifyError::Other(error) => { - tracing::error!(?error); - crate::error::Error::Unknown - } - } - } -} - -#[derive(Debug, thiserror::Error)] -pub enum DeleteBranchError { - #[error(transparent)] - UnapplyBranch(#[from] UnapplyBranchError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum ResetBranchError { - #[error("commit {0} not in the branch")] - CommitNotFoundInBranch(git::Oid), - #[error("branch not found")] - BranchNotFound(BranchNotFoundError), - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum ApplyBranchError { - #[error("project")] - Conflict(ProjectConflictError), - #[error("branch not found")] - BranchNotFound(BranchNotFoundError), - #[error("branch conflicts with other branches - sorry bro.")] - BranchConflicts(BranchId), - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum UnapplyOwnershipError { - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error("project is in conflict state")] - Conflict(ProjectConflictError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum UnapplyBranchError { - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error("branch not found")] - BranchNotFound(BranchNotFoundError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] 
-pub enum FlushAppliedVbranchesError { - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum ListVirtualBranchesError { - #[error("project")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum CreateVirtualBranchError { - #[error("project")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum MergeVirtualBranchUpstreamError { - #[error("project")] - Conflict(ProjectConflictError), - #[error("branch not found")] - BranchNotFound(BranchNotFoundError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum CommitError { - #[error("branch not found")] - BranchNotFound(BranchNotFoundError), - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error("will not commit conflicted files")] - Conflicted(ProjectConflictError), - #[error("commit hook rejected")] - CommitHookRejected(String), - #[error("commit msg hook rejected")] - CommitMsgHookRejected(String), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum PushError { - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error("branch not found")] - BranchNotFound(BranchNotFoundError), - #[error(transparent)] - Remote(#[from] project_repository::RemoteError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum IsRemoteBranchMergableError { - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error("branch not found")] - BranchNotFound(git::RemoteRefname), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum IsVirtualBranchMergeable { - 
#[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error("branch not found")] - BranchNotFound(BranchNotFoundError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug)] -pub struct ForcePushNotAllowedError { - pub project_id: ProjectId, -} - -impl From for Error { - fn from(_value: ForcePushNotAllowedError) -> Self { - Error::UserError { - code: crate::error::Code::Branches, - message: "Action will lead to force pushing, which is not allowed for this".to_string(), - } - } -} - -#[derive(Debug, thiserror::Error)] -pub enum AmendError { - #[error("force push not allowed")] - ForcePushNotAllowed(ForcePushNotAllowedError), - #[error("target ownership not found")] - TargetOwnerhshipNotFound(BranchOwnershipClaims), - #[error("branch has no commits")] - BranchHasNoCommits, - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error("branch not found")] - BranchNotFound(BranchNotFoundError), - #[error("project is in conflict state")] - Conflict(ProjectConflictError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} -#[derive(Debug, thiserror::Error)] -pub enum CherryPickError { - #[error("target commit {0} not found ")] - CommitNotFound(git::Oid), - #[error("can not cherry pick not applied branch")] - NotApplied, - #[error("project is in conflict state")] - Conflict(ProjectConflictError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum SquashError { - #[error("force push not allowed")] - ForcePushNotAllowed(ForcePushNotAllowedError), - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error("commit {0} not in the branch")] - CommitNotFound(git::Oid), - #[error("branch not found")] - BranchNotFound(BranchNotFoundError), - #[error("project is in conflict state")] - Conflict(ProjectConflictError), - #[error("can not squash root commit")] - CantSquashRootCommit, - 
#[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum FetchFromTargetError { - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error("failed to fetch")] - Remote(RemoteError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -impl From for Error { - fn from(value: FetchFromTargetError) -> Self { - match value { - FetchFromTargetError::DefaultTargetNotSet(error) => error.into(), - FetchFromTargetError::Remote(error) => error.into(), - FetchFromTargetError::Other(error) => { - tracing::error!(?error, "fetch from target error"); - Error::Unknown - } - } - } -} - -#[derive(Debug, thiserror::Error)] -pub enum UpdateCommitMessageError { - #[error("force push not allowed")] - ForcePushNotAllowed(ForcePushNotAllowedError), - #[error("empty message")] - EmptyMessage, - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error("commit {0} not in the branch")] - CommitNotFound(git::Oid), - #[error("branch not found")] - BranchNotFound(BranchNotFoundError), - #[error("project is in conflict state")] - Conflict(ProjectConflictError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -impl From for Error { - fn from(value: UpdateCommitMessageError) -> Self { - match value { - UpdateCommitMessageError::ForcePushNotAllowed(error) => error.into(), - UpdateCommitMessageError::EmptyMessage => Error::UserError { - message: "Commit message can not be empty".to_string(), - code: crate::error::Code::Branches, - }, - UpdateCommitMessageError::DefaultTargetNotSet(error) => error.into(), - UpdateCommitMessageError::CommitNotFound(oid) => Error::UserError { - message: format!("Commit {} not found", oid), - code: crate::error::Code::Branches, - }, - UpdateCommitMessageError::BranchNotFound(error) => error.into(), - UpdateCommitMessageError::Conflict(error) => error.into(), - UpdateCommitMessageError::Other(error) => { - tracing::error!(?error, 
"update commit message error"); - Error::Unknown - } - } - } -} - -#[derive(Debug, thiserror::Error)] -pub enum GetBaseBranchDataError { - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum SetBaseBranchError { - #[error("wd is dirty")] - DirtyWorkingDirectory, - #[error("branch {0} not found")] - BranchNotFound(git::RemoteRefname), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum UpdateBaseBranchError { - #[error("project is in conflicting state")] - Conflict(ProjectConflictError), - #[error("no default target set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum MoveCommitError { - #[error("source branch contains hunks locked to the target commit")] - SourceLocked, - #[error("project is in conflicted state")] - Conflicted(ProjectConflictError), - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error("branch not found")] - BranchNotFound(BranchNotFoundError), - #[error("commit not found")] - CommitNotFound(git::Oid), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -impl From for crate::error::Error { - fn from(value: MoveCommitError) -> Self { - match value { - MoveCommitError::SourceLocked => Error::UserError { - message: "Source branch contains hunks locked to the target commit".to_string(), - code: crate::error::Code::Branches, - }, - MoveCommitError::Conflicted(error) => error.into(), - MoveCommitError::DefaultTargetNotSet(error) => error.into(), - MoveCommitError::BranchNotFound(error) => error.into(), - MoveCommitError::CommitNotFound(oid) => Error::UserError { - message: format!("Commit {} not found", oid), - code: crate::error::Code::Branches, - }, - MoveCommitError::Other(error) => { - tracing::error!(?error, "move commit to vbranch error"); - Error::Unknown - } - } - } -} - 
-#[derive(Debug, thiserror::Error)] -pub enum CreateVirtualBranchFromBranchError { - #[error("failed to apply")] - ApplyBranch(ApplyBranchError), - #[error("can't make branch from default target")] - CantMakeBranchFromDefaultTarget, - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error("{0} not found")] - BranchNotFound(git::Refname), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug)] -pub struct ProjectConflictError { - pub project_id: ProjectId, -} - -impl From for Error { - fn from(value: ProjectConflictError) -> Self { - Error::UserError { - code: crate::error::Code::ProjectConflict, - message: format!("project {} is in a conflicted state", value.project_id), - } - } -} - -#[derive(Debug)] -pub struct DefaultTargetNotSetError { - pub project_id: ProjectId, -} - -impl From for Error { - fn from(value: DefaultTargetNotSetError) -> Self { - Error::UserError { - code: crate::error::Code::ProjectConflict, - message: format!( - "project {} does not have a default target set", - value.project_id - ), - } - } -} - -#[derive(Debug)] -pub struct BranchNotFoundError { - pub project_id: ProjectId, - pub branch_id: BranchId, -} - -impl From for Error { - fn from(value: BranchNotFoundError) -> Self { - Error::UserError { - code: crate::error::Code::Branches, - message: format!("branch {} not found", value.branch_id), - } - } -} - -#[derive(Debug, thiserror::Error)] -pub enum UpdateBranchError { - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error("branch not found")] - BranchNotFound(BranchNotFoundError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -impl From for Error { - fn from(value: UpdateBranchError) -> Self { - match value { - UpdateBranchError::DefaultTargetNotSet(error) => error.into(), - UpdateBranchError::BranchNotFound(error) => error.into(), - UpdateBranchError::Other(error) => { - tracing::error!(?error, "update branch error"); - 
Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: CreateVirtualBranchFromBranchError) -> Self { - match value { - CreateVirtualBranchFromBranchError::ApplyBranch(error) => error.into(), - CreateVirtualBranchFromBranchError::CantMakeBranchFromDefaultTarget => { - Error::UserError { - message: "Can not create a branch from default target".to_string(), - code: crate::error::Code::Branches, - } - } - CreateVirtualBranchFromBranchError::DefaultTargetNotSet(error) => error.into(), - CreateVirtualBranchFromBranchError::BranchNotFound(name) => Error::UserError { - message: format!("Branch {} not found", name), - code: crate::error::Code::Branches, - }, - CreateVirtualBranchFromBranchError::Other(error) => { - tracing::error!(?error, "create virtual branch from branch error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: CommitError) -> Self { - match value { - CommitError::BranchNotFound(error) => error.into(), - CommitError::DefaultTargetNotSet(error) => error.into(), - CommitError::Conflicted(error) => error.into(), - CommitError::CommitHookRejected(error) => Error::UserError { - code: crate::error::Code::PreCommitHook, - message: error, - }, - CommitError::CommitMsgHookRejected(error) => Error::UserError { - code: crate::error::Code::CommitMsgHook, - message: error, - }, - CommitError::Other(error) => { - tracing::error!(?error, "commit error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: IsRemoteBranchMergableError) -> Self { - match value { - IsRemoteBranchMergableError::BranchNotFound(name) => Error::UserError { - message: format!("Remote branch {} not found", name), - code: crate::error::Code::Branches, - }, - IsRemoteBranchMergableError::DefaultTargetNotSet(error) => error.into(), - IsRemoteBranchMergableError::Other(error) => { - tracing::error!(?error, "is remote branch mergable error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: DeleteBranchError) -> 
Self { - match value { - DeleteBranchError::UnapplyBranch(error) => error.into(), - DeleteBranchError::Other(error) => { - tracing::error!(?error, "delete branch error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: ApplyBranchError) -> Self { - match value { - ApplyBranchError::DefaultTargetNotSet(error) => error.into(), - ApplyBranchError::Conflict(error) => error.into(), - ApplyBranchError::BranchNotFound(error) => error.into(), - ApplyBranchError::BranchConflicts(id) => Error::UserError { - message: format!("Branch {} is in a conflicing state", id), - code: crate::error::Code::Branches, - }, - ApplyBranchError::Other(error) => { - tracing::error!(?error, "apply branch error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: IsVirtualBranchMergeable) -> Self { - match value { - IsVirtualBranchMergeable::BranchNotFound(error) => error.into(), - IsVirtualBranchMergeable::DefaultTargetNotSet(error) => error.into(), - IsVirtualBranchMergeable::Other(error) => { - tracing::error!(?error, "is remote branch mergable error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: ListVirtualBranchesError) -> Self { - match value { - ListVirtualBranchesError::DefaultTargetNotSet(error) => error.into(), - ListVirtualBranchesError::Other(error) => { - tracing::error!(?error, "list virtual branches error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: CreateVirtualBranchError) -> Self { - match value { - CreateVirtualBranchError::DefaultTargetNotSet(error) => error.into(), - CreateVirtualBranchError::Other(error) => { - tracing::error!(?error, "create virtual branch error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: GetBaseBranchDataError) -> Self { - match value { - GetBaseBranchDataError::Other(error) => { - tracing::error!(?error, "get base branch data error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: 
ListRemoteCommitFilesError) -> Self { - match value { - ListRemoteCommitFilesError::CommitNotFound(oid) => Error::UserError { - message: format!("Commit {} not found", oid), - code: crate::error::Code::Branches, - }, - ListRemoteCommitFilesError::Other(error) => { - tracing::error!(?error, "list remote commit files error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: SetBaseBranchError) -> Self { - match value { - SetBaseBranchError::DirtyWorkingDirectory => Error::UserError { - message: "Current HEAD is dirty.".to_string(), - code: crate::error::Code::ProjectConflict, - }, - SetBaseBranchError::BranchNotFound(name) => Error::UserError { - message: format!("remote branch '{}' not found", name), - code: crate::error::Code::Branches, - }, - SetBaseBranchError::Other(error) => { - tracing::error!(?error, "set base branch error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: MergeVirtualBranchUpstreamError) -> Self { - match value { - MergeVirtualBranchUpstreamError::BranchNotFound(error) => error.into(), - MergeVirtualBranchUpstreamError::Conflict(error) => error.into(), - MergeVirtualBranchUpstreamError::Other(error) => { - tracing::error!(?error, "merge virtual branch upstream error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: UpdateBaseBranchError) -> Self { - match value { - UpdateBaseBranchError::Conflict(error) => error.into(), - UpdateBaseBranchError::DefaultTargetNotSet(error) => error.into(), - UpdateBaseBranchError::Other(error) => { - tracing::error!(?error, "update base branch error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: UnapplyOwnershipError) -> Self { - match value { - UnapplyOwnershipError::DefaultTargetNotSet(error) => error.into(), - UnapplyOwnershipError::Conflict(error) => error.into(), - UnapplyOwnershipError::Other(error) => { - tracing::error!(?error, "unapply ownership error"); - Error::Unknown - } - } - } -} - -impl 
From for Error { - fn from(value: AmendError) -> Self { - match value { - AmendError::ForcePushNotAllowed(error) => error.into(), - AmendError::Conflict(error) => error.into(), - AmendError::BranchNotFound(error) => error.into(), - AmendError::BranchHasNoCommits => Error::UserError { - message: "Branch has no commits - there is nothing to amend to".to_string(), - code: crate::error::Code::Branches, - }, - AmendError::DefaultTargetNotSet(error) => error.into(), - AmendError::TargetOwnerhshipNotFound(_) => Error::UserError { - message: "target ownership not found".to_string(), - code: crate::error::Code::Branches, - }, - AmendError::Other(error) => { - tracing::error!(?error, "amend error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: ResetBranchError) -> Self { - match value { - ResetBranchError::BranchNotFound(error) => error.into(), - ResetBranchError::DefaultTargetNotSet(error) => error.into(), - ResetBranchError::CommitNotFoundInBranch(oid) => Error::UserError { - code: crate::error::Code::Branches, - message: format!("commit {} not found", oid), - }, - ResetBranchError::Other(error) => { - tracing::error!(?error, "reset branch error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: UnapplyBranchError) -> Self { - match value { - UnapplyBranchError::DefaultTargetNotSet(error) => error.into(), - UnapplyBranchError::BranchNotFound(error) => error.into(), - UnapplyBranchError::Other(error) => { - tracing::error!(?error, "unapply branch error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: PushError) -> Self { - match value { - PushError::Remote(error) => error.into(), - PushError::BranchNotFound(error) => error.into(), - PushError::DefaultTargetNotSet(error) => error.into(), - PushError::Other(error) => { - tracing::error!(?error, "push error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: FlushAppliedVbranchesError) -> Self { - match value { - 
FlushAppliedVbranchesError::Other(error) => { - tracing::error!(?error, "flush workspace error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: CherryPickError) -> Self { - match value { - CherryPickError::NotApplied => Error::UserError { - message: "can not cherry pick non applied branch".to_string(), - code: crate::error::Code::Branches, - }, - CherryPickError::Conflict(error) => error.into(), - CherryPickError::CommitNotFound(oid) => Error::UserError { - message: format!("commit {oid} not found"), - code: crate::error::Code::Branches, - }, - CherryPickError::Other(error) => { - tracing::error!(?error, "cherry pick error"); - Error::Unknown - } - } - } -} - -#[derive(Debug, thiserror::Error)] -pub enum ListRemoteCommitFilesError { - #[error("failed to find commit {0}")] - CommitNotFound(git::Oid), - #[error("failed to find commit")] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum ListRemoteBranchesError { - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(Debug, thiserror::Error)] -pub enum GetRemoteBranchDataError { - #[error("default target not set")] - DefaultTargetNotSet(DefaultTargetNotSetError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -impl From for Error { - fn from(value: GetRemoteBranchDataError) -> Self { - match value { - GetRemoteBranchDataError::DefaultTargetNotSet(error) => error.into(), - GetRemoteBranchDataError::Other(error) => { - tracing::error!(?error, "get remote branch data error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: ListRemoteBranchesError) -> Self { - match value { - ListRemoteBranchesError::DefaultTargetNotSet(error) => error.into(), - ListRemoteBranchesError::Other(error) => { - tracing::error!(?error, "list remote branches error"); - Error::Unknown - } - } - } -} - -impl From for Error { - fn from(value: 
SquashError) -> Self { - match value { - SquashError::ForcePushNotAllowed(error) => error.into(), - SquashError::DefaultTargetNotSet(error) => error.into(), - SquashError::BranchNotFound(error) => error.into(), - SquashError::Conflict(error) => error.into(), - SquashError::CantSquashRootCommit => Error::UserError { - message: "can not squash root branch commit".to_string(), - code: crate::error::Code::Branches, - }, - SquashError::CommitNotFound(oid) => Error::UserError { - message: format!("commit {oid} not found"), - code: crate::error::Code::Branches, - }, - SquashError::Other(error) => { - tracing::error!(?error, "squash error"); - Error::Unknown - } - } - } -} diff --git a/gitbutler-app/src/virtual_branches/files.rs b/gitbutler-app/src/virtual_branches/files.rs deleted file mode 100644 index 508500aa8..000000000 --- a/gitbutler-app/src/virtual_branches/files.rs +++ /dev/null @@ -1,96 +0,0 @@ -use std::path; - -use anyhow::{Context, Result}; -use serde::Serialize; - -use crate::git::{self, diff, show}; - -use super::errors; -use crate::virtual_branches::context; - -#[derive(Debug, PartialEq, Clone, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct RemoteBranchFile { - pub path: path::PathBuf, - pub hunks: Vec, - pub binary: bool, -} - -pub fn list_remote_commit_files( - repository: &git::Repository, - commit_oid: git::Oid, - context_lines: u32, -) -> Result, errors::ListRemoteCommitFilesError> { - let commit = match repository.find_commit(commit_oid) { - Ok(commit) => Ok(commit), - Err(git::Error::NotFound(_)) => Err(errors::ListRemoteCommitFilesError::CommitNotFound( - commit_oid, - )), - Err(error) => Err(errors::ListRemoteCommitFilesError::Other(error.into())), - }?; - - if commit.parent_count() == 0 { - return Ok(vec![]); - } - - let parent = commit.parent(0).context("failed to get parent commit")?; - let commit_tree = commit.tree().context("failed to get commit tree")?; - let parent_tree = parent.tree().context("failed to get parent tree")?; - 
let diff = diff::trees(repository, &parent_tree, &commit_tree, context_lines)?; - let diff = diff::diff_files_to_hunks(&diff); - - let mut files = diff - .into_iter() - .map(|(file_path, hunks)| RemoteBranchFile { - path: file_path.clone(), - hunks: hunks.clone(), - binary: hunks.iter().any(|h| h.binary), - }) - .collect::>(); - - if context_lines == 0 { - files = files_with_hunk_context(repository, &parent_tree, files, 3) - .context("failed to add context to hunk")?; - } - Ok(files) -} - -fn files_with_hunk_context( - repository: &git::Repository, - parent_tree: &git::Tree, - mut files: Vec, - context_lines: usize, -) -> Result> { - for file in &mut files { - if file.binary { - continue; - } - // Get file content as it looked before the diffs - let file_content_before = - show::show_file_at_tree(repository, file.path.clone(), parent_tree) - .context("failed to get file contents at HEAD")?; - let file_lines_before = file_content_before.split('\n').collect::>(); - - file.hunks = file - .hunks - .iter() - .map(|hunk| { - if hunk.diff.is_empty() { - // noop on empty diff - hunk.clone() - } else { - context::hunk_with_context( - &hunk.diff, - hunk.old_start as usize, - hunk.new_start as usize, - hunk.binary, - context_lines, - &file_lines_before, - hunk.change_type, - ) - } - }) - .collect::>(); - } - Ok(files) -} diff --git a/gitbutler-app/src/virtual_branches/integration.rs b/gitbutler-app/src/virtual_branches/integration.rs deleted file mode 100644 index 833718083..000000000 --- a/gitbutler-app/src/virtual_branches/integration.rs +++ /dev/null @@ -1,351 +0,0 @@ -use std::io::{Read, Write}; - -use anyhow::{Context, Result}; -use lazy_static::lazy_static; - -use crate::{ - gb_repository, - git::{self}, - project_repository::{self, LogUntil}, - reader, sessions, - virtual_branches::branch::BranchCreateRequest, -}; - -use super::errors; - -lazy_static! 
{ - pub static ref GITBUTLER_INTEGRATION_REFERENCE: git::LocalRefname = - git::LocalRefname::new("gitbutler/integration", None); -} - -const GITBUTLER_INTEGRATION_COMMIT_AUTHOR_NAME: &str = "GitButler"; -const GITBUTLER_INTEGRATION_COMMIT_AUTHOR_EMAIL: &str = "gitbutler@gitbutler.com"; - -pub fn update_gitbutler_integration( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, -) -> Result<()> { - let target = gb_repository - .default_target() - .context("failed to get target")? - .context("no target set")?; - - let repo = &project_repository.git_repository; - - // write the currrent target sha to a temp branch as a parent - repo.reference( - &GITBUTLER_INTEGRATION_REFERENCE.clone().into(), - target.sha, - true, - "update target", - )?; - - // get commit object from target.sha - let target_commit = repo.find_commit(target.sha)?; - - // get current repo head for reference - let head = repo.head()?; - let mut prev_head = head.name().unwrap().to_string(); - let mut prev_sha = head.target().unwrap().to_string(); - let integration_file = repo.path().join("integration"); - if prev_head == GITBUTLER_INTEGRATION_REFERENCE.to_string() { - // read the .git/integration file - if let Ok(mut integration_file) = std::fs::File::open(integration_file) { - let mut prev_data = String::new(); - integration_file.read_to_string(&mut prev_data)?; - let parts: Vec<&str> = prev_data.split(':').collect(); - - prev_head = parts[0].to_string(); - prev_sha = parts[1].to_string(); - } - } else { - // we are moving from a regular branch to our gitbutler integration branch, save the original - // write a file to .git/integration with the previous head and name - let mut file = std::fs::File::create(integration_file)?; - prev_head.push(':'); - prev_head.push_str(&prev_sha); - file.write_all(prev_head.as_bytes())?; - } - - // commit index to temp head for the merge - repo.set_head(&GITBUTLER_INTEGRATION_REFERENCE.clone().into()) - .context("failed to 
set head")?; - - let latest_session = gb_repository - .get_latest_session() - .context("failed to get latest session")? - .context("latest session not found")?; - let session_reader = sessions::Reader::open(gb_repository, &latest_session) - .context("failed to open current session")?; - - // get all virtual branches, we need to try to update them all - let all_virtual_branches = super::iterator::BranchIterator::new(&session_reader) - .context("failed to create branch iterator")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")?; - - let applied_virtual_branches = all_virtual_branches - .iter() - .filter(|branch| branch.applied) - .collect::>(); - - let base_tree = target_commit.tree()?; - let mut final_tree = target_commit.tree()?; - for branch in &applied_virtual_branches { - // merge this branches tree with our tree - let branch_head = repo.find_commit(branch.head)?; - let branch_tree = branch_head.tree()?; - if let Ok(mut result) = repo.merge_trees(&base_tree, &final_tree, &branch_tree) { - if !result.has_conflicts() { - let final_tree_oid = result.write_tree_to(repo)?; - final_tree = repo.find_tree(final_tree_oid)?; - } - } - } - - // message that says how to get back to where they were - let mut message = "GitButler Integration Commit".to_string(); - message.push_str("\n\n"); - message.push_str( - "This is an integration commit for the virtual branches that GitButler is tracking.\n\n", - ); - message.push_str( - "Due to GitButler managing multiple virtual branches, you cannot switch back and\n", - ); - message.push_str("forth between git branches and virtual branches easily. 
\n\n"); - - message.push_str("If you switch to another branch, GitButler will need to be reinitialized.\n"); - message.push_str("If you commit on this branch, GitButler will throw it away.\n\n"); - message.push_str("Here are the branches that are currently applied:\n"); - for branch in &applied_virtual_branches { - message.push_str(" - "); - message.push_str(branch.name.as_str()); - message.push_str(format!(" ({})", &branch.refname()).as_str()); - message.push('\n'); - - if branch.head != target.sha { - message.push_str(" branch head: "); - message.push_str(&branch.head.to_string()); - message.push('\n'); - } - for file in &branch.ownership.claims { - message.push_str(" - "); - message.push_str(&file.file_path.display().to_string()); - message.push('\n'); - } - } - message.push_str("\nYour previous branch was: "); - message.push_str(&prev_head); - message.push_str("\n\n"); - message.push_str("The sha for that commit was: "); - message.push_str(&prev_sha); - message.push_str("\n\n"); - message.push_str("For more information about what we're doing here, check out our docs:\n"); - message.push_str("https://docs.gitbutler.com/features/virtual-branches/integration-branch\n"); - - let committer = git::Signature::now( - GITBUTLER_INTEGRATION_COMMIT_AUTHOR_NAME, - GITBUTLER_INTEGRATION_COMMIT_AUTHOR_EMAIL, - )?; - - repo.commit( - Some(&"refs/heads/gitbutler/integration".parse().unwrap()), - &committer, - &committer, - &message, - &final_tree, - &[&target_commit], - )?; - - // write final_tree as the current index - let mut index = repo.index()?; - index.read_tree(&final_tree)?; - index.write()?; - - // finally, update the refs/gitbutler/ heads to the states of the current virtual branches - for branch in &all_virtual_branches { - let wip_tree = repo.find_tree(branch.tree)?; - let mut branch_head = repo.find_commit(branch.head)?; - let head_tree = branch_head.tree()?; - - // create a wip commit if there is wip - if head_tree.id() != wip_tree.id() { - let mut message = 
"GitButler WIP Commit".to_string(); - message.push_str("\n\n"); - message.push_str("This is a WIP commit for the virtual branch '"); - message.push_str(branch.name.as_str()); - message.push_str("'\n\n"); - message.push_str("This commit is used to store the state of the virtual branch\n"); - message.push_str("while you are working on it. It is not meant to be used for\n"); - message.push_str("anything else.\n\n"); - let branch_head_oid = repo.commit( - None, - &committer, - &committer, - &message, - &wip_tree, - &[&branch_head], - )?; - branch_head = repo.find_commit(branch_head_oid)?; - } - - repo.reference( - &branch.refname().into(), - branch_head.id(), - true, - "update virtual branch", - )?; - } - - Ok(()) -} - -pub fn verify_branch( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, -) -> Result<(), errors::VerifyError> { - verify_head_is_set(project_repository)?; - verify_head_is_clean(gb_repository, project_repository)?; - Ok(()) -} - -fn verify_head_is_clean( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, -) -> Result<(), errors::VerifyError> { - let head_commit = project_repository - .git_repository - .head() - .context("failed to get head")? 
- .peel_to_commit() - .context("failed to peel to commit")?; - - let mut extra_commits = project_repository - .log( - head_commit.id(), - LogUntil::When(Box::new(|commit| Ok(is_integration_commit(commit)))), - ) - .context("failed to get log")?; - - let integration_commit = extra_commits.pop(); - - if integration_commit.is_none() { - // no integration commit found - return Err(errors::VerifyError::NoIntegrationCommit); - } - - if extra_commits.is_empty() { - // no extra commits found, so we're good - return Ok(()); - } - - project_repository - .git_repository - .reset( - integration_commit.as_ref().unwrap(), - git2::ResetType::Soft, - None, - ) - .context("failed to reset to integration commit")?; - - let mut new_branch = super::create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest { - name: extra_commits - .last() - .unwrap() - .message() - .map(ToString::to_string), - ..Default::default() - }, - ) - .context("failed to create virtual branch")?; - - // rebasing the extra commits onto the new branch - let writer = super::branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - extra_commits.reverse(); - let mut head = new_branch.head; - for commit in extra_commits { - let new_branch_head = project_repository - .git_repository - .find_commit(head) - .context("failed to find new branch head")?; - - let rebased_commit_oid = project_repository - .git_repository - .commit( - None, - &commit.author(), - &commit.committer(), - commit.message().unwrap(), - &commit.tree().unwrap(), - &[&new_branch_head], - ) - .context(format!( - "failed to rebase commit {} onto new branch", - commit.id() - ))?; - - let rebased_commit = project_repository - .git_repository - .find_commit(rebased_commit_oid) - .context(format!( - "failed to find rebased commit {}", - rebased_commit_oid - ))?; - - new_branch.head = rebased_commit.id(); - new_branch.tree = rebased_commit.tree_id(); - writer - .write(&mut 
new_branch) - .context("failed to write branch")?; - - head = rebased_commit.id(); - } - Ok(()) -} - -fn verify_head_is_set( - project_repository: &project_repository::Repository, -) -> Result<(), errors::VerifyError> { - match project_repository - .get_head() - .context("failed to get head") - .map_err(errors::VerifyError::Other)? - .name() - { - Some(refname) if refname.to_string() == GITBUTLER_INTEGRATION_REFERENCE.to_string() => { - Ok(()) - } - None => Err(errors::VerifyError::DetachedHead), - Some(head_name) => Err(errors::VerifyError::InvalidHead(head_name.to_string())), - } -} - -fn is_integration_commit(commit: &git::Commit) -> bool { - is_integration_commit_author(commit) && is_integration_commit_message(commit) -} - -fn is_integration_commit_author(commit: &git::Commit) -> bool { - is_integration_commit_author_email(commit) && is_integration_commit_author_name(commit) -} - -fn is_integration_commit_author_email(commit: &git::Commit) -> bool { - commit.author().email().map_or(false, |email| { - email == GITBUTLER_INTEGRATION_COMMIT_AUTHOR_EMAIL - }) -} - -fn is_integration_commit_author_name(commit: &git::Commit) -> bool { - commit.author().name().map_or(false, |name| { - name == GITBUTLER_INTEGRATION_COMMIT_AUTHOR_NAME - }) -} - -fn is_integration_commit_message(commit: &git::Commit) -> bool { - commit.message().map_or(false, |message| { - message.starts_with("GitButler Integration Commit") - }) -} diff --git a/gitbutler-app/src/virtual_branches/iterator.rs b/gitbutler-app/src/virtual_branches/iterator.rs deleted file mode 100644 index c169bf62d..000000000 --- a/gitbutler-app/src/virtual_branches/iterator.rs +++ /dev/null @@ -1,56 +0,0 @@ -use std::collections::HashSet; - -use anyhow::Result; - -use crate::sessions; - -use super::branch::{self, BranchId}; - -pub struct BranchIterator<'i> { - branch_reader: branch::Reader<'i>, - ids: Vec, -} - -impl<'i> BranchIterator<'i> { - pub fn new(session_reader: &'i sessions::Reader<'i>) -> Result { - let reader = 
session_reader.reader(); - let ids_itarator = reader - .list_files("branches")? - .into_iter() - .map(|file_path| { - file_path - .iter() - .next() - .unwrap() - .to_string_lossy() - .to_string() - }) - .filter(|file_path| file_path != "selected") - .filter(|file_path| file_path != "target"); - let unique_ids: HashSet = ids_itarator.collect(); - let mut ids: Vec = unique_ids - .into_iter() - .map(|id| id.parse()) - .filter_map(Result::ok) - .collect(); - ids.sort(); - Ok(Self { - branch_reader: branch::Reader::new(session_reader), - ids, - }) - } -} - -impl Iterator for BranchIterator<'_> { - type Item = Result; - - fn next(&mut self) -> Option { - if self.ids.is_empty() { - return None; - } - - let id = self.ids.remove(0); - let branch = self.branch_reader.read(&id); - Some(branch) - } -} diff --git a/gitbutler-app/src/virtual_branches/remote.rs b/gitbutler-app/src/virtual_branches/remote.rs deleted file mode 100644 index 0cb0d13dc..000000000 --- a/gitbutler-app/src/virtual_branches/remote.rs +++ /dev/null @@ -1,185 +0,0 @@ -use anyhow::{Context, Result}; -use serde::Serialize; - -use crate::{ - gb_repository, git, - project_repository::{self, LogUntil}, -}; - -use super::{errors, Author}; - -// this struct is a mapping to the view `RemoteBranch` type in Typescript -// found in src-tauri/src/routes/repo/[project_id]/types.ts -// -// it holds data calculated for presentation purposes of one Git branch -// with comparison data to the Target commit, determining if it is mergeable, -// and how far ahead or behind the Target it is. -// an array of them can be requested from the frontend to show in the sidebar -// Tray and should only contain branches that have not been converted into -// virtual branches yet (ie, we have no `Branch` struct persisted in our data. 
-#[derive(Debug, Clone, Serialize, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct RemoteBranch { - pub sha: git::Oid, - pub name: git::Refname, - pub upstream: Option, - pub last_commit_timestamp_ms: Option, - pub last_commit_author: Option, -} - -#[derive(Debug, Clone, Serialize, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct RemoteBranchData { - pub sha: git::Oid, - pub name: git::Refname, - pub upstream: Option, - pub behind: u32, - pub commits: Vec, -} - -#[derive(Debug, Clone, PartialEq, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct RemoteCommit { - pub id: String, - pub description: String, - pub created_at: u128, - pub author: Author, -} - -pub fn list_remote_branches( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, -) -> Result, errors::ListRemoteBranchesError> { - let default_target = gb_repository - .default_target() - .context("failed to get default target")? - .ok_or_else(|| { - errors::ListRemoteBranchesError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - - let remote_branches = project_repository - .git_repository - .branches(Some(git2::BranchType::Remote)) - .context("failed to list remote branches")? - .flatten() - .map(|(branch, _)| branch) - .map(|branch| branch_to_remote_branch(&branch)) - .collect::>>() - .context("failed to convert branches")? - .into_iter() - .flatten() - .filter(|branch| branch.name.branch() != Some(default_target.branch.branch())) - .collect::>(); - - Ok(remote_branches) -} - -pub fn get_branch_data( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - refname: &git::Refname, -) -> Result { - let default_target = gb_repository - .default_target() - .context("failed to get default target")? 
- .ok_or_else(|| { - errors::GetRemoteBranchDataError::DefaultTargetNotSet( - errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }, - ) - })?; - - let branch = project_repository - .git_repository - .find_branch(refname) - .context(format!("failed to find branch with refname {refname}"))?; - - let branch_data = branch_to_remote_branch_data(project_repository, &branch, default_target.sha) - .context("failed to get branch data")?; - - branch_data - .ok_or_else(|| { - errors::GetRemoteBranchDataError::Other(anyhow::anyhow!("no data found for branch")) - }) - .map(|branch_data| RemoteBranchData { - sha: branch_data.sha, - name: branch_data.name, - upstream: branch_data.upstream, - behind: branch_data.behind, - commits: branch_data.commits, - }) -} - -pub fn branch_to_remote_branch(branch: &git::Branch) -> Result> { - let commit = branch.peel_to_commit()?; - branch - .target() - .map(|sha| { - let name = git::Refname::try_from(branch).context("could not get branch name")?; - Ok(RemoteBranch { - sha, - upstream: if let git::Refname::Local(local_name) = &name { - local_name.remote().cloned() - } else { - None - }, - name, - last_commit_timestamp_ms: commit - .time() - .seconds() - .try_into() - .map(|t: u128| t * 1000) - .ok(), - last_commit_author: commit.author().name().map(std::string::ToString::to_string), - }) - }) - .transpose() -} - -pub fn branch_to_remote_branch_data( - project_repository: &project_repository::Repository, - branch: &git::Branch, - base: git::Oid, -) -> Result> { - branch - .target() - .map(|sha| { - let ahead = project_repository - .log(sha, LogUntil::Commit(base)) - .context("failed to get ahead commits")?; - - let name = git::Refname::try_from(branch).context("could not get branch name")?; - - let count_behind = project_repository - .distance(base, sha) - .context("failed to get behind count")?; - - Ok(RemoteBranchData { - sha, - upstream: if let git::Refname::Local(local_name) = &name { - 
local_name.remote().cloned() - } else { - None - }, - name, - behind: count_behind, - commits: ahead - .into_iter() - .map(|commit| commit_to_remote_commit(&commit)) - .collect::>(), - }) - }) - .transpose() -} - -pub fn commit_to_remote_commit(commit: &git::Commit) -> RemoteCommit { - RemoteCommit { - id: commit.id().to_string(), - description: commit.message().unwrap_or_default().to_string(), - created_at: commit.time().seconds().try_into().unwrap(), - author: commit.author().into(), - } -} diff --git a/gitbutler-app/src/virtual_branches/state.rs b/gitbutler-app/src/virtual_branches/state.rs deleted file mode 100644 index 479a251f6..000000000 --- a/gitbutler-app/src/virtual_branches/state.rs +++ /dev/null @@ -1,136 +0,0 @@ -use std::{ - collections::HashMap, - fs::File, - io::{Read, Write}, - path::{Path, PathBuf}, -}; - -use anyhow::Result; -use serde::{Deserialize, Serialize}; - -use crate::virtual_branches::BranchId; - -use super::{target::Target, Branch}; - -/// The state of virtual branches data, as persisted in a TOML file. -#[derive(Serialize, Deserialize, Debug, Default)] -pub struct VirtualBranches { - /// This is the target/base that is set when a repo is added to gb - pub default_target: Option, - /// The targets for each virtual branch - pub branch_targets: HashMap, - /// The current state of the virtual branches - pub branches: HashMap, -} -/// A handle to the state of virtual branches. -/// -/// For all operations, if the state file does not exist, it will be created. -pub struct VirtualBranchesHandle { - /// The path to the file containing the virtual branches state. - file_path: PathBuf, -} - -impl VirtualBranchesHandle { - /// Creates a new concurrency-safe handle to the state of virtual branches. - pub fn new(base_path: &Path) -> Self { - let file_path = base_path.join("virtual_branches.toml"); - Self { file_path } - } - - /// Persists the default target for the given repository. - /// - /// Errors if the file cannot be read or written. 
- pub fn set_default_target(&self, target: Target) -> Result<()> { - let mut virtual_branches = self.read_file()?; - virtual_branches.default_target = Some(target); - self.write_file(&virtual_branches)?; - Ok(()) - } - - /// Gets the default target for the given repository. - /// - /// Errors if the file cannot be read or written. - #[allow(dead_code)] - pub fn get_default_target(&self) -> Result> { - let virtual_branches = self.read_file()?; - Ok(virtual_branches.default_target) - } - - /// Sets the target for the given virtual branch. - /// - /// Errors if the file cannot be read or written. - pub fn set_branch_target(&self, id: BranchId, target: Target) -> Result<()> { - let mut virtual_branches = self.read_file()?; - virtual_branches.branch_targets.insert(id, target); - self.write_file(&virtual_branches)?; - Ok(()) - } - - /// Gets the target for the given virtual branch. - /// - /// Errors if the file cannot be read or written. - #[allow(dead_code)] - pub fn get_branch_target(&self, id: BranchId) -> Result> { - let virtual_branches = self.read_file()?; - Ok(virtual_branches.branch_targets.get(&id).cloned()) - } - - /// Sets the state of the given virtual branch. - /// - /// Errors if the file cannot be read or written. - pub fn set_branch(&self, branch: Branch) -> Result<()> { - let mut virtual_branches = self.read_file()?; - virtual_branches.branches.insert(branch.id, branch); - self.write_file(&virtual_branches)?; - Ok(()) - } - - /// Removes the given virtual branch. - /// - /// Errors if the file cannot be read or written. - #[allow(dead_code)] - pub fn remove_branch(&self, id: BranchId) -> Result<()> { - let mut virtual_branches = self.read_file()?; - virtual_branches.branches.remove(&id); - self.write_file(&virtual_branches)?; - Ok(()) - } - - /// Gets the state of the given virtual branch. - /// - /// Errors if the file cannot be read or written. 
- #[allow(dead_code)] - pub fn get_branch(&self, id: BranchId) -> Result> { - let virtual_branches = self.read_file()?; - Ok(virtual_branches.branches.get(&id).cloned()) - } - - /// Reads and parses the state file. - /// - /// If the file does not exist, it will be created. - fn read_file(&self) -> Result { - // let file_path = &self.file_path.lock().await; - if !self.file_path.exists() { - return Ok(VirtualBranches::default()); - } - let mut file: File = File::open(self.file_path.as_path())?; - let mut contents = String::new(); - file.read_to_string(&mut contents)?; - let virtual_branches: VirtualBranches = toml::from_str(&contents)?; - Ok(virtual_branches) - } - - fn write_file(&self, virtual_branches: &VirtualBranches) -> Result<()> { - write(self.file_path.as_path(), virtual_branches) - } -} - -fn write>(file_path: P, virtual_branches: &VirtualBranches) -> Result<()> { - let contents = toml::to_string(&virtual_branches)?; - let temp_file = tempfile::NamedTempFile::new_in(file_path.as_ref().parent().unwrap())?; - let (mut file, temp_path) = temp_file.keep()?; - file.write_all(contents.as_bytes())?; - drop(file); - std::fs::rename(temp_path, file_path.as_ref())?; - Ok(()) -} diff --git a/gitbutler-app/src/virtual_branches/target.rs b/gitbutler-app/src/virtual_branches/target.rs deleted file mode 100644 index ff97e4d31..000000000 --- a/gitbutler-app/src/virtual_branches/target.rs +++ /dev/null @@ -1,105 +0,0 @@ -mod reader; -mod writer; - -use std::str::FromStr; - -use serde::{ser::SerializeStruct, Deserializer, Serializer}; -use serde::{Deserialize, Serialize}; - -pub use reader::TargetReader as Reader; -pub use writer::TargetWriter as Writer; - -use crate::git; - -#[derive(Debug, PartialEq, Clone)] -pub struct Target { - pub branch: git::RemoteRefname, - pub remote_url: String, - pub sha: git::Oid, -} - -impl Serialize for Target { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - let mut state = serializer.serialize_struct("Target", 
5)?; - state.serialize_field("branchName", &self.branch.branch())?; - state.serialize_field("remoteName", &self.branch.remote())?; - state.serialize_field("remoteUrl", &self.remote_url)?; - state.serialize_field("sha", &self.sha.to_string())?; - state.end() - } -} - -impl<'de> serde::Deserialize<'de> for Target { - fn deserialize>(d: D) -> Result { - #[derive(Debug, Deserialize)] - #[serde(rename_all = "camelCase")] - struct TargetData { - branch_name: String, - remote_name: String, - remote_url: String, - sha: String, - } - let target_data: TargetData = serde::Deserialize::deserialize(d)?; - let sha = git::Oid::from_str(&target_data.sha) - .map_err(|x| serde::de::Error::custom(x.message()))?; - - let target = Target { - branch: git::RemoteRefname::new(&target_data.remote_name, &target_data.branch_name), - remote_url: target_data.remote_url, - sha, - }; - Ok(target) - } -} - -impl Target { - fn try_from(reader: &crate::reader::Reader) -> Result { - let results = reader.batch(&["name", "branch_name", "remote", "remote_url", "sha"])?; - - let name = results[0].clone(); - let branch_name = results[1].clone(); - let remote = results[2].clone(); - let remote_url = results[3].clone(); - let sha = results[4].clone(); - - let branch_name = match name { - Ok(branch) => { - let branch: String = branch.try_into()?; - Ok(branch.clone()) - } - Err(crate::reader::Error::NotFound) => { - // fallback to the old format - let branch_name: String = branch_name?.try_into()?; - Ok(branch_name) - } - Err(e) => Err(crate::reader::Error::Io( - std::io::Error::new(std::io::ErrorKind::Other, format!("branch: {}", e)).into(), - )), - }?; - - let remote_url: String = match remote_url { - Ok(url) => Ok(url.try_into()?), - // fallback to the old format - Err(crate::reader::Error::NotFound) => Ok(remote?.try_into()?), - Err(error) => Err(crate::reader::Error::Io( - std::io::Error::new(std::io::ErrorKind::Other, format!("remote: {}", error)).into(), - )), - }?; - - let sha: String = 
sha?.try_into()?; - let sha = sha.parse().map_err(|e| { - crate::reader::Error::Io( - std::io::Error::new(std::io::ErrorKind::InvalidData, format!("sha: {}", e)).into(), - ) - })?; - - Ok(Self { - branch: format!("refs/remotes/{}", branch_name).parse().unwrap(), - remote_url, - sha, - }) - } -} diff --git a/gitbutler-app/src/virtual_branches/target/reader.rs b/gitbutler-app/src/virtual_branches/target/reader.rs deleted file mode 100644 index d5fabd7d0..000000000 --- a/gitbutler-app/src/virtual_branches/target/reader.rs +++ /dev/null @@ -1,31 +0,0 @@ -use crate::{reader, sessions, virtual_branches::BranchId}; - -use super::Target; - -pub struct TargetReader<'r> { - reader: &'r reader::Reader<'r>, -} - -impl<'r> TargetReader<'r> { - pub fn new(reader: &'r sessions::Reader<'r>) -> Self { - Self { - reader: reader.reader(), - } - } - - pub fn read_default(&self) -> Result { - Target::try_from(&self.reader.sub("branches/target")) - } - - pub fn read(&self, id: &BranchId) -> Result { - if !self - .reader - .exists(format!("branches/{}/target", id)) - .map_err(reader::Error::from)? 
- { - return self.read_default(); - } - - Target::try_from(&self.reader.sub(format!("branches/{}/target", id))) - } -} diff --git a/gitbutler-app/src/virtual_branches/target/writer.rs b/gitbutler-app/src/virtual_branches/target/writer.rs deleted file mode 100644 index 6aabd4008..000000000 --- a/gitbutler-app/src/virtual_branches/target/writer.rs +++ /dev/null @@ -1,109 +0,0 @@ -use std::path; - -use anyhow::{Context, Result}; - -use crate::{ - gb_repository, reader, - virtual_branches::{state::VirtualBranchesHandle, BranchId}, - writer, -}; - -use super::Target; - -pub struct TargetWriter<'writer> { - repository: &'writer gb_repository::Repository, - writer: writer::DirWriter, - reader: reader::Reader<'writer>, - state_handle: VirtualBranchesHandle, -} - -impl<'writer> TargetWriter<'writer> { - pub fn new>( - repository: &'writer gb_repository::Repository, - path: P, - ) -> Result { - let reader = reader::Reader::open(&repository.root())?; - let writer = writer::DirWriter::open(repository.root())?; - let state_handle = VirtualBranchesHandle::new(path.as_ref()); - Ok(Self { - repository, - writer, - reader, - state_handle, - }) - } - - pub fn write_default(&self, target: &Target) -> Result<()> { - let reader = self.reader.sub("branches/target"); - match Target::try_from(&reader) { - Ok(existing) if existing.eq(target) => return Ok(()), - Ok(_) | Err(reader::Error::NotFound) => {} - Err(e) => return Err(e.into()), - }; - - self.repository.mark_active_session()?; - - let batch = vec![ - writer::BatchTask::Write( - "branches/target/branch_name", - format!("{}/{}", target.branch.remote(), target.branch.branch()), - ), - writer::BatchTask::Write( - "branches/target/remote_name", - target.branch.remote().to_string(), - ), - writer::BatchTask::Write("branches/target/remote_url", target.remote_url.clone()), - writer::BatchTask::Write("branches/target/sha", target.sha.to_string()), - ]; - - self.writer - .batch(&batch) - .context("Failed to write default target")?; - - // 
Write in the state file as well - let _ = self.state_handle.set_default_target(target.clone()); - - Ok(()) - } - - pub fn write(&self, id: &BranchId, target: &Target) -> Result<()> { - let reader = self.reader.sub(format!("branches/{}/target", id)); - match Target::try_from(&reader) { - Ok(existing) if existing.eq(target) => return Ok(()), - Ok(_) | Err(reader::Error::NotFound) => {} - Err(e) => return Err(e.into()), - }; - - self.repository - .mark_active_session() - .context("Failed to get or create current session")?; - - let batch = vec![ - writer::BatchTask::Write( - format!("branches/{}/target/branch_name", id), - format!("{}/{}", target.branch.remote(), target.branch.branch()), - ), - writer::BatchTask::Write( - format!("branches/{}/target/remote_name", id), - target.branch.remote().to_string(), - ), - writer::BatchTask::Write( - format!("branches/{}/target/remote_url", id), - target.remote_url.clone(), - ), - writer::BatchTask::Write( - format!("branches/{}/target/sha", id), - target.sha.to_string(), - ), - ]; - - self.writer - .batch(&batch) - .context("Failed to write target")?; - - // Write in the state file as well - let _ = self.state_handle.set_branch_target(*id, target.clone()); - - Ok(()) - } -} diff --git a/gitbutler-app/src/virtual_branches/virtual.rs b/gitbutler-app/src/virtual_branches/virtual.rs deleted file mode 100644 index 132d2be23..000000000 --- a/gitbutler-app/src/virtual_branches/virtual.rs +++ /dev/null @@ -1,4058 +0,0 @@ -use std::{ - collections::HashMap, - hash::Hash, - path::{Path, PathBuf}, - time, vec, -}; - -#[cfg(target_family = "unix")] -use std::os::unix::prelude::*; - -use anyhow::{bail, Context, Result}; -use bstr::ByteSlice; -use diffy::{apply, Patch}; -use git2_hooks::HookResult; -use regex::Regex; -use serde::Serialize; - -use crate::{ - askpass::AskpassBroker, - dedup::{dedup, dedup_fmt}, - gb_repository, - git::{ - self, - diff::{self, diff_files_to_hunks, GitHunk}, - show, Commit, Refname, RemoteRefname, - }, - keys, - 
project_repository::{self, conflicts, LogUntil}, - reader, sessions, users, -}; - -use super::{ - branch::{ - self, Branch, BranchCreateRequest, BranchId, BranchOwnershipClaims, Hunk, OwnershipClaim, - }, - branch_to_remote_branch, context, errors, target, Iterator, RemoteBranch, -}; - -type AppliedStatuses = Vec<(branch::Branch, HashMap>)>; - -#[derive(Debug, thiserror::Error)] -pub enum Error { - #[error("path contains invalid utf-8 characters: {0}")] - InvalidUnicodePath(PathBuf), -} - -// this struct is a mapping to the view `Branch` type in Typescript -// found in src-tauri/src/routes/repo/[project_id]/types.ts -// it holds a materialized view for presentation purposes of the Branch struct in Rust -// which is our persisted data structure for virtual branches -// -// it is not persisted, it is only used for presentation purposes through the ipc -// -#[derive(Debug, PartialEq, Clone, Serialize)] -#[serde(rename_all = "camelCase")] -#[allow(clippy::struct_excessive_bools)] -pub struct VirtualBranch { - pub id: BranchId, - pub name: String, - pub notes: String, - pub active: bool, - pub files: Vec, - pub commits: Vec, - pub requires_force: bool, // does this branch require a force push to the upstream? - pub conflicted: bool, // is this branch currently in a conflicted state (only for the workspace) - pub order: usize, // the order in which this branch should be displayed in the UI - pub upstream: Option, // the upstream branch where this branch pushes to, if any - pub upstream_name: Option, // the upstream branch where this branch will push to on next push - pub base_current: bool, // is this vbranch based on the current base branch? 
if false, this needs to be manually merged with conflicts - pub ownership: BranchOwnershipClaims, - pub updated_at: u128, - pub selected_for_changes: bool, - pub head: git::Oid, -} - -#[derive(Debug, PartialEq, Clone, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct VirtualBranches { - pub branches: Vec, - pub skipped_files: Vec, -} - -// this is the struct that maps to the view `Commit` type in Typescript -// it is derived from walking the git commits between the `Branch.head` commit -// and the `Target.sha` commit, or, everything that is uniquely committed to -// the virtual branch we assign it to. an array of them are returned as part of -// the `VirtualBranch` struct -// -// it is not persisted, it is only used for presentation purposes through the ipc -// -#[derive(Debug, PartialEq, Clone, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct VirtualBranchCommit { - pub id: git::Oid, - pub description: String, - pub created_at: u128, - pub author: Author, - pub is_remote: bool, - pub files: Vec, - pub is_integrated: bool, - pub parent_ids: Vec, - pub branch_id: BranchId, -} - -// this struct is a mapping to the view `File` type in Typescript -// found in src-tauri/src/routes/repo/[project_id]/types.ts -// it holds a materialized view for presentation purposes of one entry of the -// `Branch.ownership` vector in Rust. 
an array of them are returned as part of -// the `VirtualBranch` struct, which map to each entry of the `Branch.ownership` vector -// -// it is not persisted, it is only used for presentation purposes through the ipc -// -#[derive(Debug, PartialEq, Clone, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct VirtualBranchFile { - pub id: String, - pub path: PathBuf, - pub hunks: Vec, - pub modified_at: u128, - pub conflicted: bool, - pub binary: bool, - pub large: bool, -} - -// this struct is a mapping to the view `Hunk` type in Typescript -// found in src-tauri/src/routes/repo/[project_id]/types.ts -// it holds a materialized view for presentation purposes of one entry of the -// each hunk in one `Branch.ownership` vector entry in Rust. -// an array of them are returned as part of the `VirtualBranchFile` struct -// -// it is not persisted, it is only used for presentation purposes through the ipc -// -#[derive(Debug, PartialEq, Clone, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct VirtualBranchHunk { - pub id: String, - pub diff: String, - pub modified_at: u128, - pub file_path: PathBuf, - pub hash: String, - pub old_start: u32, - pub start: u32, - pub end: u32, - pub binary: bool, - pub locked: bool, - pub locked_to: Option, - pub change_type: diff::ChangeType, -} - -#[derive(Debug, Serialize, Hash, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct Author { - pub name: String, - pub email: String, - pub gravatar_url: url::Url, -} - -impl From> for Author { - fn from(value: git::Signature) -> Self { - let name = value.name().unwrap_or_default().to_string(); - let email = value.email().unwrap_or_default().to_string(); - - let gravatar_url = url::Url::parse(&format!( - "https://www.gravatar.com/avatar/{:x}?s=100&r=g&d=retro", - md5::compute(email.to_lowercase()) - )) - .unwrap(); - - Author { - name, - email, - gravatar_url, - } - } -} - -pub fn normalize_branch_name(name: &str) -> String { - let pattern = 
Regex::new("[^A-Za-z0-9_/.#]+").unwrap(); - pattern.replace_all(name, "-").to_string() -} - -pub fn get_default_target( - session_reader: &sessions::Reader, -) -> Result, reader::Error> { - let target_reader = target::Reader::new(session_reader); - match target_reader.read_default() { - Ok(target) => Ok(Some(target)), - Err(reader::Error::NotFound) => Ok(None), - Err(error) => Err(error), - } -} - -pub fn apply_branch( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - branch_id: &BranchId, - signing_key: Option<&keys::PrivateKey>, - user: Option<&users::User>, -) -> Result<(), errors::ApplyBranchError> { - if project_repository.is_resolving() { - return Err(errors::ApplyBranchError::Conflict( - errors::ProjectConflictError { - project_id: project_repository.project().id, - }, - )); - } - let current_session = gb_repository - .get_or_create_current_session() - .context("failed to get or create current session")?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) - .context("failed to open current session")?; - - let repo = &project_repository.git_repository; - - let default_target = get_default_target(¤t_session_reader) - .context("failed to get default target")? 
- .ok_or_else(|| { - errors::ApplyBranchError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - - let writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create branch writer")?; - - let mut branch = match branch::Reader::new(¤t_session_reader).read(branch_id) { - Ok(branch) => Ok(branch), - Err(reader::Error::NotFound) => Err(errors::ApplyBranchError::BranchNotFound( - errors::BranchNotFoundError { - project_id: project_repository.project().id, - branch_id: *branch_id, - }, - )), - Err(error) => Err(errors::ApplyBranchError::Other(error.into())), - }?; - - if branch.applied { - return Ok(()); - } - - let target_commit = repo - .find_commit(default_target.sha) - .context("failed to find target commit")?; - let target_tree = target_commit.tree().context("failed to get target tree")?; - - // calculate the merge base and make sure it's the same as the target commit - // if not, we need to merge or rebase the branch to get it up to date - - let merge_base = repo - .merge_base(default_target.sha, branch.head) - .context(format!( - "failed to find merge base between {} and {}", - default_target.sha, branch.head - ))?; - if merge_base != default_target.sha { - // Branch is out of date, merge or rebase it - let merge_base_tree = repo - .find_commit(merge_base) - .context(format!("failed to find merge base commit {}", merge_base))? - .tree() - .context("failed to find merge base tree")?; - - let branch_tree = repo - .find_tree(branch.tree) - .context("failed to find branch tree")?; - - let mut merge_index = repo - .merge_trees(&merge_base_tree, &branch_tree, &target_tree) - .context("failed to merge trees")?; - - if merge_index.has_conflicts() { - // currently we can only deal with the merge problem branch - for mut branch in super::get_status_by_branch(gb_repository, project_repository)? 
- .0 - .into_iter() - .map(|(branch, _)| branch) - .filter(|branch| branch.applied) - { - branch.applied = false; - writer.write(&mut branch)?; - } - - // apply the branch - branch.applied = true; - writer.write(&mut branch)?; - - // checkout the conflicts - repo.checkout_index(&mut merge_index) - .allow_conflicts() - .conflict_style_merge() - .force() - .checkout() - .context("failed to checkout index")?; - - // mark conflicts - let conflicts = merge_index - .conflicts() - .context("failed to get merge index conflicts")?; - let mut merge_conflicts = Vec::new(); - for path in conflicts.flatten() { - if let Some(ours) = path.our { - let path = std::str::from_utf8(&ours.path) - .context("failed to convert path to utf8")? - .to_string(); - merge_conflicts.push(path); - } - } - conflicts::mark( - project_repository, - &merge_conflicts, - Some(default_target.sha), - )?; - - return Ok(()); - } - - let head_commit = repo - .find_commit(branch.head) - .context("failed to find head commit")?; - - let merged_branch_tree_oid = merge_index - .write_tree_to(repo) - .context("failed to write tree")?; - - let merged_branch_tree = repo - .find_tree(merged_branch_tree_oid) - .context("failed to find tree")?; - - let ok_with_force_push = project_repository.project().ok_with_force_push; - if branch.upstream.is_some() && !ok_with_force_push { - // branch was pushed to upstream, and user doesn't like force pushing. - // create a merge commit to avoid the need of force pushing then. - - let new_branch_head = project_repository.commit( - user, - format!( - "Merged {}/{} into {}", - default_target.branch.remote(), - default_target.branch.branch(), - branch.name - ) - .as_str(), - &merged_branch_tree, - &[&head_commit, &target_commit], - signing_key, - )?; - - // ok, update the virtual branch - branch.head = new_branch_head; - branch.tree = merged_branch_tree_oid; - writer.write(&mut branch)?; - } else { - // branch was not pushed to upstream yet. 
attempt a rebase, - let (_, committer) = project_repository.git_signatures(user)?; - let mut rebase_options = git2::RebaseOptions::new(); - rebase_options.quiet(true); - rebase_options.inmemory(true); - let mut rebase = repo - .rebase( - Some(branch.head), - Some(target_commit.id()), - None, - Some(&mut rebase_options), - ) - .context("failed to rebase")?; - - let mut rebase_success = true; - // check to see if these commits have already been pushed - let mut last_rebase_head = branch.head; - while rebase.next().is_some() { - let index = rebase - .inmemory_index() - .context("failed to get inmemory index")?; - if index.has_conflicts() { - rebase_success = false; - break; - } - - if let Ok(commit_id) = rebase.commit(None, &committer.clone().into(), None) { - last_rebase_head = commit_id.into(); - } else { - rebase_success = false; - break; - } - } - - if rebase_success { - // rebase worked out, rewrite the branch head - rebase.finish(None).context("failed to finish rebase")?; - branch.head = last_rebase_head; - branch.tree = merged_branch_tree_oid; - } else { - // rebase failed, do a merge commit - rebase.abort().context("failed to abort rebase")?; - - // get tree from merge_tree_oid - let merge_tree = repo - .find_tree(merged_branch_tree_oid) - .context("failed to find tree")?; - - // commit the merge tree oid - let new_branch_head = project_repository - .commit( - user, - format!( - "Merged {}/{} into {}", - default_target.branch.remote(), - default_target.branch.branch(), - branch.name - ) - .as_str(), - &merge_tree, - &[&head_commit, &target_commit], - signing_key, - ) - .context("failed to commit merge")?; - - branch.head = new_branch_head; - branch.tree = merged_branch_tree_oid; - } - } - } - - let wd_tree = project_repository.get_wd_tree()?; - - let branch_tree = repo - .find_tree(branch.tree) - .context("failed to find branch tree")?; - - // check index for conflicts - let mut merge_index = repo - .merge_trees(&target_tree, &wd_tree, &branch_tree) - 
.context("failed to merge trees")?; - - if merge_index.has_conflicts() { - return Err(errors::ApplyBranchError::BranchConflicts(*branch_id)); - } - - // apply the branch - branch.applied = true; - writer.write(&mut branch)?; - - ensure_selected_for_changes(¤t_session_reader, &writer) - .context("failed to ensure selected for changes")?; - - // checkout the merge index - repo.checkout_index(&mut merge_index) - .force() - .checkout() - .context("failed to checkout index")?; - - super::integration::update_gitbutler_integration(gb_repository, project_repository)?; - - Ok(()) -} - -pub fn unapply_ownership( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - ownership: &BranchOwnershipClaims, -) -> Result<(), errors::UnapplyOwnershipError> { - if conflicts::is_resolving(project_repository) { - return Err(errors::UnapplyOwnershipError::Conflict( - errors::ProjectConflictError { - project_id: project_repository.project().id, - }, - )); - } - - let latest_session = gb_repository - .get_latest_session() - .context("failed to get or create current session")? - .ok_or_else(|| { - errors::UnapplyOwnershipError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - - let latest_session_reader = sessions::Reader::open(gb_repository, &latest_session) - .context("failed to open current session")?; - - let default_target = get_default_target(&latest_session_reader) - .context("failed to get default target")? - .ok_or_else(|| { - errors::UnapplyOwnershipError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - - let applied_branches = Iterator::new(&latest_session_reader) - .context("failed to create branch iterator")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")? 
- .into_iter() - .filter(|b| b.applied) - .collect::>(); - - let (applied_statuses, _) = get_applied_status( - gb_repository, - project_repository, - &default_target, - applied_branches, - ) - .context("failed to get status by branch")?; - - let hunks_to_unapply = applied_statuses - .iter() - .map( - |(branch, branch_files)| -> Result> { - let branch_files = calculate_non_commited_diffs( - project_repository, - branch, - &default_target, - branch_files, - )?; - - let mut hunks_to_unapply = Vec::new(); - for (path, hunks) in branch_files { - let ownership_hunks: Vec<&Hunk> = ownership - .claims - .iter() - .filter(|o| o.file_path == path) - .flat_map(|f| &f.hunks) - .collect(); - for hunk in hunks { - if ownership_hunks.contains(&&Hunk::from(&hunk)) { - hunks_to_unapply.push((path.clone(), hunk)); - } - } - } - - hunks_to_unapply.sort_by(|a, b| a.1.old_start.cmp(&b.1.old_start)); - - Ok(hunks_to_unapply) - }, - ) - .collect::>>()? - .into_iter() - .flatten() - .collect::>(); - - let mut diff = HashMap::new(); - for h in hunks_to_unapply { - if let Some(reversed_hunk) = diff::reverse_hunk(&h.1) { - diff.entry(h.0).or_insert_with(Vec::new).push(reversed_hunk); - } else { - return Err(errors::UnapplyOwnershipError::Other(anyhow::anyhow!( - "failed to reverse hunk" - ))); - } - } - - let repo = &project_repository.git_repository; - - let target_commit = repo - .find_commit(default_target.sha) - .context("failed to find target commit")?; - - let base_tree = target_commit.tree().context("failed to get target tree")?; - let final_tree = applied_statuses.into_iter().fold( - target_commit.tree().context("failed to get target tree"), - |final_tree, status| { - let final_tree = final_tree?; - let tree_oid = write_tree(project_repository, &default_target, &status.1)?; - let branch_tree = repo.find_tree(tree_oid)?; - let mut result = repo.merge_trees(&base_tree, &final_tree, &branch_tree)?; - let final_tree_oid = result.write_tree_to(repo)?; - repo.find_tree(final_tree_oid) - 
.context("failed to find tree") - }, - )?; - - let final_tree_oid = write_tree_onto_tree(project_repository, &final_tree, &diff)?; - let final_tree = repo - .find_tree(final_tree_oid) - .context("failed to find tree")?; - - repo.checkout_tree(&final_tree) - .force() - .remove_untracked() - .checkout() - .context("failed to checkout tree")?; - - super::integration::update_gitbutler_integration(gb_repository, project_repository)?; - - Ok(()) -} - -// reset a file in the project to the index state -pub fn reset_files( - project_repository: &project_repository::Repository, - files: &Vec, -) -> Result<(), errors::UnapplyOwnershipError> { - if conflicts::is_resolving(project_repository) { - return Err(errors::UnapplyOwnershipError::Conflict( - errors::ProjectConflictError { - project_id: project_repository.project().id, - }, - )); - } - - // for each tree, we need to checkout the entry from the index at that path - // or if it doesn't exist, remove the file from the working directory - let repo = &project_repository.git_repository; - let index = repo.index().context("failed to get index")?; - for file in files { - let entry = index.get_path(Path::new(file), 0); - if entry.is_some() { - repo.checkout_index_path(Path::new(file)) - .context("failed to checkout index")?; - } else { - // find the project root - let project_root = &project_repository.project().path; - let path = Path::new(file); - //combine the project root with the file path - let path = &project_root.join(path); - std::fs::remove_file(path).context("failed to remove file")?; - } - } - - Ok(()) -} - -// to unapply a branch, we need to write the current tree out, then remove those file changes from the wd -pub fn unapply_branch( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - branch_id: &BranchId, -) -> Result, errors::UnapplyBranchError> { - let session = &gb_repository - .get_or_create_current_session() - .context("failed to get or create currnt 
session")?; - - let current_session_reader = - sessions::Reader::open(gb_repository, session).context("failed to open current session")?; - - let branch_reader = branch::Reader::new(¤t_session_reader); - - let mut target_branch = branch_reader.read(branch_id).map_err(|error| match error { - reader::Error::NotFound => { - errors::UnapplyBranchError::BranchNotFound(errors::BranchNotFoundError { - project_id: project_repository.project().id, - branch_id: *branch_id, - }) - } - error => errors::UnapplyBranchError::Other(error.into()), - })?; - - if !target_branch.applied { - return Ok(Some(target_branch)); - } - - let default_target = get_default_target(¤t_session_reader) - .context("failed to get default target")? - .ok_or_else(|| { - errors::UnapplyBranchError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - - let repo = &project_repository.git_repository; - let target_commit = repo - .find_commit(default_target.sha) - .context("failed to find target commit")?; - - let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - - let final_tree = if conflicts::is_resolving(project_repository) { - // when applying branch leads to a conflict, all other branches are unapplied. - // this means we can just reset to the default target tree. - { - target_branch.applied = false; - target_branch.selected_for_changes = None; - branch_writer.write(&mut target_branch)?; - } - - conflicts::clear(project_repository).context("failed to clear conflicts")?; - - target_commit.tree().context("failed to get target tree")? - } else { - // if we are not resolving, we need to merge the rest of the applied branches - let applied_branches = Iterator::new(¤t_session_reader) - .context("failed to create branch iterator")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")? 
- .into_iter() - .filter(|b| b.applied) - .collect::>(); - - let (applied_statuses, _) = get_applied_status( - gb_repository, - project_repository, - &default_target, - applied_branches, - ) - .context("failed to get status by branch")?; - - let status = applied_statuses - .iter() - .find(|(s, _)| s.id == target_branch.id) - .context("failed to find status for branch"); - - if let Ok((_, files)) = status { - if files.is_empty() { - // if there is nothing to unapply, remove the branch straight away - branch_writer - .delete(&target_branch) - .context("Failed to remove branch")?; - - ensure_selected_for_changes(¤t_session_reader, &branch_writer) - .context("failed to ensure selected for changes")?; - - project_repository.delete_branch_reference(&target_branch)?; - return Ok(None); - } - - target_branch.tree = write_tree(project_repository, &default_target, files)?; - target_branch.applied = false; - target_branch.selected_for_changes = None; - branch_writer.write(&mut target_branch)?; - } - - let target_commit = repo - .find_commit(default_target.sha) - .context("failed to find target commit")?; - - // ok, update the wd with the union of the rest of the branches - let base_tree = target_commit.tree().context("failed to get target tree")?; - - // go through the other applied branches and merge them into the final tree - // then check that out into the working directory - let final_tree = applied_statuses - .into_iter() - .filter(|(branch, _)| &branch.id != branch_id) - .fold( - target_commit.tree().context("failed to get target tree"), - |final_tree, status| { - let final_tree = final_tree?; - let tree_oid = write_tree(project_repository, &default_target, &status.1)?; - let branch_tree = repo.find_tree(tree_oid)?; - let mut result = repo.merge_trees(&base_tree, &final_tree, &branch_tree)?; - let final_tree_oid = result.write_tree_to(repo)?; - repo.find_tree(final_tree_oid) - .context("failed to find tree") - }, - )?; - - ensure_selected_for_changes(¤t_session_reader, 
&branch_writer) - .context("failed to ensure selected for changes")?; - - final_tree - }; - - // checkout final_tree into the working directory - repo.checkout_tree(&final_tree) - .force() - .remove_untracked() - .checkout() - .context("failed to checkout tree")?; - - super::integration::update_gitbutler_integration(gb_repository, project_repository)?; - - Ok(Some(target_branch)) -} - -fn find_base_tree<'a>( - repo: &'a git::Repository, - branch_commit: &'a git::Commit<'a>, - target_commit: &'a git::Commit<'a>, -) -> Result> { - // find merge base between target_commit and branch_commit - let merge_base = repo - .merge_base(target_commit.id(), branch_commit.id()) - .context("failed to find merge base")?; - // turn oid into a commit - let merge_base_commit = repo - .find_commit(merge_base) - .context("failed to find merge base commit")?; - let base_tree = merge_base_commit - .tree() - .context("failed to get base tree object")?; - Ok(base_tree) -} - -pub fn list_virtual_branches( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, -) -> Result<(Vec, bool, Vec), errors::ListVirtualBranchesError> { - let mut branches: Vec = Vec::new(); - - let default_target = gb_repository - .default_target() - .context("failed to get default target")? 
- .ok_or_else(|| { - errors::ListVirtualBranchesError::DefaultTargetNotSet( - errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }, - ) - })?; - - let (statuses, skipped_files) = get_status_by_branch(gb_repository, project_repository)?; - let max_selected_for_changes = statuses - .iter() - .filter_map(|(branch, _)| branch.selected_for_changes) - .max() - .unwrap_or(-1); - - for (branch, files) in &statuses { - // check if head tree does not match target tree - // if so, we diff the head tree and the new write_tree output to see what is new and filter the hunks to just those - let files = - calculate_non_commited_diffs(project_repository, branch, &default_target, files)?; - - let repo = &project_repository.git_repository; - - let upstream_branch = match branch - .upstream - .as_ref() - .map(|name| repo.find_branch(&git::Refname::from(name))) - .transpose() - { - Err(git::Error::NotFound(_)) => Ok(None), - Err(error) => Err(error), - Ok(branch) => Ok(branch), - } - .context(format!( - "failed to find upstream branch for {}", - branch.name - ))?; - - let upstram_branch_commit = upstream_branch - .as_ref() - .map(git::Branch::peel_to_commit) - .transpose() - .context(format!( - "failed to find upstream branch commit for {}", - branch.name - ))?; - - // find upstream commits if we found an upstream reference - let mut pushed_commits = HashMap::new(); - if let Some(upstream) = &upstram_branch_commit { - let merge_base = - repo.merge_base(upstream.id(), default_target.sha) - .context(format!( - "failed to find merge base between {} and {}", - upstream.id(), - default_target.sha - ))?; - for oid in project_repository.l(upstream.id(), LogUntil::Commit(merge_base))? 
{ - pushed_commits.insert(oid, true); - } - } - - let mut is_integrated = false; - let mut is_remote = false; - - // find all commits on head that are not on target.sha - let commits = project_repository - .log(branch.head, LogUntil::Commit(default_target.sha)) - .context(format!("failed to get log for branch {}", branch.name))? - .iter() - .map(|commit| { - is_remote = if is_remote { - is_remote - } else { - pushed_commits.contains_key(&commit.id()) - }; - - // only check for integration if we haven't already found an integration - is_integrated = if is_integrated { - is_integrated - } else { - is_commit_integrated(project_repository, &default_target, commit)? - }; - - commit_to_vbranch_commit( - project_repository, - branch, - commit, - is_integrated, - is_remote, - ) - }) - .collect::>>()?; - - // if the branch is not applied, check to see if it's mergeable and up to date - let mut base_current = true; - if !branch.applied { - // determine if this branch is up to date with the target/base - let merge_base = repo - .merge_base(default_target.sha, branch.head) - .context("failed to find merge base")?; - if merge_base != default_target.sha { - base_current = false; - } - } - - let upstream = upstream_branch - .map(|upstream_branch| branch_to_remote_branch(&upstream_branch)) - .transpose()? 
- .flatten(); - - let mut files = diffs_to_virtual_files(project_repository, &files); - files.sort_by(|a, b| { - branch - .ownership - .claims - .iter() - .position(|o| o.file_path.eq(&a.path)) - .unwrap_or(999) - .cmp( - &branch - .ownership - .claims - .iter() - .position(|id| id.file_path.eq(&b.path)) - .unwrap_or(999), - ) - }); - - let requires_force = is_requires_force(project_repository, branch)?; - let branch = VirtualBranch { - id: branch.id, - name: branch.name.clone(), - notes: branch.notes.clone(), - active: branch.applied, - files, - order: branch.order, - commits, - requires_force, - upstream, - upstream_name: branch - .upstream - .clone() - .and_then(|r| Refname::from(r).branch().map(Into::into)), - conflicted: conflicts::is_resolving(project_repository), - base_current, - ownership: branch.ownership.clone(), - updated_at: branch.updated_timestamp_ms, - selected_for_changes: branch.selected_for_changes == Some(max_selected_for_changes), - head: branch.head, - }; - branches.push(branch); - } - - let branches = branches_with_large_files_abridged(branches); - let mut branches = branches_with_hunk_locks(branches, project_repository)?; - - // If there no context lines are used internally, add them here, before returning to the UI - if context_lines(project_repository) == 0 { - for branch in &mut branches { - branch.files = files_with_hunk_context( - &project_repository.git_repository, - branch.files.clone(), - 3, - branch.head, - ) - .context("failed to add hunk context")?; - } - } - - branches.sort_by(|a, b| a.order.cmp(&b.order)); - - super::integration::update_gitbutler_integration(gb_repository, project_repository)?; - - let uses_diff_context = project_repository - .project() - .use_diff_context - .unwrap_or(false); - Ok((branches, uses_diff_context, skipped_files)) -} - -fn branches_with_large_files_abridged(mut branches: Vec) -> Vec { - for branch in &mut branches { - for file in &mut branch.files { - // Diffs larger than 500kb are considered large 
- if file.hunks.iter().any(|hunk| hunk.diff.len() > 500_000) { - file.large = true; - file.hunks - .iter_mut() - .for_each(|hunk| hunk.diff = String::new()); - } - } - } - branches -} - -fn branches_with_hunk_locks( - mut branches: Vec, - project_repository: &project_repository::Repository, -) -> Result> { - let all_commits: Vec = branches - .clone() - .iter() - .filter(|branch| branch.active) - .flat_map(|vbranch| vbranch.commits.clone()) - .collect(); - - for commit in all_commits { - let commit = project_repository.git_repository.find_commit(commit.id)?; - let parent = commit.parent(0).context("failed to get parent commit")?; - let commit_tree = commit.tree().context("failed to get commit tree")?; - let parent_tree = parent.tree().context("failed to get parent tree")?; - let commited_file_diffs = diff::trees( - &project_repository.git_repository, - &parent_tree, - &commit_tree, - context_lines(project_repository), - )?; - let commited_file_diffs = diff::diff_files_to_hunks(&commited_file_diffs); - for branch in &mut branches { - for file in &mut branch.files { - for hunk in &mut file.hunks { - let locked = - commited_file_diffs - .get(&file.path) - .map_or(false, |committed_hunks| { - committed_hunks.iter().any(|committed_hunk| { - joined( - committed_hunk.old_start, - committed_hunk.old_start + committed_hunk.new_lines, - hunk.start, - hunk.end, - ) - }) - }); - if locked { - hunk.locked = true; - hunk.locked_to = Some(commit.id()); - } - } - } - } - } - Ok(branches) -} - -fn joined(start_a: u32, end_a: u32, start_b: u32, end_b: u32) -> bool { - (start_a <= start_b && end_a >= start_b) || (start_a <= end_b && end_a >= end_b) -} - -fn files_with_hunk_context( - repository: &git::Repository, - mut files: Vec, - context_lines: usize, - branch_head: git::Oid, -) -> Result> { - for file in &mut files { - if file.binary { - continue; - } - // Get file content as it looked before the diffs - let branch_head_commit = repository.find_commit(branch_head)?; - let 
head_tree = branch_head_commit.tree()?; - let file_content_before = - show::show_file_at_tree(repository, file.path.clone(), &head_tree) - .context("failed to get file contents at base")?; - let file_lines_before = file_content_before.split('\n').collect::>(); - - // Update each hunk with contex lines before & after - file.hunks = file - .hunks - .iter() - .map(|hunk| { - if hunk.diff.is_empty() { - // noop on empty diff - hunk.clone() - } else { - let hunk_with_ctx = context::hunk_with_context( - &hunk.diff, - hunk.old_start as usize, - hunk.start as usize, - hunk.binary, - context_lines, - &file_lines_before, - hunk.change_type, - ); - to_virtual_branch_hunk(hunk.clone(), hunk_with_ctx) - } - }) - .collect::>(); - } - Ok(files) -} - -fn to_virtual_branch_hunk( - mut hunk: VirtualBranchHunk, - diff_with_context: diff::GitHunk, -) -> VirtualBranchHunk { - hunk.diff = diff_with_context.diff; - hunk.start = diff_with_context.new_start; - hunk.end = diff_with_context.new_start + diff_with_context.new_lines; - hunk -} - -fn is_requires_force( - project_repository: &project_repository::Repository, - branch: &branch::Branch, -) -> Result { - let upstream = if let Some(upstream) = &branch.upstream { - upstream - } else { - return Ok(false); - }; - - let reference = match project_repository - .git_repository - .refname_to_id(&upstream.to_string()) - { - Ok(reference) => reference, - Err(git::Error::NotFound(_)) => return Ok(false), - Err(other) => return Err(other).context("failed to find upstream reference"), - }; - - let upstream_commit = project_repository - .git_repository - .find_commit(reference) - .context("failed to find upstream commit")?; - - let merge_base = project_repository - .git_repository - .merge_base(upstream_commit.id(), branch.head)?; - - Ok(merge_base != upstream_commit.id()) -} - -// given a virtual branch and it's files that are calculated off of a default target, -// return files adjusted to the branch's head commit -pub fn 
calculate_non_commited_diffs( - project_repository: &project_repository::Repository, - branch: &branch::Branch, - default_target: &target::Target, - files: &HashMap>, -) -> Result>> { - if default_target.sha == branch.head && !branch.applied { - return Ok(files.clone()); - }; - - let branch_tree = if branch.applied { - let target_plus_wd_oid = write_tree(project_repository, default_target, files)?; - project_repository - .git_repository - .find_tree(target_plus_wd_oid) - } else { - project_repository.git_repository.find_tree(branch.tree) - }?; - - let branch_head = project_repository - .git_repository - .find_commit(branch.head)? - .tree()?; - - // do a diff between branch.head and the tree we _would_ commit - let non_commited_diff = diff::trees( - &project_repository.git_repository, - &branch_head, - &branch_tree, - context_lines(project_repository), - ) - .context("failed to diff trees")?; - let mut non_commited_diff = diff::diff_files_to_hunks(&non_commited_diff); - - let workspace_diff = diff::workdir( - &project_repository.git_repository, - &branch.head, - context_lines(project_repository), - )?; - let workspace_diff = diff::diff_files_to_hunks(&workspace_diff); - - // record conflicts resolution - // TODO: this feels out of place. move it somewhere else? 
- let conflicting_files = conflicts::conflicting_files(project_repository)?; - for (file_path, non_commited_hunks) in &non_commited_diff { - let mut conflicted = false; - if conflicting_files.contains(&file_path.display().to_string()) { - // check file for conflict markers, resolve the file if there are none in any hunk - for hunk in non_commited_hunks { - if hunk.diff.contains("<<<<<<< ours") { - conflicted = true; - } - if hunk.diff.contains(">>>>>>> theirs") { - conflicted = true; - } - } - if !conflicted { - conflicts::resolve(project_repository, &file_path.display().to_string()).unwrap(); - } - } - } - - // Revert back to the original line numbers from all hunks in the workspace - // This is done because the hunks in non_commited_diff have line numbers relative to the vbranch, which would be incorrect for the workspace - // Applies only to branches that are applied (in the workspace) - if branch.applied { - non_commited_diff = non_commited_diff - .into_iter() - .map(|(path, uncommitted_hunks)| { - let all_hunks = workspace_diff.get(&path); - if let Some(all_hunks) = all_hunks { - let hunks = line_agnostic_hunk_intersection(uncommitted_hunks, all_hunks); - (path, hunks) - } else { - (path, uncommitted_hunks) - } - }) - .collect(); - } - - Ok(non_commited_diff) -} - -/// Given two lists of hunks, returns the intersection based on the diff content and disregarding line numbers -/// -/// Since the hunks are not identical, the retuned hunks are the ones from the second argument -/// # Arguments -/// * `left` - A list of hunks -/// * `right` - A list of hunks to return from -/// # Returns -/// * A list of hunks that are present in both `left` and `right`, copied from `right` -fn line_agnostic_hunk_intersection(left: Vec, right: &Vec) -> Vec { - let mut result = Vec::new(); - for l in left { - // Skip the header containing line numbers - let l_diff = l.diff.split("@@").collect::>().pop(); - for r in right { - let r_diff = r.diff.split("@@").collect::>().pop(); - if 
l_diff == r_diff { - result.push(r.clone()); - break; - } - } - } - result -} - -fn list_virtual_commit_files( - project_repository: &project_repository::Repository, - commit: &git::Commit, -) -> Result> { - if commit.parent_count() == 0 { - return Ok(vec![]); - } - let parent = commit.parent(0).context("failed to get parent commit")?; - let commit_tree = commit.tree().context("failed to get commit tree")?; - let parent_tree = parent.tree().context("failed to get parent tree")?; - let diff = diff::trees( - &project_repository.git_repository, - &parent_tree, - &commit_tree, - context_lines(project_repository), - )?; - let diff = diff::diff_files_to_hunks(&diff); - let hunks_by_filepath = virtual_hunks_by_filepath(&project_repository.project().path, &diff); - Ok(virtual_hunks_to_virtual_files( - project_repository, - &hunks_by_filepath - .values() - .flatten() - .cloned() - .collect::>(), - )) -} - -fn commit_to_vbranch_commit( - repository: &project_repository::Repository, - branch: &branch::Branch, - commit: &git::Commit, - is_integrated: bool, - is_remote: bool, -) -> Result { - let timestamp = u128::try_from(commit.time().seconds())?; - let signature = commit.author(); - let message = commit.message().unwrap().to_string(); - - let files = - list_virtual_commit_files(repository, commit).context("failed to list commit files")?; - - let parent_ids = commit.parents()?.iter().map(Commit::id).collect::>(); - - let commit = VirtualBranchCommit { - id: commit.id(), - created_at: timestamp * 1000, - author: Author::from(signature), - description: message, - is_remote, - files, - is_integrated, - parent_ids, - branch_id: branch.id, - }; - - Ok(commit) -} - -pub fn create_virtual_branch( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - create: &BranchCreateRequest, -) -> Result { - let current_session = gb_repository - .get_or_create_current_session() - .context("failed to get or create currnt session")?; - let 
current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) - .context("failed to open current session")?; - - let default_target = get_default_target(¤t_session_reader) - .context("failed to get default target")? - .ok_or_else(|| { - errors::CreateVirtualBranchError::DefaultTargetNotSet( - errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }, - ) - })?; - - let commit = project_repository - .git_repository - .find_commit(default_target.sha) - .context("failed to find default target commit")?; - - let tree = commit - .tree() - .context("failed to find defaut target commit tree")?; - - let mut all_virtual_branches = Iterator::new(¤t_session_reader) - .context("failed to create branch iterator")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")?; - all_virtual_branches.sort_by_key(|branch| branch.order); - - let order = create - .order - .unwrap_or(all_virtual_branches.len()) - .clamp(0, all_virtual_branches.len()); - - let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - - let selected_for_changes = if let Some(selected_for_changes) = create.selected_for_changes { - if selected_for_changes { - for mut other_branch in Iterator::new(¤t_session_reader) - .context("failed to create branch iterator")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")? 
- { - other_branch.selected_for_changes = None; - branch_writer.write(&mut other_branch)?; - } - Some(chrono::Utc::now().timestamp_millis()) - } else { - None - } - } else { - (!all_virtual_branches - .iter() - .any(|b| b.selected_for_changes.is_some())) - .then_some(chrono::Utc::now().timestamp_millis()) - }; - - // make space for the new branch - for (i, branch) in all_virtual_branches.iter().enumerate() { - let mut branch = branch.clone(); - let new_order = if i < order { i } else { i + 1 }; - if branch.order != new_order { - branch.order = new_order; - branch_writer - .write(&mut branch) - .context("failed to write branch")?; - } - } - - let now = time::UNIX_EPOCH - .elapsed() - .context("failed to get elapsed time")? - .as_millis(); - - let name = dedup( - &all_virtual_branches - .iter() - .map(|b| b.name.as_str()) - .collect::>(), - create - .name - .as_ref() - .unwrap_or(&"Virtual branch".to_string()), - ); - - let mut branch = Branch { - id: BranchId::generate(), - name, - notes: String::new(), - applied: true, - upstream: None, - upstream_head: None, - tree: tree.id(), - head: default_target.sha, - created_timestamp_ms: now, - updated_timestamp_ms: now, - ownership: BranchOwnershipClaims::default(), - order, - selected_for_changes, - }; - - if let Some(ownership) = &create.ownership { - set_ownership( - ¤t_session_reader, - &branch_writer, - &mut branch, - ownership, - ) - .context("failed to set ownership")?; - } - - branch_writer - .write(&mut branch) - .context("failed to write branch")?; - - project_repository.add_branch_reference(&branch)?; - - Ok(branch) -} - -pub fn merge_virtual_branch_upstream( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - branch_id: &BranchId, - signing_key: Option<&keys::PrivateKey>, - user: Option<&users::User>, -) -> Result<(), errors::MergeVirtualBranchUpstreamError> { - if conflicts::is_conflicting::<&Path>(project_repository, None)? 
{ - return Err(errors::MergeVirtualBranchUpstreamError::Conflict( - errors::ProjectConflictError { - project_id: project_repository.project().id, - }, - )); - } - - let current_session = gb_repository - .get_or_create_current_session() - .context("failed to get current session")?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) - .context("failed to open current session")?; - - // get the branch - let branch_reader = branch::Reader::new(¤t_session_reader); - let mut branch = match branch_reader.read(branch_id) { - Ok(branch) => Ok(branch), - Err(reader::Error::NotFound) => Err( - errors::MergeVirtualBranchUpstreamError::BranchNotFound(errors::BranchNotFoundError { - project_id: project_repository.project().id, - branch_id: *branch_id, - }), - ), - Err(error) => Err(errors::MergeVirtualBranchUpstreamError::Other(error.into())), - }?; - - // check if the branch upstream can be merged into the wd cleanly - let repo = &project_repository.git_repository; - - // get upstream from the branch and find the remote branch - let mut upstream_commit = None; - let upstream_branch = branch - .upstream - .as_ref() - .context("no upstream branch found")?; - if let Ok(upstream_oid) = repo.refname_to_id(&upstream_branch.to_string()) { - if let Ok(upstream_commit_obj) = repo.find_commit(upstream_oid) { - upstream_commit = Some(upstream_commit_obj); - } - } - - // if there is no upstream commit, then there is nothing to do - if upstream_commit.is_none() { - // no upstream commit, no merge to be done - return Ok(()); - } - - // there is an upstream commit, so lets check it out - let upstream_commit = upstream_commit.unwrap(); - let remote_tree = upstream_commit.tree().context("failed to get tree")?; - - if upstream_commit.id() == branch.head { - // upstream is already merged, nothing to do - return Ok(()); - } - - // if any other branches are applied, unapply them - let applied_branches = Iterator::new(¤t_session_reader) - .context("failed to create 
branch iterator")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")? - .into_iter() - .filter(|b| b.applied) - .filter(|b| b.id != *branch_id) - .collect::>(); - - // unapply all other branches - for other_branch in applied_branches { - unapply_branch(gb_repository, project_repository, &other_branch.id) - .context("failed to unapply branch")?; - } - - // get merge base from remote branch commit and target commit - let merge_base = repo - .merge_base(upstream_commit.id(), branch.head) - .context("failed to find merge base")?; - let merge_tree = repo - .find_commit(merge_base) - .and_then(|c| c.tree()) - .context(format!( - "failed to find merge base commit {} tree", - merge_base - ))?; - - // get wd tree - let wd_tree = project_repository.get_wd_tree()?; - - // try to merge our wd tree with the upstream tree - let mut merge_index = repo - .merge_trees(&merge_tree, &wd_tree, &remote_tree) - .context("failed to merge trees")?; - - if merge_index.has_conflicts() { - // checkout the conflicts - repo.checkout_index(&mut merge_index) - .allow_conflicts() - .conflict_style_merge() - .force() - .checkout() - .context("failed to checkout index")?; - - // mark conflicts - let conflicts = merge_index.conflicts().context("failed to get conflicts")?; - let mut merge_conflicts = Vec::new(); - for path in conflicts.flatten() { - if let Some(ours) = path.our { - let path = std::str::from_utf8(&ours.path) - .context("failed to convert path to utf8")? 
- .to_string(); - merge_conflicts.push(path); - } - } - conflicts::mark( - project_repository, - &merge_conflicts, - Some(upstream_commit.id()), - )?; - } else { - let merge_tree_oid = merge_index - .write_tree_to(repo) - .context("failed to write tree")?; - let merge_tree = repo - .find_tree(merge_tree_oid) - .context("failed to find merge tree")?; - let branch_writer = - branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - - if *project_repository.project().ok_with_force_push { - // attempt a rebase - let (_, committer) = project_repository.git_signatures(user)?; - let mut rebase_options = git2::RebaseOptions::new(); - rebase_options.quiet(true); - rebase_options.inmemory(true); - let mut rebase = repo - .rebase( - Some(branch.head), - Some(upstream_commit.id()), - None, - Some(&mut rebase_options), - ) - .context("failed to rebase")?; - - let mut rebase_success = true; - // check to see if these commits have already been pushed - let mut last_rebase_head = upstream_commit.id(); - while rebase.next().is_some() { - let index = rebase - .inmemory_index() - .context("failed to get inmemory index")?; - if index.has_conflicts() { - rebase_success = false; - break; - } - - if let Ok(commit_id) = rebase.commit(None, &committer.clone().into(), None) { - last_rebase_head = commit_id.into(); - } else { - rebase_success = false; - break; - } - } - - if rebase_success { - // rebase worked out, rewrite the branch head - rebase.finish(None).context("failed to finish rebase")?; - - project_repository - .git_repository - .checkout_tree(&merge_tree) - .force() - .checkout() - .context("failed to checkout tree")?; - - branch.head = last_rebase_head; - branch.tree = merge_tree_oid; - branch_writer.write(&mut branch)?; - super::integration::update_gitbutler_integration( - gb_repository, - project_repository, - )?; - - return Ok(()); - } - - rebase.abort().context("failed to abort rebase")?; - } - - let head_commit = repo 
- .find_commit(branch.head) - .context("failed to find head commit")?; - - let new_branch_head = project_repository.commit( - user, - format!( - "Merged {}/{} into {}", - upstream_branch.remote(), - upstream_branch.branch(), - branch.name - ) - .as_str(), - &merge_tree, - &[&head_commit, &upstream_commit], - signing_key, - )?; - - // checkout the merge tree - repo.checkout_tree(&merge_tree) - .force() - .checkout() - .context("failed to checkout tree")?; - - // write the branch data - branch.head = new_branch_head; - branch.tree = merge_tree_oid; - branch_writer.write(&mut branch)?; - } - - super::integration::update_gitbutler_integration(gb_repository, project_repository)?; - - Ok(()) -} - -pub fn update_branch( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - branch_update: branch::BranchUpdateRequest, -) -> Result { - let current_session = gb_repository - .get_or_create_current_session() - .context("failed to get or create currnt session")?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) - .context("failed to open current session")?; - let branch_reader = branch::Reader::new(¤t_session_reader); - let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - - let mut branch = branch_reader - .read(&branch_update.id) - .map_err(|error| match error { - reader::Error::NotFound => { - errors::UpdateBranchError::BranchNotFound(errors::BranchNotFoundError { - project_id: project_repository.project().id, - branch_id: branch_update.id, - }) - } - _ => errors::UpdateBranchError::Other(error.into()), - })?; - - if let Some(ownership) = branch_update.ownership { - set_ownership( - ¤t_session_reader, - &branch_writer, - &mut branch, - &ownership, - ) - .context("failed to set ownership")?; - } - - if let Some(name) = branch_update.name { - let all_virtual_branches = Iterator::new(¤t_session_reader) - 
.context("failed to create branch iterator")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")?; - - project_repository.delete_branch_reference(&branch)?; - - branch.name = dedup( - &all_virtual_branches - .iter() - .map(|b| b.name.as_str()) - .collect::>(), - &name, - ); - - project_repository.add_branch_reference(&branch)?; - }; - - if let Some(updated_upstream) = branch_update.upstream { - let default_target = get_default_target(¤t_session_reader) - .context("failed to get default target")? - .ok_or_else(|| { - errors::UpdateBranchError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - let remote_branch = format!( - "refs/remotes/{}/{}", - default_target.branch.remote(), - normalize_branch_name(&updated_upstream) - ) - .parse::() - .unwrap(); - branch.upstream = Some(remote_branch); - }; - - if let Some(notes) = branch_update.notes { - branch.notes = notes; - }; - - if let Some(order) = branch_update.order { - branch.order = order; - }; - - if let Some(selected_for_changes) = branch_update.selected_for_changes { - branch.selected_for_changes = if selected_for_changes { - for mut other_branch in Iterator::new(¤t_session_reader) - .context("failed to create branch iterator")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")? 
- .into_iter() - .filter(|b| b.id != branch.id) - { - other_branch.selected_for_changes = None; - branch_writer.write(&mut other_branch)?; - } - Some(chrono::Utc::now().timestamp_millis()) - } else { - None - }; - }; - - branch_writer - .write(&mut branch) - .context("failed to write target branch")?; - - Ok(branch) -} - -pub fn delete_branch( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - branch_id: &BranchId, -) -> Result<(), errors::DeleteBranchError> { - let current_session = gb_repository - .get_or_create_current_session() - .context("failed to get or create currnt session")?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) - .context("failed to open current session")?; - let branch_reader = branch::Reader::new(¤t_session_reader); - let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - - let branch = match branch_reader.read(branch_id) { - Ok(branch) => Ok(branch), - Err(reader::Error::NotFound) => return Ok(()), - Err(error) => Err(error), - } - .context("failed to read branch")?; - - if branch.applied && unapply_branch(gb_repository, project_repository, branch_id)?.is_none() { - return Ok(()); - } - - branch_writer - .delete(&branch) - .context("Failed to remove branch")?; - - project_repository.delete_branch_reference(&branch)?; - - ensure_selected_for_changes(¤t_session_reader, &branch_writer) - .context("failed to ensure selected for changes")?; - - Ok(()) -} - -fn ensure_selected_for_changes( - current_session_reader: &sessions::Reader, - branch_writer: &branch::Writer, -) -> Result<()> { - let mut applied_branches = Iterator::new(current_session_reader) - .context("failed to create branch iterator")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")? 
- .into_iter() - .filter(|b| b.applied) - .collect::>(); - - if applied_branches.is_empty() { - println!("no applied branches"); - return Ok(()); - } - - if applied_branches - .iter() - .any(|b| b.selected_for_changes.is_some()) - { - println!("some branches already selected for changes"); - return Ok(()); - } - - applied_branches.sort_by_key(|branch| branch.order); - - applied_branches[0].selected_for_changes = Some(chrono::Utc::now().timestamp_millis()); - branch_writer.write(&mut applied_branches[0])?; - Ok(()) -} - -fn set_ownership( - session_reader: &sessions::Reader, - branch_writer: &branch::Writer, - target_branch: &mut branch::Branch, - ownership: &branch::BranchOwnershipClaims, -) -> Result<()> { - if target_branch.ownership.eq(ownership) { - // nothing to update - return Ok(()); - } - - let virtual_branches = Iterator::new(session_reader) - .context("failed to create branch iterator")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")?; - - let mut claim_outcomes = - branch::reconcile_claims(virtual_branches, target_branch, &ownership.claims)?; - for claim_outcome in &mut claim_outcomes { - if !claim_outcome.removed_claims.is_empty() { - branch_writer - .write(&mut claim_outcome.updated_branch) - .context("failed to write ownership for branch".to_string())?; - } - } - - // Updates the claiming branch that was passed as mutable state with the new ownership claims - // TODO: remove mutable reference to target_branch - target_branch.ownership = ownership.clone(); - - Ok(()) -} - -fn get_mtime(cache: &mut HashMap, file_path: &PathBuf) -> u128 { - if let Some(mtime) = cache.get(file_path) { - *mtime - } else { - let mtime = file_path - .metadata() - .map_or_else( - |_| time::SystemTime::now(), - |metadata| { - metadata - .modified() - .or(metadata.created()) - .unwrap_or_else(|_| time::SystemTime::now()) - }, - ) - .duration_since(time::UNIX_EPOCH) - .unwrap() - .as_millis(); - cache.insert(file_path.clone(), mtime); - mtime - } 
-} - -pub fn virtual_hunks_by_filepath( - project_path: &Path, - diff: &HashMap>, -) -> HashMap> { - let mut mtimes: HashMap = HashMap::new(); - diff.iter() - .map(|(file_path, hunks)| { - let hunks = hunks - .iter() - .map(|hunk| VirtualBranchHunk { - id: format!("{}-{}", hunk.new_start, hunk.new_start + hunk.new_lines), - modified_at: get_mtime(&mut mtimes, &project_path.join(file_path)), - file_path: file_path.clone(), - diff: hunk.diff.clone(), - old_start: hunk.old_start, - start: hunk.new_start, - end: hunk.new_start + hunk.new_lines, - binary: hunk.binary, - hash: Hunk::hash(&hunk.diff), - locked: false, - locked_to: None, - change_type: hunk.change_type, - }) - .collect::>(); - (file_path.clone(), hunks) - }) - .collect::>() -} - -pub type BranchStatus = HashMap>; - -// list the virtual branches and their file statuses (statusi?) -#[allow(clippy::type_complexity)] -pub fn get_status_by_branch( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, -) -> Result<(Vec<(branch::Branch, BranchStatus)>, Vec)> { - let latest_session = gb_repository - .get_latest_session() - .context("failed to get latest session")? - .context("latest session not found")?; - let session_reader = sessions::Reader::open(gb_repository, &latest_session) - .context("failed to open current session")?; - - let default_target = - match get_default_target(&session_reader).context("failed to read default target")? { - Some(target) => target, - None => { - return Ok((vec![], vec![])); - } - }; - - let virtual_branches = Iterator::new(&session_reader) - .context("failed to create branch iterator")? 
- .collect::, reader::Error>>() - .context("failed to read virtual branches")?; - - let applied_virtual_branches = virtual_branches - .iter() - .filter(|branch| branch.applied) - .cloned() - .collect::>(); - - let (applied_status, skipped_files) = get_applied_status( - gb_repository, - project_repository, - &default_target, - applied_virtual_branches, - )?; - - let non_applied_virtual_branches = virtual_branches - .into_iter() - .filter(|branch| !branch.applied) - .collect::>(); - - let non_applied_status = get_non_applied_status( - project_repository, - &default_target, - non_applied_virtual_branches, - )?; - - Ok(( - applied_status - .into_iter() - .chain(non_applied_status) - .collect(), - skipped_files, - )) -} - -// given a list of non applied virtual branches, return the status of each file, comparing the default target with -// virtual branch latest tree -// -// ownerships are not taken into account here, as they are not relevant for non applied branches -fn get_non_applied_status( - project_repository: &project_repository::Repository, - default_target: &target::Target, - virtual_branches: Vec, -) -> Result> { - virtual_branches - .into_iter() - .map( - |branch| -> Result<(branch::Branch, HashMap>)> { - if branch.applied { - bail!("branch {} is applied", branch.name); - } - let branch_tree = project_repository - .git_repository - .find_tree(branch.tree) - .context(format!("failed to find tree {}", branch.tree))?; - - let target_tree = project_repository - .git_repository - .find_commit(default_target.sha) - .context("failed to find target commit")? 
- .tree() - .context("failed to find target tree")?; - - let diff = diff::trees( - &project_repository.git_repository, - &target_tree, - &branch_tree, - context_lines(project_repository), - )?; - - Ok((branch, diff::diff_files_to_hunks(&diff))) - }, - ) - .collect::>>() -} - -// given a list of applied virtual branches, return the status of each file, comparing the default target with -// the working directory -// -// ownerships are updated if nessessary -fn get_applied_status( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - default_target: &target::Target, - mut virtual_branches: Vec, -) -> Result<(AppliedStatuses, Vec)> { - let base_file_diffs = diff::workdir( - &project_repository.git_repository, - &default_target.sha, - context_lines(project_repository), - ) - .context("failed to diff workdir")?; - - let mut base_diffs: HashMap> = - diff_files_to_hunks(&base_file_diffs); - let mut skipped_files: Vec = Vec::new(); - for (_, file_diff) in base_file_diffs { - if file_diff.skipped { - skipped_files.push(file_diff); - } - } - - // sort by order, so that the default branch is first (left in the ui) - virtual_branches.sort_by(|a, b| a.order.cmp(&b.order)); - - if virtual_branches.is_empty() && !base_diffs.is_empty() { - // no virtual branches, but hunks: create default branch - virtual_branches = vec![create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .context("failed to create default branch")?]; - } - - // align branch ownership to the real hunks: - // - update shifted hunks - // - remove non existent hunks - - let mut diffs_by_branch: HashMap>> = - virtual_branches - .iter() - .map(|branch| (branch.id, HashMap::new())) - .collect(); - - let mut mtimes = HashMap::new(); - - for branch in &mut virtual_branches { - if !branch.applied { - bail!("branch {} is not applied", branch.name); - } - - let old_claims = branch.ownership.claims.clone(); - let new_claims = 
old_claims - .iter() - .filter_map(|claim| { - let git_diff_hunks = match base_diffs.get_mut(&claim.file_path) { - None => return None, - Some(hunks) => hunks, - }; - - let mtime = get_mtime(&mut mtimes, &claim.file_path); - - let claimed_hunks: Vec = claim - .hunks - .iter() - .filter_map(|claimed_hunk| { - // if any of the current hunks intersects with the owned hunk, we want to keep it - for (i, git_diff_hunk) in git_diff_hunks.iter().enumerate() { - let hash = Hunk::hash(&git_diff_hunk.diff); - // Eq compares hashes first, and if one of the hunks lacks a hash, it compares line numbers - if claimed_hunk.eq(&Hunk::from(git_diff_hunk)) { - // try to re-use old timestamp - let timestamp = claimed_hunk.timestam_ms().unwrap_or(mtime); - // push hunk to the end of the list, preserving the order - diffs_by_branch - .entry(branch.id) - .or_default() - .entry(claim.file_path.clone()) - .or_default() - .push(git_diff_hunk.clone()); - - git_diff_hunks.remove(i); - return Some( - claimed_hunk - .with_timestamp(timestamp) - .with_hash(hash.as_str()), - ); - } else if claimed_hunk.intersects(git_diff_hunk) { - // if it's an intersection, push the hunk to the beginning, - // indicating the the hunk has been updated - diffs_by_branch - .entry(branch.id) - .or_default() - .entry(claim.file_path.clone()) - .or_default() - .insert(0, git_diff_hunk.clone()); - - let updated_hunk = Hunk { - start: git_diff_hunk.new_start, - end: git_diff_hunk.new_start + git_diff_hunk.new_lines, - timestamp_ms: Some(mtime), - hash: Some(hash.clone()), - }; - - // remove the hunk from the current hunks because each hunk can - // only be owned once - git_diff_hunks.remove(i); - - // return updated version, with new hash and/or timestamp - return Some(updated_hunk); - } - } - None - }) - .collect(); - - if claimed_hunks.is_empty() { - // No need for an empty claim - None - } else { - Some(OwnershipClaim { - file_path: claim.file_path.clone(), - hunks: claimed_hunks, - }) - } - }) - .collect(); - - 
branch.ownership = BranchOwnershipClaims { claims: new_claims }; - } - - let max_selected_for_changes = virtual_branches - .iter() - .filter_map(|b| b.selected_for_changes) - .max() - .unwrap_or(-1); - let default_vbranch_pos = virtual_branches - .iter() - .position(|b| b.selected_for_changes == Some(max_selected_for_changes)) - .unwrap_or(0); - - // put the remaining hunks into the default (first) branch - for (filepath, hunks) in base_diffs { - for hunk in hunks { - virtual_branches[default_vbranch_pos] - .ownership - .put(&OwnershipClaim { - file_path: filepath.clone(), - hunks: vec![Hunk::from(&hunk) - .with_timestamp(get_mtime(&mut mtimes, &filepath)) - .with_hash(Hunk::hash(hunk.diff.as_str()).as_str())], - }); - diffs_by_branch - .entry(virtual_branches[default_vbranch_pos].id) - .or_default() - .entry(filepath.clone()) - .or_default() - .push(hunk.clone()); - } - } - - let mut hunks_by_branch = diffs_by_branch - .into_iter() - .map(|(branch_id, hunks)| { - ( - virtual_branches - .iter() - .find(|b| b.id.eq(&branch_id)) - .unwrap() - .clone(), - hunks, - ) - }) - .collect::>(); - - // write updated state if not resolving - if !project_repository.is_resolving() { - let branch_writer = - branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - for (vbranch, files) in &mut hunks_by_branch { - vbranch.tree = write_tree(project_repository, default_target, files)?; - branch_writer - .write(vbranch) - .context(format!("failed to write virtual branch {}", vbranch.name))?; - } - } - - Ok((hunks_by_branch, skipped_files)) -} - -fn virtual_hunks_to_virtual_files( - project_repository: &project_repository::Repository, - hunks: &[VirtualBranchHunk], -) -> Vec { - hunks - .iter() - .fold(HashMap::>::new(), |mut acc, hunk| { - acc.entry(hunk.file_path.clone()) - .or_default() - .push(hunk.clone()); - acc - }) - .into_iter() - .map(|(file_path, hunks)| VirtualBranchFile { - id: file_path.display().to_string(), - 
path: file_path.clone(), - hunks: hunks.clone(), - binary: hunks.iter().any(|h| h.binary), - large: false, - modified_at: hunks.iter().map(|h| h.modified_at).max().unwrap_or(0), - conflicted: conflicts::is_conflicting( - project_repository, - Some(&file_path.display().to_string()), - ) - .unwrap_or(false), - }) - .collect::>() -} - -// reset virtual branch to a specific commit -pub fn reset_branch( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - branch_id: &BranchId, - target_commit_oid: git::Oid, -) -> Result<(), errors::ResetBranchError> { - let current_session = gb_repository.get_or_create_current_session()?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session)?; - - let default_target = get_default_target(¤t_session_reader) - .context("failed to read default target")? - .ok_or_else(|| { - errors::ResetBranchError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - - let branch_reader = branch::Reader::new(¤t_session_reader); - let mut branch = match branch_reader.read(branch_id) { - Ok(branch) => Ok(branch), - Err(reader::Error::NotFound) => Err(errors::ResetBranchError::BranchNotFound( - errors::BranchNotFoundError { - branch_id: *branch_id, - project_id: project_repository.project().id, - }, - )), - Err(error) => Err(errors::ResetBranchError::Other(error.into())), - }?; - - if branch.head == target_commit_oid { - // nothing to do - return Ok(()); - } - - if default_target.sha != target_commit_oid - && !project_repository - .l(branch.head, LogUntil::Commit(default_target.sha))? 
- .contains(&target_commit_oid) - { - return Err(errors::ResetBranchError::CommitNotFoundInBranch( - target_commit_oid, - )); - } - - let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - branch.head = target_commit_oid; - branch_writer - .write(&mut branch) - .context("failed to write branch")?; - - super::integration::update_gitbutler_integration(gb_repository, project_repository) - .context("failed to update gitbutler integration")?; - - Ok(()) -} - -fn diffs_to_virtual_files( - project_repository: &project_repository::Repository, - diffs: &HashMap>, -) -> Vec { - let hunks_by_filepath = virtual_hunks_by_filepath(&project_repository.project().path, diffs); - virtual_hunks_to_virtual_files( - project_repository, - &hunks_by_filepath - .values() - .flatten() - .cloned() - .collect::>(), - ) -} - -// this function takes a list of file ownership, -// constructs a tree from those changes on top of the target -// and writes it as a new tree for storage -pub fn write_tree( - project_repository: &project_repository::Repository, - target: &target::Target, - files: &HashMap>, -) -> Result { - write_tree_onto_commit(project_repository, target.sha, files) -} - -pub fn write_tree_onto_commit( - project_repository: &project_repository::Repository, - commit_oid: git::Oid, - files: &HashMap>, -) -> Result { - // read the base sha into an index - let git_repository = &project_repository.git_repository; - - let head_commit = git_repository.find_commit(commit_oid)?; - let base_tree = head_commit.tree()?; - - write_tree_onto_tree(project_repository, &base_tree, files) -} - -pub fn write_tree_onto_tree( - project_repository: &project_repository::Repository, - base_tree: &git::Tree, - files: &HashMap>, -) -> Result { - let git_repository = &project_repository.git_repository; - let mut builder = git_repository.treebuilder(Some(base_tree)); - // now update the index with content in the working directory 
for each file - for (filepath, hunks) in files { - // convert this string to a Path - let rel_path = Path::new(&filepath); - let full_path = project_repository.path().join(rel_path); - - let is_submodule = - full_path.is_dir() && hunks.len() == 1 && hunks[0].diff.contains("Subproject commit"); - - // if file exists - if full_path.exists() { - // if file is executable, use 755, otherwise 644 - let mut filemode = git::FileMode::Blob; - // check if full_path file is executable - if let Ok(metadata) = std::fs::symlink_metadata(&full_path) { - #[cfg(target_family = "unix")] - { - if metadata.permissions().mode() & 0o111 != 0 { - filemode = git::FileMode::BlobExecutable; - } - } - #[cfg(target_os = "windows")] - { - // TODO(qix-): Pull from `core.filemode` config option to determine - // TODO(qix-): the behavior on windows. For now, we set this to true. - // TODO(qix-): It's not ideal, but it gets us to a windows build faster. - filemode = git::FileMode::BlobExecutable; - } - - if metadata.file_type().is_symlink() { - filemode = git::FileMode::Link; - } - } - - // get the blob - if filemode == git::FileMode::Link { - // it's a symlink, make the content the path of the link - let link_target = std::fs::read_link(&full_path)?; - - // if the link target is inside the project repository, make it relative - let link_target = link_target - .strip_prefix(project_repository.path()) - .unwrap_or(&link_target); - - let blob_oid = git_repository.blob( - link_target - .to_str() - .ok_or_else(|| Error::InvalidUnicodePath(link_target.into()))? 
- .as_bytes(), - )?; - builder.upsert(rel_path, blob_oid, filemode); - } else if let Ok(tree_entry) = base_tree.get_path(rel_path) { - if hunks.len() == 1 && hunks[0].binary { - let new_blob_oid = &hunks[0].diff; - // convert string to Oid - let new_blob_oid = new_blob_oid.parse().context("failed to diff as oid")?; - builder.upsert(rel_path, new_blob_oid, filemode); - } else { - // blob from tree_entry - let blob = tree_entry - .to_object(git_repository) - .unwrap() - .peel_to_blob() - .context("failed to get blob")?; - - let mut blob_contents = blob.content().to_str()?.to_string(); - - let mut hunks = hunks.clone(); - hunks.sort_by_key(|hunk| hunk.new_start); - let mut all_diffs = String::new(); - for hunk in hunks { - all_diffs.push_str(&hunk.diff); - } - - let patch = Patch::from_str(&all_diffs)?; - blob_contents = apply(&blob_contents, &patch) - .context(format!("failed to apply {}", &all_diffs))?; - - // create a blob - let new_blob_oid = git_repository.blob(blob_contents.as_bytes())?; - // upsert into the builder - builder.upsert(rel_path, new_blob_oid, filemode); - } - } else if is_submodule { - let mut blob_contents = String::new(); - - let mut hunks = hunks.clone(); - hunks.sort_by_key(|hunk| hunk.new_start); - for hunk in hunks { - let patch = Patch::from_str(&hunk.diff)?; - blob_contents = apply(&blob_contents, &patch) - .context(format!("failed to apply {}", &hunk.diff))?; - } - - // create a blob - let new_blob_oid = git_repository.blob(blob_contents.as_bytes())?; - // upsert into the builder - builder.upsert(rel_path, new_blob_oid, filemode); - } else { - // create a git blob from a file on disk - let blob_oid = git_repository - .blob_path(&full_path) - .context(format!("failed to create blob from path {:?}", &full_path))?; - builder.upsert(rel_path, blob_oid, filemode); - } - } else if base_tree.get_path(rel_path).is_ok() { - // remove file from index if it exists in the base tree - builder.remove(rel_path); - } else { - // file not in index or base 
tree, do nothing - // this is the - } - } - - // now write out the tree - let tree_oid = builder.write().context("failed to write updated tree")?; - - Ok(tree_oid) -} - -fn _print_tree(repo: &git2::Repository, tree: &git2::Tree) -> Result<()> { - println!("tree id: {}", tree.id()); - for entry in tree { - println!( - " entry: {} {}", - entry.name().unwrap_or_default(), - entry.id() - ); - // get entry contents - let object = entry.to_object(repo).context("failed to get object")?; - let blob = object.as_blob().context("failed to get blob")?; - // convert content to string - if let Ok(content) = std::str::from_utf8(blob.content()) { - println!(" blob: {}", content); - } else { - println!(" blob: BINARY"); - } - } - Ok(()) -} - -#[allow(clippy::too_many_arguments)] -pub fn commit( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - branch_id: &BranchId, - message: &str, - ownership: Option<&branch::BranchOwnershipClaims>, - signing_key: Option<&keys::PrivateKey>, - user: Option<&users::User>, - run_hooks: bool, -) -> Result { - let mut message_buffer = message.to_owned(); - - if run_hooks { - let hook_result = project_repository - .git_repository - .run_hook_commit_msg(&mut message_buffer) - .context("failed to run hook")?; - - if let HookResult::RunNotSuccessful { stdout, .. } = hook_result { - return Err(errors::CommitError::CommitMsgHookRejected(stdout)); - } - - let hook_result = project_repository - .git_repository - .run_hook_pre_commit() - .context("failed to run hook")?; - - if let HookResult::RunNotSuccessful { stdout, .. } = hook_result { - return Err(errors::CommitError::CommitHookRejected(stdout)); - } - } - - let message = &message_buffer; - - let default_target = gb_repository - .default_target() - .context("failed to get default target")? 
- .ok_or_else(|| { - errors::CommitError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - - // get the files to commit - let (mut statuses, _) = get_status_by_branch(gb_repository, project_repository) - .context("failed to get status by branch")?; - - let (ref mut branch, files) = statuses - .iter_mut() - .find(|(branch, _)| branch.id == *branch_id) - .ok_or_else(|| { - errors::CommitError::BranchNotFound(errors::BranchNotFoundError { - project_id: project_repository.project().id, - branch_id: *branch_id, - }) - })?; - - let files = calculate_non_commited_diffs(project_repository, branch, &default_target, files)?; - if conflicts::is_conflicting::<&Path>(project_repository, None)? { - return Err(errors::CommitError::Conflicted( - errors::ProjectConflictError { - project_id: project_repository.project().id, - }, - )); - } - - let tree_oid = if let Some(ownership) = ownership { - let files = files - .iter() - .filter_map(|(filepath, hunks)| { - let hunks = hunks - .iter() - .filter(|hunk| { - ownership - .claims - .iter() - .find(|f| f.file_path.eq(filepath)) - .map_or(false, |f| { - f.hunks.iter().any(|h| { - h.start == hunk.new_start - && h.end == hunk.new_start + hunk.new_lines - }) - }) - }) - .cloned() - .collect::>(); - if hunks.is_empty() { - None - } else { - Some((filepath.clone(), hunks)) - } - }) - .collect::>(); - write_tree_onto_commit(project_repository, branch.head, &files)? - } else { - write_tree_onto_commit(project_repository, branch.head, &files)? 
- }; - - let git_repository = &project_repository.git_repository; - let parent_commit = git_repository - .find_commit(branch.head) - .context(format!("failed to find commit {:?}", branch.head))?; - let tree = git_repository - .find_tree(tree_oid) - .context(format!("failed to find tree {:?}", tree_oid))?; - - // now write a commit, using a merge parent if it exists - let extra_merge_parent = - conflicts::merge_parent(project_repository).context("failed to get merge parent")?; - - let commit_oid = match extra_merge_parent { - Some(merge_parent) => { - let merge_parent = git_repository - .find_commit(merge_parent) - .context(format!("failed to find merge parent {:?}", merge_parent))?; - let commit_oid = project_repository.commit( - user, - message, - &tree, - &[&parent_commit, &merge_parent], - signing_key, - )?; - conflicts::clear(project_repository).context("failed to clear conflicts")?; - commit_oid - } - None => project_repository.commit(user, message, &tree, &[&parent_commit], signing_key)?, - }; - - if run_hooks { - project_repository - .git_repository - .run_hook_post_commit() - .context("failed to run hook")?; - } - - // update the virtual branch head - let writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - branch.tree = tree_oid; - branch.head = commit_oid; - writer.write(branch).context("failed to write branch")?; - - super::integration::update_gitbutler_integration(gb_repository, project_repository) - .context("failed to update gitbutler integration")?; - - Ok(commit_oid) -} - -pub fn push( - project_repository: &project_repository::Repository, - gb_repository: &gb_repository::Repository, - branch_id: &BranchId, - with_force: bool, - credentials: &git::credentials::Helper, - askpass: Option<(AskpassBroker, Option)>, -) -> Result<(), errors::PushError> { - let current_session = gb_repository - .get_or_create_current_session() - .context("failed to get or create currnt session") - 
.map_err(errors::PushError::Other)?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) - .context("failed to open current session") - .map_err(errors::PushError::Other)?; - - let branch_reader = branch::Reader::new(¤t_session_reader); - let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - - let mut vbranch = branch_reader.read(branch_id).map_err(|error| match error { - reader::Error::NotFound => errors::PushError::BranchNotFound(errors::BranchNotFoundError { - project_id: project_repository.project().id, - branch_id: *branch_id, - }), - error => errors::PushError::Other(error.into()), - })?; - - let remote_branch = if let Some(upstream_branch) = vbranch.upstream.as_ref() { - upstream_branch.clone() - } else { - let default_target = get_default_target(¤t_session_reader) - .context("failed to get default target")? - .ok_or_else(|| { - errors::PushError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - - let remote_branch = format!( - "refs/remotes/{}/{}", - default_target.branch.remote(), - normalize_branch_name(&vbranch.name) - ) - .parse::() - .context("failed to parse remote branch name")?; - - let remote_branches = project_repository.git_remote_branches()?; - let existing_branches = remote_branches - .iter() - .map(RemoteRefname::branch) - .map(str::to_lowercase) // git is weird about case sensitivity here, assume not case sensitive - .collect::>(); - - remote_branch.with_branch(&dedup_fmt( - &existing_branches - .iter() - .map(String::as_str) - .collect::>(), - remote_branch.branch(), - "-", - )) - }; - - project_repository.push( - &vbranch.head, - &remote_branch, - with_force, - credentials, - None, - askpass.clone(), - )?; - - vbranch.upstream = Some(remote_branch.clone()); - vbranch.upstream_head = Some(vbranch.head); - branch_writer - .write(&mut vbranch) - .context("failed to 
write target branch after push")?; - project_repository.fetch( - remote_branch.remote(), - credentials, - askpass.map(|(broker, _)| (broker, "modal".to_string())), - )?; - - Ok(()) -} - -fn is_commit_integrated( - project_repository: &project_repository::Repository, - target: &target::Target, - commit: &git::Commit, -) -> Result { - let remote_branch = project_repository - .git_repository - .find_branch(&target.branch.clone().into())?; - let remote_head = remote_branch.peel_to_commit()?; - let upstream_commits = project_repository.l( - remote_head.id(), - project_repository::LogUntil::Commit(target.sha), - )?; - - if target.sha.eq(&commit.id()) { - // could not be integrated if heads are the same. - return Ok(false); - } - - if upstream_commits.is_empty() { - // could not be integrated - there is nothing new upstream. - return Ok(false); - } - - if upstream_commits.contains(&commit.id()) { - return Ok(true); - } - - let merge_base_id = project_repository - .git_repository - .merge_base(target.sha, commit.id())?; - if merge_base_id.eq(&commit.id()) { - // if merge branch is the same as branch head and there are upstream commits - // then it's integrated - return Ok(true); - } - - let merge_base = project_repository - .git_repository - .find_commit(merge_base_id)?; - let merge_base_tree = merge_base.tree()?; - let upstream = project_repository - .git_repository - .find_commit(remote_head.id())?; - let upstream_tree = upstream.tree()?; - - if merge_base_tree.id() == upstream_tree.id() { - // if merge base is the same as upstream tree, then it's integrated - return Ok(true); - } - - // try to merge our tree into the upstream tree - let mut merge_index = project_repository - .git_repository - .merge_trees(&merge_base_tree, &commit.tree()?, &upstream_tree) - .context("failed to merge trees")?; - - if merge_index.has_conflicts() { - return Ok(false); - } - - let merge_tree_oid = merge_index - .write_tree_to(&project_repository.git_repository) - .context("failed to write 
tree")?; - - // if the merge_tree is the same as the new_target_tree and there are no files (uncommitted changes) - // then the vbranch is fully merged - Ok(merge_tree_oid == upstream_tree.id()) -} - -pub fn is_remote_branch_mergeable( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - branch_name: &git::RemoteRefname, -) -> Result { - // get the current target - let latest_session = gb_repository.get_latest_session()?.ok_or_else(|| { - errors::IsRemoteBranchMergableError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - let session_reader = sessions::Reader::open(gb_repository, &latest_session) - .context("failed to open current session")?; - - let default_target = get_default_target(&session_reader) - .context("failed to get default target")? - .ok_or_else(|| { - errors::IsRemoteBranchMergableError::DefaultTargetNotSet( - errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }, - ) - })?; - - let target_commit = project_repository - .git_repository - .find_commit(default_target.sha) - .context("failed to find target commit")?; - - let branch = match project_repository - .git_repository - .find_branch(&branch_name.into()) - { - Ok(branch) => Ok(branch), - Err(git::Error::NotFound(_)) => Err(errors::IsRemoteBranchMergableError::BranchNotFound( - branch_name.clone(), - )), - Err(error) => Err(errors::IsRemoteBranchMergableError::Other(error.into())), - }?; - let branch_oid = branch.target().context("detatched head")?; - let branch_commit = project_repository - .git_repository - .find_commit(branch_oid) - .context("failed to find branch commit")?; - - let base_tree = find_base_tree( - &project_repository.git_repository, - &branch_commit, - &target_commit, - )?; - - let wd_tree = project_repository.get_wd_tree()?; - - let branch_tree = branch_commit.tree().context("failed to find branch tree")?; - let mergeable = 
!project_repository - .git_repository - .merge_trees(&base_tree, &branch_tree, &wd_tree) - .context("failed to merge trees")? - .has_conflicts(); - - Ok(mergeable) -} - -pub fn is_virtual_branch_mergeable( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - branch_id: &BranchId, -) -> Result { - let latest_session = gb_repository.get_latest_session()?.ok_or_else(|| { - errors::IsVirtualBranchMergeable::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - let session_reader = sessions::Reader::open(gb_repository, &latest_session) - .context("failed to open current session reader")?; - let branch_reader = branch::Reader::new(&session_reader); - let branch = match branch_reader.read(branch_id) { - Ok(branch) => Ok(branch), - Err(reader::Error::NotFound) => Err(errors::IsVirtualBranchMergeable::BranchNotFound( - errors::BranchNotFoundError { - project_id: project_repository.project().id, - branch_id: *branch_id, - }, - )), - Err(error) => Err(errors::IsVirtualBranchMergeable::Other(error.into())), - }?; - - if branch.applied { - return Ok(true); - } - - let default_target = get_default_target(&session_reader) - .context("failed to read default target")? 
- .ok_or_else(|| { - errors::IsVirtualBranchMergeable::DefaultTargetNotSet( - errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }, - ) - })?; - - // determine if this branch is up to date with the target/base - let merge_base = project_repository - .git_repository - .merge_base(default_target.sha, branch.head) - .context("failed to find merge base")?; - - if merge_base != default_target.sha { - return Ok(false); - } - - let branch_commit = project_repository - .git_repository - .find_commit(branch.head) - .context("failed to find branch commit")?; - - let target_commit = project_repository - .git_repository - .find_commit(default_target.sha) - .context("failed to find target commit")?; - - let base_tree = find_base_tree( - &project_repository.git_repository, - &branch_commit, - &target_commit, - )?; - - let wd_tree = project_repository.get_wd_tree()?; - - // determine if this tree is mergeable - let branch_tree = project_repository - .git_repository - .find_tree(branch.tree) - .context("failed to find branch tree")?; - - let is_mergeable = !project_repository - .git_repository - .merge_trees(&base_tree, &branch_tree, &wd_tree) - .context("failed to merge trees")? - .has_conflicts(); - - Ok(is_mergeable) -} - -pub fn amend( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - branch_id: &BranchId, - target_ownership: &BranchOwnershipClaims, -) -> Result { - if conflicts::is_conflicting::<&Path>(project_repository, None)? 
{ - return Err(errors::AmendError::Conflict(errors::ProjectConflictError { - project_id: project_repository.project().id, - })); - } - - let current_session = gb_repository - .get_or_create_current_session() - .context("failed to get or create current session")?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) - .context("failed to open current session")?; - - let all_branches = Iterator::new(¤t_session_reader) - .context("failed to create branch iterator")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")? - .into_iter() - .collect::>(); - - if !all_branches.iter().any(|b| b.id == *branch_id) { - return Err(errors::AmendError::BranchNotFound( - errors::BranchNotFoundError { - project_id: project_repository.project().id, - branch_id: *branch_id, - }, - )); - } - - let applied_branches = all_branches - .into_iter() - .filter(|b| b.applied) - .collect::>(); - - if !applied_branches.iter().any(|b| b.id == *branch_id) { - return Err(errors::AmendError::BranchNotFound( - errors::BranchNotFoundError { - project_id: project_repository.project().id, - branch_id: *branch_id, - }, - )); - } - - let default_target = get_default_target(¤t_session_reader) - .context("failed to read default target")? 
- .ok_or_else(|| { - errors::AmendError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - - let (mut applied_statuses, _) = get_applied_status( - gb_repository, - project_repository, - &default_target, - applied_branches, - )?; - - let (ref mut target_branch, target_status) = applied_statuses - .iter_mut() - .find(|(b, _)| b.id == *branch_id) - .ok_or_else(|| { - errors::AmendError::BranchNotFound(errors::BranchNotFoundError { - project_id: project_repository.project().id, - branch_id: *branch_id, - }) - })?; - - if target_branch.upstream.is_some() && !project_repository.project().ok_with_force_push { - // amending to a pushed head commit will cause a force push that is not allowed - return Err(errors::AmendError::ForcePushNotAllowed( - errors::ForcePushNotAllowedError { - project_id: project_repository.project().id, - }, - )); - } - - if project_repository - .l( - target_branch.head, - project_repository::LogUntil::Commit(default_target.sha), - )? 
- .is_empty() - { - return Err(errors::AmendError::BranchHasNoCommits); - } - - let diffs_to_consider = calculate_non_commited_diffs( - project_repository, - target_branch, - &default_target, - target_status, - )?; - - let head_commit = project_repository - .git_repository - .find_commit(target_branch.head) - .context("failed to find head commit")?; - - let diffs_to_amend = target_ownership - .claims - .iter() - .filter_map(|file_ownership| { - let hunks = diffs_to_consider - .get(&file_ownership.file_path) - .map(|hunks| { - hunks - .iter() - .filter(|hunk| { - file_ownership.hunks.iter().any(|owned_hunk| { - owned_hunk.start == hunk.new_start - && owned_hunk.end == hunk.new_start + hunk.new_lines - }) - }) - .cloned() - .collect::>() - }) - .unwrap_or_default(); - if hunks.is_empty() { - None - } else { - Some((file_ownership.file_path.clone(), hunks)) - } - }) - .collect::>(); - - if diffs_to_amend.is_empty() { - return Err(errors::AmendError::TargetOwnerhshipNotFound( - target_ownership.clone(), - )); - } - - let new_tree_oid = - write_tree_onto_commit(project_repository, target_branch.head, &diffs_to_amend)?; - let new_tree = project_repository - .git_repository - .find_tree(new_tree_oid) - .context("failed to find new tree")?; - - let parents = head_commit - .parents() - .context("failed to find head commit parents")?; - - let commit_oid = project_repository - .git_repository - .commit( - None, - &head_commit.author(), - &head_commit.committer(), - head_commit.message().unwrap_or_default(), - &new_tree, - &parents.iter().collect::>(), - ) - .context("failed to create commit")?; - - let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - target_branch.head = commit_oid; - branch_writer.write(target_branch)?; - - super::integration::update_gitbutler_integration(gb_repository, project_repository)?; - - Ok(commit_oid) -} - -pub fn cherry_pick( - gb_repository: 
&gb_repository::Repository, - project_repository: &project_repository::Repository, - branch_id: &BranchId, - target_commit_oid: git::Oid, -) -> Result, errors::CherryPickError> { - if conflicts::is_conflicting::<&Path>(project_repository, None)? { - return Err(errors::CherryPickError::Conflict( - errors::ProjectConflictError { - project_id: project_repository.project().id, - }, - )); - } - - let current_session = gb_repository - .get_or_create_current_session() - .context("failed to get or create current session")?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) - .context("failed to open current session")?; - let branch_reader = branch::Reader::new(¤t_session_reader); - let mut branch = branch_reader - .read(branch_id) - .context("failed to read branch")?; - - if !branch.applied { - // todo? - return Err(errors::CherryPickError::NotApplied); - } - - let target_commit = project_repository - .git_repository - .find_commit(target_commit_oid) - .map_err(|error| match error { - git::Error::NotFound(_) => errors::CherryPickError::CommitNotFound(target_commit_oid), - error => errors::CherryPickError::Other(error.into()), - })?; - - let branch_head_commit = project_repository - .git_repository - .find_commit(branch.head) - .context("failed to find branch tree")?; - - let default_target = get_default_target(¤t_session_reader) - .context("failed to read default target")? - .context("no default target set")?; - - // if any other branches are applied, unapply them - let applied_branches = Iterator::new(¤t_session_reader) - .context("failed to create branch iterator")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")? 
- .into_iter() - .filter(|b| b.applied) - .collect::>(); - - let (applied_statuses, _) = get_applied_status( - gb_repository, - project_repository, - &default_target, - applied_branches, - )?; - - let branch_files = applied_statuses - .iter() - .find(|(b, _)| b.id == *branch_id) - .map(|(_, f)| f) - .context("branch status not found")?; - - // create a wip commit. we'll use it to offload cherrypick conflicts calculation to libgit. - let wip_commit = { - let wip_tree_oid = write_tree(project_repository, &default_target, branch_files)?; - let wip_tree = project_repository - .git_repository - .find_tree(wip_tree_oid) - .context("failed to find tree")?; - - let signature = git::Signature::now("GitButler", "gitbutler@gitbutler.com") - .context("failed to make gb signature")?; - let oid = project_repository - .git_repository - .commit( - None, - &signature, - &signature, - "wip cherry picking commit", - &wip_tree, - &[&branch_head_commit], - ) - .context("failed to commit wip work")?; - project_repository - .git_repository - .find_commit(oid) - .context("failed to find wip commit")? 
- }; - - let mut cherrypick_index = project_repository - .git_repository - .cherry_pick(&wip_commit, &target_commit) - .context("failed to cherry pick")?; - - // unapply other branches - for other_branch in applied_statuses - .iter() - .filter(|(b, _)| b.id != branch.id) - .map(|(b, _)| b) - { - unapply_branch(gb_repository, project_repository, &other_branch.id) - .context("failed to unapply branch")?; - } - - let commit_oid = if cherrypick_index.has_conflicts() { - // checkout the conflicts - project_repository - .git_repository - .checkout_index(&mut cherrypick_index) - .allow_conflicts() - .conflict_style_merge() - .force() - .checkout() - .context("failed to checkout conflicts")?; - - // mark conflicts - let conflicts = cherrypick_index - .conflicts() - .context("failed to get conflicts")?; - let mut merge_conflicts = Vec::new(); - for path in conflicts.flatten() { - if let Some(ours) = path.our { - let path = std::str::from_utf8(&ours.path) - .context("failed to convert path")? - .to_string(); - merge_conflicts.push(path); - } - } - conflicts::mark(project_repository, &merge_conflicts, Some(branch.head))?; - - None - } else { - let merge_tree_oid = cherrypick_index - .write_tree_to(&project_repository.git_repository) - .context("failed to write merge tree")?; - let merge_tree = project_repository - .git_repository - .find_tree(merge_tree_oid) - .context("failed to find merge tree")?; - - let branch_head_commit = project_repository - .git_repository - .find_commit(branch.head) - .context("failed to find branch head commit")?; - - let commit_oid = project_repository - .git_repository - .commit( - None, - &target_commit.author(), - &target_commit.committer(), - target_commit.message().unwrap_or_default(), - &merge_tree, - &[&branch_head_commit], - ) - .context("failed to create commit")?; - - // checkout final_tree into the working directory - project_repository - .git_repository - .checkout_tree(&merge_tree) - .force() - .remove_untracked() - .checkout() - 
.context("failed to checkout final tree")?; - - // update branch status - let writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - branch.head = commit_oid; - writer - .write(&mut branch) - .context("failed to write branch")?; - - Some(commit_oid) - }; - - super::integration::update_gitbutler_integration(gb_repository, project_repository) - .context("failed to update gitbutler integration")?; - - Ok(commit_oid) -} - -/// squashes a commit from a virtual branch into it's parent. -pub fn squash( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - branch_id: &BranchId, - commit_oid: git::Oid, -) -> Result<(), errors::SquashError> { - if conflicts::is_conflicting::<&Path>(project_repository, None)? { - return Err(errors::SquashError::Conflict( - errors::ProjectConflictError { - project_id: project_repository.project().id, - }, - )); - } - - let current_session = gb_repository - .get_or_create_current_session() - .context("failed to get or create current session")?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) - .context("failed to open current session")?; - let branch_reader = branch::Reader::new(¤t_session_reader); - - let default_target = get_default_target(¤t_session_reader) - .context("failed to read default target")? 
- .ok_or_else(|| { - errors::SquashError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - - let mut branch = branch_reader.read(branch_id).map_err(|error| match error { - reader::Error::NotFound => { - errors::SquashError::BranchNotFound(errors::BranchNotFoundError { - project_id: project_repository.project().id, - branch_id: *branch_id, - }) - } - error => errors::SquashError::Other(error.into()), - })?; - - let branch_commit_oids = project_repository.l( - branch.head, - project_repository::LogUntil::Commit(default_target.sha), - )?; - - if !branch_commit_oids.contains(&commit_oid) { - return Err(errors::SquashError::CommitNotFound(commit_oid)); - } - - let commit_to_squash = project_repository - .git_repository - .find_commit(commit_oid) - .context("failed to find commit")?; - - let parent_commit = commit_to_squash - .parent(0) - .context("failed to find parent commit")?; - - let pushed_commit_oids = branch.upstream_head.map_or_else( - || Ok(vec![]), - |upstream_head| { - project_repository.l( - upstream_head, - project_repository::LogUntil::Commit(default_target.sha), - ) - }, - )?; - - if pushed_commit_oids.contains(&parent_commit.id()) - && !project_repository.project().ok_with_force_push - { - // squashing into a pushed commit will cause a force push that is not allowed - return Err(errors::SquashError::ForcePushNotAllowed( - errors::ForcePushNotAllowedError { - project_id: project_repository.project().id, - }, - )); - } - - if !branch_commit_oids.contains(&parent_commit.id()) { - return Err(errors::SquashError::CantSquashRootCommit); - } - - let ids_to_rebase = { - let ids = branch_commit_oids - .split(|oid| oid.eq(&commit_oid)) - .collect::>(); - ids.first().copied() - }; - - // create a commit that: - // * has the tree of the target commit - // * has the message combined of the target commit and parent commit - // * has parents of the parents commit. 
- let parents = parent_commit - .parents() - .context("failed to find head commit parents")?; - - let new_commit_oid = project_repository - .git_repository - .commit( - None, - &commit_to_squash.author(), - &commit_to_squash.committer(), - &format!( - "{}\n{}", - parent_commit.message().unwrap_or_default(), - commit_to_squash.message().unwrap_or_default(), - ), - &commit_to_squash.tree().context("failed to find tree")?, - &parents.iter().collect::>(), - ) - .context("failed to commit")?; - - let new_head_id = if let Some(ids_to_rebase) = ids_to_rebase { - let mut ids_to_rebase = ids_to_rebase.to_vec(); - ids_to_rebase.reverse(); - - // now, rebase unchanged commits onto the new commit - let commits_to_rebase = ids_to_rebase - .iter() - .map(|oid| project_repository.git_repository.find_commit(*oid)) - .collect::, _>>() - .context("failed to read commits to rebase")?; - - commits_to_rebase - .into_iter() - .fold( - project_repository - .git_repository - .find_commit(new_commit_oid) - .context("failed to find new commit"), - |head, to_rebase| { - let head = head?; - - let mut cherrypick_index = project_repository - .git_repository - .cherry_pick(&head, &to_rebase) - .context("failed to cherry pick")?; - - if cherrypick_index.has_conflicts() { - bail!("failed to rebase"); - } - - let merge_tree_oid = cherrypick_index - .write_tree_to(&project_repository.git_repository) - .context("failed to write merge tree")?; - - let merge_tree = project_repository - .git_repository - .find_tree(merge_tree_oid) - .context("failed to find merge tree")?; - - let commit_oid = project_repository - .git_repository - .commit( - None, - &to_rebase.author(), - &to_rebase.committer(), - to_rebase.message().unwrap_or_default(), - &merge_tree, - &[&head], - ) - .context("failed to create commit")?; - - project_repository - .git_repository - .find_commit(commit_oid) - .context("failed to find commit") - }, - )? 
- .id() - } else { - new_commit_oid - }; - - // save new branch head - let writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - branch.head = new_head_id; - writer - .write(&mut branch) - .context("failed to write branch")?; - - super::integration::update_gitbutler_integration(gb_repository, project_repository)?; - - Ok(()) -} - -pub fn update_commit_message( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - branch_id: &BranchId, - commit_oid: git::Oid, - message: &str, -) -> Result<(), errors::UpdateCommitMessageError> { - if message.is_empty() { - return Err(errors::UpdateCommitMessageError::EmptyMessage); - } - - if conflicts::is_conflicting::<&Path>(project_repository, None)? { - return Err(errors::UpdateCommitMessageError::Conflict( - errors::ProjectConflictError { - project_id: project_repository.project().id, - }, - )); - } - - let current_session = gb_repository - .get_or_create_current_session() - .context("failed to get or create current session")?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) - .context("failed to open current session")?; - let branch_reader = branch::Reader::new(¤t_session_reader); - - let default_target = get_default_target(¤t_session_reader) - .context("failed to read default target")? 
- .ok_or_else(|| { - errors::UpdateCommitMessageError::DefaultTargetNotSet( - errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }, - ) - })?; - - let mut branch = branch_reader.read(branch_id).map_err(|error| match error { - reader::Error::NotFound => { - errors::UpdateCommitMessageError::BranchNotFound(errors::BranchNotFoundError { - project_id: project_repository.project().id, - branch_id: *branch_id, - }) - } - error => errors::UpdateCommitMessageError::Other(error.into()), - })?; - - let branch_commit_oids = project_repository.l( - branch.head, - project_repository::LogUntil::Commit(default_target.sha), - )?; - - if !branch_commit_oids.contains(&commit_oid) { - return Err(errors::UpdateCommitMessageError::CommitNotFound(commit_oid)); - } - - let pushed_commit_oids = branch.upstream_head.map_or_else( - || Ok(vec![]), - |upstream_head| { - project_repository.l( - upstream_head, - project_repository::LogUntil::Commit(default_target.sha), - ) - }, - )?; - - if pushed_commit_oids.contains(&commit_oid) && !project_repository.project().ok_with_force_push - { - // updating the message of a pushed commit will cause a force push that is not allowed - return Err(errors::UpdateCommitMessageError::ForcePushNotAllowed( - errors::ForcePushNotAllowedError { - project_id: project_repository.project().id, - }, - )); - } - - let target_commit = project_repository - .git_repository - .find_commit(commit_oid) - .context("failed to find commit")?; - - let ids_to_rebase = { - let ids = branch_commit_oids - .split(|oid| oid.eq(&commit_oid)) - .collect::>(); - ids.first().copied() - }; - - let parents = target_commit - .parents() - .context("failed to find head commit parents")?; - - let new_commit_oid = project_repository - .git_repository - .commit( - None, - &target_commit.author(), - &target_commit.committer(), - message, - &target_commit.tree().context("failed to find tree")?, - &parents.iter().collect::>(), - ) - .context("failed to commit")?; - 
- let new_head_id = if let Some(ids_to_rebase) = ids_to_rebase { - let mut ids_to_rebase = ids_to_rebase.to_vec(); - ids_to_rebase.reverse(); - // now, rebase unchanged commits onto the new commit - let commits_to_rebase = ids_to_rebase - .iter() - .map(|oid| project_repository.git_repository.find_commit(*oid)) - .collect::, _>>() - .context("failed to read commits to rebase")?; - - commits_to_rebase - .into_iter() - .fold( - project_repository - .git_repository - .find_commit(new_commit_oid) - .context("failed to find new commit"), - |head, to_rebase| { - let head = head?; - - let mut cherrypick_index = project_repository - .git_repository - .cherry_pick(&head, &to_rebase) - .context("failed to cherry pick")?; - - if cherrypick_index.has_conflicts() { - bail!("failed to rebase"); - } - - let merge_tree_oid = cherrypick_index - .write_tree_to(&project_repository.git_repository) - .context("failed to write merge tree")?; - - let merge_tree = project_repository - .git_repository - .find_tree(merge_tree_oid) - .context("failed to find merge tree")?; - - let commit_oid = project_repository - .git_repository - .commit( - None, - &to_rebase.author(), - &to_rebase.committer(), - to_rebase.message().unwrap_or_default(), - &merge_tree, - &[&head], - ) - .context("failed to create commit")?; - - project_repository - .git_repository - .find_commit(commit_oid) - .context("failed to find commit") - }, - )? 
- .id() - } else { - new_commit_oid - }; - - // save new branch head - let writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - branch.head = new_head_id; - writer - .write(&mut branch) - .context("failed to write branch")?; - - super::integration::update_gitbutler_integration(gb_repository, project_repository)?; - - Ok(()) -} - -/// moves commit on top of the to target branch -pub fn move_commit( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - target_branch_id: &BranchId, - commit_oid: git::Oid, - user: Option<&users::User>, - signing_key: Option<&keys::PrivateKey>, -) -> Result<(), errors::MoveCommitError> { - if project_repository.is_resolving() { - return Err(errors::MoveCommitError::Conflicted( - errors::ProjectConflictError { - project_id: project_repository.project().id, - }, - )); - } - - let latest_session = gb_repository - .get_latest_session() - .context("failed to get or create current session")? - .ok_or_else(|| { - errors::MoveCommitError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - let latest_session_reader = sessions::Reader::open(gb_repository, &latest_session) - .context("failed to open current session")?; - - let applied_branches = Iterator::new(&latest_session_reader) - .context("failed to create branch iterator")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")? - .into_iter() - .filter(|b| b.applied) - .collect::>(); - - if !applied_branches.iter().any(|b| b.id == *target_branch_id) { - return Err(errors::MoveCommitError::BranchNotFound( - errors::BranchNotFoundError { - project_id: project_repository.project().id, - branch_id: *target_branch_id, - }, - )); - } - - let default_target = super::get_default_target(&latest_session_reader) - .context("failed to get default target")? 
- .ok_or_else(|| { - errors::MoveCommitError::DefaultTargetNotSet(errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }) - })?; - - let (mut applied_statuses, _) = get_applied_status( - gb_repository, - project_repository, - &default_target, - applied_branches, - )?; - - let (ref mut source_branch, source_status) = applied_statuses - .iter_mut() - .find(|(b, _)| b.head == commit_oid) - .ok_or_else(|| errors::MoveCommitError::CommitNotFound(commit_oid))?; - - let source_branch_non_comitted_files = calculate_non_commited_diffs( - project_repository, - source_branch, - &default_target, - source_status, - )?; - - let source_branch_head = project_repository - .git_repository - .find_commit(commit_oid) - .context("failed to find commit")?; - let source_branch_head_parent = source_branch_head - .parent(0) - .context("failed to get parent commit")?; - let source_branch_head_tree = source_branch_head - .tree() - .context("failed to get commit tree")?; - let source_branch_head_parent_tree = source_branch_head_parent - .tree() - .context("failed to get parent tree")?; - let branch_head_diff = diff::trees( - &project_repository.git_repository, - &source_branch_head_parent_tree, - &source_branch_head_tree, - context_lines(project_repository), - )?; - let branch_head_diff = diff::diff_files_to_hunks(&branch_head_diff); - - let is_source_locked = source_branch_non_comitted_files - .iter() - .any(|(path, hunks)| { - branch_head_diff.get(path).map_or(false, |head_diff_hunks| { - hunks.iter().any(|hunk| { - head_diff_hunks.iter().any(|head_hunk| { - joined( - head_hunk.new_start, - head_hunk.new_start + head_hunk.new_lines, - hunk.new_start, - hunk.new_start + hunk.new_lines, - ) - }) - }) - }) - }); - - if is_source_locked { - return Err(errors::MoveCommitError::SourceLocked); - } - - let branch_writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - let branch_reader = 
branch::Reader::new(&latest_session_reader); - - // move files ownerships from source branch to the destination branch - - let ownerships_to_transfer = branch_head_diff - .iter() - .map(|(file_path, hunks)| { - ( - file_path.clone(), - hunks.iter().map(Into::into).collect::>(), - ) - }) - .map(|(file_path, hunks)| OwnershipClaim { file_path, hunks }) - .flat_map(|file_ownership| source_branch.ownership.take(&file_ownership)) - .collect::>(); - - // reset the source branch to the parent commit - { - source_branch.head = source_branch_head_parent.id(); - branch_writer.write(source_branch)?; - } - - // move the commit to destination branch target branch - { - let mut destination_branch = - branch_reader - .read(target_branch_id) - .map_err(|error| match error { - reader::Error::NotFound => { - errors::MoveCommitError::BranchNotFound(errors::BranchNotFoundError { - project_id: project_repository.project().id, - branch_id: *target_branch_id, - }) - } - error => errors::MoveCommitError::Other(error.into()), - })?; - - for ownership in ownerships_to_transfer { - destination_branch.ownership.put(&ownership); - } - - let new_destination_tree_oid = write_tree_onto_commit( - project_repository, - destination_branch.head, - &branch_head_diff, - ) - .context("failed to write tree onto commit")?; - let new_destination_tree = project_repository - .git_repository - .find_tree(new_destination_tree_oid) - .context("failed to find tree")?; - - let new_destination_head_oid = project_repository - .commit( - user, - source_branch_head.message().unwrap_or_default(), - &new_destination_tree, - &[&project_repository - .git_repository - .find_commit(destination_branch.head) - .context("failed to get dst branch head commit")?], - signing_key, - ) - .context("failed to commit")?; - - destination_branch.head = new_destination_head_oid; - branch_writer.write(&mut destination_branch)?; - } - - super::integration::update_gitbutler_integration(gb_repository, project_repository) - .context("failed 
to update gitbutler integration")?; - - Ok(()) -} - -pub fn create_virtual_branch_from_branch( - gb_repository: &gb_repository::Repository, - project_repository: &project_repository::Repository, - upstream: &git::Refname, - signing_key: Option<&keys::PrivateKey>, - user: Option<&users::User>, -) -> Result { - if !matches!(upstream, git::Refname::Local(_) | git::Refname::Remote(_)) { - return Err(errors::CreateVirtualBranchFromBranchError::BranchNotFound( - upstream.clone(), - )); - } - - let current_session = gb_repository - .get_or_create_current_session() - .context("failed to get or create current session")?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session) - .context("failed to open current session")?; - - let default_target = super::get_default_target(¤t_session_reader) - .context("failed to get default target")? - .ok_or_else(|| { - errors::CreateVirtualBranchFromBranchError::DefaultTargetNotSet( - errors::DefaultTargetNotSetError { - project_id: project_repository.project().id, - }, - ) - })?; - - if let git::Refname::Remote(remote_upstream) = upstream { - if default_target.branch.eq(remote_upstream) { - return Err( - errors::CreateVirtualBranchFromBranchError::CantMakeBranchFromDefaultTarget, - ); - } - } - - let repo = &project_repository.git_repository; - let head_reference = match repo.find_reference(upstream) { - Ok(head) => Ok(head), - Err(git::Error::NotFound(_)) => Err( - errors::CreateVirtualBranchFromBranchError::BranchNotFound(upstream.clone()), - ), - Err(error) => Err(errors::CreateVirtualBranchFromBranchError::Other( - error.into(), - )), - }?; - let head_commit = head_reference - .peel_to_commit() - .context("failed to peel to commit")?; - let head_commit_tree = head_commit.tree().context("failed to find tree")?; - - let all_virtual_branches = Iterator::new(¤t_session_reader) - .context("failed to create branch iterator")? - .collect::, reader::Error>>() - .context("failed to read virtual branches")? 
- .into_iter() - .collect::>(); - - let order = all_virtual_branches.len(); - - let selected_for_changes = (!all_virtual_branches - .iter() - .any(|b| b.selected_for_changes.is_some())) - .then_some(chrono::Utc::now().timestamp_millis()); - - let now = time::UNIX_EPOCH - .elapsed() - .context("failed to get elapsed time")? - .as_millis(); - - // only set upstream if it's not the default target - let upstream_branch = match upstream { - git::Refname::Other(_) | git::Refname::Virtual(_) => { - // we only support local or remote branches - return Err(errors::CreateVirtualBranchFromBranchError::BranchNotFound( - upstream.clone(), - )); - } - git::Refname::Remote(remote) => Some(remote.clone()), - git::Refname::Local(local) => local.remote().cloned(), - }; - - // add file ownership based off the diff - let target_commit = repo - .find_commit(default_target.sha) - .map_err(|error| errors::CreateVirtualBranchFromBranchError::Other(error.into()))?; - let merge_base_oid = repo - .merge_base(target_commit.id(), head_commit.id()) - .map_err(|error| errors::CreateVirtualBranchFromBranchError::Other(error.into()))?; - let merge_base_tree = repo - .find_commit(merge_base_oid) - .map_err(|error| errors::CreateVirtualBranchFromBranchError::Other(error.into()))? 
- .tree() - .map_err(|error| errors::CreateVirtualBranchFromBranchError::Other(error.into()))?; - - // do a diff between the head of this branch and the target base - let diff = diff::trees( - &project_repository.git_repository, - &merge_base_tree, - &head_commit_tree, - context_lines(project_repository), - ) - .context("failed to diff trees")?; - let diff = diff::diff_files_to_hunks(&diff); - - let hunks_by_filepath = - super::virtual_hunks_by_filepath(&project_repository.project().path, &diff); - - // assign ownership to the branch - let ownership = hunks_by_filepath.values().flatten().fold( - branch::BranchOwnershipClaims::default(), - |mut ownership, hunk| { - ownership.put( - &format!("{}:{}", hunk.file_path.display(), hunk.id) - .parse() - .unwrap(), - ); - ownership - }, - ); - - let mut branch = branch::Branch { - id: BranchId::generate(), - name: upstream - .branch() - .expect("always a branch reference") - .to_string(), - notes: String::new(), - applied: false, - upstream_head: upstream_branch.is_some().then_some(head_commit.id()), - upstream: upstream_branch, - tree: head_commit_tree.id(), - head: head_commit.id(), - created_timestamp_ms: now, - updated_timestamp_ms: now, - ownership, - order, - selected_for_changes, - }; - - let writer = branch::Writer::new(gb_repository, project_repository.project().gb_dir()) - .context("failed to create writer")?; - writer - .write(&mut branch) - .context("failed to write branch")?; - - project_repository.add_branch_reference(&branch)?; - - match apply_branch( - gb_repository, - project_repository, - &branch.id, - signing_key, - user, - ) { - Ok(()) => Ok(branch.id), - Err(errors::ApplyBranchError::BranchConflicts(_)) => { - // if branch conflicts with the workspace, it's ok. 
keep it unapplied - Ok(branch.id) - } - Err(error) => Err(errors::CreateVirtualBranchFromBranchError::ApplyBranch( - error, - )), - } -} - -pub fn context_lines(project_repository: &project_repository::Repository) -> u32 { - let use_context = project_repository - .project() - .use_diff_context - .unwrap_or(false); - - if use_context { - 3_u32 - } else { - 0_u32 - } -} - -#[cfg(test)] -mod tests { - use super::*; - #[test] - fn joined_test() { - assert!(!joined(10, 13, 6, 9)); - assert!(joined(10, 13, 7, 10)); - assert!(joined(10, 13, 8, 11)); - assert!(joined(10, 13, 9, 12)); - assert!(joined(10, 13, 10, 13)); - assert!(joined(10, 13, 11, 14)); - assert!(joined(10, 13, 12, 15)); - assert!(joined(10, 13, 13, 16)); - assert!(!joined(10, 13, 14, 17)); - } - - #[test] - fn normalize_branch_name_test() { - assert_eq!(normalize_branch_name("feature/branch"), "feature/branch"); - assert_eq!(normalize_branch_name("foo#branch"), "foo#branch"); - assert_eq!(normalize_branch_name("foo!branch"), "foo-branch"); - } -} diff --git a/gitbutler-app/src/watcher.rs b/gitbutler-app/src/watcher.rs index ac7c41b35..05e96e163 100644 --- a/gitbutler-app/src/watcher.rs +++ b/gitbutler-app/src/watcher.rs @@ -17,7 +17,7 @@ use tokio::{ }; use tokio_util::sync::CancellationToken; -use crate::projects::{self, ProjectId}; +use gitbutler::projects::{self, Project, ProjectId}; #[derive(Clone)] pub struct Watchers { @@ -80,6 +80,25 @@ impl Watchers { } } +#[async_trait::async_trait] +impl gitbutler::projects::Watchers for Watchers { + fn watch(&self, project: &Project) -> Result<()> { + Watchers::watch(self, project) + } + + async fn stop(&self, id: ProjectId) -> Result<()> { + Watchers::stop(self, &id).await + } + + async fn fetch(&self, id: ProjectId) -> Result<()> { + self.post(Event::FetchGitbutlerData(id)).await + } + + async fn push(&self, id: ProjectId) -> Result<()> { + self.post(Event::PushGitbutlerData(id)).await + } +} + #[derive(Clone)] struct Watcher { inner: Arc, diff --git 
a/gitbutler-app/src/watcher/dispatchers.rs b/gitbutler-app/src/watcher/dispatchers.rs index 20530e1c7..2c2033683 100644 --- a/gitbutler-app/src/watcher/dispatchers.rs +++ b/gitbutler-app/src/watcher/dispatchers.rs @@ -10,7 +10,7 @@ use tokio::{ }; use tokio_util::sync::CancellationToken; -use crate::projects::ProjectId; +use gitbutler::projects::ProjectId; use super::events; diff --git a/gitbutler-app/src/watcher/dispatchers/file_change.rs b/gitbutler-app/src/watcher/dispatchers/file_change.rs index 47b33d0c7..2cbae1d99 100644 --- a/gitbutler-app/src/watcher/dispatchers/file_change.rs +++ b/gitbutler-app/src/watcher/dispatchers/file_change.rs @@ -13,7 +13,8 @@ use tokio::{ task, }; -use crate::{git, projects::ProjectId, watcher::events}; +use crate::watcher::events; +use gitbutler::{git, projects::ProjectId}; #[derive(Debug, Clone)] pub struct Dispatcher { diff --git a/gitbutler-app/src/watcher/events.rs b/gitbutler-app/src/watcher/events.rs index 514d41fed..ec861c87e 100644 --- a/gitbutler-app/src/watcher/events.rs +++ b/gitbutler-app/src/watcher/events.rs @@ -1,12 +1,14 @@ use std::{fmt::Display, path}; -use crate::{ - analytics, deltas, events, +use gitbutler::{ + deltas, projects::ProjectId, reader, sessions::{self, SessionId}, }; +use crate::{analytics, events}; + #[derive(Debug, PartialEq, Clone)] pub enum Event { Flush(ProjectId, sessions::Session), diff --git a/gitbutler-app/src/watcher/handlers/analytics_handler.rs b/gitbutler-app/src/watcher/handlers/analytics_handler.rs index 4361080e3..22057118c 100644 --- a/gitbutler-app/src/watcher/handlers/analytics_handler.rs +++ b/gitbutler-app/src/watcher/handlers/analytics_handler.rs @@ -1,7 +1,8 @@ use anyhow::{Context, Result}; use tauri::{AppHandle, Manager}; -use crate::{analytics, users}; +use crate::analytics; +use gitbutler::users; use super::events; diff --git a/gitbutler-app/src/watcher/handlers/calculate_deltas_handler.rs b/gitbutler-app/src/watcher/handlers/calculate_deltas_handler.rs index 
907fe0357..454c651d1 100644 --- a/gitbutler-app/src/watcher/handlers/calculate_deltas_handler.rs +++ b/gitbutler-app/src/watcher/handlers/calculate_deltas_handler.rs @@ -3,7 +3,7 @@ use std::{path, vec}; use anyhow::{Context, Result}; use tauri::{AppHandle, Manager}; -use crate::{ +use gitbutler::{ deltas, gb_repository, project_repository, projects::{self, ProjectId}, reader, sessions, users, diff --git a/gitbutler-app/src/watcher/handlers/caltulate_virtual_branches_handler.rs b/gitbutler-app/src/watcher/handlers/caltulate_virtual_branches_handler.rs index 3635c78d3..4b0b6d691 100644 --- a/gitbutler-app/src/watcher/handlers/caltulate_virtual_branches_handler.rs +++ b/gitbutler-app/src/watcher/handlers/caltulate_virtual_branches_handler.rs @@ -9,8 +9,9 @@ use governor::{ use tauri::{AppHandle, Manager}; use tokio::sync::Mutex; -use crate::{ - assets, events as app_events, +use crate::events as app_events; +use gitbutler::{ + assets, projects::ProjectId, virtual_branches::{self, controller::ControllerError, VirtualBranches}, }; diff --git a/gitbutler-app/src/watcher/handlers/fetch_gitbutler_data.rs b/gitbutler-app/src/watcher/handlers/fetch_gitbutler_data.rs index 7d8f7e1a3..44950abeb 100644 --- a/gitbutler-app/src/watcher/handlers/fetch_gitbutler_data.rs +++ b/gitbutler-app/src/watcher/handlers/fetch_gitbutler_data.rs @@ -4,7 +4,7 @@ use anyhow::{Context, Result}; use tauri::{AppHandle, Manager}; use tokio::sync::Mutex; -use crate::{gb_repository, project_repository, projects, projects::ProjectId, users}; +use gitbutler::{gb_repository, project_repository, projects, projects::ProjectId, users}; use super::events; diff --git a/gitbutler-app/src/watcher/handlers/filter_ignored_files.rs b/gitbutler-app/src/watcher/handlers/filter_ignored_files.rs index 79ae05f59..4ddb69375 100644 --- a/gitbutler-app/src/watcher/handlers/filter_ignored_files.rs +++ b/gitbutler-app/src/watcher/handlers/filter_ignored_files.rs @@ -10,7 +10,7 @@ use governor::{ use tauri::{AppHandle, 
Manager}; use tokio::sync::Mutex; -use crate::{ +use gitbutler::{ project_repository, projects::{self, ProjectId}, }; diff --git a/gitbutler-app/src/watcher/handlers/flush_session.rs b/gitbutler-app/src/watcher/handlers/flush_session.rs index ded656a70..b3405d901 100644 --- a/gitbutler-app/src/watcher/handlers/flush_session.rs +++ b/gitbutler-app/src/watcher/handlers/flush_session.rs @@ -4,7 +4,9 @@ use anyhow::{Context, Result}; use tauri::{AppHandle, Manager}; use tokio::sync::Mutex; -use crate::{gb_repository, project_repository, projects, projects::ProjectId, sessions, users}; +use gitbutler::{ + gb_repository, project_repository, projects, projects::ProjectId, sessions, users, +}; use super::events; diff --git a/gitbutler-app/src/watcher/handlers/git_file_change.rs b/gitbutler-app/src/watcher/handlers/git_file_change.rs index 6e38ed208..13ab970d8 100644 --- a/gitbutler-app/src/watcher/handlers/git_file_change.rs +++ b/gitbutler-app/src/watcher/handlers/git_file_change.rs @@ -3,8 +3,9 @@ use std::path; use anyhow::{Context, Result}; use tauri::{AppHandle, Manager}; -use crate::{ - analytics, events as app_events, gb_repository, git, project_repository, +use crate::{analytics, events as app_events}; +use gitbutler::{ + gb_repository, git, project_repository, projects::{self, ProjectId}, users, }; diff --git a/gitbutler-app/src/watcher/handlers/index_handler.rs b/gitbutler-app/src/watcher/handlers/index_handler.rs index 54d855a86..7b5a1655b 100644 --- a/gitbutler-app/src/watcher/handlers/index_handler.rs +++ b/gitbutler-app/src/watcher/handlers/index_handler.rs @@ -3,8 +3,9 @@ use std::path; use anyhow::{Context, Result}; use tauri::{AppHandle, Manager}; -use crate::{ - deltas, events as app_events, gb_repository, project_repository, +use crate::events as app_events; +use gitbutler::{ + deltas, gb_repository, project_repository, projects::{self, ProjectId}, sessions::{self, SessionId}, users, diff --git a/gitbutler-app/src/watcher/handlers/push_gitbutler_data.rs 
b/gitbutler-app/src/watcher/handlers/push_gitbutler_data.rs index 58fed9b4c..3d3b58961 100644 --- a/gitbutler-app/src/watcher/handlers/push_gitbutler_data.rs +++ b/gitbutler-app/src/watcher/handlers/push_gitbutler_data.rs @@ -4,9 +4,9 @@ use std::sync::{Arc, Mutex, TryLockError}; use anyhow::{Context, Result}; use tauri::{AppHandle, Manager}; -use crate::gb_repository::RemoteError; -use crate::projects::ProjectId; -use crate::{gb_repository, project_repository, projects, users}; +use gitbutler::gb_repository::RemoteError; +use gitbutler::projects::ProjectId; +use gitbutler::{gb_repository, project_repository, projects, users}; use super::events; diff --git a/gitbutler-app/src/watcher/handlers/push_project_to_gitbutler.rs b/gitbutler-app/src/watcher/handlers/push_project_to_gitbutler.rs index 9e561c4cd..2a0406bc2 100644 --- a/gitbutler-app/src/watcher/handlers/push_project_to_gitbutler.rs +++ b/gitbutler-app/src/watcher/handlers/push_project_to_gitbutler.rs @@ -5,7 +5,7 @@ use itertools::Itertools; use tauri::{AppHandle, Manager}; use tokio::sync::Mutex; -use crate::{ +use gitbutler::{ gb_repository, git::{self, Oid, Repository}, project_repository, @@ -129,9 +129,9 @@ impl Handler { async fn push_target( state: &State, project_repository: &project_repository::Repository, - default_target: &crate::virtual_branches::target::Target, + default_target: &gitbutler::virtual_branches::target::Target, gb_code_last_commit: Option, - project_id: &crate::id::Id, + project_id: &gitbutler::id::Id, user: &Option, ) -> Result<(), project_repository::RemoteError> { let ids = batch_rev_walk( @@ -181,7 +181,7 @@ impl Handler { async fn update_project( state: &State, - project_id: &crate::id::Id, + project_id: &gitbutler::id::Id, id: &Oid, ) -> Result<(), project_repository::RemoteError> { state @@ -211,7 +211,7 @@ struct State { fn push_all_refs( project_repository: &project_repository::Repository, user: &Option, - project_id: &crate::id::Id, + project_id: &gitbutler::id::Id, ) -> 
Result<(), project_repository::RemoteError> { let gb_references = collect_refs(project_repository)?; diff --git a/gitbutler-app/src/windows.rs b/gitbutler-app/src/windows.rs deleted file mode 100644 index bf0e3ac95..000000000 --- a/gitbutler-app/src/windows.rs +++ /dev/null @@ -1,24 +0,0 @@ -use std::os::windows::fs::MetadataExt; - -pub trait MetadataShim { - fn ino(&self) -> u64; - fn dev(&self) -> u64; - fn uid(&self) -> u32; - fn gid(&self) -> u32; -} - -impl MetadataShim for std::fs::Metadata { - fn ino(&self) -> u64 { - self.file_index().expect("file metadata constructed based on directory listing instead of a file (see https://doc.rust-lang.org/std/os/windows/fs/trait.MetadataExt.html#tymethod.file_index)") - } - #[allow(clippy::cast_lossless)] - fn dev(&self) -> u64 { - self.volume_serial_number().expect("file metadata constructed based on directory listing instead of a file (see https://doc.rust-lang.org/std/os/windows/fs/trait.MetadataExt.html#tymethod.volume_serial_number)") as u64 - } - fn uid(&self) -> u32 { - 0 - } - fn gid(&self) -> u32 { - 0 - } -} diff --git a/gitbutler-app/src/writer.rs b/gitbutler-app/src/writer.rs deleted file mode 100644 index dff82d562..000000000 --- a/gitbutler-app/src/writer.rs +++ /dev/null @@ -1,114 +0,0 @@ -use std::path::Path; - -use anyhow::Result; - -use crate::lock; - -pub struct DirWriter(lock::Dir); - -impl DirWriter { - pub fn open>(root: P) -> Result { - lock::Dir::new(root).map(Self) - } -} - -impl DirWriter { - fn write(&self, path: P, contents: C) -> Result<(), std::io::Error> - where - P: AsRef, - C: AsRef<[u8]>, - { - self.batch(&[BatchTask::Write(path, contents)]) - } - - pub fn remove>(&self, path: P) -> Result<(), std::io::Error> { - self.0.batch(|root| { - let path = root.join(path); - if path.exists() { - if path.is_dir() { - std::fs::remove_dir_all(path) - } else { - std::fs::remove_file(path) - } - } else { - Ok(()) - } - })? 
- } - - pub fn batch(&self, values: &[BatchTask]) -> Result<(), std::io::Error> - where - P: AsRef, - C: AsRef<[u8]>, - { - self.0.batch(|root| { - for value in values { - match value { - BatchTask::Write(path, contents) => { - let path = root.join(path); - if let Some(dir_path) = path.parent() { - if !dir_path.exists() { - std::fs::create_dir_all(dir_path)?; - } - }; - std::fs::write(path, contents)?; - } - BatchTask::Remove(path) => { - let path = root.join(path); - if path.exists() { - if path.is_dir() { - std::fs::remove_dir_all(path)?; - } else { - std::fs::remove_file(path)?; - } - } - } - } - } - Ok(()) - })? - } - - pub fn write_string>( - &self, - path: P, - contents: &str, - ) -> Result<(), std::io::Error> { - self.write(path, contents) - } -} - -pub enum BatchTask, C: AsRef<[u8]>> { - Write(P, C), - Remove(P), -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn write() { - let root = tempfile::tempdir().unwrap(); - let writer = DirWriter::open(root.path()).unwrap(); - writer.write("foo/bar", b"baz").unwrap(); - assert_eq!( - std::fs::read_to_string(root.path().join("foo/bar")).unwrap(), - "baz" - ); - } - - #[test] - fn remove() { - let root = tempfile::tempdir().unwrap(); - let writer = DirWriter::open(root.path()).unwrap(); - writer.remove("foo/bar").unwrap(); - assert!(!root.path().join("foo/bar").exists()); - writer.write("foo/bar", b"baz").unwrap(); - writer.remove("foo/bar").unwrap(); - assert!(!root.path().join("foo/bar").exists()); - writer.write("parent/child", b"baz").unwrap(); - writer.remove("parent").unwrap(); - assert!(!root.path().join("parent").exists()); - } -} diff --git a/gitbutler-app/src/zip.rs b/gitbutler-app/src/zip.rs index 9750c112f..8f2e654ae 100644 --- a/gitbutler-app/src/zip.rs +++ b/gitbutler-app/src/zip.rs @@ -1,165 +1,87 @@ -pub mod commands; -mod controller; -pub use controller::Controller; +pub mod commands { + #![allow(clippy::used_underscore_binding)] + use std::path; -use std::{ - fs, - io::{self, Read, 
Write}, - path, time, -}; + use tauri::{AppHandle, Manager}; + use tracing::instrument; -use anyhow::{Context, Result}; -use sha2::{Digest, Sha256}; -use walkdir::{DirEntry, WalkDir}; -use zip::{result::ZipError, write, CompressionMethod, ZipWriter}; + use crate::error::{Code, Error}; -#[derive(Clone)] -pub struct Zipper { - cache: path::PathBuf, -} + use gitbutler::zip::controller; -impl Zipper { - pub fn new>(cache_dir: P) -> Self { - let cache = cache_dir.as_ref().to_path_buf().join("archives"); - Self { cache } - } - - // takes a path to create zip of, returns path of a created archive. - pub fn zip>(&self, path: P) -> Result { - let path = path.as_ref(); - if !path.exists() { - return Err(anyhow::anyhow!("{} does not exist", path.display())); - } - if !path.is_dir() { - return Err(anyhow::anyhow!("{} is not a directory", path.display())); - } - let path_hash = calculate_path_hash(path)?; - fs::create_dir_all(&self.cache).context("failed to create cache dir")?; - let archive_path = self.cache.join(format!("{}.zip", path_hash)); - if !archive_path.exists() { - doit(path, &archive_path, CompressionMethod::Bzip2)?; - } - Ok(archive_path) - } -} - -fn doit>( - src_dir: P, - dst_file: P, - method: zip::CompressionMethod, -) -> zip::result::ZipResult<()> { - let src = src_dir.as_ref(); - let dst = dst_file.as_ref(); - if !src.is_dir() { - return Err(ZipError::FileNotFound); - } - - let file = fs::File::create(dst).unwrap(); - - let walkdir = WalkDir::new(src); - let it = walkdir.into_iter(); - - zip_dir(&mut it.filter_map(Result::ok), src, file, method)?; - - Ok(()) -} - -fn zip_dir( - it: &mut dyn Iterator, - prefix: &path::Path, - writer: T, - method: zip::CompressionMethod, -) -> zip::result::ZipResult<()> -where - T: io::Write + io::Seek, -{ - let mut zip = ZipWriter::new(writer); - let options = write::FileOptions::default() - .compression_method(method) - .unix_permissions(0o755); - - let mut buffer = Vec::new(); - for entry in it { - let path = entry.path(); - 
let name = path.strip_prefix(prefix).unwrap(); - - // Write file or directory explicitly - // Some unzip tools unzip files with directory paths correctly, some do not! - if path.is_file() { - #[allow(deprecated)] - zip.start_file_from_path(name, options)?; - let mut f = fs::File::open(path)?; - - f.read_to_end(&mut buffer)?; - zip.write_all(&buffer)?; - buffer.clear(); - } else if !name.as_os_str().is_empty() { - // Only if not root! Avoids path spec / warning - // and mapname conversion failed error on unzip - #[allow(deprecated)] - zip.add_directory_from_path(name, options)?; + impl From for Error { + fn from(error: controller::ArchiveError) -> Self { + match error { + controller::ArchiveError::GetProject(error) => error.into(), + controller::ArchiveError::Other(error) => { + tracing::error!(?error, "failed to archive project"); + Error::Unknown + } + } } } - zip.finish()?; - Result::Ok(()) -} -// returns hash of a path by calculating metadata hash of all files in it. -fn calculate_path_hash>(path: P) -> Result { - let path = path.as_ref(); - let mut hasher = Sha256::new(); - - if path.is_dir() { - let entries = fs::read_dir(path)?; - let mut entry_paths: Vec<_> = entries - .filter_map(|entry| entry.ok().map(|e| e.path())) - .collect(); - entry_paths.sort(); - - for entry_path in entry_paths { - file_hash(&mut hasher, &entry_path).with_context(|| { - format!( - "failed to calculate hash of file {}", - entry_path.to_str().unwrap() - ) - })?; - } - } else if path.is_file() { - file_hash(&mut hasher, path).with_context(|| { - format!( - "failed to calculate hash of file {}", - path.to_str().unwrap() - ) + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn get_project_archive_path( + handle: AppHandle, + project_id: &str, + ) -> Result { + let project_id = project_id.parse().map_err(|_| Error::UserError { + code: Code::Validation, + message: "Malformed project id".into(), })?; + handle + .state::() + .archive(&project_id) + .map_err(Into::into) } - 
Ok(format!("{:X}", hasher.finalize())) -} + impl From for Error { + fn from(value: controller::DataArchiveError) -> Self { + match value { + controller::DataArchiveError::GetProject(error) => error.into(), + controller::DataArchiveError::Other(error) => { + tracing::error!(?error, "failed to archive project data"); + Error::Unknown + } + } + } + } -fn file_hash>(digest: &mut Sha256, path: P) -> Result<()> { - let path = path.as_ref(); - let metadata = fs::metadata(path).context("failed to get metadata")?; - digest.update(path.to_str().unwrap().as_bytes()); - digest.update(metadata.len().to_string().as_bytes()); - digest.update( - metadata - .modified() - .unwrap_or(time::UNIX_EPOCH) - .duration_since(time::UNIX_EPOCH) - .unwrap() - .as_secs() - .to_string() - .as_bytes(), - ); - digest.update( - metadata - .created() - .unwrap_or(time::UNIX_EPOCH) - .duration_since(time::UNIX_EPOCH) - .unwrap() - .as_secs() - .to_string() - .as_bytes(), - ); - Ok(()) + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn get_project_data_archive_path( + handle: AppHandle, + project_id: &str, + ) -> Result { + let project_id = project_id.parse().map_err(|_| Error::UserError { + code: Code::Validation, + message: "Malformed project id".into(), + })?; + handle + .state::() + .data_archive(&project_id) + .map_err(Into::into) + } + + impl From for Error { + fn from(error: controller::LogsArchiveError) -> Self { + match error { + controller::LogsArchiveError::Other(error) => { + tracing::error!(?error, "failed to archive logs"); + Error::Unknown + } + } + } + } + + #[tauri::command(async)] + #[instrument(skip(handle))] + pub async fn get_logs_archive_path(handle: AppHandle) -> Result { + handle + .state::() + .logs_archive() + .map_err(Into::into) + } } diff --git a/gitbutler-app/src/zip/commands.rs b/gitbutler-app/src/zip/commands.rs deleted file mode 100644 index aabbe62c2..000000000 --- a/gitbutler-app/src/zip/commands.rs +++ /dev/null @@ -1,85 +0,0 @@ 
-#![allow(clippy::used_underscore_binding)] -use std::path; - -use tauri::{AppHandle, Manager}; -use tracing::instrument; - -use crate::error::{Code, Error}; - -use super::controller; - -impl From for Error { - fn from(error: controller::ArchiveError) -> Self { - match error { - controller::ArchiveError::GetProject(error) => error.into(), - controller::ArchiveError::Other(error) => { - tracing::error!(?error, "failed to archive project"); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn get_project_archive_path( - handle: AppHandle, - project_id: &str, -) -> Result { - let project_id = project_id.parse().map_err(|_| Error::UserError { - code: Code::Validation, - message: "Malformed project id".into(), - })?; - handle - .state::() - .archive(&project_id) - .map_err(Into::into) -} - -impl From for Error { - fn from(value: controller::DataArchiveError) -> Self { - match value { - controller::DataArchiveError::GetProject(error) => error.into(), - controller::DataArchiveError::Other(error) => { - tracing::error!(?error, "failed to archive project data"); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn get_project_data_archive_path( - handle: AppHandle, - project_id: &str, -) -> Result { - let project_id = project_id.parse().map_err(|_| Error::UserError { - code: Code::Validation, - message: "Malformed project id".into(), - })?; - handle - .state::() - .data_archive(&project_id) - .map_err(Into::into) -} - -impl From for Error { - fn from(error: controller::LogsArchiveError) -> Self { - match error { - controller::LogsArchiveError::Other(error) => { - tracing::error!(?error, "failed to archive logs"); - Error::Unknown - } - } - } -} - -#[tauri::command(async)] -#[instrument(skip(handle))] -pub async fn get_logs_archive_path(handle: AppHandle) -> Result { - handle - .state::() - .logs_archive() - .map_err(Into::into) -} diff --git 
a/gitbutler-app/src/zip/controller.rs b/gitbutler-app/src/zip/controller.rs deleted file mode 100644 index 972a27f7b..000000000 --- a/gitbutler-app/src/zip/controller.rs +++ /dev/null @@ -1,72 +0,0 @@ -use std::path; - -use crate::projects::{self, ProjectId}; - -use super::Zipper; - -#[derive(Clone)] -pub struct Controller { - local_data_dir: path::PathBuf, - logs_dir: path::PathBuf, - zipper: Zipper, - #[allow(clippy::struct_field_names)] - projects_controller: projects::Controller, -} - -impl Controller { - pub fn new( - local_data_dir: path::PathBuf, - logs_dir: path::PathBuf, - zipper: Zipper, - projects_controller: projects::Controller, - ) -> Self { - Self { - local_data_dir, - logs_dir, - zipper, - projects_controller, - } - } - - pub fn archive(&self, project_id: &ProjectId) -> Result { - let project = self.projects_controller.get(project_id)?; - self.zipper.zip(project.path).map_err(Into::into) - } - - pub fn data_archive(&self, project_id: &ProjectId) -> Result { - let project = self.projects_controller.get(project_id)?; - self.zipper - .zip( - self.local_data_dir - .join("projects") - .join(project.id.to_string()), - ) - .map_err(Into::into) - } - - pub fn logs_archive(&self) -> Result { - self.zipper.zip(&self.logs_dir).map_err(Into::into) - } -} - -#[derive(thiserror::Error, Debug)] -pub enum ArchiveError { - #[error(transparent)] - GetProject(#[from] projects::GetError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(thiserror::Error, Debug)] -pub enum DataArchiveError { - #[error(transparent)] - GetProject(#[from] projects::GetError), - #[error(transparent)] - Other(#[from] anyhow::Error), -} - -#[derive(thiserror::Error, Debug)] -pub enum LogsArchiveError { - #[error(transparent)] - Other(#[from] anyhow::Error), -} diff --git a/gitbutler-app/tests/app.rs b/gitbutler-app/tests/app.rs index 2369cbe1a..1ddd035f9 100644 --- a/gitbutler-app/tests/app.rs +++ b/gitbutler-app/tests/app.rs @@ -1,261 +1,4 @@ -const VAR_NO_CLEANUP: &str 
= "GITBUTLER_TESTS_NO_CLEANUP"; - -pub(crate) mod common; -mod suite { - mod gb_repository; - mod projects; - mod virtual_branches; -} - -mod database; -mod deltas; -mod gb_repository; -mod git; -mod keys; -mod lock; -mod reader; -mod sessions; -mod types; -pub mod virtual_branches; +// TODO(ST): move test code into crate and use that, but wait for `crates/` +#[path = "../../tests/shared/mod.rs"] +pub mod shared; mod watcher; -mod zip; - -use std::path::{Path, PathBuf}; -use std::{collections::HashMap, fs}; - -use tempfile::{tempdir, TempDir}; - -pub struct Suite { - pub local_app_data: Option, - pub storage: gitbutler_app::storage::Storage, - pub users: gitbutler_app::users::Controller, - pub projects: gitbutler_app::projects::Controller, - pub keys: gitbutler_app::keys::Controller, -} - -impl Drop for Suite { - fn drop(&mut self) { - if std::env::var_os(VAR_NO_CLEANUP).is_some() { - let _ = self.local_app_data.take().unwrap().into_path(); - } - } -} - -impl Default for Suite { - fn default() -> Self { - let local_app_data = temp_dir(); - let storage = gitbutler_app::storage::Storage::new(&local_app_data); - let users = gitbutler_app::users::Controller::from_path(&local_app_data); - let projects = gitbutler_app::projects::Controller::from_path(&local_app_data); - let keys = gitbutler_app::keys::Controller::from_path(&local_app_data); - Self { - storage, - local_app_data: Some(local_app_data), - users, - projects, - keys, - } - } -} - -impl Suite { - pub fn local_app_data(&self) -> &Path { - self.local_app_data.as_ref().unwrap().path() - } - pub fn sign_in(&self) -> gitbutler_app::users::User { - let user = gitbutler_app::users::User { - name: Some("test".to_string()), - email: "test@email.com".to_string(), - access_token: "token".to_string(), - ..Default::default() - }; - self.users.set_user(&user).expect("failed to add user"); - user - } - - fn project(&self, fs: HashMap) -> (gitbutler_app::projects::Project, TempDir) { - let (repository, tmp) = 
test_repository(); - for (path, contents) in fs { - if let Some(parent) = path.parent() { - fs::create_dir_all(repository.path().parent().unwrap().join(parent)) - .expect("failed to create dir"); - } - fs::write( - repository.path().parent().unwrap().join(&path), - contents.as_bytes(), - ) - .expect("failed to write file"); - } - commit_all(&repository); - - ( - self.projects - .add(repository.path().parent().unwrap()) - .expect("failed to add project"), - tmp, - ) - } - - pub fn new_case_with_files(&self, fs: HashMap) -> Case { - let (project, project_tmp) = self.project(fs); - Case::new(self, project, project_tmp) - } - - pub fn new_case(&self) -> Case { - self.new_case_with_files(HashMap::new()) - } -} - -pub struct Case<'a> { - suite: &'a Suite, - pub project: gitbutler_app::projects::Project, - pub project_repository: gitbutler_app::project_repository::Repository, - pub gb_repository: gitbutler_app::gb_repository::Repository, - pub credentials: gitbutler_app::git::credentials::Helper, - /// The directory containing the `project_repository` - project_tmp: Option, -} - -impl Drop for Case<'_> { - fn drop(&mut self) { - if let Some(tmp) = self - .project_tmp - .take() - .filter(|_| std::env::var_os(VAR_NO_CLEANUP).is_some()) - { - let _ = tmp.into_path(); - } - } -} - -impl<'a> Case<'a> { - fn new( - suite: &'a Suite, - project: gitbutler_app::projects::Project, - project_tmp: TempDir, - ) -> Case<'a> { - let project_repository = gitbutler_app::project_repository::Repository::open(&project) - .expect("failed to create project repository"); - let gb_repository = gitbutler_app::gb_repository::Repository::open( - suite.local_app_data(), - &project_repository, - None, - ) - .expect("failed to open gb repository"); - let credentials = - gitbutler_app::git::credentials::Helper::from_path(suite.local_app_data()); - Case { - suite, - project, - gb_repository, - project_repository, - project_tmp: Some(project_tmp), - credentials, - } - } - - pub fn refresh(mut self) -> 
Self { - let project = self - .suite - .projects - .get(&self.project.id) - .expect("failed to get project"); - let project_repository = gitbutler_app::project_repository::Repository::open(&project) - .expect("failed to create project repository"); - let user = self.suite.users.get_user().expect("failed to get user"); - let credentials = - gitbutler_app::git::credentials::Helper::from_path(self.suite.local_app_data()); - Self { - suite: self.suite, - gb_repository: gitbutler_app::gb_repository::Repository::open( - self.suite.local_app_data(), - &project_repository, - user.as_ref(), - ) - .expect("failed to open gb repository"), - credentials, - project_repository, - project, - project_tmp: self.project_tmp.take(), - } - } -} - -pub fn test_database() -> (gitbutler_app::database::Database, TempDir) { - let tmp = temp_dir(); - let db = gitbutler_app::database::Database::open_in_directory(&tmp).unwrap(); - (db, tmp) -} - -pub fn temp_dir() -> TempDir { - tempdir().unwrap() -} - -pub fn empty_bare_repository() -> (gitbutler_app::git::Repository, TempDir) { - let tmp = temp_dir(); - ( - gitbutler_app::git::Repository::init_opts(&tmp, &init_opts_bare()) - .expect("failed to init repository"), - tmp, - ) -} - -pub fn test_repository() -> (gitbutler_app::git::Repository, TempDir) { - let tmp = temp_dir(); - let repository = gitbutler_app::git::Repository::init_opts(&tmp, &init_opts()) - .expect("failed to init repository"); - let mut index = repository.index().expect("failed to get index"); - let oid = index.write_tree().expect("failed to write tree"); - let signature = gitbutler_app::git::Signature::now("test", "test@email.com").unwrap(); - repository - .commit( - Some(&"refs/heads/master".parse().unwrap()), - &signature, - &signature, - "Initial commit", - &repository.find_tree(oid).expect("failed to find tree"), - &[], - ) - .expect("failed to commit"); - (repository, tmp) -} - -pub fn commit_all(repository: &gitbutler_app::git::Repository) -> gitbutler_app::git::Oid { 
- let mut index = repository.index().expect("failed to get index"); - index - .add_all(["."], git2::IndexAddOption::DEFAULT, None) - .expect("failed to add all"); - index.write().expect("failed to write index"); - let oid = index.write_tree().expect("failed to write tree"); - let signature = gitbutler_app::git::Signature::now("test", "test@email.com").unwrap(); - let head = repository.head().expect("failed to get head"); - let commit_oid = repository - .commit( - Some(&head.name().unwrap()), - &signature, - &signature, - "some commit", - &repository.find_tree(oid).expect("failed to find tree"), - &[&repository - .find_commit( - repository - .refname_to_id("HEAD") - .expect("failed to get head"), - ) - .expect("failed to find commit")], - ) - .expect("failed to commit"); - commit_oid -} - -fn init_opts() -> git2::RepositoryInitOptions { - let mut opts = git2::RepositoryInitOptions::new(); - opts.initial_head("master"); - opts -} - -pub fn init_opts_bare() -> git2::RepositoryInitOptions { - let mut opts = init_opts(); - opts.bare(true); - opts -} diff --git a/gitbutler-app/tests/common/mod.rs b/gitbutler-app/tests/common/mod.rs deleted file mode 100644 index c236e06f7..000000000 --- a/gitbutler-app/tests/common/mod.rs +++ /dev/null @@ -1,355 +0,0 @@ -#![allow(unused)] -use crate::{init_opts, VAR_NO_CLEANUP}; -use gitbutler_app::git; -use std::{path, str::from_utf8}; -use tempfile::TempDir; - -pub fn temp_dir() -> TempDir { - tempfile::tempdir().unwrap() -} - -pub struct TestProject { - local_repository: git::Repository, - local_tmp: Option, - remote_repository: git::Repository, - remote_tmp: Option, -} - -impl Drop for TestProject { - fn drop(&mut self) { - if std::env::var_os(VAR_NO_CLEANUP).is_some() { - let _ = self.local_tmp.take().unwrap().into_path(); - let _ = self.remote_tmp.take().unwrap().into_path(); - } - } -} - -impl Default for TestProject { - fn default() -> Self { - let local_tmp = temp_dir(); - let local_repository = 
git::Repository::init_opts(local_tmp.path(), &init_opts()) - .expect("failed to init repository"); - let mut index = local_repository.index().expect("failed to get index"); - let oid = index.write_tree().expect("failed to write tree"); - let signature = git::Signature::now("test", "test@email.com").unwrap(); - local_repository - .commit( - Some(&"refs/heads/master".parse().unwrap()), - &signature, - &signature, - "Initial commit", - &local_repository - .find_tree(oid) - .expect("failed to find tree"), - &[], - ) - .expect("failed to commit"); - - let remote_tmp = temp_dir(); - let remote_repository = git::Repository::init_opts( - remote_tmp.path(), - git2::RepositoryInitOptions::new() - .bare(true) - .external_template(false), - ) - .expect("failed to init repository"); - - { - let mut remote = local_repository - .remote( - "origin", - &remote_repository - .path() - .to_str() - .expect("failed to convert path to str") - .parse() - .unwrap(), - ) - .expect("failed to add remote"); - remote - .push(&["refs/heads/master:refs/heads/master"], None) - .expect("failed to push"); - } - - Self { - local_repository, - local_tmp: Some(local_tmp), - remote_repository, - remote_tmp: Some(remote_tmp), - } - } -} - -impl TestProject { - pub fn path(&self) -> &std::path::Path { - self.local_repository.workdir().unwrap() - } - - pub fn push_branch(&self, branch: &git::LocalRefname) { - let mut origin = self.local_repository.find_remote("origin").unwrap(); - origin.push(&[&format!("{branch}:{branch}")], None).unwrap(); - } - - pub fn push(&self) { - let mut origin = self.local_repository.find_remote("origin").unwrap(); - origin - .push(&["refs/heads/master:refs/heads/master"], None) - .unwrap(); - } - - /// git add -A - /// git reset --hard - pub fn reset_hard(&self, oid: Option) { - let mut index = self.local_repository.index().expect("failed to get index"); - index - .add_all(["."], git2::IndexAddOption::DEFAULT, None) - .expect("failed to add all"); - index.write().expect("failed 
to write index"); - - let head = self.local_repository.head().unwrap(); - let commit = oid.map_or(head.peel_to_commit().unwrap(), |oid| { - self.local_repository.find_commit(oid).unwrap() - }); - - let head_ref = head.name().unwrap(); - let head_ref = self.local_repository.find_reference(&head_ref).unwrap(); - - self.local_repository - .reset(&commit, git2::ResetType::Hard, None) - .unwrap(); - } - - /// fetch remote into local - pub fn fetch(&self) { - let mut remote = self.local_repository.find_remote("origin").unwrap(); - remote - .fetch(&["+refs/heads/*:refs/remotes/origin/*"], None) - .unwrap(); - } - - pub fn rebase_and_merge(&self, branch_name: &git::Refname) { - let branch_name: git::Refname = match branch_name { - git::Refname::Local(local) => format!("refs/heads/{}", local.branch()).parse().unwrap(), - git::Refname::Remote(remote) => { - format!("refs/heads/{}", remote.branch()).parse().unwrap() - } - _ => "INVALID".parse().unwrap(), // todo - }; - let branch = self.remote_repository.find_branch(&branch_name).unwrap(); - let branch_commit = branch.peel_to_commit().unwrap(); - - let master_branch = { - let name: git::Refname = "refs/heads/master".parse().unwrap(); - self.remote_repository.find_branch(&name).unwrap() - }; - let master_branch_commit = master_branch.peel_to_commit().unwrap(); - - let mut rebase_options = git2::RebaseOptions::new(); - rebase_options.quiet(true); - rebase_options.inmemory(true); - - let mut rebase = self - .remote_repository - .rebase( - Some(branch_commit.id()), - Some(master_branch_commit.id()), - None, - Some(&mut rebase_options), - ) - .unwrap(); - - let mut rebase_success = true; - let mut last_rebase_head = branch_commit.id(); - while let Some(Ok(op)) = rebase.next() { - let commit = self.remote_repository.find_commit(op.id().into()).unwrap(); - let index = rebase.inmemory_index().unwrap(); - if index.has_conflicts() { - rebase_success = false; - break; - } - - if let Ok(commit_id) = rebase.commit(None, 
&commit.committer().into(), None) { - last_rebase_head = commit_id.into(); - } else { - rebase_success = false; - break; - }; - } - - if rebase_success { - self.remote_repository - .reference( - &"refs/heads/master".parse().unwrap(), - last_rebase_head, - true, - &format!("rebase: {}", branch_name), - ) - .unwrap(); - } else { - rebase.abort().unwrap(); - } - } - - /// works like if we'd open and merge a PR on github. does not update local. - pub fn merge(&self, branch_name: &git::Refname) { - let branch_name: git::Refname = match branch_name { - git::Refname::Local(local) => format!("refs/heads/{}", local.branch()).parse().unwrap(), - git::Refname::Remote(remote) => { - format!("refs/heads/{}", remote.branch()).parse().unwrap() - } - _ => "INVALID".parse().unwrap(), // todo - }; - let branch = self.remote_repository.find_branch(&branch_name).unwrap(); - let branch_commit = branch.peel_to_commit().unwrap(); - - let master_branch = { - let name: git::Refname = "refs/heads/master".parse().unwrap(); - self.remote_repository.find_branch(&name).unwrap() - }; - let master_branch_commit = master_branch.peel_to_commit().unwrap(); - - let merge_base = { - let oid = self - .remote_repository - .merge_base(branch_commit.id(), master_branch_commit.id()) - .unwrap(); - self.remote_repository.find_commit(oid).unwrap() - }; - let merge_tree = { - let mut merge_index = self - .remote_repository - .merge_trees( - &merge_base.tree().unwrap(), - &master_branch.peel_to_tree().unwrap(), - &branch.peel_to_tree().unwrap(), - ) - .unwrap(); - let oid = merge_index.write_tree_to(&self.remote_repository).unwrap(); - self.remote_repository.find_tree(oid).unwrap() - }; - - self.remote_repository - .commit( - Some(&"refs/heads/master".parse().unwrap()), - &branch_commit.author(), - &branch_commit.committer(), - &format!("Merge pull request from {}", branch_name), - &merge_tree, - &[&master_branch_commit, &branch_commit], - ) - .unwrap(); - } - - pub fn find_commit(&self, oid: git::Oid) -> 
Result { - self.local_repository.find_commit(oid) - } - - pub fn checkout_commit(&self, commit_oid: git::Oid) { - let commit = self.local_repository.find_commit(commit_oid).unwrap(); - let commit_tree = commit.tree().unwrap(); - - self.local_repository.set_head_detached(commit_oid).unwrap(); - self.local_repository - .checkout_tree(&commit_tree) - .force() - .checkout() - .unwrap(); - } - - pub fn checkout(&self, branch: &git::LocalRefname) { - let branch: git::Refname = branch.into(); - let tree = match self.local_repository.find_branch(&branch) { - Ok(branch) => branch.peel_to_tree(), - Err(git::Error::NotFound(_)) => { - let head_commit = self - .local_repository - .head() - .unwrap() - .peel_to_commit() - .unwrap(); - self.local_repository - .reference(&branch, head_commit.id(), false, "new branch") - .unwrap(); - head_commit.tree() - } - Err(error) => Err(error), - } - .unwrap(); - self.local_repository.set_head(&branch).unwrap(); - self.local_repository - .checkout_tree(&tree) - .force() - .checkout() - .unwrap(); - } - - /// takes all changes in the working directory and commits them into local - pub fn commit_all(&self, message: &str) -> git::Oid { - let head = self.local_repository.head().unwrap(); - let mut index = self.local_repository.index().expect("failed to get index"); - index - .add_all(["."], git2::IndexAddOption::DEFAULT, None) - .expect("failed to add all"); - index.write().expect("failed to write index"); - let oid = index.write_tree().expect("failed to write tree"); - let signature = git::Signature::now("test", "test@email.com").unwrap(); - self.local_repository - .commit( - head.name().as_ref(), - &signature, - &signature, - message, - &self - .local_repository - .find_tree(oid) - .expect("failed to find tree"), - &[&self - .local_repository - .find_commit( - self.local_repository - .refname_to_id("HEAD") - .expect("failed to get head"), - ) - .expect("failed to find commit")], - ) - .expect("failed to commit") - } - - pub fn 
references(&self) -> Vec { - self.local_repository - .references() - .expect("failed to get references") - .collect::, _>>() - .expect("failed to read references") - } - - pub fn add_submodule(&self, url: &git::Url, path: &path::Path) { - let mut submodule = self.local_repository.add_submodule(url, path).unwrap(); - let repo = submodule.open().unwrap(); - - // checkout submodule's master head - repo.find_remote("origin") - .unwrap() - .fetch(&["+refs/heads/*:refs/heads/*"], None, None) - .unwrap(); - let reference = repo.find_reference("refs/heads/master").unwrap(); - let reference_head = repo.find_commit(reference.target().unwrap()).unwrap(); - repo.checkout_tree(reference_head.tree().unwrap().as_object(), None) - .unwrap(); - - // be sure that `HEAD` points to the actual head - `git2` seems to initialize it - // with `init.defaultBranch`, causing failure otherwise. - repo.set_head("refs/heads/master"); - submodule.add_finalize().unwrap(); - } -} - -pub mod paths { - use super::temp_dir; - use std::path; - use tempfile::TempDir; - - pub fn data_dir() -> TempDir { - temp_dir() - } -} diff --git a/gitbutler-app/tests/database/mod.rs b/gitbutler-app/tests/database/mod.rs deleted file mode 100644 index a75e8260e..000000000 --- a/gitbutler-app/tests/database/mod.rs +++ /dev/null @@ -1,20 +0,0 @@ -use crate::temp_dir; -use gitbutler_app::database::Database; - -#[test] -fn smoke() { - let data_dir = temp_dir(); - let db = Database::open_in_directory(data_dir.path()).unwrap(); - db.transaction(|tx| { - tx.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)", []) - .unwrap(); - tx.execute("INSERT INTO test (id) VALUES (1)", []).unwrap(); - let mut stmt = tx.prepare("SELECT id FROM test").unwrap(); - let mut rows = stmt.query([]).unwrap(); - let row = rows.next().unwrap().unwrap(); - let id: i32 = row.get(0).unwrap(); - assert_eq!(id, 1_i32); - Ok(()) - }) - .unwrap(); -} diff --git a/gitbutler-app/tests/deltas/document.rs b/gitbutler-app/tests/deltas/document.rs deleted 
file mode 100644 index 3c35d18a4..000000000 --- a/gitbutler-app/tests/deltas/document.rs +++ /dev/null @@ -1,263 +0,0 @@ -use gitbutler_app::deltas::operations::Operation; -use gitbutler_app::deltas::{Delta, Document}; -use gitbutler_app::reader; - -#[test] -fn new() { - let document = Document::new( - Some(&reader::Content::UTF8("hello world".to_string())), - vec![], - ); - assert!(document.is_ok()); - let document = document.unwrap(); - assert_eq!(document.to_string(), "hello world"); - assert_eq!(document.get_deltas().len(), 0); -} - -#[test] -fn update() { - let document = Document::new( - Some(&reader::Content::UTF8("hello world".to_string())), - vec![], - ); - assert!(document.is_ok()); - let mut document = document.unwrap(); - document - .update(Some(&reader::Content::UTF8("hello world!".to_string()))) - .unwrap(); - assert_eq!(document.to_string(), "hello world!"); - assert_eq!(document.get_deltas().len(), 1); - assert_eq!(document.get_deltas()[0].operations.len(), 1); - assert_eq!( - document.get_deltas()[0].operations[0], - Operation::Insert((11, "!".to_string())) - ); -} - -#[test] -fn empty() { - let document = Document::new(None, vec![]); - assert!(document.is_ok()); - let mut document = document.unwrap(); - document - .update(Some(&reader::Content::UTF8("hello world!".to_string()))) - .unwrap(); - assert_eq!(document.to_string(), "hello world!"); - assert_eq!(document.get_deltas().len(), 1); - assert_eq!(document.get_deltas()[0].operations.len(), 1); - assert_eq!( - document.get_deltas()[0].operations[0], - Operation::Insert((0, "hello world!".to_string())) - ); -} - -#[test] -fn from_deltas() { - let document = Document::new( - None, - vec![ - Delta { - timestamp_ms: 0, - operations: vec![Operation::Insert((0, "hello".to_string()))], - }, - Delta { - timestamp_ms: 1, - operations: vec![Operation::Insert((5, " world".to_string()))], - }, - Delta { - timestamp_ms: 2, - operations: vec![ - Operation::Delete((3, 7)), - Operation::Insert((4, 
"!".to_string())), - ], - }, - ], - ); - assert!(document.is_ok()); - let document = document.unwrap(); - assert_eq!(document.to_string(), "held!"); -} - -#[test] -fn complex_line() { - let document = Document::new(None, vec![]); - assert!(document.is_ok()); - let mut document = document.unwrap(); - - document - .update(Some(&reader::Content::UTF8("hello".to_string()))) - .unwrap(); - assert_eq!(document.to_string(), "hello"); - assert_eq!(document.get_deltas().len(), 1); - assert_eq!(document.get_deltas()[0].operations.len(), 1); - assert_eq!( - document.get_deltas()[0].operations[0], - Operation::Insert((0, "hello".to_string())) - ); - - document - .update(Some(&reader::Content::UTF8("hello world".to_string()))) - .unwrap(); - assert_eq!(document.to_string(), "hello world"); - assert_eq!(document.get_deltas().len(), 2); - assert_eq!(document.get_deltas()[1].operations.len(), 1); - assert_eq!( - document.get_deltas()[1].operations[0], - Operation::Insert((5, " world".to_string())) - ); - - document - .update(Some(&reader::Content::UTF8("held!".to_string()))) - .unwrap(); - assert_eq!(document.to_string(), "held!"); - assert_eq!(document.get_deltas().len(), 3); - assert_eq!(document.get_deltas()[2].operations.len(), 2); - assert_eq!( - document.get_deltas()[2].operations[0], - Operation::Delete((3, 7)) - ); - assert_eq!( - document.get_deltas()[2].operations[1], - Operation::Insert((4, "!".to_string())), - ); -} - -#[test] -fn multiline_add() { - let document = Document::new(None, vec![]); - assert!(document.is_ok()); - let mut document = document.unwrap(); - - document - .update(Some(&reader::Content::UTF8("first".to_string()))) - .unwrap(); - assert_eq!(document.to_string(), "first"); - assert_eq!(document.get_deltas().len(), 1); - assert_eq!(document.get_deltas()[0].operations.len(), 1); - assert_eq!( - document.get_deltas()[0].operations[0], - Operation::Insert((0, "first".to_string())) - ); - - document - 
.update(Some(&reader::Content::UTF8("first\ntwo".to_string()))) - .unwrap(); - assert_eq!(document.to_string(), "first\ntwo"); - assert_eq!(document.get_deltas().len(), 2); - assert_eq!(document.get_deltas()[1].operations.len(), 1); - assert_eq!( - document.get_deltas()[1].operations[0], - Operation::Insert((5, "\ntwo".to_string())) - ); - - document - .update(Some(&reader::Content::UTF8( - "first line\nline two".to_string(), - ))) - .unwrap(); - assert_eq!(document.to_string(), "first line\nline two"); - assert_eq!(document.get_deltas().len(), 3); - assert_eq!(document.get_deltas()[2].operations.len(), 2); - assert_eq!( - document.get_deltas()[2].operations[0], - Operation::Insert((5, " line".to_string())) - ); - assert_eq!( - document.get_deltas()[2].operations[1], - Operation::Insert((11, "line ".to_string())) - ); -} - -#[test] -fn multiline_remove() { - let document = Document::new(None, vec![]); - assert!(document.is_ok()); - let mut document = document.unwrap(); - - document - .update(Some(&reader::Content::UTF8( - "first line\nline two".to_string(), - ))) - .unwrap(); - assert_eq!(document.to_string(), "first line\nline two"); - assert_eq!(document.get_deltas().len(), 1); - assert_eq!(document.get_deltas()[0].operations.len(), 1); - assert_eq!( - document.get_deltas()[0].operations[0], - Operation::Insert((0, "first line\nline two".to_string())) - ); - - document - .update(Some(&reader::Content::UTF8("first\ntwo".to_string()))) - .unwrap(); - assert_eq!(document.to_string(), "first\ntwo"); - assert_eq!(document.get_deltas().len(), 2); - assert_eq!(document.get_deltas()[1].operations.len(), 2); - assert_eq!( - document.get_deltas()[1].operations[0], - Operation::Delete((5, 5)) - ); - assert_eq!( - document.get_deltas()[1].operations[1], - Operation::Delete((6, 5)) - ); - - document - .update(Some(&reader::Content::UTF8("first".to_string()))) - .unwrap(); - assert_eq!(document.to_string(), "first"); - assert_eq!(document.get_deltas().len(), 3); - 
assert_eq!(document.get_deltas()[2].operations.len(), 1); - assert_eq!( - document.get_deltas()[2].operations[0], - Operation::Delete((5, 4)) - ); - - document.update(None).unwrap(); - assert_eq!(document.to_string(), ""); - assert_eq!(document.get_deltas().len(), 4); - assert_eq!(document.get_deltas()[3].operations.len(), 1); - assert_eq!( - document.get_deltas()[3].operations[0], - Operation::Delete((0, 5)) - ); -} - -#[test] -fn binary_to_text() { - let latest = reader::Content::Binary; - let current = reader::Content::UTF8("test".to_string()); - let mut document = Document::new(Some(&latest), vec![]).unwrap(); - let new_deltas = document.update(Some(¤t)).unwrap(); - assert!(new_deltas.is_some()); - assert_eq!(document.to_string(), "test"); -} - -#[test] -fn binary_to_binary() { - let latest = reader::Content::Binary; - let current = reader::Content::Binary; - let mut document = Document::new(Some(&latest), vec![]).unwrap(); - let new_deltas = document.update(Some(¤t)).unwrap(); - assert!(new_deltas.is_some()); - assert_eq!(document.to_string(), ""); -} - -#[test] -fn text_to_binary() { - let latest = reader::Content::UTF8("text".to_string()); - let current = reader::Content::Binary; - let mut document = Document::new(Some(&latest), vec![]).unwrap(); - let new_deltas = document.update(Some(¤t)).unwrap(); - assert!(new_deltas.is_some()); - assert_eq!(document.to_string(), ""); -} - -#[test] -fn unicode() { - let latest = reader::Content::UTF8("\u{1f31a}".to_string()); - let current = reader::Content::UTF8("\u{1f31d}".to_string()); - let mut document = Document::new(Some(&latest), vec![]).unwrap(); - document.update(Some(¤t)).unwrap(); - assert_eq!(document.to_string(), "\u{1f31d}"); -} diff --git a/gitbutler-app/tests/deltas/mod.rs b/gitbutler-app/tests/deltas/mod.rs deleted file mode 100644 index 7241f78bc..000000000 --- a/gitbutler-app/tests/deltas/mod.rs +++ /dev/null @@ -1,146 +0,0 @@ -mod database { - use crate::test_database; - use 
gitbutler_app::deltas::{operations, Database, Delta}; - use gitbutler_app::projects::ProjectId; - use gitbutler_app::sessions::SessionId; - use std::path; - - #[test] - fn insert_query() -> anyhow::Result<()> { - let (db, _tmp) = test_database(); - let database = Database::new(db); - - let project_id = ProjectId::generate(); - let session_id = SessionId::generate(); - let file_path = path::PathBuf::from("file_path"); - let delta1 = Delta { - timestamp_ms: 0, - operations: vec![operations::Operation::Insert((0, "text".to_string()))], - }; - let deltas = vec![delta1.clone()]; - - database.insert(&project_id, &session_id, &file_path, &deltas)?; - - assert_eq!( - database.list_by_project_id_session_id(&project_id, &session_id, &None)?, - vec![(file_path.display().to_string(), vec![delta1])] - .into_iter() - .collect() - ); - - Ok(()) - } - - #[test] - fn insert_update() -> anyhow::Result<()> { - let (db, _tmp) = test_database(); - let database = Database::new(db); - - let project_id = ProjectId::generate(); - let session_id = SessionId::generate(); - let file_path = path::PathBuf::from("file_path"); - let delta1 = Delta { - timestamp_ms: 0, - operations: vec![operations::Operation::Insert((0, "text".to_string()))], - }; - let delta2 = Delta { - timestamp_ms: 0, - operations: vec![operations::Operation::Insert(( - 0, - "updated_text".to_string(), - ))], - }; - - database.insert(&project_id, &session_id, &file_path, &vec![delta1])?; - database.insert(&project_id, &session_id, &file_path, &vec![delta2.clone()])?; - - assert_eq!( - database.list_by_project_id_session_id(&project_id, &session_id, &None)?, - vec![(file_path.display().to_string(), vec![delta2])] - .into_iter() - .collect() - ); - - Ok(()) - } - - #[test] - fn aggregate_deltas_by_file() -> anyhow::Result<()> { - let (db, _tmp) = test_database(); - let database = Database::new(db); - - let project_id = ProjectId::generate(); - let session_id = SessionId::generate(); - let file_path1 = 
path::PathBuf::from("file_path1"); - let file_path2 = path::PathBuf::from("file_path2"); - let delta1 = Delta { - timestamp_ms: 1, - operations: vec![operations::Operation::Insert((0, "text".to_string()))], - }; - let delta2 = Delta { - timestamp_ms: 2, - operations: vec![operations::Operation::Insert(( - 0, - "updated_text".to_string(), - ))], - }; - - database.insert(&project_id, &session_id, &file_path1, &vec![delta1.clone()])?; - database.insert(&project_id, &session_id, &file_path2, &vec![delta1.clone()])?; - database.insert(&project_id, &session_id, &file_path2, &vec![delta2.clone()])?; - - assert_eq!( - database.list_by_project_id_session_id(&project_id, &session_id, &None)?, - vec![ - (file_path1.display().to_string(), vec![delta1.clone()]), - (file_path2.display().to_string(), vec![delta1, delta2]) - ] - .into_iter() - .collect() - ); - - Ok(()) - } -} - -mod document; -mod operations; - -mod writer { - use gitbutler_app::deltas::operations::Operation; - use gitbutler_app::{deltas, sessions}; - use std::vec; - - use crate::{Case, Suite}; - - #[test] - fn write_no_vbranches() -> anyhow::Result<()> { - let suite = Suite::default(); - let Case { gb_repository, .. 
} = &suite.new_case(); - - let deltas_writer = deltas::Writer::new(gb_repository)?; - - let session = gb_repository.get_or_create_current_session()?; - let session_reader = sessions::Reader::open(gb_repository, &session)?; - let deltas_reader = gitbutler_app::deltas::Reader::new(&session_reader); - - let path = "test.txt"; - let deltas = vec![ - gitbutler_app::deltas::Delta { - operations: vec![Operation::Insert((0, "hello".to_string()))], - timestamp_ms: 0, - }, - gitbutler_app::deltas::Delta { - operations: vec![Operation::Insert((5, " world".to_string()))], - timestamp_ms: 0, - }, - ]; - - deltas_writer.write(path, &deltas).unwrap(); - - assert_eq!(deltas_reader.read_file(path).unwrap(), Some(deltas)); - assert_eq!(deltas_reader.read_file("not found").unwrap(), None); - - Ok(()) - } -} diff --git a/gitbutler-app/tests/deltas/operations.rs b/gitbutler-app/tests/deltas/operations.rs deleted file mode 100644 index ed080a986..000000000 --- a/gitbutler-app/tests/deltas/operations.rs +++ /dev/null @@ -1,55 +0,0 @@ -use gitbutler_app::deltas::operations::{get_delta_operations, Operation}; - -#[test] -fn get_delta_operations_insert_end() { - let initial_text = "hello"; - let final_text = "hello world!"; - let operations = get_delta_operations(initial_text, final_text); - assert_eq!(operations.len(), 1); - assert_eq!(operations[0], Operation::Insert((5, " world!".to_string()))); -} - -#[test] -fn get_delta_operations_insert_middle() { - let initial_text = "helloworld"; - let final_text = "hello, world"; - let operations = get_delta_operations(initial_text, final_text); - assert_eq!(operations.len(), 1); - assert_eq!(operations[0], Operation::Insert((5, ", ".to_string()))); -} - -#[test] -fn get_delta_operations_insert_begin() { - let initial_text = "world"; - let final_text = "hello world"; - let operations = get_delta_operations(initial_text, final_text); - assert_eq!(operations.len(), 1); - assert_eq!(operations[0], Operation::Insert((0, "hello ".to_string()))); -} - 
-#[test] -fn get_delta_operations_delete_end() { - let initial_text = "hello world!"; - let final_text = "hello"; - let operations = get_delta_operations(initial_text, final_text); - assert_eq!(operations.len(), 1); - assert_eq!(operations[0], Operation::Delete((5, 7))); -} - -#[test] -fn get_delta_operations_delete_middle() { - let initial_text = "hello, world"; - let final_text = "helloworld"; - let operations = get_delta_operations(initial_text, final_text); - assert_eq!(operations.len(), 1); - assert_eq!(operations[0], Operation::Delete((5, 2))); -} - -#[test] -fn get_delta_operations_delete_begin() { - let initial_text = "hello world"; - let final_text = "world"; - let operations = get_delta_operations(initial_text, final_text); - assert_eq!(operations.len(), 1); - assert_eq!(operations[0], Operation::Delete((0, 6))); -} diff --git a/gitbutler-app/tests/gb_repository/mod.rs b/gitbutler-app/tests/gb_repository/mod.rs deleted file mode 100644 index 822702a28..000000000 --- a/gitbutler-app/tests/gb_repository/mod.rs +++ /dev/null @@ -1,490 +0,0 @@ -use std::{collections::HashMap, path, thread, time}; - -use anyhow::Result; -use pretty_assertions::assert_eq; -use tempfile::TempDir; - -use crate::init_opts_bare; -use crate::{Case, Suite}; - -use gitbutler_app::{ - deltas::{self, operations::Operation}, - projects::{self, ApiProject, ProjectId}, - reader, - sessions::{self, SessionId}, -}; - -mod repository { - use std::path::PathBuf; - - use crate::{Case, Suite}; - use anyhow::Result; - use pretty_assertions::assert_eq; - - #[test] - fn alternates_file_being_set() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - .. 
- } = &suite.new_case(); - - let file_content = std::fs::read_to_string( - gb_repository - .git_repository_path() - .join("objects/info/alternates"), - )?; - - let file_content = PathBuf::from(file_content.trim()); - let project_path = project_repository.path().to_path_buf().join(".git/objects"); - - assert_eq!(file_content, project_path); - - Ok(()) - } -} - -fn new_test_remote_repository() -> Result<(git2::Repository, TempDir)> { - let tmp = tempfile::tempdir()?; - let path = tmp.path().to_str().unwrap().to_string(); - let repo_a = git2::Repository::init_opts(path, &init_opts_bare())?; - Ok((repo_a, tmp)) -} - -#[test] -fn get_current_session_writer_should_use_existing_session() -> Result<()> { - let suite = Suite::default(); - let Case { gb_repository, .. } = &suite.new_case(); - - let current_session_1 = gb_repository.get_or_create_current_session()?; - let current_session_2 = gb_repository.get_or_create_current_session()?; - assert_eq!(current_session_1.id, current_session_2.id); - - Ok(()) -} - -#[test] -fn must_not_return_init_session() -> Result<()> { - let suite = Suite::default(); - let Case { gb_repository, .. } = &suite.new_case(); - - assert!(gb_repository.get_current_session()?.is_none()); - - let iter = gb_repository.get_sessions_iterator()?; - assert_eq!(iter.count(), 0); - - Ok(()) -} - -#[test] -fn must_not_flush_without_current_session() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - .. - } = &suite.new_case(); - - let session = gb_repository.flush(project_repository, None)?; - assert!(session.is_none()); - - let iter = gb_repository.get_sessions_iterator()?; - assert_eq!(iter.count(), 0); - - Ok(()) -} - -#[test] -fn non_empty_repository() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - .. 
- } = &suite.new_case_with_files(HashMap::from([(path::PathBuf::from("test.txt"), "test")])); - - gb_repository.get_or_create_current_session()?; - gb_repository.flush(project_repository, None)?; - - Ok(()) -} - -#[test] -fn must_flush_current_session() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - .. - } = &suite.new_case(); - - gb_repository.get_or_create_current_session()?; - - let session = gb_repository.flush(project_repository, None)?; - assert!(session.is_some()); - - let iter = gb_repository.get_sessions_iterator()?; - assert_eq!(iter.count(), 1); - - Ok(()) -} - -#[test] -fn list_deltas_from_current_session() -> Result<()> { - let suite = Suite::default(); - let Case { gb_repository, .. } = &suite.new_case(); - - let current_session = gb_repository.get_or_create_current_session()?; - let writer = deltas::Writer::new(gb_repository)?; - writer.write( - "test.txt", - &vec![deltas::Delta { - operations: vec![Operation::Insert((0, "Hello World".to_string()))], - timestamp_ms: 0, - }], - )?; - - let session_reader = sessions::Reader::open(gb_repository, ¤t_session)?; - let deltas_reader = deltas::Reader::new(&session_reader); - let deltas = deltas_reader.read(None)?; - - assert_eq!(deltas.len(), 1); - assert_eq!( - deltas[&path::PathBuf::from("test.txt")][0].operations.len(), - 1 - ); - assert_eq!( - deltas[&path::PathBuf::from("test.txt")][0].operations[0], - Operation::Insert((0, "Hello World".to_string())) - ); - - Ok(()) -} - -#[test] -fn list_deltas_from_flushed_session() { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - .. 
- } = &suite.new_case(); - - let writer = deltas::Writer::new(gb_repository).unwrap(); - writer - .write( - "test.txt", - &vec![deltas::Delta { - operations: vec![Operation::Insert((0, "Hello World".to_string()))], - timestamp_ms: 0, - }], - ) - .unwrap(); - let session = gb_repository.flush(project_repository, None).unwrap(); - - let session_reader = sessions::Reader::open(gb_repository, &session.unwrap()).unwrap(); - let deltas_reader = deltas::Reader::new(&session_reader); - let deltas = deltas_reader.read(None).unwrap(); - - assert_eq!(deltas.len(), 1); - assert_eq!( - deltas[&path::PathBuf::from("test.txt")][0].operations.len(), - 1 - ); - assert_eq!( - deltas[&path::PathBuf::from("test.txt")][0].operations[0], - Operation::Insert((0, "Hello World".to_string())) - ); -} - -#[test] -fn list_files_from_current_session() { - let suite = Suite::default(); - let Case { gb_repository, .. } = &suite.new_case_with_files(HashMap::from([( - path::PathBuf::from("test.txt"), - "Hello World", - )])); - - let current = gb_repository.get_or_create_current_session().unwrap(); - let reader = sessions::Reader::open(gb_repository, ¤t).unwrap(); - let files = reader.files(None).unwrap(); - - assert_eq!(files.len(), 1); - assert_eq!( - files[&path::PathBuf::from("test.txt")], - reader::Content::UTF8("Hello World".to_string()) - ); -} - -#[test] -fn list_files_from_flushed_session() { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - .. 
- } = &suite.new_case_with_files(HashMap::from([( - path::PathBuf::from("test.txt"), - "Hello World", - )])); - - gb_repository.get_or_create_current_session().unwrap(); - let session = gb_repository - .flush(project_repository, None) - .unwrap() - .unwrap(); - let reader = sessions::Reader::open(gb_repository, &session).unwrap(); - let files = reader.files(None).unwrap(); - - assert_eq!(files.len(), 1); - assert_eq!( - files[&path::PathBuf::from("test.txt")], - reader::Content::UTF8("Hello World".to_string()) - ); -} - -#[tokio::test] -async fn remote_syncronization() { - // first, crate a remote, pretending it's a cloud - let (cloud, _tmp) = new_test_remote_repository().unwrap(); - let api_project = ApiProject { - name: "test-sync".to_string(), - description: None, - repository_id: "123".to_string(), - git_url: cloud.path().to_str().unwrap().to_string(), - code_git_url: None, - created_at: 0_i32.to_string(), - updated_at: 0_i32.to_string(), - sync: true, - }; - - let suite = Suite::default(); - let user = suite.sign_in(); - - // create first local project, add files, deltas and flush a session - let case_one = suite.new_case_with_files(HashMap::from([( - path::PathBuf::from("test.txt"), - "Hello World", - )])); - suite - .projects - .update(&projects::UpdateRequest { - id: case_one.project.id, - api: Some(api_project.clone()), - ..Default::default() - }) - .await - .unwrap(); - let case_one = case_one.refresh(); - - let writer = deltas::Writer::new(&case_one.gb_repository).unwrap(); - writer - .write( - "test.txt", - &vec![deltas::Delta { - operations: vec![Operation::Insert((0, "Hello World".to_string()))], - timestamp_ms: 0, - }], - ) - .unwrap(); - let session_one = case_one - .gb_repository - .flush(&case_one.project_repository, Some(&user)) - .unwrap() - .unwrap(); - case_one.gb_repository.push(Some(&user)).unwrap(); - - // create second local project, fetch it and make sure session is there - let case_two = suite.new_case(); - suite - .projects - 
.update(&projects::UpdateRequest { - id: case_two.project.id, - api: Some(api_project.clone()), - ..Default::default() - }) - .await - .unwrap(); - let case_two = case_two.refresh(); - - case_two.gb_repository.fetch(Some(&user)).unwrap(); - - // now it should have the session from the first local project synced - let sessions_two = case_two - .gb_repository - .get_sessions_iterator() - .unwrap() - .map(Result::unwrap) - .collect::>(); - assert_eq!(sessions_two.len(), 1); - assert_eq!(sessions_two[0].id, session_one.id); - - let session_reader = sessions::Reader::open(&case_two.gb_repository, &sessions_two[0]).unwrap(); - let deltas_reader = deltas::Reader::new(&session_reader); - let deltas = deltas_reader.read(None).unwrap(); - let files = session_reader.files(None).unwrap(); - assert_eq!(deltas.len(), 1); - assert_eq!(files.len(), 1); - assert_eq!( - files[&path::PathBuf::from("test.txt")], - reader::Content::UTF8("Hello World".to_string()) - ); - assert_eq!( - deltas[&path::PathBuf::from("test.txt")], - vec![deltas::Delta { - operations: vec![Operation::Insert((0, "Hello World".to_string()))], - timestamp_ms: 0, - }] - ); -} - -#[tokio::test] -async fn remote_sync_order() { - // first, crate a remote, pretending it's a cloud - let (cloud, _tmp) = new_test_remote_repository().unwrap(); - let api_project = projects::ApiProject { - name: "test-sync".to_string(), - description: None, - repository_id: "123".to_string(), - git_url: cloud.path().to_str().unwrap().to_string(), - code_git_url: None, - created_at: 0_i32.to_string(), - updated_at: 0_i32.to_string(), - sync: true, - }; - - let suite = Suite::default(); - - let case_one = suite.new_case(); - suite - .projects - .update(&projects::UpdateRequest { - id: case_one.project.id, - api: Some(api_project.clone()), - ..Default::default() - }) - .await - .unwrap(); - let case_one = case_one.refresh(); - - let case_two = suite.new_case(); - suite - .projects - .update(&projects::UpdateRequest { - id: 
case_two.project.id, - api: Some(api_project.clone()), - ..Default::default() - }) - .await - .unwrap(); - let case_two = case_two.refresh(); - - let user = suite.sign_in(); - - // create session in the first project - case_one - .gb_repository - .get_or_create_current_session() - .unwrap(); - let session_one_first = case_one - .gb_repository - .flush(&case_one.project_repository, Some(&user)) - .unwrap() - .unwrap(); - case_one.gb_repository.push(Some(&user)).unwrap(); - - thread::sleep(time::Duration::from_secs(1)); - - // create session in the second project - case_two - .gb_repository - .get_or_create_current_session() - .unwrap(); - let session_two_first = case_two - .gb_repository - .flush(&case_two.project_repository, Some(&user)) - .unwrap() - .unwrap(); - case_two.gb_repository.push(Some(&user)).unwrap(); - - thread::sleep(time::Duration::from_secs(1)); - - // create second session in the first project - case_one - .gb_repository - .get_or_create_current_session() - .unwrap(); - let session_one_second = case_one - .gb_repository - .flush(&case_one.project_repository, Some(&user)) - .unwrap() - .unwrap(); - case_one.gb_repository.push(Some(&user)).unwrap(); - - thread::sleep(time::Duration::from_secs(1)); - - // create second session in the second project - case_two - .gb_repository - .get_or_create_current_session() - .unwrap(); - let session_two_second = case_two - .gb_repository - .flush(&case_two.project_repository, Some(&user)) - .unwrap() - .unwrap(); - case_two.gb_repository.push(Some(&user)).unwrap(); - - case_one.gb_repository.fetch(Some(&user)).unwrap(); - let sessions_one = case_one - .gb_repository - .get_sessions_iterator() - .unwrap() - .map(Result::unwrap) - .collect::>(); - - case_two.gb_repository.fetch(Some(&user)).unwrap(); - let sessions_two = case_two - .gb_repository - .get_sessions_iterator() - .unwrap() - .map(Result::unwrap) - .collect::>(); - - // make sure the sessions are the same on both repos - assert_eq!(sessions_one.len(), 
4); - assert_eq!(sessions_two, sessions_one); - - assert_eq!(sessions_one[0].id, session_two_second.id); - assert_eq!(sessions_one[1].id, session_one_second.id); - assert_eq!(sessions_one[2].id, session_two_first.id); - assert_eq!(sessions_one[3].id, session_one_first.id); -} - -#[test] -fn gitbutler_file() { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - .. - } = &suite.new_case(); - - let session = gb_repository.get_or_create_current_session().unwrap(); - - let gitbutler_file_path = project_repository.path().join(".git/gitbutler.json"); - assert!(gitbutler_file_path.exists()); - - let file_content: serde_json::Value = - serde_json::from_str(&std::fs::read_to_string(&gitbutler_file_path).unwrap()).unwrap(); - let sid: SessionId = file_content["sessionId"].as_str().unwrap().parse().unwrap(); - assert_eq!(sid, session.id); - - let pid: ProjectId = file_content["repositoryId"] - .as_str() - .unwrap() - .parse() - .unwrap(); - assert_eq!(pid, project_repository.project().id); -} diff --git a/gitbutler-app/tests/git/config.rs b/gitbutler-app/tests/git/config.rs deleted file mode 100644 index 730401d70..000000000 --- a/gitbutler-app/tests/git/config.rs +++ /dev/null @@ -1,34 +0,0 @@ -use crate::test_repository; - -#[test] -pub fn set_str() { - let (repo, _tmp) = test_repository(); - let mut config = repo.config().unwrap(); - config.set_str("test.key", "test.value").unwrap(); - assert_eq!( - config.get_string("test.key").unwrap().unwrap(), - "test.value" - ); -} - -#[test] -pub fn set_bool() { - let (repo, _tmp) = test_repository(); - let mut config = repo.config().unwrap(); - config.set_bool("test.key", true).unwrap(); - assert!(config.get_bool("test.key").unwrap().unwrap()); -} - -#[test] -pub fn get_string_none() { - let (repo, _tmp) = test_repository(); - let config = repo.config().unwrap(); - assert_eq!(config.get_string("test.key").unwrap(), None); -} - -#[test] -pub fn get_bool_none() { - let (repo, _tmp) = test_repository(); 
- let config = repo.config().unwrap(); - assert_eq!(config.get_bool("test.key").unwrap(), None); -} diff --git a/gitbutler-app/tests/git/credentials.rs b/gitbutler-app/tests/git/credentials.rs deleted file mode 100644 index f524a05d9..000000000 --- a/gitbutler-app/tests/git/credentials.rs +++ /dev/null @@ -1,312 +0,0 @@ -use gitbutler_app::git::credentials::{Credential, Helper, HttpsCredential, SshCredential}; -use gitbutler_app::{keys, project_repository, projects, users}; -use std::path::PathBuf; - -use crate::{temp_dir, test_repository}; - -#[derive(Default)] -struct TestCase<'a> { - remote_url: &'a str, - github_access_token: Option<&'a str>, - preferred_key: projects::AuthKey, - home_dir: Option, -} - -impl TestCase<'_> { - fn run(&self) -> Vec<(String, Vec)> { - let local_app_data = temp_dir(); - - let users = users::Controller::from_path(&local_app_data); - let user = users::User { - github_access_token: self.github_access_token.map(ToString::to_string), - ..Default::default() - }; - users.set_user(&user).unwrap(); - - let keys = keys::Controller::from_path(&local_app_data); - let helper = Helper::new(keys, users, self.home_dir.clone()); - - let (repo, _tmp) = test_repository(); - repo.remote( - "origin", - &self.remote_url.parse().expect("failed to parse remote url"), - ) - .unwrap(); - let project = projects::Project { - path: repo.workdir().unwrap().to_path_buf(), - preferred_key: self.preferred_key.clone(), - ..Default::default() - }; - let project_repository = project_repository::Repository::open(&project).unwrap(); - - let flow = helper.help(&project_repository, "origin").unwrap(); - flow.into_iter() - .map(|(remote, credentials)| { - ( - remote.url().unwrap().as_ref().unwrap().to_string(), - credentials, - ) - }) - .collect::>() - } -} - -mod not_github { - use super::*; - - mod with_preferred_key { - use super::*; - - #[test] - fn https() { - let test_case = TestCase { - remote_url: "https://gitlab.com/test-gitbutler/test.git", - github_access_token: 
Some("token"), - preferred_key: projects::AuthKey::Local { - private_key_path: PathBuf::from("/tmp/id_rsa"), - }, - ..Default::default() - }; - let flow = test_case.run(); - assert_eq!(flow.len(), 1); - assert_eq!( - flow[0].0, - "git@gitlab.com:test-gitbutler/test.git".to_string(), - ); - assert_eq!( - flow[0].1, - vec![Credential::Ssh(SshCredential::Keyfile { - key_path: PathBuf::from("/tmp/id_rsa"), - passphrase: None, - })] - ); - } - - #[test] - fn ssh() { - let test_case = TestCase { - remote_url: "git@gitlab.com:test-gitbutler/test.git", - github_access_token: Some("token"), - preferred_key: projects::AuthKey::Local { - private_key_path: PathBuf::from("/tmp/id_rsa"), - }, - ..Default::default() - }; - let flow = test_case.run(); - assert_eq!(flow.len(), 1); - assert_eq!( - flow[0].0, - "git@gitlab.com:test-gitbutler/test.git".to_string(), - ); - assert_eq!( - flow[0].1, - vec![Credential::Ssh(SshCredential::Keyfile { - key_path: PathBuf::from("/tmp/id_rsa"), - passphrase: None, - })] - ); - } - } - - mod with_github_token { - use super::*; - - #[test] - fn https() { - let test_case = TestCase { - remote_url: "https://gitlab.com/test-gitbutler/test.git", - github_access_token: Some("token"), - ..Default::default() - }; - let flow = test_case.run(); - - assert_eq!(flow.len(), 1); - - assert_eq!( - flow[0].0, - "git@gitlab.com:test-gitbutler/test.git".to_string(), - ); - assert_eq!(flow[0].1.len(), 1); - assert!(matches!( - flow[0].1[0], - Credential::Ssh(SshCredential::GitButlerKey(_)) - )); - } - - #[test] - fn ssh() { - let test_case = TestCase { - remote_url: "git@gitlab.com:test-gitbutler/test.git", - github_access_token: Some("token"), - ..Default::default() - }; - let flow = test_case.run(); - - assert_eq!(flow.len(), 1); - - assert_eq!( - flow[0].0, - "git@gitlab.com:test-gitbutler/test.git".to_string(), - ); - assert_eq!(flow[0].1.len(), 1); - assert!(matches!( - flow[0].1[0], - Credential::Ssh(SshCredential::GitButlerKey(_)) - )); - } - } -} - -mod 
github { - use super::*; - - mod with_github_token { - use super::*; - - #[test] - fn https() { - let test_case = TestCase { - remote_url: "https://github.com/gitbutlerapp/gitbutler.git", - github_access_token: Some("token"), - ..Default::default() - }; - let flow = test_case.run(); - assert_eq!(flow.len(), 1); - assert_eq!( - flow[0].0, - "https://github.com/gitbutlerapp/gitbutler.git".to_string(), - ); - assert_eq!( - flow[0].1, - vec![Credential::Https(HttpsCredential::GitHubToken( - "token".to_string() - ))] - ); - } - - #[test] - fn ssh() { - let test_case = TestCase { - remote_url: "git@github.com:gitbutlerapp/gitbutler.git", - github_access_token: Some("token"), - ..Default::default() - }; - let flow = test_case.run(); - assert_eq!(flow.len(), 1); - assert_eq!( - flow[0].0, - "https://github.com/gitbutlerapp/gitbutler.git".to_string(), - ); - assert_eq!( - flow[0].1, - vec![Credential::Https(HttpsCredential::GitHubToken( - "token".to_string() - ))] - ); - } - } - - mod without_github_token { - use super::*; - - mod without_preferred_key { - use super::*; - - #[test] - fn https() { - let test_case = TestCase { - remote_url: "https://github.com/gitbutlerapp/gitbutler.git", - ..Default::default() - }; - let flow = test_case.run(); - - assert_eq!(flow.len(), 1); - - assert_eq!( - flow[0].0, - "git@github.com:gitbutlerapp/gitbutler.git".to_string(), - ); - assert_eq!(flow[0].1.len(), 1); - assert!(matches!( - flow[0].1[0], - Credential::Ssh(SshCredential::GitButlerKey(_)) - )); - } - - #[test] - fn ssh() { - let test_case = TestCase { - remote_url: "git@github.com:gitbutlerapp/gitbutler.git", - ..Default::default() - }; - let flow = test_case.run(); - - assert_eq!(flow.len(), 1); - - assert_eq!( - flow[0].0, - "git@github.com:gitbutlerapp/gitbutler.git".to_string(), - ); - assert_eq!(flow[0].1.len(), 1); - assert!(matches!( - flow[0].1[0], - Credential::Ssh(SshCredential::GitButlerKey(_)) - )); - } - } - - mod with_preferred_key { - use super::*; - - #[test] - fn 
https() { - let test_case = TestCase { - remote_url: "https://github.com/gitbutlerapp/gitbutler.git", - github_access_token: Some("token"), - preferred_key: projects::AuthKey::Local { - private_key_path: PathBuf::from("/tmp/id_rsa"), - }, - ..Default::default() - }; - let flow = test_case.run(); - assert_eq!(flow.len(), 1); - assert_eq!( - flow[0].0, - "git@github.com:gitbutlerapp/gitbutler.git".to_string(), - ); - assert_eq!( - flow[0].1, - vec![Credential::Ssh(SshCredential::Keyfile { - key_path: PathBuf::from("/tmp/id_rsa"), - passphrase: None, - })] - ); - } - - #[test] - fn ssh() { - let test_case = TestCase { - remote_url: "git@github.com:gitbutlerapp/gitbutler.git", - github_access_token: Some("token"), - preferred_key: projects::AuthKey::Local { - private_key_path: PathBuf::from("/tmp/id_rsa"), - }, - ..Default::default() - }; - let flow = test_case.run(); - assert_eq!(flow.len(), 1); - assert_eq!( - flow[0].0, - "git@github.com:gitbutlerapp/gitbutler.git".to_string(), - ); - assert_eq!( - flow[0].1, - vec![Credential::Ssh(SshCredential::Keyfile { - key_path: PathBuf::from("/tmp/id_rsa"), - passphrase: None, - })] - ); - } - } - } -} diff --git a/gitbutler-app/tests/git/diff.rs b/gitbutler-app/tests/git/diff.rs deleted file mode 100644 index 1997df884..000000000 --- a/gitbutler-app/tests/git/diff.rs +++ /dev/null @@ -1,457 +0,0 @@ -use std::{collections::HashMap, path, thread, time}; - -use anyhow::Result; -use pretty_assertions::assert_eq; -use tempfile::TempDir; - -use crate::init_opts_bare; -use crate::{Case, Suite}; -use gitbutler_app::{ - deltas::{self, operations::Operation}, - projects::{self, ApiProject, ProjectId}, - reader, - sessions::{self, SessionId}, -}; - -fn new_test_remote_repository() -> Result<(git2::Repository, TempDir)> { - let tmp = tempfile::tempdir()?; - let repo_a = git2::Repository::init_opts(&tmp, &init_opts_bare())?; - Ok((repo_a, tmp)) -} - -#[test] -fn get_current_session_writer_should_use_existing_session() -> Result<()> { - 
let suite = Suite::default(); - let Case { gb_repository, .. } = &suite.new_case(); - - let current_session_1 = gb_repository.get_or_create_current_session()?; - let current_session_2 = gb_repository.get_or_create_current_session()?; - assert_eq!(current_session_1.id, current_session_2.id); - - Ok(()) -} - -#[test] -fn must_not_return_init_session() -> Result<()> { - let suite = Suite::default(); - let Case { gb_repository, .. } = &suite.new_case(); - - assert!(gb_repository.get_current_session()?.is_none()); - - let iter = gb_repository.get_sessions_iterator()?; - assert_eq!(iter.count(), 0); - - Ok(()) -} - -#[test] -fn must_not_flush_without_current_session() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - .. - } = &suite.new_case(); - - let session = gb_repository.flush(project_repository, None)?; - assert!(session.is_none()); - - let iter = gb_repository.get_sessions_iterator()?; - assert_eq!(iter.count(), 0); - - Ok(()) -} - -#[test] -fn non_empty_repository() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - .. - } = &suite.new_case_with_files(HashMap::from([(path::PathBuf::from("test.txt"), "test")])); - - gb_repository.get_or_create_current_session()?; - gb_repository.flush(project_repository, None)?; - - Ok(()) -} - -#[test] -fn must_flush_current_session() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - .. - } = &suite.new_case(); - - gb_repository.get_or_create_current_session()?; - - let session = gb_repository.flush(project_repository, None)?; - assert!(session.is_some()); - - let iter = gb_repository.get_sessions_iterator()?; - assert_eq!(iter.count(), 1); - - Ok(()) -} - -#[test] -fn list_deltas_from_current_session() -> Result<()> { - let suite = Suite::default(); - let Case { gb_repository, .. 
} = &suite.new_case(); - - let current_session = gb_repository.get_or_create_current_session()?; - let writer = deltas::Writer::new(gb_repository)?; - writer.write( - "test.txt", - &vec![deltas::Delta { - operations: vec![Operation::Insert((0, "Hello World".to_string()))], - timestamp_ms: 0, - }], - )?; - - let session_reader = sessions::Reader::open(gb_repository, ¤t_session)?; - let deltas_reader = deltas::Reader::new(&session_reader); - let deltas = deltas_reader.read(None)?; - - assert_eq!(deltas.len(), 1); - assert_eq!( - deltas[&path::PathBuf::from("test.txt")][0].operations.len(), - 1 - ); - assert_eq!( - deltas[&path::PathBuf::from("test.txt")][0].operations[0], - Operation::Insert((0, "Hello World".to_string())) - ); - - Ok(()) -} - -#[test] -fn list_deltas_from_flushed_session() { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - .. - } = &suite.new_case(); - - let writer = deltas::Writer::new(gb_repository).unwrap(); - writer - .write( - "test.txt", - &vec![deltas::Delta { - operations: vec![Operation::Insert((0, "Hello World".to_string()))], - timestamp_ms: 0, - }], - ) - .unwrap(); - let session = gb_repository.flush(project_repository, None).unwrap(); - - let session_reader = sessions::Reader::open(gb_repository, &session.unwrap()).unwrap(); - let deltas_reader = deltas::Reader::new(&session_reader); - let deltas = deltas_reader.read(None).unwrap(); - - assert_eq!(deltas.len(), 1); - assert_eq!( - deltas[&path::PathBuf::from("test.txt")][0].operations.len(), - 1 - ); - assert_eq!( - deltas[&path::PathBuf::from("test.txt")][0].operations[0], - Operation::Insert((0, "Hello World".to_string())) - ); -} - -#[test] -fn list_files_from_current_session() { - let suite = Suite::default(); - let Case { gb_repository, .. 
} = &suite.new_case_with_files(HashMap::from([( - path::PathBuf::from("test.txt"), - "Hello World", - )])); - - let current = gb_repository.get_or_create_current_session().unwrap(); - let reader = sessions::Reader::open(gb_repository, ¤t).unwrap(); - let files = reader.files(None).unwrap(); - - assert_eq!(files.len(), 1); - assert_eq!( - files[&path::PathBuf::from("test.txt")], - reader::Content::UTF8("Hello World".to_string()) - ); -} - -#[test] -fn list_files_from_flushed_session() { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - .. - } = &suite.new_case_with_files(HashMap::from([( - path::PathBuf::from("test.txt"), - "Hello World", - )])); - - gb_repository.get_or_create_current_session().unwrap(); - let session = gb_repository - .flush(project_repository, None) - .unwrap() - .unwrap(); - let reader = sessions::Reader::open(gb_repository, &session).unwrap(); - let files = reader.files(None).unwrap(); - - assert_eq!(files.len(), 1); - assert_eq!( - files[&path::PathBuf::from("test.txt")], - reader::Content::UTF8("Hello World".to_string()) - ); -} - -#[tokio::test] -async fn remote_syncronization() { - // first, crate a remote, pretending it's a cloud - let (cloud, _tmp) = new_test_remote_repository().unwrap(); - let api_project = ApiProject { - name: "test-sync".to_string(), - description: None, - repository_id: "123".to_string(), - git_url: cloud.path().to_str().unwrap().to_string(), - code_git_url: None, - created_at: 0_i32.to_string(), - updated_at: 0_i32.to_string(), - sync: true, - }; - - let suite = Suite::default(); - let user = suite.sign_in(); - - // create first local project, add files, deltas and flush a session - let case_one = suite.new_case_with_files(HashMap::from([( - path::PathBuf::from("test.txt"), - "Hello World", - )])); - suite - .projects - .update(&projects::UpdateRequest { - id: case_one.project.id, - api: Some(api_project.clone()), - ..Default::default() - }) - .await - .unwrap(); - let case_one = 
case_one.refresh(); - - let writer = deltas::Writer::new(&case_one.gb_repository).unwrap(); - writer - .write( - "test.txt", - &vec![deltas::Delta { - operations: vec![Operation::Insert((0, "Hello World".to_string()))], - timestamp_ms: 0, - }], - ) - .unwrap(); - let session_one = case_one - .gb_repository - .flush(&case_one.project_repository, Some(&user)) - .unwrap() - .unwrap(); - case_one.gb_repository.push(Some(&user)).unwrap(); - - // create second local project, fetch it and make sure session is there - let case_two = suite.new_case(); - suite - .projects - .update(&projects::UpdateRequest { - id: case_two.project.id, - api: Some(api_project.clone()), - ..Default::default() - }) - .await - .unwrap(); - let case_two = case_two.refresh(); - - case_two.gb_repository.fetch(Some(&user)).unwrap(); - - // now it should have the session from the first local project synced - let sessions_two = case_two - .gb_repository - .get_sessions_iterator() - .unwrap() - .map(Result::unwrap) - .collect::>(); - assert_eq!(sessions_two.len(), 1); - assert_eq!(sessions_two[0].id, session_one.id); - - let session_reader = sessions::Reader::open(&case_two.gb_repository, &sessions_two[0]).unwrap(); - let deltas_reader = deltas::Reader::new(&session_reader); - let deltas = deltas_reader.read(None).unwrap(); - let files = session_reader.files(None).unwrap(); - assert_eq!(deltas.len(), 1); - assert_eq!(files.len(), 1); - assert_eq!( - files[&path::PathBuf::from("test.txt")], - reader::Content::UTF8("Hello World".to_string()) - ); - assert_eq!( - deltas[&path::PathBuf::from("test.txt")], - vec![deltas::Delta { - operations: vec![Operation::Insert((0, "Hello World".to_string()))], - timestamp_ms: 0, - }] - ); -} - -#[tokio::test] -async fn remote_sync_order() { - // first, crate a remote, pretending it's a cloud - let (cloud, _tmp) = new_test_remote_repository().unwrap(); - let api_project = projects::ApiProject { - name: "test-sync".to_string(), - description: None, - repository_id: 
"123".to_string(), - git_url: cloud.path().to_str().unwrap().to_string(), - code_git_url: None, - created_at: 0_i32.to_string(), - updated_at: 0_i32.to_string(), - sync: true, - }; - - let suite = Suite::default(); - - let case_one = suite.new_case(); - suite - .projects - .update(&projects::UpdateRequest { - id: case_one.project.id, - api: Some(api_project.clone()), - ..Default::default() - }) - .await - .unwrap(); - let case_one = case_one.refresh(); - - let case_two = suite.new_case(); - suite - .projects - .update(&projects::UpdateRequest { - id: case_two.project.id, - api: Some(api_project.clone()), - ..Default::default() - }) - .await - .unwrap(); - let case_two = case_two.refresh(); - - let user = suite.sign_in(); - - // create session in the first project - case_one - .gb_repository - .get_or_create_current_session() - .unwrap(); - let session_one_first = case_one - .gb_repository - .flush(&case_one.project_repository, Some(&user)) - .unwrap() - .unwrap(); - case_one.gb_repository.push(Some(&user)).unwrap(); - - thread::sleep(time::Duration::from_secs(1)); - - // create session in the second project - case_two - .gb_repository - .get_or_create_current_session() - .unwrap(); - let session_two_first = case_two - .gb_repository - .flush(&case_two.project_repository, Some(&user)) - .unwrap() - .unwrap(); - case_two.gb_repository.push(Some(&user)).unwrap(); - - thread::sleep(time::Duration::from_secs(1)); - - // create second session in the first project - case_one - .gb_repository - .get_or_create_current_session() - .unwrap(); - let session_one_second = case_one - .gb_repository - .flush(&case_one.project_repository, Some(&user)) - .unwrap() - .unwrap(); - case_one.gb_repository.push(Some(&user)).unwrap(); - - thread::sleep(time::Duration::from_secs(1)); - - // create second session in the second project - case_two - .gb_repository - .get_or_create_current_session() - .unwrap(); - let session_two_second = case_two - .gb_repository - 
.flush(&case_two.project_repository, Some(&user)) - .unwrap() - .unwrap(); - case_two.gb_repository.push(Some(&user)).unwrap(); - - case_one.gb_repository.fetch(Some(&user)).unwrap(); - let sessions_one = case_one - .gb_repository - .get_sessions_iterator() - .unwrap() - .map(Result::unwrap) - .collect::>(); - - case_two.gb_repository.fetch(Some(&user)).unwrap(); - let sessions_two = case_two - .gb_repository - .get_sessions_iterator() - .unwrap() - .map(Result::unwrap) - .collect::>(); - - // make sure the sessions are the same on both repos - assert_eq!(sessions_one.len(), 4); - assert_eq!(sessions_two, sessions_one); - - assert_eq!(sessions_one[0].id, session_two_second.id); - assert_eq!(sessions_one[1].id, session_one_second.id); - assert_eq!(sessions_one[2].id, session_two_first.id); - assert_eq!(sessions_one[3].id, session_one_first.id); -} - -#[test] -fn gitbutler_file() { - let suite = Suite::default(); - let Case { - gb_repository, - project_repository, - .. - } = &suite.new_case(); - - let session = gb_repository.get_or_create_current_session().unwrap(); - - let gitbutler_file_path = project_repository.path().join(".git/gitbutler.json"); - assert!(gitbutler_file_path.exists()); - - let file_content: serde_json::Value = - serde_json::from_str(&std::fs::read_to_string(&gitbutler_file_path).unwrap()).unwrap(); - let sid: SessionId = file_content["sessionId"].as_str().unwrap().parse().unwrap(); - assert_eq!(sid, session.id); - - let pid: ProjectId = file_content["repositoryId"] - .as_str() - .unwrap() - .parse() - .unwrap(); - assert_eq!(pid, project_repository.project().id); -} diff --git a/gitbutler-app/tests/git/mod.rs b/gitbutler-app/tests/git/mod.rs deleted file mode 100644 index 23bc6d0b7..000000000 --- a/gitbutler-app/tests/git/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod config; -mod credentials; -mod diff; diff --git a/gitbutler-app/tests/keys/mod.rs b/gitbutler-app/tests/keys/mod.rs deleted file mode 100644 index 1655d4221..000000000 --- 
a/gitbutler-app/tests/keys/mod.rs +++ /dev/null @@ -1,65 +0,0 @@ -use gitbutler_app::keys::{PrivateKey, PublicKey}; - -mod controller { - #[cfg(not(target_os = "windows"))] - mod not_windows { - use gitbutler_app::keys::storage::Storage; - use gitbutler_app::keys::Controller; - use std::fs; - #[cfg(target_family = "unix")] - use std::os::unix::prelude::*; - - use crate::Suite; - - #[test] - fn get_or_create() { - let suite = Suite::default(); - let controller = Controller::new(Storage::from_path(suite.local_app_data())); - - let once = controller.get_or_create().unwrap(); - let twice = controller.get_or_create().unwrap(); - assert_eq!(once, twice); - - // check permissions of the private key - let permissions = fs::metadata(suite.local_app_data().join("keys/ed25519")) - .unwrap() - .permissions(); - let perms = format!("{:o}", permissions.mode()); - assert_eq!(perms, "100600"); - } - } -} - -#[test] -fn to_from_string_private() { - let private_key = PrivateKey::generate(); - let serialized = private_key.to_string(); - let deserialized: PrivateKey = serialized.parse().unwrap(); - assert_eq!(private_key, deserialized); -} - -#[test] -fn to_from_string_public() { - let private_key = PrivateKey::generate(); - let public_key = private_key.public_key(); - let serialized = public_key.to_string(); - let deserialized: PublicKey = serialized.parse().unwrap(); - assert_eq!(public_key, deserialized); -} - -#[test] -fn serde_private() { - let private_key = PrivateKey::generate(); - let serialized = serde_json::to_string(&private_key).unwrap(); - let deserialized: PrivateKey = serde_json::from_str(&serialized).unwrap(); - assert_eq!(private_key, deserialized); -} - -#[test] -fn serde_public() { - let private_key = PrivateKey::generate(); - let public_key = private_key.public_key(); - let serialized = serde_json::to_string(&public_key).unwrap(); - let deserialized: PublicKey = serde_json::from_str(&serialized).unwrap(); - assert_eq!(public_key, deserialized); -} diff --git 
a/gitbutler-app/tests/lock/mod.rs b/gitbutler-app/tests/lock/mod.rs deleted file mode 100644 index 269e5e6e7..000000000 --- a/gitbutler-app/tests/lock/mod.rs +++ /dev/null @@ -1,91 +0,0 @@ -use gitbutler_app::lock::Dir; - -use crate::temp_dir; - -#[tokio::test] -async fn lock_same_instance() { - let dir_path = temp_dir(); - std::fs::write(dir_path.path().join("file.txt"), "").unwrap(); - let dir = Dir::new(dir_path.path()).unwrap(); - - let (tx, rx) = std::sync::mpsc::sync_channel(1); - - // spawn a task that will signal right after aquireing the lock - let _ = tokio::spawn({ - let dir = dir.clone(); - async move { - dir.batch(|root| { - tx.send(()).unwrap(); - assert_eq!( - std::fs::read_to_string(root.join("file.txt")).unwrap(), - String::new() - ); - std::fs::write(root.join("file.txt"), "1") - }) - } - }) - .await - .unwrap(); - - // then we wait until the lock is aquired - rx.recv().unwrap(); - - // and immidiately try to lock again - dir.batch(|root| { - assert_eq!(std::fs::read_to_string(root.join("file.txt")).unwrap(), "1"); - std::fs::write(root.join("file.txt"), "2") - }) - .unwrap() - .unwrap(); - - assert_eq!( - std::fs::read_to_string(dir_path.path().join("file.txt")).unwrap(), - "2" - ); -} - -#[tokio::test] -async fn lock_different_instances() { - let dir_path = temp_dir(); - std::fs::write(dir_path.path().join("file.txt"), "").unwrap(); - - let (tx, rx) = std::sync::mpsc::sync_channel(1); - - // spawn a task that will signal right after aquireing the lock - let _ = tokio::spawn({ - let dir_path = dir_path.path().to_owned(); - async move { - // one dir instance is created on a separate thread - let dir = Dir::new(&dir_path).unwrap(); - dir.batch(|root| { - tx.send(()).unwrap(); - assert_eq!( - std::fs::read_to_string(root.join("file.txt")).unwrap(), - String::new() - ); - std::fs::write(root.join("file.txt"), "1") - }) - } - }) - .await - .unwrap(); - - // another dir instance is created on the main thread - let dir = Dir::new(&dir_path).unwrap(); - 
- // then we wait until the lock is aquired - rx.recv().unwrap(); - - // and immidiately try to lock again - dir.batch(|root| { - assert_eq!(std::fs::read_to_string(root.join("file.txt")).unwrap(), "1"); - std::fs::write(root.join("file.txt"), "2") - }) - .unwrap() - .unwrap(); - - assert_eq!( - std::fs::read_to_string(dir_path.path().join("file.txt")).unwrap(), - "2" - ); -} diff --git a/gitbutler-app/tests/reader/mod.rs b/gitbutler-app/tests/reader/mod.rs deleted file mode 100644 index 3c60815cd..000000000 --- a/gitbutler-app/tests/reader/mod.rs +++ /dev/null @@ -1,183 +0,0 @@ -use gitbutler_app::reader::{CommitReader, Content, Reader}; -use std::fs; -use std::path::Path; - -use crate::{commit_all, temp_dir, test_repository}; -use anyhow::Result; - -#[test] -fn directory_reader_read_file() -> Result<()> { - let dir = temp_dir(); - - let file_path = Path::new("test.txt"); - fs::write(dir.path().join(file_path), "test")?; - - let reader = Reader::open(dir.path())?; - assert_eq!(reader.read(file_path)?, Content::UTF8("test".to_string())); - - Ok(()) -} - -#[test] -fn commit_reader_read_file() -> Result<()> { - let (repository, _tmp) = test_repository(); - - let file_path = Path::new("test.txt"); - fs::write(repository.path().parent().unwrap().join(file_path), "test")?; - - let oid = commit_all(&repository); - - fs::write(repository.path().parent().unwrap().join(file_path), "test2")?; - - let reader = Reader::from_commit(&repository, &repository.find_commit(oid)?)?; - assert_eq!(reader.read(file_path)?, Content::UTF8("test".to_string())); - - Ok(()) -} - -#[test] -fn reader_list_files_should_return_relative() -> Result<()> { - let dir = temp_dir(); - - fs::write(dir.path().join("test1.txt"), "test")?; - fs::create_dir_all(dir.path().join("dir"))?; - fs::write(dir.path().join("dir").join("test.txt"), "test")?; - - let reader = Reader::open(dir.path())?; - let files = reader.list_files(Path::new("dir"))?; - assert_eq!(files.len(), 1); - 
assert!(files.contains(&Path::new("test.txt").to_path_buf())); - - Ok(()) -} - -#[test] -fn reader_list_files() -> Result<()> { - let dir = temp_dir(); - - fs::write(dir.path().join("test.txt"), "test")?; - fs::create_dir_all(dir.path().join("dir"))?; - fs::write(dir.path().join("dir").join("test.txt"), "test")?; - - let reader = Reader::open(dir.path())?; - let files = reader.list_files(Path::new(""))?; - assert_eq!(files.len(), 2); - assert!(files.contains(&Path::new("test.txt").to_path_buf())); - assert!(files.contains(&Path::new("dir/test.txt").to_path_buf())); - - Ok(()) -} - -#[test] -fn commit_reader_list_files_should_return_relative() -> Result<()> { - let (repository, _tmp) = test_repository(); - - fs::write( - repository.path().parent().unwrap().join("test1.txt"), - "test", - )?; - fs::create_dir_all(repository.path().parent().unwrap().join("dir"))?; - fs::write( - repository - .path() - .parent() - .unwrap() - .join("dir") - .join("test.txt"), - "test", - )?; - - let oid = commit_all(&repository); - - fs::remove_dir_all(repository.path().parent().unwrap().join("dir"))?; - - let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?; - let files = reader.list_files(Path::new("dir"))?; - assert_eq!(files.len(), 1); - assert!(files.contains(&Path::new("test.txt").to_path_buf())); - - Ok(()) -} - -#[test] -fn commit_reader_list_files() -> Result<()> { - let (repository, _tmp) = test_repository(); - - fs::write(repository.path().parent().unwrap().join("test.txt"), "test")?; - fs::create_dir_all(repository.path().parent().unwrap().join("dir"))?; - fs::write( - repository - .path() - .parent() - .unwrap() - .join("dir") - .join("test.txt"), - "test", - )?; - - let oid = commit_all(&repository); - - fs::remove_dir_all(repository.path().parent().unwrap().join("dir"))?; - - let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?; - let files = reader.list_files(Path::new(""))?; - assert_eq!(files.len(), 2); - 
assert!(files.contains(&Path::new("test.txt").to_path_buf())); - assert!(files.contains(&Path::new("dir/test.txt").to_path_buf())); - - Ok(()) -} - -#[test] -fn directory_reader_exists() -> Result<()> { - let dir = temp_dir(); - - fs::write(dir.path().join("test.txt"), "test")?; - - let reader = Reader::open(dir.path())?; - assert!(reader.exists(Path::new("test.txt"))?); - assert!(!reader.exists(Path::new("test2.txt"))?); - - Ok(()) -} - -#[test] -fn commit_reader_exists() -> Result<()> { - let (repository, _tmp) = test_repository(); - - fs::write(repository.path().parent().unwrap().join("test.txt"), "test")?; - - let oid = commit_all(&repository); - - fs::remove_file(repository.path().parent().unwrap().join("test.txt"))?; - - let reader = CommitReader::new(&repository, &repository.find_commit(oid)?)?; - assert!(reader.exists(Path::new("test.txt"))); - assert!(!reader.exists(Path::new("test2.txt"))); - - Ok(()) -} - -#[test] -fn from_bytes() { - for (bytes, expected) in [ - ("test".as_bytes(), Content::UTF8("test".to_string())), - (&[0, 159, 146, 150, 159, 146, 150], Content::Binary), - ] { - assert_eq!(Content::from(bytes), expected); - } -} - -#[test] -fn serialize_content() { - for (content, expected) in [ - ( - Content::UTF8("test".to_string()), - r#"{"type":"utf8","value":"test"}"#, - ), - (Content::Binary, r#"{"type":"binary"}"#), - (Content::Large, r#"{"type":"large"}"#), - ] { - assert_eq!(serde_json::to_string(&content).unwrap(), expected); - } -} diff --git a/gitbutler-app/tests/sessions/database.rs b/gitbutler-app/tests/sessions/database.rs deleted file mode 100644 index 67abb17d8..000000000 --- a/gitbutler-app/tests/sessions/database.rs +++ /dev/null @@ -1,84 +0,0 @@ -use crate::test_database; -use gitbutler_app::projects::ProjectId; -use gitbutler_app::sessions::{session, Database, Session, SessionId}; - -#[test] -fn insert_query() -> anyhow::Result<()> { - let (db, _tmp) = test_database(); - println!("0"); - let database = Database::new(db); - 
println!("1"); - - let project_id = ProjectId::generate(); - let session1 = Session { - id: SessionId::generate(), - hash: None, - meta: session::Meta { - branch: None, - commit: None, - start_timestamp_ms: 1, - last_timestamp_ms: 2, - }, - }; - let session2 = session::Session { - id: SessionId::generate(), - hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()), - meta: session::Meta { - branch: Some("branch2".to_string()), - commit: Some("commit2".to_string()), - start_timestamp_ms: 3, - last_timestamp_ms: 4, - }, - }; - let sessions = vec![&session1, &session2]; - - database.insert(&project_id, &sessions)?; - - assert_eq!( - database.list_by_project_id(&project_id, None)?, - vec![session2.clone(), session1.clone()] - ); - assert_eq!(database.get_by_id(&session1.id)?.unwrap(), session1); - assert_eq!(database.get_by_id(&session2.id)?.unwrap(), session2); - assert_eq!(database.get_by_id(&SessionId::generate())?, None); - - Ok(()) -} - -#[test] -fn update() -> anyhow::Result<()> { - let (db, _tmp) = test_database(); - let database = Database::new(db); - - let project_id = ProjectId::generate(); - let session = session::Session { - id: SessionId::generate(), - hash: None, - meta: session::Meta { - branch: None, - commit: None, - start_timestamp_ms: 1, - last_timestamp_ms: 2, - }, - }; - let session_updated = session::Session { - id: session.id, - hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()), - meta: session::Meta { - branch: Some("branch2".to_string()), - commit: Some("commit2".to_string()), - start_timestamp_ms: 3, - last_timestamp_ms: 4, - }, - }; - database.insert(&project_id, &[&session])?; - database.insert(&project_id, &[&session_updated])?; - - assert_eq!( - database.list_by_project_id(&project_id, None)?, - vec![session_updated.clone()] - ); - assert_eq!(database.get_by_id(&session.id)?.unwrap(), session_updated); - - Ok(()) -} diff --git a/gitbutler-app/tests/sessions/mod.rs b/gitbutler-app/tests/sessions/mod.rs 
deleted file mode 100644 index f6f6287d0..000000000 --- a/gitbutler-app/tests/sessions/mod.rs +++ /dev/null @@ -1,106 +0,0 @@ -mod database; - -use anyhow::Result; - -use crate::{Case, Suite}; -use gitbutler_app::sessions::{self, session::SessionId}; - -#[test] -fn should_not_write_session_with_hash() { - let suite = Suite::default(); - let Case { gb_repository, .. } = &suite.new_case(); - - let session = sessions::Session { - id: SessionId::generate(), - hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()), - meta: sessions::Meta { - start_timestamp_ms: 0, - last_timestamp_ms: 1, - branch: Some("branch".to_string()), - commit: Some("commit".to_string()), - }, - }; - - assert!(sessions::Writer::new(gb_repository) - .unwrap() - .write(&session) - .is_err()); -} - -#[test] -fn should_write_full_session() -> Result<()> { - let suite = Suite::default(); - let Case { gb_repository, .. } = &suite.new_case(); - - let session = sessions::Session { - id: SessionId::generate(), - hash: None, - meta: sessions::Meta { - start_timestamp_ms: 0, - last_timestamp_ms: 1, - branch: Some("branch".to_string()), - commit: Some("commit".to_string()), - }, - }; - - sessions::Writer::new(gb_repository)?.write(&session)?; - - assert_eq!( - std::fs::read_to_string(gb_repository.session_path().join("meta/id"))?, - session.id.to_string() - ); - assert_eq!( - std::fs::read_to_string(gb_repository.session_path().join("meta/commit"))?, - "commit" - ); - assert_eq!( - std::fs::read_to_string(gb_repository.session_path().join("meta/branch"))?, - "branch" - ); - assert_eq!( - std::fs::read_to_string(gb_repository.session_path().join("meta/start"))?, - "0" - ); - assert_ne!( - std::fs::read_to_string(gb_repository.session_path().join("meta/last"))?, - "1" - ); - - Ok(()) -} - -#[test] -fn should_write_partial_session() -> Result<()> { - let suite = Suite::default(); - let Case { gb_repository, .. 
} = &suite.new_case(); - - let session = sessions::Session { - id: SessionId::generate(), - hash: None, - meta: sessions::Meta { - start_timestamp_ms: 0, - last_timestamp_ms: 1, - branch: None, - commit: None, - }, - }; - - sessions::Writer::new(gb_repository)?.write(&session)?; - - assert_eq!( - std::fs::read_to_string(gb_repository.session_path().join("meta/id"))?, - session.id.to_string() - ); - assert!(!gb_repository.session_path().join("meta/commit").exists()); - assert!(!gb_repository.session_path().join("meta/branch").exists()); - assert_eq!( - std::fs::read_to_string(gb_repository.session_path().join("meta/start"))?, - "0" - ); - assert_ne!( - std::fs::read_to_string(gb_repository.session_path().join("meta/last"))?, - "1" - ); - - Ok(()) -} diff --git a/gitbutler-app/tests/suite/gb_repository.rs b/gitbutler-app/tests/suite/gb_repository.rs deleted file mode 100644 index a1942bff6..000000000 --- a/gitbutler-app/tests/suite/gb_repository.rs +++ /dev/null @@ -1,149 +0,0 @@ -use crate::common::{paths, TestProject}; -use gitbutler_app::{gb_repository, git, project_repository, projects}; -use std::path; - -mod init { - use super::*; - - #[test] - fn handle_file_symlink() { - let test_project = TestProject::default(); - - let data_dir = paths::data_dir(); - let projects = projects::Controller::from_path(&data_dir); - - let project = projects - .add(test_project.path()) - .expect("failed to add project"); - - std::fs::write(project.path.join("file"), "content").unwrap(); - std::fs::hard_link(project.path.join("file"), project.path.join("link")).unwrap(); - - let project_repository = project_repository::Repository::open(&project).unwrap(); - - gb_repository::Repository::open(data_dir.path(), &project_repository, None).unwrap(); - } - - #[test] - #[cfg(target_family = "unix")] - fn handle_dir_symlink() { - let test_project = TestProject::default(); - - let data_dir = paths::data_dir(); - let projects = projects::Controller::from_path(&data_dir); - - let project = 
projects - .add(test_project.path()) - .expect("failed to add project"); - - std::fs::create_dir_all(project.path.join("dir")).unwrap(); - std::fs::write(project.path.join("dir/file"), "content").unwrap(); - std::os::unix::fs::symlink(project.path.join("dir"), project.path.join("dir_link")) - .unwrap(); - - let project_repository = project_repository::Repository::open(&project).unwrap(); - - gb_repository::Repository::open(data_dir.path(), &project_repository, None).unwrap(); - } - - #[test] - #[cfg(target_family = "unix")] - fn handle_dir_symlink_symlink() { - let test_project = TestProject::default(); - - let data_dir = paths::data_dir(); - let projects = projects::Controller::from_path(&data_dir); - - let project = projects - .add(test_project.path()) - .expect("failed to add project"); - - std::fs::create_dir_all(project.path.join("dir")).unwrap(); - std::fs::write(project.path.join("dir/file"), "content").unwrap(); - std::os::unix::fs::symlink(project.path.join("dir"), project.path.join("dir_link")) - .unwrap(); - std::os::unix::fs::symlink( - project.path.join("dir_link"), - project.path.join("link_link"), - ) - .unwrap(); - - let project_repository = project_repository::Repository::open(&project).unwrap(); - - gb_repository::Repository::open(data_dir.path(), &project_repository, None).unwrap(); - } -} - -mod flush { - use super::*; - - #[test] - fn handle_file_symlink() { - let test_project = TestProject::default(); - - let data_dir = paths::data_dir(); - let projects = projects::Controller::from_path(&data_dir); - - let project = projects - .add(test_project.path()) - .expect("failed to add project"); - - let project_repository = project_repository::Repository::open(&project).unwrap(); - - let gb_repo = - gb_repository::Repository::open(data_dir.path(), &project_repository, None).unwrap(); - - std::fs::write(project.path.join("file"), "content").unwrap(); - std::fs::hard_link(project.path.join("file"), project.path.join("link")).unwrap(); - - 
gb_repo.flush(&project_repository, None).unwrap(); - } - - #[test] - #[cfg(target_family = "unix")] - fn handle_dir_symlink() { - let test_project = TestProject::default(); - - let data_dir = paths::data_dir(); - let projects = projects::Controller::from_path(&data_dir); - - let project = projects - .add(test_project.path()) - .expect("failed to add project"); - - let project_repository = project_repository::Repository::open(&project).unwrap(); - - let gb_repo = - gb_repository::Repository::open(data_dir.path(), &project_repository, None).unwrap(); - - std::fs::create_dir_all(project.path.join("dir")).unwrap(); - std::fs::write(project.path.join("dir/file"), "content").unwrap(); - std::os::unix::fs::symlink(project.path.join("dir"), project.path.join("dir_link")) - .unwrap(); - - gb_repo.flush(&project_repository, None).unwrap(); - } - - #[test] - fn handle_submodules() { - let test_project = TestProject::default(); - - let data_dir = paths::data_dir(); - let projects = projects::Controller::from_path(&data_dir); - - let project = projects - .add(test_project.path()) - .expect("failed to add project"); - - let project_repository = project_repository::Repository::open(&project).unwrap(); - - let gb_repo = - gb_repository::Repository::open(data_dir.path(), &project_repository, None).unwrap(); - - let project = TestProject::default(); - let submodule_url: git::Url = project.path().display().to_string().parse().unwrap(); - test_project.add_submodule(&submodule_url, path::Path::new("submodule")); - - gb_repo.flush(&project_repository, None).unwrap(); - } -} diff --git a/gitbutler-app/tests/suite/projects.rs b/gitbutler-app/tests/suite/projects.rs deleted file mode 100644 index eb4459dd8..000000000 --- a/gitbutler-app/tests/suite/projects.rs +++ /dev/null @@ -1,71 +0,0 @@ -use gitbutler_app::projects::Controller; -use tempfile::TempDir; - -use crate::common::{self, paths}; - -pub fn new() -> (Controller, TempDir) { - let data_dir = paths::data_dir(); - let controller = 
Controller::from_path(&data_dir); - (controller, data_dir) -} - -mod add { - use super::*; - - #[test] - fn success() { - let (controller, _tmp) = new(); - let repository = common::TestProject::default(); - let path = repository.path(); - let project = controller.add(path).unwrap(); - assert_eq!(project.path, path); - assert_eq!(project.title, path.iter().last().unwrap().to_str().unwrap()); - } - - mod error { - use gitbutler_app::projects::AddError; - - use super::*; - - #[test] - fn missing() { - let (controller, _tmp) = new(); - let tmp = tempfile::tempdir().unwrap(); - assert!(matches!( - controller.add(tmp.path().join("missing")), - Err(AddError::PathNotFound) - )); - } - - #[test] - fn not_git() { - let (controller, _tmp) = new(); - let tmp = tempfile::tempdir().unwrap(); - let path = tmp.path(); - std::fs::write(path.join("file.txt"), "hello world").unwrap(); - assert!(matches!( - controller.add(path), - Err(AddError::NotAGitRepository) - )); - } - - #[test] - fn empty() { - let (controller, _tmp) = new(); - let tmp = tempfile::tempdir().unwrap(); - assert!(matches!( - controller.add(tmp.path()), - Err(AddError::NotAGitRepository) - )); - } - - #[test] - fn twice() { - let (controller, _tmp) = new(); - let repository = common::TestProject::default(); - let path = repository.path(); - controller.add(path).unwrap(); - assert!(matches!(controller.add(path), Err(AddError::AlreadyExists))); - } - } -} diff --git a/gitbutler-app/tests/suite/virtual_branches/amend.rs b/gitbutler-app/tests/suite/virtual_branches/amend.rs deleted file mode 100644 index 3465e904f..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/amend.rs +++ /dev/null @@ -1,352 +0,0 @@ -use super::*; - -#[tokio::test] -async fn to_default_target() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - // amend without head commit - fs::write(repository.path().join("file2.txt"), "content").unwrap(); - let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); - assert!(matches!( - controller - .amend(project_id, &branch_id, &to_amend) - .await - .unwrap_err(), - ControllerError::Action(errors::AmendError::BranchHasNoCommits) - )); -} - -#[tokio::test] -async fn forcepush_allowed() { - let Test { - repository, - project_id, - controller, - projects, - .. - } = &Test::default(); - - projects - .update(&projects::UpdateRequest { - id: *project_id, - ok_with_force_push: Some(false), - ..Default::default() - }) - .await - .unwrap(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - projects - .update(&projects::UpdateRequest { - id: *project_id, - ok_with_force_push: Some(true), - ..Default::default() - }) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // create commit - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap(); - }; - - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - { - // amend another hunk - fs::write(repository.path().join("file2.txt"), "content2").unwrap(); - let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); - controller - .amend(project_id, &branch_id, &to_amend) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - 
.into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert!(branch.requires_force); - assert_eq!(branch.commits.len(), 1); - assert_eq!(branch.files.len(), 0); - assert_eq!(branch.commits[0].files.len(), 2); - } -} - -#[tokio::test] -async fn forcepush_forbidden() { - let Test { - repository, - project_id, - controller, - projects, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - projects - .update(&projects::UpdateRequest { - id: *project_id, - ok_with_force_push: Some(false), - ..Default::default() - }) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // create commit - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap(); - }; - - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - { - fs::write(repository.path().join("file2.txt"), "content2").unwrap(); - let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); - assert!(matches!( - controller - .amend(project_id, &branch_id, &to_amend) - .await - .unwrap_err(), - ControllerError::Action(errors::AmendError::ForcePushNotAllowed(_)) - )); - } -} - -#[tokio::test] -async fn non_locked_hunk() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // create commit - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.commits.len(), 1); - assert_eq!(branch.files.len(), 0); - assert_eq!(branch.commits[0].files.len(), 1); - }; - - { - // amend another hunk - fs::write(repository.path().join("file2.txt"), "content2").unwrap(); - let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); - controller - .amend(project_id, &branch_id, &to_amend) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.commits.len(), 1); - assert_eq!(branch.files.len(), 0); - assert_eq!(branch.commits[0].files.len(), 2); - } -} - -#[tokio::test] -async fn locked_hunk() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // create commit - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.commits.len(), 1); - assert_eq!(branch.files.len(), 0); - assert_eq!(branch.commits[0].files.len(), 1); - assert_eq!( - branch.commits[0].files[0].hunks[0].diff, - "@@ -0,0 +1 @@\n+content\n\\ No newline at end of file\n" - ); - }; - - { - // amend another hunk - fs::write(repository.path().join("file.txt"), "more content").unwrap(); - let to_amend: branch::BranchOwnershipClaims = "file.txt:1-2".parse().unwrap(); - controller - .amend(project_id, &branch_id, &to_amend) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - - assert_eq!(branch.commits.len(), 1); - assert_eq!(branch.files.len(), 0); - assert_eq!(branch.commits[0].files.len(), 1); - assert_eq!( - branch.commits[0].files[0].hunks[0].diff, - "@@ -0,0 +1 @@\n+more content\n\\ No newline at end of file\n" - ); - } -} - -#[tokio::test] -async fn non_existing_ownership() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // create commit - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.commits.len(), 1); - assert_eq!(branch.files.len(), 0); - assert_eq!(branch.commits[0].files.len(), 1); - }; - - { - // amend non existing hunk - let to_amend: branch::BranchOwnershipClaims = "file2.txt:1-2".parse().unwrap(); - assert!(matches!( - controller - .amend(project_id, &branch_id, &to_amend) - .await - .unwrap_err(), - ControllerError::Action(errors::AmendError::TargetOwnerhshipNotFound(_)) - )); - } -} diff --git a/gitbutler-app/tests/suite/virtual_branches/apply_virtual_branch.rs b/gitbutler-app/tests/suite/virtual_branches/apply_virtual_branch.rs deleted file mode 100644 index b422439fa..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/apply_virtual_branch.rs +++ /dev/null @@ -1,278 +0,0 @@ -use super::*; - -#[tokio::test] -async fn deltect_conflict() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = { - let branch1_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - fs::write(repository.path().join("file.txt"), "branch one").unwrap(); - - branch1_id - }; - - // unapply first vbranch - controller - .unapply_virtual_branch(project_id, &branch1_id) - .await - .unwrap(); - - { - // create another vbranch that conflicts with the first one - controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - fs::write(repository.path().join("file.txt"), "branch two").unwrap(); - } - - { - // it should not be possible to apply the first branch - assert!(!controller - .can_apply_virtual_branch(project_id, &branch1_id) - .await - .unwrap()); - - assert!(matches!( - controller - .apply_virtual_branch(project_id, &branch1_id) - .await, - Err(ControllerError::Action( - errors::ApplyBranchError::BranchConflicts(_) - )) - )); - } -} - -#[tokio::test] -async fn rebase_commit() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "one").unwrap(); - fs::write(repository.path().join("another_file.txt"), "").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "two").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = { - // create a branch with some commited work - let branch1_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - fs::write(repository.path().join("another_file.txt"), "virtual").unwrap(); - - controller - .create_commit(project_id, &branch1_id, "virtual commit", None, false) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert!(branches[0].active); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - - branch1_id - }; - - { - // unapply first vbranch - controller - .unapply_virtual_branch(project_id, &branch1_id) - .await - .unwrap(); - - assert_eq!( - fs::read_to_string(repository.path().join("another_file.txt")).unwrap(), - "" - ); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "one" - ); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - assert!(!branches[0].active); - } - - { - // fetch remote - controller.update_base_branch(project_id).await.unwrap(); - - // branch is stil unapplied - let (branches, _, 
_) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - assert!(!branches[0].active); - assert!(!branches[0].conflicted); - - assert_eq!( - fs::read_to_string(repository.path().join("another_file.txt")).unwrap(), - "" - ); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "two" - ); - } - - { - // apply first vbranch again - controller - .apply_virtual_branch(project_id, &branch1_id) - .await - .unwrap(); - - // it should be rebased - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - - assert_eq!( - fs::read_to_string(repository.path().join("another_file.txt")).unwrap(), - "virtual" - ); - - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "two" - ); - } -} - -#[tokio::test] -async fn rebase_work() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = { - // make a branch with some work - let branch1_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - fs::write(repository.path().join("another_file.txt"), "").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert!(branches[0].active); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - - branch1_id - }; - - { - // unapply first vbranch - controller - .unapply_virtual_branch(project_id, &branch1_id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert!(!branches[0].active); - - assert!(!repository.path().join("another_file.txt").exists()); - assert!(!repository.path().join("file.txt").exists()); - } - - { - // fetch remote - controller.update_base_branch(project_id).await.unwrap(); - - // first branch is stil unapplied - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert!(!branches[0].active); - assert!(!branches[0].conflicted); - - 
assert!(!repository.path().join("another_file.txt").exists()); - assert!(repository.path().join("file.txt").exists()); - } - - { - // apply first vbranch again - controller - .apply_virtual_branch(project_id, &branch1_id) - .await - .unwrap(); - - // workdir should be rebased, and work should be restored - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - - assert!(repository.path().join("another_file.txt").exists()); - assert!(repository.path().join("file.txt").exists()); - } -} diff --git a/gitbutler-app/tests/suite/virtual_branches/cherry_pick.rs b/gitbutler-app/tests/suite/virtual_branches/cherry_pick.rs deleted file mode 100644 index 212219fcd..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/cherry_pick.rs +++ /dev/null @@ -1,382 +0,0 @@ -use super::*; - -mod cleanly { - - use super::*; - - #[tokio::test] - async fn applied() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one = { - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit", None, false) - .await - .unwrap() - }; - - let commit_two = { - fs::write(repository.path().join("file.txt"), "content two").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit", None, false) - .await - .unwrap() - }; - - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - controller - .reset_virtual_branch(project_id, &branch_id, commit_one) - .await - .unwrap(); - - repository.reset_hard(None); - - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - - let cherry_picked_commit_oid = controller - .cherry_pick(project_id, &branch_id, commit_two) - .await - .unwrap(); - assert!(cherry_picked_commit_oid.is_some()); - assert!(repository.path().join("file.txt").exists()); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content two" - ); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert_eq!(branches[0].commits.len(), 2); - assert_eq!(branches[0].commits[0].id, cherry_picked_commit_oid.unwrap()); - assert_eq!(branches[0].commits[1].id, commit_one); - } - - #[tokio::test] - async fn to_different_branch() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one = { - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit", None, false) - .await - .unwrap() - }; - - let commit_two = { - fs::write(repository.path().join("file_two.txt"), "content two").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit", None, false) - .await - .unwrap() - }; - - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - controller - .reset_virtual_branch(project_id, &branch_id, commit_one) - .await - .unwrap(); - - repository.reset_hard(None); - - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - assert!(!repository.path().join("file_two.txt").exists()); - - let branch_two_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let cherry_picked_commit_oid = controller - .cherry_pick(project_id, &branch_two_id, commit_two) - .await - .unwrap(); - assert!(cherry_picked_commit_oid.is_some()); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert!(repository.path().join("file_two.txt").exists()); - assert_eq!( - fs::read_to_string(repository.path().join("file_two.txt")).unwrap(), - "content two" - ); - - assert_eq!(branches.len(), 2); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].id, commit_one); - - assert_eq!(branches[1].id, branch_two_id); - assert!(branches[1].active); - assert_eq!(branches[1].commits.len(), 1); - assert_eq!(branches[1].commits[0].id, 
cherry_picked_commit_oid.unwrap()); - } - - #[tokio::test] - async fn non_applied() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one_oid = { - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file_two.txt"), "content two").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit", None, false) - .await - .unwrap() - }; - - let commit_three_oid = { - fs::write(repository.path().join("file_three.txt"), "content three").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit", None, false) - .await - .unwrap() - }; - - controller - .reset_virtual_branch(project_id, &branch_id, commit_one_oid) - .await - .unwrap(); - - controller - .unapply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - - assert!(matches!( - controller - .cherry_pick(project_id, &branch_id, commit_three_oid) - .await, - Err(ControllerError::Action(errors::CherryPickError::NotApplied)) - )); - } -} - -mod with_conflicts { - - use super::*; - - #[tokio::test] - async fn applied() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one = { - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file_two.txt"), "content two").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit two", None, false) - .await - .unwrap() - }; - - let commit_three = { - fs::write(repository.path().join("file_three.txt"), "content three").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit three", None, false) - .await - .unwrap() - }; - - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - controller - .reset_virtual_branch(project_id, &branch_id, commit_one) - .await - .unwrap(); - - repository.reset_hard(None); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - assert!(!repository.path().join("file_two.txt").exists()); - assert!(!repository.path().join("file_three.txt").exists()); - - // introduce conflict with the remote commit - fs::write(repository.path().join("file_three.txt"), "conflict").unwrap(); - - { - // cherry picking leads to conflict - let cherry_picked_commit_oid = controller - .cherry_pick(project_id, &branch_id, commit_three) - .await - .unwrap(); - assert!(cherry_picked_commit_oid.is_none()); - - assert_eq!( - fs::read_to_string(repository.path().join("file_three.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\ncontent three\n>>>>>>> theirs\n" - ); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - 
assert!(branches[0].active); - assert!(branches[0].conflicted); - assert_eq!(branches[0].files.len(), 1); - assert!(branches[0].files[0].conflicted); - assert_eq!(branches[0].commits.len(), 1); - } - - { - // conflict can be resolved - fs::write(repository.path().join("file_three.txt"), "resolved").unwrap(); - let commited_oid = controller - .create_commit(project_id, &branch_id, "resolution", None, false) - .await - .unwrap(); - - let commit = repository.find_commit(commited_oid).unwrap(); - assert_eq!(commit.parent_count(), 2); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].requires_force); - assert!(!branches[0].conflicted); - assert_eq!(branches[0].commits.len(), 2); - // resolution commit is there - assert_eq!(branches[0].commits[0].id, commited_oid); - assert_eq!(branches[0].commits[1].id, commit_one); - } - } - - #[tokio::test] - async fn non_applied() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - let commit_oid = { - let first = repository.commit_all("commit"); - fs::write(repository.path().join("file.txt"), "content").unwrap(); - let second = repository.commit_all("commit"); - repository.push(); - repository.reset_hard(Some(first)); - second - }; - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - // introduce conflict with the remote commit - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - - controller - .unapply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - - assert!(matches!( - controller - .cherry_pick(project_id, &branch_id, commit_oid) - .await, - Err(ControllerError::Action(errors::CherryPickError::NotApplied)) - )); - } -} diff --git a/gitbutler-app/tests/suite/virtual_branches/create_commit.rs b/gitbutler-app/tests/suite/virtual_branches/create_commit.rs deleted file mode 100644 index 95a2ebfbe..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/create_commit.rs +++ /dev/null @@ -1,198 +0,0 @@ -use super::*; - -#[tokio::test] -async fn should_lock_updated_hunks() { - let Test { - project_id, - controller, - repository, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // by default, hunks are not locked - - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); - assert_eq!(branch.files[0].hunks.len(), 1); - assert!(!branch.files[0].hunks[0].locked); - } - - controller - .create_commit(project_id, &branch_id, "test", None, false) - .await - .unwrap(); - - { - // change in the committed hunks leads to hunk locking - fs::write(repository.path().join("file.txt"), "updated content").unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); - assert_eq!(branch.files[0].hunks.len(), 1); - assert!(branch.files[0].hunks[0].locked); - } -} - -#[tokio::test] -async fn should_not_lock_disjointed_hunks() { - let Test { - project_id, - controller, - repository, - .. 
- } = &Test::default(); - - let mut lines: Vec<_> = (0_i32..24_i32).map(|i| format!("line {}", i)).collect(); - fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); - repository.commit_all("my commit"); - repository.push(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // new hunk in the middle of the file - lines[12] = "commited stuff".to_string(); - fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); - assert_eq!(branch.files[0].hunks.len(), 1); - assert!(!branch.files[0].hunks[0].locked); - } - - controller - .create_commit(project_id, &branch_id, "test commit", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - { - // hunk before the commited part is not locked - let mut changed_lines = lines.clone(); - changed_lines[0] = "updated line".to_string(); - fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap(); - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); - assert_eq!(branch.files[0].hunks.len(), 1); - assert!(!branch.files[0].hunks[0].locked); - // cleanup - fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); - } - { - // hunk after the commited part is not locked - let mut changed_lines = lines.clone(); - 
changed_lines[23] = "updated line".to_string(); - fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap(); - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); - assert_eq!(branch.files[0].hunks.len(), 1); - assert!(!branch.files[0].hunks[0].locked); - // cleanup - fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); - } - { - // hunk before the commited part but with overlapping context - let mut changed_lines = lines.clone(); - changed_lines[10] = "updated line".to_string(); - fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap(); - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); - assert_eq!(branch.files[0].hunks.len(), 1); - // TODO: We lock this hunk, but can we afford not lock it? - assert!(branch.files[0].hunks[0].locked); - // cleanup - fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); - } - { - // hunk after the commited part but with overlapping context - let mut changed_lines = lines.clone(); - changed_lines[14] = "updated line".to_string(); - fs::write(repository.path().join("file.txt"), changed_lines.join("\n")).unwrap(); - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].path.display().to_string(), "file.txt"); - assert_eq!(branch.files[0].hunks.len(), 1); - // TODO: We lock this hunk, but can we afford not lock it? 
- assert!(branch.files[0].hunks[0].locked); - // cleanup - fs::write(repository.path().join("file.txt"), lines.clone().join("\n")).unwrap(); - } -} diff --git a/gitbutler-app/tests/suite/virtual_branches/create_virtual_branch_from_branch.rs b/gitbutler-app/tests/suite/virtual_branches/create_virtual_branch_from_branch.rs deleted file mode 100644 index f3a62b6f2..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/create_virtual_branch_from_branch.rs +++ /dev/null @@ -1,382 +0,0 @@ -use super::*; - -#[tokio::test] -async fn integration() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_name = { - // make a remote branch - - let branch_id = controller - .create_virtual_branch(project_id, &super::branch::BranchCreateRequest::default()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "first\n").unwrap(); - controller - .create_commit(project_id, &branch_id, "first", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|branch| branch.id == branch_id) - .unwrap(); - - let name = branch.upstream.unwrap().name; - - controller - .delete_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - - name - }; - - // checkout a existing remote branch - let branch_id = controller - .create_virtual_branch_from_branch(project_id, &branch_name) - .await - .unwrap(); - - { - // add a commit - std::fs::write(repository.path().join("file.txt"), "first\nsecond").unwrap(); - - controller - .create_commit(project_id, &branch_id, "second", None, false) - .await - .unwrap(); - } - - { - // meanwhile, there is a new commit on master - repository.checkout(&"refs/heads/master".parse().unwrap()); - 
std::fs::write(repository.path().join("another.txt"), "").unwrap(); - repository.commit_all("another"); - repository.push_branch(&"refs/heads/master".parse().unwrap()); - repository.checkout(&"refs/heads/gitbutler/integration".parse().unwrap()); - } - - { - // merge branch into master - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|branch| branch.id == branch_id) - .unwrap(); - - assert!(branch.commits[0].is_remote); - assert!(!branch.commits[0].is_integrated); - assert!(branch.commits[1].is_remote); - assert!(!branch.commits[1].is_integrated); - - repository.rebase_and_merge(&branch_name); - } - - { - // should mark commits as integrated - controller - .fetch_from_target(project_id, None) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|branch| branch.id == branch_id) - .unwrap(); - - assert!(branch.commits[0].is_remote); - assert!(branch.commits[0].is_integrated); - assert!(branch.commits[1].is_remote); - assert!(branch.commits[1].is_integrated); - } -} - -#[tokio::test] -async fn no_conflicts() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - { - // create a remote branch - let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); - repository.checkout(&branch_name); - fs::write(repository.path().join("file.txt"), "first").unwrap(); - repository.commit_all("first"); - repository.push_branch(&branch_name); - repository.checkout(&"refs/heads/master".parse().unwrap()); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert!(branches.is_empty()); - - let branch_id = controller - .create_virtual_branch_from_branch( - project_id, - &"refs/remotes/origin/branch".parse().unwrap(), - ) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].description, "first"); -} - -#[tokio::test] -async fn conflicts_with_uncommited() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - { - // create a remote branch - let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); - repository.checkout(&branch_name); - fs::write(repository.path().join("file.txt"), "first").unwrap(); - repository.commit_all("first"); - repository.push_branch(&branch_name); - repository.checkout(&"refs/heads/master".parse().unwrap()); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // create a local branch that conflicts with remote - { - std::fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - }; - - // branch should be created unapplied, because of the conflict - - let new_branch_id = controller - .create_virtual_branch_from_branch( - project_id, - &"refs/remotes/origin/branch".parse().unwrap(), - ) - .await - .unwrap(); - let new_branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|branch| branch.id == new_branch_id) - .unwrap(); - assert!(!new_branch.active); - assert_eq!(new_branch.commits.len(), 1); - assert!(new_branch.upstream.is_some()); -} - -#[tokio::test] -async fn conflicts_with_commited() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - { - // create a remote branch - let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); - repository.checkout(&branch_name); - fs::write(repository.path().join("file.txt"), "first").unwrap(); - repository.commit_all("first"); - repository.push_branch(&branch_name); - repository.checkout(&"refs/heads/master".parse().unwrap()); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // create a local branch that conflicts with remote - { - std::fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - controller - .create_commit(project_id, &branches[0].id, "hej", None, false) - .await - .unwrap(); - }; - - // branch should be created unapplied, because of the conflict - - let new_branch_id = controller - .create_virtual_branch_from_branch( - project_id, - &"refs/remotes/origin/branch".parse().unwrap(), - ) - .await - .unwrap(); - let new_branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|branch| branch.id == new_branch_id) - .unwrap(); - assert!(!new_branch.active); - assert_eq!(new_branch.commits.len(), 1); - assert!(new_branch.upstream.is_some()); -} - -#[tokio::test] -async fn from_default_target() { - let Test { - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // branch should be created unapplied, because of the conflict - - assert!(matches!( - controller - .create_virtual_branch_from_branch( - project_id, - &"refs/remotes/origin/master".parse().unwrap(), - ) - .await - .unwrap_err(), - ControllerError::Action( - errors::CreateVirtualBranchFromBranchError::CantMakeBranchFromDefaultTarget - ) - )); -} - -#[tokio::test] -async fn from_non_existent_branch() { - let Test { - project_id, - controller, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // branch should be created unapplied, because of the conflict - - assert!(matches!( - controller - .create_virtual_branch_from_branch( - project_id, - &"refs/remotes/origin/branch".parse().unwrap(), - ) - .await - .unwrap_err(), - ControllerError::Action(errors::CreateVirtualBranchFromBranchError::BranchNotFound( - _ - )) - )); -} - -#[tokio::test] -async fn from_state_remote_branch() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - { - // create a remote branch - let branch_name: git::LocalRefname = "refs/heads/branch".parse().unwrap(); - repository.checkout(&branch_name); - fs::write(repository.path().join("file.txt"), "branch commit").unwrap(); - repository.commit_all("branch commit"); - repository.push_branch(&branch_name); - repository.checkout(&"refs/heads/master".parse().unwrap()); - - // make remote branch stale - std::fs::write(repository.path().join("antoher_file.txt"), "master commit").unwrap(); - repository.commit_all("master commit"); - repository.push(); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch_from_branch( - project_id, - &"refs/remotes/origin/branch".parse().unwrap(), - ) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert_eq!(branches[0].commits.len(), 1); - assert!(branches[0].files.is_empty()); - assert_eq!(branches[0].commits[0].description, "branch commit"); -} diff --git a/gitbutler-app/tests/suite/virtual_branches/delete_virtual_branch.rs b/gitbutler-app/tests/suite/virtual_branches/delete_virtual_branch.rs deleted file mode 100644 index b930a0763..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/delete_virtual_branch.rs +++ /dev/null @@ -1,78 +0,0 @@ -use super::*; - -#[tokio::test] -async fn should_unapply_diff() { - let Test { - project_id, - controller, - repository, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // write some - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - - controller - .delete_virtual_branch(project_id, &branches[0].id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - assert!(!repository.path().join("file.txt").exists()); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - assert!(!refnames.contains(&"refs/gitbutler/name".to_string())); -} - -#[tokio::test] -async fn should_remove_reference() { - let Test { - project_id, - controller, - repository, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let id = controller - .create_virtual_branch( - project_id, - &branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - controller - .delete_virtual_branch(project_id, &id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - assert!(!refnames.contains(&"refs/gitbutler/name".to_string())); -} diff --git a/gitbutler-app/tests/suite/virtual_branches/fetch_from_target.rs b/gitbutler-app/tests/suite/virtual_branches/fetch_from_target.rs deleted file mode 100644 index 7b3f1c72f..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/fetch_from_target.rs +++ /dev/null @@ -1,46 +0,0 @@ -use super::*; - 
-#[tokio::test] -async fn should_update_last_fetched() { - let Test { - project_id, - controller, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let before_fetch = controller.get_base_branch_data(project_id).await.unwrap(); - assert!(before_fetch.unwrap().last_fetched_ms.is_none()); - - let fetch = controller - .fetch_from_target(project_id, None) - .await - .unwrap(); - assert!(fetch.last_fetched_ms.is_some()); - - let after_fetch = controller.get_base_branch_data(project_id).await.unwrap(); - assert!(after_fetch.as_ref().unwrap().last_fetched_ms.is_some()); - assert_eq!(fetch.last_fetched_ms, after_fetch.unwrap().last_fetched_ms); - - let second_fetch = controller - .fetch_from_target(project_id, None) - .await - .unwrap(); - assert!(second_fetch.last_fetched_ms.is_some()); - assert_ne!(fetch.last_fetched_ms, second_fetch.last_fetched_ms); - - let after_second_fetch = controller.get_base_branch_data(project_id).await.unwrap(); - assert!(after_second_fetch - .as_ref() - .unwrap() - .last_fetched_ms - .is_some()); - assert_eq!( - second_fetch.last_fetched_ms, - after_second_fetch.unwrap().last_fetched_ms - ); -} diff --git a/gitbutler-app/tests/suite/virtual_branches/init.rs b/gitbutler-app/tests/suite/virtual_branches/init.rs deleted file mode 100644 index 9cf4c478e..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/init.rs +++ /dev/null @@ -1,213 +0,0 @@ -use super::*; - -#[tokio::test] -async fn twice() { - let data_dir = paths::data_dir(); - let keys = keys::Controller::from_path(&data_dir); - let projects = projects::Controller::from_path(&data_dir); - let users = users::Controller::from_path(&data_dir); - let helper = git::credentials::Helper::from_path(&data_dir); - - let test_project = TestProject::default(); - - let controller = Controller::new( - data_dir.path().into(), - projects.clone(), - users, - keys, - helper, - ); - - { - let project = 
projects - .add(test_project.path()) - .expect("failed to add project"); - controller - .set_base_branch(&project.id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - assert!(controller - .list_virtual_branches(&project.id) - .await - .unwrap() - .0 - .is_empty()); - projects.delete(&project.id).await.unwrap(); - controller - .list_virtual_branches(&project.id) - .await - .unwrap_err(); - } - - { - let project = projects.add(test_project.path()).unwrap(); - controller - .set_base_branch(&project.id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // even though project is on gitbutler/integration, we should not import it - assert!(controller - .list_virtual_branches(&project.id) - .await - .unwrap() - .0 - .is_empty()); - } -} - -#[tokio::test] -async fn dirty_non_target() { - // a situation when you initialize project while being on the local verison of the master - // that has uncommited changes. - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - repository.checkout(&"refs/heads/some-feature".parse().unwrap()); - - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].files[0].hunks.len(), 1); - assert!(branches[0].upstream.is_none()); - assert_eq!(branches[0].name, "some-feature"); -} - -#[tokio::test] -async fn dirty_target() { - // a situation when you initialize project while being on the local verison of the master - // that has uncommited changes. - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].files[0].hunks.len(), 1); - assert!(branches[0].upstream.is_none()); - assert_eq!(branches[0].name, "master"); -} - -#[tokio::test] -async fn commit_on_non_target_local() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - repository.checkout(&"refs/heads/some-feature".parse().unwrap()); - fs::write(repository.path().join("file.txt"), "content").unwrap(); - repository.commit_all("commit on target"); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].files.is_empty()); - assert_eq!(branches[0].commits.len(), 1); - assert!(branches[0].upstream.is_none()); - assert_eq!(branches[0].name, "some-feature"); -} - -#[tokio::test] -async fn commit_on_non_target_remote() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - repository.checkout(&"refs/heads/some-feature".parse().unwrap()); - fs::write(repository.path().join("file.txt"), "content").unwrap(); - repository.commit_all("commit on target"); - repository.push_branch(&"refs/heads/some-feature".parse().unwrap()); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].files.is_empty()); - assert_eq!(branches[0].commits.len(), 1); - assert!(branches[0].upstream.is_some()); - assert_eq!(branches[0].name, "some-feature"); -} - -#[tokio::test] -async fn commit_on_target() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - fs::write(repository.path().join("file.txt"), "content").unwrap(); - repository.commit_all("commit on target"); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].files.is_empty()); - assert_eq!(branches[0].commits.len(), 1); - assert!(branches[0].upstream.is_none()); - assert_eq!(branches[0].name, "master"); -} - -#[tokio::test] -async fn submodule() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - let project = TestProject::default(); - let submodule_url: git::Url = project.path().display().to_string().parse().unwrap(); - repository.add_submodule(&submodule_url, path::Path::new("submodule")); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].files[0].hunks.len(), 1); -} diff --git a/gitbutler-app/tests/suite/virtual_branches/mod.rs b/gitbutler-app/tests/suite/virtual_branches/mod.rs deleted file mode 100644 index 5534c34c3..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/mod.rs +++ /dev/null @@ -1,176 +0,0 @@ -use std::{fs, path, str::FromStr}; -use tempfile::TempDir; - -use crate::common::{paths, TestProject}; -use crate::VAR_NO_CLEANUP; -use gitbutler_app::{ - git, keys, - projects::{self, ProjectId}, - users, - virtual_branches::{branch, controller::ControllerError, errors, Controller}, -}; - -struct Test { - repository: TestProject, - project_id: ProjectId, - projects: projects::Controller, - controller: Controller, - data_dir: Option, -} - -impl Drop for Test { - fn drop(&mut self) { - if std::env::var_os(VAR_NO_CLEANUP).is_some() { - let _ = self.data_dir.take().unwrap().into_path(); - } - } -} - -impl Default for Test { - fn default() -> Self { - let data_dir = paths::data_dir(); - let keys = keys::Controller::from_path(&data_dir); - let projects = projects::Controller::from_path(&data_dir); - let users = users::Controller::from_path(&data_dir); - let helper = git::credentials::Helper::from_path(&data_dir); - - let test_project = TestProject::default(); - let project = projects - .add(test_project.path()) - .expect("failed to add project"); - - Self { - repository: test_project, - project_id: project.id, - controller: Controller::new( - data_dir.path().into(), - 
projects.clone(), - users, - keys, - helper, - ), - projects, - data_dir: Some(data_dir), - } - } -} - -mod amend; -mod apply_virtual_branch; -mod cherry_pick; -mod create_commit; -mod create_virtual_branch_from_branch; -mod delete_virtual_branch; -mod fetch_from_target; -mod init; -mod move_commit_to_vbranch; -mod references; -mod reset_virtual_branch; -mod selected_for_changes; -mod set_base_branch; -mod squash; -mod unapply; -mod unapply_ownership; -mod update_base_branch; -mod update_commit_message; -mod upstream; - -#[tokio::test] -async fn resolve_conflict_flow() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch1_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert!(branches[0].active); - - branch1_id - }; - - { - // fetch remote - controller.update_base_branch(project_id).await.unwrap(); - - // there is a conflict now, so the branch should be inactive - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - 
assert!(!branches[0].active); - } - - { - // when we apply conflicted branch, it has conflict - controller - .apply_virtual_branch(project_id, &branch1_id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - - // and the conflict markers are in the file - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - - { - // can't commit conflicts - assert!(matches!( - controller - .create_commit(project_id, &branch1_id, "commit conflicts", None, false) - .await, - Err(ControllerError::Action(errors::CommitError::Conflicted(_))) - )); - } - - { - // fixing the conflict removes conflicted mark - fs::write(repository.path().join("file.txt"), "resolved").unwrap(); - let commit_oid = controller - .create_commit(project_id, &branch1_id, "resolution", None, false) - .await - .unwrap(); - - let commit = repository.find_commit(commit_oid).unwrap(); - assert_eq!(commit.parent_count(), 2); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - } -} diff --git a/gitbutler-app/tests/suite/virtual_branches/move_commit_to_vbranch.rs b/gitbutler-app/tests/suite/virtual_branches/move_commit_to_vbranch.rs deleted file mode 100644 index 34848382b..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/move_commit_to_vbranch.rs +++ /dev/null @@ -1,324 +0,0 @@ -use crate::suite::virtual_branches::Test; -use gitbutler_app::git; -use gitbutler_app::virtual_branches::controller::ControllerError; -use gitbutler_app::virtual_branches::{branch, errors, BranchId}; -use std::str::FromStr; - -#[tokio::test] -async fn no_diffs() { 
- let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - let source_branch_id = branches[0].id; - - let commit_oid = controller - .create_commit(project_id, &source_branch_id, "commit", None, false) - .await - .unwrap(); - - let target_branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - controller - .move_commit(project_id, &target_branch_id, commit_oid) - .await - .unwrap(); - - let destination_branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == target_branch_id) - .unwrap(); - - let source_branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == source_branch_id) - .unwrap(); - - assert_eq!(destination_branch.commits.len(), 1); - assert_eq!(destination_branch.files.len(), 0); - assert_eq!(source_branch.commits.len(), 0); - assert_eq!(source_branch.files.len(), 0); -} - -#[tokio::test] -async fn diffs_on_source_branch() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - let source_branch_id = branches[0].id; - - let commit_oid = controller - .create_commit(project_id, &source_branch_id, "commit", None, false) - .await - .unwrap(); - - std::fs::write( - repository.path().join("another file.txt"), - "another content", - ) - .unwrap(); - - let target_branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - controller - .move_commit(project_id, &target_branch_id, commit_oid) - .await - .unwrap(); - - let destination_branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == target_branch_id) - .unwrap(); - - let source_branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == source_branch_id) - .unwrap(); - - assert_eq!(destination_branch.commits.len(), 1); - assert_eq!(destination_branch.files.len(), 0); - assert_eq!(source_branch.commits.len(), 0); - assert_eq!(source_branch.files.len(), 1); -} - -#[tokio::test] -async fn diffs_on_target_branch() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - let source_branch_id = branches[0].id; - - let commit_oid = controller - .create_commit(project_id, &source_branch_id, "commit", None, false) - .await - .unwrap(); - - let target_branch_id = controller - .create_virtual_branch( - project_id, - &branch::BranchCreateRequest { - selected_for_changes: Some(true), - ..Default::default() - }, - ) - .await - .unwrap(); - - std::fs::write( - repository.path().join("another file.txt"), - "another content", - ) - .unwrap(); - - controller - .move_commit(project_id, &target_branch_id, commit_oid) - .await - .unwrap(); - - let destination_branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == target_branch_id) - .unwrap(); - - let source_branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == source_branch_id) - .unwrap(); - - assert_eq!(destination_branch.commits.len(), 1); - assert_eq!(destination_branch.files.len(), 1); - assert_eq!(source_branch.commits.len(), 0); - assert_eq!(source_branch.files.len(), 0); -} - -#[tokio::test] -async fn locked_hunks_on_source_branch() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - let source_branch_id = branches[0].id; - - let commit_oid = controller - .create_commit(project_id, &source_branch_id, "commit", None, false) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "locked content").unwrap(); - - let target_branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - assert!(matches!( - controller - .move_commit(project_id, &target_branch_id, commit_oid) - .await - .unwrap_err(), - ControllerError::Action(errors::MoveCommitError::SourceLocked) - )); -} - -#[tokio::test] -async fn no_commit() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - let source_branch_id = branches[0].id; - - controller - .create_commit(project_id, &source_branch_id, "commit", None, false) - .await - .unwrap(); - - let target_branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - assert!(matches!( - controller - .move_commit( - project_id, - &target_branch_id, - git::Oid::from_str("a99c95cca7a60f1a2180c2f86fb18af97333c192").unwrap() - ) - .await - .unwrap_err(), - ControllerError::Action(errors::MoveCommitError::CommitNotFound(_)) - )); -} - -#[tokio::test] -async fn no_branch() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - let source_branch_id = branches[0].id; - - let commit_oid = controller - .create_commit(project_id, &source_branch_id, "commit", None, false) - .await - .unwrap(); - - assert!(matches!( - controller - .move_commit(project_id, &BranchId::generate(), commit_oid) - .await - .unwrap_err(), - ControllerError::Action(errors::MoveCommitError::BranchNotFound(_)) - )); -} diff --git a/gitbutler-app/tests/suite/virtual_branches/references.rs b/gitbutler-app/tests/suite/virtual_branches/references.rs deleted file mode 100644 index 91682ee34..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/references.rs +++ /dev/null @@ -1,366 +0,0 @@ -use super::*; - 
-mod create_virtual_branch { - use super::*; - - #[tokio::test] - async fn simple() { - let Test { - project_id, - controller, - repository, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert_eq!(branches[0].name, "Virtual branch"); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - assert!(refnames.contains(&"refs/gitbutler/Virtual-branch".to_string())); - } - - #[tokio::test] - async fn duplicate_name() { - let Test { - project_id, - controller, - repository, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch( - project_id, - &gitbutler_app::virtual_branches::branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - let branch2_id = controller - .create_virtual_branch( - project_id, - &gitbutler_app::virtual_branches::branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 2); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].name, "name"); - assert_eq!(branches[1].id, branch2_id); - assert_eq!(branches[1].name, "name 1"); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); 
- assert!(refnames.contains(&"refs/gitbutler/name".to_string())); - assert!(refnames.contains(&"refs/gitbutler/name-1".to_string())); - } -} - -mod update_virtual_branch { - use super::*; - - #[tokio::test] - async fn simple() { - let Test { - project_id, - controller, - repository, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch( - project_id, - &branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - controller - .update_virtual_branch( - project_id, - branch::BranchUpdateRequest { - id: branch_id, - name: Some("new name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert_eq!(branches[0].name, "new name"); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - assert!(!refnames.contains(&"refs/gitbutler/name".to_string())); - assert!(refnames.contains(&"refs/gitbutler/new-name".to_string())); - } - - #[tokio::test] - async fn duplicate_name() { - let Test { - project_id, - controller, - repository, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch( - project_id, - &branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - let branch2_id = controller - .create_virtual_branch( - project_id, - &branch::BranchCreateRequest { - ..Default::default() - }, - ) - .await - .unwrap(); - - controller - .update_virtual_branch( - project_id, - branch::BranchUpdateRequest { - id: branch2_id, - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 2); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].name, "name"); - assert_eq!(branches[1].id, branch2_id); - assert_eq!(branches[1].name, "name 1"); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - assert!(refnames.contains(&"refs/gitbutler/name".to_string())); - assert!(refnames.contains(&"refs/gitbutler/name-1".to_string())); - } -} - -mod push_virtual_branch { - - use super::*; - - #[tokio::test] - async fn simple() { - let Test { - project_id, - controller, - repository, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch( - project_id, - &branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - controller - .create_commit(project_id, &branch1_id, "test", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(project_id, &branch1_id, false, None) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].name, "name"); - assert_eq!( - branches[0].upstream.as_ref().unwrap().name.to_string(), - "refs/remotes/origin/name" - ); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - assert!(refnames.contains(&branches[0].upstream.clone().unwrap().name.to_string())); - } - - #[tokio::test] - async fn duplicate_names() { - let Test { - project_id, - controller, - repository, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = { - // create and push branch with some work - let branch1_id = controller - .create_virtual_branch( - project_id, - &branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(project_id, &branch1_id, "test", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(project_id, &branch1_id, false, None) - .await - .unwrap(); - branch1_id - }; - - // rename first branch - controller - .update_virtual_branch( - project_id, - branch::BranchUpdateRequest { - id: branch1_id, - name: Some("updated name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - - let branch2_id = { - // create another branch with first branch's old name and push it - let branch2_id = controller - .create_virtual_branch( - project_id, - &branch::BranchCreateRequest { - name: Some("name".to_string()), - ..Default::default() - }, - ) - .await - .unwrap(); - fs::write(repository.path().join("file.txt"), "updated content").unwrap(); - controller - .create_commit(project_id, &branch2_id, "test", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(project_id, &branch2_id, false, None) - .await - .unwrap(); - branch2_id - }; - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 2); - // first branch is pushing to old ref remotely - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].name, "updated name"); - assert_eq!( - branches[0].upstream.as_ref().unwrap().name, - "refs/remotes/origin/name".parse().unwrap() - ); - // new branch is pushing to new ref remotely - assert_eq!(branches[1].id, branch2_id); - assert_eq!(branches[1].name, "name"); - assert_eq!( - 
branches[1].upstream.as_ref().unwrap().name, - "refs/remotes/origin/name-1".parse().unwrap() - ); - - let refnames = repository - .references() - .into_iter() - .filter_map(|reference| reference.name().map(|name| name.to_string())) - .collect::>(); - assert!(refnames.contains(&branches[0].upstream.clone().unwrap().name.to_string())); - assert!(refnames.contains(&branches[1].upstream.clone().unwrap().name.to_string())); - } -} diff --git a/gitbutler-app/tests/suite/virtual_branches/reset_virtual_branch.rs b/gitbutler-app/tests/suite/virtual_branches/reset_virtual_branch.rs deleted file mode 100644 index 7a67efb1a..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/reset_virtual_branch.rs +++ /dev/null @@ -1,267 +0,0 @@ -use crate::suite::virtual_branches::Test; -use gitbutler_app::virtual_branches::{ - branch, controller::ControllerError, errors::ResetBranchError, -}; -use std::fs; - -#[tokio::test] -async fn to_head() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let oid = { - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - // commit changes - let oid = controller - .create_commit(project_id, &branch1_id, "commit", None, false) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].id, oid); - assert_eq!(branches[0].files.len(), 0); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - - oid - }; - - { - // reset changes to head - controller - .reset_virtual_branch(project_id, &branch1_id, oid) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].id, oid); - assert_eq!(branches[0].files.len(), 0); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - } -} - -#[tokio::test] -async fn to_target() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - let base_branch = controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - // commit changes - let oid = controller - .create_commit(project_id, &branch1_id, "commit", None, false) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].id, oid); - assert_eq!(branches[0].files.len(), 0); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - } - - { - // reset changes to head - controller - .reset_virtual_branch(project_id, &branch1_id, base_branch.base_sha) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 0); - assert_eq!(branches[0].files.len(), 1); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - } -} - -#[tokio::test] -async fn to_commit() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let first_commit_oid = { - // commit some changes - - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let oid = controller - .create_commit(project_id, &branch1_id, "commit", None, false) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].id, oid); - assert_eq!(branches[0].files.len(), 0); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - - oid - }; - - { - // commit some more - fs::write(repository.path().join("file.txt"), "more content").unwrap(); - - let second_commit_oid = controller - .create_commit(project_id, &branch1_id, "commit", None, false) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 2); - assert_eq!(branches[0].commits[0].id, second_commit_oid); - assert_eq!(branches[0].commits[1].id, first_commit_oid); - assert_eq!(branches[0].files.len(), 0); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "more content" - ); - } - - { - // reset changes to the first commit - controller - .reset_virtual_branch(project_id, &branch1_id, first_commit_oid) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 1); - 
assert_eq!(branches[0].commits[0].id, first_commit_oid); - assert_eq!(branches[0].files.len(), 1); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "more content" - ); - } -} - -#[tokio::test] -async fn to_non_existing() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - fs::write(repository.path().join("file.txt"), "content").unwrap(); - - // commit changes - let oid = controller - .create_commit(project_id, &branch1_id, "commit", None, false) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].commits[0].id, oid); - assert_eq!(branches[0].files.len(), 0); - assert_eq!( - fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "content" - ); - - oid - }; - - assert!(matches!( - controller - .reset_virtual_branch( - project_id, - &branch1_id, - "fe14df8c66b73c6276f7bb26102ad91da680afcb".parse().unwrap() - ) - .await, - Err(ControllerError::Action( - ResetBranchError::CommitNotFoundInBranch(_) - )) - )); -} diff --git a/gitbutler-app/tests/suite/virtual_branches/selected_for_changes.rs b/gitbutler-app/tests/suite/virtual_branches/selected_for_changes.rs deleted file mode 100644 index cfeb16a20..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/selected_for_changes.rs +++ /dev/null @@ -1,375 +0,0 @@ -use super::*; - -#[tokio::test] -async fn unapplying_selected_branch_selects_anther() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file one.txt"), "").unwrap(); - - // first branch should be created as default - let b_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - // if default branch exists, new branch should not be created as default - let b2_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - - let b = branches.iter().find(|b| b.id == b_id).unwrap(); - - let b2 = branches.iter().find(|b| b.id == b2_id).unwrap(); - - assert!(b.selected_for_changes); - assert!(!b2.selected_for_changes); - - controller - .unapply_virtual_branch(project_id, &b_id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - - assert_eq!(branches.len(), 2); - assert_eq!(branches[0].id, b.id); - assert!(!branches[0].selected_for_changes); - assert!(!branches[0].active); - assert_eq!(branches[1].id, b2.id); - assert!(branches[1].selected_for_changes); - assert!(branches[1].active); -} - -#[tokio::test] -async fn deleting_selected_branch_selects_anther() { - let Test { - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // first branch should be created as default - let b_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - // if default branch exists, new branch should not be created as default - let b2_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - - let b = branches.iter().find(|b| b.id == b_id).unwrap(); - - let b2 = branches.iter().find(|b| b.id == b2_id).unwrap(); - - assert!(b.selected_for_changes); - assert!(!b2.selected_for_changes); - - controller - .delete_virtual_branch(project_id, &b_id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, b2.id); - assert!(branches[0].selected_for_changes); -} - -#[tokio::test] -async fn create_virtual_branch_should_set_selected_for_changes() { - let Test { - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // first branch should be created as default - let b_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b_id) - .unwrap(); - assert!(branch.selected_for_changes); - - // if default branch exists, new branch should not be created as default - let b_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b_id) - .unwrap(); - assert!(!branch.selected_for_changes); - - // explicitly don't make this one default - let b_id = controller - .create_virtual_branch( - project_id, - &branch::BranchCreateRequest { - selected_for_changes: Some(false), - ..Default::default() - }, - ) - .await - .unwrap(); - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b_id) - .unwrap(); - assert!(!branch.selected_for_changes); - - // explicitly make this one default - let b_id = controller - .create_virtual_branch( - project_id, - &branch::BranchCreateRequest { - selected_for_changes: Some(true), - ..Default::default() - }, - ) - .await - .unwrap(); - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b_id) - .unwrap(); - assert!(branch.selected_for_changes); -} - -#[tokio::test] -async fn update_virtual_branch_should_reset_selected_for_changes() { - let Test { - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let b1_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - let b1 = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b1_id) - .unwrap(); - assert!(b1.selected_for_changes); - - let b2_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - let b2 = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b2_id) - .unwrap(); - assert!(!b2.selected_for_changes); - - controller - .update_virtual_branch( - project_id, - branch::BranchUpdateRequest { - id: b2_id, - selected_for_changes: Some(true), - ..Default::default() - }, - ) - .await - .unwrap(); - - let b1 = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b1_id) - .unwrap(); - assert!(!b1.selected_for_changes); - - let b2 = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b2_id) - .unwrap(); - assert!(b2.selected_for_changes); -} - -#[tokio::test] -async fn unapply_virtual_branch_should_reset_selected_for_changes() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let b1_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let b1 = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b1_id) - .unwrap(); - assert!(b1.selected_for_changes); - - controller - .unapply_virtual_branch(project_id, &b1_id) - .await - .unwrap(); - - let b1 = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == b1_id) - .unwrap(); - assert!(!b1.selected_for_changes); -} - -#[tokio::test] -async fn hunks_distribution() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches[0].files.len(), 1); - - controller - .create_virtual_branch( - project_id, - &branch::BranchCreateRequest { - selected_for_changes: Some(true), - ..Default::default() - }, - ) - .await - .unwrap(); - std::fs::write(repository.path().join("another_file.txt"), "content").unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[1].files.len(), 1); -} - -#[tokio::test] -async fn applying_first_branch() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - controller - .unapply_virtual_branch(project_id, &branches[0].id) - .await - .unwrap(); - controller - .apply_virtual_branch(project_id, &branches[0].id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].active); - assert!(branches[0].selected_for_changes); -} diff --git a/gitbutler-app/tests/suite/virtual_branches/set_base_branch.rs b/gitbutler-app/tests/suite/virtual_branches/set_base_branch.rs deleted file mode 100644 index 23dd2da50..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/set_base_branch.rs +++ /dev/null @@ -1,235 +0,0 @@ -use super::*; - -#[tokio::test] -async fn success() { - let Test { - project_id, - controller, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); -} - -mod error { - use super::*; - - #[tokio::test] - async fn missing() { - let Test { - project_id, - controller, - .. - } = &Test::default(); - - assert!(matches!( - controller - .set_base_branch( - project_id, - &git::RemoteRefname::from_str("refs/remotes/origin/missing").unwrap(), - ) - .await - .unwrap_err(), - ControllerError::Action(errors::SetBaseBranchError::BranchNotFound(_)) - )); - } -} - -mod go_back_to_integration { - use pretty_assertions::assert_eq; - - use super::*; - - #[tokio::test] - async fn should_preserve_applied_vbranches() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - std::fs::write(repository.path().join("file.txt"), "one").unwrap(); - let oid_one = repository.commit_all("one"); - std::fs::write(repository.path().join("file.txt"), "two").unwrap(); - repository.commit_all("two"); - repository.push(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let vbranch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - std::fs::write(repository.path().join("another file.txt"), "content").unwrap(); - controller - .create_commit(project_id, &vbranch_id, "one", None, false) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - repository.checkout_commit(oid_one); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, vbranch_id); - assert!(branches[0].active); - } - - #[tokio::test] - async fn from_target_branch_index_conflicts() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - std::fs::write(repository.path().join("file.txt"), "one").unwrap(); - let oid_one = repository.commit_all("one"); - std::fs::write(repository.path().join("file.txt"), "two").unwrap(); - repository.commit_all("two"); - repository.push(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert!(branches.is_empty()); - - repository.checkout_commit(oid_one); - std::fs::write(repository.path().join("file.txt"), "tree").unwrap(); - - assert!(matches!( - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap_err(), - ControllerError::Action(errors::SetBaseBranchError::DirtyWorkingDirectory) - )); - } - - #[tokio::test] - async fn from_target_branch_with_uncommited() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - std::fs::write(repository.path().join("file.txt"), "one").unwrap(); - let oid_one = repository.commit_all("one"); - std::fs::write(repository.path().join("file.txt"), "two").unwrap(); - repository.commit_all("two"); - repository.push(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert!(branches.is_empty()); - - repository.checkout_commit(oid_one); - std::fs::write(repository.path().join("another file.txt"), "tree").unwrap(); - - assert!(matches!( - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .map_err(|error| dbg!(error)) - .unwrap_err(), - ControllerError::Action(errors::SetBaseBranchError::DirtyWorkingDirectory) - )); - } - - #[tokio::test] - async fn from_target_branch_with_commit() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - std::fs::write(repository.path().join("file.txt"), "one").unwrap(); - let oid_one = repository.commit_all("one"); - std::fs::write(repository.path().join("file.txt"), "two").unwrap(); - repository.commit_all("two"); - repository.push(); - - let base = controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert!(branches.is_empty()); - - repository.checkout_commit(oid_one); - std::fs::write(repository.path().join("another file.txt"), "tree").unwrap(); - repository.commit_all("three"); - - let base_two = controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - assert_eq!(base_two, base); - } - - #[tokio::test] - async fn from_target_branch_without_any_changes() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - std::fs::write(repository.path().join("file.txt"), "one").unwrap(); - let oid_one = repository.commit_all("one"); - std::fs::write(repository.path().join("file.txt"), "two").unwrap(); - repository.commit_all("two"); - repository.push(); - - let base = controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert!(branches.is_empty()); - - repository.checkout_commit(oid_one); - - let base_two = controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - assert_eq!(base_two, base); - } -} diff --git a/gitbutler-app/tests/suite/virtual_branches/squash.rs b/gitbutler-app/tests/suite/virtual_branches/squash.rs deleted file mode 100644 index 52d390fa9..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/squash.rs +++ /dev/null @@ -1,356 +0,0 @@ -use super::*; - -#[tokio::test] -async fn head() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - fs::write(repository.path().join("file one.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file two.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit two", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file three.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit three", None, false) - .await - .unwrap() - }; - - let commit_four_oid = { - fs::write(repository.path().join("file four.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit four", None, false) - .await - .unwrap() - }; - - controller - .squash(project_id, &branch_id, commit_four_oid) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - - let descriptions = branch - .commits - .iter() - .map(|c| c.description.clone()) - .collect::>(); - assert_eq!( - descriptions, - vec!["commit three\ncommit four", "commit two", "commit one"] - ); -} - -#[tokio::test] -async fn middle() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - fs::write(repository.path().join("file one.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - let commit_two_oid = { - fs::write(repository.path().join("file two.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit two", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file three.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit three", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file four.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit four", None, false) - .await - .unwrap() - }; - - controller - .squash(project_id, &branch_id, commit_two_oid) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - - let descriptions = branch - .commits - .iter() - .map(|c| c.description.clone()) - .collect::>(); - assert_eq!( - descriptions, - vec!["commit four", "commit three", "commit one\ncommit two"] - ); -} - -#[tokio::test] -async fn forcepush_allowed() { - let Test { - repository, - project_id, - controller, - projects, - .. 
- } = &Test::default(); - - projects - .update(&projects::UpdateRequest { - id: *project_id, - ok_with_force_push: Some(true), - ..Default::default() - }) - .await - .unwrap(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - fs::write(repository.path().join("file one.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - let commit_two_oid = { - fs::write(repository.path().join("file two.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit two", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file three.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit three", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file four.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit four", None, false) - .await - .unwrap() - }; - - controller - .squash(project_id, &branch_id, commit_two_oid) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - - let descriptions = branch - .commits - .iter() - .map(|c| c.description.clone()) - .collect::>(); - assert_eq!( - descriptions, - vec!["commit four", "commit three", "commit one\ncommit two"] - ); - assert!(branch.requires_force); -} - -#[tokio::test] -async fn forcepush_forbidden() { - let Test { - repository, - project_id, - controller, - projects, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - fs::write(repository.path().join("file one.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - projects - .update(&projects::UpdateRequest { - id: *project_id, - ok_with_force_push: Some(false), - ..Default::default() - }) - .await - .unwrap(); - - let commit_two_oid = { - fs::write(repository.path().join("file two.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit two", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file three.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit three", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file four.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit four", None, false) - .await - .unwrap() - }; - - assert!(matches!( - controller - .squash(project_id, &branch_id, commit_two_oid) - .await - .unwrap_err(), - ControllerError::Action(errors::SquashError::ForcePushNotAllowed(_)) - )); -} - -#[tokio::test] -async fn root() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one_oid = { - fs::write(repository.path().join("file one.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - assert!(matches!( - controller - .squash(project_id, &branch_id, commit_one_oid) - .await - .unwrap_err(), - ControllerError::Action(errors::SquashError::CantSquashRootCommit) - )); -} diff --git a/gitbutler-app/tests/suite/virtual_branches/unapply.rs b/gitbutler-app/tests/suite/virtual_branches/unapply.rs deleted file mode 100644 index 7bfd69aaf..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/unapply.rs +++ /dev/null @@ -1,177 +0,0 @@ -use super::*; - -#[tokio::test] -async fn unapply_with_data() { - let Test { - project_id, - controller, - repository, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - std::fs::write(repository.path().join("file.txt"), "content").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - controller - .unapply_virtual_branch(project_id, &branches[0].id) - .await - .unwrap(); - - assert!(!repository.path().join("file.txt").exists()); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(!branches[0].active); -} - -#[tokio::test] -async fn conflicting() { - let Test { - project_id, - controller, - repository, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a conflicting branch, and stash it - - std::fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].base_current); - assert!(branches[0].active); - assert_eq!(branches[0].files[0].hunks[0].diff, "@@ -1 +1 @@\n-first\n\\ No newline at end of file\n+conflict\n\\ No newline at end of file\n"); - - controller - .unapply_virtual_branch(project_id, &branches[0].id) - .await - .unwrap(); - - branches[0].id - }; - - { - // update base branch, causing conflict - controller.update_base_branch(project_id).await.unwrap(); - - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "second" - ); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|branch| branch.id == branch_id) - .unwrap(); - assert!(!branch.base_current); - assert!(!branch.active); - } - - { - // apply branch, it should conflict - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - 
assert!(branch.base_current); - assert!(branch.conflicted); - assert_eq!(branch.files[0].hunks[0].diff, "@@ -1 +1,5 @@\n-first\n\\ No newline at end of file\n+<<<<<<< ours\n+conflict\n+=======\n+second\n+>>>>>>> theirs\n"); - } - - { - controller - .unapply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "second" - ); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert!(!branch.active); - assert!(!branch.base_current); - assert!(!branch.conflicted); - assert_eq!(branch.files[0].hunks[0].diff, "@@ -1 +1 @@\n-first\n\\ No newline at end of file\n+conflict\n\\ No newline at end of file\n"); - } -} - -#[tokio::test] -async fn delete_if_empty() { - let Test { - project_id, - controller, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - - controller - .unapply_virtual_branch(project_id, &branches[0].id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 0); -} diff --git a/gitbutler-app/tests/suite/virtual_branches/unapply_ownership.rs b/gitbutler-app/tests/suite/virtual_branches/unapply_ownership.rs deleted file mode 100644 index 2e3f285d9..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/unapply_ownership.rs +++ /dev/null @@ -1,61 +0,0 @@ -use crate::suite::virtual_branches::Test; -use gitbutler_app::virtual_branches::branch; -use gitbutler_app::virtual_branches::branch::BranchOwnershipClaims; -use std::fs; - -#[tokio::test] -async fn 
should_unapply_with_commits() { - let Test { - project_id, - controller, - repository, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write( - repository.path().join("file.txt"), - "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n", - ) - .unwrap(); - controller - .create_commit(project_id, &branch_id, "test", None, false) - .await - .unwrap(); - - // change in the committed hunks leads to hunk locking - fs::write( - repository.path().join("file.txt"), - "_\n2\n3\n4\n5\n6\n7\n8\n9\n_\n", - ) - .unwrap(); - - controller - .unapply_ownership( - project_id, - &"file.txt:1-5,7-11" - .parse::() - .unwrap(), - ) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - assert!(branch.files.is_empty()); -} diff --git a/gitbutler-app/tests/suite/virtual_branches/update_base_branch.rs b/gitbutler-app/tests/suite/virtual_branches/update_base_branch.rs deleted file mode 100644 index 30735255d..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/update_base_branch.rs +++ /dev/null @@ -1,1929 +0,0 @@ -use super::*; - -mod unapplied_branch { - - use super::*; - - #[tokio::test] - async fn conflicts_with_uncommitted_work() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch that is unapplied and contains not commited conflict - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .unapply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(project_id).await.unwrap(); - - // branch should not be changed. 
- - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert!(!controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_not_pushed() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - controller - .unapply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(project_id).await.unwrap(); - - // should not change the branch. 
- - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - assert!(!controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_pushed() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - controller - .unapply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(project_id).await.unwrap(); - - // should not change the branch. 
- - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - assert!(!controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_not_pushed_fixed_with_more_work() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); - - controller - .unapply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(project_id).await.unwrap(); - - // should rebase upstream, and leave uncommited file as is - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); // TODO: should be true - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert!(!controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); // TODO: should be true - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - 
assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_pushed_fixed_with_more_work() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); - - controller - .unapply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(project_id).await.unwrap(); - - // should not touch the branch - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - 
assert!(!branches[0].active); - assert!(!branches[0].base_current); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!(branches[0].files.len(), 1); - assert!(!controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn no_conflicts() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); - - controller - .create_commit( - project_id, - &branch_id, - "non conflicting commit", - None, - false, - ) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "still no conflicts").unwrap(); - - controller - .unapply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - - branch_id - }; - - { - // fetching remote - controller.update_base_branch(project_id).await.unwrap(); - - // should update branch base - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert!(branches[0].upstream.is_none()); - assert!(controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - 
assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "second" - ); - } - } - - #[tokio::test] - async fn integrated_commit_plus_work() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - repository.commit_all("first"); - repository.push(); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "second").unwrap(); - controller - .create_commit(project_id, &branch_id, "second", None, false) - .await - .unwrap(); - - // more local work in the same branch - fs::write(repository.path().join("file2.txt"), "other").unwrap(); - - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - { - // merge branch upstream - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - - repository.merge(&branch.upstream.as_ref().unwrap().name); - repository.fetch(); - } - - controller - .unapply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - branch_id - }; - - { - // fetch remote - controller.update_base_branch(project_id).await.unwrap(); - - // should remove integrated commit, but leave work - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - 
assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert!(branches[0].upstream.is_none()); - assert!(controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "second" - ); - assert_eq!( - std::fs::read_to_string(repository.path().join("file2.txt")).unwrap(), - "other" - ); - } - } - - #[tokio::test] - async fn all_integrated() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "second").unwrap(); - - controller - .create_commit(project_id, &branch_id, "second", None, false) - .await - .unwrap(); - - controller - .unapply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - }; - - { - // fetch remote - controller.update_base_branch(project_id).await.unwrap(); - - // should remove identical branch - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - } - } - - #[tokio::test] - async fn integrate_work_while_being_behind() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // open pr - fs::write(repository.path().join("file2.txt"), "new file").unwrap(); - controller - .create_commit(project_id, &branch_id, "second", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - } - - controller - .unapply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - - { - // merge pr - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0[0] - .clone(); - repository.merge(&branch.upstream.as_ref().unwrap().name); - repository.fetch(); - } - - { - // fetch remote - controller.update_base_branch(project_id).await.unwrap(); - - // just removes integrated branch - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - } - } -} - -mod applied_branch { - - use super::*; - - #[tokio::test] - async fn conflicts_with_uncommitted_work() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - - branch_id - }; - - { - // fetch remote - controller.update_base_branch(project_id).await.unwrap(); - - // should stash conflicing branch - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert!(!controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< 
ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_not_pushed() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(project_id).await.unwrap(); - - // should stash the branch. 
- - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - assert!(!controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_pushed() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(project_id).await.unwrap(); - - // should stash the branch. 
- - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].commits.len(), 1); - assert!(!controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nconflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_not_pushed_fixed_with_more_work() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(project_id).await.unwrap(); - - // should rebase upstream, and leave uncommited file as is - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); // TODO: should be true - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert!(!controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); // TODO: should be true - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - 
assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - #[tokio::test] - async fn commited_conflict_pushed_fixed_with_more_work() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch with a commit that conflicts with upstream, and work that fixes - // that conflict - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "conflict").unwrap(); - controller - .create_commit(project_id, &branch_id, "conflicting commit", None, false) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "fix conflict").unwrap(); - - branch_id - }; - - { - // when fetching remote - controller.update_base_branch(project_id).await.unwrap(); - - // should merge upstream, and leave uncommited file as is. 
- - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(!branches[0].base_current); // TODO: should be true - assert_eq!(branches[0].commits.len(), 1); // TODO: should be 2 - assert_eq!(branches[0].files.len(), 1); - assert!(!controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); // TODO: should be true - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "<<<<<<< ours\nfix conflict\n=======\nsecond\n>>>>>>> theirs\n" - ); - } - } - - mod no_conflicts_pushed { - use super::*; - - #[tokio::test] - async fn force_push_ok() { - let Test { - repository, - project_id, - controller, - projects, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - projects - .update(&projects::UpdateRequest { - id: *project_id, - ok_with_force_push: Some(true), - ..Default::default() - }) - .await - .unwrap(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); - - controller - .create_commit(project_id, &branch_id, "no conflicts", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "still no conflict").unwrap(); - - branch_id - }; - - { - // fetch remote - controller.update_base_branch(project_id).await.unwrap(); - - // rebases branch, since the branch is pushed and force pushing is - // allowed - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].requires_force); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert!(!branches[0].commits[0].is_remote); - assert!(!branches[0].commits[0].is_integrated); - assert!(controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); - } - } - - #[tokio::test] - async fn force_push_not_ok() { - let Test { - repository, 
- project_id, - controller, - projects, - .. - } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); - - controller - .create_commit(project_id, &branch_id, "no conflicts", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "still no conflict").unwrap(); - - branch_id - }; - - projects - .update(&projects::UpdateRequest { - id: *project_id, - ok_with_force_push: Some(false), - ..Default::default() - }) - .await - .unwrap(); - - { - // fetch remote - controller.update_base_branch(project_id).await.unwrap(); - - // creates a merge commit, since the branch is pushed - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(!branches[0].requires_force); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 2); - assert!(!branches[0].commits[0].is_remote); - assert!(!branches[0].commits[0].is_integrated); - assert!(branches[0].commits[1].is_remote); - assert!(!branches[0].commits[1].is_integrated); - assert!(controller - .can_apply_virtual_branch(project_id, 
&branch_id) - .await - .unwrap()); - } - } - } - - #[tokio::test] - async fn no_conflicts() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "no conflict").unwrap(); - - controller - .create_commit(project_id, &branch_id, "no conflicts", None, false) - .await - .unwrap(); - - fs::write(repository.path().join("file2.txt"), "still no conflict").unwrap(); - - branch_id - }; - - { - // fetch remote - controller.update_base_branch(project_id).await.unwrap(); - - // just rebases branch - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert!(controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); - } - - { - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - assert!(branches[0].base_current); - 
assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 1); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "second" - ); - assert_eq!( - std::fs::read_to_string(repository.path().join("file2.txt")).unwrap(), - "still no conflict" - ); - } - } - - #[tokio::test] - async fn integrated_commit_plus_work() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - repository.commit_all("first"); - repository.push(); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "second").unwrap(); - - controller - .create_commit(project_id, &branch_id, "second", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - { - // merge branch upstream - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - repository.merge(&branch.upstream.as_ref().unwrap().name); - repository.fetch(); - } - - // more local work in the same branch - fs::write(repository.path().join("file2.txt"), "other").unwrap(); - - branch_id - }; - - { - // fetch remote - controller.update_base_branch(project_id).await.unwrap(); - - // should remove integrated commit, but leave non integrated work as is - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].base_current); - 
assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert!(controller - .can_apply_virtual_branch(project_id, &branch_id) - .await - .unwrap()); - } - - { - // applying the branch should produce conflict markers - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - assert_eq!( - std::fs::read_to_string(repository.path().join("file.txt")).unwrap(), - "second" - ); - assert_eq!( - std::fs::read_to_string(repository.path().join("file2.txt")).unwrap(), - "other" - ); - } - } - - #[tokio::test] - async fn integrated_with_locked_conflicting_hunks() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write( - repository.path().join("file.txt"), - "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n", - ) - .unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write( - repository.path().join("file.txt"), - "1\n2\n3\n4\n5\n6\n17\n8\n9\n10\n11\n12\n", - ) - .unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - // branch has no conflict - let branch_id = { - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write( - repository.path().join("file.txt"), - "1\n2\n3\n4\n5\n6\n7\n8\n19\n10\n11\n12\n", - ) - .unwrap(); - - controller - .create_commit(project_id, &branch_id, "first", None, false) - .await - .unwrap(); - - branch_id - }; - - // push the branch - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - // another locked conflicing hunk - fs::write( - repository.path().join("file.txt"), - "1\n2\n3\n4\n5\n6\n77\n8\n19\n10\n11\n12\n", - ) - .unwrap(); - - { - // merge branch remotely - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0[0] - .clone(); - repository.merge(&branch.upstream.as_ref().unwrap().name); - } - - repository.fetch(); - - { - controller.update_base_branch(project_id).await.unwrap(); - - // removes integrated commit, leaves non commited work as is - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(!branches[0].active); - assert!(branches[0].commits.is_empty()); - assert!(!branches[0].files.is_empty()); - } - - { - controller - .apply_virtual_branch(project_id, &branch_id) - 
.await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].active); - assert!(branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].files[0].hunks.len(), 1); - assert_eq!(branches[0].files[0].hunks[0].diff, "@@ -4,7 +4,11 @@\n 4\n 5\n 6\n-7\n+<<<<<<< ours\n+77\n+=======\n+17\n+>>>>>>> theirs\n 8\n 19\n 10\n"); - assert_eq!(branches[0].commits.len(), 0); - } - } - - #[tokio::test] - async fn integrated_with_locked_hunks() { - let Test { - repository, - project_id, - controller, - projects, - .. - } = &Test::default(); - - projects - .update(&projects::UpdateRequest { - id: *project_id, - ok_with_force_push: Some(false), - ..Default::default() - }) - .await - .unwrap(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "first").unwrap(); - - controller - .create_commit(project_id, &branch_id, "first", None, false) - .await - .unwrap(); - - branch_id - }; - - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - // another non-locked hunk - fs::write(repository.path().join("file.txt"), "first\nsecond").unwrap(); - - { - // push and merge branch remotely - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0[0] - .clone(); - repository.merge(&branch.upstream.as_ref().unwrap().name); - } - - repository.fetch(); - - { - controller.update_base_branch(project_id).await.unwrap(); - - // removes integrated commit, leaves non commited work as is - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 
1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].commits.is_empty()); - assert!(branches[0].upstream.is_none()); - assert_eq!(branches[0].files.len(), 1); - } - - { - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); // no merge commit - } - } - - #[tokio::test] - async fn integrated_with_non_locked_hunks() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "first").unwrap(); - - controller - .create_commit(project_id, &branch_id, "first", None, false) - .await - .unwrap(); - - branch_id - }; - - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - // another non-locked hunk - fs::write(repository.path().join("another_file.txt"), "first").unwrap(); - - { - // push and merge branch remotely - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0[0] - .clone(); - repository.merge(&branch.upstream.as_ref().unwrap().name); - } - - repository.fetch(); - - { - controller.update_base_branch(project_id).await.unwrap(); - - // removes integrated commit, leaves non commited work as is - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - 
assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch_id); - assert!(branches[0].active); - assert!(branches[0].commits.is_empty()); - assert!(branches[0].upstream.is_none()); - assert!(!branches[0].files.is_empty()); - } - - { - controller - .apply_virtual_branch(project_id, &branch_id) - .await - .unwrap(); - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert!(branches[0].active); - assert!(!branches[0].conflicted); - assert!(branches[0].base_current); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].commits.len(), 0); - } - } - - #[tokio::test] - async fn all_integrated() { - let Test { - repository, - project_id, - controller, - .. - } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - { - // make a branch that conflicts with the remote branch, but doesn't know about it yet - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - fs::write(repository.path().join("file.txt"), "second").unwrap(); - - controller - .create_commit(project_id, &branch_id, "second", None, false) - .await - .unwrap(); - }; - - { - // fetch remote - controller.update_base_branch(project_id).await.unwrap(); - - // just removes integrated branch - - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - } - } - - #[tokio::test] - async fn integrate_work_while_being_behind() { - let Test { - 
repository, - project_id, - controller, - .. - } = &Test::default(); - - // make sure we have an undiscovered commit in the remote branch - { - fs::write(repository.path().join("file.txt"), "first").unwrap(); - let first_commit_oid = repository.commit_all("first"); - fs::write(repository.path().join("file.txt"), "second").unwrap(); - repository.commit_all("second"); - repository.push(); - repository.reset_hard(Some(first_commit_oid)); - } - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - // open pr - fs::write(repository.path().join("file2.txt"), "new file").unwrap(); - controller - .create_commit(project_id, &branch_id, "second", None, false) - .await - .unwrap(); - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - } - - { - // merge pr - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0[0] - .clone(); - repository.merge(&branch.upstream.as_ref().unwrap().name); - repository.fetch(); - } - - { - // fetch remote - controller.update_base_branch(project_id).await.unwrap(); - - // just removes integrated branch - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 0); - } - } -} diff --git a/gitbutler-app/tests/suite/virtual_branches/update_commit_message.rs b/gitbutler-app/tests/suite/virtual_branches/update_commit_message.rs deleted file mode 100644 index a5ca0f5d6..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/update_commit_message.rs +++ /dev/null @@ -1,364 +0,0 @@ -use super::*; - -#[tokio::test] -async fn head() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - fs::write(repository.path().join("file one.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file two.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit two", None, false) - .await - .unwrap() - }; - - let commit_three_oid = { - fs::write(repository.path().join("file three.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit three", None, false) - .await - .unwrap() - }; - - controller - .update_commit_message( - project_id, - &branch_id, - commit_three_oid, - "commit three updated", - ) - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - - let descriptions = branch - .commits - .iter() - .map(|c| c.description.clone()) - .collect::>(); - - assert_eq!( - descriptions, - vec!["commit three updated", "commit two", "commit one"] - ); -} - -#[tokio::test] -async fn middle() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - { - fs::write(repository.path().join("file one.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - let commit_two_oid = { - fs::write(repository.path().join("file two.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit two", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file three.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit three", None, false) - .await - .unwrap() - }; - - controller - .update_commit_message(project_id, &branch_id, commit_two_oid, "commit two updated") - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - - let descriptions = branch - .commits - .iter() - .map(|c| c.description.clone()) - .collect::>(); - assert_eq!( - descriptions, - vec!["commit three", "commit two updated", "commit one"] - ); -} - -#[tokio::test] -async fn forcepush_allowed() { - let Test { - repository, - project_id, - controller, - projects, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - projects - .update(&projects::UpdateRequest { - id: *project_id, - ok_with_force_push: Some(true), - ..Default::default() - }) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one_oid = { - fs::write(repository.path().join("file one.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - controller - .update_commit_message(project_id, &branch_id, commit_one_oid, "commit one updated") - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - - let descriptions = branch - .commits - .iter() - .map(|c| c.description.clone()) - .collect::>(); - assert_eq!(descriptions, vec!["commit one updated"]); - assert!(branch.requires_force); -} - -#[tokio::test] -async fn forcepush_forbidden() { - let Test { - repository, - project_id, - controller, - projects, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - projects - .update(&projects::UpdateRequest { - id: *project_id, - ok_with_force_push: Some(false), - ..Default::default() - }) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one_oid = { - fs::write(repository.path().join("file one.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - controller - .push_virtual_branch(project_id, &branch_id, false, None) - .await - .unwrap(); - - assert!(matches!( - controller - .update_commit_message(project_id, &branch_id, commit_one_oid, "commit one updated",) - .await - .unwrap_err(), - ControllerError::Action(errors::UpdateCommitMessageError::ForcePushNotAllowed(_)) - )); -} - -#[tokio::test] -async fn root() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one_oid = { - fs::write(repository.path().join("file one.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file two.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit two", None, false) - .await - .unwrap() - }; - - { - fs::write(repository.path().join("file three.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit three", None, false) - .await - .unwrap() - }; - - controller - .update_commit_message(project_id, &branch_id, commit_one_oid, "commit one updated") - .await - .unwrap(); - - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch_id) - .unwrap(); - - let descriptions = branch - .commits - .iter() - .map(|c| c.description.clone()) - .collect::>(); - assert_eq!( - descriptions, - vec!["commit three", "commit two", "commit one updated"] - ); -} - -#[tokio::test] -async fn empty() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let commit_one_oid = { - fs::write(repository.path().join("file one.txt"), "").unwrap(); - controller - .create_commit(project_id, &branch_id, "commit one", None, false) - .await - .unwrap() - }; - - assert!(matches!( - controller - .update_commit_message(project_id, &branch_id, commit_one_oid, "",) - .await, - Err(ControllerError::Action( - errors::UpdateCommitMessageError::EmptyMessage - )) - )); -} diff --git a/gitbutler-app/tests/suite/virtual_branches/upstream.rs b/gitbutler-app/tests/suite/virtual_branches/upstream.rs deleted file mode 100644 index aca22ac38..000000000 --- a/gitbutler-app/tests/suite/virtual_branches/upstream.rs +++ /dev/null @@ -1,149 +0,0 @@ -use super::*; - -#[tokio::test] -async fn detect_upstream_commits() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let oid1 = { - // create first commit - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(project_id, &branch1_id, "commit", None, false) - .await - .unwrap() - }; - - let oid2 = { - // create second commit - fs::write(repository.path().join("file.txt"), "content2").unwrap(); - controller - .create_commit(project_id, &branch1_id, "commit", None, false) - .await - .unwrap() - }; - - // push - controller - .push_virtual_branch(project_id, &branch1_id, false, None) - .await - .unwrap(); - - let oid3 = { - // create third commit - fs::write(repository.path().join("file.txt"), "content3").unwrap(); - controller - .create_commit(project_id, &branch1_id, "commit", None, false) - .await - .unwrap() - }; - - { - // should correctly detect pushed commits - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 3); - assert_eq!(branches[0].commits[0].id, oid3); - assert!(!branches[0].commits[0].is_remote); - assert_eq!(branches[0].commits[1].id, oid2); - assert!(branches[0].commits[1].is_remote); - assert_eq!(branches[0].commits[2].id, oid1); - assert!(branches[0].commits[2].is_remote); - } -} - -#[tokio::test] -async fn detect_integrated_commits() { - let Test { - repository, - project_id, - controller, - .. 
- } = &Test::default(); - - controller - .set_base_branch(project_id, &"refs/remotes/origin/master".parse().unwrap()) - .await - .unwrap(); - - let branch1_id = controller - .create_virtual_branch(project_id, &branch::BranchCreateRequest::default()) - .await - .unwrap(); - - let oid1 = { - // create first commit - fs::write(repository.path().join("file.txt"), "content").unwrap(); - controller - .create_commit(project_id, &branch1_id, "commit", None, false) - .await - .unwrap() - }; - - let oid2 = { - // create second commit - fs::write(repository.path().join("file.txt"), "content2").unwrap(); - controller - .create_commit(project_id, &branch1_id, "commit", None, false) - .await - .unwrap() - }; - - // push - controller - .push_virtual_branch(project_id, &branch1_id, false, None) - .await - .unwrap(); - - { - // merge branch upstream - let branch = controller - .list_virtual_branches(project_id) - .await - .unwrap() - .0 - .into_iter() - .find(|b| b.id == branch1_id) - .unwrap(); - repository.merge(&branch.upstream.as_ref().unwrap().name); - repository.fetch(); - } - - let oid3 = { - // create third commit - fs::write(repository.path().join("file.txt"), "content3").unwrap(); - controller - .create_commit(project_id, &branch1_id, "commit", None, false) - .await - .unwrap() - }; - - { - // should correctly detect pushed commits - let (branches, _, _) = controller.list_virtual_branches(project_id).await.unwrap(); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].id, branch1_id); - assert_eq!(branches[0].commits.len(), 3); - assert_eq!(branches[0].commits[0].id, oid3); - assert!(!branches[0].commits[0].is_integrated); - assert_eq!(branches[0].commits[1].id, oid2); - assert!(branches[0].commits[1].is_integrated); - assert_eq!(branches[0].commits[2].id, oid1); - assert!(branches[0].commits[2].is_integrated); - } -} diff --git a/gitbutler-app/tests/types/mod.rs b/gitbutler-app/tests/types/mod.rs deleted file mode 100644 index 98b97dd38..000000000 --- 
a/gitbutler-app/tests/types/mod.rs +++ /dev/null @@ -1,19 +0,0 @@ -use gitbutler_app::types::default_true::DefaultTrue; - -#[test] -#[allow(clippy::bool_assert_comparison)] -fn default_true() { - let default_true = DefaultTrue::default(); - assert!(default_true); - assert_eq!(default_true, true); - assert_eq!(!default_true, false); - assert!(!!default_true); - - if !(*default_true) { - unreachable!("default_true is false") - } - - let mut default_true = DefaultTrue::default(); - *default_true = false; - assert!(!default_true); -} diff --git a/gitbutler-app/tests/virtual_branches/branch/context.rs b/gitbutler-app/tests/virtual_branches/branch/context.rs deleted file mode 100644 index 215d6fbc6..000000000 --- a/gitbutler-app/tests/virtual_branches/branch/context.rs +++ /dev/null @@ -1,522 +0,0 @@ -use gitbutler_app::git::diff; -use gitbutler_app::virtual_branches::context::hunk_with_context; - -#[test] -fn replace_line_mid_file() { - let hunk_diff = r#"@@ -8 +8 @@ default = ["serde", "rusqlite"] --serde = ["dep:serde", "uuid/serde"] -+SERDE = ["dep:serde", "uuid/serde"] -"#; - let with_ctx = hunk_with_context( - hunk_diff, - 8, - 8, - false, - 3, - &file_lines(), - diff::ChangeType::Added, - ); - let expected = r#"@@ -5,7 +5,7 @@ - - [features] - default = ["serde", "rusqlite"] --serde = ["dep:serde", "uuid/serde"] -+SERDE = ["dep:serde", "uuid/serde"] - rusqlite = ["dep:rusqlite"] - - [dependencies] -"#; - assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected); - assert_eq!(with_ctx.old_start, 5); - assert_eq!(with_ctx.old_lines, 7); - assert_eq!(with_ctx.new_start, 5); - assert_eq!(with_ctx.new_lines, 7); -} - -#[test] -fn replace_line_top_file() { - let hunk_diff = r#"@@ -2 +2 @@ --name = "gitbutler-core" -+NAME = "gitbutler-core" -"#; - let with_ctx = hunk_with_context( - hunk_diff, - 2, - 2, - false, - 3, - &file_lines(), - diff::ChangeType::Added, - ); - assert_eq!( - with_ctx.diff.replace("\n \n", "\n\n"), - r#"@@ -1,5 +1,5 @@ - [package] --name = 
"gitbutler-core" -+NAME = "gitbutler-core" - version = "0.0.0" - edition = "2021" - -"# - ); - assert_eq!(with_ctx.old_start, 1); - assert_eq!(with_ctx.old_lines, 5); - assert_eq!(with_ctx.new_start, 1); - assert_eq!(with_ctx.new_lines, 5); -} - -#[test] -fn replace_line_start_file() { - let hunk_diff = "@@ -1 +1 @@ --[package] -+[PACKAGE] -"; - let with_ctx = hunk_with_context( - hunk_diff, - 1, - 1, - false, - 3, - &file_lines(), - diff::ChangeType::Added, - ); - assert_eq!( - with_ctx.diff.replace("\n \n", "\n\n"), - r#"@@ -1,4 +1,4 @@ --[package] -+[PACKAGE] - name = "gitbutler-core" - version = "0.0.0" - edition = "2021" -"# - ); - assert_eq!(with_ctx.old_start, 1); - assert_eq!(with_ctx.old_lines, 4); - assert_eq!(with_ctx.new_start, 1); - assert_eq!(with_ctx.new_lines, 4); -} - -#[test] -fn replace_line_bottom_file() { - let hunk_diff = "@@ -13 +13 @@ --serde = { workspace = true, optional = true } -+SERDE = { workspace = true, optional = true } -"; - let with_ctx = hunk_with_context( - hunk_diff, - 13, - 13, - false, - 3, - &file_lines(), - diff::ChangeType::Added, - ); - assert_eq!( - with_ctx.diff.replace("\n \n", "\n\n"), - r#"@@ -10,5 +10,5 @@ - - [dependencies] - rusqlite = { workspace = true, optional = true } --serde = { workspace = true, optional = true } -+SERDE = { workspace = true, optional = true } - uuid = { workspace = true, features = ["v4", "fast-rng"] } -"# - ); - assert_eq!(with_ctx.old_start, 10); - assert_eq!(with_ctx.old_lines, 5); - assert_eq!(with_ctx.new_start, 10); - assert_eq!(with_ctx.new_lines, 5); -} - -#[test] -fn replace_with_more_lines() { - let hunk_diff = r#"@@ -8 +8,4 @@ --serde = ["dep:serde", "uuid/serde"] -+one -+two -+three -+four -"#; - let with_ctx = hunk_with_context( - hunk_diff, - 8, - 8, - false, - 3, - &file_lines(), - diff::ChangeType::Added, - ); - assert_eq!( - with_ctx.diff.replace("\n \n", "\n\n"), - r#"@@ -5,7 +5,10 @@ - - [features] - default = ["serde", "rusqlite"] --serde = ["dep:serde", "uuid/serde"] 
-+one -+two -+three -+four - rusqlite = ["dep:rusqlite"] - - [dependencies] -"# - ); - assert_eq!(with_ctx.old_start, 5); - assert_eq!(with_ctx.old_lines, 7); - assert_eq!(with_ctx.new_start, 5); - assert_eq!(with_ctx.new_lines, 10); -} - -#[test] -fn replace_with_less_lines() { - let hunk_diff = r#"@@ -7,3 +7 @@ --default = ["serde", "rusqlite"] --serde = ["dep:serde", "uuid/serde"] --rusqlite = ["dep:rusqlite"] -+foo = ["foo"] -"#; - let with_ctx = hunk_with_context( - hunk_diff, - 7, - 7, - false, - 3, - &file_lines(), - diff::ChangeType::Added, - ); - assert_eq!( - with_ctx.diff.replace("\n \n", "\n\n"), - r#"@@ -4,9 +4,7 @@ - edition = "2021" - - [features] --default = ["serde", "rusqlite"] --serde = ["dep:serde", "uuid/serde"] --rusqlite = ["dep:rusqlite"] -+foo = ["foo"] - - [dependencies] - rusqlite = { workspace = true, optional = true } -"# - ); - assert_eq!(with_ctx.old_start, 4); - assert_eq!(with_ctx.old_lines, 9); - assert_eq!(with_ctx.new_start, 4); - assert_eq!(with_ctx.new_lines, 7); -} - -#[test] -fn empty_string_doesnt_panic() { - let hunk_diff = ""; - let with_ctx = hunk_with_context( - hunk_diff, - 1, - 1, - false, - 3, - &file_lines(), - diff::ChangeType::Added, - ); - assert_eq!(with_ctx.diff, ""); -} - -#[test] -fn removed_file() { - let hunk_diff = r#"@@ -1,14 +0,0 @@ --[package] --name = "gitbutler-core" --version = "0.0.0" --edition = "2021" -- --[features] --default = ["serde", "rusqlite"] --serde = ["dep:serde", "uuid/serde"] --rusqlite = ["dep:rusqlite"] -- --[dependencies] --rusqlite = { workspace = true, optional = true } --serde = { workspace = true, optional = true } --uuid = { workspace = true, features = ["v4", "fast-rng"] } -"#; - let with_ctx = hunk_with_context( - hunk_diff, - 1, - 0, - false, - 3, - &file_lines(), - diff::ChangeType::Added, - ); - assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), hunk_diff); - assert_eq!(with_ctx.old_start, 1); - assert_eq!(with_ctx.old_lines, 14); - assert_eq!(with_ctx.new_start, 0); - 
assert_eq!(with_ctx.new_lines, 0); -} -#[test] -fn new_file() { - let hunk_diff = "@@ -0,0 +1,5 @@ -+line 1 -+line 2 -+line 3 -+line 4 -+line 5 -"; - let with_ctx = hunk_with_context( - hunk_diff, - 0, - 1, - false, - 3, - &Vec::new(), - diff::ChangeType::Added, - ); - assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), hunk_diff); - assert_eq!(with_ctx.old_start, 0); - assert_eq!(with_ctx.old_lines, 0); - assert_eq!(with_ctx.new_start, 1); - assert_eq!(with_ctx.new_lines, 5); -} - -#[test] -fn only_add_lines() { - let hunk_diff = "@@ -8,0 +9,3 @@ -+one -+two -+three -"; - let with_ctx = hunk_with_context( - hunk_diff, - 8, - 9, - false, - 3, - &file_lines(), - diff::ChangeType::Added, - ); - let expected = r#"@@ -6,6 +6,9 @@ - [features] - default = ["serde", "rusqlite"] - serde = ["dep:serde", "uuid/serde"] -+one -+two -+three - rusqlite = ["dep:rusqlite"] - - [dependencies] -"#; - assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected); - assert_eq!(with_ctx.old_start, 6); - assert_eq!(with_ctx.old_lines, 6); - assert_eq!(with_ctx.new_start, 6); - assert_eq!(with_ctx.new_lines, 9); -} - -#[test] -fn only_add_lines_with_additions_below() { - let hunk_diff = "@@ -8,0 +13,3 @@ -+one -+two -+three -"; - let with_ctx = hunk_with_context( - hunk_diff, - 8, - 13, - false, - 3, - &file_lines(), - diff::ChangeType::Added, - ); - let expected = r#"@@ -6,6 +10,9 @@ - [features] - default = ["serde", "rusqlite"] - serde = ["dep:serde", "uuid/serde"] -+one -+two -+three - rusqlite = ["dep:rusqlite"] - - [dependencies] -"#; - assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected); - assert_eq!(with_ctx.old_start, 6); - assert_eq!(with_ctx.old_lines, 6); - assert_eq!(with_ctx.new_start, 10); - assert_eq!(with_ctx.new_lines, 9); -} - -#[test] -fn only_remove_lines() { - let hunk_diff = r#"@@ -7,3 +6,0 @@ --default = ["serde", "rusqlite"] --serde = ["dep:serde", "uuid/serde"] --rusqlite = ["dep:rusqlite"] -"#; - let expected = r#"@@ -4,9 +4,6 @@ - edition = "2021" - - 
[features] --default = ["serde", "rusqlite"] --serde = ["dep:serde", "uuid/serde"] --rusqlite = ["dep:rusqlite"] - - [dependencies] - rusqlite = { workspace = true, optional = true } -"#; - let with_ctx = hunk_with_context( - hunk_diff, - 7, - 6, - false, - 3, - &file_lines(), - diff::ChangeType::Added, - ); - assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected); - assert_eq!(with_ctx.old_start, 4); - assert_eq!(with_ctx.old_lines, 9); - assert_eq!(with_ctx.new_start, 4); - assert_eq!(with_ctx.new_lines, 6); -} - -#[test] -fn only_remove_lines_with_additions_below() { - let hunk_diff = r#"@@ -7,3 +10,0 @@ --default = ["serde", "rusqlite"] --serde = ["dep:serde", "uuid/serde"] --rusqlite = ["dep:rusqlite"] -"#; - let expected = r#"@@ -4,9 +8,6 @@ - edition = "2021" - - [features] --default = ["serde", "rusqlite"] --serde = ["dep:serde", "uuid/serde"] --rusqlite = ["dep:rusqlite"] - - [dependencies] - rusqlite = { workspace = true, optional = true } -"#; - let with_ctx = hunk_with_context( - hunk_diff, - 7, - 10, - false, - 3, - &file_lines(), - diff::ChangeType::Added, - ); - assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected); - assert_eq!(with_ctx.old_start, 4); - assert_eq!(with_ctx.old_lines, 9); - assert_eq!(with_ctx.new_start, 8); - assert_eq!(with_ctx.new_lines, 6); -} - -#[test] -fn weird_testcase() { - let hunk_diff = "@@ -11,2 +10,0 @@ -- -- @waiting_users = User.where(approved: false).count -"; - let with_ctx = hunk_with_context( - hunk_diff, - 11, - 10, - false, - 3, - &file_lines_2(), - diff::ChangeType::Added, - ); - let expected = "@@ -8,8 +8,6 @@ - .order(:created_at) - .page params[:page] - @total = @registrations.total_count -- -- @waiting_users = User.where(approved: false).count - end - - def invite -"; - assert_eq!(with_ctx.diff.replace("\n \n", "\n\n"), expected); - assert_eq!(with_ctx.old_start, 8); - assert_eq!(with_ctx.old_lines, 8); - assert_eq!(with_ctx.new_start, 8); - assert_eq!(with_ctx.new_lines, 6); -} - -#[test] -fn 
new_line_added() { - let hunk_diff = "@@ -2,0 +3 @@ alias( -+ newstuff -"; - let with_ctx = hunk_with_context( - hunk_diff, - 2, - 3, - false, - 3, - &file_lines_3(), - diff::ChangeType::Added, - ); - let expected = r#"@@ -1,4 +1,5 @@ - alias( - name = "rdeps", -+ newstuff - actual = "//java/com/videlov/rdeps:rdeps", - ) -"#; - assert_eq!(with_ctx.diff, expected); -} - -fn file_lines() -> Vec<&'static str> { - let file_lines_before = r#"[package] -name = "gitbutler-core" -version = "0.0.0" -edition = "2021" - -[features] -default = ["serde", "rusqlite"] -serde = ["dep:serde", "uuid/serde"] -rusqlite = ["dep:rusqlite"] - -[dependencies] -rusqlite = { workspace = true, optional = true } -serde = { workspace = true, optional = true } -uuid = { workspace = true, features = ["v4", "fast-rng"] } -"#; - file_lines_before.lines().collect::>() -} - -fn file_lines_2() -> Vec<&'static str> { - let file_lines_before = r#"class Admin::WaitingController < Admin::AdminController - def index - @registrations = Registration.where(invited_at: nil) - if params[:q] - @registrations = @registrations.where("email LIKE ?", "%#{params[:q]}%") - end - @registrations = @registrations.includes(:invite_code) - .order(:created_at) - .page params[:page] - @total = @registrations.total_count - - @waiting_users = User.where(approved: false).count - end - - def invite - if params[:id] - @registrations = Registration.where(id: params[:id]) -"#; - file_lines_before.lines().collect::>() -} - -fn file_lines_3() -> Vec<&'static str> { - let file_lines_before = r#"alias( - name = "rdeps", - actual = "//java/com/videlov/rdeps:rdeps", -) -"#; - file_lines_before.lines().collect::>() -} diff --git a/gitbutler-app/tests/virtual_branches/branch/file_ownership.rs b/gitbutler-app/tests/virtual_branches/branch/file_ownership.rs deleted file mode 100644 index 7d7ff0c36..000000000 --- a/gitbutler-app/tests/virtual_branches/branch/file_ownership.rs +++ /dev/null @@ -1,151 +0,0 @@ -use 
gitbutler_app::virtual_branches::branch::OwnershipClaim; - -#[test] -fn parse_ownership() { - let ownership: OwnershipClaim = "foo/bar.rs:1-2,4-5".parse().unwrap(); - assert_eq!( - ownership, - OwnershipClaim { - file_path: "foo/bar.rs".into(), - hunks: vec![(1..=2).into(), (4..=5).into()] - } - ); -} - -#[test] -fn parse_ownership_tricky_file_name() { - assert_eq!("file:name:1-2,4-5".parse::().unwrap(), { - OwnershipClaim { - file_path: "file:name".into(), - hunks: vec![(1..=2).into(), (4..=5).into()], - } - }); -} - -#[test] -fn parse_ownership_no_ranges() { - "foo/bar.rs".parse::().unwrap_err(); -} - -#[test] -fn ownership_to_from_string() { - let ownership = OwnershipClaim { - file_path: "foo/bar.rs".into(), - hunks: vec![(1..=2).into(), (4..=5).into()], - }; - assert_eq!(ownership.to_string(), "foo/bar.rs:1-2,4-5".to_string()); - assert_eq!( - ownership.to_string().parse::().unwrap(), - ownership - ); -} - -#[test] -fn plus() { - vec![ - ("file.txt:1-10", "another.txt:1-5", "file.txt:1-10"), - ("file.txt:1-10,3-14", "file.txt:3-14", "file.txt:3-14,1-10"), - ("file.txt:5-10", "file.txt:1-5", "file.txt:1-5,5-10"), - ("file.txt:1-10", "file.txt:1-5", "file.txt:1-5,1-10"), - ("file.txt:1-5,2-2", "file.txt:1-10", "file.txt:1-10,1-5,2-2"), - ( - "file.txt:1-10", - "file.txt:8-15,20-25", - "file.txt:20-25,8-15,1-10", - ), - ("file.txt:1-10", "file.txt:1-10", "file.txt:1-10"), - ("file.txt:1-10,3-15", "file.txt:1-10", "file.txt:1-10,3-15"), - ] - .into_iter() - .map(|(a, b, expected)| { - ( - a.parse::().unwrap(), - b.parse::().unwrap(), - expected.parse::().unwrap(), - ) - }) - .for_each(|(a, b, expected)| { - let got = a.plus(&b); - assert_eq!( - got, expected, - "{} plus {}, expected {}, got {}", - a, b, expected, got - ); - }); -} - -#[test] -fn minus() { - vec![ - ( - "file.txt:1-10", - "another.txt:1-5", - (None, Some("file.txt:1-10")), - ), - ( - "file.txt:1-10", - "file.txt:1-5", - (None, Some("file.txt:1-10")), - ), - ( - "file.txt:1-10", - "file.txt:11-15", 
- (None, Some("file.txt:1-10")), - ), - ( - "file.txt:1-10", - "file.txt:1-10", - (Some("file.txt:1-10"), None), - ), - ( - "file.txt:1-10,11-15", - "file.txt:11-15", - (Some("file.txt:11-15"), Some("file.txt:1-10")), - ), - ( - "file.txt:1-10,11-15,15-17", - "file.txt:1-10,15-17", - (Some("file.txt:1-10,15-17"), Some("file.txt:11-15")), - ), - ] - .into_iter() - .map(|(a, b, expected)| { - ( - a.parse::().unwrap(), - b.parse::().unwrap(), - ( - expected.0.map(|s| s.parse::().unwrap()), - expected.1.map(|s| s.parse::().unwrap()), - ), - ) - }) - .for_each(|(a, b, expected)| { - let got = a.minus(&b); - assert_eq!( - got, expected, - "{} minus {}, expected {:?}, got {:?}", - a, b, expected, got - ); - }); -} - -#[test] -fn equal() { - vec![ - ("file.txt:1-10", "file.txt:1-10", true), - ("file.txt:1-10", "file.txt:1-11", false), - ("file.txt:1-10,11-15", "file.txt:11-15,1-10", false), - ("file.txt:1-10,11-15", "file.txt:1-10,11-15", true), - ] - .into_iter() - .map(|(a, b, expected)| { - ( - a.parse::().unwrap(), - b.parse::().unwrap(), - expected, - ) - }) - .for_each(|(a, b, expected)| { - assert_eq!(a == b, expected, "{} == {}, expected {}", a, b, expected); - }); -} diff --git a/gitbutler-app/tests/virtual_branches/branch/hunk.rs b/gitbutler-app/tests/virtual_branches/branch/hunk.rs deleted file mode 100644 index d91d7313a..000000000 --- a/gitbutler-app/tests/virtual_branches/branch/hunk.rs +++ /dev/null @@ -1,89 +0,0 @@ -use gitbutler_app::virtual_branches::branch::Hunk; - -#[test] -fn to_from_string() { - let hunk = "1-2".parse::().unwrap(); - assert_eq!("1-2", hunk.to_string()); -} - -#[test] -fn parse_invalid() { - "3-2".parse::().unwrap_err(); -} - -#[test] -fn parse_with_hash() { - assert_eq!( - "2-3-hash".parse::().unwrap(), - Hunk::new(2, 3, Some("hash".to_string()), None).unwrap() - ); -} - -#[test] -fn parse_with_timestamp() { - assert_eq!( - "2-3--123".parse::().unwrap(), - Hunk::new(2, 3, None, Some(123)).unwrap() - ); -} - -#[test] -fn 
parse_invalid_2() { - "3-2".parse::().unwrap_err(); -} - -#[test] -fn to_string_no_hash() { - assert_eq!( - "1-2--123", - Hunk::new(1, 2, None, Some(123)).unwrap().to_string() - ); -} - -#[test] -fn eq() { - for (a, b, expected) in vec![ - ( - "1-2".parse::().unwrap(), - "1-2".parse::().unwrap(), - true, - ), - ( - "1-2".parse::().unwrap(), - "2-3".parse::().unwrap(), - false, - ), - ( - "1-2-abc".parse::().unwrap(), - "1-2-abc".parse::().unwrap(), - true, - ), - ( - "1-2-abc".parse::().unwrap(), - "2-3-abc".parse::().unwrap(), - false, - ), - ( - "1-2".parse::().unwrap(), - "1-2-abc".parse::().unwrap(), - true, - ), - ( - "1-2-abc".parse::().unwrap(), - "1-2".parse::().unwrap(), - true, - ), - ( - "1-2-abc".parse::().unwrap(), - "1-2-bcd".parse::().unwrap(), - false, - ), - ( - "1-2-abc".parse::().unwrap(), - "2-3-bcd".parse::().unwrap(), - false, - ), - ] { - assert_eq!(a == b, expected, "comapring {} and {}", a, b); - } -} diff --git a/gitbutler-app/tests/virtual_branches/branch/mod.rs b/gitbutler-app/tests/virtual_branches/branch/mod.rs deleted file mode 100644 index 93303001b..000000000 --- a/gitbutler-app/tests/virtual_branches/branch/mod.rs +++ /dev/null @@ -1,8 +0,0 @@ -use gitbutler_app::virtual_branches::Branch; - -mod context; -mod file_ownership; -mod hunk; -mod ownership; -mod reader; -mod writer; diff --git a/gitbutler-app/tests/virtual_branches/branch/ownership.rs b/gitbutler-app/tests/virtual_branches/branch/ownership.rs deleted file mode 100644 index f4889e48c..000000000 --- a/gitbutler-app/tests/virtual_branches/branch/ownership.rs +++ /dev/null @@ -1,284 +0,0 @@ -use gitbutler_app::virtual_branches::branch::{ - reconcile_claims, BranchOwnershipClaims, Hunk, OwnershipClaim, -}; -use gitbutler_app::virtual_branches::Branch; - -use std::{path::PathBuf, vec}; - -#[test] -fn reconcile_ownership_simple() { - let branch_a = Branch { - name: "a".to_string(), - ownership: BranchOwnershipClaims { - claims: vec![OwnershipClaim { - file_path: 
PathBuf::from("foo"), - hunks: vec![ - Hunk { - start: 1, - end: 3, - hash: Some("1,3".to_string()), - timestamp_ms: None, - }, - Hunk { - start: 4, - end: 6, - hash: Some("4,6".to_string()), - timestamp_ms: None, - }, - ], - }], - }, - applied: true, - ..Default::default() - }; - let branch_b = Branch { - name: "b".to_string(), - ownership: BranchOwnershipClaims { - claims: vec![OwnershipClaim { - file_path: PathBuf::from("foo"), - hunks: vec![Hunk { - start: 7, - end: 9, - hash: Some("7,9".to_string()), - timestamp_ms: None, - }], - }], - }, - applied: true, - ..Default::default() - }; - let all_branches: Vec = vec![branch_a.clone(), branch_b.clone()]; - let claim: Vec = vec![OwnershipClaim { - file_path: PathBuf::from("foo"), - hunks: vec![ - Hunk { - start: 4, - end: 6, - hash: Some("4,6".to_string()), - timestamp_ms: None, - }, - Hunk { - start: 7, - end: 9, - hash: Some("9,7".to_string()), - timestamp_ms: None, - }, - ], - }]; - let claim_outcomes = reconcile_claims(all_branches.clone(), &branch_b, &claim).unwrap(); - assert_eq!(claim_outcomes.len(), all_branches.len()); - assert_eq!(claim_outcomes[0].updated_branch.id, branch_a.id); - assert_eq!(claim_outcomes[1].updated_branch.id, branch_b.id); - - assert_eq!( - claim_outcomes[0].updated_branch.ownership, - BranchOwnershipClaims { - claims: vec![OwnershipClaim { - file_path: PathBuf::from("foo"), - hunks: vec![Hunk { - start: 1, - end: 3, - hash: Some("1,3".to_string()), - timestamp_ms: None, - },], - }], - } - ); - - assert_eq!( - claim_outcomes[1].updated_branch.ownership, - BranchOwnershipClaims { - claims: vec![OwnershipClaim { - file_path: PathBuf::from("foo"), - hunks: vec![ - Hunk { - start: 4, - end: 6, - hash: Some("4,6".to_string()), - timestamp_ms: None, - }, - Hunk { - start: 7, - end: 9, - hash: Some("9,7".to_string()), - timestamp_ms: None, - }, - ], - }], - } - ); -} - -#[test] -fn ownership() { - let ownership = "src/main.rs:0-100\nsrc/main2.rs:200-300".parse::(); - 
assert!(ownership.is_ok()); - let ownership = ownership.unwrap(); - assert_eq!(ownership.claims.len(), 2); - assert_eq!( - ownership.claims[0], - "src/main.rs:0-100".parse::().unwrap() - ); - assert_eq!( - ownership.claims[1], - "src/main2.rs:200-300".parse::().unwrap() - ); -} - -#[test] -fn ownership_2() { - let ownership = "src/main.rs:0-100\nsrc/main2.rs:200-300".parse::(); - assert!(ownership.is_ok()); - let ownership = ownership.unwrap(); - assert_eq!(ownership.claims.len(), 2); - assert_eq!( - ownership.claims[0], - "src/main.rs:0-100".parse::().unwrap() - ); - assert_eq!( - ownership.claims[1], - "src/main2.rs:200-300".parse::().unwrap() - ); -} - -#[test] -fn put() { - let mut ownership = "src/main.rs:0-100" - .parse::() - .unwrap(); - ownership.put(&"src/main.rs:200-300".parse::().unwrap()); - assert_eq!(ownership.claims.len(), 1); - assert_eq!( - ownership.claims[0], - "src/main.rs:200-300,0-100" - .parse::() - .unwrap() - ); -} - -#[test] -fn put_2() { - let mut ownership = "src/main.rs:0-100" - .parse::() - .unwrap(); - ownership.put(&"src/main.rs2:200-300".parse::().unwrap()); - assert_eq!(ownership.claims.len(), 2); - assert_eq!( - ownership.claims[0], - "src/main.rs2:200-300".parse::().unwrap() - ); - assert_eq!( - ownership.claims[1], - "src/main.rs:0-100".parse::().unwrap() - ); -} - -#[test] -fn put_3() { - let mut ownership = "src/main.rs:0-100\nsrc/main2.rs:100-200" - .parse::() - .unwrap(); - ownership.put(&"src/main2.rs:200-300".parse::().unwrap()); - assert_eq!(ownership.claims.len(), 2); - assert_eq!( - ownership.claims[0], - "src/main2.rs:200-300,100-200" - .parse::() - .unwrap() - ); - assert_eq!( - ownership.claims[1], - "src/main.rs:0-100".parse::().unwrap() - ); -} - -#[test] -fn put_4() { - let mut ownership = "src/main.rs:0-100\nsrc/main2.rs:100-200" - .parse::() - .unwrap(); - ownership.put(&"src/main2.rs:100-200".parse::().unwrap()); - assert_eq!(ownership.claims.len(), 2); - assert_eq!( - ownership.claims[0], - 
"src/main2.rs:100-200".parse::().unwrap() - ); - assert_eq!( - ownership.claims[1], - "src/main.rs:0-100".parse::().unwrap() - ); -} - -#[test] -fn put_7() { - let mut ownership = "src/main.rs:100-200" - .parse::() - .unwrap(); - ownership.put(&"src/main.rs:100-200".parse::().unwrap()); - assert_eq!(ownership.claims.len(), 1); - assert_eq!( - ownership.claims[0], - "src/main.rs:100-200".parse::().unwrap() - ); -} - -#[test] -fn take_1() { - let mut ownership = "src/main.rs:100-200,200-300" - .parse::() - .unwrap(); - let taken = ownership.take(&"src/main.rs:100-200".parse::().unwrap()); - assert_eq!(ownership.claims.len(), 1); - assert_eq!( - ownership.claims[0], - "src/main.rs:200-300".parse::().unwrap() - ); - assert_eq!( - taken, - vec!["src/main.rs:100-200".parse::().unwrap()] - ); -} - -#[test] -fn equal() { - for (a, b, expected) in vec![ - ( - "src/main.rs:100-200" - .parse::() - .unwrap(), - "src/main.rs:100-200" - .parse::() - .unwrap(), - true, - ), - ( - "src/main.rs:100-200\nsrc/main1.rs:300-400\n" - .parse::() - .unwrap(), - "src/main.rs:100-200" - .parse::() - .unwrap(), - false, - ), - ( - "src/main.rs:100-200\nsrc/main1.rs:300-400\n" - .parse::() - .unwrap(), - "src/main.rs:100-200\nsrc/main1.rs:300-400\n" - .parse::() - .unwrap(), - true, - ), - ( - "src/main.rs:300-400\nsrc/main1.rs:100-200\n" - .parse::() - .unwrap(), - "src/main1.rs:100-200\nsrc/main.rs:300-400\n" - .parse::() - .unwrap(), - false, - ), - ] { - assert_eq!(a == b, expected, "{:#?} == {:#?}", a, b); - } -} diff --git a/gitbutler-app/tests/virtual_branches/branch/reader.rs b/gitbutler-app/tests/virtual_branches/branch/reader.rs deleted file mode 100644 index f8c4a04d2..000000000 --- a/gitbutler-app/tests/virtual_branches/branch/reader.rs +++ /dev/null @@ -1,98 +0,0 @@ -use std::sync::atomic::{AtomicUsize, Ordering}; - -use anyhow::Result; -use once_cell::sync::Lazy; - -use crate::{Case, Suite}; -use gitbutler_app::virtual_branches::branch::BranchOwnershipClaims; -use 
gitbutler_app::virtual_branches::{branch, Branch, BranchId}; - -static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); - -fn test_branch() -> Branch { - TEST_INDEX.fetch_add(1, Ordering::Relaxed); - - Branch { - id: BranchId::generate(), - name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), - notes: String::new(), - applied: true, - order: TEST_INDEX.load(Ordering::Relaxed), - upstream: Some( - format!( - "refs/remotes/origin/upstream_{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - ), - upstream_head: Some( - format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - ), - created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, - updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, - head: format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - tree: format!( - "0123456789abcdef0123456789abcdef012345{}", - TEST_INDEX.load(Ordering::Relaxed) + 10 - ) - .parse() - .unwrap(), - ownership: BranchOwnershipClaims { - claims: vec![format!("file/{}:1-2", TEST_INDEX.load(Ordering::Relaxed)) - .parse() - .unwrap()], - }, - selected_for_changes: Some(1), - } -} - -#[test] -fn read_not_found() -> Result<()> { - let suite = Suite::default(); - let Case { gb_repository, .. } = &suite.new_case(); - - let session = gb_repository.get_or_create_current_session()?; - let session_reader = gitbutler_app::sessions::Reader::open(gb_repository, &session)?; - - let reader = branch::Reader::new(&session_reader); - let result = reader.read(&BranchId::generate()); - assert!(result.is_err()); - assert_eq!(result.unwrap_err().to_string(), "file not found"); - - Ok(()) -} - -#[test] -fn read_override() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. 
- } = &suite.new_case(); - - let mut branch = test_branch(); - - let writer = branch::Writer::new(gb_repository, project.gb_dir())?; - writer.write(&mut branch)?; - - let session = gb_repository.get_current_session()?.unwrap(); - let session_reader = gitbutler_app::sessions::Reader::open(gb_repository, &session)?; - - let reader = branch::Reader::new(&session_reader); - - assert_eq!(branch, reader.read(&branch.id).unwrap()); - - Ok(()) -} diff --git a/gitbutler-app/tests/virtual_branches/branch/writer.rs b/gitbutler-app/tests/virtual_branches/branch/writer.rs deleted file mode 100644 index 34e1f9b1b..000000000 --- a/gitbutler-app/tests/virtual_branches/branch/writer.rs +++ /dev/null @@ -1,220 +0,0 @@ -use std::{ - fs, - sync::atomic::{AtomicUsize, Ordering}, -}; - -use anyhow::Context; -use gitbutler_app::virtual_branches::branch; -use once_cell::sync::Lazy; - -use crate::{Case, Suite}; - -use self::branch::BranchId; - -use super::*; - -static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); - -fn new_test_branch() -> Branch { - TEST_INDEX.fetch_add(1, Ordering::Relaxed); - - Branch { - id: BranchId::generate(), - name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), - notes: String::new(), - applied: true, - upstream: Some( - format!( - "refs/remotes/origin/upstream_{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - ), - upstream_head: None, - created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, - updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, - head: format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - tree: format!( - "0123456789abcdef0123456789abcdef012345{}", - TEST_INDEX.load(Ordering::Relaxed) + 10 - ) - .parse() - .unwrap(), - ownership: gitbutler_app::virtual_branches::branch::BranchOwnershipClaims { - claims: vec![gitbutler_app::virtual_branches::branch::OwnershipClaim { - file_path: format!("file/{}:1-2", 
TEST_INDEX.load(Ordering::Relaxed)).into(), - hunks: vec![], - }], - }, - order: TEST_INDEX.load(Ordering::Relaxed), - selected_for_changes: Some(1), - } -} - -#[test] -fn write_branch() -> anyhow::Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. - } = &suite.new_case(); - - let mut branch = new_test_branch(); - - let writer = branch::Writer::new(gb_repository, project.gb_dir())?; - writer.write(&mut branch)?; - - let root = gb_repository - .root() - .join("branches") - .join(branch.id.to_string()); - - assert_eq!( - fs::read_to_string(root.join("meta").join("name").to_str().unwrap()) - .context("Failed to read branch name")?, - branch.name - ); - assert_eq!( - fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())? - .parse::() - .context("Failed to read branch applied")?, - branch.applied - ); - assert_eq!( - fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap()) - .context("Failed to read branch upstream")?, - branch.upstream.clone().unwrap().to_string() - ); - assert_eq!( - fs::read_to_string( - root.join("meta") - .join("created_timestamp_ms") - .to_str() - .unwrap() - ) - .context("Failed to read branch created timestamp")? - .parse::() - .context("Failed to parse branch created timestamp")?, - branch.created_timestamp_ms - ); - assert_eq!( - fs::read_to_string( - root.join("meta") - .join("updated_timestamp_ms") - .to_str() - .unwrap() - ) - .context("Failed to read branch updated timestamp")? - .parse::() - .context("Failed to parse branch updated timestamp")?, - branch.updated_timestamp_ms - ); - - writer.delete(&branch)?; - fs::read_dir(root).unwrap_err(); - - Ok(()) -} - -#[test] -fn should_create_session() -> anyhow::Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. 
- } = &suite.new_case(); - - let mut branch = new_test_branch(); - - let writer = branch::Writer::new(gb_repository, project.gb_dir())?; - writer.write(&mut branch)?; - - assert!(gb_repository.get_current_session()?.is_some()); - - Ok(()) -} - -#[test] -fn should_update() -> anyhow::Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. - } = &suite.new_case(); - - let mut branch = new_test_branch(); - - let writer = branch::Writer::new(gb_repository, project.gb_dir())?; - writer.write(&mut branch)?; - - let mut updated_branch = Branch { - name: "updated_name".to_string(), - applied: false, - upstream: Some("refs/remotes/origin/upstream_updated".parse().unwrap()), - created_timestamp_ms: 2, - updated_timestamp_ms: 3, - ownership: gitbutler_app::virtual_branches::branch::BranchOwnershipClaims { - claims: vec![], - }, - ..branch.clone() - }; - - writer.write(&mut updated_branch)?; - - let root = gb_repository - .root() - .join("branches") - .join(branch.id.to_string()); - - assert_eq!( - fs::read_to_string(root.join("meta").join("name").to_str().unwrap()) - .context("Failed to read branch name")?, - updated_branch.name - ); - assert_eq!( - fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())? - .parse::() - .context("Failed to read branch applied")?, - updated_branch.applied - ); - assert_eq!( - fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap()) - .context("Failed to read branch upstream")?, - updated_branch.upstream.unwrap().to_string() - ); - assert_eq!( - fs::read_to_string( - root.join("meta") - .join("created_timestamp_ms") - .to_str() - .unwrap() - ) - .context("Failed to read branch created timestamp")? - .parse::() - .context("Failed to parse branch created timestamp")?, - updated_branch.created_timestamp_ms - ); - assert_eq!( - fs::read_to_string( - root.join("meta") - .join("updated_timestamp_ms") - .to_str() - .unwrap() - ) - .context("Failed to read branch updated timestamp")? 
- .parse::() - .context("Failed to parse branch updated timestamp")?, - updated_branch.updated_timestamp_ms - ); - - Ok(()) -} diff --git a/gitbutler-app/tests/virtual_branches/iterator.rs b/gitbutler-app/tests/virtual_branches/iterator.rs deleted file mode 100644 index fbe0809df..000000000 --- a/gitbutler-app/tests/virtual_branches/iterator.rs +++ /dev/null @@ -1,117 +0,0 @@ -use std::sync::atomic::{AtomicUsize, Ordering}; - -use anyhow::Result; -use gitbutler_app::virtual_branches; -use once_cell::sync::Lazy; - -use crate::{Case, Suite}; - -static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); - -fn new_test_branch() -> virtual_branches::branch::Branch { - TEST_INDEX.fetch_add(1, Ordering::Relaxed); - - virtual_branches::branch::Branch { - id: virtual_branches::BranchId::generate(), - name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), - notes: String::new(), - applied: true, - upstream: Some( - format!( - "refs/remotes/origin/upstream_{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - ), - upstream_head: None, - created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, - updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, - head: format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - tree: format!( - "0123456789abcdef0123456789abcdef012345{}", - TEST_INDEX.load(Ordering::Relaxed) + 10 - ) - .parse() - .unwrap(), - ownership: virtual_branches::branch::BranchOwnershipClaims::default(), - order: TEST_INDEX.load(Ordering::Relaxed), - selected_for_changes: Some(1), - } -} - -static TEST_TARGET_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); - -fn new_test_target() -> virtual_branches::target::Target { - virtual_branches::target::Target { - branch: format!( - "refs/remotes/branch name{}/remote name {}", - TEST_TARGET_INDEX.load(Ordering::Relaxed), - TEST_TARGET_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - 
remote_url: format!("remote url {}", TEST_TARGET_INDEX.load(Ordering::Relaxed)), - sha: format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_TARGET_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - } -} - -#[test] -fn empty_iterator() -> Result<()> { - let suite = Suite::default(); - let Case { gb_repository, .. } = &suite.new_case(); - - let session = gb_repository.get_or_create_current_session()?; - let session_reader = gitbutler_app::sessions::Reader::open(gb_repository, &session)?; - - let iter = virtual_branches::Iterator::new(&session_reader)?; - - assert_eq!(iter.count(), 0); - - Ok(()) -} - -#[test] -fn iterate_all() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. - } = &suite.new_case(); - - let target_writer = - gitbutler_app::virtual_branches::target::Writer::new(gb_repository, project.gb_dir())?; - target_writer.write_default(&new_test_target())?; - - let branch_writer = - gitbutler_app::virtual_branches::branch::Writer::new(gb_repository, project.gb_dir())?; - let mut branch_1 = new_test_branch(); - branch_writer.write(&mut branch_1)?; - let mut branch_2 = new_test_branch(); - branch_writer.write(&mut branch_2)?; - let mut branch_3 = new_test_branch(); - branch_writer.write(&mut branch_3)?; - - let session = gb_repository.get_current_session()?.unwrap(); - let session_reader = gitbutler_app::sessions::Reader::open(gb_repository, &session)?; - - let iter = virtual_branches::Iterator::new(&session_reader)? 
- .collect::, gitbutler_app::reader::Error>>()?; - assert_eq!(iter.len(), 3); - assert!(iter.contains(&branch_1)); - assert!(iter.contains(&branch_2)); - assert!(iter.contains(&branch_3)); - - Ok(()) -} diff --git a/gitbutler-app/tests/virtual_branches/mod.rs b/gitbutler-app/tests/virtual_branches/mod.rs deleted file mode 100644 index b928e3c63..000000000 --- a/gitbutler-app/tests/virtual_branches/mod.rs +++ /dev/null @@ -1,2549 +0,0 @@ -mod branch; -mod iterator; -mod target; - -use std::{collections::HashMap, io::Write}; - -use anyhow::{Context, Result}; -use pretty_assertions::assert_eq; -use std::path::{Path, PathBuf}; -#[cfg(target_family = "unix")] -use std::{ - fs::Permissions, - os::unix::{fs::symlink, prelude::*}, -}; - -use crate::{commit_all, empty_bare_repository, Case, Suite}; -use gitbutler_app::{ - gb_repository, git, project_repository, reader, sessions, virtual_branches, - virtual_branches::errors::CommitError, -}; - -use gitbutler_app::virtual_branches::branch::{BranchCreateRequest, BranchOwnershipClaims}; -use gitbutler_app::virtual_branches::integration::verify_branch; -use gitbutler_app::virtual_branches::{ - apply_branch, commit, create_virtual_branch, is_remote_branch_mergeable, - is_virtual_branch_mergeable, list_remote_branches, merge_virtual_branch_upstream, - unapply_ownership, update_branch, -}; - -pub fn set_test_target( - gb_repo: &gb_repository::Repository, - project_repository: &project_repository::Repository, -) -> Result<()> { - let (remote_repo, _tmp) = empty_bare_repository(); - let mut remote = project_repository - .git_repository - .remote( - "origin", - &remote_repo.path().to_str().unwrap().parse().unwrap(), - ) - .expect("failed to add remote"); - remote.push(&["refs/heads/master:refs/heads/master"], None)?; - - virtual_branches::target::Writer::new(gb_repo, project_repository.project().gb_dir())? 
- .write_default(&virtual_branches::target::Target { - branch: "refs/remotes/origin/master".parse().unwrap(), - remote_url: remote_repo.path().to_str().unwrap().parse().unwrap(), - sha: remote_repo.head().unwrap().target().unwrap(), - }) - .expect("failed to write target"); - - virtual_branches::integration::update_gitbutler_integration(gb_repo, project_repository) - .expect("failed to update integration"); - - Ok(()) -} - -#[test] -fn commit_on_branch_then_change_file_then_get_status() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - project_repository, - gb_repository, - .. - } = &suite.new_case_with_files(HashMap::from([ - (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"), - (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"), - ])); - - set_test_target(gb_repository, project_repository)?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "line0\nline1\nline2\nline3\nline4\n", - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches[0]; - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.commits.len(), 0); - - // commit - commit( - gb_repository, - project_repository, - &branch1_id, - "test commit", - None, - None, - None, - false, - )?; - - // status (no files) - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches[0]; - assert_eq!(branch.files.len(), 0); - assert_eq!(branch.commits.len(), 1); - - std::fs::write( - Path::new(&project.path).join("test2.txt"), - "line5\nline6\nlineBLAH\nline7\nline8\n", - )?; - - // should have just the last change now, the other line is committed - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, 
project_repository)?; - let branch = &branches[0]; - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.commits.len(), 1); - - Ok(()) -} - -#[test] -fn signed_commit() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - gb_repository, - project_repository, - .. - } = &suite.new_case_with_files(HashMap::from([ - (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"), - (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"), - ])); - - set_test_target(gb_repository, project_repository)?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "line0\nline1\nline2\nline3\nline4\n", - )?; - - let mut config = project_repository - .git_repository - .config() - .with_context(|| "failed to get config")?; - config.set_str("gitbutler.signCommits", "true")?; - - // commit - commit( - gb_repository, - project_repository, - &branch1_id, - "test commit", - None, - Some(suite.keys.get_or_create()?).as_ref(), - None, - false, - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository).unwrap(); - let commit_id = &branches[0].commits[0].id; - let commit_obj = project_repository.git_repository.find_commit(*commit_id)?; - // check the raw_header contains the string "SSH SIGNATURE" - assert!(commit_obj.raw_header().unwrap().contains("SSH SIGNATURE")); - - Ok(()) -} - -#[test] -fn track_binary_files() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. 
- } = &suite.new_case(); - - let file_path = Path::new("test.txt"); - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\n", - )?; - let file_path2 = Path::new("test2.txt"); - std::fs::write( - Path::new(&project.path).join(file_path2), - "line5\nline6\nline7\nline8\n", - )?; - // add a binary file - let image_data: [u8; 12] = [ - 255, 0, 0, // Red pixel - 0, 0, 255, // Blue pixel - 255, 255, 0, // Yellow pixel - 0, 255, 0, // Green pixel - ]; - let mut file = std::fs::File::create(Path::new(&project.path).join("image.bin"))?; - file.write_all(&image_data)?; - commit_all(&project_repository.git_repository); - - set_test_target(gb_repository, project_repository)?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - // test file change - std::fs::write( - Path::new(&project.path).join(file_path2), - "line5\nline6\nline7\nline8\nline9\n", - )?; - - // add a binary file - let image_data: [u8; 12] = [ - 255, 0, 0, // Red pixel - 0, 255, 0, // Green pixel - 0, 0, 255, // Blue pixel - 255, 255, 0, // Yellow pixel - ]; - let mut file = std::fs::File::create(Path::new(&project.path).join("image.bin"))?; - file.write_all(&image_data)?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches[0]; - assert_eq!(branch.files.len(), 2); - let img_file = &branch - .files - .iter() - .find(|b| b.path.as_os_str() == "image.bin") - .unwrap(); - assert!(img_file.binary); - assert_eq!( - img_file.hunks[0].diff, - "944996dd82015a616247c72b251e41661e528ae1" - ); - - // commit - commit( - gb_repository, - project_repository, - &branch1_id, - "test commit", - None, - None, - None, - false, - )?; - - // status (no files) - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository).unwrap(); - let commit_id = 
&branches[0].commits[0].id; - let commit_obj = project_repository.git_repository.find_commit(*commit_id)?; - let tree = commit_obj.tree()?; - let files = tree_to_entry_list(&project_repository.git_repository, &tree); - assert_eq!(files[0].0, "image.bin"); - assert_eq!(files[0].3, "944996dd82015a616247c72b251e41661e528ae1"); - - let image_data: [u8; 12] = [ - 0, 255, 0, // Green pixel - 255, 0, 0, // Red pixel - 255, 255, 0, // Yellow pixel - 0, 0, 255, // Blue pixel - ]; - let mut file = std::fs::File::create(Path::new(&project.path).join("image.bin"))?; - file.write_all(&image_data)?; - - // commit - commit( - gb_repository, - project_repository, - &branch1_id, - "test commit", - None, - None, - None, - false, - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository).unwrap(); - let commit_id = &branches[0].commits[0].id; - // get tree from commit_id - let commit_obj = project_repository.git_repository.find_commit(*commit_id)?; - let tree = commit_obj.tree()?; - let files = tree_to_entry_list(&project_repository.git_repository, &tree); - - assert_eq!(files[0].0, "image.bin"); - assert_eq!(files[0].3, "ea6901a04d1eed6ebf6822f4360bda9f008fa317"); - - Ok(()) -} - -#[test] -fn create_branch_with_ownership() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - project_repository, - gb_repository, - .. 
- } = &suite.new_case(); - - set_test_target(gb_repository, project_repository)?; - - let file_path = Path::new("test.txt"); - std::fs::write(Path::new(&project.path).join(file_path), "line1\nline2\n").unwrap(); - - let branch0 = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch"); - - virtual_branches::get_status_by_branch(gb_repository, project_repository) - .expect("failed to get status"); - - let current_session = gb_repository.get_or_create_current_session().unwrap(); - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session).unwrap(); - let branch_reader = virtual_branches::branch::Reader::new(¤t_session_reader); - let branch0 = branch_reader.read(&branch0.id).unwrap(); - - let branch1 = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest { - ownership: Some(branch0.ownership), - ..Default::default() - }, - ) - .expect("failed to create virtual branch"); - - let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) - .expect("failed to get status") - .0; - - let files_by_branch_id = statuses - .iter() - .map(|(branch, files)| (branch.id, files)) - .collect::>(); - - assert_eq!(files_by_branch_id.len(), 2); - assert_eq!(files_by_branch_id[&branch0.id].len(), 0); - assert_eq!(files_by_branch_id[&branch1.id].len(), 1); - - Ok(()) -} - -#[test] -fn create_branch_in_the_middle() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - gb_repository, - .. 
- } = &suite.new_case(); - - set_test_target(gb_repository, project_repository)?; - - create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch"); - create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch"); - create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest { - order: Some(1), - ..Default::default() - }, - ) - .expect("failed to create virtual branch"); - - let current_session = gb_repository.get_or_create_current_session()?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session)?; - - let mut branches = virtual_branches::Iterator::new(¤t_session_reader)? - .collect::, reader::Error>>() - .expect("failed to read branches"); - branches.sort_by_key(|b| b.order); - assert_eq!(branches.len(), 3); - assert_eq!(branches[0].name, "Virtual branch"); - assert_eq!(branches[1].name, "Virtual branch 2"); - assert_eq!(branches[2].name, "Virtual branch 1"); - - Ok(()) -} - -#[test] -fn create_branch_no_arguments() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - gb_repository, - .. - } = &suite.new_case(); - - set_test_target(gb_repository, project_repository)?; - - create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch"); - - let current_session = gb_repository.get_or_create_current_session()?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session)?; - - let branches = virtual_branches::Iterator::new(¤t_session_reader)? 
- .collect::, reader::Error>>() - .expect("failed to read branches"); - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].name, "Virtual branch"); - assert!(branches[0].applied); - assert_eq!(branches[0].ownership, BranchOwnershipClaims::default()); - assert_eq!(branches[0].order, 0); - - Ok(()) -} - -#[test] -fn hunk_expantion() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. - } = &suite.new_case(); - - set_test_target(gb_repository, project_repository)?; - - let file_path = Path::new("test.txt"); - std::fs::write(Path::new(&project.path).join(file_path), "line1\nline2\n")?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - let branch2_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) - .expect("failed to get status") - .0; - - let files_by_branch_id = statuses - .iter() - .map(|(branch, files)| (branch.id, files)) - .collect::>(); - - assert_eq!(files_by_branch_id.len(), 2); - assert_eq!(files_by_branch_id[&branch1_id].len(), 1); - assert_eq!(files_by_branch_id[&branch2_id].len(), 0); - - // even though selected branch has changed - update_branch( - gb_repository, - project_repository, - virtual_branches::branch::BranchUpdateRequest { - id: branch1_id, - order: Some(1), - ..Default::default() - }, - )?; - update_branch( - gb_repository, - project_repository, - virtual_branches::branch::BranchUpdateRequest { - id: branch2_id, - order: Some(0), - ..Default::default() - }, - )?; - - // a slightly different hunk should still go to the same branch - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\n", - )?; - - let statuses = 
virtual_branches::get_status_by_branch(gb_repository, project_repository) - .expect("failed to get status") - .0; - let files_by_branch_id = statuses - .iter() - .map(|(branch, files)| (branch.id, files)) - .collect::>(); - - assert_eq!(files_by_branch_id.len(), 2); - assert_eq!(files_by_branch_id[&branch1_id].len(), 1); - assert_eq!(files_by_branch_id[&branch2_id].len(), 0); - - Ok(()) -} - -#[test] -fn get_status_files_by_branch_no_hunks_no_branches() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - gb_repository, - .. - } = &suite.new_case(); - - set_test_target(gb_repository, project_repository)?; - - let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) - .expect("failed to get status") - .0; - - assert_eq!(statuses.len(), 0); - - Ok(()) -} - -#[test] -fn get_status_files_by_branch() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. - } = &suite.new_case(); - - set_test_target(gb_repository, project_repository)?; - - let file_path = Path::new("test.txt"); - std::fs::write(Path::new(&project.path).join(file_path), "line1\nline2\n")?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - let branch2_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) - .expect("failed to get status") - .0; - let files_by_branch_id = statuses - .iter() - .map(|(branch, files)| (branch.id, files)) - .collect::>(); - - assert_eq!(files_by_branch_id.len(), 2); - assert_eq!(files_by_branch_id[&branch1_id].len(), 1); - assert_eq!(files_by_branch_id[&branch2_id].len(), 0); - - Ok(()) -} - -#[test] -fn move_hunks_multiple_sources() -> 
Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. - } = &suite.new_case_with_files(HashMap::from([( - PathBuf::from("test.txt"), - "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\n", - )])); - - set_test_target(gb_repository, project_repository)?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - let branch2_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - let branch3_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "line0\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\n", - )?; - - let current_session = gb_repository.get_or_create_current_session()?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session)?; - let branch_reader = virtual_branches::branch::Reader::new(¤t_session_reader); - let branch_writer = virtual_branches::branch::Writer::new(gb_repository, project.gb_dir())?; - let mut branch2 = branch_reader.read(&branch2_id)?; - branch2.ownership = BranchOwnershipClaims { - claims: vec!["test.txt:1-5".parse()?], - }; - branch_writer.write(&mut branch2)?; - let mut branch1 = branch_reader.read(&branch1_id)?; - branch1.ownership = BranchOwnershipClaims { - claims: vec!["test.txt:11-15".parse()?], - }; - branch_writer.write(&mut branch1)?; - - let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) - .expect("failed to get status") - .0; - - let files_by_branch_id = statuses - .iter() - .map(|(branch, files)| (branch.id, files)) - .collect::>(); - - 
assert_eq!(files_by_branch_id.len(), 3); - assert_eq!(files_by_branch_id[&branch1_id].len(), 1); - // assert_eq!(files_by_branch_id[&branch1_id][0].hunks.len(), 1); - assert_eq!(files_by_branch_id[&branch2_id].len(), 1); - // assert_eq!(files_by_branch_id[&branch2_id][0].hunks.len(), 1); - assert_eq!(files_by_branch_id[&branch3_id].len(), 0); - - update_branch( - gb_repository, - project_repository, - virtual_branches::branch::BranchUpdateRequest { - id: branch3_id, - ownership: Some("test.txt:1-5,11-15".parse()?), - ..Default::default() - }, - )?; - - let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) - .expect("failed to get status") - .0; - - let files_by_branch_id = statuses - .iter() - .map(|(branch, files)| (branch.id, files)) - .collect::>(); - - assert_eq!(files_by_branch_id.len(), 3); - assert_eq!(files_by_branch_id[&branch1_id].len(), 0); - assert_eq!(files_by_branch_id[&branch2_id].len(), 0); - assert_eq!(files_by_branch_id[&branch3_id].len(), 1); - assert_eq!( - files_by_branch_id[&branch3_id][Path::new("test.txt")].len(), - 2 - ); - assert_eq!( - files_by_branch_id[&branch3_id][Path::new("test.txt")][0].diff, - "@@ -1,3 +1,4 @@\n+line0\n line1\n line2\n line3\n" - ); - assert_eq!( - files_by_branch_id[&branch3_id][Path::new("test.txt")][1].diff, - "@@ -10,3 +11,4 @@ line9\n line10\n line11\n line12\n+line13\n" - ); - Ok(()) -} - -#[test] -fn move_hunks_partial_explicitly() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. 
- } = &suite.new_case_with_files(HashMap::from([( - PathBuf::from("test.txt"), - "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\n", - )])); - - set_test_target(gb_repository, project_repository)?; - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "line0\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline14\n", - )?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - let branch2_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) - .expect("failed to get status") - .0; - let files_by_branch_id = statuses - .iter() - .map(|(branch, files)| (branch.id, files)) - .collect::>(); - - assert_eq!(files_by_branch_id.len(), 2); - assert_eq!(files_by_branch_id[&branch1_id].len(), 1); - // assert_eq!(files_by_branch_id[&branch1_id][0].hunks.len(), 2); - assert_eq!(files_by_branch_id[&branch2_id].len(), 0); - - update_branch( - gb_repository, - project_repository, - virtual_branches::branch::BranchUpdateRequest { - id: branch2_id, - ownership: Some("test.txt:1-5".parse()?), - ..Default::default() - }, - )?; - - let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) - .expect("failed to get status") - .0; - - let files_by_branch_id = statuses - .iter() - .map(|(branch, files)| (branch.id, files)) - .collect::>(); - - assert_eq!(files_by_branch_id.len(), 2); - assert_eq!(files_by_branch_id[&branch1_id].len(), 1); - assert_eq!( - files_by_branch_id[&branch1_id][Path::new("test.txt")].len(), - 1 - ); - assert_eq!( - files_by_branch_id[&branch1_id][Path::new("test.txt")][0].diff, - "@@ -11,3 +12,4 @@ line10\n line11\n 
line12\n line13\n+line14\n" - ); - - assert_eq!(files_by_branch_id[&branch2_id].len(), 1); - assert_eq!( - files_by_branch_id[&branch2_id][Path::new("test.txt")].len(), - 1 - ); - assert_eq!( - files_by_branch_id[&branch2_id][Path::new("test.txt")][0].diff, - "@@ -1,3 +1,4 @@\n+line0\n line1\n line2\n line3\n" - ); - - Ok(()) -} - -#[test] -fn add_new_hunk_to_the_end() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. - } = &suite.new_case_with_files(HashMap::from([( - PathBuf::from("test.txt"), - "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline13\nline14\n", - )])); - - set_test_target(gb_repository, project_repository)?; - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline14\nline15\n", - )?; - - create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch"); - - let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) - .expect("failed to get status") - .0; - assert_eq!( - statuses[0].1[Path::new("test.txt")][0].diff, - "@@ -11,5 +11,5 @@ line10\n line11\n line12\n line13\n-line13\n line14\n+line15\n" - ); - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "line0\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\nline11\nline12\nline13\nline14\nline15\n", - )?; - - let statuses = virtual_branches::get_status_by_branch(gb_repository, project_repository) - .expect("failed to get status") - .0; - - assert_eq!( - statuses[0].1[Path::new("test.txt")][0].diff, - "@@ -11,5 +12,5 @@ line10\n line11\n line12\n line13\n-line13\n line14\n+line15\n" - ); - assert_eq!( - statuses[0].1[Path::new("test.txt")][1].diff, - "@@ -1,3 +1,4 @@\n+line0\n line1\n line2\n line3\n" - ); - - Ok(()) -} - 
-#[test] -fn merge_vbranch_upstream_clean_rebase() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. - } = &suite.new_case(); - - // create a commit and set the target - let file_path = Path::new("test.txt"); - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\n", - )?; - commit_all(&project_repository.git_repository); - let target_oid = project_repository - .git_repository - .head() - .unwrap() - .target() - .unwrap(); - - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\nupstream\n", - )?; - // add a commit to the target branch it's pointing to so there is something "upstream" - commit_all(&project_repository.git_repository); - let last_push = project_repository - .git_repository - .head() - .unwrap() - .target() - .unwrap(); - - // coworker adds some work - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\nupstream\ncoworker work\n", - )?; - - commit_all(&project_repository.git_repository); - let coworker_work = project_repository - .git_repository - .head() - .unwrap() - .target() - .unwrap(); - - //update repo ref refs/remotes/origin/master to up_target oid - project_repository.git_repository.reference( - &"refs/remotes/origin/master".parse().unwrap(), - coworker_work, - true, - "update target", - )?; - - // revert to our file - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\nupstream\n", - )?; - - set_test_target(gb_repository, project_repository)?; - virtual_branches::target::Writer::new(gb_repository, project_repository.project().gb_dir())? 
- .write_default(&virtual_branches::target::Target { - branch: "refs/remotes/origin/master".parse().unwrap(), - remote_url: "origin".to_string(), - sha: target_oid, - })?; - - // add some uncommitted work - let file_path2 = Path::new("test2.txt"); - std::fs::write(Path::new(&project.path).join(file_path2), "file2\n")?; - - let remote_branch: git::RemoteRefname = "refs/remotes/origin/master".parse().unwrap(); - let branch_writer = virtual_branches::branch::Writer::new(gb_repository, project.gb_dir())?; - let mut branch = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch"); - branch.upstream = Some(remote_branch.clone()); - branch.head = last_push; - branch_writer - .write(&mut branch) - .context("failed to write target branch after push")?; - - // create the branch - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch1 = &branches[0]; - assert_eq!(branch1.files.len(), 1); - assert_eq!(branch1.commits.len(), 1); - // assert_eq!(branch1.upstream.as_ref().unwrap().commits.len(), 1); - - merge_virtual_branch_upstream( - gb_repository, - project_repository, - &branch1.id, - Some(suite.keys.get_or_create()?).as_ref(), - None, - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch1 = &branches[0]; - - let contents = std::fs::read(Path::new(&project.path).join(file_path))?; - assert_eq!( - "line1\nline2\nline3\nline4\nupstream\ncoworker work\n", - String::from_utf8(contents)? 
- ); - let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; - assert_eq!("file2\n", String::from_utf8(contents)?); - assert_eq!(branch1.files.len(), 1); - assert_eq!(branch1.commits.len(), 2); - // assert_eq!(branch1.upstream.as_ref().unwrap().commits.len(), 0); - - Ok(()) -} - -#[test] -fn merge_vbranch_upstream_conflict() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. - } = &suite.new_case(); - - // create a commit and set the target - let file_path = Path::new("test.txt"); - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\n", - )?; - commit_all(&project_repository.git_repository); - let target_oid = project_repository - .git_repository - .head() - .unwrap() - .target() - .unwrap(); - - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\nupstream\n", - )?; - // add a commit to the target branch it's pointing to so there is something "upstream" - commit_all(&project_repository.git_repository); - let last_push = project_repository - .git_repository - .head() - .unwrap() - .target() - .unwrap(); - - // coworker adds some work - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\nupstream\ncoworker work\n", - )?; - - commit_all(&project_repository.git_repository); - let coworker_work = project_repository - .git_repository - .head() - .unwrap() - .target() - .unwrap(); - - //update repo ref refs/remotes/origin/master to up_target oid - project_repository.git_repository.reference( - &"refs/remotes/origin/master".parse().unwrap(), - coworker_work, - true, - "update target", - )?; - - // revert to our file - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\nupstream\n", - )?; - - set_test_target(gb_repository, project_repository)?; - virtual_branches::target::Writer::new(gb_repository, project.gb_dir())?.write_default( - 
&virtual_branches::target::Target { - branch: "refs/remotes/origin/master".parse().unwrap(), - remote_url: "origin".to_string(), - sha: target_oid, - }, - )?; - - // add some uncommitted work - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\nupstream\nother side\n", - )?; - - let remote_branch: git::RemoteRefname = "refs/remotes/origin/master".parse().unwrap(); - let branch_writer = virtual_branches::branch::Writer::new(gb_repository, project.gb_dir())?; - let mut branch = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch"); - branch.upstream = Some(remote_branch.clone()); - branch.head = last_push; - branch_writer - .write(&mut branch) - .context("failed to write target branch after push")?; - - // create the branch - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch1 = &branches[0]; - - assert_eq!(branch1.files.len(), 1); - assert_eq!(branch1.commits.len(), 1); - // assert_eq!(branch1.upstream.as_ref().unwrap().commits.len(), 1); - - merge_virtual_branch_upstream(gb_repository, project_repository, &branch1.id, None, None)?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch1 = &branches[0]; - let contents = std::fs::read(Path::new(&project.path).join(file_path))?; - - assert_eq!( - "line1\nline2\nline3\nline4\nupstream\n<<<<<<< ours\nother side\n=======\ncoworker work\n>>>>>>> theirs\n", - String::from_utf8(contents)? 
- ); - - assert_eq!(branch1.files.len(), 1); - assert_eq!(branch1.commits.len(), 1); - assert!(branch1.conflicted); - - // fix the conflict - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\nupstream\nother side\ncoworker work\n", - )?; - - // make gb see the conflict resolution - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - assert!(branches[0].conflicted); - - // commit the merge resolution - commit( - gb_repository, - project_repository, - &branch1.id, - "fix merge conflict", - None, - None, - None, - false, - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch1 = &branches[0]; - assert!(!branch1.conflicted); - assert_eq!(branch1.files.len(), 0); - assert_eq!(branch1.commits.len(), 3); - - // make sure the last commit was a merge commit (2 parents) - let last_id = &branch1.commits[0].id; - let last_commit = project_repository.git_repository.find_commit(*last_id)?; - assert_eq!(last_commit.parent_count(), 2); - - Ok(()) -} - -#[test] -fn unapply_ownership_partial() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. 
- } = &suite.new_case_with_files(HashMap::from([( - PathBuf::from("test.txt"), - "line1\nline2\nline3\nline4\n", - )])); - - set_test_target(gb_repository, project_repository)?; - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "line1\nline2\nline3\nline4\nbranch1\n", - )?; - - create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch"); - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].files.len(), 1); - assert_eq!(branches[0].ownership.claims.len(), 1); - assert_eq!(branches[0].files[0].hunks.len(), 1); - assert_eq!(branches[0].ownership.claims[0].hunks.len(), 1); - assert_eq!( - std::fs::read_to_string(Path::new(&project.path).join("test.txt"))?, - "line1\nline2\nline3\nline4\nbranch1\n" - ); - - unapply_ownership( - gb_repository, - project_repository, - &"test.txt:2-6".parse().unwrap(), - ) - .unwrap(); - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - assert_eq!(branches.len(), 1); - assert_eq!(branches[0].files.len(), 0); - assert_eq!(branches[0].ownership.claims.len(), 0); - assert_eq!( - std::fs::read_to_string(Path::new(&project.path).join("test.txt"))?, - "line1\nline2\nline3\nline4\n" - ); - - Ok(()) -} - -#[test] -fn unapply_branch() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - project_repository, - gb_repository, - .. 
- } = &suite.new_case(); - - // create a commit and set the target - let file_path = Path::new("test.txt"); - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\n", - )?; - commit_all(&project_repository.git_repository); - - set_test_target(gb_repository, project_repository)?; - - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\nbranch1\n", - )?; - let file_path2 = Path::new("test2.txt"); - std::fs::write(Path::new(&project.path).join(file_path2), "line5\nline6\n")?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - let branch2_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - update_branch( - gb_repository, - project_repository, - virtual_branches::branch::BranchUpdateRequest { - id: branch2_id, - ownership: Some("test2.txt:1-3".parse()?), - ..Default::default() - }, - )?; - - let contents = std::fs::read(Path::new(&project.path).join(file_path))?; - assert_eq!( - "line1\nline2\nline3\nline4\nbranch1\n", - String::from_utf8(contents)? 
- ); - let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; - assert_eq!("line5\nline6\n", String::from_utf8(contents)?); - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - assert_eq!(branch.files.len(), 1); - assert!(branch.active); - - virtual_branches::unapply_branch(gb_repository, project_repository, &branch1_id)?; - - let contents = std::fs::read(Path::new(&project.path).join(file_path))?; - assert_eq!("line1\nline2\nline3\nline4\n", String::from_utf8(contents)?); - let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; - assert_eq!("line5\nline6\n", String::from_utf8(contents)?); - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - assert_eq!(branch.files.len(), 1); - assert!(!branch.active); - - apply_branch(gb_repository, project_repository, &branch1_id, None, None)?; - let contents = std::fs::read(Path::new(&project.path).join(file_path))?; - assert_eq!( - "line1\nline2\nline3\nline4\nbranch1\n", - String::from_utf8(contents)? - ); - let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; - assert_eq!("line5\nline6\n", String::from_utf8(contents)?); - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - assert_eq!(branch.files.len(), 1); - assert!(branch.active); - - Ok(()) -} - -#[test] -fn apply_unapply_added_deleted_files() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - project_repository, - gb_repository, - .. 
- } = &suite.new_case(); - - // create a commit and set the target - let file_path = Path::new("test.txt"); - std::fs::write(Path::new(&project.path).join(file_path), "file1\n")?; - let file_path2 = Path::new("test2.txt"); - std::fs::write(Path::new(&project.path).join(file_path2), "file2\n")?; - commit_all(&project_repository.git_repository); - - set_test_target(gb_repository, project_repository)?; - - // rm file_path2, add file3 - std::fs::remove_file(Path::new(&project.path).join(file_path2))?; - let file_path3 = Path::new("test3.txt"); - std::fs::write(Path::new(&project.path).join(file_path3), "file3\n")?; - - let branch2_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - let branch3_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - update_branch( - gb_repository, - project_repository, - virtual_branches::branch::BranchUpdateRequest { - id: branch2_id, - ownership: Some("test2.txt:0-0".parse()?), - ..Default::default() - }, - )?; - update_branch( - gb_repository, - project_repository, - virtual_branches::branch::BranchUpdateRequest { - id: branch3_id, - ownership: Some("test3.txt:1-2".parse()?), - ..Default::default() - }, - )?; - - virtual_branches::unapply_branch(gb_repository, project_repository, &branch2_id)?; - // check that file2 is back - let contents = std::fs::read(Path::new(&project.path).join(file_path2))?; - assert_eq!("file2\n", String::from_utf8(contents)?); - - virtual_branches::unapply_branch(gb_repository, project_repository, &branch3_id)?; - // check that file3 is gone - assert!(!Path::new(&project.path).join(file_path3).exists()); - - apply_branch(gb_repository, project_repository, &branch2_id, None, None)?; - // check that file2 is gone - assert!(!Path::new(&project.path).join(file_path2).exists()); - - 
apply_branch(gb_repository, project_repository, &branch3_id, None, None)?; - // check that file3 is back - let contents = std::fs::read(Path::new(&project.path).join(file_path3))?; - assert_eq!("file3\n", String::from_utf8(contents)?); - - Ok(()) -} - -#[test] -fn detect_mergeable_branch() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - project_repository, - gb_repository, - .. - } = &suite.new_case(); - - // create a commit and set the target - let file_path = Path::new("test.txt"); - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\n", - )?; - commit_all(&project_repository.git_repository); - - set_test_target(gb_repository, project_repository)?; - - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\nbranch1\n", - )?; - let file_path4 = Path::new("test4.txt"); - std::fs::write(Path::new(&project.path).join(file_path4), "line5\nline6\n")?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - let branch2_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - let current_session = gb_repository.get_or_create_current_session()?; - let current_session_reader = sessions::Reader::open(gb_repository, ¤t_session)?; - let branch_reader = virtual_branches::branch::Reader::new(¤t_session_reader); - let branch_writer = virtual_branches::branch::Writer::new(gb_repository, project.gb_dir())?; - - update_branch( - gb_repository, - project_repository, - virtual_branches::branch::BranchUpdateRequest { - id: branch2_id, - ownership: Some("test4.txt:1-3".parse()?), - ..Default::default() - }, - ) - .expect("failed to update branch"); - - // unapply both branches and create some conflicting ones - virtual_branches::unapply_branch(gb_repository, 
project_repository, &branch1_id)?; - virtual_branches::unapply_branch(gb_repository, project_repository, &branch2_id)?; - - project_repository - .git_repository - .set_head(&"refs/heads/master".parse().unwrap())?; - project_repository - .git_repository - .checkout_head(Some(&mut git2::build::CheckoutBuilder::default().force()))?; - - // create an upstream remote conflicting commit - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\nupstream\n", - )?; - commit_all(&project_repository.git_repository); - let up_target = project_repository - .git_repository - .head() - .unwrap() - .target() - .unwrap(); - project_repository.git_repository.reference( - &"refs/remotes/origin/remote_branch".parse().unwrap(), - up_target, - true, - "update target", - )?; - - // revert content and write a mergeable branch - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\n", - )?; - let file_path3 = Path::new("test3.txt"); - std::fs::write(Path::new(&project.path).join(file_path3), "file3\n")?; - commit_all(&project_repository.git_repository); - let up_target = project_repository - .git_repository - .head() - .unwrap() - .target() - .unwrap(); - project_repository.git_repository.reference( - &"refs/remotes/origin/remote_branch2".parse().unwrap(), - up_target, - true, - "update target", - )?; - // remove file_path3 - std::fs::remove_file(Path::new(&project.path).join(file_path3))?; - - project_repository - .git_repository - .set_head(&"refs/heads/gitbutler/integration".parse().unwrap())?; - project_repository - .git_repository - .checkout_head(Some(&mut git2::build::CheckoutBuilder::default().force()))?; - - // create branches that conflict with our earlier branches - create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch"); - let branch4_id = create_virtual_branch( - gb_repository, - project_repository, - 
&BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - // branch3 conflicts with branch1 and remote_branch - std::fs::write( - Path::new(&project.path).join(file_path), - "line1\nline2\nline3\nline4\nbranch3\n", - )?; - - // branch4 conflicts with branch2 - let file_path2 = Path::new("test2.txt"); - std::fs::write( - Path::new(&project.path).join(file_path2), - "line1\nline2\nline3\nline4\nbranch4\n", - )?; - - let mut branch4 = branch_reader.read(&branch4_id)?; - branch4.ownership = BranchOwnershipClaims { - claims: vec!["test2.txt:1-6".parse()?], - }; - branch_writer.write(&mut branch4)?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - assert_eq!(branches.len(), 4); - - let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - assert!(!branch1.active); - assert!(!is_virtual_branch_mergeable(gb_repository, project_repository, &branch1.id).unwrap()); - - let branch2 = &branches.iter().find(|b| b.id == branch2_id).unwrap(); - assert!(!branch2.active); - assert!(is_virtual_branch_mergeable(gb_repository, project_repository, &branch2.id).unwrap()); - - let remotes = - list_remote_branches(gb_repository, project_repository).expect("failed to list remotes"); - let _remote1 = &remotes - .iter() - .find(|b| b.name.to_string() == "refs/remotes/origin/remote_branch") - .unwrap(); - assert!(!is_remote_branch_mergeable( - gb_repository, - project_repository, - &"refs/remotes/origin/remote_branch".parse().unwrap() - ) - .unwrap()); - // assert_eq!(remote1.commits.len(), 1); - - let _remote2 = &remotes - .iter() - .find(|b| b.name.to_string() == "refs/remotes/origin/remote_branch2") - .unwrap(); - assert!(is_remote_branch_mergeable( - gb_repository, - project_repository, - &"refs/remotes/origin/remote_branch2".parse().unwrap() - ) - .unwrap()); - // assert_eq!(remote2.commits.len(), 2); - - Ok(()) -} - -#[test] -fn upstream_integrated_vbranch() -> Result<()> { - // 
ok, we need a vbranch with some work and an upstream target that also includes that work, but the base is behind - // plus a branch with work not in upstream so we can see that it is not included in the vbranch - - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. - } = &suite.new_case_with_files(HashMap::from([ - (PathBuf::from("test.txt"), "file1\n"), - (PathBuf::from("test2.txt"), "file2\n"), - (PathBuf::from("test3.txt"), "file3\n"), - ])); - - let base_commit = project_repository - .git_repository - .head() - .unwrap() - .target() - .unwrap(); - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "file1\nversion2\n", - )?; - commit_all(&project_repository.git_repository); - - let upstream_commit = project_repository - .git_repository - .head() - .unwrap() - .target() - .unwrap(); - project_repository.git_repository.reference( - &"refs/remotes/origin/master".parse().unwrap(), - upstream_commit, - true, - "update target", - )?; - - virtual_branches::target::Writer::new(gb_repository, project_repository.project().gb_dir())? 
- .write_default(&virtual_branches::target::Target { - branch: "refs/remotes/origin/master".parse().unwrap(), - remote_url: "http://origin.com/project".to_string(), - sha: base_commit, - })?; - project_repository - .git_repository - .remote("origin", &"http://origin.com/project".parse().unwrap())?; - virtual_branches::integration::update_gitbutler_integration(gb_repository, project_repository)?; - - // create vbranches, one integrated, one not - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - let branch2_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - let branch3_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - std::fs::write( - Path::new(&project.path).join("test2.txt"), - "file2\nversion2\n", - )?; - - std::fs::write( - Path::new(&project.path).join("test3.txt"), - "file3\nversion2\n", - )?; - - update_branch( - gb_repository, - project_repository, - virtual_branches::branch::BranchUpdateRequest { - id: branch1_id, - name: Some("integrated".to_string()), - ownership: Some("test.txt:1-2".parse()?), - ..Default::default() - }, - )?; - - update_branch( - gb_repository, - project_repository, - virtual_branches::branch::BranchUpdateRequest { - id: branch2_id, - name: Some("not integrated".to_string()), - ownership: Some("test2.txt:1-2".parse()?), - ..Default::default() - }, - )?; - - update_branch( - gb_repository, - project_repository, - virtual_branches::branch::BranchUpdateRequest { - id: branch3_id, - name: Some("not committed".to_string()), - ownership: Some("test3.txt:1-2".parse()?), - ..Default::default() - }, - )?; - - // create a new virtual branch from the remote branch - commit( - gb_repository, - 
project_repository, - &branch1_id, - "integrated commit", - None, - None, - None, - false, - )?; - commit( - gb_repository, - project_repository, - &branch2_id, - "non-integrated commit", - None, - None, - None, - false, - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - - let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - assert!(branch1.commits.iter().any(|c| c.is_integrated)); - assert_eq!(branch1.files.len(), 0); - assert_eq!(branch1.commits.len(), 1); - - let branch2 = &branches.iter().find(|b| b.id == branch2_id).unwrap(); - assert!(!branch2.commits.iter().any(|c| c.is_integrated)); - assert_eq!(branch2.files.len(), 0); - assert_eq!(branch2.commits.len(), 1); - - let branch3 = &branches.iter().find(|b| b.id == branch3_id).unwrap(); - assert!(!branch3.commits.iter().any(|c| c.is_integrated)); - assert_eq!(branch3.files.len(), 1); - assert_eq!(branch3.commits.len(), 0); - - Ok(()) -} - -#[test] -fn commit_same_hunk_twice() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. 
- } = &suite.new_case_with_files(HashMap::from([( - PathBuf::from("test.txt"), - "line1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", - )])); - - set_test_target(gb_repository, project_repository)?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].hunks.len(), 1); - assert_eq!(branch.commits.len(), 0); - - // commit - commit( - gb_repository, - project_repository, - &branch1_id, - "first commit to test.txt", - None, - None, - None, - false, - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - - assert_eq!(branch.files.len(), 0, "no files expected"); - - assert_eq!(branch.commits.len(), 1, "file should have been commited"); - assert_eq!(branch.commits[0].files.len(), 1, "hunks expected"); - assert_eq!( - branch.commits[0].files[0].hunks.len(), - 1, - "one hunk should have been commited" - ); - - // update same lines - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "line1\nPATCH1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = 
&branches.iter().find(|b| b.id == branch1_id).unwrap(); - - assert_eq!(branch.files.len(), 1, "one file should be changed"); - assert_eq!(branch.commits.len(), 1, "commit is still there"); - - commit( - gb_repository, - project_repository, - &branch1_id, - "second commit to test.txt", - None, - None, - None, - false, - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - - assert_eq!( - branch.files.len(), - 0, - "all changes should have been commited" - ); - - assert_eq!(branch.commits.len(), 2, "two commits expected"); - assert_eq!(branch.commits[0].files.len(), 1); - assert_eq!(branch.commits[0].files[0].hunks.len(), 1); - assert_eq!(branch.commits[1].files.len(), 1); - assert_eq!(branch.commits[1].files[0].hunks.len(), 1); - - Ok(()) -} - -#[test] -fn commit_same_file_twice() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. 
- } = &suite.new_case_with_files(HashMap::from([( - PathBuf::from("test.txt"), - "line1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", - )])); - - set_test_target(gb_repository, project_repository)?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].hunks.len(), 1); - assert_eq!(branch.commits.len(), 0); - - // commit - commit( - gb_repository, - project_repository, - &branch1_id, - "first commit to test.txt", - None, - None, - None, - false, - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - - assert_eq!(branch.files.len(), 0, "no files expected"); - - assert_eq!(branch.commits.len(), 1, "file should have been commited"); - assert_eq!(branch.commits[0].files.len(), 1, "hunks expected"); - assert_eq!( - branch.commits[0].files[0].hunks.len(), - 1, - "one hunk should have been commited" - ); - - // add second patch - - std::fs::write( - Path::new(&project.path).join("file.txt"), - "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\npatch2\nline11\nline12\n", - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let 
branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - - assert_eq!(branch.files.len(), 1, "one file should be changed"); - assert_eq!(branch.commits.len(), 1, "commit is still there"); - - commit( - gb_repository, - project_repository, - &branch1_id, - "second commit to test.txt", - None, - None, - None, - false, - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - - assert_eq!( - branch.files.len(), - 0, - "all changes should have been commited" - ); - - assert_eq!(branch.commits.len(), 2, "two commits expected"); - assert_eq!(branch.commits[0].files.len(), 1); - assert_eq!(branch.commits[0].files[0].hunks.len(), 1); - assert_eq!(branch.commits[1].files.len(), 1); - assert_eq!(branch.commits[1].files[0].hunks.len(), 1); - - Ok(()) -} - -#[test] -fn commit_partial_by_hunk() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. 
- } = &suite.new_case_with_files(HashMap::from([( - PathBuf::from("test.txt"), - "line1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\nline11\nline12\n", - )])); - - set_test_target(gb_repository, project_repository)?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "line1\npatch1\nline2\nline3\nline4\nline5\nmiddle\nmiddle\nmiddle\nmiddle\nline6\nline7\nline8\nline9\nline10\nmiddle\nmiddle\nmiddle\npatch2\nline11\nline12\n", - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].hunks.len(), 2); - assert_eq!(branch.commits.len(), 0); - - // commit - commit( - gb_repository, - project_repository, - &branch1_id, - "first commit to test.txt", - Some(&"test.txt:1-6".parse::().unwrap()), - None, - None, - false, - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - - assert_eq!(branch.files.len(), 1); - assert_eq!(branch.files[0].hunks.len(), 1); - assert_eq!(branch.commits.len(), 1); - assert_eq!(branch.commits[0].files.len(), 1); - assert_eq!(branch.commits[0].files[0].hunks.len(), 1); - - commit( - gb_repository, - project_repository, - &branch1_id, - "second commit to test.txt", - Some(&"test.txt:16-22".parse::().unwrap()), - None, - None, - false, - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - - assert_eq!(branch.files.len(), 0); - 
assert_eq!(branch.commits.len(), 2); - assert_eq!(branch.commits[0].files.len(), 1); - assert_eq!(branch.commits[0].files[0].hunks.len(), 1); - assert_eq!(branch.commits[1].files.len(), 1); - assert_eq!(branch.commits[1].files[0].hunks.len(), 1); - - Ok(()) -} - -#[test] -fn commit_partial_by_file() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. - } = &suite.new_case_with_files(HashMap::from([ - (PathBuf::from("test.txt"), "file1\n"), - (PathBuf::from("test2.txt"), "file2\n"), - ])); - - let commit1_oid = project_repository - .git_repository - .head() - .unwrap() - .target() - .unwrap(); - let commit1 = project_repository - .git_repository - .find_commit(commit1_oid) - .unwrap(); - - set_test_target(gb_repository, project_repository)?; - - // remove file - std::fs::remove_file(Path::new(&project.path).join("test2.txt"))?; - // add new file - let file_path3 = Path::new("test3.txt"); - std::fs::write(Path::new(&project.path).join(file_path3), "file3\n")?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - // commit - commit( - gb_repository, - project_repository, - &branch1_id, - "branch1 commit", - None, - None, - None, - false, - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - - // branch one test.txt has just the 1st and 3rd hunks applied - let commit2 = &branch1.commits[0].id; - let commit2 = project_repository - .git_repository - .find_commit(*commit2) - .expect("failed to get commit object"); - - let tree = commit1.tree().expect("failed to get tree"); - let file_list = tree_to_file_list(&project_repository.git_repository, &tree); - assert_eq!(file_list, vec!["test.txt", "test2.txt"]); - - // get the tree - let tree = 
commit2.tree().expect("failed to get tree"); - let file_list = tree_to_file_list(&project_repository.git_repository, &tree); - assert_eq!(file_list, vec!["test.txt", "test3.txt"]); - - Ok(()) -} - -#[test] -fn commit_add_and_delete_files() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. - } = &suite.new_case_with_files(HashMap::from([ - (PathBuf::from("test.txt"), "file1\n"), - (PathBuf::from("test2.txt"), "file2\n"), - ])); - - let commit1_oid = project_repository - .git_repository - .head() - .unwrap() - .target() - .unwrap(); - let commit1 = project_repository - .git_repository - .find_commit(commit1_oid) - .unwrap(); - - set_test_target(gb_repository, project_repository)?; - - // remove file - std::fs::remove_file(Path::new(&project.path).join("test2.txt"))?; - // add new file - let file_path3 = Path::new("test3.txt"); - std::fs::write(Path::new(&project.path).join(file_path3), "file3\n")?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - // commit - commit( - gb_repository, - project_repository, - &branch1_id, - "branch1 commit", - None, - None, - None, - false, - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - - // branch one test.txt has just the 1st and 3rd hunks applied - let commit2 = &branch1.commits[0].id; - let commit2 = project_repository - .git_repository - .find_commit(*commit2) - .expect("failed to get commit object"); - - let tree = commit1.tree().expect("failed to get tree"); - let file_list = tree_to_file_list(&project_repository.git_repository, &tree); - assert_eq!(file_list, vec!["test.txt", "test2.txt"]); - - // get the tree - let tree = commit2.tree().expect("failed to get tree"); - let file_list = 
tree_to_file_list(&project_repository.git_repository, &tree); - assert_eq!(file_list, vec!["test.txt", "test3.txt"]); - - Ok(()) -} - -#[test] -#[cfg(target_family = "unix")] -fn commit_executable_and_symlinks() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. - } = &suite.new_case_with_files(HashMap::from([ - (PathBuf::from("test.txt"), "file1\n"), - (PathBuf::from("test2.txt"), "file2\n"), - ])); - - set_test_target(gb_repository, project_repository)?; - - // add symlinked file - let file_path3 = Path::new("test3.txt"); - let src = Path::new(&project.path).join("test2.txt"); - let dst = Path::new(&project.path).join(file_path3); - symlink(src, dst)?; - - // add executable - let file_path4 = Path::new("test4.bin"); - let exec = Path::new(&project.path).join(file_path4); - std::fs::write(&exec, "exec\n")?; - let permissions = std::fs::metadata(&exec)?.permissions(); - let new_permissions = Permissions::from_mode(permissions.mode() | 0o111); // Add execute permission - std::fs::set_permissions(&exec, new_permissions)?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - // commit - commit( - gb_repository, - project_repository, - &branch1_id, - "branch1 commit", - None, - None, - None, - false, - )?; - - let (branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - let branch1 = &branches.iter().find(|b| b.id == branch1_id).unwrap(); - - let commit = &branch1.commits[0].id; - let commit = project_repository - .git_repository - .find_commit(*commit) - .expect("failed to get commit object"); - - let tree = commit.tree().expect("failed to get tree"); - - let list = tree_to_entry_list(&project_repository.git_repository, &tree); - assert_eq!(list[0].0, "test.txt"); - assert_eq!(list[0].1, "100644"); - assert_eq!(list[1].0, "test2.txt"); - 
assert_eq!(list[1].1, "100644"); - assert_eq!(list[2].0, "test3.txt"); - assert_eq!(list[2].1, "120000"); - assert_eq!(list[2].2, "test2.txt"); - assert_eq!(list[3].0, "test4.bin"); - assert_eq!(list[3].1, "100755"); - - Ok(()) -} - -fn tree_to_file_list(repository: &git::Repository, tree: &git::Tree) -> Vec { - let mut file_list = Vec::new(); - tree.walk(|_, entry| { - let path = entry.name().unwrap(); - let entry = tree.get_path(Path::new(path)).unwrap(); - let object = entry.to_object(repository).unwrap(); - if object.kind() == Some(git2::ObjectType::Blob) { - file_list.push(path.to_string()); - } - git::TreeWalkResult::Continue - }) - .expect("failed to walk tree"); - file_list -} - -fn tree_to_entry_list( - repository: &git::Repository, - tree: &git::Tree, -) -> Vec<(String, String, String, String)> { - let mut file_list = Vec::new(); - tree.walk(|_root, entry| { - let path = entry.name().unwrap(); - let entry = tree.get_path(Path::new(path)).unwrap(); - let object = entry.to_object(repository).unwrap(); - let blob = object.as_blob().expect("failed to get blob"); - // convert content to string - let octal_mode = format!("{:o}", entry.filemode()); - if let Ok(content) = - std::str::from_utf8(blob.content()).context("failed to convert content to string") - { - file_list.push(( - path.to_string(), - octal_mode, - content.to_string(), - blob.id().to_string(), - )); - } else { - file_list.push(( - path.to_string(), - octal_mode, - "BINARY".to_string(), - blob.id().to_string(), - )); - } - git::TreeWalkResult::Continue - }) - .expect("failed to walk tree"); - file_list -} - -#[test] -fn verify_branch_commits_to_integration() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - project, - gb_repository, - .. 
- } = &suite.new_case(); - - set_test_target(gb_repository, project_repository)?; - - verify_branch(gb_repository, project_repository).unwrap(); - - // write two commits - let file_path2 = Path::new("test2.txt"); - std::fs::write(Path::new(&project.path).join(file_path2), "file")?; - commit_all(&project_repository.git_repository); - std::fs::write(Path::new(&project.path).join(file_path2), "update")?; - commit_all(&project_repository.git_repository); - - // verify puts commits onto the virtual branch - verify_branch(gb_repository, project_repository).unwrap(); - - // one virtual branch with two commits was created - let (virtual_branches, _, _) = - virtual_branches::list_virtual_branches(gb_repository, project_repository)?; - assert_eq!(virtual_branches.len(), 1); - - let branch = &virtual_branches.first().unwrap(); - assert_eq!(branch.commits.len(), 2); - assert_eq!(branch.commits.len(), 2); - - Ok(()) -} - -#[test] -fn verify_branch_not_integration() -> Result<()> { - let suite = Suite::default(); - let Case { - project_repository, - gb_repository, - .. - } = &suite.new_case(); - - set_test_target(gb_repository, project_repository)?; - - verify_branch(gb_repository, project_repository).unwrap(); - - project_repository - .git_repository - .set_head(&"refs/heads/master".parse().unwrap())?; - - let verify_result = verify_branch(gb_repository, project_repository); - assert!(verify_result.is_err()); - assert_eq!( - verify_result.unwrap_err().to_string(), - "head is refs/heads/master" - ); - - Ok(()) -} - -#[test] -fn pre_commit_hook_rejection() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - gb_repository, - project_repository, - .. 
- } = &suite.new_case_with_files(HashMap::from([ - (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"), - (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"), - ])); - - set_test_target(gb_repository, project_repository)?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "line0\nline1\nline2\nline3\nline4\n", - )?; - - let hook = b"#!/bin/sh - echo 'rejected' - exit 1 - "; - - git2_hooks::create_hook( - (&project_repository.git_repository).into(), - git2_hooks::HOOK_PRE_COMMIT, - hook, - ); - - let res = commit( - gb_repository, - project_repository, - &branch1_id, - "test commit", - None, - Some(suite.keys.get_or_create()?).as_ref(), - None, - true, - ); - - let error = res.unwrap_err(); - - assert!(matches!(error, CommitError::CommitHookRejected(_))); - - let CommitError::CommitHookRejected(output) = error else { - unreachable!() - }; - - assert_eq!(&output, "rejected\n"); - - Ok(()) -} - -#[test] -fn post_commit_hook() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - gb_repository, - project_repository, - .. 
- } = &suite.new_case_with_files(HashMap::from([ - (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"), - (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"), - ])); - - set_test_target(gb_repository, project_repository)?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "line0\nline1\nline2\nline3\nline4\n", - )?; - - let hook = b"#!/bin/sh - touch hook_ran - "; - - git2_hooks::create_hook( - (&project_repository.git_repository).into(), - git2_hooks::HOOK_POST_COMMIT, - hook, - ); - - let hook_ran_proof = project_repository - .git_repository - .path() - .parent() - .unwrap() - .join("hook_ran"); - - assert!(!hook_ran_proof.exists()); - - commit( - gb_repository, - project_repository, - &branch1_id, - "test commit", - None, - Some(suite.keys.get_or_create()?).as_ref(), - None, - true, - )?; - - assert!(hook_ran_proof.exists()); - - Ok(()) -} - -#[test] -fn commit_msg_hook_rejection() -> Result<()> { - let suite = Suite::default(); - let Case { - project, - gb_repository, - project_repository, - .. 
- } = &suite.new_case_with_files(HashMap::from([ - (PathBuf::from("test.txt"), "line1\nline2\nline3\nline4\n"), - (PathBuf::from("test2.txt"), "line5\nline6\nline7\nline8\n"), - ])); - - set_test_target(gb_repository, project_repository)?; - - let branch1_id = create_virtual_branch( - gb_repository, - project_repository, - &BranchCreateRequest::default(), - ) - .expect("failed to create virtual branch") - .id; - - std::fs::write( - Path::new(&project.path).join("test.txt"), - "line0\nline1\nline2\nline3\nline4\n", - )?; - - let hook = b"#!/bin/sh - echo 'rejected' - exit 1 - "; - - git2_hooks::create_hook( - (&project_repository.git_repository).into(), - git2_hooks::HOOK_COMMIT_MSG, - hook, - ); - - let res = commit( - gb_repository, - project_repository, - &branch1_id, - "test commit", - None, - Some(suite.keys.get_or_create()?).as_ref(), - None, - true, - ); - - let error = res.unwrap_err(); - - assert!(matches!(error, CommitError::CommitMsgHookRejected(_))); - - let CommitError::CommitMsgHookRejected(output) = error else { - unreachable!() - }; - - assert_eq!(&output, "rejected\n"); - - Ok(()) -} diff --git a/gitbutler-app/tests/virtual_branches/target/mod.rs b/gitbutler-app/tests/virtual_branches/target/mod.rs deleted file mode 100644 index 582c1894a..000000000 --- a/gitbutler-app/tests/virtual_branches/target/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -mod reader; -mod writer; diff --git a/gitbutler-app/tests/virtual_branches/target/reader.rs b/gitbutler-app/tests/virtual_branches/target/reader.rs deleted file mode 100644 index 874e0c8e0..000000000 --- a/gitbutler-app/tests/virtual_branches/target/reader.rs +++ /dev/null @@ -1,150 +0,0 @@ -use gitbutler_app::virtual_branches::target::Target; -use gitbutler_app::virtual_branches::{target, BranchId}; -use std::sync::atomic::{AtomicUsize, Ordering}; - -use anyhow::Result; -use once_cell::sync::Lazy; - -use crate::{Case, Suite}; - -static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); - -fn test_branch() -> 
gitbutler_app::virtual_branches::branch::Branch { - TEST_INDEX.fetch_add(1, Ordering::Relaxed); - - gitbutler_app::virtual_branches::branch::Branch { - id: BranchId::generate(), - name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), - notes: String::new(), - applied: true, - upstream: Some( - format!( - "refs/remotes/origin/upstream_{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - ), - upstream_head: None, - created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, - updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, - head: format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - tree: format!( - "0123456789abcdef0123456789abcdef012345{}", - (TEST_INDEX.load(Ordering::Relaxed) + 10) - ) - .parse() - .unwrap(), - ownership: gitbutler_app::virtual_branches::branch::BranchOwnershipClaims { - claims: vec![gitbutler_app::virtual_branches::branch::OwnershipClaim { - file_path: format!("file/{}", TEST_INDEX.load(Ordering::Relaxed)).into(), - hunks: vec![], - }], - }, - order: TEST_INDEX.load(Ordering::Relaxed), - selected_for_changes: None, - } -} - -#[test] -fn read_not_found() -> Result<()> { - let suite = Suite::default(); - let Case { gb_repository, .. } = &suite.new_case(); - - let session = gb_repository.get_or_create_current_session()?; - let session_reader = gitbutler_app::sessions::Reader::open(gb_repository, &session)?; - - let reader = target::Reader::new(&session_reader); - let result = reader.read(&BranchId::generate()); - assert!(result.is_err()); - assert_eq!(result.unwrap_err().to_string(), "file not found"); - - Ok(()) -} - -#[test] -fn read_deprecated_format() -> Result<()> { - let suite = Suite::default(); - let Case { gb_repository, .. 
} = &suite.new_case(); - - let writer = gitbutler_app::writer::DirWriter::open(gb_repository.root())?; - writer - .write_string("branches/target/name", "origin/master") - .unwrap(); - writer - .write_string( - "branches/target/remote", - "git@github.com:gitbutlerapp/gitbutler.git", - ) - .unwrap(); - writer - .write_string( - "branches/target/sha", - "dd945831869e9593448aa622fa4342bbfb84813d", - ) - .unwrap(); - - let session = gb_repository.get_or_create_current_session()?; - let session_reader = gitbutler_app::sessions::Reader::open(gb_repository, &session)?; - let reader = target::Reader::new(&session_reader); - - let read = reader.read_default().unwrap(); - assert_eq!(read.branch.branch(), "master"); - assert_eq!(read.branch.remote(), "origin"); - assert_eq!(read.remote_url, "git@github.com:gitbutlerapp/gitbutler.git"); - assert_eq!( - read.sha.to_string(), - "dd945831869e9593448aa622fa4342bbfb84813d" - ); - - Ok(()) -} - -#[test] -fn read_override_target() -> Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. 
- } = &suite.new_case(); - - let mut branch = test_branch(); - - let target = Target { - branch: "refs/remotes/remote/branch".parse().unwrap(), - remote_url: "remote url".to_string(), - sha: "fedcba9876543210fedcba9876543210fedcba98".parse().unwrap(), - }; - - let default_target = Target { - branch: "refs/remotes/default remote/default branch" - .parse() - .unwrap(), - remote_url: "default remote url".to_string(), - sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(), - }; - - let branch_writer = - gitbutler_app::virtual_branches::branch::Writer::new(gb_repository, project.gb_dir())?; - branch_writer.write(&mut branch)?; - - let session = gb_repository.get_current_session()?.unwrap(); - let session_reader = gitbutler_app::sessions::Reader::open(gb_repository, &session)?; - - let target_writer = target::Writer::new(gb_repository, project.gb_dir())?; - let reader = target::Reader::new(&session_reader); - - target_writer.write_default(&default_target)?; - assert_eq!(default_target, reader.read(&branch.id)?); - - target_writer.write(&branch.id, &target)?; - assert_eq!(target, reader.read(&branch.id)?); - - Ok(()) -} diff --git a/gitbutler-app/tests/virtual_branches/target/writer.rs b/gitbutler-app/tests/virtual_branches/target/writer.rs deleted file mode 100644 index 9ccc2bb3d..000000000 --- a/gitbutler-app/tests/virtual_branches/target/writer.rs +++ /dev/null @@ -1,212 +0,0 @@ -use anyhow::Context; -use std::{ - fs, - sync::atomic::{AtomicUsize, Ordering}, -}; - -use once_cell::sync::Lazy; - -use crate::{Case, Suite}; -use gitbutler_app::virtual_branches::target::Target; -use gitbutler_app::virtual_branches::{branch, target, BranchId}; - -static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); - -fn test_branch() -> branch::Branch { - TEST_INDEX.fetch_add(1, Ordering::Relaxed); - - branch::Branch { - id: BranchId::generate(), - name: format!("branch_name_{}", TEST_INDEX.load(Ordering::Relaxed)), - notes: format!("branch_notes_{}", 
TEST_INDEX.load(Ordering::Relaxed)), - applied: true, - created_timestamp_ms: TEST_INDEX.load(Ordering::Relaxed) as u128, - upstream: Some( - format!( - "refs/remotes/origin/upstream_{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - ), - upstream_head: None, - updated_timestamp_ms: (TEST_INDEX.load(Ordering::Relaxed) + 100) as u128, - head: format!( - "0123456789abcdef0123456789abcdef0123456{}", - TEST_INDEX.load(Ordering::Relaxed) - ) - .parse() - .unwrap(), - tree: format!( - "0123456789abcdef0123456789abcdef012345{}", - TEST_INDEX.load(Ordering::Relaxed) + 10 - ) - .parse() - .unwrap(), - ownership: branch::BranchOwnershipClaims { - claims: vec![branch::OwnershipClaim { - file_path: format!("file/{}", TEST_INDEX.load(Ordering::Relaxed)).into(), - hunks: vec![], - }], - }, - order: TEST_INDEX.load(Ordering::Relaxed), - selected_for_changes: None, - } -} - -#[test] -fn write() -> anyhow::Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. 
- } = &suite.new_case(); - - let mut branch = test_branch(); - let target = Target { - branch: "refs/remotes/remote name/branch name".parse().unwrap(), - remote_url: "remote url".to_string(), - sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(), - }; - - let branch_writer = branch::Writer::new(gb_repository, project.gb_dir())?; - branch_writer.write(&mut branch)?; - - let target_writer = target::Writer::new(gb_repository, project.gb_dir())?; - target_writer.write(&branch.id, &target)?; - - let root = gb_repository - .root() - .join("branches") - .join(branch.id.to_string()); - - assert_eq!( - fs::read_to_string(root.join("meta").join("name").to_str().unwrap()) - .context("Failed to read branch name")?, - branch.name - ); - assert_eq!( - fs::read_to_string(root.join("target").join("branch_name").to_str().unwrap()) - .context("Failed to read branch target name")?, - format!("{}/{}", target.branch.remote(), target.branch.branch()) - ); - assert_eq!( - fs::read_to_string(root.join("target").join("remote_name").to_str().unwrap()) - .context("Failed to read branch target name name")?, - target.branch.remote() - ); - assert_eq!( - fs::read_to_string(root.join("target").join("remote_url").to_str().unwrap()) - .context("Failed to read branch target remote url")?, - target.remote_url - ); - assert_eq!( - fs::read_to_string(root.join("target").join("sha").to_str().unwrap()) - .context("Failed to read branch target sha")?, - target.sha.to_string() - ); - - assert_eq!( - fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())? 
- .parse::() - .context("Failed to read branch applied")?, - branch.applied - ); - assert_eq!( - fs::read_to_string(root.join("meta").join("upstream").to_str().unwrap()) - .context("Failed to read branch upstream")?, - branch.upstream.unwrap().to_string() - ); - assert_eq!( - fs::read_to_string( - root.join("meta") - .join("created_timestamp_ms") - .to_str() - .unwrap() - ) - .context("Failed to read branch created timestamp")? - .parse::() - .context("Failed to parse branch created timestamp")?, - branch.created_timestamp_ms - ); - assert_eq!( - fs::read_to_string( - root.join("meta") - .join("updated_timestamp_ms") - .to_str() - .unwrap() - ) - .context("Failed to read branch updated timestamp")? - .parse::() - .context("Failed to parse branch updated timestamp")?, - branch.updated_timestamp_ms - ); - - Ok(()) -} - -#[test] -fn should_update() -> anyhow::Result<()> { - let suite = Suite::default(); - let Case { - gb_repository, - project, - .. - } = &suite.new_case(); - - let mut branch = test_branch(); - let target = Target { - branch: "refs/remotes/remote name/branch name".parse().unwrap(), - remote_url: "remote url".to_string(), - sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(), - }; - - let branch_writer = branch::Writer::new(gb_repository, project.gb_dir())?; - branch_writer.write(&mut branch)?; - let target_writer = target::Writer::new(gb_repository, project.gb_dir())?; - target_writer.write(&branch.id, &target)?; - - let updated_target = Target { - branch: "refs/remotes/updated remote name/updated branch name" - .parse() - .unwrap(), - remote_url: "updated remote url".to_string(), - sha: "fedcba9876543210fedcba9876543210fedcba98".parse().unwrap(), - }; - - target_writer.write(&branch.id, &updated_target)?; - - let root = gb_repository - .root() - .join("branches") - .join(branch.id.to_string()); - - assert_eq!( - fs::read_to_string(root.join("target").join("branch_name").to_str().unwrap()) - .context("Failed to read branch target branch 
name")?, - format!( - "{}/{}", - updated_target.branch.remote(), - updated_target.branch.branch() - ) - ); - - assert_eq!( - fs::read_to_string(root.join("target").join("remote_name").to_str().unwrap()) - .context("Failed to read branch target remote name")?, - updated_target.branch.remote() - ); - assert_eq!( - fs::read_to_string(root.join("target").join("remote_url").to_str().unwrap()) - .context("Failed to read branch target remote url")?, - updated_target.remote_url - ); - assert_eq!( - fs::read_to_string(root.join("target").join("sha").to_str().unwrap()) - .context("Failed to read branch target sha")?, - updated_target.sha.to_string() - ); - - Ok(()) -} diff --git a/gitbutler-app/tests/watcher/handler/calculate_delta_handler.rs b/gitbutler-app/tests/watcher/handler/calculate_delta_handler.rs index 443026fb3..af3cf34e9 100644 --- a/gitbutler-app/tests/watcher/handler/calculate_delta_handler.rs +++ b/gitbutler-app/tests/watcher/handler/calculate_delta_handler.rs @@ -7,13 +7,13 @@ use std::{ use once_cell::sync::Lazy; -use crate::{commit_all, Case, Suite}; -use gitbutler_app::watcher::handlers::calculate_deltas_handler::Handler; -use gitbutler_app::{ +use crate::shared::{commit_all, Case, Suite}; +use gitbutler::{ deltas::{self, operations::Operation}, reader, sessions, virtual_branches::{self, branch}, }; +use gitbutler_app::watcher::handlers::calculate_deltas_handler::Handler; use self::branch::BranchId; @@ -663,7 +663,7 @@ fn should_persist_branches_targets_state_between_sessions() -> Result<()> { let branches = virtual_branches::Iterator::new(&session_reader) .unwrap() - .collect::, gitbutler_app::reader::Error>>() + .collect::, gitbutler::reader::Error>>() .unwrap() .into_iter() .collect::>(); @@ -719,7 +719,7 @@ fn should_restore_branches_targets_state_from_head_session() -> Result<()> { let branches = virtual_branches::Iterator::new(&session_reader) .unwrap() - .collect::, gitbutler_app::reader::Error>>() + .collect::, gitbutler::reader::Error>>() 
.unwrap() .into_iter() .collect::>(); diff --git a/gitbutler-app/tests/watcher/handler/fetch_gitbutler_data.rs b/gitbutler-app/tests/watcher/handler/fetch_gitbutler_data.rs index 93dc2a02c..d147f4b13 100644 --- a/gitbutler-app/tests/watcher/handler/fetch_gitbutler_data.rs +++ b/gitbutler-app/tests/watcher/handler/fetch_gitbutler_data.rs @@ -1,10 +1,10 @@ use std::time::SystemTime; -use gitbutler_app::projects; +use gitbutler::projects; use pretty_assertions::assert_eq; +use crate::shared::{Case, Suite}; use crate::watcher::handler::test_remote_repository; -use crate::{Case, Suite}; use gitbutler_app::watcher::handlers::fetch_gitbutler_data::Handler; #[tokio::test] diff --git a/gitbutler-app/tests/watcher/handler/git_file_change.rs b/gitbutler-app/tests/watcher/handler/git_file_change.rs index b090d71bd..3c9d0ce3b 100644 --- a/gitbutler-app/tests/watcher/handler/git_file_change.rs +++ b/gitbutler-app/tests/watcher/handler/git_file_change.rs @@ -1,10 +1,10 @@ use anyhow::Result; use std::fs; -use gitbutler_app::projects; +use gitbutler::projects; use pretty_assertions::assert_eq; -use crate::{Case, Suite}; +use crate::shared::{Case, Suite}; use gitbutler_app::watcher::handlers::git_file_change::Handler; use gitbutler_app::watcher::{handlers, Event}; diff --git a/gitbutler-app/tests/watcher/handler/mod.rs b/gitbutler-app/tests/watcher/handler/mod.rs index 658ec3efd..0ef1b56df 100644 --- a/gitbutler-app/tests/watcher/handler/mod.rs +++ b/gitbutler-app/tests/watcher/handler/mod.rs @@ -1,4 +1,4 @@ -use crate::init_opts_bare; +use crate::shared::init_opts_bare; use tempfile::TempDir; fn test_remote_repository() -> anyhow::Result<(git2::Repository, TempDir)> { diff --git a/gitbutler-app/tests/watcher/handler/push_project_to_gitbutler.rs b/gitbutler-app/tests/watcher/handler/push_project_to_gitbutler.rs index e1b943de1..33280fa9b 100644 --- a/gitbutler-app/tests/watcher/handler/push_project_to_gitbutler.rs +++ 
b/gitbutler-app/tests/watcher/handler/push_project_to_gitbutler.rs @@ -1,12 +1,12 @@ use anyhow::Result; -use gitbutler_app::{git, projects}; +use gitbutler::{git, projects}; use std::collections::HashMap; use std::path::PathBuf; -use crate::virtual_branches::set_test_target; +use crate::shared::virtual_branches::set_test_target; +use crate::shared::{Case, Suite}; use crate::watcher::handler::test_remote_repository; -use crate::{Case, Suite}; -use gitbutler_app::project_repository::LogUntil; +use gitbutler::project_repository::LogUntil; use gitbutler_app::watcher::handlers::push_project_to_gitbutler::Handler; fn log_walk(repo: &git2::Repository, head: git::Oid) -> Vec { diff --git a/gitbutler-app/tests/zip/mod.rs b/gitbutler-app/tests/zip/mod.rs deleted file mode 100644 index f35ac2f84..000000000 --- a/gitbutler-app/tests/zip/mod.rs +++ /dev/null @@ -1,47 +0,0 @@ -use gitbutler_app::zip::Zipper; -use walkdir::WalkDir; - -use std::fs::File; -use std::io::Write; -use tempfile::tempdir; - -#[test] -fn zip_dir() { - let tmp_dir = tempdir().unwrap(); - let tmp_dir_path = tmp_dir.path(); - let file_path = tmp_dir_path.join("test.txt"); - let mut file = File::create(file_path).unwrap(); - file.write_all(b"test").unwrap(); - - let zipper_cache = tempdir().unwrap(); - let zipper = Zipper::new(zipper_cache.path()); - let zip_file_path = zipper.zip(tmp_dir).unwrap(); - assert!(zip_file_path.exists()); -} - -#[test] -fn zip_file() { - let tmp_dir = tempdir().unwrap(); - let tmp_dir_path = tmp_dir.path(); - let file_path = tmp_dir_path.join("test.txt"); - let mut file = File::create(&file_path).unwrap(); - file.write_all(b"test").unwrap(); - - let zipper_cache = tempdir().unwrap(); - let zipper = Zipper::new(zipper_cache.path()); - zipper.zip(file_path).unwrap_err(); -} - -#[test] -fn zip_once() { - let tmp_dir = tempdir().unwrap(); - let tmp_dir_path = tmp_dir.path(); - let file_path = tmp_dir_path.join("test.txt"); - let mut file = File::create(file_path).unwrap(); - 
file.write_all(b"test").unwrap(); - - let zipper_cache = tempdir().unwrap(); - let zipper = Zipper::new(zipper_cache.path()); - assert_eq!(zipper.zip(&tmp_dir).unwrap(), zipper.zip(&tmp_dir).unwrap()); - assert_eq!(WalkDir::new(tmp_dir).into_iter().count(), 1); -} diff --git a/src/deltas.rs b/src/deltas.rs index 63d7c3db2..8bb23303d 100644 --- a/src/deltas.rs +++ b/src/deltas.rs @@ -1,4 +1,4 @@ -mod controller; +pub mod controller; mod delta; mod document; mod reader; diff --git a/src/keys.rs b/src/keys.rs index 69b03a6b2..852f758bc 100644 --- a/src/keys.rs +++ b/src/keys.rs @@ -1,4 +1,4 @@ -mod controller; +pub mod controller; mod key; pub mod storage; diff --git a/src/sessions.rs b/src/sessions.rs index 1ac34de9c..458810ee5 100644 --- a/src/sessions.rs +++ b/src/sessions.rs @@ -1,4 +1,4 @@ -mod controller; +pub mod controller; mod iterator; mod reader; pub mod session; diff --git a/src/zip.rs b/src/zip.rs index 577e9fc02..f16d4be2e 100644 --- a/src/zip.rs +++ b/src/zip.rs @@ -1,4 +1,4 @@ -mod controller; +pub mod controller; pub use controller::Controller; use std::{ diff --git a/tests/app.rs b/tests/app.rs index 8781ad1e5..6e4079a5d 100644 --- a/tests/app.rs +++ b/tests/app.rs @@ -1,6 +1,5 @@ -const VAR_NO_CLEANUP: &str = "GITBUTLER_TESTS_NO_CLEANUP"; +pub mod shared; -pub(crate) mod common; mod suite { mod gb_repository; mod projects; @@ -18,242 +17,3 @@ mod sessions; mod types; pub mod virtual_branches; mod zip; - -use std::path::{Path, PathBuf}; -use std::{collections::HashMap, fs}; - -use tempfile::{tempdir, TempDir}; - -pub struct Suite { - pub local_app_data: Option, - pub storage: gitbutler::storage::Storage, - pub users: gitbutler::users::Controller, - pub projects: gitbutler::projects::Controller, - pub keys: gitbutler::keys::Controller, -} - -impl Drop for Suite { - fn drop(&mut self) { - if std::env::var_os(VAR_NO_CLEANUP).is_some() { - let _ = self.local_app_data.take().unwrap().into_path(); - } - } -} - -impl Default for Suite { - fn default() -> 
Self { - let local_app_data = temp_dir(); - let storage = gitbutler::storage::Storage::new(&local_app_data); - let users = gitbutler::users::Controller::from_path(&local_app_data); - let projects = gitbutler::projects::Controller::from_path(&local_app_data); - let keys = gitbutler::keys::Controller::from_path(&local_app_data); - Self { - storage, - local_app_data: Some(local_app_data), - users, - projects, - keys, - } - } -} - -impl Suite { - pub fn local_app_data(&self) -> &Path { - self.local_app_data.as_ref().unwrap().path() - } - pub fn sign_in(&self) -> gitbutler::users::User { - let user = gitbutler::users::User { - name: Some("test".to_string()), - email: "test@email.com".to_string(), - access_token: "token".to_string(), - ..Default::default() - }; - self.users.set_user(&user).expect("failed to add user"); - user - } - - fn project(&self, fs: HashMap) -> (gitbutler::projects::Project, TempDir) { - let (repository, tmp) = test_repository(); - for (path, contents) in fs { - if let Some(parent) = path.parent() { - fs::create_dir_all(repository.path().parent().unwrap().join(parent)) - .expect("failed to create dir"); - } - fs::write( - repository.path().parent().unwrap().join(&path), - contents.as_bytes(), - ) - .expect("failed to write file"); - } - commit_all(&repository); - - ( - self.projects - .add(repository.path().parent().unwrap()) - .expect("failed to add project"), - tmp, - ) - } - - pub fn new_case_with_files(&self, fs: HashMap) -> Case { - let (project, project_tmp) = self.project(fs); - Case::new(self, project, project_tmp) - } - - pub fn new_case(&self) -> Case { - self.new_case_with_files(HashMap::new()) - } -} - -pub struct Case<'a> { - suite: &'a Suite, - pub project: gitbutler::projects::Project, - pub project_repository: gitbutler::project_repository::Repository, - pub gb_repository: gitbutler::gb_repository::Repository, - pub credentials: gitbutler::git::credentials::Helper, - /// The directory containing the `project_repository` - 
project_tmp: Option, -} - -impl Drop for Case<'_> { - fn drop(&mut self) { - if let Some(tmp) = self - .project_tmp - .take() - .filter(|_| std::env::var_os(VAR_NO_CLEANUP).is_some()) - { - let _ = tmp.into_path(); - } - } -} - -impl<'a> Case<'a> { - fn new( - suite: &'a Suite, - project: gitbutler::projects::Project, - project_tmp: TempDir, - ) -> Case<'a> { - let project_repository = gitbutler::project_repository::Repository::open(&project) - .expect("failed to create project repository"); - let gb_repository = gitbutler::gb_repository::Repository::open( - suite.local_app_data(), - &project_repository, - None, - ) - .expect("failed to open gb repository"); - let credentials = gitbutler::git::credentials::Helper::from_path(suite.local_app_data()); - Case { - suite, - project, - gb_repository, - project_repository, - project_tmp: Some(project_tmp), - credentials, - } - } - - pub fn refresh(mut self) -> Self { - let project = self - .suite - .projects - .get(&self.project.id) - .expect("failed to get project"); - let project_repository = gitbutler::project_repository::Repository::open(&project) - .expect("failed to create project repository"); - let user = self.suite.users.get_user().expect("failed to get user"); - let credentials = - gitbutler::git::credentials::Helper::from_path(self.suite.local_app_data()); - Self { - suite: self.suite, - gb_repository: gitbutler::gb_repository::Repository::open( - self.suite.local_app_data(), - &project_repository, - user.as_ref(), - ) - .expect("failed to open gb repository"), - credentials, - project_repository, - project, - project_tmp: self.project_tmp.take(), - } - } -} - -pub fn test_database() -> (gitbutler::database::Database, TempDir) { - let tmp = temp_dir(); - let db = gitbutler::database::Database::open_in_directory(&tmp).unwrap(); - (db, tmp) -} - -pub fn temp_dir() -> TempDir { - tempdir().unwrap() -} - -pub fn empty_bare_repository() -> (gitbutler::git::Repository, TempDir) { - let tmp = temp_dir(); - ( - 
gitbutler::git::Repository::init_opts(&tmp, &init_opts_bare()) - .expect("failed to init repository"), - tmp, - ) -} - -pub fn test_repository() -> (gitbutler::git::Repository, TempDir) { - let tmp = temp_dir(); - let repository = gitbutler::git::Repository::init_opts(&tmp, &init_opts()) - .expect("failed to init repository"); - let mut index = repository.index().expect("failed to get index"); - let oid = index.write_tree().expect("failed to write tree"); - let signature = gitbutler::git::Signature::now("test", "test@email.com").unwrap(); - repository - .commit( - Some(&"refs/heads/master".parse().unwrap()), - &signature, - &signature, - "Initial commit", - &repository.find_tree(oid).expect("failed to find tree"), - &[], - ) - .expect("failed to commit"); - (repository, tmp) -} - -pub fn commit_all(repository: &gitbutler::git::Repository) -> gitbutler::git::Oid { - let mut index = repository.index().expect("failed to get index"); - index - .add_all(["."], git2::IndexAddOption::DEFAULT, None) - .expect("failed to add all"); - index.write().expect("failed to write index"); - let oid = index.write_tree().expect("failed to write tree"); - let signature = gitbutler::git::Signature::now("test", "test@email.com").unwrap(); - let head = repository.head().expect("failed to get head"); - let commit_oid = repository - .commit( - Some(&head.name().unwrap()), - &signature, - &signature, - "some commit", - &repository.find_tree(oid).expect("failed to find tree"), - &[&repository - .find_commit( - repository - .refname_to_id("HEAD") - .expect("failed to get head"), - ) - .expect("failed to find commit")], - ) - .expect("failed to commit"); - commit_oid -} - -fn init_opts() -> git2::RepositoryInitOptions { - let mut opts = git2::RepositoryInitOptions::new(); - opts.initial_head("master"); - opts -} - -pub fn init_opts_bare() -> git2::RepositoryInitOptions { - let mut opts = init_opts(); - opts.bare(true); - opts -} diff --git a/tests/database/mod.rs b/tests/database/mod.rs index 
97a95c517..37223235f 100644 --- a/tests/database/mod.rs +++ b/tests/database/mod.rs @@ -1,4 +1,4 @@ -use crate::temp_dir; +use crate::shared::temp_dir; use gitbutler::database::Database; #[test] diff --git a/tests/deltas/mod.rs b/tests/deltas/mod.rs index 8cc2dfb69..df87ed792 100644 --- a/tests/deltas/mod.rs +++ b/tests/deltas/mod.rs @@ -1,5 +1,5 @@ mod database { - use crate::test_database; + use crate::shared::test_database; use gitbutler::deltas::{operations, Database, Delta}; use gitbutler::projects::ProjectId; use gitbutler::sessions::SessionId; @@ -111,7 +111,7 @@ mod writer { use gitbutler::{deltas, sessions}; use std::vec; - use crate::{Case, Suite}; + use crate::shared::{Case, Suite}; #[test] fn write_no_vbranches() -> anyhow::Result<()> { diff --git a/tests/gb_repository/mod.rs b/tests/gb_repository/mod.rs index efe55788c..9505fb05b 100644 --- a/tests/gb_repository/mod.rs +++ b/tests/gb_repository/mod.rs @@ -4,8 +4,8 @@ use anyhow::Result; use pretty_assertions::assert_eq; use tempfile::TempDir; -use crate::init_opts_bare; -use crate::{Case, Suite}; +use crate::shared::init_opts_bare; +use crate::shared::{Case, Suite}; use gitbutler::{ deltas::{self, operations::Operation}, @@ -17,7 +17,7 @@ use gitbutler::{ mod repository { use std::path::PathBuf; - use crate::{Case, Suite}; + use crate::shared::{Case, Suite}; use anyhow::Result; use pretty_assertions::assert_eq; diff --git a/tests/git/config.rs b/tests/git/config.rs index 730401d70..6f94b90a6 100644 --- a/tests/git/config.rs +++ b/tests/git/config.rs @@ -1,4 +1,4 @@ -use crate::test_repository; +use crate::shared::test_repository; #[test] pub fn set_str() { diff --git a/tests/git/credentials.rs b/tests/git/credentials.rs index 92e5c9e27..00fa1d86c 100644 --- a/tests/git/credentials.rs +++ b/tests/git/credentials.rs @@ -2,7 +2,7 @@ use gitbutler::git::credentials::{Credential, Helper, HttpsCredential, SshCreden use gitbutler::{keys, project_repository, projects, users}; use std::path::PathBuf; -use 
crate::{temp_dir, test_repository}; +use crate::shared::{temp_dir, test_repository}; #[derive(Default)] struct TestCase<'a> { diff --git a/tests/git/diff.rs b/tests/git/diff.rs index 557e9dfd2..5494c2328 100644 --- a/tests/git/diff.rs +++ b/tests/git/diff.rs @@ -4,8 +4,8 @@ use anyhow::Result; use pretty_assertions::assert_eq; use tempfile::TempDir; -use crate::init_opts_bare; -use crate::{Case, Suite}; +use crate::shared::init_opts_bare; +use crate::shared::{Case, Suite}; use gitbutler::{ deltas::{self, operations::Operation}, projects::{self, ApiProject, ProjectId}, diff --git a/tests/keys/mod.rs b/tests/keys/mod.rs index 4068cee55..5856691b7 100644 --- a/tests/keys/mod.rs +++ b/tests/keys/mod.rs @@ -9,7 +9,7 @@ mod controller { #[cfg(target_family = "unix")] use std::os::unix::prelude::*; - use crate::Suite; + use crate::shared::Suite; #[test] fn get_or_create() { diff --git a/tests/lock/mod.rs b/tests/lock/mod.rs index 071f42992..40a4442b1 100644 --- a/tests/lock/mod.rs +++ b/tests/lock/mod.rs @@ -1,6 +1,6 @@ use gitbutler::lock::Dir; -use crate::temp_dir; +use crate::shared::temp_dir; #[tokio::test] async fn lock_same_instance() { diff --git a/tests/reader/mod.rs b/tests/reader/mod.rs index 418d5fbf8..549c68903 100644 --- a/tests/reader/mod.rs +++ b/tests/reader/mod.rs @@ -2,7 +2,7 @@ use gitbutler::reader::{CommitReader, Content, Reader}; use std::fs; use std::path::Path; -use crate::{commit_all, temp_dir, test_repository}; +use crate::shared::{commit_all, temp_dir, test_repository}; use anyhow::Result; #[test] diff --git a/tests/sessions/database.rs b/tests/sessions/database.rs index 70cb8310e..185e02f29 100644 --- a/tests/sessions/database.rs +++ b/tests/sessions/database.rs @@ -1,4 +1,4 @@ -use crate::test_database; +use crate::shared::test_database; use gitbutler::projects::ProjectId; use gitbutler::sessions::{session, Database, Session, SessionId}; diff --git a/tests/sessions/mod.rs b/tests/sessions/mod.rs index 6671f7a02..bc9d79efb 100644 --- 
a/tests/sessions/mod.rs +++ b/tests/sessions/mod.rs @@ -2,7 +2,7 @@ mod database; use anyhow::Result; -use crate::{Case, Suite}; +use crate::shared::{Case, Suite}; use gitbutler::sessions::{self, session::SessionId}; #[test] diff --git a/tests/shared/mod.rs b/tests/shared/mod.rs new file mode 100644 index 000000000..5d1f78815 --- /dev/null +++ b/tests/shared/mod.rs @@ -0,0 +1,61 @@ +pub const VAR_NO_CLEANUP: &str = "GITBUTLER_TESTS_NO_CLEANUP"; + +mod test_project; +pub use test_project::TestProject; + +mod suite; +pub use suite::*; + +pub mod paths { + use super::temp_dir; + use tempfile::TempDir; + + pub fn data_dir() -> TempDir { + temp_dir() + } +} + +pub mod virtual_branches { + use crate::shared::empty_bare_repository; + use gitbutler::{gb_repository, project_repository, virtual_branches}; + + pub fn set_test_target( + gb_repo: &gb_repository::Repository, + project_repository: &project_repository::Repository, + ) -> anyhow::Result<()> { + let (remote_repo, _tmp) = empty_bare_repository(); + let mut remote = project_repository + .git_repository + .remote( + "origin", + &remote_repo.path().to_str().unwrap().parse().unwrap(), + ) + .expect("failed to add remote"); + remote.push(&["refs/heads/master:refs/heads/master"], None)?; + + virtual_branches::target::Writer::new(gb_repo, project_repository.project().gb_dir())? 
+ .write_default(&virtual_branches::target::Target { + branch: "refs/remotes/origin/master".parse().unwrap(), + remote_url: remote_repo.path().to_str().unwrap().parse().unwrap(), + sha: remote_repo.head().unwrap().target().unwrap(), + }) + .expect("failed to write target"); + + virtual_branches::integration::update_gitbutler_integration(gb_repo, project_repository) + .expect("failed to update integration"); + + Ok(()) + } +} + +pub fn init_opts() -> git2::RepositoryInitOptions { + let mut opts = git2::RepositoryInitOptions::new(); + opts.initial_head("master"); + opts +} + +pub fn init_opts_bare() -> git2::RepositoryInitOptions { + let mut opts = init_opts(); + opts.bare(true); + opts +} diff --git a/tests/shared/suite.rs b/tests/shared/suite.rs new file mode 100644 index 000000000..eee4e9b60 --- /dev/null +++ b/tests/shared/suite.rs @@ -0,0 +1,227 @@ +use std::path::{Path, PathBuf}; +use std::{collections::HashMap, fs}; + +use crate::shared::{init_opts, init_opts_bare, VAR_NO_CLEANUP}; +use tempfile::{tempdir, TempDir}; + +pub struct Suite { + pub local_app_data: Option, + pub storage: gitbutler::storage::Storage, + pub users: gitbutler::users::Controller, + pub projects: gitbutler::projects::Controller, + pub keys: gitbutler::keys::Controller, +} + +impl Drop for Suite { + fn drop(&mut self) { + if std::env::var_os(VAR_NO_CLEANUP).is_some() { + let _ = self.local_app_data.take().unwrap().into_path(); + } + } +} + +impl Default for Suite { + fn default() -> Self { + let local_app_data = temp_dir(); + let storage = gitbutler::storage::Storage::new(&local_app_data); + let users = gitbutler::users::Controller::from_path(&local_app_data); + let projects = gitbutler::projects::Controller::from_path(&local_app_data); + let keys = gitbutler::keys::Controller::from_path(&local_app_data); + Self { + storage, + local_app_data: Some(local_app_data), + users, + projects, + keys, + } + } +} + +impl Suite { + pub fn local_app_data(&self) -> &Path { + 
self.local_app_data.as_ref().unwrap().path() + } + pub fn sign_in(&self) -> gitbutler::users::User { + let user = gitbutler::users::User { + name: Some("test".to_string()), + email: "test@email.com".to_string(), + access_token: "token".to_string(), + ..Default::default() + }; + self.users.set_user(&user).expect("failed to add user"); + user + } + + fn project(&self, fs: HashMap) -> (gitbutler::projects::Project, TempDir) { + let (repository, tmp) = test_repository(); + for (path, contents) in fs { + if let Some(parent) = path.parent() { + fs::create_dir_all(repository.path().parent().unwrap().join(parent)) + .expect("failed to create dir"); + } + fs::write( + repository.path().parent().unwrap().join(&path), + contents.as_bytes(), + ) + .expect("failed to write file"); + } + commit_all(&repository); + + ( + self.projects + .add(repository.path().parent().unwrap()) + .expect("failed to add project"), + tmp, + ) + } + + pub fn new_case_with_files(&self, fs: HashMap) -> Case { + let (project, project_tmp) = self.project(fs); + Case::new(self, project, project_tmp) + } + + pub fn new_case(&self) -> Case { + self.new_case_with_files(HashMap::new()) + } +} + +pub struct Case<'a> { + suite: &'a Suite, + pub project: gitbutler::projects::Project, + pub project_repository: gitbutler::project_repository::Repository, + pub gb_repository: gitbutler::gb_repository::Repository, + pub credentials: gitbutler::git::credentials::Helper, + /// The directory containing the `project_repository` + project_tmp: Option, +} + +impl Drop for Case<'_> { + fn drop(&mut self) { + if let Some(tmp) = self + .project_tmp + .take() + .filter(|_| std::env::var_os(VAR_NO_CLEANUP).is_some()) + { + let _ = tmp.into_path(); + } + } +} + +impl<'a> Case<'a> { + fn new( + suite: &'a Suite, + project: gitbutler::projects::Project, + project_tmp: TempDir, + ) -> Case<'a> { + let project_repository = gitbutler::project_repository::Repository::open(&project) + .expect("failed to create project repository"); + 
let gb_repository = gitbutler::gb_repository::Repository::open( + suite.local_app_data(), + &project_repository, + None, + ) + .expect("failed to open gb repository"); + let credentials = gitbutler::git::credentials::Helper::from_path(suite.local_app_data()); + Case { + suite, + project, + gb_repository, + project_repository, + project_tmp: Some(project_tmp), + credentials, + } + } + + pub fn refresh(mut self) -> Self { + let project = self + .suite + .projects + .get(&self.project.id) + .expect("failed to get project"); + let project_repository = gitbutler::project_repository::Repository::open(&project) + .expect("failed to create project repository"); + let user = self.suite.users.get_user().expect("failed to get user"); + let credentials = + gitbutler::git::credentials::Helper::from_path(self.suite.local_app_data()); + Self { + suite: self.suite, + gb_repository: gitbutler::gb_repository::Repository::open( + self.suite.local_app_data(), + &project_repository, + user.as_ref(), + ) + .expect("failed to open gb repository"), + credentials, + project_repository, + project, + project_tmp: self.project_tmp.take(), + } + } +} + +pub fn test_database() -> (gitbutler::database::Database, TempDir) { + let tmp = temp_dir(); + let db = gitbutler::database::Database::open_in_directory(&tmp).unwrap(); + (db, tmp) +} + +pub fn temp_dir() -> TempDir { + tempdir().unwrap() +} + +pub fn empty_bare_repository() -> (gitbutler::git::Repository, TempDir) { + let tmp = temp_dir(); + ( + gitbutler::git::Repository::init_opts(&tmp, &init_opts_bare()) + .expect("failed to init repository"), + tmp, + ) +} + +pub fn test_repository() -> (gitbutler::git::Repository, TempDir) { + let tmp = temp_dir(); + let repository = gitbutler::git::Repository::init_opts(&tmp, &init_opts()) + .expect("failed to init repository"); + let mut index = repository.index().expect("failed to get index"); + let oid = index.write_tree().expect("failed to write tree"); + let signature = 
gitbutler::git::Signature::now("test", "test@email.com").unwrap(); + repository + .commit( + Some(&"refs/heads/master".parse().unwrap()), + &signature, + &signature, + "Initial commit", + &repository.find_tree(oid).expect("failed to find tree"), + &[], + ) + .expect("failed to commit"); + (repository, tmp) +} + +pub fn commit_all(repository: &gitbutler::git::Repository) -> gitbutler::git::Oid { + let mut index = repository.index().expect("failed to get index"); + index + .add_all(["."], git2::IndexAddOption::DEFAULT, None) + .expect("failed to add all"); + index.write().expect("failed to write index"); + let oid = index.write_tree().expect("failed to write tree"); + let signature = gitbutler::git::Signature::now("test", "test@email.com").unwrap(); + let head = repository.head().expect("failed to get head"); + let commit_oid = repository + .commit( + Some(&head.name().unwrap()), + &signature, + &signature, + "some commit", + &repository.find_tree(oid).expect("failed to find tree"), + &[&repository + .find_commit( + repository + .refname_to_id("HEAD") + .expect("failed to get head"), + ) + .expect("failed to find commit")], + ) + .expect("failed to commit"); + commit_oid +} diff --git a/tests/common/mod.rs b/tests/shared/test_project.rs similarity index 98% rename from tests/common/mod.rs rename to tests/shared/test_project.rs index 6a337dd80..9d7fe3cb1 100644 --- a/tests/common/mod.rs +++ b/tests/shared/test_project.rs @@ -1,5 +1,6 @@ #![allow(unused)] -use crate::{init_opts, VAR_NO_CLEANUP}; + +use crate::shared::{init_opts, VAR_NO_CLEANUP}; use gitbutler::git; use std::{path, str::from_utf8}; use tempfile::TempDir; @@ -343,13 +344,3 @@ impl TestProject { submodule.add_finalize().unwrap(); } } - -pub mod paths { - use super::temp_dir; - use std::path; - use tempfile::TempDir; - - pub fn data_dir() -> TempDir { - temp_dir() - } -} diff --git a/tests/suite/gb_repository.rs b/tests/suite/gb_repository.rs index d02414c83..03af30c03 100644 --- 
a/tests/suite/gb_repository.rs +++ b/tests/suite/gb_repository.rs @@ -1,4 +1,4 @@ -use crate::common::{paths, TestProject}; +use crate::shared::{paths, TestProject}; use gitbutler::{gb_repository, git, project_repository, projects}; use std::path; diff --git a/tests/suite/projects.rs b/tests/suite/projects.rs index 5d30240c2..422d9a195 100644 --- a/tests/suite/projects.rs +++ b/tests/suite/projects.rs @@ -1,7 +1,7 @@ use gitbutler::projects::Controller; use tempfile::TempDir; -use crate::common::{self, paths}; +use crate::shared::{self, paths}; pub fn new() -> (Controller, TempDir) { let data_dir = paths::data_dir(); @@ -15,7 +15,7 @@ mod add { #[test] fn success() { let (controller, _tmp) = new(); - let repository = common::TestProject::default(); + let repository = shared::TestProject::default(); let path = repository.path(); let project = controller.add(path).unwrap(); assert_eq!(project.path, path); @@ -62,7 +62,7 @@ mod add { #[test] fn twice() { let (controller, _tmp) = new(); - let repository = common::TestProject::default(); + let repository = shared::TestProject::default(); let path = repository.path(); controller.add(path).unwrap(); assert!(matches!(controller.add(path), Err(AddError::AlreadyExists))); diff --git a/tests/suite/virtual_branches/mod.rs b/tests/suite/virtual_branches/mod.rs index c71ebd6a5..f08b80286 100644 --- a/tests/suite/virtual_branches/mod.rs +++ b/tests/suite/virtual_branches/mod.rs @@ -1,8 +1,8 @@ use std::{fs, path, str::FromStr}; use tempfile::TempDir; -use crate::common::{paths, TestProject}; -use crate::VAR_NO_CLEANUP; +use crate::shared::VAR_NO_CLEANUP; +use crate::shared::{paths, TestProject}; use gitbutler::{ git, keys, projects::{self, ProjectId}, diff --git a/tests/virtual_branches/branch/reader.rs b/tests/virtual_branches/branch/reader.rs index f99c5816d..61cdf3d94 100644 --- a/tests/virtual_branches/branch/reader.rs +++ b/tests/virtual_branches/branch/reader.rs @@ -3,7 +3,7 @@ use std::sync::atomic::{AtomicUsize, 
Ordering}; use anyhow::Result; use once_cell::sync::Lazy; -use crate::{Case, Suite}; +use crate::shared::{Case, Suite}; use gitbutler::virtual_branches::branch::BranchOwnershipClaims; use gitbutler::virtual_branches::{branch, Branch, BranchId}; diff --git a/tests/virtual_branches/branch/writer.rs b/tests/virtual_branches/branch/writer.rs index 9fcc8598a..1e65e18c5 100644 --- a/tests/virtual_branches/branch/writer.rs +++ b/tests/virtual_branches/branch/writer.rs @@ -7,7 +7,7 @@ use anyhow::Context; use gitbutler::virtual_branches::branch; use once_cell::sync::Lazy; -use crate::{Case, Suite}; +use crate::shared::{Case, Suite}; use self::branch::BranchId; diff --git a/tests/virtual_branches/iterator.rs b/tests/virtual_branches/iterator.rs index df2521773..22dee020b 100644 --- a/tests/virtual_branches/iterator.rs +++ b/tests/virtual_branches/iterator.rs @@ -4,7 +4,7 @@ use anyhow::Result; use gitbutler::virtual_branches; use once_cell::sync::Lazy; -use crate::{Case, Suite}; +use crate::shared::{Case, Suite}; static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); diff --git a/tests/virtual_branches/mod.rs b/tests/virtual_branches/mod.rs index 3d255c155..ead1d0506 100644 --- a/tests/virtual_branches/mod.rs +++ b/tests/virtual_branches/mod.rs @@ -13,12 +13,10 @@ use std::{ os::unix::{fs::symlink, prelude::*}, }; -use crate::{commit_all, empty_bare_repository, Case, Suite}; -use gitbutler::{ - gb_repository, git, project_repository, reader, sessions, virtual_branches, - virtual_branches::errors::CommitError, -}; +use crate::shared::{commit_all, Case, Suite}; +use gitbutler::{git, reader, sessions, virtual_branches, virtual_branches::errors::CommitError}; +use crate::shared::virtual_branches::set_test_target; use gitbutler::virtual_branches::branch::{BranchCreateRequest, BranchOwnershipClaims}; use gitbutler::virtual_branches::integration::verify_branch; use gitbutler::virtual_branches::{ @@ -27,34 +25,6 @@ use gitbutler::virtual_branches::{ unapply_ownership, 
update_branch, }; -pub fn set_test_target( - gb_repo: &gb_repository::Repository, - project_repository: &project_repository::Repository, -) -> Result<()> { - let (remote_repo, _tmp) = empty_bare_repository(); - let mut remote = project_repository - .git_repository - .remote( - "origin", - &remote_repo.path().to_str().unwrap().parse().unwrap(), - ) - .expect("failed to add remote"); - remote.push(&["refs/heads/master:refs/heads/master"], None)?; - - virtual_branches::target::Writer::new(gb_repo, project_repository.project().gb_dir())? - .write_default(&virtual_branches::target::Target { - branch: "refs/remotes/origin/master".parse().unwrap(), - remote_url: remote_repo.path().to_str().unwrap().parse().unwrap(), - sha: remote_repo.head().unwrap().target().unwrap(), - }) - .expect("failed to write target"); - - virtual_branches::integration::update_gitbutler_integration(gb_repo, project_repository) - .expect("failed to update integration"); - - Ok(()) -} - #[test] fn commit_on_branch_then_change_file_then_get_status() -> Result<()> { let suite = Suite::default(); diff --git a/tests/virtual_branches/target/reader.rs b/tests/virtual_branches/target/reader.rs index f6aa4349d..b3a8586c9 100644 --- a/tests/virtual_branches/target/reader.rs +++ b/tests/virtual_branches/target/reader.rs @@ -5,7 +5,7 @@ use std::sync::atomic::{AtomicUsize, Ordering}; use anyhow::Result; use once_cell::sync::Lazy; -use crate::{Case, Suite}; +use crate::shared::{Case, Suite}; static TEST_INDEX: Lazy = Lazy::new(|| AtomicUsize::new(0)); diff --git a/tests/virtual_branches/target/writer.rs b/tests/virtual_branches/target/writer.rs index 5fbe22031..764db9e4d 100644 --- a/tests/virtual_branches/target/writer.rs +++ b/tests/virtual_branches/target/writer.rs @@ -6,7 +6,7 @@ use std::{ use once_cell::sync::Lazy; -use crate::{Case, Suite}; +use crate::shared::{Case, Suite}; use gitbutler::virtual_branches::target::Target; use gitbutler::virtual_branches::{branch, target, BranchId}; From 
8e9133092f955198300be0a16bba55323205af0b Mon Sep 17 00:00:00 2001 From: Sebastian Thiel Date: Sat, 30 Mar 2024 11:01:58 +0100 Subject: [PATCH 4/5] minimize app-crate dependencies After the library was removed, it needs less as well. --- Cargo.lock | 102 ++++++++++++++------------------------- gitbutler-app/Cargo.toml | 32 +----------- 2 files changed, 39 insertions(+), 95 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e8ddbaed3..70c61d166 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -276,7 +276,7 @@ dependencies = [ "glib-sys", "gobject-sys", "libc", - "system-deps 6.1.1", + "system-deps 6.2.0", ] [[package]] @@ -625,7 +625,7 @@ checksum = "3c55d429bef56ac9172d25fecb85dc8068307d17acd74b377866b7a1ef25d3c8" dependencies = [ "glib-sys", "libc", - "system-deps 6.1.1", + "system-deps 6.2.0", ] [[package]] @@ -676,9 +676,9 @@ dependencies = [ [[package]] name = "cfg-expr" -version = "0.15.4" +version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b40ccee03b5175c18cde8f37e7d2a33bcef6f8ec8f7cc0d81090d1bb380949c9" +checksum = "fa50868b64a9a6fda9d593ce778849ea8715cd2a3d2cc17ffdb4a2f2f2f1961d" dependencies = [ "smallvec", "target-lexicon", @@ -1451,9 +1451,9 @@ dependencies = [ [[package]] name = "fiat-crypto" -version = "0.2.1" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0870c84016d4b481be5c9f323c24f65e31e901ae618f0e80f4308fb00de1d2d" +checksum = "1676f435fc1dadde4d03e43f5d62b259e1ce5f40bd4ffb21db2b42ebe59c1382" [[package]] name = "field-offset" @@ -1670,9 +1670,9 @@ checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" [[package]] name = "futures-timer" -version = "3.0.2" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" [[package]] name = "futures-util" @@ 
-1740,7 +1740,7 @@ dependencies = [ "glib-sys", "gobject-sys", "libc", - "system-deps 6.1.1", + "system-deps 6.2.0", ] [[package]] @@ -1757,7 +1757,7 @@ dependencies = [ "libc", "pango-sys", "pkg-config", - "system-deps 6.1.1", + "system-deps 6.2.0", ] [[package]] @@ -1771,7 +1771,7 @@ dependencies = [ "gobject-sys", "libc", "pkg-config", - "system-deps 6.1.1", + "system-deps 6.2.0", ] [[package]] @@ -1783,7 +1783,7 @@ dependencies = [ "gdk-sys", "glib-sys", "libc", - "system-deps 6.1.1", + "system-deps 6.2.0", "x11", ] @@ -1875,7 +1875,7 @@ dependencies = [ "glib-sys", "gobject-sys", "libc", - "system-deps 6.1.1", + "system-deps 6.2.0", "winapi 0.3.9", ] @@ -1963,47 +1963,25 @@ dependencies = [ "async-trait", "backoff", "backtrace", - "bstr 1.9.1", - "byteorder", "chrono", "console-subscriber", - "diffy", - "filetime", - "fslock", "futures", "git2", - "git2-hooks", "gitbutler", - "gitbutler-git", "governor", "itertools 0.12.1", - "lazy_static", "log", - "md5", "nonzero_ext", "notify", "notify-debouncer-full", - "num_cpus", "once_cell", "pretty_assertions", - "r2d2", - "r2d2_sqlite", - "rand 0.8.5", - "refinery", - "regex", "reqwest 0.12.2", - "resolve-path", - "rusqlite", "sentry", "sentry-tracing", "serde", "serde_json", - "sha1", - "sha2", - "similar", "slug", - "ssh-key", - "ssh2", "tauri", "tauri-build", "tauri-plugin-context-menu", @@ -2015,15 +1993,9 @@ dependencies = [ "thiserror", "tokio", "tokio-util", - "toml 0.8.12", "tracing", "tracing-appender", "tracing-subscriber", - "url", - "urlencoding", - "uuid", - "walkdir", - "zip", ] [[package]] @@ -2091,7 +2063,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef4b192f8e65e9cf76cbf4ea71fa8e3be4a0e18ffe3d68b8da6836974cc5bad4" dependencies = [ "libc", - "system-deps 6.1.1", + "system-deps 6.2.0", ] [[package]] @@ -2121,7 +2093,7 @@ checksum = "0d57ce44246becd17153bd035ab4d32cfee096a657fc01f2231c9278378d1e0a" dependencies = [ "glib-sys", "libc", - "system-deps 6.1.1", + 
"system-deps 6.2.0", ] [[package]] @@ -2193,7 +2165,7 @@ dependencies = [ "gobject-sys", "libc", "pango-sys", - "system-deps 6.1.1", + "system-deps 6.2.0", ] [[package]] @@ -2287,11 +2259,11 @@ dependencies = [ [[package]] name = "hdrhistogram" -version = "7.5.2" +version = "7.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f19b9f54f7c7f55e31401bb647626ce0cf0f67b0004982ce815b3ee72a02aa8" +checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" dependencies = [ - "base64 0.13.1", + "base64 0.21.3", "byteorder", "flate2", "nom", @@ -3055,9 +3027,9 @@ checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" [[package]] name = "matchit" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed1202b2a6f884ae56f04cff409ab315c5ce26b5e58d7412e484f01fd52f52ef" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" [[package]] name = "md5" @@ -3445,9 +3417,9 @@ dependencies = [ [[package]] name = "openssl" -version = "0.10.64" +version = "0.10.62" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" +checksum = "8cde4d2d9200ad5909f8dac647e29482e07c3a35de8a13fce7c9c7747ad9f671" dependencies = [ "bitflags 2.4.0", "cfg-if", @@ -3590,7 +3562,7 @@ dependencies = [ "glib-sys", "gobject-sys", "libc", - "system-deps 6.1.1", + "system-deps 6.2.0", ] [[package]] @@ -4047,9 +4019,9 @@ dependencies = [ [[package]] name = "prost" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4fdd22f3b9c31b53c060df4a0613a1c7f062d4115a2b984dd15b1858f7e340d" +checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" dependencies = [ "bytes", "prost-derive", @@ -4057,9 +4029,9 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.12.1" +version = 
"0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "265baba7fabd416cf5078179f7d2cbeca4ce7a9041111900675ea7c4cb8a4c32" +checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" dependencies = [ "anyhow", "itertools 0.11.0", @@ -4070,9 +4042,9 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e081b29f63d83a4bc75cfc9f3fe424f9156cf92d8a4f0c9407cce9a1b67327cf" +checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" dependencies = [ "prost", ] @@ -5476,14 +5448,14 @@ dependencies = [ [[package]] name = "system-deps" -version = "6.1.1" +version = "6.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30c2de8a4d8f4b823d634affc9cd2a74ec98c53a756f317e529a48046cbf71f3" +checksum = "2a2d580ff6a20c55dfb86be5f9c238f67835d0e81cbdea8bf5680e0897320331" dependencies = [ - "cfg-expr 0.15.4", + "cfg-expr 0.15.7", "heck 0.4.1", "pkg-config", - "toml 0.7.6", + "toml 0.8.12", "version-compare 0.1.1", ] @@ -6005,9 +5977,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", "pin-project-lite", @@ -6587,7 +6559,7 @@ dependencies = [ "pango-sys", "pkg-config", "soup2-sys", - "system-deps 6.1.1", + "system-deps 6.2.0", ] [[package]] diff --git a/gitbutler-app/Cargo.toml b/gitbutler-app/Cargo.toml index 81f9248a2..292fa3ad0 100644 --- a/gitbutler-app/Cargo.toml +++ b/gitbutler-app/Cargo.toml @@ -18,52 +18,31 @@ test = false tauri-build = { version = "1.5", features = [] } [dev-dependencies] -once_cell = "1.19" +#once_cell = "1.19" pretty_assertions = "1.4" +tempfile = "3.10" 
[dependencies] -toml = "0.8.12" anyhow = "1.0.81" async-trait = "0.1.79" backoff = "0.4.0" backtrace = { version = "0.3.71", optional = true } -bstr = "1.9.1" -byteorder = "1.5.0" chrono = { version = "0.4.37", features = ["serde"] } console-subscriber = "0.2.0" -diffy = "0.3.0" -filetime = "0.2.23" -fslock = "0.2.1" futures = "0.3" git2.workspace = true -git2-hooks = "0.3" governor = "0.6.3" itertools = "0.12" -lazy_static = "1.4.0" -md5 = "0.7.0" nonzero_ext = "0.3.0" notify = { version = "6.0.1" } notify-debouncer-full = "0.3.1" -num_cpus = "1.16.0" once_cell = "1.19" -r2d2 = "0.8.10" -r2d2_sqlite = "0.22.0" -rand = "0.8.5" -refinery = { version = "0.8", features = [ "rusqlite" ] } -regex = "1.10" reqwest = { version = "0.12.2", features = ["json"] } -resolve-path = "0.1.0" -rusqlite.workspace = true sentry = { version = "0.32", optional = true, features = ["backtrace", "contexts", "panic", "transport", "anyhow", "debug-images", "reqwest", "native-tls" ] } sentry-tracing = "0.32.0" serde.workspace = true serde_json = { version = "1.0", features = [ "std", "arbitrary_precision" ] } -sha1 = "0.10.6" -sha2 = "0.10.8" -similar = { version = "2.4.0", features = ["unicode"] } slug = "0.1.5" -ssh-key = { version = "0.6.5", features = [ "alloc", "ed25519" ] } -ssh2 = { version = "0.9.4", features = ["vendored-openssl"] } tauri = { version = "1.6.1", features = [ "http-all", "os-all", "dialog-open", "fs-read-file", "path-all", "process-relaunch", "protocol-asset", "shell-open", "window-maximize", "window-start-dragging", "window-unmaximize"] } tauri-plugin-context-menu = { git = "https://github.com/c2r0b/tauri-plugin-context-menu", branch = "main" } tauri-plugin-single-instance = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" } @@ -77,13 +56,6 @@ tokio-util = "0.7.10" tracing = "0.1.40" tracing-appender = "0.2.3" tracing-subscriber = "0.3.17" -url = "2.5" -urlencoding = "2.1.3" -uuid.workspace = true -walkdir = "2.5.0" -zip = "0.6.5" -tempfile = 
"3.10" -gitbutler-git = { path = "../gitbutler-git" } gitbutler = { path = "../" } [lints.clippy] From 63f75c956e537460e58922db3446f9ee126ff508 Mon Sep 17 00:00:00 2001 From: Sebastian Thiel Date: Sat, 30 Mar 2024 11:19:07 +0100 Subject: [PATCH 5/5] assure CI runs the new library crate --- .github/workflows/push.yaml | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/.github/workflows/push.yaml b/.github/workflows/push.yaml index 00b4fe01e..fd7f6136f 100644 --- a/.github/workflows/push.yaml +++ b/.github/workflows/push.yaml @@ -14,6 +14,7 @@ jobs: outputs: node: ${{ steps.filter.outputs.node }} rust: ${{ steps.filter.outputs.rust }} + gitbutler: ${{ steps.filter.outputs.gitbutler }} gitbutler-app: ${{ steps.filter.outputs.gitbutler-app }} gitbutler-changeset: ${{ steps.filter.outputs.gitbutler-changeset }} gitbutler-git: ${{ steps.filter.outputs.gitbutler-git }} @@ -36,6 +37,10 @@ jobs: - 'gitbutler-!(ui)/**' gitbutler-app: - *any-rust + gitbutler: + - *rust + - 'src/**' + - 'tests/**' gitbutler-changeset: - *rust - 'gitbutler-changeset/**' @@ -97,6 +102,28 @@ jobs: env: RUSTDOCFLAGS: -Dwarnings + check-gitbutler: + needs: [changes, rust-init] + if: ${{ needs.changes.outputs.gitbutler == 'true' }} + runs-on: ubuntu-latest + container: + image: ghcr.io/gitbutlerapp/ci-base-image:latest + strategy: + matrix: + action: + - test + - check + - check-tests + features: + - '' + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/check-crate + with: + crate: gitbutler + features: ${{ toJson(matrix.features) }} + action: ${{ matrix.action }} + check-gitbutler-app: needs: [changes, rust-init] if: ${{ needs.changes.outputs.gitbutler-app == 'true' }}