make sure nothing is flashed when idle

This commit is contained in:
Nikita Galaiko 2024-01-22 15:37:42 +01:00 committed by GitButler
parent e2d1777a19
commit 4fda8370a4
14 changed files with 178 additions and 122 deletions

View File

@ -77,6 +77,10 @@ impl Repository {
&self.project
}
/// Replaces the project associated with this repository with a clone of `project`.
pub fn set_project(&mut self, project: &projects::Project) {
self.project = project.clone();
}
pub fn get_head(&self) -> Result<git::Reference, git::Error> {
let head = self.git_repository.head()?;
Ok(head)

View File

@ -74,6 +74,8 @@ pub struct Project {
pub gitbutler_data_last_fetch: Option<FetchResult>,
#[serde(default)]
pub gitbutler_code_push_state: Option<CodePushState>,
#[serde(default)]
pub project_data_last_fetch: Option<FetchResult>,
}
impl AsRef<Project> for Project {

View File

@ -44,6 +44,7 @@ pub struct UpdateRequest {
pub preferred_key: Option<project::AuthKey>,
pub ok_with_force_push: Option<bool>,
pub gitbutler_code_push_state: Option<project::CodePushState>,
pub project_data_last_fetched: Option<project::FetchResult>,
}
#[derive(Debug, thiserror::Error)]
@ -116,6 +117,10 @@ impl Storage {
project.gitbutler_data_last_fetch = Some(gitbutler_data_last_fetched.clone());
}
if let Some(project_data_last_fetched) = update_request.project_data_last_fetched.as_ref() {
project.project_data_last_fetch = Some(project_data_last_fetched.clone());
}
if let Some(state) = update_request.gitbutler_code_push_state {
project.gitbutler_code_push_state = Some(state);
}

View File

@ -11,6 +11,7 @@ use crate::{
},
keys,
project_repository::{self, LogUntil},
projects::FetchResult,
users,
virtual_branches::branch::Ownership,
};
@ -111,7 +112,6 @@ pub fn set_base_branch(
branch: target_branch_ref.clone(),
remote_url: remote_url.to_string(),
sha: commit_oid,
last_fetched_ms: None,
};
let target_writer =
@ -553,7 +553,13 @@ pub fn target_to_base_branch(
behind: upstream_commits.len(),
upstream_commits,
recent_commits,
last_fetched_ms: target.last_fetched_ms,
last_fetched_ms: project_repository
.project()
.project_data_last_fetch
.as_ref()
.map(FetchResult::timestamp)
.copied()
.map(|t| t.duration_since(time::UNIX_EPOCH).unwrap().as_millis()),
};
Ok(base)
}

View File

@ -1,28 +1,51 @@
use anyhow::Result;
use crate::{gb_repository, writer};
use crate::{gb_repository, reader, writer};
use super::Branch;
/// Persists virtual-branch state under the repository root.
///
/// Carries a `reader` alongside the `writer` so mutations can first inspect
/// the currently persisted state (e.g. `write` compares against the existing
/// branch and returns early when nothing changed).
pub struct BranchWriter<'writer> {
repository: &'writer gb_repository::Repository,
writer: writer::DirWriter,
reader: reader::Reader<'writer>,
}
impl<'writer> BranchWriter<'writer> {
pub fn new(repository: &'writer gb_repository::Repository) -> Result<Self, std::io::Error> {
writer::DirWriter::open(repository.root()).map(|writer| Self { repository, writer })
let reader = reader::Reader::open(repository.root())?;
let writer = writer::DirWriter::open(repository.root())?;
Ok(Self {
repository,
writer,
reader,
})
}
/// Removes the on-disk state for `branch`.
///
/// Existence is probed by reading `branches/<id>/id` first, so deleting a
/// branch that was never written is a no-op success instead of an error —
/// and, notably, avoids touching the session/lock machinery at all.
pub fn delete(&self, branch: &Branch) -> Result<()> {
match self
.reader
.sub(format!("branches/{}", branch.id))
.read("id")
{
Ok(_) => {
// Branch exists on disk: record session activity, then remove
// the whole branch directory while holding the repository lock.
self.repository.mark_active_session()?;
// Guard is held for the rest of this block; released on drop.
let _lock = self.repository.lock();
self.writer.remove(format!("branches/{}", branch.id))?;
Ok(())
}
// Never persisted — nothing to delete.
Err(reader::Error::NotFound) => Ok(()),
Err(err) => Err(err.into()),
}
}
pub fn write(&self, branch: &mut Branch) -> Result<()> {
let reader = self.reader.sub(format!("branches/{}", branch.id));
match Branch::try_from(&reader) {
Ok(existing) if existing.eq(branch) => return Ok(()),
Ok(_) | Err(reader::Error::NotFound) => {}
Err(err) => return Err(err.into()),
}
self.repository.mark_active_session()?;
branch.updated_timestamp_ms = std::time::SystemTime::now()
@ -177,7 +200,7 @@ mod tests {
.unwrap(),
ownership: branch::Ownership {
files: vec![branch::FileOwnership {
file_path: format!("file/{}", TEST_INDEX.load(Ordering::Relaxed)).into(),
file_path: format!("file/{}:1-2", TEST_INDEX.load(Ordering::Relaxed)).into(),
hunks: vec![],
}],
},

View File

@ -19,7 +19,7 @@ use super::{
self, FetchFromTargetError, GetBaseBranchDataError, IsRemoteBranchMergableError,
ListRemoteBranchesError,
},
target, target_to_base_branch, BaseBranch, RemoteBranchFile,
target_to_base_branch, BaseBranch, RemoteBranchFile,
};
#[derive(Clone)]
@ -342,7 +342,10 @@ impl Controller {
&self,
project_id: &ProjectId,
) -> Result<BaseBranch, ControllerError<errors::FetchFromTargetError>> {
self.inner(project_id).await.fetch_from_target(project_id)
self.inner(project_id)
.await
.fetch_from_target(project_id)
.await
}
}
@ -839,11 +842,21 @@ impl ControllerInner {
})
}
pub fn fetch_from_target(
pub async fn fetch_from_target(
&self,
project_id: &ProjectId,
) -> Result<BaseBranch, ControllerError<errors::FetchFromTargetError>> {
self.with_verify_branch(project_id, |gb_repository, project_repository, _| {
let project = self.projects.get(project_id).map_err(Error::from)?;
let mut project_repository =
project_repository::Repository::open(&project).map_err(Error::from)?;
let user = self.users.get_user().map_err(Error::from)?;
let gb_repository = gb_repository::Repository::open(
&self.local_data_dir,
&project_repository,
user.as_ref(),
)
.context("failed to open gitbutler repository")?;
let default_target = gb_repository
.default_target()
.context("failed to get default target")?
@ -851,31 +864,38 @@ impl ControllerInner {
errors::DefaultTargetNotSetError {
project_id: *project_id,
},
))?;
))
.map_err(ControllerError::Action)?;
project_repository
let project_data_last_fetched = match project_repository
.fetch(default_target.branch.remote(), &self.helper)
.map_err(errors::FetchFromTargetError::Remote)?;
.map_err(errors::FetchFromTargetError::Remote)
{
Ok(()) => projects::FetchResult::Fetched {
timestamp: std::time::SystemTime::now(),
},
Err(error) => projects::FetchResult::Error {
timestamp: std::time::SystemTime::now(),
error: error.to_string(),
},
};
let target_writer =
target::Writer::new(gb_repository).context("failed to open target writer")?;
target_writer
.write_default(&target::Target {
last_fetched_ms: Some(
std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap()
.as_millis(),
),
..default_target.clone()
let updated_project = self
.projects
.update(&projects::UpdateRequest {
id: *project_id,
project_data_last_fetched: Some(project_data_last_fetched),
..Default::default()
})
.context("failed to write default target")?;
.await
.context("failed to update project")?;
let base_branch = target_to_base_branch(project_repository, &default_target)
project_repository.set_project(&updated_project);
let base_branch = target_to_base_branch(&project_repository, &default_target)
.context("failed to convert target to base branch")?;
Ok(base_branch)
})
}
}

View File

@ -114,7 +114,6 @@ mod tests {
fn test_target() -> target::Target {
target::Target {
last_fetched_ms: None,
branch: format!(
"refs/remotes/branch name{}/remote name {}",
TEST_TARGET_INDEX.load(Ordering::Relaxed),

View File

@ -13,7 +13,6 @@ pub struct Target {
pub branch: git::RemoteRefname,
pub remote_url: String,
pub sha: git::Oid,
pub last_fetched_ms: Option<u128>,
}
impl Serialize for Target {
@ -26,28 +25,19 @@ impl Serialize for Target {
state.serialize_field("remoteName", &self.branch.remote())?;
state.serialize_field("remoteUrl", &self.remote_url)?;
state.serialize_field("sha", &self.sha.to_string())?;
state.serialize_field("lastFetchedMs", &self.last_fetched_ms)?;
state.end()
}
}
impl Target {
fn try_from(reader: &crate::reader::Reader) -> Result<Target, crate::reader::Error> {
let results = reader.batch(&[
"name",
"branch_name",
"remote",
"remote_url",
"sha",
"last_fetched_ms",
])?;
let results = reader.batch(&["name", "branch_name", "remote", "remote_url", "sha"])?;
let name = results[0].clone();
let branch_name = results[1].clone();
let remote = results[2].clone();
let remote_url = results[3].clone();
let sha = results[4].clone();
let last_fetched_ms = results[5].clone();
let branch_name = match name {
Ok(branch) => {
@ -80,25 +70,10 @@ impl Target {
)
})?;
let last_fetched_ms: Option<u128> = match last_fetched_ms {
Ok(last_fetched) => Some(last_fetched.try_into().map_err(|e| {
crate::reader::Error::Io(
std::io::Error::new(
std::io::ErrorKind::InvalidData,
format!("last_fetched_ms: {}", e),
)
.into(),
)
})?),
Err(crate::reader::Error::NotFound) => None,
Err(e) => return Err(e),
};
Ok(Self {
branch: format!("refs/remotes/{}", branch_name).parse().unwrap(),
remote_url,
sha,
last_fetched_ms,
})
}
}

View File

@ -151,7 +151,6 @@ mod tests {
branch: "refs/remotes/remote/branch".parse().unwrap(),
remote_url: "remote url".to_string(),
sha: "fedcba9876543210fedcba9876543210fedcba98".parse().unwrap(),
last_fetched_ms: None,
};
let default_target = Target {
@ -160,7 +159,6 @@ mod tests {
.unwrap(),
remote_url: "default remote url".to_string(),
sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(),
last_fetched_ms: Some(1),
};
let branch_writer = branch::Writer::new(&gb_repository)?;

View File

@ -1,23 +1,37 @@
use anyhow::{Context, Result};
use crate::{gb_repository, virtual_branches::BranchId, writer};
use crate::{gb_repository, reader, virtual_branches::BranchId, writer};
use super::Target;
/// Persists `Target` state under the repository root.
///
/// Carries a `reader` alongside the `writer` so writes can compare against
/// the currently persisted target and skip no-op rewrites.
pub struct TargetWriter<'writer> {
repository: &'writer gb_repository::Repository,
writer: writer::DirWriter,
reader: reader::Reader<'writer>,
}
impl<'writer> TargetWriter<'writer> {
pub fn new(repository: &'writer gb_repository::Repository) -> Result<Self, std::io::Error> {
writer::DirWriter::open(repository.root()).map(|writer| Self { repository, writer })
let reader = reader::Reader::open(&repository.root())?;
let writer = writer::DirWriter::open(repository.root())?;
Ok(Self {
repository,
writer,
reader,
})
}
pub fn write_default(&self, target: &Target) -> Result<()> {
let reader = self.reader.sub("branches/target");
match Target::try_from(&reader) {
Ok(existing) if existing.eq(target) => return Ok(()),
Ok(_) | Err(reader::Error::NotFound) => {}
Err(e) => return Err(e.into()),
};
self.repository.mark_active_session()?;
let mut batch = vec![
let batch = vec![
writer::BatchTask::Write(
"branches/target/branch_name",
format!("{}/{}", target.branch.remote(), target.branch.branch()),
@ -30,15 +44,6 @@ impl<'writer> TargetWriter<'writer> {
writer::BatchTask::Write("branches/target/sha", target.sha.to_string()),
];
if let Some(last_fetched) = target.last_fetched_ms {
batch.push(writer::BatchTask::Write(
"branches/target/last_fetched_ms",
last_fetched.to_string(),
));
} else {
batch.push(writer::BatchTask::Remove("branches/target/last_fetched_ms"));
}
self.writer
.batch(&batch)
.context("Failed to write default target")?;
@ -47,11 +52,18 @@ impl<'writer> TargetWriter<'writer> {
}
pub fn write(&self, id: &BranchId, target: &Target) -> Result<()> {
let reader = self.reader.sub(format!("branches/{}/target", id));
match Target::try_from(&reader) {
Ok(existing) if existing.eq(target) => return Ok(()),
Ok(_) | Err(reader::Error::NotFound) => {}
Err(e) => return Err(e.into()),
};
self.repository
.mark_active_session()
.context("Failed to get or create current session")?;
let mut batch = vec![
let batch = vec![
writer::BatchTask::Write(
format!("branches/{}/target/branch_name", id),
format!("{}/{}", target.branch.remote(), target.branch.branch()),
@ -70,18 +82,6 @@ impl<'writer> TargetWriter<'writer> {
),
];
if let Some(last_fetched) = target.last_fetched_ms {
batch.push(writer::BatchTask::Write(
format!("branches/{}/target/last_fetched_ms", id),
last_fetched.to_string(),
));
} else {
batch.push(writer::BatchTask::Remove(format!(
"branches/{}/target/last_fetched_ms",
id
)));
}
self.writer
.batch(&batch)
.context("Failed to write target")?;
@ -159,7 +159,6 @@ mod tests {
branch: "refs/remotes/remote name/branch name".parse().unwrap(),
remote_url: "remote url".to_string(),
sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(),
last_fetched_ms: Some(1),
};
let branch_writer = branch::Writer::new(&gb_repository)?;
@ -198,16 +197,6 @@ mod tests {
.context("Failed to read branch target sha")?,
target.sha.to_string()
);
assert_eq!(
fs::read_to_string(
root.join("target")
.join("last_fetched_ms")
.to_str()
.unwrap()
)
.context("Failed to read branch target last fetched")?,
"1"
);
assert_eq!(
fs::read_to_string(root.join("meta").join("applied").to_str().unwrap())?
@ -257,7 +246,6 @@ mod tests {
branch: "refs/remotes/remote name/branch name".parse().unwrap(),
remote_url: "remote url".to_string(),
sha: "0123456789abcdef0123456789abcdef01234567".parse().unwrap(),
last_fetched_ms: Some(1),
};
let branch_writer = branch::Writer::new(&gb_repository)?;
@ -271,7 +259,6 @@ mod tests {
.unwrap(),
remote_url: "updated remote url".to_string(),
sha: "fedcba9876543210fedcba9876543210fedcba98".parse().unwrap(),
last_fetched_ms: Some(2),
};
target_writer.write(&branch.id, &updated_target)?;

View File

@ -36,7 +36,6 @@ pub fn set_test_target(
target::Writer::new(gb_repo)?
.write_default(&target::Target {
last_fetched_ms: None,
branch: "refs/remotes/origin/master".parse().unwrap(),
remote_url: remote_repo.path().to_str().unwrap().parse().unwrap(),
sha: remote_repo.head().unwrap().target().unwrap(),
@ -1014,7 +1013,6 @@ fn test_merge_vbranch_upstream_clean() -> Result<()> {
set_test_target(&gb_repository, &project_repository)?;
target::Writer::new(&gb_repository)?.write_default(&target::Target {
last_fetched_ms: None,
branch: "refs/remotes/origin/master".parse().unwrap(),
remote_url: "origin".to_string(),
sha: target_oid,
@ -1144,7 +1142,6 @@ fn test_merge_vbranch_upstream_conflict() -> Result<()> {
set_test_target(&gb_repository, &project_repository)?;
target::Writer::new(&gb_repository)?.write_default(&target::Target {
last_fetched_ms: None,
branch: "refs/remotes/origin/master".parse().unwrap(),
remote_url: "origin".to_string(),
sha: target_oid,
@ -1767,7 +1764,6 @@ fn test_upstream_integrated_vbranch() -> Result<()> {
)?;
target::Writer::new(&gb_repository)?.write_default(&target::Target {
last_fetched_ms: None,
branch: "refs/remotes/origin/master".parse().unwrap(),
remote_url: "http://origin.com/project".to_string(),
sha: base_commit,

View File

@ -190,7 +190,6 @@ mod test {
fn test_target() -> virtual_branches::target::Target {
virtual_branches::target::Target {
last_fetched_ms: None,
branch: format!(
"refs/remotes/remote name {}/branch name {}",
TEST_TARGET_INDEX.load(Ordering::Relaxed),

View File

@ -61,15 +61,14 @@ impl Handler {
let mut events = vec![];
let last_fetched_ms = gb_repo
.default_target()
.context("failed to get default target")?
.and_then(|target| target.last_fetched_ms)
.unwrap_or(0);
let last_fetched =
time::UNIX_EPOCH + time::Duration::from_millis(last_fetched_ms.try_into()?);
let project_data_last_fetch = project
.project_data_last_fetch
.as_ref()
.map(FetchResult::timestamp)
.copied()
.unwrap_or(time::UNIX_EPOCH);
if now.duration_since(last_fetched)? > PROJECT_FETCH_INTERVAL {
if now.duration_since(project_data_last_fetch)? > PROJECT_FETCH_INTERVAL {
events.push(events::Event::FetchProjectData(*project_id));
}

View File

@ -1288,6 +1288,49 @@ async fn resolve_conflict_flow() {
}
}
mod fetch_from_target {
use super::*;
// Verifies that `fetch_from_target` records a fetch timestamp on the
// project and that `get_base_branch_data` surfaces it as `last_fetched_ms`.
#[tokio::test]
async fn should_update_last_fetched() {
let Test {
project_id,
controller,
..
} = Test::default();
controller
.set_base_branch(&project_id, &"refs/remotes/origin/master".parse().unwrap())
.await
.unwrap();
// Setting the base branch alone must not count as a fetch.
let before_fetch = controller.get_base_branch_data(&project_id).await.unwrap();
assert!(before_fetch.unwrap().last_fetched_ms.is_none());
// First fetch: timestamp is set and visible through both the fetch
// result and a subsequent base-branch query, and the two agree.
let fetch = controller.fetch_from_target(&project_id).await.unwrap();
assert!(fetch.last_fetched_ms.is_some());
let after_fetch = controller.get_base_branch_data(&project_id).await.unwrap();
assert!(after_fetch.as_ref().unwrap().last_fetched_ms.is_some());
assert_eq!(fetch.last_fetched_ms, after_fetch.unwrap().last_fetched_ms);
// Second fetch: timestamp must move forward.
// NOTE(review): this assumes the two fetches land on distinct
// millisecond timestamps — could flake on a very fast run; confirm.
let second_fetch = controller.fetch_from_target(&project_id).await.unwrap();
assert!(second_fetch.last_fetched_ms.is_some());
assert_ne!(fetch.last_fetched_ms, second_fetch.last_fetched_ms);
let after_second_fetch = controller.get_base_branch_data(&project_id).await.unwrap();
assert!(after_second_fetch
.as_ref()
.unwrap()
.last_fetched_ms
.is_some());
assert_eq!(
second_fetch.last_fetched_ms,
after_second_fetch.unwrap().last_fetched_ms
);
}
}
mod update_base_branch {
use super::*;