Mirror of https://github.com/gitbutlerapp/gitbutler.git, synced 2024-11-23 03:26:36 +03:00
assure conflicts::mark() writes its file atomically

That way it can't be observed half-written, or remain in a half-written state in case of a crash.
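For illustration only (none of this code is from the commit), the underlying write-a-temporary-file-then-rename pattern that provides this atomicity looks roughly like the sketch below; the helper name write_atomically and the ".tmp" sibling path are assumptions, while the commit itself routes the write through the existing gitbutler_fs::write helper shown further down.

use std::io::Write;
use std::path::Path;

// Illustrative sketch: write the full contents to a sibling temporary file,
// then rename it over the destination. On the same filesystem the rename
// replaces the target in one step, so readers see either the old file or the
// complete new one, never a half-written state.
fn write_atomically(path: &Path, contents: &[u8]) -> std::io::Result<()> {
    let tmp = path.with_extension("tmp"); // hypothetical temp name for this sketch
    {
        let mut file = std::fs::File::create(&tmp)?;
        file.write_all(contents)?;
        file.sync_all()?; // flush to disk before the rename makes it visible
    }
    std::fs::rename(&tmp, path)
}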
This commit is contained in:
parent b3b87b34a5
commit 3e79238e7f
Cargo.lock (generated)
@@ -2005,6 +2005,7 @@ dependencies = [
  "gitbutler-command-context",
  "gitbutler-commit",
  "gitbutler-error",
+ "gitbutler-fs",
  "gitbutler-git",
  "gitbutler-id",
  "gitbutler-oplog",
@@ -21,6 +21,7 @@ gitbutler-id.workspace = true
 gitbutler-time.workspace = true
 gitbutler-commit.workspace = true
 gitbutler-url.workspace = true
+gitbutler-fs.workspace = true
 serde = { workspace = true, features = ["std"] }
 bstr = "1.9.1"
 diffy = "0.3.0"
@@ -1,10 +1,9 @@
-// stuff to manage merge conflict state
-// this is the dumbest possible way to do this, but it is a placeholder
-// conflicts are stored one path per line in .git/conflicts
-// merge parent is stored in .git/base_merge_parent
-// conflicts are removed as they are resolved, the conflicts file is removed when there are no more conflicts
-// the merge parent file is removed when the merge is complete
-
+/// stuff to manage merge conflict state.
+/// This is the dumbest possible way to do this, but it is a placeholder.
+/// Conflicts are stored one path per line in .git/conflicts.
+/// Merge parent is stored in .git/base_merge_parent.
+/// Conflicts are removed as they are resolved, the conflicts file is removed when there are no more conflicts
+/// or when the merge is complete.
 use std::{
     io::{BufRead, Write},
     path::{Path, PathBuf},
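As a side note, the on-disk format those doc comments describe is simple enough to read back with nothing but the standard library. The following is a hedged sketch, not code from this crate; the function name read_conflicts is made up for illustration:

use std::io::BufRead;
use std::path::{Path, PathBuf};

// Illustrative only: read `.git/conflicts`, which stores one conflicted path
// per line, as described in the doc comments above.
fn read_conflicts(git_dir: &Path) -> std::io::Result<Vec<PathBuf>> {
    let file = match std::fs::File::open(git_dir.join("conflicts")) {
        Ok(file) => file,
        // A missing file simply means no conflicts are currently recorded.
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(Vec::new()),
        Err(err) => return Err(err),
    };
    std::io::BufReader::new(file)
        .lines()
        .map(|line| line.map(PathBuf::from))
        .collect()
}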
@@ -25,19 +24,20 @@ pub(crate) fn mark<P: AsRef<Path>, A: AsRef<[P]>>(
     if paths.is_empty() {
         return Ok(());
     }
-    let conflicts_path = repository.repo().path().join("conflicts");
     // write all the file paths to a file on disk
-    let mut file = std::fs::File::create(conflicts_path)?;
+    let mut buf = Vec::<u8>::with_capacity(512);
     for path in paths {
-        file.write_all(path.as_ref().as_os_str().as_encoded_bytes())?;
-        file.write_all(b"\n")?;
+        buf.write_all(path.as_ref().as_os_str().as_encoded_bytes())?;
+        buf.write_all(b"\n")?;
     }
+    gitbutler_fs::write(repository.repo().path().join("conflicts"), buf)?;

     if let Some(parent) = parent {
-        let merge_path = repository.repo().path().join("base_merge_parent");
         // write all the file paths to a file on disk
-        let mut file = std::fs::File::create(merge_path)?;
-        file.write_all(parent.to_string().as_bytes())?;
+        gitbutler_fs::write(
+            repository.repo().path().join("base_merge_parent"),
+            parent.to_string().as_bytes(),
+        )?;
     }

     Ok(())
@@ -74,11 +74,10 @@ pub fn resolve<P: AsRef<Path>>(repository: &ProjectRepository, path: P) -> Resul
         }
     }

-    // remove file
-    std::fs::remove_file(conflicts_path)?;
-
-    // re-write file if needed
-    if !remaining.is_empty() {
+    // re-write file if needed, otherwise remove file entirely
+    if remaining.is_empty() {
+        std::fs::remove_file(conflicts_path)?;
+    } else {
         mark(repository, &remaining, None)?;
     }
     Ok(())
@@ -10,7 +10,7 @@ use crate::{
     target::Target,
 };
 use gitbutler_error::error::Code;
-use gitbutler_fs::fs::read_toml_file_or_default;
+use gitbutler_fs::read_toml_file_or_default;
 // use gitbutler_project::Project;
 use gitbutler_reference::Refname;
 use itertools::Itertools;
@@ -247,5 +247,5 @@ impl VirtualBranchesHandle {
 }

 fn write<P: AsRef<Path>>(file_path: P, virtual_branches: &VirtualBranches) -> Result<()> {
-    gitbutler_fs::fs::write(file_path, toml::to_string(&virtual_branches)?)
+    gitbutler_fs::write(file_path, toml::to_string(&virtual_branches)?)
 }
@@ -1,115 +0,0 @@
-use std::fs::File;
-use std::io::Read;
-use std::{
-    io::Write,
-    path::{Path, PathBuf},
-};
-
-use anyhow::{Context, Result};
-use bstr::BString;
-use gix::{
-    dir::walk::EmissionMode,
-    tempfile::{create_dir::Retries, AutoRemove, ContainingDirectory},
-};
-use serde::de::DeserializeOwned;
-use walkdir::WalkDir;
-
-// Returns an ordered list of relative paths for files inside a directory recursively.
-pub fn list_files<P: AsRef<Path>>(dir_path: P, ignore_prefixes: &[P]) -> Result<Vec<PathBuf>> {
-    let mut files = vec![];
-    let dir_path = dir_path.as_ref();
-    if !dir_path.exists() {
-        return Ok(files);
-    }
-    for entry in WalkDir::new(dir_path) {
-        let entry = entry?;
-        if !entry.file_type().is_dir() {
-            let path = entry.path();
-            let path = path.strip_prefix(dir_path)?;
-            let path = path.to_path_buf();
-            if ignore_prefixes
-                .iter()
-                .any(|prefix| path.starts_with(prefix.as_ref()))
-            {
-                continue;
-            }
-            files.push(path);
-        }
-    }
-    files.sort();
-    Ok(files)
-}
-
-// Return an iterator of worktree-relative slash-separated paths for files inside the `worktree_dir`, recursively.
-// Fails if the `worktree_dir` isn't a valid git repository.
-pub fn iter_worktree_files(
-    worktree_dir: impl AsRef<Path>,
-) -> Result<impl Iterator<Item = BString>> {
-    let repo = gix::open(worktree_dir.as_ref())?;
-    let index = repo.index_or_empty()?;
-    let disabled_interrupt_handling = Default::default();
-    let options = repo
-        .dirwalk_options()?
-        .emit_tracked(true)
-        .emit_untracked(EmissionMode::Matching);
-    Ok(repo
-        .dirwalk_iter(index, None::<&str>, disabled_interrupt_handling, options)?
-        .filter_map(Result::ok)
-        .map(|e| e.entry.rela_path))
-}
-
-/// Write a single file so that the write either fully succeeds, or fully fails,
-/// assuming the containing directory already exists.
-pub fn write<P: AsRef<Path>>(file_path: P, contents: impl AsRef<[u8]>) -> anyhow::Result<()> {
-    let mut temp_file = gix::tempfile::new(
-        file_path.as_ref().parent().unwrap(),
-        ContainingDirectory::Exists,
-        AutoRemove::Tempfile,
-    )?;
-    temp_file.write_all(contents.as_ref())?;
-    Ok(persist_tempfile(temp_file, file_path)?)
-}
-
-/// Write a single file so that the write either fully succeeds, or fully fails,
-/// and create all leading directories.
-pub fn create_dirs_then_write<P: AsRef<Path>>(
-    file_path: P,
-    contents: impl AsRef<[u8]>,
-) -> std::io::Result<()> {
-    let mut temp_file = gix::tempfile::new(
-        file_path.as_ref().parent().unwrap(),
-        ContainingDirectory::CreateAllRaceProof(Retries::default()),
-        AutoRemove::Tempfile,
-    )?;
-    temp_file.write_all(contents.as_ref())?;
-    persist_tempfile(temp_file, file_path)
-}
-
-fn persist_tempfile(
-    tempfile: gix::tempfile::Handle<gix::tempfile::handle::Writable>,
-    to_path: impl AsRef<Path>,
-) -> std::io::Result<()> {
-    match tempfile.persist(to_path) {
-        Ok(Some(_opened_file)) => Ok(()),
-        Ok(None) => unreachable!(
-            "BUG: a signal has caused the tempfile to be removed, but we didn't install a handler"
-        ),
-        Err(err) => Err(err.error),
-    }
-}
-
-/// Reads and parses the state file.
-///
-/// If the file does not exist, it will be created.
-pub fn read_toml_file_or_default<T: DeserializeOwned + Default>(path: &Path) -> Result<T> {
-    let mut file = match File::open(path) {
-        Ok(f) => f,
-        Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(T::default()),
-        Err(err) => return Err(err.into()),
-    };
-    let mut contents = String::new();
-    file.read_to_string(&mut contents)?;
-    let value: T =
-        toml::from_str(&contents).with_context(|| format!("Failed to parse {}", path.display()))?;
-    Ok(value)
-}
@@ -1 +1,115 @@
-pub mod fs;
+use std::fs::File;
+use std::io::Read;
+use std::{
+    io::Write,
+    path::{Path, PathBuf},
+};
+
+use anyhow::{Context, Result};
+use bstr::BString;
+use gix::{
+    dir::walk::EmissionMode,
+    tempfile::{create_dir::Retries, AutoRemove, ContainingDirectory},
+};
+use serde::de::DeserializeOwned;
+use walkdir::WalkDir;
+
+// Returns an ordered list of relative paths for files inside a directory recursively.
+pub fn list_files<P: AsRef<Path>>(dir_path: P, ignore_prefixes: &[P]) -> Result<Vec<PathBuf>> {
+    let mut files = vec![];
+    let dir_path = dir_path.as_ref();
+    if !dir_path.exists() {
+        return Ok(files);
+    }
+    for entry in WalkDir::new(dir_path) {
+        let entry = entry?;
+        if !entry.file_type().is_dir() {
+            let path = entry.path();
+            let path = path.strip_prefix(dir_path)?;
+            let path = path.to_path_buf();
+            if ignore_prefixes
+                .iter()
+                .any(|prefix| path.starts_with(prefix.as_ref()))
+            {
+                continue;
+            }
+            files.push(path);
+        }
+    }
+    files.sort();
+    Ok(files)
+}
+
+// Return an iterator of worktree-relative slash-separated paths for files inside the `worktree_dir`, recursively.
+// Fails if the `worktree_dir` isn't a valid git repository.
+pub fn iter_worktree_files(
+    worktree_dir: impl AsRef<Path>,
+) -> Result<impl Iterator<Item = BString>> {
+    let repo = gix::open(worktree_dir.as_ref())?;
+    let index = repo.index_or_empty()?;
+    let disabled_interrupt_handling = Default::default();
+    let options = repo
+        .dirwalk_options()?
+        .emit_tracked(true)
+        .emit_untracked(EmissionMode::Matching);
+    Ok(repo
+        .dirwalk_iter(index, None::<&str>, disabled_interrupt_handling, options)?
+        .filter_map(Result::ok)
+        .map(|e| e.entry.rela_path))
+}
+
+/// Write a single file so that the write either fully succeeds, or fully fails,
+/// assuming the containing directory already exists.
+pub fn write<P: AsRef<Path>>(file_path: P, contents: impl AsRef<[u8]>) -> anyhow::Result<()> {
+    let mut temp_file = gix::tempfile::new(
+        file_path.as_ref().parent().unwrap(),
+        ContainingDirectory::Exists,
+        AutoRemove::Tempfile,
+    )?;
+    temp_file.write_all(contents.as_ref())?;
+    Ok(persist_tempfile(temp_file, file_path)?)
+}
+
+/// Write a single file so that the write either fully succeeds, or fully fails,
+/// and create all leading directories.
+pub fn create_dirs_then_write<P: AsRef<Path>>(
+    file_path: P,
+    contents: impl AsRef<[u8]>,
+) -> std::io::Result<()> {
+    let mut temp_file = gix::tempfile::new(
+        file_path.as_ref().parent().unwrap(),
+        ContainingDirectory::CreateAllRaceProof(Retries::default()),
+        AutoRemove::Tempfile,
+    )?;
+    temp_file.write_all(contents.as_ref())?;
+    persist_tempfile(temp_file, file_path)
+}
+
+fn persist_tempfile(
+    tempfile: gix::tempfile::Handle<gix::tempfile::handle::Writable>,
+    to_path: impl AsRef<Path>,
+) -> std::io::Result<()> {
+    match tempfile.persist(to_path) {
+        Ok(Some(_opened_file)) => Ok(()),
+        Ok(None) => unreachable!(
+            "BUG: a signal has caused the tempfile to be removed, but we didn't install a handler"
+        ),
+        Err(err) => Err(err.error),
+    }
+}
+
+/// Reads and parses the state file.
+///
+/// If the file does not exist, it will be created.
+pub fn read_toml_file_or_default<T: DeserializeOwned + Default>(path: &Path) -> Result<T> {
+    let mut file = match File::open(path) {
+        Ok(f) => f,
+        Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(T::default()),
+        Err(err) => return Err(err.into()),
+    };
+    let mut contents = String::new();
+    file.read_to_string(&mut contents)?;
+    let value: T =
+        toml::from_str(&contents).with_context(|| format!("Failed to parse {}", path.display()))?;
+    Ok(value)
+}
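The helpers keep their signatures; only the import path changes from gitbutler_fs::fs::* to gitbutler_fs::*, as the remaining hunks show. A minimal usage sketch follows, with the caller name and file layout made up purely for illustration:

use std::path::Path;

// Illustrative only: persist a small state file atomically with the helper now
// exported from the gitbutler_fs crate root. The write either fully succeeds
// or fully fails; the parent directory must already exist.
fn save_conflicts_list(git_dir: &Path, conflicted: &[String]) -> anyhow::Result<()> {
    let mut body = conflicted.join("\n");
    body.push('\n');
    gitbutler_fs::write(git_dir.join("conflicts"), body)?;
    Ok(())
}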
@@ -2,7 +2,7 @@ use anyhow::{Context, Result};
 use gitbutler_branch::{
     GITBUTLER_INTEGRATION_COMMIT_AUTHOR_EMAIL, GITBUTLER_INTEGRATION_COMMIT_AUTHOR_NAME,
 };
-use gitbutler_fs::fs::write;
+use gitbutler_fs::write;
 use gix::config::tree::Key;
 use std::path::Path;

@@ -4,7 +4,7 @@ use std::{
     time::SystemTime,
 };

-use gitbutler_fs::fs::read_toml_file_or_default;
+use gitbutler_fs::read_toml_file_or_default;
 use serde::{Deserialize, Deserializer, Serialize};

 use super::OPLOG_FILE_NAME;
@@ -92,6 +92,6 @@ impl OplogHandle {

     fn write_file(&self, mut oplog: Oplog) -> Result<()> {
         oplog.modified_at = SystemTime::now();
-        gitbutler_fs::fs::write(&self.file_path, toml::to_string(&oplog)?)
+        gitbutler_fs::write(&self.file_path, toml::to_string(&oplog)?)
     }
 }
@@ -43,7 +43,7 @@ impl Storage {
     /// Generally, the filesystem is used for synchronization, not in-memory primitives.
     pub fn write(&self, rela_path: impl AsRef<Path>, content: &str) -> std::io::Result<()> {
         let file_path = self.local_data_dir.join(rela_path);
-        gitbutler_fs::fs::create_dirs_then_write(file_path, content)
+        gitbutler_fs::create_dirs_then_write(file_path, content)
     }

     /// Delete the file or directory at `rela_path`.
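For completeness, a hedged sketch of the other relocated helper; the file path below is hypothetical and not taken from this commit:

use std::path::Path;

// Illustrative only: create any missing parent directories (race-proof), then
// persist the file atomically via a tempfile, mirroring Storage::write above.
fn save_app_setting(local_data_dir: &Path, content: &str) -> std::io::Result<()> {
    let file_path = local_data_dir.join("settings/telemetry.txt"); // hypothetical path
    gitbutler_fs::create_dirs_then_write(file_path, content)
}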