Clean up some cranky rules

This commit is contained in:
Nikita Galaiko 2023-10-19 15:25:27 +02:00 committed by GitButler
parent 173f047f8e
commit 048b118569
16 changed files with 161 additions and 151 deletions

View File

@ -51,13 +51,13 @@ deny = [
"clippy::unnecessary_self_imports",
"clippy::unneeded_field_pattern",
"clippy::unseparated_literal_suffix",
"clippy::if_then_some_else_none",
"clippy::use_debug"
#TODO:
#clippy::if_then_some_else_none
#clippy::partial_pub_fields
#clippy::print_stdout
#clippy::unwrap_in_result
#clippy::unwrap_used
#clippy::use_debug
#clippy::unwrap_in_result
]
allow = [
@ -74,9 +74,4 @@ allow = [
"clippy::inconsistent_struct_constructor",
"clippy::match_wildcard_for_single_variants",
"clippy::unnested_or_patterns",
#TODO: should probably be cleaned up as any of these could lead to panics or unexpected behaviour (the cast-ones)
"clippy::cast_sign_loss",
"clippy::cast_lossless",
"clippy::match_same_arms",
"clippy::similar_names"
]

View File

@ -1,3 +1,5 @@
use std::time::UNIX_EPOCH;
use anyhow::{Context, Result};
use clap::Args;
use colored::Colorize;
@ -36,14 +38,28 @@ impl super::RunCommand for Info {
.unwrap_or("none".to_string())
.blue()
);
println!(
" project_data_last_fetched: {:?}",
app.project().project_data_last_fetch
);
println!(
" project_gitbutler_data_last_fetched: {:?}",
app.project().gitbutler_data_last_fetch
);
if let Some(last_fetched) = &app.project().project_data_last_fetch {
println!(
" project_data_last_fetched: {}",
last_fetched
.timestamp()
.duration_since(UNIX_EPOCH)?
.as_secs()
.to_string()
.blue()
);
}
if let Some(last_fetched) = &app.project().gitbutler_data_last_fetch {
println!(
" gitbutler_data_last_fetched: {}",
last_fetched
.timestamp()
.duration_since(UNIX_EPOCH)?
.as_secs()
.to_string()
.blue()
);
}
println!(
" path: {}",
app.project().path.display().to_string().blue()

View File

@ -1,4 +1,4 @@
use std::{str, sync::Arc};
use std::{fmt, str, sync::Arc};
use tauri::AppHandle;
@ -18,6 +18,21 @@ pub enum Event {
},
}
impl fmt::Display for Event {
    /// Renders the event for logging/diagnostics.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `Event` has a single variant here (the original match was exhaustive
        // with one arm), so an irrefutable `let` destructure is equivalent.
        let Event::HeadChange {
            project_id,
            reference_name,
        } = self;
        write!(
            f,
            "HeadChange(project_id: {}, reference_name: {})",
            project_id, reference_name
        )
    }
}
impl Event {
pub fn project_id(&self) -> &ProjectId {
match self {

View File

@ -152,9 +152,7 @@ fn main() {
Ok(())
})
.plugin(tauri_plugin_window_state::Builder::default().build())
.plugin(tauri_plugin_single_instance::init(|app, argv, cwd| {
println!("{}, {argv:?}, {cwd}", app.package_info().name);
}))
.plugin(tauri_plugin_single_instance::init(|_, _, _| {}))
.invoke_handler(tauri::generate_handler![
commands::list_deltas,
commands::list_sessions,

View File

@ -45,8 +45,7 @@ impl Document {
pub fn update(&mut self, value: Option<&reader::Content>) -> Result<Option<delta::Delta>> {
let new_text = match value {
Some(reader::Content::UTF8(value)) => value,
Some(_) => "",
None => "",
Some(_) | None => "",
};
let operations = operations::get_delta_operations(&self.to_string(), new_text);

View File

@ -637,8 +637,8 @@ fn build_wd_tree(
let metadata = abs_path.metadata().with_context(|| {
format!("failed to get metadata for {}", abs_path.display())
})?;
let mtime = FileTime::from_last_modification_time(&metadata);
let ctime = FileTime::from_creation_time(&metadata).unwrap_or(mtime);
let modify_time = FileTime::from_last_modification_time(&metadata);
let create_time = FileTime::from_creation_time(&metadata).unwrap_or(modify_time);
let file_content = match session_wd_reader
.read(&file_path)
@ -674,8 +674,8 @@ fn build_wd_tree(
index
.add(&git::IndexEntry {
ctime,
mtime,
ctime: create_time,
mtime: modify_time,
dev: metadata.dev().try_into()?,
ino: metadata.ino().try_into()?,
mode: 33188,
@ -801,8 +801,8 @@ fn add_wd_path(
let metadata = file_path
.metadata()
.with_context(|| "failed to get metadata for".to_string())?;
let mtime = FileTime::from_last_modification_time(&metadata);
let ctime = FileTime::from_creation_time(&metadata).unwrap_or(mtime);
let modify_time = FileTime::from_last_modification_time(&metadata);
let create_time = FileTime::from_creation_time(&metadata).unwrap_or(modify_time);
// look for files that are bigger than 4GB, which are not supported by git
// insert a pointer as the blob content instead
@ -845,8 +845,8 @@ fn add_wd_path(
// create a new IndexEntry from the file metadata
index
.add(&git::IndexEntry {
ctime,
mtime,
ctime: create_time,
mtime: modify_time,
dev: metadata.dev().try_into()?,
ino: metadata.ino().try_into()?,
mode: 33188,
@ -942,12 +942,12 @@ fn add_log_path(
.join("logs")
.join(rel_file_path);
let metadata = file_path.metadata()?;
let mtime = FileTime::from_last_modification_time(&metadata);
let ctime = FileTime::from_creation_time(&metadata).unwrap_or(mtime);
let modify_time = FileTime::from_last_modification_time(&metadata);
let create_time = FileTime::from_creation_time(&metadata).unwrap_or(modify_time);
index.add(&git::IndexEntry {
ctime,
mtime,
ctime: create_time,
mtime: modify_time,
dev: metadata.dev().try_into()?,
ino: metadata.ino().try_into()?,
mode: 33188,
@ -999,14 +999,14 @@ fn add_file_to_index(
) -> Result<()> {
let blob = gb_repository.git_repository.blob_path(abs_file_path)?;
let metadata = abs_file_path.metadata()?;
let mtime = FileTime::from_last_modification_time(&metadata);
let ctime = FileTime::from_creation_time(&metadata).unwrap_or(mtime);
let modified_time = FileTime::from_last_modification_time(&metadata);
let create_time = FileTime::from_creation_time(&metadata).unwrap_or(modified_time);
// create a new IndexEntry from the file metadata
index
.add(&git::IndexEntry {
ctime,
mtime,
ctime: create_time,
mtime: modified_time,
dev: metadata.dev().try_into()?,
ino: metadata.ino().try_into()?,
mode: 33188,

View File

@ -114,11 +114,11 @@ impl From<git2::IndexEntry> for IndexEntry {
fn from(value: git2::IndexEntry) -> Self {
Self {
ctime: FileTime::from_unix_time(
value.ctime.seconds() as i64,
i64::try_from(value.ctime.seconds()).unwrap(),
value.ctime.nanoseconds(),
),
mtime: FileTime::from_unix_time(
value.mtime.seconds() as i64,
i64::try_from(value.mtime.seconds()).unwrap(),
value.mtime.nanoseconds(),
),
dev: value.dev,

View File

@ -41,8 +41,7 @@ pub enum FetchResult {
impl FetchResult {
    /// Returns the time the fetch attempt happened, regardless of whether it
    /// succeeded (`Fetched`) or failed (`Error`).
    //
    // NOTE(review): the diff rendering had dropped the `+`/`-` markers, leaving
    // both the old per-variant arms and the new merged arm in the match; the
    // merged arm was unreachable dead code. Only the merged arm is kept.
    pub fn timestamp(&self) -> &time::SystemTime {
        match self {
            FetchResult::Fetched { timestamp } | FetchResult::Error { timestamp, .. } => timestamp,
        }
    }
}

View File

@ -435,8 +435,7 @@ fn index_delta(
let changes = all_changes
.iter_all_changes()
.filter_map(|change| match change.tag() {
ChangeTag::Delete => change.as_str(),
ChangeTag::Insert => change.as_str(),
ChangeTag::Delete | ChangeTag::Insert => change.as_str(),
ChangeTag::Equal => None,
})
.map(str::trim)

View File

@ -396,8 +396,7 @@ pub fn update_base_branch(
)?;
}
}
Some(upstream) => {
println!("upstream: {:?}", upstream);
Some(_) => {
// get tree from merge_tree_oid
let merge_tree = repo
.find_tree(merge_tree_oid)
@ -531,11 +530,8 @@ pub fn create_virtual_branch_from_branch(
git::BranchName::Remote(remote) => Some(remote.clone()),
git::BranchName::Local(local) => {
let remote_name = format!("{}/{}", default_target.branch.remote(), local.branch());
if remote_name != default_target.branch.branch() {
Some(format!("refs/remotes/{}", remote_name).parse().unwrap())
} else {
None
}
(remote_name != default_target.branch.branch())
.then(|| format!("refs/remotes/{}", remote_name).parse().unwrap())
}
};

View File

@ -107,8 +107,7 @@ impl TryFrom<&dyn crate::reader::Reader> for Branch {
))
})
}
Ok(_) => Ok(None),
Err(crate::reader::Error::NotFound) => Ok(None),
Ok(_) | Err(crate::reader::Error::NotFound) => Ok(None),
Err(e) => Err(e),
}?;
@ -128,8 +127,7 @@ impl TryFrom<&dyn crate::reader::Reader> for Branch {
})
}
}
Ok(_) => Ok(None),
Err(crate::reader::Error::NotFound) => Ok(None),
Ok(_) | Err(crate::reader::Error::NotFound) => Ok(None),
Err(e) => Err(e),
}?;

View File

@ -96,19 +96,17 @@ impl Controller {
) -> Result<git::Oid, Error> {
self.with_lock(project_id, || {
self.with_verify_branch(project_id, |gb_repository, project_repository, user| {
let signing_key = if project_repository
let signing_key = project_repository
.config()
.sign_commits()
.context("failed to get sign commits option")?
{
Some(
.then(|| {
self.keys
.get_or_create()
.context("failed to get private key")?,
)
} else {
None
};
.context("failed to get private key")
})
.transpose()?;
super::commit(
gb_repository,
project_repository,
@ -212,19 +210,16 @@ impl Controller {
)
.map_err(Error::Other)?;
let signing_key = if project_repository
let signing_key = project_repository
.config()
.sign_commits()
.context("failed to get sign commits option")?
{
Some(
.then(|| {
self.keys
.get_or_create()
.context("failed to get private key")?,
)
} else {
None
};
.context("failed to get private key")
})
.transpose()?;
// also apply the branch
super::apply_branch(
@ -315,19 +310,17 @@ impl Controller {
return Err(Error::Conflicting);
}
let signing_key = if project_repository
let signing_key = project_repository
.config()
.sign_commits()
.context("failed to get sign commits option")?
{
Some(
.then(|| {
self.keys
.get_or_create()
.context("failed to get private key")?,
)
} else {
None
};
.context("failed to get private key")
})
.transpose()?;
super::merge_virtual_branch_upstream(
gb_repository,
project_repository,
@ -386,19 +379,17 @@ impl Controller {
) -> Result<(), Error> {
self.with_lock(project_id, || {
self.with_verify_branch(project_id, |gb_repository, project_repository, user| {
let signing_key = if project_repository
let signing_key = project_repository
.config()
.sign_commits()
.context("failed to get sign commits option")?
{
Some(
.then(|| {
self.keys
.get_or_create()
.context("failed to get private key")?,
)
} else {
None
};
.context("failed to get private key")
})
.transpose()?;
super::apply_branch(
gb_repository,
project_repository,

View File

@ -830,12 +830,14 @@ fn calculate_non_commited_files(
.into_iter()
.map(|(file_path, mut non_commited_hunks)| {
// sort non commited hunks the same way as the real hunks are sorted
non_commited_hunks.sort_by_key(|h| {
non_commited_hunks.sort_by_key(|hunk| {
file_hunks.get(&file_path).map_or(Some(0), |hunks| {
hunks.iter().position(|h2| {
let h_range = [h.start..=h.end];
let h2_range = [h2.start..=h2.end];
h2_range.iter().any(|line| h_range.contains(line))
hunks.iter().position(|another_hunk| {
let hunk_range = [hunk.start..=hunk.end];
let another_hunk_range = [another_hunk.start..=another_hunk.end];
another_hunk_range
.iter()
.any(|line| hunk_range.contains(line))
})
})
});
@ -923,7 +925,7 @@ pub fn commit_to_vbranch_commit(
commit: &git::Commit,
upstream_commits: Option<&HashMap<git::Oid, bool>>,
) -> Result<VirtualBranchCommit> {
let timestamp = commit.time().seconds() as u128;
let timestamp = u128::try_from(commit.time().seconds())?;
let signature = commit.author();
let message = commit.message().unwrap().to_string();
@ -1939,15 +1941,19 @@ fn write_tree_onto_commit(
}
fn _print_tree(repo: &git2::Repository, tree: &git2::Tree) -> Result<()> {
println!("tree id: {:?}", tree.id());
println!("tree id: {}", tree.id());
for entry in tree {
println!(" entry: {:?} {:?}", entry.name(), entry.id());
println!(
" entry: {} {}",
entry.name().unwrap_or_default(),
entry.id()
);
// get entry contents
let object = entry.to_object(repo).context("failed to get object")?;
let blob = object.as_blob().context("failed to get blob")?;
// convert content to string
if let Ok(content) = std::str::from_utf8(blob.content()) {
println!(" blob: {:?}", content);
println!(" blob: {}", content);
} else {
println!(" blob: BINARY");
}

View File

@ -36,18 +36,18 @@ impl Event {
match self {
Event::Analytics(event) => event.project_id(),
Event::Emit(event) => event.project_id(),
Event::IndexAll(project_id) => project_id,
Event::Tick(project_id, _) => project_id,
Event::FetchGitbutlerData(project_id, _) => project_id,
Event::FetchProjectData(project_id, _) => project_id,
Event::Flush(project_id, _) => project_id,
Event::GitFileChange(project_id, _) => project_id,
Event::ProjectFileChange(project_id, _) => project_id,
Event::Session(project_id, _) => project_id,
Event::Bookmark(bookmark) => &bookmark.project_id,
Event::SessionFile((project_id, _, _, _)) => project_id,
Event::SessionDelta((project_id, _, _, _)) => project_id,
Event::PushGitbutlerData(project_id) => project_id,
Event::IndexAll(project_id)
| Event::Tick(project_id, _)
| Event::FetchGitbutlerData(project_id, _)
| Event::FetchProjectData(project_id, _)
| Event::Flush(project_id, _)
| Event::GitFileChange(project_id, _)
| Event::ProjectFileChange(project_id, _)
| Event::Session(project_id, _)
| Event::SessionFile((project_id, _, _, _))
| Event::SessionDelta((project_id, _, _, _))
| Event::PushGitbutlerData(project_id) => project_id,
}
}
}
@ -55,7 +55,7 @@ impl Event {
impl Display for Event {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Event::Analytics(event) => write!(f, "Analytics({:?})", event),
Event::Analytics(event) => write!(f, "Analytics({})", event),
Event::Emit(event) => write!(f, "Emit({})", event.name()),
Event::IndexAll(project_id) => write!(f, "IndexAll({})", project_id),
Event::Tick(project_id, ts) => write!(

View File

@ -63,18 +63,17 @@ impl Handler {
gb_repo: &gb_repository::Repository,
path: &path::Path,
) -> Result<Option<Vec<deltas::Delta>>> {
let current_session = gb_repo.get_current_session()?;
if current_session.is_none() {
return Ok(None);
if let Some(current_session) = gb_repo.get_current_session()? {
let session_reader = sessions::Reader::open(gb_repo, &current_session)
.context("failed to get session reader")?;
let deltas_reader = deltas::Reader::new(&session_reader);
let deltas = deltas_reader
.read_file(path)
.context("failed to get file deltas")?;
Ok(deltas)
} else {
Ok(None)
}
let current_session = current_session.unwrap();
let session_reader = sessions::Reader::open(gb_repo, &current_session)
.context("failed to get session reader")?;
let deltas_reader = deltas::Reader::new(&session_reader);
let deltas = deltas_reader
.read_file(path)
.context("failed to get file deltas")?;
Ok(deltas)
}
pub fn handle<P: AsRef<std::path::Path>>(
@ -146,40 +145,39 @@ impl Handler {
.update(current_wd_file_content.as_ref())
.context("failed to calculate new deltas")?;
if new_delta.is_none() {
tracing::debug!(%project_id, path = %path.display(), "no new deltas, ignoring");
return Ok(vec![]);
}
let new_delta = new_delta.as_ref().unwrap();
if let Some(new_delta) = new_delta {
let deltas = text_doc.get_deltas();
let deltas = text_doc.get_deltas();
let writer = deltas::Writer::new(&gb_repository);
writer
.write(path, &deltas)
.with_context(|| "failed to write deltas")?;
let writer = deltas::Writer::new(&gb_repository);
writer
.write(path, &deltas)
.with_context(|| "failed to write deltas")?;
if let Some(reader::Content::UTF8(text)) = current_wd_file_content {
writer.write_wd_file(path, &text)
} else {
writer.write_wd_file(path, "")
}?;
if let Some(reader::Content::UTF8(text)) = current_wd_file_content {
writer.write_wd_file(path, &text)
Ok(vec![
events::Event::SessionFile((
*project_id,
current_session.id,
path.to_path_buf(),
latest_file_content,
)),
events::Event::Session(*project_id, current_session.clone()),
events::Event::SessionDelta((
*project_id,
current_session.id,
path.to_path_buf(),
new_delta.clone(),
)),
])
} else {
writer.write_wd_file(path, "")
}?;
Ok(vec![
events::Event::SessionFile((
*project_id,
current_session.id,
path.to_path_buf(),
latest_file_content,
)),
events::Event::Session(*project_id, current_session.clone()),
events::Event::SessionDelta((
*project_id,
current_session.id,
path.to_path_buf(),
new_delta.clone(),
)),
])
tracing::debug!(%project_id, path = %path.display(), "no new deltas, ignoring");
Ok(vec![])
}
}
}

View File

@ -34,7 +34,7 @@ impl DirWriter {
impl Writer for DirWriter {
fn write(&self, path: &str, contents: &[u8]) -> Result<()> {
let file_path = self.root.join(path);
let dir_path = file_path.parent().unwrap();
let dir_path = file_path.parent().context("failed to get parent")?;
std::fs::create_dir_all(dir_path).context("failed to create directory")?;
std::fs::write(file_path, contents)?;
Ok(())
@ -42,7 +42,7 @@ impl Writer for DirWriter {
fn append_string(&self, path: &str, contents: &str) -> Result<()> {
let file_path = self.root.join(path);
let dir_path = file_path.parent().unwrap();
let dir_path = file_path.parent().context("failed to get parent")?;
std::fs::create_dir_all(dir_path).context("failed to create directory")?;
let mut file = std::fs::OpenOptions::new()
.create(true)