Merge pull request #1279 from gitbutlerapp/Refactor-reader-struct

refactor: use content that is either utf8, large or binary
Nikita Galaiko 2023-10-03 12:20:32 +02:00 committed by GitHub
commit 9faf08e47f
32 changed files with 687 additions and 468 deletions
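The refactor replaces string-based file reads with a single Content enum, so every consumer has to decide what to do with binary or oversized files instead of silently receiving a lossy string. A minimal sketch of the shape this PR introduces: the enum mirrors the one added in reader.rs below, while the as_text helper is purely illustrative and not part of the diff.

#[derive(Debug, Clone, PartialEq)]
pub enum Content {
    UTF8(String), // file decoded as valid UTF-8
    Binary,       // bytes that are not valid UTF-8; the payload is intentionally dropped
    Large,        // file above the size cutoff; never read into memory as text
}

// Hypothetical helper: callers that only care about text match on UTF8 and
// treat Binary/Large as "no text available".
fn as_text(content: &Content) -> Option<&str> {
    match content {
        Content::UTF8(text) => Some(text),
        Content::Binary | Content::Large => None,
    }
}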

View File

@ -230,8 +230,8 @@ impl App {
&self,
project_id: &str,
session_id: &str,
paths: Option<Vec<&str>>,
) -> Result<HashMap<String, String>> {
paths: Option<Vec<path::PathBuf>>,
) -> Result<HashMap<path::PathBuf, reader::Content>> {
let session = self
.sessions_database
.get_by_project_id_id(project_id, session_id)

View File

@ -1,3 +1,5 @@
use std::path;
use anyhow::Result;
use serde_jsonlines::JsonLinesReader;
@ -18,9 +20,12 @@ impl<'reader> BookmarksReader<'reader> {
}
pub fn read(&self) -> Result<Vec<Bookmark>> {
match self.session_reader.read_string("session/bookmarks.jsonl") {
Ok(content) => {
let iter = JsonLinesReader::new(content.as_bytes()).read_all();
match self
.session_reader
.read(&path::Path::new("session/bookmarks.jsonl").to_path_buf())
{
Ok(reader::Content::UTF8(content)) => {
let iter = JsonLinesReader::new(content.as_bytes()).read_all::<Bookmark>();
let mut bookmarks = vec![];
for result in iter {
if result.is_err() {
@ -30,6 +35,7 @@ impl<'reader> BookmarksReader<'reader> {
}
Ok(bookmarks)
}
Ok(_) => Err(anyhow::anyhow!("bookmarks.jsonl is not UTF8 encoded")),
Err(reader::Error::NotFound) => Ok(vec![]),
Err(err) => Err(err.into()),
}
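Bookmarks are stored as JSON Lines, and with the new API the reader must first prove the payload is UTF-8 before handing it to serde_jsonlines. A hedged sketch of that step, slightly simplified from the code above in that it fails on the first malformed line; Bookmark and reader::Content are the types from this diff.

use serde_jsonlines::JsonLinesReader;

fn parse_bookmarks(content: &reader::Content) -> anyhow::Result<Vec<Bookmark>> {
    match content {
        // only UTF-8 payloads are parsed line by line
        reader::Content::UTF8(text) => JsonLinesReader::new(text.as_bytes())
            .read_all::<Bookmark>()
            .collect::<std::io::Result<Vec<_>>>()
            .map_err(Into::into),
        // Binary/Large content is rejected, matching the error above
        _ => Err(anyhow::anyhow!("bookmarks are not UTF-8 encoded")),
    }
}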

View File

@ -1,4 +1,4 @@
use std::collections::HashMap;
use std::{collections::HashMap, path};
use anyhow::{Context, Result};
use tauri::{AppHandle, Manager};
@ -29,7 +29,7 @@ impl Database {
&self,
project_id: &str,
session_id: &str,
file_path: &str,
file_path: &path::Path,
deltas: &Vec<delta::Delta>,
) -> Result<()> {
self.database.transaction(|tx| -> Result<()> {
@ -41,7 +41,7 @@ impl Database {
stmt.execute(rusqlite::named_params! {
":project_id": project_id,
":session_id": session_id,
":file_path": file_path,
":file_path": file_path.display().to_string(),
":timestamp_ms": timestamp_ms,
":operations": operations,
})
@ -143,18 +143,18 @@ mod tests {
let project_id = "project_id";
let session_id = "session_id";
let file_path = "file_path";
let file_path = path::PathBuf::from("file_path");
let delta1 = delta::Delta {
timestamp_ms: 0,
operations: vec![operations::Operation::Insert((0, "text".to_string()))],
};
let deltas = vec![delta1.clone()];
database.insert(project_id, session_id, file_path, &deltas)?;
database.insert(project_id, session_id, &file_path, &deltas)?;
assert_eq!(
database.list_by_project_id_session_id(project_id, session_id, None)?,
vec![(file_path.to_string(), vec![delta1])]
vec![(file_path.display().to_string(), vec![delta1])]
.into_iter()
.collect()
);
@ -169,7 +169,7 @@ mod tests {
let project_id = "project_id";
let session_id = "session_id";
let file_path = "file_path";
let file_path = path::PathBuf::from("file_path");
let delta1 = delta::Delta {
timestamp_ms: 0,
operations: vec![operations::Operation::Insert((0, "text".to_string()))],
@ -182,12 +182,12 @@ mod tests {
))],
};
database.insert(project_id, session_id, file_path, &vec![delta1])?;
database.insert(project_id, session_id, file_path, &vec![delta2.clone()])?;
database.insert(project_id, session_id, &file_path, &vec![delta1])?;
database.insert(project_id, session_id, &file_path, &vec![delta2.clone()])?;
assert_eq!(
database.list_by_project_id_session_id(project_id, session_id, None)?,
vec![(file_path.to_string(), vec![delta2])]
vec![(file_path.display().to_string(), vec![delta2])]
.into_iter()
.collect()
);
@ -202,8 +202,8 @@ mod tests {
let project_id = "project_id";
let session_id = "session_id";
let file_path1 = "file_path1";
let file_path2 = "file_path2";
let file_path1 = path::PathBuf::from("file_path1");
let file_path2 = path::PathBuf::from("file_path2");
let delta1 = delta::Delta {
timestamp_ms: 1,
operations: vec![operations::Operation::Insert((0, "text".to_string()))],
@ -216,15 +216,15 @@ mod tests {
))],
};
database.insert(project_id, session_id, file_path1, &vec![delta1.clone()])?;
database.insert(project_id, session_id, file_path2, &vec![delta1.clone()])?;
database.insert(project_id, session_id, file_path2, &vec![delta2.clone()])?;
database.insert(project_id, session_id, &file_path1, &vec![delta1.clone()])?;
database.insert(project_id, session_id, &file_path2, &vec![delta1.clone()])?;
database.insert(project_id, session_id, &file_path2, &vec![delta2.clone()])?;
assert_eq!(
database.list_by_project_id_session_id(project_id, session_id, None)?,
vec![
(file_path1.to_string(), vec![delta1.clone()]),
(file_path2.to_string(), vec![delta1, delta2])
(file_path1.display().to_string(), vec![delta1.clone()]),
(file_path2.display().to_string(), vec![delta1, delta2])
]
.into_iter()
.collect()

View File

@ -1,3 +1,5 @@
use crate::reader;
use super::{delta, operations};
use anyhow::Result;
use std::{
@ -26,9 +28,9 @@ impl Document {
}
// returns a text document where internal state is seeded with value, and deltas are applied.
pub fn new(value: Option<&str>, deltas: Vec<delta::Delta>) -> Result<Document> {
pub fn new(value: Option<&reader::Content>, deltas: Vec<delta::Delta>) -> Result<Document> {
let mut all_deltas = vec![];
if let Some(value) = value {
if let Some(reader::Content::UTF8(value)) = value {
all_deltas.push(delta::Delta {
operations: operations::get_delta_operations("", value),
timestamp_ms: 0,
@ -40,10 +42,26 @@ impl Document {
Ok(Document { doc, deltas })
}
pub fn update(&mut self, value: &str) -> Result<Option<delta::Delta>> {
let operations = operations::get_delta_operations(&self.to_string(), value);
pub fn update(&mut self, value: Option<&reader::Content>) -> Result<Option<delta::Delta>> {
let new_text = match value {
Some(reader::Content::UTF8(value)) => value,
Some(_) => "",
None => "",
};
let operations = operations::get_delta_operations(&self.to_string(), new_text);
if operations.is_empty() {
return Ok(None);
if matches!(value, Some(reader::Content::UTF8(_))) {
return Ok(None);
} else {
return Ok(Some(delta::Delta {
operations,
timestamp_ms: SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_millis(),
}));
}
}
let delta = delta::Delta {
operations,
@ -72,7 +90,10 @@ mod tests {
#[test]
fn test_new() {
let document = Document::new(Some("hello world"), vec![]);
let document = Document::new(
Some(&reader::Content::UTF8("hello world".to_string())),
vec![],
);
assert!(document.is_ok());
let document = document.unwrap();
assert_eq!(document.to_string(), "hello world");
@ -81,10 +102,15 @@ mod tests {
#[test]
fn test_update() {
let document = Document::new(Some("hello world"), vec![]);
let document = Document::new(
Some(&reader::Content::UTF8("hello world".to_string())),
vec![],
);
assert!(document.is_ok());
let mut document = document.unwrap();
document.update("hello world!").unwrap();
document
.update(Some(&reader::Content::UTF8("hello world!".to_string())))
.unwrap();
assert_eq!(document.to_string(), "hello world!");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
@ -99,7 +125,9 @@ mod tests {
let document = Document::new(None, vec![]);
assert!(document.is_ok());
let mut document = document.unwrap();
document.update("hello world!").unwrap();
document
.update(Some(&reader::Content::UTF8("hello world!".to_string())))
.unwrap();
assert_eq!(document.to_string(), "hello world!");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
@ -142,7 +170,9 @@ mod tests {
assert!(document.is_ok());
let mut document = document.unwrap();
document.update("hello").unwrap();
document
.update(Some(&reader::Content::UTF8("hello".to_string())))
.unwrap();
assert_eq!(document.to_string(), "hello");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
@ -151,7 +181,9 @@ mod tests {
Operation::Insert((0, "hello".to_string()))
);
document.update("hello world").unwrap();
document
.update(Some(&reader::Content::UTF8("hello world".to_string())))
.unwrap();
assert_eq!(document.to_string(), "hello world");
assert_eq!(document.get_deltas().len(), 2);
assert_eq!(document.get_deltas()[1].operations.len(), 1);
@ -160,7 +192,9 @@ mod tests {
Operation::Insert((5, " world".to_string()))
);
document.update("held!").unwrap();
document
.update(Some(&reader::Content::UTF8("held!".to_string())))
.unwrap();
assert_eq!(document.to_string(), "held!");
assert_eq!(document.get_deltas().len(), 3);
assert_eq!(document.get_deltas()[2].operations.len(), 2);
@ -180,7 +214,9 @@ mod tests {
assert!(document.is_ok());
let mut document = document.unwrap();
document.update("first").unwrap();
document
.update(Some(&reader::Content::UTF8("first".to_string())))
.unwrap();
assert_eq!(document.to_string(), "first");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
@ -189,7 +225,9 @@ mod tests {
Operation::Insert((0, "first".to_string()))
);
document.update("first\ntwo").unwrap();
document
.update(Some(&reader::Content::UTF8("first\ntwo".to_string())))
.unwrap();
assert_eq!(document.to_string(), "first\ntwo");
assert_eq!(document.get_deltas().len(), 2);
assert_eq!(document.get_deltas()[1].operations.len(), 1);
@ -198,7 +236,11 @@ mod tests {
Operation::Insert((5, "\ntwo".to_string()))
);
document.update("first line\nline two").unwrap();
document
.update(Some(&reader::Content::UTF8(
"first line\nline two".to_string(),
)))
.unwrap();
assert_eq!(document.to_string(), "first line\nline two");
assert_eq!(document.get_deltas().len(), 3);
assert_eq!(document.get_deltas()[2].operations.len(), 2);
@ -218,7 +260,11 @@ mod tests {
assert!(document.is_ok());
let mut document = document.unwrap();
document.update("first line\nline two").unwrap();
document
.update(Some(&reader::Content::UTF8(
"first line\nline two".to_string(),
)))
.unwrap();
assert_eq!(document.to_string(), "first line\nline two");
assert_eq!(document.get_deltas().len(), 1);
assert_eq!(document.get_deltas()[0].operations.len(), 1);
@ -227,7 +273,9 @@ mod tests {
Operation::Insert((0, "first line\nline two".to_string()))
);
document.update("first\ntwo").unwrap();
document
.update(Some(&reader::Content::UTF8("first\ntwo".to_string())))
.unwrap();
assert_eq!(document.to_string(), "first\ntwo");
assert_eq!(document.get_deltas().len(), 2);
assert_eq!(document.get_deltas()[1].operations.len(), 2);
@ -240,7 +288,9 @@ mod tests {
Operation::Delete((6, 5))
);
document.update("first").unwrap();
document
.update(Some(&reader::Content::UTF8("first".to_string())))
.unwrap();
assert_eq!(document.to_string(), "first");
assert_eq!(document.get_deltas().len(), 3);
assert_eq!(document.get_deltas()[2].operations.len(), 1);
@ -249,7 +299,7 @@ mod tests {
Operation::Delete((5, 4))
);
document.update("").unwrap();
document.update(None).unwrap();
assert_eq!(document.to_string(), "");
assert_eq!(document.get_deltas().len(), 4);
assert_eq!(document.get_deltas()[3].operations.len(), 1);
@ -259,12 +309,42 @@ mod tests {
);
}
#[test]
fn test_binary_to_text() {
let latest = reader::Content::Binary;
let current = reader::Content::UTF8("test".to_string());
let mut document = Document::new(Some(&latest), vec![]).unwrap();
let new_deltas = document.update(Some(&current)).unwrap();
assert!(new_deltas.is_some());
assert_eq!(document.to_string(), "test");
}
#[test]
fn test_binary_to_binary() {
let latest = reader::Content::Binary;
let current = reader::Content::Binary;
let mut document = Document::new(Some(&latest), vec![]).unwrap();
let new_deltas = document.update(Some(&current)).unwrap();
assert!(new_deltas.is_some());
assert_eq!(document.to_string(), "");
}
#[test]
fn test_text_to_binary() {
let latest = reader::Content::UTF8("text".to_string());
let current = reader::Content::Binary;
let mut document = Document::new(Some(&latest), vec![]).unwrap();
let new_deltas = document.update(Some(&current)).unwrap();
assert!(new_deltas.is_some());
assert_eq!(document.to_string(), "");
}
#[test]
fn test_unicode() {
let latest = Some("🌚");
let current = "🌝";
let mut document = Document::new(latest, vec![]).unwrap();
document.update(current).unwrap();
let latest = reader::Content::UTF8("🌚".to_string());
let current = reader::Content::UTF8("🌝".to_string());
let mut document = Document::new(Some(&latest), vec![]).unwrap();
document.update(Some(&current)).unwrap();
assert_eq!(document.to_string(), "🌝");
}
}
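A compact usage sketch of the new Document API, following the tests above: both the seed value and every update are Option<&reader::Content>, and a transition to Binary or Large is treated as the file becoming empty, so a delta is still emitted and recorded.

fn document_example() -> anyhow::Result<()> {
    let seed = reader::Content::UTF8("hello".to_string());
    let mut document = Document::new(Some(&seed), vec![])?;

    // text -> text: one insert delta
    let delta = document.update(Some(&reader::Content::UTF8("hello world".to_string())))?;
    assert!(delta.is_some());

    // text -> binary: content collapses to "", recorded as a delete delta
    let delta = document.update(Some(&reader::Content::Binary))?;
    assert!(delta.is_some());
    assert_eq!(document.to_string(), "");

    Ok(())
}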

View File

@ -18,8 +18,8 @@ impl<'reader> DeltasReader<'reader> {
pub fn read_file<P: AsRef<std::path::Path>>(&self, path: P) -> Result<Option<Vec<Delta>>> {
let path = path.as_ref();
let file_deltas_path = std::path::Path::new("session/deltas").join(path);
match self.reader.read_string(file_deltas_path.to_str().unwrap()) {
Ok(content) => {
match self.reader.read(&file_deltas_path) {
Ok(reader::Content::UTF8(content)) => {
if content.is_empty() {
// this is a leftover from some bug, shouldn't happen anymore
Ok(None)
@ -27,18 +27,25 @@ impl<'reader> DeltasReader<'reader> {
Ok(Some(serde_json::from_str(&content)?))
}
}
Ok(_) => Err(anyhow::anyhow!(
"expected {} to be utf8 content",
file_deltas_path.display()
)),
Err(reader::Error::NotFound) => Ok(None),
Err(err) => Err(err.into()),
}
}
pub fn read(&self, paths: Option<Vec<&str>>) -> Result<HashMap<String, Vec<Delta>>> {
pub fn read(
&self,
paths: Option<Vec<&path::Path>>,
) -> Result<HashMap<path::PathBuf, Vec<Delta>>> {
let deltas_dir = path::Path::new("session/deltas");
let files = self.reader.list_files(deltas_dir.to_str().unwrap())?;
let files = self.reader.list_files(deltas_dir)?;
let mut result = HashMap::new();
for file_path in files {
if let Some(paths) = paths.as_ref() {
if !paths.iter().any(|path| file_path.eq(path)) {
if !paths.into_iter().any(|path| file_path.eq(path)) {
continue;
}
}
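With deltas now keyed by PathBuf, a filtered lookup might look like the following hedged sketch; DeltasReader and Delta are the types in this file, and Delta is Clone per the database tests elsewhere in the diff.

fn deltas_for(reader: &DeltasReader, name: &str) -> anyhow::Result<Option<Vec<Delta>>> {
    let wanted = std::path::Path::new(name);
    // read only the requested path, then pull it out of the PathBuf-keyed map
    let all = reader.read(Some(vec![wanted]))?;
    Ok(all.get(wanted).cloned())
}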

View File

@ -1,7 +1,7 @@
use anyhow::{Context, Result};
use tauri::{AppHandle, Manager};
use crate::{bookmarks, deltas, sessions};
use crate::{bookmarks, deltas, reader, sessions};
#[derive(Clone)]
pub struct Sender {
@ -74,7 +74,12 @@ impl Event {
}
}
pub fn file(project_id: &str, session_id: &str, file_path: &str, contents: &str) -> Self {
pub fn file(
project_id: &str,
session_id: &str,
file_path: &str,
contents: Option<&reader::Content>,
) -> Self {
Event {
name: format!("project://{}/sessions/{}/files", project_id, session_id),
payload: serde_json::json!({

View File

@ -640,7 +640,7 @@ fn build_wd_tree(
let session_wd_reader = reader::DirReader::open(gb_repository.session_wd_path());
let session_wd_files = session_wd_reader
.list_files(".")
.list_files(&path::PathBuf::from("."))
.context("failed to read session wd files")?;
for file_path in session_wd_files {
let abs_path = gb_repository.session_wd_path().join(&file_path);
@ -651,10 +651,26 @@ fn build_wd_tree(
let ctime = FileTime::from_creation_time(&metadata).unwrap_or(mtime);
let file_content = match session_wd_reader
.read_string(&file_path)
.read(&file_path)
.context("failed to read file")
{
Result::Ok(content) => content,
Result::Ok(reader::Content::UTF8(content)) => content,
Result::Ok(reader::Content::Large) => {
tracing::error!(
project_id = gb_repository.project_id,
path = %abs_path.display(),
"large file in session working directory"
);
continue;
}
Result::Ok(reader::Content::Binary) => {
tracing::error!(
project_id = gb_repository.project_id,
path = %abs_path.display(),
"binary file in session working directory"
);
continue;
}
Err(error) => {
tracing::error!(
project_id = gb_repository.project_id,
@ -678,10 +694,12 @@ fn build_wd_tree(
file_size: metadata.len().try_into().unwrap(),
flags: 10, // normal flags for normal file (for the curious: https://git-scm.com/docs/index-format)
flags_extended: 0, // no extended flags
path: file_path.clone().into(),
path: file_path.display().to_string().into(),
id: gb_repository.git_repository.blob(file_content.as_bytes())?,
})
.with_context(|| format!("failed to add index entry for {}", file_path))?;
.with_context(|| {
format!("failed to add index entry for {}", file_path.display())
})?;
}
let wd_tree_oid = index

View File

@ -1,6 +1,6 @@
use std::{thread, time};
use std::{path, thread, time};
use crate::{deltas, gb_repository, projects, sessions, test_utils, users};
use crate::{deltas, gb_repository, projects, reader, sessions, test_utils, users};
use anyhow::Result;
use tempfile::tempdir;
@ -166,9 +166,14 @@ fn test_list_deltas_from_current_session() -> Result<()> {
let deltas = deltas_reader.read(None)?;
assert_eq!(deltas.len(), 1);
assert_eq!(deltas.get("test.txt").unwrap()[0].operations.len(), 1);
assert_eq!(
deltas.get("test.txt").unwrap()[0].operations[0],
deltas.get(&path::PathBuf::from("test.txt")).unwrap()[0]
.operations
.len(),
1
);
assert_eq!(
deltas.get(&path::PathBuf::from("test.txt")).unwrap()[0].operations[0],
deltas::Operation::Insert((0, "Hello World".to_string()))
);
@ -202,9 +207,14 @@ fn test_list_deltas_from_flushed_session() -> Result<()> {
let deltas = deltas_reader.read(None)?;
assert_eq!(deltas.len(), 1);
assert_eq!(deltas.get("test.txt").unwrap()[0].operations.len(), 1);
assert_eq!(
deltas.get("test.txt").unwrap()[0].operations[0],
deltas.get(&path::PathBuf::from("test.txt")).unwrap()[0]
.operations
.len(),
1
);
assert_eq!(
deltas.get(&path::PathBuf::from("test.txt")).unwrap()[0].operations[0],
deltas::Operation::Insert((0, "Hello World".to_string()))
);
@ -236,7 +246,10 @@ fn test_list_files_from_current_session() -> Result<()> {
let files = reader.files(None)?;
assert_eq!(files.len(), 1);
assert_eq!(files.get("test.txt").unwrap(), "Hello World");
assert_eq!(
files.get(&path::PathBuf::from("test.txt")).unwrap(),
&reader::Content::UTF8("Hello World".to_string())
);
Ok(())
}
@ -267,7 +280,10 @@ fn test_list_files_from_flushed_session() -> Result<()> {
let files = reader.files(None)?;
assert_eq!(files.len(), 1);
assert_eq!(files.get("test.txt").unwrap(), "Hello World");
assert_eq!(
files.get(&path::PathBuf::from("test.txt")).unwrap(),
&reader::Content::UTF8("Hello World".to_string())
);
Ok(())
}
@ -348,9 +364,12 @@ fn test_remote_syncronization() -> Result<()> {
let files = session_reader.files(None)?;
assert_eq!(deltas.len(), 1);
assert_eq!(files.len(), 1);
assert_eq!(files.get("test.txt").unwrap(), "Hello World");
assert_eq!(
deltas.get("test.txt").unwrap(),
files.get(&path::PathBuf::from("test.txt")).unwrap(),
&reader::Content::UTF8("Hello World".to_string())
);
assert_eq!(
deltas.get(&path::PathBuf::from("test.txt")).unwrap(),
&vec![deltas::Delta {
operations: vec![deltas::Operation::Insert((0, "Hello World".to_string()))],
timestamp_ms: 0,

View File

@ -0,0 +1,17 @@
pub struct Blob<'a>(git2::Blob<'a>);
impl<'a> From<git2::Blob<'a>> for Blob<'a> {
fn from(value: git2::Blob<'a>) -> Self {
Self(value)
}
}
impl Blob<'_> {
pub fn content(&self) -> &[u8] {
self.0.content()
}
pub fn size(&self) -> usize {
self.0.size()
}
}
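The Blob wrapper exists mainly so the reader can build a Content value without touching git2 directly. A hedged sketch of the intended flow; git::Repository, git::Oid and reader::Content are this crate's own types from the diff, and the error conversion is assumed to satisfy anyhow.

fn classify_blob(repo: &git::Repository, id: git::Oid) -> anyhow::Result<reader::Content> {
    let blob = repo.find_blob(id)?;    // git::Blob, wrapping git2::Blob
    Ok(reader::Content::from(&blob))   // UTF8 / Binary / Large via From<&git::Blob>
}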

View File

@ -1,6 +1,9 @@
pub mod credentials;
pub mod diff;
mod blob;
pub use blob::*;
mod error;
pub use error::*;

View File

@ -3,8 +3,8 @@ use std::{path, str};
use crate::keys;
use super::{
AnnotatedCommit, Branch, BranchName, Commit, Config, Index, Oid, Reference, Remote, Result,
Signature, Tree, TreeBuilder,
AnnotatedCommit, Blob, Branch, BranchName, Commit, Config, Index, Oid, Reference, Remote,
Result, Signature, Tree, TreeBuilder,
};
// wrapper around git2::Repository to get control over how it's used.
@ -151,8 +151,11 @@ impl Repository {
.map_err(Into::into)
}
pub fn find_blob(&self, id: Oid) -> Result<git2::Blob> {
self.0.find_blob(id.into()).map_err(Into::into)
pub fn find_blob(&self, id: Oid) -> Result<Blob> {
self.0
.find_blob(id.into())
.map(Into::into)
.map_err(Into::into)
}
pub fn revwalk(&self) -> Result<git2::Revwalk> {

View File

@ -212,8 +212,8 @@ async fn list_session_files(
handle: tauri::AppHandle,
project_id: &str,
session_id: &str,
paths: Option<Vec<&str>>,
) -> Result<HashMap<String, String>, Error> {
paths: Option<Vec<path::PathBuf>>,
) -> Result<HashMap<path::PathBuf, reader::Content>, Error> {
let app = handle.state::<app::App>();
let files = app
.list_session_files(project_id, session_id, paths)

View File

@ -1,69 +1,25 @@
use std::{path, str};
use std::{num, path, str};
use anyhow::{Context, Result};
use serde::{ser::SerializeStruct, Serialize};
use crate::{fs, git};
#[derive(Debug, PartialEq)]
pub enum Content {
UTF8(String),
Binary(Vec<u8>),
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("file not found")]
NotFound,
#[error("io error: {0}")]
IOError(std::io::Error),
Io(#[from] std::io::Error),
#[error(transparent)]
From(#[from] FromError),
}
pub trait Reader {
fn read(&self, file_path: &str) -> Result<Content, Error>;
fn list_files(&self, dir_path: &str) -> Result<Vec<String>>;
fn exists(&self, file_path: &str) -> bool;
fn size(&self, file_path: &str) -> Result<usize>;
fn is_dir(&self, file_path: &str) -> bool;
fn read_usize(&self, file_path: &str) -> Result<usize, Error> {
let s = self.read_string(file_path)?;
s.parse::<usize>().map_err(|_| {
Error::IOError(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"file is not usize",
))
})
}
fn read_string(&self, file_path: &str) -> Result<String, Error> {
match self.read(file_path)? {
Content::UTF8(s) => Ok(s),
Content::Binary(_) => Err(Error::IOError(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"file is not utf8",
))),
}
}
fn read_u128(&self, file_path: &str) -> Result<u128, Error> {
let s = self.read_string(file_path)?;
s.parse::<u128>().map_err(|_| {
Error::IOError(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"file is not u128",
))
})
}
fn read_bool(&self, file_path: &str) -> Result<bool, Error> {
let s = self.read_string(file_path)?;
s.parse::<bool>().map_err(|_| {
Error::IOError(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"file is not bool",
))
})
}
fn read(&self, file_path: &path::Path) -> Result<Content, Error>;
fn list_files(&self, dir_path: &path::Path) -> Result<Vec<path::PathBuf>>;
fn is_dir(&self, file_path: &path::Path) -> bool;
fn exists(&self, file_path: &path::Path) -> bool;
}
pub struct DirReader {
@ -77,43 +33,32 @@ impl DirReader {
}
impl Reader for DirReader {
fn is_dir(&self, file_path: &str) -> bool {
fn is_dir(&self, file_path: &path::Path) -> bool {
let path = self.root.join(file_path);
path.exists() && path.is_dir()
}
fn size(&self, file_path: &str) -> Result<usize> {
fn exists(&self, file_path: &path::Path) -> bool {
let path = self.root.join(file_path);
if !path.exists() {
return Ok(0);
}
let metadata = std::fs::metadata(path)?;
Ok(metadata.len().try_into()?)
path.exists()
}
fn read(&self, path: &str) -> Result<Content, Error> {
fn read(&self, path: &path::Path) -> Result<Content, Error> {
let path = self.root.join(path);
if !path.exists() {
return Err(Error::NotFound);
}
let content = std::fs::read(path).map_err(Error::IOError)?;
match String::from_utf8_lossy(&content).into_owned() {
s if s.as_bytes().eq(&content) => Ok(Content::UTF8(s)),
_ => Ok(Content::Binary(content)),
}
let content = Content::try_from(&path).map_err(Error::Io)?;
Ok(content)
}
fn list_files(&self, dir_path: &str) -> Result<Vec<String>> {
let files: Vec<String> = fs::list_files(self.root.join(dir_path))?
.iter()
.map(|f| f.to_str().unwrap().to_string())
.filter(|f| !f.starts_with(".git"))
.collect();
Ok(files)
}
fn exists(&self, file_path: &str) -> bool {
std::path::Path::new(self.root.join(file_path).as_path()).exists()
fn list_files(&self, dir_path: &path::Path) -> Result<Vec<path::PathBuf>> {
fs::list_files(self.root.join(dir_path)).map(|files| {
files
.into_iter()
.filter(|f| !f.starts_with(".git"))
.collect::<Vec<_>>()
})
}
}
@ -144,11 +89,11 @@ impl<'reader> CommitReader<'reader> {
}
impl Reader for CommitReader<'_> {
fn is_dir(&self, file_path: &str) -> bool {
fn is_dir(&self, file_path: &path::Path) -> bool {
let entry = match self
.tree
.get_path(std::path::Path::new(file_path))
.with_context(|| format!("{}: tree entry not found", file_path))
.context(format!("{}: tree entry not found", file_path.display()))
{
Ok(entry) => entry,
Err(_) => return false,
@ -156,27 +101,11 @@ impl Reader for CommitReader<'_> {
entry.kind() == Some(git2::ObjectType::Tree)
}
fn size(&self, file_path: &str) -> Result<usize> {
let entry = match self
.tree
.get_path(std::path::Path::new(file_path))
.with_context(|| format!("{}: tree entry not found", file_path))
{
Ok(entry) => entry,
Err(_) => return Ok(0),
};
let blob = match self.repository.find_blob(entry.id()) {
Ok(blob) => blob,
Err(_) => return Ok(0),
};
Ok(blob.size())
}
fn read(&self, path: &str) -> Result<Content, Error> {
fn read(&self, path: &path::Path) -> Result<Content, Error> {
let entry = match self
.tree
.get_path(std::path::Path::new(path))
.with_context(|| format!("{}: tree entry not found", path))
.context(format!("{}: tree entry not found", path.display()))
{
Ok(entry) => entry,
Err(_) => return Err(Error::NotFound),
@ -185,15 +114,11 @@ impl Reader for CommitReader<'_> {
Ok(blob) => blob,
Err(_) => return Err(Error::NotFound),
};
let content = blob.content();
match String::from_utf8_lossy(content).into_owned() {
s if s.as_bytes().eq(content) => Ok(Content::UTF8(s)),
_ => Ok(Content::Binary(content.to_vec())),
}
Ok(Content::from(&blob))
}
fn list_files(&self, dir_path: &str) -> Result<Vec<String>> {
let mut files: Vec<String> = Vec::new();
fn list_files(&self, dir_path: &path::Path) -> Result<Vec<path::PathBuf>> {
let mut files = vec![];
let dir_path = std::path::Path::new(dir_path);
self.tree
.walk(git2::TreeWalkMode::PreOrder, |root, entry| {
@ -210,14 +135,7 @@ impl Reader for CommitReader<'_> {
return git2::TreeWalkResult::Ok;
}
files.push(
entry_path
.strip_prefix(dir_path)
.unwrap()
.to_str()
.unwrap()
.to_string(),
);
files.push(entry_path.strip_prefix(dir_path).unwrap().to_path_buf());
git2::TreeWalkResult::Ok
})
@ -226,8 +144,8 @@ impl Reader for CommitReader<'_> {
Ok(files)
}
fn exists(&self, file_path: &str) -> bool {
self.tree.get_path(std::path::Path::new(file_path)).is_ok()
fn exists(&self, file_path: &path::Path) -> bool {
self.tree.get_path(file_path).is_ok()
}
}
@ -246,28 +164,157 @@ impl<'reader> SubReader<'reader> {
}
impl Reader for SubReader<'_> {
fn is_dir(&self, file_path: &str) -> bool {
self.reader
.is_dir(self.prefix.join(file_path).to_str().unwrap())
fn is_dir(&self, file_path: &path::Path) -> bool {
self.reader.is_dir(&self.prefix.join(file_path))
}
fn size(&self, file_path: &str) -> Result<usize> {
self.reader
.size(self.prefix.join(file_path).to_str().unwrap())
fn read(&self, path: &path::Path) -> Result<Content, Error> {
self.reader.read(&self.prefix.join(path))
}
fn read(&self, path: &str) -> Result<Content, Error> {
self.reader.read(self.prefix.join(path).to_str().unwrap())
fn list_files(&self, dir_path: &path::Path) -> Result<Vec<path::PathBuf>> {
self.reader.list_files(&self.prefix.join(dir_path))
}
fn list_files(&self, dir_path: &str) -> Result<Vec<String>> {
self.reader
.list_files(self.prefix.join(dir_path).to_str().unwrap())
fn exists(&self, file_path: &path::Path) -> bool {
self.reader.exists(&self.prefix.join(file_path))
}
}
fn exists(&self, file_path: &str) -> bool {
self.reader
.exists(self.prefix.join(file_path).to_str().unwrap())
#[derive(Debug, thiserror::Error)]
pub enum FromError {
#[error(transparent)]
ParseInt(#[from] num::ParseIntError),
#[error(transparent)]
ParseBool(#[from] str::ParseBoolError),
#[error("file is binary")]
Binary,
#[error("file too large")]
Large,
}
#[derive(Debug, Clone, PartialEq)]
pub enum Content {
UTF8(String),
Binary,
Large,
}
impl Serialize for Content {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
match self {
Content::UTF8(text) => {
let mut state = serializer.serialize_struct("Content", 2)?;
state.serialize_field("type", "utf8")?;
state.serialize_field("value", text)?;
state.end()
}
Content::Binary => {
let mut state = serializer.serialize_struct("Content", 1)?;
state.serialize_field("type", "binary")?;
state.end()
}
Content::Large => {
let mut state = serializer.serialize_struct("Content", 1)?;
state.serialize_field("type", "large")?;
state.end()
}
}
}
}
impl Content {
const MAX_SIZE: usize = 1024 * 1024 * 10; // 10 MB
}
impl From<&str> for Content {
fn from(text: &str) -> Self {
if text.len() > Self::MAX_SIZE {
Content::Large
} else {
Content::UTF8(text.to_string())
}
}
}
impl TryFrom<&path::PathBuf> for Content {
type Error = std::io::Error;
fn try_from(value: &path::PathBuf) -> Result<Self, Self::Error> {
let metadata = std::fs::metadata(value)?;
if metadata.len() > Content::MAX_SIZE as u64 {
return Ok(Content::Large);
}
let content = std::fs::read(value)?;
Ok(content.as_slice().into())
}
}
impl From<&git::Blob<'_>> for Content {
fn from(value: &git::Blob) -> Self {
if value.size() > Content::MAX_SIZE {
Content::Large
} else {
value.content().into()
}
}
}
impl From<&[u8]> for Content {
fn from(bytes: &[u8]) -> Self {
if bytes.len() > Self::MAX_SIZE {
Content::Large
} else {
match String::from_utf8(bytes.to_vec()) {
Err(_) => Content::Binary,
Ok(text) => Content::UTF8(text),
}
}
}
}
impl TryFrom<Content> for usize {
type Error = FromError;
fn try_from(content: Content) -> Result<Self, Self::Error> {
match content {
Content::UTF8(text) => text.parse().map_err(FromError::ParseInt),
Content::Binary => Err(FromError::Binary),
Content::Large => Err(FromError::Large),
}
}
}
impl TryFrom<Content> for String {
type Error = FromError;
fn try_from(content: Content) -> Result<Self, Self::Error> {
match content {
Content::UTF8(text) => Ok(text),
Content::Binary => Err(FromError::Binary),
Content::Large => Err(FromError::Large),
}
}
}
impl TryFrom<Content> for u128 {
type Error = FromError;
fn try_from(content: Content) -> Result<Self, Self::Error> {
let text: String = content.try_into()?;
text.parse().map_err(FromError::ParseInt)
}
}
impl TryFrom<Content> for bool {
type Error = FromError;
fn try_from(content: Content) -> Result<Self, Self::Error> {
let text: String = content.try_into()?;
text.parse().map_err(FromError::ParseBool)
}
}
@ -285,10 +332,10 @@ mod tests {
let reader = DirReader::open(dir.clone());
std::fs::create_dir(dir.join("dir"))?;
std::fs::write(dir.join("dir/test.txt"), "test")?;
assert!(reader.is_dir("."));
assert!(reader.is_dir("dir"));
assert!(!reader.is_dir("dir/test.txt"));
assert!(!reader.is_dir("404.txt"));
assert!(reader.is_dir(path::Path::new(".")));
assert!(reader.is_dir(path::Path::new("dir")));
assert!(!reader.is_dir(path::Path::new("dir/test.txt")));
assert!(!reader.is_dir(path::Path::new("404.txt")));
Ok(())
}
@ -296,7 +343,7 @@ mod tests {
fn test_directory_reader_read_file() -> Result<()> {
let dir = test_utils::temp_dir();
let file_path = "test.txt";
let file_path = path::Path::new("test.txt");
std::fs::write(dir.join(file_path), "test")?;
let reader = DirReader::open(dir.to_path_buf());
@ -317,9 +364,9 @@ mod tests {
let oid = test_utils::commit_all(&repository);
let reader = CommitReader::from_commit(&repository, &repository.find_commit(oid)?)?;
assert!(reader.is_dir("dir"));
assert!(!reader.is_dir("dir/test.txt"));
assert!(!reader.is_dir("404.txt"));
assert!(reader.is_dir(path::Path::new("dir")));
assert!(!reader.is_dir(path::Path::new("dir/test.txt")));
assert!(!reader.is_dir(path::Path::new("404.txt")));
Ok(())
}
@ -327,7 +374,7 @@ mod tests {
fn test_commit_reader_read_file() -> Result<()> {
let repository = test_utils::test_repository();
let file_path = "test.txt";
let file_path = path::Path::new("test.txt");
std::fs::write(repository.path().parent().unwrap().join(file_path), "test")?;
let oid = test_utils::commit_all(&repository);
@ -349,9 +396,9 @@ mod tests {
std::fs::write(dir.join("dir").join("test.txt"), "test")?;
let reader = DirReader::open(dir.to_path_buf());
let files = reader.list_files("dir")?;
let files = reader.list_files(path::Path::new("dir"))?;
assert_eq!(files.len(), 1);
assert!(files.contains(&"test.txt".to_string()));
assert!(files.contains(&path::Path::new("test.txt").to_path_buf()));
Ok(())
}
@ -365,10 +412,10 @@ mod tests {
std::fs::write(dir.join("dir").join("test.txt"), "test")?;
let reader = DirReader::open(dir.to_path_buf());
let files = reader.list_files("")?;
let files = reader.list_files(path::Path::new(""))?;
assert_eq!(files.len(), 2);
assert!(files.contains(&"test.txt".to_string()));
assert!(files.contains(&"dir/test.txt".to_string()));
assert!(files.contains(&path::Path::new("test.txt").to_path_buf()));
assert!(files.contains(&path::Path::new("dir/test.txt").to_path_buf()));
Ok(())
}
@ -397,9 +444,9 @@ mod tests {
std::fs::remove_dir_all(repository.path().parent().unwrap().join("dir"))?;
let reader = CommitReader::from_commit(&repository, &repository.find_commit(oid)?)?;
let files = reader.list_files("dir")?;
let files = reader.list_files(path::Path::new("dir"))?;
assert_eq!(files.len(), 1);
assert!(files.contains(&"test.txt".to_string()));
assert!(files.contains(&path::Path::new("test.txt").to_path_buf()));
Ok(())
}
@ -425,10 +472,10 @@ mod tests {
std::fs::remove_dir_all(repository.path().parent().unwrap().join("dir"))?;
let reader = CommitReader::from_commit(&repository, &repository.find_commit(oid)?)?;
let files = reader.list_files("")?;
let files = reader.list_files(path::Path::new(""))?;
assert_eq!(files.len(), 2);
assert!(files.contains(&"test.txt".to_string()));
assert!(files.contains(&"dir/test.txt".to_string()));
assert!(files.contains(&path::Path::new("test.txt").to_path_buf()));
assert!(files.contains(&path::Path::new("dir/test.txt").to_path_buf()));
Ok(())
}
@ -440,8 +487,8 @@ mod tests {
std::fs::write(dir.join("test.txt"), "test")?;
let reader = DirReader::open(dir.to_path_buf());
assert!(reader.exists("test.txt"));
assert!(!reader.exists("test2.txt"));
assert!(reader.exists(path::Path::new("test.txt")));
assert!(!reader.exists(path::Path::new("test2.txt")));
Ok(())
}
@ -457,9 +504,37 @@ mod tests {
std::fs::remove_file(repository.path().parent().unwrap().join("test.txt"))?;
let reader = CommitReader::from_commit(&repository, &repository.find_commit(oid)?)?;
assert!(reader.exists("test.txt"));
assert!(!reader.exists("test2.txt"));
assert!(reader.exists(path::Path::new("test.txt")));
assert!(!reader.exists(path::Path::new("test2.txt")));
Ok(())
}
#[test]
fn test_from_bytes() {
vec![
("test".as_bytes(), Content::UTF8("test".to_string())),
(&[0, 159, 146, 150, 159, 146, 150], Content::Binary),
]
.into_iter()
.for_each(|(bytes, expected)| {
assert_eq!(Content::from(bytes), expected);
});
}
#[test]
fn test_serialize_content() {
vec![
(
Content::UTF8("test".to_string()),
r#"{"type":"utf8","value":"test"}"#,
),
(Content::Binary, r#"{"type":"binary"}"#),
(Content::Large, r#"{"type":"large"}"#),
]
.into_iter()
.for_each(|(content, expected)| {
assert_eq!(serde_json::to_string(&content).unwrap(), expected);
});
}
}
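The typed read_usize/read_u128/read_bool helpers on the trait are gone; callers now go through TryFrom<Content>, so a parse failure, a binary file, and an oversized file each surface as a distinct FromError. A hedged sketch of the new call shape, using the trait, path, and types defined above.

fn read_start_timestamp(reader: &dyn Reader) -> anyhow::Result<u128> {
    let content = reader.read(std::path::Path::new("session/meta/start"))?;
    // fails with FromError::ParseInt / Binary / Large instead of a generic io error
    let timestamp: u128 = content.try_into()?;
    Ok(timestamp)
}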

View File

@ -14,7 +14,7 @@ use tantivy::{query::QueryParser, Term};
use tantivy::{schema::IndexRecordOption, tokenizer};
use tauri::AppHandle;
use crate::{bookmarks, deltas, gb_repository, sessions};
use crate::{bookmarks, deltas, gb_repository, reader, sessions};
use super::{index, meta};
@ -330,18 +330,20 @@ fn index_session(
return Ok(());
}
let files = session_reader
.files(Some(deltas.keys().map(|k| k.as_str()).collect()))
.with_context(|| "could not list files for session")?;
.files(Some(deltas.keys().map(|s| s.to_path_buf()).collect()))
.context("could not list files for session")?;
// index every file
for (file_path, deltas) in deltas.into_iter() {
// keep the state of the file after each delta operation
// we need it to calculate diff for delete operations
let mut file_text: Vec<char> = files
.get(&file_path)
.map(|f| f.as_str())
.unwrap_or("")
.chars()
.collect();
.map(|content| match content {
reader::Content::UTF8(text) => text.chars().collect(),
_ => vec![],
})
.unwrap_or_default();
// for every delta of the file

for (i, delta) in deltas.into_iter().enumerate() {
index_delta(
@ -393,7 +395,7 @@ fn index_delta(
session_id: &str,
project_id: &str,
file_text: &mut Vec<char>,
file_path: &str,
file_path: &path::Path,
i: usize,
delta: &deltas::Delta,
) -> Result<()> {
@ -417,9 +419,10 @@ fn index_delta(
// for every operation in the delta
for operation in &delta.operations {
// don't forget to apply the operation to the file_text
operation
.apply(file_text)
.with_context(|| format!("Could not apply operation to file {}", file_path))?;
operation.apply(file_text).context(format!(
"Could not apply operation to file {}",
file_path.display()
))?;
}
let old = &prev_file_text.iter().collect::<String>();
@ -440,7 +443,7 @@ fn index_delta(
doc.index = Some(i.try_into()?);
doc.session_id = Some(session_id.to_string());
doc.file_path = Some(file_path.to_string());
doc.file_path = Some(file_path.display().to_string());
doc.project_id = Some(project_id.to_string());
doc.timestamp_ms = Some(delta.timestamp_ms.try_into()?);
doc.diff = Some(changes);

View File

@ -17,48 +17,40 @@ pub struct SessionReader<'reader> {
}
impl Reader for SessionReader<'_> {
fn read(&self, file_path: &str) -> Result<reader::Content, reader::Error> {
fn read(&self, file_path: &path::Path) -> Result<reader::Content, reader::Error> {
self.reader.read(file_path)
}
fn list_files(&self, dir_path: &str) -> Result<Vec<String>> {
fn list_files(&self, dir_path: &path::Path) -> Result<Vec<path::PathBuf>> {
self.reader.list_files(dir_path)
}
fn exists(&self, file_path: &str) -> bool {
self.reader.exists(file_path)
}
fn size(&self, file_path: &str) -> Result<usize> {
self.reader.size(file_path)
}
fn is_dir(&self, file_path: &str) -> bool {
fn is_dir(&self, file_path: &path::Path) -> bool {
self.reader.is_dir(file_path)
}
fn exists(&self, file_path: &path::Path) -> bool {
self.reader.exists(file_path)
}
}
impl<'reader> SessionReader<'reader> {
pub fn open(repository: &'reader gb_repository::Repository, session: &Session) -> Result<Self> {
let wd_reader = reader::DirReader::open(repository.root());
let current_session_id = wd_reader.read_string(
repository
.session_path()
.join("meta")
.join("id")
.to_str()
.unwrap(),
);
if current_session_id.is_ok() && current_session_id.as_ref().unwrap() == &session.id {
let head_commit = repository.git_repository.head()?.peel_to_commit()?;
return Ok(SessionReader {
reader: Box::new(wd_reader),
previous_reader: CommitReader::from_commit(
&repository.git_repository,
&head_commit,
)?,
});
if let Ok(reader::Content::UTF8(current_session_id)) =
wd_reader.read(&repository.session_path().join("meta").join("id"))
{
if current_session_id == session.id {
let head_commit = repository.git_repository.head()?.peel_to_commit()?;
return Ok(SessionReader {
reader: Box::new(wd_reader),
previous_reader: CommitReader::from_commit(
&repository.git_repository,
&head_commit,
)?,
});
}
}
let session_hash = if let Some(hash) = &session.hash {
@ -89,8 +81,11 @@ impl<'reader> SessionReader<'reader> {
})
}
pub fn files(&self, paths: Option<Vec<&str>>) -> Result<HashMap<String, String>> {
let files = self.previous_reader.list_files("wd")?;
pub fn files(
&self,
paths: Option<Vec<path::PathBuf>>,
) -> Result<HashMap<path::PathBuf, reader::Content>> {
let files = self.previous_reader.list_files(path::Path::new("wd"))?;
let mut files_with_content = HashMap::new();
for file_path in files {
if let Some(paths) = paths.as_ref() {
@ -98,12 +93,7 @@ impl<'reader> SessionReader<'reader> {
continue;
}
}
match self.file(&file_path)? {
reader::Content::UTF8(content) => {
files_with_content.insert(file_path.clone(), content);
}
reader::Content::Binary(_) => {}
}
files_with_content.insert(file_path.clone(), self.file(&file_path)?);
}
Ok(files_with_content)
@ -112,6 +102,6 @@ impl<'reader> SessionReader<'reader> {
pub fn file<P: AsRef<path::Path>>(&self, path: P) -> Result<reader::Content, reader::Error> {
let path = path.as_ref();
self.previous_reader
.read(std::path::Path::new("wd").join(path).to_str().unwrap())
.read(&std::path::Path::new("wd").join(path))
}
}
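Callers of files() now get Content values keyed by PathBuf; picking out a text file looks roughly like this hedged sketch, where the map type matches the signature above and Binary, Large, and missing entries are all treated the same way.

fn text_of(
    files: &std::collections::HashMap<std::path::PathBuf, reader::Content>,
    name: &str,
) -> Option<String> {
    match files.get(std::path::Path::new(name)) {
        Some(reader::Content::UTF8(text)) => Some(text.clone()),
        _ => None, // Binary, Large, and missing entries all mean "no text"
    }
}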

View File

@ -1,3 +1,5 @@
use std::path;
use anyhow::{Context, Result};
use serde::Serialize;
use thiserror::Error;
@ -38,30 +40,39 @@ impl TryFrom<&dyn reader::Reader> for Session {
type Error = SessionError;
fn try_from(reader: &dyn reader::Reader) -> Result<Self, Self::Error> {
if !reader.exists("session/meta") {
if !reader.exists(path::Path::new("session/meta")) {
return Err(SessionError::NoSession);
}
let id = reader
.read_string("session/meta/id")
.with_context(|| "failed to read session id")
let id: String = reader
.read(path::Path::new("session/meta/id"))
.context("failed to read session id")
.map_err(SessionError::Err)?
.try_into()
.context("failed to parse session id")
.map_err(SessionError::Err)?;
let start_timestamp_ms = reader
.read_string("session/meta/start")
.with_context(|| "failed to read session start timestamp")
.read(path::Path::new("session/meta/start"))
.context("failed to read session start timestamp")
.map_err(SessionError::Err)?
.parse::<u128>()
.with_context(|| "failed to parse session start timestamp")
.try_into()
.context("failed to parse session start timestamp")
.map_err(SessionError::Err)?;
let last_timestamp_ms = reader
.read_string("session/meta/last")
.with_context(|| "failed to read session last timestamp")
.read(path::Path::new("session/meta/last"))
.context("failed to read session last timestamp")
.map_err(SessionError::Err)?
.parse::<u128>()
.with_context(|| "failed to parse session last timestamp")
.try_into()
.context("failed to parse session last timestamp")
.map_err(SessionError::Err)?;
let branch = reader.read_string("session/meta/branch");
let commit = reader.read_string("session/meta/commit");
let branch = match reader.read(path::Path::new("session/meta/branch")) {
Ok(reader::Content::UTF8(branch)) => Some(branch.to_string()),
_ => None,
};
let commit = match reader.read(path::Path::new("session/meta/commit")) {
Ok(reader::Content::UTF8(commit)) => Some(commit.to_string()),
_ => None,
};
Ok(Self {
id,
@ -69,16 +80,8 @@ impl TryFrom<&dyn reader::Reader> for Session {
meta: Meta {
start_timestamp_ms,
last_timestamp_ms,
branch: if let Ok(branch) = branch {
Some(branch)
} else {
None
},
commit: if let Ok(commit) = commit {
Some(commit)
} else {
None
},
branch,
commit,
},
})
}

View File

@ -1,4 +1,4 @@
use std::time;
use std::{path, time};
use anyhow::{anyhow, Context, Result};
@ -28,9 +28,15 @@ impl<'writer> SessionWriter<'writer> {
let reader = reader::DirReader::open(self.repository.root());
let current_session_id = reader.read_string("session/meta/id");
let current_session_id = if let Ok(reader::Content::UTF8(current_session_id)) =
reader.read(&path::PathBuf::from("session/meta/id"))
{
Some(current_session_id)
} else {
None
};
if current_session_id.is_ok() && !current_session_id.as_ref().unwrap().eq(&session.id) {
if current_session_id.is_some() && current_session_id.as_ref() != Some(&session.id) {
return Err(anyhow!(
"{}: can not open writer for {} because a writer for {} is still open",
self.repository.project_id,
@ -51,7 +57,7 @@ impl<'writer> SessionWriter<'writer> {
)
.context("failed to write last timestamp")?;
if current_session_id.is_ok() && current_session_id.as_ref().unwrap().eq(&session.id) {
if current_session_id.is_some() && current_session_id.as_ref() == Some(&session.id) {
return Ok(());
}

View File

@ -10,6 +10,8 @@ pub use ownership::Ownership;
pub use reader::BranchReader as Reader;
pub use writer::BranchWriter as Writer;
use std::path;
use serde::{Deserialize, Serialize};
use anyhow::Result;
@ -57,49 +59,29 @@ impl TryFrom<&dyn crate::reader::Reader> for Branch {
type Error = crate::reader::Error;
fn try_from(reader: &dyn crate::reader::Reader) -> Result<Self, Self::Error> {
let id = reader.read_string("id").map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
std::io::ErrorKind::Other,
format!("id: {}", e),
))
})?;
let name = reader.read_string("meta/name").map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
std::io::ErrorKind::Other,
format!("meta/name: {}", e),
))
})?;
let id: String = reader.read(&path::PathBuf::from("id"))?.try_into()?;
let name: String = reader.read(&path::PathBuf::from("meta/name"))?.try_into()?;
let notes = match reader.read_string("meta/notes") {
Ok(notes) => Ok(notes),
let notes: String = match reader.read(&path::PathBuf::from("meta/notes")) {
Ok(notes) => Ok(notes.try_into()?),
Err(crate::reader::Error::NotFound) => Ok("".to_string()),
Err(e) => Err(e),
}?;
let applied = reader
.read_bool("meta/applied")
.map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
std::io::ErrorKind::Other,
format!("meta/applied: {}", e),
))
})
.or(Ok(false))?;
let applied = match reader.read(&path::PathBuf::from("meta/applied")) {
Ok(applied) => applied.try_into(),
_ => Ok(false),
}
.unwrap_or(false);
let order = match reader.read_usize("meta/order") {
Ok(order) => Ok(order),
let order: usize = match reader.read(&path::PathBuf::from("meta/order")) {
Ok(order) => Ok(order.try_into()?),
Err(crate::reader::Error::NotFound) => Ok(0),
Err(e) => Err(e),
}
.map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
std::io::ErrorKind::Other,
format!("meta/order: {}", e),
))
})?;
}?;
let upstream = match reader.read_string("meta/upstream") {
Ok(upstream) => {
let upstream = match reader.read(&path::PathBuf::from("meta/upstream")) {
Ok(crate::reader::Content::UTF8(upstream)) => {
if upstream.is_empty() {
Ok(None)
} else {
@ -107,50 +89,32 @@ impl TryFrom<&dyn crate::reader::Reader> for Branch {
.parse::<git::RemoteBranchName>()
.map(Some)
.map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
crate::reader::Error::Io(std::io::Error::new(
std::io::ErrorKind::Other,
format!("meta/upstream: {}", e),
))
})
}
}
Ok(_) => Ok(None),
Err(crate::reader::Error::NotFound) => Ok(None),
Err(e) => Err(e),
}?;
let tree = reader.read_string("meta/tree").map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
std::io::ErrorKind::Other,
format!("meta/tree: {}", e),
))
})?;
let head = reader.read_string("meta/head").map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
std::io::ErrorKind::Other,
format!("meta/head: {}", e),
))
})?;
let created_timestamp_ms = reader.read_u128("meta/created_timestamp_ms").map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
std::io::ErrorKind::Other,
format!("meta/created_timestamp_ms: {}", e),
))
})?;
let updated_timestamp_ms = reader.read_u128("meta/updated_timestamp_ms").map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
std::io::ErrorKind::Other,
format!("meta/updated_timestamp_ms: {}", e),
))
})?;
let tree: String = reader.read(&path::PathBuf::from("meta/tree"))?.try_into()?;
let head: String = reader.read(&path::PathBuf::from("meta/head"))?.try_into()?;
let created_timestamp_ms = reader
.read(&path::PathBuf::from("meta/created_timestamp_ms"))?
.try_into()?;
let updated_timestamp_ms = reader
.read(&path::PathBuf::from("meta/updated_timestamp_ms"))?
.try_into()?;
let ownership_string = reader.read_string("meta/ownership").map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
std::io::ErrorKind::Other,
format!("meta/ownership: {}", e),
))
})?;
let ownership_string: String = reader
.read(&path::PathBuf::from("meta/ownership"))?
.try_into()?;
let ownership = ownership_string.parse().map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
crate::reader::Error::Io(std::io::Error::new(
std::io::ErrorKind::Other,
format!("meta/ownership: {}", e),
))
@ -163,13 +127,13 @@ impl TryFrom<&dyn crate::reader::Reader> for Branch {
applied,
upstream,
tree: tree.parse().map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
crate::reader::Error::Io(std::io::Error::new(
std::io::ErrorKind::Other,
format!("meta/tree: {}", e),
))
})?,
head: head.parse().map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
crate::reader::Error::Io(std::io::Error::new(
std::io::ErrorKind::Other,
format!("meta/head: {}", e),
))

View File

@ -1,3 +1,5 @@
use std::path;
use crate::reader::{self, Reader, SubReader};
use super::Branch;
@ -16,7 +18,10 @@ impl<'reader> BranchReader<'reader> {
}
pub fn read(&self, id: &str) -> Result<Branch, reader::Error> {
if !self.reader.exists(&format!("branches/{}", id)) {
if !self
.reader
.exists(&path::PathBuf::from(format!("branches/{}", id)))
{
return Err(reader::Error::NotFound);
}

View File

@ -1,4 +1,4 @@
use std::collections::HashSet;
use std::{collections::HashSet, path};
use anyhow::Result;
@ -14,9 +14,17 @@ pub struct BranchIterator<'iterator> {
impl<'iterator> BranchIterator<'iterator> {
pub fn new(reader: &'iterator dyn reader::Reader) -> Result<Self> {
let ids_itarator = reader
.list_files("branches")?
.list_files(&path::PathBuf::from("branches"))?
.into_iter()
.map(|file_path| file_path.split('/').next().unwrap().to_string())
.map(|file_path| {
file_path
.display()
.to_string()
.split('/')
.next()
.unwrap()
.to_string()
})
.filter(|file_path| file_path != "selected")
.filter(|file_path| file_path != "target");
let unique_ids: HashSet<String> = ids_itarator.collect();

View File

@ -1,6 +1,8 @@
mod reader;
mod writer;
use std::path;
use serde::{ser::SerializeStruct, Serialize, Serializer};
pub use reader::TargetReader as Reader;
@ -31,10 +33,12 @@ impl Serialize for Target {
// this is backwards compatible with the old format
fn read_remote_url(reader: &dyn crate::reader::Reader) -> Result<String, crate::reader::Error> {
match reader.read_string("remote_url") {
Ok(url) => Ok(url),
match reader.read(&path::PathBuf::from("remote_url")) {
Ok(url) => Ok(url.try_into()?),
// fallback to the old format
Err(crate::reader::Error::NotFound) => reader.read_string("remote"),
Err(crate::reader::Error::NotFound) => {
Ok(reader.read(&path::PathBuf::from("remote"))?.try_into()?)
}
Err(e) => Err(e),
}
}
@ -43,15 +47,20 @@ fn read_remote_url(reader: &dyn crate::reader::Reader) -> Result<String, crate::
fn read_remote_name_branch_name(
reader: &dyn crate::reader::Reader,
) -> Result<(String, String), crate::reader::Error> {
match reader.read_string("name") {
match reader.read(&path::PathBuf::from("name")) {
Ok(branch) => {
let branch: String = branch.try_into()?;
let parts = branch.split('/').collect::<Vec<_>>();
Ok((parts[0].to_string(), branch))
Ok((parts[0].to_string(), branch.to_string()))
}
Err(crate::reader::Error::NotFound) => {
// fallback to the old format
let remote_name = reader.read_string("remote_name")?;
let branch_name = reader.read_string("branch_name")?;
let remote_name: String = reader
.read(&path::PathBuf::from("remote_name"))?
.try_into()?;
let branch_name: String = reader
.read(&path::PathBuf::from("branch_name"))?
.try_into()?;
Ok((remote_name, branch_name))
}
Err(e) => Err(e),
@ -63,32 +72,24 @@ impl TryFrom<&dyn crate::reader::Reader> for Target {
fn try_from(reader: &dyn crate::reader::Reader) -> Result<Self, Self::Error> {
let (_, branch_name) = read_remote_name_branch_name(reader).map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
crate::reader::Error::Io(std::io::Error::new(
std::io::ErrorKind::Other,
format!("branch: {}", e),
))
})?;
let remote_url = read_remote_url(reader).map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
crate::reader::Error::Io(std::io::Error::new(
std::io::ErrorKind::Other,
format!("remote: {}", e),
))
})?;
let sha = reader
.read_string("sha")
.map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
std::io::ErrorKind::Other,
format!("sha: {}", e),
))
})?
.parse()
.map_err(|e| {
crate::reader::Error::IOError(std::io::Error::new(
std::io::ErrorKind::InvalidData,
format!("sha: {}", e),
))
})?;
let sha: String = reader.read(&path::PathBuf::from("sha"))?.try_into()?;
let sha = sha.parse().map_err(|e| {
crate::reader::Error::Io(std::io::Error::new(
std::io::ErrorKind::InvalidData,
format!("sha: {}", e),
))
})?;
Ok(Self {
branch: format!("refs/remotes/{}", branch_name).parse().unwrap(),

View File

@ -1,3 +1,5 @@
use std::path;
use crate::reader::{self, SubReader};
use super::Target;
@ -12,7 +14,7 @@ impl<'reader> TargetReader<'reader> {
}
pub fn read_default(&self) -> Result<Target, reader::Error> {
if !self.reader.exists("branches/target") {
if !self.reader.exists(&path::PathBuf::from("branches/target")) {
return Err(reader::Error::NotFound);
}
@ -21,7 +23,10 @@ impl<'reader> TargetReader<'reader> {
}
pub fn read(&self, id: &str) -> Result<Target, reader::Error> {
if !self.reader.exists(&format!("branches/{}/target", id)) {
if !self
.reader
.exists(&path::PathBuf::from(format!("branches/{}/target", id)))
{
return self.read_default();
}

View File

@ -1,6 +1,6 @@
use std::{fmt::Display, path, time};
use crate::{analytics, bookmarks, deltas, events, sessions};
use crate::{analytics, bookmarks, deltas, events, reader, sessions};
#[derive(Debug, PartialEq, Clone)]
pub enum Event {
@ -16,7 +16,7 @@ pub enum Event {
ProjectFileChange(String, path::PathBuf),
Session(String, sessions::Session),
SessionFile((String, String, path::PathBuf, String)),
SessionFile((String, String, path::PathBuf, Option<reader::Content>)),
SessionDelta((String, String, path::PathBuf, deltas::Delta)),
Bookmark(bookmarks::Bookmark),

View File

@ -45,7 +45,7 @@ impl Handler {
&self,
project_id: &str,
session_id: &str,
file_path: &str,
file_path: &path::Path,
deltas: &Vec<deltas::Delta>,
) -> Result<Vec<events::Event>> {
self.deltas_database

View File

@ -96,7 +96,7 @@ impl Handler {
project_id,
session_id,
&file_path.display().to_string(),
contents,
contents.as_ref(),
))])
}
@ -108,12 +108,7 @@ impl Handler {
events::Event::SessionDelta((project_id, session_id, path, delta)) => {
let mut events = self
.index_handler
.index_deltas(
project_id,
session_id,
path.to_str().unwrap(),
&vec![delta.clone()],
)
.index_deltas(project_id, session_id, path, &vec![delta.clone()])
.context("failed to index deltas")?;
events.push(events::Event::Emit(app_events::Event::deltas(

View File

@ -48,35 +48,18 @@ impl TryFrom<&AppHandle> for Handler {
impl Handler {
// Returns the current file content, or Error::NotFound if the file is ignored.
fn get_current_file_content(
fn get_current_file(
&self,
project_repository: &project_repository::Repository,
project_id: &str,
path: &std::path::Path,
) -> Result<Option<String>> {
if project_repository.is_path_ignored(path)? {
return Ok(None);
) -> Result<reader::Content, reader::Error> {
if project_repository.is_path_ignored(path).unwrap_or(false) {
return Err(reader::Error::NotFound);
}
let reader = project_repository.get_wd_reader();
let path = path.to_str().unwrap();
if !reader.exists(path) {
return Ok(Some(String::new()));
}
if reader.size(path)? > 100_000 {
tracing::warn!(project_id, path, "ignoring large file");
return Ok(None);
}
match reader.read(path)? {
reader::Content::UTF8(content) => Ok(Some(content)),
reader::Content::Binary(_) => {
tracing::warn!(project_id, path, "ignoring non-utf8 file");
Ok(None)
}
}
reader.read(path)
}
// returns deltas for the file that are already part of the current session (if any)
@ -138,27 +121,22 @@ impl Handler {
let path = path.as_ref();
let current_wd_file_content = match self
.get_current_file_content(&project_repository, project_id, path)
.context("failed to get current file content")?
{
Some(content) => content,
None => return Ok(vec![]),
let current_wd_file_content = match self.get_current_file(&project_repository, path) {
Ok(content) => Some(content),
Err(reader::Error::NotFound) => None,
Err(err) => Err(err).context("failed to get file content")?,
};
let current_session = gb_repository
.get_or_create_current_session()
.context("failed to get or create current session")?;
let current_session_reader = sessions::Reader::open(&gb_repository, &current_session)
.context("failed to get session reader")?;
let latest_file_content = match current_session_reader.file(path) {
Ok(reader::Content::UTF8(content)) => content,
Ok(reader::Content::Binary(_)) => {
tracing::warn!(project_id, path = %path.display(), "ignoring non-utf8 file");
return Ok(vec![]);
}
Err(reader::Error::NotFound) => "".to_string(),
Ok(content) => Some(content),
Err(reader::Error::NotFound) => None,
Err(err) => Err(err).context("failed to get file content")?,
};
@ -167,13 +145,14 @@ impl Handler {
.with_context(|| "failed to get current deltas")?;
let mut text_doc = deltas::Document::new(
Some(&latest_file_content),
latest_file_content.as_ref(),
current_deltas.unwrap_or_default(),
)?;
let new_delta = text_doc
.update(&current_wd_file_content)
.update(current_wd_file_content.as_ref())
.context("failed to calculate new deltas")?;
if new_delta.is_none() {
tracing::debug!(project_id, path = %path.display(), "no new deltas, ignoring");
return Ok(vec![]);
@ -185,9 +164,12 @@ impl Handler {
writer
.write(path, &deltas)
.with_context(|| "failed to write deltas")?;
writer
.write_wd_file(path, &current_wd_file_content)
.with_context(|| "failed to write file")?;
if let Some(reader::Content::UTF8(text)) = current_wd_file_content {
writer
.write_wd_file(path, &text)
.with_context(|| "failed to write file")?;
}
Ok(vec![
events::Event::SessionFile((
@ -471,7 +453,7 @@ mod test {
}
#[test]
fn test_register_file_delted() -> Result<()> {
fn test_register_file_deleted() -> Result<()> {
let repository = test_utils::test_repository();
let project = projects::Project::try_from(&repository)?;
let project_repo = project_repository::Repository::open(&project)?;
@ -598,10 +580,10 @@ mod test {
assert_eq!(files.len(), 1);
}
let base_file = files.get(&relative_file_path.to_str().unwrap().to_string());
let base_file = files.get(&relative_file_path.to_path_buf());
let mut text: Vec<char> = match base_file {
Some(file) => file.chars().collect(),
None => vec![],
Some(reader::Content::UTF8(file)) => file.chars().collect(),
_ => vec![],
};
for operation in operations {
@ -688,10 +670,10 @@ mod test {
assert_eq!(files.len(), 1);
}
let base_file = files.get(&relative_file_path.to_str().unwrap().to_string());
let base_file = files.get(&relative_file_path.to_path_buf());
let mut text: Vec<char> = match base_file {
Some(file) => file.chars().collect(),
None => vec![],
Some(reader::Content::UTF8(file)) => file.chars().collect(),
_ => vec![],
};
for operation in operations {
@ -749,10 +731,10 @@ mod test {
let reader = sessions::Reader::open(&gb_repo, &session).unwrap();
let files = reader.files(None).unwrap();
let base_file = files.get(&relative_file_path.to_str().unwrap().to_string());
let base_file = files.get(&relative_file_path.to_path_buf());
let mut text: Vec<char> = match base_file {
Some(file) => file.chars().collect(),
None => vec![],
Some(reader::Content::UTF8(file)) => file.chars().collect(),
_ => vec![],
};
for operation in operations {

View File

@ -1,14 +1,16 @@
import { invoke, listen } from '$lib/ipc';
export type FileContent = { type: 'utf8'; value: string } | { type: 'binary' } | { type: 'large' };
export async function list(params: { projectId: string; sessionId: string; paths?: string[] }) {
return invoke<Partial<Record<string, string>>>('list_session_files', params);
return invoke<Partial<Record<string, FileContent>>>('list_session_files', params);
}
export function subscribe(
params: { projectId: string; sessionId: string },
callback: (params: { filePath: string; contents: string }) => Promise<void> | void
callback: (params: { filePath: string; contents: FileContent | null }) => Promise<void> | void
) {
return listen<{ contents: string; filePath: string }>(
return listen<{ contents: FileContent | null; filePath: string }>(
`project://${params.projectId}/sessions/${params.sessionId}/files`,
(event) => callback({ ...params, ...event.payload })
);

View File

@ -2,7 +2,7 @@ import { writable, type Loadable, Loaded } from 'svelte-loadable-store';
import * as files from '$lib/api/ipc/files';
import { get, type Readable } from '@square/svelte-store';
type Files = Partial<Record<string, string>>;
type Files = Partial<Record<string, files.FileContent>>;
const stores: Partial<Record<string, Readable<Loadable<Files>>>> = {};
@ -24,7 +24,7 @@ export function getFilesStore(params: {
} else {
set({
...oldValue.value,
[filePath]: contents
[filePath]: contents || undefined
});
}
});

View File

@ -11,6 +11,7 @@ import { invoke } from '$lib/ipc';
import { isDelete, isInsert, type Delta } from '$lib/api/ipc/deltas';
import type { Session } from '$lib/api/ipc/sessions';
import { get } from 'svelte/store';
import type { FileContent } from '$lib/api/ipc/files';
export function getVirtualBranchStore(
projectId: string,
@ -116,7 +117,7 @@ export async function withFileContent(
.map((branch) => branch.files)
.flat()
.map((file) => file.path);
const sessionFiles = await invoke<Partial<Record<string, string>>>('list_session_files', {
const sessionFiles = await invoke<Partial<Record<string, FileContent>>>('list_session_files', {
projectId: projectId,
sessionId: sessionId,
paths: filePaths
@ -128,9 +129,13 @@ export async function withFileContent(
});
const branchesWithContnent = branches.map((branch) => {
branch.files.map((file) => {
const contentAtSessionStart = sessionFiles[file.path] || '';
const contentAtSessionStart = sessionFiles[file.path];
const ds = sessionDeltas[file.path] || [];
file.content = applyDeltas(contentAtSessionStart, ds);
if (contentAtSessionStart?.type === 'utf8') {
file.content = applyDeltas(contentAtSessionStart.value, ds);
} else {
file.content = applyDeltas('', ds);
}
});
return branch;
});

View File

@ -49,7 +49,15 @@
),
files: derived(getFilesStore({ projectId, sessionId: session.id }), (files) =>
Object.fromEntries(
Object.entries(files).filter(([path]) => (filter ? path === filter : true))
Object.entries(files)
.filter(([path]) => (filter ? path === filter : true))
.map(([path, file]) => {
if (file?.type === 'utf8') {
return [path, file.value];
} else {
return [path, undefined];
}
})
)
)
}))

View File

@ -28,7 +28,14 @@
index
}: search.SearchResult) => {
const [doc, dd] = await Promise.all([
files.list({ projectId, sessionId, paths: [filePath] }).then((r) => r[filePath] ?? ''),
files.list({ projectId, sessionId, paths: [filePath] }).then((r) => {
const file = r[filePath];
if (file?.type === 'utf8') {
return file.value;
} else {
return '';
}
}),
deltas
.listDeltas({ projectId, sessionId, paths: [filePath] })
.then((r) => r[filePath] ?? [])

View File

@ -35,6 +35,8 @@
export let selectable = false;
export let selectedOwnership: Writable<Ownership>;
$: console.log(file)
const userSettings = getContext<SettingsStore>(SETTINGS_CONTEXT);
const dispatch = createEventDispatcher<{
expanded: boolean;