Merge pull request #3750 from gitbutlerapp/deltas-reader-file-session-listing

remove unused sessions / deltas implementation
This commit is contained in:
Kiril Videlov 2024-05-12 03:52:44 +02:00 committed by GitHub
commit 459d17f547
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
39 changed files with 1 addition and 3965 deletions

View File

@ -1,47 +0,0 @@
use std::{path, sync::Arc};
use anyhow::{Context, Result};
use r2d2::Pool;
use r2d2_sqlite::SqliteConnectionManager;
use refinery::config::Config;
use rusqlite::Transaction;
// Embeds the SQL migration files into the binary at compile time so they
// can be applied at runtime by the refinery runner.
mod embedded {
    use refinery::embed_migrations;
    embed_migrations!("src/database/migrations");
}
/// Shared handle to a pooled SQLite connection.
///
/// Cloning is cheap: all clones share the same underlying connection pool.
#[derive(Clone)]
pub struct Database {
    // Arc-wrapped so clones share one pool instead of opening new ones.
    pool: Arc<Pool<SqliteConnectionManager>>,
}
impl Database {
pub fn open_in_directory<P: AsRef<path::Path>>(path: P) -> Result<Self> {
let path = path.as_ref().to_path_buf().join("database.sqlite3");
let manager = SqliteConnectionManager::file(&path);
let pool = r2d2::Pool::new(manager)?;
let mut cfg = Config::new(refinery::config::ConfigDbType::Sqlite)
.set_db_path(path.as_path().to_str().unwrap());
embedded::migrations::runner()
.run(&mut cfg)
.map(|report| {
report
.applied_migrations()
.iter()
.for_each(|migration| tracing::info!(%migration, "migration applied"));
})
.context("Failed to run migrations")?;
Ok(Self {
pool: Arc::new(pool),
})
}
pub fn transaction<T>(&self, f: impl FnOnce(&Transaction) -> Result<T>) -> Result<T> {
let mut conn = self.pool.get()?;
let tx = conn.transaction().context("Failed to start transaction")?;
let result = f(&tx)?;
tx.commit().context("Failed to commit transaction")?;
Ok(result)
}
}

View File

@ -1,12 +0,0 @@
-- Per-file text deltas captured during a session.
CREATE TABLE `deltas` (
    `session_id` text NOT NULL,
    `project_id` text NOT NULL,
    -- Stored as text so arbitrarily large millisecond timestamps fit.
    `timestamp_ms` text NOT NULL,
    -- JSON-serialized list of operations (see the deltas writer code).
    `operations` blob NOT NULL,
    `file_path` text NOT NULL,
    PRIMARY KEY (`project_id`, `session_id`, `timestamp_ms`, `file_path`)
);
-- Indexes for session-scoped and file-scoped delta lookups.
CREATE INDEX `deltas_project_id_session_id_index` ON `deltas` (`project_id`, `session_id`);
CREATE INDEX `deltas_project_id_session_id_file_path_index` ON `deltas` (`project_id`, `session_id`, `file_path`);

View File

@ -1,11 +0,0 @@
-- One row per recording session.
CREATE TABLE `sessions` (
    `id` text NOT NULL PRIMARY KEY,
    `project_id` text NOT NULL,
    -- Commit hash once the session has been flushed; NULL until then.
    `hash` text,
    -- Branch / commit the project HEAD pointed at when the session started.
    `branch` text,
    `commit` text,
    `start_timestamp_ms` text NOT NULL,
    `last_timestamp_ms` text NOT NULL
);
CREATE INDEX `sessions_project_id_index` ON `sessions` (`project_id`);

View File

@ -1,14 +0,0 @@
-- Session file snapshots, content-addressed into `contents` via sha1.
CREATE TABLE `files` (
    `project_id` text NOT NULL,
    `session_id` text NOT NULL,
    `file_path` text NOT NULL,
    -- Key into `contents`; identical file bodies are stored once.
    `sha1` blob NOT NULL,
    PRIMARY KEY (`project_id`, `session_id`, `file_path`)
);
CREATE INDEX `files_project_id_session_id_index` ON `files` (`project_id`, `session_id`);
-- Deduplicated file bodies keyed by their sha1.
CREATE TABLE `contents` (
    `sha1` blob NOT NULL PRIMARY KEY,
    `content` blob NOT NULL
);

View File

@ -1,8 +0,0 @@
-- User bookmarks: a timestamped note attached to a project.
CREATE TABLE `bookmarks` (
    `id` text NOT NULL PRIMARY KEY,
    `project_id` text NOT NULL,
    `timestamp_ms` text NOT NULL,
    `note` text NOT NULL
);
CREATE INDEX bookmarks_project_id_idx ON `bookmarks` (`project_id`);

View File

@ -1,16 +0,0 @@
-- Replace the single `timestamp_ms` column with explicit created/updated
-- timestamps plus a soft-delete flag.
ALTER TABLE `bookmarks`
ADD `created_timestamp_ms` text NOT NULL DEFAULT 0;
-- Preserve the original timestamp as the creation time.
UPDATE
    `bookmarks`
SET
    `created_timestamp_ms` = `timestamp_ms`;
ALTER TABLE `bookmarks`
DROP COLUMN `timestamp_ms`;
ALTER TABLE `bookmarks`
ADD `updated_timestamp_ms` text;
ALTER TABLE `bookmarks`
ADD `deleted` boolean NOT NULL DEFAULT FALSE;

View File

@ -1,28 +0,0 @@
-- Rebuild `bookmarks` with a (project_id, timestamp_ms) composite primary
-- key instead of the old `id` column, carrying data over from the old table.
ALTER TABLE bookmarks RENAME TO bookmarks_old;
DROP INDEX `bookmarks_project_id_idx`;
CREATE TABLE bookmarks (
    `project_id` text NOT NULL,
    `timestamp_ms` text NOT NULL,
    `note` text NOT NULL,
    `deleted` boolean NOT NULL,
    `created_timestamp_ms` text NOT NULL,
    `updated_timestamp_ms` text NOT NULL,
    PRIMARY KEY (`project_id`, `timestamp_ms`)
);
CREATE INDEX `bookmarks_project_id_idx` ON `bookmarks` (`project_id`);
-- `timestamp_ms` was dropped by the previous migration, so the new key
-- column is repopulated from `created_timestamp_ms`.
INSERT INTO bookmarks (`project_id`, `timestamp_ms`, `note`, `deleted`, `created_timestamp_ms`, `updated_timestamp_ms`)
SELECT
    `project_id`,
    `created_timestamp_ms`,
    `note`,
    `deleted`,
    `created_timestamp_ms`,
    `updated_timestamp_ms`
FROM
    bookmarks_old;
DROP TABLE bookmarks_old;

View File

@ -1 +0,0 @@
-- Speed up lookups of a specific session within a project.
CREATE INDEX `sessions_project_id_id_index` ON `sessions` (`project_id`, `id`);

View File

@ -1,2 +0,0 @@
-- The content-addressed file snapshot tables are no longer used.
DROP TABLE files;
DROP TABLE contents;

View File

@ -1 +0,0 @@
-- Bookmarks feature removed; drop its storage.
DROP TABLE bookmarks;

View File

@ -1,121 +0,0 @@
use std::{collections::HashMap, path};
use anyhow::{Context, Result};
use super::{delta, operations};
use crate::{database, projects::ProjectId, sessions::SessionId};
/// Delta-specific queries layered on top of the shared application database.
#[derive(Clone)]
pub struct Database {
    // Shared pooled handle; cloning this struct shares the same pool.
    database: database::Database,
}
impl Database {
    /// Wraps the shared database handle for delta storage and retrieval.
    pub fn new(database: database::Database) -> Database {
        Database { database }
    }

    /// Upserts every delta recorded for one file within a session.
    ///
    /// Rows are keyed by `(project_id, session_id, timestamp_ms, file_path)`;
    /// re-inserting an existing key overwrites its operations.
    ///
    /// # Errors
    /// Fails when operations cannot be serialized or a statement fails.
    pub fn insert(
        &self,
        project_id: &ProjectId,
        session_id: &SessionId,
        file_path: &path::Path,
        deltas: &[delta::Delta],
    ) -> Result<()> {
        self.database.transaction(|tx| -> Result<()> {
            let mut stmt = insert_stmt(tx).context("Failed to prepare insert statement")?;
            for delta in deltas {
                let operations = serde_json::to_vec(&delta.operations)
                    .context("Failed to serialize operations")?;
                // Timestamps are stored as text so u128 values fit.
                let timestamp_ms = delta.timestamp_ms.to_string();
                stmt.execute(rusqlite::named_params! {
                    ":project_id": project_id,
                    ":session_id": session_id,
                    ":file_path": file_path.display().to_string(),
                    ":timestamp_ms": timestamp_ms,
                    ":operations": operations,
                })
                .context("Failed to execute insert statement")?;
            }
            Ok(())
        })?;
        Ok(())
    }

    /// Lists a session's deltas grouped by file path, ordered by timestamp.
    ///
    /// When `file_path_filter` is `Some`, only the listed paths are kept —
    /// note the filtering happens in memory after the query runs.
    pub fn list_by_project_id_session_id(
        &self,
        project_id: &ProjectId,
        session_id: &SessionId,
        file_path_filter: &Option<Vec<&str>>,
    ) -> Result<HashMap<String, Vec<delta::Delta>>> {
        self.database
            .transaction(|tx| -> Result<HashMap<String, Vec<delta::Delta>>> {
                let mut stmt = list_by_project_id_session_id_stmt(tx)
                    .context("Failed to prepare query statement")?;
                let mut rows = stmt
                    .query(rusqlite::named_params! {
                        ":project_id": project_id,
                        ":session_id": session_id,
                    })
                    .context("Failed to execute query statement")?;
                // Use the canonical `delta::Delta` path (was `super::Delta`,
                // the same type re-exported, for consistency with the rest
                // of this file).
                let mut deltas: HashMap<String, Vec<delta::Delta>> = HashMap::new();
                while let Some(row) = rows
                    .next()
                    .context("Failed to iterate over query results")?
                {
                    let file_path: String = row.get(0).context("Failed to get file_path")?;
                    if let Some(file_path_filter) = &file_path_filter {
                        if !file_path_filter.contains(&file_path.as_str()) {
                            continue;
                        }
                    }
                    let timestamp_ms: String = row.get(1).context("Failed to get timestamp_ms")?;
                    let operations: Vec<u8> = row.get(2).context("Failed to get operations")?;
                    let operations: Vec<operations::Operation> =
                        serde_json::from_slice(&operations)
                            .context("Failed to deserialize operations")?;
                    // Fixed: the message previously claimed u64 while the
                    // parse target is u128.
                    let timestamp_ms: u128 = timestamp_ms
                        .parse()
                        .context("Failed to parse timestamp_ms as u128")?;
                    let delta = delta::Delta {
                        operations,
                        timestamp_ms,
                    };
                    // entry() groups by file path with a single map lookup
                    // instead of the get_mut/insert pair.
                    deltas.entry(file_path).or_default().push(delta);
                }
                Ok(deltas)
            })
    }
}
/// Prepares (and caches) the query for all deltas of one session,
/// ordered oldest-first by timestamp.
fn list_by_project_id_session_id_stmt<'conn>(
    tx: &'conn rusqlite::Transaction,
) -> Result<rusqlite::CachedStatement<'conn>> {
    Ok(tx.prepare_cached(
        "
SELECT `file_path`, `timestamp_ms`, `operations`
FROM `deltas`
WHERE `session_id` = :session_id AND `project_id` = :project_id
ORDER BY `timestamp_ms` ASC",
    )?)
}
/// Prepares (and caches) the upsert statement for a single delta row.
///
/// On a primary-key conflict only `operations` is overwritten, so a
/// re-recorded delta replaces the previous one for the same timestamp.
fn insert_stmt<'conn>(
    tx: &'conn rusqlite::Transaction,
) -> Result<rusqlite::CachedStatement<'conn>> {
    Ok(tx.prepare_cached(
        "INSERT INTO `deltas` (
`project_id`, `session_id`, `timestamp_ms`, `operations`, `file_path`
) VALUES (
:project_id, :session_id, :timestamp_ms, :operations, :file_path
)
ON CONFLICT(`project_id`, `session_id`, `file_path`, `timestamp_ms`) DO UPDATE SET
`operations` = :operations
",
    )?)
}

View File

@ -1,10 +0,0 @@
use serde::{Deserialize, Serialize};
use super::operations;
/// A timestamped group of text operations recorded against one file.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Delta {
    // Applied in order; offsets are character indices (see Operation::apply).
    pub operations: Vec<operations::Operation>,
    // Creation time in milliseconds — presumably since the Unix epoch
    // (produced by crate::time::now_ms); confirm against that helper.
    pub timestamp_ms: u128,
}

View File

@ -1,74 +0,0 @@
use std::fmt::{Display, Formatter};
use anyhow::Result;
use super::{delta, operations};
use crate::reader;
/// An in-memory text document reconstructed from a seed value plus a
/// sequence of deltas.
#[derive(Debug, Clone, Default)]
pub struct Document {
    // Current text, one element per char (operations index by char).
    doc: Vec<char>,
    // Deltas applied on top of the seed value, in application order.
    deltas: Vec<delta::Delta>,
}
/// Applies every operation of every delta to `doc`, in order.
///
/// Stops at the first failing operation; `doc` may be partially modified
/// in that case. Takes a slice instead of `&Vec` (existing `&Vec` call
/// sites coerce automatically).
fn apply_deltas(doc: &mut Vec<char>, deltas: &[delta::Delta]) -> Result<()> {
    for delta in deltas {
        for operation in &delta.operations {
            operation.apply(doc)?;
        }
    }
    Ok(())
}
impl Document {
    /// Returns a copy of the deltas applied on top of the seed value.
    pub fn get_deltas(&self) -> Vec<delta::Delta> {
        self.deltas.clone()
    }
    // returns a text document where internal state is seeded with value, and deltas are applied.
    pub fn new(value: Option<&reader::Content>, deltas: Vec<delta::Delta>) -> Result<Document> {
        let mut all_deltas = vec![];
        if let Some(reader::Content::UTF8(value)) = value {
            // Seed the document by treating the initial content as a
            // synthetic delta at timestamp 0.
            all_deltas.push(delta::Delta {
                operations: operations::get_delta_operations("", value),
                timestamp_ms: 0,
            });
        }
        all_deltas.append(&mut deltas.clone());
        let mut doc = vec![];
        apply_deltas(&mut doc, &all_deltas)?;
        // Only the caller-supplied deltas are retained; the seed delta is
        // folded into `doc` but not stored.
        Ok(Document { doc, deltas })
    }
    /// Diffs the current text against `value`; when something changed (or
    /// the content became empty/non-UTF8), applies and records a new delta.
    pub fn update(&mut self, value: Option<&reader::Content>) -> Result<Option<delta::Delta>> {
        // Missing or non-UTF8 content is treated as an empty document.
        let new_text = match value {
            Some(reader::Content::UTF8(value)) => value,
            Some(_) | None => "",
        };
        let operations = operations::get_delta_operations(&self.to_string(), new_text);
        if operations.is_empty() {
            // NOTE(review): a no-op update short-circuits only when the new
            // content is non-empty UTF-8; an empty or non-UTF8 value still
            // records an (empty) delta below — confirm this is intentional.
            if let Some(reader::Content::UTF8(value)) = value {
                if !value.is_empty() {
                    return Ok(None);
                }
            }
        }
        let delta = delta::Delta {
            operations,
            timestamp_ms: crate::time::now_ms(),
        };
        apply_deltas(&mut self.doc, &vec![delta.clone()])?;
        self.deltas.push(delta.clone());
        Ok(Some(delta))
    }
}
impl Display for Document {
    /// Renders the current document text.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let text: String = self.doc.iter().collect();
        f.write_str(&text)
    }
}

View File

@ -1,13 +0,0 @@
mod delta;
mod document;
mod reader;
mod writer;
pub mod database;
pub mod operations;
pub use database::Database;
pub use delta::Delta;
pub use document::Document;
pub use reader::DeltasReader as Reader;
pub use writer::DeltasWriter as Writer;

View File

@ -1,117 +0,0 @@
use std::cmp::Ordering;
use anyhow::Result;
use serde::{Deserialize, Serialize};
use similar::{ChangeTag, TextDiff};
/// A single text edit; all indices are measured in characters, matching the
/// `Vec<char>` representation used by `apply`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub enum Operation {
    // corresponds to YText.insert(index, chunk)
    // TODO(ST): Should probably be BString, but it's related to delta-code.
    Insert((usize, String)),
    // corresponds to YText.remove_range(index, len)
    Delete((usize, usize)),
}
impl Operation {
    /// Applies this operation to `text` in place.
    ///
    /// Fails without touching `text` when the operation's indices fall
    /// outside the current document bounds.
    pub fn apply(&self, text: &mut Vec<char>) -> Result<()> {
        match self {
            Operation::Insert((index, chunk)) => {
                let doc_len = text.len();
                if *index > doc_len {
                    Err(anyhow::anyhow!(
                        "Index out of bounds, {} > {}",
                        index,
                        doc_len
                    ))
                } else if *index == doc_len {
                    // Appending at the end: no shifting needed.
                    text.extend(chunk.chars());
                    Ok(())
                } else {
                    // Inserting mid-document: splice shifts the tail right.
                    text.splice(*index..*index, chunk.chars());
                    Ok(())
                }
            }
            Operation::Delete((index, len)) => {
                if *index > text.len() {
                    Err(anyhow::anyhow!(
                        "Index out of bounds, {} > {}",
                        index,
                        text.len()
                    ))
                } else if *index + *len > text.len() {
                    Err(anyhow::anyhow!(
                        "Index + length out of bounds, {} > {}",
                        index + len,
                        text.len()
                    ))
                } else {
                    // Remove the range by splicing in nothing.
                    text.splice(*index..(*index + *len), std::iter::empty());
                    Ok(())
                }
            }
        }
    }
}
// merges touching operations of the same type in to one operation
// e.g. [Insert((0, "hello")), Insert((5, " world"))] -> [Insert((0, "hello world"))]
// e.g. [Delete((0, 5)), Delete((5, 5))] -> [Delete((0, 10))]
// e.g. [Insert((0, "hello")), Delete((0, 5))] -> [Insert((0, "hello")), Delete((0, 5))]
fn merge_touching(ops: &Vec<Operation>) -> Vec<Operation> {
let mut merged = vec![];
for op in ops {
match (merged.last_mut(), op) {
(Some(Operation::Insert((index, chunk))), Operation::Insert((index2, chunk2))) => {
if *index + chunk.len() == *index2 {
chunk.push_str(chunk2);
} else {
merged.push(op.clone());
}
}
(Some(Operation::Delete((index, len))), Operation::Delete((index2, len2))) => {
if *index == *index2 {
*len += len2;
} else {
merged.push(op.clone());
}
}
_ => merged.push(op.clone()),
}
}
merged
}
/// Computes the operations that transform `initial_text` into `final_text`.
///
/// Returns an empty list when the texts are equal. Offsets in the returned
/// operations are character indices into the evolving text.
pub fn get_delta_operations(initial_text: &str, final_text: &str) -> Vec<Operation> {
    if initial_text == final_text {
        return vec![];
    }
    // NOTE(review): the diff is computed over graphemes while offsets are
    // counted in chars — confirm multi-char graphemes behave as intended.
    let changeset = TextDiff::configure().diff_graphemes(initial_text, final_text);
    let mut deltas = vec![];
    let mut offset = 0;
    for change in changeset.iter_all_changes() {
        match change.tag() {
            ChangeTag::Delete => {
                // Offset stays put: deleting shifts later text back onto it.
                deltas.push(Operation::Delete((
                    offset,
                    change.as_str().unwrap_or("").chars().count(),
                )));
            }
            ChangeTag::Insert => {
                let text = change.as_str().unwrap();
                deltas.push(Operation::Insert((offset, text.to_string())));
                offset = change.new_index().unwrap() + text.chars().count();
            }
            ChangeTag::Equal => {
                // Unchanged text only advances the running offset.
                let text = change.as_str().unwrap();
                offset = change.new_index().unwrap() + text.chars().count();
            }
        }
    }
    // Collapse adjacent inserts/deletes into single operations.
    merge_touching(&deltas)
}

View File

@ -1,88 +0,0 @@
use std::{collections::HashMap, path};
use anyhow::{Context, Result};
use super::Delta;
use crate::{reader, sessions};
/// Reads serialized deltas out of a session directory.
pub struct DeltasReader<'reader> {
    reader: &'reader reader::Reader<'reader>,
}
impl<'reader> From<&'reader reader::Reader<'reader>> for DeltasReader<'reader> {
    /// Wraps a raw reader directly, without validation.
    fn from(reader: &'reader reader::Reader<'reader>) -> Self {
        DeltasReader { reader }
    }
}
/// Errors produced while reading deltas from a session.
#[derive(thiserror::Error, Debug)]
pub enum ReadError {
    // The requested file(s) do not exist in the session.
    #[error("not found")]
    NotFound,
    // Any other read or deserialization failure.
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}
impl<'reader> DeltasReader<'reader> {
    /// Creates a deltas reader on top of a session reader.
    pub fn new(reader: &'reader sessions::Reader<'reader>) -> Self {
        DeltasReader {
            reader: reader.reader(),
        }
    }
    /// Reads the deltas recorded for a single file, or `None` when the
    /// session has no deltas for it.
    pub fn read_file<P: AsRef<std::path::Path>>(&self, path: P) -> Result<Option<Vec<Delta>>> {
        match self.read(Some(&[path.as_ref()])) {
            Ok(deltas) => Ok(deltas.into_iter().next().map(|(_, deltas)| deltas)),
            // A missing file is not an error for single-file reads.
            Err(ReadError::NotFound) => Ok(None),
            Err(err) => Err(err.into()),
        }
    }
    /// Reads all deltas of the session, keyed by path relative to the
    /// deltas directory. `filter` restricts the result to the given paths.
    pub fn read(
        &self,
        filter: Option<&[&path::Path]>,
    ) -> Result<HashMap<path::PathBuf, Vec<Delta>>, ReadError> {
        let deltas_dir = path::Path::new("session/deltas");
        let mut paths = self.reader.list_files(deltas_dir)?;
        if let Some(filter) = filter {
            // Filtering happens before the deltas_dir prefix is joined, so
            // filter entries are compared against dir-relative paths.
            paths = paths
                .into_iter()
                .filter(|file_path| filter.iter().any(|path| file_path.eq(path)))
                .collect::<Vec<_>>();
        }
        paths = paths.iter().map(|path| deltas_dir.join(path)).collect();
        let files = self.reader.batch(&paths).context("failed to batch read")?;
        let files = files
            .into_iter()
            .map(|file| {
                file.map_err(|error| match error {
                    reader::Error::NotFound => ReadError::NotFound,
                    error => ReadError::Other(error.into()),
                })
            })
            .collect::<Result<Vec<_>, _>>()?;
        // Strip the prefix back off and deserialize each file's delta list.
        Ok(paths
            .into_iter()
            .zip(files)
            .filter_map(|(path, file)| {
                path.strip_prefix(deltas_dir)
                    .ok()
                    .map(|path| (path.to_path_buf(), file))
            })
            .filter_map(|(path, file)| {
                if let reader::Content::UTF8(content) = file {
                    if content.is_empty() {
                        // this is a leftover from some bug, shouldn't happen anymore
                        return None;
                    }
                    // NOTE(review): invalid JSON is silently skipped here
                    // (`.ok()?`) rather than reported — confirm intended.
                    let deltas = serde_json::from_str(&content).ok()?;
                    Some(Ok((path, deltas)))
                } else {
                    Some(Err(anyhow::anyhow!("unexpected content type")))
                }
            })
            .collect::<Result<HashMap<_, _>>>()?)
    }
}

View File

@ -1,72 +0,0 @@
use std::path::PathBuf;
use anyhow::Result;
use super::Delta;
use crate::{gb_repository, writer};
/// Writes deltas and working-directory snapshots into a session directory.
pub struct DeltasWriter<'writer> {
    repository: &'writer gb_repository::Repository,
    // File writer rooted at the repository's gitbutler directory.
    writer: writer::DirWriter,
}
impl<'writer> DeltasWriter<'writer> {
    /// Creates a writer rooted at the repository's gitbutler directory.
    pub fn new(repository: &'writer gb_repository::Repository) -> Result<Self, std::io::Error> {
        writer::DirWriter::open(repository.root()).map(|writer| Self { repository, writer })
    }

    /// Serializes `deltas` as JSON under `session/deltas/<path>`.
    ///
    /// Marks the session active and holds the repository lock while writing.
    /// Takes a slice instead of `&Vec` — existing `&Vec` callers coerce.
    pub fn write<P: AsRef<std::path::Path>>(&self, path: P, deltas: &[Delta]) -> Result<()> {
        self.repository.mark_active_session()?;
        let _lock = self.repository.lock();
        let path = path.as_ref();
        let raw_deltas = serde_json::to_string(&deltas)?;
        self.writer
            .write_string(PathBuf::from("session/deltas").join(path), &raw_deltas)?;
        tracing::trace!(
            project_id = %self.repository.get_project_id(),
            path = %path.display(),
            "wrote deltas"
        );
        Ok(())
    }

    /// Deletes the session's working-directory copy of `path`.
    pub fn remove_wd_file<P: AsRef<std::path::Path>>(&self, path: P) -> Result<()> {
        self.repository.mark_active_session()?;
        let _lock = self.repository.lock();
        let path = path.as_ref();
        self.writer.remove(PathBuf::from("session/wd").join(path))?;
        tracing::trace!(
            project_id = %self.repository.get_project_id(),
            path = %path.display(),
            "deleted session wd file"
        );
        Ok(())
    }

    /// Writes `contents` as the session's working-directory copy of `path`.
    pub fn write_wd_file<P: AsRef<std::path::Path>>(&self, path: P, contents: &str) -> Result<()> {
        self.repository.mark_active_session()?;
        let _lock = self.repository.lock();
        let path = path.as_ref();
        self.writer
            .write_string(PathBuf::from("session/wd").join(path), contents)?;
        tracing::trace!(
            project_id = %self.repository.get_project_id(),
            path = %path.display(),
            "wrote session wd file"
        );
        Ok(())
    }
}

View File

@ -1,3 +0,0 @@
//! GitButler's internal (bare) repository used to record sessions.
mod repository;
pub use repository::{RemoteError, Repository};

View File

@ -1,916 +0,0 @@
#[cfg(target_family = "unix")]
use std::os::unix::prelude::*;
use std::{
collections::HashSet,
fs::File,
io::{BufReader, Read},
path,
};
use anyhow::{anyhow, bail, Context, Result};
use filetime::FileTime;
use fslock::LockFile;
use sha2::{Digest, Sha256};
use crate::error::Code;
#[cfg(target_os = "windows")]
use crate::windows::MetadataShim;
use crate::{
deltas, fs, git, project_repository,
projects::{self, ProjectId},
reader,
sessions::{self, SessionId},
users,
virtual_branches::target,
};
/// A per-project bare git repository where GitButler records sessions.
pub struct Repository {
    // The bare repository under <root>/projects/<project_id>.
    git_repository: git::Repository,
    project: projects::Project,
    // Sibling <project_id>.lock file guarding concurrent access.
    lock_path: path::PathBuf,
}
impl Repository {
    /// Opens the GitButler repository for `project_repository`, creating and
    /// initializing it (including an initial session flush) when missing.
    pub fn open(
        root: &path::Path,
        project_repository: &project_repository::Repository,
        user: Option<&users::User>,
    ) -> Result<Self> {
        let project = project_repository.project();
        let project_objects_path = project.path.join(".git/objects");
        if !project_objects_path.exists() {
            bail!("path not found: {}", project_objects_path.display());
        }
        let projects_dir = root.join("projects");
        let path = projects_dir.join(project.id.to_string());
        let lock_path = projects_dir.join(format!("{}.lock", project.id));
        if path.exists() {
            // Existing repository: open it and re-link the project's object
            // database as a disk alternate.
            let git_repository = git::Repository::open(path.clone())
                .with_context(|| format!("{}: failed to open git repository", path.display()))?;
            git_repository
                .add_disk_alternate(project_objects_path.to_str().unwrap())
                .context("failed to add disk alternate")?;
            Ok(Self {
                git_repository,
                project: project.clone(),
                lock_path,
            })
        } else {
            // First open: create a bare repository with a "current" head.
            std::fs::create_dir_all(&path).context("failed to create project directory")?;
            let git_repository = git::Repository::init_opts(
                &path,
                git2::RepositoryInitOptions::new()
                    .bare(true)
                    .initial_head("refs/heads/current")
                    .external_template(false),
            )
            .with_context(|| format!("{}: failed to initialize git repository", path.display()))?;
            git_repository
                .add_disk_alternate(project_objects_path.to_str().unwrap())
                .context("failed to add disk alternate")?;
            let gb_repository = Self {
                git_repository,
                project: project.clone(),
                lock_path,
            };
            // Create the first session under the lock, then flush it after
            // releasing the lock (flush_session re-acquires it).
            let _lock = gb_repository.lock();
            let session = gb_repository.create_current_session(project_repository)?;
            drop(_lock);
            gb_repository
                .flush_session(project_repository, &session, user)
                .context("failed to run initial flush")?;
            Ok(gb_repository)
        }
    }
    /// Returns the id of the project this repository belongs to.
    pub fn get_project_id(&self) -> &ProjectId {
        &self.project.id
    }
    /// Builds an anonymous remote plus access token, or `None` when the user
    /// is not logged in or the project is not connected to the API.
    fn remote(&self, user: Option<&users::User>) -> Result<Option<(git::Remote, String)>> {
        // only push if logged in
        let access_token = match user {
            Some(user) => user.access_token.clone(),
            None => return Ok(None),
        };
        // only push if project is connected
        let remote_url = match &self.project.api {
            Some(api) => api.git_url.clone(),
            None => return Ok(None),
        };
        let remote = self
            .git_repository
            .remote_anonymous(&remote_url.parse().unwrap())
            .with_context(|| {
                format!(
                    "failed to create anonymous remote for {}",
                    remote_url.as_str()
                )
            })?;
        Ok(Some((remote, access_token)))
    }
    /// Fetches all remote heads into `refs/remotes/*`; a no-op without a
    /// configured remote. Network failures map to `RemoteError::Network`.
    pub fn fetch(&self, user: Option<&users::User>) -> Result<(), RemoteError> {
        let (mut remote, access_token) = match self.remote(user)? {
            Some((remote, access_token)) => (remote, access_token),
            None => return Result::Ok(()),
        };
        let mut callbacks = git2::RemoteCallbacks::new();
        if self.project.omit_certificate_check.unwrap_or(false) {
            callbacks.certificate_check(|_, _| Ok(git2::CertificateCheckStatus::CertificateOk));
        }
        // NOTE(review): push-oriented callbacks registered on a fetch —
        // confirm these are actually invoked in this direction.
        callbacks.push_update_reference(move |refname, message| {
            tracing::debug!(
                project_id = %self.project.id,
                refname,
                message,
                "pulling reference"
            );
            Result::Ok(())
        });
        callbacks.push_transfer_progress(move |one, two, three| {
            tracing::debug!(
                project_id = %self.project.id,
                "transferred {}/{}/{} objects",
                one,
                two,
                three
            );
        });
        let mut fetch_opts = git2::FetchOptions::new();
        fetch_opts.remote_callbacks(callbacks);
        // Authenticate with a custom Authorization header.
        let auth_header = format!("Authorization: {}", access_token);
        let headers = &[auth_header.as_str()];
        fetch_opts.custom_headers(headers);
        remote
            .fetch(&["refs/heads/*:refs/remotes/*"], Some(&mut fetch_opts))
            .map_err(|error| match error {
                git::Error::Network(error) => {
                    tracing::warn!(project_id = %self.project.id, error = %error, "failed to fetch gb repo");
                    RemoteError::Network
                }
                error => RemoteError::Other(error.into()),
            })?;
        tracing::info!(
            project_id = %self.project.id,
            "gb repo fetched",
        );
        Result::Ok(())
    }
    /// Pushes `refs/heads/current` to a branch named after the project on
    /// the remote; a no-op without a configured remote.
    pub fn push(&self, user: Option<&users::User>) -> Result<(), RemoteError> {
        let (mut remote, access_token) = match self.remote(user)? {
            Some((remote, access_token)) => (remote, access_token),
            None => return Ok(()),
        };
        // Set the remote's callbacks
        let mut callbacks = git2::RemoteCallbacks::new();
        if self.project.omit_certificate_check.unwrap_or(false) {
            callbacks.certificate_check(|_, _| Ok(git2::CertificateCheckStatus::CertificateOk));
        }
        callbacks.push_update_reference(move |refname, message| {
            tracing::debug!(
                project_id = %self.project.id,
                refname,
                message,
                "pushing reference"
            );
            Result::Ok(())
        });
        callbacks.push_transfer_progress(move |current, total, bytes| {
            tracing::debug!(
                project_id = %self.project.id,
                "transferred {}/{}/{} objects",
                current,
                total,
                bytes
            );
        });
        let mut push_options = git2::PushOptions::new();
        push_options.remote_callbacks(callbacks);
        let auth_header = format!("Authorization: {}", access_token);
        let headers = &[auth_header.as_str()];
        push_options.custom_headers(headers);
        let remote_refspec = format!("refs/heads/current:refs/heads/{}", self.project.id);
        // Push to the remote
        remote
            .push(&[&remote_refspec], Some(&mut push_options))
            .map_err(|error| match error {
                git::Error::Network(error) => {
                    tracing::warn!(project_id = %self.project.id, error = %error, "failed to push gb repo");
                    RemoteError::Network
                }
                error => RemoteError::Other(error.into()),
            })?;
        tracing::info!(project_id = %self.project.id, "gb repository pushed");
        Ok(())
    }
    // take branches from the last session and put them into the current session
    fn copy_branches(&self) -> Result<()> {
        let vb_state = self.project.virtual_branches();
        let branches = vb_state
            .list_branches()
            .context("failed to read virtual branches")?;
        // copy default target
        let default_target = match vb_state.get_default_target() {
            Result::Ok(target) => Ok(Some(target)),
            // No default target is a valid state, not an error.
            Err(reader::Error::NotFound) => Ok(None),
            Err(err) => Err(err).context("failed to read default target"),
        }?;
        if let Some(default_target) = default_target.clone() {
            vb_state
                .set_default_target(default_target)
                .context("failed to write default target")?;
        }
        // copy branch targets
        for branch in &branches {
            let target = vb_state
                .get_branch_target(&branch.id)
                .with_context(|| format!("{}: failed to read target", branch.id))?;
            // Targets equal to the default need not be duplicated per branch.
            if let Some(default_target) = default_target.clone() {
                if default_target == target {
                    continue;
                }
            }
            vb_state
                .set_branch_target(branch.id, target)
                .with_context(|| format!("{}: failed to write target", branch.id))?;
        }
        // copy branches that we don't already have
        for branch in &branches {
            vb_state
                .set_branch(branch.clone())
                .with_context(|| format!("{}: failed to write branch", branch.id))?;
        }
        Ok(())
    }
    /// Creates a fresh session seeded from the project's HEAD (when it is
    /// readable), writes it to disk, and updates `.git/gitbutler.json`.
    fn create_current_session(
        &self,
        project_repository: &project_repository::Repository,
    ) -> Result<sessions::Session> {
        let now_ms = crate::time::now_ms();
        let meta = match project_repository.get_head() {
            Result::Ok(head) => sessions::Meta {
                start_timestamp_ms: now_ms,
                last_timestamp_ms: now_ms,
                branch: head.name().map(|name| name.to_string()),
                commit: Some(head.peel_to_commit()?.id().to_string()),
            },
            // Unreadable HEAD (e.g. unborn branch): record without branch info.
            Err(_) => sessions::Meta {
                start_timestamp_ms: now_ms,
                last_timestamp_ms: now_ms,
                branch: None,
                commit: None,
            },
        };
        let session = sessions::Session {
            id: SessionId::generate(),
            hash: None,
            meta,
        };
        // write session to disk
        sessions::Writer::new(self)
            .context("failed to create session writer")?
            .write(&session)
            .context("failed to write session")?;
        tracing::info!(
            project_id = %self.project.id,
            session_id = %session.id,
            "created new session"
        );
        self.flush_gitbutler_file(&session.id)?;
        Ok(session)
    }
    /// Acquires the per-project file lock; panics if the lock file cannot be
    /// opened or locked.
    pub fn lock(&self) -> LockFile {
        let mut lockfile = LockFile::open(&self.lock_path).expect("failed to open lock file");
        lockfile.lock().expect("failed to obtain lock on lock file");
        lockfile
    }
    /// Bumps the current session's `last_timestamp_ms`, creating a session
    /// first when none exists.
    pub fn mark_active_session(&self) -> Result<()> {
        let current_session = self
            .get_or_create_current_session()
            .context("failed to get current session")?;
        let updated_session = sessions::Session {
            meta: sessions::Meta {
                last_timestamp_ms: crate::time::now_ms(),
                ..current_session.meta
            },
            ..current_session
        };
        sessions::Writer::new(self)
            .context("failed to create session writer")?
            .write(&updated_session)
            .context("failed to write session")?;
        Ok(())
    }
    /// Returns the current (on-disk) session, falling back to the first
    /// session yielded by the flushed-sessions iterator.
    pub fn get_latest_session(&self) -> Result<Option<sessions::Session>> {
        if let Some(current_session) = self.get_current_session()? {
            Ok(Some(current_session))
        } else {
            let mut sessions_iterator = self.get_sessions_iterator()?;
            sessions_iterator
                .next()
                .transpose()
                .context("failed to get latest session")
        }
    }
    /// Returns the current session, creating one (and copying over branches
    /// from the previous session) when none exists.
    pub fn get_or_create_current_session(&self) -> Result<sessions::Session> {
        let _lock = self.lock();
        let reader = reader::Reader::open(&self.root())?;
        match sessions::Session::try_from(&reader) {
            Result::Ok(session) => Ok(session),
            Err(sessions::SessionError::NoSession) => {
                let project_repository = project_repository::Repository::open(&self.project)
                    .context("failed to open project repository")?;
                let session = self
                    .create_current_session(&project_repository)
                    .context("failed to create current session")?;
                // Release the lock before copying branch state.
                drop(_lock);
                self.copy_branches().context("failed to unpack branches")?;
                Ok(session)
            }
            Err(err) => Err(err).context("failed to read current session"),
        }
    }
    /// Flushes the current session, if one exists.
    pub fn flush(
        &self,
        project_repository: &project_repository::Repository,
        user: Option<&users::User>,
    ) -> Result<Option<sessions::Session>> {
        let current_session = self
            .get_current_session()
            .context("failed to get current session")?;
        if current_session.is_none() {
            return Ok(None);
        }
        let current_session = current_session.unwrap();
        let current_session = self
            .flush_session(project_repository, &current_session, user)
            .context(format!("failed to flush session {}", current_session.id))?;
        Ok(Some(current_session))
    }
    /// Commits the session (metadata, working directory, branches) into the
    /// repository, removes the on-disk session, and returns it with its
    /// commit hash set. Already-flushed sessions are returned unchanged.
    pub fn flush_session(
        &self,
        project_repository: &project_repository::Repository,
        session: &sessions::Session,
        user: Option<&users::User>,
    ) -> Result<sessions::Session> {
        if session.hash.is_some() {
            return Ok(session.clone());
        }
        if !self.root().exists() {
            return Err(anyhow!("nothing to flush"));
        }
        let _lock = self.lock();
        // update last timestamp
        let session_writer =
            sessions::Writer::new(self).context("failed to create session writer")?;
        session_writer.write(session)?;
        // Assemble the commit tree: session metadata, working dir, branches.
        let mut tree_builder = self.git_repository.treebuilder(None);
        tree_builder.upsert(
            "session",
            build_session_tree(self).context("failed to build session tree")?,
            git::FileMode::Tree,
        );
        tree_builder.upsert(
            "wd",
            build_wd_tree(self, project_repository)
                .context("failed to build working directory tree")?,
            git::FileMode::Tree,
        );
        tree_builder.upsert(
            "branches",
            build_branches_tree(self).context("failed to build branches tree")?,
            git::FileMode::Tree,
        );
        let tree_id = tree_builder.write().context("failed to write tree")?;
        let commit_oid =
            write_gb_commit(tree_id, self, user).context("failed to write gb commit")?;
        tracing::info!(
            project_id = %self.project.id,
            session_id = %session.id,
            %commit_oid,
            "flushed session"
        );
        session_writer.remove()?;
        let session = sessions::Session {
            hash: Some(commit_oid),
            ..session.clone()
        };
        Ok(session)
    }
    /// Iterates over previously flushed sessions.
    pub fn get_sessions_iterator(&self) -> Result<sessions::SessionsIterator<'_>> {
        sessions::SessionsIterator::new(&self.git_repository)
    }
    /// Reads the on-disk (not yet flushed) session, if any.
    pub fn get_current_session(&self) -> Result<Option<sessions::Session>> {
        let _lock = self.lock();
        let reader = reader::Reader::open(&self.root())?;
        match sessions::Session::try_from(&reader) {
            Ok(session) => Ok(Some(session)),
            Err(sessions::SessionError::NoSession) => Ok(None),
            Err(sessions::SessionError::Other(err)) => Err(err),
        }
    }
    /// Root of GitButler's mutable state inside the bare repository.
    pub fn root(&self) -> std::path::PathBuf {
        self.git_repository.path().join("gitbutler")
    }
    /// Directory holding the current session's metadata.
    pub fn session_path(&self) -> std::path::PathBuf {
        self.root().join("session")
    }
    /// Path of the underlying bare git repository.
    pub fn git_repository_path(&self) -> &std::path::Path {
        self.git_repository.path()
    }
    /// Directory holding the current session's working-directory snapshot.
    pub fn session_wd_path(&self) -> std::path::PathBuf {
        self.session_path().join("wd")
    }
    /// Returns the project's default virtual-branch target, if set.
    pub fn default_target(&self) -> Result<Option<target::Target>> {
        let vb_state = self.project.virtual_branches();
        match vb_state.get_default_target() {
            Result::Ok(target) => Ok(Some(target)),
            Err(reader::Error::NotFound) => Ok(None),
            Err(err) => Err(err.into()),
        }
    }
    /// Writes `.git/gitbutler.json` into the project so external tools can
    /// discover the active session and repository location.
    fn flush_gitbutler_file(&self, session_id: &SessionId) -> Result<()> {
        let gb_path = self.git_repository.path();
        let project_id = self.project.id.to_string();
        let gb_file_content = serde_json::json!({
            "sessionId": session_id,
            "repositoryId": project_id,
            "gbPath": gb_path,
            "api": self.project.api,
        });
        let gb_file_path = self.project.path.join(".git/gitbutler.json");
        std::fs::write(&gb_file_path, gb_file_content.to_string())?;
        tracing::debug!("gitbutler file updated: {:?}", gb_file_path);
        Ok(())
    }
    /// Access to the underlying bare git repository.
    pub fn git_repository(&self) -> &git::Repository {
        &self.git_repository
    }
}
/// Builds the working-directory tree for a session flush.
///
/// Uses the previous flush (`refs/heads/current`) as a base when it exists;
/// otherwise builds the tree from scratch out of the project's files.
fn build_wd_tree(
    gb_repository: &Repository,
    project_repository: &project_repository::Repository,
) -> Result<git::Oid> {
    match gb_repository
        .git_repository
        .find_reference(&"refs/heads/current".parse().unwrap())
    {
        Result::Ok(reference) => build_wd_tree_from_reference(gb_repository, &reference)
            .context("failed to build wd index"),
        // No previous flush yet: fall back to a full scan.
        Err(git::Error::NotFound(_)) => build_wd_tree_from_repo(gb_repository, project_repository)
            .context("failed to build wd index"),
        Err(e) => Err(e.into()),
    }
}
/// Builds the wd tree incrementally: starts from the previous flush's `wd`
/// subtree, overlays the current session's wd files, and removes files that
/// have deltas but no longer exist in the session wd.
fn build_wd_tree_from_reference(
    gb_repository: &Repository,
    reference: &git::Reference,
) -> Result<git::Oid> {
    // start off with the last tree as a base
    let tree = reference.peel_to_tree()?;
    let wd_tree_entry = tree.get_name("wd").unwrap();
    let wd_tree = gb_repository.git_repository.find_tree(wd_tree_entry.id())?;
    let mut index = git::Index::try_from(&wd_tree)?;
    // write updated files on top of the last tree
    for file_path in fs::list_files(gb_repository.session_wd_path(), &[]).with_context(|| {
        format!(
            "failed to session working directory files list files in {}",
            gb_repository.session_wd_path().display()
        )
    })? {
        add_wd_path(
            &mut index,
            &gb_repository.session_wd_path(),
            &file_path,
            gb_repository,
        )
        .with_context(|| {
            format!(
                "failed to add session working directory path {}",
                file_path.display()
            )
        })?;
    }
    let session_reader = reader::Reader::open(&gb_repository.root())?;
    let deltas = deltas::Reader::from(&session_reader)
        .read(None)
        .context("failed to read deltas")?;
    let wd_files = session_reader.list_files(path::Path::new("session/wd"))?;
    let wd_files = wd_files.iter().collect::<HashSet<_>>();
    // if a file has delta, but doesn't exist in wd, it was deleted
    let deleted_files = deltas
        .keys()
        .filter(|key| !wd_files.contains(key))
        .collect::<Vec<_>>();
    for deleted_file in deleted_files {
        index
            .remove_path(deleted_file)
            .context("failed to remove path")?;
    }
    let wd_tree_oid = index
        .write_tree_to(&gb_repository.git_repository)
        .context("failed to write wd tree")?;
    Ok(wd_tree_oid)
}
// build wd index from the working directory files new session wd files
// this is important because we want to make sure session files are in sync with session deltas
// build wd index from the working directory files new session wd files
// this is important because we want to make sure session files are in sync with session deltas
fn build_wd_tree_from_repo(
    gb_repository: &Repository,
    project_repository: &project_repository::Repository,
) -> Result<git::Oid> {
    let mut index = git::Index::new()?;
    // Tracks paths already added from the session wd so worktree files
    // don't overwrite them below.
    let mut added = HashSet::new();
    // first, add session/wd files. session/wd are written at the same time as deltas, so it's important to add them first
    // to make sure they are in sync with the deltas
    for file_path in fs::list_files(gb_repository.session_wd_path(), &[]).with_context(|| {
        format!(
            "failed to session working directory files list files in {}",
            gb_repository.session_wd_path().display()
        )
    })? {
        // Skip files the project's .gitignore rules exclude; an error from
        // is_path_ignored is treated as "ignored" (unwrap_or(true)).
        if project_repository
            .git_repository
            .is_path_ignored(&file_path)
            .unwrap_or(true)
        {
            continue;
        }
        add_wd_path(
            &mut index,
            &gb_repository.session_wd_path(),
            &file_path,
            gb_repository,
        )
        .with_context(|| {
            format!(
                "failed to add session working directory path {}",
                file_path.display()
            )
        })?;
        added.insert(file_path);
    }
    // finally, add files from the working directory if they aren't already in the index
    let worktree_relative_files =
        fs::iter_worktree_files(project_repository.root()).with_context(|| {
            format!(
                "failed to working directory list files in {}",
                project_repository.root().display()
            )
        })?;
    for file_path in
        worktree_relative_files.map(|rela_path| gix::path::from_bstr(rela_path).into_owned())
    {
        if added.contains(&file_path) {
            continue;
        }
        add_wd_path(
            &mut index,
            project_repository.root(),
            &file_path,
            gb_repository,
        )
        .with_context(|| {
            crate::error::Context::new(
                Code::Projects,
                format!("failed to add '{}' to temporary index", file_path.display()),
            )
        })?;
    }
    let tree_oid = index
        .write_tree_to(&gb_repository.git_repository)
        .context("failed to write tree to repo")?;
    Ok(tree_oid)
}
// take a file path we see and add it to our in-memory index
// we call this from build_initial_wd_tree, which is smart about using the existing index to avoid rehashing files that haven't changed
// and also looks for large files and puts in a placeholder hash in the LFS format
// TODO: actually upload the file to LFS
/// Hashes `dir`/`rel_file_path` into the object database and appends a
/// corresponding entry to the in-memory `index`.
///
/// Three content cases:
/// * symlink   — the blob is the link target (made relative to `dir` when possible);
/// * > 100 MB  — the blob is a git-lfs pointer file; the real content is
///   copied into `.git/lfs/objects`;
/// * otherwise — the blob is the file content itself.
fn add_wd_path(
    index: &mut git::Index,
    dir: &std::path::Path,
    rel_file_path: &std::path::Path,
    gb_repository: &Repository,
) -> Result<()> {
    let file_path = dir.join(rel_file_path);
    // symlink_metadata: do not follow links, so symlinks are detected below
    let metadata = std::fs::symlink_metadata(&file_path).context("failed to get metadata for")?;
    let modify_time = FileTime::from_last_modification_time(&metadata);
    // creation time is not available on every filesystem; fall back to mtime
    let create_time = FileTime::from_creation_time(&metadata).unwrap_or(modify_time);
    // look for files that are bigger than 4GB, which are not supported by git
    // insert a pointer as the blob content instead
    // NOTE(review): the comment above says 4GB but the check below uses
    // 100 MB (100_000_000 bytes) — confirm which limit is intended.
    // TODO: size limit should be configurable
    let blob = if metadata.is_symlink() {
        // it's a symlink, make the content the path of the link
        let link_target = std::fs::read_link(&file_path)?;
        // if the link target is inside the project repository, make it relative
        let link_target = link_target.strip_prefix(dir).unwrap_or(&link_target);
        gb_repository.git_repository.blob(
            link_target
                .to_str()
                .ok_or_else(|| anyhow!("non-UTF-8 in '{}'", link_target.display()))?
                .as_bytes(),
        )?
    } else if metadata.len() > 100_000_000 {
        tracing::warn!(
            project_id = %gb_repository.project.id,
            path = %file_path.display(),
            "file too big"
        );
        // get a sha256 hash of the file first
        let sha = sha256_digest(&file_path)?;
        // put togther a git lfs pointer file: https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md
        let mut lfs_pointer = String::from("version https://git-lfs.github.com/spec/v1\n");
        lfs_pointer.push_str("oid sha256:");
        lfs_pointer.push_str(&sha);
        lfs_pointer.push('\n');
        lfs_pointer.push_str("size ");
        lfs_pointer.push_str(&metadata.len().to_string());
        lfs_pointer.push('\n');
        // write the file to the .git/lfs/objects directory
        // create the directory recursively if it doesn't exist
        let lfs_objects_dir = gb_repository.git_repository.path().join("lfs/objects");
        std::fs::create_dir_all(lfs_objects_dir.clone())?;
        let lfs_path = lfs_objects_dir.join(sha);
        std::fs::copy(file_path, lfs_path)?;
        gb_repository.git_repository.blob(lfs_pointer.as_bytes())?
    } else {
        // read the file into a blob, get the object id
        gb_repository.git_repository.blob_path(&file_path)?
    };
    // create a new IndexEntry from the file metadata
    // truncation is ok https://libgit2.org/libgit2/#HEAD/type/git_index_entry
    #[allow(clippy::cast_possible_truncation)]
    index
        .add(&git::IndexEntry {
            ctime: create_time,
            mtime: modify_time,
            dev: metadata.dev() as u32, // Unix-only metadata extensions from here down
            ino: metadata.ino() as u32,
            mode: 33188, // 0o100644: regular, non-executable file
            uid: metadata.uid(),
            gid: metadata.gid(),
            file_size: metadata.len() as u32,
            flags: 10, // normal flags for normal file (for the curious: https://git-scm.com/docs/index-format)
            flags_extended: 0, // no extended flags
            path: rel_file_path.to_str().unwrap().to_string().into(),
            id: blob,
        })
        .with_context(|| format!("failed to add index entry for {}", rel_file_path.display()))?;
    Ok(())
}
/// Calculates the SHA-256 digest of a file as a *lowercase* hex string,
/// streaming through a fixed-size buffer so arbitrarily large files can be
/// hashed without loading them into memory.
///
/// Used by `add_wd_path` to build git-lfs pointer files for oversized
/// blobs; the LFS pointer spec requires the `oid` value to be lowercase hex.
///
/// # Errors
/// Returns any I/O error from opening or reading the file.
fn sha256_digest(path: &std::path::Path) -> Result<String> {
    let input = File::open(path)?;
    let mut reader = BufReader::new(input);
    let digest = {
        let mut hasher = Sha256::new();
        let mut buffer = [0; 1024];
        loop {
            let count = reader.read(&mut buffer)?;
            if count == 0 {
                break;
            }
            hasher.update(&buffer[..count]);
        }
        hasher.finalize()
    };
    // Bug fix: `{:X}` produced UPPERCASE hex, contradicting both this
    // function's contract and the git-lfs pointer format.
    Ok(format!("{:x}", digest))
}
/// Builds a tree object containing every file under `<root>/branches`,
/// returning the oid of the written tree.
fn build_branches_tree(gb_repository: &Repository) -> Result<git::Oid> {
    let branches_dir = gb_repository.root().join("branches");
    let entries =
        fs::list_files(&branches_dir, &[]).context("failed to find branches directory")?;
    let mut index = git::Index::new()?;
    for rel_path in entries {
        let rel_path = std::path::Path::new(&rel_path);
        let abs_path = branches_dir.join(rel_path);
        add_file_to_index(gb_repository, &mut index, rel_path, &abs_path)
            .context("failed to add branch file to index")?;
    }
    index
        .write_tree_to(&gb_repository.git_repository)
        .context("failed to write index to tree")
}
/// Builds a tree object from the current session's metadata files — everything
/// under the session directory except the `wd` subdirectory, which is turned
/// into its own tree by the wd-tree builders.
fn build_session_tree(gb_repository: &Repository) -> Result<git::Oid> {
    let session_dir = gb_repository.session_path();
    // skip `wd`: working-directory contents are handled separately
    let skip = [path::Path::new("wd").to_path_buf()];
    let mut index = git::Index::new()?;
    for rel_path in fs::list_files(&session_dir, &skip).context("failed to list session files")? {
        let abs_path = session_dir.join(&rel_path);
        add_file_to_index(gb_repository, &mut index, &rel_path, &abs_path)
            .with_context(|| format!("failed to add session file: {}", rel_path.display()))?;
    }
    index
        .write_tree_to(&gb_repository.git_repository)
        .context("failed to write index to tree")
}
// this is a helper function for build_gb_tree that takes paths under .git/gb/session and adds them to the in-memory index
/// Hashes `abs_file_path` into the object database and appends an index
/// entry at `rel_file_path`. Unlike `add_wd_path` there is no symlink or
/// LFS handling — session metadata files are small regular files.
fn add_file_to_index(
    gb_repository: &Repository,
    index: &mut git::Index,
    rel_file_path: &std::path::Path,
    abs_file_path: &std::path::Path,
) -> Result<()> {
    let blob = gb_repository.git_repository.blob_path(abs_file_path)?;
    let metadata = abs_file_path.metadata()?;
    let modified_time = FileTime::from_last_modification_time(&metadata);
    // creation time is not available on every filesystem; fall back to mtime
    let create_time = FileTime::from_creation_time(&metadata).unwrap_or(modified_time);
    // create a new IndexEntry from the file metadata
    // truncation is ok https://libgit2.org/libgit2/#HEAD/type/git_index_entry
    #[allow(clippy::cast_possible_truncation)]
    index
        .add(&git::IndexEntry {
            ctime: create_time,
            mtime: modified_time,
            dev: metadata.dev() as u32, // Unix-only metadata extensions from here down
            ino: metadata.ino() as u32,
            mode: 33188, // 0o100644: regular, non-executable file
            uid: metadata.uid(),
            gid: metadata.gid(),
            file_size: metadata.len() as u32,
            flags: 10, // normal flags for normal file (for the curious: https://git-scm.com/docs/index-format)
            flags_extended: 0, // no extended flags
            path: rel_file_path.to_str().unwrap().into(),
            id: blob,
        })
        .with_context(|| format!("Failed to add file to index: {}", abs_file_path.display()))?;
    Ok(())
}
// write a new commit object to the repo
// this is called once we have a tree of deltas, metadata and current wd snapshot
// and either creates or updates the refs/heads/current ref
fn write_gb_commit(
tree_id: git::Oid,
gb_repository: &Repository,
user: Option<&users::User>,
) -> Result<git::Oid> {
let comitter = git::Signature::now("gitbutler", "gitbutler@localhost")?;
let author = match user {
None => comitter.clone(),
Some(user) => git::Signature::try_from(user)?,
};
let current_refname: git::Refname = "refs/heads/current".parse().unwrap();
match gb_repository
.git_repository
.find_reference(&current_refname)
{
Result::Ok(reference) => {
let last_commit = reference.peel_to_commit()?;
let new_commit = gb_repository.git_repository.commit(
Some(&current_refname),
&author, // author
&comitter, // committer
"gitbutler check", // commit message
&gb_repository.git_repository.find_tree(tree_id).unwrap(), // tree
&[&last_commit], // parents
)?;
Ok(new_commit)
}
Err(git::Error::NotFound(_)) => {
let new_commit = gb_repository.git_repository.commit(
Some(&current_refname),
&author, // author
&comitter, // committer
"gitbutler check", // commit message
&gb_repository.git_repository.find_tree(tree_id).unwrap(), // tree
&[], // parents
)?;
Ok(new_commit)
}
Err(e) => Err(e.into()),
}
}
/// Errors produced when talking to a remote gitbutler endpoint.
#[derive(Debug, thiserror::Error)]
pub enum RemoteError {
    /// The remote could not be reached (connectivity / transport failure).
    #[error("network error")]
    Network,
    /// Any other failure, wrapped as an opaque error.
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}

View File

@ -15,12 +15,9 @@
pub mod askpass;
pub mod assets;
pub mod database;
pub mod dedup;
pub mod deltas;
pub mod error;
pub mod fs;
pub mod gb_repository;
pub mod git;
pub mod id;
pub mod keys;
@ -30,7 +27,6 @@ pub mod path;
pub mod project_repository;
pub mod projects;
pub mod reader;
pub mod sessions;
pub mod ssh;
pub mod storage;
pub mod time;

View File

@ -1,181 +0,0 @@
use anyhow::{Context, Result};
use super::session::{self, SessionId};
use crate::{database, projects::ProjectId};
/// Sessions persistence layer backed by the shared SQLite database.
#[derive(Clone)]
pub struct Database {
    database: database::Database,
}

impl Database {
    /// Wraps the shared SQLite handle.
    pub fn new(database: database::Database) -> Database {
        Database { database }
    }

    /// Upserts (insert-or-update keyed by session id) the given sessions
    /// for `project_id` inside a single transaction.
    pub fn insert(&self, project_id: &ProjectId, sessions: &[&session::Session]) -> Result<()> {
        self.database.transaction(|tx| -> Result<()> {
            let mut stmt = insert_stmt(tx).context("Failed to prepare insert statement")?;
            for session in sessions {
                stmt.execute(rusqlite::named_params! {
                    ":id": session.id,
                    ":project_id": project_id,
                    // hash and timestamps are stored as text columns
                    ":hash": session.hash.map(|hash| hash.to_string()),
                    ":branch": session.meta.branch,
                    ":commit": session.meta.commit,
                    ":start_timestamp_ms": session.meta.start_timestamp_ms.to_string(),
                    ":last_timestamp_ms": session.meta.last_timestamp_ms.to_string(),
                })
                .context("Failed to execute insert statement")?;
            }
            Ok(())
        })?;
        Ok(())
    }

    /// Lists a project's sessions, most recent first. When
    /// `earliest_timestamp_ms` is given, sessions whose last activity is
    /// older are skipped — note the filter is applied in Rust after the
    /// query, not in SQL.
    pub fn list_by_project_id(
        &self,
        project_id: &ProjectId,
        earliest_timestamp_ms: Option<u128>,
    ) -> Result<Vec<session::Session>> {
        self.database.transaction(|tx| {
            let mut stmt = list_by_project_id_stmt(tx)
                .context("Failed to prepare list_by_project_id statement")?;
            let mut rows = stmt
                .query(rusqlite::named_params! {
                    ":project_id": project_id,
                })
                .context("Failed to execute list_by_project_id statement")?;
            let mut sessions = Vec::new();
            while let Some(row) = rows
                .next()
                .context("Failed to iterate over list_by_project_id results")?
            {
                let session = parse_row(row)?;
                if let Some(earliest_timestamp_ms) = earliest_timestamp_ms {
                    if session.meta.last_timestamp_ms < earliest_timestamp_ms {
                        continue;
                    }
                }
                sessions.push(session);
            }
            Ok(sessions)
        })
    }

    /// Fetches one session scoped to a project; `None` when no row matches.
    pub fn get_by_project_id_id(
        &self,
        project_id: &ProjectId,
        id: &SessionId,
    ) -> Result<Option<session::Session>> {
        self.database.transaction(|tx| {
            let mut stmt = get_by_project_id_id_stmt(tx)
                .context("Failed to prepare get_by_project_id_id statement")?;
            let mut rows = stmt
                .query(rusqlite::named_params! {
                    ":project_id": project_id,
                    ":id": id,
                })
                .context("Failed to execute get_by_project_id_id statement")?;
            if let Some(row) = rows
                .next()
                .context("Failed to iterate over get_by_project_id_id results")?
            {
                Ok(Some(parse_row(row)?))
            } else {
                Ok(None)
            }
        })
    }

    /// Fetches one session by id across all projects; `None` when absent.
    pub fn get_by_id(&self, id: &SessionId) -> Result<Option<session::Session>> {
        self.database.transaction(|tx| {
            let mut stmt = get_by_id_stmt(tx).context("Failed to prepare get_by_id statement")?;
            let mut rows = stmt
                .query(rusqlite::named_params! {
                    ":id": id,
                })
                .context("Failed to execute get_by_id statement")?;
            if let Some(row) = rows
                .next()
                .context("Failed to iterate over get_by_id results")?
            {
                Ok(Some(parse_row(row)?))
            } else {
                Ok(None)
            }
        })
    }
}
/// Converts a `sessions` row into a `session::Session`.
///
/// Column indices must match the SELECT column order used by the statement
/// helpers below: 0=id, 1=project_id (intentionally unused here), 2=hash,
/// 3=branch, 4=commit, 5=start_timestamp_ms, 6=last_timestamp_ms.
fn parse_row(row: &rusqlite::Row) -> Result<session::Session> {
    Ok(session::Session {
        id: row.get(0).context("Failed to get id")?,
        // hash is stored as nullable text and parsed back into an oid
        hash: row
            .get::<usize, Option<String>>(2)
            .context("Failed to get hash")?
            .map(|hash| hash.parse().context("Failed to parse hash"))
            .transpose()?,
        meta: session::Meta {
            branch: row.get(3).context("Failed to get branch")?,
            commit: row.get(4).context("Failed to get commit")?,
            // timestamps are persisted as text; parse back into u128
            start_timestamp_ms: row
                .get::<usize, String>(5)
                .context("Failed to get start_timestamp_ms")?
                .parse()
                .context("Failed to parse start_timestamp_ms")?,
            last_timestamp_ms: row
                .get::<usize, String>(6)
                .context("Failed to get last_timestamp_ms")?
                .parse()
                .context("Failed to parse last_timestamp_ms")?,
        },
    })
}
/// Prepares the cached statement selecting all sessions of a project,
/// newest (by start timestamp) first. Column order must match `parse_row`.
fn list_by_project_id_stmt<'conn>(
    tx: &'conn rusqlite::Transaction,
) -> Result<rusqlite::CachedStatement<'conn>> {
    Ok(tx.prepare_cached(
        "SELECT `id`, `project_id`, `hash`, `branch`, `commit`, `start_timestamp_ms`, `last_timestamp_ms` FROM `sessions` WHERE `project_id` = :project_id ORDER BY `start_timestamp_ms` DESC",
    )?)
}
/// Prepares the cached statement selecting one session by project id and
/// session id. Column order must match `parse_row`.
fn get_by_project_id_id_stmt<'conn>(
    tx: &'conn rusqlite::Transaction,
) -> Result<rusqlite::CachedStatement<'conn>> {
    Ok(tx.prepare_cached(
        "SELECT `id`, `project_id`, `hash`, `branch`, `commit`, `start_timestamp_ms`, `last_timestamp_ms` FROM `sessions` WHERE `project_id` = :project_id AND `id` = :id",
    )?)
}
/// Prepares the cached statement selecting one session by id alone.
/// Column order must match `parse_row`.
fn get_by_id_stmt<'conn>(
    tx: &'conn rusqlite::Transaction,
) -> Result<rusqlite::CachedStatement<'conn>> {
    Ok(tx.prepare_cached(
        "SELECT `id`, `project_id`, `hash`, `branch`, `commit`, `start_timestamp_ms`, `last_timestamp_ms` FROM `sessions` WHERE `id` = :id",
    )?)
}
/// Prepares the cached upsert statement: inserts a session row, or — when a
/// row with the same `id` already exists — updates every other column.
fn insert_stmt<'conn>(
    tx: &'conn rusqlite::Transaction,
) -> Result<rusqlite::CachedStatement<'conn>> {
    Ok(tx.prepare_cached(
        "INSERT INTO 'sessions' (
            `id`, `project_id`, `hash`, `branch`, `commit`, `start_timestamp_ms`, `last_timestamp_ms`
        ) VALUES (
            :id, :project_id, :hash, :branch, :commit, :start_timestamp_ms, :last_timestamp_ms
        ) ON CONFLICT(`id`) DO UPDATE SET
            `project_id` = :project_id,
            `hash` = :hash,
            `branch` = :branch,
            `commit` = :commit,
            `start_timestamp_ms` = :start_timestamp_ms,
            `last_timestamp_ms` = :last_timestamp_ms
        ",
    )?)
}

View File

@ -1,67 +0,0 @@
use anyhow::{Context, Result};
use super::{Session, SessionError};
use crate::{git, reader};
/// Iterates over stored sessions by walking the commit history of the
/// gitbutler sessions repository, most recent commits first.
pub struct SessionsIterator<'iterator> {
    git_repository: &'iterator git::Repository,
    iter: git2::Revwalk<'iterator>,
}

impl<'iterator> SessionsIterator<'iterator> {
    /// Creates a revwalk seeded with the tips of all branches, sorted
    /// topologically and by time.
    pub(crate) fn new(git_repository: &'iterator git::Repository) -> Result<Self> {
        let mut iter = git_repository
            .revwalk()
            .context("failed to create revwalk")?;
        iter.set_sorting(git2::Sort::TOPOLOGICAL | git2::Sort::TIME)
            .context("failed to set sorting")?;
        let branches = git_repository.branches(None)?;
        for branch in branches {
            let (branch, _) = branch.context("failed to get branch")?;
            iter.push(branch.peel_to_commit()?.id().into())
                .with_context(|| format!("failed to push branch {:?}", branch.name()))?;
        }
        Ok(Self {
            git_repository,
            iter,
        })
    }
}
impl<'iterator> Iterator for SessionsIterator<'iterator> {
    type Item = Result<Session>;

    /// Yields the session stored in the next commit of the walk.
    ///
    /// Parentless (initial) commits are skipped by recursing into
    /// `self.next()`. A commit that carries no session metadata
    /// (`SessionError::NoSession`) terminates the iteration.
    fn next(&mut self) -> Option<Self::Item> {
        match self.iter.next() {
            Some(Result::Ok(oid)) => {
                let commit = match self.git_repository.find_commit(oid.into()) {
                    Result::Ok(commit) => commit,
                    Err(err) => return Some(Err(err.into())),
                };
                if commit.parent_count() == 0 {
                    // skip initial commit, as it's impossible to get a list of files from it
                    // it's only used to bootstrap the history
                    return self.next();
                }
                let commit_reader = match reader::Reader::from_commit(self.git_repository, &commit)
                {
                    Result::Ok(commit_reader) => commit_reader,
                    Err(err) => return Some(Err(err)),
                };
                let session = match Session::try_from(&commit_reader) {
                    Result::Ok(session) => session,
                    // no session metadata in this commit: end of the stream
                    Err(SessionError::NoSession) => return None,
                    Err(err) => return Some(Err(err.into())),
                };
                Some(Ok(session))
            }
            Some(Err(err)) => Some(Err(err.into())),
            None => None,
        }
    }
}

View File

@ -1,12 +0,0 @@
mod iterator;
mod reader;
pub mod session;
mod writer;
pub mod database;
pub use database::Database;
pub use iterator::SessionsIterator;
pub use reader::SessionReader as Reader;
pub use session::{Meta, Session, SessionError, SessionId};
pub use writer::SessionWriter as Writer;

View File

@ -1,104 +0,0 @@
use std::{collections::HashMap, path};
use anyhow::{anyhow, Context, Result};
use super::Session;
use crate::{gb_repository, reader};
/// Reads files and content belonging to a single session.
pub struct SessionReader<'reader> {
    // reader for the current session. commit or wd
    reader: reader::Reader<'reader>,
    // reader for the previous session's commit
    previous_reader: reader::Reader<'reader>,
}

/// Errors produced when listing or reading session files.
#[derive(thiserror::Error, Debug)]
pub enum FileError {
    #[error(transparent)]
    Reader(#[from] reader::Error),
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}
impl<'reader> SessionReader<'reader> {
    /// Returns the reader positioned at the session's own content.
    pub fn reader(&self) -> &reader::Reader<'reader> {
        &self.reader
    }

    /// Opens a reader pair for `session`.
    ///
    /// If `session` is the current (unsaved) one, reads come from the
    /// repository working directory and the "previous" state is the HEAD
    /// commit. Otherwise both readers are positioned at the session's
    /// commit and its first parent. Fails when the session is neither
    /// current nor has a commit hash.
    pub fn open(repository: &'reader gb_repository::Repository, session: &Session) -> Result<Self> {
        let wd_reader = reader::Reader::open(&repository.root())?;
        if let Ok(reader::Content::UTF8(current_session_id)) = wd_reader.read("session/meta/id") {
            if current_session_id == session.id.to_string() {
                let head_commit = repository.git_repository().head()?.peel_to_commit()?;
                return Ok(SessionReader {
                    reader: wd_reader,
                    previous_reader: reader::Reader::from_commit(
                        repository.git_repository(),
                        &head_commit,
                    )?,
                });
            }
        }
        let session_hash = if let Some(hash) = &session.hash {
            hash
        } else {
            return Err(anyhow!(
                "can not open reader for {} because it has no commit hash nor it is a current session",
                session.id
            ));
        };
        let commit = repository
            .git_repository()
            .find_commit(*session_hash)
            .context("failed to get commit")?;
        let commit_reader = reader::Reader::from_commit(repository.git_repository(), &commit)?;
        Ok(SessionReader {
            reader: commit_reader,
            previous_reader: reader::Reader::from_commit(
                repository.git_repository(),
                &commit.parent(0)?,
            )?,
        })
    }

    /// Batch-reads the session's working-directory files *as of the
    /// previous session's end* (i.e. from `previous_reader`), keyed by
    /// path relative to the `wd` directory. `filter` restricts the result
    /// to the given relative paths.
    pub fn files(
        &self,
        filter: Option<&[&path::Path]>,
    ) -> Result<HashMap<path::PathBuf, reader::Content>, FileError> {
        let wd_dir = path::Path::new("wd");
        let mut paths = self.previous_reader.list_files(wd_dir)?;
        if let Some(filter) = filter {
            paths = paths
                .into_iter()
                .filter(|file_path| filter.iter().any(|path| file_path.eq(path)))
                .collect::<Vec<_>>();
        }
        paths = paths.iter().map(|path| wd_dir.join(path)).collect();
        let files = self
            .previous_reader
            .batch(&paths)
            .context("failed to batch read")?;
        let files = files.into_iter().collect::<Result<Vec<_>, _>>()?;
        // strip the `wd/` prefix back off for the returned keys
        Ok(paths
            .into_iter()
            .zip(files)
            .filter_map(|(path, file)| {
                path.strip_prefix(wd_dir)
                    .ok()
                    .map(|path| (path.to_path_buf(), file))
            })
            .collect::<HashMap<_, _>>())
    }

    /// Reads a single working-directory file from the previous session's
    /// state; `path` is relative to the `wd` directory.
    pub fn file<P: AsRef<path::Path>>(&self, path: P) -> Result<reader::Content, reader::Error> {
        let path = path.as_ref();
        self.previous_reader
            .read(std::path::Path::new("wd").join(path))
    }
}

View File

@ -1,126 +0,0 @@
use std::path;
use anyhow::{Context, Result};
use serde::Serialize;
use thiserror::Error;
use crate::{git, id::Id, reader};
/// Metadata describing a session's lifetime and git context.
#[derive(Debug, Clone, Serialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Meta {
    // timestamp of when the session was created
    pub start_timestamp_ms: u128,
    // timestamp of when the session was last active
    pub last_timestamp_ms: u128,
    // session branch name
    pub branch: Option<String>,
    // session commit hash
    pub commit: Option<String>,
}

/// Strongly-typed uuid identifying a session.
pub type SessionId = Id<Session>;

/// A recorded working session.
#[derive(Debug, Clone, Serialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Session {
    pub id: SessionId,
    // if hash is not set, the session is not saved aka current
    pub hash: Option<git::Oid>,
    pub meta: Meta,
}

/// Failure modes when decoding a session from a reader.
#[derive(Error, Debug)]
pub enum SessionError {
    // the reader contains no session metadata at all
    #[error("session does not exist")]
    NoSession,
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}
impl TryFrom<&reader::Reader<'_>> for Session {
    type Error = SessionError;

    /// Decodes a `Session` from the `session/meta/*` files.
    ///
    /// `id`, `start` and `last` are mandatory — a missing file there maps
    /// to `SessionError::NoSession`. `branch` and `commit` are optional
    /// and become `None` when absent.
    fn try_from(reader: &reader::Reader) -> Result<Self, Self::Error> {
        // one batch read for all five metadata files; result order matches
        // the path order below
        let results = reader
            .batch(&[
                path::Path::new("session/meta/id"),
                path::Path::new("session/meta/start"),
                path::Path::new("session/meta/last"),
                path::Path::new("session/meta/branch"),
                path::Path::new("session/meta/commit"),
            ])
            .context("failed to batch read")?;

        let id = &results[0];
        let start_timestamp_ms = &results[1];
        let last_timestamp_ms = &results[2];
        let branch = &results[3];
        let commit = &results[4];

        let id = id.clone().map_err(|error| match error {
            reader::Error::NotFound => SessionError::NoSession,
            error => SessionError::Other(error.into()),
        })?;
        let id: String = id
            .try_into()
            .context("failed to parse session id as string")
            .map_err(SessionError::Other)?;
        let id: SessionId = id.parse().context("failed to parse session id as uuid")?;

        let start_timestamp_ms = start_timestamp_ms.clone().map_err(|error| match error {
            reader::Error::NotFound => SessionError::NoSession,
            error => SessionError::Other(error.into()),
        })?;
        let start_timestamp_ms: u128 = start_timestamp_ms
            .try_into()
            .context("failed to parse session start timestamp as number")
            .map_err(SessionError::Other)?;

        let last_timestamp_ms = last_timestamp_ms.clone().map_err(|error| match error {
            reader::Error::NotFound => SessionError::NoSession,
            error => SessionError::Other(error.into()),
        })?;
        let last_timestamp_ms: u128 = last_timestamp_ms
            .try_into()
            .context("failed to parse session last timestamp as number")
            .map_err(SessionError::Other)?;

        // optional: absent branch file means no branch recorded
        let branch = match branch.clone() {
            Ok(branch) => {
                let branch = branch
                    .try_into()
                    .context("failed to parse session branch as string")?;
                Ok(Some(branch))
            }
            Err(reader::Error::NotFound) => Ok(None),
            Err(e) => Err(e),
        }
        .context("failed to parse session branch as string")?;

        // optional: absent commit file means no commit recorded
        let commit = match commit.clone() {
            Ok(commit) => {
                let commit = commit
                    .try_into()
                    .context("failed to parse session commit as string")?;
                Ok(Some(commit))
            }
            Err(reader::Error::NotFound) => Ok(None),
            Err(e) => Err(e),
        }
        .context("failed to parse session commit as string")?;

        Ok(Self {
            id,
            // hash is taken from the reader's commit, when reading a
            // committed (non-current) session
            hash: reader.commit_id(),
            meta: Meta {
                start_timestamp_ms,
                last_timestamp_ms,
                branch,
                commit,
            },
        })
    }
}

View File

@ -1,101 +0,0 @@
use anyhow::{anyhow, Context, Result};
use super::Session;
use crate::{gb_repository, reader, writer};
/// Persists session metadata as `session/meta/*` files inside the
/// gitbutler repository's working directory.
pub struct SessionWriter<'writer> {
    repository: &'writer gb_repository::Repository,
    writer: writer::DirWriter,
}

impl<'writer> SessionWriter<'writer> {
    /// Creates a writer rooted at the repository's root directory.
    pub fn new(repository: &'writer gb_repository::Repository) -> Result<Self, std::io::Error> {
        writer::DirWriter::open(repository.root())
            .map(|writer| SessionWriter { repository, writer })
    }

    /// Deletes the whole `session` directory, discarding the current session.
    pub fn remove(&self) -> Result<()> {
        self.writer.remove("session")?;

        tracing::debug!(
            project_id = %self.repository.get_project_id(),
            "deleted session"
        );

        Ok(())
    }

    /// Writes `session`'s metadata files.
    ///
    /// * Only unsaved sessions (no hash) can be written.
    /// * If a *different* session is currently open, this fails.
    /// * If the *same* session is already open, only the `last` timestamp
    ///   is refreshed; otherwise all meta files are (re)written, removing
    ///   `branch`/`commit` files when those fields are `None`.
    pub fn write(&self, session: &Session) -> Result<()> {
        if session.hash.is_some() {
            return Err(anyhow!("can not open writer for a session with a hash"));
        }

        let reader = reader::Reader::open(&self.repository.root())
            .context("failed to open current session reader")?;

        let current_session_id =
            if let Ok(reader::Content::UTF8(current_session_id)) = reader.read("session/meta/id") {
                Some(current_session_id)
            } else {
                None
            };

        if current_session_id.is_some()
            && current_session_id.as_ref() != Some(&session.id.to_string())
        {
            return Err(anyhow!(
                "{}: can not open writer for {} because a writer for {} is still open",
                self.repository.get_project_id(),
                session.id,
                current_session_id.unwrap()
            ));
        }

        // every write refreshes the last-active timestamp
        let mut batch = vec![writer::BatchTask::Write(
            "session/meta/last",
            crate::time::now_ms().to_string(),
        )];

        if current_session_id.is_some()
            && current_session_id.as_ref() == Some(&session.id.to_string())
        {
            // same session already open: touching `last` is enough
            self.writer
                .batch(&batch)
                .context("failed to write last timestamp")?;
            return Ok(());
        }

        batch.push(writer::BatchTask::Write(
            "session/meta/id",
            session.id.to_string(),
        ));
        batch.push(writer::BatchTask::Write(
            "session/meta/start",
            session.meta.start_timestamp_ms.to_string(),
        ));

        if let Some(branch) = session.meta.branch.as_ref() {
            batch.push(writer::BatchTask::Write(
                "session/meta/branch",
                branch.to_string(),
            ));
        } else {
            batch.push(writer::BatchTask::Remove("session/meta/branch"));
        }

        if let Some(commit) = session.meta.commit.as_ref() {
            batch.push(writer::BatchTask::Write(
                "session/meta/commit",
                commit.to_string(),
            ));
        } else {
            batch.push(writer::BatchTask::Remove("session/meta/commit"));
        }

        self.writer
            .batch(&batch)
            .context("failed to write session meta")?;

        Ok(())
    }
}

View File

@ -1,18 +1,13 @@
mod suite {
mod gb_repository;
mod projects;
mod virtual_branches;
}
mod database;
mod deltas;
mod error;
mod gb_repository;
mod git;
mod keys;
mod lock;
mod reader;
mod sessions;
mod types;
pub mod virtual_branches;
mod zip;

View File

@ -1,21 +0,0 @@
use gitbutler_core::database::Database;
use gitbutler_testsupport::temp_dir;
/// End-to-end sanity check: open a database in a fresh temp directory and
/// run a create/insert/select round-trip inside a single transaction.
#[test]
fn smoke() {
    let dir = temp_dir();
    let database = Database::open_in_directory(dir.path()).unwrap();
    database
        .transaction(|tx| {
            tx.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)", [])
                .unwrap();
            tx.execute("INSERT INTO test (id) VALUES (1)", []).unwrap();
            let mut select = tx.prepare("SELECT id FROM test").unwrap();
            let mut rows = select.query([]).unwrap();
            let first_row = rows.next().unwrap().unwrap();
            let value: i32 = first_row.get(0).unwrap();
            assert_eq!(value, 1_i32);
            Ok(())
        })
        .unwrap();
}

View File

@ -1,264 +0,0 @@
use gitbutler_core::{
deltas::{operations::Operation, Delta, Document},
reader,
};
// Fresh document from existing content: no deltas recorded yet.
#[test]
fn new() {
    let document = Document::new(
        Some(&reader::Content::UTF8("hello world".to_string())),
        vec![],
    );
    assert!(document.is_ok());
    let document = document.unwrap();
    assert_eq!(document.to_string(), "hello world");
    assert_eq!(document.get_deltas().len(), 0);
}

// A single-character append produces one delta with one Insert operation.
#[test]
fn update() {
    let document = Document::new(
        Some(&reader::Content::UTF8("hello world".to_string())),
        vec![],
    );
    assert!(document.is_ok());
    let mut document = document.unwrap();

    document
        .update(Some(&reader::Content::UTF8("hello world!".to_string())))
        .unwrap();

    assert_eq!(document.to_string(), "hello world!");
    assert_eq!(document.get_deltas().len(), 1);
    assert_eq!(document.get_deltas()[0].operations.len(), 1);
    assert_eq!(
        document.get_deltas()[0].operations[0],
        Operation::Insert((11, "!".to_string()))
    );
}

// Updating an initially-empty document records a full-content Insert.
#[test]
fn empty() {
    let document = Document::new(None, vec![]);
    assert!(document.is_ok());
    let mut document = document.unwrap();

    document
        .update(Some(&reader::Content::UTF8("hello world!".to_string())))
        .unwrap();

    assert_eq!(document.to_string(), "hello world!");
    assert_eq!(document.get_deltas().len(), 1);
    assert_eq!(document.get_deltas()[0].operations.len(), 1);
    assert_eq!(
        document.get_deltas()[0].operations[0],
        Operation::Insert((0, "hello world!".to_string()))
    );
}

// Replaying a sequence of stored deltas reconstructs the final text.
#[test]
fn from_deltas() {
    let document = Document::new(
        None,
        vec![
            Delta {
                timestamp_ms: 0,
                operations: vec![Operation::Insert((0, "hello".to_string()))],
            },
            Delta {
                timestamp_ms: 1,
                operations: vec![Operation::Insert((5, " world".to_string()))],
            },
            Delta {
                timestamp_ms: 2,
                operations: vec![
                    Operation::Delete((3, 7)),
                    Operation::Insert((4, "!".to_string())),
                ],
            },
        ],
    );
    assert!(document.is_ok());
    let document = document.unwrap();
    assert_eq!(document.to_string(), "held!");
}

// Successive single-line edits: each update yields its own delta, and a
// combined delete+insert edit yields two operations in one delta.
#[test]
fn complex_line() {
    let document = Document::new(None, vec![]);
    assert!(document.is_ok());
    let mut document = document.unwrap();

    document
        .update(Some(&reader::Content::UTF8("hello".to_string())))
        .unwrap();
    assert_eq!(document.to_string(), "hello");
    assert_eq!(document.get_deltas().len(), 1);
    assert_eq!(document.get_deltas()[0].operations.len(), 1);
    assert_eq!(
        document.get_deltas()[0].operations[0],
        Operation::Insert((0, "hello".to_string()))
    );

    document
        .update(Some(&reader::Content::UTF8("hello world".to_string())))
        .unwrap();
    assert_eq!(document.to_string(), "hello world");
    assert_eq!(document.get_deltas().len(), 2);
    assert_eq!(document.get_deltas()[1].operations.len(), 1);
    assert_eq!(
        document.get_deltas()[1].operations[0],
        Operation::Insert((5, " world".to_string()))
    );

    document
        .update(Some(&reader::Content::UTF8("held!".to_string())))
        .unwrap();
    assert_eq!(document.to_string(), "held!");
    assert_eq!(document.get_deltas().len(), 3);
    assert_eq!(document.get_deltas()[2].operations.len(), 2);
    assert_eq!(
        document.get_deltas()[2].operations[0],
        Operation::Delete((3, 7))
    );
    assert_eq!(
        document.get_deltas()[2].operations[1],
        Operation::Insert((4, "!".to_string())),
    );
}

// Edits that introduce new lines are captured as Insert operations,
// possibly split across multiple positions.
#[test]
fn multiline_add() {
    let document = Document::new(None, vec![]);
    assert!(document.is_ok());
    let mut document = document.unwrap();

    document
        .update(Some(&reader::Content::UTF8("first".to_string())))
        .unwrap();
    assert_eq!(document.to_string(), "first");
    assert_eq!(document.get_deltas().len(), 1);
    assert_eq!(document.get_deltas()[0].operations.len(), 1);
    assert_eq!(
        document.get_deltas()[0].operations[0],
        Operation::Insert((0, "first".to_string()))
    );

    document
        .update(Some(&reader::Content::UTF8("first\ntwo".to_string())))
        .unwrap();
    assert_eq!(document.to_string(), "first\ntwo");
    assert_eq!(document.get_deltas().len(), 2);
    assert_eq!(document.get_deltas()[1].operations.len(), 1);
    assert_eq!(
        document.get_deltas()[1].operations[0],
        Operation::Insert((5, "\ntwo".to_string()))
    );

    document
        .update(Some(&reader::Content::UTF8(
            "first line\nline two".to_string(),
        )))
        .unwrap();
    assert_eq!(document.to_string(), "first line\nline two");
    assert_eq!(document.get_deltas().len(), 3);
    assert_eq!(document.get_deltas()[2].operations.len(), 2);
    assert_eq!(
        document.get_deltas()[2].operations[0],
        Operation::Insert((5, " line".to_string()))
    );
    assert_eq!(
        document.get_deltas()[2].operations[1],
        Operation::Insert((11, "line ".to_string()))
    );
}

// Edits that remove text across lines are captured as Delete operations;
// updating with None empties the document.
#[test]
fn multiline_remove() {
    let document = Document::new(None, vec![]);
    assert!(document.is_ok());
    let mut document = document.unwrap();

    document
        .update(Some(&reader::Content::UTF8(
            "first line\nline two".to_string(),
        )))
        .unwrap();
    assert_eq!(document.to_string(), "first line\nline two");
    assert_eq!(document.get_deltas().len(), 1);
    assert_eq!(document.get_deltas()[0].operations.len(), 1);
    assert_eq!(
        document.get_deltas()[0].operations[0],
        Operation::Insert((0, "first line\nline two".to_string()))
    );

    document
        .update(Some(&reader::Content::UTF8("first\ntwo".to_string())))
        .unwrap();
    assert_eq!(document.to_string(), "first\ntwo");
    assert_eq!(document.get_deltas().len(), 2);
    assert_eq!(document.get_deltas()[1].operations.len(), 2);
    assert_eq!(
        document.get_deltas()[1].operations[0],
        Operation::Delete((5, 5))
    );
    assert_eq!(
        document.get_deltas()[1].operations[1],
        Operation::Delete((6, 5))
    );

    document
        .update(Some(&reader::Content::UTF8("first".to_string())))
        .unwrap();
    assert_eq!(document.to_string(), "first");
    assert_eq!(document.get_deltas().len(), 3);
    assert_eq!(document.get_deltas()[2].operations.len(), 1);
    assert_eq!(
        document.get_deltas()[2].operations[0],
        Operation::Delete((5, 4))
    );

    document.update(None).unwrap();
    assert_eq!(document.to_string(), "");
    assert_eq!(document.get_deltas().len(), 4);
    assert_eq!(document.get_deltas()[3].operations.len(), 1);
    assert_eq!(
        document.get_deltas()[3].operations[0],
        Operation::Delete((0, 5))
    );
}

// Binary content transitions: binary -> text adopts the new text.
#[test]
fn binary_to_text() {
    let latest = reader::Content::Binary;
    let current = reader::Content::UTF8("test".to_string());
    let mut document = Document::new(Some(&latest), vec![]).unwrap();
    let new_deltas = document.update(Some(&current)).unwrap();
    assert!(new_deltas.is_some());
    assert_eq!(document.to_string(), "test");
}

// binary -> binary stays textually empty but still records a delta.
#[test]
fn binary_to_binary() {
    let latest = reader::Content::Binary;
    let current = reader::Content::Binary;
    let mut document = Document::new(Some(&latest), vec![]).unwrap();
    let new_deltas = document.update(Some(&current)).unwrap();
    assert!(new_deltas.is_some());
    assert_eq!(document.to_string(), "");
}

// text -> binary wipes the textual representation.
#[test]
fn text_to_binary() {
    let latest = reader::Content::UTF8("text".to_string());
    let current = reader::Content::Binary;
    let mut document = Document::new(Some(&latest), vec![]).unwrap();
    let new_deltas = document.update(Some(&current)).unwrap();
    assert!(new_deltas.is_some());
    assert_eq!(document.to_string(), "");
}

// Multi-byte (non-BMP) characters are handled as whole code points.
#[test]
fn unicode() {
    let latest = reader::Content::UTF8("\u{1f31a}".to_string());
    let current = reader::Content::UTF8("\u{1f31d}".to_string());
    let mut document = Document::new(Some(&latest), vec![]).unwrap();
    document.update(Some(&current)).unwrap();
    assert_eq!(document.to_string(), "\u{1f31d}");
}

View File

@ -1,150 +0,0 @@
mod database {
    use std::path;
    use gitbutler_core::{
        deltas::{operations, Database, Delta},
        projects::ProjectId,
        sessions::SessionId,
    };
    use gitbutler_testsupport::test_database;

    /// A single inserted delta is listed back keyed by its file path.
    #[test]
    fn insert_query() -> anyhow::Result<()> {
        let (db, _tmp) = test_database();
        let database = Database::new(db);
        let project_id = ProjectId::generate();
        let session_id = SessionId::generate();
        let file_path = path::PathBuf::from("file_path");
        let delta = Delta {
            timestamp_ms: 0,
            operations: vec![operations::Operation::Insert((0, "text".to_string()))],
        };
        database.insert(&project_id, &session_id, &file_path, &[delta.clone()])?;
        // The listing is keyed by the path's string form.
        assert_eq!(
            database.list_by_project_id_session_id(&project_id, &session_id, &None)?,
            vec![(file_path.display().to_string(), vec![delta])]
                .into_iter()
                .collect()
        );
        Ok(())
    }

    /// Inserting a second delta with an identical key replaces the first
    /// (the table's primary key covers project, session, timestamp, path).
    #[test]
    fn insert_update() -> anyhow::Result<()> {
        let (db, _tmp) = test_database();
        let database = Database::new(db);
        let project_id = ProjectId::generate();
        let session_id = SessionId::generate();
        let file_path = path::PathBuf::from("file_path");
        let first = Delta {
            timestamp_ms: 0,
            operations: vec![operations::Operation::Insert((0, "text".to_string()))],
        };
        let second = Delta {
            timestamp_ms: 0,
            operations: vec![operations::Operation::Insert((
                0,
                "updated_text".to_string(),
            ))],
        };
        database.insert(&project_id, &session_id, &file_path, &[first])?;
        database.insert(&project_id, &session_id, &file_path, &[second.clone()])?;
        assert_eq!(
            database.list_by_project_id_session_id(&project_id, &session_id, &None)?,
            vec![(file_path.display().to_string(), vec![second])]
                .into_iter()
                .collect()
        );
        Ok(())
    }

    /// The listing aggregates deltas per file.
    #[test]
    fn aggregate_deltas_by_file() -> anyhow::Result<()> {
        let (db, _tmp) = test_database();
        let database = Database::new(db);
        let project_id = ProjectId::generate();
        let session_id = SessionId::generate();
        let file_path1 = path::PathBuf::from("file_path1");
        let file_path2 = path::PathBuf::from("file_path2");
        let delta1 = Delta {
            timestamp_ms: 1,
            operations: vec![operations::Operation::Insert((0, "text".to_string()))],
        };
        let delta2 = Delta {
            timestamp_ms: 2,
            operations: vec![operations::Operation::Insert((
                0,
                "updated_text".to_string(),
            ))],
        };
        database.insert(&project_id, &session_id, &file_path1, &[delta1.clone()])?;
        database.insert(&project_id, &session_id, &file_path2, &[delta1.clone()])?;
        database.insert(&project_id, &session_id, &file_path2, &[delta2.clone()])?;
        assert_eq!(
            database.list_by_project_id_session_id(&project_id, &session_id, &None)?,
            vec![
                (file_path1.display().to_string(), vec![delta1.clone()]),
                (file_path2.display().to_string(), vec![delta1, delta2])
            ]
            .into_iter()
            .collect()
        );
        Ok(())
    }
}
mod document;
mod operations;
mod writer {
    use std::vec;
    use gitbutler_core::{deltas, deltas::operations::Operation, sessions};
    use gitbutler_testsupport::{Case, Suite};

    /// Written deltas round-trip through the session reader; files that
    /// were never written read back as `None`.
    #[test]
    fn write_no_vbranches() -> anyhow::Result<()> {
        let suite = Suite::default();
        let Case { gb_repository, .. } = &suite.new_case();

        let writer = deltas::Writer::new(gb_repository)?;
        let session = gb_repository.get_or_create_current_session()?;
        let session_reader = sessions::Reader::open(gb_repository, &session)?;
        let reader = gitbutler_core::deltas::Reader::new(&session_reader);

        let path = "test.txt";
        let written = vec![
            gitbutler_core::deltas::Delta {
                operations: vec![Operation::Insert((0, "hello".to_string()))],
                timestamp_ms: 0,
            },
            gitbutler_core::deltas::Delta {
                operations: vec![Operation::Insert((5, " world".to_string()))],
                timestamp_ms: 0,
            },
        ];
        writer.write(path, &written).unwrap();

        assert_eq!(reader.read_file(path).unwrap(), Some(written));
        assert_eq!(reader.read_file("not found").unwrap(), None);
        Ok(())
    }
}

View File

@ -1,55 +0,0 @@
use gitbutler_core::deltas::operations::{get_delta_operations, Operation};
#[test]
fn get_delta_operations_insert_end() {
    // Appending text produces a single Insert anchored at the old length.
    let ops = get_delta_operations("hello", "hello world!");
    assert_eq!(ops, vec![Operation::Insert((5, " world!".to_string()))]);
}
#[test]
fn get_delta_operations_insert_middle() {
    // An insertion between two unchanged halves is reported as one op.
    let ops = get_delta_operations("helloworld", "hello, world");
    assert_eq!(ops, vec![Operation::Insert((5, ", ".to_string()))]);
}
#[test]
fn get_delta_operations_insert_begin() {
    // Prepending text yields a single Insert at offset zero.
    let ops = get_delta_operations("world", "hello world");
    assert_eq!(ops, vec![Operation::Insert((0, "hello ".to_string()))]);
}
#[test]
fn get_delta_operations_delete_end() {
    // Truncating the tail yields one Delete((offset, length)).
    let ops = get_delta_operations("hello world!", "hello");
    assert_eq!(ops, vec![Operation::Delete((5, 7))]);
}
#[test]
fn get_delta_operations_delete_middle() {
    // Removing an interior span yields a single Delete at its offset.
    let ops = get_delta_operations("hello, world", "helloworld");
    assert_eq!(ops, vec![Operation::Delete((5, 2))]);
}
#[test]
fn get_delta_operations_delete_begin() {
    // Removing a prefix yields a Delete starting at offset zero.
    let ops = get_delta_operations("hello world", "world");
    assert_eq!(ops, vec![Operation::Delete((0, 6))]);
}

View File

@ -1,489 +0,0 @@
use std::{collections::HashMap, path, thread, time};
use anyhow::Result;
use gitbutler_core::{
deltas::{self, operations::Operation},
projects::{self, ApiProject, ProjectId},
reader,
sessions::{self, SessionId},
};
use pretty_assertions::assert_eq;
use tempfile::TempDir;
use gitbutler_testsupport::{init_opts_bare, Case, Suite};
mod repository {
    use std::path::PathBuf;
    use anyhow::Result;
    use pretty_assertions::assert_eq;
    use gitbutler_testsupport::{Case, Suite};

    /// The butler repository's `objects/info/alternates` file must point
    /// at the project repository's own object store.
    #[test]
    fn alternates_file_being_set() -> Result<()> {
        let suite = Suite::default();
        let Case {
            gb_repository,
            project_repository,
            ..
        } = &suite.new_case();

        let alternates = gb_repository
            .git_repository_path()
            .join("objects/info/alternates");
        let recorded = PathBuf::from(std::fs::read_to_string(alternates)?.trim());
        assert_eq!(
            recorded,
            project_repository.path().to_path_buf().join(".git/objects")
        );
        Ok(())
    }
}
/// Create a bare git repository in a fresh temp dir, standing in for the
/// GitButler cloud remote. The `TempDir` is returned so the directory
/// outlives the repository handle.
fn new_test_remote_repository() -> Result<(git2::Repository, TempDir)> {
    let tmp = tempfile::tempdir()?;
    // Pass the path directly instead of round-tripping through a String,
    // which allocated needlessly and would panic on non-UTF-8 paths.
    // This also matches the sibling helper elsewhere in the test suite.
    let repo = git2::Repository::init_opts(tmp.path(), &init_opts_bare())?;
    Ok((repo, tmp))
}
#[test]
fn get_current_session_writer_should_use_existing_session() -> Result<()> {
    // Two consecutive calls must reuse the same session, not mint a new one.
    let suite = Suite::default();
    let Case { gb_repository, .. } = &suite.new_case();
    let first = gb_repository.get_or_create_current_session()?;
    let second = gb_repository.get_or_create_current_session()?;
    assert_eq!(first.id, second.id);
    Ok(())
}
#[test]
fn must_not_return_init_session() -> Result<()> {
    // A freshly initialized repository has no current session and no
    // flushed sessions to iterate.
    let suite = Suite::default();
    let Case { gb_repository, .. } = &suite.new_case();
    assert!(gb_repository.get_current_session()?.is_none());
    assert_eq!(gb_repository.get_sessions_iterator()?.count(), 0);
    Ok(())
}
#[test]
fn must_not_flush_without_current_session() -> Result<()> {
    // Flushing with no active session is a no-op: nothing is returned
    // and no session is persisted.
    let suite = Suite::default();
    let Case {
        gb_repository,
        project_repository,
        ..
    } = &suite.new_case();
    assert!(gb_repository.flush(project_repository, None)?.is_none());
    assert_eq!(gb_repository.get_sessions_iterator()?.count(), 0);
    Ok(())
}
#[test]
fn non_empty_repository() -> Result<()> {
    // Flushing must also work when the project already contains files.
    let suite = Suite::default();
    let seed = HashMap::from([(path::PathBuf::from("test.txt"), "test")]);
    let Case {
        gb_repository,
        project_repository,
        ..
    } = &suite.new_case_with_files(seed);
    gb_repository.get_or_create_current_session()?;
    gb_repository.flush(project_repository, None)?;
    Ok(())
}
#[test]
fn must_flush_current_session() -> Result<()> {
    // With an active session, flush returns it and leaves exactly one
    // session in history.
    let suite = Suite::default();
    let Case {
        gb_repository,
        project_repository,
        ..
    } = &suite.new_case();
    gb_repository.get_or_create_current_session()?;
    assert!(gb_repository.flush(project_repository, None)?.is_some());
    assert_eq!(gb_repository.get_sessions_iterator()?.count(), 1);
    Ok(())
}
#[test]
fn list_deltas_from_current_session() -> Result<()> {
    // Deltas written into the live (unflushed) session are readable
    // straight back through a session reader.
    let suite = Suite::default();
    let Case { gb_repository, .. } = &suite.new_case();
    let current_session = gb_repository.get_or_create_current_session()?;

    deltas::Writer::new(gb_repository)?.write(
        "test.txt",
        &vec![deltas::Delta {
            operations: vec![Operation::Insert((0, "Hello World".to_string()))],
            timestamp_ms: 0,
        }],
    )?;

    let session_reader = sessions::Reader::open(gb_repository, &current_session)?;
    let deltas = deltas::Reader::new(&session_reader).read(None)?;
    assert_eq!(deltas.len(), 1);
    let file_deltas = &deltas[&path::PathBuf::from("test.txt")];
    assert_eq!(file_deltas[0].operations.len(), 1);
    assert_eq!(
        file_deltas[0].operations[0],
        Operation::Insert((0, "Hello World".to_string()))
    );
    Ok(())
}
#[test]
fn list_deltas_from_flushed_session() {
    // Deltas survive a flush and are readable from the flushed session.
    let suite = Suite::default();
    let Case {
        gb_repository,
        project_repository,
        ..
    } = &suite.new_case();

    deltas::Writer::new(gb_repository)
        .unwrap()
        .write(
            "test.txt",
            &vec![deltas::Delta {
                operations: vec![Operation::Insert((0, "Hello World".to_string()))],
                timestamp_ms: 0,
            }],
        )
        .unwrap();

    let session = gb_repository
        .flush(project_repository, None)
        .unwrap()
        .unwrap();
    let session_reader = sessions::Reader::open(gb_repository, &session).unwrap();
    let deltas = deltas::Reader::new(&session_reader).read(None).unwrap();
    assert_eq!(deltas.len(), 1);
    let file_deltas = &deltas[&path::PathBuf::from("test.txt")];
    assert_eq!(file_deltas[0].operations.len(), 1);
    assert_eq!(
        file_deltas[0].operations[0],
        Operation::Insert((0, "Hello World".to_string()))
    );
}
#[test]
fn list_files_from_current_session() {
    // Files present when the session starts are listed by the reader.
    let suite = Suite::default();
    let Case { gb_repository, .. } = &suite.new_case_with_files(HashMap::from([(
        path::PathBuf::from("test.txt"),
        "Hello World",
    )]));
    let session = gb_repository.get_or_create_current_session().unwrap();
    let files = sessions::Reader::open(gb_repository, &session)
        .unwrap()
        .files(None)
        .unwrap();
    assert_eq!(files.len(), 1);
    assert_eq!(
        files[&path::PathBuf::from("test.txt")],
        reader::Content::UTF8("Hello World".to_string())
    );
}
#[test]
fn list_files_from_flushed_session() {
    // File listings remain available after the session is flushed.
    let suite = Suite::default();
    let Case {
        gb_repository,
        project_repository,
        ..
    } = &suite.new_case_with_files(HashMap::from([(
        path::PathBuf::from("test.txt"),
        "Hello World",
    )]));
    gb_repository.get_or_create_current_session().unwrap();
    let session = gb_repository
        .flush(project_repository, None)
        .unwrap()
        .unwrap();
    let files = sessions::Reader::open(gb_repository, &session)
        .unwrap()
        .files(None)
        .unwrap();
    assert_eq!(files.len(), 1);
    assert_eq!(
        files[&path::PathBuf::from("test.txt")],
        reader::Content::UTF8("Hello World".to_string())
    );
}
#[tokio::test]
async fn remote_syncronization() {
    // Stand up a bare repository that plays the role of the cloud remote.
    let (cloud, _tmp) = new_test_remote_repository().unwrap();
    let api_project = ApiProject {
        name: "test-sync".to_string(),
        description: None,
        repository_id: "123".to_string(),
        git_url: cloud.path().to_str().unwrap().to_string(),
        code_git_url: None,
        created_at: 0_i32.to_string(),
        updated_at: 0_i32.to_string(),
        sync: true,
    };

    let suite = Suite::default();
    let user = suite.sign_in();

    // Project one: seed a file, attach the API project, write a delta,
    // then flush + push a session.
    let case_one = suite.new_case_with_files(HashMap::from([(
        path::PathBuf::from("test.txt"),
        "Hello World",
    )]));
    suite
        .projects
        .update(&projects::UpdateRequest {
            id: case_one.project.id,
            api: Some(api_project.clone()),
            ..Default::default()
        })
        .await
        .unwrap();
    let case_one = case_one.refresh(&suite);

    deltas::Writer::new(&case_one.gb_repository)
        .unwrap()
        .write(
            "test.txt",
            &vec![deltas::Delta {
                operations: vec![Operation::Insert((0, "Hello World".to_string()))],
                timestamp_ms: 0,
            }],
        )
        .unwrap();
    let session_one = case_one
        .gb_repository
        .flush(&case_one.project_repository, Some(&user))
        .unwrap()
        .unwrap();
    case_one.gb_repository.push(Some(&user)).unwrap();

    // Project two: attach the same API project and fetch from the cloud.
    let case_two = suite.new_case();
    suite
        .projects
        .update(&projects::UpdateRequest {
            id: case_two.project.id,
            api: Some(api_project.clone()),
            ..Default::default()
        })
        .await
        .unwrap();
    let case_two = case_two.refresh(&suite);
    case_two.gb_repository.fetch(Some(&user)).unwrap();

    // The session flushed by project one must now be visible in project
    // two, with identical files and deltas.
    let sessions_two = case_two
        .gb_repository
        .get_sessions_iterator()
        .unwrap()
        .map(Result::unwrap)
        .collect::<Vec<_>>();
    assert_eq!(sessions_two.len(), 1);
    assert_eq!(sessions_two[0].id, session_one.id);

    let session_reader = sessions::Reader::open(&case_two.gb_repository, &sessions_two[0]).unwrap();
    let deltas_reader = deltas::Reader::new(&session_reader);
    let deltas = deltas_reader.read(None).unwrap();
    let files = session_reader.files(None).unwrap();
    assert_eq!(deltas.len(), 1);
    assert_eq!(files.len(), 1);
    assert_eq!(
        files[&path::PathBuf::from("test.txt")],
        reader::Content::UTF8("Hello World".to_string())
    );
    assert_eq!(
        deltas[&path::PathBuf::from("test.txt")],
        vec![deltas::Delta {
            operations: vec![Operation::Insert((0, "Hello World".to_string()))],
            timestamp_ms: 0,
        }]
    );
}
#[tokio::test]
async fn remote_sync_order() {
    // A bare repository standing in for the cloud remote.
    let (cloud, _tmp) = new_test_remote_repository().unwrap();
    let api_project = projects::ApiProject {
        name: "test-sync".to_string(),
        description: None,
        repository_id: "123".to_string(),
        git_url: cloud.path().to_str().unwrap().to_string(),
        code_git_url: None,
        created_at: 0_i32.to_string(),
        updated_at: 0_i32.to_string(),
        sync: true,
    };

    let suite = Suite::default();

    // Two local projects, both syncing against the same remote.
    let case_one = suite.new_case();
    suite
        .projects
        .update(&projects::UpdateRequest {
            id: case_one.project.id,
            api: Some(api_project.clone()),
            ..Default::default()
        })
        .await
        .unwrap();
    let case_one = case_one.refresh(&suite);

    let case_two = suite.new_case();
    suite
        .projects
        .update(&projects::UpdateRequest {
            id: case_two.project.id,
            api: Some(api_project.clone()),
            ..Default::default()
        })
        .await
        .unwrap();
    let case_two = case_two.refresh(&suite);

    let user = suite.sign_in();

    // Interleave four sessions (one, two, one, two), sleeping between
    // flushes so their timestamps are strictly ordered.
    case_one
        .gb_repository
        .get_or_create_current_session()
        .unwrap();
    let session_one_first = case_one
        .gb_repository
        .flush(&case_one.project_repository, Some(&user))
        .unwrap()
        .unwrap();
    case_one.gb_repository.push(Some(&user)).unwrap();
    thread::sleep(time::Duration::from_secs(1));

    case_two
        .gb_repository
        .get_or_create_current_session()
        .unwrap();
    let session_two_first = case_two
        .gb_repository
        .flush(&case_two.project_repository, Some(&user))
        .unwrap()
        .unwrap();
    case_two.gb_repository.push(Some(&user)).unwrap();
    thread::sleep(time::Duration::from_secs(1));

    case_one
        .gb_repository
        .get_or_create_current_session()
        .unwrap();
    let session_one_second = case_one
        .gb_repository
        .flush(&case_one.project_repository, Some(&user))
        .unwrap()
        .unwrap();
    case_one.gb_repository.push(Some(&user)).unwrap();
    thread::sleep(time::Duration::from_secs(1));

    case_two
        .gb_repository
        .get_or_create_current_session()
        .unwrap();
    let session_two_second = case_two
        .gb_repository
        .flush(&case_two.project_repository, Some(&user))
        .unwrap()
        .unwrap();
    case_two.gb_repository.push(Some(&user)).unwrap();

    // After a fetch, both repositories must agree on the full history,
    // newest session first.
    case_one.gb_repository.fetch(Some(&user)).unwrap();
    let sessions_one = case_one
        .gb_repository
        .get_sessions_iterator()
        .unwrap()
        .map(Result::unwrap)
        .collect::<Vec<_>>();
    case_two.gb_repository.fetch(Some(&user)).unwrap();
    let sessions_two = case_two
        .gb_repository
        .get_sessions_iterator()
        .unwrap()
        .map(Result::unwrap)
        .collect::<Vec<_>>();
    assert_eq!(sessions_one.len(), 4);
    assert_eq!(sessions_two, sessions_one);
    assert_eq!(sessions_one[0].id, session_two_second.id);
    assert_eq!(sessions_one[1].id, session_one_second.id);
    assert_eq!(sessions_one[2].id, session_two_first.id);
    assert_eq!(sessions_one[3].id, session_one_first.id);
}
#[test]
fn gitbutler_file() {
    // Creating a session drops a `.git/gitbutler.json` marker into the
    // project repo recording the session and project ids.
    let suite = Suite::default();
    let Case {
        gb_repository,
        project_repository,
        ..
    } = &suite.new_case();
    let session = gb_repository.get_or_create_current_session().unwrap();

    let marker = project_repository.path().join(".git/gitbutler.json");
    assert!(marker.exists());
    let json: serde_json::Value =
        serde_json::from_str(&std::fs::read_to_string(&marker).unwrap()).unwrap();

    let sid: SessionId = json["sessionId"].as_str().unwrap().parse().unwrap();
    assert_eq!(sid, session.id);
    let pid: ProjectId = json["repositoryId"].as_str().unwrap().parse().unwrap();
    assert_eq!(pid, project_repository.project().id);
}

View File

@ -1,456 +0,0 @@
use std::{collections::HashMap, path, thread, time};
use anyhow::Result;
use gitbutler_core::{
deltas::{self, operations::Operation},
projects::{self, ApiProject, ProjectId},
reader,
sessions::{self, SessionId},
};
use pretty_assertions::assert_eq;
use tempfile::TempDir;
use gitbutler_testsupport::{init_opts_bare, Case, Suite};
/// Initialize a bare repository in a new temp dir to act as the remote;
/// the `TempDir` guard is handed back so the directory stays alive.
fn new_test_remote_repository() -> Result<(git2::Repository, TempDir)> {
    let tmp = tempfile::tempdir()?;
    let remote = git2::Repository::init_opts(&tmp, &init_opts_bare())?;
    Ok((remote, tmp))
}
#[test]
fn get_current_session_writer_should_use_existing_session() -> Result<()> {
    // Two consecutive calls must reuse the same session, not mint a new one.
    let suite = Suite::default();
    let Case { gb_repository, .. } = &suite.new_case();
    let first = gb_repository.get_or_create_current_session()?;
    let second = gb_repository.get_or_create_current_session()?;
    assert_eq!(first.id, second.id);
    Ok(())
}
#[test]
fn must_not_return_init_session() -> Result<()> {
    // A freshly initialized repository has no current session and no
    // flushed sessions to iterate.
    let suite = Suite::default();
    let Case { gb_repository, .. } = &suite.new_case();
    assert!(gb_repository.get_current_session()?.is_none());
    assert_eq!(gb_repository.get_sessions_iterator()?.count(), 0);
    Ok(())
}
#[test]
fn must_not_flush_without_current_session() -> Result<()> {
    // Flushing with no active session is a no-op: nothing is returned
    // and no session is persisted.
    let suite = Suite::default();
    let Case {
        gb_repository,
        project_repository,
        ..
    } = &suite.new_case();
    assert!(gb_repository.flush(project_repository, None)?.is_none());
    assert_eq!(gb_repository.get_sessions_iterator()?.count(), 0);
    Ok(())
}
#[test]
fn non_empty_repository() -> Result<()> {
    // Flushing must also work when the project already contains files.
    let suite = Suite::default();
    let seed = HashMap::from([(path::PathBuf::from("test.txt"), "test")]);
    let Case {
        gb_repository,
        project_repository,
        ..
    } = &suite.new_case_with_files(seed);
    gb_repository.get_or_create_current_session()?;
    gb_repository.flush(project_repository, None)?;
    Ok(())
}
#[test]
fn must_flush_current_session() -> Result<()> {
    // With an active session, flush returns it and leaves exactly one
    // session in history.
    let suite = Suite::default();
    let Case {
        gb_repository,
        project_repository,
        ..
    } = &suite.new_case();
    gb_repository.get_or_create_current_session()?;
    assert!(gb_repository.flush(project_repository, None)?.is_some());
    assert_eq!(gb_repository.get_sessions_iterator()?.count(), 1);
    Ok(())
}
#[test]
fn list_deltas_from_current_session() -> Result<()> {
    // Deltas written into the live (unflushed) session are readable
    // straight back through a session reader.
    let suite = Suite::default();
    let Case { gb_repository, .. } = &suite.new_case();
    let current_session = gb_repository.get_or_create_current_session()?;

    deltas::Writer::new(gb_repository)?.write(
        "test.txt",
        &vec![deltas::Delta {
            operations: vec![Operation::Insert((0, "Hello World".to_string()))],
            timestamp_ms: 0,
        }],
    )?;

    let session_reader = sessions::Reader::open(gb_repository, &current_session)?;
    let deltas = deltas::Reader::new(&session_reader).read(None)?;
    assert_eq!(deltas.len(), 1);
    let file_deltas = &deltas[&path::PathBuf::from("test.txt")];
    assert_eq!(file_deltas[0].operations.len(), 1);
    assert_eq!(
        file_deltas[0].operations[0],
        Operation::Insert((0, "Hello World".to_string()))
    );
    Ok(())
}
#[test]
fn list_deltas_from_flushed_session() {
    // Deltas survive a flush and are readable from the flushed session.
    let suite = Suite::default();
    let Case {
        gb_repository,
        project_repository,
        ..
    } = &suite.new_case();

    deltas::Writer::new(gb_repository)
        .unwrap()
        .write(
            "test.txt",
            &vec![deltas::Delta {
                operations: vec![Operation::Insert((0, "Hello World".to_string()))],
                timestamp_ms: 0,
            }],
        )
        .unwrap();

    let session = gb_repository
        .flush(project_repository, None)
        .unwrap()
        .unwrap();
    let session_reader = sessions::Reader::open(gb_repository, &session).unwrap();
    let deltas = deltas::Reader::new(&session_reader).read(None).unwrap();
    assert_eq!(deltas.len(), 1);
    let file_deltas = &deltas[&path::PathBuf::from("test.txt")];
    assert_eq!(file_deltas[0].operations.len(), 1);
    assert_eq!(
        file_deltas[0].operations[0],
        Operation::Insert((0, "Hello World".to_string()))
    );
}
#[test]
fn list_files_from_current_session() {
    // Files present when the session starts are listed by the reader.
    let suite = Suite::default();
    let Case { gb_repository, .. } = &suite.new_case_with_files(HashMap::from([(
        path::PathBuf::from("test.txt"),
        "Hello World",
    )]));
    let session = gb_repository.get_or_create_current_session().unwrap();
    let files = sessions::Reader::open(gb_repository, &session)
        .unwrap()
        .files(None)
        .unwrap();
    assert_eq!(files.len(), 1);
    assert_eq!(
        files[&path::PathBuf::from("test.txt")],
        reader::Content::UTF8("Hello World".to_string())
    );
}
#[test]
fn list_files_from_flushed_session() {
    // File listings remain available after the session is flushed.
    let suite = Suite::default();
    let Case {
        gb_repository,
        project_repository,
        ..
    } = &suite.new_case_with_files(HashMap::from([(
        path::PathBuf::from("test.txt"),
        "Hello World",
    )]));
    gb_repository.get_or_create_current_session().unwrap();
    let session = gb_repository
        .flush(project_repository, None)
        .unwrap()
        .unwrap();
    let files = sessions::Reader::open(gb_repository, &session)
        .unwrap()
        .files(None)
        .unwrap();
    assert_eq!(files.len(), 1);
    assert_eq!(
        files[&path::PathBuf::from("test.txt")],
        reader::Content::UTF8("Hello World".to_string())
    );
}
#[tokio::test]
async fn remote_syncronization() {
    // Stand up a bare repository that plays the role of the cloud remote.
    let (cloud, _tmp) = new_test_remote_repository().unwrap();
    let api_project = ApiProject {
        name: "test-sync".to_string(),
        description: None,
        repository_id: "123".to_string(),
        git_url: cloud.path().to_str().unwrap().to_string(),
        code_git_url: None,
        created_at: 0_i32.to_string(),
        updated_at: 0_i32.to_string(),
        sync: true,
    };

    let suite = Suite::default();
    let user = suite.sign_in();

    // Project one: seed a file, attach the API project, write a delta,
    // then flush + push a session.
    let case_one = suite.new_case_with_files(HashMap::from([(
        path::PathBuf::from("test.txt"),
        "Hello World",
    )]));
    suite
        .projects
        .update(&projects::UpdateRequest {
            id: case_one.project.id,
            api: Some(api_project.clone()),
            ..Default::default()
        })
        .await
        .unwrap();
    let case_one = case_one.refresh(&suite);

    deltas::Writer::new(&case_one.gb_repository)
        .unwrap()
        .write(
            "test.txt",
            &vec![deltas::Delta {
                operations: vec![Operation::Insert((0, "Hello World".to_string()))],
                timestamp_ms: 0,
            }],
        )
        .unwrap();
    let session_one = case_one
        .gb_repository
        .flush(&case_one.project_repository, Some(&user))
        .unwrap()
        .unwrap();
    case_one.gb_repository.push(Some(&user)).unwrap();

    // Project two: attach the same API project and fetch from the cloud.
    let case_two = suite.new_case();
    suite
        .projects
        .update(&projects::UpdateRequest {
            id: case_two.project.id,
            api: Some(api_project.clone()),
            ..Default::default()
        })
        .await
        .unwrap();
    let case_two = case_two.refresh(&suite);
    case_two.gb_repository.fetch(Some(&user)).unwrap();

    // The session flushed by project one must now be visible in project
    // two, with identical files and deltas.
    let sessions_two = case_two
        .gb_repository
        .get_sessions_iterator()
        .unwrap()
        .map(Result::unwrap)
        .collect::<Vec<_>>();
    assert_eq!(sessions_two.len(), 1);
    assert_eq!(sessions_two[0].id, session_one.id);

    let session_reader = sessions::Reader::open(&case_two.gb_repository, &sessions_two[0]).unwrap();
    let deltas_reader = deltas::Reader::new(&session_reader);
    let deltas = deltas_reader.read(None).unwrap();
    let files = session_reader.files(None).unwrap();
    assert_eq!(deltas.len(), 1);
    assert_eq!(files.len(), 1);
    assert_eq!(
        files[&path::PathBuf::from("test.txt")],
        reader::Content::UTF8("Hello World".to_string())
    );
    assert_eq!(
        deltas[&path::PathBuf::from("test.txt")],
        vec![deltas::Delta {
            operations: vec![Operation::Insert((0, "Hello World".to_string()))],
            timestamp_ms: 0,
        }]
    );
}
#[tokio::test]
async fn remote_sync_order() {
    // A bare repository standing in for the cloud remote.
    let (cloud, _tmp) = new_test_remote_repository().unwrap();
    let api_project = projects::ApiProject {
        name: "test-sync".to_string(),
        description: None,
        repository_id: "123".to_string(),
        git_url: cloud.path().to_str().unwrap().to_string(),
        code_git_url: None,
        created_at: 0_i32.to_string(),
        updated_at: 0_i32.to_string(),
        sync: true,
    };

    let suite = Suite::default();

    // Two local projects, both syncing against the same remote.
    let case_one = suite.new_case();
    suite
        .projects
        .update(&projects::UpdateRequest {
            id: case_one.project.id,
            api: Some(api_project.clone()),
            ..Default::default()
        })
        .await
        .unwrap();
    let case_one = case_one.refresh(&suite);

    let case_two = suite.new_case();
    suite
        .projects
        .update(&projects::UpdateRequest {
            id: case_two.project.id,
            api: Some(api_project.clone()),
            ..Default::default()
        })
        .await
        .unwrap();
    let case_two = case_two.refresh(&suite);

    let user = suite.sign_in();

    // Interleave four sessions (one, two, one, two), sleeping between
    // flushes so their timestamps are strictly ordered.
    case_one
        .gb_repository
        .get_or_create_current_session()
        .unwrap();
    let session_one_first = case_one
        .gb_repository
        .flush(&case_one.project_repository, Some(&user))
        .unwrap()
        .unwrap();
    case_one.gb_repository.push(Some(&user)).unwrap();
    thread::sleep(time::Duration::from_secs(1));

    case_two
        .gb_repository
        .get_or_create_current_session()
        .unwrap();
    let session_two_first = case_two
        .gb_repository
        .flush(&case_two.project_repository, Some(&user))
        .unwrap()
        .unwrap();
    case_two.gb_repository.push(Some(&user)).unwrap();
    thread::sleep(time::Duration::from_secs(1));

    case_one
        .gb_repository
        .get_or_create_current_session()
        .unwrap();
    let session_one_second = case_one
        .gb_repository
        .flush(&case_one.project_repository, Some(&user))
        .unwrap()
        .unwrap();
    case_one.gb_repository.push(Some(&user)).unwrap();
    thread::sleep(time::Duration::from_secs(1));

    case_two
        .gb_repository
        .get_or_create_current_session()
        .unwrap();
    let session_two_second = case_two
        .gb_repository
        .flush(&case_two.project_repository, Some(&user))
        .unwrap()
        .unwrap();
    case_two.gb_repository.push(Some(&user)).unwrap();

    // After a fetch, both repositories must agree on the full history,
    // newest session first.
    case_one.gb_repository.fetch(Some(&user)).unwrap();
    let sessions_one = case_one
        .gb_repository
        .get_sessions_iterator()
        .unwrap()
        .map(Result::unwrap)
        .collect::<Vec<_>>();
    case_two.gb_repository.fetch(Some(&user)).unwrap();
    let sessions_two = case_two
        .gb_repository
        .get_sessions_iterator()
        .unwrap()
        .map(Result::unwrap)
        .collect::<Vec<_>>();
    assert_eq!(sessions_one.len(), 4);
    assert_eq!(sessions_two, sessions_one);
    assert_eq!(sessions_one[0].id, session_two_second.id);
    assert_eq!(sessions_one[1].id, session_one_second.id);
    assert_eq!(sessions_one[2].id, session_two_first.id);
    assert_eq!(sessions_one[3].id, session_one_first.id);
}
#[test]
fn gitbutler_file() {
    // Creating a session drops a `.git/gitbutler.json` marker into the
    // project repo recording the session and project ids.
    let suite = Suite::default();
    let Case {
        gb_repository,
        project_repository,
        ..
    } = &suite.new_case();
    let session = gb_repository.get_or_create_current_session().unwrap();

    let marker = project_repository.path().join(".git/gitbutler.json");
    assert!(marker.exists());
    let json: serde_json::Value =
        serde_json::from_str(&std::fs::read_to_string(&marker).unwrap()).unwrap();

    let sid: SessionId = json["sessionId"].as_str().unwrap().parse().unwrap();
    assert_eq!(sid, session.id);
    let pid: ProjectId = json["repositoryId"].as_str().unwrap().parse().unwrap();
    assert_eq!(pid, project_repository.project().id);
}

View File

@ -1,3 +1,2 @@
mod config;
mod credentials;
mod diff;

View File

@ -1,87 +0,0 @@
use gitbutler_core::{
projects::ProjectId,
sessions::{session, Database, Session, SessionId},
};
use gitbutler_testsupport::test_database;
/// Inserted sessions are listed newest-first and retrievable by id;
/// unknown ids yield `None`.
#[test]
fn insert_query() -> anyhow::Result<()> {
    // Removed leftover debug `println!("0")` / `println!("1")` calls that
    // were committed around the setup.
    let (db, _tmp) = test_database();
    let database = Database::new(db);
    let project_id = ProjectId::generate();
    let session1 = Session {
        id: SessionId::generate(),
        hash: None,
        meta: session::Meta {
            branch: None,
            commit: None,
            start_timestamp_ms: 1,
            last_timestamp_ms: 2,
        },
    };
    let session2 = session::Session {
        id: SessionId::generate(),
        hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()),
        meta: session::Meta {
            branch: Some("branch2".to_string()),
            commit: Some("commit2".to_string()),
            start_timestamp_ms: 3,
            last_timestamp_ms: 4,
        },
    };
    database.insert(&project_id, &[&session1, &session2])?;
    // Listing returns the later-started session first.
    assert_eq!(
        database.list_by_project_id(&project_id, None)?,
        vec![session2.clone(), session1.clone()]
    );
    assert_eq!(database.get_by_id(&session1.id)?.unwrap(), session1);
    assert_eq!(database.get_by_id(&session2.id)?.unwrap(), session2);
    assert_eq!(database.get_by_id(&SessionId::generate())?, None);
    Ok(())
}
/// Re-inserting a session with the same id overwrites the stored row.
#[test]
fn update() -> anyhow::Result<()> {
    let (db, _tmp) = test_database();
    let database = Database::new(db);
    let project_id = ProjectId::generate();
    let original = session::Session {
        id: SessionId::generate(),
        hash: None,
        meta: session::Meta {
            branch: None,
            commit: None,
            start_timestamp_ms: 1,
            last_timestamp_ms: 2,
        },
    };
    let updated = session::Session {
        id: original.id,
        hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()),
        meta: session::Meta {
            branch: Some("branch2".to_string()),
            commit: Some("commit2".to_string()),
            start_timestamp_ms: 3,
            last_timestamp_ms: 4,
        },
    };
    database.insert(&project_id, &[&original])?;
    database.insert(&project_id, &[&updated])?;
    assert_eq!(
        database.list_by_project_id(&project_id, None)?,
        vec![updated.clone()]
    );
    assert_eq!(database.get_by_id(&original.id)?.unwrap(), updated);
    Ok(())
}

View File

@ -1,106 +0,0 @@
mod database;
use anyhow::Result;
use gitbutler_core::sessions::{self, session::SessionId};
use gitbutler_testsupport::{Case, Suite};
/// A session that already carries a hash has been flushed; the writer
/// must refuse to overwrite it.
#[test]
fn should_not_write_session_with_hash() {
    let suite = Suite::default();
    let Case { gb_repository, .. } = &suite.new_case();
    let flushed = sessions::Session {
        id: SessionId::generate(),
        hash: Some("08f23df1b9c2dec3d0c826a3ae745f9b821a1a26".parse().unwrap()),
        meta: sessions::Meta {
            start_timestamp_ms: 0,
            last_timestamp_ms: 1,
            branch: Some("branch".to_string()),
            commit: Some("commit".to_string()),
        },
    };
    let result = sessions::Writer::new(gb_repository).unwrap().write(&flushed);
    assert!(result.is_err());
}
/// Writing a session persists its id, commit, branch and start time under
/// `meta/`.
#[test]
fn should_write_full_session() -> Result<()> {
    let suite = Suite::default();
    let Case { gb_repository, .. } = &suite.new_case();
    let session = sessions::Session {
        id: SessionId::generate(),
        hash: None,
        meta: sessions::Meta {
            start_timestamp_ms: 0,
            last_timestamp_ms: 1,
            branch: Some("branch".to_string()),
            commit: Some("commit".to_string()),
        },
    };
    sessions::Writer::new(gb_repository)?.write(&session)?;

    let meta = |name: &str| std::fs::read_to_string(gb_repository.session_path().join(name));
    assert_eq!(meta("meta/id")?, session.id.to_string());
    assert_eq!(meta("meta/commit")?, "commit");
    assert_eq!(meta("meta/branch")?, "branch");
    assert_eq!(meta("meta/start")?, "0");
    // NOTE(review): the writer appears to stamp a fresh `last` value on
    // write, hence the supplied "1" must not be what is stored.
    assert_ne!(meta("meta/last")?, "1");
    Ok(())
}
#[test]
fn should_write_partial_session() -> Result<()> {
    // When branch/commit are absent, their meta files must simply not be
    // created; the remaining fields are written as usual.
    let suite = Suite::default();
    let Case { gb_repository, .. } = &suite.new_case();

    let sess = sessions::Session {
        id: SessionId::generate(),
        hash: None,
        meta: sessions::Meta {
            start_timestamp_ms: 0,
            last_timestamp_ms: 1,
            branch: None,
            commit: None,
        },
    };

    sessions::Writer::new(gb_repository)?.write(&sess)?;

    let meta_dir = gb_repository.session_path().join("meta");
    assert_eq!(std::fs::read_to_string(meta_dir.join("id"))?, sess.id.to_string());
    assert!(!meta_dir.join("commit").exists());
    assert!(!meta_dir.join("branch").exists());
    assert_eq!(std::fs::read_to_string(meta_dir.join("start"))?, "0");
    // `last` must NOT round-trip the input value (presumably stamped at
    // write time — confirm against the Writer implementation).
    assert_ne!(std::fs::read_to_string(meta_dir.join("last"))?, "1");
    Ok(())
}

View File

@ -1,151 +0,0 @@
use std::path;
use gitbutler_core::{gb_repository, git, project_repository, projects};
use gitbutler_testsupport::{paths, TestProject};
mod init {
    use super::*;

    // Opening a gb repository must tolerate links of various kinds inside
    // the project workdir.

    #[test]
    fn handle_file_symlink() {
        // NOTE(review): despite the name, this creates a *hard* link.
        let workspace = TestProject::default();
        let app_data = paths::data_dir();
        let controller = projects::Controller::from_path(app_data.path());
        let project = controller
            .add(workspace.path())
            .expect("failed to add project");

        std::fs::write(project.path.join("file"), "content").unwrap();
        std::fs::hard_link(project.path.join("file"), project.path.join("link")).unwrap();

        let repo = project_repository::Repository::open(&project).unwrap();
        gb_repository::Repository::open(app_data.path(), &repo, None).unwrap();
    }

    #[test]
    #[cfg(target_family = "unix")]
    fn handle_dir_symlink() {
        // A symlink pointing at a directory in the workdir.
        let workspace = TestProject::default();
        let app_data = paths::data_dir();
        let controller = projects::Controller::from_path(app_data.path());
        let project = controller
            .add(workspace.path())
            .expect("failed to add project");

        std::fs::create_dir_all(project.path.join("dir")).unwrap();
        std::fs::write(project.path.join("dir/file"), "content").unwrap();
        std::os::unix::fs::symlink(project.path.join("dir"), project.path.join("dir_link"))
            .unwrap();

        let repo = project_repository::Repository::open(&project).unwrap();
        gb_repository::Repository::open(app_data.path(), &repo, None).unwrap();
    }

    #[test]
    #[cfg(target_family = "unix")]
    fn handle_dir_symlink_symlink() {
        // A symlink chain: link_link -> dir_link -> dir.
        let workspace = TestProject::default();
        let app_data = paths::data_dir();
        let controller = projects::Controller::from_path(app_data.path());
        let project = controller
            .add(workspace.path())
            .expect("failed to add project");

        std::fs::create_dir_all(project.path.join("dir")).unwrap();
        std::fs::write(project.path.join("dir/file"), "content").unwrap();
        std::os::unix::fs::symlink(project.path.join("dir"), project.path.join("dir_link"))
            .unwrap();
        std::os::unix::fs::symlink(
            project.path.join("dir_link"),
            project.path.join("link_link"),
        )
        .unwrap();

        let repo = project_repository::Repository::open(&project).unwrap();
        gb_repository::Repository::open(app_data.path(), &repo, None).unwrap();
    }
}
mod flush {
    use super::*;

    // Flushing a gb repository must tolerate links and submodules inside
    // the project workdir.

    #[test]
    fn handle_file_symlink() {
        // NOTE(review): despite the name, this creates a *hard* link.
        let workspace = TestProject::default();
        let app_data = paths::data_dir();
        let controller = projects::Controller::from_path(app_data.path());
        let project = controller
            .add(workspace.path())
            .expect("failed to add project");
        let repo = project_repository::Repository::open(&project).unwrap();
        let gb_repo =
            gb_repository::Repository::open(app_data.path(), &repo, None).unwrap();

        std::fs::write(project.path.join("file"), "content").unwrap();
        std::fs::hard_link(project.path.join("file"), project.path.join("link")).unwrap();

        gb_repo.flush(&repo, None).unwrap();
    }

    #[test]
    #[cfg(target_family = "unix")]
    fn handle_dir_symlink() {
        // A symlink pointing at a directory in the workdir.
        let workspace = TestProject::default();
        let app_data = paths::data_dir();
        let controller = projects::Controller::from_path(app_data.path());
        let project = controller
            .add(workspace.path())
            .expect("failed to add project");
        let repo = project_repository::Repository::open(&project).unwrap();
        let gb_repo =
            gb_repository::Repository::open(app_data.path(), &repo, None).unwrap();

        std::fs::create_dir_all(project.path.join("dir")).unwrap();
        std::fs::write(project.path.join("dir/file"), "content").unwrap();
        std::os::unix::fs::symlink(project.path.join("dir"), project.path.join("dir_link"))
            .unwrap();

        gb_repo.flush(&repo, None).unwrap();
    }

    #[test]
    fn handle_submodules() {
        let workspace = TestProject::default();
        let app_data = paths::data_dir();
        let controller = projects::Controller::from_path(app_data.path());
        let project = controller
            .add(workspace.path())
            .expect("failed to add project");
        let repo = project_repository::Repository::open(&project).unwrap();
        let gb_repo =
            gb_repository::Repository::open(app_data.path(), &repo, None).unwrap();

        // A second repository serves as the submodule's upstream.
        let upstream = TestProject::default();
        let submodule_url: git::Url = upstream.path().display().to_string().parse().unwrap();
        workspace.add_submodule(&submodule_url, path::Path::new("submodule"));

        gb_repo.flush(&repo, None).unwrap();
    }
}

View File

@ -15,7 +15,7 @@
use std::path::PathBuf;
use gitbutler_core::{assets, database, git, storage};
use gitbutler_core::{assets, git, storage};
use gitbutler_tauri::{
app, askpass, commands, github, keys, logs, menu, projects, undo, users, virtual_branches,
watcher, zip,
@ -112,17 +112,11 @@ fn main() {
app_handle.manage(assets::Proxy::new(app_cache_dir.join("images")));
let database_controller = database::Database::open_in_directory(&app_data_dir).expect("failed to open database");
app_handle.manage(database_controller.clone());
let zipper = gitbutler_core::zip::Zipper::new(&app_cache_dir);
app_handle.manage(zipper.clone());
app_handle.manage(gitbutler_core::zip::Controller::new(app_data_dir.clone(), app_log_dir.clone(), zipper.clone(), projects_controller.clone()));
let deltas_database_controller = gitbutler_core::deltas::database::Database::new(database_controller.clone());
app_handle.manage(deltas_database_controller.clone());
let keys_storage_controller = gitbutler_core::keys::storage::Storage::new(storage_controller.clone());
app_handle.manage(keys_storage_controller.clone());
@ -143,9 +137,6 @@ fn main() {
git_credentials_controller.clone(),
));
let sessions_database_controller = gitbutler_core::sessions::database::Database::new(database_controller.clone());
app_handle.manage(sessions_database_controller.clone());
let app = app::App::new(
projects_controller,
);

View File

@ -92,7 +92,6 @@ impl Suite {
pub struct Case {
pub project: gitbutler_core::projects::Project,
pub project_repository: gitbutler_core::project_repository::Repository,
pub gb_repository: gitbutler_core::gb_repository::Repository,
pub credentials: gitbutler_core::git::credentials::Helper,
/// The directory containing the `project_repository`
project_tmp: Option<TempDir>,
@ -118,17 +117,10 @@ impl Case {
) -> Case {
let project_repository = gitbutler_core::project_repository::Repository::open(&project)
.expect("failed to create project repository");
let gb_repository = gitbutler_core::gb_repository::Repository::open(
suite.local_app_data(),
&project_repository,
None,
)
.expect("failed to open gb repository");
let credentials =
gitbutler_core::git::credentials::Helper::from_path(suite.local_app_data());
Case {
project,
gb_repository,
project_repository,
project_tmp: Some(project_tmp),
credentials,
@ -142,16 +134,9 @@ impl Case {
.expect("failed to get project");
let project_repository = gitbutler_core::project_repository::Repository::open(&project)
.expect("failed to create project repository");
let user = suite.users.get_user().expect("failed to get user");
let credentials =
gitbutler_core::git::credentials::Helper::from_path(suite.local_app_data());
Self {
gb_repository: gitbutler_core::gb_repository::Repository::open(
suite.local_app_data(),
&project_repository,
user.as_ref(),
)
.expect("failed to open gb repository"),
credentials,
project_repository,
project,
@ -160,12 +145,6 @@ impl Case {
}
}
/// Opens a fresh `Database` inside a brand-new temporary directory.
///
/// The `TempDir` guard is returned alongside the database; callers must keep
/// it alive for the duration of the test, since dropping it deletes the
/// directory backing the database.
pub fn test_database() -> (gitbutler_core::database::Database, TempDir) {
    let dir = temp_dir();
    let db = gitbutler_core::database::Database::open_in_directory(&dir).unwrap();
    (db, dir)
}
/// Creates a new temporary directory.
///
/// The directory is removed from disk when the returned [`TempDir`] guard is
/// dropped, so callers must hold on to it for as long as the path is needed.
///
/// # Panics
/// Panics if the temporary directory cannot be created.
pub fn temp_dir() -> TempDir {
    tempdir().unwrap()
}