mirror of https://github.com/gitbutlerapp/gitbutler.git
synced 2024-12-28 12:05:22 +03:00

frontend successfully lists sessions !!

This commit is contained in:
parent e75863135f
commit ab352b0e4e
src-tauri/Cargo.lock (generated)
@@ -1021,6 +1021,7 @@ dependencies = [
 "tauri-plugin-log",
 "tauri-plugin-window-state",
 "uuid 1.3.0",
 "walkdir",
 "yrs",
]
@@ -29,6 +29,7 @@ filetime = "0.2.19"
sha2 = "0.10.6"
sentry-tauri = "0.1.0"
sentry = "0.27"
walkdir = "2.3.2"

[features]
# by default Tauri runs in production mode
@@ -224,6 +224,10 @@ pub fn save_current_file_deltas(

pub fn list_current_deltas(project_path: &Path) -> Result<HashMap<String, Vec<Delta>>, Error> {
    let deltas_path = project_path.join(".git/gb/session/deltas");
    if !deltas_path.exists() {
        return Ok(HashMap::new());
    }

    let file_paths = fs::list_files(&deltas_path).map_err(|e| Error {
        message: format!("Could not list delta files at {}", deltas_path.display()),
        cause: e.into(),
@@ -1,41 +1,19 @@
use std::{fs, path::Path};
use std::path::Path;

fn list_files_abs(dir_path: &Path) -> Result<Vec<String>, std::io::Error> {
    let mut files = Vec::new();
    if dir_path.is_dir() {
        for entry in fs::read_dir(dir_path)? {
            let entry = entry?;
use walkdir::WalkDir;

// Returns an ordered list of relative paths for files inside a directory recursively.
pub fn list_files(dir_path: &Path) -> Result<Vec<String>, std::io::Error> {
    let mut files = vec![];
    for entry in WalkDir::new(dir_path) {
        let entry = entry?;
        if entry.file_type().is_file() {
            let path = entry.path();
            if path.is_dir() {
                let mut sub_files = list_files(&path)?;
                files.append(&mut sub_files);
            } else {
                match path.to_str() {
                    Some(path) => files.push(path.to_string()),
                    None => {
                        return Err(std::io::Error::new(
                            std::io::ErrorKind::Other,
                            "Invalid path",
                        ))
                    }
                }
            }
            let path = path.strip_prefix(dir_path).unwrap();
            let path = path.to_str().unwrap().to_string();
            files.push(path);
        }
    }
    files.sort();
    Ok(files)
}

// Returns an ordered list of relative paths for files inside a directory recursively.
pub fn list_files(dir_path: &Path) -> Result<Vec<String>, std::io::Error> {
    list_files_abs(dir_path).map(|files| {
        files
            .iter()
            .filter_map(|file| {
                let file_path = Path::new(file);
                let relative_path = file_path.strip_prefix(dir_path).ok()?;
                relative_path.to_str().map(|s| s.to_string())
            })
            .collect()
    })
}
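The rewritten list_files above returns sorted, root-relative paths for every file under a directory. A minimal standalone sketch of the same WalkDir-based approach (the directory used here is only a placeholder):

use std::path::Path;
use walkdir::WalkDir;

// Sketch: collect sorted, root-relative paths of all regular files under `dir`.
fn relative_files(dir: &Path) -> Result<Vec<String>, Box<dyn std::error::Error>> {
    let mut files = vec![];
    for entry in WalkDir::new(dir) {
        let entry = entry?;
        if entry.file_type().is_file() {
            let rel = entry.path().strip_prefix(dir)?;
            files.push(rel.to_string_lossy().into_owned());
        }
    }
    files.sort();
    Ok(files)
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    for f in relative_files(Path::new("."))? {
        println!("{}", f);
    }
    Ok(())
}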
@@ -75,6 +75,41 @@ fn list_project_files(state: State<'_, AppState>, project_id: &str) -> Result<Ve
    }
}

#[tauri::command]
fn list_sessions(
    state: State<'_, AppState>,
    project_id: &str,
) -> Result<Vec<sessions::Session>, Error> {
    match state
        .projects_storage
        .get_project(project_id)
        .map_err(|e| {
            log::error!("{}", e);
            Error {
                message: "Failed to get project".to_string(),
            }
        })? {
        Some(project) => {
            let repo = Repository::open(project.path).map_err(|e| {
                log::error!("{}", e);
                Error {
                    message: "Failed to open project".to_string(),
                }
            })?;
            let sessions = sessions::list_sessions(&repo).map_err(|e| {
                log::error!("{}", e);
                Error {
                    message: "Failed to list sessions".to_string(),
                }
            })?;
            Ok(sessions)
        }
        None => Err(Error {
            message: "Project not found".to_string(),
        }),
    }
}

#[tauri::command]
fn read_project_file(
    state: State<'_, AppState>,
@@ -281,7 +316,8 @@ fn main() {
            add_project,
            list_projects,
            delete_project,
            list_deltas
            list_deltas,
            list_sessions,
        ])
        .run(tauri::generate_context!())
        .expect("error while running tauri application")
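The new list_sessions command returns Result<Vec<sessions::Session>, Error> across the Tauri IPC boundary, which requires both the success and error types to implement serde::Serialize (Session derives it in the hunk below). A minimal sketch of a message-only command error, assuming the Error type in main.rs is roughly this shape:

use serde::Serialize;

// Sketch of a serializable command error; the `message` field matches the
// commands above, the rest of the definition is an assumption.
#[derive(Debug, Serialize)]
pub struct Error {
    pub message: String,
}

// Hypothetical command showing the same Result shape crossing the IPC boundary.
#[tauri::command]
fn ping() -> Result<String, Error> {
    Ok("pong".to_string())
}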
@@ -1,5 +1,9 @@
use std::path::Path;

use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Meta {
    // timestamp of when the session was created
    pub start_ts: u64,
@@ -11,10 +15,47 @@ pub struct Meta {
    pub commit: String,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Session {
    pub meta: Meta,
}

impl Session {
    pub fn from_commit(repo: &git2::Repository, commit: &git2::Commit) -> Result<Self, Error> {
        let tree = commit.tree().map_err(|err| Error {
            cause: err.into(),
            message: "Error while getting commit tree".to_string(),
        })?;

        let start = read_as_string(repo, &tree, Path::new("session/meta/start"))?
            .parse::<u64>()
            .map_err(|err| Error {
                cause: ErrorCause::ParseIntError(err),
                message: "Error while parsing start file".to_string(),
            })?;

        let last = read_as_string(repo, &tree, Path::new("session/meta/last"))?
            .parse::<u64>()
            .map_err(|err| Error {
                cause: ErrorCause::ParseIntError(err),
                message: "Error while parsing last file".to_string(),
            })?;

        let branch = read_as_string(repo, &tree, Path::new("session/meta/branch"))?;
        let commit = read_as_string(repo, &tree, Path::new("session/meta/commit"))?;

        Ok(Session {
            meta: Meta {
                start_ts: start,
                last_ts: last,
                branch,
                commit,
            },
        })
    }
}

#[derive(Debug)]
pub struct Error {
    pub cause: ErrorCause,
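Session::from_commit reads the session metadata back out of a commit tree at session/meta/start, session/meta/last, session/meta/branch and session/meta/commit. The writing side is not shown in full in this diff, so purely as an illustration, here is a sketch of persisting a Meta value as those four plain-text files under a session directory (the helper name is hypothetical and not part of this commit):

use std::{fs, io, path::Path};

// Illustrative helper: write Meta as the four plain-text files that
// Session::from_commit later reads back from the committed tree.
fn write_meta_files(session_dir: &Path, meta: &Meta) -> io::Result<()> {
    let meta_dir = session_dir.join("meta");
    fs::create_dir_all(&meta_dir)?;
    fs::write(meta_dir.join("start"), meta.start_ts.to_string())?;
    fs::write(meta_dir.join("last"), meta.last_ts.to_string())?;
    fs::write(meta_dir.join("branch"), &meta.branch)?;
    fs::write(meta_dir.join("commit"), &meta.commit)?;
    Ok(())
}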
@@ -28,6 +69,8 @@ impl std::error::Error for Error {
            ErrorCause::ParseIntError(err) => Some(err),
            ErrorCause::SessionExistsError => Some(self),
            ErrorCause::SessionDoesNotExistError => Some(self),
            ErrorCause::GitError(err) => Some(err),
            ErrorCause::ParseUtf8Error(err) => Some(err),
        }
    }
}
@@ -39,6 +82,8 @@ impl std::fmt::Display for Error {
            ErrorCause::ParseIntError(ref e) => write!(f, "{}: {}", self.message, e),
            ErrorCause::SessionExistsError => write!(f, "{}", self.message),
            ErrorCause::SessionDoesNotExistError => write!(f, "{}", self.message),
            ErrorCause::GitError(ref e) => write!(f, "{}: {}", self.message, e),
            ErrorCause::ParseUtf8Error(ref e) => write!(f, "{}: {}", self.message, e),
        }
    }
}
@@ -47,8 +92,22 @@ impl std::fmt::Display for Error {
pub enum ErrorCause {
    IOError(std::io::Error),
    ParseIntError(std::num::ParseIntError),
    GitError(git2::Error),
    SessionExistsError,
    SessionDoesNotExistError,
    ParseUtf8Error(std::string::FromUtf8Error),
}

impl From<std::string::FromUtf8Error> for ErrorCause {
    fn from(err: std::string::FromUtf8Error) -> Self {
        ErrorCause::ParseUtf8Error(err)
    }
}

impl From<git2::Error> for ErrorCause {
    fn from(err: git2::Error) -> Self {
        ErrorCause::GitError(err)
    }
}

impl From<std::io::Error> for ErrorCause {
@@ -98,8 +157,9 @@ fn write_current_session(session_path: &Path, session: &Session) -> Result<(), E
    Ok(())
}

pub fn update_current_session(project_path: &Path, session: &Session) -> Result<(), Error> {
    let session_path = project_path.join(".git/gb/session");
pub fn update_current_session(repo: &git2::Repository, session: &Session) -> Result<(), Error> {
    log::debug!("{}: Updating current session", repo.path().display());
    let session_path = repo.path().join("gb/session");
    if session_path.exists() {
        write_current_session(&session_path, session)
    } else {
@@ -110,9 +170,9 @@ pub fn update_current_session(project_path: &Path, session: &Session) -> Result<
    }
}

pub fn create_current_session(project_path: &Path, session: &Session) -> Result<(), Error> {
    log::debug!("{}: Creating current session", project_path.display());
    let session_path = project_path.join(".git/gb/session");
pub fn create_current_session(repo: &git2::Repository, session: &Session) -> Result<(), Error> {
    log::debug!("{}: Creating current session", repo.path().display());
    let session_path = repo.path().join("gb/session");
    if session_path.exists() {
        Err(Error {
            cause: ErrorCause::SessionExistsError,
@@ -123,17 +183,17 @@ pub fn create_current_session(project_path: &Path, session: &Session) -> Result<
    }
}

pub fn delete_current_session(project_path: &Path) -> Result<(), std::io::Error> {
    log::debug!("{}: Deleting current session", project_path.display());
    let session_path = project_path.join(".git/gb/session");
pub fn delete_current_session(repo: &git2::Repository) -> Result<(), std::io::Error> {
    log::debug!("{}: Deleting current session", repo.path().display());
    let session_path = repo.path().join("gb/session");
    if session_path.exists() {
        std::fs::remove_dir_all(session_path)?;
    }
    Ok(())
}

pub fn get_current_session(project_path: &Path) -> Result<Option<Session>, Error> {
    let session_path = project_path.join(".git/gb/session");
pub fn get_current_session(repo: &git2::Repository) -> Result<Option<Session>, Error> {
    let session_path = repo.path().join("gb/session");
    if !session_path.exists() {
        return Ok(None);
    }
@@ -185,3 +245,72 @@ pub fn get_current_session(project_path: &Path) -> Result<Option<Session>, Error
        },
    }))
}

pub fn list_sessions(repo: &git2::Repository) -> Result<Vec<Session>, Error> {
    match repo.revparse_single("refs/gitbutler/current") {
        Err(_) => Ok(vec![]),
        Ok(object) => {
            let gitbutler_head = repo.find_commit(object.id()).map_err(|err| Error {
                cause: err.into(),
                message: "Failed to find gitbutler head".to_string(),
            })?;
            // list all commits from gitbutler head to the first commit
            let mut walker = repo.revwalk().map_err(|err| Error {
                cause: err.into(),
                message: "Failed to create revwalk".to_string(),
            })?;
            walker.push(gitbutler_head.id()).map_err(|err| Error {
                cause: err.into(),
                message: "Failed to push gitbutler head".to_string(),
            })?;
            walker.set_sorting(git2::Sort::TIME).map_err(|err| Error {
                cause: err.into(),
                message: "Failed to set sorting".to_string(),
            })?;

            let mut sessions: Vec<Session> = vec![];
            for id in walker {
                let id = id.map_err(|err| Error {
                    cause: err.into(),
                    message: "Failed to get commit id".to_string(),
                })?;
                let commit = repo.find_commit(id).map_err(|err| Error {
                    cause: err.into(),
                    message: "Failed to find commit".to_string(),
                })?;
                sessions.push(Session::from_commit(repo, &commit)?);
            }
            Ok(sessions)
        }
    }
}

fn read_as_string(
    repo: &git2::Repository,
    tree: &git2::Tree,
    path: &Path,
) -> Result<String, Error> {
    match tree.get_path(path) {
        Ok(tree_entry) => {
            let blob = tree_entry
                .to_object(repo)
                .map_err(|err| Error {
                    cause: err.into(),
                    message: "Error while getting tree entry object".to_string(),
                })?
                .into_blob()
                .unwrap();
            let contents = String::from_utf8(blob.content().to_vec()).map_err(|err| Error {
                cause: err.into(),
                message: "Error while parsing blob as utf8".to_string(),
            })?;
            Ok(contents)
        }
        Err(err) => {
            return Err(Error {
                cause: err.into(),
                message: "Error while getting tree entry".to_string(),
            })
        }
    }
}
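list_sessions walks the commits reachable from the custom refs/gitbutler/current ref, newest first by commit time, and builds a Session from each one. A standalone sketch of that revwalk pattern with git2 (the repository path is a placeholder):

use git2::{Repository, Sort};

fn main() -> Result<(), git2::Error> {
    // Placeholder path; in the app this comes from the stored project record.
    let repo = Repository::open("/path/to/project")?;
    // Resolve the custom ref and walk its history, sorted by commit time.
    let head = repo.revparse_single("refs/gitbutler/current")?;
    let mut walker = repo.revwalk()?;
    walker.push(head.id())?;
    walker.set_sorting(Sort::TIME)?;
    for oid in walker {
        let commit = repo.find_commit(oid?)?;
        println!("{} {}", commit.id(), commit.summary().unwrap_or(""));
    }
    Ok(())
}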
@@ -232,17 +232,16 @@ pub fn get_latest_file_contents(
// this function is called when the user modifies a file, it writes starting metadata if not there
// and also touches the last activity timestamp, so we can tell when we are idle
fn write_beginning_meta_files(repo: &Repository) -> Result<(), Box<dyn std::error::Error>> {
    let project_path = repo.workdir().unwrap();
    let now_ts = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .unwrap()
        .as_secs();
    match sessions::get_current_session(project_path)
    match sessions::get_current_session(repo)
        .map_err(|e| format!("Error while getting current session: {}", e.to_string()))?
    {
        Some(mut session) => {
            session.meta.last_ts = now_ts;
            sessions::update_current_session(project_path, &session)
            sessions::update_current_session(repo, &session)
                .map_err(|e| format!("Error while updating current session: {}", e.to_string()))?;
            Ok(())
        }

@@ -256,7 +255,7 @@ fn write_beginning_meta_files(repo: &Repository) -> Result<(), Box<dyn std::erro
                commit: head.peel_to_commit()?.id().to_string(),
            },
        };
        sessions::create_current_session(project_path, &session)
        sessions::create_current_session(repo, &session)
            .map_err(|e| format!("Error while creating current session: {}", e.to_string()))?;
        Ok(())
    }
@@ -77,7 +77,7 @@ fn check_for_changes(repo: &Repository) -> Result<(), Box<dyn std::error::Error>
            commit_oid
        );

        sessions::delete_current_session(repo.workdir().unwrap())?;
        sessions::delete_current_session(repo)?;
    }

    Ok(())
@@ -89,7 +89,7 @@ fn check_for_changes(repo: &Repository) -> Result<(), Box<dyn std::error::Error>
// and that there has been no activity in the last 5 minutes (the session appears to be over)
// and the start was at most an hour ago
fn ready_to_commit(repo: &Repository) -> Result<bool, Box<dyn std::error::Error>> {
    if let Some(current_session) = sessions::get_current_session(repo.workdir().unwrap())? {
    if let Some(current_session) = sessions::get_current_session(repo)? {
        let now = SystemTime::now()
            .duration_since(SystemTime::UNIX_EPOCH)
            .unwrap()
@@ -98,6 +98,7 @@ fn ready_to_commit(repo: &Repository) -> Result<bool, Box<dyn std::error::Error>
        let elapsed_last = now - current_session.meta.last_ts;
        let elapsed_start = now - current_session.meta.start_ts;

        // TODO: uncomment
        if (elapsed_last > FIVE_MINUTES) || (elapsed_start > ONE_HOUR) {
            Ok(true)
        } else {
@@ -178,7 +179,7 @@ fn add_path(

    // something is different, or not found, so we need to create a new entry

    log::debug!("Adding path: {}", file_path.display());
    log::debug!("Adding wd path: {}", file_path.display());

    // look for files that are bigger than 4GB, which are not supported by git
    // insert a pointer as the blob content instead
@@ -216,28 +217,21 @@ fn add_path(
    };

    // create a new IndexEntry from the file metadata
    let new_entry = git2::IndexEntry {
        ctime: IndexTime::new(
            ctime.seconds().try_into().unwrap(),
            ctime.nanoseconds().try_into().unwrap(),
        ),
        mtime: IndexTime::new(
            mtime.seconds().try_into().unwrap(),
            mtime.nanoseconds().try_into().unwrap(),
        ),
        dev: metadata.dev().try_into().unwrap(),
        ino: metadata.ino().try_into().unwrap(),
    index.add(&git2::IndexEntry {
        ctime: IndexTime::new(ctime.seconds().try_into()?, ctime.nanoseconds().try_into()?),
        mtime: IndexTime::new(mtime.seconds().try_into()?, mtime.nanoseconds().try_into()?),
        dev: metadata.dev().try_into()?,
        ino: metadata.ino().try_into()?,
        mode: metadata.mode(),
        uid: metadata.uid().try_into().unwrap(),
        gid: metadata.gid().try_into().unwrap(),
        file_size: metadata.len().try_into().unwrap(),
        uid: metadata.uid().try_into()?,
        gid: metadata.gid().try_into()?,
        file_size: metadata.len().try_into()?,
        flags: 10, // normal flags for normal file (for the curious: https://git-scm.com/docs/index-format)
        flags_extended: 0, // no extended flags
        path: rel_file_path.to_str().unwrap().to_string().into(),
        id: blob,
    };
    })?;

    index.add(&new_entry)?;
    Ok(())
}
@@ -283,18 +277,16 @@ fn build_gb_tree(

    // add all files in the working directory to the in-memory index, skipping for matching entries in the repo index
    let session_dir = repo.path().join("gb/session");
    for path in fs::list_files(&session_dir)? {
        let file_path = Path::new(&path);
        add_simple_path(&repo, session_index, &file_path)?;
    for session_file in fs::list_files(&session_dir)? {
        let file_path = Path::new(&session_file);
        add_session_path(&repo, session_index, &file_path)?;
    }

    // write the in-memory index to the repo
    let session_tree = session_index.write_tree_to(&repo).unwrap();
    let session_tree = session_index.write_tree_to(&repo)?;

    // insert the session tree oid as a subdirectory under the name 'session'
    tree_builder
        .insert("session", session_tree, 0o040000)
        .unwrap();
    tree_builder.insert("session", session_tree, 0o040000)?;

    // write the new tree and return the Oid
    let tree = tree_builder.write().unwrap();
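build_gb_tree writes the in-memory session index to a tree and then nests that tree under a "session" entry by inserting its oid into a tree builder with the directory filemode 0o040000. A minimal sketch of that nesting pattern with git2 (paths, file names and blob contents are placeholders):

use git2::Repository;

fn main() -> Result<(), git2::Error> {
    let repo = Repository::open("/path/to/repo")?; // placeholder path

    // Build an inner tree containing a single blob.
    let blob = repo.blob(b"hello")?;
    let mut inner = repo.treebuilder(None)?;
    inner.insert("greeting.txt", blob, 0o100644)?;
    let inner_oid = inner.write()?;

    // Insert the inner tree as a subdirectory named "session" in an outer tree.
    let mut outer = repo.treebuilder(None)?;
    outer.insert("session", inner_oid, 0o040000)?;
    let outer_oid = outer.write()?;
    println!("tree: {}", outer_oid);
    Ok(())
}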
@@ -302,56 +294,35 @@
}

// this is a helper function for build_gb_tree that takes paths under .git/gb/session and adds them to the in-memory index
fn add_simple_path(
fn add_session_path(
    repo: &Repository,
    index: &mut git2::Index,
    rel_file_path: &Path,
) -> Result<(), git2::Error> {
    let abs_file_path = repo.workdir().unwrap().join(rel_file_path);
    let file_path = Path::new(&abs_file_path);
) -> Result<(), Box<dyn std::error::Error>> {
    let file_path = repo.path().join("gb/session").join(rel_file_path);

    log::debug!("Adding path: {}", file_path.display());
    log::debug!("Adding session path: {}", file_path.display());

    let blob = repo.blob_path(file_path).unwrap();
    let blob = repo.blob_path(&file_path)?;
    let metadata = file_path.metadata().unwrap();
    let mtime = FileTime::from_last_modification_time(&metadata);
    let ctime = FileTime::from_creation_time(&metadata).unwrap();

    // create a new IndexEntry from the file metadata
    let new_entry = git2::IndexEntry {
        ctime: IndexTime::new(
            ctime
                .seconds()
                .try_into()
                .map_err(|_| git2::Error::from_str("ctime seconds out of range"))?,
            ctime
                .nanoseconds()
                .try_into()
                .map_err(|_| git2::Error::from_str("ctime nanoseconds out of range"))?,
        ),
        mtime: IndexTime::new(
            mtime
                .seconds()
                .try_into()
                .map_err(|_| git2::Error::from_str("mtime seconds out of range"))?,
            mtime
                .nanoseconds()
                .try_into()
                .map_err(|_| git2::Error::from_str("mtime nanoseconds out of range"))?,
        ),
        dev: metadata.dev().try_into().unwrap(),
        ino: metadata.ino().try_into().unwrap(),
    index.add(&git2::IndexEntry {
        ctime: IndexTime::new(ctime.seconds().try_into()?, ctime.nanoseconds().try_into()?),
        mtime: IndexTime::new(mtime.seconds().try_into()?, mtime.nanoseconds().try_into()?),
        dev: metadata.dev().try_into()?,
        ino: metadata.ino().try_into()?,
        mode: metadata.mode(),
        uid: metadata.uid().try_into().unwrap(),
        gid: metadata.gid().try_into().unwrap(),
        file_size: metadata.len().try_into().unwrap(),
        uid: metadata.uid().try_into()?,
        gid: metadata.gid().try_into()?,
        file_size: metadata.len().try_into()?,
        flags: 10, // normal flags for normal file (for the curious: https://git-scm.com/docs/index-format)
        flags_extended: 0, // no extended flags
        path: rel_file_path.to_str().unwrap().into(),
        id: blob,
    };

    index.add(&new_entry)?;
    })?;

    Ok(())
}
@@ -2,4 +2,4 @@ export * as crdt from "./crdt";
export * as database from "./database";
export * as projects from "./projects";
export * as log from "./log";
export * as session from "./session";
export * as session from "./sessions";
@@ -1,3 +1,5 @@
import { invoke } from "@tauri-apps/api";
import { writable } from "svelte/store";
import type { Delta } from "./crdt";

export type SessionFile = {
@@ -95,3 +97,14 @@ export let dummySessions: Session[] = [
    ],
  },
];

const list = (params: { projectId: string }) =>
  invoke<Record<string, Delta[]>>("list_sessions", params);

export default async (params: { projectId: string }) => {
  const init = await list(params);
  const store = writable(init);
  return {
    subscribe: store.subscribe,
  };
};
@@ -4,7 +4,7 @@ import { derived, readable } from "svelte/store";
import type { LayoutLoad } from "./$types";
// import crdt from "$lib/crdt";
import { building } from "$app/environment";
import type { Session } from "$lib/session";
import type { Session } from "$lib/sessions";

export const prerender = false;

@@ -12,33 +12,35 @@ export const load: LayoutLoad = async ({ parent, params }) => {
  const { projects } = await parent();
  const deltas = building
    ? readable<Record<string, Delta[]>>({})
    : await (await import("$lib/crdt")).default({ projectId: params.id })
    : await (await import("$lib/crdt")).default({ projectId: params.id });
  const sessions = building
    ? readable<Session[]>([])
    : await (await import("$lib/sessions")).default({ projectId: params.id });

  return {
    testSessions: sessions,
    project: derived(projects, (projects) =>
      projects.find((project) => project.id === params.id)
    ),
    deltas,
    sessions: derived(deltas, (deltas) => {
      const files = Object.entries(deltas).map(([key, value]) => (
        {
          name: key,
          path: key, // TODO
          linesTouched: 0, // TODO
          numberOfEdits: 0, // TODO
          deltas: value,
        }
      ))
      const files = Object.entries(deltas).map(([key, value]) => ({
        name: key,
        path: key, // TODO
        linesTouched: 0, // TODO
        numberOfEdits: 0, // TODO
        deltas: value,
      }));
      const infiniteSession: Session = {
        hash: "1-a1b2c3d4e5f6g7h8i9j0", // TODO: set this when we have a snapshot
        startTime: 0, // TODO: set this when we have a snapshot
        endTime: 0, // TODO: set this when we have a snapshot
        branchName: "infinite-session-x", // TODO: set this when we have a snapshot
        files: files,
        activities: [] // TODO: set this when we have activities (e.g. push, commit, etc.)
      }
        activities: [], // TODO: set this when we have activities (e.g. push, commit, etc.)
      };
      // TODO: until we have multiple snapshots, putting all crdt changes into one session
      return [infiniteSession]
      return [infiniteSession];
    }),
  };
};
@@ -3,14 +3,13 @@
  import { TimelineDay } from "$lib/components/timeline";

  export let data: PageData;
  const { project, sessions } = data;
  const { project, sessions, testSessions } = data;

  $: console.log($testSessions);
</script>

<div>
  {#if $project}
    <TimelineDay
      projectId={$project?.id}
      sessions={$sessions}
    />
    <TimelineDay projectId={$project?.id} sessions={$sessions} />
  {/if}
</div>