collect filemonitor events into lists to avoid excessive recomputation.

Previously, each file change — both in `.git` and in the worktree — would
cause a complete recomputation. This computation included opening a git
repository at least once (probably more often) to make an 'is-ignored' check.

The latter is very expensive in `git2` and gets more expensive the more
files there are.

Now the repository is opened when needed, and we re-use it for all applicable
file paths.
This commit is contained in:
Sebastian Thiel 2024-04-13 19:16:25 +02:00
parent af225bd9e0
commit 2e969d1507
No known key found for this signature in database
GPG Key ID: 9CB5EE7895E8268B
9 changed files with 308 additions and 223 deletions

View File

@ -26,7 +26,7 @@ impl<'writer> DeltasWriter<'writer> {
self.writer
.write_string(PathBuf::from("session/deltas").join(path), &raw_deltas)?;
tracing::debug!(
tracing::trace!(
project_id = %self.repository.get_project_id(),
path = %path.display(),
"wrote deltas"
@ -43,7 +43,7 @@ impl<'writer> DeltasWriter<'writer> {
let path = path.as_ref();
self.writer.remove(PathBuf::from("session/wd").join(path))?;
tracing::debug!(
tracing::trace!(
project_id = %self.repository.get_project_id(),
path = %path.display(),
"deleted session wd file"
@ -61,7 +61,7 @@ impl<'writer> DeltasWriter<'writer> {
self.writer
.write_string(PathBuf::from("session/wd").join(path), contents)?;
tracing::debug!(
tracing::trace!(
project_id = %self.repository.get_project_id(),
path = %path.display(),
"wrote session wd file"

View File

@ -66,6 +66,8 @@ pub struct Project {
pub id: ProjectId,
pub title: String,
pub description: Option<String>,
// TODO(ST): Keep track of the `git_dir` separately and use it, particularly in `file_monitor.rs` (#3062)
/// The worktree path of the projects repository.
pub path: path::PathBuf,
#[serde(default)]
pub preferred_key: AuthKey,

View File

@ -20,7 +20,7 @@ impl Event {
app_handle
.emit_all(&self.name, Some(&self.payload))
.context("emit event")?;
tracing::debug!(event_name = self.name);
tracing::trace!(event_name = self.name);
Ok(())
}

View File

@ -1,4 +1,5 @@
use std::{fmt::Display, path};
use std::fmt::Display;
use std::path::PathBuf;
use gitbutler_core::{projects::ProjectId, sessions};
@ -12,8 +13,8 @@ pub(super) enum InternalEvent {
PushGitbutlerData(ProjectId),
// From file monitor
GitFileChange(ProjectId, path::PathBuf),
ProjectFileChange(ProjectId, path::PathBuf),
GitFilesChange(ProjectId, Vec<PathBuf>),
ProjectFilesChange(ProjectId, Vec<PathBuf>),
}
/// This type captures all operations that can be fed into a watcher that runs in the background.
@ -58,14 +59,40 @@ impl Display for InternalEvent {
InternalEvent::Flush(project_id, session) => {
write!(f, "Flush({}, {})", project_id, session.id)
}
InternalEvent::GitFileChange(project_id, path) => {
write!(f, "GitFileChange({}, {})", project_id, path.display())
InternalEvent::GitFilesChange(project_id, paths) => {
write!(
f,
"GitFileChange({}, {})",
project_id,
comma_separated_paths(paths)
)
}
InternalEvent::ProjectFileChange(project_id, path) => {
write!(f, "ProjectFileChange({}, {})", project_id, path.display())
InternalEvent::ProjectFilesChange(project_id, paths) => {
write!(
f,
"ProjectFileChange({}, {})",
project_id,
comma_separated_paths(paths)
)
}
InternalEvent::CalculateVirtualBranches(pid) => write!(f, "VirtualBranch({})", pid),
InternalEvent::PushGitbutlerData(pid) => write!(f, "PushGitbutlerData({})", pid),
}
}
}
/// Render up to five of `paths` as a comma-separated listing, appending a
/// `[…N more]` suffix when further paths were omitted.
///
/// Paths that are not valid UTF-8 are silently skipped from the listing
/// (but still counted towards the total used for the suffix).
fn comma_separated_paths(paths: &[PathBuf]) -> String {
    const MAX_LISTING: usize = 5;
    let mut shown = Vec::with_capacity(MAX_LISTING.min(paths.len()));
    for path in paths.iter().take(MAX_LISTING) {
        if let Some(utf8) = path.to_str() {
            shown.push(utf8);
        }
    }
    let listing = shown.join(", ");
    match paths.len().saturating_sub(MAX_LISTING) {
        0 => listing,
        more => format!("{listing} […{more} more]"),
    }
}

View File

@ -1,3 +1,4 @@
use std::collections::HashSet;
use std::path::Path;
use std::{path, time::Duration};
@ -7,6 +8,7 @@ use gitbutler_core::{git, projects::ProjectId};
use notify::Watcher;
use notify_debouncer_full::new_debouncer;
use tokio::task;
use tracing::Level;
/// The timeout for debouncing file change events.
/// This is used to prevent multiple events from being sent for a single file change.
@ -66,57 +68,109 @@ pub fn spawn(
})
.context("failed to start watcher")?;
let repo = git::Repository::open(worktree_path).context(format!(
"failed to open project repository: {}",
worktree_path.display()
))?;
tracing::debug!(%project_id, "file watcher started");
let path = worktree_path.to_owned();
let worktree_path = worktree_path.to_owned();
task::spawn_blocking(move || {
tracing::debug!(%project_id, "file watcher started");
let _debouncer = debouncer;
let _runtime = tracing::span!(Level::INFO, "file monitor", %project_id ).entered();
'outer: for result in notify_rx {
let stats = tracing::span!(
Level::INFO,
"handle debounced events",
ignored = tracing::field::Empty,
project = tracing::field::Empty,
project_dedup = tracing::field::Empty,
git = tracing::field::Empty,
git_dedup = tracing::field::Empty,
git_noop = tracing::field::Empty,
fs_events = tracing::field::Empty,
)
.entered();
let (mut ignored, mut git_noop) = (0, 0);
match result {
Err(err) => {
tracing::error!(?err, "file watcher error");
tracing::error!(?err, "ignored file watcher error");
}
Ok(events) => {
let file_paths = events
let maybe_repo = git::Repository::open(&worktree_path).with_context(
|| {
format!(
"failed to open project repository: {}",
worktree_path.display()
)
},
).map(Some).unwrap_or_else(|err| {
tracing::error!(?err, "will consider changes to all files as repository couldn't be opened");
None
});
let num_events = events.len();
let classified_file_paths = events
.into_iter()
.filter(|event| is_interesting_kind(event.kind))
.flat_map(|event| event.event.paths)
.filter(|file| is_interesting_file(&repo, file));
for file_path in file_paths {
match file_path.strip_prefix(&path) {
Ok(relative_file_path) => {
if relative_file_path.as_os_str().is_empty() {
continue;
}
let event = if let Ok(stripped) =
relative_file_path.strip_prefix(".git")
{
InternalEvent::GitFileChange(project_id, stripped.to_owned())
} else {
InternalEvent::ProjectFileChange(
project_id,
relative_file_path.to_path_buf(),
)
};
if out.send(event).is_err() {
tracing::info!("channel closed - stopping file watcher");
break 'outer;
.map(|file| {
let kind = maybe_repo
.as_ref()
.map_or(FileKind::Project, |repo| classify_file(repo, &file));
(file, kind)
});
let (mut stripped_git_paths, mut worktree_relative_paths) =
(HashSet::new(), HashSet::new());
for (file_path, kind) in classified_file_paths {
match kind {
FileKind::ProjectIgnored => ignored += 1,
FileKind::GitUninteresting => git_noop += 1,
FileKind::Project | FileKind::Git => {
match file_path.strip_prefix(&worktree_path) {
Ok(relative_file_path) => {
if relative_file_path.as_os_str().is_empty() {
continue;
}
if let Ok(stripped) =
relative_file_path.strip_prefix(".git")
{
stripped_git_paths.insert(stripped.to_owned());
} else {
worktree_relative_paths
.insert(relative_file_path.to_owned());
};
}
Err(err) => {
tracing::error!(%project_id, ?err, "failed to strip prefix");
}
}
}
Err(err) => {
tracing::error!(%project_id, ?err, "failed to strip prefix");
}
}
}
stats.record("fs_events", num_events);
stats.record("ignored", ignored);
stats.record("git_noop", git_noop);
stats.record("git", stripped_git_paths.len());
stats.record("project", worktree_relative_paths.len());
if !stripped_git_paths.is_empty() {
let paths_dedup: Vec<_> = stripped_git_paths.into_iter().collect();
stats.record("git_dedup", paths_dedup.len());
let event = InternalEvent::GitFilesChange(project_id, paths_dedup);
if out.send(event).is_err() {
tracing::info!("channel closed - stopping file watcher");
break 'outer;
}
}
if !worktree_relative_paths.is_empty() {
let paths_dedup: Vec<_> = worktree_relative_paths.into_iter().collect();
stats.record("project_dedup", paths_dedup.len());
let event = InternalEvent::ProjectFilesChange(project_id, paths_dedup);
if out.send(event).is_err() {
tracing::info!("channel closed - stopping file watcher");
break 'outer;
}
}
}
}
}
tracing::debug!(%project_id, "file watcher stopped");
});
Ok(())
}
@ -140,15 +194,33 @@ fn is_interesting_kind(kind: notify::EventKind) -> bool {
)
}
fn is_interesting_file(git_repo: &git::Repository, file_path: &Path) -> bool {
if file_path.starts_with(git_repo.path()) {
let check_file_path = file_path.strip_prefix(git_repo.path()).unwrap();
check_file_path.ends_with("FETCH_HEAD")
/// A classification for a changed file, as computed by `classify_file`.
enum FileKind {
    /// A relevant bookkeeping file in the `.git` repository of the current
    /// project itself (e.g. `HEAD`, `index`, `FETCH_HEAD`).
    Git,
    /// Like `Git`, but shouldn't have any effect — such changes are only
    /// counted and never forwarded as events.
    GitUninteresting,
    /// A file in the worktree of the current project.
    Project,
    /// A file that was ignored in the project, and thus shouldn't trigger a computation.
    ProjectIgnored,
}
/// Classify `file_path` by its relation to `git_repo`.
///
/// Paths inside the `.git` directory are `Git` only if they are one of the
/// bookkeeping files the handler reacts to; everything else under `.git` is
/// `GitUninteresting`. Worktree paths are `Project` unless ignored.
fn classify_file(git_repo: &git::Repository, file_path: &Path) -> FileKind {
    if let Ok(check_file_path) = file_path.strip_prefix(git_repo.path()) {
        // Only a handful of `.git` files can affect application state.
        if check_file_path.ends_with("FETCH_HEAD")
            || check_file_path.eq(path::Path::new("logs/HEAD"))
            || check_file_path.eq(path::Path::new("HEAD"))
            || check_file_path.eq(path::Path::new("GB_FLUSH"))
            || check_file_path.eq(path::Path::new("index"))
        {
            FileKind::Git
        } else {
            FileKind::GitUninteresting
        }
    } else if git_repo.is_path_ignored(file_path).unwrap_or(false) {
        // NOTE(review): an error from the ignore check is treated as
        // not-ignored, so on failure we err on the side of recomputing.
        FileKind::ProjectIgnored
    } else {
        FileKind::Project
    }
}

View File

@ -44,7 +44,7 @@ pub struct Handler {
deltas_db: deltas::Database,
/// A rate-limiter for the `is-ignored` computation
is_ignored_limit: Arc<RateLimiter<NotKeyed, InMemoryState, QuantaClock>>,
recalc_all_limit: Arc<RateLimiter<NotKeyed, InMemoryState, QuantaClock>>,
/// A function to send events - decoupled from app-handle for testing purposes.
#[allow(clippy::type_complexity)]
@ -105,7 +105,7 @@ impl Handler {
Arc::new(RateLimiter::direct(quota))
};
// There could be an application (e.g an IDE) which is constantly writing, so the threshold cant be too high
let is_ignored_limit = {
let recalc_all_limit = {
let quota = Quota::with_period(Duration::from_millis(5)).expect("valid quota");
Arc::new(RateLimiter::direct(quota))
};
@ -119,26 +119,25 @@ impl Handler {
calc_vbranch_limit,
sessions_db,
deltas_db,
is_ignored_limit,
recalc_all_limit,
send_event: Arc::new(send_event),
}
}
/// Handle the events that come in from the filesystem, or the public API.
#[instrument(skip(self, now), fields(event = %event), level = "debug", err(Debug))]
#[instrument(skip(self, now), fields(event = %event), err(Debug))]
pub(super) async fn handle(
&self,
event: events::InternalEvent,
now: time::SystemTime,
) -> Result<()> {
match event {
events::InternalEvent::ProjectFileChange(project_id, path) => {
self.recalculate_everything_unless_ignored(path, project_id)
.await
events::InternalEvent::ProjectFilesChange(project_id, path) => {
self.recalculate_everything(path, project_id).await
}
events::InternalEvent::GitFileChange(project_id, path) => self
.git_file_change(path, project_id)
events::InternalEvent::GitFilesChange(project_id, paths) => self
.git_files_change(paths, project_id)
.await
.context("failed to handle git file change event"),
@ -165,24 +164,6 @@ impl Handler {
}
impl Handler {
fn session_delta(
&self,
project_id: ProjectId,
session_id: SessionId,
path: &Path,
delta: &deltas::Delta,
) -> Result<()> {
self.index_deltas(project_id, session_id, path, std::slice::from_ref(delta))
.context("failed to index deltas")?;
self.emit_app_event(&app_events::Event::deltas(
project_id,
session_id,
std::slice::from_ref(delta),
path,
))
}
fn emit_app_event(&self, event: &crate::events::Event) -> Result<()> {
(self.send_event)(event).context("failed to send event")
}
@ -194,7 +175,7 @@ impl Handler {
file_path: &Path,
contents: Option<&reader::Content>,
) -> Result<()> {
(self.send_event)(&app_events::Event::file(
self.emit_app_event(&app_events::Event::file(
project_id,
session_id,
&file_path.display().to_string(),
@ -243,8 +224,10 @@ impl Handler {
Ok(())
}
#[instrument(skip(self, project_id))]
async fn calculate_virtual_branches(&self, project_id: ProjectId) -> Result<()> {
if self.calc_vbranch_limit.check().is_err() {
tracing::warn!("rate limited");
return Ok(());
}
match self
@ -360,32 +343,19 @@ impl Handler {
Ok(())
}
// TODO(ST): figure out if this is needed, it's going to be very slow. The file monitor already filters,
// however, it uses a cached project which might not see changes to the .gitignore files.
// so opening a fresh repo (or doing the minimal work to get there) seems to be required at first,
// but one should handle all paths at once.
async fn recalculate_everything_unless_ignored(
#[instrument(skip(self, paths, project_id), fields(paths = paths.len()))]
async fn recalculate_everything(
&self,
path: PathBuf,
paths: Vec<PathBuf>,
project_id: ProjectId,
) -> Result<()> {
if self.is_ignored_limit.check().is_err() {
if self.recalc_all_limit.check().is_err() {
tracing::warn!("rate limited");
return Ok(());
}
let project = self
.projects
.get(&project_id)
.context("failed to get project")?;
let project_repository = project_repository::Repository::open(&project)
.with_context(|| "failed to open project repository for project")?;
if project_repository.is_path_ignored(&path).unwrap_or(false) {
return Ok(());
}
let calc_deltas = tokio::task::spawn_blocking({
let this = self.clone();
move || this.calculate_deltas(path, project_id)
move || this.calculate_deltas(paths, project_id)
});
self.calculate_virtual_branches(project_id).await?;
calc_deltas.await??;
@ -394,10 +364,13 @@ impl Handler {
pub async fn git_file_change(
&self,
path: impl AsRef<Path>,
path: impl Into<PathBuf>,
project_id: ProjectId,
) -> Result<()> {
let path = path.as_ref();
self.git_files_change(vec![path.into()], project_id).await
}
pub async fn git_files_change(&self, paths: Vec<PathBuf>, project_id: ProjectId) -> Result<()> {
let project = self
.projects
.get(&project_id)
@ -407,76 +380,74 @@ impl Handler {
.context("failed to open project repository for project")
};
let Some(file_name) = path.to_str() else {
return Ok(());
};
match file_name {
"FETCH_HEAD" => {
self.emit_app_event(&app_events::Event::git_fetch(project_id))?;
self.calculate_virtual_branches(project_id).await?;
Ok(())
}
"logs/HEAD" => {
self.emit_app_event(&app_events::Event::git_activity(project.id))?;
Ok(())
}
"GB_FLUSH" => {
let user = self.users.get_user()?;
let project_repository = open_projects_repository()?;
let gb_repo = gb_repository::Repository::open(
&self.local_data_dir,
&project_repository,
user.as_ref(),
)
.context("failed to open repository")?;
for path in paths {
let Some(file_name) = path.to_str() else {
continue;
};
match file_name {
"FETCH_HEAD" => {
self.emit_app_event(&app_events::Event::git_fetch(project_id))?;
self.calculate_virtual_branches(project_id).await?;
}
"logs/HEAD" => {
self.emit_app_event(&app_events::Event::git_activity(project.id))?;
}
"GB_FLUSH" => {
let user = self.users.get_user()?;
let project_repository = open_projects_repository()?;
let gb_repo = gb_repository::Repository::open(
&self.local_data_dir,
&project_repository,
user.as_ref(),
)
.context("failed to open repository")?;
let gb_flush_path = project.path.join(".git/GB_FLUSH");
if gb_flush_path.exists() {
if let Err(err) = std::fs::remove_file(&gb_flush_path) {
tracing::error!(%project_id, path = %gb_flush_path.display(), "GB_FLUSH file delete error: {err}");
}
let gb_flush_path = project.path.join(".git/GB_FLUSH");
if gb_flush_path.exists() {
if let Err(err) = std::fs::remove_file(&gb_flush_path) {
tracing::error!(%project_id, path = %gb_flush_path.display(), "GB_FLUSH file delete error: {err}");
}
if let Some(current_session) = gb_repo
.get_current_session()
.context("failed to get current session")?
{
return self.flush_session(project.id, &current_session).await;
if let Some(current_session) = gb_repo
.get_current_session()
.context("failed to get current session")?
{
self.flush_session(project.id, &current_session).await?;
}
}
}
Ok(())
}
"HEAD" => {
let project_repository = open_projects_repository()?;
let head_ref = project_repository
.get_head()
.context("failed to get head")?;
let head_ref_name = head_ref.name().context("failed to get head name")?;
if head_ref_name.to_string() != "refs/heads/gitbutler/integration" {
let mut integration_reference = project_repository
.git_repository
.find_reference(&git::Refname::from(git::LocalRefname::new(
"gitbutler/integration",
None,
)))?;
integration_reference.delete()?;
"HEAD" => {
let project_repository = open_projects_repository()?;
let head_ref = project_repository
.get_head()
.context("failed to get head")?;
let head_ref_name = head_ref.name().context("failed to get head name")?;
if head_ref_name.to_string() != "refs/heads/gitbutler/integration" {
let mut integration_reference = project_repository
.git_repository
.find_reference(&git::Refname::from(git::LocalRefname::new(
"gitbutler/integration",
None,
)))?;
integration_reference.delete()?;
}
if let Some(head) = head_ref.name() {
self.send_analytics_event_none_blocking(&analytics::Event::HeadChange {
project_id,
reference_name: head_ref_name.to_string(),
})?;
self.emit_app_event(&app_events::Event::git_head(
project_id,
&head.to_string(),
))?;
}
}
if let Some(head) = head_ref.name() {
self.send_analytics_event_none_blocking(&analytics::Event::HeadChange {
project_id,
reference_name: head_ref_name.to_string(),
})?;
self.emit_app_event(&app_events::Event::git_head(
project_id,
&head.to_string(),
))?;
"index" => {
self.emit_app_event(&app_events::Event::git_index(project.id))?;
}
Ok(())
_ => {}
}
"index" => {
self.emit_app_event(&app_events::Event::git_index(project.id))?;
Ok(())
}
_ => Ok(()),
}
Ok(())
}
}

View File

@ -2,10 +2,12 @@ use anyhow::{Context, Result};
use gitbutler_core::{
deltas, gb_repository, project_repository, projects::ProjectId, reader, sessions,
};
use std::path::Path;
use std::path::{Path, PathBuf};
use tracing::instrument;
impl super::Handler {
pub fn calculate_deltas(&self, path: impl AsRef<Path>, project_id: ProjectId) -> Result<()> {
#[instrument(skip(self, paths, project_id))]
pub fn calculate_deltas(&self, paths: Vec<PathBuf>, project_id: ProjectId) -> Result<()> {
let project = self
.projects
.get(&project_id)
@ -35,74 +37,83 @@ impl super::Handler {
}
}
let path = path.as_ref();
let current_wd_file_content =
match Self::file_content_if_not_ignored(&project_repository, path) {
Ok(content) => Some(content),
Err(reader::Error::NotFound) => None,
Err(err) => Err(err).context("failed to get file content")?,
};
let current_session = gb_repository
.get_or_create_current_session()
.context("failed to get or create current session")?;
let current_session_reader = sessions::Reader::open(&gb_repository, &current_session)
.context("failed to get session reader")?;
let latest_file_content = match current_session_reader.file(path) {
Ok(content) => Some(content),
Err(reader::Error::NotFound) => None,
Err(err) => Err(err).context("failed to get file content")?,
};
let deltas_reader = deltas::Reader::new(&current_session_reader);
let current_deltas = deltas_reader
.read_file(path)
.context("failed to get file deltas")?;
let mut text_doc = deltas::Document::new(
latest_file_content.as_ref(),
current_deltas.unwrap_or_default(),
)?;
let new_delta = text_doc
.update(current_wd_file_content.as_ref())
.context("failed to calculate new deltas")?;
let Some(new_delta) = new_delta else {
tracing::debug!(%project_id, path = %path.display(), "no new deltas, ignoring");
return Ok(());
};
let deltas = text_doc.get_deltas();
let writer = deltas::Writer::new(&gb_repository).context("failed to open deltas writer")?;
writer
.write(path, &deltas)
.context("failed to write deltas")?;
match &current_wd_file_content {
Some(reader::Content::UTF8(text)) => writer.write_wd_file(path, text),
Some(_) => writer.write_wd_file(path, ""),
None => writer.remove_wd_file(path),
}?;
let num_paths = paths.len();
let mut num_no_delta = 0;
std::thread::scope(|_scope| -> Result<()> {
for path in paths {
let path = path.as_path();
let current_wd_file_content = match Self::file_content(&project_repository, path) {
Ok(content) => Some(content),
Err(reader::Error::NotFound) => None,
Err(err) => Err(err).context("failed to get file content")?,
};
let latest_file_content = match current_session_reader.file(path) {
Ok(content) => Some(content),
Err(reader::Error::NotFound) => None,
Err(err) => Err(err).context("failed to get file content")?,
};
let current_deltas = deltas_reader
.read_file(path)
.context("failed to get file deltas")?;
let mut text_doc = deltas::Document::new(
latest_file_content.as_ref(),
current_deltas.unwrap_or_default(),
)?;
let new_delta = text_doc
.update(current_wd_file_content.as_ref())
.context("failed to calculate new deltas")?;
self.emit_session_file(
project_id,
current_session.id,
path,
latest_file_content.as_ref(),
)?;
let Some(new_delta) = new_delta else {
num_no_delta += 1;
continue;
};
let deltas = text_doc.get_deltas();
writer
.write(path, &deltas)
.context("failed to write deltas")?;
match &current_wd_file_content {
Some(reader::Content::UTF8(text)) => writer.write_wd_file(path, text),
Some(_) => writer.write_wd_file(path, ""),
None => writer.remove_wd_file(path),
}?;
let session_id = current_session.id;
self.emit_session_file(project_id, session_id, path, latest_file_content.as_ref())?;
self.index_deltas(
project_id,
session_id,
path,
std::slice::from_ref(&new_delta),
)
.context("failed to index deltas")?;
self.emit_app_event(&crate::events::Event::deltas(
project_id,
session_id,
std::slice::from_ref(&new_delta),
path,
))?;
}
Ok(())
})?;
self.index_session(project_id, &current_session)?;
self.session_delta(project_id, current_session.id, path, &new_delta)?;
tracing::debug!(%project_id, paths_without_deltas = num_no_delta, paths_with_delta = num_paths - num_no_delta);
Ok(())
}
// TODO(ST): ignored checks shouldn't be necessary here as `path` is only here because it's not ignored.
// Also it seems odd it fails if the file is ignored, and that it uses `reader::Error` even though
// itself just uses `std::io::Error`.
fn file_content_if_not_ignored(
fn file_content(
project_repository: &project_repository::Repository,
path: &Path,
) -> Result<reader::Content, reader::Error> {
if project_repository.is_path_ignored(path).unwrap_or(false) {
return Err(reader::Error::NotFound);
}
let full_path = project_repository.project().path.join(path);
if !full_path.exists() {
return Err(reader::Error::NotFound);

View File

@ -34,10 +34,10 @@ impl State {
/// Calculate deltas for a single `path` by delegating to the batch
/// `calculate_deltas` API with a one-element list.
pub(super) fn calculate_delta(
    &self,
    path: impl Into<PathBuf>,
    project_id: ProjectId,
) -> Result<()> {
    self.inner.calculate_deltas(vec![path.into()], project_id)?;
    Ok(())
}
}

View File

@ -33,8 +33,8 @@ async fn flush_session() -> Result<()> {
let events = fixture.events();
assert_eq!(events.len(), 4);
assert!(events[0].name().ends_with("/files"));
assert!(events[1].name().ends_with("/sessions"));
assert!(events[2].name().ends_with("/deltas"));
assert!(events[1].name().ends_with("/deltas"));
assert!(events[2].name().ends_with("/sessions"));
assert!(events[3].name().ends_with("/sessions"));
Ok(())
}
@ -58,8 +58,8 @@ async fn do_not_flush_session_if_file_is_missing() -> Result<()> {
let events = fixture.events();
assert_eq!(events.len(), 3);
assert!(events[0].name().ends_with("/files"));
assert!(events[1].name().ends_with("/sessions"));
assert!(events[2].name().ends_with("/deltas"));
assert!(events[1].name().ends_with("/deltas"));
assert!(events[2].name().ends_with("/sessions"));
Ok(())
}
@ -87,6 +87,8 @@ fn create_new_session_via_new_file(
fs::write(project.path.join("test.txt"), "test").unwrap();
let handler = fixture.new_handler();
handler.calculate_deltas("test.txt", project.id).unwrap();
handler
.calculate_deltas(vec!["test.txt".into()], project.id)
.unwrap();
handler
}